answer
stringlengths 15
1.25M
|
|---|
#include "test_stl.h"
#include <QSet>
#include <QtTest/QtTest>
#include <vector>
#include <cstring>
#include <cwchar>
#include "../src/utils/stl.h"
// Unit test for utils::BufferArray (narrow-char buffer wrapper).
// Exercises construction from std::string, element access, resizing,
// shrink_to_fit and (in C++11 mode) move assignment and construction
// from a C string literal.
void STLTest::testBufferArry() {
    using utils::BufferArray;

    // constructor: buffer holds the string plus its NUL terminator
    std::string test1("123456789abcdefg");
    BufferArray buffer(test1);
    QVERIFY(test1.size() + 1 == buffer.size());
    QVERIFY(buffer.capacity() == buffer.size());
    QVERIFY(strncmp(test1.data(), buffer.data(), test1.size()) == 0);

    // operator[]: must be exercised on the buffer itself — the original
    // asserted test1[i], which tests std::string and can never fail.
    QVERIFY(buffer[0] == '1');
    QVERIFY(buffer[1] == '2');
    QVERIFY(buffer[2] == '3');

    // grow; NOTE(review): the section comment said "reserve" but resize()
    // is called — confirm which BufferArray operation is meant here.
    buffer.resize(30);
    QVERIFY(buffer.capacity() == 30);

    // shrink_to_fit trims capacity back down to size
    buffer.shrink_to_fit();
    QVERIFY(buffer.capacity() == buffer.size());

    // resize down to 9 (8 chars + NUL); capacity is left untouched
    buffer.resize(9);
    std::string test2("12345678");
    QVERIFY(test2.size() + 1 == buffer.size());
    QVERIFY(buffer.capacity() > buffer.size());
    QVERIFY(strncmp(test2.data(), buffer.data(), test2.size()) == 0);

    // shrink_to_fit again after the downsize
    buffer.shrink_to_fit();
    QVERIFY(buffer.capacity() == buffer.size());

#ifdef UTILS_CXX11_MODE
    // move assignment
    std::string test3("gqjdiw913abc_123d");
    BufferArray other_buffer(test3);
    buffer = std::move(other_buffer);
    QVERIFY(test3.size() + 1 == buffer.size());
    QVERIFY(buffer.capacity() == buffer.size());
    QVERIFY(strncmp(test3.data(), buffer.data(), test3.size()) == 0);

    // construction from a C string literal (size includes the NUL)
    const char test_string[] = "abcdefg";
    size_t test_size = sizeof(test_string);
    buffer = BufferArray(test_string);
    QVERIFY(test_size == buffer.size());
    QVERIFY(buffer.capacity() == buffer.size());
    QVERIFY(memcmp(test_string, buffer.data(), test_size) == 0);
#endif
}
// Unit test for utils::WBufferArray (wide-char buffer wrapper).
// Mirrors testBufferArry but with std::wstring / wchar_t data.
void STLTest::testWBufferArry() {
    using utils::WBufferArray;

    // constructor: buffer holds the wide string plus its NUL terminator
    std::wstring test1(L"123456789abcdefg");
    WBufferArray buffer(test1);
    QVERIFY(test1.size() + 1 == buffer.size());
    QVERIFY(buffer.capacity() == buffer.size());
    QVERIFY(wcsncmp(test1.data(), buffer.data(), test1.size()) == 0);

    // operator[]: must be exercised on the buffer itself — the original
    // asserted test1[i], which tests std::wstring and can never fail.
    QVERIFY(buffer[0] == L'1');
    QVERIFY(buffer[1] == L'2');
    QVERIFY(buffer[2] == L'3');

    // grow; NOTE(review): the section comment said "reserve" but resize()
    // is called — confirm which WBufferArray operation is meant here.
    buffer.resize(30);
    QVERIFY(buffer.capacity() == 30);

    // shrink_to_fit trims capacity back down to size
    buffer.shrink_to_fit();
    QVERIFY(buffer.capacity() == buffer.size());

    // resize down to 9 elements (8 chars + NUL); capacity is left untouched
    buffer.resize(9);
    std::wstring test2(L"12345678");
    QVERIFY(test2.size() + 1 == buffer.size());
    QVERIFY(buffer.capacity() > buffer.size());
    QVERIFY(wcsncmp(test2.data(), buffer.data(), test2.size()) == 0);

#ifdef UTILS_CXX11_MODE
    // move assignment
    std::wstring test3(L"gqjdiw913abc_123d");
    WBufferArray other_buffer(test3);
    buffer = std::move(other_buffer);
    QVERIFY(test3.size() + 1 == buffer.size());
    QVERIFY(buffer.capacity() == buffer.size());
    QVERIFY(wcsncmp(test3.data(), buffer.data(), test3.size()) == 0);

    // construction from a wide C string literal; test_size counts wchar_t
    // units (including the NUL), not bytes
    const wchar_t test_string[] = L"abcdefg";
    size_t test_size = sizeof(test_string) / sizeof(wchar_t);
    buffer = WBufferArray(test_string);
    QVERIFY(test_size == buffer.size());
    QVERIFY(buffer.capacity() == buffer.size());
    // compare wchar_t units, not bytes: the original memcmp compared only
    // test_size BYTES, i.e. a fraction of the wide-char array
    QVERIFY(wmemcmp(test_string, buffer.data(), test_size) == 0);
#endif
}
// Generate a main() that runs all STLTest slots without creating a QApplication.
QTEST_APPLESS_MAIN(STLTest)
|
//modification, are permitted provided that the following conditions
//are met:
// with the distribution.
// Neither the name of 3Dlabs Inc. Ltd. nor the names of its
// contributors may be used to endorse or promote products derived
//"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
//LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
//FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
//INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
//BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
//LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
//CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
//LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
//ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
//POSSIBILITY OF SUCH DAMAGE.
#ifndef <API key>
#define <API key>
#include "../Include/intermediate.h"
#include "../Public/ShaderLang.h"
#include "Versions.h"
#include <algorithm>
#include <set>
class TInfoSink;
namespace glslang {
struct TVectorFields {
TVectorFields() { }
TVectorFields(int c0, int c1, int c2, int c3) : num(4)
{
offsets[0] = c0;
offsets[1] = c1;
offsets[2] = c2;
offsets[3] = c3;
}
int offsets[4];
int num;
};
// Some helper structures for TIntermediate. Their contents are encapsulated
// by TIntermediate.
// Used for detecting recursion: a "call" is a pair: <caller, callee>.
struct TCall {
    // The three traversal flags were previously left uninitialized; start
    // them cleared so graph walks see a deterministic initial state.
    TCall(const TString& pCaller, const TString& pCallee)
        : caller(pCaller), callee(pCallee),
          visited(false), currentPath(false), errorGiven(false) { }
    TString caller;
    TString callee;
    bool visited;     // presumably: node already processed by a graph walk — see addToCallGraph users
    bool currentPath; // presumably: node is on the active traversal path (cycle detection)
    bool errorGiven;  // recursion error already reported for this call
};
// A generic 1-D range.
struct TRange {
TRange(int start, int last) : start(start), last(last) { }
bool overlap(const TRange& rhs) const
{
return last >= rhs.start && start <= rhs.last;
}
int start;
int last;
};
// An IO range is a 3-D rectangle; the set of (location, component, index)
// triples all lying within the same location range, component range, and
// index value. Locations don't alias unless all other dimensions of their
// range overlap.
struct TIoRange {
    TIoRange(TRange location, TRange component, TBasicType basicType, int index)
        : location(location), component(component), basicType(basicType), index(index) { }
    // Two IO ranges alias only when every dimension collides.
    bool overlap(const TIoRange& rhs) const
    {
        if (index != rhs.index)
            return false;
        if (!location.overlap(rhs.location))
            return false;
        return component.overlap(rhs.component);
    }
    TRange location;
    TRange component;
    TBasicType basicType;
    int index;
};
// An offset range is a 2-D rectangle; the set of (binding, offset) pairs
// all lying within the same binding and offset range.
struct TOffsetRange {
    TOffsetRange(TRange binding, TRange offset)
        : binding(binding), offset(offset) { }
    // Rectangles intersect exactly when both axes intersect.
    bool overlap(const TOffsetRange& rhs) const
    {
        const bool bindingsMeet = binding.overlap(rhs.binding);
        const bool offsetsMeet = offset.overlap(rhs.offset);
        return bindingsMeet && offsetsMeet;
    }
    TRange binding;
    TRange offset;
};
// Things that need to be tracked per xfb buffer.
struct TXfbBuffer {
    TXfbBuffer()
        : stride(TQualifier::layoutXfbStrideEnd),
          implicitStride(0),
          containsDouble(false)
    {
    }
    std::vector<TRange> ranges;  // byte offsets that have already been assigned
    unsigned int stride;         // declared xfb_stride; layoutXfbStrideEnd means "not set"
    unsigned int implicitStride; // implicitly accumulated stride; starts at 0 — presumably grown as members are assigned, verify against addXfbBufferOffset
    bool containsDouble;         // whether a double-precision value is captured in this buffer
};
class TSymbolTable;
class TSymbol;
class TVariable;
// Set of helper functions to help parse and build the tree.
// The primary repository for one shader stage: owns the AST root and
// accumulates all cross-node information (entry point, requested extensions,
// layout state, xfb buffers, call graph, used locations/bindings/offsets).
// NOTE(review): several member names below appear as "<API key>" — they were
// redacted/garbled in this copy of the header; the surrounding code is kept
// unchanged.
class TIntermediate {
public:
// Stage is fixed at construction; version/profile/source may be set later.
explicit TIntermediate(EShLanguage l, int v = 0, EProfile p = ENoProfile) :
source(EShSourceNone), language(l), profile(p), version(v), treeRoot(0),
numMains(0), numErrors(0), numPushConstants(0), recursive(false),
invocations(TQualifier::layoutNotSet), vertices(TQualifier::layoutNotSet), inputPrimitive(ElgNone), outputPrimitive(ElgNone),
pixelCenterInteger(false), originUpperLeft(false),
vertexSpacing(EvsNone), vertexOrder(EvoNone), pointMode(false), earlyFragmentTests(false), depthLayout(EldNone), depthReplacing(false), blendEquations(0),
multiStream(false), xfbMode(false)
{
// Local size defaults to 1x1x1; spec-constant ids start unset.
localSize[0] = 1;
localSize[1] = 1;
localSize[2] = 1;
localSizeSpecId[0] = TQualifier::layoutNotSet;
localSizeSpecId[1] = TQualifier::layoutNotSet;
localSizeSpecId[2] = TQualifier::layoutNotSet;
// One tracking slot per possible xfb buffer binding.
xfbBuffers.resize(TQualifier::layoutXfbBufferEnd);
}
// ---- simple setters/getters for per-stage metadata ----
void setLimits(const TBuiltInResource& r) { resources = r; }
bool postProcess(TIntermNode*, EShLanguage);
void output(TInfoSink&, bool tree);
void removeTree();
void setSource(EShSource s) { source = s; }
EShSource getSource() const { return source; }
void setEntryPoint(const char* ep) { entryPoint = ep; }
const std::string& getEntryPoint() const { return entryPoint; }
void setVersion(int v) { version = v; }
int getVersion() const { return version; }
void setProfile(EProfile p) { profile = p; }
EProfile getProfile() const { return profile; }
void setSpv(const SpvVersion& s) { spvVersion = s; }
const SpvVersion& getSpv() const { return spvVersion; }
EShLanguage getStage() const { return language; }
void <API key>(const char* extension) { requestedExtensions.insert(extension); }
const std::set<std::string>& <API key>() const { return requestedExtensions; }
void setTreeRoot(TIntermNode* r) { treeRoot = r; }
TIntermNode* getTreeRoot() const { return treeRoot; }
void addMainCount() { ++numMains; }
int getNumMains() const { return numMains; }
int getNumErrors() const { return numErrors; }
void <API key>() { ++numPushConstants; }
bool isRecursive() const { return recursive; }
// ---- AST construction/conversion helpers (bodies live in the .cpp) ----
TIntermSymbol* addSymbol(const TVariable&);
TIntermSymbol* addSymbol(const TVariable&, const TSourceLoc&);
TIntermSymbol* addSymbol(const TType&, const TSourceLoc&);
TIntermTyped* addConversion(TOperator, const TType&, TIntermTyped*) const;
TIntermTyped* addShapeConversion(TOperator, const TType&, TIntermTyped*);
TIntermTyped* addBinaryMath(TOperator, TIntermTyped* left, TIntermTyped* right, TSourceLoc);
TIntermTyped* addAssign(TOperator op, TIntermTyped* left, TIntermTyped* right, TSourceLoc);
TIntermTyped* addIndex(TOperator op, TIntermTyped* base, TIntermTyped* index, TSourceLoc);
TIntermTyped* addUnaryMath(TOperator, TIntermTyped* child, TSourceLoc);
TIntermTyped* <API key>(const TSourceLoc& line, TOperator, bool unary, TIntermNode*, const TType& returnType);
bool <API key>(TBasicType from, TBasicType to, TOperator op = EOpNull) const;
TOperator <API key>(const TType&) const;
TIntermAggregate* growAggregate(TIntermNode* left, TIntermNode* right);
TIntermAggregate* growAggregate(TIntermNode* left, TIntermNode* right, const TSourceLoc&);
TIntermAggregate* makeAggregate(TIntermNode* node);
TIntermAggregate* makeAggregate(TIntermNode* node, const TSourceLoc&);
TIntermTyped* <API key>(TIntermNode*, TOperator, const TType& type, TSourceLoc);
bool areAllChildConst(TIntermAggregate* aggrNode);
TIntermNode* addSelection(TIntermTyped* cond, TIntermNodePair code, const TSourceLoc&);
TIntermTyped* addSelection(TIntermTyped* cond, TIntermTyped* trueBlock, TIntermTyped* falseBlock, const TSourceLoc&);
TIntermTyped* addComma(TIntermTyped* left, TIntermTyped* right, const TSourceLoc&);
TIntermTyped* addMethod(TIntermTyped*, const TType&, const TString*, const TSourceLoc&);
<API key>* addConstantUnion(const TConstUnionArray&, const TType&, const TSourceLoc&, bool literal = false) const;
<API key>* addConstantUnion(int, const TSourceLoc&, bool literal = false) const;
<API key>* addConstantUnion(unsigned int, const TSourceLoc&, bool literal = false) const;
<API key>* addConstantUnion(long long, const TSourceLoc&, bool literal = false) const;
<API key>* addConstantUnion(unsigned long long, const TSourceLoc&, bool literal = false) const;
<API key>* addConstantUnion(bool, const TSourceLoc&, bool literal = false) const;
<API key>* addConstantUnion(double, TBasicType, const TSourceLoc&, bool literal = false) const;
TIntermTyped* <API key>(TBasicType, <API key>*) const;
bool parseConstTree(TIntermNode*, TConstUnionArray, TOperator, const TType&, bool singleConstantParam = false);
TIntermLoop* addLoop(TIntermNode*, TIntermTyped*, TIntermTyped*, bool testFirst, const TSourceLoc&);
TIntermAggregate* addForLoop(TIntermNode*, TIntermNode*, TIntermTyped*, TIntermTyped*, bool testFirst, const TSourceLoc&);
TIntermBranch* addBranch(TOperator, const TSourceLoc&);
TIntermBranch* addBranch(TOperator, TIntermTyped*, const TSourceLoc&);
TIntermTyped* addSwizzle(TVectorFields&, const TSourceLoc&);
// Constant folding (in Constant.cpp)
TIntermTyped* fold(TIntermAggregate* aggrNode);
TIntermTyped* foldConstructor(TIntermAggregate* aggrNode);
TIntermTyped* foldDereference(TIntermTyped* node, int index, const TSourceLoc&);
TIntermTyped* foldSwizzle(TIntermTyped* node, TVectorFields& fields, const TSourceLoc&);
// Tree ops
static const TIntermTyped* findLValueBase(const TIntermTyped*, bool swizzleOkay);
// Linkage related
void <API key>(TIntermAggregate*& linkage, EShLanguage, TSymbolTable&);
void <API key>(TIntermAggregate*& linkage, TSymbolTable&, const TString&);
void <API key>(TIntermAggregate*& linkage, const TSymbol&);
// ---- once-only layout state: each setter stores the value on first call;
// ---- on later calls it returns whether the new value matches the stored one
bool setInvocations(int i)
{
if (invocations != TQualifier::layoutNotSet)
return invocations == i;
invocations = i;
return true;
}
int getInvocations() const { return invocations; }
bool setVertices(int m)
{
if (vertices != TQualifier::layoutNotSet)
return vertices == m;
vertices = m;
return true;
}
int getVertices() const { return vertices; }
bool setInputPrimitive(TLayoutGeometry p)
{
if (inputPrimitive != ElgNone)
return inputPrimitive == p;
inputPrimitive = p;
return true;
}
TLayoutGeometry getInputPrimitive() const { return inputPrimitive; }
bool setVertexSpacing(TVertexSpacing s)
{
if (vertexSpacing != EvsNone)
return vertexSpacing == s;
vertexSpacing = s;
return true;
}
TVertexSpacing getVertexSpacing() const { return vertexSpacing; }
bool setVertexOrder(TVertexOrder o)
{
if (vertexOrder != EvoNone)
return vertexOrder == o;
vertexOrder = o;
return true;
}
TVertexOrder getVertexOrder() const { return vertexOrder; }
void setPointMode() { pointMode = true; }
bool getPointMode() const { return pointMode; }
bool setLocalSize(int dim, int size)
{
// NOTE(review): a stored size of 1 is indistinguishable from the default,
// so an explicitly declared size of 1 can be silently overwritten later.
if (localSize[dim] > 1)
return size == localSize[dim];
localSize[dim] = size;
return true;
}
unsigned int getLocalSize(int dim) const { return localSize[dim]; }
bool setLocalSizeSpecId(int dim, int id)
{
if (localSizeSpecId[dim] != TQualifier::layoutNotSet)
return id == localSizeSpecId[dim];
localSizeSpecId[dim] = id;
return true;
}
int getLocalSizeSpecId(int dim) const { return localSizeSpecId[dim]; }
void setXfbMode() { xfbMode = true; }
bool getXfbMode() const { return xfbMode; }
void setMultiStream() { multiStream = true; }
bool isMultiStream() const { return multiStream; }
bool setOutputPrimitive(TLayoutGeometry p)
{
if (outputPrimitive != ElgNone)
return outputPrimitive == p;
outputPrimitive = p;
return true;
}
TLayoutGeometry getOutputPrimitive() const { return outputPrimitive; }
void setOriginUpperLeft() { originUpperLeft = true; }
bool getOriginUpperLeft() const { return originUpperLeft; }
void <API key>() { pixelCenterInteger = true; }
bool <API key>() const { return pixelCenterInteger; }
void <API key>() { earlyFragmentTests = true; }
bool <API key>() const { return earlyFragmentTests; }
bool setDepth(TLayoutDepth d)
{
if (depthLayout != EldNone)
return depthLayout == d;
depthLayout = d;
return true;
}
TLayoutDepth getDepth() const { return depthLayout; }
void setDepthReplacing() { depthReplacing = true; }
bool isDepthReplacing() const { return depthReplacing; }
// Record one blend equation as a bit in the blendEquations mask.
void addBlendEquation(TBlendEquationShift b) { blendEquations |= (1 << b); }
unsigned int getBlendEquations() const { return blendEquations; }
// ---- whole-program checks and resource accounting ----
void addToCallGraph(TInfoSink&, const TString& caller, const TString& callee);
void merge(TInfoSink&, TIntermediate&);
void finalCheck(TInfoSink&);
void addIoAccessed(const TString& name) { ioAccessed.insert(name); }
bool inIoAccessed(const TString& name) const { return ioAccessed.find(name) != ioAccessed.end(); }
int addUsedLocation(const TQualifier&, const TType&, bool& typeCollision);
int checkLocationRange(int set, const TIoRange& range, const TType&, bool& typeCollision);
int addUsedOffsets(int binding, int offset, int numOffsets);
bool addUsedConstantId(int id);
int <API key>(const TType&) const;
bool setXfbBufferStride(int buffer, unsigned stride)
{
// Same once-only pattern as the layout setters above.
if (xfbBuffers[buffer].stride != TQualifier::layoutXfbStrideEnd)
return xfbBuffers[buffer].stride == stride;
xfbBuffers[buffer].stride = stride;
return true;
}
int addXfbBufferOffset(const TType&);
unsigned int computeTypeXfbSize(const TType&, bool& containsDouble) const;
static int getBaseAlignment(const TType&, int& size, int& stride, bool std140, bool rowMajor);
protected:
TIntermSymbol* addSymbol(int Id, const TString&, const TType&, const TConstUnionArray&, TIntermTyped* subtree, const TSourceLoc&);
void error(TInfoSink& infoSink, const char*);
void mergeBodies(TInfoSink&, TIntermSequence& globals, const TIntermSequence& unitGlobals);
void mergeLinkerObjects(TInfoSink&, TIntermSequence& linkerObjects, const TIntermSequence& unitLinkerObjects);
void <API key>(TType&, const TType&);
void mergeErrorCheck(TInfoSink&, const TIntermSymbol&, const TIntermSymbol&, bool crossStage);
void <API key>(TInfoSink&);
void inOutLocationCheck(TInfoSink&);
TIntermSequence& findLinkerObjects() const;
bool userOutputUsed() const;
static int <API key>(const TType&, int& size);
bool <API key>(const TIntermOperator&) const;
// ---- per-stage state ----
const EShLanguage language; // stage, known at construction time
EShSource source; // source language, known a bit later
std::string entryPoint;
EProfile profile;
int version;
SpvVersion spvVersion;
TIntermNode* treeRoot;
std::set<std::string> requestedExtensions; // cumulation of all enabled or required extensions; not connected to what subset of the shader used them
TBuiltInResource resources;
int numMains;
int numErrors;
int numPushConstants;
bool recursive;
int invocations;
int vertices;
TLayoutGeometry inputPrimitive;
TLayoutGeometry outputPrimitive;
bool pixelCenterInteger;
bool originUpperLeft;
TVertexSpacing vertexSpacing;
TVertexOrder vertexOrder;
bool pointMode;
// NOTE(review): declared int[] but getLocalSize() returns unsigned int —
// relies on implicit conversion; confirm sizes are never negative.
int localSize[3];
int localSizeSpecId[3];
bool earlyFragmentTests;
TLayoutDepth depthLayout;
bool depthReplacing;
int blendEquations; // an 'or'ing of masks of shifts of TBlendEquationShift
bool xfbMode;
bool multiStream;
// NOTE(review): std::list and std::unordered_set are used below but <list>
// and <unordered_set> are not included in this header — presumably pulled
// in via intermediate.h; verify.
typedef std::list<TCall> TGraph;
TGraph callGraph;
std::set<TString> ioAccessed; // set of names of statically read/written I/O that might need extra checking
std::vector<TIoRange> usedIo[4]; // sets of used locations, one for each of in, out, uniform, and buffers
std::vector<TOffsetRange> usedAtomics; // sets of bindings used by atomic counters
std::vector<TXfbBuffer> xfbBuffers; // all the data we need to track per xfb buffer
std::unordered_set<int> usedConstantId; // specialization constant ids used
private:
void operator=(TIntermediate&); // prevent assignments
};
} // end namespace glslang
#endif // <API key>
|
package org.jb2011.lnf.beautyeye.winlnfutils.d;
// *
// * @(#)XPStyle.java 1.28 07/01/09
*
// * <p>These classes are designed to be used while the
// * corresponding <code>LookAndFeel</code> class has been installed
// * (<code>UIManager.setLookAndFeel(new <i>XXX</i>LookAndFeel())</code>).
// * Using them while a different <code>LookAndFeel</code> is installed
// * may produce unexpected results, including exceptions.
// * Additionally, changing the <code>LookAndFeel</code>
// * maintained by the <code>UIManager</code> without updating the
// * corresponding <code>ComponentUI</code> of any
// * <code>JComponent</code>s may also produce unexpected results,
// * such as the wrong colors showing up, and is generally not
// * encouraged.
//package org.jb2011.lnf.beautyeye.winlnfutils;
//import java.awt.Color;
//import java.awt.Component;
//import java.awt.Dimension;
//import java.awt.Graphics;
//import java.awt.<API key>;
//import java.awt.Image;
//import java.awt.Insets;
//import java.awt.Point;
//import java.awt.Rectangle;
//import java.awt.Toolkit;
//import java.awt.image.BufferedImage;
//import java.awt.image.DataBufferInt;
//import java.awt.image.WritableRaster;
//import java.security.AccessController;
//import java.util.HashMap;
//import javax.swing.AbstractButton;
//import javax.swing.JButton;
//import javax.swing.JCheckBox;
//import javax.swing.JRadioButton;
//import javax.swing.JToolBar;
//import javax.swing.UIManager;
//import javax.swing.border.AbstractBorder;
//import javax.swing.border.Border;
//import javax.swing.border.EmptyBorder;
//import javax.swing.border.LineBorder;
//import javax.swing.plaf.ColorUIResource;
//import javax.swing.plaf.InsetsUIResource;
//import javax.swing.plaf.UIResource;
//import javax.swing.text.JTextComponent;
//import org.jb2011.lnf.beautyeye.winlnfutils.BETMSchema.Part;
//import org.jb2011.lnf.beautyeye.winlnfutils.BETMSchema.Prop;
//import org.jb2011.lnf.beautyeye.winlnfutils.BETMSchema.State;
//import org.jb2011.lnf.beautyeye.winlnfutils.BETMSchema.TypeEnum;
//import sun.awt.windows.ThemeReader;
//import sun.security.action.GetPropertyAction;
//import sun.swing.CachedPainter;
//import com.sun.java.swing.plaf.windows.<API key>;
// *
// * java
// * Add by js 2009-09-01.
// **
// * Implements Windows XP Styles for the Windows Look and Feel.
// * @version 1.28 01/09/07
// * @author Leif Samuelsson
//public class BEXPStyle {
// // Singleton instance of this class
// private static BEXPStyle xp;
// // Singleton instance of SkinPainter
// private static SkinPainter skinPainter = new SkinPainter();
// private static Boolean themeActive = null;
// private HashMap<String, Border> borderMap;
// private HashMap<String, Color> colorMap;
// private boolean flatMenus;
// static {
// invalidateStyle();
// /** Static method for clearing the hashmap and loading the
// * current XP style and theme
// */
// static synchronized void invalidateStyle() {
// xp = null;
// themeActive = null;
// /** Get the singleton instance of this class
// *
// * @return the singleton instance of this class or null if XP styles
// * are not active or if this is not Windows XP
// */
// public static synchronized BEXPStyle getXP() {
// if (themeActive == null) {
// Toolkit toolkit = Toolkit.getDefaultToolkit();
// themeActive =
// (Boolean)toolkit.getDesktopProperty("win.xpstyle.themeActive");
// if (themeActive == null) {
// themeActive = Boolean.FALSE;
// if (themeActive.booleanValue()) {
// GetPropertyAction propertyAction =
// new GetPropertyAction("swing.noxp");
// if (AccessController.doPrivileged(propertyAction) == null &&
// ThemeReader.isThemed() &&
// !(UIManager.getLookAndFeel()
// instanceof <API key>)) {
// xp = new BEXPStyle();
// return xp;
// /** Get a named <code>String</code> value from the current style
// *
// * @param part a <code>Part</code>
// * @param state a <code>String</code>
// * @param attributeKey a <code>String</code>
// * @return a <code>String</code> or null if key is not found
// * in the current style
// *
// * This is currently only used by <API key> for painting
// * title foregound and can be removed when no longer needed
// */
// String getString(Component c, Part part, State state, Prop prop) {
// return getTypeEnumName(c, part, state, prop);
// private static String getTypeEnumName(Component c, Part part, State state, Prop prop) {
// State.getValue(part, state),
// prop.getValue());
// if (enumValue == -1) {
// return null;
// return TypeEnum.getTypeEnum(prop, enumValue).getName();
// /** Get a named <code>int</code> value from the current style
// *
// * @param part a <code>Part</code>
// * @return an <code>int</code> or null if key is not found
// * in the current style
// */
// int getInt(Component c, Part part, State state, Prop prop, int fallback) {
// State.getValue(part, state),
// prop.getValue());
// /** Get a named <code>Dimension</code> value from the current style
// *
// * @param key a <code>String</code>
// * @return a <code>Dimension</code> or null if key is not found
// * in the current style
// *
// * This is currently only used by <API key> and the value
// * should probably be cached there instead of here.
// */
// Dimension getDimension(Component c, Part part, State state, Prop prop) {
// State.getValue(part, state),
// prop.getValue());
// /** Get a named <code>Point</code> (e.g. a location or an offset) value
// * from the current style
// *
// * @param key a <code>String</code>
// * @return a <code>Point</code> or null if key is not found
// * in the current style
// *
// * This is currently only used by <API key> for painting
// * title foregound and can be removed when no longer needed
// */
// Point getPoint(Component c, Part part, State state, Prop prop) {
// State.getValue(part, state),
// prop.getValue());
// if (d != null) {
// return new Point(d.width, d.height);
// } else {
// return null;
// /** Get a named <code>Insets</code> value from the current style
// *
// * @param key a <code>String</code>
// * @return an <code>Insets</code> object or null if key is not found
// * in the current style
// *
// * This is currently only used to create borders and by
// * <API key> for painting title foregound.
// * The return value is already cached in those places.
// */
// Insets getMargin(Component c, Part part, State state, Prop prop) {
// State.getValue(part, state),
// prop.getValue());
// /** Get a named <code>Color</code> value from the current style
// *
// * @param part a <code>Part</code>
// * @return a <code>Color</code> or null if key is not found
// * in the current style
// */
// synchronized Color getColor(Skin skin, Prop prop, Color fallback) {
// String key = skin.toString() + "." + prop.name();
// Part part = skin.part;
// Color color = colorMap.get(key);
// if (color == null) {
// color = ThemeReader.getColor(part.getControlName(null), part.getValue(),
// State.getValue(part, skin.state),
// prop.getValue());
// if (color != null) {
// color = new ColorUIResource(color);
// colorMap.put(key, color);
// return (color != null) ? color : fallback;
// public Color getColor(Component c, Part part, State state, Prop prop, Color fallback) {
// return getColor(new Skin(c, part, state), prop, fallback);
// /** Get a named <code>Border</code> value from the current style
// *
// * @param part a <code>Part</code>
// * @return a <code>Border</code> or null if key is not found
// * in the current style or if the style for the particular
// * part is not defined as "borderfill".
// */
// public synchronized Border getBorder(Component c, Part part) {
// if (part == Part.MENU) {
// // Special case because XP has no skin for menus
// if (flatMenus) {
// // TODO: The classic border uses this color, but we should
// // create a new UI property called "PopupMenu.borderColor"
// // instead.
// return new XPFillBorder(UIManager.getColor("InternalFrame.borderShadow"),
// } else {
// return null; // Will cause L&F to use classic border
// Skin skin = new Skin(c, part, null);
// Border border = borderMap.get(skin.string);
// if (border == null) {
// String bgType = getTypeEnumName(c, part, null, Prop.BGTYPE);
// if ("borderfill".equalsIgnoreCase(bgType)) {
// int thickness = getInt(c, part, null, Prop.BORDERSIZE, 1);
// Color color = getColor(skin, Prop.BORDERCOLOR, Color.black);
// border = new XPFillBorder(color, thickness);
// } else if ("imagefile".equalsIgnoreCase(bgType)) {
// Insets m = getMargin(c, part, null, Prop.SIZINGMARGINS);
// if (m != null) {
// if (getBoolean(c, part, null, Prop.BORDERONLY)) {
// border = new XPImageBorder(c, part);
// } else {
// if(part == Part.TP_BUTTON) {
// border = new XPEmptyBorder(new Insets(3,3,3,3));
// } else {
// border = new XPEmptyBorder(m);
// if (border != null) {
// borderMap.put(skin.string, border);
// return border;
// private class XPFillBorder extends LineBorder implements UIResource {
// XPFillBorder(Color color, int thickness) {
// super(color, thickness);
// public Insets getBorderInsets(Component c) {
// return getBorderInsets(c, new Insets(0,0,0,0));
// public Insets getBorderInsets(Component c, Insets insets) {
// Insets margin = null;
// // Ideally we'd have an interface defined for classes which
// // support margins (to avoid this hackery), but we've
// // decided against it for simplicity
// if (c instanceof AbstractButton) {
// margin = ((AbstractButton)c).getMargin();
// } else if (c instanceof JToolBar) {
// margin = ((JToolBar)c).getMargin();
// } else if (c instanceof JTextComponent) {
// margin = ((JTextComponent)c).getMargin();
// insets.top = (margin != null? margin.top : 0) + thickness;
// insets.left = (margin != null? margin.left : 0) + thickness;
// insets.bottom = (margin != null? margin.bottom : 0) + thickness;
// insets.right = (margin != null? margin.right : 0) + thickness;
// return insets;
// private class XPImageBorder extends AbstractBorder implements UIResource {
// Skin skin;
// XPImageBorder(Component c, Part part) {
// this.skin = getSkin(c, part);
// public void paintBorder(Component c, Graphics g,
// int x, int y, int width, int height) {
// skin.paintSkin(g, x, y, width, height, null);
// public Insets getBorderInsets(Component c) {
// return getBorderInsets(c, new Insets(0,0,0,0));
// public Insets getBorderInsets(Component c, Insets insets) {
// Insets margin = null;
// Insets borderInsets = skin.getContentMargin();
// // Ideally we'd have an interface defined for classes which
// // support margins (to avoid this hackery), but we've
// // decided against it for simplicity
// if (c instanceof AbstractButton) {
// margin = ((AbstractButton)c).getMargin();
// } else if (c instanceof JToolBar) {
// margin = ((JToolBar)c).getMargin();
// } else if (c instanceof JTextComponent) {
// margin = ((JTextComponent)c).getMargin();
// insets.top = (margin != null? margin.top : 0) + borderInsets.top;
// insets.left = (margin != null? margin.left : 0) + borderInsets.left;
// insets.bottom = (margin != null? margin.bottom : 0) + borderInsets.bottom;
// insets.right = (margin != null? margin.right : 0) + borderInsets.right;
// return insets;
// private class XPEmptyBorder extends EmptyBorder implements UIResource {
// XPEmptyBorder(Insets m) {
// super(m.top+2, m.left+2, m.bottom+2, m.right+2);
// public Insets getBorderInsets(Component c) {
// return getBorderInsets(c, getBorderInsets());
// public Insets getBorderInsets(Component c, Insets insets) {
// insets = super.getBorderInsets(c, insets);
// Insets margin = null;
// if (c instanceof AbstractButton) {
// Insets m = ((AbstractButton)c).getMargin();
// // if this is a toolbar button then ignore getMargin()
// // and subtract the padding added by the constructor
// if(c.getParent() instanceof JToolBar
// && ! (c instanceof JRadioButton)
// && ! (c instanceof JCheckBox)
// && m instanceof InsetsUIResource) {
// insets.top -= 2;
// insets.left -= 2;
// insets.bottom -= 2;
// insets.right -= 2;
// } else {
// margin = m;
// } else if (c instanceof JToolBar) {
// margin = ((JToolBar)c).getMargin();
// } else if (c instanceof JTextComponent) {
// margin = ((JTextComponent)c).getMargin();
// if (margin != null) {
// insets.top = margin.top + 2;
// insets.left = margin.left + 2;
// insets.bottom = margin.bottom + 2;
// insets.right = margin.right + 2;
// return insets;
// public boolean isSkinDefined(Component c, Part part) {
// return (part.getValue() == 0)
// || ThemeReader.isThemePartDefined(
// /** Get a <code>Skin</code> object from the current style
// * for a named part (component type)
// *
// * @param part a <code>Part</code>
// * @return a <code>Skin</code> object
// */
// public synchronized Skin getSkin(Component c, Part part) {
// assert isSkinDefined(c, part) : "part " + part + " is not defined";
// return new Skin(c, part, null);
// /** A class which encapsulates attributes for a given part
// * (component type) and which provides methods for painting backgrounds
// * and glyphs
// */
// public static class Skin {
// final Component component;
// final Part part;
// final State state;
// private final String string;
// private Dimension size = null;
// Skin(Component component, Part part) {
// this(component, part, null);
// Skin(Part part, State state) {
// this(null, part, state);
// Skin(Component component, Part part, State state) {
// this.component = component;
// this.part = part;
// this.state = state;
// String str = part.getControlName(component) +"." + part.name();
// if (state != null) {
// str += "("+state.name()+")";
// string = str;
// public Insets getContentMargin() {
// // This is only called by <API key> so far.
// return ThemeReader.getThemeMargins(part.getControlName(null), part.getValue(),
// 0, Prop.SIZINGMARGINS.getValue());
// private int getWidth(State state) {
// if (size == null) {
// size = getPartSize(part, state);
// return size.width;
// public int getWidth() {
// return getWidth((state != null) ? state : State.NORMAL);
// private int getHeight(State state) {
// if (size == null) {
// size = getPartSize(part, state);
// return size.height;
// public int getHeight() {
// return getHeight((state != null) ? state : State.NORMAL);
// public String toString() {
// return string;
// public boolean equals(Object obj) {
// return (obj instanceof Skin && ((Skin)obj).string.equals(string));
// public int hashCode() {
// return string.hashCode();
// /** Paint a skin at x, y.
// *
// * @param g the graphics context to use for painting
// * @param dx the destination <i>x</i> coordinate.
// * @param dy the destination <i>y</i> coordinate.
// * @param state which state to paint
// */
// public void paintSkin(Graphics g, int dx, int dy, State state) {
// if (state == null) {
// state = this.state;
// paintSkin(g, dx, dy, getWidth(state), getHeight(state), state);
// /** Paint a skin in an area defined by a rectangle.
// *
// * @param g the graphics context to use for painting
// * @param r a <code>Rectangle</code> defining the area to fill,
// * may cause the image to be stretched or tiled
// * @param state which state to paint
// */
// void paintSkin(Graphics g, Rectangle r, State state) {
// paintSkin(g, r.x, r.y, r.width, r.height, state);
// /** Paint a skin at a defined position and size
// *
// * @param g the graphics context to use for painting
// * @param dx the destination <i>x</i> coordinate.
// * @param dy the destination <i>y</i> coordinate.
// * @param dw the width of the area to fill, may cause
// * the image to be stretched or tiled
// * @param dh the height of the area to fill, may cause
// * the image to be stretched or tiled
// * @param state which state to paint
// */
// public void paintSkin(Graphics g, int dx, int dy, int dw, int dh, State state) {
// skinPainter.paint(null, g, dx, dy, dw, dh, this, state);
// /**
// * Paint a skin at a defined position and size
// *
// * @param g the graphics context to use for painting
// * @param dx the destination <i>x</i> coordinate
// * @param dy the destination <i>y</i> coordinate
// * @param dw the width of the area to fill, may cause
// * the image to be stretched or tiled
// * @param dh the height of the area to fill, may cause
// * the image to be stretched or tiled
// * @param state which state to paint
// * @param borderFill should test if the component uses a border fill
// * and skip painting if it is
// */
// void paintSkin(Graphics g, int dx, int dy, int dw, int dh, State state,
// boolean borderFill) {
// if(borderFill && "borderfill".equals(getTypeEnumName(component, part,
// state, Prop.BGTYPE))) {
// return;
// skinPainter.paint(null, g, dx, dy, dw, dh, this, state);
// private static class SkinPainter extends CachedPainter {
// SkinPainter() {
// super(30);
// flush();
// protected void paintToImage(Component c, Image image, Graphics g,
// int w, int h, Object[] args) {
// Skin skin = (Skin)args[0];
// Part part = skin.part;
// State state = (State)args[1];
// if (state == null) {
// state = skin.state;
// if (c == null) {
// c = skin.component;
// WritableRaster raster = ((BufferedImage)image).getRaster();
// DataBufferInt buffer = (DataBufferInt)raster.getDataBuffer();
// ThemeReader.paintBackground(buffer.getData(),
// State.getValue(part, state),
// 0, 0, w, h, w);
// protected Image createImage(Component c, int w, int h,
// <API key> config, Object[] args) {
// return new BufferedImage(w, h, BufferedImage.TYPE_INT_ARGB);
// static class GlyphButton extends JButton {
// private Skin skin;
// public GlyphButton(Component parent, Part part) {
// BEXPStyle xp = getXP();
// skin = xp.getSkin(parent, part);
// setBorder(null);
// <API key>(false);
// public boolean isFocusTraversable() {
// return false;
// protected State getState() {
// State state = State.NORMAL;
// if (!isEnabled()) {
// state = State.DISABLED;
// } else if (getModel().isPressed()) {
// state = State.PRESSED;
// } else if (getModel().isRollover()) {
// state = State.HOT;
// return state;
// public void paintComponent(Graphics g) {
// Dimension d = getSize();
// skin.paintSkin(g, 0, 0, d.width, d.height, getState());
// public void setPart(Component parent, Part part) {
// BEXPStyle xp = getXP();
// skin = xp.getSkin(parent, part);
// revalidate();
// repaint();
// protected void paintBorder(Graphics g) {
// public Dimension getPreferredSize() {
// return new Dimension(16, 16);
// public Dimension getMinimumSize() {
// return new Dimension(5, 5);
// public Dimension getMaximumSize() {
// return new Dimension(Integer.MAX_VALUE, Integer.MAX_VALUE);
// // Private constructor
// private BEXPStyle() {
// flatMenus = getSysBoolean(Prop.FLATMENUS);
// colorMap = new HashMap<String, Color>();
// borderMap = new HashMap<String, Border>();
// // Note: All further access to the maps must be synchronized
// private boolean getBoolean(Component c, Part part, State state, Prop prop) {
// State.getValue(part, state),
// prop.getValue());
// private static Dimension getPartSize(Part part, State state) {
// return ThemeReader.getPartSize(part.getControlName(null), part.getValue(),
// State.getValue(part, state));
// private static boolean getSysBoolean(Prop prop) {
// // We can use any widget name here, I guess.
// return ThemeReader.getSysBoolean("window", prop.getValue());
|
import type { Config } from '../src/core/config'
import type VNode from '../src/core/vdom/vnode'
import type Watcher from '../src/core/observer/watcher'
/**
 * Flow ambient type declaration describing a Vue component instance.
 *
 * NOTE(review): this single interface mixes the constructor-side (static)
 * members and instance members; the trailing `[key: string]: any` index
 * signature deliberately loosens checking so dynamically registered
 * methods/properties do not produce type errors.
 */
declare interface Component {
  // constructor information
  static cid: number;
  static options: Object;
  // extend
  static extend: (options: Object) => Function;
  static superOptions: Object;
  static extendOptions: Object;
  static sealedOptions: Object;
  static super: Class<Component>;
  // assets
  static directive: (id: string, def?: Function | Object) => Function | Object | void;
  static component: (id: string, def?: Class<Component> | Object) => Class<Component>;
  static filter: (id: string, def?: Function) => Function | void;
  // public properties
  $el: any; // so that we can attach __vue__ to it
  $data: Object;
  $options: ComponentOptions;
  $parent: Component | void;
  $root: Component;
  $children: Array<Component>;
  $refs: { [key: string]: Component | Element | Array<Component | Element> | void };
  $slots: { [key: string]: Array<VNode> };
  $scopedSlots: { [key: string]: () => VNodeChildren };
  $vnode: VNode; // the placeholder node for the component in parent's render tree
  $isServer: boolean;
  $props: Object;
  // public methods
  $mount: (el?: Element | string, hydrating?: boolean) => Component;
  $forceUpdate: () => void;
  $destroy: () => void;
  $set: <T>(target: Object | Array<T>, key: string | number, val: T) => T;
  $delete: <T>(target: Object | Array<T>, key: string | number) => void;
  $watch: (expOrFn: string | Function, cb: Function, options?: Object) => Function;
  $on: (event: string | Array<string>, fn: Function) => Component;
  $once: (event: string, fn: Function) => Component;
  $off: (event?: string | Array<string>, fn?: Function) => Component;
  $emit: (event: string, ...args: Array<mixed>) => Component;
  $nextTick: (fn: Function) => void | Promise<*>;
  $createElement: (tag?: string | Component, data?: Object, children?: VNodeChildren) => VNode;
  // private properties
  _uid: number;
  _name: string; // this only exists in dev mode
  _isVue: true;
  _self: Component;
  _renderProxy: Component;
  _renderContext: ?Component;
  _watcher: Watcher;
  _watchers: Array<Watcher>;
  _computedWatchers: { [key: string]: Watcher };
  _data: Object;
  _props: Object;
  _events: Object;
  _inactive: boolean | null;
  _directInactive: boolean;
  _isMounted: boolean;
  _isDestroyed: boolean;
  _isBeingDestroyed: boolean;
  _vnode: ?VNode; // self root node
  _staticTrees: ?Array<VNode>;
  _hasHookEvent: boolean;
  _provided: ?Object;
  // private methods
  // lifecycle
  _init: Function;
  _mount: (el?: Element | void, hydrating?: boolean) => Component;
  _update: (vnode: VNode, hydrating?: boolean) => void;
  // rendering
  _render: () => VNode;
  __patch__: (a: Element | VNode | void, b: VNode) => any;
  // createElement
  // _c is internal that accepts `normalizationType` optimization hint
  _c: (vnode?: VNode, data?: VNodeData, children?: VNodeChildren, normalizationType?: number) => VNode | void;
  // renderStatic
  _m: (index: number, isInFor?: boolean) => VNode | VNodeChildren;
  // markOnce
  _o: (vnode: VNode | Array<VNode>, index: number, key: string) => VNode | VNodeChildren;
  // toString
  _s: (value: mixed) => string;
  // text to VNode
  _v: (value: string | number) => VNode;
  // toNumber
  _n: (value: string) => number | string;
  // empty vnode
  _e: () => VNode;
  // loose equal
  _q: (a: mixed, b: mixed) => boolean;
  // loose indexOf
  _i: (arr: Array<mixed>, val: mixed) => number;
  // resolveFilter
  _f: (id: string) => Function;
  // renderList
  _l: (val: mixed, render: Function) => ?Array<VNode>;
  // renderSlot
  _t: (name: string, fallback: ?Array<VNode>, props: ?Object) => ?Array<VNode>;
  // apply v-bind object
  _b: (data: any, value: any, asProp?: boolean) => VNodeData;
  // check custom keyCode
  _k: (eventKeyCode: number, key: string, builtInAlias: number | Array<number> | void) => boolean;
  // resolve scoped slots
  _u: (scopedSlots: ScopedSlotsData, res?: Object) => { [key: string]: Function };
  // allow dynamic method registration
  [key: string]: any
}
|
using System;
using System.Globalization;
using System.Runtime.InteropServices;
using EnvDTE;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Shell.Interop;
namespace Microsoft.VisualStudioTools.Project.Automation {
[ComVisible(true)]
public class OAProject : EnvDTE.Project, EnvDTE.<API key> {
#region fields
private ProjectNode project;
EnvDTE.<API key> <API key>;
#endregion
#region properties
public object Project {
get { return this.project; }
}
internal ProjectNode ProjectNode {
get { return this.project; }
}
#endregion
#region ctor
internal OAProject(ProjectNode project) {
this.project = project;
}
#endregion
#region EnvDTE.Project
<summary>
Gets or sets the name of the object.
</summary>
public virtual string Name {
get {
return project.Caption;
}
set {
CheckProjectIsValid();
using (AutomationScope scope = new AutomationScope(this.project.Site)) {
ProjectNode.Site.GetUIThread().Invoke(() => {
project.SetEditLabel(value);
});
}
}
}
public void Dispose() {
<API key> = null;
}
<summary>
Microsoft Internal Use Only. Gets the file name of the project.
</summary>
public virtual string FileName {
get {
return project.ProjectFile;
}
}
<summary>
Microsoft Internal Use Only. Specfies if the project is dirty.
</summary>
public virtual bool IsDirty {
get {
int dirty;
ErrorHandler.ThrowOnFailure(project.IsDirty(out dirty));
return dirty != 0;
}
set {
CheckProjectIsValid();
using (AutomationScope scope = new AutomationScope(this.project.Site)) {
ProjectNode.Site.GetUIThread().Invoke(() => {
project.isDirty = value;
});
}
}
}
internal void CheckProjectIsValid() {
if (this.project == null || this.project.Site == null || this.project.IsClosed) {
throw new <API key>();
}
}
<summary>
Gets the Projects collection containing the Project object supporting this property.
</summary>
public virtual EnvDTE.Projects Collection {
get { return null; }
}
<summary>
Gets the top-level extensibility object.
</summary>
public virtual EnvDTE.DTE DTE {
get {
return (EnvDTE.DTE)this.project.Site.GetService(typeof(EnvDTE.DTE));
}
}
<summary>
Gets a GUID string indicating the kind or type of the object.
</summary>
public virtual string Kind {
get { return project.ProjectGuid.ToString("B"); }
}
<summary>
Gets a ProjectItems collection for the Project object.
</summary>
public virtual EnvDTE.ProjectItems ProjectItems {
get {
return new OAProjectItems(this, project);
}
}
<summary>
Gets a collection of all properties that pertain to the Project object.
</summary>
public virtual EnvDTE.Properties Properties {
get {
return new OAProperties(this.project.NodeProperties);
}
}
<summary>
Returns the name of project as a relative path from the directory containing the solution file to the project file
</summary>
<value>Unique name if project is in a valid state. Otherwise null</value>
public virtual string UniqueName {
get {
if (this.project == null || this.project.IsClosed) {
return null;
} else {
// Get Solution service
IVsSolution solution = this.project.GetService(typeof(IVsSolution)) as IVsSolution;
Utilities.CheckNotNull(solution);
// Ask solution for unique name of project
string uniqueName;
ErrorHandler.ThrowOnFailure(
solution.<API key>(
project.GetOuterInterface<IVsHierarchy>(),
out uniqueName
)
);
return uniqueName;
}
}
}
<summary>
Gets an interface or object that can be accessed by name at run time.
</summary>
public virtual object Object {
get { return this.project.Object; }
}
<summary>
Gets the requested Extender object if it is available for this object.
</summary>
<param name="name">The name of the extender object.</param>
<returns>An Extender object. </returns>
public virtual object get_Extender(string name) {
Utilities.ArgumentNotNull("name", name);
return DTE.ObjectExtenders.GetExtender(project.NodeProperties.ExtenderCATID.ToUpper(), name, project.NodeProperties);
}
<summary>
Gets a list of available Extenders for the object.
</summary>
public virtual object ExtenderNames {
get { return DTE.ObjectExtenders.GetExtenderNames(project.NodeProperties.ExtenderCATID.ToUpper(), project.NodeProperties); }
}
<summary>
Gets the Extender category ID (CATID) for the object.
</summary>
public virtual string ExtenderCATID {
get { return project.NodeProperties.ExtenderCATID; }
}
<summary>
Gets the full path and name of the Project object's file.
</summary>
public virtual string FullName {
get {
string filename;
uint format;
ErrorHandler.ThrowOnFailure(project.GetCurFile(out filename, out format));
return filename;
}
}
<summary>
Gets or sets a value indicatingwhether the object has not been modified since last being saved or opened.
</summary>
public virtual bool Saved {
get {
return !this.IsDirty;
}
set {
IsDirty = !value;
}
}
<summary>
Gets the <API key> object for this Project .
</summary>
public virtual EnvDTE.<API key> <API key> {
get {
return ProjectNode.Site.GetUIThread().Invoke(() => {
if (this.<API key> == null) {
IVsExtensibility3 extensibility = this.project.Site.GetService(typeof(IVsExtensibility)) as IVsExtensibility3;
Utilities.CheckNotNull(extensibility);
object <API key>;
ErrorHandler.ThrowOnFailure(extensibility.GetConfigMgr(
this.project.GetOuterInterface<IVsHierarchy>(),
VSConstants.VSITEMID_ROOT,
out <API key>
));
Utilities.CheckNotNull(<API key>);
this.<API key> = (<API key>)<API key>;
}
return this.<API key>;
});
}
}
<summary>
Gets the Globals object containing add-in values that may be saved in the solution (.sln) file, the project file, or in the user's profile data.
</summary>
public virtual EnvDTE.Globals Globals {
get { return null; }
}
<summary>
Gets a ProjectItem object for the nested project in the host project.
</summary>
public virtual EnvDTE.ProjectItem ParentProjectItem {
get { return null; }
}
<summary>
Gets the CodeModel object for the project.
</summary>
public virtual EnvDTE.CodeModel CodeModel {
get { return null; }
}
<summary>
Saves the project.
</summary>
<param name="fileName">The file name with which to save the solution, project, or project item. If the file exists, it is overwritten</param>
<exception cref="<API key>">Is thrown if the save operation failes.</exception>
<exception cref="<API key>">Is thrown if fileName is null.</exception>
public virtual void SaveAs(string fileName) {
ProjectNode.Site.GetUIThread().Invoke(() => {
this.DoSave(true, fileName);
});
}
<summary>
Saves the project
</summary>
<param name="fileName">The file name of the project</param>
<exception cref="<API key>">Is thrown if the save operation failes.</exception>
<exception cref="<API key>">Is thrown if fileName is null.</exception>
public virtual void Save(string fileName) {
ProjectNode.Site.GetUIThread().Invoke(() => {
this.DoSave(false, fileName);
});
}
<summary>
Removes the project from the current solution.
</summary>
public virtual void Delete() {
CheckProjectIsValid();
using (AutomationScope scope = new AutomationScope(this.project.Site)) {
ProjectNode.Site.GetUIThread().Invoke(() => {
this.project.Remove(false);
});
}
}
#endregion
#region <API key> methods
<summary>
Microsoft Internal Use Only.
</summary>
public virtual void <API key>() {
}
#endregion
#region private methods
<summary>
Saves or Save Asthe project.
</summary>
<param name="isCalledFromSaveAs">Flag determining which Save method called , the SaveAs or the Save.</param>
<param name="fileName">The name of the project file.</param>
private void DoSave(bool isCalledFromSaveAs, string fileName) {
Utilities.ArgumentNotNull("fileName", fileName);
CheckProjectIsValid();
using (AutomationScope scope = new AutomationScope(this.project.Site)) {
// If an empty file name is passed in for Save then make the file name the project name.
if (!isCalledFromSaveAs && string.IsNullOrEmpty(fileName)) {
// Use the solution service to save the project file. Note that we have to use the service
// so that all the shell's elements are aware that we are inside a save operation and
// all the file change listenters registered by the shell are suspended.
// Get the cookie of the project file from the RTD.
<API key> rdt = this.project.Site.GetService(typeof(<API key>)) as <API key>;
Utilities.CheckNotNull(rdt);
IVsHierarchy hier;
uint itemid;
IntPtr unkData;
uint cookie;
ErrorHandler.ThrowOnFailure(rdt.FindAndLockDocument((uint)_VSRDTFLAGS.RDT_NoLock, this.project.Url, out hier,
out itemid, out unkData, out cookie));
if (IntPtr.Zero != unkData) {
Marshal.Release(unkData);
}
// Verify that we have a cookie.
if (0 == cookie) {
// This should never happen because if the project is open, then it must be in the RDT.
throw new <API key>();
}
// Get the IVsHierarchy for the project.
IVsHierarchy prjHierarchy = project.GetOuterInterface<IVsHierarchy>();
// Now get the soulution.
IVsSolution solution = this.project.Site.GetService(typeof(SVsSolution)) as IVsSolution;
// Verify that we have both solution and hierarchy.
Utilities.CheckNotNull(prjHierarchy);
Utilities.CheckNotNull(solution);
ErrorHandler.ThrowOnFailure(solution.SaveSolutionElement((uint)__VSSLNSAVEOPTIONS.<API key>, prjHierarchy, cookie));
} else {
// We need to make some checks before we can call the save method on the project node.
// This is mainly because it is now us and not the caller like in case of SaveAs or Save that should validate the file name.
// The IPersistFileFormat.Save method only does a validation that is necessary to be performed. Example: in case of Save As the
// file name itself is not validated only the whole path. (thus a file name like file\file is accepted, since as a path is valid)
// 1. The file name has to be valid.
string fullPath = fileName;
try {
fullPath = CommonUtils.GetAbsoluteFilePath(((ProjectNode)Project).ProjectFolder, fileName);
}
// We want to be consistent in the error message and exception we throw. fileName could be for example
catch (ArgumentException ex) {
throw new <API key>(SR.GetString(SR.<API key>, fileName), ex);
}
// It might be redundant but we validate the file and the full path of the file being valid. The SaveAs would also validate the path.
// If we decide that this is performance critical then this should be refactored.
Utilities.ValidateFileName(this.project.Site, fullPath);
if (!isCalledFromSaveAs) {
// 2. The file name has to be the same
if (!CommonUtils.IsSamePath(fullPath, this.project.Url)) {
throw new <API key>();
}
ErrorHandler.ThrowOnFailure(this.project.Save(fullPath, 1, 0));
} else {
ErrorHandler.ThrowOnFailure(this.project.Save(fullPath, 0, 0));
}
}
}
}
#endregion
}
<summary>
Specifies an alternate name for a property which cannot be fully captured using
.NET attribute names.
</summary>
[AttributeUsage(AttributeTargets.Property, AllowMultiple = false)]
class <API key> : Attribute {
public readonly string Name;
public <API key>(string name) {
Name = name;
}
}
}
|
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
/**
 * CLDR plural-rule resolver for Arabic (ar-JO).
 * Returns the plural-category index used by the locale data:
 * 0=zero, 1=one, 2=two, 3=few, 4=many, 5=other.
 */
function plural(n) {
  switch (n) {
    case 0: return 0;
    case 1: return 1;
    case 2: return 2;
  }
  var rem = n % 100;
  var remIsInteger = rem === Math.floor(rem);
  if (remIsInteger && rem >= 3 && rem <= 10) return 3;
  if (remIsInteger && rem >= 11 && rem <= 99) return 4;
  return 5;
}
// CLDR locale data for Arabic (Jordan). Index layout follows the generated
// Angular locale-data format (day/month names, eras, date/time patterns,
// number symbols, currency, plural function).
export default [
  'ar-JO',
  [
    ['ص', 'م'],
    ,
  ],
  [['ص', 'م'], , ['صباحًا', 'مساءً']],
  [
    ['ح', 'ن', 'ث', 'ر', 'خ', 'ج', 'س'],
    [
      'الأحد', 'الاثنين', 'الثلاثاء', 'الأربعاء', 'الخميس',
      'الجمعة', 'السبت'
    ],
    ,
    ['أحد', 'إثنين', 'ثلاثاء', 'أربعاء', 'خميس', 'جمعة', 'سبت']
  ],
  ,
  [
    ['ك', 'ش', 'آ', 'ن', 'أ', 'ح', 'ت', 'آ', 'أ', 'ت', 'ت', 'ك'],
    [
      'كانون الثاني', 'شباط', 'آذار', 'نيسان', 'أيار', 'حزيران',
      'تموز', 'آب', 'أيلول', 'تشرين الأول', 'تشرين الثاني',
      'كانون الأول'
    ],
  ],
  ,
  [['ق.م', 'م'], , ['قبل الميلاد', 'ميلادي']], 6, [5, 6],
  ['d\u200f/M\u200f/y', 'dd\u200f/MM\u200f/y', 'd MMMM y', 'EEEE، d MMMM y'],
  ['h:mm a', 'h:mm:ss a', 'h:mm:ss a z', 'h:mm:ss a zzzz'],
  [
    '{1} {0}',
    ,
    ,
  ],
  [
    '.', ',', ';', '\u200e%\u200e', '\u200e+', '\u200e-', 'E', '×', '‰', '∞',
    'ليس رقمًا', ':'
  ],
  // NOTE(review): the original last entry was truncated to a bare `['` in
  // this copy; reconstructed below as the standard currency entry
  // (symbol, name) plus the plural function reference — verify against the
  // upstream generated ar-JO locale file.
  ['د.أ.\u200f', 'دينار أردني'], plural
];
//# sourceMappingURL=ar-JO.js.map
|
from __future__ import print_function
import shlex
import subprocess
import sys
from .config import Configuration
class PkgConfig(object):
    """Wrapper around ``pkg-config`` queries for a single package.

    All flag lists are queried eagerly at construction time, so a failing
    ``pkg-config`` invocation surfaces immediately as :class:`PkgConfig.Error`.
    """

    class Error(Exception):
        """Raised when information could not be obtained from pkg-config."""

    def __init__(self, package_name):
        """Query pkg-config for information about a package.

        :type package_name: str
        :param package_name: The name of the package to query.
        :raises PkgConfig.Error: When a call to pkg-config fails.
        """
        self.package_name = package_name
        # One pkg-config invocation per flag category; each result is a
        # shlex-split list of strings.
        self._cflags = self._call("--cflags")
        self._cflags_only_I = self._call("--cflags-only-I")
        self._cflags_only_other = self._call("--cflags-only-other")
        self._libs = self._call("--libs")
        self._libs_only_l = self._call("--libs-only-l")
        self._libs_only_L = self._call("--libs-only-L")
        self._libs_only_other = self._call("--libs-only-other")

    def _call(self, *pkg_config_args):
        """Run pkg-config with the given args and return its split output.

        :rtype: list[str]
        :raises PkgConfig.Error: If pkg-config exits with a non-zero status.
        """
        try:
            cmd = [Configuration.current.pkg_config] + list(pkg_config_args) + [self.package_name]
            # Log to stderr so stdout stays clean for build-system consumers.
            print("Executing command '{}'".format(cmd), file=sys.stderr)
            return shlex.split(subprocess.check_output(cmd).decode('utf-8'))
        except subprocess.CalledProcessError as e:
            raise self.Error("pkg-config exited with error code {}".format(e.returncode))

    @property
    def swiftc_flags(self):
        """Flags for this package in a format suitable for passing to `swiftc`.

        :rtype: list[str]
        """
        # Non-include/lib flags must be forwarded through -Xcc/-Xlinker;
        # plain -I/-L/-l flags are understood by swiftc directly.
        return (
            ["-Xcc {}".format(s) for s in self._cflags_only_other]
            + ["-Xlinker {}".format(s) for s in self._libs_only_other]
            + self._cflags_only_I
            + self._libs_only_L
            + self._libs_only_l)

    @property
    def cflags(self):
        """CFLAGS for this package.

        :rtype: list[str]
        """
        return self._cflags

    @property
    def ldflags(self):
        """LDFLAGS for this package.

        :rtype: list[str]
        """
        return self._libs
|
// Declare internals
var internals = {};
// Plugin registration
exports.register = function (plugin, options, next) {
plugin.route({ path: '/test2', method: 'GET', handler: function (request, reply) { reply('testing123'); } });
plugin.route({ path: '/test2/path', method: 'GET', handler: function (request, reply) { reply(plugin.path); } });
plugin.log('test', 'abc');
return next();
};
|
{% extends "graphos/gchart/base.html" %}
{# Override: instantiate a Google Charts LineChart bound to this chart's container element. #}
{% block create_chart %}
    var chart = new google.visualization.LineChart(document.getElementById('{{ chart.get_html_id }}'));
{% endblock %}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
<title>Minim : : AudioPlayer : : close</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
<link href="stylesheet.css" rel="stylesheet" type="text/css">
</head>
<body>
<center>
<table width="600" border="0" cellspacing="0" cellpadding="0">
<tr>
<td height="100" valign="top" class="header">
<span class="libName">Minim</span><br>
<a href="index.html">core</a><br/>
<a href="index_ugens.html">ugens</a><br/>
<a href="index_analysis.html">analysis</a>
</td>
<td width="450" class="descList"> </td>
</tr>
<tr>
<td valign="top" class="mainTextName">Name</td>
<td class="methodName">close</td>
</tr>
<tr>
<td valign=top class="mainText">Examples</td>
<td valign=top class="descList"><pre>None available</pre></td>
</tr>
<tr>
<td valign=top class="mainText">Description</td>
<td valign=top class="descList">Release the resources associated with playing this file.
All AudioPlayers returned by Minim's loadFile method
will be closed by Minim when it's stop method is called.
If you are using Processing, Minim's stop method will be
called automatically when your application exits.</td>
</tr>
<tr>
<td valign=top class="mainText">Syntax</td>
<td valign=top class="descList"><pre>close();
</pre></td>
</tr>
<!-- begin parameters -->
<!-- end parameters -->
<!-- begin return -->
<tr>
<td valign=top class="mainText">Returns</td>
<td class="descList">None</td>
</tr>
<!-- end return -->
<tr>
<td valign=top class="mainText">Usage</td>
<td class="descList">Web & Application</td>
</tr>
<tr>
<td valign=top class="mainText">Related</td>
<td class="descList"></td>
</tr>
<tr>
<td></td>
<td class="descList"> </td>
</tr>
</table>
</center>
</body>
</html>
|
function DummyError() { }

// For `base[prop()] <<= expr()` with an undefined base, the property
// expression `prop()` is evaluated first (MemberExpression evaluation),
// so its DummyError must propagate before any TypeError for the undefined
// base and before the right-hand side is evaluated.
assert.throws(DummyError, function() {
  var base = undefined;
  var prop = function() {
    throw new DummyError();
  };
  var expr = function() {
    $ERROR("right-hand side expression evaluated");
  };

  base[prop()] <<= expr();
});

// Here the property is a plain expression (no call), so the TypeError for
// accessing a property of `undefined` must be thrown without ever invoking
// the key's toString (ToPropertyKey) and without evaluating the RHS.
assert.throws(TypeError, function() {
  var base = undefined;
  var prop = {
    toString: function() {
      $ERROR("property key evaluated");
    }
  };
  var expr = function() {
    $ERROR("right-hand side expression evaluated");
  };

  base[prop] <<= expr();
});
|
(function ($) {
"use strict";
$.fn.fileinputLocales['pl'] = {
fileSingle: 'plik',
filePlural: 'pliki',
browseLabel: 'Przeglądaj …',
removeLabel: 'Usuń',
removeTitle: 'Usuń zaznaczone pliki',
cancelLabel: 'Przerwij',
cancelTitle: 'Anuluj wysyłanie',
pauseLabel: 'Wstrzymaj',
pauseTitle: 'Wstrzymaj trwające przesyłanie',
uploadLabel: 'Wgraj',
uploadTitle: 'Wgraj zaznaczone pliki',
msgNo: 'Nie',
msgNoFilesSelected: 'Brak zaznaczonych plików',
msgPaused: 'Wstrzymano',
msgCancelled: 'Odwołany',
msgPlaceholder: 'Wybierz {files} ...',
msgZoomModalHeading: 'Szczegółowy podgląd',
msgFileRequired: 'Musisz wybrać plik do wgrania.',
msgSizeTooSmall: 'Plik "{name}" (<b>{size} KB</b>) jest zbyt mały i musi być większy niż <b>{minSize} KB</b>.',
msgSizeTooLarge: 'Plik o nazwie "{name}" (<b>{size} KB</b>) przekroczył maksymalną dopuszczalną wielkość pliku wynoszącą <b>{maxSize} KB</b>.',
msgFilesTooLess: 'Minimalna liczba plików do wgrania: <b>{n}</b>.',
msgFilesTooMany: 'Liczba plików wybranych do wgrania w liczbie <b>({n})</b>, przekracza maksymalny dozwolony limit wynoszący <b>{m}</b>.',
<API key>: 'Możesz wgrać maksymalnie <b>{m}</b> plików (wykryto <b>{n}</b>).',
msgFileNotFound: 'Plik "{name}" nie istnieje!',
msgFileSecured: 'Ustawienia zabezpieczeń uniemożliwiają odczyt pliku "{name}".',
msgFileNotReadable: 'Plik "{name}" nie jest plikiem do odczytu.',
<API key>: 'Podgląd pliku "{name}" został przerwany.',
msgFilePreviewError: 'Wystąpił błąd w czasie odczytu pliku "{name}".',
msgInvalidFileName: 'Nieprawidłowe lub nieobsługiwane znaki w nazwie pliku "{name}".',
msgInvalidFileType: 'Nieznany typ pliku "{name}". Tylko następujące rodzaje plików są dozwolone: "{types}".',
<API key>: 'Złe rozszerzenie dla pliku "{name}". Tylko następujące rozszerzenia plików są dozwolone: "{extensions}".',
msgUploadAborted: 'Przesyłanie pliku zostało przerwane',
msgUploadThreshold: 'Przetwarzanie …',
msgUploadBegin: 'Rozpoczynanie …',
msgUploadEnd: 'Gotowe!',
msgUploadResume: 'Wznawianie przesyłania …',
msgUploadEmpty: 'Brak poprawnych danych do przesłania.',
msgUploadError: 'Błąd przesyłania',
msgDeleteError: 'Błąd usuwania',
msgProgressError: 'Błąd',
msgValidationError: 'Błąd walidacji',
msgLoading: 'Wczytywanie pliku {index} z {files} …',
msgProgress: 'Wczytywanie pliku {index} z {files} - {name} - {percent}% zakończone.',
msgSelected: '{n} Plików zaznaczonych',
<API key>: 'Metodą przeciągnij i upuść, można przenosić tylko pliki. Pominięto {n} katalogów.',
msgImageWidthSmall: 'Szerokość pliku obrazu "{name}" musi być co najmniej {size} px.',
msgImageHeightSmall: 'Wysokość pliku obrazu "{name}" musi być co najmniej {size} px.',
msgImageWidthLarge: 'Szerokość pliku obrazu "{name}" nie może przekraczać {size} px.',
msgImageHeightLarge: 'Wysokość pliku obrazu "{name}" nie może przekraczać {size} px.',
msgImageResizeError: 'Nie udało się uzyskać wymiaru obrazu, aby zmienić rozmiar.',
<API key>: 'Błąd podczas zmiany rozmiaru obrazu.<pre>{errors}</pre>',
msgAjaxError: 'Coś poczło nie tak podczas {operation}. Spróbuj ponownie!',
<API key>: '{operation} nie powiodło się',
msgDuplicateFile: 'Plik "{name}" o identycznym rozmiarze "{size} KB" został wgrany wcześniej. Pomijanie zduplikowanego pliku.',
<API key>: 'Przekroczono limit <b>{max}</b> prób wgrania pliku <b>{file}</b>! Szczegóły błędu: <pre>{error}</pre>',
msgPendingTime: 'Pozostało {time}',
msgCalculatingTime: 'obliczanie pozostałego czasu',
ajaxOperations: {
deleteThumb: 'usuwanie pliku',
uploadThumb: 'przesyłanie pliku',
uploadBatch: 'masowe przesyłanie plików',
uploadExtra: 'przesyłanie danych formularza'
},
dropZoneTitle: 'Przeciągnij i upuść pliki tutaj …',
dropZoneClickTitle: '<br>(lub kliknij tutaj i wybierz {files} z komputera)',
fileActionSettings: {
removeTitle: 'Usuń plik',
uploadTitle: 'Przesyłanie pliku',
uploadRetryTitle: 'Ponów',
downloadTitle: 'Pobierz plik',
zoomTitle: 'Pokaż szczegóły',
dragTitle: 'Przenies / Ponownie zaaranżuj',
indicatorNewTitle: 'Jeszcze nie przesłany',
<API key>: 'Dodane',
indicatorErrorTitle: 'Błąd',
<API key>: 'Przesyłanie zatrzymane',
<API key>: 'Przesyłanie …'
},
<API key>: {
prev: 'Pokaż poprzedni plik',
next: 'Pokaż następny plik',
toggleheader: 'Włącz / wyłącz nagłówek',
fullscreen: 'Włącz / wyłącz pełny ekran',
borderless: 'Włącz / wyłącz tryb bez ramek',
close: 'Zamknij szczegółowy widok'
}
};
})(window.jQuery);
|
using UnityEngine;
using System.Collections;
/// <summary>
/// Key input enumeration for easy input sending.
/// </summary>
public enum KeyInput
{
    GoLeft = 0, // explicit 0 anchors the remaining implicit values
    GoRight,
    GoDown,
    Jump,
    Count       // number of real inputs; keep this member last
}
|
<?php
namespace Aura\Web\Response;
/**
 * Unit tests for the Response\Cookies collection: setting cookies,
 * retrieving one or all, and collection-wide default attributes.
 */
class CookiesTest extends \<API key>
{
    // Cookies instance under test; rebuilt for each test in setUp().
    protected $cookies;

    protected function setUp()
    {
        $this->cookies = new Cookies;
    }

    public function testSetAndGet()
    {
        // NOTE(review): expire is passed as the string '88' but expected back
        // as int 88 — set() apparently normalizes numeric expire values.
        $this->cookies->set('foo', 'bar', '88', '/path', 'example.com');
        $expect = array(
            'value' => 'bar',
            'expire' => 88,
            'path' => '/path',
            'domain' => 'example.com',
            'secure' => false,
            'httponly' => true,
        );
        $actual = $this->cookies->get('foo');
        $this->assertSame($expect, $actual);
    }

    public function testGetAll()
    {
        $this->cookies->set('foo', 'bar', '88', '/path', 'example.com');
        // Passing a date string also yields expire 88 — presumably set()
        // converts date strings via strtotime(); verify against Cookies::set().
        $this->cookies->set('baz', 'dib', date('Y-m-d H:i:s', '88'), '/path', 'example.com');
        $expect = array(
            'foo' => array(
                'value' => 'bar',
                'expire' => 88,
                'path' => '/path',
                'domain' => 'example.com',
                'secure' => false,
                'httponly' => true,
            ),
            'baz' => array(
                'value' => 'dib',
                'expire' => 88,
                'path' => '/path',
                'domain' => 'example.com',
                'secure' => false,
                'httponly' => true,
            ),
        );
        // get() with no argument returns the whole collection.
        $actual = $this->cookies->get();
        $this->assertSame($expect, $actual);
    }

    public function testDefault()
    {
        // set a cookie name and value
        $this->cookies->set('foo', 'bar');
        // get before defaults
        $expect = array(
            'value' => 'bar',
            'expire' => 0,
            'path' => '',
            'domain' => '',
            'secure' => false,
            'httponly' => true,
        );
        $actual = $this->cookies->get('foo');
        $this->assertSame($expect, $actual);

        // set and get defaults
        $this->cookies->setExpire(88);
        $this->cookies->setPath('/path');
        $this->cookies->setDomain('example.com');
        $this->cookies->setSecure(true);
        $this->cookies->setHttponly(false);

        // get after defaults; defaults apply only to cookies set afterwards,
        // so the default record itself carries a null value.
        $expect = array(
            'value' => null,
            'expire' => 88,
            'path' => '/path',
            'domain' => 'example.com',
            'secure' => true,
            'httponly' => false,
        );
        $actual = $this->cookies->getDefault();
        $this->assertSame($expect, $actual);
    }
}
|
/* $NetBSD: citrus_gbk2k.h,v 1.2 2003/06/25 09:51:43 tshiozak Exp $ */

#ifndef _CITRUS_GBK2K_H_
#define _CITRUS_GBK2K_H_

__BEGIN_DECLS
/* NOTE(review): the macro names below were redacted in this copy; they
 * presumably expand to the standard citrus module entry-point declarations
 * for the GBK2K (GB18030) encoding — confirm against the upstream header. */
<API key>(GBK2K);
<API key>(GBK2K);
__END_DECLS

#endif /* _CITRUS_GBK2K_H_ */
|
// Development server script: parses CLI options, resolves Closure
// dependencies for the library sources, and serves them with a loader
// endpoint via closure-util.
var path = require('path');
var url = require('url');

var closure = require('closure-util');
var nomnom = require('nomnom');

var log = closure.log;

// CLI options: --port/-p (listen port) and --loglevel/-l (log verbosity).
var options = nomnom.options({
  port: {
    abbr: 'p',
    'default': 4000,
    help: 'Port for incoming connections',
    metavar: 'PORT'
  },
  loglevel: {
    abbr: 'l',
    choices: ['silly', 'verbose', 'info', 'warn', 'error'],
    'default': 'info',
    help: 'Log level',
    metavar: 'LEVEL'
  }
}).parse();

/** @type {string} */
log.level = options.loglevel;

log.info('ol3-cesium', 'Parsing dependencies ...');
var manager = new closure.Manager({
  closure: true, // use the bundled Closure Library
  lib: [
    'src*.js'
  ],
  // ol.* requires are provided externally, so don't treat them as missing.
  ignoreRequires: '^ol\\.'
});
manager.on('error', function(e) {
  log.error('ol3-cesium', e.message);
});
// Start the HTTP server only once dependency parsing has finished.
manager.on('ready', function() {
  var server = new closure.Server({
    manager: manager,
    loader: '/@loader'
  });
  server.listen(options.port, function() {
    log.info('ol3-cesium', 'Listening on http://localhost:' +
        options.port + '/ (Ctrl+C to stop)');
  });
  server.on('error', function(err) {
    log.error('ol3-cesium', 'Server failed to start: ' + err.message);
    process.exit(1);
  });
});
|
#endregion
namespace Clide.VisualStudio
{
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Shell.Interop;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
/// <summary>
/// Extension methods over <see cref="IServiceProvider"/> and
/// <see cref="IVsMonitorSelection"/> for querying tool windows and the
/// current Visual Studio selection on the UI thread.
/// NOTE(review): several identifiers were redacted in this copy of the
/// file; the comments describe only what the visible code shows.
/// </summary>
internal static class <API key>
{
/// <summary>Wraps the given tool window id in a <see cref="VsToolWindow"/> accessor.</summary>
public static VsToolWindow ToolWindow(this IServiceProvider serviceProvider, Guid toolWindowId)
{
return new VsToolWindow(serviceProvider, toolWindowId);
}
/// <summary>
/// Returns the hierarchy that owns the current selection, or null when
/// nothing is selected, the solution node itself is selected, or the
/// selection spans more than one hierarchy. Marshals to the UI thread
/// via <paramref name="uiThread"/>; COM pointers are released in the
/// finally block regardless of outcome.
/// </summary>
public static IVsHierarchy <API key>(this IVsMonitorSelection monitorSelection, IUIThread uiThread)
{
var hierarchyPtr = IntPtr.Zero;
var selectionContainer = IntPtr.Zero;
return uiThread.Invoke(() =>
{
try
{
// Get the current project hierarchy, project item, and selection container for the current selection
// If the selection spans multiple hierarchies, hierarchyPtr is Zero.
// So fast path is for non-zero result (most common case of single active project/item).
uint itemid;
IVsMultiItemSelect multiItemSelect = null;
ErrorHandler.ThrowOnFailure(monitorSelection.GetCurrentSelection(out hierarchyPtr, out itemid, out multiItemSelect, out selectionContainer));
// There may be no selection at all.
if (itemid == VSConstants.VSITEMID_NIL)
return null;
if (itemid == VSConstants.VSITEMID_ROOT)
{
// The root selection could be the solution itself, so no project is active.
if (hierarchyPtr == IntPtr.Zero)
return null;
else
return (IVsHierarchy)Marshal.<API key>(hierarchyPtr, typeof(IVsHierarchy));
}
// We may have a single item selection, so we can safely pick its owning project/hierarchy.
if (itemid != VSConstants.VSITEMID_SELECTION)
return (IVsHierarchy)Marshal.<API key>(hierarchyPtr, typeof(IVsHierarchy));
// Otherwise, this is a multiple item selection within the same hierarchy,
// we select the hierarchy.
uint <API key>;
int <API key>;
ErrorHandler.ThrowOnFailure(multiItemSelect.GetSelectionInfo(out <API key>, out <API key>));
var isSingleHierarchy = (<API key> != 0);
if (isSingleHierarchy)
return (IVsHierarchy)Marshal.<API key>(hierarchyPtr, typeof(IVsHierarchy));
return null;
}
finally
{
// Always release the raw COM pointers obtained from GetCurrentSelection.
if (hierarchyPtr != IntPtr.Zero)
{
Marshal.Release(hierarchyPtr);
}
if (selectionContainer != IntPtr.Zero)
{
Marshal.Release(selectionContainer);
}
}
});
}
/// <summary>
/// Returns (hierarchy, itemid) pairs for the current selection. A root
/// selection with no owning hierarchy is reported as the supplied
/// <paramref name="solution"/> hierarchy with VSITEMID_ROOT; an empty
/// selection yields an empty sequence. Runs on the UI thread.
/// </summary>
public static IEnumerable<Tuple<IVsHierarchy, uint>> GetSelection(this IVsMonitorSelection monitorSelection, IUIThread uiThread, IVsHierarchy solution)
{
var hierarchyPtr = IntPtr.Zero;
var selectionContainer = IntPtr.Zero;
return uiThread.Invoke(() =>
{
try
{
// Get the current project hierarchy, project item, and selection container for the current selection
// If the selection spans multiple hierarchies, hierarchyPtr is Zero
uint itemid;
IVsMultiItemSelect multiItemSelect = null;
ErrorHandler.ThrowOnFailure(monitorSelection.GetCurrentSelection(out hierarchyPtr, out itemid, out multiItemSelect, out selectionContainer));
if (itemid == VSConstants.VSITEMID_NIL)
return Enumerable.Empty<Tuple<IVsHierarchy, uint>>();
if (itemid == VSConstants.VSITEMID_ROOT)
{
if (hierarchyPtr == IntPtr.Zero)
return new[] { Tuple.Create(solution, VSConstants.VSITEMID_ROOT) };
else
return new[] { Tuple.Create(
(IVsHierarchy)Marshal.<API key>(hierarchyPtr, typeof(IVsHierarchy)),
VSConstants.VSITEMID_ROOT) };
}
if (itemid != VSConstants.VSITEMID_SELECTION)
return new[] { Tuple.Create(
(IVsHierarchy)Marshal.<API key>(hierarchyPtr, typeof(IVsHierarchy)),
itemid) };
// This is a multiple item selection.
uint <API key>;
int <API key>;
ErrorHandler.ThrowOnFailure(multiItemSelect.GetSelectionInfo(out <API key>, out <API key>));
var isSingleHierarchy = (<API key> != 0);
var vsItemSelections = new VSITEMSELECTION[<API key>];
var flags = (isSingleHierarchy) ? (uint)__VSGSIFLAGS.GSI_fOmitHierPtrs : 0;
ErrorHandler.ThrowOnFailure(multiItemSelect.GetSelectedItems(flags, <API key>, vsItemSelections));
return vsItemSelections.Where(sel => sel.pHier != null)
// NOTE: we can return lazy results here, since
// the GetSelectedItems has already returned in the UI thread
// the array of results. We're just delaying the creation of the tuples
// in case they aren't all needed.
.Select(sel => Tuple.Create(sel.pHier, sel.itemid));
}
finally
{
if (hierarchyPtr != IntPtr.Zero)
{
Marshal.Release(hierarchyPtr);
}
if (selectionContainer != IntPtr.Zero)
{
Marshal.Release(selectionContainer);
}
}
});
}
}
}
|
/**
* This is specifically for the builder where the
* dependencies have been resolved and you just want
* to access the component.jsons locally.
*/
var semver = require('semver');
var fs = require('graceful-fs');
var join = require('path').join;
var resolve = require('path').resolve;
var debug = require('debug')('remotes:local');
var Remote = require('../remote')
module.exports = Local
Remote.extend(Local)
/**
 * Remote that resolves components already downloaded on disk.
 * Safe to call without `new`.
 *
 * @param {Object} [options] - may supply `out` or `dir` as the folder.
 */
function Local(options) {
  if (!(this instanceof Local)) return new Local(options);
  options = Object.create(options || {});
  var folder = options.out || options.dir || 'components';
  this.out = resolve(folder);
  debug('checking local components at %s', this.out);
  Remote.call(this, options);
}
Local.prototype.name = 'local';
/**
* Local resolution is a little different than other remotes.
* In particular, if no `ref` is set,
* we check for any version.
*
* @param {String} repo
* @return {this}
* @api public
*/
// Resolve a repo (optionally at `ref`) against the local install dir.
// Overloaded: when `remotes` is a string, arguments shift one left
// (remotes -> repo, repo -> ref). Returns `this` when the repo/ref is
// satisfiable locally, otherwise undefined.
Local.prototype.resolve = function* (remotes, repo, ref) {
debug('resolving local remote');
if (typeof remotes === 'string') {
ref = repo;
repo = remotes;
} else if (Array.isArray(remotes) && !~remotes.indexOf('local')) {
// if the current remote is not in this list,
// then it's obviously not valid.
return;
}
var folders = yield* this.folders(repo);
// none installed
if (!folders || !folders.length) return;
// no specific version we care about
if (!ref) return this;
// exact tag version
if (~folders.indexOf(ref)) return this;
// check for equal semantic versions (only semver-valid folder names count)
if (semver.maxSatisfying(folders.filter(valid), ref)) return this;
}
/**
* Get the currently downloaded versions of a repo.
*
* @param {String} repo
* @return {Array} folders
* @api public
*/
// List the version folders downloaded for `repo` ("user/project").
// Returns undefined for malformed repo names or when the folder does
// not exist (ENOENT); dotfile entries are filtered out. Other fs
// errors propagate.
Local.prototype.folders = function* (repo) {
try {
var frags = repo.toLowerCase().split('/');
// ignore malformed repos for now
if (frags.length !== 2) return;
var folder = join(this.out, frags[0], frags[1]);
debug('checking folder: %s', folder);
var folders = yield readdir(folder);
debug('got folders: %s', folders.join(', '));
return folders.filter(noLeadingDot);
} catch (err) {
if (err.code === 'ENOENT') return;
throw err;
}
}
/**
* Return the currently downloaded components' semantic versions.
*
* @param {String} repo
* @return {Array} references
* @api public
*/
// For the local remote the downloaded folder names *are* the versions.
Local.prototype._versions = function* (repo) {
  var versions = yield* this.folders(repo);
  return versions;
}
/**
* Return the existing component.json, if any.
* @param {String} repo
* @param {String} reference
* @return {Object} component.json
* @api public
*/
// Read and parse `<out>/<repo>/<ref>/component.json`.
// Returns undefined when the file does not exist; rethrows other read
// errors; wraps JSON syntax errors with the offending filename.
Local.prototype._json = function* (repo, ref) {
var body;
var filename = join(this.out, repo, ref, 'component.json');
try {
body = yield read(filename);
} catch (err) {
if (err.code === 'ENOENT') return;
throw err;
}
try {
return JSON.parse(body);
} catch (_err) {
throw new Error('JSON parsing error with "' + filename + '"');
}
}
/**
* NOT RELEVANT WITH THIS REMOTE
*/
// Intentionally a no-op: the local remote has no git tree to list.
Local.prototype._tree = function* () {
/* jshint noyield:true */
}
// Loose semver validity check (second arg enables lenient parsing).
function valid(candidate) {
  return semver.valid(candidate, true);
}
// Filter predicate: rejects dotfile names such as ".DS_Store".
function noLeadingDot(name) {
  return name.charAt(0) !== '.';
}
// Thunkify fs.readdir so it can be `yield`ed by the generator methods.
function readdir(root) {
  return function (callback) {
    fs.readdir(root, callback);
  };
}
// Thunkify fs.readFile (utf8) so it can be `yield`ed.
function read(filename) {
  return function (callback) {
    fs.readFile(filename, 'utf8', callback);
  };
}
|
@(user: User = null, scripts: Html = Html(""))(content: Html)
<!DOCTYPE html>
<html>
<head>
<title>@Messages("title")</title>
<link rel="stylesheet" media="screen" href="@routes.Assets.at("stylesheets/bootstrap.css")">
<link rel="stylesheet" media="screen" href="@routes.Assets.at("stylesheets/main.css")">
<link rel="shortcut icon" type="image/png" href="@routes.Assets.at("images/favicon.png")">
<script src="@routes.Assets.at("javascripts/jquery/jquery-2.1.0.min.js")" type="text/javascript"></script>
<script src="@routes.Assets.at("javascripts/bootstrap.js")" type="text/javascript"></script>
<link rel="stylesheet" media="screen" href="@routes.Assets.at("stylesheets/font-awesome.min.css")">
@scripts
</head>
<body>
<div ng-controller="MenuCtrl" class="navbar navbar-inverse navbar-default" role="navigation">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-ex1-collapse">
<span class="sr-only">Toggle navigation</span>
<span class="fa fa-bars fa-lg fa-inverse"></span>
</button>
<a class="navbar-brand" href="@routes.Application.index()">
<i class="fa fa-rocket"></i> Project name
</a>
<ul class="nav navbar-nav navbar-right">
<li class=""><a href="@routes.Application.index()">Home</a></li>
</ul>
</div>
@logged(user)
</div>
<div class="container">
<div class="row">
@content
</div>
</div>
<hr>
<div class="footer text-center">
<div>
<small>
Hello! I'm your friendly footer. If you're actually reading this, I'm impressed....
<a href="#">source</a> @* NOTE(review): the original https:// URL and link text were lost in extraction — restore them and remove this placeholder *@
</small>
</div>
</div>
</body>
</html>
|
namespace Org.BouncyCastle.Crypto.Tls
{
/// <summary>
/// RFC 2246 6.1
/// </summary>
public enum CompressionMethod : byte
{
    NULL = 0,

    /*
     * RFC 3749 2
     */
    DEFLATE = 1

    /*
     * Values from 224 decimal (0xE0) through 255 decimal (0xFF)
     * inclusive are reserved for private use.
     */
}
}
|
None
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import os
# simple json is a python 2.5 library you need to install
import json
# json comes bundled with python 2.6. use one or the other
#import json
def run():
print "starting"
from receiver.models import Submission
from xformmanager.models import FormDefModel
# this part of the script walks through all the registered
# form definitions and bundles them with the original xsd
# schema for resubmission
domain = None
# you can manually set a single domain here. if you don't then
# all the data will be exported.
domain = "Grameen"
if domain:
all_schemas = FormDefModel.objects.filter(<API key>=domain)
else:
all_schemas = FormDefModel.objects.all()
for schema in all_schemas:
print "processsing %s" % schema
file_loc = schema.xsd_file_location
print "xsd file: %s" % file_loc
if file_loc:
headers = {
"<API key>" : str(schema.submit_time),
"original-submit-ip" : str(schema.submit_ip),
"bytes-received" : schema.bytes_received,
"form-name" : schema.form_name,
"form-display-name" : schema.form_display_name,
"target-namespace" : schema.target_namespace,
"date-created" : str(schema.date_created),
"domain" : str(schema.get_domain)
}
dir, filename = os.path.split(file_loc)
new_dir = os.path.join(dir, "export")
if not os.path.exists(new_dir):
os.makedirs(new_dir)
write_file = os.path.join(new_dir, filename.replace(".xml", ".xsdexport"))
fout = open(write_file, 'w')
jsoned = json.dumps(headers)
print jsoned
fout.write(jsoned)
fout.write("\n\n")
xsd_file = open(file_loc, "r")
payload = xsd_file.read()
xsd_file.close()
fout.write(payload)
fout.close()
# this part of the script walks through all the submissions
# and bundles them in an exportable format with the original
# submitting IP and time, as well as a reference to the
# original post
#all_submissions = Submission.objects.all()
if domain:
all_submissions = Submission.objects.filter(<API key>=domain)
else:
all_submissions = Submission.objects.all()
for submission in all_submissions:
#print "processing %s (%s)" % (submission,submission.raw_post)
post_file = open(submission.raw_post, "r")
submit_time = str(submission.submit_time)
# first line is content type
content_type = post_file.readline().split(":")[1].strip()
# second line is content length
content_length = post_file.readline().split(":")[1].strip()
# third line is empty
post_file.readline()
# the rest is the actual body of the post
headers = { "content-type" : content_type,
"content-length" : content_length,
"time-received" : str(submission.submit_time),
"original-ip" : str(submission.submit_ip),
"domain" : submission.domain.name
}
# check the directory and create it if it doesn't exist
dir, filename = os.path.split(submission.raw_post)
new_dir = os.path.join(dir, "export")
if not os.path.exists(new_dir):
os.makedirs(new_dir)
# the format will be:
# {headers} (dict)
# (empty line)
# <body>
write_file = os.path.join(new_dir, filename.replace("postdata", "postexport"))
fout = open(write_file, 'w')
jsoned = json.dumps(headers)
fout.write(jsoned)
fout.write("\n\n")
try:
payload = post_file.read()
fout.write(payload)
except Exception:
print "error processing %s" % write_file
fout.close()
print "done"
|
#include "chromecast/base/metrics/<API key>.h"
#include "base/logging.h"
#include "base/macros.h"
#include "chromecast/base/metrics/cast_metrics_helper.h"
namespace chromecast {
namespace metrics {
namespace {
// No-op implementation of CastMetricsHelper: every override below is an
// intentionally empty stub.
// NOTE(review): class/member identifiers were redacted ("<API key>") in
// this copy; comments describe only what the visible code shows.
class <API key> : public CastMetricsHelper {
public:
<API key>();
~<API key>() override;
void <API key>(const std::string& app_id,
const std::string& session_id) override;
void UpdateSDKInfo(const std::string& sdk_version) override;
void LogMediaPlay() override;
void LogMediaPause() override;
void LogTimeToFirstAudio() override;
void LogTimeToBufferAv(BufferingType buffering_type,
base::TimeDelta time) override;
std::string <API key>(
const std::string& prefix, const std::string& suffix) const override;
void SetMetricsSink(MetricsSink* delegate) override;
void RecordSimpleAction(const std::string& action) override;
private:
<API key>(<API key>);
};
// Guards against creating more than one stub instance (DCHECKed in the
// constructor/destructor below).
bool <API key> = false;
<API key>::<API key>()
: CastMetricsHelper() {
DCHECK(!<API key>);
<API key> = true;
}
<API key>::~<API key>() {
DCHECK(<API key>);
<API key> = false;
}
void <API key>::<API key>(
const std::string& app_id,
const std::string& session_id) {
}
void <API key>::UpdateSDKInfo(const std::string& sdk_version) {
}
void <API key>::LogMediaPlay() {
}
void <API key>::LogMediaPause() {
}
void <API key>::LogTimeToFirstAudio() {
}
void <API key>::LogTimeToBufferAv(BufferingType buffering_type,
base::TimeDelta time) {
}
std::string <API key>::<API key>(
const std::string& prefix,
const std::string& suffix) const {
return "";
}
void <API key>::SetMetricsSink(MetricsSink* delegate) {
}
}  // namespace
// NOTE(review): this member definition appears *after* the anonymous
// namespace's closing brace even though the class is declared inside
// it -- likely a merge/extraction artifact; confirm against upstream
// and move it back inside the namespace if so.
void <API key>::RecordSimpleAction(const std::string& action) {
}
// Creates the process-wide stub on first call; the instance is never
// deleted (presumably an intentionally leaked singleton -- confirm).
void <API key>() {
if (!<API key>) {
new <API key>();
}
}
} // namespace metrics
} // namespace chromecast
|
// modification, are permitted provided that the following conditions are
// met:
// documentation and/or other materials provided with the distribution.
// * Neither the name of Image Engine Design nor the names of any
// other contributors to this software may be used to endorse or
// promote products derived from this software without specific prior
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef <API key>
#define <API key>
#include "IECore/Export.h"
#include "IECore/IECore.h"
#include "IECore/IndexedIO.h"
<API key>
#include "boost/test/unit_test.hpp"
<API key>
#include "boost/test/<API key>.hpp"
#include <cassert>
#include <iostream>
#include <string>
#include <vector>
namespace IECore
{
void addIndexedIOTest(boost::unit_test::test_suite* test);
typedef std::vector<std::string> FilenameList;
// Per-type test traits used by the IndexedIO round-trip tests:
// name() is the entry name written to / read from the file, value()
// the canonical test value, check() the equality assertion.
// The unspecialized members assert(false): this primary template must
// never be used directly -- real behaviour comes from per-type
// specializations (declared elsewhere, not visible in this chunk).
template<typename T>
struct <API key>
{
static std::string name()
{
assert(false);
return "";
}
static T value()
{
assert(false);
return T();
}
static void check(const T& v1)
{
BOOST_CHECK_EQUAL(v1, value());
}
};
// Partial specialization for array data (T*): check() compares exactly
// ten elements against value(), matching the fixed length 10 used by
// writeArray()/testArray() below. As in the primary template, name()
// and value() assert(false) and must be specialized per element type.
template<typename T>
struct <API key><T*>
{
static std::string name()
{
assert(false);
return "";
}
static T* value()
{
assert(false);
return 0;
}
static void check(T *v1)
{
for (int i = 0; i < 10; i++)
{
BOOST_CHECK_EQUAL(v1[i], value()[i]);
}
}
};
// Round-trip test fixture parameterized on the IndexedIO backend T.
// test<D>() / testArray<D>() read previously written entries of type D
// from each data file in m_filenames and check them against the traits'
// canonical values; write() regenerates the data files.
template<typename T>
struct IndexedIOTest
{
IndexedIOTest(const FilenameList &filenames) : m_filenames(filenames) {};
// Read the scalar entry for type D from every data file (if present)
// and verify it round-trips. A missing entry (entry() throwing) is
// silently skipped so older data files remain usable.
template<typename D>
void test()
{
for (FilenameList::const_iterator it = m_filenames.begin(); it != m_filenames.end(); ++it)
{
IndexedIOPtr io = new T(*it, IndexedIO::rootPath, IndexedIO::Read );
bool exists = true;
try
{
io->entry( <API key><D>::name() );
}
catch (...)
{
exists = false;
}
if ( exists )
{
D v;
io->read(<API key><D>::name(), v );
<API key><D>::check(v);
}
}
}
// Same as test<D>(), but for the fixed-length (10 element) arrays.
template<typename D>
void testArray()
{
for (FilenameList::const_iterator it = m_filenames.begin(); it != m_filenames.end(); ++it)
{
IndexedIOPtr io = new T(*it, IndexedIO::rootPath, IndexedIO::Read );
bool exists = true;
try
{
io->entry( <API key><D>::name() );
}
catch (...)
{
exists = false;
}
if ( exists )
{
D *v = new D[10] ;
io->read(<API key><D*>::name(), v, 10 );
<API key><D*>::check(v);
delete[] v;
}
}
}
// Write the canonical scalar value for D and confirm the entry exists.
template<typename D>
void write( IndexedIOPtr io)
{
assert(io);
io->write( <API key><D>::name(), <API key><D>::value() );
io->entry( <API key><D>::name() );
}
// Write the canonical 10-element array for D and confirm it exists.
template<typename D>
void writeArray( IndexedIOPtr io)
{
assert(io);
io->write( <API key><D*>::name(), <API key><D*>::value(), 10 );
io->entry( <API key><D*>::name() );
}
// Regenerate a data file containing every supported scalar and array
// type (used only when producing new reference data).
void write(const std::string &filename)
{
IndexedIOPtr io = new T(filename, IndexedIO::rootPath, IndexedIO::Write );
write<float>(io);
write<double>(io);
write<half>(io);
write<int>(io);
write<int64_t>(io);
write<uint64_t>(io);
write<std::string>(io);
write<unsigned int>(io);
write<char>(io);
write<unsigned char>(io);
write<short>(io);
write<unsigned short>(io);
writeArray<float>(io);
writeArray<double>(io);
writeArray<half>(io);
writeArray<int>(io);
writeArray<int64_t>(io);
writeArray<uint64_t>(io);
writeArray<std::string>(io);
writeArray<unsigned int>(io);
writeArray<char>(io);
writeArray<unsigned char>(io);
writeArray<short>(io);
writeArray<unsigned short>(io);
}
FilenameList m_filenames;
};
// Boost test suite: registers a read-back test for every supported
// scalar and array type against the pre-generated data files returned
// by getFilenames() (specialized per-backend elsewhere).
template<typename T>
struct IndexedIOTestSuite : public boost::unit_test::test_suite
{
IndexedIOTestSuite() : boost::unit_test::test_suite("IndexedIOTestSuite")
{
FilenameList filenames;
getFilenames(filenames);
static boost::shared_ptr<IndexedIOTest<T> > instance(new IndexedIOTest<T>(filenames));
// Uncomment this line to write out new test data - change architecture first
//instance->write("./test/IECore/data/" + extension() + "Files/" + IECore::versionString() + "/cent5.x86_64/types." + extension());
add( <API key>( &IndexedIOTest<T>::template test<float>, instance ) );
add( <API key>( &IndexedIOTest<T>::template test<double>, instance ) );
add( <API key>( &IndexedIOTest<T>::template test<half>, instance ) );
add( <API key>( &IndexedIOTest<T>::template test<int>, instance ) );
add( <API key>( &IndexedIOTest<T>::template test<int64_t>, instance ) );
add( <API key>( &IndexedIOTest<T>::template test<uint64_t>, instance ) );
add( <API key>( &IndexedIOTest<T>::template test<std::string>, instance ) );
add( <API key>( &IndexedIOTest<T>::template test<unsigned int>, instance ) );
add( <API key>( &IndexedIOTest<T>::template test<char>, instance ) );
add( <API key>( &IndexedIOTest<T>::template test<unsigned char>, instance ) );
add( <API key>( &IndexedIOTest<T>::template testArray<float>, instance ) );
add( <API key>( &IndexedIOTest<T>::template testArray<double>, instance ) );
add( <API key>( &IndexedIOTest<T>::template testArray<half>, instance ) );
add( <API key>( &IndexedIOTest<T>::template testArray<int>, instance ) );
add( <API key>( &IndexedIOTest<T>::template testArray<int64_t>, instance ) );
add( <API key>( &IndexedIOTest<T>::template testArray<uint64_t>, instance ) );
add( <API key>( &IndexedIOTest<T>::template testArray<std::string>, instance ) );
add( <API key>( &IndexedIOTest<T>::template testArray<unsigned int>, instance ) );
add( <API key>( &IndexedIOTest<T>::template testArray<char>, instance ) );
add( <API key>( &IndexedIOTest<T>::template testArray<unsigned char>, instance ) );
}
// Per-backend file extension and data-file discovery (defined in the
// backend-specific translation units).
std::string extension() const;
void getFilenames( FilenameList &filenames );
};
}
#endif
|
;; GCC machine description for CRX.
;; Copyright (C) 1988, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
;; 2001, 2002, 2003, 2004, 2007
;; Free Software Foundation, Inc.
;;
;; This file is part of GCC.
;;
;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.
;;
;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3. If not see
;; <http://www.gnu.org/licenses/>.
;; Register numbers
(define_constants
[(SP_REGNUM 15) ; Stack pointer
(RA_REGNUM 14) ; Return address
(LO_REGNUM 16) ; LO register
(HI_REGNUM 17) ; HI register
(CC_REGNUM 18) ; Condition code register
]
)
;; Default instruction length in bytes; individual patterns override it
;; via set_attr below.
(define_attr "length" "" ( const_int 6 ))
;; NOTE(review): the form below was redacted in this copy -- presumably
;; (define_asm_attributes ...), giving inline asm statements a default
;; length of 6. Confirm against the upstream crx.md.
(<API key>
[(set_attr "length" "6")]
)
;; Predicates
;; Constant representable as a 4-bit immediate, or a CONST_DOUBLE
;; accepted by crx_const_double_ok. (The bit-width check call was
;; redacted in this copy.)
(define_predicate "u4bits_operand"
(match_code "const_int,const_double")
{
if (GET_CODE (op) == CONST_DOUBLE)
return crx_const_double_ok (op);
return (<API key>(INTVAL(op), 4)) ? 1 : 0;
}
)
;; Constant accepted by the INT_CST4 range check.
(define_predicate "cst4_operand"
(and (match_code "const_int")
(match_test "INT_CST4(INTVAL(op))")))
;; Register, or a 4-bit immediate (name redacted in this copy).
(define_predicate "<API key>"
(ior (match_operand 0 "u4bits_operand")
(match_operand 0 "register_operand")))
;; Register, or a cst4-range immediate.
(define_predicate "reg_or_cst4_operand"
(ior (match_operand 0 "cst4_operand")
(match_operand 0 "register_operand")))
;; Register or symbolic reference.
(define_predicate "reg_or_sym_operand"
(ior (match_code "symbol_ref")
(match_operand 0 "register_operand")))
;; Any register except the stack pointer.
(define_predicate "nosp_reg_operand"
(and (match_operand 0 "register_operand")
(match_test "REGNO (op) != SP_REGNUM")))
;; Memory destination that is not a push.
(define_predicate "store_operand"
(and (match_operand 0 "memory_operand")
(not (match_operand 0 "push_operand"))))
;; Mode Macro Definitions
(define_mode_macro ALLMT [QI HI SI SF DI DF])
(define_mode_macro CRXMM [QI HI SI SF])
(define_mode_macro CRXIM [QI HI SI])
(define_mode_macro DIDFM [DI DF])
(define_mode_macro SISFM [SI SF])
(define_mode_macro SHORT [QI HI])
(define_mode_attr tIsa [(QI "b") (HI "w") (SI "d") (SF "d")])
(define_mode_attr lImmArith [(QI "4") (HI "4") (SI "6")])
(define_mode_attr lImmRotl [(QI "2") (HI "2") (SI "4")])
(define_mode_attr IJK [(QI "I") (HI "J") (SI "K")])
(define_mode_attr iF [(QI "i") (HI "i") (SI "i") (DI "i") (SF "F") (DF "F")])
(define_mode_attr JG [(QI "J") (HI "J") (SI "J") (DI "J") (SF "G") (DF "G")])
; In HI or QI mode we push 4 bytes.
(define_mode_attr pushCnstr [(QI "X") (HI "X") (SI "<") (SF "<") (DI "<") (DF "<")])
(define_mode_attr tpush [(QI "") (HI "") (SI "") (SF "") (DI "sp, ") (DF "sp, ")])
(define_mode_attr lpush [(QI "2") (HI "2") (SI "2") (SF "2") (DI "4") (DF "4")])
;; Code Macro Definitions
(define_code_macro sz_xtnd [sign_extend zero_extend])
(define_code_attr sIsa [(sign_extend "") (zero_extend "u")])
(define_code_attr sPat [(sign_extend "s") (zero_extend "u")])
(define_code_attr szPat [(sign_extend "") (zero_extend "zero_")])
(define_code_attr szIsa [(sign_extend "s") (zero_extend "z")])
(define_code_macro sh_oprnd [ashift ashiftrt lshiftrt])
(define_code_attr shIsa [(ashift "ll") (ashiftrt "ra") (lshiftrt "rl")])
(define_code_attr shPat [(ashift "ashl") (ashiftrt "ashr") (lshiftrt "lshr")])
(define_code_macro mima_oprnd [smax umax smin umin])
(define_code_attr mimaIsa [(smax "maxs") (umax "maxu") (smin "mins") (umin "minu")])
(define_code_macro any_cond [eq ne gt gtu lt ltu ge geu le leu])
;; Addition Instructions
(define_insn "adddi3"
[(set (match_operand:DI 0 "register_operand" "=r,r")
(plus:DI (match_operand:DI 1 "register_operand" "%0,0")
(match_operand:DI 2 "nonmemory_operand" "r,i")))
(clobber (reg:CC CC_REGNUM))]
""
"addd\t%L2, %L1\;addcd\t%H2, %H1"
[(set_attr "length" "4,12")]
)
(define_insn "add<mode>3"
[(set (match_operand:CRXIM 0 "register_operand" "=r,r")
(plus:CRXIM (match_operand:CRXIM 1 "register_operand" "%0,0")
(match_operand:CRXIM 2 "nonmemory_operand" "r,i")))
(clobber (reg:CC CC_REGNUM))]
""
"add<tIsa>\t%2, %0"
[(set_attr "length" "2,<lImmArith>")]
)
;; Subtract Instructions
(define_insn "subdi3"
[(set (match_operand:DI 0 "register_operand" "=r,r")
(minus:DI (match_operand:DI 1 "register_operand" "0,0")
(match_operand:DI 2 "nonmemory_operand" "r,i")))
(clobber (reg:CC CC_REGNUM))]
""
"subd\t%L2, %L1\;subcd\t%H2, %H1"
[(set_attr "length" "4,12")]
)
(define_insn "sub<mode>3"
[(set (match_operand:CRXIM 0 "register_operand" "=r,r")
(minus:CRXIM (match_operand:CRXIM 1 "register_operand" "0,0")
(match_operand:CRXIM 2 "nonmemory_operand" "r,i")))
(clobber (reg:CC CC_REGNUM))]
""
"sub<tIsa>\t%2, %0"
[(set_attr "length" "2,<lImmArith>")]
)
;; Multiply Instructions
(define_insn "mul<mode>3"
[(set (match_operand:CRXIM 0 "register_operand" "=r,r")
(mult:CRXIM (match_operand:CRXIM 1 "register_operand" "%0,0")
(match_operand:CRXIM 2 "nonmemory_operand" "r,i")))
(clobber (reg:CC CC_REGNUM))]
""
"mul<tIsa>\t%2, %0"
[(set_attr "length" "2,<lImmArith>")]
)
;; <API key> Instructions
(define_insn "<sIsa>mulsidi3"
[(set (match_operand:DI 0 "register_operand" "=k")
(mult:DI (sz_xtnd:DI (match_operand:SI 1 "register_operand" "%r"))
(sz_xtnd:DI (match_operand:SI 2 "register_operand" "r"))))
(clobber (reg:CC CC_REGNUM))]
""
"mull<sPat>d\t%2, %1"
[(set_attr "length" "4")]
)
(define_insn "<sIsa>mulhisi3"
[(set (match_operand:SI 0 "register_operand" "=r")
(mult:SI (sz_xtnd:SI (match_operand:HI 1 "register_operand" "%0"))
(sz_xtnd:SI (match_operand:HI 2 "register_operand" "r"))))
(clobber (reg:CC CC_REGNUM))]
""
"mul<sPat>wd\t%2, %0"
[(set_attr "length" "4")]
)
(define_insn "<sIsa>mulqihi3"
[(set (match_operand:HI 0 "register_operand" "=r")
(mult:HI (sz_xtnd:HI (match_operand:QI 1 "register_operand" "%0"))
(sz_xtnd:HI (match_operand:QI 2 "register_operand" "r"))))
(clobber (reg:CC CC_REGNUM))]
""
"mul<sPat>bw\t%2, %0"
[(set_attr "length" "4")]
)
;; Logical Instructions - and
(define_insn "and<mode>3"
[(set (match_operand:CRXIM 0 "register_operand" "=r,r")
(and:CRXIM (match_operand:CRXIM 1 "register_operand" "%0,0")
(match_operand:CRXIM 2 "nonmemory_operand" "r,i")))
(clobber (reg:CC CC_REGNUM))]
""
"and<tIsa>\t%2, %0"
[(set_attr "length" "2,<lImmArith>")]
)
;; Logical Instructions - or
(define_insn "ior<mode>3"
[(set (match_operand:CRXIM 0 "register_operand" "=r,r")
(ior:CRXIM (match_operand:CRXIM 1 "register_operand" "%0,0")
(match_operand:CRXIM 2 "nonmemory_operand" "r,i")))
(clobber (reg:CC CC_REGNUM))]
""
"or<tIsa>\t%2, %0"
[(set_attr "length" "2,<lImmArith>")]
)
;; Logical Instructions - xor
(define_insn "xor<mode>3"
[(set (match_operand:CRXIM 0 "register_operand" "=r,r")
(xor:CRXIM (match_operand:CRXIM 1 "register_operand" "%0,0")
(match_operand:CRXIM 2 "nonmemory_operand" "r,i")))
(clobber (reg:CC CC_REGNUM))]
""
"xor<tIsa>\t%2, %0"
[(set_attr "length" "2,<lImmArith>")]
)
;; Sign and Zero Extend Instructions
(define_insn "<szPat>extendhisi2"
[(set (match_operand:SI 0 "register_operand" "=r")
(sz_xtnd:SI (match_operand:HI 1 "register_operand" "r")))
(clobber (reg:CC CC_REGNUM))]
""
"<szIsa>extwd\t%1, %0"
[(set_attr "length" "4")]
)
(define_insn "<szPat>extendqisi2"
[(set (match_operand:SI 0 "register_operand" "=r")
(sz_xtnd:SI (match_operand:QI 1 "register_operand" "r")))
(clobber (reg:CC CC_REGNUM))]
""
"<szIsa>extbd\t%1, %0"
[(set_attr "length" "4")]
)
(define_insn "<szPat>extendqihi2"
[(set (match_operand:HI 0 "register_operand" "=r")
(sz_xtnd:HI (match_operand:QI 1 "register_operand" "r")))
(clobber (reg:CC CC_REGNUM))]
""
"<szIsa>extbw\t%1, %0"
[(set_attr "length" "4")]
)
;; Negation Instructions
(define_insn "neg<mode>2"
[(set (match_operand:CRXIM 0 "register_operand" "=r")
(neg:CRXIM (match_operand:CRXIM 1 "register_operand" "r")))
(clobber (reg:CC CC_REGNUM))]
""
"neg<tIsa>\t%1, %0"
[(set_attr "length" "4")]
)
;; Absolute Instructions
(define_insn "abs<mode>2"
[(set (match_operand:CRXIM 0 "register_operand" "=r")
(abs:CRXIM (match_operand:CRXIM 1 "register_operand" "r")))
(clobber (reg:CC CC_REGNUM))]
""
"abs<tIsa>\t%1, %0"
[(set_attr "length" "4")]
)
;; Max and Min Instructions
(define_insn "<code><mode>3"
[(set (match_operand:CRXIM 0 "register_operand" "=r")
(mima_oprnd:CRXIM (match_operand:CRXIM 1 "register_operand" "%0")
(match_operand:CRXIM 2 "register_operand" "r")))]
""
"<mimaIsa><tIsa>\t%2, %0"
[(set_attr "length" "4")]
)
;; One's Complement
(define_insn "one_cmpl<mode>2"
[(set (match_operand:CRXIM 0 "register_operand" "=r")
(not:CRXIM (match_operand:CRXIM 1 "register_operand" "0")))
(clobber (reg:CC CC_REGNUM))]
""
"xor<tIsa>\t$-1, %0"
[(set_attr "length" "2")]
)
;; Rotate Instructions
(define_insn "rotl<mode>3"
[(set (match_operand:CRXIM 0 "register_operand" "=r,r")
(rotate:CRXIM (match_operand:CRXIM 1 "register_operand" "0,0")
(match_operand:CRXIM 2 "nonmemory_operand" "r,<IJK>")))
(clobber (reg:CC CC_REGNUM))]
""
"@
rotl<tIsa>\t%2, %0
rot<tIsa>\t%2, %0"
[(set_attr "length" "4,<lImmRotl>")]
)
(define_insn "rotr<mode>3"
[(set (match_operand:CRXIM 0 "register_operand" "=r")
(rotatert:CRXIM (match_operand:CRXIM 1 "register_operand" "0")
(match_operand:CRXIM 2 "register_operand" "r")))
(clobber (reg:CC CC_REGNUM))]
""
"rotr<tIsa>\t%2, %0"
[(set_attr "length" "4")]
)
;; Arithmetic Left and Right Shift Instructions
(define_insn "<shPat><mode>3"
[(set (match_operand:CRXIM 0 "register_operand" "=r,r")
(sh_oprnd:CRXIM (match_operand:CRXIM 1 "register_operand" "0,0")
(match_operand:QI 2 "nonmemory_operand" "r,<IJK>")))
(clobber (reg:CC CC_REGNUM))]
""
"s<shIsa><tIsa>\t%2, %0"
[(set_attr "length" "2,2")]
)
;; Bit Set Instructions
;; Sign extract: position the field at the top of the word (presumably
;; `ram` is a rotate-and-mask -- confirm against the CRX ISA manual),
;; then arithmetic-shift right to sign-extend it down.
;; The static buffer makes output generation non-reentrant, which is
;; the usual idiom for insn output functions.
(define_insn "extv"
[(set (match_operand:SI 0 "register_operand" "=r")
(sign_extract:SI (match_operand:SI 1 "register_operand" "r")
(match_operand:SI 2 "const_int_operand" "n")
(match_operand:SI 3 "const_int_operand" "n")))]
""
{
static char buf[100];
int strpntr;
int size = INTVAL (operands[2]);
int pos = INTVAL (operands[3]);
strpntr = sprintf (buf, "ram\t$%d, $31, $%d, %%1, %%0\;",
BITS_PER_WORD - (size + pos), BITS_PER_WORD - size);
sprintf (buf + strpntr, "srad\t$%d, %%0", BITS_PER_WORD - size);
return buf;
}
[(set_attr "length" "6")]
)
;; Zero extract: a single rotate-and-mask suffices (no sign step).
(define_insn "extzv"
[(set (match_operand:SI 0 "register_operand" "=r")
(zero_extract:SI (match_operand:SI 1 "register_operand" "r")
(match_operand:SI 2 "const_int_operand" "n")
(match_operand:SI 3 "const_int_operand" "n")))]
""
{
static char buf[40];
int size = INTVAL (operands[2]);
int pos = INTVAL (operands[3]);
sprintf (buf, "ram\t$%d, $%d, $0, %%1, %%0",
(BITS_PER_WORD - pos) % BITS_PER_WORD, size - 1);
return buf;
}
[(set_attr "length" "4")]
)
;; Bitfield insert via `rim` (presumably rotate-and-insert-under-mask).
(define_insn "insv"
[(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
(match_operand:SI 1 "const_int_operand" "n")
(match_operand:SI 2 "const_int_operand" "n"))
(match_operand:SI 3 "register_operand" "r"))]
""
{
static char buf[40];
int size = INTVAL (operands[1]);
int pos = INTVAL (operands[2]);
sprintf (buf, "rim\t$%d, $%d, $%d, %%3, %%0",
pos, size + pos - 1, pos);
return buf;
}
[(set_attr "length" "4")]
)
;; Move Instructions
;; General move expander: when the destination is not a register, make sure
;; the source is one the matching store/push insn can accept, otherwise
;; force it through a scratch register.  During reload no new pseudos may be
;; created, so the legitimization is skipped there.
(define_expand "mov<mode>"
[(set (match_operand:ALLMT 0 "<API key>" "")
(match_operand:ALLMT 1 "general_operand" ""))]
""
{
if (!(reload_in_progress || reload_completed))
{
if (!register_operand (operands[0], <MODE>mode))
{
/* A push only accepts a non-SP register source; other stores accept
whatever their source predicate allows.  */
if (push_operand (operands[0], <MODE>mode) ?
!nosp_reg_operand (operands[1], <MODE>mode) :
!<API key> (operands[1], <MODE>mode))
{
operands[1] = copy_to_mode_reg (<MODE>mode, operands[1]);
}
}
}
}
)
;; Push a (non-SP) register onto the stack.
(define_insn "push<mode>_internal"
[(set (match_operand:ALLMT 0 "push_operand" "=<pushCnstr>")
(match_operand:ALLMT 1 "nosp_reg_operand" "b"))]
""
"push\t<tpush>%p1"
[(set_attr "length" "<lpush>")]
)
;; Word-size register/immediate moves; alternatives 2/3 move between the
;; general registers and the special "k" register class via mfpr/mtpr.
(define_insn "mov<mode>_regs"
[(set (match_operand:SISFM 0 "register_operand" "=r, r, r, k")
(match_operand:SISFM 1 "nonmemory_operand" "r, <iF>, k, r"))]
""
"@
movd\t%1, %0
movd\t%1, %0
mfpr\t%1, %0
mtpr\t%1, %0"
[(set_attr "length" "2,6,4,4")]
)
;; Double-word moves are emitted as two word moves.  For the reg-reg case
;; the halves are ordered by register number so an overlapping source half
;; is read before it is overwritten.
(define_insn "mov<mode>_regs"
[(set (match_operand:DIDFM 0 "register_operand" "=r, r, r, k")
(match_operand:DIDFM 1 "nonmemory_operand" "r, <iF>, k, r"))]
""
{
switch (which_alternative)
{
case 0: if (REGNO (operands[0]) > REGNO (operands[1]))
return "movd\t%H1, %H0\;movd\t%L1, %L0";
else
return "movd\t%L1, %L0\;movd\t%H1, %H0";
case 1: return "movd\t%H1, %H0\;movd\t%L1, %L0";
case 2: return "mfpr\t%H1, %H0\;mfpr\t%L1, %L0";
case 3: return "mtpr\t%H1, %H0\;mtpr\t%L1, %L0";
default: gcc_unreachable ();
}
}
[(set_attr "length" "4,12,8,8")]
)
(define_insn "mov<mode>_regs" ; no HI/QI mode in HILO regs
[(set (match_operand:SHORT 0 "register_operand" "=r, r")
(match_operand:SHORT 1 "nonmemory_operand" "r, i"))]
""
"mov<tIsa>\t%1, %0"
[(set_attr "length" "2,<lImmArith>")]
)
;; Memory loads; the double-word variant orders the two word loads so the
;; address register (if it overlaps the destination) is consumed first.
(define_insn "mov<mode>_load"
[(set (match_operand:CRXMM 0 "register_operand" "=r")
(match_operand:CRXMM 1 "memory_operand" "m"))]
""
"load<tIsa>\t%1, %0"
[(set_attr "length" "6")]
)
(define_insn "mov<mode>_load"
[(set (match_operand:DIDFM 0 "register_operand" "=r")
(match_operand:DIDFM 1 "memory_operand" "m"))]
""
{
rtx first_dest_reg = gen_rtx_REG (SImode, REGNO (operands[0]));
if (<API key> (first_dest_reg, operands[1]))
return "loadd\t%H1, %H0\;loadd\t%L1, %L0";
return "loadd\t%L1, %L0\;loadd\t%H1, %H0";
}
[(set_attr "length" "12")]
)
;; Memory stores; the source may be a register or a storable constant (<JG>).
(define_insn "mov<mode>_store"
[(set (match_operand:CRXMM 0 "store_operand" "=m, m")
(match_operand:CRXMM 1 "<API key>" "r, <JG>"))]
""
"stor<tIsa>\t%1, %0"
[(set_attr "length" "6")]
)
(define_insn "mov<mode>_store"
[(set (match_operand:DIDFM 0 "store_operand" "=m, m")
(match_operand:DIDFM 1 "<API key>" "r, <JG>"))]
""
"stord\t%H1, %H0\;stord\t%L1, %L0"
[(set_attr "length" "12")]
)
;; Movmem Instruction
;; Block move: operands are dst, src, byte count, alignment.  Delegates to
;; crx_expand_movmem, which either emits a move sequence or refuses (FAIL
;; falls back to the generic library call).
(define_expand "movmemsi"
[(use (match_operand:BLK 0 "memory_operand" ""))
(use (match_operand:BLK 1 "memory_operand" ""))
(use (match_operand:SI 2 "nonmemory_operand" ""))
(use (match_operand:SI 3 "const_int_operand" ""))]
""
{
if (crx_expand_movmem (operands[0], operands[1], operands[2], operands[3]))
DONE;
else
FAIL;
}
)
;; Compare and Branch Instructions
;; Fused compare-and-branch against a register or small constant (L).
(define_insn "cbranch<mode>4"
[(set (pc)
(if_then_else (match_operator 0 "comparison_operator"
[(match_operand:CRXIM 1 "register_operand" "r")
(match_operand:CRXIM 2 "reg_or_cst4_operand" "rL")])
(label_ref (match_operand 3 "" ""))
(pc)))
(clobber (reg:CC CC_REGNUM))]
""
"cmpb%d0<tIsa>\t%2, %1, %l3"
[(set_attr "length" "6")]
)
;; Compare Instructions
;; The expander emits nothing: it only records the operands in globals; the
;; real compare is produced later by crx_expand_branch/crx_expand_scond
;; together with the insn that consumes the flags.
(define_expand "cmp<mode>"
[(set (reg:CC CC_REGNUM)
(compare:CC (match_operand:CRXIM 0 "register_operand" "")
(match_operand:CRXIM 1 "nonmemory_operand" "")))]
""
{
crx_compare_op0 = operands[0];
crx_compare_op1 = operands[1];
DONE;
}
)
(define_insn "cmp<mode>_internal"
[(set (reg:CC CC_REGNUM)
(compare:CC (match_operand:CRXIM 0 "register_operand" "r,r")
(match_operand:CRXIM 1 "nonmemory_operand" "r,i")))]
""
"cmp<tIsa>\t%1, %0"
[(set_attr "length" "2,<lImmArith>")]
)
;; Conditional Branch Instructions
;; Re-emits the compare deferred by cmp<mode> together with the branch.
(define_expand "b<code>"
[(set (pc)
(if_then_else (any_cond (reg:CC CC_REGNUM)
(const_int 0))
(label_ref (match_operand 0 ""))
(pc)))]
""
{
crx_expand_branch (<CODE>, operands[0]);
DONE;
}
)
(define_insn "bCOND_internal"
[(set (pc)
(if_then_else (match_operator 0 "comparison_operator"
[(reg:CC CC_REGNUM)
(const_int 0)])
(label_ref (match_operand 1 ""))
(pc)))]
""
"b%d0\t%l1"
[(set_attr "length" "6")]
)
;; Scond Instructions
;; Store the condition result (0/1) into a register; same deferred-compare
;; scheme as the branches above.
(define_expand "s<code>"
[(set (match_operand:SI 0 "register_operand")
(any_cond:SI (reg:CC CC_REGNUM) (const_int 0)))]
""
{
crx_expand_scond (<CODE>, operands[0]);
DONE;
}
)
(define_insn "sCOND_internal"
[(set (match_operand:SI 0 "register_operand" "=r")
(match_operator:SI 1 "comparison_operator"
[(reg:CC CC_REGNUM) (const_int 0)]))]
""
"s%d1\t%0"
[(set_attr "length" "2")]
)
;; Jumps and Branches
;; Simple function return: jump through the return-address register.
(define_insn "<API key>"
[(parallel
[(set (pc)
(reg:SI RA_REGNUM))
(return)])
]
"reload_completed"
"jump\tra"
[(set_attr "length" "2")]
)
;; Indirect jump through a register, or a direct branch to a symbol.
(define_insn "indirect_jump"
[(set (pc)
(match_operand:SI 0 "reg_or_sym_operand" "r,i"))]
""
"@
jump\t%0
br\t%a0"
[(set_attr "length" "2,6")]
)
;; Interrupt-handler return; the epilogue text is produced by the backend.
(define_insn "interrupt_return"
[(parallel
[(unspec_volatile [(const_int 0)] 0)
(return)])]
""
{
return <API key> (1);
}
[(set_attr "length" "14")]
)
(define_insn "jump_to_imm"
[(set (pc)
(match_operand 0 "immediate_operand" "i"))]
""
"br\t%c0"
[(set_attr "length" "6")]
)
(define_insn "jump"
[(set (pc)
(label_ref (match_operand 0 "" "")))]
""
"br\t%l0"
[(set_attr "length" "6")]
)
;; Function Prologue and Epilogue
;; Prologue/epilogue sequences are generated entirely in C.
(define_expand "prologue"
[(const_int 0)]
""
{
crx_expand_prologue ();
DONE;
}
)
;; Stack-frame allocation (SP decrement / register push) for the prologue;
;; the assembly text is produced by the backend helper.
(define_insn "push_for_prologue"
[(parallel
[(set (reg:SI SP_REGNUM)
(minus:SI (reg:SI SP_REGNUM)
(match_operand:SI 0 "immediate_operand" "i")))])]
"reload_completed"
{
return <API key> (0);
}
[(set_attr "length" "4")]
)
(define_expand "epilogue"
[(return)]
""
{
crx_expand_epilogue ();
DONE;
}
)
;; Stack-frame deallocation combined with the return through RA.
(define_insn "<API key>"
[(parallel
[(set (reg:SI SP_REGNUM)
(plus:SI (reg:SI SP_REGNUM)
(match_operand:SI 0 "immediate_operand" "i")))
(use (reg:SI RA_REGNUM))
(return)])
]
"reload_completed"
{
return <API key> (1);
}
[(set_attr "length" "4")]
)
;; Pop saved registers and return in a single popret instruction.
(define_insn "popret_RA_return"
[(parallel
[(use (reg:SI RA_REGNUM))
(return)])
]
"reload_completed"
"popret\tra"
[(set_attr "length" "2")]
)
;; Table Jump
;; Dispatch through a register loaded from a jump table; the label keeps the
;; table alive.
(define_insn "tablejump"
[(set (pc)
(match_operand:SI 0 "register_operand" "r"))
(use (label_ref:SI (match_operand 1 "" "" )))]
""
"jump\t%0"
[(set_attr "length" "2")]
)
;; Call Instructions
;; The expander wraps every call in a parallel that clobbers RA.
(define_expand "call"
[(call (match_operand:QI 0 "memory_operand" "")
(match_operand 1 "" ""))]
""
{
emit_call_insn (gen_crx_call (operands[0], operands[1]))
;
DONE;
}
)
(define_expand "crx_call"
[(parallel
[(call (match_operand:QI 0 "memory_operand" "")
(match_operand 1 "" ""))
(clobber (reg:SI RA_REGNUM))])]
""
""
)
;; Direct call (branch-and-link to an immediate address).
(define_insn "<API key>"
[(call (mem:QI (match_operand:SI 0 "immediate_operand" "i"))
(match_operand 1 "" ""))
(clobber (match_operand:SI 2 "register_operand" "+r"))]
""
"bal\tra, %a0"
[(set_attr "length" "6")]
)
;; Indirect call through a register.
(define_insn "crx_call_insn_jump"
[(call (mem:QI (match_operand:SI 0 "register_operand" "r"))
(match_operand 1 "" ""))
(clobber (match_operand:SI 2 "register_operand" "+r"))]
""
"jal\t%0"
[(set_attr "length" "2")]
)
;; Indirect call through a memory word addressed by the sum of two registers.
(define_insn "crx_call_insn_jalid"
[(call (mem:QI (mem:SI (plus:SI
(match_operand:SI 0 "register_operand" "r")
(match_operand:SI 1 "register_operand" "r"))))
(match_operand 2 "" ""))
(clobber (match_operand:SI 3 "register_operand" "+r"))]
""
"jalid\t%0, %1"
[(set_attr "length" "4")]
)
;; Call Value Instructions
;; Value-returning calls mirror the plain call patterns above, with the call
;; result as operand 0 and every other operand shifted up by one.
(define_expand "call_value"
[(set (match_operand 0 "general_operand" "")
(call (match_operand:QI 1 "memory_operand" "")
(match_operand 2 "" "")))]
""
{
emit_call_insn (gen_crx_call_value (operands[0], operands[1], operands[2]));
DONE;
}
)
(define_expand "crx_call_value"
[(parallel
[(set (match_operand 0 "general_operand" "")
(call (match_operand 1 "memory_operand" "")
(match_operand 2 "" "")))
(clobber (reg:SI RA_REGNUM))])]
""
""
)
;; Direct value-returning call.
(define_insn "<API key>"
[(set (match_operand 0 "" "=g")
(call (mem:QI (match_operand:SI 1 "immediate_operand" "i"))
(match_operand 2 "" "")))
(clobber (match_operand:SI 3 "register_operand" "+r"))]
""
"bal\tra, %a1"
[(set_attr "length" "6")]
)
;; Indirect value-returning call through a register.
(define_insn "<API key>"
[(set (match_operand 0 "" "=g")
(call (mem:QI (match_operand:SI 1 "register_operand" "r"))
(match_operand 2 "" "")))
(clobber (match_operand:SI 3 "register_operand" "+r"))]
""
"jal\t%1"
[(set_attr "length" "2")]
)
;; Indirect value-returning call through memory addressed by reg+reg.
;; Fixed: the template used %0/%1, but operand 0 is the call's value result
;; ("=g", possibly memory) — the address registers are operands 1 and 2
;; (compare the non-value jalid pattern, which uses its register operands).
(define_insn "<API key>"
[(set (match_operand 0 "" "=g")
(call (mem:QI (mem:SI (plus:SI
(match_operand:SI 1 "register_operand" "r")
(match_operand:SI 2 "register_operand" "r"))))
(match_operand 3 "" "")))
(clobber (match_operand:SI 4 "register_operand" "+r"))]
""
"jalid\t%1, %2"
[(set_attr "length" "4")]
)
;; Nop
;; Empty pattern: emits no code for a nop request.
(define_insn "nop"
[(const_int 0)]
""
""
)
;; Multiply and Accumulate Instructions
;; acc += src1 * src2 with widening multiply; <sPat>/sz_xtnd select the
;; signed or unsigned variant.  Only available with -mmac (TARGET_MAC).
(define_insn "<sPat>madsidi3"
[(set (match_operand:DI 0 "register_operand" "+k")
(plus:DI
(mult:DI (sz_xtnd:DI (match_operand:SI 1 "register_operand" "%r"))
(sz_xtnd:DI (match_operand:SI 2 "register_operand" "r")))
(match_dup 0)))
(clobber (reg:CC CC_REGNUM))]
"TARGET_MAC"
"mac<sPat>d\t%2, %1"
[(set_attr "length" "4")]
)
(define_insn "<sPat>madhisi3"
[(set (match_operand:SI 0 "register_operand" "+l")
(plus:SI
(mult:SI (sz_xtnd:SI (match_operand:HI 1 "register_operand" "%r"))
(sz_xtnd:SI (match_operand:HI 2 "register_operand" "r")))
(match_dup 0)))
(clobber (reg:CC CC_REGNUM))]
"TARGET_MAC"
"mac<sPat>w\t%2, %1"
[(set_attr "length" "4")]
)
(define_insn "<sPat>madqihi3"
[(set (match_operand:HI 0 "register_operand" "+l")
(plus:HI
(mult:HI (sz_xtnd:HI (match_operand:QI 1 "register_operand" "%r"))
(sz_xtnd:HI (match_operand:QI 2 "register_operand" "r")))
(match_dup 0)))
(clobber (reg:CC CC_REGNUM))]
"TARGET_MAC"
"mac<sPat>b\t%2, %1"
[(set_attr "length" "4")]
)
;; Loop Instructions
;; Low-overhead loop support: decrement the counter and branch while it is
;; non-zero.  The expander refuses loops nested deeper than the target
;; supports and dispatches on the counter's mode.
(define_expand "doloop_end"
[(use (match_operand 0 "" "")) ; loop pseudo
(use (match_operand 1 "" "")) ; iterations; zero if unknown
(use (match_operand 2 "" "")) ; max iterations
(use (match_operand 3 "" "")) ; loop level
(use (match_operand 4 "" ""))] ; label
""
{
if (INTVAL (operands[3]) > crx_loop_nesting)
FAIL;
switch (GET_MODE (operands[0]))
{
case SImode:
emit_jump_insn (gen_doloop_end_si (operands[4], operands[0]));
break;
case HImode:
emit_jump_insn (gen_doloop_end_hi (operands[4], operands[0]));
break;
case QImode:
emit_jump_insn (gen_doloop_end_qi (operands[4], operands[0]));
break;
default:
FAIL;
}
DONE;
}
)
; CRX dbnz[bwd] used explicitly (see above) but also by the combiner.
;; Alternative 0: counter in a register -> single dbnz.  Alternative 1:
;; counter spilled to memory -> load/decrement/store through a scratch
;; register, then branch on the flags.
(define_insn "doloop_end_<mode>"
[(set (pc)
(if_then_else (ne (match_operand:CRXIM 1 "register_operand" "+r,!m")
(const_int 1))
(label_ref (match_operand 0 "" ""))
(pc)))
(set (match_dup 1) (plus:CRXIM (match_dup 1) (const_int -1)))
(clobber (match_scratch:CRXIM 2 "=X,r"))
(clobber (reg:CC CC_REGNUM))]
""
"@
dbnz<tIsa>\t%1, %l0
load<tIsa>\t%1, %2\;add<tIsa>\t$-1, %2\;stor<tIsa>\t%2, %1\;bne\t%l0"
[(set_attr "length" "6, 12")]
)
|
@echo off
rem Rename every *.vcxproj in each immediate subdirectory to *.2012.vcxproj
rem (together with its .filters and .user companion files), skipping projects
rem whose name already ends in a version suffix (2003..2012).
for /D %%f in (*) do for %%i in (%%f\*.vcxproj) do call :SUB_RENAME "%%i"
goto END
:SUB_RENAME
rem %1 = quoted path of a .vcxproj file; %~n1 = bare file name without extension.
set filename=%~n1
rem Already carries a version suffix -> return.  (A "goto END" executed inside
rem a CALLed label runs off the end of the file and returns to the caller.)
if "%filename:~-4%" == "2012" goto END
if "%filename:~-4%" == "2010" goto END
if "%filename:~-4%" == "2008" goto END
if "%filename:~-4%" == "2005" goto END
if "%filename:~-4%" == "2003" goto END
echo %1 will be renamed
rem %~p1 = path portion of %1; rename the project and its companion files.
rem NOTE(review): the .filters/.user renames will print an error when those
rem companion files do not exist -- confirm that is acceptable.
rename %~p1%filename%.vcxproj %filename%.2012.vcxproj
rename %~p1%filename%.vcxproj.filters %filename%.2012.vcxproj.filters
rename %~p1%filename%.vcxproj.user %filename%.2012.vcxproj.user
goto END
:END
|
#include <xpcc/architecture/platform.hpp>
#include <xpcc/debug/logger.hpp>
// Set the log level
#undef XPCC_LOG_LEVEL
#define XPCC_LOG_LEVEL xpcc::log::INFO
typedef GpioInputC0 Adc1In;
typedef GpioInputC2 Adc2In;
typedef GpioInputB13 Adc3In;
typedef GpioInputB12 Adc4In;
xpcc::IODeviceWrapper< Usart2, xpcc::IOBuffer::BlockIfFull > loggerDevice;
xpcc::log::Logger xpcc::log::info(loggerDevice);
// Read the latest ADC1 sample and log both the raw conversion result and the
// derived input voltage.  Assumes a 3.3 V reference and a 12-bit conversion
// (full scale 0xfff) -- TODO confirm against the board's actual VDDA and the
// configured ADC resolution.  Channels 2-4 are currently disabled (see the
// commented block below).
static void
printAdc()
{
const float maxVoltage = 3.3;
float voltage = 0.0;
int adcValue = 0;
adcValue = Adc1::getValue();
XPCC_LOG_INFO << "Adc1: value=" << adcValue;
// Scale the raw count to volts: 0xfff is the 12-bit full-scale value.
voltage = adcValue * maxVoltage / 0xfff;
XPCC_LOG_INFO << "; voltage=" << voltage << xpcc::endl;
/*
adcValue = Adc2::getValue();
XPCC_LOG_INFO << "Adc2: value=" << adcValue;
voltage = adcValue * maxVoltage / 0xfff;
XPCC_LOG_INFO << "; voltage=" << voltage << xpcc::endl;
adcValue = Adc3::getValue();
XPCC_LOG_INFO << "Adc3: value=" << adcValue;
voltage = adcValue * maxVoltage / 0xfff;
XPCC_LOG_INFO << "; voltage=" << voltage << xpcc::endl;
adcValue = Adc4::getValue();
XPCC_LOG_INFO << "Adc4: value=" << adcValue;
voltage = adcValue * maxVoltage / 0xfff;
XPCC_LOG_INFO << "; voltage=" << voltage << xpcc::endl;
*/
}
// Bring up the board, the logging UART, and all four ADCs in free-running
// mode, then print the ADC1 reading every 200 ms.
int
main()
{
Board::initialize();
// initialize Uart2 for XPCC_LOG_INFO
GpioOutputA2::connect(Usart2::Tx);
GpioInputA3::connect(Usart2::Rx, Gpio::InputType::PullUp);
// NOTE(review): the trailing 12 is presumably the interrupt priority --
// confirm against the Usart2::initialize() signature.
Usart2::initialize<Board::systemClock, 115200>(12);
// initialize Adc
// Each ADC: asynchronous clock /128, calibrate, free-running, one channel
// with the shortest sampling time, then start converting continuously.
Adc1::initialize(Adc1::ClockMode::Asynchronous, Adc1::Prescaler::Div128,
Adc1::CalibrationMode::<API key>, true);
Adc1::setFreeRunningMode(true);
Adc1In::connect(Adc1::Channel6);
Adc1::setChannel(Adc1In::Adc1Channel, Adc1::SampleTime::Cycles2);
Adc1::startConversion();
Adc2::initialize(Adc2::ClockMode::Asynchronous, Adc2::Prescaler::Div128,
Adc2::CalibrationMode::<API key>, true);
Adc2::setFreeRunningMode(true);
Adc2In::connect(Adc2::Channel8);
Adc2::setChannel(Adc2In::Adc2Channel, Adc2::SampleTime::Cycles2);
Adc2::startConversion();
Adc3::initialize(Adc3::ClockMode::Asynchronous, Adc3::Prescaler::Div128,
Adc3::CalibrationMode::<API key>, true);
Adc3::setFreeRunningMode(true);
Adc3In::connect(Adc3::Channel5);
Adc3::setChannel(Adc3In::Adc3Channel, Adc3::SampleTime::Cycles2);
Adc3::startConversion();
Adc4::initialize(Adc4::ClockMode::Asynchronous, Adc4::Prescaler::Div128,
Adc4::CalibrationMode::<API key>, true);
Adc4::setFreeRunningMode(true);
Adc4In::connect(Adc4::Channel3);
Adc4::setChannel(Adc4In::Adc4Channel, Adc4::SampleTime::Cycles2);
Adc4::startConversion();
// Poll and log forever; the ADCs keep converting in the background.
while (1)
{
xpcc::delayMilliseconds(200);
printAdc();
}
return 0;
}
|
# Haskell compiler and flags shared by all example programs.
hc := ghc
# NOTE(review): -fvia-C (compile via C) is deprecated/removed in modern GHC;
# confirm the expected compiler version before reusing these flags.
hcflags := --make -O2 -fvia-C -optc-O2
examples := words spellchecker
# Default target: build every example.
all: $(examples)
words: Words.hs
$(hc) $(hcflags) -o $@ $^
spellchecker: SpellChecker.hs
$(hc) $(hcflags) -o $@ $^
# Remove interface files, object files and the built binaries.
clean:
-rm -f *.hi *.o $(examples)
|
\file
This file is a part of pattern matching testing suite.
\author Yuriy Solodkyy <yuriy.solodkyy@gmail.com>
This file is a part of the XTL framework (http://parasol.tamu.edu/xtl/).
Copyright (C) 2005-2012 Texas A&M University.
All rights reserved.
#include "testshape.hpp"
#include "config.hpp"
#include "ptrtools.hpp"
#if !<API key>
#define dynamic_cast <API key>
#endif
static size_t fdc_id(size_t n);
// Derived shape class N of a balanced binary hierarchy: shape_kind<N>
// derives from shape_kind<N/2>, so class 1 sits under the root and classes
// 2N and 2N+1 derive from class N.
template <size_t N>
struct shape_kind : shape_kind<N/2>
{
typedef shape_kind<N/2> base_class;
// Forward the "fast dynamic cast" id seed down to the root constructor.
shape_kind(size_t n = N) : base_class(n) {}
void accept(ShapeVisitor&) const;
};
// Root of the benchmark hierarchy.  OtherBase forces a non-trivial base
// offset; the Shape base stores the precomputed fdc_id used by the
// memoized-cast scheme below.
template <>
struct shape_kind<0> : OtherBase, Shape
{
typedef Shape base_class;
shape_kind(size_t n = 0) : base_class(n,fdc_id(n)) {}
void accept(ShapeVisitor&) const;
};
// Visitor with one virtual overload per derived class (generated by the
// loop_over_numbers.hpp preprocessor loop).  Each default implementation
// forwards to the overload for its base class, emulating inheritance-aware
// dispatch.
struct ShapeVisitor
{
virtual void visit(const shape_kind<0>&) {}
#define FOR_EACH_MAX NUMBER_OF_DERIVED-2
#define FOR_EACH_N(N) virtual void visit(const shape_kind<N+1>& s) { visit(static_cast<const shape_kind<N+1>::base_class&>(s)); }
#include "loop_over_numbers.hpp"
#undef FOR_EACH_N
#undef FOR_EACH_MAX
};
// Classic double dispatch: each shape calls the visit overload that matches
// its own static type.
template <size_t N> void shape_kind<N>::accept(ShapeVisitor& v) const { v.visit(*this); }
void shape_kind<0>::accept(ShapeVisitor& v) const { v.visit(*this); }
enum { fdc_size = 10 };
// Prime numbers for each level of the binary hierarchy
// Two distinct primes per hierarchy level: [level][bit] picks the factor
// contributed by the left (0) or right (1) child at that level.
const size_t <API key>[fdc_size][2] =
{
{ 2, 2}, // Because the root is 2
{ 3, 5},
{ 7,11},
{13,17},
{19,23},
{29,31},
{37,41},
{43,47},
{53,59},
{61,67}
};
// Computes the "fast dynamic cast" id of derived class n: the product of
// one prime per hierarchy level, selected by the bits of n.  An object's id
// is then divisible by shape_ids[N] exactly when it derives from
// shape_kind<N>, which is what the memoized cast below exploits.
static size_t fdc_id(size_t n)
{
XTL_ASSERT(req_bits(n) < fdc_size);
size_t id = 1;
if (n)
for (size_t m = req_bits(n), i = m; i; --i)
id *= <API key>[m-i][(n & (1 << (i-1))) != 0];
//std::cout << n << "->" << id << std::endl;
return id;
}
// Convenience wrapper used to build the shape_ids table below.
inline size_t id(size_t n) { return fdc_id(n); }
// Precomputed fdc ids for every derived class, indexed by class number.
const size_t shape_ids[100] =
{
id( 0), id( 1), id( 2), id( 3), id( 4), id( 5), id( 6), id( 7), id( 8), id( 9),
id(10), id(11), id(12), id(13), id(14), id(15), id(16), id(17), id(18), id(19),
id(20), id(21), id(22), id(23), id(24), id(25), id(26), id(27), id(28), id(29),
id(30), id(31), id(32), id(33), id(34), id(35), id(36), id(37), id(38), id(39),
id(40), id(41), id(42), id(43), id(44), id(45), id(46), id(47), id(48), id(49),
id(50), id(51), id(52), id(53), id(54), id(55), id(56), id(57), id(58), id(59),
id(60), id(61), id(62), id(63), id(64), id(65), id(66), id(67), id(68), id(69),
id(70), id(71), id(72), id(73), id(74), id(75), id(76), id(77), id(78), id(79),
id(80), id(81), id(82), id(83), id(84), id(85), id(86), id(87), id(88), id(89),
id(90), id(91), id(92), id(93), id(94), id(95), id(96), id(97), id(98), id(99),
};
// "Fast dynamic cast" replacement: u derives from shape_kind<N> iff u's
// precomputed id is divisible by shape_ids[N] (product-of-primes encoding),
// so the downcast reduces to one modulo and a static_cast.
template <size_t N>
inline const shape_kind<N>* <API key>(const shape_kind<N>*, const Shape* u)
{
return u->m_fdc_id % shape_ids[N] == 0
? static_cast<const shape_kind<N>*>(u)
: 0;
}
// Front end with dynamic_cast-like syntax; the null first argument only
// carries the target type for deduction.
template <typename T>
inline T <API key>(const Shape* u)
{
return <API key>(static_cast<T>(0), u);
}
<API key>
size_t do_match(const Shape& s, size_t)
{
if (const shape_kind< 0>* p0 = dynamic_cast<const shape_kind< 0>*>(&s))
{
if (const shape_kind< 1>* p1 = dynamic_cast<const shape_kind< 1>*>(p0))
if (const shape_kind< 2>* p2 = dynamic_cast<const shape_kind< 2>*>(p1))
if (const shape_kind< 4>* p4 = dynamic_cast<const shape_kind< 4>*>(p2))
if (const shape_kind< 8>* p8 = dynamic_cast<const shape_kind< 8>*>(p4))
if (const shape_kind<16>* p16 = dynamic_cast<const shape_kind<16>*>(p8))
if (const shape_kind<32>* p32 = dynamic_cast<const shape_kind<32>*>(p16))
if (const shape_kind<64>* p64 = dynamic_cast<const shape_kind<64>*>(p32))
return p64->m_member7 + 64 ;
else
if (const shape_kind<65>* p65 = dynamic_cast<const shape_kind<65>*>(p32))
return p65->m_member7 + 65 ;
else
return p32->m_member7 + 32 ;
else
if (const shape_kind<33>* p33 = dynamic_cast<const shape_kind<33>*>(p16))
if (const shape_kind<66>* p66 = dynamic_cast<const shape_kind<66>*>(p33))
return p66->m_member7 + 66 ;
else
if (const shape_kind<67>* p67 = dynamic_cast<const shape_kind<67>*>(p33))
return p67->m_member7 + 67 ;
else
return p33->m_member7 + 33 ;
else
return p16->m_member7 + 16 ;
else
if (const shape_kind<17>* p17 = dynamic_cast<const shape_kind<17>*>(p8))
if (const shape_kind<34>* p34 = dynamic_cast<const shape_kind<34>*>(p17))
if (const shape_kind<68>* p68 = dynamic_cast<const shape_kind<68>*>(p34))
return p68->m_member7 + 68 ;
else
if (const shape_kind<69>* p69 = dynamic_cast<const shape_kind<69>*>(p34))
return p69->m_member7 + 69 ;
else
return p34->m_member7 + 34 ;
else
if (const shape_kind<35>* p35 = dynamic_cast<const shape_kind<35>*>(p17))
if (const shape_kind<70>* p70 = dynamic_cast<const shape_kind<70>*>(p35))
return p70->m_member7 + 70 ;
else
if (const shape_kind<71>* p71 = dynamic_cast<const shape_kind<71>*>(p35))
return p71->m_member7 + 71 ;
else
return p35->m_member7 + 35 ;
else
return p17->m_member7 + 17 ;
else
return p8->m_member7 + 8 ;
else
if (const shape_kind< 9>* p9 = dynamic_cast<const shape_kind< 9>*>(p4))
if (const shape_kind<18>* p18 = dynamic_cast<const shape_kind<18>*>(p9))
if (const shape_kind<36>* p36 = dynamic_cast<const shape_kind<36>*>(p18))
if (const shape_kind<72>* p72 = dynamic_cast<const shape_kind<72>*>(p36))
return p72->m_member7 + 72 ;
else
if (const shape_kind<73>* p73 = dynamic_cast<const shape_kind<73>*>(p36))
return p73->m_member7 + 73 ;
else
return p36->m_member7 + 36 ;
else
if (const shape_kind<37>* p37 = dynamic_cast<const shape_kind<37>*>(p18))
if (const shape_kind<74>* p74 = dynamic_cast<const shape_kind<74>*>(p37))
return p74->m_member7 + 74 ;
else
if (const shape_kind<75>* p75 = dynamic_cast<const shape_kind<75>*>(p37))
return p75->m_member7 + 75 ;
else
return p37->m_member7 + 37 ;
else
return p18->m_member7 + 18 ;
else
if (const shape_kind<19>* p19 = dynamic_cast<const shape_kind<19>*>(p9))
if (const shape_kind<38>* p38 = dynamic_cast<const shape_kind<38>*>(p19))
if (const shape_kind<76>* p76 = dynamic_cast<const shape_kind<76>*>(p38))
return p76->m_member7 + 76 ;
else
if (const shape_kind<77>* p77 = dynamic_cast<const shape_kind<77>*>(p38))
return p77->m_member7 + 77 ;
else
return p38->m_member7 + 38 ;
else
if (const shape_kind<39>* p39 = dynamic_cast<const shape_kind<39>*>(p19))
if (const shape_kind<78>* p78 = dynamic_cast<const shape_kind<78>*>(p39))
return p78->m_member7 + 78 ;
else
if (const shape_kind<79>* p79 = dynamic_cast<const shape_kind<79>*>(p39))
return p79->m_member7 + 79 ;
else
return p39->m_member7 + 39 ;
else
return p19->m_member7 + 19 ;
else
return p9->m_member7 + 9 ;
else
return p4->m_member7 + 4 ;
else
if (const shape_kind< 5>* p5 = dynamic_cast<const shape_kind< 5>*>(p2))
if (const shape_kind<10>* p10 = dynamic_cast<const shape_kind<10>*>(p5))
if (const shape_kind<20>* p20 = dynamic_cast<const shape_kind<20>*>(p10))
if (const shape_kind<40>* p40 = dynamic_cast<const shape_kind<40>*>(p20))
if (const shape_kind<80>* p80 = dynamic_cast<const shape_kind<80>*>(p40))
return p80->m_member7 + 80 ;
else
if (const shape_kind<81>* p81 = dynamic_cast<const shape_kind<81>*>(p40))
return p81->m_member7 + 81 ;
else
return p40->m_member7 + 40 ;
else
if (const shape_kind<41>* p41 = dynamic_cast<const shape_kind<41>*>(p20))
if (const shape_kind<82>* p82 = dynamic_cast<const shape_kind<82>*>(p41))
return p82->m_member7 + 82 ;
else
if (const shape_kind<83>* p83 = dynamic_cast<const shape_kind<83>*>(p41))
return p83->m_member7 + 83 ;
else
return p41->m_member7 + 41 ;
else
return p20->m_member7 + 20 ;
else
if (const shape_kind<21>* p21 = dynamic_cast<const shape_kind<21>*>(p10))
if (const shape_kind<42>* p42 = dynamic_cast<const shape_kind<42>*>(p21))
if (const shape_kind<84>* p84 = dynamic_cast<const shape_kind<84>*>(p42))
return p84->m_member7 + 84 ;
else
if (const shape_kind<85>* p85 = dynamic_cast<const shape_kind<85>*>(p42))
return p85->m_member7 + 85 ;
else
return p42->m_member7 + 42 ;
else
if (const shape_kind<43>* p43 = dynamic_cast<const shape_kind<43>*>(p21))
if (const shape_kind<86>* p86 = dynamic_cast<const shape_kind<86>*>(p43))
return p86->m_member7 + 86 ;
else
if (const shape_kind<87>* p87 = dynamic_cast<const shape_kind<87>*>(p43))
return p87->m_member7 + 87 ;
else
return p43->m_member7 + 43 ;
else
return p21->m_member7 + 21 ;
else
return p10->m_member7 + 10 ;
else
if (const shape_kind<11>* p11 = dynamic_cast<const shape_kind<11>*>(p5))
if (const shape_kind<22>* p22 = dynamic_cast<const shape_kind<22>*>(p11))
if (const shape_kind<44>* p44 = dynamic_cast<const shape_kind<44>*>(p22))
if (const shape_kind<88>* p88 = dynamic_cast<const shape_kind<88>*>(p44))
return p88->m_member7 + 88 ;
else
if (const shape_kind<89>* p89 = dynamic_cast<const shape_kind<89>*>(p44))
return p89->m_member7 + 89 ;
else
return p44->m_member7 + 44 ;
else
if (const shape_kind<45>* p45 = dynamic_cast<const shape_kind<45>*>(p22))
if (const shape_kind<90>* p90 = dynamic_cast<const shape_kind<90>*>(p45))
return p90->m_member7 + 90 ;
else
if (const shape_kind<91>* p91 = dynamic_cast<const shape_kind<91>*>(p45))
return p91->m_member7 + 91 ;
else
return p45->m_member7 + 45 ;
else
return p22->m_member7 + 22 ;
else
if (const shape_kind<23>* p23 = dynamic_cast<const shape_kind<23>*>(p11))
if (const shape_kind<46>* p46 = dynamic_cast<const shape_kind<46>*>(p23))
if (const shape_kind<92>* p92 = dynamic_cast<const shape_kind<92>*>(p46))
return p92->m_member7 + 92 ;
else
if (const shape_kind<93>* p93 = dynamic_cast<const shape_kind<93>*>(p46))
return p93->m_member7 + 93 ;
else
return p46->m_member7 + 46 ;
else
if (const shape_kind<47>* p47 = dynamic_cast<const shape_kind<47>*>(p23))
if (const shape_kind<94>* p94 = dynamic_cast<const shape_kind<94>*>(p47))
return p94->m_member7 + 94 ;
else
if (const shape_kind<95>* p95 = dynamic_cast<const shape_kind<95>*>(p47))
return p95->m_member7 + 95 ;
else
return p47->m_member7 + 47 ;
else
return p23->m_member7 + 23 ;
else
return p11->m_member7 + 11 ;
else
return p5->m_member7 + 5 ;
else
return p2->m_member7 + 2 ;
else
if (const shape_kind< 3>* p3 = dynamic_cast<const shape_kind< 3>*>(p1))
if (const shape_kind< 6>* p6 = dynamic_cast<const shape_kind< 6>*>(p3))
if (const shape_kind<12>* p12 = dynamic_cast<const shape_kind<12>*>(p6))
if (const shape_kind<24>* p24 = dynamic_cast<const shape_kind<24>*>(p12))
if (const shape_kind<48>* p48 = dynamic_cast<const shape_kind<48>*>(p24))
if (const shape_kind<96>* p96 = dynamic_cast<const shape_kind<96>*>(p48))
return p96->m_member7 + 96 ;
else
if (const shape_kind<97>* p97 = dynamic_cast<const shape_kind<97>*>(p48))
return p97->m_member7 + 97 ;
else
return p48->m_member7 + 48 ;
else
if (const shape_kind<49>* p49 = dynamic_cast<const shape_kind<49>*>(p24))
if (const shape_kind<98>* p98 = dynamic_cast<const shape_kind<98>*>(p49))
return p98->m_member7 + 98 ;
else
if (const shape_kind<99>* p99 = dynamic_cast<const shape_kind<99>*>(p49))
return p99->m_member7 + 99 ;
else
return p49->m_member7 + 49 ;
else
return p24->m_member7 + 24 ;
else
if (const shape_kind<25>* p25 = dynamic_cast<const shape_kind<25>*>(p12))
if (const shape_kind<50>* p50 = dynamic_cast<const shape_kind<50>*>(p25))
return p50->m_member7 + 50 ;
else
if (const shape_kind<51>* p51 = dynamic_cast<const shape_kind<51>*>(p25))
return p51->m_member7 + 51 ;
else
return p25->m_member7 + 25 ;
else
return p12->m_member7 + 12 ;
else
if (const shape_kind<13>* p13 = dynamic_cast<const shape_kind<13>*>(p6))
if (const shape_kind<26>* p26 = dynamic_cast<const shape_kind<26>*>(p13))
if (const shape_kind<52>* p52 = dynamic_cast<const shape_kind<52>*>(p26))
return p52->m_member7 + 52 ;
else
if (const shape_kind<53>* p53 = dynamic_cast<const shape_kind<53>*>(p26))
return p53->m_member7 + 53 ;
else
return p26->m_member7 + 26 ;
else
if (const shape_kind<27>* p27 = dynamic_cast<const shape_kind<27>*>(p13))
if (const shape_kind<54>* p54 = dynamic_cast<const shape_kind<54>*>(p27))
return p54->m_member7 + 54 ;
else
if (const shape_kind<55>* p55 = dynamic_cast<const shape_kind<55>*>(p27))
return p55->m_member7 + 55 ;
else
return p27->m_member7 + 27 ;
else
return p13->m_member7 + 13 ;
else
return p6->m_member7 + 6 ;
else
if (const shape_kind< 7>* p7 = dynamic_cast<const shape_kind< 7>*>(p3))
if (const shape_kind<14>* p14 = dynamic_cast<const shape_kind<14>*>(p7))
if (const shape_kind<28>* p28 = dynamic_cast<const shape_kind<28>*>(p14))
if (const shape_kind<56>* p56 = dynamic_cast<const shape_kind<56>*>(p28))
return p56->m_member7 + 56 ;
else
if (const shape_kind<57>* p57 = dynamic_cast<const shape_kind<57>*>(p28))
return p57->m_member7 + 57 ;
else
return p28->m_member7 + 28 ;
else
if (const shape_kind<29>* p29 = dynamic_cast<const shape_kind<29>*>(p14))
if (const shape_kind<58>* p58 = dynamic_cast<const shape_kind<58>*>(p29))
return p58->m_member7 + 58 ;
else
if (const shape_kind<59>* p59 = dynamic_cast<const shape_kind<59>*>(p29))
return p59->m_member7 + 59 ;
else
return p29->m_member7 + 29 ;
else
return p14->m_member7 + 14 ;
else
if (const shape_kind<15>* p15 = dynamic_cast<const shape_kind<15>*>(p7))
if (const shape_kind<30>* p30 = dynamic_cast<const shape_kind<30>*>(p15))
if (const shape_kind<60>* p60 = dynamic_cast<const shape_kind<60>*>(p30))
return p60->m_member7 + 60 ;
else
if (const shape_kind<61>* p61 = dynamic_cast<const shape_kind<61>*>(p30))
return p61->m_member7 + 61 ;
else
return p30->m_member7 + 30 ;
else
if (const shape_kind<31>* p31 = dynamic_cast<const shape_kind<31>*>(p15))
if (const shape_kind<62>* p62 = dynamic_cast<const shape_kind<62>*>(p31))
return p62->m_member7 + 62 ;
else
if (const shape_kind<63>* p63 = dynamic_cast<const shape_kind<63>*>(p31))
return p63->m_member7 + 63 ;
else
return p31->m_member7 + 31 ;
else
return p15->m_member7 + 15 ;
else
return p7->m_member7 + 7 ;
else
return p3->m_member7 + 3 ;
else
return p1->m_member7 + 1 ;
else
return p0->m_member7 + 0 ;
}
return invalid;
}
XTL_TIMED_FUNC_END
<API key>
// Visitor-based counterpart of do_match: a local visitor records
// s.m_member7 + N for the most-derived class N, so both techniques can be
// timed against each other.
size_t do_visit(const Shape& s, size_t)
{
struct Visitor : ShapeVisitor
{
#define FOR_EACH_MAX NUMBER_OF_DERIVED-1
#define FOR_EACH_N(N) virtual void visit(const shape_kind<N>& s) { result = s.m_member7 + N; }
#include "loop_over_numbers.hpp"
#undef FOR_EACH_N
#undef FOR_EACH_MAX
size_t result;
};
Visitor v;
v.result = invalid;
s.accept(v);
return v.result;
}
XTL_TIMED_FUNC_END
// Factory used by the benchmark driver: cycles through all derived classes.
// Returns a heap-allocated shape_kind<i % NUMBER_OF_DERIVED>; the caller
// owns the pointer.
Shape* make_shape(size_t i)
{
switch (i % NUMBER_OF_DERIVED)
{
#define FOR_EACH_MAX NUMBER_OF_DERIVED-1
#define FOR_EACH_N(N) case N: return new shape_kind<N>;
#include "loop_over_numbers.hpp"
#undef FOR_EACH_N
#undef FOR_EACH_MAX
}
return 0; // unreachable: the switch covers every residue
}
#include "testvismat.hpp" // Utilities for timing tests
int main()
{
verdict pp = test_repetitive();
verdict ps = test_sequential();
verdict pr = test_randomized();
std::cout << "OVERALL: "
<< "Repetitive: " << pp << "; "
<< "Sequential: " << ps << "; "
<< "Random: " << pr
<< std::endl;
}
|
"""
Control global computation context
"""
from collections import defaultdict
_globals = defaultdict(lambda: None)
_globals['callbacks'] = set()
class set_options(object):
def __init__(self, **kwargs):
self.old = _globals.copy()
_globals.update(kwargs)
def __enter__(self):
return
def __exit__(self, type, value, traceback):
_globals.clear()
_globals.update(self.old)
|
/* Hover tooltip bubble.  pointer-events:none keeps the tip from intercepting
   mouse events meant for the element underneath it. */
.tip {
opacity:0.9;
z-index:1000;
text-align:left;
/* Rounded corners, with vendor-prefixed fallbacks for older engines. */
border-radius:4px;
-moz-border-radius:4px;
-<API key>:4px;
padding:8px 8px;
color: black;
background-color:#E6E6E6;
border: 1px solid #B3B3B3;
box-shadow: 2px 2px 5px #888;
pointer-events:none;
}
|
#ifndef <API key>
#define <API key>
#include "base/containers/flat_set.h"
#include "base/time/time.h"
#include "build/build_config.h"
#include "build/chromeos_buildflags.h"
#include "components/viz/common/surfaces/frame_sink_id.h"
#include "ui/compositor/compositor_export.h"
namespace gfx {
class Size;
struct <API key>;
} // namespace gfx
namespace ui {
class Compositor;
// A compositor observer is notified when compositing completes.
// Observer interface for compositor lifecycle events.  All hooks have empty
// default implementations so subclasses override only what they need.
class COMPOSITOR_EXPORT CompositorObserver {
public:
virtual ~CompositorObserver() = default;
// A commit proxies information from the main thread to the compositor
// thread. It typically happens when some state changes that will require a
// composite. In the multi-threaded case, many commits may happen between
// two successive composites. In the single-threaded, a single commit
// between two composites (just before the composite as part of the
// composite cycle). If the compositor is locked, it will not send
// this signal.
virtual void <API key>(Compositor* compositor) {}
// Called when compositing started: it has taken all the layer changes into
// account and has issued the graphics commands.
virtual void <API key>(Compositor* compositor,
base::TimeTicks start_time) {}
// Called when compositing completes: the present to screen has completed.
virtual void OnCompositingEnded(Compositor* compositor) {}
// Called when a child of the compositor is resizing.
virtual void <API key>(Compositor* compositor) {}
// TODO(crbug.com/1052397): Revisit the macro expression once build flag switch
// of lacros-chrome is complete.
#if BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
// Called when a swap with new size is completed.
virtual void <API key>(ui::Compositor* compositor,
const gfx::Size& size) {}
#endif  // BUILDFLAG(IS_LINUX) || BUILDFLAG(IS_CHROMEOS_LACROS)
// Called at the top of the compositor's destructor, to give observers a
// chance to remove themselves.
virtual void <API key>(Compositor* compositor) {}
// Called when the presentation feedback was received from the viz.
virtual void <API key>(
uint32_t frame_token,
const gfx::<API key>& feedback) {}
// NOTE(review): the two hooks below were undocumented upstream (names
// redacted here) -- document their trigger conditions when known.
virtual void <API key>(Compositor* compositor) {}
virtual void <API key>(Compositor* compositor) {}
// Called when the set of child frame sinks changes.
virtual void <API key>(
const base::flat_set<viz::FrameSinkId>& ids) {}
};
} // namespace ui
#endif // <API key>
|
package uatparse
import (
"encoding/hex"
"errors"
"fmt"
"io/ioutil"
"strconv"
"strings"
)
const (
	// Sizes of a UAT uplink transmission. Each block carries data bits plus
	// 160 additional bits — presumably RS FEC parity; confirm against the
	// UAT spec (DO-282).
	<API key> = 576
	UPLINK_BLOCK_BITS = (<API key> + 160)
	<API key> = (<API key> / 8)
	UPLINK_BLOCK_BYTES = (UPLINK_BLOCK_BITS / 8)

	// An uplink frame is six interleaved blocks.
	UPLINK_FRAME_BLOCKS = 6
	<API key> = (UPLINK_FRAME_BLOCKS * <API key>)
	UPLINK_FRAME_BITS = (UPLINK_FRAME_BLOCKS * UPLINK_BLOCK_BITS)
	<API key> = (<API key> / 8)
	UPLINK_FRAME_BYTES = (UPLINK_FRAME_BITS / 8)

	// Upper bound on info frames per uplink message.
	// assume 6 byte frames: 2 header bytes, 4 byte payload
	// (TIS-B heartbeat with one address, or empty FIS-B APDU)
	<API key> = (424 / 6)

	// DLAC 6-bit character set, indexed by the decoded 6-bit code.
	dlac_alpha = "\<API key>\x1A\t\x1E\n| !\"#$%&'()*+,-./0123456789:;<=>?"
)
// UATFrame is one decoded "information frame" from a UAT uplink message.
type UATFrame struct {
	Raw_data  []byte // Frame payload (after the 2-byte frame header).
	FISB_data []byte // Raw_data with the FIS-B APDU time header stripped.

	// FIS-B timestamp fields; which are populated depends on the frame's
	// time format option (see decodeTimeFormat).
	FISB_month   uint32
	FISB_day     uint32
	FISB_hours   uint32
	FISB_minutes uint32
	FISB_seconds uint32

	FISB_length  uint32 // Length of the FIS-B payload after the time header.
	frame_length uint32 // Payload length from the frame header.
	Frame_type   uint32 // 0 = FIS-B APDU; other values are not decoded here.
	Product_id   uint32 // FIS-B product identifier (11 bits).

	// Text data, if applicable.
	Text_data []string

	// Flags.
	a_f bool
	g_f bool
	p_f bool
	s_f bool //TODO: Segmentation.

	// For AIRMET/NOTAM.
	//FIXME: Temporary.
	Points             []GeoPoint
	ReportNumber       uint16
	ReportYear         uint16
	LocationIdentifier string
	RecordFormat       uint8
	ReportStart        string // Formatted by airmetParseDate.
	ReportEnd          string // Formatted by airmetParseDate.
}
// UATMsg is one demodulated UAT message plus its decoded frames.
type UATMsg struct {
	// Metadata from demodulation.
	RS_Err         int // Reed-Solomon error count ("rs=" field); -1 if absent.
	SignalStrength int // Signal strength ("ss=" field); -1 if absent.

	msg     []byte // Raw frame bytes (hex-decoded payload, '+' stripped).
	decoded bool   // True once DecodeUplink has run.

	// Station location for uplink frames, aircraft position for downlink frames.
	Lat float64
	Lon float64

	Frames []*UATFrame
}
func dlac_decode(data []byte, data_len uint32) string {
step := 0
tab := false
ret := ""
for i := uint32(0); i < data_len; i++ {
var ch uint32
switch step {
case 0:
ch = uint32(data[i+0]) >> 2
case 1:
ch = ((uint32(data[i-1]) & 0x03) << 4) | (uint32(data[i+0]) >> 4)
case 2:
ch = ((uint32(data[i-1]) & 0x0f) << 2) | (uint32(data[i+0]) >> 6)
i = i - 1
case 3:
ch = uint32(data[i+0]) & 0x3f
}
if tab {
for ch > 0 {
ret += " "
ch
}
tab = false
} else if ch == 28 { // tab
tab = true
} else {
ret += string(dlac_alpha[ch])
}
step = (step + 1) % 4
}
return ret
}
// Decodes the APDU time header and aligns 'FISB_data' to the payload that
// follows it. The 2-bit time option (t_opt) selects which date/time fields
// are present and therefore the header length (4-6 bytes).
//TODO: Make a new "FISB Time" structure that also encodes the type of timestamp received.
//TODO: pass up error.
func (f *UATFrame) decodeTimeFormat() {
	if len(f.Raw_data) < 3 {
		return // Can't determine time format.
	}
	// t_opt straddles bytes 1 and 2: low bit of byte 1, high bit of byte 2.
	t_opt := ((uint32(f.Raw_data[1]) & 0x01) << 1) | (uint32(f.Raw_data[2]) >> 7)
	var fisb_data []byte
	switch t_opt {
	case 0: // Hours, Minutes.
		if f.frame_length < 4 {
			return
		}
		f.FISB_hours = (uint32(f.Raw_data[2]) & 0x7c) >> 2
		f.FISB_minutes = ((uint32(f.Raw_data[2]) & 0x03) << 4) | (uint32(f.Raw_data[3]) >> 4)
		f.FISB_length = f.frame_length - 4
		fisb_data = f.Raw_data[4:]
	case 1: // Hours, Minutes, Seconds.
		if f.frame_length < 5 {
			return
		}
		f.FISB_hours = (uint32(f.Raw_data[2]) & 0x7c) >> 2
		f.FISB_minutes = ((uint32(f.Raw_data[2]) & 0x03) << 4) | (uint32(f.Raw_data[3]) >> 4)
		f.FISB_seconds = ((uint32(f.Raw_data[3]) & 0x0f) << 2) | (uint32(f.Raw_data[4]) >> 6)
		f.FISB_length = f.frame_length - 5
		fisb_data = f.Raw_data[5:]
	case 2: // Month, Day, Hours, Minutes.
		if f.frame_length < 5 {
			return
		}
		f.FISB_month = (uint32(f.Raw_data[2]) & 0x78) >> 3
		f.FISB_day = ((uint32(f.Raw_data[2]) & 0x07) << 2) | (uint32(f.Raw_data[3]) >> 6)
		f.FISB_hours = (uint32(f.Raw_data[3]) & 0x3e) >> 1
		f.FISB_minutes = ((uint32(f.Raw_data[3]) & 0x01) << 5) | (uint32(f.Raw_data[4]) >> 3)
		f.FISB_length = f.frame_length - 5
		fisb_data = f.Raw_data[5:]
	case 3: // Month, Day, Hours, Minutes, Seconds.
		if f.frame_length < 6 {
			return
		}
		f.FISB_month = (uint32(f.Raw_data[2]) & 0x78) >> 3
		f.FISB_day = ((uint32(f.Raw_data[2]) & 0x07) << 2) | (uint32(f.Raw_data[3]) >> 6)
		f.FISB_hours = (uint32(f.Raw_data[3]) & 0x3e) >> 1
		f.FISB_minutes = ((uint32(f.Raw_data[3]) & 0x01) << 5) | (uint32(f.Raw_data[4]) >> 3)
		f.FISB_seconds = ((uint32(f.Raw_data[4]) & 0x03) << 3) | (uint32(f.Raw_data[5]) >> 5)
		f.FISB_length = f.frame_length - 6
		fisb_data = f.Raw_data[6:]
	default:
		return // Should never reach this — t_opt is only 2 bits.
	}
	f.FISB_data = fisb_data
	// Segmentation flag from the APDU header.
	if (uint16(f.Raw_data[1]) & 0x02) != 0 {
		f.s_f = true // Default false.
	}
}
// formatDLACData splits decoded DLAC text into lines at record-separator
// (0x1E) characters, falling back to ETX (0x03) when no 0x1E is present.
// The separator itself is dropped; the trailing remainder is kept as the
// final element.
func formatDLACData(p string) []string {
	lines := make([]string, 0)
	for {
		// Look for a record separator first; only if none exists anywhere
		// in the remainder do we consider ETX.
		cut := strings.Index(p, "\x1E")
		if cut == -1 {
			cut = strings.Index(p, "\x03")
		}
		if cut == -1 {
			lines = append(lines, p)
			return lines
		}
		lines = append(lines, p[:cut])
		p = p[cut+1:]
	}
}
// Whole frame contents is DLAC encoded text: decode it and split into lines
// in Text_data. Bails out if the payload is shorter than the length claimed
// by the time header.
func (f *UATFrame) decodeTextFrame() {
	if len(f.FISB_data) < int(f.FISB_length) {
		return
	}
	p := dlac_decode(f.FISB_data, f.FISB_length)
	f.Text_data = formatDLACData(p)
}
// airmetParseDate formats the raw date/time bytes of an AIRMET/NOTAM record
// into a display string. date_time_format selects which fields are present
// (each field is one byte). Returns "" for format 0, unknown formats, or —
// FIX: new — when 'b' is too short for the requested format (previously this
// panicked with an index out of range).
func airmetParseDate(b []byte, date_time_format uint8) string {
	switch date_time_format {
	case 0: // No date/time used.
		return ""
	case 1: // Month, Day, Hours, Minutes.
		if len(b) < 4 {
			return ""
		}
		month := uint8(b[0])
		day := uint8(b[1])
		hours := uint8(b[2])
		minutes := uint8(b[3])
		return fmt.Sprintf("%02d-%02d %02d:%02d", month, day, hours, minutes)
	case 2: // Day, Hours, Minutes.
		if len(b) < 3 {
			return ""
		}
		day := uint8(b[0])
		hours := uint8(b[1])
		minutes := uint8(b[2])
		return fmt.Sprintf("%02d %02d:%02d", day, hours, minutes)
	case 3: // Hours, Minutes.
		if len(b) < 2 {
			return ""
		}
		hours := uint8(b[0])
		minutes := uint8(b[1])
		return fmt.Sprintf("%02d:%02d", hours, minutes)
	}
	return ""
}
func airmetLatLng(lat_raw, lng_raw int32, alt bool) (float64, float64) {
fct := float64(0.000687)
if alt {
fct = float64(0.001373)
}
lat := fct * float64(lat_raw)
lng := fct * float64(lng_raw)
if lat > 90.0 {
lat = lat - 180.0
}
if lng > 180.0 {
lng = lng - 360.0
}
return lat, lng
}
//TODO: Ignoring flags (segmentation, etc.)
// <API key>.pdf
// Decode product IDs 8-13 (AIRMET/SIGMET/NOTAM-class "formatted text"
// products) from f.FISB_data. Only record formats 2 (unformatted DLAC text)
// and 8 (graphical overlay) are handled; all diagnostic output goes to
// ioutil.Discard.
func (f *UATFrame) decodeAirmet() {
	// APDU header: 48 bits (3-3) - assume no segmentation.
	// FIX: guard the 6 header bytes read below (previously a short payload
	// caused an index-out-of-range panic).
	if len(f.FISB_data) < 6 {
		return
	}
	record_format := (uint8(f.FISB_data[0]) & 0xF0) >> 4
	f.RecordFormat = record_format
	fmt.Fprintf(ioutil.Discard, "record_format=%d\n", record_format)
	product_version := (uint8(f.FISB_data[0]) & 0x0F)
	fmt.Fprintf(ioutil.Discard, "product_version=%d\n", product_version)
	record_count := (uint8(f.FISB_data[1]) & 0xF0) >> 4
	fmt.Fprintf(ioutil.Discard, "record_count=%d\n", record_count)
	location_identifier := dlac_decode(f.FISB_data[2:], 3)
	fmt.Fprintf(ioutil.Discard, "%s\n", hex.Dump(f.FISB_data))
	f.LocationIdentifier = location_identifier
	fmt.Fprintf(ioutil.Discard, "location_identifier=%s\n", location_identifier)
	record_reference := (uint8(f.FISB_data[5])) //FIXME: Special values. 0x00 means "use location_identifier". 0xFF means "use different reference". (4-3).
	fmt.Fprintf(ioutil.Discard, "record_reference=%d\n", record_reference)
	// Not sure when this is even used.
	//	rwy_designator := (record_reference & FC) >> 4
	//	<API key> := record_reference & 0x03 // 0 = NA, 1 = R, 2 = L, 3 = C (Figure 4-2).
	//FIXME: Assume one record.
	if record_count != 1 {
		fmt.Fprintf(ioutil.Discard, "record_count=%d, != 1\n", record_count)
		return
	}
	/*
	   0 - No data
	   1 - Unformatted ASCII Text
	   2 - Unformatted DLAC Text
	   3 - Unformatted DLAC Text w/ dictionary
	   4 - Formatted Text using ASN.1/PER
	   5-7 - Future Use
	   8 - Graphical Overlay
	   9-15 - Future Use
	*/
	switch record_format {
	case 2:
		// NOTE(review): reads through f.FISB_data[10] before any bounds
		// check; a malformed record shorter than 11 bytes can still panic
		// here — TODO harden.
		record_length := (uint16(f.FISB_data[6]) << 8) | uint16(f.FISB_data[7])
		if len(f.FISB_data)-int(record_length) < 6 {
			fmt.Fprintf(ioutil.Discard, "FISB record not long enough: record_length=%d, len(f.FISB_data)=%d\n", record_length, len(f.FISB_data))
			return
		}
		fmt.Fprintf(ioutil.Discard, "record_length=%d\n", record_length)
		// Report identifier = report number + report year.
		report_number := (uint16(f.FISB_data[8]) << 6) | ((uint16(f.FISB_data[9]) & 0xFC) >> 2)
		f.ReportNumber = report_number
		fmt.Fprintf(ioutil.Discard, "report_number=%d\n", report_number)
		report_year := ((uint16(f.FISB_data[9]) & 0x03) << 5) | ((uint16(f.FISB_data[10]) & 0xF8) >> 3)
		f.ReportYear = report_year
		fmt.Fprintf(ioutil.Discard, "report_year=%d\n", report_year)
		report_status := (uint8(f.FISB_data[10]) & 0x04) >> 2 //TODO: 0 = cancelled, 1 = active.
		fmt.Fprintf(ioutil.Discard, "report_status=%d\n", report_status)
		fmt.Fprintf(ioutil.Discard, "record_length=%d,len=%d\n", record_length, len(f.FISB_data))
		text_data_len := record_length - 5
		text_data := dlac_decode(f.FISB_data[11:], uint32(text_data_len))
		fmt.Fprintf(ioutil.Discard, "text_data=%s\n", text_data)
		f.Text_data = formatDLACData(text_data)
	case 8:
		// (6-1). (6.22 - Graphical Overlay Record Format).
		record_data := f.FISB_data[6:] // Start after the record header.
		// NOTE(review): the overlay path below indexes record_data without
		// length validation; a truncated record can panic — TODO harden.
		record_length := (uint16(record_data[0]) << 2) | ((uint16(record_data[1]) & 0xC0) >> 6)
		fmt.Fprintf(ioutil.Discard, "record_length=%d\n", record_length)
		// Report identifier = report number + report year.
		report_number := ((uint16(record_data[1]) & 0x3F) << 8) | uint16(record_data[2])
		f.ReportNumber = report_number
		fmt.Fprintf(ioutil.Discard, "report_number=%d\n", report_number)
		report_year := (uint16(record_data[3]) & 0xFE) >> 1
		f.ReportYear = report_year
		fmt.Fprintf(ioutil.Discard, "report_year=%d\n", report_year)
		<API key> := ((uint8(record_data[4]) & 0x1E) >> 1) + 1 // Document instructs to add 1.
		fmt.Fprintf(ioutil.Discard, "<API key>=%d\n", <API key>)
		object_label_flag := uint8(record_data[4] & 0x01)
		fmt.Fprintf(ioutil.Discard, "object_label_flag=%d\n", object_label_flag)
		if object_label_flag == 0 { // Numeric index.
			object_label := (uint8(record_data[5]) << 8) | uint8(record_data[6])
			record_data = record_data[7:]
			fmt.Fprintf(ioutil.Discard, "object_label=%d\n", object_label)
		} else {
			object_label := dlac_decode(record_data[5:], 9)
			record_data = record_data[14:]
			fmt.Fprintf(ioutil.Discard, "object_label=%s\n", object_label)
		}
		element_flag := (uint8(record_data[0]) & 0x80) >> 7
		fmt.Fprintf(ioutil.Discard, "element_flag=%d\n", element_flag)
		qualifier_flag := (uint8(record_data[0]) & 0x40) >> 6
		fmt.Fprintf(ioutil.Discard, "qualifier_flag=%d\n", qualifier_flag)
		param_flag := (uint8(record_data[0]) & 0x20) >> 5
		fmt.Fprintf(ioutil.Discard, "param_flag=%d\n", param_flag)
		object_element := uint8(record_data[0]) & 0x1F
		fmt.Fprintf(ioutil.Discard, "object_element=%d\n", object_element)
		object_type := (uint8(record_data[1]) & 0xF0) >> 4
		fmt.Fprintf(ioutil.Discard, "object_type=%d\n", object_type)
		object_status := uint8(record_data[1]) & 0x0F
		fmt.Fprintf(ioutil.Discard, "object_status=%d\n", object_status)
		//FIXME
		if qualifier_flag == 0 { //TODO: Check.
			record_data = record_data[2:]
		} else {
			object_qualifier := (uint32(record_data[2]) << 16) | (uint32(record_data[3]) << 8) | uint32(record_data[4])
			fmt.Fprintf(ioutil.Discard, "object_qualifier=%d\n", object_qualifier)
			fmt.Fprintf(ioutil.Discard, "%02x%02x%02x\n", record_data[2], record_data[3], record_data[4])
			record_data = record_data[5:]
		}
		//FIXME
		//if param_flag == 0 { //TODO: Check.
		//	record_data = record_data[2:]
		//} else {
		//	//TODO.
		//	//		record_data = record_data[4:]
		<API key> := (uint8(record_data[0]) & 0xC0) >> 6
		fmt.Fprintf(ioutil.Discard, "<API key>=%d\n", <API key>)
		date_time_format := (uint8(record_data[0]) & 0x30) >> 4
		fmt.Fprintf(ioutil.Discard, "date_time_format=%d\n", date_time_format)
		<API key> := uint8(record_data[0]) & 0x0F
		fmt.Fprintf(ioutil.Discard, "<API key>=%d\n", <API key>)
		overlay_operator := (uint8(record_data[1]) & 0xC0) >> 6
		fmt.Fprintf(ioutil.Discard, "overlay_operator=%d\n", overlay_operator)
		<API key> := (uint8(record_data[1]) & 0x3F) + 1 // Document instructs to add 1. (6.20).
		fmt.Fprintf(ioutil.Discard, "<API key>=%d\n", <API key>)
		// Parse all of the dates.
		switch <API key> {
		case 0: // No times given. UFN.
			record_data = record_data[2:]
		case 1: // Start time only. WEF.
			f.ReportStart = airmetParseDate(record_data[2:], date_time_format)
			record_data = record_data[6:]
		case 2: // End time only. TIL.
			f.ReportEnd = airmetParseDate(record_data[2:], date_time_format)
			record_data = record_data[6:]
		case 3: // Both start and end times. WEF.
			f.ReportStart = airmetParseDate(record_data[2:], date_time_format)
			f.ReportEnd = airmetParseDate(record_data[6:], date_time_format)
			record_data = record_data[10:]
		}
		// Now we have the vertices.
		switch <API key> {
		case 3: // Extended Range 3D Polygon (MSL).
			points := make([]GeoPoint, 0) // Slice containing all of the points.
			fmt.Fprintf(ioutil.Discard, "%d\n", len(record_data))
			for i := 0; i < int(<API key>); i++ {
				lng_raw := (int32(record_data[6*i]) << 11) | (int32(record_data[6*i+1]) << 3) | (int32(record_data[6*i+2]) & 0xE0 >> 5)
				lat_raw := ((int32(record_data[6*i+2]) & 0x1F) << 14) | (int32(record_data[6*i+3]) << 6) | ((int32(record_data[6*i+4]) & 0xFC) >> 2)
				alt_raw := ((int32(record_data[6*i+4]) & 0x03) << 8) | int32(record_data[6*i+5])
				fmt.Fprintf(ioutil.Discard, "lat_raw=%d, lng_raw=%d, alt_raw=%d\n", lat_raw, lng_raw, alt_raw)
				lat, lng := airmetLatLng(lat_raw, lng_raw, false)
				alt := alt_raw * 100
				fmt.Fprintf(ioutil.Discard, "lat=%f,lng=%f,alt=%d\n", lat, lng, alt)
				fmt.Fprintf(ioutil.Discard, "coord:%f,%f\n", lat, lng)
				var point GeoPoint
				point.Lat = lat
				point.Lon = lng
				point.Alt = alt
				points = append(points, point)
				f.Points = points
			}
		case 9: // Extended Range 3D Point (AGL). p.47.
			if len(record_data) < 6 {
				// FIX: format verb was "%" (invalid); use %d.
				fmt.Fprintf(ioutil.Discard, "invalid data: Extended Range 3D Point. Should be 6 bytes; %d seen.\n", len(record_data))
			} else {
				lng_raw := (int32(record_data[0]) << 11) | (int32(record_data[1]) << 3) | (int32(record_data[2]) & 0xE0 >> 5)
				lat_raw := ((int32(record_data[2]) & 0x1F) << 14) | (int32(record_data[3]) << 6) | ((int32(record_data[4]) & 0xFC) >> 2)
				alt_raw := ((int32(record_data[4]) & 0x03) << 8) | int32(record_data[5])
				fmt.Fprintf(ioutil.Discard, "lat_raw=%d, lng_raw=%d, alt_raw=%d\n", lat_raw, lng_raw, alt_raw)
				lat, lng := airmetLatLng(lat_raw, lng_raw, false)
				alt := alt_raw * 100
				fmt.Fprintf(ioutil.Discard, "lat=%f,lng=%f,alt=%d\n", lat, lng, alt)
				fmt.Fprintf(ioutil.Discard, "coord:%f,%f\n", lat, lng)
				var point GeoPoint
				point.Lat = lat
				point.Lon = lng
				point.Alt = alt
				f.Points = []GeoPoint{point}
			}
		case 7, 8: // Extended Range Circular Prism (7 = MSL, 8 = AGL)
			if len(record_data) < 14 {
				// FIX: format verb was "%" (invalid); use %d.
				fmt.Fprintf(ioutil.Discard, "invalid data: Extended Range Circular Prism. Should be 14 bytes; %d seen.\n", len(record_data))
			} else {
				lng_bot_raw := (int32(record_data[0]) << 10) | (int32(record_data[1]) << 2) | (int32(record_data[2]) & 0xC0 >> 6)
				lat_bot_raw := ((int32(record_data[2]) & 0x3F) << 12) | (int32(record_data[3]) << 4) | ((int32(record_data[4]) & 0xF0) >> 4)
				lng_top_raw := ((int32(record_data[4]) & 0x0F) << 14) | (int32(record_data[5]) << 6) | ((int32(record_data[6]) & 0xFC) >> 2)
				lat_top_raw := ((int32(record_data[6]) & 0x03) << 16) | (int32(record_data[7]) << 8) | int32(record_data[8])
				alt_bot_raw := (int32(record_data[9]) & 0xFE) >> 1
				alt_top_raw := ((int32(record_data[9]) & 0x01) << 6) | ((int32(record_data[10]) & 0xFC) >> 2)
				r_lng_raw := ((int32(record_data[10]) & 0x03) << 7) | ((int32(record_data[11]) & 0xFE) >> 1)
				r_lat_raw := ((int32(record_data[11]) & 0x01) << 8) | int32(record_data[12])
				alpha := int32(record_data[13])
				lat_bot, lng_bot := airmetLatLng(lat_bot_raw, lng_bot_raw, true)
				lat_top, lng_top := airmetLatLng(lat_top_raw, lng_top_raw, true)
				alt_bot := alt_bot_raw * 5
				alt_top := alt_top_raw * 500
				r_lng := float64(r_lng_raw) * float64(0.2)
				r_lat := float64(r_lat_raw) * float64(0.2)
				fmt.Fprintf(ioutil.Discard, "lat_bot, lng_bot = %f, %f\n", lat_bot, lng_bot)
				fmt.Fprintf(ioutil.Discard, "lat_top, lng_top = %f, %f\n", lat_top, lng_top)
				if <API key> == 8 {
					fmt.Fprintf(ioutil.Discard, "alt_bot, alt_top = %d AGL, %d AGL\n", alt_bot, alt_top)
				} else {
					fmt.Fprintf(ioutil.Discard, "alt_bot, alt_top = %d MSL, %d MSL\n", alt_bot, alt_top)
				}
				fmt.Fprintf(ioutil.Discard, "r_lng, r_lat = %f, %f\n", r_lng, r_lat)
				fmt.Fprintf(ioutil.Discard, "alpha=%d\n", alpha)
			}
		default:
			fmt.Fprintf(ioutil.Discard, "unknown geometry: %d\n", <API key>)
		}
	//case 1: // Unformatted ASCII Text.
	default:
		fmt.Fprintf(ioutil.Discard, "unknown record format: %d\n", record_format)
	}
	fmt.Fprintf(ioutil.Discard, "\n\n\n")
}
// decodeInfoFrame extracts the 11-bit FIS-B product id from the frame header
// and dispatches to the product decoder. Only FIS-B frames (Frame_type 0)
// are processed; currently only product 413 (DLAC text) is decoded.
func (f *UATFrame) decodeInfoFrame() {
	if len(f.Raw_data) < 2 {
		return // Can't determine Product_id.
	}
	f.Product_id = ((uint32(f.Raw_data[0]) & 0x1f) << 6) | (uint32(f.Raw_data[1]) >> 2)
	if f.Frame_type != 0 {
		return // Not FIS-B.
	}
	f.decodeTimeFormat()
	switch f.Product_id {
	case 413:
		// Whole payload is DLAC-encoded text.
		f.decodeTextFrame()
	/*
		case 8, 11, 13:
			f.decodeAirmet()
	*/
	default:
		fmt.Fprintf(ioutil.Discard, "don't know what to do with product id: %d\n", f.Product_id)
	}
	//	logger.Printf("pos=%d,len=%d,t_opt=%d,product_id=%d, time=%d:%d\n", frame_start, frame_len, t_opt, product_id, fisb_hours, fisb_minutes)
}
// DecodeUplink decodes the raw uplink message in u.msg: extracts the ground
// station position from the UAT-specific header, then walks the application
// data splitting it into length-prefixed info frames, decoding each one and
// appending it to u.Frames. Marks the message decoded on success.
func (u *UATMsg) DecodeUplink() error {
	//	position_valid := (uint32(frame[5]) & 0x01) != 0
	frame := u.msg
	if len(frame) < <API key> {
		return fmt.Errorf("DecodeUplink: short read (%d).", len(frame))
	}
	// Station position: 23-bit latitude / 24-bit longitude counts, LSB
	// weighted 360/2^24 degrees, folded into signed ranges below.
	raw_lat := (uint32(frame[0]) << 15) | (uint32(frame[1]) << 7) | (uint32(frame[2]) >> 1)
	raw_lon := ((uint32(frame[2]) & 0x01) << 23) | (uint32(frame[3]) << 15) | (uint32(frame[4]) << 7) | (uint32(frame[5]) >> 1)
	lat := float64(raw_lat) * 360.0 / 16777216.0
	lon := float64(raw_lon) * 360.0 / 16777216.0
	if lat > 90 {
		lat = lat - 180
	}
	if lon > 180 {
		lon = lon - 360
	}
	u.Lat = lat
	u.Lon = lon
	//	utc_coupled := (uint32(frame[6]) & 0x80) != 0
	app_data_valid := (uint32(frame[6]) & 0x20) != 0
	//	slot_id := uint32(frame[6]) & 0x1f
	//	tisb_site_id := uint32(frame[7]) >> 4
	//	logger.Printf("position_valid=%t, %.04f, %.04f, %t, %t, %d, %d\n", position_valid, lat, lon, utc_coupled, app_data_valid, slot_id, tisb_site_id)
	if !app_data_valid {
		return nil // Not sure when this even happens?
	}
	app_data := frame[8:432]
	// NOTE(review): num_info_frames is never incremented, so the frame-count
	// bound is ineffective; the byte-position bound terminates the loop.
	num_info_frames := 0
	pos := 0
	total_len := len(app_data)
	for (num_info_frames < <API key>) && (pos+2 <= total_len) {
		data := app_data[pos:]
		frame_length := (uint32(data[0]) << 1) | (uint32(data[1]) >> 7)
		frame_type := uint32(data[1]) & 0x0f
		// BUG FIX: account for the 2 header bytes. The old check
		// (pos+frame_length > total_len) let the slice below run past the
		// end of app_data and panic on a crafted/corrupt length.
		if pos+2+int(frame_length) > total_len {
			break // Overrun?
		}
		if frame_length == 0 { // Empty frame. Quit here.
			break
		}
		pos = pos + 2
		data = data[2 : frame_length+2]
		thisFrame := new(UATFrame)
		thisFrame.Raw_data = data
		thisFrame.frame_length = frame_length
		thisFrame.Frame_type = frame_type
		thisFrame.decodeInfoFrame()
		// Save the decoded frame.
		u.Frames = append(u.Frames, thisFrame)
		pos = pos + int(frame_length)
	}
	u.decoded = true
	return nil
}
// GetTextReports aggregates every non-empty text line from all decoded
// frames in the message, lazily decoding the uplink first if needed.
func (u *UATMsg) GetTextReports() ([]string, error) {
	reports := make([]string, 0)
	if !u.decoded {
		if err := u.DecodeUplink(); err != nil {
			return reports, err
		}
	}
	for _, frame := range u.Frames {
		for _, line := range frame.Text_data {
			if len(line) > 0 {
				reports = append(reports, line)
			}
		}
	}
	return reports, nil
}
// New parses one line of "dump978" output into a UATMsg. The line looks like
// "+HEXDATA;rs=N;ss=N;..." — everything before the first ';' is the frame,
// the remaining key=value fields carry demodulation metadata. Only "long"
// uplink ('+') messages are accepted.
func New(buf string) (*UATMsg, error) {
	ret := new(UATMsg)
	buf = strings.Trim(buf, "\r\n") // Remove newlines.
	x := strings.Split(buf, ";")    // We want to discard everything before the first ';'.
	if len(x) < 2 {
		return ret, fmt.Errorf("New UATMsg: Invalid format (%s).", buf)
	}
	/*
		Parse _;rs=?;ss=? - if available.
			RS_Err         int
			SignalStrength int
	*/
	// -1 marks "not present"; unknown or malformed fields are skipped.
	ret.SignalStrength = -1
	ret.RS_Err = -1
	for _, f := range x[1:] {
		x2 := strings.Split(f, "=")
		if len(x2) != 2 {
			continue
		}
		i, err := strconv.Atoi(x2[1])
		if err != nil {
			continue
		}
		if x2[0] == "ss" {
			ret.SignalStrength = i
		} else if x2[0] == "rs" {
			ret.RS_Err = i
		}
	}
	s := x[0]
	// Only want "long" uplink messages.
	if (len(s)-1)%2 != 0 || (len(s)-1)/2 != <API key> {
		return ret, fmt.Errorf("New UATMsg: short read (%d).", len(s))
	}
	if s[0] != '+' { // Only want + ("Uplink") messages currently. - (Downlink) or messages that start with other are discarded.
		return ret, errors.New("New UATMsg: expecting uplink frame.")
	}
	s = s[1:] // Remove the preceding '+' or '-' character.
	// Convert the hex string into a byte array.
	frame := make([]byte, <API key>)
	// BUG FIX: the decode error was previously ignored, silently yielding a
	// zero-filled frame on malformed hex input.
	if _, err := hex.Decode(frame, []byte(s)); err != nil {
		return ret, err
	}
	ret.msg = frame
	return ret, nil
}
|
<?php
use yii\helpers\Url;
/**
 * Re-runs the URL-creation test suite from the parent class with
 * $showScriptName enabled, i.e. generated URLs include the entry script name.
 */
class <API key> extends UrlCreationTest
{
    protected $showScriptName = true;
}
|
#include "gm.h"
#include "SkGradientShader.h"
namespace skiagm {
// GM exercising the four path fill types under a perspective canvas matrix,
// at two scales, with and without anti-aliasing.
class FillTypePerspGM : public GM {
    SkPath fPath;
public:
    FillTypePerspGM() {}

    // Lazily builds the shared test path: two overlapping circles, so the
    // different fill types produce visibly different interiors.
    void makePath() {
        if (fPath.isEmpty()) {
            const SkScalar radius = SkIntToScalar(45);
            fPath.addCircle(SkIntToScalar(50), SkIntToScalar(50), radius);
            fPath.addCircle(SkIntToScalar(100), SkIntToScalar(100), radius);
        }
    }

protected:
    SkString onShortName() SK_OVERRIDE {
        return SkString("filltypespersp");
    }

    SkISize onISize() SK_OVERRIDE {
        return SkISize::Make(835, 840);
    }

    // Draws fPath with the given fill type, clipped to a 150x150 cell at
    // (x, y) and scaled about the cell's center.
    void showPath(SkCanvas* canvas, int x, int y, SkPath::FillType ft,
                  SkScalar scale, const SkPaint& paint) {
        const SkRect r = { 0, 0, SkIntToScalar(150), SkIntToScalar(150) };
        canvas->save();
        canvas->translate(SkIntToScalar(x), SkIntToScalar(y));
        canvas->clipRect(r);
        canvas->drawColor(SK_ColorWHITE);
        fPath.setFillType(ft);
        canvas->translate(r.centerX(), r.centerY());
        canvas->scale(scale, scale);
        canvas->translate(-r.centerX(), -r.centerY());
        canvas->drawPath(fPath, paint);
        canvas->restore();
    }

    // Draws a 2x2 grid of cells, one per fill type, filled with a radial
    // gradient. (Pre-smart-pointer Skia: setShader refs, so we unref here.)
    void showFour(SkCanvas* canvas, SkScalar scale, bool aa) {
        SkPaint paint;
        SkPoint center = SkPoint::Make(SkIntToScalar(100), SkIntToScalar(100));
        SkColor colors[] = {SK_ColorBLUE, SK_ColorRED, SK_ColorGREEN};
        SkScalar pos[] = {0, SK_ScalarHalf, SK_Scalar1};
        SkShader* s = SkGradientShader::CreateRadial(center,
                                                     SkIntToScalar(100),
                                                     colors,
                                                     pos,
                                                     SK_ARRAY_COUNT(colors),
                                                     SkShader::kClamp_TileMode);
        paint.setShader(s)->unref();
        paint.setAntiAlias(aa);
        showPath(canvas, 0, 0, SkPath::kWinding_FillType,
                 scale, paint);
        showPath(canvas, 200, 0, SkPath::kEvenOdd_FillType,
                 scale, paint);
        showPath(canvas, 00, 200, SkPath::<API key>,
                 scale, paint);
        showPath(canvas, 200, 200, SkPath::<API key>,
                 scale, paint);
    }

    void onDraw(SkCanvas* canvas) SK_OVERRIDE {
        this->makePath();
        // do perspective drawPaint as the background;
        SkPaint bkgnrd;
        SkPoint center = SkPoint::Make(SkIntToScalar(100),
                                       SkIntToScalar(100));
        SkColor colors[] = {SK_ColorBLACK, SK_ColorCYAN,
                            SK_ColorYELLOW, SK_ColorWHITE};
        SkScalar pos[] = {0, SK_ScalarHalf / 2,
                          3 * SK_ScalarHalf / 2, SK_Scalar1};
        SkShader* s = SkGradientShader::CreateRadial(center,
                                                     SkIntToScalar(1000),
                                                     colors,
                                                     pos,
                                                     SK_ARRAY_COUNT(colors),
                                                     SkShader::kClamp_TileMode);
        bkgnrd.setShader(s)->unref();
        canvas->save();
        canvas->translate(SkIntToScalar(100), SkIntToScalar(100));
        SkMatrix mat;
        mat.reset();
        mat.setPerspY(SK_Scalar1 / 1000);
        canvas->concat(mat);
        canvas->drawPaint(bkgnrd);
        canvas->restore();
        // draw the paths in perspective
        SkMatrix persp;
        persp.reset();
        persp.setPerspX(-SK_Scalar1 / 1800);
        persp.setPerspY(SK_Scalar1 / 500);
        canvas->concat(persp);
        canvas->translate(SkIntToScalar(20), SkIntToScalar(20));
        const SkScalar scale = SkIntToScalar(5)/4;
        // Four quadrants: {1x, 1.25x scale} x {no AA, AA}.
        showFour(canvas, SK_Scalar1, false);
        canvas->translate(SkIntToScalar(450), 0);
        showFour(canvas, scale, false);
        canvas->translate(SkIntToScalar(-450), SkIntToScalar(450));
        showFour(canvas, SK_Scalar1, true);
        canvas->translate(SkIntToScalar(450), 0);
        showFour(canvas, scale, true);
    }

private:
    typedef GM INHERITED;
};
// Factory + static registration hooking this GM into the test runner.
static GM* MyFactory(void*) { return new FillTypePerspGM; }
static GMRegistry reg(MyFactory);
}
|
#include "config.h"
#include "platform/graphics/filters/FEDiffuseLighting.h"
#include "platform/graphics/filters/LightSource.h"
#include "platform/text/TextStream.h"
namespace blink {
// Constructs a diffuse-lighting filter effect. The two literal zeros are the
// specular parameters of the FELighting base — presumably specular constant
// and exponent, unused for diffuse lighting; confirm against FELighting.
FEDiffuseLighting::FEDiffuseLighting(Filter* filter, const Color& lightingColor, float surfaceScale,
    float diffuseConstant, PassRefPtr<LightSource> lightSource)
    : FELighting(filter, DiffuseLighting, lightingColor, surfaceScale, diffuseConstant, 0, 0, lightSource)
{
}
// Factory returning a garbage-collected/ref-counted instance (Oilpan
// transition helper).
<API key><FEDiffuseLighting> FEDiffuseLighting::create(Filter* filter, const Color& lightingColor,
    float surfaceScale, float diffuseConstant, PassRefPtr<LightSource> lightSource)
{
    return adoptRefWillBeNoop(new FEDiffuseLighting(filter, lightingColor, surfaceScale, diffuseConstant, lightSource));
}
FEDiffuseLighting::~FEDiffuseLighting()
{
}
// Accessor for the light color (stored in the FELighting base).
Color FEDiffuseLighting::lightingColor() const
{
    return m_lightingColor;
}
// Updates the light color. Returns true when the stored value actually
// changed (callers use this to decide whether to invalidate).
bool FEDiffuseLighting::setLightingColor(const Color& lightingColor)
{
    if (m_lightingColor != lightingColor) {
        m_lightingColor = lightingColor;
        return true;
    }
    return false;
}
// Accessor for the surface scale (stored in the FELighting base).
float FEDiffuseLighting::surfaceScale() const
{
    return m_surfaceScale;
}
// Updates the surface scale. Returns true when the stored value actually
// changed. (Exact float comparison is intentional: it is a no-op check, not
// a tolerance test.)
bool FEDiffuseLighting::setSurfaceScale(float surfaceScale)
{
    if (m_surfaceScale != surfaceScale) {
        m_surfaceScale = surfaceScale;
        return true;
    }
    return false;
}
// Accessor for the diffuse constant (kd).
float FEDiffuseLighting::diffuseConstant() const
{
    return m_diffuseConstant;
}
// Updates the diffuse constant, clamping negative input to zero first.
// Returns true when the stored value actually changed.
bool FEDiffuseLighting::setDiffuseConstant(float diffuseConstant)
{
    const float clamped = std::max(diffuseConstant, 0.0f);
    if (m_diffuseConstant == clamped)
        return false;
    m_diffuseConstant = clamped;
    return true;
}
// Non-owning view of the light source; may be null.
const LightSource* FEDiffuseLighting::lightSource() const
{
    return m_lightSource.get();
}
// Takes (shared) ownership of the new light source, releasing the old one.
void FEDiffuseLighting::setLightSource(PassRefPtr<LightSource> lightSource)
{
    m_lightSource = lightSource;
}
// Serializes this effect (and, recursively, its single input) for layout-test
// dumps.
TextStream& FEDiffuseLighting::<API key>(TextStream& ts, int indent) const
{
    writeIndent(ts, indent);
    ts << "[feDiffuseLighting";
    FilterEffect::<API key>(ts);
    ts << " surfaceScale=\"" << m_surfaceScale << "\" " << "diffuseConstant=\"" << m_diffuseConstant << "\"]\n";
    inputEffect(0)-><API key>(ts, indent + 1);
    return ts;
}
} // namespace blink
|
import {<API key>} from 'chrome://resources/js/util.m.js';
/**
 * Created by |LineChart.LineChart|.
 * A dummy scrollbar that tracks the position of the line chart and scrolls
 * it, so we can draw only the visible part of the line chart
 * instead of drawing the whole chart.
 * @const
 */
export class Scrollbar {
  constructor(/** function(): undefined */ callback) {
    /** @const {function(): undefined} - Handle the scrolling event. */
    this.callback_ = callback;

    /** @type {number} - The range the scrollbar can scroll. */
    this.range_ = 0;

    /** @type {number} - The current position of the scrollbar. */
    this.position_ = 0;

    /** @type {number} - The real width of this scrollbar, in pixels. */
    this.width_ = 0;

    /** @type {Element} - The outer div to show the scrollbar. */
    this.outerDiv_ =
        <API key>('div', '<API key>');
    this.outerDiv_.addEventListener('scroll', this.onScroll_.bind(this));

    /** @type {Element} - The inner div to make outer div scrollable. */
    this.innerDiv_ =
        <API key>('div', '<API key>');
    this.outerDiv_.appendChild(this.innerDiv_);
  }

  /**
   * Scrolling event handler. Ignores events that do not change the position
   * (e.g. echoes of our own programmatic scrollLeft writes).
   */
  onScroll_() {
    const /** number */ newPosition = this.outerDiv_.scrollLeft;
    if (newPosition == this.position_)
      return;
    this.position_ = newPosition;
    this.callback_();
  }

  /** @return {Element} */
  getRootDiv() {
    return this.outerDiv_;
  }

  /**
   * Return the height of scrollbar element.
   * @return {number}
   */
  getHeight() {
    return this.outerDiv_.offsetHeight;
  }

  /** @return {number} */
  getRange() {
    return this.range_;
  }

  /**
   * Position may be float point number because |document.scrollLeft| may be
   * float point number.
   * @return {number}
   */
  getPosition() {
    return Math.round(this.position_);
  }

  /**
   * Change the size of the outer div and update the scrollbar position.
   * @param {number} width
   */
  resize(width) {
    if (this.width_ == width)
      return;
    this.width_ = width;
    this.<API key>();
  }

  <API key>() {
    this.constructor.setNodeWidth(this.outerDiv_, this.width_);
  }

  /**
   * Set the scrollable range to |range|. Use the inner div's width to control
   * the scrollable range. If position go out of range after range update, set
   * it to the boundary value.
   * @param {number} range
   */
  setRange(range) {
    this.range_ = range;
    this.<API key>();
    // Clamp the position to the new, smaller range.
    if (range < this.position_) {
      this.position_ = range;
      this.<API key>();
    }
  }

  <API key>() {
    const width = this.outerDiv_.clientWidth;
    this.constructor.setNodeWidth(this.innerDiv_, width + this.range_);
  }

  /**
   * @param {Element} node
   * @param {number} width
   */
  static setNodeWidth(node, width) {
    node.style.width = width + 'px';
  }

  /**
   * Set the scrollbar position to |position|. If the new position go out of
   * range, set it to the boundary value.
   * @param {number} position
   */
  setPosition(position) {
    const /** number */ newPosition =
        Math.max(0, Math.min(position, this.range_));
    this.position_ = newPosition;
    this.<API key>();
  }

  /**
   * Update the scrollbar position via Javascript scrollbar api. Position may
   * not be the same value as what we assigned even if the value is in the
   * range. See crbug.com/760425.
   */
  <API key>() {
    if (this.outerDiv_.scrollLeft == this.position_)
      return;
    this.outerDiv_.scrollLeft = this.position_;
  }

  /**
   * Return true if scrollbar is at the right edge of the chart.
   * @return {boolean}
   */
  <API key>() {
    /* |scrollLeft| may become a float point number even if we set it to some
     * integer value. If the distance to the right edge less than 2 pixels, we
     * consider that it is scrolled to the right edge.
     */
    const <API key> = 2;
    return this.position_ + <API key> > this.range_;
  }

  /**
   * Scroll the scrollbar to the right edge.
   */
  scrollToRightEdge() {
    this.setPosition(this.range_);
  }
}
|
package org.xtreemfs.test.osd.rwre;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.xtreemfs.osd.storage.HashStorageLayout;
import org.xtreemfs.osd.storage.MetadataCache;
import org.xtreemfs.test.SetupUtils;
import org.xtreemfs.test.TestHelper;
public class <API key> {
    @Rule
    public final TestRule testLog = TestHelper.testLog;

    /**
     * @throws java.lang.Exception
     */
    @Before
    public void setUp() throws Exception {
    }

    /**
     * @throws java.lang.Exception
     */
    @After
    public void tearDown() throws Exception {
    }

    /**
     * Regression test: a file id with a stray leading '/' must resolve to the
     * same on-disk data as the canonical id. After writing under the broken id
     * and reading back under the corrected id, the data is expected to live
     * only under the corrected directory — presumably the layout normalizes
     * the id and relocates the file on access; verify against
     * HashStorageLayout.
     */
    @Test
    public void <API key>() throws IOException {
        final String globalFileId = "<API key>:13193";
        final String correctedFileId = globalFileId;
        final String brokenFileId = "/" + correctedFileId;
        final HashStorageLayout hsl = new HashStorageLayout(SetupUtils.createOSD1Config(), new MetadataCache());

        // Cleanup previous runs.
        hsl.deleteFile(brokenFileId, true);
        hsl.deleteFile(correctedFileId, true);

        final File brokenFileDir = new File(hsl.<API key>(brokenFileId));
        final File correctedFileDir = new File(hsl.<API key>(correctedFileId));

        // Set masterepoch using the wrong id.
        assertFalse(brokenFileDir.isDirectory());
        assertFalse(correctedFileDir.isDirectory());
        hsl.setMasterEpoch(brokenFileId, 1);
        assertTrue(brokenFileDir.isDirectory());

        // Get the masterepoch with the correct id.
        assertEquals(1, hsl.getMasterEpoch(correctedFileId));
        assertFalse(brokenFileDir.isDirectory());
        assertTrue(correctedFileDir.isDirectory());

        // Get the masterepoch of a file which does not exist.
        assertEquals(0, hsl.getMasterEpoch("fileIdDoesNotExist"));
    }
}
|
import {
Context,
DocumentRegistry,
TextModelFactory
} from '@jupyterlab/docregistry';
import * as Mock from '@jupyterlab/testutils/lib/mock';
import { UUID } from '@lumino/coreutils';
import { CellRenderer, DataGrid, JSONModel } from '@lumino/datagrid';
import { CSVViewer, GridSearchService } from '../src';
function createContext(): Context<DocumentRegistry.IModel> {
const factory = new TextModelFactory();
const manager = new Mock.ServiceManagerMock();
const path = UUID.uuid4() + '.csv';
return new Context({ factory, manager, path });
}
describe('csvviewer/widget', () => {
  const context = createContext();

  describe('CSVViewer', () => {
    describe('#constructor()', () => {
      it('should instantiate a `CSVViewer`', () => {
        const widget = new CSVViewer({ context });
        expect(widget).toBeInstanceOf(CSVViewer);
        widget.dispose();
      });
    });

    describe('#context', () => {
      it('should be the context for the file', () => {
        const widget = new CSVViewer({ context });
        expect(widget.context).toBe(context);
      });
    });

    describe('#dispose()', () => {
      it('should dispose of the resources held by the widget', () => {
        const widget = new CSVViewer({ context });
        expect(widget.isDisposed).toBe(false);
        widget.dispose();
        expect(widget.isDisposed).toBe(true);
      });

      it('should be safe to call multiple times', () => {
        const widget = new CSVViewer({ context });
        expect(widget.isDisposed).toBe(false);
        widget.dispose();
        widget.dispose();
        expect(widget.isDisposed).toBe(true);
      });
    });
  });

  describe('GridSearchService', () => {
    // Two-row, two-column model; column "b" contains the strings the search
    // tests match against.
    function createModel(): JSONModel {
      return new JSONModel({
        data: [
          { index: 0, a: 'other', b: 'match 1' },
          { index: 1, a: 'other', b: 'match 2' }
        ],
        schema: {
          primaryKey: ['index'],
          fields: [
            {
              name: 'a'
            },
            { name: 'b' }
          ]
        }
      });
    }

    function <API key>(model: JSONModel): GridSearchService {
      const grid = new DataGrid();
      grid.dataModel = model;
      return new GridSearchService(grid);
    }

    it('searches incrementally and set background color', () => {
      const model = createModel();
      const searchService = <API key>(model);
      // The renderer maps a cell to a style string depending on whether it
      // is the current match, another match, or no match.
      const cellRenderer = searchService.<API key>({
        <API key>: 'anotherMatch',
        <API key>: 'currentMatch',
        textColor: '',
        horizontalAlignment: 'right'
      });

      /**
       * fake rendering a cell and returns the background color for this coordinate.
       */
      function fakeRenderCell(row: number, column: number) {
        const cellConfig = {
          value: model.data('body', row, column),
          row,
          column
        } as CellRenderer.CellConfig;
        return cellRenderer(cellConfig);
      }

      // searching for "match", cells at (0,1) and (1,1) should match.
      // (0,1) is the current match
      const query = /match/;
      searchService.find(query);
      expect(fakeRenderCell(0, 1)).toBe('currentMatch');
      expect(fakeRenderCell(1, 1)).toBe('anotherMatch');
      expect(fakeRenderCell(0, 0)).toBe('');

      // search again, the current match "moves" to be (1,1)
      searchService.find(query);
      expect(fakeRenderCell(0, 1)).toBe('anotherMatch');
      expect(fakeRenderCell(1, 1)).toBe('currentMatch');
    });
  });
});
|
// Test snippet for "virtual traits": the `trait Inner <: { ... }` form is
// experimental syntax, not standard Scala -- presumably a compiler test
// fixture; confirm against the originating test suite.
object Virt extends Application {
class Foo {
trait Inner <: { val x : Int = 3 }
}
// Bar's Inner refines Foo's Inner; `y` reads `x`, which is only visible if
// the virtual-trait refinement inherits Foo.Inner's members -- TODO confirm.
class Bar extends Foo {
trait Inner <: { val y : Int = x }
}
}
|
// <API key>: Apache-2.0 WITH LLVM-exception
#ifndef _WIN32
#include "lldb/Host/ProcessRunLock.h"
namespace lldb_private {
// ProcessRunLock couples an rwlock with a boolean "running" flag.
// NOTE(review): the pthread function names are redacted in this copy of the
// file; comments below describe the apparent intent -- confirm against
// lldb/source/Host/common/ProcessRunLock.cpp.
ProcessRunLock::ProcessRunLock() : m_running(false) {
int err = ::pthread_rwlock_init(&m_rwlock, nullptr);
(void)err; // Initialization failure is deliberately ignored.
}
ProcessRunLock::~ProcessRunLock() {
int err = ::<API key>(&m_rwlock);
(void)err; // Destruction failure is deliberately ignored.
}
// Takes the lock in read mode; on success (process not running) the read
// lock is intentionally *kept held* and true is returned. The caller is
// expected to release it later via ReadUnlock().
bool ProcessRunLock::ReadTryLock() {
::<API key>(&m_rwlock);
if (!m_running) {
return true;
}
// Process is running: release the lock we just acquired and fail.
::<API key>(&m_rwlock);
return false;
}
bool ProcessRunLock::ReadUnlock() {
return ::<API key>(&m_rwlock) == 0;
}
// Sets m_running under the write lock. Always reports success.
bool ProcessRunLock::SetRunning() {
::<API key>(&m_rwlock);
m_running = true;
::<API key>(&m_rwlock);
return true;
}
// Like SetRunning(), but only if the write lock can be acquired without
// blocking; returns true only when the process was not already running.
bool ProcessRunLock::TrySetRunning() {
bool r;
if (::<API key>(&m_rwlock) == 0) {
r = !m_running;
m_running = true;
::<API key>(&m_rwlock);
return r;
}
return false;
}
// Clears m_running under the write lock. Always reports success.
bool ProcessRunLock::SetStopped() {
::<API key>(&m_rwlock);
m_running = false;
::<API key>(&m_rwlock);
return true;
}
}
#endif
|
// Evaluates a media `query`, returning its `matches` flag; guards against
// environments where window.matchMedia yields a falsy result.
function safeMatchMedia(query) {
    var result = window.matchMedia(query);
    if (!result) {
        return false;
    }
    return result.matches;
}
// AMD module computing one-shot feature-detection flags for the current
// browser/device. All values are evaluated once at load time.
define('capabilities', [], function() {
var capabilities = {
'JSON': window.JSON && typeof JSON.parse == 'function',
'debug': (('' + document.location).indexOf('dbg') >= 0),
'debug_in_page': (('' + document.location).indexOf('dbginpage') >= 0),
'console': window.console && (typeof window.console.log == 'function'),
'replaceState': typeof history.replaceState === 'function',
'chromeless': window.locationbar && !window.locationbar.visible,
'localStorage': false,
'sessionStorage': false,
'webApps': !!(navigator.mozApps && navigator.mozApps.install),
'app_runtime': !!(
navigator.mozApps &&
typeof navigator.mozApps.html5Implementation === 'undefined'
),
'fileAPI': !!window.FileReader,
'userAgent': navigator.userAgent,
'desktop': false,
'tablet': false,
'mobile': safeMatchMedia('(max-width: 600px)'),
'firefoxAndroid': (navigator.userAgent.indexOf('Firefox') != -1 && navigator.userAgent.indexOf('Android') != -1),
'touch': ('ontouchstart' in window) || window.DocumentTouch && document instanceof DocumentTouch,
'nativeScroll': (function() {
return '<API key>' in document.createElement('div').style;
})(),
'performance': !!(window.performance || window.msPerformance || window.webkitPerformance || window.mozPerformance),
'navPay': !!navigator.mozPay,
'webactivities': !!(window.setMessageHandler || window.<API key>),
'firefoxOS': null // This is set below.
};
// We're probably tablet if we have touch and we're larger than mobile.
capabilities.tablet = capabilities.touch && safeMatchMedia('(min-width: 601px)');
// We're probably desktop if we don't have touch and we're larger than some arbitrary dimension.
capabilities.desktop = !capabilities.touch && safeMatchMedia('(min-width: 673px)');
// Packaged-app installation are supported only on Firefox OS, so this is how we sniff.
capabilities.gaia = !!(capabilities.mobile && navigator.mozApps && navigator.mozApps.installPackage);
capabilities.getDeviceType = function() {
return this.desktop ? 'desktop' : (this.tablet ? 'tablet' : 'mobile');
};
// The three device classes are made mutually exclusive below.
if (capabilities.tablet) {
// If we're on tablet, then we're not on desktop.
capabilities.desktop = false;
}
if (capabilities.mobile) {
// If we're on mobile, then we're not on desktop nor tablet.
capabilities.desktop = capabilities.tablet = false;
}
// Detect Firefox OS.
// This will be true if the request is from a Firefox OS phone *or*
// a desktop B2G build with the correct UA pref, such as this:
capabilities.firefoxOS = capabilities.gaia && !capabilities.firefoxAndroid;
// Accessing window.localStorage can throw (e.g. when cookies/storage are
// blocked), hence the try/catch probes instead of plain feature checks.
try {
if ('localStorage' in window && window.localStorage !== null) {
capabilities.localStorage = true;
}
} catch (e) {
}
try {
if ('sessionStorage' in window && window.sessionStorage !== null) {
capabilities.sessionStorage = true;
}
} catch (e) {
}
return capabilities;
});
z.capabilities = require('capabilities');
|
#include "tools/gn/ninja_target_writer.h"
#include <sstream>
#include "base/files/file_util.h"
#include "base/strings/string_util.h"
#include "tools/gn/err.h"
#include "tools/gn/filesystem_utils.h"
#include "tools/gn/<API key>.h"
#include "tools/gn/<API key>.h"
#include "tools/gn/<API key>.h"
#include "tools/gn/<API key>.h"
#include "tools/gn/ninja_utils.h"
#include "tools/gn/output_file.h"
#include "tools/gn/scheduler.h"
#include "tools/gn/string_utils.h"
#include "tools/gn/substitution_writer.h"
#include "tools/gn/target.h"
#include "tools/gn/trace.h"
// Binds the writer to |target| and |out|, configuring path output so file
// paths are rebased against the build dir and escaped for ninja syntax.
NinjaTargetWriter::NinjaTargetWriter(const Target* target,
std::ostream& out)
: settings_(target->settings()),
target_(target),
out_(out),
path_output_(settings_->build_settings()->build_dir(),
settings_->build_settings()->root_path_utf8(),
ESCAPE_NINJA) {
}
NinjaTargetWriter::~NinjaTargetWriter() {
}
// static
// Entry point: selects the writer subclass matching |target|'s output type,
// renders the target's .ninja file into memory, and writes it to disk in a
// single operation.
void NinjaTargetWriter::RunAndWriteFile(const Target* target) {
const Settings* settings = target->settings();
ScopedTrace trace(TraceItem::TRACE_FILE_WRITE,
target->label().GetUserVisibleName(false));
trace.SetToolchain(settings->toolchain_label());
base::FilePath ninja_file(settings->build_settings()->GetFullPath(
<API key>(target)));
if (g_scheduler->verbose_logging())
g_scheduler->Log("Writing", FilePathToUTF8(ninja_file));
base::CreateDirectory(ninja_file.DirName());
// It's ridiculously faster to write to a string and then write that to
// disk in one operation than to use an fstream here.
std::stringstream file;
// Call out to the correct sub-type of writer.
if (target->output_type() == Target::COPY_FILES) {
<API key> writer(target, file);
writer.Run();
} else if (target->output_type() == Target::ACTION ||
target->output_type() == Target::ACTION_FOREACH) {
<API key> writer(target, file);
writer.Run();
} else if (target->output_type() == Target::GROUP) {
<API key> writer(target, file);
writer.Run();
} else if (target->output_type() == Target::EXECUTABLE ||
target->output_type() == Target::STATIC_LIBRARY ||
target->output_type() == Target::SHARED_LIBRARY ||
target->output_type() == Target::SOURCE_SET) {
<API key> writer(target, file);
writer.Run();
} else {
// Unknown output type: programming error.
CHECK(0);
}
std::string contents = file.str();
base::WriteFile(ninja_file, contents.c_str(),
static_cast<int>(contents.size()));
}
// Emits one "name = value" ninja variable per substitution used by this
// target (label, root/target gen and out dirs, output name), followed by a
// blank separator line if anything was written.
void NinjaTargetWriter::WriteSharedVars(const SubstitutionBits& bits) {
bool written_anything = false;
// Target label.
if (bits.used[SUBSTITUTION_LABEL]) {
out_ << <API key>[SUBSTITUTION_LABEL] << " = "
<< SubstitutionWriter::<API key>(
target_, SUBSTITUTION_LABEL)
<< std::endl;
written_anything = true;
}
// Root gen dir.
if (bits.used[<API key>]) {
out_ << <API key>[<API key>] << " = "
<< SubstitutionWriter::<API key>(
target_, <API key>)
<< std::endl;
written_anything = true;
}
// Root out dir.
if (bits.used[<API key>]) {
out_ << <API key>[<API key>] << " = "
<< SubstitutionWriter::<API key>(
target_, <API key>)
<< std::endl;
written_anything = true;
}
// Target gen dir.
if (bits.used[<API key>]) {
out_ << <API key>[<API key>] << " = "
<< SubstitutionWriter::<API key>(
target_, <API key>)
<< std::endl;
written_anything = true;
}
// Target out dir.
if (bits.used[<API key>]) {
out_ << <API key>[<API key>] << " = "
<< SubstitutionWriter::<API key>(
target_, <API key>)
<< std::endl;
written_anything = true;
}
// Target output name.
if (bits.used[<API key>]) {
out_ << <API key>[<API key>] << " = "
<< SubstitutionWriter::<API key>(
target_, <API key>)
<< std::endl;
written_anything = true;
}
// If we wrote any vars, separate them from the rest of the file that follows
// with a blank line.
if (written_anything)
out_ << std::endl;
}
// Writes a ".inputdeps.stamp" build rule aggregating this target's implicit
// inputs (action script, input files, sources for actions, hard deps, and
// toolchain deps) and returns the stamp's OutputFile. Returns an empty
// OutputFile when the target has no such inputs.
OutputFile NinjaTargetWriter::<API key>(
const std::vector<const Target*>& extra_hard_deps) const {
CHECK(target_->toolchain())
<< "Toolchain not set on target "
<< target_->label().GetUserVisibleName(true);
// For an action (where we run a script only once) the sources are the same
// as the source prereqs.
bool <API key> = (target_->output_type() == Target::ACTION);
// Actions get implicit dependencies on the script itself.
bool <API key> =
(target_->output_type() == Target::ACTION) ||
(target_->output_type() == Target::ACTION_FOREACH);
// Fast path: nothing to stamp.
if (!<API key> &&
extra_hard_deps.empty() &&
target_->inputs().empty() &&
target_->recursive_hard_deps().empty() &&
(!<API key> || target_->sources().empty()) &&
target_->toolchain()->deps().empty())
return OutputFile(); // No input/hard deps.
// One potential optimization is if there are few input dependencies (or
// potentially few sources that depend on these) it's better to just write
// all hard deps on each sources line than have this intermediate stamp. We
// do the stamp file because duplicating all the order-only deps for each
// source file can really explode the ninja file but this won't be the most
// optimal thing in all cases.
OutputFile input_stamp_file(
RebasePath(GetTargetOutputDir(target_).value(),
settings_->build_settings()->build_dir(),
settings_->build_settings()->root_path_utf8()));
input_stamp_file.value().append(target_->label().name());
input_stamp_file.value().append(".inputdeps.stamp");
out_ << "build ";
path_output_.WriteFile(out_, input_stamp_file);
out_ << ": "
<< <API key>(settings_)
<< Toolchain::ToolTypeToName(Toolchain::TYPE_STAMP);
// Script file (if applicable).
if (<API key>) {
out_ << " ";
path_output_.WriteFile(out_, target_->action_values().script());
}
// Input files are order-only deps.
for (const auto& input : target_->inputs()) {
out_ << " ";
path_output_.WriteFile(out_, input);
}
if (<API key>) {
for (const auto& source : target_->sources()) {
out_ << " ";
path_output_.WriteFile(out_, source);
}
}
// The different sources of input deps may duplicate some targets, so uniquify
// them (ordering doesn't matter for this case).
std::set<const Target*> unique_deps;
// Hard dependencies that are direct or indirect dependencies.
const std::set<const Target*>& hard_deps = target_->recursive_hard_deps();
for (const auto& dep : hard_deps)
unique_deps.insert(dep);
// Extra hard dependencies passed in.
unique_deps.insert(extra_hard_deps.begin(), extra_hard_deps.end());
// Toolchain dependencies. These must be resolved before doing anything.
// This just writes all toolchain deps for simplicity. If we find that
// toolchains often have more than one dependency, we could consider writing
// a toolchain-specific stamp file and only include the stamp here.
const LabelTargetVector& toolchain_deps = target_->toolchain()->deps();
for (const auto& toolchain_dep : toolchain_deps)
unique_deps.insert(toolchain_dep.ptr);
// Each dep contributes its own dependency-output file to the stamp rule.
for (const auto& dep : unique_deps) {
DCHECK(!dep-><API key>().value().empty());
out_ << " ";
path_output_.WriteFile(out_, dep-><API key>());
}
out_ << "\n";
return input_stamp_file;
}
// Writes the final stamp rule for this target: |files| as direct inputs and
// |order_only_deps| after the "||" separator.
void NinjaTargetWriter::WriteStampForTarget(
const std::vector<OutputFile>& files,
const std::vector<OutputFile>& order_only_deps) {
const OutputFile& stamp_file = target_-><API key>();
// First validate that the target's dependency is a stamp file. Otherwise,
// we shouldn't have gotten here!
CHECK(base::EndsWith(stamp_file.value(), ".stamp", false))
<< "Output should end in \".stamp\" for stamp file output. Instead got: "
<< "\"" << stamp_file.value() << "\"";
out_ << "build ";
path_output_.WriteFile(out_, stamp_file);
out_ << ": "
<< <API key>(settings_)
<< Toolchain::ToolTypeToName(Toolchain::TYPE_STAMP);
path_output_.WriteFiles(out_, files);
if (!order_only_deps.empty()) {
out_ << " ||";
path_output_.WriteFiles(out_, order_only_deps);
}
out_ << std::endl;
}
|
#include "SkTileImageFilter.h"
#include "SkColorSpaceXformer.h"
#include "SkCanvas.h"
#include "SkImage.h"
#include "SkImageFilterPriv.h"
#include "SkMatrix.h"
#include "SkOffsetImageFilter.h"
#include "SkPaint.h"
#include "SkReadBuffer.h"
#include "SkShader.h"
#include "SkSpecialImage.h"
#include "SkSpecialSurface.h"
#include "SkSurface.h"
#include "SkValidationUtils.h"
#include "SkWriteBuffer.h"
// Factory. Returns nullptr for invalid rects. When src and dst have equal
// dimensions the "tile" degenerates to a translation, so an offset filter
// (cropped to the rect intersection) is returned instead; if the rects do
// not intersect at all, the input filter is passed through unchanged.
sk_sp<SkImageFilter> SkTileImageFilter::Make(const SkRect& srcRect, const SkRect& dstRect,
sk_sp<SkImageFilter> input) {
if (!SkIsValidRect(srcRect) || !SkIsValidRect(dstRect)) {
return nullptr;
}
if (srcRect.width() == dstRect.width() && srcRect.height() == dstRect.height()) {
SkRect ir = dstRect;
if (!ir.intersect(srcRect)) {
return input;
}
CropRect cropRect(ir);
return SkOffsetImageFilter::Make(dstRect.x() - srcRect.x(),
dstRect.y() - srcRect.y(),
std::move(input),
&cropRect);
}
return sk_sp<SkImageFilter>(new SkTileImageFilter(srcRect, dstRect, std::move(input)));
}
// Extracts the (mapped) source rect from the filtered input, then fills the
// (mapped, clipped) destination rect by drawing with a repeat-mode shader
// built from that extracted tile. Returns nullptr whenever any intermediate
// region is empty or a surface cannot be created.
sk_sp<SkSpecialImage> SkTileImageFilter::onFilterImage(SkSpecialImage* source,
const Context& ctx,
SkIPoint* offset) const {
SkIPoint inputOffset = SkIPoint::Make(0, 0);
sk_sp<SkSpecialImage> input(this->filterInput(0, source, ctx, &inputOffset));
if (!input) {
return nullptr;
}
// Map the destination rect through the CTM and clip it.
SkRect dstRect;
ctx.ctm().mapRect(&dstRect, fDstRect);
if (!dstRect.intersect(SkRect::Make(ctx.clipBounds()))) {
return nullptr;
}
const SkIRect dstIRect = dstRect.roundOut();
if (!fSrcRect.width() || !fSrcRect.height() || !dstIRect.width() || !dstIRect.height()) {
return nullptr;
}
// Map the source rect into the input's coordinate space.
SkRect srcRect;
ctx.ctm().mapRect(&srcRect, fSrcRect);
SkIRect srcIRect;
srcRect.roundOut(&srcIRect);
srcIRect.offset(-inputOffset);
const SkIRect inputBounds = SkIRect::MakeWH(input->width(), input->height());
if (!SkIRect::Intersects(srcIRect, inputBounds)) {
return nullptr;
}
// We create an SkImage here b.c. it needs to be a tight fit for the tiling
sk_sp<SkImage> subset;
if (inputBounds.contains(srcIRect)) {
subset = input->asImage(&srcIRect);
} else {
// Source rect hangs off the input: redraw the input into a tight surface
// so the snapshot has exactly the tile's dimensions.
sk_sp<SkSurface> surf(input->makeTightSurface(ctx.outputProperties(), srcIRect.size()));
if (!surf) {
return nullptr;
}
SkCanvas* canvas = surf->getCanvas();
SkASSERT(canvas);
SkPaint paint;
paint.setBlendMode(SkBlendMode::kSrc);
input->draw(canvas,
SkIntToScalar(inputOffset.x()), SkIntToScalar(inputOffset.y()),
&paint);
subset = surf->makeImageSnapshot();
}
if (!subset) {
return nullptr;
}
SkASSERT(subset->width() == srcIRect.width());
SkASSERT(subset->height() == srcIRect.height());
sk_sp<SkSpecialSurface> surf(source->makeSurface(ctx.outputProperties(), dstIRect.size()));
if (!surf) {
return nullptr;
}
SkCanvas* canvas = surf->getCanvas();
SkASSERT(canvas);
// Tile the extracted subset across the destination via a repeating shader.
SkPaint paint;
paint.setBlendMode(SkBlendMode::kSrc);
paint.setShader(subset->makeShader(SkShader::kRepeat_TileMode, SkShader::kRepeat_TileMode));
canvas->translate(-dstRect.fLeft, -dstRect.fTop);
canvas->drawRect(dstRect, paint);
offset->fX = dstIRect.fLeft;
offset->fY = dstIRect.fTop;
return surf->makeImageSnapshot();
}
// Rebuilds the filter only if color-space transformation changed the input;
// otherwise returns this filter unchanged.
sk_sp<SkImageFilter> SkTileImageFilter::onMakeColorSpace(SkColorSpaceXformer* xformer) const {
SkASSERT(1 == this->countInputs());
auto input = xformer->apply(this->getInput(0));
if (input.get() != this->getInput(0)) {
return SkTileImageFilter::Make(fSrcRect, fDstRect, std::move(input));
}
return this->refMe();
}
// Maps this node's bounds through |ctm|: the source rect for one direction,
// the destination rect for the other (the redacted comparison is presumably
// against the reverse map direction -- confirm against SkImageFilter.h).
SkIRect SkTileImageFilter::onFilterNodeBounds(const SkIRect& src, const SkMatrix& ctm,
MapDirection dir, const SkIRect* inputRect) const {
SkRect rect = <API key> == dir ? fSrcRect : fDstRect;
ctm.mapRect(&rect);
return rect.roundOut();
}
// Identity bounds mapping: tiling is self-contained, so the query is not
// propagated to inputs.
SkIRect SkTileImageFilter::onFilterBounds(const SkIRect& src, const SkMatrix&,
MapDirection, const SkIRect* inputRect) const {
// Don't recurse into inputs.
return src;
}
// Fast bounds are always the destination rect, independent of |src|.
SkRect SkTileImageFilter::computeFastBounds(const SkRect& src) const {
return fDstRect;
}
// Deserialization: reads the two rects in the same order flatten() writes
// them (the redacted macro presumably unflattens the common state with one
// input -- confirm against SkImageFilter's unflatten macro).
sk_sp<SkFlattenable> SkTileImageFilter::CreateProc(SkReadBuffer& buffer) {
<API key>(common, 1);
SkRect src, dst;
buffer.readRect(&src);
buffer.readRect(&dst);
return Make(src, dst, common.getInput(0));
}
// Serialization: base state first, then src and dst rects. Order must
// mirror CreateProc's reads.
void SkTileImageFilter::flatten(SkWriteBuffer& buffer) const {
this->INHERITED::flatten(buffer);
buffer.writeRect(fSrcRect);
buffer.writeRect(fDstRect);
}
|
from __future__ import absolute_import, unicode_literals
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from wagtail.wagtailadmin.forms import <API key>
from wagtail.wagtailadmin.modal_workflow import <API key>
from wagtail.wagtailcore.models import Page, PageViewRestriction
def set_privacy(request, page_id):
page = get_object_or_404(Page, id=page_id)
page_perms = page.<API key>(request.user)
if not page_perms.<API key>():
raise PermissionDenied
# fetch restriction records in depth order so that ancestors appear first
restrictions = page.<API key>().order_by('page__depth')
if restrictions:
restriction = restrictions[0]
<API key> = (restriction.page != page)
else:
restriction = None
<API key> = False
if request.method == 'POST':
form = <API key>(request.POST, instance=restriction)
if form.is_valid() and not <API key>:
if form.cleaned_data['restriction_type'] == PageViewRestriction.NONE:
# remove any existing restriction
if restriction:
restriction.delete()
else:
restriction = form.save(commit=False)
restriction.page = page
form.save()
return <API key>(
request, None, 'wagtailadmin/page_privacy/set_privacy_done.js', {
'is_public': (form.cleaned_data['restriction_type'] == 'none')
}
)
else: # request is a GET
if not <API key>:
if restriction:
form = <API key>(instance=restriction)
else:
# no current view restrictions on this page
form = <API key>(initial={
'restriction_type': 'none'
})
if <API key>:
# display a message indicating that there is a restriction at ancestor level -
# do not provide the form for setting up new restrictions
return <API key>(
request, 'wagtailadmin/page_privacy/ancestor_privacy.html', None,
{
'<API key>': restriction.page,
}
)
else:
# no restriction set at ancestor level - can set restrictions here
return <API key>(
request,
'wagtailadmin/page_privacy/set_privacy.html',
'wagtailadmin/page_privacy/set_privacy.js', {
'page': page,
'form': form,
}
)
|
#ifndef <API key>
#define <API key>
#include "base/basictypes.h"
#include "base/compiler_specific.h"
#include "ui/views/controls/menu/menu_item_view.h"
namespace views {
class MenuButton;
class Widget;
namespace internal {
class <API key>;
class MenuRunnerImpl;
}
// MenuRunner is responsible for showing (running) the menu and additionally
// owning the MenuItemView. RunMenuAt() runs a nested message loop. It is safe
// to delete MenuRunner at any point, but MenuRunner internally only deletes the
// MenuItemView *after* the nested message loop completes. If MenuRunner is
// deleted while the menu is showing the delegate of the menu is reset. This is
// done to ensure delegates aren't notified after they may have been deleted.
// NOTE: while you can delete a MenuRunner at any point, the nested message loop
// won't return immediately. This means if you delete the object that owns
// the MenuRunner while the menu is running, your object is effectively still
// on the stack. A return value of MENU_DELETED indicated this. In most cases
// if RunMenuAt() returns MENU_DELETED, you should return immediately.
// Similarly you should avoid creating MenuRunner on the stack. Doing so means
// MenuRunner may not be immediately destroyed if your object is destroyed,
// resulting in possible callbacks to your now deleted object. Instead you
// should define MenuRunner as a scoped_ptr in your class so that when your
// object is destroyed MenuRunner initiates the proper cleanup and ensures your
// object isn't accessed again.
class VIEWS_EXPORT MenuRunner {
public:
// Bitmask flags controlling how the menu is run; combine and pass to
// RunMenuAt() via |types|.
enum RunTypes {
// The menu has mnemonics.
HAS_MNEMONICS = 1 << 0,
// The menu is a nested context menu. For example, click a folder on the
// bookmark bar, then right click an entry to get its context menu.
IS_NESTED = 1 << 1,
// Used for showing a menu during a drop operation. This does NOT block the
// caller, instead the delegate is notified when the menu closes via the
// DropMenuClosed method.
FOR_DROP = 1 << 2,
// The menu is a context menu (not necessarily nested), for example right
// click on a link on a website in the browser.
CONTEXT_MENU = 1 << 3,
};
enum RunResult {
// Indicates RunMenuAt is returning because the MenuRunner was deleted.
MENU_DELETED,
// Indicates RunMenuAt returned and MenuRunner was not deleted.
NORMAL_EXIT
};
// Creates a new MenuRunner. MenuRunner owns the supplied menu.
explicit MenuRunner(MenuItemView* menu);
~MenuRunner();
// Returns the menu.
MenuItemView* GetMenu();
// Takes ownership of |menu|, deleting it when MenuRunner is deleted. You
// only need call this if you create additional menus from
// MenuDelegate::GetSiblingMenu.
void OwnMenu(MenuItemView* menu);
// Runs the menu. |types| is a bitmask of RunTypes. If this returns
// MENU_DELETED the method is returning because the MenuRunner was deleted.
// Typically callers should NOT do any processing if this returns
// MENU_DELETED.
RunResult RunMenuAt(Widget* parent,
MenuButton* button,
const gfx::Rect& bounds,
MenuItemView::AnchorPosition anchor,
int32 types) WARN_UNUSED_RESULT;
// Returns true if we're in a nested message loop running the menu.
bool IsRunning() const;
// Hides and cancels the menu. This does nothing if the menu is not open.
void Cancel();
private:
// Implementation object that owns the MenuItemView and survives until the
// nested message loop unwinds (see class comment above).
internal::MenuRunnerImpl* holder_;
// Presumably the display-change listener created while the menu is shown
// (type and member name redacted in this copy) -- confirm upstream.
scoped_ptr<internal::<API key>> <API key>;
// Redacted macro, presumably disallowing copy/assign -- confirm upstream.
<API key>(MenuRunner);
};
namespace internal {
// <API key> is intended to listen for changes in the display size
// and cancel the menu. <API key> is created when the menu is
// shown.
// Abstract listener (name redacted in this copy) that watches for display
// size changes and cancels the running menu; instantiated when the menu is
// shown, per the comment above.
class <API key> {
public:
virtual ~<API key>() {}
// Creates the platform specified <API key>, or NULL if there
// isn't one. Caller owns the returned value.
static <API key>* Create(Widget* parent,
MenuRunner* runner);
protected:
<API key>() {}
};
}
} // namespace views
#endif // <API key>
|
// modification, are permitted provided that the following conditions
// are met:
// and/or other materials provided with the distribution.
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
#if !NET_CF && !SILVERLIGHT
namespace NLog.Targets
{
using System.ComponentModel;
using System.Text;
using System.Text.RegularExpressions;
using NLog.Config;
<summary>
Highlighting rule for Win32 colorful console.
</summary>
[<API key>]
public class <API key>
{
private Regex compiledRegex;
<summary>
Initializes a new instance of the <see cref="<API key>" /> class.
</summary>
public <API key>()
{
this.BackgroundColor = ConsoleOutputColor.NoChange;
this.ForegroundColor = ConsoleOutputColor.NoChange;
}
<summary>
Initializes a new instance of the <see cref="<API key>" /> class.
</summary>
<param name="text">The text to be matched..</param>
<param name="foregroundColor">Color of the foreground.</param>
<param name="backgroundColor">Color of the background.</param>
public <API key>(string text, ConsoleOutputColor foregroundColor, ConsoleOutputColor backgroundColor)
{
this.Text = text;
this.ForegroundColor = foregroundColor;
this.BackgroundColor = backgroundColor;
}
<summary>
Gets or sets the regular expression to be matched. You must specify either <c>text</c> or <c>regex</c>.
</summary>
<docgen category='Rule Matching Options' order='10' />
public string Regex { get; set; }
<summary>
Gets or sets the text to be matched. You must specify either <c>text</c> or <c>regex</c>.
</summary>
<docgen category='Rule Matching Options' order='10' />
public string Text { get; set; }
<summary>
Gets or sets a value indicating whether to match whole words only.
</summary>
<docgen category='Rule Matching Options' order='10' />
[DefaultValue(false)]
public bool WholeWords { get; set; }
<summary>
Gets or sets a value indicating whether to ignore case when comparing texts.
</summary>
<docgen category='Rule Matching Options' order='10' />
[DefaultValue(false)]
public bool IgnoreCase { get; set; }
<summary>
Gets the compiled regular expression that matches either Text or Regex property.
</summary>
public Regex CompiledRegex
{
get
{
if (this.compiledRegex == null)
{
string regexpression = this.Regex;
if (regexpression == null && this.Text != null)
{
regexpression = System.Text.RegularExpressions.Regex.Escape(this.Text);
if (this.WholeWords)
{
regexpression = "\b" + regexpression + "\b";
}
}
RegexOptions regexOptions = RegexOptions.Compiled;
if (this.IgnoreCase)
{
regexOptions |= RegexOptions.IgnoreCase;
}
this.compiledRegex = new Regex(regexpression, regexOptions);
}
return this.compiledRegex;
}
}
<summary>
Gets or sets the foreground color.
</summary>
<docgen category='Formatting Options' order='10' />
[DefaultValue("NoChange")]
public ConsoleOutputColor ForegroundColor { get; set; }
<summary>
Gets or sets the background color.
</summary>
<docgen category='Formatting Options' order='10' />
[DefaultValue("NoChange")]
public ConsoleOutputColor BackgroundColor { get; set; }
internal string MatchEvaluator(Match m)
{
StringBuilder result = new StringBuilder();
result.Append('\a');
result.Append((char)((int)this.ForegroundColor + 'A'));
result.Append((char)((int)this.BackgroundColor + 'A'));
result.Append(m.Value);
result.Append('\a');
result.Append('X');
return result.ToString();
}
internal string <API key>(string message)
{
return this.CompiledRegex.Replace(message, new MatchEvaluator(this.MatchEvaluator));
}
}
}
#endif
|
/* Toolbar row hosting the search input and its config controls. */
.<API key> {
flex: none;
padding: 4px;
display: flex;
}
/* NOTE(review): this base rule targets input[type="text"], but the focus and
   platform-mac rules below target input[type="search"] -- confirm which
   input type is actually used. */
.<API key> input[type="text"].<API key> {
-webkit-appearance: none;
padding: 0 3px;
margin: 0;
border: 1px solid rgb(163, 163, 163);
height: 20px;
border-radius: 2px;
color: #303030;
}
.<API key> input[type="search"].<API key>:focus {
border: 1px solid rgb(190, 190, 190);
outline: none;
}
:host-context(.platform-mac) .<API key> input[type="search"].<API key> {
top: 1px;
}
.<API key> label.search-config-label {
margin: auto 0;
margin-left: 8px;
color: #303030;
display: flex;
}
/* Status bar shown under the results. */
.<API key> {
background-color: #eee;
border-top: 1px solid #ccc;
padding-left: 5px;
flex: 0 0 19px;
display: flex;
padding-right: 5px;
}
.<API key> .search-message {
padding-top: 2px;
padding-left: 1ex;
}
/* Results tree: plain lists with expandable children. */
#<API key> li {
list-style: none;
}
#<API key> ol {
-<API key>: 0;
margin-top: 0;
}
#<API key> ol.children {
display: none;
}
#<API key> ol.children.expanded {
display: block;
}
/* Disclosure triangle drawn from the toolbar glyph sprite sheet; the "a"
   content is transparent and only reserves layout space. */
#<API key> li.parent::before {
-webkit-user-select: none;
background-image: url(Images/toolbarButtonGlyphs.png);
background-size: 352px 168px;
opacity: 0.5;
width: 12px;
content: "a";
color: transparent;
margin-left: -5px;
padding-right: 4px;
display: inline-block;
box-sizing: border-box;
}
/* High-DPI variant of the glyph sprite. */
@media (-<API key>: 1.5) {
#<API key> li.parent::before {
background-image: url(Images/<API key>.png);
}
} /* media */
#<API key> li.parent::before {
background-position: -4px -96px;
}
#<API key> li.parent.expanded::before {
background-position: -20px -96px;
}
/* Individual result rows. */
#<API key> .search-result {
font-size: 11px;
padding: 2px 0 2px 10px;
word-wrap: normal;
white-space: pre;
cursor: pointer;
}
#<API key> .search-result:hover {
background-color: rgba(121, 121, 121, 0.1);
}
#<API key> .search-result .<API key> {
font-weight: bold;
color: #222;
}
#<API key> .search-result .<API key> {
margin-left: 5px;
color: #222;
}
#<API key> .show-more-matches {
padding: 4px 0;
color: #222;
cursor: pointer;
font-size: 11px;
margin-left: 20px;
}
#<API key> .show-more-matches:hover {
text-decoration: underline;
}
/* Matched lines and their highlighted substrings. */
#<API key> .search-match {
word-wrap: normal;
white-space: pre;
}
#<API key> .search-match .<API key> {
color: rgb(128, 128, 128);
text-align: right;
vertical-align: top;
word-break: normal;
padding-right: 4px;
padding-left: 6px;
margin-right: 5px;
border-right: 1px solid #BBB;
}
#<API key> .search-match:not(:hover) .<API key> {
background-color: #F0F0F0;
}
#<API key> .search-match:hover {
background-color: rgba(56, 121, 217, 0.1);
}
#<API key> .search-match .highlighted-match {
background-color: #F1EA00;
}
:host-context(.-<API key>) #<API key> .search-match .highlighted-match {
background-color: hsl(133, 100%, 30%) !important;
}
#<API key> a {
text-decoration: none;
display: block;
}
#<API key> .search-match .<API key> {
color: #000;
}
/* Scrollable results pane and its empty-state placeholder. */
.search-view .search-results {
overflow-y: auto;
display: flex;
flex: auto;
}
.search-results .empty-view {
pointer-events: none;
}
.empty-view {
font-size: 24px;
color: rgb(75%, 75%, 75%);
font-weight: bold;
padding: 10px;
display: flex;
align-items: center;
justify-content: center;
}
|
#ifndef <API key>
#define <API key>
#include "base/memory/weak_ptr.h"
#include "base/task/<API key>.h"
#include "chrome/browser/download/download_ui_model.h"
#include "chrome/browser/ui/views/download/bubble/<API key>.h"
#include "ui/base/metadata/<API key>.h"
#include "ui/views/view.h"
namespace views {
class ImageView;
} // namespace views
// View (name redacted in this copy) that loads and displays the
// platform-specific file-type icon for a download row, reloading when the
// device scale factor changes.
class <API key> : public views::View {
public:
METADATA_HEADER(<API key>);
explicit <API key>(DownloadUIModel::DownloadUIModelPtr model);
<API key>(const <API key>&) = delete;
<API key>& operator=(const <API key>&) = delete;
~<API key>() override;
// Overrides views::View:
void AddedToWidget() override;
protected:
// Overrides ui::LayerDelegate:
// Presumably the scale-factor-changed notification (name redacted);
// parameters carry the old and new device scale factors -- confirm.
void <API key>(float <API key>,
float <API key>) override;
private:
// Load the icon, from the cache or from IconManager::LoadIcon.
void LoadIcon();
// Called when icon has been loaded by IconManager::LoadIcon.
void SetIcon(gfx::Image icon);
// TODO(bhatiarohit): Add <API key> icons.
// The icon for the file. We get platform-specific icons from IconLoader.
raw_ptr<views::ImageView> icon_ = nullptr;
// Device scale factor, used to load icons.
float current_scale_ = 1.0f;
// Tracks tasks requesting file icons.
base::<API key> <API key>;
// The model controlling this object's state.
const DownloadUIModel::DownloadUIModelPtr model_;
base::WeakPtrFactory<<API key>> weak_factory_{this};
};
#endif // <API key>
|
#ifndef <API key>
#define <API key>
#include <memory>
#include <string>
#include "base/check_op.h"
#include "base/containers/flat_map.h"
#include "base/containers/flat_set.h"
#include "base/observer_list.h"
#include "base/scoped_observation.h"
#include "content/public/browser/<API key>.h"
#include "content/public/browser/<API key>.h"
namespace blink {
class StorageKey;
} // namespace blink
namespace performance_manager {
// This class adapts an existing <API key> to ensure that the
// <API key>() notifications are sent as soon as the render
// process of a running service worker exits.
// It implements <API key> so it can be used interchangeably where a
// <API key>* is needed, and it also observes the underlying context
// so that it can receive the original notifications and control when they are
// sent to the observers.
// Lives on the UI thread. Must outlive |underlying_context|.
// Note: This is a temporary class that can be removed when the representation
// of a worker in the content/ layer (<API key>) is moved to
// the UI thread. At that point, it'll be able to observe its associated
// Adapter around an existing ServiceWorkerContext-like object (see the
// file-level comment above): forwards the context interface, observes the
// underlying context, and re-times worker-stopped notifications so they fire
// as soon as the worker's render process exits.
class <API key>
    : public content::<API key>,
      public content::<API key> {
 public:
  explicit <API key>(
      content::<API key>* underlying_context);
  ~<API key>() override;

  // content::<API key>:
  // Note that this is a minimal implementation for the use case of the
  // PerformanceManager. Only AddObserver/RemoveObserver are implemented.
  void AddObserver(content::<API key>* observer) override;
  void RemoveObserver(content::<API key>* observer) override;
  void <API key>(
      const GURL& script_url,
      const blink::StorageKey& key,
      const blink::mojom::<API key>& options,
      StatusCodeCallback callback) override;
  void <API key>(const GURL& scope,
      const blink::StorageKey& key,
      ResultCallback callback) override;
  content::<API key> <API key>(
      int64_t <API key>,
      content::<API key> timeout_type,
      const std::string& request_uuid) override;
  content::<API key> <API key>(
      int64_t <API key>,
      const std::string& request_uuid) override;
  size_t <API key>(const blink::StorageKey& key) override;
  bool <API key>(
      const std::string& script,
      int64_t <API key>,
      content::<API key> callback) override;
  bool <API key>(const blink::StorageKey& key) override;
  void GetAllOriginsInfo(<API key> callback) override;
  void DeleteForStorageKey(const blink::StorageKey& key,
      ResultCallback callback) override;
  void <API key>(const GURL& url,
      const blink::StorageKey& key,
      <API key> callback) override;
  void <API key>(const GURL& url,
      const blink::StorageKey& key,
      <API key> callback) override;
  void <API key>(base::OnceClosure callback) override;
  void StartWorkerForScope(const GURL& scope,
      const blink::StorageKey& key,
      StartWorkerCallback info_callback,
      StatusCodeCallback failure_callback) override;
  void <API key>(
      const GURL& scope,
      const blink::StorageKey& key,
      blink::TransferableMessage message,
      ResultCallback result_callback) override;
  void <API key>(
      const GURL& document_url,
      const blink::StorageKey& key,
      <API key> callback) override;
  void <API key>(
      const blink::StorageKey& key) override;
  void <API key>(base::OnceClosure callback) override;
  const base::flat_map<int64_t /* version_id */,
                       content::<API key>>&
  <API key>() override;

  // content::<API key> (observer interface on the underlying
  // context — these receive the original notifications):
  void <API key>(const GURL& scope) override;
  void <API key>(int64_t registration_id,
      const GURL& scope) override;
  void OnVersionActivated(int64_t version_id, const GURL& scope) override;
  void OnVersionRedundant(int64_t version_id, const GURL& scope) override;
  void <API key>(
      int64_t version_id,
      const content::<API key>& running_info) override;
  void <API key>(int64_t version_id) override;
  void OnControlleeAdded(
      int64_t version_id,
      const std::string& client_uuid,
      const content::<API key>& client_info) override;
  void OnControlleeRemoved(int64_t version_id,
      const std::string& client_uuid) override;
  void OnNoControllees(int64_t version_id, const GURL& scope) override;
  void <API key>(
      int64_t version_id,
      const std::string& uuid,
      content::<API key> <API key>) override;
  void <API key>(int64_t version_id,
      const GURL& scope,
      const content::ConsoleMessage& message) override;
  void OnDestruct(<API key>* context) override;

 private:
  class <API key>;

  // Invoked by a <API key> when it observes that the render process
  // has exited.
  void <API key>(int64_t version_id);

  // Adds a registration to |worker_process_host| that will result in
  // |<API key>| with |version_id| when it exits.
  void <API key>(int64_t version_id,
      content::RenderProcessHost* worker_process_host);

  // Removes a registration made by |<API key>| if one exists,
  // returns true if a registration existed, false otherwise.
  bool <API key>(int64_t version_id);

  base::ScopedObservation<content::<API key>,
                          content::<API key>>
      <API key>{this};

  base::ObserverList<content::<API key>, true, false>::
      Unchecked observer_list_;

  // For each running service worker, tracks when their render process exits.
  base::flat_map<int64_t /*version_id*/, std::unique_ptr<<API key>>>
      <API key>;

  // Tracks the OnControlleeAdded and OnControlleeRemoved notification for each
  // service worker, with the goal of cleaning up duplicate notifications for
  // observers of this class.
  // TODO(1015692): Fix the underlying code in content/browser/service_worker so
  // that duplicate notifications are no longer sent.
  base::flat_map<int64_t /*version_id*/,
                 base::flat_set<std::string /*client_uuid*/>>
      <API key>;

#if DCHECK_IS_ON()
  // Keeps track of service worker whose render process exited early.
  base::flat_set<int64_t> <API key>;
#endif  // DCHECK_IS_ON()
};
} // namespace performance_manager
#endif // <API key>
|
package ibxm;
/* A data array dynamically loaded from an InputStream. */
public class Data {
	// Number of bytes currently held in buffer.
	private int bufLen;
	// Backing byte array; grown on demand by load().
	private byte[] buffer;
	// Source stream, or null when constructed from a fixed byte array.
	private java.io.InputStream stream;

	// Begin streaming: allocate a 64k buffer and fill it from the stream.
	public Data( java.io.InputStream inputStream ) throws java.io.IOException {
		bufLen = 1 << 16;
		buffer = new byte[ bufLen ];
		stream = inputStream;
		readFully( stream, buffer, 0, bufLen );
	}

	// Wrap a fixed, fully-loaded byte array (no further loading possible).
	public Data( byte[] data ) {
		bufLen = data.length;
		buffer = data;
	}

	// Signed byte at offset.
	public byte sByte( int offset ) throws java.io.IOException {
		load( offset, 1 );
		return buffer[ offset ];
	}

	// Unsigned byte at offset (0..255).
	public int uByte( int offset ) throws java.io.IOException {
		load( offset, 1 );
		return buffer[ offset ] & 0xFF;
	}

	// Unsigned big-endian 16-bit value at offset.
	public int ubeShort( int offset ) throws java.io.IOException {
		load( offset, 2 );
		return ( ( buffer[ offset ] & 0xFF ) << 8 ) | ( buffer[ offset + 1 ] & 0xFF );
	}

	// Unsigned little-endian 16-bit value at offset.
	public int uleShort( int offset ) throws java.io.IOException {
		load( offset, 2 );
		return ( buffer[ offset ] & 0xFF ) | ( ( buffer[ offset + 1 ] & 0xFF ) << 8 );
	}

	// Little-endian 32-bit value at offset. The top byte is masked with 0x7F
	// so the result is always non-negative in Java's signed int.
	public int uleInt( int offset ) throws java.io.IOException {
		load( offset, 4 );
		int value = buffer[ offset ] & 0xFF;
		value = value | ( ( buffer[ offset + 1 ] & 0xFF ) << 8 );
		value = value | ( ( buffer[ offset + 2 ] & 0xFF ) << 16 );
		value = value | ( ( buffer[ offset + 3 ] & 0x7F ) << 24 );
		return value;
	}

	// Decode length bytes as ISO-8859-1, replacing control chars with spaces.
	public String strLatin1( int offset, int length ) throws java.io.IOException {
		load( offset, length );
		char[] str = new char[ length ];
		for( int idx = 0; idx < length; idx++ ) {
			int chr = buffer[ offset + idx ] & 0xFF;
			str[ idx ] = chr < 32 ? 32 : ( char ) chr;
		}
		return new String( str );
	}

	// Decode length bytes as code page 850, falling back to Latin-1 when the
	// platform does not support the Cp850 charset.
	public String strCp850( int offset, int length ) throws java.io.IOException {
		load( offset, length );
		try {
			char[] str = new String( buffer, offset, length, "Cp850" ).toCharArray();
			for( int idx = 0; idx < str.length; idx++ ) {
				str[ idx ] = str[ idx ] < 32 ? 32 : str[ idx ];
			}
			return new String( str );
		} catch( java.io.<API key> e ) {
			// NOTE(review): redacted exception type — presumably
			// java.io.UnsupportedEncodingException; confirm against upstream.
			return strLatin1( offset, length );
		}
	}

	// Signed 8-bit samples widened to 16-bit.
	public short[] samS8( int offset, int length ) throws java.io.IOException {
		load( offset, length );
		short[] sampleData = new short[ length ];
		for( int idx = 0; idx < length; idx++ ) {
			sampleData[ idx ] = ( short ) ( buffer[ offset + idx ] << 8 );
		}
		return sampleData;
	}

	// Delta-coded signed 8-bit samples widened to 16-bit.
	public short[] samS8D( int offset, int length ) throws java.io.IOException {
		load( offset, length );
		short[] sampleData = new short[ length ];
		int sam = 0;
		for( int idx = 0; idx < length; idx++ ) {
			sam += buffer[ offset + idx ];
			sampleData[ idx ] = ( short ) ( sam << 8 );
		}
		return sampleData;
	}

	// Unsigned 8-bit samples recentred about zero and widened to 16-bit.
	public short[] samU8( int offset, int length ) throws java.io.IOException {
		load( offset, length );
		short[] sampleData = new short[ length ];
		for( int idx = 0; idx < length; idx++ ) {
			sampleData[ idx ] = ( short ) ( ( ( buffer[ offset + idx ] & 0xFF ) - 128 ) << 8 );
		}
		return sampleData;
	}

	// Signed little-endian 16-bit samples.
	public short[] samS16( int offset, int samples ) throws java.io.IOException {
		load( offset, samples * 2 );
		short[] sampleData = new short[ samples ];
		for( int idx = 0; idx < samples; idx++ ) {
			sampleData[ idx ] = ( short ) ( ( buffer[ offset + idx * 2 ] & 0xFF ) | ( buffer[ offset + idx * 2 + 1 ] << 8 ) );
		}
		return sampleData;
	}

	// Delta-coded signed little-endian 16-bit samples.
	public short[] samS16D( int offset, int samples ) throws java.io.IOException {
		load( offset, samples * 2 );
		short[] sampleData = new short[ samples ];
		int sam = 0;
		for( int idx = 0; idx < samples; idx++ ) {
			sam += ( buffer[ offset + idx * 2 ] & 0xFF ) | ( buffer[ offset + idx * 2 + 1 ] << 8 );
			sampleData[ idx ] = ( short ) sam;
		}
		return sampleData;
	}

	// Unsigned little-endian 16-bit samples recentred about zero.
	public short[] samU16( int offset, int samples ) throws java.io.IOException {
		load( offset, samples * 2 );
		short[] sampleData = new short[ samples ];
		for( int idx = 0; idx < samples; idx++ ) {
			int sam = ( buffer[ offset + idx * 2 ] & 0xFF ) | ( ( buffer[ offset + idx * 2 + 1 ] & 0xFF ) << 8 );
			sampleData[ idx ] = ( short ) ( sam - 32768 );
		}
		return sampleData;
	}

	// Ensure bytes [offset, offset+length) are buffered, doubling the buffer
	// and reading more from the stream (when one is present) as needed.
	private void load( int offset, int length ) throws java.io.IOException {
		while( offset + length > bufLen ) {
			int newBufLen = bufLen << 1;
			byte[] newBuf = new byte[ newBufLen ];
			System.arraycopy( buffer, 0, newBuf, 0, bufLen );
			if( stream != null ) {
				readFully( stream, newBuf, bufLen, newBufLen - bufLen );
			}
			bufLen = newBufLen;
			buffer = newBuf;
		}
	}

	// Read until length bytes are consumed or the stream is exhausted.
	// NOTE(review): end-of-stream (read() == -1) exits silently, leaving the
	// tail of the buffer zero-filled — this appears to be deliberate
	// tolerance of truncated inputs; confirm before "fixing".
	private static void readFully( java.io.InputStream inputStream, byte[] buffer, int offset, int length ) throws java.io.IOException {
		int read = 1, end = offset + length;
		while( read > 0 ) {
			read = inputStream.read( buffer, offset, end - offset );
			offset += read;
		}
	}
}
|
#include "components/spellcheck/browser/android/<API key>.h"
#include "base/android/jni_android.h"
#include "base/android/jni_registrar.h"
#include "components/spellcheck/browser/<API key>.h"
namespace spellcheck {
namespace android {
// Table of JNI registration hooks for the Android spellcheck component.
static base::android::RegistrationMethod <API key>[] = {
    {"<API key>", <API key>::RegisterJNI},
};

// Registers every native method listed in the table above with |env|.
// Returns false if any registration fails.
bool <API key>(JNIEnv* env) {
  return base::android::<API key>(
      env, <API key>,
      std::size(<API key>));
}
} // namespace android
} // namespace spellcheck
|
export { enableDebugTools, disableDebugTools } from 'angular2/src/tools/tools';
|
# Presenter for a question whose response should be rendered via ActionView's
# NumberHelper. NOTE(review): class and helper names are redacted in this copy;
# confirm which NumberHelper method is being delegated to.
class <API key> < QuestionPresenter
  include ActionView::Helpers::NumberHelper

  # Format the raw response +value+ for display.
  def response_label(value)
    <API key>(value)
  end
end
|
#if !NETSTANDARD2_0
using OfficeDevPnP.Core.IdentityModel.WSTrustBindings;
using System;
using System.IdentityModel.Protocols.WSTrust;
using System.IdentityModel.Tokens;
using System.Net;
using System.ServiceModel;
using System.ServiceModel.Security;
namespace OfficeDevPnP.Core.IdentityModel.TokenProviders.ADFS
{
/// <summary>
/// ADFS Active authentication based on username + password. Uses the trust/13/usernamemixed ADFS endpoint.
/// </summary>
public class UsernameMixed : BaseProvider
{
    /// <summary>
    /// Performs active authentication against ADFS using the trust/13/usernamemixed ADFS endpoint.
    /// </summary>
    /// <param name="siteUrl">Url of the SharePoint site that's secured via ADFS</param>
    /// <param name="userName">Name of the user (e.g. domain\administrator) </param>
    /// <param name="password">Password of th user</param>
    /// <param name="userNameMixed">Uri to the ADFS usernamemixed endpoint</param>
    /// <param name="<API key>">Identifier of the ADFS relying party that we're hitting</param>
    /// <param name="logon<API key>>Logon TokenCache expiration window integer value</param>
    /// <returns>A cookiecontainer holding the FedAuth cookie</returns>
    public CookieContainer GetFedAuthCookie(string siteUrl, string userName, string password, Uri userNameMixed, string <API key>, int logon<API key>)
    {
        UsernameMixed adfsTokenProvider = new UsernameMixed();
        // NOTE(review): the next statement was garbled by redaction — the
        // assigned variable name (presumably `token`) is missing; restore it
        // from the original source.
        var <API key>.RequestToken(userName, password, userNameMixed, <API key>);
        string fedAuthValue = <API key>(token.TokenXml.OuterXml, siteUrl, <API key>);
        // Construct the cookie expiration date
        TimeSpan lifeTime = SamlTokenlifeTime(token.TokenXml.OuterXml);
        if (lifeTime == TimeSpan.Zero)
        {
            // Default to one hour when the SAML token carries no lifetime.
            lifeTime = new TimeSpan(0, 60, 0);
        }
        // Cookie lives for the shorter of the token lifetime and the
        // configured logon window.
        int cookieLifeTime = Math.Min((int)lifeTime.TotalMinutes, logon<API key>);
        DateTime expiresOn = DateTime.Now.AddMinutes(cookieLifeTime);
        CookieContainer cc = null;
        if (!string.IsNullOrEmpty(fedAuthValue))
        {
            cc = new CookieContainer();
            Cookie samlAuth = new Cookie("FedAuth", fedAuthValue);
            samlAuth.Expires = expiresOn;
            samlAuth.Path = "/";
            samlAuth.Secure = true;
            samlAuth.HttpOnly = true;
            Uri samlUri = new Uri(siteUrl);
            samlAuth.Domain = samlUri.Host;
            cc.Add(samlAuth);
        }
        return cc;
    }

    /// <summary>
    /// Issues a WS-Trust 1.3 bearer-token request against the usernamemixed
    /// endpoint using the supplied credentials.
    /// </summary>
    private <API key> RequestToken(string userName, string passWord, Uri userNameMixed, string <API key>)
    {
        <API key> genericToken = null;
        using (var factory = new <API key>(new <API key>(SecurityMode.<API key>), new EndpointAddress(userNameMixed)))
        {
            factory.TrustVersion = TrustVersion.WSTrust13;
            // Hookup the user and password
            factory.Credentials.UserName.UserName = userName;
            factory.Credentials.UserName.Password = passWord;
            var <API key> = new <API key>
            {
                RequestType = RequestTypes.Issue,
                AppliesTo = new EndpointReference(<API key>),
                KeyType = KeyTypes.Bearer
            };
            <API key> channel = factory.CreateChannel();
            genericToken = channel.Issue(<API key>) as <API key>;
            factory.Close();
        }
        return genericToken;
    }
}
}
#endif
|
# -*- coding: utf-8 -*-
"""API Request cache tests."""
from __future__ import unicode_literals
__version__ = '$Id$'
from pywikibot.site import BaseSite
import scripts.maintenance.cache as cache
from tests import _cache_dir
from tests.aspects import unittest, TestCase
class RequestCacheTests(TestCase):

    """Validate cache entries."""

    net = False

    def _check_cache_entry(self, entry):
        """Assert that a single cache entry is well-formed."""
        site = entry.site
        self.assertIsInstance(site, BaseSite)
        self.assertIsInstance(site._loginstatus, int)
        self.assertIsInstance(site._username, list)
        if site._loginstatus >= 1:
            self.assertIsNotNone(site._username[0])
        params = entry._params
        self.assertIsInstance(params, dict)
        self.assertIsNotNone(params)
        # TODO: more tests on entry._params, and possibly fixes needed
        # to make it closely replicate the original object.

    def test_cache(self):
        """Test the apicache by running _check_cache_entry over each entry."""
        cache.process_entries(_cache_dir, self._check_cache_entry)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
#!/bin/bash

# Run from this script's directory so the relative helper path resolves.
cd "$(dirname "${BASH_SOURCE[0]}")" && . "../../utils.sh"

print_in_purple "\n Dashboard\n\n"

execute "defaults write com.apple.dashboard mcx-disabled -bool true" "Disable Dashboard"

# `killall Dashboard` doesn't actually do anything. To apply the
# changes for `Dashboard`, `killall Dock` is enough as `Dock` is
# `Dashboard`'s parent process.
killall "Dock" &> /dev/null
|
var expect = require('expect.js');
var path = require('path');
var fs = require('../extfs');

// Test suite for the extfs module: directory listing, emptiness checks and
// recursive removal.
describe('extfs', function () {
  var rootPath = path.join(__dirname, '../');

  it('should return all directories', function (done) {
    fs.getDirs(rootPath, function (err, dirs) {
      expect(dirs).to.be.an(Array);
      expect(dirs.length).to.be.greaterThan(0);
      done();
    });
  });

  it('should return all directories sync', function () {
    var dirs = fs.getDirsSync(rootPath);
    expect(dirs).to.be.an(Array);
    expect(dirs.length > 0).to.be.ok();
  });

  it('should check if a file is empty', function (done) {
    var notEmptyFile = path.join(__dirname, '../README.md');
    var emptyFile = './AN EMPTY FILE';
    fs.isEmpty(notEmptyFile, function (empty) {
      expect(empty).to.be(false);
      fs.isEmpty(emptyFile, function (empty) {
        expect(empty).to.be(true);
        done();
      });
    });
  });

  it('should check if a file is empty sync', function () {
    var notEmptyFile = path.join(__dirname, '../README.md');
    var emptyFile = './AN EMPTY FILE';
    var empty = fs.isEmptySync(notEmptyFile);
    expect(empty).to.be(false);
    empty = fs.isEmptySync(emptyFile);
    expect(empty).to.be(true);
  });

  it('should check if a directory is empty', function (done) {
    var notEmptyDir = __dirname;
    var emptyDir = './AN EMPTY DIR';
    fs.isEmpty(notEmptyDir, function (empty) {
      expect(empty).to.be(false);
      fs.isEmpty(emptyDir, function (empty) {
        expect(empty).to.be(true);
        done();
      });
    });
  });

  it('should check if a directory is empty sync', function () {
    var notEmptyDir = __dirname;
    var emptyDir = './AN EMPTY DIR';
    expect(fs.isEmptySync(notEmptyDir)).to.be(false);
    expect(fs.isEmptySync(emptyDir)).to.be(true);
  });

  describe('remove directories', function () {
    var tmpPath = path.join(rootPath, 'tmp');
    var folders = [ 'folder1', 'folder2', 'folder3' ];
    var files = [ '1.txt', '2.txt', '3.txt' ];

    folders = folders.map(function (folder) {
      return path.join(tmpPath, folder);
    });

    /**
     * Create 3 folders with 3 files each.
     */
    beforeEach(function () {
      if (!fs.existsSync(tmpPath)) {
        fs.mkdirSync(tmpPath, '0755');
      }
      folders.forEach(function (folder) {
        if (!fs.existsSync(folder)) {
          fs.mkdirSync(folder, '0755');
        }
        files.forEach(function (file) {
          // BUGFIX: fs.writeFile (async, no callback) raced with the tests —
          // fixtures were not guaranteed to exist when a test ran, and modern
          // Node throws when the callback is omitted. Write synchronously so
          // beforeEach completes only after all fixtures are on disk.
          fs.writeFileSync(path.join(folder, file), 'file content');
        });
      });
    });

    it('should remove a non empty directory', function (done) {
      fs.remove(tmpPath, function (err) {
        expect(err).to.be(null);
        expect(fs.existsSync(tmpPath)).to.be(false);
        done();
      });
    });

    it('should remove a non empty directory synchronously', function () {
      fs.removeSync(tmpPath);
      expect(fs.existsSync(tmpPath)).to.be(false);
    });

    it('should remove an array of directories', function (done) {
      fs.remove(folders, function (err) {
        expect(err).to.be(null);
        expect(fs.existsSync(folders[0])).to.be(false);
        expect(fs.existsSync(folders[1])).to.be(false);
        expect(fs.existsSync(folders[2])).to.be(false);
        expect(fs.existsSync(tmpPath)).to.be(true);
        done();
      });
    });

    it('should remove an array of directories synchronously', function () {
      fs.removeSync(folders);
      expect(fs.existsSync(folders[0])).to.be(false);
      expect(fs.existsSync(folders[1])).to.be(false);
      expect(fs.existsSync(folders[2])).to.be(false);
      expect(fs.existsSync(tmpPath)).to.be(true);
    });
  });

  it('should extends to fs', function () {
    expect(fs.readdir).to.be.a(Function);
  });
});
|
require 'set'
require 'tsort'
module Librarian
class ManifestSet
class GraphHash < Hash
include TSort
alias tsort_each_node each_key
def tsort_each_child(node, &block)
self[node].each(&block)
end
end
class << self
def shallow_strip(manifests, names)
new(manifests).shallow_strip!(names).send(method_for(manifests))
end
def deep_strip(manifests, names)
new(manifests).deep_strip!(names).send(method_for(manifests))
end
def shallow_keep(manifests, names)
new(manifests).shallow_keep!(names).send(method_for(manifests))
end
def deep_keep(manifests, names)
new(manifests).deep_keep!(names).send(method_for(manifests))
end
def sort(manifests)
manifests = Hash[manifests.map{|m| [m.name, m]}] if Array === manifests
manifest_pairs = GraphHash[manifests.map{|k, m| [k, m.dependencies.map{|d| d.name}]}]
manifest_names = manifest_pairs.tsort
manifest_names.map{|n| manifests[n]}
end
private
def method_for(manifests)
case manifests
when Hash
:to_hash
when Array
:to_a
end
end
end
def initialize(manifests)
self.index = Hash === manifests ? manifests.dup : Hash[manifests.map{|m| [m.name, m]}]
end
def to_a
index.values
end
def to_hash
index.dup
end
def dup
self.class.new(index)
end
def shallow_strip(names)
dup.shallow_strip!(names)
end
def shallow_strip!(names)
assert_strings!(names)
names.each do |name|
index.delete(name)
end
self
end
def deep_strip(names)
dup.deep_strip!(names)
end
def deep_strip!(names)
names = Array === names ? names.dup : names.to_a
assert_strings!(names)
strippables = dependencies_of(names)
shallow_strip!(strippables)
self
end
def shallow_keep(names)
dup.shallow_keep!(names)
end
def shallow_keep!(names)
assert_strings!(names)
names = Set.new(names) unless Set === names
index.reject! { |k, v| !names.include?(k) }
self
end
def deep_keep(names)
dup.conservative_strip!(names)
end
def deep_keep!(names)
names = Array === names ? names.dup : names.to_a
assert_strings!(names)
keepables = dependencies_of(names)
shallow_keep!(keepables)
self
end
def consistent?
index.values.all? do |manifest|
in_compliance_with?(manifest.dependencies)
end
end
def in_compliance_with?(dependencies)
dependencies.all? do |dependency|
manifest = index[dependency.name]
manifest && manifest.satisfies?(dependency)
end
end
private
attr_accessor :index
def assert_strings!(names)
non_strings = names.reject{|name| String === name}
non_strings.empty? or raise TypeError, "names must all be strings"
end
# Straightforward breadth-first graph traversal algorithm.
def dependencies_of(names)
names = Array === names ? names.dup : names.to_a
assert_strings!(names)
deps = Set.new
until names.empty?
name = names.shift
next if deps.include?(name)
deps << name
names.concat index[name].dependencies.map(&:name)
end
deps.to_a
end
end
end
|
<?php
/**
 * Block wrapper whose constructor only points the (redacted) parent at the
 * 'catalogsearch' group. NOTE(review): class and parent names are redacted;
 * confirm the parent's init() contract before editing.
 */
class <API key> extends <API key>
{
    public function __construct()
    {
        $this->init('catalogsearch');
    }
}
|
<?php
/**
 * Empty subclass — exists so the framework can resolve this class name by
 * convention; all behavior is inherited from the (redacted) parent.
 */
class <API key> extends <API key>
{
}
|
// Register each scenario suite under its own describe block. Order matters:
// suites run in the order listed here.
[
  ["BASIC CRUD SCENARIOS", "./basic"],
  ["VALIDATE CRUD SCENARIOS", "./validation"],
  ["REPORT SCENARIOS", "./report"]
].forEach(function (suite) {
  describe(suite[0], function () {
    require(suite[1]);
  });
});
|
// Integration tests for driver behavior across a server kill/restart cycle.
// Select the native or pure-JS BSON parser based on the environment.
var mongodb = process.env['TEST_NATIVE'] != null ? require('../../lib/mongodb').native() : require('../../lib/mongodb').pure();
var testCase = require('../../deps/nodeunit').testCase,
  debug = require('util').debug,
  inspect = require('util').inspect,
  nodeunit = require('../../deps/nodeunit'),
  gleak = require('../../tools/gleak'),
  Db = mongodb.Db,
  Cursor = mongodb.Cursor,
  Collection = mongodb.Collection,
  Server = mongodb.Server,
  ServerManager = require('../../test/tools/server_manager').ServerManager,
  Step = require("../../deps/step/lib/step");

var MONGODB = 'integration_tests';
var client = new Db(MONGODB, new Server("127.0.0.1", 27017, {auto_reconnect: true, poolSize: 1}), {native_parser: (process.env['TEST_NATIVE'] != null)});
var serverManager = null;

// Define the tests, we want them to run as a nested test so we only clean up the
// db connection once
var tests = testCase({
  setUp: function(callback) {
    callback();
  },

  tearDown: function(callback) {
    // serverManager.stop(9, function(err, result) {
    callback();
  },

  // Keeps inserting every 500ms while the server is killed and restarted;
  // verifies that some inserts succeeded despite the outage.
  <API key> : function(test) {
    var db1 = new Db('<API key>', new Server("127.0.0.1", 27017, {auto_reconnect: true}), {native_parser: (process.env['TEST_NATIVE'] != null)});
    // All inserted docs
    var docs = [];
    var errs = [];
    var insertDocs = [];
    // Start server
    serverManager = new ServerManager({auth:false, purgedirectories:true, journal:true})
    serverManager.start(true, function() {
      db1.open(function(err, db) {
        // Startup the insert of documents
        var intervalId = setInterval(function() {
          db.collection('inserts', function(err, collection) {
            var doc = {timestamp:new Date().getTime()};
            insertDocs.push(doc);
            // Insert document
            collection.insert(doc, {safe:{fsync:true}}, function(err, result) {
              // Save errors
              if(err != null) errs.push(err);
              if(err == null) {
                docs.push(result[0]);
              }
            })
          });
        }, 500);

        // Wait for a second and then kill the server
        setTimeout(function() {
          // Kill server instance
          serverManager.stop(9, function(err, result) {
            // Server down for 1 second
            setTimeout(function() {
              // Restart server
              serverManager = new ServerManager({auth:false, purgedirectories:false, journal:true});
              serverManager.start(true, function() {
                // Wait for it
                setTimeout(function() {
                  // Drop db
                  db.dropDatabase(function(err, result) {
                    // Close db
                    db.close();
                    // Check that we got at least one error
                    // test.ok(errs.length > 0);
                    test.ok(docs.length > 0);
                    test.ok(insertDocs.length > 0);
                    // Finish up
                    test.done();
                  });
                }, 5000)
              })
            }, 1000);
          });
        }, 3000);
      })
    });
  },

  // Inserts once, kills the server mid-session, expects the next insert to
  // fail, restarts the server, and verifies a retried insert succeeds.
  <API key> : function(test) {
    var db = new Db('<API key>', new Server("127.0.0.1", 27017, {auto_reconnect: true}), {numberOfRetries:3, retryMiliSeconds:500, native_parser: (process.env['TEST_NATIVE'] != null)});
    // All inserted docs
    var docs = [];
    var errs = [];
    var insertDocs = [];
    // Start server
    serverManager = new ServerManager({auth:false, purgedirectories:true, journal:true})
    serverManager.start(true, function() {
      db.open(function(err, db) {
        // Add an error handler
        db.on("error", function(err) {
          // NOTE(review): the next line's string literal was truncated in this
          // copy of the file and will not parse as-is — restore the original
          // log message from upstream.
          console.log("
          console.dir(err)
          errs.push(err);
        });

        db.collection('inserts', function(err, collection) {
          var doc = {timestamp:new Date().getTime(), a:1};
          collection.insert(doc, {safe:true}, function(err, result) {
            test.equal(null, err);
            // Kill server instance
            serverManager.stop(9, function(err, result) {
              // Attemp insert (should timeout)
              var doc = {timestamp:new Date().getTime(), b:1};
              collection.insert(doc, {safe:true}, function(err, result) {
                test.ok(err != null);
                test.equal(null, result);
                // Restart server
                serverManager = new ServerManager({auth:false, purgedirectories:false, journal:true});
                serverManager.start(true, function() {
                  // Attemp insert again
                  collection.insert(doc, {safe:true}, function(err, result) {
                    // Fetch the documents
                    collection.find({b:1}).toArray(function(err, items) {
                      test.equal(null, err);
                      test.equal(1, items[0].b);
                      test.done();
                    });
                  });
                });
              });
            });
          })
        });
      });
    });
  },

  // Guard test: fails if any of the preceding tests leaked globals.
  noGlobalsLeaked : function(test) {
    var leaks = gleak.detectNew();
    test.equal(0, leaks.length, "global var leak detected: " + leaks.join(', '));
    test.done();
  }
})

// Assign out tests
module.exports = tests;
|
// Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.ResourceManager.Models
{
    using System.Linq;

    /// <summary>
    /// Deployment operation information.
    /// </summary>
    public partial class DeploymentOperation
    {
        /// <summary>
        /// Initializes a new instance of the DeploymentOperation class.
        /// </summary>
        public DeploymentOperation() { }

        /// <summary>
        /// Initializes a new instance of the DeploymentOperation class.
        /// </summary>
        /// <param name="id">Full deployment operation ID.</param>
        /// <param name="operationId">Deployment operation ID.</param>
        /// <param name="properties">Deployment properties.</param>
        public DeploymentOperation(string id = default(string), string operationId = default(string), <API key> properties = default(<API key>))
        {
            Id = id;
            OperationId = operationId;
            Properties = properties;
        }

        /// <summary>
        /// Gets full deployment operation ID.
        /// </summary>
        [Newtonsoft.Json.JsonProperty(PropertyName = "id")]
        public string Id { get; private set; }

        /// <summary>
        /// Gets deployment operation ID.
        /// </summary>
        [Newtonsoft.Json.JsonProperty(PropertyName = "operationId")]
        public string OperationId { get; private set; }

        /// <summary>
        /// Gets or sets deployment properties.
        /// </summary>
        [Newtonsoft.Json.JsonProperty(PropertyName = "properties")]
        public <API key> Properties { get; set; }
    }
}
|
package com.iluwatar.factorykit;
/**
 * Concrete {@code Weapon} produced by the factory-kit example.
 */
public class Bow implements Weapon {

  /** Display name of this weapon. */
  private static final String NAME = "Bow";

  @Override
  public String toString() {
    return NAME;
  }
}
|
Dir[File.join(Rails.root, "lib", "core_ext", "*.rb")].each {|l| require l }
|
namespace System.Runtime.CompilerServices
{
    using System;
    using System.Reflection;

    // This Enum matchs the miImpl flags defined in corhdr.h. It is used to specify
    // certain method properties. Each member mirrors the (redacted) value from
    // System.Reflection's implementation-flags enum.
    [Flags]
    [Serializable]
    public enum MethodImplOptions
    {
        Unmanaged       = <API key>.Unmanaged ,
        ForwardRef      = <API key>.ForwardRef ,
        PreserveSig     = <API key>.PreserveSig ,
        InternalCall    = <API key>.InternalCall,
        Synchronized    = <API key>.Synchronized,
        NoInlining      = <API key>.NoInlining ,
    }

    // Mirrors the code-type portion of the reflection implementation flags.
    [Serializable]
    public enum MethodCodeType
    {
        IL      = System.Reflection.<API key>.IL ,
        Native  = System.Reflection.<API key>.Native ,
        /// <internalonly/>
        OPTIL   = System.Reflection.<API key>.OPTIL ,
        Runtime = System.Reflection.<API key>.Runtime,
    }

    // Custom attribute to specify additional method properties.
    [Serializable]
    [AttributeUsage( AttributeTargets.Method | AttributeTargets.Constructor, Inherited = false )]
    sealed public class MethodImplAttribute : Attribute
    {
        internal MethodImplOptions m_val;
        public MethodCodeType MethodCodeType;

        // Internal constructor: masks the raw flags down to the options this
        // attribute can legally express.
        internal MethodImplAttribute( <API key> <API key> )
        {
            MethodImplOptions all = MethodImplOptions.Unmanaged | MethodImplOptions.ForwardRef | MethodImplOptions.PreserveSig |
                MethodImplOptions.InternalCall | MethodImplOptions.Synchronized | MethodImplOptions.NoInlining;
            m_val = ((MethodImplOptions)<API key>) & all;
        }

        public MethodImplAttribute( MethodImplOptions methodImplOptions )
        {
            m_val = methodImplOptions;
        }

        // public MethodImplAttribute( short value )
        // {
        //     m_val = (MethodImplOptions)value;
        // }

        public MethodImplAttribute()
        {
        }

        public MethodImplOptions Value
        {
            get
            {
                return m_val;
            }
        }
    }
}
|
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Threading;
using Microsoft.Build.Framework;
using Microsoft.Build.BuildEngine.Shared;
using System.Security.AccessControl;
namespace Microsoft.Build.BuildEngine
{
<summary>
This class hosts a node class in the child process. It uses shared memory to communicate
with the local node provider.
Wraps a Node.
</summary>
public class LocalNode
{
#region Static Constructors
<summary>
Hook up an unhandled exception handler, in case our error handling paths are leaky
</summary>
static LocalNode()
{
AppDomain currentDomain = AppDomain.CurrentDomain;
currentDomain.UnhandledException += new <API key>(<API key>);
}
#endregion
#region Static Methods
<summary>
Dump any unhandled exceptions to a file so they can be diagnosed
</summary>
private static void <API key>(object sender, <API key> e)
{
Exception ex = (Exception)e.ExceptionObject;
DumpExceptionToFile(ex);
}
<summary>
Dump the exception information to a file
</summary>
internal static void DumpExceptionToFile(Exception ex)
{
// Lock as multiple threads may throw simultaneously
lock (dumpFileLocker)
{
if (dumpFileName == null)
{
Guid guid = Guid.NewGuid();
string tempPath = Path.GetTempPath();
// For some reason we get Watson buckets because GetTempPath gives us a folder here that doesn't exist.
// Either because %TMP% is misdefined, or because they deleted the temp folder during the build.
if (!Directory.Exists(tempPath))
{
// If this throws, no sense catching it, we can't log it now, and we're here
// because we're a child node with no console to log to, so die
Directory.CreateDirectory(tempPath);
}
dumpFileName = Path.Combine(tempPath, "MSBuild_" + guid.ToString());
using (StreamWriter writer = new StreamWriter(dumpFileName, true /*append*/))
{
writer.WriteLine("UNHANDLED EXCEPTIONS FROM CHILD NODE:");
writer.WriteLine("===================");
}
}
using (StreamWriter writer = new StreamWriter(dumpFileName, true /*append*/))
{
writer.WriteLine(DateTime.Now.ToLongTimeString());
writer.WriteLine(ex.ToString());
writer.WriteLine("===================");
}
}
}
#endregion
#region Constructors
<summary>
Creates an instance of this class.
</summary>
internal LocalNode(int nodeNumberIn)
{
this.nodeNumber = nodeNumberIn;
engineCallback = new LocalNodeCallback(<API key>, this);
}
#endregion
#region Communication Methods
<summary>
This method causes the reader and writer threads to start and create the shared memory structures
</summary>
void <API key>()
{
// The writer thread should be created before the
// reader thread because some <API key>
// assume the shared memory for the writer thread
// has already been created. The method will both
// instantiate the shared memory for the writer
// thread and also start the writer thread itself.
// We will verifyThrow in the method if the
// sharedMemory was not created correctly.
engineCallback.StartWriterThread(nodeNumber);
// Create the shared memory buffer
this.sharedMemory =
new SharedMemory
(
// Generate the name for the shared memory region
<API key>.NodeInputMemoryName(nodeNumber),
SharedMemoryType.ReadOnly,
// Reuse an existing shared memory region as it should have already
true
);
ErrorUtilities.VerifyThrow(this.sharedMemory.IsUsable,
"Failed to create shared memory for local node input.");
// Start the thread that will be processing the calls from the parent engine
ThreadStart threadState = new ThreadStart(this.<API key>);
readerThread = new Thread(threadState);
readerThread.Name = "MSBuild Child<-Parent Reader";
readerThread.Start();
}
<summary>
This method causes the reader and writer threads to exit and dispose of the shared memory structures
</summary>
// Signals both communication threads to exit and blocks until they have
// terminated, then re-arms the exit event for the next start/stop cycle.
void <API key>()
{
<API key>.Set();
// Wait for communication threads to exit
Thread writerThread = engineCallback.GetWriterThread();
// The threads may not exist if the child has timed out before the parent has told the node
// to start up its communication threads. This can happen if the node is started with /nodemode:x
// and no parent is running, or if the parent node has spawned a new process and then crashed
// before establishing communication with the child node.
if(writerThread != null)
{
writerThread.Join();
}
if (readerThread != null)
{
readerThread.Join();
}
// Make sure the exit event is not set
<API key>.Reset();
}
#endregion
#region Startup Methods
<summary>
Create global events necessary for handshaking with the parent
</summary>
<param name="nodeNumber"></param>
<returns>True if events created successfully and false otherwise</returns>
// Creates the named, machine-global events used for handshaking with the
// parent process. Returns false if this process did not create the initiation
// event itself (i.e. another process already owns that name).
private static bool CreateGlobalEvents(int nodeNumber)
{
bool createdNew = false;
if (NativeMethods.IsUserAdministrator())
{
<API key> mSec = new <API key>();
// Add a rule that grants the access only to admins and systems
mSec.<API key>(NativeMethods.ADMINONLYSDDL);
// Create an initiation event to allow the parent side to prove to the child that we have the same level of privilege as it does.
<API key> = new EventWaitHandle(false, EventResetMode.ManualReset, <API key>.<API key>(nodeNumber), out createdNew, mSec);
}
else
{
// Create an initiation event to allow the parent side to prove to the child that we have the same level of privilege as it does.
<API key> = new EventWaitHandle(false, EventResetMode.ManualReset, <API key>.<API key>(nodeNumber), out createdNew);
}
// This process must be the creator of the event to prevent squatting by a lower privileged attacker
if (!createdNew)
{
return false;
}
// Informs the parent process that the child process has been created.
globalNodeActive = new EventWaitHandle(false, EventResetMode.ManualReset, <API key>.NodeActiveEventName(nodeNumber));
globalNodeActive.Set();
// Indicate to the parent process, this node is currently ready to start to receive requests
globalNodeInUse = new EventWaitHandle(false, EventResetMode.ManualReset, <API key>.NodeInUseEventName(nodeNumber));
// Used by the parent process to inform the child process to shutdown due to the child process
// not receiving the initialization command.
<API key> = new EventWaitHandle(false, EventResetMode.ManualReset, <API key>.<API key>(nodeNumber));
// Inform the parent process the node has started its communication threads.
globalNodeActivate = new EventWaitHandle(false, EventResetMode.ManualReset, <API key>.<API key>(nodeNumber));
return true;
}
<summary>
This function starts local node when process is launched and shuts it down on time out
Called by msbuild.exe.
</summary>
// Entry point called by msbuild.exe when launched as a child node. Creates the
// global handshaking events, then runs the node's top-level wait loop until a
// shutdown/error event fires or the inactivity timeout elapses.
// Wait-index meanings below: 0 = shutdown, 1 = error/exit request,
// 2 = node reserved by a parent, 3 = activate communication threads.
public static void <API key>(int nodeNumber)
{
// Create global events necessary for handshaking with the parent
if (!CreateGlobalEvents(nodeNumber))
{
return;
}
LocalNode localNode = new LocalNode(nodeNumber);
WaitHandle[] waitHandles = new WaitHandle[4];
waitHandles[0] = shutdownEvent;
waitHandles[1] = <API key>;
waitHandles[2] = inUseEvent;
waitHandles[3] = <API key>;
// This is necessary to make build.exe finish promptly. Don't remove.
if (!Engine.debugMode)
{
// Create null streams for the current input/output/error streams
Console.SetOut(new StreamWriter(Stream.Null));
Console.SetError(new StreamWriter(Stream.Null));
Console.SetIn(new StreamReader(Stream.Null));
}
bool continueRunning = true;
while (continueRunning)
{
// WaitTimeout here means 60s of inactivity: shut down politely.
int eventType = WaitHandle.WaitAny(waitHandles, inactivityTimeout, false);
if (eventType == 0 || eventType == 1 || eventType == WaitHandle.WaitTimeout)
{
continueRunning = false;
localNode.ShutdownNode(eventType != 1 ?
Node.NodeShutdownLevel.PoliteShutdown :
Node.NodeShutdownLevel.ErrorShutdown, true, true);
}
else if (eventType == 2)
{
// reset the event as we do not want it to go into this state again when we are done with this if statement.
inUseEvent.Reset();
// The parent knows at this point the child process has been launched
globalNodeActivate.Reset();
// Set the global inuse event so other parent processes know this node is now initialized
globalNodeInUse.Set();
// Make a copy of the parent's handle to protect ourselves in case the parent dies,
// this is to prevent a parent from reserving a node another parent is trying to use.
<API key> =
new EventWaitHandle(false, EventResetMode.ManualReset, <API key>.<API key>(nodeNumber));
WaitHandle[] waitHandlesActive = new WaitHandle[3];
waitHandlesActive[0] = shutdownEvent;
waitHandlesActive[1] = <API key>;
waitHandlesActive[2] = notInUseEvent;
eventType = WaitHandle.WaitTimeout;
// Inner loop: while reserved/in use, poll every parentCheckInterval to
// detect a dead parent process.
while (eventType == WaitHandle.WaitTimeout && continueRunning == true)
{
eventType = WaitHandle.WaitAny(waitHandlesActive, parentCheckInterval, false);
if (eventType == 0 || /* nice shutdown due to shutdownEvent */
eventType == 1 || /* error shutdown due to <API key> */
eventType == WaitHandle.WaitTimeout && !localNode.<API key>())
{
continueRunning = false;
// If the exit is not triggered by running of shutdown method
if (eventType != 0)
{
localNode.ShutdownNode(Node.NodeShutdownLevel.ErrorShutdown, true, true);
}
}
else if (eventType == 2)
{
// Trigger a collection before the node goes idle to ensure that
// the memory is released to the system as soon as possible
GC.Collect();
// Change the current directory to a safe one so that the directory
// last used by the build can be safely deleted. We must have read
// access to the safe directory so use SystemDirectory for this purpose.
Directory.SetCurrentDirectory(Environment.SystemDirectory);
notInUseEvent.Reset();
globalNodeInUse.Reset();
}
}
ErrorUtilities.VerifyThrow(localNode.node == null,
"Expected either node to be null or continueRunning to be false.");
// Stop the communication threads and release the shared memory object so that the next parent can create it
localNode.<API key>();
// Close the local copy of the reservation handle (this allows another parent to reserve
// the node)
<API key>.Close();
<API key> = null;
}
else if (eventType == 3)
{
<API key>.Reset();
localNode.<API key>();
globalNodeActivate.Set();
}
}
// Stop the communication threads and release the shared memory object so that the next parent can create it
localNode.<API key>();
globalNodeActive.Close();
globalNodeInUse.Close();
}
#endregion
#region Methods
<summary>
This method is run in its own thread, it is responsible for reading messages sent from the parent process
through the shared memory region.
</summary>
// Body of the reader thread: blocks on either the exit event or the shared
// memory "data ready" flag, dispatches each received call descriptor, and
// disposes the shared memory buffer on the way out.
private void <API key>()
{
// Create an array of event to the node thread responds
WaitHandle[] waitHandles = new WaitHandle[2];
waitHandles[0] = <API key>;
waitHandles[1] = sharedMemory.ReadFlag;
bool continueExecution = true;
try
{
while (continueExecution)
{
// Wait for the next work item or an exit command
int eventType = WaitHandle.WaitAny(waitHandles);
if (eventType == 0)
{
// Exit node event
continueExecution = false;
}
else
{
// Read the list of <API key> from sharedMemory,
// this will be null if a large object is being read from shared
// memory and will continue to be null until the large object has
// been completely sent.
IList <API key> = sharedMemory.Read();
if (<API key> != null)
{
foreach (LocalCallDescriptor callDescriptor in <API key>)
{
// Execute the command method which relates to running on a child node
callDescriptor.NodeAction(node, this);
if ((callDescriptor.IsReply) && (callDescriptor is <API key>))
{
// Process the reply from the parent so it can be looked in a hashtable based
// on the call descriptor who requested the reply.
engineCallback.PostReplyFromParent((<API key>) callDescriptor);
}
}
}
}
}
}
catch (Exception e)
{
// Will rethrow the exception if necessary
<API key>(e);
}
// Dispose of the shared memory buffer
if (sharedMemory != null)
{
sharedMemory.Dispose();
sharedMemory = null;
}
}
<summary>
This method will shutdown the node being hosted by the child process and notify the parent process if requested,
</summary>
<param name="shutdownLevel">What kind of shutdown is causing the child node to shutdown</param>
<param name="exitProcess">should the child process exit as part of the shutdown process</param>
<param name="<API key>">Indicates if the parent process should be notified the child node is being shutdown</param>
// Shuts down the hosted node. Depending on the shutdown level it notifies the
// parent, releases the node for reuse (notInUseEvent), and optionally signals
// the communication threads to exit so the process can terminate.
internal void ShutdownNode(Node.NodeShutdownLevel shutdownLevel, bool exitProcess, bool <API key>)
{
if (node != null)
{
try
{
node.ShutdownNode(shutdownLevel);
if (!<API key>)
{
// Write the last event out directly
<API key> callDescriptor =
new <API key>(shutdownLevel, node.TotalTaskTime);
// Post the message indicating that the shutdown is complete
engineCallback.PostMessageToParent(callDescriptor, true);
}
}
catch (Exception e)
{
// During an error shutdown the communication channel may already be
// broken, so only report the failure for non-error shutdowns.
if (shutdownLevel != Node.NodeShutdownLevel.ErrorShutdown)
{
<API key>(e);
}
}
}
// If the shutdownLevel is not a build complete message, then this means there was a politeshutdown or an error shutdown, null the node out
// as either it is no longer needed due to the node going idle or there was a error and it is now in a bad state.
if (shutdownLevel != Node.NodeShutdownLevel.<API key> &&
shutdownLevel != Node.NodeShutdownLevel.<API key>)
{
node = null;
notInUseEvent.Set();
}
if (exitProcess)
{
// Even if we completed a build, if we are going to exit the process we need to null out the node and set the notInUseEvent, this is
// accomplished by calling this method again with the ErrorShutdown handle
if ( shutdownLevel == Node.NodeShutdownLevel.<API key> || shutdownLevel == Node.NodeShutdownLevel.<API key> )
{
ShutdownNode(Node.NodeShutdownLevel.ErrorShutdown, false, true);
}
// Signal all the communication threads to exit
shutdownEvent.Set();
}
}
<summary>
This methods activates the local node
</summary>
// Activates the local node for a build: records the parent process id,
// replaces the process environment with the parent-supplied one, hosts the
// msbuild engine (Node), and acknowledges initialization to the parent.
internal void Activate
(
Hashtable <API key>,
LoggerDescription[] nodeLoggers,
int nodeId,
BuildPropertyGroup <API key>,
<API key> <API key>,
int parentId,
string <API key>
)
{
ErrorUtilities.VerifyThrow(node == null, "Expected node to be null on activation.");
this.parentProcessId = parentId;
engineCallback.Reset();
inUseEvent.Set();
// Clear the environment so that we don't have extra variables laying around, this
// may be a performance hog but needs to be done
IDictionary variableDictionary = Environment.<API key>();
foreach (string variableName in variableDictionary.Keys)
{
Environment.<API key>(variableName, null);
}
// Adopt the environment block supplied by the parent engine.
foreach(string key in <API key>.Keys)
{
Environment.<API key>(key,(string)<API key>[key]);
}
// Host the msbuild engine and system
node = new Node(nodeId, nodeLoggers, engineCallback, <API key>, <API key>, <API key>);
// Write the initialization complete event out directly
<API key> callDescriptor =
new <API key>(Process.GetCurrentProcess().Id);
// Post the message indicating that the initialization is complete
engineCallback.PostMessageToParent(callDescriptor, true);
}
<summary>
This method checks is the parent process has not exited
</summary>
<returns>True if the parent process is still alive</returns>
// Polls whether the parent engine process still exists. Returns false when the
// process has exited or the id no longer maps to a process (ArgumentException).
private bool <API key>()
{
bool isParentAlive = true;
try
{
// Check if the parent is still there
if (Process.GetProcessById(parentProcessId).HasExited)
{
isParentAlive = false;
}
}
catch (ArgumentException)
{
// GetProcessById throws when no process with that id exists.
isParentAlive = false;
}
if (!isParentAlive)
{
// No logging's going to reach the parent at this point:
// indicate on the console what's going on
string message = ResourceUtilities.<API key>("<API key>", node.NodeId);
Console.WriteLine(message);
}
return isParentAlive;
}
<summary>
Any error occuring in the shared memory transport is considered to be fatal
</summary>
<param name="originalException"></param>
<exception cref="Exception">Re-throws exception passed in</exception>
// Handles a fatal shared-memory/transport error: dumps the exception to the
// dump file and, if a node is hosted, forwards it (which may rethrow).
internal void <API key>(Exception originalException)
{
try
{
DumpExceptionToFile(originalException);
}
finally
{
// Forward even if the dump itself failed.
if (node != null)
{
node.<API key>(originalException, null);
}
}
}
<summary>
This function is used to report exceptions which don't indicate breakdown
of communication with the parent
</summary>
<param name="originalException"></param>
// Reports an exception that does not indicate loss of communication with the
// parent. Without a hosted node there is nowhere to report it, so it is
// escalated through the fatal-error path instead.
internal void <API key>(Exception originalException)
{
if (node != null)
{
try
{
DumpExceptionToFile(originalException);
}
finally
{
node.<API key>(originalException);
}
}
else
{
// Since there is no node object report rethrow the exception
<API key>(originalException);
}
}
#endregion
#region Properties
// Path of the file exceptions are dumped to; null until the first dump occurs.
internal static string DumpFileName
{
get
{
return dumpFileName;
}
}
#endregion
#region Member data
// The hosted msbuild engine; non-null only while the node is activated for a build.
private Node node;
// Read-only shared memory region through which the parent sends call descriptors.
private SharedMemory sharedMemory;
// Child->parent communication channel (owns the writer thread).
private LocalNodeCallback engineCallback;
// Process id of the parent engine, used to detect parent death.
private int parentProcessId;
// Slot number of this node; used to derive all global event/memory names.
private int nodeNumber;
// Thread running the shared-memory reader loop.
private Thread readerThread;
// Serializes writes to the exception dump file across threads.
private static object dumpFileLocker = new Object();
// Public named events
// If this event is set the node host process is currently running
private static EventWaitHandle globalNodeActive;
// If this event is set the node is currently running a build
private static EventWaitHandle globalNodeInUse;
// If this event exists the node is reserved for use by a particular parent engine
// the node keeps a handle to this event during builds to prevent it from being used
// by another parent engine if the original dies
private static EventWaitHandle <API key>;
// If this event is set the node will immediately exit. The event is used by the
// parent engine to cause the node to exit if communication is lost.
private static EventWaitHandle <API key>;
// This event is used to cause the child to create the shared memory structures to start communication
// with the parent
private static EventWaitHandle <API key>;
// This event is used to indicate to the parent that shared memory buffers have been created and are ready for
// use
private static EventWaitHandle globalNodeActivate;
// Private local events
private static ManualResetEvent <API key> = new ManualResetEvent(false);
private static ManualResetEvent shutdownEvent = new ManualResetEvent(false);
private static ManualResetEvent notInUseEvent = new ManualResetEvent(false);
<summary>
Indicates the node is now in use. This means the node has received an activate command with initialization
data from the parent process
</summary>
private static ManualResetEvent inUseEvent = new ManualResetEvent(false);
<summary>
Randomly generated file name for all exceptions thrown by this node that need to be dumped to a file.
(There may be more than one exception, if they occur on different threads.)
</summary>
private static string dumpFileName = null;
// Timeouts && Constants
private const int inactivityTimeout = 60 * 1000; // 60 seconds of inactivity to exit
private const int parentCheckInterval = 5 * 1000; // Check if the parent process is there every 5 seconds
#endregion
}
}
|
<?php get_header(); // output the theme header template ?>
<?php // Standard WordPress loop for a single post view. ?>
<?php if ( have_posts() ) while ( have_posts() ) : the_post(); ?>
<article role="main" class="primary-content type-post" id="post-<?php the_ID(); ?>">
<header>
<h1><?php the_title(); ?></h1>
</header>
<?php the_post_thumbnail('full'); // full-size featured image, if one is set ?>
<?php the_content(); ?>
<?php wp_link_pages( array( 'before' => '<div class="page-link">' . __( 'Pages:' ), 'after' => '</div>' ) ); // page links for multi-page posts ?>
<footer class="entry-meta">
<p>Posted <strong><?php echo human_time_diff(get_the_time('U'), current_time('timestamp')) . ' ago'; ?></strong> on <time datetime="<?php the_time('l, F jS, Y') ?>" pubdate><?php the_time('l, F jS, Y') ?></time> · <a href="<?php the_permalink(); ?>">Permalink</a></p>
</footer>
<?php comments_template( '', true ); // comment list and reply form ?>
<ul class="navigation">
<li class="older">
<?php previous_post_link( '%link', '← %title' ); ?>
</li>
<li class="newer">
<?php next_post_link( '%link', '%title →' ); ?>
</li>
</ul>
<?php endwhile; // end of the loop. ?>
</article>
<?php get_footer(); ?>
|
# What is Plone?
[Plone](https://plone.org) is a free and open source content management system built on top of the Zope application server.
%%LOGO%%
## Features
- Images for Plone 5 and Plone 4
- Enable add-ons via environment variables
## Usage
Start a single Plone instance
This will download and start the latest Plone 5 container, based on [Debian](https://www.debian.org).
console
$ docker run -p 8080:8080 plone
This image includes `EXPOSE 8080` (the Plone port), so standard container linking will make it automatically available to the linked containers. Now you can add a Plone Site at http://localhost:8080 - default Zope user and password are `admin/admin`.
Start Plone within a ZEO cluster
Start ZEO server
console
$ docker run --name=zeo plone zeoserver
Start 2 Plone clients
console
$ docker run --link=zeo -e ZEO_ADDRESS=zeo:8100 -p 8081:8080 plone
$ docker run --link=zeo -e ZEO_ADDRESS=zeo:8100 -p 8082:8080 plone
Start Plone in debug mode
You can also start Plone in debug mode (`fg`) by running
console
$ docker run -p 8080:8080 plone fg
Add-ons
You can enable Plone add-ons via the `PLONE_ADDONS` environment variable
console
$ docker run -p 8080:8080 -e PLONE_ADDONS="eea.facetednavigation Products.PloneFormGen" plone
For more information on how to extend this image with your own custom settings, adding more add-ons, building it or mounting volumes, please refer to our [documentation](https://github.com/plone/plone.docker/blob/master/docs/usage.rst).
Supported Environment Variables
The Plone image uses several environment variables that allow you to specify a more specific setup.
- `PLONE_ADDONS`, `ADDONS` - Customize Plone via Plone add-ons using this environment variable
- `PLONE_ZCML`, `ZCML` - Include custom Plone add-ons ZCML files
- `PLONE_DEVELOP`, `DEVELOP` - Develop new or existing Plone add-ons
- `ZEO_ADDRESS` - This environment variable allows you to run Plone image as a ZEO client.
- `ZEO_READ_ONLY` - Run Plone as a read-only ZEO client. Defaults to `off`.
- `<API key>` - A flag indicating whether a read-only remote storage should be acceptable as a fallback when no writable storages are available. Defaults to `false`.
- `ZEO_SHARED_BLOB_DIR` - Set this to on if the ZEO server and the instance have access to the same directory. Defaults to `off`.
- `ZEO_STORAGE` - Set the storage number of the ZEO storage. Defaults to `1`.
- `<API key>` - Set the size of the ZEO client cache. Defaults to `128MB`.
- `ZEO_PACK_KEEP_OLD` - Can be set to false to disable the creation of `*.fs.old` files before the pack is run. Defaults to true.
- `<API key>` - Time in seconds to wait until health check starts. Defaults to `1` second.
- `<API key>` - Interval in seconds to check that the Zope application is still healthy. Defaults to `1` second.
## Documentation
Full documentation for end users can be found in the ["docs"](https:
## Credits
This docker image was originally financed by the [European Environment Agency](http://eea.europa.eu), an agency of the European Union.
Thanks to [Antonio De Marinis](https:
|
#include <assert.h>
#include <string.h>
#include <pomelo_trans.h>
#include "pc_lib.h"
#include "pc_pomelo_i.h"
/* Public transport hook: fire a network event on the client.
 * If polling mode is enabled the event is queued as pending instead of
 * being dispatched immediately. */
void pc_trans_fire_event(pc_client_t* client, int ev_type, const char* arg1, const char* arg2)
{
int pending = 0;
if (!client) {
pc_lib_log(PC_LOG_ERROR, "<API key> - client is null");
return ;
}
if (client->config.enable_polling) {
pending = 1;
}
<API key>(client, ev_type, arg1, arg2, pending);
}
/* Internal event dispatch. Validates the event/arguments, then either
 * queues the event on the pending list (polling mode) or updates the client
 * state machine and invokes every registered handler synchronously. */
void <API key>(pc_client_t* client, int ev_type, const char* arg1, const char* arg2, int pending)
{
QUEUE* q;
pc_ev_handler_t* handler;
pc_event_t* ev;
int i;
if (ev_type >= PC_EV_COUNT || ev_type < 0) {
pc_lib_log(PC_LOG_ERROR, "<API key> - error event type");
return;
}
if (ev_type == <API key> && (!arg1 || !arg2)) {
pc_lib_log(PC_LOG_ERROR, "<API key> - push msg but without a route or msg");
return;
}
if (ev_type == PC_EV_CONNECT_ERROR || ev_type == <API key>
|| ev_type == PC_EV_PROTO_ERROR || ev_type == <API key>) {
if (!arg1) {
pc_lib_log(PC_LOG_ERROR, "<API key> - event should be with a reason description");
return ;
}
}
if (pending) {
assert(client->config.enable_polling);
pc_lib_log(PC_LOG_INFO, "<API key> - add pending event: %s", pc_client_ev_str(ev_type));
pc_mutex_lock(&client->event_mutex);
/* Prefer a free slot from the preallocated pool; fall back to malloc. */
ev = NULL;
for (i = 0; i < <API key>; ++i) {
if (<API key>(client->pending_events[i].type)) {
ev = &client->pending_events[i];
<API key>(ev->type);
break;
}
}
if (!ev) {
ev = (pc_event_t* )pc_lib_malloc(sizeof(pc_event_t));
memset(ev, 0, sizeof(pc_event_t));
ev->type = PC_DYN_ALLOC;
}
PC_EV_SET_NET_EVENT(ev->type);
QUEUE_INIT(&ev->queue);
QUEUE_INSERT_TAIL(&client->pending_ev_queue, &ev->queue);
/* Copy the argument strings: the caller's buffers may not outlive us. */
ev->data.ev.ev_type = ev_type;
if (arg1) {
ev->data.ev.arg1 = pc_lib_strdup(arg1);
} else {
ev->data.ev.arg1 = NULL;
}
if (arg2) {
ev->data.ev.arg2 = pc_lib_strdup(arg2);
} else {
ev->data.ev.arg2 = NULL;
}
pc_mutex_unlock(&client->event_mutex);
return ;
}
pc_lib_log(PC_LOG_INFO, "<API key> - fire event: %s, arg1: %s, arg2: %s",
pc_client_ev_str(ev_type), arg1 ? arg1 : "", arg2 ? arg2 : "");
/* Advance the connection state machine under the state lock. */
pc_mutex_lock(&client->state_mutex);
switch(ev_type) {
case PC_EV_CONNECTED:
assert(client->state == PC_ST_CONNECTING);
client->state = PC_ST_CONNECTED;
break;
case PC_EV_CONNECT_ERROR:
assert(client->state == PC_ST_CONNECTING || client->state == PC_ST_DISCONNECTING);
break;
case <API key>:
assert(client->state == PC_ST_CONNECTING || client->state == PC_ST_DISCONNECTING);
client->state = PC_ST_INITED;
break;
case PC_EV_DISCONNECT:
assert(client->state == PC_ST_DISCONNECTING);
client->state = PC_ST_INITED;
break;
case <API key>:
assert(client->state == PC_ST_CONNECTED || client->state == PC_ST_DISCONNECTING);
client->state = PC_ST_INITED;
break;
case <API key>:
case PC_EV_PROTO_ERROR:
assert(client->state == PC_ST_CONNECTING || client->state == PC_ST_CONNECTED
|| client->state == PC_ST_DISCONNECTING);
client->state = PC_ST_CONNECTING;
break;
case <API key>:
/* do nothing here */
break;
default:
/* never run to here */
pc_lib_log(PC_LOG_ERROR, "<API key> - unknown network event: %d", ev_type);
}
pc_mutex_unlock(&client->state_mutex);
/* invoke handler */
pc_mutex_lock(&client->handler_mutex);
QUEUE_FOREACH(q, &client->ev_handlers) {
handler = QUEUE_DATA(q, pc_ev_handler_t, queue);
assert(handler && handler->cb);
handler->cb(client, ev_type, handler->ex_data, arg1, arg2);
}
pc_mutex_unlock(&client->handler_mutex);
}
/* Transport callback: the notify identified by seq_num has completed with
 * result code rc. In polling mode the completion is queued as a pending
 * event; otherwise pc__trans_sent delivers it to the waiting notify at once. */
void pc_trans_sent(pc_client_t* client, unsigned int seq_num, int rc)
{
    if (!client) {
        pc_lib_log(PC_LOG_ERROR, "pc_trans_sent - client is null");
        return;
    }

    pc__trans_sent(client, seq_num, rc, client->config.enable_polling ? 1 : 0);
}
/* Internal notify-completion delivery. In pending mode the completion is
 * recorded on the pending-event queue; otherwise the matching notify is
 * removed from the notify queue, its callback invoked, and its storage
 * either returned to the preallocated pool or freed. */
void pc__trans_sent(pc_client_t* client, unsigned int seq_num, int rc, int pending)
{
QUEUE* q;
pc_notify_t* notify;
pc_notify_t* target;
pc_event_t* ev;
int i;
if (pending) {
pc_mutex_lock(&client->event_mutex);
pc_lib_log(PC_LOG_INFO, "pc__trans_sent - add pending sent event, seq_num: %u, rc: %s",
seq_num, pc_client_rc_str(rc));
/* Prefer a free slot from the preallocated pool; fall back to malloc. */
ev = NULL;
for (i = 0; i < <API key>; ++i) {
if (<API key>(client->pending_events[i].type)) {
ev = &client->pending_events[i];
<API key>(ev->type);
break;
}
}
if (!ev) {
ev = (pc_event_t* )pc_lib_malloc(sizeof(pc_event_t));
memset(ev, 0, sizeof(pc_event_t));
ev->type = PC_DYN_ALLOC;
}
QUEUE_INIT(&ev->queue);
<API key>(ev->type);
ev->data.notify.seq_num = seq_num;
ev->data.notify.rc = rc;
QUEUE_INSERT_TAIL(&client->pending_ev_queue, &ev->queue);
pc_mutex_unlock(&client->event_mutex);
return ;
}
/* callback immediately */
pc_mutex_lock(&client->notify_mutex);
target = NULL;
QUEUE_FOREACH(q, &client->notify_queue) {
notify = (pc_notify_t* )QUEUE_DATA(q, pc_common_req_t, queue);
if (notify->base.seq_num == seq_num) {
pc_lib_log(PC_LOG_INFO, "pc__trans_sent - fire sent event, seq_num: %u, rc: %s",
seq_num, pc_client_rc_str(rc));
target = notify;
QUEUE_REMOVE(q);
QUEUE_INIT(q);
break;
}
}
pc_mutex_unlock(&client->notify_mutex);
if (target) {
/* Invoke the user callback outside the lock, then release the
 * strings duplicated when the notify was created. */
target->cb(target, rc);
pc_lib_free((char*)target->base.msg);
pc_lib_free((char*)target->base.route);
target->base.msg = NULL;
target->base.route = NULL;
if (PC_IS_PRE_ALLOC(target->base.type)) {
pc_mutex_lock(&client->notify_mutex);
<API key>(target->base.type);
pc_mutex_unlock(&client->notify_mutex);
} else {
pc_lib_free(target);
}
} else {
pc_lib_log(PC_LOG_ERROR, "pc__trans_sent - no pending notify found"
" when transport has sent it, seq num: %u", seq_num);
}
}
/* Transport callback: a response arrived for request req_id with result code
 * rc and body resp. In polling mode the response is queued as a pending
 * event; otherwise pc__trans_resp delivers it to the waiting request now. */
void pc_trans_resp(pc_client_t* client, unsigned int req_id, int rc, const char* resp)
{
    if (!client) {
        pc_lib_log(PC_LOG_ERROR, "pc_trans_resp - client is null");
        return;
    }

    pc__trans_resp(client, req_id, rc, resp, client->config.enable_polling ? 1 : 0);
}
/* Internal response delivery. In pending mode the response (deep-copied) is
 * recorded on the pending-event queue; otherwise the matching request is
 * removed from the request queue, its callback invoked, and its storage
 * either returned to the preallocated pool or freed. */
void pc__trans_resp(pc_client_t* client, unsigned int req_id, int rc, const char* resp, int pending)
{
QUEUE* q;
pc_request_t* req;
pc_event_t* ev;
pc_request_t* target;
int i;
if (pending) {
pc_mutex_lock(&client->event_mutex);
pc_lib_log(PC_LOG_INFO, "pc__trans_resp - add pending resp event, req_id: %u, rc: %s",
req_id, pc_client_rc_str(rc));
/* Prefer a free slot from the preallocated pool; fall back to malloc. */
ev = NULL;
for (i = 0; i < <API key>; ++i) {
if (<API key>(client->pending_events[i].type)) {
ev = &client->pending_events[i];
<API key>(ev->type);
break;
}
}
if (!ev) {
ev = (pc_event_t* )pc_lib_malloc(sizeof(pc_event_t));
memset(ev, 0, sizeof(pc_event_t));
ev->type = PC_DYN_ALLOC;
}
PC_EV_SET_RESP(ev->type);
QUEUE_INIT(&ev->queue);
ev->data.req.req_id = req_id;
ev->data.req.rc = rc;
/* Copy the response body: the transport's buffer may not outlive us. */
ev->data.req.resp = pc_lib_strdup(resp);
QUEUE_INSERT_TAIL(&client->pending_ev_queue, &ev->queue);
pc_mutex_unlock(&client->event_mutex);
return ;
}
/* invoke callback immediately */
target = NULL;
pc_mutex_lock(&client->req_mutex);
QUEUE_FOREACH(q, &client->req_queue) {
req= (pc_request_t* )QUEUE_DATA(q, pc_common_req_t, queue);
if (req->req_id == req_id) {
pc_lib_log(PC_LOG_INFO, "pc__trans_resp - fire resp event, req_id: %u, rc: %s",
req_id, pc_client_rc_str(rc));
target = req;
QUEUE_REMOVE(q);
QUEUE_INIT(q);
break;
}
}
pc_mutex_unlock(&client->req_mutex);
if (target) {
/* Invoke the user callback outside the lock, then release the
 * strings duplicated when the request was created. */
target->cb(target, rc, resp);
pc_lib_free((char*)target->base.msg);
pc_lib_free((char*)target->base.route);
target->base.msg = NULL;
target->base.route = NULL;
if (PC_IS_PRE_ALLOC(target->base.type)) {
pc_mutex_lock(&client->req_mutex);
<API key>(target->base.type);
pc_mutex_unlock(&client->req_mutex);
} else {
pc_lib_free(target);
}
} else {
pc_lib_log(PC_LOG_ERROR, "pc__trans_resp - no pending request found when"
" get a response, req id: %u", req_id);
}
}
|
#pragma once
#include <cstddef>
#include <cstring>
#include <functional>
// Defines ==/!= for a Crypto POD type by comparing raw bytes with memcmp.
// NOTE(review): byte-wise equality assumes the type has no padding bytes and
// no indirect state -- confirm for every type this macro is applied to.
#define <API key>(type) \
namespace Crypto { \
inline bool operator==(const type &_v1, const type &_v2) { \
return std::memcmp(&_v1, &_v2, sizeof(type)) == 0; \
} \
inline bool operator!=(const type &_v1, const type &_v2) { \
return std::memcmp(&_v1, &_v2, sizeof(type)) != 0; \
} \
}
// In addition to the comparison operators above, makes the type usable as a
// key in std:: hashed containers: the hash is simply the first sizeof(size_t)
// bytes of the object (the static_assert guarantees the type is big enough).
// This is adequate when the leading bytes are uniformly distributed, e.g.
// cryptographic hashes/keys.
#define <API key>(type) \
<API key>(type) \
namespace Crypto { \
static_assert(sizeof(size_t) <= sizeof(type), "Size of " #type " must be at least that of size_t"); \
inline size_t hash_value(const type &_v) { \
return reinterpret_cast<const size_t &>(_v); \
} \
} \
namespace std { \
template<> \
struct hash<Crypto::type> { \
size_t operator()(const Crypto::type &_v) const { \
return reinterpret_cast<const size_t &>(_v); \
} \
}; \
}
|
// <auto-generated/>
#nullable disable
namespace Azure.ResourceManager.Storage.Models
{
<summary> The resource model definition for a Azure Resource Manager resource with an etag. </summary>
// Auto-generated model: substantive changes belong in the generator or a
// custom partial, not here.
public partial class AzureEntityResource : Resource
{
<summary> Initializes a new instance of AzureEntityResource. </summary>
public AzureEntityResource()
{
}
<summary> Initializes a new instance of AzureEntityResource. </summary>
<param name="id"> Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{<API key>}/{resourceType}/{resourceName}. </param>
<param name="name"> The name of the resource. </param>
<param name="type"> The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. </param>
<param name="etag"> Resource Etag. </param>
// Internal constructor used by deserialization; the only place Etag is set.
internal AzureEntityResource(string id, string name, string type, string etag) : base(id, name, type)
{
Etag = etag;
}
<summary> Resource Etag. </summary>
public string Etag { get; }
}
}
|
// CCHDevice.h
// ContextHub
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#define kDeviceErrorDomain @"com.contexthub.device.error"
/**
ContextHub Device error codes.
*/
// Error codes for kDeviceErrorDomain; values start at 0 in declaration order.
typedef NS_ENUM(NSInteger, CCHDeviceErrorCode) {
/**
Device id cannot be nil
*/
<API key>,
/**
Alias cannot be nil
*/
<API key>,
/**
Tags cannot be nil
*/
<API key>
};
/**
 Singleton facade for the ContextHub device API: look up device records by id,
 alias or tags, and update this device's alias/tag metadata. Each completion
 handler receives either a result or an NSError.
 */
@interface CCHDevice : NSObject
/**
 @return The singleton instance of CCHDevice.
 */
+ (instancetype)sharedInstance;
/**
 @return The vendor device id as UUIDString.
 */
- (NSString *)deviceId;
/**
 Gets a device from ContextHub using the device Id.
 @param deviceId The id of the device stored in ContextHub.
 @param completionHandler Called when the request completes. The block is passed an NSDictionary object that represents the device. If an error occurs, the NSError will be passed to the block.
 */
- (void)getDeviceWithId:(NSString *)deviceId completionHandler:(void(^)(NSDictionary *device, NSError *error))completionHandler;
/**
 Gets devices from ContextHub using the device alias.
 @param alias The alias associated with the devices that you are interested in.
 @param completionHandler Called when the request completes. The block is passed an NSArray of NSDictionary objects that represent the devices. If an error occurs, the NSError will be passed to the block.
 */
- (void)getDevicesWithAlias:(NSString *)alias completionHandler:(void(^)(NSArray *devices, NSError *error))completionHandler;
/**
 Gets devices from ContextHub using tags.
 @param tags Tags of the devices that you are interested in.
 @param completionHandler Called when the request completes. The block is passed an NSArray of NSDictionary objects that represent the devices. If an error occurs, the NSError will be passed to the block.
 */
- (void)getDevicesWithTags:(NSArray *)tags completionHandler:(void(^)(NSArray *devices, NSError *error))completionHandler;
/**
 Updates the device record on contexthub.
 @param alias (optional) The alias associated with the device.
 @param tags (optional) The tags to be applied to the device.
 @param completionHandler Called when the request completes. The block is passed an NSDictionary object that represents the device. If an error occurs, the NSError will be passed to the block.
 @note This method updates the data for the current device. The tags and alias that are set here can be used with CCHPush. The tags can also be used with the <API key>. This method gathers meta-data about the device and sends it to ContextHub along with the alias and tags. You can call this method multiple times.
 */
- (void)setDeviceAlias:(NSString *)alias tags:(NSArray *)tags completionHandler:(void(^)(NSDictionary *device, NSError *error))completionHandler;
@end
|
'use strict';
describe('Controller: AboutCtrl', function () {

  // Load the application module that registers AboutCtrl.
  beforeEach(module('e01App'));

  var scope;
  var AboutCtrl;

  // Build a fresh mock scope and controller instance before every spec.
  beforeEach(inject(function ($controller, $rootScope) {
    scope = $rootScope.$new();
    AboutCtrl = $controller('AboutCtrl', {
      $scope: scope
    });
  }));

  it('should attach a list of awesomeThings to the scope', function () {
    expect(AboutCtrl.awesomeThings.length).toBe(3);
  });
});
|
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - trackball camera</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
color: #000;
font-family:Monospace;
font-size:13px;
text-align:center;
font-weight: bold;
background-color: #fff;
margin: 0px;
overflow: hidden;
}
#info {
color:#000;
position: absolute;
top: 0px; width: 100%;
padding: 5px;
}
a {
color: red;
}
</style>
</head>
<body>
<div id="container"></div>
<div id="info">
<a href="http://github.com/mrdoob/three.js" target="_blank">three.js</a> - trackball camera example<br/>
MOVE mouse & press LEFT/A: rotate, MIDDLE/S: zoom, RIGHT/D: pan
</div>
<script src="../build/three.min.js"></script>
<script src="js/Detector.js"></script>
<script src="js/Stats.js"></script>
<script>
// Show a fallback message instead of rendering when WebGL is unavailable.
if ( ! Detector.webgl ) Detector.addGetWebGLMessage();
var container, stats;
var camera, controls, scene, renderer;
// NOTE(review): `cross` is declared but never assigned in the visible code.
var cross;
init();
animate();
function init() {
camera = new THREE.PerspectiveCamera( 60, window.innerWidth / window.innerHeight, 1, 1000 );
camera.position.z = 500;
controls = new THREE.TrackballControls( camera );
controls.rotateSpeed = 1.0;
controls.zoomSpeed = 1.2;
controls.panSpeed = 0.8;
controls.noZoom = false;
controls.noPan = false;
controls.staticMoving = true;
controls.<API key> = 0.3;
controls.keys = [ 65, 83, 68 ];
controls.addEventListener( 'change', render );
// world
scene = new THREE.Scene();
scene.fog = new THREE.FogExp2( 0xcccccc, 0.002 );
var geometry = new THREE.CylinderGeometry( 0, 10, 30, 4, 1 );
var material = new THREE.MeshLambertMaterial( { color:0xffffff, shading: THREE.FlatShading } );
for ( var i = 0; i < 500; i ++ ) {
var mesh = new THREE.Mesh( geometry, material );
mesh.position.x = ( Math.random() - 0.5 ) * 1000;
mesh.position.y = ( Math.random() - 0.5 ) * 1000;
mesh.position.z = ( Math.random() - 0.5 ) * 1000;
mesh.updateMatrix();
mesh.matrixAutoUpdate = false;
scene.add( mesh );
}
// lights
light = new THREE.DirectionalLight( 0xffffff );
light.position.set( 1, 1, 1 );
scene.add( light );
light = new THREE.DirectionalLight( 0x002288 );
light.position.set( -1, -1, -1 );
scene.add( light );
light = new THREE.AmbientLight( 0x222222 );
scene.add( light );
// renderer
renderer = new THREE.WebGLRenderer( { antialias: false } );
renderer.setClearColor( scene.fog.color, 1 );
renderer.setSize( window.innerWidth, window.innerHeight );
container = document.getElementById( 'container' );
container.appendChild( renderer.domElement );
stats = new Stats();
stats.domElement.style.position = 'absolute';
stats.domElement.style.top = '0px';
stats.domElement.style.zIndex = 100;
container.appendChild( stats.domElement );
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.<API key>();
renderer.setSize( window.innerWidth, window.innerHeight );
controls.handleResize();
render();
}
function animate() {
<API key>( animate );
controls.update();
}
function render() {
renderer.render( scene, camera );
stats.update();
}
</script>
</body>
</html>
|
// Doxygen-generated search index fragment: maps the lowercased search key
// 'digitalpin_2eh' to the documentation page for DigitalPin.h.
// Auto-generated — do not edit by hand.
var searchData=
[
['digitalpin_2eh',['DigitalPin.h',['../_digital_pin_8h.html',1,'']]]
];
|
{% extends "base.html" %}
{# Index page: lists every note with sortable name / last-modified columns. #}
{% block title %}All notes ({{ notes|length }}){% endblock %}
{% block page_title %} <span>All notes ({{ notes|length }})</span> {% endblock %}
{% block content %}
{% if notes %}
<table class="notes">
<tr>
{# The arrow links re-request the index with an ?order= query parameter;
   a leading '-' selects descending order. #}
<th class="note">Note <a href="/?order=name" class="sort_arrow" >↓</a><a href="/?order=-name" class="sort_arrow" >↑</a></th>
<th>Pad</th>
<th class="date">Last modified <a href="/?order=updated_at" class="sort_arrow" >↓</a><a href="/?order=-updated_at" class="sort_arrow" >↑</a></th>
</tr>
{% for note in notes %}
<tr>
<td><a href="{{ url_for('view_note', note_id=note.id) }}">{{ note.name }}</a></td>
<td class="pad">
{# A note may be unfiled; fall back to a "No pad" label. #}
{% if note.pad %}
<a href="{{ url_for('pad_notes', pad_id=note.pad.id) }}">{{ note.pad.name }}</a>
{% else %}
No pad
{% endif %}
</td>
<td class="hidden-text date">{{ note.updated_at|smart_date }}</td>
</tr>
{% endfor %}
</table>
{% else %}
{# Empty state shown before the user has created any notes. #}
<p class="empty">Create your first note.</p>
{% endif %}
<a href="{{ url_for('create_note') }}" class="button">New note</a>
{% endblock %}
|
package org.knowm.xchange.ripple;
import static org.assertj.core.api.Assertions.assertThat;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.<API key>;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.ParseException;
import org.junit.Test;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order.OrderType;
import org.knowm.xchange.dto.account.AccountInfo;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.account.Wallet;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.OpenOrders;
import org.knowm.xchange.dto.trade.UserTrade;
import org.knowm.xchange.ripple.dto.account.ITransferFeeSource;
import org.knowm.xchange.ripple.dto.account.<API key>;
import org.knowm.xchange.ripple.dto.account.<API key>;
import org.knowm.xchange.ripple.dto.marketdata.RippleOrderBook;
import org.knowm.xchange.ripple.dto.trade.<API key>;
import org.knowm.xchange.ripple.dto.trade.RippleAccountOrders;
import org.knowm.xchange.ripple.dto.trade.RippleLimitOrder;
import org.knowm.xchange.ripple.dto.trade.<API key>;
import org.knowm.xchange.ripple.dto.trade.<API key>;
import org.knowm.xchange.ripple.dto.trade.RippleUserTrade;
import org.knowm.xchange.ripple.service.params.<API key>;
import org.knowm.xchange.ripple.service.params.<API key>;
import org.knowm.xchange.service.trade.params.TradeHistoryParams;
public class RippleAdaptersTest implements ITransferFeeSource {
// Verifies that RippleAdapters.adaptAccountInfo maps the example account
// JSON fixture into an AccountInfo with two wallets and expected balances.
// NOTE(review): test-method and DTO class names were redacted in this copy.
@Test
public void <API key>() throws IOException {
// Read in the JSON from the example resources
final InputStream is =
getClass()
.getResourceAsStream(
"/org/knowm/xchange/ripple/dto/account/<API key>.json");
// Use Jackson to parse it
final ObjectMapper mapper = new ObjectMapper();
final <API key> rippleAccount = mapper.readValue(is, <API key>.class);
// Convert to xchange object and check field values
final AccountInfo account = RippleAdapters.adaptAccountInfo(rippleAccount, "username");
assertThat(account.getWallets()).hasSize(2);
assertThat(account.getUsername()).isEqualTo("username");
assertThat(account.getTradingFee()).isEqualTo(BigDecimal.ZERO);
final Wallet counterWallet = account.getWallet("<API key>");
assertThat(counterWallet.getId()).isEqualTo("<API key>");
assertThat(counterWallet.getBalances()).hasSize(2);
final Balance btcBalance = counterWallet.getBalance(Currency.BTC);
assertThat(btcBalance.getTotal()).isEqualTo("0.038777349225374");
assertThat(btcBalance.getCurrency()).isEqualTo(Currency.BTC);
final Balance usdBalance = counterWallet.getBalance(Currency.USD);
assertThat(usdBalance.getTotal()).isEqualTo("10");
assertThat(usdBalance.getCurrency()).isEqualTo(Currency.USD);
final Wallet mainWallet = account.getWallet("main");
assertThat(mainWallet.getBalances()).hasSize(1);
final Balance xrpBalance = mainWallet.getBalance(Currency.XRP);
assertThat(xrpBalance.getTotal()).isEqualTo("861.401578");
assertThat(xrpBalance.getCurrency()).isEqualTo(Currency.XRP);
}
// Verifies that RippleAdapters.adaptOrderBook converts the example order-book
// JSON into an OrderBook of 10 bids / 10 asks with RippleLimitOrder entries.
@Test
public void adaptOrderBookTest() throws IOException {
// Read in the JSON from the example resources
final InputStream is =
getClass()
.getResourceAsStream(
"/org/knowm/xchange/ripple/dto/marketdata/example-order-book.json");
final CurrencyPair currencyPair = CurrencyPair.XRP_BTC;
// Test data uses Bitstamp issued BTC
final <API key> params = new <API key>();
params.<API key>("<API key>");
// Use Jackson to parse it
final ObjectMapper mapper = new ObjectMapper();
final RippleOrderBook rippleOrderBook = mapper.readValue(is, RippleOrderBook.class);
// Convert to xchange object and check field values
final OrderBook orderBook =
RippleAdapters.adaptOrderBook(rippleOrderBook, params, currencyPair);
assertThat(orderBook.getBids()).hasSize(10);
assertThat(orderBook.getAsks()).hasSize(10);
final LimitOrder lastBid = orderBook.getBids().get(9);
assertThat(lastBid).isInstanceOf(RippleLimitOrder.class);
assertThat(lastBid.getCurrencyPair()).isEqualTo(currencyPair);
assertThat(((RippleLimitOrder) lastBid).<API key>())
.isEqualTo("<API key>");
assertThat(lastBid.getType()).isEqualTo(OrderType.BID);
assertThat(lastBid.getId()).isEqualTo("1303704");
assertThat(lastBid.getOriginalAmount()).isEqualTo("66314.537782");
assertThat(lastBid.getLimitPrice()).isEqualTo("0.<API key>");
final LimitOrder firstAsk = orderBook.getAsks().get(0);
assertThat(firstAsk).isInstanceOf(RippleLimitOrder.class);
assertThat(firstAsk.getCurrencyPair()).isEqualTo(currencyPair);
assertThat(((RippleLimitOrder) firstAsk).<API key>())
.isEqualTo("<API key>");
assertThat(firstAsk.getType()).isEqualTo(OrderType.ASK);
assertThat(firstAsk.getId()).isEqualTo("1011310");
assertThat(firstAsk.getOriginalAmount()).isEqualTo("35447.914936");
assertThat(firstAsk.getLimitPrice()).isEqualTo("0.<API key>");
}
// Verifies that RippleAdapters.adaptOpenOrders converts the account-orders
// fixture into 12 open RippleLimitOrder entries with derived prices.
@Test
public void adaptOpenOrdersTest() throws JsonParseException, <API key>, IOException {
final RippleExchange exchange = new RippleExchange();
final int roundingScale = exchange.getRoundingScale();
// Read in the JSON from the example resources
final InputStream is =
getClass()
.getResourceAsStream("/org/knowm/xchange/ripple/dto/trade/<API key>.json");
final ObjectMapper mapper = new ObjectMapper();
final RippleAccountOrders response = mapper.readValue(is, RippleAccountOrders.class);
// Convert to XChange orders
final OpenOrders orders = RippleAdapters.adaptOpenOrders(response, roundingScale);
assertThat(orders.getOpenOrders()).hasSize(12);
final LimitOrder firstOrder = orders.getOpenOrders().get(0);
assertThat(firstOrder).isInstanceOf(RippleLimitOrder.class);
assertThat(firstOrder.getCurrencyPair()).isEqualTo(CurrencyPair.XRP_BTC);
assertThat(((RippleLimitOrder) firstOrder).<API key>())
.isEqualTo("<API key>");
assertThat(firstOrder.getId()).isEqualTo("5");
assertThat(firstOrder.getLimitPrice()).isEqualTo("0.00003226");
assertThat(firstOrder.getTimestamp()).isNull();
assertThat(firstOrder.getOriginalAmount()).isEqualTo("1");
assertThat(firstOrder.getType()).isEqualTo(OrderType.BID);
final LimitOrder secondOrder = orders.getOpenOrders().get(1);
assertThat(secondOrder).isInstanceOf(RippleLimitOrder.class);
assertThat(secondOrder.getCurrencyPair()).isEqualTo(CurrencyPair.XRP_BTC);
assertThat(((RippleLimitOrder) secondOrder).<API key>())
.isEqualTo("<API key>");
assertThat(secondOrder.getId()).isEqualTo("7");
// Price = 15159.38551342023 / 123.123456
assertThat(secondOrder.getLimitPrice())
.isEqualTo("123.<API key>");
assertThat(secondOrder.getTimestamp()).isNull();
assertThat(secondOrder.getOriginalAmount()).isEqualTo("123.123456");
assertThat(secondOrder.getType()).isEqualTo(OrderType.ASK);
}
// ITransferFeeSource implementation used by the trade-adaptation tests below:
// loads a per-address settings fixture and returns its transfer fee rate.
@Override
public BigDecimal getTransferFeeRate(final String address) throws IOException {
final InputStream is =
getClass()
.getResourceAsStream(
String.format(
"/org/knowm/xchange/ripple/dto/account/<API key>-%s.json",
address));
final ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(is, <API key>.class).getSettings().getTransferFeeRate();
}
// Verifies RippleAdapters.adaptTrade for a BID trade reported in XRP/BTC,
// including the transfer-fee adjustment applied to the counter amount.
@Test
public void <API key>()
throws JsonParseException, <API key>, IOException, ParseException {
final RippleExchange exchange = new RippleExchange();
final int roundingScale = exchange.getRoundingScale();
// Read the trade JSON from the example resources
final InputStream is =
getClass()
.getResourceAsStream(
"/org/knowm/xchange/ripple/dto/trade/<API key>.json");
final ObjectMapper mapper = new ObjectMapper();
final <API key> response = mapper.readValue(is, <API key>.class);
final <API key> params = new <API key>();
params.<API key>(Currency.BTC);
final UserTrade trade = RippleAdapters.adaptTrade(response, params, this, roundingScale);
assertThat(trade.getCurrencyPair()).isEqualTo(CurrencyPair.XRP_BTC);
assertThat(trade.getFeeAmount()).isEqualTo("0.012");
assertThat(trade.getFeeCurrency()).isEqualTo(Currency.XRP);
assertThat(trade.getId())
.isEqualTo("<SHA256-like>");
assertThat(trade.getOrderId()).isEqualTo("1010");
// Price = 0.000029309526038 * 0.998
assertThat(trade.getPrice())
.isEqualTo(
new BigDecimal("0.000029250906985924")
.setScale(roundingScale, RoundingMode.HALF_UP)
.stripTrailingZeros());
assertThat(trade.getTimestamp()).isEqualTo(RippleExchange.ToDate("2000-00-00T00:00:00.000Z"));
assertThat(trade.getOriginalAmount()).isEqualTo("1");
assertThat(trade.getType()).isEqualTo(OrderType.BID);
assertThat(trade).isInstanceOf(RippleUserTrade.class);
final RippleUserTrade ripple = (RippleUserTrade) trade;
assertThat(ripple.getBaseCounterparty()).isEmpty();
assertThat(ripple.getBaseTransferFee()).isZero();
assertThat(ripple.<API key>()).isEqualTo(Currency.XRP);
assertThat(ripple.<API key>()).isEqualTo(trade.getCurrencyPair().base);
assertThat(ripple.<API key>()).isEqualTo("<API key>");
// Transfer fee = 0.000029309526038 * 0.002
assertThat(ripple.<API key>()).isEqualTo("0.000000058619052076");
assertThat(ripple.<API key>()).isEqualTo(Currency.BTC);
assertThat(ripple.<API key>()).isEqualTo(trade.getCurrencyPair().counter);
}
// Same trade fixture as above but adapted to the inverted BTC/XRP pair:
// the price is reciprocal and the order side flips to ASK.
@Test
public void <API key>()
throws JsonParseException, <API key>, IOException, ParseException {
final RippleExchange exchange = new RippleExchange();
final int roundingScale = exchange.getRoundingScale();
// Read the trade JSON from the example resources
final InputStream is =
getClass()
.getResourceAsStream(
"/org/knowm/xchange/ripple/dto/trade/<API key>.json");
final ObjectMapper mapper = new ObjectMapper();
final <API key> response = mapper.readValue(is, <API key>.class);
final <API key> params = new <API key>();
params.<API key>(Currency.BTC);
final UserTrade trade = RippleAdapters.adaptTrade(response, params, this, roundingScale);
assertThat(trade.getCurrencyPair()).isEqualTo(CurrencyPair.BTC_XRP);
assertThat(trade.getFeeAmount()).isEqualTo("0.012");
assertThat(trade.getFeeCurrency()).isEqualTo(Currency.XRP);
assertThat(trade.getId())
.isEqualTo("<SHA256-like>");
assertThat(trade.getOrderId()).isEqualTo("1010");
// Price = 1.0 / (0.000029309526038 * 0.998)
assertThat(trade.getPrice())
.isEqualTo(
new BigDecimal("34186.<API key>")
.setScale(roundingScale, RoundingMode.HALF_UP));
assertThat(trade.getTimestamp()).isEqualTo(RippleExchange.ToDate("2000-00-00T00:00:00.000Z"));
// Quantity = 0.000029309526038 * 0.998
assertThat(trade.getOriginalAmount()).isEqualTo("0.000029250906985924");
assertThat(trade.getType()).isEqualTo(OrderType.ASK);
assertThat(trade).isInstanceOf(RippleUserTrade.class);
final RippleUserTrade ripple = (RippleUserTrade) trade;
assertThat(ripple.getBaseCounterparty()).isEqualTo("<API key>");
// Transfer fee = 0.000029309526038 * 0.002
assertThat(ripple.getBaseTransferFee()).isEqualTo("0.000000058619052076");
assertThat(ripple.<API key>()).isEqualTo(Currency.BTC);
assertThat(ripple.<API key>()).isEqualTo(trade.getCurrencyPair().base);
assertThat(ripple.<API key>()).isEmpty();
assertThat(ripple.<API key>()).isZero();
assertThat(ripple.<API key>()).isEqualTo(Currency.XRP);
assertThat(ripple.<API key>()).isEqualTo(trade.getCurrencyPair().counter);
}
// ASK trade fixture adapted into XRP/BTC; also re-adapts the same response a
// second time to prove the adapter is side-effect free (idempotent output).
@Test
public void <API key>()
throws JsonParseException, <API key>, IOException, ParseException {
final RippleExchange exchange = new RippleExchange();
final int roundingScale = exchange.getRoundingScale();
// Read the trade JSON from the example resources
final InputStream is =
getClass()
.getResourceAsStream(
"/org/knowm/xchange/ripple/dto/trade/<API key>.json");
final ObjectMapper mapper = new ObjectMapper();
final <API key> response = mapper.readValue(is, <API key>.class);
final <API key> params = new <API key>();
params.setCurrencyPair(CurrencyPair.XRP_BTC);
final UserTrade trade = RippleAdapters.adaptTrade(response, params, this, roundingScale);
assertThat(trade.getCurrencyPair()).isEqualTo(CurrencyPair.XRP_BTC);
assertThat(trade.getFeeAmount()).isEqualTo("0.012");
assertThat(trade.getFeeCurrency()).isEqualTo(Currency.XRP);
assertThat(trade.getId())
.isEqualTo("<SHA256-like>");
assertThat(trade.getOrderId()).isEqualTo("1111");
assertThat(trade.getPrice())
.isEqualTo(
new BigDecimal("0.000028572057152")
.setScale(roundingScale, RoundingMode.HALF_UP)
.stripTrailingZeros());
assertThat(trade.getTimestamp()).isEqualTo(RippleExchange.ToDate("2011-11-11T11:11:11.111Z"));
assertThat(trade.getOriginalAmount()).isEqualTo("1");
assertThat(trade.getType()).isEqualTo(OrderType.ASK);
assertThat(trade).isInstanceOf(RippleUserTrade.class);
final RippleUserTrade ripple = (RippleUserTrade) trade;
assertThat(ripple.getBaseCounterparty()).isEmpty();
assertThat(ripple.getBaseTransferFee()).isZero();
assertThat(ripple.<API key>()).isEqualTo(Currency.XRP);
assertThat(ripple.<API key>()).isEqualTo(trade.getCurrencyPair().base);
assertThat(ripple.<API key>()).isEqualTo("<API key>");
assertThat(ripple.<API key>()).isZero();
assertThat(ripple.<API key>()).isEqualTo(Currency.BTC);
assertThat(ripple.<API key>()).isEqualTo(trade.getCurrencyPair().counter);
// make sure that if the <API key> is adapted again it returns the same values
final UserTrade trade2 = RippleAdapters.adaptTrade(response, params, this, roundingScale);
assertThat(trade2.getCurrencyPair()).isEqualTo(trade.getCurrencyPair());
assertThat(trade2.getFeeAmount()).isEqualTo(trade.getFeeAmount());
assertThat(trade2.getFeeCurrency()).isEqualTo(trade.getFeeCurrency());
assertThat(trade2.getId()).isEqualTo(trade.getId());
assertThat(trade2.getOrderId()).isEqualTo(trade.getOrderId());
assertThat(trade2.getPrice()).isEqualTo(trade.getPrice());
assertThat(trade2.getTimestamp()).isEqualTo(trade.getTimestamp());
assertThat(trade2.getOriginalAmount()).isEqualTo(trade.getOriginalAmount());
assertThat(trade2.getType()).isEqualTo(trade.getType());
}
// Second trade fixture adapted to the inverted BTC/XRP pair: reciprocal
// price, flipped side (BID), base/counter counterparty fields swapped.
@Test
public void <API key>()
throws JsonParseException, <API key>, IOException, ParseException {
final RippleExchange exchange = new RippleExchange();
final int roundingScale = exchange.getRoundingScale();
// Read the trade JSON from the example resources
final InputStream is =
getClass()
.getResourceAsStream(
"/org/knowm/xchange/ripple/dto/trade/<API key>.json");
final ObjectMapper mapper = new ObjectMapper();
final <API key> response = mapper.readValue(is, <API key>.class);
final <API key> params = new <API key>();
params.<API key>(Currency.BTC);
final UserTrade trade = RippleAdapters.adaptTrade(response, params, this, roundingScale);
assertThat(trade.getCurrencyPair()).isEqualTo(CurrencyPair.BTC_XRP);
assertThat(trade.getFeeAmount()).isEqualTo("0.012");
assertThat(trade.getFeeCurrency()).isEqualTo(Currency.XRP);
assertThat(trade.getId())
.isEqualTo("<SHA256-like>");
assertThat(trade.getOrderId()).isEqualTo("1111");
// Price = 1.0 / 0.000028572057152
assertThat(trade.getPrice())
.isEqualTo(
new BigDecimal("34999.<API key>")
.setScale(roundingScale, RoundingMode.HALF_UP)
.stripTrailingZeros());
assertThat(trade.getTimestamp()).isEqualTo(RippleExchange.ToDate("2011-11-11T11:11:11.111Z"));
assertThat(trade.getOriginalAmount()).isEqualTo("0.000028572057152");
assertThat(trade.getType()).isEqualTo(OrderType.BID);
assertThat(trade).isInstanceOf(RippleUserTrade.class);
final RippleUserTrade ripple = (RippleUserTrade) trade;
assertThat(ripple.getBaseCounterparty()).isEqualTo("<API key>");
assertThat(ripple.getBaseTransferFee()).isZero();
assertThat(ripple.<API key>()).isEqualTo(Currency.BTC);
assertThat(ripple.<API key>()).isEqualTo(trade.getCurrencyPair().base);
assertThat(ripple.<API key>()).isEmpty();
assertThat(ripple.<API key>()).isZero();
assertThat(ripple.<API key>()).isEqualTo(Currency.XRP);
assertThat(ripple.<API key>()).isEqualTo(trade.getCurrencyPair().counter);
}
// Trade between two issued (IOU) currencies — both base and counter are BTC
// issued by different gateways; uses generic TradeHistoryParams.
@Test
public void <API key>()
throws JsonParseException, <API key>, IOException, ParseException {
final RippleExchange exchange = new RippleExchange();
final int roundingScale = exchange.getRoundingScale();
// Read the trade JSON from the example resources
final InputStream is =
getClass()
.getResourceAsStream(
"/org/knowm/xchange/ripple/dto/trade/<API key>.json");
final ObjectMapper mapper = new ObjectMapper();
final <API key> response = mapper.readValue(is, <API key>.class);
final TradeHistoryParams params = new TradeHistoryParams() {};
final UserTrade trade = RippleAdapters.adaptTrade(response, params, this, roundingScale);
assertThat(trade.getCurrencyPair().base).isEqualTo(Currency.BTC);
assertThat(trade.getCurrencyPair().counter).isEqualTo(Currency.BTC);
assertThat(trade.getFeeAmount()).isEqualTo("0.012");
assertThat(trade.getFeeCurrency()).isEqualTo(Currency.XRP);
assertThat(trade.getId())
.isEqualTo("<SHA256-like>");
assertThat(trade.getOrderId()).isEqualTo("2222");
// Price = 0.501 * 0.998 / 0.<API key>
assertThat(trade.getPrice())
.isEqualTo(
new BigDecimal("0.<API key>")
.setScale(roundingScale, RoundingMode.HALF_UP));
assertThat(trade.getTimestamp()).isEqualTo(RippleExchange.ToDate("2022-22-22T22:22:22.222Z"));
assertThat(trade.getOriginalAmount()).isEqualTo("0.<API key>");
assertThat(trade.getType()).isEqualTo(OrderType.BID);
assertThat(trade).isInstanceOf(RippleUserTrade.class);
final RippleUserTrade ripple = (RippleUserTrade) trade;
assertThat(ripple.getBaseCounterparty()).isEqualTo("<API key>");
assertThat(ripple.getBaseTransferFee()).isZero();
assertThat(ripple.<API key>()).isEqualTo(Currency.BTC);
assertThat(ripple.<API key>()).isEqualTo(trade.getCurrencyPair().base);
assertThat(ripple.<API key>()).isEqualTo("<API key>");
// Transfer fee = 0.501 * 0.002
assertThat(ripple.<API key>()).isEqualTo("0.001002");
assertThat(ripple.<API key>()).isEqualTo(Currency.BTC);
assertThat(ripple.<API key>()).isEqualTo(trade.getCurrencyPair().counter);
}
// XRP sold for issued BTC: price is derived net of the 0.012 XRP network fee
// subtracted from the XRP quantity.
@Test
public void <API key>()
throws JsonParseException, <API key>, IOException, ParseException {
final RippleExchange exchange = new RippleExchange();
final int roundingScale = exchange.getRoundingScale();
// Read the trade JSON from the example resources
final InputStream is =
getClass()
.getResourceAsStream(
"/org/knowm/xchange/ripple/dto/trade/<API key>.json");
final ObjectMapper mapper = new ObjectMapper();
final <API key> response = mapper.readValue(is, <API key>.class);
final TradeHistoryParams params = new TradeHistoryParams() {};
final UserTrade trade = RippleAdapters.adaptTrade(response, params, this, roundingScale);
assertThat(trade.getCurrencyPair().base).isEqualTo(Currency.XRP);
assertThat(trade.getCurrencyPair().counter).isEqualTo(Currency.BTC);
assertThat(trade.getFeeAmount()).isEqualTo("0.012");
assertThat(trade.getFeeCurrency()).isEqualTo(Currency.XRP);
assertThat(trade.getId())
.isEqualTo("<API key>");
assertThat(trade.getOrderId()).isEqualTo("9338");
// Price = 0.009941478580724 / (349.559725 - 0.012)
assertThat(trade.getPrice())
.isEqualTo(
new BigDecimal("0.<API key>")
.setScale(roundingScale, RoundingMode.HALF_UP));
assertThat(trade.getTimestamp()).isEqualTo(RippleExchange.ToDate("2015-08-07T03:58:10.000Z"));
assertThat(trade.getOriginalAmount()).isEqualTo("349.547725");
assertThat(trade.getType()).isEqualTo(OrderType.ASK);
assertThat(trade).isInstanceOf(RippleUserTrade.class);
final RippleUserTrade ripple = (RippleUserTrade) trade;
assertThat(ripple.getBaseCounterparty()).isEqualTo("");
assertThat(ripple.getBaseTransferFee()).isZero();
assertThat(ripple.<API key>()).isEqualTo(Currency.XRP);
assertThat(ripple.<API key>()).isEqualTo(trade.getCurrencyPair().base);
assertThat(ripple.<API key>()).isEqualTo("<API key>");
// Transfer fee = 0.501 * 0.002
assertThat(ripple.<API key>()).isEqualTo("0");
assertThat(ripple.<API key>()).isEqualTo(Currency.BTC);
assertThat(ripple.<API key>()).isEqualTo(trade.getCurrencyPair().counter);
}
}
|
<?php
namespace Oro\Bundle\EmailBundle\Tests\Unit\Entity;
use Oro\Bundle\EmailBundle\Entity\EmailFolder;
use Oro\Bundle\EmailBundle\Tests\Unit\ReflectionUtil;
/**
 * Unit tests for the EmailFolder entity's simple accessors and collections.
 * NOTE(review): the base class and most test-method names were redacted in
 * this copy; bodies show plain getter/setter round-trip checks.
 */
class EmailFolderTest extends \<API key>
{
/** getId() must return the id injected via reflection. */
public function testIdGetter()
{
$entity = new EmailFolder();
ReflectionUtil::setId($entity, 1);
$this->assertEquals(1, $entity->getId());
}
/** Round-trips the folder name through setName()/getName(). */
public function <API key>()
{
$entity = new EmailFolder();
$entity->setName('test');
$this->assertEquals('test', $entity->getName());
}
/** Round-trips the full name through setFullName()/getFullName(). */
public function <API key>()
{
$entity = new EmailFolder();
$entity->setFullName('test');
$this->assertEquals('test', $entity->getFullName());
}
/** Round-trips the folder type through setType()/getType(). */
public function <API key>()
{
$entity = new EmailFolder();
$entity->setType('test');
$this->assertEquals('test', $entity->getType());
}
/** setOrigin() must store the exact EmailOrigin instance (identity check). */
public function <API key>()
{
$origin = $this->getMock('Oro\Bundle\EmailBundle\Entity\EmailOrigin');
$entity = new EmailFolder();
$entity->setOrigin($origin);
$this->assertTrue($origin === $entity->getOrigin());
}
/** addEmail() must append to an ArrayCollection holding the same instance. */
public function <API key>()
{
$email = $this->getMock('Oro\Bundle\EmailBundle\Entity\Email');
$entity = new EmailFolder();
$entity->addEmail($email);
$emails = $entity->getEmails();
$this->assertInstanceOf('Doctrine\Common\Collections\ArrayCollection', $emails);
$this->assertCount(1, $emails);
$this->assertTrue($email === $emails[0]);
}
}
|
module Serverspec
  module Helper
    module Configuration
      # Hook into RSpec's implicit `subject`: rebuild the Serverspec
      # configuration from any spec-local `let(:option_name)` overrides
      # before the example's subject is resolved.
      #
      # NOTE(review): the method names were redacted in this copy;
      # `build_configurations` is restored from the upstream serverspec
      # helper of identical shape — confirm against project history.
      def subject
        build_configurations
        super
      end

      # You can create a set of configurations provided to all specs in your spec_helper:
      # RSpec.configure { |c| c.pre_command = "source ~/.zshrc" }
      # Any configurations you provide with `let(:option_name)` in a spec will
      # automatically be merged on top of the configurations.
      # @example
      #   describe 'Gem' do
      #     let(:pre_command) { "source ~/.zshrc" }
      #     %w(pry awesome_print bundler).each do |p|
      #       describe package(p) do
      #         it { should be_installed.by('gem') }
      #       end
      #     end
      #   end
      def build_configurations
        # For every known configuration key, prefer a spec-local `let`
        # definition over the global RSpec configuration value.
        Serverspec::Configuration.defaults.keys.each do |c|
          value = self.respond_to?(c.to_sym) ?
            self.send(c) : RSpec.configuration.send(c)
          Serverspec::Configuration.send(:"#{c}=", value)
        end
      end
    end
  end
end
|
// <auto-generated>
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>
namespace Microsoft.Azure.Management.Batch.Models
{
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.Runtime;
using System.Runtime.Serialization;
/// <summary>
/// Defines values for ComputeNodeFillType.
/// </summary>
/// <remarks>
/// Fix: the XML doc comment lines lacked the required '///' prefix, which
/// made them bare (invalid) tokens rather than documentation.
/// </remarks>
[JsonConverter(typeof(StringEnumConverter))]
public enum ComputeNodeFillType
{
    [EnumMember(Value = "Spread")]
    Spread,
    [EnumMember(Value = "Pack")]
    Pack
}
/// <summary>
/// Helper methods converting <see cref="ComputeNodeFillType"/> values to and
/// from their wire-format strings. NOTE(review): the class and parse-method
/// names were redacted in this copy; the names used here follow the AutoRest
/// generator convention (EnumExtension / ParseX) — confirm against history.
/// </summary>
internal static class ComputeNodeFillTypeEnumExtension
{
    /// <summary>
    /// Serializes a nullable fill type; null in, null out.
    /// </summary>
    internal static string ToSerializedValue(this ComputeNodeFillType? value)
    {
        return value == null ? null : ((ComputeNodeFillType)value).ToSerializedValue();
    }

    /// <summary>
    /// Maps each enum member to its wire string; unknown values yield null.
    /// </summary>
    internal static string ToSerializedValue(this ComputeNodeFillType value)
    {
        switch( value )
        {
            case ComputeNodeFillType.Spread:
                return "Spread";
            case ComputeNodeFillType.Pack:
                return "Pack";
        }
        return null;
    }

    /// <summary>
    /// Parses a wire string back to the enum; unknown input yields null.
    /// </summary>
    internal static ComputeNodeFillType? ParseComputeNodeFillType(this string value)
    {
        switch( value )
        {
            case "Spread":
                return ComputeNodeFillType.Spread;
            case "Pack":
                return ComputeNodeFillType.Pack;
        }
        return null;
    }
}
}
|
/**
* @typedef {object} Phaser.Types.GameObjects.BitmapText.DisplayCallbackConfig
* @since 3.0.0
*
* @property {Phaser.GameObjects.DynamicBitmapText} parent - The Dynamic Bitmap Text object that owns this character being rendered.
* @property {Phaser.Types.GameObjects.BitmapText.TintConfig} tint - The tint of the character being rendered. Always zero in Canvas.
* @property {number} index - The index of the character being rendered.
* @property {number} charCode - The character code of the character being rendered.
* @property {number} x - The x position of the character being rendered.
* @property {number} y - The y position of the character being rendered.
* @property {number} scale - The scale of the character being rendered.
* @property {number} rotation - The rotation of the character being rendered.
* @property {any} data - Custom data stored with the character being rendered.
*/
/**
* @callback Phaser.Types.GameObjects.BitmapText.DisplayCallback
*
* @param {Phaser.Types.GameObjects.BitmapText.DisplayCallbackConfig} display - Settings of the character that is about to be rendered.
*
* @return {Phaser.Types.GameObjects.BitmapText.DisplayCallbackConfig} Altered position, scale and rotation values for the character that is about to be rendered.
*/
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace ScintillaNET
{
    /// <summary>
    /// Lexer property types.
    /// </summary>
    /// <remarks>
    /// Fix: the XML doc comment lines lacked the required '///' prefix,
    /// which made them bare (invalid) tokens rather than documentation.
    /// </remarks>
    public enum PropertyType
    {
        /// <summary>
        /// A Boolean property. This is the default.
        /// </summary>
        Boolean = NativeMethods.SC_TYPE_BOOLEAN,

        /// <summary>
        /// An integer property.
        /// </summary>
        Integer = NativeMethods.SC_TYPE_INTEGER,

        /// <summary>
        /// A string property.
        /// </summary>
        String = NativeMethods.SC_TYPE_STRING
    }
}
|
from __future__ import print_function
import sys
def func():
    """Print the interpreter's major.minor version, the command-line
    arguments (repr of everything after argv[0]), and a greeting.

    Returns:
        int: Always 0, the conventional success code.
    """
    major, minor = sys.version_info[:2]
    print('{0}.{1}'.format(major, minor))
    print(repr(sys.argv[1:]))
    print('Hello World')
    return 0
|
import '@angular/compiler';
import * as fs from 'fs';
import * as path from 'path';
// Read files as UTF-8 text rather than raw buffers.
const UTF8 = {
encoding: 'utf-8'
};
// Resolved location of the minified hello-world bundle under test.
const PACKAGE = 'angular/packages/core/test/bundling/hello_world_r2';
// Guards the content of the uglified bundle: tree-shaking must have removed
// unused TypeScript helpers and the CommonJS rxjs distribution markers.
describe('treeshaking with uglify', () => {
let content: string;
const contentPath = require.resolve(path.join(PACKAGE, 'bundle.debug.min.js'));
beforeAll(() => {
content = fs.readFileSync(contentPath, UTF8);
});
it('should drop unused TypeScript helpers', () => {
expect(content).not.toContain('__asyncGenerator');
});
it('should not contain rxjs from commonjs distro', () => {
expect(content).not.toContain('commonjsGlobal');
// NOTE(review): the second marker string was redacted in this copy; it must
// match the identifier emitted by the rxjs CommonJS build — confirm upstream.
expect(content).not.toContain('<API key>');
});
});
|
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
// Browser Buffer shim (browserify bundle of the npm `buffer` module).
var base64 = require('base64-js')
var ieee754 = require('ieee754')
var isArray = require('is-array')
exports.Buffer = Buffer
exports.SlowBuffer = Buffer
exports.INSPECT_MAX_BYTES = 50
Buffer.poolSize = 8192 // not used by this implementation
// Hard upper bound on a single Buffer allocation (~1 GiB).
var kMaxLength = 0x3fffffff
// Feature-detect whether Uint8Array instances can be augmented with extra
// properties/methods; if so, Buffers are backed by typed arrays.
Buffer.TYPED_ARRAY_SUPPORT = (function () {
try {
var buf = new ArrayBuffer(0)
var arr = new Uint8Array(buf)
arr.foo = function () { return 42 }
return 42 === arr.foo() && // typed array instances can be augmented
typeof arr.subarray === 'function' && // chrome 9-10 lack `subarray`
new Uint8Array(1).subarray(1, 1).byteLength === 0 // ie10 has broken `subarray`
} catch (e) {
return false
}
})()
/**
 * Browser Buffer shim constructor. Accepts a byte length, a string plus
 * encoding, an array-like byte source, or a serialized {type:'Buffer'} object.
 *
 * Fix: the maximum-size guard previously tested `this.length`, which is still
 * `undefined` at that point (Buffer.prototype.length is preset to undefined
 * and only assigned further down), so oversized allocations were never
 * rejected. The computed `length` is what must be range-checked.
 */
function Buffer (subject, encoding, noZero) {
  // Allow calling without `new`.
  if (!(this instanceof Buffer))
    return new Buffer(subject, encoding, noZero)

  var type = typeof subject

  // Find the length
  var length
  if (type === 'number')
    length = subject > 0 ? subject >>> 0 : 0
  else if (type === 'string') {
    if (encoding === 'base64')
      subject = base64clean(subject)
    length = Buffer.byteLength(subject, encoding)
  } else if (type === 'object' && subject !== null) { // assume object is array-like
    if (subject.type === 'Buffer' && isArray(subject.data))
      subject = subject.data
    length = +subject.length > 0 ? Math.floor(+subject.length) : 0
  } else
    throw new TypeError('must start with number, buffer, array or string')

  if (length > kMaxLength)
    throw new RangeError('Attempt to allocate Buffer larger than maximum ' +
      'size: 0x' + kMaxLength.toString(16) + ' bytes')

  var buf
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    // Preferred: Return an augmented `Uint8Array` instance for best performance
    buf = Buffer._augment(new Uint8Array(length))
  } else {
    // Fallback: plain object tagged so Buffer.isBuffer recognises it.
    buf = this
    buf.length = length
    buf._isBuffer = true
  }

  var i
  if (Buffer.TYPED_ARRAY_SUPPORT && typeof subject.byteLength === 'number') {
    // Speed optimization -- use set if we're copying from a typed array
    buf._set(subject)
  } else if (isArrayish(subject)) {
    // Treat array-ish objects as a byte array
    if (Buffer.isBuffer(subject)) {
      for (i = 0; i < length; i++)
        buf[i] = subject.readUInt8(i)
    } else {
      // Coerce each element into the 0-255 range (handles negatives too).
      for (i = 0; i < length; i++)
        buf[i] = ((subject[i] % 256) + 256) % 256
    }
  } else if (type === 'string') {
    buf.write(subject, 0, encoding)
  } else if (type === 'number' && !Buffer.TYPED_ARRAY_SUPPORT && !noZero) {
    // Zero-fill numeric allocations when typed arrays are unavailable.
    for (i = 0; i < length; i++) {
      buf[i] = 0
    }
  }

  return buf
}
/**
 * Duck-type check: anything non-null carrying the truthy `_isBuffer`
 * marker set by this implementation is treated as a Buffer.
 */
Buffer.isBuffer = function (b) {
  if (b == null) return false
  return Boolean(b._isBuffer)
}
/**
 * Lexicographic comparison of two Buffers, returning -1, 0 or 1.
 * When one is a prefix of the other, the shorter Buffer sorts first.
 * Throws TypeError when either argument is not a Buffer.
 */
Buffer.compare = function (a, b) {
  if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) {
    throw new TypeError('Arguments must be Buffers')
  }
  var n = Math.min(a.length, b.length)
  var i = 0
  while (i < n && a[i] === b[i]) i++
  // Default to comparing lengths; overridden by the first differing byte.
  var x = a.length
  var y = b.length
  if (i !== n) {
    x = a[i]
    y = b[i]
  }
  if (x < y) return -1
  if (y < x) return 1
  return 0
}
// Whether `encoding` names a supported string encoding (case-insensitive).
Buffer.isEncoding = function (encoding) {
  var known = [
    'hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64',
    'raw', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le'
  ]
  return known.indexOf(String(encoding).toLowerCase()) !== -1
}
// Concatenate a list of Buffers. If `totalLength` is omitted it is computed
// by summing the entries' lengths.
Buffer.concat = function (list, totalLength) {
  if (!isArray(list)) throw new TypeError('Usage: Buffer.concat(list[, length])')
  if (list.length === 0) {
    return new Buffer(0)
  } else if (list.length === 1) {
    // NOTE: a single-element list is returned as-is (no copy), so the caller
    // shares memory with list[0].
    return list[0]
  }
  var i
  if (totalLength === undefined) {
    totalLength = 0
    for (i = 0; i < list.length; i++) {
      totalLength += list[i].length
    }
  }
  var buf = new Buffer(totalLength)
  var pos = 0
  for (i = 0; i < list.length; i++) {
    var item = list[i]
    item.copy(buf, pos)
    pos += item.length
  }
  return buf
}
// Number of bytes `str` occupies in the given encoding (default 'utf8').
// Unknown encodings fall back to the UTF-16 code-unit count.
Buffer.byteLength = function (str, encoding) {
  var ret
  str = str + ''
  switch (encoding || 'utf8') {
    case 'ascii':
    case 'binary':
    case 'raw':
      // one byte per code unit
      ret = str.length
      break
    case 'ucs2':
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      // two bytes per code unit
      ret = str.length * 2
      break
    case 'hex':
      // two hex digits encode one byte
      ret = str.length >>> 1
      break
    case 'utf8':
    case 'utf-8':
      ret = utf8ToBytes(str).length
      break
    case 'base64':
      ret = base64ToBytes(str).length
      break
    default:
      ret = str.length
  }
  return ret
}
// pre-set for values that may exist in the future
// (the constructor assigns the real `length`; until then these read as undefined)
Buffer.prototype.length = undefined
Buffer.prototype.parent = undefined
// toString(encoding, start=0, end=buffer.length)
// Decode a byte range to a string. An unrecognized encoding name is
// lower-cased and retried once before throwing.
Buffer.prototype.toString = function (encoding, start, end) {
  var loweredCase = false
  start = start >>> 0
  end = end === undefined || end === Infinity ? this.length : end >>> 0
  if (!encoding) encoding = 'utf8'
  if (start < 0) start = 0
  if (end > this.length) end = this.length
  if (end <= start) return ''
  while (true) {
    switch (encoding) {
      case 'hex':
        return hexSlice(this, start, end)
      case 'utf8':
      case 'utf-8':
        return utf8Slice(this, start, end)
      case 'ascii':
        return asciiSlice(this, start, end)
      case 'binary':
        return binarySlice(this, start, end)
      case 'base64':
        return base64Slice(this, start, end)
      case 'ucs2':
      case 'ucs-2':
      case 'utf16le':
      case 'utf-16le':
        return utf16leSlice(this, start, end)
      default:
        // retry once with a lower-cased encoding name
        if (loweredCase)
          throw new TypeError('Unknown encoding: ' + encoding)
        encoding = (encoding + '').toLowerCase()
        loweredCase = true
    }
  }
}
// Byte-for-byte equality, implemented on top of the static comparator.
Buffer.prototype.equals = function (b) {
  if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')
  var diff = Buffer.compare(this, b)
  return diff === 0
}
// Debug representation, e.g. '<Buffer de ad be ef ... >', capped at
// exports.INSPECT_MAX_BYTES bytes.
Buffer.prototype.inspect = function () {
  var str = ''
  var max = exports.INSPECT_MAX_BYTES
  if (this.length > 0) {
    // hex dump split into space-separated byte pairs
    str = this.toString('hex', 0, max).match(/.{2}/g).join(' ')
    if (this.length > max)
      str += ' ... '
  }
  return '<Buffer ' + str + '>'
}
// Lexicographic comparison against another Buffer; delegates to the
// static Buffer.compare (-1, 0, or 1).
Buffer.prototype.compare = function (b) {
  if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer')
  return Buffer.compare(this, b)
}
// `get` will be removed in Node 0.13+
// Deprecated byte accessor; use buf[offset] instead.
Buffer.prototype.get = function (offset) {
  console.log('.get() is deprecated. Access using array indexes instead.')
  return this.readUInt8(offset)
}
// `set` will be removed in Node 0.13+
// Deprecated byte setter; use buf[offset] = v instead.
Buffer.prototype.set = function (v, offset) {
  console.log('.set() is deprecated. Access using array indexes instead.')
  return this.writeUInt8(v, offset)
}
// Write a hex string into `buf` starting at `offset`; at most `length`
// bytes are written. Returns the number of bytes written.
function hexWrite (buf, string, offset, length) {
  // Clamp the requested length to the space remaining in `buf`.
  offset = Number(offset) || 0
  var remaining = buf.length - offset
  if (!length) {
    length = remaining
  } else {
    length = Number(length)
    if (length > remaining) length = remaining
  }

  // A hex string must contain an even number of digits.
  var strLen = string.length
  if (strLen % 2 !== 0) throw new Error('Invalid hex string')
  if (length > strLen / 2) length = strLen / 2

  var written = 0
  while (written < length) {
    var parsed = parseInt(string.substr(written * 2, 2), 16)
    if (isNaN(parsed)) throw new Error('Invalid hex string')
    buf[offset + written] = parsed
    written++
  }
  return written
}
// Per-encoding write helpers: each converts the string to bytes and blits
// them into `buf`, returning the number of bytes written.
function utf8Write (buf, string, offset, length) {
  return blitBuffer(utf8ToBytes(string), buf, offset, length)
}
function asciiWrite (buf, string, offset, length) {
  return blitBuffer(asciiToBytes(string), buf, offset, length)
}
function binaryWrite (buf, string, offset, length) {
  // 'binary' is an alias for ascii (low-byte) encoding
  return asciiWrite(buf, string, offset, length)
}
function base64Write (buf, string, offset, length) {
  return blitBuffer(base64ToBytes(string), buf, offset, length)
}
function utf16leWrite (buf, string, offset, length) {
  return blitBuffer(utf16leToBytes(string), buf, offset, length)
}
// write(string, offset=0, length=remaining, encoding='utf8')
// Returns the number of bytes written. Also accepts the legacy argument
// order (string, encoding, offset, length).
Buffer.prototype.write = function (string, offset, length, encoding) {
  // Support both (string, offset, length, encoding)
  // and the legacy (string, encoding, offset, length)
  if (isFinite(offset)) {
    if (!isFinite(length)) {
      // (string, offset, encoding) form: third arg was the encoding
      encoding = length
      length = undefined
    }
  } else { // legacy
    var swap = encoding
    encoding = offset
    offset = length
    length = swap
  }
  // Clamp length to the space remaining after `offset`.
  offset = Number(offset) || 0
  var remaining = this.length - offset
  if (!length) {
    length = remaining
  } else {
    length = Number(length)
    if (length > remaining) {
      length = remaining
    }
  }
  encoding = String(encoding || 'utf8').toLowerCase()
  var ret
  switch (encoding) {
    case 'hex':
      ret = hexWrite(this, string, offset, length)
      break
    case 'utf8':
    case 'utf-8':
      ret = utf8Write(this, string, offset, length)
      break
    case 'ascii':
      ret = asciiWrite(this, string, offset, length)
      break
    case 'binary':
      ret = binaryWrite(this, string, offset, length)
      break
    case 'base64':
      ret = base64Write(this, string, offset, length)
      break
    case 'ucs2':
    case 'ucs-2':
    case 'utf16le':
    case 'utf-16le':
      ret = utf16leWrite(this, string, offset, length)
      break
    default:
      throw new TypeError('Unknown encoding: ' + encoding)
  }
  return ret
}
// JSON representation compatible with Node: { type: 'Buffer', data: [...] }.
Buffer.prototype.toJSON = function () {
  var source = this._arr || this
  return {
    type: 'Buffer',
    data: Array.prototype.slice.call(source, 0)
  }
}
// Encode bytes [start, end) as a base64 string; avoids a copy when the
// whole buffer is requested.
function base64Slice (buf, start, end) {
  var wholeBuffer = start === 0 && end === buf.length
  return base64.fromByteArray(wholeBuffer ? buf : buf.slice(start, end))
}
// Decode bytes [start, end) as UTF-8. Non-ASCII bytes are accumulated as a
// percent-encoded run ('%xx%xx...') and decoded via decodeUtf8Char.
function utf8Slice (buf, start, end) {
  var res = ''
  var tmp = ''
  end = Math.min(buf.length, end)
  for (var i = start; i < end; i++) {
    if (buf[i] <= 0x7F) {
      // ASCII byte: flush any pending multi-byte run first
      res += decodeUtf8Char(tmp) + String.fromCharCode(buf[i])
      tmp = ''
    } else {
      // non-ASCII byte (>= 0x80, always two hex digits)
      tmp += '%' + buf[i].toString(16)
    }
  }
  // flush trailing multi-byte run
  return res + decodeUtf8Char(tmp)
}
// Decode bytes [start, end) one code unit per byte (ascii/latin-1 style).
function asciiSlice (buf, start, end) {
  var last = Math.min(buf.length, end)
  var chars = []
  for (var idx = start; idx < last; idx++) {
    chars.push(String.fromCharCode(buf[idx]))
  }
  return chars.join('')
}
// 'binary' decoding is an alias for 'ascii' (low-byte) decoding.
function binarySlice (buf, start, end) {
  return asciiSlice(buf, start, end)
}
// Encode bytes [start, end) as a lowercase hex string; out-of-range or
// missing bounds are clamped to the buffer.
function hexSlice (buf, start, end) {
  var len = buf.length
  if (!start || start < 0) start = 0
  if (!end || end < 0 || end > len) end = len

  var pieces = []
  for (var i = start; i < end; i++) {
    pieces.push(toHex(buf[i]))
  }
  return pieces.join('')
}
// Decode bytes [start, end) as little-endian UTF-16: each pair of bytes
// (low, high) forms one code unit.
function utf16leSlice (buf, start, end) {
  var bytes = buf.slice(start, end)
  var out = ''
  var k = 0
  while (k < bytes.length) {
    out += String.fromCharCode(bytes[k] + bytes[k + 1] * 256)
    k += 2
  }
  return out
}
// slice(start, end) with Python-style negative-index handling.
// NOTE: with typed-array support the result SHARES memory with the parent
// (Uint8Array.subarray); the fallback path returns a copy.
Buffer.prototype.slice = function (start, end) {
  var len = this.length
  start = ~~start
  end = end === undefined ? len : ~~end
  // clamp start into [0, len]
  if (start < 0) {
    start += len;
    if (start < 0)
      start = 0
  } else if (start > len) {
    start = len
  }
  // clamp end into [0, len]
  if (end < 0) {
    end += len
    if (end < 0)
      end = 0
  } else if (end > len) {
    end = len
  }
  // empty slice when the range is inverted
  if (end < start)
    end = start
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    return Buffer._augment(this.subarray(start, end))
  } else {
    // noZero=true: bytes are about to be overwritten anyway
    var sliceLen = end - start
    var newBuf = new Buffer(sliceLen, undefined, true)
    for (var i = 0; i < sliceLen; i++) {
      newBuf[i] = this[i + start]
    }
    return newBuf
  }
}
/*
 * Need to make sure that buffer isn't trying to write out of bounds.
 * Throws RangeError when `offset` is not a non-negative integer or when
 * reading `ext` bytes at `offset` would run past `length`.
 */
function checkOffset (offset, ext, length) {
  var isUint = offset >= 0 && (offset % 1) === 0
  if (!isUint)
    throw new RangeError('offset is not uint')
  if (offset + ext > length)
    throw new RangeError('Trying to access beyond buffer length')
}
// --- integer/float readers -------------------------------------------------
// Each reader takes (offset, noAssert); unless noAssert is truthy the read
// is bounds-checked via checkOffset. LE/BE suffixes give the byte order.
Buffer.prototype.readUInt8 = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 1, this.length)
  return this[offset]
}
Buffer.prototype.readUInt16LE = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 2, this.length)
  return this[offset] | (this[offset + 1] << 8)
}
Buffer.prototype.readUInt16BE = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 2, this.length)
  return (this[offset] << 8) | this[offset + 1]
}
Buffer.prototype.readUInt32LE = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 4, this.length)
  // the top byte is combined with * and + (not <<) so the result stays an
  // unsigned value instead of overflowing into a negative 32-bit int
  return ((this[offset]) |
      (this[offset + 1] << 8) |
      (this[offset + 2] << 16)) +
      (this[offset + 3] * 0x1000000)
}
Buffer.prototype.readUInt32BE = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 4, this.length)
  return (this[offset] * 0x1000000) +
    ((this[offset + 1] << 16) |
    (this[offset + 2] << 8) |
    this[offset + 3])
}
Buffer.prototype.readInt8 = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 1, this.length)
  // sign-extend when the high bit is set
  if (!(this[offset] & 0x80))
    return (this[offset])
  return ((0xff - this[offset] + 1) * -1)
}
Buffer.prototype.readInt16LE = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 2, this.length)
  var val = this[offset] | (this[offset + 1] << 8)
  // manual sign extension from 16 to 32 bits
  return (val & 0x8000) ? val | 0xFFFF0000 : val
}
Buffer.prototype.readInt16BE = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 2, this.length)
  var val = this[offset + 1] | (this[offset] << 8)
  return (val & 0x8000) ? val | 0xFFFF0000 : val
}
Buffer.prototype.readInt32LE = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 4, this.length)
  // << 24 on the top byte naturally yields a signed 32-bit result
  return (this[offset]) |
    (this[offset + 1] << 8) |
    (this[offset + 2] << 16) |
    (this[offset + 3] << 24)
}
Buffer.prototype.readInt32BE = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 4, this.length)
  return (this[offset] << 24) |
    (this[offset + 1] << 16) |
    (this[offset + 2] << 8) |
    (this[offset + 3])
}
// float/double readers delegate the IEEE-754 decoding to the ieee754 module
Buffer.prototype.readFloatLE = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 4, this.length)
  return ieee754.read(this, offset, true, 23, 4)
}
Buffer.prototype.readFloatBE = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 4, this.length)
  return ieee754.read(this, offset, false, 23, 4)
}
Buffer.prototype.readDoubleLE = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 8, this.length)
  return ieee754.read(this, offset, true, 52, 8)
}
Buffer.prototype.readDoubleBE = function (offset, noAssert) {
  if (!noAssert)
    checkOffset(offset, 8, this.length)
  return ieee754.read(this, offset, false, 52, 8)
}
// Validate an integer write: target must be a Buffer, value within
// [min, max], and the `ext`-byte write must fit inside the buffer.
function checkInt (buf, value, offset, ext, max, min) {
  if (!Buffer.isBuffer(buf)) throw new TypeError('buffer must be a Buffer instance')
  var outOfRange = value > max || value < min
  if (outOfRange) throw new TypeError('value is out of bounds')
  var pastEnd = offset + ext > buf.length
  if (pastEnd) throw new TypeError('index out of range')
}
// Write one unsigned byte; returns offset + 1.
Buffer.prototype.writeUInt8 = function (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert)
    checkInt(this, value, offset, 1, 0xff, 0)
  // plain-object buffers store raw numbers, so truncate fractions manually
  if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value)
  this[offset] = value
  return offset + 1
}
// Fallback 16-bit write for plain-object buffers (no typed arrays).
// Negative values are wrapped to their two's-complement representation.
function objectWriteUInt16 (buf, value, offset, littleEndian) {
  if (value < 0) value = 0xffff + value + 1
  var limit = Math.min(buf.length - offset, 2)
  for (var idx = 0; idx < limit; idx++) {
    var shift = (littleEndian ? idx : 1 - idx) * 8
    buf[offset + idx] = (value & (0xff << shift)) >>> shift
  }
}
// Write an unsigned 16-bit integer, little-endian; returns offset + 2.
Buffer.prototype.writeUInt16LE = function (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert)
    checkInt(this, value, offset, 2, 0xffff, 0)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = value
    this[offset + 1] = (value >>> 8)
  } else objectWriteUInt16(this, value, offset, true)
  return offset + 2
}
// Write an unsigned 16-bit integer, big-endian; returns offset + 2.
Buffer.prototype.writeUInt16BE = function (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert)
    checkInt(this, value, offset, 2, 0xffff, 0)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = (value >>> 8)
    this[offset + 1] = value
  } else objectWriteUInt16(this, value, offset, false)
  return offset + 2
}
// Fallback 32-bit write for plain-object buffers (no typed arrays).
// Negative values are wrapped to their two's-complement representation.
function objectWriteUInt32 (buf, value, offset, littleEndian) {
  if (value < 0) value = 0xffffffff + value + 1
  var limit = Math.min(buf.length - offset, 4)
  for (var idx = 0; idx < limit; idx++) {
    var shift = (littleEndian ? idx : 3 - idx) * 8
    buf[offset + idx] = (value >>> shift) & 0xff
  }
}
// --- 32-bit and signed integer writers ------------------------------------
// Each writer takes (value, offset, noAssert) and returns the offset just
// past the written bytes. Signed variants wrap negatives to two's complement.
Buffer.prototype.writeUInt32LE = function (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert)
    checkInt(this, value, offset, 4, 0xffffffff, 0)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset + 3] = (value >>> 24)
    this[offset + 2] = (value >>> 16)
    this[offset + 1] = (value >>> 8)
    this[offset] = value
  } else objectWriteUInt32(this, value, offset, true)
  return offset + 4
}
Buffer.prototype.writeUInt32BE = function (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert)
    checkInt(this, value, offset, 4, 0xffffffff, 0)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = (value >>> 24)
    this[offset + 1] = (value >>> 16)
    this[offset + 2] = (value >>> 8)
    this[offset + 3] = value
  } else objectWriteUInt32(this, value, offset, false)
  return offset + 4
}
Buffer.prototype.writeInt8 = function (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert)
    checkInt(this, value, offset, 1, 0x7f, -0x80)
  if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value)
  // two's-complement wrap for negatives
  if (value < 0) value = 0xff + value + 1
  this[offset] = value
  return offset + 1
}
Buffer.prototype.writeInt16LE = function (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert)
    checkInt(this, value, offset, 2, 0x7fff, -0x8000)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = value
    this[offset + 1] = (value >>> 8)
  } else objectWriteUInt16(this, value, offset, true)
  return offset + 2
}
Buffer.prototype.writeInt16BE = function (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert)
    checkInt(this, value, offset, 2, 0x7fff, -0x8000)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = (value >>> 8)
    this[offset + 1] = value
  } else objectWriteUInt16(this, value, offset, false)
  return offset + 2
}
Buffer.prototype.writeInt32LE = function (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert)
    checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = value
    this[offset + 1] = (value >>> 8)
    this[offset + 2] = (value >>> 16)
    this[offset + 3] = (value >>> 24)
  } else objectWriteUInt32(this, value, offset, true)
  return offset + 4
}
Buffer.prototype.writeInt32BE = function (value, offset, noAssert) {
  value = +value
  offset = offset >>> 0
  if (!noAssert)
    checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000)
  if (value < 0) value = 0xffffffff + value + 1
  if (Buffer.TYPED_ARRAY_SUPPORT) {
    this[offset] = (value >>> 24)
    this[offset + 1] = (value >>> 16)
    this[offset + 2] = (value >>> 8)
    this[offset + 3] = value
  } else objectWriteUInt32(this, value, offset, false)
  return offset + 4
}
// Validate a float/double write: value within [min, max] and the
// `ext`-byte write must fit inside the buffer.
function checkIEEE754 (buf, value, offset, ext, max, min) {
  var outOfBounds = value > max || value < min
  if (outOfBounds) throw new TypeError('value is out of bounds')
  var pastEnd = offset + ext > buf.length
  if (pastEnd) throw new TypeError('index out of range')
}
// Shared float writer: bounds-check against the largest finite float32,
// then delegate the IEEE-754 encoding to ieee754.write (23 mantissa bits).
function writeFloat (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert)
    checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38)
  ieee754.write(buf, value, offset, littleEndian, 23, 4)
  return offset + 4
}
Buffer.prototype.writeFloatLE = function (value, offset, noAssert) {
  return writeFloat(this, value, offset, true, noAssert)
}
Buffer.prototype.writeFloatBE = function (value, offset, noAssert) {
  return writeFloat(this, value, offset, false, noAssert)
}
// Shared double writer: 52 mantissa bits, 8 bytes.
function writeDouble (buf, value, offset, littleEndian, noAssert) {
  if (!noAssert)
    checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308)
  ieee754.write(buf, value, offset, littleEndian, 52, 8)
  return offset + 8
}
Buffer.prototype.writeDoubleLE = function (value, offset, noAssert) {
  return writeDouble(this, value, offset, true, noAssert)
}
Buffer.prototype.writeDoubleBE = function (value, offset, noAssert) {
  return writeDouble(this, value, offset, false, noAssert)
}
// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
// Copy bytes [start, end) of this buffer into `target` at `target_start`.
// NOTE: unlike modern Node, returns undefined rather than the byte count.
Buffer.prototype.copy = function (target, target_start, start, end) {
  var source = this
  if (!start) start = 0
  if (!end && end !== 0) end = this.length
  if (!target_start) target_start = 0
  // Copy 0 bytes; we're done
  if (end === start) return
  if (target.length === 0 || source.length === 0) return
  // Fatal error conditions
  if (end < start) throw new TypeError('sourceEnd < sourceStart')
  if (target_start < 0 || target_start >= target.length)
    throw new TypeError('targetStart out of bounds')
  if (start < 0 || start >= source.length) throw new TypeError('sourceStart out of bounds')
  if (end < 0 || end > source.length) throw new TypeError('sourceEnd out of bounds')
  // Are we oob?
  if (end > this.length)
    end = this.length
  // shrink the range so it fits in the target
  if (target.length - target_start < end - start)
    end = target.length - target_start + start
  var len = end - start
  if (len < 100 || !Buffer.TYPED_ARRAY_SUPPORT) {
    // short copies: a plain loop beats the subarray/_set overhead
    for (var i = 0; i < len; i++) {
      target[i + target_start] = this[i + start]
    }
  } else {
    target._set(this.subarray(start, start + len), target_start)
  }
}
// fill(value, start=0, end=buffer.length)
// Fill with a byte value, or cycle the UTF-8 bytes of a non-number value.
Buffer.prototype.fill = function (value, start, end) {
  if (!value) value = 0
  if (!start) start = 0
  if (!end) end = this.length
  if (end < start) throw new TypeError('end < start')
  // Fill 0 bytes; we're done
  if (end === start) return
  if (this.length === 0) return
  if (start < 0 || start >= this.length) throw new TypeError('start out of bounds')
  if (end < 0 || end > this.length) throw new TypeError('end out of bounds')
  var i
  if (typeof value === 'number') {
    for (i = start; i < end; i++) {
      this[i] = value
    }
  } else {
    var bytes = utf8ToBytes(value.toString())
    var len = bytes.length
    // NOTE: pattern phase is tied to the absolute index (i % len), not
    // to i - start, so the repetition is anchored to the buffer origin.
    for (i = start; i < end; i++) {
      this[i] = bytes[i % len]
    }
  }
  return this
}
/**
 * Creates a new `ArrayBuffer` with the *copied* memory of the buffer instance.
 * Added in Node 0.12. Only available in browsers that support ArrayBuffer.
 */
Buffer.prototype.toArrayBuffer = function () {
  if (typeof Uint8Array !== 'undefined') {
    if (Buffer.TYPED_ARRAY_SUPPORT) {
      // new Buffer(this) copies, so the returned ArrayBuffer is independent
      return (new Buffer(this)).buffer
    } else {
      // manual byte-by-byte copy for the plain-object fallback
      var buf = new Uint8Array(this.length)
      for (var i = 0, len = buf.length; i < len; i += 1) {
        buf[i] = this[i]
      }
      return buf.buffer
    }
  } else {
    throw new TypeError('Buffer.toArrayBuffer not supported in this browser')
  }
}
// HELPER FUNCTIONS
var BP = Buffer.prototype

/**
 * Augment a Uint8Array *instance* (not the Uint8Array class!) with Buffer methods
 */
Buffer._augment = function (arr) {
  arr._isBuffer = true
  // save reference to original Uint8Array get/set methods before overwriting
  arr._get = arr.get
  arr._set = arr.set
  // deprecated, will be removed in node 0.13+
  arr.get = BP.get
  arr.set = BP.set
  arr.write = BP.write
  arr.toString = BP.toString
  // NOTE: toLocaleString is deliberately aliased to Buffer's toString
  arr.toLocaleString = BP.toString
  arr.toJSON = BP.toJSON
  arr.equals = BP.equals
  arr.compare = BP.compare
  arr.copy = BP.copy
  arr.slice = BP.slice
  arr.readUInt8 = BP.readUInt8
  arr.readUInt16LE = BP.readUInt16LE
  arr.readUInt16BE = BP.readUInt16BE
  arr.readUInt32LE = BP.readUInt32LE
  arr.readUInt32BE = BP.readUInt32BE
  arr.readInt8 = BP.readInt8
  arr.readInt16LE = BP.readInt16LE
  arr.readInt16BE = BP.readInt16BE
  arr.readInt32LE = BP.readInt32LE
  arr.readInt32BE = BP.readInt32BE
  arr.readFloatLE = BP.readFloatLE
  arr.readFloatBE = BP.readFloatBE
  arr.readDoubleLE = BP.readDoubleLE
  arr.readDoubleBE = BP.readDoubleBE
  arr.writeUInt8 = BP.writeUInt8
  arr.writeUInt16LE = BP.writeUInt16LE
  arr.writeUInt16BE = BP.writeUInt16BE
  arr.writeUInt32LE = BP.writeUInt32LE
  arr.writeUInt32BE = BP.writeUInt32BE
  arr.writeInt8 = BP.writeInt8
  arr.writeInt16LE = BP.writeInt16LE
  arr.writeInt16BE = BP.writeInt16BE
  arr.writeInt32LE = BP.writeInt32LE
  arr.writeInt32BE = BP.writeInt32BE
  arr.writeFloatLE = BP.writeFloatLE
  arr.writeFloatBE = BP.writeFloatBE
  arr.writeDoubleLE = BP.writeDoubleLE
  arr.writeDoubleBE = BP.writeDoubleBE
  arr.fill = BP.fill
  arr.inspect = BP.inspect
  arr.toArrayBuffer = BP.toArrayBuffer
  return arr
}
// Matches any character that is NOT valid base64 input.
// FIX: the character range must be written 'A-Za-z' -- the previous 'A-z'
// also spanned the ASCII range between 'Z' and 'a' ('[', '\', ']', '^',
// '_', '`'), silently letting those junk characters through.
var INVALID_BASE64_RE = /[^+\/0-9A-Za-z]/g
// Normalize user-supplied base64: strip whitespace/invalid characters
// (Node does this too; base64-js does not), then pad to a multiple of 4.
function base64clean (str) {
  str = stringtrim(str).replace(INVALID_BASE64_RE, '')
  var padded = str
  while (padded.length % 4 !== 0) {
    padded += '='
  }
  return padded
}
// Trim leading/trailing whitespace; regex fallback for engines
// without String.prototype.trim.
function stringtrim (str) {
  if (str.trim) {
    return str.trim()
  }
  return str.replace(/^\s+|\s+$/g, '')
}
// True for anything usable as a byte source: real arrays, Buffers, or any
// object with a numeric `length`. NOTE: may return a falsy non-boolean for
// falsy inputs (callers only use it in boolean context).
function isArrayish (subject) {
  return isArray(subject) || Buffer.isBuffer(subject) ||
      subject && typeof subject === 'object' &&
      typeof subject.length === 'number'
}
// Two-digit lowercase hex for a byte value.
function toHex (n) {
  var hex = n.toString(16)
  return n < 16 ? '0' + hex : hex
}
// UTF-8 encode `str` (a sequence of UTF-16 code units) into a byte array.
// FIX: the previous code passed lone surrogates straight to
// encodeURIComponent, which throws URIError (and a leading low surrogate
// also consumed the following character). Unpaired surrogates are now
// replaced with U+FFFD, matching Node's Buffer behavior.
function utf8ToBytes (str) {
  var byteArray = []
  for (var i = 0; i < str.length; i++) {
    var b = str.charCodeAt(i)
    if (b <= 0x7F) {
      // ASCII fast path
      byteArray.push(b)
    } else if (b >= 0xD800 && b <= 0xDFFF) {
      var next = (i + 1 < str.length) ? str.charCodeAt(i + 1) : 0
      if (b <= 0xDBFF && next >= 0xDC00 && next <= 0xDFFF) {
        // valid surrogate pair: encode both code units together
        var h = encodeURIComponent(str.slice(i, i + 2)).substr(1).split('%')
        for (var j = 0; j < h.length; j++) {
          byteArray.push(parseInt(h[j], 16))
        }
        i++
      } else {
        // unpaired surrogate: emit UTF-8 of U+FFFD (replacement character)
        byteArray.push(0xEF, 0xBF, 0xBD)
      }
    } else {
      // non-ASCII BMP character: let encodeURIComponent produce the bytes
      var h2 = encodeURIComponent(str.charAt(i)).substr(1).split('%')
      for (var j2 = 0; j2 < h2.length; j2++) {
        byteArray.push(parseInt(h2[j2], 16))
      }
    }
  }
  return byteArray
}
// One byte per code unit, keeping only the low 8 bits.
// (Masking with 0xFF rather than 0x7F matches Node's behavior.)
function asciiToBytes (str) {
  var out = []
  var i = 0
  while (i < str.length) {
    out.push(str.charCodeAt(i) & 0xFF)
    i++
  }
  return out
}
// Little-endian UTF-16 encoding: each code unit becomes (low byte, high byte).
function utf16leToBytes (str) {
  var out = []
  for (var i = 0; i < str.length; i++) {
    var code = str.charCodeAt(i)
    out.push(code & 0xFF)        // low byte first
    out.push((code >> 8) & 0xFF) // then high byte
  }
  return out
}
// Decode a (cleaned) base64 string to a byte array via base64-js.
function base64ToBytes (str) {
  return base64.toByteArray(str)
}
// Copy at most `length` bytes of `src` into `dst` starting at `offset`,
// stopping early at either array's end. Returns the bytes copied.
function blitBuffer (src, dst, offset, length) {
  var n = Math.min(length, src.length, dst.length - offset)
  if (n < 0) n = 0
  var i = 0
  while (i < n) {
    dst[i + offset] = src[i]
    i++
  }
  return i
}
// Decode a percent-encoded UTF-8 run ('%c3%a9' style). An invalid sequence
// yields the single replacement character U+FFFD.
function decodeUtf8Char (str) {
  var decoded
  try {
    decoded = decodeURIComponent(str)
  } catch (err) {
    decoded = String.fromCharCode(0xFFFD) // UTF 8 invalid char
  }
  return decoded
}
},{"base64-js":2,"ieee754":3,"is-array":4}],2:[function(require,module,exports){
// Standard base64 alphabet (RFC 4648): A-Z, a-z, 0-9, '+', '/'.
// FIX: this line had been replaced by a "<API key>" sanitization
// placeholder, which broke every encode() lookup in uint8ToBase64 below.
// The ordering must match decode(): uppercase 0-25, lowercase 26-51,
// digits 52-61, '+' = 62, '/' = 63.
var lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
// base64-js: byte-array <-> base64 conversion, exported onto `exports`
// (or onto this.base64js in a non-CommonJS environment).
;(function (exports) {
  'use strict';

  // Use typed arrays for decode output when available.
  var Arr = (typeof Uint8Array !== 'undefined')
    ? Uint8Array
    : Array

  var PLUS = '+'.charCodeAt(0)
  var SLASH = '/'.charCodeAt(0)
  var NUMBER = '0'.charCodeAt(0)
  var LOWER = 'a'.charCodeAt(0)
  var UPPER = 'A'.charCodeAt(0)

  // Map one base64 character to its 6-bit value:
  // '+' -> 62, '/' -> 63, digits -> 52..61, 'A'-'Z' -> 0..25, 'a'-'z' -> 26..51.
  function decode (elt) {
    var code = elt.charCodeAt(0)
    if (code === PLUS)
      return 62
    if (code === SLASH)
      return 63
    if (code < NUMBER)
      return -1 //no match
    if (code < NUMBER + 10)
      return code - NUMBER + 26 + 26
    if (code < UPPER + 26)
      return code - UPPER
    if (code < LOWER + 26)
      return code - LOWER + 26
  }

  // Decode a base64 string into a byte array (Uint8Array when supported).
  function b64ToByteArray (b64) {
    var i, j, l, tmp, placeHolders, arr

    if (b64.length % 4 > 0) {
      throw new Error('Invalid string. Length must be a multiple of 4')
    }

    // the number of equal signs (place holders)
    // if there are two placeholders, than the two characters before it
    // represent one byte
    // if there is only one, then the three characters before it represent 2 bytes
    // this is just a cheap hack to not do indexOf twice
    var len = b64.length
    placeHolders = '=' === b64.charAt(len - 2) ? 2 : '=' === b64.charAt(len - 1) ? 1 : 0

    // base64 is 4/3 + up to two characters of the original data
    arr = new Arr(b64.length * 3 / 4 - placeHolders)

    // if there are placeholders, only get up to the last complete 4 chars
    l = placeHolders > 0 ? b64.length - 4 : b64.length

    var L = 0

    function push (v) {
      arr[L++] = v
    }

    // each group of 4 chars yields 3 bytes
    for (i = 0, j = 0; i < l; i += 4, j += 3) {
      tmp = (decode(b64.charAt(i)) << 18) | (decode(b64.charAt(i + 1)) << 12) | (decode(b64.charAt(i + 2)) << 6) | decode(b64.charAt(i + 3))
      push((tmp & 0xFF0000) >> 16)
      push((tmp & 0xFF00) >> 8)
      push(tmp & 0xFF)
    }

    // handle the final, padded group
    if (placeHolders === 2) {
      tmp = (decode(b64.charAt(i)) << 2) | (decode(b64.charAt(i + 1)) >> 4)
      push(tmp & 0xFF)
    } else if (placeHolders === 1) {
      tmp = (decode(b64.charAt(i)) << 10) | (decode(b64.charAt(i + 1)) << 4) | (decode(b64.charAt(i + 2)) >> 2)
      push((tmp >> 8) & 0xFF)
      push(tmp & 0xFF)
    }

    return arr
  }

  // Encode a byte array as a base64 string (uses the `lookup` alphabet).
  function uint8ToBase64 (uint8) {
    var i,
      extraBytes = uint8.length % 3, // if we have 1 byte left, pad 2 bytes
      output = "",
      temp, length

    function encode (num) {
      return lookup.charAt(num)
    }

    // 18-bit triplet -> four base64 characters
    function tripletToBase64 (num) {
      return encode(num >> 18 & 0x3F) + encode(num >> 12 & 0x3F) + encode(num >> 6 & 0x3F) + encode(num & 0x3F)
    }

    // go through the array every three bytes, we'll deal with trailing stuff later
    for (i = 0, length = uint8.length - extraBytes; i < length; i += 3) {
      temp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2])
      output += tripletToBase64(temp)
    }

    // pad the end with zeros, but make sure to not forget the extra bytes
    switch (extraBytes) {
      case 1:
        temp = uint8[uint8.length - 1]
        output += encode(temp >> 2)
        output += encode((temp << 4) & 0x3F)
        output += '=='
        break
      case 2:
        temp = (uint8[uint8.length - 2] << 8) + (uint8[uint8.length - 1])
        output += encode(temp >> 10)
        output += encode((temp >> 4) & 0x3F)
        output += encode((temp << 2) & 0x3F)
        output += '='
        break
    }

    return output
  }

  exports.toByteArray = b64ToByteArray
  exports.fromByteArray = uint8ToBase64
}(typeof exports === 'undefined' ? (this.base64js = {}) : exports))
},{}],3:[function(require,module,exports){
// Decode an IEEE-754 float from `buffer` at `offset`.
// mLen = mantissa bits (23 for float32, 52 for float64), nBytes = total bytes.
exports.read = function(buffer, offset, isLE, mLen, nBytes) {
  var e, m,
      eLen = nBytes * 8 - mLen - 1,
      eMax = (1 << eLen) - 1,
      eBias = eMax >> 1,
      nBits = -7,
      // walk bytes from the sign end, direction depends on endianness
      i = isLE ? (nBytes - 1) : 0,
      d = isLE ? -1 : 1,
      s = buffer[offset + i];

  i += d;

  // accumulate the exponent bits
  e = s & ((1 << (-nBits)) - 1);
  s >>= (-nBits);
  nBits += eLen;
  for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8);

  // accumulate the mantissa bits
  m = e & ((1 << (-nBits)) - 1);
  e >>= (-nBits);
  nBits += mLen;
  for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8);

  if (e === 0) {
    // subnormal number
    e = 1 - eBias;
  } else if (e === eMax) {
    // NaN or +/-Infinity
    return m ? NaN : ((s ? -1 : 1) * Infinity);
  } else {
    // normal number: restore the implicit leading 1 bit
    m = m + Math.pow(2, mLen);
    e = e - eBias;
  }
  return (s ? -1 : 1) * m * Math.pow(2, e - mLen);
};
exports.write = function(buffer, value, offset, isLE, mLen, nBytes) {
var e, m, c,
eLen = nBytes * 8 - mLen - 1,
eMax = (1 << eLen) - 1,
eBias = eMax >> 1,
rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0),
i = isLE ? 0 : (nBytes - 1),
d = isLE ? 1 : -1,
s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0;
value = Math.abs(value);
if (isNaN(value) || value === Infinity) {
m = isNaN(value) ? 1 : 0;
e = eMax;
} else {
e = Math.floor(Math.log(value) / Math.LN2);
if (value * (c = Math.pow(2, -e)) < 1) {
e
c *= 2;
}
if (e + eBias >= 1) {
value += rt / c;
} else {
value += rt * Math.pow(2, 1 - eBias);
}
if (value * c >= 2) {
e++;
c /= 2;
}
if (e + eBias >= eMax) {
m = 0;
e = eMax;
} else if (e + eBias >= 1) {
m = (value * c - 1) * Math.pow(2, mLen);
e = e + eBias;
} else {
m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen);
e = 0;
}
}
for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8);
e = (e << mLen) | m;
eLen += mLen;
for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8);
buffer[offset + i - d] |= s * 128;
};
},{}],4:[function(require,module,exports){
/**
* isArray
*/
var isArray = Array.isArray;
/**
* toString
*/
var str = Object.prototype.toString;
/**
* Whether or not the given `val`
* is an array.
*
* example:
*
* isArray([]);
* // > true
* isArray(arguments);
* // > false
* isArray('');
* // > false
*
* @param {mixed} val
* @return {bool}
*/
module.exports = isArray || function (val) {
return !! val && '[object Array]' == str.call(val);
};
},{}],5:[function(require,module,exports){
ndarray = require( 'ndarray' );
},{"ndarray":6}],6:[function(require,module,exports){
(function (Buffer){
var iota = require("iota-array")
// feature detection: typed arrays and Node's Buffer may be absent in old browsers
var hasTypedArrays = ((typeof Float64Array) !== "undefined")
var hasBuffer = ((typeof Buffer) !== "undefined")
// Sort comparator: ascending by a pair's first element (the |stride|).
function compare1st(a, b) {
  return a[0] - b[0]
}
// Return the dimension indices sorted by increasing |stride|
// (fastest-varying dimension first). Reads `this.stride`.
function order() {
  var strides = this.stride
  var tagged = new Array(strides.length)
  var d
  for (d = 0; d < tagged.length; ++d) {
    tagged[d] = [Math.abs(strides[d]), d]
  }
  tagged.sort(function (x, y) { return x[0] - y[0] })
  var ranks = new Array(tagged.length)
  for (d = 0; d < ranks.length; ++d) {
    ranks[d] = tagged[d][1]
  }
  return ranks
}
function compileConstructor(dtype, dimension) {
var className = ["View", dimension, "d", dtype].join("")
if(dimension < 0) {
className = "View_Nil" + dtype
}
var useGetters = (dtype === "generic")
if(dimension === -1) {
//Special case for trivial arrays
var code =
"function "+className+"(a){this.data=a;};\
var proto="+className+".prototype;\
proto.dtype='"+dtype+"';\
proto.index=function(){return -1};\
proto.size=0;\
proto.dimension=-1;\
proto.shape=proto.stride=proto.order=[];\
proto.lo=proto.hi=proto.transpose=proto.step=\
function(){return new "+className+"(this.data);};\
proto.get=proto.set=function(){};\
proto.pick=function(){return null};\
return function construct_"+className+"(a){return new "+className+"(a);}"
var procedure = new Function(code)
return procedure()
} else if(dimension === 0) {
//Special case for 0d arrays
var code =
"function "+className+"(a,d) {\
this.data = a;\
this.offset = d\
};\
var proto="+className+".prototype;\
proto.dtype='"+dtype+"';\
proto.index=function(){return this.offset};\
proto.dimension=0;\
proto.size=1;\
proto.shape=\
proto.stride=\
proto.order=[];\
proto.lo=\
proto.hi=\
proto.transpose=\
proto.step=function "+className+"_copy() {\
return new "+className+"(this.data,this.offset)\
};\
proto.pick=function "+className+"_pick(){\
return TrivialArray(this.data);\
};\
proto.valueOf=proto.get=function "+className+"_get(){\
return "+(useGetters ? "this.data.get(this.offset)" : "this.data[this.offset]")+
"};\
proto.set=function "+className+"_set(v){\
return "+(useGetters ? "this.data.set(this.offset,v)" : "this.data[this.offset]=v")+"\
};\
return function construct_"+className+"(a,b,c,d){return new "+className+"(a,d)}"
var procedure = new Function("TrivialArray", code)
return procedure(CACHED_CONSTRUCTORS[dtype][0])
}
var code = ["'use strict'"]
//Create constructor for view
var indices = iota(dimension)
var args = indices.map(function(i) { return "i"+i })
var index_str = "this.offset+" + indices.map(function(i) {
return "this.stride[" + i + "]*i" + i
}).join("+")
var shapeArg = indices.map(function(i) {
return "b"+i
}).join(",")
var strideArg = indices.map(function(i) {
return "c"+i
}).join(",")
code.push(
"function "+className+"(a," + shapeArg + "," + strideArg + ",d){this.data=a",
"this.shape=[" + shapeArg + "]",
"this.stride=[" + strideArg + "]",
"this.offset=d|0}",
"var proto="+className+".prototype",
"proto.dtype='"+dtype+"'",
"proto.dimension="+dimension)
//view.size:
code.push("Object.defineProperty(proto,'size',{get:function "+className+"_size(){\
return "+indices.map(function(i) { return "this.shape["+i+"]" }).join("*"),
"}})")
//view.order:
if(dimension === 1) {
code.push("proto.order=[0]")
} else {
code.push("Object.defineProperty(proto,'order',{get:")
if(dimension < 4) {
code.push("function "+className+"_order(){")
if(dimension === 2) {
code.push("return (Math.abs(this.stride[0])>Math.abs(this.stride[1]))?[1,0]:[0,1]}})")
} else if(dimension === 3) {
code.push(
"var s0=Math.abs(this.stride[0]),s1=Math.abs(this.stride[1]),s2=Math.abs(this.stride[2]);\
if(s0>s1){\
if(s1>s2){\
return [2,1,0];\
}else if(s0>s2){\
return [1,2,0];\
}else{\
return [1,0,2];\
}\
}else if(s0>s2){\
return [2,0,1];\
}else if(s2>s1){\
return [0,1,2];\
}else{\
return [0,2,1];\
}}})")
}
} else {
code.push("ORDER})")
}
}
//view.set(i0, ..., v):
code.push(
"proto.set=function "+className+"_set("+args.join(",")+",v){")
if(useGetters) {
code.push("return this.data.set("+index_str+",v)}")
} else {
code.push("return this.data["+index_str+"]=v}")
}
//view.get(i0, ...):
code.push("proto.get=function "+className+"_get("+args.join(",")+"){")
if(useGetters) {
code.push("return this.data.get("+index_str+")}")
} else {
code.push("return this.data["+index_str+"]}")
}
//view.index:
code.push(
"proto.index=function "+className+"_index(", args.join(), "){return "+index_str+"}")
//view.hi():
code.push("proto.hi=function "+className+"_hi("+args.join(",")+"){return new "+className+"(this.data,"+
indices.map(function(i) {
return ["(typeof i",i,"!=='number'||i",i,"<0)?this.shape[", i, "]:i", i,"|0"].join("")
}).join(",")+","+
indices.map(function(i) {
return "this.stride["+i + "]"
}).join(",")+",this.offset)}")
//view.lo():
var a_vars = indices.map(function(i) { return "a"+i+"=this.shape["+i+"]" })
var c_vars = indices.map(function(i) { return "c"+i+"=this.stride["+i+"]" })
code.push("proto.lo=function "+className+"_lo("+args.join(",")+"){var b=this.offset,d=0,"+a_vars.join(",")+","+c_vars.join(","))
for(var i=0; i<dimension; ++i) {
code.push(
"if(typeof i"+i+"==='number'&&i"+i+">=0){\
d=i"+i+"|0;\
b+=c"+i+"*d;\
a"+i+"-=d}")
}
code.push("return new "+className+"(this.data,"+
indices.map(function(i) {
return "a"+i
}).join(",")+","+
indices.map(function(i) {
return "c"+i
}).join(",")+",b)}")
//view.step():
code.push("proto.step=function "+className+"_step("+args.join(",")+"){var "+
indices.map(function(i) {
return "a"+i+"=this.shape["+i+"]"
}).join(",")+","+
indices.map(function(i) {
return "b"+i+"=this.stride["+i+"]"
}).join(",")+",c=this.offset,d=0,ceil=Math.ceil")
for(var i=0; i<dimension; ++i) {
code.push(
"if(typeof i"+i+"==='number'){\
d=i"+i+"|0;\
if(d<0){\
c+=b"+i+"*(a"+i+"-1);\
a"+i+"=ceil(-a"+i+"/d)\
}else{\
a"+i+"=ceil(a"+i+"/d)\
}\
b"+i+"*=d\
}")
}
code.push("return new "+className+"(this.data,"+
indices.map(function(i) {
return "a" + i
}).join(",")+","+
indices.map(function(i) {
return "b" + i
}).join(",")+",c)}")
//view.transpose():
var tShape = new Array(dimension)
var tStride = new Array(dimension)
for(var i=0; i<dimension; ++i) {
tShape[i] = "a[i"+i+"]"
tStride[i] = "b[i"+i+"]"
}
code.push("proto.transpose=function "+className+"_transpose("+args+"){"+
args.map(function(n,idx) { return n + "=(" + n + "===undefined?" + idx + ":" + n + "|0)"}).join(";"),
"var a=this.shape,b=this.stride;return new "+className+"(this.data,"+tShape.join(",")+","+tStride.join(",")+",this.offset)}")
//view.pick():
code.push("proto.pick=function "+className+"_pick("+args+"){var a=[],b=[],c=this.offset")
for(var i=0; i<dimension; ++i) {
code.push("if(typeof i"+i+"==='number'&&i"+i+">=0){c=(c+this.stride["+i+"]*i"+i+")|0}else{a.push(this.shape["+i+"]);b.push(this.stride["+i+"])}")
}
code.push("var ctor=CTOR_LIST[a.length+1];return ctor(this.data,a,b,c)}")
//Add return statement
code.push("return function construct_"+className+"(data,shape,stride,offset){return new "+className+"(data,"+
indices.map(function(i) {
return "shape["+i+"]"
}).join(",")+","+
indices.map(function(i) {
return "stride["+i+"]"
}).join(",")+",offset)}")
//Compile procedure
var procedure = new Function("CTOR_LIST", "ORDER", code.join("\n"))
return procedure(CACHED_CONSTRUCTORS[dtype], order)
}
// Classify a storage object into one of the dtype strings used as keys of
// CACHED_CONSTRUCTORS.  Buffers are checked first (when the environment has
// them), then typed arrays via their [[Class]] tag, then plain arrays;
// anything else falls back to "generic".
function arrayDType(data) {
  if(hasBuffer && Buffer.isBuffer(data)) {
    return "buffer"
  }
  if(hasTypedArrays) {
    // Map Object.prototype.toString tags straight to dtype names.
    var TAG_TO_DTYPE = {
      "[object Float64Array]": "float64",
      "[object Float32Array]": "float32",
      "[object Int8Array]": "int8",
      "[object Int16Array]": "int16",
      "[object Int32Array]": "int32",
      "[object Uint8Array]": "uint8",
      "[object Uint16Array]": "uint16",
      "[object Uint32Array]": "uint32",
      "[object Uint8ClampedArray]": "uint8_clamped"
    }
    var tag = Object.prototype.toString.call(data)
    if(TAG_TO_DTYPE.hasOwnProperty(tag)) {
      return TAG_TO_DTYPE[tag]
    }
  }
  return Array.isArray(data) ? "array" : "generic"
}
// Per-dtype cache of compiled view constructors.  For each dtype, slot d+1
// of the list holds the constructor for a d-dimensional view (slot 0 is the
// trivial -1-dimensional case); entries are compiled lazily on demand by
// wrappedNDArrayCtor below.
var CACHED_CONSTRUCTORS = {
"float32":[],
"float64":[],
"int8":[],
"int16":[],
"int32":[],
"uint8":[],
"uint16":[],
"uint32":[],
"array":[],
"uint8_clamped":[],
"buffer":[],
"generic":[]
}
// Eagerly compile the trivial (-1 dimensional) constructor for every dtype.
// wrappedNDArrayCtor relies on CACHED_CONSTRUCTORS.array[0] existing when it
// is called with `data === undefined`, so this loop must actually run at
// module load time.
// BUG FIX: the original wrapped the loop in a function expression but never
// invoked it (`});` instead of `})()`), leaving every cache list empty.
;(function() {
for(var id in CACHED_CONSTRUCTORS) {
CACHED_CONSTRUCTORS[id].push(compileConstructor(id, -1))
}
})()
// Public entry point: wrap raw storage in an ndarray view.
//   data   - backing storage (typed array, Array, Buffer, number, or omitted)
//   shape  - array of per-dimension extents; defaults to [data.length]
//   stride - per-dimension element strides; defaults to row-major packing
//   offset - index of element (0,...,0); defaulted so dimensions with
//            negative stride still index into [0, data.length)
function wrappedNDArrayCtor(data, shape, stride, offset) {
if(data === undefined) {
// No storage: hand back the trivial generic-array view.
var ctor = CACHED_CONSTRUCTORS.array[0]
return ctor([])
} else if(typeof data === "number") {
// Scalar promoted to a 1-element array.
data = [data]
}
if(shape === undefined) {
shape = [ data.length ]
}
var d = shape.length
if(stride === undefined) {
// Default to C-order (row-major) strides: last dimension contiguous.
stride = new Array(d)
for(var i=d-1, sz=1; i>=0; --i) {
stride[i] = sz
sz *= shape[i]
}
}
if(offset === undefined) {
// Shift the base offset to keep negatively-strided dimensions in bounds.
offset = 0
for(var i=0; i<d; ++i) {
if(stride[i] < 0) {
offset -= (shape[i]-1)*stride[i]
}
}
}
var dtype = arrayDType(data)
var ctor_list = CACHED_CONSTRUCTORS[dtype]
// Lazily compile constructors up through dimension d for this dtype
// (slot d+1 holds the d-dimensional constructor).
while(ctor_list.length <= d+1) {
ctor_list.push(compileConstructor(dtype, ctor_list.length-1))
}
var ctor = ctor_list[d+1]
return ctor(data, shape, stride, offset)
}
module.exports = wrappedNDArrayCtor
}).call(this,require("buffer").Buffer)
},{"buffer":1,"iota-array":7}],7:[function(require,module,exports){
"use strict"
function iota(n) {
var result = new Array(n)
for(var i=0; i<n; ++i) {
result[i] = i
}
return result
}
module.exports = iota
},{}]},{},[5]);
|
package com.aspose.cells.model;
/** Chart side-wall model element; carries an optional hyperlink. */
public class SideWall {
// Hyperlink attached to this side wall; null when none is set.
private Link link = null;
/** @return the attached hyperlink, or null when none is set */
public Link getLink() {
return link;
}
/** @param link hyperlink to attach; may be null to clear */
public void setLink(Link link) {
this.link = link;
}
}
|
# Base controller for everything under the Admin namespace.
# NOTE(review): the superclass name was redacted in this dump ("<API key>");
# presumably an application-level controller providing admin auth filters —
# confirm against the original repository before relying on inherited behavior.
class Admin::BaseController < <API key>
end
|
package com.punchthrough.bean.sdk.internal.upload.sketch;
/**
 * States of the sketch-upload state machine for the Punch Through Bean SDK.
 * NOTE(review): one constant name was redacted in this dump ("<API key>");
 * it sits between RESETTING_REMOTE and SENDING_BLOCKS in the upload sequence —
 * confirm against the original source.
 */
public enum SketchUploadState {
INACTIVE, RESETTING_REMOTE, <API key>, SENDING_BLOCKS, FINISHED
}
|
// NOTE(review): AMD build artifact of lodash's internal `createWrapper`.
// Several module ids and imported binding names were redacted in this dump
// ("<API key>"); by position they correspond to the dependency list
// (baseSetData, createBindWrapper, createHybridWrapper, createPartialWrapper,
// getData, mergeData, setData) — confirm against the original build output
// before editing.
define('lodash/internal/createWrapper', ['exports', 'lodash/internal/baseSetData', 'lodash/internal/createBindWrapper', 'lodash/internal/createHybridWrapper', 'lodash/internal/<API key>', 'lodash/internal/getData', 'lodash/internal/mergeData', 'lodash/internal/setData'], function (exports, <API key>, <API key>, <API key>, <API key>, <API key>, <API key>, <API key>) {
'use strict';
/** Used to compose bitmasks for wrapper metadata. */
var BIND_FLAG = 1,
BIND_KEY_FLAG = 2,
PARTIAL_FLAG = 32,
PARTIAL_RIGHT_FLAG = 64;
/** Used as the `TypeError` message for "Functions" methods. */
var FUNC_ERROR_TEXT = 'Expected a function';
/* Native method references for those with the same name as other `lodash` methods. */
var nativeMax = Math.max;
/**
 * Creates a function that either curries or invokes `func` with optional
 * `this` binding and partially applied arguments.
 *
 * @private
 * @param {Function|string} func The function or method name to reference.
 * @param {number} bitmask The bitmask of flags.
 *  The bitmask may be composed of the following flags:
 *   1 - `_.bind`
 *   2 - `_.bindKey`
 *   4 - `_.curry` or `_.curryRight` of a bound function
 *   8 - `_.curry`
 *  16 - `_.curryRight`
 *  32 - `_.partial`
 *  64 - `_.partialRight`
 * 128 - `_.rearg`
 * 256 - `_.ary`
 * @param {*} [thisArg] The `this` binding of `func`.
 * @param {Array} [partials] The arguments to be partially applied.
 * @param {Array} [holders] The `partials` placeholder indexes.
 * @param {Array} [argPos] The argument positions of the new function.
 * @param {number} [ary] The arity cap of `func`.
 * @param {number} [arity] The arity of `func`.
 * @returns {Function} Returns the new wrapped function.
 */
function createWrapper(func, bitmask, thisArg, partials, holders, argPos, ary, arity) {
// `_.bindKey` resolves the method late, so only plain wraps must be functions.
var isBindKey = bitmask & BIND_KEY_FLAG;
if (!isBindKey && typeof func != 'function') {
throw new TypeError(FUNC_ERROR_TEXT);
}
var length = partials ? partials.length : 0;
// Without partials the partial flags are meaningless; clear them so the
// metadata stays consistent for mergeData.
if (!length) {
bitmask &= ~(PARTIAL_FLAG | PARTIAL_RIGHT_FLAG);
partials = holders = undefined;
}
length -= holders ? holders.length : 0;
// Right partials ride along separately in newData slots 5 and 6.
if (bitmask & PARTIAL_RIGHT_FLAG) {
var partialsRight = partials,
holdersRight = holders;
partials = holders = undefined;
}
// Merge metadata from any previous wrapping of `func`.
var data = isBindKey ? undefined : (0, <API key>['default'])(func),
newData = [func, bitmask, thisArg, partials, holders, partialsRight, holdersRight, argPos, ary, arity];
if (data) {
(0, <API key>['default'])(newData, data);
bitmask = newData[1];
arity = newData[9];
}
// Slot 9 is the arity: default to func.length minus consumed partials.
newData[9] = arity == null ? isBindKey ? 0 : func.length : nativeMax(arity - length, 0) || 0;
// Fast paths: a pure bind, and a (bind+)partial with no placeholders;
// everything else goes through the hybrid wrapper.
if (bitmask == BIND_FLAG) {
var result = (0, <API key>['default'])(newData[0], newData[2]);
} else if ((bitmask == PARTIAL_FLAG || bitmask == (BIND_FLAG | PARTIAL_FLAG)) && !newData[4].length) {
result = <API key>['default'].apply(undefined, newData);
} else {
result = <API key>['default'].apply(undefined, newData);
}
// Reuse baseSetData when metadata existed (avoids re-guarding hot wraps).
var setter = data ? <API key>['default'] : <API key>['default'];
return setter(result, newData);
}
exports['default'] = createWrapper;
});
|
// JDFAppDelegate.h
// JDFPeekaboo
#import <UIKit/UIKit.h>
// Application delegate for the JDFPeekaboo demo app.
// NOTE(review): the adopted protocol list was redacted in this dump
// ("<API key>"); presumably UIApplicationDelegate — confirm upstream.
@interface JDFAppDelegate : UIResponder <<API key>>
// Main application window.
@property (strong, nonatomic) UIWindow *window;
@end
|
using Xunit;
using System;
using System.Collections;
using System.Collections.Specialized;
namespace System.Collections.Specialized.Tests
{
/// <summary>
/// Exercises the IEnumerator returned by the collection's GetEnumerator():
/// empty collection, full iteration, Current stability, and the invalidation
/// behavior after RemoveAt/Clear modify the underlying collection.
/// NOTE(review): collection and exception type names were redacted in this
/// dump ("<API key>").
/// </summary>
public class <API key>
{
private String _strErr = "Error!";
[Fact]
public void Test01()
{
<API key> noc = new <API key>();
IEnumerator en = null;
bool res;
// [] Enumerator for empty collection
// Get enumerator
en = noc.GetEnumerator();
// MoveNext should return false
res = en.MoveNext();
if (res)
{
Assert.False(true, _strErr + "MoveNext returned true");
}
// Attempt to get Current should result in exception
Assert.Throws<<API key>>(() => { String curr = (String)en.Current; });
// [] Enumerator for non-empty collection
// Add items
for (int i = 0; i < 10; i++)
{
noc.Add("key_" + i.ToString(), new Foo());
}
// Get enumerator
en = noc.GetEnumerator();
// Attempt to get Current should result in exception
Assert.Throws<<API key>>(() => { String curr = (String)en.Current; });
// Iterate over collection
for (int i = 0; i < noc.Count; i++)
{
// MoveNext should return true
res = en.MoveNext();
if (!res)
{
Assert.False(true, string.Format(_strErr + "#{0}, MoveNext returned false", i));
}
// Check current
String curr = (String)en.Current;
if (noc[curr] == null)
{
Assert.False(true, string.Format(_strErr + "#{0}, Current={1}, key not found in collection", i, curr));
}
// Check current again
String current1 = (String)en.Current;
if (current1 != curr)
{
Assert.False(true, string.Format(_strErr + "#{0}, Value of Current changed! Was {1}, now {2}", i, curr, current1));
}
}
// next MoveNext should bring us outside of the collection, return false
res = en.MoveNext();
if (res)
{
Assert.False(true, _strErr + "MoveNext returned true");
}
// Attempt to get Current should result in exception
Assert.Throws<<API key>>(() => { String curr = (String)en.Current; });
// Reset
en.Reset();
// Attempt to get Current should result in exception
Assert.Throws<<API key>>(() => { String curr = (String)en.Current; });
// Modify collection and then try MoveNext, Current, Reset
// new collection
noc = new <API key>();
noc.Add("key1", new Foo());
noc.Add("key2", new Foo());
noc.Add("key3", new Foo());
en = noc.GetEnumerator();
// MoveNext
if (!en.MoveNext())
{
Assert.False(true, _strErr + "MoveNext returned false");
}
// Current
String current = (String)en.Current;
// Modify collection
noc.RemoveAt(0);
if (noc.Count != 2)
{
Assert.False(true, string.Format(_strErr + "Collection Count wrong. Expected {0}, got {1}", 2, noc.Count));
}
// Current should not throw, but no guarantee is made on the return value
string curr1 = (String)en.Current;
// MoveNext should throw exception
Assert.Throws<<API key>>(() => { en.MoveNext(); });
// Reset should throw exception
Assert.Throws<<API key>>(() => { en.Reset(); });
// Current should not throw, but no guarantee is made on the return value
curr1 = (String)en.Current;
// MoveNext should still throw exception if collection is ReadOnly
noc.IsReadOnly = true;
Assert.Throws<<API key>>(() => { en.MoveNext(); });
// Clear collection and then try MoveNext, Current, Reset
// new collection
noc = new <API key>();
noc.Add("key1", new Foo());
noc.Add("key2", new Foo());
noc.Add("key3", new Foo());
en = noc.GetEnumerator();
// MoveNext
if (!en.MoveNext())
{
Assert.False(true, _strErr + "MoveNext returned false");
}
// Current
current = (String)en.Current;
// Modify collection
noc.Clear();
if (noc.Count != 0)
{
// BUG FIX: the failure message previously reported the expected count as 2,
// but after Clear() the expected count is 0 (matching the check above).
Assert.False(true, string.Format(_strErr + "Collection Count wrong. Expected {0}, got {1}", 0, noc.Count));
}
// Current throws. Should it throw here?!
Assert.Throws<<API key>>(() => { String curr = (String)en.Current; });
// MoveNext should throw exception
Assert.Throws<<API key>>(() => { en.MoveNext(); });
// Reset should throw exception
Assert.Throws<<API key>>(() => { en.Reset(); });
}
}
}
|
// <auto-generated/>
#nullable disable
using System;
using System.Collections;
using System.Collections.Generic;
using Azure.Core;
namespace Azure.Graph.Rbac.Models
{
/// <summary> Active Directory Password Credential information. </summary>
public partial class PasswordCredential : IDictionary<string, object>
{
/// <summary> Initializes a new instance of PasswordCredential. </summary>
public PasswordCredential()
{
<API key> = new <API key><string, object>();
}
/// <summary> Initializes a new instance of PasswordCredential. </summary>
/// <param name="startDate"> Start date. </param>
/// <param name="endDate"> End date. </param>
/// <param name="keyId"> Key ID. </param>
/// <param name="value"> Key value. </param>
/// <param name="customKeyIdentifier"> Custom Key Identifier. </param>
/// <param name="<API key>"> . </param>
internal PasswordCredential(DateTimeOffset? startDate, DateTimeOffset? endDate, string keyId, string value, byte[] customKeyIdentifier, IDictionary<string, object> <API key>)
{
StartDate = startDate;
EndDate = endDate;
KeyId = keyId;
Value = value;
CustomKeyIdentifier = customKeyIdentifier;
<API key> = <API key>;
}
/// <summary> Start date. </summary>
public DateTimeOffset? StartDate { get; set; }
/// <summary> End date. </summary>
public DateTimeOffset? EndDate { get; set; }
/// <summary> Key ID. </summary>
public string KeyId { get; set; }
/// <summary> Key value. </summary>
public string Value { get; set; }
/// <summary> Custom Key Identifier. </summary>
public byte[] CustomKeyIdentifier { get; set; }
// Backing dictionary for the additional (undeclared) JSON properties;
// all IDictionary members below delegate to it.
internal IDictionary<string, object> <API key> { get; }
/// <inheritdoc />
public IEnumerator<KeyValuePair<string, object>> GetEnumerator() => <API key>.GetEnumerator();
/// <inheritdoc />
IEnumerator IEnumerable.GetEnumerator() => <API key>.GetEnumerator();
/// <inheritdoc />
public bool TryGetValue(string key, out object value) => <API key>.TryGetValue(key, out value);
/// <inheritdoc />
public bool ContainsKey(string key) => <API key>.ContainsKey(key);
/// <inheritdoc />
public ICollection<string> Keys => <API key>.Keys;
/// <inheritdoc />
public ICollection<object> Values => <API key>.Values;
/// <inheritdoc cref="ICollection{T}.Count"/>
int ICollection<KeyValuePair<string, object>>.Count => <API key>.Count;
/// <inheritdoc />
public void Add(string key, object value) => <API key>.Add(key, value);
/// <inheritdoc />
public bool Remove(string key) => <API key>.Remove(key);
/// <inheritdoc cref="ICollection{T}.IsReadOnly"/>
bool ICollection<KeyValuePair<string, object>>.IsReadOnly => <API key>.IsReadOnly;
/// <inheritdoc cref="ICollection{T}.Add"/>
void ICollection<KeyValuePair<string, object>>.Add(KeyValuePair<string, object> value) => <API key>.Add(value);
/// <inheritdoc cref="ICollection{T}.Remove"/>
bool ICollection<KeyValuePair<string, object>>.Remove(KeyValuePair<string, object> value) => <API key>.Remove(value);
/// <inheritdoc cref="ICollection{T}.Contains"/>
bool ICollection<KeyValuePair<string, object>>.Contains(KeyValuePair<string, object> value) => <API key>.Contains(value);
/// <inheritdoc cref="ICollection{T}.CopyTo"/>
void ICollection<KeyValuePair<string, object>>.CopyTo(KeyValuePair<string, object>[] destination, int offset) => <API key>.CopyTo(destination, offset);
/// <inheritdoc cref="ICollection{T}.Clear"/>
void ICollection<KeyValuePair<string, object>>.Clear() => <API key>.Clear();
/// <inheritdoc />
public object this[string key]
{
get => <API key>[key];
set => <API key>[key] = value;
}
}
}
|
Clazz.declarePackage ("JU");
Clazz.load (["JU.V3"], "JU.Measure", ["java.lang.Float", "javajs.api.Interface", "JU.Lst", "$.P3", "$.P4", "$.Quat"], function () {
c$ = Clazz.declareType (JU, "Measure");
c$.computeAngle = Clazz.defineMethod (c$, "computeAngle",
function (pointA, pointB, pointC, vectorBA, vectorBC, asDegrees) {
vectorBA.sub2 (pointA, pointB);
vectorBC.sub2 (pointC, pointB);
var angle = vectorBA.angle (vectorBC);
return (asDegrees ? angle / 0.017453292 : angle);
}, "JU.T3,JU.T3,JU.T3,JU.V3,JU.V3,~B");
c$.computeAngleABC = Clazz.defineMethod (c$, "computeAngleABC",
function (pointA, pointB, pointC, asDegrees) {
var vectorBA = new JU.V3 ();
var vectorBC = new JU.V3 ();
return JU.Measure.computeAngle (pointA, pointB, pointC, vectorBA, vectorBC, asDegrees);
}, "JU.T3,JU.T3,JU.T3,~B");
c$.computeTorsion = Clazz.defineMethod (c$, "computeTorsion",
function (p1, p2, p3, p4, asDegrees) {
var ijx = p1.x - p2.x;
var ijy = p1.y - p2.y;
var ijz = p1.z - p2.z;
var kjx = p3.x - p2.x;
var kjy = p3.y - p2.y;
var kjz = p3.z - p2.z;
var klx = p3.x - p4.x;
var kly = p3.y - p4.y;
var klz = p3.z - p4.z;
var ax = ijy * kjz - ijz * kjy;
var ay = ijz * kjx - ijx * kjz;
var az = ijx * kjy - ijy * kjx;
var cx = kjy * klz - kjz * kly;
var cy = kjz * klx - kjx * klz;
var cz = kjx * kly - kjy * klx;
var ai2 = 1 / (ax * ax + ay * ay + az * az);
var ci2 = 1 / (cx * cx + cy * cy + cz * cz);
var ai = Math.sqrt (ai2);
var ci = Math.sqrt (ci2);
var denom = ai * ci;
var cross = ax * cx + ay * cy + az * cz;
var cosang = cross * denom;
if (cosang > 1) {
cosang = 1;
}if (cosang < -1) {
cosang = -1;
}var torsion = Math.acos (cosang);
var dot = ijx * cx + ijy * cy + ijz * cz;
var absDot = Math.abs (dot);
torsion = (dot / absDot > 0) ? torsion : -torsion;
return (asDegrees ? torsion / 0.017453292 : torsion);
}, "JU.T3,JU.T3,JU.T3,JU.T3,~B");
c$.computeHelicalAxis = Clazz.defineMethod (c$, "computeHelicalAxis",
function (a, b, dq) {
var vab = new JU.V3 ();
vab.sub2 (b, a);
var theta = dq.getTheta ();
var n = dq.getNormal ();
var v_dot_n = vab.dot (n);
if (Math.abs (v_dot_n) < 0.0001) v_dot_n = 0;
var va_prime_d = new JU.V3 ();
va_prime_d.cross (vab, n);
if (va_prime_d.dot (va_prime_d) != 0) va_prime_d.normalize ();
var vda = new JU.V3 ();
var vcb = JU.V3.newV (n);
if (v_dot_n == 0) v_dot_n = 1.4E-45;
vcb.scale (v_dot_n);
vda.sub2 (vcb, vab);
vda.scale (0.5);
va_prime_d.scale (theta == 0 ? 0 : (vda.length () / Math.tan (theta / 2 / 180 * 3.141592653589793)));
var r = JU.V3.newV (va_prime_d);
if (theta != 0) r.add (vda);
var pt_a_prime = JU.P3.newP (a);
pt_a_prime.sub (r);
if (v_dot_n != 1.4E-45) n.scale (v_dot_n);
var pt_b_prime = JU.P3.newP (pt_a_prime);
pt_b_prime.add (n);
theta = JU.Measure.computeTorsion (a, pt_a_prime, pt_b_prime, b, true);
if (Float.isNaN (theta) || r.length () < 0.0001) theta = dq.getThetaDirectedV (n);
var residuesPerTurn = Math.abs (theta == 0 ? 0 : 360 / theta);
var pitch = Math.abs (v_dot_n == 1.4E-45 ? 0 : n.length () * (theta == 0 ? 1 : 360 / theta));
return Clazz.newArray (-1, [pt_a_prime, n, r, JU.P3.new3 (theta, pitch, residuesPerTurn), pt_b_prime]);
}, "JU.P3,JU.P3,JU.Quat");
c$.<API key> = Clazz.defineMethod (c$, "<API key>",
function (pointA, pointB, pointC, vNorm, vAB, plane) {
var w = JU.Measure.<API key> (pointA, pointB, pointC, vNorm, vAB);
plane.set4 (vNorm.x, vNorm.y, vNorm.z, w);
return plane;
}, "JU.T3,JU.T3,JU.T3,JU.V3,JU.V3,JU.P4");
c$.<API key> = Clazz.defineMethod (c$, "<API key>",
function (pt, normal, plane) {
plane.set4 (normal.x, normal.y, normal.z, -normal.dot (pt));
}, "JU.T3,JU.V3,JU.P4");
c$.distanceToPlane = Clazz.defineMethod (c$, "distanceToPlane",
function (plane, pt) {
return (plane == null ? NaN : (plane.dot (pt) + plane.w) / Math.sqrt (plane.dot (plane)));
}, "JU.P4,JU.T3");
c$.<API key> = Clazz.defineMethod (c$, "<API key>",
function (pt, plane, ptref) {
var f = plane.dot (pt) + plane.w;
var f1 = plane.dot (ptref) + plane.w;
return Math.signum (f1) * f / Math.sqrt (plane.dot (plane));
}, "JU.P3,JU.P4,JU.P3");
c$.distanceToPlaneD = Clazz.defineMethod (c$, "distanceToPlaneD",
function (plane, d, pt) {
return (plane == null ? NaN : (plane.dot (pt) + plane.w) / d);
}, "JU.P4,~N,JU.P3");
c$.distanceToPlaneV = Clazz.defineMethod (c$, "distanceToPlaneV",
function (norm, w, pt) {
return (norm == null ? NaN : (norm.dot (pt) + w) / Math.sqrt (norm.dot (norm)));
}, "JU.V3,~N,JU.P3");
c$.<API key> = Clazz.defineMethod (c$, "<API key>",
function (pointA, pointB, pointC, vNormNorm, vAB) {
vAB.sub2 (pointB, pointA);
vNormNorm.sub2 (pointC, pointA);
vNormNorm.cross (vAB, vNormNorm);
vNormNorm.normalize ();
}, "JU.T3,JU.T3,JU.T3,JU.V3,JU.V3");
c$.<API key> = Clazz.defineMethod (c$, "<API key>",
function (pointA, pointB, pointC, ptRef, vNorm, vAB) {
var nd = JU.Measure.<API key> (pointA, pointB, pointC, vNorm, vAB);
if (ptRef != null) {
var pt0 = JU.P3.newP (pointA);
pt0.add (vNorm);
var d = pt0.distance (ptRef);
pt0.sub2 (pointA, vNorm);
if (d > pt0.distance (ptRef)) {
vNorm.scale (-1);
nd = -nd;
}}return nd;
}, "JU.T3,JU.T3,JU.T3,JU.T3,JU.V3,JU.V3");
c$.<API key> = Clazz.defineMethod (c$, "<API key>",
function (pointA, pointB, pointC, vNorm, vTemp) {
JU.Measure.<API key> (pointA, pointB, pointC, vNorm, vTemp);
vTemp.setT (pointA);
return -vTemp.dot (vNorm);
}, "JU.T3,JU.T3,JU.T3,JU.V3,JU.V3");
c$.getPlaneProjection = Clazz.defineMethod (c$, "getPlaneProjection",
function (pt, plane, ptProj, vNorm) {
var dist = JU.Measure.distanceToPlane (plane, pt);
vNorm.set (plane.x, plane.y, plane.z);
vNorm.normalize ();
vNorm.scale (-dist);
ptProj.add2 (pt, vNorm);
}, "JU.P3,JU.P4,JU.P3,JU.V3");
c$.getNormalFromCenter = Clazz.defineMethod (c$, "getNormalFromCenter",
function (ptCenter, ptA, ptB, ptC, isOutward, normal, vTemp) {
var d = JU.Measure.<API key> (ptA, ptB, ptC, normal, vTemp);
var isReversed = (JU.Measure.distanceToPlaneV (normal, d, ptCenter) > 0);
if (isReversed == isOutward) normal.scale (-1.0);
return !isReversed;
}, "JU.P3,JU.P3,JU.P3,JU.P3,~B,JU.V3,JU.V3");
c$.getNormalToLine = Clazz.defineMethod (c$, "getNormalToLine",
function (pointA, pointB, vNormNorm) {
vNormNorm.sub2 (pointA, pointB);
vNormNorm.cross (vNormNorm, JU.Measure.axisY);
vNormNorm.normalize ();
if (Float.isNaN (vNormNorm.x)) vNormNorm.set (1, 0, 0);
}, "JU.P3,JU.P3,JU.V3");
c$.getBisectingPlane = Clazz.defineMethod (c$, "getBisectingPlane",
function (pointA, vAB, ptTemp, vTemp, plane) {
ptTemp.scaleAdd2 (0.5, vAB, pointA);
vTemp.setT (vAB);
vTemp.normalize ();
JU.Measure.<API key> (ptTemp, vTemp, plane);
}, "JU.P3,JU.V3,JU.T3,JU.V3,JU.P4");
c$.projectOntoAxis = Clazz.defineMethod (c$, "projectOntoAxis",
function (point, axisA, axisUnitVector, vectorProjection) {
vectorProjection.sub2 (point, axisA);
var projectedLength = vectorProjection.dot (axisUnitVector);
point.scaleAdd2 (projectedLength, axisUnitVector, axisA);
vectorProjection.sub2 (point, axisA);
}, "JU.P3,JU.P3,JU.V3,JU.V3");
c$.<API key> = Clazz.defineMethod (c$, "<API key>",
function (points, axisA, axisUnitVector, vectorProjection, nTriesMax) {
var nPoints = points.length;
axisA.setT (points[0]);
axisUnitVector.sub2 (points[nPoints - 1], axisA);
axisUnitVector.normalize ();
JU.Measure.calcAveragePointN (points, nPoints, axisA);
var nTries = 0;
while (nTries++ < nTriesMax && JU.Measure.findAxis (points, nPoints, axisA, axisUnitVector, vectorProjection) > 0.001) {
}
var tempA = JU.P3.newP (points[0]);
JU.Measure.projectOntoAxis (tempA, axisA, axisUnitVector, vectorProjection);
axisA.setT (tempA);
}, "~A,JU.P3,JU.V3,JU.V3,~N");
c$.findAxis = Clazz.defineMethod (c$, "findAxis",
function (points, nPoints, axisA, axisUnitVector, vectorProjection) {
var sumXiYi = new JU.V3 ();
var vTemp = new JU.V3 ();
var pt = new JU.P3 ();
var ptProj = new JU.P3 ();
var a = JU.V3.newV (axisUnitVector);
var sum_Xi2 = 0;
for (var i = nPoints; --i >= 0; ) {
pt.setT (points[i]);
ptProj.setT (pt);
JU.Measure.projectOntoAxis (ptProj, axisA, axisUnitVector, vectorProjection);
vTemp.sub2 (pt, ptProj);
vTemp.cross (vectorProjection, vTemp);
sumXiYi.add (vTemp);
sum_Xi2 += vectorProjection.lengthSquared ();
}
var m = JU.V3.newV (sumXiYi);
m.scale (1 / sum_Xi2);
vTemp.cross (m, axisUnitVector);
axisUnitVector.add (vTemp);
axisUnitVector.normalize ();
vTemp.sub2 (axisUnitVector, a);
return vTemp.length ();
}, "~A,~N,JU.P3,JU.V3,JU.V3");
c$.calcAveragePoint = Clazz.defineMethod (c$, "calcAveragePoint",
function (pointA, pointB, pointC) {
pointC.set ((pointA.x + pointB.x) / 2, (pointA.y + pointB.y) / 2, (pointA.z + pointB.z) / 2);
}, "JU.P3,JU.P3,JU.P3");
c$.calcAveragePointN = Clazz.defineMethod (c$, "calcAveragePointN",
function (points, nPoints, averagePoint) {
averagePoint.setT (points[0]);
for (var i = 1; i < nPoints; i++) averagePoint.add (points[i]);
averagePoint.scale (1 / nPoints);
}, "~A,~N,JU.P3");
c$.transformPoints = Clazz.defineMethod (c$, "transformPoints",
function (vPts, m4, center) {
var v = new JU.Lst ();
for (var i = 0; i < vPts.size (); i++) {
var pt = JU.P3.newP (vPts.get (i));
pt.sub (center);
m4.rotTrans (pt);
pt.add (center);
v.addLast (pt);
}
return v;
}, "JU.Lst,JU.M4,JU.P3");
c$.isInTetrahedron = Clazz.defineMethod (c$, "isInTetrahedron",
function (pt, ptA, ptB, ptC, ptD, plane, vTemp, vTemp2, fullyEnclosed) {
var b = (JU.Measure.distanceToPlane (JU.Measure.<API key> (ptC, ptD, ptA, vTemp, vTemp2, plane), pt) >= 0);
if (b != (JU.Measure.distanceToPlane (JU.Measure.<API key> (ptA, ptD, ptB, vTemp, vTemp2, plane), pt) >= 0)) return false;
if (b != (JU.Measure.distanceToPlane (JU.Measure.<API key> (ptB, ptD, ptC, vTemp, vTemp2, plane), pt) >= 0)) return false;
var d = JU.Measure.distanceToPlane (JU.Measure.<API key> (ptA, ptB, ptC, vTemp, vTemp2, plane), pt);
if (fullyEnclosed) return (b == (d >= 0));
var d1 = JU.Measure.distanceToPlane (plane, ptD);
return d1 * d <= 0 || Math.abs (d1) > Math.abs (d);
}, "JU.P3,JU.P3,JU.P3,JU.P3,JU.P3,JU.P4,JU.V3,JU.V3,~B");
c$.getIntersectionPP = Clazz.defineMethod (c$, "getIntersectionPP",
function (plane1, plane2) {
var a1 = plane1.x;
var b1 = plane1.y;
var c1 = plane1.z;
var d1 = plane1.w;
var a2 = plane2.x;
var b2 = plane2.y;
var c2 = plane2.z;
var d2 = plane2.w;
var norm1 = JU.V3.new3 (a1, b1, c1);
var norm2 = JU.V3.new3 (a2, b2, c2);
var nxn = new JU.V3 ();
nxn.cross (norm1, norm2);
var ax = Math.abs (nxn.x);
var ay = Math.abs (nxn.y);
var az = Math.abs (nxn.z);
var x;
var y;
var z;
var diff;
var type = (ax > ay ? (ax > az ? 1 : 3) : ay > az ? 2 : 3);
switch (type) {
case 1:
x = 0;
diff = (b1 * c2 - b2 * c1);
if (Math.abs (diff) < 0.01) return null;
y = (c1 * d2 - c2 * d1) / diff;
z = (b2 * d1 - d2 * b1) / diff;
break;
case 2:
diff = (a1 * c2 - a2 * c1);
if (Math.abs (diff) < 0.01) return null;
x = (c1 * d2 - c2 * d1) / diff;
y = 0;
z = (a2 * d1 - d2 * a1) / diff;
break;
case 3:
default:
diff = (a1 * b2 - a2 * b1);
if (Math.abs (diff) < 0.01) return null;
x = (b1 * d2 - b2 * d1) / diff;
y = (a2 * d1 - d2 * a1) / diff;
z = 0;
}
var list = new JU.Lst ();
list.addLast (JU.P3.new3 (x, y, z));
nxn.normalize ();
list.addLast (nxn);
return list;
}, "JU.P4,JU.P4");
c$.getIntersection = Clazz.defineMethod (c$, "getIntersection",
function (pt1, v, plane, ptRet, tempNorm, vTemp) {
JU.Measure.getPlaneProjection (pt1, plane, ptRet, tempNorm);
tempNorm.set (plane.x, plane.y, plane.z);
tempNorm.normalize ();
if (v == null) v = JU.V3.newV (tempNorm);
var l_dot_n = v.dot (tempNorm);
if (Math.abs (l_dot_n) < 0.01) return null;
vTemp.sub2 (ptRet, pt1);
ptRet.scaleAdd2 (vTemp.dot (tempNorm) / l_dot_n, v, pt1);
return ptRet;
}, "JU.P3,JU.V3,JU.P4,JU.P3,JU.V3,JU.V3");
c$.<API key> = Clazz.defineMethod (c$, "<API key>",
function (centerAndPoints, retStddev) {
retStddev[1] = NaN;
var q = new JU.Quat ();
if (centerAndPoints[0].length == 1 || centerAndPoints[0].length != centerAndPoints[1].length) return q;
var n = centerAndPoints[0].length - 1;
if (n < 2) return q;
var Sxx = 0;
var Sxy = 0;
var Sxz = 0;
var Syx = 0;
var Syy = 0;
var Syz = 0;
var Szx = 0;
var Szy = 0;
var Szz = 0;
var ptA = new JU.P3 ();
var ptB = new JU.P3 ();
for (var i = n + 1; --i >= 1; ) {
var aij = centerAndPoints[0][i];
var bij = centerAndPoints[1][i];
ptA.sub2 (aij, centerAndPoints[0][0]);
ptB.sub2 (bij, centerAndPoints[0][1]);
Sxx += ptA.x * ptB.x;
Sxy += ptA.x * ptB.y;
Sxz += ptA.x * ptB.z;
Syx += ptA.y * ptB.x;
Syy += ptA.y * ptB.y;
Syz += ptA.y * ptB.z;
Szx += ptA.z * ptB.x;
Szy += ptA.z * ptB.y;
Szz += ptA.z * ptB.z;
}
retStddev[0] = JU.Measure.getRmsd (centerAndPoints, q);
var N = Clazz.newDoubleArray (4, 4, 0);
N[0][0] = Sxx + Syy + Szz;
N[0][1] = N[1][0] = Syz - Szy;
N[0][2] = N[2][0] = Szx - Sxz;
N[0][3] = N[3][0] = Sxy - Syx;
N[1][1] = Sxx - Syy - Szz;
N[1][2] = N[2][1] = Sxy + Syx;
N[1][3] = N[3][1] = Szx + Sxz;
N[2][2] = -Sxx + Syy - Szz;
N[2][3] = N[3][2] = Syz + Szy;
N[3][3] = -Sxx - Syy + Szz;
var v = (javajs.api.Interface.getInterface ("JU.Eigen")).setM (N).<API key> ()[3];
q = JU.Quat.newP4 (JU.P4.new4 (v[1], v[2], v[3], v[0]));
retStddev[1] = JU.Measure.getRmsd (centerAndPoints, q);
return q;
}, "~A,~A");
c$.getTransformMatrix4 = Clazz.defineMethod (c$, "getTransformMatrix4",
function (ptsA, ptsB, m, centerA) {
var cptsA = JU.Measure.getCenterAndPoints (ptsA);
var cptsB = JU.Measure.getCenterAndPoints (ptsB);
var retStddev = Clazz.newFloatArray (2, 0);
var q = JU.Measure.<API key> ( Clazz.newArray (-1, [cptsA, cptsB]), retStddev);
var r = q.getMatrix ();
if (centerA == null) r.rotate (cptsA[0]);
else centerA.setT (cptsA[0]);
var t = JU.V3.newVsub (cptsB[0], cptsA[0]);
m.setMV (r, t);
return retStddev[1];
}, "JU.Lst,JU.Lst,JU.M4,JU.P3");
c$.getCenterAndPoints = Clazz.defineMethod (c$, "getCenterAndPoints",
function (vPts) {
var n = vPts.size ();
var pts = new Array (n + 1);
pts[0] = new JU.P3 ();
if (n > 0) {
for (var i = 0; i < n; i++) {
pts[0].add (pts[i + 1] = vPts.get (i));
}
pts[0].scale (1 / n);
}return pts;
}, "JU.Lst");
c$.getRmsd = Clazz.defineMethod (c$, "getRmsd",
function (centerAndPoints, q) {
var sum2 = 0;
var ptsA = centerAndPoints[0];
var ptsB = centerAndPoints[1];
var cA = ptsA[0];
var cB = ptsB[0];
var n = ptsA.length - 1;
var ptAnew = new JU.P3 ();
for (var i = n + 1; --i >= 1; ) {
ptAnew.sub2 (ptsA[i], cA);
q.transform2 (ptAnew, ptAnew).add (cB);
sum2 += ptAnew.distanceSquared (ptsB[i]);
}
return Math.sqrt (sum2 / n);
}, "~A,JU.Quat");
Clazz.defineStatics (c$,
"radiansPerDegree", (0.017453292519943295));
c$.axisY = c$.prototype.axisY = JU.V3.new3 (0, 1, 0);
});
|
<?php
namespace Magento\TestModuleMSC\Model;
use Magento\TestModuleMSC\Api\Data\<API key>;
use Magento\TestModuleMSC\Api\Data\<API key>;
/**
 * Test double serving both SOAP and REST web-api integration tests for the
 * Magento TestModuleMSC module.  Returns deterministic fixture items.
 * NOTE(review): several class/identifier names were redacted in this dump
 * ("<API key>"); confirm against the original module before editing.
 */
class AllSoapAndRest implements \Magento\TestModuleMSC\Api\<API key>
{
/**
 * Factory producing item data objects.
 *
 * @var <API key>
 */
protected $itemDataFactory;
/**
 * @var <API key>
 */
protected $<API key>;
/**
 * @param <API key> $itemDataFactory
 * @param <API key> $<API key>
 */
public function __construct(
<API key> $itemDataFactory,
<API key> $<API key>
) {
$this->itemDataFactory = $itemDataFactory;
$this-><API key> = $<API key>;
}
/**
 * Return a fixture item with the requested id and a fixed name.
 * {@inheritdoc}
 */
public function item($itemId)
{
return $this->itemDataFactory->create()->setItemId($itemId)->setName('testProduct1');
}
/**
 * Return a fixed two-item fixture list.
 * {@inheritdoc}
 */
public function items()
{
$result1 = $this->itemDataFactory->create()->setItemId(1)->setName('testProduct1');
$result2 = $this->itemDataFactory->create()->setItemId(2)->setName('testProduct2');
return [$result1, $result2];
}
/**
 * Create an item with a pseudo-random id (rand() suffices for test data;
 * uniqueness is not guaranteed).
 * {@inheritdoc}
 */
public function create($name)
{
return $this->itemDataFactory->create()->setItemId(rand())->setName($name);
}
/**
 * Simulate an update by echoing the entity back with its name prefixed.
 * {@inheritdoc}
 */
public function update(\Magento\TestModuleMSC\Api\Data\ItemInterface $entityItem)
{
return $this->itemDataFactory->create()->setItemId($entityItem->getItemId())
->setName('Updated' . $entityItem->getName());
}
// Exercises optional-parameter handling: a null name maps to 'No Name'.
public function testOptionalParam($name = null)
{
if ($name === null) {
return $this->itemDataFactory->create()->setItemId(3)->setName('No Name');
} else {
return $this->itemDataFactory->create()->setItemId(3)->setName($name);
}
}
/**
 * Identity pass-through used to test (de)serialization of item payloads.
 * {@inheritdoc}
 */
public function itemAnyType(\Magento\TestModuleMSC\Api\Data\ItemInterface $entityItem)
{
return $entityItem;
}
/**
 * Build an item carrying nested custom attributes (one object-valued, one
 * string-valued) to exercise custom-attribute serialization.
 * {@inheritdoc}
 */
public function <API key>()
{
$<API key> = $this-><API key>->create()
->setName('nameValue')
->setCustomAttribute('<API key>', 1);
$item = $this->itemDataFactory->create()
->setItemId(1)
->setName('testProductAnyType')
->setCustomAttribute('<API key>', $<API key>)
->setCustomAttribute('<API key>', 'someStringValue');
return $item;
}
}
|
# This file is auto-generated by the Perl DateTime Suite time zone
# code generator (0.07) This code generator comes with the
# DateTime::TimeZone module distribution in the tools/ directory
# Generated from /tmp/rnClxBLdxJ/northamerica. Olson data version 2013a
# Do not edit this file directly.
# Singleton time-zone class for America/St_Lucia. This file is machine
# generated from the Olson database (see header above); do not hand-edit
# the span data -- regenerate it instead.
package DateTime::TimeZone::America::St_Lucia;
{
$DateTime::TimeZone::America::St_Lucia::VERSION = '1.57';
}
use strict;
use Class::Singleton 1.03;
use DateTime::TimeZone;
use DateTime::TimeZone::OlsonDB;
@DateTime::TimeZone::America::St_Lucia::ISA = ( 'Class::Singleton', 'DateTime::TimeZone' );
# Each span is:
#   [ utc_start, utc_end, local_start, local_end, offset_seconds, is_dst, abbreviation ]
# with times as RD seconds; NEG_INFINITY/INFINITY bound the open ends.
my $spans =
[
[
DateTime::TimeZone::NEG_INFINITY, # utc_start
59611176240, # utc_end 1890-01-01 04:04:00 (Wed)
DateTime::TimeZone::NEG_INFINITY, # local_start
59611161600, # local_end 1890-01-01 00:00:00 (Wed)
-14640,
0,
'LMT',
],
[
59611176240, # utc_start 1890-01-01 04:04:00 (Wed)
60305313840, # utc_end 1912-01-01 04:04:00 (Mon)
59611161600, # local_start 1890-01-01 00:00:00 (Wed)
60305299200, # local_end 1912-01-01 00:00:00 (Mon)
-14640,
0,
'CMT',
],
[
60305313840, # utc_start 1912-01-01 04:04:00 (Mon)
DateTime::TimeZone::INFINITY, # utc_end
60305299440, # local_start 1912-01-01 00:04:00 (Mon)
DateTime::TimeZone::INFINITY, # local_end
-14400,
0,
'AST',
],
];
# Olson database release these spans were generated from.
sub olson_version { '2013a' }
# St. Lucia has never observed daylight saving time.
sub has_dst_changes { 0 }
sub _max_year { 2023 }
# Class::Singleton hook: construct the one instance with the spans above.
sub _new_instance
{
return shift->_init( @_, spans => $spans );
}
1;
|
import Ember from 'ember';
export default Ember.Object.extend({
  // Keyed storage for arbitrary objects, with a bound length counter.
  //
  // BUG FIX: the original declared `content: {}` directly in extend().
  // Properties declared in extend() live on the class prototype, so that
  // single object would be SHARED by every instance of this class -- adding
  // to one store would make the object visible in all of them. The hash is
  // now created per-instance in init().
  content: null,
  contentLength: 0,

  init: function() {
    this._super.apply(this, arguments);
    // Fresh, instance-private hash.
    this.set('content', {});
  },

  // Store `obj` under a newly generated key and return that key.
  add: function(obj) {
    var id = this.generateId();
    this.get('content')[id] = obj;
    this.incrementProperty("contentLength");
    return id;
  },

  // Look up the object stored under `key`; throws if the key is unknown.
  getObj: function(key) {
    var res = this.get('content')[key];
    if (!res) {
      // Preserved as a string throw: existing callers may match on it.
      throw "no obj for key "+key;
    }
    return res;
  },

  // Random numeric id rendered as a decimal string.
  // NOTE(review): not collision-checked -- presumably acceptable for the
  // expected store sizes; confirm against callers.
  generateId: function() {
    var num = Math.random() * 1000000000000.0;
    num = parseInt(num);
    num = ""+num;
    return num;
  },

  // All keys currently in the store, as an Ember array.
  keys: function() {
    var res = [];
    for (var key in this.get('content')) {
      res.push(key);
    }
    return Ember.A(res);
  },

  lengthBinding: "contentLength"
});
|
# Image Patches Differential Optical Flow Rotation/Scale
#
# Compares each patch of the current frame against the same patch of the
# previous frame to estimate rotation/scale (logpolar phase correlation).
# X/Y translation is NOT measured in this mode. Working per-patch instead
# of on the whole frame yields a grid of independent results.
#
# NOTE: the surface needs visible edges/features; a featureless surface
# produces noise. Mount the camera on a steady base and SLOWLY rotate it
# around the lens axis or move it forward/backward (Z only) to see the
# vectors change.
import sensor, image, time, math

# Patch dimensions -- must be powers of 2 because find_displacement() is
# FFT-based phase correlation; non-power-of-2 sizes require padding that
# degrades the result. Same reason the frame size below is B128X128
# (supported power-of-2 sizes: 64x32, 64x64, 128x64, 128x128; a 64x64
# image pooled 2x2 gives 32x32).
PATCH_W = 16
PATCH_H = 16

sensor.reset()                          # Reset and initialize the sensor.
sensor.set_pixformat(sensor.GRAYSCALE)  # GRAYSCALE (or RGB565).
sensor.set_framesize(sensor.B128X128)   # 128x128 (or 128x64).
sensor.skip_frames(time = 2000)         # Let settings take effect.
clock = time.clock()                    # FPS tracker.

# The previous frame is kept in a second frame buffer carved out of the
# main frame buffer's RAM (much larger than the MicroPython heap) -- but
# this leaves less RAM for other algorithms, so watch for OOM.
extra_fb = sensor.alloc_extra_fb(sensor.width(), sensor.height(), sensor.GRAYSCALE)
extra_fb.replace(sensor.snapshot())

while(True):
    clock.tick()                  # Milliseconds since the last snapshot.
    frame = sensor.snapshot()     # Grab the current frame.

    for top in range(0, sensor.height(), PATCH_H):
        for left in range(0, sensor.width(), PATCH_W):
            patch = (left, top, PATCH_W, PATCH_H)
            disp = extra_fb.find_displacement(frame, logpolar=True, \
                       roi = patch, template_roi = patch)

            cx = left + (PATCH_W//2)
            cy = top + (PATCH_H//2)
            # Responses below ~0.1 (YMMV) are just noise.
            if(disp.response() > 0.1):
                theta = disp.rotation()
                zoom = 1.0 + disp.scale()
                tip_x = cx + int(math.sin(theta) * zoom * (PATCH_W//4))
                tip_y = cy + int(math.cos(theta) * zoom * (PATCH_H//4))
                frame.draw_line((cx, cy, tip_x, tip_y), \
                                color = 255)
            else:
                # Mark low-confidence patches with a black dot.
                frame.draw_line((cx, cy, cx, cy), \
                                color = 0)

    extra_fb.replace(frame)       # Current frame becomes the template.
    print(clock.fps())
|
import { Observable } from 'rxjs/Observable';
/**
* @name Keyboard
* @description
* @usage
* ```typescript
* import { Keyboard } from 'ionic-native';
*
*
*
* ```
*/
// Ambient declarations for the ionic-native Keyboard plugin wrapper.
// NOTE(review): one method name below was redacted to the literal text
// "<API key>" in this copy; judging by its doc comment and signature it is
// the accessory-bar toggle -- restore the original name before use.
export declare class Keyboard {
/**
* Hide (or show) the keyboard accessory bar -- the row with the next,
* previous and done buttons.
* @param hide {boolean} true to hide the bar, false to show it
*/
static <API key>(hide: boolean): void;
/**
* Force the keyboard to be shown.
*/
static show(): void;
/**
* Close the keyboard if it is open.
*/
static close(): void;
/**
* Prevents the native UIScrollView from moving when an input is focused.
* @param disable {boolean} true to disable scrolling
*/
static disableScroll(disable: boolean): void;
/**
* Creates an observable that notifies you when the keyboard is shown.
* Unsubscribe from the observable to cancel the event watch.
* @returns {Observable<any>}
*/
static onKeyboardShow(): Observable<any>;
/**
* Creates an observable that notifies you when the keyboard is hidden.
* Unsubscribe from the observable to cancel the event watch.
* @returns {Observable<any>}
*/
static onKeyboardHide(): Observable<any>;
}
|
<!-- BUG FIX: the original DOCTYPE was truncated (the system identifier read
     just `"http:` with no URL and no closing `">`), leaving the document
     without a valid prolog. Restored the standard HTML 4.01 Transitional
     DTD reference. -->
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
  "http://www.w3.org/TR/html4/loose.dtd">
<html>
<head>
  <title>Jasmine Spec Runner</title>
  <link rel="shortcut icon" type="image/png" href="lib/jasmine-1.3.1/jasmine_favicon.png">
  <link rel="stylesheet" type="text/css" href="lib/jasmine-1.3.1/jasmine.css">
  <script type="text/javascript" src="lib/jasmine-1.3.1/jasmine.js"></script>
  <script type="text/javascript" src="lib/jasmine-1.3.1/jasmine-html.js"></script>
  <!-- include source files here... -->
  <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.8.3/jquery.min.js"></script>
  <script src="https://ajax.googleapis.com/ajax/libs/jqueryui/1.9.2/jquery-ui.min.js"></script>
  <script src="https://cdn.firebase.com/js/client/1.1.0/firebase.js"></script>
  <!-- NOTE(review): the filename below was redacted to "<API key>" in this
       copy; restore the original staging script name before running. -->
  <script src="https://cdn-staging.firebase.com/v0/<API key>.js"></script>
  <script src="../www/js/firefeed.js"></script>
  <!-- include spec files here... -->
  <script type="text/javascript" src="spec/helper.js"></script>
  <script type="text/javascript" src="spec/initialization.js"></script>
  <script type="text/javascript" src="spec/operations.js"></script>
  <script type="text/javascript" src="spec/events.js"></script>
  <script type="text/javascript">
    (function() {
      var jasmineEnv = jasmine.getEnv();
      jasmineEnv.updateInterval = 1000;
      var htmlReporter = new jasmine.HtmlReporter();
      jasmineEnv.addReporter(htmlReporter);
      jasmineEnv.specFilter = function(spec) {
        return htmlReporter.specFilter(spec);
      };
      // Chain onto any pre-existing onload handler before running the suite.
      var currentWindowOnload = window.onload;
      window.onload = function() {
        if (currentWindowOnload) {
          currentWindowOnload();
        }
        execJasmine();
      };
      function execJasmine() {
        jasmineEnv.execute();
      }
    })();
  </script>
</head>
<body>
</body>
</html>
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.