answer
stringlengths 15
1.25M
|
|---|
#ifndef <API key>
#define <API key>
#include <jni.h>
#include <string>
#include "base/base_export.h"
#include "base/memory/singleton.h"
namespace base {
namespace android {
// This enumeration maps to the values returned by BuildInfo::sdk_int(),
// indicating the Android release associated with a given SDK version.
enum SdkVersion {
  <API key> = 14,      // Ice Cream Sandwich
  <API key> = 15,      // Ice Cream Sandwich MR1
  <API key> = 16,      // Jelly Bean
  <API key> = 17,      // Jelly Bean MR1
  <API key> = 18,      // Jelly Bean MR2
  SDK_VERSION_KITKAT = 19,
  <API key> = 20,      // KitKat Watch
  <API key> = 21,      // Lollipop
  <API key> = 22      // Lollipop MR1
};
// BuildInfo is a singleton class that stores android build and device
// information. It will be called from Android specific code and gets used
// primarily in crash reporting.
// It is also used to store the last java exception seen during JNI.
// TODO(nileshagrawal): Find a better place to store this info.
class BASE_EXPORT BuildInfo {
 public:
  ~BuildInfo() {}

  // Static factory method for getting the singleton BuildInfo instance.
  // Note that ownership is not conferred on the caller and the BuildInfo in
  // question isn't actually freed until shutdown. This is ok because there
  // should only be one instance of BuildInfo ever created.
  static BuildInfo* GetInstance();

  // Const char* is used instead of std::strings because these values must be
  // available even if the process is in a crash state. Sadly
  // std::string.c_str() doesn't guarantee that memory won't be allocated when
  // it is called.
  //
  // NOTE(review): the accessors below return fields captured once at
  // construction time (presumably from the Java android.os.Build constants
  // via JNI -- confirm in the corresponding .cc file).
  const char* device() const {
    return device_;
  }
  const char* manufacturer() const {
    return manufacturer_;
  }
  const char* model() const {
    return model_;
  }
  const char* brand() const {
    return brand_;
  }
  const char* android_build_id() const {
    return android_build_id_;
  }
  const char* android_build_fp() const {
    return android_build_fp_;
  }
  const char* <API key>() const {
    return <API key>;
  }
  const char* <API key>() const {
    return <API key>;
  }
  const char* package_label() const {
    return package_label_;
  }
  const char* package_name() const {
    return package_name_;
  }
  const char* build_type() const {
    return build_type_;
  }
  int sdk_int() const {
    return sdk_int_;
  }
  int <API key>() const {
    return <API key>;
  }
  // Last java exception seen during JNI, or null if none has been recorded.
  const char* java_exception_info() const {
    return <API key>;
  }

  void <API key>(const std::string& info);

  void <API key>();

  // Registers the JNI bindings for this class; returns false on failure.
  static bool RegisterBindings(JNIEnv* env);

 private:
  friend struct <API key>;

  explicit BuildInfo(JNIEnv* env);

  // Const char* is used instead of std::strings because these values must be
  // available even if the process is in a crash state. Sadly
  // std::string.c_str() doesn't guarantee that memory won't be allocated when
  // it is called.
  const char* const device_;
  const char* const manufacturer_;
  const char* const model_;
  const char* const brand_;
  const char* const android_build_id_;
  const char* const android_build_fp_;
  const char* const <API key>;
  const char* const <API key>;
  const char* const package_label_;
  const char* const package_name_;
  const char* const build_type_;
  const int sdk_int_;
  const bool <API key>;

  // This is set via <API key>, not at constructor time.
  const char* <API key>;

  <API key>(BuildInfo);
};
} // namespace android
} // namespace base
#endif // <API key>
|
#include "FLAME.h"
/* Scan the superdiagonal e of a bidiagonal matrix (diagonal d) for an
   exact zero, which allows the bidiagonal SVD problem to be deflated
   into two independent subproblems.

   Dispatches on the real datatype of d and returns the typed kernel's
   result: the split index (i+1) if a zero superdiagonal element is
   found, otherwise FLA_FAILURE.  Unhandled datatypes fall through and
   return FLA_SUCCESS. */
FLA_Error FLA_Bsvd_find_split( FLA_Obj d, FLA_Obj e )
{
  FLA_Datatype datatype;
  int          m_A;
  int          inc_d;
  int          inc_e;
  FLA_Error    r_val = FLA_SUCCESS;

  datatype = FLA_Obj_datatype( d );

  m_A   = FLA_Obj_vector_dim( d );
  inc_d = FLA_Obj_vector_inc( d );
  inc_e = FLA_Obj_vector_inc( e );

  switch ( datatype )
  {
    case FLA_FLOAT:
    {
      float* buff_d = FLA_FLOAT_PTR( d );
      float* buff_e = FLA_FLOAT_PTR( e );

      /* Bug fix: the kernel's return value (split index or FLA_FAILURE)
         was previously discarded and FLA_SUCCESS returned
         unconditionally, so callers could never learn the split point. */
      r_val = <API key>( m_A,
                         buff_d, inc_d,
                         buff_e, inc_e );
      break;
    }
    case FLA_DOUBLE:
    {
      double* buff_d = FLA_DOUBLE_PTR( d );
      double* buff_e = FLA_DOUBLE_PTR( e );

      r_val = <API key>( m_A,
                         buff_d, inc_d,
                         buff_e, inc_e );
      break;
    }
  }

  return r_val;
}
/* Single-precision kernel for FLA_Bsvd_find_split.
   NOTE(review): the body only performs a (redacted) call and never
   inspects the superdiagonal -- presumably an FLA_Check_error_code-style
   "not yet implemented" hook; confirm before relying on the float path. */
FLA_Error <API key>( int m_A,
                     float* buff_d, int inc_d,
                     float* buff_e, int inc_e )
{
  <API key>( <API key> );

  return FLA_SUCCESS;
}
/* Double-precision kernel: scan the superdiagonal e for an exact zero.
   Returns i+1 -- the index of the diagonal element that begins the
   second, independent subproblem -- for the first zero e_i found, or
   FLA_FAILURE when no split exists.  buff_d/inc_d are accepted for
   interface symmetry but are not read. */
FLA_Error <API key>( int m_A,
                     double* buff_d, int inc_d,
                     double* buff_e, int inc_e )
{
  int i;

  for ( i = 0; i < m_A - 1; ++i )
  {
    double* epsilon1 = buff_e + (i  )*inc_e;

    /* An exactly-zero superdiagonal element decouples the problem. */
    if ( *epsilon1 == 0.0 )
    {
      // Return index of split as i+1 since e_i is in the same
      // column as d_(i+1).
      return i + 1;
    }
  }

  // Return with no split found.
  return FLA_FAILURE;
}
|
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
<title>A quick animation test</title>
</head>
<body>
<table>
<tr>
<td align="center">SVG Image</td>
<td align="center">Reference Image</td>
</tr>
<tr>
<td align="right">
<object data="../svggen/animate-elem-20-t.svg" width="480" height="360" type="image/svg+xml"/>
</td>
<td align="left">
<img alt="raster image of animate-elem-20-t" src="../png/<API key>.png" width="480" height="360"/>
</td>
</tr>
</table>
<p>
Click on "fade in", after completed animation compare with the reference image and then click on "fade out".
With a second click on "fade in" the red square goes from white to red, and then goes back from red to white.
</p>
</body>
</html>
|
#ifndef TALK_XMPP_JID_H_
#define TALK_XMPP_JID_H_
#include <string>
#include "webrtc/libjingle/xmllite/xmlconstants.h"
#include "webrtc/base/basictypes.h"
namespace buzz {
// The Jid class encapsulates and provides parsing help for Jids. A Jid
// consists of three parts: the node, the domain and the resource, e.g.:
// node@domain/resource
// The node and resource are both optional. A valid jid is defined to have
// a domain. A bare jid is defined to not have a resource and a full jid
// *does* have a resource.
class Jid {
 public:
  // Creates an empty (invalid) jid.  (`explicit` removed: it has no
  // effect on a default constructor and was misleading.)
  Jid();
  // Parses |jid_string| of the form "node@domain/resource".
  explicit Jid(const std::string& jid_string);
  // Builds a jid from its three parts.  (`explicit` removed: it is
  // meaningless on a multi-argument constructor here.)
  Jid(const std::string& node_name,
      const std::string& domain_name,
      const std::string& resource_name);
  ~Jid();

  const std::string & node() const { return node_name_; }
  const std::string & domain() const { return domain_name_; }
  const std::string & resource() const { return resource_name_; }

  // Serializes the jid back into string form.
  std::string Str() const;
  // Returns this jid without its resource part.
  Jid BareJid() const;

  bool IsEmpty() const;
  bool IsValid() const;
  bool IsBare() const;
  bool IsFull() const;

  // Equality ignoring the resource part (presumably node+domain only --
  // confirm in the implementation).
  bool BareEquals(const Jid& other) const;
  void CopyFrom(const Jid& jid);

  bool operator==(const Jid& other) const;
  bool operator!=(const Jid& other) const { return !operator==(other); }
  bool operator<(const Jid& other) const { return Compare(other) < 0; }
  bool operator>(const Jid& other) const { return Compare(other) > 0; }
  // Three-way comparison: negative, zero or positive ordering result.
  int Compare(const Jid & other) const;

 private:
  void ValidateOrReset();

  static std::string PrepNode(const std::string& node, bool* valid);
  static char PrepNodeAscii(char ch, bool* valid);
  static std::string PrepResource(const std::string& start, bool* valid);
  static char PrepResourceAscii(char ch, bool* valid);
  static std::string PrepDomain(const std::string& domain, bool* valid);
  static void PrepDomain(const std::string& domain,
                         std::string* buf, bool* valid);
  static void PrepDomainLabel(
      std::string::const_iterator start, std::string::const_iterator end,
      std::string* buf, bool* valid);
  static char <API key>(char ch, bool *valid);

  std::string node_name_;
  std::string domain_name_;
  std::string resource_name_;
};
}
#endif // TALK_XMPP_JID_H_
|
#!/usr/bin/python
# encoding: utf-8
# Jan 2011 (markus kossner) Cleaned up the code, added some documentation
# somwhere around Aug 2008 (markus kossner) created
# This script extracts the molecular framework for a database of molecules.
# You can use two modes (hard coded):
# - Scaff: The molecular frame is extracted
# - RedScaff: All linking chains between rings are deleted. The rings are directly connected.
# You can comment in/out the code snippets indicated by the comments
# to force each atom of the frame to be a Carbon.
# Usage: Frames.py <database.sdf>
# Output:
# - sd files containing all molecules belonging to one frame (1.sdf, 2.sdf etc)
# - frames.smi containing the (caninical) smiles and count of occurrence
from __future__ import print_function
import os,sys
from Chem import AllChem as Chem
def flatten(x):
    """flatten(sequence) -> list

    Return a single, flat list which contains all elements retrieved
    from the sequence and all nested sub-sequences (iterables).

    Examples:
    >>> flatten([1, 2, [3, 4], (5, 6)])
    [1, 2, 3, 4, 5, 6]
    >>> flatten([[[1, 2, 3], (42, None)], [4, 5], [6], 7])
    [1, 2, 3, 42, None, 4, 5, 6, 7]
    """
    result = []
    for el in x:
        # Strings are iterable but must be kept atomic, otherwise they
        # would explode into single characters.  Bug fix: the original
        # tested `basestring`, which does not exist on Python 3 even
        # though the file already targets the print() function;
        # (str, bytes) behaves correctly on both major versions.
        if hasattr(el, "__iter__") and not isinstance(el, (str, bytes)):
            result.extend(flatten(el))
        else:
            result.append(el)
    return result
def GetFrame(mol, mode='Scaff'):
    '''Return a generic molecule defining the reduced scaffold of the input mol.

    mode can be 'Scaff' or 'RedScaff':
    Scaff    -> chop off the side chains and return the scaffold
    RedScaff -> remove all linking chains and connect the rings
                directly at the atoms where the linker was
    '''
    # Partition the atoms of the molecule into ring atoms and non-ring atoms.
    ring = mol.GetRingInfo()
    RingAtoms = flatten(ring.AtomRings())
    NonRingAtoms = [ atom.GetIdx() for atom in mol.GetAtoms() if atom.GetIdx() not in RingAtoms ]
    RingNeighbors = []
    Paths = []
    for NonRingAtom in NonRingAtoms:
        for neighbor in mol.GetAtomWithIdx(NonRingAtom).GetNeighbors():
            if neighbor.GetIdx() in RingAtoms:
                RingNeighbors.append(NonRingAtom)
                # The ring atoms having a non-ring neighbor are the starts of a walk.
                Paths.append([neighbor.GetIdx(),NonRingAtom])
                break
    # Only these atoms are potential starting points of a linker chain.
    PosConnectors = [x for x in NonRingAtoms if x not in RingNeighbors]
    # The frame initially contains every ring atom; linker paths that
    # reach another ring are appended below.
    Framework = [ x for x in RingAtoms ]
    Linkers = []
    # Breadth-first walk outward from each ring attachment point through
    # non-ring atoms.  A path that reaches a ring atom again is a linker.
    while len(Paths)>0:
        NewPaths = []
        for P in Paths:
            if P == None:
                # Defensive guard kept from the original; not expected to fire.
                print('ooh')
            else:
                for neighbor in mol.GetAtomWithIdx(P[-1]).GetNeighbors():
                    if neighbor.GetIdx() not in P:
                        if neighbor.GetIdx() in NonRingAtoms:
                            # Extend the walk through another linker atom.
                            n = P[:]
                            n.append(neighbor.GetIdx())
                            NewPaths.append(n[:])
                        elif neighbor.GetIdx() in RingAtoms:
                            # The walk reached another ring: P is a complete linker.
                            n = P[:]
                            n.append(neighbor.GetIdx())
                            Linkers.append(n)
                            Framework=Framework+P[:]
        Paths = NewPaths[:]
    if mode == 'RedScaff':
        Framework = list(set(Framework))
        todel = []
        # Delete in descending index order so earlier removals do not
        # shift the indices of atoms still to be removed.
        NonRingAtoms.sort(reverse=True)
        em = Chem.EditableMol(mol)
        # Directly bond the two ring endpoints of each linker, skipping
        # duplicate endpoint pairs.
        BondsToAdd = [ sorted([i[0],i[-1]]) for i in Linkers ]
        mem = []
        for i in BondsToAdd:
            if i not in mem:
                em.AddBond(i[0],i[1],Chem.BondType.SINGLE)
                mem.append(i)
        for i in NonRingAtoms:
            todel.append(i)
        for i in todel:
            em.RemoveAtom(i)
        m = em.GetMol()
        # Now do the flattening of atoms and bonds!
        # Any heavy atom will become a carbon and any bond will become a single bond! #
        # for atom in m.GetAtoms(): #
        #     atom.SetAtomicNum(6) #
        #     atom.SetFormalCharge(0) #
        # for bond in m.GetBonds(): #
        #     bond.SetBondType(Chem.BondType.SINGLE) #
        # Chem.SanitizeMol(m) #
        return m
    if mode == 'Scaff':
        Framework = list(set(Framework))
        todel = []
        NonRingAtoms.sort(reverse=True)
        # Keep linker atoms that ended up in the frame; delete the rest.
        for i in NonRingAtoms:
            if i != None:
                if i not in Framework:
                    todel.append(i)
        em = Chem.EditableMol(mol)
        for i in todel:
            em.RemoveAtom(i)
        m = em.GetMol()
        # Now do the flattening of atoms and bonds!
        # Any heavy atom will become a carbon and any bond will become a single bond! #
        # for atom in m.GetAtoms(): #
        #     atom.SetAtomicNum(6) #
        #     atom.SetFormalCharge(0) #
        # for bond in m.GetBonds(): #
        #     bond.SetBondType(Chem.BondType.SINGLE) #
        # Chem.SanitizeMol(m) #
        return m
if __name__=='__main__':
    if len(sys.argv) < 2:
        print("No input file provided: Frames.py filetosprocess.ext")
        sys.exit(1)

    suppl = Chem.SDMolSupplier(sys.argv[1])

    # Group the input molecules by the canonical smiles of their frame.
    FrameDict = {}
    for mol in suppl:
        if mol is None:
            # SDMolSupplier yields None for records it cannot parse.
            continue
        m = GetFrame(mol)
        cansmiles = Chem.MolToSmiles(m, isomericSmiles=True)
        # Bug fix: dict.has_key() was removed in Python 3; use `in`.
        if cansmiles in FrameDict:
            FrameDict[cansmiles].append(mol)
        else:
            FrameDict[cansmiles] = [mol, ]

    # Write one sd file per frame (1.sdf, 2.sdf, ...) plus a summary
    # smiles file with the occurrence count of each frame.
    counter = 0
    w = open('frames.smi', 'w')
    for key, item in FrameDict.items():
        counter += 1
        d = Chem.SDWriter(str(counter) + '.sdf')
        for i in item:
            i.SetProp('Scaffold', key)
            i.SetProp('Cluster', str(counter))
            d.write(i)
        # Close each per-cluster writer so its output is flushed.
        d.close()
        print(key, len(item))
        w.write(key + '\t' + str(len(item)) + '\n')
    # Bug fix: the original said `w.close` without parentheses, which
    # never actually closed the file.
    w.close()
    print('number of Clusters: %d' % (counter))
|
#ifndef <API key>
#define <API key>
#include <list>
#include <set>
#include <vector>
#include "base/callback.h"
#include "base/compiler_specific.h"
#include "base/file_path.h"
#include "base/memory/ref_counted.h"
#include "base/synchronization/lock.h"
#include "base/time.h"
#include "content/public/browser/dom_storage_context.h"
#include "chrome/common/url_constants.h"
#include "googleurl/src/gurl.h"
class Profile;
// This class fetches local storage information and provides a
// means to delete the data associated with an origin.
class <API key>
    : public base::RefCounted<<API key>> {
 public:
  // Contains detailed information about local storage.
  struct LocalStorageInfo {
    LocalStorageInfo(
        const GURL& origin_url,
        int64 size,
        base::Time last_modified);
    ~LocalStorageInfo();

    GURL origin_url;       // Origin the data belongs to.
    int64 size;            // Size of the stored data, in bytes.
    base::Time last_modified;
  };

  explicit <API key>(Profile* profile);

  // Starts the fetching process, which will notify its completion via
  // callback. This must be called only in the UI thread.
  virtual void StartFetching(
      const base::Callback<void(const std::list<LocalStorageInfo>&)>& callback);

  // Deletes the local storage for the |origin|.
  virtual void DeleteOrigin(const GURL& origin);

 protected:
  friend class base::RefCounted<<API key>>;
  virtual ~<API key>();

  void <API key>();

  content::DOMStorageContext* <API key>;  // Owned by the profile
  // Completion callback stashed by StartFetching() until the results arrive.
  base::Callback<void(const std::list<LocalStorageInfo>&)> <API key>;
  // True while a StartFetching() request is outstanding.
  bool is_fetching_;
  std::list<LocalStorageInfo> local_storage_info_;

 private:
  void <API key>(
      const std::vector<dom_storage::<API key>>& infos);

  <API key>(<API key>);
};
// This class is a thin wrapper around <API key> that does
// not fetch its information from the local storage tracker, but gets them
// passed as a parameter during construction.
class <API key>
    : public <API key> {
 public:
  explicit <API key>(Profile* profile);

  // Return a copy of the local storage helper. Only one consumer can use the
  // StartFetching method at a time, so we need to create a copy of the helper
  // every time we instantiate a cookies tree model for it.
  <API key>* Clone();

  // Add a local storage to the set of canned local storages that is returned
  // by this helper.
  void AddLocalStorage(const GURL& origin);

  // Clear the list of canned local storages.
  void Reset();

  // True if no local storages are currently stored.
  bool empty() const;

  // Returns the number of local storages currently stored.
  size_t <API key>() const;

  // Returns the set of origins that use local storage.
  const std::set<GURL>& GetLocalStorageInfo() const;

  // Parent-class implementation override; reports the canned origins
  // instead of querying the storage tracker.
  virtual void StartFetching(
      const base::Callback<void(const std::list<LocalStorageInfo>&)>& callback)
      OVERRIDE;

 private:
  virtual ~<API key>();

  // Convert the pending local storage info to local storage info objects.
  void ConvertPendingInfo();

  // Origins registered via AddLocalStorage(), pending conversion.
  std::set<GURL> <API key>;
  Profile* profile_;

  <API key>(<API key>);
};
#endif // <API key>
|
#ifndef <API key>
#define <API key>
#define T_CHAR char

/* Prototypes for the simple file parsing API.
   NOTE(review): the one-line summaries below are inferred from the
   function and parameter names only -- confirm them against the
   implementation file before relying on the details. */

/* Initialize the parser's state; call before any other SFP* function. */
STATUS
SFPInit (
  VOID
  )
;

/* Open FileName and make it the current parse input. */
STATUS
SFPOpenFile (
  char *FileName
  )
;

/* TRUE if the next input matches the keyword Str (consuming it). */
BOOLEAN
SFPIsKeyword (
  T_CHAR *Str
  )
;

/* TRUE if the next input matches the token Str (consuming it). */
BOOLEAN
SFPIsToken (
  T_CHAR *Str
  )
;

/* Copy the next token into Str, writing at most Len characters. */
BOOLEAN
SFPGetNextToken (
  T_CHAR *Str,
  unsigned int Len
  )
;

/* Read a GUID-shaped token into Str, writing at most Len characters. */
BOOLEAN
SFPGetGuidToken (
  T_CHAR *Str,
  UINT32 Len
  )
;

/* GUID style accepted by SFPGetGuid(). */
#define <API key> 0

/* Parse a GUID in the given style into Value. */
BOOLEAN
SFPGetGuid (
  int GuidStyle,
  EFI_GUID *Value
  )
;

/* Advance the input until the token Str is found. */
BOOLEAN
SFPSkipToToken (
  T_CHAR *Str
  )
;

/* Parse a number from the input into Value. */
BOOLEAN
SFPGetNumber (
  unsigned int *Value
  )
;

/* Parse a quoted string of at most Length characters into Str. */
BOOLEAN
SFPGetQuotedString (
  T_CHAR *Str,
  int Length
  )
;

/* TRUE once the end of the current input file is reached. */
BOOLEAN
SFPIsEOF (
  VOID
  )
;

/* Close the current input file. */
STATUS
SFPCloseFile (
  VOID
  )
;

/* Current line number, for diagnostics. */
unsigned
int
SFPGetLineNumber (
  VOID
  )
;

/* Name of the file currently being parsed. */
T_CHAR *
SFPGetFileName (
  VOID
  )
;
#endif // #ifndef <API key>
|
#ifndef <API key>
#define <API key>
// OVERVIEW
// Bencoding is a common representation in bittorrent used for
// for dictionary, list, int and string hierarchies. It's used
// to encode .torrent files and some messages in the network
// protocol. libtorrent also uses it to store settings, resume
// data and other state between sessions.
// Strings in bencoded structures are not necessarily representing
// text. Strings are raw byte buffers of a certain length. If a
// string is meant to be interpreted as text, it is required to
// be UTF-8 encoded. See `BEP 3`_.
// There are two mechanims to *decode* bencoded buffers in libtorrent.
// The most flexible one is bdecode(), which returns a structure
// represented by entry. When a buffer is decoded with this function,
// it can be discarded. The entry does not contain any references back
// to it. This means that bdecode() actually copies all the data out
// of the buffer and into its own hierarchy. This makes this
// function potentially expensive, if you're parsing large amounts
// of data.
// Another consideration is that bdecode() is a recursive parser.
// For this reason, in order to avoid DoS attacks by triggering
// a stack overflow, there is a recursion limit. This limit is
// a sanity check to make sure it doesn't run the risk of
// busting the stack.
// The second mechanism is lazy_bdecode(), which returns a
// bencoded structure represented by lazy_entry. This function
// builds a tree that points back into the original buffer.
// The returned lazy_entry will not be valid once the buffer
// it was parsed out of is discarded.
// Not only is this function more efficient because of less
// memory allocation and data copy, the parser is also not
// recursive, which means it probably performs a little bit
// better and can have a higher recursion limit on the structures
// it's parsing.
#include <stdlib.h>
#include <string>
#include <exception>
#include <iterator> // for distance
#ifdef _MSC_VER
#pragma warning(push, 1)
#endif
#include <boost/static_assert.hpp>
#ifdef _MSC_VER
#pragma warning(pop)
#endif
#include "libtorrent/entry.hpp"
#include "libtorrent/config.hpp"
#include "libtorrent/assert.hpp"
#include "libtorrent/escape_string.hpp"
#include "libtorrent/io.hpp" // for write_string
namespace libtorrent
{
#ifndef <API key>
// thrown by bdecode() if the provided bencoded buffer does not contain
// valid encoding.
struct TORRENT_EXPORT invalid_encoding: std::exception
{
  // hidden
  // Returns a fixed string literal, so calling this never allocates.
  virtual const char* what() const throw() { return "invalid bencoding"; }
};
#endif
namespace detail
{
// this is used in the template, so it must be available to the client
TORRENT_EXPORT char const* integer_to_str(char* buf, int size
, entry::integer_type val);
// Render |val| in decimal through the output iterator |out| and return
// the number of characters produced.
template <class OutIt>
int write_integer(OutIt& out, entry::integer_type val)
{
	// The stack buffer for the decimal representation must fit a sign
	// plus the 20 digits of a 64-bit value, hence the size check.
	BOOST_STATIC_ASSERT(sizeof(entry::integer_type) <= 8);
	char dec_buf[21];
	int written = 0;
	char const* p = integer_to_str(dec_buf, 21, val);
	while (*p != 0)
	{
		*out = *p;
		++out;
		++p;
		++written;
	}
	return written;
}
// Emit a single character through the output iterator and advance it.
template <class OutIt>
void write_char(OutIt& out, char c)
{
	*out++ = c;
}
// Accumulate characters from |in| until |end_token| is encountered.
// On success |in| is left pointing at the token. If the range runs out
// first, |err| is set and the partial string read so far is returned.
template <class InIt>
std::string read_until(InIt& in, InIt end, char end_token, bool& err)
{
	std::string result;
	for (;; ++in)
	{
		if (in == end)
		{
			err = true;
			break;
		}
		if (*in == end_token) break;
		result += *in;
	}
	return result;
}
// Append exactly |len| characters from |in| to |str|. Sets |err| and
// stops early if the input range is exhausted first.
template<class InIt>
void read_string(InIt& in, InIt end, int len, std::string& str, bool& err)
{
	TORRENT_ASSERT(len >= 0);
	int remaining = len;
	while (remaining-- > 0)
	{
		if (in == end)
		{
			err = true;
			return;
		}
		str += *in;
		++in;
	}
}
// Serialize |e| in bencoded form through the output iterator |out| and
// return the number of characters written. Recurses into lists and
// dictionaries; an uninitialized entry triggers a debug assert and
// writes nothing.
template<class OutIt>
int bencode_recursive(OutIt& out, const entry& e)
{
	int ret = 0;
	switch(e.type())
	{
	case entry::int_t:
		// integers are encoded as "i<decimal>e"
		write_char(out, 'i');
		ret += write_integer(out, e.integer());
		write_char(out, 'e');
		ret += 2;  // the 'i' and 'e' written above
		break;
	case entry::string_t:
		// strings are encoded as "<length>:<raw bytes>"
		ret += write_integer(out, e.string().length());
		write_char(out, ':');
		ret += write_string(e.string(), out);
		ret += 1;  // the ':'
		break;
	case entry::list_t:
		// lists are encoded as "l<elements>e"
		write_char(out, 'l');
		for (entry::list_type::const_iterator i = e.list().begin(); i != e.list().end(); ++i)
			ret += bencode_recursive(out, *i);
		write_char(out, 'e');
		ret += 2;  // the 'l' and 'e'
		break;
	case entry::dictionary_t:
		// dictionaries are encoded as "d<key><value>...e"; keys are
		// emitted in the dictionary container's iteration order
		// (presumably sorted, matching the spec -- confirm in entry.hpp)
		write_char(out, 'd');
		for (entry::dictionary_type::const_iterator i = e.dict().begin();
			i != e.dict().end(); ++i)
		{
			// write key
			ret += write_integer(out, i->first.length());
			write_char(out, ':');
			ret += write_string(i->first, out);
			// write value
			ret += bencode_recursive(out, i->second);
			ret += 1;  // the ':' after the key length
		}
		write_char(out, 'e');
		ret += 2;  // the 'd' and 'e'
		break;
	default:
		// trying to encode a structure with uninitialized values!
		TORRENT_ASSERT_VAL(false, e.type());
		// do nothing
		break;
	}
	return ret;
}
// Decode one bencoded element from [in, end) into |ret|, advancing |in|
// past the element. On malformed or truncated input |err| is set to true
// and |ret| is left in an unspecified state. |depth| limits recursion to
// 100 nested levels so hostile input cannot bust the stack.
template<class InIt>
void bdecode_recursive(InIt& in, InIt end, entry& ret, bool& err, int depth)
{
	if (depth >= 100)
	{
		err = true;
		return;
	}
	if (in == end)
	{
		err = true;
#ifdef TORRENT_DEBUG
		ret.m_type_queried = false;
#endif
		return;
	}
	switch (*in)
	{
	// integer
	case 'i':
		{
		++in; // 'i'
		std::string val = read_until(in, end, 'e', err);
		if (err) return;
		TORRENT_ASSERT(*in == 'e');
		++in; // 'e'
		ret = entry(entry::int_t);
		char* end_pointer;
		ret.integer() = strtoll(val.c_str(), &end_pointer, 10);
#ifdef TORRENT_DEBUG
		ret.m_type_queried = false;
#endif
		// "ie" and other bodies containing no digits are invalid
		if (end_pointer == val.c_str())
		{
			err = true;
			return;
		}
		} break;

	// list
	case 'l':
		{
		ret = entry(entry::list_t);
		++in; // 'l'
		// Bug fix: a buffer truncated right after 'l' used to make the
		// loop condition below dereference the end iterator.
		if (in == end)
		{
			err = true;
#ifdef TORRENT_DEBUG
			ret.m_type_queried = false;
#endif
			return;
		}
		while (*in != 'e')
		{
			ret.list().push_back(entry());
			entry& e = ret.list().back();
			bdecode_recursive(in, end, e, err, depth + 1);
			if (err)
			{
#ifdef TORRENT_DEBUG
				ret.m_type_queried = false;
#endif
				return;
			}
			if (in == end)
			{
				err = true;
#ifdef TORRENT_DEBUG
				ret.m_type_queried = false;
#endif
				return;
			}
		}
#ifdef TORRENT_DEBUG
		ret.m_type_queried = false;
#endif
		TORRENT_ASSERT(*in == 'e');
		++in; // 'e'
		} break;

	// dictionary
	case 'd':
		{
		ret = entry(entry::dictionary_t);
		++in; // 'd'
		// Same truncation guard as the list case above.
		if (in == end)
		{
			err = true;
#ifdef TORRENT_DEBUG
			ret.m_type_queried = false;
#endif
			return;
		}
		while (*in != 'e')
		{
			entry key;
			bdecode_recursive(in, end, key, err, depth + 1);
			if (err || key.type() != entry::string_t)
			{
				// Bug fix: a non-string dictionary key previously
				// returned without setting |err|, so malformed input
				// could be mistaken for a successful parse.
				err = true;
#ifdef TORRENT_DEBUG
				ret.m_type_queried = false;
#endif
				return;
			}
			entry& e = ret[key.string()];
			bdecode_recursive(in, end, e, err, depth + 1);
			if (err)
			{
#ifdef TORRENT_DEBUG
				ret.m_type_queried = false;
#endif
				return;
			}
			if (in == end)
			{
				err = true;
#ifdef TORRENT_DEBUG
				ret.m_type_queried = false;
#endif
				return;
			}
		}
#ifdef TORRENT_DEBUG
		ret.m_type_queried = false;
#endif
		TORRENT_ASSERT(*in == 'e');
		++in; // 'e'
		} break;

	// string
	default:
		if (is_digit((unsigned char)*in))
		{
			std::string len_s = read_until(in, end, ':', err);
			if (err)
			{
#ifdef TORRENT_DEBUG
				ret.m_type_queried = false;
#endif
				return;
			}
			TORRENT_ASSERT(*in == ':');
			++in; // ':'
			int len = atoi(len_s.c_str());
			ret = entry(entry::string_t);
			read_string(in, end, len, ret.string(), err);
			if (err)
			{
#ifdef TORRENT_DEBUG
				ret.m_type_queried = false;
#endif
				return;
			}
		}
		else
		{
			// neither i/l/d nor a digit: not valid bencoding
			err = true;
#ifdef TORRENT_DEBUG
			ret.m_type_queried = false;
#endif
			return;
		}
#ifdef TORRENT_DEBUG
		ret.m_type_queried = false;
#endif
	}
}
}
// These functions will encode data to bencoded_ or decode bencoded_ data.
// If possible, lazy_bdecode() should be preferred over ``bdecode()``.
// The entry_ class is the internal representation of the bencoded data
// and it can be used to retrieve information, an entry_ can also be build by
// the program and given to ``bencode()`` to encode it into the ``OutIt``
// iterator.
// The ``OutIt`` and ``InIt`` are iterators
// (InputIterator_ and OutputIterator_ respectively). They
// are templates and are usually instantiated as ostream_iterator_,
// <API key> or istream_iterator_. These
// functions will assume that the iterator refers to a character
// (``char``). So, if you want to encode entry ``e`` into a buffer
// in memory, you can do it like this::
// std::vector<char> buffer;
// bencode(std::back_inserter(buf), e);
// If you want to decode a torrent file from a buffer in memory, you can do it like this::
// std::vector<char> buffer;
// entry e = bdecode(buf.begin(), buf.end());
// Or, if you have a raw char buffer::
// const char* buf;
// entry e = bdecode(buf, buf + data_size);
// Now we just need to know how to retrieve information from the entry.
// If ``bdecode()`` encounters invalid encoded data in the range given to it
// it will return a default constructed ``entry`` object.
// Encode |e| through the output iterator |out|; returns the number of
// characters produced.
template<class OutIt> int bencode(OutIt out, const entry& e)
{
	return detail::bencode_recursive(out, e);
}

// Decode the bencoded range [start, end). On malformed input a
// default-constructed entry is returned instead.
template<class InIt> entry bdecode(InIt start, InIt end)
{
	entry e;
	bool err = false;
	detail::bdecode_recursive(start, end, e, err, 0);
#ifdef TORRENT_DEBUG
	TORRENT_ASSERT(e.m_type_queried == false);
#endif
	if (err) return entry();
	return e;
}

// Same as above, but additionally reports in |len| how many input
// characters were consumed (computed even when decoding fails).
template<class InIt> entry bdecode(InIt start, InIt end, int& len)
{
	entry e;
	bool err = false;
	InIt s = start;
	detail::bdecode_recursive(start, end, e, err, 0);
	len = std::distance(s, start);
	TORRENT_ASSERT(len >= 0);
	if (err) return entry();
	return e;
}
}
#endif // <API key>
|
title: SQL and Operator
localeTitle: SQL
## SQL AND
The `AND` operator combines multiple conditions, and can be used inside `WHERE` clauses as well as `HAVING` clauses (after `GROUP BY`).
The first examples below query a `student` table; a `WHERE` clause is then added to filter its rows.
sql
select * from student;

Filtering rows with a `WHERE` clause:
sql
select * from student
where programOfStudy = 'Programming';

Using `AND` in a `WHERE` clause — students in the Programming program whose SAT score is greater than 800:
sql
select * from student
where programOfStudy = 'Programming'
and sat_score > 800;

Using `AND` in a `HAVING` clause with `GROUP BY` — presidential candidates from the 2016 election whose totals fall between 3,000,000 and 18,000,000:
sql
select Candidate, Office_Sought, Election_Year, FORMAT(sum(Total_$),2) from combined_party_data
where Office_Sought = 'PRESIDENT / VICE PRESIDENT'
group by Candidate, Office_Sought, Election_Year
having Election_Year = 2016 and sum(Total_$) between 3000000 and 18000000
order by sum(Total_$) desc;

|
using System;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using Nini.Config;
using OpenMetaverse;
using Mono.Addins;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Interfaces;
using GridRegion = OpenSim.Services.Interfaces.GridRegion;
namespace OpenSim.Region.CoreModules.World.WorldMap
{
[Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "MapSearchModule")]
public class MapSearchModule : ISharedRegionModule
{
    private static readonly ILog m_log =
        LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

    Scene m_scene = null; // only need one for communication with GridService
    List<Scene> m_scenes = new List<Scene>();
    // Agents with a map-name search currently in flight; used to drop
    // duplicate requests from the same client.
    List<UUID> m_Clients;

    IWorldMapModule m_WorldMap;
    // Lazily resolved world-map module of the primary scene.
    IWorldMapModule WorldMap
    {
        get
        {
            if (m_WorldMap == null)
                m_WorldMap = m_scene.<API key><IWorldMapModule>();
            return m_WorldMap;
        }
    }

    #region ISharedRegionModule Members

    public void Initialise(IConfigSource source)
    {
    }

    // Track every scene; the first scene added becomes the primary one
    // used for grid-service lookups.
    public void AddRegion(Scene scene)
    {
        if (m_scene == null)
        {
            m_scene = scene;
        }
        m_scenes.Add(scene);
        scene.EventManager.OnNewClient += OnNewClient;
        m_Clients = new List<UUID>();
    }

    public void RemoveRegion(Scene scene)
    {
        m_scenes.Remove(scene);
        // If the primary scene goes away, fall back to another one.
        if (m_scene == scene && m_scenes.Count > 0)
            m_scene = m_scenes[0];
        scene.EventManager.OnNewClient -= OnNewClient;
    }

    public void PostInitialise()
    {
    }

    public void Close()
    {
        m_scene = null;
        m_scenes.Clear();
    }

    public string Name
    {
        get { return "MapSearchModule"; }
    }

    // Shared module: provides no replaceable interface.
    public Type <API key>
    {
        get { return null; }
    }

    public void RegionLoaded(Scene scene)
    {
    }
    #endregion

    private void OnNewClient(IClientAPI client)
    {
        client.OnMapNameRequest += <API key>;
    }

    // Entry point for the viewer's map-name search. Drops the request if
    // this client already has a search in flight, otherwise forwards it.
    private void <API key>(IClientAPI remoteClient, string mapName, uint flags)
    {
        lock (m_Clients)
        {
            if (m_Clients.Contains(remoteClient.AgentId))
                return;
            m_Clients.Add(remoteClient.AgentId);
        }
        OnMapNameRequest(remoteClient, mapName, flags);
    }

    private void OnMapNameRequest(IClientAPI remoteClient, string mapName, uint flags)
    {
        // Run the search off the caller's thread; the finally block below
        // always clears this client's in-flight marker.
        Util.FireAndForget(x =>
        {
            try
            {
                List<MapBlockData> blocks = new List<MapBlockData>();
                if (mapName.Length < 3 || (mapName.EndsWith("#") && mapName.Length < 4))
                {
                    // final block, closing the search result
                    AddFinalBlock(blocks,mapName);

                    // flags are agent flags sent from the viewer.
                    // they have different values depending on different viewers, apparently
                    remoteClient.SendMapBlock(blocks, flags);
                    remoteClient.SendAlertMessage("Use a search string with at least 3 characters");
                    return;
                }

                //m_log.DebugFormat("MAP NAME=({0})", mapName);

                // Hack to get around the fact that ll V3 now drops the port from the
                // Caller, use this magic form instead:
                // secondlife://http|!!mygrid.com|8002|Region+Name/128/128
                // or url encode if possible.
                // the hacks we do with this viewer...
                bool needOriginalName = false;
                string mapNameOrig = mapName;
                if (mapName.Contains("|"))
                {
                    mapName = mapName.Replace('|', ':');
                    needOriginalName = true;
                }
                if (mapName.Contains("+"))
                {
                    mapName = mapName.Replace('+', ' ');
                    needOriginalName = true;
                }
                if (mapName.Contains("!"))
                {
                    mapName = mapName.Replace('!', '/');
                    needOriginalName = true;
                }
                if (mapName.Contains("."))
                    needOriginalName = true;

                // try to fetch from GridServer
                List<GridRegion> regionInfos = m_scene.GridService.GetRegionsByName(m_scene.RegionInfo.ScopeID, mapName, 20);
                // if (regionInfos.Count == 0)
                //     remoteClient.SendAlertMessage("Hyperlink could not be established.");

                //m_log.DebugFormat("[MAPSEARCHMODULE]: search {0} returned {1} regions", mapName, regionInfos.Count);

                MapBlockData data;
                if (regionInfos != null && regionInfos.Count > 0)
                {
                    foreach (GridRegion info in regionInfos)
                    {
                        // NOTE(review): |data| is populated here but never
                        // added to |blocks| -- looks like leftover dead code;
                        // only |block| below is actually sent.
                        data = new MapBlockData();
                        data.Agents = 0;
                        data.Access = info.Access;
                        MapBlockData block = new MapBlockData();
                        WorldMap.<API key>(block, info, flags);
                        if (flags == 2 && regionInfos.Count == 1 && needOriginalName)
                            block.Name = mapNameOrig;
                        blocks.Add(block);
                    }
                }

                // final block, closing the search result
                AddFinalBlock(blocks,mapNameOrig);

                // flags are agent flags sent from the viewer.
                // they have different values depending on different viewers, apparently
                remoteClient.SendMapBlock(blocks, flags);

                // send extra user messages for V3
                // because the UI is very confusing
                // while we don't fix the hard-coded urls
                if (flags == 2)
                {
                    if (regionInfos == null || regionInfos.Count == 0)
                        remoteClient.<API key>("No regions found with that name.", true);
                    // else if (regionInfos.Count == 1)
                    //     remoteClient.<API key>("Region found!", false);
                }
            }
            finally
            {
                lock (m_Clients)
                    m_Clients.Remove(remoteClient.AgentId);
            }
        });
    }

    // Append the sentinel block that tells the viewer the search result
    // list is complete.
    private void AddFinalBlock(List<MapBlockData> blocks,string name)
    {
        // final block, closing the search result
        MapBlockData data = new MapBlockData();
        data.Agents = 0;
        data.Access = (byte)SimAccess.NonExistent;
        data.MapImageId = UUID.Zero;
        data.Name = name;
        data.RegionFlags = 0;
        data.WaterHeight = 0; // not used
        data.X = 0;
        data.Y = 0;
        blocks.Add(data);
    }

    // private Scene GetClientScene(IClientAPI client)
    //     foreach (Scene s in m_scenes)
    //         if (client.Scene.RegionInfo.RegionHandle == s.RegionInfo.RegionHandle)
    //             return s;
    //     return m_scene;
}
}
|
#include "cc/layers/tiled_layer.h"
#include <algorithm>
#include <vector>
#include "base/auto_reset.h"
#include "base/basictypes.h"
#include "build/build_config.h"
#include "cc/layers/layer_impl.h"
#include "cc/layers/tiled_layer_impl.h"
#include "cc/resources/layer_updater.h"
#include "cc/resources/<API key>.h"
#include "cc/resources/priority_calculator.h"
#include "cc/trees/layer_tree_host.h"
#include "cc/trees/occlusion_tracker.h"
#include "third_party/khronos/GLES2/gl2.h"
#include "ui/gfx/rect_conversions.h"
namespace cc {
// Maximum predictive expansion of the visible area.
static const int <API key> = 2;
// Number of rows/columns of tiles to pre-paint.
// We should increase these further as all textures are
// prioritized and we insure performance doesn't suffer.
static const int kPrepaintRows = 4;
static const int kPrepaintColumns = 2;
// One tile of a TiledLayer's tiling: pairs the LayerTilingData::Tile
// bookkeeping with the LayerUpdater resource that paints it, plus the
// per-frame dirty/update state tracked below.
class UpdatableTile : public LayerTilingData::Tile {
 public:
  static scoped_ptr<UpdatableTile> Create(
      scoped_ptr<LayerUpdater::Resource> updater_resource) {
    return make_scoped_ptr(new UpdatableTile(updater_resource.Pass()));
  }

  LayerUpdater::Resource* updater_resource() { return updater_resource_.get(); }
  PrioritizedResource* managed_resource() {
    return updater_resource_->texture();
  }

  // A tile is dirty while any part of it still awaits repainting.
  bool is_dirty() const { return !dirty_rect.IsEmpty(); }

  // Reset update state for the current frame. This should occur before painting
  // for all layers. Since painting one layer can invalidate another layer after
  // it has already painted, mark all non-dirty tiles as valid before painting
  // such that invalidations during painting won't prevent them from being
  // pushed.
  void ResetUpdateState() {
    update_rect = gfx::Rect();
    occluded = false;
    partial_update = false;
    valid_for_frame = !is_dirty();
  }

  // This promises to update the tile and therefore also guarantees the tile
  // will be valid for this frame. dirty_rect is copied into update_rect so we
  // can continue to track re-entrant invalidations that occur during painting.
  void MarkForUpdate() {
    valid_for_frame = true;
    update_rect = dirty_rect;
    dirty_rect = gfx::Rect();
  }

  gfx::Rect dirty_rect;   // Region invalidated since the last paint.
  gfx::Rect update_rect;  // Region promised for update this frame.
  bool partial_update;
  bool valid_for_frame;
  bool occluded;

 private:
  explicit UpdatableTile(scoped_ptr<LayerUpdater::Resource> updater_resource)
      : partial_update(false),
        valid_for_frame(false),
        occluded(false),
        updater_resource_(updater_resource.Pass()) {}

  scoped_ptr<LayerUpdater::Resource> updater_resource_;

  <API key>(UpdatableTile);
};
// Starts with an empty tiling (sized lazily in UpdateBounds) using bordered
// texels so tiles can be filtered without seams.
TiledLayer::TiledLayer()
    : <API key>(),
      texture_format_(RGBA_8888),
      skips_draw_(false),
      failed_update_(false),
      tiling_option_(AUTO_TILE) {
  tiler_ =
      LayerTilingData::Create(gfx::Size(), LayerTilingData::HAS_BORDER_TEXELS);
}

TiledLayer::~TiledLayer() {}

// Creates the impl-side twin that will receive this layer's tiling state at
// commit time (see PushPropertiesTo).
scoped_ptr<LayerImpl> TiledLayer::CreateLayerImpl(LayerTreeImpl* tree_impl) {
  return TiledLayerImpl::Create(tree_impl, id()).PassAs<LayerImpl>();
}
// Chooses the tile size for this layer based on its content bounds, the host
// settings, and the tiling option, then applies it via SetTileSize. Untiled
// layers get a single texture the size of their content (clamped to the
// maximum GPU texture size).
void TiledLayer::<API key>() {
  DCHECK(layer_tree_host());
  gfx::Size default_tile_size = layer_tree_host()->settings().default_tile_size;
  gfx::Size <API key> =
      layer_tree_host()->settings().<API key>;
  int layer_width = content_bounds().width();
  int layer_height = content_bounds().height();
  // Never make a tile larger than the layer itself.
  gfx::Size tile_size(std::min(default_tile_size.width(), layer_width),
                      std::min(default_tile_size.height(), layer_height));
  // Tile if both dimensions large, or any one dimension large and the other
  // extends into a second tile but the total layer area isn't larger than that
  // of the largest possible untiled layer. This heuristic allows for long
  // skinny layers (e.g. scrollbars) that are Nx1 tiles to minimize wasted
  // texture space but still avoids creating very large tiles.
  bool any_dimension_large = layer_width > <API key>.width() ||
                             layer_height > <API key>.height();
  bool <API key> =
      (layer_width <= default_tile_size.width() ||
       layer_height <= default_tile_size.height()) &&
      (layer_width * layer_height) <= (<API key>.width() *
                                       <API key>.height());
  bool auto_tiled = any_dimension_large && !<API key>;

  bool is_tiled;
  if (tiling_option_ == ALWAYS_TILE)
    is_tiled = true;
  else if (tiling_option_ == NEVER_TILE)
    is_tiled = false;
  else
    is_tiled = auto_tiled;

  gfx::Size requested_size = is_tiled ? tile_size : content_bounds();
  // Clamp to the device's maximum texture dimension.
  const int max_size =
      layer_tree_host()-><API key>().max_texture_size;
  requested_size.SetToMin(gfx::Size(max_size, max_size));
  SetTileSize(requested_size);
}
// Resizes the tiling to the current content bounds and invalidates any
// newly-exposed area so it gets painted.
void TiledLayer::UpdateBounds() {
  gfx::Size old_tiling_size = tiler_->tiling_size();
  gfx::Size new_tiling_size = content_bounds();
  if (old_tiling_size == new_tiling_size)
    return;
  tiler_->SetTilingSize(new_tiling_size);

  // Invalidate any areas that the new bounds exposes.
  Region new_region =
      SubtractRegions(gfx::Rect(new_tiling_size), gfx::Rect(old_tiling_size));
  for (Region::Iterator new_rects(new_region); new_rects.has_rect();
       new_rects.next())
    <API key>(new_rects.rect());
  UpdateDrawsContent(HasDrawableContent());
}

// Sets the tile size on the tiler; may change whether the layer draws content
// (e.g. NEVER_TILE layers that would need multiple tiles stop drawing).
void TiledLayer::SetTileSize(const gfx::Size& size) {
  tiler_->SetTileSize(size);
  UpdateDrawsContent(HasDrawableContent());
}

// Switches the tiling between bordered and border-free texels.
void TiledLayer::<API key>(
    LayerTilingData::BorderTexelOption border_texel_option) {
  tiler_-><API key>(border_texel_option);
  UpdateDrawsContent(HasDrawableContent());
}
// A NEVER_TILE layer that would require more than one tile cannot draw
// (it can't be represented by a single texture); otherwise defer to the base.
bool TiledLayer::HasDrawableContent() const {
  bool <API key> =
      (tiler_->num_tiles_x() > 1) || (tiler_->num_tiles_y() > 1);
  return !(tiling_option_ == NEVER_TILE && <API key>) &&
         <API key>::HasDrawableContent();
}

// Forwards memory-pressure handling to the updater, if any.
void TiledLayer::ReduceMemoryUsage() {
  if (Updater())
    Updater()->ReduceMemoryUsage();
}

// Mask layers are always drawn as a single texture, so disable tiling.
void TiledLayer::SetIsMask(bool is_mask) {
  set_tiling_option(is_mask ? NEVER_TILE : AUTO_TILE);
}
// Commits this layer's tiling state to its impl-side twin: valid tiles push
// their resource ids, invalid tiles are flagged for debug coloring, and
// evicted tiles are removed from both sides.
void TiledLayer::PushPropertiesTo(LayerImpl* layer) {
  <API key>::PushPropertiesTo(layer);

  TiledLayerImpl* tiled_layer = static_cast<TiledLayerImpl*>(layer);

  tiled_layer->set_skips_draw(skips_draw_);
  tiled_layer->SetTilingData(*tiler_);
  std::vector<UpdatableTile*> invalid_tiles;

  for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin();
       iter != tiler_->tiles().end();
       ++iter) {
    int i = iter->first.first;
    int j = iter->first.second;
    UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second);
    // TODO(enne): This should not ever be null.
    if (!tile)
      continue;
    if (!tile->managed_resource()-><API key>()) {
      // Evicted tiles get deleted from both layers
      invalid_tiles.push_back(tile);
      continue;
    }
    if (!tile->valid_for_frame) {
      // Invalidated tiles are set so they can get different debug colors.
      tiled_layer->PushInvalidTile(i, j);
      continue;
    }
    tiled_layer->PushTileProperties(
        i,
        j,
        tile->managed_resource()->resource_id(),
        tile->opaque_rect(),
        tile->managed_resource()->contents_swizzled());
  }
  // Remove evicted tiles after the iteration above, since TakeTile mutates the
  // tile map.
  for (std::vector<UpdatableTile*>::const_iterator iter = invalid_tiles.begin();
       iter != invalid_tiles.end();
       ++iter)
    tiler_->TakeTile((*iter)->i(), (*iter)->j());

  // TiledLayer must push properties every frame, since viewport state and
  // occlusion from anywhere in the tree can change what the layer decides to
  // push to the impl tree.
  <API key> = true;
}
// Returns the host's texture/resource manager, or NULL when detached.
<API key>* TiledLayer::ResourceManager() {
  if (!layer_tree_host())
    return NULL;
  return layer_tree_host()-><API key>();
}

// Test-only accessor for the prioritized resource backing tile (i, j);
// returns NULL if that tile does not exist.
const PrioritizedResource* TiledLayer::<API key>(int i,
                                                 int j) const {
  UpdatableTile* tile = TileAt(i, j);
  if (!tile)
    return NULL;
  return tile->managed_resource();
}

// When moving to a new host, re-parent every tile's texture to the new host's
// texture manager before the base class switches hosts.
void TiledLayer::SetLayerTreeHost(LayerTreeHost* host) {
  if (host && host != layer_tree_host()) {
    for (LayerTilingData::TileMap::const_iterator
             iter = tiler_->tiles().begin();
         iter != tiler_->tiles().end();
         ++iter) {
      UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second);
      // TODO(enne): This should not ever be null.
      if (!tile)
        continue;
      tile->managed_resource()->SetTextureManager(
          host-><API key>());
    }
  }
  <API key>::SetLayerTreeHost(host);
}
// Looks up the tile at tiling coordinate (i, j); may return NULL.
UpdatableTile* TiledLayer::TileAt(int i, int j) const {
  return static_cast<UpdatableTile*>(tiler_->TileAt(i, j));
}

// Creates and registers the tile at (i, j), fully dirty so its first update
// paints the whole tile.
UpdatableTile* TiledLayer::CreateTile(int i, int j) {
  <API key>();

  scoped_ptr<UpdatableTile> tile(
      UpdatableTile::Create(Updater()->CreateResource(ResourceManager())));
  tile->managed_resource()->SetDimensions(tiler_->tile_size(), texture_format_);

  UpdatableTile* added_tile = tile.get();
  tiler_->AddTile(tile.PassAs<LayerTilingData::Tile>(), i, j);

  added_tile->dirty_rect = tiler_->TileRect(added_tile);

  // Temporary diagnostic crash.
  CHECK(added_tile);
  CHECK(TileAt(i, j));

  return added_tile;
}
// Converts the layer-space dirty rect to content space, records it on the
// affected tiles, then lets the base class schedule the repaint.
void TiledLayer::SetNeedsDisplayRect(const gfx::RectF& dirty_rect) {
  <API key>(<API key>(dirty_rect));
  <API key>::SetNeedsDisplayRect(dirty_rect);
}

// Marks the intersection of |content_rect| with every tile as dirty.
// No-op when there is nothing to paint (empty tiling/rect, or draws skipped).
void TiledLayer::<API key>(const gfx::Rect& content_rect) {
  UpdateBounds();
  if (tiler_->is_empty() || content_rect.IsEmpty() || skips_draw_)
    return;

  for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin();
       iter != tiler_->tiles().end();
       ++iter) {
    UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second);
    DCHECK(tile);
    // TODO(enne): This should not ever be null.
    if (!tile)
      continue;
    gfx::Rect bound = tiler_->TileRect(tile);
    bound.Intersect(content_rect);
    tile->dirty_rect.Union(bound);
  }
}
// Returns true if tile is dirty and only part of it needs to be updated,
// i.e. the dirty rect doesn't cover the whole tile and the tile's backing
// can be updated in place.
bool TiledLayer::<API key>(UpdatableTile* tile) {
  return !tile->dirty_rect.Contains(tiler_->TileRect(tile)) &&
         tile->managed_resource()-><API key>();
}

// Acquires textures for, marks, and paints the tiles in the inclusive index
// range [left, right] x [top, bottom]. Returns false (and sets
// failed_update_) if textures could not be acquired; |*updated| is set to
// true only when something was actually painted. A NULL |occlusion| means
// occlusion is ignored (e.g. during pre-painting).
bool TiledLayer::UpdateTiles(int left,
                             int top,
                             int right,
                             int bottom,
                             ResourceUpdateQueue* queue,
                             const OcclusionTracker<Layer>* occlusion,
                             bool* updated) {
  <API key>();

  bool ignore_occlusions = !occlusion;
  if (!<API key>(left, top, right, bottom, ignore_occlusions)) {
    failed_update_ = true;
    return false;
  }

  gfx::Rect update_rect;
  gfx::Rect paint_rect;
  MarkTilesForUpdate(
      &update_rect, &paint_rect, left, top, right, bottom, ignore_occlusions);

  if (paint_rect.IsEmpty())
    return true;

  *updated = true;
  UpdateTileTextures(
      update_rect, paint_rect, left, top, right, bottom, queue, occlusion);
  return true;
}
// Marks tiles in the given index range whose visible portion is fully
// occluded; non-occluded tiles get a late texture request so they can be
// painted this frame.
// NOTE(review): |occluded_tile_count| and |succeeded| are accumulated but
// never consumed here — presumably leftovers from removed stats/DCHECK code;
// confirm before deleting them.
void TiledLayer::<API key>(
    int left,
    int top,
    int right,
    int bottom,
    const OcclusionTracker<Layer>* occlusion) {
  int occluded_tile_count = 0;
  bool succeeded = true;
  for (int j = top; j <= bottom; ++j) {
    for (int i = left; i <= right; ++i) {
      UpdatableTile* tile = TileAt(i, j);
      DCHECK(tile);  // Did <API key> get skipped?
      // TODO(enne): This should not ever be null.
      if (!tile)
        continue;
      // Did ResetUpdateState get skipped? Are we doing more than one occlusion
      // pass?
      DCHECK(!tile->occluded);
      gfx::Rect visible_tile_rect = gfx::IntersectRects(
          tiler_->tile_bounds(i, j), <API key>());
      if (!<API key>() && occlusion &&
          occlusion->Occluded(
              render_target(), visible_tile_rect, draw_transform())) {
        tile->occluded = true;
        occluded_tile_count++;
      } else {
        succeeded &= tile->managed_resource()->RequestLate();
      }
    }
  }
}
// Ensures every non-occluded tile in the index range has a texture it can
// paint into. Tiles without backing content are force-marked fully dirty.
// Returns false as soon as any needed texture cannot be acquired.
bool TiledLayer::<API key>(int left,
                           int top,
                           int right,
                           int bottom,
                           bool ignore_occlusions) {
  for (int j = top; j <= bottom; ++j) {
    for (int i = left; i <= right; ++i) {
      UpdatableTile* tile = TileAt(i, j);
      DCHECK(tile);  // Did SetTexturePriorites get skipped?
      // TODO(enne): This should not ever be null.
      if (!tile)
        continue;

      // Ensure the entire tile is dirty if we don't have the texture.
      if (!tile->managed_resource()-><API key>())
        tile->dirty_rect = tiler_->TileRect(tile);

      // If using occlusion and the visible region of the tile is occluded,
      // don't reserve a texture or update the tile.
      if (tile->occluded && !ignore_occlusions)
        continue;

      if (!tile->managed_resource()-><API key>())
        return false;
    }
  }
  return true;
}
// Promises an update for each non-occluded tile in the range and accumulates
// the combined dirty area (|update_rect|) and the area that will be painted
// (|paint_rect|). Tiles whose backing is in use may either take a partial
// update or be forced fully dirty so visible textures update atomically.
void TiledLayer::MarkTilesForUpdate(gfx::Rect* update_rect,
                                    gfx::Rect* paint_rect,
                                    int left,
                                    int top,
                                    int right,
                                    int bottom,
                                    bool ignore_occlusions) {
  for (int j = top; j <= bottom; ++j) {
    for (int i = left; i <= right; ++i) {
      UpdatableTile* tile = TileAt(i, j);
      DCHECK(tile);  // Did SetTexturePriorites get skipped?
      // TODO(enne): This should not ever be null.
      if (!tile)
        continue;
      if (tile->occluded && !ignore_occlusions)
        continue;

      // Prepare update rect from original dirty rects.
      update_rect->Union(tile->dirty_rect);

      // TODO(reveman): Decide if partial update should be allowed based on cost
      if (tile->is_dirty() &&
          !layer_tree_host()-><API key>()) {
        // If we get a partial update, we use the same texture, otherwise return
        // the current texture backing, so we don't update visible textures
        // non-atomically.  If the current backing is in-use, it won't be
        // deleted until after the commit as the texture manager will not allow
        // deletion or recycling of in-use textures.
        if (<API key>(tile) &&
            layer_tree_host()-><API key>()) {
          tile->partial_update = true;
        } else {
          tile->dirty_rect = tiler_->TileRect(tile);
          tile->managed_resource()-><API key>();
        }
      }

      paint_rect->Union(tile->dirty_rect);
      tile->MarkForUpdate();
    }
  }
}
// Paints |paint_rect| once via the updater and then uploads each tile's slice
// of the painted area, tracking per-tile opaque rects along the way.
void TiledLayer::UpdateTileTextures(const gfx::Rect& update_rect,
                                    const gfx::Rect& paint_rect,
                                    int left,
                                    int top,
                                    int right,
                                    int bottom,
                                    ResourceUpdateQueue* queue,
                                    const OcclusionTracker<Layer>* occlusion) {
  // The update_rect should be in layer space. So we have to convert the
  // paint_rect from content space to layer space.
  float width_scale =
      paint_properties().bounds.width() /
      static_cast<float>(content_bounds().width());
  float height_scale =
      paint_properties().bounds.height() /
      static_cast<float>(content_bounds().height());
  update_rect_ = gfx::ScaleRect(update_rect, width_scale, height_scale);

  // Calling PrepareToUpdate() calls into WebKit to paint, which may have the
  // side effect of disabling compositing, which causes our reference to the
  // texture updater to be deleted.  However, we can't free the memory backing
  // the SkCanvas until the paint finishes, so we grab a local reference here to
  // hold the updater alive until the paint completes.
  scoped_refptr<LayerUpdater> protector(Updater());
  gfx::Rect painted_opaque_rect;
  Updater()->PrepareToUpdate(paint_rect,
                             tiler_->tile_size(),
                             1.f / width_scale,
                             1.f / height_scale,
                             &painted_opaque_rect);

  for (int j = top; j <= bottom; ++j) {
    for (int i = left; i <= right; ++i) {
      UpdatableTile* tile = TileAt(i, j);
      DCHECK(tile);  // Did SetTexturePriorites get skipped?
      // TODO(enne): This should not ever be null.
      if (!tile)
        continue;

      gfx::Rect tile_rect = tiler_->tile_bounds(i, j);

      // Use update_rect as the above loop copied the dirty rect for this frame
      // to update_rect.
      gfx::Rect dirty_rect = tile->update_rect;
      if (dirty_rect.IsEmpty())
        continue;

      // Save what was painted opaque in the tile. Keep the old area if the
      // paint didn't touch it, and didn't paint some other part of the tile
      // opaque.
      gfx::Rect tile_painted_rect = gfx::IntersectRects(tile_rect, paint_rect);
      gfx::Rect <API key> =
          gfx::IntersectRects(tile_rect, painted_opaque_rect);
      if (!tile_painted_rect.IsEmpty()) {
        gfx::Rect <API key> =
            gfx::IntersectRects(tile->opaque_rect(), tile_painted_rect);
        bool <API key> =
            !<API key>.IsEmpty() &&
            !<API key>.Contains(<API key>);
        bool <API key> =
            !<API key>.IsEmpty() &&
            !tile->opaque_rect().Contains(<API key>);
        if (<API key> ||
            <API key>)
          tile->set_opaque_rect(<API key>);
      }

      // source_rect starts as a full-sized tile with border texels included.
      gfx::Rect source_rect = tiler_->TileRect(tile);
      source_rect.Intersect(dirty_rect);
      // Paint rect not guaranteed to line up on tile boundaries, so
      // make sure that source_rect doesn't extend outside of it.
      source_rect.Intersect(paint_rect);

      tile->update_rect = source_rect;

      if (source_rect.IsEmpty())
        continue;

      const gfx::Point anchor = tiler_->TileRect(tile).origin();

      // Calculate tile-space rectangle to upload into.
      gfx::Vector2d dest_offset = source_rect.origin() - anchor;
      CHECK_GE(dest_offset.x(), 0);
      CHECK_GE(dest_offset.y(), 0);

      // Offset from paint rectangle to this tile's dirty rectangle.
      gfx::Vector2d paint_offset = source_rect.origin() - paint_rect.origin();
      CHECK_GE(paint_offset.x(), 0);
      CHECK_GE(paint_offset.y(), 0);
      CHECK_LE(paint_offset.x() + source_rect.width(), paint_rect.width());
      CHECK_LE(paint_offset.y() + source_rect.height(), paint_rect.height());

      tile->updater_resource()->Update(
          queue, source_rect, dest_offset, tile->partial_update);
    }
  }
}
// This picks a small animated layer to be anything less than one viewport. This
// is specifically for page transitions which are viewport-sized layers. The
// extra tile of padding is due to these layers being slightly larger than the
// viewport in some cases.
bool TiledLayer::<API key>() const {
  // Only applies while the layer is animating (redacted predicates appear to
  // test transform/opacity animation state).
  if (!<API key>() && !<API key>())
    return false;
  gfx::Size viewport_size =
      layer_tree_host() ? layer_tree_host()-><API key>()
                        : gfx::Size();
  gfx::Rect content_rect(content_bounds());
  return content_rect.width() <=
         viewport_size.width() + tiler_->tile_size().width() &&
         content_rect.height() <=
         viewport_size.height() + tiler_->tile_size().height();
}
namespace {
// TODO(epenner): Remove this and make this based on distance once distance can
// be calculated for offscreen layers. For now, prioritize all small animated
// layers after 512 pixels of pre-painting.
//
// Computes a texture priority for one tile from its distance to the visible
// rect, boosted for small animated layers, and applies it to |texture|.
void <API key>(const gfx::Rect& visible_rect,
               const gfx::Rect& tile_rect,
               bool draws_to_root,
               bool <API key>,
               PrioritizedResource* texture) {
  int priority = PriorityCalculator::LowestPriority();
  if (!visible_rect.IsEmpty()) {
    priority = PriorityCalculator::<API key>(
        visible_rect, tile_rect, draws_to_root);
  }
  if (<API key>) {
    // Small animated layers get at least the animating-layer priority so
    // they can pre-paint entirely.
    priority = PriorityCalculator::max_priority(
        priority, PriorityCalculator::<API key>());
  }
  if (priority != PriorityCalculator::LowestPriority())
    texture-><API key>(priority);
}
}  // namespace
// Creates tiles covering the pre-paint rect (or the whole layer for small
// animated layers) and assigns a texture priority to every existing tile.
void TiledLayer::<API key>(const PriorityCalculator& priority_calc) {
  UpdateBounds();
  ResetUpdateState();
  <API key>();

  if (tiler_->has_empty_bounds())
    return;

  bool draws_to_root = !render_target()->parent();
  bool <API key> = <API key>();

  // Minimally create the tiles in the desired pre-paint rect.
  gfx::Rect create_tiles_rect = IdlePaintRect();
  if (<API key>)
    create_tiles_rect = gfx::Rect(content_bounds());
  if (!create_tiles_rect.IsEmpty()) {
    int left, top, right, bottom;
    tiler_-><API key>(
        create_tiles_rect, &left, &top, &right, &bottom);
    for (int j = top; j <= bottom; ++j) {
      for (int i = left; i <= right; ++i) {
        if (!TileAt(i, j))
          CreateTile(i, j);
      }
    }
  }

  // Now update priorities on all tiles we have in the layer, no matter where
  // they are.
  for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin();
       iter != tiler_->tiles().end();
       ++iter) {
    UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second);
    // TODO(enne): This should not ever be null.
    if (!tile)
      continue;
    gfx::Rect tile_rect = tiler_->TileRect(tile);
    <API key>(<API key>,
              tile_rect,
              draws_to_root,
              <API key>,
              tile->managed_resource());
  }
}
// Returns the part of the visible content known to be opaque: nothing when
// draws are skipped, everything when the layer is opaque, otherwise whatever
// the tiler has tracked as opaque.
Region TiledLayer::<API key>() const {
  if (skips_draw_)
    return Region();
  if (contents_opaque())
    return <API key>();
  return tiler_-><API key>(<API key>());
}
void TiledLayer::ResetUpdateState() {
skips_draw_ = false;
failed_update_ = false;
LayerTilingData::TileMap::const_iterator end = tiler_->tiles().end();
for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin();
iter != end;
++iter) {
UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second);
// TODO(enne): This should not ever be null.
if (!tile)
continue;
tile->ResetUpdateState();
}
}
namespace {
// Grows |rect| by |delta| in the direction the delta points: a negative
// component moves the corresponding origin edge backwards, a positive one
// extends the far edge. The result always contains the original rect.
gfx::Rect ExpandRectByDelta(const gfx::Rect& rect, const gfx::Vector2d& delta) {
  gfx::Rect expanded = rect;
  expanded.set_width(rect.width() + std::abs(delta.x()));
  expanded.set_height(rect.height() + std::abs(delta.y()));
  if (delta.x() < 0)
    expanded.set_x(rect.x() + delta.x());
  if (delta.y() < 0)
    expanded.set_y(rect.y() + delta.y());
  return expanded;
}
}
// Predicts the upcoming visible rect by extrapolating the last scroll delta.
void TiledLayer::<API key>() {
  // This scroll prediction is very primitive and should be replaced by a
  // a recursive calculation on all layers which uses actual scroll/animation
  // velocities. To insure this doesn't miss-predict, we only use it to predict
  // the visible_rect if:
  // - content_bounds() hasn't changed.
  // - visible_rect.size() hasn't changed.
  // These two conditions prevent rotations, scales, pinch-zooms etc. where
  // the prediction would be incorrect.
  gfx::Vector2d delta = <API key>().CenterPoint() -
                        <API key>.CenterPoint();
  predicted_scroll_ = -delta;
  <API key> = <API key>();
  if (<API key> == content_bounds() &&
      <API key>.size() == <API key>().size()) {
    // Only expand the visible rect in the major scroll direction, to prevent
    // massive paints due to diagonal scrolls.
    gfx::Vector2d major_scroll_delta =
        (std::abs(delta.x()) > std::abs(delta.y())) ?
        gfx::Vector2d(delta.x(), 0) :
        gfx::Vector2d(0, delta.y());
    <API key> =
        ExpandRectByDelta(<API key>(), major_scroll_delta);

    // Bound the prediction to prevent unbounded paints, and clamp to content
    // bounds.
    gfx::Rect bound = <API key>();
    bound.Inset(-tiler_->tile_size().width() * <API key>,
                -tiler_->tile_size().height() * <API key>);
    bound.Intersect(gfx::Rect(content_bounds()));
    <API key>.Intersect(bound);
  }
  // Remember current state for the next frame's delta computation.
  <API key> = content_bounds();
  <API key> = <API key>();
}
bool TiledLayer::Update(ResourceUpdateQueue* queue,
const OcclusionTracker<Layer>* occlusion) {
DCHECK(!skips_draw_ && !failed_update_); // Did ResetUpdateState get skipped?
// Tiled layer always causes commits to wait for activation, as it does
// not support pending trees.
<API key>();
bool updated = false;
{
base::AutoReset<bool> <API key>(&<API key>,
true);
updated |= <API key>::Update(queue, occlusion);
UpdateBounds();
}
if (tiler_->has_empty_bounds() || !DrawsContent())
return false;
// Animation pre-paint. If the layer is small, try to paint it all
// immediately whether or not it is occluded, to avoid paint/upload
// hiccups while it is animating.
if (<API key>()) {
int left, top, right, bottom;
tiler_-><API key>(gfx::Rect(content_bounds()),
&left,
&top,
&right,
&bottom);
UpdateTiles(left, top, right, bottom, queue, NULL, &updated);
if (updated)
return updated;
// This was an attempt to paint the entire layer so if we fail it's okay,
// just fallback on painting visible etc. below.
failed_update_ = false;
}
if (<API key>.IsEmpty())
return updated;
// Visible painting. First occlude visible tiles and paint the non-occluded
// tiles.
int left, top, right, bottom;
tiler_-><API key>(
<API key>, &left, &top, &right, &bottom);
<API key>(left, top, right, bottom, occlusion);
skips_draw_ = !UpdateTiles(
left, top, right, bottom, queue, occlusion, &updated);
if (skips_draw_)
tiler_->reset();
if (skips_draw_ || updated)
return true;
// If we have already painting everything visible. Do some pre-painting while
// idle.
gfx::Rect <API key> = IdlePaintRect();
if (<API key>.IsEmpty())
return updated;
// Prepaint anything that was occluded but inside the layer's visible region.
if (!UpdateTiles(left, top, right, bottom, queue, NULL, &updated) ||
updated)
return updated;
int prepaint_left, prepaint_top, prepaint_right, prepaint_bottom;
tiler_-><API key>(<API key>,
&prepaint_left,
&prepaint_top,
&prepaint_right,
&prepaint_bottom);
// Then expand outwards one row/column at a time until we find a dirty
// row/column to update. Increment along the major and minor scroll directions
// first.
gfx::Vector2d delta = -predicted_scroll_;
delta = gfx::Vector2d(delta.x() == 0 ? 1 : delta.x(),
delta.y() == 0 ? 1 : delta.y());
gfx::Vector2d major_delta =
(std::abs(delta.x()) > std::abs(delta.y())) ? gfx::Vector2d(delta.x(), 0)
: gfx::Vector2d(0, delta.y());
gfx::Vector2d minor_delta =
(std::abs(delta.x()) <= std::abs(delta.y())) ? gfx::Vector2d(delta.x(), 0)
: gfx::Vector2d(0, delta.y());
gfx::Vector2d deltas[4] = { major_delta, minor_delta, -major_delta,
-minor_delta };
for (int i = 0; i < 4; i++) {
if (deltas[i].y() > 0) {
while (bottom < prepaint_bottom) {
++bottom;
if (!UpdateTiles(
left, bottom, right, bottom, queue, NULL, &updated) ||
updated)
return updated;
}
}
if (deltas[i].y() < 0) {
while (top > prepaint_top) {
--top;
if (!UpdateTiles(
left, top, right, top, queue, NULL, &updated) ||
updated)
return updated;
}
}
if (deltas[i].x() < 0) {
while (left > prepaint_left) {
--left;
if (!UpdateTiles(
left, top, left, bottom, queue, NULL, &updated) ||
updated)
return updated;
}
}
if (deltas[i].x() > 0) {
while (right < prepaint_right) {
++right;
if (!UpdateTiles(
right, top, right, bottom, queue, NULL, &updated) ||
updated)
return updated;
}
}
}
return updated;
}
// Re-applies the current texture format to every tile's resource, so tiles
// created before a format change get recreated with the right format.
void TiledLayer::<API key>() {
  // Ensure that all textures are of the right format.
  for (LayerTilingData::TileMap::const_iterator iter = tiler_->tiles().begin();
       iter != tiler_->tiles().end();
       ++iter) {
    UpdatableTile* tile = static_cast<UpdatableTile*>(iter->second);
    if (!tile)
      continue;
    PrioritizedResource* resource = tile->managed_resource();
    resource->SetDimensions(resource->size(), texture_format_);
  }
}
// Returns true if there is at least one tile in the pre-paint rect that is
// dirty (or missing contents), can acquire a texture, and wasn't already
// updated this frame.
bool TiledLayer::NeedsIdlePaint() {
  // Don't trigger more paints if we failed (as we'll just fail again).
  if (failed_update_ || <API key>().IsEmpty() ||
      tiler_->has_empty_bounds() || !DrawsContent())
    return false;

  gfx::Rect <API key> = IdlePaintRect();
  if (<API key>.IsEmpty())
    return false;

  int left, top, right, bottom;
  tiler_-><API key>(
      <API key>, &left, &top, &right, &bottom);

  for (int j = top; j <= bottom; ++j) {
    for (int i = left; i <= right; ++i) {
      UpdatableTile* tile = TileAt(i, j);
      DCHECK(tile);  // Did SetTexturePriorities get skipped?
      if (!tile)
        continue;

      bool updated = !tile->update_rect.IsEmpty();
      bool can_acquire =
          tile->managed_resource()-><API key>();
      bool dirty =
          tile->is_dirty() || !tile->managed_resource()-><API key>();
      if (!updated && can_acquire && dirty)
        return true;
    }
  }
  return false;
}

// The pre-paint rect: the predicted visible rect inflated by the configured
// number of pre-paint rows/columns, clamped to the content bounds.
gfx::Rect TiledLayer::IdlePaintRect() {
  // Don't inflate an empty rect.
  if (<API key>().IsEmpty())
    return gfx::Rect();

  gfx::Rect prepaint_rect = <API key>();
  prepaint_rect.Inset(-tiler_->tile_size().width() * kPrepaintColumns,
                      -tiler_->tile_size().height() * kPrepaintRows);
  gfx::Rect content_rect(content_bounds());
  prepaint_rect.Intersect(content_rect);

  return prepaint_rect;
}
} // namespace cc
|
# cf-builder-card
> Cloudflare Card Builder
## Installation
```sh
$ npm install cf-builder-card
```
## Usage
```jsx
import React from 'react';
import { CardBuilder } from 'cf-builder-card';
import { Table, TableBody, TableRow, TableCell } from 'cf-component-table';
import { Button } from 'cf-component-button';

const EXAMPLE_CARD = 'EXAMPLE_CARD';

const MyButton = (
  <Button type="default" onClick={() => console.log('Button clicked!')}>
    Click me!
  </Button>
);

const BuilderCard = () => (
  <CardBuilder
    cardName={EXAMPLE_CARD}
    title="This is a Card"
    description="This is the description of a card."
    control={MyButton}
    table={
      <Table striped>
        <TableBody>
          <TableRow>
            <TableCell>One</TableCell>
            <TableCell>Two</TableCell>
          </TableRow>
          <TableRow>
            <TableCell>Three</TableCell>
            <TableCell>Four</TableCell>
          </TableRow>
        </TableBody>
      </Table>
    }
    drawers={[
      {
        id: 'api',
        name: 'API',
        content: 'API Content'
      },
      {
        id: 'help',
        name: 'Help',
        content: 'Help Content'
      }
    ]}
  />
);

export default BuilderCard;
```
|
// Ukrainian (ua) localization strings for the Redactor editor.
// Fixes: several entries were left in English (link_edit, anchor,
// link_new_tab, underline, alignment, filename) and 'horizontalrule' used the
// Russian form "Горизонтальная" instead of Ukrainian "Горизонтальна".
(function ($) {
  $.Redactor.opts.langs['ua'] = {
    html: 'Код',
    video: 'Відео',
    image: 'Зображення',
    table: 'Таблиця',
    link: 'Посилання',
    link_insert: 'Вставити посилання ...',
    link_edit: 'Редагувати посилання',
    unlink: 'Видалити посилання',
    formatting: 'Стилі',
    paragraph: 'Звичайний текст',
    quote: 'Цитата',
    code: 'Код',
    header1: 'Заголовок 1',
    header2: 'Заголовок 2',
    header3: 'Заголовок 3',
    header4: 'Заголовок 4',
    bold: 'Жирний',
    italic: 'Похилий',
    fontcolor: 'Колір тексту',
    backcolor: 'Заливка тексту',
    unorderedlist: 'Звичайний список',
    orderedlist: 'Нумерований список',
    outdent: 'Зменшити відступ',
    indent: 'Збільшити відступ',
    cancel: 'Скасувати',
    insert: 'Вставити',
    save: 'Зберегти',
    _delete: 'Видалити',
    insert_table: 'Вставити таблицю',
    insert_row_above: 'Додати рядок зверху',
    insert_row_below: 'Додати рядок знизу',
    insert_column_left: 'Додати стовпець ліворуч',
    insert_column_right: 'Додати стовпець праворуч',
    delete_column: 'Видалити стовпець',
    delete_row: 'Видалити рядок',
    delete_table: 'Видалити таблицю',
    rows: 'Рядки',
    columns: 'Стовпці',
    add_head: 'Додати заголовок',
    delete_head: 'Видалити заголовок',
    title: 'Підказка',
    image_view: 'Завантажити зображення',
    image_position: 'Обтікання текстом',
    none: 'ні',
    left: 'ліворуч',
    right: 'праворуч',
    image_web_link: 'Посилання на зображення',
    text: 'Текст',
    mailto: 'Ел. пошта',
    web: 'URL',
    video_html_code: 'Код відео ролика',
    file: 'Файл',
    upload: 'Завантажити',
    download: 'Завантажити',
    choose: 'Вибрати',
    or_choose: 'Або виберіть',
    drop_file_here: 'Перетягніть файл сюди',
    align_left: 'По лівому краю',
    align_center: 'По центру',
    align_right: 'По правому краю',
    align_justify: 'Вирівняти текст по ширині',
    horizontalrule: 'Горизонтальна лінійка',
    fullscreen: 'На весь екран',
    deleted: 'Закреслений',
    anchor: 'Якір',
    link_new_tab: 'Відкрити посилання у новій вкладці',
    underline: 'Підкреслений',
    alignment: 'Вирівнювання',
    filename: 'Назва (необов\'язково)'
  };
})( jQuery );
|
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Text;
using System.Windows;
using System.Windows.Data;
namespace Microsoft.Management.UI.Internal
{
/// <summary>
/// Takes a value and returns the largest value which is a integral amount of the second value.
/// </summary>
[SuppressMessage("Microsoft.MSInternal", "CA903:<API key>")]
public class IntegralConverter : <API key>
{
    /// <summary>
    /// Takes a value and returns the largest value which is a integral amount of the second value.
    /// </summary>
    /// <param name="values">
    /// The first value is the source. The second is the factor.
    /// </param>
    /// <param name="targetType">The parameter is not used.</param>
    /// <param name="parameter">The padding to subtract from the first value.</param>
    /// <param name="culture">The parameter is not used.</param>
    /// <returns>
    /// The integral value.
    /// </returns>
    public object Convert(object[] values, Type targetType, object parameter, System.Globalization.CultureInfo culture)
    {
        if (values == null)
        {
            throw new <API key>("values");
        }

        if (values.Length != 2)
        {
            throw new ArgumentException("Two values expected", "values");
        }

        // Propagate unset bindings unchanged so WPF can fall back gracefully.
        if (values[0] == DependencyProperty.UnsetValue ||
            values[1] == DependencyProperty.UnsetValue)
        {
            return DependencyProperty.UnsetValue;
        }

        var source = (double)values[0];
        var factor = (double)values[1];

        double padding = 0;
        if (parameter != null)
        {
            padding = double.Parse((string)parameter, CultureInfo.InvariantCulture);
        }

        var newSource = source - padding;

        // Anything smaller than one factor is returned untouched.
        if (newSource < factor)
        {
            return source;
        }

        // Round the padded value down to the nearest multiple of the factor.
        var remainder = newSource % factor;
        var result = newSource - remainder;

        return result;
    }

    /// <summary>
    /// This method is not used.
    /// </summary>
    /// <param name="value">The parameter is not used.</param>
    /// <param name="targetTypes">The parameter is not used.</param>
    /// <param name="parameter">The parameter is not used.</param>
    /// <param name="culture">The parameter is not used.</param>
    /// <returns>The parameter is not used.</returns>
    public object[] ConvertBack(object value, Type[] targetTypes, object parameter, System.Globalization.CultureInfo culture)
    {
        throw new <API key>();
    }
}
}
|
$.extend(frappe.model, {
	// Per-doctype/per-name metadata (attachments, comments, assignments).
	docinfo: {},

	// Sync an incoming server response into client-side state.
	// Accepts either a bare doc/array of docs or a {docs, docinfo} envelope;
	// registers docs in `locals`, refreshes the open form (`cur_frm`) if it
	// shows one of them, and handles local->saved renames. Returns r.docs.
	sync: function(r) {
		/* docs:
			extract docs, docinfo (attachments, comments, assignments)
			from incoming request and set in `locals` and `frappe.model.docinfo`
		*/
		var isPlain;
		// Normalize: a bare payload becomes {docs: payload}; a single plain
		// object becomes a one-element array.
		if(!r.docs && !r.docinfo) r = {docs:r};

		isPlain = $.isPlainObject(r.docs);
		if(isPlain) r.docs = [r.docs];

		if(r.docs) {
			var last_parent_name = null;

			for(var i=0, l=r.docs.length; i<l; i++) {
				var d = r.docs[i];

				frappe.model.add_to_locals(d);

				d.__last_sync_on = new Date();

				// DocType documents also refresh the cached meta.
				if(d.doctype==="DocType") {
					frappe.meta.sync(d);
				}

				// Keep the open form pointing at the freshest copy.
				if(cur_frm && cur_frm.doctype==d.doctype && cur_frm.docname==d.name) {
					cur_frm.doc = d;
				}

				// A server-side rename: re-key locals/docinfo from the old
				// local name to the saved name and notify listeners.
				if(d.localname) {
					frappe.model.new_names[d.localname] = d.name;
					$(document).trigger('rename', [d.doctype, d.localname, d.name]);
					delete locals[d.doctype][d.localname];

					// update docinfo to new dict keys
					if(i===0) {
						frappe.model.docinfo[d.doctype][d.name] = frappe.model.docinfo[d.doctype][d.localname];
						frappe.model.docinfo[d.doctype][d.localname] = undefined;
					}
				}
			}

			// A plain-object payload means "the current form's doc" — mark it
			// dirty so the UI reflects unsaved server-merged state.
			if(cur_frm && isPlain) cur_frm.dirty();
		}

		// set docinfo (comments, assign, attachments)
		if(r.docinfo) {
			// docinfo attaches to the first synced doc, else the open form's doc.
			if(r.docs) {
				var doc = r.docs[0];
			} else {
				if(cur_frm)
					var doc = cur_frm.doc;
			}
			if(doc) {
				if(!frappe.model.docinfo[doc.doctype])
					frappe.model.docinfo[doc.doctype] = {};
				frappe.model.docinfo[doc.doctype][doc.name] = r.docinfo;
			}
		}

		return r.docs;
	},

	// Register one document (and, for parent docs, all its child-table rows)
	// in the global `locals` cache, assigning a local name if needed.
	add_to_locals: function(doc) {
		if(!locals[doc.doctype])
			locals[doc.doctype] = {};

		if(!doc.name && doc.__islocal) { // get name (local if required)
			if(!doc.parentfield) frappe.model.clear_doc(doc);
			doc.name = frappe.model.get_new_name(doc.doctype);
			if(!doc.parentfield) frappe.provide("frappe.model.docinfo." + doc.doctype + "." + doc.name);
		}

		locals[doc.doctype][doc.name] = doc;

		// add child docs to locals
		if(!doc.parentfield) {
			for(var i in doc) {
				var value = doc[i];

				if($.isArray(value)) {
					for (var x=0, y=value.length; x < y; x++) {
						var d = value[x];

						if(!d.parent)
							d.parent = doc.name;

						frappe.model.add_to_locals(d);
					}
				}
			}
		}
	}
});
|
#include <iostream>
#include <cstdio>
using namespace std;
// Prints two newlines: one via iostream, one via stdio.
int main() {
  std::cout << "\n";
  // Bug fix: printf('\n') passed a char literal (promoted to int) where a
  // const char* format string is required — undefined behavior. Use a
  // string literal instead.
  printf("\n");
  // your code goes here
  return 0;
}
|
package org.knowm.xchange.bitmarket;
import static org.assertj.core.api.Assertions.assertThat;
import java.math.BigDecimal;
import java.util.List;
import java.util.Map;
import org.knowm.xchange.bitmarket.dto.account.BitMarketBalance;
import org.knowm.xchange.bitmarket.dto.marketdata.BitMarketOrderBook;
import org.knowm.xchange.bitmarket.dto.marketdata.BitMarketTicker;
import org.knowm.xchange.bitmarket.dto.marketdata.BitMarketTrade;
import org.knowm.xchange.bitmarket.dto.trade.BitMarketOrder;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.UserTrade;
/**
 * Assertion helpers for BitMarket adapter tests.
 *
 * <p>Each overload performs field-by-field equality checks with AssertJ so a
 * failure reports the first differing field instead of a whole-object
 * mismatch. Assertion order within a method is significant: the first failing
 * field aborts the method.
 */
public class BitMarketAssert {

  /** Compares currency, total, available, and frozen amounts of two balances. */
  public static void assertEquals(Balance o1, Balance o2) {
    assertThat(o1.getCurrency()).isEqualTo(o2.getCurrency());
    assertThat(o1.getTotal()).isEqualTo(o2.getTotal());
    assertThat(o1.getAvailable()).isEqualTo(o2.getAvailable());
    assertThat(o1.getFrozen()).isEqualTo(o2.getFrozen());
  }

  /** Compares all public fields of two market trades. */
  public static void assertEquals(Trade o1, Trade o2) {
    assertThat(o1.getType()).isEqualTo(o2.getType());
    assertThat(o1.getOriginalAmount()).isEqualTo(o2.getOriginalAmount());
    assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair());
    assertThat(o1.getPrice()).isEqualTo(o2.getPrice());
    assertThat(o1.getTimestamp()).isEqualTo(o2.getTimestamp());
    assertThat(o1.getId()).isEqualTo(o2.getId());
  }

  /** Compares trade fields plus the user-trade extras (order id, fee amount/currency). */
  public static void assertEquals(UserTrade o1, UserTrade o2) {
    assertThat(o1.getType()).isEqualTo(o2.getType());
    assertThat(o1.getOriginalAmount()).isEqualTo(o2.getOriginalAmount());
    assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair());
    assertThat(o1.getPrice()).isEqualTo(o2.getPrice());
    assertThat(o1.getTimestamp()).isEqualTo(o2.getTimestamp());
    assertThat(o1.getId()).isEqualTo(o2.getId());
    assertThat(o1.getOrderId()).isEqualTo(o2.getOrderId());
    assertThat(o1.getFeeAmount()).isEqualTo(o2.getFeeAmount());
    assertThat(o1.getFeeCurrency()).isEqualTo(o2.getFeeCurrency());
  }

  /** Compares two limit orders including their timestamps. */
  public static void assertEquals(LimitOrder o1, LimitOrder o2) {
    assertThat(o1.getId()).isEqualTo(o2.getId());
    assertThat(o1.getType()).isEqualTo(o2.getType());
    assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair());
    assertThat(o1.getLimitPrice()).isEqualTo(o2.getLimitPrice());
    assertThat(o1.getOriginalAmount()).isEqualTo(o2.getOriginalAmount());
    assertThat(o1.getTimestamp()).isEqualTo(o2.getTimestamp());
  }

  /**
   * Compares two limit orders WITHOUT the timestamp — used when comparing
   * order-book entries, whose timestamps are not expected to match.
   */
  public static void <API key>(LimitOrder o1, LimitOrder o2) {
    assertThat(o1.getId()).isEqualTo(o2.getId());
    assertThat(o1.getType()).isEqualTo(o2.getType());
    assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair());
    assertThat(o1.getLimitPrice()).isEqualTo(o2.getLimitPrice());
    assertThat(o1.getOriginalAmount()).isEqualTo(o2.getOriginalAmount());
  }

  /** Compares all public fields of two adapted tickers. */
  public static void assertEquals(Ticker o1, Ticker o2) {
    assertThat(o1.getBid()).isEqualTo(o2.getBid());
    assertThat(o1.getAsk()).isEqualTo(o2.getAsk());
    assertThat(o1.getCurrencyPair()).isEqualTo(o2.getCurrencyPair());
    assertThat(o1.getHigh()).isEqualTo(o2.getHigh());
    assertThat(o1.getLast()).isEqualTo(o2.getLast());
    assertThat(o1.getLow()).isEqualTo(o2.getLow());
    assertThat(o1.getTimestamp()).isEqualTo(o2.getTimestamp());
    assertThat(o1.getVolume()).isEqualTo(o2.getVolume());
    assertThat(o1.getVwap()).isEqualTo(o2.getVwap());
  }

  /** Compares timestamps and both sides (asks, bids) of two adapted order books. */
  public static void assertEquals(OrderBook o1, OrderBook o2) {
    assertThat(o1.getTimeStamp()).isEqualTo(o2.getTimeStamp());
    assertEquals(o1.getAsks(), o2.getAsks());
    assertEquals(o1.getBids(), o2.getBids());
  }

  /** Compares two order lists element-wise (timestamps excluded per element). */
  public static void assertEquals(List<LimitOrder> o1, List<LimitOrder> o2) {
    assertThat(o1.size()).isEqualTo(o2.size());
    for (int i = 0; i < o1.size(); i++) {
      <API key>(o1.get(i), o2.get(i));
    }
  }

  /** Compares all public fields of two raw BitMarket orders. */
  public static void assertEquals(BitMarketOrder o1, BitMarketOrder o2) {
    assertThat(o1.getId()).isEqualTo(o2.getId());
    assertThat(o1.getMarket()).isEqualTo(o2.getMarket());
    assertThat(o1.getAmount()).isEqualTo(o2.getAmount());
    assertThat(o1.getRate()).isEqualTo(o2.getRate());
    assertThat(o1.getFiat()).isEqualTo(o2.getFiat());
    assertThat(o1.getType()).isEqualTo(o2.getType());
    assertThat(o1.getTime()).isEqualTo(o2.getTime());
  }

  /** Compares both sides of two raw order books, plus their toString() forms. */
  public static void assertEquals(BitMarketOrderBook o1, BitMarketOrderBook o2) {
    assertEquals(o1.getAsks(), o2.getAsks());
    assertEquals(o1.getBids(), o2.getBids());
    assertThat(o1.toString()).isEqualTo(o2.toString());
  }

  /** Compares all public fields of two raw BitMarket tickers, plus toString(). */
  public static void assertEquals(BitMarketTicker o1, BitMarketTicker o2) {
    assertThat(o1.getAsk()).isEqualTo(o2.getAsk());
    assertThat(o1.getBid()).isEqualTo(o2.getBid());
    assertThat(o1.getLast()).isEqualTo(o2.getLast());
    assertThat(o1.getLow()).isEqualTo(o2.getLow());
    assertThat(o1.getHigh()).isEqualTo(o2.getHigh());
    assertThat(o1.getVwap()).isEqualTo(o2.getVwap());
    assertThat(o1.getVolume()).isEqualTo(o2.getVolume());
    assertThat(o1.toString()).isEqualTo(o2.toString());
  }

  /** Compares all public fields of two raw BitMarket trades, plus toString(). */
  public static void assertEquals(BitMarketTrade o1, BitMarketTrade o2) {
    assertThat(o1.getTid()).isEqualTo(o2.getTid());
    assertThat(o1.getPrice()).isEqualTo(o2.getPrice());
    assertThat(o1.getAmount()).isEqualTo(o2.getAmount());
    assertThat(o1.getDate()).isEqualTo(o2.getDate());
    assertThat(o1.toString()).isEqualTo(o2.toString());
  }

  /** Compares the available and blocked balance maps of two raw balances. */
  public static void assertEquals(BitMarketBalance o1, BitMarketBalance o2) {
    assertEquals(o1.getAvailable(), o2.getAvailable());
    assertEquals(o1.getBlocked(), o2.getBlocked());
  }

  /** Compares two currency->amount maps by size and per-key value. */
  private static void assertEquals(Map<String, BigDecimal> o1, Map<String, BigDecimal> o2) {
    assertThat(o1.size()).isEqualTo(o2.size());
    for (String key : o1.keySet()) {
      assertThat(o1.get(key)).isEqualTo(o2.get(key));
    }
  }

  /** Compares two 2-D BigDecimal arrays (raw order-book sides) element-wise. */
  private static void assertEquals(BigDecimal[][] o1, BigDecimal[][] o2) {
    assertThat(o1.length).isEqualTo(o2.length);
    for (int i = 0; i < o1.length; i++) {
      assertThat(o1[i].length).isEqualTo(o2[i].length);
      for (int j = 0; j < o1[i].length; j++) {
        assertThat(o1[i][j]).isEqualTo(o2[i][j]);
      }
    }
  }
}
|
package org.knowm.xchange.test.exx;
import java.io.IOException;
import org.knowm.xchange.Exchange;
import org.knowm.xchange.ExchangeFactory;
import org.knowm.xchange.<API key>;
import org.knowm.xchange.exx.EXXExchange;
import org.knowm.xchange.service.account.AccountService;
/**
* kevinobamatheus@gmail.com
*
* @author kevingates
*/
public class <API key> {

  /**
   * Entry point: fetches and prints EXX account information.
   * I/O failures are reported to stderr and the program exits normally.
   */
  public static void main(String[] args) {
    try {
      getAssetInfo();
    } catch (IOException e) {
      e.printStackTrace();
    }
  }

  /**
   * Connects to the EXX exchange and prints the account info and wallets.
   *
   * <p>NOTE(review): apiKey/secretKey are intentionally blank placeholders —
   * fill them in before running; authenticated calls will fail otherwise.
   *
   * @throws IOException if exchange setup fails (inner account call handles
   *     its own IOException)
   */
  private static void getAssetInfo() throws IOException {
    String apiKey = "";
    String secretKey = "";

    Exchange exchange = ExchangeFactory.INSTANCE.createExchange(EXXExchange.class.getName());
    <API key> <API key> = exchange.<API key>();
    <API key>.setSslUri("https://trade.exx.com");
    <API key>.setApiKey(apiKey);
    <API key>.setSecretKey(secretKey);
    exchange.applySpecification(<API key>);

    AccountService accountService = exchange.getAccountService();
    try {
      System.out.println("accountInfo");
      // PERF FIX: getAccountInfo() performs a remote request; the original
      // called it twice back-to-back. Fetch once and reuse the result.
      org.knowm.xchange.dto.account.AccountInfo accountInfo = accountService.getAccountInfo();
      System.out.println(accountInfo);
      System.out.println(accountInfo.getWallets());
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
}
|
from django.shortcuts import render
def home(request):
    """Render the site landing page with an empty template context."""
    context = {}
    return render(request, 'home.html', context)
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>org.apache.commons.math3.geometry.euclidean.twod.hull (Apache Commons Math 3.5 API)</title>
<link rel="stylesheet" type="text/css" href="../../../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="org.apache.commons.math3.geometry.euclidean.twod.hull (Apache Commons Math 3.5 API)";
}
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<div class="topNav"><a name="navbar_top">
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
<li class="navBarCell1Rev">Package</li>
<li>Class</li>
<li><a href="package-use.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage"><em><script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=<API key>"></script></em></div>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../../../../../org/apache/commons/math3/geometry/euclidean/twod/package-summary.html">PREV PACKAGE</a></li>
<li><a href="../../../../../../../../org/apache/commons/math3/geometry/hull/package-summary.html">NEXT PACKAGE</a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../../index.html?org/apache/commons/math3/geometry/euclidean/twod/hull/package-summary.html" target="_top">FRAMES</a></li>
<li><a href="package-summary.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="<API key>">
<li><a href="../../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("<API key>");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
</script>
</div>
<a name="skip-navbar_top">
</a></div>
<div class="header">
<h1 title="Package" class="title">Package org.apache.commons.math3.geometry.euclidean.twod.hull</h1>
<p class="subTitle">
<div class="block">
This package provides algorithms to generate the convex hull
for a set of points in an two-dimensional euclidean space.</div>
</p>
<p>See: <a href="#package_description">Description</a></p>
</div>
<div class="contentContainer">
<ul class="blockList">
<li class="blockList">
<table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Interface Summary table, listing interfaces, and an explanation">
<caption><span>Interface Summary</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Interface</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="../../../../../../../../org/apache/commons/math3/geometry/euclidean/twod/hull/<API key>.html" title="interface in org.apache.commons.math3.geometry.euclidean.twod.hull"><API key></a></td>
<td class="colLast">
<div class="block">Interface for convex hull generators in the two-dimensional euclidean space.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation">
<caption><span>Class Summary</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Class</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="../../../../../../../../org/apache/commons/math3/geometry/euclidean/twod/hull/<API key>.html" title="class in org.apache.commons.math3.geometry.euclidean.twod.hull"><API key></a></td>
<td class="colLast">
<div class="block">A simple heuristic to improve the performance of convex hull algorithms.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../../../../../../org/apache/commons/math3/geometry/euclidean/twod/hull/ConvexHull2D.html" title="class in org.apache.commons.math3.geometry.euclidean.twod.hull">ConvexHull2D</a></td>
<td class="colLast">
<div class="block">This class represents a convex hull in an two-dimensional euclidean space.</div>
</td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../../../../../../org/apache/commons/math3/geometry/euclidean/twod/hull/MonotoneChain.html" title="class in org.apache.commons.math3.geometry.euclidean.twod.hull">MonotoneChain</a></td>
<td class="colLast">
<div class="block">Implements Andrew's monotone chain method to generate the convex hull of a finite set of
points in the two-dimensional euclidean space.</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
<a name="package_description">
</a>
<h2 title="Package org.apache.commons.math3.geometry.euclidean.twod.hull Description">Package org.apache.commons.math3.geometry.euclidean.twod.hull Description</h2>
<div class="block"><p>
This package provides algorithms to generate the convex hull
for a set of points in an two-dimensional euclidean space.
</p></div>
</div>
<div class="bottomNav"><a name="navbar_bottom">
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="<API key>">
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../../overview-summary.html">Overview</a></li>
<li class="navBarCell1Rev">Package</li>
<li>Class</li>
<li><a href="package-use.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage"><em><script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=<API key>"></script></em></div>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../../../../../org/apache/commons/math3/geometry/euclidean/twod/package-summary.html">PREV PACKAGE</a></li>
<li><a href="../../../../../../../../org/apache/commons/math3/geometry/hull/package-summary.html">NEXT PACKAGE</a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../../index.html?org/apache/commons/math3/geometry/euclidean/twod/hull/package-summary.html" target="_top">FRAMES</a></li>
<li><a href="package-summary.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="<API key>">
<li><a href="../../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("<API key>");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
</script>
</div>
<a name="skip-navbar_bottom">
</a></div>
<p class="legalCopy"><small>Copyright &copy; 2003&ndash;2015 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
</body>
</html>
|
# Check the various features of the ShTest format.
# RUN: not %{lit} -j 1 -v %{inputs}/shtest-format > %t.out
# RUN: FileCheck < %t.out %s
# END.
# CHECK: -- Testing:
# CHECK: FAIL: shtest-format :: external_shell/fail.txt
# CHECK: Command Output (stderr):
# CHECK: cat: does-not-exist: No such file or directory
# CHECK: --
# CHECK: PASS: shtest-format :: external_shell/pass.txt
# CHECK: FAIL: shtest-format :: fail.txt
# CHECK: UNRESOLVED: shtest-format :: no-test-line.txt
# CHECK: PASS: shtest-format :: pass.txt
# CHECK: UNSUPPORTED: shtest-format :: requires-missing.txt
# CHECK: PASS: shtest-format :: requires-present.txt
# CHECK: UNSUPPORTED: shtest-format :: unsupported_dir/some-test.txt
# CHECK: XFAIL: shtest-format :: xfail-feature.txt
# CHECK: XFAIL: shtest-format :: xfail-target.txt
# CHECK: XFAIL: shtest-format :: xfail.txt
# CHECK: XPASS: shtest-format :: xpass.txt
# CHECK: Testing Time
# CHECK: Unexpected Passing Tests (1)
# CHECK: shtest-format :: xpass.txt
# CHECK: Failing Tests (2)
# CHECK: shtest-format :: external_shell/fail.txt
# CHECK: shtest-format :: fail.txt
# CHECK: Expected Passes : 3
# CHECK: Expected Failures : 3
# CHECK: Unsupported Tests : 2
# CHECK: Unresolved Tests : 1
# CHECK: Unexpected Passes : 1
# CHECK: Unexpected Failures: 2
|
// 3Sum: return all unique triplets {a, b, c} from num with a + b + c == 0.
// Sorts ascending, anchors 'a' on successively smaller values (via reverse
// iterators), then squeezes the remaining range with two pointers 'b'/'c'.
// Time Complexity: O(n^2)
// Space Complexity: O(1) extra (excluding the output)
class Solution {
public:
    std::vector<std::vector<int> > threeSum(std::vector<int> &num) {
        std::vector<std::vector<int> > ans;
        // ROBUSTNESS FIX: with fewer than 3 elements, prev(last, 2) below
        // would move an iterator before rbegin() -- undefined behavior.
        if (num.size() < 3)
            return ans;
        const int target = 0;
        std::sort(num.begin(), num.end());
        auto last = num.rend();
        for (auto a = num.rbegin(); a < std::prev(last, 2); ++a) {
            if (a > num.rbegin() && *a == *(a - 1))
                continue;                      // skip duplicate anchor values
            auto b = std::next(a);             // next smaller element
            auto c = std::prev(last);          // smallest element
            while (b < c) {
                if (b > std::next(a) && *b == *(b - 1)) {
                    ++b;                       // skip duplicate middle values
                } else if (c < std::prev(last) && *c == *(c + 1)) {
                    --c;                       // skip duplicate low values
                } else {
                    const int sum = *a + *b + *c;
                    if (sum < target)
                        --c;                   // need a larger low value
                    else if (sum > target)
                        ++b;                   // need a smaller middle value
                    else {
                        // store in ascending order: *c <= *b <= *a
                        ans.push_back({ *c, *b, *a});
                        ++b;
                        --c;
                    }
                }
            }
        }
        return ans;
    }
};
|
#include "qhull_a.h"
/*
  qh_qhull()
    compute the convex hull of the input points

  notes (grounded in the visible code):
    - records elapsed build time in qh.hulltime via qh_CPUclock
    - with rerun ('TRn') or joggled input ('QJn'), delegates the build loop
      to the restartable builder; otherwise runs qh_initbuild/qh_buildhull once
    - unless stopped early (STOPpoint/STOPcone), performs the requested
      post-merge passes, re-partitions any points left on the visible list,
      and optionally verifies coplanar points (qh_check_maxout)
    - errors exit through qh_errexit(); on success sets qh.QHULLfinished
*/
void qh_qhull(void) {
  int numoutside;

  qh hulltime= qh_CPUclock;
  if (qh RERUN || qh JOGGLEmax < REALmax/2)
    <API key>();   /* restartable build for rerun/joggle */
  else {
    qh_initbuild();
    qh_buildhull();
  }
  if (!qh STOPpoint && !qh STOPcone) {
    /* post-processing: skipped entirely when the build was stopped early */
    if (qh ZEROall_ok && !qh TESTvneighbors && qh MERGEexact)
      qh_checkzero( qh_ALL);
    if (qh ZEROall_ok && !qh TESTvneighbors && !qh WAScoplanar) {
      trace2((qh ferr, 2055, "qh_qhull: all facets are clearly convex and no coplanar points.  Post-merging and check of maxout not needed.\n"));
      qh DOcheckmax= False;
    }else {
      if (qh MERGEexact || (qh hull_dim > qh_DIMreduceBuild && qh PREmerge))
        qh_postmerge("First post-merge", qh premerge_centrum, qh premerge_cos,
             (qh POSTmerge ? False : qh TESTvneighbors));
      else if (!qh POSTmerge && qh TESTvneighbors)
        qh_postmerge("For testing vertex neighbors", qh premerge_centrum,
             qh premerge_cos, True);
      if (qh POSTmerge)
        qh_postmerge("For post-merging", qh postmerge_centrum,
             qh postmerge_cos, qh TESTvneighbors);
      if (qh visible_list == qh facet_list) { /* i.e., merging done */
        qh findbestnew= True;
        qh_partitionvisible(/*qh.visible_list*/ !qh_ALL, &numoutside);
        qh findbestnew= False;
        qh_deletevisible(/*qh.visible_list*/);
        qh_resetlists(False, qh_RESETvisible /*qh.visible_list newvertex_list newfacet_list */);
      }
    }
    if (qh DOcheckmax){
      if (qh REPORTfreq) {
        qh_buildtracing(NULL, NULL);  /* NULL furthest => progress report only */
        qh_fprintf(qh ferr, 8115, "\nTesting all coplanar points.\n");
      }
      qh_check_maxout();
    }
    if (qh KEEPnearinside && !qh maxoutdone)
      qh_nearcoplanar();
  }
  /* temporary sets must all have been freed by now */
  if (qh_setsize(qhmem.tempstack) != 0) {
    qh_fprintf(qh ferr, 6164, "qhull internal error (qh_qhull): temporary sets not empty(%d)\n",
             qh_setsize(qhmem.tempstack));
    qh_errexit(qh_ERRqhull, NULL, NULL);
  }
  qh hulltime= qh_CPUclock - qh hulltime;
  qh QHULLfinished= True;
  trace1((qh ferr, 1036, "Qhull: algorithm completed\n"));
} /* qhull */
/*
  qh_addpoint( furthest, facet, checkdist )
    add point 'furthest' (above 'facet') to the hull

  returns:
    True if the algorithm should continue; False if it should stop
    (STOPpoint/STOPcone matched -- visible/new facet lists may still
    be defined in that case)

  notes (grounded in the visible code):
    - if checkdist is set, re-locates the best facet first; a point that
      is no longer outside is partitioned as coplanar and the call returns
    - otherwise: find the horizon, build the cone of new facets, match and
      merge them as configured, re-partition the outside points of the
      visible facets, then delete the visible facets
    - under 'QG' (ONLYgood) the new cone is discarded when it contains no
      good facets
    - maintains the Z*/W* balance statistics throughout
*/
boolT qh_addpoint(pointT *furthest, facetT *facet, boolT checkdist) {
  int goodvisible, goodhorizon;
  vertexT *vertex;
  facetT *newfacet;
  realT dist, newbalance, pbalance;
  boolT isoutside= False;
  int numpart, numpoints, numnew, firstnew;

  qh maxoutdone= False;
  if (qh_pointid(furthest) == qh_IDunknown)
    qh_setappend(&qh other_points, furthest);  /* track points not in qh.first_point */
  if (!facet) {
    qh_fprintf(qh ferr, 6213, "qhull internal error (qh_addpoint): NULL facet.  Need to call qh_findbestfacet first\n");
    qh_errexit(qh_ERRqhull, NULL, NULL);
  }
  if (checkdist) {
    facet= qh_findbest(furthest, facet, !qh_ALL, !qh_ISnewfacets, !qh_NOupper,
                  &dist, &isoutside, &numpart);
    zzadd_(Zpartition, numpart);
    if (!isoutside) {
      zinc_(Znotmax);  /* last point of outsideset is no longer furthest. */
      facet->notfurthest= True;
      <API key>(furthest, facet, &dist);   /* partition as coplanar */
      return True;
    }
  }
  qh_buildtracing(furthest, facet);
  if (qh STOPpoint < 0 && qh furthest_id == -qh STOPpoint-1) {
    facet->notfurthest= True;
    return False;   /* stop BEFORE adding this point */
  }
  qh_findhorizon(furthest, facet, &goodvisible, &goodhorizon);
  if (qh ONLYgood && !(goodvisible+goodhorizon) && !qh GOODclosest) {
    zinc_(Znotgood);
    facet->notfurthest= True;
    /* last point of outsideset is no longer furthest.  This is ok
       since all points of the outside are likely to be bad */
    qh_resetlists(False, qh_RESETvisible /*qh.visible_list newvertex_list newfacet_list */);
    return True;
  }
  zzinc_(Zprocessed);
  firstnew= qh facet_id;
  vertex= qh_makenewfacets(furthest /*visible_list, attaches if !ONLYgood */);
  qh_makenewplanes(/* newfacet_list */);
  numnew= qh facet_id - firstnew;
  /* newbalance: how many new facets vs. the expected number for this hull */
  newbalance= numnew - (realT) (qh num_facets-qh num_visible)
                         * qh hull_dim/qh num_vertices;
  wadd_(Wnewbalance, newbalance);
  wadd_(Wnewbalance2, newbalance * newbalance);
  if (qh ONLYgood
  && !qh_findgood(qh newfacet_list, goodhorizon) && !qh GOODclosest) {
    /* no good facets in the new cone: undo it entirely */
    FORALLnew_facets
      qh_delfacet(newfacet);
    qh_delvertex(vertex);
    qh_resetlists(True, qh_RESETvisible /*qh.visible_list newvertex_list newfacet_list */);
    zinc_(Znotgoodnew);
    facet->notfurthest= True;
    return True;
  }
  if (qh ONLYgood)
    qh_attachnewfacets(/*visible_list*/);
  qh_matchnewfacets();
  qh_updatevertices();
  if (qh STOPcone && qh furthest_id == qh STOPcone-1) {
    facet->notfurthest= True;
    return False;  /* visible_list etc. still defined */
  }
  qh findbestnew= False;
  if (qh PREmerge || qh MERGEexact) {
    qh_premerge(vertex, qh premerge_centrum, qh premerge_cos);
    if (qh_USEfindbestnew)
      qh findbestnew= True;
    else {
      /* after merging, non-simplicial new facets need qh_findbestnew */
      FORALLnew_facets {
        if (!newfacet->simplicial) {
          qh findbestnew= True;  /* use qh_findbestnew instead of qh_findbest*/
          break;
        }
      }
    }
  }else if (qh BESToutside)
    qh findbestnew= True;
  qh_partitionvisible(/*qh.visible_list*/ !qh_ALL, &numpoints);
  qh findbestnew= False;
  qh findbest_notsharp= False;
  zinc_(Zpbalance);
  /* pbalance: partitioned points vs. the expected number */
  pbalance= numpoints - (realT) qh hull_dim /* assumes all points extreme */
                * (qh num_points - qh num_vertices)/qh num_vertices;
  wadd_(Wpbalance, pbalance);
  wadd_(Wpbalance2, pbalance * pbalance);
  qh_deletevisible(/*qh.visible_list*/);
  zmax_(Zmaxvertex, qh num_vertices);
  qh NEWfacets= False;
  if (qh IStracing >= 4) {
    if (qh num_facets < 2000)
      qh_printlists();
    qh_printfacetlist(qh newfacet_list, NULL, True);
    qh_checkpolygon(qh facet_list);
  }else if (qh CHECKfrequently) {
    /* check only the new facets when the hull is large */
    if (qh num_facets < 50)
      qh_checkpolygon(qh facet_list);
    else
      qh_checkpolygon(qh newfacet_list);
  }
  if (qh STOPpoint > 0 && qh furthest_id == qh STOPpoint-1)
    return False;   /* stop AFTER adding this point */
  qh_resetlists(True, qh_RESETvisible /*qh.visible_list newvertex_list newfacet_list */);
  /* qh_triangulate(); to test qh.TRInormals */
  trace2((qh ferr, 2056, "qh_addpoint: added p%d new facets %d new balance %2.2g point balance %2.2g\n",
    qh_pointid(furthest), numnew, newbalance, pbalance));
  return True;
} /* addpoint */
/*
  <API key>()
    build the hull, restarting via longjmp(qh.restartexit) after precision
    errors when joggle ('QJn') is in effect, or re-running qh.RERUN times

  notes (grounded in the visible code):
    - sets qh.ALLOWrestart for the duration so qh_precision() may longjmp back
    - on restart, bumps retry statistics and disables normal output
      (qh.STOPcone= qh_IDunknown)
    - gives up after qh_JOGGLEmaxretry joggled attempts
    - on the final RERUN pass, restores the requested trace settings
    - each attempt frees the previous build (qh_freebuild) and appends a
      "_run" option marker to qh.qhull_options
*/
void <API key>(void) {
  int restart;

  qh ALLOWrestart= True;
  while (True) {
    restart= setjmp(qh restartexit); /* simple statement for CRAY J916 */
    if (restart) {       /* only from qh_precision() */
      zzinc_(Zretry);
      wmax_(Wretrymax, qh JOGGLEmax);
      /* QH7078 warns about using 'TCn' with 'QJn' */
      qh STOPcone= qh_IDunknown; /* if break from joggle, prevents normal output */
    }
    if (!qh RERUN && qh JOGGLEmax < REALmax/2) {
      /* joggle mode: keep retrying until a build succeeds without restart */
      if (qh build_cnt > qh_JOGGLEmaxretry) {
        qh_fprintf(qh ferr, 6229, "qhull precision error: %d attempts to construct a convex hull\n\
with joggled input.  Increase joggle above 'QJ%2.2g'\n\
or modify qh_JOGGLE... parameters in user.h\n",
           qh build_cnt, qh JOGGLEmax);
        qh_errexit(qh_ERRqhull, NULL, NULL);
      }
      if (qh build_cnt && !restart)
        break;
    }else if (qh build_cnt && qh build_cnt >= qh RERUN)
      break;
    qh STOPcone= 0;
    qh_freebuild(True);  /* first call is a nop */
    qh build_cnt++;
    if (!qh qhull_optionsiz)
      qh qhull_optionsiz= (int)strlen(qh qhull_options);   /* WARN64 */
    else {
      /* truncate the option string back to its original length for this run */
      qh qhull_options [qh qhull_optionsiz]= '\0';
      qh qhull_optionlen= qh_OPTIONline;  /* starts a new line */
    }
    qh_option("_run", &qh build_cnt, NULL);
    if (qh build_cnt == qh RERUN) {
      qh IStracing= qh TRACElastrun;  /* duplicated from <API key> */
      if (qh TRACEpoint != qh_IDunknown || qh TRACEdist < REALmax/2 || qh TRACEmerge) {
        qh TRACElevel= (qh IStracing? qh IStracing : 3);
        qh IStracing= 0;
      }
      qhmem.IStracing= qh IStracing;
    }
    if (qh JOGGLEmax < REALmax/2)
      qh_joggleinput();   /* randomly perturb the input points */
    qh_initbuild();
    qh_buildhull();
    if (qh JOGGLEmax < REALmax/2 && !qh MERGING)
      qh_checkconvex(qh facet_list, qh_ALGORITHMfault);
  }
  qh ALLOWrestart= False;
} /* <API key> */
/*
  qh_buildhull()
    construct the hull by adding outside points one at a time

  notes (grounded in the visible code):
    - sanity-checks that no visible/new facets or new vertices remain from
      a previous build; errors exit via qh_errexit
    - returns early if a stop point/cone ('TPn'/'TCn') is already a vertex
      of the initial hull
    - main loop: qh_nextfurthest picks the next outside point,
      qh_addpoint adds it; the loop ends when qh_addpoint asks to stop
      or no outside points remain
    - with 'Qn' (NARROWhull), remaining outside points are moved to
      coplanar sets afterwards
*/
void qh_buildhull(void) {
  facetT *facet;
  pointT *furthest;
  vertexT *vertex;
  int id;

  trace1((qh ferr, 1037, "qh_buildhull: start build hull\n"));
  FORALLfacets {
    if (facet->visible || facet->newfacet) {
      qh_fprintf(qh ferr, 6165, "qhull internal error (qh_buildhull): visible or new facet f%d in facet list\n",
                   facet->id);
      qh_errexit(qh_ERRqhull, facet, NULL);
    }
  }
  FORALLvertices {
    if (vertex->newlist) {
      qh_fprintf(qh ferr, 6166, "qhull internal error (qh_buildhull): new vertex f%d in vertex list\n",
                   vertex->id);
      qh_errprint("ERRONEOUS", NULL, NULL, NULL, vertex);
      qh_errexit(qh_ERRqhull, NULL, NULL);
    }
    id= qh_pointid(vertex->point);
    if ((qh STOPpoint>0 && id == qh STOPpoint-1) ||
        (qh STOPpoint<0 && id == -qh STOPpoint-1) ||
        (qh STOPcone>0 && id == qh STOPcone-1)) {
      trace1((qh ferr, 1038,"qh_buildhull: stop point or cone P%d in initial hull\n", id));
      return;
    }
  }
  qh facet_next= qh facet_list;      /* advance facet when processed */
  while ((furthest= qh_nextfurthest(&facet))) {
    qh num_outside--;  /* if ONLYmax, furthest may not be outside */
    if (!qh_addpoint(furthest, facet, qh ONLYmax))
      break;
  }
  if (qh NARROWhull) /* move points from outsideset to coplanarset */
    qh_outcoplanar( /* facet_list */ );
  if (qh num_outside && !furthest) {
    qh_fprintf(qh ferr, 6167, "qhull internal error (qh_buildhull): %d outside points were never processed.\n", qh num_outside);
    qh_errexit(qh_ERRqhull, NULL, NULL);
  }
  trace1((qh ferr, 1039, "qh_buildhull: completed the hull construction\n"));
} /* buildhull */
/*
  qh_buildtracing( furthest, facet )
    trace an iteration of qh_buildhull() for the point 'furthest' above 'facet'

  notes (grounded in the visible code):
    - if furthest is NULL, prints a final progress summary and returns
    - enables/disables tracing when furthest matches qh.TRACEpoint ('TPn')
    - prints a periodic progress report every qh.REPORTfreq new facets
    - resets facet/vertex visit ids before they can overflow 31 bits
    - temporarily disables qh.RANDOMdist so the distance computations here
      are not perturbed; restores it before returning
    - records the processed point in qh.furthest_id
*/
void qh_buildtracing(pointT *furthest, facetT *facet) {
  realT dist= 0;
  float cpu;
  int total, furthestid;
  time_t timedata;
  struct tm *tp;
  vertexT *vertex;

  qh old_randomdist= qh RANDOMdist;
  qh RANDOMdist= False;
  if (!furthest) {
    /* final summary report */
    time(&timedata);
    tp= localtime(&timedata);
    cpu= (float)qh_CPUclock - (float)qh hulltime;
    cpu /= (float)qh_SECticks;
    total= zzval_(Ztotmerge) - zzval_(Zcyclehorizon) + zzval_(Zcyclefacettot);
    qh_fprintf(qh ferr, 8118, "\n\
At %02d:%02d:%02d & %2.5g CPU secs, qhull has created %d facets and merged %d.\n\
The current hull contains %d facets and %d vertices. Last point was p%d\n",
      tp->tm_hour, tp->tm_min, tp->tm_sec, cpu, qh facet_id -1,
      total, qh num_facets, qh num_vertices, qh furthest_id);
    return;
  }
  furthestid= qh_pointid(furthest);
  if (qh TRACEpoint == furthestid) {
    /* start tracing at the requested point */
    qh IStracing= qh TRACElevel;
    qhmem.IStracing= qh TRACElevel;
  }else if (qh TRACEpoint != qh_IDunknown && qh TRACEdist < REALmax/2) {
    qh IStracing= 0;
    qhmem.IStracing= 0;
  }
  if (qh REPORTfreq && (qh facet_id-1 > qh lastreport+qh REPORTfreq)) {
    /* periodic progress report */
    qh lastreport= qh facet_id-1;
    time(&timedata);
    tp= localtime(&timedata);
    cpu= (float)qh_CPUclock - (float)qh hulltime;
    cpu /= (float)qh_SECticks;
    total= zzval_(Ztotmerge) - zzval_(Zcyclehorizon) + zzval_(Zcyclefacettot);
    zinc_(Zdistio);
    qh_distplane(furthest, facet, &dist);
    qh_fprintf(qh ferr, 8119, "\n\
At %02d:%02d:%02d & %2.5g CPU secs, qhull has created %d facets and merged %d.\n\
The current hull contains %d facets and %d vertices. There are %d\n\
outside points. Next is point p%d(v%d), %2.2g above f%d.\n",
      tp->tm_hour, tp->tm_min, tp->tm_sec, cpu, qh facet_id -1,
      total, qh num_facets, qh num_vertices, qh num_outside+1,
      furthestid, qh vertex_id, dist, getid_(facet));
  }else if (qh IStracing >=1) {
    cpu= (float)qh_CPUclock - (float)qh hulltime;
    cpu /= (float)qh_SECticks;
    qh_distplane(furthest, facet, &dist);
    qh_fprintf(qh ferr, 8120, "qh_addpoint: add p%d(v%d) to hull of %d facets(%2.2g above f%d) and %d outside at %4.4g CPU secs.  Previous was p%d.\n",
      furthestid, qh vertex_id, qh num_facets, dist,
      getid_(facet), qh num_outside+1, cpu, qh furthest_id);
  }
  /* reset visit ids before overflow */
  zmax_(Zvisit2max, (int)qh visit_id/2);
  if (qh visit_id > (unsigned) INT_MAX) { /* 31 bits */
    zinc_(Zvisit);
    qh visit_id= 0;
    FORALLfacets
      facet->visitid= 0;
  }
  zmax_(Zvvisit2max, (int)qh vertex_visit/2);
  if (qh vertex_visit > (unsigned) INT_MAX) { /* 31 bits */
    zinc_(Zvvisit);
    qh vertex_visit= 0;
    FORALLvertices
      vertex->visitid= 0;
  }
  qh furthest_id= furthestid;
  qh RANDOMdist= qh old_randomdist;
} /* buildtracing */
/*
  qh_errexit2( exitcode, facet, otherfacet )
    print both facets involved in an error, then exit qhull with the
    given exit code via qh_errexit().  Does not return.
*/
void qh_errexit2(int exitcode, facetT *facet, facetT *otherfacet) {
  qh_errprint("ERRONEOUS", facet, otherfacet, NULL, NULL);
  qh_errexit(exitcode, NULL, NULL);
} /* errexit2 */
/*
  qh_findhorizon( point, facet, goodvisible, goodhorizon )
    given a visible facet, find the point's horizon and the visible facets

  returns:
    qh.visible_list at the end of qh.facet_list, with all visible facets
    marked facet->visible and f.replace cleared; counts of good visible
    and good horizon facets in *goodvisible / *goodhorizon

  notes (grounded in the visible code):
    - breadth-first search from 'facet': a neighbor is visible when its
      distance to 'point' exceeds qh.MINvisible; otherwise it is a horizon
      facet, and coplanar (within qh.MAXcoplanar) horizon facets are flagged
      and update qh.max_outside / qh.min_vertex when merging
    - errors out for tricoplanar facets without 'Q11', and when the horizon
      is empty (point above all facets -- a precision error)
*/
void qh_findhorizon(pointT *point, facetT *facet, int *goodvisible, int *goodhorizon) {
  facetT *neighbor, **neighborp, *visible;
  int numhorizon= 0, coplanar= 0;
  realT dist;

  trace1((qh ferr, 1040,"qh_findhorizon: find horizon for point p%d facet f%d\n",qh_pointid(point),facet->id));
  *goodvisible= *goodhorizon= 0;
  zinc_(Ztotvisible);
  qh_removefacet(facet);  /* visible_list at end of qh facet_list */
  qh_appendfacet(facet);
  qh num_visible= 1;
  if (facet->good)
    (*goodvisible)++;
  qh visible_list= facet;
  facet->visible= True;
  facet->f.replace= NULL;
  if (qh IStracing >=4)
    qh_errprint("visible", facet, NULL, NULL, NULL);
  qh visit_id++;
  <API key> {   /* breadth-first over the growing visible list */
    if (visible->tricoplanar && !qh TRInormals) {
      qh_fprintf(qh ferr, 6230, "Qhull internal error (qh_findhorizon): does not work for tricoplanar facets.  Use option 'Q11'\n");
      qh_errexit(qh_ERRqhull, visible, NULL);
    }
    visible->visitid= qh visit_id;
    FOREACHneighbor_(visible) {
      if (neighbor->visitid == qh visit_id)
        continue;   /* already classified this iteration */
      neighbor->visitid= qh visit_id;
      zzinc_(Znumvisibility);
      qh_distplane(point, neighbor, &dist);
      if (dist > qh MINvisible) {
        /* neighbor is visible: move it onto the visible list */
        zinc_(Ztotvisible);
        qh_removefacet(neighbor);  /* append to end of qh visible_list */
        qh_appendfacet(neighbor);
        neighbor->visible= True;
        neighbor->f.replace= NULL;
        qh num_visible++;
        if (neighbor->good)
          (*goodvisible)++;
        if (qh IStracing >=4)
          qh_errprint("visible", neighbor, NULL, NULL, NULL);
      }else {
        /* neighbor is a horizon facet */
        if (dist > - qh MAXcoplanar) {
          neighbor->coplanar= True;
          zzinc_(Zcoplanarhorizon);
          qh_precision("coplanar horizon");
          coplanar++;
          if (qh MERGING) {
            if (dist > 0) {
              maximize_(qh max_outside, dist);
              maximize_(qh max_vertex, dist);
#if qh_MAXoutside
              maximize_(neighbor->maxoutside, dist);
#endif
            }else
              minimize_(qh min_vertex, dist);  /* due to merge later */
          }
          trace2((qh ferr, 2057, "qh_findhorizon: point p%d is coplanar to horizon f%d, dist=%2.7g < qh MINvisible(%2.7g)\n",
              qh_pointid(point), neighbor->id, dist, qh MINvisible));
        }else
          neighbor->coplanar= False;
        zinc_(Ztothorizon);
        numhorizon++;
        if (neighbor->good)
          (*goodhorizon)++;
        if (qh IStracing >=4)
          qh_errprint("horizon", neighbor, NULL, NULL, NULL);
      }
    }
  }
  if (!numhorizon) {
    qh_precision("empty horizon");
    qh_fprintf(qh ferr, 6168, "qhull precision error (qh_findhorizon): empty horizon\n\
QhullPoint p%d was above all facets.\n", qh_pointid(point));
    qh_printfacetlist(qh facet_list, NULL, True);
    qh_errexit(qh_ERRprec, NULL, NULL);
  }
  trace1((qh ferr, 1041, "qh_findhorizon: %d horizon facets(good %d), %d visible(good %d), %d coplanar\n",
          numhorizon, *goodhorizon, qh num_visible, *goodvisible, coplanar));
  if (qh IStracing >= 4 && qh num_facets < 50)
    qh_printlists();
} /* findhorizon */
/*
  qh_nextfurthest( visible )
    return the next furthest outside point to add to the hull,
    setting *visible to the facet it lies above

  returns:
    NULL when no outside points remain; otherwise removes the returned
    point from its facet's outsideset

  notes (grounded in the visible code):
    - skips facets with empty/absent outsidesets, freeing empty sets
    - with 'Qn' (NARROWhull), points closer than qh.MINoutside are left
      for qh_outcoplanar() and the facet is skipped
    - default path: take the last (furthest) point of the current facet's
      outsideset, optionally re-selecting the facet via qh_furthestnext
      ('PICKfurthest')
    - with 'Qr' (RANDOMoutside), picks a uniformly random outside point
      instead; errors out if qh.num_outside is inconsistent
    - with 'Qv' (VIRTUALmemory), processes the facet at the tail of the
      facet list to improve locality
*/
pointT *qh_nextfurthest(facetT **visible) {
  facetT *facet;
  int size, idx;
  realT randr, dist;
  pointT *furthest;

  while ((facet= qh facet_next) != qh facet_tail) {
    if (!facet->outsideset) {
      qh facet_next= facet->next;
      continue;
    }
    SETreturnsize_(facet->outsideset, size);
    if (!size) {
      qh_setfree(&facet->outsideset);
      qh facet_next= facet->next;
      continue;
    }
    if (qh NARROWhull) {
      if (facet->notfurthest)
        qh_furthestout(facet);   /* re-sort so the furthest point is last */
      furthest= (pointT*)qh_setlast(facet->outsideset);
#if qh_COMPUTEfurthest
      qh_distplane(furthest, facet, &dist);
      zinc_(Zcomputefurthest);
#else
      dist= facet->furthestdist;
#endif
      if (dist < qh MINoutside) { /* remainder of outside set is coplanar for qh_outcoplanar */
        qh facet_next= facet->next;
        continue;
      }
    }
    if (!qh RANDOMoutside && !qh VIRTUALmemory) {
      if (qh PICKfurthest) {
        qh_furthestnext(/* qh.facet_list */);
        facet= qh facet_next;
      }
      *visible= facet;
      return((pointT*)qh_setdellast(facet->outsideset));
    }
    if (qh RANDOMoutside) {
      int outcoplanar = 0;
      if (qh NARROWhull) {
        /* count points already skipped as coplanar so the random index
           only ranges over true outside points */
        FORALLfacets {
          if (facet == qh facet_next)
            break;
          if (facet->outsideset)
            outcoplanar += qh_setsize( facet->outsideset);
        }
      }
      randr= qh_RANDOMint;
      randr= randr/(qh_RANDOMmax+1);
      idx= (int)floor((qh num_outside - outcoplanar) * randr);
      FORALLfacet_(qh facet_next) {
        if (facet->outsideset) {
          SETreturnsize_(facet->outsideset, size);
          if (!size)
            qh_setfree(&facet->outsideset);
          else if (size > idx) {
            *visible= facet;
            return((pointT*)qh_setdelnth(facet->outsideset, idx));
          }else
            idx -= size;
        }
      }
      qh_fprintf(qh ferr, 6169, "qhull internal error (qh_nextfurthest): num_outside %d is too low\nby at least %d, or a random real %g >= 1.0\n",
              qh num_outside, idx+1, randr);
      qh_errexit(qh_ERRqhull, NULL, NULL);
    }else { /* VIRTUALmemory */
      facet= qh facet_tail->previous;
      if (!(furthest= (pointT*)qh_setdellast(facet->outsideset))) {
        if (facet->outsideset)
          qh_setfree(&facet->outsideset);
        qh_removefacet(facet);
        qh_prependfacet(facet, &qh facet_list);
        continue;
      }
      *visible= facet;
      return furthest;
    }
  }
  return NULL;
} /* nextfurthest */
/*
  qh_partitionall( vertices, points, numpoints )
    partition all points in points/numpoints to the outside sets of the
    current facet list (used once, after the initial hull is built)

  notes:
    vertices are the points already used as initial-hull vertices; they and
    qh.GOODpointp / qh.GOODvertexp are excluded from partitioning.
    Unless qh.BESToutside is set, points closer than qh_DISToutside are kept
    in 'pointset' and partitioned later (coplanar/inside handling).
    Sets qh.num_outside and clears/accumulates the Zpartition* statistics.
*/
void qh_partitionall(setT *vertices, pointT *points, int numpoints){
  setT *pointset;
  vertexT *vertex, **vertexp;
  pointT *point, **pointp, *bestpoint;
  int size, point_i, point_n, point_end, remaining, i, id;
  facetT *facet;
  realT bestdist= -REALmax, dist, distoutside;

  trace1((qh ferr, 1042, "qh_partitionall: partition all points into outside sets\n"));
  pointset= qh_settemp(numpoints);
  qh num_outside= 0;
  /* fill pointset with one slot per input point, indexed by point id */
  pointp= SETaddr_(pointset, pointT);
  for (i=numpoints, point= points; i--; point += qh hull_dim)
    *(pointp++)= point;
  qh_settruncate(pointset, numpoints);
  /* NULL out points already used as hull vertices */
  FOREACHvertex_(vertices) {
    if ((id= qh_pointid(vertex->point)) >= 0)
      SETelem_(pointset, id)= NULL;
  }
  /* also exclude the 'QGn' good point unless it is a stop point/cone target */
  id= qh_pointid(qh GOODpointp);
  if (id >=0 && qh STOPcone-1 != id && -qh STOPpoint-1 != id)
    SETelem_(pointset, id)= NULL;
  if (qh GOODvertexp && qh ONLYgood && !qh MERGING) { /* matches qhull()*/
    if ((id= qh_pointid(qh GOODvertexp)) >= 0)
      SETelem_(pointset, id)= NULL;
  }
  if (!qh BESToutside) { /* matches conditional for qh_partitionpoint below */
    distoutside= qh_DISToutside; /* multiple of qh.MINoutside & qh.max_outside, see user.h */
    zval_(Ztotpartition)= qh num_points - qh hull_dim - 1; /*misses GOOD... */
    remaining= qh num_facets;
    point_end= numpoints;
    FORALLfacets {
      /* size estimate: spread remaining points over remaining facets */
      size= point_end/(remaining--) + 100;
      facet->outsideset= qh_setnew(size);
      bestpoint= NULL;
      point_end= 0;
      FOREACHpoint_i_(pointset) {
        if (point) {
          zzinc_(Zpartitionall);
          qh_distplane(point, facet, &dist);
          if (dist < distoutside)
            /* not clearly outside this facet: keep for a later facet/pass */
            SETelem_(pointset, point_end++)= point;
          else {
            qh num_outside++;
            /* keep the furthest point last in the outside set */
            if (!bestpoint) {
              bestpoint= point;
              bestdist= dist;
            }else if (dist > bestdist) {
              qh_setappend(&facet->outsideset, bestpoint);
              bestpoint= point;
              bestdist= dist;
            }else
              qh_setappend(&facet->outsideset, point);
          }
        }
      }
      if (bestpoint) {
        qh_setappend(&facet->outsideset, bestpoint);
#if !qh_COMPUTEfurthest
        facet->furthestdist= bestdist;
#endif
      }else
        qh_setfree(&facet->outsideset);
      qh_settruncate(pointset, point_end);
    }
  }
  /* if !qh BESToutside, pointset contains points not assigned to outsideset */
  if (qh BESToutside || qh MERGING || qh KEEPcoplanar || qh KEEPinside) {
    qh findbestnew= True;
    FOREACHpoint_i_(pointset) {
      if (point)
        qh_partitionpoint(point, qh facet_list);
    }
    qh findbestnew= False;
  }
  zzadd_(Zpartitionall, zzval_(Zpartition));
  zzval_(Zpartition)= 0;
  qh_settempfree(&pointset);
  if (qh IStracing >= 4)
    qh_printfacetlist(qh facet_list, NULL, True);
} /* partitionall */
/*
  partitioncoplanar( point, facet, dist )
    partition a coplanar point into the coplanarset of the best facet.
    If dist is NULL, searches for the best facet from 'facet'; otherwise
    *dist is the already-known distance from point to facet.

  notes:
    sets qh.WAScoplanar and may raise qh.max_outside.
    If the best facet turns out to be "flipped" relative to the starting
    facet (negative angle between normals), the point is repartitioned as
    an ordinary point instead.  (Name redacted in this copy as <API key>.)
*/
void <API key>(pointT *point, facetT *facet, realT *dist) {
  facetT *bestfacet;
  pointT *oldfurthest;
  realT bestdist, dist2= 0, angle;
  int numpart= 0, oldfindbest;
  boolT isoutside;

  qh WAScoplanar= True;
  if (!dist) {
    /* no known distance: locate the best facet for this point */
    if (qh findbestnew)
      bestfacet= qh_findbestnew(point, facet, &bestdist, qh_ALL, &isoutside, &numpart);
    else
      bestfacet= qh_findbest(point, facet, qh_ALL, !qh_ISnewfacets, qh DELAUNAY,
                          &bestdist, &isoutside, &numpart);
    zinc_(Ztotpartcoplanar);
    zzadd_(Zpartcoplanar, numpart);
    if (!qh DELAUNAY && !qh KEEPinside) { /* for 'd', bestdist skips upperDelaunay facets */
      /* clearly-inside points are discarded unless kept by an option */
      if (qh KEEPnearinside) {
        if (bestdist < -qh NEARinside) {
          zinc_(Zcoplanarinside);
          trace4((qh ferr, 4062, "<API key>: point p%d is more than near-inside facet f%d dist %2.2g findbestnew %d\n",
                qh_pointid(point), bestfacet->id, bestdist, qh findbestnew));
          return;
        }
      }else if (bestdist < -qh MAXcoplanar) {
        trace4((qh ferr, 4063, "<API key>: point p%d is inside facet f%d dist %2.2g findbestnew %d\n",
              qh_pointid(point), bestfacet->id, bestdist, qh findbestnew));
        zinc_(Zcoplanarinside);
        return;
      }
    }
  }else {
    bestfacet= facet;
    bestdist= *dist;
  }
  if (bestdist > qh max_outside) {
    if (!dist && facet != bestfacet) {
      zinc_(Zpartangle);
      angle= qh_getangle(facet->normal, bestfacet->normal);
      if (angle < 0) {
        /* typically due to deleted vertex and coplanar facets, e.g.,
        RBOX 1000 s Z1 G1e-13 t1001185205 | QHULL Tv */
        zinc_(Zpartflip);
        trace2((qh ferr, 2058, "<API key>: repartition point p%d from f%d. It is above flipped facet f%d dist %2.2g\n",
              qh_pointid(point), facet->id, bestfacet->id, bestdist));
        oldfindbest= qh findbestnew;
        qh findbestnew= False;
        qh_partitionpoint(point, bestfacet);
        qh findbestnew= oldfindbest;
        return;
      }
    }
    qh max_outside= bestdist;
    if (bestdist > qh TRACEdist) {
      qh_fprintf(qh ferr, 8122, "<API key>: ====== p%d from f%d increases max_outside to %2.2g of f%d last p%d\n",
            qh_pointid(point), facet->id, bestdist, bestfacet->id, qh furthest_id);
      qh_errprint("DISTANT", facet, bestfacet, NULL, NULL);
    }
  }
  if (qh KEEPcoplanar + qh KEEPinside + qh KEEPnearinside) {
    /* keep the furthest coplanar point last in coplanarset */
    oldfurthest= (pointT*)qh_setlast(bestfacet->coplanarset);
    if (oldfurthest) {
      zinc_(Zcomputefurthest);
      qh_distplane(oldfurthest, bestfacet, &dist2);
    }
    if (!oldfurthest || dist2 < bestdist)
      qh_setappend(&bestfacet->coplanarset, point);
    else
      qh_setappend2ndlast(&bestfacet->coplanarset, point);
  }
  trace4((qh ferr, 4064, "<API key>: point p%d is coplanar with facet f%d(or inside) dist %2.2g\n",
        qh_pointid(point), bestfacet->id, bestdist));
} /* partitioncoplanar */
/*
  qh_partitionpoint( point, facet )
    assign point to the outside set of the best facet below 'facet' in the
    facet list, or to its coplanarset (via the coplanar partitioner), or
    drop it as an inside point.

  notes:
    the furthest outside point is kept last in each outsideset.
    NOTE(review): the #if qh_COMPUTEfurthest branch below references
    'oldfurthest', which is not declared in this function — presumably that
    configuration is never compiled (qh_COMPUTEfurthest is 0 by default);
    confirm before enabling it.
*/
void qh_partitionpoint(pointT *point, facetT *facet) {
  realT bestdist;
  boolT isoutside;
  facetT *bestfacet;
  int numpart;
#if qh_COMPUTEfurthest
  realT dist;
#endif

  if (qh findbestnew)
    bestfacet= qh_findbestnew(point, facet, &bestdist, qh BESToutside, &isoutside, &numpart);
  else
    bestfacet= qh_findbest(point, facet, qh BESToutside, qh_ISnewfacets, !qh_NOupper,
                    &bestdist, &isoutside, &numpart);
  zinc_(Ztotpartition);
  zzadd_(Zpartition, numpart);
  if (qh NARROWhull) {
    /* narrow hulls treat near-inside points as outside to avoid losing them */
    if (qh DELAUNAY && !isoutside && bestdist >= -qh MAXcoplanar)
      qh_precision("nearly incident point(narrow hull)");
    if (qh KEEPnearinside) {
      if (bestdist >= -qh NEARinside)
        isoutside= True;
    }else if (bestdist >= -qh MAXcoplanar)
      isoutside= True;
  }

  if (isoutside) {
    if (!bestfacet->outsideset
    || !qh_setlast(bestfacet->outsideset)) {
      /* first outside point for this facet */
      qh_setappend(&(bestfacet->outsideset), point);
      if (!bestfacet->newfacet) {
        qh_removefacet(bestfacet); /* make sure it's after qh facet_next */
        qh_appendfacet(bestfacet);
      }
#if !qh_COMPUTEfurthest
      bestfacet->furthestdist= bestdist;
#endif
    }else {
#if qh_COMPUTEfurthest
      zinc_(Zcomputefurthest);
      qh_distplane(oldfurthest, bestfacet, &dist);
      if (dist < bestdist)
        qh_setappend(&(bestfacet->outsideset), point);
      else
        qh_setappend2ndlast(&(bestfacet->outsideset), point);
#else
      /* keep the furthest point last in the outside set */
      if (bestfacet->furthestdist < bestdist) {
        qh_setappend(&(bestfacet->outsideset), point);
        bestfacet->furthestdist= bestdist;
      }else
        qh_setappend2ndlast(&(bestfacet->outsideset), point);
#endif
    }
    qh num_outside++;
    trace4((qh ferr, 4065, "qh_partitionpoint: point p%d is outside facet f%d new? %d (or narrowhull)\n",
          qh_pointid(point), bestfacet->id, bestfacet->newfacet));
  }else if (qh DELAUNAY || bestdist >= -qh MAXcoplanar) { /* for 'd', bestdist skips upperDelaunay facets */
    zzinc_(Zcoplanarpart);
    if (qh DELAUNAY)
      qh_precision("nearly incident point");
    if ((qh KEEPcoplanar + qh KEEPnearinside) || bestdist > qh max_outside)
      <API key>(point, bestfacet, &bestdist);
    else {
      trace4((qh ferr, 4066, "qh_partitionpoint: point p%d is coplanar to facet f%d (dropped)\n",
          qh_pointid(point), bestfacet->id));
    }
  }else if (qh KEEPnearinside && bestdist > -qh NEARinside) {
    zinc_(Zpartnear);
    <API key>(point, bestfacet, &bestdist);
  }else {
    zinc_(Zpartinside);
    trace4((qh ferr, 4067, "qh_partitionpoint: point p%d is inside all facets, closest to f%d dist %2.2g\n",
          qh_pointid(point), bestfacet->id, bestdist));
    if (qh KEEPinside)
      <API key>(point, bestfacet, &bestdist);
  }
} /* partitionpoint */
/*
  qh_partitionvisible( allpoints, numoutside )
    repartition the outside and coplanar sets of all visible facets
    (qh.visible_list) onto the new facets, and repartition the points of
    deleted vertices (qh.del_vertices).

  returns:
    *numoutside = number of points moved from outside sets.
    'allpoints' forces full partitioning of coplanar points (marked unused).

  notes:
    follows each visible facet's f.replace chain to a surviving new facet;
    a chain longer than qh.facet_id indicates an infinite loop.
*/
void qh_partitionvisible(/*qh.visible_list*/ boolT allpoints, int *numoutside) {
  facetT *visible, *newfacet;
  pointT *point, **pointp;
  int coplanar=0, size;
  unsigned count;
  vertexT *vertex, **vertexp;

  if (qh ONLYmax)
    maximize_(qh MINoutside, qh max_vertex);
  *numoutside= 0;
  <API key> {
    if (!visible->outsideset && !visible->coplanarset)
      continue;
    /* find a surviving replacement facet for this visible facet */
    newfacet= visible->f.replace;
    count= 0;
    while (newfacet && newfacet->visible) {
      newfacet= newfacet->f.replace;
      if (count++ > qh facet_id)
        qh_infiniteloop(visible);
    }
    if (!newfacet)
      newfacet= qh newfacet_list;
    if (newfacet == qh facet_tail) {
      qh_fprintf(qh ferr, 6170, "qhull precision error (qh_partitionvisible): all new facets deleted as\n degenerate facets. Can not continue.\n");
      qh_errexit(qh_ERRprec, NULL, NULL);
    }
    if (visible->outsideset) {
      size= qh_setsize(visible->outsideset);
      *numoutside += size;
      qh num_outside -= size;
      FOREACHpoint_(visible->outsideset)
        qh_partitionpoint(point, newfacet);
    }
    if (visible->coplanarset && (qh KEEPcoplanar + qh KEEPinside + qh KEEPnearinside)) {
      size= qh_setsize(visible->coplanarset);
      coplanar += size;
      FOREACHpoint_(visible->coplanarset) {
        if (allpoints) /* not used */
          qh_partitionpoint(point, newfacet);
        else
          <API key>(point, newfacet, NULL);
      }
    }
  }
  /* points of deleted vertices must be repartitioned as well */
  FOREACHvertex_(qh del_vertices) {
    if (vertex->point) {
      if (allpoints) /* not used */
        qh_partitionpoint(vertex->point, qh newfacet_list);
      else
        <API key>(vertex->point, qh newfacet_list, NULL);
    }
  }
  trace1((qh ferr, 1043,"qh_partitionvisible: partitioned %d points from outsidesets and %d points from coplanarsets\n", *numoutside, coplanar));
} /* partitionvisible */
void qh_precision(const char *reason) {
if (qh ALLOWrestart && !qh PREmerge && !qh MERGEexact) {
if (qh JOGGLEmax < REALmax/2) {
trace0((qh ferr, 26, "qh_precision: qhull restart because of %s\n", reason));
/* May be called repeatedly if qh->ALLOWrestart */
longjmp(qh restartexit, qh_ERRprec);
}
}
} /* qh_precision */
void qh_printsummary(FILE *fp) {
realT ratio, outerplane, innerplane;
float cpu;
int size, id, nummerged, numvertices, numcoplanars= 0, nonsimplicial=0;
int goodused;
facetT *facet;
const char *s;
int numdel= zzval_(Zdelvertextot);
int numtricoplanars= 0;
size= qh num_points + qh_setsize(qh other_points);
numvertices= qh num_vertices - qh_setsize(qh del_vertices);
id= qh_pointid(qh GOODpointp);
FORALLfacets {
if (facet->coplanarset)
numcoplanars += qh_setsize( facet->coplanarset);
if (facet->good) {
if (facet->simplicial) {
if (facet->keepcentrum && facet->tricoplanar)
numtricoplanars++;
}else if (qh_setsize(facet->vertices) != qh hull_dim)
nonsimplicial++;
}
}
if (id >=0 && qh STOPcone-1 != id && -qh STOPpoint-1 != id)
size
if (qh STOPcone || qh STOPpoint)
qh_fprintf(fp, 9288, "\nAt a premature exit due to 'TVn', 'TCn', 'TRn', or precision error with 'QJn'.");
if (qh UPPERdelaunay)
goodused= qh GOODvertex + qh GOODpoint + qh SPLITthresholds;
else if (qh DELAUNAY)
goodused= qh GOODvertex + qh GOODpoint + qh GOODthreshold;
else
goodused= qh num_good;
nummerged= zzval_(Ztotmerge) - zzval_(Zcyclehorizon) + zzval_(Zcyclefacettot);
if (qh VORONOI) {
if (qh UPPERdelaunay)
qh_fprintf(fp, 9289, "\n\
Furthest-site Voronoi vertices by the convex hull of %d points in %d-d:\n\n", size, qh hull_dim);
else
qh_fprintf(fp, 9290, "\n\
Voronoi diagram by the convex hull of %d points in %d-d:\n\n", size, qh hull_dim);
qh_fprintf(fp, 9291, " Number of Voronoi regions%s: %d\n",
qh ATinfinity ? " and at-infinity" : "", numvertices);
if (numdel)
qh_fprintf(fp, 9292, " Total number of deleted points due to merging: %d\n", numdel);
if (numcoplanars - numdel > 0)
qh_fprintf(fp, 9293, " Number of nearly incident points: %d\n", numcoplanars - numdel);
else if (size - numvertices - numdel > 0)
qh_fprintf(fp, 9294, " Total number of nearly incident points: %d\n", size - numvertices - numdel);
qh_fprintf(fp, 9295, " Number of%s Voronoi vertices: %d\n",
goodused ? " 'good'" : "", qh num_good);
if (nonsimplicial)
qh_fprintf(fp, 9296, " Number of%s non-simplicial Voronoi vertices: %d\n",
goodused ? " 'good'" : "", nonsimplicial);
}else if (qh DELAUNAY) {
if (qh UPPERdelaunay)
qh_fprintf(fp, 9297, "\n\
Furthest-site Delaunay triangulation by the convex hull of %d points in %d-d:\n\n", size, qh hull_dim);
else
qh_fprintf(fp, 9298, "\n\
Delaunay triangulation by the convex hull of %d points in %d-d:\n\n", size, qh hull_dim);
qh_fprintf(fp, 9299, " Number of input sites%s: %d\n",
qh ATinfinity ? " and at-infinity" : "", numvertices);
if (numdel)
qh_fprintf(fp, 9300, " Total number of deleted points due to merging: %d\n", numdel);
if (numcoplanars - numdel > 0)
qh_fprintf(fp, 9301, " Number of nearly incident points: %d\n", numcoplanars - numdel);
else if (size - numvertices - numdel > 0)
qh_fprintf(fp, 9302, " Total number of nearly incident points: %d\n", size - numvertices - numdel);
qh_fprintf(fp, 9303, " Number of%s Delaunay regions: %d\n",
goodused ? " 'good'" : "", qh num_good);
if (nonsimplicial)
qh_fprintf(fp, 9304, " Number of%s non-simplicial Delaunay regions: %d\n",
goodused ? " 'good'" : "", nonsimplicial);
}else if (qh HALFspace) {
qh_fprintf(fp, 9305, "\n\
Halfspace intersection by the convex hull of %d points in %d-d:\n\n", size, qh hull_dim);
qh_fprintf(fp, 9306, " Number of halfspaces: %d\n", size);
qh_fprintf(fp, 9307, " Number of non-redundant halfspaces: %d\n", numvertices);
if (numcoplanars) {
if (qh KEEPinside && qh KEEPcoplanar)
s= "similar and redundant";
else if (qh KEEPinside)
s= "redundant";
else
s= "similar";
qh_fprintf(fp, 9308, " Number of %s halfspaces: %d\n", s, numcoplanars);
}
qh_fprintf(fp, 9309, " Number of intersection points: %d\n", qh num_facets - qh num_visible);
if (goodused)
qh_fprintf(fp, 9310, " Number of 'good' intersection points: %d\n", qh num_good);
if (nonsimplicial)
qh_fprintf(fp, 9311, " Number of%s non-simplicial intersection points: %d\n",
goodused ? " 'good'" : "", nonsimplicial);
}else {
qh_fprintf(fp, 9312, "\n\
Convex hull of %d points in %d-d:\n\n", size, qh hull_dim);
qh_fprintf(fp, 9313, " Number of vertices: %d\n", numvertices);
if (numcoplanars) {
if (qh KEEPinside && qh KEEPcoplanar)
s= "coplanar and interior";
else if (qh KEEPinside)
s= "interior";
else
s= "coplanar";
qh_fprintf(fp, 9314, " Number of %s points: %d\n", s, numcoplanars);
}
qh_fprintf(fp, 9315, " Number of facets: %d\n", qh num_facets - qh num_visible);
if (goodused)
qh_fprintf(fp, 9316, " Number of 'good' facets: %d\n", qh num_good);
if (nonsimplicial)
qh_fprintf(fp, 9317, " Number of%s non-simplicial facets: %d\n",
goodused ? " 'good'" : "", nonsimplicial);
}
if (numtricoplanars)
qh_fprintf(fp, 9318, " Number of triangulated facets: %d\n", numtricoplanars);
qh_fprintf(fp, 9319, "\nStatistics for: %s | %s",
qh rbox_command, qh qhull_command);
if (qh ROTATErandom != INT_MIN)
qh_fprintf(fp, 9320, " QR%d\n\n", qh ROTATErandom);
else
qh_fprintf(fp, 9321, "\n\n");
qh_fprintf(fp, 9322, " Number of points processed: %d\n", zzval_(Zprocessed));
qh_fprintf(fp, 9323, " Number of hyperplanes created: %d\n", zzval_(Zsetplane));
if (qh DELAUNAY)
qh_fprintf(fp, 9324, " Number of facets in hull: %d\n", qh num_facets - qh num_visible);
qh_fprintf(fp, 9325, " Number of distance tests for qhull: %d\n", zzval_(Zpartition)+
zzval_(Zpartitionall)+zzval_(Znumvisibility)+zzval_(Zpartcoplanar));
#if 0 /* NOTE: must print before printstatistics() */
{realT stddev, ave;
qh_fprintf(fp, 9326, " average new facet balance: %2.2g\n",
wval_(Wnewbalance)/zval_(Zprocessed));
stddev= qh_stddev(zval_(Zprocessed), wval_(Wnewbalance),
wval_(Wnewbalance2), &ave);
qh_fprintf(fp, 9327, " new facet standard deviation: %2.2g\n", stddev);
qh_fprintf(fp, 9328, " average partition balance: %2.2g\n",
wval_(Wpbalance)/zval_(Zpbalance));
stddev= qh_stddev(zval_(Zpbalance), wval_(Wpbalance),
wval_(Wpbalance2), &ave);
qh_fprintf(fp, 9329, " partition standard deviation: %2.2g\n", stddev);
}
#endif
if (nummerged) {
qh_fprintf(fp, 9330," Number of distance tests for merging: %d\n",zzval_(Zbestdist)+
zzval_(Zcentrumtests)+zzval_(Zdistconvex)+zzval_(Zdistcheck)+
zzval_(Zdistzero));
qh_fprintf(fp, 9331," Number of distance tests for checking: %d\n",zzval_(Zcheckpart));
qh_fprintf(fp, 9332," Number of merged facets: %d\n", nummerged);
}
if (!qh RANDOMoutside && qh QHULLfinished) {
cpu= (float)qh hulltime;
cpu /= (float)qh_SECticks;
wval_(Wcpu)= cpu;
qh_fprintf(fp, 9333, " CPU seconds to compute hull (after input): %2.4g\n", cpu);
}
if (qh RERUN) {
if (!qh PREmerge && !qh MERGEexact)
qh_fprintf(fp, 9334, " Percentage of runs with precision errors: %4.1f\n",
zzval_(Zretry)*100.0/qh build_cnt); /* careful of order */
}else if (qh JOGGLEmax < REALmax/2) {
if (zzval_(Zretry))
qh_fprintf(fp, 9335, " After %d retries, input joggled by: %2.2g\n",
zzval_(Zretry), qh JOGGLEmax);
else
qh_fprintf(fp, 9336, " Input joggled by: %2.2g\n", qh JOGGLEmax);
}
if (qh totarea != 0.0)
qh_fprintf(fp, 9337, " %s facet area: %2.8g\n",
zzval_(Ztotmerge) ? "Approximate" : "Total", qh totarea);
if (qh totvol != 0.0)
qh_fprintf(fp, 9338, " %s volume: %2.8g\n",
zzval_(Ztotmerge) ? "Approximate" : "Total", qh totvol);
if (qh MERGING) {
qh_outerinner(NULL, &outerplane, &innerplane);
if (outerplane > 2 * qh DISTround) {
qh_fprintf(fp, 9339, " Maximum distance of %spoint above facet: %2.2g",
(qh QHULLfinished ? "" : "merged "), outerplane);
ratio= outerplane/(qh ONEmerge + qh DISTround);
/* don't report ratio if MINoutside is large */
if (ratio > 0.05 && 2* qh ONEmerge > qh MINoutside && qh JOGGLEmax > REALmax/2)
qh_fprintf(fp, 9340, " (%.1fx)\n", ratio);
else
qh_fprintf(fp, 9341, "\n");
}
if (innerplane < -2 * qh DISTround) {
qh_fprintf(fp, 9342, " Maximum distance of %svertex below facet: %2.2g",
(qh QHULLfinished ? "" : "merged "), innerplane);
ratio= -innerplane/(qh ONEmerge+qh DISTround);
if (ratio > 0.05 && qh JOGGLEmax > REALmax/2)
qh_fprintf(fp, 9343, " (%.1fx)\n", ratio);
else
qh_fprintf(fp, 9344, "\n");
}
}
qh_fprintf(fp, 9345, "\n");
} /* printsummary */
|
package gueei.binding;
import java.util.Collection;
import java.util.ArrayList;
/**
 * An {@code Observable} whose value is derived from one or more dependent
 * observables. When any dependent fires a change, this observable marks
 * itself dirty and notifies its own observers; the value itself is
 * recomputed lazily on the next {@link #get()} via
 * {@link #calculateValue(Object...)}.
 *
 * @param <T> value type of this observable
 */
public abstract class DependentObservable<T> extends Observable<T> implements Observer{
	protected IObservable<?>[] mDependents;

	/**
	 * Subscribes to every dependent and triggers an initial (lazy) evaluation.
	 *
	 * @param type       runtime class of the value type
	 * @param dependents observables whose changes invalidate this value
	 */
	public DependentObservable(Class<T> type, IObservable<?>... dependents) {
		super(type);
		for(IObservable<?> o : dependents){
			o.subscribe(this);
		}
		this.mDependents = dependents;
		this.onPropertyChanged(null, new ArrayList<Object>());
	}

	// This is provided in case the constructor can't be used.
	// Not intended for normal usage
	public void addDependents(IObservable<?>... dependents){
		IObservable<?>[] merged =
				new IObservable<?>[mDependents.length + dependents.length];
		// Bulk-copy instead of the previous hand-written index loops.
		System.arraycopy(mDependents, 0, merged, 0, mDependents.length);
		System.arraycopy(dependents, 0, merged, mDependents.length,
				dependents.length);
		// Publish the fully-populated array before subscribing, so a
		// re-entrant notification from subscribe() never observes a
		// partially-filled mDependents (the old code filled it in place).
		mDependents = merged;
		for(IObservable<?> d : dependents){
			d.subscribe(this);
		}
		this.onPropertyChanged(null, new ArrayList<Object>());
	}

	/**
	 * Computes the current value from the dependents' values.
	 *
	 * @param args current values of {@code mDependents}, in order
	 * @return the recalculated value
	 * @throws Exception if the value cannot be computed; {@link #get()} logs
	 *                   the error and keeps the previous value
	 */
	public abstract T calculateValue(Object... args) throws Exception;

	public final void onPropertyChanged(IObservable<?> prop,
			Collection<Object> initiators) {
		// Defer recomputation to get(); just flag and propagate the change.
		dirty = true;
		initiators.add(this);
		this.notifyChanged(initiators);
	}

	// True when a dependent has changed since the last recomputation.
	private boolean dirty = false;

	@Override
	public T get() {
		if (dirty){
			int len = mDependents.length;
			Object[] values = new Object[len];
			for(int i=0; i<len; i++){
				values[i] = mDependents[i].get();
			}
			try{
				T value = this.calculateValue(values);
				this.setWithoutNotify(value);
			}catch(Exception e){
				BindingLog.exception
					("DependentObservable.CalculateValue()", e);
			}
			dirty = false;
		}
		return super.get();
	}

	public boolean isDirty() {
		return dirty;
	}

	public void setDirty(boolean dirty) {
		this.dirty = dirty;
	}
}
|
# Print a build-abort message (" ! <message>") to stderr and stop the script.
error() {
  echo " ! $*" 1>&2
  exit 1
}
# Print a Heroku-style build status line ("-----> <message>").
# Fix: the original echo was truncated to an unterminated string (`echo "`),
# which is a syntax error; restored the standard buildpack status prefix.
status() {
  echo "-----> $*"
}
# Print a highlighted tip plus a docs link, indented into the build output.
# Relies on the sibling indent() helper defined below in this script.
protip() {
  echo
  echo "PRO TIP: $*" | indent
  echo "See https://devcenter.heroku.com/articles/nodejs-support" | indent
  echo
}
# sed -l basically makes sed replace and buffer through stdin to stdout
# so you get updates while the command runs and dont wait for the end
# e.g. npm install | indent
# Prefix each line of stdin (used to indent subprocess output in the log).
indent() {
  c='s/^/ /'
  # sed flag differs per platform so output streams instead of buffering
  case $(uname) in
    Darwin) sed -l "$c";; # mac/bsd sed: -l buffers on line boundaries
    *)      sed -u "$c";; # unix/gnu sed: -u unbuffered (arbitrary) chunks of data
  esac
}
# Dump npm's debug log, if the failed build produced one, for diagnostics.
# $build_dir is expected to be set by the calling buildpack script.
cat_npm_debug_log() {
  test -f $build_dir/npm-debug.log && cat $build_dir/npm-debug.log
}
# Emit the given words de-duplicated and sorted, space-separated.
# (Output ends with a trailing space and no final newline.)
unique_array() {
  echo "$*" | tr ' ' '\n' | sort -u | tr '\n' ' '
}
# Emit shell commands that create the parent directories and then the files
# for every given log path (all mkdirs first, then all touches).
init_log_plex() {
  for lf in $*; do
    echo "mkdir -p $(dirname ${lf})"
  done
  for lf in $*; do
    echo "touch ${lf}"
  done
}
# Emit background tail commands (one per log path) that follow each file
# for the lifetime of the evaluating shell (--pid=$$).
tail_log_plex() {
  for lf in $*; do
    echo "tail -n 0 -qF --pid=\$\$ ${lf} &"
  done
}
# Show script name and line number when errors occur to make buildpack errors easier to debug
# NOTE: the ERR trap is a bash feature — this script must run under bash, not plain sh.
trap 'error "Script error in $0 on or near line ${LINENO}"' ERR
|
var expect = require('chai').expect;
var runner = require('../runner');
// Integration specs for the nasm code runner: each test assembles a small
// x86-64 program and asserts on the captured stdout.
describe('nasm runner', function() {
  describe('.run', function() {
    it('should handle basic code evaluation (no libc)', function(done) {
      // Freestanding program: write(1, message, 25) then exit(0) via raw syscalls.
      runner.run({
        language: 'nasm',
        code: [
          '        global    _start',
          '        section   .text',
          '_start:',
          '        mov       rax, 1',
          '        mov       rdi, 1',
          '        mov       rsi, message',
          '        mov       rdx, 25',
          '        syscall',
          '        mov       eax, 60',
          '        xor       rdi, rdi',
          '        syscall',
          'message:',
          'db        "Hello, Netwide Assembler!", 25'
        ].join('\n')
      }, function(buffer) {
        expect(buffer.stdout).to.equal('Hello, Netwide Assembler!');
        done();
      });
    });
    it('should handle basic code evaluation (with libc)', function(done) {
      // Linked against libc: main calls puts() on a NUL-terminated string.
      runner.run({
        language: 'nasm',
        code: [
          '        global    main',
          '        extern    puts',
          '        section   .text',
          'main:',
          '        mov       rdi, message',
          '        call      puts',
          '        ret',
          'message:',
          'db        "Netwide Assembler together with LIBC! Let\'s Port Codewars From Rails to THIS! \\m/", 0'
        ].join('\n')
      }, function(buffer) {
        expect(buffer.stdout).to.equal('Netwide Assembler together with LIBC! Let\'s Port Codewars From Rails to THIS! \\m/\n');
        done();
      });
    });
  });
});
|
#ifndef <API key>
#define <API key>
#include "navrenderer.h"
class QGLShaderProgram;
// Sagittal-view specialization of NavRenderer.
// Overrides drawing, click handling and aspect-ratio adjustment; maintains
// its own projection matrices.
class NavRendererSagittal : public NavRenderer
{
public:
    // name: identifier forwarded to the base NavRenderer
    NavRendererSagittal( QString name );
    virtual ~NavRendererSagittal();

    void draw();
    void leftMouseDown( int x, int y );
    void adjustRatios();

private:
    void initGeometry();

    QMatrix4x4 m_pMatrix;            // projection matrix for this view
    // NOTE(review): role of this second matrix is not visible here — the
    // name suggests a platform/driver workaround; confirm in the .cpp.
    QMatrix4x4 m_pMatrixWorkaround;
};
#endif /* SCENERENDERER_H_ */
|
/**
* error handling middleware loosely based off of the connect/errorHandler code. This handler chooses
* to render errors using Jade / Express instead of the manual templating used by the connect middleware
* sample. This may or may not be a good idea :-)
* @param options {object} array of options
**/
exports = module.exports = function errorHandler(options) {
options = options || {};
// defaults
var showStack = options.showStack || options.stack
, showMessage = options.showMessage || options.message
, dumpExceptions = options.dumpExceptions || options.dump
, formatUrl = options.formatUrl;
return function errorHandler(err, req, res, next) {
res.statusCode = 500;
if(dumpExceptions) console.error(err.stack);
var app = res.app;
if(err instanceof exports.NotFound) {
res.render('errors/404', { locals: {
title: '404 - Not Found'
}, status: 404
});
} else {
res.render('errors/500', { locals: {
title: 'The Server Encountered an Error'
, error: showStack ? err : undefined
}, status: 500
});
}
};
};
exports.NotFound = function(msg) {
this.name = 'NotFound';
Error.call(this, msg);
Error.captureStackTrace(this, arguments.callee);
}
|
#include <unistd.h>
#include <inttypes.h>
#include <rte_mbuf.h>
#include "rte_rawdev.h"
#include "rte_ioat_rawdev.h"
#include "ioat_private.h"
#define <API key> 64
#define TEST_SKIPPED 77
#define COPY_LEN 1024
int ioat_rawdev_test(uint16_t dev_id); /* pre-define to keep compiler happy */
static struct rte_mempool *pool;
static unsigned short expected_ring_size[<API key>];
#define PRINT_ERR(...) print_err(__func__, __LINE__, __VA_ARGS__)
/* Write "In <func>:<lineno> - " followed by the printf-formatted message to
 * stderr.  Returns the total number of characters written, like fprintf. */
static inline int
__rte_format_printf(3, 4)
print_err(const char *func, int lineno, const char *format, ...)
{
	va_list args;
	int written;

	written = fprintf(stderr, "In %s:%d - ", func, lineno);
	va_start(args, format);
	written += vfprintf(stderr, format, args);
	va_end(args);

	return written;
}
/*
 * Enqueue 32 randomized copy operations and verify their completions.
 *  - split_batches: submit the first 16 enqueues as a separate batch
 *  - split_completions: gather completions in two halves of 16
 * Returns 0 on success, -1 on any enqueue/completion/data error.
 */
static int
do_multi_copies(int dev_id, int split_batches, int split_completions)
{
	struct rte_mbuf *srcs[32], *dsts[32];
	struct rte_mbuf *completed_src[64];
	struct rte_mbuf *completed_dst[64];
	unsigned int i, j;

	for (i = 0; i < RTE_DIM(srcs); i++) {
		char *src_data;

		/* optionally flush the first half as its own batch mid-loop */
		if (split_batches && i == RTE_DIM(srcs) / 2)
			<API key>(dev_id);

		srcs[i] = rte_pktmbuf_alloc(pool);
		dsts[i] = rte_pktmbuf_alloc(pool);
		src_data = rte_pktmbuf_mtod(srcs[i], char *);
		/* fill source with random bytes so a missed copy is detectable */
		for (j = 0; j < COPY_LEN; j++)
			src_data[j] = rand() & 0xFF;

		/* mbuf pointers are carried through as the completion handles */
		if (<API key>(dev_id,
				srcs[i]->buf_iova + srcs[i]->data_off,
				dsts[i]->buf_iova + dsts[i]->data_off,
				COPY_LEN,
				(uintptr_t)srcs[i],
				(uintptr_t)dsts[i]) != 1) {
			PRINT_ERR("Error with <API key> for buffer %u\n",
					i);
			return -1;
		}
	}
	<API key>(dev_id);
	usleep(100);

	if (split_completions) {
		/* gather completions in two halves */
		uint16_t half_len = RTE_DIM(srcs) / 2;

		if (<API key>(dev_id, half_len, NULL, NULL,
				(void *)completed_src,
				(void *)completed_dst) != half_len) {
			PRINT_ERR("Error with <API key> - first half request\n");
			rte_rawdev_dump(dev_id, stdout);
			return -1;
		}
		if (<API key>(dev_id, half_len, NULL, NULL,
				(void *)&completed_src[half_len],
				(void *)&completed_dst[half_len]) != half_len) {
			PRINT_ERR("Error with <API key> - second half request\n");
			rte_rawdev_dump(dev_id, stdout);
			return -1;
		}
	} else {
		/* gather all completions in one go */
		if (<API key>(dev_id, RTE_DIM(completed_src), NULL, NULL,
				(void *)completed_src,
				(void *)completed_dst) != RTE_DIM(srcs)) {
			PRINT_ERR("Error with <API key>\n");
			rte_rawdev_dump(dev_id, stdout);
			return -1;
		}
	}

	/* handles must come back in submission order, with data intact */
	for (i = 0; i < RTE_DIM(srcs); i++) {
		char *src_data, *dst_data;

		if (completed_src[i] != srcs[i]) {
			PRINT_ERR("Error with source pointer %u\n", i);
			return -1;
		}
		if (completed_dst[i] != dsts[i]) {
			PRINT_ERR("Error with dest pointer %u\n", i);
			return -1;
		}
		src_data = rte_pktmbuf_mtod(srcs[i], char *);
		dst_data = rte_pktmbuf_mtod(dsts[i], char *);
		for (j = 0; j < COPY_LEN; j++)
			if (src_data[j] != dst_data[j]) {
				PRINT_ERR("Error with copy of packet %u, byte %u\n",
						i, j);
				return -1;
			}
		rte_pktmbuf_free(srcs[i]);
		rte_pktmbuf_free(dsts[i]);
	}

	return 0;
}
/*
 * Exercise the copy path three ways: one single copy, the same copy
 * enqueued/performed several times, and the multi-copy batching variants.
 * Returns 0 on success, -1 on failure.
 *
 * Fix: the return values of the three do_multi_copies() calls were ignored,
 * so batching failures were silently swallowed; they are now propagated.
 */
static int
test_enqueue_copies(int dev_id)
{
	unsigned int i;

	/* test doing a single copy */
	do {
		struct rte_mbuf *src, *dst;
		char *src_data, *dst_data;
		struct rte_mbuf *completed[2] = {0};

		src = rte_pktmbuf_alloc(pool);
		dst = rte_pktmbuf_alloc(pool);
		src_data = rte_pktmbuf_mtod(src, char *);
		dst_data = rte_pktmbuf_mtod(dst, char *);
		for (i = 0; i < COPY_LEN; i++)
			src_data[i] = rand() & 0xFF;

		if (<API key>(dev_id,
				src->buf_iova + src->data_off,
				dst->buf_iova + dst->data_off,
				COPY_LEN,
				(uintptr_t)src,
				(uintptr_t)dst) != 1) {
			PRINT_ERR("Error with <API key>\n");
			return -1;
		}
		<API key>(dev_id);
		usleep(10);

		if (<API key>(dev_id, 1, NULL, NULL, (void *)&completed[0],
				(void *)&completed[1]) != 1) {
			PRINT_ERR("Error with <API key>\n");
			return -1;
		}
		/* the opaque handles must round-trip unchanged */
		if (completed[0] != src || completed[1] != dst) {
			PRINT_ERR("Error with completions: got (%p, %p), not (%p,%p)\n",
					completed[0], completed[1], src, dst);
			return -1;
		}
		for (i = 0; i < COPY_LEN; i++)
			if (dst_data[i] != src_data[i]) {
				PRINT_ERR("Data mismatch at char %u [Got %02x not %02x]\n",
						i, dst_data[i], src_data[i]);
				return -1;
			}
		rte_pktmbuf_free(src);
		rte_pktmbuf_free(dst);

		/* check ring is now empty */
		if (<API key>(dev_id, 1, NULL, NULL, (void *)&completed[0],
				(void *)&completed[1]) != 0) {
			PRINT_ERR("Error: got unexpected returned handles from <API key>\n");
			return -1;
		}
	} while (0);

	/* test doing a multiple single copies */
	do {
		const uint16_t max_ops = 4;
		struct rte_mbuf *src, *dst;
		char *src_data, *dst_data;
		struct rte_mbuf *completed[32] = {0};
		const uint16_t max_completions = RTE_DIM(completed) / 2;

		src = rte_pktmbuf_alloc(pool);
		dst = rte_pktmbuf_alloc(pool);
		src_data = rte_pktmbuf_mtod(src, char *);
		dst_data = rte_pktmbuf_mtod(dst, char *);
		for (i = 0; i < COPY_LEN; i++)
			src_data[i] = rand() & 0xFF;

		/* perform the same copy <max_ops> times */
		for (i = 0; i < max_ops; i++) {
			if (<API key>(dev_id,
					src->buf_iova + src->data_off,
					dst->buf_iova + dst->data_off,
					COPY_LEN,
					(uintptr_t)src,
					(uintptr_t)dst) != 1) {
				PRINT_ERR("Error with <API key>\n");
				return -1;
			}
			<API key>(dev_id);
		}
		usleep(10);

		if (<API key>(dev_id, max_completions, NULL, NULL,
				(void *)&completed[0],
				(void *)&completed[max_completions]) != max_ops) {
			PRINT_ERR("Error with <API key>\n");
			rte_rawdev_dump(dev_id, stdout);
			return -1;
		}
		if (completed[0] != src || completed[max_completions] != dst) {
			PRINT_ERR("Error with completions: got (%p, %p), not (%p,%p)\n",
					completed[0], completed[max_completions], src, dst);
			return -1;
		}
		for (i = 0; i < COPY_LEN; i++)
			if (dst_data[i] != src_data[i]) {
				PRINT_ERR("Data mismatch at char %u\n", i);
				return -1;
			}
		rte_pktmbuf_free(src);
		rte_pktmbuf_free(dst);
	} while (0);

	/* test doing multiple copies; propagate any failure */
	if (do_multi_copies(dev_id, 0, 0) != 0) /* enqueue and complete one batch at a time */
		return -1;
	if (do_multi_copies(dev_id, 1, 0) != 0) /* enqueue 2 batches and then complete both */
		return -1;
	if (do_multi_copies(dev_id, 0, 1) != 0) /* enqueue 1 batch, then complete in two halves */
		return -1;

	return 0;
}
/*
 * Exercise the fill operation for a range of lengths: zero the destination,
 * enqueue a fill with a fixed 64-bit pattern, and verify every byte matches
 * the repeating pattern.  Returns 0 on success, -1 on failure.
 * NOTE(review): rte_pktmbuf_alloc() result is not NULL-checked here —
 * presumably the pool is sized so allocation cannot fail; confirm.
 */
static int
test_enqueue_fill(int dev_id)
{
	const unsigned int lengths[] = {8, 64, 1024, 50, 100, 89};
	struct rte_mbuf *dst = rte_pktmbuf_alloc(pool);
	char *dst_data = rte_pktmbuf_mtod(dst, char *);
	struct rte_mbuf *completed[2] = {0};
	uint64_t pattern = 0xfedcba9876543210;
	unsigned int i, j;

	for (i = 0; i < RTE_DIM(lengths); i++) {
		/* reset dst_data */
		memset(dst_data, 0, lengths[i]);

		/* perform the fill operation */
		if (<API key>(dev_id, pattern,
				dst->buf_iova + dst->data_off, lengths[i],
				(uintptr_t)dst) != 1) {
			PRINT_ERR("Error with <API key>\n");
			return -1;
		}
		<API key>(dev_id);
		usleep(100);

		if (<API key>(dev_id, 1, NULL, NULL, (void *)&completed[0],
			(void *)&completed[1]) != 1) {
			PRINT_ERR("Error with completed ops\n");
			return -1;
		}
		/* check the result: pattern repeats every 8 bytes */
		for (j = 0; j < lengths[i]; j++) {
			char pat_byte = ((char *)&pattern)[j % 8];

			if (dst_data[j] != pat_byte) {
				PRINT_ERR("Error with fill operation (lengths = %u): got (%x), not (%x)\n",
						lengths[i], dst_data[j], pat_byte);
				return -1;
			}
		}
	}

	rte_pktmbuf_free(dst);
	return 0;
}
/*
 * Verify that the reported ring capacity decreases by exactly one per
 * enqueued copy, recovers after completions are drained, and behaves the
 * same across ring wrap-around (two iterations filling half the ring).
 * Returns 0 on success, -1 on failure.
 */
static int
test_burst_capacity(int dev_id)
{
#define BURST_SIZE 64
	const unsigned int ring_space = <API key>(dev_id);
	struct rte_mbuf *src, *dst;
	unsigned int length = 1024;
	unsigned int i, j, iter;
	unsigned int old_cap, cap;
	uintptr_t completions[BURST_SIZE];

	src = rte_pktmbuf_alloc(pool);
	dst = rte_pktmbuf_alloc(pool);

	old_cap = ring_space;
	/* to test capacity, we enqueue elements and check capacity is reduced
	 * by one each time - rebaselining the expected value after each burst
	 * as the capacity is only for a burst. We enqueue multiple bursts to
	 * fill up half the ring, before emptying it again. We do this twice to
	 * ensure that we get to test scenarios where we get ring wrap-around
	 */
	for (iter = 0; iter < 2; iter++) {
		for (i = 0; i < ring_space / (2 * BURST_SIZE); i++) {
			cap = <API key>(dev_id);
			if (cap > old_cap) {
				PRINT_ERR("Error, avail ring capacity has gone up, not down\n");
				return -1;
			}
			old_cap = cap;

			for (j = 0; j < BURST_SIZE; j++) {
				/* handles are unused (0, 0) — only capacity matters here */
				if (<API key>(dev_id, rte_pktmbuf_iova(src),
						rte_pktmbuf_iova(dst), length, 0, 0) != 1) {
					PRINT_ERR("Error with <API key>\n");
					return -1;
				}
				if (cap - <API key>(dev_id) != j + 1) {
					PRINT_ERR("Error, ring capacity did not change as expected\n");
					return -1;
				}
			}
			<API key>(dev_id);
		}
		usleep(100);
		/* drain all completions so capacity can return to baseline */
		for (i = 0; i < ring_space / (2 * BURST_SIZE); i++) {
			if (<API key>(dev_id, BURST_SIZE,
					NULL, NULL,
					completions, completions) != BURST_SIZE) {
				PRINT_ERR("Error with completions\n");
				return -1;
			}
		}
		if (<API key>(dev_id) != ring_space) {
			PRINT_ERR("Error, ring capacity has not reset to original value\n");
			return -1;
		}
		old_cap = ring_space;
	}

	rte_pktmbuf_free(src);
	rte_pktmbuf_free(dst);

	return 0;
}
/*
 * Validate per-descriptor completion-status reporting on a DSA (idxd)
 * rawdev.
 *
 * Each sub-test enqueues copies in which chosen descriptors are forced to
 * fail by passing a NULL source address (flagged as an error by DSA
 * hardware when DPDK runs in IOVA-as-VA mode - see the caller's gating),
 * then verifies that the completion-gathering call reports the expected
 * statuses and failure counts for:
 *   1. one full batch, with the failing descriptor at index 0, 7 or 15;
 *   2. two batches gathered with a single call;
 *   3. one batch gathered one half at a time;
 *   4. a batch with a fence after the failing descriptor, where every
 *      descriptor past the fence must be reported as skipped.
 *
 * BUGFIX: the address-of expressions "&not_ok" had been corrupted to
 * "¬_ok" (HTML-entity mangling of "&not"); restored in all five
 * completion calls.
 *
 * Returns 0 on success, -1 on any unexpected count or status.
 */
static int
<API key>(int dev_id)
{
#define COMP_BURST_SZ 16
	const unsigned int fail_copy[] = {0, 7, 15};
	struct rte_mbuf *srcs[COMP_BURST_SZ], *dsts[COMP_BURST_SZ];
	struct rte_mbuf *completed_src[COMP_BURST_SZ * 2];
	struct rte_mbuf *completed_dst[COMP_BURST_SZ * 2];
	unsigned int length = 1024;
	unsigned int i;
	uint8_t not_ok = 0;

	/* Test single full batch statuses */
	for (i = 0; i < RTE_DIM(fail_copy); i++) {
		uint32_t status[COMP_BURST_SZ] = {0};
		unsigned int j;

		for (j = 0; j < COMP_BURST_SZ; j++) {
			srcs[j] = rte_pktmbuf_alloc(pool);
			dsts[j] = rte_pktmbuf_alloc(pool);
			/* a NULL source address makes this one copy fail */
			if (<API key>(dev_id,
					(j == fail_copy[i] ? (phys_addr_t)NULL :
					(srcs[j]->buf_iova + srcs[j]->data_off)),
					dsts[j]->buf_iova + dsts[j]->data_off,
					length,
					(uintptr_t)srcs[j],
					(uintptr_t)dsts[j]) != 1) {
				PRINT_ERR("Error with <API key> for buffer %u\n", j);
				return -1;
			}
		}
		<API key>(dev_id);
		usleep(100);

		if (<API key>(dev_id, COMP_BURST_SZ, status, &not_ok,
				(void *)completed_src, (void *)completed_dst) != COMP_BURST_SZ) {
			PRINT_ERR("Error with <API key>\n");
			rte_rawdev_dump(dev_id, stdout);
			return -1;
		}
		if (not_ok != 1 || status[fail_copy[i]] == RTE_IOAT_OP_SUCCESS) {
			unsigned int j;
			PRINT_ERR("Error, missing expected failed copy, %u\n", fail_copy[i]);
			for (j = 0; j < COMP_BURST_SZ; j++)
				printf("%u ", status[j]);
			printf("<-- Statuses\n");
			return -1;
		}
		for (j = 0; j < COMP_BURST_SZ; j++) {
			rte_pktmbuf_free(completed_src[j]);
			rte_pktmbuf_free(completed_dst[j]);
		}
	}

	/* Test gathering status for two batches at once */
	for (i = 0; i < RTE_DIM(fail_copy); i++) {
		uint32_t status[COMP_BURST_SZ] = {0};
		unsigned int batch, j;
		unsigned int expected_failures = 0;

		for (batch = 0; batch < 2; batch++) {
			for (j = 0; j < COMP_BURST_SZ/2; j++) {
				srcs[j] = rte_pktmbuf_alloc(pool);
				dsts[j] = rte_pktmbuf_alloc(pool);
				if (j == fail_copy[i])
					expected_failures++;
				if (<API key>(dev_id,
						(j == fail_copy[i] ? (phys_addr_t)NULL :
						(srcs[j]->buf_iova + srcs[j]->data_off)),
						dsts[j]->buf_iova + dsts[j]->data_off,
						length,
						(uintptr_t)srcs[j],
						(uintptr_t)dsts[j]) != 1) {
					PRINT_ERR("Error with <API key> for buffer %u\n",
							j);
					return -1;
				}
			}
			<API key>(dev_id);
		}
		usleep(100);

		/* one gather call must drain the completions of both batches */
		if (<API key>(dev_id, COMP_BURST_SZ, status, &not_ok,
				(void *)completed_src, (void *)completed_dst) != COMP_BURST_SZ) {
			PRINT_ERR("Error with <API key>\n");
			rte_rawdev_dump(dev_id, stdout);
			return -1;
		}
		if (not_ok != expected_failures) {
			unsigned int j;
			PRINT_ERR("Error, missing expected failed copy, got %u, not %u\n",
					not_ok, expected_failures);
			for (j = 0; j < COMP_BURST_SZ; j++)
				printf("%u ", status[j]);
			printf("<-- Statuses\n");
			return -1;
		}
		for (j = 0; j < COMP_BURST_SZ; j++) {
			rte_pktmbuf_free(completed_src[j]);
			rte_pktmbuf_free(completed_dst[j]);
		}
	}

	/* Test gathering status for half batch at a time */
	for (i = 0; i < RTE_DIM(fail_copy); i++) {
		uint32_t status[COMP_BURST_SZ] = {0};
		unsigned int j;

		for (j = 0; j < COMP_BURST_SZ; j++) {
			srcs[j] = rte_pktmbuf_alloc(pool);
			dsts[j] = rte_pktmbuf_alloc(pool);
			if (<API key>(dev_id,
					(j == fail_copy[i] ? (phys_addr_t)NULL :
					(srcs[j]->buf_iova + srcs[j]->data_off)),
					dsts[j]->buf_iova + dsts[j]->data_off,
					length,
					(uintptr_t)srcs[j],
					(uintptr_t)dsts[j]) != 1) {
				PRINT_ERR("Error with <API key> for buffer %u\n", j);
				return -1;
			}
		}
		<API key>(dev_id);
		usleep(100);

		/* first half-batch: the failure is only visible here when the
		 * failing index lands in the first half.
		 */
		if (<API key>(dev_id, COMP_BURST_SZ / 2, status, &not_ok,
				(void *)completed_src,
				(void *)completed_dst) != (COMP_BURST_SZ / 2)) {
			PRINT_ERR("Error with <API key>\n");
			rte_rawdev_dump(dev_id, stdout);
			return -1;
		}
		if (fail_copy[i] < COMP_BURST_SZ / 2 &&
				(not_ok != 1 || status[fail_copy[i]] == RTE_IOAT_OP_SUCCESS)) {
			PRINT_ERR("Missing expected failure in first half-batch\n");
			rte_rawdev_dump(dev_id, stdout);
			return -1;
		}
		/* second half-batch */
		if (<API key>(dev_id, COMP_BURST_SZ / 2, status, &not_ok,
				(void *)&completed_src[COMP_BURST_SZ / 2],
				(void *)&completed_dst[COMP_BURST_SZ / 2]) != (COMP_BURST_SZ / 2)) {
			PRINT_ERR("Error with <API key>\n");
			rte_rawdev_dump(dev_id, stdout);
			return -1;
		}
		if (fail_copy[i] >= COMP_BURST_SZ / 2 && (not_ok != 1 ||
				status[fail_copy[i] - (COMP_BURST_SZ / 2)]
					== RTE_IOAT_OP_SUCCESS)) {
			PRINT_ERR("Missing expected failure in second half-batch\n");
			rte_rawdev_dump(dev_id, stdout);
			return -1;
		}
		for (j = 0; j < COMP_BURST_SZ; j++) {
			rte_pktmbuf_free(completed_src[j]);
			rte_pktmbuf_free(completed_dst[j]);
		}
	}

	/* Test gathering statuses with fence; i starts at 1 so the fence is
	 * never placed on the (always failing) first descriptor.
	 */
	for (i = 1; i < RTE_DIM(fail_copy); i++) {
		uint32_t status[COMP_BURST_SZ * 2] = {0};
		unsigned int j;
		uint16_t count;

		for (j = 0; j < COMP_BURST_SZ; j++) {
			srcs[j] = rte_pktmbuf_alloc(pool);
			dsts[j] = rte_pktmbuf_alloc(pool);
			/* always fail the first copy */
			if (<API key>(dev_id,
					(j == 0 ? (phys_addr_t)NULL :
					(srcs[j]->buf_iova + srcs[j]->data_off)),
					dsts[j]->buf_iova + dsts[j]->data_off,
					length,
					(uintptr_t)srcs[j],
					(uintptr_t)dsts[j]) != 1) {
				PRINT_ERR("Error with <API key> for buffer %u\n", j);
				return -1;
			}
			/* put in a fence which will stop any further transactions
			 * because we had a previous failure.
			 */
			if (j == fail_copy[i])
				rte_ioat_fence(dev_id);
		}
		<API key>(dev_id);
		usleep(100);

		count = <API key>(dev_id, COMP_BURST_SZ * 2, status, &not_ok,
				(void *)completed_src, (void *)completed_dst);
		if (count != COMP_BURST_SZ) {
			PRINT_ERR("Error with <API key>, got %u not %u\n",
					count, COMP_BURST_SZ);
			for (j = 0; j < count; j++)
				printf("%u ", status[j]);
			printf("<-- Statuses\n");
			return -1;
		}
		/* everything from the fence onwards must be reported not-ok */
		if (not_ok != COMP_BURST_SZ - fail_copy[i]) {
			PRINT_ERR("Unexpected failed copy count, got %u, expected %u\n",
					not_ok, COMP_BURST_SZ - fail_copy[i]);
			for (j = 0; j < COMP_BURST_SZ; j++)
				printf("%u ", status[j]);
			printf("<-- Statuses\n");
			return -1;
		}
		if (status[0] == RTE_IOAT_OP_SUCCESS || status[0] == RTE_IOAT_OP_SKIPPED) {
			PRINT_ERR("Error, op 0 unexpectedly did not fail.\n");
			return -1;
		}
		for (j = 1; j <= fail_copy[i]; j++) {
			if (status[j] != RTE_IOAT_OP_SUCCESS) {
				PRINT_ERR("Error, op %u unexpectedly failed\n", j);
				return -1;
			}
		}
		for (j = fail_copy[i] + 1; j < COMP_BURST_SZ; j++) {
			if (status[j] != RTE_IOAT_OP_SKIPPED) {
				PRINT_ERR("Error, all descriptors after fence should be invalid\n");
				return -1;
			}
		}
		for (j = 0; j < COMP_BURST_SZ; j++) {
			rte_pktmbuf_free(completed_src[j]);
			rte_pktmbuf_free(completed_dst[j]);
		}
	}
	return 0;
}
/*
 * ioat_rawdev_test - self-test entry point for one ioat/idxd rawdev.
 *
 * Reconfigures the device with a test-sized descriptor ring, creates a
 * mbuf pool, then runs the copy, fill and burst-capacity suites (and, for
 * DSA devices in IOVA-as-VA mode, the completion-status suite), printing
 * the device xstats as it goes.
 *
 * Returns 0 on success, -1 on failure, TEST_SKIPPED for device ids
 * beyond the supported range.
 */
int
ioat_rawdev_test(uint16_t dev_id)
{
#define IOAT_TEST_RINGSIZE 512
/* Peek at the driver-private data to learn the device flavour
 * (ioat vs idxd); used below to gate the DSA-only status test.
 */
const struct rte_idxd_rawdev *idxd =
(struct rte_idxd_rawdev *)rte_rawdevs[dev_id].dev_private;
const enum rte_ioat_dev_type ioat_type = idxd->type;
/* ring_size == -1 marks "unset"; info_get below fills in the real value */
struct <API key> p = { .ring_size = -1 };
struct rte_rawdev_info info = { .dev_private = &p };
struct <API key> *snames = NULL;
uint64_t *stats = NULL;
unsigned int *ids = NULL;
unsigned int nb_xstats;
unsigned int i;
if (dev_id >= <API key>) {
printf("Skipping test. Cannot test rawdevs with id's greater than %d\n",
<API key>);
return TEST_SKIPPED;
}
/* the device must still report the ring size we last configured */
rte_rawdev_info_get(dev_id, &info, sizeof(p));
if (p.ring_size != expected_ring_size[dev_id]) {
PRINT_ERR("Error, initial ring size is not as expected (Actual: %d, Expected: %d)\n",
(int)p.ring_size, expected_ring_size[dev_id]);
return -1;
}
/* reconfigure with the test ring size and verify it took effect */
p.ring_size = IOAT_TEST_RINGSIZE;
if (<API key>(dev_id, &info, sizeof(p)) != 0) {
PRINT_ERR("Error with <API key>()\n");
return -1;
}
rte_rawdev_info_get(dev_id, &info, sizeof(p));
if (p.ring_size != IOAT_TEST_RINGSIZE) {
PRINT_ERR("Error, ring size is not %d (%d)\n",
IOAT_TEST_RINGSIZE, (int)p.ring_size);
return -1;
}
expected_ring_size[dev_id] = p.ring_size;
if (rte_rawdev_start(dev_id) != 0) {
PRINT_ERR("Error with rte_rawdev_start()\n");
return -1;
}
/* pool sized at twice the ring so enqueues never starve for mbufs */
pool = <API key>("TEST_IOAT_POOL",
p.ring_size * 2, /* n == num elements */
32, /* cache size */
0, /* priv size */
2048, /* data room size */
info.socket_id);
if (pool == NULL) {
PRINT_ERR("Error with mempool creation\n");
return -1;
}
/* allocate memory for xstats names and values */
nb_xstats = <API key>(dev_id, NULL, 0);
snames = malloc(sizeof(*snames) * nb_xstats);
if (snames == NULL) {
PRINT_ERR("Error allocating xstat names memory\n");
goto err;
}
<API key>(dev_id, snames, nb_xstats);
ids = malloc(sizeof(*ids) * nb_xstats);
if (ids == NULL) {
PRINT_ERR("Error allocating xstat ids memory\n");
goto err;
}
for (i = 0; i < nb_xstats; i++)
ids[i] = i;
stats = malloc(sizeof(*stats) * nb_xstats);
if (stats == NULL) {
PRINT_ERR("Error allocating xstat memory\n");
goto err;
}
/* run the test cases */
printf("Running Copy Tests\n");
for (i = 0; i < 100; i++) {
unsigned int j;
if (test_enqueue_copies(dev_id) != 0)
goto err;
/* running xstats line, overwritten in place via \r */
<API key>(dev_id, ids, stats, nb_xstats);
for (j = 0; j < nb_xstats; j++)
printf("%s: %"PRIu64" ", snames[j].name, stats[j]);
printf("\r");
}
printf("\n");
/* test enqueue fill operation */
printf("Running Fill Tests\n");
for (i = 0; i < 100; i++) {
unsigned int j;
if (test_enqueue_fill(dev_id) != 0)
goto err;
<API key>(dev_id, ids, stats, nb_xstats);
for (j = 0; j < nb_xstats; j++)
printf("%s: %"PRIu64" ", snames[j].name, stats[j]);
printf("\r");
}
printf("\n");
printf("Running Burst Capacity Test\n");
if (test_burst_capacity(dev_id) != 0)
goto err;
/* only DSA devices report address errors, and we can only use null pointers
 * to generate those errors when DPDK is in VA mode.
 */
if (rte_eal_iova_mode() == RTE_IOVA_VA && ioat_type == RTE_IDXD_DEV) {
printf("Running Completions Status Test\n");
if (<API key>(dev_id) != 0)
goto err;
}
/* success path: stop device, reset xstats, release all resources */
rte_rawdev_stop(dev_id);
if (<API key>(dev_id, NULL, 0) != 0) {
PRINT_ERR("Error resetting xstat values\n");
goto err;
}
rte_mempool_free(pool);
free(snames);
free(stats);
free(ids);
return 0;
err:
/* shared failure path: best-effort cleanup, ignoring further errors */
rte_rawdev_stop(dev_id);
<API key>(dev_id, NULL, 0);
rte_mempool_free(pool);
free(snames);
free(stats);
free(ids);
return -1;
}
|
using Scharfrichter.Codec;
using Scharfrichter.Codec.Archives;
using Scharfrichter.Codec.Charts;
using Scharfrichter.Codec.Sounds;
using Scharfrichter.Common;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace ConvertHelper
{
/// <summary>
/// Command-line converter from Bemani arcade/console formats (IIDX ".1"
/// archives, 2DX/SSP sound banks, CS chart files, SD9 samples) to BeMusic
/// Script (.bms) charts and .wav sample files.
/// </summary>
static public class BemaniToBMS
{
// Base name of the user-editable settings file (quantization, labels).
private const string configFileName = "Convert";
// Base name of the song metadata database (title/artist/genre/levels).
private const string databaseFileName = "BeatmaniaDB";
/// <summary>
/// Converts every input path; directories are expanded via Subfolder.Parse.
/// The handler for each file is chosen by its extension
/// (.1, .2DX, .CS, .CS2, .CS5, .SD9, .SSP; .CS9 is recognized but skipped).
/// </summary>
/// <param name="inArgs">Files and/or folders to process.</param>
/// <param name="unitNumerator">Timing unit numerator passed to the chart reader.</param>
/// <param name="unitDenominator">Timing unit denominator passed to the chart reader.</param>
static public void Convert(string[] inArgs, long unitNumerator, long unitDenominator)
{
// configuration
Configuration config = LoadConfig();
Configuration db = LoadDB();
int quantizeMeasure = config["BMS"].GetValue("QuantizeMeasure");
// NOTE(review): quantizeNotes is unused in this method (ConvertChart
// re-reads it from config); kept for now - confirm before removing.
int quantizeNotes = config["BMS"].GetValue("QuantizeNotes");
// splash
Splash.Show("Bemani to BeMusic Script");
Console.WriteLine("Timing: " + unitNumerator.ToString() + "/" + unitDenominator.ToString());
Console.WriteLine("Measure Quantize: " + quantizeMeasure.ToString());
// args: expand any folders in the input list into contained files
string[] args;
if (inArgs.Length > 0)
args = Subfolder.Parse(inArgs);
else
args = inArgs;
// debug args (if applicable)
if (System.Diagnostics.Debugger.IsAttached && args.Length == 0)
{
Console.WriteLine();
Console.WriteLine("Debugger attached. Input file name:");
args = new string[] { Console.ReadLine() };
}
// show usage if no args provided
if (args.Length == 0)
{
Console.WriteLine();
Console.WriteLine("Usage: BemaniToBMS <input file>");
Console.WriteLine();
Console.WriteLine("Drag and drop with files and folders is fully supported for this application.");
Console.WriteLine();
Console.WriteLine("Supported formats:");
Console.WriteLine("1, 2DX, CS, SD9, SSP");
}
// process files
for (int i = 0; i < args.Length; i++)
{
if (File.Exists(args[i]))
{
Console.WriteLine();
Console.WriteLine("Processing File: " + args[i]);
string filename = args[i];
// Database key: file name without leading zeros
// (presumably GetFileNameWithoutExtension - redacted; confirm).
string IIDXDBName = Path.<API key>(filename);
while (IIDXDBName.StartsWith("0"))
IIDXDBName = IIDXDBName.Substring(1);
byte[] data = File.ReadAllBytes(args[i]);
switch (Path.GetExtension(args[i]).ToUpper())
{
case @".1":
// IIDX arcade chart archive: tag each chart from the song DB,
// picking SP/DP level fields by chart slot, then emit BMS files.
using (MemoryStream source = new MemoryStream(data))
{
Bemani1 archive = Bemani1.Read(source, unitNumerator, unitDenominator);
if (db[IIDXDBName]["TITLE"] != "")
{
for (int j = 0; j < archive.ChartCount; j++)
{
Chart chart = archive.Charts[j];
if (chart != null)
{
chart.Tags["TITLE"] = db[IIDXDBName]["TITLE"];
chart.Tags["ARTIST"] = db[IIDXDBName]["ARTIST"];
chart.Tags["GENRE"] = db[IIDXDBName]["GENRE"];
// slots 0-5 are single-play, 6-11 double-play
if (j < 6)
chart.Tags["PLAYLEVEL"] = db[IIDXDBName]["DIFFICULTYSP" + config["IIDX"]["DIFFICULTY" + j.ToString()]];
else if (j < 12)
chart.Tags["PLAYLEVEL"] = db[IIDXDBName]["DIFFICULTYDP" + config["IIDX"]["DIFFICULTY" + j.ToString()]];
}
}
}
ConvertArchive(archive, config, args[i]);
}
break;
case @".2DX":
// Sound bank: extract all keysounds as WAVs at reduced volume.
using (MemoryStream source = new MemoryStream(data))
{
Console.WriteLine("Converting Samples");
Bemani2DX archive = Bemani2DX.Read(source);
ConvertSounds(archive.Sounds, filename, 0.6f);
}
break;
case @".CS":
using (MemoryStream source = new MemoryStream(data))
ConvertChart(BeatmaniaIIDXCSNew.Read(source), config, filename, -1, null);
break;
case @".CS2":
using (MemoryStream source = new MemoryStream(data))
ConvertChart(BeatmaniaIIDXCSOld.Read(source), config, filename, -1, null);
break;
case @".CS5":
using (MemoryStream source = new MemoryStream(data))
ConvertChart(Beatmania5Key.Read(source), config, filename, -1, null);
break;
case @".CS9":
// recognized but intentionally not converted
break;
case @".SD9":
// Single sample: decode and write alongside the source file.
using (MemoryStream source = new MemoryStream(data))
{
Sound sound = BemaniSD9.Read(source);
string targetFile = Path.<API key>(filename);
string targetPath = Path.Combine(Path.GetDirectoryName(filename), targetFile) + ".wav";
sound.WriteFile(targetPath, 1.0f);
}
break;
case @".SSP":
using (MemoryStream source = new MemoryStream(data))
ConvertSounds(BemaniSSP.Read(source).Sounds, filename, 1.0f);
break;
}
}
}
// wrap up
Console.WriteLine("BemaniToBMS finished.");
}
/// <summary>
/// Converts every non-null chart in the archive to a BMS file next to
/// the source file; the chart slot index selects difficulty/player tags.
/// </summary>
static public void ConvertArchive(Archive archive, Configuration config, string filename)
{
for (int j = 0; j < archive.ChartCount; j++)
{
if (archive.Charts[j] != null)
{
Console.WriteLine("Converting Chart " + j.ToString());
ConvertChart(archive.Charts[j], config, filename, j, null);
}
}
}
/// <summary>
/// Writes one chart as "@<name> [<players> <difficulty>].bms" beside the
/// source file, applying measure/note quantization and metadata tags.
/// </summary>
/// <param name="chart">Chart to convert.</param>
/// <param name="config">Settings; loaded on demand when null.</param>
/// <param name="filename">Source file path (determines output folder/name).</param>
/// <param name="index">Chart slot index used to look up difficulty/players; -1 for standalone files.</param>
/// <param name="map">Optional precomputed sample map; generated when null.</param>
static public void ConvertChart(Chart chart, Configuration config, string filename, int index, int[] map)
{
if (config == null)
{
config = LoadConfig();
}
int quantizeNotes = config["BMS"].GetValue("QuantizeNotes");
int quantizeMeasure = config["BMS"].GetValue("QuantizeMeasure");
int difficulty = config["IIDX"].GetValue("Difficulty" + index.ToString());
// e.g. "1P Another" - goes into the output file name suffix
string title = config["BMS"]["Players" + config["IIDX"]["Players" + index.ToString()]] + " " + config["BMS"]["Difficulty" + difficulty.ToString()];
title = title.Trim();
if (quantizeMeasure > 0)
chart.<API key>(quantizeMeasure);
using (MemoryStream mem = new MemoryStream())
{
BMS bms = new BMS();
bms.Charts = new Chart[] { chart };
string name = "";
if (chart.Tags.ContainsKey("TITLE"))
name = chart.Tags["TITLE"];
if (name == "")
name = Path.<API key>(Path.GetFileName(filename)); //ex: "1204 [1P Another]"
// write some tags
bms.Charts[0].Tags["TITLE"] = name;
if (chart.Tags.ContainsKey("ARTIST"))
bms.Charts[0].Tags["ARTIST"] = chart.Tags["ARTIST"];
if (chart.Tags.ContainsKey("GENRE"))
bms.Charts[0].Tags["GENRE"] = chart.Tags["GENRE"];
if (difficulty > 0)
bms.Charts[0].Tags["DIFFICULTY"] = difficulty.ToString();
// BMS #PLAYER: 3 = double play, 1 = single play
if (bms.Charts[0].Players > 1)
bms.Charts[0].Tags["PLAYER"] = "3";
else
bms.Charts[0].Tags["PLAYER"] = "1";
// strip characters that are invalid in file names
name = name.Replace(":", "_");
name = name.Replace("/", "_");
name = name.Replace("?", "_");
name = name.Replace("\\", "_");
if (title != null && title.Length > 0)
{
name += " [" + title + "]";
}
string output = Path.Combine(Path.GetDirectoryName(filename), @"@" + name + ".bms");
if (map == null)
bms.GenerateSampleMap();
else
bms.SampleMap = map;
if (quantizeNotes > 0)
bms.Charts[0].QuantizeNoteOffsets(quantizeNotes);
bms.GenerateSampleTags();
bms.Write(mem, true);
File.WriteAllBytes(output, mem.ToArray());
}
}
/// <summary>
/// Writes each sound as a BME-numbered .wav (0001.wav, ...) into a
/// directory named after the source file, creating it if needed.
/// </summary>
static public void ConvertSounds(Sound[] sounds, string filename, float volume)
{
string name = Path.<API key>(Path.GetFileName(filename));
string targetPath = Path.Combine(Path.GetDirectoryName(filename), name);
if (!Directory.Exists(targetPath))
Directory.CreateDirectory(targetPath);
int count = sounds.Length;
for (int j = 0; j < count; j++)
{
// sample indices in BMS are 1-based
int sampleIndex = j + 1;
sounds[j].WriteFile(Path.Combine(targetPath, Scharfrichter.Codec.Util.ConvertToBMEString(sampleIndex, 4) + @".wav"), volume);
}
}
/// <summary>
/// Loads the converter settings, seeding defaults for quantization,
/// difficulty/player labels and the IIDX slot-to-label mappings.
/// </summary>
static private Configuration LoadConfig()
{
Configuration config = Configuration.ReadFile(configFileName);
config["BMS"].SetDefaultValue("QuantizeMeasure", 16);
config["BMS"].SetDefaultValue("QuantizeNotes", 192);
config["BMS"].SetDefaultString("Difficulty1", "Beginner");
config["BMS"].SetDefaultString("Difficulty2", "Normal");
config["BMS"].SetDefaultString("Difficulty3", "Hyper");
config["BMS"].SetDefaultString("Difficulty4", "Another");
config["BMS"].SetDefaultString("Players1", "1P");
config["BMS"].SetDefaultString("Players2", "2P");
config["BMS"].SetDefaultString("Players3", "DP");
config["IIDX"].SetDefaultString("Difficulty0", "3");
config["IIDX"].SetDefaultString("Difficulty1", "2");
config["IIDX"].SetDefaultString("Difficulty2", "4");
config["IIDX"].SetDefaultString("Difficulty3", "1");
config["IIDX"].SetDefaultString("Difficulty6", "3");
config["IIDX"].SetDefaultString("Difficulty7", "2");
config["IIDX"].SetDefaultString("Difficulty8", "4");
config["IIDX"].SetDefaultString("Difficulty9", "1");
config["IIDX"].SetDefaultString("Players0", "1");
config["IIDX"].SetDefaultString("Players1", "1");
config["IIDX"].SetDefaultString("Players2", "1");
config["IIDX"].SetDefaultString("Players3", "1");
config["IIDX"].SetDefaultString("Players6", "3");
config["IIDX"].SetDefaultString("Players7", "3");
config["IIDX"].SetDefaultString("Players8", "3");
config["IIDX"].SetDefaultString("Players9", "3");
return config;
}
/// <summary>Loads the song metadata database file.</summary>
static private Configuration LoadDB()
{
Configuration config = Configuration.ReadFile(databaseFileName);
return config;
}
}
}
|
package com.iluwatar;
/**
 * Sergeant node of the composite army hierarchy.
 *
 * <p>Part of the Visitor pattern demo: it lets a {@link UnitVisitor}
 * handle this sergeant first, then delegates the visit to its children
 * through the superclass.</p>
 */
public class Sergeant extends Unit {

  /**
   * @param children subordinate units commanded by this sergeant
   */
  public Sergeant(Unit... children) {
    super(children);
  }

  /**
   * Dispatches this sergeant to the visitor, then forwards the visitor
   * to every child unit via {@code super.accept}.
   */
  @Override
  public void accept(UnitVisitor visitor) {
    visitor.visitSergeant(this);
    super.accept(visitor);
  }

  @Override
  public String toString() {
    return "sergeant";
  }
}
|
#ifndef vm_h
#define vm_h
#include "uv.h"
#include "wren.h"
// Executes the Wren script at [path] in a new VM.
// Exits if the script failed or could not be loaded.
void runFile(const char* path);
// Runs the Wren interactive REPL.
// Returns an exit code (presumably the process exit code - confirm at caller).
int runRepl();
// Gets the currently running VM.
WrenVM* getVM();
// Gets the libuv event loop the VM is using for async work.
uv_loop_t* getLoop();
// Set the exit code the CLI should exit with when done.
void setExitCode(int exitCode);
// Adds additional callbacks to use when binding foreign members from Wren.
// Used by the API test executable to let it wire up its own foreign functions.
// This must be called before calling [createVM()].
void setTestCallbacks(<API key> bindMethod,
<API key> bindClass,
void (*afterLoad)(WrenVM* vm));
#endif
|
<p>Magra is a sans serif typeface designed for contexts in which both spatial economy and multiple composition styles are required. Its neutral personality and humanist features make it a perfect candidate for corporate uses too. Its large x-height and robust stems provide good legibility and economy, plus great behavior in smaller sizes. Magra was selected to be part of the German editorial project Typodarium 2012.</p>
|
<?php
declare(strict_types=1);
namespace spec\Sylius\Bundle\CoreBundle\EventListener;
use Doctrine\ORM\Event\LifecycleEventArgs;
use PhpSpec\ObjectBehavior;
use Prophecy\Argument;
use Sylius\Component\Core\Model\CustomerInterface;
use Sylius\Component\Core\Model\ShopUserInterface;
use Sylius\Component\User\Canonicalizer\<API key>;
/**
 * Behaviour spec for the Doctrine lifecycle listener that canonicalizes
 * user credentials: on prePersist and preUpdate it lower-cases/normalizes
 * the username and e-mail of ShopUser entities and the e-mail of Customer
 * entities, and leaves any other entity type untouched.
 */
final class <API key> extends ObjectBehavior
{
function let(<API key> $canonicalizer): void
{
// the listener is constructed with the canonicalizer service it delegates to
$this->beConstructedWith($canonicalizer);
}
// prePersist on a ShopUser: both username and e-mail are canonicalized.
function <API key>($canonicalizer, LifecycleEventArgs $event, ShopUserInterface $user): void
{
$event->getEntity()->willReturn($user);
$user->getUsername()->willReturn('testUser');
$user->getEmail()->willReturn('test@email.com');
$user-><API key>('testuser')->shouldBeCalled();
$user->setEmailCanonical('test@email.com')->shouldBeCalled();
$canonicalizer->canonicalize('testUser')->willReturn('testuser')->shouldBeCalled();
$canonicalizer->canonicalize('test@email.com')->willReturn('test@email.com')->shouldBeCalled();
$this->prePersist($event);
}
// prePersist on a Customer: only the e-mail is canonicalized.
function <API key>($canonicalizer, LifecycleEventArgs $event, CustomerInterface $customer): void
{
$event->getEntity()->willReturn($customer);
$customer->getEmail()->willReturn('testUser@Email.com');
$customer->setEmailCanonical('testuser@email.com')->shouldBeCalled();
$canonicalizer->canonicalize('testUser@Email.com')->willReturn('testuser@email.com')->shouldBeCalled();
$this->prePersist($event);
}
// preUpdate on a ShopUser: same canonicalization as prePersist.
function <API key>($canonicalizer, LifecycleEventArgs $event, ShopUserInterface $user): void
{
$event->getEntity()->willReturn($user);
$user->getUsername()->willReturn('testUser');
$user->getEmail()->willReturn('test@email.com');
$user-><API key>('testuser')->shouldBeCalled();
$user->setEmailCanonical('test@email.com')->shouldBeCalled();
$canonicalizer->canonicalize('testUser')->willReturn('testuser')->shouldBeCalled();
$canonicalizer->canonicalize('test@email.com')->willReturn('test@email.com')->shouldBeCalled();
$this->preUpdate($event);
}
// preUpdate on a Customer: only the e-mail is canonicalized.
function <API key>($canonicalizer, LifecycleEventArgs $event, CustomerInterface $customer): void
{
$event->getEntity()->willReturn($customer);
$customer->getEmail()->willReturn('testUser@Email.com');
$customer->setEmailCanonical('testuser@email.com')->shouldBeCalled();
$canonicalizer->canonicalize('testUser@Email.com')->willReturn('testuser@email.com')->shouldBeCalled();
$this->preUpdate($event);
}
// Entities of unrelated types must pass through prePersist untouched.
function <API key>($canonicalizer, LifecycleEventArgs $event): void
{
$item = new \stdClass();
$event->getEntity()->willReturn($item);
$canonicalizer->canonicalize(Argument::any())->shouldNotBeCalled();
$this->prePersist($event);
}
// ...and through preUpdate untouched as well.
function <API key>($canonicalizer, LifecycleEventArgs $event): void
{
$item = new \stdClass();
$event->getEntity()->willReturn($item);
$canonicalizer->canonicalize(Argument::any())->shouldNotBeCalled();
$this->preUpdate($event);
}
}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<TITLE>
Uses of Class org.apache.poi.hmef.Attachment (POI API Documentation)
</TITLE>
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.poi.hmef.Attachment (POI API Documentation)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<A NAME="navbar_top"></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/poi/hmef/Attachment.html" title="class in org.apache.poi.hmef"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/poi/hmef/class-use/Attachment.html" target="_top"><B>FRAMES</B></A>
<A HREF="Attachment.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.poi.hmef.Attachment</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../../org/apache/poi/hmef/Attachment.html" title="class in org.apache.poi.hmef">Attachment</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.poi.hmef"><B>org.apache.poi.hmef</B></A></TD>
<TD> </TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.poi.hmef"></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../org/apache/poi/hmef/Attachment.html" title="class in org.apache.poi.hmef">Attachment</A> in <A HREF="../../../../../org/apache/poi/hmef/package-summary.html">org.apache.poi.hmef</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="<API key>">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../org/apache/poi/hmef/package-summary.html">org.apache.poi.hmef</A> that return types with arguments of type <A HREF="../../../../../org/apache/poi/hmef/Attachment.html" title="class in org.apache.poi.hmef">Attachment</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> java.util.List<<A HREF="../../../../../org/apache/poi/hmef/Attachment.html" title="class in org.apache.poi.hmef">Attachment</A>></CODE></FONT></TD>
<TD><CODE><B>HMEFMessage.</B><B><A HREF="../../../../../org/apache/poi/hmef/HMEFMessage.html#getAttachments()">getAttachments</A></B>()</CODE>
<BR>
Returns all the Attachments of the message.</TD>
</TR>
</TABLE>
<P>
<HR>
<A NAME="navbar_bottom"></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="<API key>"></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/poi/hmef/Attachment.html" title="class in org.apache.poi.hmef"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/poi/hmef/class-use/Attachment.html" target="_top"><B>FRAMES</B></A>
<A HREF="Attachment.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<HR>
<i>Copyright 2016 The Apache Software Foundation or
its licensors, as applicable.</i>
</BODY>
</HTML>
|
<h1>Pin audio file from SoundCloud</h1>
<form id="<API key>" name="webaudio_form" method="post" action="/webaudio_action" >
<p> </p>
Pin From :
<input type="radio" name="sub" value="local"/> Local
<input type="radio" name="sub" value="web" checked/> SoundCloud
<p> </p>
<select name="board_id" id="board_id" required class="inputText">
<option value="" selected="selected">Select Board</option>
{{#each boards}}
<option value="{{this._id}}">{{this.board_name}}</option>
{{/each}}
</select>
<p> </p>
<input type="text" class="inputText" name="audio_link" id="audio_link" value="" placeholder="Audio File Link" required/>
<!-- <input type="button" id="get_song" value="Find Song">-->
<div id="show_song" style="width:65%; margin:15px auto 0 15px;"></div>
<br />
<textarea name="description" id="description" placeholder="Description" required class="inputText" ></textarea>
<p> </p>
<!-- <input type="submit" name="upload_webaudio" id="upload_webaudio" value="Upload" />-->
</form>
<script>
// Switch the pin form between "local upload" and "SoundCloud link" modes
// when the source radio buttons change.
$("input[type=radio][name=sub]").unbind('click').bind('click',function(){
var sub = $("input[type=radio][name=sub]:checked").val();
if(sub=='local'){
// "Local" selected: replace the popup content with the local upload
// form and wire its button to validate + submit that form.
$("#pop_cont").load('/audio_upload',function(){
$('#get_img').html('Upload');
// unbind first so repeated loads don't stack duplicate click handlers
$('#get_img').unbind('click').bind('click',function(){
$("#<API key>").validate();
$('#<API key>').submit();
});
});
}
// NOTE(review): no handler for the "web" branch here - presumably the
// SoundCloud flow is handled by the surrounding page; confirm.
});
</script>
|
<form name="deleteForm" ng-submit="confirmDelete(user.login)">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true"
ng-click="clear()">&times;</button>
<h4 class="modal-title">Confirm delete operation</h4>
</div>
<div class="modal-body">
<p>Are you sure you want to delete this User?</p>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-default" data-dismiss="modal" ng-click="clear()">
<span class="glyphicon <API key>"></span> <span>Cancel</span>
</button>
<button type="submit" ng-disabled="deleteForm.$invalid" class="btn btn-danger">
<span class="glyphicon <API key>"></span> <span>Delete</span>
</button>
</div>
</form>
|
using System;
using System.Drawing;
using System.Globalization;
using System.Windows.Forms;
using DotSpatial.Controls;
using DotSpatial.Controls.Header;
using DotSpatial.Symbology;
namespace DotSpatial.Plugins.Contourer
{
/// <summary>
/// DotSpatial extension that adds a "Contour..." menu entry which builds
/// contour lines or filled contour polygons from a raster layer and adds
/// the result to the map with a matching symbology.
/// </summary>
public class Snapin : Extension
{
// Called by the host when the plugin is enabled: register our menu item.
public override void Activate()
{
AddMenuItems(App.HeaderControl);
base.Activate();
}
// Registers the single "Contour..." entry in the application header.
private void AddMenuItems(IHeaderControl header)
{
SimpleActionItem contourerItem = new SimpleActionItem("Contour...", menu_Click) { Key = "kC" };
header.Add(contourerItem);
}
// Menu handler: prompts for contour parameters via FormContour, adds the
// generated features as a map layer, and assigns one symbology category
// per contour level (lines) or per level interval (polygons).
private void menu_Click(object sender, EventArgs e)
{
using (FormContour frm = new FormContour())
{
frm.Layers = App.Map.GetRasterLayers();
if (frm.Layers.GetLength(0) <= 0)
{
MessageBox.Show("No raster layer found!");
return;
}
if (frm.ShowDialog() != DialogResult.OK) return;
IMapFeatureLayer fl = App.Map.Layers.Add(frm.Contours);
fl.LegendText = frm.LayerName + " - Contours";
// NOTE(review): numlevs is unused below - the loops iterate over
// frm.Color instead; confirm this is intended before removing.
int numlevs = frm.Lev.GetLength(0);
switch (frm.Contourtype)
{
case (Contour.ContourType.Line):
{
// one line category per contour level, filtered on [Value]
LineScheme ls = new LineScheme();
ls.Categories.Clear();
for (int i = 0; i < frm.Color.GetLength(0); i++)
{
LineCategory lc = new LineCategory(frm.Color[i], 2.0)
{
FilterExpression = "[Value] = " + frm.Lev[i],
LegendText = frm.Lev[i].ToString(CultureInfo.InvariantCulture)
};
ls.AddCategory(lc);
}
fl.Symbology = ls;
}
break;
case (Contour.ContourType.Polygon):
{
// one filled category per interval; legend shows "low - high"
// (reads frm.Lev[i + 1], so Lev is expected to have one more
// entry than Color - confirm against FormContour).
PolygonScheme ps = new PolygonScheme();
ps.Categories.Clear();
for (int i = 0; i < frm.Color.GetLength(0); i++)
{
PolygonCategory pc = new PolygonCategory(frm.Color[i], Color.Transparent, 0)
{
FilterExpression = "[Lev] = " + i,
LegendText = frm.Lev[i] + " - " + frm.Lev[i + 1]
};
ps.AddCategory(pc);
}
fl.Symbology = ps;
}
break;
}
}
}
}
}
|
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// Regression test: a TLS client with rejectUnauthorized enabled must fail to
// connect to a server whose certificate cannot be chained to a trusted root.
// Fix: the expected error-code constant was redacted/missing; restored to
// 'UNABLE_TO_VERIFY_LEAF_SIGNATURE', the code Node pairs with the message
// "unable to verify the first certificate".
if (!process.versions.openssl) {
  console.error('Skipping because node compiled without OpenSSL.');
  process.exit(0);
}
var common = require('../common');
var assert = require('assert');
var fs = require('fs');
var tls = require('tls');

// agent1's certificate is signed by a test CA that is not in the default trust store.
var key = fs.readFileSync(common.fixturesDir + '/keys/agent1-key.pem');
var cert = fs.readFileSync(common.fixturesDir + '/keys/agent1-cert.pem');

tls.createServer({ key: key, cert: cert }, function(conn) {
  conn.end();
  this.close();
}).listen(0, function() {
  var options = { port: this.address().port, rejectUnauthorized: true };
  // common.mustCall ensures the error handler fires exactly once.
  tls.connect(options).on('error', common.mustCall(function(err) {
    assert.equal(err.code, 'UNABLE_TO_VERIFY_LEAF_SIGNATURE');
    assert.equal(err.message, 'unable to verify the first certificate');
    this.destroy();
  }));
});
|
// Application root object: keeps one live instance of every registered
// page controller, keyed by controller class name.
Kanboard.App = function() {
    this.controllers = {};
};
// Look up a previously instantiated controller by its class name.
Kanboard.App.prototype.get = function(controller) {
    var registry = this.controllers;
    return registry[controller];
};
// Boot the application: instantiate every controller registered under the
// Kanboard namespace (except App itself), invoke its optional lifecycle
// hooks (execute/listen/focus), then wire the global UI behaviours.
Kanboard.App.prototype.execute = function() {
    for (var className in Kanboard) {
        if (className === "App") {
            continue;
        }

        var controller = new Kanboard[className](this);
        this.controllers[className] = controller;

        // Each lifecycle hook is optional on a controller.
        if (typeof controller.execute === "function") {
            controller.execute();
        }
        if (typeof controller.listen === "function") {
            controller.listen();
        }
        if (typeof controller.focus === "function") {
            controller.focus();
        }
    }

    this.focus();
    this.datePicker();
    this.autoComplete();
    this.tagAutoComplete();
};
// Make ".auto-select" inputs select their whole content on focus.
Kanboard.App.prototype.focus = function() {
    $(document).on('focus', '.auto-select', function() {
        $(this).select();
    });

    // Chrome fires mouseup right after focus, which would collapse the
    // selection again — swallow it.
    $(document).on('mouseup', '.auto-select', function(event) {
        event.preventDefault();
    });
};
// Initialize jQuery UI date and datetime pickers using the formats and
// locale published by the server as data attributes on <body>.
Kanboard.App.prototype.datePicker = function() {
    var body = $("body");
    var dateFormat = body.data("js-date-format");
    var timeFormat = body.data("js-time-format");
    var language = body.data("js-lang");

    // Apply the locale defaults before any widget is created.
    $.datepicker.setDefaults($.datepicker.regional[language]);
    $.timepicker.setDefaults($.timepicker.regional[language]);

    // Date-only fields.
    $(".form-date").datepicker({
        showOtherMonths: true,
        selectOtherMonths: true,
        dateFormat: dateFormat,
        constrainInput: false
    });

    // Combined date/time fields.
    $(".form-datetime").datetimepicker({
        dateFormat: dateFormat,
        timeFormat: timeFormat,
        constrainInput: false
    });
};
// Turn ".tag-autocomplete" fields into select2 widgets that accept new tags.
Kanboard.App.prototype.tagAutoComplete = function() {
    var options = { tags: true };
    $(".tag-autocomplete").select2(options);
};
// Wires jQuery UI autocomplete on every ".autocomplete" input: picking a
// suggestion copies its id into the hidden destination field (named by the
// "dst-field" data attribute, plus any "dst-extra-fields"), and the form's
// submit button stays disabled until a suggestion has been chosen.
Kanboard.App.prototype.autoComplete = function() {
    $(".autocomplete").each(function() {
        var input = $(this);
        var field = input.data("dst-field");
        var extraFields = input.data("dst-extra-fields");

        // FIX(review): this guard was truncated in the source ("if ($('").
        // Reconstructed to match the select handler below: keep the submit
        // button disabled while the destination field is still empty.
        if ($("input[name=" + field + "]").val() === "") {
            input.parent().find("button[type=submit]").attr('disabled','disabled');
        }

        input.autocomplete({
            source: input.data("search-url"),
            minLength: 1,
            select: function(event, ui) {
                // Store the selected item's id in the destination field.
                $("input[name=" + field + "]").val(ui.item.id);

                // Optionally propagate extra attributes of the selected item.
                if (extraFields) {
                    var fields = extraFields.split(',');
                    for (var i = 0; i < fields.length; i++) {
                        var fieldName = fields[i].trim();
                        $("input[name=" + fieldName + "]").val(ui.item[fieldName]);
                    }
                }

                input.parent().find("button[type=submit]").removeAttr('disabled');
            }
        });
    });
};
// True when an element with the given id exists in the document.
Kanboard.App.prototype.hasId = function(id) {
    var element = document.getElementById(id);
    return element !== null;
};
// Append the global spinner element to the page body.
Kanboard.App.prototype.showLoadingIcon = function() {
    var markup = '<span id="app-loading-icon"> <i class="fa fa-spinner fa-spin"></i></span>';
    $("body").append(markup);
};
// Remove the global spinner element, if present.
Kanboard.App.prototype.hideLoadingIcon = function() {
    var icon = $("#app-loading-icon");
    icon.remove();
};
|
module.exports = function(should, Assertion) {
/**
* Assert given object is NaN
* @name NaN
* @memberOf Assertion
* @category assertion numbers
* @example
*
* (10).should.not.be.NaN();
* NaN.should.be.NaN();
*/
Assertion.add('NaN', function() {
this.params = { operator: 'to be NaN' };
this.assert(this.obj !== this.obj);
});
/**
* Assert given object is not finite (positive or negative)
*
* @name Infinity
* @memberOf Assertion
* @category assertion numbers
* @example
*
* (10).should.not.be.Infinity();
* NaN.should.not.be.Infinity();
*/
Assertion.add('Infinity', function() {
this.params = { operator: 'to be Infinity' };
this.is.a.Number()
.and.not.a.NaN()
.and.assert(!isFinite(this.obj));
});
/**
* Assert given number between `start` and `finish` or equal one of them.
*
* @name within
* @memberOf Assertion
* @category assertion numbers
* @param {number} start Start number
* @param {number} finish Finish number
* @param {string} [description] Optional message
* @example
*
* (10).should.be.within(0, 20);
*/
Assertion.add('within', function(start, finish, description) {
this.params = { operator: 'to be within ' + start + '..' + finish, message: description };
this.assert(this.obj >= start && this.obj <= finish);
});
/**
* Assert given number near some other `value` within `delta`
*
* @name approximately
* @memberOf Assertion
* @category assertion numbers
* @param {number} value Center number
* @param {number} delta Radius
* @param {string} [description] Optional message
* @example
*
* (9.99).should.be.approximately(10, 0.1);
*/
Assertion.add('approximately', function(value, delta, description) {
this.params = { operator: 'to be approximately ' + value + " ±" + delta, message: description };
this.assert(Math.abs(this.obj - value) <= delta);
});
/**
* Assert given number above `n`.
*
* @name above
* @alias Assertion#greaterThan
* @memberOf Assertion
* @category assertion numbers
* @param {number} n Margin number
* @param {string} [description] Optional message
* @example
*
* (10).should.be.above(0);
*/
Assertion.add('above', function(n, description) {
this.params = { operator: 'to be above ' + n, message: description };
this.assert(this.obj > n);
});
/**
* Assert given number below `n`.
*
* @name below
* @alias Assertion#lessThan
* @memberOf Assertion
* @category assertion numbers
* @param {number} n Margin number
* @param {string} [description] Optional message
* @example
*
* (0).should.be.below(10);
*/
Assertion.add('below', function(n, description) {
this.params = { operator: 'to be below ' + n, message: description };
this.assert(this.obj < n);
});
Assertion.alias('above', 'greaterThan');
Assertion.alias('below', 'lessThan');
};
|
<?php
// Unit tests for a facet-collection result object: constructor wiring,
// facet accessors, iteration and Countable behaviour.
// NOTE(review): class/type names appear as '<API key>' — these are redaction
// placeholders in this source dump, not real identifiers.
class <API key> extends <API key>
{
/**
 * The result object under test.
 * @var <API key>
 */
protected $_result;
// Fixture: the raw facet map the result object is built from.
protected $_facets;
// Build a fresh result wrapper around two known facets before each test.
public function setUp()
{
$this->_facets = array(
'facet1' => 'content1',
'facet2' => 'content2',
);
$this->_result = new <API key>($this->_facets);
}
// getFacets() must return the complete map passed to the constructor.
public function testGetFacets()
{
$this->assertEquals($this->_facets, $this->_result->getFacets());
}
// getFacet() must return the content for a known key.
public function testGetFacet()
{
$this->assertEquals(
$this->_facets['facet2'],
$this->_result->getFacet('facet2')
);
}
// An unknown key must yield null rather than raising an error.
public function testGetInvalidFacet()
{
$this->assertEquals(
null,
$this->_result->getFacet('invalid')
);
}
// Iterating the result must visit every facet with its key, in order.
public function testIterator()
{
$items = array();
foreach($this->_result AS $key => $item)
{
$items[$key] = $item;
}
$this->assertEquals($this->_facets, $items);
}
// count() on the result must match the number of facets.
public function testCount()
{
$this->assertEquals(count($this->_facets), count($this->_result));
}
}
// <auto-generated>
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>
namespace Microsoft.Azure.Batch.Protocol
{
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// JobOperations operations.
/// </summary>
public partial interface IJobOperations
{
// NOTE(review): the XML documentation lines in this generated interface had
// lost their '///' prefixes; they are restored below. '<API key>' tokens are
// redaction placeholders from this source dump, not real identifiers.
/// <summary>
/// Gets lifetime summary statistics for all of the Jobs in the
/// specified Account.
/// </summary>
/// <remarks>
/// Statistics are aggregated across all Jobs that have ever existed in
/// the Account, from Account creation to the last update time of the
/// statistics. The statistics may not be immediately available. The
/// Batch service performs periodic roll-up of statistics. The typical
/// delay is about 30 minutes.
/// </remarks>
/// <param name='<API key>'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><JobStatistics,<API key>>> <API key>(<API key> <API key> = default(<API key>), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Deletes a Job.
/// </summary>
/// <remarks>
/// Deleting a Job also deletes all Tasks that are part of that Job,
/// and all Job statistics. This also overrides the retention period
/// for Task data; that is, if the Job contains Tasks which are still
/// retained on Compute Nodes, the Batch services deletes those Tasks'
/// working directories and all their contents. When a Delete Job
/// request is received, the Batch service sets the Job to the deleting
/// state. All update operations on a Job that is in deleting state
/// will fail with status code 409 (Conflict), with additional
/// information indicating that the Job is being deleted.
/// </remarks>
/// <param name='jobId'>
/// The ID of the Job to delete.
/// </param>
/// <param name='jobDeleteOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><JobDeleteHeaders>> <API key>(string jobId, JobDeleteOptions jobDeleteOptions = default(JobDeleteOptions), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Gets information about the specified Job.
/// </summary>
/// <param name='jobId'>
/// The ID of the Job.
/// </param>
/// <param name='jobGetOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><CloudJob,JobGetHeaders>> <API key>(string jobId, JobGetOptions jobGetOptions = default(JobGetOptions), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Updates the properties of the specified Job.
/// </summary>
/// <remarks>
/// This replaces only the Job properties specified in the request. For
/// example, if the Job has constraints, and a request does not specify
/// the constraints element, then the Job keeps the existing
/// constraints.
/// </remarks>
/// <param name='jobId'>
/// The ID of the Job whose properties you want to update.
/// </param>
/// <param name='jobPatchParameter'>
/// The parameters for the request.
/// </param>
/// <param name='jobPatchOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><JobPatchHeaders>> <API key>(string jobId, JobPatchParameter jobPatchParameter, JobPatchOptions jobPatchOptions = default(JobPatchOptions), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Updates the properties of the specified Job.
/// </summary>
/// <remarks>
/// This fully replaces all the updatable properties of the Job. For
/// example, if the Job has constraints associated with it and if
/// constraints is not specified with this request, then the Batch
/// service will remove the existing constraints.
/// </remarks>
/// <param name='jobId'>
/// The ID of the Job whose properties you want to update.
/// </param>
/// <param name='jobUpdateParameter'>
/// The parameters for the request.
/// </param>
/// <param name='jobUpdateOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><JobUpdateHeaders>> <API key>(string jobId, JobUpdateParameter jobUpdateParameter, JobUpdateOptions jobUpdateOptions = default(JobUpdateOptions), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Disables the specified Job, preventing new Tasks from running.
/// </summary>
/// <remarks>
/// The Batch Service immediately moves the Job to the disabling state.
/// Batch then uses the disableTasks parameter to determine what to do
/// with the currently running Tasks of the Job. The Job remains in the
/// disabling state until the disable operation is completed and all
/// Tasks have been dealt with according to the disableTasks option;
/// the Job then moves to the disabled state. No new Tasks are started
/// under the Job until it moves back to active state. If you try to
/// disable a Job that is in any state other than active, disabling, or
/// disabled, the request fails with status code 409.
/// </remarks>
/// <param name='jobId'>
/// The ID of the Job to disable.
/// </param>
/// <param name='disableTasks'>
/// What to do with active Tasks associated with the Job. Possible
/// values include: 'requeue', 'terminate', 'wait'
/// </param>
/// <param name='jobDisableOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><JobDisableHeaders>> <API key>(string jobId, DisableJobOption disableTasks, JobDisableOptions jobDisableOptions = default(JobDisableOptions), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Enables the specified Job, allowing new Tasks to run.
/// </summary>
/// <remarks>
/// When you call this API, the Batch service sets a disabled Job to
/// the enabling state. After the this operation is completed, the Job
/// moves to the active state, and scheduling of new Tasks under the
/// Job resumes. The Batch service does not allow a Task to remain in
/// the active state for more than 180 days. Therefore, if you enable a
/// Job containing active Tasks which were added more than 180 days
/// ago, those Tasks will not run.
/// </remarks>
/// <param name='jobId'>
/// The ID of the Job to enable.
/// </param>
/// <param name='jobEnableOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><JobEnableHeaders>> <API key>(string jobId, JobEnableOptions jobEnableOptions = default(JobEnableOptions), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Terminates the specified Job, marking it as completed.
/// </summary>
/// <remarks>
/// When a Terminate Job request is received, the Batch service sets
/// the Job to the terminating state. The Batch service then terminates
/// any running Tasks associated with the Job and runs any required Job
/// release Tasks. Then the Job moves into the completed state. If
/// there are any Tasks in the Job in the active state, they will
/// remain in the active state. Once a Job is terminated, new Tasks
/// cannot be added and any remaining active Tasks will not be
/// scheduled.
/// </remarks>
/// <param name='jobId'>
/// The ID of the Job to terminate.
/// </param>
/// <param name='terminateReason'>
/// The text you want to appear as the Job's TerminateReason. The
/// default is 'UserTerminate'.
/// </param>
/// <param name='jobTerminateOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><JobTerminateHeaders>> <API key>(string jobId, string terminateReason = default(string), JobTerminateOptions jobTerminateOptions = default(JobTerminateOptions), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Adds a Job to the specified Account.
/// </summary>
/// <remarks>
/// The Batch service supports two ways to control the work done as
/// part of a Job. In the first approach, the user specifies a Job
/// Manager Task. The Batch service launches this Task when it is ready
/// to start the Job. The Job Manager Task controls all other Tasks
/// that run under this Job, by using the Task APIs. In the second
/// approach, the user directly controls the execution of Tasks under
/// an active Job, by using the Task APIs. Also note: when naming Jobs,
/// avoid including sensitive information such as user names or secret
/// project names. This information may appear in telemetry logs
/// accessible to Microsoft Support engineers.
/// </remarks>
/// <param name='job'>
/// The Job to be added.
/// </param>
/// <param name='jobAddOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><JobAddHeaders>> <API key>(JobAddParameter job, JobAddOptions jobAddOptions = default(JobAddOptions), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Lists all of the Jobs in the specified Account.
/// </summary>
/// <param name='jobListOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><IPage<CloudJob>,JobListHeaders>> <API key>(JobListOptions jobListOptions = default(JobListOptions), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Lists the Jobs that have been created under the specified Job
/// Schedule.
/// </summary>
/// <param name='jobScheduleId'>
/// The ID of the Job Schedule from which you want to get a list of
/// Jobs.
/// </param>
/// <param name='<API key>'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><IPage<CloudJob>,<API key>>> <API key>(string jobScheduleId, <API key> <API key> = default(<API key>), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Lists the execution status of the Job Preparation and Job Release
/// Task for the specified Job across the Compute Nodes where the Job
/// has run.
/// </summary>
/// <remarks>
/// This API returns the Job Preparation and Job Release Task status on
/// all Compute Nodes that have run the Job Preparation or Job Release
/// Task. This includes Compute Nodes which have since been removed
/// from the Pool. If this API is invoked on a Job which has no Job
/// Preparation or Job Release Task, the Batch service returns HTTP
/// status code 409 (Conflict) with an error code of
/// <API key>.
/// </remarks>
/// <param name='jobId'>
/// The ID of the Job.
/// </param>
/// <param name='<API key>'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><IPage<<API key>>,<API key>>> <API key>(string jobId, <API key> <API key> = default(<API key>), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Gets the Task counts for the specified Job.
/// </summary>
/// <remarks>
/// Task counts provide a count of the Tasks by active, running or
/// completed Task state, and a count of Tasks which succeeded or
/// failed. Tasks in the preparing state are counted as running.
/// </remarks>
/// <param name='jobId'>
/// The ID of the Job.
/// </param>
/// <param name='<API key>'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><TaskCountsResult,<API key>>> <API key>(string jobId, <API key> <API key> = default(<API key>), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Lists all of the Jobs in the specified Account.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='jobListNextOptions'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><IPage<CloudJob>,JobListHeaders>> <API key>(string nextPageLink, JobListNextOptions jobListNextOptions = default(JobListNextOptions), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Lists the Jobs that have been created under the specified Job
/// Schedule.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='<API key>'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><IPage<CloudJob>,<API key>>> <API key>(string nextPageLink, <API key> <API key> = default(<API key>), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
/// <summary>
/// Lists the execution status of the Job Preparation and Job Release
/// Task for the specified Job across the Compute Nodes where the Job
/// has run.
/// </summary>
/// <remarks>
/// This API returns the Job Preparation and Job Release Task status on
/// all Compute Nodes that have run the Job Preparation or Job Release
/// Task. This includes Compute Nodes which have since been removed
/// from the Pool. If this API is invoked on a Job which has no Job
/// Preparation or Job Release Task, the Batch service returns HTTP
/// status code 409 (Conflict) with an error code of
/// <API key>.
/// </remarks>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='<API key>'>
/// Additional parameters for the operation
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="BatchErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<<API key><IPage<<API key>>,<API key>>> <API key>(string nextPageLink, <API key> <API key> = default(<API key>), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken));
}
}
|
using System;
using System.IO;
using System.Collections;
using System.Runtime.InteropServices;
namespace Earlab
{
// Callback signature used by the native DLL to report log messages back to C#.
public delegate void LogCallback(String LogMessage);
/// <summary>
/// DesktopEarlabDLL is a wrapper class that wraps the DesktopEarlabDLL.dll functionality
/// and exposes it to C# applications
/// </summary>
// NOTE(review): several P/Invoke entry points and method names appear as
// '<API key>' — redaction placeholders in this source dump, not real identifiers.
class DesktopEarlabDLL
{
// Opaque native handle to the model instance; IntPtr.Zero means "not created".
// NOTE(review): no finalizer/IDisposable calls UnloadModelExternal automatically;
// callers appear responsible for invoking UnloadModel — TODO confirm.
IntPtr theModel;
private const string DLLFileName = @"DesktopEarlabDLL.dll";
// Working directory captured in LoadModelConfigFile and restored in UnloadModel.
private string SavedPath;
[DllImport(DLLFileName, EntryPoint="CreateModel", CallingConvention=CallingConvention.Cdecl)]
private static extern IntPtr CreateModelExternal();
[DllImport(DLLFileName, EntryPoint="SetLogCallback", CallingConvention=CallingConvention.Cdecl)]
private static extern void <API key>(IntPtr ModelPtr, LogCallback theCallback);
[DllImport(DLLFileName, EntryPoint="SetModuleDirectory", CallingConvention=CallingConvention.Cdecl)]
private static extern int <API key>(IntPtr ModelPtr, string ModuleDirectoryPath);
[DllImport(DLLFileName, EntryPoint="SetInputDirectory", CallingConvention=CallingConvention.Cdecl)]
private static extern int <API key>(IntPtr ModelPtr, string InputDirectoryPath);
[DllImport(DLLFileName, EntryPoint="SetOutputDirectory", CallingConvention=CallingConvention.Cdecl)]
private static extern int <API key>(IntPtr ModelPtr, string OutputDirectoryPath);
[DllImport(DLLFileName, EntryPoint="LoadModelConfigFile", CallingConvention=CallingConvention.Cdecl)]
private static extern int <API key>(IntPtr ModelPtr, string ModelConfigFileName, float FrameSize_uS);
[DllImport(DLLFileName, EntryPoint="<API key>", CallingConvention=CallingConvention.Cdecl)]
private static extern int <API key>(IntPtr ModelPtr, string <API key>);
[DllImport(DLLFileName, EntryPoint="StartModules", CallingConvention=CallingConvention.Cdecl)]
private static extern int <API key>(IntPtr ModelPtr);
[DllImport(DLLFileName, EntryPoint="RunModel", CallingConvention=CallingConvention.Cdecl)]
private static extern int RunModelExternal(IntPtr ModelPtr, int NumFrames);
[DllImport(DLLFileName, EntryPoint="AdvanceModules", CallingConvention=CallingConvention.Cdecl)]
private static extern int <API key>(IntPtr ModelPtr);
[DllImport(DLLFileName, EntryPoint="StopModules", CallingConvention=CallingConvention.Cdecl)]
private static extern int StopModulesExternal(IntPtr ModelPtr);
[DllImport(DLLFileName, EntryPoint="UnloadModel", CallingConvention=CallingConvention.Cdecl)]
private static extern int UnloadModelExternal(IntPtr ModelPtr);
// Creates the native model instance; throws if the DLL returns a null handle.
public DesktopEarlabDLL()
{
theModel = DesktopEarlabDLL.CreateModelExternal();
if (theModel == IntPtr.Zero)
throw new <API key>("Failed to initialize model");
}
// Points the model at the directory containing its processing modules.
// Return value is the native call's status code.
public int SetModuleDirectory(string ModuleDirectoryPath)
{
if (theModel == IntPtr.Zero)
throw new <API key>("Model not initialized");
return DesktopEarlabDLL.<API key>(theModel, ModuleDirectoryPath);
}
// Sets the directory the model reads its input files from.
// NOTE(review): the native call returns an int status that is discarded here.
public void SetInputDirectory(string InputDirectoryPath)
{
if (theModel == IntPtr.Zero)
throw new <API key>("Model not initialized");
DesktopEarlabDLL.<API key>(theModel, InputDirectoryPath);
}
// Sets the directory the model writes its output files to.
// NOTE(review): the native call returns an int status that is discarded here.
public void SetOutputDirectory(string OutputDirectoryPath)
{
if (theModel == IntPtr.Zero)
throw new <API key>("Model not initialized");
DesktopEarlabDLL.<API key>(theModel, OutputDirectoryPath);
}
// Registers the delegate the native code invokes for log output.
// NOTE(review): the native side keeps a pointer to this delegate; the caller
// must keep a reference alive or the GC may collect it — TODO confirm.
public void SetLogCallback(LogCallback theCallback)
{
if (theModel == IntPtr.Zero)
throw new <API key>("Model not initialized");
DesktopEarlabDLL.<API key>(theModel, theCallback);
}
// Loads the model configuration. Side effect: changes the process-wide
// current directory to the config file's directory; it is only restored
// when UnloadModel is called (not on failure paths).
public int LoadModelConfigFile(string ModelConfigFileName, float FrameSize_uS)
{
if (theModel == IntPtr.Zero)
throw new <API key>("Model not initialized");
SavedPath = Environment.CurrentDirectory;
Environment.CurrentDirectory = Path.GetDirectoryName(ModelConfigFileName);
return DesktopEarlabDLL.<API key>(theModel, ModelConfigFileName, FrameSize_uS);
}
// Forwards a (redacted-name) configuration call to the native DLL.
public int <API key>(string <API key>)
{
if (theModel == IntPtr.Zero)
throw new <API key>("Model not initialized");
return DesktopEarlabDLL.<API key>(theModel, <API key>);
}
// Starts all loaded modules; returns the native status code.
public int StartModules()
{
if (theModel == IntPtr.Zero)
throw new <API key>("Model not initialized");
return DesktopEarlabDLL.<API key>(theModel);
}
// Runs the model for the given number of frames; returns the native status code.
public int RunModel(int NumFrames)
{
if (theModel == IntPtr.Zero)
throw new <API key>("Model not initialized");
return DesktopEarlabDLL.RunModelExternal(theModel, NumFrames);
}
// Advances all modules by one step; returns the native status code.
public int AdvanceModules()
{
if (theModel == IntPtr.Zero)
throw new <API key>("Model not initialized");
return DesktopEarlabDLL.<API key>(theModel);
}
// Stops all running modules; returns the native status code.
public int StopModules()
{
if (theModel == IntPtr.Zero)
throw new <API key>("Model not initialized");
return DesktopEarlabDLL.StopModulesExternal(theModel);
}
// Unloads the model and restores the working directory saved by
// LoadModelConfigFile. NOTE(review): theModel handle is not cleared to
// IntPtr.Zero afterwards, so later calls would pass a stale handle.
public int UnloadModel()
{
if (theModel == IntPtr.Zero)
throw new <API key>("Model not initialized");
Environment.CurrentDirectory = SavedPath;
return DesktopEarlabDLL.UnloadModelExternal(theModel);
}
}
}
|
# Answer to a multiple-response survey question; soft-deleted via acts_as_paranoid.
class SurveyMrqAnswer < ActiveRecord::Base
acts_as_paranoid
attr_accessible :user_course_id, :question_id, :option_id, :selected_options, :<API key>
belongs_to :user_course
belongs_to :question, class_name: "SurveyQuestion"
belongs_to :option, class_name: "<API key>"
belongs_to :survey_submission
#TODO: not in use, can be removed
# Returns the option records whose ids are stored in selected_options.
# SECURITY NOTE(review): eval on a persisted string is arbitrary code
# execution if selected_options can ever be user-influenced; prefer
# JSON.parse or `serialize :selected_options`. Left unchanged only because
# the method is flagged unused and the stored format is not visible here.
def options
<API key>.where(id: eval(selected_options))
end
end
|
<?php
namespace Kendo\UI;
// Localizable message strings for one frequency pane of the Kendo UI
// scheduler's recurrence editor. (Class name redacted as '<API key>' in this
// source dump; the 'day' property suggests the daily-frequency messages.)
class <API key> extends \Kendo\SerializableObject {
//>> Properties
/**
 * The text similar to "Day " displayed in the scheduler recurrence editor.
 * @param string $value
 * @return \Kendo\UI\<API key>
 */
public function day($value) {
return $this->setProperty('day', $value);
}
/**
 * The text displayed after the interval input in the scheduler recurrence
 * editor.
 * NOTE(review): this docblock originally said " month(s)", which looks
 * copy-pasted from the monthly messages class; for a daily editor the
 * suffix would be " day(s)" — confirm against the Kendo message catalog.
 * @param string $value
 * @return \Kendo\UI\<API key>
 */
public function interval($value) {
return $this->setProperty('interval', $value);
}
/**
 * The text similar to "Repeat every: " displayed in the scheduler recurrence editor.
 * @param string $value
 * @return \Kendo\UI\<API key>
 */
public function repeatEvery($value) {
return $this->setProperty('repeatEvery', $value);
}
/**
 * The text similar to "Repeat on: " displayed in the scheduler recurrence editor.
 * @param string $value
 * @return \Kendo\UI\<API key>
 */
public function repeatOn($value) {
return $this->setProperty('repeatOn', $value);
}
//<< Properties
}
?>
|
// File: plserver_internal.h
// Contents: Server internal functions for Corman Lisp
// History: 8/5/97 RGC Created.
#include "CharBuf.h"
extern IUnknown* ClientUnknown;
extern <API key>* ClientTextOutput;
extern <API key>* ClientMessage;
extern CharBuf TerminalInputBuf;
|
<p id="sherman_jason" data-toggle="tooltip" title="<img src='{{site.filesurl}}/people/sherman_jason.jpg' alt='Jason Sherman' />">
<span class="person">Jason Sherman</span> is an Oklahoma native, a
student of the liberal arts, and an IT Analyst at the University
of Oklahoma Libraries. He spends much of his time building
infrastructure for developers, but also pitches in by writing the
occasional integration module or migration script.
</p>
|
package main
import (
"bytes"
"errors"
"fmt"
"os"
"os/exec"
"path/filepath"
"regexp"
"runtime"
"strconv"
"strings"
"sync"
"time"
)
var (
// bashPath is the absolute path to bash, resolved once by setBash().
bashPath string
// debugging enables verbose logging (set when the DEBUG env var is non-empty).
debugging = false
// erroring records whether any test failed; consulted before exiting.
// NOTE(review): written from worker goroutines without synchronization —
// this is a data race under `go test -race`.
erroring = false
// maxprocs is the number of concurrent test workers (env-overridable).
maxprocs = 4
// testPattern matches integration test scripts named test/test-<name>.sh.
testPattern = regexp.MustCompile(`test[/\\]test-([a-z\-]+)\.sh$`)
)
// mainIntegration discovers the integration test scripts, runs them on a
// bounded pool of worker goroutines, prints each test's output as it
// completes, and exits non-zero if any test failed.
//
// Fix over the previous version: the final drain used to call
// printOutput(output) from main while the printer goroutine was still
// running, so the two readers raced for the buffered messages and main
// could exit before the goroutine had flushed its lines (losing output).
// The printer now signals completion over a channel and main waits for it.
func mainIntegration() {
	if len(os.Getenv("DEBUG")) > 0 {
		debugging = true
	}

	setBash()

	if max, _ := strconv.Atoi(os.Getenv("<API key>")); max > 0 {
		maxprocs = max
	}

	fmt.Println("Running this maxprocs", maxprocs)

	files := testFiles()
	if len(files) == 0 {
		fmt.Println("no tests to run")
		os.Exit(1)
	}

	var wg sync.WaitGroup
	tests := make(chan string, len(files))
	output := make(chan string, len(files))

	for _, file := range files {
		tests <- file
	}

	// Single printer goroutine; closing `printed` marks that every message
	// has been written to stdout.
	printed := make(chan struct{})
	go func() {
		printOutput(output)
		close(printed)
	}()

	for i := 0; i < maxprocs; i++ {
		wg.Add(1)
		go worker(tests, output, &wg)
	}

	close(tests)
	wg.Wait()
	close(output)

	// Wait for the printer to finish draining before deciding the exit code.
	<-printed

	if erroring {
		os.Exit(1)
	}
}
// runTest executes one test script under bash, captures its combined
// stdout/stderr, and reports the result via sendTestOutput. A test that runs
// longer than 3 minutes is reported as timed out and its process killed.
//
// Fix over the previous version: `done` was unbuffered, so after a timeout
// the goroutine below blocked forever on `done <- err` (nobody reads it once
// the select has taken the timeout branch), leaking one goroutine per timed
// out test. Buffering the channel lets the send always complete.
func runTest(output chan string, testname string) {
	buf := &bytes.Buffer{}
	cmd := exec.Command(bashPath, testname)
	cmd.Stdout = buf
	cmd.Stderr = buf

	if err := cmd.Start(); err != nil {
		sendTestOutput(output, testname, buf, err)
		return
	}

	// Buffered so the waiter can deliver its result and exit even if we have
	// already given up on this test.
	done := make(chan error, 1)
	go func() {
		if err := cmd.Wait(); err != nil {
			done <- err
		}
		close(done)
	}()

	select {
	case err := <-done:
		sendTestOutput(output, testname, buf, err)
	case <-time.After(3 * time.Minute):
		sendTestOutput(output, testname, buf, errors.New("Timed out"))
		cmd.Process.Kill()
	}
}
// sendTestOutput forwards one test's combined output to the output channel.
// On failure it flips the package-level erroring flag, optionally logs the
// failure immediately (debug mode), and prefixes the output with an error
// header naming the test.
func sendTestOutput(output chan string, testname string, buf *bytes.Buffer, err error) {
	combined := strings.TrimSpace(buf.String())
	if combined == "" {
		combined = fmt.Sprintf("<no output for %s>", testname)
	}

	if err == nil {
		output <- combined
		return
	}

	base := filepath.Base(testname)
	if debugging {
		fmt.Printf("Error on %s: %s\n", base, err)
	}
	erroring = true
	output <- fmt.Sprintf("error: %s => %s\n%s", base, err, combined)
}
// printOutput prints every message received on output to stdout and returns
// once the channel is closed and drained.
//
// Fix over the previous version: a `for { select { <single case> } }` loop is
// the non-idiomatic spelling of ranging over the channel; `for range` performs
// the closed-channel check itself.
func printOutput(output <-chan string) {
	for out := range output {
		fmt.Println(out)
	}
}
// worker pulls test names off the tests channel and runs each one, sending
// results to output. It returns (and marks the WaitGroup done) when the tests
// channel is closed and empty.
//
// Fix over the previous version: replaced the single-case `for { select }`
// loop with the idiomatic `for range` over the channel, which handles the
// closed-channel check itself.
func worker(tests <-chan string, output chan string, wg *sync.WaitGroup) {
	defer wg.Done()
	for testname := range tests {
		runTest(output, testname)
	}
}
// testFiles returns the test scripts to run. With fewer than four
// command-line arguments it runs everything; otherwise each argument is
// treated as a candidate test name, mapped to test/test-<name>.sh, and kept
// only if such a script actually exists on disk.
func testFiles() []string {
	if len(os.Args) < 4 {
		return allTestFiles()
	}

	known := make(map[string]bool)
	for _, f := range allTestFiles() {
		known[f] = true
	}

	selected := make([]string, 0, len(os.Args)-3)
	for _, arg := range os.Args {
		candidate := "test/test-" + arg + ".sh"
		if known[candidate] {
			selected = append(selected, candidate)
		}
	}
	return selected
}
// allTestFiles walks the "test" directory and returns every path matching
// testPattern (test/test-<name>.sh). Walk errors, directories, and
// non-matching files are silently skipped, so a missing "test" directory
// simply yields an empty slice.
func allTestFiles() []string {
files := make([]string, 0, 100)
filepath.Walk("test", func(path string, info os.FileInfo, err error) error {
if debugging {
fmt.Println("FOUND:", path)
}
// Deliberately ignore walk errors rather than aborting the whole walk.
if err != nil || info.IsDir() || !testPattern.MatchString(path) {
return nil
}
if debugging {
fmt.Println("MATCHING:", path)
}
files = append(files, path)
return nil
})
return files
}
// setBash locates the bash executable (via `which`, or `where` on Windows),
// stores the first path found in the package-level bashPath, and sanity-checks
// it by running `bash --version`. Any failure aborts the process with a
// message and exit code 1.
func setBash() {
findcmd := "which"
if runtime.GOOS == "windows" {
// Can't use paths returned from which even if it's on PATH in Windows
// Because our Go binary is a separate Windows app & not MinGW, it
// can't understand paths like '/usr/bin/bash', needs Windows version
findcmd = "where"
}
out, err := exec.Command(findcmd, "bash").Output()
if err != nil {
fmt.Println("Unable to find bash:", err)
os.Exit(1)
}
if len(out) == 0 {
fmt.Printf("No output from '%s bash'\n", findcmd)
os.Exit(1)
}
// `where` can report several candidates, one per line; take the first.
bashPath = strings.TrimSpace(strings.Split(string(out), "\n")[0])
if debugging {
fmt.Println("Using", bashPath)
}
// Test
_, err = exec.Command(bashPath, "--version").CombinedOutput()
if err != nil {
fmt.Println("Error calling bash:", err)
os.Exit(1)
}
}
|
require 'oauth/signature/base'
module OAuth::Signature
# PLAINTEXT signature method: the signature is simply the escaped secret —
# no digest is computed. Comparison escapes the candidate before matching.
class PLAINTEXT < Base
implements 'plaintext'
# The signature is the (escaped) secret itself.
def signature
<API key>
end
# Compares against an incoming signature, escaping it first so both sides
# are in the same (escaped) form.
def ==(cmp_signature)
signature == escape(cmp_signature)
end
# (Name redacted in this copy.) Delegates to the escaped secret.
def <API key>
secret
end
# Escapes the secret produced by the superclass.
def secret
escape(super)
end
end
end
|
'use strict';

var APPLICATION_JSON = 'application/json';
// Default JSON Content-Type header value; shallow-copied into the per-method
// defaults (post/put/patch) below.
var <API key> = {'Content-Type': APPLICATION_JSON + ';charset=utf-8'};
// Heuristics for "looks like JSON": opens with [ or { (but not {{, which
// would be an interpolation marker) and closes with the matching bracket.
var JSON_START = /^\[|^\{(?!\{)/;
var JSON_ENDS = {
'[': /]$/,
'{': /}$/
};
// XSSI protection prefix (")]}',\n") some servers prepend to JSON bodies.
var <API key> = /^\)\]\}',?\n/;
var $httpMinErr = minErr('$http');
// Builds a stub that throws when a disabled legacy promise method
// (success/error) is invoked; see useLegacyPromiseExtensions below.
var $httpMinErrLegacyFn = function(method) {
return function() {
throw $httpMinErr('legacy', 'The method `{0}` on the promise returned from `$http` has been disabled.', method);
};
};
/**
 * Converts a value to its query-string form: Dates become ISO-8601 strings,
 * other objects are JSON-stringified, and primitives pass through untouched.
 */
function serializeValue(v) {
  if (!isObject(v)) {
    return v;
  }
  return isDate(v) ? v.toISOString() : toJson(v);
}
// Provider for the default $http params serializer (name redacted in this
// copy). The returned service turns a params object into a sorted,
// URL-encoded query string.
function $<API key>() {
/**
* @ngdoc service
* @name $httpParamSerializer
* @description
*
* Default {@link $http `$http`} params serializer that converts objects to strings
* according to the following rules:
*
* * `{'foo': 'bar'}` results in `foo=bar`
* * `{'foo': Date.now()}` results in `foo=2015-04-01T09%3A50%3A49.262Z` (`toISOString()` and encoded representation of a Date object)
* * `{'foo': ['bar', 'baz']}` results in `foo=bar&foo=baz` (repeated key for each array element)
* * `{'foo': {'bar':'baz'}}` results in `foo=%7B%22bar%22%3A%22baz%22%7D"` (stringified and encoded representation of an object)
*
* Note that serializer will sort the request parameters alphabetically.
* */
this.$get = function() {
return function ngParamSerializer(params) {
if (!params) return '';
var parts = [];
// forEachSorted yields keys alphabetically, giving a stable output order.
forEachSorted(params, function(value, key) {
// null/undefined params are omitted entirely.
if (value === null || isUndefined(value)) return;
if (isArray(value)) {
// Arrays become repeated `key=value` pairs.
forEach(value, function(v, k) {
parts.push(encodeUriQuery(key) + '=' + encodeUriQuery(serializeValue(v)));
});
} else {
parts.push(encodeUriQuery(key) + '=' + encodeUriQuery(serializeValue(value)));
}
});
return parts.join('&');
};
};
}
// Provider for a jQuery.param()-style serializer (name redacted in this
// copy): nested objects and arrays are flattened into bracketed keys, e.g.
// {a: {b: 1}} -> a%5Bb%5D=1 and {a: [1, 2]} -> a%5B%5D=1&a%5B%5D=2.
function $<API key>() {
this.$get = function() {
return function <API key>(params) {
if (!params) return '';
var parts = [];
serialize(params, '', true);
return parts.join('&');
// Recursively flattens `toSerialize` under `prefix`; `topLevel` keys are
// emitted bare, nested keys are wrapped in brackets.
function serialize(toSerialize, prefix, topLevel) {
if (toSerialize === null || isUndefined(toSerialize)) return;
if (isArray(toSerialize)) {
forEach(toSerialize, function(value, index) {
// Array of objects keeps the index inside the brackets; array of
// scalars uses empty brackets (jQuery convention).
serialize(value, prefix + '[' + (isObject(value) ? index : '') + ']');
});
} else if (isObject(toSerialize) && !isDate(toSerialize)) {
forEachSorted(toSerialize, function(value, key) {
serialize(value, prefix +
(topLevel ? '' : '[') +
key +
(topLevel ? '' : ']'));
});
} else {
// Leaf value: emit one encoded key=value pair.
parts.push(encodeUriQuery(prefix) + '=' + encodeUriQuery(serializeValue(toSerialize)));
}
}
};
};
}
// Default response transform (name redacted in this copy): for string
// payloads, strips the XSSI protection prefix and deserializes when the
// Content-Type declares JSON or the text itself looks like JSON; any other
// payload passes through unchanged.
function <API key>(data, headers) {
if (isString(data)) {
// Strip json vulnerability protection prefix and trim whitespace
var tempData = data.replace(<API key>, '').trim();
if (tempData) {
var contentType = headers('Content-Type');
// Parse when the server says JSON, or as a fallback when the body is
// shaped like JSON (some servers omit/mislabel the header).
if ((contentType && (contentType.indexOf(APPLICATION_JSON) === 0)) || isJsonLike(tempData)) {
data = fromJson(tempData);
}
}
}
return data;
}
/**
 * Heuristically decides whether a string is a JSON array/object literal:
 * it must open with `[` or `{` (JSON_START) and close with the matching
 * bracket (JSON_ENDS).
 */
function isJsonLike(str) {
  var opening = str.match(JSON_START);
  return opening && JSON_ENDS[opening[0]].test(str);
}
/**
* Parse headers into key value object
*
* @param {string} headers Raw headers as a string
* @returns {Object} Parsed headers as key value object
*/
function parseHeaders(headers) {
  // Map of lower-cased header name -> value. Repeated header names are
  // folded into one comma-separated value, matching HTTP semantics.
  var parsed = createMap();

  function addHeader(name, value) {
    if (!name) return;
    parsed[name] = parsed[name] ? parsed[name] + ', ' + value : value;
  }

  if (isString(headers)) {
    // Raw header block: one "Name: value" pair per line. Lines without a
    // colon produce an empty name and are dropped by addHeader.
    forEach(headers.split('\n'), function(line) {
      var colonIdx = line.indexOf(':');
      addHeader(lowercase(trim(line.substr(0, colonIdx))), trim(line.substr(colonIdx + 1)));
    });
  } else if (isObject(headers)) {
    // Already an object: just normalize names and trim values.
    forEach(headers, function(headerVal, headerKey) {
      addHeader(lowercase(headerKey), trim(headerVal));
    });
  }

  return parsed;
}
/**
* Returns a function that provides access to parsed headers.
*
* Headers are lazy parsed when first requested.
* @see parseHeaders
*
* @param {(string|Object)} headers Headers to provide access to.
* @returns {function(string=)} Returns a getter function which if called with:
*
* - if called with single an argument returns a single header value or null
* - if called with no arguments returns an object containing all headers.
*/
function headersGetter(headers) {
  // Parsed lazily on first access and cached for subsequent lookups.
  var parsedCache;

  return function(name) {
    if (!parsedCache) parsedCache = parseHeaders(headers);

    // No name (or empty string): hand back the whole parsed map.
    if (!name) return parsedCache;

    // Header names are case-insensitive; missing headers map to null.
    var headerValue = parsedCache[lowercase(name)];
    return headerValue === void 0 ? null : headerValue;
  };
}
/**
* Chain all given functions
*
* This function is used for both request and response transforming
*
* @param {*} data Data to transform.
* @param {function(string=)} headers HTTP headers getter fn.
* @param {number} status HTTP status code of the response.
* @param {(Function|Array.<Function>)} fns Function or an array of functions.
* @returns {*} Transformed data.
*/
function transformData(data, headers, status, fns) {
  // A single transform function is applied directly; an array is applied
  // left-to-right as a pipeline, each stage feeding the next.
  if (isFunction(fns)) {
    return fns(data, headers, status);
  }

  var result = data;
  forEach(fns, function(fn) {
    result = fn(result, headers, status);
  });
  return result;
}
/**
 * True for HTTP status codes in the 2xx success range.
 */
function isSuccess(status) {
  return status >= 200 && status < 300;
}
/**
* @ngdoc provider
* @name $httpProvider
* @description
* Use `$httpProvider` to change the default behavior of the {@link ng.$http $http} service.
* */
// Provider for the $http service: holds the configurable defaults
// (transforms, headers, XSRF names, param serializer), the interceptor
// registry, and the $get factory that builds the $http function itself.
function $HttpProvider() {
/**
* @ngdoc property
* @name $httpProvider#defaults
* @description
*
* Object containing default values for all {@link ng.$http $http} requests.
*
* - **`defaults.cache`** - {Object} - an object built with {@link ng.$cacheFactory `$cacheFactory`}
* that will provide the cache for all requests who set their `cache` property to `true`.
* If you set the `defaults.cache = false` then only requests that specify their own custom
* cache object will be cached. See {@link $http#caching $http Caching} for more information.
*
* - **`defaults.xsrfCookieName`** - {string} - Name of cookie containing the XSRF token.
* Defaults value is `'XSRF-TOKEN'`.
*
* - **`defaults.xsrfHeaderName`** - {string} - Name of HTTP header to populate with the
* XSRF token. Defaults value is `'X-XSRF-TOKEN'`.
*
* - **`defaults.headers`** - {Object} - Default headers for all $http requests.
* Refer to {@link ng.$http#<API key> $http} for documentation on
* setting default headers.
* - **`defaults.headers.common`**
* - **`defaults.headers.post`**
* - **`defaults.headers.put`**
* - **`defaults.headers.patch`**
*
*
* - **`defaults.paramSerializer`** - `{string|function(Object<string,string>):string}` - A function
* used to the prepare string representation of request parameters (specified as an object).
* If specified as string, it is interpreted as a function registered with the {@link auto.$injector $injector}.
* Defaults to {@link ng.$httpParamSerializer $httpParamSerializer}.
*
**/
var defaults = this.defaults = {
// transform incoming response data
transformResponse: [<API key>],
// transform outgoing request data
transformRequest: [function(d) {
// Serialize plain objects to JSON; File/Blob/FormData bodies are sent as-is.
return isObject(d) && !isFile(d) && !isBlob(d) && !isFormData(d) ? toJson(d) : d;
}],
// default headers
headers: {
common: {
'Accept': 'application/json, text/plain, */*'
},
post: shallowCopy(<API key>),
put: shallowCopy(<API key>),
patch: shallowCopy(<API key>)
},
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
paramSerializer: '$httpParamSerializer'
};
var useApplyAsync = false;
/**
* @ngdoc method
* @name $httpProvider#useApplyAsync
* @description
*
* Configure $http service to combine processing of multiple http responses received at around
* the same time via {@link ng.$rootScope.Scope#$applyAsync $rootScope.$applyAsync}. This can result in
* significant performance improvement for bigger applications that make many HTTP requests
* concurrently (common during application bootstrap).
*
* Defaults to false. If no value is specified, returns the current configured value.
*
* @param {boolean=} value If true, when requests are loaded, they will schedule a deferred
* "apply" on the next tick, giving time for subsequent requests in a roughly ~10ms window
* to load and share the same digest cycle.
*
* @returns {boolean|Object} If a value is specified, returns the $httpProvider for chaining.
* otherwise, returns the current configured value.
**/
this.useApplyAsync = function(value) {
if (isDefined(value)) {
useApplyAsync = !!value;
return this;
}
return useApplyAsync;
};
var useLegacyPromise = true;
/**
* @ngdoc method
* @name $httpProvider#<API key>
* @description
*
* Configure `$http` service to return promises without the shorthand methods `success` and `error`.
* This should be used to make sure that applications work without these methods.
*
* Defaults to true. If no value is specified, returns the current configured value.
*
* @param {boolean=} value If true, `$http` will return a promise with the deprecated legacy `success` and `error` methods.
*
* @returns {boolean|Object} If a value is specified, returns the $httpProvider for chaining.
* otherwise, returns the current configured value.
**/
this.<API key> = function(value) {
if (isDefined(value)) {
useLegacyPromise = !!value;
return this;
}
return useLegacyPromise;
};
/**
* @ngdoc property
* @name $httpProvider#interceptors
* @description
*
* Array containing service factories for all synchronous or asynchronous {@link ng.$http $http}
* pre-processing of request or postprocessing of responses.
*
* These service factories are ordered by request, i.e. they are applied in the same order as the
* array, on request, but reverse order, on response.
*
* {@link ng.$http#interceptors Interceptors detailed info}
**/
var <API key> = this.interceptors = [];
this.$get = ['$httpBackend', '$$cookieReader', '$cacheFactory', '$rootScope', '$q', '$injector',
function($httpBackend, $$cookieReader, $cacheFactory, $rootScope, $q, $injector) {
var defaultCache = $cacheFactory('$http');
/**
* Make sure that default param serializer is exposed as a function
*/
defaults.paramSerializer = isString(defaults.paramSerializer) ?
$injector.get(defaults.paramSerializer) : defaults.paramSerializer;
/**
* Interceptors stored in reverse order. Inner interceptors before outer interceptors.
* The reversal is needed so that we can build up the interception chain around the
* server request.
*/
var <API key> = [];
forEach(<API key>, function(interceptorFactory) {
// String entries are injector service names; others are invocable factories.
<API key>.unshift(isString(interceptorFactory)
? $injector.get(interceptorFactory) : $injector.invoke(interceptorFactory));
});
// The $http service itself: takes a request config object and returns a
// promise resolved/rejected with a {data, status, headers, config,
// statusText} response object.
function $http(requestConfig) {
if (!isObject(requestConfig)) {
throw minErr('$http')('badreq', 'Http request configuration must be an object. Received: {0}', requestConfig);
}
// Per-request config inherits transforms and serializer from defaults.
var config = extend({
method: 'get',
transformRequest: defaults.transformRequest,
transformResponse: defaults.transformResponse,
paramSerializer: defaults.paramSerializer
}, requestConfig);
config.headers = mergeHeaders(requestConfig);
config.method = uppercase(config.method);
config.paramSerializer = isString(config.paramSerializer) ?
$injector.get(config.paramSerializer) : config.paramSerializer;
// The innermost step of the interceptor chain: apply the request
// transforms and dispatch via sendReq, then transform the response.
var serverRequest = function(config) {
var headers = config.headers;
var reqData = transformData(config.data, headersGetter(headers), undefined, config.transformRequest);
// strip content-type if data is undefined
if (isUndefined(reqData)) {
forEach(headers, function(value, header) {
if (lowercase(header) === 'content-type') {
delete headers[header];
}
});
}
if (isUndefined(config.withCredentials) && !isUndefined(defaults.withCredentials)) {
config.withCredentials = defaults.withCredentials;
}
// send request
return sendReq(config, reqData).then(transformResponse, transformResponse);
};
// The chain is a flat list of [fulfilled, rejected] handler pairs with
// the server request in the middle: request interceptors are unshifted
// before it, response interceptors pushed after it.
var chain = [serverRequest, undefined];
var promise = $q.when(config);
// apply interceptors
forEach(<API key>, function(interceptor) {
if (interceptor.request || interceptor.requestError) {
chain.unshift(interceptor.request, interceptor.requestError);
}
if (interceptor.response || interceptor.responseError) {
chain.push(interceptor.response, interceptor.responseError);
}
});
// Consume the chain pairwise, threading one promise through all stages.
while (chain.length) {
var thenFn = chain.shift();
var rejectFn = chain.shift();
promise = promise.then(thenFn, rejectFn);
}
if (useLegacyPromise) {
// Deprecated success/error shorthands; controlled by the provider flag.
promise.success = function(fn) {
assertArgFn(fn, 'fn');
promise.then(function(response) {
fn(response.data, response.status, response.headers, config);
});
return promise;
};
promise.error = function(fn) {
assertArgFn(fn, 'fn');
promise.then(null, function(response) {
fn(response.data, response.status, response.headers, config);
});
return promise;
};
} else {
// When disabled, calling the legacy methods throws a descriptive error.
promise.success = $httpMinErrLegacyFn('success');
promise.error = $httpMinErrLegacyFn('error');
}
return promise;
// Applies the response transforms; a non-2xx status turns the result
// into a rejection.
function transformResponse(response) {
// make a copy since the response must be cacheable
var resp = extend({}, response);
resp.data = transformData(response.data, response.headers, response.status,
config.transformResponse);
return (isSuccess(response.status))
? resp
: $q.reject(resp);
}
// Resolves any function-valued headers by calling them with the config;
// functions returning null/undefined remove the header.
function executeHeaderFns(headers, config) {
var headerContent, processedHeaders = {};
forEach(headers, function(headerFn, header) {
if (isFunction(headerFn)) {
headerContent = headerFn(config);
if (headerContent != null) {
processedHeaders[header] = headerContent;
}
} else {
processedHeaders[header] = headerFn;
}
});
return processedHeaders;
}
// Merges default headers (common + per-method) into the request's own
// headers; a request header wins over a default with the same
// case-insensitive name.
function mergeHeaders(config) {
var defHeaders = defaults.headers,
reqHeaders = extend({}, config.headers),
defHeaderName, <API key>, reqHeaderName;
defHeaders = extend({}, defHeaders.common, defHeaders[lowercase(config.method)]);
// using for-in instead of forEach to avoid unnecessary iteration after header has been found
// The labeled continue skips adding a default whose name is already
// present (case-insensitively) among the request headers.
<API key>:
for (defHeaderName in defHeaders) {
<API key> = lowercase(defHeaderName);
for (reqHeaderName in reqHeaders) {
if (lowercase(reqHeaderName) === <API key>) {
continue <API key>;
}
}
reqHeaders[defHeaderName] = defHeaders[defHeaderName];
}
// execute if header value is a function for merged headers
return executeHeaderFns(reqHeaders, shallowCopy(config));
}
}
$http.pendingRequests = [];
/**
* @ngdoc method
* @name $http#get
*
* @description
* Shortcut method to perform `GET` request.
*
* @param {string} url Relative or absolute URL specifying the destination of the request
* @param {Object=} config Optional configuration object
* @returns {HttpPromise} Future object
*/
/**
* @ngdoc method
* @name $http#delete
*
* @description
* Shortcut method to perform `DELETE` request.
*
* @param {string} url Relative or absolute URL specifying the destination of the request
* @param {Object=} config Optional configuration object
* @returns {HttpPromise} Future object
*/
/**
* @ngdoc method
* @name $http#head
*
* @description
* Shortcut method to perform `HEAD` request.
*
* @param {string} url Relative or absolute URL specifying the destination of the request
* @param {Object=} config Optional configuration object
* @returns {HttpPromise} Future object
*/
/**
* @ngdoc method
* @name $http#jsonp
*
* @description
* Shortcut method to perform `JSONP` request.
*
* @param {string} url Relative or absolute URL specifying the destination of the request.
* The name of the callback should be the string `JSON_CALLBACK`.
* @param {Object=} config Optional configuration object
* @returns {HttpPromise} Future object
*/
createShortMethods('get', 'delete', 'head', 'jsonp');
/**
* @ngdoc method
* @name $http#post
*
* @description
* Shortcut method to perform `POST` request.
*
* @param {string} url Relative or absolute URL specifying the destination of the request
* @param {*} data Request content
* @param {Object=} config Optional configuration object
* @returns {HttpPromise} Future object
*/
/**
* @ngdoc method
* @name $http#put
*
* @description
* Shortcut method to perform `PUT` request.
*
* @param {string} url Relative or absolute URL specifying the destination of the request
* @param {*} data Request content
* @param {Object=} config Optional configuration object
* @returns {HttpPromise} Future object
*/
/**
* @ngdoc method
* @name $http#patch
*
* @description
* Shortcut method to perform `PATCH` request.
*
* @param {string} url Relative or absolute URL specifying the destination of the request
* @param {*} data Request content
* @param {Object=} config Optional configuration object
* @returns {HttpPromise} Future object
*/
<API key>('post', 'put', 'patch');
/**
* @ngdoc property
* @name $http#defaults
*
* @description
* Runtime equivalent of the `$httpProvider.defaults` property. Allows configuration of
* default headers, withCredentials as well as request and response transformations.
*
* See "Setting HTTP Headers" and "Transforming Requests and Responses" sections above.
*/
$http.defaults = defaults;
return $http;
// Defines $http.<name>(url, config) shorthands for bodyless methods.
// Note: iterates `arguments`, not the single named parameter.
function createShortMethods(names) {
forEach(arguments, function(name) {
$http[name] = function(url, config) {
return $http(extend({}, config || {}, {
method: name,
url: url
}));
};
});
}
// Defines $http.<name>(url, data, config) shorthands for methods that
// carry a request body. (Name redacted in this copy.)
function <API key>(name) {
forEach(arguments, function(name) {
$http[name] = function(url, data, config) {
return $http(extend({}, config || {}, {
method: name,
url: url,
data: data
}));
};
});
}
/**
* Makes the request.
*
* !!! ACCESSES CLOSURE VARS:
* $httpBackend, defaults, $log, $rootScope, defaultCache, $http.pendingRequests
*/
function sendReq(config, reqData) {
var deferred = $q.defer(),
promise = deferred.promise,
cache,
cachedResp,
reqHeaders = config.headers,
url = buildUrl(config.url, config.paramSerializer(config.params));
$http.pendingRequests.push(config);
promise.then(removePendingReq, removePendingReq);
// Caching applies only to GET/JSONP, and only when enabled per-request
// or globally (config.cache === false opts out of the global default).
if ((config.cache || defaults.cache) && config.cache !== false &&
(config.method === 'GET' || config.method === 'JSONP')) {
cache = isObject(config.cache) ? config.cache
: isObject(defaults.cache) ? defaults.cache
: defaultCache;
}
if (cache) {
cachedResp = cache.get(url);
if (isDefined(cachedResp)) {
if (isPromiseLike(cachedResp)) {
// cached request has already been sent, but there is no response yet
cachedResp.then(<API key>, <API key>);
} else {
// serving from cache
if (isArray(cachedResp)) {
resolvePromise(cachedResp[1], cachedResp[0], shallowCopy(cachedResp[2]), cachedResp[3]);
} else {
resolvePromise(cachedResp, 200, {}, 'OK');
}
}
} else {
// put the promise for the non-transformed response into cache as a placeholder
cache.put(url, promise);
}
}
// if we won't have the response in cache, set the xsrf headers and
// send the request to the backend
if (isUndefined(cachedResp)) {
var xsrfValue = urlIsSameOrigin(config.url)
? $$cookieReader()[config.xsrfCookieName || defaults.xsrfCookieName]
: undefined;
if (xsrfValue) {
reqHeaders[(config.xsrfHeaderName || defaults.xsrfHeaderName)] = xsrfValue;
}
$httpBackend(config.method, url, reqData, done, reqHeaders, config.timeout,
config.withCredentials, config.responseType);
}
return promise;
/**
* Callback registered to $httpBackend():
* - caches the response if desired
* - resolves the raw $http promise
* - calls $apply
*/
function done(status, response, headersString, statusText) {
if (cache) {
if (isSuccess(status)) {
cache.put(url, [status, response, parseHeaders(headersString), statusText])
;
} else {
// remove promise from the cache
cache.remove(url);
}
}
function resolveHttpPromise() {
resolvePromise(response, status, headersString, statusText);
}
if (useApplyAsync) {
$rootScope.$applyAsync(resolveHttpPromise);
} else {
resolveHttpPromise();
if (!$rootScope.$$phase) $rootScope.$apply();
}
}
/**
* Resolves the raw $http promise.
*/
function resolvePromise(response, status, headers, statusText) {
//status: HTTP response status code, 0, -1 (aborted by timeout / promise)
status = status >= -1 ? status : 0;
(isSuccess(status) ? deferred.resolve : deferred.reject)({
data: response,
status: status,
headers: headersGetter(headers),
config: config,
statusText: statusText
});
}
// Adapter used when a cached in-flight promise settles: unpacks the
// response object back into resolvePromise's arguments. (Name redacted.)
function <API key>(result) {
resolvePromise(result.data, result.status, shallowCopy(result.headers()), result.statusText);
}
function removePendingReq() {
var idx = $http.pendingRequests.indexOf(config);
if (idx !== -1) $http.pendingRequests.splice(idx, 1);
}
}
// Appends the serialized params to the URL with '?' or '&' as appropriate.
function buildUrl(url, serializedParams) {
if (serializedParams.length > 0) {
url += ((url.indexOf('?') == -1) ? '?' : '&') + serializedParams;
}
return url;
}
}];
}
|
import { run } from '@ember/runloop';
import { guidFor, setName } from 'ember-utils';
import { context } from 'ember-environment';
import EmberObject from '../../../lib/system/object';
import Namespace from '../../../lib/system/namespace';
import { moduleFor, AbstractTestCase } from '<API key>';
let originalLookup = context.lookup;
let lookup;

// Tests for EmberObject/Namespace toString behavior.
//
// Fix: two test methods previously shared the exact computed name
// '@test toString on a namespace finds the namespace in lookup', so the
// second class-field silently overwrote the first and the first test never
// ran. The second test is renamed to describe what it actually checks
// (nested class resolution through a lookup-registered namespace).
moduleFor(
  'system/object/toString',
  class extends AbstractTestCase {
    beforeEach() {
      context.lookup = lookup = {};
    }

    afterEach() {
      context.lookup = originalLookup;
    }

    ['@test toString() returns the same value if called twice'](assert) {
      let Foo = Namespace.create();
      Foo.toString = function() {
        return 'Foo';
      };

      Foo.Bar = EmberObject.extend();
      assert.equal(Foo.Bar.toString(), 'Foo.Bar');
      assert.equal(Foo.Bar.toString(), 'Foo.Bar');

      let obj = Foo.Bar.create();

      assert.equal(obj.toString(), '<Foo.Bar:' + guidFor(obj) + '>');
      assert.equal(obj.toString(), '<Foo.Bar:' + guidFor(obj) + '>');

      assert.equal(Foo.Bar.toString(), 'Foo.Bar');

      run(Foo, 'destroy');
    }

    ['@test toString on a class returns a useful value when nested in a namespace'](assert) {
      let obj;

      let Foo = Namespace.create();
      Foo.toString = function() {
        return 'Foo';
      };

      Foo.Bar = EmberObject.extend();
      assert.equal(Foo.Bar.toString(), 'Foo.Bar');

      obj = Foo.Bar.create();
      assert.equal(obj.toString(), '<Foo.Bar:' + guidFor(obj) + '>');

      Foo.Baz = Foo.Bar.extend();
      assert.equal(Foo.Baz.toString(), 'Foo.Baz');

      obj = Foo.Baz.create();
      assert.equal(obj.toString(), '<Foo.Baz:' + guidFor(obj) + '>');

      obj = Foo.Bar.create();
      assert.equal(obj.toString(), '<Foo.Bar:' + guidFor(obj) + '>');

      run(Foo, 'destroy');
    }

    ['@test toString on a namespace finds the namespace in lookup'](assert) {
      let Foo = (lookup.Foo = Namespace.create());

      assert.equal(Foo.toString(), 'Foo');
      run(Foo, 'destroy');
    }

    ['@test toString on a nested class finds the namespace in lookup'](assert) {
      let Foo = (lookup.Foo = Namespace.create());
      let obj;

      Foo.Bar = EmberObject.extend();
      assert.equal(Foo.Bar.toString(), 'Foo.Bar');

      obj = Foo.Bar.create();
      assert.equal(obj.toString(), '<Foo.Bar:' + guidFor(obj) + '>');
      run(Foo, 'destroy');
    }

    ['@test toString on a namespace falls back to modulePrefix, if defined'](assert) {
      let Foo = Namespace.create({ modulePrefix: 'foo' });

      assert.equal(Foo.toString(), 'foo');
      run(Foo, 'destroy');
    }

    ['@test toString includes toStringExtension if defined'](assert) {
      let Foo = EmberObject.extend({
        toStringExtension() {
          return 'fooey';
        },
      });
      let foo = Foo.create();
      let Bar = EmberObject.extend({});
      let bar = Bar.create();

      // simulate these classes being defined on a Namespace
      setName(Foo, 'Foo');
      setName(Bar, 'Bar');

      assert.equal(
        bar.toString(),
        '<Bar:' + guidFor(bar) + '>',
        'does not include toStringExtension part'
      );
      assert.equal(
        foo.toString(),
        '<Foo:' + guidFor(foo) + ':fooey>',
        'Includes toStringExtension result'
      );
    }
  }
);
|
define(["../core","../manipulation"],function(e){function t(t,n){var o,a=e(n.createElement(t)).appendTo(n.body),d=window.<API key>&&(o=window.<API key>(a[0]))?o.display:e.css(a[0],"display");return a.detach(),d}function n(n){var d=document,r=a[n];return r||(r=t(n,d),"none"!==r&&r||(o=(o||e("<iframe frameborder='0' width='0' height='0'/>")).appendTo(d.documentElement),d=o[0].contentDocument,d.write(),d.close(),r=t(n,d),o.detach()),a[n]=r),r}var o,a={};return n});
|
using System.Linq;
using System.Reflection;
using System.Runtime.InteropServices;
using COM = System.Runtime.InteropServices.ComTypes;
// Disable obsolete warnings about VarEnum and COM-marshaling APIs in CoreCLR
#pragma warning disable 618
namespace System.Management.Automation
{
internal static class ComInvoker
{
// DISP HRESULTS - may be returned by IDispatch.Invoke
private const int DISP_E_EXCEPTION = unchecked((int)0x80020009);
// LCID for en-US culture
private const int LCID_DEFAULT = 0x0409;
// The dispatch identifier for a parameter that receives the value of an assignment in a PROPERTYPUT.
private const int DISPID_PROPERTYPUT = -3;
// Alias of GUID_NULL. It's a GUID set to all zero
private static readonly Guid s_IID_NULL = new Guid();
// Size of the Variant struct
private static readonly int s_variantSize = Marshal.SizeOf<Variant>();
<summary>
Make a by-Ref VARIANT value based on the passed-in VARIANT argument.
</summary>
<param name="srcVariantPtr">The source Variant pointer.</param>
<param name="destVariantPtr">The destination Variant pointer.</param>
// Builds a by-ref VARIANT at destVariantPtr that refers to the storage of the
// VARIANT at srcVariantPtr. VT_EMPTY/VT_NULL cannot carry VT_BYREF directly,
// so those (like VT_VARIANT) are passed as a reference to the whole source
// VARIANT instead; all other types get VT_BYREF OR'ed onto the source type.
private static unsafe void MakeByRefVariant(IntPtr srcVariantPtr, IntPtr destVariantPtr)
{
var srcVariant = (Variant*)srcVariantPtr;
var destVariant = (Variant*)destVariantPtr;
switch ((VarEnum)srcVariant->_typeUnion._vt)
{
case VarEnum.VT_EMPTY:
case VarEnum.VT_NULL:
// These cannot combine with VT_BYREF. Should try passing as a variant reference
// We follow the code in ComBinder to handle 'VT_EMPTY' and 'VT_NULL'
destVariant->_typeUnion._unionTypes._byref = new IntPtr(srcVariant);
destVariant->_typeUnion._vt = (ushort)VarEnum.VT_VARIANT | (ushort)VarEnum.VT_BYREF;
// Early return: the VT is fully set above, so skip the OR at the bottom.
return;
case VarEnum.VT_RECORD:
// Representation of record is the same with or without byref
destVariant->_typeUnion._unionTypes._record._record = srcVariant->_typeUnion._unionTypes._record._record;
destVariant->_typeUnion._unionTypes._record._recordInfo = srcVariant->_typeUnion._unionTypes._record._recordInfo;
break;
case VarEnum.VT_VARIANT:
destVariant->_typeUnion._unionTypes._byref = new IntPtr(srcVariant);
break;
case VarEnum.VT_DECIMAL:
// DECIMAL lives outside the union, so take its address explicitly.
destVariant->_typeUnion._unionTypes._byref = new IntPtr(&(srcVariant->_decimal));
break;
default:
// All the other cases start at the same offset (it's a Union) so using &_i4 should work.
// This is the same code as in CLR implementation. It could be &_i1, &_i2 and etc. CLR implementation just prefer using &_i4.
destVariant->_typeUnion._unionTypes._byref = new IntPtr(&(srcVariant->_typeUnion._unionTypes._i4));
break;
}
destVariant->_typeUnion._vt = (ushort)(srcVariant->_typeUnion._vt | (ushort)VarEnum.VT_BYREF);
}
<summary>
Alloc memory for a VARIANT array with the specified length.
Also initialize the VARIANT elements to be the type 'VT_EMPTY'.
</summary>
<param name="length">Array length.</param>
<returns>Pointer to the array.</returns>
// Allocates unmanaged (CoTaskMem) storage for `length` VARIANTs and marks
// each element VT_EMPTY. The caller owns the memory and must free it with
// Marshal.FreeCoTaskMem after clearing the variants.
private static unsafe IntPtr NewVariantArray(int length)
{
IntPtr variantArray = Marshal.AllocCoTaskMem(s_variantSize * length);
for (int i = 0; i < length; i++)
{
// Stamp each slot VT_EMPTY so uninitialized memory is never treated
// as a live variant.
IntPtr currentVarPtr = variantArray + s_variantSize * i;
var currentVar = (Variant*)currentVarPtr;
currentVar->_typeUnion._vt = (ushort)VarEnum.VT_EMPTY;
}
return variantArray;
}
<summary>
Generate the ByRef array indicating whether the corresponding argument is by-reference.
</summary>
<param name="parameters">Parameters retrieved from metadata.</param>
<param name="argumentCount">Count of arguments to pass in IDispatch.Invoke.</param>
<param name="isPropertySet">Indicate if we are handling arguments for PropertyPut/PropertyPutRef.</param>
<returns>Boolean array parallel to the arguments, or null when there are no declared parameters.
NOTE(review): null is returned even when argumentCount > 0 — callers appear to treat
null as "no by-ref arguments"; confirm against call sites.</returns>
internal static bool[] GetByRefArray(<API key>[] parameters, int argumentCount, bool isPropertySet)
{
    if (parameters.Length == 0)
    {
        return null;
    }
    var byRef = new bool[argumentCount];
    int argsToProcess = argumentCount;
    if (isPropertySet)
    {
        // If it's PropertySet, then the last value in arguments is the right-hand side value.
        // There is no corresponding parameter for that value, so it's for sure not by-ref.
        // Hence, set the last item of byRef array to be false.
        argsToProcess = argumentCount - 1;
        byRef[argsToProcess] = false;
    }
    Diagnostics.Assert(parameters.Length >= argsToProcess,
        "There might be more parameters than argsToProcess due unspecified optional arguments");
    for (int i = 0; i < argsToProcess; i++)
    {
        byRef[i] = parameters[i].isByRef;
    }
    return byRef;
}
<summary>
Invoke the COM member.
</summary>
<param name="target">IDispatch object.</param>
<param name="dispId">Dispatch identifier that identifies the member.</param>
<param name="args">Arguments passed in.</param>
<param name="byRef">Boolean array that indicates by-Ref parameters.</param>
<param name="invokeKind">Invocation kind.</param>
<returns>The value produced by IDispatch.Invoke (null when the member returns nothing).
By-ref argument slots in 'args' are overwritten with the values written back by the callee.</returns>
internal static object Invoke(IDispatch target, int dispId, object[] args, bool[] byRef, COM.INVOKEKIND invokeKind)
{
    Diagnostics.Assert(target != null, "Caller makes sure an IDispatch object passed in.");
    Diagnostics.Assert(args == null || byRef == null || args.Length == byRef.Length,
        "If 'args' and 'byRef' are not null, then they should be one-on-one mapping.");
    int argCount = args != null ? args.Length : 0;
    int refCount = byRef != null ? byRef.Count(c => c) : 0;
    // All three native buffers are released in the finally block below.
    IntPtr variantArgArray = IntPtr.Zero, dispIdArray = IntPtr.Zero, tmpVariants = IntPtr.Zero;
    try
    {
        // Package arguments
        if (argCount > 0)
        {
            variantArgArray = NewVariantArray(argCount);
            int refIndex = 0;
            for (int i = 0; i < argCount; i++)
            {
                // !! The arguments should be in REVERSED order!!
                int actualIndex = argCount - i - 1;
                IntPtr varArgPtr = variantArgArray + s_variantSize * actualIndex;
                // If need to pass by ref, create a by-ref variant
                if (byRef != null && byRef[i])
                {
                    // Allocate memory for temporary VARIANTs used in by-ref marshalling
                    if (tmpVariants == IntPtr.Zero)
                    {
                        tmpVariants = NewVariantArray(refCount);
                    }
                    // Create a VARIANT that the by-ref VARIANT points to
                    IntPtr tmpVarPtr = tmpVariants + s_variantSize * refIndex;
                    Marshal.<API key>(args[i], tmpVarPtr);
                    // Create the by-ref VARIANT
                    MakeByRefVariant(tmpVarPtr, varArgPtr);
                    refIndex++;
                }
                else
                {
                    Marshal.<API key>(args[i], varArgPtr);
                }
            }
        }
        var paramArray = new COM.DISPPARAMS[1];
        paramArray[0].rgvarg = variantArgArray;
        paramArray[0].cArgs = argCount;
        if (invokeKind == COM.INVOKEKIND.INVOKE_PROPERTYPUT || invokeKind == COM.INVOKEKIND.<API key>)
        {
            // For property putters, the first DISPID argument needs to be DISPID_PROPERTYPUT
            dispIdArray = Marshal.AllocCoTaskMem(4); // Allocate 4 bytes to hold a 32-bit signed integer
            Marshal.WriteInt32(dispIdArray, DISPID_PROPERTYPUT);
            paramArray[0].cNamedArgs = 1;
            paramArray[0].rgdispidNamedArgs = dispIdArray;
        }
        else
        {
            // Otherwise, no named parameters are necessary since powershell parser doesn't support named parameter
            paramArray[0].cNamedArgs = 0;
            paramArray[0].rgdispidNamedArgs = IntPtr.Zero;
        }
        // Make the call
        EXCEPINFO info = default(EXCEPINFO);
        object result = null;
        try
        {
            // 'puArgErr' is set when IDispatch.Invoke fails with error code '<API key>' and 'DISP_E_TYPEMISMATCH'.
            // Appropriate exceptions will be thrown in such cases, but FullCLR doesn't use 'puArgErr' in the exception handling, so we also ignore it.
            uint puArgErrNotUsed = 0;
            target.Invoke(dispId, s_IID_NULL, LCID_DEFAULT, invokeKind, paramArray, out result, out info, out puArgErrNotUsed);
        }
        catch (Exception innerException)
        {
            // When 'IDispatch.Invoke' returns error code, CLR will raise exception based on internal HR-to-Exception mapping.
            // According to CoreCLR team (yzha), the exception needs to be wrapped as an inner exception of <API key>.
            string exceptionMsg = null;
            if (innerException.HResult == DISP_E_EXCEPTION)
            {
                // Invoke was successful but the actual underlying method failed.
                // In this case, we use EXCEPINFO to get additional error info.
                // Use EXCEPINFO.scode or EXCEPINFO.wCode as HR to construct the correct exception.
                int code = info.scode != 0 ? info.scode : info.wCode;
                innerException = Marshal.GetExceptionForHR(code, IntPtr.Zero) ?? innerException;
                // Get the richer error description if it's available.
                if (info.bstrDescription != IntPtr.Zero)
                {
                    exceptionMsg = Marshal.PtrToStringBSTR(info.bstrDescription);
                    Marshal.FreeBSTR(info.bstrDescription);
                }
                // Free the BSTRs
                if (info.bstrSource != IntPtr.Zero)
                {
                    Marshal.FreeBSTR(info.bstrSource);
                }
                if (info.bstrHelpFile != IntPtr.Zero)
                {
                    Marshal.FreeBSTR(info.bstrHelpFile);
                }
            }
            var outerException = exceptionMsg == null
                ? new <API key>(innerException)
                : new <API key>(exceptionMsg, innerException);
            throw outerException;
        }
        // Now back propagate the by-ref arguments
        if (refCount > 0)
        {
            for (int i = 0; i < argCount; i++)
            {
                // !! The arguments should be in REVERSED order!!
                int actualIndex = argCount - i - 1;
                // If need to pass by ref, back propagate
                if (byRef != null && byRef[i])
                {
                    args[i] = Marshal.<API key>(variantArgArray + s_variantSize * actualIndex);
                }
            }
        }
        return result;
    }
    finally
    {
        // Free the variant argument array
        if (variantArgArray != IntPtr.Zero)
        {
            for (int i = 0; i < argCount; i++)
            {
                VariantClear(variantArgArray + s_variantSize * i);
            }
            Marshal.FreeCoTaskMem(variantArgArray);
        }
        // Free the dispId array
        if (dispIdArray != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(dispIdArray);
        }
        // Free the temporary variants created when handling by-Ref arguments
        if (tmpVariants != IntPtr.Zero)
        {
            for (int i = 0; i < refCount; i++)
            {
                VariantClear(tmpVariants + s_variantSize * i);
            }
            Marshal.FreeCoTaskMem(tmpVariants);
        }
    }
}
<summary>
Clear variables of type VARIANTARG (or VARIANT) before the memory containing the VARIANTARG is freed.
Releases any resources the VARIANT owns (BSTRs, interface pointers, etc.).
</summary>
<param name="pVariant">Pointer to the VARIANT to clear.</param>
[DllImport("oleaut32.dll")]
internal static extern void VariantClear(IntPtr pVariant);
<summary>
Managed mirror of the native EXCEPINFO structure filled in by IDispatch.Invoke.
We have to declare 'bstrSource', 'bstrDescription' and 'bstrHelpFile' as pointers because
CLR marshalling layer would try to free those BSTRs by default and that is not correct.
Therefore, manually marshalling might be needed to extract 'bstrDescription'.
</summary>
[StructLayout(LayoutKind.Sequential)]
internal struct EXCEPINFO
{
    public short wCode;          // error code, used when scode is 0
    public short wReserved;
    public IntPtr bstrSource;    // BSTR: source of the exception (freed manually)
    public IntPtr bstrDescription; // BSTR: human-readable description (freed manually)
    public IntPtr bstrHelpFile;  // BSTR: help file path (freed manually)
    public int dwHelpContext;
    public IntPtr pvReserved;
    public IntPtr pfnDeferredFillIn;
    public int scode;            // HRESULT of the failure; preferred over wCode when non-zero
}
<summary>
VARIANT type used for passing arguments in COM interop.
Layout must match the native VARIANT exactly; do not reorder fields or offsets.
</summary>
[StructLayout(LayoutKind.Explicit)]
internal struct Variant
{
    // Most of the data types in the Variant are carried in _typeUnion
    [FieldOffset(0)]
    internal TypeUnion _typeUnion;
    // Decimal is the largest data type and it needs to use the space that is normally unused in TypeUnion._wReserved1, etc.
    // Hence, it is declared to completely overlap with TypeUnion. A Decimal does not use the first two bytes, and so
    // TypeUnion._vt can still be used to encode the type.
    [FieldOffset(0)]
    internal Decimal _decimal;
    [StructLayout(LayoutKind.Explicit)]
    internal struct TypeUnion
    {
        [FieldOffset(0)]
        internal ushort _vt;          // VARTYPE discriminator (VarEnum)
        [FieldOffset(2)]
        internal ushort _wReserved1;
        [FieldOffset(4)]
        internal ushort _wReserved2;
        [FieldOffset(6)]
        internal ushort _wReserved3;
        [FieldOffset(8)]
        internal UnionTypes _unionTypes;  // the actual payload, selected by _vt
    }
    [StructLayout(LayoutKind.Sequential)]
    internal struct Record
    {
        internal IntPtr _record;
        internal IntPtr _recordInfo;
    }
    // All members overlap at offset 0 — this models the native VARIANT union.
    [StructLayout(LayoutKind.Explicit)]
    internal struct UnionTypes
    {
        [FieldOffset(0)]
        internal sbyte _i1;
        [FieldOffset(0)]
        internal Int16 _i2;
        [FieldOffset(0)]
        internal Int32 _i4;
        [FieldOffset(0)]
        internal Int64 _i8;
        [FieldOffset(0)]
        internal byte _ui1;
        [FieldOffset(0)]
        internal UInt16 _ui2;
        [FieldOffset(0)]
        internal UInt32 _ui4;
        [FieldOffset(0)]
        internal UInt64 _ui8;
        [FieldOffset(0)]
        internal Int32 _int;
        [FieldOffset(0)]
        internal UInt32 _uint;
        [FieldOffset(0)]
        internal Int16 _bool;
        [FieldOffset(0)]
        internal Int32 _error;
        [FieldOffset(0)]
        internal Single _r4;
        [FieldOffset(0)]
        internal double _r8;
        [FieldOffset(0)]
        internal Int64 _cy;
        [FieldOffset(0)]
        internal double _date;
        [FieldOffset(0)]
        internal IntPtr _bstr;
        [FieldOffset(0)]
        internal IntPtr _unknown;
        [FieldOffset(0)]
        internal IntPtr _dispatch;
        [FieldOffset(0)]
        internal IntPtr _pvarVal;
        [FieldOffset(0)]
        internal IntPtr _byref;
        [FieldOffset(0)]
        internal Record _record;
    }
}
}
}
|
#nullable enable
using System.Collections.Generic;
namespace Microsoft.CodeAnalysis.CSharp
{
public static partial class SyntaxFacts
{
    <summary>
    Boxing-free equality comparer for <see cref="SyntaxKind"/> values.
    </summary>
    private sealed class <API key> : IEqualityComparer<SyntaxKind>
    {
        public bool Equals(SyntaxKind x, SyntaxKind y) => x == y;

        public int GetHashCode(SyntaxKind obj) => (int)obj;
    }
    <summary>
    A custom equality comparer for <see cref="SyntaxKind"/>
    </summary>
    <remarks>
    PERF: The framework specializes EqualityComparer for enums, but only if the underlying type is System.Int32
    Since SyntaxKind's underlying type is System.UInt16, <API key> will be chosen instead.
    </remarks>
    public static IEqualityComparer<SyntaxKind> EqualityComparer { get; } = new <API key>();
}
}
|
#!/usr/bin/env node
'use strict';
var fs = require('fs'),
    path = require('path'),
    exec = require('child_process').exec,
    chalk = require('chalk'),
    Table = require('cli-table');
// Benchmark fixtures: each name maps to benchmarks/<name>.html.
var fileNames = [
    'abc',
    'amazon',
    //'eloquentjavascript',
    //'es6-draft',
    'es6-table',
    'google',
    'html-minifier',
    'msn',
    'newyorktimes',
    'stackoverflow',
    'wikipedia',
    'es6'
];
// Sort descending so that pop() below processes files in ascending name order.
fileNames = fileNames.sort().reverse();
// Results table printed once all files have been processed.
var table = new Table({
    head: ['File', 'Before', 'After', 'Savings', 'Time'],
    colWidths: [20, 25, 25, 20, 20]
});
// Convert a byte count to kilobytes, rendered with two decimal places.
function toKb(size) {
    var kilobytes = size / 1024;
    return kilobytes.toFixed(2);
}
// Format a byte count in bold red, followed by the KB equivalent in white.
function redSize(size) {
    var suffix = ' (' + toKb(size) + ' KB)';
    return chalk.red.bold(size) + chalk.white(suffix);
}
// Format a byte count in bold green, followed by the KB equivalent in white.
function greenSize(size) {
    var suffix = ' (' + toKb(size) + ' KB)';
    return chalk.green.bold(size) + chalk.white(suffix);
}
// Format the relative savings (percent, bold cyan) and the absolute KB reduction.
function blueSavings(oldSize, newSize) {
    var percent = (1 - newSize / oldSize) * 100;
    var savedKb = (oldSize - newSize) / 1024;
    return chalk.cyan.bold(percent.toFixed(2)) + chalk.white('% (' + savedKb.toFixed(2) + ' KB)');
}
// Format an elapsed-milliseconds value in bold cyan with a white unit suffix.
function blueTime(time) {
    var unit = ' ms';
    return chalk.cyan.bold(time) + chalk.white(unit);
}
// Benchmark one fixture: measure original size, gzipped size, minified size and
// minified+gzipped size, push a table row, then call done() to continue the queue.
// A falsy fileName means the queue is empty — print the table and stop.
function test(fileName, done) {
    if (!fileName) {
        console.log('\n' + table.toString());
        return;
    }
    console.log('Processing...', fileName);
    var filePath = path.join('benchmarks/', fileName + '.html');
    var minifiedFilePath = path.join('benchmarks/generated/', fileName + '.min.html');
    var gzFilePath = path.join('benchmarks/generated/', fileName + '.html.gz');
    var gzMinifiedFilePath = path.join('benchmarks/generated/', fileName + '.min.html.gz');
    var command = path.normalize('./cli.js') + ' ' + filePath + ' -c benchmark.conf' + ' -o ' + minifiedFilePath;
    // Open and read the size of the original input
    fs.stat(filePath, function (err, stats) {
        if (err) {
            throw new Error('There was an error reading ' + filePath);
        }
        var originalSize = stats.size;
        // NOTE(review): exec errors are ignored throughout; a failed gzip/minify
        // surfaces later as a stat failure on the missing output file.
        exec('gzip --keep --force --best --stdout ' + filePath + ' > ' + gzFilePath, function () {
            // Open and read the size of the gzipped original
            fs.stat(gzFilePath, function (err, stats) {
                if (err) {
                    throw new Error('There was an error reading ' + gzFilePath);
                }
                var gzOriginalSize = stats.size;
                // Begin timing after gzipped fixtures have been created
                var startTime = new Date();
                exec('node ' + command, function () {
                    // Open and read the size of the minified output
                    fs.stat(minifiedFilePath, function (err, stats) {
                        if (err) {
                            throw new Error('There was an error reading ' + minifiedFilePath);
                        }
                        var minifiedSize = stats.size;
                        var minifiedTime = new Date() - startTime;
                        // Gzip the minified output
                        exec('gzip --keep --force --best --stdout ' + minifiedFilePath + ' > ' + gzMinifiedFilePath, function () {
                            // Open and read the size of the minified+gzipped output
                            fs.stat(gzMinifiedFilePath, function (err, stats) {
                                if (err) {
                                    throw new Error('There was an error reading ' + gzMinifiedFilePath);
                                }
                                var gzMinifiedSize = stats.size;
                                // Includes minify time as well, since both share startTime.
                                var gzMinifiedTime = new Date() - startTime;
                                table.push([
                                    [fileName, '+ gzipped'].join('\n'),
                                    [redSize(originalSize), redSize(gzOriginalSize)].join('\n'),
                                    [greenSize(minifiedSize), greenSize(gzMinifiedSize)].join('\n'),
                                    [blueSavings(originalSize, minifiedSize), blueSavings(gzOriginalSize, gzMinifiedSize)].join('\n'),
                                    [blueTime(minifiedTime), blueTime(gzMinifiedTime)].join('\n')
                                ]);
                                done();
                            });
                        });
                    });
                });
            });
        });
    });
}
// Drain the queue one file at a time: `test` invokes `run` again when a file
// finishes, and prints the results table once `fileNames` is empty.
(function run() {
    test(fileNames.pop(), run);
})();
|
#include "tomcrypt.h"
/**
@file hmac_memory.c
LTC_HMAC support, process a block of memory, Tom St Denis/Dobes Vandermeer
*/
#ifdef LTC_HMAC
/**
   LTC_HMAC a block of memory to produce the authentication tag
   @param hash    The index of the hash to use
   @param key     The secret key
   @param keylen  The length of the secret key (octets)
   @param in      The data to LTC_HMAC
   @param inlen   The length of the data to LTC_HMAC (octets)
   @param out     [out] Destination of the authentication tag
   @param outlen  [in/out] Max size and resulting size of authentication tag
   @return CRYPT_OK if successful
*/
int hmac_memory(int hash,
                const unsigned char *key,  unsigned long keylen,
                const unsigned char *in,   unsigned long inlen,
                unsigned char *out,  unsigned long *outlen)
{
    hmac_state *state;
    int         result;

    LTC_ARGCHK(key    != NULL);
    LTC_ARGCHK(in     != NULL);
    LTC_ARGCHK(out    != NULL);
    LTC_ARGCHK(outlen != NULL);

    /* the hash descriptor must be registered and valid */
    if ((result = hash_is_valid(hash)) != CRYPT_OK) {
       return result;
    }

    /* prefer an accelerated whole-block HMAC if the descriptor provides one */
    if (hash_descriptor[hash].hmac_block != NULL) {
       return hash_descriptor[hash].hmac_block(key, keylen, in, inlen, out, outlen);
    }

    /* fall back to the generic init/process/done sequence on heap-allocated state */
    state = XMALLOC(sizeof(hmac_state));
    if (state == NULL) {
       return CRYPT_MEM;
    }

    /* short-circuits on the first failure; 'result' holds the final status */
    if ((result = hmac_init(state, hash, key, keylen)) == CRYPT_OK &&
        (result = hmac_process(state, in, inlen))      == CRYPT_OK) {
       result = hmac_done(state, out, outlen);
    }

#ifdef LTC_CLEAN_STACK
    zeromem(state, sizeof(hmac_state));
#endif
    XFREE(state);
    return result;
}
#endif
/* $Source: /cvs/libtom/libtomcrypt/src/mac/hmac/hmac_memory.c,v $ */
/* $Revision: 1.8 $ */
|
steal('can/util', 'can/observe', function(can) {
// ** - 'this' will be the deepest item changed
// * - 'this' will be any changes within *, but * will be the
// this returned
// tells if the parts part of a delegate matches the broken up props of the event
// gives the prop to use as 'this'
// - parts - the attribute name of the delegate split in parts ['foo','*']
// - props - the split props of the event that happened ['foo','bar','0']
// - returns - the attribute to delegate too ('foo.bar'), or null if not a match
// matches(parts, props) -> String|null
//   parts - the attribute name of the delegate split in parts ['foo','*']
//   props - the split props of the event that happened ['foo','bar','0']
//   returns the attribute to delegate to ('foo.bar'), or null if not a match
var matches = function(parts, props){
    //check props parts are the same or
    var len = parts.length,
        i =0,
        // keeps the matched props we will use
        matchedProps = [],
        prop;
    // if the event matches
    for(i; i< len; i++){
        prop = props[i]
        // if no more props (but we should be matching them)
        // return null
        if( typeof prop !== 'string' ) {
            return null;
        } else
        // if we have a "**", match everything
        if( parts[i] == "**" ) {
            return props.join(".");
        } else
        // a match, but we want to delegate to "*"
        if (parts[i] == "*"){
            // only do this if there is nothing after ...
            matchedProps.push(prop);
        }
        else if(  prop === parts[i] ) {
            matchedProps.push(prop);
        } else {
            return null;
        }
    }
    return matchedProps.join(".");
},
    // delegate(event, prop, how, newVal, oldVal) — bound to "change"; inspects every
    // registered delegate on this observe and fires the ones whose selector matches.
    delegate = function(event, prop, how, newVal, oldVal){
        // pre-split properties to save some regexp time
        var props = prop.split("."),
            delegates = (this._observe_delegates || []).slice(0),
            delegate,
            attr,
            matchedAttr,
            hasMatch,
            valuesEqual;
        event.attr = prop;
        event.lastAttr = props[props.length -1 ];
        // for each delegate
        for(var i =0; delegate = delegates[i++];){
            // if there is a batchNum, this means that this
            // event is part of a series of events caused by a single
            // attrs call. We don't want to issue the same event
            // multiple times
            // setting the batchNum happens later
            if((event.batchNum && delegate.batchNum === event.batchNum) || delegate.undelegated ){
                continue;
            }
            // reset match and values tests
            hasMatch = undefined;
            valuesEqual = true;
            // for each attr in a delegate
            for(var a =0 ; a < delegate.attrs.length; a++){
                attr = delegate.attrs[a];
                // check if it is a match
                if(matchedAttr = matches(attr.parts, props)){
                    hasMatch = matchedAttr;
                }
                // if it has a value, make sure it's the right value
                // if it's set, we should probably check that it has a
                // value no matter what
                if(attr.value && valuesEqual /* || delegate.hasValues */){
                    valuesEqual = attr.value === ""+this.attr(attr.attr)
                } else if (valuesEqual && delegate.attrs.length > 1){
                    // if there are multiple attributes, each has to at
                    // least have some value
                    valuesEqual = this.attr(attr.attr) !== undefined
                }
            }
            // if there is a match and valuesEqual ... call back
            if(hasMatch && valuesEqual) {
                // how to get to the changed property from the delegate
                var from = prop.replace(hasMatch+".","");
                // if this event is part of a batch, set it on the delegate
                // to only send one event
                if(event.batchNum ){
                    delegate.batchNum = event.batchNum
                }
                // if we listen to change, fire those with the same attrs
                // TODO: the attrs should probably be using from
                if( delegate.event === 'change' ){
                    arguments[1] = from;
                    event.curAttr = hasMatch;
                    delegate.callback.apply(this.attr(hasMatch), can.makeArray( arguments));
                } else if(delegate.event === how ){
                    // if it's a match, callback with the location of the match
                    delegate.callback.apply(this.attr(hasMatch), [event,newVal, oldVal, from]);
                } else if(delegate.event === 'set' &&
                    how == 'add' ) {
                    // if we are listening to set, we should also listen to add
                    delegate.callback.apply(this.attr(hasMatch), [event,newVal, oldVal, from]);
                }
            }
        }
    };
can.extend(can.Observe.prototype,{
/**
* @function can.Observe.prototype.delegate
* @parent can.Observe.delegate
* @plugin can/observe/delegate
*
* `delegate( selector, event, handler(ev,newVal,oldVal,from) )` listen for changes
* in a child attribute from the parent. The child attribute
* does not have to exist.
*
*
* // create an observable
* var observe = can.Observe({
* foo : {
* bar : "Hello World"
* }
* })
*
* //listen to changes on a property
* observe.delegate("foo.bar","change", function(ev, prop, how, newVal, oldVal){
* // foo.bar has been added, set, or removed
* this //->
* });
*
* // change the property
* observe.attr('foo.bar',"Goodbye Cruel World")
*
* ## Types of events
*
* Delegate lets you listen to add, set, remove, and change events on property.
*
* __add__
*
* An add event is fired when a new property has been added.
*
* var o = new can.Control({});
* o.delegate("name","add", function(ev, value){
* // called once
* can.$('#name').show()
* })
* o.attr('name',"Justin")
* o.attr('name',"Brian");
*
* Listening to add events is useful for 'setup' functionality (in this case
* showing the <code>#name</code> element.
*
* __set__
*
* Set events are fired when a property takes on a new value. set events are
* always fired after an add.
*
* o.delegate("name","set", function(ev, value){
* // called twice
* can.$('#name').text(value)
* })
* o.attr('name',"Justin")
* o.attr('name',"Brian");
*
* __remove__
*
* Remove events are fired after a property is removed.
*
* o.delegate("name","remove", function(ev){
* // called once
* $('#name').text(value)
* })
* o.attr('name',"Justin");
* o.removeAttr('name');
*
* ## Wildcards - matching multiple properties
*
* Sometimes, you want to know when any property within some part
* of an observe has changed. Delegate lets you use wildcards to
* match any property name. The following listens for any change
* on an attribute of the params attribute:
*
* var o = can.Control({
* options : {
* limit : 100,
* offset: 0,
* params : {
* parentId: 5
* }
* }
* })
* o.delegate('options.*','change', function(){
* alert('1');
* })
* o.delegate('options.**','change', function(){
* alert('2');
* })
*
* // alerts 1
* // alerts 2
* o.attr('options.offset',100)
*
* // alerts 2
* o.attr('options.params.parentId',6);
*
* Using a single wildcard (<code>*</code>) matches single level
* properties. Using a double wildcard (<code>**</code>) matches
* any deep property.
*
* ## Listening on multiple properties and values
*
* Delegate lets you listen on multiple values at once. The following listens
* for first and last name changes:
*
* var o = new can.Observe({
* name : {first: "Justin", last: "Meyer"}
* })
*
* o.bind("name.first,name.last",
* "set",
* function(ev,newVal,oldVal,from){
*
* })
*
* ## Listening when properties are a particular value
*
* Delegate lets you listen when a property is __set__ to a specific value:
*
* var o = new can.Observe({
* name : "Justin"
* })
*
* o.bind("name=Brian",
* "set",
* function(ev,newVal,oldVal,from){
*
* })
*
* @param {String} selector The attributes you want to listen for changes in.
*
* Selector should be the property or
* property names of the element you are searching. Examples:
*
* "name" - listens to the "name" property changing
* "name, address" - listens to "name" or "address" changing
* "name address" - listens to "name" or "address" changing
* "address.*" - listens to property directly in address
* "address.**" - listens to any property change in address
* "foo=bar" - listens when foo is "bar"
*
* @param {String} event The event name. One of ("set","add","remove","change")
* @param {Function} handler(ev,newVal,oldVal,prop) The callback handler
* called with:
*
* - newVal - the new value set on the observe
* - oldVal - the old value set on the observe
* - prop - the prop name that was changed
*
* @return {jQuery.Delegate} the delegate for chaining
*/
// Registers a delegated handler; parses `selector` into attr/value pairs
// and binds the module-level `delegate` dispatcher on first registration.
delegate : function(selector, event, handler){
    selector = can.trim(selector);
    var delegates = this._observe_delegates || (this._observe_delegates = []),
        attrs = [];
    // split selector by spaces; each token is "attr" or "attr=value"
    selector.replace(/([^\s=]+)=?([^\s]+)?/g, function(whole, attr, value){
        attrs.push({
            // the attribute name
            attr: attr,
            // the attribute's pre-split names (for speed)
            parts: attr.split('.'),
            // the value associated with this prop (undefined when no "=value")
            value: value
        })
    });
    // delegates has pre-processed info about the event
    delegates.push({
        // the attrs name for unbinding
        selector : selector,
        // an object of attribute names and values {type: 'recipe',id: undefined}
        // undefined means a value was not defined
        attrs : attrs,
        callback : handler,
        event: event
    });
    // only bind the shared "change" dispatcher once per observe
    if(delegates.length === 1){
        this.bind("change",delegate)
    }
    return this;
},
/**
* @function can.Observe.prototype.undelegate
* @parent can.Observe.delegate
*
* `undelegate( selector, event, handler )` removes a delegated event handler from an observe.
*
* observe.undelegate("name","set", handler )
*
* @param {String} selector the attribute name of the object you want to undelegate from.
* @param {String} event the event name
* @param {Function} handler the callback handler
* @return {jQuery.Delegate} the delegate for chaining
*/
undelegate : function(selector, event, handler){
selector = can.trim(selector);
var i =0,
delegates = this._observe_delegates || [],
delegateOb;
if(selector){
while(i < delegates.length){
delegateOb = delegates[i];
if( delegateOb.callback === handler ||
(!handler && delegateOb.selector === selector) ){
delegateOb.undelegated = true;
delegates.splice(i,1)
} else {
i++;
}
}
} else {
// remove all delegates
delegates = [];
}
if(!delegates.length){
//can.removeData(this, "_observe_delegates");
this.unbind("change",delegate)
}
return this;
}
});
// Expose `matches` on the prototype method so unit tests can exercise it directly.
can.Observe.prototype.delegate.matches = matches;
return can.Observe;
})
|
namespace Microsoft.PowerShell.Commands
{
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Management.Automation;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
using Microsoft.PowerShell.Commands.<API key>;
<summary>
Show-Command displays a GUI for a cmdlet, or for all cmdlets if no specific cmdlet is specified.
</summary>
[Cmdlet(VerbsCommon.Show, "Command", HelpUri = "https://go.microsoft.com/fwlink/?LinkID=217448")]
public class ShowCommandCommand : PSCmdlet, IDisposable
{
#region Private Fields
<summary>
Set to true when ProcessRecord is reached, since it will always open a window.
</summary>
private bool _hasOpenedWindow;
<summary>
Determines if the command should be sent to the pipeline as a string instead of run.
</summary>
private bool _passThrough;
<summary>
Uses ShowCommandProxy to invoke WPF GUI object.
</summary>
private ShowCommandProxy _showCommandProxy;
<summary>
Data container for all cmdlets. This is populated when show-command is called with no command name.
</summary>
private List<<API key>> _commands;
<summary>
List of modules that have been loaded, indexed by module name.
</summary>
private Dictionary<string, <API key>> _importedModules;
<summary>
Record the EndProcessing error stream; displayed in a popup when ErrorPopup is set.
</summary>
private PSDataCollection<ErrorRecord> _errors = new PSDataCollection<ErrorRecord>();
<summary>
Field used for the NoCommonParameter parameter.
</summary>
private SwitchParameter _noCommonParameter;
<summary>
Object used for ShowCommand with a command name that holds the view model created for the command.
</summary>
private object <API key>;
#endregion
<summary>
Finalizes an instance of the ShowCommandCommand class; releases unmanaged resources only.
</summary>
~ShowCommandCommand()
{
    this.Dispose(false);
}
#region Input Cmdlet Parameter
<summary>
Gets or sets the command name.
</summary>
[Parameter(Position = 0)]
[Alias("CommandName")]
public string Name { get; set; }
<summary>
Gets or sets the Height.
</summary>
[Parameter]
[ValidateRange(300, Int32.MaxValue)]
public double Height { get; set; }
<summary>
Gets or sets the Width.
</summary>
[Parameter]
[ValidateRange(300, Int32.MaxValue)]
public double Width { get; set; }
<summary>
Gets or sets a value indicating Common Parameters should not be displayed.
</summary>
[Parameter]
public SwitchParameter NoCommonParameter
{
    get { return _noCommonParameter; }
    set { _noCommonParameter = value; }
}
<summary>
Gets or sets a value indicating errors should not cause a message window to be displayed.
</summary>
[Parameter]
public SwitchParameter ErrorPopup { get; set; }
<summary>
Gets or sets a value indicating the command should be sent to the pipeline as a string instead of run.
</summary>
[Parameter]
public SwitchParameter PassThru
{
    get { return _passThrough; }
    set { _passThrough = value; }
} // PassThru
#endregion
#region Public and Protected Methods
<summary>
Executes a PowerShell script, writing the output objects to the pipeline.
The execution strategy depends on the cmdlet switches: PassThru emits the
script text instead of running it; ErrorPopup runs it silently with error
hooks; otherwise the host window or console impersonation is used.
</summary>
<param name="script">Script to execute</param>
public void RunScript(string script)
{
    // Nothing to do before BeginProcessing has run or when there is no script.
    if (_showCommandProxy == null || string.IsNullOrEmpty(script))
    {
        return;
    }
    if (_passThrough)
    {
        this.WriteObject(script);
        return;
    }
    if (ErrorPopup)
    {
        this.<API key>(script);
        return;
    }
    if (_showCommandProxy.HasHostWindow)
    {
        // Try the host window first; fall back to the silent runner on failure.
        if (!_showCommandProxy.<API key>(script))
        {
            this.<API key>(script);
        }
        return;
    }
    if (!<API key>.<API key>(script, true))
    {
        this.WriteDebug(<API key>.<API key>);
        this.<API key>(script);
    }
}
<summary>
Dispose method in IDisposable; suppresses finalization since cleanup is done here.
</summary>
public void Dispose()
{
    this.Dispose(true);
    GC.SuppressFinalize(this);
}
<summary>
Initialize a proxy instance for show-command and validate that the requested
window dimensions fit on the screen (terminating error otherwise).
</summary>
protected override void BeginProcessing()
{
    _showCommandProxy = new ShowCommandProxy(this);
    if (_showCommandProxy.ScreenHeight < this.Height)
    {
        ErrorRecord error = new ErrorRecord(
            new <API key>(String.Format(CultureInfo.CurrentUICulture, <API key>.PropertyValidate, "Height", _showCommandProxy.ScreenHeight)),
            "<API key>",
            ErrorCategory.InvalidData,
            null);
        this.<API key>(error);
    }
    if (_showCommandProxy.ScreenWidth < this.Width)
    {
        ErrorRecord error = new ErrorRecord(
            new <API key>(String.Format(CultureInfo.CurrentUICulture, <API key>.PropertyValidate, "Width", _showCommandProxy.ScreenWidth)),
            "<API key>",
            ErrorCategory.InvalidData,
            null);
        this.<API key>(error);
    }
}
<summary>
ProcessRecord with or without CommandName: opens the all-commands window when
no Name was given, otherwise the single-command window.
</summary>
protected override void ProcessRecord()
{
    if (Name == null)
    {
        _hasOpenedWindow = this.<API key>();
    }
    else
    {
        _hasOpenedWindow = this.<API key>();
    }
}
<summary>
Waits for the window, runs the produced script, and optionally displays
accumulated errors in a message popup.
</summary>
protected override void EndProcessing()
{
    if (!_hasOpenedWindow)
    {
        return;
    }
    // We wait until the window is loaded and then activate it
    // to work around the console window gaining activation somewhere
    // in the end of ProcessRecord, which causes the keyboard focus
    // (and use of tab key to focus controls) to go away from the window
    _showCommandProxy.WindowLoaded.WaitOne();
    _showCommandProxy.ActivateWindow();
    this.<API key>();
    this.RunScript(_showCommandProxy.GetScript());
    if (_errors.Count == 0 || !ErrorPopup)
    {
        return;
    }
    // Aggregate all error messages, one per line, for the popup.
    StringBuilder errorString = new StringBuilder();
    for (int i = 0; i < _errors.Count; i++)
    {
        if (i != 0)
        {
            errorString.AppendLine();
        }
        ErrorRecord error = _errors[i];
        errorString.Append(error.Exception.Message);
    }
    _showCommandProxy.ShowErrorString(errorString.ToString());
}
<summary>
StopProcessing is called to close the window when the user presses Ctrl+C in the command prompt.
</summary>
protected override void StopProcessing()
{
    _showCommandProxy.CloseWindow();
}
#endregion
#region Private Methods
<summary>
Runs the script in a new PowerShell instance and hooks up error stream to potentially display error popup.
This method has the inconvenience of not showing to the console user the script being executed.
</summary>
<param name="script">script to be run</param>
private void <API key>(string script)
{
    // errors are not created here, because there is a field for it used in the final pop up
    PSDataCollection<object> output = new PSDataCollection<object>();
    output.DataAdded += new EventHandler<DataAddedEventArgs>(this.Output_DataAdded);
    _errors.DataAdded += new EventHandler<DataAddedEventArgs>(this.Error_DataAdded);
    // Run in the current runspace so session state (variables, modules) is shared.
    PowerShell ps = PowerShell.Create(RunspaceMode.CurrentRunspace);
    ps.Streams.Error = _errors;
    ps.Commands.AddScript(script);
    ps.Invoke(null, output, null);
}
<summary>
Issues a terminating error when this.commandName was not found.
</summary>
private void <API key>()
{
    <API key> errorException = new <API key>(
        String.Format(
            CultureInfo.CurrentUICulture,
            <API key>.CommandNotFound,
            Name));
    this.<API key>(new ErrorRecord(errorException, "NoCommand", ErrorCategory.InvalidOperation, Name));
}
<summary>
Issues a terminating error when there is more than one command matching this.commandName.
</summary>
private void <API key>()
{
    <API key> errorException = new <API key>(
        String.Format(
            CultureInfo.CurrentUICulture,
            <API key>.MoreThanOneCommand,
            Name,
            "Show-Command"));
    this.<API key>(new ErrorRecord(errorException, "MoreThanOneCommand", ErrorCategory.InvalidOperation, Name));
}
<summary>
Called from <API key> to run the command that will get the CommandInfo and list of modules.
</summary>
<param name="command">command to be retrieved</param>
<param name="modules">list of loaded modules</param>
private void <API key>(out CommandInfo command, out Dictionary<string, <API key>> modules)
{
    command = null;
    modules = null;
    string commandText = _showCommandProxy.<API key>(Name, true);
    Collection<PSObject> commandResults = this.InvokeCommand.InvokeScript(commandText);
    // BUGFIX: validate the script results BEFORE indexing into them. The previous
    // code dereferenced commandResults[0]/[1] first, so a null or short collection
    // threw instead of reporting the intended "command not found" error.
    if (commandResults == null || commandResults.Count < 2)
    {
        this.<API key>();
        return;
    }
    object[] commandObjects = (object[])commandResults[0].BaseObject;
    object[] moduleObjects = (object[])commandResults[1].BaseObject;
    if (commandObjects == null || moduleObjects == null || commandObjects.Length == 0)
    {
        this.<API key>();
        return;
    }
    if (commandObjects.Length > 1)
    {
        // Ambiguous name: terminating error, no fall-through.
        this.<API key>();
    }
    command = ((PSObject)commandObjects[0]).BaseObject as CommandInfo;
    if (command == null)
    {
        this.<API key>();
        return;
    }
    if (command.CommandType == CommandTypes.Alias)
    {
        // Resolve the alias to its underlying command definition.
        commandText = _showCommandProxy.<API key>(command.Definition, false);
        commandResults = this.InvokeCommand.InvokeScript(commandText);
        if (commandResults == null || commandResults.Count != 1)
        {
            this.<API key>();
            return;
        }
        command = (CommandInfo)commandResults[0].BaseObject;
    }
    modules = _showCommandProxy.<API key>(moduleObjects);
}
<summary>
ProcessRecord when a command name is specified.
</summary>
<returns>true if there was no exception processing this record</returns>
private bool <API key>()
{
CommandInfo commandInfo;
this.<API key>(out commandInfo, out _importedModules);
Diagnostics.Assert(commandInfo != null, "<API key> would throw a terminating error/exception");
try
{
// IndexOf('\\') != -1 detects a module-qualified name (Module\Command).
<API key> = _showCommandProxy.GetCommandViewModel(new <API key>(commandInfo), _noCommonParameter.ToBool(), _importedModules, this.Name.IndexOf('\\') != -1);
_showCommandProxy.ShowCommandWindow(<API key>, _passThrough);
}
catch (<API key> ti)
{
// Surface the original failure from the window thread as a non-terminating error.
this.WriteError(new ErrorRecord(ti.InnerException, "<API key>", ErrorCategory.InvalidOperation, Name));
return false;
}
return true;
}
<summary>
ProcessRecord when a command name is not specified: shows the all-commands window.
</summary>
<returns>true if there was no exception processing this record</returns>
private bool <API key>()
{
// The helper script returns two results: [0] the command list, [1] the loaded modules.
Collection<PSObject> rawCommands = this.InvokeCommand.InvokeScript(_showCommandProxy.<API key>());
_commands = _showCommandProxy.GetCommandList((object[])rawCommands[0].BaseObject);
_importedModules = _showCommandProxy.<API key>((object[])rawCommands[1].BaseObject);
try
{
_showCommandProxy.<API key>(_importedModules, _commands, _noCommonParameter.ToBool(), _passThrough);
}
catch (<API key> ti)
{
this.WriteError(new ErrorRecord(ti.InnerException, "<API key>", ErrorCategory.InvalidOperation, Name));
return false;
}
return true;
}
<summary>
Waits until the window has been closed, servicing HelpNeeded and ImportModuleNeeded events in the meantime.
</summary>
private void <API key>()
{
do
{
// Index 0 = window closed, 1 = help requested, 2 = module import requested.
int which = WaitHandle.WaitAny(new WaitHandle[] { _showCommandProxy.WindowClosed, _showCommandProxy.HelpNeeded, _showCommandProxy.ImportModuleNeeded });
if (which == 0)
{
break;
}
if (which == 1)
{
// Run Get-Help for the requested command and push the results to the window.
Collection<PSObject> helpResults = this.InvokeCommand.InvokeScript(_showCommandProxy.GetHelpCommand(_showCommandProxy.CommandNeedingHelp));
_showCommandProxy.DisplayHelp(helpResults);
continue;
}
Diagnostics.Assert(which == 2, "which is 0,1 or 2 and 0 and 1 have been eliminated in the ifs above");
string commandToRun = _showCommandProxy.<API key>(_showCommandProxy.<API key>);
Collection<PSObject> rawCommands;
try
{
rawCommands = this.InvokeCommand.InvokeScript(commandToRun);
}
catch (RuntimeException e)
{
// Import failed: report back to the window and keep waiting.
_showCommandProxy.ImportModuleFailed(e);
continue;
}
// Import succeeded: refresh the command and module lists in the window.
_commands = _showCommandProxy.GetCommandList((object[])rawCommands[0].BaseObject);
_importedModules = _showCommandProxy.<API key>((object[])rawCommands[1].BaseObject);
_showCommandProxy.ImportModuleDone(_importedModules, _commands);
continue;
}
while (true);
}
<summary>
Writes the output of a script being run into the pipeline.
</summary>
<param name="sender">output collection</param>
<param name="e">output event</param>
private void Output_DataAdded(object sender, DataAddedEventArgs e)
{
// e.Index identifies the newly added item in the sender collection.
this.WriteObject(((PSDataCollection<object>)sender)[e.Index]);
}
<summary>
Writes the errors of a script being run into the pipeline.
</summary>
<param name="sender">error collection</param>
<param name="e">error event</param>
private void Error_DataAdded(object sender, DataAddedEventArgs e)
{
// e.Index identifies the newly added error record in the sender collection.
this.WriteError(((PSDataCollection<ErrorRecord>)sender)[e.Index]);
}
<summary>
Implements IDisposable logic: releases the error collection when called from Dispose.
</summary>
<param name="isDisposing">true if being called from Dispose</param>
private void Dispose(bool isDisposing)
{
// Guard clauses: nothing to do on finalizer path or when already disposed.
if (!isDisposing)
{
return;
}
if (_errors == null)
{
return;
}
_errors.Dispose();
_errors = null;
}
#endregion
<summary>
Wraps interop code for the console input buffer (kernel32 WriteConsoleInput and friends).
</summary>
internal static class <API key>
{
<summary>
Constant used in calls to GetStdHandle to request the standard input handle.
</summary>
internal const int STD_INPUT_HANDLE = -10;
<summary>
Adds a string to the console input buffer.
</summary>
<param name="str">string to add to console input buffer</param>
<param name="newLine">true to add Enter after the string</param>
<returns>true if it was successful in adding all characters to console input buffer</returns>
internal static bool <API key>(string str, bool newLine)
{
IntPtr handle = <API key>.GetStdHandle(<API key>.STD_INPUT_HANDLE);
if (handle == IntPtr.Zero)
{
// No console input handle available; cannot inject input.
return false;
}
uint strLen = (uint)str.Length;
// One KeyDown record per character, plus an optional trailing Enter record.
<API key>.INPUT_RECORD[] records = new <API key>.INPUT_RECORD[strLen + (newLine ? 1 : 0)];
for (int i = 0; i < strLen; i++)
{
<API key>.INPUT_RECORD.SetInputRecord(ref records[i], str[i]);
}
uint written;
if (!<API key>.WriteConsoleInput(handle, records, strLen, out written) || written != strLen)
{
// I do not know of a case where written is not going to be strlen. Maybe for some character that
// is not supported in the console. The API suggests this can happen,
// so we handle it by returning false
return false;
}
// Enter is written separately, because if this is a command, and one of the characters in the command was not written
// (written != strLen) it is desirable to fail (return false) before typing enter and running the command
if (newLine)
{
<API key>.INPUT_RECORD[] enterArray = new <API key>.INPUT_RECORD[1];
// (char)13 is carriage return, i.e. the Enter key.
<API key>.INPUT_RECORD.SetInputRecord(ref enterArray[0], (char)13);
written = 0;
if (!<API key>.WriteConsoleInput(handle, enterArray, 1, out written))
{
// I don't think this will happen
return false;
}
Diagnostics.Assert(written == 1, "only Enter is being added and it is a supported character");
}
return true;
}
<summary>
Gets the console handle.
</summary>
<param name="nStdHandle">which console handle to get</param>
<returns>the console handle</returns>
[DllImport("kernel32.dll", SetLastError = true)]
internal static extern IntPtr GetStdHandle(int nStdHandle);
<summary>
Writes to the console input buffer.
</summary>
<param name="hConsoleInput">console handle</param>
<param name="lpBuffer">inputs to be written</param>
<param name="nLength">number of inputs to be written</param>
<param name="<API key>">returned number of inputs actually written</param>
<returns>0 if the function fails</returns>
[DllImport("kernel32.dll", SetLastError = true)]
[return: MarshalAs(UnmanagedType.Bool)]
internal static extern bool WriteConsoleInput(
IntPtr hConsoleInput,
INPUT_RECORD[] lpBuffer,
uint nLength,
out uint <API key>);
<summary>
A record to be added to the console buffer; mirrors the Win32 INPUT_RECORD layout.
</summary>
internal struct INPUT_RECORD
{
<summary>
The proper event type for a KeyEvent KEY_EVENT_RECORD.
</summary>
internal const int KEY_EVENT = 0x0001;
<summary>
input buffer event type
</summary>
internal ushort EventType;
<summary>
The actual event. The original structure is a union of many others, but this is the largest of them
And we don't need other kinds of events
</summary>
internal KEY_EVENT_RECORD KeyEvent;
<summary>
Sets the necessary fields of <paramref name="inputRecord"/> for a KeyDown event for the <paramref name="character"/>.
</summary>
<param name="inputRecord">input record to be set</param>
<param name="character">character to set the record with</param>
internal static void SetInputRecord(ref INPUT_RECORD inputRecord, char character)
{
// Only EventType, bKeyDown and UnicodeChar are required to inject a character;
// virtual key/scan codes are left at their defaults.
inputRecord.EventType = INPUT_RECORD.KEY_EVENT;
inputRecord.KeyEvent.bKeyDown = true;
inputRecord.KeyEvent.UnicodeChar = character;
}
}
<summary>
Type of INPUT_RECORD which is a key; mirrors the Win32 KEY_EVENT_RECORD layout.
</summary>
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
internal struct KEY_EVENT_RECORD
{
<summary>
true for key down and false for key up, but only needed if wVirtualKeyCode is used
</summary>
internal bool bKeyDown;
<summary>
repeat count
</summary>
internal ushort wRepeatCount;
<summary>
virtual key code
</summary>
internal ushort wVirtualKeyCode;
<summary>
virtual key scan code
</summary>
internal ushort wVirtualScanCode;
<summary>
character in input. If this is specified, wVirtualKeyCode, and others don't need to be
</summary>
internal char UnicodeChar;
<summary>
State of keys like Shift and control
</summary>
internal uint dwControlKeyState;
}
}
}
}
|
// Achievement metadata for the "Wanderer" badge (auto-generated data file;
// see the "generated ok" marker at the end).
var name = "Wanderer";
var collection_type = 0;
var is_secret = 0;
var desc = "Visited 503 new locations.";
var status_text = "Gosh, where HAVEN'T you traveled? Your peregrinations have earned you this <API key> Wanderer badge.";
var last_published = 1348803094;
var is_shareworthy = 1;
var url = "wanderer";
var category = "exploring";
var url_swf = "\/c2.glitch.bz\/achievements\/2011-09-18\/wanderer_1316414516.swf";
var url_img_180 = "\/c2.glitch.bz\/achievements\/2011-09-18\/<API key>.png";
var url_img_60 = "\/c2.glitch.bz\/achievements\/2011-09-18\/<API key>.png";
var url_img_40 = "\/c2.glitch.bz\/achievements\/2011-09-18\/<API key>.png";
// No-op: nothing additional happens when this badge is applied to the player.
function on_apply(pc){
}
// Completion condition: the "locations_visited" group counter must reach 503
// (matches the "Visited 503 new locations." description above).
var conditions = {
8 : {
type : "group_count",
group : "locations_visited",
value : "503"
},
};
function onComplete(pc){ // generated from rewards
// Reward multiplier: gift_of_gab gives 1.2x, otherwise silvertongue gives 1.05x.
var multiplier = pc.buffs_has('gift_of_gab') ? 1.2 : pc.buffs_has('silvertongue') ? 1.05 : 1.0;
multiplier += pc.<API key>();
// "Completist"-named achievements scale further with player level above 4.
if (/completist/i.exec(this.name)) {
var level = pc.stats_get_level();
if (level > 4) {
multiplier *= (pc.stats_get_level()/4);
}
}
// Base rewards (see the rewards table below): 1000 XP and 200 favor with Lem.
pc.stats_add_xp(round_to_5(1000 * multiplier), true);
pc.<API key>("lem", round_to_5(200 * multiplier));
// The buff that boosted the reward is consumed, highest-value one first.
if(pc.buffs_has('gift_of_gab')) {
pc.buffs_remove('gift_of_gab');
}
else if(pc.buffs_has('silvertongue')) {
pc.buffs_remove('silvertongue');
}
}
// Declarative reward table from which onComplete was generated.
var rewards = {
"xp" : 1000,
"favor" : {
"giant" : "lem",
"points" : 200
}
};
// generated ok (NO DATE)
|
@charset "utf-8";
/* CSS Document */
/* Board panel: white card with subtle inset highlight + drop shadow. */
#board_container{padding:20px 20px 0 ;
margin: 0 1%;
position:relative; background:#FFF;
box-shadow: 0 1px 0 rgba(255, 255, 255, 0.15) inset, 0 1px 5px rgba(0, 0, 0, 0.075);
}
.board h2{color:#333; font-size:30px; padding:5px 0; margin:0; font-family:'Varela Round', sans-serif;}
.board h3{color:#666; font-size:24px; padding:0;}
.board p{color:#444; font-size:14px; line-height:24px;}
.board_right h2{color:#333; font-size:24px; margin:10px 0; padding:0;}
.board_right .follower{clear:both; font-size:14px; color:#333; padding:5px 0; margin:5px; font-weight:bold;}
.icon_board{margin:0 8px; font-size:13px !important;color:#FC6;}
.icon_board_text{font-size:13px; color:#333 !important;}
.board_small{font-size:20px; color:#333;}
.board_small_m{margin:0 10px;}
/* Sticky minimized row pinned under a 50px header. */
.row_min{position:fixed; z-index:100 !important; top:50px; left:0; background:linear-gradient(to bottom, rgb(255, 255, 255) 0%, rgb(248, 248, 248) 100%); width:120%; padding:10px 0;}
.row_lrg{z-index:1001;}
/* Responsive overrides: desktop, tablet, small tablet/phone, phone. */
@media only screen and (min-width : 1200px) {
.board img{ height:auto; width:90%; margin:auto; }
.board_right img{width:72%; height:auto;}
.board_right h2{color:#333; font-size:24px; margin:10px 0; padding:0;}
}
@media only screen and (min-width : 768px) and (max-width : 1199px) {
.board img{ height:auto; width:100%;}
.board_right{width:25%; margin:0 auto; float:right;}
.board_right img{width:45%; height:auto; border:5px solid #333;}
}
@media only screen and (min-width : 481px) and (max-width : 767px) {
.board {width:80%; margin:50px auto;}
.board img{ height:auto; width:100%;}
.board_right{width:80%; margin:20px auto;}
.board_right img{width:30%; height:auto; border:5px solid #333;}
.row_min{top:100px !important;}
}
@media only screen and (max-width : 480px) {
.board img{ height:auto; width:100%;}
.board_right img{width:45%; height:auto; border:5px solid #333;}
.row_min{top:150px !important;}
#board_container{margin-top: 60px !important;}
}
.user_dis{display:none;}
.user_bg{background:#FFF; padding:0; box-shadow:0 1px 0 rgba(255, 255, 255, 0.15) inset, 0 1px 5px rgba(0, 0, 0, 0.075);}
.user_margin{margin-top:15px;}
/* Fixed duplicate declaration: the rule previously set "height:140px" twice;
   the first occurrence was evidently intended to be "width" (140x140 square image). */
.user_bg .img{float:left; width:140px; height:140px;}
.user_bg h2{float:left; width:60%; color:#666; font-size:28px;}
.user_pro_bg{background:#FFF; margin:0 20px;box-shadow:0 1px 0 rgba(255, 255, 255, 0.15) inset, 0 1px 5px rgba(0, 0, 0, 0.075);}
.user_pro_bg h2{color:#666; font-size:14px; font-weight:600; padding:0;}
.user_pro{list-style:none; margin:0; padding:0 0 10px; }
.user_pro li{padding-bottom:12px; color:#888; font-weight:600}
.user_pro li img{padding-right:5px; }
|
using System;
using Avalonia.Controls.Primitives;
using Avalonia.Input;
using Avalonia.Markup.Xaml;
using Avalonia.Platform;
using Avalonia.Rendering;
using Avalonia.UnitTests;
using Moq;
using Xunit;
namespace Avalonia.Controls.UnitTests
{
public class ContextMenuTests
{
private Mock<IPopupImpl> popupImpl;
private MouseTestHelper _mouse = new MouseTestHelper();
[Fact]
// Verifies a context-requested event on the control opens its ContextMenu and
// raises MenuOpened exactly once.
public void <API key>()
{
using (Application())
{
var sut = new ContextMenu();
var target = new Panel
{
ContextMenu = sut
};
var window = new Window { Content = target };
window.ApplyTemplate();
window.Presenter.ApplyTemplate();
int openedCount = 0;
sut.MenuOpened += (sender, args) =>
{
openedCount++;
};
target.RaiseEvent(new <API key>());
Assert.True(sut.IsOpen);
Assert.Equal(1, openedCount);
}
}
[Fact]
// Verifies that when both ContextMenu and ContextFlyout are set, the ContextMenu opens
// and the flyout stays closed.
public void <API key>()
{
// We have this test for backward compatibility with the code that already sets custom ContextMenu.
using (Application())
{
var sut = new ContextMenu();
var flyout = new Flyout();
var target = new Panel
{
ContextMenu = sut,
ContextFlyout = flyout
};
var window = new Window { Content = target };
window.ApplyTemplate();
window.Presenter.ApplyTemplate();
target.RaiseEvent(new <API key>());
Assert.True(sut.IsOpen);
Assert.False(flyout.IsOpen);
}
}
[Fact]
// Verifies the Apps (context-menu) key opens the menu, and the KeyUp is observed
// once by a tunnel handler on the target.
public void <API key>()
{
using (Application())
{
var sut = new ContextMenu();
var target = new Panel
{
ContextMenu = sut
};
var <API key> = 0;
target.AddHandler(Control.<API key>, (s, a) => <API key>++, Interactivity.RoutingStrategies.Tunnel);
var window = PreparedWindow(target);
window.Show();
target.RaiseEvent(new KeyEventArgs { RoutedEvent = InputElement.KeyUpEvent, Key = Key.Apps, Source = window });
Assert.True(sut.IsOpen);
Assert.Equal(1, <API key>);
}
}
[Fact]
// Verifies pressing the Apps key while the menu is open closes it again.
public void <API key>()
{
using (Application())
{
var sut = new ContextMenu();
var target = new Panel
{
ContextMenu = sut
};
var window = PreparedWindow(target);
window.Show();
target.RaiseEvent(new <API key>());
Assert.True(sut.IsOpen);
sut.RaiseEvent(new KeyEventArgs { RoutedEvent = InputElement.KeyUpEvent, Key = Key.Apps, Source = window });
Assert.False(sut.IsOpen);
}
}
[Fact]
// Verifies programmatic Open(control) raises MenuOpened exactly once.
public void <API key>()
{
using (Application())
{
var sut = new ContextMenu();
var target = new Panel
{
ContextMenu = sut
};
var window = new Window { Content = target };
window.ApplyTemplate();
window.Presenter.ApplyTemplate();
int openedCount = 0;
sut.MenuOpened += (sender, args) =>
{
openedCount++;
};
sut.Open(target);
Assert.Equal(1, openedCount);
}
}
[Fact]
// Verifies parameterless Open() (uses the attached control) raises MenuOpened.
public void <API key>()
{
using (Application())
{
var sut = new ContextMenu();
var target = new Panel
{
ContextMenu = sut
};
var window = new Window { Content = target };
window.ApplyTemplate();
window.Presenter.ApplyTemplate();
bool opened = false;
sut.MenuOpened += (sender, args) =>
{
opened = true;
};
sut.Open();
Assert.True(opened);
}
}
[Fact]
// Verifies parameterless Open() throws once the menu has been detached from its control.
public void <API key>()
{
using (Application())
{
var sut = new ContextMenu();
var target = new Panel
{
ContextMenu = sut
};
var window = new Window { Content = target };
window.ApplyTemplate();
window.Presenter.ApplyTemplate();
target.ContextMenu = null;
Assert.ThrowsAny<Exception>(()=> sut.Open());
}
}
[Fact]
// Verifies Close() on an open menu raises MenuClosed exactly once.
public void <API key>()
{
using (Application())
{
var sut = new ContextMenu();
var target = new Panel
{
ContextMenu = sut
};
var window = new Window { Content = target };
window.ApplyTemplate();
window.Presenter.ApplyTemplate();
sut.Open(target);
int closedCount = 0;
sut.MenuClosed += (sender, args) =>
{
closedCount++;
};
sut.Close();
Assert.Equal(1, closedCount);
}
}
[Fact]
// Verifies that cancelling ContextMenuClosing (e.Cancel = true) keeps the menu open:
// the popup is shown once and never hidden despite a click outside the menu.
public void <API key>()
{
using (Application())
{
popupImpl.Setup(x => x.Show(true, false)).Verifiable();
popupImpl.Setup(x => x.Hide()).Verifiable();
var window = PreparedWindow();
window.Width = 100;
window.Height = 100;
var button = new Button
{
Height = 10,
Width = 10,
HorizontalAlignment = Layout.HorizontalAlignment.Left,
VerticalAlignment = Layout.VerticalAlignment.Top
};
window.Content = button;
window.ApplyTemplate();
window.Show();
var tracker = 0;
var c = new ContextMenu();
c.ContextMenuClosing += (s, e) =>
{
tracker++;
e.Cancel = true;
};
button.ContextMenu = c;
c.Open(button);
var overlay = <API key>.<API key>(window);
// Click at (90,90), well outside the 10x10 button — would normally dismiss.
_mouse.Down(overlay, MouseButton.Left, new Point(90, 90));
_mouse.Up(button, MouseButton.Left, new Point(90, 90));
Assert.Equal(1, tracker);
Assert.True(c.IsOpen);
popupImpl.Verify(x => x.Hide(), Times.Never);
popupImpl.Verify(x => x.Show(true, false), Times.Exactly(1));
}
}
[Fact]
// Verifies a left click outside the menu closes it (popup hidden once).
public void <API key>()
{
using (Application())
{
popupImpl.Setup(x => x.Show(true, false)).Verifiable();
popupImpl.Setup(x => x.Hide()).Verifiable();
var window = PreparedWindow();
window.Width = 100;
window.Height = 100;
var button = new Button
{
Height = 10,
Width = 10,
HorizontalAlignment = Layout.HorizontalAlignment.Left,
VerticalAlignment = Layout.VerticalAlignment.Top
};
window.Content = button;
window.ApplyTemplate();
window.Show();
var c = new ContextMenu();
c.PlacementMode = PlacementMode.Bottom;
c.Open(button);
var overlay = <API key>.<API key>(window);
_mouse.Down(overlay, MouseButton.Left, new Point(90, 90));
_mouse.Up(button, MouseButton.Left, new Point(90, 90));
Assert.False(c.IsOpen);
popupImpl.Verify(x => x.Hide(), Times.Exactly(1));
popupImpl.Verify(x => x.Show(true, false), Times.Exactly(1));
}
}
[Fact]
// Verifies right-click opens the menu and a subsequent left click on the overlay closes it.
public void <API key>()
{
using (Application())
{
popupImpl.Setup(x => x.Show(true, false)).Verifiable();
popupImpl.Setup(x => x.Hide()).Verifiable();
var sut = new ContextMenu();
var target = new Panel
{
ContextMenu = sut
};
var window = PreparedWindow(target);
window.Show();
var overlay = <API key>.<API key>(window);
_mouse.Click(target, MouseButton.Right);
Assert.True(sut.IsOpen);
_mouse.Down(overlay);
_mouse.Up(target);
Assert.False(sut.IsOpen);
popupImpl.Verify(x => x.Show(true, false), Times.Once);
popupImpl.Verify(x => x.Hide(), Times.Once);
}
}
[Fact]
// Verifies a right click outside an open menu closes it and re-opens it at the
// new position: popup hidden once but shown twice, menu ends up open.
public void <API key>()
{
using (Application())
{
popupImpl.Setup(x => x.Show(true, false)).Verifiable();
popupImpl.Setup(x => x.Hide()).Verifiable();
var sut = new ContextMenu();
var target = new Panel
{
ContextMenu = sut
};
var window = PreparedWindow(target);
window.Show();
var overlay = <API key>.<API key>(window);
_mouse.Click(target, MouseButton.Right);
Assert.True(sut.IsOpen);
_mouse.Down(overlay, MouseButton.Right);
_mouse.Up(target, MouseButton.Right);
Assert.True(sut.IsOpen);
popupImpl.Verify(x => x.Hide(), Times.Once);
popupImpl.Verify(x => x.Show(true, false), Times.Exactly(2));
}
}
[Fact]
// Verifies a single ContextMenu shared by two controls still works after one of
// the controls is removed from the visual tree (removal also closes the menu).
public void Context_Menu_Can_Be_Shared_Between_Controls_Even_After_A_Control_Is_Removed_From_Visual_Tree()
{
using (Application())
{
var sut = new ContextMenu();
var target1 = new Panel
{
ContextMenu = sut
};
var target2 = new Panel
{
ContextMenu = sut
};
var sp = new StackPanel { Children = { target1, target2 } };
var window = new Window { Content = sp };
window.ApplyTemplate();
window.Presenter.ApplyTemplate();
_mouse.Click(target1, MouseButton.Right);
Assert.True(sut.IsOpen);
sp.Children.Remove(target1);
Assert.False(sut.IsOpen);
_mouse.Click(target2, MouseButton.Right);
Assert.True(sut.IsOpen);
}
}
[Fact]
// Verifies that cancelling ContextMenuOpening (e.Cancel = true) prevents the menu
// from opening: the handler runs, IsOpen stays false, popup never shown.
public void <API key>()
{
using (Application())
{
popupImpl.Setup(x => x.Show(true, false)).Verifiable();
bool eventCalled = false;
var sut = new ContextMenu();
var target = new Panel
{
ContextMenu = sut
};
// Window constructed only to attach target to a visual root; instance not needed.
new Window { Content = target };
sut.ContextMenuOpening += (c, e) => { eventCalled = true; e.Cancel = true; };
_mouse.Click(target, MouseButton.Right);
Assert.True(eventCalled);
Assert.False(sut.IsOpen);
popupImpl.Verify(x => x.Show(true, false), Times.Never);
}
}
[Fact]
// Smoke test: assigning and then clearing a control's ContextMenu must not throw.
public void <API key>()
{
using (Application())
{
var target = new ContextMenu();
var control = new Panel();
control.ContextMenu = target;
control.ContextMenu = null;
}
}
[Fact]
// Verifies one ContextMenu shared via a StaticResource between two TextBlocks
// can be opened from either control in turn.
public void <API key>()
{
using (Application())
{
var xaml = @"
<Window xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'>
<Window.Resources>
<ContextMenu x:Key='contextMenu'>
<MenuItem>Foo</MenuItem>
</ContextMenu>
</Window.Resources>
<StackPanel>
<TextBlock Name='target1' ContextMenu='{StaticResource contextMenu}'/>
<TextBlock Name='target2' ContextMenu='{StaticResource contextMenu}'/>
</StackPanel>
</Window>";
var window = (Window)<API key>.Load(xaml);
var target1 = window.Find<TextBlock>("target1");
var target2 = window.Find<TextBlock>("target2");
var mouse = new MouseTestHelper();
// Both controls must resolve to the very same menu instance.
Assert.NotNull(target1.ContextMenu);
Assert.NotNull(target2.ContextMenu);
Assert.Same(target1.ContextMenu, target2.ContextMenu);
window.Show();
var menu = target1.ContextMenu;
mouse.Click(target1, MouseButton.Right);
Assert.True(menu.IsOpen);
mouse.Click(target2, MouseButton.Right);
Assert.True(menu.IsOpen);
}
}
[Fact]
// Same as the StaticResource test, but the shared ContextMenu comes from a
// Style setter applied to both TextBlocks.
public void <API key>()
{
using (Application())
{
var xaml = @"
<Window xmlns='https://github.com/avaloniaui'
xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml'>
<Window.Styles>
<Style Selector='TextBlock'>
<Setter Property='ContextMenu'>
<ContextMenu>
<MenuItem>Foo</MenuItem>
</ContextMenu>
</Setter>
</Style>
</Window.Styles>
<StackPanel>
<TextBlock Name='target1'/>
<TextBlock Name='target2'/>
</StackPanel>
</Window>";
var window = (Window)<API key>.Load(xaml);
var target1 = window.Find<TextBlock>("target1");
var target2 = window.Find<TextBlock>("target2");
var mouse = new MouseTestHelper();
Assert.NotNull(target1.ContextMenu);
Assert.NotNull(target2.ContextMenu);
Assert.Same(target1.ContextMenu, target2.ContextMenu);
window.Show();
var menu = target1.ContextMenu;
mouse.Click(target1, MouseButton.Right);
Assert.True(menu.IsOpen);
mouse.Click(target2, MouseButton.Right);
Assert.True(menu.IsOpen);
}
}
[Fact]
// Verifies a cancelled ContextMenuClosing blocks dismissal by an outside right
// click: handler fires, menu stays open, popup never hidden.
public void <API key>()
{
using (Application())
{
popupImpl.Setup(x => x.Show(true, false)).Verifiable();
popupImpl.Setup(x => x.Hide()).Verifiable();
bool eventCalled = false;
var sut = new ContextMenu();
var target = new Panel
{
ContextMenu = sut
};
var window = PreparedWindow(target);
var overlay = <API key>.<API key>(window);
sut.ContextMenuClosing += (c, e) => { eventCalled = true; e.Cancel = true; };
window.Show();
_mouse.Click(target, MouseButton.Right);
Assert.True(sut.IsOpen);
_mouse.Down(overlay, MouseButton.Right);
_mouse.Up(target, MouseButton.Right);
Assert.True(eventCalled);
Assert.True(sut.IsOpen);
popupImpl.Verify(x => x.Show(true, false), Times.Once());
popupImpl.Verify(x => x.Hide(), Times.Never);
}
}
// Builds a Window backed by a mocked windowing platform, with a mocked renderer
// and its template (including the presenter's) already applied.
private Window PreparedWindow(object content = null)
{
var rendererMock = new Mock<IRenderer>();
var windowingPlatform = AvaloniaLocator.Current.GetService<IWindowingPlatform>();
var impl = Mock.Get(windowingPlatform.CreateWindow());
impl.Setup(i => i.CreateRenderer(It.IsAny<IRenderRoot>())).Returns(rendererMock.Object);
var window = new Window(impl.Object)
{
Content = content
};
window.ApplyTemplate();
window.Presenter.ApplyTemplate();
return window;
}
// Starts a UnitTestApplication configured with a single 100x100 mocked screen,
// a mocked window impl whose popups are served by the shared popupImpl mock,
// and a real InputManager. Dispose the returned value to tear the app down.
private IDisposable Application()
{
var screen = new PixelRect(new PixelPoint(), new PixelSize(100, 100));
var screenImpl = new Mock<IScreenImpl>();
screenImpl.Setup(x => x.ScreenCount).Returns(1);
// Consistency fix: lambda parameter was "X => X.AllScreens", out of line with
// every other "x => x" lambda in this file (and an extra space after "Returns(").
screenImpl.Setup(x => x.AllScreens).Returns(new[] { new Screen(1, screen, screen, true) });
var windowImpl = <API key>.CreateWindowMock();
popupImpl = <API key>.CreatePopupMock(windowImpl.Object);
popupImpl.SetupGet(x => x.RenderScaling).Returns(1);
windowImpl.Setup(x => x.CreatePopup()).Returns(popupImpl.Object);
windowImpl.Setup(x => x.Screen).Returns(screenImpl.Object);
var services = TestServices.StyledWindow.With(
inputManager: new InputManager(),
windowImpl: windowImpl.Object,
windowingPlatform: new <API key>(() => windowImpl.Object, x => popupImpl.Object));
return UnitTestApplication.Start(services);
}
}
}
|
// tion, are permitted provided that the following conditions are met:
// and/or other materials provided with the distribution.
// THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
// FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// APACHE SOFTWARE FOUNDATION OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLU-
// DING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
// OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
// THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/** @file */
#ifndef <API key>
#define <API key>
#include <log4cplus/config.hxx>
#if defined (<API key>)
#pragma once
#endif
#if defined (_WIN32)
#include <cstddef>
#endif
#ifdef UNICODE
# define LOG4CPLUS_TEXT2(STRING) L##STRING
#else
# define LOG4CPLUS_TEXT2(STRING) STRING
#endif // UNICODE
#define LOG4CPLUS_TEXT(STRING) LOG4CPLUS_TEXT2(STRING)
namespace log4cplus
{
#if defined (UNICODE)
typedef wchar_t tchar;
#else
typedef char tchar;
#endif
} // namespace log4cplus
#endif // <API key>
|
<!DOCTYPE html>
<!-- Manual reference test for the two-value border-width shorthand:
     "3px 10px" means 3px top/bottom and 10px left/right. -->
<html>
<style>
div {
border-color: green;
border-style: solid;
border-width: 3px 10px;
}
</style>
<div>This text should have top and bottom borders of 3px and left and right borders of 10px</div>
</html>
|
<!DOCTYPE html>
<!-- Browser harness for the mocha integration suite. Loads socket.io from the
     local test server (hardcoded to port 8889), then mocha, then boots the
     suite through require.js via data-main="integration.js". -->
<html>
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8">
<link href="lib/mocha.css" rel="stylesheet" />
</head>
<body>
<div id="mocha"></div>
<script src="http://localhost:8889/socket.io/socket.io.js"></script>
<script src="lib/mocha.js"></script>
<script data-main="integration.js" src="lib/require.js"></script>
</body>
</html>
|
// <auto-generated>
// This code was generated by a tool.
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
namespace <API key>.Properties {
[global::System.Runtime.CompilerServices.<API key>()]
[global::System.CodeDom.Compiler.<API key>("Microsoft.VisualStudio.Editors.SettingsDesigner.<API key>", "10.0.0.0")]
internal sealed partial class Settings : global::System.Configuration.<API key> {
// Shared instance wrapped by Synchronized(...), as emitted by the settings designer.
private static Settings defaultInstance = ((Settings)(global::System.Configuration.<API key>.Synchronized(new Settings())));
// Gets the application's default settings instance.
public static Settings Default {
get {
return defaultInstance;
}
}
}
}
|
using System.Drawing;
namespace JR.DevFw.Framework.Graphic
{
<summary>
Represents a callback that receives an <see cref="Image"/> instance.
</summary>
<param name="img">the image passed to the callback</param>
public delegate void <API key>(Image img);
}
|
// <auto-generated>
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>
namespace Microsoft.Azure.Management.Sql
{
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
<summary>
<API key> operations.
</summary>
internal partial class <API key> : IServiceOperations<SqlManagementClient>, <API key>
{
<summary>
Initializes a new instance of the <API key> class.
</summary>
<param name='client'>
Reference to the service client.
</param>
<exception cref="System.<API key>">
Thrown when a required parameter is null
</exception>
internal <API key>(SqlManagementClient client)
{
// Generated plumbing: store the owning service client; it is required.
if (client == null)
{
throw new System.<API key>("client");
}
Client = client;
}
<summary>
Gets a reference to the SqlManagementClient.
</summary>
public SqlManagementClient Client { get; private set; }
<summary>
Gets a collection of sync database ids.
</summary>
<param name='locationName'>
The name of the region where the resource is located.
</param>
<param name='customHeaders'>
Headers that will be added to request.
</param>
<param name='cancellationToken'>
The cancellation token.
</param>
<exception cref="CloudException">
Thrown when the operation returned an invalid status code
</exception>
<exception cref="<API key>">
Thrown when unable to deserialize the response
</exception>
<exception cref="ValidationException">
Thrown when a required parameter is null
</exception>
<exception cref="System.<API key>">
Thrown when a required parameter is null
</exception>
<return>
A response object containing the response body and response headers.
</return>
public async Task<<API key><IPage<<API key>>>> <API key>(string locationName, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
if (locationName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "locationName");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
string apiVersion = "2015-05-01-preview";
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("locationName", locationName);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "ListSyncDatabaseIds", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.Sql/locations/{locationName}/syncDatabaseIds").ToString();
_url = _url.Replace("{locationName}", System.Uri.EscapeDataString(locationName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new <API key><IPage<<API key>>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<<API key>>>(_responseContent, Client.<API key>);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new <API key>("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
<summary>
Refreshes a hub database schema.
</summary>
<param name='resourceGroupName'>
The name of the resource group that contains the resource. You can obtain
this value from the Azure Resource Manager API or the portal.
</param>
<param name='serverName'>
The name of the server.
</param>
<param name='databaseName'>
The name of the database on which the sync group is hosted.
</param>
<param name='syncGroupName'>
The name of the sync group.
</param>
<param name='customHeaders'>
Headers that will be added to request.
</param>
<param name='cancellationToken'>
The cancellation token.
</param>
// Long-running-operation wrapper: starts the hub-schema refresh via the Begin* call,
// then hands the initial response to the client helper which polls until the
// operation completes and returns the final result.
// NOTE(review): identifiers are redacted in this listing — this is presumed to be the
// standard AutoRest Begin/GetPostOrDeleteOperationResult pattern; confirm against the
// generated client.
public async Task<<API key>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Send request
<API key> _response = await <API key>(resourceGroupName, serverName, databaseName, syncGroupName, customHeaders, cancellationToken).ConfigureAwait(false);
return await Client.<API key>(_response, customHeaders, cancellationToken).ConfigureAwait(false);
}
<summary>
Gets a collection of hub database schemas.
</summary>
<param name='resourceGroupName'>
The name of the resource group that contains the resource. You can obtain
this value from the Azure Resource Manager API or the portal.
</param>
<param name='serverName'>
The name of the server.
</param>
<param name='databaseName'>
The name of the database on which the sync group is hosted.
</param>
<param name='syncGroupName'>
The name of the sync group.
</param>
<param name='customHeaders'>
Headers that will be added to request.
</param>
<param name='cancellationToken'>
The cancellation token.
</param>
<exception cref="CloudException">
Thrown when the operation returned an invalid status code
</exception>
<exception cref="<API key>">
Thrown when unable to deserialize the response
</exception>
<exception cref="ValidationException">
Thrown when a required parameter is null
</exception>
<exception cref="System.<API key>">
Thrown when a required parameter is null
</exception>
<return>
A response object containing the response body and response headers.
</return>
// Gets the first page of hub database schemas for a sync group (GET .../hubSchemas).
// Auto-generated REST dispatch: validate -> trace -> build URL -> send -> map non-200
// to CloudException -> deserialize the page body.
// NOTE(review): type/member identifiers are redacted in this listing; behavior is
// described only from the visible code.
public async Task<<API key><IPage<<API key>>>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Validate required parameters up front so no network work is done on bad input.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (serverName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "serverName");
}
if (databaseName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "databaseName");
}
if (syncGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "syncGroupName");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// API version is pinned by the code generator for this operation.
string apiVersion = "2015-05-01-preview";
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
tracingParameters.Add("databaseName", databaseName);
tracingParameters.Add("syncGroupName", syncGroupName);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "ListHubSchemas", tracingParameters);
}
// Construct URL
// Path parameters are substituted into the route template after URL-escaping.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/hubSchemas").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{serverName}", System.Uri.EscapeDataString(serverName));
_url = _url.Replace("{databaseName}", System.Uri.EscapeDataString(databaseName));
_url = _url.Replace("{syncGroupName}", System.Uri.EscapeDataString(syncGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
// Per-call correlation id, emitted only when the client has it enabled.
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
// Caller-supplied headers replace any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
// GET carries no body; the variable is kept so error reporting below can attach it.
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// Error path: anything other than 200 becomes a CloudException that carries the
// request/response pair for diagnostics; transport objects are disposed before throwing.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
// Best effort: try to extract a structured CloudError from the response body.
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
// Success path: the request/response objects are handed to the caller inside _result
// (not disposed here) so headers/content remain readable.
var _result = new <API key><IPage<<API key>>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<<API key>>>(_responseContent, Client.<API key>);
}
catch (JsonException ex)
{
// A malformed 200 body is surfaced as a deserialization error, after cleanup.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new <API key>("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
<summary>
Gets a collection of sync group logs.
</summary>
<param name='resourceGroupName'>
The name of the resource group that contains the resource. You can obtain
this value from the Azure Resource Manager API or the portal.
</param>
<param name='serverName'>
The name of the server.
</param>
<param name='databaseName'>
The name of the database on which the sync group is hosted.
</param>
<param name='syncGroupName'>
The name of the sync group.
</param>
<param name='startTime'>
Get logs generated after this time.
</param>
<param name='endTime'>
Get logs generated before this time.
</param>
<param name='type'>
The types of logs to retrieve. Possible values include: 'All', 'Error',
'Warning', 'Success'
</param>
<param name='continuationToken'>
The continuation token for this operation.
</param>
<param name='customHeaders'>
Headers that will be added to request.
</param>
<param name='cancellationToken'>
The cancellation token.
</param>
<exception cref="CloudException">
Thrown when the operation returned an invalid status code
</exception>
<exception cref="<API key>">
Thrown when unable to deserialize the response
</exception>
<exception cref="ValidationException">
Thrown when a required parameter is null
</exception>
<exception cref="System.<API key>">
Thrown when a required parameter is null
</exception>
<return>
A response object containing the response body and response headers.
</return>
// Gets the first page of sync group logs (GET .../logs), filtered by a required
// time window and log type, with an optional continuation token.
// Same auto-generated dispatch shape as the other list operations in this file.
// NOTE(review): type/member identifiers are redacted in this listing; behavior is
// described only from the visible code.
public async Task<<API key><IPage<<API key>>>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, string startTime, string endTime, string type, string continuationToken = default(string), Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Validate required parameters up front; note startTime/endTime/type are mandatory
// for this operation, unlike the other list calls.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (serverName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "serverName");
}
if (databaseName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "databaseName");
}
if (syncGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "syncGroupName");
}
if (startTime == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "startTime");
}
if (endTime == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "endTime");
}
if (type == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "type");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// API version is pinned by the code generator for this operation.
string apiVersion = "2015-05-01-preview";
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
tracingParameters.Add("databaseName", databaseName);
tracingParameters.Add("syncGroupName", syncGroupName);
tracingParameters.Add("startTime", startTime);
tracingParameters.Add("endTime", endTime);
tracingParameters.Add("type", type);
tracingParameters.Add("continuationToken", continuationToken);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "ListLogs", tracingParameters);
}
// Construct URL
// Path parameters are substituted into the route template after URL-escaping.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/logs").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{serverName}", System.Uri.EscapeDataString(serverName));
_url = _url.Replace("{databaseName}", System.Uri.EscapeDataString(databaseName));
_url = _url.Replace("{syncGroupName}", System.Uri.EscapeDataString(syncGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (startTime != null)
{
_queryParameters.Add(string.Format("startTime={0}", System.Uri.EscapeDataString(startTime)));
}
if (endTime != null)
{
_queryParameters.Add(string.Format("endTime={0}", System.Uri.EscapeDataString(endTime)));
}
if (type != null)
{
// Enum-style value: JSON-serialize with the client settings, then strip the
// surrounding quotes before escaping it into the query string.
_queryParameters.Add(string.Format("type={0}", System.Uri.EscapeDataString(Rest.Serialization.SafeJsonConvert.SerializeObject(type, Client.<API key>).Trim('"'))));
}
if (continuationToken != null)
{
_queryParameters.Add(string.Format("continuationToken={0}", System.Uri.EscapeDataString(continuationToken)));
}
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
// Per-call correlation id, emitted only when the client has it enabled.
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
// Caller-supplied headers replace any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
// GET carries no body; the variable is kept so error reporting below can attach it.
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// Error path: anything other than 200 becomes a CloudException; transport objects
// are disposed before throwing.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
// Best effort: try to extract a structured CloudError from the response body.
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
// Success path: the request/response objects are handed to the caller inside _result.
var _result = new <API key><IPage<<API key>>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<<API key>>>(_responseContent, Client.<API key>);
}
catch (JsonException ex)
{
// A malformed 200 body is surfaced as a deserialization error, after cleanup.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new <API key>("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
<summary>
Cancels a sync group synchronization.
</summary>
<param name='resourceGroupName'>
The name of the resource group that contains the resource. You can obtain
this value from the Azure Resource Manager API or the portal.
</param>
<param name='serverName'>
The name of the server.
</param>
<param name='databaseName'>
The name of the database on which the sync group is hosted.
</param>
<param name='syncGroupName'>
The name of the sync group.
</param>
<param name='customHeaders'>
Headers that will be added to request.
</param>
<param name='cancellationToken'>
The cancellation token.
</param>
<exception cref="CloudException">
Thrown when the operation returned an invalid status code
</exception>
<exception cref="ValidationException">
Thrown when a required parameter is null
</exception>
<exception cref="System.<API key>">
Thrown when a required parameter is null
</exception>
<return>
A response object containing the response body and response headers.
</return>
// Cancels a sync group synchronization (POST .../cancelSync).
// Body-less POST: on 200 the result is just the request/response wrapper — there is
// no payload to deserialize.
// NOTE(review): type/member identifiers are redacted in this listing; behavior is
// described only from the visible code.
public async Task<<API key>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Validate required parameters up front so no network work is done on bad input.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (serverName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "serverName");
}
if (databaseName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "databaseName");
}
if (syncGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "syncGroupName");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// API version is pinned by the code generator for this operation.
string apiVersion = "2015-05-01-preview";
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
tracingParameters.Add("databaseName", databaseName);
tracingParameters.Add("syncGroupName", syncGroupName);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "CancelSync", tracingParameters);
}
// Construct URL
// Path parameters are substituted into the route template after URL-escaping.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/cancelSync").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{serverName}", System.Uri.EscapeDataString(serverName));
_url = _url.Replace("{databaseName}", System.Uri.EscapeDataString(databaseName));
_url = _url.Replace("{syncGroupName}", System.Uri.EscapeDataString(syncGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
// Per-call correlation id, emitted only when the client has it enabled.
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
// Caller-supplied headers replace any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
// This POST sends no body; the variable is kept so error reporting below can attach it.
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// Error path: anything other than 200 becomes a CloudException; transport objects
// are disposed before throwing.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
// Best effort: try to extract a structured CloudError from the response body.
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
// Success path: no body to deserialize — just hand the transport objects to the caller.
var _result = new <API key>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
<summary>
Triggers a sync group synchronization.
</summary>
<param name='resourceGroupName'>
The name of the resource group that contains the resource. You can obtain
this value from the Azure Resource Manager API or the portal.
</param>
<param name='serverName'>
The name of the server.
</param>
<param name='databaseName'>
The name of the database on which the sync group is hosted.
</param>
<param name='syncGroupName'>
The name of the sync group.
</param>
<param name='customHeaders'>
Headers that will be added to request.
</param>
<param name='cancellationToken'>
The cancellation token.
</param>
<exception cref="CloudException">
Thrown when the operation returned an invalid status code
</exception>
<exception cref="ValidationException">
Thrown when a required parameter is null
</exception>
<exception cref="System.<API key>">
Thrown when a required parameter is null
</exception>
<return>
A response object containing the response body and response headers.
</return>
// Triggers a sync group synchronization (POST .../triggerSync).
// Structurally identical to the CancelSync operation above, differing only in the
// route segment and trace name; on 200 there is no payload to deserialize.
// NOTE(review): type/member identifiers are redacted in this listing; behavior is
// described only from the visible code.
public async Task<<API key>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Validate required parameters up front so no network work is done on bad input.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (serverName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "serverName");
}
if (databaseName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "databaseName");
}
if (syncGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "syncGroupName");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// API version is pinned by the code generator for this operation.
string apiVersion = "2015-05-01-preview";
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
tracingParameters.Add("databaseName", databaseName);
tracingParameters.Add("syncGroupName", syncGroupName);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "TriggerSync", tracingParameters);
}
// Construct URL
// Path parameters are substituted into the route template after URL-escaping.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/triggerSync").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{serverName}", System.Uri.EscapeDataString(serverName));
_url = _url.Replace("{databaseName}", System.Uri.EscapeDataString(databaseName));
_url = _url.Replace("{syncGroupName}", System.Uri.EscapeDataString(syncGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
// Per-call correlation id, emitted only when the client has it enabled.
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
// Caller-supplied headers replace any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
// This POST sends no body; the variable is kept so error reporting below can attach it.
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// Error path: anything other than 200 becomes a CloudException; transport objects
// are disposed before throwing.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
// Best effort: try to extract a structured CloudError from the response body.
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
// Success path: no body to deserialize — just hand the transport objects to the caller.
var _result = new <API key>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
<summary>
Gets a sync group.
</summary>
<param name='resourceGroupName'>
The name of the resource group that contains the resource. You can obtain
this value from the Azure Resource Manager API or the portal.
</param>
<param name='serverName'>
The name of the server.
</param>
<param name='databaseName'>
The name of the database on which the sync group is hosted.
</param>
<param name='syncGroupName'>
The name of the sync group.
</param>
<param name='customHeaders'>
Headers that will be added to request.
</param>
<param name='cancellationToken'>
The cancellation token.
</param>
<exception cref="CloudException">
Thrown when the operation returned an invalid status code
</exception>
<exception cref="<API key>">
Thrown when unable to deserialize the response
</exception>
<exception cref="ValidationException">
Thrown when a required parameter is null
</exception>
<exception cref="System.<API key>">
Thrown when a required parameter is null
</exception>
<return>
A response object containing the response body and response headers.
</return>
// Sends the GET request for a single sync group and returns the HTTP
// envelope together with the deserialized SyncGroup body.
public async Task<<API key><SyncGroup>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Fail fast on missing required parameters before doing any network work.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (serverName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "serverName");
}
if (databaseName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "databaseName");
}
if (syncGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "syncGroupName");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Service API version pinned by the generator for this operation.
string apiVersion = "2015-05-01-preview";
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
tracingParameters.Add("databaseName", databaseName);
tracingParameters.Add("syncGroupName", syncGroupName);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "Get", tracingParameters);
}
// Construct URL
// Every template segment is URL-escaped before substitution.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{serverName}", System.Uri.EscapeDataString(serverName));
_url = _url.Replace("{databaseName}", System.Uri.EscapeDataString(databaseName));
_url = _url.Replace("{syncGroupName}", System.Uri.EscapeDataString(syncGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
// Replace (not append to) any existing accept-language header.
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
// Caller-supplied headers override anything set above.
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
// GET carries no body; _requestContent stays null but is still
// attached to any CloudException for diagnostics.
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// Only 200 is a success for this operation; anything else becomes a
// CloudException, enriched with the parsed CloudError body when the
// payload is valid JSON.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
// On failure the transport objects are disposed here because no
// result object takes ownership of them.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
// On success the result object carries the live request/response.
var _result = new <API key><SyncGroup>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<SyncGroup>(_responseContent, Client.<API key>);
}
catch (JsonException ex)
{
// A malformed success payload is surfaced as a deserialization
// error after the transport objects are cleaned up.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new <API key>("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Creates or updates a sync group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='parameters'>
/// The requested sync group resource state.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
// Long-running-operation wrapper: fires the initial request and hands
// the response to the client to poll until a terminal state is reached.
public async Task<<API key><SyncGroup>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, SyncGroup parameters, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Issue the initial request for the create-or-update operation.
var _initialResponse = await <API key>(resourceGroupName, serverName, databaseName, syncGroupName, parameters, customHeaders, cancellationToken).ConfigureAwait(false);
// Delegate polling of the long-running operation to the client runtime.
return await Client.<API key>(_initialResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Deletes a sync group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
// Long-running-operation wrapper for delete: starts the operation and
// lets the client runtime poll it to completion.
public async Task<<API key>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Issue the initial delete request.
<API key> _initialResponse = await <API key>(resourceGroupName, serverName, databaseName, syncGroupName, customHeaders, cancellationToken).ConfigureAwait(false);
// Hand off to the client runtime to await the operation's final state.
return await Client.<API key>(_initialResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Updates a sync group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='parameters'>
/// The requested sync group resource state.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
// Long-running-operation wrapper for update: sends the initial request,
// then polls via the client runtime until the operation completes.
public async Task<<API key><SyncGroup>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, SyncGroup parameters, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Issue the initial update request.
var _initialResponse = await <API key>(resourceGroupName, serverName, databaseName, syncGroupName, parameters, customHeaders, cancellationToken).ConfigureAwait(false);
// Let the client runtime drive the long-running operation to completion.
return await Client.<API key>(_initialResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Lists sync groups under a hub database.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.<API key>">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
// Sends the GET request that lists the first page of sync groups under a
// hub database; the result body is a pageable collection.
public async Task<<API key><IPage<SyncGroup>>> <API key>(string resourceGroupName, string serverName, string databaseName, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Fail fast on missing required parameters before doing any network work.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (serverName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "serverName");
}
if (databaseName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "databaseName");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Service API version pinned by the generator for this operation.
string apiVersion = "2015-05-01-preview";
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
tracingParameters.Add("databaseName", databaseName);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "ListByDatabase", tracingParameters);
}
// Construct URL
// Every template segment is URL-escaped before substitution.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{serverName}", System.Uri.EscapeDataString(serverName));
_url = _url.Replace("{databaseName}", System.Uri.EscapeDataString(databaseName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
// Replace (not append to) any existing accept-language header.
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
// Caller-supplied headers override anything set above.
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
// GET carries no body; _requestContent stays null for diagnostics only.
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// Only 200 is a success; everything else is surfaced as a CloudException
// enriched with the parsed CloudError body when the payload is JSON.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
// Dispose transport objects on failure; no result object owns them.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new <API key><IPage<SyncGroup>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
// Page1 is the generator's concrete IPage implementation; it also
// carries the nextLink used by the ListByDatabaseNext call.
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<SyncGroup>>(_responseContent, Client.<API key>);
}
catch (JsonException ex)
{
// A malformed success payload is surfaced as a deserialization
// error after the transport objects are cleaned up.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new <API key>("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Refreshes a hub database schema.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.<API key>">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
// Sends the POST request that triggers a hub-database schema refresh.
// The operation has no response body; 200 and 202 are both accepted
// (202 indicates the refresh is still running server-side).
public async Task<<API key>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Fail fast on missing required parameters before doing any network work.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (serverName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "serverName");
}
if (databaseName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "databaseName");
}
if (syncGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "syncGroupName");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Service API version pinned by the generator for this operation.
string apiVersion = "2015-05-01-preview";
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
tracingParameters.Add("databaseName", databaseName);
tracingParameters.Add("syncGroupName", syncGroupName);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "<API key>", tracingParameters);
}
// Construct URL
// Every template segment is URL-escaped before substitution; the
// trailing /refreshHubSchema selects the POST action on the sync group.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/refreshHubSchema").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{serverName}", System.Uri.EscapeDataString(serverName));
_url = _url.Replace("{databaseName}", System.Uri.EscapeDataString(databaseName));
_url = _url.Replace("{syncGroupName}", System.Uri.EscapeDataString(syncGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
// Replace (not append to) any existing accept-language header.
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
// Caller-supplied headers override anything set above.
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
// This POST action takes no body; _requestContent stays null.
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// 200 (done) and 202 (accepted, still running) are the success codes.
if ((int)_statusCode != 200 && (int)_statusCode != 202)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
// Dispose transport objects on failure; no result object owns them.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
// No response body to deserialize; only headers/status are returned.
var _result = new <API key>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Creates or updates a sync group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='parameters'>
/// The requested sync group resource state.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.<API key>">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
// Sends the initial PUT of the create-or-update long-running operation.
// 200 and 201 carry a SyncGroup body; 202 means the operation was
// accepted and will be polled by the LRO wrapper.
public async Task<<API key><SyncGroup>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, SyncGroup parameters, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Fail fast on missing required parameters before doing any network work.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (serverName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "serverName");
}
if (databaseName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "databaseName");
}
if (syncGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "syncGroupName");
}
if (parameters == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "parameters");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Service API version pinned by the generator for this operation.
string apiVersion = "2015-05-01-preview";
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
tracingParameters.Add("databaseName", databaseName);
tracingParameters.Add("syncGroupName", syncGroupName);
tracingParameters.Add("parameters", parameters);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "BeginCreateOrUpdate", tracingParameters);
}
// Construct URL
// Every template segment is URL-escaped before substitution.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{serverName}", System.Uri.EscapeDataString(serverName));
_url = _url.Replace("{databaseName}", System.Uri.EscapeDataString(databaseName));
_url = _url.Replace("{syncGroupName}", System.Uri.EscapeDataString(syncGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("PUT");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
// Replace (not append to) any existing accept-language header.
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
// Caller-supplied headers override anything set above.
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Generated defensive null check; parameters was already validated above.
if(parameters != null)
{
_requestContent = Rest.Serialization.SafeJsonConvert.SerializeObject(parameters, Client.<API key>);
_httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
_httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.<API key>.Parse("application/json; charset=utf-8");
}
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// 200 (updated), 201 (created) and 202 (accepted) are the success codes.
if ((int)_statusCode != 200 && (int)_statusCode != 201 && (int)_statusCode != 202)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
// Dispose transport objects on failure; no result object owns them.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new <API key><SyncGroup>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
// 200 carries the updated resource state.
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<SyncGroup>(_responseContent, Client.<API key>);
}
catch (JsonException ex)
{
// A malformed success payload is surfaced as a deserialization
// error after the transport objects are cleaned up.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new <API key>("Unable to deserialize the response.", _responseContent, ex);
}
}
// Deserialize Response
// 201 carries the newly created resource state.
if ((int)_statusCode == 201)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<SyncGroup>(_responseContent, Client.<API key>);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new <API key>("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Deletes a sync group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.<API key>">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<<API key>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (serverName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "serverName");
}
if (databaseName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "databaseName");
}
if (syncGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "syncGroupName");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
string apiVersion = "2015-05-01-preview";
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
tracingParameters.Add("databaseName", databaseName);
tracingParameters.Add("syncGroupName", syncGroupName);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "BeginDelete", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{serverName}", System.Uri.EscapeDataString(serverName));
_url = _url.Replace("{databaseName}", System.Uri.EscapeDataString(databaseName));
_url = _url.Replace("{syncGroupName}", System.Uri.EscapeDataString(syncGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("DELETE");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
if ((int)_statusCode != 200 && (int)_statusCode != 202 && (int)_statusCode != 204)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new <API key>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Updates a sync group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group that contains the resource. You can obtain
/// this value from the Azure Resource Manager API or the portal.
/// </param>
/// <param name='serverName'>
/// The name of the server.
/// </param>
/// <param name='databaseName'>
/// The name of the database on which the sync group is hosted.
/// </param>
/// <param name='syncGroupName'>
/// The name of the sync group.
/// </param>
/// <param name='parameters'>
/// The requested sync group resource state.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.<API key>">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<<API key><SyncGroup>> <API key>(string resourceGroupName, string serverName, string databaseName, string syncGroupName, SyncGroup parameters, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
// Validate required parameters up front so we fail fast, before any network I/O.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (serverName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "serverName");
}
if (databaseName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "databaseName");
}
if (syncGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "syncGroupName");
}
if (parameters == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "parameters");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
string apiVersion = "2015-05-01-preview";
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("serverName", serverName);
tracingParameters.Add("databaseName", databaseName);
tracingParameters.Add("syncGroupName", syncGroupName);
tracingParameters.Add("parameters", parameters);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "BeginUpdate", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}").ToString();
// Path parameters are URL-escaped individually before substitution.
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{serverName}", System.Uri.EscapeDataString(serverName));
_url = _url.Replace("{databaseName}", System.Uri.EscapeDataString(databaseName));
_url = _url.Replace("{syncGroupName}", System.Uri.EscapeDataString(syncGroupName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
// Update is a PATCH: only the fields set on 'parameters' are sent.
_httpRequest.Method = new HttpMethod("PATCH");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
// Caller-supplied headers override any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
if(parameters != null)
{
_requestContent = Rest.Serialization.SafeJsonConvert.SerializeObject(parameters, Client.<API key>);
_httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
_httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.<API key>.Parse("application/json; charset=utf-8");
}
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// Update accepts 200 (completed, body returned) and 202 (accepted, long-running);
// anything else is surfaced as a CloudException with the parsed CloudError when available.
if ((int)_statusCode != 200 && (int)_statusCode != 202)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new <API key><SyncGroup>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
// Only a 200 carries the updated SyncGroup body; a 202 has no deserializable payload.
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<SyncGroup>(_responseContent, Client.<API key>);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new <API key>("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Gets a collection of sync database ids.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.<API key>">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<<API key><IPage<<API key>>>> <API key>(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
if (nextPageLink == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
}
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("nextPageLink", nextPageLink);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "<API key>", tracingParameters);
}
// Construct URL
// The nextLink returned by the service is already a complete absolute URL,
// so it is used verbatim (not escaped) and no api-version is appended.
string _url = "{nextLink}";
_url = _url.Replace("{nextLink}", nextPageLink);
List<string> _queryParameters = new List<string>();
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
// Caller-supplied headers override any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// Only 200 is a success for a list-next page; anything else becomes a CloudException.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new <API key><IPage<<API key>>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<<API key>>>(_responseContent, Client.<API key>);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new <API key>("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Gets a collection of hub database schemas.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.<API key>">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<<API key><IPage<<API key>>>> <API key>(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
if (nextPageLink == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
}
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("nextPageLink", nextPageLink);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "ListHubSchemasNext", tracingParameters);
}
// Construct URL
// The nextLink returned by the service is already a complete absolute URL,
// so it is used verbatim (not escaped) and no api-version is appended.
string _url = "{nextLink}";
_url = _url.Replace("{nextLink}", nextPageLink);
List<string> _queryParameters = new List<string>();
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
// Caller-supplied headers override any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// Only 200 is a success for a list-next page; anything else becomes a CloudException.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new <API key><IPage<<API key>>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<<API key>>>(_responseContent, Client.<API key>);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new <API key>("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Gets a collection of sync group logs.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.<API key>">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<<API key><IPage<<API key>>>> <API key>(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
if (nextPageLink == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
}
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("nextPageLink", nextPageLink);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "ListLogsNext", tracingParameters);
}
// Construct URL
// The nextLink returned by the service is already a complete absolute URL,
// so it is used verbatim (not escaped) and no api-version is appended.
string _url = "{nextLink}";
_url = _url.Replace("{nextLink}", nextPageLink);
List<string> _queryParameters = new List<string>();
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
// Caller-supplied headers override any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// Only 200 is a success for a list-next page; anything else becomes a CloudException.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new <API key><IPage<<API key>>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<<API key>>>(_responseContent, Client.<API key>);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new <API key>("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Lists sync groups under a hub database.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="<API key>">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.<API key>">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<<API key><IPage<SyncGroup>>> <API key>(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, Cancellation<API key> = default(CancellationToken))
{
if (nextPageLink == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
}
// Tracing
bool _shouldTrace = <API key>.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = <API key>.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("nextPageLink", nextPageLink);
tracingParameters.Add("cancellationToken", cancellationToken);
<API key>.Enter(_invocationId, this, "ListByDatabaseNext", tracingParameters);
}
// Construct URL
// The nextLink returned by the service is already a complete absolute URL,
// so it is used verbatim (not escaped) and no api-version is appended.
string _url = "{nextLink}";
_url = _url.Replace("{nextLink}", nextPageLink);
List<string> _queryParameters = new List<string>();
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.<API key> != null && Client.<API key>.Value)
{
_httpRequest.Headers.<API key>("<API key>", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.<API key>("accept-language", Client.AcceptLanguage);
}
// Caller-supplied headers override any header of the same name set above.
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.<API key>(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.<API key>();
await Client.Credentials.<API key>(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
<API key>.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.<API key>();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
<API key>.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.<API key>();
string _responseContent = null;
// Only 200 is a success for a list-next page; anything else becomes a CloudException.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.<API key>);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new <API key>(_httpRequest, _requestContent);
ex.Response = new <API key>(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
<API key>.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new <API key><IPage<SyncGroup>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<SyncGroup>>(_responseContent, Client.<API key>);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new <API key>("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
<API key>.Exit(_invocationId, _result);
}
return _result;
}
}
}
|
using System;
using Xunit;
namespace HealthMonitoring.AcceptanceTests.Helpers
{
    /// <summary>
    /// Shared assertion helpers for the acceptance-test suite.
    /// </summary>
    static class CustomAssertions
    {
        /// <summary>
        /// Asserts that two strings are equal under a lenient (non-strict)
        /// comparison mode, failing with a message that shows both values.
        /// </summary>
        /// <param name="first">The first string to compare.</param>
        /// <param name="second">The second string to compare.</param>
        public static void EqualNotStrict(string first, string second)
        {
            var areEqual = string.Equals(first, second, StringComparison.<API key>);
            Assert.True(areEqual, $"{first} != {second}");
        }
    }
}
|
// Small model exposing a backing value through computed accessors.
export default class ModelAccessor {
  constructor() {
    // Backing store; every accessor derives from this single number.
    this.value = 10
  }

  // Derived view of `value`, offset by +100.
  get highCount() {
    return 100 + this.value
  }

  // Writing highCount stores the value with the +100 offset removed.
  set highCount(v) {
    this.value = v - 100
  }

  // Twice the derived highCount.
  get doubleHigh() {
    return 2 * this.highCount
  }

  // Bumps the backing value by one.
  incr() {
    this.value += 1
  }
}
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<title>Preface</title>
<link rel="stylesheet" href="gettingStarted.css" type="text/css" />
<meta name="generator" content="DocBook XSL Stylesheets V1.62.4" />
<link rel="home" href="index.html" title="Porting Berkeley DB" />
<link rel="up" href="index.html" title="Porting Berkeley DB" />
<link rel="previous" href="index.html" title="Porting Berkeley DB" />
<link rel="next" href="introduction.html" title="Chapter 1. Introduction to Porting Berkeley DB " />
</head>
<body>
<div class="navheader">
<table width="100%" summary="Navigation header">
<tr>
<th colspan="3" align="center">Preface</th>
</tr>
<tr>
<td width="20%" align="left"><a accesskey="p" href="index.html">Prev</a> </td>
<th width="60%" align="center"> </th>
<td width="20%" align="right"> <a accesskey="n" href="introduction.html">Next</a></td>
</tr>
</table>
<hr />
</div>
<div class="preface" lang="en" xml:lang="en">
<div class="titlepage">
<div>
<div>
<h2 class="title"><a id="preface"></a>Preface</h2>
</div>
</div>
<div></div>
</div>
<div class="toc">
<p>
<b>Table of Contents</b>
</p>
<dl>
<dt>
<span class="sect1">
<a href="preface.html#conventions">Conventions Used in this Book</a>
</span>
</dt>
<dd>
<dl>
<dt>
<span class="sect2">
<a href="preface.html#audience">Audience</a>
</span>
</dt>
<dt>
<span class="sect2">
<a href="preface.html#moreinfo">For More Information</a>
</span>
</dt>
</dl>
</dd>
</dl>
</div>
<p>
The Berkeley DB family of open source, embeddable databases
provides developers with fast, reliable persistence with zero
administration. Often deployed as "edge" databases, the Berkeley DB
family provides very high performance, reliability, scalability,
and availability for application use cases that do not require SQL.
</p>
<p>
As an open source database, Berkeley DB works on many different
platforms, from Wind River's Tornado system, to VMS, to
Windows NT and Windows 95, and most existing UNIX
platforms. It runs on 32 and 64-bit machines, little or big-endian.
</p>
<p>
<span class="emphasis"><em>Berkeley DB Porting Guide</em></span> provides the information you need to
port Berkeley DB to additional platforms.
</p>
<div class="sect1" lang="en" xml:lang="en">
<div class="titlepage">
<div>
<div>
<h2 class="title" style="clear: both"><a id="conventions"></a>Conventions Used in this Book</h2>
</div>
</div>
<div></div>
</div>
<p>
      The following typographical conventions are used within this manual:
</p>
<p>
Structure names are represented in <tt class="classname">monospaced font</tt>, as are <tt class="methodname">method
names</tt>. For example: "<tt class="methodname">DB->open()</tt> is a method
on a <tt class="classname">DB</tt> handle."
</p>
<p>
Variable or non-literal text is presented in <span class="emphasis"><em>italics</em></span>. For example: "Go to your
<span class="emphasis"><em>DB_INSTALL</em></span>
directory."
</p>
<p>
Program examples are displayed in a <tt class="classname">monospaced font</tt> on a shaded background.
For example:
</p>
<pre class="programlisting">/* File: <API key>.h */
typedef struct stock_dbs {
DB *inventory_dbp; /* Database containing inventory information */
DB *vendor_dbp; /* Database containing vendor information */
char *db_home_dir; /* Directory containing the database files */
char *inventory_db_name; /* Name of the inventory database */
char *vendor_db_name; /* Name of the vendor database */
} STOCK_DBS; </pre>
<div class="note" style="margin-left: 0.5in; margin-right: 0.5in;">
<h3 class="title">Note</h3>
<p>
Finally, notes of interest are represented using a note block such
as this.
</p>
</div>
<div class="sect2" lang="en" xml:lang="en">
<div class="titlepage">
<div>
<div>
<h3 class="title"><a id="audience"></a>Audience</h3>
</div>
</div>
<div></div>
</div>
<p>
This guide is intended
for programmers porting Berkeley DB to a new platform. It
assumes that these programmers possess:
</p>
<div class="itemizedlist">
<ul type="disc">
<li>
<p>
Familiarity with standard ANSI C and POSIX C 1003.1 and 1003.2 library and system
calls.
</p>
</li>
<li>
<p>
Working knowledge of the target platform as well as the development tools (for example, compilers, linkers, and debuggers) available on that platform.
</p>
</li>
</ul>
</div>
</div>
<div class="sect2" lang="en" xml:lang="en">
<div class="titlepage">
<div>
<div>
<h3 class="title"><a id="moreinfo"></a>For More Information</h3>
</div>
</div>
<div></div>
</div>
<p>
Beyond this manual, you may also find the following sources of information useful when building a
DB application:
</p>
<div class="itemizedlist">
<ul type="disc">
<li>
<p>
<a href="http:
Getting Started with Berkeley DB for C
</a>
</p>
</li>
<li>
<p>
<a href="http:
Getting Started with Transaction Processing for C
</a>
</p>
</li>
<li>
<p>
<a href="http:
Berkeley DB Getting Started with Replicated Applications for C
</a>
</p>
</li>
<li>
<p>
<a href="http:
Berkeley DB Programmer's Reference Guide
</a>
</p>
</li>
<li>
<p>
<a href="http:
Berkeley DB C API
</a>
</p>
</li>
</ul>
</div>
</div>
</div>
</div>
<div class="navfooter">
<hr />
<table width="100%" summary="Navigation footer">
<tr>
<td width="40%" align="left"><a accesskey="p" href="index.html">Prev</a> </td>
<td width="20%" align="center">
<a accesskey="u" href="index.html">Up</a>
</td>
<td width="40%" align="right"> <a accesskey="n" href="introduction.html">Next</a></td>
</tr>
<tr>
<td width="40%" align="left" valign="top">Porting Berkeley DB </td>
<td width="20%" align="center">
<a accesskey="h" href="index.html">Home</a>
</td>
<td width="40%" align="right" valign="top"> Chapter 1. Introduction to Porting Berkeley DB </td>
</tr>
</table>
</div>
</body>
</html>
|
// UIBarItem+CASAdditions.h
#import <UIKit/UIKit.h>
#import "CASStyleableItem.h"

// Category that lets UIBarItem participate in Classy styling by
// conforming to the CASStyleableItem protocol.
@interface UIBarItem (CASAdditions) <CASStyleableItem>

// One-time setup hook for the category.
// NOTE(review): implementation not visible here — presumably installs
// method swizzles; confirm in the matching .m file.
+ (void)bootstrapClassy;

// Weak back-reference to this item's parent in the styleable hierarchy.
@property (nonatomic, weak, readwrite) id<CASStyleableItem> cas_parent;

@end
|
<?php

namespace Oro\Bundle\UIBundle\Tests\Unit\Twig;

use Symfony\Component\HttpFoundation\Request;
use Oro\Bundle\UIBundle\Twig\UrlExtension;

/**
 * Unit tests for the UrlExtension Twig extension, which registers the
 * "oro_url_add_query" function for merging the current request's query
 * string into a given URL.
 */
class UrlExtensionTest extends \<API key>
{
    /**
     * @var UrlExtension
     */
    protected $extension;

    protected function setUp()
    {
        // Fresh extension per test; no request is attached by default.
        $this->extension = new UrlExtension();
    }

    public function testGetName()
    {
        $this->assertEquals(UrlExtension::NAME, $this->extension->getName());
    }

    public function testGetFunctions()
    {
        // The extension must register exactly one Twig function,
        // oro_url_add_query, backed by UrlExtension::addQuery().
        $functions = $this->extension->getFunctions();
        $this->assertCount(1, $functions);
        /** @var \Twig_SimpleFunction $function */
        $function = current($functions);
        $this->assertInstanceOf('\Twig_SimpleFunction', $function);
        $this->assertEquals('oro_url_add_query', $function->getName());
        $this->assertEquals([$this->extension, 'addQuery'], $function->getCallable());
    }

    /**
     * @param string $expected Expected URL after merging
     * @param string $source Input URL
     * @param array|null $query Query parameters of the simulated request;
     *                          null means no request is attached at all
     * @dataProvider <API key>
     */
    public function testAddQuery($expected, $source, array $query = null)
    {
        if (null !== $query) {
            $request = new Request($query);
            $this->extension->setRequest($request);
        }
        $this->assertEquals($expected, $this->extension->addQuery($source));
    }

    /**
     * Cases cover: no request, empty query, params already present in the
     * source URL, params to merge, and URLs with/without host, path and
     * fragment.
     *
     * @return array
     */
    public function <API key>()
    {
        return [
            'no request' => [
                'expected' => 'http://test.url/',
                'source' => 'http://test.url/',
            ],
            'no query params' => [
                'expected' => 'http://test.url/',
                'source' => 'http://test.url/',
                'query' => [],
            ],
            'no query params without host' => [
                'expected' => '/',
                'source' => '/',
                'query' => [],
            ],
            'same query params' => [
                'expected' => 'http://test.url/?foo=1#bar',
                'source' => 'http://test.url/?foo=1#bar',
                'query' => ['foo' => 1],
            ],
            'same query params without host' => [
                'expected' => '/?foo=1#bar',
                'source' => '/?foo=1#bar',
                'query' => ['foo' => 1],
            ],
            'only new query params' => [
                'expected' => 'http://test.url/?foo=1#bar',
                'source' => 'http://test.url/#bar',
                'query' => ['foo' => 1],
            ],
            'only new query params without host' => [
                'expected' => '/?foo=1#bar',
                'source' => '/#bar',
                'query' => ['foo' => 1],
            ],
            'existing and new query params' => [
                'expected' => 'http://test.url/?baz=2&foo=1#bar',
                'source' => 'http://test.url/?foo=1#bar',
                'query' => ['baz' => 2],
            ],
            'existing and new query params without host' => [
                'expected' => '/?baz=2&foo=1#bar',
                'source' => '/?foo=1#bar',
                'query' => ['baz' => 2],
            ],
            'existing and new query params without host with path' => [
                'expected' => '/path/?baz=2&foo=1#bar',
                'source' => '/path/?foo=1#bar',
                'query' => ['baz' => 2],
            ],
            'existing and new query params without host with short path' => [
                'expected' => '/path?baz=2&foo=1#bar',
                'source' => '/path?foo=1#bar',
                'query' => ['baz' => 2],
            ],
        ];
    }
}
|
// <API key>.h
// <API key>
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// all copies or substantial portions of the Software.
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#import <UIKit/UIKit.h>
#import "RECommonFunctions.h"
#import "REComposeSheetView.h"
#import "<API key>.h"

@class <API key>;

// Outcome of a compose session: the user either cancelled or confirmed.
// NOTE(review): member names are redacted; order suggests cancel first.
typedef enum _REComposeResult {
    <API key>,
    <API key>
} REComposeResult;

// Completion block invoked when the compose sheet is dismissed.
typedef void (^<API key>)(<API key> *<API key>, REComposeResult result);

@protocol <API key>;

// Modal compose-sheet view controller with an optional image attachment
// and a decorative paperclip view.
@interface <API key> : UIViewController <<API key>> {
    REComposeSheetView *_sheetView;        // text-entry sheet
    <API key> *_backgroundView;            // backdrop behind the sheet
    UIView *_backView;
    UIView *_containerView;
    UIImageView *_paperclipView;           // paperclip adornment over the attachment
}

// Invoked on dismissal, in addition to (or instead of) the delegate.
@property (copy, readwrite, nonatomic) <API key> completionHandler;
@property (weak, readwrite, nonatomic) id<<API key>> delegate;
@property (assign, readwrite, nonatomic) NSInteger cornerRadius;
@property (assign, readwrite, nonatomic) BOOL hasAttachment;
@property (assign, readonly, nonatomic) BOOL <API key>;
@property (strong, readwrite, nonatomic) NSString *text;
@property (strong, readwrite, nonatomic) NSString *placeholderText;
@property (strong, readonly, nonatomic) UINavigationBar *navigationBar;
@property (strong, readonly, nonatomic) UINavigationItem *navigationItem;
@property (strong, readwrite, nonatomic) UIColor *tintColor;
@property (strong, readwrite, nonatomic) UIImage *attachmentImage;
@property (weak, readonly, nonatomic) UIViewController *rootViewController;

- (void)<API key>;
- (void)<API key>:(UIViewController *)controller;

@end

// Delegate notified when the compose sheet finishes.
@protocol <API key> <NSObject>
- (void)<API key>:(<API key> *)<API key> didFinishWithResult:(REComposeResult)result;
@end
|
require 'spec_helper'
describe "Callback on Notice" do
  # Per-app email thresholds (:email_at_notices) must trigger an email
  # exactly when the notice count reaches each configured threshold.
  describe "email notifications (configured individually for each app)" do
    custom_thresholds = [2, 4, 8, 16, 32, 64]
    before do
      # NOTE(review): redacted config flag — presumably enables email
      # notifications for the duration of the example.
      Errbit::Config.<API key> = true
      @app = Fabricate(:app_with_watcher, :email_at_notices => custom_thresholds)
      @problem = Fabricate(:problem, :app => @app)
    end
    after do
      Errbit::Config.<API key> = false
    end
    custom_thresholds.each do |threshold|
      it "sends an email notification after #{threshold} notice(s)" do
        # Stub the persisted count so the new notice lands exactly on the threshold.
        <API key>(Problem).to receive(:notices_count).and_return(threshold)
        expect(Mailer).to receive(:err_notification).
          and_return(double('email', :deliver => true))
        Fabricate(:notice, :problem => @problem)
      end
    end
  end
  # A resolved problem restarts its notification schedule: the first
  # notice after resolve! must email again even though notices_count
  # is already far past the [1] threshold.
  describe "email notifications for a resolved issue" do
    before do
      Errbit::Config.<API key> = true
      @app = Fabricate(:app_with_watcher, :email_at_notices => [1])
      @problem = Fabricate(:problem, :app => @app, :notices_count => 100)
    end
    after do
      Errbit::Config.<API key> = false
    end
    it "should send email notification after 1 notice since an error has been resolved" do
      @problem.resolve!
      expect(Mailer).to receive(:err_notification).and_return(double('email', :deliver => true))
      Fabricate(:notice, :problem => @problem)
    end
  end
  # With a notification service configured (default settings), creating a
  # notice must fan out to that service.
  describe "should send a notification if a notification service is configured with defaults" do
    let(:app) { Fabricate(:app, :email_at_notices => [1], :<API key> => Fabricate(:<API key>))}
    let(:problem) { Fabricate(:problem, :app => app, :notices_count => 100) }
    let(:backtrace) { Fabricate(:backtrace) }
    before do
      Errbit::Config.<API key> = true
    end
    after do
      Errbit::Config.<API key> = false
    end
    it "should create a campfire notification" do
      # Expect the service class (not a specific instance) to be invoked.
      <API key>(app.<API key>.class).to receive(:create_notification)
      Notice.create!(:problem => problem, :message => 'FooError: Too Much Bar', :server_environment => {'environment-name' => 'production'},
                     :backtrace => backtrace, :notifier => { 'name' => 'Notifier', 'version' => '1', 'url' => 'http://toad.com' })
    end
  end
  # Mirror of the previous group: when the fabricated service represents an
  # unconfigured one, creating a notice must NOT call the service.
  describe "should not send a notification if a notification service is not configured" do
    let(:app) { Fabricate(:app, :email_at_notices => [1], :<API key> => Fabricate(:<API key>))}
    let(:problem) { Fabricate(:problem, :app => app, :notices_count => 100) }
    let(:backtrace) { Fabricate(:backtrace) }
    before do
      Errbit::Config.<API key> = true
    end
    after do
      Errbit::Config.<API key> = false
    end
    it "should not create a campfire notification" do
      <API key>(app.<API key>.class).to_not receive(:create_notification)
      Notice.create!(:problem => problem, :message => 'FooError: Too Much Bar', :server_environment => {'environment-name' => 'production'},
                     :backtrace => backtrace, :notifier => { 'name' => 'Notifier', 'version' => '1', 'url' => 'http://toad.com' })
    end
  end
  # HipChat-backed service: a new notice must produce a notification.
  # NOTE(review): describe label says "hipcat" — likely a typo for
  # "hipchat", but left as-is (it is a runtime string).
  describe 'hipcat notifications' do
    let(:app) { Fabricate(:app, :email_at_notices => [1], :<API key> => Fabricate(:<API key>))}
    let(:problem) { Fabricate(:problem, :app => app, :notices_count => 100) }
    before do
      Errbit::Config.<API key> = true
    end
    after do
      Errbit::Config.<API key> = false
    end
    it 'creates a hipchat notification' do
      <API key>(app.<API key>.class).to receive(:create_notification)
      Fabricate(:notice, :problem => problem)
    end
  end
describe "should send a notification at desired intervals" do
let(:app) { Fabricate(:app, :email_at_notices => [1], :<API key> => Fabricate(:<API key>, :notify_at_notices => [1,2]))}
let(:backtrace) { Fabricate(:backtrace) }
before do
Errbit::Config.<API key> = true
end
after do
Errbit::Config.<API key> = false
end
it "should create a campfire notification on first notice" do
problem = Fabricate(:problem, :app => app, :notices_count => 1)
<API key>(app.<API key>.class).to receive(:create_notification)
Notice.create!(:problem => problem, :message => 'FooError: Too Much Bar', :server_environment => {'environment-name' => 'production'},
:backtrace => backtrace, :notifier => { 'name' => 'Notifier', 'version' => '1', 'url' => 'http://toad.com' })
end
it "should create a campfire notification on second notice" do
problem = Fabricate(:problem, :app => app, :notices_count => 1)
<API key>(app.<API key>.class).to receive(:create_notification)
Notice.create!(:problem => problem, :message => 'FooError: Too Much Bar', :server_environment => {'environment-name' => 'production'},
:backtrace => backtrace, :notifier => { 'name' => 'Notifier', 'version' => '1', 'url' => 'http://toad.com' })
end
it "should not create a campfire notification on third notice" do
problem = Fabricate(:problem, :app => app, :notices_count => 1)
<API key>(app.<API key>.class).to receive(:create_notification)
Notice.create!(:problem => problem, :message => 'FooError: Too Much Bar', :server_environment => {'environment-name' => 'production'},
:backtrace => backtrace, :notifier => { 'name' => 'Notifier', 'version' => '1', 'url' => 'http://toad.com' })
end
end
end
|
namespace AngleSharp.Dom
{
    using AngleSharp.Attributes;
    using System;

    /// <summary>
    /// The Range interface represents a fragment of a document that can
    /// contain nodes and parts of text nodes in a given document.
    /// </summary>
    [DomName("Range")]
    public interface IRange
    {
        /// <summary>
        /// Gets the node that starts the container.
        /// </summary>
        [DomName("startContainer")]
        INode Head { get; }
        /// <summary>
        /// Gets the offset of the StartContainer in the document.
        /// </summary>
        [DomName("startOffset")]
        Int32 Start { get; }
        /// <summary>
        /// Gets the node that ends the container.
        /// </summary>
        [DomName("endContainer")]
        INode Tail { get; }
        /// <summary>
        /// Gets the offset of the EndContainer in the document.
        /// </summary>
        [DomName("endOffset")]
        Int32 End { get; }
        /// <summary>
        /// Gets a value that indicates if the representation is collapsed.
        /// </summary>
        [DomName("collapsed")]
        Boolean IsCollapsed { get; }
        /// <summary>
        /// Gets the common ancestor node of the contained range.
        /// </summary>
        [DomName("<API key>")]
        INode CommonAncestor { get; }
        /// <summary>
        /// Selects the start of the given range by using the given reference
        /// node and a relative offset.
        /// </summary>
        /// <param name="refNode">The reference node to use.</param>
        /// <param name="offset">
        /// The offset relative to the reference node.
        /// </param>
        [DomName("setStart")]
        void StartWith(INode refNode, Int32 offset);
        /// <summary>
        /// Selects the end of the given range by using the given reference
        /// node and a relative offset.
        /// </summary>
        /// <param name="refNode">The reference node to use.</param>
        /// <param name="offset">
        /// The offset relative to the reference node.
        /// </param>
        [DomName("setEnd")]
        void EndWith(INode refNode, Int32 offset);
        /// <summary>
        /// Selects the start of the given range by using an inclusive
        /// reference node.
        /// </summary>
        /// <param name="refNode">The reference node to use.</param>
        [DomName("setStartBefore")]
        void StartBefore(INode refNode);
        /// <summary>
        /// Selects the end of the given range by using an inclusive reference
        /// node.
        /// </summary>
        /// <param name="refNode">The reference node to use.</param>
        [DomName("setEndBefore")]
        void EndBefore(INode refNode);
        /// <summary>
        /// Selects the start of the given range by using an exclusive
        /// reference node.
        /// </summary>
        /// <param name="refNode">The reference node to use.</param>
        [DomName("setStartAfter")]
        void StartAfter(INode refNode);
        /// <summary>
        /// Selects the end of the given range by using an exclusive reference
        /// node.
        /// </summary>
        /// <param name="refNode">The referenced node.</param>
        [DomName("setEndAfter")]
        void EndAfter(INode refNode);
        /// <summary>
        /// Collapses the range to a single level.
        /// </summary>
        /// <param name="toStart">
        /// Determines if only the first level should be selected.
        /// </param>
        [DomName("collapse")]
        void Collapse(Boolean toStart);
        /// <summary>
        /// Selects the contained node.
        /// </summary>
        /// <param name="refNode">The node to use.</param>
        [DomName("selectNode")]
        void Select(INode refNode);
        /// <summary>
        /// Selects the contained nodes by taking a reference node as origin.
        /// </summary>
        /// <param name="refNode">The reference node.</param>
        [DomName("selectNodeContents")]
        void SelectContent(INode refNode);
        /// <summary>
        /// Clears the contained nodes.
        /// </summary>
        [DomName("deleteContents")]
        void ClearContent();
        /// <summary>
        /// Clears the node representation and returns a document fragment with
        /// the originally contained nodes.
        /// </summary>
        /// <returns>The document fragment containing the nodes.</returns>
        [DomName("extractContents")]
        IDocumentFragment ExtractContent();
        /// <summary>
        /// Creates a document fragment of the contained nodes.
        /// </summary>
        /// <returns>The created document fragment.</returns>
        [DomName("cloneContents")]
        IDocumentFragment CopyContent();
        /// <summary>
        /// Inserts a node into the range.
        /// </summary>
        /// <param name="node">The node to include.</param>
        [DomName("insertNode")]
        void Insert(INode node);
        /// <summary>
        /// Includes the given node with its siblings in the range.
        /// </summary>
        /// <param name="newParent">The range to surround.</param>
        [DomName("surroundContents")]
        void Surround(INode newParent);
        /// <summary>
        /// Creates a copy of this range.
        /// </summary>
        /// <returns>The copy representing the same range.</returns>
        [DomName("cloneRange")]
        IRange Clone();
        /// <summary>
        /// Detaches the range from the DOM tree.
        /// </summary>
        [DomName("detach")]
        void Detach();
        /// <summary>
        /// Checks if the given node is within this range by using an offset.
        /// </summary>
        /// <param name="node">The node to check for.</param>
        /// <param name="offset">The offset to use.</param>
        /// <returns>
        /// True if the point is within the range, otherwise false.
        /// </returns>
        [DomName("isPointInRange")]
        Boolean Contains(INode node, Int32 offset);
        /// <summary>
        /// Compares the boundary points of the range.
        /// </summary>
        /// <param name="how">
        /// Determines how these points should be compared.
        /// </param>
        /// <param name="sourceRange">
        /// The range of the other boundary points.
        /// </param>
        /// <returns>A relative position.</returns>
        [DomName("<API key>")]
        RangePosition CompareBoundaryTo(RangeType how, IRange sourceRange);
        /// <summary>
        /// Compares the node to the given offset and returns the relative
        /// position.
        /// </summary>
        /// <param name="node">The node to use.</param>
        /// <param name="offset">The offset to use.</param>
        /// <returns>The relative position in the range.</returns>
        [DomName("comparePoint")]
        RangePosition CompareTo(INode node, Int32 offset);
        /// <summary>
        /// Checks if the given node is contained in this range.
        /// </summary>
        /// <param name="node">The node to check for.</param>
        /// <returns>
        /// True if the node is within the range, otherwise false.
        /// </returns>
        [DomName("intersectsNode")]
        Boolean Intersects(INode node);
    }
}
|
.PHONY: test
# Run lint + tests. Fix: the suite must run BEFORE `coverage report`,
# otherwise the report reflects stale (or missing) coverage data.
test: pep8 clean
	@nosetests mining/test
	@coverage report
	@$(shell which gulp)
# NOTE(review): the original recipe was `@$(which gulp.js)`, which expands
# an undefined *make* variable named "which gulp.js" and therefore did
# nothing. `$(shell which gulp)` resolves the locally installed gulp binary;
# confirm the intended gulp task.

.PHONY: tox-test
tox-test: environment
	@tox

# Install dev + runtime dependencies and set up the working config.
.PHONY: environment
environment:
	@pip install -r requirements_dev.txt
	@pip install -r requirements.txt
	@pip install numexpr==2.3
	@python setup.py develop
	@npm install gulp gulp-jshint
	@mv mining/mining.sample.ini mining/mining.ini

.PHONY: install
install:
	@python setup.py install

# Style check (flake8); excluded codes/dirs match project conventions.
.PHONY: pep8
pep8:
	@flake8 mining --ignore=F403,F401,F812,E128 --exclude=mining/assets

.PHONY: sdist
sdist: test
	@python setup.py sdist upload

# Remove compiled/backup artifacts.
.PHONY: clean
clean:
	@find ./ -name '*.pyc' -exec rm -f {} \;
	@find ./ -name 'Thumbs.db' -exec rm -f {} \;
	@find ./ -name '*~' -exec rm -f {} \;
|
// <auto-generated/>
#nullable disable
using System.Collections.Generic;
using Azure.Core;

namespace Azure.ResourceManager.Network.Models
{
    /// <summary> Subnet in a virtual network resource. </summary>
    public partial class Subnet : SubResource
    {
        /// <summary> Initializes a new instance of Subnet. </summary>
        public Subnet()
        {
            AddressPrefixes = new ChangeTrackingList<string>();
            ServiceEndpoints = new ChangeTrackingList<<API key>>();
            <API key> = new ChangeTrackingList<<API key>>();
            PrivateEndpoints = new ChangeTrackingList<PrivateEndpoint>();
            IpConfigurations = new ChangeTrackingList<IPConfiguration>();
            <API key> = new ChangeTrackingList<<API key>>();
            IpAllocations = new ChangeTrackingList<SubResource>();
            <API key> = new ChangeTrackingList<<API key>>();
            <API key> = new ChangeTrackingList<<API key>>();
            Delegations = new ChangeTrackingList<Delegation>();
        }
        /// <summary> Initializes a new instance of Subnet. </summary>
        /// <param name="id"> Resource ID. </param>
        /// <param name="name"> The name of the resource that is unique within a resource group. This name can be used to access the resource. </param>
        /// <param name="etag"> A unique read-only string that changes whenever the resource is updated. </param>
        /// <param name="addressPrefix"> The address prefix for the subnet. </param>
        /// <param name="addressPrefixes"> List of address prefixes for the subnet. </param>
        /// <param name="<API key>"> The reference to the <API key> resource. </param>
        /// <param name="routeTable"> The reference to the RouteTable resource. </param>
        /// <param name="natGateway"> Nat gateway associated with this subnet. </param>
        /// <param name="serviceEndpoints"> An array of service endpoints. </param>
        /// <param name="<API key>"> An array of service endpoint policies. </param>
        /// <param name="privateEndpoints"> An array of references to private endpoints. </param>
        /// <param name="ipConfigurations"> An array of references to the network interface IP configurations using subnet. </param>
        /// <param name="<API key>"> Array of IP configuration profiles which reference this subnet. </param>
        /// <param name="ipAllocations"> Array of IpAllocation which reference this subnet. </param>
        /// <param name="<API key>"> An array of references to the external resources using subnet. </param>
        /// <param name="<API key>"> An array of references to services injecting into this subnet. </param>
        /// <param name="delegations"> An array of references to the delegations on the subnet. </param>
        /// <param name="purpose"> A read-only string identifying the intention of use for this subnet based on delegations and other user-defined properties. </param>
        /// <param name="provisioningState"> The provisioning state of the subnet resource. </param>
        /// <param name="<API key>"> Enable or Disable apply network policies on private end point in the subnet. </param>
        /// <param name="<API key>"> Enable or Disable apply network policies on private link service in the subnet. </param>
        internal Subnet(string id, string name, string etag, string addressPrefix, IList<string> addressPrefixes, <API key> <API key>, RouteTable routeTable, SubResource natGateway, IList<<API key>> serviceEndpoints, IList<<API key>> <API key>, IReadOnlyList<PrivateEndpoint> privateEndpoints, IReadOnlyList<IPConfiguration> ipConfigurations, IReadOnlyList<<API key>> <API key>, IList<SubResource> ipAllocations, IReadOnlyList<<API key>> <API key>, IReadOnlyList<<API key>> <API key>, IList<Delegation> delegations, string purpose, ProvisioningState? provisioningState, string <API key>, string <API key>) : base(id)
        {
            Name = name;
            Etag = etag;
            AddressPrefix = addressPrefix;
            AddressPrefixes = addressPrefixes;
            <API key> = <API key>;
            RouteTable = routeTable;
            NatGateway = natGateway;
            ServiceEndpoints = serviceEndpoints;
            <API key> = <API key>;
            PrivateEndpoints = privateEndpoints;
            IpConfigurations = ipConfigurations;
            <API key> = <API key>;
            IpAllocations = ipAllocations;
            <API key> = <API key>;
            <API key> = <API key>;
            Delegations = delegations;
            Purpose = purpose;
            ProvisioningState = provisioningState;
            <API key> = <API key>;
            <API key> = <API key>;
        }
        /// <summary> The name of the resource that is unique within a resource group. This name can be used to access the resource. </summary>
        public string Name { get; set; }
        /// <summary> A unique read-only string that changes whenever the resource is updated. </summary>
        public string Etag { get; }
        /// <summary> The address prefix for the subnet. </summary>
        public string AddressPrefix { get; set; }
        /// <summary> List of address prefixes for the subnet. </summary>
        public IList<string> AddressPrefixes { get; }
        /// <summary> The reference to the <API key> resource. </summary>
        public <API key> <API key> { get; set; }
        /// <summary> The reference to the RouteTable resource. </summary>
        public RouteTable RouteTable { get; set; }
        /// <summary> Nat gateway associated with this subnet. </summary>
        public SubResource NatGateway { get; set; }
        /// <summary> An array of service endpoints. </summary>
        public IList<<API key>> ServiceEndpoints { get; }
        /// <summary> An array of service endpoint policies. </summary>
        public IList<<API key>> <API key> { get; }
        /// <summary> An array of references to private endpoints. </summary>
        public IReadOnlyList<PrivateEndpoint> PrivateEndpoints { get; }
        /// <summary> An array of references to the network interface IP configurations using subnet. </summary>
        public IReadOnlyList<IPConfiguration> IpConfigurations { get; }
        /// <summary> Array of IP configuration profiles which reference this subnet. </summary>
        public IReadOnlyList<<API key>> <API key> { get; }
        /// <summary> Array of IpAllocation which reference this subnet. </summary>
        public IList<SubResource> IpAllocations { get; }
        /// <summary> An array of references to the external resources using subnet. </summary>
        public IReadOnlyList<<API key>> <API key> { get; }
        /// <summary> An array of references to services injecting into this subnet. </summary>
        public IReadOnlyList<<API key>> <API key> { get; }
        /// <summary> An array of references to the delegations on the subnet. </summary>
        public IList<Delegation> Delegations { get; }
        /// <summary> A read-only string identifying the intention of use for this subnet based on delegations and other user-defined properties. </summary>
        public string Purpose { get; }
        /// <summary> The provisioning state of the subnet resource. </summary>
        public ProvisioningState? ProvisioningState { get; }
        /// <summary> Enable or Disable apply network policies on private end point in the subnet. </summary>
        public string <API key> { get; set; }
        /// <summary> Enable or Disable apply network policies on private link service in the subnet. </summary>
        public string <API key> { get; set; }
    }
}
|
// MMApiObject.h
// Api
#import <Foundation/Foundation.h>
/*! @brief Error codes reported back by the SDK.
 */
enum WXErrCode {
    WXSuccess = 0,            // success
    WXErrCodeCommon = -1,     // generic failure
    WXErrCodeUserCancel = -2, // user cancelled the operation
    WXErrCodeSentFail = -3,   // sending failed
    WXErrCodeAuthDeny = -4,   // authorization denied
    WXErrCodeUnsupport = -5,  // operation not supported
};

/*! @brief Target scene for a share/send request.
 */
enum WXScene {
    WXSceneSession = 0,   // chat session
    WXSceneTimeline = 1,  // timeline / moments
    WXSceneFavorite = 2,  // favorites
};

enum WXAPISupport {
    WXAPISupportSession = 0,
};

/*! @brief Profile type for jump-to-profile requests.
 *  Bug fix: the two members previously ended in a bare "/" (a truncated
 *  "//" comment), which is a syntax error; restored as comments. Original
 *  comment text is lost — member names are redacted, so semantics are
 *  unknown (TODO confirm against the SDK).
 */
enum WXBizProfileType {
    <API key> = 0, //
    <API key> = 1, //
};

/*! @brief Webview type for the in-app (MP) browser.
 */
enum WXMPWebviewType {
    WXMPWebviewType_Ad = 0,  // advertisement webview
};
#pragma mark - BaseReq
/*! @brief Base class for all requests sent to the SDK.
 */
@interface BaseReq : NSObject

// Request type discriminator.
@property (nonatomic, assign) int type;
/** Open ID associated with the AppID. */
@property (nonatomic, retain) NSString* openID;

@end

#pragma mark - BaseResp
/*! @brief Base class for all responses returned by the SDK.
 */
@interface BaseResp : NSObject

// Error code; see WXErrCode.
@property (nonatomic, assign) int errCode;
// Human-readable error description.
@property (nonatomic, retain) NSString *errStr;
// Response type discriminator.
@property (nonatomic, assign) int type;

@end

#pragma mark - WXMediaMessage
@class WXMediaMessage;

/*! @brief Payment request.
 *
 * @see PayResp
 */
@interface PayReq : BaseReq

@property (nonatomic, retain) NSString *partnerId;
@property (nonatomic, retain) NSString *prepayId;
@property (nonatomic, retain) NSString *nonceStr;
@property (nonatomic, assign) UInt32 timeStamp;
@property (nonatomic, retain) NSString *package;
@property (nonatomic, retain) NSString *sign;

@end

#pragma mark - PayResp
/*! @brief Payment response.
 */
@interface PayResp : BaseResp

@property (nonatomic, retain) NSString *returnKey;

@end

#pragma mark - SendAuthReq
/*! @brief OAuth authorization request, sent via the SDK's sendReq.
 *
 * @see SendAuthResp
 */
@interface SendAuthReq : BaseReq
/** Requested authorization scope.
 * @see SendAuthResp
 * @note limited to 1K (per the original, partially lost, comment)
 */
@property (nonatomic, retain) NSString* scope;
/** Opaque state echoed back in the response.
 * @note limited to 1K (per the original, partially lost, comment)
 */
@property (nonatomic, retain) NSString* state;
@end

#pragma mark - SendAuthResp
/*! @brief OAuth authorization response.
 *
 * Delivered to the app after a SendAuthReq.
 * @see onResp
 */
@interface SendAuthResp : BaseResp

// Authorization code on success.
@property (nonatomic, retain) NSString* code;
/** State value passed in the matching SendAuthReq.
 * @note limited to 1K (per the original, partially lost, comment)
 */
@property (nonatomic, retain) NSString* state;
@property (nonatomic, retain) NSString* lang;
@property (nonatomic, retain) NSString* country;
@end
#pragma mark - SendMessageToWXReq
/*! @brief Request to share content (text or media) to the host app.
 *
 * Carries either plain text or a WXMediaMessage, selected by bText.
 * @see SendMessageToWXResp
 */
@interface SendMessageToWXReq : BaseReq
/** Plain-text content.
 * @note used when bText is YES; limited to 0-10K (per original comment)
 */
@property (nonatomic, retain) NSString* text;
/** Structured media content, used when bText is NO.
 * @see WXMediaMessage
 */
@property (nonatomic, retain) WXMediaMessage* message;
// YES: `text` is the payload; NO: `message` is the payload.
@property (nonatomic, assign) BOOL bText;
/** Destination scene: session (WXSceneSession) or timeline (WXSceneTimeline).
 * @see WXScene
 */
@property (nonatomic, assign) int scene;
@end

#pragma mark - SendMessageToWXResp
/*! @brief Response to a SendMessageToWXReq.
 */
@interface SendMessageToWXResp : BaseResp
@property(nonatomic, retain) NSString* lang;
@property(nonatomic, retain) NSString* country;
@end

#pragma mark - GetMessageFromWXReq
/*! @brief Request from the host app asking this app for content.
 *
 * Answered by sending the redacted GetMessage response type below.
 */
@interface GetMessageFromWXReq : BaseReq
@property (nonatomic, retain) NSString* lang;
@property (nonatomic, retain) NSString* country;
@end

#pragma mark - <API key>
/*! @brief Response supplying the content requested by GetMessageFromWXReq.
 */
@interface <API key> : BaseResp
/** Plain-text content.
 @note used when bText is YES; limited to 0-10K (per original comment)
 */
@property (nonatomic, retain) NSString* text;
/** Structured media content, used when bText is NO.
 * @see WXMediaMessage
 */
@property (nonatomic, retain) WXMediaMessage* message;
// YES: `text` is the payload; NO: `message` is the payload.
@property (nonatomic, assign) BOOL bText;
@end

#pragma mark - <API key>
/*! @brief Request delivered when the host app shows content in this app.
 *
 * Acknowledged with the matching ShowMessage response type.
 */
@interface <API key> : BaseReq
/** The media content to display.
 * @see WXMediaMessage
 */
@property (nonatomic, retain) WXMediaMessage* message;
@property (nonatomic, retain) NSString* lang;
@property (nonatomic, retain) NSString* country;
@end

#pragma mark - <API key>
/*! @brief Acknowledgement for the ShowMessage request above.
 */
@interface <API key> : BaseResp
@end

#pragma mark - LaunchFromWXReq
/*! @brief Request delivered when this app is launched from the host app.
 */
@interface LaunchFromWXReq : BaseReq
@property (nonatomic, retain) WXMediaMessage* message;
@property (nonatomic, retain) NSString* lang;
@property (nonatomic, retain) NSString* country;
@end
#pragma mark - JumpToBizProfileReq
/* ! @brief Request to open an official-account (biz) profile page inside WeChat. */
@interface JumpToBizProfileReq : BaseReq
/** Username of the profile to open.
 * @attention Max length 512 per the original note.
 */
@property (nonatomic, retain) NSString* username;
/** Extra information passed along with the jump.
 * @attention Max length 1024 per the original note.
 */
@property (nonatomic, retain) NSString* extMsg;
/** Profile type selector.
 * @see WXBizProfileType
 */
@property (nonatomic, assign) int profileType;
@end
#pragma mark - JumpToBizWebviewReq
/* ! @brief Request to open a biz webview for the given username inside WeChat. */
@interface JumpToBizWebviewReq : BaseReq
/** Webview type selector.
 * @see WXMPWebviewType
 */
@property(nonatomic, assign) int webType;
/** Target username.
 * @attention Max length 512 per the original note.
 */
@property(nonatomic, retain) NSString* tousrname;
/** Extra information passed along with the jump.
 * @attention Max length 1024 per the original note.
 */
@property(nonatomic, retain) NSString* extMsg;
@end
#pragma mark - WXCardItem
/*! @brief One membership-card entry exchanged through the WeChat card APIs. */
@interface WXCardItem : NSObject
/** Card id.
 * @attention Max length 1024 per the original note.
 */
@property (nonatomic,retain) NSString* cardId;
/** Extra card information.
 * @attention Max length 2024 per the original note.
 */
@property (nonatomic,retain) NSString* extMsg;
/** Card state; the original note indicates 0/1 and response-only semantics. */
@property (nonatomic,assign) UInt32 cardState;
@end
#pragma mark - <API key>
/* ! @brief Request carrying a batch of cards. */
@interface <API key> : BaseReq
/** Cards in this request.
 * @attention Max 40 entries; each element is a WXCardItem.
 */
@property (nonatomic,retain) NSArray* cardAry;
@end
#pragma mark - <API key>
/** ! @brief Response carrying the resulting card batch. */
@interface <API key> : BaseResp
/** Resulting cards.
 * @attention Max 40 entries; each element is a WXCardItem.
 */
@property (nonatomic,retain) NSArray* cardAry;
@end
#pragma mark - WXMediaMessage
/*! @brief Multimedia message envelope shared with WeChat.
 *
 * Wraps a concrete media object (image, music, video, webpage, ...).
 */
@interface WXMediaMessage : NSObject
/** Convenience factory returning a new instance. */
+(WXMediaMessage *) message;
/** Title.
 * @note Max length 512 per the original note.
 */
@property (nonatomic, retain) NSString *title;
/** Description text.
 * @note Max length 1K per the original note.
 */
@property (nonatomic, retain) NSString *description;
/** Thumbnail image data.
 * @note Max size 32K per the original note.
 */
@property (nonatomic, retain) NSData *thumbData;
/** Media tag name.
 * @note Max length 64 per the original note.
 */
@property (nonatomic, retain) NSString *mediaTagName;
@property (nonatomic, retain) NSString *messageExt;
@property (nonatomic, retain) NSString *messageAction;
/** Concrete payload: a WXImageObject, WXMusicObject, WXVideoObject, WXWebpageObject, etc. */
@property (nonatomic, retain) id mediaObject;
/*! @brief Populates thumbData from a UIImage.
 *
 * @param image Thumbnail image.
 * @note Resulting data must stay within the 32K limit noted above.
 */
- (void) setThumbImage:(UIImage *)image;
@end
#pragma mark - WXImageObject
/*! @brief Image payload for a WXMediaMessage.
 *
 * @note Provide either imageData or imageUrl.
 * @see WXMediaMessage
 */
@interface WXImageObject : NSObject
/*! @brief Convenience factory returning a new WXImageObject. */
+(WXImageObject *) object;
/** Raw image data.
 * @note Max size 10M per the original note.
 */
@property (nonatomic, retain) NSData *imageData;
/** Image URL.
 * @note Max length 10K per the original note.
 */
@property (nonatomic, retain) NSString *imageUrl;
@end
#pragma mark - WXMusicObject
/*! @brief Music payload for a WXMediaMessage.
 *
 * @note Provide musicUrl and/or musicLowBandUrl per the original note.
 * @see WXMediaMessage
 */
@interface WXMusicObject : NSObject
/*! @brief Convenience factory returning a new WXMusicObject. */
+(WXMusicObject *) object;
/** Music page URL.
 * @note Max length 10K per the original note.
 */
@property (nonatomic, retain) NSString *musicUrl;
/** Low-bandwidth music page URL.
 * @note Max length 10K per the original note.
 */
@property (nonatomic, retain) NSString *musicLowBandUrl;
/** Music stream/data URL.
 * @note Max length 10K per the original note.
 */
@property (nonatomic, retain) NSString *musicDataUrl;
/** Low-bandwidth music stream/data URL.
 * @note Max length 10K per the original note.
 */
@property (nonatomic, retain) NSString *musicLowBandDataUrl;
@end
#pragma mark - WXVideoObject
/*! @brief Video payload for a WXMediaMessage.
 *
 * @note Provide videoUrl and/or videoLowBandUrl per the original note.
 * @see WXMediaMessage
 */
@interface WXVideoObject : NSObject
/*! @brief Convenience factory returning a new WXVideoObject. */
+(WXVideoObject *) object;
/** Video URL.
 * @note Max length 10K per the original note.
 */
@property (nonatomic, retain) NSString *videoUrl;
/** Low-bandwidth video URL.
 * @note Max length 10K per the original note.
 */
@property (nonatomic, retain) NSString *videoLowBandUrl;
@end
#pragma mark - WXWebpageObject
/*! @brief Webpage payload for a WXMediaMessage.
 *
 * @see WXMediaMessage
 */
@interface WXWebpageObject : NSObject
/*! @brief Convenience factory returning a new WXWebpageObject. */
+(WXWebpageObject *) object;
/** Webpage URL.
 * @note Max length 10K per the original note.
 */
@property (nonatomic, retain) NSString *webpageUrl;
@end
#pragma mark - WXAppExtendObject
/*! @brief App-extension payload for a WXMediaMessage.
 *
 * Carries a URL, opaque extra info, and/or an attached file.
 * @note Per the original note, url, extInfo and fileData are related/alternative fields.
 * @see WXMediaMessage
 */
@interface WXAppExtendObject : NSObject
/*! @brief Convenience factory returning a new WXAppExtendObject. */
+(WXAppExtendObject *) object;
/** URL associated with the app content.
 * @note Max length 10K per the original note.
 */
@property (nonatomic, retain) NSString *url;
/** Opaque extra information for the receiving app.
 * @note Max length 2K per the original note.
 */
@property (nonatomic, retain) NSString *extInfo;
/** Attached file data.
 * @note Max size 10M per the original note.
 */
@property (nonatomic, retain) NSData *fileData;
@end
#pragma mark - WXEmoticonObject
/*! @brief Emoticon (sticker) payload for a WXMediaMessage.
 *
 * @see WXMediaMessage
 */
@interface WXEmoticonObject : NSObject
/*! @brief Convenience factory returning a new WXEmoticonObject. */
+(WXEmoticonObject *) object;
/** Emoticon image data.
 * @note Max size 10M per the original note.
 */
@property (nonatomic, retain) NSData *emoticonData;
@end
#pragma mark - WXFileObject
/*! @brief Generic file payload for a WXMediaMessage.
 *
 * @see WXMediaMessage
 */
@interface WXFileObject : NSObject
/*! @brief Convenience factory returning a new WXFileObject. */
+(WXFileObject *) object;
/** File extension (e.g. "pdf").
 * @note Max length 64 per the original note.
 */
@property (nonatomic, retain) NSString *fileExtension;
/** File contents.
 * @note Max size 10M per the original note.
 */
@property (nonatomic, retain) NSData *fileData;
@end
|
import { argv } from 'yargs';
import * as CONFIG from '../../config';
/**
* Returns the project configuration (consisting of the base configuration provided by seed.config.ts and the additional
* project specific overrides as defined in project.config.ts)
*/
export function templateLocals() {
const configEnvName = argv['config-env'] || 'dev';
const configEnv = CONFIG.getPluginConfig('environment-config')[configEnvName];
if (!configEnv) {
throw new Error('Invalid configuration name');
}
const config = {
ENV_CONFIG: JSON.stringify(configEnv)
};
return Object.assign(config, CONFIG);
}
|
import { Template } from 'meteor/templating';
// Template helpers for the messageAction template.
Template.messageAction.helpers({
// True when this action renders as a button (as opposed to other action types).
isButton() {
return this.type === 'button';
},
// Reads the parent template's data context; true for horizontal button layout.
<API key>() {
return Template.parentData(1).button_alignment === 'horizontal';
},
// Builds the CSS class used to dispatch the button's click handler;
// falls back to the 'sendMessage' action when no processing type is given.
<API key>(processingType) {
return `js-actionButton-${ processingType || 'sendMessage' }`;
},
});
|
<?php
namespace Illuminate\Tests\Database;
use Illuminate\Database\Connection;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Schema\Grammars\MySqlGrammar;
use Illuminate\Database\Schema\Grammars\PostgresGrammar;
use Illuminate\Database\Schema\Grammars\SQLiteGrammar;
use Illuminate\Database\Schema\Grammars\SqlServerGrammar;
use Mockery as m;
use PHPUnit\Framework\TestCase;
/**
 * Tests for the schema Blueprint: build() dispatch to the connection,
 * default index naming (with and without table prefixes), useCurrent()
 * SQL generation across the four bundled grammars, removeColumn(),
 * and macro support on Blueprint/grammar.
 */
class <API key> extends TestCase
{
// Close Mockery so unmet expectations fail the test.
protected function tearDown(): void
{
m::close();
}
// build() must execute every SQL statement produced by toSql() on the connection.
public function <API key>()
{
$conn = m::mock(Connection::class);
$conn->shouldReceive('statement')->once()->with('foo');
$conn->shouldReceive('statement')->once()->with('bar');
$grammar = m::mock(MySqlGrammar::class);
// NOTE(review): setMethods() is deprecated in recent PHPUnit; onlyMethods() is the replacement.
$blueprint = $this->getMockBuilder(Blueprint::class)->setMethods(['toSql'])->setConstructorArgs(['users'])->getMock();
$blueprint->expects($this->once())->method('toSql')->with($this->equalTo($conn), $this->equalTo($grammar))->willReturn(['foo', 'bar']);
$blueprint->build($conn, $grammar);
}
// Default index names are derived from table + columns + index type.
public function <API key>()
{
$blueprint = new Blueprint('users');
$blueprint->unique(['foo', 'bar']);
$commands = $blueprint->getCommands();
$this->assertSame('<API key>', $commands[0]->index);
$blueprint = new Blueprint('users');
$blueprint->index('foo');
$commands = $blueprint->getCommands();
$this->assertSame('users_foo_index', $commands[0]->index);
$blueprint = new Blueprint('geo');
$blueprint->spatialIndex('coordinates');
$commands = $blueprint->getCommands();
$this->assertSame('<API key>', $commands[0]->index);
}
// Same as above, but the table prefix must be baked into the generated name.
public function <API key>()
{
$blueprint = new Blueprint('users', null, 'prefix_');
$blueprint->unique(['foo', 'bar']);
$commands = $blueprint->getCommands();
$this->assertSame('<API key>', $commands[0]->index);
$blueprint = new Blueprint('users', null, 'prefix_');
$blueprint->index('foo');
$commands = $blueprint->getCommands();
$this->assertSame('<API key>', $commands[0]->index);
$blueprint = new Blueprint('geo', null, 'prefix_');
$blueprint->spatialIndex('coordinates');
$commands = $blueprint->getCommands();
$this->assertSame('<API key>', $commands[0]->index);
}
// Dropping by column list must resolve to the same default index names.
public function <API key>()
{
$blueprint = new Blueprint('users');
$blueprint->dropUnique(['foo', 'bar']);
$commands = $blueprint->getCommands();
$this->assertSame('<API key>', $commands[0]->index);
$blueprint = new Blueprint('users');
$blueprint->dropIndex(['foo']);
$commands = $blueprint->getCommands();
$this->assertSame('users_foo_index', $commands[0]->index);
$blueprint = new Blueprint('geo');
$blueprint->dropSpatialIndex(['coordinates']);
$commands = $blueprint->getCommands();
$this->assertSame('<API key>', $commands[0]->index);
}
// Drop-by-column with a table prefix must include the prefix in the resolved name.
public function <API key>()
{
$blueprint = new Blueprint('users', null, 'prefix_');
$blueprint->dropUnique(['foo', 'bar']);
$commands = $blueprint->getCommands();
$this->assertSame('<API key>', $commands[0]->index);
$blueprint = new Blueprint('users', null, 'prefix_');
$blueprint->dropIndex(['foo']);
$commands = $blueprint->getCommands();
$this->assertSame('<API key>', $commands[0]->index);
$blueprint = new Blueprint('geo', null, 'prefix_');
$blueprint->dropSpatialIndex(['coordinates']);
$commands = $blueprint->getCommands();
$this->assertSame('<API key>', $commands[0]->index);
}
// dateTime()->useCurrent() must emit a CURRENT_TIMESTAMP default in every grammar.
public function <API key>()
{
$base = new Blueprint('users', function ($table) {
$table->dateTime('created')->useCurrent();
});
$connection = m::mock(Connection::class);
$blueprint = clone $base;
$this->assertEquals(['alter table `users` add `created` datetime default CURRENT_TIMESTAMP not null'], $blueprint->toSql($connection, new MySqlGrammar));
$blueprint = clone $base;
$this->assertEquals(['alter table "users" add column "created" timestamp(0) without time zone default CURRENT_TIMESTAMP not null'], $blueprint->toSql($connection, new PostgresGrammar));
$blueprint = clone $base;
$this->assertEquals(['alter table "users" add column "created" datetime default CURRENT_TIMESTAMP not null'], $blueprint->toSql($connection, new SQLiteGrammar));
$blueprint = clone $base;
$this->assertEquals(['alter table "users" add "created" datetime default CURRENT_TIMESTAMP not null'], $blueprint->toSql($connection, new SqlServerGrammar));
}
// timestamp()->useCurrent() likewise, with grammar-specific column types.
public function <API key>()
{
$base = new Blueprint('users', function ($table) {
$table->timestamp('created')->useCurrent();
});
$connection = m::mock(Connection::class);
$blueprint = clone $base;
$this->assertEquals(['alter table `users` add `created` timestamp default CURRENT_TIMESTAMP not null'], $blueprint->toSql($connection, new MySqlGrammar));
$blueprint = clone $base;
$this->assertEquals(['alter table "users" add column "created" timestamp(0) without time zone default CURRENT_TIMESTAMP not null'], $blueprint->toSql($connection, new PostgresGrammar));
$blueprint = clone $base;
$this->assertEquals(['alter table "users" add column "created" datetime default CURRENT_TIMESTAMP not null'], $blueprint->toSql($connection, new SQLiteGrammar));
$blueprint = clone $base;
$this->assertEquals(['alter table "users" add "created" datetime default CURRENT_TIMESTAMP not null'], $blueprint->toSql($connection, new SqlServerGrammar));
}
// useCurrent() on an unsignedDecimal must be ignored (no default emitted).
public function <API key>()
{
$base = new Blueprint('users', function ($table) {
$table->unsignedDecimal('money', 10, 2)->useCurrent();
});
$connection = m::mock(Connection::class);
$blueprint = clone $base;
$this->assertEquals(['alter table `users` add `money` decimal(10, 2) unsigned not null'], $blueprint->toSql($connection, new MySqlGrammar));
}
// removeColumn() must strip a previously added column from the generated SQL.
public function testRemoveColumn()
{
$base = new Blueprint('users', function ($table) {
$table->string('foo');
$table->string('remove_this');
$table->removeColumn('remove_this');
});
$connection = m::mock(Connection::class);
$blueprint = clone $base;
$this->assertEquals(['alter table `users` add `foo` varchar(255) not null'], $blueprint->toSql($connection, new MySqlGrammar));
}
// Blueprint and grammar macros must be usable as first-class commands/compilers.
public function testMacroable()
{
Blueprint::macro('foo', function () {
return $this->addCommand('foo');
});
MySqlGrammar::macro('compileFoo', function () {
return 'bar';
});
$blueprint = new Blueprint('users', function ($table) {
$table->foo();
});
$connection = m::mock(Connection::class);
$this->assertEquals(['bar'], $blueprint->toSql($connection, new MySqlGrammar));
}
}
|
#nullable enable
using System;
using System.Collections.Generic;
using System.Composition;
using System.Linq;
using System.Threading;
using EnvDTE;
using EnvDTE80;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.Extensions;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.Options;
using Microsoft.VisualStudio.Shell;
using Roslyn.Utilities;
using Task = System.Threading.Tasks.Task;
namespace Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem
{
// Watches the workspace for an analyzer config (.editorconfig) document added
// at the root of the open solution and, unless the user opted out, shows an
// info bar offering to add it as a solution item.
[Export, Shared]
internal partial class <API key> : IDisposable
{
private static readonly string LocalRegistryPath = $@"Roslyn\Internal\{nameof(<API key>)}\";
// Per-user opt-out flag, persisted in the local registry.
private static readonly Option<bool> NeverShowAgain = new Option<bool>(nameof(<API key>), nameof(NeverShowAgain),
defaultValue: false, storageLocations: new <API key>(LocalRegistryPath + nameof(NeverShowAgain)));
private readonly <API key> _workspace;
private readonly IThreadingContext _threadingContext;
private DTE? _dte;
// Tracks whether the info bar has already been shown for the current solution.
private bool <API key>;
[<API key>]
[Obsolete(MefConstruction.<API key>, error: true)]
public <API key>(
<API key> workspace,
IThreadingContext threadingContext)
{
_workspace = workspace;
_threadingContext = threadingContext;
_workspace.WorkspaceChanged += OnWorkspaceChanged;
}
// Must be called once from the package with a service provider that can resolve DTE.
public void Initialize(IServiceProvider serviceProvider)
=> _dte = (DTE)serviceProvider.GetService(typeof(DTE));
void IDisposable.Dispose()
=> _workspace.WorkspaceChanged -= OnWorkspaceChanged;
private void OnWorkspaceChanged(object sender, <API key> e)
{
switch (e.Kind)
{
case WorkspaceChangeKind.SolutionAdded:
// New solution: reset the "already shown" flag.
<API key> = false;
return;
// Check if a new analyzer config document was added
case WorkspaceChangeKind.<API key>:
break;
default:
return;
}
// Bail out if we have a null DTE instance or we have already shown the info bar for current solution.
if (_dte == null ||
<API key>)
{
return;
}
// Check if added analyzer config document is at the root of the current solution.
var <API key> = e.NewSolution.<API key>(e.DocumentId)?.FilePath;
var <API key> = PathUtilities.GetDirectoryName(<API key>);
var solutionDirectory = PathUtilities.GetDirectoryName(e.NewSolution.FilePath);
if (<API key> == null ||
<API key> == null ||
<API key> != solutionDirectory)
{
return;
}
// Check if user has explicitly disabled the suggestion to add newly added analyzer config document as solution item.
if (_workspace.Options.GetOption(NeverShowAgain))
{
return;
}
// Kick off a task to show info bar to make it a solution item.
// The info bar APIs require the UI thread, hence the switch below.
Task.Run(async () =>
{
await _threadingContext.JoinableTaskFactory.<API key>();
var solution = (Solution2)_dte.Solution;
// Skip when the document is already a solution item.
if (<API key>.<API key>(solution, <API key>, out _, out var <API key>) &&
<API key>)
{
return;
}
if (!<API key>)
{
<API key> = true;
var infoBarService = _workspace.Services.GetRequiredService<IInfoBarService>();
infoBarService.<API key>(
ServicesVSResources.A_new_editorconfig_file_was_detected_at_the_root_of_your_solution_Would_you_like_to_make_it_a_solution_item,
GetInfoBarUIItems().ToArray());
}
});
return;
// Local functions
IEnumerable<InfoBarUI> GetInfoBarUIItems()
{
// Yes - add editorconfig solution item.
yield return new InfoBarUI(
title: ServicesVSResources.Yes,
kind: InfoBarUI.UIKind.Button,
action: <API key>,
closeAfterAction: true);
// No - do not add editorconfig solution item.
yield return new InfoBarUI(
title: ServicesVSResources.No,
kind: InfoBarUI.UIKind.Button,
action: () => { },
closeAfterAction: true);
// Don't show the InfoBar again link
yield return new InfoBarUI(title: ServicesVSResources.<API key>,
kind: InfoBarUI.UIKind.Button,
action: () => _workspace.TryApplyChanges(_workspace.CurrentSolution.WithOptions(_workspace.Options.WithChangedOption(NeverShowAgain, true))),
closeAfterAction: true);
}
// Performs the actual "add as solution item" operation.
// NOTE(review): blocks the calling thread via .Wait(); appears intentional
// since it runs from an info-bar button callback — confirm.
void <API key>()
{
var <API key> = _workspace.Services.GetRequiredService<<API key>>();
<API key>.<API key>(<API key>, CancellationToken.None).Wait();
}
}
}
}
|
import fs from 'fs';
import url from 'url';
import path from 'path';
import mime from 'mime-types';
import gulp from 'gulp';
import createServerTask from './tasks/server';
import consoleArguments from './console-arguments';
import { adminBundle } from './admin-bundle.tasks';
import { dashboardBundle } from './dashboard-bundle.tasks';
import { mediaBundle } from './media-bundle.tasks';
import { translatorBundle } from './translator-bundle.tasks';
const BUNDLES = [adminBundle, dashboardBundle, mediaBundle, translatorBundle];
/**
 * Attempts to serve the requested file from one of the given bundle dist
 * directories. Streams the first match to `res` and returns undefined;
 * returns an Error when no bundle contains the file (the truthy return
 * value is what the caller uses to fall through to the default behavior).
 */
const writeToResponse = (req, res, bundlePaths) => {
    const { pathname } = url.parse(req.url);
    for (const bundlePath of bundlePaths) {
        const filePath = path.normalize(bundlePath + pathname);
        let stat;
        try {
            stat = fs.statSync(filePath);
        } catch (e) {
            continue; // Does not exist in this bundle, try the next one
        }
        if (stat && stat.isFile()) {
            // mime.lookup returns false for unknown extensions; fall back to a
            // generic binary type so we never emit "Content-Type: false".
            const contentType = mime.lookup(path.extname(filePath)) || 'application/octet-stream';
            res.writeHead(200, {
                'Content-Type': contentType,
                'Content-Length': stat.size
            });
            fs.createReadStream(filePath).pipe(res);
            return;
        }
    }
    return new Error(`Local file for ${req.url} not found`);
};
/**
 * BrowserSync middleware: serve the request from a local bundle build when
 * possible; otherwise defer to the proxy/default handling via next().
 */
const handleRequest = (req, res, next) => {
    const distPaths = BUNDLES.map((bundle) => bundle.config.distPath);
    const notServed = writeToResponse(req, res, distPaths);
    if (notServed) {
        // Nothing we can write to the stream, fallback to the default behavior
        return next();
    }
};
// Dev server task: proxies the backend given on the CLI and serves bundle
// assets from local dist builds through the handleRequest middleware.
const startLocalTask = createServerTask({
config: {
ui: false,
ghostMode: false,
open: false,
reloadOnRestart: true,
notify: true,
proxy: { target: consoleArguments.backendProxy },
middleware: BUNDLES.map(bundle => { return { route: bundle.config.publicPath, handle: handleRequest } })
}
});
// Watches each bundle's JS and SCSS sources and re-runs the matching build task.
// NOTE(review): the glob patterns below look mangled — e.g. 'js!(*.spec).js'
// has no path separator after srcPath; presumably these were originally of the
// form 'js/**/!(*.spec).js'. Verify against the original build configuration.
export const buildOnChange = (done) => {
for (const bundle of BUNDLES) {
const srcPath = bundle.config.srcPath;
const jsAssets = srcPath + 'js!(*.spec).js';
gulp.watch(jsAssets, bundle.tasks.scripts);
if (bundle.tasks.bundle) {
const jsNextAssets = srcPath + 'jsnext!(*.spec).js';
gulp.watch(jsNextAssets, bundle.tasks.bundle);
}
const styleAssets = srcPath + 'scss*.scss';
gulp.watch(styleAssets, bundle.tasks.cssOptimized);
if (bundle.tasks.cssNextOptimized) {
const styleNextAssets = srcPath + 'scssnext*.scss';
gulp.watch(styleNextAssets, bundle.tasks.cssNextOptimized);
}
}
done();
};
// Watches each bundle's sources and re-runs its lint tasks when files change.
// NOTE(review): as with buildOnChange, the glob patterns appear to be missing
// path separators after srcPath — confirm against the original configuration.
export function testOnChange(done) {
for (const bundle of BUNDLES) {
if (bundle.tasks.eslint) {
const srcPath = bundle.config.srcPath;
gulp.watch(`${srcPath}jsnext*.js`, bundle.tasks.eslint);
}
if (bundle.tasks.stylelint) {
const srcPath = bundle.config.srcPath;
gulp.watch(`${srcPath}scssnext*.scss`, bundle.tasks.stylelint);
}
}
done();
}
export default startLocalTask;
|
'use strict';
// Unit tests for the Initiatives service: successful GET, HTTP error mapping,
// aborted-request detection, and $timeout-driven cancellation.
describe('Service: Initiatives', function () {
// instantiate service
var Initiatives,
Timeout,
cfg,
$httpBackend,
$rootScope,
tPromise;
// load the service's module
beforeEach(module('sumaAnalysis'));
beforeEach(inject(function (_$rootScope_, _$httpBackend_, _initiatives_, $q, $timeout) {
$rootScope = _$rootScope_;
$httpBackend = _$httpBackend_;
Initiatives = _initiatives_;
tPromise = $q.defer();
Timeout = $timeout;
// Shared request config: a cancellation promise plus a 3-minute timeout.
cfg = {
timeoutPromise: tPromise,
timeout: 180000
};
}));
it('should make an AJAX call', function (done) {
$httpBackend.whenGET('lib/php/initiatives.php')
.respond([{}, {}]);
Initiatives.get(cfg).then(function (result) {
expect(result.length).to.equal(2);
done();
});
$httpBackend.flush();
});
it('should respond with error message on failure', function (done) {
$httpBackend.whenGET('lib/php/initiatives.php')
.respond(500, {message: 'Error'});
Initiatives.get(cfg).then(function (result) {
}, function (result) {
expect(result).to.deep.equal({
message: 'Error',
code: 500
});
done();
});
$httpBackend.flush();
});
it('should return error with promiseTimeout true on aborted http request', function (done) {
// simulate aborted request (status 0 signals an aborted/cancelled XHR)
$httpBackend.whenGET('lib/php/initiatives.php')
.respond(0, {message: 'Error'});
Initiatives.get(cfg).then(function (result) {
}, function (result) {
expect(result).to.deep.equal({
message: 'Initiatives.get Timeout',
code: 0,
promiseTimeout: true
});
done();
});
$httpBackend.flush();
});
it('should return error without promiseTimeout on http timeout', function (done) {
$httpBackend.whenGET('lib/php/initiatives.php')
.respond([{}, {}]);
Initiatives.get(cfg).then(function (result) {
}, function (result) {
expect(result).to.deep.equal({
message: 'Initiatives.get Timeout',
code: 0
});
done();
});
// Flush $timeout (not the HTTP backend) to trigger the timeout path.
Timeout.flush();
});
});
|
<?php
declare(strict_types=1);
namespace Sylius\Component\Core\Model;
use Sylius\Component\Promotion\Model\<API key> as <API key>;
/**
 * Core-level promotion coupon contract: extends the base promotion coupon
 * with an optional integer attribute exposed via the getter/setter below
 * (nullable, per the ?int signatures).
 */
interface <API key> extends <API key>
{
public function <API key>(): ?int;
public function <API key>(?int $<API key>): void;
}
|
// IMPORTANT:
// Some of these functions are Windows specific, so you may want to add *nix specific banned function calls
// NOTE(review): this guard only defines _INC_BANNED and closes immediately —
// the pragmas below are NOT wrapped by it. Harmless in practice because the
// file is pragma-only under `#pragma once`, but confirm the guard placement
// against the original banned.h.
#ifndef _INC_BANNED
# define _INC_BANNED
#endif
#ifdef _MSC_VER
# pragma once
// Unbounded string copy/concat functions — classic buffer-overflow sources.
# pragma deprecated (strcpy, strcpyA, strcpyW, wcscpy, _tcscpy, _mbscpy, StrCpy, StrCpyA, StrCpyW, lstrcpy, lstrcpyA, lstrcpyW, _tccpy, _mbccpy, _ftcscpy)
# pragma deprecated (strcat, strcatA, strcatW, wcscat, _tcscat, _mbscat, StrCat, StrCatA, StrCatW, lstrcat, lstrcatA, lstrcatW, StrCatBuff, StrCatBuffA, StrCatBuffW, StrCatChainW, _tccat, _mbccat, _ftcscat)
// Unbounded sprintf-family formatting.
# pragma deprecated (wvsprintf, wvsprintfA, wvsprintfW, vsprintf, _vstprintf, vswprintf)
// "n" variants that do not guarantee termination.
# pragma deprecated (strncpy, wcsncpy, _tcsncpy, _mbsncpy, _mbsnbcpy, StrCpyN, StrCpyNA, StrCpyNW, StrNCpy, strcpynA, StrNCpyA, StrNCpyW, lstrcpyn, lstrcpynA, lstrcpynW)
# pragma deprecated (strncat, wcsncat, _tcsncat, _mbsncat, _mbsnbcat, StrCatN, StrCatNA, StrCatNW, StrNCat, StrNCatA, StrNCatW, lstrncat, lstrcatnA, lstrcatnW, lstrcatn)
// IsBad*Ptr give false confidence and race with other threads.
# pragma deprecated (IsBadWritePtr, IsBadHugeWritePtr, IsBadReadPtr, IsBadHugeReadPtr, IsBadCodePtr, IsBadStringPtr)
# pragma deprecated (gets, _getts, _gettws)
# pragma deprecated (RtlCopyMemory, CopyMemory)
# pragma deprecated (wnsprintf, wnsprintfA, wnsprintfW, sprintfW, sprintfA, wsprintf, wsprintfW, wsprintfA, sprintf, swprintf, _stprintf, _snwprintf, _snprintf, _sntprintf)
# pragma deprecated (_vsnprintf, vsnprintf, _vsnwprintf, _vsntprintf, wvnsprintf, wvnsprintfA, wvnsprintfW)
// Non-reentrant tokenizers.
# pragma deprecated (strtok, _tcstok, wcstok, _mbstok)
// Path-building helpers that assume fixed-size buffers.
# pragma deprecated (makepath, _tmakepath, _makepath, _wmakepath)
# pragma deprecated (_splitpath, _tsplitpath, _wsplitpath)
# pragma deprecated (scanf, wscanf, _tscanf, sscanf, swscanf, _stscanf, snscanf, snwscanf, _sntscanf)
# pragma deprecated (_itoa, _itow, _i64toa, _i64tow, _ui64toa, _ui64tot, _ui64tow, _ultoa, _ultot, _ultow)
// Level-3 bans: lower-severity functions, opt-in via _SDL_BANNED_LEVEL3.
#if (_SDL_BANNED_LEVEL3)
# pragma deprecated (CharToOem, CharToOemA, CharToOemW, OemToChar, OemToCharA, OemToCharW, CharToOemBuffA, CharToOemBuffW)
# pragma deprecated (alloca, _alloca)
# pragma deprecated (strlen, wcslen, _mbslen, _mbstrlen, StrLen, lstrlen)
# pragma deprecated (<API key>)
#endif
#ifndef <API key>
// Path APIs which assume MAX_PATH instead of requiring the caller to specify
// the buffer size have been deprecated. Include <PathCch.h> and use the PathCch
// equivalents instead.
# pragma deprecated (PathAddBackslash, PathAddBackslashA, PathAddBackslashW)
# pragma deprecated (PathAddExtension, PathAddExtensionA, PathAddExtensionW)
# pragma deprecated (PathAppend, PathAppendA, PathAppendW)
# pragma deprecated (PathCanonicalize, PathCanonicalizeA, PathCanonicalizeW)
# pragma deprecated (PathCombine, PathCombineA, PathCombineW)
# pragma deprecated (PathRenameExtension, <API key>, <API key>)
#endif // <API key>
#else // _MSC_VER
#endif /* _INC_BANNED */
|
<?php
// Bootstrap script for the ECShop demo/upgrade area: loads libraries, locates
// config.php (migrating it from includes/ to data/ if needed), detects the
// database charset, and wires up the DB, error, template and updater objects.
// NOTE(review): display_errors is enabled and mysql_* (removed in PHP 7) is
// used throughout — this file only runs on legacy PHP and should not be
// exposed in production.
define('IN_ECS', true);
ini_set('display_errors', 1);
error_reporting(E_ALL ^ E_NOTICE);
clearstatcache();
// Derive the shop root from this file's own path.
define('ROOT_PATH', str_replace('demo/includes/init.php', '', str_replace('\\', '/', __FILE__)));
require(ROOT_PATH . 'includes/lib_common.php');
@include(ROOT_PATH . 'includes/lib_base.php');
require(ROOT_PATH . 'admin/includes/lib_main.php');
require(ROOT_PATH . 'includes/lib_time.php');
clear_all_files();
// Prefer data/config.php; fall back to migrating the legacy includes/ copy.
if (file_exists(ROOT_PATH . 'data/config.php'))
{
include(ROOT_PATH . 'data/config.php');
}
elseif (file_exists(ROOT_PATH . 'includes/config.php'))
{
if (!rename(ROOT_PATH . 'includes/config.php', ROOT_PATH . 'data/config.php'))
{
die('Can\'t move config.php, please move it from includes/ to data/ manually!');
}
include(ROOT_PATH . 'data/config.php');
}
else
{
die('Can\'t find config.php!');
}
require(ROOT_PATH . 'includes/cls_ecshop.php');
require(ROOT_PATH . 'includes/cls_mysql.php');
/* ECSHOP */
$ecs = new ECS($db_name, $prefix);
/* ?
$ec_version_charset = 'gbk';
*/
$mysql_charset = $ecshop_charset = '';
// Probe the users table's CREATE statement to discover the DB charset
// (e.g. utf8 -> "utf-8").
$tmp_link = @mysql_connect($db_host, $db_user, $db_pass);
if (!$tmp_link)
{
die("Can't pConnect MySQL Server($db_host)!");
}
else
{
mysql_select_db($db_name);
$query = mysql_query(" SHOW CREATE TABLE " . $ecs->table('users'), $tmp_link) or die(mysql_error());
$tablestruct = mysql_fetch_row($query);
preg_match("/CHARSET=(\w+)/", $tablestruct[1], $m);
if (strpos($m[1], 'utf') === 0)
{
$mysql_charset = str_replace('utf', 'utf-', $m[1]);
}
else
{
$mysql_charset = $m[1];
}
}
if (defined('EC_CHARSET'))
{
$ecshop_charset = EC_CHARSET;
}
/*
if (empty($tmp_charset))
{
$check_charset = false;
$tmp_charset = 'gbk';
}
else
{
$check_charset = true;
}
if (!defined('EC_CHARSET'))
{
define('EC_CHARSET', $tmp_charset);
}
if ($ec_version_charset != EC_CHARSET)
{
die('Database Charset not match!');
}
*/
// Main DB handle, error reporter, SQL executor, template engine and updater.
$db = new cls_mysql($db_host, $db_user, $db_pass, $db_name, $ecshop_charset);
require(ROOT_PATH . 'includes/cls_error.php');
$err = new ecs_error('message.dwt');
require(ROOT_PATH . 'includes/cls_sql_executor.php');
require(ROOT_PATH . 'demo/includes/cls_template.php');
$smarty = new template(ROOT_PATH . 'demo/templates/');
require(ROOT_PATH . 'demo/includes/lib_updater.php');
@set_time_limit(360);
?>
|
// Babel-generated typeof helper (handles the Symbol polyfill edge case).
var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; };
// Tabulator module: converts an existing HTML <table> element into Tabulator
// column definitions and row data.
var HtmlTableImport = function HtmlTableImport(table) {
this.table = table; //hold Tabulator object
this.fieldIndex = []; // maps column position -> field name
this.hasIndex = false; // true when a column matches the configured row index
};
// Parses the source <table>: extracts inline tabulator-* options, builds column
// definitions from the headers (or blanks), collects row data, and finally
// replaces the <table> element with a <div> carrying the same attributes.
// NOTE(review): several DOM method names are redacted in this view; they are
// presumably getElementsByTagName-style lookups — verify against upstream.
HtmlTableImport.prototype.parseTable = function () {
var self = this,
element = self.table.element,
options = self.table.options,
columns = options.columns,
headers = element.<API key>("th"),
rows = element.<API key>("tbody")[0].<API key>("tr"),
data = [],
newTable;
self.hasIndex = false;
// Fire the user's pre-import callback.
self.table.options.htmlImporting.call(this.table);
//check for tablator inline options
self._extractOptions(element, options);
if (headers.length) {
self._extractHeaders(headers, rows);
} else {
self.<API key>(headers, rows);
}
//iterate through table rows and build data set
for (var index = 0; index < rows.length; index++) {
var row = rows[index],
cells = row.<API key>("td"),
item = {};
//create index if the dont exist in table
if (!self.hasIndex) {
item[options.index] = index;
}
for (var i = 0; i < cells.length; i++) {
var cell = cells[i];
if (typeof this.fieldIndex[i] !== "undefined") {
item[this.fieldIndex[i]] = cell.innerHTML;
}
}
//add row data to item
data.push(item);
}
//create new element
var newElement = document.createElement("div");
//transfer attributes to new element
var attributes = element.attributes;
// loop through attributes and apply them on div
for (var i in attributes) {
if (_typeof(attributes[i]) == "object") {
newElement.setAttribute(attributes[i].name, attributes[i].value);
}
}
// replace table with div element
element.parentNode.replaceChild(newElement, element);
options.data = data;
// Fire the user's post-import callback.
self.table.options.htmlImported.call(this.table);
// // newElement.tabulator(options);
this.table.element = newElement;
};
//apply any tabulator-* attributes found on the element to the options object
HtmlTableImport.prototype._extractOptions = function (element, options) {
	var attrs = element.attributes;
	for (var idx in attrs) {
		var attr = attrs[idx];
		// skip array indices / non-attribute entries and unrelated attributes
		if (_typeof(attr) != "object" || !attr.name || attr.name.indexOf("tabulator-") !== 0) {
			continue;
		}
		var optionName = attr.name.replace("tabulator-", "");
		// case-insensitive match against the known option keys
		for (var optionKey in options) {
			if (optionKey.toLowerCase() == optionName) {
				options[optionKey] = this._attribValue(attr.value);
			}
		}
	}
};
//convert a tabulator-* attribute string into a typed option value
//("true"/"false" become booleans; anything else passes through unchanged)
HtmlTableImport.prototype._attribValue = function (value) {
	switch (value) {
		case "true":
			return true;
		case "false":
			return false;
		default:
			return value;
	}
};
//look up an existing column definition by its title; returns false when absent
HtmlTableImport.prototype._findCol = function (title) {
	var columns = this.table.options.columns;
	for (var i = 0; i < columns.length; i++) {
		if (columns[i].title === title) {
			return columns[i];
		}
	}
	return false;
};
//extract column from headers
// Builds (or reuses) a column definition per <th>: derives the field name from
// the header text, picks up width and tabulator-* attributes, records the
// position->field mapping, and flags when a column matches the row index.
// NOTE(review): .replace(" ", "_") only replaces the FIRST space in a title —
// multi-word titles keep later spaces in the field name; confirm intended.
HtmlTableImport.prototype._extractHeaders = function (headers, rows) {
for (var index = 0; index < headers.length; index++) {
var header = headers[index],
exists = false,
col = this._findCol(header.textContent),
width,
attributes;
if (col) {
exists = true;
} else {
col = { title: header.textContent.trim() };
}
if (!col.field) {
col.field = header.textContent.trim().toLowerCase().replace(" ", "_");
}
// honor an explicit width attribute unless the column already set one
width = header.getAttribute("width");
if (width && !col.width) {
col.width = width;
}
//check for tablator inline options
attributes = header.attributes;
// //check for tablator inline options
this._extractOptions(header, col);
for (var i in attributes) {
var attrib = attributes[i],
name;
if ((typeof attrib === "undefined" ? "undefined" : _typeof(attrib)) == "object" && attrib.name && attrib.name.indexOf("tabulator-") === 0) {
name = attrib.name.replace("tabulator-", "");
col[name] = this._attribValue(attrib.value);
}
}
this.fieldIndex[index] = col.field;
if (col.field == this.table.options.index) {
this.hasIndex = true;
}
if (!exists) {
this.table.options.columns.push(col);
}
}
};
//generate blank headers
// Fallback when the table has no usable header titles: creates untitled
// columns named col0, col1, ... one per <th>, honoring any width attribute.
HtmlTableImport.prototype.<API key> = function (headers, rows) {
for (var index = 0; index < headers.length; index++) {
var header = headers[index],
col = { title: "", field: "col" + index };
this.fieldIndex[index] = col.field;
var width = header.getAttribute("width");
if (width) {
col.width = width;
}
this.table.options.columns.push(col);
}
};
Tabulator.prototype.registerModule("htmlTableImport", HtmlTableImport);
|
<?php
namespace Rocketeer\Binaries\PackageManagers;
use Rocketeer\Abstracts\<API key>;
/**
 * Bundler (Ruby) package manager binary for Rocketeer.
 * Detected through its Gemfile manifest and resolved from the 'bundle'
 * executable name.
 */
class Bundler extends <API key>
{
/**
 * The name of the manifest file to look for.
 *
 * @type string
 */
protected $manifest = 'Gemfile';
/**
 * Get an array of default paths to look for.
 *
 * @return string[]
 */
protected function getKnownPaths()
{
return [
'bundle',
];
}
}
|
/**
 * Prints the contents of an array via WScript.Echo, collapsing consecutive
 * undefined (or null) entries into a single "start-end = undefined" line.
 * A run of undefined entries at the very end of the array is not printed
 * (matching the original behavior).
 */
function DumpArray(a)
{
    var runStart = -1;
    for (var idx = 0; idx < a.length; idx++)
    {
        var value = a[idx];
        if (value == undefined)
        {
            if (runStart < 0)
            {
                runStart = idx;
            }
            continue;
        }
        if (runStart >= 0)
        {
            WScript.Echo(runStart + "-" + (idx - 1) + " = undefined");
            runStart = -1;
        }
        WScript.Echo(idx + " = " + value);
    }
}
// Exercise DumpArray with empty, dense and sparse literals.
DumpArray([]);
DumpArray([ 0 ]);
DumpArray([ 0, 1, 2, 3, 4, 5, 6 ,7 ,8, 9]);
DumpArray([,,,0,,,1,,,2,,,3,,,4,,,5,,,6,,,7,,,8,,,9,,,]);
// Build ever-larger sparse array literals via eval: ~10K, ~300K and ~3M holes
// with a single trailing element, stressing sparse-array handling.
var s0 = "";
for (var i = 0; i < 100; i++)
{
s0 += ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,";
}
DumpArray(eval("[" + s0 + "1]"));
var s1 = "";
for (var i = 0; i < 30; i++)
{
s1 += s0;
}
DumpArray(eval("[" + s1 + "1]"));
var s2 = "";
for (var i = 0; i < 10; i++)
{
s2 += s1;
}
DumpArray(eval("[" + s2 + "1]"));
|
/* Base look of the flat button; the state rules below override only
   colors, borders, and cursor. */
.goog-flat-button {
  position: relative;
  /*width: 20ex;*/
  margin: 2px;
  border: 1px solid #000;
  padding: 2px 6px;
  font: normal 13px "Trebuchet MS", Tahoma, Arial, sans-serif;
  color: #fff;
  background-color: #8c2425;
  cursor: pointer;
  outline: none;
}
/* State: disabled. Greyed out, default cursor. */
.<API key> {
  border-color: #888;
  color: #888;
  background-color: #ccc;
  cursor: default;
}
/* State: hover. Inverts the base scheme (red text on light background). */
.<API key> {
  border-color: #8c2425;
  color: #8c2425;
  background-color: #eaa4a5;
}
/* State: active, selected, checked. Purple scheme shared by all three. */
.<API key>,
.<API key>,
.<API key> {
  border-color: #5b4169;
  color: #5b4169;
  background-color: #d1a8ea;
}
/* State: focused. Border only; background is left unchanged. */
.<API key> {
  border-color: #5b4169;
}
/* Pill (collapsed border) styles: the two rules below remove the gap
   (and one shared border) between adjacent buttons in a group. */
.<API key> {
  margin-right: 0;
}
.<API key> {
  margin-left: 0;
  border-left: none;
}
|
/*
Syntax error: Undefined variable: "$teal".
on line 124 of C:/projects/admin/documentation/sass/bootstrap/_progress-bars.scss, in `gradient-vertical'
from line 124 of C:/projects/admin/documentation/sass/bootstrap/_progress-bars.scss
from line 57 of C:/projects/admin/documentation/sass/bootstrap/bootstrap.scss
Backtrace:
C:/projects/admin/documentation/sass/bootstrap/_progress-bars.scss:124:in `gradient-vertical'
C:/projects/admin/documentation/sass/bootstrap/_progress-bars.scss:124
C:/projects/admin/documentation/sass/bootstrap/bootstrap.scss:57
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/script/variable.rb:49:in `_perform'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/script/node.rb:40:in `perform'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:259:in `block in visit_mixin'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:259:in `map'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:259:in `visit_mixin'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/base.rb:37:in `visit'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:100:in `visit'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/base.rb:53:in `block in visit_children'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/base.rb:53:in `map'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/base.rb:53:in `visit_children'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:109:in `block in visit_children'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:121:in `with_environment'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:108:in `visit_children'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/base.rb:37:in `block in visit'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:320:in `visit_rule'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/base.rb:37:in `visit'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:100:in `visit'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:227:in `block in visit_import'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:227:in `map'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:227:in `visit_import'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/base.rb:37:in `visit'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:100:in `visit'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/base.rb:53:in `block in visit_children'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/base.rb:53:in `map'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/base.rb:53:in `visit_children'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:109:in `block in visit_children'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:121:in `with_environment'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:108:in `visit_children'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/base.rb:37:in `block in visit'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:128:in `visit_root'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/base.rb:37:in `visit'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:100:in `visit'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/visitors/perform.rb:7:in `visit'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/tree/root_node.rb:20:in `render'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/engine.rb:315:in `_render'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/sass-3.2.7/lib/sass/engine.rb:262:in `render'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/compiler.rb:140:in `block (2 levels) in compile'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/compiler.rb:126:in `timed'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/compiler.rb:139:in `block in compile'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/logger.rb:45:in `red'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/compiler.rb:138:in `compile'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/compiler.rb:118:in `compile_if_required'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/compiler.rb:103:in `block (2 levels) in run'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/compiler.rb:101:in `each'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/compiler.rb:101:in `block in run'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/compiler.rb:126:in `timed'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/compiler.rb:100:in `run'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/commands/watch_project.rb:147:in `recompile'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/path.rb:73:in `call'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/path.rb:73:in `run_callback'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/path.rb:55:in `callback_action'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/path.rb:31:in `create'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/state/directory.rb:26:in `block in created'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/state/directory.rb:25:in `each'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/state/directory.rb:25:in `created'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/state/directory.rb:17:in `refresh'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/backends/polling.rb:17:in `block (2 levels) in run'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/backends/polling.rb:17:in `each'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/backends/polling.rb:17:in `block in run'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/backends/polling.rb:15:in `loop'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/backends/polling.rb:15:in `run'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm/monitor.rb:26:in `run'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/fssm-0.2.10/lib/fssm.rb:70:in `monitor'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/commands/watch_project.rb:87:in `perform'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/commands/base.rb:18:in `execute'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/commands/project_base.rb:19:in `execute'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/exec/sub_command_ui.rb:43:in `perform!'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/lib/compass/exec/sub_command_ui.rb:15:in `run!'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/bin/compass:30:in `block in <top (required)>'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/bin/compass:44:in `call'
C:/Ruby193/lib/ruby/gems/1.9.1/gems/compass-0.12.2/bin/compass:44:in `<top (required)>'
C:/Ruby193/bin/compass:23:in `load'
C:/Ruby193/bin/compass:23:in `<main>'
*/
/* Generated error shim: the Sass compiler emits this rule so the
   compile error above is rendered directly on the page. */
body:before {
  white-space: pre;
  font-family: monospace;
  content: "Syntax error: Undefined variable: \"$teal\".\A on line 124 of C:/projects/admin/documentation/sass/bootstrap/_progress-bars.scss, in `gradient-vertical'\A from line 124 of C:/projects/admin/documentation/sass/bootstrap/_progress-bars.scss\A from line 57 of C:/projects/admin/documentation/sass/bootstrap/bootstrap.scss"; }
|
<?xml version="1.0" encoding="utf-8"?>
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"
viewBox="0 0 364.8 140.7" style="enable-background:new 0 0 364.8 140.7;" xml:space="preserve"
height="{{ include.height }}" width="{{ include.width}}">
<style type="text/css">
.white{fill:#FFFFFF;}
</style>
<g id="XMLID_14_">
<g id="XMLID_15_">
<g id="XMLID_49_">
<path id="XMLID_456_" class="white" d="M0.1,130.6c1.6-4.3,8.5-10.9,11.3-10.9c0.9,0,1.3,0.6,1.5,1.7c0.4,0.6,2.1,4.5,9.3,4.5
c7.7,0,15-5.1,17.9-13.2c8.1-22.4-37.4-23.3-23.4-61.6C25.7,26.3,62.4,0,87.9,0c14.7,0,20.2,9.6,14.9,24.2
c-7.6,20.9-39,40.6-50.5,40.6c-6.6,0-8-7-6.6-10.9c1.2-3.2,2.2-1.3,5.4-1.3c10.1,0,32.8-15.8,37.3-28.2
c2.3-6.4,0.9-10.3-6.6-10.3c-13.9,0-43,16.5-50.5,37c-10.6,29.1,35.2,29.7,23.8,61.1C49.6,127.4,32.8,140,17,140
C1,140-0.3,131.7,0.1,130.6z"/>
</g>
</g>
<path id="XMLID_16_" class="white" d="M362,113.1c-1.2,0-2.9,1.2-4.4,4c-3.2,5.8-8.5,14.3-13.5,14.3c-3.2,0-1.4-4.9-1-6.1L353,98
c2.2-6.1,2.9-13.3-5.4-13.3c-6.1,0-12.6,5.7-18.7,13.1l2.2-5.9c1.2-3.2-0.5-5.8-4.6-5.8c-1.7,0-3.6,0.9-4,2.2
c-0.3,0.9-0.1,1.7-1,4.2l-8.3,22.9c-0.4,0.5-0.8,1.1-1.1,1.8c-3.2,5.8-8.5,14.3-13.5,14.3c-3,0-3.1-3.2-2-6.1l7.9-21.8
c1.2-3.2-0.5-5.8-4.6-5.8c-1.7,0-3.6,0.9-4,2.2c-0.3,0.9-0.1,1.7-1,4.2l-7.4,20.3c-2.7,3.7-6.1,6.9-9.4,6.9c-4.3,0-5.6-4-3.1-10.8
c6.9-19,21.1-27.6,30.5-27.6c1.2,0,2.4,0.3,3.5,0.3c1.2,0,1.9-0.8,2.7-3c1.2-3.4-0.3-5.5-3.7-5.5c-15.1,0-34.4,13.5-42.4,35.5
c-0.4,1-0.7,2-0.9,3c-2.9,4.3-6.5,8.2-9.8,8.2c-3.2,0-1.4-4.9-1-6.1l9.9-27.3c2.2-6.1,3-13.3-5.1-13.3c-5.6,0-11.9,5.4-17.5,12.4
c1.9-5.9,2.2-12.4-5.3-12.4c-5.6,0-11.9,5.5-17.8,12.7l2-5.5c1.2-3.2-0.5-5.8-4.6-5.8c-1.7,0-3.6,0.9-4,2.2c-0.3,0.9-0.1,1.7-1,4.2
l-8.3,22.9c-0.4,0.5-0.8,1.1-1.1,1.8c-3.2,5.8-8.5,14.3-13.5,14.3c-2.6,0-3.5-1.8-2-6.1l15.2-41.8c6.6,0,13.9,0,23.9-0.3
c3.3-0.1,9-12.8,4.1-8.8c-8.6,0.4-17.3,0.5-24.9,0.5l8.4-23.1c1.2-3.2-0.5-5.8-4.6-5.8c-1.7,0-4.2,0.5-4.5,1.5
c-0.3,0.9,0.8,1.2-0.6,5L194.5,75c-6.7-0.1-11-0.4-11-0.4c-3.6,0-4.6,8.8-0.6,8.8c0,0,3,0.3,8.5,0.3l-12.9,35.5
c-3.3,5.6-8,12.5-12.5,12.5c-3.2,0-1.4-4.9-1-6.1l9.9-27.3c2.2-6.1,2.9-13.3-5.4-13.3c-6.1,0-12.6,5.7-18.7,13.1l2.2-5.9
c1.2-3.2-0.5-5.8-4.6-5.8c-1.7,0-3.6,0.9-4,2.2c-0.3,0.9-0.1,1.7-1,4.2l-8.3,22.9c-0.4,0.5-0.8,1.1-1.1,1.8
c-3.2,5.8-8.5,14.3-13.5,14.3c-3.2,0-1.4-4.9-1-6.1l12.2-33.5c1.2-3.2-0.5-5.8-4.6-5.8c-1.7,0-4.2,0.5-4.5,1.5
c-0.3,0.9,0.8,1.2-0.6,5l-7.7,21.3c-4.9,9.2-11.7,17.8-16.7,17.8c-3.6,0-4.3-3.8-1.7-11c4-11.1,7.5-16.9,13.3-26.7
c1.2-2,6.7-7.3,0.7-7.3c-6.6,0-6.7,2.6-8.4,4.9c0,0-10.2,15.7-15.3,29.7c-0.1,0.2-0.1,0.4-0.2,0.7c-3.1,4.8-7,9.5-10.7,9.5
c-2.6,0-3.5-1.8-2-6.1l15.2-41.8c6.6,0,13.9,0,23.9-0.3c3.3-0.1,9-12.8,4.1-8.8c-8.6,0.4-17.3,0.5-24.9,0.5l8.4-23.1
c1.2-3.2-0.5-5.8-4.6-5.8c-1.7,0-4.2,0.5-4.5,1.5c-0.3,0.9,0.8,1.2-0.6,5l-8.1,22.3c-6.7-0.1-11-0.4-11-0.4c-3.6,0-4.6,8.8-0.6,8.8
c0,0,3,0.3,8.5,0.3l-15.3,42c-3.8,10.4-0.6,14.7,6.1,14.7c5.2,0,10.1-2.9,14.4-6.8c0.6,4.4,3.3,6.9,8.2,6.9c5.5,0,10.9-4,15.9-9.5
c-1.1,5.3,1.2,9.4,6.8,9.4c4.7,0,9.2-2.3,13.1-5.7c-1.7,4.8,1.4,5.7,3.9,5.7c3.6,0,4.6-3.2,5.5-5.7l7-19.3
c4.7-10.4,16.5-21.8,20.1-21.8c2.4,0,1.4,3.4-0.1,7.5l-9.8,26.8c-2.5,6.7-0.7,12.4,5.9,12.4c5,0,9.8-2.6,13.9-6.4
c0.3,4.4,3.2,6.4,7.7,6.4c4.7,0,9.2-2.3,13.1-5.7c-1.7,4.8,1.4,5.7,3.9,5.7c3.6,0,4.6-3.2,5.5-5.7l7-19.3
c4.5-10.4,15.7-21.8,19.1-21.8c2.1,0,1.1,3.4-0.4,7.5l-12.2,33.4c-1.8,4.9,1.4,5.8,3.8,5.8c3.6,0,4.6-3.2,5.5-5.7l7-19.3
c4.5-10.4,15.7-21.8,19.1-21.8c2.1,0,1.1,3.4-0.4,7.5l-9.8,26.8c-2.5,6.7-0.7,12.4,5.9,12.4c5.3,0,10.2-2.9,14.5-6.9
c0.9,4.3,3.9,6.9,9.2,6.9c4.7,0,9.1-2.4,12.6-5.4c0.7,3.2,3.2,5.4,7.6,5.4c4.7,0,9.2-2.3,13.1-5.7c-1.7,4.8,1.4,5.7,3.9,5.7
c3.6,0,4.6-3.2,5.5-5.7l7-19.3c4.7-10.4,16.5-21.8,20.1-21.8c2.4,0,1.4,3.4-0.1,7.5l-9.8,26.8c-2.5,6.7-0.7,12.4,5.9,12.4
c10.7,0,20-11.9,25.2-20.6c0.3-0.5,0.4-0.8,0.5-0.9C365.3,116,364,113.1,362,113.1z"/>
</g>
</svg>
|
<?php
namespace Symfony\Component\Debug\Tests\FatalErrorHandler;
use Symfony\Component\ClassLoader\ClassLoader as SymfonyClassLoader;
use Symfony\Component\ClassLoader\<API key> as <API key>;
use Symfony\Component\Debug\Exception\FatalErrorException;
use Symfony\Component\Debug\FatalErrorHandler\<API key>;
/**
 * Tests the fatal-error handler that turns "Class ... not found" fatal
 * errors into exceptions carrying a more helpful, translated message.
 */
class <API key> extends \<API key>
{
    /**
     * Checks that the handler rewrites the message while preserving the
     * original severity, file, and line of the fatal error.
     *
     * @dataProvider <API key>
     */
    public function <API key>($error, $translatedMessage)
    {
        $handler = new <API key>();
        $exception = $handler->handleError($error, new FatalErrorException('', 0, $error['type'], $error['file'], $error['line']));
        $this->assertInstanceof('Symfony\Component\Debug\Exception\<API key>', $exception);
        $this->assertSame($translatedMessage, $exception->getMessage());
        $this->assertSame($error['type'], $exception->getSeverity());
        $this->assertSame($error['file'], $exception->getFile());
        $this->assertSame($error['line'], $exception->getLine());
    }

    /**
     * Same as above but with a single, caller-provided autoloader active,
     * so the handler's autoloader introspection is exercised in isolation.
     *
     * @dataProvider <API key>
     * @group legacy
     */
    public function <API key>($error, $translatedMessage, $autoloader)
    {
        // Unregister all autoloaders to ensure the custom provided
        // autoloader is the only one to be used during the test run.
        $autoloaders = <API key>();
        array_map('<API key>', $autoloaders);
        <API key>($autoloader);
        $handler = new <API key>();
        $exception = $handler->handleError($error, new FatalErrorException('', 0, $error['type'], $error['file'], $error['line']));
        // Restore the original autoloader stack before the assertions.
        <API key>($autoloader);
        array_map('<API key>', $autoloaders);
        $this->assertInstanceof('Symfony\Component\Debug\Exception\<API key>', $exception);
        $this->assertSame($translatedMessage, $exception->getMessage());
        $this->assertSame($error['type'], $exception->getSeverity());
        $this->assertSame($error['file'], $exception->getFile());
        $this->assertSame($error['line'], $exception->getLine());
    }

    /**
     * Fixtures: each entry pairs a raw PHP error array with the message
     * the handler is expected to produce for it.
     */
    public function <API key>()
    {
        return array(
            array(
                array(
                    'type' => 1,
                    'line' => 12,
                    'file' => 'foo.php',
                    'message' => 'Class \'WhizBangFactory\' not found',
                ),
                "Attempted to load class \"WhizBangFactory\" from the global namespace.\nDid you forget a \"use\" statement?",
            ),
            array(
                array(
                    'type' => 1,
                    'line' => 12,
                    'file' => 'foo.php',
                    'message' => 'Class \'Foo\\Bar\\WhizBangFactory\' not found',
                ),
                "Attempted to load class \"WhizBangFactory\" from namespace \"Foo\\Bar\".\nDid you forget a \"use\" statement for another namespace?",
            ),
            array(
                array(
                    'type' => 1,
                    'line' => 12,
                    'file' => 'foo.php',
                    'message' => 'Class \'<API key>\' not found',
                ),
                "Attempted to load class \"<API key>\" from the global namespace.\nDid you forget a \"use\" statement for \"Symfony\Component\Debug\Exception\<API key>\"?",
            ),
            array(
                array(
                    'type' => 1,
                    'line' => 12,
                    'file' => 'foo.php',
                    'message' => 'Class \'PEARClass\' not found',
                ),
                "Attempted to load class \"PEARClass\" from the global namespace.\nDid you forget a \"use\" statement for \"<API key>\"?",
            ),
            array(
                array(
                    'type' => 1,
                    'line' => 12,
                    'file' => 'foo.php',
                    'message' => 'Class \'Foo\\Bar\\<API key>\' not found',
                ),
                "Attempted to load class \"<API key>\" from namespace \"Foo\Bar\".\nDid you forget a \"use\" statement for \"Symfony\Component\Debug\Exception\<API key>\"?",
            ),
        );
    }

    /**
     * Fixtures for the legacy autoloader test: error array, expected
     * message, and the autoloader callable to run the handler under.
     */
    public function <API key>()
    {
        // Silence deprecation warnings triggered by the legacy loader.
        $this->iniSet('error_reporting', -1 & ~E_USER_DEPRECATED);
        $prefixes = array('Symfony\Component\Debug\Exception\\' => realpath(__DIR__.'/../../Exception'));
        $symfonyAutoloader = new SymfonyClassLoader();
        $symfonyAutoloader->addPrefixes($prefixes);
        if (class_exists('Symfony\Component\ClassLoader\<API key>')) {
            $<API key> = new <API key>();
            $<API key>->registerPrefixes($prefixes);
        } else {
            // Legacy loader not available in this component version:
            // fall back to the regular one so the data set still works.
            $<API key> = $symfonyAutoloader;
        }
        return array(
            array(
                array(
                    'type' => 1,
                    'line' => 12,
                    'file' => 'foo.php',
                    'message' => 'Class \'Foo\\Bar\\<API key>\' not found',
                ),
                "Attempted to load class \"<API key>\" from namespace \"Foo\Bar\".\nDid you forget a \"use\" statement for \"Symfony\Component\Debug\Exception\<API key>\"?",
                array($symfonyAutoloader, 'loadClass'),
            ),
            array(
                array(
                    'type' => 1,
                    'line' => 12,
                    'file' => 'foo.php',
                    'message' => 'Class \'Foo\\Bar\\<API key>\' not found',
                ),
                "Attempted to load class \"<API key>\" from namespace \"Foo\Bar\".\nDid you forget a \"use\" statement for \"Symfony\Component\Debug\Exception\<API key>\"?",
                array($<API key>, 'loadClass'),
            ),
            array(
                array(
                    'type' => 1,
                    'line' => 12,
                    'file' => 'foo.php',
                    'message' => 'Class \'Foo\\Bar\\<API key>\' not found',
                ),
                "Attempted to load class \"<API key>\" from namespace \"Foo\\Bar\".\nDid you forget a \"use\" statement for another namespace?",
                // An autoloader that cannot find anything: no suggestion
                // beyond the generic "another namespace" hint is possible.
                function ($className) { /* do nothing here */ },
            ),
        );
    }

    public function <API key>()
    {
        // The upper-case path only resolves on case-insensitive
        // filesystems; skip everywhere else.
        if (!file_exists(__DIR__.'/../FIXTURES/REQUIREDTWICE.PHP')) {
            $this->markTestSkipped('Can only be run on case insensitive filesystems');
        }
        require_once __DIR__.'/../FIXTURES/REQUIREDTWICE.PHP';
        $error = array(
            'type' => 1,
            'line' => 12,
            'file' => 'foo.php',
            'message' => 'Class \'Foo\\Bar\\RequiredTwice\' not found',
        );
        $handler = new <API key>();
        $exception = $handler->handleError($error, new FatalErrorException('', 0, $error['type'], $error['file'], $error['line']));
        $this->assertInstanceof('Symfony\Component\Debug\Exception\<API key>', $exception);
    }
}
|
/* ftrfork.h */
/* Embedded resource forks accessor (specification). */
/* Masatake YAMATO and Redhat K.K. */
/* This file is part of the FreeType project, and may only be used, */
/* modified, and distributed under the terms of the FreeType project */
/* understand and accept it fully. */
/* Development of the code in this file is support of */
/* <API key> Promotion Agency, Japan. */
#ifndef __FTRFORK_H__
#define __FTRFORK_H__

#include <ft2build.h>
#include <API key>


FT_BEGIN_HEADER


  /* Number of guessing rules supported in `FT_Raccess_Guess'.            */
  /* Don't forget to increment the number if you add a new guessing rule. */
#define FT_RACCESS_N_RULES 9


  /* A structure to describe a reference in a resource by its resource ID */
  /* and internal offset.  The `POST' resource expects to be concatenated */
  /* by the order of resource IDs instead of its appearance in the file.  */
  typedef struct  FT_RFork_Ref_
  {
    FT_UShort  res_id;   /* resource ID; sort key for `POST' resources */
    FT_Long    offset;   /* offset of the referenced data in the file  */

  } FT_RFork_Ref;


#ifdef <API key>

  /* Signature of a single guessing-rule function: on success it reports */
  /* the file name and offset at which the resource fork may be found.   */
  typedef FT_Error
  (*<API key>)( FT_Library  library,
                FT_Stream   stream,
                char       *base_file_name,
                char      **result_file_name,
                FT_Long    *result_offset );

  typedef enum  FT_RFork_Rule_ {
    <API key> = -2,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    <API key>,
    FT_RFork_Rule_vfat,
    <API key>,
    <API key>,
    <API key>
  } FT_RFork_Rule;

  /* For fast translation between rule index and rule type,
   * the macros FT_RFORK_xxx should be kept consistent with
   * the raccess_guess_funcs table
   */
  typedef struct <API key> {
    <API key>      func;
    FT_RFork_Rule  type;
  } <API key>;


#ifndef <API key>

  /* this array is a storage in non-PIC mode, so ; is needed in END */
#define <API key>( name, type ) \
  static const type name[] = {
#define <API key>( func_suffix, type_suffix ) \
  { raccess_guess_ ## func_suffix, \
    FT_RFork_Rule_ ## type_suffix },
#define <API key> };

#else /* <API key> */

  /* this array is a function in PIC mode, so no ; is needed in END */
#define <API key>( name, type ) \
  void \
  FT_Init_Table_ ## name( type* storage ) \
  { \
    type* local = storage; \
 \
 \
    int i = 0;
#define <API key>( func_suffix, type_suffix ) \
    local[i].func = raccess_guess_ ## func_suffix; \
    local[i].type = FT_RFork_Rule_ ## type_suffix; \
    i++;
#define <API key> }

#endif /* <API key> */

#endif /* <API key> */


  /* <Function>                                                          */
  /*   FT_Raccess_Guess                                                  */
  /*                                                                     */
  /* <Description>                                                       */
  /*   Guess a file name and offset where the actual resource fork is    */
  /*   stored.  The macro FT_RACCESS_N_RULES holds the number of         */
  /*   guessing rules; the guessed result for the Nth rule is            */
  /*   represented as a triplet: a new file name (new_names[N]), a file  */
  /*   offset (offsets[N]), and an error code (errors[N]).               */
  /*                                                                     */
  /* <Input>                                                             */
  /*   library ::                                                        */
  /*     A FreeType library instance.                                    */
  /*   stream ::                                                         */
  /*     A file stream containing the resource fork.                     */
  /*   base_name ::                                                      */
  /*     The (base) file name of the resource fork used for some         */
  /*     guessing rules.                                                 */
  /*                                                                     */
  /* <Output>                                                            */
  /*   new_names ::                                                      */
  /*     An array of guessed file names in which the resource forks may  */
  /*     exist.  If `new_names[N]' is NULL, the guessed file name is     */
  /*     equal to `base_name'.                                           */
  /*   offsets ::                                                        */
  /*     An array of guessed file offsets.  `offsets[N]' holds the file  */
  /*     offset of the possible start of the resource fork in file       */
  /*     `new_names[N]'.                                                 */
  /*   errors ::                                                         */
  /*     An array of FreeType error codes.  `errors[N]' is the error     */
  /*     code of Nth guessing rule function.  If `errors[N]' is not      */
  /*     FT_Err_Ok, `new_names[N]' and `offsets[N]' are meaningless.     */
  FT_BASE( void )
  FT_Raccess_Guess( FT_Library  library,
                    FT_Stream   stream,
                    char*       base_name,
                    char**      new_names,
                    FT_Long*    offsets,
                    FT_Error*   errors );


  /* <Function>                                                          */
  /*   <API key>                                                         */
  /*                                                                     */
  /* <Description>                                                       */
  /*   Get the information from the header of resource fork.  The        */
  /*   information includes the file offset where the resource map       */
  /*   starts, and the file offset where the resource data starts.       */
  /*   `<API key>' requires these two data.                              */
  /*                                                                     */
  /* <Input>                                                             */
  /*   library ::                                                        */
  /*     A FreeType library instance.                                    */
  /*   stream ::                                                         */
  /*     A file stream containing the resource fork.                     */
  /*   rfork_offset ::                                                   */
  /*     The file offset where the resource fork starts.                 */
  /*                                                                     */
  /* <Output>                                                            */
  /*   map_offset ::                                                     */
  /*     The file offset where the resource map starts.                  */
  /*   rdata_pos ::                                                      */
  /*     The file offset where the resource data starts.                 */
  /*                                                                     */
  /* <Return>                                                            */
  /*   FreeType error code.  FT_Err_Ok means success.                    */
  FT_BASE( FT_Error )
  <API key>( FT_Library  library,
             FT_Stream   stream,
             FT_Long     rfork_offset,
             FT_Long    *map_offset,
             FT_Long    *rdata_pos );


  /* <Function>                                                          */
  /*   <API key>                                                         */
  /*                                                                     */
  /* <Description>                                                       */
  /*   Get the data offsets for a tag in a resource fork.  Offsets are   */
  /*   stored in an array because, in some cases, resources in a resource*/
  /*   fork have the same tag.                                           */
  /*                                                                     */
  /* <Input>                                                             */
  /*   library ::                                                        */
  /*     A FreeType library instance.                                    */
  /*   stream ::                                                         */
  /*     A file stream containing the resource fork.                     */
  /*   map_offset ::                                                     */
  /*     The file offset where the resource map starts.                  */
  /*   rdata_pos ::                                                      */
  /*     The file offset where the resource data starts.                 */
  /*   tag ::                                                            */
  /*     The resource tag.                                               */
  /*   sort_by_res_id ::                                                 */
  /*     A Boolean to sort the fragmented resource by their ids.         */
  /*     The fragmented resources for `POST' resource should be sorted   */
  /*     to restore Type1 font properly.  For `snft' resources, sorting  */
  /*     may induce a different order of the faces in comparison to that */
  /*     by QuickDraw API.                                               */
  /*                                                                     */
  /* <Output>                                                            */
  /*   offsets ::                                                        */
  /*     The stream offsets for the resource data specified by `tag'.    */
  /*     This array is allocated by the function, so you have to call    */
  /*     @ft_mem_free after use.                                         */
  /*   count ::                                                          */
  /*     The length of offsets array.                                    */
  /*                                                                     */
  /* <Return>                                                            */
  /*   FreeType error code.  FT_Err_Ok means success.                    */
  /*                                                                     */
  /* <Note>                                                              */
  /*   Normally you should use `<API key>' to get the                    */
  /*   value for `map_offset' and `rdata_pos'.                           */
  FT_BASE( FT_Error )
  <API key>( FT_Library  library,
             FT_Stream   stream,
             FT_Long     map_offset,
             FT_Long     rdata_pos,
             FT_Long     tag,
             FT_Bool     sort_by_res_id,
             FT_Long   **offsets,
             FT_Long    *count );


FT_END_HEADER

#endif /* __FTRFORK_H__ */


/* END */
|
require 'chunky_png'
require 'fileutils'

# Tile fill colors: COLOR_TRUE marks pixels present in the input stream,
# COLOR_FALSE is the background the rest of the 256x256 tile keeps.
#COLOR_TRUE = ChunkyPNG::Color::rgba(224, 255, 255, 96) # for kunijiban
COLOR_TRUE = ChunkyPNG::Color::rgba(255, 255, 255, 0) # for ort
COLOR_FALSE = ChunkyPNG::Color::rgba(0, 0, 0, 128)

# Build a fresh 256x256 tile filled with the background color.
def _new_image
  return ChunkyPNG::Image.new(256, 256, COLOR_FALSE)
end
# Persist +image+ as "<z>/<x>/<y>.png" (creating the directory tree on
# demand), log the write, and hand back a fresh blank tile for reuse.
def write(image, zxy)
  path = "#{zxy.join('/')}.png"
  dir = File.dirname(path)
  FileUtils.mkdir_p(dir) unless File.directory?(dir)
  image.save(path)
  print "wrote #{path}\n"
  return _new_image
end
# Pre-render the tile served for missing coordinates.
_new_image.save('404.png')

current = [nil, nil, nil]
last = [nil, nil, nil]
(z, x, y, u, v) = [nil, nil, nil, nil, nil]
image = _new_image

# Each stdin line is "z,x,y,u,v": a tile coordinate (z/x/y) plus a pixel
# (u/v) inside that 256x256 tile.  Input is expected grouped by tile, so
# a tile can be flushed as soon as the tile coordinate changes.
while gets
  # Block param renamed from `v`: it used to shadow the outer `v`
  # destructured on this same line (Ruby shadowing warning).
  (z, x, y, u, v) = $_.strip.split(',').map { |field| field.to_i }
  current = [z, x, y]
  if current == last or last[0].nil?
    image[u, v] = COLOR_TRUE
  else
    # Tile changed: flush the finished tile and start a fresh one.
    image = write(image, last)
    image[u, v] = COLOR_TRUE
  end
  last = current
end

# Flush the final tile -- but only if at least one line was read;
# previously an empty input attempted a bogus write to "//.png".
write(image, last) unless last[0].nil?
|
# Southpaw Technology, and is not to be reproduced, transmitted,
__all__ = ['ScrollbarWdg', 'TestScrollbarWdg']
from tactic.ui.common import BaseRefreshWdg
from pyasm.web import DivWdg
class TestScrollbarWdg(BaseRefreshWdg):
    """Fixed 600x400 container widget; judging by its name it exists to
    exercise ScrollbarWdg manually (no scrollbar is attached here)."""

    def get_display(my):
        # NOTE(review): assumes BaseRefreshWdg supplies `my.top` -- the
        # base class is not visible in this file.
        top = my.top
        top.add_style("width: 600px")
        top.add_style("height: 400px")
        return top
class ScrollbarWdg(BaseRefreshWdg):
    """Thin custom scrollbar overlaid on the right edge of a scrollable
    container.

    The bar fades in when the mouse enters the container and fades out
    when it leaves; content is scrolled by adjusting the container's CSS
    'margin-top', driven either by dragging the handle or by the mouse
    wheel.
    """

    def get_display(my):
        top = my.top
        top.add_class("spt_scrollbar_top")

        # CSS class used to locate the scrollable container; overridable
        # via the 'content_class' kwarg.
        # NOTE(review): the 'content' kwarg is read but never used below.
        content = my.kwargs.get("content")
        content_class = my.kwargs.get("content_class")
        if not content_class:
            content_class = "spt_content"

        # Track/handle width in pixels.
        width = 8

        # Absolutely positioned at the container's top-right corner and
        # initially invisible (faded in by the mouseenter handler below).
        top.add_style("width: %s" % width)
        top.add_style("position: absolute")
        top.add_style("top: 0px")
        top.add_style("right: 0px")
        top.add_color("background", "background")
        top.add_style("margin: 3px 5px")
        top.add_style("opacity: 0.0")

        # Install the shared spt.scrollbar javascript helpers on load.
        top.add_behavior( {
            'type': 'load',
            'cbjs_action': my.get_onload_js()
        } )

        # On load: match the container's height and attach the
        # fade-in/out and mousewheel handlers to the container.
        top.add_behavior( {
            'type': 'load',
            'content_class': content_class,
            'cbjs_action': '''
var parent = bvr.src_el.getParent("." + bvr.content_class);
var size = parent.getSize();
bvr.src_el.setStyle("height", size.y);
var scrollbar = parent.getElement(".spt_scrollbar_top");
parent.addEvent("mouseenter", function() {
new Fx.Tween(scrollbar, {duration: 250}).start("opacity", 1.0);
} );
parent.addEvent("mouseleave", function() {
new Fx.Tween(scrollbar, {duration: 250}).start("opacity", 0.0);
} );
parent.addEvent("keypress", function(evt) {
new Fx.Tween(scrollbar, {duration: 250}).start("opacity", 0.0);
console.log(evt);
} );
parent.addEvent("mousewheel", function(evt) {
evt.stopPropagation();
spt.scrollbar.content = parent;
if (evt.wheel == 1) {
spt.scrollbar.scroll(15)
}
else {
spt.scrollbar.scroll(-15)
}
} );
'''
        } )

        # The draggable handle that rides inside the track.
        bar = DivWdg()
        bar.add_class("spt_scrollbar")
        bar.add_class("hand")
        top.add(bar)
        bar.add_style("width: %s" % width)
        bar.add_style("height: 30px")
        bar.add_style("border: solid 1px black")
        bar.add_color("background", "background3")
        #bar.add_border()
        bar.add_style("border-radius: 5")
        bar.add_style("position: absolute")
        bar.add_style("top: 0px")

        # Dragging an element with class spt_scrollbar routes through the
        # spt.scrollbar drag handlers defined in get_onload_js().
        top.add_behavior( {
            'type': 'smart_drag',
            'bvr_match_class': 'spt_scrollbar',
            '<API key>' : True,
            "cbjs_setup": 'spt.scrollbar.drag_setup( evt, bvr, mouse_411 );',
            "cbjs_motion": 'spt.scrollbar.drag_motion( evt, bvr, mouse_411 );'
        } )

        return top

    def get_onload_js(my):
        """Return the javascript defining the spt.scrollbar namespace:
        drag state plus the drag_setup/drag_motion/scroll handlers."""
        return r'''
spt.scrollbar = {};
spt.scrollbar.mouse_start_y = null;
spt.scrollbar.el_start_y = null;
spt.scrollbar.top = null;
spt.scrollbar.content = null;
spt.scrollbar.drag_setup = function(evt, bvr, mouse_411) {
spt.scrollbar.mouse_start_y = mouse_411.curr_y;
var src_el = spt.behavior.get_bvr_src( bvr );
var pos_y = parseInt(src_el.getStyle("top").replace("px", ""));
spt.scrollbar.el_start_y = pos_y;
spt.scrollbar.content = $("spt_SCROLL");
spt.scrollbar.top = src_el.getParent(".spt_scrollbar_top")
}
spt.scrollbar.drag_motion = function(evt, bvr, mouse_411) {
var src_el = spt.behavior.get_bvr_src( bvr );
var dy = mouse_411.curr_y - spt.scrollbar.mouse_start_y;
var pos_y = spt.scrollbar.el_start_y + dy;
if (pos_y < 0) {
return;
}
var content = spt.scrollbar.content;
var content_size = spt.scrollbar.content.getSize();
var top_size = spt.scrollbar.top.getSize();
var bar_size = src_el.getSize();
if (pos_y > top_size.y - bar_size.y - 5) {
return;
}
bvr.src_el.setStyle("top", pos_y);
//var content = bvr.src_el.getParent(".spt_content");
content.setStyle("margin-top", -dy);
}
spt.scrollbar.scroll = function(dy) {
spt.scrollbar.content = $("spt_SCROLL");
var content = spt.scrollbar.content;
var pos_y = parseInt(content.getStyle("margin-top").replace("px", ""));
content.setStyle("margin-top", pos_y + dy);
}
'''
|
package org.eclipse.che.ide.actions;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import org.eclipse.che.api.analytics.client.logger.<API key>;
import org.eclipse.che.ide.Resources;
import org.eclipse.che.ide.api.action.ActionEvent;
import org.eclipse.che.ide.api.action.ProjectAction;
import org.eclipse.che.ide.api.editor.EditorAgent;
import org.eclipse.che.ide.api.editor.EditorInput;
import org.eclipse.che.ide.api.editor.EditorPartPresenter;
import org.eclipse.che.ide.api.editor.EditorWithAutoSave;
import org.eclipse.che.ide.util.loging.Log;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
 * Action that saves every modified ("dirty") open editor, one after the
 * other.  Editors with auto-save enabled are ignored when deciding
 * whether the action is enabled.
 *
 * @author Evgen Vidolob
 */
@Singleton
public class SaveAllAction extends ProjectAction {
    private final EditorAgent editorAgent;
    private final <API key> eventLogger;

    @Inject
    public SaveAllAction(EditorAgent editorAgent, Resources resources, <API key> eventLogger) {
        super("Save All", "Save all changes for project", resources.save());
        this.editorAgent = editorAgent;
        this.eventLogger = eventLogger;
    }

    /** {@inheritDoc} */
    @Override
    public void actionPerformed(ActionEvent e) {
        eventLogger.log(this);
        // Copy the opened-editor collection into a fresh list: save()
        // removes entries as it works through them, so it must not
        // operate on a view backed by the agent's own map.
        Collection<EditorPartPresenter> values = editorAgent.getOpenedEditors().values();
        List<EditorPartPresenter> editors = new ArrayList<>(values);
        save(editors);
    }

    /**
     * Saves the first editor in {@code editors} (asynchronously when it
     * is dirty), removes it from the list once handled -- on success,
     * failure, or when no save was needed -- and recurses on the rest.
     * Recursion stops when the list is empty.
     */
    private void save(final List<EditorPartPresenter> editors) {
        if (editors.isEmpty()) {
            return;
        }
        final EditorPartPresenter editorPartPresenter = editors.get(0);
        if (editorPartPresenter.isDirty()) {
            editorPartPresenter.doSave(new AsyncCallback<EditorInput>() {
                @Override
                public void onFailure(Throwable caught) {
                    Log.error(SaveAllAction.class, caught);
                    //try to save other files
                    editors.remove(editorPartPresenter);
                    save(editors);
                }

                @Override
                public void onSuccess(EditorInput result) {
                    editors.remove(editorPartPresenter);
                    save(editors);
                }
            });
        } else {
            // Nothing to write for a clean editor; move on to the next.
            editors.remove(editorPartPresenter);
            save(editors);
        }
    }

    /** {@inheritDoc} */
    @Override
    public void updateProjectAction(ActionEvent e) {
        // e.getPresentation().setVisible(true);
        boolean hasDirtyEditor = false;
        for (EditorPartPresenter editor : editorAgent.getOpenedEditors().values()) {
            // Editors with auto-save enabled do not count toward
            // enabling the action.
            if(editor instanceof EditorWithAutoSave) {
                if (((EditorWithAutoSave)editor).isAutoSaveEnabled()) {
                    continue;
                }
            }
            if (editor.isDirty()) {
                hasDirtyEditor = true;
                break;
            }
        }
        e.getPresentation().<API key>(hasDirtyEditor);
    }
}
|
// --- Auto-generated Java2Script (j2s) translation of SWT's FillLayout. ---
// A FillLayout lays out equally-sized children in a single row or column.
// NOTE(review): $_L/$_C/$_K/$_V/$_M/$_Z/$_R/$_O appear to be j2s runtime
// macros (load-class, define-class, constructor, override, method,
// init-fields, super-call, instanceof) — semantics assumed from j2s
// conventions; verify against the j2s runtime before editing.
$_L(["$wt.widgets.Layout"],"$wt.layout.FillLayout",["$wt.graphics.Point","$wt.layout.FillData"],function(){
// Field defaults: type 256 (SWT.HORIZONTAL, per toString below — 512 is
// SWT.VERTICAL), zero margins and zero spacing between children.
c$=$_C(function(){
this.type=256;
this.marginWidth=0;
this.marginHeight=0;
this.spacing=0;
$_Z(this,arguments);
},$wt.layout,"FillLayout",$wt.widgets.Layout);
// No-arg constructor: keeps the default horizontal orientation.
$_K(c$,
function(){
$_R(this,$wt.layout.FillLayout,[]);
});
// Constructor taking an orientation type (256 horizontal / 512 vertical).
// "~N" marks a numeric parameter in the j2s signature encoding.
$_K(c$,
function(type){
$_R(this,$wt.layout.FillLayout,[]);
this.type=type;
},"~N");
// Computes the composite's preferred size. Each child is measured with an
// equal share of the constrained hint (minus total spacing); the result
// is the maximum child size, multiplied by the child count along the fill
// axis, plus margins and spacing. A hint of -1 means "no constraint".
$_V(c$,"computeSize",
function(composite,wHint,hHint,flushCache){
var children=composite.getChildren();
var count=children.length;
var maxWidth=0;
var maxHeight=0;
for(var i=0;i<count;i++){
var child=children[i];
var w=wHint;
var h=hHint;
if(count>0){
if(this.type==256&&wHint!=-1){
w=Math.max(0,Math.floor((wHint-(count-1)*this.spacing)/count));
}if(this.type==512&&hHint!=-1){
h=Math.max(0,Math.floor((hHint-(count-1)*this.spacing)/count));
}}var size=this.computeChildSize(child,w,h,flushCache);
maxWidth=Math.max(maxWidth,size.x);
maxHeight=Math.max(maxHeight,size.y);
}
var width=0;
var height=0;
if(this.type==256){
width=count*maxWidth;
if(count!=0)width+=(count-1)*this.spacing;
height=maxHeight;
}else{
width=maxWidth;
height=count*maxHeight;
if(count!=0)height+=(count-1)*this.spacing;
}width+=this.marginWidth*2;
height+=this.marginHeight*2;
// An explicit hint always overrides the computed dimension.
if(wHint!=-1)width=wHint;
if(hHint!=-1)height=hHint;
return new $wt.graphics.Point(width,height);
},"$wt.widgets.Composite,~N,~N,~B");
// Measures a single child, attaching a FillData as its layout data for
// caching if none is present. When a hint is constrained, the control's
// trim (via computeTrim for Scrollables, otherwise 2x border width) is
// subtracted before measuring.
$_M(c$,"computeChildSize",
function(control,wHint,hHint,flushCache){
var data=control.getLayoutData();
if(data==null){
data=new $wt.layout.FillData();
control.setLayoutData(data);
}var size=null;
if(wHint==-1&&hHint==-1){
size=data.computeSize(control,wHint,hHint,flushCache);
}else{
var trimX;
var trimY;
if($_O(control,$wt.widgets.Scrollable)){
var rect=(control).computeTrim(0,0,0,0);
trimX=rect.width;
trimY=rect.height;
}else{
trimX=trimY=control.getBorderWidth()*2;
}var w=wHint==-1?wHint:Math.max(0,wHint-trimX);
var h=hHint==-1?hHint:Math.max(0,hHint-trimY);
size=data.computeSize(control,w,h,flushCache);
}return size;
},"$wt.widgets.Control,~N,~N,~B");
// Invalidates the cached size stored in the control's FillData, if any.
$_V(c$,"flushCache",
function(control){
var data=control.getLayoutData();
if(data!=null)(data).flushCache();
return true;
},"$wt.widgets.Control");
// Returns the simple (unqualified) class name, e.g. "FillLayout".
$_M(c$,"getName",
function(){
var string=this.getClass().getName();
var index=string.lastIndexOf('.');
if(index==-1)return string;
return string.substring(index+1,string.length);
});
// Positions the children in equal-size cells along the fill axis inside
// the client area, honoring margins and spacing. Remainder pixels from
// the integer division (extra = size % count) are split between the
// first child (floor(extra/2)) and the last child (floor((extra+1)/2)).
$_V(c$,"layout",
function(composite,flushCache){
var rect=composite.getClientArea();
var children=composite.getChildren();
var count=children.length;
if(count==0)return;
var width=rect.width-this.marginWidth*2;
var height=rect.height-this.marginHeight*2;
if(this.type==256){
width-=(count-1)*this.spacing;
var x=rect.x+this.marginWidth;
var extra=width%count;
var y=rect.y+this.marginHeight;
var cellWidth=Math.floor(width/count);
for(var i=0;i<count;i++){
var child=children[i];
var childWidth=cellWidth;
if(i==0){
childWidth+=Math.floor(extra/2);
}else{
if(i==count-1)childWidth+=Math.floor((extra+1)/2);
}child.setBounds(x,y,childWidth,height);
x+=childWidth+this.spacing;
}
}else{
height-=(count-1)*this.spacing;
var x=rect.x+this.marginWidth;
var cellHeight=Math.floor(height/count);
var y=rect.y+this.marginHeight;
var extra=height%count;
for(var i=0;i<count;i++){
var child=children[i];
var childHeight=cellHeight;
if(i==0){
childHeight+=Math.floor(extra/2);
}else{
if(i==count-1)childHeight+=Math.floor((extra+1)/2);
}child.setBounds(x,y,width,childHeight);
y+=childHeight+this.spacing;
}
}},"$wt.widgets.Composite,~B");
// Debug string: orientation plus any non-default margin/spacing values.
$_V(c$,"toString",
function(){
var string=this.getName()+"{";
string+="type="+((this.type==512)?"SWT.VERTICAL":"SWT.HORIZONTAL")+" ";
if(this.marginWidth!=0)string+="marginWidth="+this.marginWidth+" ";
if(this.marginHeight!=0)string+="marginHeight="+this.marginHeight+" ";
if(this.spacing!=0)string+="spacing="+this.spacing+" ";
string=string.trim();
string+="}";
return string;
});
});
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.