answer stringlengths 15 1.25M |
|---|
<!-- Read-only content display with optional show/hide (for secret values) and a copy-to-clipboard control. -->
<!-- NOTE(review): the "<API key>" tokens below are redaction artifacts (identifiers stripped by tooling); restore the originals from version control. -->
<div class="wrapper">
<label *ngIf="label" style="padding-right: 20px">{{label}}</label>
<!-- Empty label keeps the layout aligned when no label text is supplied. -->
<label *ngIf="!label"></label>
<div>
<!-- Shows the real content when contentView is true, otherwise the (redacted) masked form. -->
<pre (click)="highlightText($event)" tabindex="0">{{contentView ? content : <API key>}}</pre>
</div>
<!-- "Show" control: only rendered for password fields while the content is masked. -->
<div [hidden]="!passwordField || contentView" [title]="'show' | translate" (click)="showPassword()" (keyup)="onKeyPress($event.target.value, 'show')" role="button" tabindex="0">
<i class="link fa fa-eye clickable"><span class="fix-font"> {{ 'show' | translate }}</span></i>
</div>
<!-- "Hide" control: only rendered for password fields while the content is revealed. -->
<div [hidden]="!passwordField || !contentView" [title]="'hide' | translate" (click)="hidePassword()" (keyup)="onKeyPress($event.target.value, 'hide')" role="button" tabindex="0">
<i class="link fa fa-eye-slash clickable"><span class="fix-font"> {{ 'hide' | translate }}</span></i>
</div>
<!-- Copy control with a transient "copied" pop-over confirmation. -->
<!-- NOTE(review): keyup passes $event.target.value, but these are DIVs with no .value — looks like a bug; confirm onKeyPress's expectations. -->
<div title="{{ '<API key>' | translate }}" (click)="copyToClipboard()" role="button" (keyup)="onKeyPress($event.target.value, 'copy')" tabindex="0">
<pop-over message="{{ 'copypre_copied' | translate }}" hideAfter="300" tabIndex="-1">
<i class="link fa fa-copy clickable"><span class="fix-font"> {{ 'copypre_copy' | translate }}</span></i>
</pop-over>
</div>
</div> |
<?php
namespace Chigi\Chiji\Plugin\ReleaseExtension\Util;
use Chigi\Chiji\Plugin\ReleaseExtension\Exceptions\<API key>;
use Chigi\Chiji\Plugin\ReleaseExtension\ReleaseManager;
use Chigi\Chiji\Project\Project;
/**
* The Mapping between Project and ReleaseManager.
*
* @author Richard Lea <chigix@zoho.com>
*/
class ProjectMapping {
// Registry keyed by project name; each entry is array(Project, ReleaseManager).
private static $mapping = array();
/**
* Register a Project-to-ReleaseManager Mapping.
*
* A later registration under the same project name overwrites the earlier one.
*
* @param Project $project
* @param ReleaseManager $manager
*/
public static function register(Project $project, ReleaseManager $manager) {
self::$mapping[$project->getProjectName()] = array($project, $manager);
}
/**
* Look up the ReleaseManager registered for the given project.
*
* NOTE(review): the exception class name below was redacted to "<API key>"
* by tooling (see the matching `use ...Exceptions\...` import above);
* restore the original class name from version control.
*
* @param Project $project
* @return ReleaseManager
*/
public static function getManagerByProject(Project $project) {
if (isset(self::$mapping[$project->getProjectName()])) {
// Index 1 of the registered pair is the ReleaseManager.
return self::$mapping[$project->getProjectName()][1];
} else {
throw new <API key>($project->getProjectName() . " NOT REGISTERED.");
}
}
} |
package org.deeplearning4j.nn.conf.dropout;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NonNull;
import lombok.val;
import org.deeplearning4j.nn.workspace.ArrayType;
import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr;
import org.nd4j.base.Preconditions;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.random.impl.DropOutInverted;
import org.nd4j.linalg.factory.Broadcast;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.schedule.ISchedule;
import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties;
import org.nd4j.shade.jackson.annotation.JsonProperty;
@Data
@<API key>({"mask"})
@EqualsAndHashCode(exclude = {"mask"})
public class SpatialDropout implements IDropout {
private double p;
private ISchedule pSchedule;
private transient INDArray mask;
/**
* @param <API key> Probability of retaining an activation - see {@link Dropout} javadoc
*/
public SpatialDropout(double <API key>) {
this(<API key>, null);
if (<API key> < 0.0) {
throw new <API key>("Activation retain probability must be > 0. Got: " + <API key>);
}
if (<API key> == 0.0) {
throw new <API key>("Invalid probability value: Dropout with 0.0 probability of retaining "
+ "activations is not supported");
}
}
/**
* @param <API key> Schedule for probability of retaining an activation - see {@link Dropout} javadoc
*/
public SpatialDropout(ISchedule <API key>) {
this(Double.NaN, <API key>);
}
protected SpatialDropout(@JsonProperty("p") double <API key>,
@JsonProperty("pSchedule") ISchedule <API key>) {
this.p = <API key>;
this.pSchedule = <API key>;
}
@Override
public INDArray applyDropout(INDArray inputActivations, INDArray output, int iteration, int epoch, LayerWorkspaceMgr workspaceMgr) {
Preconditions.checkArgument(inputActivations.rank() == 5 || inputActivations.rank() == 4
|| inputActivations.rank() == 3, "Cannot apply spatial dropout to activations of rank %s: " +
"spatial dropout can only be used for rank 3, 4 or 5 activations (input activations shape: %s)"
, inputActivations.rank(), inputActivations.shape());
double currP;
if (pSchedule != null) {
currP = pSchedule.valueAt(iteration, epoch);
} else {
currP = p;
}
val minibatch = inputActivations.size(0);
val dim1 = inputActivations.size(1);
mask = workspaceMgr.createUninitialized(ArrayType.INPUT, output.dataType(), minibatch, dim1).assign(1.0);
Nd4j.getExecutioner().exec(new DropOutInverted(mask, currP));
Broadcast.mul(inputActivations, mask, output, 0, 1);
return output;
}
@Override
public INDArray backprop(INDArray gradAtOutput, INDArray gradAtInput, int iteration, int epoch) {
Preconditions.checkState(mask != null, "Cannot perform backprop: Dropout mask array is absent (already cleared?)");
//Mask has values 0 or 1/p
//dL/dIn = dL/dOut * dOut/dIn = dL/dOut * (0 if dropped, or 1/p otherwise)
Broadcast.mul(gradAtOutput, mask, gradAtInput, 0, 1);
mask = null;
return gradAtInput;
}
@Override
public void clear() {
mask = null;
}
@Override
public IDropout clone() {
return new SpatialDropout(p, pSchedule);
}
} |
package com.github.out.http.retrofit;
import com.github.out.http.HttpContents;
import java.util.concurrent.TimeUnit;
import okhttp3.OkHttpClient;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava.RxJavaCallAdapterFactory;
import retrofit2.converter.gson.GsonConverterFactory;
public class RetrofitFactory
{
private static Retrofit retrofit;
private RetrofitFactory()
{
OkHttpClient.Builder httpClientBuilder = new OkHttpClient.Builder();
httpClientBuilder.connectTimeout(HttpContents.DEFAULT_TIMEOUT, TimeUnit.SECONDS);
/**
* addConverterFactoryGson
* <API key>(<API key>.create())serviceObservable
*/
retrofit = new Retrofit.Builder()
.client(httpClientBuilder.build())
.addConverterFactory(<API key>.create())
.<API key>(<API key>.create())
.baseUrl(HttpContents.DOMAIN_URL)
.build();
}
/**
*
* `synchronized`
* @return
*/
public synchronized static Retrofit getInstance()
{
if (null == retrofit)
{
new RetrofitFactory();
}
return retrofit;
}
} |
#ifndef SRC_ROBOTENGINE_H_
#define SRC_ROBOTENGINE_H_
#include "src/Robot.h"
#include "src/Resultor.h"
#include "bin/Config.h"
// Abstract strategy interface for robot decision engines.
class RobotEngine{
public:
    // This is a polymorphic base class (nextAction is pure virtual), so a
    // virtual destructor is required: without it, deleting a derived engine
    // through a RobotEngine* is undefined behavior.
    virtual ~RobotEngine() {}
    // Optional one-time setup before the run starts; default is a no-op.
    virtual void init(int areas, Config &conf) {}
    // Decide the robot's next action for the given area/zone.
    // Must be implemented by every concrete engine.
    virtual void nextAction(int area, int zone, Robot &rob) = 0;
    // Optional learning hook invoked with run results; default is a no-op.
    // NOTE(review): Resultor is taken by value — confirm it is cheap to copy.
    virtual void train(Resultor r) {}
};
#endif // SRC_ROBOTENGINE_H_ |
package com.fastcome1985.learn;
/**
 * A single-method callback that accepts one value of type {@code T}
 * (equivalent in shape to {@link java.util.function.Consumer}).
 *
 * <p>Marked {@code @FunctionalInterface} so the compiler enforces the single
 * abstract method and it can be implemented with a lambda or method reference.
 *
 * @param <T> the type of value accepted
 */
@FunctionalInterface
public interface AT<T> {
    /**
     * Handles the given value.
     *
     * @param t the value to accept
     */
    void at(T t);
}
// This source file is part of the Swift.org open source project
#include "Serialization.h"
#include "SILFormat.h"
#include "swift/AST/ASTContext.h"
#include "swift/AST/ASTMangler.h"
#include "swift/AST/ASTVisitor.h"
#include "swift/AST/DiagnosticsCommon.h"
#include "swift/AST/Expr.h"
#include "swift/AST/FileSystem.h"
#include "swift/AST/<API key>.h"
#include "swift/AST/GenericEnvironment.h"
#include "swift/AST/Initializer.h"
#include "swift/AST/LazyResolver.h"
#include "swift/AST/LinkLibrary.h"
#include "swift/AST/ParameterList.h"
#include "swift/AST/Pattern.h"
#include "swift/AST/PrettyStackTrace.h"
#include "swift/AST/PropertyWrappers.h"
#include "swift/AST/ProtocolConformance.h"
#include "swift/AST/RawComment.h"
#include "swift/AST/SourceFile.h"
#include "swift/AST/TypeCheckRequests.h"
#include "swift/AST/TypeVisitor.h"
#include "swift/Basic/Dwarf.h"
#include "swift/Basic/FileSystem.h"
#include "swift/Basic/STLExtras.h"
#include "swift/Basic/Timer.h"
#include "swift/Basic/Version.h"
#include "swift/ClangImporter/ClangImporter.h"
#include "swift/ClangImporter/ClangModule.h"
#include "swift/Demangling/ManglingMacros.h"
#include "swift/Serialization/<API key>.h"
#include "swift/Strings.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/Bitcode/BitstreamWriter.h"
#include "llvm/Bitcode/RecordLayout.h"
#include "llvm/Config/config.h"
#include "llvm/Support/Allocator.h"
#include "llvm/Support/Chrono.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/DJB.h"
#include "llvm/Support/EndianStream.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/MemoryBuffer.h"
#include "llvm/Support/OnDiskHashTable.h"
#include "llvm/Support/Path.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Support/<API key>.h"
#include <vector>
using namespace swift;
using namespace swift::serialization;
using namespace llvm::support;
using swift::version::Version;
using llvm::BCBlockRAII;
/// Used for static_assert.
// Compile-time check that a DeclID's payload fits in 32 bits once the
// pointer-embedded low bits are accounted for. (Restores the redacted
// llvm::PointerLikeTypeTraits identifier.)
static constexpr bool declIDFitsIn32Bits() {
  using Int32Info = std::numeric_limits<uint32_t>;
  using PtrIntInfo = std::numeric_limits<uintptr_t>;
  using DeclIDTraits = llvm::PointerLikeTypeTraits<DeclID>;
  return PtrIntInfo::digits - DeclIDTraits::NumLowBitsAvailable <= Int32Info::digits;
}
/// Used for static_assert.
static constexpr bool <API key>() {
// FIXME: Considering BitOffset is a _bit_ offset, and we're storing it in 31
// bits of a PointerEmbeddedInt, the maximum offset inside a modulefile we can
// handle happens at 2**28 _bytes_, which is only 268MB. Considering
// Swift.swiftmodule is itself 25MB, it seems entirely possible users will
// exceed this limit.
using Int32Info = std::numeric_limits<uint32_t>;
using PtrIntInfo = std::numeric_limits<uintptr_t>;
using BitOffsetTraits = llvm::<API key><BitOffset>;
return PtrIntInfo::digits - BitOffsetTraits::NumLowBitsAvailable <= Int32Info::digits;
}
namespace {
// NOTE(review): the "<API key>" tokens in this namespace are redaction
// artifacts (identifiers stripped by tooling — hash seeds, class names, and
// member names); restore them from upstream swift/lib/Serialization.
/// Used to serialize the on-disk decl hash table.
class DeclTableInfo {
public:
using key_type = DeclBaseName;
using key_type_ref = key_type;
using data_type = Serializer::DeclTableData;
using data_type_ref = const data_type &;
using hash_value_type = uint32_t;
using offset_type = unsigned;
// Normal names hash by identifier string; special names (subscript,
// constructor, destructor) use small fixed discriminator values.
hash_value_type ComputeHash(key_type_ref key) {
switch (key.getKind()) {
case DeclBaseName::Kind::Normal:
assert(!key.empty());
return llvm::djbHash(key.getIdentifier().str(),
<API key>);
case DeclBaseName::Kind::Subscript:
return static_cast<uint8_t>(DeclNameKind::Subscript);
case DeclBaseName::Kind::Constructor:
return static_cast<uint8_t>(DeclNameKind::Constructor);
case DeclBaseName::Kind::Destructor:
return static_cast<uint8_t>(DeclNameKind::Destructor);
}
llvm_unreachable("unhandled kind");
}
// Emits the little-endian uint16 key/data lengths, as required by
// llvm::OnDiskChainedHashTableGenerator.
std::pair<unsigned, unsigned> EmitKeyDataLength(raw_ostream &out,
key_type_ref key,
data_type_ref data) {
uint32_t keyLength = sizeof(uint8_t); // For the flag of the name's kind
if (key.getKind() == DeclBaseName::Kind::Normal) {
keyLength += key.getIdentifier().str().size(); // The name's length
}
assert(keyLength == static_cast<uint16_t>(keyLength));
uint32_t dataLength = (sizeof(uint32_t) + 1) * data.size();
assert(dataLength == static_cast<uint16_t>(dataLength));
endian::Writer writer(out, little);
writer.write<uint16_t>(keyLength);
writer.write<uint16_t>(dataLength);
return { keyLength, dataLength };
}
// Key layout: one kind byte, then (for normal names) the raw identifier.
void EmitKey(raw_ostream &out, key_type_ref key, unsigned len) {
endian::Writer writer(out, little);
switch (key.getKind()) {
case DeclBaseName::Kind::Normal:
writer.write<uint8_t>(static_cast<uint8_t>(DeclNameKind::Normal));
writer.OS << key.getIdentifier().str();
break;
case DeclBaseName::Kind::Subscript:
writer.write<uint8_t>(static_cast<uint8_t>(DeclNameKind::Subscript));
break;
case DeclBaseName::Kind::Constructor:
writer.write<uint8_t>(static_cast<uint8_t>(DeclNameKind::Constructor));
break;
case DeclBaseName::Kind::Destructor:
writer.write<uint8_t>(static_cast<uint8_t>(DeclNameKind::Destructor));
break;
}
}
// Data layout: (uint8 discriminator, uint32 DeclID) per entry.
void EmitData(raw_ostream &out, key_type_ref key, data_type_ref data,
unsigned len) {
static_assert(declIDFitsIn32Bits(), "DeclID too large");
endian::Writer writer(out, little);
for (auto entry : data) {
writer.write<uint8_t>(entry.first);
writer.write<uint32_t>(entry.second);
}
}
};
/// Used to serialize the extension lookup table (base-type name -> extensions).
class ExtensionTableInfo {
serialization::Serializer &Serializer;
llvm::SmallDenseMap<const NominalTypeDecl *,std::string,4> MangledNameCache;
public:
explicit ExtensionTableInfo(serialization::Serializer &serializer)
: Serializer(serializer) {}
using key_type = Identifier;
using key_type_ref = key_type;
using data_type = Serializer::ExtensionTableData;
using data_type_ref = const data_type &;
using hash_value_type = uint32_t;
using offset_type = unsigned;
hash_value_type ComputeHash(key_type_ref key) {
assert(!key.empty());
return llvm::djbHash(key.str(), <API key>);
}
// Returns the mangled-name length for the extended nominal (optionally
// exposing the cached string via dataToWrite); for bases in a (redacted)
// special decl-context case it returns a negated context reference instead.
int32_t getNameDataForBase(const NominalTypeDecl *nominal,
StringRef *dataToWrite = nullptr) {
if (nominal->getDeclContext()-><API key>())
return -Serializer.<API key>(nominal->getDeclContext());
auto &mangledName = MangledNameCache[nominal];
if (mangledName.empty())
mangledName = Mangle::ASTMangler().mangleNominalType(nominal);
assert(llvm::isUInt<31>(mangledName.size()));
if (dataToWrite)
*dataToWrite = mangledName;
return mangledName.size();
}
std::pair<unsigned, unsigned> EmitKeyDataLength(raw_ostream &out,
key_type_ref key,
data_type_ref data) {
uint32_t keyLength = key.str().size();
assert(keyLength == static_cast<uint16_t>(keyLength));
// Fixed 8 bytes per entry, plus variable mangled-name bytes when positive.
uint32_t dataLength = (sizeof(uint32_t) * 2) * data.size();
for (auto dataPair : data) {
int32_t nameData = getNameDataForBase(dataPair.first);
if (nameData > 0)
dataLength += nameData;
}
assert(dataLength == static_cast<uint16_t>(dataLength));
endian::Writer writer(out, little);
writer.write<uint16_t>(keyLength);
writer.write<uint16_t>(dataLength);
return { keyLength, dataLength };
}
void EmitKey(raw_ostream &out, key_type_ref key, unsigned len) {
out << key.str();
}
void EmitData(raw_ostream &out, key_type_ref key, data_type_ref data,
unsigned len) {
static_assert(declIDFitsIn32Bits(), "DeclID too large");
endian::Writer writer(out, little);
for (auto entry : data) {
StringRef dataToWrite;
writer.write<uint32_t>(entry.second);
writer.write<int32_t>(getNameDataForBase(entry.first, &dataToWrite));
out << dataToWrite;
}
}
};
/// Used to serialize a string-keyed table mapping to a single DeclID.
class LocalDeclTableInfo {
public:
using key_type = std::string;
using key_type_ref = StringRef;
using data_type = DeclID;
using data_type_ref = const data_type &;
using hash_value_type = uint32_t;
using offset_type = unsigned;
hash_value_type ComputeHash(key_type_ref key) {
assert(!key.empty());
return llvm::djbHash(key, <API key>);
}
std::pair<unsigned, unsigned> EmitKeyDataLength(raw_ostream &out,
key_type_ref key,
data_type_ref data) {
uint32_t keyLength = key.size();
assert(keyLength == static_cast<uint16_t>(keyLength));
uint32_t dataLength = sizeof(uint32_t);
endian::Writer writer(out, little);
writer.write<uint16_t>(keyLength);
// No need to write the data length; it's constant.
return { keyLength, dataLength };
}
void EmitKey(raw_ostream &out, key_type_ref key, unsigned len) {
out << key;
}
void EmitData(raw_ostream &out, key_type_ref key, data_type_ref data,
unsigned len) {
static_assert(declIDFitsIn32Bits(), "DeclID too large");
endian::Writer writer(out, little);
writer.write<uint32_t>(data);
}
};
using <API key> =
llvm::<API key><LocalDeclTableInfo>;
/// Used to serialize the nested-type-decls table: identifier -> (parent, child) pairs.
class <API key> {
public:
using key_type = Identifier;
using key_type_ref = const key_type &;
using data_type = Serializer::NestedTypeDeclsData; // (parent, child) pairs
using data_type_ref = const data_type &;
using hash_value_type = uint32_t;
using offset_type = unsigned;
hash_value_type ComputeHash(key_type_ref key) {
assert(!key.empty());
return llvm::djbHash(key.str(), <API key>);
}
std::pair<unsigned, unsigned> EmitKeyDataLength(raw_ostream &out,
key_type_ref key,
data_type_ref data) {
uint32_t keyLength = key.str().size();
assert(keyLength == static_cast<uint16_t>(keyLength));
uint32_t dataLength = (sizeof(uint32_t) * 2) * data.size();
assert(dataLength == static_cast<uint16_t>(dataLength));
endian::Writer writer(out, little);
writer.write<uint16_t>(keyLength);
writer.write<uint16_t>(dataLength);
return { keyLength, dataLength };
}
void EmitKey(raw_ostream &out, key_type_ref key, unsigned len) {
// FIXME: Avoid writing string data for identifiers here.
out << key.str();
}
void EmitData(raw_ostream &out, key_type_ref key, data_type_ref data,
unsigned len) {
static_assert(declIDFitsIn32Bits(), "DeclID too large");
endian::Writer writer(out, little);
for (auto entry : data) {
writer.write<uint32_t>(entry.first);
writer.write<uint32_t>(entry.second);
}
}
};
/// Maps a member's DeclBaseName to the bit offset of its members sub-table.
class <API key> {
public:
using key_type = DeclBaseName;
using key_type_ref = const key_type &;
using data_type = BitOffset; // Offsets to sub-tables
using data_type_ref = const data_type &;
using hash_value_type = uint32_t;
using offset_type = unsigned;
// Same hashing scheme as DeclTableInfo above.
hash_value_type ComputeHash(key_type_ref key) {
switch (key.getKind()) {
case DeclBaseName::Kind::Normal:
assert(!key.empty());
return llvm::djbHash(key.getIdentifier().str(), <API key>);
case DeclBaseName::Kind::Subscript:
return static_cast<uint8_t>(DeclNameKind::Subscript);
case DeclBaseName::Kind::Constructor:
return static_cast<uint8_t>(DeclNameKind::Constructor);
case DeclBaseName::Kind::Destructor:
return static_cast<uint8_t>(DeclNameKind::Destructor);
}
llvm_unreachable("unhandled kind");
}
std::pair<unsigned, unsigned> EmitKeyDataLength(raw_ostream &out,
key_type_ref key,
data_type_ref data) {
uint32_t keyLength = sizeof(uint8_t); // For the flag of the name's kind
if (key.getKind() == DeclBaseName::Kind::Normal) {
keyLength += key.getIdentifier().str().size(); // The name's length
}
assert(keyLength == static_cast<uint16_t>(keyLength));
uint32_t dataLength = sizeof(uint32_t);
endian::Writer writer(out, little);
writer.write<uint16_t>(keyLength);
// No need to write dataLength, it's constant.
return { keyLength, dataLength };
}
void EmitKey(raw_ostream &out, key_type_ref key, unsigned len) {
endian::Writer writer(out, little);
switch (key.getKind()) {
case DeclBaseName::Kind::Normal:
writer.write<uint8_t>(static_cast<uint8_t>(DeclNameKind::Normal));
writer.OS << key.getIdentifier().str();
break;
case DeclBaseName::Kind::Subscript:
writer.write<uint8_t>(static_cast<uint8_t>(DeclNameKind::Subscript));
break;
case DeclBaseName::Kind::Constructor:
writer.write<uint8_t>(static_cast<uint8_t>(DeclNameKind::Constructor));
break;
case DeclBaseName::Kind::Destructor:
writer.write<uint8_t>(static_cast<uint8_t>(DeclNameKind::Destructor));
break;
}
}
void EmitData(raw_ostream &out, key_type_ref key, data_type_ref data,
unsigned len) {
static_assert(<API key>(), "BitOffset too large");
endian::Writer writer(out, little);
writer.write<uint32_t>(static_cast<uint32_t>(data));
}
};
/// Maps a DeclID key to the list of its members' DeclIDs.
class <API key> {
public:
using key_type = DeclID;
using key_type_ref = const key_type &;
using data_type = Serializer::DeclMembersData; // Vector of DeclIDs
using data_type_ref = const data_type &;
using hash_value_type = uint32_t;
using offset_type = unsigned;
hash_value_type ComputeHash(key_type_ref key) {
return llvm::hash_value(static_cast<uint32_t>(key));
}
std::pair<unsigned, unsigned> EmitKeyDataLength(raw_ostream &out,
key_type_ref key,
data_type_ref data) {
// This will trap if a single ValueDecl has more than 16383 members
// with the same DeclBaseName. Seems highly unlikely.
assert((data.size() < (1 << 14)) && "Too many members");
uint32_t dataLength = sizeof(uint32_t) * data.size(); // value DeclIDs
endian::Writer writer(out, little);
// No need to write the key length; it's constant.
writer.write<uint16_t>(dataLength);
return { sizeof(uint32_t), dataLength };
}
void EmitKey(raw_ostream &out, key_type_ref key, unsigned len) {
static_assert(declIDFitsIn32Bits(), "DeclID too large");
assert(len == sizeof(uint32_t));
endian::Writer writer(out, little);
writer.write<uint32_t>(key);
}
void EmitData(raw_ostream &out, key_type_ref key, data_type_ref data,
unsigned len) {
static_assert(declIDFitsIn32Bits(), "DeclID too large");
endian::Writer writer(out, little);
for (auto entry : data) {
writer.write<uint32_t>(entry);
}
}
};
} // end anonymous namespace
/// Resolves a ModuleOrSourceFile to its module: either the module itself, or
/// the parent module of the contained source file.
static ModuleDecl *getModule(ModuleOrSourceFile DC) {
  ModuleDecl *module = DC.dyn_cast<ModuleDecl *>();
  if (!module)
    module = DC.get<SourceFile *>()->getParentModule();
  return module;
}
/// Returns the ASTContext owning the given module or source file.
static ASTContext &getContext(ModuleOrSourceFile DC) {
  ModuleDecl *module = getModule(DC);
  return module->getASTContext();
}
// True for local DeclContexts, excluding two decl kinds (one redacted below,
// plus SubscriptDecl). NOTE(review): the function name and the first isa<>
// target were redacted to "<API key>"; restore from upstream.
static bool <API key>(const DeclContext *DC) {
return DC->isLocalContext() && !isa<<API key>>(DC) &&
!isa<SubscriptDecl>(DC);
}
namespace {
// Bundle of raw serialized accessor information for an AbstractStorageDecl:
// the encoded ownership/impl kinds plus the accessor decls themselves.
struct Accessors {
uint8_t OpaqueReadOwnership;
uint8_t ReadImpl, WriteImpl, ReadWriteImpl;
SmallVector<AccessorDecl *, 8> Decls;
};
} // end anonymous namespace
// Maps swift::OpaqueReadOwnership to its stable serialized encoding.
// NOTE(review): function name redacted ("<API key>"); restore from upstream.
static uint8_t <API key>(swift::OpaqueReadOwnership ownership) {
switch (ownership) {
#define CASE(KIND) \
case swift::OpaqueReadOwnership::KIND: \
return uint8_t(serialization::OpaqueReadOwnership::KIND);
CASE(Owned)
CASE(Borrowed)
CASE(OwnedOrBorrowed)
#undef CASE
}
llvm_unreachable("bad kind");
}
// Maps swift::ReadImplKind to its stable serialized encoding.
static uint8_t getRawReadImplKind(swift::ReadImplKind kind) {
switch (kind) {
#define CASE(KIND) \
case swift::ReadImplKind::KIND: \
return uint8_t(serialization::ReadImplKind::KIND);
CASE(Stored)
CASE(Get)
CASE(Inherited)
CASE(Address)
CASE(Read)
#undef CASE
}
llvm_unreachable("bad kind");
}
// Maps swift::WriteImplKind to its stable serialized encoding.
// NOTE(review): one CASE argument was redacted ("<API key>"); restore from
// upstream (the remaining WriteImplKind case).
static unsigned getRawWriteImplKind(swift::WriteImplKind kind) {
switch (kind) {
#define CASE(KIND) \
case swift::WriteImplKind::KIND: \
return uint8_t(serialization::WriteImplKind::KIND);
CASE(Immutable)
CASE(Stored)
CASE(Set)
CASE(StoredWithObservers)
CASE(<API key>)
CASE(MutableAddress)
CASE(Modify)
#undef CASE
}
llvm_unreachable("bad kind");
}
// Maps swift::ReadWriteImplKind to its stable serialized encoding.
// NOTE(review): the function name and one CASE argument were redacted
// ("<API key>"); restore from upstream.
static unsigned <API key>(swift::ReadWriteImplKind kind) {
switch (kind) {
#define CASE(KIND) \
case swift::ReadWriteImplKind::KIND: \
return uint8_t(serialization::ReadWriteImplKind::KIND);
CASE(Immutable)
CASE(Stored)
CASE(MutableAddress)
CASE(<API key>)
CASE(Modify)
#undef CASE
}
llvm_unreachable("bad kind");
}
// Gathers the serialized accessor encodings and accessor decls for a storage
// decl. NOTE(review): two callee names were redacted ("<API key>").
static Accessors getAccessors(const AbstractStorageDecl *storage) {
Accessors accessors;
accessors.OpaqueReadOwnership =
<API key>(storage-><API key>());
auto impl = storage->getImplInfo();
accessors.ReadImpl = getRawReadImplKind(impl.getReadImpl());
accessors.WriteImpl = getRawWriteImplKind(impl.getWriteImpl());
accessors.ReadWriteImpl = <API key>(impl.getReadWriteImpl());
auto decls = storage->getAllAccessors();
accessors.Decls.append(decls.begin(), decls.end());
return accessors;
}
// Assigns (or retrieves) the serialization ID for a local DeclContext,
// queueing it for serialization. NOTE(review): method and member names
// redacted ("<API key>").
LocalDeclContextID Serializer::<API key>(const DeclContext *DC) {
assert(DC->isLocalContext() && "Expected a local DeclContext");
return <API key>.addRef(DC);
}
// Assigns (or retrieves) the serialization ID for a generic signature;
// a null signature maps to ID 0.
GenericSignatureID
Serializer::<API key>(GenericSignature sig) {
if (!sig)
return 0;
return <API key>.addRef(sig);
}
// Assigns (or retrieves) the serialization ID for a substitution map.
SubstitutionMapID
Serializer::<API key>(SubstitutionMap substitutions) {
return <API key>.addRef(substitutions);
}
// Assigns (or retrieves) the serialization ID for a DeclContext. Module and
// file-unit contexts serialize as the empty ID; certain local contexts get a
// dedicated local-context ID; everything else references its decl.
DeclContextID Serializer::addDeclContextRef(const DeclContext *DC) {
assert(DC && "cannot reference a null DeclContext");
switch (DC->getContextKind()) {
case DeclContextKind::Module:
case DeclContextKind::FileUnit: // Skip up to the module
return DeclContextID();
default:
break;
}
// If this decl context is a plain old serializable decl, queue it up for
// normal serialization.
if (<API key>(DC))
return DeclContextID::forLocalDeclContext(<API key>(DC));
return DeclContextID::forDecl(addDeclRef(DC->getAsDecl()));
}
// Assigns (or retrieves) the serialization ID for a decl, queueing it for
// serialization. The asserts reject cross-references to decl kinds that
// cannot be resolved from another module.
DeclID Serializer::addDeclRef(const Decl *D, bool allowTypeAliasXRef) {
assert((!D || !isDeclXRef(D) || isa<ValueDecl>(D) || isa<OperatorDecl>(D) ||
isa<PrecedenceGroupDecl>(D)) &&
"cannot cross-reference this decl");
assert((!D || !isDeclXRef(D) ||
!D->getAttrs().hasAttribute<<API key>>()) &&
"cannot cross-reference this decl");
assert((!D || allowTypeAliasXRef || !isa<TypeAliasDecl>(D) ||
D->getModuleContext() == M) &&
"cannot cross-reference typealiases directly (use the TypeAliasType)");
return DeclsToSerialize.addRef(D);
}
// Assigns (or retrieves) the serialization ID for a type, queueing it for
// serialization; rejects error types in asserts builds.
serialization::TypeID Serializer::addTypeRef(Type ty) {
#ifndef NDEBUG
<API key> trace(M->getASTContext(), "serializing", ty);
assert((!ty || !ty->hasError()) && "Serializing error type");
#endif
return TypesToSerialize.addRef(ty);
}
// Assigns (or retrieves) the identifier ID for a DeclBaseName. Normal names
// get uniqued string IDs; special names map to reserved sentinel IDs.
IdentifierID Serializer::addDeclBaseNameRef(DeclBaseName ident) {
switch (ident.getKind()) {
case DeclBaseName::Kind::Normal: {
if (ident.empty())
return 0;
IdentifierID &id = IdentifierIDs[ident.getIdentifier()];
if (id != 0)
return id;
// First occurrence: allocate the next ID and queue the string for output.
id = ++LastUniquedStringID;
StringsToWrite.push_back(ident.getIdentifier().str());
return id;
}
case DeclBaseName::Kind::Subscript:
return SUBSCRIPT_ID;
case DeclBaseName::Kind::Constructor:
return CONSTRUCTOR_ID;
case DeclBaseName::Kind::Destructor:
return DESTRUCTOR_ID;
}
llvm_unreachable("unhandled kind");
}
// Uniques an arbitrary string into the identifier table, returning the
// stable stored StringRef and its ID. Empty strings map to ID 0.
std::pair<StringRef, IdentifierID> Serializer::addUniquedString(StringRef str) {
if (str.empty())
return {str, 0};
decltype(UniquedStringIDs)::iterator iter;
bool isNew;
std::tie(iter, isNew) =
UniquedStringIDs.insert({str, LastUniquedStringID + 1});
if (!isNew)
return {iter->getKey(), iter->getValue()};
++LastUniquedStringID;
// Note that we use the string data stored in the StringMap.
StringsToWrite.push_back(iter->getKey());
return {iter->getKey(), LastUniquedStringID};
}
/// Interns a filename in the uniqued-string table and returns its ID.
IdentifierID Serializer::addFilename(StringRef filename) {
  assert(!filename.empty() && "Attemping to add an empty filename");
  // Filenames share the same string table as identifiers.
  auto uniqued = addUniquedString(filename);
  return uniqued.second;
}
// Returns the identifier ID naming the module that contains DC: reserved IDs
// for the current, Builtin, and Clang-imported modules, otherwise the
// exported module name. NOTE(review): several names redacted ("<API key>").
IdentifierID Serializer::<API key>(const DeclContext *DC) {
assert(!isa<ModuleDecl>(DC) &&
"References should be to things within modules");
const FileUnit *file = cast<FileUnit>(DC-><API key>());
const ModuleDecl *M = file->getParentModule();
if (M == this->M)
return CURRENT_MODULE_ID;
if (M == this->M->getASTContext().TheBuiltinModule)
return BUILTIN_MODULE_ID;
auto clangImporter =
static_cast<ClangImporter *>(
this->M->getASTContext().<API key>());
if (M == clangImporter-><API key>())
return <API key>;
auto exportedModuleName = file-><API key>();
assert(!exportedModuleName.empty());
auto exportedModuleID = M->getASTContext().getIdentifier(exportedModuleName);
return addDeclBaseNameRef(exportedModuleID);
}
// Assigns (or retrieves) the serialization ID for a SILLayout.
SILLayoutID Serializer::addSILLayoutRef(const SILLayout *layout) {
return <API key>.addRef(layout);
}
// Assigns (or retrieves) the serialization ID for a conformance defined in
// this module. NOTE(review): the conformance type name and member were
// redacted ("<API key>") — given the NormalConformanceID return type this is
// presumably NormalProtocolConformance; confirm against upstream.
NormalConformanceID
Serializer::addConformanceRef(const <API key> *conformance) {
assert(conformance->getDeclContext()->getParentModule() == M &&
"cannot reference conformance from another module");
return <API key>.addRef(conformance);
}
/// Record the name of a block.
// Emits a blockinfo record announcing block ID and, if present, its name.
// NOTE(review): the two llvm::bitc record codes were redacted ("<API key>").
void SerializerBase::emitBlockID(unsigned ID, StringRef name,
SmallVectorImpl<unsigned char> &nameBuffer) {
SmallVector<unsigned, 1> idBuffer;
idBuffer.push_back(ID);
Out.EmitRecord(llvm::bitc::<API key>, idBuffer);
// Emit the block name if present.
if (name.empty())
return;
nameBuffer.resize(name.size());
memcpy(nameBuffer.data(), name.data(), name.size());
Out.EmitRecord(llvm::bitc::<API key>, nameBuffer);
}
// Emits a blockinfo record naming a record ID; the one-byte ID is packed in
// front of the name, hence the < 256 restriction.
void SerializerBase::emitRecordID(unsigned ID, StringRef name,
SmallVectorImpl<unsigned char> &nameBuffer) {
assert(ID < 256 && "can't fit record ID in next to name");
nameBuffer.resize(name.size()+1);
nameBuffer[0] = ID;
memcpy(nameBuffer.data()+1, name.data(), name.size());
Out.EmitRecord(llvm::bitc::<API key>, nameBuffer);
}
// Writes the BLOCKINFO block: declares every block and record ID used in the
// module file so generic bitstream tools can name them.
// NOTE(review): many block/record identifiers below were redacted
// ("<API key>") by tooling; restore from upstream Serialization.cpp.
void Serializer::writeBlockInfoBlock() {
BCBlockRAII restoreBlock(Out, llvm::bitc::BLOCKINFO_BLOCK_ID, 2);
SmallVector<unsigned char, 64> nameBuffer;
#define BLOCK(X) emitBlockID(X ## _ID, #X, nameBuffer)
#define BLOCK_RECORD(K, X) emitRecordID(K::X, #X, nameBuffer)
BLOCK(MODULE_BLOCK);
BLOCK(CONTROL_BLOCK);
BLOCK_RECORD(control_block, METADATA);
BLOCK_RECORD(control_block, MODULE_NAME);
BLOCK_RECORD(control_block, TARGET);
BLOCK(OPTIONS_BLOCK);
BLOCK_RECORD(options_block, SDK_PATH);
BLOCK_RECORD(options_block, XCC);
BLOCK_RECORD(options_block, IS_SIB);
BLOCK_RECORD(options_block, IS_TESTABLE);
BLOCK_RECORD(options_block, <API key>);
BLOCK_RECORD(options_block, RESILIENCE_STRATEGY);
BLOCK(INPUT_BLOCK);
BLOCK_RECORD(input_block, IMPORTED_MODULE);
BLOCK_RECORD(input_block, LINK_LIBRARY);
BLOCK_RECORD(input_block, IMPORTED_HEADER);
BLOCK_RECORD(input_block, <API key>);
BLOCK_RECORD(input_block, MODULE_FLAGS);
BLOCK_RECORD(input_block, SEARCH_PATH);
BLOCK_RECORD(input_block, FILE_DEPENDENCY);
BLOCK_RECORD(input_block, <API key>);
BLOCK_RECORD(input_block, <API key>);
BLOCK(<API key>);
#define RECORD(X) BLOCK_RECORD(decls_block, X);
#include "DeclTypeRecordNodes.def"
BLOCK(<API key>);
BLOCK_RECORD(identifier_block, IDENTIFIER_DATA);
BLOCK(INDEX_BLOCK);
BLOCK_RECORD(index_block, TYPE_OFFSETS);
BLOCK_RECORD(index_block, DECL_OFFSETS);
BLOCK_RECORD(index_block, IDENTIFIER_OFFSETS);
BLOCK_RECORD(index_block, TOP_LEVEL_DECLS);
BLOCK_RECORD(index_block, OPERATORS);
BLOCK_RECORD(index_block, EXTENSIONS);
BLOCK_RECORD(index_block, <API key>);
BLOCK_RECORD(index_block, OPERATOR_METHODS);
BLOCK_RECORD(index_block, OBJC_METHODS);
BLOCK_RECORD(index_block, ENTRY_POINT);
BLOCK_RECORD(index_block, <API key>);
BLOCK_RECORD(index_block, <API key>);
BLOCK_RECORD(index_block, <API key>);
BLOCK_RECORD(index_block, LOCAL_TYPE_DECLS);
BLOCK_RECORD(index_block, <API key>);
BLOCK_RECORD(index_block, SIL_LAYOUT_OFFSETS);
BLOCK_RECORD(index_block, PRECEDENCE_GROUPS);
BLOCK_RECORD(index_block, NESTED_TYPE_DECLS);
BLOCK_RECORD(index_block, DECL_MEMBER_NAMES);
BLOCK_RECORD(index_block, <API key>);
BLOCK(<API key>);
BLOCK_RECORD(<API key>, DECL_MEMBERS);
BLOCK(SIL_BLOCK);
BLOCK_RECORD(sil_block, SIL_FUNCTION);
BLOCK_RECORD(sil_block, SIL_BASIC_BLOCK);
BLOCK_RECORD(sil_block, <API key>);
BLOCK_RECORD(sil_block, SIL_ONE_TYPE);
BLOCK_RECORD(sil_block, SIL_ONE_OPERAND);
BLOCK_RECORD(sil_block, <API key>);
BLOCK_RECORD(sil_block, SIL_ONE_TYPE_VALUES);
BLOCK_RECORD(sil_block, SIL_TWO_OPERANDS);
BLOCK_RECORD(sil_block, SIL_TAIL_ADDR);
BLOCK_RECORD(sil_block, SIL_INST_APPLY);
BLOCK_RECORD(sil_block, SIL_INST_NO_OPERAND);
BLOCK_RECORD(sil_block, SIL_VTABLE);
BLOCK_RECORD(sil_block, SIL_VTABLE_ENTRY);
BLOCK_RECORD(sil_block, SIL_GLOBALVAR);
BLOCK_RECORD(sil_block, SIL_INST_CAST);
BLOCK_RECORD(sil_block, <API key>);
BLOCK_RECORD(sil_block, SIL_WITNESS_TABLE);
BLOCK_RECORD(sil_block, <API key>);
BLOCK_RECORD(sil_block, <API key>);
BLOCK_RECORD(sil_block, <API key>);
BLOCK_RECORD(sil_block, <API key>);
BLOCK_RECORD(sil_block, <API key>);
BLOCK_RECORD(sil_block, <API key>);
BLOCK_RECORD(sil_block, <API key>);
BLOCK_RECORD(sil_block, <API key>);
BLOCK_RECORD(sil_block, SIL_SPECIALIZE_ATTR);
BLOCK_RECORD(sil_block, <API key>);
BLOCK_RECORD(sil_block, <API key>);
// These layouts can exist in both decl blocks and sil blocks.
#define <API key>(K, X) emitRecordID(X, #X, nameBuffer)
<API key>(sil_block,
decls_block::<API key>);
<API key>(sil_block,
decls_block::<API key>);
<API key>(sil_block,
decls_block::<API key>);
<API key>(sil_block,
decls_block::<API key>);
<API key>(sil_block,
decls_block::<API key>);
<API key>(sil_block,
decls_block::<API key>);
<API key>(sil_block,
decls_block::<API key>);
<API key>(sil_block,
decls_block::<API key>);
<API key>(sil_block,
decls_block::GENERIC_PARAM_LIST);
<API key>(sil_block,
decls_block::GENERIC_REQUIREMENT);
<API key>(sil_block,
decls_block::LAYOUT_REQUIREMENT);
BLOCK(SIL_INDEX_BLOCK);
BLOCK_RECORD(sil_index_block, SIL_FUNC_NAMES);
BLOCK_RECORD(sil_index_block, SIL_FUNC_OFFSETS);
BLOCK_RECORD(sil_index_block, SIL_VTABLE_NAMES);
BLOCK_RECORD(sil_index_block, SIL_VTABLE_OFFSETS);
BLOCK_RECORD(sil_index_block, SIL_GLOBALVAR_NAMES);
BLOCK_RECORD(sil_index_block, <API key>);
BLOCK_RECORD(sil_index_block, <API key>);
BLOCK_RECORD(sil_index_block, <API key>);
BLOCK_RECORD(sil_index_block, <API key>);
BLOCK_RECORD(sil_index_block, <API key>);
BLOCK_RECORD(sil_index_block, <API key>);
#undef BLOCK
#undef BLOCK_RECORD
}
// Writes the CONTROL block (module name, metadata/version string, target
// triple) and the nested OPTIONS block (SIB/testable/private-import flags,
// resilience strategy, and optionally SDK path + extra Clang flags).
// NOTE(review): several identifiers were redacted ("<API key>"), including
// the options type and version-string locals; restore from upstream.
void Serializer::writeHeader(const <API key> &options) {
{
BCBlockRAII restoreBlock(Out, CONTROL_BLOCK_ID, 3);
control_block::ModuleNameLayout ModuleName(Out);
control_block::MetadataLayout Metadata(Out);
control_block::TargetLayout Target(Out);
ModuleName.emit(ScratchRecord, M->getName().str());
// Build a combined version string: compiler version, language version in
// parentheses, then the full Swift version.
SmallString<32> versionStringBuf;
llvm::raw_svector_ostream versionString(versionStringBuf);
versionString << Version::<API key>();
size_t <API key> = versionString.tell();
versionString << '('
<< M->getASTContext().LangOpts.<API key>;
size_t <API key> =
versionString.tell() - <API key> - 1;
versionString << ")/" << version::getSwiftFullVersion();
Metadata.emit(ScratchRecord,
<API key>, <API key>,
<API key>,
<API key>,
versionString.str());
Target.emit(ScratchRecord, M->getASTContext().LangOpts.Target.str());
{
llvm::BCBlockRAII restoreBlock(Out, OPTIONS_BLOCK_ID, 4);
options_block::IsSIBLayout IsSIB(Out);
IsSIB.emit(ScratchRecord, options.IsSIB);
if (M->isTestingEnabled()) {
options_block::IsTestableLayout IsTestable(Out);
IsTestable.emit(ScratchRecord);
}
if (M-><API key>()) {
options_block::<API key> PrivateImports(Out);
PrivateImports.emit(ScratchRecord);
}
if (M-><API key>() != ResilienceStrategy::Default) {
options_block::<API key> Strategy(Out);
Strategy.emit(ScratchRecord, unsigned(M-><API key>()));
}
if (options.<API key>) {
options_block::SDKPathLayout SDKPath(Out);
options_block::XCCLayout XCC(Out);
SDKPath.emit(ScratchRecord, M->getASTContext().SearchPathOpts.SDKPath);
auto &Opts = options.ExtraClangOptions;
for (auto Arg = Opts.begin(), E = Opts.end(); Arg != E; ++Arg) {
// FIXME: This is a hack and calls for a better design.
// Filter out any -ivfsoverlay options that include an
// <API key>.yaml overlay. By convention the Xcode
// buildsystem uses these while *building* mixed Objective-C and Swift
// frameworks; but they should never be used to *import* the module
// defined in the framework.
if (StringRef(*Arg).startswith("-ivfsoverlay")) {
auto Next = std::next(Arg);
if (Next != E &&
StringRef(*Next).endswith("<API key>.yaml")) {
++Arg;
continue;
}
}
XCC.emit(ScratchRecord, *Arg);
}
}
}
}
}
/// Flattens an imported module's path into a single NUL-separated string
/// appended to \p out.
///
/// The module's name components are printed first, separated by embedded NUL
/// characters. If the import is scoped to a specific top-level declaration,
/// a further NUL plus that declaration's name is appended; deeper access
/// paths are not supported (see the assert below).
static void flattenImportPath(const ModuleDecl::ImportedModule &import,
                              SmallVectorImpl<char> &out) {
  llvm::raw_svector_ostream outStream(out);
  // StringRef("\0", 1) forces a one-character separator; a bare "\0" literal
  // would be treated as an empty string.
  import.second-><API key>().printForward(outStream,
                                          StringRef("\0", 1));

  if (import.first.empty())
    return;

  outStream << '\0';
  assert(import.first.size() == 1 && "can only handle top-level decl imports");
  auto accessPathElem = import.first.front();
  outStream << accessPathElem.first.str();
}
/// Returns the raw dependency-tracking value for \p dep: the content hash for
/// hash-based dependencies, otherwise the file's modification time.
uint64_t getRawModTimeOrHash(const <API key>::FileDependency &dep) {
  return dep.isHashBased() ? dep.getContentHash() : dep.getModificationTime();
}
/// A deduplicated set of imported modules, used for fast membership queries.
using ImportSet = llvm::SmallSet<ModuleDecl::ImportedModule, 8,
                                 ModuleDecl::<API key>>;

/// Collects the modules imported by \p M matching \p filter into an
/// ImportSet so visibility can be tested per-import later.
static ImportSet getImportsAsSet(const ModuleDecl *M,
                                 ModuleDecl::ImportFilter filter) {
  SmallVector<ModuleDecl::ImportedModule, 8> imports;
  M->getImportedModules(imports, filter);
  ImportSet importSet;
  importSet.insert(imports.begin(), imports.end());
  return importSet;
}
/// Writes the INPUT_BLOCK: everything the serialized module depends on.
///
/// This records (as driven by \p options): search paths, per-file
/// dependencies grouped by parent directory, the module interface path, the
/// bridging header (emitted first, with optional inlined contents), every
/// imported module tagged with its visibility, and the autolink library name.
void Serializer::writeInputBlock(const <API key> &options) {
  BCBlockRAII restoreBlock(Out, INPUT_BLOCK_ID, 4);
  input_block::<API key> ImportedModule(Out);
  input_block::LinkLibraryLayout LinkLibrary(Out);
  input_block::<API key> ImportedHeader(Out);
  input_block::<API key> <API key>(Out);
  input_block::SearchPathLayout SearchPath(Out);
  input_block::<API key> FileDependency(Out);
  input_block::<API key> DependencyDirectory(Out);
  input_block::<API key> ModuleInterface(Out);

  if (options.<API key>) {
    const SearchPathOptions &searchPathOpts = M->getASTContext().SearchPathOpts;
    // Put the framework search paths first so that they'll be preferred upon
    // deserialization.
    for (auto &framepath : searchPathOpts.<API key>)
      SearchPath.emit(ScratchRecord, /*framework=*/true, framepath.IsSystem,
                      framepath.Path);
    for (auto &path : searchPathOpts.ImportSearchPaths)
      SearchPath.emit(ScratchRecord, /*framework=*/false, /*system=*/false, path);
  }

  // Note: We're not using StringMap here because we don't need to own the
  // strings.
  llvm::DenseMap<StringRef, unsigned> <API key>;
  for (auto const &dep : options.Dependencies) {
    StringRef directoryName = llvm::sys::path::parent_path(dep.getPath());
    // Each distinct parent directory gets a 1-based ID; its record is emitted
    // the first time the directory is seen (DenseMap default-constructs 0).
    unsigned &<API key> = <API key>[directoryName];
    if (!<API key>) {
      // This name must be newly-added. Give it a new ID (and skip 0).
      <API key> = <API key>.size();
      DependencyDirectory.emit(ScratchRecord, directoryName);
    }
    FileDependency.emit(ScratchRecord,
                        dep.getSize(),
                        getRawModTimeOrHash(dep),
                        dep.isHashBased(),
                        dep.isSDKRelative(),
                        <API key>,
                        llvm::sys::path::filename(dep.getPath()));
  }

  if (!options.ModuleInterface.empty())
    ModuleInterface.emit(ScratchRecord, options.ModuleInterface);

  // Gather every import: public, private, and implementation-only.
  ModuleDecl::ImportFilter allImportFilter;
  allImportFilter |= ModuleDecl::ImportFilterKind::Public;
  allImportFilter |= ModuleDecl::ImportFilterKind::Private;
  allImportFilter |= ModuleDecl::ImportFilterKind::ImplementationOnly;
  SmallVector<ModuleDecl::ImportedModule, 8> allImports;
  M->getImportedModules(allImports, allImportFilter);
  // NOTE(review): the helper called here is redacted — presumably it sorts
  // and/or deduplicates the import list; confirm against upstream source.
  ModuleDecl::<API key>(allImports);

  // Collect the public and private imports as a subset so that we can
  // distinguish them.
  ImportSet publicImportSet =
      getImportsAsSet(M, ModuleDecl::ImportFilterKind::Public);
  ImportSet privateImportSet =
      getImportsAsSet(M, ModuleDecl::ImportFilterKind::Private);

  auto clangImporter =
      static_cast<ClangImporter *>(M->getASTContext().<API key>());
  ModuleDecl *<API key> = clangImporter-><API key>();
  ModuleDecl::ImportedModule <API key>{{}, <API key>};
  // Make sure the bridging header module is always at the top of the import
  // list, mimicking how it is processed before any module imports when
  // compiling source files.
  if (llvm::is_contained(allImports, <API key>)) {
    off_t importedHeaderSize = 0;
    time_t <API key> = 0;
    std::string contents;
    if (!options.ImportedHeader.empty()) {
      contents = clangImporter-><API key>(
          options.ImportedHeader, importedHeaderSize, <API key>);
    }
    assert(publicImportSet.count(<API key>));
    ImportedHeader.emit(ScratchRecord,
                        publicImportSet.count(<API key>),
                        importedHeaderSize, <API key>,
                        options.ImportedHeader);
    if (!contents.empty()) {
      // NUL-terminate the inlined header contents before emitting.
      contents.push_back('\0');
      <API key>.emit(ScratchRecord, contents);
    }
  }

  ModuleDecl *theBuiltinModule = M->getASTContext().TheBuiltinModule;
  for (auto import : allImports) {
    // Skip the Builtin module and the bridging-header module (the latter was
    // already emitted above).
    if (import.second == theBuiltinModule ||
        import.second == <API key>) {
      continue;
    }

    SmallString<64> importPath;
    flattenImportPath(import, importPath);

    serialization::ImportControl stableImportControl;
    // The order of checks here is important, since a module can be imported
    // differently in different files, and we need to record the "most visible"
    // form here.
    if (publicImportSet.count(import))
      stableImportControl = ImportControl::Exported;
    else if (privateImportSet.count(import))
      stableImportControl = ImportControl::Normal;
    else
      stableImportControl = ImportControl::ImplementationOnly;

    ImportedModule.emit(ScratchRecord,
                        static_cast<uint8_t>(stableImportControl),
                        !import.first.empty(), importPath);
  }

  if (!options.ModuleLinkName.empty()) {
    LinkLibrary.emit(ScratchRecord, serialization::LibraryKind::Library,
                     options.AutolinkForceLoad, options.ModuleLinkName);
  }
}
/// Translate AST default argument kind to the Serialization enum values, which
/// are guaranteed to be stable.
/// Maps a swift::DefaultArgumentKind to the identically-named stable
/// serialization value. The CASE macro keeps the two enums mechanically in
/// sync; a new AST case without a serialization twin fails to compile.
static uint8_t <API key>(swift::DefaultArgumentKind kind) {
  switch (kind) {
#define CASE(X) \
  case swift::DefaultArgumentKind::X: \
    return static_cast<uint8_t>(serialization::DefaultArgumentKind::X);
  CASE(None)
  CASE(Normal)
  CASE(Inherited)
  CASE(Column)
  CASE(File)
  CASE(Line)
  CASE(Function)
  CASE(DSOHandle)
  CASE(NilLiteral)
  CASE(EmptyArray)
  CASE(EmptyDictionary)
  CASE(StoredProperty)
#undef CASE
  }

  llvm_unreachable("Unhandled DefaultArgumentKind in switch.");
}
/// Maps a metatype's representation to its stable serialized encoding.
/// A metatype with no representation at all maps to MR_None.
static uint8_t
<API key>(const AnyMetatypeType *metatype) {
  if (!metatype->hasRepresentation()) {
    return serialization::<API key>::MR_None;
  }

  switch (metatype->getRepresentation()) {
  case swift::<API key>::Thin:
    return serialization::<API key>::MR_Thin;
  case swift::<API key>::Thick:
    return serialization::<API key>::MR_Thick;
  case swift::<API key>::ObjC:
    return serialization::<API key>::MR_ObjC;
  }
  llvm_unreachable("bad representation");
}
/// Translate from the requirement kind to the Serialization enum
/// values, which are guaranteed to be stable.
/// Maps a RequirementKind to the identically-named stable serialization
/// value; the CASE macro keeps the two enums mechanically in sync.
static uint8_t <API key>(RequirementKind kind) {
#define CASE(KIND)            \
  case RequirementKind::KIND: \
    return <API key>::KIND;

  switch (kind) {
  CASE(Conformance)
  CASE(Superclass)
  CASE(SameType)
  CASE(Layout)
  }
#undef CASE

  llvm_unreachable("Unhandled RequirementKind in switch.");
}
/// Serializes a list of generic requirements.
///
/// Conformance/superclass/same-type requirements are emitted as one record
/// each (stable kind + two type refs). Layout requirements get a separate
/// record encoding the stable layout-constraint kind, the constrained type,
/// and — for known-size trivial layouts only — size and alignment in bits.
/// Emits nothing for an empty list.
void Serializer::<API key>(ArrayRef<Requirement> requirements,
                           const std::array<unsigned, 256> &abbrCodes) {
  using namespace decls_block;
  if (requirements.empty())
    return;

  auto reqAbbrCode = abbrCodes[<API key>::Code];
  auto layoutReqAbbrCode = abbrCodes[<API key>::Code];
  for (const auto &req : requirements) {
    if (req.getKind() != RequirementKind::Layout)
      <API key>::emitRecord(
          Out, ScratchRecord, reqAbbrCode,
          <API key>(req.getKind()),
          addTypeRef(req.getFirstType()), addTypeRef(req.getSecondType()));
    else {
      // Write layout requirement.
      auto layout = req.getLayoutConstraint();
      unsigned size = 0;
      unsigned alignment = 0;
      // Size/alignment are only meaningful for known-size trivial layouts;
      // all other kinds serialize them as 0.
      if (layout->isKnownSizeTrivial()) {
        size = layout-><API key>();
        alignment = layout->getAlignmentInBits();
      }
      // Translate the AST layout-constraint kind to its stable counterpart.
      <API key> rawKind = <API key>::UnknownLayout;
      switch (layout->getKind()) {
      case <API key>::<API key>:
        rawKind = <API key>::<API key>;
        break;
      case <API key>::RefCountedObject:
        rawKind = <API key>::RefCountedObject;
        break;
      case <API key>::Trivial:
        rawKind = <API key>::Trivial;
        break;
      case <API key>::TrivialOfExactSize:
        rawKind = <API key>::TrivialOfExactSize;
        break;
      case <API key>::TrivialOfAtMostSize:
        rawKind = <API key>::TrivialOfAtMostSize;
        break;
      case <API key>::Class:
        rawKind = <API key>::Class;
        break;
      case <API key>::NativeClass:
        rawKind = <API key>::NativeClass;
        break;
      case <API key>::UnknownLayout:
        rawKind = <API key>::UnknownLayout;
        break;
      }
      <API key>::emitRecord(
          Out, ScratchRecord, layoutReqAbbrCode, rawKind,
          addTypeRef(req.getFirstType()), size, alignment);
    }
  }
}
/// Serializes a generic signature: its parameters, then its requirements.
///
/// Normally only the parameter type refs are recorded. If any parameter's
/// declaration sits directly in a module context (possible in SIL), the
/// parameters are instead encoded manually as (name, canonical type) pairs
/// using a separate record layout.
void Serializer::writeASTBlockEntity(GenericSignature sig) {
  using namespace decls_block;
  assert(sig);
  assert(<API key>.hasRef(sig));

  // Determine whether we can just write the param types as is, or whether we
  // have to encode them manually because one of them has a declaration with
  // module context (which can happen in SIL).
  bool <API key> =
      llvm::any_of(sig->getGenericParams(),
                   [](const <API key> *paramTy) {
        auto *decl = paramTy->getDecl();
        return decl && decl->getDeclContext()-><API key>();
      });

  if (!<API key>) {
    // Record the generic parameters.
    SmallVector<uint64_t, 4> rawParamIDs;
    for (auto *paramTy : sig->getGenericParams()) {
      rawParamIDs.push_back(addTypeRef(paramTy));
    }

    auto abbrCode = DeclTypeAbbrCodes[<API key>::Code];
    <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                          rawParamIDs);
  } else {
    // Record the generic parameters.  IDs alternate: name ref, type ref.
    SmallVector<uint64_t, 4> rawParamIDs;
    for (auto *paramTy : sig->getGenericParams()) {
      auto *decl = paramTy->getDecl();

      // For a full environment, add the name and canonicalize the param type.
      Identifier paramName = decl ? decl->getName() : Identifier();
      rawParamIDs.push_back(addDeclBaseNameRef(paramName));

      paramTy = paramTy->getCanonicalType()->castTo<<API key>>();
      rawParamIDs.push_back(addTypeRef(paramTy));
    }

    auto envAbbrCode = DeclTypeAbbrCodes[<API key>::Code];
    <API key>::emitRecord(Out, ScratchRecord, envAbbrCode,
                          rawParamIDs);
  }

  // Requirements trail the parameter record.
  <API key>(sig->getRequirements(), DeclTypeAbbrCodes);
}
/// Serializes a substitution map: its generic signature ref, the conformance
/// count, and the replacement type refs, followed by the conformance records
/// themselves.
void Serializer::writeASTBlockEntity(const SubstitutionMap substitutions) {
  using namespace decls_block;
  assert(substitutions);
  assert(<API key>.hasRef(substitutions));

  // Collect the replacement types.
  SmallVector<uint64_t, 4> rawReplacementTypes;
  for (auto type : substitutions.getReplacementTypes())
    rawReplacementTypes.push_back(addTypeRef(type));

  auto <API key> = DeclTypeAbbrCodes[<API key>::Code];
  <API key>::emitRecord(Out, ScratchRecord, <API key>,
                        <API key>(
                            substitutions.getGenericSignature()),
                        substitutions.getConformances().size(),
                        rawReplacementTypes);

  // Conformances are emitted as trailing records after the map itself.
  writeConformances(substitutions.getConformances(), DeclTypeAbbrCodes);
}
/// Serializes a SIL layout: per-field lowered type refs (with the high bit
/// flagging mutable fields), the generic signature ref, and the field count.
void Serializer::writeASTBlockEntity(const SILLayout *layout) {
  using namespace decls_block;
  assert(<API key>.hasRef(layout));

  SmallVector<unsigned, 16> data;
  // Save field types.
  for (auto &field : layout->getFields()) {
    unsigned typeRef = addTypeRef(field.getLoweredType());
    // Set the high bit if mutable.
    if (field.isMutable())
      typeRef |= 0x80000000U;
    data.push_back(typeRef);
  }

  unsigned abbrCode
    = DeclTypeAbbrCodes[SILLayoutLayout::Code];

  SILLayoutLayout::emitRecord(
      Out, ScratchRecord, abbrCode,
      <API key>(layout->getGenericSignature()),
      layout->getFields().size(),
      data);
}
/// Serializes a complete normal protocol conformance.
///
/// Emits the protocol, owning decl context, witness counts, and a flat data
/// array holding (assoc-type, type, type-decl) triples for type witnesses
/// followed by (requirement, witness, substitutions) entries for value
/// witnesses; requirement-signature conformances trail the record.
void Serializer::writeASTBlockEntity(
    const <API key> *conformance) {
  using namespace decls_block;

  // The conformance must be complete, or we can't serialize it.
  assert(conformance->isComplete());
  assert(<API key>.hasRef(conformance));

  auto protocol = conformance->getProtocol();

  SmallVector<DeclID, 32> data;
  unsigned numValueWitnesses = 0;
  unsigned numTypeWitnesses = 0;

  conformance->forEachTypeWitness([&](AssociatedTypeDecl *assocType,
                                      Type type, TypeDecl *typeDecl) {
    data.push_back(addDeclRef(assocType));
    data.push_back(addTypeRef(type));
    data.push_back(addDeclRef(typeDecl, /*allowTypeAliasXRef*/true));
    ++numTypeWitnesses;
    return false;
  });

  conformance->forEachValueWitness([&](ValueDecl *req, Witness witness) {
    ++numValueWitnesses;
    data.push_back(addDeclRef(req));
    data.push_back(addDeclRef(witness.getDecl()));

    // A missing witness is only legal for optional requirements or
    // requirements unavailable in this context.
    assert(witness.getDecl() || req->getAttrs().hasAttribute<OptionalAttr>()
           || req->getAttrs().isUnavailable(req->getASTContext()));

    // If there is no witness, we're done.
    if (!witness.getDecl()) return;

    auto subs = witness.getSubstitutions();

    // Canonicalize away typealiases, since these substitutions aren't used
    // for diagnostics and we reference fewer declarations that way.
    subs = subs.getCanonical();

    // Map archetypes to type parameters, since we always substitute them
    // away. Note that in a merge-modules pass, we're serializing conformances
    // that we deserialized, so they will already have their replacement types
    // in terms of interface types; hence the hasArchetypes() check is
    // necessary for correctness, not just as a fast path.
    if (subs.hasArchetypes())
      subs = subs.<API key>();

    data.push_back(<API key>(subs));
  });

  unsigned <API key> =
      conformance-><API key>().size();

  unsigned abbrCode
    = DeclTypeAbbrCodes[<API key>::Code];
  auto ownerID = addDeclContextRef(conformance->getDeclContext());
  <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                        addDeclRef(protocol),
                        ownerID.getOpaqueValue(),
                        numTypeWitnesses,
                        numValueWitnesses,
                        <API key>,
                        data);

  // Write requirement signature conformances.
  for (auto reqConformance : conformance-><API key>())
    writeConformance(reqConformance, DeclTypeAbbrCodes);
}
/// Convenience overload: wraps a concrete conformance pointer in a
/// conformance reference and delegates to the reference-taking overload.
void
Serializer::writeConformance(ProtocolConformance *conformance,
                             const std::array<unsigned, 256> &abbrCodes,
                             GenericEnvironment *genericEnv) {
  writeConformance(<API key>(conformance), abbrCodes, genericEnv);
}
/// Serializes a protocol conformance reference.
///
/// Invalid and abstract references get dedicated single records. Concrete
/// conformances dispatch on kind: normal conformances are either referenced
/// by local ID (defined in this module file) or cross-referenced by
/// (protocol, nominal, module); self/specialized/inherited conformances each
/// emit their own record, mapping archetypes out of context when
/// \p genericEnv is non-null, and recurse into their underlying conformance.
void
Serializer::writeConformance(<API key> conformanceRef,
                             const std::array<unsigned, 256> &abbrCodes,
                             GenericEnvironment *genericEnv) {
  using namespace decls_block;

  if (conformanceRef.isInvalid()) {
    unsigned abbrCode = abbrCodes[<API key>::Code];
    <API key>::emitRecord(Out, ScratchRecord, abbrCode);
    return;
  }

  if (conformanceRef.isAbstract()) {
    unsigned abbrCode = abbrCodes[<API key>::Code];
    <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                          addDeclRef(conformanceRef.getAbstract()));
    return;
  }

  auto conformance = conformanceRef.getConcrete();
  switch (conformance->getKind()) {
  case <API key>::Normal: {
    auto normal = cast<<API key>>(conformance);
    if (!isDeclXRef(normal->getDeclContext()->getAsDecl())
        && !isa<ClangModuleUnit>(normal->getDeclContext()
                                       -><API key>())) {
      // A normal conformance in this module file.
      unsigned abbrCode = abbrCodes[<API key>::Code];
      <API key>::emitRecord(Out, ScratchRecord,
                            abbrCode,
                            addConformanceRef(normal));
    } else {
      // A conformance in a different module file.
      unsigned abbrCode = abbrCodes[<API key>::Code];
      <API key>::emitRecord(
          Out, ScratchRecord,
          abbrCode,
          addDeclRef(normal->getProtocol()),
          addDeclRef(normal->getType()->getAnyNominal()),
          <API key>(normal->getDeclContext()));
    }
    break;
  }

  case <API key>::Self: {
    auto self = cast<<API key>>(conformance);
    // A self-conformance is fully identified by its protocol.
    unsigned abbrCode = abbrCodes[<API key>::Code];
    auto protocolID = addDeclRef(self->getProtocol());
    <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                          protocolID);
    break;
  }

  case <API key>::Specialized: {
    auto conf = cast<<API key>>(conformance);
    unsigned abbrCode = abbrCodes[<API key>::Code];
    auto type = conf->getType();
    // Archetypes can't be serialized directly; rewrite them as interface
    // types when a generic environment is available.
    if (genericEnv && type->hasArchetype())
      type = type->mapTypeOutOfContext();
    <API key>::emitRecord(
        Out, ScratchRecord,
        abbrCode,
        addTypeRef(type),
        <API key>(conf->getSubstitutionMap()));

    // Recurse into the conformance being specialized.
    writeConformance(conf-><API key>(), abbrCodes, genericEnv);
    break;
  }

  case <API key>::Inherited: {
    auto conf = cast<<API key>>(conformance);
    unsigned abbrCode
      = abbrCodes[<API key>::Code];

    auto type = conf->getType();
    // Same archetype-to-interface-type rewrite as the specialized case.
    if (genericEnv && type->hasArchetype())
      type = type->mapTypeOutOfContext();

    <API key>::emitRecord(
        Out, ScratchRecord, abbrCode, addTypeRef(type));

    // Recurse into the inherited conformance.
    writeConformance(conf-><API key>(), abbrCodes, genericEnv);
    break;
  }
  }
}
/// Serializes each conformance reference in \p conformances, in order.
void
Serializer::writeConformances(ArrayRef<<API key>> conformances,
                              const std::array<unsigned, 256> &abbrCodes) {
  using namespace decls_block;

  for (size_t idx = 0, count = conformances.size(); idx != count; ++idx)
    writeConformance(conformances[idx], abbrCodes);
}
/// Serializes each concrete conformance pointer in \p conformances, in order.
void
Serializer::writeConformances(ArrayRef<ProtocolConformance*> conformances,
                              const std::array<unsigned, 256> &abbrCodes) {
  using namespace decls_block;

  for (auto it = conformances.begin(), end = conformances.end(); it != end;
       ++it)
    writeConformance(*it, abbrCodes);
}
/// Returns whether a declaration of \p D's kind is serialized when it appears
/// in a member list.
///
/// Kinds that can never be members assert; IfConfig, PoundDiagnostic, and
/// EnumCase members are dropped (false); every remaining kind is serialized
/// (true).
static bool <API key>(Decl *D) {
  switch (D->getKind()) {
  case DeclKind::Import:
  case DeclKind::InfixOperator:
  case DeclKind::PrefixOperator:
  case DeclKind::PostfixOperator:
  case DeclKind::TopLevelCode:
  case DeclKind::Extension:
  case DeclKind::Module:
  case DeclKind::PrecedenceGroup:
    llvm_unreachable("decl should never be a member");

  case DeclKind::MissingMember:
    llvm_unreachable("should never need to reserialize a member placeholder");

  case DeclKind::IfConfig:
  case DeclKind::PoundDiagnostic:
    return false;

  case DeclKind::EnumCase:
    return false;

  case DeclKind::OpaqueType:
    return true;

  case DeclKind::EnumElement:
  case DeclKind::Protocol:
  case DeclKind::Constructor:
  case DeclKind::Destructor:
  case DeclKind::PatternBinding:
  case DeclKind::Subscript:
  case DeclKind::TypeAlias:
  case DeclKind::GenericTypeParam:
  case DeclKind::AssociatedType:
  case DeclKind::Enum:
  case DeclKind::Struct:
  case DeclKind::Class:
  case DeclKind::Var:
  case DeclKind::Param:
  case DeclKind::Func:
  case DeclKind::Accessor:
    return true;
  }

  llvm_unreachable("Unhandled DeclKind in switch.");
}
/// Maps a swift::AccessorKind to its stable serialization value. The cases
/// are generated from AccessorKinds.def so the two enums stay in sync.
static serialization::AccessorKind <API key>(swift::AccessorKind K){
  switch (K) {
#define ACCESSOR(ID) \
  case swift::AccessorKind::ID: return serialization::ID;
#include "swift/AST/AccessorKinds.def"
  }

  llvm_unreachable("Unhandled AccessorKind in switch.");
}
/// Maps a swift::CtorInitializerKind to the identically-named stable
/// serialization value; the CASE macro keeps the enums mechanically in sync.
static serialization::CtorInitializerKind
<API key>(swift::CtorInitializerKind K){
  switch (K) {
#define CASE(NAME) \
  case swift::CtorInitializerKind::NAME: return serialization::NAME;
  CASE(Designated)
  CASE(Convenience)
  CASE(Factory)
  CASE(ConvenienceFactory)
#undef CASE
  }

  llvm_unreachable("Unhandled CtorInitializerKind in switch.");
}
/// Emits the cross-reference path records for \p DC, ancestors first.
///
/// \p pathLen is the number of path records that will follow the one for the
/// current context; each recursive step adds the count of records it will
/// contribute. File units anchor the path with the containing module; generic
/// type decls, extensions, subscripts, and function-like contexts (accessors,
/// constructors, operators, plain functions) each append their identifying
/// record(s). Purely local contexts can never be cross-referenced.
void Serializer::writeCrossReference(const DeclContext *DC, uint32_t pathLen) {
  using namespace decls_block;

  unsigned abbrCode;

  switch (DC->getContextKind()) {
  case DeclContextKind::AbstractClosureExpr:
  case DeclContextKind::Initializer:
  case DeclContextKind::TopLevelCodeDecl:
  case DeclContextKind::SerializedLocal:
  case DeclContextKind::EnumElementDecl:
    llvm_unreachable("cannot cross-reference this context");

  case DeclContextKind::Module:
    llvm_unreachable("should only cross-reference something within a file");

  case DeclContextKind::FileUnit:
    // Root of the path: the owning module plus the remaining path length.
    abbrCode = DeclTypeAbbrCodes[XRefLayout::Code];
    XRefLayout::emitRecord(Out, ScratchRecord, abbrCode,
                           <API key>(DC), pathLen);
    break;

  case DeclContextKind::GenericTypeDecl: {
    auto generic = cast<GenericTypeDecl>(DC);
    writeCrossReference(DC->getParent(), pathLen + 1);

    // Opaque return types are unnamed and need a special xref.
    if (auto opaque = dyn_cast<OpaqueTypeDecl>(generic)) {
      if (!opaque->hasName()) {
        abbrCode = DeclTypeAbbrCodes[<API key>::Code];
        <API key>::emitRecord(Out, ScratchRecord,
                              abbrCode,
                              addDeclBaseNameRef(opaque-><API key>()));
        break;
      }
    }

    assert(generic->hasName());

    abbrCode = DeclTypeAbbrCodes[<API key>::Code];

    Identifier discriminator;
    // Some decls additionally need a per-file discriminator to be found
    // unambiguously (the exact predicate is redacted here).
    if (generic-><API key>()) {
      auto *containingFile = cast<FileUnit>(generic-><API key>());
      discriminator = containingFile-><API key>(generic);
    }

    bool isProtocolExt = DC->getParent()-><API key>();

    <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                          addDeclBaseNameRef(generic->getName()),
                          addDeclBaseNameRef(discriminator),
                          isProtocolExt,
                          generic->hasClangNode());
    break;
  }

  case DeclContextKind::ExtensionDecl: {
    auto ext = cast<ExtensionDecl>(DC);
    auto nominal = ext->getExtendedNominal();
    assert(nominal);
    // Path goes through the extended nominal, not the extension's own file.
    writeCrossReference(nominal, pathLen + 1);

    abbrCode = DeclTypeAbbrCodes[<API key>::Code];
    CanGenericSignature genericSig(nullptr);
    if (ext-><API key>()) {
      genericSig = ext->getGenericSignature()-><API key>();
    }
    <API key>::emitRecord(
        Out, ScratchRecord, abbrCode, <API key>(DC),
        <API key>(genericSig));
    break;
  }

  case DeclContextKind::SubscriptDecl: {
    auto SD = cast<SubscriptDecl>(DC);
    writeCrossReference(DC->getParent(), pathLen + 1);

    // Subscripts are identified by interface type plus the special
    // SUBSCRIPT_ID base name.
    Type ty = SD->getInterfaceType()->getCanonicalType();

    abbrCode = DeclTypeAbbrCodes[<API key>::Code];
    bool isProtocolExt = SD->getDeclContext()-><API key>();
    <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                          addTypeRef(ty), SUBSCRIPT_ID,
                          isProtocolExt, SD->hasClangNode(),
                          SD->isStatic());
    break;
  }

  case DeclContextKind::<API key>: {
    // Accessors are referenced via their storage decl plus an extra record
    // carrying the accessor kind (hence pathLen + 2).
    if (auto fn = dyn_cast<AccessorDecl>(DC)) {
      auto storage = fn->getStorage();
      writeCrossReference(storage->getDeclContext(), pathLen + 2);

      Type ty = storage->getInterfaceType()->getCanonicalType();
      IdentifierID nameID = addDeclBaseNameRef(storage->getBaseName());
      bool isProtocolExt = fn->getDeclContext()-><API key>();
      abbrCode = DeclTypeAbbrCodes[<API key>::Code];
      <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                            addTypeRef(ty), nameID,
                            isProtocolExt,
                            storage->hasClangNode(),
                            storage->isStatic());

      abbrCode =
        DeclTypeAbbrCodes[<API key>::Code];
      auto emptyID = addDeclBaseNameRef(Identifier());
      auto accessorKind = <API key>(fn->getAccessorKind());
      assert(!fn->isObservingAccessor() &&
             "cannot form cross-reference to observing accessors");
      <API key>::emitRecord(Out, ScratchRecord,
                            abbrCode, emptyID,
                            accessorKind);
      break;
    }

    auto fn = cast<<API key>>(DC);
    // Operators need one extra trailing record for their fixity.
    writeCrossReference(DC->getParent(), pathLen + 1 + fn->isOperator());

    Type ty = fn->getInterfaceType()->getCanonicalType();

    if (auto ctor = dyn_cast<ConstructorDecl>(DC)) {
      abbrCode = DeclTypeAbbrCodes[<API key>::Code];
      <API key>::emitRecord(
          Out, ScratchRecord, abbrCode, addTypeRef(ty),
          (bool)ctor->getDeclContext()-><API key>(),
          ctor->hasClangNode(),
          <API key>(ctor->getInitKind()));
      break;
    }

    abbrCode = DeclTypeAbbrCodes[<API key>::Code];
    bool isProtocolExt = fn->getDeclContext()-><API key>();
    <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                          addTypeRef(ty),
                          addDeclBaseNameRef(fn->getBaseName()),
                          isProtocolExt, fn->hasClangNode(),
                          fn->isStatic());

    if (fn->isOperator()) {
      // Encode the fixity as a filter on the func decls, to distinguish prefix
      // and postfix operators.
      auto op = cast<FuncDecl>(fn)->getOperatorDecl();
      assert(op);
      abbrCode = DeclTypeAbbrCodes[<API key>::Code];
      auto emptyID = addDeclBaseNameRef(Identifier());
      auto fixity = getStableFixity(op->getKind());
      <API key>::emitRecord(Out, ScratchRecord,
                            abbrCode, emptyID,
                            fixity);
    }
    break;
  }
  }
}
/// Emits a cross-reference to declaration \p D, writing its context path
/// records first.
///
/// Operators and precedence groups are identified by name + fixity; function
/// decls delegate to the DeclContext overload (they may be operators); opaque
/// types, generic type parameters, named type decls, and plain value decls
/// each get their own record shape.
void Serializer::writeCrossReference(const Decl *D) {
  using namespace decls_block;

  unsigned abbrCode;

  if (auto op = dyn_cast<OperatorDecl>(D)) {
    writeCrossReference(op->getDeclContext(), 1);

    abbrCode = DeclTypeAbbrCodes[<API key>::Code];
    auto nameID = addDeclBaseNameRef(op->getName());
    auto fixity = getStableFixity(op->getKind());
    <API key>::emitRecord(Out, ScratchRecord,
                          abbrCode, nameID,
                          fixity);
    return;
  }

  if (auto prec = dyn_cast<PrecedenceGroupDecl>(D)) {
    writeCrossReference(prec->getDeclContext(), 1);

    abbrCode = DeclTypeAbbrCodes[<API key>::Code];
    auto nameID = addDeclBaseNameRef(prec->getName());
    // Precedence groups reuse the operator record with a dedicated "fixity".
    uint8_t fixity = OperatorKind::PrecedenceGroup;
    <API key>::emitRecord(Out, ScratchRecord,
                          abbrCode, nameID,
                          fixity);
    return;
  }

  if (auto fn = dyn_cast<<API key>>(D)) {
    // Functions are special because they might be operators.
    writeCrossReference(fn, 0);
    return;
  }

  writeCrossReference(D->getDeclContext());

  if (auto opaque = dyn_cast<OpaqueTypeDecl>(D)) {
    abbrCode = DeclTypeAbbrCodes[<API key>::Code];
    <API key>::emitRecord(Out, ScratchRecord,
                          abbrCode,
                          addDeclBaseNameRef(opaque-><API key>()));
    return;
  }

  if (auto genericParam = dyn_cast<<API key>>(D)) {
    assert(!D->getDeclContext()-><API key>() &&
           "Cannot cross reference a generic type decl at module scope.");
    // Generic parameters are positional: identified by (depth, index).
    abbrCode = DeclTypeAbbrCodes[<API key>::Code];
    <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                          genericParam->getDepth(),
                          genericParam->getIndex());
    return;
  }

  bool isProtocolExt = D->getDeclContext()-><API key>();
  if (auto type = dyn_cast<TypeDecl>(D)) {
    abbrCode = DeclTypeAbbrCodes[<API key>::Code];

    Identifier discriminator;
    // Some type decls additionally need a per-file discriminator (predicate
    // redacted here) so that same-named decls in other files don't collide.
    if (type-><API key>()) {
      auto *containingFile =
         cast<FileUnit>(type->getDeclContext()-><API key>());
      discriminator = containingFile-><API key>(type);
    }

    <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                          addDeclBaseNameRef(type->getName()),
                          addDeclBaseNameRef(discriminator),
                          isProtocolExt, D->hasClangNode());
    return;
  }

  // Fallback: a value decl identified by type + base name.
  auto val = cast<ValueDecl>(D);
  auto ty = val->getInterfaceType()->getCanonicalType();

  abbrCode = DeclTypeAbbrCodes[<API key>::Code];
  IdentifierID iid = addDeclBaseNameRef(val->getBaseName());
  <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                        addTypeRef(ty), iid, isProtocolExt,
                        D->hasClangNode(), val->isStatic());
}
/// Translate from the AST associativity enum to the Serialization enum
/// values, which are guaranteed to be stable.
/// Maps an AST associativity value onto its stable serialized encoding.
static uint8_t <API key>(swift::Associativity assoc) {
  using SA = serialization::Associativity;
  switch (assoc) {
  case swift::Associativity::Left:
    return SA::LeftAssociative;
  case swift::Associativity::Right:
    return SA::RightAssociative;
  case swift::Associativity::None:
    return SA::NonAssociative;
  }

  llvm_unreachable("Unhandled Associativity in switch.");
}
/// Maps an AST static-spelling kind onto its stable serialized counterpart
/// (identical case names on both sides).
static serialization::StaticSpellingKind
<API key>(swift::StaticSpellingKind SS) {
  using In = swift::StaticSpellingKind;
  using Out = serialization::StaticSpellingKind;
  switch (SS) {
  case In::None:
    return Out::None;
  case In::KeywordStatic:
    return Out::KeywordStatic;
  case In::KeywordClass:
    return Out::KeywordClass;
  }

  llvm_unreachable("Unhandled StaticSpellingKind in switch.");
}
/// Maps a swift::AccessLevel to the identically-named stable serialization
/// value; the CASE macro keeps the two enums mechanically in sync.
static uint8_t <API key>(swift::AccessLevel access) {
  switch (access) {
#define CASE(NAME) \
  case swift::AccessLevel::NAME: \
    return static_cast<uint8_t>(serialization::AccessLevel::NAME);
  CASE(Private)
  CASE(FilePrivate)
  CASE(Internal)
  CASE(Public)
  CASE(Open)
#undef CASE
  }

  llvm_unreachable("Unhandled AccessLevel in switch.");
}
/// Maps a swift::SelfAccessKind onto its stable serialization counterpart
/// (identical case names on both sides).
static serialization::SelfAccessKind
<API key>(swift::SelfAccessKind MM) {
  switch (MM) {
  case swift::SelfAccessKind::NonMutating:
    return serialization::SelfAccessKind::NonMutating;
  case swift::SelfAccessKind::Mutating:
    return serialization::SelfAccessKind::Mutating;
  case swift::SelfAccessKind::Consuming:
    return serialization::SelfAccessKind::Consuming;
  }

  // Fixed copy-pasted diagnostic: this switch is over SelfAccessKind, not
  // StaticSpellingKind.
  llvm_unreachable("Unhandled SelfAccessKind in switch.");
}
#ifndef NDEBUG
// This is done with a macro so that we get a slightly more useful assertion.
// For every Decl subclass KIND listed in DeclNodes.def, instantiate a helper
// that asserts each attribute on a KIND-decl may legally appear there.
# define DECL(KIND, PARENT)\
<API key> \
static void <API key>(const KIND ## Decl *D) {\
  for (auto Attr : D->getAttrs()) {\
    assert(Attr->canAppearOnDecl(D) && "attribute cannot appear on a " #KIND);\
  }\
}
# include "swift/AST/DeclNodes.def"

#else
// In release builds, attribute verification is a no-op.
static void <API key>(const Decl *D) {}
#endif
/// Returns true if \p D must be emitted as a cross-reference rather than
/// serialized as a definition in this module file.
bool Serializer::isDeclXRef(const Decl *D) const {
  const DeclContext *topLevel = D->getDeclContext()-><API key>();
  // Decls from other modules are always cross-references.
  if (topLevel->getParentModule() != M)
    return true;
  // Serializing the whole module (no single SF), or the decl lives in the
  // file being serialized: it's a definition, not an xref.
  if (!SF || topLevel == SF)
    return false;
  // Special-case for SIL generic parameter decls, which don't have a real
  // DeclContext.
  if (!isa<FileUnit>(topLevel)) {
    assert(isa<<API key>>(D) && "unexpected decl kind");
    return false;
  }
  return true;
}
/// Serializes a pattern-binding initializer context: the owning binding decl,
/// the entry index, and (when required) the initializer's source text.
void Serializer::<API key>(PatternBindingDecl *binding,
                           unsigned bindingIndex) {
  using namespace decls_block;
  auto abbrCode = DeclTypeAbbrCodes[<API key>::Code];
  StringRef initStr;
  SmallString<128> scratch;
  auto &entry = binding->getPatternList()[bindingIndex];
  auto varDecl = entry.getAnchoringVarDecl();
  // Capture the initializer text only when both the entry and the anchoring
  // var decl ask for it (both predicates are redacted here — confirm
  // against upstream source).
  if (entry.<API key>() &&
      varDecl-><API key>()) {
    initStr = entry.<API key>(scratch);
  }

  <API key>::emitRecord(Out, ScratchRecord,
                        abbrCode, addDeclRef(binding),
                        bindingIndex, initStr);
}
/// Serializes a default-argument initializer context as a (parent context,
/// parameter index) pair.
void
Serializer::<API key>(const DeclContext *parentContext,
                      unsigned index) {
  using namespace decls_block;
  auto abbrCode = DeclTypeAbbrCodes[<API key>::Code];
  auto parentID = addDeclContextRef(parentContext);
  <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                        parentID.getOpaqueValue(),
                        index);
}
/// Serializes an abstract closure context: its type, implicitness flag,
/// discriminator, and parent decl context.
void Serializer::<API key>(const DeclContext *parentContext,
                           Type Ty, bool isImplicit,
                           unsigned discriminator) {
  using namespace decls_block;
  auto abbrCode = DeclTypeAbbrCodes[<API key>::Code];
  auto parentID = addDeclContextRef(parentContext);
  <API key>::emitRecord(Out, ScratchRecord, abbrCode,
                        addTypeRef(Ty), isImplicit,
                        discriminator,
                        parentID.getOpaqueValue());
}
/// Serializes a local DeclContext (closure, initializer, or top-level code,
/// plus the already-serialized variants of each) by dispatching to the
/// appropriate helper; any non-local context is unreachable here because it
/// is serialized as a Decl instead.
void Serializer::writeASTBlockEntity(const DeclContext *DC) {
  using namespace decls_block;

  assert(<API key>(DC) &&
         "should be serialized as a Decl instead");
  assert(<API key>.hasRef(DC));

  switch (DC->getContextKind()) {
  case DeclContextKind::AbstractClosureExpr: {
    auto ACE = cast<AbstractClosureExpr>(DC);
    <API key>(ACE->getParent(), ACE->getType(),
              ACE->isImplicit(), ACE->getDiscriminator());
    break;
  }

  case DeclContextKind::Initializer: {
    // Two flavors of initializer context: pattern-binding and default-arg.
    if (auto PBI = dyn_cast<<API key>>(DC)) {
      <API key>(PBI->getBinding(), PBI->getBindingIndex());
    } else if (auto DAI = dyn_cast<<API key>>(DC)) {
      <API key>(DAI->getParent(), DAI->getIndex());
    }
    break;
  }

  case DeclContextKind::TopLevelCodeDecl: {
    auto abbrCode = DeclTypeAbbrCodes[<API key>::Code];
    <API key>::emitRecord(Out, ScratchRecord, abbrCode,
        addDeclContextRef(DC->getParent()).getOpaqueValue());
    break;
  }

  // If we are merging already serialized modules with local decl contexts,
  // we handle them here in a similar fashion.
  case DeclContextKind::SerializedLocal: {
    auto local = cast<<API key>>(DC);
    switch (local-><API key>()) {
    case <API key>::AbstractClosure: {
      auto SACE = cast<<API key>>(local);
      <API key>(SACE->getParent(), SACE->getType(),
                SACE->isImplicit(), SACE->getDiscriminator());
      return;
    }
    case <API key>::<API key>: {
      auto DAI = cast<<API key>>(local);
      <API key>(DAI->getParent(), DAI->getIndex());
      return;
    }
    case <API key>::<API key>: {
      auto PBI = cast<<API key>>(local);
      <API key>(PBI->getBinding(), PBI->getBindingIndex());
      return;
    }
    case <API key>::TopLevelCodeDecl: {
      auto abbrCode = DeclTypeAbbrCodes[<API key>::Code];
      <API key>::emitRecord(Out, ScratchRecord,
          abbrCode, addDeclContextRef(DC->getParent()).getOpaqueValue());
      return;
    }
    }
  }

  default:
    llvm_unreachable("Trying to write a DeclContext that isn't local");
  }
}
/// Maps a foreign error convention kind onto its stable serialized
/// counterpart (identical case names on both sides).
static <API key> <API key>(
    <API key>::Kind kind) {
  switch (kind) {
  case <API key>::ZeroResult:
    return <API key>::ZeroResult;
  case <API key>::NonZeroResult:
    return <API key>::NonZeroResult;
  case <API key>::ZeroPreservedResult:
    return <API key>::ZeroPreservedResult;
  case <API key>::NilResult:
    return <API key>::NilResult;
  case <API key>::NonNilError:
    return <API key>::NonNilError;
  }

  llvm_unreachable("Unhandled <API key> in switch.");
}
/// Translate from the AST VarDeclSpecifier enum to the
/// Serialization enum values, which are guaranteed to be stable.
/// Maps an AST parameter specifier onto its stable serialized encoding
/// (identical case names on both sides).
static uint8_t <API key>(swift::ParamDecl::Specifier sf) {
  using In = swift::ParamDecl::Specifier;
  using Out = serialization::ParamDeclSpecifier;
  switch (sf) {
  case In::Default:
    return uint8_t(Out::Default);
  case In::InOut:
    return uint8_t(Out::InOut);
  case In::Shared:
    return uint8_t(Out::Shared);
  case In::Owned:
    return uint8_t(Out::Owned);
  }
  llvm_unreachable("bad param decl specifier kind");
}
/// Maps a variable declaration introducer (`let`/`var`) onto its stable
/// serialized encoding.
static uint8_t <API key>(swift::VarDecl::Introducer intr) {
  if (intr == swift::VarDecl::Introducer::Let)
    return uint8_t(serialization::VarDeclIntroducer::Let);
  if (intr == swift::VarDecl::Introducer::Var)
    return uint8_t(serialization::VarDeclIntroducer::Var);
  llvm_unreachable("bad variable decl introducer kind");
}
/// Returns true if the declaration of \p decl depends on \p problemContext
/// based on lexical nesting.
///
/// - \p decl is \p problemContext
/// - \p decl is declared within \p problemContext
/// - \p decl is declared in an extension of a type that depends on
///   \p problemContext
/// Returns true if \p decl's definition lexically depends on
/// \p problemContext: the decl is that context, is nested inside it, or is
/// declared in an extension whose walk leads back into it.
static bool contextDependsOn(const NominalTypeDecl *decl,
                             const DeclContext *problemContext) {
  // Extensions already visited, so mutually-dependent extensions terminate.
  SmallPtrSet<const ExtensionDecl *, 8> seenExtensionDCs;

  const DeclContext *dc = decl;
  do {
    if (dc == problemContext)
      return true;

    if (auto *extension = dyn_cast<ExtensionDecl>(dc)) {
      if (extension->isChildContextOf(problemContext))
        return true;

      // Avoid cycles when Left.Nested depends on Right.Nested somehow.
      bool isNewlySeen = seenExtensionDCs.insert(extension).second;
      if (!isNewlySeen)
        break;

      // For extensions, continue the walk through the (redacted) related
      // context instead of the plain lexical parent.
      dc = extension-><API key>();
    } else {
      dc = dc->getParent();
    }
  } while (dc);

  return false;
}
// Collects, into \p seen, the nominal types referenced by \p ty, skipping any
// nominal whose declaration lexically depends on \p excluding (see
// contextDependsOn above). Used to record cross-module deserialization
// dependencies.
static void <API key>(llvm::SmallSetVector<Type, 4> &seen,
Type ty,
const DeclContext *excluding) {
ty.visit([&](Type next) {
auto *nominal = next->getAnyNominal();
if (!nominal)
return;
if (contextDependsOn(nominal, excluding))
return;
// Record the nominal's type (redacted accessor — presumably its declared
// interface type; TODO confirm).
seen.insert(nominal-><API key>());
});
}
// Same as above, but for both sides of a generic requirement. Layout
// requirements have no meaningful second type, so it is skipped.
static void
<API key>(llvm::SmallSetVector<Type, 4> &seen,
const Requirement &req,
const DeclContext *excluding) {
<API key>(seen, req.getFirstType(), excluding);
if (req.getKind() != RequirementKind::Layout)
<API key>(seen, req.getSecondType(), excluding);
}
// Convenience wrapper: collect all nominal-type dependencies of \p ty with no
// exclusions and return them as a vector (insertion-ordered, deduplicated).
static SmallVector<Type, 4> <API key>(Type ty) {
llvm::SmallSetVector<Type, 4> result;
<API key>(result, ty, /*excluding*/nullptr);
return result.takeVector();
}
// Visitor that serializes a single declaration (identified by `id`) into the
// bitstream owned by `S`. One instance serializes exactly one decl; the
// destructor asserts that attribute serializability was verified.
class Serializer::DeclSerializer : public DeclVisitor<DeclSerializer> {
Serializer &S;          // owning serializer: output stream, abbrev codes, ID tables
DeclID id;              // the DeclID being serialized
bool didVerifyAttrs = false;  // set once attributes were checked; asserted in dtor
// Delegates to the free-standing attribute-serializability check for this
// decl kind, and records that the check ran.
template <typename DeclKind>
void <API key>(const DeclKind *D) {
::<API key>(D);
didVerifyAttrs = true;
}
// Emits one serialized record for the given declaration attribute.
// Non-serialized and invalid attributes are skipped; "simple" attributes
// (no payload beyond the implicit bit) are handled by the X-macro expansion
// of SIMPLE_DECL_ATTR; the remaining cases each emit a dedicated layout.
void writeDeclAttribute(const DeclAttribute *DA) {
using namespace decls_block;
// Completely ignore attributes that aren't serialized.
if (DA->isNotSerialized())
return;
// Ignore attributes that have been marked invalid. (This usually means
// type-checking removed them, but only provided a warning rather than an
// error.)
if (DA->isInvalid())
return;
switch (DA->getKind()) {
// These kinds are either encoded elsewhere or can never appear here.
case DAK_RawDocComment:
case <API key>: // Serialized as part of the type.
case DAK_AccessControl:
case DAK_SetterAccess:
case DAK_ObjCBridged:
case <API key>:
case DAK_Implements:
case DAK_ObjCRuntimeName:
case <API key>:
case <API key>:
case DAK_PrivateImport:
llvm_unreachable("cannot serialize attribute");
case DAK_Count:
llvm_unreachable("not a real attribute");
// Payload-free attributes: emit just the implicit bit, one layout per kind.
#define SIMPLE_DECL_ATTR(_, CLASS, ...)\
case DAK_##CLASS: { \
auto abbrCode = S.DeclTypeAbbrCodes[CLASS##DeclAttrLayout::Code]; \
CLASS##DeclAttrLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode, \
DA->isImplicit()); \
return; \
}
#include "swift/AST/Attr.def"
case DAK_SILGenName: {
auto *theAttr = cast<SILGenNameAttr>(DA);
auto abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
theAttr->isImplicit(),
theAttr->Name);
return;
}
case DAK_CDecl: {
auto *theAttr = cast<CDeclAttr>(DA);
auto abbrCode = S.DeclTypeAbbrCodes[CDeclDeclAttrLayout::Code];
CDeclDeclAttrLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
theAttr->isImplicit(),
theAttr->Name);
return;
}
case DAK_Alignment: {
auto *theAlignment = cast<AlignmentAttr>(DA);
auto abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
theAlignment->isImplicit(),
theAlignment->getValue());
return;
}
case <API key>: {
auto *theBase = cast<<API key>>(DA);
auto abbrCode
= S.DeclTypeAbbrCodes[<API key>::Code];
// Class names are interned as identifier IDs, not raw strings.
auto nameID = S.addDeclBaseNameRef(theBase->BaseClassName);
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
theBase->isImplicit(), nameID);
return;
}
case DAK_Semantics: {
auto *theAttr = cast<SemanticsAttr>(DA);
auto abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
theAttr->isImplicit(),
theAttr->Value);
return;
}
case DAK_Inline: {
auto *theAttr = cast<InlineAttr>(DA);
auto abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
(unsigned)theAttr->getKind());
return;
}
case DAK_Optimize: {
auto *theAttr = cast<OptimizeAttr>(DA);
auto abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
(unsigned)theAttr->getMode());
return;
}
case DAK_Effects: {
auto *theAttr = cast<EffectsAttr>(DA);
auto abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
(unsigned)theAttr->getKind());
return;
}
case DAK_Available: {
auto *theAttr = cast<AvailableAttr>(DA);
// Version tuples are flattened into fixed fields by these macros.
ENCODE_VER_TUPLE(Introduced, theAttr->Introduced)
ENCODE_VER_TUPLE(Deprecated, theAttr->Deprecated)
ENCODE_VER_TUPLE(Obsoleted, theAttr->Obsoleted)
// Message and rename strings travel concatenated in the trailing blob;
// their lengths are recorded separately so they can be split apart.
llvm::SmallString<32> blob;
blob.append(theAttr->Message);
blob.append(theAttr->Rename);
auto abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
theAttr->isImplicit(),
theAttr-><API key>(),
theAttr-><API key>(),
theAttr-><API key>(),
<API key>(Introduced),
<API key>(Deprecated),
<API key>(Obsoleted),
static_cast<unsigned>(theAttr->Platform),
theAttr->Message.size(),
theAttr->Rename.size(),
blob);
return;
}
case DAK_ObjC: {
auto *theAttr = cast<ObjCAttr>(DA);
SmallVector<IdentifierID, 4> pieces;
unsigned numArgs = 0;
if (auto name = theAttr->getName()) {
// +1 distinguishes "present with zero args" from "no name at all".
numArgs = name->getNumArgs() + 1;
for (auto piece : name->getSelectorPieces()) {
pieces.push_back(S.addDeclBaseNameRef(piece));
}
}
auto abbrCode = S.DeclTypeAbbrCodes[ObjCDeclAttrLayout::Code];
ObjCDeclAttrLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
theAttr->isImplicit(),
theAttr->isSwift3Inferred(),
theAttr->isNameImplicit(), numArgs, pieces);
return;
}
case DAK_Specialize: {
auto abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
auto SA = cast<SpecializeAttr>(DA);
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
(unsigned)SA->isExported(),
(unsigned)SA-><API key>(),
S.<API key>(SA-><API key>()));
return;
}
case <API key>: {
auto abbrCode =
S.DeclTypeAbbrCodes[<API key>::Code];
auto theAttr = cast<<API key>>(DA);
// Serialize the replaced function's full name as base name + arg labels.
auto replacedFun = theAttr-><API key>();
SmallVector<IdentifierID, 4> pieces;
pieces.push_back(S.addDeclBaseNameRef(replacedFun.getBaseName()));
for (auto argName : replacedFun.getArgumentNames())
pieces.push_back(S.addDeclBaseNameRef(argName));
assert(theAttr->getReplacedFunction());
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode, false, /*implicit flag*/
S.addDeclRef(theAttr->getReplacedFunction()), pieces.size(), pieces);
return;
}
case DAK_Custom: {
auto abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
auto theAttr = cast<CustomAttr>(DA);
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode, theAttr->isImplicit(),
S.addTypeRef(theAttr->getTypeLoc().getType()));
return;
}
case <API key>: {
auto abbrCode =
S.DeclTypeAbbrCodes[<API key>::Code];
auto theAttr = cast<<API key>>(DA);
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode, theAttr->isImplicit(),
S.addDeclBaseNameRef(theAttr-><API key>));
// NOTE(review): this case uses `break` where every sibling uses
// `return` — likely equivalent here (end of switch), but confirm.
break;
}
}
}
// Emits auxiliary records that disambiguate a value decl at deserialization
// time: a private discriminator for (file)private decls, the defining
// filename when private-import support is enabled, and a local
// discriminator for decls inside local contexts.
void <API key>(const ValueDecl *value) {
using namespace decls_block;
auto *storage = dyn_cast<AbstractStorageDecl>(value);
auto access = value->getFormalAccess();
// Emit the private discriminator for private decls.
// FIXME: We shouldn't need to encode this for /all/ private decls.
// In theory we can follow the same rules as mangling and only include
// the outermost private context.
bool <API key> =
access <= swift::AccessLevel::FilePrivate &&
!value->getDeclContext()->isLocalContext();
// Emit the filename for private mapping for private decls and
// decls with private accessors if compiled with the private-import flag.
bool <API key> =
S.M-><API key>() &&
!value->getDeclContext()->isLocalContext() &&
(access <= swift::AccessLevel::FilePrivate ||
(storage &&
storage->getFormalAccess() >= swift::AccessLevel::Internal &&
storage->hasPrivateAccessor()));
if (<API key> || <API key>) {
auto topLevelContext = value->getDeclContext()-><API key>();
if (auto *enclosingFile = dyn_cast<FileUnit>(topLevelContext)) {
if (<API key>) {
// The discriminator is derived from the enclosing file so that
// same-named privates in different files stay distinct.
Identifier discriminator =
enclosingFile-><API key>(value);
unsigned abbrCode =
S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(discriminator));
}
// Resolve a display filename for either a source file or a loaded
// (already-serialized) file; empty if neither applies.
auto getFilename = [](FileUnit *enclosingFile,
const ValueDecl *decl) -> StringRef {
if (auto *SF = dyn_cast<SourceFile>(enclosingFile)) {
return llvm::sys::path::filename(SF->getFilename());
} else if (auto *LF = dyn_cast<LoadedFile>(enclosingFile)) {
return LF-><API key>(decl);
}
return StringRef();
};
if (<API key>) {
auto filename = getFilename(enclosingFile, value);
if (!filename.empty()) {
auto filenameID = S.addFilename(filename);
<API key>::emitRecord(
S.Out, S.ScratchRecord,
S.DeclTypeAbbrCodes[<API key>::Code],
filenameID);
}
}
}
}
// Local decls additionally carry a numeric discriminator distinguishing
// same-named siblings within one local context.
if (value->getDeclContext()->isLocalContext()) {
auto discriminator = value-><API key>();
auto abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
discriminator);
}
}
// Serializes a foreign error convention: kind, ownership, replacement flag,
// error parameter info, and — only for the result-carrying kinds — the
// result type.
void <API key>(const <API key> &fec) {
using namespace decls_block;
auto kind = <API key>(fec.getKind());
uint8_t isOwned = fec.isErrorOwned() == <API key>::IsOwned;
uint8_t isReplaced = bool(fec.<API key>());
TypeID <API key> = S.addTypeRef(fec.<API key>());
TypeID resultTypeID;
switch (fec.getKind()) {
case <API key>::ZeroResult:
case <API key>::NonZeroResult:
// Only these kinds have a meaningful result type to record.
resultTypeID = S.addTypeRef(fec.getResultType());
break;
case <API key>::ZeroPreservedResult:
case <API key>::NilResult:
case <API key>::NonNilError:
resultTypeID = 0;
break;
}
auto abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
static_cast<uint8_t>(kind),
isOwned,
isReplaced,
fec.<API key>(),
<API key>,
resultTypeID);
}
// Serializes a generic parameter list as a single record of DeclIDs.
// A null list produces no output at all (not an empty record).
void writeGenericParams(const GenericParamList *genericParams) {
using namespace decls_block;
// Don't write anything if there are no generic params.
if (!genericParams)
return;
SmallVector<DeclID, 4> paramIDs;
for (auto next : genericParams->getParams())
paramIDs.push_back(S.addDeclRef(next));
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
paramIDs);
}
void writeParameterList(const ParameterList *PL) {
using namespace decls_block;
SmallVector<DeclID, 8> paramIDs;
for (const ParamDecl *param : *PL)
paramIDs.push_back(S.addDeclRef(param));
unsigned abbrCode = S.DeclTypeAbbrCodes[ParameterListLayout::Code];
ParameterListLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode, paramIDs);
}
/// Writes an array of members for a decl context.
///
/// \param parentID The DeclID of the context.
/// \param members The decls within the context.
/// \param isClass True if the context could be a class context (class,
///        class extension, or protocol).
// Serializes the member list of a decl context and updates the module-wide
// lookup side tables (member names, @_implements names, and — for class-like
// contexts — the ObjC method table).
void writeMembers(DeclID parentID, DeclRange members, bool isClass) {
using namespace decls_block;
SmallVector<DeclID, 16> memberIDs;
for (auto member : members) {
// Skip members the serializer deems unserializable (redacted predicate).
if (!<API key>(member))
continue;
DeclID memberID = S.addDeclRef(member);
memberIDs.push_back(memberID);
if (auto VD = dyn_cast<ValueDecl>(member)) {
// Record parent->members in subtable of DeclMemberNames
if (VD->hasName() &&
!VD->getBaseName().empty()) {
std::unique_ptr<DeclMembersTable> &memberTable =
S.DeclMemberNames[VD->getBaseName()].second;
if (!memberTable) {
memberTable = llvm::make_unique<DeclMembersTable>();
}
(*memberTable)[parentID].push_back(memberID);
}
// Same as above, but for @_implements attributes
if (auto A = VD->getAttrs().getAttribute<ImplementsAttr>()) {
std::unique_ptr<DeclMembersTable> &memberTable =
S.DeclMemberNames[A->getMemberName().getBaseName()].second;
if (!memberTable) {
memberTable = llvm::make_unique<DeclMembersTable>();
}
(*memberTable)[parentID].push_back(memberID);
}
// Possibly add a record to the class-member lookup table too.
if (isClass) {
if (VD-><API key>()) {
auto &list = S.<API key>[VD->getBaseName()];
list.push_back({getKindForTable(VD), memberID});
}
}
}
}
unsigned abbrCode = S.DeclTypeAbbrCodes[MembersLayout::Code];
MembersLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode, memberIDs);
}
/// Writes the given pattern, recursively.
// Recursively serializes an (irrefutable) pattern. Refutable pattern kinds
// are rejected with llvm_unreachable — they never appear in serialized
// declarations.
void writePattern(const Pattern *pattern) {
using namespace decls_block;
// Retrieve the type of the pattern.
auto getPatternType = [&] {
Type type = pattern->getType();
// If we have a contextual type, map out to an interface type.
if (type->hasArchetype())
type = type->mapTypeOutOfContext();
return type;
};
assert(pattern && "null pattern");
switch (pattern->getKind()) {
case PatternKind::Paren: {
unsigned abbrCode = S.DeclTypeAbbrCodes[ParenPatternLayout::Code];
ParenPatternLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
pattern->isImplicit());
// Child pattern follows its parent record in the stream.
writePattern(cast<ParenPattern>(pattern)->getSubPattern());
break;
}
case PatternKind::Tuple: {
auto tuple = cast<TuplePattern>(pattern);
unsigned abbrCode = S.DeclTypeAbbrCodes[TuplePatternLayout::Code];
TuplePatternLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addTypeRef(getPatternType()),
tuple->getNumElements(),
tuple->isImplicit());
// One element record (label) per tuple element, each followed by the
// element's own pattern.
abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
for (auto &elt : tuple->getElements()) {
// FIXME: Default argument expressions?
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(elt.getLabel()));
writePattern(elt.getPattern());
}
break;
}
case PatternKind::Named: {
auto named = cast<NamedPattern>(pattern);
unsigned abbrCode = S.DeclTypeAbbrCodes[NamedPatternLayout::Code];
NamedPatternLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclRef(named->getDecl()),
S.addTypeRef(getPatternType()),
named->isImplicit());
break;
}
case PatternKind::Any: {
unsigned abbrCode = S.DeclTypeAbbrCodes[AnyPatternLayout::Code];
AnyPatternLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addTypeRef(getPatternType()),
pattern->isImplicit());
break;
}
case PatternKind::Typed: {
auto typed = cast<TypedPattern>(pattern);
unsigned abbrCode = S.DeclTypeAbbrCodes[TypedPatternLayout::Code];
TypedPatternLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addTypeRef(getPatternType()),
typed->isImplicit());
writePattern(typed->getSubPattern());
break;
}
case PatternKind::Is:
case PatternKind::EnumElement:
case PatternKind::OptionalSome:
case PatternKind::Bool:
case PatternKind::Expr:
llvm_unreachable("Refutable patterns cannot be serialized");
case PatternKind::Var: {
auto var = cast<VarPattern>(pattern);
unsigned abbrCode = S.DeclTypeAbbrCodes[VarPatternLayout::Code];
VarPatternLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
var->isLet(), var->isImplicit());
writePattern(var->getSubPattern());
break;
}
}
}
// Serializes a protocol's default witness table as a flat list of
// (requirement, witness) DeclID pairs.
void <API key>(const ProtocolDecl *proto) {
using namespace decls_block;
SmallVector<DeclID, 16> witnessIDs;
for (auto member : proto->getMembers()) {
if (auto *value = dyn_cast<ValueDecl>(member)) {
auto witness = proto->getDefaultWitness(value);
if (!witness)
continue;
// Pairs are interleaved: requirement ID immediately precedes witness ID.
DeclID requirementID = S.addDeclRef(value);
DeclID witnessID = S.addDeclRef(witness.getDecl());
witnessIDs.push_back(requirementID);
witnessIDs.push_back(witnessID);
// FIXME: Substitutions
}
}
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord,
abbrCode, witnessIDs);
}
/// Writes the body text of the provided function, if the function is
/// inlinable and has body text.
// Writes the raw source text of an inlinable function body, but only when
// serializing a partial module (S.SF non-null) and the function is emitted
// with minimal resilience expansion.
void <API key>(const <API key> *AFD) {
using namespace decls_block;
// Only serialize the text for an inlinable function body if we're emitting
// a partial module. It's not needed in the final module file, but it's
// needed in partial modules so you can emit a module interface after
// merging them.
if (!S.SF) return;
if (AFD-><API key>() != swift::ResilienceExpansion::Minimal)
return;
// Bail if there is no body text available (redacted predicate).
if (!AFD-><API key>()) return;
SmallString<128> scratch;
auto body = AFD-><API key>(scratch);
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode, body);
}
// Counts how many of a storage decl's accessors require their own vtable
// entry.
unsigned <API key>(
const AbstractStorageDecl *storage) const {
unsigned count = 0;
for (auto *accessor : storage->getAllAccessors()) {
if (accessor->needsNewVTableEntry())
count++;
}
return count;
}
public:
DeclSerializer(Serializer &S, DeclID id) : S(S), id(id) {}
~DeclSerializer() {
// Every visit path must have verified attribute serializability.
assert(didVerifyAttrs);
}
// Entry point: serializes attributes and discriminators before dispatching
// to the kind-specific visitor.
void visit(const Decl *D) {
// Emit attributes (if any).
for (auto Attr : D->getAttrs())
writeDeclAttribute(Attr);
if (auto *value = dyn_cast<ValueDecl>(D))
<API key>(value);
DeclVisitor<DeclSerializer>::visit(const_cast<Decl *>(D));
}
/// If this gets referenced, we forgot to handle a decl.
void visitDecl(const Decl *) = delete;
// Serializes an extension: the extended type (desugared to canonical form),
// inherited entries, nominal-type deserialization dependencies, all generic
// parameter lists (outermost first), members, and conformances.
void visitExtensionDecl(const ExtensionDecl *extension) {
using namespace decls_block;
<API key>(extension);
auto contextID = S.addDeclContextRef(extension->getDeclContext());
Type extendedType = extension->getExtendedType();
assert(!extendedType->hasArchetype());
// FIXME: Use the canonical type here in order to minimize circularity
// issues at deserialization time. A known problematic case here is
// "extension of typealias Foo"; "typealias Foo = SomeKit.Bar"; and then
// trying to import Bar accidentally asking for all of its extensions
// (perhaps because we're searching for a conformance).
// We could limit this to only the problematic cases, but it seems like a
// simpler user model to just always desugar extension types.
extendedType = extendedType->getCanonicalType();
auto conformances = extension-><API key>(
<API key>::All, nullptr);
SmallVector<TypeID, 8> <API key>;
for (auto inherited : extension->getInherited()) {
assert(!inherited.getType()->hasArchetype());
<API key>.push_back(S.addTypeRef(inherited.getType()));
}
// Inherited entries and dependency types share one trailing array; the
// count split is recorded via numInherited.
size_t numInherited = <API key>.size();
llvm::SmallSetVector<Type, 4> dependencies;
<API key>(
dependencies, extendedType, /*excluding*/nullptr);
for (Requirement req : extension-><API key>()) {
<API key>(dependencies, req,
/*excluding*/nullptr);
}
for (auto dependencyTy : dependencies)
<API key>.push_back(S.addTypeRef(dependencyTy));
unsigned abbrCode = S.DeclTypeAbbrCodes[ExtensionLayout::Code];
auto extendedNominal = extension->getExtendedNominal();
ExtensionLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addTypeRef(extendedType),
S.addDeclRef(extendedNominal),
contextID.getOpaqueValue(),
extension->isImplicit(),
S.<API key>(
extension->getGenericSignature()),
conformances.size(),
numInherited,
<API key>);
// Protocol extensions count as "class" contexts for member-table purposes.
bool isClassExtension = false;
if (extendedNominal) {
isClassExtension = isa<ClassDecl>(extendedNominal) ||
isa<ProtocolDecl>(extendedNominal);
}
// Extensions of nested generic types have multiple generic parameter
// lists. Collect them all, from the innermost to outermost.
SmallVector<GenericParamList *, 2> allGenericParams;
for (auto *genericParams = extension->getGenericParams();
genericParams != nullptr;
genericParams = genericParams->getOuterParameters()) {
allGenericParams.push_back(genericParams);
}
// Reverse the list, and write the parameter lists, from outermost
// to innermost.
for (auto *genericParams : swift::reversed(allGenericParams))
writeGenericParams(genericParams);
writeMembers(id, extension->getMembers(), isClassExtension);
S.writeConformances(conformances, S.DeclTypeAbbrCodes);
}
// Serializes a pattern binding: per-entry initializer contexts (sparsely
// encoded — see below), the binding record itself, then each entry's
// pattern. Initializer expressions are intentionally not serialized.
void <API key>(const PatternBindingDecl *binding) {
using namespace decls_block;
<API key>(binding);
auto contextID = S.addDeclContextRef(binding->getDeclContext());
SmallVector<uint64_t, 2> initContextIDs;
for (unsigned i : range(binding-><API key>())) {
auto initContextID =
S.addDeclContextRef(binding->getPatternList()[i].getInitContext());
if (!initContextIDs.empty()) {
// Once any entry has a context, every later entry is recorded too.
initContextIDs.push_back(initContextID.getOpaqueValue());
} else if (initContextID) {
// First non-null context: backfill zeros for the preceding entries so
// indices still line up. (If no entry ever has a context, the array
// stays empty.)
initContextIDs.append(i, 0);
initContextIDs.push_back(initContextID.getOpaqueValue());
}
}
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode, contextID.getOpaqueValue(),
binding->isImplicit(), binding->isStatic(),
uint8_t(<API key>(binding->getStaticSpelling())),
binding-><API key>(),
initContextIDs);
DeclContext *owningDC = nullptr;
if (binding->getDeclContext()->isTypeContext())
owningDC = binding->getDeclContext();
for (auto entry : binding->getPatternList()) {
writePattern(entry.getPattern());
// Ignore initializer; external clients don't need to know about it.
}
}
// Serializes a precedence group. Higher-than and lower-than relations share
// one trailing array; the split point is the recorded higher-than count.
void <API key>(const PrecedenceGroupDecl *group) {
using namespace decls_block;
<API key>(group);
auto contextID = S.addDeclContextRef(group->getDeclContext());
auto nameID = S.addDeclBaseNameRef(group->getName());
auto associativity = <API key>(group->getAssociativity());
SmallVector<DeclID, 8> relations;
for (auto &rel : group->getHigherThan())
relations.push_back(S.addDeclRef(rel.Group));
for (auto &rel : group->getLowerThan())
relations.push_back(S.addDeclRef(rel.Group));
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
nameID, contextID.getOpaqueValue(),
associativity, group->isAssignment(),
group->getHigherThan().size(),
relations);
}
// Serializes an infix operator, including its precedence group reference and
// designated-nominal-type decl references (redacted accessor).
void <API key>(const InfixOperatorDecl *op) {
using namespace decls_block;
<API key>(op);
auto contextID = S.addDeclContextRef(op->getDeclContext());
auto nameID = S.addDeclBaseNameRef(op->getName());
auto groupID = S.addDeclRef(op->getPrecedenceGroup());
SmallVector<DeclID, 1> <API key>;
for (auto *decl : op-><API key>())
<API key>.push_back(S.addDeclRef(decl));
unsigned abbrCode = S.DeclTypeAbbrCodes[InfixOperatorLayout::Code];
InfixOperatorLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode, nameID,
contextID.getOpaqueValue(), groupID,
<API key>);
}
// Shared implementation for prefix/postfix operators: name, context, and
// designated-nominal-type decl references, emitted via the given layout.
template <typename Layout>
void <API key>(const OperatorDecl *op) {
auto contextID = S.addDeclContextRef(op->getDeclContext());
SmallVector<DeclID, 1> <API key>;
for (auto *decl : op-><API key>())
<API key>.push_back(S.addDeclRef(decl));
unsigned abbrCode = S.DeclTypeAbbrCodes[Layout::Code];
Layout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(op->getName()),
contextID.getOpaqueValue(),
<API key>);
}
// Prefix operator: verify attrs, then emit via the shared template.
void <API key>(const PrefixOperatorDecl *op) {
using namespace decls_block;
<API key>(op);
<API key><<API key>>(op);
}
// Postfix operator: same shape as the prefix case.
void <API key>(const PostfixOperatorDecl *op) {
using namespace decls_block;
<API key>(op);
<API key><<API key>>(op);
}
// Serializes a typealias: underlying type, deserialization dependencies
// gathered from the (canonicalized) underlying type and generic
// requirements, access level, and generic parameters.
void visitTypeAliasDecl(const TypeAliasDecl *typeAlias) {
using namespace decls_block;
assert(!typeAlias->isObjC() && "ObjC typealias is not meaningful");
<API key>(typeAlias);
auto contextID = S.addDeclContextRef(typeAlias->getDeclContext());
auto underlying = typeAlias->getUnderlyingType();
llvm::SmallSetVector<Type, 4> dependencies;
<API key>(dependencies, underlying->getCanonicalType(),
/*excluding*/nullptr);
for (Requirement req : typeAlias-><API key>()) {
<API key>(dependencies, req,
/*excluding*/nullptr);
}
SmallVector<TypeID, 4> dependencyIDs;
for (Type dep : dependencies)
dependencyIDs.push_back(S.addTypeRef(dep));
uint8_t rawAccessLevel =
<API key>(typeAlias->getFormalAccess());
unsigned abbrCode = S.DeclTypeAbbrCodes[TypeAliasLayout::Code];
TypeAliasLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(typeAlias->getName()),
contextID.getOpaqueValue(),
S.addTypeRef(underlying),
/*no longer used*/TypeID(),
typeAlias->isImplicit(),
S.<API key>(
typeAlias->getGenericSignature()),
rawAccessLevel,
dependencyIDs);
writeGenericParams(typeAlias->getGenericParams());
}
// Serializes a generic type parameter: name, implicit bit, and its
// (depth, index) coordinates within the generic signature.
void <API key>(const <API key> *genericParam) {
using namespace decls_block;
<API key>(genericParam);
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(genericParam->getName()),
genericParam->isImplicit(),
genericParam->getDepth(),
genericParam->getIndex());
}
// Serializes an associated type: name, context, default type (redacted
// accessor), and the list of overridden associated types.
void <API key>(const AssociatedTypeDecl *assocType) {
using namespace decls_block;
<API key>(assocType);
auto contextID = S.addDeclContextRef(assocType->getDeclContext());
SmallVector<DeclID, 4> <API key>;
for (auto overridden : assocType->getOverriddenDecls()) {
<API key>.push_back(S.addDeclRef(overridden));
}
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(assocType->getName()),
contextID.getOpaqueValue(),
S.addTypeRef(assocType-><API key>()),
assocType->isImplicit(),
<API key>);
}
// Serializes a struct: inherited entries plus requirement-derived dependency
// types (shared trailing array, split by the inherited count), then generic
// params, members, and conformances.
void visitStructDecl(const StructDecl *theStruct) {
using namespace decls_block;
<API key>(theStruct);
auto contextID = S.addDeclContextRef(theStruct->getDeclContext());
auto conformances = theStruct-><API key>(
<API key>::All, nullptr);
SmallVector<TypeID, 4> <API key>;
for (auto inherited : theStruct->getInherited()) {
assert(!inherited.getType()->hasArchetype());
<API key>.push_back(S.addTypeRef(inherited.getType()));
}
llvm::SmallSetVector<Type, 4> dependencyTypes;
for (Requirement req : theStruct-><API key>()) {
<API key>(dependencyTypes, req,
/*excluding*/nullptr);
}
for (Type ty : dependencyTypes)
<API key>.push_back(S.addTypeRef(ty));
uint8_t rawAccessLevel =
<API key>(theStruct->getFormalAccess());
unsigned abbrCode = S.DeclTypeAbbrCodes[StructLayout::Code];
StructLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(theStruct->getName()),
contextID.getOpaqueValue(),
theStruct->isImplicit(),
theStruct->isObjC(),
S.<API key>(
theStruct->getGenericSignature()),
rawAccessLevel,
conformances.size(),
theStruct->getInherited().size(),
<API key>);
writeGenericParams(theStruct->getGenericParams());
// Structs are never class contexts for member-table purposes.
writeMembers(id, theStruct->getMembers(), false);
S.writeConformances(conformances, S.DeclTypeAbbrCodes);
}
// Serializes an enum. In addition to the struct-style dependencies, each
// payload-carrying element's payload type contributes dependencies
// (excluding types from the enum's own module).
void visitEnumDecl(const EnumDecl *theEnum) {
using namespace decls_block;
<API key>(theEnum);
auto contextID = S.addDeclContextRef(theEnum->getDeclContext());
auto conformances = theEnum-><API key>(
<API key>::All, nullptr);
SmallVector<TypeID, 4> <API key>;
for (auto inherited : theEnum->getInherited()) {
assert(!inherited.getType()->hasArchetype());
<API key>.push_back(S.addTypeRef(inherited.getType()));
}
llvm::SmallSetVector<Type, 4> dependencyTypes;
for (const EnumElementDecl *nextElt : theEnum->getAllElements()) {
if (!nextElt->hasAssociatedValues())
continue;
// FIXME: Types in the same module are still important for enums. It's
// possible an enum element has a payload that references a type
// declaration from the same module that can't be imported (for whatever
// reason). However, we need a more robust handling of deserialization
// dependencies that can handle circularities. rdar://problem/32359173
<API key>(dependencyTypes,
nextElt-><API key>(),
/*excluding*/theEnum->getParentModule());
}
for (Requirement req : theEnum-><API key>()) {
<API key>(dependencyTypes, req,
/*excluding*/nullptr);
}
for (Type ty : dependencyTypes)
<API key>.push_back(S.addTypeRef(ty));
uint8_t rawAccessLevel =
<API key>(theEnum->getFormalAccess());
unsigned abbrCode = S.DeclTypeAbbrCodes[EnumLayout::Code];
EnumLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(theEnum->getName()),
contextID.getOpaqueValue(),
theEnum->isImplicit(),
theEnum->isObjC(),
S.<API key>(
theEnum->getGenericSignature()),
S.addTypeRef(theEnum->getRawType()),
rawAccessLevel,
conformances.size(),
theEnum->getInherited().size(),
<API key>);
writeGenericParams(theEnum->getGenericParams());
writeMembers(id, theEnum->getMembers(), false);
S.writeConformances(conformances, S.DeclTypeAbbrCodes);
}
// Serializes a class: non-inherited conformances only, superclass
// dependencies (excluding the class itself), and class-specific bits such as
// the superclass type ref. Members are written with isClass=true.
void visitClassDecl(const ClassDecl *theClass) {
using namespace decls_block;
<API key>(theClass);
// Foreign (e.g. CF) classes are never serialized here.
assert(!theClass->isForeign());
auto contextID = S.addDeclContextRef(theClass->getDeclContext());
auto conformances = theClass-><API key>(
<API key>::NonInherited, nullptr);
SmallVector<TypeID, 4> <API key>;
for (auto inherited : theClass->getInherited()) {
assert(!inherited.getType()->hasArchetype());
<API key>.push_back(S.addTypeRef(inherited.getType()));
}
llvm::SmallSetVector<Type, 4> dependencyTypes;
if (theClass->hasSuperclass()) {
// FIXME: Nested types can still be a problem here: it's possible that (for
// whatever reason) they won't be able to be deserialized, in which case
// we'll be in trouble forming the actual superclass type. However, we
// need a more robust handling of deserialization dependencies that can
// handle circularities. rdar://problem/50835214
<API key>(dependencyTypes, theClass->getSuperclass(),
/*excluding*/theClass);
}
for (Requirement req : theClass-><API key>()) {
<API key>(dependencyTypes, req,
/*excluding*/nullptr);
}
for (Type ty : dependencyTypes)
<API key>.push_back(S.addTypeRef(ty));
uint8_t rawAccessLevel =
<API key>(theClass->getFormalAccess());
// Redacted query (non-const, hence the const_cast) — presumably whether
// the class inherits designated initializers; TODO confirm.
bool <API key> =
const_cast<ClassDecl *>(theClass)->
<API key>();
unsigned abbrCode = S.DeclTypeAbbrCodes[ClassLayout::Code];
ClassLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(theClass->getName()),
contextID.getOpaqueValue(),
theClass->isImplicit(),
theClass->isObjC(),
<API key>,
S.<API key>(
theClass->getGenericSignature()),
S.addTypeRef(theClass->getSuperclass()),
rawAccessLevel,
conformances.size(),
theClass->getInherited().size(),
<API key>);
writeGenericParams(theClass->getGenericParams());
writeMembers(id, theClass->getMembers(), true);
S.writeConformances(conformances, S.DeclTypeAbbrCodes);
}
// Serializes a protocol: inherited protocols (also recorded as
// dependencies), requirement-derived dependencies excluding the current
// module, the requirement signature, members, and default witnesses.
void visitProtocolDecl(const ProtocolDecl *proto) {
using namespace decls_block;
<API key>(proto);
auto contextID = S.addDeclContextRef(proto->getDeclContext());
SmallVector<TypeID, 4> <API key>;
llvm::SmallSetVector<Type, 4> dependencyTypes;
for (auto element : proto->getInherited()) {
assert(!element.getType()->hasArchetype());
<API key>.push_back(S.addTypeRef(element.getType()));
if (element.getType()->is<ProtocolType>())
dependencyTypes.insert(element.getType());
}
for (Requirement req : proto-><API key>()) {
// Requirements can be cyclic, so for now filter out any requirements
// it's better than nothing.
<API key>(dependencyTypes, req,
/*excluding*/S.M);
}
for (Type ty : dependencyTypes)
<API key>.push_back(S.addTypeRef(ty));
uint8_t rawAccessLevel = <API key>(proto->getFormalAccess());
unsigned abbrCode = S.DeclTypeAbbrCodes[ProtocolLayout::Code];
ProtocolLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(proto->getName()),
contextID.getOpaqueValue(),
proto->isImplicit(),
const_cast<ProtocolDecl *>(proto)
->requiresClass(),
proto->isObjC(),
proto-><API key>(),
rawAccessLevel, proto->getInherited().size(),
<API key>);
writeGenericParams(proto->getGenericParams());
S.<API key>(
proto-><API key>(), S.DeclTypeAbbrCodes);
writeMembers(id, proto->getMembers(), true);
<API key>(proto);
}
// Serializes a var/property. The trailing array packs, in order: accessor
// DeclIDs, property-wrapper backing/projection var DeclIDs (counts recorded
// separately), then dependency TypeIDs from the interface type.
void visitVarDecl(const VarDecl *var) {
using namespace decls_block;
<API key>(var);
auto contextID = S.addDeclContextRef(var->getDeclContext());
Accessors accessors = getAccessors(var);
uint8_t rawAccessLevel = <API key>(var->getFormalAccess());
// Setter access defaults to the getter's and is overridden below only for
// settable vars.
uint8_t <API key> = rawAccessLevel;
if (var->isSettable(nullptr))
<API key> =
<API key>(var-><API key>());
unsigned <API key> = 0;
Type ty = var->getInterfaceType();
SmallVector<TypeID, 2> arrayFields;
for (auto accessor : accessors.Decls)
arrayFields.push_back(S.addDeclRef(accessor));
// Property-wrapper backing storage (redacted accessor — presumably
// getPropertyWrapperBackingPropertyInfo; TODO confirm).
if (auto backingInfo = var-><API key>()) {
if (backingInfo.backingVar) {
++<API key>;
arrayFields.push_back(S.addDeclRef(backingInfo.backingVar));
}
if (backingInfo.storageWrapperVar) {
++<API key>;
arrayFields.push_back(S.addDeclRef(backingInfo.storageWrapperVar));
}
}
for (Type dependency : <API key>(ty->getCanonicalType()))
arrayFields.push_back(S.addTypeRef(dependency));
VarDecl *lazyStorage = nullptr;
if (var->getAttrs().hasAttribute<LazyAttr>())
lazyStorage = var-><API key>();
auto rawIntroducer = <API key>(var->getIntroducer());
unsigned numVTableEntries = <API key>(var);
unsigned abbrCode = S.DeclTypeAbbrCodes[VarLayout::Code];
VarLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(var->getName()),
contextID.getOpaqueValue(),
var->isImplicit(),
var->isObjC(),
var->isStatic(),
rawIntroducer,
var-><API key>(),
var->isGetterMutating(),
var->isSetterMutating(),
var-><API key>(),
S.addDeclRef(lazyStorage),
accessors.OpaqueReadOwnership,
accessors.ReadImpl,
accessors.WriteImpl,
accessors.ReadWriteImpl,
accessors.Decls.size(),
S.addTypeRef(ty),
var-><API key>(),
S.addDeclRef(var->getOverriddenDecl()),
rawAccessLevel, <API key>,
S.addDeclRef(var-><API key>()),
<API key>,
numVTableEntries,
arrayFields);
}
// Serializes a parameter, including the source text of normal / stored-
// property default arguments. Diagnoses (and aborts on) parameters whose
// interface type contains errors — those must not reach serialization.
void visitParamDecl(const ParamDecl *param) {
using namespace decls_block;
<API key>(param);
auto contextID = S.addDeclContextRef(param->getDeclContext());
Type interfaceType = param->getInterfaceType();
// Only save the text for normal and stored property default arguments, not
// any of the special ones.
StringRef defaultArgumentText;
SmallString<128> scratch;
swift::DefaultArgumentKind argKind = param-><API key>();
if (argKind == swift::DefaultArgumentKind::Normal ||
argKind == swift::DefaultArgumentKind::StoredProperty)
defaultArgumentText =
param-><API key>(scratch);
unsigned abbrCode = S.DeclTypeAbbrCodes[ParamLayout::Code];
ParamLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(param->getArgumentName()),
S.addDeclBaseNameRef(param->getName()),
contextID.getOpaqueValue(),
<API key>(param->getSpecifier()),
S.addTypeRef(interfaceType),
param-><API key>(),
param->isVariadic(),
param->isAutoClosure(),
<API key>(argKind),
defaultArgumentText);
// Dump context for debugging before aborting — an error type here means a
// type-checking failure leaked into serialization.
if (interfaceType->hasError()) {
param->getDeclContext()->dumpContext();
interfaceType->dump();
llvm_unreachable("error in interface type of parameter");
}
}
/// Serializes a FuncDecl into a FUNC record, followed by its generic params,
/// parameter list, and (if present) its foreign error convention.
void visitFuncDecl(const FuncDecl *fn) {
using namespace decls_block;
<API key>(fn);
auto contextID = S.addDeclContextRef(fn->getDeclContext());
unsigned abbrCode = S.DeclTypeAbbrCodes[FuncLayout::Code];
// Trailing data: base name, then argument labels, then type dependencies.
SmallVector<IdentifierID, 4> <API key>;
<API key>.push_back(
S.addDeclBaseNameRef(fn->getFullName().getBaseName()));
for (auto argName : fn->getFullName().getArgumentNames())
<API key>.push_back(S.addDeclBaseNameRef(argName));
uint8_t rawAccessLevel = <API key>(fn->getFormalAccess());
Type ty = fn->getInterfaceType();
for (auto dependency : <API key>(ty->getCanonicalType()))
<API key>.push_back(S.addTypeRef(dependency));
FuncLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
contextID.getOpaqueValue(),
fn->isImplicit(),
fn->isStatic(),
uint8_t(
<API key>(fn->getStaticSpelling())),
fn->isObjC(),
uint8_t(
<API key>(fn->getSelfAccessKind())),
fn-><API key>(),
fn->hasThrows(),
S.<API key>(
fn->getGenericSignature()),
S.addTypeRef(fn-><API key>()),
fn-><API key>(),
S.addDeclRef(fn->getOperatorDecl()),
S.addDeclRef(fn->getOverriddenDecl()),
fn->getFullName().getArgumentNames().size() +
fn->getFullName().isCompoundName(),
rawAccessLevel,
fn->needsNewVTableEntry(),
S.addDeclRef(fn-><API key>()),
<API key>);
writeGenericParams(fn->getGenericParams());
// Write the body parameters.
writeParameterList(fn->getParameters());
if (auto errorConvention = fn-><API key>())
<API key>(*errorConvention);
<API key>(fn);
}
/// Serializes an OpaqueTypeDecl (the decl behind `some P` return types) into
/// an OPAQUE_TYPE record, followed by its generic parameter list.
void visitOpaqueTypeDecl(const OpaqueTypeDecl *opaqueDecl) {
using namespace decls_block;
<API key>(opaqueDecl);
auto namingDeclID = S.addDeclRef(opaqueDecl->getNamingDecl());
auto contextID = S.addDeclContextRef(opaqueDecl->getDeclContext());
auto interfaceSigID = S.<API key>(
opaqueDecl-><API key>());
auto interfaceTypeID =
S.addTypeRef(opaqueDecl-><API key>());
auto genericSigID = S.<API key>(opaqueDecl->getGenericSignature());
// 0 means "no underlying type substitutions recorded".
SubstitutionMapID underlyingTypeID = 0;
if (auto underlying = opaqueDecl-><API key>())
underlyingTypeID = S.<API key>(*underlying);
unsigned abbrCode = S.DeclTypeAbbrCodes[OpaqueTypeLayout::Code];
OpaqueTypeLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
contextID.getOpaqueValue(), namingDeclID,
interfaceSigID, interfaceTypeID, genericSigID,
underlyingTypeID);
writeGenericParams(opaqueDecl->getGenericParams());
}
/// Serializes an AccessorDecl (getter/setter/read/modify/etc.) into an
/// ACCESSOR record, followed by generic params, parameter list, and foreign
/// error convention if present.
void visitAccessorDecl(const AccessorDecl *fn) {
// Accessor synthesis and type checking is now sufficiently lazy that
// we might have unvalidated accessors in a primary file.
// FIXME: Once accessor synthesis and getInterfaceType() itself are
// request-ified this goes away.
if (!fn->hasInterfaceType()) {
assert(fn->isImplicit());
// FIXME: Remove this one
(void)fn->getInterfaceType();
}
using namespace decls_block;
<API key>(fn);
auto contextID = S.addDeclContextRef(fn->getDeclContext());
unsigned abbrCode = S.DeclTypeAbbrCodes[AccessorLayout::Code];
uint8_t rawAccessLevel = <API key>(fn->getFormalAccess());
uint8_t rawAccessorKind =
uint8_t(<API key>(fn->getAccessorKind()));
Type ty = fn->getInterfaceType();
// Trailing data: type dependencies of the canonical interface type.
SmallVector<IdentifierID, 4> dependencies;
for (auto dependency : <API key>(ty->getCanonicalType()))
dependencies.push_back(S.addTypeRef(dependency));
AccessorLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
contextID.getOpaqueValue(),
fn->isImplicit(),
fn->isStatic(),
uint8_t(<API key>(
fn->getStaticSpelling())),
fn->isObjC(),
uint8_t(<API key>(
fn->getSelfAccessKind())),
fn-><API key>(),
fn->hasThrows(),
S.<API key>(
fn->getGenericSignature()),
S.addTypeRef(fn-><API key>()),
fn-><API key>(),
S.addDeclRef(fn->getOverriddenDecl()),
S.addDeclRef(fn->getStorage()),
rawAccessorKind,
rawAccessLevel,
fn->needsNewVTableEntry(),
fn->isTransparent(),
dependencies);
writeGenericParams(fn->getGenericParams());
// Write the body parameters.
writeParameterList(fn->getParameters());
if (auto errorConvention = fn-><API key>())
<API key>(*errorConvention);
<API key>(fn);
}
/// Serializes an EnumElementDecl into an ENUM_ELEMENT record. Raw values are
/// only persisted for @objc enums, where they are ABI; Swift enums recompute
/// them.
void <API key>(const EnumElementDecl *elem) {
using namespace decls_block;
<API key>(elem);
auto contextID = S.addDeclContextRef(elem->getDeclContext());
// Trailing data: base name, argument labels, then type dependencies.
SmallVector<IdentifierID, 4> <API key>;
auto baseName = S.addDeclBaseNameRef(elem->getBaseName());
<API key>.push_back(baseName);
for (auto argName : elem->getFullName().getArgumentNames())
<API key>.push_back(S.addDeclBaseNameRef(argName));
Type ty = elem->getInterfaceType();
for (Type dependency : <API key>(ty->getCanonicalType()))
<API key>.push_back(S.addTypeRef(dependency));
// We only serialize the raw values of @objc enums, because they're part
// of the ABI. That isn't the case for Swift enums.
auto rawValueKind = <API key>::None;
bool isNegative = false, isRawValueImplicit = false;
StringRef RawValueText;
if (elem->getParentEnum()->isObjC()) {
// Currently ObjC enums always have integer raw values.
rawValueKind = <API key>::IntegerLiteral;
auto ILE = cast<IntegerLiteralExpr>(elem-><API key>());
RawValueText = ILE->getDigitsText();
isNegative = ILE->isNegative();
isRawValueImplicit = ILE->isImplicit();
}
unsigned abbrCode = S.DeclTypeAbbrCodes[EnumElementLayout::Code];
EnumElementLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
contextID.getOpaqueValue(),
elem->isImplicit(),
elem->hasAssociatedValues(),
(unsigned)rawValueKind,
isRawValueImplicit,
isNegative,
S.addUniquedStringRef(RawValueText),
elem->getFullName().getArgumentNames().size()+1,
<API key>);
// Cases with associated values carry a parameter list.
if (auto *PL = elem->getParameterList())
writeParameterList(PL);
}
/// Serializes a SubscriptDecl into a SUBSCRIPT record, followed by generic
/// params and the index parameter list.
void visitSubscriptDecl(const SubscriptDecl *subscript) {
using namespace decls_block;
<API key>(subscript);
auto contextID = S.addDeclContextRef(subscript->getDeclContext());
Accessors accessors = getAccessors(subscript);
// Trailing data: argument labels, accessor refs, then type dependencies.
SmallVector<IdentifierID, 4> <API key>;
for (auto argName : subscript->getFullName().getArgumentNames())
<API key>.push_back(S.addDeclBaseNameRef(argName));
for (auto accessor : accessors.Decls)
<API key>.push_back(S.addDeclRef(accessor));
Type ty = subscript->getInterfaceType();
for (Type dependency : <API key>(ty->getCanonicalType()))
<API key>.push_back(S.addTypeRef(dependency));
uint8_t rawAccessLevel =
<API key>(subscript->getFormalAccess());
// Setter access only differs from the subscript's own access when the
// subscript supports mutation.
uint8_t <API key> = rawAccessLevel;
if (subscript->supportsMutation())
<API key> =
<API key>(subscript-><API key>());
uint8_t rawStaticSpelling =
uint8_t(<API key>(subscript->getStaticSpelling()));
unsigned numVTableEntries = <API key>(subscript);
unsigned abbrCode = S.DeclTypeAbbrCodes[SubscriptLayout::Code];
SubscriptLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
contextID.getOpaqueValue(),
subscript->isImplicit(),
subscript->isObjC(),
subscript->isGetterMutating(),
subscript->isSetterMutating(),
accessors.OpaqueReadOwnership,
accessors.ReadImpl,
accessors.WriteImpl,
accessors.ReadWriteImpl,
accessors.Decls.size(),
S.<API key>(
subscript->getGenericSignature()),
S.addTypeRef(subscript-><API key>()),
subscript-><API key>(),
S.addDeclRef(subscript->getOverriddenDecl()),
rawAccessLevel,
<API key>,
rawStaticSpelling,
subscript->
getFullName().getArgumentNames().size(),
S.addDeclRef(subscript-><API key>()),
numVTableEntries,
<API key>);
writeGenericParams(subscript->getGenericParams());
writeParameterList(subscript->getIndices());
}
/// Serializes a ConstructorDecl into a CONSTRUCTOR record, followed by
/// generic params, parameter list, and foreign error convention if present.
void <API key>(const ConstructorDecl *ctor) {
using namespace decls_block;
<API key>(ctor);
auto contextID = S.addDeclContextRef(ctor->getDeclContext());
// Trailing data: argument labels, then type dependencies.
SmallVector<IdentifierID, 4> <API key>;
for (auto argName : ctor->getFullName().getArgumentNames())
<API key>.push_back(S.addDeclBaseNameRef(argName));
Type ty = ctor->getInterfaceType();
for (Type dependency : <API key>(ty->getCanonicalType()))
<API key>.push_back(S.addTypeRef(dependency));
uint8_t rawAccessLevel = <API key>(ctor->getFormalAccess());
// 'required' is only recorded where it is first introduced; an override of
// an already-required initializer does not repeat the flag.
bool firstTimeRequired = ctor->isRequired();
if (auto *overridden = ctor->getOverriddenDecl())
if (firstTimeRequired && overridden->isRequired())
firstTimeRequired = false;
unsigned abbrCode = S.DeclTypeAbbrCodes[ConstructorLayout::Code];
ConstructorLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
contextID.getOpaqueValue(),
ctor->isFailable(),
ctor-><API key>(),
ctor->isImplicit(),
ctor->isObjC(),
ctor-><API key>(),
ctor->hasThrows(),
<API key>(
ctor->getInitKind()),
S.<API key>(
ctor->getGenericSignature()),
S.addDeclRef(ctor->getOverriddenDecl()),
rawAccessLevel,
ctor->needsNewVTableEntry(),
firstTimeRequired,
ctor->getFullName().getArgumentNames().size(),
<API key>);
writeGenericParams(ctor->getGenericParams());
writeParameterList(ctor->getParameters());
if (auto errorConvention = ctor-><API key>())
<API key>(*errorConvention);
<API key>(ctor);
}
/// Serializes a DestructorDecl into a DESTRUCTOR record. Destructors have no
/// name, parameters, or access level to record.
void visitDestructorDecl(const DestructorDecl *dtor) {
using namespace decls_block;
<API key>(dtor);
auto contextID = S.addDeclContextRef(dtor->getDeclContext());
unsigned abbrCode = S.DeclTypeAbbrCodes[DestructorLayout::Code];
DestructorLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
contextID.getOpaqueValue(),
dtor->isImplicit(),
dtor->isObjC(),
S.<API key>(
dtor->getGenericSignature()));
<API key>(dtor);
}
// The following decl kinds are never serialized. Top-level code is simply
// dropped; all the others indicate a serializer invariant violation and
// abort via llvm_unreachable.
void <API key>(const TopLevelCodeDecl *) {
// Top-level code is ignored; external clients don't need to know about it.
}
void visitImportDecl(const ImportDecl *) {
llvm_unreachable("import decls should not be serialized");
}
void visitIfConfigDecl(const IfConfigDecl *) {
llvm_unreachable("#if block declarations should not be serialized");
}
void <API key>(const PoundDiagnosticDecl *) {
llvm_unreachable("#warning/#error declarations should not be serialized");
}
void visitEnumCaseDecl(const EnumCaseDecl *) {
llvm_unreachable("enum case decls should not be serialized");
}
void visitModuleDecl(const ModuleDecl *) {
llvm_unreachable("module decls are not serialized");
}
void <API key>(const MissingMemberDecl *) {
llvm_unreachable("member placeholders shouldn't be serialized");
}
};
/// Writes a single queued decl to the AST block: either a cross-reference
/// (for decls from other modules/files) or a full record via DeclSerializer.
/// Guarantees via SWIFT_DEFER that *something* was emitted, since a decl
/// whose serialization produces no bits would corrupt the offset table.
void Serializer::writeASTBlockEntity(const Decl *D) {
using namespace decls_block;
<API key> trace("serializing", D);
assert(DeclsToSerialize.hasRef(D));
BitOffset initialOffset = Out.GetCurrentBitNo();
SWIFT_DEFER {
// This is important enough to leave on in Release builds.
if (initialOffset == Out.GetCurrentBitNo()) {
llvm::<API key> message("failed to serialize anything");
abort();
}
};
assert(!D->isInvalid() && "cannot create a module with an invalid decl");
if (isDeclXRef(D)) {
writeCrossReference(D);
return;
}
assert(!D->hasClangNode() && "imported decls should use cross-references");
DeclSerializer(*this, DeclsToSerialize.addRef(D)).visit(D);
}
#define SIMPLE_CASE(TYPENAME, VALUE) \
case swift::TYPENAME::VALUE: return uint8_t(serialization::TYPENAME::VALUE);
/// Translate from the AST function representation enum to the Serialization enum
/// values, which are guaranteed to be stable.
static uint8_t <API key>(
swift::FunctionType::Representation cc) {
// Exhaustive over FunctionType::Representation; a new case added to the AST
// enum will produce a compiler warning here.
switch (cc) {
SIMPLE_CASE(<API key>, Swift)
SIMPLE_CASE(<API key>, Block)
SIMPLE_CASE(<API key>, Thin)
SIMPLE_CASE(<API key>, CFunctionPointer)
}
llvm_unreachable("bad calling convention");
}
/// Translate from the AST function representation enum to the Serialization enum
/// values, which are guaranteed to be stable.
static uint8_t <API key>(
swift::SILFunctionType::Representation cc) {
// Exhaustive over SILFunctionType::Representation.
switch (cc) {
SIMPLE_CASE(<API key>, Thick)
SIMPLE_CASE(<API key>, Block)
SIMPLE_CASE(<API key>, Thin)
SIMPLE_CASE(<API key>, CFunctionPointer)
SIMPLE_CASE(<API key>, Method)
SIMPLE_CASE(<API key>, ObjCMethod)
SIMPLE_CASE(<API key>, WitnessMethod)
SIMPLE_CASE(<API key>, Closure)
}
llvm_unreachable("bad calling convention");
}
/// Translate from the AST coroutine-kind enum to the Serialization enum
/// values, which are guaranteed to be stable.
static uint8_t <API key>(
swift::SILCoroutineKind kind) {
switch (kind) {
SIMPLE_CASE(SILCoroutineKind, None)
SIMPLE_CASE(SILCoroutineKind, YieldOnce)
SIMPLE_CASE(SILCoroutineKind, YieldMany)
}
llvm_unreachable("bad kind");
}
/// Translate from the AST ownership enum to the Serialization enum
/// values, which are guaranteed to be stable.
static uint8_t
<API key>(swift::ReferenceOwnership ownership) {
// The non-Strong cases are generated from ReferenceStorage.def so the switch
// stays in sync with the set of reference-storage kinds.
switch (ownership) {
SIMPLE_CASE(ReferenceOwnership, Strong)
#define REF_STORAGE(Name, ...) \
SIMPLE_CASE(ReferenceOwnership, Name)
#include "swift/AST/ReferenceStorage.def"
}
llvm_unreachable("bad ownership kind");
}
/// Translate from the AST ownership enum to the Serialization enum
/// values, which are guaranteed to be stable.
static uint8_t <API key>(swift::ValueOwnership ownership) {
switch (ownership) {
SIMPLE_CASE(ValueOwnership, Default)
SIMPLE_CASE(ValueOwnership, InOut)
SIMPLE_CASE(ValueOwnership, Shared)
SIMPLE_CASE(ValueOwnership, Owned)
}
llvm_unreachable("bad ownership kind");
}
/// Translate from the AST ParameterConvention enum to the
/// Serialization enum values, which are guaranteed to be stable.
static uint8_t <API key>(swift::ParameterConvention pc) {
switch (pc) {
SIMPLE_CASE(ParameterConvention, Indirect_In)
SIMPLE_CASE(ParameterConvention, <API key>)
SIMPLE_CASE(ParameterConvention, <API key>)
SIMPLE_CASE(ParameterConvention, Indirect_Inout)
SIMPLE_CASE(ParameterConvention, <API key>)
SIMPLE_CASE(ParameterConvention, Direct_Owned)
SIMPLE_CASE(ParameterConvention, Direct_Unowned)
SIMPLE_CASE(ParameterConvention, Direct_Guaranteed)
}
llvm_unreachable("bad parameter convention kind");
}
/// Translate from the AST ResultConvention enum to the
/// Serialization enum values, which are guaranteed to be stable.
static uint8_t <API key>(swift::ResultConvention rc) {
switch (rc) {
SIMPLE_CASE(ResultConvention, Indirect)
SIMPLE_CASE(ResultConvention, Owned)
SIMPLE_CASE(ResultConvention, Unowned)
SIMPLE_CASE(ResultConvention, UnownedInnerPointer)
SIMPLE_CASE(ResultConvention, Autoreleased)
}
llvm_unreachable("bad result convention kind");
}
#undef SIMPLE_CASE
/// Find the typealias given a builtin type.
/// Looks up, in the Builtin module, the TypeAliasDecl whose name matches the
/// printed name of the given builtin type (with the "Builtin." prefix
/// removed). Asserts that exactly one match exists.
static TypeAliasDecl *<API key>(ASTContext &Ctx, Type T) {
// Get the type name by chopping off "Builtin.".
llvm::SmallString<32> FullName;
llvm::raw_svector_ostream OS(FullName);
T->print(OS);
assert(FullName.startswith(<API key>));
StringRef TypeName = FullName.substr(8);
SmallVector<ValueDecl*, 4> CurModuleResults;
Ctx.TheBuiltinModule->lookupValue(Ctx.getIdentifier(TypeName),
NLKind::QualifiedLookup,
CurModuleResults);
assert(CurModuleResults.size() == 1);
return cast<TypeAliasDecl>(CurModuleResults[0]);
}
/// Visitor that emits one serialized record per Swift type node.
///
/// Type kinds that can only appear transiently during type checking (errors,
/// type variables, lvalues, inout, modules) deliberately abort: they must
/// never reach a serialized module.
class Serializer::TypeSerializer : public TypeVisitor<TypeSerializer> {
Serializer &S;
public:
explicit TypeSerializer(Serializer &S) : S(S) {}
/// If this gets referenced, we forgot to handle a type.
void visitType(const TypeBase *) = delete;
void visitErrorType(const ErrorType *) {
llvm_unreachable("should not serialize an invalid type");
}
void visitUnresolvedType(const UnresolvedType *) {
llvm_unreachable("should not serialize an invalid type");
}
void visitModuleType(const ModuleType *) {
llvm_unreachable("modules are currently not first-class values");
}
void visitInOutType(const InOutType *) {
llvm_unreachable("inout types are only used in function type parameters");
}
void visitLValueType(const LValueType *) {
llvm_unreachable("lvalue types are only used in function bodies");
}
void <API key>(const TypeVariableType *) {
llvm_unreachable("type variables should not escape the type checker");
}
/// Builtin types are serialized as a reference to the Builtin module's
/// corresponding typealias decl rather than as a structural record.
void <API key>(Type ty) {
using namespace decls_block;
TypeAliasDecl *typeAlias =
<API key>(S.M->getASTContext(), ty);
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclRef(typeAlias,
/*allowTypeAliasXRef*/true),
TypeID());
}
void visitBuiltinType(BuiltinType *ty) {
<API key>(ty);
}
void visitSILTokenType(SILTokenType *ty) {
// This is serialized like a BuiltinType, even though it isn't one.
<API key>(ty);
}
void visitTypeAliasType(const TypeAliasType *alias) {
using namespace decls_block;
const TypeAliasDecl *typeAlias = alias->getDecl();
auto underlyingType = typeAlias->getUnderlyingType();
unsigned abbrCode = S.DeclTypeAbbrCodes[TypeAliasTypeLayout::Code];
TypeAliasTypeLayout::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
S.addDeclRef(typeAlias, /*allowTypeAliasXRef*/true),
S.addTypeRef(alias->getParent()),
S.addTypeRef(underlyingType),
S.addTypeRef(alias-><API key>()),
S.<API key>(alias->getSubstitutionMap()));
}
/// Helper for type kinds whose record is just one wrapped type ref.
template <typename Layout>
void <API key>(Type wrappedTy) {
unsigned abbrCode = S.DeclTypeAbbrCodes[Layout::Code];
Layout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addTypeRef(wrappedTy));
}
void visitParenType(const ParenType *parenTy) {
using namespace decls_block;
assert(parenTy->getParameterFlags().isNone());
<API key><ParenTypeLayout>(parenTy->getUnderlyingType());
}
void visitTupleType(const TupleType *tupleTy) {
using namespace decls_block;
// A TUPLE_TYPE record followed by one TUPLE_TYPE_ELT per element.
unsigned abbrCode = S.DeclTypeAbbrCodes[TupleTypeLayout::Code];
TupleTypeLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode);
abbrCode = S.DeclTypeAbbrCodes[TupleTypeEltLayout::Code];
for (auto &elt : tupleTy->getElements()) {
assert(elt.getParameterFlags().isNone());
TupleTypeEltLayout::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(elt.getName()),
S.addTypeRef(elt.getType()));
}
}
void visitNominalType(const NominalType *nominalTy) {
using namespace decls_block;
unsigned abbrCode = S.DeclTypeAbbrCodes[NominalTypeLayout::Code];
NominalTypeLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclRef(nominalTy->getDecl()),
S.addTypeRef(nominalTy->getParent()));
}
/// Shared emission path for MetatypeType and its existential counterpart.
template <typename Layout>
void visitMetatypeImpl(const AnyMetatypeType *metatypeTy) {
unsigned abbrCode = S.DeclTypeAbbrCodes[Layout::Code];
// Map the metatype representation.
auto repr = <API key>(metatypeTy);
Layout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addTypeRef(metatypeTy->getInstanceType()),
static_cast<uint8_t>(repr));
}
void <API key>(const <API key> *metatypeTy) {
using namespace decls_block;
visitMetatypeImpl<<API key>>(metatypeTy);
}
void visitMetatypeType(const MetatypeType *metatypeTy) {
using namespace decls_block;
visitMetatypeImpl<MetatypeTypeLayout>(metatypeTy);
}
void <API key>(const DynamicSelfType *dynamicSelfTy) {
using namespace decls_block;
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
S.addTypeRef(dynamicSelfTy->getSelfType()));
}
/// Primary archetypes are recorded by generic signature plus the
/// depth/index of their interface type.
void <API key>(const <API key> *archetypeTy) {
using namespace decls_block;
auto sig = archetypeTy-><API key>()->getGenericSignature();
GenericSignatureID sigID = S.<API key>(sig);
auto interfaceType = archetypeTy->getInterfaceType()
->castTo<<API key>>();
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
sigID,
interfaceType->getDepth(),
interfaceType->getIndex());
}
void <API key>(const OpenedArchetypeType *archetypeTy) {
using namespace decls_block;
<API key><<API key>>(
archetypeTy-><API key>());
}
void
<API key>(const <API key> *archetypeTy) {
using namespace decls_block;
auto declID = S.addDeclRef(archetypeTy->getDecl());
auto substMapID = S.<API key>(archetypeTy->getSubstitutions());
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
declID, substMapID);
}
void <API key>(const NestedArchetypeType *archetypeTy) {
using namespace decls_block;
auto rootTypeID = S.addTypeRef(archetypeTy->getRoot());
auto interfaceTypeID = S.addTypeRef(archetypeTy->getInterfaceType());
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
rootTypeID, interfaceTypeID);
}
/// Generic type parameters are stored either as a decl reference or, for
/// cross-referenced decl contexts, as a raw depth/index pair (index is
/// biased by one so 0 can mean "decl reference form").
void <API key>(const <API key> *genericParam) {
using namespace decls_block;
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
DeclID declIDOrDepth;
unsigned indexPlusOne;
if (genericParam->getDecl() &&
!(genericParam->getDecl()->getDeclContext()-><API key>() &&
S.isDeclXRef(genericParam->getDecl()))) {
declIDOrDepth = S.addDeclRef(genericParam->getDecl());
indexPlusOne = 0;
} else {
declIDOrDepth = genericParam->getDepth();
indexPlusOne = genericParam->getIndex() + 1;
}
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
declIDOrDepth, indexPlusOne);
}
void <API key>(const DependentMemberType *dependent) {
using namespace decls_block;
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
assert(dependent->getAssocType() && "Unchecked dependent member type");
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
S.addTypeRef(dependent->getBase()),
S.addDeclRef(dependent->getAssocType()));
}
/// Emits one FUNCTION_PARAM record per parameter; used by both function
/// type visitors after their main record.
void <API key>(const AnyFunctionType *fnTy) {
using namespace decls_block;
unsigned abbrCode = S.DeclTypeAbbrCodes[FunctionParamLayout::Code];
for (auto &param : fnTy->getParams()) {
auto paramFlags = param.getParameterFlags();
auto rawOwnership =
<API key>(paramFlags.getValueOwnership());
FunctionParamLayout::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
S.addDeclBaseNameRef(param.getLabel()),
S.addTypeRef(param.getPlainType()), paramFlags.isVariadic(),
paramFlags.isAutoClosure(), rawOwnership);
}
}
void visitFunctionType(const FunctionType *fnTy) {
using namespace decls_block;
unsigned abbrCode = S.DeclTypeAbbrCodes[FunctionTypeLayout::Code];
FunctionTypeLayout::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addTypeRef(fnTy->getResult()),
<API key>(fnTy->getRepresentation()),
fnTy->isNoEscape(),
fnTy->throws());
<API key>(fnTy);
}
void <API key>(const GenericFunctionType *fnTy) {
using namespace decls_block;
assert(!fnTy->isNoEscape());
auto genericSig = fnTy->getGenericSignature();
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addTypeRef(fnTy->getResult()),
<API key>(fnTy->getRepresentation()),
fnTy->throws(),
S.<API key>(genericSig));
<API key>(fnTy);
}
void <API key>(const SILBlockStorageType *storageTy) {
using namespace decls_block;
<API key><<API key>>(
storageTy->getCaptureType());
}
void visitSILBoxType(const SILBoxType *boxTy) {
using namespace decls_block;
unsigned abbrCode = S.DeclTypeAbbrCodes[SILBoxTypeLayout::Code];
SILLayoutID layoutRef = S.addSILLayoutRef(boxTy->getLayout());
SILBoxTypeLayout::emitRecord(
S.Out, S.ScratchRecord, abbrCode, layoutRef,
S.<API key>(boxTy->getSubstitutions()));
}
/// SIL function types interleave their parameters, yields, results, and
/// optional error result as (type, convention) pairs in one variable-length
/// trailing array; counts in the main record delimit the sections.
void <API key>(const SILFunctionType *fnTy) {
using namespace decls_block;
auto representation = fnTy->getRepresentation();
auto <API key> =
<API key>(representation);
SmallVector<TypeID, 8> variableData;
for (auto param : fnTy->getParameters()) {
variableData.push_back(S.addTypeRef(param.getType()));
unsigned conv = <API key>(param.getConvention());
variableData.push_back(TypeID(conv));
}
for (auto yield : fnTy->getYields()) {
variableData.push_back(S.addTypeRef(yield.getType()));
unsigned conv = <API key>(yield.getConvention());
variableData.push_back(TypeID(conv));
}
for (auto result : fnTy->getResults()) {
variableData.push_back(S.addTypeRef(result.getType()));
unsigned conv = <API key>(result.getConvention());
variableData.push_back(TypeID(conv));
}
if (fnTy->hasErrorResult()) {
auto abResult = fnTy->getErrorResult();
variableData.push_back(S.addTypeRef(abResult.getType()));
unsigned conv = <API key>(abResult.getConvention());
variableData.push_back(TypeID(conv));
}
auto sig = fnTy->getGenericSignature();
auto stableCoroutineKind =
<API key>(fnTy->getCoroutineKind());
auto <API key> =
<API key>(fnTy->getCalleeConvention());
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
stableCoroutineKind, <API key>,
<API key>, fnTy->isPseudogeneric(), fnTy->isNoEscape(),
fnTy->hasErrorResult(), fnTy->getParameters().size(),
fnTy->getNumYields(), fnTy->getNumResults(),
S.<API key>(sig), variableData);
if (auto conformance = fnTy-><API key>())
S.writeConformance(*conformance, S.DeclTypeAbbrCodes);
}
void visitArraySliceType(const ArraySliceType *sliceTy) {
using namespace decls_block;
<API key><<API key>>(sliceTy->getBaseType());
}
void visitDictionaryType(const DictionaryType *dictTy) {
using namespace decls_block;
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addTypeRef(dictTy->getKeyType()),
S.addTypeRef(dictTy->getValueType()));
}
void visitOptionalType(const OptionalType *optionalTy) {
using namespace decls_block;
<API key><OptionalTypeLayout>(optionalTy->getBaseType());
}
void
<API key>(const <API key> *composition) {
using namespace decls_block;
SmallVector<TypeID, 4> protocols;
for (auto proto : composition->getMembers())
protocols.push_back(S.addTypeRef(proto));
unsigned abbrCode =
S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
composition-><API key>(),
protocols);
}
void <API key>(const <API key> *refTy) {
using namespace decls_block;
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
auto stableOwnership =
<API key>(refTy->getOwnership());
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
stableOwnership,
S.addTypeRef(refTy->getReferentType()));
}
void <API key>(const UnboundGenericType *generic) {
using namespace decls_block;
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(
S.Out, S.ScratchRecord, abbrCode,
S.addDeclRef(generic->getDecl(), /*allowTypeAliasXRef*/true),
S.addTypeRef(generic->getParent()));
}
void <API key>(const BoundGenericType *generic) {
using namespace decls_block;
SmallVector<TypeID, 8> genericArgIDs;
for (auto next : generic->getGenericArgs())
genericArgIDs.push_back(S.addTypeRef(next));
unsigned abbrCode = S.DeclTypeAbbrCodes[<API key>::Code];
<API key>::emitRecord(S.Out, S.ScratchRecord, abbrCode,
S.addDeclRef(generic->getDecl()),
S.addTypeRef(generic->getParent()),
genericArgIDs);
}
};
/// Writes a single queued type to the AST block via TypeSerializer, with the
/// same "must emit something" guarantee as the Decl overload.
void Serializer::writeASTBlockEntity(Type ty) {
using namespace decls_block;
<API key> traceRAII(ty->getASTContext(), "serializing", ty);
assert(TypesToSerialize.hasRef(ty));
BitOffset initialOffset = Out.GetCurrentBitNo();
SWIFT_DEFER {
// This is important enough to leave on in Release builds.
if (initialOffset == Out.GetCurrentBitNo()) {
llvm::<API key> message("failed to serialize anything");
abort();
}
};
TypeSerializer(*this).visit(ty);
}
/// Drains one pending-entity queue, writing each entity at the current bit
/// offset. Returns true if anything was written (serializing one entity may
/// enqueue more into other queues, so callers loop until all return false).
template <typename <API key>>
bool Serializer::<API key>(
<API key> &entities) {
if (!entities.hasMoreToSerialize())
return false;
while (auto next = entities.popNext(Out.GetCurrentBitNo()))
writeASTBlockEntity(next.getValue());
return true;
}
/// Emits the decls-and-types block: first registers the abbreviation for
/// every record layout used in the block, then drains all pending-entity
/// queues to a fixed point (writing one entity can enqueue more).
void Serializer::<API key>() {
BCBlockRAII restoreBlock(Out, <API key>, 8);
using namespace decls_block;
// Register every layout's abbreviation up front so DeclTypeAbbrCodes is
// fully populated before any record is emitted.
<API key><<API key>>();
<API key><TypeAliasTypeLayout>();
<API key><<API key>>();
<API key><<API key>>();
<API key><NominalTypeLayout>();
<API key><ParenTypeLayout>();
<API key><TupleTypeLayout>();
<API key><TupleTypeEltLayout>();
<API key><FunctionTypeLayout>();
<API key><FunctionParamLayout>();
<API key><MetatypeTypeLayout>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><SILBoxTypeLayout>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><OptionalTypeLayout>();
<API key><<API key>>();
<API key><TypeAliasLayout>();
<API key><<API key>>();
<API key><<API key>>();
<API key><StructLayout>();
<API key><ConstructorLayout>();
<API key><VarLayout>();
<API key><ParamLayout>();
<API key><FuncLayout>();
<API key><AccessorLayout>();
<API key><OpaqueTypeLayout>();
<API key><<API key>>();
<API key><ProtocolLayout>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><InfixOperatorLayout>();
<API key><<API key>>();
<API key><ClassLayout>();
<API key><EnumLayout>();
<API key><EnumElementLayout>();
<API key><SubscriptLayout>();
<API key><ExtensionLayout>();
<API key><DestructorLayout>();
<API key><ParameterListLayout>();
<API key><ParenPatternLayout>();
<API key><TuplePatternLayout>();
<API key><<API key>>();
<API key><NamedPatternLayout>();
<API key><VarPatternLayout>();
<API key><AnyPatternLayout>();
<API key><TypedPatternLayout>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><SILLayoutLayout>();
<API key><<API key>>();
<API key><<API key>>();
<API key><<API key>>();
<API key><MembersLayout>();
<API key><XRefLayout>();
// One abbreviation per declared attribute kind, generated from Attr.def.
#define DECL_ATTR(X, NAME, ...) \
<API key><NAME##DeclAttrLayout>();
#include "swift/AST/Attr.def"
bool wroteSomething;
do {
// Each of these loops can trigger the others to execute again, so repeat
// until /all/ of the pending lists are empty.
wroteSomething = false;
wroteSomething |= <API key>(DeclsToSerialize);
wroteSomething |= <API key>(TypesToSerialize);
wroteSomething |=
<API key>(<API key>);
wroteSomething |=
<API key>(<API key>);
wroteSomething |=
<API key>(<API key>);
wroteSomething |=
<API key>(<API key>);
wroteSomething |= <API key>(<API key>);
} while (wroteSomething);
}
/// Emits the identifier block: all uniqued strings concatenated with NUL
/// separators. Returns the start offset of each string; a leading NUL keeps
/// offset 0 free so it can be used as a sentinel.
std::vector<CharOffset> Serializer::writeAllIdentifiers() {
// Serializing decls can add identifiers, so the decl queue must be fully
// drained first.
assert(!DeclsToSerialize.hasMoreToSerialize() &&
"did not call Serializer::<API key>?");
BCBlockRAII restoreBlock(Out, <API key>, 3);
identifier_block::<API key> IdentifierData(Out);
llvm::SmallString<4096> stringData;
// Make sure no identifier has an offset of 0.
stringData.push_back('\0');
std::vector<CharOffset> identifierOffsets;
for (StringRef str : StringsToWrite) {
identifierOffsets.push_back(stringData.size());
stringData.append(str);
stringData.push_back('\0');
}
IdentifierData.emit(ScratchRecord, stringData.str());
return identifierOffsets;
}
/// Emits the bit-offset table for one kind of serialized entity into the
/// index block, tagged with that entity's record code.
template <typename <API key>>
void Serializer::writeOffsets(const index_block::OffsetsLayout &Offsets,
const <API key> &entities) {
Offsets.emit(ScratchRecord, <API key>::RecordCode,
entities.getOffsets());
}
/// Writes an in-memory decl table to an on-disk representation, using the
/// given layout.
static void writeDeclTable(const index_block::DeclListLayout &DeclList,
index_block::RecordKind kind,
const Serializer::DeclTable &table) {
// Empty tables are simply omitted from the index block.
if (table.empty())
return;
SmallVector<uint64_t, 8> scratch;
llvm::SmallString<4096> hashTableBlob;
uint32_t tableOffset;
{
// Build the on-disk hash table in a blob, then record its offset.
llvm::<API key><DeclTableInfo> generator;
for (auto &entry : table)
generator.insert(entry.first, entry.second);
llvm::raw_svector_ostream blobStream(hashTableBlob);
// Make sure that no bucket is at offset 0
endian::write<uint32_t>(blobStream, 0, little);
tableOffset = generator.Emit(blobStream);
}
DeclList.emit(scratch, kind, tableOffset, hashTableBlob);
}
/// Writes the in-memory extension table to its on-disk hash-table form.
/// Unlike writeDeclTable, the generator needs an ExtensionTableInfo carrying
/// the serializer to resolve references while emitting.
static void
writeExtensionTable(const index_block::<API key> &ExtensionTable,
const Serializer::ExtensionTable &table,
Serializer &serializer) {
if (table.empty())
return;
SmallVector<uint64_t, 8> scratch;
llvm::SmallString<4096> hashTableBlob;
uint32_t tableOffset;
{
llvm::<API key><ExtensionTableInfo> generator;
ExtensionTableInfo info{serializer};
for (auto &entry : table) {
generator.insert(entry.first, entry.second, info);
}
llvm::raw_svector_ostream blobStream(hashTableBlob);
// Make sure that no bucket is at offset 0
endian::write<uint32_t>(blobStream, 0, little);
tableOffset = generator.Emit(blobStream, info);
}
ExtensionTable.emit(scratch, tableOffset, hashTableBlob);
}
/// Emits a pre-populated on-disk hash table generator for local decls under
/// the given record \p kind. Unlike writeDeclTable, callers are expected to
/// skip the call entirely when the table is empty.
static void writeLocalDeclTable(const index_block::DeclListLayout &DeclList,
                                index_block::RecordKind kind,
                                <API key> &generator) {
  SmallVector<uint64_t, 8> scratch;
  llvm::SmallString<4096> hashTableBlob;
  uint32_t tableOffset;
  {
    llvm::raw_svector_ostream blobStream(hashTableBlob);
    // Make sure that no bucket is at offset 0
    endian::write<uint32_t>(blobStream, 0, little);
    tableOffset = generator.Emit(blobStream);
  }
  DeclList.emit(scratch, kind, tableOffset, hashTableBlob);
}
/// Writes one of the serializer's lookup tables as an on-disk hash table
/// into the given index-block layout. Exact table name is redacted; the
/// layout's emit takes no record kind, so this layout holds exactly one
/// table. NOTE(review): unlike writeDeclTable, there is no empty-table
/// early-out here — callers appear to guard emptiness themselves; confirm.
static void
<API key>(const index_block::<API key> &declList,
          const Serializer::<API key> &table) {
  SmallVector<uint64_t, 8> scratch;
  llvm::SmallString<4096> hashTableBlob;
  uint32_t tableOffset;
  {
    llvm::<API key><<API key>> generator;
    for (auto &entry : table)
      generator.insert(entry.first, entry.second);
    llvm::raw_svector_ostream blobStream(hashTableBlob);
    // Make sure that no bucket is at offset 0
    endian::write<uint32_t>(blobStream, 0, little);
    tableOffset = generator.Emit(blobStream);
  }
  declList.emit(scratch, tableOffset, hashTableBlob);
}
/// Writes the top-level table mapping member names to the bit offsets of
/// their per-name sub-tables (the `.first` of each table value), which must
/// already have been emitted elsewhere in the stream.
static void
<API key>(const index_block::<API key> &declNames,
          const Serializer::<API key> &table) {
  SmallVector<uint64_t, 8> scratch;
  llvm::SmallString<4096> hashTableBlob;
  uint32_t tableOffset;
  {
    llvm::<API key><<API key>> generator;
    // Emit the offsets of the sub-tables; the tables themselves have been
    // separately emitted into <API key> by now.
    for (auto &entry : table) {
      // Or they _should_ have been; check for nonzero offsets.
      assert(static_cast<unsigned>(entry.second.first) != 0);
      generator.insert(entry.first, entry.second.first);
    }
    llvm::raw_svector_ostream blobStream(hashTableBlob);
    // Make sure that no bucket is at offset 0
    endian::write<uint32_t>(blobStream, 0, little);
    tableOffset = generator.Emit(blobStream);
  }
  declNames.emit(scratch, tableOffset, hashTableBlob);
}
/// Writes a single decl-members sub-table as an on-disk hash table using the
/// DeclMembersLayout. Called once per member name; the caller records the
/// bit offset at which each sub-table lands.
static void
<API key>(const <API key>::DeclMembersLayout &mems,
          const Serializer::DeclMembersTable &table) {
  SmallVector<uint64_t, 8> scratch;
  llvm::SmallString<4096> hashTableBlob;
  uint32_t tableOffset;
  {
    llvm::<API key><<API key>> generator;
    for (auto &entry : table)
      generator.insert(entry.first, entry.second);
    llvm::raw_svector_ostream blobStream(hashTableBlob);
    // Make sure that no bucket is at offset 0
    endian::write<uint32_t>(blobStream, 0, little);
    tableOffset = generator.Emit(blobStream);
  }
  mems.emit(scratch, tableOffset, hashTableBlob);
}
namespace {
/// Used to serialize the on-disk Objective-C method hash table.
class ObjCMethodTableInfo {
public:
  using key_type = ObjCSelector;
  using key_type_ref = key_type;
  using data_type = Serializer::ObjCMethodTableData;
  using data_type_ref = const data_type &;
  using hash_value_type = uint32_t;
  using offset_type = unsigned;

  // Hash the selector's textual spelling.
  hash_value_type ComputeHash(key_type_ref key) {
    llvm::SmallString<32> scratch;
    return llvm::djbHash(key.getString(scratch), <API key>);
  }

  // Computes and writes the (key length, data length) header for one bucket.
  // Key is the selector text; the data length must match what EmitData
  // writes below.
  std::pair<unsigned, unsigned> EmitKeyDataLength(raw_ostream &out,
                                                  key_type_ref key,
                                                  data_type_ref data) {
    llvm::SmallString<32> scratch;
    auto keyLength = key.getString(scratch).size();
    // Key length is stored as a uint16_t, so it must fit.
    assert(keyLength <= std::numeric_limits<uint16_t>::max() &&
           "selector too long");
    uint32_t dataLength = 0;
    for (const auto &entry : data) {
      // Per entry: owner-name length (4) + flag byte (1) + DeclID (4) ...
      dataLength += sizeof(uint32_t) + 1 + sizeof(uint32_t);
      // ... plus the owner name characters themselves.
      dataLength += std::get<0>(entry).size();
    }
    endian::Writer writer(out, little);
    writer.write<uint16_t>(keyLength);
    writer.write<uint32_t>(dataLength);
    return { keyLength, dataLength };
  }

  // Writes the selector text as the key.
  void EmitKey(raw_ostream &out, key_type_ref key, unsigned len) {
#ifndef NDEBUG
    uint64_t start = out.tell();
#endif
    out << key;
    assert((out.tell() - start == len) && "measured key length incorrectly");
  }

  // Writes each (owner name, flag byte, DeclID) entry; must stay in sync
  // with the length computation in EmitKeyDataLength.
  void EmitData(raw_ostream &out, key_type_ref key, data_type_ref data,
                unsigned len) {
    static_assert(declIDFitsIn32Bits(), "DeclID too large");
    endian::Writer writer(out, little);
    for (const auto &entry : data) {
      writer.write<uint32_t>(std::get<0>(entry).size());
      writer.write<uint8_t>(std::get<1>(entry));
      writer.write<uint32_t>(std::get<2>(entry));
      out.write(std::get<0>(entry).c_str(), std::get<0>(entry).size());
    }
  }
};
} // end anonymous namespace
/// Writes the Objective-C method table (selector -> methods) to the index
/// block as an on-disk hash table, in a deterministic selector order.
static void <API key>(const index_block::<API key> &out,
                      Serializer::ObjCMethodTable &objcMethods) {
  // Collect all of the Objective-C selectors in the method table.
  std::vector<ObjCSelector> selectors;
  for (const auto &entry : objcMethods) {
    selectors.push_back(entry.first);
  }
  // Sort the Objective-C selectors so we emit them in a stable order.
  llvm::array_pod_sort(selectors.begin(), selectors.end());
  // Create the on-disk hash table.
  llvm::<API key><ObjCMethodTableInfo> generator;
  llvm::SmallString<32> hashTableBlob;
  uint32_t tableOffset;
  {
    llvm::raw_svector_ostream blobStream(hashTableBlob);
    for (auto selector : selectors) {
      generator.insert(selector, objcMethods[selector]);
    }
    // Make sure that no bucket is at offset 0
    endian::write<uint32_t>(blobStream, 0, little);
    tableOffset = generator.Emit(blobStream);
  }
  SmallVector<uint64_t, 8> scratch;
  out.emit(scratch, tableOffset, hashTableBlob);
}
/// Recursively walks the members and derived global decls of any nominal types
/// to build up global tables.
template<typename Range>
static void <API key>(
    Serializer &S,
    Range members,
    Serializer::DeclTable &operatorMethodDecls,
    Serializer::ObjCMethodTable &objcMethods,
    Serializer::<API key> &nestedTypeDecls,
    bool isLocal = false) {
  // Lazily computed: the nominal type that owns these members (used only
  // when recording nested type decls).
  const NominalTypeDecl *nominalParent = nullptr;
  for (const Decl *member : members) {
    // If there is a corresponding Objective-C method, record it.
    auto recordObjCMethod = [&](const <API key> *func) {
      // Local types are not indexed in the global ObjC method table.
      if (isLocal)
        return;
      if (auto owningClass = func->getDeclContext()->getSelfClassDecl()) {
        if (func->isObjC()) {
          Mangle::ASTMangler mangler;
          std::string ownerName = mangler.mangleNominalType(owningClass);
          assert(!ownerName.empty() && "Mangled type came back empty!");
          // Tuple layout mirrors ObjCMethodTableInfo::EmitData:
          // (owner name, flag byte, DeclID).
          objcMethods[func->getObjCSelector()].push_back(
            std::make_tuple(ownerName,
                            func-><API key>(),
                            S.addDeclRef(func)));
        }
      }
    };
    if (auto memberValue = dyn_cast<ValueDecl>(member)) {
      if (memberValue->hasName() &&
          memberValue->isOperator()) {
        // Add operator methods.
        // Note that we don't have to add operators that are already in the
        // top-level list.
        operatorMethodDecls[memberValue->getBaseName()].push_back({
          /*ignored*/0,
          S.addDeclRef(memberValue)
        });
      }
    }
    // Record Objective-C methods.
    if (auto *func = dyn_cast<<API key>>(member))
      recordObjCMethod(func);
    // Handle accessors.
    if (auto storage = dyn_cast<AbstractStorageDecl>(member)) {
      for (auto *accessor : storage->getAllAccessors()) {
        recordObjCMethod(accessor);
      }
    }
    if (auto nestedType = dyn_cast<TypeDecl>(member)) {
      // Only sufficiently visible nested types go into the lookup table.
      if (nestedType->getEffectiveAccess() > swift::AccessLevel::FilePrivate) {
        if (!nominalParent) {
          const DeclContext *DC = member->getDeclContext();
          nominalParent = DC-><API key>();
          assert(nominalParent && "parent context is not a type or extension");
        }
        nestedTypeDecls[nestedType->getName()].push_back({
          S.addDeclRef(nominalParent),
          S.addDeclRef(nestedType)
        });
      }
    }
    // Recurse into nested declarations.
    if (auto iterable = dyn_cast<IterableDeclContext>(member)) {
      <API key>(S, iterable->getMembers(),
                operatorMethodDecls,
                objcMethods, nestedTypeDecls,
                isLocal);
    }
  }
}
/// Serializes the AST of the given module (or single source file): walks each
/// file's top-level declarations to build the lookup tables, flushes every
/// pending decl/type, then emits the index block with offsets and tables.
void Serializer::writeAST(ModuleOrSourceFile DC,
                          bool <API key>) {
  DeclTable topLevelDecls, operatorDecls, operatorMethodDecls;
  DeclTable <API key>;
  ObjCMethodTable objcMethods;
  <API key> nestedTypeDecls;
  <API key> localTypeGenerator, <API key>;
  ExtensionTable extensionDecls;
  bool hasLocalTypes = false;
  bool <API key> = false;
  Optional<DeclID> entryPointClassID;
  SmallVector<DeclID, 16> <API key>;
  // Either the single source file, or every file of the module.
  ArrayRef<const FileUnit *> files;
  SmallVector<const FileUnit *, 1> Scratch;
  if (SF) {
    Scratch.push_back(SF);
    files = llvm::makeArrayRef(Scratch);
  } else {
    files = M->getFiles();
  }
  for (auto nextFile : files) {
    if (nextFile->hasEntryPoint())
      entryPointClassID = addDeclRef(nextFile->getMainClass());
    // FIXME: Switch to a visitor interface?
    SmallVector<Decl *, 32> fileDecls;
    nextFile->getTopLevelDecls(fileDecls);
    for (auto D : fileDecls) {
      // These kinds carry nothing worth indexing by name.
      if (isa<ImportDecl>(D) || isa<IfConfigDecl>(D) ||
          isa<PoundDiagnosticDecl>(D) || isa<TopLevelCodeDecl>(D)) {
        continue;
      }
      // Route each decl into the appropriate name-keyed table.
      if (auto VD = dyn_cast<ValueDecl>(D)) {
        if (!VD->hasName())
          continue;
        topLevelDecls[VD->getBaseName()]
          .push_back({ getKindForTable(D), addDeclRef(D) });
      } else if (auto ED = dyn_cast<ExtensionDecl>(D)) {
        const NominalTypeDecl *extendedNominal = ED->getExtendedNominal();
        extensionDecls[extendedNominal->getName()]
          .push_back({ extendedNominal, addDeclRef(D) });
      } else if (auto OD = dyn_cast<OperatorDecl>(D)) {
        operatorDecls[OD->getName()]
          .push_back({ getStableFixity(OD->getKind()), addDeclRef(D) });
      } else if (auto PGD = dyn_cast<PrecedenceGroupDecl>(D)) {
        <API key>[PGD->getName()]
          .push_back({ decls_block::<API key>, addDeclRef(D) });
      } else if (isa<PatternBindingDecl>(D)) {
        // No special handling needed.
      } else {
        llvm_unreachable("all top-level declaration kinds accounted for");
      }
      <API key>.push_back(addDeclRef(D));
      // If this nominal type has associated top-level decls for a
      // derived conformance (for example, ==), force them to be
      // serialized.
      if (auto IDC = dyn_cast<IterableDeclContext>(D)) {
        <API key>(*this, IDC->getMembers(),
                  operatorMethodDecls, objcMethods,
                  nestedTypeDecls);
      }
    }
    // Index local type decls and opaque type decls by mangled name.
    SmallVector<TypeDecl *, 16> localTypeDecls;
    nextFile->getLocalTypeDecls(localTypeDecls);
    SmallVector<OpaqueTypeDecl *, 16> <API key>;
    nextFile-><API key>(<API key>);
    for (auto TD : localTypeDecls) {
      // FIXME: We should delay parsing function bodies so these type decls
      // don't even get added to the file.
      if (TD->getDeclContext()-><API key>())
        continue;
      hasLocalTypes = true;
      Mangle::ASTMangler Mangler;
      std::string MangledName =
          evaluateOrDefault(M->getASTContext().evaluator,
                            <API key> { TD },
                            std::string());
      assert(!MangledName.empty() && "Mangled type came back empty!");
      localTypeGenerator.insert(MangledName, addDeclRef(TD));
      if (auto IDC = dyn_cast<IterableDeclContext>(TD)) {
        <API key>(*this, IDC->getMembers(),
                  operatorMethodDecls, objcMethods,
                  nestedTypeDecls, /*isLocal=*/true);
      }
    }
    for (auto OTD : <API key>) {
      <API key> = true;
      Mangle::ASTMangler Mangler;
      // Opaque types are keyed by the USR of their naming decl.
      auto MangledName = Mangler.mangleDeclAsUSR(OTD->getNamingDecl(),
                                                 MANGLING_PREFIX_STR);
      <API key>.insert(MangledName, addDeclRef(OTD));
    }
  }
  // Drain the pending-serialization worklists, then emit the string table
  // (must happen after all decls, since serializing decls adds strings).
  <API key>();
  std::vector<CharOffset> identifierOffsets = writeAllIdentifiers();
  {
    BCBlockRAII restoreBlock(Out, INDEX_BLOCK_ID, 4);
    index_block::OffsetsLayout Offsets(Out);
    writeOffsets(Offsets, DeclsToSerialize);
    writeOffsets(Offsets, TypesToSerialize);
    writeOffsets(Offsets, <API key>);
    writeOffsets(Offsets, <API key>);
    writeOffsets(Offsets, <API key>);
    writeOffsets(Offsets, <API key>);
    writeOffsets(Offsets, <API key>);
    Offsets.emit(ScratchRecord, index_block::IDENTIFIER_OFFSETS,
                 identifierOffsets);
    index_block::DeclListLayout DeclList(Out);
    writeDeclTable(DeclList, index_block::TOP_LEVEL_DECLS, topLevelDecls);
    writeDeclTable(DeclList, index_block::OPERATORS, operatorDecls);
    writeDeclTable(DeclList, index_block::PRECEDENCE_GROUPS, <API key>);
    writeDeclTable(DeclList, index_block::<API key>,
                   <API key>);
    writeDeclTable(DeclList, index_block::OPERATOR_METHODS, operatorMethodDecls);
    if (hasLocalTypes)
      writeLocalDeclTable(DeclList, index_block::LOCAL_TYPE_DECLS,
                          localTypeGenerator);
    if (<API key>)
      writeLocalDeclTable(DeclList, index_block::<API key>,
                          <API key>);
    if (!extensionDecls.empty()) {
      index_block::<API key> ExtensionTable(Out);
      writeExtensionTable(ExtensionTable, extensionDecls, *this);
    }
    index_block::OrderedDeclsLayout OrderedDecls(Out);
    OrderedDecls.emit(ScratchRecord, index_block::<API key>,
                      <API key>);
    index_block::<API key> ObjCMethodTable(Out);
    <API key>(ObjCMethodTable, objcMethods);
    if (<API key> &&
        !nestedTypeDecls.empty()) {
      index_block::<API key> <API key>(Out);
      <API key>(<API key>, nestedTypeDecls);
    }
    if (entryPointClassID.hasValue()) {
      index_block::EntryPointLayout EntryPoint(Out);
      EntryPoint.emit(ScratchRecord, entryPointClassID.getValue());
    }
    {
      // Write sub-tables to a skippable sub-block.
      BCBlockRAII restoreBlock(Out, <API key>, 4);
      <API key>::DeclMembersLayout DeclMembersTable(Out);
      for (auto &entry : DeclMemberNames) {
        // Save BitOffset we're writing sub-table to.
        static_assert(<API key>(), "BitOffset too large");
        assert(Out.GetCurrentBitNo() < (1ull << 32));
        entry.second.first = Out.GetCurrentBitNo();
        // Write sub-table.
        <API key>(DeclMembersTable, *entry.second.second);
      }
    }
    // Write top-level table mapping names to sub-tables.
    index_block::<API key> <API key>(Out);
    <API key>(<API key>, DeclMemberNames);
  }
}
/// Flushes the accumulated serialized bytes in Buffer to \p os.
void SerializerBase::writeToStream(raw_ostream &os) {
  os.write(Buffer.data(), Buffer.size());
  os.flush();
}
/// Starts a serialization session: emits the magic-number \p signature bytes
/// into the output stream and records the module / optional source file.
SerializerBase::SerializerBase(ArrayRef<unsigned char> signature,
                               ModuleOrSourceFile DC) {
  for (unsigned char byte : signature)
    Out.Emit(byte, 8);
  this->M = getModule(DC);
  // Null when serializing a whole module rather than a single file.
  this->SF = DC.dyn_cast<SourceFile *>();
}
/// Serializes the module/file \p DC (with optional SIL from \p SILMod) and
/// writes the complete swiftmodule bitstream to \p os.
void Serializer::writeToStream(raw_ostream &os, ModuleOrSourceFile DC,
                               const SILModule *SILMod,
                               const <API key> &options) {
  Serializer S{<API key>, DC};
  // FIXME: This is only really needed for debugging. We don't actually use it.
  S.writeBlockInfoBlock();
  {
    BCBlockRAII moduleBlock(S.Out, MODULE_BLOCK_ID, 2);
    // Emission order matters: header, inputs, SIL, then the AST + index.
    S.writeHeader(options);
    S.writeInputBlock(options);
    S.writeSIL(SILMod, options.SerializeAllSIL);
    S.writeAST(DC, options.<API key>);
  }
  S.writeToStream(os);
}
/// Serializes \p DC to the output paths in \p options and, when the
/// corresponding out-parameter is non-null, also hands back each artifact as
/// an in-memory buffer: the .swiftmodule always, plus .swiftdoc and source
/// info when their paths are set.
void swift::serializeToBuffers(
    ModuleOrSourceFile DC, const <API key> &options,
    std::unique_ptr<llvm::MemoryBuffer> *moduleBuffer,
    std::unique_ptr<llvm::MemoryBuffer> *moduleDocBuffer,
    std::unique_ptr<llvm::MemoryBuffer> *<API key>,
    const SILModule *M) {
  assert(!StringRef::withNullAsEmpty(options.OutputPath).empty());
  {
    SharedTimer timer("Serialization, swiftmodule, to buffer");
    llvm::SmallString<1024> buf;
    llvm::raw_svector_ostream stream(buf);
    // Serialize once into memory, then copy to disk and (optionally) move
    // the same bytes into the returned buffer.
    Serializer::writeToStream(stream, DC, M, options);
    bool hadError = withOutputFile(getContext(DC).Diags,
                                   options.OutputPath,
                                   [&](raw_ostream &out) {
      out << stream.str();
      return false;
    });
    // A failed swiftmodule write aborts the doc/source-info outputs too.
    if (hadError)
      return;
    if (moduleBuffer)
      *moduleBuffer = llvm::make_unique<llvm::<API key>>(
          std::move(buf), options.OutputPath);
  }
  if (!StringRef::withNullAsEmpty(options.DocOutputPath).empty()) {
    SharedTimer timer("Serialization, swiftdoc, to buffer");
    llvm::SmallString<1024> buf;
    llvm::raw_svector_ostream stream(buf);
    writeDocToStream(stream, DC, options.GroupInfoPath);
    // Doc output errors are non-fatal; the result is discarded.
    (void)withOutputFile(getContext(DC).Diags,
                         options.DocOutputPath,
                         [&](raw_ostream &out) {
      out << stream.str();
      return false;
    });
    if (moduleDocBuffer)
      *moduleDocBuffer = llvm::make_unique<llvm::<API key>>(
          std::move(buf), options.DocOutputPath);
  }
  if (!StringRef::withNullAsEmpty(options.<API key>).empty()) {
    SharedTimer timer("Serialization, swiftsourceinfo, to buffer");
    llvm::SmallString<1024> buf;
    llvm::raw_svector_ostream stream(buf);
    <API key>(stream, DC);
    (void)withOutputFile(getContext(DC).Diags,
                         options.<API key>,
                         [&](raw_ostream &out) {
      out << stream.str();
      return false;
    });
    if (<API key>)
      *<API key> = llvm::make_unique<llvm::<API key>>(
          std::move(buf), options.<API key>);
  }
}
/// Serializes \p DC directly to the output paths in \p options: the
/// .swiftmodule (or stdout for "-"), plus .swiftdoc and source info when
/// their paths are set.
void swift::serialize(ModuleOrSourceFile DC,
                      const <API key> &options,
                      const SILModule *M) {
  assert(!StringRef::withNullAsEmpty(options.OutputPath).empty());
  if (StringRef(options.OutputPath) == "-") {
    // Special-case writing to stdout.
    Serializer::writeToStream(llvm::outs(), DC, M, options);
    // Stdout mode supports only the module output, not the doc file.
    assert(StringRef::withNullAsEmpty(options.DocOutputPath).empty());
    return;
  }
  bool hadError = withOutputFile(getContext(DC).Diags,
                                 options.OutputPath,
                                 [&](raw_ostream &out) {
    SharedTimer timer("Serialization, swiftmodule");
    Serializer::writeToStream(out, DC, M, options);
    return false;
  });
  // A failed swiftmodule write aborts the doc/source-info outputs too.
  if (hadError)
    return;
  if (!StringRef::withNullAsEmpty(options.DocOutputPath).empty()) {
    (void)withOutputFile(getContext(DC).Diags,
                         options.DocOutputPath,
                         [&](raw_ostream &out) {
      SharedTimer timer("Serialization, swiftdoc");
      writeDocToStream(out, DC, options.GroupInfoPath);
      return false;
    });
  }
  if (!StringRef::withNullAsEmpty(options.<API key>).empty()) {
    (void)withOutputFile(getContext(DC).Diags,
                         options.<API key>,
                         [&](raw_ostream &out) {
      SharedTimer timer("Serialization, swiftsourceinfo");
      <API key>(out, DC);
      return false;
    });
  }
}
using NDatabase.Api;
using NDatabase.Exceptions;
namespace NDatabase.Meta
{
/// <summary>
/// Storage-engine constants describing offset positions for object writing/reading.
/// </summary>
internal static class <API key>
{
    // Number of object ids stored in a single id block.
    internal const int NbIdsPerBlock = 1000;
    // Size in bytes of one id entry within an id block.
    internal const int <API key> = 18;
    /// <summary>
    /// Default max number of write object actions per transaction - 10 000
    /// </summary>
    internal const int <API key> = 10000;
    /// <summary>
    /// header(34) + 1000 * 18
    /// </summary>
    internal const int IdBlockSize = 34 + NbIdsPerBlock * <API key>;
    internal const long NullObjectIdId = 0;
    internal const long <API key> = 0;
    internal const long NullObjectPosition = 0;
    // Sentinel positions returned by lookups.
    internal const long ObjectIsNotInCache = -1;
    internal const long ObjectDoesNotExist = -2;
    /// <summary>
    /// This occurs when a class has been refactored adding a field.
    /// </summary>
    /// <remarks>
    /// This occurs when a class has been refactored adding a field. Old objects do not have the new field.
    /// </remarks>
    internal const long FieldDoesNotExist = -1;
    private const int Version30 = 30;
    internal const int <API key> = Version30;
    private const long <API key> = 0;
    private const long <API key> = 0;
    /// <summary>
    /// pull id type (byte),id(long),
    /// </summary>
    private const long <API key> = 0;
    private const long <API key> = 0;
    /// <summary>
    /// Used to make an attribute reference a null object - setting its id to zero
    /// </summary>
    internal static readonly OID NullObjectId = null;
    /// <summary>
    /// File format version : 1 int (4 bytes)
    /// </summary>
    internal const int <API key> = 0;
    /// <summary>
    /// The Database ID : 4 Long (4*8 bytes)
    /// </summary>
    internal static readonly int <API key> = OdbType.Integer.Size;
    /// <summary>
    /// The last Transaction ID 2 long (2*4*8 bytes)
    /// </summary>
    internal static readonly int <API key> = <API key> +
                                             4 * OdbType.Long.Size;
    /// <summary>
    /// The number of classes in the meta model 1 long (4*8 bytes)
    /// </summary>
    internal static readonly int <API key> = <API key> +
                                             2 * OdbType.Long.Size;
    /// <summary>
    /// The first class OID : 1 Long (8 bytes)
    /// </summary>
    internal static readonly int <API key> = <API key> +
                                             OdbType.Long.Size;
    /// <summary>
    /// The last ODB close status.
    /// </summary>
    /// <remarks>
    /// The last ODB close status. Used to detect if the transaction is ok : 1 byte
    /// </remarks>
    internal static readonly int <API key> = <API key> +
                                             OdbType.Long.Size;
    internal static readonly int <API key> =
        <API key> + OdbType.Byte.Size;
    /// <summary>
    /// The Database character encoding : 50 bytes
    /// </summary>
    internal static readonly int <API key> =
        <API key> + 120 * OdbType.Byte.Size;
    /// <summary>
    /// The position of the current id block: 1 long
    /// </summary>
    internal static readonly int <API key> =
        <API key> + 58 * OdbType.Byte.Size;
    /// <summary>
    /// First ID Block position
    /// </summary>
    internal static readonly int <API key> = <API key> +
                                             OdbType.Long.Size;
    internal static readonly int <API key> = <API key>;
    // All header field offsets, in file order.
    internal static readonly int[] <API key> = new[]
    {
        <API key>, <API key>, <API key>,
        <API key>, <API key>,
        <API key>, <API key>, <API key>
    };
    // CLASS OFFSETS: relative positions of fields within a class record.
    private static readonly long <API key> = <API key> + OdbType.Integer.Size;
    private static readonly long ClassOffsetCategory = <API key> + OdbType.Byte.Size;
    private static readonly long ClassOffsetId = ClassOffsetCategory + OdbType.Byte.Size;
    private static readonly long <API key> = ClassOffsetId + OdbType.Long.Size;
    internal static readonly long <API key> = <API key> +
                                              OdbType.Long.Size;
    internal static readonly long <API key> = <API key> + OdbType.Long.Size;
    private static readonly long <API key> = <API key> + OdbType.Integer.Size;
    private static readonly long <API key> = <API key> + OdbType.Byte.Size;
    private static readonly long <API key> = <API key> + OdbType.Long.Size;
    internal static readonly long <API key> = <API key> + OdbType.Long.Size;
    internal static readonly long <API key> = <API key> + OdbType.Long.Size;
    /// <summary>
    /// <pre>ID Block Header :
    /// Block size : 1 int
    /// Block type : 1 byte
    /// Block status : 1 byte
    /// Prev block position : 1 long
    /// Next block position : 1 long
    /// Block number : 1 int
    /// Max id : 1 long
    /// Total size = 34</pre>
    /// </summary>
    internal static readonly long <API key> = OdbType.Integer.Size + OdbType.Byte.Size;
    private static readonly long <API key> = <API key> + OdbType.Byte.Size;
    internal static readonly long <API key> = <API key> + OdbType.Long.Size;
    internal static readonly long <API key> = <API key> + OdbType.Long.Size;
    internal static readonly long <API key> = <API key> + OdbType.Integer.Size;
    internal static readonly long <API key> = <API key> + OdbType.Long.Size;
    private static readonly long BlockIdRepetitionId = <API key> + OdbType.Byte.Size;
    internal static readonly long <API key> = BlockIdRepetitionId + OdbType.Long.Size;
    internal static readonly long <API key> = <API key> +
                                              OdbType.Integer.Size;
    // DATABASE HEADER
    // END OF DATABASE HEADER
    // CLASS OFFSETS
    // OBJECT OFFSETS - update this section when modifying the odb file format
    /// <summary>
    /// Throws when the on-disk file format version does not match the
    /// version this engine supports.
    /// </summary>
    internal static void <API key>(int version)
    {
        var versionIsCompatible = version == <API key>;
        if (!versionIsCompatible)
        {
            throw new OdbRuntimeException(
                NDatabaseError.<API key>.AddParameter(version).AddParameter(
                    <API key>));
        }
    }
    /// <summary>
    /// Computes the 1-based index of the id block that contains the given
    /// OID: ceiling of ObjectId / NbIdsPerBlock.
    /// </summary>
    internal static long <API key>(OID oid)
    {
        long number;
        var objectId = oid.ObjectId;
        // Ceiling division: ids 1..1000 -> block 1, 1001..2000 -> block 2, ...
        if (objectId % NbIdsPerBlock == 0)
            number = objectId / NbIdsPerBlock;
        else
            number = objectId / NbIdsPerBlock + 1;
        return number;
    }
}
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_16) on Sat Mar 06 22:12:20 IST 2010 -->
<TITLE>
FramesCodeVisitor.VersionException
</TITLE>
<META NAME="date" CONTENT="2010-03-06">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="FramesCodeVisitor.VersionException";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<A NAME="navbar_top"></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/FramesCodeVisitor.VersionException.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../org/deuce/transform/asm/FramesCodeVisitor.html" title="class in org.deuce.transform.asm"><B>PREV CLASS</B></A>
NEXT CLASS</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../index.html?org/deuce/transform/asm/FramesCodeVisitor.VersionException.html" target="_top"><B>FRAMES</B></A>
<A HREF="FramesCodeVisitor.VersionException.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | <A HREF="#field_summary">FIELD</A> | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#<API key>.lang.Throwable">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: <A HREF="#field_detail">FIELD</A> | <A HREF="#constructor_detail">CONSTR</A> | METHOD</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<HR>
<H2>
<FONT SIZE="-1">
org.deuce.transform.asm</FONT>
<BR>
Class FramesCodeVisitor.VersionException</H2>
<PRE>
java.lang.Object
<IMG SRC="../../../../resources/inherit.gif" ALT="extended by ">java.lang.Throwable
<IMG SRC="../../../../resources/inherit.gif" ALT="extended by ">java.lang.Exception
<IMG SRC="../../../../resources/inherit.gif" ALT="extended by ">java.lang.RuntimeException
<IMG SRC="../../../../resources/inherit.gif" ALT="extended by "><B>org.deuce.transform.asm.FramesCodeVisitor.VersionException</B>
</PRE>
<DL>
<DT><B>All Implemented Interfaces:</B> <DD>java.io.Serializable</DD>
</DL>
<DL>
<DT><B>Enclosing class:</B><DD><A HREF="../../../../org/deuce/transform/asm/FramesCodeVisitor.html" title="class in org.deuce.transform.asm">FramesCodeVisitor</A></DD>
</DL>
<HR>
<DL>
<DT><PRE>public static class <B>FramesCodeVisitor.VersionException</B><DT>extends java.lang.RuntimeException</DL>
</PRE>
<P>
<DL>
<DT><B>See Also:</B><DD><A HREF="../../../../serialized-form.html#org.deuce.transform.asm.FramesCodeVisitor.VersionException">Serialized Form</A></DL>
<HR>
<P>
<A NAME="field_summary"></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Field Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static <A HREF="../../../../org/deuce/transform/asm/FramesCodeVisitor.VersionException.html" title="class in org.deuce.transform.asm">FramesCodeVisitor.VersionException</A></CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../org/deuce/transform/asm/FramesCodeVisitor.VersionException.html#INSTANCE">INSTANCE</A></B></CODE>
<BR>
</TD>
</TR>
</TABLE>
<A NAME="constructor_summary"></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Constructor Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../../../../org/deuce/transform/asm/FramesCodeVisitor.VersionException.html#FramesCodeVisitor.VersionException()">FramesCodeVisitor.VersionException</A></B>()</CODE>
<BR>
</TD>
</TR>
</TABLE>
<A NAME="method_summary"></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Method Summary</B></FONT></TH>
</TR>
</TABLE>
<A NAME="<API key>.lang.Throwable"></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="<API key>">
<TH ALIGN="left"><B>Methods inherited from class java.lang.Throwable</B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE>fillInStackTrace, getCause, getLocalizedMessage, getMessage, getStackTrace, initCause, printStackTrace, printStackTrace, printStackTrace, setStackTrace, toString</CODE></TD>
</TR>
</TABLE>
<A NAME="<API key>.lang.Object"></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="<API key>">
<TH ALIGN="left"><B>Methods inherited from class java.lang.Object</B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE>clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait</CODE></TD>
</TR>
</TABLE>
<P>
<A NAME="field_detail"></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Field Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="INSTANCE"></A><H3>
INSTANCE</H3>
<PRE>
public static final <A HREF="../../../../org/deuce/transform/asm/FramesCodeVisitor.VersionException.html" title="class in org.deuce.transform.asm">FramesCodeVisitor.VersionException</A> <B>INSTANCE</B></PRE>
<DL>
<DL>
</DL>
</DL>
<A NAME="constructor_detail"></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Constructor Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="FramesCodeVisitor.VersionException()"></A><H3>
FramesCodeVisitor.VersionException</H3>
<PRE>
public <B>FramesCodeVisitor.VersionException</B>()</PRE>
<DL>
</DL>
<HR>
<A NAME="navbar_bottom"></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="<API key>"></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/FramesCodeVisitor.VersionException.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-files/index-1.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../org/deuce/transform/asm/FramesCodeVisitor.html" title="class in org.deuce.transform.asm"><B>PREV CLASS</B></A>
NEXT CLASS</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../index.html?org/deuce/transform/asm/FramesCodeVisitor.VersionException.html" target="_top"><B>FRAMES</B></A>
<A HREF="FramesCodeVisitor.VersionException.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | <A HREF="#field_summary">FIELD</A> | <A HREF="#constructor_summary">CONSTR</A> | <A HREF="#<API key>.lang.Throwable">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: <A HREF="#field_detail">FIELD</A> | <A HREF="#constructor_detail">CONSTR</A> | METHOD</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<HR>
</BODY>
</HTML> |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_75) on Sat May 16 22:22:32 CEST 2015 -->
<title>org.apache.cassandra.io.sstable Class Hierarchy</title>
<meta name="date" content="2015-05-16">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="org.apache.cassandra.io.sstable Class Hierarchy";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<div class="topNav"><a name="navbar_top">
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Tree</li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../../org/apache/cassandra/io/compress/package-tree.html">Prev</a></li>
<li><a href="../../../../../org/apache/cassandra/io/sstable/format/package-tree.html">Next</a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/apache/cassandra/io/sstable/package-tree.html" target="_top">Frames</a></li>
<li><a href="package-tree.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="<API key>">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("<API key>");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
</a></div>
<div class="header">
<h1 class="title">Hierarchy For Package org.apache.cassandra.io.sstable</h1>
<span class="strong">Package Hierarchies:</span>
<ul class="horizontal">
<li><a href="../../../../../overview-tree.html">All Packages</a></li>
</ul>
</div>
<div class="contentContainer">
<h2 title="Class Hierarchy">Class Hierarchy</h2>
<ul>
<li type="circle">java.lang.Object
<ul>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/<API key>.html" title="class in org.apache.cassandra.io.sstable"><span class="strong"><API key></span></a> (implements java.io.Closeable)
<ul>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/<API key>.html" title="class in org.apache.cassandra.io.sstable"><span class="strong"><API key></span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableSimpleWriter.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableSimpleWriter</span></a></li>
</ul>
</li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/BloomFilterTracker.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">BloomFilterTracker</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/ColumnNameHelper.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">ColumnNameHelper</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/ColumnStats.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">ColumnStats</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/ColumnStats.MaxIntTracker.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">ColumnStats.MaxIntTracker</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/ColumnStats.MaxLongTracker.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">ColumnStats.MaxLongTracker</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/ColumnStats.MinLongTracker.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">ColumnStats.MinLongTracker</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/Component.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">Component</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/CQLSSTableWriter.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">CQLSSTableWriter</span></a> (implements java.io.Closeable)</li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/CQLSSTableWriter.Builder.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">CQLSSTableWriter.Builder</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/<API key>.html" title="class in org.apache.cassandra.io.sstable"><span class="strong"><API key></span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/<API key>.html" title="class in org.apache.cassandra.io.sstable"><span class="strong"><API key></span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/Descriptor.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">Descriptor</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/DescriptorTest.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">DescriptorTest</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/Downsampling.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">Downsampling</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/IndexHelper.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">IndexHelper</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/IndexHelper.IndexInfo.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">IndexHelper.IndexInfo</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/IndexHelper.IndexInfo.Serializer.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">IndexHelper.IndexInfo.Serializer</span></a> (implements org.apache.cassandra.io.<a href="../../../../../org/apache/cassandra/io/ISerializer.html" title="interface in org.apache.cassandra.io">ISerializer</a><T>)</li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/IndexHelperTest.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">IndexHelperTest</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/IndexSummary.<API key>.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">IndexSummary.<API key></span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/IndexSummaryBuilder.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">IndexSummaryBuilder</span></a> (implements java.lang.AutoCloseable)</li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/IndexSummaryBuilder.ReadableBoundary.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">IndexSummaryBuilder.ReadableBoundary</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/IndexSummaryManager.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">IndexSummaryManager</span></a> (implements org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/<API key>.html" title="interface in org.apache.cassandra.io.sstable"><API key></a>)</li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/<API key>.html" title="class in org.apache.cassandra.io.sstable"><span class="strong"><API key></span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/IndexSummaryTest.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">IndexSummaryTest</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/LegacySSTableTest.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">LegacySSTableTest</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/ReducingKeyIterator.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">ReducingKeyIterator</span></a> (implements org.apache.cassandra.utils.<a href="../../../../../org/apache/cassandra/utils/CloseableIterator.html" title="interface in org.apache.cassandra.utils">CloseableIterator</a><T>)</li>
<li type="circle">org.apache.cassandra.<a href="../../../../../org/apache/cassandra/SchemaLoader.html" title="class in org.apache.cassandra"><span class="strong">SchemaLoader</span></a>
<ul>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableRewriterTest.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableRewriterTest</span></a></li>
</ul>
</li>
<li type="circle">org.apache.cassandra.utils.concurrent.<a href="../../../../../org/apache/cassandra/utils/concurrent/SharedCloseableImpl.html" title="class in org.apache.cassandra.utils.concurrent"><span class="strong">SharedCloseableImpl</span></a> (implements org.apache.cassandra.utils.concurrent.<a href="../../../../../org/apache/cassandra/utils/concurrent/SharedCloseable.html" title="interface in org.apache.cassandra.utils.concurrent">SharedCloseable</a>)
<ul>
<li type="circle">org.apache.cassandra.utils.concurrent.<a href="../../../../../org/apache/cassandra/utils/concurrent/<API key>.html" title="class in org.apache.cassandra.utils.concurrent"><span class="strong"><API key></span></a>
<ul>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/IndexSummary.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">IndexSummary</span></a></li>
</ul>
</li>
</ul>
</li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTable.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTable</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableDeletingTask.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableDeletingTask</span></a> (implements java.lang.Runnable)</li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/<API key>.html" title="class in org.apache.cassandra.io.sstable"><span class="strong"><API key></span></a> (implements java.lang.Comparable<T>, org.apache.cassandra.db.columniterator.<a href="../../../../../org/apache/cassandra/db/columniterator/OnDiskAtomIterator.html" title="interface in org.apache.cassandra.db.columniterator">OnDiskAtomIterator</a>)</li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableLoader.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableLoader</span></a> (implements org.apache.cassandra.streaming.<a href="../../../../../org/apache/cassandra/streaming/StreamEventHandler.html" title="interface in org.apache.cassandra.streaming">StreamEventHandler</a>)</li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableLoader.Client.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableLoader.Client</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableLoaderTest.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableLoaderTest</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableMetadataTest.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableMetadataTest</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableReaderTest.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableReaderTest</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableRewriter.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableRewriter</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableScannerTest.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableScannerTest</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/<API key>.html" title="class in org.apache.cassandra.io.sstable"><span class="strong"><API key></span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableUtils.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableUtils</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableUtils.Appender.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableUtils.Appender</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/SSTableUtils.Context.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">SSTableUtils.Context</span></a></li>
<li type="circle">java.lang.Throwable (implements java.io.Serializable)
<ul>
<li type="circle">java.lang.Exception
<ul>
<li type="circle">java.lang.RuntimeException
<ul>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/<API key>.html" title="class in org.apache.cassandra.io.sstable"><span class="strong"><API key></span></a></li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
<li type="circle">com.google.common.collect.<API key><E> (implements java.util.Iterator<E>)
<ul>
<li type="circle">com.google.common.collect.AbstractIterator<T>
<ul>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/KeyIterator.html" title="class in org.apache.cassandra.io.sstable"><span class="strong">KeyIterator</span></a> (implements org.apache.cassandra.utils.<a href="../../../../../org/apache/cassandra/utils/CloseableIterator.html" title="interface in org.apache.cassandra.utils">CloseableIterator</a><T>)</li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
</ul>
<h2 title="Interface Hierarchy">Interface Hierarchy</h2>
<ul>
<li type="circle">java.lang.AutoCloseable
<ul>
<li type="circle">java.io.Closeable
<ul>
<li type="circle">org.apache.cassandra.utils.<a href="../../../../../org/apache/cassandra/utils/CloseableIterator.html" title="interface in org.apache.cassandra.utils"><span class="strong">CloseableIterator</span></a><T> (also extends java.lang.AutoCloseable, java.util.Iterator<E>)
<ul>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/ISSTableScanner.html" title="interface in org.apache.cassandra.io.sstable"><span class="strong">ISSTableScanner</span></a></li>
</ul>
</li>
</ul>
</li>
<li type="circle">org.apache.cassandra.utils.<a href="../../../../../org/apache/cassandra/utils/CloseableIterator.html" title="interface in org.apache.cassandra.utils"><span class="strong">CloseableIterator</span></a><T> (also extends java.io.Closeable, java.util.Iterator<E>)
<ul>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/ISSTableScanner.html" title="interface in org.apache.cassandra.io.sstable"><span class="strong">ISSTableScanner</span></a></li>
</ul>
</li>
</ul>
</li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/<API key>.html" title="interface in org.apache.cassandra.io.sstable"><span class="strong"><API key></span></a></li>
<li type="circle">java.util.Iterator<E>
<ul>
<li type="circle">org.apache.cassandra.utils.<a href="../../../../../org/apache/cassandra/utils/CloseableIterator.html" title="interface in org.apache.cassandra.utils"><span class="strong">CloseableIterator</span></a><T> (also extends java.lang.AutoCloseable, java.io.Closeable)
<ul>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/ISSTableScanner.html" title="interface in org.apache.cassandra.io.sstable"><span class="strong">ISSTableScanner</span></a></li>
</ul>
</li>
</ul>
</li>
</ul>
<h2 title="Enum Hierarchy">Enum Hierarchy</h2>
<ul>
<li type="circle">java.lang.Object
<ul>
<li type="circle">java.lang.Enum<E> (implements java.lang.Comparable<T>, java.io.Serializable)
<ul>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/Descriptor.Type.html" title="enum in org.apache.cassandra.io.sstable"><span class="strong">Descriptor.Type</span></a></li>
<li type="circle">org.apache.cassandra.io.sstable.<a href="../../../../../org/apache/cassandra/io/sstable/Component.Type.html" title="enum in org.apache.cassandra.io.sstable"><span class="strong">Component.Type</span></a></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="bottomNav"><a name="navbar_bottom">
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="<API key>">
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Tree</li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../../org/apache/cassandra/io/compress/package-tree.html">Prev</a></li>
<li><a href="../../../../../org/apache/cassandra/io/sstable/format/package-tree.html">Next</a></li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/apache/cassandra/io/sstable/package-tree.html" target="_top">Frames</a></li>
<li><a href="package-tree.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="<API key>">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("<API key>");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
</a></div>
</body>
</html> |
<!DOCTYPE html>
<html>
{% include head.html %}
<body id="page-top" class="index">
{% include nav.html %}
{% include portfolio_grid.html %}
{% if site.contact == "static" %}
{% include contact_static.html %}
{% elsif site.contact == "disqus" %}
{% include contact_disqus.html %}
{% else %}
{% include contact.html %}
{% endif %}
{% include footer.html %}
{% include modals.html %}
{% include js.html %}
</body>
</html> |
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2011.09.09 at 01:22:27 PM CEST
package test;
import java.util.ArrayList;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
 * JAXB binding for the {@code <body>} root element of the source schema.
 *
 * <p>Holds the mixed body content (paragraphs, figures, tables, formulas, …),
 * the top-level {@code <sec>} sections, and an optional {@code <sig-block>}.
 *
 * <p>Generated code (JAXB RI); field access is used directly
 * ({@link XmlAccessType#FIELD}), so there are no setters for the list
 * properties — callers mutate the live lists returned by the accessors.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "<API key>",
    "sec",
    "sigBlock"
})
@XmlRootElement(name = "body")
public class Body {

    @XmlElements({
        @XmlElement(name = "<API key>", type = <API key>.class),
        @XmlElement(name = "p", type = P.class),
        @XmlElement(name = "alternatives", type = Alternatives.class),
        @XmlElement(name = "related-article", type = RelatedArticle.class),
        @XmlElement(name = "graphic", type = Graphic.class),
        @XmlElement(name = "related-object", type = RelatedObject.class),
        @XmlElement(name = "boxed-text", type = BoxedText.class),
        @XmlElement(name = "def-list", type = DefList.class),
        @XmlElement(name = "media", type = Media.class),
        @XmlElement(name = "table-wrap", type = TableWrap.class),
        @XmlElement(name = "speech", type = Speech.class),
        @XmlElement(name = "table-wrap-group", type = TableWrapGroup.class),
        @XmlElement(name = "disp-formula-group", type = DispFormulaGroup.class),
        @XmlElement(name = "verse-group", type = VerseGroup.class),
        @XmlElement(name = "list", type = test.List.class),
        @XmlElement(name = "disp-quote", type = DispQuote.class),
        @XmlElement(name = "tex-math", type = TexMath.class),
        @XmlElement(name = "preformat", type = Preformat.class),
        @XmlElement(name = "chem-struct-wrap", type = ChemStructWrap.class),
        @XmlElement(name = "disp-formula", type = DispFormula.class),
        @XmlElement(name = "fig-group", type = FigGroup.class),
        @XmlElement(name = "statement", type = Statement.class),
        @XmlElement(name = "array", type = Array.class),
        // NOTE(review): this entry was truncated in the source after "http:"; the
        // namespace (standard MathML URI) and the type were restored from the
        // {@link MathType} entry in the accessor javadoc below — verify against
        // the generating schema before shipping.
        @XmlElement(name = "math", namespace = "http://www.w3.org/1998/Math/MathML", type = MathType.class),
        @XmlElement(name = "address", type = Address.class),
        @XmlElement(name = "fig", type = Fig.class)
    })
    protected java.util.List<Object> <API key>;
    protected java.util.List<Sec> sec;
    @XmlElement(name = "sig-block")
    protected SigBlock sigBlock;

    /**
     * Gets the value of the <API key> property.
     *
     * <p>This accessor returns a reference to the live list, not a snapshot:
     * any modification made to the returned list is reflected inside this
     * JAXB object, which is why there is no <CODE>set</CODE> method for this
     * property. For example, to add a new item:
     * <pre>
     *    <API key>().add(newItem);
     * </pre>
     *
     * <p>Objects of the following type(s) are allowed in the list:
     * {@link <API key> }
     * {@link P }
     * {@link Alternatives }
     * {@link RelatedArticle }
     * {@link Graphic }
     * {@link RelatedObject }
     * {@link BoxedText }
     * {@link DefList }
     * {@link Media }
     * {@link TableWrap }
     * {@link Speech }
     * {@link TableWrapGroup }
     * {@link DispFormulaGroup }
     * {@link VerseGroup }
     * {@link test.List }
     * {@link DispQuote }
     * {@link TexMath }
     * {@link Preformat }
     * {@link ChemStructWrap }
     * {@link DispFormula }
     * {@link FigGroup }
     * {@link Statement }
     * {@link Array }
     * {@link MathType }
     * {@link Address }
     * {@link Fig }
     *
     * @return the live, never-{@code null} list of body content elements
     */
    public java.util.List<Object> <API key>() {
        if (<API key> == null) {
            <API key> = new ArrayList<Object>();
        }
        return this.<API key>;
    }

    /**
     * Gets the value of the sec property.
     *
     * <p>This accessor returns a reference to the live list, not a snapshot:
     * any modification made to the returned list is reflected inside this
     * JAXB object, which is why there is no <CODE>set</CODE> method for the
     * sec property. For example, to add a new item:
     * <pre>
     *    getSec().add(newItem);
     * </pre>
     *
     * <p>Objects of the following type(s) are allowed in the list:
     * {@link Sec }
     *
     * @return the live, never-{@code null} list of {@link Sec} sections
     */
    public java.util.List<Sec> getSec() {
        if (sec == null) {
            sec = new ArrayList<Sec>();
        }
        return this.sec;
    }

    /**
     * Gets the value of the sigBlock property.
     *
     * @return possible object is {@link SigBlock }, or {@code null} if absent
     */
    public SigBlock getSigBlock() {
        return sigBlock;
    }

    /**
     * Sets the value of the sigBlock property.
     *
     * @param value allowed object is {@link SigBlock }
     */
    public void setSigBlock(SigBlock value) {
        this.sigBlock = value;
    }

}
package cloud_controller_ng
/*
* File Generated by enaml generator
* !!! Please do not edit this file !!!
*/
// LoggerEndpoint describes the logger endpoint that the Cloud Controller
// advertises at /v2/info. Fields are interface{} so an unset value is
// omitted from the rendered YAML rather than emitted as a zero value.
type LoggerEndpoint struct {

	// Port - Descr: Port for logger endpoint listed at /v2/info. Default: 443
	Port interface{} `yaml:"port,omitempty"`

	// UseSsl - Descr: Whether to use ssl for logger endpoint listed at /v2/info. Default: true
	UseSsl interface{} `yaml:"use_ssl,omitempty"`
}
{# the comments section for actor and channel item pages #}
{% load nonce avatar format entry %}
<div id="comments">
{% if comments|length %}
<h2>{{comments|length}} comment{{comments|length|pluralize}} so far</h2>
<ul>
{% for comment in comments %}
{% if not comment.actor_ref.spam %}
<li id="c-{{comment.uuid}}">
<div class="top"><div class="bottom">
{% linked_avatar comment.actor_ref "u" request %}
<p>{% if request %}{{comment|format_comment:request}}{% else %}{{comment|format_comment}}{% endif %}</p>
<p class="meta">
{{comment.created_at|je_timesince}} ago by {% actor_link comment.actor_ref request %}
{% <API key> request.user comment %}
{% entry_mark_as_spam request.user comment %}
</p>
</div></div>
</li>
{% else %}
<li class="spam">
<p>This comment is hidden because it appears to be spam</p>
</li>
{% endif %}
{% endfor %}
</ul>
{% else %}
<h2>No comments so far</h2>
{% endif %}
</div>
{% if request.user.is_authenticated %}
{% if not request.user.spam %}
<div id="leave-comment">
<h2>Add a Comment</h2>
{% include 'form_error.html' %}
<form id="comment-form" action="" method="post">
<div>
<label for="comment">Your Comment</label>
<textarea name="content" id="comment" rows="4" cols="25" tabindex="1"></textarea>
{% if <API key> and participants %}
<p id="participant-nicks">
Click to insert screen name{% if <API key> %} (or use keyboard shortcuts 1–9){% endif %}:
{% for lowernick, actor_ref in participants.items|dictsort:"0"|slice:":9" %}
{{actor_ref|avatar:"u"}}<a href="#"{% if <API key> %} accesskey="{{forloop.counter}}"{% endif %}>{{actor_ref.display_nick}}</a>{% if <API key> %}<span>/{{forloop.counter}}</span>{% endif %}
{% endfor %}
{% for lowernick, actor_ref in participants.items|dictsort:"0"|slice:"10:25" %}
{{actor_ref|avatar:"u"}}<a href="#">{{actor_ref.display_nick}}</a>
{% endfor %}
</p>
{% endif %}
{% if MARKDOWN_ENABLED %}
<p class="help">
Minimal formatting allowed (_<em>emphasis</em>_, *<strong>bold</strong>*, [link text](http://example.com/))
</p>
{% endif %}
</div>
<div class="buttons">
{{request.user|noncefield:"entry_add_comment"}}
<input type="hidden" name="stream" value="{{entry_ref.stream}}" />
<input type="hidden" name="entry" value="{{entry_ref.keyname}}" />
<input type="hidden" name="nick" value="{{request.user.nick}}" />
<input type="hidden" name="entry_add_comment" value="" />
<input type="submit" value="Submit Comment" tabindex="2"/>
</div>
</form>
</div>
{% else %}{# user is spam #}
<div class="notice">
<h3>Posting suspended</h3>
<p>
Your posting rights have been suspended because your activities triggered our spam watchdog. If you think this was a mistake, please contact us by email at <a href="mailto:support@{{NS_DOMAIN}}">support@{{NS_DOMAIN}}</a> and we'll sort out your case.
</p>
</div>
{% endif %}
{% else %}
<p class="centered">
<big><a href="/login?redirect_to=http://{{request.META.HTTP_HOST|urlencode}}{{request.META.PATH_INFO|urlencode}}">Sign in to add a comment</a></big>
</p>
{% endif %} |
package org.callimachusproject.concepts;
import org.openrdf.annotations.Iri;
/**
 * RDF-bound bean describing an OpenID authentication manager resource.
 * Each accessor pair maps a Java property onto the RDF predicate named in
 * its {@link Iri} annotation.
 */
@Iri("http://callimachusproject.org/rdf/2009/framework#OpenIDManager")
public interface OpenIDManager extends <API key> {

	/** Returns the OpenID provider endpoint URL for this manager. */
	@Iri("http://callimachusproject.org/rdf/2009/framework#openIdEndpointUrl")
	String <API key>();

	/** Sets the OpenID provider endpoint URL for this manager. */
	@Iri("http://callimachusproject.org/rdf/2009/framework#openIdEndpointUrl")
	void <API key>(String url);

	/** Returns the OpenID realm this manager authenticates for. */
	@Iri("http://callimachusproject.org/rdf/2009/framework#openIdRealm")
	String getOpenIdRealm();

	// NOTE(review): this mutator is named getOpenIdRealm but takes a value —
	// almost certainly intended to be setOpenIdRealm. Renaming would break
	// existing callers and any reflective/RDF bindings, so flagging only.
	@Iri("http://callimachusproject.org/rdf/2009/framework#openIdRealm")
	void getOpenIdRealm(String openIdRealm);
}
# Decompiled module: toontown.battle.MovieSound
from panda3d.core import Camera, Point3, Vec3
from direct.interval.IntervalGlobal import *
from BattleBase import *
from BattleProps import *
from BattleSounds import *
import BattleParticles
from RewardPanel import *
import MovieCamera
from direct.directnotify import DirectNotifyGlobal
import MovieUtil
import MovieNPCSOS
from toontown.toonbase import <API key>
notify = DirectNotifyGlobal.directNotify.newCategory('MovieSound')
# Attack sound effects, indexed by gag level: 0=bikehorn, 1=whistle,
# 2=bugle, 3=aoogah, 4=elephant, 5=foghorn, 6=opera (last name redacted).
soundFiles = ('AA_sound_bikehorn.ogg', 'AA_sound_whistle.ogg', 'AA_sound_bugle.ogg', 'AA_sound_aoogah.ogg', 'AA_sound_elephant.ogg', 'SZ_DD_foghorn.ogg', '<API key>.ogg')
# Instrument-appear sound effects, indexed by the same gag level.
appearSoundFiles = ('MG_tag_1.ogg', 'LB_receive_evidence.ogg', 'm_match_trumpet.ogg', 'TL_step_on_rake.ogg', 'toonbldg_grow.ogg', 'mailbox_full_wobble.ogg', 'mailbox_full_wobble.ogg')
# Sound played when the uber (opera) attack damages a suit.
hitSoundFiles = ('<API key>.ogg',)
# Seconds into the toon's attack animation when the gag sound fires.
tSound = 2.45
# Seconds until the targeted suit begins its reaction.
tSuitReact = 2.8
# Redacted name: distance derived from the suit lure distance, used to back
# the toon away from lured suits before playing the sound.
<API key> = MovieUtil.SUIT_LURE_DISTANCE * 0.75
TIME_TO_WALK_BACK = 0.5
# If the (redacted) back-away distance is zero, skip the walk-back entirely.
if <API key> == 0:
    TIME_TO_WALK_BACK = 0
# Redacted name: presumably TOON_SOUND_DELAY, the per-level stagger used in
# doSounds (referenced unredacted there) -- TODO confirm.
<API key> = 0.5
BEFORE_STARS = 0.5
AFTER_STARS = 1.75
def doSounds(sounds):
    """Build the battle movie for a round of toon sound attacks.

    sounds -- list of attack dicts (keys used here: 'level', 'target',
    each target carrying 'hp'); the full schema comes from the battle code.

    Returns a (soundTrack, camTrack) pair of intervals, or (None, None)
    when there are no sound attacks to show.
    """
    # Idiom cleanup: truthiness check instead of len()==0, guard-clause
    # return instead of a trailing else, and a comprehension for the
    # seven per-level buckets.
    if not sounds:
        return (None, None)
    npcArrivals, npcDepartures, npcs = MovieNPCSOS.doNPCTeleports(sounds)
    mtrack = Parallel()
    # Count attacks that landed at least one hit; a single hit triggers the
    # extra suit-shake reaction in __getSuitTrack.
    hitCount = 0
    # One bucket per gag level (0-6); each level's attacks play staggered.
    prevSounds = [[] for _ in range(7)]
    for sound in sounds:
        level = sound['level']
        prevSounds[level].append(sound)
        for target in sound['target']:
            if target['hp'] > 0:
                hitCount += 1
                break
    delay = 0.0
    for soundList in prevSounds:
        if soundList:
            mtrack.append(__doSoundsLevel(soundList, delay, hitCount, npcs))
            delay += TOON_SOUND_DELAY
    soundTrack = Sequence(npcArrivals, mtrack, npcDepartures)
    targets = sounds[0]['target']
    camDuration = mtrack.getDuration()
    enterDuration = npcArrivals.getDuration()
    exitDuration = npcDepartures.getDuration()
    camTrack = MovieCamera.chooseSoundShot(sounds, targets, camDuration, enterDuration, exitDuration)
    return (soundTrack, camTrack)
def __getSuitTrack(sound, lastSoundThatHit, delay, hitCount, targets, totalDamage, hpbonus, toon, npcs):
    """Build the per-suit reaction intervals for one sound attack.

    Handles damage popups, health-bar updates, the uber (opera) shatter
    particle effect, walking lured suits back, and the miss indicator.
    Returns a Parallel of suit tracks.
    """
    tracks = Parallel()
    attacks = 0
    uberDelay = 0.0
    isUber = 0
    level = sound['level']
    # Levels at or above the (redacted) threshold are the uber gag: longer
    # bonus delay and a break effect on hit.
    if level >= <API key>.<API key>:
        uberDelay = 3.0
        isUber = 1
    for target in targets:
        suit = target['suit']
        # Damage is shown once, on the last sound that actually hit.
        if totalDamage > 0 and sound == lastSoundThatHit:
            hp = target['hp']
            died = target['died']
            battle = sound['battle']
            kbbonus = target['kbbonus']
            suitTrack = Sequence()
            showDamage = Func(suit.showHpText, -totalDamage, openEnded=0, attackTrack=SOUND_TRACK, attackLevel=level)
            updateHealthBar = Func(suit.updateHealthBar, totalDamage)
            if isUber:
                breakEffect = BattleParticles.<API key>(file='soundBreak')
                breakEffect.setDepthWrite(0)
                breakEffect.setDepthTest(0)
                breakEffect.setTwoSided(1)
                breakEffect.setBin('fixed', 10)
                soundEffect = <API key>.getSound(hitSoundFiles[0])
            suitTrack.append(Wait(delay + tSuitReact))
            if isUber:
                # Random stagger so multiple suits don't shatter in unison.
                delayTime = random.random()
                suitTrack.append(Wait(delayTime + 2.0))
                suitTrack.append(Func(setPosFromOther, breakEffect, suit, Point3(0, 0.0, suit.getHeight() - 1.0)))
                suitTrack.append(Parallel(showDamage, updateHealthBar, SoundInterval(soundEffect, node=suit), __getPartTrack(breakEffect, 0.0, 1.0, [breakEffect, suit, 0], softStop=-0.5)))
            else:
                suitTrack.append(showDamage)
                suitTrack.append(updateHealthBar)
            # A single hit gets an extra shake (redacted MovieUtil helper).
            if hitCount == 1:
                suitTrack.append(Parallel(ActorInterval(suit, 'squirt-small'), MovieUtil.<API key>(suit, 0.5, 1.8)))
            else:
                suitTrack.append(ActorInterval(suit, 'squirt-small'))
            # kbbonus == 0 marks a lured suit: walk it back, then unlure.
            if kbbonus == 0:
                suitTrack.append(<API key>(suit, battle))
                suitTrack.append(Func(battle.unlureSuit, suit))
            bonusTrack = None
            if hpbonus > 0:
                # NOTE(review): 'delay' appears twice in this Wait; looks
                # suspicious but may be intentional padding -- confirm
                # against the original source before changing.
                bonusTrack = Sequence(Wait(delay + tSuitReact + delay + 0.75 + uberDelay), Func(suit.showHpText, -hpbonus, 1, openEnded=0), Func(suit.updateHealthBar, hpbonus))
            suitTrack.append(Func(suit.loop, 'neutral'))
            if bonusTrack == None:
                tracks.append(suitTrack)
            else:
                tracks.append(Parallel(suitTrack, bonusTrack))
        elif totalDamage <= 0:
            # The whole attack missed: show the miss indicator instead.
            tracks.append(Sequence(Wait(2.9), Func(MovieUtil.indicateMissed, suit, 1.0)))
    return tracks
def __doSoundsLevel(sounds, delay, hitCount, npcs):
    """Build the movie for every sound attack of a single gag level.

    Dispatches each attack to its builder via soundfn_array, attaches the
    suit reactions, then appends death/revive tracks.  Returns a Sequence
    of (parallel attacks, parallel deaths).
    """
    # Find the last attack that landed and the summed damage; the suit
    # track shows damage only on that last hitting attack.
    lastSoundThatHit = None
    totalDamage = 0
    for sound in sounds:
        for target in sound['target']:
            if target['hp'] > 0:
                lastSoundThatHit = sound
                totalDamage += target['hp']
                break
    mainTrack = Sequence()
    tracks = Parallel()
    deathTracks = Parallel()
    for sound in sounds:
        toon = sound['toon']
        # NPC SOS attacks substitute the NPC actor for the toon.
        if 'npc' in sound:
            toon = sound['npc']
        level = sound['level']
        targets = sound['target']
        hpbonus = sound['hpbonus']
        attackMTrack = soundfn_array[sound['level']](sound, delay, toon, targets, level)
        tracks.append(Sequence(Wait(delay), attackMTrack))
        tracks.append(__getSuitTrack(sound, lastSoundThatHit, delay, hitCount, targets, totalDamage, hpbonus, toon, npcs))
        for target in targets:
            battle = sound['battle']
            suit = target['suit']
            died = target['died']
            revived = target['revived']
            # Both branches call redacted MovieUtil helpers (presumably the
            # suit revive/death movies) -- confirm names in original source.
            if revived:
                deathTracks.append(MovieUtil.<API key>(suit, toon, battle, npcs))
            elif died:
                deathTracks.append(MovieUtil.<API key>(suit, toon, battle, npcs))
    mainTrack.append(tracks)
    mainTrack.append(deathTracks)
    return mainTrack
def <API key>(suit, battle):
resetPos, resetHpr = battle.getActorPosHpr(suit)
moveDist = Vec3(suit.getPos(battle) - resetPos).length()
moveDuration = 0.5
walkTrack = Sequence(Func(suit.setHpr, battle, resetHpr), ActorInterval(suit, 'walk', startTime=1, duration=moveDuration, endTime=0.0001), Func(suit.loop, 'neutral'))
moveTrack = LerpPosInterval(suit, moveDuration, resetPos, other=battle)
return Parallel(walkTrack, moveTrack)
def <API key>(suit, battle):
    # Thin alias delegating to the walk-back helper defined above; both
    # names are redacted in this copy.
    return <API key>(suit, battle)
def <API key>(sound, delay, toon, operaInstrument = None):
    """Build the toon's animation track for a sound attack (name redacted).

    If any target is lured and the attacker is not an NPC, the toon first
    walks backward by the (redacted) back-away distance, plays the 'sound'
    animation, then walks forward again.  When operaInstrument is given,
    the animation is split around a sound-wave particle effect emitted
    from the instrument.  Returns a Sequence.
    """
    isNPC = 0
    if sound.get('npc'):
        isNPC = 1
    battle = sound['battle']
    hasLuredSuits = __hasLuredSuits(sound)
    if not isNPC:
        oldPos, oldHpr = battle.getActorPosHpr(toon)
        newPos = Point3(oldPos)
        # Back away along -Y by the redacted lure-derived distance.
        newPos.setY(newPos.getY() - <API key>)
    retval = Sequence(Wait(delay))
    # Redacted condition: presumably TIME_TO_WALK_BACK (skips the walk when
    # the distance constant is zero) -- TODO confirm.
    if <API key> and hasLuredSuits and not isNPC:
        retval.append(Parallel(ActorInterval(toon, 'walk', startTime=1, duration=TIME_TO_WALK_BACK, endTime=0.0001), LerpPosInterval(toon, TIME_TO_WALK_BACK, newPos, other=battle)))
    if operaInstrument:
        sprayEffect = BattleParticles.<API key>(file='soundWave')
        sprayEffect.setDepthWrite(0)
        sprayEffect.setDepthTest(0)
        sprayEffect.setTwoSided(1)
        # Split point (seconds) of the 'sound' animation around the effect.
        I1 = 2.8
        retval.append(ActorInterval(toon, 'sound', playRate=1.0, startTime=0.0, endTime=I1))
        retval.append(Func(setPosFromOther, sprayEffect, operaInstrument, (0, 1.6, -0.18)))
        retval.append(__getPartTrack(sprayEffect, 0.0, 6.0, [sprayEffect, toon, 0], softStop=-3.5))
        retval.append(ActorInterval(toon, 'sound', playRate=1.0, startTime=I1))
    else:
        retval.append(ActorInterval(toon, 'sound'))
    if <API key> and hasLuredSuits and not isNPC:
        retval.append(Parallel(ActorInterval(toon, 'walk', startTime=0.0001, duration=TIME_TO_WALK_BACK, endTime=1), LerpPosInterval(toon, TIME_TO_WALK_BACK, oldPos, other=battle)))
    retval.append(Func(toon.loop, 'neutral'))
    return retval
def __hasLuredSuits(sound):
retval = False
targets = sound['target']
for target in targets:
kbbonus = target['kbbonus']
if kbbonus == 0:
retval = True
break
return retval
def __doBikehorn(sound, delay, toon, targets, level):
    """Build the bikehorn (level 0) attack movie for one toon.

    Returns a Parallel of the megaphone/instrument prop track, the toon
    animation track, and the squash-stretch-plus-sfx attack track.
    """
    tracks = Parallel()
    instrMin = Vec3(0.001, 0.001, 0.001)
    instrMax = Vec3(0.65, 0.65, 0.65)
    # Redacted scale factor applied to all instrument scales.
    instrMax *= <API key>
    instrStretch = Vec3(0.6, 1.1, 0.6)
    instrStretch *= <API key>
    megaphone = globalPropPool.getProp('megaphone')
    instrument = globalPropPool.getProp('bikehorn')
    # Wrapped in Func below so the pose is applied when the track plays.
    def setInstrumentStats(instrument = instrument):
        instrument.setPos(-1.1, -1.4, 0.1)
        instrument.setHpr(145, 0, 0)
        instrument.setScale(instrMin)
    hand = toon.getRightHand()
    megaphoneShow = Sequence(Func(megaphone.reparentTo, hand), Func(instrument.reparentTo, hand), Func(setInstrumentStats))
    megaphoneHide = Sequence(Func(MovieUtil.removeProp, megaphone), Func(MovieUtil.removeProp, instrument))
    instrumentAppearSfx = <API key>.getSound(appearSoundFiles[level])
    grow = LerpScaleInterval(instrument, duration=0.2, startScale=instrMin, scale=instrMax)
    instrumentAppear = Parallel(grow, Sequence(Wait(0.15), SoundInterval(instrumentAppearSfx, node=toon)))
    # Squash-and-stretch of both the horn and the megaphone on the blast.
    stretchInstr = LerpScaleInterval(instrument, duration=0.2, startScale=instrMax, scale=instrStretch, blendType='easeOut')
    backInstr = LerpScaleInterval(instrument, duration=0.2, startScale=instrStretch, scale=instrMax, blendType='easeIn')
    stretchMega = LerpScaleInterval(megaphone, duration=0.2, startScale=megaphone.getScale(), scale=0.9, blendType='easeOut')
    backMega = LerpScaleInterval(megaphone, duration=0.2, startScale=0.9, scale=megaphone.getScale(), blendType='easeIn')
    attackTrack = Parallel(Sequence(stretchInstr, backInstr), Sequence(stretchMega, backMega))
    hasLuredSuits = __hasLuredSuits(sound)
    delayTime = delay
    if hasLuredSuits:
        # Pad for the walk-back from lured suits.
        delayTime += TIME_TO_WALK_BACK
    megaphoneTrack = Sequence(Wait(delayTime), megaphoneShow, Wait(1.0), instrumentAppear, Wait(3.0), megaphoneHide)
    tracks.append(megaphoneTrack)
    toonTrack = <API key>(sound, delay, toon)
    tracks.append(toonTrack)
    soundEffect = <API key>.getSound(soundFiles[level])
    instrumentshrink = LerpScaleInterval(instrument, duration=0.1, startScale=instrMax, scale=instrMin)
    if soundEffect:
        delayTime = delay + tSound
        if hasLuredSuits:
            delayTime += TIME_TO_WALK_BACK
        soundTrack = Sequence(Wait(delayTime), Parallel(attackTrack, SoundInterval(soundEffect, node=toon)), Wait(0.2), instrumentshrink)
        tracks.append(soundTrack)
    return tracks
def __doWhistle(sound, delay, toon, targets, level):
    """Build the whistle (level 1) attack movie for one toon.

    Same structure as __doBikehorn: prop track, toon animation track, and
    stretch-plus-sfx attack track, returned as a Parallel.
    """
    tracks = Parallel()
    instrMin = Vec3(0.001, 0.001, 0.001)
    instrMax = Vec3(0.2, 0.2, 0.2)
    # Redacted scale factor applied to all instrument scales.
    instrMax *= <API key>
    instrStretch = Vec3(0.25, 0.25, 0.25)
    instrStretch *= <API key>
    megaphone = globalPropPool.getProp('megaphone')
    instrument = globalPropPool.getProp('whistle')
    # Wrapped in Func below so the pose is applied when the track plays.
    def setInstrumentStats(instrument = instrument):
        instrument.setPos(-1.2, -1.3, 0.1)
        instrument.setHpr(145, 0, 85)
        instrument.setScale(instrMin)
    hand = toon.getRightHand()
    megaphoneShow = Sequence(Func(megaphone.reparentTo, hand), Func(instrument.reparentTo, hand), Func(setInstrumentStats))
    megaphoneHide = Sequence(Func(MovieUtil.removeProp, megaphone), Func(MovieUtil.removeProp, instrument))
    instrumentAppearSfx = <API key>.getSound(appearSoundFiles[level])
    grow = LerpScaleInterval(instrument, duration=0.2, startScale=instrMin, scale=instrMax)
    instrumentAppear = Parallel(grow, Sequence(Wait(0.05), SoundInterval(instrumentAppearSfx, node=toon)))
    stretchInstr = LerpScaleInterval(instrument, duration=0.2, startScale=instrMax, scale=instrStretch, blendType='easeOut')
    backInstr = LerpScaleInterval(instrument, duration=0.2, startScale=instrStretch, scale=instrMax, blendType='easeIn')
    attackTrack = Sequence(stretchInstr, backInstr)
    hasLuredSuits = __hasLuredSuits(sound)
    delayTime = delay
    if hasLuredSuits:
        # Pad for the walk-back from lured suits.
        delayTime += TIME_TO_WALK_BACK
    megaphoneTrack = Sequence(Wait(delayTime), megaphoneShow, Wait(1.0), instrumentAppear, Wait(3.0), megaphoneHide)
    tracks.append(megaphoneTrack)
    toonTrack = <API key>(sound, delay, toon)
    tracks.append(toonTrack)
    soundEffect = <API key>.getSound(soundFiles[level])
    instrumentshrink = LerpScaleInterval(instrument, duration=0.1, startScale=instrMax, scale=instrMin)
    if soundEffect:
        delayTime = delay + tSound
        if hasLuredSuits:
            delayTime += TIME_TO_WALK_BACK
        soundTrack = Sequence(Wait(delayTime), Parallel(attackTrack, SoundInterval(soundEffect, node=toon)), Wait(0.2), instrumentshrink)
        tracks.append(soundTrack)
    return tracks
def __doBugle(sound, delay, toon, targets, level):
    """Build the bugle (level 2) attack movie for one toon.

    Unlike the other builders, the gag sound effect is played inside the
    megaphone track and the separate soundTrack is only a Wait pad.
    Returns a Parallel.
    """
    tracks = Parallel()
    instrMin = Vec3(0.001, 0.001, 0.001)
    instrMax = Vec3(0.4, 0.4, 0.4)
    # Redacted scale factor applied to all instrument scales.
    instrMax *= <API key>
    instrStretch = Vec3(0.5, 0.5, 0.5)
    instrStretch *= <API key>
    megaphone = globalPropPool.getProp('megaphone')
    instrument = globalPropPool.getProp('bugle')
    # Wrapped in Func below so the pose is applied when the track plays.
    def setInstrumentStats(instrument = instrument):
        instrument.setPos(-1.3, -1.4, 0.1)
        instrument.setHpr(145, 0, 85)
        instrument.setScale(instrMin)
    # NOTE(review): this helper fires seq.start() directly instead of
    # returning the Sequence, so the shake is not tracked by the movie.  It
    # also re-appends the same two interval objects and loops while i < num
    # starting at i = 1 (num - 1 appends, beginning with outShake) -- looks
    # suspicious; confirm against the original source before changing.
    def longshake(model, num):
        inShake = LerpScaleInterval(model, duration=0.2, startScale=instrMax, scale=instrStretch, blendType='easeInOut')
        outShake = LerpScaleInterval(model, duration=0.2, startScale=instrStretch, scale=instrMax, blendType='easeInOut')
        i = 1
        seq = Sequence()
        while i < num:
            if i % 2 == 0:
                seq.append(inShake)
            else:
                seq.append(outShake)
            i += 1
        seq.start()
    hand = toon.getRightHand()
    megaphoneShow = Sequence(Func(megaphone.reparentTo, hand), Func(instrument.reparentTo, hand), Func(setInstrumentStats))
    megaphoneHide = Sequence(Func(MovieUtil.removeProp, megaphone), Func(MovieUtil.removeProp, instrument))
    instrumentAppearSfx = <API key>.getSound(appearSoundFiles[level])
    grow = LerpScaleInterval(instrument, duration=1, startScale=instrMin, scale=instrMax, blendType='easeInOut')
    instrumentshrink = LerpScaleInterval(instrument, duration=0.1, startScale=instrMax, scale=instrMin)
    instrumentAppear = Sequence(grow, Wait(0), Func(longshake, instrument, 5))
    hasLuredSuits = __hasLuredSuits(sound)
    delayTime = delay
    if hasLuredSuits:
        # Pad for the walk-back from lured suits.
        delayTime += TIME_TO_WALK_BACK
    soundEffect = <API key>.getSound(soundFiles[level])
    # The bugle sfx plays here, in parallel with the prop choreography.
    megaphoneTrack = Parallel(Sequence(Wait(delay + 1.7), SoundInterval(soundEffect, node=toon)), Sequence(Wait(delayTime), megaphoneShow, Wait(1.7), instrumentAppear, Wait(1), instrumentshrink, Wait(1.5), megaphoneHide))
    tracks.append(megaphoneTrack)
    toonTrack = <API key>(sound, delay, toon)
    tracks.append(toonTrack)
    if soundEffect:
        delayTime = delay + tSound
        if hasLuredSuits:
            delayTime += TIME_TO_WALK_BACK
        # Only pads the Parallel's duration; the sfx was appended above.
        soundTrack = Wait(delayTime)
        tracks.append(soundTrack)
    return tracks
def __doAoogah(sound, delay, toon, targets, level):
    """Build the aoogah (level 3) attack movie for one toon.

    Returns a Parallel of the prop track, the toon animation track, and
    the stretch-hold-release attack track with the gag sfx.
    """
    tracks = Parallel()
    instrMin = Vec3(0.001, 0.001, 0.001)
    instrMax = Vec3(0.5, 0.5, 0.5)
    # Redacted scale factor applied to all instrument scales.
    instrMax *= <API key>
    instrStretch = Vec3(1.1, 0.9, 0.4)
    instrStretch *= <API key>
    megaphone = globalPropPool.getProp('megaphone')
    instrument = globalPropPool.getProp('aoogah')
    # Wrapped in Func below so the pose is applied when the track plays.
    def setInstrumentStats(instrument = instrument):
        instrument.setPos(-1.0, -1.5, 0.2)
        instrument.setHpr(145, 0, 85)
        instrument.setScale(instrMin)
    hand = toon.getRightHand()
    megaphoneShow = Sequence(Func(megaphone.reparentTo, hand), Func(instrument.reparentTo, hand), Func(setInstrumentStats))
    megaphoneHide = Sequence(Func(MovieUtil.removeProp, megaphone), Func(MovieUtil.removeProp, instrument))
    instrumentAppearSfx = <API key>.getSound(appearSoundFiles[level])
    grow = LerpScaleInterval(instrument, duration=0.2, startScale=instrMin, scale=instrMax)
    instrumentAppear = Parallel(grow, Sequence(Wait(0.05), SoundInterval(instrumentAppearSfx, node=toon)))
    stretchInstr = LerpScaleInterval(instrument, duration=0.2, startScale=instrMax, scale=instrStretch, blendType='easeOut')
    backInstr = LerpScaleInterval(instrument, duration=0.2, startScale=instrStretch, scale=instrMax, blendType='easeInOut')
    # Hold the stretched pose for a second before releasing.
    attackTrack = Sequence(stretchInstr, Wait(1), backInstr)
    hasLuredSuits = __hasLuredSuits(sound)
    delayTime = delay
    if hasLuredSuits:
        # Pad for the walk-back from lured suits.
        delayTime += TIME_TO_WALK_BACK
    megaphoneTrack = Sequence(Wait(delayTime), megaphoneShow, Wait(1.0), instrumentAppear, Wait(3.0), megaphoneHide)
    tracks.append(megaphoneTrack)
    toonTrack = <API key>(sound, delay, toon)
    tracks.append(toonTrack)
    soundEffect = <API key>.getSound(soundFiles[level])
    instrumentshrink = LerpScaleInterval(instrument, duration=0.1, startScale=instrMax, scale=instrMin)
    if soundEffect:
        delayTime = delay + tSound
        if hasLuredSuits:
            delayTime += TIME_TO_WALK_BACK
        soundTrack = Sequence(Wait(delayTime), Parallel(attackTrack, SoundInterval(soundEffect, node=toon), Sequence(Wait(1.5), instrumentshrink)))
        tracks.append(soundTrack)
    return tracks
def __doElephant(sound, delay, toon, targets, level):
    """Build the elephant trunk (level 4) attack movie for one toon.

    Uses a two-stage grow and a two-stage trunk stretch.  Returns a
    Parallel of the prop track, toon track, and attack track.
    """
    tracks = Parallel()
    instrMin = Vec3(0.001, 0.001, 0.001)
    instrMax1 = Vec3(0.3, 0.4, 0.2)
    # Redacted scale factor applied to all instrument scales.
    instrMax1 *= <API key>
    instrMax2 = Vec3(0.3, 0.3, 0.3)
    instrMax2 *= <API key>
    instrStretch1 = Vec3(0.3, 0.5, 0.25)
    instrStretch1 *= <API key>
    instrStretch2 = Vec3(0.3, 0.7, 0.3)
    instrStretch2 *= <API key>
    megaphone = globalPropPool.getProp('megaphone')
    instrument = globalPropPool.getProp('elephant')
    # Wrapped in Func below so the pose is applied when the track plays.
    def setInstrumentStats(instrument = instrument):
        instrument.setPos(-0.6, -0.9, 0.15)
        instrument.setHpr(145, 0, 85)
        instrument.setScale(instrMin)
    hand = toon.getRightHand()
    megaphoneShow = Sequence(Func(megaphone.reparentTo, hand), Func(instrument.reparentTo, hand), Func(setInstrumentStats))
    megaphoneHide = Sequence(Func(MovieUtil.removeProp, megaphone), Func(MovieUtil.removeProp, instrument))
    instrumentAppearSfx = <API key>.getSound(appearSoundFiles[level])
    grow1 = LerpScaleInterval(instrument, duration=0.3, startScale=instrMin, scale=instrMax1)
    grow2 = LerpScaleInterval(instrument, duration=0.3, startScale=instrMax1, scale=instrMax2)
    instrumentAppear = Parallel(Sequence(grow1, grow2), Sequence(Wait(0.05), SoundInterval(instrumentAppearSfx, node=toon)))
    stretchInstr1 = LerpScaleInterval(instrument, duration=0.1, startScale=instrMax2, scale=instrStretch1, blendType='easeOut')
    stretchInstr2 = LerpScaleInterval(instrument, duration=0.1, startScale=instrStretch1, scale=instrStretch2, blendType='easeOut')
    stretchInstr = Sequence(stretchInstr1, stretchInstr2)
    backInstr = LerpScaleInterval(instrument, duration=0.1, startScale=instrStretch2, scale=instrMax2, blendType='easeOut')
    # Hold the stretched pose for a second before releasing.
    attackTrack = Sequence(stretchInstr, Wait(1), backInstr)
    hasLuredSuits = __hasLuredSuits(sound)
    delayTime = delay
    if hasLuredSuits:
        # Pad for the walk-back from lured suits.
        delayTime += TIME_TO_WALK_BACK
    megaphoneTrack = Sequence(Wait(delayTime), megaphoneShow, Wait(1.0), instrumentAppear, Wait(3.0), megaphoneHide)
    tracks.append(megaphoneTrack)
    toonTrack = <API key>(sound, delay, toon)
    tracks.append(toonTrack)
    soundEffect = <API key>.getSound(soundFiles[level])
    instrumentshrink = LerpScaleInterval(instrument, duration=0.1, startScale=instrMax2, scale=instrMin)
    if soundEffect:
        delayTime = delay + tSound
        if hasLuredSuits:
            delayTime += TIME_TO_WALK_BACK
        soundTrack = Sequence(Wait(delayTime), Parallel(attackTrack, SoundInterval(soundEffect, node=toon), Sequence(Wait(1.5), instrumentshrink)))
        tracks.append(soundTrack)
    return tracks
def __doFoghorn(sound, delay, toon, targets, level):
    """Build the foghorn (level 5) attack movie for one toon.

    The horn grows, spins 90 degrees while stretching, then shrinks away.
    Returns a Parallel of the prop track, toon track, and attack track.
    """
    tracks = Parallel()
    instrMin = Vec3(0.001, 0.001, 0.001)
    instrMax1 = Vec3(0.1, 0.1, 0.1)
    # Redacted scale factor applied to all instrument scales.
    instrMax1 *= <API key>
    instrMax2 = Vec3(0.3, 0.3, 0.3)
    instrMax2 *= <API key>
    instrStretch = Vec3(0.4, 0.4, 0.4)
    instrStretch *= <API key>
    megaphone = globalPropPool.getProp('megaphone')
    instrument = globalPropPool.getProp('fog_horn')
    # Wrapped in Func below so the pose is applied when the track plays.
    def setInstrumentStats(instrument = instrument):
        instrument.setPos(-0.8, -0.9, 0.2)
        instrument.setHpr(145, 0, 0)
        instrument.setScale(instrMin)
    hand = toon.getRightHand()
    megaphoneShow = Sequence(Func(megaphone.reparentTo, hand), Func(instrument.reparentTo, hand), Func(setInstrumentStats))
    megaphoneHide = Sequence(Func(MovieUtil.removeProp, megaphone), Func(MovieUtil.removeProp, instrument))
    instrumentAppearSfx = <API key>.getSound(appearSoundFiles[level])
    grow1 = LerpScaleInterval(instrument, duration=1, startScale=instrMin, scale=instrMax1)
    grow2 = LerpScaleInterval(instrument, duration=0.1, startScale=instrMax1, scale=instrMax2)
    instrumentAppear = Parallel(Sequence(grow1, grow2), Sequence(Wait(0.05), SoundInterval(instrumentAppearSfx, node=toon)))
    stretchInstr = LerpScaleInterval(instrument, duration=0.3, startScale=instrMax2, scale=instrStretch, blendType='easeOut')
    backInstr = LerpScaleInterval(instrument, duration=1.0, startScale=instrStretch, scale=instrMin, blendType='easeIn')
    spinInstr = LerpHprInterval(instrument, duration=1.5, startHpr=Vec3(145, 0, 0), hpr=Vec3(145, 0, 90), blendType='easeInOut')
    attackTrack = Parallel(Sequence(Wait(0.2), spinInstr), Sequence(stretchInstr, Wait(0.5), backInstr))
    hasLuredSuits = __hasLuredSuits(sound)
    delayTime = delay
    if hasLuredSuits:
        # Pad for the walk-back from lured suits.
        delayTime += TIME_TO_WALK_BACK
    megaphoneTrack = Sequence(Wait(delayTime), megaphoneShow, Wait(1.0), instrumentAppear, Wait(3.0), megaphoneHide)
    tracks.append(megaphoneTrack)
    toonTrack = <API key>(sound, delay, toon)
    tracks.append(toonTrack)
    soundEffect = <API key>.getSound(soundFiles[level])
    if soundEffect:
        delayTime = delay + tSound
        if hasLuredSuits:
            delayTime += TIME_TO_WALK_BACK
        soundTrack = Sequence(Wait(delayTime), Parallel(attackTrack, SoundInterval(soundEffect, node=toon)))
        tracks.append(soundTrack)
    return tracks
def __doOpera(sound, delay, toon, targets, level):
    """Build the opera singer (level 6, uber) attack movie for one toon.

    Passes the singer prop to the (redacted) toon-track builder so the
    sound-wave particle effect is emitted from it.  Returns a Parallel.
    """
    tracks = Parallel()
    # No-op self-assignment kept verbatim from the decompile.
    delay = delay
    instrMin = Vec3(0.001, 0.001, 0.001)
    instrMax1 = Vec3(1.7, 1.7, 1.7)
    # Redacted scale factor applied to all instrument scales.
    instrMax1 *= <API key>
    instrMax2 = Vec3(2.2, 2.2, 2.2)
    instrMax2 *= <API key>
    instrStretch = Vec3(0.4, 0.4, 0.4)
    instrStretch *= <API key>
    megaphone = globalPropPool.getProp('megaphone')
    instrument = globalPropPool.getProp('singing')
    head = instrument.find('**/opera_singer')
    head.setPos(0, 0, 0)
    # Wrapped in Func below so the pose is applied when the track plays.
    def setInstrumentStats(instrument = instrument):
        newPos = Vec3(-0.8, -0.9, 0.2)
        newPos *= 1.3
        instrument.setPos(newPos[0], newPos[1], newPos[2])
        instrument.setHpr(145, 0, 90)
        instrument.setScale(instrMin)
    hand = toon.getRightHand()
    megaphoneShow = Sequence(Func(megaphone.reparentTo, hand), Func(instrument.reparentTo, hand), Func(setInstrumentStats))
    megaphoneHide = Sequence(Func(MovieUtil.removeProp, megaphone), Func(MovieUtil.removeProp, instrument))
    instrumentAppearSfx = <API key>.getSound(appearSoundFiles[level])
    grow1 = LerpScaleInterval(instrument, duration=1, startScale=instrMin, scale=instrMax1, blendType='easeOut')
    grow2 = LerpScaleInterval(instrument, duration=1.1, startScale=instrMax1, scale=instrMax2, blendType='easeIn')
    shrink2 = LerpScaleInterval(instrument, duration=0.1, startScale=instrMax2, scale=instrMin)
    instrumentAppear = Parallel(Sequence(grow1, grow2, Wait(6.0), shrink2), Sequence(Wait(0.0), SoundInterval(instrumentAppearSfx, node=toon)))
    hasLuredSuits = __hasLuredSuits(sound)
    delayTime = delay
    if hasLuredSuits:
        # Pad for the walk-back from lured suits.
        delayTime += TIME_TO_WALK_BACK
    megaphoneTrack = Sequence(Wait(delayTime), megaphoneShow, Wait(1.0), instrumentAppear, Wait(2.0), megaphoneHide)
    tracks.append(megaphoneTrack)
    toonTrack = <API key>(sound, delay, toon, operaInstrument=instrument)
    tracks.append(toonTrack)
    soundEffect = <API key>.getSound(soundFiles[level])
    if soundEffect:
        delayTime = delay + tSound - 0.3
        if hasLuredSuits:
            delayTime += TIME_TO_WALK_BACK
        soundTrack = Sequence(Wait(delayTime), SoundInterval(soundEffect, node=toon))
        # Zero-duration filler interval, kept verbatim from the decompile.
        tracks.append(Sequence(Wait(0)))
        tracks.append(soundTrack)
    return tracks
def setPosFromOther(dest, source, offset = Point3(0, 0, 0)):
    """Place dest at source's position (plus offset) in render space and
    reparent it to render."""
    worldPos = render.getRelativePoint(source, offset)
    dest.setPos(worldPos)
    dest.reparentTo(render)
# Dispatch table: gag level (0-6) -> per-toon attack movie builder.
soundfn_array = (__doBikehorn,
 __doWhistle,
 __doBugle,
 __doAoogah,
 __doElephant,
 __doFoghorn,
 __doOpera)
def __getPartTrack(particleEffect, startDelay, durationDelay, partExtraArgs, softStop = 0):
    """Wrap a particle effect in a Sequence: wait startDelay seconds, then
    play the effect for durationDelay seconds.

    partExtraArgs is [effect, parentNode] or [effect, parentNode,
    worldRelative]; worldRelative defaults to 1 when omitted.  (The
    particleEffect parameter itself is unused; the effect played comes
    from partExtraArgs[0], matching the original behavior.)
    """
    pEffect, parent = partExtraArgs[0], partExtraArgs[1]
    worldRelative = partExtraArgs[2] if len(partExtraArgs) == 3 else 1
    return Sequence(Wait(startDelay), ParticleInterval(pEffect, parent, worldRelative, duration=durationDelay, cleanup=True, softStopT=softStop))
package management
import (
"context"
"reflect"
"sort"
"sync"
"github.com/hashicorp/go-multierror"
"github.com/pkg/errors"
v3 "github.com/rancher/rancher/pkg/apis/management.cattle.io/v3"
"github.com/rancher/rancher/pkg/features"
"github.com/rancher/rancher/pkg/rbac"
"github.com/rancher/rancher/pkg/settings"
"github.com/rancher/rancher/pkg/types/config"
"github.com/rancher/rancher/pkg/wrangler"
"github.com/sirupsen/logrus"
"golang.org/x/crypto/bcrypt"
corev1 "k8s.io/api/core/v1"
rbacv1 "k8s.io/api/rbac/v1"
apierrors "k8s.io/apimachinery/pkg/api/errors"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/util/net"
"k8s.io/client-go/util/retry"
)
const (
	// bootstrappedRole labels RBAC role objects created by this bootstrap code.
	bootstrappedRole = "authz.management.cattle.io/bootstrapped-role"
	// Redacted name: the value marking that the default admin was created.
	<API key> = "admincreated"
	// cattleNamespace is the namespace Rancher's system resources live in.
	cattleNamespace = "cattle-system"
	// Redacted name: bootstrapping annotation/label key.
	<API key> = "authz.management.cattle.io/bootstrapping"
	// Redacted name: identifier for the bootstrapped admin user.
	<API key> = "admin-user"
)

var (
	// defaultAdminLabel selects the bootstrapped default admin user object
	// (key and value constants are redacted above).
	defaultAdminLabel = map[string]string{<API key>: <API key>}
	// adminCreateLock serializes concurrent attempts to create the admin user.
	adminCreateLock sync.Mutex
)
func addRoles(wrangler *wrangler.Context, management *config.ManagementContext) (string, error) {
rb := newRoleBuilder()
rb.addRole("Create Clusters", "clusters-create").
addRule().apiGroups("management.cattle.io").resources("clusters").verbs("create").
addRule().apiGroups("provisioning.cattle.io").resources("clusters").verbs("create").
addRule().apiGroups("management.cattle.io").resources("templates", "templateversions").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("nodedrivers").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("kontainerdrivers").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("nodetemplates").verbs("*").
addRule().apiGroups("").resources("secrets").verbs("create").
addRule().apiGroups("management.cattle.io").resources("cisconfigs").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("rke-machine-config.cattle.io").resources("*").verbs("create").
addRule().apiGroups("catalog.cattle.io").resources("clusterrepos").verbs("get", "list", "watch")
rb.addRole("Manage Node Drivers", "nodedrivers-manage").
addRule().apiGroups("management.cattle.io").resources("nodedrivers").verbs("*")
rb.addRole("Manage Cluster Drivers", "<API key>").
addRule().apiGroups("management.cattle.io").resources("kontainerdrivers").verbs("*")
rb.addRole("Manage Catalogs", "catalogs-manage").
addRule().apiGroups("management.cattle.io").resources("catalogs", "templates", "templateversions").verbs("*")
rb.addRole("Use Catalog Templates", "catalogs-use").
addRule().apiGroups("management.cattle.io").resources("templates", "templateversions").verbs("get", "list", "watch")
rb.addRole("Manage Users", "users-manage").
addRule().apiGroups("management.cattle.io").resources("users", "globalrolebindings").verbs("*").
addRule().apiGroups("management.cattle.io").resources("globalroles").verbs("get", "list", "watch")
rb.addRole("Manage Roles", "roles-manage").
addRule().apiGroups("management.cattle.io").resources("roletemplates").verbs("*")
rb.addRole("Manage Authentication", "authn-manage").
addRule().apiGroups("management.cattle.io").resources("authconfigs").verbs("get", "list", "watch", "update")
rb.addRole("Manage Settings", "settings-manage").
addRule().apiGroups("management.cattle.io").resources("settings").verbs("*")
rb.addRole("Manage Features", "features-manage").
addRule().apiGroups("management.cattle.io").resources("features").verbs("get", "list", "watch", "update")
rb.addRole("Manage PodSecurityPolicy Templates", "<API key>").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("*")
rb.addRole("Create RKE Templates", "<API key>").
addRule().apiGroups("management.cattle.io").resources("clustertemplates").verbs("create")
rb.addRole("Create RKE Template Revisions", "<API key>").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("create")
rb.addRole("View Rancher Metrics", "<API key>").
addRule().apiGroups("management.cattle.io").resources("ranchermetrics").verbs("get")
rb.addRole("Admin", "admin").
addRule().apiGroups("*").resources("*").verbs("*").
addRule().apiGroups().nonResourceURLs("*").verbs("*")
// restricted-admin will get cluster admin access to all downstream clusters but limited access to the local cluster
restrictedAdminRole := addUserRules(rb.addRole("Restricted Admin", "restricted-admin"))
restrictedAdminRole.
addRule().apiGroups("catalog.cattle.io").resources("clusterrepos").verbs("*").
addRule().apiGroups("management.cattle.io").resources("clustertemplates").verbs("*").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("*").
addRule().apiGroups("management.cattle.io").resources("globalroles", "globalrolebindings").verbs("*").
addRule().apiGroups("management.cattle.io").resources("users", "userattribute", "groups", "groupmembers").verbs("*").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("*").
addRule().apiGroups("management.cattle.io").resources("fleetworkspaces").verbs("*").
addRule().apiGroups("management.cattle.io").resources("authconfigs").verbs("*").
addRule().apiGroups("management.cattle.io").resources("nodedrivers").verbs("*").
addRule().apiGroups("management.cattle.io").resources("kontainerdrivers").verbs("*").
addRule().apiGroups("management.cattle.io").resources("roletemplates").verbs("*").
addRule().apiGroups("management.cattle.io").resources("catalogs", "templates", "templateversions").verbs("*")
// restricted-admin can edit settings if rancher is bootstrapped with restricted-admin role
if settings.<API key>.Get() == "true" {
restrictedAdminRole.
addRule().apiGroups("management.cattle.io").resources("settings").verbs("*")
}
userRole := addUserRules(rb.addRole("User", "user"))
userRole.
addRule().apiGroups("catalog.cattle.io").resources("clusterrepos").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch")
rb.addRole("User Base", "user-base").
addRule().apiGroups("management.cattle.io").resources("preferences").verbs("*").
addRule().apiGroups("management.cattle.io").resources("settings").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("features").verbs("get", "list", "watch").
addRule().apiGroups("project.cattle.io").resources("<API key>").verbs("*").
addRule().apiGroups("project.cattle.io").resources("<API key>").verbs("*")
// TODO user should be dynamically authorized to only see herself
// TODO enable when groups are "in". they need to be self-service
if err := rb.<API key>(management); err != nil {
return "", errors.Wrap(err, "problem reconciling global roles")
}
// RoleTemplates to be used inside of clusters
rb = newRoleBuilder()
// K8s default roles
rb.addRoleTemplate("Kubernetes cluster-admin", "cluster-admin", "cluster", true, true, true)
rb.addRoleTemplate("Kubernetes admin", "admin", "project", true, true, false)
rb.addRoleTemplate("Kubernetes edit", "edit", "project", true, true, false)
rb.addRoleTemplate("Kubernetes view", "view", "project", true, true, false)
// Cluster roles
rb.addRoleTemplate("Cluster Owner", "cluster-owner", "cluster", false, false, true).
addRule().apiGroups("*").resources("*").verbs("*").
addRule().apiGroups("management.cattle.io").resources("clusters").verbs("own").
addRule().apiGroups("provisioning.cattle.io").resources("clusters").verbs("*").
addRule().apiGroups("cluster.x-k8s.io").resources("machines").verbs("*").
addRule().apiGroups("rke-machine-config.cattle.io").resources("*").verbs("*").
addRule().apiGroups("rke-machine.cattle.io").resources("*").verbs("*").
addRule().apiGroups().nonResourceURLs("*").verbs("*")
rb.addRoleTemplate("Cluster Member", "cluster-member", "cluster", false, false, false).
addRule().apiGroups("ui.cattle.io").resources("navlinks").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("projects").verbs("create").
addRule().apiGroups("management.cattle.io").resources("nodes", "nodepools").verbs("get", "list", "watch").
addRule().apiGroups("").resources("nodes").verbs("get", "list", "watch").
addRule().apiGroups("").resources("persistentvolumes").verbs("get", "list", "watch").
addRule().apiGroups("storage.k8s.io").resources("storageclasses").verbs("get", "list", "watch").
addRule().apiGroups("apiregistration.k8s.io").resources("apiservices").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clusterevents").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clusterloggings").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clusteralertrules").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clusteralertgroups").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("notifiers").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clustercatalogs").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("catalogtemplates").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("catalog.cattle.io").resources("clusterrepos").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clusters").resourceNames("local").verbs("get").
addRule().apiGroups("provisioning.cattle.io").resources("clusters").verbs("get", "watch").
addRule().apiGroups("cluster.x-k8s.io").resources("machines").verbs("get", "watch").
addRule().apiGroups("cluster.x-k8s.io").resources("machinedeployments").verbs("get", "watch").
addRule().apiGroups("rke-machine-config.cattle.io").resources("*").verbs("get", "watch").
addRule().apiGroups("rke-machine.cattle.io").resources("*").verbs("get", "watch").
addRule().apiGroups("metrics.k8s.io").resources("nodemetrics", "nodes").verbs("get", "list", "watch")
rb.addRoleTemplate("Create Projects", "projects-create", "cluster", false, false, false).
addRule().apiGroups("management.cattle.io").resources("projects").verbs("create")
rb.addRoleTemplate("View All Projects", "projects-view", "cluster", false, false, false).
addRule().apiGroups("management.cattle.io").resources("projects").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("project.cattle.io").resources("apps").verbs("get", "list", "watch").
addRule().apiGroups("project.cattle.io").resources("apprevisions").verbs("get", "list", "watch").
addRule().apiGroups("").resources("namespaces").verbs("get", "list", "watch").
addRule().apiGroups("").resources("persistentvolumes").verbs("get", "list", "watch").
addRule().apiGroups("storage.k8s.io").resources("storageclasses").verbs("get", "list", "watch").
addRule().apiGroups("").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clusterevents").verbs("get", "list", "watch").
<API key>("view")
rb.addRoleTemplate("Manage Nodes", "nodes-manage", "cluster", false, false, false).
addRule().apiGroups("management.cattle.io").resources("nodes", "nodepools").verbs("*").
addRule().apiGroups("").resources("nodes").verbs("*").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("cluster.x-k8s.io").resources("machines").verbs("*").
addRule().apiGroups("cluster.x-k8s.io").resources("machinedeployments").verbs("*").
addRule().apiGroups("rke-machine-config.cattle.io").resources("*").verbs("*").
addRule().apiGroups("rke-machine.cattle.io").resources("*").verbs("*")
rb.addRoleTemplate("View Nodes", "nodes-view", "cluster", false, false, false).
addRule().apiGroups("management.cattle.io").resources("nodes", "nodepools").verbs("get", "list", "watch").
addRule().apiGroups("").resources("nodes").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("cluster.x-k8s.io").resources("machines").verbs("get", "watch").
addRule().apiGroups("cluster.x-k8s.io").resources("machinedeployments").verbs("get", "watch").
addRule().apiGroups("rke-machine-config.cattle.io").resources("*").verbs("get", "watch").
addRule().apiGroups("rke-machine.cattle.io").resources("*").verbs("get", "watch")
rb.addRoleTemplate("Manage Storage", "storage-manage", "cluster", false, false, false).
addRule().apiGroups("").resources("persistentvolumes").verbs("*").
addRule().apiGroups("storage.k8s.io").resources("storageclasses").verbs("*").
addRule().apiGroups("").resources("<API key>").verbs("*")
rb.addRoleTemplate("Manage Cluster Members", "<API key>", "cluster", false, false, false).
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("*")
rb.addRoleTemplate("View Cluster Members", "<API key>", "cluster", false, false, false).
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch")
rb.addRoleTemplate("Manage Cluster Catalogs", "<API key>", "cluster", false, false, true).
addRule().apiGroups("management.cattle.io").resources("clustercatalogs").verbs("*").
addRule().apiGroups("catalog.cattle.io").resources("clusterrepos").verbs("*")
rb.addRoleTemplate("View Cluster Catalogs", "<API key>", "cluster", false, false, false).
addRule().apiGroups("management.cattle.io").resources("clustercatalogs").verbs("get", "list", "watch").
addRule().apiGroups("catalog.cattle.io").resources("clusterrepos").verbs("get", "list", "watch")
rb.addRoleTemplate("Manage Cluster Backups", "backups-manage", "cluster", false, false, false).
addRule().apiGroups("management.cattle.io").resources("etcdbackups").verbs("*")
rb.addRoleTemplate("Manage Navlinks", "navlinks-manage", "cluster", false, false, false).
addRule().apiGroups("ui.cattle.io").resources("navlinks").verbs("*")
// Project roles
rb.addRoleTemplate("Project Owner", "project-owner", "project", false, false, false).
addRule().apiGroups("ui.cattle.io").resources("navlinks").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("*").
addRule().apiGroups("project.cattle.io").resources("apps").verbs("*").
addRule().apiGroups("project.cattle.io").resources("apprevisions").verbs("*").
addRule().apiGroups("project.cattle.io").resources("pipelines").verbs("*").
addRule().apiGroups("project.cattle.io").resources("pipelineexecutions").verbs("*").
addRule().apiGroups("project.cattle.io").resources("pipelinesettings").verbs("*").
addRule().apiGroups("project.cattle.io").resources("<API key>").verbs("*").
addRule().apiGroups("").resources("namespaces").verbs("create").
addRule().apiGroups("").resources("persistentvolumes").verbs("get", "list", "watch").
addRule().apiGroups("storage.k8s.io").resources("storageclasses").verbs("get", "list", "watch").
addRule().apiGroups("apiregistration.k8s.io").resources("apiservices").verbs("get", "list", "watch").
addRule().apiGroups("").resources("<API key>").verbs("*").
addRule().apiGroups("metrics.k8s.io").resources("pods").verbs("*").
addRule().apiGroups("management.cattle.io").resources("clusterevents").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("notifiers").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("projectalertrules").verbs("*").
addRule().apiGroups("management.cattle.io").resources("projectalertgroups").verbs("*").
addRule().apiGroups("management.cattle.io").resources("projectloggings").verbs("*").
addRule().apiGroups("management.cattle.io").resources("clustercatalogs").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("projectcatalogs").verbs("*").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("*").
addRule().apiGroups("management.cattle.io").resources("catalogtemplates").verbs("*").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("*").
addRule().apiGroups("monitoring.cattle.io").resources("prometheus").verbs("view").
addRule().apiGroups("monitoring.coreos.com").resources("prometheuses", "prometheusrules", "servicemonitors").verbs("*").
addRule().apiGroups("networking.istio.io").resources("destinationrules", "envoyfilters", "gateways", "serviceentries", "sidecars", "virtualservices").verbs("*").
addRule().apiGroups("config.istio.io").resources("apikeys", "authorizations", "checknothings", "circonuses", "deniers", "fluentds", "handlers", "kubernetesenvs", "kuberneteses", "listcheckers", "listentries", "logentries", "memquotas", "metrics", "opas", "prometheuses", "quotas", "quotaspecbindings", "quotaspecs", "rbacs", "reportnothings", "rules", "solarwindses", "stackdrivers", "statsds", "stdios").verbs("*").
addRule().apiGroups("authentication.istio.io").resources("policies").verbs("*").
addRule().apiGroups("rbac.istio.io").resources("rbacconfigs", "serviceroles", "servicerolebindings").verbs("*").
addRule().apiGroups("security.istio.io").resources("<API key>).verbs("*").
addRule().apiGroups("management.cattle.io").resources("projects").verbs("own").
addRule().apiGroups("catalog.cattle.io").resources("clusterrepos").verbs("get", "list", "watch").
addRule().apiGroups("catalog.cattle.io").resources("operations").verbs("get", "list", "watch").
addRule().apiGroups("catalog.cattle.io").resources("releases").verbs("get", "list", "watch").
addRule().apiGroups("catalog.cattle.io").resources("apps").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clusters").verbs("get").resourceNames("local").
<API key>("admin")
rb.addRoleTemplate("Project Member", "project-member", "project", false, false, false).
addRule().apiGroups("ui.cattle.io").resources("navlinks").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("project.cattle.io").resources("apps").verbs("*").
addRule().apiGroups("project.cattle.io").resources("apprevisions").verbs("*").
addRule().apiGroups("project.cattle.io").resources("pipelines").verbs("*").
addRule().apiGroups("project.cattle.io").resources("pipelineexecutions").verbs("*").
addRule().apiGroups("").resources("namespaces").verbs("create").
addRule().apiGroups("").resources("persistentvolumes").verbs("get", "list", "watch").
addRule().apiGroups("storage.k8s.io").resources("storageclasses").verbs("get", "list", "watch").
addRule().apiGroups("apiregistration.k8s.io").resources("apiservices").verbs("get", "list", "watch").
addRule().apiGroups("").resources("<API key>").verbs("*").
addRule().apiGroups("metrics.k8s.io").resources("pods").verbs("*").
addRule().apiGroups("management.cattle.io").resources("clusterevents").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("notifiers").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("projectalertrules").verbs("*").
addRule().apiGroups("management.cattle.io").resources("projectalertgroups").verbs("*").
addRule().apiGroups("management.cattle.io").resources("projectloggings").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clustercatalogs").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("projectcatalogs").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("catalogtemplates").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("monitoring.cattle.io").resources("prometheus").verbs("view").
addRule().apiGroups("monitoring.coreos.com").resources("prometheuses", "prometheusrules", "servicemonitors").verbs("*").
addRule().apiGroups("networking.istio.io").resources("destinationrules", "envoyfilters", "gateways", "serviceentries", "sidecars", "virtualservices").verbs("*").
addRule().apiGroups("config.istio.io").resources("apikeys", "authorizations", "checknothings", "circonuses", "deniers", "fluentds", "handlers", "kubernetesenvs", "kuberneteses", "listcheckers", "listentries", "logentries", "memquotas", "metrics", "opas", "prometheuses", "quotas", "quotaspecbindings", "quotaspecs", "rbacs", "reportnothings", "rules", "solarwindses", "stackdrivers", "statsds", "stdios").verbs("*").
addRule().apiGroups("authentication.istio.io").resources("policies").verbs("*").
addRule().apiGroups("rbac.istio.io").resources("rbacconfigs", "serviceroles", "servicerolebindings").verbs("*").
addRule().apiGroups("security.istio.io").resources("<API key>).verbs("*").
addRule().apiGroups("catalog.cattle.io").resources("clusterrepos").verbs("get", "list", "watch").
addRule().apiGroups("catalog.cattle.io").resources("operations").verbs("get", "list", "watch").
addRule().apiGroups("catalog.cattle.io").resources("releases").verbs("get", "list", "watch").
addRule().apiGroups("catalog.cattle.io").resources("apps").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clusters").verbs("get").resourceNames("local").
<API key>("edit")
rb.addRoleTemplate("Read-only", "read-only", "project", false, false, false).
addRule().apiGroups("ui.cattle.io").resources("navlinks").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("project.cattle.io").resources("apps").verbs("get", "list", "watch").
addRule().apiGroups("project.cattle.io").resources("apprevisions").verbs("get", "list", "watch").
addRule().apiGroups("project.cattle.io").resources("pipelines").verbs("get", "list", "watch").
addRule().apiGroups("project.cattle.io").resources("pipelineexecutions").verbs("get", "list", "watch").
addRule().apiGroups("").resources("persistentvolumes").verbs("get", "list", "watch").
addRule().apiGroups("storage.k8s.io").resources("storageclasses").verbs("get", "list", "watch").
addRule().apiGroups("apiregistration.k8s.io").resources("apiservices").verbs("get", "list", "watch").
addRule().apiGroups("").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("metrics.k8s.io").resources("pods").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clusterevents").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("notifiers").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("projectalertrules").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("projectalertgroups").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("projectloggings").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clustercatalogs").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("projectcatalogs").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("monitoring.coreos.com").resources("prometheuses", "prometheusrules", "servicemonitors").verbs("get", "list", "watch").
addRule().apiGroups("networking.istio.io").resources("destinationrules", "envoyfilters", "gateways", "serviceentries", "sidecars", "virtualservices").verbs("get", "list", "watch").
addRule().apiGroups("config.istio.io").resources("apikeys", "authorizations", "checknothings", "circonuses", "deniers", "fluentds", "handlers", "kubernetesenvs", "kuberneteses", "listcheckers", "listentries", "logentries", "memquotas", "metrics", "opas", "prometheuses", "quotas", "quotaspecbindings", "quotaspecs", "rbacs", "reportnothings", "rules", "solarwindses", "stackdrivers", "statsds", "stdios").verbs("get", "list", "watch").
addRule().apiGroups("authentication.istio.io").resources("policies").verbs("get", "list", "watch").
addRule().apiGroups("rbac.istio.io").resources("rbacconfigs", "serviceroles", "servicerolebindings").verbs("get", "list", "watch").
addRule().apiGroups("security.istio.io").resources("<API key>).verbs("get", "list", "watch").
addRule().apiGroups("catalog.cattle.io").resources("clusterrepos").verbs("get", "list", "watch").
addRule().apiGroups("catalog.cattle.io").resources("operations").verbs("get", "list", "watch").
addRule().apiGroups("catalog.cattle.io").resources("releases").verbs("get", "list", "watch").
addRule().apiGroups("catalog.cattle.io").resources("apps").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("clusters").verbs("get").resourceNames("local").
<API key>("view")
rb.addRoleTemplate("Create Namespaces", "create-ns", "project", false, false, false).
addRule().apiGroups("").resources("namespaces").verbs("create")
rb.addRoleTemplate("Manage Workloads", "workloads-manage", "project", false, false, false).
addRule().apiGroups("").resources("pods", "pods/attach", "pods/exec", "pods/portforward", "pods/proxy", "<API key>",
"<API key>/scale").verbs("*").
addRule().apiGroups("apps").resources("daemonsets", "deployments", "deployments/rollback", "deployments/scale", "replicasets",
"replicasets/scale", "statefulsets").verbs("*").
addRule().apiGroups("autoscaling").resources("<API key>").verbs("*").
addRule().apiGroups("batch").resources("cronjobs", "jobs").verbs("*").
addRule().apiGroups("").resources("limitranges", "pods/log", "pods/status", "<API key>/status", "resourcequotas", "resourcequotas/status", "bindings").verbs("get", "list", "watch").
addRule().apiGroups("project.cattle.io").resources("apps").verbs("*").
addRule().apiGroups("project.cattle.io").resources("apprevisions").verbs("*").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch")
rb.addRoleTemplate("View Workloads", "workloads-view", "project", false, false, false).
addRule().apiGroups("").resources("pods", "<API key>", "<API key>/scale").verbs("get", "list", "watch").
addRule().apiGroups("apps").resources("daemonsets", "deployments", "deployments/rollback", "deployments/scale", "replicasets",
"replicasets/scale", "statefulsets").verbs("get", "list", "watch").
addRule().apiGroups("autoscaling").resources("<API key>").verbs("get", "list", "watch").
addRule().apiGroups("batch").resources("cronjobs", "jobs").verbs("get", "list", "watch").
addRule().apiGroups("").resources("limitranges", "pods/log", "pods/status", "<API key>/status", "resourcequotas", "resourcequotas/status", "bindings").verbs("get", "list", "watch").
addRule().apiGroups("project.cattle.io").resources("apps").verbs("get", "list", "watch").
addRule().apiGroups("project.cattle.io").resources("apprevisions").verbs("get", "list", "watch").
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch")
rb.addRoleTemplate("Manage Ingress", "ingress-manage", "project", false, false, false).
addRule().apiGroups("extensions").resources("ingresses").verbs("*").
addRule().apiGroups("networking.k8s.io").resources("ingresses").verbs("*")
rb.addRoleTemplate("View Ingress", "ingress-view", "project", false, false, false).
addRule().apiGroups("extensions").resources("ingresses").verbs("get", "list", "watch").
addRule().apiGroups("networking.k8s.io").resources("ingresses").verbs("get", "list", "watch")
rb.addRoleTemplate("Manage Services", "services-manage", "project", false, false, false).
addRule().apiGroups("").resources("services", "services/proxy", "endpoints").verbs("*")
rb.addRoleTemplate("View Services", "services-view", "project", false, false, false).
addRule().apiGroups("").resources("services", "endpoints").verbs("get", "list", "watch")
rb.addRoleTemplate("Manage Secrets", "secrets-manage", "project", false, false, false).
addRule().apiGroups("").resources("secrets").verbs("*")
rb.addRoleTemplate("View Secrets", "secrets-view", "project", false, false, false).
addRule().apiGroups("").resources("secrets").verbs("get", "list", "watch")
rb.addRoleTemplate("Manage Config Maps", "configmaps-manage", "project", false, false, false).
addRule().apiGroups("").resources("configmaps").verbs("*")
rb.addRoleTemplate("View Config Maps", "configmaps-view", "project", false, false, false).
addRule().apiGroups("").resources("configmaps").verbs("get", "list", "watch")
rb.addRoleTemplate("Manage Volumes", "<API key>", "project", false, false, false).
addRule().apiGroups("").resources("persistentvolumes").verbs("get", "list", "watch").
addRule().apiGroups("storage.k8s.io").resources("storageclasses").verbs("get", "list", "watch").
addRule().apiGroups("").resources("<API key>").verbs("*")
rb.addRoleTemplate("View Volumes", "<API key>", "project", false, false, false).
addRule().apiGroups("").resources("persistentvolumes").verbs("get", "list", "watch").
addRule().apiGroups("storage.k8s.io").resources("storageclasses").verbs("get", "list", "watch").
addRule().apiGroups("").resources("<API key>").verbs("get", "list", "watch")
rb.addRoleTemplate("Manage Service Accounts", "<API key>", "project", false, false, false).
addRule().apiGroups("").resources("serviceaccounts").verbs("*")
rb.addRoleTemplate("View Service Accounts", "<API key>", "project", false, false, false).
addRule().apiGroups("").resources("serviceaccounts").verbs("get", "list", "watch")
rb.addRoleTemplate("Manage Project Members", "<API key>", "project", false, false, false).
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("*")
rb.addRoleTemplate("View Project Members", "<API key>", "project", false, false, false).
addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch")
rb.addRoleTemplate("Manage Project Catalogs", "<API key>", "project", false, false, false).
addRule().apiGroups("management.cattle.io").resources("projectcatalogs").verbs("*")
rb.addRoleTemplate("View Project Catalogs", "<API key>", "project", false, false, false).
addRule().apiGroups("management.cattle.io").resources("projectcatalogs").verbs("get", "list", "watch")
rb.addRoleTemplate("Project Monitoring View Role", "<API key>", "project", false, true, false).
addRule().apiGroups("monitoring.cattle.io").resources("prometheus").verbs("view").
<API key>("view")
rb.addRoleTemplate("View Monitoring", "monitoring-ui-view", "project", true, false, false)
rb.addRoleTemplate("View Navlinks", "navlinks-view", "project", true, false, false).
addRule().apiGroups("ui.cattle.io").resources("navlinks").verbs("get", "list", "watch")
// Not specific to project or cluster
// TODO When clusterevents has value, consider adding this back in
//rb.addRoleTemplate("View Events", "events-view", "", true, false, false).
// addRule().apiGroups("").resources("events").verbs("get", "list", "watch").
// addRule().apiGroups("management.cattle.io").resources("clusterevents").verbs("get", "list", "watch")
if err := rb.<API key>(management); err != nil {
return "", errors.Wrap(err, "problem reconciling role templates")
}
adminName, err := BootstrapAdmin(wrangler)
if err != nil {
return "", err
}
err = <API key>(management)
if err != nil {
return "", err
}
return adminName, nil
}
// addUserRules appends the base permission set shared by standard Rancher
// users to role and returns the same builder so callers can keep chaining.
// Broadly: create-only verbs on resources a user may own (secrets, clusters,
// node templates, ...), read-only verbs on shared catalog/driver/settings
// data, and full control of the user's own project-scoped resources.
// NOTE(review): several resource names are redacted ("<API key>") in this
// copy of the source — confirm against the upstream file before relying on
// the exact resource list.
func addUserRules(role *roleBuilder) *roleBuilder {
	role.
		// Users may create secrets (e.g. registry/cloud credentials); wider
		// secret access is granted per-project by other role templates.
		addRule().apiGroups("").resources("secrets").verbs("create").
		addRule().apiGroups("management.cattle.io").resources("principals", "roletemplates").verbs("get", "list", "watch").
		// Preferences are fully user-owned.
		addRule().apiGroups("management.cattle.io").resources("preferences").verbs("*").
		// Shared, read-only platform data.
		addRule().apiGroups("management.cattle.io").resources("settings").verbs("get", "list", "watch").
		addRule().apiGroups("management.cattle.io").resources("features").verbs("get", "list", "watch").
		addRule().apiGroups("management.cattle.io").resources("templates", "templateversions", "catalogs").verbs("get", "list", "watch").
		// Create-only: ownership/creator rules grant further access to what
		// the user actually creates.
		addRule().apiGroups("management.cattle.io").resources("clusters").verbs("create").
		addRule().apiGroups("management.cattle.io").resources("nodedrivers").verbs("get", "list", "watch").
		addRule().apiGroups("management.cattle.io").resources("kontainerdrivers").verbs("get", "list", "watch").
		addRule().apiGroups("management.cattle.io").resources("nodetemplates").verbs("create").
		addRule().apiGroups("management.cattle.io").resources("fleetworkspaces").verbs("create").
		addRule().apiGroups("management.cattle.io").resources("multiclusterapps", "globaldnses", "globaldnsproviders", "<API key>").verbs("create").
		// Read-only RKE metadata (system images, versions, addons, CIS).
		addRule().apiGroups("management.cattle.io").resources("rkek8ssystemimages").verbs("get", "list", "watch").
		addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
		addRule().apiGroups("management.cattle.io").resources("rkeaddons").verbs("get", "list", "watch").
		addRule().apiGroups("management.cattle.io").resources("cisconfigs").verbs("get", "list", "watch").
		addRule().apiGroups("management.cattle.io").resources("<API key>").verbs("get", "list", "watch").
		// Full control over the user's own project-scoped resources.
		addRule().apiGroups("project.cattle.io").resources("<API key>").verbs("*").
		addRule().apiGroups("project.cattle.io").resources("<API key>").verbs("*").
		// Provisioning-v2 cluster and machine-config creation.
		addRule().apiGroups("provisioning.cattle.io").resources("clusters").verbs("create").
		addRule().apiGroups("rke-machine-config.cattle.io").resources("*").verbs("create")
	return role
}
// BootstrapAdmin checks if the <API key> exists, if it does this indicates rancher has
// already created the admin user and should not attempt it again. Otherwise attempt to create the admin.
//
// Returns the name of the default admin user ("" when NoDefaultAdmin is set,
// or when bootstrap state cannot be determined). Errors are returned only for
// failures that should abort startup; most creation failures are logged and
// tolerated so a partially-bootstrapped install can still come up.
func BootstrapAdmin(management *wrangler.Context) (string, error) {
	adminCreateLock.Lock()
	defer adminCreateLock.Unlock()

	// Operator explicitly opted out of a default admin.
	if settings.NoDefaultAdmin.Get() == "true" {
		return "", nil
	}
	var adminName string

	// Find any previously-created default admin via its well-known label.
	set := labels.Set(defaultAdminLabel)
	admins, err := management.Mgmt.User().List(v1.ListOptions{LabelSelector: set.String()})
	if err != nil {
		return "", err
	}
	if len(admins.Items) > 0 {
		adminName = admins.Items[0].Name
	}

	// The marker config map records that bootstrap already completed once.
	if _, err := management.K8s.CoreV1().ConfigMaps(cattleNamespace).Get(context.TODO(), <API key>, v1.GetOptions{}); err != nil {
		if !apierrors.IsNotFound(err) {
			// Can't tell whether bootstrap ran; don't risk creating a second
			// admin — log and bail out without error.
			logrus.Warnf("Unable to determine if admin user already created: %v", err)
			return "", nil
		}
	} else {
		// config map already exists, nothing to do
		return adminName, nil
	}

	users, err := management.Mgmt.User().List(v1.ListOptions{})
	if err != nil {
		return "", err
	}

	if len(users.Items) == 0 {
		// Config map does not exist and no users, attempt to create the default admin user
		bootstrapPassword, <API key>, err := <API key>(context.TODO(), management.K8s.CoreV1().Secrets(cattleNamespace))
		if err != nil {
			return "", errors.Wrap(err, "failed to retrieve bootstrap password")
		}

		// NOTE(review): the bcrypt error is intentionally ignored here; it can
		// only fail for an invalid cost, and DefaultCost is always valid.
		<API key>, _ := bcrypt.<API key>([]byte(bootstrapPassword), bcrypt.DefaultCost)
		admin, err := management.Mgmt.User().Create(&v3.User{
			ObjectMeta: v1.ObjectMeta{
				GenerateName: "user-",
				Labels:       defaultAdminLabel,
			},
			DisplayName: "Default Admin",
			Username:    "admin",
			Password:    string(<API key>),
			// Force a password change when the password was generated for the
			// user or is the well-known insecure default.
			MustChange<API key> || bootstrapPassword == "admin",
		})
		if err != nil && !apierrors.IsAlreadyExists(err) {
			return "", errors.Wrap(err, "can not ensure admin user exists")
		}

		if err == nil {
			// Pick a server URL for the welcome banner: the configured
			// setting, else a detected host interface, else localhost.
			var serverURL string
			if settings.ServerURL.Get() != "" {
				serverURL = settings.ServerURL.Get()
			}
			if serverURL == "" {
				ip, err := net.ChooseHostInterface()
				if err == nil {
					serverURL = "https://" + ip.String()
				}
			}
			if serverURL == "" {
				serverURL = "https://" + "localhost"
			}

			logrus.Infof("")
			// FIX: this separator and its closing twin below were truncated,
			// unterminated string literals in the previous revision.
			logrus.Infof("-----------------------------------------")
			logrus.Infof("Welcome to Rancher")
			if <API key> {
				logrus.Infof("A bootstrap password has been generated for your admin user.")
				logrus.Infof("")
				logrus.Infof("Bootstrap Password: %s", bootstrapPassword)
				logrus.Infof("")
				logrus.Infof("Use %s/dashboard/?setup=%s to complete setup in the UI", serverURL, bootstrapPassword)
			} else {
				logrus.Infof("")
				logrus.Infof("Use %s/dashboard/ to complete setup in the UI", serverURL)
			}
			logrus.Infof("-----------------------------------------")
			logrus.Infof("")
		}
		// FIX: guard the dereference — when Create raced and returned
		// IsAlreadyExists, admin is nil and admin.Name would panic.
		if admin != nil {
			adminName = admin.Name
		}

		bindings, err := management.Mgmt.GlobalRoleBinding().List(v1.ListOptions{LabelSelector: set.String()})
		if err != nil {
			// FIX: message previously said "create"; the failing call here is
			// the List. Fall through with an empty list so we try to create.
			logrus.Warnf("Failed to list default admin global role bindings: %v", err)
			bindings = &v3.<API key>{}
		}
		if len(bindings.Items) == 0 {
			adminRole := "admin"
			if settings.<API key>.Get() == "true" {
				adminRole = "restricted-admin"
			}

			_, err = management.Mgmt.GlobalRoleBinding().Create(
				&v3.GlobalRoleBinding{
					ObjectMeta: v1.ObjectMeta{
						GenerateName: "globalrolebinding-",
						Labels:       defaultAdminLabel,
					},
					UserName:       adminName,
					GlobalRoleName: adminRole,
				})
			if err != nil && !features.MCM.Enabled() {
				// MCM disabled: fall back to a raw cluster-admin
				// ClusterRoleBinding so the admin can still operate.
				_, crbErr := management.RBAC.ClusterRoleBinding().Create(&rbacv1.ClusterRoleBinding{
					ObjectMeta: v1.ObjectMeta{
						GenerateName: "default-admin-",
						Labels:       defaultAdminLabel,
					},
					Subjects: []rbacv1.Subject{{
						Kind:     "User",
						APIGroup: rbacv1.GroupName,
						Name:     adminName,
					}},
					RoleRef: rbacv1.RoleRef{
						APIGroup: rbacv1.GroupName,
						Kind:     "ClusterRole",
						Name:     "cluster-admin",
					},
				})
				if crbErr != nil {
					// FIX: previously only the original GlobalRoleBinding
					// error was logged, silently dropping crbErr.
					logrus.Warnf("Failed to create default admin global role binding: %v; fallback cluster role binding also failed: %v", err, crbErr)
				}
			} else if err != nil {
				logrus.Warnf("Failed to create default admin global role binding: %v", err)
			} else {
				logrus.Info("Created default admin user and binding")
			}
		}
	}

	// Record that bootstrap completed so future startups skip admin creation.
	adminConfigMap := corev1.ConfigMap{
		ObjectMeta: v1.ObjectMeta{
			Name:      <API key>,
			Namespace: cattleNamespace,
		},
	}
	_, err = management.K8s.CoreV1().ConfigMaps(cattleNamespace).Create(context.TODO(), &adminConfigMap, v1.CreateOptions{})
	if err != nil {
		if !apierrors.IsAlreadyExists(err) {
			logrus.Warnf("Error creating admin config map: %v", err)
		}
	}
	return adminName, nil
}
// <API key> will set the default roles for user login, cluster create
// and project create. If the default roles already have the bootstrappedRole
// annotation this will be a no-op as this was done on a previous startup and will
// now respect the currently selected defaults.
func <API key>(management *config.ManagementContext) error {
	// Default global role assigned to newly created users.
	user, err := management.Management.GlobalRoles("").Get("user", v1.GetOptions{})
	if err != nil {
		return err
	}
	if _, ok := user.Annotations[bootstrappedRole]; !ok {
		// Work on a deep copy; never mutate the object from the client cache.
		// (Renamed from `copy`, which shadowed the builtin.)
		updated := user.DeepCopy()
		updated.NewUserDefault = true
		if updated.Annotations == nil {
			updated.Annotations = make(map[string]string)
		}
		updated.Annotations[bootstrappedRole] = "true"
		if _, err := management.Management.GlobalRoles("").Update(updated); err != nil {
			return err
		}
	}

	// Default role template for cluster creators.
	clusterRole, err := management.Management.RoleTemplates("").Get("cluster-owner", v1.GetOptions{})
	if err != nil {
		// BUG FIX: this previously returned nil, silently swallowing the
		// lookup error and skipping the remaining bootstrap steps.
		return err
	}
	if _, ok := clusterRole.Annotations[bootstrappedRole]; !ok {
		updated := clusterRole.DeepCopy()
		updated.<API key> = true
		if updated.Annotations == nil {
			updated.Annotations = make(map[string]string)
		}
		updated.Annotations[bootstrappedRole] = "true"
		if _, err := management.Management.RoleTemplates("").Update(updated); err != nil {
			return err
		}
	}

	// Default role template for project creators.
	projectRole, err := management.Management.RoleTemplates("").Get("project-owner", v1.GetOptions{})
	if err != nil {
		// BUG FIX: was `return nil` — same silent swallow as the
		// cluster-owner lookup above.
		return err
	}
	if _, ok := projectRole.Annotations[bootstrappedRole]; !ok {
		updated := projectRole.DeepCopy()
		updated.<API key> = true
		if updated.Annotations == nil {
			updated.Annotations = make(map[string]string)
		}
		updated.Annotations[bootstrappedRole] = "true"
		if _, err := management.Management.RoleTemplates("").Update(updated); err != nil {
			return err
		}
	}

	return nil
}
// <API key> ensures the ClusterRoles used to grant restricted-admins
// access to cluster- and project-scoped CRDs exist and carry the expected rules.
func <API key>(management *config.ManagementContext) error {
	var returnErr error

	// If adding Rules for new CRDs to the ClusterRoles below, make sure to add them in a sorted order.
	clusterRoles := []rbacv1.ClusterRole{
		// Rules granting restricted-admins access to all CRDs that can be
		// created in a v3.Cluster's namespace.
		{
			ObjectMeta: v1.ObjectMeta{
				Name: rbac.<API key>,
			},
			Rules: []rbacv1.PolicyRule{
				{
					APIGroups: []string{"management.cattle.io"},
					Resources: []string{"*"},
					Verbs:     []string{"*"},
				},
			},
		},
		// Rules granting restricted-admins access to all CRDs that can be
		// created in a v3.Cluster and v3.Project's namespace.
		{
			ObjectMeta: v1.ObjectMeta{
				Name: rbac.<API key>,
			},
			Rules: []rbacv1.PolicyRule{
				{
					APIGroups: []string{"management.cattle.io"},
					Resources: []string{"*"},
					Verbs:     []string{"*"},
				},
				{
					APIGroups: []string{"project.cattle.io"},
					Resources: []string{"*"},
					Verbs:     []string{"*"},
				},
			},
		},
	}

	// Collect errors so a failure on one role does not stop the other from syncing.
	for _, cr := range clusterRoles {
		if err := <API key>(management, cr); err != nil {
			returnErr = multierror.Append(returnErr, err)
		}
	}
	return returnErr
}
func <API key>(management *config.ManagementContext, cr rbacv1.ClusterRole) error {
for _, rule := range cr.Rules {
sort.Slice(rule.APIGroups, func(i, j int) bool { return rule.APIGroups[i] < rule.APIGroups[j] })
}
sort.Slice(cr.Rules, func(i, j int) bool {
return cr.Rules[i].APIGroups[0] < cr.Rules[j].APIGroups[0]
})
_, err := management.RBAC.ClusterRoles("").Create(&cr)
if err != nil {
if !apierrors.IsAlreadyExists(err) {
return err
}
err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
existingCR, err := management.RBAC.ClusterRoles("").Get(cr.Name, v1.GetOptions{})
if err != nil {
return err
}
if reflect.DeepEqual(cr.Rules, existingCR.Rules) {
return nil
}
existingCR.Rules = cr.Rules
_, err = management.RBAC.ClusterRoles("").Update(existingCR)
return err
})
return err
}
return nil
} |
set_state();
/* Get custom refresh interval, default = 30 seconds*/
var refresh_interval = parseInt(document.getElementsByName('refresh_interval')[0].value);
if (!(typeof(refresh_interval) !== 'undefined')) refresh_interval = 30;
if (refresh_interval == null || refresh_interval == NaN) refresh_interval = 30;
else if (refresh_interval < 1) stop_refresh;
/* Timestamp of last refresh*/
var date = Date.now();
/* Close the range dropdown menu when user clicks outside of it*/
window.onclick = function(event) {
try{
if (!event.target.matches('.range-btn') && document.<API key>("dropdown-content")[0].classList.contains('show')) {
var dropdowns = document.<API key>("dropdown-content");
var k = false;
var i;
for (i = 0; i < dropdowns.length; i++) {
var openDropdown = dropdowns[i];
if (openDropdown.classList.contains('show')) {
openDropdown.classList.remove('show');
k = true;
}
}
if(k == true){
document.<API key>("range-btn")[0].classList.remove("selected");
}
}
}
/* Catch for if user clicks outside of 'window'. Otherwise event.target.matches is undefined*/
catch{
return;
}
}
/* Refresh status table and system services display */
var TimerSwitch = 1;
var timer_id;

/* Schedule a refresh `time` ms from now: update the plot, re-fetch the current
   page, swap in the fresh status/system-services markup, then re-arm the timer.
   On fetch failure only the timer is re-armed. */
function set_refresh(time) {
  TimerSwitch = 1;
  timer_id = setTimeout(function() {
    refresh_plot();
    var csrftoken = document.getElementsByName('csrfmiddlewaretoken')[0].value;
    fetch(location.href, {
      method: 'GET',
      headers: {'Accept': 'text/html', 'X-CSRFToken': csrftoken},
      credentials: 'same-origin',
    })
    .then(function(response) {
      /* Check response status code */
      if (response.ok) {
        return response.text();
      } throw new Error('Could not update status table :( HTTP response not was not OK -> ' + response.status);
    })
    .then((html) => {
      /* Replace status table and services list with the freshly fetched markup,
         then restore UI state and re-attach listeners lost with the old DOM */
      var parser = new DOMParser();
      var doc = parser.parseFromString(html, "text/html");
      document.getElementById("status").innerHTML = doc.getElementById("status").innerHTML;
      document.getElementById("system-services").innerHTML = doc.getElementById("system-services").innerHTML;
      TimerVal = refresh_interval;
      set_refresh(refresh_interval * 1000);
      set_state();
      checkForExpandedRow();
      <API key>();
      initialize();
    })
    .catch((error) => {
      console.warn(error);
      TimerVal = refresh_interval;
      set_refresh(refresh_interval * 1000);
    });
  }, time);
  /* NOTE(review): clears the VM overlay iframe on every (re)arm — presumably
     to drop a stale VM list; confirm this is intended here rather than in
     stop_refresh. */
  document.getElementById('vms-iframe').src = '';
}
/* Halt the auto-refresh cycle: cancel the pending timer, flag refreshing off
   and abort any in-flight page loading. */
function stop_refresh() {
  clearTimeout(timer_id);
  TimerSwitch = 0;
  window.stop();
}
/* Open the VM overlay listing native VMs for the given cloud path. */
function native_list(url) {
  stop_refresh();
  var frame = document.getElementById('vms-iframe');
  frame.src = '/vm/list/' + url;
  window.location.href = "#vms-overlay";
}

/* Open the VM overlay listing foreign VMs for the given cloud path. */
function foreign_list(url) {
  stop_refresh();
  var frame = document.getElementById('vms-iframe');
  frame.src = '/vm/foreign/' + url;
  window.location.href = "#vms-overlay";
}
/* Expand/collapse a single table row, flip its ▾/▴ symbol and remember the
   choice in sessionStorage (1 = expanded, 0 = collapsed). */
function toggle_id(name) {
  var row = document.getElementById(name);
  var sym = document.getElementById('sym-' + name);
  var isExpanded = (row.style.display == "table-row");
  if (isExpanded) {
    sym.innerHTML = "▾"
    row.style.display = "none"
    sessionStorage.setItem(name, 0);
  } else {
    sym.innerHTML = "▴"
    row.style.display = "table-row"
    sessionStorage.setItem(name, 1);
  }
}
function toggle_group(name){
var n;
if(document.<API key>(name)[0].style.display == "table-row"){
document.getElementById('sym-'+name).innerHTML = "▾"
for(n=0; n<document.<API key>(name).length; n++){
document.<API key>(name)[n].style.display = "none"
sessionStorage.setItem(document.<API key>(name)[n].id, 0);
}
}
else{
for(n=0; n<document.<API key>(name).length; n++){
document.getElementById('sym-'+name).innerHTML = "▴"
document.<API key>(name)[n].style.display = "table-row"
sessionStorage.setItem(document.<API key>(name)[n].id, 1);
}
}
}
/* Set state of expand row in session storage: re-expand every row whose id
   starts with "expand-" that was expanded before the last refresh. */
function set_state() {
  var n;
  var clouds = document.querySelectorAll('[id^="expand-"]');
  for (n = 0; n < clouds.length; n++) {
    if (sessionStorage.getItem(clouds[n].id) == 1) {
      /* BUG FIX: removed leftover debug console.log of the row id. */
      document.getElementById(clouds[n].id).style.display = "table-row"
      document.getElementById('sym-' + clouds[n].id).innerHTML = "▴"
    }
  }
}
/* Add event listeners. Re-run after every table refresh: set_refresh replaces
   the status markup via innerHTML, which drops the old listeners. */
function initialize(){
  addEventListeners("plottable");
}
/* Add click events to table elements and range selection*/
function addEventListeners(className) {
var inputList = document.<API key>(className);
var i;
var list_length = inputList.length;
for (i = 0; i < list_length; i++){
inputList[i].addEventListener('click', function(){
if(className == 'plottable'){
var list = document.querySelectorAll(`td[data-path="${this.dataset.path}"]`);
for (k = 0; k < list.length; k++){
list[k].classList.toggle('plotted');
}
if(!list) this.classList.toggle("plotted");
togglePlot(this);
}else selectRange(this);
});
}
}
/* Range selection dropdown: toggle visibility and keep the trigger button's
   'selected' highlight in sync with the menu's open state. */
function dropDown() {
  var menu = document.getElementById("myDropdown");
  var trigger = document.getElementById("range-select");
  if (menu.classList.contains("show")) {
    trigger.classList.remove("selected");
  } else {
    trigger.classList.add("selected");
  }
  menu.classList.toggle("show");
}
/* Change time range for plot based on user selection.
   `range` is the clicked dropdown entry; its data-from/data-to attributes are
   offsets back from `date`, in minutes (or days when the parent list has
   class "days"). */
function selectRange(range) {
  /* Show the chosen range on the dropdown button */
  const curr_range = document.<API key>("range-btn");
  curr_range[0].innerHTML = range.innerHTML + '<span class="space"></span><span class="caret"></span>';
  /* Move the 'selected' highlight to the clicked entry */
  const dropdowns = document.<API key>("selected");
  var l = dropdowns.length;
  for (var i = 0; i < l; i++) {
    dropdowns[0].classList.remove('selected');
  }
  range.classList.toggle("selected");
  /* Calculate new range */
  var to = new Date();
  var from = new Date();
  var multiple = 60000;
  if (range.parentElement.classList.contains("days")) multiple = 86400000;
  to.setTime(date - (range.dataset.to * multiple))
  to = to.getTime();
  from.setTime(date - (range.dataset.from * multiple));
  from = from.getTime();
  /* Update traces with data from new range: only fetch when the request is
     older than 1h AND extends before the earliest already-plotted point */
  if ((date - from) > 3600000 && !(TSPlot.traces[0].x[0] < from)) {
    var traces = TSPlot.traces;
    var newdata = {
      y: [],
      x: []
    };
    var index = [];
    var query = createQuery(traces[0].name, from, TSPlot.traces[0].x[0], true);
    /* Create string of queries for db (one per trace, ';'-separated) */
    for (var i = 1; i < traces.length; i++) {
      query += ';'
      query += createQuery(traces[i].name, from, TSPlot.traces[0].x[0], true)
    }
    if (window.location.pathname == "/cloud/status/") var newpath = "plot";
    else var newpath = "/cloud/status/plot";
    const csrftoken = document.getElementsByName('csrfmiddlewaretoken')[0].value;
    fetch(newpath, {
      method: 'POST',
      headers: {'Accept': 'application/json', 'X-CSRFToken': csrftoken},
      credentials: 'same-origin',
      body: query,
    }
    )
    .then(function(response) {
      /* Check response status code */
      if (response.ok) {
        return response.json();
      } throw new Error('Could not update range :( HTTP response not was not OK -> ' + response.status)
    })
    .then(function(data) {
      /* Parse response into trace object. Add null values between points where no data
         exists for more than 70s to show gaps in plot */
      var no_new_points = 0;
      for (var i = 0; i < traces.length; i++) {
        /* Skip trace if no new data exists.
           NOTE(review): `break` exits on the FIRST empty trace, so
           no_new_points can be at most 1 while `index` may hold fewer entries
           than traces — confirm the comparison below is intended. */
        if (!(typeof data.results[i].series !== 'undefined')) {
          no_new_points ++;
          break;
        }
        index.push(i);
        var responsedata = data.results[i].series;
        if ((typeof (data.results[i].series) !== 'undefined')) {
          responsedata = data.results[i].series[0].values;
        }
        var arrays = parseData(responsedata);
        var newarrayx = arrays[0];
        var newarrayy = arrays[1];
        /* Add nulls to beginning and end of new data */
        newarrayx.unshift(newarrayx[0] - 15000);
        newarrayy.unshift(null);
        newarrayx.push(newarrayx[newarrayx.length - 1] + 15000);
        newarrayy.push(null);
        newdata.y.push(newarrayy);
        newdata.x.push(newarrayx);
      }
      /* If there were no new points for all traces */
      if (no_new_points == index.length) return data;
      else {
        Plotly.prependTraces('plotly-TS', newdata, index);
        return updateTraces(newdata, index, []);
      }
    })
    .then(function(response) {
      /* Update plot range */
      TSPlot.layout.xaxis.range = [from, to];
      Plotly.relayout('plotly-TS', TSPlot.layout);
    })
    .catch(error => console.warn(error));
  }
  /* If no new data is needed */
  else {
    /* Only update plot range */
    TSPlot.layout.xaxis.range = [from, to];
    TSPlot.layout.yaxis.range = [];
    Plotly.relayout('plotly-TS', TSPlot.layout);
  }
}
/* Toggle plotted traces and initialize/show plot if not yet created*/
function togglePlot(trace){
if (trace.dataset.path.startsWith(" ")){
trace.dataset.path = trace.dataset.path.replace(" "," groups_total ");
}
else{
trace.dataset.path = trace.dataset.path.replace(" "," ");
}
trace.dataset.path = trace.dataset.path.trim();
if(TSPlot.showing == true){
/* Check if trace is already plotted*/
var x;
var index = -1;
for(x = 0; x < TSPlot.traces.length; x++){
if (TSPlot.traces[x].name == trace.dataset.path){
index = x;
break;
}
}
/* If trace is already plotted*/
if(index != -1){
if(TSPlot.traces.length == 1) {
TSPlot.hide();
}else{
Plotly.deleteTraces('plotly-TS', index);
/* Store plotted traces for refresh*/
var trace_array = [];
for(var y = 0; y < TSPlot.traces.length; y++){
trace_array.push(TSPlot.traces[y].name)
}
sessionStorage.setItem("traces", JSON.stringify(trace_array));
}
}else{
/* Plot new trace*/
getTraceData(trace, true);
}
}
/* If plot is not created*/
else{
document.getElementById("loader").style.display = 'inline-block';
/* Create plot*/
TSPlot.show();
getTraceData(trace, false);
}
}
/* Construct query in Line Protocol for db*/
function createQuery(trace, from, to, showing){
const line = trace.split(" ");
var query = '';
var services = false;
var global_total = false;
var group = line[0];
/* If trace is for global group total*/
if (group == 'groups_total'){
global_total = true
var groups = [];
var l = document.getElementsByName(group);
var len = l.length
for (var k = 0; k < len; k++){
groups.push(l[k].value);
}
var measurement = line[1];
query += `SELECT time,SUM("value") FROM "${measurement}" WHERE `;
for (var g = 0; g < groups.length; g++){
query += `"group"='${groups[g]}'`;
if(g != groups.length-1){
query += ` OR `;
}
}
}
else{
query += `SELECT time,value FROM `;
}
/* If trace is for service status*/
if(line.length == 1){
services = true;
var measurement = line[0];
query += `"${measurement}"`;
}
/* If trace is regular*/
else if(line.length == 3 && !global_total){
var cloud = line[1];
var measurement = line[2];
query += `"${measurement}" WHERE "cloud"='${cloud}' AND "group"='${group}'`;
}else if(!services && !global_total){
var measurement = line[1];
query += `"${measurement}" WHERE "group"='${group}'`;
}
/* If requesting newest 30s of data*/
if (to == 0){
if(!services) query += ` AND time >= ${from}ms`;
else query += ` WHERE time >= ${from}ms`;
/* Default request is last 1 hour*/
}else if (showing == false || (date-from) <= 3600000){
if(!services) query += ` AND time >= ${date-3600000}ms`;
else query += ` WHERE time >= ${date-3600000}ms`;
/* If plot is showing*/
}else{
/* Check if trace is already plotted*/
var index = -1;
for(var x = 0; x < TSPlot.traces.length; x++){
if (TSPlot.traces[x].name == trace){
index = x;
break;
}
}
/* If trace is already plotted*/
if(index == -1) to = date;
if(!services){
if(from > TSPlot.layout.xaxis.range[0]) query += ` AND time >= ${TSPlot.layout.xaxis.range[0]}ms AND time < ${to}ms`;
else query += ` AND time >= ${from}ms AND time < ${to}ms`;
}else{
if(from > TSPlot.layout.xaxis.range[0]) query += ` WHERE time >= ${TSPlot.layout.xaxis.range[0]}ms AND time < ${to}ms`;
else query += ` WHERE time >= ${from}ms AND time < ${to}ms`;
}
}
/* Get db to sum over 30s periods*/
if(global_total) query += ` GROUP BY time(30s)`;
return query;
}
/* Parse db response rows ([timestamp, value] pairs) into separate x and y
   arrays. Wherever two consecutive timestamps are more than 60s apart, a
   synthetic point (gap start + 1s, null) is inserted so the plot draws a
   break instead of a connecting line. Returns [xArray, yArray]. */
function parseData(responsedata) {
  /* Where to insert the gap markers, collected while unpacking column 0 */
  var gapIndices = [];
  var gapTimes = [];
  var column = function(colIndex) {
    return responsedata.map(function(row, rowIndex) {
      if (colIndex == 0 && rowIndex < responsedata.length - 1) {
        /* Gap between two timestamps is > 60s */
        if (Math.abs(responsedata[rowIndex + 1][colIndex] - responsedata[rowIndex][colIndex]) > 60000) {
          gapTimes.push(responsedata[rowIndex][colIndex] + 1000);
          gapIndices.push(rowIndex + 1);
        }
      }
      return row[colIndex];
    });
  };
  /* Unpack both columns, then splice the gap markers in (offset by the number
     of markers already inserted) */
  var xs = column(0);
  for (var g = 0; g < gapIndices.length; g++) {
    xs.splice(gapIndices[g] + g, 0, gapTimes[g]);
  }
  var ys = column(1);
  for (var g = 0; g < gapIndices.length; g++) {
    ys.splice(gapIndices[g] + g, 0, null);
  }
  return [xs, ys];
}
/* Fetch trace data from db and add to plot*/
function getTraceData(trace, showing){
trace.dataset.path = trace.dataset.path.replace(" ","groups_total ")
if(window.location.pathname == "/cloud/status/") var newpath = "plot";
else var newpath = "/cloud/status/plot";
var nullvalues = [];
if(showing == true) query = createQuery(trace.dataset.path, TSPlot.traces[0].x[0], date, showing);
else query = createQuery(trace.dataset.path, date-3600000, date, showing);
const csrftoken = document.getElementsByName('csrfmiddlewaretoken')[0].value;
fetch(newpath,{
method: 'POST',
headers: {'Accept': 'application/json', 'X-CSRFToken': csrftoken},
credentials: 'same-origin',
body: query,
}
)
.then(function(response){
/* Check response status code*/
if(response.ok){
return response.json();
}throw new Error('Could not get trace :( HTTP response not was not OK -> '+response.status);
})
.then(function(data){
/* Parse response into trace object.*/
if(!(typeof (data.results[0]) !== 'undefined') || !(typeof (data.results[0].series) !== 'undefined')){
throw `Oops! That trace: '${trace.dataset.path}' does not exist`;
}
const newarrays = parseData(data.results[0].series[0].values);
var newtrace = {
mode: 'lines',
name: trace.dataset.path,
x: newarrays[0],
y: newarrays[1],
}
/* Pop trailing nulls that were produced by db through summing in a query*/
if (newtrace.name.split(" ", 1) == 'groups_total'){
if(newtrace.y[newtrace.y.length -1] == null){
newtrace.y.pop();
newtrace.x.pop();
}
}
/* If plot is showing, add trace to plot*/
if(showing == true){
var newlayout = {
yaxis: {
rangemode : "tozero"
},
xaxis: {
type : "date",
range: TSPlot.layout.xaxis.range
}
};
Plotly.relayout('plotly-TS', newlayout);
return Plotly.addTraces('plotly-TS', newtrace);
}
/* Create plot with trace*/
else return TSPlot.initialize(newtrace);
}).then(function(data){
if(showing == true){
/* Store plotted traces for refresh*/
var trace_array = [];
for(var y = 0; y < TSPlot.traces.length; y++){
trace_array.push(TSPlot.traces[y].name)
}
sessionStorage.setItem("traces", JSON.stringify(trace_array));
}
})
.catch(function(error){
if(showing == false) document.getElementById("plot").style.display = 'none';
console.warn(error);
});
}
/* On refresh, check for plotted traces to update colour in status tables*/
function <API key>(){
if (typeof (Storage) !== "undefined"){
var plotted_traces = JSON.parse(sessionStorage.getItem("traces"));
if (plotted_traces != null){
for(var x = 0; x < plotted_traces.length; x++){
if(plotted_traces[x].split(" ", 1) == 'groups_total'){
plotted_traces[x] = plotted_traces[x].replace("groups_total "," ")
}
var stat = document.querySelectorAll('td[data-path="'+plotted_traces[x]+'"]');
for(var k = 0; k < stat.length; k++){
stat[k].classList.toggle("plotted");
}
}
}
}
}
/* On refresh, re-open the extra row if it was expanded before the reload. */
function checkForExpandedRow() {
  if (typeof (Storage) === "undefined") return;
  var expanded_row = JSON.parse(sessionStorage.getItem("extra-row"));
  if (expanded_row != null && expanded_row == true) {
    document.getElementById('toggle-row').click();
  }
}
/* Refresh plot every 30 seconds with new data from db*/
function refresh_plot() {
/* Only refresh if plot is showing*/
if(TSPlot.showing == true){
var traces = TSPlot.traces;
var newdata = {
y: [],
x: []
};
var index = [];
var query = createQuery(traces[0].name, traces[0].x[traces[0].x.length-1], 0, true)
/* Keep track of order of traces that are global totals. This info is used in updateTraces() below*/
var global_total = [];
if (traces[0].name.split(" ", 1) == 'groups_total') global_total.push(true);
else global_total.push(false);
index.push(0);
/* Create string of queries for db*/
for (var i = 1; i < traces.length; i++){
if (traces[i].name.split(" ", 1) == 'groups_total') global_total.push(true);
else global_total.push(false);
index.push(i);
query += ';'
query += createQuery(traces[i].name, traces[i].x[traces[i].x.length-1], 0, true);
}
if(window.location.pathname == "/cloud/status/") var newpath = "plot";
else var newpath = "/cloud/status/plot";
const csrftoken = document.getElementsByName('csrfmiddlewaretoken')[0].value;
fetch(newpath,{
method: 'POST',
headers: {'Accept': 'application/json', 'X-CSRFToken': csrftoken},
credentials: 'same-origin',
body: query,
}
)
.then(function(response){
/* Check response status code*/
if(response.ok){
return response.json();
}throw new Error('Could not update trace(s) :( HTTP response not was not OK -> '+response.status)
})
.then(function(data){
/* Parse response into arrays of new points*/
var new_points = true;
for(var k = 0; k < traces.length; k++){
if(!(typeof (data.results[k]) !== 'undefined') || !(typeof (data.results[k].series) !== 'undefined')){
new_points = false;
break;
}
const responsedata = data.results[k].series[0].values;
const unpackData = (arr, index) => {
var newarr = arr.map(x => x[index]);
return newarr
}
var updatetrace = {
x: unpackData(responsedata, 0),
y: unpackData(responsedata, 1)
}
/* Update new data for traces*/
newdata.y.push(unpackData(responsedata, 1));
newdata.x.push(unpackData(responsedata, 0));
}
/* Update plot with new data*/
if(new_points == true) return updateTraces(newdata, index, global_total);
else return;
})
.catch(error => console.warn(error));
}
}
/* Update plot traces with most recent data points and new range*/
function updateTraces(newdata, index, global_total_list){
/* If global view totals, check for null values in last < 30s period and remove*/
if(global_total_list && global_total_list.length){
for(var x = 0; x < global_total_list.length; x++){
if(global_total_list[x]){
/* Pop trailing nulls that were produced by db through summing in a query*/
if(newdata.y[x][newdata.y[x].length-1] == null){
newdata.y[x].pop();
newdata.x[x].pop();
}
}
}
}
/* If last plotted data point was 55s or more ago, insert null to show break in plot*/
for(var k = 0; k < index.length; k++){
var len = TSPlot.traces[k].x.length -1;
if(typeof(TSPlot.traces[k]) !== 'undefined'){
if(TSPlot.traces[k].x[len] < ((newdata.x[k][0])-60000)){
newdata.x[k].unshift(newdata.x[k][0] - 1000);
newdata.y[k].unshift(null);
}
}
}
Plotly.extendTraces('plotly-TS', newdata, index);
/* Only update range if if looking at last 12 hours or less*/
if(TSPlot.layout.xaxis.range[1] >= date && (date - TSPlot.layout.xaxis.range[0]) <= 43200000){
date = Date.now();
var diff = date - TSPlot.layout.xaxis.range[1];
TSPlot.layout.xaxis.range[1] = date;
TSPlot.layout.xaxis.range[0] += diff;
}
var newlayout = {
yaxis: {
rangemode : "tozero"
},
xaxis: {
type : "date",
range: TSPlot.layout.xaxis.range
}
};
Plotly.relayout('plotly-TS', newlayout);
}
function downloadPlot(){
Plotly.downloadImage('plotly-TS', {format: 'png', width: 1200, height: 600, filename: 'newplot'});
}
/* Plot Object — singleton wrapping the Plotly time-series plot in #plotly-TS */
var TSPlot = {
  /* Shared layout; xaxis.range is assigned in initialize() */
  layout: {
    yaxis: {
      rangemode: "tozero",
    },
    xaxis: {
      type: 'date',
    },
    margin: {
      l: 50,
      r: 50,
      t: 40,
      b: 40
    },
    showlegend: true,
    hoverlabel: {
      namelength:30
    }
  },
  /* Whether the plot div is currently visible */
  showing: false,
  /* Currently plotted trace objects ({mode, name, x, y}) */
  traces: [],
  /* Create new plot with trace in div; default view is the last hour */
  initialize: function(trace) {
    TSPlot.layout.xaxis.range = [date-3600000, date];
    TSPlot.traces.push(trace);
    Plotly.newPlot('plotly-TS', TSPlot.traces, TSPlot.layout, {responsive: true, displayModeBar: false});
    /* Remember the plotted trace name across refreshes */
    var traces = [];
    traces.push(trace.name);
    sessionStorage.setItem("traces", JSON.stringify(traces));
  },
  /* Hide plot and reset all plot-related UI state */
  hide: function() {
    var newlayout = {
      yaxis: {
        rangemode : "tozero"
      },
      xaxis: {
        type : "date"
      }
    };
    Plotly.relayout('plotly-TS', newlayout);
    TSPlot.traces = [];
    TSPlot.showing = false;
    /* Hide plot div */
    document.getElementById("plot").style.display = 'none';
    /* Reset the range button label to the default */
    const curr_range = document.<API key>("range-btn");
    curr_range[0].innerHTML = 'Last 1 hour<span class="space"></span><span class="caret"></span>';
    /* Remove indication of plotted traces */
    var list = document.<API key>('plotted');
    var init_length = list.length;
    for (var i = 0; i < init_length; i++) {
      var value = list[0];
      value.classList.remove('plotted');
    }
    sessionStorage.removeItem("traces");
    /* Clear range-dropdown highlights and restore the default (last hour) entry */
    const dropdowns = document.<API key>("selected");
    var l = dropdowns.length;
    for (var i = 0; i < l; i++) {
      dropdowns[0].classList.remove('selected');
    }
    document.querySelectorAll('a[data-from="60"]')[0].classList.add('selected');
    /* Purge shortly after the relayout settles */
    setTimeout(function(){
      Plotly.purge('plotly-TS');
    }, 10);
  },
  /* Show plot */
  show: function(){
    TSPlot.showing = true;
    document.getElementById("plot").style.display = 'block';
  }
}//TSPlot
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using Rimss.GraphicsProcessing.Palette.ColorCaches.Common;
using Rimss.GraphicsProcessing.Palette.Ditherers;
using Rimss.GraphicsProcessing.Palette.Extensions;
using Rimss.GraphicsProcessing.Palette.PathProviders;
using Rimss.GraphicsProcessing.Palette.Quantizers;
namespace Rimss.GraphicsProcessing.Palette.Helpers
{
public class ImageBuffer : IDisposable
{
#region | Fields |

// Precalculated per-column bit offsets (x * BitDepth mod 8) within a byte.
private Int32[] fastBitX;
// Precalculated per-column byte offsets within a row.
private Int32[] fastByteX;
// Precalculated per-row byte offsets (y * Stride) into the pixel data.
private Int32[] fastY;

// The wrapped bitmap, its locked pixel data, and the mode it was locked with.
private readonly Bitmap bitmap;
private readonly BitmapData bitmapData;
private readonly ImageLockMode lockMode;

// Palette cache — presumably refreshed by UpdatePalette() (defined elsewhere
// in this class); verify its invalidation rules there.
private List<Color> cachedPalette;

#endregion
#region | Delegates |

// Per-pixel callbacks. The Boolean return value tells the processing loop
// whether the (possibly modified) pixel should be written back to the buffer
// (see ProcessPerPixelBase). The "advanced" variants also receive the buffer(s).
public delegate Boolean <API key>(Pixel pixel);
public delegate Boolean <API key>(Pixel pixel, ImageBuffer buffer);
// Source/target pixel pair callbacks used by the transformation functions.
public delegate Boolean <API key>(Pixel sourcePixel, Pixel targetPixel);
public delegate Boolean <API key>(Pixel sourcePixel, Pixel targetPixel, ImageBuffer sourceBuffer, ImageBuffer targetBuffer);

#endregion
#region | Properties |

public Int32 Width { get; private set; }          // image width in pixels
public Int32 Height { get; private set; }         // image height in pixels
public Int32 Size { get; private set; }           // total pixel-data bytes (Stride * Height)
public Int32 Stride { get; private set; }         // bytes per row (absolute value)
public Int32 BitDepth { get; private set; }       // bits per pixel
public Int32 BytesPerPixel { get; private set; }  // bytes per pixel (minimum 1)
public Boolean IsIndexed { get; private set; }    // true for palette-indexed formats
public PixelFormat PixelFormat { get; private set; }

#endregion
#region | Calculated properties |

/// <summary>
/// Gets a value indicating whether this buffer can be read.
/// </summary>
/// <value>
/// <c>true</c> if this instance can read; otherwise, <c>false</c>.
/// </value>
public Boolean CanRead
{
    get { return lockMode == ImageLockMode.ReadOnly || lockMode == ImageLockMode.ReadWrite; }
}

/// <summary>
/// Gets a value indicating whether this buffer can be written to.
/// </summary>
/// <value>
/// <c>true</c> if this instance can write; otherwise, <c>false</c>.
/// </value>
public Boolean CanWrite
{
    get { return lockMode == ImageLockMode.WriteOnly || lockMode == ImageLockMode.ReadWrite; }
}

/// <summary>
/// Gets or sets the palette. The getter delegates to UpdatePalette()
/// (defined elsewhere in this class); the setter writes the palette to the
/// underlying bitmap and updates the local cache.
/// </summary>
public List<Color> Palette
{
    get { return UpdatePalette(); }
    set
    {
        bitmap.SetPalette(value);
        cachedPalette = value;
    }
}

#endregion
#region | Constructors |

/// <summary>
/// Initializes a new instance of the <see cref="ImageBuffer"/> class.
/// </summary>
public ImageBuffer(Image bitmap, ImageLockMode lockMode) : this((Bitmap) bitmap, lockMode) { }

/// <summary>
/// Initializes a new instance of the <see cref="ImageBuffer"/> class,
/// locking the bitmap's pixel data in the given mode for the buffer's lifetime.
/// </summary>
public ImageBuffer(Bitmap bitmap, ImageLockMode lockMode)
{
    // locks the image data
    this.bitmap = bitmap;
    this.lockMode = lockMode;

    // gathers the information about the bitmap
    Width = bitmap.Width;
    Height = bitmap.Height;
    PixelFormat = bitmap.PixelFormat;
    IsIndexed = PixelFormat.IsIndexed();
    BitDepth = PixelFormat.GetBitDepth();
    // at least 1 byte even for sub-byte (indexed) formats
    BytesPerPixel = Math.Max(1, BitDepth >> 3);

    // determines the bounds of an image, and locks the data in a specified mode
    Rectangle bounds = Rectangle.FromLTRB(0, 0, Width, Height);

    // locks the bitmap data (the lock guards concurrent LockBits on the same bitmap)
    lock (bitmap) bitmapData = bitmap.LockBits(bounds, lockMode, PixelFormat);

    // creates internal buffer; Stride can be negative for bottom-up bitmaps
    Stride = bitmapData.Stride < 0 ? -bitmapData.Stride : bitmapData.Stride;
    Size = Stride*Height;

    // precalculates the offsets
    Precalculate();
}

#endregion
#region | Maintenance methods |
/// <summary>
/// Precalculates per-column bit/byte offsets and per-row byte offsets so
/// pixel addressing later is a pair of array lookups instead of arithmetic.
/// </summary>
private void Precalculate()
{
    fastBitX = new Int32[Width];
    fastByteX = new Int32[Width];
    fastY = new Int32[Height];

    // x-coordinates: byte offset within the row and bit offset within the byte
    for (Int32 x = 0; x < Width; x++)
    {
        Int32 bitOffset = x * BitDepth;
        fastByteX[x] = bitOffset >> 3;
        fastBitX[x] = bitOffset & 7;
    }

    // y-coordinates: byte offset of each row start
    for (Int32 y = 0; y < Height; y++)
    {
        fastY[y] = y * bitmapData.Stride;
    }
}
/// <summary>
/// Returns the precalculated bit offset (within its byte) of column x.
/// </summary>
public Int32 GetBitOffset(Int32 x)
{
    return fastBitX[x];
}
/// <summary>
/// Copies the whole locked pixel data into a new byte array (a backup that
/// can later be restored with <see cref="Paste"/>).
/// </summary>
public Byte[] Copy()
{
    // transfers whole image to a working memory
    Byte[] result = new Byte[Size];
    Marshal.Copy(bitmapData.Scan0, result, 0, Size);

    // returns the backup
    return result;
}

/// <summary>
/// Writes the given byte array back over the whole locked pixel data.
/// </summary>
public void Paste(Byte[] buffer)
{
    // commits the data to a bitmap
    Marshal.Copy(buffer, 0, bitmapData.Scan0, Size);
}
#endregion
#region | Pixel read methods |
/// <summary>
/// Reads the pixel's raw data at its current [X, Y]: directly from the locked
/// bitmap memory when <paramref name="buffer"/> is null, otherwise from the
/// given working byte buffer.
/// </summary>
public void ReadPixel(Pixel pixel, Byte[] buffer = null)
{
    // determines pixel offset at [x, y]
    Int32 offset = fastByteX[pixel.X] + fastY[pixel.Y];

    // reads the pixel from a bitmap
    if (buffer == null)
    {
        pixel.ReadRawData(bitmapData.Scan0 + offset);
    }
    else // reads the pixel from a buffer
    {
        pixel.ReadData(buffer, offset);
    }
}
/// <summary>
/// Returns the palette index stored in the given pixel; valid only for
/// indexed pixel formats.
/// </summary>
/// <exception cref="<API key>">the buffer's format is not indexed.</exception>
public Int32 GetIndexFromPixel(Pixel pixel)
{
    // determines whether the format is indexed
    if (IsIndexed)
    {
        return pixel.Index;
    }

    // not possible to get index from a non-indexed format
    // (IDIOM FIX: string.Format with no arguments was redundant)
    const String message = "Cannot retrieve index for a non-indexed format. Please use Color (or Value) property instead.";
    throw new <API key>(message);
}
/// <summary>
/// Resolves the color of the given pixel: indexed pixels are looked up in
/// their parent's palette, non-indexed pixels carry the color directly.
/// </summary>
public Color GetColorFromPixel(Pixel pixel)
{
    if (pixel.IsIndexed)
    {
        // indexed format: translate the palette index into a color
        return pixel.Parent.GetPaletteColor(pixel.Index);
    }

    // non-indexed format: the pixel stores the color itself
    return pixel.Color;
}
/// <summary>
/// Reads the pixel at its current coordinates and returns its palette index.
/// </summary>
public Int32 ReadIndexUsingPixel(Pixel pixel, Byte[] buffer = null)
{
    // reads the pixel from bitmap/buffer
    ReadPixel(pixel, buffer);

    // returns the found index
    return GetIndexFromPixel(pixel);
}

/// <summary>
/// Reads the pixel at its current coordinates and returns its color.
/// </summary>
public Color ReadColorUsingPixel(Pixel pixel, Byte[] buffer = null)
{
    // reads the pixel from bitmap/buffer
    ReadPixel(pixel, buffer);

    // returns the found color
    return GetColorFromPixel(pixel);
}

/// <summary>
/// Moves the pixel to [x, y] and reads its palette index from the bitmap/buffer.
/// </summary>
public Int32 <API key>(Pixel pixel, Int32 x, Int32 y, Byte[] buffer = null)
{
    // redirects pixel -> [x, y]
    pixel.Update(x, y);

    // reads index from a bitmap/buffer using pixel, and stores it in the pixel
    return ReadIndexUsingPixel(pixel, buffer);
}

/// <summary>
/// Moves the pixel to [x, y] and reads its color from the bitmap/buffer.
/// </summary>
public Color <API key>(Pixel pixel, Int32 x, Int32 y, Byte[] buffer = null)
{
    // redirects pixel -> [x, y]
    pixel.Update(x, y);

    // reads color from a bitmap/buffer using pixel, and stores it in the pixel
    return ReadColorUsingPixel(pixel, buffer);
}
#endregion
#region | Pixel write methods |
/// <summary>
/// Writes the pixel's raw data at its current [X, Y]: directly into the locked
/// bitmap memory when <paramref name="buffer"/> is null, otherwise into the
/// given working byte buffer.
/// </summary>
private void WritePixel(Pixel pixel, Byte[] buffer = null)
{
    // determines pixel offset at [x, y]
    Int32 offset = fastByteX[pixel.X] + fastY[pixel.Y];

    // writes the pixel to a bitmap
    if (buffer == null)
    {
        pixel.WriteRawData(bitmapData.Scan0 + offset);
    }
    else // writes the pixel to a buffer
    {
        pixel.WriteData(buffer, offset);
    }
}
/// <summary>
/// Stores a palette index in the pixel; valid only for indexed formats.
/// </summary>
/// <remarks>
/// NOTE(review): <paramref name="buffer"/> is unused here; it appears to be
/// kept for signature symmetry with the other pixel methods — confirm.
/// </remarks>
/// <exception cref="<API key>">the buffer's format is not indexed.</exception>
public void SetIndexToPixel(Pixel pixel, Int32 index, Byte[] buffer = null)
{
    // determines whether the format is indexed
    if (IsIndexed)
    {
        pixel.Index = (Byte) index;
    }
    else // cannot write an index to a non-indexed format
    {
        // (IDIOM FIX: string.Format with no arguments was redundant)
        const String message = "Cannot set index for a non-indexed format. Please use Color (or Value) property instead.";
        throw new <API key>(message);
    }
}
/// <summary>
/// Stores a color in the pixel. For indexed formats the color is mapped to a
/// palette index via the quantizer; without a quantizer this is an error.
/// </summary>
/// <exception cref="<API key>">indexed format and no quantizer supplied.</exception>
public void SetColorToPixel(Pixel pixel, Color color, IColorQuantizer quantizer)
{
    // determines whether the format is indexed
    if (pixel.IsIndexed)
    {
        // last chance: if a quantizer is provided, use it to find a palette index
        if (quantizer != null)
        {
            Byte index = (Byte) quantizer.GetPaletteIndex(color, pixel.X, pixel.Y);
            pixel.Index = index;
        }
        else // cannot write color to an indexed format without a quantizer
        {
            // BUG FIX: the original message ("Cannot retrieve color for an
            // indexed format. Use GetPixelIndex() instead.") was copied from
            // the read path; this is the set path.
            const String message = "Cannot set color for an indexed format without a quantizer. Use SetIndexToPixel() instead.";
            throw new <API key>(message);
        }
    }
    else // sets color to a non-indexed format
    {
        pixel.Color = color;
    }
}
/// <summary>
/// Sets the pixel's palette index and writes the pixel back to the bitmap/buffer.
/// </summary>
public void <API key>(Pixel pixel, Int32 index, Byte[] buffer = null)
{
    // sets index to pixel (pixel's index is updated)
    SetIndexToPixel(pixel, index, buffer);

    // writes pixel to a bitmap/buffer
    WritePixel(pixel, buffer);
}

/// <summary>
/// Sets the pixel's color (quantizing for indexed formats) and writes it back.
/// </summary>
public void <API key>(Pixel pixel, Color color, IColorQuantizer quantizer, Byte[] buffer = null)
{
    // sets color to pixel (pixel is updated with color)
    SetColorToPixel(pixel, color, quantizer);

    // writes pixel to a bitmap/buffer
    WritePixel(pixel, buffer);
}

/// <summary>
/// Moves the pixel to [x, y], sets the palette index there and writes it back.
/// </summary>
public void <API key>(Pixel pixel, Int32 x, Int32 y, Int32 index, Byte[] buffer = null)
{
    // redirects pixel -> [x, y]
    pixel.Update(x, y);

    // writes index to bitmap/buffer using pixel
    <API key>(pixel, index, buffer);
}

/// <summary>
/// Moves the pixel to [x, y], sets the color there and writes it back.
/// </summary>
public void <API key>(Pixel pixel, Int32 x, Int32 y, Color color, IColorQuantizer quantizer, Byte[] buffer = null)
{
    // redirects pixel -> [x, y]
    pixel.Update(x, y);

    // writes color to bitmap/buffer using pixel
    <API key>(pixel, color, quantizer, buffer);
}
#endregion
#region | Generic methods |
/// <summary>
/// Splits <paramref name="path"/> into <paramref name="parallelTaskCount"/>
/// contiguous offset ranges and runs <paramref name="process"/> on each range
/// in parallel.
/// </summary>
private void ProcessInParallel(ICollection<Point> path, Action<LineTask> process, Int32 parallelTaskCount = 4)
{
    // checks parameters
    Guard.CheckNull(process, "process");

    // updates the palette
    UpdatePalette();

    // prepares parallel processing
    Double pointsPerTask = (1.0*path.Count)/parallelTaskCount;
    LineTask[] lineTasks = new LineTask[parallelTaskCount];
    Double pointOffset = 0.0;

    // creates task for each batch of rows.
    // NOTE(review): offsets are truncated from doubles; ranges stay contiguous,
    // but confirm the final EndOffset reaches path.Count exactly so no trailing
    // point is dropped by rounding.
    for (Int32 index = 0; index < parallelTaskCount; index++)
    {
        lineTasks[index] = new LineTask((Int32) pointOffset, (Int32) (pointOffset + pointsPerTask));
        pointOffset += pointsPerTask;
    }

    // process the image in a parallel manner
    Parallel.ForEach(lineTasks, process);
}
#endregion
#region | Processing methods |
/// <summary>
/// Core per-pixel driver: for every point in <paramref name="path"/> it reads
/// the pixel (when the buffer is readable), invokes the user callback, and
/// writes the pixel back when the callback returns true (and the buffer is
/// writable). Work is distributed via ProcessInParallel.
/// </summary>
private void ProcessPerPixelBase(IList<Point> path, Delegate processingAction, Int32 parallelTaskCount = 4)
{
    // checks parameters
    Guard.CheckNull(path, "path");
    Guard.CheckNull(processingAction, "<API key>");

    // determines mode — advanced callbacks also receive this buffer
    Boolean isAdvanced = processingAction is <API key>;

    // prepares the per pixel task
    Action<LineTask> processPerPixel = lineTask =>
    {
        // initializes variables per task (one Pixel cursor per parallel task)
        Pixel pixel = new Pixel(this);

        for (Int32 pathOffset = lineTask.StartOffset; pathOffset < lineTask.EndOffset; pathOffset++)
        {
            Point point = path[pathOffset];
            Boolean allowWrite;

            // enumerates the pixel, and returns the control to the outside
            pixel.Update(point.X, point.Y);

            // when read is allowed, retrieves current value (in bytes)
            if (CanRead) ReadPixel(pixel);

            // process the pixel by custom user operation
            if (isAdvanced)
            {
                <API key> <API key> = (<API key>) processingAction;
                allowWrite = <API key>(pixel, this);
            }
            else // use simplified version with pixel parameter only
            {
                <API key> processFunction = (<API key>) processingAction;
                allowWrite = processFunction(pixel);
            }

            // when write is allowed, copies the value back to the row buffer
            if (CanWrite && allowWrite) WritePixel(pixel);
        }
    };

    // processes image per pixel
    ProcessInParallel(path, processPerPixel, parallelTaskCount);
}
// Runs the simple per-pixel callback (receives only the pixel) over the path.
public void ProcessPerPixel(IList<Point> path, <API key> <API key>, Int32 parallelTaskCount = 4)
{
ProcessPerPixelBase(path, <API key>, parallelTaskCount);
}
// Runs the advanced per-pixel callback (pixel plus this buffer) over the path.
public void <API key>(IList<Point> path, <API key> <API key>, Int32 parallelTaskCount = 4)
{
ProcessPerPixelBase(path, <API key>, parallelTaskCount);
}
#endregion
#region | Transformation functions |
// Shared worker behind both transform entry points: walks the point path over
// a same-sized source/target pair, reads both pixels where allowed, hands them
// to the user callback, and writes the TARGET pixel back when the callback
// returns true. The source buffer is never written by this method.
private void <API key>(ImageBuffer target, IList<Point> path, Delegate transformAction, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(path, "path");
Guard.CheckNull(target, "target");
Guard.CheckNull(transformAction, "transformAction");
// updates the palette
UpdatePalette();
target.UpdatePalette();
// checks the dimensions
if (Width != target.Width || Height != target.Height)
{
const String message = "Both images have to have the same dimensions.";
throw new <API key>(message);
}
// determines mode (advanced callbacks also receive both buffers as arguments)
Boolean isAdvanced = transformAction is <API key>;
// process the image in a parallel manner
Action<LineTask> transformPerPixel = lineTask =>
{
// creates individual pixel structures per task
Pixel sourcePixel = new Pixel(this);
Pixel targetPixel = new Pixel(target);
// enumerates the pixels row by row
for (Int32 pathOffset = lineTask.StartOffset; pathOffset < lineTask.EndOffset; pathOffset++)
{
Point point = path[pathOffset];
Boolean allowWrite;
// enumerates the pixel, and returns the control to the outside
sourcePixel.Update(point.X, point.Y);
targetPixel.Update(point.X, point.Y);
// when read is allowed, retrieves current value (in bytes)
if (CanRead) ReadPixel(sourcePixel);
if (target.CanRead) target.ReadPixel(targetPixel);
// process the pixel by custom user operation
if (isAdvanced)
{
<API key> <API key> = (<API key>) transformAction;
allowWrite = <API key>(sourcePixel, targetPixel, this, target);
}
else // use simplified version with pixel parameters only
{
<API key> transformFunction = (<API key>) transformAction;
allowWrite = transformFunction(sourcePixel, targetPixel);
}
// when write is allowed, copies the value back to the row buffer
if (target.CanWrite && allowWrite) target.WritePixel(targetPixel);
}
};
// transforms image per pixel
ProcessInParallel(path, transformPerPixel, parallelTaskCount);
}
// Runs the simple transform callback (source pixel, target pixel) over the path.
public void TransformPerPixel(ImageBuffer target, IList<Point> path, <API key> <API key>, Int32 parallelTaskCount = 4)
{
<API key>(target, path, <API key>, parallelTaskCount);
}
// Runs the advanced transform callback (pixels plus both buffers) over the path.
public void <API key>(ImageBuffer target, IList<Point> path, <API key> <API key>, Int32 parallelTaskCount = 4)
{
<API key>(target, path, <API key>, parallelTaskCount);
}
#endregion
#region | Scan colors methods |
// Feeds every pixel's color (with its coordinates) into the quantizer so it can
// accumulate statistics for palette synthesis; the image itself is not modified.
public void ScanColors(IColorQuantizer quantizer, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(quantizer, "quantizer");
// the quantizer chooses the traversal order of the scan
IList<Point> path = quantizer.GetPointPath(Width, Height);
// scan callback: reports the color, never writes the pixel back (returns false)
<API key> scanColors = pixel =>
{
quantizer.AddColor(GetColorFromPixel(pixel), pixel.X, pixel.Y);
return false;
};
// performs the image scan, using a chosen method
ProcessPerPixel(path, scanColors, parallelTaskCount);
}
// Convenience overload: wraps a raw Image in a read-only buffer and scans its
// colors into the quantizer. See ScanColors for the scanning contract.
public static void ScanImageColors(Image sourceImage, IColorQuantizer quantizer, Int32 parallelTaskCount = 4)
{
    // a null image cannot be scanned
    Guard.CheckNull(sourceImage, "sourceImage");
    // scanning only reads pixels, so a read-only lock is sufficient
    using (ImageBuffer sourceBuffer = new ImageBuffer(sourceImage, ImageLockMode.ReadOnly))
    {
        sourceBuffer.ScanColors(quantizer, parallelTaskCount);
    }
}
#endregion
#region | Synthetize palette methods |
// Produces a palette of at most colorCount colors for this image: resets the
// quantizer, feeds it every pixel, then asks it for the resulting palette.
public List<Color> SynthetizePalette(IColorQuantizer quantizer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
    // a quantizer is mandatory for palette synthesis
    Guard.CheckNull(quantizer, "quantizer");
    // reset -> scan -> extract, in that order
    quantizer.Prepare(this);
    ScanColors(quantizer, parallelTaskCount);
    return quantizer.GetPalette(colorCount);
}
// Convenience overload: wraps a raw Image in a read-only buffer and synthesizes
// a palette of at most colorCount colors for it via SynthetizePalette.
public static List<Color> <API key>(Image sourceImage, IColorQuantizer quantizer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(sourceImage, "sourceImage");
// wraps source image to a buffer
using (ImageBuffer source = new ImageBuffer(sourceImage, ImageLockMode.ReadOnly))
{
return source.SynthetizePalette(quantizer, colorCount, parallelTaskCount);
}
}
#endregion
#region | Quantize methods |
// Quantizes this buffer into the target without dithering; dithering is
// optional in the main overload and disabled here by passing null.
public void Quantize(ImageBuffer target, IColorQuantizer quantizer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
    Quantize(target, quantizer, null, colorCount, parallelTaskCount);
}
// Quantizes this buffer into the target: synthesizes a palette when the target
// is indexed, installs it, then transforms every pixel through the quantizer,
// optionally applying in-place or post-pass dithering.
public void Quantize(ImageBuffer target, IColorQuantizer quantizer, IColorDitherer ditherer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(target, "target");
Guard.CheckNull(quantizer, "quantizer");
// initializes quantization parameters
Boolean isTargetIndexed = target.PixelFormat.IsIndexed();
// step 1 - prepares the palettes
// NOTE(review): for a non-indexed target, targetPalette stays null below —
// confirm SetPalette/UpdatePalette tolerate a null palette
List<Color> targetPalette = isTargetIndexed ? SynthetizePalette(quantizer, colorCount, parallelTaskCount) : null;
// step 2 - updates the bitmap palette
target.bitmap.SetPalette(targetPalette);
target.UpdatePalette(true);
// step 3 - prepares ditherer (optional)
if (ditherer != null) ditherer.Prepare(quantizer, colorCount, this, target);
// step 4 - prepares the quantization function
<API key> quantize = (sourcePixel, targetPixel) =>
{
// reads the pixel color
Color color = GetColorFromPixel(sourcePixel);
// converts alpha to solid color
color = QuantizationHelper.ConvertAlpha(color);
// quantizes the pixel
SetColorToPixel(targetPixel, color, quantizer);
// marks pixel as processed by default
Boolean result = true;
// preforms inplace dithering (optional)
if (ditherer != null && ditherer.IsInplace)
{
result = ditherer.ProcessPixel(sourcePixel, targetPixel);
}
// returns the result
return result;
};
// step 5 - generates the target image
IList<Point> path = quantizer.GetPointPath(Width, Height);
TransformPerPixel(target, path, quantize, parallelTaskCount);
// step 6 - preforms non-inplace dithering (optional); single-threaded, since
// error-diffusion ditherers depend on strict pixel ordering
if (ditherer != null && !ditherer.IsInplace)
{
Dither(target, ditherer, quantizer, colorCount, 1);
}
// step 7 - finishes the dithering (optional)
if (ditherer != null) ditherer.Finish();
// step 8 - clean-up
quantizer.Finish();
}
// Quantizes the source buffer into a brand-new Bitmap (format chosen to fit
// colorCount) without dithering, and returns that image.
public static Image QuantizeImage(ImageBuffer source, IColorQuantizer quantizer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
// performs the pure quantization wihout dithering
return QuantizeImage(source, quantizer, null, colorCount, parallelTaskCount);
}
// Quantizes the source buffer into a brand-new Bitmap, optionally dithering.
public static Image QuantizeImage(ImageBuffer source, IColorQuantizer quantizer, IColorDitherer ditherer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(source, "source");
// creates a target bitmap in an appropriate format
PixelFormat targetPixelFormat = Extend.<API key>(colorCount);
Image result = new Bitmap(source.Width, source.Height, targetPixelFormat);
// lock mode: a ditherer also reads the target back, hence the wider lock
ImageLockMode lockMode = ditherer == null ? ImageLockMode.WriteOnly : ImageLockMode.ReadWrite;
// wraps target image to a buffer
using (ImageBuffer target = new ImageBuffer(result, lockMode))
{
source.Quantize(target, quantizer, ditherer, colorCount, parallelTaskCount);
return result;
}
}
// Quantizes a raw Image into a brand-new Bitmap without dithering.
public static Image QuantizeImage(Image sourceImage, IColorQuantizer quantizer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
    // delegate to the dithering overload with dithering switched off
    return QuantizeImage(sourceImage, quantizer, null, colorCount, parallelTaskCount);
}
// Quantizes a raw Image into a brand-new Bitmap, optionally dithering.
public static Image QuantizeImage(Image sourceImage, IColorQuantizer quantizer, IColorDitherer ditherer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
    // a null source cannot be quantized
    Guard.CheckNull(sourceImage, "sourceImage");
    // a ditherer needs to write error terms back into the source as well
    ImageLockMode sourceLockMode = (ditherer == null) ? ImageLockMode.ReadOnly : ImageLockMode.ReadWrite;
    using (ImageBuffer sourceBuffer = new ImageBuffer(sourceImage, sourceLockMode))
    {
        return QuantizeImage(sourceBuffer, quantizer, ditherer, colorCount, parallelTaskCount);
    }
}
#endregion
#region | Calculate mean error methods |
// Computes the root-mean-square per-channel error between this buffer and a
// same-sized target buffer. Both buffers are only read, never written.
public Double CalculateMeanError(ImageBuffer target, Int32 parallelTaskCount = 4)
{
    // checks parameters
    Guard.CheckNull(target, "target");
    // accumulated sum of per-pixel distances, shared by all worker tasks
    Int64 totalError = 0;
    // prepares the function
    <API key> calculateMeanError = (sourcePixel, targetPixel) =>
    {
        Color sourceColor = GetColorFromPixel(sourcePixel);
        Color targetColor = GetColorFromPixel(targetPixel);
        // FIX: this callback runs concurrently on parallelTaskCount tasks, so
        // the shared accumulator must be updated atomically; the previous plain
        // "+=" was a data race that could silently lose updates
        System.Threading.Interlocked.Add(ref totalError, ColorModelHelper.<API key>(ColorModel.RedGreenBlue, sourceColor, targetColor));
        return false;
    };
    // performs the image scan, using a chosen method
    IList<Point> standardPath = new <API key>().GetPointPath(Width, Height);
    TransformPerPixel(target, standardPath, calculateMeanError, parallelTaskCount);
    // returns the RMSD over all three channels of every pixel
    return Math.Sqrt(totalError/(3.0*Width*Height));
}
// Mean error between two buffers; forwards to the instance method.
public static Double <API key>(ImageBuffer source, ImageBuffer target, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(source, "source");
// use other override to calculate error
return source.CalculateMeanError(target, parallelTaskCount);
}
// Mean error between a buffer and a raw Image (wrapped read-only).
public static Double <API key>(ImageBuffer source, Image targetImage, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(source, "source");
Guard.CheckNull(targetImage, "targetImage");
// wraps source image to a buffer
using (ImageBuffer target = new ImageBuffer(targetImage, ImageLockMode.ReadOnly))
{
// use other override to calculate error
return source.CalculateMeanError(target, parallelTaskCount);
}
}
// Mean error between a raw Image (wrapped read-only) and a buffer.
public static Double <API key>(Image sourceImage, ImageBuffer target, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(sourceImage, "sourceImage");
// wraps source image to a buffer
using (ImageBuffer source = new ImageBuffer(sourceImage, ImageLockMode.ReadOnly))
{
// use other override to calculate error
return source.CalculateMeanError(target, parallelTaskCount);
}
}
// Mean error between two raw Images (both wrapped read-only).
public static Double <API key>(Image sourceImage, Image targetImage, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(sourceImage, "sourceImage");
Guard.CheckNull(targetImage, "targetImage");
// wraps source image to a buffer
using (ImageBuffer source = new ImageBuffer(sourceImage, ImageLockMode.ReadOnly))
using (ImageBuffer target = new ImageBuffer(targetImage, ImageLockMode.ReadOnly))
{
// use other override to calculate error
return source.CalculateMeanError(target, parallelTaskCount);
}
}
#endregion
#region | Calculate normalized mean error methods |
// Mean error scaled into [0, 1] by dividing the per-channel RMSD by 255.
public Double <API key>(ImageBuffer target, Int32 parallelTaskCount = 4)
{
return CalculateMeanError(target, parallelTaskCount) / 255.0;
}
// Normalized mean error between a buffer and a raw Image (wrapped read-only).
public static Double <API key>(ImageBuffer source, Image targetImage, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(source, "source");
Guard.CheckNull(targetImage, "targetImage");
// wraps source image to a buffer
using (ImageBuffer target = new ImageBuffer(targetImage, ImageLockMode.ReadOnly))
{
// use other override to calculate error
return source.<API key>(target, parallelTaskCount);
}
}
// Normalized mean error between a raw Image (wrapped read-only) and a buffer.
public static Double <API key>(Image sourceImage, ImageBuffer target, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(sourceImage, "sourceImage");
// wraps source image to a buffer
using (ImageBuffer source = new ImageBuffer(sourceImage, ImageLockMode.ReadOnly))
{
// use other override to calculate error
return source.<API key>(target, parallelTaskCount);
}
}
// Normalized mean error between two buffers; forwards to the instance method.
public static Double <API key>(ImageBuffer source, ImageBuffer target, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(source, "source");
// use other override to calculate error
return source.<API key>(target, parallelTaskCount);
}
// Normalized mean error between two raw Images (both wrapped read-only).
public static Double <API key>(Image sourceImage, Image targetImage, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(sourceImage, "sourceImage");
Guard.CheckNull(targetImage, "targetImage");
// wraps source image to a buffer
using (ImageBuffer source = new ImageBuffer(sourceImage, ImageLockMode.ReadOnly))
using (ImageBuffer target = new ImageBuffer(targetImage, ImageLockMode.ReadOnly))
{
// use other override to calculate error
return source.<API key>(target, parallelTaskCount);
}
}
#endregion
#region | Change pixel format methods |
// Converts this buffer into the target buffer's pixel format, synthesizing a
// palette first when the target is indexed. Dimensions must already match
// (enforced by the transform base method).
public void ChangeFormat(ImageBuffer target, IColorQuantizer quantizer, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(target, "target");
Guard.CheckNull(quantizer, "quantizer");
// gathers some information about the target format
Boolean hasSourceAlpha = PixelFormat.HasAlpha();
Boolean hasTargetAlpha = target.PixelFormat.HasAlpha();
Boolean isTargetIndexed = target.PixelFormat.IsIndexed();
Boolean isSourceDeepColor = PixelFormat.IsDeepColor();
Boolean isTargetDeepColor = target.PixelFormat.IsDeepColor();
// step 1 to 3 - prepares the palettes
if (isTargetIndexed) SynthetizePalette(quantizer, target.PixelFormat.GetColorCount(), parallelTaskCount);
// prepares the quantization function
<API key> changeFormat = (sourcePixel, targetPixel) =>
{
// if both source and target formats are deep color formats, copies a value directly
if (isSourceDeepColor && isTargetDeepColor)
{
// NOTE(review): the direct copy below is commented out, so a deep-color to
// deep-color conversion currently leaves the target pixel untouched while
// still returning true (writing it back) — confirm this is intentional
//UInt64 value = sourcePixel.Value;
//targetPixel.SetValue(value);
}
else
{
// retrieves a source image color
Color color = GetColorFromPixel(sourcePixel);
// if alpha is not present in the source image, but is present in the target, make one up
if (!hasSourceAlpha && hasTargetAlpha)
{
Int32 argb = 255 << 24 | color.R << 16 | color.G << 8 | color.B;
color = Color.FromArgb(argb);
}
// sets the color to a target pixel
SetColorToPixel(targetPixel, color, quantizer);
}
// allows to write (obviously) the transformed pixel
return true;
};
// step 5 - generates the target image
IList<Point> standardPath = new <API key>().GetPointPath(Width, Height);
TransformPerPixel(target, standardPath, changeFormat, parallelTaskCount);
}
// Converts a source buffer into a freshly allocated Image of the requested
// pixel format; the new image is returned through the out parameter.
public static void ChangeFormat(ImageBuffer source, PixelFormat targetFormat, IColorQuantizer quantizer, out Image targetImage, Int32 parallelTaskCount = 4)
{
    // a null source cannot be converted
    Guard.CheckNull(source, "source");
    // allocates the output bitmap with the requested format and matching size
    targetImage = new Bitmap(source.Width, source.Height, targetFormat);
    // the fresh image only needs to be written, never read
    using (ImageBuffer targetBuffer = new ImageBuffer(targetImage, ImageLockMode.WriteOnly))
    {
        source.ChangeFormat(targetBuffer, quantizer, parallelTaskCount);
    }
}
// Converts a raw Image into a freshly allocated Image of the requested format.
public static void ChangeFormat(Image sourceImage, PixelFormat targetFormat, IColorQuantizer quantizer, out Image targetImage, Int32 parallelTaskCount = 4)
{
    // a null source cannot be converted
    Guard.CheckNull(sourceImage, "sourceImage");
    // wraps the source read-only, then reuses the buffer-based overload
    using (ImageBuffer sourceBuffer = new ImageBuffer(sourceImage, ImageLockMode.ReadOnly))
    {
        ChangeFormat(sourceBuffer, targetFormat, quantizer, out targetImage, parallelTaskCount);
    }
}
#endregion
#region | Dithering methods |
// Dithers this buffer into the target using the given ditherer; the point
// order is chosen by the ditherer itself (error diffusion is order-sensitive).
public void Dither(ImageBuffer target, IColorDitherer ditherer, IColorQuantizer quantizer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
    // validates all mandatory arguments up front
    Guard.CheckNull(target, "target");
    Guard.CheckNull(ditherer, "ditherer");
    Guard.CheckNull(quantizer, "quantizer");
    // lets the ditherer initialize itself for this source/target pair
    ditherer.Prepare(quantizer, colorCount, this, target);
    // walks the ditherer's preferred point order, transforming pixel by pixel
    IList<Point> ditherPath = ditherer.GetPointPath(Width, Height);
    TransformPerPixel(target, ditherPath, ditherer.ProcessPixel, parallelTaskCount);
}
// Dithers one buffer into another; forwards to the instance method.
public static void DitherImage(ImageBuffer source, ImageBuffer target, IColorDitherer ditherer, IColorQuantizer quantizer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
    // a null source cannot be dithered
    Guard.CheckNull(source, "source");
    source.Dither(target, ditherer, quantizer, colorCount, parallelTaskCount);
}
// Dithers a source buffer into a raw target Image.
public static void DitherImage(ImageBuffer source, Image targetImage, IColorDitherer ditherer, IColorQuantizer quantizer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
    // checks parameters
    Guard.CheckNull(source, "source");
    Guard.CheckNull(targetImage, "targetImage");
    // FIX: the target is written to by the ditherer; locking it ReadOnly made
    // the buffer's CanWrite false, so every dithered pixel was silently
    // discarded and the target image never changed
    using (ImageBuffer target = new ImageBuffer(targetImage, ImageLockMode.ReadWrite))
    {
        // dithers the source into the target
        source.Dither(target, ditherer, quantizer, colorCount, parallelTaskCount);
    }
}
// Dithers a raw source Image into a target buffer.
public static void DitherImage(Image sourceImage, ImageBuffer target, IColorDitherer ditherer, IColorQuantizer quantizer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
    // a null source cannot be dithered
    Guard.CheckNull(sourceImage, "sourceImage");
    // wraps the source read-only and dithers it into the given target buffer
    using (ImageBuffer sourceBuffer = new ImageBuffer(sourceImage, ImageLockMode.ReadOnly))
    {
        sourceBuffer.Dither(target, ditherer, quantizer, colorCount, parallelTaskCount);
    }
}
// Dithers a raw source Image into a raw target Image.
public static void DitherImage(Image sourceImage, Image targetImage, IColorDitherer ditherer, IColorQuantizer quantizer, Int32 colorCount, Int32 parallelTaskCount = 4)
{
    // checks parameters
    Guard.CheckNull(sourceImage, "sourceImage");
    Guard.CheckNull(targetImage, "targetImage");
    // the source is only read; the target must be writable
    // FIX: the target was previously locked ReadOnly, which made the buffer's
    // CanWrite false and silently discarded every dithered pixel
    using (ImageBuffer source = new ImageBuffer(sourceImage, ImageLockMode.ReadOnly))
    using (ImageBuffer target = new ImageBuffer(targetImage, ImageLockMode.ReadWrite))
    {
        // dithers the source into the target
        source.Dither(target, ditherer, quantizer, colorCount, parallelTaskCount);
    }
}
#endregion
#region | Gamma correction |
// Applies gamma correction to this buffer in place, using a precomputed
// 256-entry lookup ramp and re-quantizing each corrected color.
public void CorrectGamma(Single gamma, IColorQuantizer quantizer, Int32 parallelTaskCount = 4)
{
// checks parameters
Guard.CheckNull(quantizer, "quantizer");
// determines which method of color retrieval to use
IList<Point> path = quantizer.GetPointPath(Width, Height);
// calculates gamma ramp: ramp[i] = clamp(255 * (i/255)^(1/gamma) + 0.5)
Int32[] gammaRamp = new Int32[256];
for (Int32 index = 0; index < 256; ++index)
{
gammaRamp[index] = Clamp((Int32) ((255.0f*Math.Pow(index/255.0f, 1.0f/gamma)) + 0.5f));
}
// use different scanning method depending whether the image format is indexed
<API key> correctGamma = pixel =>
{
Color oldColor = GetColorFromPixel(pixel);
Int32 red = gammaRamp[oldColor.R];
Int32 green = gammaRamp[oldColor.G];
Int32 blue = gammaRamp[oldColor.B];
// NOTE(review): Color.FromArgb(r, g, b) yields a fully opaque color, so any
// source alpha is discarded here — confirm this is intended
Color newColor = Color.FromArgb(red, green, blue);
SetColorToPixel(pixel, newColor, quantizer);
return true;
};
// performs the image scan, using a chosen method
ProcessPerPixel(path, correctGamma, parallelTaskCount);
}
// Applies gamma correction to a raw Image in place.
public static void CorrectImageGamma(Image sourceImage, Single gamma, IColorQuantizer quantizer, Int32 parallelTaskCount = 4)
{
    // checks parameters
    Guard.CheckNull(sourceImage, "sourceImage");
    // FIX: gamma correction rewrites the image's own pixels; with the previous
    // ReadOnly lock the buffer's CanWrite was false, so the corrected values
    // were computed and then silently thrown away
    using (ImageBuffer source = new ImageBuffer(sourceImage, ImageLockMode.ReadWrite))
    {
        source.CorrectGamma(gamma, quantizer, parallelTaskCount);
    }
}
#endregion
#region | Palette methods |
// Constrains value into [minimum, maximum]; the defaults cover a byte channel.
// The lower bound is applied first, matching the original sequential logic.
public static Int32 Clamp(Int32 value, Int32 minimum = 0, Int32 maximum = 255)
{
    Int32 result = value < minimum ? minimum : value;
    return result > maximum ? maximum : result;
}
// Lazily loads (or, with forceUpdate, reloads) the cached palette of an
// indexed image from the underlying bitmap; non-indexed images are untouched.
private List<Color> UpdatePalette(Boolean forceUpdate = false)
{
if (IsIndexed && (cachedPalette == null || forceUpdate))
{
cachedPalette = bitmap.GetPalette();
}
// for non-indexed images this returns whatever is cached (typically null)
return cachedPalette;
}
// Returns the palette entry at the given index.
// NOTE(review): assumes UpdatePalette() has already populated the cache;
// cachedPalette may be null for non-indexed images — confirm callers guard this.
public Color GetPaletteColor(Int32 paletteIndex)
{
return cachedPalette[paletteIndex];
}
#endregion
#region << IDisposable >>
// Releases the bit lock acquired when this buffer wrapped the bitmap; the
// lock statement serializes unlocking against concurrent use of the bitmap.
public void Dispose()
{
// releases the image lock
lock (bitmap) bitmap.UnlockBits(bitmapData);
}
#endregion
#region | Sub-classes |
// Describes one worker's slice of the point path as a half-open offset range.
// FIX: the XML doc comments below had lost their "///" markers, leaving bare
// <summary> lines that are not valid C#; the doc-comment syntax is restored.
private class LineTask
{
    /// <summary>
    /// Gets the start offset (inclusive) of the range this task covers.
    /// </summary>
    public Int32 StartOffset { get; private set; }

    /// <summary>
    /// Gets the end offset (exclusive) of the range this task covers.
    /// </summary>
    public Int32 EndOffset { get; private set; }

    /// <summary>
    /// Initializes a task covering the half-open range [startOffset, endOffset).
    /// </summary>
    public LineTask(Int32 startOffset, Int32 endOffset)
    {
        StartOffset = startOffset;
        EndOffset = endOffset;
    }
}
#endregion
}
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="hu">
<head>
<!-- Generated by javadoc (1.8.0_121) on Thu May 25 15:04:13 CEST 2017 -->
<title>application.beadando3.DAO</title>
<meta name="date" content="2017-05-25">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="application.beadando3.DAO";
}
}
catch(err) {
}
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<div class="topNav"><a name="navbar.top">
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../application/beadando3/DAO/package-summary.html">Package</a></li>
<li>Class</li>
<li><a href="package-use.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev Package</li>
<li>Next Package</li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?application/beadando3/DAO/package-summary.html" target="_top">Frames</a></li>
<li><a href="package-summary.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="<API key>">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("<API key>");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
</script>
</div>
<a name="skip.navbar.top">
</a></div>
<div class="header">
<h1 title="Package" class="title">Package application.beadando3.DAO</h1>
</div>
<div class="contentContainer">
<ul class="blockList">
<li class="blockList">
<table class="typeSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation">
<caption><span>Class Summary</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Class</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="../../../application/beadando3/DAO/FeaturesModelDAO.html" title="class in application.beadando3.DAO">FeaturesModelDAO</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../application/beadando3/DAO/InterfacesModelDAO.html" title="class in application.beadando3.DAO">InterfacesModelDAO</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../application/beadando3/DAO/LoginModelDAO.html" title="class in application.beadando3.DAO">LoginModelDAO</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../application/beadando3/DAO/PingModelDAO.html" title="class in application.beadando3.DAO">PingModelDAO</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="../../../application/beadando3/DAO/RouterModelDAO.html" title="class in application.beadando3.DAO">RouterModelDAO</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../application/beadando3/DAO/TracerouteModelDAO.html" title="class in application.beadando3.DAO">TracerouteModelDAO</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
</ul>
<dl>
<dt><span class="simpleTagLabel">Author:</span></dt>
<dd>danida</dd>
</dl>
</div>
<div class="bottomNav"><a name="navbar.bottom">
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../application/beadando3/DAO/package-summary.html">Package</a></li>
<li>Class</li>
<li><a href="package-use.html">Use</a></li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev Package</li>
<li>Next Package</li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?application/beadando3/DAO/package-summary.html" target="_top">Frames</a></li>
<li><a href="package-summary.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="<API key>">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("<API key>");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
</script>
</div>
<a name="skip.navbar.bottom">
</a></div>
</body>
</html> |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_45) on Mon Jun 24 22:24:58 UTC 2013 -->
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
<TITLE>
org.apache.hadoop.hbase.rest.filter Class Hierarchy (HBase 0.94.9 API)
</TITLE>
<META NAME="date" CONTENT="2013-06-24">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="org.apache.hadoop.hbase.rest.filter Class Hierarchy (HBase 0.94.9 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<A NAME="navbar_top"></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../org/apache/hadoop/hbase/rest/client/package-tree.html"><B>PREV</B></A>
<A HREF="../../../../../../org/apache/hadoop/hbase/rest/metrics/package-tree.html"><B>NEXT</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/apache/hadoop/hbase/rest/filter/package-tree.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<HR>
<CENTER>
<H2>
Hierarchy For Package org.apache.hadoop.hbase.rest.filter
</H2>
</CENTER>
<DL>
<DT><B>Package Hierarchies:</B><DD><A HREF="../../../../../../overview-tree.html">All Packages</A></DL>
<HR>
<H2>
Class Hierarchy
</H2>
<UL>
<LI TYPE="circle">java.lang.<A HREF="http://java.sun.com/javase/6/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang"><B>Object</B></A><UL>
<LI TYPE="circle">org.apache.hadoop.hbase.rest.filter.<A HREF="../../../../../../org/apache/hadoop/hbase/rest/filter/GzipFilter.html" title="class in org.apache.hadoop.hbase.rest.filter"><B>GzipFilter</B></A> (implements javax.servlet.Filter)
<LI TYPE="circle">java.io.<A HREF="http://java.sun.com/javase/6/docs/api/java/io/InputStream.html?is-external=true" title="class or interface in java.io"><B>InputStream</B></A> (implements java.io.Closeable)
<UL>
<LI TYPE="circle">javax.servlet.ServletInputStream<UL>
<LI TYPE="circle">org.apache.hadoop.hbase.rest.filter.<A HREF="../../../../../../org/apache/hadoop/hbase/rest/filter/GZIPRequestStream.html" title="class in org.apache.hadoop.hbase.rest.filter"><B>GZIPRequestStream</B></A></UL>
</UL>
<LI TYPE="circle">java.io.<A HREF="http://java.sun.com/javase/6/docs/api/java/io/OutputStream.html?is-external=true" title="class or interface in java.io"><B>OutputStream</B></A> (implements java.io.Closeable, java.io.Flushable)
<UL>
<LI TYPE="circle">javax.servlet.ServletOutputStream<UL>
<LI TYPE="circle">org.apache.hadoop.hbase.rest.filter.<A HREF="../../../../../../org/apache/hadoop/hbase/rest/filter/GZIPResponseStream.html" title="class in org.apache.hadoop.hbase.rest.filter"><B>GZIPResponseStream</B></A></UL>
</UL>
<LI TYPE="circle">javax.servlet.<API key> (implements javax.servlet.ServletRequest)
<UL>
<LI TYPE="circle">javax.servlet.http.<API key> (implements javax.servlet.http.HttpServletRequest)
<UL>
<LI TYPE="circle">org.apache.hadoop.hbase.rest.filter.<A HREF="../../../../../../org/apache/hadoop/hbase/rest/filter/GZIPRequestWrapper.html" title="class in org.apache.hadoop.hbase.rest.filter"><B>GZIPRequestWrapper</B></A></UL>
</UL>
<LI TYPE="circle">javax.servlet.<API key> (implements javax.servlet.ServletResponse)
<UL>
<LI TYPE="circle">javax.servlet.http.<API key> (implements javax.servlet.http.HttpServletResponse)
<UL>
<LI TYPE="circle">org.apache.hadoop.hbase.rest.filter.<A HREF="../../../../../../org/apache/hadoop/hbase/rest/filter/GZIPResponseWrapper.html" title="class in org.apache.hadoop.hbase.rest.filter"><B>GZIPResponseWrapper</B></A></UL>
</UL>
</UL>
</UL>
<HR>
<A NAME="navbar_bottom"></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="<API key>"></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../org/apache/hadoop/hbase/rest/client/package-tree.html"><B>PREV</B></A>
<A HREF="../../../../../../org/apache/hadoop/hbase/rest/metrics/package-tree.html"><B>NEXT</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/apache/hadoop/hbase/rest/filter/package-tree.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<HR>
Copyright &copy; 2013 The Apache Software Foundation. All Rights Reserved.
</BODY>
</HTML> |
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2016.01.27 at 04:17:31 PM EST
package org.w3._1999.xlink;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlMixed;
import javax.xml.bind.annotation.XmlType;
import org.w3c.dom.Element;
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "titleEltType", propOrder = {
"content"
})
public class TitleEltType {
@XmlMixed
@XmlAnyElement(lax = true)
protected List<Object> content;
@XmlAttribute(name = "type", namespace = "http:
protected TypeType type;
@XmlAttribute(name = "lang", namespace = "http:
protected String lang;
/**
* Gets the value of the content property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the content property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getContent().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link Object }
* {@link Element }
* {@link String }
*
*
*/
public List<Object> getContent() {
if (content == null) {
content = new ArrayList<Object>();
}
return this.content;
}
/**
* Gets the value of the type property.
*
* @return
* possible object is
* {@link TypeType }
*
*/
public TypeType getType() {
if (type == null) {
return TypeType.TITLE;
} else {
return type;
}
}
/**
* Sets the value of the type property.
*
* @param value
* allowed object is
* {@link TypeType }
*
*/
public void setType(TypeType value) {
this.type = value;
}
/**
*
* xml:lang is not required, but provides much of the
* motivation for title elements in addition to attributes, and so
* is provided here for convenience.
*
*
* @return
* possible object is
* {@link String }
*
*/
public String getLang() {
return lang;
}
/**
* Sets the value of the lang property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setLang(String value) {
this.lang = value;
}
} |
# AUTOGENERATED FILE
FROM balenalib/nitrogen6x-alpine:3.13-build
# remove several traces of python
RUN apk del python*
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
# key 63C7CC90: public key "Simon McVittie <smcv@pseudorandom.co.uk>" imported
# key 3372DCFA: public key "Donald Stufft (dstufft) <donald@stufft.io>" imported
# Import the GPG keys used to verify the downloads below.
RUN gpg --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \
	&& gpg --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \
	&& gpg --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059
# point Python at a system-provided certificate database. Otherwise, we might hit <API key>.
ENV SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
ENV PYTHON_VERSION 3.6.15
# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'"
ENV PYTHON_PIP_VERSION 21.3.1
ENV SETUPTOOLS_VERSION 60.5.4
# Download, checksum-verify, and unpack the prebuilt Python tarball; bootstrap
# pip only if it is not already present; pin pip/setuptools to the versions
# above; then remove test suites and bytecode caches to shrink the image.
RUN set -x \
	&& curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.<API key>.3.tar.gz" \
	&& echo "<SHA256-like> Python-$PYTHON_VERSION.<API key>.3.tar.gz" | sha256sum -c - \
	&& tar -xzf "Python-$PYTHON_VERSION.<API key>.3.tar.gz" --strip-components=1 \
	&& rm -rf "Python-$PYTHON_VERSION.<API key>.3.tar.gz" \
	&& if [ ! -e /usr/local/bin/pip3 ]; then : \
	&& curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/<SHA1-like>/get-pip.py" \
	&& echo "<SHA256-like> get-pip.py" | sha256sum -c - \
	&& python3 get-pip.py \
	&& rm get-pip.py \
	; fi \
	&& pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \
	&& find /usr/local \
	\( -type d -a -name test -o -name tests \) \
	-o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \
	-exec rm -rf '{}' + \
	&& cd / \
	&& rm -rf /usr/src/python ~/.cache
# install "virtualenv", since the vast majority of users of this image will want it
RUN pip3 install --no-cache-dir virtualenv
ENV PYTHON_DBUS_VERSION 1.2.18
# install dbus-python dependencies
RUN apk add --no-cache \
	dbus-dev \
	dbus-glib-dev
# install dbus-python
# Downloads the dbus-python source, verifies its GPG signature, and builds it
# against the major.minor Python version derived from PYTHON_VERSION.
RUN set -x \
	&& mkdir -p /usr/src/dbus-python \
	&& curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz" -o dbus-python.tar.gz \
	&& curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz.asc" -o dbus-python.tar.gz.asc \
	&& gpg --verify dbus-python.tar.gz.asc \
	&& tar -xzC /usr/src/dbus-python --strip-components=1 -f dbus-python.tar.gz \
	&& rm dbus-python.tar.gz* \
	&& cd /usr/src/dbus-python \
	&& PYTHON_VERSION=$(expr match "$PYTHON_VERSION" '\([0-9]*\.[0-9]*\)') ./configure \
	&& make -j$(nproc) \
	&& make install -j$(nproc) \
	&& cd / \
	&& rm -rf /usr/src/dbus-python
# make some useful symlinks that are expected to exist
RUN cd /usr/local/bin \
	&& ln -sf pip3 pip \
	&& { [ -e easy_install ] || ln -s easy_install-* easy_install; } \
	&& ln -sf idle3 idle \
	&& ln -sf pydoc3 pydoc \
	&& ln -sf python3 python \
	&& ln -sf python3-config python-config
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Self-test of the Python stack inside the built image.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/<SHA1-like>/scripts/assets/tests/test-stack@python.sh" \
	&& echo "Running test-stack@python" \
	&& chmod +x test-stack@python.sh \
	&& bash test-stack@python.sh \
	&& rm -rf test-stack@python.sh
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https:
# Replace /bin/sh with a shim that prints image info before handing off to the
# real shell (the original is preserved as /bin/sh.real).
RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& ln -f /bin/sh /bin/sh.real \
	&& ln -f /bin/sh-shim /bin/sh
#!/bin/sh
# Verify that every git committer reachable from HEAD appears in the
# CONTRIBUTORS file; exit non-zero (for CI) when one is missing.
set -e

# Enforce known sort order. Must be exported: a plain assignment would not
# reach the child sort/comm processes, making the comparison locale-dependent.
export LC_COLLATE=C

# "Name <email>" for every committer, sorted.
committers=$(git shortlog -se HEAD | cut -f2,3 | sort)

# Sort contributors (different systems have slightly different sorting logics)
sort CONTRIBUTORS > SCONTRIBUTORS

# Committers absent from the sorted CONTRIBUTORS list.
missing_authors=$(echo "$committers" | comm -13 SCONTRIBUTORS -)

# Known exceptions: drop these committers from the missing list.
# NOTE(review): these sed expressions were truncated in the source; restored
# as plain deletions — confirm against the original script.
missing_authors=$(echo ${missing_authors} | sed 's/Caleb Donovick <donovick@cs.stanford.edu>//')
missing_authors=$(echo ${missing_authors} | sed 's/Guillem Francès <guillem.frances@upf.edu>//')
missing_authors=$(echo ${missing_authors} | sed 's/Matthew Fernandez <matthew.fernandez@gmail.com>//')

if [ -n "$missing_authors" ]
then
	# Emit GitHub Actions error annotations, then a human-readable summary.
	echo "$missing_authors" | awk '{print "::error file=SCONTRIBUTORS,line=0:: MISSING: " $0}'
	echo " Some committers do NOT appear in CONTRIBUTORS "
	echo ""
	echo "$missing_authors"
	echo "== Note: The following contributors are not committers. Do we need to update .mailmap? =="
	echo "$committers" | comm -23 SCONTRIBUTORS -
	exit 1
else
	echo "== Note: The following contributors were checked"
	echo "$committers" | comm -12 SCONTRIBUTORS -
	echo "All good!"
fi
# AUTOGENERATED FILE
FROM balenalib/cubox-i-ubuntu:disco-build
ENV NODE_VERSION 10.24.0
ENV YARN_VERSION 1.22.4
# Import the Node release-signing key (trying several keyservers), then
# download and checksum-verify Node, GPG-verify and install Yarn.
# NOTE(review): this RUN instruction is truncated in this copy of the file —
# it ends with a line continuation; the remainder is not visible here.
RUN for key in \
	<API key> \
	; do \
	gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
	gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
	gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
	done \
	&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$<API key>.tar.gz" \
	&& echo "<SHA256-like> node-v$<API key>.tar.gz" | sha256sum -c - \
	&& tar -xzf "node-v$<API key>.tar.gz" -C /usr/local --strip-components=1 \
	&& rm "node-v$<API key>.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
	&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
	&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& mkdir -p /opt/yarn \
	&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
	&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
	&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
	&& npm config set unsafe-perm true -g --unsafe-perm \
# AUTOGENERATED FILE
FROM balenalib/up-core-alpine:3.11-build
# Default to UTF-8 file.encoding
ENV LANG C.UTF-8
# add a simple script that can auto-detect the appropriate JAVA_HOME value
# based on whether the JDK or only the JRE is installed
RUN { \
	echo '#!/bin/sh'; \
	echo 'set -e'; \
	echo; \
	echo 'dirname "$(dirname "$(readlink -f "$(which javac || which java)")")"'; \
	} > /usr/local/bin/docker-java-home \
	&& chmod +x /usr/local/bin/docker-java-home
ENV JAVA_HOME /usr/lib/jvm/java-1.8-openjdk
ENV PATH $PATH:/usr/lib/jvm/java-1.8-openjdk/jre/bin:/usr/lib/jvm/java-1.8-openjdk/bin
# Install OpenJDK 8 and sanity-check that JAVA_HOME matches the detected path.
RUN set -x \
	&& apk add --no-cache \
	openjdk8 \
	&& [ "$JAVA_HOME" = "$(docker-java-home)" ]
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https:
# Replace /bin/sh with a shim that prints image info before handing off to the
# real shell (the original is preserved as /bin/sh.real).
RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& ln -f /bin/sh /bin/sh.real \
	&& ln -f /bin/sh-shim /bin/sh
// Noty theme "wso2": registers a theme object on $.noty.themes that the Noty
// plugin looks up by name when a notification is created with theme: 'wso2'.
$.noty.themes.wso2 = {
    name    : 'wso2',
    helpers : {},
    // Full-screen dimmer shown behind modal notifications.
    modal   : {
        css: {
            position       : 'fixed',
            width          : '100%',
            height         : '100%',
            backgroundColor: '#000',
            zIndex         : 10000,
            opacity        : 0.6,
            display        : 'none',
            left           : 0,
            top            : 0
        }
    },
    // Called by Noty with the notification instance as `this`; applies all
    // inline styling to the bar, message, close button, and button row.
    style   : function() {
        this.$bar.css({
            overflow    : 'hidden',
            margin      : '4px 0',
            borderRadius: '2px'
        });
        this.$message.css({
            fontSize  : '14px',
            lineHeight: '16px',
            textAlign : 'center',
            padding   : '10px',
            width     : 'auto',
            position  : 'relative'
        });
        // Close button is a small inline PNG (base64) shown only on hover.
        this.$closeButton.css({
            position  : 'absolute',
            top       : 4, right: 4,
            width     : 10, height: 10,
            background: "url(data:image/png;base64,<API key>++<API key>+<API key>/<API key>+<API key>+<API key>/ve7LNHxSg/<API key>=)",
            display   : 'none',
            cursor    : 'pointer'
        });
        this.$buttons.css({
            padding        : 5,
            textAlign      : 'right',
            borderTop      : '1px solid #ccc',
            backgroundColor: '#fff'
        });
        this.$buttons.find('button').css({
            marginLeft: 5
        });
        this.$buttons.find('button:first').css({
            marginLeft: 0
        });
        // Fade the close button in/out as the pointer enters/leaves the bar.
        this.$bar.on({
            mouseenter: function() {
                $(this).find('.noty_close').stop().fadeTo('normal', 1);
            },
            mouseleave: function() {
                $(this).find('.noty_close').stop().fadeTo('normal', 0);
            }
        });
        // Border/shadow treatment varies by layout position.
        switch(this.options.layout.name) {
            case 'top':
                this.$bar.css({
                    borderBottom: '2px solid #eee',
                    borderLeft  : '2px solid #eee',
                    borderRight : '2px solid #eee',
                    borderTop   : '2px solid #eee',
                    boxShadow   : "0 2px 4px rgba(0, 0, 0, 0.1)"
                });
                break;
            case 'topCenter':
            case 'center':
            case 'bottomCenter':
            case 'inline':
                this.$bar.css({
                    border   : '1px solid #eee',
                    boxShadow: "0 2px 4px rgba(0, 0, 0, 0.1)"
                });
                this.$message.css({fontSize: '13px', textAlign: 'center'});
                break;
            case 'topLeft':
            case 'topRight':
            case 'bottomLeft':
            case 'bottomRight':
            case 'centerLeft':
            case 'centerRight':
                this.$bar.css({
                    border   : '1px solid #eee',
                    boxShadow: "0 2px 4px rgba(0, 0, 0, 0.1)"
                });
                this.$message.css({fontSize: '13px', textAlign: 'left'});
                break;
            case 'bottom':
                this.$bar.css({
                    borderTop   : '2px solid #eee',
                    borderLeft  : '2px solid #eee',
                    borderRight : '2px solid #eee',
                    borderBottom: '2px solid #eee',
                    boxShadow   : "0 -2px 4px rgba(0, 0, 0, 0.1)"
                });
                break;
            default:
                this.$bar.css({
                    border   : '2px solid #eee',
                    boxShadow: "0 2px 4px rgba(0, 0, 0, 0.1)"
                });
                break;
        }
        // Color scheme varies by notification type.
        switch(this.options.type) {
            case 'alert':
            case 'notification':
                this.$bar.css({backgroundColor: '#FFF', borderColor: '#dedede', color: '#444'});
                break;
            case 'warning':
                this.$bar.css({backgroundColor: '#FFEAA8', borderColor: '#FFC237', color: '#826200'});
                this.$buttons.css({borderTop: '1px solid #FFC237'});
                break;
            case 'error':
                this.$bar.css({backgroundColor: '#e74c3c', borderColor: '#e74c3c', color: '#FFF'});
                this.$message.css({fontWeight: 'bold'});
                this.$buttons.css({borderTop: '1px solid darkred'});
                break;
            case 'information':
                this.$bar.css({backgroundColor: '#78C5E7', borderColor: '#3badd6', color: '#FFF'});
                this.$buttons.css({borderTop: '1px solid #0B90C4'});
                break;
            case 'success':
                this.$bar.css({backgroundColor: '#40D47E', borderColor: '#40D47E', color: '#fff'});
                this.$buttons.css({borderTop: '1px solid #50C24E'});
                break;
            default:
                this.$bar.css({backgroundColor: '#FFF', borderColor: '#CCC', color: '#444'});
                break;
        }
    },
    // Lifecycle hooks; intentionally empty for this theme.
    callback: {
        onShow : function() {
        },
        onClose: function() {
        }
    }
};
<!-- BEGIN MUNGE: UNVERSIONED_WARNING -->
<!-- BEGIN STRIP_FOR_RELEASE -->
<h1>*** PLEASE NOTE: This document applies to the HEAD of the source
tree only. If you are using a released version of Kubernetes, you almost
certainly want the docs that go with that version.</h1>
<strong>Documentation for specific releases can be found at
[releases.k8s.io](http://releases.k8s.io).</strong>
<!-- END STRIP_FOR_RELEASE -->
<!-- END MUNGE: UNVERSIONED_WARNING -->
## Getting started with Vagrant
Running kubernetes with Vagrant (and VirtualBox) is an easy way to run/test/develop on your local machine (Linux, Mac OS X).
**Table of Contents**
- [Prerequisites](#prerequisites)
- [Setup](#setup)
- [Interacting with your Kubernetes cluster with Vagrant.](#<API key>)
- [Authenticating with your master](#<API key>)
- [Running containers](#running-containers)
- [Troubleshooting](#troubleshooting)
- [I keep downloading the same (large) box all the time!](#<API key>)
- [I just created the cluster, but I am getting authorization errors!](#<API key>-<API key>)
- [I just created the cluster, but I do not see my container running!](#<API key>)
- [I want to make changes to Kubernetes code!](#<API key>)
- [I have brought Vagrant up but the nodes cannot validate!](#<API key>)
- [I want to change the number of nodes!](#<API key>)
- [I want my VMs to have more memory!](#<API key>)
- [I ran vagrant suspend and nothing works!](#<API key>)
- [I want vagrant to sync folders via nfs!](#<API key>)
Prerequisites
1. Install latest version >= 1.6.2 of vagrant from https://www.vagrantup.com/downloads.html
2. Install one of:
   1. The latest version of Virtual Box from https://www.virtualbox.org/wiki/Downloads
   2. [VMWare Fusion](https://www.vmware.com/products/fusion/) version 5 or greater as well as the appropriate Vagrant VMWare Fusion provider
   3. [VMWare Workstation](https://www.vmware.com/products/workstation/) version 9 or greater as well as the Vagrant VMWare Workstation provider
   4. [Parallels Desktop](https://www.parallels.com/products/desktop/) version 9 or greater as well as the Vagrant Parallels provider
   5. libvirt with KVM and enable support of hardware virtualisation. [Vagrant-libvirt](https://github.com/pradels/vagrant-libvirt). For fedora provided official rpm, and possible to use ```yum install vagrant-libvirt```
Setup
Setting up a cluster is as simple as running:
sh
export KUBERNETES_PROVIDER=vagrant
curl -sS https://get.k8s.io | bash
The `KUBERNETES_PROVIDER` environment variable tells all of the various cluster management scripts which variant to use. If you forget to set this, the assumption is you are running on Google Compute Engine.
By default, the Vagrant setup will create a single master VM (called kubernetes-master) and one node (called kubernetes-minion-1). Each VM will take 1 GB, so make sure you have at least 2GB to 4GB of free memory (plus appropriate free disk space). To start your local cluster, open a shell and run:
sh
cd kubernetes
export KUBERNETES_PROVIDER=vagrant
./cluster/kube-up.sh
Vagrant will provision each machine in the cluster with all the necessary components to run Kubernetes. The initial setup can take a few minutes to complete on each machine.
If you installed more than one Vagrant provider, Kubernetes will usually pick the appropriate one. However, you can override which one Kubernetes will use by setting the [`<API key>`](https://docs.vagrantup.com/v2/providers/default.html) environment variable:
sh
export <API key>=parallels
export KUBERNETES_PROVIDER=vagrant
./cluster/kube-up.sh
By default, each VM in the cluster is running Fedora.
To access the master or any node:
sh
vagrant ssh master
vagrant ssh minion-1
If you are running more than one node, you can access the others by:
sh
vagrant ssh minion-2
vagrant ssh minion-3
Each node in the cluster installs the docker daemon and the kubelet.
The master node instantiates the Kubernetes master components as pods on the machine.
To view the service status and/or logs on the kubernetes-master:
sh
vagrant ssh master
[vagrant@kubernetes-master ~] $ sudo su
[root@kubernetes-master ~] $ systemctl status kubelet
[root@kubernetes-master ~] $ journalctl -ru kubelet
[root@kubernetes-master ~] $ systemctl status docker
[root@kubernetes-master ~] $ journalctl -ru docker
[root@kubernetes-master ~] $ tail -f /var/log/kube-apiserver.log
[root@kubernetes-master ~] $ tail -f /var/log/<API key>.log
[root@kubernetes-master ~] $ tail -f /var/log/kube-scheduler.log
To view the services on any of the nodes:
sh
vagrant ssh minion-1
[vagrant@kubernetes-master ~] $ sudo su
[root@kubernetes-master ~] $ systemctl status kubelet
[root@kubernetes-master ~] $ journalctl -ru kubelet
[root@kubernetes-master ~] $ systemctl status docker
[root@kubernetes-master ~] $ journalctl -ru docker
Interacting with your Kubernetes cluster with Vagrant.
With your Kubernetes cluster up, you can manage the nodes in your cluster with the regular Vagrant commands.
To push updates to new Kubernetes code after making source changes:
sh
./cluster/kube-push.sh
To stop and then restart the cluster:
sh
vagrant halt
./cluster/kube-up.sh
To destroy the cluster:
sh
vagrant destroy
Once your Vagrant machines are up and provisioned, the first thing to do is to check that you can use the `kubectl.sh` script.
You may need to build the binaries first, you can do this with ```make```
sh
$ ./cluster/kubectl.sh get nodes
NAME LABELS
10.245.1.4 <none>
10.245.1.5 <none>
10.245.1.3 <none>
Authenticating with your master
When using the vagrant provider in Kubernetes, the `cluster/kubectl.sh` script will cache your credentials in a `~/.<API key>` file so you will not be prompted for them in the future.
sh
cat ~/.<API key>
{ "User": "vagrant",
"Password": "vagrant",
"CAFile": "/home/k8s_user/.kubernetes.vagrant.ca.crt",
"CertFile": "/home/k8s_user/.kubecfg.vagrant.crt",
"KeyFile": "/home/k8s_user/.kubecfg.vagrant.key"
}
You should now be set to use the `cluster/kubectl.sh` script. For example try to list the nodes that you have started with:
sh
./cluster/kubectl.sh get nodes
Running containers
Your cluster is running, you can list the nodes in your cluster:
sh
$ ./cluster/kubectl.sh get nodes
NAME LABELS
10.245.2.4 <none>
10.245.2.3 <none>
10.245.2.2 <none>
Now start running some containers!
You can now use any of the `cluster/kube-*.sh` commands to interact with your VM machines.
Before starting a container there will be no pods, services and replication controllers.
sh
$ ./cluster/kubectl.sh get pods
NAME READY STATUS RESTARTS AGE
$ ./cluster/kubectl.sh get services
NAME LABELS SELECTOR IP(S) PORT(S)
$ ./cluster/kubectl.sh get <API key>
CONTROLLER CONTAINER(S) IMAGE(S) SELECTOR REPLICAS
Start a container running nginx with a replication controller and three replicas
sh
$ ./cluster/kubectl.sh run my-nginx --image=nginx --replicas=3 --port=80
When listing the pods, you will see that three containers have been started and are in Waiting state:
sh
$ ./cluster/kubectl.sh get pods
NAME READY STATUS RESTARTS AGE
my-nginx-5kq0g 0/1 Pending 0 10s
my-nginx-gr3hh 0/1 Pending 0 10s
my-nginx-xql4j 0/1 Pending 0 10s
You need to wait for the provisioning to complete, you can monitor the nodes by doing:
sh
$ vagrant ssh minion-1 -c 'sudo docker images'
kubernetes-minion-1:
REPOSITORY TAG IMAGE ID CREATED VIRTUAL SIZE
<none> <none> 96864a7d2df3 26 hours ago 204.4 MB
google/cadvisor latest e0575e677c50 13 days ago 12.64 MB
kubernetes/pause latest 6c4579af347b 8 weeks ago 239.8 kB
Once the docker image for nginx has been downloaded, the container will start and you can list it:
sh
$ vagrant ssh minion-1 -c 'sudo docker ps'
kubernetes-minion-1:
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
dbe79bf6e25b nginx:latest "nginx" 21 seconds ago Up 19 seconds k8s--mynginx.<API key>.<API key>
fa0e29c94501 kubernetes/pause:latest "/pause" 8 minutes ago Up 8 minutes 0.0.0.0:8080->80/tcp k8s--net.<API key>.<API key>
aa2ee3ed844a google/cadvisor:latest "/usr/bin/cadvisor" 38 minutes ago Up 38 minutes k8s--cadvisor.<API key>.file--4626b3a2
65a3a926f357 kubernetes/pause:latest "/pause" 39 minutes ago Up 39 minutes 0.0.0.0:4194->8080/tcp k8s--net.<API key>.file--342fd561
Going back to listing the pods, services and <API key>, you now have:
sh
$ ./cluster/kubectl.sh get pods
NAME READY STATUS RESTARTS AGE
my-nginx-5kq0g 1/1 Running 0 1m
my-nginx-gr3hh 1/1 Running 0 1m
my-nginx-xql4j 1/1 Running 0 1m
$ ./cluster/kubectl.sh get services
NAME LABELS SELECTOR IP(S) PORT(S)
$ ./cluster/kubectl.sh get <API key>
CONTROLLER CONTAINER(S) IMAGE(S) SELECTOR REPLICAS
my-nginx my-nginx nginx run=my-nginx 3
We did not start any services, hence there are none listed. But we see three replicas displayed properly.
Check the [guestbook](../../examples/guestbook/README.md) application to learn how to create a service.
You can already play with scaling the replicas with:
sh
$ ./cluster/kubectl.sh scale rc my-nginx --replicas=2
$ ./cluster/kubectl.sh get pods
NAME READY STATUS RESTARTS AGE
my-nginx-5kq0g 1/1 Running 0 2m
my-nginx-gr3hh 1/1 Running 0 2m
Congratulations!
Troubleshooting
# I keep downloading the same (large) box all the time!
By default the Vagrantfile will download the box from S3. You can change this (and cache the box locally) by providing a name and an alternate URL when calling `kube-up.sh`
sh
export KUBERNETES_BOX_NAME=<API key>
export KUBERNETES_BOX_URL=<API key>
export KUBERNETES_PROVIDER=vagrant
./cluster/kube-up.sh
# I just created the cluster, but I am getting authorization errors!
You probably have an incorrect ~/.<API key> file for the cluster you are attempting to contact.
sh
rm ~/.<API key>
After using kubectl.sh make sure that the correct credentials are set:
sh
cat ~/.<API key>
{
"User": "vagrant",
"Password": "vagrant"
}
# I just created the cluster, but I do not see my container running!
If this is your first time creating the cluster, the kubelet on each node schedules a number of docker pull requests to fetch prerequisite images. This can take some time and as a result may delay your initial pod getting provisioned.
# I want to make changes to Kubernetes code!
To set up a vagrant cluster for hacking, follow the [vagrant developer guide](../devel/developer-guides/vagrant.md).
# I have brought Vagrant up but the nodes cannot validate!
Log on to one of the nodes (`vagrant ssh minion-1`) and inspect the salt minion log (`sudo cat /var/log/salt/minion`).
# I want to change the number of nodes!
You can control the number of nodes that are instantiated via the environment variable `NUM_MINIONS` on your host machine. If you plan to work with replicas, we strongly encourage you to work with enough nodes to satisfy your largest intended replica size. If you do not plan to work with replicas, you can save some system resources by running with a single node. You do this, by setting `NUM_MINIONS` to 1 like so:
sh
export NUM_MINIONS=1
# I want my VMs to have more memory!
You can control the memory allotted to virtual machines with the `KUBERNETES_MEMORY` environment variable.
Just set it to the number of megabytes you would like the machines to have. For example:
sh
export KUBERNETES_MEMORY=2048
If you need more granular control, you can set the amount of memory for the master and nodes independently. For example:
sh
export <API key>=1536
export <API key>=2048
# I ran vagrant suspend and nothing works!
```vagrant suspend``` seems to mess up the network. This is not supported at this time.
# I want vagrant to sync folders via nfs!
You can ensure that vagrant uses nfs to sync folders with virtual machines by setting the <API key> environment variable to 'true'. nfs is faster than virtualbox or vmware's 'shared folders' and does not require guest additions. See the [vagrant docs](http://docs.vagrantup.com/v2/synced-folders/nfs.html) for details on configuring nfs on the host. This setting will have no effect on the libvirt provider, which uses nfs by default. For example:
sh
export <API key>=true
<!-- BEGIN MUNGE: GENERATED_ANALYTICS -->
[]()
<!-- END MUNGE: GENERATED_ANALYTICS --> |
package me.dengfengdecao.function;
import org.junit.Test;
/**
*
* @author linyu
*
*/
public class Fibonacci {
int fibonacci_recursion(int n) {
if (n <= 0)
return 0;
if (n == 1)
return 1;
return fibonacci_recursion(n-1) + fibonacci_recursion(n-2);
}
int fibonacci(int n) {
int result[] = {0, 1};
if (n < 2)
return result[n];
int f1 = 1;
int f2 = 0;
int fn = 0;
for (int i = 2; i <= n; i++) {
fn = f1 + f2;
f2 = f1;
f1 = fn;
}
return fn;
}
@Test
public void test1() throws Exception {
int result0 = fibonacci_recursion(0);
int result1 = fibonacci_recursion(1);
int result2 = fibonacci_recursion(2);
int result3 = fibonacci_recursion(10);
int result4 = fibonacci_recursion(40);
System.out.println("test1:" + result0);
System.out.println("test1:" + result1);
System.out.println("test1:" + result2);
System.out.println("test1:" + result3);
System.out.println("test1:" + result4);
}
@Test
public void test2() throws Exception {
int result0 = fibonacci_recursion(0);
int result1 = fibonacci_recursion(1);
int result2 = fibonacci_recursion(2);
int result3 = fibonacci_recursion(10);
int result4 = fibonacci_recursion(40);
System.out.println("test2:" + result0);
System.out.println("test2:" + result1);
System.out.println("test2:" + result2);
System.out.println("test2:" + result3);
System.out.println("test2:" + result4);
}
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_45) on Wed Nov 05 20:13:35 WET 2014 -->
<TITLE>
org.apache.jmeter.sampler Class Hierarchy (Apache JMeter API)
</TITLE>
<META NAME="date" CONTENT="2014-11-05">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="org.apache.jmeter.sampler Class Hierarchy (Apache JMeter API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<A NAME="navbar_top"></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
<b>Apache JMeter</b></EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../org/apache/jmeter/reporters/gui/package-tree.html"><B>PREV</B></A>
<A HREF="../../../../org/apache/jmeter/sampler/gui/package-tree.html"><B>NEXT</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../index.html?org/apache/jmeter/sampler/package-tree.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!
if(window==top) {
document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<HR>
<CENTER>
<H2>
Hierarchy For Package org.apache.jmeter.sampler
</H2>
</CENTER>
<DL>
<DT><B>Package Hierarchies:</B><DD><A HREF="../../../../overview-tree.html">All Packages</A></DL>
<HR>
<H2>
Class Hierarchy
</H2>
<UL>
<LI TYPE="circle">java.lang.<A HREF="http://docs.oracle.com/javase/6/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang"><B>Object</B></A><UL>
<LI TYPE="circle">org.apache.jmeter.testelement.<A HREF="../../../../org/apache/jmeter/testelement/AbstractTestElement.html" title="class in org.apache.jmeter.testelement"><B>AbstractTestElement</B></A> (implements org.apache.jmeter.gui.<A HREF="../../../../org/apache/jmeter/gui/Searchable.html" title="interface in org.apache.jmeter.gui">Searchable</A>, java.io.<A HREF="http://docs.oracle.com/javase/6/docs/api/java/io/Serializable.html?is-external=true" title="class or interface in java.io">Serializable</A>, org.apache.jmeter.testelement.<A HREF="../../../../org/apache/jmeter/testelement/TestElement.html" title="interface in org.apache.jmeter.testelement">TestElement</A>)
<UL>
<LI TYPE="circle">org.apache.jmeter.samplers.<A HREF="../../../../org/apache/jmeter/samplers/AbstractSampler.html" title="class in org.apache.jmeter.samplers"><B>AbstractSampler</B></A> (implements org.apache.jmeter.engine.util.<A HREF="../../../../org/apache/jmeter/engine/util/<API key>.html" title="interface in org.apache.jmeter.engine.util"><API key></A>, org.apache.jmeter.samplers.<A HREF="../../../../org/apache/jmeter/samplers/Sampler.html" title="interface in org.apache.jmeter.samplers">Sampler</A>)
<UL>
<LI TYPE="circle">org.apache.jmeter.sampler.<A HREF="../../../../org/apache/jmeter/sampler/DebugSampler.html" title="class in org.apache.jmeter.sampler"><B>DebugSampler</B></A> (implements org.apache.jmeter.testbeans.<A HREF="../../../../org/apache/jmeter/testbeans/TestBean.html" title="interface in org.apache.jmeter.testbeans">TestBean</A>)
<LI TYPE="circle">org.apache.jmeter.sampler.<A HREF="../../../../org/apache/jmeter/sampler/TestAction.html" title="class in org.apache.jmeter.sampler"><B>TestAction</B></A> (implements org.apache.jmeter.samplers.<A HREF="../../../../org/apache/jmeter/samplers/Interruptible.html" title="interface in org.apache.jmeter.samplers">Interruptible</A>)
</UL>
</UL>
<LI TYPE="circle">java.beans.<A HREF="http:
<UL>
<LI TYPE="circle">org.apache.jmeter.testbeans.<A HREF="../../../../org/apache/jmeter/testbeans/BeanInfoSupport.html" title="class in org.apache.jmeter.testbeans"><B>BeanInfoSupport</B></A><UL>
<LI TYPE="circle">org.apache.jmeter.sampler.<A HREF="../../../../org/apache/jmeter/sampler/<API key>.html" title="class in org.apache.jmeter.sampler"><B><API key></B></A></UL>
</UL>
</UL>
</UL>
<HR>
<A NAME="navbar_bottom"></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="<API key>"></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
<b>Apache JMeter</b></EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../org/apache/jmeter/reporters/gui/package-tree.html"><B>PREV</B></A>
<A HREF="../../../../org/apache/jmeter/sampler/gui/package-tree.html"><B>NEXT</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../index.html?org/apache/jmeter/sampler/package-tree.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!
if(window==top) {
document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<HR>
Copyright &copy; 1998-2014 The Apache Software Foundation. All Rights Reserved.
</BODY>
</HTML> |
## Video
Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**id** | **int** | | [optional]
**id_product** | **int** | | [optional]
**id_video_group** | **int** | | [optional]
**id_product_image** | **int** | | [optional]
**id_language** | **int** | | [optional]
**language_filter** | **int** | | [optional]
**id_media_source** | **int** | | [optional]
**name** | [**\Kinow\Client\Model\I18nField[]**](#I18nField) | | [optional]
**description** | [**\Kinow\Client\Model\I18nField[]**](#I18nField) | | [optional]
**duration** | **int** | | [optional]
**filename** | **string** | | [optional]
**position** | **int** | | [optional]
**subscription** | **int** | | [optional]
**free** | **int** | | [optional]
**download** | **int** | | [optional]
**active** | **bool** | | [optional]
**date_add** | **string** | | [optional]
**date_upd** | **string** | | [optional]
**can_watch** | **bool** | | [optional]
**cover** | **string** | | [optional]
**thumbnail** | **string** | | [optional]
**geoloc_enabled** | **bool** | | [optional]
**<API key>** | **string** | | [optional]
**<API key>** | **string** | | [optional]
**has_free_access** | [**\Kinow\Client\Model\VideoFreeAccess**](#VideoFreeAccess) | | [optional]
**advertising_url** | **string** | | [optional] |
package pe.fu.common.bean;
import pe.fu.common.bean.constant.RequestStatus;
import java.io.Serializable;
/**
 * Serializable wrapper describing the outcome of a request: a numeric
 * status code, a human-readable note, and an arbitrary payload value.
 *
 * @author <a href='mailto:475961393@qq.com'>Fhcj</a><br/>
 * 2016721
 */
public class RequestResult implements Serializable {

    // NOTE(review): the constant literal was corrupted during extraction
    // ("<API key>"); restore the original serialVersionUID value.
    private static final long serialVersionUID = -<API key>;

    // Status code; presumably one of the values in RequestStatus
    // (imported above) — TODO confirm against callers.
    private int status;

    // Optional human-readable message accompanying the status.
    private String note;

    // Arbitrary payload carried with the result.
    private Object value;

    /** @return the numeric status code. */
    public int getStatus() {
        return status;
    }

    /** @param status the numeric status code to report. */
    public void setStatus(int status) {
        this.status = status;
    }

    /** @return the human-readable note, or null when none was set. */
    public String getNote() {
        return note;
    }

    /** @param note the human-readable note to attach. */
    public void setNote(String note) {
        this.note = note;
    }

    /** @return the payload object, or null when none was set. */
    public Object getValue() {
        return value;
    }

    /** @param value the payload object to attach. */
    public void setValue(Object value) {
        this.value = value;
    }
}
package com.example.mypc.eyepetizer.bean.topbean;
import com.google.gson.Gson;
import com.google.gson.annotations.SerializedName;
public class CoverBean {
@SerializedName("feed")
private String feed;
@SerializedName("detail")
private String detail;
@SerializedName("blurred")
private String blurred;
@SerializedName("sharing")
private Object sharing;
public static CoverBean objectFromData(String str) {
return new Gson().fromJson(str, CoverBean.class);
}
public String getFeed() {
return feed;
}
public void setFeed(String feed) {
this.feed = feed;
}
public String getDetail() {
return detail;
}
public void setDetail(String detail) {
this.detail = detail;
}
public String getBlurred() {
return blurred;
}
public void setBlurred(String blurred) {
this.blurred = blurred;
}
public Object getSharing() {
return sharing;
}
public void setSharing(Object sharing) {
this.sharing = sharing;
}
} |
/**
 * Demonstrates converting the letters of a phone "vanity" string to the
 * digits they map to on a standard telephone keypad.
 */
public class phoneAdver
{
    /** Keypad letter groups; group i maps to the digit ('2' + i). */
    private static final String[] KEYPAD = {
        "ABC", "DEF", "GHI", "JKL", "MNO", "PQRS", "TUV", "WXYZ"
    };

    public static void main(String[] args)
    {
        // NOTE(review): the sample input literal was corrupted during
        // extraction; restore the original demo string.
        String input = "<API key>";
        System.out.println("The original String is: " + input);
        String result = convert(input);
        System.out.println("The converted String is: " + result);
    }

    /**
     * Converts every uppercase letter in {@code s} to its phone-keypad
     * digit; all other characters (digits, punctuation, lowercase) pass
     * through unchanged, matching the original behaviour.
     *
     * @param s the string to convert; may be empty
     * @return the converted string
     */
    public static String convert(String s)
    {
        // StringBuilder avoids the O(n^2) cost of repeated String concatenation.
        StringBuilder converted = new StringBuilder(s.length());
        for (int i = 0; i < s.length(); i++)
        {
            converted.append(toDigit(s.charAt(i)));
        }
        return converted.toString();
    }

    /** Maps one uppercase letter to its keypad digit, or returns it unchanged. */
    private static char toDigit(char c)
    {
        for (int group = 0; group < KEYPAD.length; group++)
        {
            if (KEYPAD[group].indexOf(c) >= 0)
            {
                return (char) ('2' + group);
            }
        }
        return c;
    }
}
<?php
namespace Google\AdsApi\AdManager\v202108;
/**
* This file was generated from WSDL. DO NOT EDIT.
*/
class <API key> extends \Google\AdsApi\AdManager\v202108\ApiError
{
/**
* @var string $reason
*/
protected $reason = null;
/**
* @param string $fieldPath
* @param \Google\AdsApi\AdManager\v202108\FieldPathElement[] $fieldPathElements
* @param string $trigger
* @param string $errorString
* @param string $reason
*/
public function __construct($fieldPath = null, array $fieldPathElements = null, $trigger = null, $errorString = null, $reason = null)
{
parent::__construct($fieldPath, $fieldPathElements, $trigger, $errorString);
$this->reason = $reason;
}
/**
* @return string
*/
public function getReason()
{
return $this->reason;
}
/**
* @param string $reason
* @return \Google\AdsApi\AdManager\v202108\<API key>
*/
public function setReason($reason)
{
$this->reason = $reason;
return $this;
}
} |
stitch_rects.cc and hungarian.cc are originally from Tensorbox with name stitch_rects.cpp and hungarian.cpp
cp files to tensorflow/tensorflow/examples/detector/
bazel build -c opt --config=cuda tensorflow/examples/detector/... |
import java.util.Properties
import com.bgfurfeature.kafka.KafkaSink
import com.dyuproject.protostuff.StringSerializer
import org.apache.spark.SparkConf
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Duration, StreamingContext}
import scala.collection.mutable
// Smoke test: pushes a sequence file through a Spark queue stream and
// writes each (key, value) record to Kafka via a broadcast KafkaSink,
// so one producer is lazily created per executor instead of per record.
object KafkaSinkTest extends App {

  var sparkConf = new SparkConf()
  var ssc = new StreamingContext(sparkConf, Duration(10000)) // 10s batch interval

  val kafkaProducer: Broadcast[KafkaSink[String, String]] = {
    val kafkaProducerConfig = {
      val p = new Properties()
      p.setProperty("bootstrap.servers", "") // TODO: fill in the broker list
      // NOTE(review): StringSerializer here comes from com.dyuproject.protostuff
      // (see imports above); Kafka normally expects
      // org.apache.kafka.common.serialization.StringSerializer — confirm intent.
      p.setProperty("key.serializer", classOf[StringSerializer].getName)
      p.setProperty("value.serializer", classOf[StringSerializer].getName)
      p
    }
    println("kafka producer init done!")
    import scala.collection.JavaConversions._
    ssc.sparkContext.broadcast[KafkaSink[String, String]](KafkaSink.apply[String, String]
      (kafkaProducerConfig.toMap))
  }

  var rddQueue = new mutable.Queue[RDD[(String, String)]]()
  var inputSteam = ssc.queueStream(rddQueue)
    .foreachRDD(rdd => {
      if (!rdd.isEmpty()) {
        rdd.foreach(record => {
          // Topic name is empty — TODO: supply the target topic.
          kafkaProducer.value.send("", record._1, record._2)
        })
      }
    })

  ssc.start()
  // Enqueue one RDD read from a sequence file (path left blank — TODO).
  rddQueue += ssc.sparkContext.sequenceFile[String, String]("", 2)
  ssc.awaitTermination()
}
// TODO:
// <summary>
// Defines the <API key> type.
// </summary>
namespace WinUsbRx.Core.DeviceManagement
{
using System;
using System.Runtime.InteropServices;
using Factory;
using UnsafeNative;
using Wrappers;
<summary>
The device interface detail.
</summary>
internal class <API key> : <API key>, IDisposable
{
<summary>
The _device information set handle.
</summary>
private readonly IntPtr <API key>;
<summary>
The _marshal wrapper.
</summary>
private readonly IMarshalWrapper _marshalWrapper;
<summary>
The _unsafe native methods wrapper.
</summary>
private readonly <API key> <API key>;
<summary>
The _device path.
</summary>
private string _devicePath;
<summary>
The _description.
</summary>
private string _description;
<summary>
Initializes a new instance of the <see cref="<API key>"/> class.
</summary>
<param name="<API key>">
The device Information Set Handle.
</param>
<param name="size">
The size.
</param>
<param name="marshalWrapper">
The marshal Wrapper.
</param>
<param name="<API key>">
The device Info Data Factory.
</param>
<param name="<API key>">
The unsafe Native Methods Wrapper.
</param>
public <API key>(
IntPtr <API key>,
int size,
IMarshalWrapper marshalWrapper,
<API key> <API key>,
<API key> <API key>)
{
<API key> = <API key>;
DeviceInfoData = <API key>.Create();
_marshalWrapper = marshalWrapper;
<API key> = <API key>;
<API key> = Marshal.AllocHGlobal(size);
_marshalWrapper.WriteInteger32(<API key>, (IntPtr.Size == 4) ? (4 + Marshal.<API key>) : 8);
}
<summary>
Gets the device info data.
</summary>
public DeviceInfoData DeviceInfoData { get; private set; }
<summary>
Gets the device interface detail buffer.
</summary>
public IntPtr <API key> { get; private set; }
<summary>
Gets the device path.
</summary>
public string DevicePath
{
get
{
if (string.IsNullOrEmpty(_devicePath))
{
var devicePathName = new IntPtr(<API key>.ToInt64() + sizeof(int));
_devicePath = Marshal.PtrToStringAuto(devicePathName);
return _devicePath;
}
return _devicePath;
}
}
<summary>
Gets the description.
</summary>
public string Description
{
get
{
if (string.IsNullOrEmpty(_description))
{
_description = <API key>.GetProperty(<API key>, DeviceInfoData, Spdrp.DeviceDesc).ToString();
}
return _description;
}
}
//Manufacturer = <API key>.GetProperty(<API key>, deviceInfoSet, deviceInfoData, Spdrp.Mfg);
//var hardwareIDs = <API key>(deviceInfoSet, deviceInfoData, SPDRP.SPDRP_HARDWAREID);
//var regex = new Regex("^USB\\\\VID_([0-9A-F]{4})&PID_([0-9A-F]{4})", RegexOptions.IgnoreCase);
//var foundVidPid = false;
//foreach (var hardwareID in hardwareIDs)
// var match = regex.Match(hardwareID);
// if (match.Success)
// details.VID = ushort.Parse(match.Groups[1].Value, System.Globalization.NumberStyles.AllowHexSpecifier);
// details.PID = ushort.Parse(match.Groups[2].Value, System.Globalization.NumberStyles.AllowHexSpecifier);
// foundVidPid = true;
// break;
//if (!foundVidPid)
// throw new APIException("Failed to find VID and PID for USB device. No hardware ID could be parsed.");
<summary>
The dispose.
</summary>
public void Dispose()
{
if (<API key> != IntPtr.Zero)
{
_marshalWrapper.FreeHGlobal(<API key>);
}
}
}
} |
package main
import (
"errors"
"os"
"path/filepath"
)
// NashPath returns the nash directory, honouring the NASHPATH
// environment variable when it is set to a non-empty value and
// falling back to "<home>/nash" otherwise.
func NashPath() (string, error) {
	if p := os.Getenv("NASHPATH"); p != "" {
		return p, nil
	}
	h, err := home()
	return filepath.Join(h, "nash"), err
}
// NashRoot returns the nash root directory, honouring the NASHROOT
// environment variable when it is present (even if empty, matching
// os.LookupEnv semantics) and falling back to "<home>/nashroot".
func NashRoot() (string, error) {
	if root, ok := os.LookupEnv("NASHROOT"); ok {
		return root, nil
	}
	h, err := home()
	return filepath.Join(h, "nashroot"), err
}
func home() (string, error) {
homedir, err := os.UserHomeDir()
if err != nil {
return "", err
}
if homedir == "" {
return "", errors.New("invalid empty home dir")
}
return homedir, nil
} |
/*
 * Fill array[0..size-1] with pseudo-random values from rand().
 *
 * array: destination buffer; must hold at least `size` ints. NULL is
 *        tolerated and treated as a no-op (previously undefined behaviour).
 * size:  number of elements to fill; non-positive sizes fill nothing.
 *
 * Note: rand() is not seeded here — call srand() once beforehand if a
 * different sequence per run is wanted.
 */
void GenerateArray(int array[], int size)
{
    /* Guard so a NULL buffer cannot trigger undefined behaviour. */
    if (array == NULL || size <= 0)
    {
        return;
    }
    /* Descending index order preserved so the rand() call sequence
       matches the original implementation exactly. */
    for (int i = size - 1; i >= 0; --i)
    {
        array[i] = rand();
    }
}
<!-- Login screen: vehicle id + password with sign-in and register links. -->
<ion-view view-title="Login">
  <ion-content>
    <div class="list">
      <label class="item item-input">
        <span class="input-label">
          Vehicle Id
        </span>
        <!-- input is a void element; the invalid </input> closing tags were removed -->
        <input ng-model="vehicle.vehicleId" type="text">
      </label>
      <label class="item item-input">
        <span class="input-label">
          Password
        </span>
        <input ng-model="vehicle.password" type="password">
      </label>
    </div>
    <div class="padding">
      <button class="button button-block button-positive" ng-click="signIn(vehicle)">
        Sign-In
      </button>
      <p class="text-center">
        <a href="#/register">
          Register
        </a>
      </p>
    </div>
  </ion-content>
</ion-view>
# <API key>
Spotify WebAPI |
-- User: sdcuike
-- Time: 4:09
-- redis api
local modulename = "Redis"
local _M = {}
_M._VERSION = '0.0.1'
local redis = require('resty.redis')
_M.new = function(self, conf)
self.host = conf.host
self.port = conf.port
self.timeout = conf.timeout
self.poolsize = conf.poolsize
self.idletime = conf.idletime
self.passwd = conf.passwd
self.dbid = conf.dbid
local red = redis:new()
return setmetatable({redis = red}, { __index = _M } )
end
-- Connect to the configured redis server, authenticate when a password
-- is configured, and select the configured database id.
-- Returns the result of red:select(...) on success, or nil plus an
-- error message on failure.
_M.connectdb = function(self)
    local host = self.host
    local port = self.port
    local red = self.redis
    if not (host and port) then
        return nil, 'no tcp avaliable provided'
    end
    -- BUG FIX: the original read the global `dbid` instead of the
    -- configured self.dbid, so a non-default database id was ignored.
    local dbid = self.dbid
    if not dbid then
        dbid = 0
    end
    local timeout = self.timeout
    if not timeout then
        timeout = 1000 -- milliseconds (1s); the original comment said 10s
    end
    red:set_timeout(timeout)
    local ok, err = red:connect(host, port)
    if not ok then
        return ok, err
    end
    -- Only authenticate when a password is configured: calling
    -- red:auth(nil) would raise an error on servers without auth.
    if self.passwd and self.passwd ~= '' then
        local auth, autherr = red:auth(self.passwd)
        if not auth then
            ngx.log(ngx.ERR, "[[".."failed to authenticate: "..autherr.."]]")
            return nil, autherr
        end
    end
    return red:select(dbid)
end
_M.keepalivedb = function(self)
local pool_max_idle_time = self.idletime
local pool_size = self.poolsize
if not pool_size then pool_size = 1000 end
if not pool_max_idle_time then pool_max_idle_time = 90000 end
return self.redis:set_keepalive(pool_max_idle_time, pool_size)
end
_M.close = function(self)
local ok, err = self.redis:close()
if not ok then
ngx.log(ngx.ERR, "[[".."failed to close:"..err.."]]")
end
end
return _M |
// <API key>.h
#import <UIKit/UIKit.h>
#import "XMPPJID.h"
@interface <API key> : UIViewController
@property(nonatomic, strong) XMPPJID * friendJid ;
@end |
package reversi.server.commands;
import java.util.List;
/**
 * Immutable message describing one command exchanged with the reversi
 * server: a command type plus an optional list of string parameters.
 */
public class ReversiCommand
{
    /** The set of commands understood by the server. */
    public static enum ReversiCommandType
    {
        Exit,
        Display,
        Undo,
        Redo,
        Easy,
        Medium,
        Hard,
        Impossible,
        HumanAI,
        AIAI,
        AILOCAL, //For watching the server's AI fight itself.
        Move,
        Comment,
        Unknown,
        Black,
        White,
        Ok
    }

    private final ReversiCommandType type;
    private final List<String> parameters;

    /**
     * Creates a command carrying extra parameters.
     *
     * @param type the command type
     * @param parameters the command's parameters; may be null
     */
    public ReversiCommand(ReversiCommandType type, List<String> parameters)
    {
        this.type = type;
        this.parameters = parameters;
    }

    /**
     * Creates a parameterless command; {@link #getParameters()} will
     * return null for commands built this way.
     *
     * @param type the command type
     */
    public ReversiCommand(ReversiCommandType type)
    {
        this(type, null);
    }

    /** @return the parameter list given at construction, or null. */
    public List<String> getParameters() {
        return parameters;
    }

    /** @return the command type. */
    public ReversiCommandType getType() {
        return type;
    }
}
package meta
import (
"strings"
"time"
)
// DateTimeFormat is the standard date and time storage format used
// in the CSV files, it is assumed to have resolution of one second
// and that times are in UTC.
const DateTimeFormat = "2006-01-02T15:04:05Z"
// Reference describes a location where measurements can be taken.
type Reference struct {
// Code is used to identify the measurement location.
Code string
// Network can be used to group multiple measurement locations.
Network string
// Name is used to label the measurement location.
Name string
}
// Point describes a measurement location geographically.
type Point struct {
// Latitude represents the location latitude, with negative values representing southern latitudes.
Latitude float64
// Longitude represents the location longitude, with negative values representing western longitudes.
Longitude float64
// Elevation represents the location height relative to the given datum.
Elevation float64
// Datum can be used to indicate the location measurement reference.
Datum string
// Depth measures the depth of water at the measurement point, if appropriate.
Depth float64
latitude string // shadow value used to retain formatting
longitude string // shadow value used to retain formatting
elevation string // shadow value used to retain formatting
depth string // shadow value used to retain formatting
}
// ElevationOk returns the Elevation and whether it has been set.
func (p Point) ElevationOk() (float64, bool) {
if p.elevation != "" {
return p.Elevation, true
}
return 0.0, false
}
// DepthOk returns the Depth and whether it has been set.
func (p Point) DepthOk() (float64, bool) {
if p.depth != "" {
return p.Depth, true
}
return 0.0, false
}
// Orientation is used to describe how a piece of installed equipment is aligned.
type Orientation struct {
// Dip represents the vertical deployment, with a zero value representing a horizontal installation,
// a positive value indicating a installation downwards, whereas a negative value indicates an upward
// facing installation.
Dip float64
// Azimuth represents an equipment installation bearing, ideally with reference to true north.
Azimuth float64
// Method can be used to indicate the method or measuring the azimuth.
Method string
dip string // shadow value used to retain formatting
azimuth string // shadow value used to retain formatting
}
// DipOk returns the Dip and whether it has been set.
func (o Orientation) DipOk() (float64, bool) {
if o.dip != "" {
return o.Dip, true
}
return 0.0, false
}
// AzimuthOk returns the Azimuth and whether it has been set.
func (o Orientation) AzimuthOk() (float64, bool) {
if o.azimuth != "" {
return o.Azimuth, true
}
return 0.0, false
}
// Offset can be used to adjust an equipment installation relative to a given Point.
type Offset struct {
	// Vertical represents an adjustment up or down, the exact interpretation will depend on the use case,
	// although it is assumed to have units of meters.
	Vertical float64
	// North can be used to offset the installation northwards, it is assumed to have units of meters.
	North float64
	// East can be used to offset the installation eastwards, it is assumed to have units of meters.
	East float64

	vertical string // shadow value used to retain formatting
	north    string // shadow value used to retain formatting
	east     string // shadow value used to retain formatting
}
// VerticalOk returns the Vertical offset and whether it has been set.
func (o Offset) VerticalOk() (float64, bool) {
if o.vertical != "" {
return o.Vertical, true
}
return 0.0, false
}
// NorthOk returns the North offset and whether it has been set.
func (o Offset) NorthOk() (float64, bool) {
if o.north != "" {
return o.North, true
}
return 0.0, false
}
// EastOk returns the East offset and whether it has been set.
func (o Offset) EastOk() (float64, bool) {
if o.east != "" {
return o.East, true
}
return 0.0, false
}
// Scale can be used to represent a non-linear installation, such as a pressure sensor installed in sea water
// rather than fresh water.
type Scale struct {
// Factor can be used to represent a change of scale of the recorded value.
Factor float64
// Bias can be used to represent an offset to the recorded value.
Bias float64
factor string // shadow value used to retain formatting
bias string // shadow value used to retain formatting
}
// FactorOk returns the Factor and whether it has been set.
func (s Scale) FactorOk() (float64, bool) {
	if s.factor != "" {
		return s.Factor, true
	}
	return 0.0, false
}

// FactortOk returns the Factor and whether it has been set.
//
// Deprecated: the name contains a typo; use FactorOk instead. Kept as a
// delegating alias for backward compatibility with existing callers.
func (s Scale) FactortOk() (float64, bool) {
	return s.FactorOk()
}
// BiasOk returns the Bias and whether it has been set.
func (s Scale) BiasOk() (float64, bool) {
if s.bias != "" {
return s.Bias, true
}
return 0.0, false
}
// Span represents a time window.
type Span struct {
// Start represents the beginning of the time window.
Start time.Time
// End represents the stop time of the window.
End time.Time
}
// Equipment represents an individual piece of hardware.
type Equipment struct {
	// Make describes the manufacturer or equipment maker.
	Make string
	// Model describes the manufacturer's model name.
	Model string
	// Serial describes the manufacturer's identification of the device.
	Serial string
}
func (e Equipment) String() string {
return e.Make + " " + e.Model + " [" + e.Serial + "]"
}
// Less compares Equipment structs suitable for sorting: ordering is by
// case-insensitive make, then case-insensitive model, then raw serial.
func (e Equipment) Less(eq Equipment) bool {
	thisMake, thatMake := strings.ToLower(e.Make), strings.ToLower(eq.Make)
	if thisMake != thatMake {
		return thisMake < thatMake
	}
	thisModel, thatModel := strings.ToLower(e.Model), strings.ToLower(eq.Model)
	if thisModel != thatModel {
		return thisModel < thatModel
	}
	return e.Serial < eq.Serial
}
// Install is a compound struct that represents the installation of a
// piece of equipment over a given time period.
type Install struct {
	// Equipment represents the actual installed equipment.
	Equipment
	// Span describes the installed time period.
	Span
}
// Less compares Install structs suitable for sorting.
func (i Install) Less(in Install) bool {
switch {
case i.Equipment.Less(in.Equipment):
return true
case in.Equipment.Less(i.Equipment):
return false
default:
return i.Start.Before(in.Start)
}
} |
.player-bar{
position: fixed;
bottom: 0;
left: 0;
right: 0;
height: 200px;
background-color: rgba(255, 255, 255, 0.3);
z-index: 100;
}
.player-bar a {
font-size: 1.1rem;
vertical-align: middle;
}
/* BUG FIX: the hover rule used the broken selector ".player=bar a:hover"
   ("=" instead of "-"), which invalidates the whole rule group in CSS,
   so links lost their styling entirely. */
.player-bar a,
.player-bar a:hover {
  color: white;
  cursor: pointer;
  text-decoration: none;
}
.player-bar .container {
display: table;
padding: 0;
width: 90%;
min-height: 100%;
}
.player-bar .control-group {
display: table-cell;
vertical-align: middle;
}
.player-bar .main-controls {
width: 25%;
text-align: left;
padding-right: 1rem;
}
.player-bar .main-controls .previous {
margin-right: 16.5%;
}
.player-bar .main-controls .play-pause {
margin-right: 15%;
font-size: 1.6rem;
}
.player-bar .currently-playing {
width: 50%;
text-align: center;
position: relative;
}
.player-bar .currently-playing .song-name,
.player-bar .currently-playing .artist-name,
.player-bar .currently-playing .artist-song-mobile {
text-align: center;
font-size: 0.75rem;
margin: 0;
position: absolute;
width: 100%;
font-weight: 300;
}
.player-bar .currently-playing .song-name,
.player-bar .currently-playing .artist-song-mobile {
top: 1.1rem;
}
.player-bar .currently-playing .artist-name{
bottom: 1.1rem;
}
.player-bar .currently-playing .artist-song-mobile {
display: none;
}
.seek-control {
position: relative;
font-size: 0.8rem;
}
.seek-control .current-time {
position: absolute;
top: 0.5rem;
}
.seek-control .total-time {
position: absolute;
right: 0;
top: 0.5rem;
}
.seek-bar {
height: 0.25rem;
background-color: rgba(255, 255, 255, 0.3);
border-radius: 2px;
position: relative;
cursor: pointer;
}
.seek-bar .fill {
background-color: white;
width: 36%;
height: 0.25rem;
border-radius: 2px;
}
.seek-bar .thumb {
position: absolute;
height: 0.5rem;
width: 0.5rem;
background-color: white;
left: 36%;
top: 50%;
margin-left: -0.25rem;
margin-top: -0.25rem;
border-radius: 50%;
cursor: pointer;
-webkit-transition: all 100ms ease-in-out;
-moz-transition: all 100ms ease-in-out;
transition: width 100ms ease-in-out,
height 100ms ease-in-out,
margin-top 100ms ease-in-out,
margin-left 100ms ease-in-out;
}
.seek-bar:hover .thumb {
width: 1.1rem;
height: 1.1rem;
margin-top: -0.5rem;
margin-left: -0.5rem;
}
.player-bar .volume {
width: 25%;
text-align: right;
}
.player-bar .volume .icon {
font-size: 1.1rem;
display: inline-block;
vertical-align: middle;
}
.player-bar .volume .seek-bar {
display: inline-block;
width: 5.75rem;
vertical-align: middle;
}
/* Mobile layout: stack the three control groups vertically. */
@media (max-width: 640px) {
  .player-bar {
    padding: 1rem;
    background-color: rgba(0, 0, 0, 0.6);
  }
  .player-bar .main-controls,
  .player-bar .currently-playing,
  .player-bar .volume {
    display: block;
    margin: 0 auto;
    padding: 0;
    width: 100%;
    text-align: center;
  }
  /* BUG FIX: ".playter-bar" typo meant the volume block never got its
     min-height on mobile. */
  .player-bar .main-controls,
  .player-bar .volume {
    min-height: 3.5rem;
  }
  .player-bar .currently-playing {
    min-height: 2.5rem;
  }
  .player-bar .artist-name,
  .player-bar .song-name {
    display: none;
  }
  /* BUG FIX: ",player-bar" (comma instead of dot) invalidated this rule,
     so the combined artist/song label never appeared on small screens. */
  .player-bar .currently-playing .artist-song-mobile {
    display: block;
  }
}
package com.linkedin.sample;
import com.google.code.linkedinapi.client.LinkedInApiClient;
import com.google.code.linkedinapi.client.<API key>;
import com.google.code.linkedinapi.client.oauth.LinkedInAccessToken;
import com.google.code.linkedinapi.client.oauth.<API key>;
import com.google.code.linkedinapi.client.oauth.<API key>;
import com.google.code.linkedinapi.client.oauth.<API key>;
import java.util.Scanner;
/**
 * Console walk-through of the LinkedIn OAuth 1.0a flow: obtain a request
 * token, have the user authorize it in a browser, exchange the PIN for an
 * access token, then post a status update.
 *
 * NOTE(review): several identifiers below were corrupted during extraction
 * ("<API key>" placeholders) and the declarations around oauthService are
 * fused onto one line; restore them from the original sample before compiling.
 */
public class Main2 {

    private static Scanner s;

    public static void main(String[] args) {
        // SECURITY(review): API credentials should not be hard-coded in source;
        // load the consumer key/secret from configuration or the environment.
        String linkedinKey = "77mahxt83mbma8"; //add your LinkedIn key
        String linkedin<API key>; //add your LinkedIn <API key> oauthService;
        <API key> requestToken;

        System.out.println("Fetching request token from LinkedIn...");
        String authUrl = null;
        String authToken,authTokenSecret;

        // Step 1: obtain an unauthorized request token.
        oauthService= <API key>.getInstance().<API key>(linkedinKey,linkedinSecret);
        requestToken= oauthService.<API key>();
        authToken= requestToken.getToken();
        authTokenSecret = requestToken.getTokenSecret();
        System.out.println("Request token " +requestToken);
        System.out.println("Auth token" +authToken);
        System.out.println("Auth token secret" +authTokenSecret);

        // Step 2: send the user to the authorization URL and collect the PIN.
        authUrl = requestToken.getAuthorizationUrl();
        System.out.println("Copy below link in web browser to authorize. Copy the PIN obtained\n" + authUrl);
        System.out.println("Enter the PIN code:");
        String pin;
        try
        {
            s = new Scanner(System.in);
            pin = s.next();

            // Step 3: exchange the request token + PIN for an access token.
            System.out.println("Fetching access token from LinkedIn...");
            LinkedInAccessToken accessToken = oauthService.getOAuthAccessToken(requestToken, pin);
            System.out.println("Access token : " + accessToken.getToken());
            System.out.println("Token secret : " + accessToken.getTokenSecret());

            final <API key> factory = <API key>.newInstance(linkedinKey,linkedinSecret);
            final LinkedInApiClient client = factory.<API key>(accessToken);

            //posting status to profile
            client.updateCurrentStatus("LinkedIN API is cool!");
        }
        finally
        {
            // NOTE(review): this prints even when the flow above throws; a
            // success message belongs at the end of the try block instead.
            System.out.println("Updated status!");
        }
    }
}
package org.ovirt.engine.ui.uicommonweb.models.vms;
import org.ovirt.engine.core.common.TimeZoneType;
import org.ovirt.engine.ui.uicommonweb.dataprovider.AsyncDataProvider;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Presentation model for a single time-zone choice; also maintains a
 * static cache of the selectable models per {@link TimeZoneType}.
 */
public class TimeZoneModel {

    // Per-type cache of model lists, populated once by the static initializer.
    // NOTE(review): the field name was corrupted during extraction ("<API key>").
    private static final Map<TimeZoneType, Collection<TimeZoneModel>> <API key> = new HashMap<TimeZoneType, Collection<TimeZoneModel>>();

    /** Returns the cached selectable models for the given time-zone type. */
    public static Collection<TimeZoneModel> getTimeZones(TimeZoneType timeZoneType) {
        return <API key>.get(timeZoneType);
    }

    static {
        for (TimeZoneType timeZoneType : TimeZoneType.values()) {
            mapListModels(timeZoneType, timeZoneType.getTimeZoneList());
        }
    }

    // Builds the model list for one type; the leading null-key entry stands
    // for "use the engine default time zone".
    private static void mapListModels(TimeZoneType timeZoneType, Map<String, String> timeZones) {
        List<TimeZoneModel> models = new ArrayList<TimeZoneModel>();
        models.add(new TimeZoneModel(null, timeZoneType)); // add empty field representing default engine TZ
        for (Map.Entry<String, String> entry : timeZones.entrySet()) {
            models.add(new TimeZoneModel(entry.getKey(), timeZoneType));
        }
        <API key>.put(timeZoneType, models);
    }

    // Time-zone identifier; null means "engine default".
    private final String timeZoneKey;
    private final TimeZoneType timeZoneType;

    public TimeZoneModel(String timeZoneKey, TimeZoneType timeZoneType) {
        this.timeZoneKey = timeZoneKey;
        this.timeZoneType = timeZoneType;
    }

    /** @return the time-zone key, or null for the engine default entry. */
    public String getTimeZoneKey() {
        return timeZoneKey;
    }

    /** @return true when this model represents the engine default (null key). */
    public boolean isDefault() {
        return timeZoneKey == null;
    }

    /** Returns the display label, resolving the engine-default key first. */
    public String getDisplayValue() {
        if (isDefault()) {
            String defaultTimeZoneKey = (String) AsyncDataProvider.getInstance().<API key>(timeZoneType.<API key>());
            // check if default timezone is correct
            if (!timeZoneType.getTimeZoneList().containsKey(defaultTimeZoneKey)) {
                // if not show GMT
                defaultTimeZoneKey = timeZoneType.getUltimateFallback();
            }
            return timeZoneType.getTimeZoneList().get(defaultTimeZoneKey);
        } else {
            return timeZoneType.getTimeZoneList().get(timeZoneKey);
        }
    }
}
using System;
using System.Collections.Generic;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Shell;
using Microsoft.VisualStudio.Shell.Interop;
namespace Microsoft.VisualStudio.Project
{
/// <summary>
/// COM-style enumerator (IVsEnumDependencies) over a snapshot of build
/// dependencies, supporting Next/Skip/Reset/Clone semantics.
/// </summary>
[CLSCompliant(false)]
public class EnumDependencies : IVsEnumDependencies
{
    // Private snapshot of the dependencies being enumerated.
    private List<IVsDependency> dependencyList = new List<IVsDependency>();

    // Index of the next element handed out by Next().
    private uint nextIndex;

    public EnumDependencies(IList<IVsDependency> dependencyList)
    {
        Utilities.ArgumentNotNull("dependencyList", dependencyList);

        foreach (IVsDependency dependency in dependencyList)
        {
            this.dependencyList.Add(dependency);
        }
    }

    public EnumDependencies(IList<IVsBuildDependency> dependencyList)
    {
        Utilities.ArgumentNotNull("dependencyList", dependencyList);

        foreach (IVsBuildDependency dependency in dependencyList)
        {
            this.dependencyList.Add(dependency);
        }
    }

    /// <summary>
    /// Creates an independent copy of this enumerator positioned at the same element.
    /// </summary>
    public int Clone(out IVsEnumDependencies enumDependencies)
    {
        // NOTE(review): identifier corrupted in source — likely
        // ThreadHelper.ThrowIfNotOnUIThread(); confirm against the original file.
        ThreadHelper.<API key>();
        enumDependencies = new EnumDependencies(this.dependencyList);
        enumDependencies.Skip(this.nextIndex);
        return VSConstants.S_OK;
    }

    /// <summary>
    /// Fetches up to <paramref name="elements"/> dependencies into the supplied
    /// array; returns S_OK when the full count was delivered, S_FALSE otherwise.
    /// </summary>
    public int Next(uint elements, IVsDependency[] dependencies, out uint elementsFetched)
    {
        elementsFetched = 0;
        Utilities.ArgumentNotNull("dependencies", dependencies);

        uint fetched = 0;
        int count = this.dependencyList.Count;

        while (this.nextIndex < count && elements > 0 && fetched < count)
        {
            dependencies[fetched] = this.dependencyList[(int)this.nextIndex];
            this.nextIndex++;
            fetched++;
            // BUG FIX: the decrement was lost (a bare "elements" expression),
            // which was a syntax error and broke both the requested-count limit
            // and the S_OK/S_FALSE computation below.
            elements--;
        }

        elementsFetched = fetched;

        // Did we get 'em all?
        return (elements == 0 ? VSConstants.S_OK : VSConstants.S_FALSE);
    }

    /// <summary>Rewinds enumeration to the first element.</summary>
    public int Reset()
    {
        this.nextIndex = 0;
        return VSConstants.S_OK;
    }

    /// <summary>
    /// Advances past <paramref name="elements"/> items, clamping at the end;
    /// returns S_FALSE when the skip ran past the last element.
    /// </summary>
    public int Skip(uint elements)
    {
        this.nextIndex += elements;
        uint count = (uint)this.dependencyList.Count;

        if (this.nextIndex > count)
        {
            this.nextIndex = count;
            return VSConstants.S_FALSE;
        }

        return VSConstants.S_OK;
    }
}
} |
/*! \file noises.h
* \author Jared Hoberock
* \brief Adapted from Gelato/shaders/noises.h.
*/
#ifndef NOISES_H
#define NOISES_H
#include "stdshader.h"
#include "detail/ShadingInterface.h"
#include "detail/Point.h"
// A hook into the ShadingInterface
// XXX This is probably not reentrant
extern ShadingInterface *gContext;
inline float snoise(float x, float y = 0, float z = 0)
{
return gContext->noise(x,y,z);
} // end snoise()
inline float snoise(const Point &x)
{
return snoise(x.x, x.y, x.z);
} // end snoise()
inline Vector vsnoise(const Point &x)
{
// XXX This is almost definitely not robust
return Vector(snoise(x),
snoise(x + Point(1,1,1)),
snoise(x + Point(2,2,2)));
} // end vsnoise()
inline Vector vsnoise(const float x, const float y = 0, const float z = 0)
{
return vsnoise(Point(x,y,z));
} // end vsnoise()
#ifndef FILTLOW
# define FILTLOW 0.2
#endif
#ifndef FILTHIGH
# define FILTHIGH 0.75
#endif
inline Vector vectorFbm(const Point &p,
int octaves, float lacunarity, float gain)
{
float amp = 1.0f;
Point pp = p;
Vector sum(0,0,0);
for(int i = 0; i < octaves; ++i)
{
sum += amp * vsnoise(pp);
amp *= gain;
// multiply and offset
pp = pp * lacunarity + Point(1,1,1);
} // end for i
return sum;
} // end vectorFbm()
// Sums `octaves` octaves of signed noise starting at coordinate p, with a
// fixed lacunarity of 2 and per-octave amplitude falloff `gain`.
//
// NOTE(review): `pper` is doubled each octave but never read, so the
// `period` parameter has no effect — this currently computes ordinary
// (aperiodic) fBm. A periodic noise call (e.g. pnoise(pp, pper)) was
// probably intended; confirm against the original Gelato shader source.
inline float periodicFbm(float p, float period, int octaves, float gain)
{
  float amp = 1.0f;
  float pp = p;
  float pper = period;
  float sum = 0;
  for(int i = 0; i < octaves; ++i)
  {
    sum += amp * snoise(pp);
    amp *= gain;
    pp *= 2.0f;
    pper *= 2.0f;
  } // end for i
  return sum;
} // end periodicFbm()
#endif // NOISES_H |
The origins of this project are in discovering the fundamental weaknesses of
the rvlvr project. One of those weaknesses is that evolutionary criteria, even
behaviorally aware ones guide the search of the program space too strictly. For
example, consider how much of the space will inevitably be explored in order to
find a program like "not (n % 2 == 0 and n > 4 * k)". Because of the presence
of the not operator, the MSE of related programs (for example, the same program
without the not operator) is very high. Since rvlvr only builds programs by
concatenation, the search guide fails completely here and basically forces
rvlvr from ever evolving the highest fitness program. However, it should
eventually evolve a similar or equivalent program, by "refactoring" the
location of the not operator.
This leads to the question, what part of the program space is actually
effectively searched using search guides like this one? It would seem that
search guides like this one help genetic algorithms where they are already most
effective (and least necessary): smooth reward functions, with little distance
between local minima. This is also what gradient based methods are good at,
although gradient based methods may suffer from additional issues. In
particular, the gradient may be difficult to compute, or the gradient may not
exist, even though the reward function is smooth enough for evolutionary
algorithms.
An interesting space to consider is that of convex logic. This is the space of
logic programs which operate on a set of facts, where each fact is a predicate
over some fixed (per predicate) number of atoms. Without negation (or with only
very limited negation in the form of pre-processing) addition of new facts to
the set can never cause the removal of old facts. However, this space of
programs is still very versatile, because many logical conditions can be
reorganized into a form that doesn't require the use of a negation operator.
Unlike integer program evolution, this program space naturally allows for
handling an ensemble of programs, since clauses operate largely independently.
Furthermore, due to uses in database queries, the most common implementation of
programs in this space, datalog, already has significant literature on how to
implement efficient recomputation of the results of a program when a new clause
is added to it.
For efficiency, we should probably create a datalog variant where each clause
and each literal is a fixed size. However, it's not clear to me if a particular
size limits the abilities of programs.
After spending a few hours reading the research, it appears there's no
consensus on whether limiting the order of predicates limits the capabilities of the
space.
One annoyance is that my original problem domain for this work is actually very
hard to fit into datalog. Nevermind, I'm just bad at datalog.
Connect 4:
Board(color: 0..3, x: 0..8, y: 0..7)
Turn(player: 0..2)
Next(option: 0.., color: 0..3, x: 0..8, y: 0..7)
Player(color: 0..3)
Player(0)
Player(1)
Next(x, color, x, y) :- Board(2, x, 0), Turn(color)
Next(x, color, x, y) :- Board(2, x, y), Board(piece, x, y1), Subtract(y, 1, y1), Turn(color), Player(piece)
Pawn Only Chess:
Board(color: 0..3, x:0..9, y: 0..9)
Turn(player: 0..2)
Next(option: 0.., color: 0..3, x: 0..9, y: 0..9)
Player(color: 0..3)
Player(0)
Player(1)
Direction(0, 1)
Direction(1, -1)
DiagDirection(1)
DiagDirection(-1)
Next(e, color, x, y), Next(e, 2, x, y1) :- Board(2, x, y), Turn(color), Board(color, x, y1), Add(y, direction, y1), Direction(color, direction), Enumerate3(x, y, 0, e)
Next(e, color, x, y), Next(e, 2, x1, y1) :- Board(other, x, y), Turn(color), Other(other, color), Board(color, x1, y1), Add(y, direction, y1), Direction(color, direction), Enumerate(x, y, direction1, e), Add(x, direction2, x1), DiagDirection(direction1)
What should our derivation algorithm be, and how can it be described using
linear algebra? The effectiveness of deep neural networks makes it clear that
using gradients to optimize large models can be very effective. I do not think
that there is a reasonable way to compute a gradient in the space of all
datalog programs. However, it would still be convenient if there is a way to
use search guides similar to back propagation to make the evolutionary search
more efficient.
On the topic of evolutionary search, it's clear that the general approach here
has no need to be truly evolutionary in nature. A family of related datalog
programs will all tend to produce a common subset of facts. We could record,
for each fact, which individuals in the population derive that fact. This would
probably be more efficient than computing the result of each individual in the
population separately. However, this does involve more overhead than simply
recording how many members of the population derive this fact. Of course, if we
only record how many members of the population derive a fact, we cannot use
this information to derive new facts. But if, instead of a count, we use a
"weight", then we have similar semantics, but less overhead. Which is probably
preferable.
The evolutionary fitness criteria should probably be something like "minimize
the weights of invalid facts, while maximizing the weight of valid facts." This
is slightly complicated by the presence of enumeration. A fact is valid if it
is expected, ignoring the presence of its enumeration value. A fact is invalid
if it is not expected. A fact is very invalid if it has the same enumeration
value as a valid fact. There should probably also be some weak evolutionary
pressure towards simpler programs.
One weakness of rvlvr was that the fitness function was finicky and ad hoc.
Perhaps we can avoid this by constraining weights this time around?
Alright, what exactly is the error propagation algorithm?
We input our input facts X. Then, we compute an activation for each fact that
has non-zero activation. An activation is essentially a number of times that
fact has been derived true by the datalog program. Note that since a clause may
appear more than once in the program (have non-zero weight), this number is the
sum of (the weights of each clauses implying the fact, times the sum, for each
set of valid tail combinations, of the activation of each fact in the tail). We
record how much each clause contributes to the fact.
It occurs to me that the "back propagation" algorithm I'm about to specify
performs much of the same computation as top-down datalog evaluation. Perhaps
we would effectively compute both together?
That would be difficult to do if we can't know whether a fact
is valid in isolation. This runs counter to how enumeration was supposed to be
enforced. However, maybe enumeration can still be enforced by propagating
inequality constraints along with equality constraints. Top down algorithms
need to use equality constraints anyways. However, the inequality constraints
needed here are soft, unlike the equality constraints, which are hard.
I don't see significant evidence that top-down approaches would be more
efficient, so I think I'll continue with the bottom up method.
It's worth thinking about how to implement refinement of datalog program
ensembles using traditional back-propagation algorithms. In principle, one
could imagine some way of encoding the refinement process as an unusual set of
error functions applied to some unusual structure of matrices.
Consider the space of all neural networks as the space of any finite sequence
of finite dimensional tensors with reLU's between them, and with arbitrary
bindings between the weights of different neurons. Then any activation tensor
represents a sequence of facts, where each activation represents what
unweighted portion of the ensemble found that fact to be true. Furthermore,
because we used arbitrary dimensional tensors, each index into the activation
tensor can correspond to a constant, excluding the first index, which is needed
to indicate which predicate the fact is over. Furthermore, if we allow negative
weights we are implicitly computing an ensemble of datalog programs with
stratified negation. It's stratified because a finite sequence of weight
tensors was assumed. However, variables make no appearance in this
representation. Furthermore, the size of the tensors grows exponentially, with
the exponent being the arity of the largest predicate and the base being the
number of constants. It's also worth mentioning that a constant needs to be
input to each layer to allow conjunction to be represented.
How could variables be represented? Imagine we have a very simple dataflow
program:
IDB:
a(X: 0..2)
b(X: 0..2)
c(X: 0..2)
a(X) :- b(X), c(X)
EDB:
b(0)
c(0)
Then the tensor form is:
A[0] = [
[0, n],
[1, m],
[1, k],
]
And we would like:
A[1] = reLU(A[0] * X) = [
[1, n],
[1, m],
[1, k],
]
For all n, m, k.
We would also like
reLU([
[0, n],
[0, m],
[1, k],
] * X) = [
[0, n],
[0, m],
[1, k],
]
and
reLU([
[0, n],
[1, m],
[0, k],
] * X) = [
[0, n],
[1, m],
[0, k],
]
From these examples, the pattern is clear: broadcasting across a dimension
represents a variable, while the remaining entries of the tensors are multiplied together.
However, it's also clear from this analysis that this technique isn't going to
get us very far. Firstly, it seems that you need at least one layer per clause.
Secondly, there is certainly going to be significant issues with the gradient.
If reLU is actually used, then the gradient will usually be zero in the
direction of each reasonable change. Using the sigmoid function should help
somewhat. Finally, this method is dense, but it encodes the entire space.
That's a rather large optimization space.
Even if I do end up implementing gradient based optimization, I doubt other
code I write will be wasted, so I'm not going to focus on it just yet.
Red(x: 0..6, y: 0..5)
Black(x: 0..6, y: 0..5)
Empty(x: 0..6, y: 0..5)
Turn(player: 0..1)
NextRed(option: 0.., x: 0..6, y: 0..5)
NextBlack(option: 0.., x: 0..6, y: 0..5)
Piece(x, y) :- Red(x, y)
Piece(x, y) :- Black(x, y)
NextRed(x, x, 0) :- Empty(x, 0), Turn(0)
NextRed(x, x, y) :- Empty(x, y), Turn(0), Piece(x, y1), Subtract(y, 1, y1)
Next(x, player, x, y) :- Valid(x, y, color), Board(player, _, _), Pla
Empty(x, 0), Turn(player)
Red(turn: 0.., x: 0..6, y: 0..5)
Black(turn: 0.., x: 0..6, y: 0..5)
Empty(turn: 0.., x: 0..6, y: 0..5)
Next(option: 0.., turn: 0.., x: 0..6, y: 0..5)
Piece(turn, x, y) :- Red(turn, x, y)
Piece(turn, x, y) :- Black(turn, x, y)
NextPiece(x, 0) :- Piece(x, 0)
One conceptual problem we've been hitting is that it's not clear how to handle
really computing activations in a useful, stable way. The first, obvious
problem is recursive self-implication. When a fact leads to a set of new facts
which imply that fact, as long as the weights along the loop multiply to a
number above one then the activation becomes infinite. It's only minorly
difficult to use tabling to detect this case, and record the activation
for all such facts as infinite, but it's not clear that's a useful result.
Of course, this can be mitigated somewhat by forcing activations to fall off,
either by enforcing that weights are all less than one or by multiplying by
some fall off factor. However, this could also cause vanishing gradient type
problems, where the activations all become near zero.
On a related note, it's not clear that demand driven mutation really matches
gradient methods all that closely. The partial differential of the output table
with respect to the introduction of any particular new rule is often zero, even
though a small set of rules can result in total reduction of error.
We've been thinking about this pretty backwards. If you think of this as a
gradient problem, then there's a lot of problems. It's possible to actually
compute the gradient fully in a sparse way using typical differential methods.
However, actually changing the structure of the program doesn't fit gradient
methods at all. Furthermore, thinking about it that way doesn't really help
make progress. Instead, we want to think about exactly what operations we're
trying to do last, and then reason about what data dependencies are useful for
doing that.
What we really want to capture from the logic program is a direction to mutate
the program in. In other words, we want to extract some data that tells us
something about what rules to introduce to the program, or, if those rules
already exist, strengthen the weights of.
Think about how to evaluate the datalog program in a top-down fashion. We start
with some predicate we would like to find matches for. First, we look through
the fact table for this predicate. Then, we look through clauses with that
predicate in their head. For each such clause, we need to create a set of
constraints for that clause. We enter our search into the table for that
clause. Then, we recurse into each literal in the body of the clause, carrying
our constraints with us. This recursive process fills out the set of all
possible facts for our predicate into its fact table, which we return.
Subsumptive tabling operates in the same way, although it orders the search, and
keeps fact tables per literal, instead of per predicate.
So, to think about how to mutate the program, think about each step in this
process where the input could be changed to get the output we want.
Example:
Say we would like to derive the following datalog program:
foo(X, Y) :- bar(X, X), bar(Y, Y)
Given the following samples:
given
bar(0, 0),
bar(1, 1)
implies
foo(0, 1),
foo(1, 0)
given
bar(2, 2),
bar(1, 1)
implies
foo(2, 1),
foo(1, 2)
given
bar(0, 0),
bar(2, 2)
implies
foo(0, 2),
foo(2, 0)
Given the following training routine (the empty program):
fix
foo(X, Y)
We start by looking through the table of values for foo. It's empty, so maybe
we should add a new fact. We should somehow record this demand. Then, we look
through the rules implying foo. There are none, so maybe we should add one.
Record this demand. There's nothing left for us to do, so we're done computing
demand.
What is our demand? Either a fact which fulfills some constraints, or a rule
which would produce this fact from the same constraints.
What exactly are our constraints? Usually, in top down evaluation we're trying
to find all facts which can fit our predicate. Fortunately, in our case we
should have much more specific constraints. We're looking for specific facts,
not all facts for a predicate. Our constraints, at any point in time, are
actually a finite set of facts we need to prove, given our set of inputs.
It seems likely that we'll need to lose some information when coming up with
the mutation guides. My current best idea is to record for each variable a
vector of values it holds throughout a run for a given input database. Then, we
can compute a similar vector based on how much each variable violates
constraints. Hopefully, we can use these two vectors to guide the mutation.
What types of mutation should we perform? Within a clause, if a variable is
over constrained then we can replace some uses of it by new, unconstrained
variables. Alternatively, we might want to delete a literal referring to the
variable.
If a variable needs to be more constrained, we can merge it with another
variable, or add another literal referring to that variable.
Alright, back to implementation mode. Let's start by implementing minimal
backwards reasoning. That is to say, given an asserted fact and a program, find
a set of sets of facts, such that any set of facts being true would result in
the asserted fact being true.
After implementing fuzzy logic, I'm fairly certain that it is quite reasonable
to optimize the weights separately from the topology of the program. Weight
adjustments are basically a table mapping clauses to weight reductions which
would minimize the output error. However, the mutation guides are basically a
table which map each variable in some clauses to some record of how that
variable is contributing to error. One specific idea would be a scalar, where
larger values indicate the variable should be less constrained, and negative
values indicate the variable should be more constrained.
Algorithm:
loop
compute results
compute weight adjustments
if weight adjustments < threshold:
break
remove low weight clauses
compute mutation guides
generate new clauses
There's basically no way to ensure that this differential technique is correct
in certain ways without constraining the either, both, and finalize functions to
all be monotonic with respect to each other.
Do the current min-max weights have this problem? It's kinda hard to tell. I
believe they do if negative weights are allowed. The problem can be seen in the
following example:
confidence(1)
B(0)
C(0)
weight(-1)
A(X) :- B(X)
B(X) :- C(X)
What is the truth value of A(0) in this program?
If we evaluate A(X) :- B(X) first, then we both the truth value of B(0) with
the default to get the truth value 1 (= min(1, 1)). Then, we finalize 1 by
multiplying by the weight, -1 to get -1. So we assign A(0) the truth value -1.
Next, we evaluate B(X) :- C(X). Similarly, we take -1 * min(1, 1) = -1. Then,
we set the truth value of B(0) to either(-1, 1) = max(-1, 1) = 1. We evaluate
A(X) :- B(X) again, resulting in A(0) = -1 * max(-1,
Consider the equivalent program, with positive weights (but some negative
confidence values):
confidence(1)
B(0)
weight(1)
A(X) :- B(X)
B(X) :- C(X)
confidence(-1)
C(0)
What is the truth value of A(0) in this program?
If we evaluate A(X) :- B(X) first, then we both the truth value of B(0) with
the default to get the truth value 1 (= min(1, 1)). Then, we finalize 1 by
multiplying by the weight, 1 to get 1. So we assign A(0) the truth value 1.
Next, we evaluate B(X) :- C(X). Similarly, we take 1 * min(1, -1) = -1. Then,
we set the truth value of B(0) to either(-1, 1) = max(-1, 1) = 1.
Perhaps we can make more progress on this problem by only considering trees? And passing values along the edges of trees?
We're looking for cases where two different order of evaluations of clauses results in a fact having two different truth values.
In the truth value of a fact is given by a reduction of either() of the
finalize() of a reduction of both(). In other words the minimal expression
which allows any particular operator to be reordered is (where a, b, c, and d are known truth values):
either(default(), finalize(both(default(), a, b)), finalize(both(default(), c, d)))
With the min-max truth value, that expression is equivalent to:
max(1.0, x * min(1.0, a, b), y * min(1.0, c, d))
where x and y are known weights
Assuming x and y are positive:
max(1.0, x * min(1.0, a, b), y * min(1.0, c, d)) =
max(1.0, min(x, xa, xb), min(y, yc, yd))
a * min(max(b, c), min(max(d, e), max(f, g))) != a * min(
What am I trying to prove here? Of course max and min are commutative and "idempotent."
Exactly what kind of truth value are we computing here?
If we imagine that the truth value represents the bias of a Bernoulli random
variable, then taking the min of two truth values of two facts (a and b)
represents the bias of a fact which is true iff (a & b) if a and b are
maximally correlated. If a and b are independent, then the truth value should
be the product of the truth values instead.
The max of two truth values corresponds to the the truth value of (a | b), if a
and b are maximally correlated. If a and b are independent, then the correct
function to perform on the truth values (call them x and y) is 1 - (1 - x) * (1 - y).
If the last function is associative:
1 - (1 - (1 - (1 - x) * (1 - z))) * (1 - y) ?= 1 - (1 - x) * (1 - (1 - (1 - y) * (1 - z)))
Left side:
1 - (1 - (1 - (1 - x) * (1 - z))) * (1 - y)
1 - (1 - (1 - (1 - x - z + xz))) * (1 - y)
1 - (1 - (x + z - xz)) * (1 - y)
1 - (1 - x - z + xz) * (1 - y)
1 - ((1 - x - z + xz) - y(1 - x - z + xz))
1 - (1 - x - z + xz - y + yx + yz - yxz)
x + z - xz + y - yx - yz + yxz
Right side:
1 - (1 - (1 - (1 - y) * (1 - z))) * (1 - x)
1 - (1 - (1 - (1 - y - z + yz))) * (1 - x)
1 - (1 - (y + z - yz)) * (1 - x)
1 - (1 - y - z + yz) * (1 - x)
1 - ((1 - y - z + yz) - x(1 - y - z + yz))
1 - (1 - y - z + yz - x + xy + xz - xyz)
y + z - yz + x - xy - xz + xyz
Those four functions are all associative and commutative. Unfortunately, the
functions which correspond to minimal correlation are not idempotent. This
should not be surprising, since in the trivial case of combining the truth
value of a fact with itself, we should expect incorrect results if we assume a
the truth value is not correlated with itself.
In most Bayesian learning systems, the assumption used to improve tractability
is that things are independent. In this case, we instead assume "maximum
correlation/dependence". Does that still work well?
We would like proof that weight optimization will lead to the "selection" of
correct rules from our program, given that the correct rules are present in our
program. By selection, I mean that we would like the weights for those rules to
become 1.0, or at least distinguishably higher than every other weight in the
program. If we continuously renormalize the weights, then all we actually need
to prove is that all wrong rules get eliminated. It seems obvious that this
should happen, so I guess I'll stop worrying about it.
One thing that's annoying is that we can't easily compute the fixed point of
the truth values of a program. For the min-max truth value, we can at least
guarantee that calling `evaluate_bottom_up` enough times will result in
convergence. However, error propagation is kinda a mess. And even for normal
evaluation, the number of iterations to reach convergence can be the length of
the longest loop in the fact graph (which can potentially be the entire
database). In comparison, loop detection in a list using the pointer following
algorithm guarantees detection in at most two passes through the loop.
Yeah, the update conditions upon finding a "wave collision" devolve into
keeping a tree copied from the part of the fact graph that lead to the current
fact. Instead, it should be cheaper (and more useful) to improve the dirtiness
tracking in the bottom up algorithm.
But what about error propagation? That can still diverge. Even if we
arbitrarily restrict the range of adjustments (to say 0.0 to 1.0), the values
can still oscillate if we don't restrict the weights to positive values. So
maybe do that?
As for error propagation:
We need to avoid looping infinitely. It would be nice if in doing so we
actually compute the correct error, instead of ignoring loops. The simplest
way to avoid looping is to keep a set of already visited facts. When a fact is
visited, do not add it to the frontier. This technique completely fails for
diamonds in the fact graph, which is basically unacceptable. Conceptually, the
best idea I have for how to make this actually work is the as follows. For
every entry in the frontier, keep a mapping from facts to the weight that fact
had when it was visited on the way to adding this entry to the frontier, and
also the number of times that fact has been visited along this pass. When a
fact is first found in this mapping, compute a "loop closure adjustment", and
store it in a separate adjustment table. However, still propagate the error as
normal. The second time the fact is encountered from its own entailer table, do
not propagate it. After normal propagation, use the loop closure adjustment
table to determine how weights diverge. Adjust them to the appropriate
divergent values, then propagate the error resulting from divergent weights.
Another perspective is that loops in the fact graph are essentially anomalous,
and not worth computing properly. In that case, the first time a fact is
encountered in its own entailer table, we should just drop it from propagation.
It works! In a minimal sense. In retrospect, I need to actually do the math on
how to optimize the network.
Damn, remembering how to do back propagation correctly from first principles is a
pain.
We want to compute the derivative of the output w.r.t. the weights.
u[i, j] = ?
v[i, j] = both v[i - 1, j] u[i, j]
w[i, j] = finalize v[i, j]
x[i, j] = either x[i, j - 1] w[i, j]
u[i, j] = ?
v[i, j] = min v[i - 1, j] u[i, j]
w[i, j] = q * v[i, j]
x[i, j] = max x[i, j - 1] w[i, j]
du[i, j] = ?
dv[i, j] = argmin [v[i - 1, j] u[i, j]] [dv[i - 1, j], du[i, j]]
dw[i, j] = q * dv[i, j] + dq * v[i, j]
dx[i, j] = argmax [x[i, j - 1] w[i, j]] [dx[i, j - 1], dw[i, j]]
gx[i, j] = ?
gw[i, j] = argmax [x[i, j - 1], w[i, j]] [0, gx[i, j]]
gx[i, j - 1] += argmax [x[i, j - 1], w[i, j]] [gx[i, j], 0]
gv[i, j] += q * gw[i, j]
gq += v[i, j] * gw[i, j]
gu[i, j] = argmin [v[i - 1, j], u[i, j]] [0, gv[i, j]]
gv[i - 1, j] += argmin [v[i - 1, j], u[i, j]] [gv[i, j], 0]
gx[i, j] = ?
gx[i, j - 1], gw[i, j] = back_either x[i, j - 1], w[i, j], gx[i, j]
gq, gv[i, j] += back_finalize q, v[i, j], gw[i, j]
gv[i - 1, j], gu[i, j] += back_both v[i - 1, j], u[i, j], gv[i, j]
= argmin [v[i - 1, j], u[i, j]] [0, gv[i, j]]
e = (1/2) * (y - x) ** 2
d e = y - x * (d y - d x) ???
e = (1/2) * (y**2 - 2 * y * x + x**2)
d e = (1/2) * (2 * y - 2 * (y * d x + d y * x) + 2 * x)
d e = y * d x + d y * x
gy = x * ge
gx = y * -ge
Intuition check:
(Minimizing error of a linear function.)
e = (1/2) * (w * x - y) ** 2
w = ?
x = ?
y = ?
a = w * x
b = a - y
d = b * b
e = d / 2
dw = ?
dx = ?
dy = ?
da = w * dx + dw * x
db = da - dy
dd = 2b * db
de = dd / 2
de = (2(w * x - y) * (w * dx + dw * x - dy)) / 2
de = (w * x - y) * (w * dx + dw * x - dy)
de/dw = (w * x - y) * x
ge = ?
gd = ge / 2
gb = 2 * gd
ga = gb
gy = -gb
gx = w * ga
gw = x * ga
Sweet. We appear to have gotten gradient descent to work. There's probably a
lot of optimization work we'll want to do at some point. Right now, the release
version takes a few minutes to run through a hundred thousand iterations of
gradient descent. This should be fast enough to try out `genetic optimization`
techniques.
So, how does that work? Fundamentally, we have two choices for how a rule gets
added to the program. Either the rule is modified from another rule, or is
randomly drawn from the space of possible rules. One thing that's not clear is
quite what the space is. In particular, it's not clear what the maximum number
of predicates should be. Adding any single rule which involves a predicate
which is not in any other rule (or fact) is useless, but two such additions
might be critical to finding a correct program. Perhaps we should allow new
rules to be added that use the next predicate after all the existing predicates
in the program. We could also introduce an evolutionary cost for using higher
predicates.
So we can create a clause by:
- Selecting a random predicate.
- Selecting a number of output variables up to the number of terms in the predicate.
- For each term, select to put an output there with probability (remaining output variables / remaining output slots).
- Choosing a number of literals.
- For each literal, choosing a predicate, such that the sum of the number terms is less than or equal to the number of output variables.
- For each output variable, select a randomly not yet selected output term.
- For each term not yet selected, choose with some probability a constant and with the opposite probability a variable.
- If a constant is chosen, choose a constant randomly up to 1 more than the largest constant previously found in that term.
- If a variable is chosen, choose a random variable number up to 1 more than the current number of variables.
This routine should work, and doesn't need to repeatedly reject a generated
sample. It's probably not uniform over the space, but that shouldn't be too
important. There's probably some priors we want to bake into it. It would also
be nice to use the statistical trick from the head to ensure we use each output
variable somewhere in the body.
The second way is to find an existing clause and mutate it. Here, I would like
to use behavioral techniques, since I feel like they should work well on
datalog programs.
The basic idea here is to find terms that result in high error. Using the
`fact_adjustment` tables, we know what facts should be different for a
predicate. How about the following algorithm:
- Choose a clause to improve.
- For each term, set aside a place to sum the absolute error and the total error.
- Go through the latent fact table for the clause's head predicate.
- Go through each term in the fact.
- If that term's value never shows up in the normal fact table, add the absolute value of the fact adjustment value for that term, and the total.
- Choose a term to change using a weighted random choice, using the absolute value of the fact adjustment.
- If that term's total error is positive, the term is over constrained. Create a copy of the clause where the term is less constrained.
- If the term's total error is negative, the term is under constrained. Create a copy of the clause where the term is more constrained.
- It's probably worthwhile to do the "wrong thing" with some probability.
- If a term is a constant, it can be made less constrained by setting it to a variable.
- If a term is a variable, it might be made less constrained by various operations:
- Removing some (but not all) uses of that variable in the body.
- Changing what terms that variable is used in in the body.
- If a term is a constant, it cannot be made more constrained.
- Perhaps the constant should be changed?
- If a term is a variable, it can be made more constrained in various ways:
- By turning it into a constant.
- By using it in more terms in the body.
- By using it in different terms in the body.
- By making the other terms in predicates it is in more constrained.
Trying to work out an algorithm in this style is a serious pain. Many of the steps can cause the clause to become invalid.
One option is to use rejection sampling. But this seems wasteful. Furthermore, the current code is impossible to control in a high level way.
I would like to be able to tune the mutation in a meaningful way. If we had a calculus of clause mutations, we would be able to organize them in a useful way.
There are two constraints for validity:
- Each output variable must have at least one input.
- Each literal must have the correct number of terms.
Maybe we should create a datastructure where the validity constraints are
obvious. Currently, the second constraint is easy to maintain, but the first
one is easily broken on accident.
We should break up mutation into some number of mutation operations.
Each mutation operation corresponds to a small change to a clause, which might
not be valid on its own.
Suppose the following:
A clause has some number of bound terms, and 0 free terms.
Each variable in the clause has some number of supporting terms.
A valid mutation must ensure that the number of bound terms is kept constant.
A valid mutation must ensure that for each variable, the number of terms
removed from its support should be less than the number of supporting terms.
A mutation is composed of some number of concrete mutation operations.
An abstract mutation operation might decrease or increase the number of free terms.
It may also decrease or increase the number of supporting terms for a variable.
Each mutation operation also has a reverse mutation operation, with opposite effects.
The complete set of minimal mutation operations are:
1 bind_constant
- Decreases the number of free terms by 1.
2 bind_variable
- Decreases the number of free terms by 1.
- Increases the number of support terms for that variable by 1.
3 insert_predicate[n]
- Increases the number of free terms by n, the number of terms associated with the predicate.
4 bind_variable_out
- Decreases the number of free output terms by 1.
5 bind_constant_out
- Decreases the number of free output terms by 1.
6 <API key>
- Decreases the number of free output terms by 1.
- Introduces a new variable which must have support at least 1.
Composing multiple mutation operations results in a mutation, with some preconditions.
For example:
1 bind_variable - bind_constant -> |terms| >= 1 && Exists var: |var| > 0
Proposed mutation algorithm:
- Choose a number of unconstrained mutation operations.
- The more operations chosen, the less the new clause will resemble the old.
- There might be a principled way of choosing this, based on the weight of the clause.
- Choose operations which will make
Taking the set of mutation operations up to some fixed predicate size (say, 8),
along with the null operation, there is a tensor which gives a meaningful
weight to each set of operations which maintain validity.
The tensor is the outer product of probabilities for each operation with
itself, with invalid combinations zeroed out, and rank equal to the maximum
number of operations considered. There is likely an algorithm we could write to
sample from this tensor without actually creating it.
The key is that we have a weight vector for the next mutation operation. When
we choose a mutation operation, we need to update that vector.
If we're willing to build up arbitrarily complex mutations, then there's no
need to weight any particular mutation operations highly. However, we would
probably like to have our mutation contain about some number of changed terms
(or maybe an expected number of mutation operations). By changing the weights
dynamically, we can make it more likely that we'll have a valid mutation near
the expected value.
Conceptually, we have two probabilistic schedules here. The tweak / control
schedule, which has some bias for certain mutation operations. There's also a
schedule needed to have a valid mutation at the expected time (range). The key
is that we need to choose mutation operations which are chosen by both
schedules. This is probably easiest achieved by multiplying them and then
normalizing the resulting weights (if the array starts to become too small).
The constraint schedule is determined by its expected excess change on each
validity dimension being equal to the amount that dimension is off by.
For example, suppose we need three variables to be bound. If we are targeting
10 mutops, and have performed 5, then the probability of binding a variable
needs to be 3/5. But wait, what if we have no free terms? Then there's no way
to make this work (without adding a predicate). So when we were at 4, we should
have only been able to free terms or bind variables using free terms.
Ugh, the way constraints interact is so complex.
For any clause, there must be more terms than output variables.
To be trivially solvable, there must be more free terms than output variables.
So for the constraint schedule, if free terms <= free output variables require creation of free terms.
Wait, it's more like this:
<API key> = ?
needed_free_terms = <API key>
<API key> = needed_free_terms - free_terms + <API key>
if <API key> == ops_remaining:
if needed_free_terms == free_terms:
output 1 only for binding variables
else:
output 1 only for creating new terms and binding variables
elif free_terms == ops_remaining
output 1 only for decreasing number of free terms
Using pure weight combination seems pretty tricky to make reasonably correct and efficient.
Instead, it's probably easiest to output weights for choosing the mutation operations.
Then, use rejection for the concrete option.
This usually doesn't uniformly sample from the space, or even from a subspace.
It's not clear that that's a problem, so I'll let it be.
I was discussing this project with Daniel last night. He suggested I use a more analytical approach.
I started actually thinking about what that would imply for SAT lowering.
For most forms of program synthesis, SAT lowering is basically done by performing symbolic evaluation.
However, I'm operating on logical programs. It would be totally impractical to
lower all of the search operations done in a datalog program into a SAT
formula. However, it would be practical to lower only the concrete
instantiations of some subset of the clauses (like the used instantiations).
However, maybe we can do better.
We recently realized that having a canonical form for clauses is relatively easy.
Just sort the output variables, then sort the clauses by predicate number.
However, having a canonical form for two programs is much harder.
Formal datalog analysis usually defines the semantics of a datalog program to
be its output, since that is total and a complete description of its output
behavior on an input database.
However, having a canonicalization of whole programs might be very useful for analysis.
What would be even more useful would be if we could describe an entire (useful)
space of programs as a vector space.
On a related note, it would be nice to know if it's possible to get the same
capabilities out of a datalog program with more restrictions on its form. For
example, does having a maximum number of literals per clause or terms per
literal restrict the language's capabilities. If so, what would be needed to lift this restriction.
What if we had first-class support for enumeration tables?
For any given input, enumeration tables are equivalent to adding the equivalent real tables.
This is only true if the variables of each clause have a known space before the
enumeration table is expanded.
If we write something like this, what happens?
a(X1) :- enumerate!(X, X1, X2), a(X2)
Okay, clearly that's invalid. We can create some rule about enumerate! being a
virtual table which only has values when all but one of its entries is
specified by other tables, unless the first entry is specified.
But what if we write something like this:
a(X) :- enumerate!(X, X1, X2), a(X1), a(X2)
This requires us to do some form of type checking, by assuming that a(X) has a
finite domain. Then, we analyze this clause and see that it increases the
domain of a(X).
But what if there's something like this:
b(X) :- enumerate!(X, X1, X2), a(X1), a(X2)
a(X) :- b(X)
Eh, we can probably just use a unification based approach here, and detect
errors when we try to unify |a(X)| with a type expression (created by enumeration)
in terms of |a(X)|.
Okay, the behavior of the enumerate is dependent on types, which means that now
each clause needs to have a finite domain for each variable. That also needs to
be canonicalized, so it's probably not worthwhile.
Trying to canonicalize all arbitrary datalog programs is equivalent to solving the
restriction problem, right? Which is probably quite difficult, since it's an open research problem.
What if we add first class support for (two-)tuples? Then you can build list /
Church numerals and are probably Turing complete.
Okay, maybe we should just try to attack the canonicalization directly.
Firstly, it's always possible to emulate a lower size predicate with a higher
sized one, by writing programs that happen to always set several of the terms
equal.
Maybe we can begin by characterizing the program by at least two numbers P and T,
where P is the number of predicates and T is the number of terms in each one.
So maybe it's better to look at this from a bottom up / constraint based idea.
For each term t, it can have an equality constraint with any other term t'.
Yeah, after thinking about that, it doesn't seem to go anywhere.
Maybe we should continue trying to define canonicalization using ordering.
It's pretty easy to define an order function for clauses (basically just
re-using definitions and recursing on the subtypes).
But there's still a ton of symmetry due to predicate renaming.
It's probably possible to solve that problem using dataflow ordering constraints.
Combining these, it's likely possible to enumerate the programs in a space with
near maximum efficiency.
There's probably still redundant programs in that some later programs have the
same semantics as earlier programs.
Maybe it would be best to be able to test these things out experimentally. |
#include <GAGPL/CHEMISTRY/<API key>.h>
#include <boost/lexical_cast.hpp>
#include <iostream>
namespace gag
{
// Meyers-singleton accessor: the single table instance is created lazily on
// first use and shared for the lifetime of the program.
<API key>& <API key>::Instance()
{
	static <API key> cgt;
	return cgt;
}
// Parse the functional-group definition XML (once) and populate the table.
// Expected layout: parameters.FunctionalGroupSets/FunctionalGroup entries,
// each with Symbol/Name/Composition children plus optional Ring and Sites
// sub-trees.  The parsed entries are collected into a reference map and then
// converted to real FunctionalGroup objects by buildtree().
void <API key>::load(const std::string& filename)
{
	std::cout << "Load functional group table once!\n" << std::endl;
	// Touch the element table so it is initialized before sites reference it.
	PeriodicTable& ptable = PeriodicTable::Instance();
	//ptable.load();
	using boost::property_tree::ptree;
	ptree pt;
	read_xml(filename, pt);
	// Intermediate reference table keyed by symbol; consumed by buildtree().
	<API key> fg_ref;
	BOOST_FOREACH(ptree::value_type &v, pt.get_child("parameters.FunctionalGroupSets"))
	{
		if(v.first == "FunctionalGroup")
		{
			<API key> fg_pt;
			fg_pt._symbol = v.second.get<std::string>("Symbol");
			fg_pt._name = v.second.get<std::string>("Name");
			//const std::string compo_string = v.second.get<std::string>("Composition");
			//fg.getComposition().update(compo_string );
			fg_pt._composition_string = v.second.get<std::string>("Composition");
			// Optional Ring node gives a start/end position range; absence
			// means a non-ring group, recorded as (0, 0).
			if(v.second.count("Ring") > 0) {
				BOOST_FOREACH(ptree::value_type &m, v.second.get_child("Ring"))
				{
					if(m.first == "Start")
						fg_pt._start = boost::lexical_cast<size_t>(m.second.data());
					else if(m.first == "End")
						fg_pt._end = boost::lexical_cast<size_t>(m.second.data());
					else {
						// Do nothing.
					}
				}
			} else {
				fg_pt._start = 0;
				fg_pt._end = 0;
			}
			if(v.second.count("Sites")>0) // Dependent functional group.
			{
				BOOST_FOREACH(ptree::value_type &s, v.second.get_child("Sites"))
				{
					// Composition nodes.
					if(s.first == "Site")
					{
						// The repeating number of Site.
						size_t loop1 = s.second.get("<xmlattr>.Count", 1);
						// Check the contents of Site and decide if conversion is needed.
						// 1. A Site has to have a core or functional group as its children.
						// 2. If a core, store the information into _sites.
						// 3. If a functional group, covert it to Site which will be stored in _sites.
						std::string core;
						// tag Subset.FunctionalGroup or FunctionalGroup.
						std::vector<std::string> satellites;
						if(s.second.count("Core") > 0) {
							core = s.second.get<std::string>("Core");
							// Subset is not necessarily present.
							if(s.second.count("Subset") > 0) {
								BOOST_FOREACH(ptree::value_type &t, s.second.get_child("Subset"))
								{
									if(t.first == "FunctionalGroup") {
										// Each satellite may repeat Count times.
										size_t loop2 = t.second.get("<xmlattr>.Count", 1);
										for(size_t i = 0; i<loop2; i++)
											satellites.push_back(t.second.data());
									}
								}
							}
						} else if(s.second.count("FunctionalGroup") == 1) {
							// Only one functional group is allowed for replacement.
							// core will be kept empty.
							satellites.push_back(s.second.get<std::string>("FunctionalGroup"));
						} else {
							std::cout << fg_pt._name << std::endl;
							throw std::runtime_error("Functional group for replacing is missing!");
						}
						// Replicate the (core, satellites) pair Count times.
						for(size_t i = 0; i < loop1; i++)
							fg_pt._sites.push_back(std::make_pair(core, satellites));
					}
				}
			} else { // Independent functional group.
				// Currently, do nothing.
			}
			fg_ref.insert(std::make_pair(fg_pt._symbol, fg_pt));
		}
	}
	// Raw tree no longer needed once the reference map is built.
	pt.erase("parameters.FunctionalGroupSets");
	this->buildtree(fg_ref);
}
// Convert information from fg_ref to functionalgroups.
// Iterates every entry of the reference table and materializes it (the
// per-symbol builder memoizes, so shared children are built only once).
void <API key>::buildtree(const <API key>& fg_ref)
{
	<API key>::const_iterator iter = fg_ref.begin();
	// Iterate over the reference table.
	for(; iter != fg_ref.end(); iter++) {
		// Create a FunctionalGroup object using symbol information from fg_ref.
		this-><API key>(fg_ref, iter->first);
		//functionalgroups.insert(std::make_pair(iter->first, ff));
	}
}
// Build (or fetch, if already cached) the FunctionalGroup for `symbol`,
// recursively creating any child groups referenced through its sites.
// Throws std::runtime_error if the symbol is absent from the reference table.
FunctionalGroup <API key>::<API key>(const <API key>& fg_ref, const std::string& symbol)
{
	<API key>::const_iterator iter = fg_ref.find(symbol);
	if(iter == fg_ref.end())
		throw std::runtime_error("Symbol cannot be found in the reference table!");
	// If this functional group has been created previously, return it directly.
	if(this-><API key>(symbol))
		return this-><API key>(symbol);
	// If not defined, create it from the scratch.
	FunctionalGroup ff(symbol, iter->second._name, iter->second._start, iter->second._end);
	// Set the composition of the functional group.
	ff.add(iter->second._composition_string);
	// Check if it is an independent functional group.
	if(!iter->second._sites.empty()) { // Dependent functional group.
		// Iterate over all the sites.
		std::vector<std::pair<std::string, std::vector<std::string> > >::const_iterator site_iter = iter->second._sites.begin();
		for(; site_iter != iter->second._sites.end(); site_iter++)
		{
			if(site_iter->first.empty()){ // If core is empty. Convert the functional group to Site.
				if(site_iter->second.empty() || site_iter->second.size() > 1)
					throw std::runtime_error("There should be only one functional group for replacing site.");
				std::string child_symbol = site_iter->second.front();
				// Recursively build the child group, then adopt its
				// Site-converted representation.
				FunctionalGroup child_fg = this-><API key>(fg_ref, child_symbol);
				Site temp_st = child_fg.getConvertedSite();
				// Update _sites.
				ff._sites.push_back(temp_st);
			} else {
				// Create Site
				PeriodicTable& ptable = PeriodicTable::Instance();
				Site temp_st;
				temp_st.core = ptable.getElementBySymbol(site_iter->first);
				std::vector<std::string>::const_iterator fg_iter = site_iter->second.begin();
				for (; fg_iter != site_iter->second.end(); fg_iter++)
				{
					// Attach each satellite functional group to this site.
					FunctionalGroup temp_fg = this-><API key>(fg_ref, *fg_iter);
					std::string temp_symbol(*fg_iter);
					temp_st.fg_map.insert(std::make_pair(temp_symbol, temp_fg));
				}
				ff._sites.push_back(temp_st);
			}
		}
	} else { // Independent functional group.
	}
	// Cache the finished group so subsequent lookups reuse it.
	functionalgroups.insert(std::make_pair(symbol, ff));
	// Update functionalgroups.
	return ff;
}
// Look up a previously built FunctionalGroup by symbol.
// NOTE(review): returns a default-constructed FunctionalGroup when the symbol
// is unknown, so callers cannot distinguish "missing" from an empty group --
// confirm this silent fallback is intended.
FunctionalGroup <API key>::<API key>(const std::string& symbol) const
{
	std::map<std::string, FunctionalGroup>::const_iterator i = functionalgroups.find(symbol);
	return i != functionalgroups.end() ? i->second : FunctionalGroup();
}
// Report whether a functional group with the given symbol has already been
// registered in the table.
bool <API key>::<API key>( const std::string& symbol )
{
	// std::map has unique keys, so count() is 0 or 1.
	return functionalgroups.count(symbol) > 0;
}
// Collect the symbols (map keys) of all registered functional groups, in
// the map's sorted key order.
std::vector<std::string> <API key>::<API key>() const
{
	std::vector<std::string> keys;
	keys.reserve(functionalgroups.size());
	std::map<std::string, FunctionalGroup>::const_iterator it = functionalgroups.begin();
	for (; it != functionalgroups.end(); ++it) {
		keys.push_back(it->first);
	}
	return keys;
}
//void <API key>::<API key>( FunctionalGroup& fg, Modification& mod)
// Reaction temp_rt;
// BOOST_FOREACH(temp_rt, mod.<API key>())
// // apply the corresponding reactions to the core.
// std::pair<AtomOperation, std::string> <API key>;
// BOOST_FOREACH(<API key>, temp_rt.core_operation)
// if(<API key>.first == Addition) {
// FunctionalGroup fg_operation = this-><API key>(<API key>.second);
// fg.addFunctionalGroup(fg_operation, temp_rt.position);
// } else if(<API key>.first == Removement) {
// fg.<API key>(<API key>.second, temp_rt.position);
// // apply the corresponding reactions to the sub-functionalgroup.
// boost::tuple<size_t, AtomOperation, std::string> single_operation;
// std::pair<std::string, OperationSet> fg_operation_pair;
// BOOST_FOREACH(fg_operation_pair, temp_rt.sub_fg_operation)
// if(!fg.<API key>(fg_operation_pair.first, temp_rt.position)) {
// continue;
// <API key> chain;
// chain.push_back(std::make_pair(temp_rt.position, this-><API key>(fg_operation_pair.first)));
// BOOST_FOREACH(single_operation, fg_operation_pair.second)
// <API key> chain_temp(chain);
// chain_temp.push_back(std::make_pair(single_operation.get<0>(), this-><API key>(single_operation.get<2>())));
// if(single_operation.get<1>() == Addition) {
// //fg.<API key>(fg_operation_pair.first, temp_rt.position).addFunctionalGroup(fg_operation, single_operation.get<0>());
// fg.<API key>(chain_temp);
// } else if(single_operation.get<1>() == Removement) {
// //fg.<API key>(fg_operation_pair.first, temp_rt.position).<API key>(single_operation.get<2>(), single_operation.get<0>());
// fg.<API key>(chain_temp);
// // Get the reactant functional group and modify it.
// FunctionalGroup fg_ext = this-><API key>(temp_rt.reactant_operation.first);
// BOOST_FOREACH(single_operation, temp_rt.reactant_operation.second)
// if(single_operation.get<1>() == Addition) {
// FunctionalGroup fg_operation = this-><API key>(single_operation.get<2>());
// fg_ext.addFunctionalGroup(fg_operation, single_operation.get<0>());
// } else if(single_operation.get<1>() == Removement) {
// fg_ext.<API key>(single_operation.get<2>(), single_operation.get<0>());
// if(!temp_rt.core_operation.empty()) { // Add to the core.
// fg.addFunctionalGroup(fg_ext, temp_rt.position);
// } else if(!temp_rt.sub_fg_operation.empty()) { // Add to the sub functional group.
// // Check and see if the functional group contain such a sub functional group.
// std::multimap<std::string, OperationSet>::iterator oper_iter = temp_rt.sub_fg_operation.begin();
// for(; oper_iter != temp_rt.sub_fg_operation.end(); oper_iter++)
// if(fg.<API key>(oper_iter->first, temp_rt.position)) {
// <API key> chain;
// chain.push_back(std::make_pair(temp_rt.position, this-><API key>(oper_iter->first)));
// chain.push_back(std::make_pair(0, fg_ext));
// //fg.<API key>(oper_iter->first, temp_rt.position).addFunctionalGroup(fg_ext);
// fg.<API key>(chain);
//void <API key>::<API key>( FunctionalGroup& fg, Modification mod, const size_t idx )
// // Use this function at risk.
// for(std::vector<Reaction>::iterator iter = mod.<API key>().begin(); iter != mod.<API key>().end(); iter++)
// iter->position = idx;
// this-><API key>(fg, mod);
} |
package scato

// Control-flow helpers for the scato library.
package object control {
  // A suspended computation: force it by applying to ().  Unlike a lazy val
  // or a memoized by-name value, the result is re-computed on every call.
  type Lazy[A] = Unit => A
}
using System;
using System.Collections;
using System.Xml;
using StructureMap.Configuration;
using StructureMap.Graph;
using StructureMap.Pipeline;
namespace StructureMap.Source
{
/// <summary>
/// An implementation of InstanceMemento that stores properties as Xml attributes
/// Limited functionality
/// </summary>
public class <API key> : InstanceMemento
{
    private readonly XmlElement _element;

    /// <summary>
    /// Wraps the given node; the node must actually be an XmlElement.
    /// </summary>
    public <API key>(XmlNode node)
    {
        _element = (XmlElement) node;
    }

    // Concrete/instance keys are read straight from the well-known attributes.
    protected override string innerConcreteKey { get { return _element.GetAttribute(XmlConstants.TYPE_ATTRIBUTE); } }

    protected override string innerInstanceKey { get { return _element.GetAttribute(XmlConstants.KEY_ATTRIBUTE); } }

    /// <summary>
    /// Exposes the backing element (used by templating/substitution code).
    /// </summary>
    public XmlElement InnerElement { get { return _element; } }

    // A node with no concrete type and no plugged type is a reference to
    // another named instance.
    public override bool IsReference { get { return (ConcreteKey == string.Empty && string.IsNullOrEmpty(getPluggedType())); } }

    public override string ReferenceKey { get { return InstanceKey; } }

    public void SetConcreteKey(string concreteKey)
    {
        _element.SetAttribute(XmlConstants.TYPE_ATTRIBUTE, concreteKey);
    }

    /// <summary>
    /// Returns the child memento stored under the named child element, or
    /// null when no such element exists.
    /// </summary>
    protected override InstanceMemento getChild(string key)
    {
        XmlNode childNode = _element[key];
        if (childNode == null)
        {
            return null;
        }
        return new <API key>(childNode);
    }

    /// <summary>
    /// Property values are looked up first as an attribute, then as the
    /// trimmed inner text of a child element; missing keys yield "".
    /// </summary>
    protected override string getPropertyValue(string key)
    {
        if (_element.HasAttribute(key))
        {
            return _element.GetAttribute(key);
        }
        XmlElement childElement = _element[key];
        return childElement == null ? string.Empty : childElement.InnerText.Trim();
    }

    /// <summary>
    /// Returns one memento per child element of the named node, or null when
    /// the node is absent.
    /// </summary>
    public override InstanceMemento[] GetChildrenArray(string Key)
    {
        XmlNode childrenNode = _element[Key];
        if (childrenNode == null)
        {
            return null;
        }
        var list = new ArrayList();
        // Walk siblings as XmlNode: children may include comment/text nodes,
        // and the previous unconditional cast to XmlElement threw
        // InvalidCastException on those before the NodeType check could run.
        for (XmlNode child = childrenNode.FirstChild; child != null; child = child.NextSibling)
        {
            if (child.NodeType == XmlNodeType.Element)
            {
                InstanceMemento childMemento = new <API key>((XmlElement) child);
                list.Add(childMemento);
            }
        }
        return (InstanceMemento[]) list.ToArray(typeof (InstanceMemento));
    }

    /// <summary>
    /// Applies template substitution against this element and returns the
    /// substituted copy as a new memento.
    /// </summary>
    public override InstanceMemento Substitute(InstanceMemento memento)
    {
        var templater = new XmlTemplater(_element);
        XmlNode substitutedNode = templater.SubstituteTemplates(_element, memento);
        return new <API key>(substitutedNode);
    }

    public override string ToString()
    {
        return _element.OuterXml;
    }

    /// <summary>
    /// Delegates to a type-specific reader when one is registered for
    /// childType; otherwise falls back to the base implementation.
    /// </summary>
    public override Instance ReadChildInstance(string name, PluginGraph graph, Type childType)
    {
        ITypeReader reader = TypeReaderFactory.GetReader(childType);
        if (reader == null)
        {
            return base.ReadChildInstance(name, graph, childType);
        }
        XmlElement element = _element[name];
        return element == null ? null : reader.Read(element, childType);
    }
}
} |
# eENVplus TFES cluster visualization
This cluster visualization intends to visualize contents exploited by the Thesaurus Framework Exploitation Services [TFES](https://github.com/eENVplus/<API key>). It provides an interactive user interface which can be used to perform a so-called "Semantic Explorative Search (SES)".
Due to the cascading hierarchical structure of the semantic information represented in a SKOS constellation, pinpointing the right concept, which precisely addresses the aimed definition, can be a challenging task. This can, however, be facilitated by the use of the hierarchical nature of the semantic data and generation of an interactive visualization of this hierarchy. The user will then be able to explore through the hierarchy and fine-tune his selection by going through the more general concept and find his precise concept under the narrower concepts of the abstract concept.
This document describes the configuration and usage of the visualization tool.
TFES cluster visualization is licensed under the [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0).
## Usage
Deployment
The repository content is a small set of files:
* ```index.html``` Contains the HTML-Frame for the visualization components, applies CSS stylesheets
* ```d3.min.js```, ```d3tip.js``` Minimized version of the [D3.js](http://d3js.org/) library
* ```<API key>.js``` Java Script implementation of the cluster visualization
These files have to be deployed to an arbitrary HTTP webserver. Check availability by calling
the appropriate URL, e.g. http://localhost:8000/visualization
Configuration
The visualization depends on the availability of a Thesaurus Framework Exploitation Service (TFES)
REST endpoint. Therefore an URL to this endpoint has to be configured.
Edit the file ```index.html``` and adapt the following line to the URL of your TFES REST endpoint in your environment:
eENVplusCluster.init({
baseUrl: "http://localhost:8085/tfes/rest/",
});
User interface
To use the cluster visualization, open the webpage of your webserver in one of the current browsers
(Firefox, Chrome, etc.) according to the following request pattern:
http://<hostname>:<port>/<application>/?type=cluster&focusConcept=<conceptURI>&k=<depth>&languages=<listOfLanguages>&relations=<listOfRelationTypes>
Request examples:
* [http:
* [linkeddata.ge.imati.cnr.it/visualization/?type=cluster&focusConcept=http:
* [http:
The visualization initially shows an overview around the given focusConcept using the given depth, languages and relations to other keyword concepts.
The visualization offers the following features:
* Drag and zoom
* Hover keyword concepts to get detailed information about the keyword concept and its relation to other concepts in the visualization
* Different relations are represented by different edge colors
* Different originating thesauri are represented by different node colors
* Enable a legend overlay to get information about node and edge colors by clicking the ```?``` sign.
* Use the legend panel to choose for a set of relations to be visualized
* Hover keyword concepts to call the LusTRE interface by clicking the ```>``` sign shown next to the concept node
* Navigate to another keyword concept by clicking on its node
* Go back and forward in visualization history by using the ```<``` ```>``` buttons on the top left corner
## Contribute
Feel free to contribute! :-) |
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<link rel="SHORTCUT ICON" href="../../../../../img/clover.ico" />
<link rel="stylesheet" href="../../../../../aui/css/aui.min.css" media="all"/>
<link rel="stylesheet" href="../../../../../aui/css/aui-experimental.min.css" media="all"/>
<!--[if IE 9]><link rel="stylesheet" href="../../../../../aui/css/aui-ie9.min.css" media="all"/><![endif]-->
<style type="text/css" media="all">
@import url('../../../../../style.css');
@import url('../../../../../tree.css');
</style>
<script src="../../../../../jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="../../../../../aui/js/aui.min.js" type="text/javascript"></script>
<script src="../../../../../aui/js/aui-experimental.min.js" type="text/javascript"></script>
<script src="../../../../../aui/js/aui-soy.min.js" type="text/javascript"></script>
<script src="../../../../../package-nodes-tree.js" type="text/javascript"></script>
<script src="../../../../../clover-tree.js" type="text/javascript"></script>
<script src="../../../../../clover.js" type="text/javascript"></script>
<script src="../../../../../clover-descriptions.js" type="text/javascript"></script>
<script src="../../../../../cloud.js" type="text/javascript"></script>
<title>ABA Route Transit Number Validator 1.0.1-SNAPSHOT</title>
</head>
<body>
<div id="page">
<header id="header" role="banner">
<nav class="aui-header <API key>" role="navigation">
<div class="aui-header-inner">
<div class="aui-header-primary">
<h1 id="logo" class="aui-header-logo <API key>">
<a href="http://openclover.org" title="Visit OpenClover home page"><span class="<API key>">OpenClover</span></a>
</h1>
</div>
<div class="<API key>">
<ul class="aui-nav">
<li id="system-help-menu">
<a class="aui-nav-link" title="Open online documentation" target="_blank"
href="http://openclover.org/documentation">
<span class="aui-icon aui-icon-small aui-iconfont-help"> Help</span>
</a>
</li>
</ul>
</div>
</div>
</nav>
</header>
<div class="aui-page-panel">
<div class="<API key>">
<div class="aui-page-panel-nav <API key>">
<div class="<API key>" style="margin-bottom: 20px;">
<div class="<API key>">
<a href="http://cardatechnologies.com" target="_top">
<div class="aui-avatar aui-avatar-large aui-avatar-project">
<div class="aui-avatar-inner">
<img src="../../../../../img/clover_logo_large.png" alt="Clover icon"/>
</div>
</div>
</a>
</div>
<div class="<API key>" >
<h1>
<a href="http://cardatechnologies.com" target="_top">
ABA Route Transit Number Validator 1.0.1-SNAPSHOT
</a>
</h1>
</div>
</div>
<nav class="aui-navgroup <API key>">
<div class="aui-navgroup-inner">
<ul class="aui-nav">
<li class="">
<a href="../../../../../dashboard.html">Project overview</a>
</li>
</ul>
<div class="aui-nav-heading <API key>">
<strong>Packages</strong>
</div>
<div class="aui-nav project-packages">
<form method="get" action="#" class="aui <API key>">
<input type="text" autocomplete="off" class="package-filter text"
placeholder="Type to filter packages..." name="package-filter" id="package-filter"
title="Start typing package name (or part of the name) to search through the tree. Use arrow keys and the Enter key to navigate."/>
</form>
<p class="<API key> hidden">
<small>No results found.</small>
</p>
<div class="<API key>" data-root-relative="../../../../../" data-package-name="com.cardatechnologies.utils.validators.abaroutevalidator">
<div class="<API key>"></div>
<div class="<API key>"></div>
</div>
</div>
</div>
</nav> </div>
<section class="<API key>">
<div class="<API key>">
<div class="<API key>"><ol class="aui-nav aui-nav-breadcrumbs">
<li><a href="../../../../../dashboard.html"> Project Clover database Sat Aug 7 2021 12:29:33 MDT</a></li>
<li><a href="test-pkg-summary.html">Package com.cardatechnologies.utils.validators.abaroutevalidator</a></li>
<li><a href="<API key>.html">Class <API key></a></li>
</ol></div>
<h1 class="aui-h2-clover">
Test <API key>
</h1>
<table class="aui">
<thead>
<tr>
<th>Test</th>
<th><label title="The test result. Either a Pass, Fail or Error.">Status</label></th>
<th><label title="When the test execution was started">Start time</label></th>
<th><label title="The total time in seconds taken to run this test.">Time (seconds)</label></th>
<th><label title="A failure or error message if the test is not successful.">Message</label></th>
</tr>
</thead>
<tbody>
<tr>
<td>
<a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/<API key>.html?line=41872#src-41872" ><API key></a>
</td>
<td>
<span class="sortValue">1</span><span class="aui-lozenge aui-lozenge-success">PASS</span>
</td>
<td>
7 Aug 12:35:54
</td>
<td>
0.0 </td>
<td>
<div></div>
<div class="errorMessage"></div>
</td>
</tr>
</tbody>
</table>
<div> </div>
<table class="aui aui-table-sortable">
<thead>
<tr>
<th style="white-space:nowrap;"><label title="A class that was directly hit by this test.">Target Class</label></th>
<th colspan="4"><label title="The percentage of coverage contributed by each single test.">Coverage contributed by</label> <API key></th>
</tr>
</thead>
<tbody>
<tr>
<td>
<span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.exceptions.<API key></span>
  <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/exceptions/<API key>.html?id=13270#<API key>" title="<API key>" name="sl-43">com.cardatechnologies.utils.validators.abaroutevalidator.exceptions.<API key></a>
</td>
<td>
<span class="sortValue">0.5714286</span>57.1%
</td>
<td class="align-middle" style="width: 100%" colspan="3">
<div>
<div title="57.1% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:57.1%"></div></div></div> </td>
</tr>
<tr>
<td>
<span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.ErrorCodes</span>
  <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/ErrorCodes.html?id=13270#ErrorCodes" title="ErrorCodes" name="sl-42">com.cardatechnologies.utils.validators.abaroutevalidator.ErrorCodes</a>
</td>
<td>
<span class="sortValue">0.5714286</span>57.1%
</td>
<td class="align-middle" style="width: 100%" colspan="3">
<div>
<div title="57.1% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:57.1%"></div></div></div> </td>
</tr>
<tr>
<td>
<span class="sortValue">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</span>
  <a href="../../../../../com/cardatechnologies/utils/validators/abaroutevalidator/AbaRouteValidator.html?id=13270#AbaRouteValidator" title="AbaRouteValidator" name="sl-47">com.cardatechnologies.utils.validators.abaroutevalidator.AbaRouteValidator</a>
</td>
<td>
<span class="sortValue">0.29411766</span>29.4%
</td>
<td class="align-middle" style="width: 100%" colspan="3">
<div>
<div title="29.4% Covered" style="min-width:40px;" class="barNegative contribBarNegative contribBarNegative"><div class="barPositive contribBarPositive contribBarPositive" style="width:29.4%"></div></div></div> </td>
</tr>
</tbody>
</table>
</div> <!-- class="<API key>" -->
<footer id="footer" role="contentinfo">
<section class="footer-body">
<ul>
<li>
Report generated by <a target="_new" href="http://openclover.org">OpenClover</a> v 4.4.1
on Sat Aug 7 2021 12:49:26 MDT using coverage data from Sat Aug 7 2021 12:47:23 MDT.
</li>
</ul>
<ul>
<li>OpenClover is free and open-source software. </li>
</ul>
</section>
</footer> </section> <!-- class="<API key>" -->
</div> <!-- class="<API key>" -->
</div> <!-- class="aui-page-panel" -->
</div> <!-- id="page" -->
</body>
</html> |
@CHARSET "UTF-8";

/* Bottom bar: small gap above, image pulled left to align with content. */
#barraEmbaixo {
	margin-top: 4px;
}

#barraEmbaixo img {
	margin-left: -56px;
}

/* Strip default margins from radios/checkboxes in the user & preferences forms. */
#camposUsuario input[type=radio], #preferencias input[type=checkbox] {
	margin: 0;
}

/* Breathing room around preference labels. */
#preferencias span {
	padding: 5px;
}

/* Spacing below each address block. */
.endereco {
	padding-bottom: 10px;
}
package validator
import (
"net/url"
"strings"
"time"
)
// IsNull reports whether the string is empty (zero length).
func IsNull(str string) bool {
	return str == ""
}
// IsWord reports whether str matches the package-level word pattern rxWord.
// Empty strings are rejected up front.
// NOTE(review): the variadic params argument is currently unused -- presumably
// reserved for min/max length bounds; confirm before relying on it.
func IsWord(str string, params ...int) bool {
	if IsNull(str) {
		return false
	}
	return rxWord.MatchString(str)
}
// IsTime reports whether str parses successfully under the given
// reference-layout format string.
func IsTime(str string, format string) bool {
	if _, err := time.Parse(format, str); err != nil {
		return false
	}
	return true
}
// IsDate reports whether str is a valid date.  With an explicit format the
// first format is used; otherwise "/"-separated strings are checked against
// 2006/01/02 and "-"-separated strings against 2006-01-02.  Strings with
// neither separator and no explicit format are rejected.
func IsDate(str string, format ...string) bool {
	layout := ""
	switch {
	case len(format) > 0:
		layout = format[0]
	case strings.Contains(str, "/"):
		layout = "2006/01/02"
	case strings.Contains(str, "-"):
		layout = "2006-01-02"
	default:
		return false
	}
	_, err := time.Parse(layout, str)
	return err == nil
}
// IsEmpty reports whether str is empty or consists only of whitespace.
func IsEmpty(str string) bool {
	return strings.TrimSpace(str) == ""
}
// IsRequestURI reports whether rawurl is accepted by url.ParseRequestURI,
// i.e. an absolute URI or an absolute request path.
func IsRequestURI(rawurl string) bool {
	if _, err := url.ParseRequestURI(rawurl); err != nil {
		return false
	}
	return true
}
// IsURI reports whether str is a request URI, additionally tolerating a
// leading relative-path prefix such as "./", "../" or ".\".
//
// The loop skips over a leading run of '.', '/' and '\' characters.  At the
// first character outside that prefix: if a '.' was seen in the prefix
// (a relative-path marker) and the current character is not a slash, the
// string is rejected; otherwise the remainder str[idx:] is validated with
// IsRequestURI.
// NOTE(review): a string consisting only of prefix characters (e.g. "..")
// falls through to the final `return false` -- confirm that is intended.
func IsURI(str string) bool {
	relation := false
	for idx, val := range str {
		if val == '.' {
			relation = true
			continue
		}
		if val == '/' || val == '\\' {
			// A slash immediately followed by '.' is still part of the
			// relative prefix (e.g. "/../").
			if idx < len(str)-1 {
				if str[idx+1] == '.' {
					relation = true
					continue
				}
			}
		}
		if relation && (val != '/' && val != '\\') {
			return false
		}
		return IsRequestURI(str[idx:])
	}
	return false
}
// IsMobilePhone reports whether str matches the package-level mobile-phone
// pattern.  Blank strings are rejected up front.
// NOTE(review): relies on rxMobolePhone (sic) defined elsewhere in the package.
func IsMobilePhone(str string) bool {
	if IsEmpty(str) {
		return false
	}
	return rxMobolePhone.MatchString(str)
}
package net.sf.robocode.serialization;
import java.awt.*;
import java.io.*;
/**
 * This utility class is used for cloning objects via Java serialization.
 *
 * @author Flemming N. Larsen (original)
 */
public class ObjectCloner {

	/**
	 * Returns a deep copy of the specified object, or {@code null} if the
	 * object cannot be serialized.
	 *
	 * <p>The copy is produced by serializing the object to an in-memory byte
	 * array and deserializing it again, so every object reachable from
	 * {@code orig} must itself be {@link Serializable}.
	 *
	 * @param orig the object to deep copy.
	 * @return a new object that is a deep copy of the specified input object; or
	 *         {@code null} if the object was not copied for some reason.
	 */
	public static Object deepCopy(Object orig) {
		if (!(orig instanceof Serializable)) {
			return null;
		}

		// Serialize the object into an in-memory byte array.
		ByteArrayOutputStream baos = new ByteArrayOutputStream();

		try (ObjectOutputStream out = new ObjectOutputStream(baos)) {
			out.writeObject(orig);
			out.flush();
		} catch (IOException e) {
			// Serialization failed (e.g. a non-serializable field); signal
			// "not copied" exactly like the original contract.
			return null;
		}

		// Deserialize a fresh, independent object graph from the bytes.
		try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()))) {
			return in.readObject();
		} catch (IOException | ClassNotFoundException e) {
			return null;
		}
	}

	/**
	 * Returns a deep copy of the specified {@code Color}, or {@code null} if
	 * the reference to the {@code Color} is {@code null}.
	 *
	 * @param c the {@code Color} to deep copy.
	 * @return a new {@code Color} that is a deep copy if the specified input
	 *         {@code Color}; or {@code null} if the reference to the
	 *         {@code Color} is {@code null}.
	 */
	public static Color deepCopy(Color c) {
		// Color is immutable apart from its RGBA value, so a new instance
		// with the same packed RGB (alpha preserved) is a full copy.
		return (c != null) ? new Color(c.getRGB(), true) : null;
	}
}
package binny
import (
"bytes"
"io"
"sync"
)
// pools holds package-level free lists for encoders and decoders so the
// package's helpers can avoid allocating a fresh buffer/codec on every call.
var pools = struct {
	enc sync.Pool
	dec sync.Pool
}{
	enc: sync.Pool{
		New: func() interface{} {
			// Pre-size the backing buffer to avoid early regrowth.
			buf := bytes.NewBuffer(make([]byte, 0, <API key>))
			eb := &encBuffer{b: buf, e: NewEncoder(buf)}
			// Flushing is done explicitly by the caller rather than after
			// every Encode on the pooled encoder.
			eb.e.NoAutoFlushOnEncode = true
			return eb
		},
	},
	dec: sync.Pool{
		New: func() interface{} {
			// The reader is attached later via Reset in getDec.
			return NewDecoder(nil)
		},
	},
}
// encBuffer pairs a pooled byte buffer with an Encoder that writes into it.
type encBuffer struct {
	b *bytes.Buffer // destination for encoded bytes
	e *Encoder      // encoder bound to b for its whole pooled lifetime
}
// getEncBuffer fetches a ready-to-use encoder/buffer pair from the pool.
func getEncBuffer() *encBuffer {
	return pools.enc.Get().(*encBuffer)
}
// putEncBuffer returns eb to the pool after clearing its buffer so the next
// user starts from an empty state.
func putEncBuffer(eb *encBuffer) {
	eb.b.Reset()
	pools.enc.Put(eb)
}
// getDec fetches a pooled Decoder and points it at r.
func getDec(r io.Reader) *Decoder {
	dec := pools.dec.Get().(*Decoder)
	dec.Reset(r)
	return dec
}
// putDec detaches the decoder from its reader (so the reader is not retained
// by the pool) and returns it to the pool.
func putDec(dec *Decoder) {
	dec.Reset(nil)
	pools.dec.Put(dec)
}
#pragma strict

// Camera-switching helper: starts on cam1 and flips to cam2 the first time
// the referenced runner reports a touchdown.
var cam1 : Camera;
var cam2 : Camera;
// GameObject carrying the RunnerBehaviour script polled each frame.
var ReferenceScript : GameObject;
// Latch so the camera swap happens only once.
var touch = false;

function Start () {
	cam1.enabled = true;
	cam2.enabled = false;
}

function Update () {
	// NOTE(review): string-based GetComponent is resolved every frame;
	// presumably cheap enough here, but caching it would avoid the lookup.
	var runnerBehaviour: RunnerBehaviour = ReferenceScript.GetComponent("RunnerBehaviour");
	if (touch == false)
	{
		if (runnerBehaviour.touchdown == true)
		{
			// Toggle both cameras exactly once on first touchdown.
			cam1.enabled = !cam1.enabled;
			cam2.enabled = !cam2.enabled;
			touch = true;
		}
	}
}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from elasticsearch import ConnectionError
import gc
import logging
import mock
from multiprocessing import Process, Queue
import os
import sys
from shellbot import Context, Engine
from shellbot.events import Message
from shellbot.updaters import <API key>
my_engine = Engine()
class UpdaterTests(unittest.TestCase):
    # Unit tests for the Elasticsearch-backed updater.

    def tearDown(self):
        # Force a full garbage collection between tests so stray objects
        # from one test cannot influence the next.
        collected = gc.collect()
        logging.info("Garbage collector: collected %d objects." % (collected))

    def test_init(self):
        logging.info('***** init')

        # Engine binding is optional at construction time.
        u = <API key>()
        self.assertEqual(u.engine, None)

        u = <API key>(engine=my_engine)
        self.assertEqual(u.engine, my_engine)

    def test_on_init(self):
        logging.info('***** on_init')

        # `host` defaults to None and is stored verbatim, including ''.
        u = <API key>()
        self.assertEqual(u.host, None)

        u = <API key>(host=None)
        self.assertEqual(u.host, None)

        u = <API key>(host='')
        self.assertEqual(u.host, '')

        u = <API key>(host='elastic.acme.com')
        self.assertEqual(u.host, 'elastic.acme.com')

    def test_get_host(self):
        logging.info('***** get_host')

        # get_host() falls back to 'localhost:9200' when host is None or ''.
        u = <API key>(engine=my_engine)
        self.assertEqual(u.get_host(), 'localhost:9200')

        u = <API key>(engine=my_engine, host=None)
        self.assertEqual(u.get_host(), 'localhost:9200')

        u = <API key>(engine=my_engine, host='')
        self.assertEqual(u.get_host(), 'localhost:9200')

        u = <API key>(engine=my_engine, host='elastic.acme.com')
        self.assertEqual(u.get_host(), 'elastic.acme.com')

    def test_on_bond(self):
        logging.info('***** on_bond')

        # Bonding against an unreachable host should raise.
        u = <API key>(host='this.does.not.exist')
        with self.assertRaises(Exception):
            u.on_bond(bot='*dummy')

    def test_put(self):
        logging.info('***** put')

        # Stand-in for the Elasticsearch client; asserts on index() arguments.
        class FakeDb(object):
            def __init__(self):
                self.expected = None

            def index(self, index, doc_type, body):
                assert index == 'shellbot'
                assert doc_type == 'event'
                assert body == self.expected

        u = <API key>()
        u.db = FakeDb()

        message_1 = Message({
            'person_label': 'alice@acme.com',
            'text': 'a first message',
        })
        u.db.expected = message_1.attributes
        u.put(message_1)

        message_2 = Message({
            'person_label': 'bob@acme.com',
            'text': 'a second message',
        })
        u.db.expected = message_2.attributes
        u.put(message_2)
if __name__ == '__main__':
    # Configure shellbot logging, then hand control to unittest's CLI runner;
    # its exit status is forwarded to the shell via sys.exit.
    Context.set_logger()
    sys.exit(unittest.main())
package freepander.wechart;
import freepander.model.recv.WxRecvGeoMsg;
import freepander.model.send.WxSendMsg;
import freepander.model.send.WxSendTextMsg;
import freepander.util.WeiXin;
/**
*
* @author freepander
*
*/
public class ParseLocation {
public WxSendMsg parse(WxRecvGeoMsg recvMsg) {
WxSendTextMsg sendMsg = (WxSendTextMsg) WeiXin.builderSendByRecv(recvMsg, "text");
sendMsg.setContent("");
return sendMsg;
}
} |
package base.controllers
import base.models.{PermissionBase, UserBase, UserRoleBase, UserSessionBase}
import play.api.Logger
import play.api.mvc.Security.{<API key>, <API key>}
import play.api.mvc.{ActionBuilder, _}
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import scala.language.{implicitConversions, postfixOps}
import scala.reflect.ClassTag
trait Secure[S<:UserSessionBase[U], U<:UserBase, R<: UserRoleBase, P<:PermissionBase] extends ControllerBase{
/***
* Constants
*/
val not<API key>: String = "/login"
val userCachingDuration: Duration = 5 minutes
private val idInSession = "id"
private val logMarkerStart =
@inline
private def logStart() = Logger.debug(logMarkerStart)
/****
* Secure Actions
*/
type MRQ[A] = MayBeSecureRequest[A,U]
type SRQ[A] = SecureRequest[A,U]
  /** Wraps `action` so it receives a MayBeSecureRequest: the current user is
    * resolved from the session when present, but the action runs either way.
    */
  def actionMayBeSecure(action: => MRQ[AnyContent] => Result)(implicit cS: ClassTag[S], cU: ClassTag[U]):EssentialAction ={
    val ab = new ActionBuilder[MRQ,AnyContent] with ActionTransformer[Request, MRQ] {
      override protected def executionContext: ExecutionContext = <API key>.executionContext
      override def parser: BodyParser[AnyContent] = <API key>.parsers.defaultBodyParser
      // Lift the plain Request into a MayBeSecureRequest carrying the
      // (possibly absent) user.
      def transform[A](request: Request[A]): Future[MayBeSecureRequest[A, U]] = Future.successful {
        logStart()
        new MayBeSecureRequest(getUser(request), request)
      }
    }
    ab(<API key>.parsers.defaultBodyParser)(action)
  }
  /** Wraps `action` so it only runs for an authenticated user; anonymous
    * callers are redirected to the login page.
    */
  def actionSecure(action: => SRQ[AnyContent] => Result)(implicit cS: ClassTag[S], cU: ClassTag[U]):EssentialAction = {
    val ab = new ActionBuilder[SRQ,AnyContent] {
      override protected def executionContext: ExecutionContext = <API key>.executionContext
      override def parser: BodyParser[AnyContent] = <API key>.parsers.defaultBodyParser
      def invokeBlock[A](request: Request[A], block: (SRQ[A]) => Future[Result]): Future[Result] = {
        <API key>(
          getUser,
          <API key>.parsers.defaultBodyParser,
          // Unauthenticated fallback: bounce to the login page.
          _ => Redirect(not<API key>))(<API key>.executionContext)
          .authenticate(request, { authRequest: <API key>[A, U] =>
            logStart()
            block( new SecureRequest[A,U](authRequest.user, request))
          })
      }
    }
    ab(<API key>.parsers.defaultBodyParser)(action)
  }
def actionSecureByRole(roles: Long*)(action: => SRQ[AnyContent] => Result)(implicit cS: ClassTag[S], cU: ClassTag[U]): EssentialAction = {
<API key>(roles: _*)()(action)
}
def <API key>(roles: Long*)(permissions: Long*)(action: => SRQ[AnyContent] => Result)(implicit cS: ClassTag[S], cU: ClassTag[U]): EssentialAction = {
val ab = new ActionBuilder[SRQ,AnyContent] {
override protected def executionContext: ExecutionContext = <API key>.executionContext
override def parser: BodyParser[AnyContent] = <API key>.parsers.defaultBodyParser
def invokeBlock[A](request: Request[A], block: (SRQ[A]) => Future[Result]): Future[Result] = {
<API key>(getUser(roles: _*)(permissions: _*),
<API key>.parsers.defaultBodyParser,
_ => Redirect(not<API key>))(<API key>.executionContext)
.authenticate(request, { authRequest: <API key>[A, U] =>
logStart()
block( new SecureRequest[A,U](authRequest.user, request))
})
}
}
ab(<API key>.parsers.defaultBodyParser)(action)
}
def <API key>(action: => U => Request[AnyContent] => Result)(implicit cS: ClassTag[S], cU: ClassTag[U]): EssentialAction =
Security.Authenticated(getUser, _ => Redirect(not<API key>))
{ user => actionMayBeSecure(request => action(user)(request)) }
/***
*
* Helpers
*/
def initSession(user: U)(implicit cS: ClassTag[S], cU: ClassTag[U]):(String,String) = {
val sessionClass = cS.runtimeClass
val userClass = cU.runtimeClass
val sessionConstructor = sessionClass.<API key>(userClass)
val us = sessionConstructor.newInstance(user)
val session:S = us match {
case s:S => s
case _ => throw new RuntimeException("unexpected")
}
env.userCache.set(session.getIdString,session,userCachingDuration)
(idInSession,session.getIdString)
}
def getSession(request: RequestHeader)(implicit cS: ClassTag[S]): Option[S] = request.session.get(idInSession).flatMap{ id:String =>
val s:Option[S] = env.userCache.get(id)
val s2:Option[S] = s.fold{
val m = cS.runtimeClass.getDeclaredMethod("restore",classOf[String])
val resS: Option[S] = m.invoke(null,id) match{case s: S=> Some(s); case _ => None}//= UserSession.restore(id)
resS.foreach(x=>env.userCache.set(x.getIdString,resS,userCachingDuration))
resS
} (x=>Some(x))
s2
}
def getUser(requiredRole: Long*)(requiredPermission: Long*)(request: RequestHeader)(implicit cS: ClassTag[S], cU: ClassTag[U]):Option[U] = {
val user = getUser(request)
val userWithRolePass = user.filter {user => requiredRole.exists(user.getRoles.contains(_))}
val <API key> = userWithRolePass.filter{user => user.getPermissions.containsAll(scala.collection.JavaConverters.seqAsJavaList(requiredPermission))}
<API key>
}
def getUser(request: RequestHeader)(implicit cS: ClassTag[S]):Option[U] = {
getSession(request).flatMap{s2 => Option(s2.getUser)}
}
}
// Request wrapper carrying an authenticated user (always present).
class SecureRequest[A, U<:UserBase](val user: U, request: Request[A]) extends WrappedRequest[A](request)
// Request wrapper carrying an optional user (anonymous access allowed).
class MayBeSecureRequest[A, U<:UserBase](val user: Option[U], request: Request[A]) extends WrappedRequest[A](request)
// Implicit enrichment that lets any Request expose `getUser`, regardless of
// whether it is a SecureRequest, a MayBeSecureRequest, or a plain request.
object <API key>{
  class GenericRequest[A](request: Request[A]){
    // Some(user) for secure requests, the wrapped Option for may-be-secure
    // requests, None for anything else.
    def getUser:Option[UserBase] = request match {
      case sr:SecureRequest[A,_] => Some(sr.user)
      case mr:MayBeSecureRequest[A,_] => mr.user
      case _ => None
    }
  }
  implicit def <API key>[A](request: Request[A]):GenericRequest[A] = new GenericRequest[A](request)
}
layout: post
title: jeecg-boot
date: 2021-08-29 21:01:55 +0800
categories: [Tool]
tags: [tool, doc, sh]
published: true
# jeecg-boot
SpringBoot 2.xSpringCloudAnt Design&VueMybatis-plusShiroJWT!
OnlineCoding->->MERGEJava70%
JeecgBoot OnlineOnline
JEECG: OnlineCodingMerge
JEECG: online
Jeecg-BootJ2EESAASMISOAERPCRMMerge70%
https://github.com/jeecgboot/jeecg-boot
* any list
{:toc} |
#include <iostream>
#include <fstream>
#include <vector>
#include <list>
#include <deque>
#include <stack>
#include <queue>
#include <set>
#include <map>
#include <bitset>
#include <algorithm>
#include <utility>
#include <functional>
#include <valarray>
#include <math.h>
#include <time.h>
#include <stdlib.h>
#include <stdio.h>
#include <ctype.h>
#include <string.h>
using namespace std;
// Loop helper macros. REV was truncated in the previous revision
// ("for(i = a; i > n; i") which does not compile; the decrement and the
// closing parenthesis are restored here.
#define REP(i, n) for(i = 0; i < (n); i++)
#define FOR(i, a, n) for(i = a; i < n; i++)
#define REV(i, a, n) for(i = a; i > n; i--)
typedef long long ll;
typedef long double ld;
// k: required number of '1's; f[c]: number of prefixes seen with exactly c ones.
int k, f[1000000];
// s: the input binary string.
string s;
// ans: number of substrings containing exactly k ones.
ll ans = 0;
int main(void) {
memset(f, 0, sizeof(f));
cin >> k >> s;
int ct = 0;
f[0] = 1;
for(int i = 0; i < (int) s.size(); i++) {
if(s[i] == '1') {
ct += 1;
}
if(ct >= k) ans += (ll) f[ct - k];
f[ct] += 1;
}
cout << ans << endl;
return 0;
} |
<?php
require_once 'ccUtils.php';
/**
 * Minimal file-based cache rooted at a directory.
 *
 * Paths passed to store()/retrieve() are resolved relative to the root via
 * ccPath::os().
 */
class ccCache
{
    /** @var string Cache root directory. */
    protected $_root;

    /**
     * @param string $root Existing directory used as the cache root.
     */
    public function __construct($root)
    {
        $this->setRoot($root);
    }

    /**
     * Returns the cache root directory.
     *
     * @return string
     */
    public function getRoot()
    {
        return $this->_root;
    }

    /**
     * Sets the cache root directory.
     *
     * @param string $root
     * @throws <API key> when $root is not an existing directory.
     */
    public function setRoot($root)
    {
        if(!is_dir($root))
        {
            throw new <API key>("Invalid root path given.");
        }
        $this->_root = $root;
    }

    /**
     * Writes $content to $path (relative to the root), creating parent
     * directories as needed.
     *
     * @param string $path
     * @param string $content
     * @return string The absolute file name written.
     */
    public function store($path, $content)
    {
        $file = ccPath::os($this->getRoot(), $path);
        $dir = dirname($file);
        // Fix: use is_dir() instead of file_exists(). A plain file at $dir
        // would previously pass the file_exists() check, skip mkdir(), and
        // let file_put_contents() fail with a confusing warning.
        if(!is_dir($dir))
        {
            mkdir($dir, 0777, true);
        }
        file_put_contents($file, $content);
        return $file;
    }

    /**
     * Reads the cached content at $path.
     *
     * @param string $path
     * @return string|null The content, or null when the entry does not exist.
     */
    public function retrieve($path)
    {
        $file = ccPath::os($this->getRoot(), $path);
        if(!file_exists($file))
        {
            return null;
        }
        return file_get_contents($file);
    }

    /**
     * Joins $contents with $glue and stores the result at $path.
     *
     * @param string   $path
     * @param string[] $contents
     * @param string   $glue
     * @return string The absolute file name written.
     */
    public function concatenate($path, $contents, $glue="\n")
    {
        $contents = implode($glue, $contents);
        return $this->store($path, $contents);
    }
}
# code_generator
Generates code from templates.
#include "robot_widget.hpp"
#include <cmath>
#include <stdexcept>
#include <rj_geometry/util.hpp>
#include <rj_common/vision_dot_pattern.hpp>
#include <rj_constants/constants.hpp>
// Initializes the widget with its default state: blue team, shell 0, no ball,
// and no wheel or ball-sense faults.
RobotWidget::RobotWidget(QWidget* /*parent*/, Qt::WindowFlags /*f*/) {
    _blueTeam = true;
    _shellID = 0;
    _hasBall = false;
    _ballSenseFault = false;
    // Previously the constructor also called setWheelFault()/setBallSenseFault(),
    // which duplicated the initialization below and triggered update() on a
    // widget that is still being constructed.
    for (bool& wheelFault : _wheelFaults) {
        wheelFault = false;
    }
}
void RobotWidget::setShellID(int shellID) { _shellID = shellID; }
// Switches the team color used for the center dot; repaints only on change.
void RobotWidget::setBlueTeam(bool blueTeam) {
    if (_blueTeam == blueTeam) {
        return;  // nothing changed, skip the repaint
    }
    _blueTeam = blueTeam;
    update();
}
bool RobotWidget::blueTeam() const { return _blueTeam; }
// Marks wheel `wheelIndex` (0..3) as faulty or healthy and repaints.
// Throws std::out_of_range for indices outside [0, 3].
void RobotWidget::setWheelFault(int wheelIndex, bool faulty) {
    const bool inRange = (wheelIndex >= 0) && (wheelIndex <= 3);
    if (!inRange) {
        throw std::out_of_range("Invalid wheel index");
    }
    _wheelFaults[wheelIndex] = faulty;
    update();
}
void RobotWidget::setBallSenseFault(bool faulty) { _ballSenseFault = faulty; }
// Records whether the robot currently possesses the ball; repaints on change.
void RobotWidget::setHasBall(bool hasBall) {
    if (_hasBall == hasBall) {
        return;
    }
    _hasBall = hasBall;
    update();
}
// draws a red X with @width = @height = @size centered at @center
void drawRedX(QPainter& painter, const QPointF& center, float size, float lineThickness = 0.01) {
    // Half of the X's stroke span: sqrt(size^2 + size^2) / 2 == size*sqrt(2)/2.
    // Closed form replaces the old powf()/sqrtf() round-trip.
    const float halfLen = 0.5f * size * static_cast<float>(M_SQRT2);
    painter.save();
    {
        QPen xPen(Qt::red, lineThickness);
        xPen.setCapStyle(Qt::RoundCap);
        painter.setPen(xPen);
        painter.translate(center);
        painter.rotate(45);
        painter.drawLine(QPointF(-halfLen / 2, 0), QPointF(halfLen / 2, 0));
        painter.rotate(90);
        painter.drawLine(QPointF(-halfLen / 2, 0), QPointF(halfLen / 2, 0));
    }
    painter.restore();
}
// Paints the robot top-down in meters: body chord, vision dot pattern, team
// dot, the four wheels (with red X on faulted wheels), and either a
// ball-sense-fault X or the ball at the mouth.
void RobotWidget::paintEvent(QPaintEvent* /*event*/) {
    QPainter painter(this);
    painter.setRenderHint(QPainter::Antialiasing);
    // move to middle
    painter.translate(rect().center());
    // scale so we can draw robot in units of meters
    float minPadding = 9;
    float scale = std::fmin((width() - minPadding * 2) / kRobotRadius,
                            (height() - minPadding * 2) / kRobotRadius) /
                  2;
    painter.scale(scale, scale);
    // draw robot body
    // note: angles are given to drawChord in sixteenths of a degree
    // The chord leaves a flat edge at the top — the robot's mouth.
    int span = 40;
    int start = (90 + span) * 16;
    int end = (360 - span * 2) * 16;
    painter.setBrush(Qt::black);
    painter.setPen(Qt::NoPen);
    painter.drawChord(QRectF(-kRobotRadius, -kRobotRadius, kRobotRadius * 2, kRobotRadius * 2),
                      start, end);
    // draw dots
    // Four colored dots arranged around the center; colors come from the
    // shell-id row of kDotPatternColors.
    painter.setPen(Qt::NoPen);
    for (int i = 0; i < 4; i++) {
        painter.setBrush(QBrush(kDotPatternColors[_shellID][i]));
        QPointF dotCenter;
        dotCenter.setX((i >= 2) ? kDotsSmallOffset : kDotsLargeOffset);
        dotCenter.setX(dotCenter.x() * ((i == 1 || i == 2) ? 1 : -1));
        dotCenter.setY((i <= 1) ? kDotsSmallOffset : kDotsLargeOffset);
        dotCenter.setY(dotCenter.y() * ((i <= 1) ? -1 : 1));
        painter.drawEllipse(dotCenter, kDotsRadius, kDotsRadius);
    }
    // draw center dot (team color)
    painter.setBrush(_blueTeam ? Qt::blue : Qt::yellow);
    painter.drawEllipse(QPointF(0, 0), kDotsRadius, kDotsRadius);
    const float RedXSize = 0.06;
    // draw wheels
    const float wheelWidth = 0.015;
    const float wheelRadius = 0.03;
    const float wheelDist = kRobotRadius + wheelWidth / 2;
    // Wheel azimuths in radians, counter-clockwise from +x.
    const float wheelAngles[] = {-M_PI * 0.8, M_PI * 0.8, M_PI * 0.2, M_PI * -0.2};
    for (int i = 0; i < 4; i++) {
        painter.save();
        {
            float angle = wheelAngles[i];
            // translate to center of wheel
            painter.translate(wheelDist * cosf(angle), wheelDist * sinf(angle));
            // draw a red X just outside a faulted wheel
            if (_wheelFaults[i]) {
                painter.save();
                {
                    float dist = RedXSize / 2;
                    painter.translate(cosf(angle) * dist, sinf(angle) * dist);
                    drawRedX(painter, QPointF(0, 0), RedXSize);
                }
                painter.restore();
            }
            // rotate to align the x-axis with the wheel radius
            painter.rotate(radians_to_degrees(angle) + 90);
            painter.setBrush(Qt::gray);
            const float wheelRounding = 0.01;
            painter.drawRoundedRect(
                QRectF(-wheelRadius, -wheelWidth / 2, wheelRadius * 2, wheelWidth), wheelRounding,
                wheelRounding);
        }
        painter.restore();
    }
    if (_ballSenseFault) {
        // draw a red X by the robot's mouth
        drawRedX(painter, QPointF(0, -kRobotRadius - (RedXSize / 2) + 0.02), RedXSize);
    } else if (_hasBall) {
        // draw orange golf ball
        const float ballRadius = 0.02135;
        static QColor ballColor(0xff, 0x90, 0);
        float ballCenterY = -(kRobotRadius + ballRadius) + 0.02;
        painter.save();
        {
            painter.translate(0, ballCenterY);
            painter.setBrush(ballColor);
            painter.drawEllipse(QRectF(-ballRadius, -ballRadius, ballRadius * 2, ballRadius * 2));
        }
        painter.restore();
    }
}
package it.redhat.demo.rest;
import it.redhat.demo.entity.Proposal;
import it.redhat.demo.service.ProposalService;
import org.jboss.resteasy.spi.validation.ValidateRequest;
import javax.inject.Inject;
import javax.validation.Valid;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import java.util.List;
@Path("proposal")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public class RestService {
@Inject
private ProposalService service;
@GET
public List<Proposal> all() {
return service.all();
}
@GET
@Path("{id}")
public Proposal get(@PathParam("id") Long id) {
return service.get(id);
}
@POST
@ValidateRequest
public Proposal create(@Valid Proposal person) {
return service.create(person);
}
@PUT
@ValidateRequest
public Proposal update(@Valid Proposal person) {
return service.update(person);
}
@DELETE
@Path("{id}")
public void delete(@PathParam("id") Long id) {
service.delete(id);
}
} |
# -*- coding: utf-8 -*-
from openerp import fields, models
class Partner(models.Model):
    """Extends ``res.partner`` with Open Academy instructor/session fields."""
    _inherit = 'res.partner'
    # New column on res.partner; partners are not instructors by default.
    instructor = fields.Boolean("Instructor", default=False)
    # Sessions this partner attended; read-only on the partner side.
    session_ids = fields.Many2many('openacademy.session',
        string="Attended Sessions", readonly=True)
#include <iostream>
#include <vector>
using namespace std;
/**
* :
* 1. ,
* 2. ,
*/
/**
 * Checks whether a sequence is a valid post-order traversal of some binary
 * search tree: the last element is the root, a prefix of smaller-or-equal
 * values forms the left subtree, and the remaining values (all > root) form
 * the right subtree, recursively.
 */
class Solution {
public:
    // Returns true when `sequence` could be the post-order traversal of a BST.
    // By this problem's convention an empty sequence is NOT a valid BST.
    bool VerifySquenceOfBST(std::vector<int> sequence) {
        if (sequence.empty()) {
            return false;
        }
        return verify(sequence, 0, static_cast<int>(sequence.size()) - 1);
    }

private:
    // Verifies sequence[lo..hi] (inclusive); the subtree root is sequence[hi].
    // Works on index ranges, replacing the previous version's O(n^2)
    // left/right vector copies at every recursion level.
    static bool verify(const std::vector<int>& seq, int lo, int hi) {
        // Zero, one or two nodes always form a valid BST post-order.
        if (hi - lo + 1 <= 2) {
            return true;
        }
        const int root = seq[hi];
        // First index whose value exceeds the root: start of the right subtree.
        int split = lo;
        while (split < hi && seq[split] <= root) {
            split++;
        }
        // Every remaining value must belong to the right subtree (>= root).
        for (int i = split; i < hi; i++) {
            if (seq[i] < root) {
                return false;
            }
        }
        return verify(seq, lo, split - 1) && verify(seq, split, hi - 1);
    }
};
int main(){
vector<int> vec = {1,2,3,4};
Solution s;
cout<<s.VerifySquenceOfBST(vec);
} |
<API key>
======================
Sample to discover babel-camel-lambda (Improved Java DSL for Apache Camel)
In this sample, you are supposed to define a route which will
- receive a String containing amounts
- sum the amounts
- sends the sum to a media depending on if it is positive or not
The route should be defined in the io.xtech.babel.lambda.sample.MyRoute class.
To get some feeling about what may be done with the babel-camel-lambda DSL, please have a look to the [Babel Lambda Test sources] (https://github.com/Crossing-Tech/babel-experimental/blob/master/babel-camel/babel-camel-lambda/src/test/java/io/xtech/babel/lambda/LambdaDSLTest.java).
You may also have a look to the [Babel documentation] (http://crossing-tech.github.io/babel).
To Test your route, you may use the MyRouteTest JUnit test.
To try your route, you may run the Runner class.
To run the Route (the Babel Scala one), please run `mvn package hawtio:spring` |
package com.nummulus.amqp.driver
import org.slf4j.LoggerFactory
import com.nummulus.amqp.driver.api.provider.AmqpGuardianActor
import com.nummulus.amqp.driver.configuration.TimeOutConfigurer
import com.nummulus.amqp.driver.configuration.QueueConfigurer
import com.typesafe.config.Config
import _root_.akka.actor.ActorRef
import _root_.akka.actor.ActorSystem
import _root_.akka.actor.Props
/**
* Default driver implementation.
*
* Every driver has a single connection with the broker. The connection is only
* established if a consumer is created. Every consumer will get a separate
* channel.
*/
private[driver] class DefaultDriver(connectionFactory: ConnectionFactory, config: Config) extends AmqpDriver
    with QueueConfigurer
    with TimeOutConfigurer {
  private val logger = LoggerFactory.getLogger(getClass)
  // All driver settings live under the "amqp" section of the supplied config.
  private val rootConfig = config.getConfig("amqp")
  // Lazy: neither the actor system nor the broker connection is created
  // until the first consumer/provider is requested.
  private lazy val actorSystem = ActorSystem("AmqpDriver")
  private lazy val connection = createConnection()
  /**
   * Returns an actor which can communicate with the services' operation.
   *
   * @param service name of the service owning the operation to consume
   * @param operation name of the operation to consume
   * @return new consumer
   */
  override def newConsumer(service: String, operation: String): ActorRef = {
    logger.info(s"Retrieving configuration for operation '$operation' on service '$service'")
    val queueConfiguration = <API key>(rootConfig, service, operation)
    // Each consumer gets its own channel on the shared connection.
    val channel = connection.createChannel()
    val timeOut = getConsumerTimeOut(rootConfig, service, operation)
    actorSystem.actorOf(Props(classOf[DefaultConsumer], channel, queueConfiguration, timeOut, IdGenerators.random))
  }
  /**
   * Returns an actor which acts as a liaison for a services' operation.
   *
   * @param operation name of the operation to provide
   * @return new provider
   * @throws QueueConfiguration if the queue has missing keys in the configuration file
   */
  override def newProvider(operation: String): ActorRef = {
    logger.info(s"Retrieving configuration for operation '$operation'")
    val queueConfiguration = <API key>(rootConfig, operation)
    // Each provider gets its own channel; the guardian is named after the queue.
    val channel = connection.createChannel()
    actorSystem.actorOf(Props(classOf[AmqpGuardianActor], channel, queueConfiguration, IdGenerators.random), queueConfiguration.queue + "Guardian")
  }
  /**
   * Returns a newly created connection to the broker.
   */
  private def createConnection(): Connection = {
    val host = rootConfig.getString("host")
    logger.info("Connecting to AMQP broker at {}", host)
    connectionFactory.setHost(host)
    connectionFactory.newConnection()
  }
}
import axios from "axios";
import {dispatchError} from "../dispatcher/dispatcher";
/**
 * Get operation detail.
 * Fetches the current operation (identified by the global `operationHash`,
 * defined elsewhere) and dispatches the screen for its chosen authentication
 * method, falling back to the regular review screen.
 * @returns {Function} Redux thunk resolving the operation detail.
 */
export function getOperationData() {
    return function (dispatch) {
        axios.post("./api/auth/operation/detail", {}, {
            headers: {
                'X-OPERATION-HASH': operationHash,
            }
        }).then((response) => {
            if (response.data.chosenAuthMethod) {
                // if authMethod is already chosen, skip choice and go directly to the authMethod
                switch (response.data.chosenAuthMethod) {
                    case "POWERAUTH_TOKEN":
                        dispatch({
                            type: "SHOW_SCREEN_TOKEN",
                            payload: response.data
                        });
                        return null;
                    case "SMS_KEY":
                        dispatch({
                            type: "SHOW_SCREEN_SMS",
                            payload: response.data
                        });
                        return null;
                    // otherwise show regular operation review with authMethod choice
                }
            }
            // Default: no (or unrecognized) chosen method — show the review screen.
            dispatch({
                type: "<API key>",
                payload: response.data
            });
            return null;
        }).catch((error) => {
            dispatchError(dispatch, error);
        })
    }
}
/**
 * Cancel the current operation (identified by the global `operationHash`).
 * On success, dispatches SHOW_SCREEN_ERROR carrying the server's message.
 * @returns {Function} Redux thunk; no return value.
 */
export function cancel() {
    return function (dispatch) {
        const config = {
            headers: {
                'X-OPERATION-HASH': operationHash,
            }
        };
        axios.post("./api/auth/operation/cancel", {}, config).then((response) => {
            const payload = {
                message: response.data.message
            };
            dispatch({type: "SHOW_SCREEN_ERROR", payload: payload});
            return null;
        }).catch((error) => {
            dispatchError(dispatch, error);
        })
    }
}
/**
 * Update an operation.
 * Stores the operation form data, then records the chosen authentication
 * method, and only then invokes the callback.
 * @param formData Operation form data.
 * @param chosenAuthMethod Chosen authentication method.
 * @param callback Callback to call when both updates have finished.
 * @returns {Function} Redux thunk; no return value.
 */
export function updateOperation(formData, chosenAuthMethod, callback) {
    return function (dispatch) {
        axios.put("./api/auth/operation/formData", {
            formData: formData
        }, {
            headers: {
                'X-OPERATION-HASH': operationHash,
            }
        }).then(() => {
            // Fix: the chosenAuthMethod update used to be fired without being
            // awaited — callback() could run before it completed and any error
            // from it was silently dropped. Returning the promise chains it.
            return axios.put("./api/auth/operation/chosenAuthMethod", {
                chosenAuthMethod: chosenAuthMethod
            }, {
                headers: {
                    'X-OPERATION-HASH': operationHash,
                }
            });
        }).then(() => {
            callback();
            return null;
        }).catch((error) => {
            dispatchError(dispatch, error);
        })
    }
}
package org.tuxdevelop.spring.batch.lightmin.server.fe.controller;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.validation.BindingResult;
import org.springframework.validation.Validator;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.servlet.mvc.support.RedirectAttributes;
import org.springframework.web.servlet.view.RedirectView;
import org.tuxdevelop.spring.batch.lightmin.server.fe.model.common.<API key>;
import org.tuxdevelop.spring.batch.lightmin.server.fe.model.common.BooleanModel;
import org.tuxdevelop.spring.batch.lightmin.server.fe.model.common.ContentPageModel;
import org.tuxdevelop.spring.batch.lightmin.server.fe.model.common.<API key>;
import org.tuxdevelop.spring.batch.lightmin.server.fe.model.server.scheduler.<API key>;
import org.tuxdevelop.spring.batch.lightmin.server.fe.model.server.scheduler.<API key>;
import org.tuxdevelop.spring.batch.lightmin.server.fe.model.server.scheduler.<API key>;
import org.tuxdevelop.spring.batch.lightmin.server.fe.model.server.scheduler.<API key>;
import org.tuxdevelop.spring.batch.lightmin.server.fe.service.<API key>;
import javax.servlet.http.HttpServletRequest;
@Controller
/**
 * Front-end controller for server-side scheduler administration: listing and
 * managing scheduler executions, and creating/updating/starting/stopping/
 * deleting scheduler configurations.
 */
public class <API key> extends CommonController {
    private final <API key> <API key>;
    private final Validator validator;

    public <API key>(final <API key> <API key>,
                     final Validator validator) {
        this.<API key> = <API key>;
        this.validator = validator;
    }

    /** Entry point: redirects to the executions overview. */
    @GetMapping(value = "/server-schedulers")
    public RedirectView init(final HttpServletRequest request) {
        return this.createRedirectView("<API key>", request);
    }

    //Executions

    /** Shows a page of scheduler executions, optionally filtered by state. */
    @GetMapping(value = "/<API key>")
    public void <API key>(final Model model,
                          @RequestParam(name = "start-index", defaultValue = "0") final Integer index,
                          @RequestParam(name = "page-size", defaultValue = "20") final Integer pageSize,
                          @RequestParam(name = "state", required = false) final Integer state) {
        this.initExecutionsModel(model, index, pageSize, state);
    }

    /** Deletes one execution, then re-renders the current page. */
    @PostMapping(value = "/<API key>", params = {"delete-execution"})
    public void deleteExecution(final Model model,
                                @RequestParam(name = "start-index", defaultValue = "0") final Integer index,
                                @RequestParam(name = "page-size", defaultValue = "20") final Integer pageSize,
                                @RequestParam(name = "execution-id") final Long executionId,
                                @RequestParam(name = "state", required = false) final Integer state) {
        this.<API key>.deleteExecution(executionId);
        this.initExecutionsModel(model, index, pageSize, state);
    }

    /** Stops one running execution, then re-renders the current page. */
    @PostMapping(value = "/<API key>", params = {"stop-execution"})
    public void stopExecution(final Model model,
                              @RequestParam(name = "start-index", defaultValue = "0") final Integer index,
                              @RequestParam(name = "page-size", defaultValue = "20") final Integer pageSize,
                              @RequestParam(name = "execution-id") final Long executionId,
                              @RequestParam(name = "state", required = false) final Integer state) {
        this.<API key>.stopExecution(executionId);
        this.initExecutionsModel(model, index, pageSize, state);
    }

    //Configurations

    /** Shows all scheduler configurations. */
    @GetMapping(value = "/<API key>")
    public void <API key>(final Model model) {
        this.<API key>(model);
    }

    /**
     * Renders the "add configuration" form. A flash attribute from a failed
     * submit (see the POST handler) takes precedence over a fresh model.
     */
    @GetMapping(value = "/<API key>", params = {"<API key>"})
    public void <API key>(
            final Model model,
            @RequestParam(name = "application") final String applicationName) {
        final <API key> configuration;
        if (model.containsAttribute("<API key>")) {
            configuration = (<API key>) model.asMap().get("<API key>");
        } else {
            configuration = new <API key>();
        }
        configuration.setApplicationName(applicationName);
        final <API key> <API key> = this.<API key>(applicationName);
        model.addAttribute("<API key>", configuration);
        model.addAttribute("modificationType", new <API key>(<API key>.ModificationType.ADD));
        model.addAttribute("<API key>", <API key>);
        model.addAttribute("schedulerStatus", <API key>.<API key>.values());
        model.addAttribute("booleanSelector", BooleanModel.values());
    }

    /**
     * Renders the "update configuration" form, pre-filled from the stored
     * configuration unless a failed submit left a flash attribute.
     */
    @GetMapping(value = "/<API key>", params = {"<API key>"})
    public void <API key>(
            final Model model,
            @RequestParam(name = "configuration-id") final long id) {
        final <API key> configuration;
        if (model.containsAttribute("<API key>")) {
            configuration = (<API key>) model.asMap().get("<API key>");
        } else {
            configuration = this.<API key>.findById(id);
        }
        final <API key> <API key> = this.<API key>(configuration.getApplicationName());
        model.addAttribute("<API key>", configuration);
        model.addAttribute("modificationType", new <API key>(<API key>.ModificationType.UPDATE));
        model.addAttribute("<API key>", <API key>);
        model.addAttribute("schedulerStatus", <API key>.<API key>.values());
        model.addAttribute("booleanSelector", BooleanModel.values());
    }

    /**
     * Handles the "add" submit: validates, and on error redirects back to the
     * form with the binding result as flash attributes (POST-redirect-GET).
     */
    @PostMapping(value = "/<API key>", params = {"add-scheduler"})
    public RedirectView <API key>(@ModelAttribute("<API key>") final <API key> configuration,
                                  final BindingResult bindingResult,
                                  final HttpServletRequest request,
                                  final RedirectAttributes redirectAttributes) {
        this.validator.validate(configuration, bindingResult);
        final RedirectView redirectView;
        if (bindingResult.hasErrors()) {
            redirectAttributes.addFlashAttribute("<API key>", configuration);
            redirectAttributes.addFlashAttribute(BindingResult.MODEL_KEY_PREFIX + "<API key>", bindingResult);
            redirectView = this.createRedirectView(
                    "<API key>?<API key>=param-error"
                            + "&application=" + configuration.getApplicationName(),
                    request);
        } else {
            this.<API key>.<API key>(configuration);
            redirectView = this.createRedirectView("<API key>", request);
        }
        return redirectView;
    }

    /** Handles the "update" submit; same POST-redirect-GET pattern as add. */
    @PostMapping(value = "/<API key>", params = {"update-scheduler"})
    public RedirectView <API key>(@ModelAttribute("<API key>") final <API key> configuration,
                                  final BindingResult bindingResult,
                                  final HttpServletRequest request,
                                  final RedirectAttributes redirectAttributes) {
        this.validator.validate(configuration, bindingResult);
        final RedirectView redirectView;
        if (bindingResult.hasErrors()) {
            redirectAttributes.addFlashAttribute("<API key>", configuration);
            redirectAttributes.addFlashAttribute(BindingResult.MODEL_KEY_PREFIX + "<API key>", bindingResult);
            redirectView = this.createRedirectView(
                    "<API key>?<API key>=param-error"
                            + "&configuration-id=" + configuration.getId(),
                    request);
        } else {
            this.<API key>.<API key>(configuration);
            redirectView = this.createRedirectView("<API key>", request);
        }
        return redirectView;
    }

    /** Disables a configuration and re-renders the overview. */
    @PostMapping(value = "/<API key>", params = {"disable-scheduler"})
    public void <API key>(final Model model,
                          @RequestParam(name = "configuration-id") final Long configurationId) {
        this.<API key>.<API key>(configurationId);
        this.<API key>(model);
    }

    /** Starts a configuration and re-renders the overview. */
    @PostMapping(value = "/<API key>", params = {"start-scheduler"})
    public void <API key>(final Model model,
                          @RequestParam(name = "configuration-id") final Long configurationId) {
        this.<API key>.startConfiguration(configurationId);
        this.<API key>(model);
    }

    /** Deletes a configuration and re-renders the overview. */
    @PostMapping(value = "/<API key>", params = {"delete-scheduler"})
    public void <API key>(final Model model,
                          @RequestParam(name = "configuration-id") final Long configurationId) {
        this.<API key>.deleteConfiguration(configurationId);
        this.<API key>(model);
    }

    /** Shows the detail view of one configuration. */
    @GetMapping(value = "/<API key>")
    public void <API key>(final Model model,
                          @RequestParam(name = "id") final Long id) {
        final <API key> configuration = this.<API key>.findById(id);
        model.addAttribute("<API key>", configuration);
    }

    //Mappers and Helpers

    /** Loads one page of executions into the model. */
    private void initExecutionsModel(final Model model, final Integer index, final Integer pageSize, final Integer state) {
        final ContentPageModel<<API key>> page =
                this.<API key>.<API key>(state, index, pageSize);
        model.addAttribute("pageModel", page);
    }

    /** Loads all configurations into the model. */
    private void <API key>(final Model model) {
        final <API key> <API key> =
                this.<API key>.<API key>();
        model.addAttribute("<API key>", <API key>);
    }

    /** Resolves the instance-execution information for an application. */
    private <API key> <API key>(@RequestParam(name = "application") final String applicationName) {
        final String <API key> = this.<API key>.<API key>(applicationName);
        return this.<API key>.<API key>(<API key>);
    }
}
package game;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;
import util.Pair;
/**
 * A MasterMind game round: holds a secret color combination and scores
 * guesses against it.
 */
public class MasterMind {
    enum Color {
        R, G, B, Y
    }

    // The secret combination; defensively copied so callers mutating the
    // constructor argument afterwards cannot change the game.
    private final Color[] colors;

    public MasterMind(Color[] colors) {
        Objects.requireNonNull(colors);
        this.colors = colors.clone();
    }

    /** Returns the number of slots in the secret combination. */
    public int getSlotNumber() {
        return colors.length;
    }

    /**
     * Scores a guess against the secret combination.
     *
     * @param guessed candidate colors, one per slot
     * @return a pair of (exact matches, right-color-wrong-position matches)
     * @throws <API key> if the guess has the wrong number of slots
     */
    public Pair<Integer> guess(Color[] guessed) {
        Objects.requireNonNull(guessed);
        if (guessed.length != colors.length) {
            throw new <API key>("Wrong element number!");
        }
        // Track which slots have been consumed so no peg is counted twice.
        boolean[] secretUsed = new boolean[colors.length];
        boolean[] guessUsed = new boolean[colors.length];
        int right = 0;
        for (int i = 0; i < colors.length; i++) {
            if (colors[i] == guessed[i]) {
                right++;
                secretUsed[i] = true;
                guessUsed[i] = true;
            }
        }
        // BUG FIX: the previous version only skipped guess slots consumed as
        // color-only matches, not slots consumed as exact matches, so an
        // exactly-matched guess slot could be counted again as a
        // wrong-position match for another secret slot.
        int wrong = 0;
        for (int i = 0; i < colors.length; i++) {
            if (secretUsed[i]) {
                continue;
            }
            for (int j = 0; j < colors.length; j++) {
                if (guessUsed[j] || colors[i] != guessed[j]) {
                    continue;
                }
                wrong++;
                guessUsed[j] = true;
                break;
            }
        }
        return new Pair<Integer>(right, wrong);
    }
}
package com.busata.bhammer.views.<API key>;
/**
 * Callback interface notified when the vertical scroll direction changes.
 */
public interface <API key> {
    /** Invoked when the user scrolls downward. */
    void onScrollDown();

    /** Invoked when the user scrolls upward. */
    void onScrollUp();
}
// Menu.h
// test2
#ifndef test2_Menu_h
#define test2_Menu_h

// Includes moved inside the guard; <string> added explicitly — std::string
// was previously only available through a transitive include.
#include <iostream>
#include <sstream>
#include <string>

/**
 * Settings chosen by the player(s) in the start menu.
 */
class GameSettings {
public:
    std::string Player1;  // name of player 1
    std::string Player2;  // name of player 2
    int Map;              // selected map index
};

// Runs the interactive start menu and returns the chosen settings.
GameSettings StartMenu();

#endif
include ../../makefile.conf
# Sub-directories, sources and objects that are always built.
DIRS = minc
SRCS = parse_with_minc.cpp
OBJS = parse_with_minc.o
# Optional Perl front end: on macOS the include flags come from a helper
# script; elsewhere from ExtUtils::Embed.
ifeq ($(PERL_SUPPORT), TRUE)
DIRS += perl
OBJS += parse_with_perl.o
SRCS += parse_with_perl.c
ifeq ($(ARCH), MACOSX)
PERL_CCFLAGS = `sh perl/print_perl_includes.sh $(PERL) $(BUILD_ARCH)`
else
PERL_CCFLAGS = `$(PERL) -MExtUtils::Embed -e ccopts`
endif
endif
# Optional Python front end.
ifeq ($(PYTHON_SUPPORT), TRUE)
DIRS += python
OBJS += parse_with_python.o
SRCS += parse_with_python.c
PYTHON_CCFLAGS = -I`$(PYTHON) python/<API key>.py`
ifeq ($(<API key>), TRUE)
PYDEF = -DPYEXT_INIT
endif
endif
# Build the sub-directories first, then the local objects.
all: $(DIRS) $(OBJS)
# Double-colon rule: always recurse into each sub-directory.
$(DIRS)::
	@echo "making all in parser..."
	$(MAKE) $(MFLAGS) -C $@
parse_with_minc.o: parse_with_minc.cpp rtcmix_parse.h
	$(CXX) $(CXXFLAGS) -c parse_with_minc.cpp
parse_with_perl.o: parse_with_perl.c rtcmix_parse.h
	$(CC) $(CFLAGS) $(PERL_CCFLAGS) -DSHAREDLIBDIR=\"$(LIBDESTDIR)\" -c parse_with_perl.c
parse_with_python.o: parse_with_python.c rtcmix_parse.h
	$(CC) $(CFLAGS) $(PYTHON_CCFLAGS) $(PYDEF) -c parse_with_python.c
.PHONY: install clean
# Recurse into each sub-directory for install/uninstall/clean/cleanall.
install:
	@for DIR in $(DIRS); \
	do \
	( cd $$DIR; $(MAKE) $(MFLAGS) install; ) \
	done
uninstall:
	@for DIR in $(DIRS); \
	do \
	( cd $$DIR; $(MAKE) $(MFLAGS) uninstall; ) \
	done
clean:
	$(RM) *.o core
	@for DIR in $(DIRS); \
	do \
	( cd $$DIR; $(MAKE) $(MFLAGS) clean; ) \
	done
cleanall:
	$(RM) *.o depend core
	@for DIR in $(DIRS); \
	do \
	( cd $$DIR; $(MAKE) $(MFLAGS) cleanall; ) \
	done
# Generate the `depend` file via the compiler (skipped on SGI, see below).
depend:
ifeq ($(ARCH),SGI)
# Note: -MM is a gcc extension. If no gcc, use -M.
# # Unfortunately, this doesn't work... -JGG
# # $(SHELL) -ec '$(CC) -M $(CFLAGS) $(SRCS)' > depend
else
	-$(SHELL) -ec '$(CC) -M $(CFLAGS) $(PERL_CCFLAGS) $(PYTHON_CCFLAGS) $(SRCS)' > depend
	@for DIR in $(DIRS); \
	do \
	( cd $$DIR; $(MAKE) $(MFLAGS) depend; ) \
	done
endif
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace Ao3tracksync.Areas.HelpPage
{
<summary>
This class will generate the samples for the help page.
</summary>
public class <API key>
{
/// <summary>
/// Initializes a new instance of the <see cref="<API key>"/> class.
/// </summary>
/// (Fix: restored the "///" prefixes — the XML doc comments had lost them,
/// which is not valid C#.)
public <API key>()
{
    <API key> = new Dictionary<HelpPageSampleKey, Type>();
    ActionSamples = new Dictionary<HelpPageSampleKey, object>();
    SampleObjects = new Dictionary<Type, object>();
    <API key> = new List<Func<<API key>, Type, object>>
    {
        <API key>,
    };
}
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> <API key> { get; internal set; }

/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }

/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when the factory successfully returns a non-<see langref="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code><API key>.Insert(0, func)</code> to provide an override and
/// <code><API key>.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:<API key>",
    Justification = "This is an appropriate nesting of generic types")]
public IList<Func<<API key>, Type, object>> <API key> { get; private set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<<API key>, object> GetSampleRequests(ApiDescription api)
{
    return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<<API key>, object> GetSampleResponses(ApiDescription api)
{
    return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<<API key>, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
    if (api == null)
    {
        throw new <API key>("api");
    }
    string controllerName = api.ActionDescriptor.<API key>.ControllerName;
    string actionName = api.ActionDescriptor.ActionName;
    IEnumerable<string> parameterNames = api.<API key>.Select(p => p.Name);
    Collection<MediaTypeFormatter> formatters;
    Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
    var samples = new Dictionary<<API key>, object>();
    // Use the samples provided directly for actions
    var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
    foreach (var actionSample in actionSamples)
    {
        samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
    }
    // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
    // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
    if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
    {
        object sampleObject = GetSampleObject(type);
        foreach (var formatter in formatters)
        {
            foreach (<API key> mediaType in formatter.SupportedMediaTypes)
            {
                if (!samples.ContainsKey(mediaType))
                {
                    object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
                    // If no sample found, try generate sample using formatter and sample object
                    if (sample == null && sampleObject != null)
                    {
                        sample = <API key>(formatter, sampleObject, type, mediaType);
                    }
                    samples.Add(mediaType, WrapSampleIfString(sample));
                }
            }
        }
    }
    return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters, or null when none is registered.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, <API key> mediaType, SampleDirection sampleDirection)
{
    object sample;
    // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
    // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
    // If still not found, try to get the sample provided for the specified mediaType and type.
    // Finally, try to get the sample provided for the specified mediaType.
    if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
        ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
        ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
        ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
    {
        return sample;
    }
    return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="<API key>"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="<API key>"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object, or null when no registration or factory produced one.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:<API key>",
    Justification = "Even if all items in <API key> throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
    object sampleObject;
    if (!SampleObjects.TryGetValue(type, out sampleObject))
    {
        // No specific object available, try our factories.
        foreach (Func<<API key>, Type, object> factory in <API key>)
        {
            if (factory == null)
            {
                // Users may have inserted null entries into the list; skip them.
                continue;
            }
            try
            {
                sampleObject = factory(this, type);
                if (sampleObject != null)
                {
                    // First factory to produce a non-null object wins.
                    break;
                }
            }
            catch
            {
                // Ignore any problems encountered in the factory; go on to the next one (if any).
            }
        }
    }
    return sampleObject;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type <API key>(ApiDescription api)
{
    string controllerName = api.ActionDescriptor.<API key>.ControllerName;
    string actionName = api.ActionDescriptor.ActionName;
    IEnumerable<string> parameterNames = api.<API key>.Select(p => p.Name);
    Collection<MediaTypeFormatter> formatters;
    // Always resolves in the request direction; the narrowed formatter list is discarded.
    return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
/// <returns>The resolved type, or null when no body type applies.</returns>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
    if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
    {
        throw new <API key>("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
    }
    if (api == null)
    {
        throw new <API key>("api");
    }
    Type type;
    // A type explicitly registered for this action (exact parameter list or the
    // "*" wildcard) overrides whatever the ApiDescription reports.
    if (<API key>.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
        <API key>.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
    {
        // Re-compute the supported formatters based on type
        Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
        foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
        {
            if (IsFormatSupported(sampleDirection, formatter, type))
            {
                newFormatters.Add(formatter);
            }
        }
        formatters = newFormatters;
    }
    else
    {
        switch (sampleDirection)
        {
            case SampleDirection.Request:
                // The request body type comes from the first FromBody parameter, if any.
                <API key> <API key> = api.<API key>.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                type = <API key> == null ? null : <API key>.ParameterDescriptor.ParameterType;
                formatters = api.<API key>;
                break;
            case SampleDirection.Response:
            default:
                type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                formatters = api.<API key>;
                break;
        }
    }
    return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns>A <c>TextSample</c> with the serialized body, or an <c>InvalidSample</c> describing why serialization failed.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:<API key>", Justification = "The exception is recorded as InvalidSample.")]
public virtual object <API key>(MediaTypeFormatter formatter, object value, Type type, <API key> mediaType)
{
    if (formatter == null)
    {
        throw new <API key>("formatter");
    }
    if (mediaType == null)
    {
        throw new <API key>("mediaType");
    }
    object sample = String.Empty;
    MemoryStream ms = null;
    HttpContent content = null;
    try
    {
        if (formatter.CanWriteType(type))
        {
            // Serialize synchronously into an in-memory buffer, then read it back as text.
            ms = new MemoryStream();
            content = new ObjectContent(type, value, formatter, mediaType);
            formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
            ms.Position = 0;
            // Reader is intentionally not disposed: disposing it would close ms,
            // which the finally block already disposes.
            StreamReader reader = new StreamReader(ms);
            string <API key> = reader.ReadToEnd();
            // Pretty-print when the media type looks like XML or JSON; otherwise
            // keep the raw serialized text.
            if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
            {
                <API key> = TryFormatXml(<API key>);
            }
            else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
            {
                <API key> = TryFormatJson(<API key>);
            }
            sample = new TextSample(<API key>);
        }
        else
        {
            sample = new InvalidSample(String.Format(
                CultureInfo.CurrentCulture,
                "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                mediaType,
                formatter.GetType().Name,
                type.Name));
        }
    }
    catch (Exception e)
    {
        sample = new InvalidSample(String.Format(
            CultureInfo.CurrentCulture,
            "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
            formatter.GetType().Name,
            mediaType.MediaType,
            UnwrapException(e).Message));
    }
    finally
    {
        if (ms != null)
        {
            ms.Dispose();
        }
        if (content != null)
        {
            content.Dispose();
        }
    }
    return sample;
}
// Returns the most useful exception for display: the first inner exception of
// a flattened AggregateException (as thrown by Task.Wait), or the exception
// itself when it is not an aggregate.
internal static Exception UnwrapException(Exception exception)
{
    var aggregate = exception as AggregateException;
    return aggregate == null ? exception : aggregate.Flatten().InnerException;
}
// Default factory for sample objects
// Ignores the generator argument and asks a fresh ObjectGenerator to build a
// default instance of the requested type (may return null when it cannot).
private static object <API key>(<API key> sampleGenerator, Type type)
{
    // Try to create a default sample object
    ObjectGenerator objectGenerator = new ObjectGenerator();
    return objectGenerator.GenerateObject(type);
}
[SuppressMessage("Microsoft.Design", "CA1031:<API key>", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
    // Pretty-print by round-tripping through Newtonsoft.Json; any parse
    // failure means the text was not valid JSON, so return it unchanged.
    try
    {
        return JsonConvert.SerializeObject(JsonConvert.DeserializeObject(str), Formatting.Indented);
    }
    catch
    {
        // can't parse JSON, return the original string
        return str;
    }
}
[SuppressMessage("Microsoft.Design", "CA1031:<API key>", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
    // Re-indent via LINQ to XML; a parse failure means the payload was not
    // well-formed XML, so hand back the raw string.
    try
    {
        return XDocument.Parse(str).ToString();
    }
    catch
    {
        // can't parse XML, return the original string
        return str;
    }
}
// A formatter supports a type for requests when it can deserialize (read) it,
// and for responses when it can serialize (write) it. Any direction outside
// the enum's defined values is treated as unsupported.
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
    if (sampleDirection == SampleDirection.Request)
    {
        return formatter.CanReadType(type);
    }
    if (sampleDirection == SampleDirection.Response)
    {
        return formatter.CanWriteType(type);
    }
    return false;
}
// Enumerates every sample registered in ActionSamples that matches the given
// controller, action, parameter list and direction. Implemented as an iterator,
// so nothing (including the parameter-name set) is built until enumeration.
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
    HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
    foreach (var sample in ActionSamples)
    {
        HelpPageSampleKey sampleKey = sample.Key;
        // A key registered with the single name "*" is a wildcard that matches
        // any parameter list; otherwise the name sets must match exactly
        // (case-insensitively, order-independently).
        if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
            String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
            (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
            sampleDirection == sampleKey.SampleDirection)
        {
            yield return sample;
        }
    }
}
// Raw string samples are wrapped in a TextSample so the help page renders them
// as preformatted text; every other sample object passes through untouched.
private static object WrapSampleIfString(object sample)
{
    var text = sample as string;
    return text == null ? sample : new TextSample(text);
}
}
} |
/* Base page sizing and typography.
   NOTE(review): `inherit` inside a font-family list is invalid CSS — browsers
   drop the entire declaration, so body/html fall back to the browser default
   font. Confirm what first-choice font was intended here. */
body,
html {
    font-family: inherit,sans-serif;
    width: 100%;
    height: 100%;
}
/* Bold headline stack.
   NOTE(review): quoting "sans-serif" makes it a literal family name rather
   than the generic keyword — confirm this is intentional. */
body,
h1,
h2,
h3,
h4,
h5,
h6 {
    font-family: "Helvetica","Arial","sans-serif";
    font-weight: 700;
}
/* Top navigation bar text size. */
.topnav {
    font-size: 14px;
}
/* Lead paragraph: slightly larger, light weight. */
.lead {
    font-size: 16px;
    font-weight: 200;
}
/* Hero header with a full-bleed background photo; light text for contrast. */
.intro-header {
    padding-top: 50px; /* If you're making other pages, make sure there is 50px of padding to make sure the navbar doesn't overlap content! */
    padding-bottom: 50px;
    text-align: center;
    color: #f8f8f8;
    background: url(../img/intro-bg.jpg) no-repeat center center;
    background-size: cover;
}
/* Vertically centers the hero copy using percentage padding. */
.intro-message {
    position: relative;
    padding-top: 20%;
    padding-bottom: 20%;
}
.intro-message > h1 {
    margin: 0;
    text-shadow: 2px 2px 3px rgba(0,0,0,0.6);
    font-size: 5em;
}
/* Thin two-tone horizontal rule under the hero heading. */
.intro-divider {
    width: 400px;
    border-top: 1px solid #f8f8f8;
    border-bottom: 1px solid rgba(0,0,0,0.2);
}
.intro-message > h3 {
    text-shadow: 2px 2px 3px rgba(0,0,0,0.6);
}
/* Phone layout: tighter hero, stacked list items, full-width divider. */
@media(max-width:767px) {
    .intro-message {
        padding-bottom: 15%;
    }
    .intro-message > h1 {
        font-size: 3em;
    }
    ul.<API key> > li {
        display: block;
        margin-bottom: 20px;
        padding: 0;
    }
    ul.<API key> > li:last-child {
        margin-bottom: 0;
    }
    .intro-divider {
        width: 100%;
    }
}
/* Small uppercase label for social-network names. */
.network-name {
    text-transform: uppercase;
    font-size: 14px;
    font-weight: 400;
    letter-spacing: 2px;
}
/* Alternating content band (variant A). */
.content-section-a {
    padding: 50px 0;
    /* Fixed: was `#fffs`, an invalid hex value that conforming browsers drop
       entirely (leaving a transparent background). White matches the footer
       and the section's bordered counterpart below. */
    background-color: #fff;
}
/* Alternating content band (variant B): bordered top and bottom. */
.content-section-b {
    padding: 50px 0;
    border-top: 1px solid #e7e7e7;
    border-bottom: 1px solid #e7e7e7;
}
.section-heading {
    margin-bottom: 30px;
}
/* Short decorative rule under section headings (class name redacted). */
.<API key> {
    float: left;
    width: 200px;
    border-top: 3px solid #e7e7e7;
}
/* Full-width call-to-action band. */
.banner {
    padding: 100px 0;
    color: #333;
    background-color:#f8f8f8;
    background-size: cover;
}
.banner h2 {
    margin: 0;
    text-shadow: 2px 2px 3px rgba(0,0,0,0.6);
    font-size: 3em;
}
.banner ul {
    margin-bottom: 0;
}
/* Social/button list floated beside the banner copy (class name redacted). */
.<API key> {
    float: right;
    margin-top: 0;
}
/* Below large desktops: drop the float and add breathing room. */
@media(max-width:1199px) {
    ul.<API key> {
        float: left;
        margin-top: 15px;
    }
}
/* Phones: shrink the banner heading and stack the list vertically. */
@media(max-width:767px) {
    .banner h2 {
        margin: 0;
        text-shadow: 2px 2px 3px rgba(0,0,0,0.6);
        font-size: 3em;
    }
    ul.<API key> > li {
        display: block;
        margin-bottom: 20px;
        padding: 0;
    }
    ul.<API key> > li:last-child {
        margin-bottom: 0;
    }
}
/* Page footer. */
footer {
    padding: 50px 0;
    background-color: #fff;
}
p.copyright {
    margin: 15px 0 0;
}
<a href="
<script type="text/javascript" async defer src="//assets.pinterest.com/js/pinit.js"></script> |
project_path: /web/_project.yaml
book_path: /web/tools/_book.yaml
description: Reference documentation for the "Offscreen Images" Lighthouse audit.
{# wf_updated_on: 2017-05-31 #}
{# wf_published_on: 2017-05-31 #}
# Offscreen Images {: .page-title }
## Why the audit is important {: #why }
Offscreen images are images that appear [below the fold][BTF]. Since users can't
see offscreen images when they load a page, there's no reason to download the
offscreen images as part of the initial page load. As a result, deferring the
load of offscreen images can speed up page load time and time to interactive.
[BTF]: https://en.wikipedia.org/wiki/Above_the_fold#Below_the_fold
## How to pass the audit {: #how }
To pass this audit, refactor your pages to only download above-the-fold images
during the initial request. Applying this strategy to your JS, HTML, CSS, and
other resources can also speed up page load time. See [Critical Rendering
Path][CRP] to learn more.
[CRP]: /web/fundamentals/performance/<API key>/
Consider using an [<API key>][IO] to intelligently determine when to
lazy-load offscreen images. For example, suppose you have some images at the
bottom of a very long page. With an <API key>, you can load the
images only when the user has scrolled halfway down the page. See [Intersect
all the things!][IATT] for more on this approach.
[IATT]: /web/updates/2016/04/<API key>#<API key>
[IO]: https://developers.google.com/web/updates/2016/04/<API key>
If you do use an <API key>, make sure to include the
[polyfill][polyfill], because native browser support is limited.
[polyfill]: https://github.com/WICG/<API key>/tree/gh-pages/polyfill
{% include "web/tools/lighthouse/audits/<API key>.html" %}
Lighthouse flags offscreen images that were requested before the
Time To Interactive (TTI) event.
{% include "web/tools/lighthouse/audits/_feedback/offscreen-images.html" %} |
package com.taskadapter.redmineapi;
import com.taskadapter.redmineapi.bean.User;
import com.taskadapter.redmineapi.internal.Transport;
import java.util.Date;
public class UserGenerator {
public static User generateRandomUser(Transport transport) {
long randomNumber = new Date().getTime();
return new User(transport)
.setFirstName("fname")
.setLastName("lname")
.setLogin("login" + randomNumber)
.setMail("somemail" + randomNumber + "@somedomain.com")
.setPassword("zzzz1234");
}
} |
use gl;
use ToGlEnum;
/// List of client-side pixel formats.
///
/// These are all the possible formats of data when uploading to a texture.
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ClientFormat {
    U8,
    U8U8,
    U8U8U8,
    U8U8U8U8,
    I8,
    I8I8,
    I8I8I8,
    I8I8I8I8,
    U16,
    U16U16,
    U16U16U16,
    U16U16U16U16,
    I16,
    I16I16,
    I16I16I16,
    I16I16I16I16,
    U32,
    U32U32,
    U32U32U32,
    U32U32U32U32,
    I32,
    I32I32,
    I32I32I32,
    I32I32I32I32,
    U3U3U2,
    U5U6U5,
    U4U4U4U4,
    U5U5U5U1,
    U10U10U10U2,
    F16,
    F16F16,
    F16F16F16,
    F16F16F16F16,
    F32,
    F32F32,
    F32F32F32,
    F32F32F32F32,
}

impl ClientFormat {
    /// Returns the size in bytes of a pixel of this type.
    pub fn get_size(&self) -> usize {
        use std::mem;

        match *self {
            ClientFormat::U8 => 1 * mem::size_of::<u8>(),
            ClientFormat::U8U8 => 2 * mem::size_of::<u8>(),
            ClientFormat::U8U8U8 => 3 * mem::size_of::<u8>(),
            ClientFormat::U8U8U8U8 => 4 * mem::size_of::<u8>(),
            ClientFormat::I8 => 1 * mem::size_of::<i8>(),
            ClientFormat::I8I8 => 2 * mem::size_of::<i8>(),
            ClientFormat::I8I8I8 => 3 * mem::size_of::<i8>(),
            ClientFormat::I8I8I8I8 => 4 * mem::size_of::<i8>(),
            ClientFormat::U16 => 1 * mem::size_of::<u16>(),
            ClientFormat::U16U16 => 2 * mem::size_of::<u16>(),
            ClientFormat::U16U16U16 => 3 * mem::size_of::<u16>(),
            ClientFormat::U16U16U16U16 => 4 * mem::size_of::<u16>(),
            ClientFormat::I16 => 1 * mem::size_of::<i16>(),
            ClientFormat::I16I16 => 2 * mem::size_of::<i16>(),
            ClientFormat::I16I16I16 => 3 * mem::size_of::<i16>(),
            ClientFormat::I16I16I16I16 => 4 * mem::size_of::<i16>(),
            ClientFormat::U32 => 1 * mem::size_of::<u32>(),
            ClientFormat::U32U32 => 2 * mem::size_of::<u32>(),
            ClientFormat::U32U32U32 => 3 * mem::size_of::<u32>(),
            ClientFormat::U32U32U32U32 => 4 * mem::size_of::<u32>(),
            ClientFormat::I32 => 1 * mem::size_of::<i32>(),
            ClientFormat::I32I32 => 2 * mem::size_of::<i32>(),
            ClientFormat::I32I32I32 => 3 * mem::size_of::<i32>(),
            ClientFormat::I32I32I32I32 => 4 * mem::size_of::<i32>(),
            // Packed formats: total bits divided by 8 gives whole bytes.
            ClientFormat::U3U3U2 => (3 + 3 + 2) / 8,
            ClientFormat::U5U6U5 => (5 + 6 + 5) / 8,
            ClientFormat::U4U4U4U4 => (4 + 4 + 4 + 4) / 8,
            ClientFormat::U5U5U5U1 => (5 + 5 + 5 + 1) / 8,
            // Fixed: this packed 32-bit format previously divided by 2,
            // reporting 16 bytes per pixel instead of the correct 4.
            ClientFormat::U10U10U10U2 => (10 + 10 + 10 + 2) / 8,
            ClientFormat::F16 => 16 / 8,
            ClientFormat::F16F16 => (16 + 16) / 8,
            ClientFormat::F16F16F16 => (16 + 16 + 16) / 8,
            ClientFormat::F16F16F16F16 => (16 + 16 + 16 + 16) / 8,
            ClientFormat::F32 => 1 * mem::size_of::<f32>(),
            ClientFormat::F32F32 => 2 * mem::size_of::<f32>(),
            ClientFormat::F32F32F32 => 3 * mem::size_of::<f32>(),
            ClientFormat::F32F32F32F32 => 4 * mem::size_of::<f32>(),
        }
    }

    /// Returns the number of components of this client format.
    pub fn get_num_components(&self) -> u8 {
        match *self {
            ClientFormat::U8 => 1,
            ClientFormat::U8U8 => 2,
            ClientFormat::U8U8U8 => 3,
            ClientFormat::U8U8U8U8 => 4,
            ClientFormat::I8 => 1,
            ClientFormat::I8I8 => 2,
            ClientFormat::I8I8I8 => 3,
            ClientFormat::I8I8I8I8 => 4,
            ClientFormat::U16 => 1,
            ClientFormat::U16U16 => 2,
            ClientFormat::U16U16U16 => 3,
            ClientFormat::U16U16U16U16 => 4,
            ClientFormat::I16 => 1,
            ClientFormat::I16I16 => 2,
            ClientFormat::I16I16I16 => 3,
            ClientFormat::I16I16I16I16 => 4,
            ClientFormat::U32 => 1,
            ClientFormat::U32U32 => 2,
            ClientFormat::U32U32U32 => 3,
            ClientFormat::U32U32U32U32 => 4,
            ClientFormat::I32 => 1,
            ClientFormat::I32I32 => 2,
            ClientFormat::I32I32I32 => 3,
            ClientFormat::I32I32I32I32 => 4,
            ClientFormat::U3U3U2 => 3,
            ClientFormat::U5U6U5 => 3,
            ClientFormat::U4U4U4U4 => 4,
            ClientFormat::U5U5U5U1 => 4,
            ClientFormat::U10U10U10U2 => 4,
            ClientFormat::F16 => 1,
            ClientFormat::F16F16 => 2,
            ClientFormat::F16F16F16 => 3,
            ClientFormat::F16F16F16F16 => 4,
            ClientFormat::F32 => 1,
            ClientFormat::F32F32 => 2,
            ClientFormat::F32F32F32 => 3,
            ClientFormat::F32F32F32F32 => 4,
        }
    }
}
/// List of uncompressed pixel formats that contain floating-point-like data.
///
/// Some formats are marked as "guaranteed to be supported". What this means is that you are
/// certain that the backend will use exactly these formats. If you try to use a format that
/// is not supported by the backend, it will automatically fall back to a larger format.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum <API key> {
    /// Guaranteed to be supported for both textures and renderbuffers.
    U8,
    /// Guaranteed to be supported for textures.
    I8,
    /// Guaranteed to be supported for both textures and renderbuffers.
    U16,
    /// Guaranteed to be supported for textures.
    I16,
    /// Guaranteed to be supported for both textures and renderbuffers.
    U8U8,
    /// Guaranteed to be supported for textures.
    I8I8,
    /// Guaranteed to be supported for both textures and renderbuffers.
    U16U16,
    /// Guaranteed to be supported for textures.
    I16I16,
    /// Maps to `gl::R3_G3_B2` below. NOTE(review): the name looks like a typo
    /// for `U3U3U2`, but it is public API — confirm upstream before renaming.
    U3U32U,
    U4U4U4,
    U5U5U5,
    /// Guaranteed to be supported for textures.
    U8U8U8,
    /// Guaranteed to be supported for textures.
    I8I8I8,
    U10U10U10,
    U12U12U12,
    /// Guaranteed to be supported for textures.
    I16I16I16,
    U2U2U2U2,
    U4U4U4U4,
    U5U5U5U1,
    /// Guaranteed to be supported for both textures and renderbuffers.
    U8U8U8U8,
    /// Guaranteed to be supported for textures.
    I8I8I8I8,
    /// Guaranteed to be supported for both textures and renderbuffers.
    U10U10U10U2,
    U12U12U12U12,
    /// Guaranteed to be supported for both textures and renderbuffers.
    U16U16U16U16,
    /// Guaranteed to be supported for both textures and renderbuffers.
    F16,
    /// Guaranteed to be supported for both textures and renderbuffers.
    F16F16,
    /// Guaranteed to be supported for textures.
    F16F16F16,
    /// Guaranteed to be supported for both textures and renderbuffers.
    F16F16F16F16,
    /// Guaranteed to be supported for both textures and renderbuffers.
    F32,
    /// Guaranteed to be supported for both textures and renderbuffers.
    F32F32,
    /// Guaranteed to be supported for textures.
    F32F32F32,
    /// Guaranteed to be supported for both textures and renderbuffers.
    F32F32F32F32,
    /// Guaranteed to be supported for both textures and renderbuffers.
    F11F11F10,
    /// Uses three components of 9 bits of precision that all share the same exponent.
    ///
    /// Use this format only if all the components are approximately equal.
    ///
    /// Guaranteed to be supported for textures.
    F9F9F9,
}

impl <API key> {
    /// Turns this format into a more generic `TextureFormat`.
    pub fn to_texture_format(self) -> TextureFormat {
        TextureFormat::UncompressedFloat(self)
    }
}

impl ToGlEnum for <API key> {
    fn to_glenum(&self) -> gl::types::GLenum {
        match *self {
            <API key>::U8 => gl::R8,
            <API key>::I8 => gl::R8_SNORM,
            <API key>::U16 => gl::R16,
            <API key>::I16 => gl::R16_SNORM,
            <API key>::U8U8 => gl::RG8,
            <API key>::I8I8 => gl::RG8_SNORM,
            <API key>::U16U16 => gl::RG16,
            <API key>::I16I16 => gl::RG16_SNORM,
            <API key>::U3U32U => gl::R3_G3_B2,
            <API key>::U4U4U4 => gl::RGB4,
            <API key>::U5U5U5 => gl::RGB5,
            <API key>::U8U8U8 => gl::RGB8,
            <API key>::I8I8I8 => gl::RGB8_SNORM,
            <API key>::U10U10U10 => gl::RGB10,
            <API key>::U12U12U12 => gl::RGB12,
            <API key>::I16I16I16 => gl::RGB16_SNORM,
            <API key>::U2U2U2U2 => gl::RGBA2,
            <API key>::U4U4U4U4 => gl::RGBA4,
            <API key>::U5U5U5U1 => gl::RGB5_A1,
            <API key>::U8U8U8U8 => gl::RGBA8,
            <API key>::I8I8I8I8 => gl::RGBA8_SNORM,
            <API key>::U10U10U10U2 => gl::RGB10_A2,
            <API key>::U12U12U12U12 => gl::RGBA12,
            <API key>::U16U16U16U16 => gl::RGBA16,
            <API key>::F16 => gl::R16F,
            <API key>::F16F16 => gl::RG16F,
            <API key>::F16F16F16 => gl::RGB16F,
            <API key>::F16F16F16F16 => gl::RGBA16F,
            <API key>::F32 => gl::R32F,
            <API key>::F32F32 => gl::RG32F,
            <API key>::F32F32F32 => gl::RGB32F,
            <API key>::F32F32F32F32 => gl::RGBA32F,
            <API key>::F11F11F10 => gl::R11F_G11F_B10F,
            <API key>::F9F9F9 => gl::RGB9_E5,
        }
    }
}
/// List of uncompressed pixel formats that contain signed integral data.
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum <API key> {
    I8,
    I16,
    I32,
    I8I8,
    I16I16,
    I32I32,
    I8I8I8,
    /// May not be supported by renderbuffers.
    I16I16I16,
    /// May not be supported by renderbuffers.
    I32I32I32,
    /// May not be supported by renderbuffers.
    I8I8I8I8,
    I16I16I16I16,
    I32I32I32I32,
}

impl <API key> {
    /// Turns this format into a more generic `TextureFormat`.
    pub fn to_texture_format(self) -> TextureFormat {
        TextureFormat::<API key>(self)
    }
}

impl ToGlEnum for <API key> {
    fn to_glenum(&self) -> gl::types::GLenum {
        match *self {
            <API key>::I8 => gl::R8I,
            <API key>::I16 => gl::R16I,
            <API key>::I32 => gl::R32I,
            <API key>::I8I8 => gl::RG8I,
            <API key>::I16I16 => gl::RG16I,
            <API key>::I32I32 => gl::RG32I,
            <API key>::I8I8I8 => gl::RGB8I,
            <API key>::I16I16I16 => gl::RGB16I,
            <API key>::I32I32I32 => gl::RGB32I,
            <API key>::I8I8I8I8 => gl::RGBA8I,
            <API key>::I16I16I16I16 => gl::RGBA16I,
            <API key>::I32I32I32I32 => gl::RGBA32I,
        }
    }
}
/// List of uncompressed pixel formats that contain unsigned integral data.
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum <API key> {
    U8,
    U16,
    U32,
    U8U8,
    U16U16,
    U32U32,
    U8U8U8,
    /// May not be supported by renderbuffers.
    U16U16U16,
    /// May not be supported by renderbuffers.
    U32U32U32,
    /// May not be supported by renderbuffers.
    U8U8U8U8,
    U16U16U16U16,
    U32U32U32U32,
    U10U10U10U2,
}

impl <API key> {
    /// Turns this format into a more generic `TextureFormat`.
    pub fn to_texture_format(self) -> TextureFormat {
        TextureFormat::<API key>(self)
    }
}

impl ToGlEnum for <API key> {
    fn to_glenum(&self) -> gl::types::GLenum {
        match *self {
            <API key>::U8 => gl::R8UI,
            <API key>::U16 => gl::R16UI,
            <API key>::U32 => gl::R32UI,
            <API key>::U8U8 => gl::RG8UI,
            <API key>::U16U16 => gl::RG16UI,
            <API key>::U32U32 => gl::RG32UI,
            <API key>::U8U8U8 => gl::RGB8UI,
            <API key>::U16U16U16 => gl::RGB16UI,
            <API key>::U32U32U32 => gl::RGB32UI,
            <API key>::U8U8U8U8 => gl::RGBA8UI,
            <API key>::U16U16U16U16 => gl::RGBA16UI,
            <API key>::U32U32U32U32 => gl::RGBA32UI,
            <API key>::U10U10U10U2 => gl::RGB10_A2UI,
        }
    }
}
/// List of compressed texture formats.
///
/// TODO: many formats are missing
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CompressedFormat {
    /// Red/green compressed texture with one unsigned component.
    RGTCFormatU,
    /// Red/green compressed texture with one signed component.
    RGTCFormatI,
    /// Red/green compressed texture with two unsigned components.
    RGTCFormatUU,
    /// Red/green compressed texture with two signed components.
    RGTCFormatII,
}

impl CompressedFormat {
    /// Turns this format into a more generic `TextureFormat`.
    pub fn to_texture_format(self) -> TextureFormat {
        TextureFormat::CompressedFormat(self)
    }
}

impl ToGlEnum for CompressedFormat {
    fn to_glenum(&self) -> gl::types::GLenum {
        match *self {
            // The three redacted constants are presumably COMPRESSED_RED_RGTC1,
            // COMPRESSED_SIGNED_RED_RGTC1 and COMPRESSED_SIGNED_RG_RGTC2, to
            // match the visible COMPRESSED_RG_RGTC2 arm — TODO confirm.
            CompressedFormat::RGTCFormatU => gl::<API key>,
            CompressedFormat::RGTCFormatI => gl::<API key>,
            CompressedFormat::RGTCFormatUU => gl::COMPRESSED_RG_RGTC2,
            CompressedFormat::RGTCFormatII => gl::<API key>,
        }
    }
}
/// List of formats available for depth textures.
///
/// `I16`, `I24` and `I32` are still treated as if they were floating points.
/// Only the internal representation is integral.
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DepthFormat {
    I16,
    I24,
    /// May not be supported by all hardware.
    I32,
    F32,
}

impl DepthFormat {
    /// Turns this format into a more generic `TextureFormat`.
    pub fn to_texture_format(self) -> TextureFormat {
        TextureFormat::DepthFormat(self)
    }
}

impl ToGlEnum for DepthFormat {
    fn to_glenum(&self) -> gl::types::GLenum {
        match *self {
            DepthFormat::I16 => gl::DEPTH_COMPONENT16,
            DepthFormat::I24 => gl::DEPTH_COMPONENT24,
            DepthFormat::I32 => gl::DEPTH_COMPONENT32,
            DepthFormat::F32 => gl::DEPTH_COMPONENT32F,
        }
    }
}
/// List of formats available for depth-stencil textures.
// TODO: If OpenGL 4.3 or <API key> is not available, then depth/stencil
// textures are treated by samplers exactly like depth-only textures
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DepthStencilFormat {
    I24I8,
    F32I8,
}

impl DepthStencilFormat {
    /// Turns this format into a more generic `TextureFormat`.
    pub fn to_texture_format(self) -> TextureFormat {
        TextureFormat::DepthStencilFormat(self)
    }
}

impl ToGlEnum for DepthStencilFormat {
    fn to_glenum(&self) -> gl::types::GLenum {
        match *self {
            DepthStencilFormat::I24I8 => gl::DEPTH24_STENCIL8,
            DepthStencilFormat::F32I8 => gl::DEPTH32F_STENCIL8,
        }
    }
}
/// List of formats available for stencil textures.
///
/// You are strongly advised to only use `I8`.
// TODO: Stencil only formats cannot be used for Textures, unless OpenGL 4.4 or
// <API key> is available.
#[allow(missing_docs)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StencilFormat {
    I1,
    I4,
    I8,
    I16,
}

impl StencilFormat {
    /// Turns this format into a more generic `TextureFormat`.
    pub fn to_texture_format(self) -> TextureFormat {
        TextureFormat::StencilFormat(self)
    }
}

impl ToGlEnum for StencilFormat {
    fn to_glenum(&self) -> gl::types::GLenum {
        match *self {
            StencilFormat::I1 => gl::STENCIL_INDEX1,
            StencilFormat::I4 => gl::STENCIL_INDEX4,
            StencilFormat::I8 => gl::STENCIL_INDEX8,
            StencilFormat::I16 => gl::STENCIL_INDEX16,
        }
    }
}
/// Format of the internal representation of a texture.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[allow(missing_docs)]
pub enum TextureFormat {
    UncompressedFloat(<API key>),
    <API key>(<API key>),
    <API key>(<API key>),
    CompressedFormat(CompressedFormat),
    DepthFormat(DepthFormat),
    StencilFormat(StencilFormat),
    DepthStencilFormat(DepthStencilFormat),
}
package lzw.csu.lannerdemo;
/**
 * Plain data holder for a banner ("lanner") entry: a numeric id, a display
 * title and the URL of its image. No behaviour beyond getters and setters.
 */
public class LannerBean {

    private int id;
    private String title;
    private String imageUrl;

    /** @return the numeric identifier of this entry */
    public int getId() {
        return id;
    }

    /** @param id the numeric identifier of this entry */
    public void setId(int id) {
        this.id = id;
    }

    /** @return the display title */
    public String getTitle() {
        return title;
    }

    /** @param title the display title */
    public void setTitle(String title) {
        this.title = title;
    }

    /** @return URL of the image to display for this entry */
    public String getImageUrl() {
        return imageUrl;
    }

    /** @param imageUrl URL of the image to display for this entry */
    public void setImageUrl(String imageUrl) {
        this.imageUrl = imageUrl;
    }
}
package restore
import (
"os"
"time"
"github.com/pkg/errors"
"golang.org/x/sys/windows"
"github.com/kopia/kopia/internal/atomicfile"
)
// symlinkChown is a no-op on Windows: POSIX-style uid/gid ownership does not
// apply here. The function exists only so the cross-platform restore code can
// call it unconditionally.
func symlinkChown(path string, uid, gid int) error {
	return nil
}
// symlinkChmod is a no-op on Windows: Unix permission bits are not meaningful
// for Windows symlinks. Kept for parity with the non-Windows implementations.
func symlinkChmod(path string, mode os.FileMode) error {
	return nil
}
// symlinkChtimes sets timestamps on the symlink itself (not its target) via
// the Win32 API, since the portable os.Chtimes would follow the link.
func symlinkChtimes(linkPath string, atime, mtime time.Time) error {
	// Convert Unix-epoch nanoseconds into Windows FILETIME values.
	fta := windows.NsecToFiletime(atime.UnixNano())
	ftw := windows.NsecToFiletime(mtime.UnixNano())

	// Presumably rewrites the path into long-path (`\\?\`) form so CreateFile
	// accepts paths over MAX_PATH — TODO confirm the redacted helper's name.
	linkPath = atomicfile.<API key>(linkPath)

	fn, err := windows.UTF16PtrFromString(linkPath)
	if err != nil {
		return errors.Wrap(err, "UTF16PtrFromString")
	}

	// The redacted flag is expected to make CreateFile open the reparse point
	// itself rather than its target (FILE_FLAG_OPEN_REPARSE_POINT) — confirm.
	h, err := windows.CreateFile(
		fn, windows.GENERIC_READ|windows.GENERIC_WRITE,
		windows.FILE_SHARE_READ|windows.FILE_SHARE_WRITE,
		nil, windows.OPEN_EXISTING,
		windows.<API key>, 0)
	if err != nil {
		return errors.Wrapf(err, "CreateFile error on %v", linkPath)
	}

	defer windows.CloseHandle(h) //nolint:errcheck

	// SetFileTime arguments are (handle, creation, access, write): the link's
	// creation time is set to mtime here — NOTE(review): confirm intentional.
	// nolint:wrapcheck
	return windows.SetFileTime(h, &ftw, &fta, &ftw)
}
#import "SwrveContentItem.h"

// A content item that renders blank vertical space inside a message layout.
@interface SwrveContentSpacer : SwrveContentItem

// Height of the spacer, kept as the string value parsed from the payload.
@property (readonly, atomic, strong) NSString *height;

// Initializes the spacer from its tag and the dictionary describing it.
-(id) initWithTag:(NSString *)tag andDictionary:(NSDictionary *)dict;

@end
package com.opengamma.analytics.financial.interestrate.swaption.provider;
import com.opengamma.analytics.financial.interestrate.<API key>;
import com.opengamma.analytics.financial.interestrate.swap.provider.<API key>;
import com.opengamma.analytics.financial.interestrate.swaption.derivative.<API key>;
import com.opengamma.analytics.financial.model.option.pricing.analytic.formula.BlackFunctionData;
import com.opengamma.analytics.financial.model.option.pricing.analytic.formula.BlackPriceFunction;
import com.opengamma.analytics.financial.model.option.pricing.analytic.formula.<API key>;
import com.opengamma.analytics.financial.model.option.pricing.analytic.formula.<API key>;
import com.opengamma.analytics.financial.model.volatility.smile.function.SABRFormulaData;
import com.opengamma.analytics.financial.provider.calculator.discounting.<API key>;
import com.opengamma.analytics.financial.provider.calculator.discounting.<API key>;
import com.opengamma.analytics.financial.provider.description.interestrate.<API key>;
import com.opengamma.analytics.financial.provider.description.interestrate.<API key>;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.<API key>;
import com.opengamma.analytics.financial.provider.sensitivity.multicurve.<API key>;
import com.opengamma.analytics.math.function.Function1D;
import com.opengamma.financial.convention.daycount.DayCount;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.money.Currency;
import com.opengamma.util.money.<API key>;
import com.opengamma.util.tuple.DoublesPair;
/**
 * Class used to compute the price and sensitivity of a physical delivery swaption with SABR model and extrapolation to the right. Implemented only for the
 * <API key>. OpenGamma implementation note for the extrapolation: Smile extrapolation, version 1.2, May 2011.
 */
public class <API key> {

  /**
   * The cut-off strike. The smile is extrapolated above that level.
   */
  private final double _cutOffStrike;
  /**
   * The tail thickness parameter.
   */
  private final double _mu;
  /**
   * The calculator and methods.
   */
  // Stateless singletons shared across instances: the swap PVBP/coupon-equivalent
  // method plus the par-rate and par-rate-curve-sensitivity discounting calculators.
  private static final <API key> METHOD_SWAP = <API key>.getInstance();
  private static final <API key> PRDC = <API key>.getInstance();
  private static final <API key> PRCSDC = <API key>.getInstance();
/**
* Constructor from cut-off strike and tail parameter.
*
* @param cutOffStrike
* The cut-off strike.
* @param mu
* The tail thickness parameter.
*/
public <API key>(final double cutOffStrike, final double mu) {
_cutOffStrike = cutOffStrike;
_mu = mu;
}
/**
* Computes the present value of a physical delivery European swaption in the SABR model with extrapolation to the right.
*
* @param swaption
* The swaption.
* @param sabrData
* The SABR data.
* @return The present value.
*/
public <API key> presentValue(final <API key> swaption, final <API key> sabrData) {
ArgumentChecker.notNull(swaption, "Swaption");
ArgumentChecker.notNull(sabrData, "SABR swaption provider");
final DayCount <API key> = sabrData.getSABRGenerator().getFixedLegDayCount();
final <API key> multicurves = sabrData.<API key>();
final Currency ccy = swaption.getCurrency();
final double pvbpModified = METHOD_SWAP.<API key>(swaption.getUnderlyingSwap(), <API key>, multicurves);
final double forwardModified = PRDC.<API key>(swaption.getUnderlyingSwap(), <API key>, multicurves);
final double strikeModified = METHOD_SWAP.couponEquivalent(swaption.getUnderlyingSwap(), pvbpModified, multicurves);
final double maturity = swaption.getMaturityTime();
final <API key> option = new <API key>(strikeModified, swaption.getTimeToExpiry(), swaption.isCall());
// Implementation note: option required to pass the strike (in case the swap has non-constant coupon).
if (strikeModified <= _cutOffStrike) { // No extrapolation
final BlackPriceFunction blackFunction = new BlackPriceFunction();
final double volatility = sabrData.getSABRParameter().getVolatility(swaption.getTimeToExpiry(), maturity, strikeModified, forwardModified);
final BlackFunctionData dataBlack = new BlackFunctionData(forwardModified, pvbpModified, volatility);
final Function1D<BlackFunctionData, Double> func = blackFunction.getPriceFunction(option);
return <API key>.of(ccy, func.evaluate(dataBlack) * (swaption.isLong() ? 1.0 : -1.0));
}
// With extrapolation
final DoublesPair expiryMaturity = DoublesPair.of(swaption.getTimeToExpiry(), maturity);
final double alpha = sabrData.getSABRParameter().getAlpha(expiryMaturity);
final double beta = sabrData.getSABRParameter().getBeta(expiryMaturity);
final double rho = sabrData.getSABRParameter().getRho(expiryMaturity);
final double nu = sabrData.getSABRParameter().getNu(expiryMaturity);
final SABRFormulaData sabrParam = new SABRFormulaData(alpha, beta, rho, nu);
final <API key> sabrExtrapolation = new <API key>(forwardModified, sabrParam, _cutOffStrike,
swaption.getTimeToExpiry(), _mu);
return <API key>.of(ccy, pvbpModified * sabrExtrapolation.price(option) * (swaption.isLong() ? 1.0 : -1.0));
}
/**
* Computes the present value rate sensitivity to rates of a physical delivery European swaption in the SABR model with extrapolation to the right.
*
* @param swaption
* The swaption.
* @param sabrData
* The SABR data. The SABR function need to be the Hagan function.
* @return The present value curve sensitivity.
*/
public <API key> <API key>(final <API key> swaption,
final <API key> sabrData) {
ArgumentChecker.notNull(swaption, "Swaption");
ArgumentChecker.notNull(sabrData, "SABR swaption provider");
final DayCount <API key> = sabrData.getSABRGenerator().getFixedLegDayCount();
final <API key> multicurves = sabrData.<API key>();
final Currency ccy = swaption.getCurrency();
final double pvbpModified = METHOD_SWAP.<API key>(swaption.getUnderlyingSwap(), <API key>, multicurves);
final double forwardModified = PRDC.<API key>(swaption.getUnderlyingSwap(), <API key>, multicurves);
final double strikeModified = METHOD_SWAP.couponEquivalent(swaption.getUnderlyingSwap(), pvbpModified, multicurves);
final double maturity = swaption.getMaturityTime();
// Derivative of the forward and pvbp with respect to the rates.
final <API key> pvbpModifiedDr = METHOD_SWAP.<API key>(swaption.getUnderlyingSwap(), <API key>,
multicurves);
final <API key> forwardModifiedDr = PRCSDC.<API key>(swaption.getUnderlyingSwap(), <API key>, multicurves);
// Implementation note: option required to pass the strike (in case the swap has non-constant coupon).
final <API key> option = new <API key>(strikeModified, swaption.getTimeToExpiry(), swaption.isCall());
// With extrapolation
final DoublesPair expiryMaturity = DoublesPair.of(swaption.getTimeToExpiry(), maturity);
final double alpha = sabrData.getSABRParameter().getAlpha(expiryMaturity);
final double beta = sabrData.getSABRParameter().getBeta(expiryMaturity);
final double rho = sabrData.getSABRParameter().getRho(expiryMaturity);
final double nu = sabrData.getSABRParameter().getNu(expiryMaturity);
final SABRFormulaData sabrParam = new SABRFormulaData(alpha, beta, rho, nu);
final <API key> sabrExtrapolation = new <API key>(forwardModified, sabrParam, _cutOffStrike,
swaption.getTimeToExpiry(), _mu);
<API key> result = pvbpModifiedDr.multipliedBy(sabrExtrapolation.price(option));
final double priceDF = sabrExtrapolation.<API key>(option);
result = result.plus(forwardModifiedDr.multipliedBy(pvbpModified * priceDF));
if (!swaption.isLong()) {
result = result.multipliedBy(-1);
}
return <API key>.of(ccy, result);
}
/**
* Computes the present value SABR sensitivity of a physical delivery European swaption in the SABR model with extrapolation to the right.
*
* @param swaption
* The swaption.
* @param sabrData
* The SABR data. The SABR function need to be the Hagan function.
* @return The present value SABR sensitivity.
*/
public <API key> <API key>(final <API key> swaption,
final <API key> sabrData) {
ArgumentChecker.notNull(swaption, "Swaption");
ArgumentChecker.notNull(sabrData, "SABR swaption provider");
final DayCount <API key> = sabrData.getSABRGenerator().getFixedLegDayCount();
final <API key> multicurves = sabrData.<API key>();
final double pvbpModified = METHOD_SWAP.<API key>(swaption.getUnderlyingSwap(), <API key>, multicurves);
final double forwardModified = PRDC.<API key>(swaption.getUnderlyingSwap(), <API key>, multicurves);
final double strikeModified = METHOD_SWAP.couponEquivalent(swaption.getUnderlyingSwap(), pvbpModified, multicurves);
final double maturity = swaption.getMaturityTime();
final <API key> sensi = new <API key>();
final DoublesPair expiryMaturity = DoublesPair.of(swaption.getTimeToExpiry(), maturity);
// Implementation note: option required to pass the strike (in case the swap has non-constant coupon).
final <API key> option = new <API key>(strikeModified, swaption.getTimeToExpiry(), swaption.isCall());
final double alpha = sabrData.getSABRParameter().getAlpha(expiryMaturity);
final double beta = sabrData.getSABRParameter().getBeta(expiryMaturity);
final double rho = sabrData.getSABRParameter().getRho(expiryMaturity);
final double nu = sabrData.getSABRParameter().getNu(expiryMaturity);
final SABRFormulaData sabrParam = new SABRFormulaData(alpha, beta, rho, nu);
final <API key> sabrExtrapolation = new <API key>(forwardModified, sabrParam, _cutOffStrike,
swaption.getTimeToExpiry(), _mu);
final double[] priceDSabr = new double[4];
sabrExtrapolation.priceAdjointSABR(option, priceDSabr);
final double omega = swaption.isLong() ? 1.0 : -1.0;
sensi.addAlpha(expiryMaturity, omega * pvbpModified * priceDSabr[0]);
sensi.addBeta(expiryMaturity, omega * pvbpModified * priceDSabr[1]);
sensi.addRho(expiryMaturity, omega * pvbpModified * priceDSabr[2]);
sensi.addNu(expiryMaturity, omega * pvbpModified * priceDSabr[3]);
return sensi;
}
} |
package com.marqod.biosphere.models
import com.marqod.biosphere.engine.GameState
import com.marqod.biosphere.utils.{Gauge, Vector2}
/**
 * A renewable resource attached to a map tile. The resource regenerates each
 * update step and, when this tile is a "node", spreads to neighbouring tiles
 * of the same resource type once it is full.
 */
abstract class TileResource {
// Which resource kind this tile holds.
val resource: ResourceType.Value
// Maximum amount the gauge can hold.
val maxResource: Double
// Amount regenerated per step.
val resourceRegen: Double
// Current fill level plus regeneration behaviour (project Gauge type).
val resourceAmount: Gauge
// Roughly half of all tiles are created as "nodes", which act as seeds that
// can propagate the resource outward once full.
val node: Boolean = Math.random() < 0.5
// Advance by one tick: a full node first tries to seed a neighbour, then the
// gauge is stepped (regenerated). Empty tiles do nothing.
def update(x: Int, y: Int, gameState: GameState) = {
if (!resourceAmount.empty()) {
if (node && resourceAmount.full()) {
spread(x, y, gameState)
}
resourceAmount.step()
}
}
// Try to seed one randomly chosen neighbouring tile; only succeeds when the
// neighbour exists, carries the same resource type and is currently empty.
// NOTE(review): the success arm yields the result of step() while the other
// arms yield false, so the inferred result type is not a clean Boolean —
// callers in update() ignore the result; confirm before relying on it.
def spread(x: Int, y: Int, gameState: GameState) = {
val dir = TileDirections.getRandom()
gameState.getTileOpt(x + dir.x.toInt, y + dir.y.toInt) match {
case Some(t: Tile) => {
if (t.resource.resource == resource && t.resource.resourceAmount.empty()) {
t.resource.resourceAmount.step()
} else {
false
}
}
case _ => false
}
}
}
// Algae resource tile. Node tiles get the full 100.0 capacity; ordinary tiles
// get a random capacity in [20, 80).
case class AlgaeResource() extends TileResource {
val resource = ResourceType.algae
val maxResource = if (node) 100.0 else 20.0 + Math.random() * 60
val resourceRegen = 0.01
// Gauge(first, second, max, regen) — meaning of the first two arguments is
// defined by Gauge; TODO(review) confirm (appears to be start value and minimum).
val resourceAmount = new Gauge(0,0,maxResource,resourceRegen)
}
// Grass resource tile. Same capacity/regeneration scheme as AlgaeResource:
// nodes get the full 100.0 capacity, other tiles a random value in [20, 80).
case class GrassResource() extends TileResource {
val resource = ResourceType.grass
val maxResource = if (node) 100.0 else 20.0 + Math.random() * 60
val resourceRegen = 0.01
// Gauge(first, second, max, regen) — see note on AlgaeResource; TODO(review) confirm.
val resourceAmount = new Gauge(0,0,maxResource,resourceRegen)
}
// Placeholder for tiles without any resource: zero capacity, no regeneration.
case class NoResource() extends TileResource {
val resource = ResourceType.none
val maxResource = 0.0
val resourceRegen = 0.0
// NOTE(review): the first Gauge argument is 1 here (vs 0 for real resources) —
// presumably keeps the empty gauge inert; confirm against the Gauge class.
val resourceAmount = new Gauge(1,0,maxResource,resourceRegen)
}
// Enumerates the kinds of tile resources. NOTE(review): the declaration order
// (algae, none, grass) fixes the enumeration ids — do not reorder.
object ResourceType extends Enumeration {
val algae, none, grass = Value
}
/**
 * The eight neighbouring offsets around a tile (diagonals included), with a
 * helper that picks one of them uniformly at random.
 */
object TileDirections {

  /** All eight neighbour offsets, listed row by row. */
  val directions: List[Vector2] = List(
    Vector2(-1,-1),Vector2(-1,0),Vector2(-1,1),
    Vector2(0,-1),Vector2(0,1),
    Vector2(1,-1),Vector2(1,0),Vector2(1,1)
  )

  /** Uniformly random neighbour offset. Truncation of the non-negative product
   *  is equivalent to the floor used previously. */
  def getRandom(): Vector2 = directions((Math.random() * directions.length).toInt)
}
package com.learn.oauth.domain;
import javax.persistence.Table;
import java.io.Serializable;
//@Entity
@Table(name = "OAUTH_CLIENT_TOKEN")
public class OauthClientToken implements Serializable {
} |
# AUTOGENERATED FILE
FROM balenalib/zc702-zynq7-ubuntu:focal-run
# remove several traces of debian python
RUN apt-get purge -y python.*
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
# install python dependencies
RUN apt-get update && apt-get install -y --<API key> \
ca-certificates \
netbase \ |
package com.liuguangqiang.idaily.feature.detail;
import androidx.lifecycle.MutableLiveData;
import androidx.lifecycle.ViewModel;
import android.os.Bundle;
import com.liuguangqiang.idaily.api.ServiceFactory;
import com.liuguangqiang.idaily.entity.Story;
import com.liuguangqiang.idaily.api.service.StoryService;
import io.reactivex.Observer;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.schedulers.Schedulers;
public class StoryViewModel extends ViewModel {
public Story story;
public String title = "";
public MutableLiveData<Story> storyLiveData = new MutableLiveData<>();
public void pushArguments(Bundle bundle) {
Story story = bundle.getParcelable(StoryActivity.ARG_STORY);
if (story != null) {
setTitle(story.getTitle());
getStory(story.id);
}
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getImage() {
if (story == null) return "";
return story.getImage();
}
public String getBody() {
return getBody(storyLiveData.getValue());
}
public void setStory(Story story) {
storyLiveData.postValue(story);
}
public Story getStory() {
return story;
}
public void getStory(int id) {
StoryService storyService = ServiceFactory.getInstance().create(StoryService.class);
storyService.getStory(id).subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread()).subscribe(new Observer<Story>() {
@Override
public void onSubscribe(Disposable d) {
}
@Override
public void onNext(Story story) {
setStory(story);
}
@Override
public void onError(Throwable e) {
}
@Override
public void onComplete() {
}
});
}
public String getBody(Story story) {
if (story == null) return "";
return loadDataWithCSS(story.getBody(), story.getCss().get(0));
}
private String loadDataWithCSS(String loadData, String cssPath) {
String header = "<html><head><link href=\"%s\" type=\"text/css\" rel=\"stylesheet\"/></head><body>";
String footer = "</body></html>";
StringBuilder sb = new StringBuilder();
sb.append(String.format(header, cssPath));
sb.append(loadData);
sb.append(footer);
return sb.toString();
}
} |
package jp.go.affrc.naro.wgs.service.dao;
import javax.annotation.Generated;
import org.seasar.extension.unit.S2TestCase;
/**
 * Auto-generated S2JDBC container test for {@link TExecuteLogService}.
 *
 */
@Generated(value = {"S2JDBC-Gen 2.4.47", "org.seasar.extension.jdbc.gen.internal.model.<API key>"}, date = "2014/02/26 11:09:31")
public class <API key> extends S2TestCase {

// Injected by the Seasar2 container once app.dicon is included in setUp().
private TExecuteLogService tExecuteLogService;
/**
 * Registers the application DI configuration so the service under test is
 * injected before each test method runs.
 *
 * @throws Exception
 */
@Override
protected void setUp() throws Exception {
super.setUp();
include("app.dicon");
}
/**
 * Smoke test: verifies that {@link #tExecuteLogService} was injected.
 *
 * @throws Exception
 */
public void testAvailable() throws Exception {
assertNotNull(tExecuteLogService);
}
}
package com.realcomp.prime.util;
import au.com.bytecode.opencsv.CSVParser;
import com.realcomp.prime.record.Record;
import com.realcomp.prime.schema.Field;
import com.realcomp.prime.schema.FieldList;
import com.realcomp.prime.schema.Schema;
import com.realcomp.prime.schema.xml.XStreamFactory;
import com.thoughtworks.xstream.XStream;
import joptsimple.OptionException;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import java.io.*;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;
/**
 * Schema generator for delimited files.
 * Uses the first record in a delimited file to generate a schema.
 * Escapes invalid characters in the header.
 *
 */
public class SchemaGenerator{

    private static final Logger logger = Logger.getLogger(SchemaGenerator.class.getName());

    // Delimiter specification: "TAB", "CSV", or a single literal character.
    private String delimiter = "TAB";

    /**
     * Reads the header (first line) from the stream and writes the generated
     * schema XML. The streams are owned by the caller and are not closed here.
     */
    public void generate(InputStream in, OutputStream out) throws IOException{
        String header = new BufferedReader(new InputStreamReader(in)).readLine();
        String[] fieldNames = getFieldNames(header);
        Schema schema = buildSchema(fieldNames);
        writeSchema(schema, out);
    }

    /**
     * Generates a schema from the header (first line) of the given file.
     */
    public Schema generate(File file) throws IOException{
        String header = null;
        try (BufferedReader reader = new BufferedReader(new FileReader(file))){
            header = reader.readLine();
        }
        String[] fieldNames = getFieldNames(header);
        return buildSchema(fieldNames);
    }

    /**
     * Generates a schema whose fields are the keys of the given record.
     */
    public Schema generate(Record record) throws IOException{
        String[] fieldNames = record.keySet().toArray(new String[record.keySet().size()]);
        return buildSchema(fieldNames);
    }

    /** Serializes the schema to XML with the shared XStream configuration. */
    protected String toXml(Schema schema){
        XStream xstream = XStreamFactory.build(true);
        StringWriter temp = new StringWriter();
        xstream.toXML(schema, temp);
        return temp.getBuffer().toString();
    }

    /** Writes the cleaned-up schema XML to the stream. */
    protected void writeSchema(Schema schema, OutputStream out) throws IOException{
        String xml = toXml(schema);
        xml = clean(xml);
        out.write(xml.getBytes());
    }

    /**
     * Splits the header line into field names using the configured delimiter.
     */
    protected String[] getFieldNames(String header) throws IOException{
        CSVParser parser;
        if (delimiter.equals("\t") || delimiter.equalsIgnoreCase("TAB")){
            // '\u0000' as the quote character effectively disables quoting for TAB data.
            parser = new CSVParser('\t', '\u0000');
        }
        else if (delimiter.equalsIgnoreCase("CSV")){
            parser = new CSVParser(',', CSVParser.<API key>);
        }
        else{
            parser = new CSVParser(delimiter.charAt(0), CSVParser.<API key>);
        }
        return parser.parseLine(header);
    }

    /**
     * Builds a schema with one field per header name; blank names become
     * FIELD&lt;position&gt; (1-based).
     */
    protected Schema buildSchema(String[] fieldNames){
        Schema schema = new Schema();
        Map<String, String> format = new HashMap<>();
        format.put("header", "true");
        if (delimiter.equals("\t") || delimiter.equalsIgnoreCase("TAB")){
            format.put("type", "TAB");
        }
        else if (delimiter.equals(",") || delimiter.equalsIgnoreCase("CSV")){
            format.put("type", "CSV");
        }
        else{
            format.put("type", "" + delimiter.charAt(0));
        }
        schema.setFormat(format);
        int count = 1;
        FieldList fieldList = new FieldList();
        for (String fieldName : fieldNames){
            fieldName = escapeFieldName(fieldName);
            fieldList.add(new Field(fieldName.isEmpty() ? "FIELD" + count : fieldName));
            count++;
        }
        schema.addFieldList(fieldList);
        return schema;
    }

    // Replaces '.' and '+' (invalid in field names) with underscores.
    protected String escapeFieldName(String original){
        return original.replaceAll("[.+]","_");
    }

    /**
     * Rewrites the raw XStream output into a namespaced rc:schema document.
     * NOTE(review): the namespace/schemaLocation string literals below are
     * visibly truncated in this source (unterminated after "http:") — restore
     * the full URLs from version control; they are intentionally left
     * byte-identical here.
     */
    protected String clean(String dirty){
        String header = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
        header = header.concat("<rc:schema\n");
        header = header.concat(" xmlns:rc=\"http:
        header = header.concat(" xmlns:xsi=\"http:
        header =
        header.concat(" xsi:schemaLocation=\"http:
        String clean = header.concat(dirty.replace("<schema>", ""));
        clean = clean.replace("</schema>", "</rc:schema>");
        clean = clean.replaceAll(" length=\"0\"", "");
        clean = clean.replaceAll(" classifier=\".*\"", "");
        clean = clean.concat("\n");
        return clean;
    }

    public String getDelimiter(){
        return delimiter;
    }

    public void setDelimiter(String delimiter){
        this.delimiter = delimiter;
    }

    private static void printHelp(OptionParser parser){
        try{
            parser.printHelpOn(System.err);
        }
        catch (IOException ignored){
            // Help output is best-effort; nothing useful to do on failure.
        }
    }

    /**
     * CLI entry point. Exits with 0 on success and 1 on failure — the original
     * always exited 0, hiding errors from shell scripts — and now closes the
     * streams even when generation fails part-way.
     */
    public static void main(String[] args){
        OptionParser parser = new OptionParser(){
            {
                acceptsAll(Arrays.asList("d", "delimiter"), "delimiter").withRequiredArg().describedAs("delimiter");
                accepts("in", "input file (default: STDIN)").withRequiredArg().describedAs("file");
                accepts("out", "output file (default: STDOUT)").withRequiredArg().describedAs("file");
                acceptsAll(Arrays.asList("h", "?", "help"), "help");
            }
        };
        int result = 0;
        try{
            OptionSet options = parser.parse(args);
            if (options.has("?")){
                printHelp(parser);
            }
            else{
                SchemaGenerator generator = new SchemaGenerator();
                if (options.has("d")){
                    generator.setDelimiter((String) options.valueOf("d"));
                }
                // try-with-resources guarantees the streams are closed even on failure.
                try (InputStream in = options.has("in")
                        ? new BufferedInputStream(new FileInputStream((String) options.valueOf("in")))
                        : new BufferedInputStream(System.in);
                     OutputStream out = options.has("out")
                        ? new <API key>(new FileOutputStream((String) options.valueOf("out")))
                        : new <API key>(System.out)){
                    generator.generate(in, out);
                }
            }
        }
        catch (IOException ex){
            logger.severe(ex.getMessage());
            result = 1;
        }
        catch (OptionException ex){
            logger.severe(ex.getMessage());
            printHelp(parser);
            result = 1;
        }
        System.exit(result);
    }
}
package nl.fontys.sofa.limo.view.topcomponent;
import nl.fontys.sofa.limo.api.exception.<API key>;
import nl.fontys.sofa.limo.view.node.factory.HubChildFactory;
import nl.fontys.sofa.limo.view.node.root.AbstractRootNode;
import nl.fontys.sofa.limo.view.node.root.HubRootNode;
import nl.fontys.sofa.limo.view.util.LIMOResourceBundle;
import org.netbeans.api.settings.ConvertAsProperties;
import org.openide.awt.ActionID;
import org.openide.awt.ActionReference;
import org.openide.awt.ActionReferences;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.util.NbBundle.Messages;
import org.openide.windows.TopComponent;
/**
 * Catalog window listing the hub templates from the master data. Registered in
 * the "editor" mode and opened via the Master Data menu or the D-H shortcut.
 */
@ConvertAsProperties(
dtd = "-//nl.fontys.sofa.limo.view.topcomponent//Hub//EN",
autostore = false
)
@TopComponent.Description(
preferredID = "HubTopComponent",
iconBase = "icons/gui/list.png",
persistenceType = TopComponent.PERSISTENCE_ALWAYS
)
@TopComponent.Registration(
mode = "editor",
openAtStartup = false
)
@ActionID(
category = "Window",
id = "nl.fontys.sofa.limo.view.topcomponent.HubTopComponent"
)
@ActionReferences({
@ActionReference(path = "Menu/Master Data/Hub templates", position = 10),
@ActionReference(path = "Shortcuts", name = "D-H")
})
@TopComponent.<API key>(
displayName = "#CTL_HubAction",
preferredID = "HubTopComponent"
)
@Messages({
"CTL_HubAction=Hub template catalog",
"CTL_HubTopComponent=Hub template catalog"
})
public final class HubTopComponent extends <API key> {
public HubTopComponent() {
super();
}
// Factory that produces the child nodes displayed in this catalog.
@Override
protected ChildFactory getChildFactory() {
return new HubChildFactory();
}
// Builds the root node of the hub tree with its localized display name.
@Override
protected AbstractRootNode createRootNode(Children children) throws <API key> {
AbstractRootNode rootNode = new HubRootNode(children);
rootNode.setDisplayName(LIMOResourceBundle.getString("HUB"));
return rootNode;
}
@Override
public String getName() {
return Bundle.CTL_HubTopComponent();
}
// Persists window state; bump "version" when the stored format changes.
void writeProperties(java.util.Properties p) {
// better to version settings since initial version as advocated at
p.setProperty("version", "1.0");
// TODO store your settings
}
// Restores window state written by writeProperties.
void readProperties(java.util.Properties p) {
String version = p.getProperty("version");
// TODO read your settings according to their version
}
}
/*! \addtogroup CORE Core Microcontroller Access
* @{
*
* \ingroup PERIPHIO
*
* \details The Core peripheral gives access to the MCU clock and other core features.
*
*/
/*! \file
 * \brief Core Microcontroller Access Header File
*
*/
#ifndef DEV_CORE_H_
#define DEV_CORE_H_
#include <stdint.h>
#include "ioctl.h"
#include "hwpl/arch.h"
#include "hwpl/types.h"
#include "dev/pio.h"
#ifdef __cplusplus
extern "C" {
#endif
#define CORE_IOC_IDENT_CHAR 'c'
typedef hwpl_action_t core_action_t;
/*! \brief See below for details.
* \details These are the possible values for \a reset_type
* in \ref core_attr_t.
*/
typedef enum {
CORE_RESET_SRC_POR /*! Power on Reset */,
<API key> /*! External Reset signal */,
CORE_RESET_SRC_WDT /*! Watchdog Timer Reset */,
CORE_RESET_SRC_BOR /*! Brown Out Reset */,
<API key> /*! Software System Reset */
} core_reset_src_t;
/*! \details This lists the valid values for the clock output
* source clock.
*/
typedef enum {
CORE_CLKOUT_CPU /*! Use the CPU Clock */,
<API key> /*! Use the Main Oscillator */,
<API key> /*! Use the Internal Oscillator */,
CORE_CLKOUT_USB /*! Use the USB Clock */,
CORE_CLKOUT_RTC /*! Use the RTC Clock */
} core_clkout_src_t;
/*! \brief Used with I_CORE_SETCLKOUT
* \details This structure is used to set
* the clock out attributes (see \ref I_CORE_SETCLKOUT).
*/
typedef struct HWPL_PACK {
uint32_t src /*! \brief The clock output source (see \ref core_clkout_src_t) */;
uint32_t div /*! \brief The clock divide value applied to src */;
} core_clkout_t;
enum {
CORE_FAULT_NONE,
CORE_FAULT_HARD,
CORE_FAULT_MEM,
CORE_FAULT_BUS,
CORE_FAULT_USAGE,
CORE_FAULT_ISR
};
/*! \details This enumerates the valid peripherals supported
 * by HWPL.
 */
typedef enum {
<API key> /*! RESERVED */,
CORE_PERIPH_CORE /*! Core Functionality */,
CORE_PERIPH_ADC /*! Analog to Digital Converter */,
CORE_PERIPH_DAC /*! Digital to Analog Converter */,
CORE_PERIPH_UART /*! UART */,
CORE_PERIPH_SPI /*! SPI */,
CORE_PERIPH_USB /*! USB */,
CORE_PERIPH_CAN /*! CAN */,
CORE_PERIPH_ENET /*! ENET */,
CORE_PERIPH_I2C /*! I2C */,
CORE_PERIPH_I2S /*! I2S */,
CORE_PERIPH_MEM /*! External memory interface */,
CORE_PERIPH_RTC /*! RTC */,
CORE_PERIPH_CEC /*! Consumer Electronic Control (Part of HDMI) */,
CORE_PERIPH_QEI /*! Quadrature Encoder Interface */,
CORE_PERIPH_PWM /*! PWM */,
CORE_PERIPH_PIO /*! GPIO */,
CORE_PERIPH_TMR /*! Timer (output compare and input capture) */,
CORE_PERIPH_EINT /*! External interrupts */,
CORE_PERIPH_WDT /*! Watch dog timer */,
CORE_PERIPH_BOD /*! Brown out detection */,
CORE_PERIPH_DMA /*! Direct Memory Access */,
CORE_PERIPH_JTAG /*! JTAG */,
CORE_PERIPH_RESET /*! Reset */,
CORE_PERIPH_CLKOUT /*! Clockout */,
CORE_PERIPH_LCD /*! LCD */,
CORE_PERIPH_LCD1 /*! LCD */,
CORE_PERIPH_LCD2 /*! LCD */,
CORE_PERIPH_LCD3 /*! LCD */,
CORE_PERIPH_EMC /*! External Memory Controller */,
CORE_PERIPH_SDC /*! SD Card */,
CORE_PERIPH_SSP /*! SSP */,
CORE_PERIPH_MCPWM /*! Motor Control PWM */,
CORE_PERIPH_NMI /*! Non-maskable Interrupt */,
CORE_PERIPH_TRACE /*! Trace data */,
CORE_PERIPH_TOTAL /*! Number of peripheral identifiers (not a peripheral) */
} core_periph_t;
#define CORE_PERIPH_GPIO CORE_PERIPH_PIO
/*! \details This lists the sleep modes supported by HWPL
*
*/
typedef enum {
CORE_SLEEP /*! Sleep mode */,
CORE_DEEPSLEEP /*! Deep sleep (preserve SRAM) */,
CORE_DEEPSLEEP_STOP /*! Deep sleep (preserve SRAM, stop clocks) */,
<API key> /*! Turn the device off (lose SRAM) */
} core_sleep_t;
/*! \brief Core IO Attributes
* \details This structure defines the attributes structure
* for configuring the Core port.
*/
typedef struct HWPL_PACK {
uint32_t serial_number[4] /*! \brief The serial number of the device (from the silicon) */;
uint32_t clock /*! \brief The current clock speed */;
uint32_t signature /*! \brief The software signature */;
uint8_t reset_type /*! \brief The source of the last reset */;
} core_attr_t;
/*! \brief This requests reads the core attributes.
*/
#define I_CORE_GETATTR _IOCTLR(CORE_IOC_IDENT_CHAR, I_GLOBAL_GETATTR, core_attr_t)
/*! \brief This requests writes the core attributes.
*/
#define I_CORE_SETATTR _IOCTLW(CORE_IOC_IDENT_CHAR, I_GLOBAL_SETATTR, core_attr_t)
#define I_CORE_SETACTION _IOCTLW(CORE_IOC_IDENT_CHAR, I_GLOBAL_SETACTION, core_action_t)
/*! \brief Data structure for setting the pin functionality.
* \details This structure is used with I_CORE_SETPINFUNC to
* set the functionality of pins.
*/
typedef struct HWPL_PACK {
uint8_t periph_port /*! \brief Peripheral port value (e.g. 0 for I2C0) */;
uint8_t periph_func /*! \brief Peripheral port function (see \ref core_periph_t) */;
pio_t io /*! \brief PIO port and pin (see \ref pio_t) */;
} core_pinfunc_t;
/*! \brief Data structure used for setting interrupt priorities
* \details This data structure is used with the I_CORE_SETIRQPRIO
* request to set the interrupt priority for peripherals.
*/
typedef struct HWPL_PACK {
uint8_t periph /*! \brief The peripheral type (see \ref core_periph_t) */;
uint8_t prio /*! \brief The priority (lower number is higher priority on ARM CM3 */;
uint8_t port /*! \brief The peripheral port number (e.g. 0 for I2C0) */;
} core_irqprio_t;
/*! \brief See below for details.
* \details This request sets the functionality of the specified pin. The
* following example shows how to set P0.15 to be used with I2C0. If
* the port/pin combo does not match the peripheral function, the request
* will fail. This call is not necessary when using the set attribute
* request. For example, I_I2C_SETATTR will configure the pins according
* to the pin_assign member.
* \code
* core_pinfunc_t pinfunc;
* pinfunc.periph_port = 0;
* pinfunc.periph_func = CORE_PERIPH_I2C;
* pinfunc.io.port = 0;
* pinfunc.io.pin = 15;
* ioctl(core_fd, I_CORE_SETPINFUNC, &pinfunc);
* \endcode
*
*/
#define I_CORE_SETPINFUNC _IOCTLW(CORE_IOC_IDENT_CHAR, I_GLOBAL_TOTAL, core_pinfunc_t)
/*! \brief This request powers down the device.
*/
#define I_CORE_SLEEP _IOCTL(CORE_IOC_IDENT_CHAR, I_GLOBAL_TOTAL + 1)
/*! \brief This request resets the device.
*/
#define I_CORE_RESET _IOCTL(CORE_IOC_IDENT_CHAR, I_GLOBAL_TOTAL + 2)
/*! \brief This request invokes the bootloader.
*/
#define <API key> _IOCTL(CORE_IOC_IDENT_CHAR, I_GLOBAL_TOTAL + 3)
/*! \brief This request sets the IRQ priority.
*/
#define I_CORE_SETIRQPRIO _IOCTLW(CORE_IOC_IDENT_CHAR, I_GLOBAL_TOTAL + 4, core_irqprio_t)
/*! \brief See below for details.
* \details This configures the clkout functionality.
* \code
* core_clkout_t clkout;
* clkout.src = <API key>;
* clkout.div = 8; //output will be the src divided by this value
* ioctl(core_fd, I_CORE_SETCLKOUT, &clkout);
* \endcode
*
*/
#define I_CORE_SETCLKOUT _IOCTLW(CORE_IOC_IDENT_CHAR, I_GLOBAL_TOTAL + 5, core_clkout_t)
#define I_CORE_TOTAL 6
#ifdef __cplusplus
}
#endif
#endif // DEV_CORE_H_ |
#ifndef _FLAMMES_H_
#define _FLAMMES_H_
#include "effect98.h"
/* Creates the "flammes" (flames) effect, filling the effect info and tooltip
 * structures through the out-parameters. Returns a status code —
 * NOTE(review): the success/failure convention is not visible here; confirm
 * against the other Initialize* effect entry points. */
int InitializeFlammes (TInfo **ppInfo, TTooltips **ppTooltips);
#endif
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.