text
stringlengths 27
775k
|
|---|
@file:JvmName("MapboxConstants")
package com.mapbox.maps
import java.util.*
/**
 * Name of the database file.
 */
const val DATABASE_NAME = "mbx.db"

/**
 * Default [Locale] for locale-sensitive string processing,
 * e.g. `"FOO".toLowerCase(MAPBOX_LOCALE)`, so results do not depend
 * on the device's locale.
 */
val MAPBOX_LOCALE: Locale = Locale.US

/**
 * Resource name used to lookup the Mapbox access token.
 */
const val MAPBOX_ACCESS_TOKEN_RESOURCE_NAME = "mapbox_access_token"

/** Name of the "mapbox-common" native shared library. */
internal const val CORE_SHARED_LIBRARY_NAME = "mapbox-common"

/** Name of the "mapbox-maps" native shared library. */
internal const val MAP_SHARED_LIBRARY_NAME = "mapbox-maps"

/**
 * The currently supported minimum zoom level.
 */
internal const val MINIMUM_ZOOM = 0.0f

/**
 * The currently supported maximum zoom level.
 */
internal const val MAXIMUM_ZOOM = 25.5f

/**
 * The currently supported maximum bearing, in degrees.
 */
internal const val MAXIMUM_BEARING = 360.0

/**
 * The currently supported minimum bearing, in degrees.
 */
internal const val MINIMUM_BEARING = 0.0
|
# frozen_string_literal: true
require 'support/models/authorizy_cop'
require 'support/models/empty_cop'
require 'support/controllers/dummy_controller'
# Verifies DummyController#authorizy grants or denies access depending on the
# configured cop's answer for the controller/action pair.
RSpec.describe DummyController, '#authorizy', type: :controller do
  let!(:user) { User.new }

  context 'when cop responds to the controller name' do
    # Fixed typo: "resturns" -> "returns".
    context 'when method returns false' do
      it 'denies the access' do
        config_mock(cop: AuthorizyCop, current_user: user) do
          get :action, params: { access: false }
        end

        expect(response).to redirect_to('/')
      end
    end

    context 'when method returns true' do
      # Fixed description: the assertion below expects an authorized response,
      # so the example grants access rather than denying it.
      it 'grants the access' do
        config_mock(cop: AuthorizyCop, current_user: user) do
          get :action, params: { access: true }
        end

        expect(response.body).to eq('{"message":"authorized"}')
      end
    end
  end

  context 'when cop does not respond to the controller name' do
    it 'denies the access' do
      config_mock(cop: EmptyCop, current_user: user) do
        get :action
      end

      expect(response).to redirect_to('/')
    end
  end
end
|
#include <GameState.hpp>
// Forwards the render window to the base State; all allocation happens in Init().
GameState::GameState(sf::RenderWindow* window)
    : State(window) {
}
// Releases everything allocated in Init(). The original destructor leaked
// `player`, which Init() allocates with new and nothing else frees.
GameState::~GameState() {
    delete player;
    delete m_bg[0];
    delete m_bg[1];
    // Delete the texture after the shapes that reference it.
    delete im_bg;
}
// Allocates and configures the state's objects: the player and a
// two-tile background.
void GameState::Init() {
    player = new Player();
    // Player starts at the center of a 200x150 area.
    // NOTE(review): 200x150 presumably matches the view size — confirm.
    player->Init(sf::Vector2f(200.0f / 2.0f, 150.0f / 2.0f), 50.0f);
    im_bg = new sf::Texture;
    if (!im_bg->loadFromFile("assets/images/Background.png")) {
        std::cout << "Missing background texture" << std::endl;
    }
    for (int i = 0; i < 2; i++) {
        m_bg[i] = new sf::RectangleShape(sf::Vector2f(200.0f, 150.0f));
        m_bg[i]->setTexture(im_bg);
        // NOTE(review): tiles are 150 tall but spaced 200 apart, leaving a
        // 50px gap — verify this spacing is intended.
        m_bg[i]->setPosition(0, i * 200.0f);
    }
}
// Advances the simulation by the elapsed frame time dt (currently only the player).
void GameState::Update(const float dt) {
    player->Update(dt);
}
// Handles a single input event. Uses the event's own key code instead of
// polling sf::Keyboard, so the jump fires exactly once per Space key-press
// event (polling could also match while a different key generated the event).
// Also removes the stray ';' that followed the function body.
void GameState::UpdateInput(sf::Event e) {
    if (e.type == sf::Event::KeyPressed && e.key.code == sf::Keyboard::Space)
        player->Jump(200.0f);
}
// Draws the two background tiles first, then the player sprite on top.
void GameState::Render(sf::RenderTarget* target) {
    target->draw(*m_bg[0]);
    target->draw(*m_bg[1]);
    target->draw(player->GetSprite());
}
|
/*
Write a program to reverse of an integer number.
*/
#include <stdio.h>
int main(void) {
int n, rev = 0, d;
printf("Enter any integer to find it\'s reverse: ");
scanf("%d", &n);
while(n != 0) {
d = n % 10;
rev = rev * 10 + d;
n = n / 10;
}
printf("\nThe reverse of is %d", rev);
return 0;
}
|
package io.connectedhealth_idaas.eventbuilder.dataobjects.general;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
/**
 * Data object holding next-of-kin demographic, contact and employer fields.
 * All fields are plain string-typed bean properties with no validation;
 * {@link #toString()} reflects over all fields.
 *
 * Note: "VIPIndiciator" keeps its original (misspelled) name because it is
 * part of the public accessor interface.
 */
public class NextofKin {
    // Next-of-kin identity and demographics.
    private String nextofkinSetId;
    private String nextofkinNumber;
    private String nextofkinName;
    private String nextofkinSurName;
    private String nextofkinAddress;
    private String nextofkinMobilePhoneNumber;
    private String nextofkinBusPhoneNumber;
    private String nextofkinDateofBirth;
    private String nextofkinGender;
    private String nextofkinType;
    private String nextofkinRelationship;
    private String nextofkinSSN;
    private String nextofkinDateBegin;
    private String nextofkinDateEnd;
    private String nextofkinOrgName;
    private String nextofkinMaritalStatus;
    private String nextofkinCitizenship;
    private String nextofkinEthnicityGrp;
    private String nextofkinReligion;
    private String nextofkinVIPIndiciator;
    private String nextofkinFinClass;
    private String nextofkinRace;
    // Next-of-kin employer details.
    private String employerName;
    private String employerAddress;
    private String employerPhone;
    private String employerId;
    private String employerStatus;
    private String employerStopDate;

    public String getNextofkinSetId() { return nextofkinSetId; }
    public void setNextofkinSetId(String nextofkinSetId) { this.nextofkinSetId = nextofkinSetId; }

    public String getNextofkinNumber() { return nextofkinNumber; }
    public void setNextofkinNumber(String nextofkinNumber) { this.nextofkinNumber = nextofkinNumber; }

    public String getNextofkinName() { return nextofkinName; }
    public void setNextofkinName(String nextofkinName) { this.nextofkinName = nextofkinName; }

    public String getNextofkinSurName() { return nextofkinSurName; }
    public void setNextofkinSurName(String nextofkinSurName) { this.nextofkinSurName = nextofkinSurName; }

    public String getNextofkinAddress() { return nextofkinAddress; }
    public void setNextofkinAddress(String nextofkinAddress) { this.nextofkinAddress = nextofkinAddress; }

    public String getNextofkinMobilePhoneNumber() { return nextofkinMobilePhoneNumber; }
    public void setNextofkinMobilePhoneNumber(String nextofkinMobilePhoneNumber) { this.nextofkinMobilePhoneNumber = nextofkinMobilePhoneNumber; }

    public String getNextofkinBusPhoneNumber() { return nextofkinBusPhoneNumber; }
    public void setNextofkinBusPhoneNumber(String nextofkinBusPhoneNumber) { this.nextofkinBusPhoneNumber = nextofkinBusPhoneNumber; }

    public String getNextofkinDateofBirth() { return nextofkinDateofBirth; }
    public void setNextofkinDateofBirth(String nextofkinDateofBirth) { this.nextofkinDateofBirth = nextofkinDateofBirth; }

    public String getNextofkinGender() { return nextofkinGender; }
    public void setNextofkinGender(String nextofkinGender) { this.nextofkinGender = nextofkinGender; }

    public String getNextofkinType() { return nextofkinType; }
    public void setNextofkinType(String nextofkinType) { this.nextofkinType = nextofkinType; }

    public String getNextofkinRelationship() { return nextofkinRelationship; }
    public void setNextofkinRelationship(String nextofkinRelationship) { this.nextofkinRelationship = nextofkinRelationship; }

    public String getNextofkinSSN() { return nextofkinSSN; }
    public void setNextofkinSSN(String nextofkinSSN) { this.nextofkinSSN = nextofkinSSN; }

    public String getNextofkinDateBegin() { return nextofkinDateBegin; }
    public void setNextofkinDateBegin(String nextofkinDateBegin) { this.nextofkinDateBegin = nextofkinDateBegin; }

    public String getNextofkinDateEnd() { return nextofkinDateEnd; }
    public void setNextofkinDateEnd(String nextofkinDateEnd) { this.nextofkinDateEnd = nextofkinDateEnd; }

    public String getNextofkinOrgName() { return nextofkinOrgName; }
    public void setNextofkinOrgName(String nextofkinOrgName) { this.nextofkinOrgName = nextofkinOrgName; }

    public String getNextofkinMaritalStatus() { return nextofkinMaritalStatus; }
    public void setNextofkinMaritalStatus(String nextofkinMaritalStatus) { this.nextofkinMaritalStatus = nextofkinMaritalStatus; }

    public String getNextofkinCitizenship() { return nextofkinCitizenship; }
    public void setNextofkinCitizenship(String nextofkinCitizenship) { this.nextofkinCitizenship = nextofkinCitizenship; }

    public String getNextofkinEthnicityGrp() { return nextofkinEthnicityGrp; }
    public void setNextofkinEthnicityGrp(String nextofkinEthnicityGrp) { this.nextofkinEthnicityGrp = nextofkinEthnicityGrp; }

    public String getNextofkinReligion() { return nextofkinReligion; }
    public void setNextofkinReligion(String nextofkinReligion) { this.nextofkinReligion = nextofkinReligion; }

    public String getNextofkinVIPIndiciator() { return nextofkinVIPIndiciator; }
    public void setNextofkinVIPIndiciator(String nextofkinVIPIndiciator) { this.nextofkinVIPIndiciator = nextofkinVIPIndiciator; }

    public String getNextofkinFinClass() { return nextofkinFinClass; }
    public void setNextofkinFinClass(String nextofkinFinClass) { this.nextofkinFinClass = nextofkinFinClass; }

    public String getNextofkinRace() { return nextofkinRace; }
    public void setNextofkinRace(String nextofkinRace) { this.nextofkinRace = nextofkinRace; }

    public String getEmployerName() { return employerName; }
    public void setEmployerName(String employerName) { this.employerName = employerName; }

    public String getEmployerAddress() { return employerAddress; }
    public void setEmployerAddress(String employerAddress) { this.employerAddress = employerAddress; }

    public String getEmployerPhone() { return employerPhone; }
    public void setEmployerPhone(String employerPhone) { this.employerPhone = employerPhone; }

    public String getEmployerId() { return employerId; }
    public void setEmployerId(String employerId) { this.employerId = employerId; }

    public String getEmployerStatus() { return employerStatus; }
    public void setEmployerStatus(String employerStatus) { this.employerStatus = employerStatus; }

    public String getEmployerStopDate() { return employerStopDate; }
    public void setEmployerStopDate(String employerStopDate) { this.employerStopDate = employerStopDate; }

    /**
     * Reflection-based dump of all fields; added the missing {@code @Override}.
     */
    @Override
    public String toString()
    {
        return ReflectionToStringBuilder.toString(this);
    }
}
|
;/*!
; @file
;
; @ingroup fapi
;
; @brief DosGetPID DOS wrapper
;
; (c) osFree Project 2018, <http://www.osFree.org>
; for licence see licence.txt in root directory, or project website
;
; This is Family API implementation for DOS, used with BIND tools
; to link required API
;
; @author Yuri Prokushev (yuri.prokushev@gmail.com)
;
;*/
.8086

; Helpers
INCLUDE HELPERS.INC
INCLUDE GLOBALVARS.INC

EXTERN DOSGETINFOSEG: PROC

; Output structure filled in for the caller (pid / tid / parent pid).
PIDINFO struc
pidi_pid dw ? ;current process' process ID
pidi_tid dw ? ;current process' thread ID
pidi_pidParent dw ? ;process ID of the parent
PIDINFO ends

_TEXT SEGMENT BYTE PUBLIC 'CODE' USE16

@PROLOG DOSGETPID

; Single argument: far pointer to the caller's PIDINFO structure.
PPID DD ?

@LOCALW GLOBALSEG
@LOCALW LOCALSEG

@START DOSGETPID

; Call DosGetInfoSeg(&GLOBALSEG, &LOCALSEG); each argument is pushed as a
; far pointer (segment SS, offset of the stack local).
PUSH SS
LEA AX, GLOBALSEG
PUSH AX
PUSH SS
LEA AX, LOCALSEG
PUSH AX
CALL DOSGETINFOSEG

; Address the local info segment through DS.
MOV AX, LOCALSEG
MOV DS, AX

; ES:BX = caller's PIDINFO pointer.
; NOTE(review): this reads the stack argument via [DS:BP] AFTER DS was
; switched to LOCALSEG — presumably the ARGS macro/addressing makes this
; correct in this environment; verify against HELPERS.INC.
LES BX,[DS:BP].ARGS.PPID

; Copy current pid, tid and parent pid from the local info segment
; into the caller's structure.
MOV AX, [DS:lis_pidCurrent]
MOV [ES:BX].PIDINFO.pidi_pid, AX
MOV AX, [DS:lis_tidCurrent]
MOV [ES:BX].PIDINFO.pidi_tid, AX
MOV AX, [DS:lis_pidParent]
MOV [ES:BX].PIDINFO.pidi_pidParent, AX

@EPILOG DOSGETPID

_TEXT ends

end
|
// Mixin that captures the viewport height once, when the component is created.
const viewport = {
  data() {
    return {
      clientHeight: 0
    }
  },
  created() {
    // Prefer the standards-mode value; fall back to body for quirks mode.
    const docHeight = document.documentElement.clientHeight
    this.clientHeight = docHeight || document.body.clientHeight
  }
}

export default viewport
|
namespace AirlineHierarchy.TransportAircrafts.Airplanes.CargoAirplanes.Models
{
    /// <summary>
    /// Concrete cargo airplane: the Airbus BelugaXL. Provides the fixed
    /// specification values required by <c>CargoAirplane</c>.
    /// Units are not stated here — presumably km, L/100km(?), kg and m³;
    /// confirm against the base class documentation.
    /// </summary>
    public class AirbusBelugaXL : CargoAirplane
    {
        public override string Model => "BelugaXL";
        public override string Manufacturer => "Airbus";
        public override int FlightRange => 4000;
        public override double FuelConsumption => 10;
        public override int LoadCapacity => 53000;
        public override int CargoCompartmentVolume => 1400;
    }
}
|
using SUP_G6.DataTypes;
using SUP_G6.Interface;
using System;
using System.Collections.Generic;
using System.Text;
namespace SUP_G6.Models
{
    /// <summary>
    /// Result of one game session (score, timing and display fields).
    /// Plain data holder implementing <see cref="IGameResult"/>.
    /// </summary>
    public class GameResult : IGameResult
    {
        public int GameId { get; set; }
        public int PlayerId { get; set; }
        public string PlayerName { get; set; }
        public int Tries { get; set; }
        public double ElapsedTimeInSeconds { get; set; }
        public double TotalScore { get; set; }
        public Level Level { get; set; }
        public bool Win { get; set; }
        public string DisplayName { get; set; }
        // Idiom fix: C# keyword `long` instead of `Int64` (identical type).
        public long DisplayCount { get; set; }
    }
}
|
#!/usr/bin/env bash
# Build the aergo Docker images manually (outside of CI).
#
# Usage: build.sh git-tag-or-hash tag [second-tag] [third-tag]
set -e

GIT_TAG=$1
MAIN_TAG=$2
SECOND_TAG=$3
THIRD_TAG=$4

if [[ -z "$MAIN_TAG" || -z "$GIT_TAG" ]]
then
    echo "Usage:"
    echo "  build.sh git-tag-or-hash tag [second-tag] [third-tag]"
    echo "Example:"
    echo "  build.sh release/0.12 0.12.0-rc"
    echo "  build.sh release/0.12 0.12.0 0.12 latest"
    exit 1
fi

# Collect only the tags that were actually provided (the original could add an
# empty SECOND_TAG element when only a third tag was passed).
declare -a tags=("$MAIN_TAG")
[[ -n "$SECOND_TAG" ]] && tags+=("$SECOND_TAG")
[[ -n "$THIRD_TAG" ]] && tags+=("$THIRD_TAG")

echo "Building Docker images for ${tags[*]} using git tag $GIT_TAG"
sleep 1

BUILDER_TAG="aergo/builder"
echo "Building ${BUILDER_TAG}"
docker build --no-cache --build-arg GIT_TAG="$GIT_TAG" --file Dockerfile.builder -t "${BUILDER_TAG}" .

# Extract build artifacts out of the builder image.
docker create --name extract "${BUILDER_TAG}"
docker cp extract:/go/aergo/bin/ .
docker cp extract:/go/aergo/cmd/brick/arglog.toml bin/brick-arglog.toml
docker cp extract:/go/aergo/libtool/lib/ .
docker rm -f extract

declare -a names=("node" "tools" "polaris")
for name in "${names[@]}"
do
    # Build the "-t aergo/NAME:TAG" arguments as separate, properly quoted
    # array elements (the original packed flag+value into one word and relied
    # on unquoted word splitting).
    tagsExpanded=()
    for tag in "${tags[@]}"; do
        tagsExpanded+=(-t "aergo/$name:$tag")
    done
    echo "[aergo/$name:${tags[*]}]"
    DOCKERFILE="Dockerfile.$name"
    echo docker build -q "${tagsExpanded[@]}" --file "$DOCKERFILE" .
    imageid=$(docker build -q "${tagsExpanded[@]}" --file "$DOCKERFILE" .)
    # `docker build -q` prints "sha256:<id>"; characters 7..18 are the short id.
    docker images --format "Done: \t{{.Repository}}:{{.Tag}} \t{{.ID}} ({{.Size}})" | grep "${imageid:7:12}"
done

# Clean up the extracted artifacts.
rm -rf bin lib

echo -e "\nREPOSITORY          TAG         IMAGE ID     CREATED    SIZE"
for name in "${names[@]}"
do
    for tag in "${tags[@]}"
    do
        docker images "aergo/$name:$tag" | tail -1
    done
done

echo -e "\nYou can now push these to Docker Hub."
echo "For example:"
# (Removed the redundant re-declaration of `names` from the original.)
for name in "${names[@]}"
do
    for tag in "${tags[@]}"
    do
        echo "  docker push aergo/$name:$tag"
    done
done
|
import VirtualEngine, {
  VirtualRegistry as VirtualRegistryClass,
  RequireResolver,
  FileResolve,
  EngineOptions,
  RegistryOptions
} from './VirtualEngine';
import { shim } from './helpers';
import DevpackMiddleware from './DevpackMiddleware';

const devpack = DevpackMiddleware;

/**
 * Instantiates the Virtual Engine by `engine = reactus()`.
 *
 * @param config - The engine options
 */
export default function createVirtualEngine(config?: EngineOptions) {
  return new VirtualEngine(config);
}

//additional exports
export {
  VirtualEngine,
  RequireResolver,
  FileResolve,
  EngineOptions,
  RegistryOptions,
  devpack,
  shim
};

/**
 * Instantiates a Virtual Registry by `engine = reactus.VirtualRegistry()`
 * following how express instantiates routers by `express.Router()`.
 *
 * @param config - The registry options
 */
export function VirtualRegistry(config?: RegistryOptions) {
  return new VirtualRegistryClass(config);
}

// Mirror express-style static factories on the default export.
// (Removed the stray `;` that followed the function declaration above.)
createVirtualEngine.devpack = devpack;
createVirtualEngine.VirtualRegistry = VirtualRegistry;
VirtualRegistry.VirtualRegistryClass = VirtualRegistryClass;
|
# Joplin Server Changelog
## [server-v1.7.2](https://github.com/laurent22/joplin/releases/tag/server-v1.7.2) - 2021-01-24T19:11:10Z
- Fixed: Fixed password hashing when changing password
- Improved: Many other internal changes for increased reliability
|
"""
# AccelerometerCalibrationPlots.jl
Debug plots for
[AccelerometerCalibration.jl](https://github.com/notinaboat/AccelerometerCalibration.jl)
"""
module AccelerometerCalibrationPlots
using AccelerometerCalibration
using Plots
# Accumulated calibration-parameter history; one entry is appended per
# call to `calplot(::AbstractArray)`.
offset_series = []
scale_series = []
rotation_series = []

"""
    reset()

Clear the accumulated offset/scale/rotation history series.
"""
function reset()
    empty!(offset_series)
    empty!(scale_series)
    empty!(rotation_series)
end
"""
    @circle_plot(x, y)

Scatter `x` against `y` on top of a unit-circle outline, labelling the axes
with the literal argument expressions. Axis limits are fixed to ±1.5.
"""
macro circle_plot(x, y)
    quote
        # Unit circle outline: y = ±sqrt(1 - x²).
        plot( x->sqrt(1 - x^2), color=:black, legend=false);
        plot!(x->-sqrt(1 - x^2), color=:black, legend=false);
        scatter!($(esc(x)),
                 $(esc(y)),
                 xaxis=(label=$(string(x))),
                 yaxis=(label=$(string(y))),
                 xlims=(-1.5,1.5),
                 ylims=(-1.5,1.5),
                 legend=false)
    end
end
"""
    @series_plot(x)

Line-plot the series `x`, labelling the x-axis with the literal expression.
"""
macro series_plot(x)
    quote
        plot($(esc(x)), legend=false, xaxis=(label=$(string(x))))
    end
end
"""
    calplot(x, y, z, xc, yc, zc, offset, scale, rotation)

Render a 3×3 debug grid: each row pairs a raw projection (`x`/`y`, `x`/`z`,
`y`/`z`) with its calibrated counterpart (`xc`…), plus one parameter-history
series plot (offset, scale, rotation).
"""
function calplot(x, y, z, xc, yc, zc,
                 offset, scale, rotation)
    plot(@circle_plot(x, y), @circle_plot(xc, yc), @series_plot(offset),
         @circle_plot(x, z), @circle_plot(xc, zc), @series_plot(scale),
         @circle_plot(y, z), @circle_plot(yc, zc), @series_plot(rotation),
         layout = (3,3), size=(1200,1200))
end
"""
    calplot(c::AbstractArray{AccelerometerCalibration.Calibration})

Debug-plot one or more calibrations side by side. Applies each calibration's
`rotation * ((points .* scale) .- offset)` to its own points, appends the
current offset/scale/rotation values to the module-level history series, and
renders the 3×3 grid via `calplot(x, y, z, xc, yc, zc, o, s, r)`.

Note: mutates the global history series on every call; use `reset()` to clear.
"""
function calplot(c::AbstractArray{AccelerometerCalibration.Calibration})
    c_count = length(c)
    # Assumes every calibration has the same number of 3-component points
    # as c[1] — TODO confirm.
    l = length(c[1].points)
    # 3×l matrix of raw point components per calibration.
    xyz = [[sc.points[j][i] for i in 1:3, j in 1:l] for sc in c]
    # Calibrated points: rotate the scaled, offset-corrected raw points.
    xyzc =[sc.rotation * ((xyz[i] .* sc.scale) .- sc.offset)
           for (i, sc) in enumerate(c)]
    # Concatenate all calibrations' rows into per-axis vectors.
    x, y, z = (hcat((xyz[i][j,:] for i in 1:c_count)...) for j in 1:3)
    xc, yc, zc = (hcat((xyzc[i][j,:] for i in 1:c_count)...) for j in 1:3)
    # Record current parameters in the global history series.
    push!(offset_series, vec([c[j].offset[i] for i in 1:3, j in 1:c_count]))
    push!(scale_series, vec([c[j].scale[i] for i in 1:3, j in 1:c_count]))
    push!(rotation_series, vcat([[r.theta1, r.theta2, r.theta3]
                                 for r in (c[j].rotation for j in 1:c_count)]...))
    # History matrices: one row per calplot call.
    o = permutedims(hcat(offset_series...))
    s = permutedims(hcat(scale_series...))
    r = permutedims(hcat(rotation_series...))
    calplot(x, y, z, xc, yc, zc, o, s, r)
end
end # module
|
<?php
namespace Visitors;
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for the `visitors` table.
 */
class Visitors extends Model
{
    /** Table backing this model. */
    protected $table = 'visitors';

    /** Mass-assignable attributes (modern short array syntax). */
    protected $fillable = ['nombre', 'apellidos', 'foto', 'motivo'];
}
|
package com.yc.common.mongodb.vo;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
import java.io.Serializable;
import java.util.List;
/**
 * @description: Generic pagination container for query results.
 * @author: youcong
 * @time: 2021/12/3 19:55
 */
@Data
@ToString
@NoArgsConstructor
public class PageModel implements Serializable {

    private static final long serialVersionUID = 1L;

    /**
     * Current page number (1-based).
     */
    private Integer pageNo = 1;

    /**
     * Number of items on the current page.
     */
    private Integer pageSize = 10;

    /**
     * Total number of matching items.
     */
    private Long total;

    /**
     * Total number of pages.
     */
    private Integer pages;

    /**
     * The entities of the current page.
     */
    private List<?> list;
}
|
use FindBin;

# Minimal PSGI application: every request receives a 200 text/plain response
# whose body is the absolute directory containing this script ($FindBin::Bin).
sub { [ 200, [ "Content-Type", "text/plain" ], [ "$FindBin::Bin" ] ] };
|
import { api, LightningElement } from 'lwc';
/**
 * Button component that forwards programmatic focus to its inner <button>.
 */
export default class Button extends LightningElement {
    // Lets the host element delegate focus into its shadow DOM contents.
    static delegatesFocus = true;

    /**
     * Public API: focuses the inner button element.
     */
    @api
    focus() {
        this.template.querySelector('button').focus();
    }
}
|
package com.example.cs4518_project
import retrofit2.Call
import retrofit2.http.GET
import retrofit2.http.Query
/**
 * Retrofit service definition for the weather endpoint.
 */
interface RetrofitInterface {
    /**
     * Fetches weather data for the given coordinates.
     *
     * @param lat latitude
     * @param lon longitude
     * @param appid API key sent as the `appid` query parameter
     */
    @GET("weather")
    fun getData(
        @Query("lat") lat: Double,
        @Query("lon") lon: Double,
        @Query("appid") appid: String
    ): Call<WeatherData>
}
|
/* $NoKeywords: $ */
/*
//
// Copyright (c) 1993-2012 Robert McNeel & Associates. All rights reserved.
// OpenNURBS, Rhinoceros, and Rhino3D are registered trademarks of Robert
// McNeel & Associates.
//
// THIS SOFTWARE IS PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY.
// ALL IMPLIED WARRANTIES OF FITNESS FOR ANY PARTICULAR PURPOSE AND OF
// MERCHANTABILITY ARE HEREBY DISCLAIMED.
//
// For complete openNURBS copyright information see <http://www.opennurbs.org>.
//
////////////////////////////////////////////////////////////////
*/
#include "opennurbs.h"
// Default constructor: creates an "unset" box. The inverted x interval
// (m_min.x = 1.0 > m_max.x = -1.0) is the sentinel used for an empty/unset
// box (Destroy() below restores the same state).
ON_BoundingBox::ON_BoundingBox()
: m_min(1.0,0.0,0.0),
  m_max(-1.0,0.0,0.0)
{}

// Shared canonical empty box (default-constructed, i.e. unset).
const ON_BoundingBox ON_BoundingBox::EmptyBoundingBox;

// Constructs a box directly from corner points; no validation is performed.
ON_BoundingBox::ON_BoundingBox( const ON_3dPoint& min_pt, const ON_3dPoint& max_pt )
: m_min( min_pt ),
  m_max( max_pt )
{}

ON_BoundingBox::~ON_BoundingBox()
{}

// Resets the box to the unset state (min.x > max.x sentinel).
void ON_BoundingBox::Destroy()
{
  m_min.Zero();
  m_max.Zero();
  m_min.x = 1.0;
  m_max.x = -1.0;
}
//////////
// ON_BoundingBox::Transform() updates the bounding box
// to be the smallest axis aligned bounding box that contains
// the transform of the eight corner points of the input
// bounding box.
bool ON_BoundingBox::Transform( const ON_Xform& xform )
{
ON_3dPointArray corners;
bool rc = GetCorners( corners );
if (rc) {
rc = corners.Transform(xform);
if (rc)
rc = Set(corners);
}
return rc;
}
// Returns a tolerance suitable for comparing points of this box,
// derived from the box extents by ON_BoundingBoxTolerance.
double ON_BoundingBox::Tolerance() const
{
  // rough guess at a tolerance to use for comparing
  return ON_BoundingBoxTolerance( 3, m_min, m_max );
}

// Index access: any i > 0 selects the max corner, otherwise the min corner.
ON_3dPoint& ON_BoundingBox::operator[](int i)
{
  return (i>0) ? m_max : m_min;
}

const ON_3dPoint& ON_BoundingBox::operator[](int i) const
{
  return (i>0) ? m_max : m_min;
}

// Minimum corner of the box.
ON_3dPoint ON_BoundingBox::Min() const
{
  return m_min;
}

// Maximum corner of the box.
ON_3dPoint ON_BoundingBox::Max() const
{
  return m_max;
}

// Vector from the min corner to the max corner.
ON_3dVector ON_BoundingBox::Diagonal() const
{
  return m_max - m_min;
}

// Midpoint of the box.
ON_3dPoint ON_BoundingBox::Center() const
{
  return 0.5*(m_max+m_min);
}
// Returns one of the 8 box corners; for each axis, index <= 0 selects the
// Min() coordinate and index > 0 selects the Max() coordinate.
ON_3dPoint ON_BoundingBox::Corner( int x_index, int y_index, int z_index ) const
{
  // 8 corners of box
  // x_index 0 = Min().x, 1 = Max().x
  // y_index 0 = Min().y, 1 = Max().y
  // z_index 0 = Min().z, 1 = Max().z
  ON_3dPoint corner;
  corner.x = (x_index>0) ? m_max.x : m_min.x;
  corner.y = (y_index>0) ? m_max.y : m_min.y;
  corner.z = (z_index>0) ? m_max.z : m_min.z;
  return corner;
}
// Fills corners[0..7] with the 8 box corners (x varies slowest, z fastest).
// Returns true only for a valid box; an unset box writes nothing.
bool
ON_BoundingBox::GetCorners(
       ON_3dPoint corners[8]// returns list of 8 corner points
       ) const
{
  int n = 0;
  if ( IsValid() )
  {
    ON_3dPoint P;
    int i,j,k;
    for( i = 0; i < 2; i++ )
    {
      P.x = (i) ? m_max.x : m_min.x;
      for ( j = 0; j < 2; j++ )
      {
        P.y = (j) ? m_max.y : m_min.y;
        for ( k = 0; k < 2; k++ )
        {
          P.z = (k) ? m_max.z : m_min.z;
          corners[n++] = P;
        }
      }
    }
  }
  // True exactly when all 8 corners were produced (i.e. the box is valid).
  return (8==n);
}

// Array overload: empties `corners` and appends the 8 corner points on success.
bool
ON_BoundingBox::GetCorners(
       ON_3dPointArray& corners// returns list of 8 corner points
       ) const
{
  ON_3dPoint c[8];
  corners.Empty();
  bool rc = GetCorners(c);
  if ( rc )
    corners.Append(8,c);
  return rc;
}
// Zero-initializes the entire region (xform, plane list, counts, tolerance).
// NOTE(review): memset over *this assumes the class stays trivially copyable
// (no virtuals / non-trivial members) — verify if members change.
ON_ClippingRegion::ON_ClippingRegion()
{
  memset(this,0,sizeof(*this));
}
// Sets the clip-plane tolerance, accepted only for finite values in
// (0, ~FLT_MAX); anything else (including negatives and infinities)
// resets the tolerance to 0.
void ON_ClippingRegion::SetClipPlaneTolerance( double clip_plane_tolerance )
{
  if ( clip_plane_tolerance > 0.0 && clip_plane_tolerance < 3.402823466e+38 )
    m_clip_plane_tolerance = (float)clip_plane_tolerance;
  else
    m_clip_plane_tolerance = 0.0;
}

// Returns the tolerance used by the clip-plane tests below.
// (The member is stored as float; the cast makes the precision explicit.)
double ON_ClippingRegion::ClipPlaneTolerance() const
{
  return (float)m_clip_plane_tolerance;
}
// Tests a single point against the view frustum.
// Return values (shared by all overloads): 0 = out, 1 = mixed, 2 = in.
int ON_ClippingRegion::InViewFrustum(
  ON_3dPoint P
  ) const
{
  return InViewFrustum(1,&P);
}

// Tests a bounding box against the view frustum by testing its 8 corners.
// Unset boxes (min.x > max.x) and non-finite extents return 0.
int ON_ClippingRegion::InViewFrustum(
  const ON_BoundingBox& bbox
  ) const
{
  if ( !ON_IsValid(bbox.m_min.x)
       || !ON_IsValid(bbox.m_max.x)
       || bbox.m_min.x > bbox.m_max.x
     )
  {
    return 0;
  }
  // Enumerate all 8 corners of the box.
  ON_3dPoint P[8];
  P[0] = bbox.m_min;
  P[1] = bbox.m_max;
  P[2].x = bbox.m_min.x; P[2].y = bbox.m_min.y; P[2].z = bbox.m_max.z;
  P[3].x = bbox.m_min.x; P[3].y = bbox.m_max.y; P[3].z = bbox.m_min.z;
  P[4].x = bbox.m_min.x; P[4].y = bbox.m_max.y; P[4].z = bbox.m_max.z;
  P[5].x = bbox.m_max.x; P[5].y = bbox.m_min.y; P[5].z = bbox.m_min.z;
  P[6].x = bbox.m_max.x; P[6].y = bbox.m_min.y; P[6].z = bbox.m_max.z;
  P[7].x = bbox.m_max.x; P[7].y = bbox.m_max.y; P[7].z = bbox.m_min.z;
  return InViewFrustum(8,P);
}
// Tests `count` float points against the six view-frustum planes.
// Each point is mapped to homogeneous clip space by m_xform and its x,y,z
// coordinates are compared with +/-w; one bit per plane records "outside".
// Returns 0 when all points share an "outside" plane bit (fully culled),
// 1 when some points are out (partially visible), 2 when all are in.
int ON_ClippingRegion::InViewFrustum(
  int count,
  const ON_3fPoint* p
  ) const
{
  const double* xform;
  const float* cv;
  double x, w;
  unsigned int out, all_out, some_out;
  int i;
  some_out = 0;
  all_out = 0xFFFFFFFF;
  xform = &m_xform.m_xform[0][0];
  cv = &p[0].x;
  for ( i = count; i--; cv += 3 )
  {
    out = 0;
    // w = homogeneous coordinate of the transformed point.
    w = xform[12]*cv[0] + xform[13]*cv[1] + xform[14]*cv[2] + xform[15];
    x = xform[0]*cv[0] + xform[1]*cv[1] + xform[2]*cv[2] + xform[3];
    if (x < -w) out = 0x01; else if (x > w) out = 0x02;
    x = xform[4]*cv[0] + xform[5]*cv[1] + xform[6]*cv[2] + xform[7];
    if (x < -w) out |= 0x04; else if (x > w) out |= 0x08;
    x = xform[8]*cv[0] + xform[9]*cv[1] + xform[10]*cv[2] + xform[11];
    if (x < -w) out |= 0x10; else if (x > w) out |= 0x20;
    some_out |= out;
    all_out &= out;
    if ( some_out && !all_out )
    {
      // no further "out" checking is necessary
      break;
    }
  }
  if ( all_out )
    i = 0;
  else if ( some_out )
    i = 1;
  else
    i = 2;
  return i;
}
// Double-precision overload of the frustum test above; identical bit logic.
int ON_ClippingRegion::InViewFrustum(
  int count,
  const ON_3dPoint* p
  ) const
{
  const double* xform;
  const double* cv;
  double x, w;
  unsigned int out, all_out, some_out;
  int i;
  some_out = 0;
  all_out = 0xFFFFFFFF;
  xform = &m_xform.m_xform[0][0];
  cv = &p[0].x;
  for ( i = count; i--; cv += 3 )
  {
    out = 0;
    w = xform[12]*cv[0] + xform[13]*cv[1] + xform[14]*cv[2] + xform[15];
    x = xform[0]*cv[0] + xform[1]*cv[1] + xform[2]*cv[2] + xform[3];
    if (x < -w) out = 0x01; else if (x > w) out = 0x02;
    x = xform[4]*cv[0] + xform[5]*cv[1] + xform[6]*cv[2] + xform[7];
    if (x < -w) out |= 0x04; else if (x > w) out |= 0x08;
    x = xform[8]*cv[0] + xform[9]*cv[1] + xform[10]*cv[2] + xform[11];
    if (x < -w) out |= 0x10; else if (x > w) out |= 0x20;
    some_out |= out;
    all_out &= out;
    if ( some_out && !all_out )
    {
      // no further "out" checking is necessary
      break;
    }
  }
  if ( all_out )
    i = 0;
  else if ( some_out )
    i = 1;
  else
    i = 2;
  return i;
}
// Homogeneous (4d) overload of the frustum test; the point's own w
// coordinate (cv[3]) participates in the transform. Same bit logic.
int ON_ClippingRegion::InViewFrustum(
  int count,
  const ON_4dPoint* p
  ) const
{
  const double* xform;
  const double* cv;
  double x, w;
  unsigned int out, all_out, some_out;
  int i;
  some_out = 0;
  all_out = 0xFFFFFFFF;
  xform = &m_xform.m_xform[0][0];
  cv = &p[0].x;
  for ( i = count; i--; cv += 4 )
  {
    out = 0;
    w = xform[12]*cv[0] + xform[13]*cv[1] + xform[14]*cv[2] + xform[15]*cv[3];
    x = xform[0]*cv[0] + xform[1]*cv[1] + xform[2]*cv[2] + xform[3]*cv[3];
    if (x < -w) out = 0x01; else if (x > w) out = 0x02;
    x = xform[4]*cv[0] + xform[5]*cv[1] + xform[6]*cv[2] + xform[7]*cv[3];
    if (x < -w) out |= 0x04; else if (x > w) out |= 0x08;
    x = xform[8]*cv[0] + xform[9]*cv[1] + xform[10]*cv[2] + xform[11]*cv[3];
    if (x < -w) out |= 0x10; else if (x > w) out |= 0x20;
    some_out |= out;
    all_out &= out;
    if ( some_out && !all_out )
    {
      // no further "out" checking is necessary
      break;
    }
  }
  if ( all_out )
    i = 0;
  else if ( some_out )
    i = 1;
  else
    i = 2;
  return i;
}
// Tests a single point against the additional clipping planes only.
// Returns 0 = out, 1 = mixed, 2 = in (or 2 when there are no clip planes).
int ON_ClippingRegion::InClipPlaneRegion(
  ON_3dPoint P
  ) const
{
  return InClipPlaneRegion(1,&P);
}

// Tests a bounding box against the clipping planes via its 8 corners.
// Invalid/unset boxes return 0; with no clip planes everything is "in".
int ON_ClippingRegion::InClipPlaneRegion(
  const ON_BoundingBox& bbox
  ) const
{
  if ( !ON_IsValid(bbox.m_min.x)
       || !ON_IsValid(bbox.m_max.x)
       || bbox.m_min.x > bbox.m_max.x
     )
  {
    return 0;
  }
  if ( m_clip_plane_count <= 0 )
  {
    return 2;
  }
  // Enumerate all 8 corners of the box.
  ON_3dPoint P[8];
  P[0] = bbox.m_min;
  P[1] = bbox.m_max;
  P[2].x = bbox.m_min.x; P[2].y = bbox.m_min.y; P[2].z = bbox.m_max.z;
  P[3].x = bbox.m_min.x; P[3].y = bbox.m_max.y; P[3].z = bbox.m_min.z;
  P[4].x = bbox.m_min.x; P[4].y = bbox.m_max.y; P[4].z = bbox.m_max.z;
  P[5].x = bbox.m_max.x; P[5].y = bbox.m_min.y; P[5].z = bbox.m_min.z;
  P[6].x = bbox.m_max.x; P[6].y = bbox.m_min.y; P[6].z = bbox.m_max.z;
  P[7].x = bbox.m_max.x; P[7].y = bbox.m_max.y; P[7].z = bbox.m_min.z;
  return InClipPlaneRegion(8,P);
}
// Tests `count` float points against the m_clip_plane list. A point is
// "out" of a plane when its signed distance is below -tolerance; plane bits
// start at 0x40 (above the 6 frustum bits used elsewhere).
// Returns 0 = all out of a common plane, 1 = mixed, 2 = all in.
int ON_ClippingRegion::InClipPlaneRegion(
  int count,
  const ON_3fPoint* p
  ) const
{
  const ON_PlaneEquation* cpeqn;
  const float* cv;
  double x;
  unsigned int out, all_out, some_out, cpbit;
  int i, j;
  // 14 May 2012 Dale Lear
  //   Fix http://dev.mcneel.com/bugtrack/?q=102481
  //   Picking hatches that are coplanar with clipping planes.
  //   The "fix" was to set clipping_plane_tolerance = same
  //   tolerance the display code uses.  Before the fix,
  //   0.0 was used as the clipping_plane_tolerance.
  const double clip_plane_tolerance = ClipPlaneTolerance();
  if ( count <= 0 || !p )
    return 0;
  if ( m_clip_plane_count <= 0 )
    return 2;
  some_out = 0;
  all_out = 0xFFFFFFFF;
  cv = &p[0].x;
  for ( i = count; i--; cv += 3 )
  {
    out = 0;
    //if ( m_clip_plane_count )
    {
      cpbit = 0x40;
      cpeqn = m_clip_plane;
      j = m_clip_plane_count;
      while (j--)
      {
        // Signed distance from the point to the plane.
        x = cpeqn->x*cv[0] + cpeqn->y*cv[1] + cpeqn->z*cv[2] + cpeqn->d;
        if ( x < -clip_plane_tolerance )
          out |= cpbit;
        cpbit <<= 1;
        cpeqn++;;
      }
    }
    some_out |= out;
    all_out &= out;
    if ( some_out && !all_out )
    {
      // no further "out" checking is necessary
      break;
    }
  }
  if ( all_out )
    i = 0;
  else if ( some_out )
    i = 1;
  else
    i = 2;
  return i;
}
// Double-precision overload of the clip-plane test; identical bit logic.
int ON_ClippingRegion::InClipPlaneRegion(
  int count,
  const ON_3dPoint* p
  ) const
{
  const ON_PlaneEquation* cpeqn;
  const double* cv;
  double x;
  unsigned int out, all_out, some_out, cpbit;
  int i, j;
  if ( count <= 0 || !p )
    return 0;
  if ( m_clip_plane_count <= 0 )
    return 2;
  // 14 May 2012 Dale Lear
  //   Fix http://dev.mcneel.com/bugtrack/?q=102481
  //   Picking hatches that are coplanar with clipping planes.
  //   The "fix" was to set clipping_plane_tolerance = same
  //   tolerance the display code uses.  Before the fix,
  //   0.0 was used as the clipping_plane_tolerance.
  const double clip_plane_tolerance = ClipPlaneTolerance();
  some_out = 0;
  all_out = 0xFFFFFFFF;
  cv = &p[0].x;
  for ( i = count; i--; cv += 3 )
  {
    out = 0;
    //if ( m_clip_plane_count )
    {
      cpbit = 0x40;
      cpeqn = m_clip_plane;
      j = m_clip_plane_count;
      while (j--)
      {
        x = cpeqn->x*cv[0] + cpeqn->y*cv[1] + cpeqn->z*cv[2] + cpeqn->d;
        if ( x < -clip_plane_tolerance )
          out |= cpbit;
        cpbit <<= 1;
        cpeqn++;;
      }
    }
    some_out |= out;
    all_out &= out;
    if ( some_out && !all_out )
    {
      // no further "out" checking is necessary
      break;
    }
  }
  if ( all_out )
    i = 0;
  else if ( some_out )
    i = 1;
  else
    i = 2;
  return i;
}
// Homogeneous (4d) overload: the plane's d term is scaled by the point's
// w coordinate (cv[3]). Same bit logic as the other overloads.
int ON_ClippingRegion::InClipPlaneRegion(
  int count,
  const ON_4dPoint* p
  ) const
{
  const ON_PlaneEquation* cpeqn;
  const double* cv;
  double x;
  unsigned int out, all_out, some_out, cpbit;
  int i, j;
  if ( count <= 0 || !p )
    return 0;
  if ( m_clip_plane_count <= 0 )
    return 2;
  // 14 May 2012 Dale Lear
  //   Fix http://dev.mcneel.com/bugtrack/?q=102481
  //   Picking hatches that are coplanar with clipping planes.
  //   The "fix" was to set clipping_plane_tolerance = same
  //   tolerance the display code uses.  Before the fix,
  //   0.0 was used as the clipping_plane_tolerance.
  const double clip_plane_tolerance = ClipPlaneTolerance();
  some_out = 0;
  all_out = 0xFFFFFFFF;
  cv = &p[0].x;
  for ( i = count; i--; cv += 4 )
  {
    out = 0;
    //if ( m_clip_plane_count )
    {
      cpbit = 0x40;
      cpeqn = m_clip_plane;
      j = m_clip_plane_count;
      while (j--)
      {
        x = cpeqn->x*cv[0] + cpeqn->y*cv[1] + cpeqn->z*cv[2] + cpeqn->d*cv[3];
        if ( x < -clip_plane_tolerance )
          out |= cpbit;
        cpbit <<= 1;
        cpeqn++;;
      }
    }
    some_out |= out;
    all_out &= out;
    if ( some_out && !all_out )
    {
      // no further "out" checking is necessary
      break;
    }
  }
  if ( all_out )
    i = 0;
  else if ( some_out )
    i = 1;
  else
    i = 2;
  return i;
}
// Combined visibility test (frustum planes AND clip planes) for one point.
// Returns 0 = hidden, 1 = partially visible, 2 = completely visible.
int ON_ClippingRegion::IsVisible( ON_3dPoint P ) const
{
  return IsVisible(1,&P);
}

// Combined visibility test for a bounding box via its 8 corners.
// Invalid/unset boxes return 0.
int ON_ClippingRegion::IsVisible( const ON_BoundingBox& bbox ) const
{
  if ( !ON_IsValid(bbox.m_min.x)
       || !ON_IsValid(bbox.m_max.x)
       || bbox.m_min.x > bbox.m_max.x
     )
  {
    return 0;
  }
  // Enumerate all 8 corners of the box.
  ON_3dPoint P[8];
  P[0] = bbox.m_min;
  P[1] = bbox.m_max;
  P[2].x = bbox.m_min.x; P[2].y = bbox.m_min.y; P[2].z = bbox.m_max.z;
  P[3].x = bbox.m_min.x; P[3].y = bbox.m_max.y; P[3].z = bbox.m_min.z;
  P[4].x = bbox.m_min.x; P[4].y = bbox.m_max.y; P[4].z = bbox.m_max.z;
  P[5].x = bbox.m_max.x; P[5].y = bbox.m_min.y; P[5].z = bbox.m_min.z;
  P[6].x = bbox.m_max.x; P[6].y = bbox.m_min.y; P[6].z = bbox.m_max.z;
  P[7].x = bbox.m_max.x; P[7].y = bbox.m_max.y; P[7].z = bbox.m_min.z;
  return IsVisible(8,P);
}
// Combined test for `count` float points: accumulates clip-plane "out" bits
// (starting at 0x40) and the six frustum-plane bits (0x01..0x20) per point.
// Returns 0 = all out of a common plane, 1 = mixed, 2 = all in.
int ON_ClippingRegion::IsVisible( int count, const ON_3fPoint* p ) const
{
  const double* xform;
  const ON_PlaneEquation* cpeqn;
  const float* cv;
  double x, w;
  unsigned int out, all_out, some_out, cpbit;
  int i, j;
  // 14 May 2012 Dale Lear
  //   Fix http://dev.mcneel.com/bugtrack/?q=102481
  //   Picking hatches that are coplanar with clipping planes.
  //   The "fix" was to set clipping_plane_tolerance = same
  //   tolerance the display code uses.  Before the fix,
  //   0.0 was used as the clipping_plane_tolerance.
  const double clip_plane_tolerance = ClipPlaneTolerance();
  some_out = 0;
  all_out = 0xFFFFFFFF;
  xform = &m_xform.m_xform[0][0];
  cv = &p[0].x;
  for ( i = count; i--; cv += 3 )
  {
    out = 0;
    if ( m_clip_plane_count )
    {
      cpbit = 0x40;
      cpeqn = m_clip_plane;
      j = m_clip_plane_count;
      while (j--)
      {
        // Signed distance from the point to the clipping plane.
        x = cpeqn->x*cv[0] + cpeqn->y*cv[1] + cpeqn->z*cv[2] + cpeqn->d;
        if ( x < -clip_plane_tolerance )
          out |= cpbit;
        cpbit <<= 1;
        cpeqn++;;
      }
    }
    // Frustum test in homogeneous clip space.
    w = xform[12]*cv[0] + xform[13]*cv[1] + xform[14]*cv[2] + xform[15];
    x = xform[0]*cv[0] + xform[1]*cv[1] + xform[2]*cv[2] + xform[3];
    if (x < -w) out |= 0x01; else if (x > w) out |= 0x02;
    x = xform[4]*cv[0] + xform[5]*cv[1] + xform[6]*cv[2] + xform[7];
    if (x < -w) out |= 0x04; else if (x > w) out |= 0x08;
    x = xform[8]*cv[0] + xform[9]*cv[1] + xform[10]*cv[2] + xform[11];
    if (x < -w) out |= 0x10; else if (x > w) out |= 0x20;
    some_out |= out;
    all_out &= out;
    if ( some_out && !all_out )
    {
      // no further "out" checking is necessary
      break;
    }
  }
  if ( all_out )
    i = 0;
  else if ( some_out )
    i = 1;
  else
    i = 2;
  return i;
}
// Double-precision overload of the combined visibility test; same bit logic.
int ON_ClippingRegion::IsVisible( int count, const ON_3dPoint* p ) const
{
  const double* xform;
  const ON_PlaneEquation* cpeqn;
  const double* cv;
  double x, w;
  unsigned int out, all_out, some_out, cpbit;
  int i, j;
  // 14 May 2012 Dale Lear
  //   Fix http://dev.mcneel.com/bugtrack/?q=102481
  //   Picking hatches that are coplanar with clipping planes.
  //   The "fix" was to set clipping_plane_tolerance = same
  //   tolerance the display code uses.  Before the fix,
  //   0.0 was used as the clipping_plane_tolerance.
  const double clip_plane_tolerance = ClipPlaneTolerance();
  some_out = 0;
  all_out = 0xFFFFFFFF;
  xform = &m_xform.m_xform[0][0];
  cv = &p[0].x;
  for ( i = count; i--; cv += 3 )
  {
    out = 0;
    if ( m_clip_plane_count )
    {
      cpbit = 0x40;
      cpeqn = m_clip_plane;
      j = m_clip_plane_count;
      while (j--)
      {
        x = cpeqn->x*cv[0] + cpeqn->y*cv[1] + cpeqn->z*cv[2] + cpeqn->d;
        if ( x < -clip_plane_tolerance )
          out |= cpbit;
        cpbit <<= 1;
        cpeqn++;;
      }
    }
    w = xform[12]*cv[0] + xform[13]*cv[1] + xform[14]*cv[2] + xform[15];
    x = xform[0]*cv[0] + xform[1]*cv[1] + xform[2]*cv[2] + xform[3];
    if (x < -w) out |= 0x01; else if (x > w) out |= 0x02;
    x = xform[4]*cv[0] + xform[5]*cv[1] + xform[6]*cv[2] + xform[7];
    if (x < -w) out |= 0x04; else if (x > w) out |= 0x08;
    x = xform[8]*cv[0] + xform[9]*cv[1] + xform[10]*cv[2] + xform[11];
    if (x < -w) out |= 0x10; else if (x > w) out |= 0x20;
    some_out |= out;
    all_out &= out;
    if ( some_out && !all_out )
    {
      // no further "out" checking is necessary
      break;
    }
  }
  if ( all_out )
    i = 0;
  else if ( some_out )
    i = 1;
  else
    i = 2;
  return i;
}
int ON_ClippingRegion::IsVisible( int count, const ON_4dPoint* p ) const
{
const double* xform;
const ON_PlaneEquation* cpeqn;
const double* cv;
double x, w;
unsigned int out, all_out, some_out, cpbit;
int i, j;
// 14 May 2012 Dale Lear
// Fix http://dev.mcneel.com/bugtrack/?q=102481
// Picking hatches that are coplanar with clipping planes.
// The "fix" was to set clipping_plane_tolerance = same
// tolerance the display code uses. Before the fix,
// 0.0 was used as the clipping_plane_tolerance.
const double clip_plane_tolerance = ClipPlaneTolerance();
some_out = 0;
all_out = 0xFFFFFFFF;
xform = &m_xform.m_xform[0][0];
cv = &p[0].x;
for ( i = count; i--; cv += 4 )
{
out = 0;
if ( m_clip_plane_count )
{
cpbit = 0x40;
cpeqn = m_clip_plane;
j = m_clip_plane_count;
while (j--)
{
x = cpeqn->x*cv[0] + cpeqn->y*cv[1] + cpeqn->z*cv[2] + cpeqn->d*cv[3];
if ( x < -clip_plane_tolerance )
out |= cpbit;
cpbit <<= 1;
cpeqn++;
}
}
w = xform[12]*cv[0] + xform[13]*cv[1] + xform[14]*cv[2] + xform[15]*cv[3];
x = xform[0]*cv[0] + xform[1]*cv[1] + xform[2]*cv[2] + xform[3]*cv[3];
if (x < -w) out |= 0x01; else if (x > w) out |= 0x02;
x = xform[4]*cv[0] + xform[5]*cv[1] + xform[6]*cv[2] + xform[7]*cv[3];
if (x < -w) out |= 0x04; else if (x > w) out |= 0x08;
x = xform[8]*cv[0] + xform[9]*cv[1] + xform[10]*cv[2] + xform[11]*cv[3];
if (x < -w) out |= 0x10; else if (x > w) out |= 0x20;
some_out |= out;
all_out &= out;
if ( some_out && !all_out )
{
// no further "out" checking is necessary
break;
}
}
if ( all_out )
i = 0;
else if ( some_out )
i = 1;
else
i = 2;
return i;
}
unsigned int ON_ClippingRegion::TransformPoint(
const ON_4dPoint& P,
ON_4dPoint& Q
) const
{
unsigned int out, cpbit;
const double* xform = &m_xform.m_xform[0][0];
const double cv[4] = {P.x,P.y,P.z,P.w};
const ON_PlaneEquation* cpeqn;
int j;
double x,y,z,w;
// 14 May 2012 Dale Lear
// Fix http://dev.mcneel.com/bugtrack/?q=102481
// Picking hatches that are coplanar with clipping planes.
// The "fix" was to set clipping_plane_tolerance = same
// tolerance the display code uses. Before the fix,
// 0.0 was used as the clipping_plane_tolerance.
const double clip_plane_tolerance = ClipPlaneTolerance();
out = 0;
if ( m_clip_plane_count )
{
cpbit = 0x40;
cpeqn = m_clip_plane;
j = m_clip_plane_count;
while (j--)
{
x = cpeqn->x*cv[0] + cpeqn->y*cv[1] + cpeqn->z*cv[2] + cpeqn->d*cv[3];
if ( x < -clip_plane_tolerance )
out |= cpbit;
cpbit <<= 1;
cpeqn++;
}
}
w = xform[12]*cv[0] + xform[13]*cv[1] + xform[14]*cv[2] + xform[15]*cv[3];
x = xform[0]*cv[0] + xform[1]*cv[1] + xform[2]*cv[2] + xform[3]*cv[3];
if (x < -w) out |= 0x01; else if (x > w) out |= 0x02;
y = xform[4]*cv[0] + xform[5]*cv[1] + xform[6]*cv[2] + xform[7]*cv[3];
if (y < -w) out |= 0x04; else if (y > w) out |= 0x08;
z = xform[8]*cv[0] + xform[9]*cv[1] + xform[10]*cv[2] + xform[11]*cv[3];
if (z < -w) out |= 0x10; else if (z > w) out |= 0x20;
if ( w <= 0.0 )
out = 0x80000000;
Q.x = x; Q.y = y; Q.z = z; Q.w = w;
return out;
}
unsigned int ON_ClippingRegion::TransformPoint(
const ON_3dPoint& P,
ON_3dPoint& Q
) const
{
unsigned int out, cpbit;
const double* xform = &m_xform.m_xform[0][0];
const double cv[3] = {P.x,P.y,P.z};
const ON_PlaneEquation* cpeqn;
int j;
double x,y,z,w;
// 14 May 2012 Dale Lear
// Fix http://dev.mcneel.com/bugtrack/?q=102481
// Picking hatches that are coplanar with clipping planes.
// The "fix" was to set clipping_plane_tolerance = same
// tolerance the display code uses. Before the fix,
// 0.0 was used as the clipping_plane_tolerance.
const double clip_plane_tolerance = ClipPlaneTolerance();
out = 0;
if ( m_clip_plane_count )
{
cpbit = 0x40;
cpeqn = m_clip_plane;
j = m_clip_plane_count;
while (j--)
{
x = cpeqn->x*cv[0] + cpeqn->y*cv[1] + cpeqn->z*cv[2] + cpeqn->d;
if ( x < -clip_plane_tolerance )
out |= cpbit;
cpbit <<= 1;
cpeqn++;
}
}
w = xform[12]*cv[0] + xform[13]*cv[1] + xform[14]*cv[2] + xform[15];
x = xform[0]*cv[0] + xform[1]*cv[1] + xform[2]*cv[2] + xform[3];
if (x < -w) out |= 0x01; else if (x > w) out |= 0x02;
y = xform[4]*cv[0] + xform[5]*cv[1] + xform[6]*cv[2] + xform[7];
if (y < -w) out |= 0x04; else if (y > w) out |= 0x08;
z = xform[8]*cv[0] + xform[9]*cv[1] + xform[10]*cv[2] + xform[11];
if (z < -w) out |= 0x10; else if (z > w) out |= 0x20;
if ( w <= 0.0 )
{
w = (0.0==w) ? 1.0 : 1.0/w;
out |= 0x80000000;
}
else
{
w = 1.0/w;
}
Q.x = x*w; Q.y = y*w; Q.z = z*w;
return out;
}
unsigned int ON_ClippingRegion::TransformPoint(
const ON_3fPoint& P,
ON_3dPoint& Q
) const
{
ON_3dPoint PP(P.x,P.y,P.z);
return TransformPoint(PP,Q);
}
int ON_ClippingRegion::TransformPoints( int count, ON_4dPoint* p, unsigned int* pflags ) const
{
// transforms cv's to pick coordinates
const double* xform;
const ON_PlaneEquation* cpeqn;
double* cv;
double x, y, z, w;
unsigned int out, all_out, some_out, cpbit;
int i, j;
// 14 May 2012 Dale Lear
// Fix http://dev.mcneel.com/bugtrack/?q=102481
// Picking hatches that are coplanar with clipping planes.
// The "fix" was to set clipping_plane_tolerance = same
// tolerance the display code uses. Before the fix,
// 0.0 was used as the clipping_plane_tolerance.
const double clip_plane_tolerance = ClipPlaneTolerance();
some_out = 0;
all_out = 0xFFFFFFFF;
xform = &m_xform.m_xform[0][0];
cv = &p[0].x;
i = count;
while (i--)
{
out = 0;
if ( m_clip_plane_count )
{
cpbit = 0x40;
cpeqn = m_clip_plane;
j = m_clip_plane_count;
while (j--)
{
x = cpeqn->x*cv[0] + cpeqn->y*cv[1] + cpeqn->z*cv[2] + cpeqn->d*cv[3];
if ( x < -clip_plane_tolerance )
out |= cpbit;
cpbit <<= 1;
cpeqn++;;
}
}
w = xform[12]*cv[0] + xform[13]*cv[1] + xform[14]*cv[2] + xform[15]*cv[3];
x = xform[0]*cv[0] + xform[1]*cv[1] + xform[2]*cv[2] + xform[3]*cv[3];
if (x < -w) out |= 0x01; else if (x > w) out |= 0x02;
y = xform[4]*cv[0] + xform[5]*cv[1] + xform[6]*cv[2] + xform[7]*cv[3];
if (y < -w) out |= 0x04; else if (y > w) out |= 0x08;
z = xform[8]*cv[0] + xform[9]*cv[1] + xform[10]*cv[2] + xform[11]*cv[3];
if (z < -w) out |= 0x10; else if (z > w) out |= 0x20;
if ( w <= 0.0 )
out |= 0x80000000;
some_out |= out;
all_out &= out;
*pflags++ = out;
*cv++ = x; *cv++ = y; *cv++ = z; *cv++ = w;
}
if ( all_out )
i = 0;
else if ( some_out )
i = 1;
else
i = 2;
return i;
}
int ON_ClippingRegion::TransformPoints( int count, ON_4dPoint* p ) const
{
  // Transforms the homogeneous points p[0..count-1] (in place) from world
  // coordinates to pick/clip coordinates and classifies the list.
  //
  // Returns:
  //   0 - every point is outside the clipping region
  //   1 - some points are in, some are out
  //   2 - every point is inside
  //
  // Once the answer is known to be 1, the remaining points are run through
  // a stripped-down loop that only transforms (no classification).
  // transforms cv's to pick coordinates
  const double* xform;
  const ON_PlaneEquation* cpeqn;
  double* cv;
  double x, y, z, w;
  unsigned int out, all_out, some_out, cpbit;
  int i, j;

  // 14 May 2012 Dale Lear
  //    Fix http://dev.mcneel.com/bugtrack/?q=102481
  //    Picking hatches that are coplanar with clipping planes.
  //    The "fix" was to set clipping_plane_tolerance = same
  //    tolerance the display code uses.  Before the fix,
  //    0.0 was used as the clipping_plane_tolerance.
  const double clip_plane_tolerance = ClipPlaneTolerance();

  some_out = 0;         // union of the per-point clip flags
  all_out = 0xFFFFFFFF; // intersection of the per-point clip flags
  xform = &m_xform.m_xform[0][0];
  cv = &p[0].x;
  i = count;
  while (i--)
  {
    out = 0;
    // Custom clipping planes occupy bits 0x40 and up.
    if ( m_clip_plane_count )
    {
      cpbit = 0x40;
      cpeqn = m_clip_plane;
      j = m_clip_plane_count;
      while (j--)
      {
        x = cpeqn->x*cv[0] + cpeqn->y*cv[1] + cpeqn->z*cv[2] + cpeqn->d*cv[3];
        if ( x < -clip_plane_tolerance )
          out |= cpbit;
        cpbit <<= 1;
        cpeqn++;;
      }
    }
    // Frustum outcode bits 0x01..0x20 from the transformed point.
    w = xform[12]*cv[0] + xform[13]*cv[1] + xform[14]*cv[2] + xform[15]*cv[3];
    x = xform[0]*cv[0] + xform[1]*cv[1] + xform[2]*cv[2] + xform[3]*cv[3];
    if (x < -w) out |= 0x01; else if (x > w) out |= 0x02;
    y = xform[4]*cv[0] + xform[5]*cv[1] + xform[6]*cv[2] + xform[7]*cv[3];
    if (y < -w) out |= 0x04; else if (y > w) out |= 0x08;
    z = xform[8]*cv[0] + xform[9]*cv[1] + xform[10]*cv[2] + xform[11]*cv[3];
    if (z < -w) out |= 0x10; else if (z > w) out |= 0x20;
    // Overwrite the input point with its transformed image; this also
    // advances cv to the next point.
    *cv++ = x; *cv++ = y; *cv++ = z; *cv++ = w;
    some_out |= out;
    all_out  &= out;
    if ( some_out && !all_out )
    {
      // no further "out" checking is necessary
      // (the answer is already "partially visible")
      while (i--)
      {
        x = xform[0]*cv[0] + xform[1]*cv[1] + xform[2]*cv[2] + xform[3]*cv[3];
        y = xform[4]*cv[0] + xform[5]*cv[1] + xform[6]*cv[2] + xform[7]*cv[3];
        z = xform[8]*cv[0] + xform[9]*cv[1] + xform[10]*cv[2] + xform[11]*cv[3];
        w = xform[12]*cv[0] + xform[13]*cv[1] + xform[14]*cv[2] + xform[15]*cv[3];
        *cv++ = x; *cv++ = y; *cv++ = z; *cv++ = w;
      }
      break;
    }
  }
  if ( all_out )
    i = 0;  // everything clipped (also the count<=0 case)
  else if ( some_out )
    i = 1;  // mixed
  else
    i = 2;  // everything visible
  return i;
}
bool ON_ClippingRegion::GetLineClipPlaneParamters(
       ON_4dPoint P0,
       ON_4dPoint P1,
       double* t0,
       double* t1
       ) const
{
  // Clips the homogeneous segment from P0 (t=0) to P1 (t=1) against the
  // custom clipping planes only (the view frustum is not considered).
  //
  // Output:
  //   *t0 <= *t1 - normalized segment parameters of the portion on the
  //                kept side of every clipping plane.  [0,1] is reported
  //                when there are no clipping planes.
  // Returns false when the segment is completely clipped away.
  //
  // NOTE(review): "Paramters" is a typo in the public name; it cannot be
  // renamed here without breaking callers.
  double s0, s1, x0, x1, s;
  const ON_PlaneEquation* eqn;
  int i;
  if ( m_clip_plane_count )
  {
    s0 = 0.0; // running start of the kept sub-interval
    s1 = 1.0; // running end of the kept sub-interval
    eqn = m_clip_plane;
    // Same tolerance the display code uses - see the 14 May 2012 fix
    // notes in the IsVisible() overloads above.
    const double clip_plane_tolerance = ClipPlaneTolerance();
    for ( i = 0; i < m_clip_plane_count; i++, eqn++ )
    {
      // Signed homogeneous altitudes of the segment ends above this plane;
      // the altitude is linear in t, vanishing at t = x0/(x0-x1).
      x0 = eqn->x*P0.x + eqn->y*P0.y + eqn->z*P0.z + eqn->d*P0.w;
      x1 = eqn->x*P1.x + eqn->y*P1.y + eqn->z*P1.z + eqn->d*P1.w;
      if ( x0 < 0.0)
      {
        if ( x1 <= 0.0 )
        {
          // Both ends below the plane: reject only when both are below
          // by more than the tolerance.
          if ( x0 < -clip_plane_tolerance && x1 <= - clip_plane_tolerance )
            return false;
        }
        if ( x0 != x1 )
        {
          // P0 side is clipped: advance s0 to the crossing parameter.
          s = x0/(x0-x1);
          if ( s > s0 )
          {
            s0 = s;
            if ( s0 >= s1 )
              return false; // kept interval vanished
          }
        }
      }
      else if ( x1 < 0.0 )
      {
        if ( x0 <= 0.0 )
        {
          if ( x1 < -clip_plane_tolerance && x0 <= - clip_plane_tolerance )
            return false;
        }
        if ( x0 != x1 )
        {
          // P1 side is clipped: pull s1 back to the crossing parameter.
          // BUG FIX: this used to be s = x1/(x1-x0), which is
          // 1 - x0/(x0-x1), i.e. the crossing measured from the P1 end.
          // s0 and s1 share the P0->P1 parameterization (they are
          // compared against each other and returned together), so the
          // crossing must be expressed in that same parameterization.
          s = x0/(x0-x1);
          if ( s < s1 )
          {
            s1 = s;
            if ( s0 >= s1 )
              return false; // kept interval vanished
          }
        }
      }
    }
    *t0 = s0;
    *t1 = s1;
  }
  else
  {
    // No custom clipping planes: the whole segment is kept.
    *t0 = 0.0;
    *t1 = 1.0;
  }
  return true;
}
int ON_BoundingBox::IsVisible(
       const ON_Xform& bbox2c
       ) const
{
  // Classifies this box against the clipping pyramid encoded by bbox2c
  // (a transform taking box coordinates to homogeneous clip coordinates).
  // Returns: 0 = box outside, 1 = partially visible, 2 = box inside.
  if ( !ON_IsValid(m_min.x) || !ON_IsValid(m_max.x) || m_min.x > m_max.x)
    return 0;

  const double* m = &bbox2c.m_xform[0][0];

  unsigned int union_flags  = 0;          // outcode bits hit by some corner
  unsigned int common_flags = 0xFFFFFFFF; // outcode bits hit by every corner

  // Visit all 8 corners; the bits of `corner` select min or max per axis.
  for ( int corner = 0; corner < 8; corner++ )
  {
    const double bx = (corner & 4) ? m_max.x : m_min.x;
    const double by = (corner & 2) ? m_max.y : m_min.y;
    const double bz = (corner & 1) ? m_max.z : m_min.z;

    const double w = bx*m[12] + by*m[13] + bz*m[14] + m[15];
    unsigned int flags = 0;
    double v;
    v = bx*m[ 0] + by*m[ 1] + bz*m[ 2] + m[ 3];
    if ( v < -w) flags |= 0x01; else if (v > w) flags |= 0x02;
    v = bx*m[ 4] + by*m[ 5] + bz*m[ 6] + m[ 7];
    if ( v < -w) flags |= 0x04; else if (v > w) flags |= 0x08;
    v = bx*m[ 8] + by*m[ 9] + bz*m[10] + m[11];
    if ( v < -w) flags |= 0x10; else if (v > w) flags |= 0x20;

    union_flags  |= flags;
    common_flags &= flags;
    if ( union_flags && !common_flags )
    {
      // box intersects visible region but is not completely inside it
      return 1;
    }
  }
  // All corners share an out bit (outside), or no corner was ever out.
  return ( common_flags ? 0 : 2 );
}
bool ON_BoundingBox::IsPointIn( const ON_3dPoint& p, int bStrictlyIn ) const
{
  // When bStrictlyIn is nonzero the point must lie in the open interior;
  // otherwise boundary points count as "in".
  if ( bStrictlyIn )
  {
    return m_min.x < p.x && p.x < m_max.x
        && m_min.y < p.y && p.y < m_max.y
        && m_min.z < p.z && p.z < m_max.z;
  }
  return m_min.x <= p.x && p.x <= m_max.x
      && m_min.y <= p.y && p.y <= m_max.y
      && m_min.z <= p.z && p.z <= m_max.z;
}
ON_3dPoint ON_BoundingBox::ClosestPoint(
       const ON_3dPoint& test_point
       ) const
{
  // Clamps each coordinate of test_point to the box's range, producing
  // the point of the box nearest to test_point.
  // GBA 30 March 04.  For performance reasons in closest point to surface
  // this function no longer validates the bounding box.
  ON_3dPoint near_point = test_point;
  near_point.x = (test_point.x < m_min.x) ? m_min.x
               : ((test_point.x > m_max.x) ? m_max.x : test_point.x);
  near_point.y = (test_point.y < m_min.y) ? m_min.y
               : ((test_point.y > m_max.y) ? m_max.y : test_point.y);
  near_point.z = (test_point.z < m_min.z) ? m_min.z
               : ((test_point.z > m_max.z) ? m_max.z : test_point.z);
  return near_point;
}
int ON_BoundingBox::GetClosestPoint(
       const ON_Line& line, ON_3dPoint& box_point, double* t0, double* t1
       ) const
{
  // Finds the point(s) of this box closest to the line segment `line`.
  //
  // Output:
  //   box_point - a closest point on the box
  //   t0, t1    - (optional) line parameters of the closest point(s);
  //               t0 == t1 unless the segment overlaps the box.
  // Returns:
  //   0 - failure (invalid box or line)
  //   1 - unique closest point pair
  //   2 - segment touches the box at a single parameter
  //   3 - segment and box overlap over the interval [*t0,*t1]
  if(!IsValid() || !line.IsValid())
    return 0;

  ON_3dPoint closest;

  // Degenerate (essentially zero length) segment: treat it as a point.
  if(line.Direction().Length()<=ON_SQRT_EPSILON){
    ON_3dPoint center = line.PointAt(.5);
    if(t0) *t0 = 0.0;
    if(t1) *t1 = 1.0;
    box_point = ClosestPoint(center);
    return IsPointIn( center )? 3 : 1;
  }

  // For each axis, the normalized line parameters where the infinite line
  // crosses the slab [Min()[j], Max()[j]].
  ON_Interval over[3]; //parameter overlap for each direction
  for(int j=0; j<3; j++){
    ON_Interval pl( line[0][j], line[1][j]);
    if( pl[0]!=pl[1])
      over[j]= ON_Interval( pl.NormalizedParameterAt(Min()[j]),
                            pl.NormalizedParameterAt(Max()[j]) );
    else
      if( Min()[j]<=pl[0] && pl[0]<=Max()[j] )
        over[j]=ON_Interval(-ON_DBL_MAX, ON_DBL_MAX);  // line lives in the slab
      else
        over[j]=ON_Interval(ON_UNSET_VALUE, ON_UNSET_VALUE); // never enters it
  }

  // Step 1. Check for an intersection of the infinite line with the box.
  ON_Interval overlap(-ON_DBL_MAX, ON_DBL_MAX);
  bool nonempty=true;
  int i;
  for( i=0;i<3 && nonempty;i++)
    nonempty = overlap.Intersection(over[i]);
  if(nonempty){ // infinite line intersects box
    if( overlap.Intersection( ON_Interval(0,1) ) ){
      // Box & Line segment intersect
      if(t0) *t0 = overlap[0];
      if(t1) *t1 = overlap[1];
      box_point = line.PointAt(overlap[0]);
      return (overlap.Length()>0)? 3 : 2;
    }
    // The infinite line hits the box, but the segment [0,1] stops short
    // of it, so the closest point of the segment is the nearer end.
    double EndInd=(overlap[1]<0)? 0.0: 1.0;
    if(t0) *t0 = EndInd;
    if(t1) *t1 = EndInd;
    // BUG FIX: this path used to return without assigning box_point,
    // handing the caller an uninitialized answer.
    box_point = ClosestPoint(line.PointAt(EndInd));
    return 1;
  }

  // Step 2. Check for closest point on box edge and line segment interior
  // In this case when we project orthogonal to the box edge we get a line
  // in the plane that doesn't intersect the 2d-box in the plane. The projection
  // of the 3d closest point is the closest point of this 2d closest point problem.
  int k[3];
  for(i=0; i<3; i++)
  {
    // Project box and line onto coord plane with normal Unit(i).
    if(!overlap.Intersection( over[(i+1)%3], over[(i+2)%3] )){
      // Projected line doesn't intersect the projected box.
      // Find the closest vertex of the projected box.
      ON_3dVector StdUnit(0,0,0);
      StdUnit[i]=1.0;
      ON_3dVector n = ON_CrossProduct(line.Direction(), StdUnit);
      if(n.Length()==0)
        continue; // line parallel to axis i: no edge candidate in this plane
      n.Unitize();
      int ilo[3]={0,0,0};
      int ihi[3]={1,1,1};
      int imin[3]={-1,-1,-1};
      double amin=0.0;
      ihi[i]=ilo[i]; // collapse the projection axis: scan the 4 plane corners
      for(k[0]=ilo[0]; k[0]<=ihi[0]; k[0]++)
      for(k[1]=ilo[1]; k[1]<=ihi[1]; k[1]++)
      for(k[2]=ilo[2]; k[2]<=ihi[2]; k[2]++){
        // Signed distance of this corner from the projected line.
        double a = n*(Corner(k[0],k[1],k[2]) - line.from);
        if(amin == 0.0 || fabs(a)<fabs(amin))
        {
          amin= a;
          imin[0]=k[0]; imin[1]=k[1]; imin[2]=k[2];
        }
      }
      if ( imin[0] < 0 )
      {
        return 0;
      }
      ON_3dPoint vertex = Corner(imin[0],imin[1],imin[2]);
      vertex[i] = line.from[i];
      // Solve for 2d-closest point between closest corner and projected line
      ON_3dVector ProjDir = line.Direction();
      ProjDir[i]=0.0;
      double t = ( vertex - line.from)*ProjDir / ProjDir.LengthSquared();
      ON_3dPoint cp = line.PointAt(t);
      if( 0<=t && t<=1 && m_min[i]<=cp[i] && cp[i]<= m_max[i] ){
        if(t0) *t0 = t; // found the closest point
        if(t1) *t1 = t;
        vertex[i] = cp[i];
        box_point = vertex;
        return 1;
      }
    }
  }

  //Step 3. Check each Corner of the box for closest points
  for( k[0]=0; k[0]<2; k[0]++)
  for( k[1]=0; k[1]<2; k[1]++)
  for( k[2]=0; k[2]<2; k[2]++){
    ON_3dPoint corner = Corner(k[0],k[1],k[2]);
    double tstar;
    line.ClosestPointTo( corner, &tstar );
    ON_3dPoint cp = line.PointAt( tstar);
    ON_3dVector disp = cp - corner;
    bool InNCone=true;
    // The corner is the closest box point when the displacement to the
    // line lies in the corner's normal cone.
    // BUG FIX (readability/latent): the old expression
    //   InNCone = InNCone && ( k[j] )? disp[j]>=0 : disp[j]<=0 ;
    // actually parsed as (InNCone && k[j]) ? ... : ... because ?: binds
    // looser than &&; the loop guard kept the results identical, but the
    // parenthesized form below states what is meant.
    // NOTE(review): only j = 0,1 are tested; confirm whether excluding
    // the z axis (j = 2) is intentional.
    for(int j=0;j<2 && InNCone;j++){
      InNCone = InNCone && ( k[j] ? disp[j]>=0 : disp[j]<=0 );
    }
    if(InNCone){
      if(t0) *t0 = tstar;
      if(t1) *t1 = tstar;
      box_point = corner;
      return 1;
    }
  }

  //Step 4. Closest point is at a line end
  for(i=0; i<2; i++){
    closest = ClosestPoint(line[i]);
    double dot = (closest - line[i]) * line.Direction();
    // The segment end minimizes the distance when the box lies "beyond"
    // that end along the line direction.
    if( (i==0 && dot<= 0) || (i==1 && dot>=0) )
    {
      if(t0) *t0 = i;
      if(t1) *t1 = i;
      box_point = closest;
      return 1;
    }
  }
  ON_ASSERT(false); //Should never get here
  return 0;
}
ON_3dPoint ON_BoundingBox::FarPoint(
       const ON_3dPoint& test_point
       ) const
{
  // Per coordinate, picks whichever box side is farther from test_point
  // (ties go to the min side).  The box is deliberately not validated.
  ON_3dPoint far_point = test_point;
  far_point.x = ( fabs(m_min.x-test_point.x) >= fabs(m_max.x-test_point.x) )
              ? m_min.x : m_max.x;
  far_point.y = ( fabs(m_min.y-test_point.y) >= fabs(m_max.y-test_point.y) )
              ? m_min.y : m_max.y;
  far_point.z = ( fabs(m_min.z-test_point.z) >= fabs(m_max.z-test_point.z) )
              ? m_min.z : m_max.z;
  return far_point;
}
//TODO: Replace this static function with an ON_Interval member function.
//  Add to the ON_Interval class in ON_Interval.h
//
//  returns true if the intersection is non-empty and sets AB to the intersection
//  bool GetIntersection(ON_Interval B, ON_Interval& AB);
static bool Intersect( ON_Interval A, ON_Interval B, ON_Interval& AB);
bool Intersect( ON_Interval A, ON_Interval B, ON_Interval& AB)
{
  // Computes AB = A intersect B after normalizing both intervals to be
  // increasing.  Intervals that merely touch produce a degenerate
  // single-point intersection.  On an empty intersection AB is destroyed
  // and false is returned.
  if(A.IsDecreasing()) A.Swap();
  if(B.IsDecreasing()) B.Swap();
  if( A.m_t[1] < B.m_t[0] || B.m_t[1] < A.m_t[0] )
  {
    // disjoint
    AB.Destroy();
    return false;
  }
  const double lo = (A.m_t[0] >= B.m_t[0]) ? A.m_t[0] : B.m_t[0];
  const double hi = (A.m_t[1] <= B.m_t[1]) ? A.m_t[1] : B.m_t[1];
  AB.Set(lo, hi);
  return true;
}
bool ON_BoundingBox::GetClosestPoint(
const ON_BoundingBox& other_box, // "other" bounding box
ON_3dPoint& this_point, // point on "this" box that is closest to "other" box
ON_3dPoint& other_point // point on "other" box that is closest to "this" box
) const
{
ON_BoundingBox b;
if ( !IsValid() || !other_box.IsValid() )
return false;
for (int i=0; i<3; i++ )
{
ON_Interval It(m_min[i],m_max[i]);
ON_Interval Io(other_box.m_min[i],other_box.m_max[i]);
ON_Interval intersect;
bool NotEmpty = Intersect(It,Io,intersect);
if(NotEmpty)
{
this_point[i] = other_point[i] = intersect.Mid();
}
else {
if(m_max[i]< other_box.m_min[i] )
{
this_point[i] = m_max[i];
other_point[i] = other_box.m_min[i];
}
else {
this_point[i] = m_min[i];
other_point[i] = other_box.m_max[i];
}
}
}
return true;
}
//////////
// Get points on bounding boxes that are farthest from each other.
bool ON_BoundingBox::GetFarPoint(
const ON_BoundingBox& other_box, // "other" bounding box
ON_3dPoint& this_point, // point on "this" box that is farthest from "other" box point
ON_3dPoint& other_point // point on "other" box that is farthest from "this" box point
) const
{
if(!IsValid() || !other_box.IsValid())
return false;
for(int i=0; i<3; i++){
ON_Interval It(m_min[i], m_max[i]);
ON_Interval Io(other_box.m_min[i], other_box.m_max[i]);
if( It.Includes(Io) || Io.Includes(It)){
if( m_max[i] - other_box.m_min[i] > other_box.m_max[i] - m_min[i]){
this_point[i] = m_max[i];
other_point[i] = other_box.m_min[i];
} else {
this_point[i] = m_min[i];
other_point[i] = other_box.m_max[i];
}
} else {
if( m_min[i]< other_box.m_min[i]){
this_point[i]=m_min[i];
} else {
other_point[i] = other_box.m_min[i];
}
if( m_max[i]> other_box.m_max[i]){
this_point[i]=m_max[i];
} else {
other_point[i] = other_box.m_max[i];
}
}
}
return true;
}
bool ON_BoundingBox::SwapCoordinates( int i, int j )
{
  // Swaps coordinates i and j of both corners (e.g. to exchange the
  // roles of the y and z axes).  Returns false when the box is invalid
  // or an index is out of range.
  if ( !IsValid() || i < 0 || i > 2 || j < 0 || j > 2 )
    return false;
  if ( i != j )
  {
    double t;
    t = m_min[i]; m_min[i] = m_min[j]; m_min[j] = t;
    t = m_max[i]; m_max[i] = m_max[j]; m_max[j] = t;
  }
  return true;
}
bool ON_BoundingBox::IsDisjoint( const ON_BoundingBox& other_bbox ) const
{
  // True when the boxes share no point.  A box with an inverted (empty)
  // coordinate interval is disjoint from everything.
  const bool x_overlaps = m_min.x <= m_max.x
                       && other_bbox.m_min.x <= other_bbox.m_max.x
                       && m_min.x <= other_bbox.m_max.x
                       && m_max.x >= other_bbox.m_min.x;
  if ( !x_overlaps )
    return true;
  const bool y_overlaps = m_min.y <= m_max.y
                       && other_bbox.m_min.y <= other_bbox.m_max.y
                       && m_min.y <= other_bbox.m_max.y
                       && m_max.y >= other_bbox.m_min.y;
  if ( !y_overlaps )
    return true;
  const bool z_overlaps = m_min.z <= m_max.z
                       && other_bbox.m_min.z <= other_bbox.m_max.z
                       && m_min.z <= other_bbox.m_max.z
                       && m_max.z >= other_bbox.m_min.z;
  return !z_overlaps;
}
bool ON_BoundingBox::Intersection(
       const ON_BoundingBox& a
       )
{
  // Shrinks this box to (this intersect a).  If either operand is invalid
  // the box is destroyed.
  if ( IsValid() && a.IsValid() )
  {
    if ( m_min.x < a.m_min.x ) m_min.x = a.m_min.x;
    if ( m_min.y < a.m_min.y ) m_min.y = a.m_min.y;
    if ( m_min.z < a.m_min.z ) m_min.z = a.m_min.z;
    if ( m_max.x > a.m_max.x ) m_max.x = a.m_max.x;
    if ( m_max.y > a.m_max.y ) m_max.y = a.m_max.y;
    if ( m_max.z > a.m_max.z ) m_max.z = a.m_max.z;
  }
  else
  {
    Destroy();
  }
  // An empty overlap leaves an inverted box, so this also reports
  // whether the intersection is non-empty.
  return IsValid();
}
bool ON_BoundingBox::Intersection( //Returns true when intersect is non-empty.
const ON_Line& line, //Infinite Line segment to intersect with
double* t0 , // t0 parameter of first intersection point
double* t1 // t1 parameter of last intersection point (t0<=t1)
) const
{
ON_Interval t(-ON_DBL_MAX, ON_DBL_MAX), ti, Li;
const double* boxmin = &m_min.x;
const double* boxmax = &m_max.x;
const double* from = &line.from.x;
const double* to = &line.to.x;
for(int i=0; i<3; i++)
{
if( from[i] == to[i] )
{
if( from[i] < boxmin[i] || from[i] > boxmax[i] )
return false;
}
else
{
Li.m_t[0] = from[i];
Li.m_t[1] = to[i];
ti.m_t[0] = Li.NormalizedParameterAt( boxmin[i]);
ti.m_t[1] = Li.NormalizedParameterAt( boxmax[i]);
if ( !t.Intersection(ti) )
return false;
}
}
if(t0)
*t0 = t.Min();
if(t1)
*t1 = t.Max();
return true;
}
bool ON_BoundingBox::Union(
       const ON_BoundingBox& a
       )
{
  // Expands this box to contain a.  An invalid `a` is ignored; if this
  // box is invalid it becomes a copy of `a` (or is destroyed when both
  // are invalid).
  if ( !IsValid() )
  {
    if ( a.IsValid() )
      *this = a;
    else
      Destroy();
  }
  else if ( a.IsValid() )
  {
    if ( m_min.x > a.m_min.x ) m_min.x = a.m_min.x;
    if ( m_min.y > a.m_min.y ) m_min.y = a.m_min.y;
    if ( m_min.z > a.m_min.z ) m_min.z = a.m_min.z;
    if ( m_max.x < a.m_max.x ) m_max.x = a.m_max.x;
    if ( m_max.y < a.m_max.y ) m_max.y = a.m_max.y;
    if ( m_max.z < a.m_max.z ) m_max.z = a.m_max.z;
  }
  return IsValid();
}
bool ON_BoundingBox::Intersection(
       const ON_BoundingBox& a,
       const ON_BoundingBox& b
       )
{
  // Sets this box to (a intersect b).  The box is destroyed when either
  // input is invalid; an empty overlap yields an inverted (invalid) box.
  if ( !a.IsValid() || !b.IsValid() )
  {
    Destroy();
    return IsValid();
  }
  m_min.x = (b.m_min.x > a.m_min.x) ? b.m_min.x : a.m_min.x; // max of mins
  m_min.y = (b.m_min.y > a.m_min.y) ? b.m_min.y : a.m_min.y;
  m_min.z = (b.m_min.z > a.m_min.z) ? b.m_min.z : a.m_min.z;
  m_max.x = (b.m_max.x < a.m_max.x) ? b.m_max.x : a.m_max.x; // min of maxes
  m_max.y = (b.m_max.y < a.m_max.y) ? b.m_max.y : a.m_max.y;
  m_max.z = (b.m_max.z < a.m_max.z) ? b.m_max.z : a.m_max.z;
  return IsValid();
}
bool ON_BoundingBox::Includes(
       const ON_BoundingBox& other,
       bool bProperSubSet) const
{
  // True when `other` is contained in this box.  When bProperSubSet is
  // set, containment must also be proper: at least one coordinate
  // interval of `other` is strictly inside this box's interval.
  //
  // 9 December 2004 Dale Lear
  //   fixed bug by changing if(proper) to if(bProperSubSet)
  bool contained = true;
  bool strict = false;
  for ( int i = 0; i < 3 && contained; i++ )
  {
    contained = ON_Interval( m_min[i], m_max[i] ).Includes(
                  ON_Interval( other.m_min[i], other.m_max[i] ) );
    if ( bProperSubSet && !strict )
    {
      strict = (other.m_min[i] > m_min[i]) || (other.m_max[i] < m_max[i]);
    }
  }
  return bProperSubSet ? (contained && strict) : contained;
}
bool ON_BoundingBox::Union(
       const ON_BoundingBox& a,
       const ON_BoundingBox& b
       )
{
  // Sets this box to the smallest box containing both a and b.
  // Invalid inputs are ignored; both invalid destroys the box.
  if ( a.IsValid() && b.IsValid() )
  {
    m_min.x = (b.m_min.x < a.m_min.x) ? b.m_min.x : a.m_min.x; // min of mins
    m_min.y = (b.m_min.y < a.m_min.y) ? b.m_min.y : a.m_min.y;
    m_min.z = (b.m_min.z < a.m_min.z) ? b.m_min.z : a.m_min.z;
    m_max.x = (b.m_max.x > a.m_max.x) ? b.m_max.x : a.m_max.x; // max of maxes
    m_max.y = (b.m_max.y > a.m_max.y) ? b.m_max.y : a.m_max.y;
    m_max.z = (b.m_max.z > a.m_max.z) ? b.m_max.z : a.m_max.z;
  }
  else if ( a.IsValid() )
    *this = a;
  else if ( b.IsValid() )
    *this = b;
  else
    Destroy();
  return IsValid();
}
double ON_BoundingBox::Volume() const
{
  // Degenerate, inverted, or otherwise non-positive extents give zero.
  const double dx = m_max.x - m_min.x;
  const double dy = m_max.y - m_min.y;
  const double dz = m_max.z - m_min.z;
  if ( !(dx > 0.0) || !(dy > 0.0) || !(dz > 0.0) )
    return 0.0;
  return dx*dy*dz;
}
double ON_BoundingBox::Area() const
{
  // Surface area of the box; degenerate (flat) boxes are permitted,
  // inverted extents give zero.
  const double dx = m_max.x - m_min.x;
  const double dy = m_max.y - m_min.y;
  const double dz = m_max.z - m_min.z;
  if ( dx >= 0.0 && dy >= 0.0 && dz >= 0.0 )
    return 2.0*(dx*dy + dy*dz + dz*dx);
  return 0.0;
}
bool ON_BoundingBox::Set(
       int dim, int is_rat, int count, int stride,
       const double* points,
       int bGrowBox
       )
{
  // Delegates to the point-list bounding box workhorse with no transform.
  const bool grow = (0 != bGrowBox);
  return ON_GetPointListBoundingBox( dim, is_rat, count, stride, points, *this, grow, 0 );
}
bool ON_BoundingBox::Set ( const ON_3dPoint& P, int bGrowBox )
{
  // Expands a valid box to contain P when bGrowBox is nonzero; otherwise
  // (or when the box is invalid) collapses the box to the single point P.
  if ( bGrowBox && IsValid() )
  {
    if ( m_min.x > P.x ) m_min.x = P.x; else if ( m_max.x < P.x ) m_max.x = P.x;
    if ( m_min.y > P.y ) m_min.y = P.y; else if ( m_max.y < P.y ) m_max.y = P.y;
    if ( m_min.z > P.z ) m_min.z = P.z; else if ( m_max.z < P.z ) m_max.z = P.z;
  }
  else
  {
    m_min = P;
    m_max = P;
  }
  return true;
}
bool ON_BoundingBox::Set( const ON_SimpleArray<ON_4dPoint>& a, int bGrowBox )
{
  // Bounding box of an array of homogeneous rational points (stride 4).
  const int n = a.Count();
  const double* q = (n > 0) ? &a.Array()->x : 0;
  return ON_GetPointListBoundingBox( 3, 1, n, 4, q, *this, bGrowBox != 0, 0 );
}
bool ON_BoundingBox::Set( const ON_SimpleArray<ON_3dPoint>& a, int bGrowBox )
{
  // Bounding box of an array of euclidean 3d points (stride 3).
  const int n = a.Count();
  const double* q = (n > 0) ? &a.Array()->x : 0;
  return ON_GetPointListBoundingBox( 3, 0, n, 3, q, *this, bGrowBox != 0, 0 );
}
bool ON_BoundingBox::Set( const ON_SimpleArray<ON_2dPoint>& a, int bGrowBox )
{
  // Bounding box of an array of 2d points (stride 2); z is reported as 0.
  const int n = a.Count();
  const double* q = (n > 0) ? &a.Array()->x : 0;
  return ON_GetPointListBoundingBox( 2, 0, n, 2, q, *this, bGrowBox != 0, 0 );
}
ON_BoundingBox ON_PointListBoundingBox(
       int dim, int is_rat, int count, int stride, const double* points
       )
{
  // Convenience wrapper: returns the bounding box of the packed point
  // list (the box is invalid when the list is empty or malformed).
  ON_BoundingBox result;
  ON_GetPointListBoundingBox( dim, is_rat, count, stride, points, result, false, 0 );
  return result;
}
bool ON_GetPointListBoundingBox(
    int dim, int is_rat, int count, int stride, const double* points,
    ON_BoundingBox& tight_bbox,
    int bGrowBox,
    const ON_Xform* xform
    )
{
  // bounding box workhorse
  //
  // Computes the 3d bounding box of a packed double precision point list.
  //   dim        - coordinate dimension (>= 1); coordinates past 3 are
  //                pinned to 0 in the answer
  //   is_rat     - nonzero when points are homogeneous rational; the
  //                weight follows the coordinates at points[dim]
  //   count      - number of points
  //   stride     - doubles between successive points (>= dim+is_rat
  //                when count > 1)
  //   tight_bbox - result; grown rather than replaced when bGrowBox is
  //                true and tight_bbox is already valid
  //   xform      - optional transform applied to each (dehomogenized)
  //                point; ignored when it is the identity
  // Returns true on success; false for an empty/malformed list, and also
  // when rational points with zero weight had to be skipped.
  bool rc = false;
  // A grow request against an invalid box degenerates to a plain set.
  if ( bGrowBox && !tight_bbox.IsValid() )
  {
    bGrowBox = false;
  }
  if ( !bGrowBox )
  {
    tight_bbox.Destroy();
  }
  if ( is_rat )
  {
    is_rat = 1; // normalize so stride validation below is exact
  }
  if ( count > 0 && dim > 0 && points && (count == 1 || stride >= dim+is_rat) )
  {
    ON_BoundingBox bbox; // box of this list; unioned into tight_bbox at the end
    ON_3dPoint P(0.0,0.0,0.0);
    double w;
    int i, wi;
    if ( xform && xform->IsIdentity() )
    {
      // identity transform - skip the per-point Transform() calls below
      xform = 0;
    }
    wi = dim; // index of the rational weight within a point
    if ( dim > 3 )
    {
      dim = 3; // only x,y,z participate in the 3d box
    }
    rc = true;
    if ( is_rat )
    {
      // skip bogus starting points
      while ( count > 0 && points[wi] == 0.0 )
      {
        count--;
        points += stride;
        rc = false; // zero weights make the result suspect
      }
      if ( count <= 0 )
        return false;
    }
    // Seed the box with the first (valid) point.
    memcpy( &bbox.m_min.x, points, dim*sizeof(bbox.m_min.x) );
    if ( is_rat )
    {
      w = 1.0/points[wi];
      bbox.m_min.x *= w; bbox.m_min.y *= w; bbox.m_min.z *= w;
    }
    if ( xform )
    {
      bbox.m_min.Transform(*xform);
    }
    bbox.m_max = bbox.m_min;
    points += stride;
    count--;
    if ( count > 0 )
    {
      // Four specialized loops (rational / non-rational crossed with
      // transformed / untransformed) keep the per-point work minimal.
      if ( is_rat )
      {
        // homogeneous rational points
        if ( xform )
        {
          for ( /*empty*/; count--; points += stride )
          {
            if ( 0.0 == (w = points[wi]) )
            {
              rc = false; // unweightable point - skip it
              continue;
            }
            memcpy( &P.x, points, dim*sizeof(P.x) );
            w = 1.0/w;
            P.x *= w; P.y *= w; P.z *= w;
            P.Transform(*xform);
            if ( bbox.m_min.x > P.x ) bbox.m_min.x = P.x; else if ( bbox.m_max.x < P.x ) bbox.m_max.x = P.x;
            if ( bbox.m_min.y > P.y ) bbox.m_min.y = P.y; else if ( bbox.m_max.y < P.y ) bbox.m_max.y = P.y;
            if ( bbox.m_min.z > P.z ) bbox.m_min.z = P.z; else if ( bbox.m_max.z < P.z ) bbox.m_max.z = P.z;
          }
          if ( dim < 3 )
          {
            // unused coordinates are pinned to zero
            for ( i = dim; i < 3; i++)
            {
              bbox.m_min[i] = 0.0;
              bbox.m_max[i] = 0.0;
            }
          }
        }
        else
        {
          for ( /*empty*/; count--; points += stride )
          {
            if ( 0.0 == (w = points[wi]) )
            {
              rc = false; // unweightable point - skip it
              continue;
            }
            memcpy( &P.x, points, dim*sizeof(P.x) );
            w = 1.0/w;
            P.x *= w; P.y *= w; P.z *= w;
            if ( bbox.m_min.x > P.x ) bbox.m_min.x = P.x; else if ( bbox.m_max.x < P.x ) bbox.m_max.x = P.x;
            if ( bbox.m_min.y > P.y ) bbox.m_min.y = P.y; else if ( bbox.m_max.y < P.y ) bbox.m_max.y = P.y;
            if ( bbox.m_min.z > P.z ) bbox.m_min.z = P.z; else if ( bbox.m_max.z < P.z ) bbox.m_max.z = P.z;
          }
        }
      }
      else
      {
        // bounding box of non-rational points
        if ( xform )
        {
          for ( /*empty*/; count--; points += stride )
          {
            memcpy( &P.x, points, dim*sizeof(P.x) );
            P.Transform(*xform);
            if ( bbox.m_min.x > P.x ) bbox.m_min.x = P.x; else if ( bbox.m_max.x < P.x ) bbox.m_max.x = P.x;
            if ( bbox.m_min.y > P.y ) bbox.m_min.y = P.y; else if ( bbox.m_max.y < P.y ) bbox.m_max.y = P.y;
            if ( bbox.m_min.z > P.z ) bbox.m_min.z = P.z; else if ( bbox.m_max.z < P.z ) bbox.m_max.z = P.z;
          }
          if ( dim < 3 )
          {
            // unused coordinates are pinned to zero
            for ( i = dim; i < 3; i++)
            {
              bbox.m_min[i] = 0.0;
              bbox.m_max[i] = 0.0;
            }
          }
        }
        else
        {
          for ( /*empty*/; count--; points += stride )
          {
            memcpy( &P.x, points, dim*sizeof(P.x) );
            if ( bbox.m_min.x > P.x ) bbox.m_min.x = P.x; else if ( bbox.m_max.x < P.x ) bbox.m_max.x = P.x;
            if ( bbox.m_min.y > P.y ) bbox.m_min.y = P.y; else if ( bbox.m_max.y < P.y ) bbox.m_max.y = P.y;
            if ( bbox.m_min.z > P.z ) bbox.m_min.z = P.z; else if ( bbox.m_max.z < P.z ) bbox.m_max.z = P.z;
          }
        }
      }
    }
    tight_bbox.Union(bbox);
  }
  else if ( bGrowBox )
  {
    // result is still valid if no points are added to a valid input box
    rc = (0 == count);
  }
  return rc;
}
bool ON_GetPointListBoundingBox(
    int dim, int is_rat, int count, int stride, const float* points,
    ON_BoundingBox& tight_bbox,
    int bGrowBox,
    const ON_Xform* xform
    )
{
  // bounding box workhorse
  //
  // Single precision twin of the double precision overload above: computes
  // the 3d bounding box of a packed float point list.  See that overload
  // for the parameter descriptions; the only difference is that each point
  // is read into a float staging point Q before being widened to double.
  ON_BoundingBox bbox;
  ON_3dPoint P(0.0,0.0,0.0);
  ON_3fPoint Q(0.0,0.0,0.0);
  double w;
  int i, wi;
  bool rc = false;
  // A grow request against an invalid box degenerates to a plain set.
  if ( bGrowBox && !tight_bbox.IsValid() )
  {
    bGrowBox = false;
  }
  if ( !bGrowBox )
  {
    tight_bbox.Destroy();
  }
  if ( is_rat )
  {
    is_rat = 1; // normalize so stride validation below is exact
  }
  if ( count > 0 && dim > 0 && points && (count == 1 || stride >= dim+is_rat) )
  {
    if ( xform && xform->IsIdentity() )
    {
      // identity transform - skip the per-point Transform() calls below
      xform = 0;
    }
    wi = dim; // index of the rational weight within a point
    if ( dim > 3 )
    {
      dim = 3; // only x,y,z participate in the 3d box
    }
    rc = true;
    if ( is_rat )
    {
      // skip bogus starting points
      while ( count > 0 && points[wi] == 0.0f )
      {
        count--;
        points += stride;
        rc = false; // zero weights make the result suspect
      }
      if ( count <= 0 )
        return false;
    }
    // NOTE(review): when bGrowBox is true this seeding step is skipped and
    // the min/max loops below compare against the default-constructed
    // local bbox, unlike the double precision overload which always seeds
    // bbox from the first point.  Verify this is intentional.
    if ( !bGrowBox )
    {
      // Seed the box with the first (valid) point.
      memcpy( &Q.x, points, dim*sizeof(Q.x) );
      bbox.m_min = Q;
      if ( is_rat )
      {
        w = 1.0/points[wi];
        bbox.m_min.x *= w; bbox.m_min.y *= w; bbox.m_min.z *= w;
      }
      if ( xform )
      {
        bbox.m_min.Transform(*xform);
      }
      bbox.m_max = bbox.m_min;
      points += stride;
      count--;
      bGrowBox = true;
    }
    if ( count > 0 )
    {
      // Four specialized loops (rational / non-rational crossed with
      // transformed / untransformed) keep the per-point work minimal.
      if ( is_rat )
      {
        // homogeneous rational points
        if ( xform )
        {
          for ( /*empty*/; count--; points += stride )
          {
            if ( 0.0 == (w = points[wi]) )
            {
              rc = false; // unweightable point - skip it
              continue;
            }
            memcpy( &Q.x, points, dim*sizeof(Q.x) );
            w = 1.0/w;
            P.x = w*Q.x; P.y = w*Q.y; P.z = w*Q.z;
            P.Transform(*xform);
            if ( bbox.m_min.x > P.x ) bbox.m_min.x = P.x; else if ( bbox.m_max.x < P.x ) bbox.m_max.x = P.x;
            if ( bbox.m_min.y > P.y ) bbox.m_min.y = P.y; else if ( bbox.m_max.y < P.y ) bbox.m_max.y = P.y;
            if ( bbox.m_min.z > P.z ) bbox.m_min.z = P.z; else if ( bbox.m_max.z < P.z ) bbox.m_max.z = P.z;
          }
          if ( dim < 3 )
          {
            // unused coordinates are pinned to zero
            for ( i = dim; i < 3; i++)
            {
              bbox.m_min[i] = 0.0;
              bbox.m_max[i] = 0.0;
            }
          }
        }
        else
        {
          for ( /*empty*/; count--; points += stride )
          {
            if ( 0.0 == (w = points[wi]) )
            {
              rc = false; // unweightable point - skip it
              continue;
            }
            memcpy( &Q.x, points, dim*sizeof(Q.x) );
            w = 1.0/w;
            P.x = w*Q.x; P.y = w*Q.y; P.z = w*Q.z;
            if ( bbox.m_min.x > P.x ) bbox.m_min.x = P.x; else if ( bbox.m_max.x < P.x ) bbox.m_max.x = P.x;
            if ( bbox.m_min.y > P.y ) bbox.m_min.y = P.y; else if ( bbox.m_max.y < P.y ) bbox.m_max.y = P.y;
            if ( bbox.m_min.z > P.z ) bbox.m_min.z = P.z; else if ( bbox.m_max.z < P.z ) bbox.m_max.z = P.z;
          }
        }
      }
      else
      {
        // bounding box of non-rational points
        if ( xform )
        {
          for ( /*empty*/; count--; points += stride )
          {
            memcpy( &Q.x, points, dim*sizeof(Q.x) );
            P.x = Q.x; P.y = Q.y; P.z = Q.z;
            P.Transform(*xform);
            if ( bbox.m_min.x > P.x ) bbox.m_min.x = P.x; else if ( bbox.m_max.x < P.x ) bbox.m_max.x = P.x;
            if ( bbox.m_min.y > P.y ) bbox.m_min.y = P.y; else if ( bbox.m_max.y < P.y ) bbox.m_max.y = P.y;
            if ( bbox.m_min.z > P.z ) bbox.m_min.z = P.z; else if ( bbox.m_max.z < P.z ) bbox.m_max.z = P.z;
          }
          if ( dim < 3 )
          {
            // unused coordinates are pinned to zero
            for ( i = dim; i < 3; i++)
            {
              bbox.m_min[i] = 0.0;
              bbox.m_max[i] = 0.0;
            }
          }
        }
        else
        {
          for ( /*empty*/; count--; points += stride )
          {
            memcpy( &Q.x, points, dim*sizeof(Q.x) );
            P.x = Q.x; P.y = Q.y; P.z = Q.z;
            if ( bbox.m_min.x > P.x ) bbox.m_min.x = P.x; else if ( bbox.m_max.x < P.x ) bbox.m_max.x = P.x;
            if ( bbox.m_min.y > P.y ) bbox.m_min.y = P.y; else if ( bbox.m_max.y < P.y ) bbox.m_max.y = P.y;
            if ( bbox.m_min.z > P.z ) bbox.m_min.z = P.z; else if ( bbox.m_max.z < P.z ) bbox.m_max.z = P.z;
          }
        }
      }
    }
    tight_bbox.Union(bbox);
  }
  else if ( bGrowBox )
  {
    // result is still valid if no points are added to a valid input box
    rc = (0 == count);
  }
  return rc;
}
bool ON_GetPointListBoundingBox(
    int dim, int is_rat, int count, int stride, const double* points,
    double* boxmin, double* boxmax,
    int bGrowBox
    )
/*****************************************************************************
Bounding Box of a set of points
INPUT:
  dim ( >= 1 ) dimension of each point
  is_rat ( true if points are rational )
  count ( >= 1 ) number of points
  stride ( >= (is_rat)?(dim+1):dim )
  points array of dim*count doubles
  boxmin, boxmax caller-supplied arrays of dim doubles
  bGrowBox true if input box should be enlarged to contain points
  boxmin[i]>boxmax[i] for some i, represents an empty initial box
  false if input box should be ignored bounding box of points
  is returned
OUTPUT:
  boxmin, boxmax diagonal corners of bounding box
  returns true on success; false for invalid input or when a rational
  point with zero weight is encountered
*****************************************************************************/
{
  // OBSOLETE
  // bounding box workhorse
  double x, w;
  int j;
  bool rc = false;
  // An inverted input box (min > max in some coordinate) represents an
  // empty box: treat it as "no input box" by clearing bGrowBox.
  for ( j = 0; j < dim && bGrowBox; j++ )
  {
    if ( boxmin[j] > boxmax[j] )
      bGrowBox = false;
  }
  if ( count > 0 )
  {
    // normalize is_rat so it can be used as a 0/1 stride adjustment below
    if ( is_rat )
    {
      is_rat = 1;
    }
    if ( points && dim > 0 && (count == 1 || stride >= dim+is_rat) )
    {
      // input is valid list of a least 1 point
      if ( is_rat )
      {
        // bounding box of homogeneous rational points
        rc = true;
        // skip leading zero-weight points - they cannot be dehomogenized
        while ( count > 0 && points[dim] == 0.0 )
        {
          count--;
          points += stride;
          rc = false;
        }
        if ( count > 0 )
        {
          if ( !bGrowBox )
          {
            // initialize the box from the first valid (euclidean) point
            ON_ArrayScale( dim, 1.0/points[dim], points, boxmin );
            memcpy( boxmax, boxmin, dim*sizeof(*boxmax) );
            points += stride;
            count--;
            bGrowBox = true;
          }
          if ( count > 0 )
          {
            for ( /*empty*/; count--; points += stride )
            {
              // zero-weight points mid-list are skipped but reported via rc
              if ( points[dim] == 0.0 ) {
                rc = false;
                continue;
              }
              w = 1.0/points[dim];
              for ( j = 0; j < dim; j++ )
              {
                x = w*points[j];
                if (boxmin[j] > x)
                  boxmin[j] = x;
                else if (boxmax[j] < x)
                  boxmax[j] = x;
              }
            }
          }
        }
      }
      else
      {
        // bounding box of non-rational points
        rc = true;
        if ( !bGrowBox )
        {
          // use first point to initialize box
          memcpy( boxmin, points, dim*sizeof(*boxmin) );
          memcpy( boxmax, boxmin, dim*sizeof(*boxmax) );
          points += stride;
          count--;
          bGrowBox = true;
        }
        if ( count )
        {
          // grow box to contain the rest of the points
          for ( /*empty*/; count--; points += stride )
          {
            for ( j = 0; j < dim; j++ )
            {
              x = points[j];
              if (boxmin[j] > x)
                boxmin[j] = x;
              else if (boxmax[j] < x)
                boxmax[j] = x;
            }
          }
        }
      }
    }
  }
  else if ( bGrowBox )
  {
    // result is still valid if no points are added to a valid input box
    rc = true;
  }
  return rc;
}
ON_BoundingBox ON_PointListBoundingBox(
    int dim, int is_rat, int count, int stride, const float* points
    )
{
  // Convenience wrapper: computes the bounding box of a float point list,
  // ignoring any previous box contents (bGrowBox = false).
  ON_BoundingBox result;
  ON_GetPointListBoundingBox(dim, is_rat, count, stride, points, result, false, 0);
  return result;
}
bool ON_GetPointListBoundingBox(
    int dim, int is_rat, int count, int stride, const float* points,
    float* boxmin, float* boxmax,
    int bGrowBox
    )
/*****************************************************************************
Bounding Box of a set of points
INPUT:
  dim ( >= 1 ) dimension of each point
  is_rat ( true if points are rational )
  count ( >= 1 ) number of points
  stride ( >= (is_rat)?(dim+1):dim )
  points array of dim*count floats
  boxmin, boxmax caller-supplied arrays of dim floats
  bGrowBox true if input box should be enlarged to contain points
  false if input box should be ignored bounding box of points
  is returned
OUTPUT:
  boxmin, boxmax diagonal corners of bounding box
  returns true on success; false for invalid input or when a rational
  point with zero weight is encountered
*****************************************************************************/
{
  // OBSOLETE
  // bounding box workhorse
  float x;
  double w;
  int j;
  bool rc = false;
  // An inverted input box (min > max in some coordinate) represents an
  // empty box: treat it as "no input box" by clearing bGrowBox.
  for ( j = 0; j < dim && bGrowBox; j++ )
  {
    if ( boxmin[j] > boxmax[j] )
      bGrowBox = false;
  }
  if ( count > 0 )
  {
    if ( is_rat )
      is_rat = 1;
    if ( points && dim > 0 && (count == 1 || stride >= dim+is_rat) )
    {
      if ( is_rat ) {
        // bounding box of homogeneous rational points
        rc = true;
        // skip leading zero-weight points - they cannot be dehomogenized
        while ( count > 0 && points[dim] == 0.0 ) {
          count--;
          points += stride;
          rc = false;
        }
        if ( count > 0 ) {
          if ( !bGrowBox )
          {
            // initialize the box from the first valid (euclidean) point
            ON_ArrayScale( dim, 1.0f/points[dim], points, boxmin );
            memcpy( boxmax, boxmin, dim*sizeof(*boxmax) );
            points += stride;
            count--;
            bGrowBox = true;
          }
          for ( /*empty*/; count--; points += stride )
          {
            if ( points[dim] == 0.0 )
            {
              // Bug fix: report zero-weight points encountered mid-list by
              // setting rc = false, matching the double precision overload
              // of this function (previously they were silently skipped).
              rc = false;
              continue;
            }
            w = 1.0/points[dim];
            for ( j = 0; j < dim; j++ ) {
              x = (float)(w*points[j]);
              if (boxmin[j] > x)
                boxmin[j] = x;
              else if (boxmax[j] < x)
                boxmax[j] = x;
            }
          }
        }
      }
      else
      {
        // bounding box of non-rational points
        rc = true;
        if ( !bGrowBox ) {
          // use first point to initialize box
          memcpy( boxmin, points, dim*sizeof(*boxmin) );
          memcpy( boxmax, boxmin, dim*sizeof(*boxmax) );
          points += stride;
          count--;
          bGrowBox = true;
        }
        // grow box to contain the rest of the points
        for ( /*empty*/; count--; points += stride )
        {
          for ( j = 0; j < dim; j++ ) {
            x = points[j];
            if (boxmin[j] > x)
              boxmin[j] = x;
            else if (boxmax[j] < x)
              boxmax[j] = x;
          }
        }
      }
    }
  }
  else if ( bGrowBox )
  {
    // result is still valid if no points are added to a valid input box
    rc = true;
  }
  return rc;
}
ON_BoundingBox ON_PointGridBoundingBox(
    int dim,
    ON_BOOL32 is_rat,
    int point_count0, int point_count1,
    int point_stride0, int point_stride1,
    const double* p
    )
{
  // Only x, y and z are returned; the strides still step over any extra
  // coordinates, so clamping dim avoids wasting time on them.
  const int box_dim = (dim > 3) ? 3 : dim;
  ON_BoundingBox box;
  ON_GetPointGridBoundingBox( box_dim, is_rat,
                              point_count0, point_count1,
                              point_stride0, point_stride1, p,
                              &box.m_min.x, &box.m_max.x, false );
  return box;
}
bool ON_GetPointGridBoundingBox(
    int dim,
    int is_rat,
    int point_count0, int point_count1,
    int point_stride0, int point_stride1,
    const double* p,
    double* boxmin, double* boxmax,
    int bGrowBox
    )
{
  // An inverted input box (min > max in some coordinate) represents an
  // empty box: ignore it instead of growing it.
  for ( int k = 0; k < dim && bGrowBox; k++ )
  {
    if ( boxmin[k] > boxmax[k] )
      bGrowBox = false;
  }
  bool rc = (0 != bGrowBox);
  // Accumulate the bounding box one grid row at a time.
  for ( int row = 0; row < point_count0; row++ )
  {
    const double* row_points = p + row*point_stride0;
    if ( !ON_GetPointListBoundingBox( dim, is_rat, point_count1, point_stride1, row_points, boxmin, boxmax, bGrowBox ) )
    {
      rc = false;
      break;
    }
    // After the first successful row the box is valid and must be grown.
    bGrowBox = true;
    if ( 0 == row )
      rc = true;
  }
  return rc;
}
// Returns true when bbox occupies a region of space where single precision
// (float) coordinates would lose too much accuracy.  When it returns true
// and xform is not null, *xform is set to a translate+power-of-2-scale
// transform that moves the box into a float-friendly region; otherwise
// *xform is set to the identity.
bool ON_BeyondSinglePrecision( const ON_BoundingBox& bbox, ON_Xform* xform )
{
  bool rc = false;
  if ( bbox.IsValid() )
  {
    // 31 March 2011:
    //   The values of too_far = 262144.0 and too_big = 1048576.0
    //   are first guesses. If you changes these values,
    //   you must append a comment containing your name,
    //   the date, the values your are using, a bug number
    //   of a bug report containing a file that demonstrates
    //   why you changed the number. You must retest all
    //   previous bugs before committing your changes.
    //
    // DATE: 31 March 2011
    // NAME: Dale Lear
    // COMMENT: First guess at values for too_far and too
    // VALUES: too_far = 262144.0 from tests with simple mesh sphere
    //         too_big = 1048576.0
    // BUG: http://dev.mcneel.com/bugtrack/?q=83437
    const double too_far = 262144.0; // should be a power of 2
    const double too_big = 1048576.0; // MUST be a power of 2
    // bTooFar: the entire box is far from the origin (translation helps).
    bool bTooFar = ( bbox.m_min.x >= too_far
    || bbox.m_min.y >= too_far
    || bbox.m_min.z >= too_far
    || bbox.m_max.x <= -too_far
    || bbox.m_max.y <= -too_far
    || bbox.m_max.z <= -too_far
    );
    // bTooBig: some coordinate magnitude is huge (scaling helps).
    bool bTooBig = ( bbox.m_min.x <= -too_big
    || bbox.m_min.y <= -too_big
    || bbox.m_min.z <= -too_big
    || bbox.m_max.x >= too_big
    || bbox.m_max.y >= too_big
    || bbox.m_max.z >= too_big
    );
    if ( bTooFar || bTooBig )
    {
      rc = true;
      if ( 0 != xform )
      {
        ON_3dVector C = bbox.Center();
        // Any modification of coordinates contributes to
        // less precision in calculations. These tests
        // remove small components of translations that
        // do not help matters and may add more fuzz to
        // calculations.
        if ( fabs(C.x) <= 100.0 )
          C.x = 0.0;
        if ( fabs(C.y) <= 100.0 )
          C.y = 0.0;
        if ( fabs(C.z) <= 100.0 )
          C.z = 0.0;
        double r = 0.5*bbox.m_max.DistanceTo(bbox.m_min);
        // T = translate center of bbox to origin
        ON_Xform T;
        T.Translation(-C);
        // S = scale to shrink things that are too big
        // to have a maximum coordinate of 1024.
        // The scale is a power of 2 to preserve as much
        // precision as possible.
        double s = 1.0;
        if ( r > too_big/16.0 )
        {
          // also apply a power of 2 scale to shrink large
          // object so its coordinates are <= 1024.0
          s = too_big;
          while ( r > s*1024.0 )
            s *= 2.0;
          s = 1.0/s;
        }
        ON_Xform S(s);
        // xform positions bbox in a region of space
        // where single precision coordinates should
        // work for most calculations.
        *xform = S*T;
      }
    }
  }
  // When the box is fine as-is (or invalid), hand back the identity.
  if (!rc && 0 != xform )
    xform->Identity();
  return rc;
}
// Returns a tolerance suitable for comparisons inside the axis-aligned box
// with corners bboxmin/bboxmax (dim coordinates each).  The result is at
// least ON_ZERO_TOLERANCE whenever it is positive.
double ON_BoundingBoxTolerance(
    int dim,
    const double* bboxmin,
    const double* bboxmax
    )
{
  int i;
  double x, tolerance=0.0;
#if defined(ON_COMPILER_MSC)
#pragma warning( push )
// Disable the MSC /W4 "conditional expression is constant" warning
// generated by the do {...} while(0) in the ON_ASSERT_OR_RETURN macro.
#pragma warning( disable : 4127 )
#endif
  // precondition checks return 0.0 on invalid input
  ON_ASSERT_OR_RETURN( dim > 0 && bboxmin != NULL && bboxmax != NULL,tolerance);
  for ( i = 0; i < dim; i++ ) {
    ON_ASSERT_OR_RETURN(bboxmin[i] <= bboxmax[i],tolerance);
  }
#if defined(ON_COMPILER_MSC)
#pragma warning( pop )
#endif
  // base tolerance: box diagonal length scaled by machine epsilon
  tolerance = ON_ArrayDistance(dim,bboxmin,bboxmax)*ON_EPSILON;
  for ( i = 0; i < dim; i++ ) {
    // per-axis extent contribution
    x = (bboxmax[i] - bboxmin[i])*ON_SQRT_EPSILON;
    if ( x > tolerance )
      tolerance = x;
    // NOTE(review): fabs(max) - fabs(min) can be negative or near zero even
    // for boxes far from the origin; a magnitude-based term such as
    // (fabs(max) + fabs(min))*ON_EPSILON looks like the intent - confirm
    // against the upstream openNURBS sources before changing.
    x = (fabs(bboxmax[i]) - fabs(bboxmin[i]))*ON_EPSILON;
    if ( x > tolerance )
      tolerance = x;
  }
  // clamp tiny positive tolerances up to the global zero tolerance
  if ( tolerance > 0.0 && tolerance < ON_ZERO_TOLERANCE )
    tolerance = ON_ZERO_TOLERANCE;
  return tolerance;
}
int ON_BoundingBox::IsDegenerate( double tolerance ) const
{
ON_3dVector diag = Diagonal();
if ( tolerance < 0.0 )
{
// compute scale invarient tolerance
tolerance = diag.MaximumCoordinate()*ON_SQRT_EPSILON;
}
int rc = 0;
if ( diag.x < 0.0 )
return 4;
if ( diag.x <= tolerance )
rc++;
if ( diag.y < 0.0 )
return 4;
if ( diag.y <= tolerance )
rc++;
if ( diag.z < 0.0 )
return 4;
if ( diag.z <= tolerance )
rc++;
return rc;
}
double ON_BoundingBox::MinimumDistanceTo( const ON_3dPoint& P ) const
{
  // 8 Feb 2005 - new function - not tested yet
  // this function must be fast
  // If Q = any point in box, then
  // P.DistanceTo(Q) >= MinimumDistanceTo(P).
  // Per axis: distance from P's coordinate to the box interval
  // (0 when the coordinate is inside the interval).
  double dx = 0.0, dy = 0.0, dz = 0.0;
  if ( P.x < m_min.x )
    dx = m_min.x - P.x;
  else if ( P.x > m_max.x )
    dx = P.x - m_max.x;
  if ( P.y < m_min.y )
    dy = m_min.y - P.y;
  else if ( P.y > m_max.y )
    dy = P.y - m_max.y;
  if ( P.z < m_min.z )
    dz = m_min.z - P.z;
  else if ( P.z > m_max.z )
    dz = P.z - m_max.z;
  return ON_3dVector(dx, dy, dz).Length();
}
double ON_BoundingBox::MaximumDistanceTo( const ON_3dPoint& P ) const
{
  // this function must be fast
  // If Q = any point in box, then
  // P.DistanceTo(Q) <= MaximumDistanceTo(P).
  // On each axis the farthest face is the one on the opposite side of
  // the box's midpoint from P.
  const double fx = (P.x < 0.5*(m_min.x+m_max.x)) ? m_max.x : m_min.x;
  const double fy = (P.y < 0.5*(m_min.y+m_max.y)) ? m_max.y : m_min.y;
  const double fz = (P.z < 0.5*(m_min.z+m_max.z)) ? m_max.z : m_min.z;
  return ON_3dVector(fx - P.x, fy - P.y, fz - P.z).Length();
}
static double ON_BBoxMinimumDistanceToHelper( const ON_BoundingBox& bbox, ON_Line line )
{
  // 8 Feb 2005 - new function - not tested yet
  // this function must be fast
  // returns 0.0 if the line intersects the box and
  // returns != 0.0 if the line does not intersect
  // returns d > 0.0 if the line misses the box and the minimum dist is >= d.
  // returns ON_UNSET_VALUE if the line misses the box but the minimum distance
  // is not easily bounded away from zero.
  // NOTE: 'line' is taken by value because the code below trims it in place.
  double d, t;
  bool bTrimmed;
  // quick check for line.from inside box
  if ( bbox.m_min.x <= line.from.x && line.from.x <= bbox.m_max.x )
  {
    if ( bbox.m_min.y <= line.from.y && line.from.y <= bbox.m_max.y )
    {
      if ( bbox.m_min.z <= line.from.z && line.from.z <= bbox.m_max.z )
      {
        return 0.0;
      }
    }
  }
  // quick check for line.to inside box
  if ( bbox.m_min.x <= line.to.x && line.to.x <= bbox.m_max.x )
  {
    if ( bbox.m_min.y <= line.to.y && line.to.y <= bbox.m_max.y )
    {
      if ( bbox.m_min.z <= line.to.z && line.to.z <= bbox.m_max.z )
      {
        return 0.0;
      }
    }
  }
  // Compare the box against the line's own axis-aligned bounding box:
  // a positive gap between the two boxes is a valid lower bound on the
  // distance between the box and the line.
  ON_BoundingBox line_bbox;
  line_bbox.Set(3,false,2,3,&line.from.x,false);
  d = bbox.MinimumDistanceTo(line_bbox);
  if ( d > 0.0 )
    return d;
  if ( bbox.m_min.x <= line_bbox.m_min.x && line_bbox.m_max.x <= bbox.m_max.x )
  {
    if ( bbox.m_min.y <= line_bbox.m_min.y && line_bbox.m_max.y <= bbox.m_max.y )
    {
      // The fact that MinimumDistanceTo(line_bbox) == 0.0 implies
      // that the z-extents of the line intersects this bounding box.
      return 0.0;
    }
    else if ( bbox.m_min.z <= line_bbox.m_min.z && line_bbox.m_max.z <= bbox.m_max.z )
    {
      // The fact that MinimumDistanceTo(line_bbox) == 0.0 implies
      // that the y-extents of the line intersects this bounding box.
      return 0.0;
    }
  }
  else if ( bbox.m_min.y <= line_bbox.m_min.y && line_bbox.m_max.y <= bbox.m_max.y
            && bbox.m_min.z <= line_bbox.m_min.z && line_bbox.m_max.z <= bbox.m_max.z )
  {
    // The fact that MinimumDistanceTo(line_bbox) == 0.0 implies
    // that the x-extents of the line intersects this bounding box.
    return 0.0;
  }
  // Clip the line against the box's x-slab, keeping only the part whose
  // x-coordinates can touch the box.
  d = line.to.x - line.from.x;
  bTrimmed = false;
  if ( d != 0.0 )
  {
    if ( d < 0.0 )
    {
      // orient the line so x increases from 'from' to 'to'
      line.Reverse();
      d = -d;
    }
    d = 1.0/d;
    t = (bbox.m_min.x - line.from.x)*d;
    if( 0.0 < t && t < 1.0 )
    {
      line.from = line.PointAt(t);
      line.from.x = bbox.m_min.x; // snap to the slab to kill round-off
      d = line.to.x - line.from.x;
      if ( d != 0.0 )
        d = 1.0/d;
      bTrimmed = true;
    }
    t = (bbox.m_max.x - line.from.x)*d;
    if( 0.0 < t && t < 1.0 )
    {
      line.to = line.PointAt(t);
      line.to.x = bbox.m_max.x;
      bTrimmed = true;
    }
  }
  // Now work on the y-slab; orient the line so y increases.
  d = line.to.y - line.from.y;
  if ( d < 0.0 )
  {
    line.Reverse();
    d = -d;
  }
  if ( bTrimmed )
  {
    // the x-trimmed piece can now be rejected with interval tests
    if ( line.to.y < bbox.m_min.y || line.from.y > bbox.m_max.y )
      return ON_UNSET_VALUE;
    if ( line.from.z < bbox.m_min.z && line.to.z < bbox.m_min.z )
      return ON_UNSET_VALUE;
    if ( line.from.z > bbox.m_max.z && line.to.z > bbox.m_max.z )
      return ON_UNSET_VALUE;
  }
  if ( d > 0.0 )
  {
    d = 1.0/d;
    t = (bbox.m_min.y - line.from.y)*d;
    if( 0.0 < t && t < 1.0 )
    {
      line.from = line.PointAt(t);
      line.from.y = bbox.m_min.y;
      d = line.to.y - line.from.y;
      if ( d != 0.0 )
        d = 1.0/d;
    }
    t = (bbox.m_max.y - line.from.y)*d;
    if( 0.0 < t && t < 1.0 )
    {
      line.to = line.PointAt(t);
      line.to.y = bbox.m_max.y;
    }
  }
  // After trimming to the x and y slabs, only the z test remains.
  if ( line.from.z < bbox.m_min.z && line.to.z < bbox.m_min.z )
    return ON_UNSET_VALUE;
  if ( line.from.z > bbox.m_max.z && line.to.z > bbox.m_max.z )
    return ON_UNSET_VALUE;
  return 0.0; // some portion of the line hits the box
}
double ON_BoundingBox::MinimumDistanceTo( const ON_Plane& plane ) const
{
ON_PlaneEquation e;
e.Create(plane.origin,plane.zaxis);
return MinimumDistanceTo(e);
}
double ON_BoundingBox::MinimumDistanceTo( const ON_PlaneEquation& e ) const
{
double t, t0, t1;
ON_3dPoint P(m_min); // min, min, min
t0 = t1 = e.ValueAt(P);
P.z = m_max.z; // min, min, max
t = e.ValueAt(P);
if (t < t0)
{
t0 = t; if ( t0 <= 0.0 && t1 >= 0.0 ) return 0.0;
}
else if (t > t1)
{
t1 = t; if ( t0 <= 0.0 && t1 >= 0.0 ) return 0.0;
}
P.y = m_max.y; // min, max, max
t = e.ValueAt(P);
if (t < t0)
{
t0 = t; if ( t0 <= 0.0 && t1 >= 0.0 ) return 0.0;
}
else if (t > t1)
{
t1 = t; if ( t0 <= 0.0 && t1 >= 0.0 ) return 0.0;
}
P.z = m_min.z; // min, max, min
t = e.ValueAt(P);
if (t < t0)
{
t0 = t; if ( t0 <= 0.0 && t1 >= 0.0 ) return 0.0;
}
else if (t > t1)
{
t1 = t; if ( t0 <= 0.0 && t1 >= 0.0 ) return 0.0;
}
P.x = m_max.x; // max, max, min
t = e.ValueAt(P);
if (t < t0)
{
t0 = t; if ( t0 <= 0.0 && t1 >= 0.0 ) return 0.0;
}
else if (t > t1)
{
t1 = t; if ( t0 <= 0.0 && t1 >= 0.0 ) return 0.0;
}
P.y = m_min.y; // max, min, min
t = e.ValueAt(P);
if (t < t0)
{
t0 = t; if ( t0 <= 0.0 && t1 >= 0.0 ) return 0.0;
}
else if (t > t1)
{
t1 = t; if ( t0 <= 0.0 && t1 >= 0.0 ) return 0.0;
}
P.z = m_max.z; // max, min, max
t = e.ValueAt(P);
if (t < t0)
{
t0 = t; if ( t0 <= 0.0 && t1 >= 0.0 ) return 0.0;
}
else if (t > t1)
{
t1 = t; if ( t0 <= 0.0 && t1 >= 0.0 ) return 0.0;
}
P.y = m_max.y; // max, max, max
t = e.ValueAt(P);
if (t < t0)
{
t0 = t;
}
else if (t > t1)
{
t1 = t;
}
if ( t0 >= 0.0 ) return t0;
if ( t1 <= 0.0 ) return -t1;
return 0.0;
}
double ON_BoundingBox::MaximumDistanceTo( const ON_Plane& plane ) const
{
  // Delegate to the plane-equation form of this query.
  // Bug fix: this previously returned MinimumDistanceTo(e), i.e. the
  // copy/paste from the minimum-distance overload was never updated, so
  // callers received the minimum distance instead of the maximum.
  ON_PlaneEquation e;
  e.Create(plane.origin,plane.zaxis);
  return MaximumDistanceTo(e);
}
double ON_BoundingBox::MaximumDistanceTo( const ON_PlaneEquation& e ) const
{
double t, t0;
ON_3dPoint P(m_min); // min, min, min
t0 = fabs(e.ValueAt(P));
P.z = m_max.z; // min, min, max
t = fabs(e.ValueAt(P)); if (t > t0) t0 = t;
P.y = m_max.y; // min, max, max
t = fabs(e.ValueAt(P)); if (t > t0) t0 = t;
P.z = m_min.z; // min, max, min
t = fabs(e.ValueAt(P)); if (t > t0) t0 = t;
P.x = m_max.x; // max, max, min
t = fabs(e.ValueAt(P)); if (t > t0) t0 = t;
P.y = m_min.y; // max, min, min
t = fabs(e.ValueAt(P)); if (t > t0) t0 = t;
P.z = m_max.z; // max, min, max
t = fabs(e.ValueAt(P)); if (t > t0) t0 = t;
P.y = m_max.y; // max, max, max
t = fabs(e.ValueAt(P)); if (t > t0) t0 = t;
return t0;
}
bool ON_BoundingBox::IsFartherThan( double d, const ON_Plane& plane ) const
{
ON_PlaneEquation e;
e.Create(plane.origin,plane.zaxis);
return IsFartherThan(d,e);
}
bool ON_BoundingBox::IsFartherThan( double d, const ON_PlaneEquation& e ) const
{
  // Returns true if every point of the box is more than d from the plane.
  // The signed plane-equation values at the 8 corners are accumulated
  // into [t0,t1]; as soon as that interval meets [-d,d] some part of the
  // box is within distance d and we can return false.
  double t, t0, t1;
  ON_3dPoint P(m_min); // min, min, min
  t0 = t1 = e.ValueAt(P);
  if ( t0 <= d && t1 >= -d ) return false;
  P.z = m_max.z; // min, min, max
  t = e.ValueAt(P);
  if (t < t0)
  {
    t0 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  else if (t > t1)
  {
    t1 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  P.y = m_max.y; // min, max, max
  t = e.ValueAt(P);
  if (t < t0)
  {
    t0 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  else if (t > t1)
  {
    t1 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  P.z = m_min.z; // min, max, min
  t = e.ValueAt(P);
  if (t < t0)
  {
    t0 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  else if (t > t1)
  {
    t1 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  P.x = m_max.x; // max, max, min
  t = e.ValueAt(P);
  if (t < t0)
  {
    t0 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  else if (t > t1)
  {
    t1 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  P.y = m_min.y; // max, min, min
  t = e.ValueAt(P);
  if (t < t0)
  {
    t0 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  else if (t > t1)
  {
    t1 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  P.z = m_max.z; // max, min, max
  // Bug fix: this evaluation was missing, so the interval tests below
  // reused the previous corner's value and this corner was ignored.
  t = e.ValueAt(P);
  if (t < t0)
  {
    t0 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  else if (t > t1)
  {
    t1 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  P.y = m_max.y; // max, max, max
  // Bug fix: missing evaluation of the final corner (see above).
  t = e.ValueAt(P);
  if (t < t0)
  {
    t0 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  else if (t > t1)
  {
    t1 = t; if ( t0 <= d && t1 >= -d ) return false;
  }
  return true;
}
double ON_BoundingBox::MinimumDistanceTo( const ON_Line& line ) const
{
  // Returns 0.0 when the line intersects the box; otherwise the smallest
  // distance between the line segment and the box's 12 edges.
  double d = ON_BBoxMinimumDistanceToHelper( *this, line );
  if ( d < 0.0 )
  {
    // At this point we know the line does not intersect the box.
    // To get a lower bound on the shortest distance between the
    // line and the box, we need to compare the line to the
    // edges of the box.
    const ON_BoundingBox line_bbox(line.BoundingBox());
    ON_Line edge;
    double e,t;
    int i,j;
    // the 4 edges parallel to the z axis
    edge.from.z = m_min.z;
    edge.to.z = m_max.z;
    for ( i = 0; i < 2; i++ )
    {
      edge.from.x = i?m_min.x:m_max.x;
      if ( d > 0.0 )
      {
        // cheap reject: this edge cannot beat the current best distance
        if ( line_bbox.m_min.x - edge.from.x > d )
          continue;
        if ( edge.from.x - line_bbox.m_max.x > d )
          continue;
      }
      edge.to.x = edge.from.x;
      for ( j = 0; j < 2; j++ )
      {
        edge.from.y = j?m_min.y:m_max.y;
        if ( d > 0.0 )
        {
          if ( line_bbox.m_min.y - edge.from.y > d )
            continue;
          if ( edge.from.y - line_bbox.m_max.y > d )
            continue;
        }
        edge.to.y = edge.from.y;
        if ( ON_Intersect(edge,line,&e,&t) )
        {
          // clamp the closest-approach parameters to the segments
          if ( e < 0.0 ) e = 0.0; else if (e > 1.0) e = 1.0;
          if ( t < 0.0 ) t = 0.0; else if (t > 1.0) t = 1.0;
          e = edge.PointAt(e).DistanceTo(line.PointAt(t));
          if ( d < 0.0 || e < d )
            d = e;
        }
      }
    }
    // the 4 edges parallel to the y axis
    edge.from.y = m_min.y;
    edge.to.y = m_max.y;
    for ( i = 0; i < 2; i++ )
    {
      edge.from.z = i?m_min.z:m_max.z;
      edge.to.z = edge.from.z;
      if ( d > 0.0 )
      {
        if ( line_bbox.m_min.z - edge.from.z > d )
          continue;
        if ( edge.from.z - line_bbox.m_max.z > d )
          continue;
      }
      for ( j = 0; j < 2; j++ )
      {
        edge.from.x = j?m_min.x:m_max.x;
        if ( d > 0.0 )
        {
          if ( line_bbox.m_min.x - edge.from.x > d )
            continue;
          if ( edge.from.x - line_bbox.m_max.x > d )
            continue;
        }
        edge.to.x = edge.from.x;
        if ( ON_Intersect(edge,line,&e,&t) )
        {
          if ( e < 0.0 ) e = 0.0; else if (e > 1.0) e = 1.0;
          if ( t < 0.0 ) t = 0.0; else if (t > 1.0) t = 1.0;
          e = edge.PointAt(e).DistanceTo(line.PointAt(t));
          if ( d < 0.0 || e < d )
            d = e;
        }
      }
    }
    // the 4 edges parallel to the x axis
    edge.from.x = m_min.x;
    edge.to.x = m_max.x;
    for ( i = 0; i < 2; i++ )
    {
      edge.from.y = i?m_min.y:m_max.y;
      edge.to.y = edge.from.y;
      if ( d > 0.0 )
      {
        if ( line_bbox.m_min.y - edge.from.y > d )
          continue;
        if ( edge.from.y - line_bbox.m_max.y > d )
          continue;
      }
      for ( j = 0; j < 2; j++ )
      {
        edge.from.z = j?m_min.z:m_max.z;
        edge.to.z = edge.from.z;
        if ( d > 0.0 )
        {
          if ( line_bbox.m_min.z - edge.from.z > d )
            continue;
          if ( edge.from.z - line_bbox.m_max.z > d )
            continue;
        }
        if ( ON_Intersect(edge,line,&e,&t) )
        {
          if ( e < 0.0 ) e = 0.0; else if (e > 1.0) e = 1.0;
          if ( t < 0.0 ) t = 0.0; else if (t > 1.0) t = 1.0;
          e = edge.PointAt(e).DistanceTo(line.PointAt(t));
          if ( d < 0.0 || e < d )
            d = e;
        }
      }
    }
    // d may still be ON_UNSET_VALUE if every ON_Intersect call failed;
    // report 0.0 rather than a negative sentinel.
    if ( d < 0.0 )
      d = 0.0;
  }
  return d;
}
double ON_BoundingBox::MaximumDistanceTo( const ON_Line& line ) const
{
  // 8 Feb 2005 - new function - not tested yet
  // this function must be fast
  // If Q = any point on the line and
  // P = any point in box, then
  // P.DistanceTo(Q) <= MaximumDistanceTo(line).
  // The candidates are the squared distances from each line endpoint to
  // each of the 8 box corners; d accumulates the largest squared distance.
  double d,dx,dy,dz;
  const double* a;
  int i,j,k;
  d = 0.0;
  a = &line.from.x;
  for ( i = 0; i < 2; i++ )
  {
    dx = fabs(a[0] - (i?m_max.x:m_min.x));
    dx = dx*dx;
    // NOTE(review): pruning when the partial squared distance is <= d can
    // skip corners whose full dx^2+dy^2+dz^2 exceeds d (adding dy/dz terms
    // only increases the sum), so the result may underestimate the true
    // maximum - confirm this pruning against the upstream sources.
    if ( dx <= d )
      continue;
    for ( j = 0; j < 2; j++ )
    {
      dy = fabs(a[1] - (j?m_max.y:m_min.y));
      dy = dx + dy*dy;
      if ( dy <= d )
        continue;
      for ( k = 0; k < 2; k++ )
      {
        dz = fabs(a[2] - (k?m_max.z:m_min.z));
        dz = dz*dz + dy;
        if ( dz > d )
          d = dz;
      }
    }
  }
  // repeat for the second endpoint of the line
  a = &line.to.x;
  for ( i = 0; i < 2; i++ )
  {
    dx = fabs(a[0] - (i?m_max.x:m_min.x));
    dx = dx*dx;
    if ( dx <= d )
      continue;
    for ( j = 0; j < 2; j++ )
    {
      dy = fabs(a[1] - (j?m_max.y:m_min.y));
      dy = dx + dy*dy;
      if ( dy <= d )
        continue;
      for ( k = 0; k < 2; k++ )
      {
        dz = fabs(a[2] - (k?m_max.z:m_min.z));
        dz = dz*dz + dy;
        if ( dz > d )
          d = dz;
      }
    }
  }
  // d holds a squared distance
  return sqrt(d);
}
double ON_BoundingBox::MinimumDistanceTo( const ON_BoundingBox& other ) const
{
  // this must be fast
  // Per axis: the gap between the two intervals (0 where they overlap);
  // the result is the length of the per-axis gap vector.
  double gx = 0.0, gy = 0.0, gz = 0.0;
  if ( m_min.x > other.m_max.x )
    gx = m_min.x - other.m_max.x;
  else if ( m_max.x < other.m_min.x )
    gx = other.m_min.x - m_max.x;
  if ( m_min.y > other.m_max.y )
    gy = m_min.y - other.m_max.y;
  else if ( m_max.y < other.m_min.y )
    gy = other.m_min.y - m_max.y;
  if ( m_min.z > other.m_max.z )
    gz = m_min.z - other.m_max.z;
  else if ( m_max.z < other.m_min.z )
    gz = other.m_min.z - m_max.z;
  return ON_3dVector(gx, gy, gz).Length();
}
double ON_BoundingBox::MaximumDistanceTo( const ON_BoundingBox& other ) const
{
  // this must be fast
  // On each axis the farthest pair of coordinates lies at opposite
  // extremes, so take the larger of the two cross separations per axis.
  const double x1 = fabs(m_min.x - other.m_max.x);
  const double x2 = fabs(m_max.x - other.m_min.x);
  const double y1 = fabs(m_min.y - other.m_max.y);
  const double y2 = fabs(m_max.y - other.m_min.y);
  const double z1 = fabs(m_min.z - other.m_max.z);
  const double z2 = fabs(m_max.z - other.m_min.z);
  return ON_3dVector(
      (x1 > x2) ? x1 : x2,
      (y1 > y2) ? y1 : y2,
      (z1 > z2) ? z1 : z2 ).Length();
}
bool ON_BoundingBox::IsFartherThan( double d, const ON_3dPoint& P ) const
{
  // True when the closest point of the box is more than d away from P.
  return MinimumDistanceTo(P) > d;
}
bool ON_BoundingBox::IsFartherThan( double d, const ON_Line& line ) const
{
ON_BoundingBox bbox = *this;
bbox.m_min.x -= d;
bbox.m_min.y -= d;
bbox.m_min.z -= d;
bbox.m_max.x += d;
bbox.m_max.y += d;
bbox.m_max.z += d;
d = ON_BBoxMinimumDistanceToHelper( bbox, line );
// d != 0.0 if and only if line misses the enlarged box
return (d != 0.0);
}
bool ON_BoundingBox::IsFartherThan( double d, const ON_BoundingBox& other ) const
{
  // True when the two boxes are separated by more than d.
  return MinimumDistanceTo(other) > d;
}
|
/**
* @file
*
* @brief
*
* @copyright BSD License (see doc/LICENSE.md or http://www.libelektra.org)
*/
#ifndef VISITOR_H
#define VISITOR_H
class TreeItem;
class TreeModel;
/**
 * @brief The abstract Visitor class to support the visitor pattern.
 */
class Visitor
{
public:
	/**
	 * @brief Virtual destructor so concrete visitors can be deleted
	 * through a Visitor pointer safely.
	 */
	virtual ~Visitor ()
	{
	}
	/**
	 * @brief The abstract method a visitor who wants to visit a TreeItem needs to implement.
	 *
	 * @param item The visited TreeItem
	 */
	virtual void visit (TreeItem & item) = 0;
};
#endif // VISITOR_H
|
#!/usr/bin/env python3
# Copyright (c) 2017 The MagnaChain Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
测试:
赎回挖矿币
//赎回挖矿币, 步骤
// 1).侧链提起赎回请求.(侧链先销毁挖矿币,防止继续挖矿)
// 2).主链收到,创造新的交易,抵押币作为输入,赎回到正常地址,需要指定来自那个侧链请求
// 如果是主链先发起请求的,而且是先拿回抵押币的话,可能侧链还在继续挖矿.
// 这个交易和前面跨链交易不一样,原先"转到"侧链成为挖矿币的输入并没有销毁,可以作为转入s2时的输入.
// 赎回挖矿币, 步骤1
"""
# Imports should be in PEP8 ordering (std library first, then third party
# libraries then local imports).
from decimal import Decimal
# Avoid wildcard * imports if possible
from test_framework.test_framework import MagnaChainTestFramework
from test_framework.mininode import COIN, MINER_REWARD
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
bytes_to_hex_str,
)
class RedeemMortgageTest(MagnaChainTestFramework):
    # Functional test for redeeming mortgage (mining) coins from a side chain.
    # Each functional test is a subclass of the MagnaChainTestFramework class.
    # Override the set_test_params(), add_options(), setup_chain(), setup_network()
    # and setup_nodes() methods to customize the test setup as required.

    def set_test_params(self):
        """Override test parameters for your individual test.

        This method must be overridden and num_nodes must be explicitly set."""
        self.setup_clean_chain = True
        self.num_nodes = 2  # todo should be 2 nodes
        '''
        self.num_sidenodes here is setting sidechain nodes num,just like self.num_nodes
        and the self.sidenodes like self.nodes
        '''
        self.num_sidenodes = 2
        # -regtestrsheight sets the redeem-safe height used by the side chain
        self.side_extra_args = [["-regtestrsheight=10"], ["-regtestrsheight=10"]]
        self.rpc_timewait = 900
        self.protected_mode = [[0], [0]]

    def run_test(self):
        """Main test logic"""
        self.sync_all([self.sidenodes])
        # Mine a few blocks on every side node and main node so wallets
        # have spendable funds, keeping both networks in sync.
        for i in range(self.num_nodes):
            self.sidenodes[i].generate(2)
            assert_equal(len(self.sidenodes[i].getrawmempool()), 0)
            self.sync_all([self.sidenodes])
            self.nodes[i].generate(2)
            self.sync_all()
        # send some coins to branch to use
        # self.node0.sendtobranchchain(self.sidechain_id, self.snode0.getnewaddress(), 1000)
        # self.node0.generate(8)
        # self.snode0.generate(1)
        # self.sync_all()
        # self.sync_all([self.sidenodes])

        # Redeeming mortgage coins, steps:
        # 1) The side chain issues a redeem request (the side chain destroys
        #    the mining coins first to prevent further mining).
        # 2) The main chain receives it and creates a new transaction with the
        #    mortgage coins as input, redeemed to a normal address; it must
        #    specify which side-chain request it came from.
        # If the main chain initiates first and takes the mortgage coins back
        # first, the side chain may still be mining.
        # Unlike the earlier cross-chain transactions, the input that was
        # "transferred" to the side chain to become mining coins was not
        # destroyed, and it can be used as the input when transferring to s2.
        # This test exercises step 1 of the redeem flow.

        # total amount originally mortgaged
        self.import_prikey()
        origin_mortgage = 100000
        start_balance = self.snode0.getbalance()
        start_balance1 = self.snode1.getbalance()
        mortgage_txs = self.snode0.listmortgagecoins()
        # immature coins cannot be redeemed yet
        assert_raises_rpc_error(-32600, 'Coin need 10 confirmation', self.snode0.redeemmortgagecoinstatement,
                                mortgage_txs[0]['txid'])
        self.snode0.generate(7)  # satisfy REDEEM_SAFE_HEIGHT
        mortgage_txs = self.snode0.listmortgagecoins()
        print(mortgage_txs)
        txid = self.snode0.sendtoaddress(self.node0.getnewaddress(), 1)
        # redeem statements are a side-chain-only RPC
        assert_raises_rpc_error(-32603, 'This RPC API Only be called in branch chain',
                                self.node0.redeemmortgagecoinstatement, txid)
        # a plain utxo is not a mortgage coin
        assert_raises_rpc_error(-32600, 'Invalid mortgage coin', self.snode0.redeemmortgagecoinstatement,
                                self.available_utxo())
        assert_raises_rpc_error(-4, 'Coin is spent', self.snode0.redeemmortgagecoinstatement, self.mortgage_coin(), 9)
        balance = self.node0.getbalance()
        results = []
        self.sync_all()  # TODO: make sure node1 have all branch header tx in mempool.(to be optimize)
        self.snode1.generate(10)
        self.sync_all([self.sidenodes])
        # redeem every mortgage coin, one statement per coin
        for i in range(20):
            result = self.snode0.redeemmortgagecoinstatement(self.mortgage_coin())
            results.append(result['txid'])
            print("result:", result)
            assert_equal(len(self.snode0.listmortgagecoins()), 20 - i - 1)
        # with all mortgage coins redeemed, snode0 can no longer mine
        assert_raises_rpc_error(-32603, 'no address with enough coins', self.snode0.generate, 1)
        self.sync_all([self.sidenodes])  # sync mempool to snode1
        besthash = self.snode1.getbestblockhash()
        best_height = self.snode1.getblockcount()
        badhash = self.snode1.generate(7)[0]
        self.sync_all([self.sidenodes])
        self.sync_all()
        assert_equal(0, len(self.node0.getrawmempool()))  # side chain gen block header not sync to mainchain
        fees = 0
        txs = self.node0.getrawmempool(True)
        for txid in txs:
            fees += Decimal(txs[txid]['fee'])
        self.node0.generate(1)
        self.sync_all()
        self.snode1.generate(1)
        self.log.info("after node0 gen blocks,mortgage coins should be redeemed")
        self.sync_all([self.sidenodes])
        self.sync_all()
        txs = self.node0.getrawmempool(True)
        for txid in txs:
            fees += Decimal(txs[txid]['fee'])
        self.node0.generate(2)
        self.sync_all()
        self.log.info("rebroadcastredeemtransaction should raise a RPC exception,we will catch it")
        for t in results:
            assert_raises_rpc_error(-25, 'Coin is spent', self.snode0.rebroadcastredeemtransaction, t)
        self.node0.generate(2)
        print("self.node0.getbalance", self.node0.getbalance(), "balance", balance,
              "self.node0.getbalance() - balance", self.node0.getbalance() - balance,
              "origin_mortgage", origin_mortgage, "fees", fees)
        print(self.node0.getbalance() - balance - 4 * MINER_REWARD, origin_mortgage + fees)
        # 25 is for fee
        assert_equal(self.node0.getbalance() - balance - 4 * MINER_REWARD,origin_mortgage)
        # assert self.node0.getbalance() - balance - 4 * MINER_REWARD > origin_mortgage and (
        # self.node0.getbalance() - balance - 4 * MINER_REWARD < origin_mortgage + fees)
        # try to invalidateblock some blocks and snode0 generate again
        self.snode0.invalidateblock(badhash)
        assert_equal(self.snode0.getbestblockhash(),besthash)
        assert_equal(self.snode0.getblockcount(), best_height)
        assert_raises_rpc_error(-32603, 'no address with enough coins', self.snode0.generate, 1)

    def mortgage_coin(self, spentable=True):
        '''
        Return the txid of a matured mortgage coin (>= 10 confirmations).

        If no mature coin exists, mine 10 blocks on snode1 and retry.
        :param spentable:
        :return:
        '''
        for tx in self.snode0.listmortgagecoins():
            if tx['confirmations'] >= 10:
                if spentable:
                    return tx['txid']
                # NOTE(review): this branch is unreachable - the return above
                # already fires whenever spentable is truthy, so both values
                # of spentable return the first mature coin. Confirm intent.
                if spentable:
                    continue
                return tx['txid']
        else:
            # no mature coin found: mature some by mining and try again
            self.log.info("mortgage_coin not found,generate")
            self.snode1.generate(10)
            return self.mortgage_coin()

    def available_utxo(self):
        # Return the txid of any ordinary (non-mortgage) utxo with at least
        # 10 confirmations.
        for tx in self.snode0.listunspent():
            if tx['confirmations'] >= 10:
                print(tx)
                return tx['txid']

    def import_prikey(self):
        # Import the private keys of all mortgage-coin addresses into the
        # main-chain node so redeemed funds show up in node0's balance.
        for item in self.snode0.listmortgagecoins():
            prikey = self.snode0.dumpprivkey(item['address'])
            self.node0.importprivkey(prikey)
# Standard functional-test entry point.
if __name__ == '__main__':
    RedeemMortgageTest().main()
|
import {
Controller,
UseGuards,
Post,
Request,
Get,
HttpCode,
UseInterceptors,
ClassSerializerInterceptor
} from '@nestjs/common';
import { ApiBody, ApiBearerAuth, ApiHeader, ApiTags, ApiResponse } from '@nestjs/swagger';
import { AuthService } from './auth.service';
import { JwtAuthGuard } from './guards/jwt-auth.guard';
import { LocalAuthGuard } from './guards/local-auth.guard';
import { JwtResponseDto } from './dto/jwt-response.dto';
import { UserCredentialsDto } from './dto/user-credentials.dto';
@ApiTags('Auth')
@UseInterceptors(ClassSerializerInterceptor)
@Controller('auth')
/**
 * REST endpoints for authentication (login/logout).
 *
 * ClassSerializerInterceptor applies class-transformer serialization rules
 * to every response returned from this controller.
 */
@ApiTags('Auth')
@UseInterceptors(ClassSerializerInterceptor)
@Controller('auth')
export class AuthController {
  constructor(
    private authService: AuthService
  ) {}

  /**
   * Logs a user in.
   *
   * LocalAuthGuard validates the posted credentials and places the result on
   * req.user, so this handler only has to return it as a JwtResponseDto.
   */
  @ApiResponse({ status: 200, description: 'Login was successful', type: JwtResponseDto })
  @ApiResponse({ status: 401, description: 'Invalid credentials' })
  @ApiBody({ type: UserCredentialsDto })
  @UseGuards(LocalAuthGuard)
  @Post('login')
  @HttpCode(200)
  async login(@Request() req): Promise<JwtResponseDto> {
    return req.user as JwtResponseDto;
  }

  /**
   * Logs the authenticated user out.
   *
   * JwtAuthGuard authenticates the bearer token and exposes the user on
   * req.user; the request is then delegated to AuthService.logout with the
   * user's email.
   */
  @ApiBearerAuth()
  @ApiResponse({ status: 200, description: 'Logout was successful' })
  @ApiResponse({ status: 401, description: 'Invalid credentials' })
  @ApiHeader({ name: 'Authorization', description: 'JWT Token' })
  @UseGuards(JwtAuthGuard)
  @Get('logout')
  async logout(@Request() req): Promise<{message: string}> {
    return this.authService.logout(req.user.email);
  }
}
|
from blackbox.handlers.databases._base import BlackboxDatabase
from blackbox.utils import run_command
from blackbox.utils.logger import log
class MongoDB(BlackboxDatabase):
    """
    A Database handler that will do a mongodump for MongoDB, backing up all documents.

    This will use mongodump with --gzip and --archive, and must be restored using the same
    arguments, e.g. mongorestore --gzip --archive=/path/to/file.archive.
    """

    required_fields = ("connection_string",)
    backup_extension = ".archive"

    def backup(self, backup_path) -> None:
        """Dump all the data to a file and then return the filepath."""
        command = " ".join(
            [
                "mongodump",
                f"--uri={self.config['connection_string']}",
                "--gzip",
                "--forceTableScan",
                f"--archive={backup_path}",
            ]
        )
        # Run the backup, and store the outcome in this object.
        self.success, self.output = run_command(command)
        log.debug(self.output)
|
package com.trigonated.gamecollection.di
import com.trigonated.gamecollection.api.rawg.RawgService
import dagger.Module
import dagger.Provides
import dagger.hilt.InstallIn
import dagger.hilt.components.SingletonComponent
import javax.inject.Singleton
@InstallIn(SingletonComponent::class)
@Module
class RawgApiModule {
    /** Provides the application-wide [RawgService] instance. */
    @Singleton
    @Provides
    fun provideRawgService(): RawgService = RawgService.create()
}
|
using System;
namespace ZKWebStandard.Ioc
{
    /// <summary>
    /// Singleton reuse attribute<br/>
    /// A convenient attribute from ReuseAttribute<br/>
    /// 标记单例的属性<br/>
    /// 继承了ReuseAttribute的便捷属性<br/>
    /// </summary>
    /// <seealso cref="IContainer"/>
    /// <seealso cref="Container"/>
    // Valid on classes and structs only; deliberately non-inherited so that
    // derived types must opt in to singleton reuse explicitly.
    [AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct, Inherited = false)]
    public class SingletonReuseAttribute : ReuseAttribute
    {
        /// <summary>
        /// Initialize<br/>
        /// 初始化<br/>
        /// </summary>
        // Shorthand for applying ReuseAttribute with ReuseType.Singleton.
        public SingletonReuseAttribute() : base(ReuseType.Singleton) { }
    }
}
|
$LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require 'data-sink-client'
require 'webmock/rspec'
WebMock.disable_net_connect!
# Gzip-compresses +body+ and returns the compressed bytes as a String.
#
# Used to build gzipped bodies for WebMock stubs. Zlib.gzip (stdlib,
# Ruby >= 2.4) replaces the previous hand-rolled StringIO + GzipWriter
# pipeline, which seeded its buffer with a stray "w"
# (StringIO.new("w")) and only worked because the 10-byte gzip header
# happened to overwrite it.
def gzip(body)
  Zlib.gzip(body)
end
|
require 'spec_helper'

# Feature coverage for the storefront sign-up form.
feature 'Sign Up' do
  context 'with valid data' do
    scenario 'create a new user' do
      visit spree.signup_path
      fill_in 'Email', with: 'email@person.com'
      fill_in 'Password', with: 'password'
      fill_in 'Password Confirmation', with: 'password'
      click_button 'Create'
      # Successful registration shows the flash message and persists the user.
      expect(page).to have_text 'You have signed up successfully.'
      expect(Spree::User.count).to eq(1)
    end
  end

  context 'with invalid data' do
    scenario 'does not create a new user' do
      visit spree.signup_path
      fill_in 'Email', with: 'email@person.com'
      fill_in 'Password', with: 'password'
      # A blank confirmation must fail the password-match validation.
      fill_in 'Password Confirmation', with: ''
      click_button 'Create'
      # Validation failure renders the error summary and persists nothing.
      expect(page).to have_css '#errorExplanation'
      expect(Spree::User.count).to eq(0)
    end
  end
end
/*
Copyright 2011-2012 Stefano Chizzolini. http://www.pdfclown.org
Contributors:
* Stefano Chizzolini (original code developer, http://www.stefanochizzolini.it)
This file should be part of the source code distribution of "PDF Clown library" (the
Program): see the accompanying README files for more info.
This Program is free software; you can redistribute it and/or modify it under the terms
of the GNU Lesser General Public License as published by the Free Software Foundation;
either version 3 of the License, or (at your option) any later version.
This Program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY,
either expressed or implied; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the License for more details.
You should have received a copy of the GNU Lesser General Public License along with this
Program (see README files); if not, go to the GNU website (http://www.gnu.org/licenses/).
Redistribution and use, with or without modification, are permitted provided that such
redistributions retain the above copyright notice, license and disclaimer, along with
this list of conditions.
*/
using System;
using PasswordProtectedChecker.Pdf.Interfaces;
namespace PasswordProtectedChecker.Pdf
{
    /**
      <summary>Base PDF parser [PDF:1.7:3.2].</summary>
    */
    public class BaseParser : PostScriptParser
    {
        #region dynamic
        #region constructors
        // Parse from a seekable input stream.
        protected BaseParser(
            IInputStream stream
        ) : base(stream)
        {
        }

        // Parse from an in-memory buffer.
        protected BaseParser(
            byte[] data
        ) : base(data)
        {
        }
        #endregion

        #region interface
        #region public
        /// <summary>
        /// Advances to the next meaningful token.
        /// Skips comment tokens entirely and, for literal tokens that start
        /// with the PDF date prefix, eagerly converts the token to a date
        /// (degrading back to a plain literal when the date is malformed).
        /// Returns false when the underlying stream is exhausted.
        /// </summary>
        public override bool MoveNext(
        )
        {
            bool moved;
            // NOTE: assignment (not comparison) in the condition — the loop
            // keeps pulling tokens while the base parser still produces them.
            while (moved = base.MoveNext())
            {
                var tokenType = TokenType;
                if (tokenType == TokenTypeEnum.Comment)
                    continue; // Comments are ignored.

                if (tokenType == TokenTypeEnum.Literal)
                {
                    var literalToken = (string) Token;
                    if (literalToken.StartsWith(Keyword.DatePrefix)) // Date.
                        try
                        {
                            Token = PdfDate.ToDate(literalToken);
                        }
                        catch (ParseException)
                        {
                            /* NOOP: gently degrade to a common literal. */
                        }
                }
                // First non-comment token found: stop scanning.
                break;
            }
            return moved;
        }

        /**
          <summary>Parses the current PDF object [PDF:1.6:3.2].</summary>
        */
        // Recursively builds the data object rooted at the current token:
        // scalars map 1:1, while dictionaries/arrays consume tokens until
        // their matching end delimiter.
        public virtual PdfDataObject ParsePdfObject(
        )
        {
            switch (TokenType)
            {
                case TokenTypeEnum.Integer:
                    return PdfInteger.Get((int) Token);
                case TokenTypeEnum.Name:
                    return new PdfName((string) Token, true);
                case TokenTypeEnum.DictionaryBegin:
                {
                    var dictionary = new PdfDictionary();
                    // Suspend change tracking while the dictionary is filled.
                    dictionary.Updateable = false;
                    while (true)
                    {
                        // Key.
                        MoveNext();
                        if (TokenType == TokenTypeEnum.DictionaryEnd) break;
                        var key = (PdfName) ParsePdfObject();
                        // Value.
                        MoveNext();
                        var value = (PdfDirectObject) ParsePdfObject();
                        // Add the current entry to the dictionary!
                        dictionary[key] = value;
                    }
                    dictionary.Updateable = true;
                    return dictionary;
                }
                case TokenTypeEnum.ArrayBegin:
                {
                    var array = new PdfArray();
                    // Suspend change tracking while the array is filled.
                    array.Updateable = false;
                    while (true)
                    {
                        // Value.
                        MoveNext();
                        if (TokenType == TokenTypeEnum.ArrayEnd) break;
                        // Add the current item to the array!
                        array.Add((PdfDirectObject) ParsePdfObject());
                    }
                    array.Updateable = true;
                    return array;
                }
                case TokenTypeEnum.Literal:
                    // MoveNext() may already have promoted the literal to a
                    // DateTime (see above); honor that here.
                    if (Token is DateTime)
                        return PdfDate.Get((DateTime) Token);
                    else
                        return new PdfTextString(
                            Encoding.Pdf.Encode((string) Token)
                        );
                case TokenTypeEnum.Hex:
                    return new PdfTextString(
                        (string) Token,
                        PdfString.SerializationModeEnum.Hex
                    );
                case TokenTypeEnum.Real:
                    return PdfReal.Get((double) Token);
                case TokenTypeEnum.Boolean:
                    return PdfBoolean.Get((bool) Token);
                case TokenTypeEnum.Null:
                    return null;
                default:
                    throw new Exception("Unknown type: " + TokenType);
            }
        }

        /**
          <summary>Parses a PDF object after moving to the given token offset.</summary>
          <param name="offset">Number of tokens to skip before reaching the intended one.</param>
          <seealso cref="ParsePdfObject()"/>
        */
        public PdfDataObject ParsePdfObject(
            int offset
        )
        {
            MoveNext(offset);
            return ParsePdfObject();
        }
        #endregion
        #endregion
        #endregion
    }
}
|
import java.util.Scanner;
import warehouses.Cache;
import simulated.Simulator;
/**
 * Entry point: reads simulation parameters (mean, rng, storage limit)
 * from standard input and runs the warehouse {@link simulated.Simulator}.
 *
 * NOTE(review): this class looks auto-generated/obfuscated. The unused
 * instance fields and the instance initializer are preserved for
 * compatibility with other generated callers, but the eight dead local
 * variables that main() assigned and never read have been removed, and
 * the remaining locals are renamed to match the usage error message.
 */
public class Chiral {
    production.Exporter righ;
    production.Exporter fh;
    production.Exporter r;
    java.lang.String q;

    public static final double quantify = 0.6181309728629134;

    public static synchronized void main(String[] align) {
        // Clear the console (form feed) before any other output.
        System.out.print("\f");

        // Probe stdin so we can fail fast with a helpful message when no
        // input was piped in.
        int available = 0;
        try {
            available = System.in.available();
        } catch (Exception e) {
        }

        if (available <= 0) {
            System.out.println("ERROR: System.in is empty, no file was passed or it is empty");
        } else {
            double mean;
            double rng;
            int storageLimit;
            try {
                Scanner keypad = new Scanner(System.in);
                mean = keypad.nextDouble();
                rng = keypad.nextDouble();
                storageLimit = keypad.nextInt();
            } catch (Exception adrian) {
                System.out.println(
                    "ERROR: There are not enough values or the values passed are in the incorrect format");
                System.out.println(
                    " Values should be in the form mean (double) rng (double) storageLimit (int)");
                return;
            }
            // 10,000,000 simulation steps with the parsed distribution
            // parameters, bounded by the configured storage limit.
            Simulator slm = new Simulator(10000000, mean, rng);
            Cache.doGarageRestricted(storageLimit);
            slm.began();
        }
    }

    // Instance initializer kept from the generated source; these fields are
    // never read by main() but may be referenced by other generated code.
    {
        q = "";
        r = null;
        fh = null;
        righ = null;
    }
}
|
const errorMsgGenerator = require('../../../utils/errorMessageGenerator')
const brandServices = require('../../brands/services/index-brand.service')
const imageServices = require('../../images/image-services')
const categoryServices = require('../../categories/services/index-category.service')
const productServices = require('../../products/services/index-product.service')
module.exports = async function editProduct (req, res, next) {
const productId = req.params.id
let { brand, model, description, price, categories, color } = req.body
let files = req.files || []
try {
categories = JSON.parse(categories)
} catch (error) {
return next(new TypeError(errorMsgGenerator.invalidDataMsg('categoriesIds', 'JSON array', categories)))
}
let images
try {
let product = await productServices.getById(productId)
let newBrand = await brandServices.getByName(brand)
let oldBrand = product.brand
let newCategories = await Promise.all(categories.map(categoryId => categoryServices.getByName(categoryId)))
let oldCategories = product.categories
if (oldBrand.name !== brand) {
await brandServices.removeProduct(oldBrand._id, product._id)
await brandServices.addProduct(newBrand._id, product._id)
}
await Promise.all(oldCategories.map(category => categoryServices.removeProduct(category._id, product._id)))
await Promise.all(newCategories.map(category => categoryServices.addProduct(category._id, product._id)))
let categoriesIds = newCategories.map(category => category._id)
images = files ? await Promise.all(Object.keys(files).map(key => imageServices.create(files[key]))) : []
await productServices.edit(product._id, newBrand._id, model, description, price, categoriesIds, color, images)
product = await productServices.getPublic(product._id)
res.json(product)
} catch (error) {
if (images) {
await Promise.all(images.map(image => imageServices.remove(image)))
}
next(error)
}
}
|
// This file auto generated by plugin for ida pro. Generated code only for x64. Please, dont change manually
#pragma once
#include <common/common.h>
#include <CIndexList.hpp>
START_ATF_NAMESPACE
namespace Info
{
    // Auto-generated hook typedefs for CIndexList methods.
    // Pattern per method:
    //   *_ptr  — pointer to the original method implementation;
    //   *_clbk — detour/callback that receives the original (*_ptr) as its
    //            trailing argument so it can forward the call.
    using CIndexListctor_CIndexList2_ptr = void (WINAPIV*)(struct CIndexList*);
    using CIndexListctor_CIndexList2_clbk = void (WINAPIV*)(struct CIndexList*, CIndexListctor_CIndexList2_ptr);
    using CIndexListCopyFront4_ptr = bool (WINAPIV*)(struct CIndexList*, unsigned int*, char*);
    using CIndexListCopyFront4_clbk = bool (WINAPIV*)(struct CIndexList*, unsigned int*, char*, CIndexListCopyFront4_ptr);
    using CIndexListFindNode6_ptr = bool (WINAPIV*)(struct CIndexList*, unsigned int, char*);
    using CIndexListFindNode6_clbk = bool (WINAPIV*)(struct CIndexList*, unsigned int, char*, CIndexListFindNode6_ptr);
    using CIndexListGetAllNode8_ptr = struct CIndexList::_index_node* (WINAPIV*)(struct CIndexList*, unsigned int*);
    using CIndexListGetAllNode8_clbk = struct CIndexList::_index_node* (WINAPIV*)(struct CIndexList*, unsigned int*, CIndexListGetAllNode8_ptr);
    using CIndexListGetSize10_ptr = int (WINAPIV*)(struct CIndexList*);
    using CIndexListGetSize10_clbk = int (WINAPIV*)(struct CIndexList*, CIndexListGetSize10_ptr);
    using CIndexListIsInList12_ptr = bool (WINAPIV*)(struct CIndexList*, unsigned int, char*);
    using CIndexListIsInList12_clbk = bool (WINAPIV*)(struct CIndexList*, unsigned int, char*, CIndexListIsInList12_ptr);
    using CIndexListIsSetting14_ptr = bool (WINAPIV*)(struct CIndexList*);
    using CIndexListIsSetting14_clbk = bool (WINAPIV*)(struct CIndexList*, CIndexListIsSetting14_ptr);
    using CIndexListPopNode_Front16_ptr = bool (WINAPIV*)(struct CIndexList*, unsigned int*, char*);
    using CIndexListPopNode_Front16_clbk = bool (WINAPIV*)(struct CIndexList*, unsigned int*, char*, CIndexListPopNode_Front16_ptr);
    using CIndexListPushNode_Back18_ptr = bool (WINAPIV*)(struct CIndexList*, unsigned int, char*);
    using CIndexListPushNode_Back18_clbk = bool (WINAPIV*)(struct CIndexList*, unsigned int, char*, CIndexListPushNode_Back18_ptr);
    using CIndexListResetList20_ptr = void (WINAPIV*)(struct CIndexList*);
    using CIndexListResetList20_clbk = void (WINAPIV*)(struct CIndexList*, CIndexListResetList20_ptr);
    using CIndexListSetList22_ptr = bool (WINAPIV*)(struct CIndexList*, unsigned int, unsigned int, bool);
    using CIndexListSetList22_clbk = bool (WINAPIV*)(struct CIndexList*, unsigned int, unsigned int, bool, CIndexListSetList22_ptr);
    using CIndexListdtor_CIndexList27_ptr = void (WINAPIV*)(struct CIndexList*);
    using CIndexListdtor_CIndexList27_clbk = void (WINAPIV*)(struct CIndexList*, CIndexListdtor_CIndexList27_ptr);
}; // end namespace Info
END_ATF_NAMESPACE
|
<?php
declare(strict_types=1);
namespace Jellyfish\LogMonolog;
use Codeception\Test\Unit;
use Jellyfish\Config\ConfigFacadeInterface;
use Jellyfish\Log\LogConstants;
use Monolog\Logger;
class LogMonologFactoryTest extends Unit
{
    /**
     * Mocked config facade injected into the factory under test.
     *
     * @var \Jellyfish\Config\ConfigFacadeInterface|\PHPUnit\Framework\MockObject\MockObject
     */
    protected $configFacadeMock;

    /**
     * Subject under test.
     *
     * @var \Jellyfish\LogMonolog\LogMonologFactory
     */
    protected LogMonologFactory $logMonologFactory;

    /**
     * Builds a fresh factory with a mocked config facade before each test.
     *
     * @return void
     */
    protected function _before(): void
    {
        parent::_before();

        $this->configFacadeMock = $this->getMockBuilder(ConfigFacadeInterface::class)
            ->disableOriginalConstructor()
            ->getMock();

        $this->logMonologFactory = new LogMonologFactory(
            $this->configFacadeMock,
            DIRECTORY_SEPARATOR
        );
    }

    /**
     * Verifies that the factory reads logger name and log level from the
     * config facade (falling back to defaults) and yields a Monolog Logger.
     *
     * NOTE(review): withConsecutive() is deprecated in recent PHPUnit
     * versions — confirm the pinned PHPUnit still supports it.
     *
     * @return void
     */
    public function testGetLogger(): void
    {
        $this->configFacadeMock->expects(static::atLeastOnce())
            ->method('get')
            ->withConsecutive(
                [LogConstants::LOGGER_NAME, LogConstants::DEFAULT_LOGGER_NAME],
                [LogConstants::LOG_LEVEL, LogConstants::DEFAULT_LOG_LEVEL],
                [LogConstants::LOG_LEVEL, LogConstants::DEFAULT_LOG_LEVEL]
            )->willReturnOnConsecutiveCalls(
                LogConstants::DEFAULT_LOGGER_NAME,
                LogConstants::DEFAULT_LOG_LEVEL,
                LogConstants::DEFAULT_LOG_LEVEL
            );

        static::assertInstanceOf(
            Logger::class,
            $this->logMonologFactory->getLogger()
        );
    }
}
|
module ActsAsTaggableOn
  # Join model linking a Tag to a taggable record (and, optionally, to the
  # tagger who applied it) within a named tagging context.
  class Tagging < ::ActiveRecord::Base #:nodoc:
    # Whitelist mass-assignable attributes, but only on Rails versions that
    # still ship MassAssignmentSecurity (pre strong-parameters).
    attr_accessible :tag,
                    :tag_id,
                    :context,
                    :taggable,
                    :taggable_type,
                    :taggable_id,
                    :tagger,
                    :tagger_type,
                    :tagger_id if defined?(ActiveModel::MassAssignmentSecurity)

    belongs_to :tag, :class_name => 'ActsAsTaggableOn::Tag'
    belongs_to :taggable, :polymorphic => true
    belongs_to :tagger, :polymorphic => true

    validates_presence_of :context
    validates_presence_of :tag_id

    # The same tag may only be applied once per taggable/context/tagger.
    validates_uniqueness_of :tag_id, :scope => [ :taggable_type, :taggable_id, :context, :tagger_id, :tagger_type ]

    after_destroy :remove_unused_tags

    private

    # Garbage-collects a tag once its last tagging is destroyed, when the
    # library-wide remove_unused_tags option is enabled.
    def remove_unused_tags
      if ActsAsTaggableOn.remove_unused_tags
        if tag.taggings.count.zero?
          tag.destroy
        end
      end
    end
  end
end
|
#!/bin/bash
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################
# Simple Test Harness #
# Instantiates the cookiecutter template, and runs its tests #
##############################################################
_curdir="$(pwd)"

cd staging || exit 1
# Instantiate the template into ./terraform-google-module-test.
cookiecutter --no-input "${_curdir}" module_name=module-test
cd ./terraform-google-module-test || exit 1

# Fail fast — but still clean up — when the test environment cannot be
# prepared. Previously a failed prepare step was ignored and the
# integration tests ran against a broken environment.
if ! make docker_test_prepare; then
  make docker_test_cleanup
  exit 1
fi

make docker_test_integration
_result=$?
if [ "$_result" -ne "0" ]; then
  make docker_test_cleanup
  exit $_result
fi

cd .. || exit 1
sudo rm -rf terraform-google-module-test
cd "${_curdir}" || exit 1
|
using Microsoft.Extensions.DependencyInjection;
namespace Phema.Random
{
    /// <summary>
    /// Dependency-injection registration and convenience helpers for
    /// <see cref="IRandom"/>.
    /// </summary>
    public static class RandomExtensions
    {
        /// <summary>
        /// Registers <typeparamref name="TRandom"/> as the scoped
        /// <see cref="IRandom"/> implementation.
        /// </summary>
        public static IServiceCollection AddRandom<TRandom>(this IServiceCollection services)
            where TRandom : class, IRandom
            => services.AddScoped<IRandom, TRandom>();

        /// <summary>
        /// Returns a random boolean with equal probability for both values.
        /// </summary>
        public static bool NextBool(this IRandom random) => random.Next(2) == 0;
    }
}
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
module Shed.Images where
import Codec.Picture (DynamicImage (ImageRGB8), Image (..),
convertRGB8, decodeImage)
import Codec.Picture.Extra (scaleBilinear)
import Codec.Picture.Saving (imageToJpg)
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as BL
import Data.ByteString.Unsafe (unsafePackMallocCStringLen)
import Data.Monoid ((<>))
import Foreign.C.Types
import qualified Language.C.Inline as C
C.context (C.baseCtx <> C.bsCtx)
C.include "<stdlib.h>"
C.include "<string.h>"
C.include "<libexif/exif-loader.h>"
-- | Extract the EXIF-embedded thumbnail from a JPEG, if one is present.
--
-- The parsing happens in inline C via libexif: the loader consumes the
-- JPEG bytes, and when the EXIF data contains a thumbnail it is copied
-- into a freshly malloc'd buffer whose ownership is handed back to
-- Haskell (return value 0 on success, -1 otherwise).
getExifThumbnail :: ByteString -> IO (Maybe ByteString)
getExifThumbnail jpg = (C.withPtr $ \str -> C.withPtr $ \size -> [C.block|
  int {
    ExifLoader *l = exif_loader_new();
    ExifData *ed;
    exif_loader_write(l, (unsigned char *)$bs-ptr:jpg, $bs-len:jpg);
    ed = exif_loader_get_data(l);
    exif_loader_unref(l);
    if (ed) {
      if (ed->data && ed->size) {
        *$(char** str) = (char *)malloc(ed->size);
        memcpy(*$(char** str), ed->data, ed->size);
        *$(int* size) = ed->size;
        exif_data_unref(ed);
        return 0;
      } else {
        exif_data_unref(ed);
        return -1;
      }
    } else {
      return -1;
    }
  }
|]) >>= \(ptr, (len, rv)) ->
  -- rv == 0 signals success; the malloc'd buffer is adopted (and later
  -- freed) by the ByteString via unsafePackMallocCStringLen.
  if rv == 0 then do
    Just <$> unsafePackMallocCStringLen (ptr, fromIntegral len)
  else
    return Nothing
-- | Produce a JPEG thumbnail (quality 75) whose longest side is 128px,
-- preserving the source image's aspect ratio. Yields 'Nothing' when the
-- input bytes cannot be decoded as an image.
createThumbnail :: ByteString -> IO (Maybe BL.ByteString)
createThumbnail bs =
  case decodeImage bs of
    Left _ -> return Nothing
    Right decoded -> return (Just (encodeScaled (convertRGB8 decoded)))
  where
    -- Scale so the longer dimension becomes 128, then re-encode as JPEG.
    encodeScaled img =
      let h = imageHeight img
          w = imageWidth img
          (w', h')
            | h >= w = ((128 * w) `div` h, 128)
            | otherwise = (128, (128 * h) `div` w)
      in imageToJpg 75 (ImageRGB8 (scaleBilinear w' h' img))
|
/**
 * Reads a File and resolves with its contents as a data URL.
 *
 * Resolves with `null` when the read fails — previously no 'error'
 * listener was registered, so the returned promise never settled on a
 * failed read — or when the reader produced no result.
 */
export const convertToDataUrl = (
  file: Readonly<File>
): Promise<string | null> => {
  const reader = new FileReader()
  return new Promise(resolve => {
    reader.addEventListener(
      'load',
      event => {
        // Because `readAsDataURL` is used, the result type is string.
        // see: https://developer.mozilla.org/ja/docs/Web/API/FileReader/result
        const thumbnailDataUrl = event.target?.result as string | null
        resolve(thumbnailDataUrl)
      },
      { once: true }
    )
    // A failed read fires 'error' (never 'load'); settle the promise
    // instead of leaving callers hanging forever.
    reader.addEventListener('error', () => resolve(null), { once: true })
    // Start the read only after both listeners are attached.
    reader.readAsDataURL(file)
  })
}
|
package monixdoc.evaluation.task
import monix.eval.{Fiber, Task}
import monix.execution.CancelableFuture
import monix.execution.Scheduler.Implicits.global
import scala.concurrent.duration._
import scala.language.postfixOps
// Demonstrates Task.raceMany: races two delayed tasks and prints the
// value of whichever completes first (the loser is cancelled).
object App15bTaskRaceMany extends App {

  println("\n-----")

  // Two tasks that each complete after ~1 second.
  val ta = Task(1 + 1).delayExecution(1 second)
  val tb = Task(10).delayExecution(1 second)

  val winnerTask: Task[Int] = Task.raceMany(Seq(ta, tb))

  // Run the race on the global scheduler and print the winning value.
  winnerTask.runToFuture
    .foreach(r => println(s"Winner: $r"))

  // Keep the JVM alive long enough for the async result to arrive.
  Thread.sleep(3000L)

  println("-----\n")
}
|
#!/usr/bin/env bash
# Append the line "hello" after line 2 of test.txt.
#
# The original `sed -e "2a" -e "hello"` is a syntax error: the text for
# the `a` (append) command must follow a backslash-newline. The two -e
# expressions below emulate that portably (POSIX sed).
sed -e '2a\' -e 'hello' ./test.txt
|
#pragma once
#include "parser.h"
#include <optional>
#include <utility>
namespace kapows::pc {

// Parser that always fails: consumes nothing and produces no result.
template <typename T> struct zero_t {
public: // typedefs
  using result_type = T;

public: // observers
  constexpr auto operator()(parser_input_t input) const -> parser_output_t<T> {
    // Unconditional failure, regardless of input.
    return std::nullopt;
  }
};

// Zero :: Parser a
template <typename T> inline constexpr auto zero = zero_t<T>{};

// Parser that always succeeds with a fixed value, consuming no input.
template <typename T> struct result_t {
public: // typedefs
  using result_type = T;

private: // variables
  T x;

public: // constructors
  constexpr result_t(T x) : x(x) {}

public: // observers
  constexpr auto operator()(parser_input_t input) const -> parser_output_t<T> {
    // Succeed with the stored value; the input is passed through intact.
    return std::make_pair(x, input);
  }
};

// Result :: a -> Parser a
template <typename T> constexpr auto result(T x) { return result_t<T>{x}; }

// Parser that consumes exactly one character, failing on empty input.
struct item_t {
public: // typedefs
  using result_type = char;

public: // observers
  constexpr auto operator()(parser_input_t input) const
      -> parser_output_t<char> {
    if (!input.empty()) {
      // Succeed with the first character and the remaining input.
      return result(input.front())(input.substr(1));
    } else {
      return zero<char>(input);
    }
  };
};

// Item :: Parser a
inline constexpr auto item = item_t{};

} // namespace kapows::pc
|
import pygame
import time
class time_counting:
    """Tracks and renders elapsed in-game time as HH:MM:SS.

    The counter is frame based: ``time_.frame_count`` is advanced from
    the wall-clock delta scaled by ``game_speed``, then converted to
    hours/minutes/seconds for display.
    """

    def __init__(self, time_):
        # `time_` is a mutable holder exposing .frame_count/.h/.m/.s.
        self.font = pygame.font.Font(None, 25)
        self.frame_rate = 60
        self.time = time_
        self.prev_time = time.time()
        screen_size = pygame.display.get_surface().get_size()
        width = screen_size[0]
        # Scale factor for HUD positioning on narrower screens.
        self.multicateur = 1 if width // 2 > 1000 else ((width * (3 / 8)) / (1000))

    def update(self, game_speed):
        """Advance the frame counter and refresh h/m/s on ``self.time``.

        Bug fix: minutes are now reduced modulo 60 — previously ``m``
        held *total* minutes, so after one hour the clock displayed e.g.
        01:61:00 instead of 01:01:00.
        """
        now = time.time()
        dt = now - self.prev_time
        self.total_seconds = self.time.frame_count // self.frame_rate
        self.time.h = self.total_seconds // 3600
        self.time.m = (self.total_seconds // 60) % 60
        self.time.s = self.total_seconds % 60
        # Advance by the real-time delta, scaled by the game speed.
        self.time.frame_count += round(dt * 60 * game_speed)
        self.prev_time = now

    def draw_time(self, screen):
        """Render the formatted HH:MM:SS string onto ``screen``."""
        output_string = "{0:02}:{1:02}:{2:02}".format(
            self.time.h, self.time.m, self.time.s
        )
        text = self.font.render(output_string, True, (255, 255, 255))
        screen.blit(text, [10, 10 + 105 * self.multicateur])  # position
|
package org.opentorah.docbook
import org.opentorah.xml.{Attribute, Element, Parsable, Parser, Unparser}
/** Configuration of a single DocBook output format (e.g. "html").
  *
  * NOTE(review): per the TODOs below, "format" currently conflates the
  * format proper and its variant.
  */
final class OutputConfiguration(val format: String)

// TODO split out format and variant
// TODO clean up parsing/unparsing
object OutputConfiguration extends Element[OutputConfiguration]("output"):
  override def contentParsable: Parsable[OutputConfiguration] = new Parsable[OutputConfiguration]:
    // The XML attribute carrying the format name; required.
    private val formatAttribute: Attribute.Required[String] = Attribute("format").required

    override def parser: Parser[OutputConfiguration] = for
      format: String <- formatAttribute()
    yield OutputConfiguration(format)

    override def unparser: Unparser[OutputConfiguration] = Unparser.concat[OutputConfiguration](
      formatAttribute(_.format)
    )

  /** Collapses a sequence of configurations to the distinct set of format names. */
  def toSet(output: Seq[OutputConfiguration]): Set[String] = output.map(_.format).toSet

  /** Inverse of toSet: wraps each format name in an OutputConfiguration. */
  def fromSet(output: Set[String]): Seq[OutputConfiguration] = output.toSeq.map(OutputConfiguration(_))
|
<?php
/**
* Created by PhpStorm
* User: Pony
* Date: 2021/12/23
* Time: 5:01 下午
*/
declare(strict_types=1);
namespace PonyCool\Es;
/**
 * Connection settings for an Elasticsearch client.
 *
 * All values are initialized at their property declarations (replacing
 * the former constructor), and every setter is fluent, returning $this.
 */
class Config
{
    /** @var array List of hosts ("host:port" entries or host arrays). */
    protected array $hosts = [];

    /** @var string Single host name or IP address. */
    protected string $host = '';

    /** @var int Elasticsearch port; defaults to 9200. */
    protected int $port = 9200;

    /** @var string Connection scheme, "http" or "https". */
    protected string $scheme = 'http';

    /** @var string Basic-auth user name. */
    protected string $user = '';

    /** @var string Basic-auth password. */
    protected string $pass = '';

    /** Returns the configured hosts list. */
    public function getHosts(): array
    {
        return $this->hosts;
    }

    /** Replaces the hosts list. */
    public function setHosts(array $hosts): Config
    {
        $this->hosts = $hosts;
        return $this;
    }

    /** Returns the single configured host. */
    public function getHost(): string
    {
        return $this->host;
    }

    /** Sets the single host. */
    public function setHost(string $host): Config
    {
        $this->host = $host;
        return $this;
    }

    /** Returns the configured port. */
    public function getPort(): int
    {
        return $this->port;
    }

    /** Sets the port. */
    public function setPort(int $port): Config
    {
        $this->port = $port;
        return $this;
    }

    /** Returns the connection scheme. */
    public function getScheme(): string
    {
        return $this->scheme;
    }

    /** Sets the connection scheme. */
    public function setScheme(string $scheme): Config
    {
        $this->scheme = $scheme;
        return $this;
    }

    /** Returns the basic-auth user name. */
    public function getUser(): string
    {
        return $this->user;
    }

    /** Sets the basic-auth user name. */
    public function setUser(string $user): Config
    {
        $this->user = $user;
        return $this;
    }

    /** Returns the basic-auth password. */
    public function getPass(): string
    {
        return $this->pass;
    }

    /** Sets the basic-auth password. */
    public function setPass(string $pass): Config
    {
        $this->pass = $pass;
        return $this;
    }
}
|
#!/usr/bin/env bash
# Run a DNS lookup (`host`) for every hostname listed in the file given
# as $1, one name per line.
#
# `while read` replaces the original unquoted `for comp in $(cat $1)`,
# which word-split the file contents and glob-expanded each token.
while IFS= read -r comp; do
  host "$comp"
done < "$1"
|
using Xunit;
using Main.Models;
namespace MainTests.Models
{
    // Unit tests for the one-dimensional Box1 container model.
    public class Box1Tests
    {
        // A freshly constructed box reports the length it was given.
        [Fact]
        public void CanCreateBox()
        {
            var box = new Box1(30);

            Assert.NotNull(box);
            Assert.Equal(30, box.Length(0));
        }

        // Placing a 5-unit widget at position 0 of a 10-unit box succeeds
        // and the string rendering shows 5 filled cells then 5 empty ones.
        [Fact]
        public void CanPutWidgetInBox()
        {
            var box = new Box1(10);
            var orientation = new int[] { 5 };
            var widget = new Widget(orientation);
            var position = new int[] { 0 };

            Assert.NotNull(box);
            Assert.Equal(10, box.Length(0));
            Assert.NotNull(widget);
            Assert.Equal(5, widget.Orientation[0]);

            var result = box.PlaceInBox(widget, position);

            Assert.True(result);
            Assert.Equal("***** ", box.LayerToString());
        }
    }
}
|
package ledgerstate
import (
"bytes"
"encoding/binary"
"math/rand"
"sync"
"testing"
"time"
"github.com/iotaledger/hive.go/byteutils"
"github.com/iotaledger/hive.go/crypto/ed25519"
"github.com/iotaledger/hive.go/identity"
"github.com/iotaledger/hive.go/marshalutil"
"github.com/iotaledger/hive.go/objectstorage"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"golang.org/x/crypto/blake2b"
)
// region AliasOutput Tests
// TestAliasOutput_NewAliasOutputMint exercises minting of alias outputs:
// the happy paths (with and without immutable data, with an alias state
// address), the dust-threshold balance check, and invalid constructor
// inputs (missing state address, oversized immutable data).
func TestAliasOutput_NewAliasOutputMint(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		stateAddy := randEd25119Address()
		alias, err := NewAliasOutputMint(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, stateAddy)
		assert.NoError(t, err)
		iotaBal, ok := alias.Balances().Get(ColorIOTA)
		assert.True(t, ok)
		assert.Equal(t, DustThresholdAliasOutputIOTA, iotaBal)
		assert.True(t, alias.GetStateAddress().Equals(stateAddy))
		// No immutable data supplied, so none should be stored.
		assert.Nil(t, alias.GetImmutableData())
	})

	t.Run("CASE: Happy path with immutable data", func(t *testing.T) {
		stateAddy := randEd25119Address()
		data := []byte("dummy")
		alias, err := NewAliasOutputMint(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, stateAddy, data)
		assert.NoError(t, err)
		iotaBal, ok := alias.Balances().Get(ColorIOTA)
		assert.True(t, ok)
		assert.Equal(t, DustThresholdAliasOutputIOTA, iotaBal)
		assert.True(t, alias.GetStateAddress().Equals(stateAddy))
		// The supplied immutable data must be stored verbatim.
		assert.True(t, bytes.Equal(alias.GetImmutableData(), data))
	})

	t.Run("CASE: Below dust threshold", func(t *testing.T) {
		stateAddy := randEd25119Address()
		data := []byte("dummy")
		// One token below the threshold must be rejected.
		alias, err := NewAliasOutputMint(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA - 1}, stateAddy, data)
		assert.Error(t, err)
		assert.Nil(t, alias)
	})

	t.Run("CASE: State address is an alias", func(t *testing.T) {
		// An alias address is also acceptable as the state address.
		stateAddy := randAliasAddress()
		data := []byte("dummy")
		alias, err := NewAliasOutputMint(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, stateAddy, data)
		assert.NoError(t, err)
		assert.NotNil(t, alias)
	})

	t.Run("CASE: Non existent state address", func(t *testing.T) {
		data := []byte("dummy")
		// A nil state address is invalid.
		alias, err := NewAliasOutputMint(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, nil, data)
		assert.Error(t, err)
		assert.Nil(t, alias)
	})

	t.Run("CASE: Too big state data", func(t *testing.T) {
		stateAddy := randAliasAddress()
		// Immutable data exceeding the payload limit must be rejected.
		data := make([]byte, MaxOutputPayloadSize+1)
		alias, err := NewAliasOutputMint(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, stateAddy, data)
		assert.Error(t, err)
		assert.Nil(t, alias)
	})
}
// TestAliasOutput_NewAliasOutputNext checks chaining of alias outputs:
// a state transition increments the state index while a governance
// transition leaves it unchanged and toggles the governance-updated
// flag; all other fields carry over from the previous output.
func TestAliasOutput_NewAliasOutputNext(t *testing.T) {
	originAlias := dummyAliasOutput()

	t.Run("CASE: Happy path, no governance update", func(t *testing.T) {
		nextAlias := originAlias.NewAliasOutputNext()
		assert.True(t, originAlias.GetAliasAddress().Equals(nextAlias.GetAliasAddress()))
		assert.True(t, originAlias.GetStateAddress().Equals(nextAlias.GetStateAddress()))
		assert.True(t, originAlias.GetGoverningAddress().Equals(nextAlias.GetGoverningAddress()))
		// outputid is actually irrelevant here
		assert.True(t, bytes.Equal(nextAlias.ID().Bytes(), originAlias.ID().Bytes()))
		assert.Equal(t, originAlias.Balances().Bytes(), nextAlias.Balances().Bytes())
		// A state transition bumps the state index by one.
		assert.Equal(t, originAlias.GetStateIndex()+1, nextAlias.GetStateIndex())
		assert.Equal(t, originAlias.GetStateData(), nextAlias.GetStateData())
		assert.Equal(t, originAlias.GetGovernanceMetadata(), nextAlias.GetGovernanceMetadata())
		assert.Equal(t, originAlias.GetImmutableData(), nextAlias.GetImmutableData())
		assert.Equal(t, originAlias.GetIsGovernanceUpdated(), nextAlias.GetIsGovernanceUpdated())
	})

	t.Run("CASE: Happy path, governance update", func(t *testing.T) {
		nextAlias := originAlias.NewAliasOutputNext(true)
		assert.True(t, originAlias.GetAliasAddress().Equals(nextAlias.GetAliasAddress()))
		assert.True(t, originAlias.GetStateAddress().Equals(nextAlias.GetStateAddress()))
		assert.True(t, originAlias.GetGoverningAddress().Equals(nextAlias.GetGoverningAddress()))
		// outputid is actually irrelevant here
		assert.True(t, bytes.Equal(nextAlias.ID().Bytes(), originAlias.ID().Bytes()))
		assert.Equal(t, originAlias.Balances().Bytes(), nextAlias.Balances().Bytes())
		// A governance transition leaves the state index untouched …
		assert.Equal(t, originAlias.GetStateIndex(), nextAlias.GetStateIndex())
		assert.Equal(t, originAlias.GetStateData(), nextAlias.GetStateData())
		assert.Equal(t, originAlias.GetGovernanceMetadata(), nextAlias.GetGovernanceMetadata())
		assert.Equal(t, originAlias.GetImmutableData(), nextAlias.GetImmutableData())
		// … but flips the governance-updated marker.
		assert.NotEqual(t, originAlias.GetIsGovernanceUpdated(), nextAlias.GetIsGovernanceUpdated())
	})

	t.Run("CASE: Previous was governance update, next is not", func(t *testing.T) {
		originAlias = dummyAliasOutput()
		// previous output was a governance update
		originAlias.SetIsGovernanceUpdated(true)
		nextAlias := originAlias.NewAliasOutputNext()
		// created output should not be a governance update
		assert.False(t, nextAlias.GetIsGovernanceUpdated())
		assert.True(t, originAlias.GetAliasAddress().Equals(nextAlias.GetAliasAddress()))
		assert.True(t, originAlias.GetStateAddress().Equals(nextAlias.GetStateAddress()))
		assert.True(t, originAlias.GetGoverningAddress().Equals(nextAlias.GetGoverningAddress()))
		// outputid is actually irrelevant here
		assert.True(t, bytes.Equal(nextAlias.ID().Bytes(), originAlias.ID().Bytes()))
		assert.Equal(t, originAlias.Balances().Bytes(), nextAlias.Balances().Bytes())
		assert.Equal(t, originAlias.GetStateIndex()+1, nextAlias.GetStateIndex())
		assert.Equal(t, originAlias.GetStateData(), nextAlias.GetStateData())
		assert.Equal(t, originAlias.GetGovernanceMetadata(), nextAlias.GetGovernanceMetadata())
		assert.Equal(t, originAlias.GetImmutableData(), nextAlias.GetImmutableData())
	})

	t.Run("CASE: Previous was governance update, next as well", func(t *testing.T) {
		originAlias = dummyAliasOutput()
		// previous output was a governance update
		originAlias.SetIsGovernanceUpdated(true)
		nextAlias := originAlias.NewAliasOutputNext(true)
		// created output should be a governance update
		assert.True(t, nextAlias.GetIsGovernanceUpdated())
		assert.True(t, originAlias.GetAliasAddress().Equals(nextAlias.GetAliasAddress()))
		assert.True(t, originAlias.GetStateAddress().Equals(nextAlias.GetStateAddress()))
		assert.True(t, originAlias.GetGoverningAddress().Equals(nextAlias.GetGoverningAddress()))
		// outputid is actually irrelevant here
		assert.True(t, bytes.Equal(nextAlias.ID().Bytes(), originAlias.ID().Bytes()))
		assert.Equal(t, originAlias.Balances().Bytes(), nextAlias.Balances().Bytes())
		assert.Equal(t, originAlias.GetStateIndex(), nextAlias.GetStateIndex())
		assert.Equal(t, originAlias.GetStateData(), nextAlias.GetStateData())
		assert.Equal(t, originAlias.GetGovernanceMetadata(), nextAlias.GetGovernanceMetadata())
		assert.Equal(t, originAlias.GetImmutableData(), nextAlias.GetImmutableData())
	})
}
// TestAliasOutputFromMarshalUtil exercises deserialization of AliasOutput.
// Beyond the happy path, it corrupts specific bytes of a valid serialization
// (byte 0 = output type, byte 1 = feature flags) to check that every
// flag/payload mismatch is detected by the parser.
func TestAliasOutputFromMarshalUtil(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		originAlias := dummyAliasOutput().WithDelegationAndTimelock(time.Now())
		bytesLength := len(originAlias.Bytes())
		marshaledAlias, consumed, err := OutputFromBytes(originAlias.Bytes())
		assert.NoError(t, err)
		// the parser must consume the entire serialization
		assert.Equal(t, bytesLength, consumed)
		assert.Equal(t, marshaledAlias.Bytes(), originAlias.Bytes())
	})
	t.Run("CASE: Wrong type", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		originBytes := originAlias.Bytes()
		// manually change output type byte
		originBytes[0] = 1
		marshalUtil := marshalutil.New(originBytes)
		_, err := AliasOutputFromMarshalUtil(marshalUtil)
		assert.Error(t, err)
	})
	t.Run("CASE: Wrong flag for state data", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		originBytes := originAlias.Bytes()
		flags := originAlias.mustFlags()
		flags = flags.ClearBit(flagAliasOutputStateDataPresent)
		// manually change flags: data is present in the stream but flag says it is not
		originBytes[1] = byte(flags)
		_, _, err := OutputFromBytes(originBytes)
		assert.Error(t, err)
	})
	t.Run("CASE: Wrong flag for governance metadata", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		originBytes := originAlias.Bytes()
		flags := originAlias.mustFlags()
		flags = flags.ClearBit(flagAliasOutputGovernanceMetadataPresent)
		// manually change flags
		originBytes[1] = byte(flags)
		_, _, err := OutputFromBytes(originBytes)
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Wrong flag for immutable data", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		originBytes := originAlias.Bytes()
		flags := originAlias.mustFlags()
		flags = flags.ClearBit(flagAliasOutputImmutableDataPresent)
		// manually change flags
		originBytes[1] = byte(flags)
		_, _, err := OutputFromBytes(originBytes)
		assert.Error(t, err)
	})
	t.Run("CASE: Wrong flag for governance address", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		originBytes := originAlias.Bytes()
		flags := originAlias.mustFlags()
		flags = flags.ClearBit(flagAliasOutputGovernanceSet)
		// manually change flags
		originBytes[1] = byte(flags)
		// parsing succeeds, but the trailing governing address bytes stay unconsumed
		_, consumedBytes, err := OutputFromBytes(originBytes)
		assert.NoError(t, err)
		assert.NotEqual(t, len(originBytes), consumedBytes)
	})
	t.Run("CASE: Flags provided, state data missing", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		// remove the data
		_ = originAlias.SetStateData(nil)
		originBytes := originAlias.Bytes()
		flags := originAlias.mustFlags()
		flags = flags.SetBit(flagAliasOutputStateDataPresent)
		// manually change flags: flag promises data that is not in the stream
		originBytes[1] = byte(flags)
		_, _, err := OutputFromBytes(originBytes)
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Flags provided, governance metadata missing", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		// remove the data
		_ = originAlias.SetGovernanceMetadata(nil)
		originBytes := originAlias.Bytes()
		flags := originAlias.mustFlags()
		flags = flags.SetBit(flagAliasOutputGovernanceMetadataPresent)
		// manually change flags
		originBytes[1] = byte(flags)
		_, _, err := OutputFromBytes(originBytes)
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Flags provided, immutable data missing", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		// remove the data
		err := originAlias.SetImmutableData(nil)
		assert.NoError(t, err)
		originBytes := originAlias.Bytes()
		flags := originAlias.mustFlags()
		flags = flags.SetBit(flagAliasOutputImmutableDataPresent)
		// manually change flags
		originBytes[1] = byte(flags)
		_, _, err = OutputFromBytes(originBytes)
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Flags provided, governing address missing", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		// remove the data (governing == state address means self-governed, so it is not serialized)
		originAlias.SetGoverningAddress(originAlias.stateAddress)
		originBytes := originAlias.Bytes()
		flags := originAlias.mustFlags()
		flags = flags.SetBit(flagAliasOutputGovernanceSet)
		// manually change flags
		originBytes[1] = byte(flags)
		_, _, err := OutputFromBytes(originBytes)
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Invalid balances", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		// 99 IOTA is below the dust threshold, hence invalid for an alias output
		invalidBalancesBytes := NewColoredBalances(map[Color]uint64{ColorIOTA: 99}).Bytes()
		originBytes := originAlias.Bytes()
		// serialized balances start at : output type (1 byte) + flags (1 byte) + AddressLength bytes (alias address)
		copy(originBytes[1+1+AddressLength:], invalidBalancesBytes)
		_, _, err := OutputFromBytes(originBytes)
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Invalid state index for chain starting output", func(t *testing.T) {
		originAlias, err := NewAliasOutputMint(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, randEd25119Address())
		assert.NoError(t, err)
		originBytes := originAlias.Bytes()
		// state index sits after type + flags + alias address + balances + state address
		stateIndexStartIndex := 1 + 1 + AddressLength + len(originAlias.balances.Bytes()) + AddressLength
		// an origin (minting) output must have state index 0
		binary.LittleEndian.PutUint32(originBytes[stateIndexStartIndex:], 5)
		_, _, err = OutputFromBytes(originBytes)
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Too much state data", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		originAlias.governanceMetadata = nil
		originAlias.immutableData = nil
		originAlias.governingAddress = nil
		originAlias.stateData = []byte{1}
		originBytes := originAlias.Bytes()
		// size prefix of the state data follows the 4-byte state index
		stateDataSizeIndex := 1 + 1 + AddressLength + len(originAlias.balances.Bytes()) + AddressLength + 4
		binary.LittleEndian.PutUint16(originBytes[stateDataSizeIndex:], MaxOutputPayloadSize+1)
		fakeStateData := make([]byte, MaxOutputPayloadSize)
		// original one byte state data is left untouched
		modBytes := byteutils.ConcatBytes(originBytes, fakeStateData)
		_, _, err := OutputFromBytes(modBytes)
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Too much governance metadata", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		originAlias.governanceMetadata = []byte{1}
		originAlias.immutableData = nil
		originAlias.governingAddress = nil
		originAlias.stateData = nil
		originBytes := originAlias.Bytes()
		governanceMetadataSizeIndex := 1 + 1 + AddressLength + len(originAlias.balances.Bytes()) + AddressLength + 4
		binary.LittleEndian.PutUint16(originBytes[governanceMetadataSizeIndex:], MaxOutputPayloadSize+1)
		fakeGovernanceMetadata := make([]byte, MaxOutputPayloadSize)
		// original one byte governance metadata is left untouched
		modBytes := byteutils.ConcatBytes(originBytes, fakeGovernanceMetadata)
		_, _, err := OutputFromBytes(modBytes)
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Too much immutable data", func(t *testing.T) {
		originAlias := dummyAliasOutput()
		originAlias.governanceMetadata = nil
		originAlias.immutableData = []byte{1}
		originAlias.governingAddress = nil
		originAlias.stateData = nil
		originBytes := originAlias.Bytes()
		immutableDataSizeIndex := 1 + 1 + AddressLength + len(originAlias.balances.Bytes()) + AddressLength + 4
		binary.LittleEndian.PutUint16(originBytes[immutableDataSizeIndex:], MaxOutputPayloadSize+1)
		fakeImmutableData := make([]byte, MaxOutputPayloadSize)
		// original one byte immutable data is left untouched
		modBytes := byteutils.ConcatBytes(originBytes, fakeImmutableData)
		_, _, err := OutputFromBytes(modBytes)
		t.Log(err)
		assert.Error(t, err)
	})
}
// TestAliasOutput_SetBalances verifies that balances can be replaced and that
// amounts below the dust threshold are rejected.
func TestAliasOutput_SetBalances(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		assert.NoError(t, out.SetBalances(map[Color]uint64{ColorIOTA: 1337}))
		expected := NewColoredBalances(map[Color]uint64{ColorIOTA: 1337}).Bytes()
		assert.Equal(t, expected, out.Balances().Bytes())
	})
	t.Run("CASE: Below threshold", func(t *testing.T) {
		out := dummyAliasOutput()
		// 99 IOTA is below the alias dust threshold
		assert.Error(t, out.SetBalances(map[Color]uint64{ColorIOTA: 99}))
	})
}
// TestAliasOutput_SetAliasAddress verifies that the alias address can be replaced.
func TestAliasOutput_SetAliasAddress(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		replacement := randAliasAddress()
		out.SetAliasAddress(replacement)
		assert.True(t, out.aliasAddress.Equals(replacement))
	})
}
// TestAliasOutput_Balances verifies that the balances getter returns the
// dummy output's initial (dust-threshold) balance.
func TestAliasOutput_Balances(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		expected := NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}).Bytes()
		assert.Equal(t, expected, out.Balances().Bytes())
	})
}
// TestAliasOutput_Bytes verifies a full serialize/deserialize round trip:
// every field of the decoded output must match the original.
func TestAliasOutput_Bytes(t *testing.T) {
	t.Run("Happy path", func(t *testing.T) {
		original := dummyAliasOutput()
		decoded, err := AliasOutputFromMarshalUtil(marshalutil.New(original.Bytes()))
		assert.NoError(t, err)
		// field-by-field comparison of the round-tripped output
		assert.True(t, original.GetAliasAddress().Equals(decoded.GetAliasAddress()))
		assert.True(t, original.GetStateAddress().Equals(decoded.GetStateAddress()))
		assert.True(t, original.GetGoverningAddress().Equals(decoded.GetGoverningAddress()))
		assert.Equal(t, original.Balances().Bytes(), decoded.Balances().Bytes())
		assert.Equal(t, original.GetStateIndex(), decoded.GetStateIndex())
		assert.Equal(t, original.GetStateData(), decoded.GetStateData())
		assert.Equal(t, original.GetGovernanceMetadata(), decoded.GetGovernanceMetadata())
		assert.Equal(t, original.GetImmutableData(), decoded.GetImmutableData())
		assert.Equal(t, original.GetIsGovernanceUpdated(), decoded.GetIsGovernanceUpdated())
	})
}
// TestAliasOutput_Compare verifies that an output compares equal (0) to its
// own deserialized copy.
func TestAliasOutput_Compare(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		clone, err := AliasOutputFromMarshalUtil(marshalutil.New(out.Bytes()))
		assert.NoError(t, err)
		assert.True(t, out.Compare(clone) == 0)
	})
}
// TestAliasOutput_GetGoverningAddress verifies the governing-address getter,
// including the self-governed fallback to the state address.
func TestAliasOutput_GetGoverningAddress(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		assert.True(t, out.GetGoverningAddress().Equals(out.governingAddress))
	})
	t.Run("CASE: Self governed", func(t *testing.T) {
		out := dummyAliasOutput()
		out.governingAddress = nil
		// with no governing address set, the state address governs
		assert.True(t, out.GetGoverningAddress().Equals(out.stateAddress))
	})
}
// TestAliasOutput_GetGovernanceMetadata verifies the governance-metadata
// getter for both populated and nil metadata.
func TestAliasOutput_GetGovernanceMetadata(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		assert.Equal(t, out.GetGovernanceMetadata(), out.governanceMetadata)
	})
	t.Run("CASE: No data", func(t *testing.T) {
		out := dummyAliasOutput()
		out.governanceMetadata = nil
		assert.Equal(t, out.GetGovernanceMetadata(), out.governanceMetadata)
	})
}
// TestAliasOutput_GetImmutableData verifies the immutable-data getter for
// both populated and nil data.
func TestAliasOutput_GetImmutableData(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		assert.Equal(t, out.GetImmutableData(), out.immutableData)
	})
	t.Run("CASE: No data", func(t *testing.T) {
		out := dummyAliasOutput()
		out.immutableData = nil
		assert.Equal(t, out.GetImmutableData(), out.immutableData)
	})
}
// TestAliasOutput_GetIsGovernanceUpdated verifies the governance-update flag
// getter for both true and false values.
func TestAliasOutput_GetIsGovernanceUpdated(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		assert.Equal(t, out.GetIsGovernanceUpdated(), out.isGovernanceUpdate)
	})
	t.Run("CASE: Happy path, false", func(t *testing.T) {
		out := dummyAliasOutput()
		out.isGovernanceUpdate = false
		assert.Equal(t, out.GetIsGovernanceUpdated(), out.isGovernanceUpdate)
	})
}
// TestAliasOutput_IsDelegated verifies the delegation flag getter for both
// false and true values.
func TestAliasOutput_IsDelegated(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		assert.Equal(t, out.IsDelegated(), out.isDelegated)
	})
	t.Run("CASE: Happy path, true", func(t *testing.T) {
		out := dummyAliasOutput()
		out.isDelegated = true
		assert.Equal(t, out.IsDelegated(), out.isDelegated)
	})
}
// TestAliasOutput_GetStateAddress verifies the state-address getter.
func TestAliasOutput_GetStateAddress(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		assert.True(t, out.GetStateAddress().Equals(out.stateAddress))
	})
}
// TestAliasOutput_GetStateData verifies the state-data getter for both
// populated and nil data.
func TestAliasOutput_GetStateData(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		assert.Equal(t, out.GetStateData(), out.stateData)
	})
	t.Run("CASE: No data", func(t *testing.T) {
		out := dummyAliasOutput()
		out.stateData = nil
		assert.Equal(t, out.GetStateData(), out.stateData)
	})
}
// TestAliasOutput_GetStateIndex verifies the state-index getter.
func TestAliasOutput_GetStateIndex(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		out.stateIndex = 5
		assert.Equal(t, out.GetStateIndex(), out.stateIndex)
	})
}
// TestAliasOutput_ID verifies the output-ID getter.
func TestAliasOutput_ID(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		assert.Equal(t, out.ID(), out.outputID)
	})
}
// TestAliasOutput_Input verifies that Input() wraps the output ID in a UTXO
// input, and panics when no ID has been assigned yet.
func TestAliasOutput_Input(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		assert.Equal(t, out.Input().Base58(), NewUTXOInput(out.outputID).Base58())
	})
	t.Run("CASE: No output id yet", func(t *testing.T) {
		minted, err := NewAliasOutputMint(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, randEd25119Address())
		assert.NoError(t, err)
		// a freshly minted output has no ID, so Input() must panic
		assert.Panics(t, func() {
			_ = minted.Input()
		})
	})
}
func TestAliasOutput_IsOrigin(t *testing.T) {
t.Run("CASE: Happy path", func(t *testing.T) {
alias := dummyAliasOutput()
assert.False(t, alias.IsOrigin())
})
t.Run("CASE: Is origin", func(t *testing.T) {
alias, err := NewAliasOutputMint(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, randEd25119Address())
assert.NoError(t, err)
assert.True(t, alias.IsOrigin())
})
t.Run("CASE: Check IsOrigin after booking", func(t *testing.T) {
tokens := map[Color]uint64{ColorIOTA: 200}
pub, _, err := ed25519.GenerateKey()
require.NoError(t, err)
addr := NewED25519Address(pub)
out, err := NewAliasOutputMint(tokens, addr)
require.NoError(t, err)
require.True(t, out.IsOrigin())
// imitate booking
out.SetID(OutputID{})
outUpd := out.UpdateMintingColor().(*AliasOutput)
require.True(t, outUpd.IsOrigin())
})
}
// TestAliasOutput_IsSelfGoverned verifies self-governance detection: an alias
// is self-governed exactly when it has no explicit governing address.
func TestAliasOutput_IsSelfGoverned(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		alias := dummyAliasOutput()
		assert.False(t, alias.IsSelfGoverned())
	})
	// renamed from a duplicated "CASE: Happy path" so the subtest has a
	// unique, descriptive ID instead of Go's auto-generated "#01" suffix
	t.Run("CASE: Self governed", func(t *testing.T) {
		alias := dummyAliasOutput()
		alias.governingAddress = nil
		assert.True(t, alias.IsSelfGoverned())
	})
}
// TestAliasOutput_ObjectStorageKey verifies that the object storage key is
// the raw output ID.
func TestAliasOutput_ObjectStorageKey(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		assert.Equal(t, out.outputID.Bytes(), out.ObjectStorageKey())
	})
}
// TestAliasOutput_ObjectStorageValue is intentionally empty:
// ObjectStorageValue returns the same serialization as Bytes(), which is
// already covered by TestAliasOutput_Bytes.
func TestAliasOutput_ObjectStorageValue(t *testing.T) {
	// same as Bytes()
}
// TestAliasOutput_DelegationTimelock verifies the timelock getter across the
// three relevant states: not delegated, delegated without timelock, and
// delegated with timelock.
func TestAliasOutput_DelegationTimelock(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		// not delegated: timelock reads as the zero time
		assert.True(t, out.DelegationTimelock().IsZero())
		// delegated, but no timelock set yet
		out.isDelegated = true
		assert.True(t, out.DelegationTimelock().IsZero())
		// delegated with an explicit timelock
		deadline := time.Now()
		out.delegationTimelock = deadline
		assert.True(t, deadline.Equal(out.DelegationTimelock()))
	})
}
// TestAliasOutput_DelegationTimeLockedNow verifies time-lock evaluation at a
// given reference time, with and without a timelock set.
func TestAliasOutput_DelegationTimeLockedNow(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		deadline := time.Now()
		out := dummyAliasOutput()
		out.isDelegated = true
		assert.NoError(t, out.SetDelegationTimelock(deadline))
		// locked strictly before the deadline, unlocked after
		assert.True(t, out.DelegationTimeLockedNow(deadline.Add(-time.Second)))
		assert.False(t, out.DelegationTimeLockedNow(deadline.Add(time.Second)))
	})
	t.Run("CASE: Delegation without timelock", func(t *testing.T) {
		deadline := time.Now()
		out := dummyAliasOutput()
		out.isDelegated = true
		// no timelock set: never reported as locked
		assert.False(t, out.DelegationTimeLockedNow(deadline.Add(-time.Second)))
		assert.False(t, out.DelegationTimeLockedNow(deadline.Add(time.Second)))
	})
}
// TestAliasOutput_SetDelegationTimelock verifies that a timelock can only be
// set on a delegated output.
func TestAliasOutput_SetDelegationTimelock(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		deadline := time.Now()
		out := dummyAliasOutput()
		// setting a timelock on a non-delegated output must fail
		err := out.SetDelegationTimelock(deadline)
		t.Log(err)
		assert.Error(t, err)
		assert.True(t, out.DelegationTimelock().IsZero())
		// once delegated, the timelock can be set
		out.isDelegated = true
		assert.NoError(t, out.SetDelegationTimelock(deadline))
		assert.True(t, out.DelegationTimelock().Equal(deadline))
	})
}
// TestAliasOutput_SetGoverningAddress verifies that the governing address can
// be replaced.
func TestAliasOutput_SetGoverningAddress(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		replacement := randAliasAddress()
		out.SetGoverningAddress(replacement)
		assert.True(t, out.GetGoverningAddress().Equals(replacement))
	})
}
// TestAliasOutput_SetID verifies that the output ID can be replaced.
func TestAliasOutput_SetID(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		freshID := randOutputID()
		out.SetID(freshID)
		assert.Equal(t, out.ID().Bytes(), freshID.Bytes())
	})
}
// TestAliasOutput_SetImmutableData verifies the immutable-data setter,
// including rejection of payloads above the size limit.
func TestAliasOutput_SetImmutableData(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		payload := []byte("new dummy nft data")
		assert.NoError(t, out.SetImmutableData(payload))
		assert.Equal(t, out.GetImmutableData(), payload)
	})
	t.Run("CASE: Too much data", func(t *testing.T) {
		out := dummyAliasOutput()
		oversized := make([]byte, MaxOutputPayloadSize+1)
		err := out.SetImmutableData(oversized)
		t.Log(err)
		assert.Error(t, err)
	})
}
// TestAliasOutput_SetGovernanceMetadata verifies the governance-metadata
// setter, including rejection of payloads above the size limit.
func TestAliasOutput_SetGovernanceMetadata(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		payload := []byte("new dummy gov metadata")
		assert.NoError(t, out.SetGovernanceMetadata(payload))
		assert.Equal(t, out.GetGovernanceMetadata(), payload)
	})
	t.Run("CASE: Too much data", func(t *testing.T) {
		out := dummyAliasOutput()
		oversized := make([]byte, MaxOutputPayloadSize+1)
		err := out.SetGovernanceMetadata(oversized)
		t.Log(err)
		assert.Error(t, err)
	})
}
// TestAliasOutput_SetIsGovernanceUpdated verifies the governance-update flag setter.
func TestAliasOutput_SetIsGovernanceUpdated(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		out.SetIsGovernanceUpdated(true)
		assert.Equal(t, out.GetIsGovernanceUpdated(), true)
	})
}
// TestAliasOutput_SetIsOrigin verifies the origin flag setter.
func TestAliasOutput_SetIsOrigin(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		// dummy output starts as non-origin
		assert.Equal(t, false, out.isOrigin)
		out.SetIsOrigin(true)
		assert.Equal(t, true, out.isOrigin)
	})
}
// TestAliasOutput_SetIsDelegated verifies the delegation flag setter.
func TestAliasOutput_SetIsDelegated(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		out.SetIsDelegated(true)
		assert.Equal(t, out.isDelegated, true)
	})
}
// TestAliasOutput_SetStateAddress verifies that both alias and ed25519
// addresses are accepted as the state address.
func TestAliasOutput_SetStateAddress(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		aliasAddy := randAliasAddress()
		edAddy := randEd25119Address()
		assert.NoError(t, out.SetStateAddress(aliasAddy))
		assert.NoError(t, out.SetStateAddress(edAddy))
		// the last successful set wins
		assert.True(t, out.GetStateAddress().Equals(edAddy))
	})
}
// TestAliasOutput_SetStateData verifies the state-data setter, including
// rejection of payloads above the size limit.
func TestAliasOutput_SetStateData(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		payload := []byte("new dummy nft data")
		assert.NoError(t, out.SetStateData(payload))
		assert.Equal(t, out.GetStateData(), payload)
	})
	t.Run("CASE: Too much data", func(t *testing.T) {
		out := dummyAliasOutput()
		oversized := make([]byte, MaxOutputPayloadSize+1)
		assert.Error(t, out.SetStateData(oversized))
	})
}
// TestAliasOutput_SetStateIndex verifies the state-index setter.
func TestAliasOutput_SetStateIndex(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		out.SetStateIndex(5)
		assert.Equal(t, out.GetStateIndex(), uint32(5))
	})
}
// TestAliasOutput_Type verifies that the output reports the alias output type.
func TestAliasOutput_Type(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		assert.Equal(t, out.Type(), AliasOutputType)
	})
}
// TestAliasOutput_Update verifies that Update is unsupported and panics.
func TestAliasOutput_Update(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyAliasOutput()
		replacement := dummyAliasOutput()
		assert.Panics(t, func() {
			out.Update(replacement)
		})
	})
}
// TestAliasOutput_UpdateMintingColor checks that UpdateMintingColor replaces
// any ColorMint balance with a color derived from the output ID, and that a
// zero alias address is re-derived from the output ID during the update.
func TestAliasOutput_UpdateMintingColor(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		alias := dummyAliasOutput()
		alias.balances = NewColoredBalances(map[Color]uint64{
			ColorIOTA: DustThresholdAliasOutputIOTA,
			ColorMint: 500,
		})
		updated := alias.UpdateMintingColor()
		// the minted color is the blake2b-256 hash of the output ID
		balance, ok := updated.Balances().Get(blake2b.Sum256(alias.ID().Bytes()))
		assert.True(t, ok)
		assert.Equal(t, uint64(500), balance)
		assert.True(t, updated.Address().Equals(alias.GetAliasAddress()))
	})
	t.Run("CASE: No mint", func(t *testing.T) {
		alias := dummyAliasOutput()
		alias.balances = NewColoredBalances(map[Color]uint64{
			ColorIOTA: DustThresholdAliasOutputIOTA,
		})
		updated := alias.UpdateMintingColor()
		// without a ColorMint balance, no derived color appears
		balance, ok := updated.Balances().Get(blake2b.Sum256(alias.ID().Bytes()))
		assert.False(t, ok)
		assert.Equal(t, uint64(0), balance)
		balance, ok = updated.Balances().Get(ColorIOTA)
		assert.True(t, ok)
		assert.Equal(t, DustThresholdAliasOutputIOTA, balance)
		assert.True(t, updated.Address().Equals(alias.GetAliasAddress()))
	})
	t.Run("CASE: Alias address is updated", func(t *testing.T) {
		alias := dummyAliasOutput(true)
		// zero alias address must be recomputed from the output ID
		alias.aliasAddress = AliasAddress{}
		alias.balances = NewColoredBalances(map[Color]uint64{
			ColorIOTA: DustThresholdAliasOutputIOTA,
			ColorMint: 500,
		})
		updated := alias.UpdateMintingColor()
		balance, ok := updated.Balances().Get(blake2b.Sum256(alias.ID().Bytes()))
		assert.True(t, ok)
		assert.Equal(t, uint64(500), balance)
		assert.True(t, updated.Address().Equals(NewAliasAddress(alias.ID().Bytes())))
	})
}
// TestAliasOutput_checkBasicValidity checks the structural validation rules:
// a state address must exist, neither state nor governing address may equal
// the alias address itself, and only delegated outputs may carry a timelock.
func TestAliasOutput_checkBasicValidity(t *testing.T) {
	t.Run("CASE: state address nil", func(t *testing.T) {
		out := dummyAliasOutput()
		out.stateAddress = nil
		err := out.checkBasicValidity()
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: state address same as alias address", func(t *testing.T) {
		out := dummyAliasOutput()
		out.stateAddress = out.GetAliasAddress()
		err := out.checkBasicValidity()
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: governing address same as alias address", func(t *testing.T) {
		out := dummyAliasOutput()
		out.governingAddress = out.GetAliasAddress()
		err := out.checkBasicValidity()
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: delegation timelock for non delegated output", func(t *testing.T) {
		out := dummyAliasOutput()
		out.isDelegated = false
		out.delegationTimelock = time.Now()
		err := out.checkBasicValidity()
		t.Log(err)
		assert.Error(t, err)
	})
}
// TestAliasOutput_mustValidate verifies that mustValidate panics on an
// invalid output (nil state address).
func TestAliasOutput_mustValidate(t *testing.T) {
	t.Run("CASE: did not validate", func(t *testing.T) {
		out := dummyAliasOutput()
		out.stateAddress = nil
		assert.Panics(t, func() {
			out.mustValidate()
		})
	})
}
// TestAliasOutput_validateTransition covers the transition rules between a
// previous alias output and its chained successor. Governance transitions
// (NewAliasOutputNext(true)) may change governance fields but must keep state
// fields and balances fixed; state transitions (NewAliasOutputNext(false))
// may change state data/balances but must keep governance fields fixed.
// Delegation adds time-lock constraints on which kind of transition is
// allowed at a given transaction timestamp.
func TestAliasOutput_validateTransition(t *testing.T) {
	t.Run("CASE: Happy path, state transition", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(false)
		err := prev.validateTransition(next, &Transaction{})
		assert.NoError(t, err)
	})
	t.Run("CASE: Happy path, governance transition", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(true)
		err := prev.validateTransition(next, &Transaction{})
		assert.NoError(t, err)
	})
	t.Run("CASE: Modified alias address", func(t *testing.T) {
		// the alias address is immutable across any transition
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(false)
		next.aliasAddress = *randAliasAddress()
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Modified immutable data", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(false)
		next.immutableData = []byte("something new")
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Gov update, modified state data", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(true)
		next.stateData = []byte("something new")
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Gov update, modified state index", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(true)
		next.stateIndex = prev.stateIndex + 1
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Gov update, modified balance", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(true)
		newBalance := prev.Balances().Map()
		newBalance[ColorIOTA]++
		next.balances = NewColoredBalances(newBalance)
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Gov update, modified state address", func(t *testing.T) {
		// governance transitions are allowed to rotate the state address
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(true)
		next.stateAddress = randEd25119Address()
		err := prev.validateTransition(next, &Transaction{})
		assert.NoError(t, err)
	})
	t.Run("CASE: Gov update, modified governance address", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(true)
		next.governingAddress = randAliasAddress()
		err := prev.validateTransition(next, &Transaction{})
		assert.NoError(t, err)
	})
	t.Run("CASE: Gov update, modified governance metadata", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(true)
		next.governanceMetadata = []byte("chain is run by another VM")
		err := prev.validateTransition(next, &Transaction{})
		assert.NoError(t, err)
	})
	t.Run("CASE: Gov update, modified delegation status", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(true)
		next.isDelegated = true
		err := prev.validateTransition(next, &Transaction{})
		assert.NoError(t, err)
	})
	t.Run("CASE: Gov update, delegation without delegation lock", func(t *testing.T) {
		prev := dummyAliasOutput().WithDelegation()
		next := prev.NewAliasOutputNext(true)
		assert.Equal(t, true, next.IsDelegated())
		err := prev.validateTransition(next, &Transaction{essence: &TransactionEssence{timestamp: time.Now()}})
		assert.NoError(t, err)
	})
	t.Run("CASE: Gov update, delegation with delegation lock", func(t *testing.T) {
		timelock := time.Now()
		prev := dummyAliasOutput().WithDelegationAndTimelock(timelock)
		next := prev.NewAliasOutputNext(true)
		assert.Equal(t, true, next.IsDelegated())
		// happy case, timelock expired
		err := prev.validateTransition(next, &Transaction{essence: &TransactionEssence{timestamp: timelock.Add(time.Second)}})
		assert.NoError(t, err)
		// not happy case, timelock is still active
		err = prev.validateTransition(next, &Transaction{essence: &TransactionEssence{timestamp: timelock.Add(-time.Second)}})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: State update, delegation without timelock", func(t *testing.T) {
		prev := dummyAliasOutput().WithDelegation()
		next := prev.NewAliasOutputNext(false)
		err := prev.validateTransition(next, &Transaction{essence: &TransactionEssence{timestamp: time.Now()}})
		assert.NoError(t, err)
	})
	t.Run("CASE: State update, delegation with timelock", func(t *testing.T) {
		// note the asymmetry with the governance case: state transitions are
		// allowed only WHILE the delegation timelock is active
		timelock := time.Now()
		prev := dummyAliasOutput().WithDelegationAndTimelock(timelock)
		next := prev.NewAliasOutputNext(false)
		// timelock is active state transition allowed
		err := prev.validateTransition(next, &Transaction{essence: &TransactionEssence{timestamp: timelock.Add(-time.Second)}})
		assert.NoError(t, err)
		// timelock expired, state transition should fail
		err = prev.validateTransition(next, &Transaction{essence: &TransactionEssence{timestamp: timelock.Add(time.Second)}})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: State update, delegated, delegation timelock changed", func(t *testing.T) {
		prev := dummyAliasOutput().WithDelegation()
		next := prev.NewAliasOutputNext(false)
		next.delegationTimelock = time.Now()
		err := prev.validateTransition(next, &Transaction{essence: &TransactionEssence{timestamp: time.Now()}})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: State update, modified delegation status", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(false)
		next.isDelegated = true
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: State update of delegated output, modified balance status", func(t *testing.T) {
		prev := dummyAliasOutput()
		prev.isDelegated = true
		next := prev.NewAliasOutputNext(false)
		bal := next.balances.Map()
		bal[ColorIOTA]++
		next.balances = NewColoredBalances(bal)
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: State update, wrong state index", func(t *testing.T) {
		// state index must advance by exactly one
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(false)
		next.stateIndex = prev.GetStateIndex() + 2
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: State update, modify state address", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(false)
		next.stateAddress = randEd25119Address()
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: State update, modify governance address", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(false)
		next.governingAddress = randAliasAddress()
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: State update, modify governance metadata", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(false)
		next.governanceMetadata = []byte("chain is run by another VM")
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: State update, was self governed", func(t *testing.T) {
		prev := dummyAliasOutput()
		prev.governingAddress = nil
		next := prev.NewAliasOutputNext(false)
		next.governingAddress = randAliasAddress()
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: State update, was not self governed", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(false)
		next.governingAddress = nil
		err := prev.validateTransition(next, &Transaction{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: State update, modify state data", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(false)
		next.stateData = []byte("new state data")
		err := prev.validateTransition(next, &Transaction{})
		assert.NoError(t, err)
	})
	t.Run("CASE: State update, modify balances", func(t *testing.T) {
		prev := dummyAliasOutput()
		next := prev.NewAliasOutputNext(false)
		newBalance := prev.Balances().Map()
		newBalance[ColorIOTA]++
		next.balances = NewColoredBalances(newBalance)
		err := prev.validateTransition(next, &Transaction{})
		assert.NoError(t, err)
	})
}
// TestAliasOutput_validateDestroyTransition covers the rules for destroying
// an alias output: a non-delegated alias may only be destroyed while it holds
// exactly the minimum IOTA balance and no colored tokens; a delegated alias
// may hold more, but cannot be destroyed while its delegation timelock is
// still active.
func TestAliasOutput_validateDestroyTransition(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		prev := dummyAliasOutput()
		err := prev.validateDestroyTransitionNow(time.Time{})
		assert.NoError(t, err)
	})
	t.Run("CASE: More balance than minimum", func(t *testing.T) {
		prev := dummyAliasOutput()
		newBalance := prev.Balances().Map()
		newBalance[ColorIOTA]++
		prev.balances = NewColoredBalances(newBalance)
		err := prev.validateDestroyTransitionNow(time.Time{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Delegated output, more balance than minimum", func(t *testing.T) {
		// delegated outputs are exempt from the exact-minimum rule
		prev := dummyAliasOutput()
		newBalance := prev.Balances().Map()
		newBalance[ColorIOTA]++
		prev.balances = NewColoredBalances(newBalance)
		prev.isDelegated = true
		err := prev.validateDestroyTransitionNow(time.Time{})
		assert.NoError(t, err)
	})
	t.Run("CASE: More color balance than minimum", func(t *testing.T) {
		prev := dummyAliasOutput()
		newBalance := prev.Balances().Map()
		newBalance[Color{8}] = 1
		prev.balances = NewColoredBalances(newBalance)
		err := prev.validateDestroyTransitionNow(time.Time{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Only color balance", func(t *testing.T) {
		prev := dummyAliasOutput()
		prev.balances = NewColoredBalances(map[Color]uint64{{8}: DustThresholdAliasOutputIOTA})
		err := prev.validateDestroyTransitionNow(time.Time{})
		t.Log(err)
		assert.Error(t, err)
	})
	t.Run("CASE: Can destroy delegation if not-timelocked", func(t *testing.T) {
		prev := dummyAliasOutput()
		prev.SetIsDelegated(true)
		err := prev.validateDestroyTransitionNow(time.Now())
		assert.NoError(t, err)
	})
	t.Run("CASE: Can't destroy timelocked delegation", func(t *testing.T) {
		prev := dummyAliasOutput()
		deadline := time.Now()
		// reference time one nanosecond before the deadline: still locked
		nowis := deadline.Add(-1 * time.Nanosecond)
		prev.SetIsDelegated(true)
		err := prev.SetDelegationTimelock(deadline)
		assert.NoError(t, err)
		assert.True(t, prev.DelegationTimeLockedNow(nowis))
		err = prev.validateDestroyTransitionNow(nowis)
		assert.Error(t, err)
	})
	t.Run("CASE: Can destroy timeUNlocked delegation", func(t *testing.T) {
		prev := dummyAliasOutput()
		deadline := time.Now()
		// reference time one nanosecond after the deadline: lock expired
		nowis := deadline.Add(1 * time.Nanosecond)
		prev.SetIsDelegated(true)
		err := prev.SetDelegationTimelock(deadline)
		assert.NoError(t, err)
		assert.False(t, prev.DelegationTimeLockedNow(nowis))
		err = prev.validateDestroyTransitionNow(nowis)
		assert.NoError(t, err)
	})
}
// TestAliasOutput_findChainedOutputAndCheckFork checks how the successor of
// an alias output is located within a transaction's outputs: exactly one
// output with the same alias address must exist (a duplicate is a forbidden
// chain fork), outputs of other alias addresses are ignored, and the absence
// of a successor yields nil without an error.
func TestAliasOutput_findChainedOutputAndCheckFork(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		prev := dummyAliasOutput()
		chained := prev.NewAliasOutputNext(false)
		outputs := Outputs{chained}
		essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(prev.ID())), NewOutputs(outputs...))
		// unlockblocks are irrelevant now
		tx := NewTransaction(essence, UnlockBlocks{NewReferenceUnlockBlock(0)})
		found, err := prev.findChainedOutputAndCheckFork(tx)
		assert.NoError(t, err)
		assert.Equal(t, chained.Bytes(), found.Bytes())
	})
	t.Run("CASE: No alias output", func(t *testing.T) {
		prev := dummyAliasOutput()
		outputs := Outputs{NewSigLockedSingleOutput(DustThresholdAliasOutputIOTA, randEd25119Address())}
		essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(prev.ID())), NewOutputs(outputs...))
		// unlockblocks are irrelevant now
		tx := NewTransaction(essence, UnlockBlocks{NewReferenceUnlockBlock(0)})
		// not found means that returned output is nil, no error
		found, _ := prev.findChainedOutputAndCheckFork(tx)
		assert.Nil(t, found)
	})
	t.Run("CASE: Duplicated alias output", func(t *testing.T) {
		prev := dummyAliasOutput()
		// two successors of the same alias constitute a fork of the chain
		chained1 := prev.NewAliasOutputNext(false)
		chained2 := prev.NewAliasOutputNext(true)
		outputs := Outputs{chained1, chained2}
		essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(prev.ID())), NewOutputs(outputs...))
		// unlockblocks are irrelevant now
		tx := NewTransaction(essence, UnlockBlocks{NewReferenceUnlockBlock(0)})
		found, err := prev.findChainedOutputAndCheckFork(tx)
		t.Log(err)
		assert.Error(t, err)
		assert.Nil(t, found)
	})
	t.Run("CASE: More than one alias in outputs", func(t *testing.T) {
		prev := dummyAliasOutput()
		chained := prev.NewAliasOutputNext(false)
		// an alias output of a DIFFERENT alias address must not be mistaken for a fork
		chainedFake := prev.NewAliasOutputNext(false)
		chainedFake.aliasAddress = *randAliasAddress()
		outputs := Outputs{chained, chainedFake}
		essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(prev.ID())), NewOutputs(outputs...))
		// unlockblocks are irrelevant now
		tx := NewTransaction(essence, UnlockBlocks{NewReferenceUnlockBlock(0)})
		found, err := prev.findChainedOutputAndCheckFork(tx)
		assert.NoError(t, err)
		assert.Equal(t, chained.Bytes(), found.Bytes())
	})
}
// TestAliasOutput_hasToBeUnlockedForGovernanceUpdate checks whether the
// governing entity (rather than the state controller) has to unlock prev in
// the given transaction:
//   - chained output flagged as governance update -> true,
//   - chained state transition -> false,
//   - forked chain (duplicate chained outputs) -> false,
//   - no chained alias output at all (destruction) -> true.
func TestAliasOutput_hasToBeUnlockedForGovernanceUpdate(t *testing.T) {
t.Run("CASE: Happy path", func(t *testing.T) {
prev := dummyAliasOutput()
chained := prev.NewAliasOutputNext(true)
outputs := Outputs{chained}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(prev.ID())), NewOutputs(outputs...))
// unlockblocks are irrelevant now
tx := NewTransaction(essence, UnlockBlocks{NewReferenceUnlockBlock(0)})
ok := prev.hasToBeUnlockedForGovernanceUpdate(tx)
assert.True(t, ok)
})
t.Run("CASE: No governance update", func(t *testing.T) {
prev := dummyAliasOutput()
// plain state transition: the state controller unlocks, not governance
chained := prev.NewAliasOutputNext(false)
outputs := Outputs{chained}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(prev.ID())), NewOutputs(outputs...))
// unlockblocks are irrelevant now
tx := NewTransaction(essence, UnlockBlocks{NewReferenceUnlockBlock(0)})
ok := prev.hasToBeUnlockedForGovernanceUpdate(tx)
assert.False(t, ok)
})
t.Run("CASE: Duplicated alias", func(t *testing.T) {
prev := dummyAliasOutput()
chained := prev.NewAliasOutputNext(true)
// duplicate on the same chain (fork) invalidates the governance unlock
chainedDuplicate := chained.clone()
chainedDuplicate.stateData = []byte("duplicated")
outputs := Outputs{chained, chainedDuplicate}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(prev.ID())), NewOutputs(outputs...))
// unlockblocks are irrelevant now
tx := NewTransaction(essence, UnlockBlocks{NewReferenceUnlockBlock(0)})
ok := prev.hasToBeUnlockedForGovernanceUpdate(tx)
assert.False(t, ok)
})
t.Run("CASE: No alias output found", func(t *testing.T) {
prev := dummyAliasOutput()
// no chained output means the alias is being destroyed, which is a
// governance-level action
next := NewSigLockedSingleOutput(DustThresholdAliasOutputIOTA, randEd25119Address())
outputs := Outputs{next}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(prev.ID())), NewOutputs(outputs...))
// unlockblocks are irrelevant now
tx := NewTransaction(essence, UnlockBlocks{NewReferenceUnlockBlock(0)})
ok := prev.hasToBeUnlockedForGovernanceUpdate(tx)
assert.True(t, ok)
})
}
// TestAliasOutput_unlockedGovernanceByAliasIndex exercises
// unlockedGovernanceTransitionByAliasIndex (note: the test name drops the
// "Transition" part): a governance transition of an alias may be unlocked by
// referencing another (governing) alias input that is itself unlocked for a
// state transition. Error cases cover self-governed aliases, a non-alias
// governing address, an out-of-range reference index, a referenced input
// that is not an alias, and a referenced alias on the wrong chain.
func TestAliasOutput_unlockedGovernanceByAliasIndex(t *testing.T) {
// governingAlias is the alias whose state controller's signature must
// (indirectly) unlock the governed alias below
governingAliasStateWallet := genRandomWallet()
governingAlias := &AliasOutput{
outputID: randOutputID(),
outputIDMutex: sync.RWMutex{},
balances: NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}),
aliasAddress: *randAliasAddress(),
stateAddress: governingAliasStateWallet.address,
stateIndex: 10,
stateData: []byte("some data"),
immutableData: []byte("some data"),
isGovernanceUpdate: false,
governingAddress: randAliasAddress(),
StorableObjectFlags: objectstorage.StorableObjectFlags{},
}
// alias is governed by governingAlias via its alias address
aliasStateWallet := genRandomWallet()
alias := &AliasOutput{
outputID: randOutputID(),
outputIDMutex: sync.RWMutex{},
balances: NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}),
aliasAddress: *randAliasAddress(),
stateAddress: aliasStateWallet.address,
stateIndex: 10,
stateData: []byte("some data"),
immutableData: []byte("some data"),
isGovernanceUpdate: false,
governingAddress: governingAlias.GetAliasAddress(),
StorableObjectFlags: objectstorage.StorableObjectFlags{},
}
t.Run("CASE: Happy path", func(t *testing.T) {
// unlocked for gov transition
nextAlias := alias.NewAliasOutputNext(true)
// we are updating the state address (simulate committer rotation)
nextAlias.stateAddress = randEd25119Address()
// unlocked for state transition
nextGoverningAlias := governingAlias.NewAliasOutputNext(false)
outputs := Outputs{nextAlias, nextGoverningAlias}
inputs := Outputs{}
inputsOfTx := NewInputs(NewUTXOInput(alias.ID()), NewUTXOInput(governingAlias.ID()))
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, inputsOfTx, NewOutputs(outputs...))
// NewInputs may reorder inputs, so locate the actual index of each alias
// input and build the inputs slice in matching order
var indexOfAliasInput, indexOfGoverningAliasInput int
for i, input := range inputsOfTx {
castedInput := input.(*UTXOInput)
if castedInput.referencedOutputID == alias.ID() {
indexOfAliasInput = i
inputs = append(inputs, alias)
}
if castedInput.referencedOutputID == governingAlias.ID() {
indexOfGoverningAliasInput = i
inputs = append(inputs, governingAlias)
}
}
// the governed alias is unlocked by referencing the governing alias input,
// which in turn is unlocked by its state controller's signature
unlocks := make(UnlockBlocks, len(inputsOfTx))
unlocks[indexOfAliasInput] = NewAliasUnlockBlock(uint16(indexOfGoverningAliasInput))
unlocks[indexOfGoverningAliasInput] = NewSignatureUnlockBlock(governingAliasStateWallet.sign(essence))
tx := NewTransaction(essence, unlocks)
ok, err := alias.unlockedGovernanceTransitionByAliasIndex(tx, uint16(indexOfGoverningAliasInput), inputs)
assert.NoError(t, err)
assert.True(t, ok)
})
t.Run("CASE: Self governed alias can't be unlocked by alias reference", func(t *testing.T) {
// nil governingAddress means the alias is self-governed
dummyAlias := dummyAliasOutput()
dummyAlias.governingAddress = nil
ok, err := dummyAlias.unlockedGovernanceTransitionByAliasIndex(&Transaction{}, 0, Outputs{})
t.Log(err)
assert.Error(t, err)
assert.False(t, ok)
})
t.Run("CASE: Governing address is not alias", func(t *testing.T) {
dummyAlias := dummyAliasOutput()
dummyAlias.governingAddress = randEd25119Address()
ok, err := dummyAlias.unlockedGovernanceTransitionByAliasIndex(&Transaction{}, 0, Outputs{})
t.Log(err)
assert.Error(t, err)
assert.False(t, ok)
})
t.Run("CASE: Invalid referenced index", func(t *testing.T) {
// index 1 is out of range for an empty inputs slice
dummyAlias := dummyAliasOutput()
ok, err := dummyAlias.unlockedGovernanceTransitionByAliasIndex(&Transaction{}, 1, Outputs{})
t.Log(err)
assert.Error(t, err)
assert.False(t, ok)
})
t.Run("CASE: Referenced output is not an alias", func(t *testing.T) {
dummyAlias := dummyAliasOutput()
ok, err := dummyAlias.unlockedGovernanceTransitionByAliasIndex(&Transaction{}, 0, Outputs{NewSigLockedSingleOutput(DustThresholdAliasOutputIOTA, randEd25119Address())})
t.Log(err)
assert.Error(t, err)
assert.False(t, ok)
})
t.Run("CASE: Referenced output has different alias address", func(t *testing.T) {
// the referenced alias is not dummyAlias's governing alias
dummyAlias := dummyAliasOutput()
dummyGoverningAlias := dummyAliasOutput()
ok, err := dummyAlias.unlockedGovernanceTransitionByAliasIndex(&Transaction{}, 0, Outputs{dummyGoverningAlias})
t.Log(err)
assert.Error(t, err)
assert.False(t, ok)
})
}
// TestAliasOutput_UnlockValid covers the full unlock logic of an alias
// output: state transitions unlocked by the state controller's signature,
// governance transitions and destruction unlocked by the governing party,
// rejection of invalid signatures, forks (duplicated chained outputs),
// invalid governance transitions, unlocking through a governing alias, and
// unsupported unlock block types.
func TestAliasOutput_UnlockValid(t *testing.T) {
// w controls the state of the alias, governingWallet controls governance
w := genRandomWallet()
governingWallet := genRandomWallet()
alias := &AliasOutput{
outputID: randOutputID(),
outputIDMutex: sync.RWMutex{},
balances: NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}),
aliasAddress: *randAliasAddress(),
stateAddress: w.address,
stateIndex: 10,
stateData: []byte("some data"),
immutableData: []byte("some immutable data"),
isGovernanceUpdate: false,
governingAddress: governingWallet.address,
StorableObjectFlags: objectstorage.StorableObjectFlags{},
}
t.Run("CASE: Alias unlocked by signature", func(t *testing.T) {
chained := alias.NewAliasOutputNext(false)
outputs := Outputs{chained}
inputs := Outputs{alias}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(alias.ID())), NewOutputs(outputs...))
unlockBlocks := w.unlockBlocks(essence)
// w.unlockBlocks puts a signature unlock block for all inputs
tx := NewTransaction(essence, unlockBlocks)
valid, err := alias.UnlockValid(tx, unlockBlocks[0], inputs)
assert.NoError(t, err)
assert.True(t, valid)
})
t.Run("CASE: Alias can't be unlocked by invalid signature", func(t *testing.T) {
chained := alias.NewAliasOutputNext(false)
// the cloned alias expects a different (random) state address, so w's
// signature is invalid for it
clonedAlias := alias.clone()
clonedAlias.stateAddress = randEd25119Address()
outputs := Outputs{chained}
inputs := Outputs{clonedAlias}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(clonedAlias.ID())), NewOutputs(outputs...))
// sign with bad signature
unlockBlocks := w.unlockBlocks(essence)
tx := NewTransaction(essence, unlockBlocks)
valid, err := clonedAlias.UnlockValid(tx, unlockBlocks[0], inputs)
t.Log(err)
assert.Error(t, err)
assert.False(t, valid)
})
t.Run("CASE: Alias output destroyed, no gov update", func(t *testing.T) {
// destruction signed by the state controller (w) must fail; only the
// governing party may destroy the alias
outputs := Outputs{NewSigLockedSingleOutput(DustThresholdAliasOutputIOTA, randEd25119Address())}
inputs := Outputs{alias}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(alias.ID())), NewOutputs(outputs...))
unlockBlocks := w.unlockBlocks(essence)
// w.unlockBlocks puts a signature unlock block for all inputs
tx := NewTransaction(essence, unlockBlocks)
valid, err := alias.UnlockValid(tx, unlockBlocks[0], inputs)
t.Log(err)
assert.Error(t, err)
assert.False(t, valid)
})
t.Run("CASE: Alias output destroyed, gov update", func(t *testing.T) {
outputs := Outputs{NewSigLockedSingleOutput(DustThresholdAliasOutputIOTA, randEd25119Address())}
inputs := Outputs{alias}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(alias.ID())), NewOutputs(outputs...))
unlockBlocks := governingWallet.unlockBlocks(essence)
// governingWallet.unlockBlocks puts a signature unlock block for all inputs
tx := NewTransaction(essence, unlockBlocks)
valid, err := alias.UnlockValid(tx, unlockBlocks[0], inputs)
assert.NoError(t, err)
assert.True(t, valid)
})
t.Run("CASE: Alias output can't be destroyed, gov update", func(t *testing.T) {
// the alias holds more than the dust threshold, so it may not be destroyed
clonedAlias := alias.clone()
clonedAlias.balances = NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA + 1})
outputs := Outputs{NewSigLockedSingleOutput(DustThresholdAliasOutputIOTA, randEd25119Address())}
inputs := Outputs{clonedAlias}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(clonedAlias.ID())), NewOutputs(outputs...))
unlockBlocks := governingWallet.unlockBlocks(essence)
// governingWallet.unlockBlocks puts a signature unlock block for all inputs
tx := NewTransaction(essence, unlockBlocks)
valid, err := clonedAlias.UnlockValid(tx, unlockBlocks[0], inputs)
t.Log(err)
assert.Error(t, err)
assert.False(t, valid)
})
t.Run("CASE: Duplicated alias output", func(t *testing.T) {
chained := alias.NewAliasOutputNext(false)
clonedChained := chained.clone()
// need to change some bytes not to be considered duplicate already in NewOutputs()
clonedChained.stateData = []byte("random data")
outputs := Outputs{chained, clonedChained}
inputs := Outputs{alias}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(alias.ID())), NewOutputs(outputs...))
unlockBlocks := w.unlockBlocks(essence)
// w.unlockBlocks puts a signature unlock block for all inputs
tx := NewTransaction(essence, unlockBlocks)
valid, err := alias.UnlockValid(tx, unlockBlocks[0], inputs)
t.Log(err)
assert.Error(t, err)
assert.False(t, valid)
})
t.Run("CASE: Governance update, sig valid", func(t *testing.T) {
chained := alias.NewAliasOutputNext(true)
outputs := Outputs{chained}
inputs := Outputs{alias}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(alias.ID())), NewOutputs(outputs...))
unlockBlocks := governingWallet.unlockBlocks(essence)
// governingWallet.unlockBlocks puts a signature unlock block for all inputs
tx := NewTransaction(essence, unlockBlocks)
valid, err := alias.UnlockValid(tx, unlockBlocks[0], inputs)
assert.NoError(t, err)
assert.True(t, valid)
})
t.Run("CASE: Governance update, sig invalid", func(t *testing.T) {
// a governance transition signed by the state controller must be rejected
chained := alias.NewAliasOutputNext(true)
outputs := Outputs{chained}
inputs := Outputs{alias}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(alias.ID())), NewOutputs(outputs...))
unlockBlocks := w.unlockBlocks(essence)
// w.unlockBlocks puts a signature unlock block for all inputs
tx := NewTransaction(essence, unlockBlocks)
valid, err := alias.UnlockValid(tx, unlockBlocks[0], inputs)
t.Log(err)
assert.Error(t, err)
assert.False(t, valid)
})
t.Run("CASE: Governance update, transition invalid", func(t *testing.T) {
chained := alias.NewAliasOutputNext(true)
// a governance transition must not touch state-controlled fields
chained.stateData = []byte("this should not be changed")
outputs := Outputs{chained}
inputs := Outputs{alias}
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, NewInputs(NewUTXOInput(alias.ID())), NewOutputs(outputs...))
unlockBlocks := governingWallet.unlockBlocks(essence)
// governingWallet.unlockBlocks puts a signature unlock block for all inputs
tx := NewTransaction(essence, unlockBlocks)
valid, err := alias.UnlockValid(tx, unlockBlocks[0], inputs)
t.Log(err)
assert.Error(t, err)
assert.False(t, valid)
})
t.Run("CASE: Unlocked by other alias", func(t *testing.T) {
// governedAlias's governing address is governingAlias's alias address, so
// an alias unlock block referencing governingAlias's input unlocks it
governingAliasStateWallet := genRandomWallet()
governingAlias := &AliasOutput{
outputID: randOutputID(),
outputIDMutex: sync.RWMutex{},
balances: NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}),
aliasAddress: *randAliasAddress(),
stateAddress: governingAliasStateWallet.address,
stateIndex: 10,
stateData: []byte("some data"),
immutableData: []byte("some data"),
isGovernanceUpdate: false,
governingAddress: randAliasAddress(),
StorableObjectFlags: objectstorage.StorableObjectFlags{},
}
aliasStateWallet := genRandomWallet()
governedAlias := &AliasOutput{
outputID: randOutputID(),
outputIDMutex: sync.RWMutex{},
balances: NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}),
aliasAddress: *randAliasAddress(),
stateAddress: aliasStateWallet.address,
stateIndex: 10,
stateData: []byte("some data"),
immutableData: []byte("some data"),
isGovernanceUpdate: false,
governingAddress: governingAlias.GetAliasAddress(),
StorableObjectFlags: objectstorage.StorableObjectFlags{},
}
// unlocked for gov transition
nextAlias := governedAlias.NewAliasOutputNext(true)
// we are updating the state address (simulate committer rotation)
nextAlias.stateAddress = randEd25119Address()
// unlocked for state transition
nextGoverningAlias := governingAlias.NewAliasOutputNext(false)
outputs := Outputs{nextAlias, nextGoverningAlias}
inputs := Outputs{}
inputsOfTx := NewInputs(NewUTXOInput(governedAlias.ID()), NewUTXOInput(governingAlias.ID()))
essence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, inputsOfTx, NewOutputs(outputs...))
// NewInputs may reorder inputs, so locate the actual index of each alias
// input and build the inputs slice in matching order
var indexOfAliasInput, indexOfGoverningAliasInput int
for i, input := range inputsOfTx {
castedInput := input.(*UTXOInput)
if castedInput.referencedOutputID == governedAlias.ID() {
indexOfAliasInput = i
inputs = append(inputs, governedAlias)
}
if castedInput.referencedOutputID == governingAlias.ID() {
indexOfGoverningAliasInput = i
inputs = append(inputs, governingAlias)
}
}
unlocks := make(UnlockBlocks, len(inputsOfTx))
unlocks[indexOfAliasInput] = NewAliasUnlockBlock(uint16(indexOfGoverningAliasInput))
unlocks[indexOfGoverningAliasInput] = NewSignatureUnlockBlock(governingAliasStateWallet.sign(essence))
tx := NewTransaction(essence, unlocks)
ok, err := governedAlias.UnlockValid(tx, unlocks[indexOfAliasInput], inputs)
assert.NoError(t, err)
assert.True(t, ok)
})
t.Run("CASE: Unsupported unlock block", func(t *testing.T) {
// reference unlock blocks are not a valid way to unlock an alias output
txEssence := NewTransactionEssence(0, time.Time{}, identity.ID{}, identity.ID{}, nil, nil)
tx := NewTransaction(txEssence, nil)
ok, err := alias.UnlockValid(tx, NewReferenceUnlockBlock(0), Outputs{})
t.Log(err)
assert.Error(t, err)
assert.False(t, ok)
})
}
// TestAliasOutput_Clone verifies that Clone produces a deep copy: a distinct
// object with distinct pointer fields and independently backed byte slices,
// yet an identical serialized form.
func TestAliasOutput_Clone(t *testing.T) {
	original := dummyAliasOutput()
	original.isDelegated = true
	cloned := original.Clone()
	clonedAlias, isAlias := cloned.(*AliasOutput)
	assert.True(t, isAlias)
	// the clone must not share identity or pointer fields with the source
	assert.True(t, original != clonedAlias)
	assert.True(t, original.stateAddress != clonedAlias.stateAddress)
	assert.True(t, original.governingAddress != clonedAlias.governingAddress)
	// byte slices must be copied, not aliased
	assert.True(t, notSameMemory(original.immutableData, clonedAlias.immutableData))
	assert.True(t, notSameMemory(original.stateData, clonedAlias.stateData))
	// the serialized forms must still match exactly
	assert.EqualValues(t, original.Bytes(), cloned.Bytes())
}
// endregion
// region ExtendedLockedOutput Tests
// TestExtendedLockedOutput_Address checks that Address returns the address
// the output was constructed with, for both signature-backed and alias
// addresses.
func TestExtendedLockedOutput_Address(t *testing.T) {
	t.Run("CASE: Address is signature backed", func(t *testing.T) {
		addr := randEd25119Address()
		out := &ExtendedLockedOutput{address: addr}
		assert.True(t, out.Address().Equals(addr))
	})
	t.Run("CASE: Address is alias address", func(t *testing.T) {
		addr := randAliasAddress()
		out := &ExtendedLockedOutput{address: addr}
		assert.True(t, out.Address().Equals(addr))
	})
}
// TestExtendedLockedOutput_Balances checks that Balances returns the colored
// balances the output holds, unchanged.
func TestExtendedLockedOutput_Balances(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		balances := NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA})
		out := &ExtendedLockedOutput{balances: balances}
		assert.Equal(t, balances.Bytes(), out.Balances().Bytes())
	})
}
// TestExtendedLockedOutput_Bytes checks that serialization round-trips
// through OutputFromBytes for every combination of the optional fields
// (fallback options, timelock, payload): present fields must survive the
// round trip, and absent optional fields must remain absent after restoring.
func TestExtendedLockedOutput_Bytes(t *testing.T) {
t.Run("CASE: Happy path, all optional fields", func(t *testing.T) {
o := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, randEd25119Address()).
WithFallbackOptions(randEd25119Address(), time.Now().Add(2*time.Hour)).
WithTimeLock(time.Now().Add(1 * time.Hour))
err := o.SetPayload([]byte("some metadata"))
assert.NoError(t, err)
oBytes := o.Bytes()
var restored Output
restored, _, err = OutputFromBytes(oBytes)
assert.NoError(t, err)
castedRestored, ok := restored.(*ExtendedLockedOutput)
assert.True(t, ok)
// every field must survive the serialization round trip
assert.Equal(t, o.balances.Bytes(), castedRestored.balances.Bytes())
assert.True(t, o.address.Equals(castedRestored.address))
assert.Equal(t, o.id.Bytes(), castedRestored.id.Bytes())
assert.True(t, o.fallbackDeadline.Equal(castedRestored.fallbackDeadline))
assert.True(t, o.fallbackAddress.Equals(castedRestored.fallbackAddress))
assert.True(t, o.timelock.Equal(castedRestored.timelock))
assert.Equal(t, o.payload, castedRestored.payload)
})
t.Run("CASE: Happy path, no optional fields", func(t *testing.T) {
o := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, randEd25119Address())
oBytes := o.Bytes()
restored, _, err := OutputFromBytes(oBytes)
assert.NoError(t, err)
castedRestored, ok := restored.(*ExtendedLockedOutput)
assert.True(t, ok)
assert.Equal(t, o.balances.Bytes(), castedRestored.balances.Bytes())
assert.True(t, o.address.Equals(castedRestored.address))
assert.Equal(t, o.id.Bytes(), castedRestored.id.Bytes())
assert.True(t, o.fallbackDeadline.Equal(castedRestored.fallbackDeadline))
// absent fallback address stays absent on both sides
assert.Nil(t, o.fallbackAddress)
assert.Nil(t, castedRestored.fallbackAddress)
assert.True(t, o.timelock.Equal(castedRestored.timelock))
assert.Equal(t, o.payload, castedRestored.payload)
})
t.Run("CASE: Happy path, optional timelock", func(t *testing.T) {
o := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, randEd25119Address()).
WithTimeLock(time.Now().Add(1 * time.Hour))
oBytes := o.Bytes()
restored, _, err := OutputFromBytes(oBytes)
assert.NoError(t, err)
castedRestored, ok := restored.(*ExtendedLockedOutput)
assert.True(t, ok)
assert.Equal(t, o.balances.Bytes(), castedRestored.balances.Bytes())
assert.True(t, o.address.Equals(castedRestored.address))
assert.Equal(t, o.id.Bytes(), castedRestored.id.Bytes())
assert.True(t, o.fallbackDeadline.Equal(castedRestored.fallbackDeadline))
assert.Nil(t, o.fallbackAddress)
assert.Nil(t, castedRestored.fallbackAddress)
assert.True(t, o.timelock.Equal(castedRestored.timelock))
assert.Equal(t, o.payload, castedRestored.payload)
})
t.Run("CASE: Happy path, optional fallback", func(t *testing.T) {
o := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, randEd25119Address()).
WithFallbackOptions(randEd25119Address(), time.Now().Add(2*time.Hour))
oBytes := o.Bytes()
restored, _, err := OutputFromBytes(oBytes)
assert.NoError(t, err)
castedRestored, ok := restored.(*ExtendedLockedOutput)
assert.True(t, ok)
assert.Equal(t, o.balances.Bytes(), castedRestored.balances.Bytes())
assert.True(t, o.address.Equals(castedRestored.address))
assert.Equal(t, o.id.Bytes(), castedRestored.id.Bytes())
assert.True(t, o.fallbackDeadline.Equal(castedRestored.fallbackDeadline))
assert.True(t, o.fallbackAddress.Equals(castedRestored.fallbackAddress))
assert.True(t, o.timelock.Equal(castedRestored.timelock))
assert.Equal(t, o.payload, castedRestored.payload)
})
t.Run("CASE: Happy path, optional payload", func(t *testing.T) {
o := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}, randEd25119Address())
err := o.SetPayload([]byte("some metadata"))
assert.NoError(t, err)
oBytes := o.Bytes()
var restored Output
restored, _, err = OutputFromBytes(oBytes)
assert.NoError(t, err)
castedRestored, ok := restored.(*ExtendedLockedOutput)
assert.True(t, ok)
assert.Equal(t, o.balances.Bytes(), castedRestored.balances.Bytes())
assert.True(t, o.address.Equals(castedRestored.address))
assert.Equal(t, o.id.Bytes(), castedRestored.id.Bytes())
assert.True(t, o.fallbackDeadline.Equal(castedRestored.fallbackDeadline))
assert.Nil(t, o.fallbackAddress)
assert.Nil(t, castedRestored.fallbackAddress)
assert.True(t, o.timelock.Equal(castedRestored.timelock))
assert.Equal(t, o.payload, castedRestored.payload)
})
}
// TestExtendedLockedOutput_Compare checks that Compare reports equality for
// an exact clone and inequality as soon as a single byte differs.
func TestExtendedLockedOutput_Compare(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		original := dummyExtendedLockedOutput()
		duplicate := original.Clone()
		assert.Equal(t, 0, original.Compare(duplicate))
	})
	t.Run("CASE: Not equal", func(t *testing.T) {
		original := dummyExtendedLockedOutput()
		duplicate := original.Clone()
		castedDuplicate, isExtended := duplicate.(*ExtendedLockedOutput)
		assert.True(t, isExtended)
		assert.Equal(t, 0, original.Compare(castedDuplicate))
		// flip one payload byte; the comparison must no longer report equality
		castedDuplicate.payload[0] = original.payload[0] + 1
		assert.NotEqual(t, 0, original.Compare(castedDuplicate))
	})
}
// TestExtendedLockedOutput_FallbackAddress checks that FallbackAddress
// mirrors the internal field, including the nil (no fallback) case.
func TestExtendedLockedOutput_FallbackAddress(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyExtendedLockedOutput()
		assert.True(t, out.FallbackAddress().Equals(out.fallbackAddress))
	})
	t.Run("CASE: Address nil", func(t *testing.T) {
		out := dummyExtendedLockedOutput()
		out.fallbackAddress = nil
		assert.Nil(t, out.FallbackAddress())
	})
}
// TestExtendedLockedOutput_FallbackOptions checks that FallbackOptions
// returns both the fallback address and the fallback deadline of the output.
func TestExtendedLockedOutput_FallbackOptions(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyExtendedLockedOutput()
		gotAddress, gotDeadline := out.FallbackOptions()
		assert.True(t, gotAddress.Equals(out.fallbackAddress))
		assert.True(t, gotDeadline.Equal(out.fallbackDeadline))
	})
}
// TestExtendedLockedOutput_GetPayload checks that GetPayload returns the
// stored payload, and nil once the payload is cleared.
func TestExtendedLockedOutput_GetPayload(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyExtendedLockedOutput()
		assert.Equal(t, out.payload, out.GetPayload())
		// clearing the payload must be reflected by the getter
		out.payload = nil
		assert.Nil(t, out.GetPayload())
	})
}
// TestExtendedLockedOutput_ID checks that ID returns the output's stored
// identifier.
func TestExtendedLockedOutput_ID(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyExtendedLockedOutput()
		assert.Equal(t, out.id.Bytes(), out.ID().Bytes())
	})
}
// TestExtendedLockedOutput_Input checks that Input yields a UTXO input
// referencing the output's ID, and that it panics while the ID is unset.
func TestExtendedLockedOutput_Input(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyExtendedLockedOutput()
		utxoInput, isUTXOInput := out.Input().(*UTXOInput)
		assert.True(t, isUTXOInput)
		assert.Equal(t, utxoInput.referencedOutputID.Bytes(), out.ID().Bytes())
	})
	t.Run("CASE: No output id yet", func(t *testing.T) {
		// serialized form of output doesn't have outputid
		restored, _, err := OutputFromBytes(dummyExtendedLockedOutput().Bytes())
		assert.NoError(t, err)
		// without an ID, building an input is impossible and must panic
		assert.Panics(t, func() {
			_, _ = restored.Input().(*UTXOInput)
		})
		restored.SetID(randOutputID())
		assert.NotPanics(t, func() {
			_, _ = restored.Input().(*UTXOInput)
		})
	})
}
// TestExtendedLockedOutput_ObjectStorageKey checks that the object storage
// key equals the serialized output ID.
func TestExtendedLockedOutput_ObjectStorageKey(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyExtendedLockedOutput()
		assert.Equal(t, out.ID().Bytes(), out.ObjectStorageKey())
	})
}
// TestExtendedLockedOutput_ObjectStorageValue checks that the object storage
// value equals the full serialized output.
func TestExtendedLockedOutput_ObjectStorageValue(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyExtendedLockedOutput()
		assert.Equal(t, out.Bytes(), out.ObjectStorageValue())
	})
}
// TestExtendedLockedOutput_SetID checks that SetID overwrites the output's ID
// and returns the output itself (as Output) for chaining.
func TestExtendedLockedOutput_SetID(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyExtendedLockedOutput()
		freshID := randOutputID()
		updated, isExtended := out.SetID(freshID).(*ExtendedLockedOutput)
		assert.True(t, isExtended)
		assert.Equal(t, freshID.Bytes(), updated.ID().Bytes())
	})
}
func TestExtendedLockedOutput_SetPayload(t *testing.T) {
t.Run("CASE: Happy path", func(t *testing.T) {
data := make([]byte, MaxOutputPayloadSize)
output := dummyExtendedLockedOutput()
err := output.SetPayload(data)
assert.NoError(t, err)
})
t.Run("CASE: Too much data", func(t *testing.T) {
data := make([]byte, MaxOutputPayloadSize+1)
output := dummyExtendedLockedOutput()
err := output.SetPayload(data)
t.Log(err)
assert.Error(t, err)
})
}
// TestExtendedLockedOutput_TimeLock checks that TimeLock returns the output's
// timelock timestamp.
func TestExtendedLockedOutput_TimeLock(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyExtendedLockedOutput()
		assert.True(t, out.TimeLock().Equal(out.timelock))
	})
}
// TestExtendedLockedOutput_TimeLockedNow checks that an output counts as
// time-locked strictly before its timelock timestamp, and as unlocked from
// the timestamp itself onwards.
func TestExtendedLockedOutput_TimeLockedNow(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyExtendedLockedOutput()
		lockedUntil := time.Now()
		out.timelock = lockedUntil
		// any instant before the deadline is still locked...
		assert.True(t, out.TimeLockedNow(lockedUntil.Add(-time.Minute)))
		assert.True(t, out.TimeLockedNow(lockedUntil.Add(-time.Nanosecond)))
		// ...while the deadline itself and anything later is not
		assert.False(t, out.TimeLockedNow(lockedUntil))
		assert.False(t, out.TimeLockedNow(lockedUntil.Add(time.Second)))
	})
}
// TestExtendedLockedOutput_Type checks that Type reports
// ExtendedLockedOutputType.
func TestExtendedLockedOutput_Type(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := &ExtendedLockedOutput{}
		assert.Equal(t, ExtendedLockedOutputType, out.Type())
	})
}
// TestExtendedLockedOutput_UnlockAddressNow checks which address controls the
// output over time: the main address before the fallback deadline, and the
// fallback address (when one is set) after it.
func TestExtendedLockedOutput_UnlockAddressNow(t *testing.T) {
	deadline := time.Now()
	fallbackAddy := randEd25119Address()
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := dummyExtendedLockedOutput().WithFallbackOptions(fallbackAddy, deadline)
		assert.True(t, out.UnlockAddressNow(deadline.Add(-time.Minute)).Equals(out.Address()))
		assert.True(t, out.UnlockAddressNow(deadline.Add(time.Minute)).Equals(out.FallbackAddress()))
	})
	t.Run("CASE: No fallback address", func(t *testing.T) {
		// without a fallback address, the main address stays in control forever
		out := dummyExtendedLockedOutput().WithFallbackOptions(nil, deadline)
		assert.True(t, out.UnlockAddressNow(deadline.Add(-time.Minute)).Equals(out.Address()))
		assert.True(t, out.UnlockAddressNow(deadline.Add(time.Minute)).Equals(out.Address()))
	})
}
// TestExtendedLockedOutput_Update checks that Update is unsupported on
// ExtendedLockedOutput and panics when called.
func TestExtendedLockedOutput_Update(t *testing.T) {
	t.Run("CASE: Update panics", func(t *testing.T) {
		out := &ExtendedLockedOutput{}
		assert.Panics(t, func() {
			out.Update(&ExtendedLockedOutput{})
		})
	})
}
// TestExtendedLockedOutput_UpdateMintingColor checks that UpdateMintingColor
// replaces a ColorMint balance with the color derived from the output's ID
// (blake2b-256 of the ID bytes), leaves balances without ColorMint untouched,
// preserves all other fields, and panics when the stored payload exceeds the
// allowed size.
func TestExtendedLockedOutput_UpdateMintingColor(t *testing.T) {
t.Run("CASE: Happy path", func(t *testing.T) {
output := dummyExtendedLockedOutput()
output.balances = NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA, ColorMint: 100})
updated, ok := output.UpdateMintingColor().(*ExtendedLockedOutput)
assert.True(t, ok)
// all non-balance fields carry over unchanged
assert.Equal(t, output.id.Bytes(), updated.id.Bytes())
assert.True(t, updated.address.Equals(output.address))
assert.True(t, updated.fallbackAddress.Equals(output.fallbackAddress))
assert.True(t, updated.fallbackDeadline.Equal(output.fallbackDeadline))
assert.True(t, updated.timelock.Equal(output.timelock))
assert.Equal(t, output.payload, updated.payload)
// the minted amount reappears under the color derived from the output ID
mintBalance, valid := output.Balances().Get(ColorMint)
assert.True(t, valid)
coloredBalance, uValid := updated.Balances().Get(blake2b.Sum256(output.ID().Bytes()))
assert.True(t, uValid)
assert.Equal(t, mintBalance, coloredBalance)
})
t.Run("CASE: No color mint", func(t *testing.T) {
output := dummyExtendedLockedOutput()
// {8} is an arbitrary non-mint color: balances must pass through unchanged
output.balances = NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA, {8}: 100})
updated, ok := output.UpdateMintingColor().(*ExtendedLockedOutput)
assert.True(t, ok)
assert.Equal(t, output.id.Bytes(), updated.id.Bytes())
assert.True(t, updated.address.Equals(output.address))
assert.True(t, updated.fallbackAddress.Equals(output.fallbackAddress))
assert.True(t, updated.fallbackDeadline.Equal(output.fallbackDeadline))
assert.True(t, updated.timelock.Equal(output.timelock))
assert.Equal(t, output.payload, updated.payload)
assert.Equal(t, updated.Balances().Bytes(), output.Balances().Bytes())
})
t.Run("CASE: Output had too big payload", func(t *testing.T) {
// an over-sized payload makes reconstructing the output impossible
output := dummyExtendedLockedOutput()
output.balances = NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA, {8}: 100})
output.payload = make([]byte, MaxOutputPayloadSize+1)
assert.Panics(t, func() {
output.UpdateMintingColor()
})
})
}
// TestExtendedLockedOutput_WithFallbackOptions checks that WithFallbackOptions
// stores the given fallback address and deadline, accepting a nil address.
func TestExtendedLockedOutput_WithFallbackOptions(t *testing.T) {
	deadline := time.Now()
	addr := randEd25119Address()
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := (&ExtendedLockedOutput{}).WithFallbackOptions(addr, deadline)
		assert.True(t, addr.Equals(out.FallbackAddress()))
		assert.True(t, deadline.Equal(out.fallbackDeadline))
	})
	t.Run("CASE: nil fallback address", func(t *testing.T) {
		out := (&ExtendedLockedOutput{}).WithFallbackOptions(nil, deadline)
		assert.Nil(t, out.FallbackAddress())
		assert.True(t, deadline.Equal(out.fallbackDeadline))
	})
}
// TestExtendedLockedOutput_WithTimeLock checks that WithTimeLock stores the
// given timelock timestamp on the output.
func TestExtendedLockedOutput_WithTimeLock(t *testing.T) {
	lockedUntil := time.Now()
	t.Run("CASE: Happy path", func(t *testing.T) {
		out := (&ExtendedLockedOutput{}).WithTimeLock(lockedUntil)
		assert.True(t, lockedUntil.Equal(out.TimeLock()))
	})
}
// TestNewExtendedLockedOutput checks that the constructor stores the provided
// balances and target address.
func TestNewExtendedLockedOutput(t *testing.T) {
	t.Run("CASE: Happy path", func(t *testing.T) {
		targetAddress := randAliasAddress()
		coloredBalances := NewColoredBalances(map[Color]uint64{ColorIOTA: 1})
		out := NewExtendedLockedOutput(coloredBalances.Map(), targetAddress)
		assert.Equal(t, coloredBalances.Bytes(), out.Balances().Bytes())
		assert.True(t, targetAddress.Equals(out.Address()))
	})
}
// TestExtendedOutputFromMarshalUtil checks deserialization of an
// ExtendedLockedOutput: the happy path consumes every byte, a wrong type
// byte fails, and the flags byte (located right after the type byte, the
// balances and the address — see the index expression below) must agree with
// the data that follows: a set optional-field flag without the corresponding
// data is an error, while a cleared flag with trailing data leaves bytes
// unconsumed.
func TestExtendedOutputFromMarshalUtil(t *testing.T) {
t.Run("CASE: Happy path", func(t *testing.T) {
output := dummyExtendedLockedOutput()
outputBytes := output.Bytes()
marshalUtil := marshalutil.New(outputBytes)
restored, err := ExtendedOutputFromMarshalUtil(marshalUtil)
assert.NoError(t, err)
// the parser must consume the whole serialized output
assert.Equal(t, len(outputBytes), marshalUtil.ReadOffset())
assert.Equal(t, outputBytes, restored.Bytes())
})
t.Run("CASE: Wrong type", func(t *testing.T) {
output := dummyExtendedLockedOutput()
outputBytes := output.Bytes()
// corrupt the leading type byte
outputBytes[0] = byte(AliasOutputType)
marshalUtil := marshalutil.New(outputBytes)
_, err := ExtendedOutputFromMarshalUtil(marshalUtil)
t.Log(err)
assert.Error(t, err)
})
t.Run("CASE: Fallback flag provided, missing data", func(t *testing.T) {
output := dummyExtendedLockedOutput().WithFallbackOptions(nil, time.Time{})
outputBytes := output.Bytes()
flags := output.compressFlags()
flags = flags.SetBit(flagExtendedLockedOutputFallbackPresent)
// overwrite the flags byte: 1 type byte + balances + address precede it
outputBytes[1+len(output.balances.Bytes())+AddressLength] = byte(flags)
_, _, err := OutputFromBytes(outputBytes)
t.Log(err)
assert.Error(t, err)
})
t.Run("CASE: Timelock flag provided, missing data", func(t *testing.T) {
output := dummyExtendedLockedOutput().WithTimeLock(time.Time{})
err := output.SetPayload(nil)
assert.NoError(t, err)
outputBytes := output.Bytes()
flags := output.compressFlags()
flags = flags.SetBit(flagExtendedLockedOutputTimeLockPresent)
// overwrite the flags byte: 1 type byte + balances + address precede it
outputBytes[1+len(output.balances.Bytes())+AddressLength] = byte(flags)
_, _, err = OutputFromBytes(outputBytes)
t.Log(err)
assert.Error(t, err)
})
t.Run("CASE: Payload flag provided, missing data", func(t *testing.T) {
output := dummyExtendedLockedOutput()
err := output.SetPayload(nil)
assert.NoError(t, err)
outputBytes := output.Bytes()
flags := output.compressFlags()
flags = flags.SetBit(flagExtendedLockedOutputPayloadPresent)
// overwrite the flags byte: 1 type byte + balances + address precede it
outputBytes[1+len(output.balances.Bytes())+AddressLength] = byte(flags)
_, _, err = OutputFromBytes(outputBytes)
t.Log(err)
assert.Error(t, err)
})
t.Run("CASE: Fallback present, wrong flag", func(t *testing.T) {
output := dummyExtendedLockedOutput().WithTimeLock(time.Time{})
err := output.SetPayload(nil)
assert.NoError(t, err)
outputBytes := output.Bytes()
flags := output.compressFlags()
flags = flags.ClearBit(flagExtendedLockedOutputFallbackPresent)
// overwrite the flags byte: 1 type byte + balances + address precede it
outputBytes[1+len(output.balances.Bytes())+AddressLength] = byte(flags)
var consumedBytes int
_, consumedBytes, err = OutputFromBytes(outputBytes)
assert.NoError(t, err)
// we did not consume all bytes
assert.NotEqual(t, len(outputBytes), consumedBytes)
})
t.Run("CASE: Timelock present, wrong flag", func(t *testing.T) {
output := dummyExtendedLockedOutput().WithTimeLock(time.Now()).WithFallbackOptions(nil, time.Time{})
err := output.SetPayload(nil)
assert.NoError(t, err)
outputBytes := output.Bytes()
flags := output.compressFlags()
flags = flags.ClearBit(flagExtendedLockedOutputTimeLockPresent)
// overwrite the flags byte: 1 type byte + balances + address precede it
outputBytes[1+len(output.balances.Bytes())+AddressLength] = byte(flags)
var consumedBytes int
_, consumedBytes, err = OutputFromBytes(outputBytes)
assert.NoError(t, err)
// we did not consume all bytes
assert.NotEqual(t, len(outputBytes), consumedBytes)
})
t.Run("CASE: Payload present, wrong flag", func(t *testing.T) {
output := dummyExtendedLockedOutput()
outputBytes := output.Bytes()
flags := output.compressFlags()
flags = flags.ClearBit(flagExtendedLockedOutputPayloadPresent)
// overwrite the flags byte: 1 type byte + balances + address precede it
outputBytes[1+len(output.balances.Bytes())+AddressLength] = byte(flags)
_, consumedBytes, err := OutputFromBytes(outputBytes)
assert.NoError(t, err)
// we did not consume all bytes
assert.NotEqual(t, len(outputBytes), consumedBytes)
})
}
// TestExtendedLockedOutput_UnlockValid covers the unlock rules of an
// ExtendedLockedOutput: unlocking by signature, unlocking via a referenced
// alias, timelock enforcement, fallback-address behavior and rejection of
// unsupported unlock block types.
func TestExtendedLockedOutput_UnlockValid(t *testing.T) {
	t.Run("CASE: Happy path, unlocked by sig", func(t *testing.T) {
		w := genRandomWallet()
		input := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: 1}, w.address)
		input.SetID(randOutputID())
		output := NewSigLockedColoredOutput(NewColoredBalances(map[Color]uint64{ColorIOTA: 1}), randEd25119Address())
		essence := NewTransactionEssence(0, time.Now(), identity.ID{}, identity.ID{}, NewInputs(input.Input()), NewOutputs(output))
		// the output is locked to w's address, so w's signature unlocks it
		unlockBlock := NewSignatureUnlockBlock(w.sign(essence))
		tx := NewTransaction(essence, UnlockBlocks{unlockBlock})
		valid, err := input.UnlockValid(tx, unlockBlock, Outputs{input})
		assert.NoError(t, err)
		assert.True(t, valid)
	})
	t.Run("CASE: Happy path, unlocked by alias", func(t *testing.T) {
		w := genRandomWallet()
		alias := &AliasOutput{
			outputID:      randOutputID(),
			outputIDMutex: sync.RWMutex{},
			balances:      NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}),
			aliasAddress:  *randAliasAddress(),
			stateAddress:  w.address, // alias state controller is our wallet
			stateIndex:    10,
		}
		nextAlias := alias.NewAliasOutputNext(false)
		// the extended output is locked to the alias address, so consuming
		// it requires an alias unlock block referencing the alias input
		toBeConsumedExtended := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: 1}, alias.GetAliasAddress())
		toBeConsumedExtended.SetID(randOutputID())
		nextAliasBalance := alias.Balances().Map()
		// add 1 more iota from consumed extended output
		nextAliasBalance[ColorIOTA]++
		err := nextAlias.SetBalances(nextAliasBalance)
		assert.NoError(t, err)
		essence := NewTransactionEssence(0, time.Now(), identity.ID{}, identity.ID{}, NewInputs(toBeConsumedExtended.Input(), alias.Input()), NewOutputs(nextAlias))
		// which input index did the alias get?
		var aliasInputIndex uint16
		orderedInputs := make(Outputs, len(essence.Inputs()))
		for i, input := range essence.Inputs() {
			casted := input.(*UTXOInput)
			if casted.ReferencedOutputID() == alias.ID() {
				aliasInputIndex = uint16(i)
				orderedInputs[i] = alias
			}
			if casted.ReferencedOutputID() == toBeConsumedExtended.ID() {
				orderedInputs[i] = toBeConsumedExtended
			}
		}
		// create mapping from outputID to unlockBlock
		inputToUnlockMapping := make(map[OutputID]UnlockBlock)
		inputToUnlockMapping[alias.ID()] = NewSignatureUnlockBlock(w.sign(essence))
		inputToUnlockMapping[toBeConsumedExtended.ID()] = NewAliasUnlockBlock(aliasInputIndex)
		// fill unlock blocks
		unlocks := make(UnlockBlocks, len(essence.Inputs()))
		for i, input := range essence.Inputs() {
			unlocks[i] = inputToUnlockMapping[input.(*UTXOInput).ReferencedOutputID()]
		}
		tx := NewTransaction(essence, unlocks)
		// the extended output is unlocked via the alias; the alias itself via signature
		valid, uErr := toBeConsumedExtended.UnlockValid(tx, inputToUnlockMapping[toBeConsumedExtended.ID()], orderedInputs)
		assert.NoError(t, uErr)
		assert.True(t, valid)
		valid, uErr = alias.UnlockValid(tx, inputToUnlockMapping[alias.ID()], orderedInputs)
		assert.NoError(t, uErr)
		assert.True(t, valid)
	})
	t.Run("CASE: Referenced input not alias", func(t *testing.T) {
		w := genRandomWallet()
		nowis := time.Now()
		input := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: 1}, w.address)
		input.SetID(randOutputID())
		output := NewSigLockedColoredOutput(NewColoredBalances(map[Color]uint64{ColorIOTA: 1}), randEd25119Address())
		essence := NewTransactionEssence(0, nowis, identity.ID{}, identity.ID{}, NewInputs(input.Input()), NewOutputs(output))
		// an alias unlock block pointing at a non-alias input must be rejected
		unlockBlock := NewAliasUnlockBlock(0)
		tx := NewTransaction(essence, UnlockBlocks{unlockBlock})
		valid, err := input.UnlockValid(tx, unlockBlock, Outputs{input})
		t.Log(err)
		assert.Error(t, err)
		assert.False(t, valid)
	})
	t.Run("CASE: Referenced wrong alias", func(t *testing.T) {
		alias := &AliasOutput{
			outputID:      randOutputID(),
			outputIDMutex: sync.RWMutex{},
			balances:      NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}),
			aliasAddress:  *randAliasAddress(),
			stateAddress:  randEd25119Address(), // alias state controller is a random address
			stateIndex:    10,
		}
		nowis := time.Now()
		input := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: 1}, randAliasAddress())
		input.SetID(randOutputID())
		// for the sake of this test, tx doesn't have to be valid
		essence := NewTransactionEssence(0, nowis, identity.ID{}, identity.ID{}, NewInputs(alias.Input()), NewOutputs(input))
		// important is that we reference an alias that has a different aliasAddress
		unlockBlock := NewAliasUnlockBlock(0)
		tx := NewTransaction(essence, UnlockBlocks{unlockBlock})
		valid, err := input.UnlockValid(tx, unlockBlock, Outputs{alias})
		t.Log(err)
		assert.Error(t, err)
		assert.False(t, valid)
	})
	t.Run("CASE: Output is timelocked, can't spend", func(t *testing.T) {
		w := genRandomWallet()
		nowis := time.Now()
		input := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: 1}, w.address).WithTimeLock(nowis.Add(time.Hour))
		input.SetID(randOutputID())
		output := NewSigLockedColoredOutput(NewColoredBalances(map[Color]uint64{ColorIOTA: 1}), randEd25119Address())
		// tx timestamp before timelock
		essence := NewTransactionEssence(0, nowis, identity.ID{}, identity.ID{}, NewInputs(input.Input()), NewOutputs(output))
		unlockBlock := NewSignatureUnlockBlock(w.sign(essence))
		tx := NewTransaction(essence, UnlockBlocks{unlockBlock})
		valid, err := input.UnlockValid(tx, unlockBlock, Outputs{input})
		assert.NoError(t, err)
		assert.False(t, valid)
	})
	t.Run("CASE: Output is timelocked, spend after", func(t *testing.T) {
		w := genRandomWallet()
		nowis := time.Now()
		input := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: 1}, w.address).WithTimeLock(nowis.Add(time.Hour))
		input.SetID(randOutputID())
		output := NewSigLockedColoredOutput(NewColoredBalances(map[Color]uint64{ColorIOTA: 1}), randEd25119Address())
		// tx timestamp is exactly timelock, output is allowed to be spent from that moment on
		essence := NewTransactionEssence(0, nowis.Add(time.Hour), identity.ID{}, identity.ID{}, NewInputs(input.Input()), NewOutputs(output))
		unlockBlock := NewSignatureUnlockBlock(w.sign(essence))
		tx := NewTransaction(essence, UnlockBlocks{unlockBlock})
		valid, err := input.UnlockValid(tx, unlockBlock, Outputs{input})
		assert.NoError(t, err)
		assert.True(t, valid)
	})
	t.Run("CASE: Unsupported unlock block", func(t *testing.T) {
		input := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: 1}, randAliasAddress())
		// reference unlock blocks are not supported by extended outputs
		unlockBlock := NewReferenceUnlockBlock(0)
		valid, err := input.UnlockValid(&Transaction{essence: &TransactionEssence{}}, unlockBlock, Outputs{input})
		t.Log(err)
		assert.Error(t, err)
		assert.False(t, valid)
	})
	t.Run("CASE: Fallback address present", func(t *testing.T) {
		destWallet := genRandomWallet()
		myWallet := genRandomWallet()
		nowis := time.Now()
		// until nowis+30 minutes, only destWallet can spend it, after that, only myWallet
		input := NewExtendedLockedOutput(map[Color]uint64{ColorIOTA: 1}, destWallet.address).WithFallbackOptions(myWallet.address, nowis.Add(30*time.Minute))
		input.SetID(randOutputID())
		output := NewSigLockedColoredOutput(NewColoredBalances(map[Color]uint64{ColorIOTA: 1}), randEd25119Address())
		// t =< nowis + 30 mins, destWallet can spend it
		essence := NewTransactionEssence(0, nowis, identity.ID{}, identity.ID{}, NewInputs(input.Input()), NewOutputs(output))
		unlockBlock := NewSignatureUnlockBlock(destWallet.sign(essence))
		tx := NewTransaction(essence, UnlockBlocks{unlockBlock})
		valid, err := input.UnlockValid(tx, unlockBlock, Outputs{input})
		assert.NoError(t, err)
		assert.True(t, valid)
		// t =< nowis + 30 mins, myWallet can't spend it
		essence = NewTransactionEssence(0, nowis, identity.ID{}, identity.ID{}, NewInputs(input.Input()), NewOutputs(output))
		unlockBlock = NewSignatureUnlockBlock(myWallet.sign(essence))
		tx = NewTransaction(essence, UnlockBlocks{unlockBlock})
		valid, err = input.UnlockValid(tx, unlockBlock, Outputs{input})
		assert.NoError(t, err)
		assert.False(t, valid)
		// t > nowis + 30 mins, destWallet can't spend it
		essence = NewTransactionEssence(0, nowis.Add(30*time.Minute).Add(time.Nanosecond), identity.ID{}, identity.ID{}, NewInputs(input.Input()), NewOutputs(output))
		unlockBlock = NewSignatureUnlockBlock(destWallet.sign(essence))
		tx = NewTransaction(essence, UnlockBlocks{unlockBlock})
		valid, err = input.UnlockValid(tx, unlockBlock, Outputs{input})
		assert.NoError(t, err)
		assert.False(t, valid)
		// t > nowis + 30 mins, myWallet can spend it
		essence = NewTransactionEssence(0, nowis.Add(30*time.Minute).Add(time.Nanosecond), identity.ID{}, identity.ID{}, NewInputs(input.Input()), NewOutputs(output))
		unlockBlock = NewSignatureUnlockBlock(myWallet.sign(essence))
		tx = NewTransaction(essence, UnlockBlocks{unlockBlock})
		valid, err = input.UnlockValid(tx, unlockBlock, Outputs{input})
		assert.NoError(t, err)
		assert.True(t, valid)
	})
}
// TestExtendedLockedOutput_Clone verifies that Clone produces a deep copy:
// the clone serializes identically while sharing no memory with the original.
func TestExtendedLockedOutput_Clone(t *testing.T) {
	original := dummyExtendedLockedOutput()
	cloned := original.Clone()
	// The clone must keep the concrete type.
	typedClone, isExtended := cloned.(*ExtendedLockedOutput)
	assert.True(t, isExtended)
	// Distinct top-level object and distinct referenced data.
	assert.True(t, original != typedClone)
	assert.True(t, notSameMemory(original.payload, typedClone.payload))
	assert.True(t, original.address != typedClone.address)
	assert.True(t, original.fallbackAddress != typedClone.fallbackAddress)
	// Serialized forms must nevertheless be byte-for-byte equal.
	assert.EqualValues(t, original.Bytes(), cloned.Bytes())
}
// endregion
// region test utils
// genRandomWallet builds a test wallet around a freshly generated ED25519
// key pair and its derived address.
func genRandomWallet() wallet {
	keyPair := ed25519.GenerateKeyPair()
	return wallet{keyPair, NewED25519Address(keyPair.PublicKey)}
}
func notSameMemory(s1, s2 []byte) bool {
if s1 == nil || s2 == nil {
return true
}
return &s1[cap(s1)-1] != &s2[cap(s2)-1]
}
// dummyAliasOutput returns a fully populated AliasOutput for tests.
// Pass true to mark the output as an origin output.
func dummyAliasOutput(origin ...bool) *AliasOutput {
	// The optional variadic flag defaults to false.
	orig := false
	if len(origin) > 0 {
		orig = origin[0]
	}
	return &AliasOutput{
		outputID:            randOutputID(),
		outputIDMutex:       sync.RWMutex{},
		balances:            NewColoredBalances(map[Color]uint64{ColorIOTA: DustThresholdAliasOutputIOTA}),
		aliasAddress:        *randAliasAddress(),
		stateAddress:        randEd25119Address(),
		stateIndex:          0,
		stateData:           []byte("initial"),
		governanceMetadata:  []byte("This chain runs EVM v0.0.0"),
		immutableData:       []byte("don't touch this"),
		isGovernanceUpdate:  false,
		isOrigin:            orig,
		isDelegated:         false,
		governingAddress:    randAliasAddress(),
		delegationTimelock:  time.Time{},
		StorableObjectFlags: objectstorage.StorableObjectFlags{},
	}
}
// dummyExtendedLockedOutput returns an ExtendedLockedOutput with every
// optional feature (fallback address, timelock, payload) populated, so that
// serialization tests exercise all compressed flags.
func dummyExtendedLockedOutput() *ExtendedLockedOutput {
	return &ExtendedLockedOutput{
		id:                  randOutputID(),
		idMutex:             sync.RWMutex{},
		balances:            NewColoredBalances(map[Color]uint64{ColorIOTA: 1}),
		address:             randEd25119Address(),
		fallbackAddress:     randEd25119Address(),
		fallbackDeadline:    time.Unix(1001, 0),
		timelock:            time.Unix(2000, 0),
		payload:             []byte("a payload"),
		StorableObjectFlags: objectstorage.StorableObjectFlags{},
	}
}
// randEd25119Address returns an ED25519 address derived from a freshly
// generated key pair. (The "25119" spelling is kept for compatibility.)
func randEd25119Address() *ED25519Address {
	return NewED25519Address(ed25519.GenerateKeyPair().PublicKey)
}
// randAliasAddress returns an alias address built from 32 random bytes.
func randAliasAddress() *AliasAddress {
	seed := make([]byte, 32)
	_, _ = rand.Read(seed)
	return NewAliasAddress(seed)
}
// randOutputID returns an OutputID parsed from 34 random bytes.
func randOutputID() OutputID {
	raw := make([]byte, 34)
	_, _ = rand.Read(raw)
	id, _, _ := OutputIDFromBytes(raw)
	return id
}
// endregion
|
//------------------------------------------------------------------------------
/*
This file is part of cbcd: https://github.com/cbc/cbcd
Copyright (c) 2012, 2013 cbc Labs Inc.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
//==============================================================================
#ifndef cbc_RPC_WSINFOSUB_H
#define cbc_RPC_WSINFOSUB_H
#include <cbc/server/WSSession.h>
#include <cbc/net/InfoSub.h>
#include <cbc/beast/net/IPAddressConversion.h>
#include <cbc/json/json_writer.h>
#include <cbc/rpc/Role.h>
#include <memory>
#include <string>
namespace cbc {
/** InfoSub implementation that forwards subscription events to a WebSocket
    session.

    Holds only a weak reference to the session, so an active subscription
    does not keep a closed session alive. */
class WSInfoSub : public InfoSub
{
    // Weak reference to the owning WebSocket session.
    std::weak_ptr<WSSession> ws_;
    // Identified user (from the X-User header), empty if not identified.
    std::string user_;
    // Forwarded client address (from X-Forwarded-For), empty if absent.
    std::string fwdfor_;

public:
    WSInfoSub(Source& source, std::shared_ptr<WSSession> const& ws)
    : InfoSub(source)
    , ws_(ws)
    {
        // Trust the X-User / X-Forwarded-For headers only when the peer is
        // identified for this port and remote address.
        auto const& h = ws->request();
        auto it = h.find("X-User");
        if (it != h.end() &&
            isIdentified(
                ws->port(), beast::IPAddressConversion::from_asio(
                    ws->remote_endpoint()).address(), it->value().to_string()))
        {
            user_ = it->value().to_string();
            it = h.find("X-Forwarded-For");
            if (it != h.end())
                fwdfor_ = it->value().to_string();
        }
    }

    /// Returns the identified user name (empty if none).
    std::string
    user() const
    {
        return user_;
    }

    /// Returns the forwarded-for client address (empty if none).
    std::string
    forwarded_for() const
    {
        return fwdfor_;
    }

    /// Serializes the JSON value and sends it over the session, if the
    /// session is still alive; silently drops the message otherwise.
    void
    send(Json::Value const& jv, bool)
    {
        auto sp = ws_.lock();
        if(! sp)
            return;
        // Stream the JSON directly into a dynamic buffer to avoid building
        // an intermediate std::string.
        beast::multi_buffer sb;
        Json::stream(jv,
            [&](void const* data, std::size_t n)
            {
                sb.commit(boost::asio::buffer_copy(
                    sb.prepare(n), boost::asio::buffer(data, n)));
            });
        auto m = std::make_shared<
            StreambufWSMsg<decltype(sb)>>(
                std::move(sb));
        sp->send(m);
    }
};
} // cbc
#endif
|
<?php
namespace Reform\Tests\Validation\Rule;
use Reform\Validation\Rule\Required;
/**
* RequiredTest
*
* @author Glynn Forrest <me@glynnforrest.com>
**/
class RequiredTest extends RuleTest
{
    /**
     * The rule instance exercised by the parent RuleTest.
     */
    protected $rule;

    public function setup()
    {
        $this->rule = new Required();
    }

    /**
     * Pairs of [input value, expected validity] for the Required rule.
     *
     * Numeric zero and the string '0' are present but count as provided;
     * only the empty string, null and an empty array are rejected.
     */
    public function dataProvider()
    {
        return [
            [0, true],
            ['0', true],
            [1, true],
            [-1, true],
            ['3', true],
            [-5, true],
            ['foo', true],
            ['user1', true],
            ['', false],
            [null, false],
            [[], false],
        ];
    }
}
|
#!/usr/bin/env bash
# Builds gRPC from source and installs it under /usr, keeping a copy of its
# license. Two modes:
#   - WITH_RHEL8_RPMS set: use pre-fetched grpc/abseil-cpp/protobuf sources
#     already present in the working directory.
#   - otherwise: clone the pinned $GRPC_REVISION from GitHub with submodules.
# Expects GRPC_REVISION and LICENSE_DIR in the environment; NPROCS optional.
set -e
CXXFLAGS=""
if [ -f /etc/redhat-release ]; then
# Silence GCC warnings that are promoted to errors on RHEL toolchains.
CXXFLAGS="-Wno-error=class-memaccess -Wno-ignored-qualifiers -Wno-stringop-truncation -Wno-cast-function-type"
fi
if [ -n "${WITH_RHEL8_RPMS}" ]; then
cd grpc
cd third_party
# Replace the (empty) submodule directories with the pre-fetched sources.
rmdir abseil-cpp protobuf
mv ../../abseil-cpp .
mv ../../protobuf .
cd ..
CMAKE_FLAGS=(
-DgRPC_ZLIB_PROVIDER=package
)
else
git clone -b "$GRPC_REVISION" --depth 1 https://github.com/grpc/grpc
cd grpc
git submodule update --init
# Use the system-packaged protobuf and zlib instead of the bundled copies.
CMAKE_FLAGS=(
-DgRPC_PROTOBUF_PROVIDER=package
-DgRPC_PROTOBUF_PACKAGE_TYPE=CONFIG
-DgRPC_ZLIB_PROVIDER=package
)
fi
# Record the license alongside the other third-party notices.
cp NOTICE.txt "${LICENSE_DIR}/grpc-${GRPC_REVISION}"
mkdir -p cmake/build
cd cmake/build
# Build only the C++ pieces: every other language plugin is disabled, and
# c-ares/OpenSSL come from system packages.
cmake \
"${CMAKE_FLAGS[@]}" \
-DgRPC_BUILD_GRPC_CSHARP_PLUGIN=OFF \
-DgRPC_BUILD_GRPC_NODE_PLUGIN=OFF \
-DgRPC_BUILD_GRPC_OBJECTIVE_C_PLUGIN=OFF \
-DgRPC_BUILD_GRPC_PHP_PLUGIN=OFF \
-DgRPC_BUILD_GRPC_PYTHON_PLUGIN=OFF \
-DgRPC_BUILD_GRPC_RUBY_PLUGIN=OFF \
-DgRPC_CARES_PROVIDER=package \
-DgRPC_SSL_PROVIDER=package \
-DCMAKE_BUILD_TYPE=Release \
-DgRPC_INSTALL=ON \
-DCMAKE_INSTALL_PREFIX=/usr \
../..
make -j "${NPROCS:-2}" CXXFLAGS="${CXXFLAGS}"
make install
|
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using WebWallet.Models.Enumerations;
using WebWallet.ViewModels.Constants;
using WebWallet.ViewModels.Transaction;
namespace WebWallet.ViewModels.Goal
{
    /// <summary>
    /// View model for displaying and editing a savings goal.
    /// Display names are in Bulgarian (the application's UI language).
    /// </summary>
    public class GoalVM
    {
        /// <summary>Identifier of the goal.</summary>
        public string Id { get; set; }

        /// <summary>Display name of the goal ("Име" = Name).</summary>
        [Required(AllowEmptyStrings = false, ErrorMessage = Message.RequiredField)]
        [DisplayFormat(ConvertEmptyStringToNull = false)]
        [Display(Name = "Име")]
        public string Name { get; set; }

        /// <summary>Timestamp of creation.</summary>
        public DateTime CreatedOn { get; set; }

        /// <summary>Timestamp of the last modification.</summary>
        public DateTime ModifiedOn { get; set; }

        /// <summary>Target amount to save ("Сума" = Amount), rendered in BGN.</summary>
        [Required(ErrorMessage = Message.RequiredField)]
        [Display(Name = "Сума")]
        [Range(1.0, maximum: (double)decimal.MaxValue, ErrorMessage = Message.InvalidValue)]
        [DisplayFormat(DataFormatString = FormatStrig.BGN, ApplyFormatInEditMode = false)]
        public decimal Target { get; set; }

        /// <summary>Amount still left to save ("Оставаща Сума" = Remaining Amount).</summary>
        [Display(Name = "Оставаща Сума")]
        [Range(0, maximum: (double)decimal.MaxValue, ErrorMessage = Message.InvalidValue)]
        [DisplayFormat(DataFormatString = FormatStrig.BGN, ApplyFormatInEditMode = false)]
        public decimal Remaining { get; set; }

        /// <summary>Deadline for reaching the goal ("Краен срок" = Deadline).</summary>
        [Required(ErrorMessage = Message.RequiredField)]
        [Display(Name = "Краен срок")]
        [DataType(DataType.Date)]
        [DisplayFormat(ApplyFormatInEditMode = false, DataFormatString = FormatStrig.Date)]
        public DateTime Deadline { get; set; }

        /// <summary>Kind of goal ("Тип" = Type).</summary>
        [Required(ErrorMessage = Message.RequiredField)]
        [Display(Name = "Тип")]
        public GoalType Type { get; set; }

        /// <summary>Identifier of the owning user.</summary>
        public string UserId { get; set; }
    }
}
|
<?php
/**
* Buffered query utilities.
*/
namespace PhpMyAdmin\SqlParser\Utils;
use PhpMyAdmin\SqlParser\Context;
/**
* Buffer query utilities.
*
* Implements a specialized lexer used to extract statements from large inputs
* that are being buffered. After each statement has been extracted, a lexer or
* a parser may be used.
*
* @category Lexer
*
* @license https://www.gnu.org/licenses/gpl-2.0.txt GPL-2.0+
*/
class BufferedQuery
{
    // Constants that describe the current status of the parser.

    // A string is being parsed.
    const STATUS_STRING = 16; // 0001 0000
    const STATUS_STRING_SINGLE_QUOTES = 17; // 0001 0001
    const STATUS_STRING_DOUBLE_QUOTES = 18; // 0001 0010
    const STATUS_STRING_BACKTICK = 20; // 0001 0100

    // A comment is being parsed.
    const STATUS_COMMENT = 32; // 0010 0000
    const STATUS_COMMENT_BASH = 33; // 0010 0001
    const STATUS_COMMENT_C = 34; // 0010 0010
    const STATUS_COMMENT_SQL = 36; // 0010 0100

    /**
     * The query that is being processed.
     *
     * This field can be modified just by appending to it!
     *
     * @var string
     */
    public $query = '';

    /**
     * The options of this parser.
     *
     * @var array
     */
    public $options = array();

    /**
     * The last delimiter used.
     *
     * @var string
     */
    public $delimiter;

    /**
     * The length of the delimiter.
     *
     * Cached so the hot loop in extract() does not recompute strlen().
     *
     * @var int
     */
    public $delimiterLen;

    /**
     * The current status of the parser.
     *
     * One of the STATUS_* constants above, or 0 while in plain SQL.
     *
     * @var int
     */
    public $status;

    /**
     * The last incomplete query that was extracted.
     *
     * @var string
     */
    public $current = '';

    /**
     * Constructor.
     *
     * @param string $query   the query to be parsed
     * @param array  $options the options of this parser
     */
    public function __construct($query = '', array $options = array())
    {
        // Merges specified options with defaults.
        $this->options = array_merge(
            array(
                /*
                 * The starting delimiter.
                 *
                 * @var string
                 */
                'delimiter' => ';',

                /*
                 * Whether `DELIMITER` statements should be parsed.
                 *
                 * @var bool
                 */
                'parse_delimiter' => false,

                /*
                 * Whether a delimiter should be added at the end of the
                 * statement.
                 *
                 * @var bool
                 */
                'add_delimiter' => false,
            ),
            $options
        );
        $this->query = $query;
        $this->setDelimiter($this->options['delimiter']);
    }

    /**
     * Sets the delimiter.
     *
     * Used to update the cached length of it too.
     *
     * @param string $delimiter
     */
    public function setDelimiter($delimiter)
    {
        $this->delimiter = $delimiter;
        $this->delimiterLen = strlen($delimiter);
    }

    /**
     * Extracts a statement from the buffer.
     *
     * @param bool $end whether the end of the buffer was reached
     *
     * @return string|false
     */
    public function extract($end = false)
    {
        /**
         * The last parsed position.
         *
         * This is statically defined because it is not used anywhere
         * outside this method, and keeping it static probably brings a
         * (minor) performance improvement.
         *
         * @var int
         */
        static $i = 0;

        if (empty($this->query)) {
            return false;
        }

        /**
         * The length of the buffer.
         *
         * @var int
         */
        $len = strlen($this->query);

        /**
         * The last index of the string that is going to be parsed.
         *
         * There must be a few characters left in the buffer so the parser can
         * avoid confusing some symbols that may have multiple meanings.
         *
         * For example, if the buffer ends in `-` that may be an operator or the
         * beginning of a comment.
         *
         * Another example if the buffer ends in `DELIMITE`. The parser is going
         * to require a few more characters because that may be a part of the
         * `DELIMITER` keyword or just a column named `DELIMITE`.
         *
         * Those extra characters are required only if there is more data
         * expected (the end of the buffer was not reached).
         *
         * @var int
         */
        $loopLen = $end ? $len : $len - 16;

        for (; $i < $loopLen; ++$i) {
            /*
             * Handling backslash.
             *
             * Even if the next character is a special character that should be
             * treated differently, because of the preceding backslash, it will
             * be ignored.
             */
            if ((($this->status & static::STATUS_COMMENT) === 0) && ($this->query[$i] === '\\')) {
                $this->current .= $this->query[$i] . $this->query[++$i];
                continue;
            }

            /*
             * Handling special parser statuses.
             */
            if ($this->status === static::STATUS_STRING_SINGLE_QUOTES) {
                // Single-quoted strings like 'foo'.
                if ($this->query[$i] === '\'') {
                    $this->status = 0;
                }
                $this->current .= $this->query[$i];
                continue;
            } elseif ($this->status === static::STATUS_STRING_DOUBLE_QUOTES) {
                // Double-quoted strings like "bar".
                if ($this->query[$i] === '"') {
                    $this->status = 0;
                }
                $this->current .= $this->query[$i];
                continue;
            } elseif ($this->status === static::STATUS_STRING_BACKTICK) {
                // Backtick-quoted identifiers like `column`.
                if ($this->query[$i] === '`') {
                    $this->status = 0;
                }
                $this->current .= $this->query[$i];
                continue;
            } elseif (($this->status === static::STATUS_COMMENT_BASH)
                || ($this->status === static::STATUS_COMMENT_SQL)
            ) {
                // Bash-like (#) or SQL-like (-- ) comments end in new line.
                if ($this->query[$i] === "\n") {
                    $this->status = 0;
                }
                $this->current .= $this->query[$i];
                continue;
            } elseif ($this->status === static::STATUS_COMMENT_C) {
                // C-like comments end in */.
                if (($this->query[$i - 1] === '*') && ($this->query[$i] === '/')) {
                    $this->status = 0;
                }
                $this->current .= $this->query[$i];
                continue;
            }

            /*
             * Checking if a string started.
             */
            if ($this->query[$i] === '\'') {
                $this->status = static::STATUS_STRING_SINGLE_QUOTES;
                $this->current .= $this->query[$i];
                continue;
            } elseif ($this->query[$i] === '"') {
                $this->status = static::STATUS_STRING_DOUBLE_QUOTES;
                $this->current .= $this->query[$i];
                continue;
            } elseif ($this->query[$i] === '`') {
                $this->status = static::STATUS_STRING_BACKTICK;
                $this->current .= $this->query[$i];
                continue;
            }

            /*
             * Checking if a comment started.
             */
            if ($this->query[$i] === '#') {
                $this->status = static::STATUS_COMMENT_BASH;
                $this->current .= $this->query[$i];
                continue;
            } elseif ($i + 2 < $len) {
                if (($this->query[$i] === '-')
                    && ($this->query[$i + 1] === '-')
                    && Context::isWhitespace($this->query[$i + 2])) {
                    $this->status = static::STATUS_COMMENT_SQL;
                    $this->current .= $this->query[$i];
                    continue;
                } elseif (($this->query[$i] === '/')
                    && ($this->query[$i + 1] === '*')
                    && ($this->query[$i + 2] !== '!')) {
                    // `/*!` starts an executable (conditional) comment,
                    // which must not be stripped, so it is excluded here.
                    $this->status = static::STATUS_COMMENT_C;
                    $this->current .= $this->query[$i];
                    continue;
                }
            }

            /*
             * Handling `DELIMITER` statement.
             *
             * The code below basically checks for
             * `strtoupper(substr($this->query, $i, 9)) === 'DELIMITER'`
             *
             * This optimization makes the code about 3 times faster.
             *
             * `DELIMITER` is not being considered a keyword. The only context
             * it has a special meaning is when it is the beginning of a
             * statement. This is the reason for the last condition.
             */
            if (($i + 9 < $len)
                && (($this->query[$i] === 'D') || ($this->query[$i] === 'd'))
                && (($this->query[$i + 1] === 'E') || ($this->query[$i + 1] === 'e'))
                && (($this->query[$i + 2] === 'L') || ($this->query[$i + 2] === 'l'))
                && (($this->query[$i + 3] === 'I') || ($this->query[$i + 3] === 'i'))
                && (($this->query[$i + 4] === 'M') || ($this->query[$i + 4] === 'm'))
                && (($this->query[$i + 5] === 'I') || ($this->query[$i + 5] === 'i'))
                && (($this->query[$i + 6] === 'T') || ($this->query[$i + 6] === 't'))
                && (($this->query[$i + 7] === 'E') || ($this->query[$i + 7] === 'e'))
                && (($this->query[$i + 8] === 'R') || ($this->query[$i + 8] === 'r'))
                && Context::isWhitespace($this->query[$i + 9])
            ) {
                // Saving the current index to be able to revert any parsing
                // done in this block.
                $iBak = $i;
                $i += 9; // Skipping `DELIMITER`.

                // Skipping whitespaces.
                while (($i < $len) && Context::isWhitespace($this->query[$i])) {
                    ++$i;
                }

                // Parsing the delimiter.
                $delimiter = '';
                while (($i < $len) && (! Context::isWhitespace($this->query[$i]))) {
                    $delimiter .= $this->query[$i++];
                }

                // Checking if the delimiter definition ended.
                if (($delimiter !== '')
                    && ((($i < $len) && Context::isWhitespace($this->query[$i]))
                    || (($i === $len) && $end))
                ) {
                    // Saving the delimiter.
                    $this->setDelimiter($delimiter);

                    // Whether this statement should be returned or not.
                    $ret = '';
                    if (! empty($this->options['parse_delimiter'])) {
                        // Appending the `DELIMITER` statement that was just
                        // found to the current statement.
                        $ret = trim(
                            $this->current . ' ' . substr($this->query, $iBak, $i - $iBak)
                        );
                    }

                    // Removing the statement that was just extracted from the
                    // query.
                    $this->query = substr($this->query, $i);
                    $i = 0;

                    // Resetting the current statement.
                    $this->current = '';

                    return $ret;
                }

                // Incomplete statement. Reverting.
                $i = $iBak;

                return false;
            }

            /*
             * Checking if the current statement finished.
             *
             * The first letter of the delimiter is being checked as an
             * optimization. This code is almost as fast as the one above.
             *
             * There is no point in checking if two strings match if not even
             * the first letter matches.
             */
            if (($this->query[$i] === $this->delimiter[0])
                && (($this->delimiterLen === 1)
                || (substr($this->query, $i, $this->delimiterLen) === $this->delimiter))
            ) {
                // Saving the statement that just ended.
                $ret = $this->current;

                // If needed, adds a delimiter at the end of the statement.
                if (! empty($this->options['add_delimiter'])) {
                    $ret .= $this->delimiter;
                }

                // Removing the statement that was just extracted from the
                // query.
                $this->query = substr($this->query, $i + $this->delimiterLen);
                $i = 0;

                // Resetting the current statement.
                $this->current = '';

                // Returning the statement.
                return trim($ret);
            }

            /*
             * Appending current character to current statement.
             */
            $this->current .= $this->query[$i];
        }

        if ($end && ($i === $len)) {
            // If the end of the buffer was reached, the buffer is emptied and
            // the current statement that was extracted is returned.
            $ret = $this->current;

            // Emptying the buffer.
            $this->query = '';
            $i = 0;

            // Resetting the current statement.
            $this->current = '';

            // Returning the statement.
            return trim($ret);
        }

        return '';
    }
}
|
#!/bin/bash
# Accepts the Azure Marketplace terms for a given offer/plan so that it can
# be deployed programmatically. Replace the <TODO> placeholders before use.
set -euo pipefail

subscriptionId="<TODO>" # Azure subscription ID
publisherId="<TODO>"    # Marketplace publisher identifier
offerId="<TODO>"        # Marketplace offer identifier
planId="<TODO>"         # Plan (SKU) within the offer

# Quote every expansion so values containing spaces or glob characters are
# passed to the helper script intact (the originals were unquoted).
./AcceptAzureMarketplaceTerms.sh -i "$subscriptionId" -p "$publisherId" -o "$offerId" -n "$planId"
|
module Ruhoh::Resources::Notes
  # Previewer for the "notes" resource.
  #
  # Intentionally empty: notes are previewed exactly like pages, so all
  # behaviour is inherited from the pages previewer.
  class Previewer < Ruhoh::Resources::Pages::Previewer
  end
end
|
# frozen_string_literal: true

FactoryBot.define do
  # Define your Spree extension factories within this file so applications
  # and other extensions can use and override them.
  #
  # Example: adding this to your spec_helper will load these factories:
  # require 'spree_sale_prices/factories'

  # A sale price attached to a random-currency price; disabled by default.
  factory :sale_price, class: Spree::SalePrice do
    value { 10.90 }
    start_at { nil }
    end_at { nil }
    enabled { false }
    calculator { Spree::Calculator::FixedAmountSalePriceCalculator.new }
    association :price, factory: :international_price

    # A sale that is currently running (started now, enabled, no end date).
    factory :active_sale_price do
      start_at { Time.now }
      enabled { true }
    end
  end

  # A price in a randomly chosen ISO currency.
  factory :international_price, parent: :price do
    currency { Money::Currency.all.map(&:iso_code).sample }
  end

  factory :eur_price, parent: :price do
    currency { 'EUR' }
  end

  factory :usd_price, parent: :price do
    currency { 'USD' }
  end

  # A variant carrying both a EUR and a USD price.
  factory :multi_price_variant, parent: :variant do
    after(:create) do |variant, _evaluator|
      create(:eur_price, variant: variant)
      create(:usd_price, variant: variant)
      # Reload so the freshly created prices are visible on the association.
      variant.reload
    end
  end
end
|
<?php
namespace App\Models;
use App\Traits\OwnerConfig;
use App\Enums\PaymentStatus;
use App\Traits\EloquentHelpers;
use Illuminate\Database\Eloquent\Builder;
class Payment extends Model
{
    use OwnerConfig, EloquentHelpers;

    /**
     * Name of the custom model event ('pay' / 'unpay') currently being
     * fired; used by getDescriptionForEvent() to label activity-log entries.
     *
     * @var string|null
     */
    private $payEvent;

    protected $fillable = [
        'price',
        'installment',
        // 'status',
        'payment_date',
        // 'receipt_date',
    ];

    protected $casts = [
        'price' => 'double',
        'payment_date' => 'date',
    ];

    /**
     * Custom events observers may subscribe to.
     */
    protected $observables = [
        'pay',
        'unpay',
    ];

    /**
     * Returns the label used in the activity log for the given event,
     * preferring the custom pay/unpay event name when one is active.
     */
    public function getDescriptionForEvent(string $eventName): string
    {
        return $this->payEvent ?: $eventName;
    }

    /**
     * Whether the payment has been received (a receipt date is set).
     */
    public function isPaid()
    {
        return (bool) $this->receipt_date;
    }

    public function getInstallment()
    {
        return $this->installment;
    }

    public function getDebt()
    {
        return $this->debt;
    }

    /**
     * Returns the payment amount, optionally formatted using pt-BR
     * separators (e.g. 1.234,56).
     *
     * @param bool $format
     * @return string|float
     */
    public function getPrice(bool $format = false)
    {
        if ($format) {
            return number_format($this->price, 2, ',', '.');
        }
        return $this->price;
    }

    /**
     * Marks the payment as paid today and fires the 'pay' event.
     */
    public function pay()
    {
        $this->applyStatus(PaymentStatus::PAGO, date('Y-m-d'), 'pay');
    }

    /**
     * Reverts the payment to pending and fires the 'unpay' event.
     */
    public function unpay()
    {
        $this->applyStatus(PaymentStatus::PENDENTE, null, 'unpay');
    }

    /**
     * Shared implementation for pay()/unpay() (previously duplicated):
     * updates status and receipt date, persists the model, then fires the
     * matching custom event.
     *
     * @param mixed       $status      new PaymentStatus value
     * @param string|null $receiptDate receipt date (Y-m-d) or null to clear
     * @param string      $event       custom event name ('pay' or 'unpay')
     */
    private function applyStatus($status, $receiptDate, string $event)
    {
        $this->status = $status;
        $this->receipt_date = $receiptDate;
        $this->payEvent = $event;
        // Save first so observers of the custom event see the persisted state.
        $this->save();
        $this->fireModelEvent($this->payEvent, false);
    }

    public function debtor()
    {
        return $this->belongsTo(Debtor::class);
    }

    public function debt()
    {
        return $this->belongsTo(Debt::class);
    }

    /**
     * Query scope limiting results to payments already received.
     */
    public function scopePayed(Builder $query)
    {
        $query->where('status', PaymentStatus::PAGO);
    }
}
|
/**
 * Prints the two demo values back to back (no separator or newline).
 *
 * Fix: the values are never reassigned, so they are declared as read-only
 * `val`s instead of mutable `var`s, per Kotlin convention.
 */
fun main(args: Array<String>) {
    val x = 25
    val y: String = "Game"
    print(x)
    print(y)
}
|
<?php
/*
 * Front controller: boots environment variables, matches the request path
 * against the route table and dispatches to the resolved controller action.
 */

use Symfony\Component\Dotenv\Dotenv;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\Routing\Exception\ResourceNotFoundException;
use Symfony\Component\Routing\Matcher\UrlMatcher;
use Symfony\Component\Routing\RequestContext;

require_once(__DIR__ . '/../vendor/autoload.php');

// Load .env (and overrides) into the environment.
(new Dotenv())->usePutenv()->bootEnv(dirname(__DIR__).'/.env');

$request = Request::createFromGlobals();
$routes = include __DIR__ . '/../routes.php';

// Build the URL matcher from the current request's context.
$context = new RequestContext();
$context->fromRequest($request);
$matcher = new UrlMatcher($routes, $context);

try {
    // Imports the matched route attributes (incl. `_controller`) as local
    // variables; EXTR_SKIP never overwrites variables that already exist.
    extract($matcher->match($request->getPathInfo()), EXTR_SKIP);
    ob_start();
    /** @var string $_controller */
    // `_controller` is expected in "ClassName::action" form.
    $controllerParts = explode('::', $_controller);
    $controller = "\\Fontebasso\\Weather\\Controllers\\" . $controllerParts[0];
    $action = $controllerParts[1];
    $response = (new $controller())->$action($request);
} catch (ResourceNotFoundException $exception) {
    $response = new Response('Not Found', 404);
} catch (Exception $exception) {
    $response = new Response('An error occurred', 500);
}
$response->send();
|
const Employee = require("../lib/employee");

describe("Employee", () => {
  // Fixture data shared by every test below.
  const NAME = "Brad";
  const ID = 123;
  const EMAIL = "bodell94@yahoo.com";

  // Builds a fresh Employee per test so no state leaks between cases.
  // (Previously the same constructor call was duplicated in all five tests.)
  const makeEmployee = () => new Employee(NAME, ID, EMAIL);

  describe("init", () => {
    it("should create an instance of an employee when we call it with the new keyword", () => {
      const employee = makeEmployee();
      expect(employee.name).toEqual(NAME);
      expect(employee.id).toEqual(ID);
      expect(employee.email).toEqual(EMAIL);
    });
  });

  describe("getName", () => {
    it("should get the name of the employee that was created", () => {
      expect(makeEmployee().getName()).toEqual(NAME);
    });
  });

  describe("getId", () => {
    it("should get the ID of the employee that was created", () => {
      expect(makeEmployee().getId()).toEqual(ID);
    });
  });

  describe("getEmail", () => {
    it("should get the email of the employee that was created", () => {
      expect(makeEmployee().getEmail()).toEqual(EMAIL);
    });
  });

  describe("getRole", () => {
    it("should get the role of the employee that was created", () => {
      // Base class role is always "Employee"; subclasses override getRole.
      expect(makeEmployee().getRole()).toEqual("Employee");
    });
  });
});
|
/*
* Copyright (c) 2013 Functional Streams for Scala
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package fs2
package io
package file
import cats.Show
import cats.kernel.Hash
import cats.kernel.Monoid
import cats.kernel.Order
/** Cross-platform operations available on a path; implemented by [[Path]].
  * All operations are pure: each returns a new path and never touches the
  * file system.
  */
private[file] trait PathApi {

  /** Joins the given path segments together using the platform-specific separator as
    * a delimiter, then normalizes the resulting path.
    */
  def /(name: String): Path

  /** Joins the given path segments together using the platform-specific separator as
    * a delimiter, then normalizes the resulting path.
    */
  def /(path: Path): Path

  /** Resolve the given path against this path. */
  def resolve(name: String): Path

  /** Resolve the given path against this path. */
  def resolve(path: Path): Path

  /** Resolves the given path against this path's parent path. */
  def resolveSibling(name: String): Path

  /** Resolves the given path against this path's parent path. */
  def resolveSibling(path: Path): Path

  /** Constructs a relative path between this path and a given path. */
  def relativize(path: Path): Path

  /** Returns a path that is this path with redundant name elements eliminated. */
  def normalize: Path

  /** Tells whether or not this path is absolute. */
  def isAbsolute: Boolean

  /** Returns a Path object representing the absolute path of this path. */
  def absolute: Path

  /** Returns the name elements in the path. */
  def names: Seq[Path]

  /** Returns the name of the file or directory denoted by this path as a Path object.
    * The file name is the farthest element from the root in the directory hierarchy.
    */
  def fileName: Path

  /** Returns the extension of the path, from the last occurrence of the . (period)
    * character to end of string in the last portion of the path.
    * If there is no . in the last portion of the path, or if there are no . characters
    * other than the first character of the filename of path, an empty string is returned.
    */
  def extName: String

  /** Returns the parent path, or None if this path does not have a parent. */
  def parent: Option[Path]

  /** Tests if this path starts with the given path.
    * This path starts with the given path if this path's root component
    * starts with the root component of the given path,
    * and this path starts with the same name elements as the given path.
    * If the given path has more name elements than this path then false is returned.
    */
  def startsWith(path: String): Boolean

  /** Tests if this path starts with the given path.
    * This path starts with the given path if this path's root component
    * starts with the root component of the given path,
    * and this path starts with the same name elements as the given path.
    * If the given path has more name elements than this path then false is returned.
    */
  def startsWith(path: Path): Boolean

  /** Tests if this path ends with the given path.
    * If the given path has N elements, and no root component, and this path has N or more elements,
    * then this path ends with the given path if the last N elements of each path,
    * starting at the element farthest from the root, are equal.
    */
  def endsWith(path: String): Boolean

  /** Tests if this path ends with the given path.
    * If the given path has N elements, and no root component, and this path has N or more elements,
    * then this path ends with the given path if the last N elements of each path,
    * starting at the element farthest from the root, are equal.
    */
  def endsWith(path: Path): Boolean

  /** Renders the path as a platform-formatted string. */
  def toString: String
}
/** Constructor and type class instances for [[Path]]. */
private[file] trait PathCompanionApi {

  /** Creates a path from a platform-formatted string. */
  def apply(path: String): Path

  /** `Monoid`/`Order`/`Hash`/`Show` instances for [[Path]]: the monoid's
    * empty value is the empty path and `combine` resolves the right path
    * against the left (via `/`); ordering compares string renderings.
    */
  implicit def instances: Monoid[Path] with Order[Path] with Hash[Path] with Show[Path] = algebra

  private object algebra extends Monoid[Path] with Order[Path] with Hash[Path] with Show[Path] {
    val empty: Path = Path("")
    def combine(x: Path, y: Path): Path = x / y
    def compare(x: Path, y: Path): Int = x.toString.compare(y.toString)
    def hash(x: Path): Int = x.hashCode()
    def show(t: Path): String = t.toString
  }
}
|
import { fromJS, Map } from 'immutable';
import mapPageReducer, { initialState } from '../reducer';
import { mapPageActions } from '../actions';
import {
SET_INITIAL_LOCATION,
SET_PAGE_ERROR,
SET_LOADING,
} from '../constants';
const acts = mapPageActions(result => result);
const initState = initialState.toJS();
const completed = {
0: true,
1: false,
2: false,
};
describe('mapPageReducer', () => {
  let state;

  // Fresh immutable state per spec.
  beforeEach(() => {
    state = fromJS(initState);
  });

  it('should handle setInitialLocation action correctly', () => {
    const newState = mapPageReducer(state, acts.setInitialLocation());
    expect(newState.get('loading')).toEqual(true);
  });

  it('should handle addPersonStart action correctly', () => {
    const newState = mapPageReducer(state, acts.addPersonStart());
    expect(newState.get('addingPerson')).toEqual(true);
  });

  it('should set page error correctly', () => {
    const error = { foo: 'bar' };
    const newState = mapPageReducer(state, { type: SET_PAGE_ERROR, error });
    expect(newState.get('error').toJS()).toEqual(error);
  });

  it('should set loading correctly', () => {
    const value = true;
    const newState = mapPageReducer(state, { type: SET_LOADING, value });
    expect(newState.get('loading')).toEqual(value);
  });

  it('should handle savePersonData action correctly', () => {
    const newState = mapPageReducer(state, acts.savePersonData());
    expect(newState.get('addingPerson')).toEqual(false);
    expect(newState.get('activeStep')).toEqual(0);
    expect(newState.get('people').size).toEqual(1);
  });

  it('should set initial location correctly', () => {
    const coords = [5, 5];
    const newState = mapPageReducer(state, {
      type: SET_INITIAL_LOCATION,
      coords,
    });
    expect(newState.get('loading')).toEqual(false);
    expect(newState.get('initialLocationLoaded')).toEqual(true);
    expect(newState.get('initialLocation').toJS()).toEqual(coords);
  });

  // BUG FIX: these two specs previously shared (and mutated) the
  // module-level `completed` object, making the suite order-dependent.
  // Each spec now states its expected map inline.
  it('should move to a step(forward) correctly', () => {
    const newState = mapPageReducer(state, acts.moveToStep(1));
    expect(newState.get('activeStep')).toEqual(1);
    expect(newState.get('completed').toJS()).toEqual({
      0: true,
      1: false,
      2: false,
    });
  });

  it('should move to a step(backward) correctly', () => {
    const newState = mapPageReducer(
      state.set('activeStep', 2),
      acts.moveToStep(0),
    );
    expect(newState.get('activeStep')).toEqual(0);
    expect(newState.get('completed').toJS()).toEqual({
      0: false,
      1: false,
      2: false,
    });
  });

  it('should handle handleFormChange action correctly', () => {
    const key = 'foo';
    const value = 'bar';
    const newState = mapPageReducer(state, acts.handleFormChange(key, value));
    expect(newState.get('newPerson').toJS()[key]).toEqual(value);
  });

  it('should handle handleNewPersonPositionChange action correctly', () => {
    const data = { target: { _latlng: { lat: 5, lng: 5 } } };
    const loc = [data.target._latlng.lat, data.target._latlng.lng]; // eslint-disable-line
    const newState = mapPageReducer(
      state,
      acts.handleNewPersonPositionChange(data),
    );
    expect(newState.get('newPerson').get('location')).toEqual(loc);
  });

  it('should cancel add correctly', () => {
    const addingState = state.set('activeStep', 3).set('addingPerson', true);
    const newState = mapPageReducer(addingState, acts.cancelAdd());
    expect(newState.get('completed')).toEqual(Map());
    expect(newState.get('activeStep')).toEqual(0);
    expect(newState.get('addingPerson')).toEqual(false);
  });

  it('should update person correctly', () => {
    const key = 'testProp';
    const val = 'testValue';
    const newState = mapPageReducer(state, acts.handlePersonUpdate(key, val));
    const personProp = newState
      .get('people')
      .get(state.get('personCurrentlyEditing'))
      .get(key);
    expect(personProp).toEqual(val);
  });

  it('should set save visit correctly', () => {
    let newState = mapPageReducer(state, acts.savePersonData());
    newState = newState.set('personCurrentlyEditing', 1);
    const visitData = { found: true, note: 'awesome' };
    newState = mapPageReducer(newState, acts.saveVisit(visitData));
    const latestVisit = newState
      .get('people')
      .get(1)
      .get('visits')
      .toJS()
      .pop();
    // The visit date is stamped by the reducer; drop it before comparing.
    delete latestVisit.date;
    expect(latestVisit).toEqual(visitData);
  });

  it('should handle person click correctly', () => {
    const index = 1;
    const newState = mapPageReducer(state, acts.handlePersonClick(index));
    expect(newState.get('personCurrentlyEditing')).toEqual(index);
  });

  it('should toggle addingVisit correctly', () => {
    const newState = mapPageReducer(state, acts.toggleAddingVisit());
    expect(newState.get('addingVisit')).toEqual(true);
  });

  it('should set addingVisit=false when popup closes', () => {
    const newState = mapPageReducer(state, acts.onPopupClose());
    expect(newState.get('addingVisit')).toEqual(false);
  });
});
|
package com.smict.schedule.action;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.apache.struts2.ServletActionContext;
import com.opensymphony.xwork2.ActionSupport;
import com.smict.auth.AuthModel;
import com.smict.person.data.BranchData;
import com.smict.person.data.DoctorData;
import com.smict.person.data.TreatmentRoomData;
import com.smict.person.model.BranchModel;
import com.smict.person.model.DoctorModel;
import com.smict.person.model.TreatmentRoomModel;
import com.smict.schedule.data.ScheduleData;
import com.smict.schedule.model.ScheduleModel;
import ldc.util.Auth;
import ldc.util.DateUtil;
@SuppressWarnings("serial")
public class ScheduleAction extends ActionSupport{

    /**
     * MODEL
     */
    private DoctorModel doctorModel;
    private BranchModel branchModel;
    private TreatmentRoomModel treatmentRoomModel;
    private ScheduleModel schModel;
    private AuthModel authModel = new AuthModel();

    /**
     * DATA access objects.
     */
    private DoctorData doctorData = new DoctorData();
    private BranchData branchData = new BranchData();
    private TreatmentRoomData treatmentRoomData = new TreatmentRoomData();
    private ScheduleData schData = new ScheduleData();

    /**
     * Dropdown maps/lists rendered by the views.
     */
    private HashMap<String, String> doctorMap = new HashMap<String, String>();
    private HashMap<String, String> trMap = new HashMap<String, String>();
    private List<DoctorModel> doctorList = new ArrayList<DoctorModel>();
    private List<TreatmentRoomModel> trList = new ArrayList<TreatmentRoomModel>();

    /** Request parameters bound by Struts. */
    private int workDayId, branchId;
    private String method;

    /**
     * Every request through this action requires an authenticated session.
     */
    public ScheduleAction(){
        Auth.authCheck(false);
    }

    /**
     * Prepares the dentist-schedule form by loading the doctor and
     * treatment-room dropdown data.
     */
    public String dentistScheduleForm(){
        getDoctorDropDown();
        getTreatmentRoomDropDown();
        return SUCCESS;
    }

    /**
     * Checks a schedule in or out depending on the {@code method} request
     * parameter ("in" or "out").
     */
    public String scheduleCheckingInOut(){
        int rec = 0;
        schModel = new ScheduleModel();
        schModel.setBranchId(branchId);
        schModel.setWorkDayId(workDayId);

        HttpServletRequest request = ServletActionContext.getRequest();
        String[] empId = request.getParameterValues("chkEmpId");

        // Null-safe comparison: `method` may be absent from the request.
        // (The original called method.equals(...), risking an NPE.)
        if("in".equals(method)){
            rec = schData.scheduleCheckingIn(schModel);
            schData.EmpCheckingIn(schModel, empId);
        }else if("out".equals(method)){
            rec = schData.scheduleCheckingOut(schModel);
        }

        if(rec > 0){
            // Report the operation that actually ran; the original always
            // said "Checkin' in success", even for a check-out.
            addActionMessage("in".equals(method)
                    ? "Checking in success"
                    : "Checking out success");
        }else{
            addActionMessage("Data not found.");
        }

        // Refresh the treatment-room list shown on the resulting page.
        getTreatmentRoomDropDown();
        return SUCCESS;
    }

    /**
     * Inserts a new dentist schedule for the logged-in user's branch with
     * default (not-yet-checked-in) status, then resets the form.
     */
    public String addDentistSchedule(){
        // Branch comes from the authenticated session, not the request.
        schModel.setBranchId(Integer.valueOf(Auth.user().getBranchCode()));

        // Defaults: not checked in, placeholder timestamps.
        schModel.setCheckInStatus("0");
        schModel.setCheckInDateTime("0000-00-00 00:00:01");
        schModel.setCheckOutDateTime("0000-00-00 00:00:01");

        schData.insertDentistSchedule(schModel);

        // Reset the form model and reload dropdowns for the next entry.
        schModel = new ScheduleModel();
        getDoctorDropDown();
        getTreatmentRoomDropDown();
        addActionMessage("Add dentist's schedule success!");
        return INPUT;
    }

    /**
     * Struts validation hook for {@link #addDentistSchedule()}: verifies the
     * date/time fields, the time range ordering and overlap with existing
     * schedules.
     */
    public void validateAddDentistSchedule(){
        String msg = "";
        DateUtil dateUtl = new DateUtil();

        // Concatenate date + time into the datetime fields used downstream.
        schModel.setStartDateTime(schModel.getWorkDate() + " " + schModel.getStartTime());
        schModel.setEndDateTime(schModel.getWorkDate() + " " + schModel.getEndTime());

        if(schModel.getWorkDate() == null || schModel.getWorkDate().equals("")){
            msg = "Please fill working date.";
        }
        if(schModel.getStartTime() == null || schModel.getStartTime().equals("")){
            msg += "Please fill start working time.";
        }
        if(schModel.getEndTime() == null || schModel.getEndTime().equals("")){
            msg += "Please fill end working time.";
        }

        // Only compute the duration once the required fields are present;
        // a negative diff means end precedes start.
        if(msg.equals("")){
            schModel.setWorkHour(dateUtl.getMinutesDiff(
                    schModel.getStartDateTime(),
                    schModel.getEndDateTime()
            ));
            if(schModel.getWorkHour() < 0){
                msg += "Your range of time was wrong!";
            }
        }

        // Reject ranges overlapping an existing schedule.
        if(schData.findOverlapTimeRange(schModel)){
            msg += "Your time range is overlapping with the other range!";
        }

        if(!msg.equals("")){
            // Repopulate dropdowns so the redisplayed form still renders.
            getDoctorDropDown();
            getTreatmentRoomDropDown();
            addActionError(msg);
        }
    }

    /**
     * Loads the treatment rooms of the logged-in user's branch into
     * {@link #trMap} for the room dropdown.
     *
     * @author anubissmile
     */
    public void getTreatmentRoomDropDown(){
        trList = treatmentRoomData.findRoomByBranchCode(Auth.user().getBranchCode());
        for(TreatmentRoomModel tm : trList){
            trMap.put(String.valueOf(tm.getRoom_id()), tm.getRoom_name());
        }
    }

    /**
     * Loads all dentists into {@link #doctorMap} for the doctor dropdown.
     *
     * @author anubissmile
     */
    public void getDoctorDropDown(){
        doctorList = doctorData.getDentistList(null);
        for(DoctorModel dm : doctorList){
            doctorMap.put(Integer.valueOf(dm.getDoctorID()).toString(), dm.getFirstname_th() + " " + dm.getLastname_th());
        }
    }

    /**
     * GETTER & SETTER ZONE.
     */
    public DoctorModel getDoctorModel() {
        return doctorModel;
    }

    public void setDoctorModel(DoctorModel doctorModel) {
        this.doctorModel = doctorModel;
    }

    public BranchModel getBranchModel() {
        return branchModel;
    }

    public void setBranchModel(BranchModel branchModel) {
        this.branchModel = branchModel;
    }

    public TreatmentRoomModel getTreatmentRoomModel() {
        return treatmentRoomModel;
    }

    public void setTreatmentRoomModel(TreatmentRoomModel treatmentRoomModel) {
        this.treatmentRoomModel = treatmentRoomModel;
    }

    public HashMap<String, String> getDoctorMap() {
        return doctorMap;
    }

    public void setDoctorMap(HashMap<String, String> doctorMap) {
        this.doctorMap = doctorMap;
    }

    public HashMap<String, String> getTrMap() {
        return trMap;
    }

    public void setTrMap(HashMap<String, String> trMap) {
        this.trMap = trMap;
    }

    public List<DoctorModel> getDoctorList() {
        return doctorList;
    }

    public void setDoctorList(List<DoctorModel> doctorList) {
        this.doctorList = doctorList;
    }

    public List<TreatmentRoomModel> getTrList() {
        return trList;
    }

    public void setTrList(List<TreatmentRoomModel> trList) {
        this.trList = trList;
    }

    /**
     * @return the schModel
     */
    public ScheduleModel getSchModel() {
        return schModel;
    }

    /**
     * @param schModel the schModel to set
     */
    public void setSchModel(ScheduleModel schModel) {
        this.schModel = schModel;
    }

    public int getWorkDayId() {
        return workDayId;
    }

    public void setWorkDayId(int workDayId) {
        this.workDayId = workDayId;
    }

    public int getBranchId() {
        return branchId;
    }

    public void setBranchId(int branchId) {
        this.branchId = branchId;
    }

    public String getMethod() {
        return method;
    }

    public void setMethod(String method) {
        this.method = method;
    }
}
|
# Nullcraft
#===========================
# TODO
#===========================
#1. Get Growth Blocks Working...
#2. Multiple Tiers, Single Effect
#3. Testing Lag and using Extremes
#4. Start on early game content
#5. undetermined...
|
require 'uri'
require 'hocon/impl'
require 'hocon/impl/origin_type'
# Ruby port of HOCON's SimpleConfigOrigin: an immutable description of where
# a config value came from (file, line range, URL, attached comments).
class Hocon::Impl::SimpleConfigOrigin
  MERGE_OF_PREFIX = "merge of "

  # Builds an origin for a value read from a file.
  def self.new_file(file_path)
    url = URI.join('file:///', file_path)
    self.new(file_path, -1, -1,
             Hocon::Impl::OriginType::FILE,
             url, nil)
  end

  # Builds an origin carrying only a human-readable description.
  def self.new_simple(description)
    self.new(description, -1, -1,
             Hocon::Impl::OriginType::GENERIC,
             nil, nil)
  end

  # Strips the "merge of " prefix from a description, if present.
  def self.remove_merge_of_prefix(desc)
    if desc.start_with?(MERGE_OF_PREFIX)
      desc = desc[MERGE_OF_PREFIX.length..-1]
    end
    desc
  end

  # Combines two origins into one describing their merge.
  def self.merge_two(a, b)
    merged_desc = nil
    merged_start_line = nil
    merged_end_line = nil
    merged_comments = nil

    merged_type =
      if a.origin_type == b.origin_type
        a.origin_type
      else
        # BUG FIX: was `OriginType.GENERIC` (a method call that raises
        # NoMethodError); GENERIC is a constant on the module.
        Hocon::Impl::OriginType::GENERIC
      end

    # first use the "description" field which has no line numbers
    # cluttering it.
    a_desc = remove_merge_of_prefix(a.description)
    b_desc = remove_merge_of_prefix(b.description)

    if a_desc == b_desc
      merged_desc = a_desc
      if a.line_number < 0
        merged_start_line = b.line_number
      elsif b.line_number < 0
        merged_start_line = a.line_number
      else
        merged_start_line = [a.line_number, b.line_number].min
      end
      merged_end_line = [a.end_line_number, b.end_line_number].max
    else
      # The descriptions differ, so we have to cram both into one string.
      # NOTE(review): the Java original uses the line-numbered description()
      # here rather than the bare description field — TODO confirm parity.
      a_full = remove_merge_of_prefix(a.description)
      b_full = remove_merge_of_prefix(b.description)
      merged_desc = "#{MERGE_OF_PREFIX}#{a_full},#{b_full}"
      merged_start_line = -1
      merged_end_line = -1
    end

    merged_url =
      if Hocon::Impl::ConfigImplUtil.equals_handling_nil?(a.url_or_nil, b.url_or_nil)
        a.url_or_nil
      else
        nil
      end

    if Hocon::Impl::ConfigImplUtil.equals_handling_nil?(a.comments_or_nil, b.comments_or_nil)
      merged_comments = a.comments_or_nil
    else
      merged_comments = []
      if a.comments_or_nil
        merged_comments.concat(a.comments_or_nil)
      end
      if b.comments_or_nil
        merged_comments.concat(b.comments_or_nil)
      end
    end

    Hocon::Impl::SimpleConfigOrigin.new(
        merged_desc, merged_start_line, merged_end_line,
        merged_type, merged_url, merged_comments)
  end

  # Merges three origins left to right.
  # (Added: the original called merge_three without ever defining it.)
  def self.merge_three(a, b, c)
    merge_two(merge_two(a, b), c)
  end

  # Collapses a whole stack of origins into a single merged origin.
  def self.merge_origins(stack)
    if stack.empty?
      raise ConfigBugError, "can't merge empty list of origins"
    elsif stack.length == 1
      stack[0]
    elsif stack.length == 2
      merge_two(stack[0], stack[1])
    else
      remaining = stack.clone
      while remaining.length > 2
        # BUG FIX: the original merged the FIRST three entries but popped
        # three entries off the END and discarded the merged result,
        # producing wrong answers for stacks of 3+. Replace the first
        # three entries with their merge instead.
        merged = merge_three(remaining[0], remaining[1], remaining[2])
        remaining.shift
        remaining.shift
        remaining.shift
        remaining.unshift(merged)
      end
      # should be down to either 1 or 2
      merge_origins(remaining)
    end
  end

  # @param description [String] human-readable source description (required)
  # @param line_number [Integer] first line, or -1 when unknown
  # @param end_line_number [Integer] last line, or -1 when unknown
  # @param origin_type [Integer] one of Hocon::Impl::OriginType
  # @param url [URI, nil] source URL, if any
  # @param comments [Array<String>, nil] comments attached to the value
  def initialize(description, line_number, end_line_number,
                 origin_type, url, comments)
    if !description
      raise ArgumentError, "description may not be nil"
    end
    @description = description
    @line_number = line_number
    @end_line_number = end_line_number
    @origin_type = origin_type
    @url_or_nil = url
    @comments_or_nil = comments
  end

  attr_reader :description, :line_number, :end_line_number, :origin_type,
              :url_or_nil, :comments_or_nil

  # Returns self when the line number is unchanged; otherwise a copy with
  # both start and end line set to +line_number+.
  def set_line_number(line_number)
    if (line_number == @line_number) and
        (line_number == @end_line_number)
      self
    else
      Hocon::Impl::SimpleConfigOrigin.new(
          @description, line_number, line_number,
          @origin_type, @url_or_nil, @comments_or_nil)
    end
  end

  # Returns self when comments are unchanged; otherwise a copy carrying
  # the given comments.
  def set_comments(comments)
    if Hocon::Impl::ConfigImplUtil.equals_handling_nil?(comments, @comments_or_nil)
      self
    else
      Hocon::Impl::SimpleConfigOrigin.new(
          @description, @line_number, @end_line_number,
          @origin_type, @url_or_nil, comments)
    end
  end

  # Returns a copy whose comments are +comments+ followed by the existing
  # comments (no-op when equal or when there were none).
  def prepend_comments(comments)
    if Hocon::Impl::ConfigImplUtil.equals_handling_nil?(comments, @comments_or_nil)
      self
    elsif @comments_or_nil.nil?
      set_comments(comments)
    else
      merged = []
      merged.concat(comments)
      merged.concat(@comments_or_nil)
      set_comments(merged)
    end
  end

  # Comments attached to this origin; never nil.
  def comments
    @comments_or_nil || []
  end
end
|
<section class="circles reputation-circles">
	<div class="container">
		<div class="row">
			<div class="col flex-col circle-col">
				<div class="line line-1">
					<?php hm_get_template_part( 'template-parts/progress-ring', [ 'percentage' => '94' ] ); ?>
					<p>of consumers would use a business with a 4 out of 5 Star Rating.</p>
				</div>
			</div>
			<div class="col flex-col circle-col">
				<div class="line line-2">
					<?php hm_get_template_part( 'template-parts/progress-ring', [ 'percentage' => '90' ] ); ?>
					<p>will not purchase from a company with multiple bad reviews.</p>
				</div>
			</div>
			<div class="col flex-col circle-col">
				<div class="line line-3">
					<?php hm_get_template_part( 'template-parts/progress-ring', [ 'percentage' => '86' ] ); ?>
					<p>of consumers read online reviews before making purchasing decisions.</p>
				</div>
			</div>
		</div><!-- /.row -->
	</div><!-- /.container -->
</section>
|
import type { Component } from "./types";
// Default REPL entry component; its source imports and renders Component1.
// The closing script tag is escaped ("<\/script>") so the embedded source
// survives being injected into an HTML context.
export const componentApp: Component = {
  id: 0,
  name: "App",
  type: "svelte",
  source: `<script>
import Component from './Component1.svelte';
<\/script>
<Component name={"SvelteREPL"}/>`,
};
// Simple child component with a default-valued `name` prop.
export const component1: Component = {
  id: 1,
  name: "Component1",
  type: "svelte",
  source: `<script>
export let name = "World";
</script>
<h1>Hello {name}</h1>`,
};
|
class Weechat < Formula
  desc "Extensible IRC client"
  homepage "https://www.weechat.org"
  url "https://weechat.org/files/src/weechat-3.2.tar.xz"
  sha256 "39a8adf374e80653c9dd2be06870341594ea081b3a9c3690132e556abf9d87a8"
  license "GPL-3.0-or-later"
  head "https://github.com/weechat/weechat.git"

  bottle do
    sha256 arm64_big_sur: "fbd66c10bc0224c5d21fbfb08c96f96d9ae2c014945fea2e1298ec0fff0b4030"
    sha256 big_sur:       "0261a4b52e5fd25067fc2d9af3af59090cadd466c36cd69fcf7a635fffcf0bd6"
    sha256 catalina:      "4d939d2a34065cdae47590d50b6b2aa3c7595b79af0957cb7535c7ba25ee0255"
    sha256 mojave:        "d9af6b42a994c3a8ee5632563dd5cc285395d0dcaa908f7b928ce5c1e3e845d7"
  end

  # Build-time dependencies (docs are generated with asciidoctor).
  depends_on "asciidoctor" => :build
  depends_on "cmake" => :build
  depends_on "pkg-config" => :build

  # Runtime dependencies: spell checking, TLS/crypto, and the scripting
  # language plugins (lua/perl/python/ruby).
  depends_on "aspell"
  depends_on "gettext"
  depends_on "gnutls"
  depends_on "libgcrypt"
  depends_on "lua"
  depends_on "ncurses"
  depends_on "perl"
  depends_on "python@3.9"
  depends_on "ruby"

  uses_from_macos "curl"
  uses_from_macos "tcl-tk"

  on_macos do
    depends_on "libiconv"
  end

  def install
    # Guile, JavaScript and PHP plugins are disabled; the CA bundle is
    # taken from the gnutls formula.
    args = std_cmake_args + %W[
      -DENABLE_MAN=ON
      -DENABLE_GUILE=OFF
      -DCA_FILE=#{Formula["gnutls"].pkgetc}/cert.pem
      -DENABLE_JAVASCRIPT=OFF
      -DENABLE_PHP=OFF
    ]

    # Fix error: '__declspec' attributes are not enabled
    # See https://github.com/weechat/weechat/issues/1605
    args << "-DCMAKE_C_FLAGS=-fdeclspec" if ENV.compiler == :clang

    # Fix system gem on Mojave
    ENV["SDKROOT"] = ENV["HOMEBREW_SDKROOT"]

    mkdir "build" do
      system "cmake", "..", *args
      system "make", "install", "VERBOSE=1"
    end
  end

  test do
    # Launch weechat and immediately quit: verifies the binary starts.
    system "#{bin}/weechat", "-r", "/quit"
  end
end
|
#!/usr/bin/env bash
# Builds the Maven site and publishes it to the repository's gh-pages branch.
#
# Fixes over the original: abort on the first failing command (previously a
# failed `mvn site` would still push whatever was in target/site), treat
# unset variables as errors, quote paths, and use $(...) over backticks.
set -euo pipefail

SCRIPT_DIR="$(dirname "$0")"
echo "script directory: $SCRIPT_DIR"

# Build the site from the project root.
pushd "$SCRIPT_DIR/.."
mvn site
popd

# Publish target/site as an orphan-ish commit onto gh-pages.
pushd "$SCRIPT_DIR/../target/site/"
git init
git remote add javadoc https://dazraf@github.com/dazraf/vertx-futures.git
git fetch --depth=1 javadoc gh-pages
git add --all
git commit -m "javadoc"
# Keep our freshly generated site in case of conflicts (-s ours).
git merge --no-edit -s ours remotes/javadoc/gh-pages
git push javadoc master:gh-pages
# Drop the throwaway repository so target/ stays clean.
rm -rf .git
popd
|
---
layout: post
title: "Chennai"
date: 2017-07-21 19:25:00
categories: travel
---
<div class="post-sidebar">
<h3>Places Visited</h3>
<ul>
<li><a href="https://goo.gl/maps/JHMMfRx7Zjp" target="_blank">Chennai Government Museum</a></li>
<li><a href="http://amethystchennai.com" target="_blank">Amethyst Cafe</a></li>
<li><a href="https://goo.gl/maps/yiWHVEqV6PT2" target="_blank">Vivekananda House</a></li>
</ul>
</div>
Chennai has some beautiful architecture and Marina beach promenade is quite nice to walk.
Vivekananda House is located in an old "ice factory". The building is fascinating and the little museum in it
presents a window into the life of Swami Vivekananda, the great monk of the Advaita Vedanta tradition who brought
Indian philosophy to the West.
It was also nice to have some authentic sushi at a tiny non-descript Japanese restaurant in Chennai!!
<br><br>
<img class="myImg" src="{{site.baseurl}}/assets/IMG_7269.jpg" alt=" " width="150" height="150">
<img class="myImg" src="{{site.baseurl}}/assets/IMG_7279.jpg" alt=" " width="150" height="150">
<br>
<div id="mapid" style='width: 700px; height: 400px;'></div>
<script>
makeMap(L, [13.0674, 80.2376], "Chennai", 9);
</script>
|
#!/usr/bin/env ruby
# Path to the bundled CWL example files (used by the test suites).
CWL_PATH=File.join(File.dirname(__FILE__), '..', 'examples')

# When executed directly, put lib/ on the load path and run every
# Test::Unit suite found under test/, exiting with the runner's status.
if $0 == __FILE__
  base_dir = File.expand_path(File.join(File.dirname(__FILE__), ".."))
  lib_dir = File.join(base_dir, "lib")
  test_dir = File.join(base_dir, "test")

  $LOAD_PATH.unshift(lib_dir)

  require 'test/unit'
  exit Test::Unit::AutoRunner.run(true, test_dir)
end
|
using System;
namespace UnityThirdPartySdkManager.Editor.Configs
{
    /// <summary>
    /// Root configuration for the third-party SDK manager.
    /// </summary>
    [Serializable]
    public class Config
    {
        /// <summary>
        /// Android build configuration.
        /// </summary>
        public AndroidConfig android;

        /// <summary>
        /// iOS build configuration.
        /// </summary>
        public IosConfig ios;

        /// <summary>
        /// WeChat SDK configuration.
        /// </summary>
        public WeChatConfig weChat;

        /// <summary>
        /// Baidu Maps SDK configuration.
        /// </summary>
        public BaiduMapConfig baiduMap;
    }
}
require_relative 'utils'
module CartoDB
  module Relocator
    # Consumes queued SQL statements from a Redis list and replays them
    # against the target database.
    class QueueConsumer
      include CartoDB::Relocator::Connections

      # @param params [Hash] expects :config with :dbname, :username and
      #   :redis connection options
      def initialize(params={})
        @config = params[:config]
        @dbname = @config[:dbname]
        @username = @config[:username]
      end

      # Lazily-built Redis connection.
      def redis
        @redis ||= Redis.new(@config[:redis])
      end

      # Drops the whole queue for this database.
      def empty_queue
        redis.del @dbname
      end

      # Pops queries off the Redis list and executes them until the queue
      # has been empty for +wait_for+ consecutive seconds.
      def redis_migrator_loop(wait_for=5)
        wait_for_counter = 0
        puts "REDIS QUEUE LENGTH: #{redis.llen(@dbname)}"
        while wait_for_counter < wait_for do
          puts "Reading queue.."
          # brpop blocks for up to 1 second; nil means the queue was empty.
          key = redis.brpop(@dbname, 1)
          if key == nil
            puts "Nothing read for #{wait_for_counter} seconds."
            wait_for_counter += 1
          else
            query = key[1]
            begin
              target_db.query(query)
              puts "Query ran: #{query}"
            rescue StandardError => e
              # BUG FIX: was `rescue Exception`, which also swallows
              # SystemExit/Interrupt and made the loop unkillable. Failed
              # queries are still logged and skipped (best effort).
              puts "ERROR on query #{query}: #{e}, #{e.backtrace}"
            end
            wait_for_counter = 0
          end
        end
      end
    end
  end
end
|
const ln = require('./lyrics-src/ln');
const al = require('./lyrics-src/al');
const genius = require('./lyrics-src/genius');
// Registry of supported lyrics providers.
// LN: Lyrical Nonsense
// AL: Animelyrics
const Provider = {
  LN: ln,
  AL: al,
  GENIUS: genius,
};
/**
 * Searches every given provider for lyrics matching a keyword.
 *
 * @param {String} query keyword to find lyrics
 * @param {Object[]} providers provider modules, each exposing an async
 *   `getResults(query)` that resolves to an array of results
 * @returns {Promise<Object[]>} flattened results from all providers
 * @throws {Error} when no provider returned any result
 */
const searchLyrics = async (query, providers) => {
  // Query all providers concurrently, then flatten their result arrays.
  const pending = providers.map((provider) => provider.getResults(query));
  const results = (await Promise.all(pending)).flat();
  if (results.length === 0) throw new Error('No lyrics found');
  return results;
};
// Public API: the provider registry plus the search entry point.
module.exports = { Provider, searchLyrics };
|
/// Static lookup table for the Weather Icons font: maps glyph names to
/// their Unicode code points.
class WeatherIconsUtil {
  /// Timestamp (milliseconds since epoch) recording when this map was
  /// generated.
  static const int dateTimestamp = 1614982233069;

  /// Icon name (e.g. 'wi-day-sunny') -> code point in the icon font.
  static const Map<String, int> iconMap = {
    'wi-day-sunny': 0xf00d,
    'wi-day-cloudy': 0xf002,
    'wi-day-cloudy-gusts': 0xf000,
    'wi-day-cloudy-windy': 0xf001,
    'wi-day-fog': 0xf003,
    'wi-day-hail': 0xf004,
    'wi-day-haze': 0xf0b6,
    'wi-day-lightning': 0xf005,
    'wi-day-rain': 0xf008,
    'wi-day-rain-mix': 0xf006,
    'wi-day-rain-wind': 0xf007,
    'wi-day-showers': 0xf009,
    'wi-day-sleet': 0xf0b2,
    'wi-day-sleet-storm': 0xf068,
    'wi-day-snow': 0xf00a,
    'wi-day-snow-thunderstorm': 0xf06b,
    'wi-day-snow-wind': 0xf065,
    'wi-day-sprinkle': 0xf00b,
    'wi-day-storm-showers': 0xf00e,
    'wi-day-sunny-overcast': 0xf00c,
    'wi-day-thunderstorm': 0xf010,
    'wi-day-windy': 0xf085,
    'wi-solar-eclipse': 0xf06e,
    'wi-hot': 0xf072,
    'wi-day-cloudy-high': 0xf07d,
    'wi-day-light-wind': 0xf0c4,
    'wi-night-clear': 0xf02e,
    'wi-night-alt-cloudy': 0xf086,
    'wi-night-alt-cloudy-gusts': 0xf022,
    'wi-night-alt-cloudy-windy': 0xf023,
    'wi-night-alt-hail': 0xf024,
    'wi-night-alt-lightning': 0xf025,
    'wi-night-alt-rain': 0xf028,
    'wi-night-alt-rain-mix': 0xf026,
    'wi-night-alt-rain-wind': 0xf027,
    'wi-night-alt-showers': 0xf029,
    'wi-night-alt-sleet': 0xf0b4,
    'wi-night-alt-sleet-storm': 0xf06a,
    'wi-night-alt-snow': 0xf02a,
    'wi-night-alt-snow-thunderstorm': 0xf06d,
    'wi-night-alt-snow-wind': 0xf067,
    'wi-night-alt-sprinkle': 0xf02b,
    'wi-night-alt-storm-showers': 0xf02c,
    'wi-night-alt-thunderstorm': 0xf02d,
    'wi-night-cloudy': 0xf031,
    'wi-night-cloudy-gusts': 0xf02f,
    'wi-night-cloudy-windy': 0xf030,
    'wi-night-fog': 0xf04a,
    'wi-night-hail': 0xf032,
    'wi-night-lightning': 0xf033,
    'wi-night-partly-cloudy': 0xf083,
    'wi-night-rain': 0xf036,
    'wi-night-rain-mix': 0xf034,
    'wi-night-rain-wind': 0xf035,
    'wi-night-showers': 0xf037,
    'wi-night-sleet': 0xf0b3,
    'wi-night-sleet-storm': 0xf069,
    'wi-night-snow': 0xf038,
    'wi-night-snow-thunderstorm': 0xf06c,
    'wi-night-snow-wind': 0xf066,
    'wi-night-sprinkle': 0xf039,
    'wi-night-storm-showers': 0xf03a,
    'wi-night-thunderstorm': 0xf03b,
    'wi-lunar-eclipse': 0xf070,
    'wi-stars': 0xf077,
    'wi-storm-showers': 0xf01d,
    'wi-thunderstorm': 0xf01e,
    'wi-night-alt-cloudy-high': 0xf07e,
    'wi-night-cloudy-high': 0xf080,
    'wi-night-alt-partly-cloudy': 0xf081,
    'wi-cloud': 0xf041,
    'wi-cloudy': 0xf013,
    'wi-cloudy-gusts': 0xf011,
    'wi-cloudy-windy': 0xf012,
    'wi-fog': 0xf014,
    'wi-hail': 0xf015,
    'wi-rain': 0xf019,
    'wi-rain-mix': 0xf017,
    'wi-rain-wind': 0xf018,
    'wi-showers': 0xf01a,
    'wi-sleet': 0xf0b5,
    'wi-snow': 0xf01b,
    'wi-sprinkle': 0xf01c,
    'wi-snow-wind': 0xf064,
    'wi-smog': 0xf074,
    'wi-smoke': 0xf062,
    'wi-lightning': 0xf016,
    'wi-raindrops': 0xf04e,
    'wi-raindrop': 0xf078,
    'wi-dust': 0xf063,
    'wi-snowflake-cold': 0xf076,
    'wi-windy': 0xf021,
    'wi-strong-wind': 0xf050,
    'wi-sandstorm': 0xf082,
    'wi-earthquake': 0xf0c6,
    'wi-fire': 0xf0c7,
    'wi-flood': 0xf07c,
    'wi-meteor': 0xf071,
    'wi-tsunami': 0xf0c5,
    'wi-volcano': 0xf0c8,
    'wi-hurricane': 0xf073,
    'wi-tornado': 0xf056,
    'wi-small-craft-advisory': 0xf0cc,
    'wi-gale-warning': 0xf0cd,
    'wi-storm-warning': 0xf0ce,
    'wi-hurricane-warning': 0xf0cf,
    'wi-wind-direction': 0xf0b1,
    'wi-alien': 0xf075,
    'wi-celsius': 0xf03c,
    'wi-fahrenheit': 0xf045,
    'wi-degrees': 0xf042,
    'wi-thermometer': 0xf055,
    'wi-thermometer-exterior': 0xf053,
    'wi-thermometer-internal': 0xf054,
    'wi-cloud-down': 0xf03d,
    'wi-cloud-up': 0xf040,
    'wi-cloud-refresh': 0xf03e,
    'wi-horizon': 0xf047,
    'wi-horizon-alt': 0xf046,
    'wi-sunrise': 0xf051,
    'wi-sunset': 0xf052,
    'wi-moonrise': 0xf0c9,
    'wi-moonset': 0xf0ca,
    'wi-refresh': 0xf04c,
    'wi-refresh-alt': 0xf04b,
    'wi-umbrella': 0xf084,
    'wi-barometer': 0xf079,
    'wi-humidity': 0xf07a,
    'wi-na': 0xf07b,
    'wi-train': 0xf0cb,
    'wi-moon-new': 0xf095,
    'wi-moon-waxing-crescent-1': 0xf096,
    'wi-moon-waxing-crescent-2': 0xf097,
    'wi-moon-waxing-crescent-3': 0xf098,
    'wi-moon-waxing-crescent-4': 0xf099,
    'wi-moon-waxing-crescent-5': 0xf09a,
    'wi-moon-waxing-crescent-6': 0xf09b,
    'wi-moon-first-quarter': 0xf09c,
    'wi-moon-waxing-gibbous-1': 0xf09d,
    'wi-moon-waxing-gibbous-2': 0xf09e,
    'wi-moon-waxing-gibbous-3': 0xf09f,
    'wi-moon-waxing-gibbous-4': 0xf0a0,
    'wi-moon-waxing-gibbous-5': 0xf0a1,
    'wi-moon-waxing-gibbous-6': 0xf0a2,
    'wi-moon-full': 0xf0a3,
    'wi-moon-waning-gibbous-1': 0xf0a4,
    'wi-moon-waning-gibbous-2': 0xf0a5,
    'wi-moon-waning-gibbous-3': 0xf0a6,
    'wi-moon-waning-gibbous-4': 0xf0a7,
    'wi-moon-waning-gibbous-5': 0xf0a8,
    'wi-moon-waning-gibbous-6': 0xf0a9,
    'wi-moon-third-quarter': 0xf0aa,
    'wi-moon-waning-crescent-1': 0xf0ab,
    'wi-moon-waning-crescent-2': 0xf0ac,
    'wi-moon-waning-crescent-3': 0xf0ad,
    'wi-moon-waning-crescent-4': 0xf0ae,
    'wi-moon-waning-crescent-5': 0xf0af,
    'wi-moon-waning-crescent-6': 0xf0b0,
    'wi-moon-alt-new': 0xf0eb,
    'wi-moon-alt-waxing-crescent-1': 0xf0d0,
    'wi-moon-alt-waxing-crescent-2': 0xf0d1,
    'wi-moon-alt-waxing-crescent-3': 0xf0d2,
    'wi-moon-alt-waxing-crescent-4': 0xf0d3,
    'wi-moon-alt-waxing-crescent-5': 0xf0d4,
    'wi-moon-alt-waxing-crescent-6': 0xf0d5,
    'wi-moon-alt-first-quarter': 0xf0d6,
    'wi-moon-alt-waxing-gibbous-1': 0xf0d7,
    'wi-moon-alt-waxing-gibbous-2': 0xf0d8,
    'wi-moon-alt-waxing-gibbous-3': 0xf0d9,
    'wi-moon-alt-waxing-gibbous-4': 0xf0da,
    'wi-moon-alt-waxing-gibbous-5': 0xf0db,
    'wi-moon-alt-waxing-gibbous-6': 0xf0dc,
    'wi-moon-alt-full': 0xf0dd,
    'wi-moon-alt-waning-gibbous-1': 0xf0de,
    'wi-moon-alt-waning-gibbous-2': 0xf0df,
    'wi-moon-alt-waning-gibbous-3': 0xf0e0,
    'wi-moon-alt-waning-gibbous-4': 0xf0e1,
    'wi-moon-alt-waning-gibbous-5': 0xf0e2,
    'wi-moon-alt-waning-gibbous-6': 0xf0e3,
    'wi-moon-alt-third-quarter': 0xf0e4,
    'wi-moon-alt-waning-crescent-1': 0xf0e5,
    'wi-moon-alt-waning-crescent-2': 0xf0e6,
    'wi-moon-alt-waning-crescent-3': 0xf0e7,
    'wi-moon-alt-waning-crescent-4': 0xf0e8,
    'wi-moon-alt-waning-crescent-5': 0xf0e9,
    'wi-moon-alt-waning-crescent-6': 0xf0ea,
    'wi-time-1': 0xf08a,
    'wi-time-2': 0xf08b,
    'wi-time-3': 0xf08c,
    'wi-time-4': 0xf08d,
    'wi-time-5': 0xf08e,
    'wi-time-6': 0xf08f,
    'wi-time-7': 0xf090,
    'wi-time-8': 0xf091,
    'wi-time-9': 0xf092,
    'wi-time-10': 0xf093,
    'wi-time-11': 0xf094,
    'wi-time-12': 0xf089,
    'wi-direction-up': 0xf058,
    'wi-direction-up-right': 0xf057,
    'wi-direction-right': 0xf04d,
    'wi-direction-down-right': 0xf088,
    'wi-direction-down': 0xf044,
    'wi-direction-down-left': 0xf043,
    'wi-direction-left': 0xf048,
    'wi-direction-up-left': 0xf087,
    'wi-wind-beaufort-0': 0xf0b7,
    'wi-wind-beaufort-1': 0xf0b8,
    'wi-wind-beaufort-2': 0xf0b9,
    'wi-wind-beaufort-3': 0xf0ba,
    'wi-wind-beaufort-4': 0xf0bb,
    'wi-wind-beaufort-5': 0xf0bc,
    'wi-wind-beaufort-6': 0xf0bd,
    'wi-wind-beaufort-7': 0xf0be,
    'wi-wind-beaufort-8': 0xf0bf,
    'wi-wind-beaufort-9': 0xf0c0,
    'wi-wind-beaufort-10': 0xf0c1,
    'wi-wind-beaufort-11': 0xf0c2,
    'wi-wind-beaufort-12': 0xf0c3,
    // Wind-direction aliases (note: no 'wi-' prefix on these keys).
    'wind': 0xf0b1,
    'wind-deg-0': 0xf05c,
    'wind-deg-45': 0xf05a,
    'wind-deg-90': 0xf059,
    'wind-deg-135': 0xf05d,
    'wind-deg-180': 0xf060,
    'wind-deg-225': 0xf05e,
    'wind-deg-270': 0xf061,
    'wind-deg-315': 0xf05b,
  };
}
|
package com.zeynelerdi.pastryshop.repository.db
import android.arch.persistence.room.Database
import android.arch.persistence.room.RoomDatabase
import android.arch.persistence.room.TypeConverters
import com.zeynelerdi.pastryshop.bin.Pages
/**
 * Room database for the application.
 *
 * Contains a single table backed by the [Pages] entity.
 *
 * @author <a href="https://github.com/ZeynelErdiKarabulut">zeynelerdi</a>
 */
@Database(entities = [Pages::class], version = PSDatabase.DB_VERSION, exportSchema = true)
@TypeConverters(ImageListConverter::class)
abstract class PSDatabase : RoomDatabase() {

    /** DAO for reading and writing the [Pages] table. */
    abstract fun pagesDao(): PagesDao

    companion object {

        /** Name of the on-disk database file. */
        const val DB_NAME = "ps_db"

        /** Current schema version of the database. */
        const val DB_VERSION = 1
    }
}
|
---
id: introducing-producers
title: Introducing Producers
sidebar_label: Producers
---
[producer](/docs/api/producer)s are the central concept of Engine. Engine
recommends that our components should only represent the view, and have as
little logic as possible. Producers are where the logic lives in an Engine app.
The simplest place to see producers in action is the Todo list's footer. A producer
will count the number of pending todos, and show them in the view. Extract
`Footer` out of `src/App.tsx` into its own component. Create `src/Footer.tsx`
with following contents:
```tsx
import React from "react";
const Footer = () => (
<footer className="footer">
<span className="todo-count">
<strong>1</strong> items left
</span>
<ul className="filters">
<li>
<a href="#/" className="selected">
All
</a>
</li>
<li>
<a href="#/active">Active</a>
</li>
<li>
<a href="#/completed">Completed</a>
</li>
</ul>
<button className="clear-completed">Clear completed</button>
</footer>
);
export default Footer;
```
Update `src/App.tsx` to use `Footer`:
```diff
+ import Footer from "./Footer";
...
- <footer className="footer">
- <span className="todo-count">
- <strong>1</strong> items left
- </span>
- <ul className="filters">
- <li>
- <a href="#/" className="selected">
- All
- </a>
- </li>
- <li>
- <a href="#/active">Active</a>
- </li>
- <li>
- <a href="#/completed">Completed</a>
- </li>
- </ul>
- <button className="clear-completed">Clear completed</button>{" "}
- </footer>
+ <Footer />
```
`Footer` will trust that `pendingCount` is going to be available in the state,
and that it'll always contain the correct number of pending todo items. Update
`src/Footer.tsx` based on this assumption:
```diff
+ import { view, observe } from "@c11/engine.macro";
- const Footer = () => (
+ const Footer: view = ({ pendingCount = observe.pendingCount }) => (
<footer className="footer">
<span className="todo-count">
- <strong>1</strong> items left
+ <strong>{pendingCount}</strong> items left
</span>
<ul className="filters">
```
The logic for counting pending items could live in the `Footer` itself — in fact, in a
traditional React app that's exactly what we would have done. But Engine
strongly recommends that business logic should be kept out of `view`s, and put
it in `producer`s. Add a `producer` to the Footer. In `src/Footer.tsx`, add
`pendingCounter` producer:
```diff
+ const pendingCounter: producer = ({
+ updatePendingCount = update.pendingCount,
+ todosById = observe.todosById
+ }) => {
+ const pendingCount = Object.values(
+ todosById as { [id: string]: TodoItem }
+ ).reduce(
+ (accum: number, todo) =>
+ todo.status === TodoStatuses.done ? accum : accum + 1,
+ 0
+ );
+
+ updatePendingCount.set(pendingCount);
+ };
+
+ (Footer as any).producers = [pendingCounter];
export default Footer;
```
`producer`s are just normal functions which are labeled with
[producer](/docs/api/producer) macro. They can access the state the same way as
`view`s; they even have access to `prop`s that a view might get from its parent.
To add a producer to a component, `.producers` property of a view is given an
array of producers.
Similar to `view`s, a producer is triggered whenever anything that it `observe`s
changes. The `pendingCounter` producer observes the `todosById` object, so whenever
anything in todosById changes, this producer is executed. Whenever status of any
todo item is updated, `pendingCount` gets updated accordingly.
In the next chapter, we'll take a look at how producers enable a very
effective workflow for view <-> producer communication.
|
<?php
namespace App\Http\Controllers;
use Illuminate\Support\Facades\Auth;
use Illuminate\Http\Request;
use DB;
class edit_single_mndyprojectController extends Controller
{
    /**
     * Renders the edit page together with the list of units.
     */
    public function showElement(){
        $showUnit = DB::table('unit')->get();
        return view('edit_single_mndyproject', compact('showUnit'));
    }

    /**
     * Returns the projects that belong to the requested unit.
     *
     * Uses query parameter binding instead of string concatenation to
     * prevent SQL injection through the `id` request parameter.
     */
    public function getProjectFromUnit(Request $request){
        $idUnit = $request->get('id');
        $data = DB::select('call spgetProjectfromunit(?)', [$idUnit]);
        return response()->json($data);
    }

    /**
     * Returns the employees assigned to the requested project.
     */
    public function getEmpFromPrjct(Request $request){
        $id_prjct = $request->get('id');
        $data = DB::select('call spEmployeeFromProject(?)', [$id_prjct]);
        return response()->json($data);
    }

    /**
     * Filters mandays data for one employee within one project.
     */
    public function filter_prjct(Request $request){
        $prjct_id = $request->get('prjct_id');
        $emp_id = $request->get('emp_id');
        $dataPrjct = DB::select('call spMandaysfilter_prjct(?, ?)', [$emp_id, $prjct_id]);
        $data['content'] = $dataPrjct;
        return json_encode($data);
    }

    /**
     * Updates the start-of-work date of an employee on a project.
     *
     * Only START_WORK is persisted; the originally-read request fields
     * finish, duration and prjct_id were never used and are not read here.
     */
    public function update_prjct(Request $request){
        $emp_id = $request->get('emp_id');
        $start = $request->get('start');
        $PROJECT = $request->get('PROJECT');

        DB::table('project_employee')
            ->where('PROJECT_DETAIL_ID', $PROJECT)
            ->where('EMPLOYEE_ID', $emp_id)
            ->update(['START_WORK' => $start]);

        $msg['msg'] = 'Success Update';
        return json_encode($msg);
    }
}
|
#!/bin/bash
# Example driver for extlib.bash.
# Flow: load the library, parse CLI args, enforce single-instance and
# correct-user checks, register an exit task, then run main.

# Import the lib
source extlib.bash

# sourcing example.bash.conf is implied if it exists

# Read the default command line arguments
argparser "$@"

# Ensure only one instance of this script is running
checkpid

# Validate that the correct user is running this script per the config
requireuser

# Add task to run on exit
# (FINALCMDS entries are presumably executed by the library's exit
# handler / `quit` — confirm in extlib.bash)
FINALCMDS+=("example_finally")

#####
# If you wish to propagate variables and functions to subshells or GNU Parallel, you will need to export them like so:
# export VARIABLE_NAME
# export -f FUNCTION_NAME
#####

main () {
    #####
    # Put your actions here! It is good practice to keep all logic in functions where possible
    #####
    # Set the name of this function for the logger
    local CURRENT_FUNC="main"
    # Log a test message
    log_info "Starting tasks"
    # Run the example function
    example_function stuff things
    # Log a message that shows a variable
    log_debug "EXAMPLEVAR = ${EXAMPLEVAR}"
    # Use find to create commands to run in parallel
    find ./ -maxdepth 1 -type f -exec echo Found file {} \; | prunner -c echo
    # ...
}

# Run main and pass it the arguments from your shell
main "$@"

# Log completion; also triggers the registered FINALCMDS (see above)
quit 'INFO' 'All tasks completed successfully'
|
// GENERATED
package com.fkorotkov.kubernetes
import io.fabric8.kubernetes.api.model.Cluster as model_Cluster
import io.fabric8.kubernetes.api.model.NamedCluster as model_NamedCluster
/**
 * DSL extension (generated): configures the `cluster` of this
 * [model_NamedCluster], creating an empty [model_Cluster] first when it
 * is still null, then applying [block] to it.
 */
fun model_NamedCluster.`cluster`(block: model_Cluster.() -> Unit = {}) {
  if(this.`cluster` == null) {
    this.`cluster` = model_Cluster()
  }
  this.`cluster`.block()
}
|
package edin.nn.sequence
import edin.general.YamlConfig
import edu.cmu.dynet.{Expression, ParameterCollection}
/**
 * Configuration for a [[SequenceEncoder]]: exposes the encoder's output
 * dimension and knows how to construct the encoder within a parameter
 * collection.
 */
trait SequenceEncoderConfig{
  // Dimensionality of the vectors produced by the constructed encoder.
  val outDim:Int
  def construct()(implicit model: ParameterCollection) : SequenceEncoder
}
object SequenceEncoderConfig{

  /**
   * Builds the concrete encoder configuration described by `conf`:
   * a bidirectional RNN when `bi-directional` is set, otherwise a
   * unidirectional (multi-layer) RNN.
   */
  def fromYaml(conf: YamlConfig): SequenceEncoderConfig =
    if (conf("bi-directional").bool) BiRNNConfig.fromYaml(conf)
    else MultiRNNConfig.fromYaml(conf)

}
/** Transforms a sequence of input expressions into encoded expressions. */
trait SequenceEncoder{
  // Returns one encoded expression per input element.
  def transduce(x:List[Expression]) : List[Expression]
}
|
// Paint shop: computes how many cans of paint are needed to cover a given
// number of meters (one can covers 3 meters and costs 80).
#include <stdio.h>

int main(void){
    int metros, value, latas;

    printf("Insira a quantidade de metros a ser pintado: \n");
    if (scanf("%i", &metros) != 1 || metros < 0) {
        /* Non-numeric or negative input: fail instead of using garbage. */
        return 1;
    }

    /* Round up: a partial remainder still requires a whole extra can.
       The original truncating `metros / 3` under-counted (e.g. 10 m -> 3 cans,
       leaving 1 m unpainted). */
    latas = (metros + 2) / 3;
    value = latas * 80;

    printf("Você terá de comprar %i latas\n", latas);
    printf("E isso custará %i \n", value);
    return 0;
}
|
# ChubbyMango
a little game written by _Lua_ & _LÖVE2d_
***
__Mango__ loves eating balls (or peaches?), but she can't eat a ball bigger than herself.
***
You can press the `left` and `right` keys to control a panel. The panel will help you carry __Mango__!
Press the `r` key to restart.
|
// Multilevel inheritance example: Car -> Maruti -> Maruti800.

/** Base class of the hierarchy; announces its own construction. */
class Car {

    public Car() {
        System.out.println("Class Car");
    }

    /** Prints the kind of vehicle this is. */
    public void vehicleType() {
        System.out.println("Vehicle Type: Car");
    }
}
/** Intermediate class: a {@link Car} of brand Maruti. */
class Maruti extends Car {

    public Maruti() {
        System.out.println("Class Maruti");
    }

    /** Prints the brand name. */
    public void brand() {
        System.out.println("Brand: Maruti");
    }

    /** Prints the top speed; overridden by subclasses. */
    public void speed() {
        System.out.println("Max: 90Kmph");
    }
}
public class Maruti800 extends Maruti{
public Maruti800()
{
System.out.println("Maruti Model: 800");
}
public void speed()
{
System.out.println("Max: 80Kmph");
}
public static void main(String args[])
{
Maruti800 obj=new Maruti800();
obj.vehicleType();
obj.brand();
obj.speed();
}
}
|
-- Grant permissions 41 through 45 to the admin with id 1.
INSERT INTO `admin_permissions` (`admin_id`, `permission_id`) VALUES
(1, 41),
(1, 42),
(1, 43),
(1, 44),
(1, 45);
|
<?php namespace Rede\Gateway\Model;
use Rede\Gateway\Interfaces\Model;
/**
*
* @author Lucas Zerma - <lzerma@gmail.com>
* @since 01/04/2014
* @project www.lucaszerma.com/eredegw
* @see https://github.com/lzerma/gateway_rede
*
*/
/**
 * Response model for a ContAuthTxn (continuous-authority transaction)
 * gateway result. Read-only: populated from the response in the
 * constructor and exposed via getters.
 */
class ContAuthTxn implements Model
{
    /**
     * Gateway reference for the transaction.
     *
     * @var string|false false until set from the response; only set when
     *                   the response carries a ca_reference node
     */
    private $_caReference = false;

    /**
     * Account status reported by the gateway.
     *
     * @var string
     */
    private $_accountStatus;

    /**
     * Populates the model from the decoded gateway response.
     *
     * @param object $result response object containing a ContAuthTxn node
     */
    public function __construct($result)
    {
        $this->setAccountStatus($result->ContAuthTxn->account_status);
        if (isset($result->ContAuthTxn->ca_reference)) {
            $this->setCaReference($result->ContAuthTxn->ca_reference);
        }
    }

    /**
     * (non-PHPdoc)
     *
     * NOTE(review): empty body — this model appears to be response-only
     * and produces no request XML; confirm before relying on it.
     *
     * @see \Rede\Gateway\Interfaces\Model::getXml()
     */
    public function getXml()
    {
    }

    /**
     * @return string|false the gateway reference, or false when absent
     */
    public function getCaReference()
    {
        return $this->_caReference;
    }

    /**
     * @return string the account status
     */
    public function getAccountStatus()
    {
        return $this->_accountStatus;
    }

    /**
     * @param string $_caReference value cast to string before storing
     */
    private function setCaReference($_caReference)
    {
        $this->_caReference = (string)$_caReference;
    }

    /**
     * @param string $_accountStatus value cast to string before storing
     */
    private function setAccountStatus($_accountStatus)
    {
        $this->_accountStatus = (string)$_accountStatus;
    }
}
|
set -ex

# Build and test for $TARGET. Musl targets are built inside a Docker
# container; everything else uses the host toolchain directly.

# Incorporate TARGET env var to the build and test process
if [[ $TARGET != *-musl ]]; then
    cargo build --target "$TARGET" --verbose
    cargo test --target "$TARGET" --verbose
else
    # Build with musl in a Docker container
    docker build -t build-"$PROJECT_NAME" -f docker/Dockerfile-musl .
    # World-writable so the container user can write into the mounted
    # source tree — presumably a non-root user inside the image; confirm
    # against docker/Dockerfile-musl.
    chmod -R 777 "$TRAVIS_BUILD_DIR"
    docker run -v "$TRAVIS_BUILD_DIR":/home/rust/src build-"$PROJECT_NAME" --verbose
fi
|
# InfoPushDataArticleContent
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**text** | **String** | | [optional]
**imageUrl** | **String** | | [optional]
**onPressed** | [**InfoPushDataClickable**](InfoPushDataClickable.md) | | [optional]
|
#!/usr/bin/ruby
# Advent of Code 2020, Day 2 — password-policy validation.
# Each input line looks like "1-3 a: abcde".

policies = File.read("#{__dir__}/policies.txt").split(/\n/)

# Puzzle 1: the numbers give the minimum and maximum count of the letter
# within the password.
puzzle_one = policies.select do |policy|
  range, letter, password = policy.split(/\s/)
  letter = letter.sub(":", "")
  lower, upper = range.split("-").map(&:to_i)
  password.count(letter).between?(lower, upper)
end

puts "PUZZLE 1"
puts puzzle_one.count

# Puzzle 2: exactly one of the two 1-based positions must hold the letter.
puzzle_two = policies.select do |policy|
  positions, letter, password = policy.split(/\s/)
  letter = letter.sub(":", "")
  first, second = positions.split("-").map(&:to_i)
  chars = password.split('')
  next false if chars.empty?
  (chars[first - 1] == letter) ^ (chars[second - 1] == letter)
end

puts "PUZZLE 2"
puts puzzle_two.count
|
package com.coditory.gradle.manifest
import java.net.InetAddress
/**
 * Strategy for resolving the name of the host this process runs on.
 */
interface HostNameResolver {
    /** Returns the host name of the current machine. */
    fun resolveHostName(): String

    companion object {
        /** Default resolver backed by [InetAddress.getLocalHost]. */
        val INET_HOST_NAME_RESOLVER: HostNameResolver = object : HostNameResolver {
            override fun resolveHostName(): String = InetAddress.getLocalHost().hostName
        }
    }
}
|
#!/bin/sh
# Provision infrastructure with Terraform against the currently
# configured gcloud project.

# Select/confirm the active GCP project (helper script in this directory).
./set-project.sh

# Read back the chosen project id so Terraform targets the same project.
PROJECT_ID=$(gcloud config get-value project)

cd terraform
terraform init
# NOTE(review): runs interactively (no -auto-approve) — presumably intentional.
terraform apply --var "project=$PROJECT_ID"
|
import 'package:memorizer/entities/category_content.dart';
/// One page of category results decoded from an API response.
class CategoryPageResult {
  /// The categories contained in this page.
  final List<CategoryContent> categories;

  /// Number of categories in this page.
  int get totalResults => categories.length;

  /// Builds a page from the decoded JSON payload.
  ///
  /// Expects `json['categories']` to be a list of category JSON objects.
  CategoryPageResult.fromJSON(Map<String, dynamic> json)
      : categories = (json['categories'] as List)
            .map((item) => CategoryContent.fromJSON(item))
            .toList();
}
|
import set from 'set-value';
import strind from 'strind';
import { Result, Results } from './FuzzyHighlighter';
/**
 * Enriches each fuzzy-search result with a `formatted` view of its item,
 * where every matched field is replaced by an array of
 * `{ text, isHighlighted }` segments suitable for rendering highlights.
 */
function formatResults<T>(results: Results<T>): FinalResults<T> {
  const finalResults: FinalResults<T> = [];

  results.forEach((result, index) => {
    // Start from a shallow copy of the item; matched keys are overwritten below.
    finalResults.push({ ...result, formatted: { ...result.item } });

    result.matches?.forEach(({ indices, key, value }) => {
      if (!value || !key) return;

      // Split the matched value into highlighted / plain segments.
      const output = strind(value, [...indices], ({ chars: text, matches }) => ({
        text,
        isHighlighted: matches
      }));

      const formattedResult = output.matched as IFormattedResult[];
      const formatted = finalResults[index].formatted as IFormatted;

      if (key.split('.').length > 1) {
        // Nested key (e.g. "author.name") — write via set-value.
        set(formatted, key, formattedResult);
      } else {
        formatted[key] = formattedResult;
      }
    });
  });

  return finalResults;
}

/** One segment of a formatted field value. */
interface IFormattedResult {
  text: string;
  isHighlighted: boolean;
}

/** Item shape after formatting: matched keys map to segment arrays. */
interface IFormatted {
  [key: string]: IFormattedResult[];
}

/** A fuzzy-search result augmented with its render-ready `formatted` item. */
interface IFinalResult<T> extends Result<T> {
  formatted: T | IFormatted;
}

export type FinalResults<T> = IFinalResult<T>[];

export { formatResults };
|
#set -ex
#####
# Builds a Docker image for a packaged process on top of an Inspur base
# OS image.
#
# Usage: <script> <image-name> <tag> <pkg-name> <system> <arch>
#####

if [ $# != 5 ] ; then
echo "please input args:
arg 1: image name (image name must be lowercase,eg: filereader)
arg 2: image version(Tag)
arg 3: pkg name (package must be in pkg directories )
arg 4: system (centos:7.5 or alpine or debian)
arg 5: cpu arch (amd64 or arm64)"
exit 1;
fi

ROOT=$(cd $(dirname "$BASH_SOURCE") ; pwd)
NAME=$1
var=$3
# Process name is the package name up to the first dash.
PROCESS_NAME="${var%%-*}"
ARCH=$5
#alpine
#centos:7.5
#debian
BASE_OS=$4

# All base images live in the same registry namespace.
REGISTRY_PREFIX="registry-jinan-lab.inspurcloud.cn/library/os"

# Map (system, arch) to the base image. Unsupported combinations abort
# with a non-zero status: the original used a plain `exit` (status 0),
# which made failures look like success to callers and CI.
case "${BASE_OS}/${ARCH}" in
    "alpine/amd64")     BASE_OS_URL="${REGISTRY_PREFIX}/inspur-alpine-3.10:5.0.0" ;;
    "alpine/arm64")     BASE_OS_URL="${REGISTRY_PREFIX}/inspur-alpine-3.10-arm64:5.0.0" ;;
    "centos:7.5/amd64") BASE_OS_URL="${REGISTRY_PREFIX}/inspur-centos-7:5.2.0" ;;
    "centos:7.5/arm64") BASE_OS_URL="${REGISTRY_PREFIX}/inspur-centos-7-arm64:5.0.0" ;;
    "debian/amd64")     BASE_OS_URL="${REGISTRY_PREFIX}/inspur-debian-stretch:5.0.0" ;;
    "debian/arm64")     BASE_OS_URL="${REGISTRY_PREFIX}/inspur-debian-stretch-arm64:5.0.0" ;;
    *)
        echo "Not support System : $BASE_OS"
        exit 1
        ;;
esac

BUILD_ARGS=(
--build-arg BASE_OS=${BASE_OS_URL}
--build-arg PACKAGENAME=$3
--build-arg NAME="${NAME}"
--build-arg PROCESS_NAME="${PROCESS_NAME}"
)

TARGET="$NAME:"$2

# Build the target and tag with the full tag.
docker build "${BUILD_ARGS[@]}" -f "dockerfile/Dockerfile-${BASE_OS}" -t "$TARGET" $ROOT
|
using System.Collections.Generic;
using JetBrains.Annotations;
using JetBrains.Application.Threading;
using JetBrains.Diagnostics;
using JetBrains.ReSharper.Feature.Services.Intentions;
using JetBrains.ReSharper.Feature.Services.QuickFixes;
using JetBrains.ReSharper.Psi.CSharp.Tree;
using JetBrains.ReSharper.Psi.Tree;
using JetBrains.Util;
namespace JetBrains.ReSharper.Plugins.Unity.CSharp.Feature.Services.CallGraph
{
    /// <summary>
    /// Base class for call-graph quick fixes: captures the method
    /// declaration that contains the triggering tree node and delegates
    /// the actual bulb items and availability checks to subclasses.
    /// </summary>
    public abstract class CallGraphQuickFixBase : IQuickFix
    {
        // Method declaration containing the node this quick fix was created
        // for; null when the node is outside any method declaration.
        private readonly IMethodDeclaration myContainingBody;

        protected CallGraphQuickFixBase([NotNull] ITreeNode node)
        {
            // returnThis: true — the node itself is used when it already is
            // a method declaration.
            myContainingBody = node.GetContainingNode<IMethodDeclaration>(returnThis: true);
        }

        /// <summary>Produces the bulb actions for the containing method.</summary>
        protected abstract IEnumerable<IntentionAction> GetBulbItems([NotNull] IMethodDeclaration methodDeclaration);

        public IEnumerable<IntentionAction> CreateBulbItems()
        {
            // IsAvailable must have been called (and returned true) first.
            Assertion.AssertNotNull(myContainingBody, "create bulb invoked without checking is available");
            // PSI access requires holding the read lock.
            myContainingBody.GetPsiServices().Locks.AssertReadAccessAllowed();
            return GetBulbItems(myContainingBody);
        }

        public bool IsAvailable(IUserDataHolder cache)
        {
            var containingBody = myContainingBody;
            if (containingBody == null)
                return false;
            // PSI access requires holding the read lock.
            containingBody.GetPsiServices().Locks.AssertReadAccessAllowed();
            return containingBody.IsValid() && IsAvailable(cache, containingBody);
        }

        /// <summary>Subclass-specific availability check for the containing method.</summary>
        protected abstract bool IsAvailable([NotNull] IUserDataHolder cache, [NotNull] IMethodDeclaration methodDeclaration);
    }
}
|
package com.louyj.dbsync.job
import com.louyj.dbsync.SystemContext
import com.louyj.dbsync.config.DatabaseConfig
import com.louyj.dbsync.dbopt.DbOperationRegister
import com.louyj.dbsync.sync.HeartbeatComponent
import org.slf4j.LoggerFactory
import java.util.concurrent.TimeUnit
/**
*
* Create at 2020/8/24 18:06<br/>
*
* @author Louyj<br/>
*/
/**
 * Background thread that periodically cleans system tables on every
 * configured database. The thread names itself and starts eagerly at
 * construction time.
 */
class CleanWorker(ctx: SystemContext)
  extends HeartbeatComponent {

  val logger = LoggerFactory.getLogger(getClass)

  setName("cleanWorker")
  start()

  // Timestamp (ms) of the last completed clean pass.
  var lastExecute = System.currentTimeMillis()

  // Wakes every 5s to heartbeat; runs a clean pass over all configured
  // databases once cleanInterval has elapsed since the previous pass.
  override def run(): Unit = {
    logger.info(s"Start clean worker, scheduled at fixed rate of ${ctx.sysConfig.cleanInterval}ms")
    while (ctx.running) {
      TimeUnit.MILLISECONDS.sleep(5000)
      heartbeat()
      if (System.currentTimeMillis() - lastExecute > ctx.sysConfig.cleanInterval) {
        ctx.dbConfigs.foreach(cleanFun)
        lastExecute = System.currentTimeMillis()
      }
    }
    logger.info(s"Stop clean worker")
  }

  // Cleans the system tables of one database; failures are logged and
  // swallowed so one bad database does not stop the whole loop.
  def cleanFun = (dbConfig: DatabaseConfig) => {
    try {
      logger.info(s"Start clean system tables for ${dbConfig.name}")
      val jdbcTemplate = ctx.dsPools.jdbcTemplate(dbConfig.name)
      val dbOpt = DbOperationRegister.dbOpts(dbConfig.`type`)
      val count = dbOpt.cleanSysTable(jdbcTemplate, dbConfig, ctx.sysConfig.dataKeepHours)
      logger.info(s"Finish clean system tables for ${dbConfig.name}, cleaned $count datas")
    } catch {
      case e: Exception => logger.warn("Clean task failed.", e)
    }
  }

  // Heartbeat period (ms) — matches the 5s sleep in run().
  override def heartbeatInterval(): Long = 5000
}
|
use super::*;
use crate::ids::parser::*;
use crate::scanner::Keywords;
// TODO Split lines with more than 80 characters.
// The only stuff that this formatter reorders are the the package name and imports,
// since they must appear first. Anything else, only the comments, spaces and indentantion are fixed.
/// Re-renders the parsed nodes as formatted source text.
///
/// Packages are collected into a separate buffer that is emitted before
/// servers and clients, so package declarations always come first; apart
/// from that, node order is preserved. Comments inside items are dropped
/// (the `ItemNode::Comment` arm below is a no-op).
pub fn format_document(parser: &Parser) -> Option<String> {
    // `body` collects servers/clients; `library_body` collects packages.
    let mut body = String::new();
    let mut library_body = String::new();
    // NOTE(review): never read after initialization — candidate for removal.
    let mut is_comment = false;
    for node in &parser.nodes {
        match node {
            ParserNode::Package(value)
            | ParserNode::Server(value)
            | ParserNode::Client(value) => {
                // Keyword that introduces this item kind.
                let keyword = match node {
                    ParserNode::Package(_) => Keywords::Package,
                    ParserNode::Server(_) => Keywords::Server,
                    ParserNode::Client(_) => Keywords::Client,
                };
                // Packages go into the library buffer; everything else into `body`.
                let body = match node {
                    ParserNode::Package(_) => &mut library_body,
                    _ => &mut body,
                };
                let name_ident = match &value.ident {
                    ItemIdent::TypeName(name) => &name.ident,
                    ItemIdent::Identifier(name) => &name.ident,
                };
                // Item header: keyword, name, then the opening marker.
                let type_body = format!("{} {}{}", keyword, name_ident, OPEN_NEW_LINE);
                body.push_str(type_body.as_str());
                for node in &value.nodes {
                    match node {
                        ItemNode::ItemField(field) => {
                            // One indented, comma-terminated field per line.
                            body.push_str(INDENT);
                            body.push_str(field.to_string().as_str());
                            body.push_str(",\n");
                        }
                        // Item comments are intentionally dropped.
                        ItemNode::Comment(_) => {}
                    }
                }
                body.push_str(CLOSE_NEW_LINE);
            }
        }
    }
    // Append servers/clients after the packages.
    library_body += body.as_str();
    Some(library_body.trim().to_string())
}
/// Appends each comment to `body`, prefixed with `INDENT` and
/// `COMMENT_START` and terminated with a newline.
fn push_field_comment(body: &mut String, comments: &[String]) {
    for line in comments {
        body.push_str(INDENT);
        body.push_str(COMMENT_START);
        body.push_str(line);
        body.push('\n');
    }
}
/// Appends each comment to `body`, prefixed with `COMMENT_START` and
/// terminated with a newline (no indentation).
fn push_comment(body: &mut String, comments: &[String]) {
    for line in comments {
        body.push_str(COMMENT_START);
        body.push_str(line);
        body.push('\n');
    }
}
|
# View helpers shared across the application.
module ApplicationHelper
  # Renders a navbar entry for +controller+ when the current user may
  # access its index action; returns nil otherwise.
  def navbar(controller)
    controller = controller.to_s
    return unless policy(Object.const_get(controller.classify)).index?

    css_class = controller_name == controller ? :active : nil
    content_tag(:li, class: css_class) do
      link_to controller.camelize, send("#{controller}_path")
    end
  end

  # Maps a flash message type to the matching Bootswatch alert CSS class.
  def bootswatch_alert_types(alert_type)
    {
      info: 'alert-info',
      notice: 'alert-success',
      success: 'alert-success',
      alert: 'alert-danger',
      error: 'alert-danger'
    }.fetch(alert_type.to_sym, 'alert-warning')
  end
end
|
// This module includes the embedded spritesheets. To add additional spritesheets
// update `build.rs`.
pub mod pngs {
include!(concat!(env!("OUT_DIR"), "/pngs.rs"));
}
pub mod constants;
pub mod generator;
pub mod grid_generator;
pub mod grid_renderer;
pub mod service;
pub mod sheets;
pub mod spelunkicon;
|
import pandas as pd

from pathlib import Path

from pins.rsconnect.fs import PinBundleManifest
from pins.meta import MetaFactory

# Builds an example RSConnect pin bundle on disk: an index page, a CSV
# data file, pin metadata, and (last) the bundle manifest.
p_root = Path("pins/tests/example-bundle")
# BUG FIX: the original called p_root.parent.mkdir(...), which created only
# "pins/tests" but not the bundle directory itself, so the writes into
# p_root below failed whenever "example-bundle" did not already exist.
p_root.mkdir(parents=True, exist_ok=True)

# Minimal landing page for the bundle.
p_index = p_root / "index.html"
p_index.write_text("<html><body>yo</body></html>")

# Small example data frame serialized as CSV.
p_data = p_root / "data_frame.csv"
df = pd.DataFrame({"x": [1, 2, 3]})
df.to_csv(p_data)

# Pin metadata describing the CSV file.
p_meta = p_root / "data.txt"
meta = MetaFactory().create(
    str(p_data), "csv", title="some title", name="data_frame.csv"
)
meta.to_yaml(p_meta.open("w"))

# add manifest last, since it enumerates all the files
# this lets you download them individually from rsconnect
PinBundleManifest.add_manifest_to_directory(str(p_root))
|
const jsdom = require('jsdom')
const chalk = require('chalk')
const SpellChecker = require('spellchecker')
module.exports = {
spellcheck: function (content, inputPath) {
if (inputPath.endsWith('.md') || inputPath.endsWith('.markdown')) {
const { JSDOM } = jsdom
const { document } = new JSDOM(content).window
const spellcheckedElements = [
...document.querySelectorAll('[data-spellcheck]')
]
const text = spellcheckedElements
.map((el) => el.textContent.trim())
.join(' ')
const results = SpellChecker.checkSpelling(text)
if (results.length) {
results.forEach(({ start, end }) => {
const word = text.substring(start, end)
console.warn(
chalk.yellow(`Linter: check spelling for "${word}"`)
)
})
}
}
}
}
|
"use strict";
module.exports = context => {
return {
BinaryExpression: node => {
if (node.left.name > node.right.name) {
context.report({
node,
message: "目上の変数は比較演算子の右側に配置すべきです。"
});
}
}
};
};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.