hexsha stringlengths 40 40 | size int64 5 1.05M | ext stringclasses 98
values | lang stringclasses 21
values | max_stars_repo_path stringlengths 3 945 | max_stars_repo_name stringlengths 4 118 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 945 | max_issues_repo_name stringlengths 4 118 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 134k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 945 | max_forks_repo_name stringlengths 4 135 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 1.05M | avg_line_length float64 1 1.03M | max_line_length int64 2 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
febe81bf8ec6a824cef7c485c14cfa96c7e47e16 | 965 | sql | SQL | Deploy/demo-scripts/1-Watermark Load for customer table.sql | bratinpaul/https-github.com-microsoft-azure-data-services-go-fast-codebase | 163515f1d5b4f5a20c54b11dfe700832f0ed4417 | [
"MIT"
] | 1 | 2021-04-10T10:43:43.000Z | 2021-04-10T10:43:43.000Z | Deploy/demo-scripts/1-Watermark Load for customer table.sql | JonHarding/azure-data-services-go-fast-codebase | 5a42e70a9bf627393e3fdd232f9eae5be138ee70 | [
"MIT"
] | null | null | null | Deploy/demo-scripts/1-Watermark Load for customer table.sql | JonHarding/azure-data-services-go-fast-codebase | 5a42e70a9bf627393e3fdd232f9eae5be138ee70 | [
"MIT"
] | null | null | null | use adsgofast
go
-- Disable every task master first; the statements below re-enable the ones this demo needs.
update taskmaster set activeyn = 0
go
-- Seed an empty watermark row (DateTimeOffset epoch) for the SalesLT.Customer ingestion task.
-- NOTE(review): the SELECT assumes the LIKE matches exactly one row; with several matches
-- @TaskMasterId receives an arbitrary one of them -- confirm the name pattern is unique.
declare @TaskMasterId Int
select @TaskMasterId=TaskMasterId from taskmaster where taskmastername like '%SalesLT.Customer %'
INSERT [dbo].[TaskMasterWaterMark] ([TaskMasterId], [TaskMasterWaterMarkColumn], [TaskMasterWaterMarkColumnType], [TaskMasterWaterMark_DateTime], [TaskMasterWaterMark_BigInt], [TaskWaterMarkJSON], [ActiveYN], [UpdatedOn]) VALUES (@TaskMasterId, N'ModifiedDate', N'datetime', NULL, NULL, NULL, 1, CAST(N'0001-01-01T00:00:00.0000000+00:00' AS DateTimeOffset))
GO
--enable customer table ingestion for watermark test
update taskmaster set activeyn = 1 where taskmasterid in (select taskmasterid from taskmaster where taskmastername like '%SalesLT.Customer %' and [TaskMasterName] not like '%Generate Tasks%')
-- enable all taskmaster
-- NOTE(review): this broader update supersedes the customer-only enable above;
-- keep only one of the two depending on the intended test scope.
update taskmaster set activeyn = 1 where taskmasterid in (select taskmasterid from taskmaster where [TaskMasterName] not like '%Generate Tasks%')
go | 48.25 | 357 | 0.796891 |
355c351f44588c3efbc9075d9b3141feb75b2751 | 466 | kt | Kotlin | plugins/grazie/src/main/kotlin/com/intellij/grazie/detection/Extensions.kt | Sajaki/intellij-community | 6748af2c40567839d11fd652ec77ba263c074aad | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | plugins/grazie/src/main/kotlin/com/intellij/grazie/detection/Extensions.kt | Sajaki/intellij-community | 6748af2c40567839d11fd652ec77ba263c074aad | [
"Apache-2.0"
] | 2 | 2022-02-19T09:45:05.000Z | 2022-02-27T20:32:55.000Z | plugins/grazie/src/main/kotlin/com/intellij/grazie/detection/Extensions.kt | bradleesand/intellij-community | 750ff9c10333c9c1278c00dbe8d88c877b1b9749 | [
"Apache-2.0"
] | 1 | 2020-10-15T05:56:42.000Z | 2020-10-15T05:56:42.000Z | package com.intellij.grazie.detection
import com.intellij.grazie.detector.model.Language
import com.intellij.grazie.detector.model.alphabet.Alphabet
import com.intellij.grazie.jlanguage.Lang
/** The detector [Language] sharing this Grazie [Lang]'s ISO code; `!!` throws NPE when no match exists. */
fun Lang.toLanguage() = Language.values().find { it.iso == this.iso }!!
/** Note that it will return SOME dialect: several [Lang]s may share one ISO code; `!!` throws NPE when no match exists. */
fun Language.toLang() = Lang.values().find { it.iso == this.iso }!!
/** Whether words of this language are separated by whitespace (false for the ASIAN alphabet group). */
val Language.hasWhitespaces: Boolean
get() = alphabet.group != Alphabet.Group.ASIAN
| 33.285714 | 71 | 0.755365 |
39dc8dab7c40f976ae96c046b4d30dc11dcf1ac8 | 4,687 | java | Java | src/main/java/ensemble/samples/scenegraph/events/keyevent/KeyEventApp.java | shannah/javafx-ensemble8 | 97615ef48abeb80c1b8faf414a05ef51a80b05e1 | [
"Apache-2.0"
] | null | null | null | src/main/java/ensemble/samples/scenegraph/events/keyevent/KeyEventApp.java | shannah/javafx-ensemble8 | 97615ef48abeb80c1b8faf414a05ef51a80b05e1 | [
"Apache-2.0"
] | null | null | null | src/main/java/ensemble/samples/scenegraph/events/keyevent/KeyEventApp.java | shannah/javafx-ensemble8 | 97615ef48abeb80c1b8faf414a05ef51a80b05e1 | [
"Apache-2.0"
] | 2 | 2019-08-07T09:19:36.000Z | 2022-01-30T15:40:16.000Z | /*
* Copyright (c) 2008, 2014, Oracle and/or its affiliates.
* All rights reserved. Use is subject to license terms.
*
* This file is available and licensed under the following license:
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the distribution.
* - Neither the name of Oracle Corporation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package ensemble.samples.scenegraph.events.keyevent;
import javafx.application.Application;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.event.EventHandler;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.ListView;
import javafx.scene.control.TextField;
import javafx.scene.input.KeyEvent;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
/**
* A sample that demonstrates various key events and their usage. Type in the
* text box to view the triggered events: key pressed, key typed and key
* released. Pressing the Shift, Ctrl, and Alt keys also trigger events.
*
* @sampleName KeyEvent
* @preview preview.png
* @see javafx.scene.input.KeyCode
* @see KeyEvent
* @see EventHandler
* @embedded
*/
public class KeyEventApp extends Application {

    /**
     * Builds the sample UI: a text field whose key-pressed/released/typed events
     * are appended to a small console list below it.
     *
     * @return root node of the sample
     */
    public Parent createContent() {
        // create a console for logging key events
        final ListView<String> console = new ListView<>(FXCollections.<String>observableArrayList());
        // listen on the console items and remove old ones when we get over 20 items
        console.getItems().addListener((ListChangeListener.Change<? extends String> change) -> {
            while (change.next()) {
                // compare against an int literal; the original "> 20.0" forced a
                // pointless int-to-double promotion of the list size
                if (change.getList().size() > 20) {
                    change.getList().remove(0);
                }
            }
        });
        console.setPrefHeight(150);
        console.setMaxHeight(ListView.USE_PREF_SIZE);
        // create text box for typing in
        final TextField textBox = new TextField();
        textBox.setPromptText("Write here");
        textBox.setStyle("-fx-font-size: 34;");
        // add key listeners; note: getText() is empty for modifier-only keys (Shift, Ctrl, ...)
        textBox.setOnKeyPressed((KeyEvent ke) -> console.getItems().add("Key Pressed: " + ke.getText()));
        textBox.setOnKeyReleased((KeyEvent ke) -> console.getItems().add("Key Released: " + ke.getText()));
        textBox.setOnKeyTyped((KeyEvent ke) -> {
            String text = "Key Typed: " + ke.getCharacter();
            if (ke.isAltDown()) {
                text += " , alt down";
            }
            if (ke.isControlDown()) {
                text += " , ctrl down";
            }
            if (ke.isMetaDown()) {
                text += " , meta down";
            }
            if (ke.isShiftDown()) {
                text += " , shift down";
            }
            console.getItems().add(text);
        });
        VBox vb = new VBox(10);
        vb.getChildren().addAll(textBox, console);
        return vb;
    }

    @Override
    public void start(Stage primaryStage) throws Exception {
        primaryStage.setScene(new Scene(createContent()));
        primaryStage.show();
    }

    /**
     * Java main for when running without JavaFX launcher
     * @param args command line arguments
     */
    public static void main(String[] args) {
        launch(args);
    }
}
| 38.735537 | 107 | 0.663964 |
ef7332af0ca5400f5cb194b0298e7ff0c6bcbd83 | 501 | lua | Lua | Assets/LuaScripts/Modulus/ECS/LComponent/Factory/LAIComp.lua | nizhenchao/ECS_XLua | aa60d66754f18faea66f6b2e0a9a02a7cfefc15d | [
"Apache-2.0"
] | 2 | 2018-12-21T05:53:56.000Z | 2019-03-18T05:08:32.000Z | Assets/LuaScripts/Modulus/ECS/LComponent/Factory/LAIComp.lua | woxinfeixiang2012/ECS_XLua | cb54750c78783a4ead78b578d4c97ea973429d1e | [
"Apache-2.0"
] | null | null | null | Assets/LuaScripts/Modulus/ECS/LComponent/Factory/LAIComp.lua | woxinfeixiang2012/ECS_XLua | cb54750c78783a4ead78b578d4c97ea973429d1e | [
"Apache-2.0"
] | 1 | 2018-10-11T06:16:42.000Z | 2018-10-11T06:16:42.000Z | LAIComp = SimpleClass(LComponent)
-- AI component attached to an entity.
-- type/uid are not used here; args is the (numeric) AIBehaviorConfig key.
function LAIComp:__init(type,uid,args)
-- behaviour tuning row; may be nil when the key is unknown (getThinkTime falls back to 100)
self.conf = ConfigHelper:getConfigByKey('AIBehaviorConfig',tonumber(args))
-- sentinel meaning "never thought yet"
self.lastThinkTime = -1
end
-- True when more than the configured think interval has elapsed
-- since the last completed think step.
function LAIComp:isNeedUpdate()
	local elapsed = LuaExtend:getSecTimer() - self.lastThinkTime
	return elapsed > self:getThinkTime()
end
-- Think interval from the behaviour config; defaults to 100 when
-- no config row (or no thinkTime field) is available.
function LAIComp:getThinkTime()
	if self.conf and self.conf.thinkTime then
		return self.conf.thinkTime
	end
	return 100
end
-- Record the moment a think step finished; isNeedUpdate() measures from here.
function LAIComp:thinkFinish()
self.lastThinkTime = LuaExtend:getSecTimer()
end
-- No-op placeholder; presumably the ECS systems drive behaviour instead.
function LAIComp:update()
end | 22.772727 | 77 | 0.780439 |
67cddc22990abd312abe92e5853bb8080a55b3e1 | 2,645 | lua | Lua | modapi.lua | BomberPlayz/CaveDig | b2cbb4954717a743dbed1c1561eeaa49d0a4ef0b | [
"MIT"
] | 8 | 2020-02-14T14:03:35.000Z | 2021-11-14T20:34:53.000Z | modapi.lua | BomberPlayz/CaveDig | b2cbb4954717a743dbed1c1561eeaa49d0a4ef0b | [
"MIT"
] | 3 | 2020-02-16T21:19:55.000Z | 2020-07-15T11:48:02.000Z | modapi.lua | griffi-gh/CaveDig | 84f9103d7d7d714a326383544a895c9ff79ef3b2 | [
"MIT"
] | 2 | 2020-02-13T15:33:45.000Z | 2020-05-12T09:06:35.000Z | api={inner={},loader={},blocks={}}
-- registry of loaded mod modules (index -> module table returned by require)
mods={data={}}
-- directory (inside the LÖVE search path) that mods are loaded from
api.inner.mod_directory="mods"
-- expose LÖVE's graphics API to mods
api.graphics=love.graphics
-- namespace for player-related helpers
api.player={health={}}
-- Teleport the player to tile (x, y) and optionally to chunk (cx, cy).
-- Does nothing unless both x and y are given.
function api.player.teleport(x,y,cx,cy)
	-- Move the physics body to pixel position (nx, ny).
	-- BUGFIX: the original helper ignored its parameters and passed the
	-- globals px/py back to the physics world, making the move a no-op.
	local function movp(nx,ny)
		phy.world:update(phy.player,nx,ny)
	end
	if x and y then
		-- convert tile coordinates into pixel coordinates
		-- (the original "px or x*world.tile.w" preferred the current-position
		-- global px, which also prevented any actual movement)
		local x2,y2 = x*world.tile.w, y*world.tile.h
		-- keep the current chunk when no chunk coordinates were supplied
		local cx2,cy2 = cx or world.chunk.id.x, cy or world.chunk.id.y
		movp(x2,y2)
		if cx or cy then chl.f.moveToChunk(cx2,cy2) end
	end
end
-- Raw HP helpers. player.hp is stored on an internal scale; set()/get() are
-- inverse affine maps between the display scale and that internal scale.

-- Add h to the raw hit points (no-op when h is nil/false).
function api.player.health.add(h)
if h then player.hp=player.hp+h end
end
-- Subtract h from the raw hit points (no-op when h is nil/false).
function api.player.health.remove(h)
if h then player.hp=player.hp-h end
end
-- Set health from a display-scale value h: stores hp = 3*h - 2*maxhp,
-- the inverse of get() below.
function api.player.health.set(h)
if h then
player.hp=(h/player.maxhp)*(player.maxhp*3)-(player.maxhp*2)
end
end
-- Display-scale health: simplifies to hp/3 + 2*maxhp/3, so get(set(h)) == h.
function api.player.health.get()
return (((player.hp+player.maxhp)/(player.maxhp*3))*player.maxhp)+(player.maxhp/2)-(player.maxhp/6)
end
-- Maximum displayable health.
function api.player.health.getMax()
return player.maxhp
end
-- Return the file names of all Lua mods found in the mod directory.
function api.loader.modList()
	local filesTable = love.filesystem.getDirectoryItems(api.inner.mod_directory)
	local output = {}
	local j = 1
	for i, v in ipairs(filesTable) do
		-- BUGFIX: match ".lua" literally (plain find). The original pattern
		-- ".lua" treated the dot as a wildcard, so names like "xlua" matched too.
		if v:find(".lua", 1, true) ~= nil then
			output[j] = v
			j = j + 1
		end
	end
	return output
end
-- Register a new block type and return its numeric id.
--   texture     : image path relative to the mod directory
--   strength    : digging strength (default 5)
--   type        : item type string (default "block")
--   noCollision : when truthy, the block id is added to phy.nocollosion
--   action      : optional Lua source string compiled with loadstring
function api.blocks.createBlock(texture,strength,type,noCollision,action)
	local id = table.count(world.tile.textures) + 1
	texture = api.inner.mod_directory .. "/" .. texture
	action = action or ""
	world.tile.texture_files[id] = texture
	world.tile.textures[id] = love.graphics.newImage(texture)
	if noCollision then
		phy.nocollosion[table.count(phy.nocollosion)+1] = id
	end
	-- BUGFIX: the original wrote fields of world.tile.ItemData[count+1] before
	-- any entry existed there (indexing nil errors at runtime) and recomputed
	-- the index for each assignment; compute it once and create the entry.
	local itemIndex = table.count(world.tile.ItemData) + 1
	world.tile.ItemData[itemIndex] = world.tile.ItemData[itemIndex] or {}
	world.tile.ItemData[itemIndex].strength = strength or 5
	world.tile.ItemData[itemIndex].type = type or "block"
	world.tile.actions[table.count(world.tile.actions)+1] = loadstring(action)
	return id
end
-- Require every mod file once and remember the resulting module tables
-- in mods.data; finally mark the mod system as loaded.
function api.inner.executeAll()
	for index, fileName in ipairs(api.loader.modList()) do
		local moduleName = fileName:gsub("%.lua", "")
		mods.data[index] = require(api.inner.mod_directory .. "." .. moduleName)
	end
	api.inner.areModsLoaded = true
end
-- Per-tick driver for mods: calls each mod's init() exactly once (on the first
-- tick after loading) and its loop() on every tick.
function api.inner.loop()
	if api.inner.areModsLoaded and table.count(mods.data) > 0 then
		local firstTick = (api.inner.areModsInit == nil)
		-- BUGFIX: the original iterated 1..table.count(mods); "mods" only has the
		-- single "data" key, so at most one mod was ever serviced.
		for i = 1, table.count(mods.data) do
			local mod = mods.data[i]
			if mod.loop ~= nil then
				mod.loop()
			end
			-- BUGFIX: the original flipped areModsInit inside the loop, so only
			-- the first mod's init() could ever run; flip it after the loop.
			if mod.init ~= nil and firstTick then
				mod.init()
			end
		end
		if firstTick then
			api.inner.areModsInit = true
		end
	end
end
-- Entry point of the mod system: ensure the mod directory exists, then load all mods.
function api.inner.init()
love.filesystem.createDirectory(api.inner.mod_directory)
api.inner.executeAll()
end
| 28.44086 | 102 | 0.683554 |
e743f092509786ba4bdf4bda65bf6008434571f2 | 2,283 | js | JavaScript | smarty/js/found.js | yezhidi/demo | 8de5eb4c5ca53514b022e7716bb8781197031d82 | [
"Apache-2.0"
] | null | null | null | smarty/js/found.js | yezhidi/demo | 8de5eb4c5ca53514b022e7716bb8781197031d82 | [
"Apache-2.0"
] | null | null | null | smarty/js/found.js | yezhidi/demo | 8de5eb4c5ca53514b022e7716bb8781197031d82 | [
"Apache-2.0"
] | null | null | null | function chkname(form)
{
// Step 1 of password recovery: look the user name up via AJAX.
// The server responds "0" when the user does not exist, otherwise with the
// text of the user's security question.
// NOTE(review): "ajax" is assigned without var and therefore leaks as a global.
ajax=loadXMLDoc();
var user = form.user.value;
if(user=="")
{
alert("请输入用户名");
form.user.focus();
return false;
}
else{
// NOTE(review): "user" is not URL-encoded; special characters would break the query string
var url="../colltroler/foundpwd.php?user="+user;
ajax.open("GET",url,true);
ajax.onreadystatechange=function()
{
if(ajax.readyState==4&&ajax.status==200)
{
var data=ajax.responseText;
// loose == relies on string-to-number coercion ("0" == 0 is true)
if(data==0)
{
alert("没有该用户,请重新查找!");
form.user.select();
return false;
}
else{
// reveal the security-question step and show the question text
document.getElementById("first").style.display="none";
document.getElementById("second").style.display='';
document.getElementById("question").innerHTML=data;
}
}
}
ajax.send(null);
}
}
// Step 2 of password recovery: verify the security-question answer via AJAX.
// The server responds "0" when the answer is wrong; on success the password
// step is revealed.
function chkanswer(form)
{
	ajax=loadXMLDoc();
	var user = document.getElementById('user').value;
	var answer=form.answer.value;
	if(answer=="")
	{
		alert("请输入密保答案");
		// BUGFIX: focus the field that is actually empty; the original focused
		// form.user, which does not exist on this step's form
		form.answer.focus();
		return false;
	}
	else{
		// encode both values so special characters (&, =, spaces, ...) cannot
		// break the query string
		var url="../colltroler/foundpwd.php?user="+encodeURIComponent(user)+"&answer="+encodeURIComponent(answer);
		ajax.open("GET",url,true);
		ajax.onreadystatechange=function()
		{
			if(ajax.readyState==4&&ajax.status==200)
			{
				var data=ajax.responseText;
				if(data==0)
				{
					alert("问题回答错误");
					form.answer.select();
					return false;
				}
				else{
					document.getElementById("second").style.display="none";
					document.getElementById("third").style.display='';
				}
			}
		}
		ajax.send(null);
	}
}
// Step 3 of password recovery: validate the new password and submit it.
// NOTE(review): the password travels as a GET parameter, which ends up in
// server logs/history -- consider POST over HTTPS.
function chkpasswd(form)
{
	ajax=loadXMLDoc();
	var user = document.getElementById('user').value;
	var passwd1 =form.pwd1.value;
	var passwd2 = form.pwd2.value;
	if(passwd1==""||passwd1.length<6)
	{
		alert("请输入密码或密码需达6位数");
		// BUGFIX: focus the password field itself; the original focused
		// form.user, which is not on this form
		form.pwd1.focus();
		return false;
	}
	// BUGFIX: the confirmation field was read but never checked in the
	// original code; reject the submission when the two passwords differ
	else if(passwd1!=passwd2)
	{
		alert("两次输入的密码不一致");
		form.pwd2.select();
		return false;
	}
	else{
		// encode the values so special characters cannot break the query string
		var url="../colltroler/foundpwd.php?user="+encodeURIComponent(user)+"&pwd1="+encodeURIComponent(passwd1);
		ajax.open("GET",url,true);
		ajax.onreadystatechange=function()
		{
			if(ajax.readyState==4&&ajax.status==200)
			{
				var data=ajax.responseText;
				if(data==1)
				{
					alert("密码修改成功,请重新登录");
					window.close();
				}
			}
		}
		ajax.send(null);//******///
	}
}
//2018/4/29 21:10 | 22.60396 | 68 | 0.548401 |
acc34a9f9f537b13463cc7ec6e7a8797e4adb73f | 1,209 | cpp | C++ | problemes/probleme2xx/probleme204.cpp | ZongoForSpeed/ProjectEuler | 2e2d45f984d48a1da8275886c976f909a0de94ce | [
"MIT"
] | 6 | 2015-10-13T17:07:21.000Z | 2018-05-08T11:50:22.000Z | problemes/probleme2xx/probleme204.cpp | ZongoForSpeed/ProjectEuler | 2e2d45f984d48a1da8275886c976f909a0de94ce | [
"MIT"
] | null | null | null | problemes/probleme2xx/probleme204.cpp | ZongoForSpeed/ProjectEuler | 2e2d45f984d48a1da8275886c976f909a0de94ce | [
"MIT"
] | null | null | null | #include "problemes.h"
#include "premiers.h"
#include <algorithm>
typedef unsigned long long nombre;
typedef std::vector<nombre> vecteur;
typedef std::pair<nombre, nombre> paire;
ENREGISTRER_PROBLEME(204, "Generalised Hamming Numbers") {
    // A Hamming number is a positive number which has no prime factor larger than 5.
    // So the first few Hamming numbers are 1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15.
    // There are 1105 Hamming numbers not exceeding 10^8.
    //
    // We will call a positive number a generalised Hamming number of type n, if it has no prime factor larger than n.
    // Hence the Hamming numbers are the generalised Hamming numbers of type 5.
    //
    // How many generalised Hamming numbers of type 100 are there which don't exceed 10^9?
    nombre limite = 1000000000;
    nombre h = 100;

    vecteur premiers;
    premiers::crible2<nombre>(limite, std::back_inserter(premiers));

    // Hamming[k] stays true iff k has no prime factor greater than h.
    std::vector<bool> Hamming(limite + 1, true);
    Hamming[0] = false;
    for (auto p: premiers) {
        // Strike every multiple of each "large" prime. BUGFIX: use k <= limite so the
        // upper bound itself is processed too (the vector has limite+1 entries; the
        // original "k < limite" skipped index limite -- harmless here only because
        // 10^9 = 2^9 * 5^9 has no prime factor above 100).
        if (p > h) {
            for (nombre k = p; k <= limite; k += p) {
                Hamming[k] = false;
            }
        }
    }

    auto resultat = std::count(Hamming.begin(), Hamming.end(), true);
    return std::to_string(resultat);
}
| 36.636364 | 118 | 0.674938 |
5945e11b233d7b510af5782fce983aee5ff37826 | 253 | asm | Assembly | data/mapHeaders/route16house.asm | adhi-thirumala/EvoYellow | 6fb1b1d6a1fa84b02e2d982f270887f6c63cdf4c | [
"Unlicense"
] | 16 | 2018-08-28T21:47:01.000Z | 2022-02-20T20:29:59.000Z | data/mapHeaders/route16house.asm | adhi-thirumala/EvoYellow | 6fb1b1d6a1fa84b02e2d982f270887f6c63cdf4c | [
"Unlicense"
] | 5 | 2019-04-03T19:53:11.000Z | 2022-03-11T22:49:34.000Z | data/mapHeaders/route16house.asm | adhi-thirumala/EvoYellow | 6fb1b1d6a1fa84b02e2d982f270887f6c63cdf4c | [
"Unlicense"
] | 2 | 2019-12-09T19:46:02.000Z | 2020-12-05T21:36:30.000Z | Route16House_h:
; Map header data: tileset id, map dimensions, pointers to the block/text/script
; data, the connection byte ($00 = no connected maps), and the object data pointer.
db HOUSE ; tileset
db ROUTE_16_HOUSE_HEIGHT, ROUTE_16_HOUSE_WIDTH ; dimensions (y, x)
dw Route16HouseBlocks, Route16HouseTextPointers, Route16HouseScript ; blocks, texts, scripts
db $00 ; connections
dw Route16HouseObject ; objects
| 36.142857 | 93 | 0.810277 |
af0bb323a9c857fe517b463bc431457ba24c4989 | 6,887 | rb | Ruby | lib/lbp/file.rb | lombardpress/lbp.rb | c3953ded6d6f85a1214d618ca4f7321016bd271b | [
"MIT"
] | null | null | null | lib/lbp/file.rb | lombardpress/lbp.rb | c3953ded6d6f85a1214d618ca4f7321016bd271b | [
"MIT"
] | null | null | null | lib/lbp/file.rb | lombardpress/lbp.rb | c3953ded6d6f85a1214d618ca4f7321016bd271b | [
"MIT"
] | null | null | null | require 'nokogiri'
#require 'lbp/functions'
#require 'lbp/item'
require 'open-uri'
require 'lbp'
module Lbp
# class should be renamed to Transcription
class File
attr_reader :xslt_dir, :file_path
# filepath           : local path or URL of the TEI transcription file
# transcription_type : "critical" or "documentary"
# confighash         : optional hash with :stylesheets, :xslt_base and git
#                      credentials; when nil, only metadata extraction
#                      (no XSLT transforms) is available
def initialize(filepath, transcription_type, confighash)
@file_path = filepath
@confighash = confighash
unless confighash == nil
@stylesheets = @confighash[:stylesheets]
# identify the proper xslt directory (depends on schema version + type, see below)
end
# transcription type is passed in by the caller
@transcription_type = transcription_type # critical or documentary # there is also a method for this if one needs to get the type from the file itself
# schema version is read from the xml file itself
@xslt_version = self.validating_schema_version
unless confighash == nil
@xslt_dir = "#{@confighash[:xslt_base]}#{@xslt_version}/#{@transcription_type}/"
end
end
# Opens the transcription source with Kernel#open (open-uri).
# If the request was redirected (base_uri differs from the requested URL),
# re-opens it with HTTP basic auth from the config hash -- presumably to
# handle protected repositories; TODO confirm against the hosting setup.
# NOTE(review): passing an arbitrary @file_path to Kernel#open is unsafe for
# untrusted input (a path starting with "|" spawns a subprocess); prefer URI.open.
def file
file = open(self.file_path)
if file.base_uri.to_s != self.file_path
file = open(self.file_path, {:http_basic_authentication => [@confighash[:git_username], @confighash[:git_password] ]})
end
return file
end
# Parse the transcription source into a Nokogiri XML document.
# Note: re-opens and re-parses the file on every call.
def nokogiri
  Nokogiri::XML(self.file)
end
## End File Path Methods
## Get transcription type
# Reads the transcription type from the @type attribute of the first tei:text
# element; returns "unknown" when the attribute is absent.
def transcription_type_from_file
  type_attr = self.nokogiri.xpath("/tei:TEI/tei:text[1]/@type", 'tei' => 'http://www.tei-c.org/ns/1.0')
  type_attr.length > 0 ? type_attr.to_s : "unknown"
end
## get validating schema label
# Schema version label: the trailing dash-separated segment of the schemaRef
# @n attribute (e.g. "lbp-1.0.0" -> "1.0.0"); "default" when no schemaRef exists.
def validating_schema_version
  ref = self.nokogiri.xpath("/tei:TEI/tei:teiHeader[1]/tei:encodingDesc[1]/tei:schemaRef[1]/@n", 'tei' => 'http://www.tei-c.org/ns/1.0')
  return "default" if ref.length == 0
  ref.to_s.split("-").last
end
# NOTE(review): stub -- always returns nil. The constructor stores the value in
# @transcription_type; this was presumably meant to return it (compare
# #transcription_type_from_file above). TODO confirm intended behaviour.
def transcription_type
end
### Item Header Extraction and Metadata Methods
# Text of the first <title> in the TEI header's titleStmt ("" when absent).
def title
  self.nokogiri
      .xpath("/tei:TEI/tei:teiHeader[1]/tei:fileDesc[1]/tei:titleStmt[1]/tei:title[1]", 'tei' => 'http://www.tei-c.org/ns/1.0')
      .text
end
# Author name from the TEI header ("" when absent).
def author
xmldoc = self.nokogiri
author = xmldoc.xpath("/tei:TEI/tei:teiHeader[1]/tei:fileDesc/tei:titleStmt[1]/tei:author", 'tei' => 'http://www.tei-c.org/ns/1.0')
return author.text
end
# Editor name from the TEI header ("" when absent).
def editor
xmldoc = self.nokogiri
editor = xmldoc.xpath("/tei:TEI/tei:teiHeader[1]/tei:fileDesc/tei:titleStmt[1]/tei:editor", 'tei' => 'http://www.tei-c.org/ns/1.0')
return editor.text
end
# Edition number (@n of the first edition element).
# NOTE(review): at_xpath returns nil when the node is missing, so .value raises
# NoMethodError on files without this element -- same caveat for the
# .value-based accessors below.
def ed_no
xmldoc = self.nokogiri
ed_no = xmldoc.at_xpath("/tei:TEI/tei:teiHeader[1]/tei:fileDesc[1]/tei:editionStmt[1]/tei:edition[1]/@n", 'tei' => 'http://www.tei-c.org/ns/1.0')
return ed_no.value
end
# Edition date (@when of the edition's date element).
def ed_date
xmldoc = self.nokogiri
ed_date = xmldoc.at_xpath("/tei:TEI/tei:teiHeader[1]/tei:fileDesc[1]/tei:editionStmt[1]/tei:edition[1]/tei:date[1]/@when", 'tei' => 'http://www.tei-c.org/ns/1.0')
return ed_date.value
end
# Publication date (@when of publicationStmt's date element).
# Schema version 1.0.0 did not carry a publication date.
def pub_date
if self.validating_schema_version == "1.0.0"
return "no pub date in this schema"
else
xmldoc = self.nokogiri
pub_date = xmldoc.at_xpath("/tei:TEI/tei:teiHeader[1]/tei:fileDesc[1]/tei:publicationStmt[1]/tei:date[1]/@when", 'tei' => 'http://www.tei-c.org/ns/1.0')
return pub_date.value
end
end
# Variant encoding method (@method of variantEncoding in the encodingDesc).
def encoding_method
xmldoc = self.nokogiri
encoding_method = xmldoc.at_xpath("/tei:TEI/tei:teiHeader[1]/tei:encodingDesc[1]/tei:variantEncoding[1]/@method", 'tei' => 'http://www.tei-c.org/ns/1.0')
return encoding_method.value
end
# Variant encoding location (@location of variantEncoding in the encodingDesc).
def encoding_location
xmldoc = self.nokogiri
encoding_location = xmldoc.at_xpath("/tei:TEI/tei:teiHeader[1]/tei:encodingDesc[1]/tei:variantEncoding[1]/@location", 'tei' => 'http://www.tei-c.org/ns/1.0')
return encoding_location.value
end
# Number of columns of a documentary transcription:
#   nil for critical transcriptions,
#   1 when page breaks (tei:pb) occur,
#   2 when only column breaks (tei:cb) occur,
#   nil when neither occurs.
def number_of_columns
  # early return also avoids a needless parse for critical transcriptions
  return nil if @transcription_type == "critical"
  ns = { 'tei' => 'http://www.tei-c.org/ns/1.0' }
  # parse once and reuse; the original additionally built an unused "test" node set
  xmldoc = self.nokogiri
  if xmldoc.xpath("//tei:pb", ns).count != 0
    1
  elsif xmldoc.xpath("//tei:cb", ns).count != 0
    2
  end
end
### Begin transform (XSLT) methods ###
# Low-level transform returning a Nokogiri document. xslt_transform /
# xslt_apply_to are helpers defined elsewhere in the lbp gem -- presumably
# Nokogiri/XSLT based; TODO confirm.
def transform(xsltfile, xslt_param_array=[])
doc = xslt_transform(self.nokogiri, xsltfile, xslt_param_array)
end
# Low-level transform whose result is used as a string by the callers below.
def transform_apply(xsltfile, xslt_param_array=[])
doc = xslt_apply_to(self.nokogiri, xsltfile, xslt_param_array)
end
# Main reading view (:main_view stylesheet).
def transform_main_view(xslt_param_array=[])
xsltfile=@xslt_dir + @stylesheets[:main_view] # "text_display.xsl"
doc = self.transform_apply(xsltfile, xslt_param_array)
end
# Index view of the text (:index_view stylesheet).
def transform_index_view(xslt_param_array=[])
xsltfile=@xslt_dir + @stylesheets[:index_view] # "text_display_index.xsl"
doc = self.transform_apply(xsltfile, xslt_param_array)
end
# Cleaned text feeding the statistics methods (:clean_view stylesheet).
def transform_clean(xslt_param_array=[])
xsltfile=@xslt_dir + @stylesheets[:clean_view] # "clean_forStatistics.xsl"
doc = self.transform_apply(xsltfile, xslt_param_array)
end
# Same as transform_clean, but returns a Nokogiri document.
def transform_clean_nokogiri(xslt_param_array=[])
xsltfile=@xslt_dir + @stylesheets[:clean_view] # "clean_forStatistics.xsl"
doc = self.transform(xsltfile, xslt_param_array)
end
# Plain-text rendering (:plain_text stylesheet).
def transform_plain_text(xslt_param_array=[])
xsltfile=@xslt_dir + @stylesheets[:plain_text] # "plaintext.xsl"
doc = self.transform_apply(xsltfile, xslt_param_array)
end
# Same as transform_plain_text, but returns a Nokogiri document.
def transform_plain_text_nokogiri(xslt_param_array=[])
xsltfile=@xslt_dir + @stylesheets[:plain_text] # "plaintext.xsl"
doc = self.transform(xsltfile, xslt_param_array)
end
# JSON rendering (:json stylesheet).
def transform_json(xslt_param_array=[])
xsltfile=@xslt_dir + @stylesheets[:json] # "plaintext.xsl"
doc = self.transform_apply(xsltfile, xslt_param_array)
end
# Table of contents / outline (:toc stylesheet).
def transform_toc(xslt_param_array=[])
xsltfile=@xslt_dir + @stylesheets[:toc] # "lectio_outline.xsl"
doc = self.transform_apply(xsltfile, xslt_param_array)
end
### End of Transformation Methods ###
### Begin Statistics Methods ###
# Number of whitespace-separated tokens in the plain-text rendering.
def word_count
  self.transform_plain_text.split.size
end
# All whitespace-separated tokens of the plain-text rendering, lower-cased.
def word_array
  self.transform_plain_text.split.map { |token| token.downcase }
end
# Word -> occurrence-count hash for the plain text.
#   sort:  "frequency" or "word"
#   order: "ascending" or "descending"
# Any unrecognised sort/order combination leaves the counts in insertion order.
def word_frequency(sort, order)
  counts = Hash.new(0)
  self.word_array.each { |word| counts[word] += 1 }
  pairs =
    case [sort, order]
    when ["frequency", "descending"] then counts.sort_by { |_word, count| count }.reverse
    when ["frequency", "ascending"]  then counts.sort_by { |_word, count| count }
    when ["word", "descending"]      then counts.sort_by { |word, _count| word }.reverse
    when ["word", "ascending"]       then counts.sort_by { |word, _count| word }
    else counts
    end
  pairs.to_h
end
end
end
| 34.094059 | 165 | 0.68622 |
802eb5edd609805910085eabeb4a218efa275603 | 71,007 | java | Java | backend/de.metas.swat/de.metas.swat.base/src/main/java/de/metas/invoicecandidate/api/impl/InvoiceCandDAO.java | metasfresh/metasfresh | 699109d6544596face3fd94c646003ba0dc2d548 | [
"RSA-MD"
] | 1,144 | 2016-02-14T10:29:35.000Z | 2022-03-30T09:50:41.000Z | backend/de.metas.swat/de.metas.swat.base/src/main/java/de/metas/invoicecandidate/api/impl/InvoiceCandDAO.java | vestigegroup/metasfresh | 4b2d48c091fb2a73e6f186260a06c715f5e2fe96 | [
"RSA-MD"
] | 8,283 | 2016-04-28T17:41:34.000Z | 2022-03-30T13:30:12.000Z | backend/de.metas.swat/de.metas.swat.base/src/main/java/de/metas/invoicecandidate/api/impl/InvoiceCandDAO.java | vestigegroup/metasfresh | 4b2d48c091fb2a73e6f186260a06c715f5e2fe96 | [
"RSA-MD"
] | 441 | 2016-04-29T08:06:07.000Z | 2022-03-28T06:09:56.000Z | package de.metas.invoicecandidate.api.impl;
import ch.qos.logback.classic.Level;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import de.metas.aggregation.model.I_C_Aggregation;
import de.metas.async.AsyncBatchId;
import de.metas.bpartner.BPartnerId;
import de.metas.bpartner.service.IBPartnerDAO;
import de.metas.cache.annotation.CacheCtx;
import de.metas.cache.annotation.CacheTrx;
import de.metas.cache.model.CacheInvalidateMultiRequest;
import de.metas.cache.model.IModelCacheInvalidationService;
import de.metas.cache.model.ModelCacheInvalidationTiming;
import de.metas.common.util.CoalesceUtil;
import de.metas.common.util.time.SystemTime;
import de.metas.currency.ICurrencyBL;
import de.metas.document.engine.DocStatus;
import de.metas.inout.IInOutDAO;
import de.metas.invoice.InvoiceId;
import de.metas.invoicecandidate.InvoiceCandidateId;
import de.metas.invoicecandidate.api.IInvoiceCandBL;
import de.metas.invoicecandidate.api.IInvoiceCandDAO;
import de.metas.invoicecandidate.api.IInvoiceCandRecomputeTagger;
import de.metas.invoicecandidate.api.IInvoiceCandUpdateSchedulerService;
import de.metas.invoicecandidate.api.InvoiceCandRecomputeTag;
import de.metas.invoicecandidate.api.InvoiceCandidateMultiQuery;
import de.metas.invoicecandidate.api.InvoiceCandidateQuery;
import de.metas.invoicecandidate.api.InvoiceCandidate_Constants;
import de.metas.invoicecandidate.model.I_C_InvoiceCandidate_InOutLine;
import de.metas.invoicecandidate.model.I_C_Invoice_Candidate;
import de.metas.invoicecandidate.model.I_C_Invoice_Candidate_Agg;
import de.metas.invoicecandidate.model.I_C_Invoice_Candidate_Recompute;
import de.metas.invoicecandidate.model.I_C_Invoice_Detail;
import de.metas.invoicecandidate.model.I_C_Invoice_Line_Alloc;
import de.metas.invoicecandidate.model.I_M_ProductGroup;
import de.metas.invoicecandidate.model.X_C_Invoice_Candidate;
import de.metas.lang.SOTrx;
import de.metas.money.CurrencyConversionTypeId;
import de.metas.money.CurrencyId;
import de.metas.order.OrderId;
import de.metas.order.OrderLineId;
import de.metas.organization.IOrgDAO;
import de.metas.organization.OrgId;
import de.metas.payment.paymentterm.PaymentTermId;
import de.metas.process.IADPInstanceDAO;
import de.metas.process.PInstanceId;
import de.metas.security.IUserRolePermissions;
import de.metas.util.Check;
import de.metas.util.Loggables;
import de.metas.util.Services;
import de.metas.util.lang.ExternalHeaderIdWithExternalLineIds;
import de.metas.util.lang.ExternalId;
import de.metas.util.time.InstantInterval;
import lombok.NonNull;
import org.adempiere.ad.dao.ConstantQueryFilter;
import org.adempiere.ad.dao.ICompositeQueryFilter;
import org.adempiere.ad.dao.ICompositeQueryUpdater;
import org.adempiere.ad.dao.IQueryBL;
import org.adempiere.ad.dao.IQueryBuilder;
import org.adempiere.ad.dao.IQueryOrderBy.Direction;
import org.adempiere.ad.dao.IQueryOrderBy.Nulls;
import org.adempiere.ad.dao.IQueryOrderByBuilder;
import org.adempiere.ad.dao.impl.CompareQueryFilter.Operator;
import org.adempiere.ad.dao.impl.ModelColumnNameValue;
import org.adempiere.ad.persistence.ModelDynAttributeAccessor;
import org.adempiere.ad.table.api.IADTableDAO;
import org.adempiere.ad.trx.api.ITrx;
import org.adempiere.ad.trx.api.ITrxListenerManager.TrxEventTiming;
import org.adempiere.ad.trx.api.ITrxManager;
import org.adempiere.exceptions.AdempiereException;
import org.adempiere.exceptions.DBException;
import org.adempiere.model.InterfaceWrapperHelper;
import org.adempiere.service.ClientId;
import org.adempiere.util.lang.IContextAware;
import org.adempiere.util.lang.impl.TableRecordReference;
import org.adempiere.util.proxy.Cached;
import org.compiere.model.IQuery;
import org.compiere.model.I_C_BPartner;
import org.compiere.model.I_C_InvoiceLine;
import org.compiere.model.I_C_InvoiceSchedule;
import org.compiere.model.I_C_OrderLine;
import org.compiere.model.I_M_InOut;
import org.compiere.model.I_M_InOutLine;
import org.compiere.util.DB;
import org.compiere.util.Env;
import org.compiere.util.TimeUtil;
import org.slf4j.Logger;
import javax.annotation.Nullable;
import java.math.BigDecimal;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import static org.adempiere.model.InterfaceWrapperHelper.delete;
/*
* #%L
* de.metas.swat.base
* %%
* Copyright (C) 2015 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
public class InvoiceCandDAO implements IInvoiceCandDAO
{
private final transient Logger logger = InvoiceCandidate_Constants.getLogger(InvoiceCandDAO.class);
private final transient IOrgDAO orgDAO = Services.get(IOrgDAO.class);
private static final ModelDynAttributeAccessor<I_C_Invoice_Candidate, Boolean> DYNATTR_IC_Avoid_Recreate //
= new ModelDynAttributeAccessor<>(IInvoiceCandDAO.class.getName() + "Avoid_Recreate", Boolean.class);
private final IQueryBL queryBL = Services.get(IQueryBL.class);
private final IInvoiceCandUpdateSchedulerService invoiceCandScheduler = Services.get(IInvoiceCandUpdateSchedulerService.class);
/** Loads a single invoice candidate by id, within the current/inherited trx. */
@Override
public I_C_Invoice_Candidate getById(@NonNull final InvoiceCandidateId invoiceCandidateId)
{
return InterfaceWrapperHelper.load(invoiceCandidateId, I_C_Invoice_Candidate.class);
}
/** Loads a single invoice candidate by id, outside of any transaction. */
@Override
public I_C_Invoice_Candidate getByIdOutOfTrx(@NonNull final InvoiceCandidateId invoiceCandidateId)
{
return InterfaceWrapperHelper.loadOutOfTrx(invoiceCandidateId, I_C_Invoice_Candidate.class);
}
/** Loads the invoice candidates for the given ids (de-duplicated via ImmutableSet first). */
@Override
public List<I_C_Invoice_Candidate> getByIds(@NonNull final Collection<InvoiceCandidateId> invoiceCandidateIds)
{
return InterfaceWrapperHelper.loadByRepoIdAwares(ImmutableSet.copyOf(invoiceCandidateIds), I_C_Invoice_Candidate.class);
}
@Override
public final Iterator<I_C_Invoice_Candidate> retrieveIcForSelection(final Properties ctx, final PInstanceId pinstanceId, final String trxName)
{
	// We deliberately do NOT filter by IsError here: it would not play well with pagination,
	// because the number of candidates flagged with IsError=Y may grow while the iteration runs.
	final IQueryBuilder<I_C_Invoice_Candidate> selectionQueryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Candidate.class, ctx, trxName)
			.setOnlySelection(pinstanceId);

	return retrieveInvoiceCandidates(selectionQueryBuilder);
}
@Override
public final Iterator<I_C_Invoice_Candidate> retrieveNonProcessed(final IContextAware contextAware)
{
	// All candidates that are not yet processed, iterated in ID order for stability.
	final IQueryBuilder<I_C_Invoice_Candidate> notProcessedQueryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Candidate.class, contextAware)
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_Processed, false);

	notProcessedQueryBuilder.orderBy()
			.addColumn(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID);

	final IQuery<I_C_Invoice_Candidate> notProcessedQuery = notProcessedQueryBuilder.create();
	return notProcessedQuery
			.setOption(IQuery.OPTION_GuaranteedIteratorRequired, true)
			.setOption(IQuery.OPTION_IteratorBufferSize, 2000)
			.iterate(I_C_Invoice_Candidate.class);
}
/**
 * Iterates the given query's invoice candidates in a processing-friendly order.
 * <p>
 * NOTE: any ordering the caller set on the query builder is cleared and replaced here.
 *
 * @param queryBuilder builder whose ORDER BY is overwritten by this method
 * @return iterator over the matching candidates (non-guaranteed iterator, buffer size 500)
 */
@Override
public <T extends I_C_Invoice_Candidate> Iterator<T> retrieveInvoiceCandidates(
		@NonNull final IQueryBuilder<T> queryBuilder)
{
	//
	// Make sure we are retrieving in a order which is friendly for processing
	final IQueryOrderByBuilder<T> orderBy = queryBuilder.orderBy();
	orderBy
			.clear()
			//
			// order by they header aggregation key to make sure candidates with the same key end up in the same invoice
			.addColumn(I_C_Invoice_Candidate.COLUMNNAME_HeaderAggregationKey)
			//
			// We need to aggregate by DateInvoiced too
			.addColumn(I_C_Invoice_Candidate.COLUMNNAME_DateInvoiced)
			.addColumn(I_C_Invoice_Candidate.COLUMNNAME_DateAcct)
			//
			// task 08241: return ICs with a set Bill_User_ID first, because, we can aggregate ICs with different Bill_User_IDs into one invoice, however, if there are any ICs with a Bill_User_ID
			// set, and others with no Bill_User_ID, then we want the Bill_User_ID to end up in the C_Invoice (header) record.
			.addColumn(I_C_Invoice_Candidate.COLUMNNAME_Bill_User_ID, Direction.Ascending, Nulls.Last)
			.addColumn(I_C_Invoice_Candidate.COLUMNNAME_C_Invoice_Candidate_ID);
	//
	// Retrieve invoice candidates
	return queryBuilder.create()
			.setOption(IQuery.OPTION_GuaranteedIteratorRequired, false)
			.setOption(IQuery.OPTION_IteratorBufferSize, 500)
			.iterate(queryBuilder.getModelClass());
}
@Override
public List<I_C_Invoice_Candidate> retrieveReferencing(@NonNull final TableRecordReference reference)
{
	// Delegate to the cached lookup by (table name, record id).
	final String tableName = reference.getTableName();
	final int recordId = reference.getRecord_ID();
	return fetchInvoiceCandidates(tableName, recordId);
}
@Override
public int deleteAllReferencingInvoiceCandidates(@NonNull final Object model)
{
	final String tableName = InterfaceWrapperHelper.getModelTableName(model);
	final int tableId = Services.get(IADTableDAO.class).retrieveTableId(tableName);
	final int recordId = InterfaceWrapperHelper.getId(model);

	// note: deliberately written as a plain loop (instead of streams); easier to step through in a debugger
	final List<I_C_Invoice_Candidate> referencingRecords = queryBL
			.createQueryBuilder(I_C_Invoice_Candidate.class)
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_AD_Table_ID, tableId)
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_Record_ID, recordId)
			.create()
			.list();

	int deletedCount = 0;
	for (final I_C_Invoice_Candidate referencingRecord : referencingRecords)
	{
		setProcessedToFalseIfIcNotNeeded(referencingRecord);
		delete(referencingRecord);
		deletedCount++;
	}
	return deletedCount;
}
/**
 * Unsets the candidate's processed flag if it was only flagged manually and has no invoice lines,
 * so the record can be deleted. Note: the record is not saved here.
 */
private void setProcessedToFalseIfIcNotNeeded(@NonNull final I_C_Invoice_Candidate icToDelete)
{
	// "manually flagged" means Processed=Y although the computed flag says N
	final boolean flaggedManually = icToDelete.isProcessed() && !icToDelete.isProcessed_Calc();
	if (flaggedManually)
	{
		final IInvoiceCandDAO invoiceCandDAO = Services.get(IInvoiceCandDAO.class);
		final List<I_C_InvoiceLine> invoiceLines = invoiceCandDAO.retrieveIlForIc(icToDelete);
		if (invoiceLines.isEmpty())
		{
			// the candidate was manually set to "processed" just to get it out of the way;
			// in this case we can un-process it so it may be deleted.
			icToDelete.setProcessed(false);
		}
	}
}
@Override
public BigDecimal retrieveInvoicableAmount(@NonNull final I_C_BPartner billBPartner, @Nullable final LocalDate date)
{
	final Properties ctx = InterfaceWrapperHelper.getCtx(billBPartner);
	final String trxName = InterfaceWrapperHelper.getTrxName(billBPartner);

	// All open (error-free) candidates of the given bill partner, optionally restricted by date-to-invoice.
	final InvoiceCandidateQuery query = InvoiceCandidateQuery.builder()
			.billBPartnerId(BPartnerId.ofRepoId(billBPartner.getC_BPartner_ID()))
			.orgId(OrgId.ofRepoId(billBPartner.getAD_Org_ID()))
			.dateToInvoice(date)
			.error(false)
			.build();

	// Sum is converted to the base currency of the context.
	final ICurrencyBL currencyBL = Services.get(ICurrencyBL.class);
	final CurrencyId targetCurrencyId = currencyBL.getBaseCurrency(ctx).getId();

	return retrieveInvoicableAmount(ctx, query, targetCurrencyId, billBPartner.getAD_Client_ID(), I_C_Invoice_Candidate.COLUMNNAME_NetAmtToInvoice, trxName);
}
/**
 * Retrieves the invoices that were created from the invoice candidates referencing the given record.
 *
 * @param tableName table of the referenced record
 * @param recordId ID of the referenced record (must be > 0; enforced by {@link #fetchInvoiceCandidates(String, int)})
 * @param clazz model class the invoices shall be wrapped as
 * @param onlyUnpaid if {@code true}, invoices that are already paid are skipped
 * @return invoices indexed by their {@code C_Invoice_ID}; empty map if there are none
 */
@Override
public <T extends org.compiere.model.I_C_Invoice> Map<Integer, T> retrieveInvoices(
		final String tableName,
		final int recordId,
		final Class<T> clazz,
		final boolean onlyUnpaid)
{
	final Map<Integer, T> openInvoices = new HashMap<>();
	final List<I_C_Invoice_Candidate> icsForCurrentTerm = fetchInvoiceCandidates(tableName, recordId);
	Check.assumeNotNull(icsForCurrentTerm, "the method might return the empty list, but not null");
	for (final I_C_Invoice_Candidate ic : icsForCurrentTerm)
	{
		// call our own implementation directly; no need for a service lookup on each loop iteration
		final List<I_C_InvoiceLine> iclList = retrieveIlForIc(ic);
		for (final I_C_InvoiceLine il : iclList)
		{
			final T invoice = InterfaceWrapperHelper.create(il.getC_Invoice(), clazz);
			// 04022 : Changed method to allow retrieval of all invoices
			if (!onlyUnpaid || !invoice.isPaid())
			{
				openInvoices.put(invoice.getC_Invoice_ID(), invoice);
			}
		}
	}
	return openInvoices;
}
@Override
public I_C_Invoice_Line_Alloc retrieveIlaForIcAndIl(final I_C_Invoice_Candidate invoiceCand, final org.compiere.model.I_C_InvoiceLine invoiceLine)
{
	final IQueryBuilder<I_C_Invoice_Line_Alloc> ilaQueryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Line_Alloc.class, invoiceCand)
			.addEqualsFilter(I_C_Invoice_Line_Alloc.COLUMN_C_Invoice_Candidate_ID, invoiceCand.getC_Invoice_Candidate_ID())
			.addEqualsFilter(I_C_Invoice_Line_Alloc.COLUMN_C_InvoiceLine_ID, invoiceLine.getC_InvoiceLine_ID())
			.addOnlyActiveRecordsFilter()
			.filterByClientId();

	// firstOnly: we expect at most one allocation per (candidate, invoice line) pair
	return ilaQueryBuilder
			.create()
			.firstOnly(I_C_Invoice_Line_Alloc.class);
}
@Override
public <T extends org.compiere.model.I_M_InOutLine> List<T> retrieveInOutLinesForCandidate(
		@NonNull final I_C_Invoice_Candidate ic,
		@NonNull final Class<T> clazz)
{
	// FIXME debug to see why c_invoicecandidate_inoutline have duplicates and take the inoutlines from there
	// for now take it via orderline
	final IQueryBuilder<I_M_InOutLine> inOutLinesQueryBuilder = queryBL
			.createQueryBuilder(I_M_InOutLine.class, ic)
			.addEqualsFilter(I_M_InOutLine.COLUMN_C_OrderLine_ID, ic.getC_OrderLine_ID())
			.addOnlyActiveRecordsFilter();

	// Stable, predictable ordering: by M_InOut_ID, then M_InOutLine_ID.
	inOutLinesQueryBuilder.orderBy()
			.addColumn(I_M_InOutLine.COLUMN_M_InOut_ID)
			.addColumn(I_M_InOutLine.COLUMN_M_InOutLine_ID);

	return inOutLinesQueryBuilder.create().list(clazz);
}
@Override
public List<I_C_Invoice_Candidate> retrieveInvoiceCandidatesForOrderLineId(@NonNull final OrderLineId orderLineId)
{
	// All active candidates directly referencing the given order line.
	final IQueryBuilder<I_C_Invoice_Candidate> queryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Candidate.class)
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_C_OrderLine_ID, orderLineId)
			.addOnlyActiveRecordsFilter();

	return queryBuilder.create().list(I_C_Invoice_Candidate.class);
}
@Nullable
@Override
public I_C_InvoiceCandidate_InOutLine retrieveInvoiceCandidateInOutLine(@NonNull final I_C_Invoice_Candidate ic, @NonNull final I_M_InOutLine iol)
{
	final IQueryBuilder<I_C_InvoiceCandidate_InOutLine> queryBuilder = queryBL
			.createQueryBuilder(I_C_InvoiceCandidate_InOutLine.class, ic)
			.addEqualsFilter(I_C_InvoiceCandidate_InOutLine.COLUMN_C_Invoice_Candidate_ID, ic.getC_Invoice_Candidate_ID())
			.addEqualsFilter(I_C_InvoiceCandidate_InOutLine.COLUMN_M_InOutLine_ID, iol.getM_InOutLine_ID())
			.addOnlyActiveRecordsFilter();

	// firstOnly is safe: the unique index C_IC_IOL_Unique_Active guarantees at most one active record
	return queryBuilder
			.create()
			.firstOnly(I_C_InvoiceCandidate_InOutLine.class);
}
@Override
public List<I_C_InvoiceCandidate_InOutLine> retrieveICIOLAssociationsExclRE(@NonNull final I_C_Invoice_Candidate invoiceCandidate)
{
	final InvoiceCandidateId invoiceCandidateId = InvoiceCandidateId.ofRepoIdOrNull(invoiceCandidate.getC_Invoice_Candidate_ID());
	// a new/unsaved candidate cannot have any associations yet
	return invoiceCandidateId != null
			? retrieveICIOLAssociationsExclRE(invoiceCandidateId)
			: ImmutableList.of();
}
@Override
public List<I_C_InvoiceCandidate_InOutLine> retrieveICIOLAssociationsExclRE(@NonNull final InvoiceCandidateId invoiceCandidateId)
{
	// Load all associations of the candidate and filter in memory.
	// We assume there are never thousands of records per candidate, and this keeps the code simple.
	final IQueryBuilder<I_C_InvoiceCandidate_InOutLine> queryBuilder = queryBL
			.createQueryBuilder(I_C_InvoiceCandidate_InOutLine.class)
			.addEqualsFilter(I_C_InvoiceCandidate_InOutLine.COLUMN_C_Invoice_Candidate_ID, invoiceCandidateId)
			.addOnlyActiveRecordsFilter()
			.orderBy(I_C_InvoiceCandidate_InOutLine.COLUMN_M_InOutLine_ID);

	return queryBuilder
			.create()
			.stream(I_C_InvoiceCandidate_InOutLine.class)
			.filter(this::isInOutCompletedOrClosed)
			.collect(ImmutableList.toImmutableList());
}
private boolean isInOutCompletedOrClosed(@NonNull final I_C_InvoiceCandidate_InOutLine iciol)
{
	// Only count associations whose shipment/receipt is active and completed or closed.
	final I_M_InOut inOut = iciol.getM_InOutLine().getM_InOut();
	if (!inOut.isActive())
	{
		return false;
	}
	final DocStatus docStatus = DocStatus.ofCode(inOut.getDocStatus());
	return docStatus.isCompletedOrClosed();
}
@Override
public List<I_C_InvoiceCandidate_InOutLine> retrieveICIOLAssociationsForInOutLineInclInactive(final I_M_InOutLine inOutLine)
{
	// Includes inactive association records; see the query method for details.
	final IQueryBuilder<I_C_InvoiceCandidate_InOutLine> queryBuilder = retrieveICIOLAssociationsForInOutLineInclInactiveQuery(inOutLine);
	return queryBuilder.create().list(I_C_InvoiceCandidate_InOutLine.class);
}
private IQueryBuilder<I_C_InvoiceCandidate_InOutLine> retrieveICIOLAssociationsForInOutLineInclInactiveQuery(final I_M_InOutLine inOutLine)
{
	// Note: no addOnlyActiveRecordsFilter() here; callers decide whether inactive records are wanted.
	final IQueryBuilder<I_C_InvoiceCandidate_InOutLine> queryBuilder = queryBL
			.createQueryBuilder(I_C_InvoiceCandidate_InOutLine.class, inOutLine)
			.addEqualsFilter(I_C_InvoiceCandidate_InOutLine.COLUMN_M_InOutLine_ID, inOutLine.getM_InOutLine_ID());

	queryBuilder.orderBy()
			.addColumn(I_C_InvoiceCandidate_InOutLine.COLUMN_M_InOutLine_ID);

	return queryBuilder;
}
/**
 * Builds one query matching all invoice candidates related to any line of the given inouts.
 *
 * @param inouts shipments/receipts; may not be empty and need to have at least one line between them
 * @return union query over the per-line candidate queries
 */
@Override
public final IQuery<I_C_Invoice_Candidate> retrieveInvoiceCandidatesQueryForInOuts(final Collection<? extends I_M_InOut> inouts)
{
	Check.assumeNotEmpty(inouts, "inouts is not empty");

	final List<I_M_InOutLine> inoutLines = Services.get(IInOutDAO.class).retrieveLinesForInOuts(inouts);
	// guard: if there were no lines at all, the reduce below would yield an empty Optional
	// and .get() would throw an uninformative NoSuchElementException
	Check.assumeNotEmpty(inoutLines, "inoutLines is not empty");

	return inoutLines.stream()
			.map(this::retrieveInvoiceCandidatesForInOutLineQuery)
			.map(IQueryBuilder::create)
			.reduce(IQuery.unionDistict())
			.get();
}
@Override
public final List<I_C_Invoice_Candidate> retrieveInvoiceCandidatesForInOutLine(final I_M_InOutLine inoutLine)
{
	// Convenience wrapper around the query method.
	final IQueryBuilder<I_C_Invoice_Candidate> queryBuilder = retrieveInvoiceCandidatesForInOutLineQuery(inoutLine);
	return queryBuilder.create().list(I_C_Invoice_Candidate.class);
}
/**
 * Builds a query matching every invoice candidate that is related to the given inout line,
 * via any of three OR-ed paths: (1) the candidate directly references the line (AD_Table_ID/Record_ID),
 * (2) the candidate references the line's order line, (3) an active IC-IOL association record links them.
 */
@Override
public final IQueryBuilder<I_C_Invoice_Candidate> retrieveInvoiceCandidatesForInOutLineQuery(
		@NonNull final I_M_InOutLine inoutLine)
{
	final IQueryBuilder<I_C_Invoice_Candidate> queryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Candidate.class, inoutLine)
			// NOTE: advice the query builder to explode the expressions to SQL UNIONs because that is MUCH more efficient on PostgreSQL.
			.setOption(IQueryBuilder.OPTION_Explode_OR_Joins_To_SQL_Unions)
			.setJoinOr();
	//
	// ICs which are directly created for this inout line
	{
		final ICompositeQueryFilter<I_C_Invoice_Candidate> filter = queryBL.createCompositeQueryFilter(I_C_Invoice_Candidate.class)
				.addOnlyActiveRecordsFilter()
				.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_AD_Table_ID, InterfaceWrapperHelper.getTableId(I_M_InOutLine.class))
				.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_Record_ID, inoutLine.getM_InOutLine_ID());
		queryBuilder.filter(filter);
	}
	//
	// ICs which are created for inout line's C_OrderLine_ID
	if (inoutLine.getC_OrderLine_ID() > 0)
	{
		final ICompositeQueryFilter<I_C_Invoice_Candidate> filter = queryBL.createCompositeQueryFilter(I_C_Invoice_Candidate.class)
				.addOnlyActiveRecordsFilter()
				.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_AD_Table_ID, InterfaceWrapperHelper.getTableId(I_C_OrderLine.class))
				.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_Record_ID, inoutLine.getC_OrderLine_ID());
		queryBuilder.filter(filter);
	}
	//
	// IC-IOL associations
	{
		final IQuery<I_C_InvoiceCandidate_InOutLine> queryForICIOLs = retrieveICIOLAssociationsForInOutLineInclInactiveQuery(inoutLine)
				.addOnlyActiveRecordsFilter()
				.create();
		queryBuilder.addInSubQueryFilter(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID, I_C_InvoiceCandidate_InOutLine.COLUMN_C_Invoice_Candidate_ID, queryForICIOLs);
	}
	return queryBuilder;
}
/**
 * Saves the given candidate; if saving fails and the candidate already exists in the DB,
 * the error is recorded on the candidate instead of being propagated.
 * <p>
 * Only for a brand-new (unsaved) candidate is the save exception rethrown.
 */
@Override
public final void save(final I_C_Invoice_Candidate invoiceCandidate)
{
	try
	{
		InterfaceWrapperHelper.save(invoiceCandidate);
	}
	catch (final Exception saveException)
	{
		// If we got an error while saving a new IC, we can do nothing
		if (invoiceCandidate.getC_Invoice_Candidate_ID() <= 0)
		{
			throw AdempiereException.wrapIfNeeded(saveException);
		}
		// If we don't have an error already set, we are setting the one that we just got it
		if (!invoiceCandidate.isError())
		{
			Services.get(IInvoiceCandBL.class).setError(invoiceCandidate, saveException);
		}
		// persist the error flag/message directly to the DB (bypasses the failing save)
		saveErrorToDB(invoiceCandidate);
	}
}
@Override
public void saveAll(final Collection<I_C_Invoice_Candidate> invoiceCandidates)
{
	// Save one by one; see save(...) for the per-record error handling.
	for (final I_C_Invoice_Candidate invoiceCandidate : invoiceCandidates)
	{
		save(invoiceCandidate);
	}
}
@Override
public int deleteInvoiceDetails(final I_C_Invoice_Candidate ic)
{
	// Remove all detail records belonging to the given candidate; returns the number of deleted rows.
	final IQueryBuilder<I_C_Invoice_Detail> detailsQueryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Detail.class, ic)
			.addEqualsFilter(I_C_Invoice_Detail.COLUMN_C_Invoice_Candidate_ID, ic.getC_Invoice_Candidate_ID());

	return detailsQueryBuilder.create().delete();
}
@Override
public void deleteAndAvoidRecreateScheduling(final I_C_Invoice_Candidate ic)
{
	// Flag the record first, so interceptors can see (via isAvoidRecreate) that no recreation shall be scheduled.
	DYNATTR_IC_Avoid_Recreate.setValue(ic, true);

	InterfaceWrapperHelper.delete(ic);
}
@Override
public boolean isAvoidRecreate(final I_C_Invoice_Candidate ic)
{
	// true only if deleteAndAvoidRecreateScheduling(...) flagged this in-memory instance
	final boolean avoidRecreate = DYNATTR_IC_Avoid_Recreate.is(ic, true);
	return avoidRecreate;
}
@Override
public List<I_C_Invoice_Candidate> retrieveIcForIl(final I_C_InvoiceLine invoiceLine)
{
	// Navigate invoice line -> allocation records -> invoice candidates.
	final IQueryBuilder<I_C_Invoice_Candidate> icQueryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Line_Alloc.class, invoiceLine)
			.addOnlyActiveRecordsFilter()
			.addEqualsFilter(I_C_Invoice_Line_Alloc.COLUMN_C_InvoiceLine_ID, invoiceLine.getC_InvoiceLine_ID())
			.andCollect(I_C_Invoice_Line_Alloc.COLUMN_C_Invoice_Candidate_ID)
			.addOnlyActiveRecordsFilter();

	icQueryBuilder.orderBy()
			.addColumn(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID);

	return icQueryBuilder.create().list();
}
/**
 * Retrieves all active invoice candidates that contributed to the given invoice,
 * by navigating invoice -> invoice lines -> allocation records -> candidates.
 */
@NonNull
public List<I_C_Invoice_Candidate> retrieveInvoiceCandidates(@NonNull final InvoiceId invoiceId)
{
	return queryBL.createQueryBuilder(I_C_InvoiceLine.class)
			.addEqualsFilter(I_C_InvoiceLine.COLUMNNAME_C_Invoice_ID, invoiceId)
			//collect related invoice line alloc
			.andCollectChildren(I_C_Invoice_Line_Alloc.COLUMN_C_InvoiceLine_ID)
			.addOnlyActiveRecordsFilter()
			//collect related invoice candidates
			.andCollect(I_C_Invoice_Line_Alloc.COLUMN_C_Invoice_Candidate_ID)
			.addOnlyActiveRecordsFilter()
			.create()
			.list();
}
@Override
public final Iterator<I_C_Invoice_Candidate> retrieveForHeaderAggregationKey(final Properties ctx, final String headerAggregationKey, final String trxName)
{
	final IQuery<I_C_Invoice_Candidate> query = retrieveForHeaderAggregationKeyQuery(ctx, headerAggregationKey, trxName).create();
	return query
			// buffer 100 instead of the default 50; there might be orders with more than 50 lines
			.setOption(IQuery.OPTION_IteratorBufferSize, 100)
			// guaranteed=true; we can assume there won't be more than some hundreds of candidates sharing one headerAggregationKey
			.setOption(IQuery.OPTION_GuaranteedIteratorRequired, true)
			.iterate(I_C_Invoice_Candidate.class);
}
private IQueryBuilder<I_C_Invoice_Candidate> retrieveForHeaderAggregationKeyQuery(
		@CacheCtx final Properties ctx,
		final String headerAggregationKey,
		@CacheTrx final String trxName)
{
	// Active candidates of the context client sharing the given header aggregation key, in ID order.
	final IQueryBuilder<I_C_Invoice_Candidate> queryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Candidate.class, ctx, trxName)
			.addOnlyActiveRecordsFilter()
			.addOnlyContextClient()
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_HeaderAggregationKey, headerAggregationKey);

	queryBuilder.orderBy()
			.addColumn(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID);

	return queryBuilder;
}
@Override
public final List<I_C_InvoiceLine> retrieveIlForIc(final I_C_Invoice_Candidate invoiceCand)
{
	// Delegate to the ID-based variant.
	return retrieveIlForIc(InvoiceCandidateId.ofRepoId(invoiceCand.getC_Invoice_Candidate_ID()));
}
/**
 * Retrieves the active invoice lines of the context client that were created from the given candidate,
 * by navigating candidate -> allocation records -> invoice lines, ordered by invoice line ID.
 */
@Override
public final List<I_C_InvoiceLine> retrieveIlForIc(@NonNull final InvoiceCandidateId invoiceCandidateId)
{
	return queryBL
			.createQueryBuilder(I_C_Invoice_Line_Alloc.class)
			.addEqualsFilter(I_C_Invoice_Line_Alloc.COLUMN_C_Invoice_Candidate_ID, invoiceCandidateId)
			//
			// Collect invoice lines
			.andCollect(I_C_Invoice_Line_Alloc.COLUMN_C_InvoiceLine_ID)
			.addOnlyActiveRecordsFilter()
			.addOnlyContextClient()
			.orderBy()
			.addColumn(I_C_InvoiceLine.COLUMN_C_InvoiceLine_ID)
			.endOrderBy()
			//
			// Execute query
			.create()
			.list(I_C_InvoiceLine.class);
}
@Override
public final List<I_C_Invoice_Line_Alloc> retrieveIlaForIc(@NonNull final InvoiceCandidateId invoiceCandidateId)
{
	// All active allocation records of the given candidate, ordered by their ID.
	return queryBL
			.createQueryBuilder(I_C_Invoice_Line_Alloc.class)
			.addOnlyActiveRecordsFilter()
			.addEqualsFilter(I_C_Invoice_Line_Alloc.COLUMNNAME_C_Invoice_Candidate_ID, invoiceCandidateId)
			.orderBy()
			.addColumn(I_C_Invoice_Line_Alloc.COLUMN_C_Invoice_Line_Alloc_ID)
			.endOrderBy()
			.create()
			.list();
}
@Override
public final List<I_C_Invoice_Line_Alloc> retrieveIlaForIl(final I_C_InvoiceLine il)
{
	// All active allocation records of the context client referencing the given invoice line.
	final IQueryBuilder<I_C_Invoice_Line_Alloc> ilaQueryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Line_Alloc.class, il)
			.addEqualsFilter(I_C_Invoice_Line_Alloc.COLUMN_C_InvoiceLine_ID, il.getC_InvoiceLine_ID())
			.addOnlyActiveRecordsFilter()
			.addOnlyContextClient();

	ilaQueryBuilder.orderBy()
			.addColumn(I_C_Invoice_Line_Alloc.COLUMN_C_Invoice_Line_Alloc_ID);

	return ilaQueryBuilder
			.create()
			.list(I_C_Invoice_Line_Alloc.class);
}
/**
 * Marks the given invoice candidate as invalid by adding a record to {@link I_C_Invoice_Candidate_Recompute}.
 * The insertion does not interfere with other transactions: it is fine if two or more concurrent
 * transactions insert a record for the same candidate.
 */
@Override
public final void invalidateCand(final I_C_Invoice_Candidate ic)
{
	final List<I_C_Invoice_Candidate> singleCandidate = ImmutableList.of(ic);
	invalidateCands(singleCandidate);
}
@Override
public final void invalidateCandsForProductGroup(final I_M_ProductGroup pg)
{
	final Properties ctx = InterfaceWrapperHelper.getCtx(pg);
	final String trxName = InterfaceWrapperHelper.getTrxName(pg);

	// Is the product group referenced by any invoice-candidate aggregator?
	final int aggregatorCount = queryBL
			.createQueryBuilder(I_C_Invoice_Candidate_Agg.class, ctx, trxName)
			.addEqualsFilter(I_C_Invoice_Candidate_Agg.COLUMN_M_ProductGroup_ID, pg.getM_ProductGroup_ID())
			.create()
			.count();
	if (aggregatorCount <= 0)
	{
		return;
	}

	// Note: we invalidate *every* candidate, so there is no need to use the different IInvoiceCandidateHandler implementations.
	invalidateAllCands(ctx, trxName);
}
@Override
public void invalidateCandsThatReference(@NonNull final TableRecordReference recordReference)
{
	// Only not-yet-processed candidates referencing the given record are invalidated.
	invalidateCandsFor(retrieveInvoiceCandidatesForRecordQuery(recordReference)
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_Processed, false));
}
@Override
public final void invalidateCandsFor(@NonNull final IQueryBuilder<I_C_Invoice_Candidate> icQueryBuilder)
{
	// Delegate to the IQuery-based variant.
	invalidateCandsFor(icQueryBuilder.create());
}
@Override
public final void invalidateCandsFor(@NonNull final ImmutableSet<InvoiceCandidateId> invoiceCandidateIds)
{
	if (invoiceCandidateIds.isEmpty())
	{
		// nothing to invalidate
		return;
	}

	// note: the given candidates are invalidated no matter whether they are Processed or not
	final IQueryBuilder<I_C_Invoice_Candidate> icQueryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Candidate.class)
			.addInArrayFilter(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID, invoiceCandidateIds);
	invalidateCandsFor(icQueryBuilder);
}
/**
 * Inserts one {@link I_C_Invoice_Candidate_Recompute} record per candidate matched by the given query
 * and schedules an asynchronous update per distinct {@code C_Async_Batch_ID} among the inserted records.
 */
@Override
public final void invalidateCandsFor(@NonNull final IQuery<I_C_Invoice_Candidate> icQuery)
{
	// Tag this invalidation run with a fresh UUID so we can find exactly the recompute records we just inserted.
	final String chunkUUID = UUID.randomUUID().toString();
	final int count = icQuery
			.insertDirectlyInto(I_C_Invoice_Candidate_Recompute.class)
			.mapColumn(I_C_Invoice_Candidate_Recompute.COLUMNNAME_C_Invoice_Candidate_ID, I_C_Invoice_Candidate.COLUMNNAME_C_Invoice_Candidate_ID)
			.mapColumn(I_C_Invoice_Candidate_Recompute.COLUMNNAME_C_Async_Batch_ID, I_C_Invoice_Candidate.COLUMNNAME_C_Async_Batch_ID)
			.mapColumnToConstant(I_C_Invoice_Candidate_Recompute.COLUMNNAME_ChunkUUID, chunkUUID)
			// NOTE: not setting the AD_PInstance_ID to null, because:
			// 1. null is the default
			// 2. there is an issue with the SQL INSERT that is rendered for NULL parameters, i.e. it cannot detect the database type for NULL
			// .mapColumnToConstant(I_C_Invoice_Candidate_Recompute.COLUMNNAME_AD_PInstance_ID, null)
			.execute()
			.getRowsInserted();
	// use SLF4J varargs directly instead of wrapping the parameters in an Object[]
	logger.debug("Invalidated {} invoice candidates for {}", count, icQuery);

	//
	// Schedule an update for invalidated invoice candidates
	if (count > 0)
	{
		// Only query for the affected async batches when something was actually inserted;
		// with count == 0 there are no records carrying our chunkUUID anyway.
		final List<Integer> asyncBatchIDs = queryBL.createQueryBuilder(I_C_Invoice_Candidate_Recompute.class)
				.addEqualsFilter(I_C_Invoice_Candidate_Recompute.COLUMN_ChunkUUID, chunkUUID)
				.create()
				.listDistinct(I_C_Invoice_Candidate_Recompute.COLUMNNAME_C_Async_Batch_ID, Integer.class);

		asyncBatchIDs.stream()
				.map(AsyncBatchId::ofRepoIdOrNone)
				.map(asyncBatchId -> InvoiceCandUpdateSchedulerRequest.of(icQuery.getCtx(), icQuery.getTrxName(), AsyncBatchId.toAsyncBatchIdOrNull(asyncBatchId)))
				.forEach(invoiceCandScheduler::scheduleForUpdate);
	}
}
@Override
public final void invalidateCandsForHeaderAggregationKey(final Properties ctx, final String headerAggregationKey, final String trxName)
{
	// Invalidate all not-yet-processed candidates sharing the given header aggregation key.
	invalidateCandsFor(retrieveForHeaderAggregationKeyQuery(ctx, headerAggregationKey, trxName)
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_Processed, false));
}
@Override
public final void invalidateCandsWithSameTableReference(final I_C_Invoice_Candidate ic)
{
	// Invalidate all not-yet-processed candidates referencing the same (AD_Table_ID, Record_ID) as the given one.
	final TableRecordReference reference = TableRecordReference.ofReferenced(ic);
	invalidateCandsFor(retrieveInvoiceCandidatesForRecordQuery(reference)
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_Processed, false));
}
@Override
public final void invalidateCandsForBPartner(final I_C_BPartner bpartner)
{
	// Invalidate all not-yet-processed candidates billed to the given partner.
	invalidateCandsFor(retrieveForBillPartnerQuery(bpartner)
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_Processed, false));
}
/**
 * Invalidates all not-yet-processed candidates that use the given aggregation as their
 * header or line aggregation key builder.
 * <p>
 * Both the aggregation's current and its old (pre-change) AD_Table_ID are checked, so a change
 * away from the C_Invoice_Candidate table still triggers invalidation.
 */
@Override
public final void invalidateCandsForAggregationBuilder(final I_C_Aggregation aggregation)
{
	if (aggregation == null)
	{
		return;
	}
	final int aggregationId = aggregation.getC_Aggregation_ID();
	if (aggregationId <= 0)
	{
		// not saved yet => cannot be referenced by any candidate
		return;
	}
	//
	// Make sure the aggregation is about C_Invoice_Candidate table
	final int invoiceCandidateTableId = InterfaceWrapperHelper.getTableId(I_C_Invoice_Candidate.class);
	final int adTableId = aggregation.getAD_Table_ID();
	// createOld: wrapper exposing the record's values as they were before the current change
	final I_C_Aggregation aggregationOld = InterfaceWrapperHelper.createOld(aggregation, I_C_Aggregation.class);
	final int adTableIdOld = aggregationOld.getAD_Table_ID();
	if (adTableId != invoiceCandidateTableId && adTableIdOld != invoiceCandidateTableId)
	{
		return;
	}
	final Properties ctx = InterfaceWrapperHelper.getCtx(aggregation);
	final String trxName = InterfaceWrapperHelper.getTrxName(aggregation);
	final IQueryBuilder<I_C_Invoice_Candidate> icQueryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Candidate.class, ctx, trxName)
			// Not already processed
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_Processed, false);
	//
	// Add Header/Line AggregationKeyBuilder_ID filter (OR-ed: either key builder may reference the aggregation)
	{
		icQueryBuilder.addCompositeQueryFilter()
				.setJoinOr()
				.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_HeaderAggregationKeyBuilder_ID, aggregationId)
				.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_LineAggregationKeyBuilder_ID, aggregationId);
	}
	//
	// Invalidate
	invalidateCandsFor(icQueryBuilder);
	// logger.info("Invalidated {} C_Invoice_Candidates for aggregation={}", new Object[] { count, aggregation });
}
@Override
public final void invalidateCandsForBPartnerInvoiceRule(final BPartnerId bpartnerId)
{
	final I_C_BPartner bpartner = Services.get(IBPartnerDAO.class).getById(bpartnerId);

	// Only not-yet-processed candidates of the partner whose effective invoice rule
	// (InvoiceRule_Override, falling back to InvoiceRule) is "customer schedule after delivery".
	final IQueryBuilder<I_C_Invoice_Candidate> icQueryBuilder = retrieveForBillPartnerQuery(bpartner)
			.addCoalesceEqualsFilter(X_C_Invoice_Candidate.INVOICERULE_CustomerScheduleAfterDelivery,
					I_C_Invoice_Candidate.COLUMNNAME_InvoiceRule_Override,
					I_C_Invoice_Candidate.COLUMNNAME_InvoiceRule)
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_Processed, false);

	invalidateCandsFor(icQueryBuilder);
}
@Override
public final void invalidateAllCands(final Properties ctx, final String trxName)
{
	// Everything already queued for recompute...
	final IQuery<I_C_Invoice_Candidate_Recompute> alreadyScheduledQuery = queryBL
			.createQueryBuilder(I_C_Invoice_Candidate_Recompute.class, ctx, trxName)
			.create();

	// ...is excluded; invalidate every remaining not-yet-processed candidate.
	invalidateCandsFor(queryBL
			.createQueryBuilder(I_C_Invoice_Candidate.class, ctx, trxName)
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_Processed, false)
			.addNotInSubQueryFilter(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID, I_C_Invoice_Candidate_Recompute.COLUMN_C_Invoice_Candidate_ID, alreadyScheduledQuery));
}
protected final void invalidateCandsForSelection(final PInstanceId pinstanceId, final String trxName)
{
	// Invalidate every candidate in the given selection, no matter whether Processed or not.
	final IQueryBuilder<I_C_Invoice_Candidate> selectionQueryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Candidate.class, Env.getCtx(), trxName)
			.setOnlySelection(pinstanceId);

	invalidateCandsFor(selectionQueryBuilder);
}
private IQueryBuilder<I_C_Invoice_Candidate> retrieveInvoiceCandidatesForRecordQuery(
		@NonNull final TableRecordReference tableRecordReference)
{
	// Active candidates of the context client referencing the given (AD_Table_ID, Record_ID).
	final int adTableId = tableRecordReference.getAdTableId();
	final int recordId = tableRecordReference.getRecord_ID();
	return queryBL
			.createQueryBuilder(I_C_Invoice_Candidate.class)
			.addOnlyActiveRecordsFilter()
			.addOnlyContextClient()
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_AD_Table_ID, adTableId)
			.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_Record_ID, recordId);
}
@Cached(cacheName = I_C_Invoice_Candidate.Table_Name + "#by#AD_Table_ID#Record_ID")
public List<I_C_Invoice_Candidate> fetchInvoiceCandidates(
		@NonNull final String tableName,
		final int recordId)
{
	Check.assume(recordId > 0, "Param 'recordId' needs to be > 0");

	final TableRecordReference recordRef = TableRecordReference.of(tableName, recordId);
	return retrieveInvoiceCandidatesForRecordQuery(recordRef)
			.create()
			.list(I_C_Invoice_Candidate.class);
}
/**
 * Iterates the invoice candidates that were tagged as invalid with the given recompute tag.
 * <p>
 * Non-manual candidates are returned before manual ones, because their NetAmtToInvoice is needed
 * when the manual candidates are evaluated.
 */
@Override
public final Iterator<I_C_Invoice_Candidate> fetchInvalidInvoiceCandidates(
		final Properties ctx,
		@NonNull final InvoiceCandRecomputeTag recomputeTag,
		final String trxName)
{
	return queryBL
			.createQueryBuilder(I_C_Invoice_Candidate_Recompute.class, ctx, trxName)
			.addEqualsFilter(I_C_Invoice_Candidate_Recompute.COLUMN_AD_PInstance_ID, recomputeTag.getPinstanceId())
			//
			// Collect invoice candidates
			.andCollect(I_C_Invoice_Candidate_Recompute.COLUMN_C_Invoice_Candidate_ID)
			.addOnlyContextClient()
			.addOnlyActiveRecordsFilter()
			//
			// Order BY: we need to return the not-manual invoice candidates first, because their NetAmtToInvoice is required when we evaluate the manual candidates
			.orderBy()
			.addColumn(I_C_Invoice_Candidate.COLUMNNAME_IsFreightCost, Direction.Ascending, Nulls.First)
			.addColumn(I_C_Invoice_Candidate.COLUMN_IsManual)
			.addColumn(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID)
			.endOrderBy()
			//
			// Execute query:
			// NOTE (task 03968): performance tweak that is necessary when updating around 70.000 candidates at once:
			// don't use a 'guaranteed' iterator; *we don't need it* and selecting/ordering joining between
			// C_Invoice_Candidate and T_Query_Selection is a performance-killer
			.create()
			.setOption(IQuery.OPTION_GuaranteedIteratorRequired, false)
			.setOption(IQuery.OPTION_IteratorBufferSize, 500)
			.iterate(I_C_Invoice_Candidate.class);
}
@Override
public InvoiceCandRecomputeTag generateNewRecomputeTag()
{
	// Each recompute tag is backed by a freshly created AD_PInstance selection ID.
	return InvoiceCandRecomputeTag.ofPInstanceId(Services.get(IADPInstanceDAO.class).createSelectionId());
}
@Override
public final IInvoiceCandRecomputeTagger tagToRecompute()
{
	// Fresh tagger bound to this DAO.
	final InvoiceCandRecomputeTagger tagger = new InvoiceCandRecomputeTagger(this);
	return tagger;
}
/**
 * Builds the query for the {@link I_C_Invoice_Candidate_Recompute} records affected by the given tag request:
 * not locked by someone else, optionally restricted to an existing tag, to a given set of candidate IDs,
 * and to the request's limit.
 */
private IQueryBuilder<I_C_Invoice_Candidate_Recompute> retrieveInvoiceCandidatesRecomputeFor(
		@NonNull final InvoiceCandRecomputeTagger tagRequest)
{
	final Properties ctx = tagRequest.getCtx();
	final String trxName = tagRequest.getTrxName();
	final IQueryBuilder<I_C_Invoice_Candidate_Recompute> queryBuilder = queryBL
			.createQueryBuilder(I_C_Invoice_Candidate_Recompute.class, ctx, trxName);
	//
	// Append not locked where clause
	queryBuilder.filter(LockedByOrNotLockedAtAllFilter.of(tagRequest.getLockedBy()));
	//
	// Only those which were already tagged with given tag
	final InvoiceCandRecomputeTag taggedWith = tagRequest.getTaggedWith();
	if (taggedWith != null)
	{
		final PInstanceId pinstanceId = InvoiceCandRecomputeTag.getPinstanceIdOrNull(taggedWith);
		queryBuilder.addEqualsFilter(I_C_Invoice_Candidate_Recompute.COLUMN_AD_PInstance_ID, pinstanceId);
	}
	//
	// Only a given set of invoice candidates
	if (tagRequest.isOnlyC_Invoice_Candidate_IDs())
	{
		final Set<Integer> invoiceCandidateIds = tagRequest.getOnlyC_Invoice_Candidate_IDs();
		if (invoiceCandidateIds == null || invoiceCandidateIds.isEmpty())
		{
			// i.e. tag none: the constant-false filter makes the query match nothing
			queryBuilder.filter(ConstantQueryFilter.of(false));
		}
		else
		{
			queryBuilder.addInArrayOrAllFilter(I_C_Invoice_Candidate_Recompute.COLUMN_C_Invoice_Candidate_ID, invoiceCandidateIds);
		}
	}
	//
	// Limit maximum number of invalid invoice candidates to tag for updating
	if (tagRequest.getLimit() > 0)
	{
		queryBuilder.setLimit(tagRequest.getLimit());
	}
	return queryBuilder;
}
protected final int tagToRecompute(@NonNull final InvoiceCandRecomputeTagger tagRequest)
{
	final InvoiceCandRecomputeTag recomputeTag = tagRequest.getRecomputeTag();

	// Stamp the matching recompute records with the tag's AD_PInstance_ID, directly in the DB.
	final IQuery<I_C_Invoice_Candidate_Recompute> query = retrieveInvoiceCandidatesRecomputeFor(tagRequest).create();
	final int taggedCount = query
			.updateDirectly()
			.addSetColumnValue(I_C_Invoice_Candidate_Recompute.COLUMNNAME_AD_PInstance_ID, recomputeTag.getPinstanceId())
			.execute();

	Loggables.withLogger(logger, Level.DEBUG)
			.addLog("Marked {} {} records with recompute tag={}", taggedCount, I_C_Invoice_Candidate_Recompute.Table_Name, recomputeTag);
	logger.debug("Query: {}", query);
	logger.debug("Tagger: {}", tagRequest);

	return taggedCount;
}
/**
 * @return how many {@link I_C_Invoice_Candidate_Recompute} records would be tagged by the given {@link InvoiceCandRecomputeTagger}.
 */
protected final int countToBeTagged(final InvoiceCandRecomputeTagger tagRequest)
{
	final IQuery<I_C_Invoice_Candidate_Recompute> query = retrieveInvoiceCandidatesRecomputeFor(tagRequest).create();
	return query.count();
}
/**
 * Deletes those records from {@link I_C_Invoice_Candidate_Recompute} which were formerly tagged
 * with the given tagger's recompute tag, and schedules an invoice candidate cache invalidation
 * for after the transaction was committed.
 *
 * @param tagger provides tag, context and transaction to work in
 * @param onlyInvoiceCandidateIds if not empty, only the markers of these C_Invoice_Candidate_IDs are deleted
 */
protected final void deleteRecomputeMarkersAndInvalidateCache(
@NonNull final InvoiceCandRecomputeTagger tagger,
@Nullable final Collection<Integer> onlyInvoiceCandidateIds)
{
final Properties ctx = tagger.getCtx();
final InvoiceCandRecomputeTag recomputeTag = tagger.getRecomputeTag();
final String trxName = tagger.getTrxName();
// Select the markers which carry our tag
final IQueryBuilder<I_C_Invoice_Candidate_Recompute> queryBuilder = queryBL
.createQueryBuilder(I_C_Invoice_Candidate_Recompute.class, ctx, trxName)
.addEqualsFilter(I_C_Invoice_Candidate_Recompute.COLUMN_AD_PInstance_ID, recomputeTag.getPinstanceId());
//
// Delete only the specified invoice candidate IDs
if (!Check.isEmpty(onlyInvoiceCandidateIds))
{
queryBuilder.addInArrayOrAllFilter(I_C_Invoice_Candidate_Recompute.COLUMN_C_Invoice_Candidate_ID, onlyInvoiceCandidateIds);
}
final IQuery<I_C_Invoice_Candidate_Recompute> query = queryBuilder.create();
// Direct delete: no model interceptors involved
final int count = query.deleteDirectly();
Loggables.withLogger(logger, Level.DEBUG)
.addLog("Deleted {} {} entries for tag={}, onlyInvoiceCandidateIds={}", count, I_C_Invoice_Candidate_Recompute.Table_Name, recomputeTag, onlyInvoiceCandidateIds);
logger.debug("Query: {}", query);
// invalidate the invoice candidate cache after commit
Services.get(ITrxManager.class)
.getTrxListenerManagerOrAutoCommit(trxName)
.newEventListener(TrxEventTiming.AFTER_COMMIT)
.registerHandlingMethod(trx -> invalidateInvoiceCandidateCache(onlyInvoiceCandidateIds));
}
/**
 * Fires a cache invalidation for C_Invoice_Candidate records: for the given IDs,
 * or for the whole table if no IDs were provided.
 */
private void invalidateInvoiceCandidateCache(@Nullable final Collection<Integer> onlyInvoiceCandidateIds)
{
// Decide between whole-table and per-record invalidation
final CacheInvalidateMultiRequest multiRequest = Check.isEmpty(onlyInvoiceCandidateIds)
? CacheInvalidateMultiRequest.allRecordsForTable(I_C_Invoice_Candidate.Table_Name)
: CacheInvalidateMultiRequest.fromTableNameAndRecordIds(I_C_Invoice_Candidate.Table_Name, onlyInvoiceCandidateIds);
Services.get(IModelCacheInvalidationService.class)
.invalidate(multiRequest, ModelCacheInvalidationTiming.CHANGE);
}
/**
 * Un-tags all {@link I_C_Invoice_Candidate_Recompute} records which were tagged with the given
 * tagger's recompute tag, by directly setting their <code>AD_PInstance_ID</code> back to null.
 *
 * @return number of records which were un-tagged
 */
protected final int untag(@NonNull final InvoiceCandRecomputeTagger tagger)
{
final Properties ctx = tagger.getCtx();
final InvoiceCandRecomputeTag recomputeTag = tagger.getRecomputeTag();
final String trxName = tagger.getTrxName();
final IQuery<I_C_Invoice_Candidate_Recompute> query = queryBL
.createQueryBuilder(I_C_Invoice_Candidate_Recompute.class, ctx, trxName)
.addEqualsFilter(I_C_Invoice_Candidate_Recompute.COLUMN_AD_PInstance_ID, recomputeTag.getPinstanceId())
.create();
// Directly reset the tag column (no model interceptors/cache involved)
final int count = query
.updateDirectly()
.addSetColumnValue(I_C_Invoice_Candidate_Recompute.COLUMNNAME_AD_PInstance_ID, null)
.execute();
// fixed garbled wording of the debug message ("records with were tagged")
logger.debug("Un-tagged {} {} records which were tagged with recompute tag={}", count, I_C_Invoice_Candidate_Recompute.Table_Name, recomputeTag);
logger.debug("Query: {}", query);
logger.debug("Tagger: {}", tagger);
return count;
}
/**
 * @return true if at least one recompute record is currently tagged with the given tag.
 */
@Override
public final boolean hasInvalidInvoiceCandidatesForTag(final InvoiceCandRecomputeTag tag)
{
final PInstanceId pinstanceId = InvoiceCandRecomputeTag.getPinstanceIdOrNull(tag);
final IQueryBuilder<I_C_Invoice_Candidate_Recompute> queryBuilder = queryBL
.createQueryBuilder(I_C_Invoice_Candidate_Recompute.class, Env.getCtx(), ITrx.TRXNAME_ThreadInherited)
.addEqualsFilter(I_C_Invoice_Candidate_Recompute.COLUMN_AD_PInstance_ID, pinstanceId);
return queryBuilder.create().anyMatch();
}
/**
 * Builds the query for all active invoice candidates (of the context client) whose
 * bill partner is the given one, ordered by C_Invoice_Candidate_ID.
 */
private IQueryBuilder<I_C_Invoice_Candidate> retrieveForBillPartnerQuery(final I_C_BPartner bpartner)
{
final IQueryBuilder<I_C_Invoice_Candidate> queryBuilder = queryBL
.createQueryBuilder(I_C_Invoice_Candidate.class, bpartner)
.addOnlyActiveRecordsFilter()
.addOnlyContextClient()
.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_Bill_BPartner_ID, bpartner.getC_BPartner_ID())
.orderBy()
.addColumn(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID)
.endOrderBy();
return queryBuilder;
}
/**
 * Retrieves all open (not processed) invoice candidates whose bill partner is assigned to the
 * given invoice schedule and whose effective invoice rule is "CustomerScheduleAfterDelivery".
 * Results are streamed via a buffered iterator, ordered by C_Invoice_Candidate_ID.
 */
@Override
public Iterator<I_C_Invoice_Candidate> retrieveForInvoiceSchedule(final I_C_InvoiceSchedule invoiceSchedule)
{
// All active bill partners which use the given invoice schedule
final IQuery<I_C_BPartner> bpartnersQuery = queryBL.createQueryBuilder(I_C_BPartner.class, invoiceSchedule)
.addOnlyActiveRecordsFilter()
.addEqualsFilter(I_C_BPartner.COLUMN_C_InvoiceSchedule_ID, invoiceSchedule.getC_InvoiceSchedule_ID())
.create();
return queryBL.createQueryBuilder(I_C_Invoice_Candidate.class, invoiceSchedule)
.addOnlyActiveRecordsFilter()
.addOnlyContextClient()
.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_Processed, false)
.addInSubQueryFilter(I_C_Invoice_Candidate.COLUMNNAME_Bill_BPartner_ID, I_C_BPartner.COLUMNNAME_C_BPartner_ID, bpartnersQuery)
// effective invoice rule = COALESCE(InvoiceRule_Override, InvoiceRule)
.addCoalesceEqualsFilter(X_C_Invoice_Candidate.INVOICERULE_EFFECTIVE_CustomerScheduleAfterDelivery,
I_C_Invoice_Candidate.COLUMNNAME_InvoiceRule_Override,
I_C_Invoice_Candidate.COLUMNNAME_InvoiceRule)
//
.orderBy()
.addColumn(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID)
.endOrderBy()
//
.create()
.setOption(IQuery.OPTION_IteratorBufferSize, 2000)
.iterate(I_C_Invoice_Candidate.class);
}
/**
 * Mass-updates DateInvoiced for all invoice candidates in the given selection;
 * existing values are overwritten (not only-if-null).
 */
@Override
public final void updateDateInvoiced(
@Nullable final LocalDate dateInvoiced,
@NonNull final PInstanceId selectionId)
{
updateColumnForSelection(I_C_Invoice_Candidate.COLUMNNAME_DateInvoiced, dateInvoiced, false /* updateOnlyIfNull */, selectionId);
}
/**
 * Mass-updates DateAcct for all invoice candidates in the given selection;
 * existing values are overwritten (not only-if-null).
 */
@Override
public final void updateDateAcct(
@Nullable final LocalDate dateAcct,
@NonNull final PInstanceId selectionId)
{
updateColumnForSelection(I_C_Invoice_Candidate.COLUMNNAME_DateAcct, dateAcct, false /* updateOnlyIfNull */, selectionId);
}
/**
 * For all invoice candidates of the given selection whose DateAcct is still null,
 * copies the value of their DateInvoiced column into DateAcct.
 */
@Override
public final void updateNullDateAcctFromDateInvoiced(final PInstanceId selectionId)
{
updateColumnForSelection(
I_C_Invoice_Candidate.COLUMNNAME_DateAcct,
ModelColumnNameValue.forColumnName(I_C_Invoice_Candidate.COLUMNNAME_DateInvoiced) /* value: taken from the DateInvoiced column */,
true /* updateOnlyIfNull */,
selectionId);
}
/**
 * Mass-updates POReference for all invoice candidates in the given selection;
 * existing values are overwritten (not only-if-null).
 */
@Override
public final void updatePOReference(final String poReference, final PInstanceId selectionId)
{
updateColumnForSelection(I_C_Invoice_Candidate.COLUMNNAME_POReference, poReference, false /* updateOnlyIfNull */, selectionId);
}
/**
 * Mass-sets ApprovalForInvoicing to <code>true</code> for all invoice candidates in the given selection.
 */
@Override
public final void updateApprovalForInvoicingToTrue(@NonNull final PInstanceId selectionId)
{
updateColumnForSelection(I_C_Invoice_Candidate.COLUMNNAME_ApprovalForInvoicing, true, false /* updateOnlyIfNull */, selectionId);
}
/**
 * For all candidates in the given selection which have neither C_PaymentTerm_ID nor
 * C_PaymentTerm_Override_ID, sets C_PaymentTerm_Override_ID to the payment term of the first
 * candidate (ordered by ID) in the selection which has one, then invalidates the updated candidates.
 * Does nothing if there is nothing to update or no payment term can be found in the selection.
 */
@Override
public void updateMissingPaymentTermIds(final PInstanceId selectionId)
{
// Sub-selection: candidates with no payment term at all
final PInstanceId selectionToUpdateId = retrieveIcsToUpdateSelectionId(selectionId);
if (selectionToUpdateId == null)
{
return;
}
// Payment term to propagate, taken from the first candidate in the selection that has one
final PaymentTermId paymentTermId = retrievePaymentTermId(selectionId);
if (paymentTermId == null)
{
return;
}
final int updateCount = queryBL
.createQueryBuilder(I_C_Invoice_Candidate.class)
.setOnlySelection(selectionToUpdateId)
.create()
.updateDirectly()
.addSetColumnValue(I_C_Invoice_Candidate.COLUMNNAME_C_PaymentTerm_Override_ID, paymentTermId)
.execute();
Loggables.withLogger(logger, Level.INFO)
.addLog("updateMissingPaymentTermIds - {} C_Invoice_Candidates were updated; selectionId={}, paymentTermId={}",
updateCount, selectionId, paymentTermId);
// Invalidate the candidates which we updated
invalidateCandsForSelection(selectionToUpdateId, ITrx.TRXNAME_ThreadInherited);
}
/**
 * Creates a sub-selection of the given selection which contains only the candidates with no
 * payment term at all (neither C_PaymentTerm_ID nor C_PaymentTerm_Override_ID is set).
 *
 * @return the new selection's id, or {@code null} if no candidate needs updating (logged)
 */
private PInstanceId retrieveIcsToUpdateSelectionId(final PInstanceId selectionId)
{
final PInstanceId selectionToUpdateId = queryBL
.createQueryBuilder(I_C_Invoice_Candidate.class)
.setOnlySelection(selectionId)
.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_C_PaymentTerm_ID, null)
.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_C_PaymentTerm_Override_ID, null)
.create()
.createSelection();
if (selectionToUpdateId == null)
{
Loggables.withLogger(logger, Level.INFO)
.addLog("updateMissingPaymentTermIds - No C_Invoice_Candidate needs to be updated; selectionId={}",
selectionId);
}
return selectionToUpdateId;
}
/**
 * Determines the payment term to propagate within the given selection: takes the first candidate
 * (ordered by C_Invoice_Candidate_ID) which has C_PaymentTerm_ID or C_PaymentTerm_Override_ID set
 * and returns the override if present, otherwise the regular payment term.
 *
 * @return the payment term id, or {@code null} if no candidate in the selection has one (logged)
 */
private PaymentTermId retrievePaymentTermId(final PInstanceId selectionId)
{
// Candidates which have *any* payment term set (OR-joined)
final ICompositeQueryFilter<I_C_Invoice_Candidate> paymentTermSetFilter = queryBL
.createCompositeQueryFilter(I_C_Invoice_Candidate.class)
.setJoinOr()
.addNotEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_C_PaymentTerm_ID, null)
.addNotEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_C_PaymentTerm_Override_ID, null);
final I_C_Invoice_Candidate firstInvoiceCandidateWithPaymentTermId = queryBL
.createQueryBuilder(I_C_Invoice_Candidate.class)
.setOnlySelection(selectionId)
.filter(paymentTermSetFilter)
.orderBy()
.addColumnAscending(I_C_Invoice_Candidate.COLUMNNAME_C_Invoice_Candidate_ID).endOrderBy()
.create()
.first();
if (firstInvoiceCandidateWithPaymentTermId == null)
{
Loggables.withLogger(logger, Level.INFO)
.addLog("updateMissingPaymentTermIds - No C_Invoice_Candidate selected by selectionId={} has a C_PaymentTerm_ID; nothing to update", selectionId);
return null;
}
// Override wins over the regular payment term
return CoalesceUtil.coalesceSuppliers(
() -> PaymentTermId.ofRepoIdOrNull(firstInvoiceCandidateWithPaymentTermId.getC_PaymentTerm_Override_ID()),
() -> PaymentTermId.ofRepoIdOrNull(firstInvoiceCandidateWithPaymentTermId.getC_PaymentTerm_ID()));
}
/**
 * Mass-update a given invoice candidate column.
 * <p>
 * If there were any changes, those invoice candidates will be invalidated.
 *
 * @param <T> type of the value to set
 * @param columnName {@link I_C_Invoice_Candidate}'s column to update
 * @param value value to set (you can also use {@link ModelColumnNameValue})
 * @param updateOnlyIfNull if true then it will update only if column value is null (not set)
 * @param selectionId invoice candidates selection (AD_PInstance_ID)
 */
private final <T> void updateColumnForSelection(
@NonNull final String columnName,
@Nullable final T value,
final boolean updateOnlyIfNull,
@NonNull final PInstanceId selectionId)
{
//
// Create the selection which we will need to update
final IQueryBuilder<I_C_Invoice_Candidate> selectionQueryBuilder = queryBL
.createQueryBuilder(I_C_Invoice_Candidate.class)
.setOnlySelection(selectionId)
.addNotEqualsFilter(columnName, value) // skip those which have our value set
;
if (updateOnlyIfNull)
{
selectionQueryBuilder.addEqualsFilter(columnName, null);
}
// Snapshot the to-be-updated records as their own selection, so we can invalidate exactly them afterwards
final PInstanceId selectionToUpdateId = selectionQueryBuilder.create().createSelection();
if (selectionToUpdateId == null)
{
// Nothing matched => nothing to update, nothing to invalidate
Loggables.withLogger(logger, Level.INFO)
.addLog("updateColumnForSelection - No C_Invoice_Candidate needs to be updated; selectionId={}, columnName={}; updateOnlyIfNull={}, newValue={}",
selectionId, columnName, updateOnlyIfNull, value);
return;
}
// Update our new selection
final IQuery<I_C_Invoice_Candidate> updateQuery = queryBL
.createQueryBuilder(I_C_Invoice_Candidate.class)
.setOnlySelection(selectionToUpdateId)
.create();
final ICompositeQueryUpdater<I_C_Invoice_Candidate> updater = queryBL
.createCompositeQueryUpdater(I_C_Invoice_Candidate.class)
.addSetColumnValue(columnName, value);
final int updateCount = updateQuery.updateDirectly(updater);
Loggables.withLogger(logger, Level.INFO)
.addLog("updateColumnForSelection - {} C_Invoice_Candidates were updated; selectionId={}, columnName={}; updateOnlyIfNull={}, newValue={}",
updateCount, selectionId, columnName, updateOnlyIfNull, value);
// Invalidate the candidates which we updated
invalidateCandsForSelection(selectionToUpdateId, ITrx.TRXNAME_ThreadInherited);
}
/**
 * Sums the given amount column over all invoice candidates matched by the given query, grouped by
 * currency and conversion type, then converts each partial sum to the target currency and totals them.
 * Candidates with IsError=Y are always excluded.
 *
 * @param ctx context (conversion itself uses client/org taken from the other parameters)
 * @param query filter criteria; its org is mandatory ({@code getOrgIdNotNull()})
 * @param targetCurrencyId currency to convert all partial sums to
 * @param adClientId AD_Client_ID used both as filter and for the currency conversion
 * @param amountColumnName name of the C_Invoice_Candidate column to sum up
 * @param trxName transaction name
 * @return total amount in the target currency; ZERO if nothing matched
 */
@Override
public BigDecimal retrieveInvoicableAmount(
final Properties ctx,
@NonNull final InvoiceCandidateQuery query,
@NonNull final CurrencyId targetCurrencyId,
final int adClientId,
final String amountColumnName,
final String trxName)
{
final StringBuilder whereClause = new StringBuilder("1=1");
final List<Object> params = new ArrayList<>();
final OrgId orgId = query.getOrgIdNotNull();
// Bill BPartner
if (query.getBillBPartnerId() != null)
{
whereClause.append(" AND ").append(I_C_Invoice_Candidate.COLUMNNAME_Bill_BPartner_ID).append("=?");
params.add(query.getBillBPartnerId().getRepoId());
}
whereClause.append(" AND ").append(I_C_Invoice_Candidate.COLUMNNAME_AD_Org_ID).append("=?");
params.add(orgId.getRepoId());
// DateToInvoice (effective: Override falls back to the regular column)
if (query.getDateToInvoice() != null)
{
whereClause.append(" AND ")
.append(" COALESCE(" + I_C_Invoice_Candidate.COLUMNNAME_DateToInvoice_Override + "," + I_C_Invoice_Candidate.COLUMNNAME_DateToInvoice + ")").append("<=?");
params.add(TimeUtil.asTimestamp(
query.getDateToInvoice(),
orgDAO.getTimeZone(orgId) /* note that if dateToInvoice is not null, then orgId is also not null */));
}
// Filter HeaderAggregationKey
if (query.getHeaderAggregationKey() != null)
{
whereClause.append(" AND ").append(I_C_Invoice_Candidate.COLUMNNAME_HeaderAggregationKey).append("=?");
params.add(query.getHeaderAggregationKey());
}
// Exclude C_Invoice_Candidate
if (query.getExcludeC_Invoice_Candidate_ID() != null)
{
whereClause.append(" AND ").append(I_C_Invoice_Candidate.COLUMNNAME_C_Invoice_Candidate_ID).append("<>?");
params.add(query.getExcludeC_Invoice_Candidate_ID().getRepoId());
}
if (query.getMaxManualC_Invoice_Candidate_ID() != null)
{
// either the candidate is *not* manual, or its ID is less or equal than MaxManualC_Invoice_Candidate_ID
whereClause.append(" AND (")
.append(I_C_Invoice_Candidate.COLUMNNAME_IsManual + "=? OR ")
.append(I_C_Invoice_Candidate.COLUMNNAME_C_Invoice_Candidate_ID).append("<=?");
whereClause.append(")");
params.add(false);
params.add(query.getMaxManualC_Invoice_Candidate_ID().getRepoId());
}
// Processed
if (query.getProcessed() != null)
{
whereClause.append(" AND ").append(I_C_Invoice_Candidate.COLUMNNAME_Processed).append("=?");
params.add(query.getProcessed().booleanValue());
}
// Exclude those with errors
whereClause.append(" AND ").append(I_C_Invoice_Candidate.COLUMNNAME_IsError).append("=?");
params.add(false);
// Filter by AD_Client_ID
whereClause.append(" AND ").append(I_C_Invoice_Candidate.COLUMNNAME_AD_Client_ID).append("=?");
params.add(adClientId);
final String sql = "SELECT "
+ I_C_Invoice_Candidate.COLUMNNAME_C_Currency_ID
+ ", " + I_C_Invoice_Candidate.COLUMNNAME_C_ConversionType_ID
+ ", SUM(" + amountColumnName + ") as NetAmt"
+ " FROM " + I_C_Invoice_Candidate.Table_Name
+ " WHERE " + whereClause
+ " GROUP BY "
+ I_C_Invoice_Candidate.COLUMNNAME_C_Currency_ID + ","
+ I_C_Invoice_Candidate.COLUMNNAME_C_ConversionType_ID;
// currencyId -> conversionTypeId -> summed amount; HashMap because both keys might legally be null
final HashMap<CurrencyId, HashMap<CurrencyConversionTypeId, BigDecimal>> currencyId2conversion2Amt = new HashMap<>();
final PreparedStatement pstmt = DB.prepareStatement(sql, trxName);
ResultSet rs = null;
try
{
DB.setParameters(pstmt, params);
rs = pstmt.executeQuery();
while (rs.next())
{
final BigDecimal netAmt = rs.getBigDecimal("NetAmt");
if (rs.wasNull())
{
continue; // no amount in this group => nothing to add
}
final CurrencyId currencyId = CurrencyId.ofRepoIdOrNull(rs.getInt(I_C_Invoice_Candidate.COLUMNNAME_C_Currency_ID));
final CurrencyConversionTypeId conversionTypeId = CurrencyConversionTypeId.ofRepoIdOrNull(rs.getInt(I_C_Invoice_Candidate.COLUMNNAME_C_ConversionType_ID));
currencyId2conversion2Amt
.computeIfAbsent(currencyId, k -> new HashMap<>())
.put(conversionTypeId, netAmt);
}
}
catch (final SQLException e)
{
throw new DBException(e, sql, params);
}
finally
{
DB.close(rs, pstmt);
}
// Conversion date to be used on currency conversion
final LocalDate dateConv = SystemTime.asLocalDate();
BigDecimal result = BigDecimal.ZERO;
for (final Map.Entry<CurrencyId, HashMap<CurrencyConversionTypeId, BigDecimal>> currencyEntry : currencyId2conversion2Amt.entrySet())
{
final CurrencyId currencyId = currencyEntry.getKey();
for (final Map.Entry<CurrencyConversionTypeId, BigDecimal> conversionEntry : currencyEntry.getValue().entrySet())
{
final BigDecimal amtConverted = Services.get(ICurrencyBL.class).convert(
conversionEntry.getValue(), // Amt
currencyId, // CurFrom_ID,
targetCurrencyId, // CurTo_ID,
dateConv, // ConvDate,
conversionEntry.getKey(), // ConversionType_ID
ClientId.ofRepoId(adClientId),
orgId);
result = result.add(amtConverted);
}
}
return result;
}
/**
 * Retrieves all M_InOutLine records referencing the given order line (active or not).
 */
@Override
public final List<I_M_InOutLine> retrieveInOutLines(final Properties ctx, final int C_OrderLine_ID, final String trxName)
{
final IQueryBuilder<I_M_InOutLine> queryBuilder = queryBL
.createQueryBuilder(I_M_InOutLine.class, ctx, trxName)
.addEqualsFilter(I_M_InOutLine.COLUMN_C_OrderLine_ID, C_OrderLine_ID);
return queryBuilder.create().list(I_M_InOutLine.class);
}
/**
 * Persists only the error-related columns (SchedulerResult, IsError, ErrorMsg, AD_Note_ID) of the
 * given invoice candidate via direct SQL, within the candidate's own transaction.
 * NOTE(review): direct SQL bypasses the model layer (interceptors/cache) — presumably so that
 * recording the error cannot itself trigger further model logic; confirm before changing.
 */
protected void saveErrorToDB(@NonNull final I_C_Invoice_Candidate ic)
{
final String sql = "UPDATE " + I_C_Invoice_Candidate.Table_Name + " SET "
+ " " + I_C_Invoice_Candidate.COLUMNNAME_SchedulerResult + "=?"
+ "," + I_C_Invoice_Candidate.COLUMNNAME_IsError + "=?"
+ "," + I_C_Invoice_Candidate.COLUMNNAME_ErrorMsg + "=?"
+ "," + I_C_Invoice_Candidate.COLUMNNAME_AD_Note_ID + "=?"
+ " WHERE " + I_C_Invoice_Candidate.COLUMNNAME_C_Invoice_Candidate_ID + "=?";
// parameter order matches the SET/WHERE order above
final Object[] sqlParams = new Object[] {
ic.getSchedulerResult(), ic.isError(), ic.getErrorMsg(), ic.getAD_Note_ID(), ic.getC_Invoice_Candidate_ID()
};
final String trxName = InterfaceWrapperHelper.getTrxName(ic);
DB.executeUpdateEx(sql, sqlParams, trxName);
}
/**
 * Invalidates (schedules for recompute) the given invoice candidates, no matter if Processed or not.
 * {@code null} entries and entries without a valid C_Invoice_Candidate_ID are skipped.
 */
@Override
public final void invalidateCands(@Nullable final List<I_C_Invoice_Candidate> ics)
{
// Extract C_Invoice_Candidate_IDs
if (ics == null || ics.isEmpty())
{
return; // nothing to do for us
}
final ImmutableSet<InvoiceCandidateId> icIds = ics.stream()
.filter(Objects::nonNull)
.map(ic -> InvoiceCandidateId.ofRepoIdOrNull(ic.getC_Invoice_Candidate_ID()))
.filter(Objects::nonNull)
.collect(ImmutableSet.toImmutableSet()); // the set already de-duplicates; a distinct() step is not needed
if (icIds.isEmpty())
{
return;
}
// note: invalidate, no matter if Processed or not
final IQueryBuilder<I_C_Invoice_Candidate> icQueryBuilder = queryBL
.createQueryBuilder(I_C_Invoice_Candidate.class)
.addInArrayFilter(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID, icIds);
invalidateCandsFor(icQueryBuilder);
}
/**
 * @return true if there is at least one recompute marker for the given invoice candidate.
 */
@Override
public final boolean isToRecompute(final I_C_Invoice_Candidate ic)
{
final IQueryBuilder<I_C_Invoice_Candidate_Recompute> queryBuilder = queryBL
.createQueryBuilder(I_C_Invoice_Candidate_Recompute.class, ic)
.addEqualsFilter(I_C_Invoice_Candidate_Recompute.COLUMN_C_Invoice_Candidate_ID, ic.getC_Invoice_Candidate_ID())
.setLimit(1);
return queryBuilder.create().anyMatch();
}
/**
 * Retrieves all active C_Invoice_Detail records of the given candidate, ordered by SeqNo,
 * then IsPrinted, IsDetailOverridesLine and IsPrintBefore (each descending, nulls last).
 */
@Override
public List<I_C_Invoice_Detail> retrieveInvoiceDetails(final I_C_Invoice_Candidate ic)
{
final IQueryBuilder<I_C_Invoice_Detail> queryBuilder = queryBL.createQueryBuilder(I_C_Invoice_Detail.class, ic)
.addOnlyActiveRecordsFilter()
.addEqualsFilter(I_C_Invoice_Detail.COLUMN_C_Invoice_Candidate_ID, ic.getC_Invoice_Candidate_ID())
.orderBy()
.addColumn(I_C_Invoice_Detail.COLUMNNAME_SeqNo)
.addColumn(I_C_Invoice_Detail.COLUMNNAME_IsPrinted, Direction.Descending, Nulls.Last)
.addColumn(I_C_Invoice_Detail.COLUMNNAME_IsDetailOverridesLine, Direction.Descending, Nulls.Last)
.addColumn(I_C_Invoice_Detail.COLUMNNAME_IsPrintBefore, Direction.Descending, Nulls.Last)
.endOrderBy();
return queryBuilder.create().list();
}
/**
 * Adds the default org-access restriction to the given query builder: only invoice candidates
 * belonging to organizations the current role (taken from the builder's context) may access.
 *
 * @return the same builder, for chaining; returned unchanged if the context is empty
 */
@Override
public IQueryBuilder<I_C_Invoice_Candidate> applyDefaultFilter(
@NonNull final IQueryBuilder<I_C_Invoice_Candidate> queryBuilder)
{
final Properties ctx = queryBuilder.getCtx();
// shall never happen
if (ctx.isEmpty())
{
return queryBuilder;
}
// Only filter invoice candidates of the organizations this role has access to
final IUserRolePermissions userRolePermissions = Env.getUserRolePermissions(ctx);
return queryBuilder.addInArrayOrAllFilter(I_C_Invoice_Candidate.COLUMNNAME_AD_Org_ID, userRolePermissions.getAD_Org_IDs_AsSet());
}
/**
 * Builds the default org-access SQL filter for invoice candidates:
 * <code>AD_Org_ID IN (...)</code> over the orgs the current role may access.
 *
 * @return the SQL fragment, or the empty string if the role's org list is empty
 */
@Override
public String getSQLDefaultFilter(final Properties ctx)
{
// Only filter invoice candidates of the organizations this role has access to
final IUserRolePermissions userRolePermissions = Env.getUserRolePermissions(ctx);
final String orgIDsAsString = userRolePermissions.getAD_Org_IDs_AsString();
if (Check.isEmpty(orgIDsAsString))
{
return "";
}
// simple concatenation instead of the former StringBuilder("") dance
return I_C_Invoice_Candidate.COLUMNNAME_AD_Org_ID + " IN (" + orgIDsAsString + ")";
}
// @Override
// public IQueryBuilder<I_C_Invoice_Candidate> retrieveInvoiceCandidatesForInventoryLineQuery(final I_M_InventoryLine inventoryLine)
// {
// final Properties ctx = InterfaceWrapperHelper.getCtx(inventoryLine);
// final int adTableId = InterfaceWrapperHelper.getTableId(I_M_InventoryLine.class);
// final int recordId = inventoryLine.getM_InventoryLine_ID();
// final String trxName = InterfaceWrapperHelper.getTrxName(inventoryLine);
//
// return retrieveInvoiceCandidatesForRecordQuery(ctx, adTableId, recordId, trxName);
// }
/**
 * Retrieves the order document numbers of "incomplete groups" for the given selection,
 * by reading the <code>C_Invoice_Candidate_SelectionIncompleteGroups</code> view/table.
 *
 * @return distinct OrderDocumentNo values; empty set if none
 */
@Override
public Set<String> retrieveOrderDocumentNosForIncompleteGroupsFromSelection(final PInstanceId adPInstanceId)
{
// select only the one column we actually read, instead of the former SELECT *
final String sql = "SELECT OrderDocumentNo FROM C_Invoice_Candidate_SelectionIncompleteGroups WHERE AD_PInstance_ID=?";
final List<Object> sqlParams = Arrays.asList(adPInstanceId);
PreparedStatement pstmt = null;
ResultSet rs = null;
try
{
pstmt = DB.prepareStatement(sql, ITrx.TRXNAME_ThreadInherited);
DB.setParameters(pstmt, sqlParams);
rs = pstmt.executeQuery();
final ImmutableSet.Builder<String> orderDocumentNos = ImmutableSet.builder();
while (rs.next())
{
final String orderDocumentNo = rs.getString("OrderDocumentNo");
orderDocumentNos.add(orderDocumentNo);
}
return orderDocumentNos.build();
}
catch (final SQLException ex)
{
throw new DBException(ex, sql, sqlParams);
}
finally
{
DB.close(rs, pstmt);
}
}
/**
 * Looks at the order's active non-freight-cost candidates (ordered by DeliveryDate) and returns:
 * <ul>
 * <li>(null, false) if the order has no non-freight-cost candidates at all</li>
 * <li>(id, true) for the first candidate with a positive QtyToInvoice or QtyToInvoice_Override</li>
 * <li>(null, true) if candidates exist but none is invoiceable yet (i.e. there is something to wait for)</li>
 * </ul>
 */
@Override
public InvoiceableInvoiceCandIdResult getFirstInvoiceableInvoiceCandId(@NonNull final OrderId orderId)
{
final List<I_C_Invoice_Candidate> nonFreightCostICs = queryBL
.createQueryBuilder(I_C_Invoice_Candidate.class)
.addOnlyActiveRecordsFilter()
.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_C_Order_ID, orderId)
.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_IsFreightCost, false)
.orderBy(I_C_Invoice_Candidate.COLUMNNAME_DeliveryDate)
.create()
.list();
if (nonFreightCostICs.isEmpty())
{
return new InvoiceableInvoiceCandIdResult(null/* firstInvoiceableInvoiceCandId */, false/* orderHasInvoiceCandidatesToWaitFor */);
}
// First candidate (by DeliveryDate) which has something to invoice wins
for (final I_C_Invoice_Candidate nonFreightCostIC : nonFreightCostICs)
{
if (nonFreightCostIC.getQtyToInvoice().signum() > 0 || nonFreightCostIC.getQtyToInvoice_Override().signum() > 0)
{
return new InvoiceableInvoiceCandIdResult(
InvoiceCandidateId.ofRepoId(nonFreightCostIC.getC_Invoice_Candidate_ID())/* firstInvoiceableInvoiceCandId */,
true/* orderHasInvoiceCandidatesToWaitFor */);
}
}
return new InvoiceableInvoiceCandIdResult(
null/* firstInvoiceableInvoiceCandId */,
true/* orderHasInvoiceCandidatesToWaitFor */);
}
/**
 * Invalidates (schedules for recompute) the not-yet-processed freight cost candidate(s) of the given order.
 */
@Override
public void invalidateUninvoicedFreightCostCandidate(@NonNull final OrderId orderId)
{
invalidateCandsFor(queryBL
.createQueryBuilder(I_C_Invoice_Candidate.class)
.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_C_Order_ID, orderId)
.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_IsFreightCost, true)
.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_Processed, false));
}
/**
 * Retrieves all invoice candidates matched by the given multi-query, ordered by C_Invoice_Candidate_ID.
 */
@Override
public List<I_C_Invoice_Candidate> getByQuery(@NonNull final InvoiceCandidateMultiQuery multiQuery)
{
final IQuery<I_C_Invoice_Candidate> query = convertToIQuery(multiQuery);
return query.list();
}
/**
 * Creates an AD_PInstance selection of all invoice candidates matched by the given multi-query.
 *
 * @return number of records added to the selection
 */
@Override
public int createSelectionByQuery(@NonNull final InvoiceCandidateMultiQuery multiQuery, @NonNull final PInstanceId pInstanceId)
{
final IQuery<I_C_Invoice_Candidate> query = convertToIQuery(multiQuery);
return query.createSelection(pInstanceId);
}
/**
 * Converts the given multi-query into one OR-joined {@link IQuery}: each contained
 * {@link InvoiceCandidateQuery} becomes one AND-filter, the results are ordered by C_Invoice_Candidate_ID.
 */
private IQuery<I_C_Invoice_Candidate> convertToIQuery(@NonNull final InvoiceCandidateMultiQuery multiQuery)
{
final IQueryBuilder<I_C_Invoice_Candidate> queryBuilder = queryBL
.createQueryBuilder(I_C_Invoice_Candidate.class)
// exploding ORs to unions works only with simple cases, but e.g. currently not if we want to use IQuery.createSelection() down the line
.setOption(IQueryBuilder.OPTION_Explode_OR_Joins_To_SQL_Unions, false)
.setJoinOr();
for (final InvoiceCandidateQuery query : multiQuery.getQueries())
{
queryBuilder.filter(toFilter(query));
}
queryBuilder.orderBy(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID);
return queryBuilder.create();
}
/**
 * Converts one {@link InvoiceCandidateQuery} into an AND-joined composite filter.
 * Every non-null query property contributes one restriction; only-active-records is always enforced.
 */
private ICompositeQueryFilter<I_C_Invoice_Candidate> toFilter(@NonNull final InvoiceCandidateQuery query)
{
final ICompositeQueryFilter<I_C_Invoice_Candidate> filter = queryBL
.createCompositeQueryFilter(I_C_Invoice_Candidate.class)
.addOnlyActiveRecordsFilter();
final OrgId orgId = query.getOrgId();
if (orgId != null)
{
filter.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_AD_Org_ID, orgId);
}
final SOTrx soTrx = query.getSoTrx();
if (soTrx != null)
{
filter.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_IsSOTrx, soTrx.isSales());
}
final InvoiceCandidateId invoiceCandidateId = query.getInvoiceCandidateId();
if (invoiceCandidateId != null)
{
filter.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID, invoiceCandidateId);
}
final BPartnerId billBPartnerId = query.getBillBPartnerId();
if (billBPartnerId != null)
{
filter.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_Bill_BPartner_ID, billBPartnerId);
}
final BPartnerId salesRepBPartnerId = query.getSalesRepBPartnerId();
if (salesRepBPartnerId != null)
{
filter.addEqualsFilter(I_C_Invoice_Candidate.COLUMNNAME_C_BPartner_SalesRep_ID, salesRepBPartnerId);
}
// DateOrdered within the given interval (inclusive between)
final InstantInterval dateOrderedInterval = query.getDateOrderedInterval();
if (dateOrderedInterval != null)
{
final Timestamp from = TimeUtil.asTimestamp(dateOrderedInterval.getFrom());
final Timestamp to = TimeUtil.asTimestamp(dateOrderedInterval.getTo());
filter.addBetweenFilter(I_C_Invoice_Candidate.COLUMNNAME_DateOrdered, from, to);
}
// Effective date-to-invoice: DateToInvoice_Override, falling back to DateToInvoice
final LocalDate dateToInvoice = query.getDateToInvoice();
if (dateToInvoice != null)
{
filter.addCoalesceEqualsFilter(
TimeUtil.asTimestamp(dateToInvoice, orgDAO.getTimeZone(orgId)) /* note that if dateToInvoice is not null, then orgId is also not null */,
I_C_Invoice_Candidate.COLUMNNAME_DateToInvoice_Override, I_C_Invoice_Candidate.COLUMNNAME_DateToInvoice);
}
final String headerAggregationKey = query.getHeaderAggregationKey();
if (!Check.isEmpty(headerAggregationKey, true))
{
filter.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_HeaderAggregationKey, headerAggregationKey);
}
final InvoiceCandidateId excludeC_Invoice_Candidate_ID = query.getExcludeC_Invoice_Candidate_ID();
if (excludeC_Invoice_Candidate_ID != null)
{
filter.addNotEqualsFilter(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID, excludeC_Invoice_Candidate_ID);
}
final InvoiceCandidateId maxManualC_Invoice_Candidate_ID = query.getMaxManualC_Invoice_Candidate_ID();
if (maxManualC_Invoice_Candidate_ID != null)
{
// either the candidate is not manual, or its ID is <= the given maximum (OR-joined)
final ICompositeQueryFilter<I_C_Invoice_Candidate> manualIcMaxFilter = queryBL
.createCompositeQueryFilter(I_C_Invoice_Candidate.class)
.setJoinOr()
.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_IsManual, false)
.addCompareFilter(I_C_Invoice_Candidate.COLUMN_C_Invoice_Candidate_ID, Operator.LESS_OR_EQUAL, maxManualC_Invoice_Candidate_ID.getRepoId());
filter.addFilter(manualIcMaxFilter);
}
final Boolean processed = query.getProcessed();
if (processed != null)
{
filter.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_Processed, processed);
}
final Boolean error = query.getError();
if (error != null)
{
filter.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_IsError, error);
}
// External system references: header id must match; line ids restrict further (if any were given)
final ExternalHeaderIdWithExternalLineIds externalIds = query.getExternalIds();
if (externalIds != null)
{
final String headerIdAsString = externalIds.getExternalHeaderId().getValue();
final ImmutableList<String> lineIdsAsString = externalIds
.getExternalLineIds()
.stream()
.map(ExternalId::getValue)
.collect(ImmutableList.toImmutableList());
final ICompositeQueryFilter<I_C_Invoice_Candidate> invoiceCandidatesFilter = queryBL
.createCompositeQueryFilter(I_C_Invoice_Candidate.class)
.addOnlyActiveRecordsFilter()
.addEqualsFilter(I_C_Invoice_Candidate.COLUMN_ExternalHeaderId, headerIdAsString)
.addInArrayOrAllFilter(I_C_Invoice_Candidate.COLUMN_ExternalLineId, lineIdsAsString);
filter.addFilter(invoiceCandidatesFilter);
}
return filter;
}
}
| 38.012313 | 214 | 0.788134 |
ddfe55154482952cc85b62b0d4f7c79d06c206b8 | 20,184 | swift | Swift | Keyboard/KeyboardViewController.swift | massimoksi/Keybored | e720884b99cbf3f02ec14e23c9640e7319a27c01 | [
"MIT"
] | null | null | null | Keyboard/KeyboardViewController.swift | massimoksi/Keybored | e720884b99cbf3f02ec14e23c9640e7319a27c01 | [
"MIT"
] | null | null | null | Keyboard/KeyboardViewController.swift | massimoksi/Keybored | e720884b99cbf3f02ec14e23c9640e7319a27c01 | [
"MIT"
] | null | null | null | // KeyboardViewController.swift
//
// Copyright (c) 2015 Massimo Peri (@massimoksi)
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
import UIKit
class KeyboardViewController: UIInputViewController {
@IBOutlet var nameButton: UIButton!
@IBOutlet var surnameButton: UIButton!
@IBOutlet var mailButton: UIButton!
@IBOutlet var nextKeyboardButton: UIButton!
@IBOutlet var spaceButton: UIButton!
@IBOutlet var deleteButton: UIButton!
var name: String?
var surname: String?
var mail: String?
var buttonHeightConstraints: [NSLayoutConstraint]?
var buttonSpacingConstraints: [NSLayoutConstraint]?
/// Called by UIKit when the view's constraints need to be updated.
/// Custom sizing constraints for the keyboard view would be added here, after calling super.
override func updateViewConstraints() {
super.updateViewConstraints()
// Add custom view sizing constraints here
}
// Builds the six keys of the keyboard and installs the static constraints.
// The original repeated the same nine-line button setup six times; the
// shared configuration now lives in one local factory.
override func viewDidLoad() {
    super.viewDidLoad()
    // Read the values entered in the host app from the shared app-group defaults.
    let userDefaults = NSUserDefaults(suiteName: "group.com.gmail.massimoperi.ios.Keybored")
    self.name = userDefaults?.stringForKey("Name")
    self.surname = userDefaults?.stringForKey("Surname")
    self.mail = userDefaults?.stringForKey("Mail")
    // Every key shares the same look (rounded rect, 21pt title, 1pt drop
    // shadow) and only differs in its label/icon and the action it fires.
    func makeKey(title: String?, image: UIImage?, action: Selector) -> UIButton {
        let button = UIButton.buttonWithType(.System) as UIButton
        if let title = title {
            button.setTitle(title, forState: .Normal)
            button.titleLabel?.font = UIFont.systemFontOfSize(21.0)
        }
        if let image = image {
            button.setImage(image, forState: .Normal)
        }
        button.setTranslatesAutoresizingMaskIntoConstraints(false)
        button.layer.cornerRadius = 5.0
        button.layer.shadowOpacity = 1.0
        button.layer.shadowRadius = 0.0
        button.layer.shadowOffset = CGSizeMake(0.0, 1.0)
        button.addTarget(self, action: action, forControlEvents: .TouchUpInside)
        self.view.addSubview(button)
        return button
    }
    self.nameButton = makeKey(NSLocalizedString("Name", comment: ""), nil, "addName:")
    self.surnameButton = makeKey(NSLocalizedString("Surname", comment: ""), nil, "addSurname:")
    self.mailButton = makeKey(NSLocalizedString("E-mail", comment: ""), nil, "addMail:")
    self.nextKeyboardButton = makeKey(nil, UIImage(named: "Next"), "advanceToNextInputMode")
    self.spaceButton = makeKey(NSLocalizedString("space", comment: ""), nil, "addSpace:")
    self.deleteButton = makeKey(nil, UIImage(named: "Delete"), "backspace:")
    self.setupConstraints()
}
// A text key is only usable once the corresponding value was configured in
// the host app. `x != nil` is already a Bool, so the redundant
// `? true : false` from the original was dropped.
override func viewWillAppear(animated: Bool) {
    super.viewWillAppear(animated)
    self.nameButton.enabled = self.name != nil
    self.surnameButton.enabled = self.surname != nil
    self.mailButton.enabled = self.mail != nil
}
// Nothing is cached by this controller, so only the superclass cleanup runs.
override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated
}
// Called just before the host app changes the document's contents; this
// keyboard needs no preparation.
override func textWillChange(textInput: UITextInput) {
    // The app is about to change the document's contents. Perform any preparation here.
}
// The document context was just updated; restyle every key so it matches
// the keyboard appearance (dark or light) requested by the text field.
// Fixes: `proxy` was declared `var` but never mutated; per-button styling
// lines were repeated for each key and are now applied in groups.
override func textDidChange(textInput: UITextInput) {
    var normalTextColor: UIColor
    var disabledTextColor: UIColor
    var backgroundColor: UIColor
    var accessoryBackgroundColor: UIColor
    var shadowColor: UIColor
    let proxy = self.textDocumentProxy as UITextDocumentProxy
    if proxy.keyboardAppearance == UIKeyboardAppearance.Dark {
        normalTextColor = UIColor.whiteColor()
        disabledTextColor = UIColor(white: 1.0, alpha: 0.4)
        backgroundColor = UIColor(white: 0.0, alpha: 0.2)
        accessoryBackgroundColor = UIColor(white: 1.0, alpha: 0.2)
        shadowColor = UIColor.blackColor()
    } else {
        normalTextColor = UIColor.blackColor()
        disabledTextColor = UIColor(white: 0.0, alpha: 0.2)
        backgroundColor = UIColor(white: 1.0, alpha: 1.0)
        accessoryBackgroundColor = UIColor(red: 171.0/255.0, green: 181.0/255.0, blue: 190.0/255.0, alpha: 1.0)
        shadowColor = UIColor(red: 136.0/255.0, green: 139.0/255.0, blue: 143.0/255.0, alpha: 1.0)
    }
    // Text keys that can be disabled (name/surname/mail) share one style.
    for button in [self.nameButton, self.surnameButton, self.mailButton] {
        button.setTitleColor(normalTextColor, forState: .Normal)
        button.setTitleColor(disabledTextColor, forState: .Disabled)
        button.backgroundColor = backgroundColor
        button.layer.shadowColor = shadowColor.CGColor
    }
    // The space bar is never disabled, so it only needs the normal color.
    self.spaceButton.setTitleColor(normalTextColor, forState: .Normal)
    self.spaceButton.backgroundColor = backgroundColor
    self.spaceButton.layer.shadowColor = shadowColor.CGColor
    // Accessory keys (globe/delete) use image tinting and a darker background.
    for button in [self.nextKeyboardButton, self.deleteButton] {
        button.tintColor = normalTextColor
        button.backgroundColor = accessoryBackgroundColor
        button.layer.shadowColor = shadowColor.CGColor
    }
}
// Rebuilds the orientation-dependent constraints on every layout pass.
// The original duplicated ~28 lines between the landscape and portrait
// branches, differing only in two constants (key height 30/41, row
// spacing 8/12); the constants are now selected once up front.
override func viewDidLayoutSubviews() {
    super.viewDidLayoutSubviews()
    // Drop the previous size-dependent constraints before re-adding them.
    if let heightConstraints = self.buttonHeightConstraints {
        self.view.removeConstraints(heightConstraints)
    }
    if let spacingConstraints = self.buttonSpacingConstraints {
        self.view.removeConstraints(spacingConstraints)
    }
    // iPhone in landscape gets a shorter keyboard: smaller keys, tighter rows.
    let isPhoneLandscape = (UIDevice.currentDevice().userInterfaceIdiom == .Phone) &&
        (UIScreen.mainScreen().bounds.size.width > UIScreen.mainScreen().bounds.size.height)
    let keyHeight: CGFloat = isPhoneLandscape ? 30.0 : 41.0
    let rowSpacing: CGFloat = isPhoneLandscape ? 8.0 : 12.0
    // One fixed-height constraint per key.
    let allButtons = [self.nameButton, self.surnameButton, self.mailButton,
                      self.nextKeyboardButton, self.spaceButton, self.deleteButton]
    self.buttonHeightConstraints = allButtons.map { button in
        NSLayoutConstraint(item: button, attribute: .Height, relatedBy: .Equal, toItem: nil, attribute: .NotAnAttribute, multiplier: 1.0, constant: keyHeight)
    }
    // Stack the name/surname/mail rows vertically from the top of the view.
    self.buttonSpacingConstraints = [
        NSLayoutConstraint(item: self.nameButton, attribute: .Top, relatedBy: .Equal, toItem: self.view, attribute: .Top, multiplier: 1.0, constant: rowSpacing),
        NSLayoutConstraint(item: self.surnameButton, attribute: .Top, relatedBy: .Equal, toItem: self.nameButton, attribute: .Bottom, multiplier: 1.0, constant: rowSpacing),
        NSLayoutConstraint(item: self.mailButton, attribute: .Top, relatedBy: .Equal, toItem: self.surnameButton, attribute: .Bottom, multiplier: 1.0, constant: rowSpacing)
    ]
    self.view.addConstraints(self.buttonHeightConstraints!)
    self.view.addConstraints(self.buttonSpacingConstraints!)
}
// MARK: - Actions
// Insert the stored name at the cursor, if the host app configured one.
@IBAction func addName(sender: UIButton) {
    if let name = self.name {
        (self.textDocumentProxy as UITextDocumentProxy).insertText(name)
    }
}
// Insert the stored surname at the cursor, if the host app configured one.
@IBAction func addSurname(sender: UIButton) {
    if let surname = self.surname {
        (self.textDocumentProxy as UITextDocumentProxy).insertText(surname)
    }
}
// Insert the stored e-mail address at the cursor, if one was configured.
@IBAction func addMail(sender: UIButton) {
    if let mail = self.mail {
        (self.textDocumentProxy as UITextDocumentProxy).insertText(mail)
    }
}
// Type a single space character into the current document.
@IBAction func addSpace(sender: UIButton) {
    (self.textDocumentProxy as UITextDocumentProxy).insertText(" ")
}
// Delete the character before the cursor in the current document.
@IBAction func backspace(sender: UIButton) {
    (self.textDocumentProxy as UITextDocumentProxy).deleteBackward()
}
// MARK: - Private functions
// Install the constraints that never change: horizontal placement of every
// key and the bottom row's vertical pinning. Heights and row spacing are
// orientation-dependent and handled in viewDidLayoutSubviews.
// The original repeated the identical width/left constraint pair for the
// three full-width text keys; they are built in one loop now.
private func setupConstraints() {
    for button in [self.nameButton, self.surnameButton, self.mailButton] {
        let widthConstraint = NSLayoutConstraint(item: button, attribute: .Width, relatedBy: .Equal, toItem: self.view, attribute: .Width, multiplier: 1.0, constant: -8.0)
        // 999 (just below required) — presumably so the system can break the
        // constraint while it is still sizing the extension's view; confirm.
        widthConstraint.priority = 999
        let leftConstraint = NSLayoutConstraint(item: button, attribute: .Left, relatedBy: .Equal, toItem: self.view, attribute: .Left, multiplier: 1.0, constant: 4.0)
        self.view.addConstraints([widthConstraint, leftConstraint])
    }
    // Bottom row: globe key pinned bottom-left at a fixed 42pt width.
    let nextKeyboardButtonWidthConstraint = NSLayoutConstraint(item: self.nextKeyboardButton, attribute: .Width, relatedBy: .Equal, toItem: nil, attribute: .NotAnAttribute, multiplier: 1.0, constant: 42.0)
    let nextKeyboardButtonLeftSideConstraint = NSLayoutConstraint(item: self.nextKeyboardButton, attribute: .Left, relatedBy: .Equal, toItem: self.view, attribute: .Left, multiplier: 1.0, constant: 4.0)
    let nextKeyboardButtonBottomConstraint = NSLayoutConstraint(item: self.nextKeyboardButton, attribute: .Bottom, relatedBy: .Equal, toItem: self.view, attribute: .Bottom, multiplier: 1.0, constant: -4.0)
    self.view.addConstraints([nextKeyboardButtonWidthConstraint, nextKeyboardButtonLeftSideConstraint, nextKeyboardButtonBottomConstraint])
    // Space bar stretches between the globe and delete keys.
    let spaceButtonLeadingConstraint = NSLayoutConstraint(item: self.spaceButton, attribute: .Leading, relatedBy: .Equal, toItem: self.nextKeyboardButton, attribute: .Trailing, multiplier: 1.0, constant: 8.0)
    spaceButtonLeadingConstraint.priority = 999
    let spaceButtonTrailingConstraint = NSLayoutConstraint(item: self.spaceButton, attribute: .Trailing, relatedBy: .Equal, toItem: self.deleteButton, attribute: .Leading, multiplier: 1.0, constant: -8.0)
    let spaceButtonBottomConstraint = NSLayoutConstraint(item: self.spaceButton, attribute: .Bottom, relatedBy: .Equal, toItem: self.view, attribute: .Bottom, multiplier: 1.0, constant: -4.0)
    self.view.addConstraints([spaceButtonLeadingConstraint, spaceButtonTrailingConstraint, spaceButtonBottomConstraint])
    // Delete key pinned bottom-right at a fixed 42pt width.
    let deleteButtonWidthConstraint = NSLayoutConstraint(item: self.deleteButton, attribute: .Width, relatedBy: .Equal, toItem: nil, attribute: .NotAnAttribute, multiplier: 1.0, constant: 42.0)
    let deleteButtonRightSideConstraint = NSLayoutConstraint(item: self.deleteButton, attribute: .Right, relatedBy: .Equal, toItem: self.view, attribute: .Right, multiplier: 1.0, constant: -4.0)
    let deleteButtonBottomConstraint = NSLayoutConstraint(item: self.deleteButton, attribute: .Bottom, relatedBy: .Equal, toItem: self.view, attribute: .Bottom, multiplier: 1.0, constant: -4.0)
    self.view.addConstraints([deleteButtonWidthConstraint, deleteButtonRightSideConstraint, deleteButtonBottomConstraint])
}
}
| 60.612613 | 215 | 0.709473 |
2f3cac5637f6ae88a8f8d9d9ddba78205dfc035f | 1,075 | php | PHP | app/Deportista.php | Yerfer/CRUD_RUTINAS | 8a3ce3d5438b7241825a3cbff626d5fe213372cc | [
"MIT"
] | null | null | null | app/Deportista.php | Yerfer/CRUD_RUTINAS | 8a3ce3d5438b7241825a3cbff626d5fe213372cc | [
"MIT"
] | null | null | null | app/Deportista.php | Yerfer/CRUD_RUTINAS | 8a3ce3d5438b7241825a3cbff626d5fe213372cc | [
"MIT"
] | null | null | null | <?php namespace App;
use Illuminate\Auth\Authenticatable;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Auth\Passwords\CanResetPassword;
use Illuminate\Contracts\Auth\Authenticatable as AuthenticatableContract;
use Illuminate\Contracts\Auth\CanResetPassword as CanResetPasswordContract;
/**
 * Eloquent model for the `deportista` table.
 *
 * Implements the framework's authentication and password-reset contracts
 * so athletes can log in and recover their password.
 */
class Deportista extends Model implements AuthenticatableContract, CanResetPasswordContract {

	use Authenticatable, CanResetPassword;

	/**
	 * The database table used by the model.
	 *
	 * @var string
	 */
	protected $table = 'deportista';

	/**
	 * The attributes that are mass assignable.
	 *
	 * @var array
	 */
	protected $fillable = ['cedula', 'nombre','apellido','telefono','email','password'];

	/**
	 * The attributes excluded from the model's JSON form.
	 *
	 * @var array
	 */
	protected $hidden = ['password','remember_token'];

	/**
	 * One-to-one relation to App\DeportistaRutina.
	 *
	 * @return \Illuminate\Database\Eloquent\Relations\HasOne
	 */
	public function rutina(){
		return $this->hasOne('App\DeportistaRutina');
	}

	/**
	 * One-to-one relation to App\Objetivo.
	 *
	 * @return \Illuminate\Database\Eloquent\Relations\HasOne
	 */
	public function objetivo(){
		return $this->hasOne('App\Objetivo');
	}

}
| 25 | 93 | 0.67814 |
6009afeb1fed960c1094ef304391f8e16443137c | 948 | swift | Swift | Coordinator/Utils/UIViewController+Utils.swift | ljnok/Coordinator | 6f999152784203379987f302a3ef8bfc2654b24f | [
"MIT"
] | null | null | null | Coordinator/Utils/UIViewController+Utils.swift | ljnok/Coordinator | 6f999152784203379987f302a3ef8bfc2654b24f | [
"MIT"
] | null | null | null | Coordinator/Utils/UIViewController+Utils.swift | ljnok/Coordinator | 6f999152784203379987f302a3ef8bfc2654b24f | [
"MIT"
] | null | null | null | //
// UIViewController+Utils.swift
// Base_architecture
//
// Created by ljnok on 2021/03/30.
//
import UIKit
// Make every view controller identifiable by its type name for both
// nib- and storyboard-based instantiation.
extension UIViewController: NibIdentifiable, StoryboardIdentifiable {

    // Runtime name of the concrete type (e.g. "LoginViewController").
    var className: String {
        return String(describing: Self.self)
    }

    // Storyboard identifier convention: identical to the type name.
    static var storyboardIdentifier: String {
        return String(describing: self)
    }

    // Nib file name convention: identical to the type name.
    static var nibIdentifier: String {
        return String(describing: self)
    }
}
extension NibIdentifiable where Self: UIViewController {

    // Instantiate the controller from the nib named after the type.
    static func instantiateFromNib() -> Self {
        return Self(nibName: nibIdentifier, bundle: nil)
    }
}
extension StoryboardIdentifiable where Self: UIViewController {

    // Instantiate from the storyboard scene whose identifier matches the
    // type name. NOTE(review): the force cast traps at runtime if the scene
    // is missing or has a different class — keep identifiers in sync.
    static func instantiateFromStoryboard(bundle name: String = "Main") -> Self {
        let storyboard = UIStoryboard(name: name, bundle: Bundle.main)
        return storyboard.instantiateViewController(withIdentifier: storyboardIdentifier) as! Self
    }
}
| 26.333333 | 98 | 0.704641 |
84c1c7fd126fceb1d806f021c1d2723e9d401867 | 106 | ps1 | PowerShell | sdk/resourcegraph/Microsoft.Azure.Management.ResourceGraph/src/generate.ps1 | jessicl-ms/azure-sdk-for-net | 5a5591b0a1a8a6ff4c8e8e3156a98a34c52020b5 | [
"MIT"
] | 3 | 2019-07-29T09:51:04.000Z | 2019-08-21T06:12:47.000Z | sdk/resourcegraph/Microsoft.Azure.Management.ResourceGraph/src/generate.ps1 | jessicl-ms/azure-sdk-for-net | 5a5591b0a1a8a6ff4c8e8e3156a98a34c52020b5 | [
"MIT"
] | 3 | 2019-07-30T02:39:18.000Z | 2019-07-30T02:45:31.000Z | sdk/resourcegraph/Microsoft.Azure.Management.ResourceGraph/src/generate.ps1 | jessicl-ms/azure-sdk-for-net | 5a5591b0a1a8a6ff4c8e8e3156a98a34c52020b5 | [
"MIT"
] | 3 | 2020-07-01T05:12:08.000Z | 2020-07-03T07:14:46.000Z | Start-AutoRestCodeGeneration -ResourceProvider "resourcegraph/resource-manager" -AutoRestVersion "latest"
| 53 | 105 | 0.867925 |
4bd891c6f308fe782ac8738eacfe123177e2ae55 | 34,303 | dart | Dart | pkg/vm/lib/transformations/type_flow/types.dart | zhaoxuyang/sdk | ecd82166c31297db61f062c9ba509a2d498d6cef | [
"BSD-3-Clause"
] | null | null | null | pkg/vm/lib/transformations/type_flow/types.dart | zhaoxuyang/sdk | ecd82166c31297db61f062c9ba509a2d498d6cef | [
"BSD-3-Clause"
] | null | null | null | pkg/vm/lib/transformations/type_flow/types.dart | zhaoxuyang/sdk | ecd82166c31297db61f062c9ba509a2d498d6cef | [
"BSD-3-Clause"
] | null | null | null | // Copyright (c) 2017, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
/// Declares the type system used by global type flow analysis.
library vm.transformations.type_flow.types;
import 'dart:core' hide Type;
import 'package:kernel/ast.dart';
import 'package:kernel/core_types.dart';
import 'utils.dart';
/// Dart class representation used in type flow analysis.
/// For each Dart class there is a unique instance of [TFClass].
/// Each [TFClass] has unique id which could be used to sort classes.
class TFClass {
  /// Unique, stable identifier; doubles as the hash code and sort key.
  final int id;

  /// The kernel AST class this [TFClass] wraps.
  final Class classNode;

  /// Do not instantiate directly; use [TypeHierarchy.getTFClass] so that
  /// instances are unique per [TypeHierarchy].
  TFClass(this.id, this.classNode);

  @override
  int get hashCode => id;

  // Identity equality is sound because instances are canonicalized.
  @override
  bool operator ==(other) => identical(this, other);

  @override
  String toString() => '$classNode';
}
/// Provides information about type arguments of the generic interfaces
/// implemented by classes.
abstract class GenericInterfacesInfo {
  // Return a type arguments vector which contains the immediate type parameters
  // to 'klass' as well as the type arguments to all generic supertypes of
  // 'klass', instantiated in terms of the type parameters on 'klass'.
  //
  // The offset into this vector from which a specific generic supertype's type
  // arguments can be found is given by 'genericInterfaceOffsetFor'.
  List<DartType> flattenedTypeArgumentsFor(Class klass);

  // Return the offset into the flattened type arguments vector from which a
  // specific generic supertype's type arguments can be found. The flattened
  // type arguments vector is given by 'flattenedTypeArgumentsFor'.
  int genericInterfaceOffsetFor(Class klass, Class iface);

  // Similar to 'flattenedTypeArgumentsFor', but works for non-generic classes
  // which may have recursive substitutions, e.g. 'class num implements
  // Comparable<num>'.
  //
  // Since there are no free type variables in the result, 'RuntimeType' is
  // returned instead of 'DartType'.
  List<Type> flattenedTypeArgumentsForNonGeneric(Class klass);
}
/// Builds analysis [Type]s from Dart static types.
abstract class TypesBuilder {
  final CoreTypes coreTypes;
  final bool nullSafety;

  TypesBuilder(this.coreTypes, this.nullSafety);

  /// Return [TFClass] corresponding to the given [classNode].
  TFClass getTFClass(Class classNode);

  /// Create a Type which corresponds to a set of instances constrained by
  /// Dart type annotation [dartType].
  /// [canBeNull] can be set to false to further constrain the resulting
  /// type if value cannot be null.
  Type fromStaticType(DartType type, bool canBeNull) {
    Type result;
    if (type == const DynamicType() || type == const VoidType()) {
      result = const AnyType();
    } else if (type == const BottomType() || type is NeverType) {
      result = const EmptyType();
    } else if (type is InterfaceType) {
      final cls = type.classNode;
      result = (cls == coreTypes.nullClass)
          ? const EmptyType()
          : ConeType(getTFClass(cls));
    } else if (type is FunctionType) {
      // TODO(alexmarkov): support function types
      result = const AnyType();
    } else if (type is TypeParameterType) {
      final bound = type.bound;
      // Protect against infinite recursion in case of cyclic type parameters
      // like 'T extends T'. As of today, front-end doesn't report errors in
      // such cases yet.
      result = (bound is TypeParameterType)
          ? const AnyType()
          : fromStaticType(bound, canBeNull);
    } else {
      throw 'Unexpected type ${type.runtimeType} $type';
    }
    if (nullSafety && type.nullability == Nullability.nonNullable) {
      canBeNull = false;
    }
    // The recursive TypeParameterType case may already have produced a
    // nullable result; avoid wrapping twice.
    if (canBeNull && result is! NullableType) {
      result = Type.nullable(result);
    }
    return result;
  }
}
/// Instantiates a [ConcreteType] with the given type arguments.
abstract class RuntimeTypeTranslator {
  TypeExpr instantiateConcreteType(ConcreteType type, List<DartType> typeArgs);
}
/// Abstract interface to type hierarchy information used by types.
abstract class TypeHierarchy extends TypesBuilder
    implements GenericInterfacesInfo {
  TypeHierarchy(CoreTypes coreTypes, bool nullSafety)
      : super(coreTypes, nullSafety);

  /// Test if [sub] is a subtype of [sup].
  bool isSubtype(Class sub, Class sup);

  /// Return a more specific type for the type cone with [base] root.
  /// May return EmptyType, AnyType, ConcreteType or a SetType.
  Type specializeTypeCone(TFClass base);

  // Lazily computed and cached analysis type corresponding to 'int'.
  Type _cachedIntType;

  Type get intType =>
      _cachedIntType ??= fromStaticType(coreTypes.intLegacyRawType, true);
}
/// Base class for type expressions.
/// Type expression is either a [Type] or a statement in a summary.
abstract class TypeExpr {
  // Const constructor so that subclasses can be const as well.
  const TypeExpr();

  /// Returns computed type of this type expression.
  /// [types] is the list of types computed for the statements in the summary.
  Type getComputedType(List<Type> types);
}
/// Base class for types inferred by the type flow analysis.
/// [Type] describes a specific set of values (Dart instances) and does not
/// directly correspond to a Dart type.
/// TODO(alexmarkov): consider detaching Type hierarchy from TypeExpr/Statement.
abstract class Type extends TypeExpr {
  const Type();

  /// Create a nullable type - union of [t] and the `null` object.
  factory Type.nullable(Type t) => new NullableType(t);

  /// Create a type representing arbitrary nullable object (`dynamic`).
  factory Type.nullableAny() => new NullableType(const AnyType());

  /// Returns the single concrete class of this type, or null when it is
  /// unknown or not unique (the conservative default).
  Class getConcreteClass(TypeHierarchy typeHierarchy) => null;

  /// Whether all values of this type are instances of [cls].
  /// The default is a conservative `false`; subclasses refine it.
  bool isSubtypeOf(TypeHierarchy typeHierarchy, Class cls) => false;

  // Returns 'true' if this type will definitely pass a runtime type-check
  // against 'runtimeType'. Returns 'false' if the test might fail (e.g. due to
  // an approximation).
  bool isSubtypeOfRuntimeType(
      TypeHierarchy typeHierarchy, RuntimeType runtimeType);

  // A Type used as a TypeExpr evaluates to itself.
  @override
  Type getComputedType(List<Type> types) => this;

  /// Order of precedence for evaluation of union/intersection.
  int get order;

  /// Returns true iff this type is fully specialized.
  bool get isSpecialized => true;

  /// Returns specialization of this type using the given [TypeHierarchy].
  Type specialize(TypeHierarchy typeHierarchy) => this;

  /// Calculate union of this and [other] types.
  Type union(Type other, TypeHierarchy typeHierarchy);

  /// Calculate intersection of this and [other] types.
  Type intersection(Type other, TypeHierarchy typeHierarchy);
}
/// Order of precedence between types for evaluation of union/intersection.
// NOTE: declaration order is significant - `index` is used as the precedence
// in union/intersection evaluation (see the `order` getters), so do not
// reorder the values.
enum TypeOrder {
  RuntimeType,
  Unknown,
  Empty,
  Nullable,
  Any,
  Set,
  Cone,
  Concrete,
}
/// Type representing the empty set of instances.
class EmptyType extends Type {
  const EmptyType();

  @override
  int get hashCode => 997;

  @override
  bool operator ==(other) => other is EmptyType;

  @override
  String toString() => "_T {}";

  @override
  int get order => TypeOrder.Empty.index;

  // Union with the empty set yields the other operand unchanged.
  @override
  Type union(Type other, TypeHierarchy typeHierarchy) => other;

  // Intersection with the empty set is always empty.
  @override
  Type intersection(Type other, TypeHierarchy typeHierarchy) => this;

  // An empty set of values trivially passes any runtime type test.
  bool isSubtypeOfRuntimeType(TypeHierarchy typeHierarchy, RuntimeType other) =>
      true;
}
/// Nullable type represents a union of a (non-nullable) type and the `null`
/// object. Other kinds of types do not contain `null` object (even AnyType).
class NullableType extends Type {
  /// The non-null part of this type; never itself a [NullableType].
  final Type baseType;

  NullableType(this.baseType) {
    assertx(baseType != null);
    assertx(baseType is! NullableType);
  }

  @override
  int get hashCode => (baseType.hashCode + 31) & kHashMask;

  @override
  bool operator ==(other) {
    if (identical(this, other)) return true;
    return (other is NullableType) && (baseType == other.baseType);
  }

  @override
  String toString() => "${baseType}?";

  @override
  bool isSubtypeOf(TypeHierarchy typeHierarchy, Class cls) =>
      baseType.isSubtypeOf(typeHierarchy, cls);

  bool isSubtypeOfRuntimeType(TypeHierarchy typeHierarchy, RuntimeType other) {
    // With sound null safety, `null` never satisfies a non-nullable test.
    if (typeHierarchy.nullSafety &&
        other.nullability == Nullability.nonNullable) {
      return false;
    }
    return baseType.isSubtypeOfRuntimeType(typeHierarchy, other);
  }

  @override
  int get order => TypeOrder.Nullable.index;

  @override
  bool get isSpecialized => baseType.isSpecialized;

  @override
  Type specialize(TypeHierarchy typeHierarchy) {
    if (baseType.isSpecialized) return this;
    return NullableType(baseType.specialize(typeHierarchy));
  }

  @override
  Type union(Type other, TypeHierarchy typeHierarchy) {
    if (other.order < this.order) {
      return other.union(this, typeHierarchy);
    }
    // Strip the other operand's nullability, if any; the result is nullable
    // either way.
    final otherBase = (other is NullableType) ? other.baseType : other;
    return NullableType(baseType.union(otherBase, typeHierarchy));
  }

  @override
  Type intersection(Type other, TypeHierarchy typeHierarchy) {
    if (other.order < this.order) {
      return other.intersection(this, typeHierarchy);
    }
    // `null` survives the intersection only if both operands admit it.
    if (other is NullableType) {
      return NullableType(baseType.intersection(other.baseType, typeHierarchy));
    }
    return baseType.intersection(other, typeHierarchy);
  }
}
/// Type representing any instance except `null`.
/// Semantically equivalent to ConeType of Object, but more efficient.
class AnyType extends Type {
  const AnyType();

  @override
  int get hashCode => 991;

  @override
  bool operator ==(other) => other is AnyType;

  @override
  String toString() => "_T ANY";

  @override
  int get order => TypeOrder.Any.index;

  // ANY absorbs every other (non-nullable) type under union.
  @override
  Type union(Type other, TypeHierarchy typeHierarchy) =>
      (other.order < this.order) ? other.union(this, typeHierarchy) : this;

  // ANY is the identity element of intersection.
  @override
  Type intersection(Type other, TypeHierarchy typeHierarchy) =>
      (other.order < this.order)
          ? other.intersection(this, typeHierarchy)
          : other;

  bool isSubtypeOfRuntimeType(TypeHierarchy typeHierarchy, RuntimeType other) {
    // ANY only guarantees passing the checks every non-null instance
    // passes: dynamic, void and Object.
    final rhs = other._type;
    if (rhs is DynamicType || rhs is VoidType) return true;
    return rhs is InterfaceType &&
        rhs.classNode == typeHierarchy.coreTypes.objectClass;
  }
}
/// SetType is a union of concrete types T1, T2, ..., Tn, where n >= 2.
/// It represents the set of instances which types are in the {T1, T2, ..., Tn}.
class SetType extends Type {
  /// List of concrete types, sorted by classId.
  final List<ConcreteType> types;

  // Cached hash code; computed lazily on first access.
  int _hashCode;

  /// Creates a new SetType using list of concrete types sorted by classId.
  SetType(this.types) {
    assertx(types.length >= 2);
    assertx(isSorted(types));
  }

  @override
  int get hashCode => _hashCode ??= _computeHashCode();

  // Order-dependent hash over the members, masked to stay in smi range.
  int _computeHashCode() {
    int hash = 1237;
    for (var t in types) {
      hash = (((hash * 31) & kHashMask) + t.hashCode) & kHashMask;
    }
    return hash;
  }

  @override
  bool operator ==(other) {
    if (identical(this, other)) return true;
    if ((other is SetType) && (types.length == other.types.length)) {
      // Both lists are sorted by class id, so element-wise comparison
      // suffices for set equality.
      for (int i = 0; i < types.length; i++) {
        if (types[i] != other.types[i]) {
          return false;
        }
      }
      return true;
    }
    return false;
  }

  @override
  String toString() => "_T ${types}";

  // A set is a subtype of [cls] iff every member is.
  @override
  bool isSubtypeOf(TypeHierarchy typeHierarchy, Class cls) =>
      types.every((ConcreteType t) => t.isSubtypeOf(typeHierarchy, cls));

  bool isSubtypeOfRuntimeType(TypeHierarchy typeHierarchy, RuntimeType other) =>
      types.every((t) => t.isSubtypeOfRuntimeType(typeHierarchy, other));

  @override
  int get order => TypeOrder.Set.index;

  // Merge two class-id-sorted lists of concrete types into their sorted
  // union. When the same class appears on both sides with different type
  // arguments or constants, the entry is widened to the raw type.
  static List<ConcreteType> _unionLists(
      List<ConcreteType> types1, List<ConcreteType> types2) {
    int i1 = 0;
    int i2 = 0;
    List<ConcreteType> types = <ConcreteType>[];
    while ((i1 < types1.length) && (i2 < types2.length)) {
      final t1 = types1[i1];
      final t2 = types2[i2];
      final id1 = t1.cls.id;
      final id2 = t2.cls.id;
      if (id1 < id2) {
        types.add(t1);
        ++i1;
      } else if (id1 > id2) {
        types.add(t2);
        ++i2;
      } else {
        if (t1 == t2) {
          types.add(t1);
        } else {
          // TODO(sjindel/tfa): Merge the type arguments vectors.
          // (e.g., Map<?, int> vs Map<String, int> can become Map<?, int>)
          types.add(t1.raw);
        }
        ++i1;
        ++i2;
      }
    }
    // Append whichever input still has a tail left.
    if (i1 < types1.length) {
      types.addAll(types1.getRange(i1, types1.length));
    } else if (i2 < types2.length) {
      types.addAll(types2.getRange(i2, types2.length));
    }
    return types;
  }

  // Intersect two class-id-sorted lists of concrete types; only classes
  // present in both lists survive, with their refinements intersected.
  static List<ConcreteType> _intersectLists(
      List<ConcreteType> types1, List<ConcreteType> types2) {
    int i1 = 0;
    int i2 = 0;
    List<ConcreteType> types = <ConcreteType>[];
    while ((i1 < types1.length) && (i2 < types2.length)) {
      final t1 = types1[i1];
      final t2 = types2[i2];
      final id1 = t1.cls.id;
      final id2 = t2.cls.id;
      if (id1 < id2) {
        ++i1;
      } else if (id1 > id2) {
        ++i2;
      } else {
        if (t1.typeArgs == null &&
            t1.constant == null &&
            t2.typeArgs == null &&
            t2.constant == null) {
          types.add(t1);
        } else {
          // Same class but refined by type arguments or constants: take the
          // precise intersection and drop it if it is empty.
          final intersect = t1.intersection(t2, null);
          if (intersect is! EmptyType) {
            types.add(intersect);
          }
        }
        ++i1;
        ++i2;
      }
    }
    return types;
  }

  @override
  Type union(Type other, TypeHierarchy typeHierarchy) {
    // Delegate to the operand with the smaller order so each combination of
    // type kinds is only implemented once.
    if (other.order < this.order) {
      return other.union(this, typeHierarchy);
    }
    if (other is SetType) {
      return new SetType(_unionLists(types, other.types));
    } else if (other is ConcreteType) {
      return types.contains(other)
          ? this
          : new SetType(_unionLists(types, <ConcreteType>[other]));
    } else if (other is ConeType) {
      // Specialize the cone to a concrete set first, then retry.
      return typeHierarchy
          .specializeTypeCone(other.cls)
          .union(this, typeHierarchy);
    } else {
      throw 'Unexpected type $other';
    }
  }

  @override
  Type intersection(Type other, TypeHierarchy typeHierarchy) {
    if (other.order < this.order) {
      return other.intersection(this, typeHierarchy);
    }
    if (other is SetType) {
      List<ConcreteType> list = _intersectLists(types, other.types);
      final size = list.length;
      if (size == 0) {
        return const EmptyType();
      } else if (size == 1) {
        return list.single;
      } else {
        return new SetType(list);
      }
    } else if (other is ConcreteType) {
      // A single concrete type survives only if the set contains its class.
      for (var type in types) {
        if (type == other) return other;
        if (identical(type.cls, other.cls)) {
          return type.intersection(other, typeHierarchy);
        }
      }
      return EmptyType();
    } else if (other is ConeType) {
      return typeHierarchy
          .specializeTypeCone(other.cls)
          .intersection(this, typeHierarchy);
    } else {
      throw 'Unexpected type $other';
    }
  }
}
/// Type representing a subtype cone. It contains instances of all
/// Dart types which extend, mix-in or implement certain class.
/// TODO(alexmarkov): Introduce cones of types which extend but not implement.
class ConeType extends Type {
  /// Root of the cone: this type stands for instances of [cls] and of all
  /// classes which subtype it.
  final TFClass cls;

  ConeType(this.cls);

  // Delegates to the specialized (set) representation of the cone.
  @override
  Class getConcreteClass(TypeHierarchy typeHierarchy) =>
      typeHierarchy.specializeTypeCone(cls).getConcreteClass(typeHierarchy);

  @override
  bool isSubtypeOf(TypeHierarchy typeHierarchy, Class cls) =>
      typeHierarchy.isSubtype(this.cls.classNode, cls);

  /// Whether every instance in this cone satisfies a type test against
  /// [other]. Generic classes are approximated as 'false'.
  bool isSubtypeOfRuntimeType(TypeHierarchy typeHierarchy, RuntimeType other) {
    final rhs = other._type;
    if (rhs is DynamicType || rhs is VoidType) return true;
    if (rhs is InterfaceType) {
      // Only a non-generic root can be checked by class hierarchy alone;
      // type arguments of subtypes are not tracked by the cone.
      return cls.classNode.typeParameters.isEmpty &&
          typeHierarchy.isSubtype(cls.classNode, rhs.classNode);
    }
    return false;
  }

  @override
  int get hashCode => (cls.id + 37) & kHashMask;

  @override
  bool operator ==(other) =>
      identical(this, other) ||
      (other is ConeType) && identical(this.cls, other.cls);

  @override
  String toString() => "_T ($cls)+";

  @override
  int get order => TypeOrder.Cone.index;

  // A cone is a symbolic representation; it must be specialized into a
  // concrete set of classes before precise queries.
  @override
  bool get isSpecialized => false;

  @override
  Type specialize(TypeHierarchy typeHierarchy) =>
      typeHierarchy.specializeTypeCone(cls);

  @override
  Type union(Type other, TypeHierarchy typeHierarchy) {
    // The operand with the smaller order implements the operation.
    if (other.order < this.order) {
      return other.union(this, typeHierarchy);
    }
    if (other is ConeType) {
      if (this == other) {
        return this;
      }
      // If one root subtypes the other, the wider cone already contains
      // the narrower one.
      if (typeHierarchy.isSubtype(other.cls.classNode, this.cls.classNode)) {
        return this;
      }
      if (typeHierarchy.isSubtype(this.cls.classNode, other.cls.classNode)) {
        return other;
      }
    } else if (other is ConcreteType) {
      if (typeHierarchy.isSubtype(other.cls.classNode, this.cls.classNode)) {
        return this;
      }
    }
    // No containment relation: fall back to the specialized (set) form.
    return typeHierarchy.specializeTypeCone(cls).union(other, typeHierarchy);
  }

  @override
  Type intersection(Type other, TypeHierarchy typeHierarchy) {
    // The operand with the smaller order implements the operation.
    if (other.order < this.order) {
      return other.intersection(this, typeHierarchy);
    }
    if (other is ConeType) {
      if (this == other) {
        return this;
      }
      // The narrower cone is the intersection when one root subtypes the other.
      if (typeHierarchy.isSubtype(other.cls.classNode, this.cls.classNode)) {
        return other;
      }
      if (typeHierarchy.isSubtype(this.cls.classNode, other.cls.classNode)) {
        return this;
      }
    } else if (other is ConcreteType) {
      // A concrete class either lies inside the cone or the result is empty.
      if (typeHierarchy.isSubtype(other.cls.classNode, this.cls.classNode)) {
        return other;
      } else {
        return const EmptyType();
      }
    }
    // No containment relation: fall back to the specialized (set) form.
    return typeHierarchy
        .specializeTypeCone(cls)
        .intersection(other, typeHierarchy);
  }
}
/// Type representing a set of instances of a specific Dart class (no subtypes
/// or `null` object).
class ConcreteType extends Type implements Comparable<ConcreteType> {
  /// The exact class of every instance in this set.
  final TFClass cls;

  /// Cached hash code, computed lazily by [hashCode].
  int _hashCode;

  // May be null if there are no type arguments constraints. The type arguments
  // should represent type sets, i.e. `UnknownType` or `RuntimeType`. The type
  // arguments vector is factored against the generic interfaces implemented by
  // the class (see [TypeHierarchy.flattenedTypeArgumentsFor]).
  //
  // The 'typeArgs' vector is null for non-generic classes, even if they
  // implement a generic interface.
  //
  // 'numImmediateTypeArgs' is the length of the prefix of 'typeArgs' which
  // holds the type arguments to the class itself.
  final int numImmediateTypeArgs;
  final List<Type> typeArgs;

  // May be null if constant value is not inferred.
  final Constant constant;

  ConcreteType(this.cls, [List<Type> typeArgs_, this.constant])
      : typeArgs = typeArgs_,
        numImmediateTypeArgs =
            typeArgs_ != null ? cls.classNode.typeParameters.length : 0 {
    // TODO(alexmarkov): support closures
    assertx(!cls.classNode.isAbstract);
    assertx(typeArgs == null || cls.classNode.typeParameters.isNotEmpty);
    // A given type-argument vector must constrain something; otherwise the
    // raw representation (typeArgs == null) should have been used instead.
    assertx(typeArgs == null || typeArgs.any((t) => t is RuntimeType));
  }

  /// This class without any type-argument or constant constraints.
  ConcreteType get raw => new ConcreteType(cls, null);

  @override
  Class getConcreteClass(TypeHierarchy typeHierarchy) => cls.classNode;

  @override
  bool isSubtypeOf(TypeHierarchy typeHierarchy, Class other) =>
      typeHierarchy.isSubtype(cls.classNode, other);

  /// Whether every instance in this set satisfies a type test against
  /// [runtimeType] (used as the right-hand side of the test).
  bool isSubtypeOfRuntimeType(
      TypeHierarchy typeHierarchy, RuntimeType runtimeType) {
    final rhs = runtimeType._type;
    if (rhs is DynamicType || rhs is VoidType) return true;
    if (rhs is InterfaceType) {
      if (rhs.classNode == typeHierarchy.coreTypes.functionClass) {
        // TODO(35573): "implements/extends Function" is not handled correctly by
        // the CFE. By returning "false" we force an approximation -- that a type
        // check against "Function" might fail, whatever the LHS is.
        return false;
      }
      if (!typeHierarchy.isSubtype(this.cls.classNode, rhs.classNode)) {
        return false;
      }

      if (rhs.typeArguments.isEmpty) return true;

      if (rhs.classNode == typeHierarchy.coreTypes.futureOrClass) {
        // FutureOr<T>: a Future is checked against the Future<T> branch,
        // any other value against the plain T branch.
        assertx(cls.classNode != typeHierarchy.coreTypes.futureOrClass);
        if (typeHierarchy.isSubtype(
            cls.classNode, typeHierarchy.coreTypes.futureClass)) {
          final RuntimeType lhs =
              typeArgs == null ? RuntimeType(DynamicType(), null) : typeArgs[0];
          return lhs.isSubtypeOfRuntimeType(
              typeHierarchy, runtimeType.typeArgs[0]);
        } else {
          return isSubtypeOfRuntimeType(typeHierarchy, runtimeType.typeArgs[0]);
        }
      }

      List<Type> usableTypeArgs = typeArgs;
      if (usableTypeArgs == null) {
        if (cls.classNode.typeParameters.isEmpty) {
          // Non-generic class: its (fixed) interface type arguments can be
          // recovered from the hierarchy.
          usableTypeArgs =
              typeHierarchy.flattenedTypeArgumentsForNonGeneric(cls.classNode);
        } else {
          // Generic class with no tracked arguments: approximate as 'false'.
          return false;
        }
      }
      final interfaceOffset =
          typeHierarchy.genericInterfaceOffsetFor(cls.classNode, rhs.classNode);
      assertx(usableTypeArgs.length - interfaceOffset >=
          runtimeType.numImmediateTypeArgs);
      for (int i = 0; i < runtimeType.numImmediateTypeArgs; ++i) {
        final ta = usableTypeArgs[i + interfaceOffset];
        if (ta is UnknownType) {
          return false;
        }
        assertx(ta is RuntimeType);
        if (!ta.isSubtypeOfRuntimeType(
            typeHierarchy, runtimeType.typeArgs[i])) {
          return false;
        }
      }
      return true;
    }
    return false;
  }

  @override
  int get hashCode => _hashCode ??= _computeHashCode();

  int _computeHashCode() {
    int hash = cls.hashCode ^ 0x1234 & kHashMask;
    // We only need to hash the first type arguments vector, since the type
    // arguments of the implemented interfaces are implied by it.
    for (int i = 0; i < numImmediateTypeArgs; ++i) {
      hash = (((hash * 31) & kHashMask) + typeArgs[i].hashCode) & kHashMask;
    }
    // NOTE(review): unlike the loop above, this last step is not masked with
    // kHashMask, so the result may exceed it -- confirm whether intentional.
    hash = ((hash * 31) & kHashMask) + constant.hashCode;
    return hash;
  }

  @override
  bool operator ==(other) {
    if (identical(this, other)) return true;
    if (other is ConcreteType) {
      if (!identical(this.cls, other.cls) ||
          this.numImmediateTypeArgs != other.numImmediateTypeArgs) {
        return false;
      }
      // Comparing only the immediate type arguments is sufficient: the rest
      // of the factored vector is implied by them.
      if (this.typeArgs != null) {
        for (int i = 0; i < numImmediateTypeArgs; ++i) {
          if (this.typeArgs[i] != other.typeArgs[i]) {
            return false;
          }
        }
      }
      if (this.constant != other.constant) {
        return false;
      }
      return true;
    } else {
      return false;
    }
  }

  // Note that this may return 0 for concrete types which are not equal if the
  // difference is only in type arguments.
  @override
  int compareTo(ConcreteType other) => cls.id.compareTo(other.cls.id);

  @override
  String toString() {
    if (typeArgs == null && constant == null) {
      return "_T (${cls})";
    }
    final StringBuffer buf = new StringBuffer();
    buf.write("_T (${cls}");
    if (typeArgs != null) {
      buf.write("<${typeArgs.take(numImmediateTypeArgs).join(', ')}>");
    }
    if (constant != null) {
      buf.write(", $constant");
    }
    buf.write(")");
    return buf.toString();
  }

  @override
  int get order => TypeOrder.Concrete.index;

  @override
  Type union(Type other, TypeHierarchy typeHierarchy) {
    // The operand with the smaller order implements the operation.
    if (other.order < this.order) {
      return other.union(this, typeHierarchy);
    }
    if (other is ConcreteType) {
      if (this == other) {
        return this;
      } else if (!identical(this.cls, other.cls)) {
        // Different classes: build a set, kept sorted by class id.
        final types = (this.cls.id < other.cls.id)
            ? <ConcreteType>[this, other]
            : <ConcreteType>[other, this];
        return new SetType(types);
      } else {
        // Same class but differing constraints: approximate with raw type.
        assertx(typeArgs != null ||
            constant != null ||
            other.typeArgs != null ||
            other.constant != null);
        return raw;
      }
    } else {
      throw 'Unexpected type $other';
    }
  }

  @override
  Type intersection(Type other, TypeHierarchy typeHierarchy) {
    // The operand with the smaller order implements the operation.
    if (other.order < this.order) {
      return other.intersection(this, typeHierarchy);
    }
    if (other is ConcreteType) {
      if (this == other) {
        return this;
      }
      if (!identical(this.cls, other.cls)) {
        return EmptyType();
      }

      // Same class: merge the type-argument and constant constraints.
      if (typeArgs == null && constant == null) {
        return other;
      } else if (other.typeArgs == null && other.constant == null) {
        return this;
      }

      List<Type> mergedTypeArgs;
      if (typeArgs == null) {
        mergedTypeArgs = other.typeArgs;
      } else if (other.typeArgs == null) {
        mergedTypeArgs = typeArgs;
      } else {
        mergedTypeArgs = new List<Type>(typeArgs.length);
        bool hasRuntimeType = false;
        for (int i = 0; i < typeArgs.length; ++i) {
          final merged =
              typeArgs[i].intersection(other.typeArgs[i], typeHierarchy);
          if (merged is EmptyType) {
            // Contradictory type arguments: no instance satisfies both.
            return const EmptyType();
          } else if (merged is RuntimeType) {
            hasRuntimeType = true;
          }
          mergedTypeArgs[i] = merged;
        }
        if (!hasRuntimeType) {
          // No constraints survived: fall back to the raw representation.
          mergedTypeArgs = null;
        }
      }

      Constant mergedConstant;
      if (constant == null) {
        mergedConstant = other.constant;
      } else if (other.constant == null || constant == other.constant) {
        mergedConstant = constant;
      } else {
        // Contradictory constants: the intersection is empty.
        return const EmptyType();
      }
      return new ConcreteType(cls, mergedTypeArgs, mergedConstant);
    } else {
      throw 'Unexpected type $other';
    }
  }
}
// Unlike the other 'Type's, this represents a single type, not a set of
// values. It is used as the right-hand-side of type-tests.
//
// The type arguments are represented in a form that is factored against the
// generic interfaces implemented by the type to enable efficient type-test
// against its interfaces. See 'TypeHierarchy.flattenedTypeArgumentsFor' for
// more details.
//
// This factored representation can have cycles for some types:
//
//   class num implements Comparable<num> {}
//   class A<T> extends Comparable<A<T>> {}
//
// To avoid these cycles, we approximate generic super-bounded types (the second
// case), so the representation for 'A<String>' would be simply 'UnknownType'.
// However, approximating non-generic types like 'int' and 'num' (the first
// case) would be too coarse, so we leave a null 'typeArgs' field for these
// types. As a result, when doing an 'isSubtypeOfRuntimeType' against
// their interfaces (e.g. 'int' vs 'Comparable<int>') we approximate the result
// as 'false'.
//
// So, the invariant about 'typeArgs' is that they will be 'null' iff the class
// is non-generic, and non-null (with at least one vector) otherwise.
class RuntimeType extends Type {
  final DartType _type; // Doesn't contain type args.

  /// Number of type arguments belonging to the class itself (the prefix of
  /// [typeArgs]); zero for non-interface types.
  final int numImmediateTypeArgs;

  /// Factored type-argument vector (see class comment); null iff the class
  /// is non-generic.
  final List<RuntimeType> typeArgs;

  RuntimeType(DartType type, this.typeArgs)
      : _type = type,
        numImmediateTypeArgs =
            type is InterfaceType ? type.classNode.typeParameters.length : 0 {
    if (_type is InterfaceType && numImmediateTypeArgs > 0) {
      // Generic interface type: arguments are carried in [typeArgs]; the
      // underlying Kernel type must have them all erased to 'dynamic'.
      assertx(typeArgs != null);
      assertx(typeArgs.length >= numImmediateTypeArgs);
      assertx((_type as InterfaceType)
          .typeArguments
          .every((t) => t == const DynamicType()));
    } else {
      assertx(typeArgs == null);
    }
  }

  int get order => TypeOrder.RuntimeType.index;

  Nullability get nullability => _type.nullability;

  /// A copy of this type with its declared nullability replaced by [n].
  RuntimeType withNullability(Nullability n) =>
      RuntimeType(_type.withDeclaredNullability(n), typeArgs);

  /// The underlying Kernel type with type arguments still erased.
  DartType get representedTypeRaw => _type;

  /// The underlying Kernel type with the immediate type arguments restored
  /// from [typeArgs].
  DartType get representedType {
    final type = _type;
    if (type is InterfaceType && typeArgs != null) {
      final klass = type.classNode;
      final typeArguments = typeArgs
          .take(klass.typeParameters.length)
          .map((pt) => pt.representedType)
          .toList();
      return new InterfaceType(klass, type.nullability, typeArguments);
    } else {
      return type;
    }
  }

  @override
  int get hashCode {
    int hash = _type.hashCode ^ 0x1234 & kHashMask;
    // Only hash by the type arguments of the class. The type arguments of
    // supertypes are implied by them.
    for (int i = 0; i < numImmediateTypeArgs; ++i) {
      hash = (((hash * 31) & kHashMask) + typeArgs[i].hashCode) & kHashMask;
    }
    return hash;
  }

  @override
  operator ==(other) {
    if (identical(this, other)) return true;
    if (other is RuntimeType) {
      if (other._type != _type) return false;
      assertx(numImmediateTypeArgs == other.numImmediateTypeArgs);
      return typeArgs == null || listEquals(typeArgs, other.typeArgs);
    }
    return false;
  }

  @override
  String toString() {
    final head = _type is InterfaceType
        ? "${(_type as InterfaceType).classNode}"
        : "$_type";
    final typeArgsStrs = (numImmediateTypeArgs == 0)
        ? ""
        : "<${typeArgs.take(numImmediateTypeArgs).map((t) => "$t").join(", ")}>";
    final nullability = _type.nullability.suffix;
    return "$head$typeArgsStrs$nullability";
  }

  // [RuntimeType] denotes a single type rather than a set of values, so most
  // of the set-oriented [Type] API is intentionally unsupported.
  @override
  bool get isSpecialized =>
      throw "ERROR: RuntimeType does not support isSpecialized.";

  @override
  bool isSubtypeOf(TypeHierarchy typeHierarchy, Class cls) =>
      throw "ERROR: RuntimeType does not support isSubtypeOf.";

  @override
  Type union(Type other, TypeHierarchy typeHierarchy) =>
      throw "ERROR: RuntimeType does not support union.";

  // This only works between "type-set" representations ('UnknownType' and
  // 'RuntimeType') and is used when merging type arguments.
  @override
  Type intersection(Type other, TypeHierarchy typeHierarchy) {
    if (other is UnknownType) {
      return this;
    } else if (other is RuntimeType) {
      return this == other ? this : const EmptyType();
    }
    throw "ERROR: RuntimeType cannot intersect with ${other.runtimeType}";
  }

  @override
  Type specialize(TypeHierarchy typeHierarchy) =>
      throw "ERROR: RuntimeType does not support specialize.";

  // NOTE(review): the message mentions 'ConcreteClass' but this class is
  // RuntimeType -- looks like a copy/paste slip in the error text.
  @override
  Class getConcreteClass(TypeHierarchy typeHierarchy) =>
      throw "ERROR: ConcreteClass does not support getConcreteClass.";

  /// Whether this type (as LHS) is a subtype of [runtimeType] (as RHS).
  bool isSubtypeOfRuntimeType(
      TypeHierarchy typeHierarchy, RuntimeType runtimeType) {
    final rhs = runtimeType._type;
    // Under null safety a nullable LHS never satisfies a non-nullable RHS.
    if (typeHierarchy.nullSafety &&
        _type.nullability == Nullability.nullable &&
        rhs.nullability == Nullability.nonNullable) {
      return false;
    }
    if (rhs is DynamicType ||
        rhs is VoidType ||
        _type is BottomType ||
        _type is NeverType) {
      return true;
    }
    if (rhs is BottomType || rhs is NeverType) return false;
    if (_type is DynamicType || _type is VoidType) {
      // 'dynamic'/'void' on the LHS only passes a test against Object.
      return (rhs is InterfaceType &&
          rhs.classNode == typeHierarchy.coreTypes.objectClass);
    }

    final thisClass = (_type as InterfaceType).classNode;
    final otherClass = (rhs as InterfaceType).classNode;

    if (!typeHierarchy.isSubtype(thisClass, otherClass)) return false;

    // The typeHierarchy result may be inaccurate only if there are type
    // arguments which need to be examined.
    if (runtimeType.numImmediateTypeArgs == 0) {
      return true;
    }

    if (otherClass == typeHierarchy.coreTypes.futureOrClass) {
      if (thisClass == typeHierarchy.coreTypes.futureClass ||
          thisClass == typeHierarchy.coreTypes.futureOrClass) {
        // Compare the Future/FutureOr payload types.
        return typeArgs[0]
            .isSubtypeOfRuntimeType(typeHierarchy, runtimeType.typeArgs[0]);
      } else {
        // Plain value: check against the 'T' branch of FutureOr<T>.
        return isSubtypeOfRuntimeType(typeHierarchy, runtimeType.typeArgs[0]);
      }
    }

    List<Type> usableTypeArgs = typeArgs;
    if (usableTypeArgs == null) {
      // Non-generic class (invariant from the class comment): recover its
      // fixed interface arguments from the hierarchy.
      assertx(thisClass.typeParameters.isEmpty);
      usableTypeArgs =
          typeHierarchy.flattenedTypeArgumentsForNonGeneric(thisClass);
    }
    final interfaceOffset =
        typeHierarchy.genericInterfaceOffsetFor(thisClass, otherClass);
    assertx(usableTypeArgs.length - interfaceOffset >=
        runtimeType.numImmediateTypeArgs);
    for (int i = 0; i < runtimeType.numImmediateTypeArgs; ++i) {
      if (!usableTypeArgs[interfaceOffset + i]
          .isSubtypeOfRuntimeType(typeHierarchy, runtimeType.typeArgs[i])) {
        return false;
      }
    }
    return true;
  }
}
/// Type which is not known at compile time.
/// It is used as the right-hand-side of type tests.
class UnknownType extends Type {
  const UnknownType();

  @override
  int get hashCode => 1019;

  @override
  bool operator ==(other) => other is UnknownType;

  @override
  String toString() => "UNKNOWN";

  @override
  int get order => TypeOrder.Unknown.index;

  @override
  bool isSubtypeOf(TypeHierarchy typeHierarchy, Class cls) =>
      throw "ERROR: UnknownType does not support isSubtypeOf.";

  // Merging with another "type-set" representation keeps the unknown.
  @override
  Type union(Type other, TypeHierarchy typeHierarchy) {
    if (other is! UnknownType && other is! RuntimeType) {
      throw "ERROR: UnknownType does not support union with ${other.runtimeType}";
    }
    return this;
  }

  // This only works between "type-set" representations ('UnknownType' and
  // 'RuntimeType') and is used when merging type arguments.
  @override
  Type intersection(Type other, TypeHierarchy typeHierarchy) {
    if (other is! UnknownType && other is! RuntimeType) {
      throw "ERROR: UnknownType does not support intersection with ${other.runtimeType}";
    }
    // The more precise operand wins.
    return other;
  }

  /// An unknown LHS only certainly passes tests against top-like types.
  bool isSubtypeOfRuntimeType(TypeHierarchy typeHierarchy, RuntimeType other) {
    final testedType = other._type;
    if (testedType is DynamicType || testedType is VoidType) return true;
    return testedType is InterfaceType &&
        testedType.classNode == typeHierarchy.coreTypes.objectClass &&
        testedType.nullability != Nullability.nonNullable;
  }
}
| 31.269827 | 87 | 0.655395 |
b44c6bae1627bae0b0c629f5ac458dd24e5e6272 | 2,576 | swift | Swift | Sources/Beagle/Sources/Components/ServerDrivenComponent/Screen/Screen.swift | HenriqueZup/beagle-ios | 72b71fb2ce70a844e817b5d75989786e5fbbb760 | [
"Apache-2.0"
] | null | null | null | Sources/Beagle/Sources/Components/ServerDrivenComponent/Screen/Screen.swift | HenriqueZup/beagle-ios | 72b71fb2ce70a844e817b5d75989786e5fbbb760 | [
"Apache-2.0"
] | null | null | null | Sources/Beagle/Sources/Components/ServerDrivenComponent/Screen/Screen.swift | HenriqueZup/beagle-ios | 72b71fb2ce70a844e817b5d75989786e5fbbb760 | [
"Apache-2.0"
] | null | null | null | /*
* Copyright 2020, 2022 ZUP IT SERVICOS EM TECNOLOGIA E INOVACAO SA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// The screen element will help you define the screen view structure.
/// By using this component you can define configurations like whether or
/// not you want to use safe areas or display a tool bar/navigation bar.
public struct Screen: ServerDrivenComponent, StyleComponent, HasContext {

    /// Identifies your screen globally inside your application so that it could have actions set on itself.
    public var id: String?

    /// Enables a few visual options to be changed.
    public var style: Style?

    /// Enables safe area to help you place your views within the visible portion of the overall interface.
    /// Defaults to a safe area enabled on all four edges.
    public var safeArea: SafeArea? = SafeArea(top: true, leading: true, bottom: true, trailing: true)

    /// Enables an action bar/navigation bar into your view. By default it is set as null.
    public var navigationBar: NavigationBar?

    /// Defines the child elements on this screen.
    @AutoCodable
    public var child: ServerDrivenComponent

    /// Defines the context that will be set to this screen.
    public var context: Context?
}
extension Screen {
    enum CodingKeys: String, CodingKey {
        case id
        case style
        case safeArea
        case navigationBar
        case child
        case context
    }

    /// Custom decoder: all keys are optional except `child`, and a missing
    /// `safeArea` falls back to the fully-enabled default declared on the
    /// struct (decodeIfPresent would otherwise leave it nil).
    public init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        id = try container.decodeIfPresent(String.self, forKey: .id)
        style = try container.decodeIfPresent(Style.self, forKey: .style)
        // Keep this default in sync with the property initializer on Screen.
        safeArea = try container.decodeIfPresent(SafeArea.self, forKey: .safeArea) ??
            SafeArea(top: true, leading: true, bottom: true, trailing: true)
        navigationBar = try container.decodeIfPresent(NavigationBar.self, forKey: .navigationBar)
        child = try container.decode(forKey: .child)
        context = try container.decodeIfPresent(Context.self, forKey: .context)
    }
}
| 39.030303 | 108 | 0.705357 |
8b50c43b04cca964e8168b4159d6af0c73e6460b | 2,639 | kt | Kotlin | common/src/main/kotlin/tech/cuda/woden/common/configuration/WodenConfig.kt | cuda-tech/woden | 2efc2de1cad2646ea129637752b15d868ca13c7d | [
"Apache-2.0"
] | null | null | null | common/src/main/kotlin/tech/cuda/woden/common/configuration/WodenConfig.kt | cuda-tech/woden | 2efc2de1cad2646ea129637752b15d868ca13c7d | [
"Apache-2.0"
] | 1 | 2020-07-08T17:56:03.000Z | 2020-07-11T08:30:25.000Z | common/src/main/kotlin/tech/cuda/woden/common/configuration/WodenConfig.kt | cuda-tech/woden | 2efc2de1cad2646ea129637752b15d868ca13c7d | [
"Apache-2.0"
] | 1 | 2020-05-06T14:34:18.000Z | 2020-05-06T14:34:18.000Z | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tech.cuda.woden.common.configuration
import com.typesafe.config.ConfigFactory
import com.zaxxer.hikari.HikariDataSource
import com.zaxxer.hikari.pool.HikariPool
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import kotlinx.serialization.Transient
import kotlinx.serialization.hocon.Hocon
import tech.cuda.woden.common.configuration.datasource.DataSourceConfig
import tech.cuda.woden.common.configuration.email.EmailConfig
import tech.cuda.woden.common.configuration.scheduler.SchedulerConfig
import java.nio.file.Path
import java.nio.file.Paths
import java.sql.DriverManager
import javax.sql.DataSource
/**
 * Top-level application configuration decoded from HOCON.
 *
 * @author Jensen Qi <jinxiu.qi@alu.hit.edu.cn>
 * @since 0.1.0
 */
@Serializable
data class WodenConfig(
    @SerialName("datasource") private val datasourceConfig: DataSourceConfig,
    val email: EmailConfig,
    val scheduler: SchedulerConfig,
    @SerialName("git") private val _git: String,
) {
    // Lazily created so no connection is attempted until first access.
    // If the configured database does not exist yet, it is created on the
    // fly (utf8mb4) and the pool construction is retried once.
    val datasource: DataSource by lazy {
        try {
            HikariDataSource(datasourceConfig.hikariConfig)
        } catch (e: HikariPool.PoolInitializationException) {
            // NOTE(review): matching on the exact exception message is brittle
            // across HikariCP versions -- confirm this still holds on upgrade.
            if (e.message == "Failed to initialize pool: Unknown database '${datasourceConfig.dbName}'") {
                // NOTE(review): a plain replace() could also hit an occurrence
                // of dbName elsewhere in the JDBC URL (e.g. host) -- verify.
                val urlWithoutDbName = datasourceConfig.hikariConfig.jdbcUrl.replace(datasourceConfig.dbName, "")
                Class.forName("com.mysql.jdbc.Driver")
                DriverManager.getConnection(urlWithoutDbName, datasourceConfig.username, datasourceConfig.password).use {
                    it.createStatement().use { statement ->
                        statement.execute("create database if not exists ${datasourceConfig.dbName} default character set = 'utf8mb4'")
                    }
                }
                HikariDataSource(datasourceConfig.hikariConfig)
            } else {
                throw e
            }
        }
    }

    // Excluded from serialization; derived from the configured git string.
    @Transient
    val gitPath: Path = Paths.get(_git)
}
/** Global [WodenConfig] instance decoded from the `woden.conf` classpath resource. */
val Woden = Hocon.decodeFromConfig(
    WodenConfig.serializer(),
    ConfigFactory.parseResources("woden.conf")
)
| 38.246377 | 135 | 0.713149 |
dffb1ecfeb82cf9939a5eee456467ca2ecddb9ef | 4,644 | kt | Kotlin | Kotlin/src/test/kotlin/com/gildedrose/GildedRoseTest.kt | abeshi-softwire/GildedRose-Refactoring-Kata | e9fb22c93bc2b0b24213afb56d28632b9def6034 | [
"MIT"
] | null | null | null | Kotlin/src/test/kotlin/com/gildedrose/GildedRoseTest.kt | abeshi-softwire/GildedRose-Refactoring-Kata | e9fb22c93bc2b0b24213afb56d28632b9def6034 | [
"MIT"
] | null | null | null | Kotlin/src/test/kotlin/com/gildedrose/GildedRoseTest.kt | abeshi-softwire/GildedRose-Refactoring-Kata | e9fb22c93bc2b0b24213afb56d28632b9def6034 | [
"MIT"
] | 1 | 2020-07-07T14:08:57.000Z | 2020-07-07T14:08:57.000Z | package com.gildedrose
import org.junit.Assert.*
import org.junit.Test
/** Behavioral tests for [GildedRose.updateQuality] over the various item kinds. */
class GildedRoseTest {

    // Item names with special update rules; must match the strings the
    // implementation recognizes.
    private val agedBrieName = "Aged Brie"
    private val sulfurasName = "Sulfuras, Hand of Ragnaros"
    private val backstagePassName = "Backstage passes to a TAFKAL80ETC concert"

    /** Builds an app under test holding exactly the given items. */
    private fun getApp(vararg items: Item): GildedRose {
        return GildedRose(arrayOf(*items))
    }

    @Test
    fun quality_never_negative() {
        val app = getApp(Item("foo", 0, 0))
        app.updateQuality()
        assertTrue("Quality can't be negative", app.items.all { it.quality >= 0 })
    }

    @Test
    fun quality_decreases_over_time() {
        val startQuality = 10
        val app = getApp(Item("foo", 10, startQuality))
        app.updateQuality()
        assertTrue("Quality must decrease", startQuality > app.items[0].quality)
    }

    @Test
    fun sellin_negative_doubles_quality_loss() {
        val startQuality = 10
        val app = getApp(Item("foo", 1, startQuality))
        app.updateQuality()
        val secondQuality = app.items[0].quality
        app.updateQuality()
        val finalQuality = app.items[0].quality
        val diff1 = startQuality - secondQuality
        val diff2 = secondQuality - finalQuality
        // Quality loss should double once sellIn drops below zero.
        assertTrue(diff1 * 2 == diff2)
        // Using "assertTrue" rather than "assertEquals" because we don't know the
        // "actual" value -- we're just testing that the relationship holds.
    }

    @Test
    fun sellin_lowers_by_one_daily() {
        val startSellin = 1
        val app = getApp(Item("foo", startSellin, 10))
        app.updateQuality()
        val secondSellin = app.items[0].sellIn
        app.updateQuality()
        val finalSellin = app.items[0].sellIn
        assertEquals(startSellin - 1, secondSellin)
        assertEquals(secondSellin - 1, finalSellin)
    }

    @Test
    fun aged_brie_improves() {
        val startQuality = 10
        val app = getApp(Item(agedBrieName, 50, startQuality))
        app.updateQuality()
        assertTrue("Quality of $agedBrieName should increase",
            app.items[0].quality > startQuality)
    }

    @Test
    fun quality_never_above_50() {
        val startQualities = arrayOf(49, 50)
        val startItems = startQualities.map { Item(agedBrieName, 50, quality = it) }
        val app = getApp(*startItems.toTypedArray())
        app.updateQuality()
        assertTrue("Quality should be <= 50", app.items.all { it.quality <= 50 })
    }

    @Test
    fun sulfuras_never_changes() {
        val startQuality = 25
        val startSellin = 3
        val app = getApp(Item(sulfurasName, startSellin, startQuality))
        app.updateQuality()
        // Legendary item: neither quality nor sellIn ever changes.
        assertEquals(startQuality, app.items[0].quality)
        assertEquals(startSellin, app.items[0].sellIn)
    }

    @Test
    fun backstage_passes_improve() {
        val startQuality = 10
        val app = getApp(Item(backstagePassName, 50, startQuality))
        app.updateQuality()
        assertTrue("Quality of ${app.items[0].name} should increase",
            app.items[0].quality > startQuality)
    }

    @Test
    fun backstage_passes_improve_more_as_deadline_approaches() {
        val startQuality = 10
        val app = getApp(
            Item(backstagePassName, 10, startQuality),
            Item(backstagePassName, 5, startQuality))
        app.updateQuality()
        // Quality of backstage passes should increase by 2 with <=10 days left.
        assertEquals(startQuality + 2, app.items[0].quality)
        // Quality of backstage passes should increase by 3 with <=5 days left.
        assertEquals(startQuality + 3, app.items[1].quality)
    }

    @Test
    fun backstage_passes_quality_zero_past_sellby() {
        val startQuality = 10
        val app = getApp(Item(backstagePassName, 0, startQuality))
        app.updateQuality()
        assertEquals(0, app.items[0].quality)
    }

    @Test
    fun conjured_items_degrade_twice_as_fast() {
        val startQuality = 10
        // NOTE(review): this loop was commented "only want degrading itemNames",
        // yet it includes the legendary sulfuras name, which never degrades --
        // for that case the assertion only holds if "Conjured <sulfuras>" also
        // does not degrade. Confirm this matches the intended name matching.
        for (itemName in arrayOf("foo", sulfurasName)) {
            val app = getApp(
                Item("Conjured $itemName", 10, startQuality),
                Item(itemName, 10, startQuality)
            )
            app.updateQuality()
            val diffConjured = startQuality - app.items[0].quality
            val diffNormal = startQuality - app.items[1].quality
            assertTrue("Conjured items should degrade twice as fast as normal items",
                diffConjured == 2 * diffNormal)
        }
    }
}
| 31.808219 | 100 | 0.626615 |
a57c70a444f7f64c2ff62fab108f694f724c12d6 | 726 | asm | Assembly | oeis/015/A015174.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 11 | 2021-08-22T19:44:55.000Z | 2022-03-20T16:47:57.000Z | oeis/015/A015174.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 9 | 2021-08-29T13:15:54.000Z | 2022-03-09T19:52:31.000Z | oeis/015/A015174.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 3 | 2021-08-22T20:56:47.000Z | 2021-09-29T06:26:12.000Z | ; A015174: Sum of (Gaussian) q-binomial coefficients for q=-10.
; Submitted by Christian Krause
; 1,2,-7,184,7375,1854566,-733798243,1853096548948,7341689356896139,185324336420417243330,-7341318715564987522409479,1853228681381416966904537937712,734135578021202833790446696598284903,1853230149650719780628824101638488658632094,-7341352073752463172042926911168436199719281020715,185323000282365977327806415358214414702235362072779326476,7341352444398471078126955319244438959075021593621923864839673667,1853230017506364476752006027470077457704816180663421749768485656900020858
; Register map (derived from the instructions below):
;   $0 - intermediate accumulator; overwritten with $3 at the end
;   $1 - loop counter, set to n+1 before the loop
;   $2 - multiplied by -10 every pass, i.e. holds (-10)^k
;   $3 - running term; copied to $0 as the final result
mov $1,$0
mov $0,0
add $1,1
mov $2,1
mov $3,1
; lpb/lpe: the body repeats while $1 stays positive (LODA loop semantics);
; $1 is decremented once per pass, so it runs n+1 times.
lpb $1
  sub $1,1
  ; $4 = (-10)^k * $3 is added into $0; the following two instructions
  ; leave $3 increased by the previous value of $0 ($3 - $4 + new $0).
  mov $4,$2
  mul $2,-10
  mul $4,$3
  add $0,$4
  sub $3,$4
  add $3,$0
lpe
mov $0,$3
| 36.3 | 477 | 0.826446 |
9d1a82d9d956d62ada654ff5df26c61932a7b48f | 735 | dart | Dart | lib/src/widget/if_case_only.dart | agungnursatria/ifonly | ea330959d84f5324f295bf475379286513859a39 | [
"BSD-3-Clause"
] | 2 | 2021-09-10T07:05:22.000Z | 2021-09-10T09:12:06.000Z | lib/src/widget/if_case_only.dart | agungnursatria/ifonly | ea330959d84f5324f295bf475379286513859a39 | [
"BSD-3-Clause"
] | null | null | null | lib/src/widget/if_case_only.dart | agungnursatria/ifonly | ea330959d84f5324f295bf475379286513859a39 | [
"BSD-3-Clause"
] | null | null | null | import 'package:flutter/material.dart';
import 'package:ifonly/src/model/if_case_model.dart';
/// Switch-like widget: renders the builder matching [value] from
/// [caseBuilder] (or the first satisfied case when [value] is an [IfCases]),
/// falling back to [defaultBuilder] when nothing matches.
class IfCaseOnly<T> extends StatelessWidget {
  /// The value to match against the keys of [caseBuilder], or an [IfCases]
  /// holding boolean-guarded cases.
  final T value;

  /// Optional map from case value to the widget builder for that case.
  final Map<T, Widget Function(BuildContext context)> caseBuilder;

  /// Builder used when no case matches (or a matching builder returns null).
  final Widget Function(BuildContext) defaultBuilder;

  IfCaseOnly({
    Key key,
    @required this.value,
    this.caseBuilder,
    @required this.defaultBuilder,
  }) : super(key: key);

  @override
  Widget build(BuildContext context) {
    if (value is IfCases) {
      // Evaluate cases in declaration order; first true expression wins.
      for (final item in (value as IfCases).cases) {
        if (item.expression) return item.builder(context);
      }
      return defaultBuilder(context);
    }
    // Fixed: the previous `caseBuilder[value](context)` threw a
    // NoSuchMethodError when the map was null or had no entry for [value];
    // now both situations fall back to [defaultBuilder].
    final builder = caseBuilder == null ? null : caseBuilder[value];
    return builder?.call(context) ?? defaultBuilder(context);
  }
}
| 27.222222 | 68 | 0.69932 |
c07e8b6bd36f2e9c8f48101d6b0ab66939b551c8 | 704 | html | HTML | jazzmin/templates/admin_doc/base_docs.html | SVDouble/django-jazzmin | dddeee611a0657829fb6054f440376a8f2dc6ce9 | [
"MIT"
] | 972 | 2020-05-12T19:51:01.000Z | 2022-03-31T20:18:33.000Z | jazzmin/templates/admin_doc/base_docs.html | SVDouble/django-jazzmin | dddeee611a0657829fb6054f440376a8f2dc6ce9 | [
"MIT"
] | 290 | 2020-05-12T17:35:21.000Z | 2022-03-31T15:18:59.000Z | jazzmin/templates/admin_doc/base_docs.html | SVDouble/django-jazzmin | dddeee611a0657829fb6054f440376a8f2dc6ce9 | [
"MIT"
] | 166 | 2020-06-11T10:50:47.000Z | 2022-03-24T12:19:00.000Z | {% extends "admin/base_site.html" %}
{% block extrastyle %}
<style>
.literal-block, .doctest-block {
background: #272822;
color: #f8f8f2;
}
.module {
padding: 20px;
}
.card-header .card-title a {
color: #ffffff !important;
}
.card-header .card-title a:hover {
color: #f8f8f2 !important;
}
</style>
{% endblock %}
{% block content %}
{% block alerts %}{% endblock %}
<div id="content-main" class=col-12>
<div class="col-12">
<div class="card card-primary card-outline">
<div class="card-body pad table-responsive">
{% block docs_content %}{% endblock %}
</div>
</div>
</div>
</div>
{% endblock %} | 21.333333 | 50 | 0.559659 |
ffeb245de1b08d725680fd31b696205db651d6a6 | 4,931 | sql | SQL | db_login.sql | agus94/sistem-login | d85a6c7a7d9f665aecc26d15c06ce2db337ffb18 | [
"MIT"
] | null | null | null | db_login.sql | agus94/sistem-login | d85a6c7a7d9f665aecc26d15c06ce2db337ffb18 | [
"MIT"
] | null | null | null | db_login.sql | agus94/sistem-login | d85a6c7a7d9f665aecc26d15c06ce2db337ffb18 | [
"MIT"
] | null | null | null | -- phpMyAdmin SQL Dump
-- version 4.6.6deb5
-- https://www.phpmyadmin.net/
--
-- Host: localhost:3306
-- Generation Time: Aug 12, 2019 at 01:28 PM
-- Server version: 10.1.40-MariaDB-0ubuntu0.18.04.1
-- PHP Version: 7.2.19-0ubuntu0.18.04.1
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `db_login`
--
-- --------------------------------------------------------
--
-- Table structure for table `user`
--
CREATE TABLE `user` (
`id` int(11) NOT NULL,
`name` varchar(128) NOT NULL,
`email` varchar(128) NOT NULL,
`image` varchar(128) NOT NULL,
`password` varchar(256) NOT NULL,
`role_id` int(11) NOT NULL,
`is_active` int(1) NOT NULL,
`date_created` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `user`
--
INSERT INTO `user` (`id`, `name`, `email`, `image`, `password`, `role_id`, `is_active`, `date_created`) VALUES
(6, 'Nur Achmad Agus Ismail', 'agusismail94@gmail.com', 'default.jpg', '$2y$10$B11sW8owJws0BWS8DXJaWOnD7le/8kfNxYycbWFIi5aozkjyEnRhu', 2, 1, 1564969868),
(7, 'Fiqih', 'fiqih@kokola.co.id', 'default.jpg', '$2y$10$oLat4tuUcdgQre46lfPyM.doQfAjw2h0I0MxxHbXgKkXVQEJpn.n.', 1, 1, 1564978437);
-- --------------------------------------------------------
--
-- Table structure for table `user_access_menu`
--
CREATE TABLE `user_access_menu` (
`id` int(11) NOT NULL,
`role_id` int(11) NOT NULL,
`menu_id` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `user_access_menu`
--
INSERT INTO `user_access_menu` (`id`, `role_id`, `menu_id`) VALUES
(1, 1, 1),
(2, 1, 2),
(3, 2, 2),
(4, 1, 3);
-- --------------------------------------------------------
--
-- Table structure for table `user_menu`
--
CREATE TABLE `user_menu` (
`id` int(11) NOT NULL,
`menu` varchar(128) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `user_menu`
--
INSERT INTO `user_menu` (`id`, `menu`) VALUES
(1, 'Admin'),
(2, 'User'),
(3, 'Menu'),
(5, 'Test'),
(6, 'Test');
-- --------------------------------------------------------
--
-- Table structure for table `user_role`
--
CREATE TABLE `user_role` (
`id` int(11) NOT NULL,
`role` varchar(128) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `user_role`
--
INSERT INTO `user_role` (`id`, `role`) VALUES
(1, 'Administrator'),
(2, 'Member');
-- --------------------------------------------------------
--
-- Table structure for table `user_sub_menu`
--
CREATE TABLE `user_sub_menu` (
`id` int(11) NOT NULL,
`menu_id` int(11) NOT NULL,
`title` varchar(128) NOT NULL,
`url` varchar(128) NOT NULL,
`icon` varchar(128) NOT NULL,
`is_active` int(1) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--
-- Dumping data for table `user_sub_menu`
--
INSERT INTO `user_sub_menu` (`id`, `menu_id`, `title`, `url`, `icon`, `is_active`) VALUES
(1, 1, 'Dashboard', 'admin', 'fas fa-fw fa-tachometer-alt', 1),
(3, 2, 'My Profile', 'user', 'fas fa-fw fa-user', 1),
(4, 2, 'Edit Profile', 'user/edit', 'fas fa-fw fa-user-edit', 1),
(5, 3, 'Menu Manegement', 'Menu', 'fas fa-fw fa-folder', 1),
(6, 3, 'Submenu Management', 'menu/submenu', 'fa fa-fw fa-folder-open', 1),
(7, 2, 'coba', 'coba/coba', 'fab fa-fw fa-youtube', 1);
--
-- Indexes for dumped tables
--
--
-- Indexes for table `user`
--
ALTER TABLE `user`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `user_access_menu`
--
ALTER TABLE `user_access_menu`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `user_menu`
--
ALTER TABLE `user_menu`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `user_role`
--
ALTER TABLE `user_role`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `user_sub_menu`
--
ALTER TABLE `user_sub_menu`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `user`
--
ALTER TABLE `user`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
--
-- AUTO_INCREMENT for table `user_access_menu`
--
ALTER TABLE `user_access_menu`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `user_menu`
--
ALTER TABLE `user_menu`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7;
--
-- AUTO_INCREMENT for table `user_role`
--
ALTER TABLE `user_role`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
--
-- AUTO_INCREMENT for table `user_sub_menu`
--
ALTER TABLE `user_sub_menu`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
| 24.171569 | 153 | 0.637599 |
f57f754ab81b7c5a6b21ee3f65c1c2143cefa99b | 1,629 | lua | Lua | exampleIOS/bin/data/scripts/touchExample.lua | chaosct/ofxLua | 5236f0e92c28cff7e1856370b28405c0086d3aa6 | [
"BSD-2-Clause"
] | null | null | null | exampleIOS/bin/data/scripts/touchExample.lua | chaosct/ofxLua | 5236f0e92c28cff7e1856370b28405c0086d3aa6 | [
"BSD-2-Clause"
] | null | null | null | exampleIOS/bin/data/scripts/touchExample.lua | chaosct/ofxLua | 5236f0e92c28cff7e1856370b28405c0086d3aa6 | [
"BSD-2-Clause"
] | null | null | null |
-- Multitouch demo: keeps one entry per active touch and draws each as a
-- labelled circle. Keys are (touch.id + 1) because Lua tables are 1-based.
-- (The old "pre-populate with nil" loop was a no-op and has been removed:
-- assigning nil to a table slot does nothing.)
touches = {}

----------------------------------------------------
function setup()
	of.background(255)
	of.enableSmoothing()
	of.enableAlphaBlending()
end

----------------------------------------------------
function update()
end

----------------------------------------------------
function draw()
	-- Draw every active touch.
	-- BUGFIX: iterate with pairs() instead of ipairs(). The table becomes
	-- sparse as soon as any touch ends (its slot is set to nil in touchUp/
	-- touchCancelled), and ipairs() stops at the first nil slot, which
	-- skipped still-active touches with higher ids.
	for _, touch in pairs(touches) do
		of.pushMatrix()
		of.translate(touch.x, touch.y, 0)
		-- filled circle whose color is derived from position and touch id
		of.setColor(
			of.map(touch.x, 0, of.getWidth(), 50, 255),
			of.map(touch.id, 0, 10, 127, 255),
			of.map(touch.y, 0, of.getHeight(), 50, 255),
			200)
		of.fill()
		of.circle(0, 0, 100)
		-- touch id label above the circle
		of.setColor(100)
		of.drawBitmapString(tostring(touch.id), 0, -80)
		of.popMatrix()
	end
end

----------------------------------------------------
function touchDown(touch)
	-- '=' only copies a reference in Lua, so store an of.Touch copy instead
	touches[touch.id+1] = of.Touch(touch)
end

----------------------------------------------------
function touchMoved(touch)
	touches[touch.id+1] = of.Touch(touch)
end

----------------------------------------------------
function touchUp(touch)
	touches[touch.id+1] = nil -- setting to nil deallocates
end

----------------------------------------------------
function touchDoubleTap(touch)
	print("touchDoubleTap")
end

----------------------------------------------------
function touchCancelled(touch)
	touches[touch.id+1] = nil
end
| 23.271429 | 75 | 0.494782 |
80802c96b92b6d250a828e96d8d6085f80ce5340 | 3,169 | java | Java | metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java | naboudieng/datahub | 1a5121a5aeb3940960e9994362860d4130b840f2 | [
"Apache-2.0"
] | 3,586 | 2020-01-27T11:09:57.000Z | 2022-03-15T16:13:30.000Z | metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java | shirshanka/datahub | 7d5da17d88dd2c0b412b27296c161b73fc60e979 | [
"Apache-2.0"
] | 1,678 | 2020-01-27T20:51:01.000Z | 2022-03-15T15:22:02.000Z | metadata-io/src/main/java/com/linkedin/metadata/timeseries/elastic/indexbuilder/MappingsBuilder.java | shirshanka/datahub | 7d5da17d88dd2c0b412b27296c161b73fc60e979 | [
"Apache-2.0"
] | 924 | 2020-01-28T20:10:50.000Z | 2022-03-15T10:01:23.000Z | package com.linkedin.metadata.timeseries.elastic.indexbuilder;
import com.google.common.collect.ImmutableMap;
import com.linkedin.data.schema.DataSchema;
import com.linkedin.metadata.models.AspectSpec;
import com.linkedin.metadata.models.TimeseriesFieldCollectionSpec;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nonnull;
/**
 * Builds the Elasticsearch index mappings used to store timeseries aspects.
 */
public class MappingsBuilder {

  public static final String URN_FIELD = "urn";
  public static final String TIMESTAMP_FIELD = "@timestamp";
  public static final String TIMESTAMP_MILLIS_FIELD = "timestampMillis";
  public static final String EVENT_GRANULARITY = "eventGranularity";
  public static final String EVENT_FIELD = "event";
  public static final String SYSTEM_METADATA_FIELD = "systemMetadata";
  public static final String IS_EXPLODED_FIELD = "isExploded";

  /** Static helper class; never instantiated. */
  private MappingsBuilder() {
  }

  /**
   * Derives the full mapping document for the given timeseries aspect.
   *
   * @param aspectSpec spec of the aspect to index; must be a timeseries aspect
   * @return a {@code {"properties": {...}}} map ready to be used as index mappings
   * @throws IllegalArgumentException if the aspect is not a timeseries aspect
   */
  public static Map<String, Object> getMappings(@Nonnull final AspectSpec aspectSpec) {
    if (!aspectSpec.isTimeseries()) {
      throw new IllegalArgumentException(
          String.format("Cannot apply timeseries field indexing for a non-timeseries aspect %s", aspectSpec.getName()));
    }

    // Fixed fields present on every timeseries document.
    final Map<String, Object> properties = new HashMap<>();
    properties.put(URN_FIELD, ImmutableMap.of("type", "keyword"));
    properties.put(TIMESTAMP_FIELD, ImmutableMap.of("type", "date"));
    properties.put(TIMESTAMP_MILLIS_FIELD, ImmutableMap.of("type", "date"));
    properties.put(EVENT_GRANULARITY, ImmutableMap.of("type", "keyword"));
    properties.put(EVENT_FIELD, ImmutableMap.of("type", "object", "enabled", false));
    properties.put(SYSTEM_METADATA_FIELD, ImmutableMap.of("type", "object", "enabled", false));
    properties.put(IS_EXPLODED_FIELD, ImmutableMap.of("type", "boolean"));

    // Per-aspect fields declared through the timeseries annotations.
    aspectSpec.getTimeseriesFieldSpecs()
        .forEach(fieldSpec -> properties.put(fieldSpec.getName(), getFieldMapping(fieldSpec.getPegasusSchema().getType())));
    aspectSpec.getTimeseriesFieldCollectionSpecs()
        .forEach(collectionSpec -> properties.put(collectionSpec.getName(),
            getTimeseriesFieldCollectionSpecMapping(collectionSpec)));

    return ImmutableMap.of("properties", properties);
  }

  /** Mapping for a collection of timeseries fields plus the collection's key field. */
  private static Map<String, Object> getTimeseriesFieldCollectionSpecMapping(
      TimeseriesFieldCollectionSpec timeseriesFieldCollectionSpec) {
    final Map<String, Object> nestedProperties = new HashMap<>();
    // The collection key is always indexed as a string field.
    nestedProperties.put(timeseriesFieldCollectionSpec.getTimeseriesFieldCollectionAnnotation().getKey(),
        getFieldMapping(DataSchema.Type.STRING));
    timeseriesFieldCollectionSpec.getTimeseriesFieldSpecMap()
        .values()
        .forEach(fieldSpec -> nestedProperties.put(fieldSpec.getName(),
            getFieldMapping(fieldSpec.getPegasusSchema().getType())));
    return ImmutableMap.of("properties", nestedProperties);
  }

  /** Maps a Pegasus schema type onto the corresponding Elasticsearch field type. */
  private static Map<String, Object> getFieldMapping(DataSchema.Type dataSchemaType) {
    switch (dataSchemaType) {
      case INT:
        return ImmutableMap.of("type", "integer");
      case LONG:
        return ImmutableMap.of("type", "long");
      case FLOAT:
        return ImmutableMap.of("type", "float");
      case DOUBLE:
        return ImmutableMap.of("type", "double");
      default:
        // Strings, enums, urns etc. are indexed as exact-match keywords.
        return ImmutableMap.of("type", "keyword");
    }
  }
}
| 42.253333 | 120 | 0.734301 |
682d5fdcbc26f73d54e172af108ed48417fc0082 | 803 | html | HTML | docs/page/kotlindoc/it.discordbot.core/-j-d-a-controller/music-command.html | samuele794/TakaoBot | 6b471c694c95d6cbd72a5fc24415bcc4066219c0 | [
"Apache-2.0"
] | 2 | 2019-01-07T16:51:20.000Z | 2019-04-10T21:35:45.000Z | docs/page/kotlindoc/it.discordbot.core/-j-d-a-controller/music-command.html | samuele794/TakaoBot | 6b471c694c95d6cbd72a5fc24415bcc4066219c0 | [
"Apache-2.0"
] | null | null | null | docs/page/kotlindoc/it.discordbot.core/-j-d-a-controller/music-command.html | samuele794/TakaoBot | 6b471c694c95d6cbd72a5fc24415bcc4066219c0 | [
"Apache-2.0"
] | null | null | null | <HTML>
<HEAD>
<meta charset="UTF-8">
<title>JDAController.musicCommand - </title>
<link rel="stylesheet" href="../../style.css">
</HEAD>
<BODY>
<a href="../index.html">it.discordbot.core</a> / <a href="index.html">JDAController</a> / <a href="./music-command.html">musicCommand</a><br/>
<br/>
<h1>musicCommand</h1>
<a name="it.discordbot.core.JDAController$musicCommand"></a>
<code><span class="keyword">lateinit</span> <span class="keyword">var </span><span class="identifier">musicCommand</span><span class="symbol">: </span><a href="../../it.discordbot.command.music/-music-command/index.html"><span class="identifier">MusicCommand</span></a></code>
<p>MusicCommand</p>
<h3>Property</h3>
<p><a name="musicCommand"></a>
<code>musicCommand</code> - MusicCommand</p>
</BODY>
</HTML>
| 42.263158 | 276 | 0.692403 |
eb4d3488cadafc1d00a21ca0787a90b777724bbc | 386 | sql | SQL | seeds.sql | slogreco/employee-tracker | f349fbf5bc2d7b44e01555198b900ee68608f94f | [
"Unlicense"
] | null | null | null | seeds.sql | slogreco/employee-tracker | f349fbf5bc2d7b44e01555198b900ee68608f94f | [
"Unlicense"
] | null | null | null | seeds.sql | slogreco/employee-tracker | f349fbf5bc2d7b44e01555198b900ee68608f94f | [
"Unlicense"
] | null | null | null | INSERT INTO employee (first_name, last_name, role_id, manager_id)
-- Demo employees (Schitt's Creek characters); all report to manager_id 1.
-- NOTE(review): role_id values 5 and 4 have no matching row in employee_role
-- below (only ids 1 to 3 are seeded, assuming AUTO_INCREMENT starts at 1);
-- confirm the intended role ids.
VALUES ("David", "Rose", 5, 1), ("Alexis", "Rose", 4, 1), ("Stevie", "Budd", 3, 1);
-- Departments: get auto-increment ids 1..3 in insertion order.
INSERT INTO department (department_name)
VALUES ("Floral"), ("Party Planning"), ("Front Desk");
-- Roles: title, salary, and the department each role belongs to.
INSERT INTO employee_role (title, salary, department_id)
VALUES ("Florist", 40000, 1), ("Receptionist", 25000, 2), ("Drunk", 100000, 3);
| 38.6 | 83 | 0.673575 |
752d1a94803c37cf19e5c67af0870324e04a2fc6 | 237 | cs | C# | source/KihonEngine/Graphics/Assets/Maps/MapBuilderFromFile.cs | nico65535/Research.KihonEngineRewriteGameLogic | d6eae247d4dc05caa9a5c3dc627fa82db61f1a10 | [
"MIT"
] | null | null | null | source/KihonEngine/Graphics/Assets/Maps/MapBuilderFromFile.cs | nico65535/Research.KihonEngineRewriteGameLogic | d6eae247d4dc05caa9a5c3dc627fa82db61f1a10 | [
"MIT"
] | null | null | null | source/KihonEngine/Graphics/Assets/Maps/MapBuilderFromFile.cs | nico65535/Research.KihonEngineRewriteGameLogic | d6eae247d4dc05caa9a5c3dc627fa82db61f1a10 | [
"MIT"
] | null | null | null |
namespace KihonEngine.Graphics.Assets.Maps
{
/// <summary>
/// Map builder that reads map data from a file on disk by opening it as a
/// read-only stream and delegating to <see cref="MapBuilderFromStream"/>.
/// </summary>
public class MapBuilderFromFile : MapBuilderFromStream
{
/// <summary>Creates a builder for the map file at <paramref name="path"/>.</summary>
/// <param name="path">Path of the map file to open for reading.</param>
// NOTE(review): the FileStream opened here is handed to the base class;
// presumably MapBuilderFromStream disposes it when done. Confirm, otherwise
// the file handle leaks.
public MapBuilderFromFile(string path)
: base(System.IO.File.OpenRead(path))
{
}
}
}
| 19.75 | 58 | 0.628692 |
70aee42ddc568153bdbf3bda4f6eb99a345ad1f5 | 974 | cs | C# | Assets/Scripts/ObjectSprite.cs | sganondorf/miniature-rotary-phone | 27671b29ed42b53e122fb50ce423fa4606405bf7 | [
"Apache-2.0"
] | null | null | null | Assets/Scripts/ObjectSprite.cs | sganondorf/miniature-rotary-phone | 27671b29ed42b53e122fb50ce423fa4606405bf7 | [
"Apache-2.0"
] | null | null | null | Assets/Scripts/ObjectSprite.cs | sganondorf/miniature-rotary-phone | 27671b29ed42b53e122fb50ce423fa4606405bf7 | [
"Apache-2.0"
] | null | null | null | using UnityEngine;
using UnityEngine.Networking;
using System.Collections;
/// <summary>
/// Networked Unity behaviour that picks which sprite to show on this object
/// based on the local player's number (player one, player two, or unknown).
/// </summary>
public class ObjectSprite : NetworkBehaviour {

    public Sprite sprOne;       // sprite shown when the local player is player one
    public Sprite sprTwo;       // sprite shown when the local player is player two
    public Sprite placeholder;  // fallback sprite when the player number is neither

    private SpriteRenderer rend;
    private PlayerNumber playerNum;

    // Fetches the local player's number, or null when it is not available yet
    // (GetLocalPlayerNumber throws in that case). Shared by Start and Update,
    // which previously duplicated this try/catch.
    private static PlayerNumber FetchLocalPlayerNumber()
    {
        try
        {
            return PlayerNumber.GetLocalPlayerNumber();
        }
        catch
        {
            return null;
        }
    }

    // Use this for initialization
    void Start()
    {
        rend = GetComponent<SpriteRenderer>();
        playerNum = FetchLocalPlayerNumber();
    }

    // Update is called once per frame
    void Update()
    {
        if (isServer && !isClient)
        {
            // One of the clients might be a server too.
            // If dedicated server, use placeholder.
            return;
        }
        if (playerNum == null)
        {
            // Keep retrying until the local player number becomes available;
            // the sprite is applied on a later frame once it is known.
            playerNum = FetchLocalPlayerNumber();
            return;
        }
        if (playerNum.IsPlayerOne())
        {
            rend.sprite = sprOne;
        }
        else if (playerNum.IsPlayerTwo())
        {
            rend.sprite = sprTwo;
        }
        else
        {
            rend.sprite = placeholder;
        }
    }
}
| 18.730769 | 52 | 0.668378 |
aff04e8fb41916339bf834de5bedb22e1fdc5c94 | 4,225 | html | HTML | templates/mfctracker/index.html | gonzoua/mfctracker | 77800ee5ab17e4efefb12bfbd547bff1ed94c6a1 | [
"BSD-2-Clause"
] | 3 | 2016-10-19T05:01:31.000Z | 2019-06-06T18:20:11.000Z | templates/mfctracker/index.html | gonzoua/mfctracker | 77800ee5ab17e4efefb12bfbd547bff1ed94c6a1 | [
"BSD-2-Clause"
] | 3 | 2017-11-28T17:31:58.000Z | 2021-04-12T02:37:27.000Z | templates/mfctracker/index.html | gonzoua/mfctracker | 77800ee5ab17e4efefb12bfbd547bff1ed94c6a1 | [
"BSD-2-Clause"
] | 1 | 2020-06-26T14:05:53.000Z | 2020-06-26T14:05:53.000Z | {% extends "mfctracker/base.html" %}
{% load bootstrap3 %}
{% block head %}
<script>
$( document ).ready(function() {
$("#setfilter").click(function() {
$('input[name="extended_filters"]').val($("#extended_filters_edit").val());
$("#filtersform").submit();
});
$('[data-toggle="popover"]').popover({
html : true,
content: function() {
var content = $(this).attr("data-popover-content");
return $(content).children(".popover-body").html();
},
title: function() {
var title = $(this).attr("data-popover-content");
return $(title).children(".popover-heading").html();
}
})
});
</script>
{% endblock %}
{% block content %}
<!-- Content for filter help popover -->
<div class="hidden" id="a1">
<div class="popover-heading">
Filter syntax
</div>
<div class="popover-body">
<p>
<b>committer</b> filter by committer.<br/>
<b>@path/to/</b> filter by path in repository.<br/>
<b>committer@path/to/</b> filter by committer AND path in repository.<br/>
<b>abdef0123</b> filter by commit's SHA hash.<br/>
<b>rNNN</b> filter by SVN revision.<br/>
<b>rNNN-rMMM</b> filter by SVN revisions range (inclusive).<br/>
Multiple filters can be specified separated by spaces or comas in which case they'll be combined using OR operator.
</p>
</div>
</div>
<div class="container">
<div class="row">
<!-- Branch -->
<div class="col-md-3">
<div class="dropdown">
<button class="btn btn-default dropdown-toggle" type="button" id="dropdownBranches" data-toggle="dropdown" aria-haspopup="true" aria-expanded="true">
Target branch: <b>{{ current_branch.name }}</b>
<span class="caret"></span>
</button>
<ul class="dropdown-menu" aria-labelledby="dropdownBranches">
{% for branch in branches %}
<li><a href="{% url 'branch' branch_id=branch.pk %}">{{ branch.name }}</a></li>
{% endfor %}
</ul>
</div>
</div>
<div class="col-md-9 text-right">
<form id="filtersform" class="form-inline" action="{% url 'setfilter' branch_id=current_branch.pk %}" method="POST">
{% csrf_token %}
<div class="btn-group" data-toggle="buttons">
<label class="btn btn-primary filter {{ waiting_active }}">
<input type="checkbox" autocomplete="off" name="filter_waiting" {{ waiting_checked }}><span class="glyphicon glyphicon-ok"></span> Waiting
</label>
<label class="btn btn-success filter {{ ready_active }}">
<input type="checkbox" autocomplete="off" name="filter_ready" {{ ready_checked }}><span class="glyphicon glyphicon-ok"></span> Ready
</label>
<label class="btn btn-default filter {{ other_active }}">
<input type="checkbox" autocomplete="off" name="filter_other" {{ other_checked }}><span class="glyphicon glyphicon-ok"></span> No MFC date
</label>
</div>
<div class="form-group">
<input type="text" class="form-control" name="filters" value="{{ filters }}" placeholder="Filter">
<input type="hidden" name="page" value="{{ commits.number }}">
<input type="hidden" name="extended_filters" value="">
</div>
<button type="button" class="btn btn-primary" id="setfilter">Filter</button>
<button type="button" class="btn btn-link" data-toggle="popover" data-placement="bottom" data-popover-content="#a1">[?]</button>
<!-- <a class="btn btn-default" role="button" data-toggle="collapse" href="#advancedFilters" aria-expanded="false" aria-controls="advancedFilters">Advanced</a> -->
</form>
</div>
</div>
<div class="row">
</div>
<div class="row">
{% include "mfctracker/commits.html" %}
</div>
<div class="row">
<div class="col-md-12 text-center">
{% bootstrap_pagination commits %}
</div>
</div>
</div>
{% endblock %}
| 41.421569 | 175 | 0.562367 |
4a7e111d9a6274119376af720e1b638c6c0dbd87 | 1,846 | cs | C# | src/WebApi/src/Website.Application/BlogPosts/Queries/GetPublishedBlogPostsListQuery/BlogPostModel.cs | MTurnhout/Website | 293e656a7eba0b2a22d8bd191f80ad6d96db87ce | [
"Unlicense"
] | null | null | null | src/WebApi/src/Website.Application/BlogPosts/Queries/GetPublishedBlogPostsListQuery/BlogPostModel.cs | MTurnhout/Website | 293e656a7eba0b2a22d8bd191f80ad6d96db87ce | [
"Unlicense"
] | 21 | 2020-04-10T20:37:04.000Z | 2022-03-02T02:54:17.000Z | src/WebApi/src/Website.Application/BlogPosts/Queries/GetPublishedBlogPostsListQuery/BlogPostModel.cs | MTurnhout/Website | 293e656a7eba0b2a22d8bd191f80ad6d96db87ce | [
"Unlicense"
] | null | null | null | //-----------------------------------------------------------------------
// <copyright file="BlogPostModel.cs" company="Martijn Turnhout">
// Copyright (c) Martijn Turnhout. All Rights Reserved.
// </copyright>
// <author>Martijn Turnhout</author>
//-----------------------------------------------------------------------
namespace Website.Application.BlogPosts.Queries.GetPublishedBlogPostsListQuery
{
using System;
/// <summary>
/// Model that contains blog post details.
/// </summary>
public class BlogPostModel
{
/// <summary>
/// Gets or sets the identifier of the blog post.
/// </summary>
/// <value>The identifier of the blog post.</value>
public int Id { get; set; }
/// <summary>
/// Gets or sets the published date of the blog post.
/// </summary>
/// <value>The published date of the blog post.</value>
public DateTime PublishedAt { get; set; }
/// <summary>
/// Gets or sets the name of the user who created or last modified the blog post.
/// </summary>
/// <value>The name of the user who created or last modified the blog post.</value>
public string UserName { get; set; }
/// <summary>
/// Gets or sets the headline of the blog post.
/// </summary>
/// <value>The headline of the blog post.</value>
public string Headline { get; set; }
/// <summary>
/// Gets or sets the intro of the blog post.
/// </summary>
/// <value>The intro of the blog post.</value>
public string Intro { get; set; }
/// <summary>
/// Gets or sets the body of the blog post.
/// </summary>
/// <value>The body of the blog post.</value>
public string Body { get; set; }
}
}
| 34.185185 | 91 | 0.527086 |
204f5d1ea95056ec6b09bf631f8a4b31802d9295 | 3,426 | kt | Kotlin | app/src/main/java/com/febers/uestc_bbs/module/post/view/content/image_text/ImageTextHelper.kt | magicmmp/UESTC_BBS | 2925d6e7151d1c5f96cd62874c69bc52e77a5b85 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/febers/uestc_bbs/module/post/view/content/image_text/ImageTextHelper.kt | magicmmp/UESTC_BBS | 2925d6e7151d1c5f96cd62874c69bc52e77a5b85 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/com/febers/uestc_bbs/module/post/view/content/image_text/ImageTextHelper.kt | magicmmp/UESTC_BBS | 2925d6e7151d1c5f96cd62874c69bc52e77a5b85 | [
"Apache-2.0"
] | null | null | null | package com.febers.uestc_bbs.module.post.view.content.image_text
import android.content.Context
import android.graphics.drawable.Drawable
import android.text.Html
import android.text.Spannable
import android.text.SpannableStringBuilder
import android.text.style.ClickableSpan
import android.text.style.ForegroundColorSpan
import android.text.style.ImageSpan
import android.text.style.StyleSpan
import android.text.style.URLSpan
import android.text.util.Linkify
import android.view.View
import android.widget.TextView
import com.febers.uestc_bbs.module.context.ClickContext
import com.febers.uestc_bbs.utils.colorAccent
/**
 * Builds a TextView that renders HTML content with inline images.
 * Reference: https://github.com/CentMeng/RichTextView
 */
object ImageTextHelper {
/**
 * Resolves [source] into a [Drawable] via [GlideImageGetter]
 * (presumably loaded asynchronously and drawn into [textView] -
 * GlideImageGetter's implementation is not shown here).
 */
private fun getUrlDrawable(source: String?, textView: TextView): Drawable {
val imageGetter = GlideImageGetter(textView.context, textView)
return imageGetter.getDrawable(source)
}
/**
 * Renders [html] into [tv] and rebuilds the spans produced by [Html.fromHtml]
 * so that links and images are handled by the app ([MyUrlSpan]) instead of the
 * default [URLSpan] behavior.
 *
 * @param tv target TextView
 * @param html HTML source to render
 * @param linkTextColor optional color applied to auto-linked text
 */
fun setImageText(tv: TextView, html: String, linkTextColor: Int? = null) {
val context = tv.context
val htmlStr = Html.fromHtml(html)
tv.isClickable = true
tv.setTextIsSelectable(true)
tv.text = htmlStr
//tv.setMovementMethod(LinkMovementMethod.newInstance());
// Use the auto-link mask below instead; otherwise hyperlink handling stops working.
tv.autoLinkMask = Linkify.WEB_URLS or Linkify.EMAIL_ADDRESSES
linkTextColor?.let {
tv.setLinkTextColor(it)
}
val text = tv.text
if (text is Spannable) {
val end = text.length
val sp = tv.text as Spannable
// Collect the spans generated by Html.fromHtml / Linkify...
val urls = sp.getSpans(0, end, URLSpan::class.java)
val images = sp.getSpans(0, end, ImageSpan::class.java)
val styleSpans = sp.getSpans(0, end, StyleSpan::class.java)
val colorSpans = sp.getSpans(0, end, ForegroundColorSpan::class.java)
// ...then rebuild them on a fresh builder so clicks go through MyUrlSpan.
val style = SpannableStringBuilder(text)
style.clearSpans()
// Links: replace each URLSpan with an app-handled span, tinted with the accent color.
for (url in urls) {
val myUrlSpan = MyUrlSpan(url.url, context)
style.setSpan(myUrlSpan, sp.getSpanStart(url), sp.getSpanEnd(url), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE)
val colorSpan = ForegroundColorSpan(colorAccent())
style.setSpan(colorSpan, sp.getSpanStart(url), sp.getSpanEnd(url), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE)
}
// Images: load the drawable via Glide and attach a MyUrlSpan carrying the image source.
for (url in images) {
val span = ImageSpan(getUrlDrawable(url.source, tv), url.source!!)
style.setSpan(span, sp.getSpanStart(url), sp.getSpanEnd(url), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE)
val myUrlSpan = MyUrlSpan(url.source!!, context)
style.setSpan(myUrlSpan, sp.getSpanStart(url), sp.getSpanEnd(url), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE)
}
// Bold/italic and foreground-color spans are re-applied unchanged.
for (styleSpan in styleSpans) {
style.setSpan(styleSpan, sp.getSpanStart(styleSpan), sp.getSpanEnd(styleSpan), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE)
}
for (colorSpan in colorSpans) {
style.setSpan(colorSpan, sp.getSpanStart(colorSpan), sp.getSpanEnd(colorSpan), Spannable.SPAN_EXCLUSIVE_EXCLUSIVE)
}
tv.text = style
}
}
/** URL span that routes clicks through the app's [ClickContext] link handler. */
private class MyUrlSpan internal constructor(private val mUrl: String, private val mContext: Context) : ClickableSpan() {
override fun onClick(widget: View) {
ClickContext.linkClick(mUrl, mContext)
}
}
}
| 40.785714 | 130 | 0.672212 |
e74a3105eb0eec9e58d1236aa87c6f08750b47c8 | 220 | js | JavaScript | src/kyu7/catDogYearsReloaded.js | jirn073-76/Codewars-Katas | f1de16278fe2d990d66c5abec7c7774c8d897660 | [
"BSD-2-Clause"
] | null | null | null | src/kyu7/catDogYearsReloaded.js | jirn073-76/Codewars-Katas | f1de16278fe2d990d66c5abec7c7774c8d897660 | [
"BSD-2-Clause"
] | null | null | null | src/kyu7/catDogYearsReloaded.js | jirn073-76/Codewars-Katas | f1de16278fe2d990d66c5abec7c7774c8d897660 | [
"BSD-2-Clause"
] | null | null | null | ownedCatAndDog = (c,d) =>{
// Converts a cat's age c and a dog's age d (both in human years) into
// "cat years" / "dog years": the first 15 human years count as year 1,
// the next 9 as year 2, then every 4 (cats) or 5 (dogs) human years add
// one more. Returns [catYears, dogYears].
let [cY,dY] = [0,0]
if((c-=15)>=0) cY=1 // first 15 human years -> 1 cat year
if((c-=9)>=0) cY=2 // next 9 human years -> 2nd cat year
if((d-=15)>=0) dY=1 // first 15 human years -> 1 dog year
if((d-=9)>=0) dY=2 // next 9 human years -> 2nd dog year
if(c>0) cY+=(c/4)|0 // +1 cat year per remaining 4 human years ("|0" truncates)
if(d>0) dY+=(d/5)|0 // +1 dog year per remaining 5 human years
return [cY,dY]
}
| 16.923077 | 26 | 0.395455 |
4c15c84b4476772d39e4083472ff50c8c8124b3f | 2,593 | php | PHP | resources/views/product/index.blade.php | abdullahalnoor/dynamic-content | 757c4e59834237f49ef11deca1f8d0e716b5f314 | [
"MIT"
] | 1 | 2019-04-15T09:49:50.000Z | 2019-04-15T09:49:50.000Z | resources/views/product/index.blade.php | abdullahalnoor/dynamic-content | 757c4e59834237f49ef11deca1f8d0e716b5f314 | [
"MIT"
] | null | null | null | resources/views/product/index.blade.php | abdullahalnoor/dynamic-content | 757c4e59834237f49ef11deca1f8d0e716b5f314 | [
"MIT"
] | null | null | null | @extends('welcome')
@section('content')
<div class="row justify-content-center">
<div class="col-md-8">
<div class="card ">
<div class="card-header">
Product
<button type="button" class="btn btn-primary float-right" id="addProductModal">
Add New
</button>
</div>
<div class="card-body" id="refreshTable">
<table class="table">
<thead>
<tr>
<th>Name</th>
<th>Action</th>
</tr>
</thead>
<tbody>
@forelse ($products as $product)
<tr>
<td> {{ $product->name }} </td>
<td>
<a href="" class="btn btn-info editProduct" data-route="{{route('product.edit',$product->id)}}">Edit</a>
</td>
</tr>
@empty
<tr>
<td colspan="2" class="text-center">No Product Found...</td>
</tr>
@endforelse
</tbody>
</table>
</div>
</div>
</div>
</div>
<div id="modals">
</div>
@include('components.delete-modal')
@endsection
@push('script')
<script>
    $(document).ready(function () {
        // Send Laravel's CSRF token with every AJAX request.
        $.ajaxSetup({ headers: { 'X-CSRF-TOKEN': $('meta[name="csrf-token"]').attr('content') } });

        // Open the "create product" modal with markup fetched from the server.
        $("#addProductModal").on("click", function (e) {
            e.preventDefault();
            $.get("{{route('product.create')}}", function (data) {
                $("#modals").empty().append(data);
                // console.log(data);
                $("#modals #productModal").modal("show");
            })
        });

        // Delegated on #modals because the form is injected there after load.
        $("#modals").on("submit", "#productCreateForm", function (e) {
            e.preventDefault();
            var formData = new FormData($(this)[0]);
            $.ajax({
                url: "{{route('product.create')}}",
                data: formData,
                cache: false,
                contentType: false,
                processData: false,
                type: 'POST',
            })
            .done(function (data) {
                console.log(data);
                // Re-render only the product table without a full page reload.
                $("#refreshTable").load(location.href + " #refreshTable");
            })
            .fail(function (err) {
                console.log(err.responseJSON.errors.image);
                $("#modals #errImage").empty().append(err.responseJSON.errors.image);
            });
        });

        // BUGFIX: bind the edit handler via delegation on the document.
        // The previous direct $(".editProduct").on("click", ...) binding was
        // lost for rows re-rendered by the $("#refreshTable").load(...) call
        // above, so Edit stopped working after adding a product.
        $(document).on("click", ".editProduct", function (e) {
            e.preventDefault();
            var route = $(this).data("route");
            $.get(route, function (data) {
                $("#modals").empty().append(data);
                $("#modals #productModal").modal("show");
            })
        });
    });
</script>
@endpush | 20.417323 | 120 | 0.476668 |
b7ed1fa6dfbc674ab8d94f7a18506adbb307d4b6 | 1,283 | kt | Kotlin | picture_library/src/main/java/com/luck/picture/lib/widget/longimage/ImageViewState.kt | MoustafaShahin/InsGallery | 6cc5ff6a5c86c491a852304c58ce979ed42d6225 | [
"Apache-2.0"
] | null | null | null | picture_library/src/main/java/com/luck/picture/lib/widget/longimage/ImageViewState.kt | MoustafaShahin/InsGallery | 6cc5ff6a5c86c491a852304c58ce979ed42d6225 | [
"Apache-2.0"
] | null | null | null | picture_library/src/main/java/com/luck/picture/lib/widget/longimage/ImageViewState.kt | MoustafaShahin/InsGallery | 6cc5ff6a5c86c491a852304c58ce979ed42d6225 | [
"Apache-2.0"
] | null | null | null | /*
Copyright 2014 David Morrissey
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.luck.picture.lib.widget.longimage
import android.graphics.PointF
import java.io.Serializable
/**
* Wraps the scale, center and orientation of a displayed image for easy restoration on screen rotate.
*/
class ImageViewState(private val scale: Float, center: PointF?, orientation: Int) : Serializable {

    // PointF is not serializable, so the center is flattened into two floats.
    // NOTE: `center!!` will throw if a null center is passed, matching the
    // original behavior of this class.
    private val centerX: Float = center!!.x
    private val centerY: Float = center.y
    private val orientation: Int = orientation

    /** Zoom scale to restore. */
    fun getScale(): Float = scale

    /** Viewport center to restore. */
    fun getCenter(): PointF? = PointF(centerX, centerY)

    /** Image orientation to restore. */
    fun getOrientation(): Int = orientation
}
d270775f99d0aed8ecae16d60431676fa3b86d40 | 9,340 | php | PHP | index.php | RaphaelWJB/TCC-Codigo | 6e1080462fcfe41be889cf67021c7e1852295cbb | [
"MIT"
] | null | null | null | index.php | RaphaelWJB/TCC-Codigo | 6e1080462fcfe41be889cf67021c7e1852295cbb | [
"MIT"
] | null | null | null | index.php | RaphaelWJB/TCC-Codigo | 6e1080462fcfe41be889cf67021c7e1852295cbb | [
"MIT"
] | null | null | null | <!doctype html>
<html lang="pt-br">
<head>
<!-- Required meta tags -->
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<!-- Bootstrap CSS -->
<link rel="stylesheet" href="node_modules/bootstrap/compiler/bootstrap.css">
<link rel="stylesheet" href="node_modules/bootstrap/compiler/estilos.css">
<title>TCC</title>
<style>
</style>
</head>
<body>
<!--Container principal-->
<div class="container-fluid fundo">
<!--Menu-->
<nav class="navbar navbar-expand-lg navbar-dark bg-dark mb-3">
<div class="container">
<a href="#" class="navbar-brand"><img src="http://placehold.it/115x65" alt="LOGO"></a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarSite">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbarSite">
<ul class="navbar-nav ml-auto p-2">
<li class="nav-item">
<a href="#" class="nav-link">HOME</a>
</li>
<li class="nav-item">
<a href="#" class="nav-link">CONHEÇA SGEA</a>
</li>
<li class="nav-item">
<a href="#formContato" class="nav-link">CONTATO</a>
</li>
<li class="nav-item">
<a href="login.php" class="nav-link">ENTRAR</a>
</li>
</ul>
</div>
</div>
</nav>
<!--Carrousel-->
<div id="carouselExampleIndicators" class="carousel slide" data-ride="carousel">
<ol class="carousel-indicators">
<li data-target="#carouselExampleIndicators" data-slide-to="0" class="active"></li>
<li data-target="#carouselExampleIndicators" data-slide-to="1"></li>
<li data-target="#carouselExampleIndicators" data-slide-to="2"></li>
</ol>
<div class="carousel-inner">
<div class="carousel-item active">
<img class="d-block w-100" src="img/first.png" alt="First slide">
<div class="carousel-caption d-none d-md-block">
<h5>O que é Lorem Ipsum?</h5>
<p>Lorem Ipsum é simplesmente uma simulação de texto da indústria tipográfica e de impressos.</p>
</div>
</div>
<div class="carousel-item">
<img class="d-block w-100" src="img/second.png" alt="Second slide">
<div class="carousel-caption d-none d-md-block">
<h5>Porque nós o usamos?</h5>
<p>É um fato conhecido de todos que um leitor se distrairá com o conteúdo de texto legível.</p>
</div>
</div>
<div class="carousel-item">
<img class="d-block w-100" src="img/third.png" alt="Third slide">
<div class="carousel-caption d-none d-md-block">
<h5>De onde ele vem?</h5>
<p>Ao contrário do que se acredita, Lorem Ipsum não é simplesmente um texto randômico.</p>
</div>
</div>
</div>
<a class="carousel-control-prev" href="#carouselExampleIndicators" role="button" data-slide="prev">
<span class="carousel-control-prev-icon" aria-hidden="true"></span>
<span class="sr-only">Previous</span>
</a>
<a class="carousel-control-next" href="#carouselExampleIndicators" role="button" data-slide="next">
<span class="carousel-control-next-icon" aria-hidden="true"></span>
<span class="sr-only">Next</span>
</a>
</div>
<!--DETALHES-->
<div class="container-fluid">
<div class="row">
<div class="col-sm-12 col-md-6 col-lg-4 mt-2 mb-2">
<div class="card h-100">
<img src="http://placehold.it/700x400" alt="">
</div>
</div>
<div class="col-sm-12 col-md-6 col-lg-4 mt-2 mb-2">
<div class="card h-100">
<img src="http://placehold.it/700x400" alt="">
</div>
</div>
<div class="col-sm-12 col-md-6 col-lg-4 m-auto detalhes" data-toggle="modal" data-target=".bd-example-modal-lg">
<div class="card h-100">
<img src="http://placehold.it/700x400" alt="">
</div>
</div>
<!--Modal modelo-->
<!-- Modal -->
<div class="modal fade" id="exampleModal" tabindex="-1" role="dialog" aria-labelledby="exampleModalLabel" aria-hidden="true">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLabel">Detalhes 3</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
<img src="http://placehold.it/700x400" alt="">
<!--<div class="card-body">-->
<h4 class="card-title">
<a href="#">Project One</a>
</h4>
<p class="card-text">Lorem ipsum dolor sit amet, consectetur adipisicing elit. Amet numquam aspernatur eum quasi sapiente nesciunt? Voluptatibus sit, repellat sequi itaque deserunt, dolores in, nesciunt, illum tempora ex quae? Nihil, dolorem!</p>
<!--</div>-->
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<div class="modal fade bd-example-modal-lg" tabindex="-1" role="dialog" aria-labelledby="myLargeModalLabel" aria-hidden="true">
<div class="modal-dialog modal-lg" role="document">
<div class="modal-content">
<div class="modal-header">
<h2>Detalhes</h2>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<img src="http://placehold.it/700x400" alt="">
<div class="card-body">
<h4 class="card-title">
<a href="#">Project One</a>
</h4>
<p class="card-text">Lorem ipsum dolor sit amet, consectetur adipisicing elit. Amet numquam aspernatur eum quasi sapiente nesciunt? Voluptatibus sit, repellat sequi itaque deserunt, dolores in, nesciunt, illum tempora ex quae? Nihil, dolorem!</p>
</div>
</div>
</div>
</div>
</div>
</div>
<!--Formulario-->
<div class="container w-50 p-4 form-container" id="formContato">
<form action="#" method="post" class="form-contato">
<h1 class="text-center">CONTATO</h1>
<div class="form-group">
<label for="cNome" class="mb-1">Nome</label>
<input type="text" class="form-control" id="cNome" placeholder="Nome">
</div>
<div class="form-group">
<label for="cEmail" class="mb-1">Email</label>
<input type="text" class="form-control" id="cEmail" placeholder="Email">
</div>
<div class="form-group">
<label for="cTelefone" class="mb-1">Telefone</label>
<input type="text" class="form-control" id="cTelefone" placeholder="Telefone">
</div>
<div class="form-group">
<label for="cAssunto" class="mb-1">Assunto</label>
<textarea name="" placeholder="Assunto" id="cAssunto" cols="30" rows="5" class="form-control"></textarea>
</div>
<div class="form-group">
<input type="submit" class="btn btn-success w-100" value="ENVIAR" name="btnEnviar">
</div>
</form>
</div>
</div>
<!--footer-->
<div class="container mt-2">
<div class="text-right p-2">
<span class="footer">©Desenvolvido por nois - 2018</span>
</div>
</div>
<!-- Optional JavaScript -->
<!-- jQuery first, then Popper.js, then Bootstrap JS -->
<script src="node_modules/jquery/dist/jquery.js"></script>
<script src="node_modules/popper.js/dist/umd/popper.js"></script>
<script src="node_modules/bootstrap/dist/js/bootstrap.js"></script>
<script src="node_modules/bootstrap/dist/js/meuScript.js"></script>
</body>
</html>
| 42.454545 | 278 | 0.502141 |
0461df213cc3fcd0465a7dfe31e4f11ddce46f6c | 580 | java | Java | src/main/java/com/qty/netty/decoder/CustomDecoder.java | CarterQ/lottery | 1c4cdaafc7fd19b44e960d9f84e16f0d9bb880b6 | [
"Apache-2.0"
] | null | null | null | src/main/java/com/qty/netty/decoder/CustomDecoder.java | CarterQ/lottery | 1c4cdaafc7fd19b44e960d9f84e16f0d9bb880b6 | [
"Apache-2.0"
] | null | null | null | src/main/java/com/qty/netty/decoder/CustomDecoder.java | CarterQ/lottery | 1c4cdaafc7fd19b44e960d9f84e16f0d9bb880b6 | [
"Apache-2.0"
] | null | null | null | package com.qty.netty.decoder;
import java.util.List;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageDecoder;
public class CustomDecoder extends MessageToMessageDecoder<ByteBuf> {

	/** Size in bytes of the big-endian int length prefix. */
	private static final int LENGTH_FIELD_SIZE = 4;

	/**
	 * Decodes one length-prefixed frame: reads a 4-byte length {@code count}
	 * and, when {@code count} payload bytes are available, emits them as a
	 * retained slice. On a truncated payload the reader index is rewound and
	 * nothing is emitted.
	 *
	 * NOTE(review): MessageToMessageDecoder does not accumulate bytes across
	 * calls, so a frame split over several network reads cannot be reassembled
	 * here; resetReaderIndex() only keeps this single buffer consistent. If
	 * streamed/fragmented input is expected, ByteToMessageDecoder (or
	 * LengthFieldBasedFrameDecoder) is the appropriate base class — TODO
	 * confirm against the pipeline configuration.
	 */
	@Override
	protected void decode(ChannelHandlerContext ctx, ByteBuf msg, List<Object> out) throws Exception {
		// Guard: without at least 4 readable bytes, readInt() would throw
		// IndexOutOfBoundsException and abort the pipeline.
		if (msg.readableBytes() < LENGTH_FIELD_SIZE) {
			return;
		}
		msg.markReaderIndex();
		int count = msg.readInt();
		if (msg.readableBytes() >= count) {
			// Retained slice: the payload outlives this buffer, so downstream
			// handlers are responsible for releasing it.
			out.add(msg.readRetainedSlice(count));
		} else {
			// Incomplete payload: rewind so the length prefix is not consumed.
			msg.resetReaderIndex();
		}
	}
}
| 24.166667 | 99 | 0.762069 |
808b6b7e9eb88f097b367774f1de59a72aa898e4 | 4,572 | java | Java | mockserver-core/src/test/java/org/mockserver/model/StringBodyTest.java | Avarko/mockserver | 7bb7f1862a6c07a60bdc9ea156c88e2e78ff5a83 | [
"Apache-2.0"
] | null | null | null | mockserver-core/src/test/java/org/mockserver/model/StringBodyTest.java | Avarko/mockserver | 7bb7f1862a6c07a60bdc9ea156c88e2e78ff5a83 | [
"Apache-2.0"
] | 2 | 2021-12-09T19:28:13.000Z | 2021-12-09T23:10:37.000Z | mockserver-core/src/test/java/org/mockserver/model/StringBodyTest.java | Avarko/mockserver | 7bb7f1862a6c07a60bdc9ea156c88e2e78ff5a83 | [
"Apache-2.0"
] | null | null | null | package org.mockserver.model;
import com.google.common.base.Charsets;
import com.google.common.net.MediaType;
import org.junit.Test;
import java.nio.charset.Charset;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.mockserver.model.BinaryBody.binary;
import static org.mockserver.model.StringBody.exact;
/**
* @author jamesdbloom
*/
// Unit tests for StringBody: verifies value, type, charset resolution and
// content-type reporting for every constructor / static-factory variant.
// Convention: getCharset(default) returns the body's own charset when one was
// given, otherwise the supplied default.
public class StringBodyTest {

    // Static factory must produce a fresh instance on every call.
    @Test
    public void shouldAlwaysCreateNewObject() {
        assertEquals(new StringBody("some_body").exact("some_body"), exact("some_body"));
        assertNotSame(exact("some_body"), exact("some_body"));
    }

    // No charset given: getCharset falls back to the supplied default, no content type.
    @Test
    public void shouldReturnValuesSetInConstructor() {
        // when
        StringBody stringBody = new StringBody("some_body");

        // then
        assertThat(stringBody.getValue(), is("some_body"));
        assertThat(stringBody.getType(), is(Body.Type.STRING));
        assertThat(stringBody.getCharset(null), nullValue());
        assertThat(stringBody.getCharset(Charsets.UTF_8), is(Charsets.UTF_8));
        assertThat(stringBody.getContentType(), nullValue());
    }

    // Explicit charset: overrides any default and shows up in the content type.
    @Test
    public void shouldReturnValuesSetInConstructorWithCharset() {
        // when
        StringBody stringBody = new StringBody("some_body", Charsets.UTF_16);

        // then
        assertThat(stringBody.getValue(), is("some_body"));
        assertThat(stringBody.getType(), is(Body.Type.STRING));
        assertThat(stringBody.getCharset(null), is(Charsets.UTF_16));
        assertThat(stringBody.getCharset(Charsets.UTF_8), is(Charsets.UTF_16));
        assertThat(stringBody.getContentType(), is(MediaType.create("text", "plain").withCharset(Charsets.UTF_16).toString()));
    }

    // exact(value): same behavior as the bare constructor.
    @Test
    public void shouldReturnValueSetInStaticConstructor() {
        // when
        StringBody stringBody = exact("some_body");

        // then
        assertThat(stringBody.getValue(), is("some_body"));
        assertThat(stringBody.getType(), is(Body.Type.STRING));
        assertThat(stringBody.getCharset(null), nullValue());
        assertThat(stringBody.getCharset(Charsets.UTF_8), is(Charsets.UTF_8));
        assertThat(stringBody.getContentType(), nullValue());
    }

    // exact(value, charset): same behavior as the charset constructor.
    @Test
    public void shouldReturnValueSetInStaticConstructorWithCharset() {
        // when
        StringBody stringBody = exact("some_body", Charsets.UTF_16);

        // then
        assertThat(stringBody.getValue(), is("some_body"));
        assertThat(stringBody.getType(), is(Body.Type.STRING));
        assertThat(stringBody.getCharset(null), is(Charsets.UTF_16));
        assertThat(stringBody.getCharset(Charsets.UTF_8), is(Charsets.UTF_16));
        assertThat(stringBody.getContentType(), is(MediaType.create("text", "plain").withCharset(Charsets.UTF_16).toString()));
    }

    // A null Charset argument must behave exactly like the no-charset constructor.
    @Test
    public void shouldReturnValueSetInStaticConstructorWithNullCharset() {
        // when
        StringBody stringBody = exact("some_body", (Charset)null);

        // then
        assertThat(stringBody.getValue(), is("some_body"));
        assertThat(stringBody.getType(), is(Body.Type.STRING));
        assertThat(stringBody.getCharset(null), nullValue());
        assertThat(stringBody.getCharset(Charsets.UTF_8), is(Charsets.UTF_8));
        assertThat(stringBody.getContentType(), nullValue());
    }

    // Media type carrying a charset: charset is extracted from the media type.
    @Test
    public void shouldReturnValueSetInStaticConstructorWithContentType() {
        // when
        StringBody stringBody = exact("some_body", MediaType.PLAIN_TEXT_UTF_8);

        // then
        assertThat(stringBody.getValue(), is("some_body"));
        assertThat(stringBody.getType(), is(Body.Type.STRING));
        assertThat(stringBody.getCharset(null), is(Charsets.UTF_8));
        assertThat(stringBody.getCharset(Charsets.UTF_16), is(Charsets.UTF_8));
        assertThat(stringBody.getContentType(), is(MediaType.PLAIN_TEXT_UTF_8.toString()));
    }

    // A null MediaType argument must behave exactly like the no-charset constructor.
    @Test
    public void shouldReturnValueSetInStaticConstructorWithNullMediaType() {
        // when
        StringBody stringBody = exact("some_body", (MediaType) null);

        // then
        assertThat(stringBody.getValue(), is("some_body"));
        assertThat(stringBody.getType(), is(Body.Type.STRING));
        assertThat(stringBody.getCharset(null), nullValue());
        assertThat(stringBody.getCharset(Charsets.UTF_8), is(Charsets.UTF_8));
        assertThat(stringBody.getContentType(), nullValue());
    }
}
| 38.1 | 127 | 0.69007 |
0463dcbf6f4f6a88fd2143deece94edf37310af7 | 2,382 | java | Java | libcore/sql/src/test/java/tests/SQLite/SQLiteTest.java | PPCDroid/dalvik | 3a3ffe5cfcd77ead36c034e64dcca56a526970e5 | [
"Apache-2.0"
] | 13 | 2015-09-30T03:09:20.000Z | 2020-11-04T11:28:30.000Z | libcore/sql/src/test/java/tests/SQLite/SQLiteTest.java | PPCDroid/dalvik | 3a3ffe5cfcd77ead36c034e64dcca56a526970e5 | [
"Apache-2.0"
] | null | null | null | libcore/sql/src/test/java/tests/SQLite/SQLiteTest.java | PPCDroid/dalvik | 3a3ffe5cfcd77ead36c034e64dcca56a526970e5 | [
"Apache-2.0"
] | 13 | 2015-05-17T12:55:10.000Z | 2020-09-03T02:04:16.000Z | /*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tests.SQLite;
import junit.framework.TestCase;
import java.io.File;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.logging.Logger;
public class SQLiteTest extends TestCase {
public static Connection conn;
public static File dbFile = null;
public void setUp() throws Exception {
String tmp = System.getProperty("java.io.tmpdir");
File tmpDir = new File(tmp);
try {
if (tmpDir.isDirectory()) {
dbFile = File.createTempFile("sqliteTest", ".db", tmpDir);
dbFile.deleteOnExit();
} else {
System.out.println("ctsdir does not exist");
}
Class.forName("SQLite.JDBCDriver").newInstance();
if (!dbFile.exists()) {
Logger.global.severe("DB file could not be created. Tests can not be executed.");
} else {
conn = DriverManager.getConnection("jdbc:sqlite:/"
+ dbFile.getPath());
}
assertNotNull("Error creating connection",conn);
} catch (IOException e) {
System.out.println("Problem creating test file in " + tmp);
} catch (SQLException e) {
// TODO Auto-generated catch block
fail("Exception: " + e.toString());
} catch (java.lang.Exception e) {
fail("Exception: " + e.toString());
}
}
public void tearDown() {
try {
if (!conn.isClosed()) {
conn.close();
}
} catch (SQLException e) {
fail("Couldn't close Connection: " + e.getMessage());
}
}
}
| 31.76 | 95 | 0.597397 |
507686daafdbaa924b9f109ab5c4bca585c67242 | 192 | html | HTML | index.html | jhisle-maker/hmmm | 95261a4fece3b9b7b41147440cb0581d502d16ba | [
"Apache-2.0"
] | null | null | null | index.html | jhisle-maker/hmmm | 95261a4fece3b9b7b41147440cb0581d502d16ba | [
"Apache-2.0"
] | null | null | null | index.html | jhisle-maker/hmmm | 95261a4fece3b9b7b41147440cb0581d502d16ba | [
"Apache-2.0"
] | null | null | null | <!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>uhhh.....</title>
</head>
<body>
<button type="button" onclick="
alert(....)">CLICK</button>
</body>
</html>
| 14.769231 | 31 | 0.567708 |
6b6691a670acd30bef1b3f5c1cc9cb5d3352e1ab | 554 | html | HTML | content/blog/2018/03/2018-03-12-rsi.html | jonnyspicer/jonnyspicer.com | d734f3c9a2405b8b47bd9ff1994920b1e6921f5f | [
"MIT"
] | null | null | null | content/blog/2018/03/2018-03-12-rsi.html | jonnyspicer/jonnyspicer.com | d734f3c9a2405b8b47bd9ff1994920b1e6921f5f | [
"MIT"
] | null | null | null | content/blog/2018/03/2018-03-12-rsi.html | jonnyspicer/jonnyspicer.com | d734f3c9a2405b8b47bd9ff1994920b1e6921f5f | [
"MIT"
] | 1 | 2021-12-02T03:17:35.000Z | 2021-12-02T03:17:35.000Z | ---
aliases:
- /mendokusai/2018/03/12/rsi
author: Jonny Spicer
type: blog
blogger_id: tag:blogger.com,1999:blog-4848445585442664375.post-7808050319154851631
blogger_orig_url: http://jonnyspicer.blogspot.com/2018/03/rsi.html
date: "2018-03-12T17:10:00Z"
modified_time: "2018-03-12T17:10:48.280-07:00"
categories: null
title: RSI
---
<span >I'm starting to get RSI in my hands and wrists, fuck my actual life. Time to start a stretching routine before I boot my computer and icing my arms lawl. Who says you can't get injured sitting at a computer?</span> | 39.571429 | 221 | 0.772563 |
33095e5747a61858a4f99db634d916b356b6c29e | 13,398 | cpp | C++ | platform/Win32Window.cpp | quyse/inanity | a39225c5a41f879abe5aa492bb22b500dbe77433 | [
"MIT"
] | 26 | 2015-04-22T05:25:25.000Z | 2020-11-15T11:07:56.000Z | platform/Win32Window.cpp | quyse/inanity | a39225c5a41f879abe5aa492bb22b500dbe77433 | [
"MIT"
] | 2 | 2015-01-05T10:41:27.000Z | 2015-01-06T20:46:11.000Z | platform/Win32Window.cpp | quyse/inanity | a39225c5a41f879abe5aa492bb22b500dbe77433 | [
"MIT"
] | 5 | 2016-08-02T11:13:57.000Z | 2018-10-26T11:19:27.000Z | #include "Win32Window.hpp"
#include "../graphics/Presenter.hpp"
#include "../graphics/RawTextureData.hpp"
#include "../input/Win32Manager.hpp"
#include "../Strings.hpp"
#include "../Exception.hpp"
#include <windowsx.h>
BEGIN_INANITY_PLATFORM
typedef UINT (WINAPI *GETDPIFORWINDOW_FUNC)(HWND);
// Wraps an existing HWND. `own` controls whether the destructor destroys the
// window; `prevWndProc` is the window procedure to chain to when subclassing
// an externally-created window (null for windows created by this class).
// Also captures the initial client size and DPI scale.
Win32Window::Win32Window(HWND hWnd, bool own, WNDPROC prevWndProc)
: hWnd(hWnd), hdc(0), own(own), active(true), clientWidth(0), clientHeight(0), dpiScale(1), prevWndProc(prevWndProc), cursorHidden(false), fullscreen(false)
{
	// store `this` in the window's user data so StaticWndProc can route messages back
	SetWindowLongPtr(hWnd, GWLP_USERDATA, (LONG_PTR)this);
	RECT rect;
	GetClientRect(hWnd, &rect);
	clientWidth = rect.right - rect.left;
	clientHeight = rect.bottom - rect.top;
	// use multiple methods to determine initial DPI scale
	UINT dpi;
	// GetDpiForWindow is supported starting with Windows 10
	HMODULE user32dll = GetModuleHandle(TEXT("user32.dll"));
	GETDPIFORWINDOW_FUNC getDpiForWindow = (GETDPIFORWINDOW_FUNC)GetProcAddress(user32dll, "GetDpiForWindow");
	if(getDpiForWindow)
	{
		dpi = getDpiForWindow(hWnd);
	}
	else
	{
		// works in every Windows version, but returns DPI of primary monitor
		// which is a system-wide setting updated only on re-login
		HDC hdc = GetDC(hWnd);
		dpi = GetDeviceCaps(hdc, LOGPIXELSX);
		ReleaseDC(hWnd, hdc);
	}
	dpiScale = float(dpi) / float(USER_DEFAULT_SCREEN_DPI);
}
// Releases the cached DC and either destroys the window (owned) or detaches
// our `this` pointer from it (not owned) so the old procedure sees no dangling data.
Win32Window::~Win32Window()
{
	if(hWnd)
	{
		if(hdc)
			ReleaseDC(hWnd, hdc);
		if(own)
			DestroyWindow(hWnd);
		else
			SetWindowLongPtr(hWnd, GWLP_USERDATA, 0);
	}
}

// Sets the window caption; title is UTF-8 and converted to the native wide encoding.
void Win32Window::SetTitle(const String& title)
{
	SetWindowText(hWnd, Strings::UTF82Unicode(title).c_str());
}

// Moves the mouse cursor to (x, y) given in this window's client coordinates.
void Win32Window::PlaceCursor(int x, int y)
{
	POINT pt = { x, y };
	ClientToScreen(hWnd, &pt);
	SetCursorPos(pt.x, pt.y);
}
// Builds an HICON/HCURSOR from raw RGBA texture data.
// isIcon selects icon (TRUE) vs cursor (FALSE); hotX/hotY are the cursor hotspot.
// Returns NULL on CreateIconIndirect failure; throws if the bitmaps cannot be created.
HICON Win32Window::CreateIconFromTexture(ptr<Graphics::RawTextureData> texture, BOOL isIcon, int hotX, int hotY)
{
	int width = texture->GetImageWidth();
	int height = texture->GetImageHeight();
	// 32-bit BGRA DIB header with an explicit alpha mask (V5 header required for alpha)
	BITMAPV5HEADER h;
	ZeroMemory(&h, sizeof(h));
	h.bV5Size = sizeof(h);
	h.bV5Width = width;
	h.bV5Height = height;
	h.bV5Planes = 1;
	h.bV5BitCount = 32;
	h.bV5Compression = BI_BITFIELDS;
	h.bV5RedMask = 0x00FF0000;
	h.bV5GreenMask = 0x0000FF00;
	h.bV5BlueMask = 0x000000FF;
	h.bV5AlphaMask = 0xFF000000;
	const uint8_t* pixels = (const uint8_t*)texture->GetMipData();
	uint8_t* buf = new uint8_t[width * 4 * height];
	// convert RGBA -> BGRA and flip vertically (DIBs are stored bottom-up)
	for(int i = 0; i < height; ++i)
	{
		const uint8_t* linePixels = pixels + i * width * 4;
		uint8_t* lineBuf = buf + (height - 1 - i) * width * 4;
		for(int j = 0; j < width; ++j)
		{
			lineBuf[j * 4 + 0] = linePixels[j * 4 + 2];
			lineBuf[j * 4 + 1] = linePixels[j * 4 + 1];
			lineBuf[j * 4 + 2] = linePixels[j * 4 + 0];
			lineBuf[j * 4 + 3] = linePixels[j * 4 + 3];
		}
	}
	HDC hdc = GetDC(NULL);
	HBITMAP hbmpColor = CreateDIBitmap(hdc, (BITMAPINFOHEADER*)&h, CBM_INIT, buf, (BITMAPINFO*)&h, DIB_RGB_COLORS);
	// monochrome mask bitmap; ignored for 32bpp color icons but required by ICONINFO
	HBITMAP hbmpMask = CreateBitmap(width, height, 1, 1, NULL);
	ReleaseDC(NULL, hdc);
	delete [] buf;
	if(!hbmpColor || !hbmpMask)
	{
		if(hbmpColor) DeleteBitmap(hbmpColor);
		if(hbmpMask) DeleteBitmap(hbmpMask);
		THROW("Can't create bitmaps");
	}
	ICONINFO ii;
	ii.fIcon = isIcon;
	ii.xHotspot = hotX;
	ii.yHotspot = hotY;
	ii.hbmMask = hbmpMask;
	ii.hbmColor = hbmpColor;
	// CreateIconIndirect copies the bitmaps, so they can be deleted right after
	HICON icon = CreateIconIndirect(&ii);
	DeleteBitmap(hbmpColor);
	DeleteBitmap(hbmpMask);
	return icon;
}
// RAII wrapper owning an HICON; destroys the handle with the object.
class Win32Window::Win32Icon : public Window::Icon
{
	friend class Win32Window;
private:
	HICON icon;

public:
	Win32Icon(HICON icon) : icon(icon) {}
	~Win32Icon()
	{
		DestroyIcon(icon);
	}
};

// Creates a window icon from an RGBA texture; throws if the format is wrong
// or icon creation fails.
ptr<Window::Icon> Win32Window::CreateIcon(ptr<Graphics::RawTextureData> texture)
{
	BEGIN_TRY();
	if(!(texture->GetFormat() == Graphics::PixelFormats::uintRGBA32S))
		THROW("Win32 window icon must be RGBA");
	HCURSOR icon = CreateIconFromTexture(texture, TRUE, 0, 0);
	if(!icon) THROW("Can't create icon");
	return NEW(Win32Icon(icon));
	END_TRY("Can't create Win32 icon");
}

// Applies the icon for both the big (Alt-Tab) and small (title bar) slots,
// and keeps a reference so the HICON outlives the WM_SETICON usage.
void Win32Window::SetIcon(ptr<Icon> icon)
{
	ptr<Win32Icon> win32Icon = icon.FastCast<Win32Icon>();
	HICON ico = win32Icon->icon;
	SendMessage(hWnd, WM_SETICON, ICON_BIG, (LPARAM)ico);
	SendMessage(hWnd, WM_SETICON, ICON_SMALL, (LPARAM)ico);
	this->icon = win32Icon;
}
// RAII wrapper owning an HCURSOR; destroys the handle with the object.
class Win32Window::Win32Cursor : public Window::Cursor
{
	friend class Win32Window;
private:
	HCURSOR cursor;

public:
	Win32Cursor(HCURSOR cursor) : cursor(cursor) {}
	~Win32Cursor()
	{
		DestroyCursor(cursor);
	}
};

// Creates a mouse cursor from an RGBA texture with the given hotspot.
ptr<Window::Cursor> Win32Window::CreateCursor(ptr<Graphics::RawTextureData> texture, int hotX, int hotY)
{
	BEGIN_TRY();
	if(!(texture->GetFormat() == Graphics::PixelFormats::uintRGBA32S))
		THROW("Win32 window cursor must be RGB");
	HCURSOR cursor = CreateIconFromTexture(texture, FALSE, hotX, hotY);
	if(!cursor) THROW("Can't create cursor");
	return NEW(Win32Cursor(cursor));
	END_TRY("Can't create Win32 cursor");
}

// Switches the active cursor object and immediately applies it.
void Win32Window::SetCursor(ptr<Cursor> cursor)
{
	// NOTE(review): lastCursor appears to keep the previous cursor (and its
	// HCURSOR) alive until after UpdateCursor() has switched away from it,
	// since Windows must not destroy the currently-set cursor — confirm.
	ptr<Win32Cursor> lastCursor = this->cursor;
	this->cursor = cursor.FastCast<Win32Cursor>();
	UpdateCursor();
}

// Requests loop termination: fires base-class stop handlers, then posts
// WM_QUIT so the message loop in Do() exits.
void Win32Window::Stop()
{
	Window::Stop();
	PostQuitMessage(0);
}
// Creates a native window of the given registered class and wraps it in an
// owning Win32Window. left/top/width/height are outer window coordinates.
ptr<Win32Window> Win32Window::Create(ATOM windowClass, const String& title,
	int left, int top, int width, int height, bool visible)
{
	BEGIN_TRY();
	// create the window
	HWND hWnd = CreateWindow(
		(LPCTSTR)(UINT_PTR)windowClass, Strings::UTF82Unicode(title).c_str(),
		WS_OVERLAPPEDWINDOW | (visible ? WS_VISIBLE : 0),
		left, top, width, height,
		NULL, NULL, GetModuleHandle(NULL), NULL);
	if(!hWnd)
		THROW("Can't create window");
	return NEW(Win32Window(hWnd));
	END_TRY("Can't create game window");
}
// Creates a window suitable for DirectX rendering (plain window class,
// black background). The class is registered once per process.
ptr<Win32Window> Win32Window::CreateForDirectX(const String& title, int left, int top, int width, int height, bool visible)
{
	static ATOM windowClass = NULL;
	// register the window class if it has not been done yet
	if(!windowClass)
	{
		WNDCLASS wndClass;
		ZeroMemory(&wndClass, sizeof(wndClass));
		wndClass.style = CS_DBLCLKS;
		wndClass.hCursor = LoadCursor(NULL, IDC_ARROW);
		wndClass.hbrBackground = GetStockBrush(BLACK_BRUSH);
		wndClass.lpszClassName = TEXT("Win32DirectXWindow");
		wndClass.hInstance = GetModuleHandle(NULL);
		wndClass.lpfnWndProc = StaticWndProc;
		windowClass = RegisterClass(&wndClass);
		if(!windowClass)
			THROW("Can't register window class for DirectX");
	}
	return Create(windowClass, title, left, top, width, height, visible);
}
// Creates a window prepared for OpenGL: registers a CS_OWNDC class (persistent
// per-window DC, required for a stable pixel format), grabs the DC and sets a
// double-buffered RGBA pixel format on it.
ptr<Win32Window> Win32Window::CreateForOpenGL(const String& title, int left, int top, int width, int height, bool visible)
{
	BEGIN_TRY();
	static ATOM windowClass = NULL;
	// register the window class if it has not been done yet
	if(!windowClass)
	{
		WNDCLASS wndClass;
		ZeroMemory(&wndClass, sizeof(wndClass));
		wndClass.style = CS_DBLCLKS | CS_OWNDC;
		wndClass.hCursor = LoadCursor(NULL, IDC_ARROW);
		wndClass.lpszClassName = TEXT("Win32OpenGLWindow");
		wndClass.hInstance = GetModuleHandle(NULL);
		wndClass.lpfnWndProc = StaticWndProc;
		windowClass = RegisterClass(&wndClass);
		if(!windowClass)
			THROW("Can't register window class");
	}
	ptr<Win32Window> window = Create(windowClass, title, left, top, width, height, visible);
	// get window's persistent HDC
	HDC hdc = GetDC(window->GetHWND());
	if(!hdc)
		THROW_SECONDARY("Can't get window's HDC", Exception::SystemError());
	// store it to window
	window->hdc = hdc;
	// choose & set pixel format
	PIXELFORMATDESCRIPTOR pfd;
	ZeroMemory(&pfd, sizeof(pfd));
	pfd.nSize = sizeof(pfd);
	pfd.nVersion = 1;
	pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER;
	pfd.iPixelType = PFD_TYPE_RGBA;
	pfd.cColorBits = 24;
	pfd.cDepthBits = 0;
	pfd.iLayerType = PFD_MAIN_PLANE;
	int pixelFormat = ChoosePixelFormat(hdc, &pfd);
	if(!pixelFormat)
		THROW("Can't choose pixel format");
	if(!SetPixelFormat(hdc, pixelFormat, &pfd))
		THROW("Can't set pixel format");
	return window;
	END_TRY("Can't create window for OpenGL");
}
// Wraps an externally-created window by subclassing it: installs StaticWndProc
// and remembers the previous procedure so unhandled messages are chained to it.
ptr<Win32Window> Win32Window::CreateExisting(HWND hWnd, bool own)
{
	WNDPROC prevWndProc = (WNDPROC)SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)&StaticWndProc);
	return NEW(Win32Window(hWnd, own, prevWndProc));
}

// Native window handle.
HWND Win32Window::GetHWND() const
{
	return hWnd;
}

// Cached device context (non-null only for OpenGL windows).
HDC Win32Window::GetHDC() const
{
	return hdc;
}

// True while the window has input focus (tracked via WM_ACTIVATE).
bool Win32Window::IsActive() const
{
	return active;
}

// Client area width in pixels (tracked via WM_SIZE).
int Win32Window::GetClientWidth() const
{
	return clientWidth;
}

// Client area height in pixels (tracked via WM_SIZE).
int Win32Window::GetClientHeight() const
{
	return clientHeight;
}

// Class-level window procedure: routes the message to the Win32Window instance
// stored in GWLP_USERDATA, or falls back to DefWindowProc before attachment.
LRESULT CALLBACK Win32Window::StaticWndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
	Win32Window* window = (Win32Window*)GetWindowLongPtr(hWnd, GWLP_USERDATA);
	if(window)
		return window->WndProc(uMsg, wParam, lParam);
	return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
// Instance window procedure. Input messages are offered to the input manager
// first; remaining messages maintain window state (activity, size, DPI, cursor)
// and finally fall through to the previous procedure (subclassed windows) or
// DefWindowProc.
LRESULT Win32Window::WndProc(UINT uMsg, WPARAM wParam, LPARAM lParam)
{
	if(inputManager && inputManager->ProcessWindowMessage(uMsg, wParam, lParam))
		return 0;

	switch(uMsg)
	{
	case WM_ACTIVATE:
		{
			unsigned state = LOWORD(wParam);
			bool active = (state == WA_ACTIVE || state == WA_CLICKACTIVE);
			// update activity flag
			this->active = active;
			// update mouse lock
			UpdateMouseLock();
			// update cursor visibility
			UpdateCursorVisible();
			// tell input manager that window has been deactivated
			if(!active && inputManager)
				inputManager->ReleaseButtonsOnUpdate();
		}
		return 0;
	case WM_MOVE:
		// cursor clip rectangle is in screen coordinates, so it must follow the window
		UpdateMouseLock();
		return 0;
	case WM_SIZE:
		clientWidth = LOWORD(lParam);
		clientHeight = HIWORD(lParam);
		if(presenter)
			presenter->Resize(clientWidth, clientHeight);
		UpdateMouseLock();
		return 0;
	case WM_SETCURSOR:
		// apply our cursor only inside the client area; borders keep system cursors
		if(LOWORD(lParam) == HTCLIENT)
		{
			UpdateCursor();
			return 0;
		}
		break;
	case WM_DPICHANGED:
		{
			// adopt the new DPI and move to the rectangle Windows suggests for it
			dpiScale = float(LOWORD(wParam)) / float(USER_DEFAULT_SCREEN_DPI);
			const RECT* rect = (const RECT*)lParam;
			SetWindowPos(hWnd, NULL, rect->left, rect->top, rect->right - rect->left, rect->bottom - rect->top, SWP_NOACTIVATE | SWP_NOZORDER);
		}
		break;
	case WM_SYSCOMMAND:
		// prevent use of Alt for system menu
		// see https://msdn.microsoft.com/en-us/library/windows/desktop/ms646360
		if(wParam == SC_KEYMENU && GET_Y_LPARAM(lParam) <= 0) return 0;
		break;
	case WM_CLOSE:
		if(preventUserClose) Stop();
		else Close();
		return 0;
	case WM_DESTROY:
		hWnd = 0;
		ClipCursor(NULL);
		Stop();
		return 0;
	}

	if(prevWndProc)
		return CallWindowProc(prevWndProc, hWnd, uMsg, wParam, lParam);
	return DefWindowProc(hWnd, uMsg, wParam, lParam);
}
// Attaches the input manager that gets first chance at window messages.
void Win32Window::SetInputManager(ptr<Input::Win32Manager> inputManager)
{
	this->inputManager = inputManager;
}

// Pumps pending messages and, when appropriate, runs one iteration of the
// active loop (input update + activeHandler). Returns false when WM_QUIT
// was received, i.e. the loop should stop.
bool Win32Window::Do(Handler* activeHandler)
{
	/* Apparently PeekMessage may process some messages (including WM_ACTIVATE)
	synchronously, without returning them in msg. So later, when deciding whether
	GetMessage exited because of WM_QUIT, we must rely on lastActive, because
	`active` may change inside PeekMessage.
	*/
	MSG msg;
	bool lastActive;
	while(((lastActive = active) || !sleepWhenInactive) ? PeekMessage(&msg, 0, 0, 0, PM_REMOVE) : GetMessage(&msg, 0, 0, 0))
	{
		if(msg.message == WM_QUIT)
			return false;
		TranslateMessage(&msg);
		DispatchMessage(&msg);
	}
	// if the window is active, or we were told not to sleep when inactive, do the work
	if(active || !sleepWhenInactive)
	{
		if(inputManager)
			inputManager->Update();
		activeHandler->Fire();
	}
	// otherwise the window is inactive and we left the loop via GetMessage(), which means WM_QUIT
	else if(!lastActive)
		return false;
	return true;
}

// Destroys the window (triggering WM_DESTROY and thus Stop()).
void Win32Window::Close()
{
	if(hWnd)
		DestroyWindow(hWnd);
}
// Toggles borderless fullscreen: going in, the current placement is saved and
// the frame removed before maximizing; going out, the frame and the saved
// placement are restored. No-op when the state is unchanged.
void Win32Window::SetFullScreen(bool fullscreen)
{
	if(this->fullscreen == fullscreen) return;
	this->fullscreen = fullscreen;

	if(fullscreen)
	{
		// remember window placement before going fullscreen
		LONG_PTR styles = GetWindowLongPtr(hWnd, GWL_STYLE);
		preFullScreenPlacement.length = sizeof(preFullScreenPlacement);
		GetWindowPlacement(hWnd, &preFullScreenPlacement);
		// remove window frame
		SetWindowLongPtr(hWnd, GWL_STYLE, (styles & (~WS_OVERLAPPEDWINDOW)) | WS_POPUP);
		// if window is maximized already, restore it first
		// otherwise Windows will not maximize it over whole screen
		if(styles & WS_MAXIMIZE) ShowWindowAsync(hWnd, SW_RESTORE);
		// maximize window
		ShowWindowAsync(hWnd, SW_MAXIMIZE);
	}
	else
	{
		// bring window frame back
		SetWindowLongPtr(hWnd, GWL_STYLE, (GetWindowLongPtr(hWnd, GWL_STYLE) & (~WS_POPUP)) | WS_OVERLAPPEDWINDOW);
		// restore window placement
		SetWindowPlacement(hWnd, &preFullScreenPlacement);
	}
}
// Current DPI scale relative to 96 DPI (1.0 = standard DPI).
float Win32Window::GetDPIScale() const
{
	return dpiScale;
}

// Outer window rectangle in screen coordinates, returned as position + size.
void Win32Window::GetRect(int& left, int& top, int& width, int& height)
{
	RECT rect;
	GetWindowRect(hWnd, &rect);
	left = rect.left;
	top = rect.top;
	width = rect.right - rect.left;
	height = rect.bottom - rect.top;
}

// Runs the main loop until Do() reports WM_QUIT.
void Win32Window::Run(ptr<Handler> activeHandler)
{
	while(Do(activeHandler));
}

// Applies the mouse-lock setting: the cursor is clipped to the client area
// only while locking is requested AND the window is active.
void Win32Window::UpdateMouseLock()
{
	// get if we actually want to set mouse lock
	bool actualMouseLock = mouseLock && active;
	if(actualMouseLock)
	{
		// clip cursor into client rect in screen coordinates
		RECT rect;
		GetClientRect(hWnd, &rect);
		MapWindowPoints(hWnd, NULL, (LPPOINT)&rect, 2);
		ClipCursor(&rect);
	}
	else
		ClipCursor(NULL);
}

// Applies the cursor-visibility setting; cursorHidden tracks the last applied
// state so ShowCursor's internal counter is changed at most once per toggle.
void Win32Window::UpdateCursorVisible()
{
	if(cursorVisible == cursorHidden)
	{
		ShowCursor(cursorVisible ? TRUE : FALSE);
		cursorHidden = !cursorVisible;
	}
}
void Win32Window::UpdateCursor()
{
::SetCursor(cursor->cursor);
}
END_INANITY_PLATFORM
| 25.375 | 156 | 0.720331 |
0f6ee6dbb01efdfbd8b2c12b308b962d152dbf19 | 423 | cpp | C++ | LeetCode/Count Number of Nice Subarrays/main.cpp | Code-With-Aagam/competitive-programming | 610520cc396fb13a03c606b5fb6739cfd68cc444 | [
"MIT"
] | 2 | 2022-02-08T12:37:41.000Z | 2022-03-09T03:48:56.000Z | LeetCode/Count Number of Nice Subarrays/main.cpp | ShubhamJagtap2000/competitive-programming-1 | 3a9a2e3dd08f8fa8ab823f295cd020d08d3bff84 | [
"MIT"
] | null | null | null | LeetCode/Count Number of Nice Subarrays/main.cpp | ShubhamJagtap2000/competitive-programming-1 | 3a9a2e3dd08f8fa8ab823f295cd020d08d3bff84 | [
"MIT"
] | null | null | null | class Solution {
public:
int numberOfSubarrays(vector<int> &nums, int k) {
for (auto &ele : nums) {
ele %= 2;
}
// count of subarrays having k ones in it???
// count of subarrays having sum k
map<int, int> mp;
int n = nums.size(), sum = 0, ans = 0;
mp[0] = 1;
for (int i = 0; i < n; i++) {
sum += nums[i];
if (mp[sum - k] > 0) {
ans += mp[sum - k];
}
mp[sum]++;
}
return ans;
}
}; | 20.142857 | 50 | 0.513002 |
8016b015c2002f25cf473b9cd17230045d3ccc50 | 2,699 | java | Java | h2o-core/src/main/java/water/fvec/C8DChunk.java | gridgentoo/h2o-tree | 92478be7633fcf3f4b550fe4cbf69bf85112391e | [
"Apache-2.0"
] | 1 | 2018-03-22T12:45:32.000Z | 2018-03-22T12:45:32.000Z | h2o-core/src/main/java/water/fvec/C8DChunk.java | gridgentoo/h2o-tree | 92478be7633fcf3f4b550fe4cbf69bf85112391e | [
"Apache-2.0"
] | null | null | null | h2o-core/src/main/java/water/fvec/C8DChunk.java | gridgentoo/h2o-tree | 92478be7633fcf3f4b550fe4cbf69bf85112391e | [
"Apache-2.0"
] | null | null | null | package water.fvec;
import water.AutoBuffer;
import water.MemoryManager;
import water.util.UnsafeUtils;
/**
* The empty-compression function, where data is in 'double's.
*/
public class C8DChunk extends Chunk {
  // Wraps the given byte array; each element occupies 8 bytes, hence
  // the element count is _mem.length>>3.
  C8DChunk( byte[] bs ) { _mem=bs; _start = -1; set_len(_mem.length>>3); }
  // Integer read of element i; the stored double must not be NaN
  // (NaN encodes "missing" in this chunk).
  @Override protected final long at8_impl( int i ) {
    double res = UnsafeUtils.get8d(_mem, i << 3);
    if( Double.isNaN(res) ) throw new IllegalArgumentException("at8_abs but value is missing");
    return (long)res;
  }
  // Double read of element i (byte offset i*8).
  @Override protected final double atd_impl( int i ) { return UnsafeUtils.get8d(_mem,i<<3) ; }
  // An element is "missing" iff it holds NaN.
  @Override protected final boolean isNA_impl( int i ) { return Double.isNaN(UnsafeUtils.get8d(_mem,i<<3)); }
  // Long writes are not supported by this chunk type; always reports failure.
  @Override boolean set_impl(int idx, long l) { return false; }
  /**
   * Fast explicit set for double.
   * @param i element index
   * @param d value to store at element i
   */
  public void set8D(int i, double d) {UnsafeUtils.set8d(_mem,i<<3,d);}
  /** Fast explicit get for double, without the NA handling of atd_impl. */
  public double get8D(int i) {return UnsafeUtils.get8d(_mem,i<<3);}
  // Double write; always succeeds since the chunk stores raw doubles.
  @Override boolean set_impl(int i, double d) {
    UnsafeUtils.set8d(_mem,i<<3,d);
    return true;
  }
  // Float write, widened to double.
  @Override boolean set_impl(int i, float f ) {
    UnsafeUtils.set8d(_mem,i<<3,f);
    return true;
  }
  // Marks element idx as missing by storing NaN.
  @Override boolean setNA_impl(int idx) { UnsafeUtils.set8d(_mem,(idx<<3),Double.NaN); return true; }
  // Decompression hook; this chunk is uncompressed, so the doubles are
  // copied over verbatim.
  @Override public NewChunk inflate_impl(NewChunk nc) {
    //nothing to inflate - just copy
    nc.alloc_doubles(_len);
    for( int i=0; i< _len; i++ )
      nc.doubles()[i] = UnsafeUtils.get8d(_mem,(i<<3));
    nc.set_sparseLen(nc.set_len(_len));
    return nc;
  }
  // 3.3333333e33
  // public int pformat_len0() { return 22; }
  // public String pformat0() { return "% 21.15e"; }
  // Re-derives length bookkeeping after _mem has been deserialized.
  @Override public final void initFromBytes () {
    _start = -1; _cidx = -1;
    set_len(_mem.length>>3);
    assert _mem.length == _len <<3;
  }
  /** Dense bulk read of the half-open range [from,to) into vals; NaNs pass through. */
  @Override
  public double [] getDoubles(double [] vals, int from, int to){
    for(int i = from; i < to; ++i)
      vals[i - from] = UnsafeUtils.get8d(_mem, i << 3);
    return vals;
  }
  /**
   * Dense bulk interface, fetch values from the given range
   * @param vals destination buffer, filled from index 0
   * @param from start index (inclusive)
   * @param to end index (exclusive)
   * @param NA replacement value substituted for missing (NaN) elements
   */
  @Override
  public double [] getDoubles(double [] vals, int from, int to, double NA){
    for(int i = from; i < to; ++i) {
      double d = UnsafeUtils.get8d(_mem, i << 3);
      vals[i - from] = Double.isNaN(d)?NA:d;
    }
    return vals;
  }
  /**
   * Dense bulk interface, fetch values from the given ids
   * @param vals destination buffer, filled in the order of ids
   * @param ids element indices to read
   */
  @Override
  public double [] getDoubles(double [] vals, int [] ids){
    int j = 0;
    for(int i:ids) vals[j++] = UnsafeUtils.get8d(_mem,i<<3);
    return vals;
  }
}
| 29.336957 | 109 | 0.630974 |
02afece9d60970050d758bfd716542cd9cf02af8 | 813 | swift | Swift | Package.swift | mitsuse/blueprint | 574ca2b097796e0e285a2ba662073e28f1d5a0c4 | [
"MIT"
] | null | null | null | Package.swift | mitsuse/blueprint | 574ca2b097796e0e285a2ba662073e28f1d5a0c4 | [
"MIT"
] | null | null | null | Package.swift | mitsuse/blueprint | 574ca2b097796e0e285a2ba662073e28f1d5a0c4 | [
"MIT"
] | null | null | null | // swift-tools-version:5.3
import Foundation
import PackageDescription
// Package manifest for the Blueprint library.
let package = Package(
    name: "Blueprint",
    products: [
        // Single library product exposing the Blueprint target.
        .library(name: "Blueprint", targets: ["Blueprint"]),
    ],
    dependencies: [
        // Runtime dependencies of the Blueprint target.
        .package(url: "https://github.com/ReactiveX/RxSwift.git", .upToNextMinor(from: "6.1.0")),
        .package(name: "Domain", url: "https://github.com/mitsuse/domain.git", .upToNextMinor(from: "0.5.0")),
        // Quick/Nimble are used only by the test target.
        .package(url: "https://github.com/Quick/Quick.git", .upToNextMinor(from: "3.1.2")),
        .package(url: "https://github.com/Quick/Nimble.git", .upToNextMinor(from: "9.0.0")),
    ],
    targets: [
        .target(name: "Blueprint", dependencies: ["RxSwift", "Domain"]),
        .testTarget(name: "BlueprintTests", dependencies: ["Blueprint", "Quick", "Nimble"]),
    ]
)
| 36.954545 | 110 | 0.619926 |
8713cef8f14fb461c7b0f7c712f690e6f60fa9b9 | 4,239 | ps1 | PowerShell | Random scripts and functions/Export-XenApp_Application.ps1 | superklamer/Citrix | 192d17b5eb3b4890d4eca5eb645ce9c591b173c1 | [
"Apache-2.0"
] | 2 | 2020-10-13T18:38:36.000Z | 2021-04-09T16:06:24.000Z | Random scripts and functions/Export-XenApp_Application.ps1 | superklamer/Citrix | 192d17b5eb3b4890d4eca5eb645ce9c591b173c1 | [
"Apache-2.0"
] | null | null | null | Random scripts and functions/Export-XenApp_Application.ps1 | superklamer/Citrix | 192d17b5eb3b4890d4eca5eb645ce9c591b173c1 | [
"Apache-2.0"
] | 1 | 2019-06-09T00:38:38.000Z | 2019-06-09T00:38:38.000Z | Import-Module "C:\Program Files\Citrix\Provisioning Services Console\Citrix.PVS.SnapIn.dll"
if (-not (Get-PSSnapin | where {$_.Name -Like "*Citrix*"})) {
Add-PSSnapin Citrix*
}
Function Export-Application {
    <#
    .SYNOPSIS
        Exports all published applications of a Citrix delivery group to a
        timestamped CliXml file in the folder containing this script.
    .PARAMETER DeliveryGroup
        Name of the delivery group whose applications are exported.
    .PARAMETER DC
        Delivery controller address used for all broker cmdlets.
    .PARAMETER DisasterRecovery
        When set, admin folder names beginning with 'XA_FARM1' are rewritten
        to a backup-farm prefix before export (see note in the loop below).
    #>
    [CmdletBinding()]
    Param(
        [Parameter(Mandatory)]
        [ValidateNotNullOrEmpty()]
        $DeliveryGroup,
        [ValidateSet('DC1','DC2','DC3', 'DC')]
        $DC,
        [Switch]
        $DisasterRecovery
    )
    BEGIN{
        # Record the export as a high-level operation in Citrix Studio.
        # NOTE(review): $logs is never completed with
        # Stop-LogHighLevelOperation - confirm whether the Studio log entry
        # should be closed (and marked successful) when the export finishes.
        $logs = Start-LogHighLevelOperation -AdminAddress $DC -Source "Studio" -StartTime (Get-Date).ToString() -Text "Exporting applications for Delivery Group `'$DeliveryGroup`'"
        Write-Verbose "Creating log in Studio with ID: $($logs.Id)"
        # Work from the script's own folder so the export file lands beside it.
        $Location = $MyInvocation.MyCommand.Path -replace $MyInvocation.MyCommand.Name,""
        set-location $Location
        $DG = Get-BrokerDesktopGroup -AdminAddress $DC -Name $DeliveryGroup
    }
    PROCESS{
        # Fetch every application associated with the delivery group.
        $Apps = Get-BrokerApplication -AssociatedDesktopGroupUid $DG.Uid -MaxRecordCount 2147483647
        $Results = @()
        foreach($App in $Apps) {
            if ($DisasterRecovery) {
                # Re-point the admin folder at the backup farm.
                # NOTE(review): 'XA_FARM1' is 8 characters but Substring(7)
                # keeps the trailing '1', yielding 'XA_BACKUPFARM11...' -
                # confirm whether Substring(8) was intended.
                if ($App.AdminFolderName.StartsWith('XA_FARM1')) {$App.AdminFolderName = "XA_BACKUPFARM1" + $App.AdminFolderName.Substring(7)}
            }
            # Snapshot of every setting needed to re-create the application.
            $Properties = @{
                AdminFolderName = $App.AdminFolderName
                AdminFolderUid = $App.AdminFolderUid
                AllAssociatedDesktopGroupUids = $App.AllAssociatedDesktopGroupUids
                AllAssociatedDesktopGroupUUIDs = $App.AllAssociatedDesktopGroupUUIDs
                ApplicationName = $App.ApplicationName
                ApplicationType = $App.ApplicationType
                AssociatedApplicationGroupUids = $App.AssociatedApplicationGroupUids
                AssociatedApplicationGroupUUIDs = $App.AssociatedApplicationGroupUUIDs
                AssociatedDesktopGroupPriorities = $App.AssociatedDesktopGroupPriorities
                AssociatedDesktopGroupUids = $App.AssociatedDesktopGroupUids
                AssociatedDesktopGroupUUIDs = $App.AssociatedDesktopGroupUUIDs
                AssociatedUserFullNames = $App.AssociatedUserFullNames
                AssociatedUserNames = $App.AssociatedUserNames
                AssociatedUserUPNs = $App.AssociatedUserUPNs
                BrowserName = $App.BrowserName
                ClientFolder = $App.ClientFolder
                CommandLineArguments = $App.CommandLineArguments
                CommandLineExecutable = $App.CommandLineExecutable
                ConfigurationSlotUids = $App.ConfigurationSlotUids
                CpuPriorityLevel = $App.CpuPriorityLevel
                Description = $App.Description
                Enabled = $App.Enabled
                HomeZoneName = $App.HomeZoneName
                HomeZoneOnly = $App.HomeZoneOnly
                HomeZoneUid = $App.HomeZoneUid
                IconFromClient = $App.IconFromClient
                EncodedIconData = (Get-Brokericon -Uid $App.IconUid).EncodedIconData # Grabs Icon Image
                IconUid = $App.IconUid
                IgnoreUserHomeZone = $App.IgnoreUserHomeZone
                MachineConfigurationNames = $App.MachineConfigurationNames
                MachineConfigurationUids = $App.MachineConfigurationUids
                MaxPerUserInstances = $App.MaxPerUserInstances
                MaxTotalInstances = $App.MaxTotalInstances
                MetadataKeys = $App.MetadataKeys
                MetadataMap = $App.MetadataMap
                Name = $App.Name
                PublishedName = $App.PublishedName
                SecureCmdLineArgumentsEnabled = $App.SecureCmdLineArgumentsEnabled
                ShortcutAddedToDesktop = $App.ShortcutAddedToDesktop
                ShortcutAddedToStartMenu = $App.ShortcutAddedToStartMenu
                StartMenuFolder = $App.StartMenuFolder
                Tags = $App.Tags
                Uid = $App.Uid
                UserFilterEnabled = $App.UserFilterEnabled
                UUID = $App.UUID
                Visible = $App.Visible
                WaitForPrinterCreation = $App.WaitForPrinterCreation
                WorkingDirectory = $App.WorkingDirectory
            }
            # Stores each Application setting for export
            $Results += New-Object psobject -Property $Properties
        }
    }
    END{
        # Writes e.g. 'MyGroup20240101T1200AP.xml' into the current folder.
        $FileName = $DeliveryGroup + $(Get-Date -Format yyyyMMddTHHmm) + "AP.xml"
        $Results | export-clixml .\$FileName
    }
}
Export-Application -DeliveryGroup XA_PPHXUEMCTX -DC pphxctxdc1 -DisasterRecovery | 41.558824 | 181 | 0.678934 |
04fb338a65eb24ac6bed1c6ee2776b6f6725cd37 | 2,663 | java | Java | S2-common-dom/src/main/java/org/symphonyoss/s2/common/dom/json/JsonArray.java | byronsym/S2-common | df3cb3684d3bf30613970c8b15e10f6d2dae5e56 | [
"Apache-2.0"
] | null | null | null | S2-common-dom/src/main/java/org/symphonyoss/s2/common/dom/json/JsonArray.java | byronsym/S2-common | df3cb3684d3bf30613970c8b15e10f6d2dae5e56 | [
"Apache-2.0"
] | null | null | null | S2-common-dom/src/main/java/org/symphonyoss/s2/common/dom/json/JsonArray.java | byronsym/S2-common | df3cb3684d3bf30613970c8b15e10f6d2dae5e56 | [
"Apache-2.0"
] | null | null | null | /*
*
*
* Copyright 2017 Symphony Communication Services, LLC.
*
* Licensed to The Symphony Software Foundation (SSF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SSF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.symphonyoss.s2.common.dom.json;
import java.io.IOException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
import org.symphonyoss.s2.common.dom.DomWriter;
import org.symphonyoss.s2.common.dom.TypeAdaptor;
import org.symphonyoss.s2.common.exception.InvalidValueException;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
/**
 * Base implementation for JSON array DOM nodes, providing serialization and
 * typed conversion helpers over the element iterator supplied by subclasses.
 *
 * @param <N> type of the contained JSON DOM nodes
 */
public abstract class JsonArray<N extends IJsonDomNode> implements IJsonArray<N>
{
  /**
   * Writes this array as JSON: "[]" when empty, otherwise comma-separated
   * elements inside a bracketed block.
   *
   * @param writer     destination writer
   * @param terminator text appended after the closing bracket, may be null
   * @return this (fluent style)
   * @throws IOException if the underlying writer fails
   */
  @Override
  public JsonArray<N> writeTo(DomWriter writer, @Nullable String terminator) throws IOException
  {
    if(isEmpty())
    {
      writer.writeItem("[]", terminator);
    }
    else
    {
      Iterator<N> it = iterator();
      writer.openBlock("[");
      while(it.hasNext())
      {
        // Comma after every element except the last.
        it.next().writeTo(writer, it.hasNext() ? "," : null);
      }
      writer.closeBlock("]", terminator);
    }
    return this;
  }
  /**
   * Converts each element to the given type and collects them into a set.
   *
   * @param type target element type
   * @return an immutable set of the converted elements
   * @throws InvalidValueException if two elements convert to equal values,
   *         or if an element cannot be adapted to the requested type
   */
  public <T> ImmutableSet<T> asImmutableSetOf(Class<T> type) throws InvalidValueException
  {
    Set<T> set = new HashSet<>();
    Iterator<N> it = iterator();
    while(it.hasNext())
    {
      T value = TypeAdaptor.adapt(type, it.next());
      if(!set.add(value))
        throw new InvalidValueException("Duplicate value in set input.");
    }
    return ImmutableSet.copyOf(set);
  }
  /**
   * Converts each element to the given type, preserving array order.
   *
   * @param type target element type
   * @return an immutable list of the converted elements
   * @throws InvalidValueException if an element cannot be adapted to the
   *         requested type
   */
  public <T> ImmutableList<T> asImmutableListOf(Class<T> type) throws InvalidValueException
  {
    List<T> list = new LinkedList<>();
    Iterator<N> it = iterator();
    while(it.hasNext())
    {
      T value = TypeAdaptor.adapt(type, it.next());
      list.add(value);
    }
    return ImmutableList.copyOf(list);
  }
}
| 27.173469 | 95 | 0.685693 |
865acb0d8e5fcce2737f211ddad463da9557a757 | 1,347 | rs | Rust | libs/datamodel/core/src/json/mcf/source.rs | forkkit/prisma-engine | 6c4c4c7d16a37f56a0bfe16205465fa8148c8567 | [
"Apache-2.0"
] | null | null | null | libs/datamodel/core/src/json/mcf/source.rs | forkkit/prisma-engine | 6c4c4c7d16a37f56a0bfe16205465fa8148c8567 | [
"Apache-2.0"
] | null | null | null | libs/datamodel/core/src/json/mcf/source.rs | forkkit/prisma-engine | 6c4c4c7d16a37f56a0bfe16205465fa8148c8567 | [
"Apache-2.0"
] | null | null | null | use crate::{configuration, StringFromEnvVar};
use serde_json;
/// JSON-serializable view of a datasource configuration (camelCase keys).
#[serde(rename_all = "camelCase")]
#[derive(Debug, serde::Serialize)]
pub struct SourceConfig {
    /// Name of the datasource block.
    pub name: String,
    /// Connector identifier reported by the source.
    pub connector_type: String,
    /// Connection URL, possibly resolved from an environment variable.
    pub url: StringFromEnvVar,
    /// Optional documentation attached to the source; omitted from the JSON
    /// output when absent.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub documentation: Option<String>,
}
/// Serializes the given sources into a `serde_json::Value` tree.
pub fn render_sources_to_json_value(sources: &[Box<dyn configuration::Source + Send + Sync>]) -> serde_json::Value {
    serde_json::to_value(sources_to_json_structs(sources)).expect("Failed to render JSON.")
}
/// Serializes the given sources into a pretty-printed JSON string.
pub fn render_sources_to_json(sources: &[Box<dyn configuration::Source + Send + Sync>]) -> String {
    serde_json::to_string_pretty(&sources_to_json_structs(sources)).expect("Failed to render JSON.")
}
/// Maps each source to its serializable `SourceConfig` representation,
/// preserving order.
fn sources_to_json_structs(sources: &[Box<dyn configuration::Source + Send + Sync>]) -> Vec<SourceConfig> {
    sources
        .iter()
        .map(|source| source_to_json_struct(&**source))
        .collect()
}
/// Builds a `SourceConfig` snapshot from a live datasource definition.
fn source_to_json_struct(source: &dyn configuration::Source) -> SourceConfig {
    SourceConfig {
        name: source.name().clone(),
        connector_type: String::from(source.connector_type()),
        url: source.url().clone(),
        documentation: source.documentation().clone(),
    }
}
| 32.071429 | 116 | 0.691166 |
7a46a313adc9ce219f5eb01740b4cfef2722d830 | 2,943 | dart | Dart | lib/resources/names_ar_EG.dart | Firelands128/locale_names | b6e7d8bd59283b4c9fdd004a2842bbb277a284b2 | [
"BSD-3-Clause"
] | 1 | 2021-02-10T04:47:01.000Z | 2021-02-10T04:47:01.000Z | lib/resources/names_ar_EG.dart | Firelands128/locale_names | b6e7d8bd59283b4c9fdd004a2842bbb277a284b2 | [
"BSD-3-Clause"
] | null | null | null | lib/resources/names_ar_EG.dart | Firelands128/locale_names | b6e7d8bd59283b4c9fdd004a2842bbb277a284b2 | [
"BSD-3-Clause"
] | null | null | null | const names = {
"ar_001": "العربية الرسمية الحديثة",
"arn": "الأروكانية",
"as": "الأساميزية",
"as_IN": "الأساميزية (الهند)",
"ban": "اللغة البالية",
"be_BY": "البيلاروسية (روسيا البيضاء)",
"ceb": "السيبونية",
"cy": "الولشية",
"cy_GB": "الولشية (المملكة المتحدة)",
"da": "الدنماركية",
"da_DK": "الدنماركية (الدانمرك)",
"da_GL": "الدنماركية (غرينلاند)",
"de_AT": "الألمانية (النمسا)",
"de_CH": "الألمانية (سويسرا)",
"dsb": "الصربية السفلى",
"en_AU": "الإنجليزية (أستراليا)",
"en_CA": "الإنجليزية (كندا)",
"en_GB": "الإنجليزية (المملكة المتحدة)",
"en_US": "الإنجليزية (الولايات المتحدة)",
"es_ES": "الإسبانية (إسبانيا)",
"es_MX": "الإسبانية (المكسيك)",
"eu": "لغة الباسك",
"fa_AF": "الفارسية (أفغانستان)",
"ff": "الفلة",
"fo": "الفارويز",
"fr_CA": "الفرنسية (كندا)",
"fr_CH": "الفرنسية (سويسرا)",
"frc": "Cajun French",
"gan": "Gan Chinese",
"gn": "الجواراني",
"hak": "Hakka Chinese",
"haw": "لغة أهل الهاواي",
"hsb": "الصربية العليا",
"hsn": "Xiang Chinese",
"ht": "الهايتية",
"hy": "الأرمينية",
"ibb": "الإيبيبيوية",
"is": "الأيسلاندية",
"kkj": "Kako",
"kn": "الكانادية",
"kn_IN": "الكانادية (الهند)",
"kr": "الكانيوري",
"ky": "الكيرغزستانية",
"ky_Cyrl": "الكيرغزستانية (السيريلية)",
"ky_Cyrl_KG": "الكيرغزستانية (السيريلية, قرغيزستان)",
"ky_KG": "الكيرغزستانية (قرغيزستان)",
"lb": "اللوكسمبرجية",
"lg": "الجاندا",
"li": "الليمبرجيشية",
"lt": "اللتوانية",
"lu": "اللبا-كاتانجا",
"mg": "المالاجاشية",
"ml": "الماليالام",
"mr_IN": "الماراثية (الهند)",
"ms": "لغة الملايو",
"mzn": "Mazanderani",
"nan": "Min Nan Chinese",
"nap": "اللغة النابولية",
"nb": "البوكمالية النرويجية",
"nd": "النديبيل الشمالي",
"nl_BE": "الهولندية (بلجيكا)",
"nn": "النينورسك النرويجي",
"oc": "الأوكيتانية",
"om": "الأورومو",
"or": "الأورييا",
"prg": "Prussian",
"ps": "البشتونية",
"pt_BR": "البرتغالية (البرازيل)",
"pt_PT": "البرتغالية (البرتغال)",
"quc": "كيشي",
"ro_MD": "الرومانية (مولدافيا)",
"rom": "غجري",
"sah": "الساخية",
"sat": "السانتالي",
"se": "السامي الشمالي",
"sh_BA": "صربية-كرواتية (البوسنة والهرسك)",
"shn": "الشانية",
"te": "التيلجو",
"ti": "التيغرينية",
"ti_ER": "التيغرينية (أريتريا)",
"ti_ET": "التيغرينية (إثيوبيا)",
"tig": "التغرية",
"tt": "التتارية",
"ug_Arab": "الأويغورية (العربية)",
"ug_Arab_CN": "الأويغورية (العربية, الصين)",
"ug_CN": "الأويغورية (الصين)",
"ur": "الأردية",
"uz_AF": "الأوزبكية (أفغانستان)",
"uz_Arab": "الأوزبكية (العربية)",
"uz_Arab_AF": "الأوزبكية (العربية, أفغانستان)",
"uz_Cyrl": "الأوزبكية (السيريلية)",
"uz_Cyrl_UZ": "الأوزبكية (السيريلية, أوزبكستان)",
"uz_Latn": "الأوزبكية (اللاتينية)",
"uz_Latn_UZ": "الأوزبكية (اللاتينية, أوزبكستان)",
"uz_UZ": "الأوزبكية (أوزبكستان)",
"wbp": "Warlpiri",
"wo": "الولوف",
"wuu": "Wu Chinese",
"yo": "اليوروبية",
"zh_Hans": "الصينية (المبسطة)",
"zh_Hant": "الصينية (التقليدية)"
};
| 28.298077 | 55 | 0.599388 |
cb13ec11aea267e9bad2efcbe3b34723edcd1bf7 | 11,192 | dart | Dart | test/task_either_test.dart | tim-smart/fp_dart | c36a2999075a9a8eedd3443a14c339b1178a82ea | [
"MIT"
] | 3 | 2022-01-06T07:55:15.000Z | 2022-01-31T07:21:20.000Z | test/task_either_test.dart | tim-smart/fpdt | c36a2999075a9a8eedd3443a14c339b1178a82ea | [
"MIT"
] | null | null | null | test/task_either_test.dart | tim-smart/fpdt | c36a2999075a9a8eedd3443a14c339b1178a82ea | [
"MIT"
] | null | null | null | import 'dart:async';
import 'package:fpdt/fpdt.dart';
import 'package:fpdt/either.dart' as E;
import 'package:fpdt/option.dart' as O;
import 'package:fpdt/task.dart' as T;
import 'package:fpdt/task_either.dart' as TE;
import 'package:test/test.dart';
void main() {
group('right', () {
test('resolves to a Right', () async {
final r = await TE.right(123)();
expect(r, E.right(123));
});
});
group('left', () {
test('resolves to a Left', () async {
final r = await TE.left('fail')();
expect(r, E.left('fail'));
});
});
group('toFuture', () {
test('resolves to value on right', () async {
final te = TE.right(123);
expect(await TE.toFuture(te), 123);
});
test('resolves to an error on left', () async {
final te = TE.left('fail');
await expectLater(() => TE.toFuture(te), throwsA('fail'));
});
});
group('toFutureVoid', () {
test('resolves to void on right', () async {
final c = Completer.sync();
await TE.right(123).chain(TE.toFutureVoid(c.complete));
expect(c.isCompleted, false);
});
test('resolves to void on left and runs the effect', () async {
final c = Completer.sync();
await TE.left('fail').chain(TE.toFutureVoid(c.complete));
expect(c.isCompleted, true);
});
});
group('fromOption', () {
test('resolve to a right if some', () async {
final r = await O.some(123).chain(TE.fromOption(() => ''))();
expect(r, E.right(123));
});
test('resolves to a left if none', () async {
final r = await O.none().chain(TE.fromOption(() => 'none'))();
expect(r, E.left('none'));
});
});
group('fromNullable', () {
test('resolve to a right if non-null', () async {
final r = await TE.fromNullable(123, () => 'left')();
expect(r, E.right(123));
});
test('resolves to a left if none', () async {
final r = await TE.fromNullable(null, () => 'left')();
expect(r, E.left('left'));
});
});
group('fromNullableK', () {
test('resolve to a right if non-null', () async {
final r = await 123.chain(TE.fromNullableK(
(i) => i,
(i) => 'left',
))();
expect(r, E.right(123));
});
test('resolves to a left if none', () async {
final r = await null.chain(TE.fromNullableK(
(i) => null,
(i) => 'left',
))();
expect(r, E.left('left'));
});
});
group('chainNullableK', () {
test('resolve to a right if non-null', () async {
final r = await TE.right(123).chain(TE.chainNullableK(
(i) => i * 2,
(i) => 'left',
))();
expect(r, E.right(246));
});
test('resolves to a left if none', () async {
final r = await TE.right(null).chain(TE.chainNullableK(
(i) => null,
(i) => 'left',
))();
expect(r, E.left('left'));
});
test('does nothing if left', () async {
final r = await TE.left('fail').chain(TE.chainNullableK(
(i) => i,
(i) => 'left',
))();
expect(r, E.left('fail'));
});
});
group('fromEither', () {
test('resolve to a right if right', () async {
final r = await E.right(123).chain(TE.fromEither)();
expect(r, E.right(123));
});
test('resolves to a left if none', () async {
final r = await E.left('left').chain(TE.fromEither)();
expect(r, E.left('left'));
});
});
group('fromTask', () {
test('resolves to a right', () async {
final r = await T.value(123).chain(TE.fromTask)();
expect(r, E.right(123));
});
});
group('fold', () {
test('returns the onRight result when Right', () async {
final r = await TE.right(123).chain(TE.fold(
(l) => 'left',
(r) => 'right',
))();
expect(r, 'right');
});
test('returns the onLeft result when left', () async {
final r = await TE.left('asdf').chain(TE.fold(
(l) => 'left',
(r) => 'right',
))();
expect(r, 'left');
});
});
group('flatMap', () {
test('returns the transformed result on Right', () async {
final r = await TE.right(123).chain(TE.flatMap((i) => TE.right(i * 2)))();
expect(r, E.right(246));
});
test('does nothing on left', () async {
final r =
await TE.left('left').chain(TE.flatMap((i) => TE.right(i * 2)))();
expect(r, E.left('left'));
});
});
group('pure', () {
test('transforms the StateReaderTaskEither', () async {
final r = TE.right(123).chain(TE.pure(124));
expect(
await r(),
E.right(124),
);
});
test('does not transform left', () async {
final r = TE.left('fail').chain(TE.call(TE.left('asdf')));
expect(
await r(),
E.left('fail'),
);
});
});
group('call', () {
test('transforms the StateReaderTaskEither', () async {
final r = TE.right(123).chain(TE.call(TE.right(124)));
expect(
await r(),
E.right(124),
);
});
test('resolves left values', () async {
final r = TE.right(123).chain(TE.call(TE.left('fail')));
expect(
await r(),
E.left('fail'),
);
});
});
group('flatMapFirst', () {
test('runs the function on right, and discards the result', () async {
final r =
await TE.right(123).chain(TE.flatMapFirst((i) => TE.right(i * 2)))();
expect(r, E.right(123));
});
test('does not discord left values', () async {
final r =
await TE.right(123).chain(TE.flatMapFirst((i) => TE.left('fail')))();
expect(r, E.left('fail'));
});
test('does nothing on left', () async {
final r = await TE
.left('left')
.chain(TE.flatMapFirst((i) => TE.right(i * 2)))();
expect(r, E.left('left'));
});
});
group('tryCatch', () {
test('resolves to a right when there is no error', () async {
final r = await TE.tryCatch(
() async => 123,
(err, stack) => 'fail',
)();
expect(r, E.right(123));
});
test('resolves to a left when there is an error', () async {
final r = await TE.tryCatch(
() async => throw 'error',
(err, stack) => 'fail',
)();
expect(r, E.left('fail'));
});
});
group('alt', () {
test('does nothing on right', () async {
final r = await TE.right(123).chain(TE.alt((_) => TE.right(-1)))();
expect(r, E.right(123));
});
test('returns the transformed result on left', () async {
final r = await TE.left('left').chain(TE.alt((i) => TE.right('$i-y')))();
expect(r, E.right('left-y'));
});
});
group('orElse', () {
test('does nothing on right', () async {
final r = await TE.right(123).chain(TE.orElse(TE.right(-1)))();
expect(r, E.right(123));
});
test('returns the transformed result on left', () async {
final r = await TE.left('left').chain(TE.orElse(TE.right('else')))();
expect(r, E.right('else'));
});
});
group('getOrElse', () {
test('returns a task', () async {
final r = await TE.right(123).chain(TE.getOrElse((_) => -1))();
expect(r, 123);
});
test('returns the fallback on left', () async {
final r = await TE.left('left').chain(TE.getOrElse((i) => '$i-y'))();
expect(r, 'left-y');
});
});
group('tryCatchK', () {
test('runs the function on right', () async {
final r = await TE.right(123).chain(TE.flatMap(TE.tryCatchK(
(i) async => i * 2,
(err, stack) => 'fail',
)))();
expect(r, E.right(246));
});
test('does nothing on left', () async {
final r = await TE.left('left').chain(TE.flatMap(TE.tryCatchK(
(i) async => i * 2,
(err, stack) => 'fail',
)))();
expect(r, E.left('left'));
});
});
group('tryCatchK2', () {
final task = TE.tryCatchK2(
(int a, int b) => a > 5 ? a + b : throw 'error',
(err, stack) => 'fail',
);
test('resolves to right on success', () async {
expect(await task(10, 5)(), E.right(15));
});
test('resolves to left on error', () async {
expect(await task(3, 5)(), E.left('fail'));
});
});
group('chainTryCatchK', () {
test('runs the function on right', () async {
final r = await TE.right(123).chain(TE.chainTryCatchK(
(i) async => i * 2,
(err, stack) => 'fail',
))();
expect(r, E.right(246));
});
test('does nothing on left', () async {
final r = await TE.left('left').chain(TE.chainTryCatchK(
(i) async => i * 2,
(err, stack) => 'fail',
))();
expect(r, E.left('left'));
});
test('errors are handled', () async {
final r = await TE.right(123).chain(TE.chainTryCatchK(
(i) async => throw 'error',
(err, stack) => 'fail',
))();
expect(r, E.left('fail'));
});
});
group('map', () {
test('transforms a right', () async {
final r = await TE.right(123).chain(TE.map((i) => i * 2))();
expect(r, E.right(246));
});
test('does nothing on left', () async {
final r = await TE.left('left').chain(TE.map((i) => i * 2))();
expect(r, E.left('left'));
});
});
group('filter', () {
test('does nothing if right and predicate passes', () async {
final r = await TE.right(123).chain(TE.filter(
(i) => i == 123,
(i) => 'left',
))();
expect(r, E.right(123));
});
test('returns orElse if predicate fails', () async {
final r = await TE.right(123).chain(TE.filter(
(i) => i != 123,
(i) => 'left',
))();
expect(r, E.left('left'));
});
test('does nothing on left', () async {
final r = await TE.left('asdf').chain(TE.filter(
(i) => i != 123,
(i) => 'left',
))();
expect(r, E.left('asdf'));
});
});
group('sequence', () {
test('transforms the iterable into a TaskEither', () async {
final result = await TE.sequence([
TE.right(1),
TE.right(2),
TE.right(3),
])();
expect(result, E.right(const IListConst([1, 2, 3])));
});
test('resolves to a left if an item is left', () async {
final result = await TE.sequence<String, int>([
TE.right(1),
TE.left('fail'),
TE.right(3),
])();
expect(result, E.left('fail'));
});
});
group('sequenceSeq', () {
test('transforms the iterable into a TaskEither', () async {
final result = await TE.sequenceSeq([
TE.right(1),
TE.right(2),
TE.right(3),
])();
expect(result, E.right(const IListConst([1, 2, 3])));
});
test('resolves to a left if an item is left', () async {
final result = await TE.sequenceSeq<String, int>([
TE.right(1),
TE.left('fail'),
TE.right(3),
])();
expect(result, E.left('fail'));
});
});
}
| 26.711217 | 80 | 0.496873 |
ca7e89825ca7a93a512f4f30d8ff8865a7916fb4 | 2,523 | asm | Assembly | Transynther/x86/_processed/NONE/_xt_/i7-8650U_0xd2_notsx.log_428_99.asm | ljhsiun2/medusa | 67d769b8a2fb42c538f10287abaf0e6dbb463f0c | [
"MIT"
] | 9 | 2020-08-13T19:41:58.000Z | 2022-03-30T12:22:51.000Z | Transynther/x86/_processed/NONE/_xt_/i7-8650U_0xd2_notsx.log_428_99.asm | ljhsiun2/medusa | 67d769b8a2fb42c538f10287abaf0e6dbb463f0c | [
"MIT"
] | 1 | 2021-04-29T06:29:35.000Z | 2021-05-13T21:02:30.000Z | Transynther/x86/_processed/NONE/_xt_/i7-8650U_0xd2_notsx.log_428_99.asm | ljhsiun2/medusa | 67d769b8a2fb42c538f10287abaf0e6dbb463f0c | [
"MIT"
] | 3 | 2020-07-14T17:07:07.000Z | 2022-03-21T01:12:22.000Z | .global s_prepare_buffers
s_prepare_buffers:
push %r10
push %r11
push %r13
push %rdi
lea addresses_normal_ht+0x4b17, %r13
cmp %r10, %r10
movw $0x6162, (%r13)
nop
nop
nop
nop
nop
xor $14407, %r11
pop %rdi
pop %r13
pop %r11
pop %r10
ret
.global s_faulty_load
// Auto-generated experiment body (see generator trailer below): performs a
// store to the addresses_NC region, then the "faulty" load whose byte value
// is encoded into the `oracles` probe array.
s_faulty_load:
push %r11
push %r9
push %rax
push %rbx
push %rdi
push %rdx
// Store
// Byte store through an absolute address held in %rbx; per the generator
// metadata this targets the addresses_NC region.
mov $0x3b67710000000dd4, %rbx
nop
nop
nop
and $47007, %r9
movb $0x51, (%rbx)
nop
sub %rdi, %rdi
// Faulty Load
// Load one byte from addresses_normal+0x5027 ...
lea addresses_normal+0x5027, %rdx
nop
sub %r9, %r9
movb (%rdx), %r11b
// ... and encode it into the probe array: mask to 8 bits, scale by 4096
// (shl 12), and touch oracles[byte * 4096] so the byte is recoverable via
// a cache side-channel probe.
lea oracles, %rbx
and $0xff, %r11
shlq $12, %r11
mov (%rbx,%r11,1), %r11
pop %rdx
pop %rdi
pop %rbx
pop %rax
pop %r9
pop %r11
ret
/*
<gen_faulty_load>
[REF]
{'OP': 'LOAD', 'src': {'type': 'addresses_normal', 'size': 16, 'AVXalign': False, 'NT': False, 'congruent': 0, 'same': False}}
{'OP': 'STOR', 'dst': {'type': 'addresses_NC', 'size': 1, 'AVXalign': False, 'NT': False, 'congruent': 0, 'same': False}}
[Faulty Load]
{'OP': 'LOAD', 'src': {'type': 'addresses_normal', 'size': 1, 'AVXalign': False, 'NT': False, 'congruent': 0, 'same': True}}
<gen_prepare_buffer>
{'OP': 'STOR', 'dst': {'type': 'addresses_normal_ht', 'size': 2, 'AVXalign': False, 'NT': False, 'congruent': 4, 'same': False}}
{'34': 428}
34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34 34
*/
| 36.042857 | 1,283 | 0.657154 |
751c15645be47c98c306671a98e4d8bf8da5d974 | 1,059 | cs | C# | Domain/Models/Money/Price.cs | MatthewTheroux/project_rvtr_campground | fe0a272cd6a8e47f82dc44f8504cf94c802b10cb | [
"MIT"
] | null | null | null | Domain/Models/Money/Price.cs | MatthewTheroux/project_rvtr_campground | fe0a272cd6a8e47f82dc44f8504cf94c802b10cb | [
"MIT"
] | null | null | null | Domain/Models/Money/Price.cs | MatthewTheroux/project_rvtr_campground | fe0a272cd6a8e47f82dc44f8504cf94c802b10cb | [
"MIT"
] | null | null | null | using System;
///
namespace Campgrounds.Domain.Models.Money
{
///
/// <summary>Monetary price with a currency code and sales-tax helpers.</summary>
public class Price
{
    /// <summary>Base amount before tax.</summary>
    public decimal Amount { get; set; } = 0.00M;

    /// <summary>Currency code; defaults to "USD".</summary>
    public string Currency { get; private set; } = "USD"; // =$

    /// <summary>Sales tax rate as a fraction; defaults to 7%.</summary>
    public decimal SalesTaxRate { get; private set; } = .07M; // = 07% default

    /// <summary>Tax due on <see cref="Amount"/> at the current rate.</summary>
    public decimal SalesTax { get { return Amount * SalesTaxRate; } }

    /// <summary>Amount including sales tax.</summary>
    public decimal WithTax { get { return Amount + SalesTax; } }

    /// <summary>Creates a price; defaults to 0.00 USD.</summary>
    /// <param name="_amt">Initial amount.</param>
    /// <param name="_curr">Currency code.</param>
    public Price(decimal _amt = 0M, string _curr = "USD")
    {
        // _amt is already decimal; the old (decimal) cast was redundant.
        Amount = _amt;
        Currency = _curr;
    }

    /// <summary>Sets and returns the new sales tax rate.</summary>
    public decimal SetSalesTaxRate(decimal _rate) { return SalesTaxRate = _rate; }

    /// <summary>Formats a value as currency with two decimals, e.g. "$1.00".</summary>
    public string As2Decimal(decimal _amt)
    {
        // "C2" is the standard currency format specifier; decimal.ToString("C2")
        // produces the same text as String.Format("{0:C2}", _amt) without boxing.
        return _amt.ToString("C2");
    }

    /// <summary>E.g. "$1.00 USD".</summary>
    public override string ToString()
    {
        return $"{As2Decimal(Amount)} {Currency}";
    }
}
}// /ns '..Models.Money'
// [EoF] | 24.627907 | 82 | 0.599622 |
a044c35a2e376296fa69c541e91cbbfa7a86adef | 1,869 | asm | Assembly | session_02/04-rwmemoria2/rwmemoria2.asm | DigiOhhh/LabArchitettura2-2017-2018 | da34b9a75ab2945ac70d5cbf69395d0db5172f6f | [
"MIT"
] | 1 | 2019-03-06T13:26:10.000Z | 2019-03-06T13:26:10.000Z | session_02/04-rwmemoria2/rwmemoria2.asm | DigiOhhh/LabArchitettura2-2017-2018 | da34b9a75ab2945ac70d5cbf69395d0db5172f6f | [
"MIT"
] | null | null | null | session_02/04-rwmemoria2/rwmemoria2.asm | DigiOhhh/LabArchitettura2-2017-2018 | da34b9a75ab2945ac70d5cbf69395d0db5172f6f | [
"MIT"
] | 1 | 2019-03-06T13:25:28.000Z | 2019-03-06T13:25:28.000Z | .data
A: .space 16 # vettore con 4 elementi
B: .space 16 # vettore con 4 elementi
c: .space 4 # vettore con 4 elementi
.text
.globl main
main:
# Inizializzazione registri indirizzi
la $s0, A
la $s1, B
la $s2, c
# Inizializzazione valori (c=2)
addi $t0, $zero, 2 # $t0=2
sw $t0, 0($s2) # c=$t0
# Inizializzazione vettori A e B
addi $t0, $zero, -1 # $t0 = -1
sw $t0, 0($s0) # A[0] = -1
sw $t0, 4($s0) # A[1] = -1
sw $t0, 0($s1) # B[0] = -1
sw $t0, 8($s1) # B[2] = -1
sw $t0, 12($s1) # B[3] = -1
addi $t0, $zero, 1 # $t0 = 1
sw $t0, 8($s0) # A[2] = 1
addi $t0, $zero, 4 # $t0 = 4
sw $t0, 12($s0) # A[3] = 1
addi $t0, $zero, 6 # $t0 = 6
sw $t0, 4($s1) # B[1] = 1
# calcolo A[c-1] = c*(B[A[c]] + c)/A[2*c-1]
lw $t0, 0($s2) # $t0 = c
addi $t1, $zero, 4 # $t1 = 4
mult $t0, $t1 # lo = $t0 * $t1 = c * 4
mflo $t2 # $t2=c*4, offset di A[c]
add $t2, $s0, $t2 # $t2 = $s0+$t2, indirizzo di A[c]
lw $t3, 0($t2) # $t3 = A[c]
mult $t1, $t3 # lo = 4 * A[c]
mflo $t3 # $t3=4 * A[c], offset di B[A[c]]
add $t3, $s1, $t3 # $t3 = $s1+$t3, indirizzo di B[A[c]]
lw $t2, 0($t3) # $t2 = B[A[c]]
add $t2, $t0, $t2 # $t2 = B[A[c]] + c
mult $t0, $t2 # lo = c * (B[A[c]] + c)
mflo $t2 # $t2 = c * (B[A[c]] + c)
addi $t3, $zero, 2 # $t3 = 2
mult $t0, $t3 # lo = 2 * c
mflo $t3 # $t3 = 2 * c
addi $t3, $t3, -1 # $t3 = 2 * c - 1
mult $t1, $t3 # lo = 4 * (2 * c - 1)
mflo $t3 # $t3 = 4 * (2 * c - 1), offset di A[2*c-1]
add $t3, $s0, $t3 # $t3 = $s1+$t3, indirizzo di A[2*c-1]
addi $t0, $t0, -1 # $t0 = c -1
mult $t0, $t1 # lo = (c-1) * 4
mflo $t0 # $t0=(c-1) * 4, offset di A[c-1]
add $t1, $s0, $t0 # $t1 = $s0+$t0, indirizzo di A[c-1]
lw $t0, 0($t3) # $t0 = A[2*c-1]
div $t2, $t0 # lo = c*(B[A[c]] + c)/A[2*c-1]
mflo $t2 # $t2 = c*(B[A[c]] + c)/A[2*c-1]
sw $t2,0($t1) # A[c-1] = c*(B[A[c]] + c)/A[2*c-1]
| 30.145161 | 57 | 0.460139 |
8f0dc7242993790c7f8d8af7aed771101c1680c7 | 1,280 | java | Java | src/main/java/ulcambridge/foundations/viewer/crowdsourcing/model/Term.java | cambridge-collection/cudl-tagging-service | 4123d906e117e596839930ebe44157cba1407124 | [
"BSD-2-Clause"
] | null | null | null | src/main/java/ulcambridge/foundations/viewer/crowdsourcing/model/Term.java | cambridge-collection/cudl-tagging-service | 4123d906e117e596839930ebe44157cba1407124 | [
"BSD-2-Clause"
] | null | null | null | src/main/java/ulcambridge/foundations/viewer/crowdsourcing/model/Term.java | cambridge-collection/cudl-tagging-service | 4123d906e117e596839930ebe44157cba1407124 | [
"BSD-2-Clause"
] | null | null | null | package ulcambridge.foundations.viewer.crowdsourcing.model;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
/**
*
* @author Lei
*
*/
/**
 * Immutable value object for a crowdsourced tagging term: a name together
 * with its associated {@code raw} count and {@code value} figure.
 *
 * <p>Equality and the hash code are intentionally based on {@code name}
 * alone; {@code raw} and {@code value} do not participate.
 *
 * @author Lei
 */
public class Term {

    private final String name;
    private final int raw;
    private final double value;

    @JsonCreator
    public Term(String name, int raw, double value) {
        this.name = name;
        this.raw = raw;
        this.value = value;
    }

    /** @return the term's name (the identity of this object) */
    @JsonProperty("name")
    public String getName() {
        return name;
    }

    /** @return the raw figure associated with this term */
    @JsonProperty("raw")
    public int getRaw() {
        return raw;
    }

    /** @return the value figure associated with this term */
    @JsonProperty("value")
    public double getValue() {
        return value;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        // Also rejects null: instanceof is false for null references.
        if (!(obj instanceof Term)) {
            return false;
        }
        Term other = (Term) obj;
        return new EqualsBuilder().append(name, other.getName()).isEquals();
    }

    @Override
    public int hashCode() {
        // Seeds (99, 97) kept identical to preserve existing hash values.
        return new HashCodeBuilder(99, 97).append(name).toHashCode();
    }
}
| 20.983607 | 74 | 0.614844 |
9c077835c780f9c763288c74a727ab9e30df4a36 | 11,634 | cc | C++ | compiler/emit/call.cc | asoffer/icarus | 5c9af79d1a39e14d95da1adacbdd7392908eedc5 | [
"Apache-2.0"
] | null | null | null | compiler/emit/call.cc | asoffer/icarus | 5c9af79d1a39e14d95da1adacbdd7392908eedc5 | [
"Apache-2.0"
] | null | null | null | compiler/emit/call.cc | asoffer/icarus | 5c9af79d1a39e14d95da1adacbdd7392908eedc5 | [
"Apache-2.0"
] | null | null | null | #include "ast/ast.h"
#include "base/meta.h"
#include "compiler/builtin_module.h"
#include "compiler/compiler.h"
#include "compiler/emit/common.h"
#include "compiler/emit/compiler_common.h"
#include "compiler/emit/copy_move_assignment.h"
#include "compiler/emit/initialize.h"
#include "compiler/instantiate.h"
#include "compiler/module.h"
#include "compiler/resources.h"
#include "ir/instruction/instructions.h"
#include "type/generic.h"
namespace compiler {
namespace {
// TODO: Replace with `builtin` module.
// Lowers a call to a member of the builtin module. Returns true when `f`
// names a recognized builtin ("slice", "foreign", "debug_ir",
// "compilation_error"), in which case IR has been emitted and any produced
// value appended to `out`. Returns false for unrecognized names so the
// caller can fall back to ordinary call emission.
bool EmitBuiltinCall(Compiler &c, std::string_view f,
                     absl::Span<ast::Call::Argument const> args,
                     ir::PartialResultBuffer &out) {
  // slice(data, length): allocate a temporary slice and move-assign its data
  // pointer and length fields from the two arguments.
  if (f == "slice") {
    type::Slice const *slice_type =
        type::Slc(c.context()
                      .qual_types(&args[0].expr())[0]
                      .type()
                      .as<type::BufferPointer>()
                      .pointee());
    auto slice = c.state().TmpAlloca(slice_type);

    // TODO: These have the wrong types, or at least these types are not the
    // types of the values held, but that's what's expected by EmitMoveAssign.
    type::Typed<ir::RegOr<ir::addr_t>> data(
        c.current_block()->Append(type::SliceDataInstruction{
            .slice = slice,
            .result = c.current().subroutine->Reserve(),
        }),
        type::BufPtr(slice_type->data_type()));
    type::Typed<ir::RegOr<ir::addr_t>> length(
        c.current_block()->Append(type::SliceLengthInstruction{
            .slice = slice,
            .result = c.current().subroutine->Reserve(),
        }),
        type::Slice::LengthType());

    ir::PartialResultBuffer buffer;
    c.EmitToBuffer(&args[0].expr(), buffer);
    MoveAssignmentEmitter emitter(c);
    emitter(data,
            type::Typed(buffer[0],
                        type::Type(type::BufPtr(slice_type->data_type()))));
    buffer.clear();
    c.EmitToBuffer(&args[1].expr(), buffer);
    emitter(length, type::Typed(buffer[0], type::Slice::LengthType()));

    out.append(slice);
    return true;
  }

  // foreign(name, type): resolves `name` to a data symbol (for pointer types)
  // or a foreign function (for function types). Both arguments must evaluate
  // at compile time; on evaluation failure we return true with nothing
  // appended (diagnostics were already emitted by the evaluators).
  if (f == "foreign") {
    // `EvaluateOrDiagnoseAs` cannot yet support slices because it
    // internally converts compile-time types to a type::Type and it doesn't
    // know which instance of type::Slice it should use.
    auto name_buffer =
        c.EvaluateToBufferOrDiagnose(type::Typed<ast::Expression const *>(
            &args[0].expr(), type::Slc(type::Char)));
    if (not name_buffer) { return true; }

    auto maybe_foreign_type =
        c.EvaluateOrDiagnoseAs<type::Type>(&args[1].expr());
    if (not maybe_foreign_type) { return true; }

    auto slice = name_buffer->get<ir::Slice>(0);
    std::string name(slice);

    if (maybe_foreign_type->is<type::Pointer>()) {
      auto result = c.current_block()->Append(ir::LoadDataSymbolInstruction{
          .name = std::move(name),
          .result = c.current().subroutine->Reserve()});
      out.append(result);
    } else if (auto const *f = maybe_foreign_type->if_as<type::Function>()) {
      out.append(c.shared_context().ForeignFunction(std::move(name), f));
    } else {
      UNREACHABLE();
    }
    return true;
  }

  // debug_ir(): emits an instruction that dumps the current subroutine's IR;
  // produces no value.
  if (f == "debug_ir") {
    c.current_block()->Append(
        ir::DebugIrInstruction{.fn = c.current().subroutine});
    return true;
  }

  // compilation_error: must never survive to emission.
  if (f == "compilation_error") { UNREACHABLE(); }

  return false;
}
} // namespace
// Emits IR for the call expression `node`, appending its result to `out`
// when the call produces exactly one value. Builtin-module calls are
// intercepted by EmitBuiltinCall; a "call" of a generic struct instead
// instantiates it and yields the resulting type; everything else is lowered
// through EmitCall into stack temporaries.
void Compiler::EmitToBuffer(ast::Call const *node,
                            ir::PartialResultBuffer &out) {
  // Detect `builtin_module.member(...)` and lower it specially.
  if (auto const *a = node->callee()->if_as<ast::Access>()) {
    if (context().qual_types(a->operand())[0] ==
        type::QualType::Constant(type::Module)) {
      if (*EvaluateOrDiagnoseAs<ir::ModuleId>(a->operand()) ==
          ir::ModuleId::Builtin()) {
        if (EmitBuiltinCall(*this, a->member_name(), node->arguments(), out)) {
          return;
        }
      }
    }
  }

  auto qts = context().qual_types(node);

  // Constant arguments need to be computed entirely before being used to
  // instantiate a generic function.
  ir::CompleteResultBuffer buffer;
  auto constant_arguments =
      EmitConstantArguments(*this, node->arguments(), buffer);

  // TODO: Support mixed overloads
  if (auto const *gs_type = context()
                                .qual_types(node->callee())[0]
                                .type()
                                .if_as<type::Generic<type::Struct>>()) {
    out.append(
        type::Type(gs_type->Instantiate(work_resources(), constant_arguments)));
    return;
  }

  // One stack temporary per returned value; EmitCall writes into these.
  // TODO: It'd be nice to not stack-allocate register-sized values.
  std::vector<type::Typed<ir::RegOr<ir::addr_t>>> outs;
  outs.reserve(qts.size());
  for (type::QualType const &qt : qts) {
    outs.emplace_back(state().TmpAlloca(qt.type()), qt.type());
  }

  EmitCall(*this, context().CallMetadata(node).resolved(), constant_arguments,
           node->arguments(), outs);

  // TODO: Why is this conditional on the size of qts?
  if (qts.size() == 1) {
    out.append(PtrFix(current(), outs[0]->reg(), qts[0].type()));
  }
}
// Emits IR move-initializing the storage in `to` with the result of the call
// `node`. Follows the same three-way dispatch as EmitToBuffer: builtin calls,
// generic-struct instantiation, then ordinary call emission via EmitCall
// (which writes directly into `to`).
void Compiler::EmitMoveInit(
    ast::Call const *node,
    absl::Span<type::Typed<ir::RegOr<ir::addr_t>> const> to) {
  if (auto const *a = node->callee()->if_as<ast::Access>()) {
    if (context().qual_types(a->operand())[0] ==
        type::QualType::Constant(type::Module)) {
      if (*EvaluateOrDiagnoseAs<ir::ModuleId>(a->operand()) ==
          ir::ModuleId::Builtin()) {
        ir::PartialResultBuffer out;
        if (EmitBuiltinCall(*this, a->member_name(), node->arguments(), out)) {
          // Some builtins (e.g. debug_ir) produce no value; nothing to init.
          if (out.empty()) { return; }
          MoveInitializationEmitter emitter(*this);
          emitter(to[0], out);
          return;
        }
      }
    }
  }

  // Constant arguments need to be computed entirely before being used to
  // instantiate a generic function.
  ir::CompleteResultBuffer buffer;
  auto constant_arguments =
      EmitConstantArguments(*this, node->arguments(), buffer);

  // TODO: Support mixed overloads
  if (auto const *gs_type = context()
                                .qual_types(node->callee())[0]
                                .type()
                                .if_as<type::Generic<type::Struct>>()) {
    ir::RegOr<type::Type> t(
        type::Type(gs_type->Instantiate(work_resources(), constant_arguments)));
    ir::PartialResultBuffer t_buf;
    t_buf.append(t);
    // NOTE(review): uses CopyAssignmentEmitter although this is the move-init
    // path — presumably fine for a type::Type value, but worth confirming.
    CopyAssignmentEmitter emitter(*this);
    emitter(to[0], type::Typed(t_buf[0], type::Type_));
    return;
  }

  EmitCall(*this, context().CallMetadata(node).resolved(), constant_arguments,
           node->arguments(), to);
}
// Emits IR copy-initializing the storage in `to` with the result of the call
// `node`. Mirrors EmitMoveInit exactly, but uses copy semantics for the
// builtin-call result.
void Compiler::EmitCopyInit(
    ast::Call const *node,
    absl::Span<type::Typed<ir::RegOr<ir::addr_t>> const> to) {
  if (auto const *a = node->callee()->if_as<ast::Access>()) {
    if (context().qual_types(a->operand())[0] ==
        type::QualType::Constant(type::Module)) {
      if (*EvaluateOrDiagnoseAs<ir::ModuleId>(a->operand()) ==
          ir::ModuleId::Builtin()) {
        ir::PartialResultBuffer out;
        if (EmitBuiltinCall(*this, a->member_name(), node->arguments(), out)) {
          // Builtins that produce no value require no initialization.
          if (out.empty()) { return; }
          CopyInitializationEmitter emitter(*this);
          emitter(to[0], out);
          return;
        }
      }
    }
  }

  // Constant arguments need to be computed entirely before being used to
  // instantiate a generic function.
  ir::CompleteResultBuffer buffer;
  auto constant_arguments =
      EmitConstantArguments(*this, node->arguments(), buffer);

  // TODO: Support mixed overloads
  if (auto const *gs_type = context()
                                .qual_types(node->callee())[0]
                                .type()
                                .if_as<type::Generic<type::Struct>>()) {
    ir::RegOr<type::Type> t(
        type::Type(gs_type->Instantiate(work_resources(), constant_arguments)));
    ir::PartialResultBuffer t_buf;
    t_buf.append(t);
    CopyAssignmentEmitter emitter(*this);
    emitter(to[0], type::Typed(t_buf[0], type::Type_));
    return;
  }

  EmitCall(*this, context().CallMetadata(node).resolved(), constant_arguments,
           node->arguments(), to);
}
// Emits IR move-assigning the result of the call `node` into `to`.
// Follows the same dispatch as EmitToBuffer/EmitMoveInit: builtin calls,
// generic-struct instantiation, then ordinary call emission via EmitCall.
void Compiler::EmitMoveAssign(
    ast::Call const *node,
    absl::Span<type::Typed<ir::RegOr<ir::addr_t>> const> to) {
  if (auto const *a = node->callee()->if_as<ast::Access>()) {
    if (context().qual_types(a->operand())[0] ==
        type::QualType::Constant(type::Module)) {
      if (*EvaluateOrDiagnoseAs<ir::ModuleId>(a->operand()) ==
          ir::ModuleId::Builtin()) {
        ir::PartialResultBuffer out;
        if (EmitBuiltinCall(*this, a->member_name(), node->arguments(), out)) {
          // Builtins that produce no value require no assignment.
          if (out.empty()) { return; }
          MoveAssignmentEmitter emitter(*this);
          emitter(to[0],
                  type::Typed(out[0], context().qual_types(node)[0].type()));
          return;
        }
      }
    }
  }

  // Constant arguments need to be computed entirely before being used to
  // instantiate a generic function.
  ir::CompleteResultBuffer buffer;
  auto constant_arguments =
      EmitConstantArguments(*this, node->arguments(), buffer);

  // TODO: Support mixed overloads
  if (auto const *gs_type = context()
                                .qual_types(node->callee())[0]
                                .type()
                                .if_as<type::Generic<type::Struct>>()) {
    ir::RegOr<type::Type> t(
        type::Type(gs_type->Instantiate(work_resources(), constant_arguments)));
    ir::PartialResultBuffer t_buf;
    t_buf.append(t);
    MoveAssignmentEmitter emitter(*this);
    emitter(to[0], type::Typed(t_buf[0], type::Type_));
    // Fix: previously control fell through to EmitCall after assigning the
    // instantiated struct type; every sibling (EmitToBuffer, EmitMoveInit,
    // EmitCopyInit) returns here, and a generic struct is not an overload
    // set to call.
    return;
  }

  EmitCall(*this, context().CallMetadata(node).resolved(), constant_arguments,
           node->arguments(), to);
}
// Emits IR copy-assigning the result of the call `node` into `to`.
// Mirrors EmitMoveAssign with copy semantics.
void Compiler::EmitCopyAssign(
    ast::Call const *node,
    absl::Span<type::Typed<ir::RegOr<ir::addr_t>> const> to) {
  if (auto const *a = node->callee()->if_as<ast::Access>()) {
    if (context().qual_types(a->operand())[0] ==
        type::QualType::Constant(type::Module)) {
      if (*EvaluateOrDiagnoseAs<ir::ModuleId>(a->operand()) ==
          ir::ModuleId::Builtin()) {
        ir::PartialResultBuffer out;
        if (EmitBuiltinCall(*this, a->member_name(), node->arguments(), out)) {
          // Builtins that produce no value require no assignment.
          if (out.empty()) { return; }
          CopyAssignmentEmitter emitter(*this);
          emitter(to[0],
                  type::Typed(out[0], context().qual_types(node)[0].type()));
          return;
        }
      }
    }
  }

  // Constant arguments need to be computed entirely before being used to
  // instantiate a generic function.
  ir::CompleteResultBuffer buffer;
  auto constant_arguments =
      EmitConstantArguments(*this, node->arguments(), buffer);

  // TODO: Support mixed overloads
  if (auto const *gs_type = context()
                                .qual_types(node->callee())[0]
                                .type()
                                .if_as<type::Generic<type::Struct>>()) {
    ir::RegOr<type::Type> t(
        type::Type(gs_type->Instantiate(work_resources(), constant_arguments)));
    ir::PartialResultBuffer t_buf;
    t_buf.append(t);
    CopyAssignmentEmitter emitter(*this);
    emitter(to[0], type::Typed(t_buf[0], type::Type_));
    // Fix: previously control fell through to EmitCall after assigning the
    // instantiated struct type; the Init/ToBuffer variants return here, and
    // a generic struct is not an overload set to call.
    return;
  }

  EmitCall(*this, context().CallMetadata(node).resolved(), constant_arguments,
           node->arguments(), to);
}
// Pattern matching through a call expression is not implemented yet; this
// aborts via NOT_YET() if ever reached.
bool Compiler::PatternMatch(
    ast::Call const *node, PatternMatchingContext &pmc,
    absl::flat_hash_map<ast::Declaration::Id const *, ir::CompleteResultBuffer>
        &bindings) {
  NOT_YET();
}
} // namespace compiler
| 35.254545 | 80 | 0.598075 |
3959ccaac5e04e9253a4b69f94e161a38ef43640 | 4,637 | swift | Swift | Tests/PusherChannelTests.swift | JonathanDowning/pusher-websocket-swift | 063497e066be81a1a2e8cfd58e7962524a2e7e66 | [
"MIT"
] | null | null | null | Tests/PusherChannelTests.swift | JonathanDowning/pusher-websocket-swift | 063497e066be81a1a2e8cfd58e7962524a2e7e66 | [
"MIT"
] | null | null | null | Tests/PusherChannelTests.swift | JonathanDowning/pusher-websocket-swift | 063497e066be81a1a2e8cfd58e7962524a2e7e66 | [
"MIT"
] | null | null | null | import XCTest
#if WITH_ENCRYPTION
@testable import PusherSwiftWithEncryption
#else
@testable import PusherSwift
#endif
// Unit tests for PusherChannel: construction, binding/unbinding of event
// callbacks (by callback id, by event name, and wholesale), and setting the
// decryption key used by private encrypted channels.
class PusherChannelTests: XCTestCase {
    // NOTE(review): assigned in setUp() but never read — every test below
    // creates its own local channel. Consider removing the property or the
    // shadowing locals.
    var chan: PusherChannel!

    override func setUp() {
        super.setUp()
        chan = PusherChannel(name: "test-channel", connection: MockPusherConnection())
    }

    // A fresh channel carries its name and starts with zero handlers.
    func testANewChannelGetsCreatedWithTheCorrectNameAndNoCallbacks() {
        let chan = PusherChannel(name: "test-channel", connection: MockPusherConnection())

        XCTAssertEqual(chan.name, "test-channel", "the channel name should be test-channel")
        XCTAssertEqual(chan.eventHandlers.count, 0, "the channel should have no callbacks")
    }

    // bind(eventName:callback:) registers one handler under that event name.
    func testBindingACallbackToAChannelForAGivenEventName() {
        let chan = PusherChannel(name: "test-channel", connection: MockPusherConnection())
        XCTAssertEqual(chan.eventHandlers.count, 0, "the channel should have no callbacks")
        let _ = chan.bind(eventName: "test-event", callback: { (data: Any?) -> Void in })
        XCTAssertEqual(chan.eventHandlers["test-event"]?.count, 1, "the channel should have one callback")
    }

    // unbind(eventName:callbackId:) removes exactly the data callback whose
    // id was returned by bind.
    func testUnbindingADataCallbackForAGivenEventNameAndCallbackId() {
        let chan = PusherChannel(name: "test-channel", connection: MockPusherConnection())
        XCTAssertNil(chan.eventHandlers["test-event"], "the channel should have no callbacks for event \"test-event\"")
        let idOne = chan.bind(eventName: "test-event", callback: { (data: Any?) -> Void in })
        let _ = chan.bind(eventName: "test-event", callback: { (data: Any?) -> Void in })
        XCTAssertEqual(chan.eventHandlers["test-event"]?.count, 2, "the channel should have two callbacks for event \"test-event\"")
        chan.unbind(eventName: "test-event", callbackId: idOne)
        XCTAssertEqual(chan.eventHandlers["test-event"]?.count, 1, "the channel should have one callback for event \"test-event\"")
    }

    // Same as above but the removed handler was bound via eventCallback:.
    func testUnbindingAnEventCallbackForAGivenEventNameAndCallbackId() {
        let chan = PusherChannel(name: "test-channel", connection: MockPusherConnection())
        XCTAssertNil(chan.eventHandlers["test-event"], "the channel should have no callbacks for event \"test-event\"")
        let idOne = chan.bind(eventName: "test-event", eventCallback: { (event: PusherEvent) -> Void in })
        let _ = chan.bind(eventName: "test-event", callback: { (data: Any?) -> Void in })
        XCTAssertEqual(chan.eventHandlers["test-event"]?.count, 2, "the channel should have two callbacks for event \"test-event\"")
        chan.unbind(eventName: "test-event", callbackId: idOne)
        XCTAssertEqual(chan.eventHandlers["test-event"]?.count, 1, "the channel should have one callback for event \"test-event\"")
    }

    // unbindAll(forEventName:) clears every handler for that event name only.
    func testUnbindingAllCallbacksForAGivenEventName() {
        let chan = PusherChannel(name: "test-channel", connection: MockPusherConnection())
        XCTAssertNil(chan.eventHandlers["test-event"], "the channel should have no callbacks for event \"test-event\"")
        let _ = chan.bind(eventName: "test-event", callback: { (data: Any?) -> Void in })
        let _ = chan.bind(eventName: "test-event", eventCallback: { (event: PusherEvent) -> Void in })
        XCTAssertEqual(chan.eventHandlers["test-event"]?.count, 2, "the channel should have two callbacks for event \"test-event\"")
        chan.unbindAll(forEventName: "test-event")
        XCTAssertEqual(chan.eventHandlers["test-event"]?.count, 0, "the channel should have no callbacks for event \"test-event\"")
    }

    // unbindAll() clears handlers across all event names on the channel.
    func testUnbindingAllCallbacksForAGivenChannel() {
        let chan = PusherChannel(name: "test-channel", connection: MockPusherConnection())
        XCTAssertEqual(chan.eventHandlers.count, 0, "the channel should have no callbacks")
        let _ = chan.bind(eventName: "test-event", callback: { (data: Any?) -> Void in })
        let _ = chan.bind(eventName: "test-event", eventCallback: { (event: PusherEvent) -> Void in })
        let _ = chan.bind(eventName: "test-event-3", callback: { (data: Any?) -> Void in })
        XCTAssertEqual(chan.eventHandlers.count, 2, "the channel should have two event names with callbacks")
        chan.unbindAll()
        XCTAssertEqual(chan.eventHandlers.count, 0, "the channel should have no callbacks")
    }

    // The decryption key set on an encrypted channel is stored as given.
    func testCanSetDecryptionKey() {
        let decryptionKey = "EOWC/ked3NtBDvEs9gFwk7x4oZEbH9I0Lz2qkopBxxs="
        let chan = PusherChannel(name: "private-encrypted-test-channel", connection: MockPusherConnection())
        chan.decryptionKey = decryptionKey

        XCTAssertEqual(chan.decryptionKey, decryptionKey)
    }
}
| 58.696203 | 132 | 0.69463 |
7ceb727c52c1a01b90228959033b190ecfba0701 | 12,486 | rs | Rust | projects/SeedGenCli/src/util/settings.rs | Mawex/OriWotwRandomizerClient | 5a0b295e3e80b7e2a7a3f23a93c265bbc155cb1b | [
"MIT"
] | null | null | null | projects/SeedGenCli/src/util/settings.rs | Mawex/OriWotwRandomizerClient | 5a0b295e3e80b7e2a7a3f23a93c265bbc155cb1b | [
"MIT"
] | null | null | null | projects/SeedGenCli/src/util/settings.rs | Mawex/OriWotwRandomizerClient | 5a0b295e3e80b7e2a7a3f23a93c265bbc155cb1b | [
"MIT"
] | null | null | null | use std::{
io,
path::PathBuf,
collections::hash_map::DefaultHasher,
hash::Hasher,
};
use rustc_hash::FxHashSet;
use serde::{Serialize, Deserialize};
use super::{
Difficulty, Glitch, GoalMode,
constants::{DEFAULT_SPAWN, SLUGSTRINGS}
};
/// Representation of settings as they are written by the java-based seed generator
///
/// Boolean flag block of the legacy ("java") seed format; each field's
/// trailing note records how (or whether) it still maps onto the current
/// settings when translated by `read_pre_rustgen`.
#[allow(clippy::struct_excessive_bools)]
#[derive(Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct OldSeedFlags {
    pub force_wisps: bool,      // compability note: used for goal mode logic
    pub force_trees: bool,      // compability note: used for goal mode logic
    pub force_quests: bool,     // compability note: used for goal mode logic
    pub world_tour: bool,       // compability note: used for goal mode logic
    pub no_hints: bool,         // compability note: unused
    pub no_sword: bool,         // compability note: used for sword init
    pub rain: bool,             // compability note: used for day-night-cycle
    pub no_k_s_doors: bool,     // compability note: used for black market
    pub random_spawn: bool,     // compability note: unused
}
/// Representation of settings as they are written by the java-based seed generator
///
/// Mirrors the legacy JSON layout consumed by `read_pre_rustgen`, which
/// translates it into the current [`Settings`] struct.
#[allow(clippy::struct_excessive_bools)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct PreRustSettings {
    pub tps: bool,
    pub spoilers: bool,
    pub unsafe_paths: bool,
    pub gorlek_paths: bool,
    pub glitch_paths: bool,
    pub quest_locs: bool,
    pub output_folder: PathBuf,
    pub flags: OldSeedFlags,
    pub web_conn: bool,
    pub bonus_items: bool,
    pub debug_info: bool,
    pub seir_launch: bool,
    pub spawn_loc: String,
    pub header_list: Vec<PathBuf>,
}
/// Parses `json` in the legacy java-generator format and translates it into
/// the current [`Settings`]: pathset booleans collapse to a [`Difficulty`],
/// `glitch_paths` expands to the full glitch list, and the old boolean
/// toggles map onto header files, spawn mode and goal modes. Fields the
/// legacy format did not carry fall back to `Settings::default()`.
fn read_pre_rustgen(json: &str) -> Result<Settings, io::Error> {
    let old_settings: PreRustSettings = serde_json::from_str(json)?;

    // unsafe > gorlek > moki, mirroring the old mutually-inclusive booleans.
    let difficulty = if old_settings.unsafe_paths {
        Difficulty::Unsafe
    } else if old_settings.gorlek_paths {
        Difficulty::Gorlek
    } else {
        Difficulty::Moki
    };

    // The legacy format had a single on/off switch for all glitches.
    let glitches = if old_settings.glitch_paths {
        vec![
            Glitch::SwordSentryJump,
            Glitch::HammerSentryJump,
            Glitch::ShurikenBreak,
            Glitch::SentryBreak,
            Glitch::HammerBreak,
            Glitch::SpearBreak,
            Glitch::SentryBurn,
            Glitch::RemoveKillPlane,
        ]
    } else { Vec::default() };

    // Old boolean toggles are now expressed as header files.
    let mut header_list = old_settings.header_list;
    if old_settings.tps { header_list.push(PathBuf::from("teleporters")); }
    if !old_settings.quest_locs { header_list.push(PathBuf::from("no_quests")); }
    if old_settings.bonus_items { header_list.push(PathBuf::from("bonus_items")); }
    if old_settings.seir_launch { header_list.push(PathBuf::from("launch_on_seir")); }
    if !old_settings.flags.no_hints { header_list.push(PathBuf::from("hints")); }
    if !old_settings.flags.no_sword { header_list.push(PathBuf::from("spawn_with_sword")); }
    if !old_settings.flags.rain { header_list.push(PathBuf::from("no_rain")); }
    if old_settings.flags.no_k_s_doors { header_list.push(PathBuf::from("no_ks_doors")); }

    let spawn_loc = if old_settings.flags.random_spawn { Spawn::Random } else { Spawn::Set(old_settings.spawn_loc) };

    let mut goalmodes = FxHashSet::default();
    if old_settings.flags.force_wisps { goalmodes.insert(GoalMode::Wisps); }
    if old_settings.flags.force_trees { goalmodes.insert(GoalMode::Trees); }
    if old_settings.flags.force_quests { goalmodes.insert(GoalMode::Quests); }
    if old_settings.flags.world_tour { goalmodes.insert(GoalMode::Relics); }

    Ok(Settings {
        difficulty,
        glitches,
        goalmodes,
        spoilers: old_settings.spoilers,
        web_conn: old_settings.web_conn,
        spawn_loc,
        header_list,
        ..Settings::default()
    })
}
/// Settings layout used before version 0.13.2, when difficulty and glitches
/// were a single string list of "pathsets"; translated to the current
/// [`Settings`] by `read_pre_0_13_2`.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Pre0_13_2Settings {
    pub version: Option<String>,
    pub presets: Vec<PathBuf>,
    pub worlds: usize,
    pub players: Vec<String>,
    pub pathsets: Vec<String>,
    pub goalmodes: FxHashSet<GoalMode>,
    pub spawn_loc: Spawn,
    pub spoilers: bool,
    pub web_conn: bool,
    pub hard: bool,
    pub header_list: Vec<PathBuf>,
    pub header_args: Vec<String>,
}
/// Parses `json` in the pre-0.13.2 format: the old `pathsets` string list is
/// split into a [`Difficulty`] (highest named tier wins) and a glitch list;
/// unrecognized pathset names are ignored. All other fields carry over
/// unchanged.
fn read_pre_0_13_2(json: &str) -> Result<Settings, io::Error> {
    let old_settings: Pre0_13_2Settings = serde_json::from_str(json)?;

    let mut difficulty = Difficulty::Moki;
    let mut glitches = Vec::new();

    for pathset in old_settings.pathsets {
        match &pathset[..] {
            "Gorlek" => if difficulty < Difficulty::Gorlek { difficulty = Difficulty::Gorlek },
            "Unsafe" => if difficulty < Difficulty::Unsafe { difficulty = Difficulty::Unsafe },
            "SwordSentryJump" => glitches.push(Glitch::SwordSentryJump),
            "HammerSentryJump" => glitches.push(Glitch::HammerSentryJump),
            "ShurikenBreak" => glitches.push(Glitch::ShurikenBreak),
            "SentryBreak" => glitches.push(Glitch::SentryBreak),
            "HammerBreak" => glitches.push(Glitch::HammerBreak),
            "SpearBreak" => glitches.push(Glitch::SpearBreak),
            "SentryBurn" => glitches.push(Glitch::SentryBurn),
            "RemoveKillPlane" => glitches.push(Glitch::RemoveKillPlane),
            _ => {},  // silently skip unknown pathset names
        }
    }

    Ok(Settings {
        version: old_settings.version,
        presets: old_settings.presets,
        worlds: old_settings.worlds,
        players: old_settings.players,
        difficulty,
        glitches,
        goalmodes: old_settings.goalmodes,
        spawn_loc: old_settings.spawn_loc,
        spoilers: old_settings.spoilers,
        web_conn: old_settings.web_conn,
        hard: old_settings.hard,
        header_list: old_settings.header_list,
        header_args: old_settings.header_args,
    })
}
/// How the spawn location is chosen: a fixed, named location (`Set`) or one
/// of two randomized modes.
// NOTE(review): the distinction between `Random` and `FullyRandom` is not
// visible from this file — confirm against the spawn-selection code.
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
pub enum Spawn {
    Set(String),
    Random,
    FullyRandom,
}
impl Default for Spawn {
    /// Defaults to the fixed spawn location named by `DEFAULT_SPAWN`.
    fn default() -> Self {
        Self::Set(DEFAULT_SPAWN.to_owned())
    }
}
// TODO output folder?
/// The full set of seed-generation settings. Serialized as JSON into seeds
/// (on the `// Config:` line read by [`Settings::from_seed`]) and into
/// preset files.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Settings {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
    pub presets: Vec<PathBuf>,
    pub worlds: usize,
    pub players: Vec<String>,
    pub difficulty: Difficulty,
    pub glitches: Vec<Glitch>,
    pub goalmodes: FxHashSet<GoalMode>,
    pub spawn_loc: Spawn,
    pub spoilers: bool,
    pub web_conn: bool,
    pub hard: bool,
    pub header_list: Vec<PathBuf>,
    pub header_args: Vec<String>,
}
impl Default for Settings {
    /// Baseline settings: one world, spoilers on, default difficulty, and no
    /// glitches, goal modes, presets, headers or multiplayer configuration.
    fn default() -> Self {
        Self {
            version: None,
            presets: Vec::new(),
            worlds: 1,
            players: Vec::new(),
            difficulty: Difficulty::default(),
            glitches: Vec::new(),
            goalmodes: FxHashSet::default(),
            spawn_loc: Spawn::default(),
            spoilers: true,
            web_conn: false,
            hard: false,
            header_list: Vec::new(),
            header_args: Vec::new(),
        }
    }
}
impl Settings {
    /// Parses settings JSON, trying the current format first and then falling
    /// back to the pre-0.13.2 and the legacy java-generator layouts. On total
    /// failure the error reported is the one from the current-format attempt.
    // NOTE(review): "compability" is a typo, but the function is public API
    // and cannot be renamed without breaking callers.
    pub fn compability_parse(json: &str) -> Result<Settings, String> {
        serde_json::from_str(&json).or_else(|err| { // current
            read_pre_0_13_2(&json).or_else(|_| { // < 0.13.2
                read_pre_rustgen(&json).map_err(|_| format!("Failed to read settings: {}", err)) // javagen
            })
        })
    }

    /// Extracts settings from a seed file by scanning for a `// Config: `
    /// line; returns defaults if no such line exists. If multiple config
    /// lines are present, the last one wins.
    pub fn from_seed(seed: &str) -> Result<Settings, String> {
        let mut settings = Settings::default();

        for line in seed.lines() {
            if let Some(config) = line.strip_prefix("// Config: ") {
                settings = Settings::compability_parse(&config)?;
            }
        }

        Ok(settings)
    }

    /// Loads settings from a preset file, forcing a `.json` extension and
    /// resolving the path through the project's "presets" file helper.
    pub fn from_preset(mut preset: PathBuf) -> Result<Settings, String> {
        preset.set_extension("json");
        let content = super::read_file(&preset, "presets")?;
        Settings::compability_parse(&content)
    }

    /// Serializes these settings to JSON (the current format).
    pub fn write(&self) -> Result<String, String> {
        serde_json::to_string(&self).map_err(|err| format!("Invalid Settings: {}", err))
    }

    /// Folds `other` into `self`: higher worlds count (and its player list),
    /// higher difficulty and the union of glitches/goalmodes/headers win;
    /// spoilers only stay on if both sides have them; web_conn/hard turn on
    /// if either side has them; a non-default spawn overrides.
    fn merge(&mut self, other: Settings) {
        let Settings {
            version: other_version,
            presets: mut other_presets,
            worlds: other_worlds,
            players: other_players,
            difficulty: other_difficulty,
            glitches: other_glitches,
            goalmodes: other_goalmodes,
            spawn_loc: other_spawn_loc,
            spoilers: other_spoilers,
            web_conn: other_web_conn,
            hard: other_hard,
            header_list: mut other_header_list,
            header_args: mut other_header_args,
        } = other;

        if other_version.is_some() {
            self.version = other_version;
        }
        self.presets.append(&mut other_presets);
        if self.worlds < other_worlds {
            self.worlds = other_worlds;
            self.players = other_players;
        }
        if self.difficulty < other_difficulty {
            self.difficulty = other_difficulty;
        }
        for glitch in other_glitches {
            self.glitches.push(glitch);
        }
        self.goalmodes.extend(other_goalmodes);
        if other_spawn_loc != Spawn::default() {
            self.spawn_loc = other_spawn_loc;
        }
        self.spoilers = self.spoilers && other_spoilers;
        self.web_conn = self.web_conn || other_web_conn;
        self.hard = self.hard || other_hard;
        self.header_list.append(&mut other_header_list);
        self.header_args.append(&mut other_header_args);
    }

    /// Resolves every referenced preset file (recursively, since presets may
    /// themselves reference presets), merges them in order, then merges these
    /// settings on top. The returned settings carry an empty preset list.
    pub fn apply_presets(mut self) -> Result<Settings, String> {
        let mut merged_settings = Settings::default();

        for preset in self.presets {
            let preset = Settings::from_preset(preset)?;
            let preset = preset.apply_presets()?;

            merged_settings.merge(preset);
        }

        self.presets = Vec::new();
        merged_settings.merge(self);

        Ok(merged_settings)
    }

    /// Builds a deterministic human-readable slug from these settings and the
    /// seed string by hashing their bytes and using chunks of the hash to
    /// index into each word list in `SLUGSTRINGS`.
    pub fn slugify(&self, seed: &str) -> String {
        let string = serde_json::to_string(&self).unwrap();

        let mut hasher = DefaultHasher::new();
        hasher.write(string.as_bytes());
        hasher.write(seed.as_bytes());
        let hash = hasher.finish();

        let mut slug = String::new();

        for (index, slug_strings) in SLUGSTRINGS.iter().enumerate() {
            let length = slug_strings.len();

            // shift = floor(log2(length)): the widest bit count that still
            // indexes within this word list.
            let mut shift = 1;
            loop {
                if length < 2_usize.pow(shift) {
                    shift -= 1;
                    break;
                }
                shift += 1;
            };

            // Take `shift` bits of the hash (at a list-dependent position)
            // as the word index.
            let word_index = (hash >> (index as u32 * shift)) & (2_u32.pow(shift) - 1) as u64;

            slug += slug_strings[word_index as usize];
        }

        slug
    }
}
/// Reads the spawn location out of a seed file.
///
/// Scans for the first line of the form `Spawn: ... // <name>` and returns
/// the trimmed text after the `//`; errors if a `Spawn:` line has no `//`.
/// Seeds without any `Spawn:` line yield `DEFAULT_SPAWN`.
pub fn read_spawn(seed: &str) -> Result<String, String> {
    for line in seed.lines() {
        if let Some(spawn) = line.strip_prefix("Spawn:") {
            let comment = spawn.find("//").ok_or_else(|| String::from("Failed to read Spawn location"))?;
            return Ok(spawn[comment + 2..].trim().to_string());
        }
    }
    Ok(DEFAULT_SPAWN.to_string())
}
#[cfg(test)]
mod tests {
    use super::*;
    use rand::{
        Rng,
        distributions::{Distribution, Alphanumeric},
    };

    /// Generates 10000 random (settings, seed-string) combinations and
    /// asserts that `Settings::slugify` never produces the same slug twice.
    #[test]
    fn slugification() {
        let mut rng = rand::thread_rng();

        let mut slugs = FxHashSet::default();

        for _ in 0..10000 {
            let mut settings = Settings::default();

            // Each goal mode is included independently with probability 0.25.
            let goalmodes = vec![GoalMode::Wisps,GoalMode::Trees,GoalMode::Quests,GoalMode::Relics];
            for goalmode in goalmodes {
                if rng.gen_bool(0.25) {
                    settings.goalmodes.insert(goalmode);
                }
            }

            // Random alphanumeric seed string of 8..20 characters.
            let mut seed = String::new();
            for _ in 0.. rng.gen_range(8..20) {
                seed.push(char::from(Alphanumeric.sample(&mut rng)));
            }

            let slug = settings.slugify(&seed);

            if slugs.contains(&slug) {
                panic!("After {} settings, two had the same slug: {}", slugs.len(), slug);
            } else {
                slugs.insert(slug);
            }
        }
    }
}
| 33.745946 | 134 | 0.604998 |
96f5f929c0b84307d8dddca7c8f5cd637cde07ed | 979 | hpp | C++ | third_party/boost/simd/function/shl.hpp | SylvainCorlay/pythran | 908ec070d837baf77d828d01c3e35e2f4bfa2bfa | [
"BSD-3-Clause"
] | 6 | 2018-02-25T22:23:33.000Z | 2021-01-15T15:13:12.000Z | third_party/boost/simd/function/shl.hpp | SylvainCorlay/pythran | 908ec070d837baf77d828d01c3e35e2f4bfa2bfa | [
"BSD-3-Clause"
] | null | null | null | third_party/boost/simd/function/shl.hpp | SylvainCorlay/pythran | 908ec070d837baf77d828d01c3e35e2f4bfa2bfa | [
"BSD-3-Clause"
] | 7 | 2017-12-12T12:36:31.000Z | 2020-02-10T14:27:07.000Z | //==================================================================================================
/*!
@file
@copyright 2016 NumScale SAS
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE.md or copy at http://boost.org/LICENSE_1_0.txt)
*/
//==================================================================================================
#ifndef BOOST_SIMD_FUNCTION_SHL_HPP_INCLUDED
#define BOOST_SIMD_FUNCTION_SHL_HPP_INCLUDED
#if defined(DOXYGEN_ONLY)
namespace boost { namespace simd
{

 /*!
    @ingroup group-operator

    This function object returns the first operand shifted left by the
    second one.

    Infix notation can be used with operator '<<'.

    @par Header <boost/simd/function/shl.hpp>

    This is an alias of @ref shift_left
  **/
  Value shl(Value const& x, IntegerValue const& n);
} }
#endif
#include <boost/simd/function/scalar/shift_left.hpp>
#include <boost/simd/function/simd/shl.hpp>
#endif
| 26.459459 | 100 | 0.585291 |
536b640a08e7e34abbe5f7de75a1dc7636e23db0 | 967 | kt | Kotlin | generators/app/templates/app/src/main/java/com/wiredcraft/androidtemplate/factory/CoreViewModelFactory.kt | seazon/generator-android-boilerplate | 465205770e9c1221c17a7aff110324cf0de1f3d0 | [
"Apache-2.0"
] | null | null | null | generators/app/templates/app/src/main/java/com/wiredcraft/androidtemplate/factory/CoreViewModelFactory.kt | seazon/generator-android-boilerplate | 465205770e9c1221c17a7aff110324cf0de1f3d0 | [
"Apache-2.0"
] | null | null | null | generators/app/templates/app/src/main/java/com/wiredcraft/androidtemplate/factory/CoreViewModelFactory.kt | seazon/generator-android-boilerplate | 465205770e9c1221c17a7aff110324cf0de1f3d0 | [
"Apache-2.0"
] | null | null | null | package <%= appPackage %>.factory
import androidx.lifecycle.ViewModel
import androidx.lifecycle.ViewModelProvider
import javax.inject.Inject
import javax.inject.Provider
/**
 * [ViewModelProvider.Factory] backed by an injected map from ViewModel
 * classes to their [Provider]s (presumably populated via a dependency-
 * injection multibinding — confirm against the DI module).
 *
 * Looks up the requested class directly, falling back to the first map key
 * the requested class is assignable from; throws [IllegalArgumentException]
 * when no entry matches.
 */
class CoreViewModelFactory @Inject constructor(
    private val creators: Map<Class<out ViewModel>,
            @JvmSuppressWildcards Provider<ViewModel>>
) : ViewModelProvider.Factory {

    override fun <T : ViewModel?> create(modelClass: Class<T>): T {
        // Exact class match first, then the first subtype-compatible entry.
        val creator = creators[modelClass]
            ?: creators.entries.firstOrNull { modelClass.isAssignableFrom(it.key) }?.value
            ?: throw IllegalArgumentException("Unknown model class: ${modelClass.canonicalName}")
        try {
            return creator.get() as T
        } catch (e: Exception) {
            // Surface provider failures as unchecked exceptions.
            throw RuntimeException(e)
        }
    }
}
28edbd2b037fc7c613eb2e7035def72ff55ec705 | 9,311 | cpp | C++ | ZeroEngine/Core/ModuleVehicle.cpp | Germanins6/Zero-Engine | db0dacdf2951700c94b41d32db3bac99b04a110a | [
"MIT"
] | null | null | null | ZeroEngine/Core/ModuleVehicle.cpp | Germanins6/Zero-Engine | db0dacdf2951700c94b41d32db3bac99b04a110a | [
"MIT"
] | 7 | 2020-10-05T21:56:42.000Z | 2020-12-30T18:22:07.000Z | ZeroEngine/Core/ModuleVehicle.cpp | Germanins6/Zero-Engine | db0dacdf2951700c94b41d32db3bac99b04a110a | [
"MIT"
] | 1 | 2021-01-10T20:46:28.000Z | 2021-01-10T20:46:28.000Z | #include "Application.h"
#include "ModuleVehicle.h"
using namespace physx;
// The vehicle itself is created lazily via CreateVehicle(); until then the
// pointer stays null and Update() skips simulation.
ModuleVehicle::ModuleVehicle(Application* app, bool start_enabled) : Module(app, start_enabled) {
	gVehicle4W = nullptr;
}
// Empty destructor: no teardown is performed here.
// NOTE(review): gVehicle4W / gGroundPlane / gBatchQuery / gFrictionPairs are
// not released in this file's visible code — confirm cleanup happens elsewhere.
ModuleVehicle::~ModuleVehicle() {

}
// Per-frame vehicle simulation step. When a vehicle exists: (1) smooth the
// raw control inputs into the vehicle, (2) run the batched suspension
// raycasts, (3) advance the PhysX vehicle model by dt under scene gravity,
// and (4) record whether the vehicle is airborne for next frame's smoothing.
update_status ModuleVehicle::Update(float dt) {

	if(gVehicle4W != nullptr){

		//Cycle through the driving modes to demonstrate how to accelerate/reverse/brake/turn etc.
		//incrementDrivingMode(dt);

		//Update the control inputs for the vehicle. Digital (key) and analog
		//(pad) inputs go through different smoothing data.
		if (gMimicKeyInputs)
			PxVehicleDrive4WSmoothDigitalRawInputsAndSetAnalogInputs(gKeySmoothingData, gSteerVsForwardSpeedTable, gVehicleInputData, dt, gIsVehicleInAir, *gVehicle4W);
		else
			PxVehicleDrive4WSmoothAnalogRawInputsAndSetAnalogInputs(gPadSmoothingData, gSteerVsForwardSpeedTable, gVehicleInputData, dt, gIsVehicleInAir, *gVehicle4W);

		//Suspension raycasts for the single vehicle.
		PxVehicleWheels* vehicles[1] = { gVehicle4W };
		PxRaycastQueryResult* raycastResults = gVehicleSceneQueryData->getRaycastQueryResultBuffer(0);
		const PxU32 raycastResultsSize = gVehicleSceneQueryData->getQueryResultBufferSize();
		PxVehicleSuspensionRaycasts(gBatchQuery, 1, vehicles, raycastResultsSize, raycastResults);

		//Vehicle update.
		const PxVec3 grav = App->physX->mScene->getGravity();
		PxWheelQueryResult wheelQueryResults[PX_MAX_NB_WHEELS];
		PxVehicleWheelQueryResult vehicleQueryResults[1] = { {wheelQueryResults, gVehicle4W->mWheelsSimData.getNbWheels()} };
		PxVehicleUpdates(dt, grav, *gFrictionPairs, 1, vehicles, vehicleQueryResults);

		//Work out if the vehicle is in the air (a sleeping actor counts as grounded).
		gIsVehicleInAir = gVehicle4W->getRigidDynamicActor()->isSleeping() ? false : PxVehicleIsInAir(vehicleQueryResults[0]);
	}

	return UPDATE_CONTINUE;
}
// One-time setup of the PhysX Vehicle SDK: initializes it against the physics
// instance, sets the up (Y) and forward (Z) basis vectors and the
// velocity-change update mode, allocates the batched scene-query data used
// for suspension raycasts, and builds the tire/surface friction table.
// Always returns true.
bool ModuleVehicle::Init() {

	PxInitVehicleSDK(*App->physX->mPhysics);
	PxVehicleSetBasisVectors(PxVec3(0, 1, 0), PxVec3(0, 0, 1));
	PxVehicleSetUpdateMode(PxVehicleUpdateMode::eVELOCITY_CHANGE);

	//Create the batched scene queries for the suspension raycasts.
	gVehicleSceneQueryData = snippetvehicle::VehicleSceneQueryData::allocate(1, PX_MAX_NB_WHEELS, 1, 1, WheelSceneQueryPreFilterBlocking, NULL, App->physX->mAllocator);
	gBatchQuery = snippetvehicle::VehicleSceneQueryData::setUpBatchedSceneQuery(0, *gVehicleSceneQueryData, App->physX->mScene);

	//Create the friction table for each combination of tire and surface type.
	gFrictionPairs = snippetvehicle::createFrictionPairs(App->physX->mMaterial);

	return true;
}
// Spawns a drivable ground plane plus a 4-wheeled vehicle built from the
// given chassis mass/dimensions and per-wheel mass/radius/width, drops the
// vehicle just above the plane, and leaves it at rest in first gear with
// automatic gear changes enabled.
void ModuleVehicle::CreateVehicle(PxF32 mass, PxVec3 dimensions, PxF32 wmass, PxF32 wradius, PxF32 wwidth) {
	//Create a plane to drive on.
	PxFilterData groundPlaneSimFilterData(COLLISION_FLAG_GROUND, COLLISION_FLAG_GROUND_AGAINST, 0, 0);
	gGroundPlane = snippetvehicle::createDrivablePlane(groundPlaneSimFilterData, App->physX->mMaterial, App->physX->mPhysics);
	App->physX->mScene->addActor(*gGroundPlane);
	//Create a vehicle that will drive on the plane.
	snippetvehicle::VehicleDesc vehicleDesc = initVehicleDesc(mass, dimensions, wmass, wradius, wwidth);
	gVehicle4W = snippetvehicle::createVehicle4W(vehicleDesc, App->physX->mPhysics, App->physX->mCooking);
	//Start pose: half the chassis height plus a wheel radius plus 1m of clearance.
	PxTransform startTransform(PxVec3(0, (vehicleDesc.chassisDims.y * 0.5f + vehicleDesc.wheelRadius + 1.0f), 0), PxQuat(PxIdentity));
	gVehicle4W->getRigidDynamicActor()->setGlobalPose(startTransform);
	App->physX->mScene->addActor(*gVehicle4W->getRigidDynamicActor());
	//Set the vehicle to rest in first gear.
	//Set the vehicle to use auto-gears.
	gVehicle4W->setToRestState();
	gVehicle4W->mDriveDynData.forceGearChange(PxVehicleGearsData::eFIRST);
	gVehicle4W->mDriveDynData.setUseAutoGears(true);
	gVehicleModeTimer = 0.0f;
	gVehicleOrderProgress = 0;
}
// Builds a 4-wheeled VehicleDesc from user-supplied chassis and wheel
// parameters, following the stock PhysX snippet: the chassis MOI is that of a
// cuboid (the y term scaled by 0.8 for easier steering), the wheel MOI is
// that of a solid cylinder, and the centre of mass sits 0.65m above the
// chassis base and 0.25m towards the front.
snippetvehicle::VehicleDesc ModuleVehicle::initVehicleDesc(PxF32 mass, PxVec3 dimensions, PxF32 wmass, PxF32 wradius, PxF32 wwidth)
{
	//Set up the chassis mass, dimensions, moment of inertia, and center of mass offset.
	const PxF32 chassisMass = mass;
	const PxVec3 chassisDims(dimensions);
	const PxVec3 chassisMOI
	((chassisDims.y * chassisDims.y + chassisDims.z * chassisDims.z) * chassisMass / 12.0f,
		(chassisDims.x * chassisDims.x + chassisDims.z * chassisDims.z) * 0.8f * chassisMass / 12.0f,
		(chassisDims.x * chassisDims.x + chassisDims.y * chassisDims.y) * chassisMass / 12.0f);
	const PxVec3 chassisCMOffset(0.0f, -chassisDims.y * 0.5f + 0.65f, 0.25f);

	//Set up the wheel mass, radius, width, moment of inertia, and number of wheels.
	const PxF32 wheelMass = wmass;
	const PxF32 wheelRadius = wradius;
	const PxF32 wheelWidth = wwidth;
	const PxF32 wheelMOI = 0.5f * wheelMass * wheelRadius * wheelRadius;
	const PxU32 nbWheels = 4;

	snippetvehicle::VehicleDesc vehicleDesc;
	vehicleDesc.chassisMass = chassisMass;
	vehicleDesc.chassisDims = chassisDims;
	vehicleDesc.chassisMOI = chassisMOI;
	vehicleDesc.chassisCMOffset = chassisCMOffset;
	vehicleDesc.chassisMaterial = App->physX->mMaterial;
	vehicleDesc.chassisSimFilterData = PxFilterData(COLLISION_FLAG_CHASSIS, COLLISION_FLAG_CHASSIS_AGAINST, 0, 0);
	vehicleDesc.wheelMass = wheelMass;
	vehicleDesc.wheelRadius = wheelRadius;
	vehicleDesc.wheelWidth = wheelWidth;
	vehicleDesc.wheelMOI = wheelMOI;
	vehicleDesc.numWheels = nbWheels;
	vehicleDesc.wheelMaterial = App->physX->mMaterial;
	//BUGFIX: this line previously re-assigned chassisSimFilterData, clobbering
	//the chassis collision filter set above and leaving wheelSimFilterData at
	//its default. The wheel flags belong in wheelSimFilterData.
	vehicleDesc.wheelSimFilterData = PxFilterData(COLLISION_FLAG_WHEEL, COLLISION_FLAG_WHEEL_AGAINST, 0, 0);
	return vehicleDesc;
}
// Advances the demo driving mode once gVehicleModeTimer exceeds
// gVehicleModeLifetime: forces the appropriate gear for forward/reverse
// acceleration, then dispatches to the matching start*Mode() input helper.
// Called with the frame timestep; currently only invoked manually (the call
// in Update() is commented out).
void ModuleVehicle::incrementDrivingMode(const PxF32 timestep, DriveMode type)
{
	//LOG("%f %f %f", gVehicle4W->getRigidDynamicActor()->getLinearVelocity().x, gVehicle4W->getRigidDynamicActor()->getLinearVelocity().y, gVehicle4W->getRigidDynamicActor()->getLinearVelocity().z);
	gVehicleModeTimer += timestep;
	if (gVehicleModeTimer > gVehicleModeLifetime)
	{
		//If we move Forwards we change Gear to First
		if (DriveMode::eDRIVE_MODE_ACCEL_FORWARDS == type)
		{
			gVehicle4W->mDriveDynData.forceGearChange(PxVehicleGearsData::eFIRST);
		}
		//If we move Backwards we change Gear to Reverse
		if (DriveMode::eDRIVE_MODE_ACCEL_REVERSE == type)
		{
			gVehicle4W->mDriveDynData.forceGearChange(PxVehicleGearsData::eREVERSE);
		}
		//Start driving in the selected mode.
		DriveMode eDriveMode = type;
		switch (eDriveMode)
		{
		case DriveMode::eDRIVE_MODE_ACCEL_FORWARDS:
			startAccelerateForwardsMode();
			break;
		case DriveMode::eDRIVE_MODE_ACCEL_REVERSE:
			startAccelerateReverseMode();
			break;
		case DriveMode::eDRIVE_MODE_HARD_TURN_LEFT:
			startTurnHardLeftMode();
			break;
		case DriveMode::eDRIVE_MODE_HANDBRAKE_TURN_LEFT:
			startHandbrakeTurnLeftMode();
			break;
		case DriveMode::eDRIVE_MODE_HARD_TURN_RIGHT:
			startTurnHardRightMode();
			break;
		case DriveMode::eDRIVE_MODE_HANDBRAKE_TURN_RIGHT:
			startHandbrakeTurnRightMode();
			break;
		case DriveMode::eDRIVE_MODE_BRAKE:
			startBrakeMode();
			break;
		case DriveMode::eDRIVE_MODE_NONE:
			break;
		};
	}
}
// Input-mode helpers: each one drives gVehicleInputData through either the
// analog (gamepad-style) or digital (keyboard-style) channel, chosen by
// gMimicKeyInputs.

void ModuleVehicle::startAccelerateForwardsMode()
{
	// Full throttle in the currently selected (forward) gear.
	if (!gMimicKeyInputs)
		gVehicleInputData.setAnalogAccel(1.0f);
	else
		gVehicleInputData.setDigitalAccel(true);
}

void ModuleVehicle::startAccelerateReverseMode()
{
	// Shift into reverse first, then apply full throttle.
	gVehicle4W->mDriveDynData.forceGearChange(PxVehicleGearsData::eREVERSE);
	if (!gMimicKeyInputs)
		gVehicleInputData.setAnalogAccel(1.0f);
	else
		gVehicleInputData.setDigitalAccel(true);
}

void ModuleVehicle::startBrakeMode()
{
	// Full brake, no steering input.
	if (!gMimicKeyInputs)
		gVehicleInputData.setAnalogBrake(1.0f);
	else
		gVehicleInputData.setDigitalBrake(true);
}

void ModuleVehicle::startTurnHardLeftMode()
{
	// Hard left: negative analog steer, or the digital steer-left flag.
	if (!gMimicKeyInputs)
		gVehicleInputData.setAnalogSteer(-1.0f);
	else
		gVehicleInputData.setDigitalSteerLeft(true);
}

void ModuleVehicle::startTurnHardRightMode()
{
	// Hard right: positive analog steer, or the digital steer-right flag.
	if (!gMimicKeyInputs)
		gVehicleInputData.setAnalogSteer(1.0f);
	else
		gVehicleInputData.setDigitalSteerRight(true);
}

void ModuleVehicle::startHandbrakeTurnLeftMode()
{
	// Steer left while pulling the handbrake.
	if (!gMimicKeyInputs)
	{
		gVehicleInputData.setAnalogSteer(-1.0f);
		gVehicleInputData.setAnalogHandbrake(1.0f);
	}
	else
	{
		gVehicleInputData.setDigitalSteerLeft(true);
		gVehicleInputData.setDigitalHandbrake(true);
	}
}

void ModuleVehicle::startHandbrakeTurnRightMode()
{
	// Steer right while pulling the handbrake.
	if (!gMimicKeyInputs)
	{
		gVehicleInputData.setAnalogSteer(1.0f);
		gVehicleInputData.setAnalogHandbrake(1.0f);
	}
	else
	{
		gVehicleInputData.setDigitalSteerRight(true);
		gVehicleInputData.setDigitalHandbrake(true);
	}
}
// Clears every input channel (accelerator, steering, brake, handbrake) on
// whichever input model is active, returning the vehicle to a neutral state.
void ModuleVehicle::releaseAllControls()
{
	if (!gMimicKeyInputs)
	{
		gVehicleInputData.setAnalogAccel(0.0f);
		gVehicleInputData.setAnalogSteer(0.0f);
		gVehicleInputData.setAnalogBrake(0.0f);
		gVehicleInputData.setAnalogHandbrake(0.0f);
		return;
	}
	gVehicleInputData.setDigitalAccel(false);
	gVehicleInputData.setDigitalSteerLeft(false);
	gVehicleInputData.setDigitalSteerRight(false);
	gVehicleInputData.setDigitalBrake(false);
	gVehicleInputData.setDigitalHandbrake(false);
}
90efc55a06cd83eaa036a1aa3dd61c527e256eef | 731 | py | Python | improllowup/urls.py | jjolivares/improllow-up | 67d7028426090bdbc6c8fd77c02e687ee11f07aa | [
"MIT"
] | 1 | 2022-02-11T12:23:05.000Z | 2022-02-11T12:23:05.000Z | improllowup/urls.py | jjolivares/improllow-up | 67d7028426090bdbc6c8fd77c02e687ee11f07aa | [
"MIT"
] | null | null | null | improllowup/urls.py | jjolivares/improllow-up | 67d7028426090bdbc6c8fd77c02e687ee11f07aa | [
"MIT"
] | 1 | 2022-01-21T08:21:36.000Z | 2022-01-21T08:21:36.000Z | from django.conf.urls import include, url
from django.contrib import admin
from users.views import logout_user, connection
# Project-level URL configuration.
#
# Each app keeps its own ``urls.py``; they are mounted here under a dedicated
# prefix and namespace (so templates can reverse e.g. ``customers:...``).
# The login form, logout view and Django admin live at the project root.
urlpatterns = [
    url(
        r'^customers/',
        include('customers.urls', namespace="customers", app_name='customers')
    ),
    url(
        r'^users/',
        include('users.urls', namespace="users", app_name='users')
    ),
    url(
        r'^projects/',
        include('projects.urls', namespace="projects", app_name='projects')
    ),
    url(
        r'^tasks/',
        include('tasks.urls', namespace="tasks", app_name='tasks')
    ),
    # Site root shows the login form; logout clears the session.
    url(r'^$', connection, name="login"),
    url(r'^logout$', logout_user, name="logout"),
    url(r'^admin/', include(admin.site.urls)),
]
| 26.107143 | 78 | 0.592339 |
9679457b35cbca5851f3a504bccc312950ed5b0a | 8,216 | php | PHP | resources/views/admin/pages/New folder/price.blade.php | rootmap/apparelspark.com | 87278dfdd523bd0eca59dd7b94ce43274978714c | [
"MIT"
] | null | null | null | resources/views/admin/pages/New folder/price.blade.php | rootmap/apparelspark.com | 87278dfdd523bd0eca59dd7b94ce43274978714c | [
"MIT"
] | null | null | null | resources/views/admin/pages/New folder/price.blade.php | rootmap/apparelspark.com | 87278dfdd523bd0eca59dd7b94ce43274978714c | [
"MIT"
] | null | null | null | @extends('admin.layout.master')
@section('title','Price')
@section('breadcrumb','Price')
@section('content')
<div class="app-content content">
<div class="content-wrapper">
@include('admin.include.breadcrumb')
<div class="content-body"><!-- horizontal grid start -->
@include('admin.include.msg')
<!-- Centered Input start -->
<section class="checkbox-input-grid" id="checkbox-input-grid">
<div class="row match-height">
<div class="col-xl-6 col-lg-12 offset-md-3">
<div class="card">
<div class="card-header">
@if(isset($edit))
<h4 class="card-title">Edit Price</h4>
@else
<h4 class="card-title">Add Price</h4>
@endif
<a class="heading-elements-toggle"><i class="ft-align-justify font-medium-3"></i></a>
<div class="heading-elements">
<ul class="list-inline mb-0">
<li><a data-action="collapse"><i class="ft-minus"></i></a></li>
<li><a data-action="reload"><i class="ft-rotate-cw"></i></a></li>
<li><a data-action="expand"><i class="ft-maximize"></i></a></li>
<li><a data-action="close"><i class="ft-x"></i></a></li>
</ul>
</div>
</div>
<div class="card-content collapse show">
<div class="card-body">
<form enctype="multipart/form-data" method="post"
@if(isset($edit))
action="{{url('admin-site/price/modify/'.$edit->id)}}"
@else
action="{{url('admin-site/price/save')}}"
@endif>
{{ csrf_field() }}
<div class="form-body">
<div class="row">
<label class="col-md-3 text-right">Title</label>
<div class="col-md-8">
<div class="form-group">
<input type="text" class="form-control" placeholder="Title" @if(isset($edit))
value="{{$edit->title}}" @endif name="title">
</div>
</div>
</div>
<div class="row">
<label class="col-md-3 text-right">Position</label>
<div class="col-md-8">
<div class="form-group">
<select class="form-control" name="position">
<option value="0" selected="" disabled="">Please Select Option</option>
<option @if(isset($edit)){{ $edit->position === 'Position 1' ? 'selected' : '' }} @endif value="Position 1">Position 1</option>
<option @if(isset($edit)){{ $edit->position === 'Position 2' ? 'selected' : '' }} @endif value="Position 2">Position 2</option>
<option @if(isset($edit)){{ $edit->position === 'Position 3' ? 'selected' : '' }} @endif value="Position 3">Position 3</option>
</select>
</div>
</div>
</div>
<div class="row contact-repeater">
<label class="col-md-3 text-right">Features</label>
<div class="col-md-8">
<div class="cloneRow">
<div class="input-group mb-1" data-repeater-item>
<input type="text" placeholder="Features" value="" class="form-control" id="example-tel-input" name="features[]">
<div class="input-group-append">
<span class="input-group-btn" id="button-addon2">
<button class="btn btn-danger removeFetRow" type="button" onclick="removeFetRow(this)"><i class="ft-x"></i></button>
</span>
</div>
</div>
</div>
<button type="button" id="createNewRow" class="btn btn-primary">
<i class="icon-plus4"></i> Add new Features
</button>
</div>
</div>
</div>
<div class="form-actions">
<div class="text-center">
<button type="submit" class="btn btn-primary">Submit <i class="ft-thumbs-up position-right"></i></button>
<button type="reset" class="btn btn-warning">Reset <i class="ft-refresh-cw position-right"></i></button>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</section>
<!-- Centered Input end -->
<section id="base-style">
<div class="row">
<div class="col-12">
<div class="card">
<div class="card-header">
<h4 class="card-title">Price List</h4>
<a class="heading-elements-toggle"><i class="fa fa-ellipsis-v font-medium-3"></i></a>
<div class="heading-elements">
<ul class="list-inline mb-0">
<li><a data-action="collapse"><i class="ft-minus"></i></a></li>
<li><a data-action="reload"><i class="ft-rotate-cw"></i></a></li>
<li><a data-action="expand"><i class="ft-maximize"></i></a></li>
<li><a data-action="close"><i class="ft-x"></i></a></li>
</ul>
</div>
</div>
<div class="card-content collapse show">
<div class="card-body card-dashboard">
<table class="table table-striped table-bordered base-style">
<thead>
<tr>
<th>#</th>
<th>Title</th>
<th>Position</th>
<th>Action</th>
</tr>
</thead>
<tbody>
@if(isset($data))
@foreach($data as $row)
<tr>
<td>{{ $row->id }}</td>
<td>{{ $row->title }}</td>
<td>{{ $row->position }}</td>
<td>
<a href="{{url('admin-site/price/edit/'.$row->id)}}" title="Edit" class="btn btn-icon btn-outline-primary"><i class="fa fa-pencil-square-o"></i></a>
<a href="{{url('admin-site/price/delete/'.$row->id)}}" title="Delete" class="btn btn-icon btn-outline-danger"><i class="ft-trash-2"></i></a>
</td>
</tr>
@endforeach
@endif
</tbody>
<tfoot>
<tr>
<th>#</th>
<th>Title</th>
<th>Position</th>
<th>Action</th>
</tr>
</tfoot>
</table>
</div>
</div>
</div>
</div>
</div>
</section>
</div>
</div>
</div>
<!-- ////////////////////////////////////////////////////////////////////////////-->
<!--/ Base style table -->
@endsection
@section('css')
<link rel="stylesheet" type="text/css" href="{{url('admin/app-assets/vendors/css/tables/datatable/datatables.min.css')}}">
<link rel="stylesheet" type="text/css" href="{{url('admin/app-assets/css/core/menu/menu-types/vertical-menu-modern.css')}}">
<link rel="stylesheet" type="text/css" href="{{url('admin/app-assets/css/core/colors/palette-gradient.min.css')}}">
@endsection
@section('js')
<script src="{{url('admin/app-assets/vendors/js/tables/datatable/datatables.min.js')}}"></script>
<script src="{{url('admin/app-assets/js/scripts/tables/datatables/datatable-styling.min.js')}}"></script>
<script src="{{url('admin/app-assets/vendors/js/forms/repeater/jquery.repeater.min.js')}}"></script>
<script src="{{url('admin/app-assets/js/scripts/forms/form-repeater.min.js')}}"></script>
<script type="text/javascript">
$(document).ready(function(){
	// "Add new Features" button: duplicate the last feature row so the user
	// can enter another features[] value.
	$("#createNewRow").click(function(){
		//$(".cloneRow").clone().insertAfter("contact-repeater:last");
		$(".cloneRow:last").after($('.cloneRow:last').clone());
	});
});
// Remove-button handler (wired via inline onclick): walks up from the clicked
// button to the enclosing row <div> and removes it.
// NOTE(review): the parameter shadows the function name; it is the clicked
// button element, not the row.
function removeFetRow(removeFetRow)
{
	$(removeFetRow).parent().parent().parent('div').remove();
}
</script>
@endsection
| 42.350515 | 184 | 0.473832 |
b671e3c49d7612efd88716256eed8479b48db65c | 3,614 | asm | Assembly | Project 8/VM translator-final/Translator-1/asm files/StaticsTest.asm | zychosen/nand2tetris | 1627cd230339778af6a854497e79d86eb97361ed | [
"MIT"
] | 1 | 2021-09-19T09:16:29.000Z | 2021-09-19T09:16:29.000Z | Project 8/VM translator-final/Translator-1/asm files/StaticsTest.asm | zychosen/nand2tetris | 1627cd230339778af6a854497e79d86eb97361ed | [
"MIT"
] | null | null | null | Project 8/VM translator-final/Translator-1/asm files/StaticsTest.asm | zychosen/nand2tetris | 1627cd230339778af6a854497e79d86eb97361ed | [
"MIT"
] | null | null | null | @256
D=A
@SP
M=D
@$ret.0
D=A
@SP
M=M+1
A=M-1
M=D
@LCL
D=M
@SP
M=M+1
A=M-1
M=D
@ARG
D=M
@SP
M=M+1
A=M-1
M=D
@THIS
D=M
@SP
M=M+1
A=M-1
M=D
@THAT
D=M
@SP
M=M+1
A=M-1
M=D
@SP
D=M
@5
D=D-A
@0
D=D-A
@ARG
M=D
@SP
D=M
@LCL
M=D
@Sys.init
0;JMP
($ret.0)
(Class1.set)
@0
D=A
@R13
M=D
@SKIP.0
D;JEQ
(SETUP.0)
@SP
M=M+1
A=M-1
M=0
@R13
M=M-1
D=M
@SETUP.0
D;JNE
(SKIP.0)
@0
D=A
@ARG
D=M+D
A=D
D=M
@SP
A=M
M=D
@SP
M=M+1
@Class1.0
D=A
@R15
M=D
@SP
M=M-1
A=M
D=M
@R15
A=M
M=D
@1
D=A
@ARG
D=M+D
A=D
D=M
@SP
A=M
M=D
@SP
M=M+1
@Class1.1
D=A
@R15
M=D
@SP
M=M-1
A=M
D=M
@R15
A=M
M=D
@0
D=A
@SP
AM=M+1
A=A-1
M=D
@LCL
D=M
@R13
M=D
@5
A=D-A
D=M
@R14
M=D
@SP
AM=M-1
D=M
@ARG
A=M
M=D
D=M+1
@ARG
D=M+1
@SP
M=D
@1
D=A
@R13
A=M-D
D=M
@THAT
M=D
@2
D=A
@R13
A=M-D
D=M
@THIS
M=D
@3
D=A
@R13
A=M-D
D=M
@ARG
M=D
@4
D=A
@R13
A=M-D
D=M
@LCL
M=D
@R14
A=M
0;JMP
(Class1.get)
@0
D=A
@R13
M=D
@SKIP.1
D;JEQ
(SETUP.1)
@SP
M=M+1
A=M-1
M=0
@R13
M=M-1
D=M
@SETUP.1
D;JNE
(SKIP.1)
@Class1.0
D=A
A=D
D=M
@SP
A=M
M=D
@SP
M=M+1
@Class1.1
D=A
A=D
D=M
@SP
A=M
M=D
@SP
M=M+1
@SP
A=M-1
D=M
@SP
M=M-1
A=M-1
MD=M-D
@LCL
D=M
@R13
M=D
@5
A=D-A
D=M
@R14
M=D
@SP
AM=M-1
D=M
@ARG
A=M
M=D
D=M+1
@ARG
D=M+1
@SP
M=D
@1
D=A
@R13
A=M-D
D=M
@THAT
M=D
@2
D=A
@R13
A=M-D
D=M
@THIS
M=D
@3
D=A
@R13
A=M-D
D=M
@ARG
M=D
@4
D=A
@R13
A=M-D
D=M
@LCL
M=D
@R14
A=M
0;JMP
(Class2.set)
@0
D=A
@R13
M=D
@SKIP.2
D;JEQ
(SETUP.2)
@SP
M=M+1
A=M-1
M=0
@R13
M=M-1
D=M
@SETUP.2
D;JNE
(SKIP.2)
@0
D=A
@ARG
D=M+D
A=D
D=M
@SP
A=M
M=D
@SP
M=M+1
@Class2.0
D=A
@R15
M=D
@SP
M=M-1
A=M
D=M
@R15
A=M
M=D
@1
D=A
@ARG
D=M+D
A=D
D=M
@SP
A=M
M=D
@SP
M=M+1
@Class2.1
D=A
@R15
M=D
@SP
M=M-1
A=M
D=M
@R15
A=M
M=D
@0
D=A
@SP
AM=M+1
A=A-1
M=D
@LCL
D=M
@R13
M=D
@5
A=D-A
D=M
@R14
M=D
@SP
AM=M-1
D=M
@ARG
A=M
M=D
D=M+1
@ARG
D=M+1
@SP
M=D
@1
D=A
@R13
A=M-D
D=M
@THAT
M=D
@2
D=A
@R13
A=M-D
D=M
@THIS
M=D
@3
D=A
@R13
A=M-D
D=M
@ARG
M=D
@4
D=A
@R13
A=M-D
D=M
@LCL
M=D
@R14
A=M
0;JMP
(Class2.get)
@0
D=A
@R13
M=D
@SKIP.3
D;JEQ
(SETUP.3)
@SP
M=M+1
A=M-1
M=0
@R13
M=M-1
D=M
@SETUP.3
D;JNE
(SKIP.3)
@Class2.0
D=A
A=D
D=M
@SP
A=M
M=D
@SP
M=M+1
@Class2.1
D=A
A=D
D=M
@SP
A=M
M=D
@SP
M=M+1
@SP
A=M-1
D=M
@SP
M=M-1
A=M-1
MD=M-D
@LCL
D=M
@R13
M=D
@5
A=D-A
D=M
@R14
M=D
@SP
AM=M-1
D=M
@ARG
A=M
M=D
D=M+1
@ARG
D=M+1
@SP
M=D
@1
D=A
@R13
A=M-D
D=M
@THAT
M=D
@2
D=A
@R13
A=M-D
D=M
@THIS
M=D
@3
D=A
@R13
A=M-D
D=M
@ARG
M=D
@4
D=A
@R13
A=M-D
D=M
@LCL
M=D
@R14
A=M
0;JMP
(Sys.init)
@0
D=A
@R13
M=D
@SKIP.4
D;JEQ
(SETUP.4)
@SP
M=M+1
A=M-1
M=0
@R13
M=M-1
D=M
@SETUP.4
D;JNE
(SKIP.4)
@6
D=A
@SP
AM=M+1
A=A-1
M=D
@8
D=A
@SP
AM=M+1
A=A-1
M=D
@$ret.1
D=A
@SP
M=M+1
A=M-1
M=D
@LCL
D=M
@SP
M=M+1
A=M-1
M=D
@ARG
D=M
@SP
M=M+1
A=M-1
M=D
@THIS
D=M
@SP
M=M+1
A=M-1
M=D
@THAT
D=M
@SP
M=M+1
A=M-1
M=D
@SP
D=M
@5
D=D-A
@2
D=D-A
@ARG
M=D
@SP
D=M
@LCL
M=D
@Class1.set
0;JMP
($ret.1)
@0
D=A
@5
D=A+D
@R15
M=D
@SP
M=M-1
A=M
D=M
@R15
A=M
M=D
@23
D=A
@SP
AM=M+1
A=A-1
M=D
@15
D=A
@SP
AM=M+1
A=A-1
M=D
@$ret.2
D=A
@SP
M=M+1
A=M-1
M=D
@LCL
D=M
@SP
M=M+1
A=M-1
M=D
@ARG
D=M
@SP
M=M+1
A=M-1
M=D
@THIS
D=M
@SP
M=M+1
A=M-1
M=D
@THAT
D=M
@SP
M=M+1
A=M-1
M=D
@SP
D=M
@5
D=D-A
@2
D=D-A
@ARG
M=D
@SP
D=M
@LCL
M=D
@Class2.set
0;JMP
($ret.2)
@0
D=A
@5
D=A+D
@R15
M=D
@SP
M=M-1
A=M
D=M
@R15
A=M
M=D
@$ret.3
D=A
@SP
M=M+1
A=M-1
M=D
@LCL
D=M
@SP
M=M+1
A=M-1
M=D
@ARG
D=M
@SP
M=M+1
A=M-1
M=D
@THIS
D=M
@SP
M=M+1
A=M-1
M=D
@THAT
D=M
@SP
M=M+1
A=M-1
M=D
@SP
D=M
@5
D=D-A
@0
D=D-A
@ARG
M=D
@SP
D=M
@LCL
M=D
@Class1.get
0;JMP
($ret.3)
@$ret.4
D=A
@SP
M=M+1
A=M-1
M=D
@LCL
D=M
@SP
M=M+1
A=M-1
M=D
@ARG
D=M
@SP
M=M+1
A=M-1
M=D
@THIS
D=M
@SP
M=M+1
A=M-1
M=D
@THAT
D=M
@SP
M=M+1
A=M-1
M=D
@SP
D=M
@5
D=D-A
@0
D=D-A
@ARG
M=D
@SP
D=M
@LCL
M=D
@Class2.get
0;JMP
($ret.4)
(WHILE)
@WHILE
0;JMP | 4.977961 | 12 | 0.535971 |
e9b83951e40d0af2177c45b451d32d435350b903 | 611 | go | Go | dir.go | sicko7947/sickocommon | 7813298fbe31e812bbed33355fd45953a3811f86 | [
"MIT"
] | null | null | null | dir.go | sicko7947/sickocommon | 7813298fbe31e812bbed33355fd45953a3811f86 | [
"MIT"
] | null | null | null | dir.go | sicko7947/sickocommon | 7813298fbe31e812bbed33355fd45953a3811f86 | [
"MIT"
] | null | null | null | package sickocommon
import (
	"os"
	"path/filepath"
	"runtime"
)
// PathCheckAndCreate ensures path s exists, creating it (with any missing
// parents) using permission perm when it does not. Returns nil when the path
// already exists, otherwise the MkdirAll result.
func PathCheckAndCreate(s string, perm os.FileMode) error {
	exists, statErr := PathExists(s)
	if exists {
		return statErr
	}
	return os.MkdirAll(s, perm)
}
// PathExists reports whether path exists. A "does not exist" stat result is
// not treated as an error: it yields (false, nil). Any other stat failure is
// returned as (false, err).
func PathExists(path string) (bool, error) {
	if _, err := os.Stat(path); err != nil {
		if os.IsNotExist(err) {
			return false, nil
		}
		return false, err
	}
	return true, nil
}
func GetAppdataPath() string {
if runtime.GOOS == "windows" {
home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH")
if home == "" {
home = os.Getenv("USERPROFILE")
}
return home
}
return os.Getenv("HOME") + "\\AppData"
}
| 16.513514 | 59 | 0.639935 |
41032eea4c1ef4cefa3603796295bc69661400e0 | 830 | h | C | DDrawSystem.h | toebes/PiggViewer | 662742e5a12497b9862bc07c1166025badc07705 | [
"BSD-2-Clause"
] | null | null | null | DDrawSystem.h | toebes/PiggViewer | 662742e5a12497b9862bc07c1166025badc07705 | [
"BSD-2-Clause"
] | null | null | null | DDrawSystem.h | toebes/PiggViewer | 662742e5a12497b9862bc07c1166025badc07705 | [
"BSD-2-Clause"
] | null | null | null | // DDrawSystem.h: interface for the CDDrawSystem class.
//
//////////////////////////////////////////////////////////////////////
#if !defined(AFX_DDRAWSYSTEM_H__1E152EB4_ED1D_4079_BDD4_773383DD98C8__INCLUDED_)
#define AFX_DDRAWSYSTEM_H__1E152EB4_ED1D_4079_BDD4_773383DD98C8__INCLUDED_
#if _MSC_VER > 1000
#pragma once
#endif // _MSC_VER > 1000
#include <ddraw.h>
#define _CHARACTORBUILDER_
// Thin DirectDraw 7 wrapper: owns the device, a front buffer, an off-screen
// store buffer and a clipper for one window.
// (Implementation lives in DDrawSystem.cpp; comments on individual members
// below are inferred from their names — confirm against the .cpp.)
class CDDrawSystem
{
public:
	CDDrawSystem();
	virtual ~CDDrawSystem();

	// Initializes DirectDraw for the given window; returns FALSE on failure.
	BOOL Init(HWND hWnd);
	// Releases the DirectDraw objects acquired by Init().
	void Terminate();
	void Clear();
	void TestDraw(int x, int y);
	// Presumably presents the store buffer to the screen — TODO confirm.
	void Display();

protected:
	LPDIRECTDRAW7 m_pDD;                      // DirectDraw 7 device
	LPDIRECTDRAWSURFACE7 m_pddsFrontBuffer;   // primary (visible) surface
	LPDIRECTDRAWSURFACE7 m_pddsStoreBuffer;   // off-screen drawing surface
	LPDIRECTDRAWCLIPPER pcClipper;            // clipper bound to the window
	HWND hWnd;                                // target window handle
};
#endif // !defined(AFX_DDRAWSYSTEM_H__1E152EB4_ED1D_4079_BDD4_773383DD98C8__INCLUDED_)
| 21.842105 | 86 | 0.731325 |
fac924aad71646c58506462fd203cca6725469c6 | 1,229 | asm | Assembly | _build/dispatcher/jmp_ippsGFpECSharedSecretDH_a40510aa.asm | zyktrcn/ippcp | b0bbe9bbb750a7cf4af5914dd8e6776a8d544466 | [
"Apache-2.0"
] | 1 | 2021-10-04T10:21:54.000Z | 2021-10-04T10:21:54.000Z | _build/dispatcher/jmp_ippsGFpECSharedSecretDH_a40510aa.asm | zyktrcn/ippcp | b0bbe9bbb750a7cf4af5914dd8e6776a8d544466 | [
"Apache-2.0"
] | null | null | null | _build/dispatcher/jmp_ippsGFpECSharedSecretDH_a40510aa.asm | zyktrcn/ippcp | b0bbe9bbb750a7cf4af5914dd8e6776a8d544466 | [
"Apache-2.0"
] | null | null | null | extern m7_ippsGFpECSharedSecretDH:function
extern n8_ippsGFpECSharedSecretDH:function
extern y8_ippsGFpECSharedSecretDH:function
extern e9_ippsGFpECSharedSecretDH:function
extern l9_ippsGFpECSharedSecretDH:function
extern n0_ippsGFpECSharedSecretDH:function
extern k0_ippsGFpECSharedSecretDH:function
extern ippcpJumpIndexForMergedLibs
extern ippcpSafeInit:function
segment .data
align 8
dq .Lin_ippsGFpECSharedSecretDH
.Larraddr_ippsGFpECSharedSecretDH:
dq m7_ippsGFpECSharedSecretDH
dq n8_ippsGFpECSharedSecretDH
dq y8_ippsGFpECSharedSecretDH
dq e9_ippsGFpECSharedSecretDH
dq l9_ippsGFpECSharedSecretDH
dq n0_ippsGFpECSharedSecretDH
dq k0_ippsGFpECSharedSecretDH
segment .text
global ippsGFpECSharedSecretDH:function (ippsGFpECSharedSecretDH.LEndippsGFpECSharedSecretDH - ippsGFpECSharedSecretDH)
.Lin_ippsGFpECSharedSecretDH:
db 0xf3, 0x0f, 0x1e, 0xfa
call ippcpSafeInit wrt ..plt
align 16
ippsGFpECSharedSecretDH:
db 0xf3, 0x0f, 0x1e, 0xfa
mov rax, qword [rel ippcpJumpIndexForMergedLibs wrt ..gotpc]
movsxd rax, dword [rax]
lea r11, [rel .Larraddr_ippsGFpECSharedSecretDH]
mov r11, qword [r11+rax*8]
jmp r11
.LEndippsGFpECSharedSecretDH:
| 31.512821 | 119 | 0.820993 |
716c8dfa5470789cd8fbae4f3e4df40b5e4a4df9 | 376 | tsx | TypeScript | packages/gatsby-plugin-image/src/components/main-image.tsx | pipaliyajaydip/gatsby | 1e78a896399a935f8e4bc9bdc71433fc8873cbfc | [
"MIT"
] | 4 | 2021-04-26T15:07:54.000Z | 2022-02-22T07:44:00.000Z | packages/gatsby-plugin-image/src/components/main-image.tsx | pipaliyajaydip/gatsby | 1e78a896399a935f8e4bc9bdc71433fc8873cbfc | [
"MIT"
] | 3 | 2022-02-27T23:27:24.000Z | 2022-03-08T23:39:30.000Z | packages/gatsby-plugin-image/src/components/main-image.tsx | pipaliyajaydip/gatsby | 1e78a896399a935f8e4bc9bdc71433fc8873cbfc | [
"MIT"
] | 1 | 2020-11-16T10:12:49.000Z | 2020-11-16T10:12:49.000Z | import React, { forwardRef } from "react"
import { Picture, PictureProps } from "./picture"
// MainImage takes exactly the same props as Picture.
export type MainImageProps = PictureProps

// Thin wrapper around Picture that forwards its ref to the underlying
// HTMLImageElement, so consumers can observe/measure the real <img> node.
export const MainImage = forwardRef<HTMLImageElement, MainImageProps>(
  function MainImage({ ...props }, ref) {
    return <Picture ref={ref} {...props} />
  }
)

MainImage.displayName = `MainImage`
// Reuse Picture's runtime prop validation; the prop contracts are identical.
MainImage.propTypes = Picture.propTypes
4ab37008a8517329ce5ed35935c9683cc5d0f33d | 10,006 | cs | C# | configdownloader/main.Designer.cs | nikop/SteamControllerConfigDownloader | eb08a030099fb6874a0dac1f112bf2a46e870634 | [
"MIT"
] | 21 | 2015-11-25T18:32:37.000Z | 2020-11-17T19:53:56.000Z | configdownloader/main.Designer.cs | nikop/SteamControllerConfigDownloader | eb08a030099fb6874a0dac1f112bf2a46e870634 | [
"MIT"
] | 3 | 2015-11-27T19:02:47.000Z | 2019-12-06T05:13:03.000Z | configdownloader/main.Designer.cs | nikop/SteamControllerConfigDownloader | eb08a030099fb6874a0dac1f112bf2a46e870634 | [
"MIT"
] | 8 | 2015-11-25T20:16:13.000Z | 2021-11-05T12:36:44.000Z | namespace configdownloader
{
partial class main
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
this.inputAppID = new System.Windows.Forms.TextBox();
this.label1 = new System.Windows.Forms.Label();
this.get = new System.Windows.Forms.Button();
this.datagridConfigs = new System.Windows.Forms.DataGridView();
this.appDataGridViewTextBoxColumn = new System.Windows.Forms.DataGridViewTextBoxColumn();
this.nameDataGridViewTextBoxColumn = new System.Windows.Forms.DataGridViewTextBoxColumn();
this.RatesUp = new System.Windows.Forms.DataGridViewTextBoxColumn();
this.RatesDown = new System.Windows.Forms.DataGridViewTextBoxColumn();
this.configItemBindingSource = new System.Windows.Forms.BindingSource(this.components);
this.saveFileDialog1 = new System.Windows.Forms.SaveFileDialog();
this.statusStrip1 = new System.Windows.Forms.StatusStrip();
this.currentStatus = new System.Windows.Forms.ToolStripStatusLabel();
((System.ComponentModel.ISupportInitialize)(this.datagridConfigs)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.configItemBindingSource)).BeginInit();
this.statusStrip1.SuspendLayout();
this.SuspendLayout();
//
// inputAppID
//
this.inputAppID.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.inputAppID.Location = new System.Drawing.Point(55, 6);
this.inputAppID.Name = "inputAppID";
this.inputAppID.Size = new System.Drawing.Size(659, 20);
this.inputAppID.TabIndex = 0;
this.inputAppID.TextChanged += new System.EventHandler(this.inputAppID_TextChanged);
this.inputAppID.KeyUp += new System.Windows.Forms.KeyEventHandler(this.inputAppID_KeyUp);
//
// label1
//
this.label1.AutoSize = true;
this.label1.Location = new System.Drawing.Point(12, 9);
this.label1.Name = "label1";
this.label1.Size = new System.Drawing.Size(37, 13);
this.label1.TabIndex = 1;
this.label1.Text = "AppID";
//
// get
//
this.get.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
this.get.Location = new System.Drawing.Point(720, 6);
this.get.Name = "get";
this.get.Size = new System.Drawing.Size(75, 20);
this.get.TabIndex = 2;
this.get.Text = "Get";
this.get.UseVisualStyleBackColor = true;
this.get.Click += new System.EventHandler(this.get_Click);
//
// datagridConfigs
//
this.datagridConfigs.AllowUserToAddRows = false;
this.datagridConfigs.AllowUserToDeleteRows = false;
this.datagridConfigs.AllowUserToResizeRows = false;
this.datagridConfigs.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.datagridConfigs.AutoGenerateColumns = false;
this.datagridConfigs.ClipboardCopyMode = System.Windows.Forms.DataGridViewClipboardCopyMode.Disable;
this.datagridConfigs.ColumnHeadersHeightSizeMode = System.Windows.Forms.DataGridViewColumnHeadersHeightSizeMode.AutoSize;
this.datagridConfigs.Columns.AddRange(new System.Windows.Forms.DataGridViewColumn[] {
this.appDataGridViewTextBoxColumn,
this.nameDataGridViewTextBoxColumn,
this.RatesUp,
this.RatesDown});
this.datagridConfigs.DataSource = this.configItemBindingSource;
this.datagridConfigs.EditMode = System.Windows.Forms.DataGridViewEditMode.EditProgrammatically;
this.datagridConfigs.Location = new System.Drawing.Point(12, 32);
this.datagridConfigs.MultiSelect = false;
this.datagridConfigs.Name = "datagridConfigs";
this.datagridConfigs.RowHeadersBorderStyle = System.Windows.Forms.DataGridViewHeaderBorderStyle.Single;
this.datagridConfigs.RowHeadersVisible = false;
this.datagridConfigs.SelectionMode = System.Windows.Forms.DataGridViewSelectionMode.FullRowSelect;
this.datagridConfigs.Size = new System.Drawing.Size(783, 302);
this.datagridConfigs.TabIndex = 3;
this.datagridConfigs.CellDoubleClick += new System.Windows.Forms.DataGridViewCellEventHandler(this.dataGridView1_CellDoubleClick);
//
// appDataGridViewTextBoxColumn
//
this.appDataGridViewTextBoxColumn.DataPropertyName = "App";
this.appDataGridViewTextBoxColumn.HeaderText = "App";
this.appDataGridViewTextBoxColumn.Name = "appDataGridViewTextBoxColumn";
//
// nameDataGridViewTextBoxColumn
//
this.nameDataGridViewTextBoxColumn.AutoSizeMode = System.Windows.Forms.DataGridViewAutoSizeColumnMode.Fill;
this.nameDataGridViewTextBoxColumn.DataPropertyName = "Name";
this.nameDataGridViewTextBoxColumn.HeaderText = "Name";
this.nameDataGridViewTextBoxColumn.Name = "nameDataGridViewTextBoxColumn";
//
// RatesUp
//
this.RatesUp.DataPropertyName = "RatesUp";
this.RatesUp.HeaderText = "RatesUp";
this.RatesUp.Name = "RatesUp";
//
// RatesDown
//
this.RatesDown.DataPropertyName = "RatesDown";
this.RatesDown.HeaderText = "RatesDown";
this.RatesDown.Name = "RatesDown";
//
// configItemBindingSource
//
this.configItemBindingSource.DataSource = typeof(configdownloader.ConfigItem);
//
// saveFileDialog1
//
this.saveFileDialog1.Filter = "Config|*.vdf";
//
// statusStrip1
//
this.statusStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
this.currentStatus});
this.statusStrip1.Location = new System.Drawing.Point(0, 342);
this.statusStrip1.Name = "statusStrip1";
this.statusStrip1.Size = new System.Drawing.Size(803, 22);
this.statusStrip1.TabIndex = 4;
this.statusStrip1.Text = "statusStrip1";
//
// currentStatus
//
this.currentStatus.Name = "currentStatus";
this.currentStatus.Size = new System.Drawing.Size(128, 17);
this.currentStatus.Text = "Connecting to Steam...";
//
// main
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(803, 364);
this.Controls.Add(this.statusStrip1);
this.Controls.Add(this.datagridConfigs);
this.Controls.Add(this.get);
this.Controls.Add(this.label1);
this.Controls.Add(this.inputAppID);
this.Name = "main";
this.Text = "Steam Controller Config Downloader";
this.FormClosed += new System.Windows.Forms.FormClosedEventHandler(this.main_FormClosed);
this.Load += new System.EventHandler(this.main_Load);
((System.ComponentModel.ISupportInitialize)(this.datagridConfigs)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.configItemBindingSource)).EndInit();
this.statusStrip1.ResumeLayout(false);
this.statusStrip1.PerformLayout();
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
private System.Windows.Forms.TextBox inputAppID;
private System.Windows.Forms.Label label1;
private System.Windows.Forms.Button get;
private System.Windows.Forms.DataGridView datagridConfigs;
private System.Windows.Forms.DataGridViewTextBoxColumn appDataGridViewTextBoxColumn;
private System.Windows.Forms.DataGridViewTextBoxColumn nameDataGridViewTextBoxColumn;
private System.Windows.Forms.BindingSource configItemBindingSource;
private System.Windows.Forms.SaveFileDialog saveFileDialog1;
private System.Windows.Forms.DataGridViewTextBoxColumn RatesUp;
private System.Windows.Forms.DataGridViewTextBoxColumn RatesDown;
private System.Windows.Forms.StatusStrip statusStrip1;
private System.Windows.Forms.ToolStripStatusLabel currentStatus;
}
}
| 50.791878 | 164 | 0.635119 |
176059d9d264b851e6e1b66830c89754285823d1 | 2,060 | lua | Lua | configdir/awesome/widgets/volume_pulse.lua | mall0c/dotfiles | 95641e7b9a5d5d2c8555ea5af14c63a34f742e2d | [
"MIT"
] | 2 | 2020-11-14T09:25:55.000Z | 2020-12-10T23:44:45.000Z | configdir/awesome/widgets/volume_pulse.lua | mall0c/dotfiles | 95641e7b9a5d5d2c8555ea5af14c63a34f742e2d | [
"MIT"
] | null | null | null | configdir/awesome/widgets/volume_pulse.lua | mall0c/dotfiles | 95641e7b9a5d5d2c8555ea5af14c63a34f742e2d | [
"MIT"
] | 3 | 2020-11-21T05:23:39.000Z | 2021-12-14T22:33:42.000Z | local awful = require("awful")
local utils = require("utils")
local lain = require("lain")
local icons = require("icons")
local BaseWidget = require("widgets.base").BaseWidget
local VolWidget = BaseWidget.derive()
function VolWidget:create(args)
args = args or {}
self.default_step = args.default_step or 3 -- Default volume step in percent
args.settings = function()
self.volume_now = volume_now
local vol = (self.volume_now.left + self.volume_now.right) / 2
widget:set_markup(math.floor(vol) .. "%")
if self.volume_now.muted == "no" then
self:set_icon(icons.get_3level("volume", vol))
else
self:set_icon(icons.muted)
end
end
self.lainwidget = lain.widget.pulse(args)
local box = self:init(self.lainwidget.widget, icons.volume_high)
box:buttons(awful.util.table.join(
awful.button({}, 1, function() utils.toggle_run("pavucontrol") end),
awful.button({}, 3, function() self:toggleMute() end),
awful.button({}, 4, function() self:incVolume() end),
awful.button({}, 5, function() self:decVolume() end)
))
end
function VolWidget:update()
self.lainwidget.update()
end
function VolWidget:incVolume(val)
val = val or self.default_step
self:changeVolume(string.format("+%s", val))
end
function VolWidget:decVolume(val)
val = val or self.default_step
self:changeVolume(string.format("-%s", val))
end
-- val should be a string "+X" or "-X"
function VolWidget:changeVolume(val)
-- utils.async("amixer -c 0 set Master " .. val, function(stdout) self:update() end)
print(self.volume_now.device)
utils.async(string.format("pactl set-sink-volume %s %s%%", self.volume_now.device, val),
function(_) self:update() end)
end
function VolWidget:toggleMute()
-- utils.async("amixer set Master toggle", function(stdout) self:update() end)
os.execute(string.format("pactl set-sink-mute %s toggle", self.volume_now.device),
function(_) self:update() end)
end
return VolWidget
| 30.746269 | 92 | 0.670388 |
53923d86297d7179eeb7699514fdd9005d97d3fa | 7,415 | lua | Lua | BB_Menu/c_init.lua | MegadreamsBE/BlueBird | 9cfeeeaa0ddced824ae0708cb6d616078642c97a | [
"MIT"
] | 3 | 2020-04-30T04:45:38.000Z | 2022-03-12T23:21:41.000Z | BB_Menu/c_init.lua | MegadreamsBE/BlueBird | 9cfeeeaa0ddced824ae0708cb6d616078642c97a | [
"MIT"
] | null | null | null | BB_Menu/c_init.lua | MegadreamsBE/BlueBird | 9cfeeeaa0ddced824ae0708cb6d616078642c97a | [
"MIT"
] | 2 | 2017-07-01T00:14:29.000Z | 2021-02-27T21:19:11.000Z | ---------------------------------<
-- Bluebird Menu
-- c_init.lua
---------------------------------<
-- *~ Variables ~*
---------------------------------<
g_Me = getLocalPlayer()
g_Resource = getThisResource()
g_Root = getRootElement()
g_ResourceRoot = getResourceRootElement(g_Resource)
g_ScreenX, g_ScreenY = guiGetScreenSize()
g_UIX, g_UIY = 800, 600
g_CenterX, g_CenterY = g_ScreenX/2, g_ScreenY/2
Menu = {}
Elements = {}
-- Elements.Container = Showing element of the menu, which is the pane which slides to make the menu (dis)appear.
-- Elements.Menu = The tile
---------------------------------<
-- *~ Functions ~*
---------------------------------<
local keyEdit1 = nil
local keyEdit2 = nil
local keyEdit3 = nil
local keyEdit4 = nil
local keyEdit5 = nil
local keyEdit6 = nil
local keyWindow = nil
local checkKey = false
function performKeyCheck()
showCursor(true)
keyWindow = guiCreateWindow((g_CenterX-conv(300)), (g_CenterY-conv(65)), 500, 130, "BlueBird Beta Key Checker", false)
local keyInfoLabel = guiCreateLabel(10,20,500,50,"Please fill in your BlueBird Beta Key (tip: paste the whole key in the first editbox).\nIn case you don't have a key visit our Donation Panel at shc-clan.com!", false, keyWindow)
local keyLabel = guiCreateLabel(10,63,50,50,"Key:", false, keyWindow)
guiSetFont(keyLabel,"default-bold-small")
keyEdit1 = guiCreateEdit(50,60,50,25, "", false, keyWindow)
guiCreateLabel(110,63,50,50,"-", false, keyWindow)
keyEdit2 = guiCreateEdit(125,60,50,25, "", false, keyWindow)
guiCreateLabel(185,63,50,50,"-", false, keyWindow)
keyEdit3 = guiCreateEdit(200,60,50,25, "", false, keyWindow)
guiCreateLabel(260,63,50,50,"-", false, keyWindow)
keyEdit4 = guiCreateEdit(275,60,50,25, "", false, keyWindow)
guiCreateLabel(335,63,50,50,"-", false, keyWindow)
keyEdit5 = guiCreateEdit(350,60,50,25, "", false, keyWindow)
guiCreateLabel(410,63,50,50,"-", false, keyWindow)
keyEdit6 = guiCreateEdit(425,60,50,25, "", false, keyWindow)
local keyButton = guiCreateButton(50,90,100,50, "Verify Key!", false, keyWindow)
addEventHandler("onClientGUIChanged", keyEdit1, onKeyChange)
addEventHandler("onClientGUIClick", keyButton, keyVerifying)
end
function onKeyChange(element)
if(element == keyEdit1) then
local text = guiGetText(keyEdit1)
if(#text > 8) then
text = string.gsub(text, " ", "")
keys = split(text,"-")
if(#keys==6) then
guiSetText(keyEdit1,keys[1])
guiSetText(keyEdit2,keys[2])
guiSetText(keyEdit3,keys[3])
guiSetText(keyEdit4,keys[4])
guiSetText(keyEdit5,keys[5])
guiSetText(keyEdit6,keys[6])
end
end
end
end
function keyVerifying()
local key = guiGetText(keyEdit1).."-"..guiGetText(keyEdit2).."-"..guiGetText(keyEdit3).."-"..guiGetText(keyEdit4).."-"..guiGetText(keyEdit5).."-"..guiGetText(keyEdit6)
triggerServerEvent("verifyKey",getLocalPlayer(),key)
end
function onVerified()
setElementData(localPlayer,"BB.isVerified",true)
destroyElement(keyWindow)
Menu.Init()
end
addEvent("keyVerified",true)
addEventHandler("keyVerified",getRootElement(),onVerified)
function conv(size)
local newSize = size*(g_ScreenX/1366)
return newSize
end
function string:split(separator)
if separator == '.' then
separator = '%.'
end
local result = {}
for part in self:gmatch('(.-)' .. separator) do
result[#result+1] = part
end
result[#result+1] = self:match('.*' .. separator .. '(.*)$') or self
return result
end
function Menu.Init()
-- Check whether the use has a 800x600 or higher, else block it. (lol)
if (g_ScreenX < 800) or (g_ScreenY < 600) then
outputChatBox("Warning: #FFFFFFBluebird has been disabled, your display needs to have a resolution of 800x600 or bigger.", 255,0,0,true)
return
end
if not (getElementData(localPlayer,"BB.isVerified") == true) then
if (checkKey == true) then
triggerServerEvent("onPlayerGetMuted",localPlayer)
performKeyCheck()
return
end
end
-- Disable input on join.
showPlayerHudComponent("all", false)
showCursor(true)
--guiSetInputMode("no_binds")
showChat(false)
setElementData(localPlayer,"UAG.MenuShown",true)
-- Create Menu()
Elements.Container = exports.BB_GUI:dxCreateScrollPane(g_CenterX-g_UIX/2, g_CenterY-g_UIY/2, g_UIX, g_UIY, false)
Elements.Menu = exports.BB_GUI:dxCreateTile(0,g_UIY,g_UIX,g_UIY, Elements.Container, false,false,0)
-- Trigger the event to let all the resources know that the Menu is ready for use.
exports.BB_GUI:dxSetPaneSize(Elements.Container,0, 2*g_UIY)
exports.BB_GUI:dxSetProperty(Elements.Container, "Scrollable", false)
exports.BB_GUI:dxSetProperty(Elements.Container, "SlideTime", 1000)
Elements.Container_Top = exports.BB_GUI:dxCreateFocusPoint(0,0 , Elements.Container)
Elements.Container_Center = exports.BB_GUI:dxCreateFocusPoint(0,g_UIY , Elements.Container)
Elements.Container_Bottom = exports.BB_GUI:dxCreateFocusPoint(0,g_UIY*2 , Elements.Container)
-- Horizontal pane (3x width)
Elements.HorizontalPane = exports.BB_GUI:dxCreateScrollPane(0,0,1,1, Elements.Menu,true,true)
exports.BB_GUI:dxSetPaneSize(Elements.HorizontalPane,2*g_UIX, 0)
exports.BB_GUI:dxSetProperty(Elements.HorizontalPane, "SlideTime", 700)
Elements.HorizontalPane_Left = exports.BB_GUI:dxCreateFocusPoint(0, 0, Elements.HorizontalPane)
Elements.HorizontalPane_Center = exports.BB_GUI:dxCreateFocusPoint(g_UIX, 0, Elements.HorizontalPane)
Elements.HorizontalPane_Right = exports.BB_GUI:dxCreateFocusPoint(g_UIX*2,0, Elements.HorizontalPane)
-- Vertical panes
Elements.VerticalPane_Left = exports.BB_GUI:dxCreateScrollPane(0 ,0,g_UIX,g_UIY, Elements.HorizontalPane)
Elements.VerticalPane_Center = exports.BB_GUI:dxCreateScrollPane(g_UIX ,0,g_UIX,g_UIY, Elements.HorizontalPane)
Elements.VerticalPane_Right = exports.BB_GUI:dxCreateScrollPane(g_UIX*2 ,0,g_UIX,g_UIY, Elements.HorizontalPane)
exports.BB_GUI:dxSetProperty(Elements.Menu, "Enabled", false) -- Disable the menu > Accesable for login app
triggerEvent("onClientBlueBirdReady", g_Me)
addEventHandler("onClientPreRender", g_Root, Menu.DrawExtras)
addEvent("onClientDXFocusPointReached", true)
addEventHandler("onClientDXFocusPointReached", Elements.Container, changeInput)
end
function Menu.DrawExtras()
if not isElement(Elements.Container) then return end
local pos = exports.BB_GUI:dxGetRawData(Elements.Container, "cOffsets")[2]
local alpha
if pos < g_UIY then alpha = (pos/g_UIY)*255
elseif pos > g_UIY then alpha = 255-((pos-g_UIY)/g_UIY)*255
else alpha = 255 end
if pos < g_UIY then
dxDrawRectangle(g_CenterX-(g_UIX)/2,g_CenterY+(g_UIY)/2-pos, g_UIX, pos, exports.BB_GUI:dxGetColor("Background"))
elseif pos > g_UIY then
dxDrawRectangle(g_CenterX-(g_UIX)/2,g_CenterY-(g_UIY)/2, g_UIX, g_UIY+1*(g_UIY-pos), exports.BB_GUI:dxGetColor("Background"))
else
dxDrawRectangle(g_CenterX-(g_UIX)/2,g_CenterY-(g_UIY)/2, g_UIX, g_UIY, exports.BB_GUI:dxGetColor("Background"))
end
--dxDrawImage(g_CenterX-(g_UIX+80)/2, g_CenterY-(g_UIY/2), 880, 40, "images/border.png",0,0,0, tocolor(255,255,255,alpha))
--dxDrawImage(g_CenterX-(g_UIX+80)/2, g_CenterY+(g_UIY/2)-40, 880, 40, "images/border.png", 180,0,0, tocolor(255,255,255,alpha))
end
---------------------------------<
-- *~ Events/Handlings ~*
---------------------------------<
addEvent("onClientBlueBirdReady", true)
addEventHandler("onClientResourceStart", g_ResourceRoot, Menu.Init)
fileDelete("c_init.lua") | 36.170732 | 229 | 0.724073 |
6d8e0d123a8a87dcb4fc06b2c4a5486190a29c9c | 961 | dart | Dart | gedi/lib/widgets/icon_round_button.dart | gdsckoreahackathon2022/24_GEDI | d499bf881a2c1ffa727da6ad76194f049650f931 | [
"MIT"
] | 3 | 2022-02-05T01:51:07.000Z | 2022-02-09T05:40:07.000Z | gedi/lib/widgets/icon_round_button.dart | gdsckoreahackathon2022/24_GEDI | d499bf881a2c1ffa727da6ad76194f049650f931 | [
"MIT"
] | null | null | null | gedi/lib/widgets/icon_round_button.dart | gdsckoreahackathon2022/24_GEDI | d499bf881a2c1ffa727da6ad76194f049650f931 | [
"MIT"
] | 2 | 2022-02-06T09:42:05.000Z | 2022-02-08T06:56:24.000Z | import 'package:flutter/material.dart';
class IconRoundButton extends StatelessWidget {
const IconRoundButton({
Key? key,
required this.width,
required this.height,
required this.icon,
required this.backgroundColor,
this.onPressed,
}) : super(key: key);
final double width;
final double height;
final IconData icon;
final Color backgroundColor;
final Function()? onPressed;
@override
Widget build(BuildContext context) {
return GestureDetector(
onTap: onPressed,
child: Container(
width: width,
height: height,
margin: const EdgeInsets.all(10),
decoration: BoxDecoration(
border: Border.all(
color: Colors.black,
width: 5.0,
),
color: backgroundColor,
borderRadius: BorderRadius.circular(24),
),
child: Icon(
icon,
size: height * 0.8,
),
),
);
}
}
| 22.348837 | 50 | 0.597294 |
19a539cd2184ae4174c9ee2e85878b6ae4bc91fd | 135 | ps1 | PowerShell | vcpkg/cmake_build_vcpkg.ps1 | Eshanatnight/dotfiles | 8b8a581b232a95b589572977c0a8e77cc7584f16 | [
"Unlicense"
] | null | null | null | vcpkg/cmake_build_vcpkg.ps1 | Eshanatnight/dotfiles | 8b8a581b232a95b589572977c0a8e77cc7584f16 | [
"Unlicense"
] | null | null | null | vcpkg/cmake_build_vcpkg.ps1 | Eshanatnight/dotfiles | 8b8a581b232a95b589572977c0a8e77cc7584f16 | [
"Unlicense"
] | null | null | null | mkdir build
Set-Location build
cmake .. -DCMAKE_TOOLCHAIN_FILE=D:/Program Files/vcpkg/scripts/buildsystems/vcpkg.cmake
cmake --build .
| 27 | 87 | 0.807407 |
883b06ebf4935391da1703cdc18705ab0226979e | 582 | hpp | C++ | src/framework/marshal/primitives.hpp | MalcolmD/CloudCVPlusDetection | b614bca8441d942cd7a433e24d0641abc06a5f5a | [
"BSD-3-Clause"
] | 5 | 2015-02-09T17:44:49.000Z | 2015-07-09T15:31:22.000Z | src/framework/marshal/primitives.hpp | MalcolmD/CloudCVPlusDetection | b614bca8441d942cd7a433e24d0641abc06a5f5a | [
"BSD-3-Clause"
] | 2 | 2015-02-02T22:23:24.000Z | 2015-02-18T08:10:06.000Z | src/framework/marshal/primitives.hpp | MalcolmD/CloudCVPlusDetection | b614bca8441d942cd7a433e24d0641abc06a5f5a | [
"BSD-3-Clause"
] | 8 | 2015-01-14T04:08:56.000Z | 2021-06-30T23:24:45.000Z | #pragma once
#include <string>
#include <vector>
#include <map>
#include <v8.h>
typedef v8::Local<v8::Value> V8Result;
V8Result MarshalFromNative(int value);
V8Result MarshalFromNative(unsigned int value);
V8Result MarshalFromNative(float value);
V8Result MarshalFromNative(double value);
V8Result MarshalFromNative(const char * value);
bool MarshalToNative(V8Result inVal, int& outVal);
bool MarshalToNative(V8Result inVal, unsigned int& outVal);
bool MarshalToNative(V8Result inVal, float& outVal);
bool MarshalToNative(V8Result inVal, double& outVal); | 26.454545 | 60 | 0.766323 |
e529df8a891078eb42a0667c24d1ea06ae6d2c11 | 296 | ts | TypeScript | inspector/build/components/actionTabs/lines/booleanLineComponent.d.ts | BabylonJS/CDN | f3b9650c18dacce6a52937aa3584fb1130f13d18 | [
"Apache-2.0",
"MIT-0",
"MIT"
] | 10 | 2019-06-23T17:45:40.000Z | 2022-02-17T02:40:30.000Z | inspector/build/components/actionTabs/lines/booleanLineComponent.d.ts | BabylonJS/CDN | f3b9650c18dacce6a52937aa3584fb1130f13d18 | [
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2019-01-10T07:58:33.000Z | 2019-12-02T09:34:04.000Z | inspector/build/components/actionTabs/lines/booleanLineComponent.d.ts | BabylonJS/CDN | f3b9650c18dacce6a52937aa3584fb1130f13d18 | [
"Apache-2.0",
"MIT-0",
"MIT"
] | 12 | 2018-11-03T13:07:46.000Z | 2022-03-18T09:17:51.000Z | import * as React from "react";
export interface IBooleanLineComponentProps {
label: string;
value: boolean;
}
export declare class BooleanLineComponent extends React.Component<IBooleanLineComponentProps> {
constructor(props: IBooleanLineComponentProps);
render(): JSX.Element;
}
| 29.6 | 95 | 0.773649 |
2f1f1c0e2881cae34c98e93f48b119c9234b9b90 | 1,784 | java | Java | src/main/java/thaumicenergistics/integration/jei/ThEJEI.java | Chaoschaot232/ThaumicEnergistics | c445b4ec8893222c68e0b8a93dee9b4e7909f58b | [
"MIT"
] | null | null | null | src/main/java/thaumicenergistics/integration/jei/ThEJEI.java | Chaoschaot232/ThaumicEnergistics | c445b4ec8893222c68e0b8a93dee9b4e7909f58b | [
"MIT"
] | null | null | null | src/main/java/thaumicenergistics/integration/jei/ThEJEI.java | Chaoschaot232/ThaumicEnergistics | c445b4ec8893222c68e0b8a93dee9b4e7909f58b | [
"MIT"
] | null | null | null | package thaumicenergistics.integration.jei;
import mezz.jei.api.recipe.transfer.IRecipeTransferHandlerHelper;
import net.minecraft.inventory.Container;
import net.minecraft.item.ItemStack;
import thaumicenergistics.api.IThEItems;
import thaumicenergistics.api.ThEApi;
import mezz.jei.api.IModPlugin;
import mezz.jei.api.IModRegistry;
import mezz.jei.api.JEIPlugin;
import mezz.jei.api.ingredients.IIngredientBlacklist;
import mezz.jei.api.recipe.VanillaRecipeCategoryUid;
import mezz.jei.api.recipe.transfer.IRecipeTransferHandler;
/**
* @author BrockWS
* @author Alex811
*/
@JEIPlugin
public class ThEJEI implements IModPlugin {
@Override
public void register(IModRegistry registry) {
IRecipeTransferHandlerHelper rthh = registry.getJeiHelpers().recipeTransferHandlerHelper();
IIngredientBlacklist blacklist = registry.getJeiHelpers().getIngredientBlacklist();
IThEItems items = ThEApi.instance().items();
items.arcaneTerminal().maybeStack(1).ifPresent(stack -> registerWorkbenchCatalyst(registry, new ACTRecipeTransferHandler<>(rthh), stack));
items.arcaneInscriber().maybeStack(1).ifPresent(stack -> registerWorkbenchCatalyst(registry, new ACIRecipeTransferHandler<>(rthh), stack));
items.dummyAspect().maybeStack(1).ifPresent(blacklist::addIngredientToBlacklist);
}
public void registerWorkbenchCatalyst(IModRegistry registry, IRecipeTransferHandler<? extends Container> handler, ItemStack stack){
registry.getRecipeTransferRegistry().addRecipeTransferHandler(handler, VanillaRecipeCategoryUid.CRAFTING);
registry.getRecipeTransferRegistry().addRecipeTransferHandler(handler, "THAUMCRAFT_ARCANE_WORKBENCH");
registry.addRecipeCatalyst(stack, "THAUMCRAFT_ARCANE_WORKBENCH");
}
}
| 43.512195 | 147 | 0.792601 |
335ba978b5061cfa55957c7483329c1073ea2ab1 | 196 | sql | SQL | services/headless-lms/migrations/20210720140135_fix_gradings_field_names.up.sql | rage/secret-project-331 | 3c78c02f2f1d2e4539522e73c3065ae8866604e3 | [
"Apache-2.0"
] | 6 | 2021-09-23T05:28:58.000Z | 2022-03-11T09:25:05.000Z | services/headless-lms/migrations/20210720140135_fix_gradings_field_names.up.sql | rage/secret-project-331 | 3c78c02f2f1d2e4539522e73c3065ae8866604e3 | [
"Apache-2.0"
] | 232 | 2021-07-09T07:13:31.000Z | 2022-03-31T13:35:09.000Z | services/headless-lms/migrations/20210720140135_fix_gradings_field_names.up.sql | rage/secret-project-331 | 3c78c02f2f1d2e4539522e73c3065ae8866604e3 | [
"Apache-2.0"
] | null | null | null | -- Add up migration script here
ALTER TABLE gradings RENAME COLUMN unscaled_score_maximum TO unscaled_score_given;
ALTER TABLE gradings RENAME COLUMN unscaled_max_points TO unscaled_score_maximum; | 65.333333 | 82 | 0.872449 |
67d2932fbcc3659b61cb7e23cf8f99fb6eb8a7a2 | 713 | swift | Swift | ApiDemo-Swift/ApiDemo-Swift/MartinArtViewController.swift | suzp1984/IOS-ApiDemo | 5a1ed91f65cb6e31609a704967d78cbc3d6a5e4a | [
"Apache-2.0"
] | null | null | null | ApiDemo-Swift/ApiDemo-Swift/MartinArtViewController.swift | suzp1984/IOS-ApiDemo | 5a1ed91f65cb6e31609a704967d78cbc3d6a5e4a | [
"Apache-2.0"
] | null | null | null | ApiDemo-Swift/ApiDemo-Swift/MartinArtViewController.swift | suzp1984/IOS-ApiDemo | 5a1ed91f65cb6e31609a704967d78cbc3d6a5e4a | [
"Apache-2.0"
] | null | null | null | //
// MartinArtViewController.swift
// ApiDemo-Swift
//
// Created by Jacob su on 9/12/16.
// Copyright © 2016 suzp1984@gmail.com. All rights reserved.
//
import UIKit
class MartinArtViewController: UIViewController {
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
let martinView = MartinView()
martinView.frame = self.view.bounds
martinView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
self.view.addSubview(martinView)
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
| 23 | 71 | 0.669004 |
9953e8053753642ee64706563c3280a61a4ee2cb | 7,749 | c | C | net/netfilter/nft_masq.c | jainsakshi2395/linux | 7ccb860232bb83fb60cd6bcf5aaf0c008d903acb | [
"Linux-OpenIB"
] | 5 | 2020-07-08T01:35:16.000Z | 2021-04-12T16:35:29.000Z | net/netfilter/nft_masq.c | jainsakshi2395/linux | 7ccb860232bb83fb60cd6bcf5aaf0c008d903acb | [
"Linux-OpenIB"
] | 1 | 2021-01-27T01:29:47.000Z | 2021-01-27T01:29:47.000Z | net/netfilter/nft_masq.c | jainsakshi2395/linux | 7ccb860232bb83fb60cd6bcf5aaf0c008d903acb | [
"Linux-OpenIB"
] | null | null | null | // SPDX-License-Identifier: GPL-2.0-only
/*
* Copyright (c) 2014 Arturo Borrero Gonzalez <arturo@debian.org>
*/
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/netlink.h>
#include <linux/netfilter.h>
#include <linux/netfilter/nf_tables.h>
#include <net/netfilter/nf_tables.h>
#include <net/netfilter/nf_nat.h>
#include <net/netfilter/nf_nat_masquerade.h>
struct nft_masq {
u32 flags;
u8 sreg_proto_min;
u8 sreg_proto_max;
};
static const struct nla_policy nft_masq_policy[NFTA_MASQ_MAX + 1] = {
[NFTA_MASQ_FLAGS] = { .type = NLA_U32 },
[NFTA_MASQ_REG_PROTO_MIN] = { .type = NLA_U32 },
[NFTA_MASQ_REG_PROTO_MAX] = { .type = NLA_U32 },
};
static int nft_masq_validate(const struct nft_ctx *ctx,
const struct nft_expr *expr,
const struct nft_data **data)
{
int err;
err = nft_chain_validate_dependency(ctx->chain, NFT_CHAIN_T_NAT);
if (err < 0)
return err;
return nft_chain_validate_hooks(ctx->chain,
(1 << NF_INET_POST_ROUTING));
}
static int nft_masq_init(const struct nft_ctx *ctx,
const struct nft_expr *expr,
const struct nlattr * const tb[])
{
u32 plen = sizeof_field(struct nf_nat_range, min_addr.all);
struct nft_masq *priv = nft_expr_priv(expr);
int err;
if (tb[NFTA_MASQ_FLAGS]) {
priv->flags = ntohl(nla_get_be32(tb[NFTA_MASQ_FLAGS]));
if (priv->flags & ~NF_NAT_RANGE_MASK)
return -EINVAL;
}
if (tb[NFTA_MASQ_REG_PROTO_MIN]) {
err = nft_parse_register_load(tb[NFTA_MASQ_REG_PROTO_MIN],
&priv->sreg_proto_min, plen);
if (err < 0)
return err;
if (tb[NFTA_MASQ_REG_PROTO_MAX]) {
err = nft_parse_register_load(tb[NFTA_MASQ_REG_PROTO_MAX],
&priv->sreg_proto_max,
plen);
if (err < 0)
return err;
} else {
priv->sreg_proto_max = priv->sreg_proto_min;
}
}
return nf_ct_netns_get(ctx->net, ctx->family);
}
static int nft_masq_dump(struct sk_buff *skb, const struct nft_expr *expr)
{
const struct nft_masq *priv = nft_expr_priv(expr);
if (priv->flags != 0 &&
nla_put_be32(skb, NFTA_MASQ_FLAGS, htonl(priv->flags)))
goto nla_put_failure;
if (priv->sreg_proto_min) {
if (nft_dump_register(skb, NFTA_MASQ_REG_PROTO_MIN,
priv->sreg_proto_min) ||
nft_dump_register(skb, NFTA_MASQ_REG_PROTO_MAX,
priv->sreg_proto_max))
goto nla_put_failure;
}
return 0;
nla_put_failure:
return -1;
}
static void nft_masq_ipv4_eval(const struct nft_expr *expr,
struct nft_regs *regs,
const struct nft_pktinfo *pkt)
{
struct nft_masq *priv = nft_expr_priv(expr);
struct nf_nat_range2 range;
memset(&range, 0, sizeof(range));
range.flags = priv->flags;
if (priv->sreg_proto_min) {
range.min_proto.all = (__force __be16)nft_reg_load16(
®s->data[priv->sreg_proto_min]);
range.max_proto.all = (__force __be16)nft_reg_load16(
®s->data[priv->sreg_proto_max]);
}
regs->verdict.code = nf_nat_masquerade_ipv4(pkt->skb, nft_hook(pkt),
&range, nft_out(pkt));
}
static void
nft_masq_ipv4_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
{
nf_ct_netns_put(ctx->net, NFPROTO_IPV4);
}
static struct nft_expr_type nft_masq_ipv4_type;
static const struct nft_expr_ops nft_masq_ipv4_ops = {
.type = &nft_masq_ipv4_type,
.size = NFT_EXPR_SIZE(sizeof(struct nft_masq)),
.eval = nft_masq_ipv4_eval,
.init = nft_masq_init,
.destroy = nft_masq_ipv4_destroy,
.dump = nft_masq_dump,
.validate = nft_masq_validate,
};
static struct nft_expr_type nft_masq_ipv4_type __read_mostly = {
.family = NFPROTO_IPV4,
.name = "masq",
.ops = &nft_masq_ipv4_ops,
.policy = nft_masq_policy,
.maxattr = NFTA_MASQ_MAX,
.owner = THIS_MODULE,
};
#ifdef CONFIG_NF_TABLES_IPV6
static void nft_masq_ipv6_eval(const struct nft_expr *expr,
struct nft_regs *regs,
const struct nft_pktinfo *pkt)
{
struct nft_masq *priv = nft_expr_priv(expr);
struct nf_nat_range2 range;
memset(&range, 0, sizeof(range));
range.flags = priv->flags;
if (priv->sreg_proto_min) {
range.min_proto.all = (__force __be16)nft_reg_load16(
®s->data[priv->sreg_proto_min]);
range.max_proto.all = (__force __be16)nft_reg_load16(
®s->data[priv->sreg_proto_max]);
}
regs->verdict.code = nf_nat_masquerade_ipv6(pkt->skb, &range,
nft_out(pkt));
}
static void
nft_masq_ipv6_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
{
nf_ct_netns_put(ctx->net, NFPROTO_IPV6);
}
static struct nft_expr_type nft_masq_ipv6_type;
static const struct nft_expr_ops nft_masq_ipv6_ops = {
.type = &nft_masq_ipv6_type,
.size = NFT_EXPR_SIZE(sizeof(struct nft_masq)),
.eval = nft_masq_ipv6_eval,
.init = nft_masq_init,
.destroy = nft_masq_ipv6_destroy,
.dump = nft_masq_dump,
.validate = nft_masq_validate,
};
static struct nft_expr_type nft_masq_ipv6_type __read_mostly = {
.family = NFPROTO_IPV6,
.name = "masq",
.ops = &nft_masq_ipv6_ops,
.policy = nft_masq_policy,
.maxattr = NFTA_MASQ_MAX,
.owner = THIS_MODULE,
};
static int __init nft_masq_module_init_ipv6(void)
{
return nft_register_expr(&nft_masq_ipv6_type);
}
static void nft_masq_module_exit_ipv6(void)
{
nft_unregister_expr(&nft_masq_ipv6_type);
}
#else
static inline int nft_masq_module_init_ipv6(void) { return 0; }
static inline void nft_masq_module_exit_ipv6(void) {}
#endif
#ifdef CONFIG_NF_TABLES_INET
static void nft_masq_inet_eval(const struct nft_expr *expr,
struct nft_regs *regs,
const struct nft_pktinfo *pkt)
{
switch (nft_pf(pkt)) {
case NFPROTO_IPV4:
return nft_masq_ipv4_eval(expr, regs, pkt);
case NFPROTO_IPV6:
return nft_masq_ipv6_eval(expr, regs, pkt);
}
WARN_ON_ONCE(1);
}
static void
nft_masq_inet_destroy(const struct nft_ctx *ctx, const struct nft_expr *expr)
{
nf_ct_netns_put(ctx->net, NFPROTO_INET);
}
static struct nft_expr_type nft_masq_inet_type;
static const struct nft_expr_ops nft_masq_inet_ops = {
.type = &nft_masq_inet_type,
.size = NFT_EXPR_SIZE(sizeof(struct nft_masq)),
.eval = nft_masq_inet_eval,
.init = nft_masq_init,
.destroy = nft_masq_inet_destroy,
.dump = nft_masq_dump,
.validate = nft_masq_validate,
};
static struct nft_expr_type nft_masq_inet_type __read_mostly = {
.family = NFPROTO_INET,
.name = "masq",
.ops = &nft_masq_inet_ops,
.policy = nft_masq_policy,
.maxattr = NFTA_MASQ_MAX,
.owner = THIS_MODULE,
};
static int __init nft_masq_module_init_inet(void)
{
return nft_register_expr(&nft_masq_inet_type);
}
static void nft_masq_module_exit_inet(void)
{
nft_unregister_expr(&nft_masq_inet_type);
}
#else
static inline int nft_masq_module_init_inet(void) { return 0; }
static inline void nft_masq_module_exit_inet(void) {}
#endif
static int __init nft_masq_module_init(void)
{
int ret;
ret = nft_masq_module_init_ipv6();
if (ret < 0)
return ret;
ret = nft_masq_module_init_inet();
if (ret < 0) {
nft_masq_module_exit_ipv6();
return ret;
}
ret = nft_register_expr(&nft_masq_ipv4_type);
if (ret < 0) {
nft_masq_module_exit_inet();
nft_masq_module_exit_ipv6();
return ret;
}
ret = nf_nat_masquerade_inet_register_notifiers();
if (ret < 0) {
nft_masq_module_exit_ipv6();
nft_masq_module_exit_inet();
nft_unregister_expr(&nft_masq_ipv4_type);
return ret;
}
return ret;
}
static void __exit nft_masq_module_exit(void)
{
nft_masq_module_exit_ipv6();
nft_masq_module_exit_inet();
nft_unregister_expr(&nft_masq_ipv4_type);
nf_nat_masquerade_inet_unregister_notifiers();
}
module_init(nft_masq_module_init);
module_exit(nft_masq_module_exit);
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Arturo Borrero Gonzalez <arturo@debian.org>");
MODULE_ALIAS_NFT_EXPR("masq");
MODULE_DESCRIPTION("Netfilter nftables masquerade expression support");
| 25.406557 | 77 | 0.735966 |
759058a1b8de268a27d5d4d51a47339d1f0af915 | 17,446 | cs | C# | projects/Trading/Rylobot/src/Peaks.cs | psryland/rylogic_code | f79e471fe0d6714c5e0cf8385ddc2a88ab2e082b | [
"CNRI-Python"
] | 2 | 2020-11-11T16:19:04.000Z | 2021-01-19T01:53:29.000Z | projects/Trading/Rylobot/src/Peaks.cs | psryland/rylogic_code | f79e471fe0d6714c5e0cf8385ddc2a88ab2e082b | [
"CNRI-Python"
] | 1 | 2020-07-27T09:00:21.000Z | 2020-07-27T10:58:10.000Z | projects/Trading/Rylobot/src/Peaks.cs | psryland/rylogic_code | f79e471fe0d6714c5e0cf8385ddc2a88ab2e082b | [
"CNRI-Python"
] | 1 | 2021-04-04T01:39:55.000Z | 2021-04-04T01:39:55.000Z | using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using pr.extn;
using pr.maths;
namespace Rylobot
{
/// <summary>Types of patterns found using 'PricePeaks'</summary>
[Flags] public enum EPeakPattern
{
High = 1 << 0,
Low = 1 << 1,
BreakOut = 1 << 2,
Reversal = 1 << 3,
BreakOutHigh = BreakOut | High,
BreakOutLow = BreakOut | Low,
HighReversal = Reversal | High,
LowReversal = Reversal | Low,
}
/// <summary>Detects the highs and lows of the price</summary>
[DebuggerDisplay("[{Beg},{End}) strength={Strength} hh={HigherHighs} hl={HigherLows} lh={LowerHighs} ll={LowerLows}")]
public class PricePeaks
{
// Notes:
// - price is trending up if the lows are getting higher
// - price is trending down if the highs are getting lower
// - trend is unknown otherwise
// - Break outs can be detected by comparing two sets of price peaks
/// <summary>Find the highs and lows of the price</summary>
/// <param name="instr">The instrument to find peaks in</param>
/// <param name="iend">The last candle, i.e. look backwards from here</param>
public PricePeaks(Instrument instr, Idx iend, int window_size = 5)
{
Instrument = instr;
WindowSize = window_size;
ConfirmTrend = 0.5;
Beg = iend;
End = iend;
Highs = new List<Peak>();
Lows = new List<Peak>();
#region Find peaks
{
var threshold = ConfirmTrend * Instrument.MCS;
var corr_hi = new Correlation();
var corr_lo = new Correlation();
// The last high/low encountered
var hi = (Peak)null;
var lo = (Peak)null;
var done_hi = false;
var done_lo = false;
// Iterate through the peaks
foreach (var pk in FindPeaks(iend))
{
// Save the first peak as it might be a break out
if (FirstPeak == null)
{
FirstPeak = pk;
continue;
}
var last = pk.High ? hi : lo;
var peaks = pk.High ? Highs : Lows;
var corr = pk.High ? corr_hi : corr_lo;
var trend = pk.High ? TrendHigh : TrendLow;
var done = pk.High ? done_hi : done_lo;
// First peak encountered?
if (last == null)
{
// Just save the peak
corr.Add(pk.Index, pk.Price);
peaks.Add(pk);
}
// The trend has not been broken
else if (!done)
{
// Second peak encountered
if (trend == null)
{
// Form a trend line between the peaks
if (pk.High) TrendHigh = Monic.FromPoints(pk.Index, pk.Price, last.Index, last.Price);
else TrendLow = Monic.FromPoints(pk.Index, pk.Price, last.Index, last.Price);
corr.Add(pk.Index, pk.Price);
peaks.Add(pk);
}
// 3+ peak encountered, confirm trend strength
else
{
// Get the predicted value from the trend line
var p = trend.F(pk.Index);
if (Math.Abs(p - pk.Price) < threshold)
{
// If within tolerance, the trend is confirmed
corr.Add(pk.Index, pk.Price);
if (pk.High) TrendHigh = corr.LinearRegression;
else TrendLow = corr.LinearRegression;
peaks.Add(pk);
}
else
{
if (pk.High) done_hi = true;
else done_lo = true;
// Otherwise, if the trend does not have 3 points, it is rejected
if (peaks.Count < 3)
{
if (pk.High) TrendHigh = null;
else TrendLow = null;
}
}
}
}
// Save the peak as last
if (pk.High) hi = pk;
else lo = pk;
// If the high and low trends are done, break the loop
if (done_hi && done_lo)
break;
}
}
#endregion
}
/// <summary>The window size to use for peak detection</summary>
public int WindowSize { get; set; }
/// <summary>The cut-off for being considered 'near' the trend line (in units of MCS)</summary>
public double ConfirmTrend { get; set; }
/// <summary>The instrument on which the SnR levels have been calculated</summary>
public Instrument Instrument
{
[DebuggerStepThrough] get { return m_instrument; }
set
{
if (m_instrument == value) return;
m_instrument = value;
}
}
private Instrument m_instrument;
/// <summary>The index of the first candle considered (updated as FindPeaks scans backwards in time)</summary>
public Idx Beg { get; private set; }
/// <summary>The index of the last candle considered</summary>
public Idx End { get; private set; }
/// <summary>The first peak encountered (possible break out)</summary>
public Peak FirstPeak { get; private set; }
/// <summary>The high peaks that contribute to the trend (presumably most recent first, since FindPeaks scans backwards — confirm)</summary>
public List<Peak> Highs { get; private set; }
/// <summary>The low peaks that contribute to the trend (same ordering as 'Highs')</summary>
public List<Peak> Lows { get; private set; }
/// <summary>Return all peaks in order of increasing age</summary>
public IEnumerable<Peak> Peaks
{
    get
    {
        // Merge 'Highs' and 'Lows', always yielding the peak with the larger
        // (more recent) candle index first.
        int h = 0, l = 0;
        while (h != Highs.Count && l != Lows.Count)
            yield return Highs[h].Index > Lows[l].Index ? Highs[h++] : Lows[l++];

        // Drain whichever list still has peaks remaining
        while (h != Highs.Count)
            yield return Highs[h++];
        while (l != Lows.Count)
            yield return Lows[l++];
    }
}
/// <summary>The total number of peaks contributing to the peak data</summary>
public int PeakCount
{
    // Sum of the collected high and low peaks
    get { return Highs.Count + Lows.Count; }
}
/// <summary>The upper trend line (or null when no upper trend was found/confirmed)</summary>
public Monic TrendHigh { get; private set; }
/// <summary>The lower trend line (or null when no lower trend was found/confirmed)</summary>
public Monic TrendLow { get; private set; }
/// <summary>How strong the upper trend line is (in the range [0,+1))</summary>
/// <remarks>NOTE(review): TrendStrength computes (below-above)/total, which can be
/// negative, so negative values are possible despite the documented range — confirm.</remarks>
public double TrendHighStrength
{
    get { return TrendStrength(TrendHigh, Highs, true); }
}
/// <summary>How strong the lower trend line is (in the range [0,+1)); same caveat as TrendHighStrength</summary>
public double TrendLowStrength
{
    get { return TrendStrength(TrendLow, Lows, false); }
}
/// <summary>Determine a measure of trend strength</summary>
/// <param name="trend">The fitted trend line, or null when there is no trend</param>
/// <param name="peaks">The peaks that formed 'trend' (bounds the candle range scanned)</param>
/// <param name="high">True when measuring the upper trend line, false for the lower</param>
/// <returns>A signed measure, positive when candles mostly bounce off the expected side of
/// the line, weighted by the number of contributing candles; 0.0 when there is no trend or
/// no candle comes near the line.</returns>
private double TrendStrength(Monic trend, List<Peak> peaks, bool high)
{
    if (trend == null)
        return 0.0;

    // Trend strength has to be a measure of how often price approached the trend line
    // and bounced off. Candles miles away from the trend line don't count, only consider
    // candles that span or are within a tolerance range of the trend line.
    var above = 0.0; var below = 0.0; var count = 0;
    var threshold = ConfirmTrend * Instrument.MCS;

    // peaks.Back() is presumably the oldest stored peak — TODO confirm; the scan
    // runs from there up to 'IdxLast - WindowSize'.
    foreach (var c in Instrument.CandleRange(peaks.Back().Index, Instrument.IdxLast - WindowSize))
    {
        // Trend line value at this candle's (offset) index
        var p = trend.F(c.Index + Instrument.IdxFirst);

        // Skip candles entirely outside the tolerance band around the line
        if (c.High < p - threshold) continue;
        if (c.Low > p + threshold) continue;

        // Accumulate how far the candle pokes above/below the line
        above += Math.Max(0, c.High - p);
        below += Math.Max(0, p - c.Low);
        ++count;
    }

    // There must be some candles contributing
    var total = above + below;
    if (total == 0)
        return 0.0;

    // Return the proportion of above to below
    var strength = (high ? +1 : -1) * (below - above) / total;

    // Weight the strength based on the number of candles that contribute
    var weighted_count = Maths.Sigmoid(count, 6);
    return strength * weighted_count;
}
/// <summary>A value in the range [0,+1) indicating trend strength</summary>
public double Strength
{
    get { return Math.Max(TrendHighStrength, TrendLowStrength); }
}

/// <summary>Return the direction of the peak trend: +1, -1, or 0</summary>
public int Sign
{
    get
    {
        // Take the slope of whichever trend line is stronger. Guard against a
        // null trend line: TrendStrength can return negative values, so the
        // strength comparison can be won by a side whose trend line is null
        // (null maps to 0.0, which beats a negative strength on the other
        // side) — dereferencing TrendHigh/TrendLow unguarded would then throw.
        var slope = 0.0;
        if (TrendHighStrength > TrendLowStrength && TrendHigh != null)
            slope = TrendHigh.A;
        else if (TrendLowStrength > TrendHighStrength && TrendLow != null)
            slope = TrendLow.A;

        // Convert the slope to a trend measure and quantise it to a direction
        var trend = Instrument.MeasureTrendFromSlope(slope);
        return Math.Abs(trend) >= 0.5 ? Math.Sign(trend) : 0;
    }
}
/// <summary>The minimum distance between any two adjacent high/low peaks. Useful when 'Strength' is near zero</summary>
public QuoteCurrency PeakGap
{
    get
    {
        // Walk the peaks from most recent to oldest, measuring the price gap
        // between each adjacent pair of opposite-type (high vs low) peaks.
        // (The previous implementation never advanced 'last', so every gap
        // was measured against the first peak only.)
        var last = (Peak)null;
        var dist = (QuoteCurrency)double.MaxValue;
        foreach (var pk in Peaks)
        {
            if (last != null && last.High != pk.High)
                dist = Math.Min(dist, Math.Abs(pk.Price - last.Price));
            last = pk;
        }
        return dist;
    }
}
/// <summary>True if a break-out is detected (in either direction)</summary>
public bool IsBreakOut
{
    get { return IsBreakOutHigh || IsBreakOutLow; }
}
/// <summary>True if a break-out to the long side is detected (above the upper trend line)</summary>
public bool IsBreakOutHigh
{
    get { return IsBreakOutInternal(TrendHigh, Highs, true); }
}
/// <summary>True if a break-out to the short side is detected (below the lower trend line)</summary>
public bool IsBreakOutLow
{
    get { return IsBreakOutInternal(TrendLow, Lows, false); }
}
/// <summary>True if a break-out to the 'high' side is detected</summary>
/// <param name="trend">The trend line being tested (null means no trend, hence no break-out)</param>
/// <param name="peaks">The peaks that formed 'trend'; peaks[0] is the most recent</param>
/// <param name="high">True to test for an upward break-out, false for downward</param>
private bool IsBreakOutInternal(Monic trend, List<Peak> peaks, bool high)
{
    // A break-out is when the latest candle is significantly above the upper trend line
    // or below the lower trend line and showing signs of going further. Also, the preceding candles
    // must be below the trend line.

    // No trend, no break-out
    if (trend == null)
        return false;

    // The latest candle must be in the break-out direction
    var sign = high ? +1 : -1;
    var latest = Instrument.Latest;
    if (latest.Sign != sign)
        return false;

    // The price must be beyond the trend by a significant amount (one MCS).
    // trend.F(0.0) evaluates the trend line at index 0 — presumably the
    // current candle; TODO confirm the index convention.
    var price_threshold = trend.F(0.0) + sign * Instrument.MCS;
    if (Math.Sign(latest.Close - price_threshold) != sign)
        return false;

    // Only the latest few candles can be beyond the trend line
    // and all must be in the direction of the break out.
    if (peaks[0].Index < -2)
    {
        // Allow the last two candles to be part of the break out
        foreach (var c in Instrument.CandleRange(peaks[0].Index, -2))
        {
            // If more than half the candle is beyond the trend line, not a breakout
            var ratio = sign * Instrument.Compare(c, trend, false);
            if (ratio > 0)
                return false;
        }
    }
    return true;
}
/// <summary>Returns the price peaks using a window with size 'window_size'</summary>
/// <param name="iend">The candle index to start scanning backwards (in time) from</param>
/// <remarks>
/// Maintains two ring buffers of the last 'WindowSize' candle highs/lows. A price is
/// emitted as a peak when it remains the window's extreme for 'WindowSize' consecutive
/// candles. Side effect: 'Beg' is updated to the index of each candle visited.
/// </remarks>
public IEnumerable<Peak> FindPeaks(Idx iend)
{
    // Create window buffers for the high/low prices
    var price_hi = new QuoteCurrency[WindowSize];
    var price_lo = new QuoteCurrency[WindowSize];
    for (int i = 0; i != WindowSize; ++i)
    {
        price_hi[i] = -double.MaxValue;
        price_lo[i] = +double.MaxValue;
    }

    // Look for peaks.
    // 'd' is the ring buffer write cursor; 'hh'/'ll' hold the buffer slot of the current
    // extreme; 'hcount'/'lcount' count how many candles that extreme has survived.
    int d = 0, hh = -1, ll = -1, hcount = 0, lcount = 0;
    for (Idx i = iend; i-- != Instrument.IdxFirst; d = (d+1) % WindowSize)
    {
        var candle = Instrument[i];
        Beg = i;

        // Add the new price values
        price_hi[d] = candle.High;
        price_lo[d] = candle.Low;

        // Find the ring buffer index of the highest and lowest price
        var h = price_hi.IndexOfMaxBy(x => x);
        var l = price_lo.IndexOfMinBy(x => x);

        // If the high is the highest for the full window size, output it
        if (hh == h)
        {
            if (++hcount == WindowSize)
            {
                // Recover the peak's candle index from its ring-buffer offset.
                // Skip index == 0 because it's not a complete candle
                var idx = i + (d - hh + WindowSize) % WindowSize;
                if (Instrument.IdxLast - idx > 1) yield return new Peak(idx, price_hi[hh], true);
                hh = -1;
                hcount = 0;
            }
        }
        else
        {
            // New extreme: restart the survival count
            hh = h;
            hcount = 1;
        }

        // If the low is the lowest for the full window size, output it
        if (ll == l)
        {
            if (++lcount == WindowSize)
            {
                // Skip index == 0 because it's not a complete candle
                var idx = i + (d - ll + WindowSize) % WindowSize;
                if (Instrument.IdxLast - idx > 1) yield return new Peak(idx, price_lo[ll], false);
                ll = -1;
                lcount = 0;
            }
        }
        else
        {
            ll = l;
            lcount = 1;
        }
    }
}
}
/// <summary>A single price peak</summary>
[DebuggerDisplay("{Index} peak={Price} high={High}")]
public class Peak
{
    private readonly Idx m_index;
    private readonly QuoteCurrency m_price;
    private readonly bool m_high;

    public Peak(Idx index, QuoteCurrency price, bool high)
    {
        m_index = index;
        m_price = price;
        m_high = high;
    }

    /// <summary>The candle index of the peak</summary>
    public Idx Index { get { return m_index; } }

    /// <summary>The price at the peak</summary>
    public QuoteCurrency Price { get { return m_price; } }

    /// <summary>True if the peak is a high, false if it's a low</summary>
    public bool High { get { return m_high; } }
}
}
#if false
/// <summary>A value in the range [-1,+1] indicating trend strength</summary>
public double Strength
{
get
{
// Combine the peak counts into a trend strength measure.
// A sequence of 1 is no trend (0), 2 is a moderate trend (0.5), 3 is strong trend (0.75), 4+ is awesome trend (1.0)
// Scale each count onto the range [-1,+1]
var hh = HigherHighs != 0 ? +1.0 - 1.0/HigherHighs : 0.0;
var hl = HigherLows != 0 ? +1.0 - 1.0/HigherLows : 0.0;
var lh = LowerHighs != 0 ? +1.0 - 1.0/LowerHighs : 0.0;
var ll = LowerLows != 0 ? +1.0 - 1.0/LowerLows : 0.0;
// Return the weighted average
// 'hl' and 'lh' have more significance than 'hh' and 'll'.
const double a = 1.0, b = 1.5;
return (a*hh + b*hl - b*lh - a*ll) / (a+b);
}
}
/// <summary>A polynomial curve fitted to the high or low peaks</summary>
public IPolynomial TrendLine(bool high)
{
// A trend line is valid when the latest two peaks form a line that
// the third latest peak confirms.
// Create a line from the last two peaks
var peaks = Peaks.Where(x => x.High == high).ToArray();
if (peaks.Length < 3) return null;
var line = Monic.FromPoints(
(double)peaks[1].Index, (double)peaks[1].Price,
(double)peaks[0].Index, (double)peaks[0].Price);
// Look for the third peak to confirm the trend line
var p3 = line.F((double)peaks[2].Index);
var ConfirmThreshold = Instrument.MedianCS_50 * 0.5;
if (Misc.Abs(peaks[2].Price - p3) > ConfirmThreshold)
return null;
return line;
//// Perform a linear regression on the peaks
//var corr = new Correlation();
//foreach (var hi in Peaks.Where(x => x.High))
// corr.Add((double)hi.Index, (double)hi.Price);
//return corr.Count >= 2 ? corr.LinearRegression : new Monic(0,0);
}
/// <summary>Return a measure of how much the price obeys the trend line</summary>
public double TrendLineStrength(bool high)
{
var line = TrendLine(high);
if (line == null)
return 0.0;
var sign = high ? +1 : -1;
var peaks = Peaks.Where(x => x.High == high).ToArray();
Debug.Assert(peaks.Length >= 3);
// Compare the ratio of max price on each side of the line
var right = 0.0;
var wrong = 0.0;
foreach (var c in Instrument.CandleRange(peaks[2].Index, Instrument.IdxLast))
{
// If the candle is 'above' the trend line then the trend line is less good
var diff = sign * (c.WickLimit(sign) - line.F((double)(c.Index + Instrument.IdxFirst)));
if (diff > 0) wrong = Math.Max(+diff, wrong);
if (diff < 0) right = Math.Max(-diff, right);
}
if (right == 0) return 0.0;
if (wrong == 0) return 1.0;
var ratio = (right - wrong) / (right + wrong);
// Scale the threshold value to 0.5.
const double threshold = 0.5;
return Maths.Sigmoid(ratio, threshold);
}
#endif
// Get the last two high peaks and last two low peaks
//
/*
// The last high/low encountered
var hi = (Peak)null;
var lo = (Peak)null;
// The number of sequential higher lows, etc
var hh_done = false; var lh_done = false;
var hl_done = false; var ll_done = false;
// Scan through historic peaks (*remember* scanning backwards in time!)
foreach (var pk in FindPeaks(iend))
{
// All trends finished?
if (hh_done && hl_done && lh_done && ll_done)
break;
// Record the last peak used
if (End == iend)
End = pk.Index;
// Found a high peak
if (pk.High)
{
if (hi == null)
{
Peaks.Add(pk);
}
else
{
// A higher high?
if (hi.Price > pk.Price)
{
if (!hh_done)
{
++HigherHighs;
Beg = pk.Index;
Peaks.Add(pk);
}
if (hi.Price > pk.Price)
{
lh_done = true;
ll_done = true;
}
}
// A lower high?
else if (hi.Price < pk.Price)
{
if (!lh_done)
{
++LowerHighs;
Beg = pk.Index;
Peaks.Add(pk);
}
if (hi.Price < pk.Price)
{
hh_done = true;
hl_done = true;
}
}
}
hi = pk;
}
else // pk.Low
{
// The first low encountered
if (lo == null)
{
Peaks.Add(pk);
}
else
{
// A lower low?
if (lo.Price < pk.Price)
{
if (!ll_done)
{
++LowerLows;
Beg = pk.Index;
Peaks.Add(pk);
}
if (lo.Price < pk.Price)
{
hl_done = true;
hh_done = true;
}
}
// A higher low?
else if (lo.Price > pk.Price)
{
if (!hl_done)
{
++HigherLows;
Beg = pk.Index;
Peaks.Add(pk);
}
if (lo.Price > pk.Price)
{
ll_done = true;
lh_done = true;
}
}
}
lo = pk;
}
}
*/
| 28.184168 | 123 | 0.589648 |
cf808f37e89868a049bbd8853e1ce2a940942b82 | 636 | swift | Swift | DiscourseClient/DiscourseClient/Model/Requests/DeleteTopicRequest.swift | jsmdev/xi-bootcamp-mobile-ios-advanced-ui | 716bfe83c2b6f32823abe9c6d9a220fbc2dc0b14 | [
"MIT"
] | 1 | 2022-02-01T14:30:33.000Z | 2022-02-01T14:30:33.000Z | DiscourseClient/DiscourseClient/Model/Requests/DeleteTopicRequest.swift | jsmdev/xi-bootcamp-mobile-ios-advanced-ui | 716bfe83c2b6f32823abe9c6d9a220fbc2dc0b14 | [
"MIT"
] | null | null | null | DiscourseClient/DiscourseClient/Model/Requests/DeleteTopicRequest.swift | jsmdev/xi-bootcamp-mobile-ios-advanced-ui | 716bfe83c2b6f32823abe9c6d9a220fbc2dc0b14 | [
"MIT"
] | 1 | 2021-07-23T21:23:21.000Z | 2021-07-23T21:23:21.000Z | //
// DeleteTopicRequest.swift
// DiscourseClient
//
// Created by Roberto Garrido on 28/03/2020.
// Copyright © 2020 Roberto Garrido. All rights reserved.
//
import Foundation
/// Request that deletes a Discourse topic identified by its numeric id.
class DeleteTopicRequest: APIRequest {
    typealias Response = DeleteTopicResponse

    /// Identifier of the topic to delete.
    let id: Int

    init(id: Int) {
        self.id = id
    }

    /// HTTP verb for the request.
    var method: Method {
        return .DELETE
    }

    /// Endpoint path, e.g. "/t/42.json".
    var path: String {
        return "/t/\(id).json"
    }

    // A delete needs no query parameters, body, or extra headers.
    var parameters: [String : String] {
        return [:]
    }

    var body: [String : Any] {
        return [:]
    }

    var headers: [String : String] {
        return [:]
    }
}
| 15.9 | 58 | 0.564465 |
cd080d1abfb84059b4963bb87218b3fd56028ead | 11,793 | dart | Dart | prakesh_support/lib/main.dart | MarceloRab/search_app_bar_page | 78eb2f0bf9893517bc06796dfc249ec773faec1d | [
"MIT"
] | 8 | 2020-10-06T23:55:19.000Z | 2021-09-23T14:25:38.000Z | prakesh_support/lib/main.dart | MarceloRab/search_app_bar_page | 78eb2f0bf9893517bc06796dfc249ec773faec1d | [
"MIT"
] | 5 | 2020-09-28T03:03:22.000Z | 2021-10-07T23:16:59.000Z | prakesh_support/lib/main.dart | MarceloRab/search_app_bar_page | 78eb2f0bf9893517bc06796dfc249ec773faec1d | [
"MIT"
] | 1 | 2021-04-12T04:53:32.000Z | 2021-04-12T04:53:32.000Z | //import 'package:diacritic/diacritic.dart';
import 'package:flutter/material.dart';
import 'package:get/get.dart';
import 'package:search_app_bar_page/search_app_bar_page.dart';
/// App entry point: a GetX-aware MaterialApp whose routes come from [AppPages].
void main() {
  runApp(
    GetMaterialApp(
      title: 'AppExample',
      initialRoute: AppPages.INITIAL,
      getPages: AppPages.routes,

      ///Here is the injection that you only dispose of with the app.
      // initialBinding: SharingBindings(),
    ),
  );
}
/// Registers the shared [SharingController] with GetX dependency injection.
/// Bound to the HOME page in [AppPages]; later pages reuse the same instance.
class SharingBindings extends Bindings {
  @override
  void dependencies() {
    Get.put(SharingController());
  }
}
/// Named route paths used by [AppPages].
abstract class Routes {
  static const HOME = '/home';
  static const EXAMPLE_ONE = '/example_one';
  static const EXAMPLE_TWO = '/example_two';
}
/// Route table. EXAMPLE_TWO's page builder also kicks off the demo data loads
/// (and two delayed list swaps) before returning the page widget.
class AppPages {
  static const INITIAL = Routes.HOME;

  static final routes = [
    GetPage(
      name: Routes.HOME,
      page: () => HomePage(),

      ///Here you do the injection in a modular way.
      ///As it is the first page, I don't need bindings on the others.
      /// It has the same controller.
      binding: SharingBindings(),
    ),
    GetPage(
      name: Routes.EXAMPLE_ONE,
      page: () => HomePageExample1(),
    ),
    GetPage(
      name: Routes.EXAMPLE_TWO,
      page: () {
        ///Collect the injection performed on the SharingBindings of HomePage.
        Get.find<SharingController>().getRxList(TypeList.expired);

        // Swap the displayed list after 6s and again after 12s to demo
        // reactive rebuilds.
        6.delay(() {
          Get.find<SharingController>().getRxList(TypeList.consumed);
          Get.snackbar('LIsta consumed', 'Changed');
        });
        12.delay(() {
          Get.find<SharingController>().getRxList(TypeList.active);
          Get.snackbar('LIsta active', 'Changed');
        });
        return HomePageExample2();
      },
    ),
  ];
}
/// Landing page with navigation shortcuts to the two example pages.
class HomePage extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    // Build the two navigation buttons up front; the column just lays them out.
    final buttons = <Widget>[
      MaterialButton(
        onPressed: () => Get.toNamed(Routes.EXAMPLE_ONE),
        child: const Text(
          'Page Example One',
          style: TextStyle(fontSize: 20),
        ),
      ),
      MaterialButton(
        onPressed: () => Get.toNamed(Routes.EXAMPLE_TWO),
        child: const Text(
          'Page Example Two',
          style: TextStyle(fontSize: 20),
        ),
      ),
    ];

    return Scaffold(
      appBar: AppBar(
        title: const Text('HomeView'),
        centerTitle: true,
      ),
      body: Center(
        child: Column(
          mainAxisSize: MainAxisSize.min,
          mainAxisAlignment: MainAxisAlignment.center,
          children: buttons,
        ),
      ),
    );
  }
}
/// Example 1: a StatefulWidget that triggers the data loads itself in initState.
class HomePageExample1 extends StatefulWidget {
  @override
  _HomePageExample1State createState() => _HomePageExample1State();
}
class _HomePageExample1State extends State<HomePageExample1> {
  // Shared controller injected by SharingBindings on the HOME route.
  late SharingController controller;

  @override
  void initState() {
    controller = Get.find<SharingController>();

    // Load one list now, then swap it after 6s and 12s to demo reactivity.
    controller.getRxList(TypeList.expired);
    6.delay(() {
      controller.getRxList(TypeList.consumed);
      Get.snackbar('LIsta consumed', 'Changed');
    });
    12.delay(() {
      controller.getRxList(TypeList.active);
      Get.snackbar('LIsta active', 'Changed');
    });
    // NOTE(review): convention is to call super.initState() first — confirm.
    super.initState();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: const Text('HomeView'),
        centerTitle: true,
      ),
      body: Obx(() {
        ///Advantage. Just insert the get inside and any changes
        ///redo this function
        if (controller.isLoading) {
          return const Center(child: CircularProgressIndicator());
        }
        return ListView(
          shrinkWrap: true,
          children: controller.listCchoice
              .map((Person person) => Card(
                  margin:
                      const EdgeInsets.symmetric(horizontal: 16, vertical: 4),
                  shape: RoundedRectangleBorder(
                      borderRadius: BorderRadius.circular(4)),
                  // color: Theme.of(context).primaryColorDark,
                  child: Padding(
                    padding: const EdgeInsets.all(14.0),
                    child: Row(
                      children: [
                        Expanded(
                          child: Text(
                            'Name: ${person.name}',
                            style: const TextStyle(fontSize: 16),
                          ),
                        ),
                        Expanded(
                          child: Text(
                            'Age: ${person.age!.toStringAsFixed(2)}',
                            style: const TextStyle(fontSize: 12),
                          ),
                        )
                      ],
                    ),
                  )))
              .toList(),
        );
      }),
    );
  }
}
/// GetView = > You already do the find to collect the injection automatically.
/// Example 2: the data loads are kicked off by the route builder in [AppPages];
/// the page only renders the reactive list.
class HomePageExample2 extends GetView<SharingController> {
  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: const Text('HomeView'),
        centerTitle: true,
      ),
      // getStreamWidget is a project extension on the Rx loading flag;
      // presumably it rebuilds the child whenever the flag changes — confirm.
      body: controller.isLoadingRxList.getStreamWidget(
          obxWidgetBuilder: (context, isloading) {
        return ListView(
          shrinkWrap: true,
          children: controller.listCchoice
              .map((Person person) => Card(
                  margin:
                      const EdgeInsets.symmetric(horizontal: 16, vertical: 4),
                  shape: RoundedRectangleBorder(
                      borderRadius: BorderRadius.circular(4)),
                  // color: Theme.of(context).primaryColorDark,
                  child: Padding(
                    padding: const EdgeInsets.all(14.0),
                    child: Row(
                      children: [
                        Expanded(
                          child: Text(
                            'Name: ${person.name}',
                            style: const TextStyle(fontSize: 16),
                          ),
                        ),
                        Expanded(
                          child: Text(
                            'Age: ${person.age!.toStringAsFixed(2)}',
                            style: const TextStyle(fontSize: 12),
                          ),
                        )
                      ],
                    ),
                  )))
              .toList(),
        );
      }),
    );
  }
}
/// Shared controller that loads one of three mock person lists into
/// [listCchoice] and exposes a reactive loading flag.
class SharingController extends GetxController {
  /// Simulated network latency (seconds) for each load.
  final timeDelay = 3.0;

  List<Person> consumed = <Person>[];
  List<Person> expired = <Person>[];
  List<Person> active = <Person>[];

  /// The list currently displayed by the UI.
  List<Person> listCchoice = <Person>[];

  ///This will make the loading flag reactive.
  final isLoadingRxList = true.obs;

  set isLoading(bool? value) => isLoadingRxList.value = value;

  bool get isLoading => isLoadingRxList.value!;

  /// Loads the list selected by [typeList] into [listCchoice] and returns it.
  /// Raises the loading flag for the duration of the (simulated) fetch.
  /// (Previously this always returned the never-populated 'expired' field;
  /// it now returns the list that was actually loaded.)
  Future<List> getRxList(TypeList typeList) async {
    isLoading = true;
    switch (typeList) {
      case TypeList.consumed:
        await loadMyConsumedSharings();
        break;
      case TypeList.expired:
        await loadMyExpiredSharings();
        break;
      case TypeList.active:
        await loadMyActiveSharings();
        break;
    }
    isLoading = false;
    return listCchoice;
  }

  /// Replace [listCchoice] with the mock 'active' data after a fake delay.
  Future<void> loadMyActiveSharings() async {
    await timeDelay.delay();
    listCchoice.clear();
    listCchoice.addAll(activePerson);
  }

  /// Replace [listCchoice] with the mock 'consumed' data after a fake delay.
  Future<void> loadMyConsumedSharings() async {
    await timeDelay.delay();
    listCchoice.clear();
    listCchoice.addAll(consumedPerson);
  }

  /// Replace [listCchoice] with the mock 'expired' data after a fake delay.
  Future<void> loadMyExpiredSharings() async {
    await timeDelay.delay();
    listCchoice.clear();
    listCchoice.addAll(expiredPerson);
  }
}
/// Which mock data set [SharingController.getRxList] should load.
enum TypeList { consumed, expired, active }

/// Mock data for [TypeList.active].
final activePerson = <Person>[
  Person(name: 'Rafaela Pinho', age: 30),
  Person(name: 'Paulo Emilio Silva', age: 45),
  Person(name: 'Pedro Gomes', age: 18),
  Person(name: 'Orlando Guerra', age: 23),
  Person(name: 'Zacarias Triste', age: 15),
];

/// Mock data for [TypeList.expired].
final expiredPerson = <Person>[
  Person(name: 'Rafaela Pinho', age: 30),
  Person(name: 'Paulo Emilio Silva', age: 45),
  Person(name: 'Pedro Gomes', age: 18),
  Person(name: 'Orlando Guerra', age: 23),
  Person(name: 'Zacarias Triste', age: 15),
  Person(name: 'Antonio Rabelo', age: 33),
  Person(name: 'Leticia Maciel', age: 47),
  Person(name: 'Patricia Oliveira', age: 19),
  Person(name: 'Pedro Lima', age: 15),
  Person(name: 'Junior Rabelo', age: 33),
  Person(name: 'Lucia Maciel', age: 47),
  Person(name: 'Ana Oliveira', age: 19),
  Person(name: 'Thiago Silva', age: 33),
  Person(name: 'Charles Ristow', age: 47),
  Person(name: 'Raquel Montenegro', age: 19),
  Person(name: 'Rafael Peireira', age: 15),
  Person(name: 'Nome Comum', age: 33),
];

/// Mock data for [TypeList.consumed] (the largest list).
final consumedPerson = <Person>[
  Person(name: 'Rafaela Pinho', age: 30),
  Person(name: 'Paulo Emilio Silva', age: 45),
  Person(name: 'Pedro Gomes', age: 18),
  Person(name: 'Orlando Guerra', age: 23),
  Person(name: 'Ana Pereira', age: 23),
  Person(name: 'Zacarias Triste', age: 15),
  Person(name: 'Antonio Rabelo', age: 33),
  Person(name: 'Leticia Maciel', age: 47),
  Person(name: 'Patricia Oliveira', age: 19),
  Person(name: 'Pedro Lima', age: 15),
  Person(name: 'Fabio Melo', age: 51),
  Person(name: 'Junior Rabelo', age: 33),
  Person(name: 'Lucia Maciel', age: 47),
  Person(name: 'Ana Oliveira', age: 19),
  Person(name: 'Thiago Silva', age: 33),
  Person(name: 'Charles Ristow', age: 47),
  Person(name: 'Raquel Montenegro', age: 19),
  Person(name: 'Rafael Peireira', age: 15),
  Person(name: 'Thiago Ferreira', age: 33),
  Person(name: 'Joaquim Gomes', age: 18),
  Person(name: 'Esther Guerra', age: 23),
  Person(name: 'Pedro Braga', age: 19),
  Person(name: 'Milu Silva', age: 17),
  Person(name: 'William Ristow', age: 47),
  Person(name: 'Elias Tato', age: 22),
  Person(name: 'Dada Istomesmo', age: 44),
  Person(name: 'Nome Incomum', age: 52),
  Person(name: 'Qualquer Nome', age: 9),
  Person(name: 'First Last', age: 11),
  Person(name: 'Bom Dia', age: 23),
  Person(name: 'Bem Mequiz', age: 13),
  Person(name: 'Mal Mequer', age: 71),
  Person(name: 'Quem Sabe', age: 35),
  Person(name: 'Miriam Leitao', age: 33),
  Person(name: 'Gabriel Mentiroso', age: 19),
  Person(name: 'Caio Petro', age: 27),
  Person(name: 'Tanto Nome', age: 66),
  Person(name: 'Nao Diga', age: 33),
  Person(name: 'Fique Queto', age: 11),
  Person(name: 'Cicero Gome', age: 37),
  Person(name: 'Carlos Gome', age: 48),
  Person(name: 'Mae Querida', age: 45),
  Person(name: 'Exausto Nome', age: 81),
];
//class Person extends CacheJson {
/// Person record with Map (de)serialisation helpers. All fields are nullable.
class Person {
  final String? name;
  final int? age;
  final String? id;
  final String? avatar;
  final String? username;
  final String? image;

  Person({
    this.name,
    this.age,
    this.id,
    this.avatar,
    this.username,
    this.image,
  });

  @override
  String toString() {
    return 'Person{name: $name, age: $age}';
  }

  /// Builds a Person from a decoded map. 'age' defaults to 0 when missing or
  /// null; every other field simply stays null when absent.
  factory Person.fromMap(Map<String, dynamic> map) {
    return Person(
      name: map['name'] as String?,
      age: (map['age'] as int?) ?? 0,
      id: map['id'] as String?,
      avatar: map['avatar'] as String?,
      username: map['username'] as String?,
      image: map['image'] as String?,
    );
  }

  /// Serialises the record to a map (inverse of [Person.fromMap]).
  Map<String, dynamic> toMap() {
    return {
      'name': name,
      'age': age,
      'id': id,
      'avatar': avatar,
      'username': username,
      'image': image,
    };
  }
}
| 29.263027 | 79 | 0.56118 |
827404913a0a8413caab700e8cf9b0e08c9eaccb | 265 | sql | SQL | openGaussBase/testcase/PROCEDURE/PROC_ARRAY/Opengauss_Function_Procedure_Array_Case0009.sql | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/PROCEDURE/PROC_ARRAY/Opengauss_Function_Procedure_Array_Case0009.sql | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/PROCEDURE/PROC_ARRAY/Opengauss_Function_Procedure_Array_Case0009.sql | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | -- @describe:存储过程中定义数组类型,begin end中间语句为null
-- Create the stored procedure: declares a VARRAY type; the body is a single NULL statement
CREATE OR REPLACE procedure pro_record_009() AS
TYPE months_varray IS VARRAY(12) OF varchar2(200);
BEGIN
null;
END;
/
-- Call the stored procedure
call pro_record_009();
-- Drop the stored procedure
drop procedure pro_record_009;
| 8.28125 | 50 | 0.720755 |
acfe461220eef9f02474d9e41e72bd3f9e400a4f | 4,052 | cpp | C++ | AppTools/help/AppCfg.cpp | hermixy/AppTools | 893dbefd1c4d9eea566e7b1d8e43bed55b8cf999 | [
"Apache-2.0"
] | 1 | 2022-03-02T03:27:04.000Z | 2022-03-02T03:27:04.000Z | AppTools/help/AppCfg.cpp | hermixy/AppTools | 893dbefd1c4d9eea566e7b1d8e43bed55b8cf999 | [
"Apache-2.0"
] | null | null | null | AppTools/help/AppCfg.cpp | hermixy/AppTools | 893dbefd1c4d9eea566e7b1d8e43bed55b8cf999 | [
"Apache-2.0"
] | 1 | 2022-03-02T03:27:05.000Z | 2022-03-02T03:27:05.000Z | #include "AppCfg.h"
#include "UiSet.h"
#pragma execution_character_set("utf-8")
// ---- Persisted settings (defaults used until config.ini is read) ----

// TCP
QString AppCfg::tcpModel = "TCPServer";
QString AppCfg::tcpIp = "127.0.0.1";
int AppCfg::tcpPort = 8000;
bool AppCfg::tcpHexSend = true;
bool AppCfg::tcpHexRecv = true;
int AppCfg::tcpAutoSendTime = 1000;
int AppCfg::tcpAutoConnectTime = 1000;
QString AppCfg::tcpData="";

// Serial
int AppCfg::serialBaudRate=9600;
int AppCfg::serialStopBit=1;
int AppCfg::serialDataBit=8;
QString AppCfg::serialCheckData="无";  // parity; "无" means "none" (runtime value, kept verbatim)
bool AppCfg::serialHexSend=true;  // send in hex
bool AppCfg::serialHexRecv=true;  // receive in hex
int AppCfg::serialAutoSendTime=1000;  // auto-send interval
QString AppCfg::serialData="";
// Load all settings from <AppPath>/cfg/config.ini into the static members.
// When the file is missing/empty, checkConfig() regenerates it from the
// defaults and we bail out, keeping the current in-memory values.
void AppCfg::readConfig()
{
    if (!checkConfig()) {
        qDebug()<<"check error";
        return;
    }

    QString fileName = QString("%1/cfg/config.ini").arg(AppPath);
    QSettings set(fileName, QSettings::IniFormat);

    set.beginGroup("tcpconfig");// prefix subsequent keys with this group
    AppCfg::tcpModel = set.value("model").toString();
    AppCfg::tcpIp = set.value("ip").toString();
    AppCfg::tcpPort = set.value("port").toInt();
    AppCfg::tcpHexSend = set.value("hexsend").toBool();
    AppCfg::tcpHexRecv = set.value("hexrecv").toBool();
    AppCfg::tcpAutoSendTime = set.value("autosendtime").toInt();
    AppCfg::tcpAutoConnectTime = set.value("autoconnecttime").toInt();
    AppCfg::tcpData=set.value("senddata").toString();
    set.endGroup();

    set.beginGroup("serialconfig");// prefix subsequent keys with this group
    AppCfg::serialBaudRate = set.value("BaudRate").toInt();
    AppCfg::serialStopBit = set.value("StopBit").toInt();
    AppCfg::serialDataBit = set.value("DataBit").toInt();
    AppCfg::serialCheckData = set.value("CheckData").toString();
    AppCfg::serialHexSend = set.value("HexSend").toBool();
    AppCfg::serialHexRecv = set.value("HexRecv").toBool();
    AppCfg::serialAutoSendTime = set.value("AutoSendTime").toInt();
    AppCfg::serialData=set.value("SendData").toString();
    set.endGroup();
}
// Persist all current settings to <AppPath>/cfg/config.ini (groups mirror
// those read by readConfig()).
void AppCfg::writeConfig()
{
    QString fileName = QString("%1/cfg/config.ini").arg(AppPath);
    QSettings set(fileName, QSettings::IniFormat);

    set.beginGroup("tcpconfig");
    set.setValue("model", AppCfg::tcpModel);
    set.setValue("ip", AppCfg::tcpIp);
    set.setValue("port", AppCfg::tcpPort);
    set.setValue("hexsend", AppCfg::tcpHexSend);
    set.setValue("hexrecv", AppCfg::tcpHexRecv);
    set.setValue("autosendtime", AppCfg::tcpAutoSendTime);
    set.setValue("autoconnecttime", AppCfg::tcpAutoConnectTime);
    set.setValue("senddata",AppCfg::tcpData);
    set.endGroup();

    set.beginGroup("serialconfig");
    set.setValue("BaudRate", AppCfg::serialBaudRate);
    set.setValue("StopBit", AppCfg::serialStopBit);
    set.setValue("DataBit", AppCfg::serialDataBit);
    set.setValue("CheckData", AppCfg::serialCheckData);
    set.setValue("HexSend", AppCfg::serialHexSend);
    set.setValue("HexRecv", AppCfg::serialHexRecv);
    set.setValue("AutoSendTime", AppCfg::serialAutoSendTime);
    set.setValue("SendData",AppCfg::serialData);
    set.endGroup();
}
// Create (or overwrite) the config file from the current in-memory values.
void AppCfg::newConfig()
{
    writeConfig();
}
// Returns true when a usable (non-empty) config file exists. A zero-size —
// or non-existent — file is regenerated from the defaults and false is
// returned so the caller keeps running with the initial values.
bool AppCfg::checkConfig()
{
    QString fileName = QString("%1/cfg/config.ini").arg(AppPath);

    // If the config file size is 0, keep running with the initial values
    // and generate a fresh config file.
    QFile file(fileName);
    if (file.size() == 0) {
        qDebug()<<"filesize=0";
        newConfig();
        return false;
    }
    return true;
}
// Append a timestamped line to today's error log (<AppPath>/cfg/Error_<date>.txt).
// Best-effort: if the file cannot be opened (e.g. the cfg directory is
// missing), the message is dropped instead of writing to an unopened device.
void AppCfg::writeError(QString str)
{
    QString fileName = QString("%1/cfg/Error_%2.txt").arg(AppPath).arg(QDATE);
    QFile file(fileName);
    if (!file.open(QIODevice::WriteOnly | QIODevice::Append | QFile::Text))
        return;
    QTextStream stream(&file);
    stream << DATETIME << " " << str << "\n";
    stream.flush();
    file.close();
}
// Ensure 'dirname' exists, creating it (and any parents) when needed.
// Relative names are resolved under AppPath.
void AppCfg::newDir(QString dirname)
{
    // A path containing a slash marker is treated as absolute:
    // Linux paths start with '/', Windows paths contain ":/".
    if (!dirname.startsWith("/") && !dirname.contains(":/"))
    {
        dirname = QString("%1/%2").arg(AppPath).arg(dirname);
    }

    QDir dir(dirname);
    if (!dir.exists())
    {
        dir.mkpath(dirname);
    }
}
| 30.238806 | 78 | 0.66387 |
4a5a8811770fc7e59450dfab3b124fd2056c7ec6 | 8,357 | js | JavaScript | src/Components/Drawer.js | TheBaphomet666/LAB05IETI | 720458e320500dfd23c231b9ebf9d0ee72700077 | [
"MIT"
] | null | null | null | src/Components/Drawer.js | TheBaphomet666/LAB05IETI | 720458e320500dfd23c231b9ebf9d0ee72700077 | [
"MIT"
] | null | null | null | src/Components/Drawer.js | TheBaphomet666/LAB05IETI | 720458e320500dfd23c231b9ebf9d0ee72700077 | [
"MIT"
] | null | null | null | import React from 'react';
import PropTypes from 'prop-types';
import AppBar from '@material-ui/core/AppBar';
import CssBaseline from '@material-ui/core/CssBaseline';
import Divider from '@material-ui/core/Divider';
import Drawer from '@material-ui/core/Drawer';
import Hidden from '@material-ui/core/Hidden';
import IconButton from '@material-ui/core/IconButton';
import InboxIcon from '@material-ui/icons/MoveToInbox';
import List from '@material-ui/core/List';
import ListItem from '@material-ui/core/ListItem';
import ListItemIcon from '@material-ui/core/ListItemIcon';
import ListItemText from '@material-ui/core/ListItemText';
import MailIcon from '@material-ui/icons/Mail';
import MenuIcon from '@material-ui/icons/Menu';
import Toolbar from '@material-ui/core/Toolbar';
import Typography from '@material-ui/core/Typography';
import Avatar from "@material-ui/core/Avatar";
import CreateIcon from '@material-ui/icons/Create';
import ListItemAvatar from "@material-ui/core/ListItemAvatar";
import ListItemSecondaryAction from "@material-ui/core/ListItemSecondaryAction";
import { withStyles } from '@material-ui/core/styles';
import Card from "./Card"
import Cardlist from "./CardList"
// Fixed width (px) of the permanent navigation drawer.
const drawerWidth = 240;

// JSS styles for withStyles(): on 'sm'-and-up screens the drawer is permanent
// and the app bar shrinks to make room; below 'sm' the drawer is temporary
// and the menu button appears instead.
const styles = theme => ({
  root: {
    display: 'flex',
  },
  drawer: {
    [theme.breakpoints.up('sm')]: {
      width: drawerWidth,
      flexShrink: 0,
    },
  },
  appBar: {
    marginLeft: drawerWidth,
    [theme.breakpoints.up('sm')]: {
      width: `calc(100% - ${drawerWidth}px)`,
    },
  },
  menuButton: {
    marginRight: 20,
    [theme.breakpoints.up('sm')]: {
      display: 'none',
    },
  },
  toolbar: theme.mixins.toolbar,
  drawerPaper: {
    width: drawerWidth,
  },
  content: {
    flexGrow: 1,
    padding: theme.spacing.unit * 3,
  },
});
class ResponsiveDrawer extends React.Component {
state = {
mobileOpen: false,
};
constructor(props) {
super(props);
this.handleLogout = this.handleLogout.bind(this);
this.handleNewTask = this.handleNewTask.bind(this);
this.state = {
cardList: [],
};
this.getTaks = this.getTaks.bind(this);
}
componentDidMount() {
this.getTaks();
}
getTaks(query) {
/*fetch('https://www.googleapis.com/books/v1/volumes?q=' + query)
.then(response => response.json())
.then(data => {
let booksList = [];
data.items.forEach(function (book) {
booksList.push({
title: book.volumeInfo.title,
image: book.volumeInfo.imageLinks ? book.volumeInfo.imageLinks.thumbnail : "",
language: book.volumeInfo.language
})
});
this.setState({booksList: booksList});
});*/
// console.log(localStorage.getItem("Tasks") === null);
if(!(localStorage.getItem("Tasks") === null)){
let cardList = JSON.parse(localStorage.getItem("Tasks"));
this.setState({cardList: cardList});
//console.log(this.state.cardList);
}
}
handleDrawerToggle = () => {
this.setState(state => ({ mobileOpen: !state.mobileOpen }));
};
render() {
const { classes, theme } = this.props;
const drawer = (
<div>
<div className={classes.toolbar} />
<Divider/>
<List className={classes.avatarBox}>
<ListItem alignItems="left">
<ListItemAvatar style={{
left: -5,
}}>
<Avatar>N</Avatar>
</ListItemAvatar>
<ListItemText
primary="Oscar Pinto"
secondary="moka117@hotmail.com"
/>
<ListItemSecondaryAction style={{
position: 'absolute',
left: 190,
top: '30%',
transform: 'translateY(-50%)',
}}>
<IconButton onClick="">
<CreateIcon/>
</IconButton>
</ListItemSecondaryAction>
</ListItem>
</List>
<Divider />
<List>
<ListItem button key={"New Task"} onClick={this.handleNewTask}>
<ListItemIcon> <MailIcon /></ListItemIcon>
<ListItemText primary={"New Task"} />
</ListItem>
<ListItem button key={"Log-Out"} onClick={this.handleLogout}>
<ListItemIcon><InboxIcon /> </ListItemIcon>
<ListItemText primary={"Log-Out"} />
</ListItem>
</List>
</div>
);
return (
<div className={classes.root}>
<CssBaseline />
<AppBar position="fixed" className={classes.appBar}>
<Toolbar>
<IconButton
color="inherit"
aria-label="Open drawer"
onClick={this.handleDrawerToggle}
className={classes.menuButton}
>
<MenuIcon />
</IconButton>
<Typography variant="h6" color="inherit" noWrap>
Tasks
</Typography>
</Toolbar>
</AppBar>
<nav className={classes.drawer}>
{/* The implementation can be swapped with js to avoid SEO duplication of links. */}
<Hidden smUp implementation="css">
<Drawer
container={this.props.container}
variant="temporary"
anchor={theme.direction === 'rtl' ? 'right' : 'left'}
open={this.state.mobileOpen}
onClose={this.handleDrawerToggle}
classes={{
paper: classes.drawerPaper,
}}
>
{drawer}
</Drawer>
</Hidden>
<Hidden xsDown implementation="css">
<Drawer
classes={{
paper: classes.drawerPaper,
}}
variant="permanent"
open
>
{drawer}
</Drawer>
</Hidden>
</nav>
<main className={classes.content}>
<div className={classes.toolbar} />
<Cardlist cardsList={this.state.cardList} />
</main>
</div>
);
}
handleLogout(e) {
//localStorage.setItem('Called', "true");
localStorage.setItem('IsLoggedIn', "false");
window.location.reload(false);
}
// Navigate to the task-creation page. `replace` swaps the current history
// entry, so the browser Back button will not return to this view.
handleNewTask(e) {
//localStorage.setItem('Called', "true");
window.location.replace("/NewTask");
}
}
// Runtime prop validation for the drawer component.
ResponsiveDrawer.propTypes = {
classes: PropTypes.object.isRequired,
// Injected by the documentation to work in an iframe.
// You won't need it on your project.
container: PropTypes.object,
// Provided by withStyles({ withTheme: true }); used for RTL detection.
theme: PropTypes.object.isRequired,
};
// Return a new array with later duplicates removed (strict-equality
// semantics, first occurrence wins). Rewritten from the original O(n^2)
// pairwise splice scan to a single O(n) pass over a Set.
Array.prototype.unique = function() {
var seen = new Set();
var result = [];
for (var i = 0; i < this.length; ++i) {
var value = this[i];
// `===` never matches NaN against itself, so the original kept every NaN;
// special-case NaN to preserve that behavior (a Set would dedupe them).
if (typeof value === 'number' && value !== value) {
result.push(value);
} else if (!seen.has(value)) {
seen.add(value);
result.push(value);
}
}
return result;
};
export default withStyles(styles, { withTheme: true })(ResponsiveDrawer); | 32.901575 | 104 | 0.468829 |
2d882c3ee8d04d8f63b50879588d2ab4fdbe7ee9 | 1,936 | html | HTML | _includes/business-accounting.html | mbattur/um-global | fcb78dc81fb2f6f7e5871296d2365922797f6032 | [
"Apache-2.0"
] | null | null | null | _includes/business-accounting.html | mbattur/um-global | fcb78dc81fb2f6f7e5871296d2365922797f6032 | [
"Apache-2.0"
] | null | null | null | _includes/business-accounting.html | mbattur/um-global | fcb78dc81fb2f6f7e5871296d2365922797f6032 | [
"Apache-2.0"
] | null | null | null | <section class="bg-primary" id="business-accounting">
<!-- Heading row for the section. -->
<div class="container">
<div class="row">
<div class="col-lg-12 text-center">
<h2 class="section-heading">Business Accounting & Bookkeeping</h2>
<hr class="light">
</div>
</div>
</div>
<!-- Three service cards: icon (Font Awesome) + title + blurb. -->
<div class="container">
<div class="row">
<div class="col-lg-4 col-md-6 text-center">
<div class="service-box">
<i class="fa fa-4x fa-usd wow bounceIn text-white"></i>
<h3>Business Accounting</h3>
<p class="text-white">Turn to us for dependable accounting solutions and practical advice. We’ll know how to serve you because our CPAs and accountants are experienced in working with a variety of small, midsized, and large businesses.</p>
</div>
</div>
<div class="col-lg-4 col-md-6 text-center">
<div class="service-box">
<i class="fa fa-4x fa-area-chart wow bounceIn text-white" data-wow-delay=".1s"></i>
<h3>Bookkeeping</h3>
<p class="text-white">As a small business owner you have more important things to do than to keep your own books. We take care of your books for you, so you can get back to the job of running your business and generating profits.</p>
</div>
</div>
<div class="col-lg-4 col-md-6 text-center">
<div class="service-box">
<i class="fa fa-4x fa-check-circle-o wow bounceIn text-white" data-wow-delay=".2s"></i>
<h3>Agreed Upon Procedures</h3>
<p class="text-white">Agreed Upon Procedures (AUP) are conducted by our audit professionals in accordance with attestation standards established by the American Institute of Certified Public Accountants.</p>
</div>
</div>
</div>
</div>
</section>
| 53.777778 | 257 | 0.583678 |
df4fddf1db768a3543fb37d7f8efce905a5ea051 | 760 | rb | Ruby | lib/elfcat/base.rb | gbudiman/elfcat | 29c10e9ea286ddfdcbd1bda38c2786ffed5d8268 | [
"MIT"
] | null | null | null | lib/elfcat/base.rb | gbudiman/elfcat | 29c10e9ea286ddfdcbd1bda38c2786ffed5d8268 | [
"MIT"
] | null | null | null | lib/elfcat/base.rb | gbudiman/elfcat | 29c10e9ea286ddfdcbd1bda38c2786ffed5d8268 | [
"MIT"
] | null | null | null | class Base
# Readers for the parsed data hash and the deferred debug proc.
# NOTE(review): the `debug` reader is shadowed by the `debug` method defined
# below, so @debug is only reachable via the proc built in #initialize.
attr_reader :data, :debug
def initialize
@data = Hash.new
# Deferred hook: invoking the proc calls the subclass-provided #debug.
@debug = Proc.new { self.debug }
end
# Treat unknown method names as keys into @data and return the stored
# payload under :data.
# NOTE(review): no respond_to_missing? and no `super` fallback — an absent
# key raises NoMethodError on nil rather than the usual error; confirm this
# is intended.
def method_missing _method
return @data[_method][:data]
end
# Indexed access to a parsed entry (e.g. base[0] for the 0th record).
def [](_name)
return @data[_name]
end
# Abstract hook: subclasses must override this with their own debug output.
def debug
raise RuntimeError, "Base::debug() is stubbed. Please override this method"
end
# Parse `_element_count` fixed-size records out of `_resource`.
#
# _struct maps field name => [offset, length] within one record; each decoded
# record is stored in @data keyed by its index.
def parse_struct _struct, _resource, _element_count, _element_size
_element_count.times do |i|
st = Hash.new
struct_address = i * _element_size
_struct.each do |k, v|
# Util.concatenate reads v[1] bytes at record base + field offset v[0].
st[k] = Util.concatenate(_resource, struct_address + v[0], v[1])
end
@data[i] = st.dup
end
end
# Return the NUL-separated strings found in the given slice.
# NOTE(review): reads the global $resource rather than taking a resource
# argument like parse_struct does — confirm this asymmetry is intended.
def parse_slice _address, _length
return $resource.slice(_address, _length).split(/\x0/).dup
end
end | 20.540541 | 79 | 0.652632 |
b7994495c6eeec908ecc12ae35a19e764035e6ce | 321 | dart | Dart | app/lib/utils/external_definitions/external_definitions.dart | softmarshmallow/robbin | 87fa2b77a2f02cb482be320c70b2e3e5462a4875 | [
"MIT"
] | 6 | 2020-07-22T03:12:02.000Z | 2021-08-16T13:29:01.000Z | app/lib/utils/external_definitions/external_definitions.dart | softmarshmallow/wor.io | 87fa2b77a2f02cb482be320c70b2e3e5462a4875 | [
"MIT"
] | 5 | 2020-11-27T16:34:11.000Z | 2022-01-22T13:57:57.000Z | app/lib/utils/external_definitions/external_definitions.dart | softmarshmallow/wor.io | 87fa2b77a2f02cb482be320c70b2e3e5462a4875 | [
"MIT"
] | null | null | null | class ExternalDefinitionUtils {
/// Build a Lexico (Oxford) dictionary URL for [token].
///
/// Bugfix/consistency: the token is now percent-encoded so words containing
/// spaces or reserved characters form a valid URL path segment (plain words
/// pass through unchanged), mirroring the encoding in buildGoogleDefinition.
static String buildOxfordDefinition(String token) {
return "https://www.lexico.com/definition/${Uri.encodeComponent(token)}";
}
/// Build a Google search URL for [token].
/// NOTE(review): Uri.encodeFull leaves '&' and '=' unescaped, so a token
/// containing them would alter the query string — consider
/// Uri.encodeQueryComponent on the token alone if that matters.
static String buildGoogleDefinition(String token) {
var uri = 'https://www.google.com/search?q=$token';
var encoded = Uri.encodeFull(uri);
return encoded;
}
}
| 26.75 | 55 | 0.71028 |
60569ee89159420493f7b466f8e2a03e84be369a | 5,885 | html | HTML | nosotros.html | DanielAristy/Bienes_Raices | 8d48ce4ffc61594326f37780c771f65ce53fd221 | [
"Apache-2.0"
] | null | null | null | nosotros.html | DanielAristy/Bienes_Raices | 8d48ce4ffc61594326f37780c771f65ce53fd221 | [
"Apache-2.0"
] | null | null | null | nosotros.html | DanielAristy/Bienes_Raices | 8d48ce4ffc61594326f37780c771f65ce53fd221 | [
"Apache-2.0"
] | 1 | 2019-08-10T05:12:05.000Z | 2019-08-10T05:12:05.000Z | <!DOCTYPE html>
<html lang="en" dir="ltr">
<head>
<meta charset="utf-8">
<title>Bienes Raices</title>
<link href="https://fonts.googleapis.com/css?family=Lato" rel="stylesheet">
<link rel="stylesheet" href="css/normalize.css">
<link rel="stylesheet" href="css/style.css">
</head>
<!--Bienes Raices logotipo, links, y parrafo-->
<header class="site-header">
<div class="contenedor contenido">
<div class="barra">
<a href="index.html">
<img src="img/logo.svg" alt="Logotipo de Bienes Raices">
</a>
<div class="mobile-menu">
<a href="#navegacion">
<img src="img/barras.svg" alt="Icono Menu">
</a>
</div>
<nav id = "navegacion" class="navegacion "><!--Ventanas de navegacion-->
<a href="nosotros.html">Nosotros</a>
<a href="anuncios.html">Anuncios</a>
<a href="blog.html">Blog</a>
<a href="contacto.html">Contacto</a>
</nav>
</div>
</div><!--Contenedor-->
</header><!--Header-->
<!-- "About us" main content: image plus placeholder (lorem ipsum) copy. -->
<main class="contenedor">
<h1 class="fuenteW centrar-texto">Conoce Sobre Nosotros</h1>
<div class="contenido-nosotros">
<div class="imagen">
<img src="img/nosotros.jpg" alt="Imagen Sobre Nosotros">
</div>
<div class="texto-nosotros">
<blockquote>25 Años de Experiencia</blockquote>
<p>Praesent nec enim felis. Fusce tristique ultrices velit,
eu placerat arcu ultrices vel. Ut eu justo augue. Nunc molestie
dui id bibendum condimentum. Nulla ac blandit elit. Pellentesque
habitant morbi tristique senectus et netus et malesuada fames ac
turpis egestas. Sed euismod velit id magna convallis consequat.
Orci varius natoque penatibus et magnis dis parturient montes,
nascetur ridiculus mus. Curabitur rutrum dapibus pulvinar.
Nullam at accumsan urna. In lacinia pulvinar est, ac commodo elit
bibendum sed. Morbi quis eleifend magna. Pellentesque nisi sapien,
efficitur vitae molestie vel, tincidunt semper enim. Mauris aliquam
arcu eget nisi facilisis, ut blandit leo porta. Vivamus nec pulvinar purus,
vitae cursus tellus.</p>
<p>Suspendisse blandit, quam et pharetra consectetur, odio libero viverra
ipsum, in ullamcorper mauris urna non risus. Nunc nec consectetur felis.
Vestibulum quis sem nisl. Curabitur vulputate felis eu tortor volutpat
rhoncus. Donec in ex ut leo euismod commodo. Pellentesque a urna rhoncus,
sodales turpis in, finibus tortor. Praesent congue laoreet libero eget
convallis.Duis placerat ipsum lectus, ac molestie turpis sollicitudin id.
Donec tristique elit ac pulvinar blandit. Etiam vulputate eros id auctor
iaculis. Aliquam ut ligula est.</p>
<p>non vehicula mauris. Integer aliquet vitae diam non dapibus.
Duis luctus placerat sapien quis ultrices. Etiam pulvinar iaculis ipsum,
at convallis libero iaculis ut. Proin sit amet velit aliquam, ultrices lacus
a, vestibulum eros. Phasellus ac velit id lectus euismod placerat. Quisque
non felis ornare, ullamcorper sem a, fermentum augue. Fusce tincidunt dictum
mi finibus condimentum.
</p>
</div>
</div>
</main>
<!--Seccion mas sobre nosotros-->
<section class="contenedor seccion contenido">
<h2 class="fuenteW centrar-texto">Más Sobre Nosotros</h2>
<div class="icono-nosotros">
<div class="icono">
<img src="img/icono1.svg" alt="Icono Seguridad">
<h3>Seguridad</h3>
<p>Toda la seguridad que tu necesitas para tu pagina
web la puedes encontrar en la descripcion de este
canal.</p>
<!-- Bugfix: closing tags must not carry attributes; was </div class="icono"> (three times below). -->
</div>
<div class="icono">
<img src="img/icono2.svg" alt="Icono Precio">
<h3>El Mejor Precio</h3>
<p>Si estas buscando el mejor precio solo debes ingresar
a esta pagina y en la caja de mensajes nos dejas la
inquietud y nosotros con mucho gusto nos pondremos en
contacto.</p>
</div>
<div class="icono">
<img src="img/icono3.svg" alt="Icono Tiempo">
<h3>A Tiempo</h3>
<p>Si necesitas todas tus actualizaciones a tiempo
solo debes ponernos en la pagina de configuraciones
tu correo para contactarnos contigo y darte la solucion
a tiempo.</p>
</div>
</div>
</section>
<!--final de la pagina-->
<!-- Page footer: repeated navigation plus copyright. -->
<footer class="final-pg seccion">
<div class="contenedor contenedor-final">
<nav class="navegacion"> <!--Aplique mis estilos de iluminar los enlaces-->
<a href="nosotros.html">Nosotros</a>
<a href="anuncios.html">Anuncios</a>
<a href="blog.html">Blog</a>
<a href="contacto.html">Contacto</a>
</nav>
<p class="copyright"> Todos Los Derechos Reservados 2019 © </p>
</div>
</footer>
</body>
</html>
| 44.583333 | 96 | 0.544605 |
ddefc4294cdf3c62485ed8e6772f5ffe21ffcc41 | 1,314 | php | PHP | application/views/contact/confirm.php | ryoikema/first | d87d1f1896e8b214063698fb08cd02911ca7d29c | [
"MIT"
] | null | null | null | application/views/contact/confirm.php | ryoikema/first | d87d1f1896e8b214063698fb08cd02911ca7d29c | [
"MIT"
] | null | null | null | application/views/contact/confirm.php | ryoikema/first | d87d1f1896e8b214063698fb08cd02911ca7d29c | [
"MIT"
] | null | null | null | <div class="l-content">
<?php /* CodeIgniter view: contact-form confirmation step. Re-displays the
submitted values via set_value() and carries them forward as hidden
fields so "send" posts the same data again. */ ?>
<div class="post">
<h3>お問い合わせ 内容確認</h3>
<p class="confirm_text">
お問い合わせ内容はこちらでよろしいですか?<br>
よろしければ「送信する」ボタンを押してください。
</p>
<?php echo form_open('contact/confirm') ?>
<table class="contact_form">
<tbody>
<tr class="confirm_tr">
<th><label for="name">お名前</label></th>
<td><?php echo set_value('name'); ?></td>
</tr>
<tr class="confirm_tr">
<th><label for="email">メールアドレス</label></th>
<td><?php echo set_value('email'); ?></td>
</tr>
<tr class="confirm_tr">
<th><label for="text">お問い合わせ内容</label></th>
<td><?php echo set_value('text'); ?></td>
</tr>
<tr>
<td class="submit_btn" colspan="2">
<?php /* Hidden fields forward the submitted values to the next action. */ ?>
<?php echo form_hidden('name', set_value('name')); ?>
<?php echo form_hidden('email', set_value('email')); ?>
<?php echo form_hidden('text', set_value('text')); ?>
<input type="submit" name="return_edit" value="戻る">
<input type="submit" name="submit" value="送信">
</td>
</tr>
</tbody>
</table>
</form>
</div>
</div><!--/l-content--> | 33.692308 | 70 | 0.462709 |
dda36a328b02e5829294e947a9911844ad9a0790 | 560 | php | PHP | app/Models/Passenger.php | Fatima1160801/projectG | 04986b0797cee6dd619fd83e73c03186b5e4f5ae | [
"MIT"
] | null | null | null | app/Models/Passenger.php | Fatima1160801/projectG | 04986b0797cee6dd619fd83e73c03186b5e4f5ae | [
"MIT"
] | 4 | 2021-08-16T13:11:08.000Z | 2022-03-30T02:19:33.000Z | app/Models/Passenger.php | Fatima1160801/projectG | 04986b0797cee6dd619fd83e73c03186b5e4f5ae | [
"MIT"
] | null | null | null | <?php
namespace App\Models;
use App\Models\Trip;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Factories\HasFactory;
/**
 * Eloquent model for a passenger.
 *
 * NOTE(review): relation semantics below are inferred only from the visible
 * calls; confirm against the migrations/pivot tables.
 */
class Passenger extends Model
{
use HasFactory;
// Mass assignment: everything is fillable except the id and timestamps.
protected $guarded=['id','created_at','updated_at'];
// Many-to-many: trips this passenger takes part in.
public function trips (){
return $this->belongsToMany(Trip::class);
}
// One-to-many: payments made by this passenger.
public function payments(){
return $this->hasMany(Payment::class);
}
// NOTE(review): belongsToMany() against what looks like a pivot model
// (Passenger_Trip) is unusual — verify this shouldn't be hasMany() or a
// belongsToMany(Trip::class) with a custom pivot.
public function Passenger_Trip(){
return $this->belongsToMany(Passenger_Trip::class);
}
}
| 21.538462 | 63 | 0.657143 |
282474d868dfed299370610f15a4a5e3fc1b672a | 20,955 | cpp | C++ | active_3d_planning_core/src/module/trajectory_generator/rrt_star.cpp | danielduberg/mav_active_3d_planning | 0790e7d26175bf7d315975a4d91548b56f9ee877 | [
"BSD-3-Clause"
] | null | null | null | active_3d_planning_core/src/module/trajectory_generator/rrt_star.cpp | danielduberg/mav_active_3d_planning | 0790e7d26175bf7d315975a4d91548b56f9ee877 | [
"BSD-3-Clause"
] | null | null | null | active_3d_planning_core/src/module/trajectory_generator/rrt_star.cpp | danielduberg/mav_active_3d_planning | 0790e7d26175bf7d315975a4d91548b56f9ee877 | [
"BSD-3-Clause"
] | null | null | null | #define _USE_MATH_DEFINES
#include "active_3d_planning_core/module/trajectory_generator/rrt_star.h"
#include "active_3d_planning_core/data/trajectory.h"
#include "active_3d_planning_core/module/module_factory.h"
#include "active_3d_planning_core/planner/planner_I.h"
#include "active_3d_planning_core/tools/defaults.h"
#include <algorithm>
#include <cmath>
#include <random>
#include <vector>
namespace active_3d_planning {
namespace trajectory_generator {
// RRTStar
// Self-registration with the module factory under the name "RRTStar".
ModuleFactoryRegistry::Registration<RRTStar> RRTStar::registration("RRTStar");
// Plain construction; all configuration happens in setupFromParamMap().
RRTStar::RRTStar(PlannerI &planner) : RRT(planner) {}
// Read the RRT*-specific parameters (falling back to the defaults shown) and
// expose this generator to other modules under the name "RRTStarGenerator".
void RRTStar::setupFromParamMap(Module::ParamMap *param_map) {
RRT::setupFromParamMap(param_map);
setParam<bool>(param_map, "rewire_root", &p_rewire_root_, true);
setParam<bool>(param_map, "rewire_intermediate", &p_rewire_intermediate_, true);
setParam<bool>(param_map, "rewire_update", &p_rewire_update_, true);
setParam<bool>(param_map, "update_subsequent", &p_update_subsequent_, true);
setParam<bool>(param_map, "reinsert_root", &p_reinsert_root_, true);
// Default rewire radius slightly exceeds the extension range so that newly
// added edges are always rewire candidates.
setParam<double>(param_map, "max_rewire_range", &p_max_rewire_range_,
p_max_extension_range_ + 0.1);
setParam<double>(param_map, "max_density_range", &p_max_density_range_, 0.0);
setParam<int>(param_map, "n_neighbors", &p_n_neighbors_, 10);
planner_.getFactory().registerLinkableModule("RRTStarGenerator", this);
}
// Validate the RRT*-specific parameters, then defer to the base-class checks.
bool RRTStar::checkParamsValid(std::string *error_message) {
const bool rewire_range_invalid = (p_max_rewire_range_ <= 0.0);
if (rewire_range_invalid) {
*error_message = "rewire_range expected > 0";
return false;
}
return RRT::checkParamsValid(error_message);
}
// Select the segment to expand next. On a root change, rebuild the kdtree
// from the current tree and (optionally) rewire it, then recompute how many
// semilocal samples are still missing before delegating to RRT.
bool RRTStar::selectSegment(TrajectorySegment **result,
TrajectorySegment *root) {
// If the root has changed, reset the kdtree and populate with the current
// trajectory tree.
if (previous_root_ != root) {
resetTree(root);
previous_root_ = root;
if (p_rewire_update_) {
rewireIntermediate(root);
}
if (p_sampling_mode_ == "semilocal") {
// Check whether the minimum number of local points is achieved and store
// how many are still missing in semilocal_count_.
double query_pt[3] = {root->trajectory.back().position_W.x(),
root->trajectory.back().position_W.y(),
root->trajectory.back().position_W.z()};
// std::vector instead of variable-length arrays: VLAs are a compiler
// extension, not standard C++.
std::vector<std::size_t> ret_index(p_semilocal_count_);
std::vector<double> out_dist(p_semilocal_count_);
nanoflann::KNNResultSet<double> resultSet(p_semilocal_count_);
resultSet.init(ret_index.data(), out_dist.data());
kdtree_->findNeighbors(resultSet, query_pt, nanoflann::SearchParams(10));
semilocal_count_ = p_semilocal_count_;
for (int i = 0; i < resultSet.size(); ++i) {
// Bugfix: test each neighbor's own (squared) distance. The previous code
// re-tested out_dist[p_semilocal_count_ - 1] on every iteration, so it
// decremented either for all neighbors or for none.
if (out_dist[i] <= p_semilocal_radius_max_ * p_semilocal_radius_max_) {
semilocal_count_ -= 1;
}
}
}
}
// Bugfix: propagate the base-class result. The function previously fell off
// the end without a return statement, which is undefined behavior for a
// non-void function.
return RRT::selectSegment(result, root);
}
// Expand the tree from `target`: sample a goal position, reject it if it is
// out of range or too close to an existing node, evaluate its gain, connect
// it to the best nearby parent and optionally rewire neighbors through it.
// Returns true when a new segment was inserted into the tree/kdtree.
bool RRTStar::expandSegment(TrajectorySegment *target,
std::vector<TrajectorySegment *> *new_segments) {
tree_is_reset_ = true; // select was called earlier, resetting the tree on new root segment
if (!target) {
// Segment selection failed
return false;
}
// Check max segment range and cropping
if (!adjustGoalPosition(target->trajectory.back().position_W, &goal_pos_)) {
return false;
}
// Check maximum sampling density (nearest-neighbor distances are squared).
if (p_max_density_range_ > 0.0) {
double query_pt[3] = {goal_pos_.x(), goal_pos_.y(), goal_pos_.z()};
std::size_t ret_index[1];
double out_dist[1];
nanoflann::KNNResultSet<double> resultSet(1);
resultSet.init(ret_index, out_dist);
kdtree_->findNeighbors(resultSet, query_pt, nanoflann::SearchParams(10));
if (resultSet.size() > 0) {
if (out_dist[0] <= p_max_density_range_ * p_max_density_range_) {
return false;
}
}
}
// Compute the gain of the new point (evaluation must be single point!)
TrajectorySegment *new_segment;
EigenTrajectoryPoint goal_point;
goal_point.position_W = goal_pos_;
goal_point.setFromYaw((double) rand() / (double) RAND_MAX * 2.0 * M_PI); // random orientation
if (p_crop_segments_) {
new_segment = target->spawnChild();
connectPoses(target->trajectory.back(), goal_point, &(new_segment->trajectory),
false); // collision already checked
} else {
// Unparented for now; rewireToBestParent() below attaches it.
new_segment = new TrajectorySegment();
new_segment->trajectory.push_back(goal_point);
new_segment->parent = nullptr;
}
planner_.getTrajectoryEvaluator().computeGain(new_segment);
// Find nearby parent candidates
std::vector<TrajectorySegment *> candidate_parents;
if (findNearbyCandidates(goal_pos_, &candidate_parents)) {
rewireToBestParent(new_segment, candidate_parents);
}
// Rewire existing segments within range to the new segment, if this increases
// their value
if (p_rewire_intermediate_) {
TrajectorySegment *current = new_segment;
while (current) {
// the connection of the new segment to the root cannot be rewired
// (loops!)
candidate_parents.erase(std::remove(candidate_parents.begin(),
candidate_parents.end(), current),
candidate_parents.end());
current = current->parent;
}
std::vector<TrajectorySegment *> new_parent = {new_segment};
for (int i = 0; i < candidate_parents.size(); ++i) {
rewireToBestParent(candidate_parents[i], new_parent);
}
}
// Add to the kdtree; a segment that found no feasible parent is discarded
// (the tree owns parented segments through unique_ptr, so delete here only
// applies to the unparented case).
if (new_segment->parent) {
tree_data_.addSegment(new_segment);
kdtree_->addPoints(tree_data_.points.size() - 1, tree_data_.points.size() - 1);
return true;
} else {
delete new_segment;
return false;
}
}
// Try to rewire every segment of the tree, processed from closest to the
// root outwards, to the best nearby parent that would not create a cycle.
// Always returns true.
bool RRTStar::rewireIntermediate(TrajectorySegment *root) {
// After updating, try rewire all from inside out
std::vector<TrajectorySegment *> segments;
// order w.r.t distance
root->getTree(&segments);
std::vector<std::pair<double, TrajectorySegment *>> distance_pairs;
// Skip index 0 (the root itself); pair each segment with its distance.
for (int i = 1; i < segments.size(); ++i) {
distance_pairs.push_back(
std::make_pair((segments[i]->trajectory.back().position_W -
root->trajectory.back().position_W)
.norm(),
segments[i]));
}
std::sort(distance_pairs.begin(), distance_pairs.end());
// rewire
std::vector<TrajectorySegment *> candidate_parents;
std::vector<TrajectorySegment *> safe_parents;
for (int i = 0; i < distance_pairs.size(); ++i) {
candidate_parents.clear();
if (!findNearbyCandidates(
distance_pairs[i].second->trajectory.back().position_W,
&candidate_parents)) {
continue;
}
safe_parents.clear();
for (int j = 0; j < candidate_parents.size(); ++j) {
// cannot rewire to own children (loops!): walk the candidate's ancestor
// chain; only candidates that never pass through this segment are safe.
TrajectorySegment *current = candidate_parents[j];
while (true) {
if (current) {
if (current == distance_pairs[i].second) {
break;
}
current = current->parent;
} else {
safe_parents.push_back(candidate_parents[j]);
break;
}
}
}
rewireToBestParent(distance_pairs[i].second, safe_parents);
}
return true;
}
// Called when the planner commits to the child `*next_segment` of `root`:
// rewire the other root children into the subtree of the new root so their
// branches survive, optionally reinserting the old root position as a node.
// Updates *next_segment to the new root's index among root's children.
bool RRTStar::rewireRoot(TrajectorySegment *root, int *next_segment) {
if (!p_rewire_root_) {
return true;
}
if (!tree_is_reset_) {
// Force reset (dangling pointers!)
resetTree(root);
}
tree_is_reset_ = false;
TrajectorySegment *next_root = root->children[*next_segment].get();
// Try rewiring non-next segments (to keep their branches alive)
std::vector<TrajectorySegment *> to_rewire;
root->getChildren(&to_rewire);
to_rewire.erase(std::remove(to_rewire.begin(), to_rewire.end(), next_root), to_rewire.end());
// Fixed-point iteration: retry until no further segment can be rewired,
// since each success may enable others.
bool rewired_something = true;
while (rewired_something) {
rewired_something = false;
for (int i = 0; i < to_rewire.size(); ++i) {
if (rewireRootSingle(to_rewire[i], next_root)) {
to_rewire.erase(to_rewire.begin() + i);
rewired_something = true;
break;
}
}
}
// If necessary (some segments would die) reinsert old root
if (p_reinsert_root_ && to_rewire.size() > 0) {
EigenTrajectoryPointVector new_trajectory;
if ((next_root->trajectory.back().position_W - root->trajectory.back().position_W).norm() > 0.0) {
// don't reinsert zero movement nodes
if (connectPoses(next_root->trajectory.back(), root->trajectory.back(), &new_trajectory, false)) {
TrajectorySegment *reinserted_root = next_root->spawnChild();
reinserted_root->trajectory = new_trajectory;
// take info from old root (without value since already seen) will be
// discarded/updated anyways
reinserted_root->info = std::move(root->info);
planner_.getTrajectoryEvaluator().computeCost(reinserted_root);
planner_.getTrajectoryEvaluator().computeValue(reinserted_root);
tree_data_.addSegment(reinserted_root);
kdtree_->addPoints(tree_data_.points.size() - 1, tree_data_.points.size() - 1);
// rewire: move the orphaned branches under the reinserted node.
for (TrajectorySegment *segment : to_rewire) {
for (int i = 0; i < segment->parent->children.size(); ++i) {
if (segment->parent->children[i].get() == segment) {
// Move from existing parent
reinserted_root->children.push_back(
std::move(segment->parent->children[i]));
segment->parent->children.erase(segment->parent->children.begin() + i);
segment->parent = reinserted_root;
}
}
}
}
}
}
// Adjust the next best value
for (int i = 0; i < root->children.size(); ++i) {
if (root->children[i].get() == next_root) {
*next_segment = i;
break;
}
}
return true;
}
// Try to force-rewire a single segment onto a parent that is already part of
// the new root's subtree. Returns true on success.
bool RRTStar::rewireRootSingle(TrajectorySegment *segment,
TrajectorySegment *new_root) {
// Try rewiring a single segment
std::vector<TrajectorySegment *> candidate_parents;
if (!findNearbyCandidates(segment->trajectory.back().position_W, &candidate_parents)) {
return false;
}
// keep only candidates whose ancestor chain passes through the new root
// (everything else would die when the old root is discarded)
std::vector<TrajectorySegment *> safe_candidates;
TrajectorySegment *current;
for (int i = 0; i < candidate_parents.size(); ++i) {
current = candidate_parents[i];
while (current) {
if (current == new_root) {
safe_candidates.push_back(candidate_parents[i]);
break;
}
current = current->parent;
}
}
if (safe_candidates.empty()) {
return false;
}
// force rewire
return rewireToBestParent(segment, safe_candidates, true);
}
// Collect up to p_n_neighbors_ tree nodes within p_max_rewire_range_ of
// target_point into *result. Returns true when at least one was found.
// (k-NN is used instead of a radius search; the original author noted the
// radius search was unreliable.)
bool RRTStar::findNearbyCandidates(const Eigen::Vector3d &target_point,
std::vector<TrajectorySegment *> *result) {
double query_pt[3] = {target_point.x(), target_point.y(), target_point.z()};
// std::vector instead of variable-length arrays: VLAs are a compiler
// extension, not standard C++.
std::vector<std::size_t> ret_index(p_n_neighbors_);
std::vector<double> out_dist(p_n_neighbors_);
nanoflann::KNNResultSet<double> resultSet(p_n_neighbors_);
resultSet.init(ret_index.data(), out_dist.data());
kdtree_->findNeighbors(resultSet, query_pt, nanoflann::SearchParams(10));
bool candidate_found = false;
for (int i = 0; i < resultSet.size(); ++i) {
// out_dist holds squared distances; compare against the squared range.
if (out_dist[i] <= p_max_rewire_range_ * p_max_rewire_range_) {
candidate_found = true;
result->push_back(tree_data_.data[ret_index[i]]);
}
}
return candidate_found;
}
// Evaluate every candidate parent for `segment` and attach it to the one that
// maximizes the segment's value (any feasible candidate wins over the current
// parent when force_rewire is set). Returns true when the segment ends up
// attached to a new parent, false when no feasible connection exists.
bool RRTStar::rewireToBestParent(TrajectorySegment *segment,
const std::vector<TrajectorySegment *> &candidates, bool force_rewire) {
// Goal is the end point of the segment
EigenTrajectoryPoint goal_point = segment->trajectory.back();
// store the initial segment
TrajectorySegment best_segment = segment->shallowCopy();
TrajectorySegment *initial_parent = segment->parent;
// Find best segment
for (int i = 0; i < candidates.size(); ++i) {
segment->trajectory.clear();
segment->parent = candidates[i];
if (connectPoses(candidates[i]->trajectory.back(), goal_point,
&(segment->trajectory))) {
// Feasible connection: evaluate the trajectory
planner_.getTrajectoryEvaluator().computeCost(segment);
planner_.getTrajectoryEvaluator().computeValue(segment);
if (best_segment.parent == nullptr || force_rewire ||
segment->value > best_segment.value) {
best_segment = segment->shallowCopy();
force_rewire = false;
}
}
}
if (best_segment.parent == nullptr) {
// No connection found and no previous trajectory
return false;
} else {
// Apply best segment and rewire
segment->parent = best_segment.parent;
segment->trajectory = best_segment.trajectory;
segment->cost = best_segment.cost;
planner_.getTrajectoryEvaluator().computeValue(segment);
if (segment->parent == initial_parent) {
// Back to old parent. Bugfix: use logical NOT — the previous
// `~force_rewire` applied bitwise NOT to a bool, which promotes to int
// and yields a nonzero (truthy) value for both true and false, so a
// failed forced rewire was reported as success.
return !force_rewire;
} else if (initial_parent == nullptr) {
// Found new parent; the parent takes ownership of the segment.
segment->parent->children.push_back(
std::unique_ptr<TrajectorySegment>(segment));
return true;
} else {
for (int i = 0; i < initial_parent->children.size(); ++i) {
if (initial_parent->children[i].get() == segment) {
// Move from existing parent
segment->parent->children.push_back(
std::move(initial_parent->children[i]));
initial_parent->children.erase(initial_parent->children.begin() + i);
// update subtree values (index 0 is the segment itself, already done)
if (p_update_subsequent_) {
std::vector<TrajectorySegment *> subtree;
segment->getTree(&subtree);
for (int j = 1; j < subtree.size(); ++j) {
planner_.getTrajectoryEvaluator().computeValue(subtree[j]);
}
}
return true;
}
}
// Rewiring failed (should not happen by construction)
return false;
}
}
}
} // namespace trajectory_generator
namespace trajectory_evaluator {
// RRTStarEvaluatorAdapter (just delegate everything, call rewire on select
// best)
// Factory self-registration under the name "RRTStarEvaluatorAdapter".
ModuleFactoryRegistry::Registration<RRTStarEvaluatorAdapter>
RRTStarEvaluatorAdapter::registration("RRTStarEvaluatorAdapter");
// The wrapped evaluator is created later in setupFromParamMap().
RRTStarEvaluatorAdapter::RRTStarEvaluatorAdapter(PlannerI &planner)
: TrajectoryEvaluator(planner) {}
// Pure pass-through to the wrapped evaluator.
bool RRTStarEvaluatorAdapter::computeGain(TrajectorySegment *traj_in) {
return following_evaluator_->computeGain(traj_in);
}
// Pure pass-through to the wrapped evaluator.
bool RRTStarEvaluatorAdapter::computeCost(TrajectorySegment *traj_in) {
return following_evaluator_->computeCost(traj_in);
}
// Pure pass-through to the wrapped evaluator.
bool RRTStarEvaluatorAdapter::computeValue(TrajectorySegment *traj_in) {
return following_evaluator_->computeValue(traj_in);
}
// Delegate the selection, then let the generator rewire the tree around the
// chosen root; the generator may change which child index is the next best.
int RRTStarEvaluatorAdapter::selectNextBest(TrajectorySegment *traj_in) {
int next = following_evaluator_->selectNextBest(traj_in);
generator_->rewireRoot(traj_in, &next);
return next;
}
// Pure pass-through to the wrapped evaluator.
bool RRTStarEvaluatorAdapter::updateSegment(TrajectorySegment *segment) {
return following_evaluator_->updateSegment(segment);
}
// Pure pass-through to the wrapped evaluator.
void RRTStarEvaluatorAdapter::visualizeTrajectoryValue(
VisualizationMarkers *markers, const TrajectorySegment &trajectory) {
following_evaluator_->visualizeTrajectoryValue(markers, trajectory);
}
// Link to the RRTStar generator registered earlier and construct the wrapped
// ("following") evaluator from the parameter map.
void RRTStarEvaluatorAdapter::setupFromParamMap(Module::ParamMap *param_map) {
generator_ = dynamic_cast<trajectory_generator::RRTStar *>(
planner_.getFactory().readLinkableModule("RRTStarGenerator"));
// Create following evaluator
std::string args; // default args extends the parent namespace
std::string param_ns = (*param_map)["param_namespace"];
setParam<std::string>(param_map, "following_evaluator_args", &args,
param_ns + "/following_evaluator");
following_evaluator_ =
planner_.getFactory().createModule<TrajectoryEvaluator>(args, planner_,
verbose_modules_);
// setup parent
TrajectoryEvaluator::setupFromParamMap(param_map);
}
} // namespace trajectory_evaluator
} // namespace active_3d_planning
| 46.984305 | 118 | 0.524696 |
b69e7d9ac89d08dce4e3ad34b477eda29a523357 | 4,484 | rb | Ruby | app/services/charge_pattern.rb | l85m/trym | 83f45ed65c52f21e138f1793d1d42ab5b4ec4206 | [
"MIT"
] | 1 | 2015-04-10T13:04:11.000Z | 2015-04-10T13:04:11.000Z | app/services/charge_pattern.rb | demosophy/trym | 83f45ed65c52f21e138f1793d1d42ab5b4ec4206 | [
"MIT"
] | null | null | null | app/services/charge_pattern.rb | demosophy/trym | 83f45ed65c52f21e138f1793d1d42ab5b4ec4206 | [
"MIT"
] | 1 | 2016-08-23T10:37:57.000Z | 2016-08-23T10:37:57.000Z | class ChargePattern
attr_reader :interval, :interval_likely_recurring, :recurring_amounts_similar, :dates_are_perfectly_recurring, :recurring_date_count
# Analyse a charge history for a recurring billing pattern.
#
# history - pairs of [Date, amount]; converted to a Hash keyed by date.
# Results are exposed through the attr_readers declared above. The cheap
# perfect-recurrence check runs first; the heuristic search only runs when
# it fails.
def initialize(history)
@history = history.to_h
check_for_perfect_recurrance
unless @dates_are_perfectly_recurring
find_interval
end
end
private
# Billing intervals (in days) recognised as recurring: bi-weekly, the
# monthly range, quarterly, semi-annual and annual (with leap-year slack).
def recurring_periods_in_days
[14, 28, 29, 30, 31, 90, 91, 92, 180, 181, 182, 183, 363, 364, 365, 366]
end
# Fast path: with 3+ charges whose consecutive gaps are all identical and
# equal to a known billing period, declare the series perfectly recurring
# and record the interval, the charge count and amount similarity.
def check_for_perfect_recurrance
distances = distances_between_dates @history.keys
if @history.size > 2 && distances.uniq.size == 1 && recurring_periods_in_days.include?(distances.first)
@recurring_date_count = @history.size
@dates_are_perfectly_recurring = true
@interval_likely_recurring = true
@recurring_amounts_similar = amounts_are_similar? @history.values
@interval = distances.first
else
@dates_are_perfectly_recurring = false
end
end
# Heuristic search for a recurring interval, in three stages of increasing
# aggressiveness, finally falling back to "not recurring" (@interval = -1).
# (`present?` is ActiveSupport — this class appears to live in a Rails app.)
def find_interval
## First try to find recurrance without any hocus pokus
@interval = find_recurring_interval( distances_between_dates @history.keys )
if @interval.present?
@interval_likely_recurring = true
@recurring_date_count = @history.size
else
## if that doesn't work, then try and group the charges by the charge amount and find a pattern there
@interval ||= find_recurring_interval distances_between_grouped_dates
if @interval.present?
@interval_likely_recurring = true
# Similar if at least one amount-group passes the similarity test.
@recurring_amounts_similar = group_history_by_amount.values.collect{ |x| amounts_are_similar? x.map(&:last) }.select{ |x| x }.present?
@recurring_date_count = group_history_by_amount.values.size
## If that still doesn't work, try to find outliers in the data and remove them to see if there are any patterns
else
charges_on_similar_days_of_the_month = find_charges_on_similar_days_of_the_month
@interval ||= find_recurring_interval( distances_between_dates charges_on_similar_days_of_the_month.keys.sort )
if @interval.present?
@interval_likely_recurring = true
@recurring_amounts_similar = amounts_are_similar? charges_on_similar_days_of_the_month.values
@recurring_date_count = charges_on_similar_days_of_the_month.size
## Ok it's probably not recurring
else
@interval = -1
@interval_likely_recurring = false
@recurring_date_count = 0
end
end
end
# Default the similarity flag when no stage above set it.
if @recurring_amounts_similar.nil?
@recurring_amounts_similar = amounts_are_similar? @history.values
end
end
# Return the first known billing period that sits inside the IQR of the
# observed gaps, or nil when the gaps are too spread out (IQR > 5 days) or
# no known period matches.
def find_recurring_interval(distances)
return nil if iqr_greater_than(distances, 5)
recurring_periods_in_days.detect { |interval| in_iqr?(distances, interval) }
end
# Gaps between consecutive charge dates computed per amount-group, flattened
# into one list. Returns the sentinel [-1] (never a recurring period) when
# there are too few charges or no amount occurs more than once.
def distances_between_grouped_dates
if @history.size > 3
grouped_history = group_history_by_amount
return [-1] unless grouped_history.present?
grouped_dates = group_dates(grouped_history)
distances = grouped_dates.collect{ |d| distances_between_dates d }.flatten
else
distances = [-1]
end
end
# True when the IQR-trimmed amounts stay within 20% of each other
# (min/max ratio above 0.8).
# NOTE(review): if remove_outliers ever returns an empty list this raises on
# nil.to_f's receiver — confirm inputs are always non-empty.
def amounts_are_similar?(amounts)
amounts = remove_outliers(amounts)
(amounts.min.to_f / amounts.max.to_f) > 0.8
end
# Keep only the values inside the interquartile range (inclusive bounds).
# NOTE(review): despite the name this trims everything outside Q1..Q3, which
# is aggressive for small samples — confirm that is the intent.
def remove_outliers(vals)
floor = vals.percentile(25)
ciel = vals.percentile(75)
vals.reject{ |x| x < floor || x > ciel }
end
# Positive charges grouped by amount, keeping only amounts seen 2+ times.
# Keys are amounts; values are lists of [date, amount] pairs.
def group_history_by_amount
@history.reject{ |_,v| v < 0 }.group_by(&:last).select{ |_,d| d.size > 1 }
end
# Extract just the charge dates from each amount-group produced by
# #group_history_by_amount (one list of dates per group).
def group_dates(grouped_history)
grouped_history.values.map { |pairs| pairs.to_h.keys }
end
# Day gaps between each pair of consecutive dates (expects a sorted list);
# returns [] for fewer than two dates.
def distances_between_dates(dates)
dates.each_cons(2).map { |earlier, later| (later - earlier).to_i }
end
# True when the interquartile range of `distances` exceeds `test_val`.
# (Array#percentile is supplied by a monkey-patch/gem elsewhere in the app.)
def iqr_greater_than(distances, test_val)
test_val < distances.percentile(75) - distances.percentile(25)
end
# True when the interquartile range of `distances` is below `test_val`.
# NOTE(review): currently unused within this class.
def iqr_less_than(distances, test_val)
test_val > distances.percentile(75) - distances.percentile(25)
end
# True when `val` falls inside the interquartile range of `arr` (inclusive).
def in_iqr?(arr, val)
val.between?(arr.percentile(25), arr.percentile(75))
end
# Filter the history down to charges falling near the median day-of-month.
# Returns the filtered set when it retains over 60% of the charges,
# otherwise returns the full history unchanged.
# NOTE(review): `median` and `end_of_month` are not core Ruby (monkey-patch /
# ActiveSupport) — confirm both are loaded wherever this class is used.
def find_charges_on_similar_days_of_the_month
base_day = @history.keys.map(&:day).median
#we expect the day to fall within +/- 3 days of the median day of the history
scrubbed_charges = @history.select do |d,_|
#handle end of month: clamp to the last day when base_day doesn't exist
if Date.valid_date?(d.year,d.month,base_day)
test_day = Date.new(d.year,d.month,base_day)
else
test_day = d.end_of_month
end
( d - test_day ).to_i.abs < 3
end
#we need at least 60% of the charges to fall in the predicted interval
if (scrubbed_charges.size / @history.size.to_f) > 0.6
return scrubbed_charges
end
@history
end
end | 30.09396 | 138 | 0.746655 |
681625984592e9b6872d3cefad020d96904e7916 | 50 | hh | C++ | RAVL2/MSVC/include/Ravl/DArray1dIter2.hh | isuhao/ravl2 | 317e0ae1cb51e320b877c3bad6a362447b5e52ec | [
"BSD-Source-Code"
] | null | null | null | RAVL2/MSVC/include/Ravl/DArray1dIter2.hh | isuhao/ravl2 | 317e0ae1cb51e320b877c3bad6a362447b5e52ec | [
"BSD-Source-Code"
] | null | null | null | RAVL2/MSVC/include/Ravl/DArray1dIter2.hh | isuhao/ravl2 | 317e0ae1cb51e320b877c3bad6a362447b5e52ec | [
"BSD-Source-Code"
] | null | null | null |
#include "../.././Core/System/DArray1dIter2.hh"
| 12.5 | 47 | 0.64 |
04a87af388e7ae6b3191c1f09c921c59a583a617 | 101 | html | HTML | conformance-checkers/html/elements/ins/date-1900-02-29-novalid.html | ziransun/wpt | ab8f451eb39eb198584d547f5d965ef54df2a86a | [
"BSD-3-Clause"
] | 2,479 | 2018-05-28T14:51:29.000Z | 2022-03-30T14:41:18.000Z | conformance-checkers/html/elements/ins/date-1900-02-29-novalid.html | ziransun/wpt | ab8f451eb39eb198584d547f5d965ef54df2a86a | [
"BSD-3-Clause"
] | 7,642 | 2018-05-28T09:38:03.000Z | 2022-03-31T20:55:48.000Z | conformance-checkers/html/elements/ins/date-1900-02-29-novalid.html | ziransun/wpt | ab8f451eb39eb198584d547f5d965ef54df2a86a | [
"BSD-3-Clause"
] | 1,303 | 2018-05-29T14:50:02.000Z | 2022-03-30T17:30:42.000Z | <!DOCTYPE html>
<meta charset=utf-8>
<title>date-1900-02-29</title>
<ins datetime="1900-02-29"></ins> | 25.25 | 33 | 0.693069 |
c3f4f0fe69b883deb5e629a5ffd546f61d155652 | 1,562 | rs | Rust | src/lib.rs | 4lDO2/ioslice-rs | a39f1a64f813c4e390f1abe2779491e216be26ea | [
"MIT"
] | 1 | 2020-09-13T17:16:39.000Z | 2020-09-13T17:16:39.000Z | src/lib.rs | 4lDO2/ioslice-rs | a39f1a64f813c4e390f1abe2779491e216be26ea | [
"MIT"
] | null | null | null | src/lib.rs | 4lDO2/ioslice-rs | a39f1a64f813c4e390f1abe2779491e216be26ea | [
"MIT"
] | null | null | null | //! `#![no_std]`-friendly wrappers over the [`std::io::IoSlice`] and [`std::io::IoSliceMut`], which
//! are shared slices and exclusive slices, respectively, and ABI-compatible with system types for
//! I/O vectors.
//!
//! Internally, the struct will store the following based on crate features:
//!
//! * `std` - wrapping [`std::io::IoSlice`] directly, with accessors for it as well as conversion
//! functions and From impls.
//! * `libc` (and `#[cfg(unix)]`) - wrapping [`libc::iovec`] directly on platforms that support it.
//! A marker is also stored, to safely wrap the raw pointer, and forcing usage of this API to
//! follow the borrow checker rules.
//! * (none) - wrapping a regular slice, that may not have the same ABI guarantees as the types
//! from std or libc have.
//!
//! `IoSlice` will however implement `AsRef<[u8]>`, `Borrow<[u8]>`, and `Deref<Target = [u8]>`
//! regardless of the features used, so long as the slice is marked as initialized.
#![cfg_attr(not(any(test, feature = "std")), no_std)]
#![cfg_attr(all(feature = "nightly", feature = "alloc"), feature(new_uninit))]
#[cfg(all(unix, windows))]
compile_error!("cannot compile for both windows and unix");
#[cfg(feature = "alloc")]
extern crate alloc;
mod iovec;
pub use iovec::*;
/// Reinterprets a shared slice of `A` as a slice of `B` with the same
/// element count.
///
/// # Safety
///
/// The caller must guarantee that `A` and `B` have identical layout
/// (size and alignment) and that every bit pattern of an `A` is a valid
/// `B`.  Note the length passed through is the element count, not a
/// byte count, so the types must be the same size.
#[inline]
unsafe fn cast_slice_same_layout<A, B>(a: &[A]) -> &[B] {
    core::slice::from_raw_parts(a.as_ptr() as *const B, a.len())
}
/// Reinterprets an exclusive slice of `A` as a mutable slice of `B` with
/// the same element count.
///
/// # Safety
///
/// Same contract as [`cast_slice_same_layout`], plus writes through the
/// returned slice must leave the underlying `A` values valid.
#[inline]
unsafe fn cast_slice_same_layout_mut<A, B>(a: &mut [A]) -> &mut [B] {
    core::slice::from_raw_parts_mut(a.as_mut_ptr() as *mut B, a.len())
}
| 41.105263 | 99 | 0.673496 |
fbb53804684020d956c29974b03cf17a2b5c2e8a | 227 | java | Java | java/Main.java | sanshigo345/salary_calculation | 2c9c50501752f2d9056d1d46799775d67783ff2c | [
"MIT"
] | null | null | null | java/Main.java | sanshigo345/salary_calculation | 2c9c50501752f2d9056d1d46799775d67783ff2c | [
"MIT"
] | null | null | null | java/Main.java | sanshigo345/salary_calculation | 2c9c50501752f2d9056d1d46799775d67783ff2c | [
"MIT"
] | null | null | null | import java.io.IOException;
public class Main {
    /**
     * Entry point: constructs a Personnel instance and delegates to its
     * createOutput method with the first two command-line arguments
     * (input and output locations, presumably -- confirm against the
     * Personnel class).
     *
     * @param args args[0] and args[1] are forwarded to createOutput
     * @throws IOException if createOutput fails on I/O
     */
    public static void main(String[] args) throws IOException {
        Personnel personnelObject = new Personnel();
        personnelObject.createOutput(args[0], args[1]);
    }
}
| 20.636364 | 61 | 0.696035 |
e87afb74e4d441190f5ebc51b4f5cada74daff8b | 349 | cc | C++ | C++/using_static_var.cc | MaRauder111/Cplusplus | 9b18f95b5aee67f67c6e579ba7f3e80d3a3c68a5 | [
"MIT"
] | null | null | null | C++/using_static_var.cc | MaRauder111/Cplusplus | 9b18f95b5aee67f67c6e579ba7f3e80d3a3c68a5 | [
"MIT"
] | null | null | null | C++/using_static_var.cc | MaRauder111/Cplusplus | 9b18f95b5aee67f67c6e579ba7f3e80d3a3c68a5 | [
"MIT"
] | null | null | null | #include<iostream>
// Demonstrates static local lifetime: the static counter is initialized
// exactly once and persists across calls, while the automatic counter is
// re-created (and re-zeroed) on every call.
void test(){
    static int callCount = 0;   // initialized once, lives until program exit
    int localCount = 0;         // fresh automatic variable on each call
    ++callCount;
    ++localCount;
    std::cout << "i = " << callCount << " j = " << localCount << std::endl;
}
test();
test();
test();
} | 18.368421 | 88 | 0.544413 |
ca72b3ccdc14efd0cc0c650d50587daa76b7249f | 709 | swift | Swift | TwitterClone/TweetCell.swift | navsaini/TwitterClone | 4a55ddefdaeda61df4c67f2e3a3152369261eb6e | [
"Apache-2.0"
] | null | null | null | TwitterClone/TweetCell.swift | navsaini/TwitterClone | 4a55ddefdaeda61df4c67f2e3a3152369261eb6e | [
"Apache-2.0"
] | 2 | 2016-02-23T07:32:03.000Z | 2016-03-03T01:21:04.000Z | TwitterClone/TweetCell.swift | navsaini/TwitterClone | 4a55ddefdaeda61df4c67f2e3a3152369261eb6e | [
"Apache-2.0"
] | null | null | null | //
// TweetCell.swift
// TwitterClone
//
// Created by Nav Saini on 2/19/16.
// Copyright © 2016 Saini. All rights reserved.
//
import UIKit
// Table-view cell displaying a single tweet: tweet text, author,
// profile image, and retweet/favorite counts.  Outlets are wired in the
// storyboard/nib.
class TweetCell: UITableViewCell {
    // Tweet body text.
    @IBOutlet weak var tweetLabel: UILabel!
    // Author display name / handle.
    @IBOutlet weak var userLabel: UILabel!
    // Author avatar.
    @IBOutlet weak var profImageView: UIImageView!
    // Retweet count display.
    @IBOutlet weak var retweetCountLabel: UILabel!
    // Favorite count display.
    @IBOutlet weak var favCountLabel: UILabel!
    // Called after the cell is loaded from the nib; no extra setup yet.
    override func awakeFromNib() {
        super.awakeFromNib()
        // Initialization code
    }
    // Default selection handling only; no custom selected appearance.
    override func setSelected(selected: Bool, animated: Bool) {
        super.setSelected(selected, animated: animated)
        // Configure the view for the selected state
    }
}
1658bcf7c33e390f21977a0fc7034b90a9c2789a | 593 | sql | SQL | openGaussBase/testcase/KEYWORDS/Leakproof/Opengauss_Function_Keyword_Leakproof_Case0033.sql | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/KEYWORDS/Leakproof/Opengauss_Function_Keyword_Leakproof_Case0033.sql | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/KEYWORDS/Leakproof/Opengauss_Function_Keyword_Leakproof_Case0033.sql | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | -- @testpoint:opengauss关键字Leakproof(非保留),作为视图名
-- Test: the non-reserved openGauss keyword "Leakproof" used as a view name.
-- (The original comments said "explain"; the keyword actually exercised
-- throughout this script is "Leakproof".)
-- Keyword as a view name, unquoted: creation succeeds
CREATE or replace VIEW Leakproof AS
SELECT * FROM pg_tablespace WHERE spcname = 'pg_default';
drop view Leakproof;
-- Keyword as a view name, double-quoted: creation succeeds
CREATE or replace VIEW "Leakproof" AS
SELECT * FROM pg_tablespace WHERE spcname = 'pg_default';
drop VIEW "Leakproof";
-- Keyword as a view name, single-quoted: expected to fail with an error
CREATE or replace VIEW 'Leakproof' AS
SELECT * FROM pg_tablespace WHERE spcname = 'pg_default';
-- Keyword as a view name, backtick-quoted: expected to fail with an error
CREATE or replace VIEW `Leakproof` AS
SELECT * FROM pg_tablespace WHERE spcname = 'pg_default';
f517c2813b9167adcad616190d8d700de6649a80 | 297,303 | cpp | C++ | src/blockchain/main.cpp | johngreyland262/Stealth | 0df75295fcc2c91ab591a4f1aaf6ba3d4cddc652 | [
"MIT"
] | null | null | null | src/blockchain/main.cpp | johngreyland262/Stealth | 0df75295fcc2c91ab591a4f1aaf6ba3d4cddc652 | [
"MIT"
] | null | null | null | src/blockchain/main.cpp | johngreyland262/Stealth | 0df75295fcc2c91ab591a4f1aaf6ba3d4cddc652 | [
"MIT"
] | null | null | null | // Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Copyright (c) 2014-2018 Stealth R&D LLC
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "alert.h"
#include "checkpoints.h"
#include "txdb-leveldb.h"
#include "net.h"
#include "init.h"
#include "key.h"
#include "ui_interface.h"
#include "kernel.h"
#include "QPRegistry.hpp"
#include "explore.hpp"
#include "stealthaddress.h"
#include "chainparams.hpp"
#include <boost/algorithm/string/replace.hpp>
#include <boost/filesystem.hpp>
#include <boost/filesystem/fstream.hpp>
#include <boost/random/mersenne_twister.hpp>
#include <boost/random/uniform_int_distribution.hpp>
using namespace std;
//
// Global state
//
unsigned char pchMessageStart[4] = {
chainParams.pchMessageStartMainNet[0],
chainParams.pchMessageStartMainNet[1],
chainParams.pchMessageStartMainNet[2],
chainParams.pchMessageStartMainNet[3] };
extern map<txnouttype, QPKeyType> mapQPoSKeyTypes;
CCriticalSection cs_setpwalletRegistered;
set<CWallet*> setpwalletRegistered;
CCriticalSection cs_main;
CTxMemPool mempool;
unsigned int nTransactionsUpdated = 0;
map<uint256, CBlockIndex*> mapBlockIndex;
map<int, CBlockIndex*> mapBlockLookup;
set<pair<COutPoint, unsigned int> > setStakeSeen;
uint256 hashGenesisBlock = chainParams.hashGenesisBlockMainNet;
static CBigNum bnProofOfWorkLimit = chainParams.bnProofOfWorkLimitMainNet;
static CBigNum bnProofOfStakeLimit = chainParams.bnProofOfStakeLimitMainNet;
unsigned int nStakeMinAge = chainParams.nStakeMinAgeMainNet;
unsigned int nStakeMaxAge = chainParams.nStakeMaxAgeMainNet;
unsigned int nStakeTargetSpacing = chainParams.nTargetSpacingMainNet;
int nCoinbaseMaturity = chainParams.nCoinbaseMaturityMainNet;
CBlockIndex* pindexGenesisBlock = NULL;
int nBestHeight = -1;
CBigNum bnBestChainTrust = 0;
CBigNum bnBestInvalidTrust = 0;
uint256 hashBestChain = 0;
CBlockIndex* pindexBest = NULL;
int64_t nTimeBestReceived = 0;
CMedianFilter<int> cPeerBlockCounts(5, 0); // Amount of blocks that other nodes claim to have
map<uint256, CBlock*> mapOrphanBlocks;
multimap<uint256, CBlock*> mapOrphanBlocksByPrev;
set<pair<COutPoint, unsigned int> > setStakeSeenOrphan;
map<uint256, uint256> mapProofOfStake;
map<uint256, CDataStream*> mapOrphanTransactions;
map<uint256, map<uint256, CDataStream*> > mapOrphanTransactionsByPrev;
// Constant stuff for coinbase transactions we create:
CScript COINBASE_FLAGS;
double dHashesPerSec;
int64_t nHPSTimerStart;
// Settings
int64_t nTransactionFee = chainParams.MIN_TX_FEE;
int64_t nReserveBalance = 0;
//////////////////////////////////////////////////////////////////////////////
//
// network
//
int GetMinPeerProtoVersion(int nHeight)
{
static const map<int, int>::const_iterator b =
chainParams.mapProtocolVersions.begin();
static const map<int, int>::const_iterator e =
chainParams.mapProtocolVersions.end();
assert(b != e);
int nVersion = b->second;
int nFork = GetFork(nHeight);
// we can do it this way because maps are sorted
for (map<int, int>::const_iterator it = b; it != e; ++it)
{
if (it->first > nFork)
{
break;
}
nVersion = it->second;
}
return nVersion;
}
//////////////////////////////////////////////////////////////////////////////
//
// block creation
//
// Maps a BlockCreationResult code to a short human-readable label.
// Unknown values yield NULL.
const char* DescribeBlockCreationResult(BlockCreationResult r)
{
    switch (r)
    {
        case BLOCKCREATION_OK:
            return "OK";
        case BLOCKCREATION_QPOS_IN_REPLAY:
            return "QPoS In Replay";
        case BLOCKCREATION_NOT_CURRENTSTAKER:
            return "Not Current Staker";
        case BLOCKCREATION_QPOS_BLOCK_EXISTS:
            return "QPoS Block Exists";
        case BLOCKCREATION_INSTANTIATION_FAIL:
            return "Instantiation Fail";
        case BLOCKCREATION_REGISTRY_FAIL:
            return "Registry Fail";
        case BLOCKCREATION_PURCHASE_FAIL:
            return "Purchase Fail";
        case BLOCKCREATION_CLAIM_FAIL:
            return "Claim Fail";
        case BLOCKCREATION_PROOFTYPE_FAIL:
            return "Prooftype Fail";
    }
    // unreachable for valid enum values
    return NULL;
}
//////////////////////////////////////////////////////////////////////////////
//
// block spacing
//
// Target block spacing (seconds) at a given height: after the qPoS fork
// the fixed qPoS spacing applies; before it, the legacy spacing for the
// active network (main vs. test) applies.
int GetTargetSpacing(const int nHeight)
{
    static const int SPACING_M = chainParams.nTargetSpacingMainNet;
    static const int SPACING_T = chainParams.nTargetSpacingTestNet;
    if (GetFork(nHeight) >= XST_FORKQPOS)
    {
        return QP_TARGET_SPACING;
    }
    return fTestNet ? SPACING_T : SPACING_M;
}
//////////////////////////////////////////////////////////////////////////////
//
// dispatching functions
//
// These functions dispatch to one or all registered wallets
// Adds pwalletIn to the global set of wallets that receive blockchain
// notifications (transaction sync, best-chain updates, inventory, etc.).
void RegisterWallet(CWallet* pwalletIn)
{
    {
        // scoped so the lock is released as soon as the set is updated
        LOCK(cs_setpwalletRegistered);
        setpwalletRegistered.insert(pwalletIn);
    }
}
// Removes pwalletIn from the global set of notification recipients.
void UnregisterWallet(CWallet* pwalletIn)
{
    {
        // scoped so the lock is released as soon as the set is updated
        LOCK(cs_setpwalletRegistered);
        setpwalletRegistered.erase(pwalletIn);
    }
}
// Looks up the wallet transaction with the given hash in every
// registered wallet; copies it into wtx and returns true on a hit.
bool static GetTransaction(const uint256& hashTx, CWalletTx& wtx)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
    {
        if (pwallet->GetTransaction(hashTx, wtx))
        {
            return true;
        }
    }
    return false;
}
// Erases the transaction with the given hash from every registered wallet.
void static EraseFromWallets(uint256 hash)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->EraseFromWallet(hash);
}
// Makes sure all registered wallets know about the given transaction, in
// the given block.  When fConnect is false (block being disconnected),
// coinstake transactions originating from a wallet are disabled so the
// wallet can refund their inputs; nothing else is synced.
void SyncWithWallets(const CTransaction& tx, const CBlock* pblock, bool fUpdate, bool fConnect)
{
    if (!fConnect)
    {
        // ppcoin: wallets need to refund inputs when disconnecting coinstake
        if (tx.IsCoinStake())
        {
            BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
                if (pwallet->IsFromMe(tx))
                    pwallet->DisableTransaction(tx);
        }
        return;
    }
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->AddToWalletIfInvolvingMe(tx, pblock, fUpdate);
}
// Notifies every registered wallet about a new best chain (locator).
void static SetBestChain(const CBlockLocator& loc)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->SetBestChain(loc);
}
// Notifies every registered wallet that the given transaction was updated.
void static UpdatedTransaction(const uint256& hashTx)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->UpdatedTransaction(hashTx);
}
// Debug helper: dumps every registered wallet's view of the given block.
void static PrintWallets(const CBlock& block)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->PrintWallet(block);
}
// Notifies every registered wallet about an incoming inventory item
// (wallets use this for transaction request accounting).
void static Inventory(const uint256& hash)
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->Inventory(hash);
}
// Asks every registered wallet to rebroadcast its unconfirmed transactions.
void ResendWalletTransactions()
{
    BOOST_FOREACH(CWallet* pwallet, setpwalletRegistered)
        pwallet->ResendWalletTransactions();
}
//////////////////////////////////////////////////////////////////////////////
//
// Registry Snapshots
//
// Loads into pregistryTemp the most recent registry snapshot at or below
// height nReplay.  Snapshot heights are multiples of BLOCKS_PER_SNAPSHOT;
// candidate heights are probed downward until one reads successfully.
// If none exists at or above the purchase-start height, the registry is
// reset to a pristine (null) state instead.
void GetRegistrySnapshot(CTxDB &txdb, int nReplay, QPRegistry *pregistryTemp)
{
    // start at the highest snapshot height not exceeding nReplay
    for (int nSnap = nReplay - (nReplay % BLOCKS_PER_SNAPSHOT);
         nSnap >= GetPurchaseStart();
         nSnap -= BLOCKS_PER_SNAPSHOT)
    {
        if (txdb.ReadRegistrySnapshot(nSnap, *pregistryTemp))
        {
            return;
        }
    }
    // no usable snapshot: ensure the registry is fresh
    pregistryTemp->SetNull();
}
// Rewinds the registry state to the block pindexRewind: finds the
// nearest usable snapshot that precedes it, loads it, then replays every
// intervening block into the registry.  On success pregistry reflects
// pindexRewind and pindexCurrentRet points at the last block replayed.
// Blocks before the purchase fork simply reset the registry to null.
// Returns false only when the chain or snapshot store is inconsistent.
bool RewindRegistry(CTxDB &txdb,
                    CBlockIndex *pindexRewind,
                    QPRegistry *pregistry,
                    CBlockIndex* &pindexCurrentRet)
{
    if (GetFork(pindexRewind->nHeight) < XST_FORKPURCHASE)
    {
        // registry was not in use yet at this height
        pregistry->SetNull();
        return true;
    }
    if (!pindexRewind->pprev)
    {
        // this should never happen
        printf("RewindRegistry(): TSNH no prev block to %s\n"
               "   can't replay registry to rewind block\n",
               pindexRewind->GetBlockHash().ToString().c_str());
        return false;
    }
    // this complicated loop finds the earliest snapshot that is a
    // predecessor to pindexRewind
    CBlockIndex *pindexSnap = pindexRewind;
    // NOTE(review): nReadSnapCount is written but never read -- dead counter
    unsigned int nReadSnapCount = 0;
    while (true)
    {
        bool fSnapIsPrepurchase = false;
        // walk back to the next main-chain snapshot height (or the
        // pre-purchase era, whichever comes first)
        while (pindexSnap->pprev)
        {
            pindexSnap = pindexSnap->pprev;
            if (GetFork(pindexSnap->nHeight) < XST_FORKPURCHASE)
            {
                fSnapIsPrepurchase = true;
                break;
            }
            if ((pindexSnap->nHeight % BLOCKS_PER_SNAPSHOT == 0) &&
                pindexSnap->IsInMainChain())
            {
                break;
            }
        }
        if (fSnapIsPrepurchase)
        {
            // replay will start from an empty registry
            pregistry->SetNull();
            break;
        }
        if (!pindexSnap->pprev)
        {
            // this should never happen
            printf("RewindRegistry(): TSNH no prev block to %s\n"
                   "   can't replay registry to snap block\n",
                   pindexSnap->GetBlockHash().ToString().c_str());
            return false;
        }
        nReadSnapCount += 1;
        if (txdb.ReadRegistrySnapshot(pindexSnap->nHeight, *pregistry))
        {
            // 1. we need to actually replay the previous block
            // 2. ensure the registry matches the index we backtracked to
            if ((pregistry->GetBlockHeight() != pindexRewind->nHeight) &&
                (pregistry->GetBlockHash() == pindexSnap->GetBlockHash()))
            {
                break;
            }
        }
    }
    uint256 hashRegistry = pregistry->GetBlockHash();
    int nHeightRegistry = pregistry->GetBlockHeight();
    printf("RewindRegistry(): replay registry\n"
           "   from %s (%d)\n   to %s (%d)\n",
           hashRegistry.ToString().c_str(),
           nHeightRegistry,
           pindexRewind->GetBlockHash().ToString().c_str(),
           pindexRewind->nHeight);
    pindexCurrentRet = mapBlockIndex[hashRegistry];
    // collect the blocks to replay, newest first
    vector<CBlockIndex*> vreplay; // organized top to bottom
    vreplay.push_back(pindexRewind);
    CBlockIndex *pindexReplay = pindexRewind->pprev;
    while (pindexReplay->GetBlockHash() != hashRegistry)
    {
        if (GetFork(pindexReplay->nHeight) < XST_FORKPURCHASE)
        {
            // no need to replay blocks preceeding use of registry
            break;
        }
        uint256 hashReplay = pindexReplay->GetBlockHash();
        if (!mapBlockIndex.count(hashReplay))
        {
            // this should happen exceedingly rarely if at all
            printf("RewindRegistry(): TSNH no block %s, can't replay registry\n",
                   hashReplay.ToString().c_str());
            return false;
        }
        if (!pindexReplay->pprev)
        {
            // this should rarely happen, and exceedingly so, if at all
            printf("RewindRegistry(): TSRH no prev block to %s, can't replay registry\n",
                   hashReplay.ToString().c_str());
            return false;
        }
        vreplay.push_back(pindexReplay);
        pindexReplay = pindexReplay->pprev;
    }
    // replay oldest-to-newest, updating the registry one block at a time
    vector<CBlockIndex*>::reverse_iterator rit;
    for (rit = vreplay.rbegin(); rit != vreplay.rend(); ++rit)
    {
        CBlockIndex *pindex = *rit;
        if (!pregistry->UpdateOnNewBlock(pindex,
                                         QPRegistry::ALL_SNAPS,
                                         fDebugQPoS))
        {
            // this should rarely happen, if at all
            printf("RewindRegistry(): TSRH couldn't update on %s, can't replay registry\n",
                   pindex->GetBlockHash().ToString().c_str());
            return false;
        }
        pindexCurrentRet = pindex;
    }
    printf("RewindRegistry(): Done\n   from %s\n   to %s\n",
           hashRegistry.ToString().c_str(),
           pindexRewind->GetBlockHash().ToString().c_str());
    return true;
}
//////////////////////////////////////////////////////////////////////////////
//
// mapOrphanTransactions
//
// Stores a received transaction whose inputs are not yet known (an
// "orphan") so it can be revisited when its parents arrive.  Returns
// false when the orphan is already known or is too large to keep.
// Ownership of the copied message buffer passes to
// mapOrphanTransactions; EraseOrphanTx deletes it.
bool AddOrphanTx(const CDataStream& vMsg)
{
    CTransaction tx;
    CDataStream(vMsg) >> tx;
    uint256 hash = tx.GetHash();
    if (mapOrphanTransactions.count(hash))
        return false;
    CDataStream* pvMsg = new CDataStream(vMsg);
    // Ignore big transactions, to avoid a
    // send-big-orphans memory exhaustion attack. If a peer has a legitimate
    // large transaction with a missing parent then we assume
    // it will rebroadcast it later, after the parent transaction(s)
    // have been mined or received.
    // 10,000 orphans, each of which is at most 5,000 bytes big is
    // at most 500 megabytes of orphans:
    if (pvMsg->size() > 5000)
    {
        printf("ignoring large orphan tx (size: %" PRIszu ", hash: %s)\n", pvMsg->size(), hash.ToString().c_str());
        delete pvMsg;
        return false;
    }
    mapOrphanTransactions[hash] = pvMsg;
    // index the orphan under each parent hash for quick lookup when a
    // parent transaction arrives
    BOOST_FOREACH(const CTxIn& txin, tx.vin)
        mapOrphanTransactionsByPrev[txin.prevout.hash].insert(make_pair(hash, pvMsg));
    printf("stored orphan tx %s (mapsz %" PRIszu ")\n", hash.ToString().c_str(),
        mapOrphanTransactions.size());
    return true;
}
// Removes an orphan transaction (if present) from both orphan maps and
// frees its stored message buffer.
void static EraseOrphanTx(uint256 hash)
{
    if (!mapOrphanTransactions.count(hash))
        return;
    const CDataStream* pvMsg = mapOrphanTransactions[hash];
    CTransaction tx;
    CDataStream(*pvMsg) >> tx;
    // drop the by-parent index entries, pruning empty parent buckets
    BOOST_FOREACH(const CTxIn& txin, tx.vin)
    {
        mapOrphanTransactionsByPrev[txin.prevout.hash].erase(hash);
        if (mapOrphanTransactionsByPrev[txin.prevout.hash].empty())
            mapOrphanTransactionsByPrev.erase(txin.prevout.hash);
    }
    delete pvMsg;
    mapOrphanTransactions.erase(hash);
}
// Evicts random orphan transactions until at most nMaxOrphans remain;
// returns how many were evicted.  Victims are chosen by probing the map
// with a random hash, which keeps eviction unpredictable to attackers.
unsigned int LimitOrphanTxSize(unsigned int nMaxOrphans)
{
    unsigned int nRemoved = 0;
    for (; mapOrphanTransactions.size() > nMaxOrphans; ++nRemoved)
    {
        // pick the first orphan at or after a random key
        map<uint256, CDataStream*>::iterator it =
            mapOrphanTransactions.lower_bound(GetRandHash());
        if (it == mapOrphanTransactions.end())
        {
            // random key was past the last orphan: wrap to the first one
            it = mapOrphanTransactions.begin();
        }
        EraseOrphanTx(it->first);
    }
    return nRemoved;
}
//////////////////////////////////////////////////////////////////////////////
//
// Inputs
//
// Resolves the previous output an input spends, using the supplied map
// of fetched previous transactions.  Throws runtime_error when the
// previous transaction is missing from the map or the referenced output
// index is out of range.
const CTxOut& GetOutputFor(const CTxIn& input, const MapPrevTx& inputs)
{
    MapPrevTx::const_iterator itPrev = inputs.find(input.prevout.hash);
    if (itPrev == inputs.end())
    {
        throw runtime_error("CTransaction::GetOutputFor() : prevout.hash not found");
    }
    const CTransaction& txPrev = itPrev->second.second;
    if (input.prevout.n >= txPrev.vout.size())
    {
        throw runtime_error("CTransaction::GetOutputFor() : prevout.n out of range");
    }
    return txPrev.vout[input.prevout.n];
}
//////////////////////////////////////////////////////////////////////////////
//
// Outputs
//
// Minimum permitted transaction output value at the given height.
// No minimum applies before the purchase fork (or, on testnet, before
// the miss-fix fork); afterwards the chain-parameter minimum applies.
int64_t GetMinOutputAmount(int nHeight)
{
    if (nHeight < 0)
    {
        return 0;
    }
    const int nFork = GetFork(nHeight);
    if (nFork < XST_FORKPURCHASE)
    {
        return 0;
    }
    if (nTestNet && (nFork < XST_FORKMISSFIX))
    {
        return 0;
    }
    return chainParams.MIN_TXOUT_AMOUNT;
}
//////////////////////////////////////////////////////////////////////////////
//
// CTransaction and CTxIndex
//
// Extracts the signatory (a compressed public key) of input idx into
// keyRet.  Only TX_PUBKEY, TX_PUBKEYHASH, and TX_CLAIM previous outputs
// yield a signatory.  Returns false when the input index is out of
// range, the previous output is nonstandard, the key is uncompressed,
// or (after the QPOSB fork) the previous transaction is not confirmed
// in the main chain.  An empty keyRet on return means the input may be
// good, but no key could be extracted.
bool CTransaction::GetSignatory(const MapPrevTx &mapInputs,
                                unsigned int idx, CPubKey &keyRet) const
{
    keyRet.Clear();
    txnouttype typetxo;
    vector<valtype> vSolutions;
    if (idx >= vin.size())
    {
        return false;
    }
    CTxIn input = vin[idx];
    if (GetFork(pindexBest->nHeight) >= XST_FORKQPOSB)
    {
        // the input should be in the main chain so that the registry isn't
        // changed, leaving the input with an obsolete signatory
        CTransaction tx;
        uint256 hashBlock = 0;
        unsigned int nTimeBlock;
        if (GetTransaction(input.prevout.hash, tx, hashBlock, nTimeBlock))
        {
            if (hashBlock == 0)
            {
                // block of input transaction unknown
                return false;
            }
            map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock);
            if (mi != mapBlockIndex.end())
            {
                if ((*mi).second)
                {
                    CBlockIndex* pindex = (*mi).second;
                    if (!pindex->IsInMainChain())
                    {
                        // block containing input transaction not in main chain
                        return false;
                    }
                }
                else
                {
                    // block index is null for input transaction
                    return false;
                }
            }
            else
            {
                // block containing input transaction not in block index
                return false;
            }
        }
        else
        {
            // input transaction unknown
            return false;
        }
    }
    CTxOut prevout = GetOutputFor(input, mapInputs);
    if (!Solver(prevout.scriptPubKey, typetxo, vSolutions))
    {
        // input is bad
        return false;
    }
    switch (typetxo)
    {
    case TX_PUBKEY:
        // key is directly in the previous output's script
        keyRet = CPubKey(vSolutions.front());
        if (!keyRet.IsCompressed())
        {
            // pubkey is fine, but only compressed pubkeys are allowed
            return false;
        }
        break;
    case TX_PUBKEYHASH:
    case TX_CLAIM:
        // extract pubkey from scriptSig (last 33 bytes)
        //    scriptSig: <sig> <pubKey>
        // sig validation of inputs done elsewhere since this is a spend
        if (input.scriptSig.size() >= 33)
        {
            CScript::const_iterator last = input.scriptSig.end();
            CScript::const_iterator first = last - 33;
            keyRet = CPubKey(static_cast<valtype>(CScript(first, last)));
        }
        else
        {
            return false;
        }
        break;
    default:
        return false;
    }
    return true;
}
// Compares key to the signatory (compressed pubkey) of input idx.
// Returns true only when the requested input's signatory can be
// extracted and equals key.
//
// BUGFIX: the signatory was previously always extracted from input 0,
// silently ignoring idx.  All current callers pass idx == 0, so their
// behavior is unchanged; the parameter now works as documented.
bool CTransaction::ValidateSignatory(const MapPrevTx &mapInputs,
                                     int idx, CPubKey &key) const
{
    CPubKey keySignatory;
    if (!GetSignatory(mapInputs, idx, keySignatory))
    {
        // nonstandard
        return false;
    }
    if (keySignatory.IsEmpty())
    {
        // input should be PUBKEY, PUBKEYHASH, or CLAIM
        return false;
    }
    return (keySignatory == key);
}
// Compares each key in vKeys to the signatory of input idx.
// Returns true when the requested input's signatory can be extracted
// and matches any element of vKeys.
//
// BUGFIX: the signatory was previously always extracted from input 0,
// silently ignoring idx.  All current callers pass idx == 0, so their
// behavior is unchanged; the parameter now works as documented.
bool CTransaction::ValidateSignatory(const MapPrevTx &mapInputs,
                                     int idx, vector<CPubKey> &vKeys) const
{
    CPubKey keySignatory;
    if (!GetSignatory(mapInputs, idx, keySignatory))
    {
        // nonstandard
        return false;
    }
    if (keySignatory.IsEmpty())
    {
        // input should be PUBKEY, PUBKEYHASH, or CLAIM
        return false;
    }
    // accept the first key that matches the signatory
    for (vector<CPubKey>::const_iterator it = vKeys.begin();
         it != vKeys.end(); ++it)
    {
        if (keySignatory == *it)
        {
            return true;
        }
    }
    return false;
}
// Compares the input signatory against the staker's registry keys.
// fKeyTypes is a bitmask selecting which of the staker's keys (owner,
// manager, delegate, controller) are acceptable; the signatory of input
// idx must match one of them.  Returns false when the ID does not
// correspond to a qualified staker or no selected key matches.
bool CTransaction::ValidateSignatory(const QPRegistry *pregistry,
                                     const MapPrevTx &mapInputs,
                                     int idx, unsigned int nStakerID,
                                     QPKeyType fKeyTypes) const
{
    vector<CPubKey> vKeys;
    if (fKeyTypes & QPKEY_OWNER)
    {
        CPubKey key;
        if (!pregistry->GetOwnerKey(nStakerID, key))
        {
            // ID doesn't correspond to a qualified staker
            return false;
        }
        vKeys.push_back(key);
    }
    if (fKeyTypes & QPKEY_MANAGER)
    {
        CPubKey key;
        if (!pregistry->GetManagerKey(nStakerID, key))
        {
            // ID doesn't correspond to a qualified staker
            return false;
        }
        vKeys.push_back(key);
    }
    if (fKeyTypes & QPKEY_DELEGATE)
    {
        CPubKey key;
        if (!pregistry->GetDelegateKey(nStakerID, key))
        {
            // ID doesn't correspond to a qualified staker
            return false;
        }
        vKeys.push_back(key);
    }
    if (fKeyTypes & QPKEY_CONTROLLER)
    {
        CPubKey key;
        if (!pregistry->GetControllerKey(nStakerID, key))
        {
            // ID doesn't correspond to a qualified staker
            return false;
        }
        vKeys.push_back(key);
    }
    // delegate the actual comparison to the vector overload
    return ValidateSignatory(mapInputs, idx, vKeys);
}
// Validates every TX_PURCHASE1/TX_PURCHASE4 output of this transaction.
// This does a full check: it depends on the state of the registry
// (alias / NFT availability) and on the current staker price.
// Total value in and out is checked elsewhere, where it is more sensible.
// On success mapRet is keyed by the normalized (lowercase) alias, one
// qpos_purchase per registration.  Returns true for a transaction with
// no purchases at all; to know if any were registered, check mapRet.
bool CTransaction::CheckPurchases(const QPRegistry *pregistry,
                                  int64_t nStakerPrice,
                                  map<string, qpos_purchase> &mapRet) const
{
    int nFork = GetFork(nBestHeight + 1);
    BOOST_FOREACH(const CTxOut &txout, vout)
    {
        txnouttype typetxo;
        vector<valtype> vSolutions;
        if (!Solver(txout.scriptPubKey, typetxo, vSolutions))
        {
            // nonstandard
            return false;
        }
        if ((typetxo != TX_PURCHASE1) && (typetxo != TX_PURCHASE4))
        {
            // not a purchase
            continue;
        }
        if (nFork < XST_FORKPURCHASE)
        {
            // too soon to purchase
            return DoS(100, error("CheckPurchase() : too soon"));
        }
        if (txout.nValue != 0)
        {
            // purchase output can't have a value
            return DoS(100, error("CheckPurchase() : has value"));
        }
        if (vSolutions.empty())
        {
            // this should never happen (going to dos anyway just in case)
            return DoS(100, error("CheckPurchase() : TSNH no vSolution"));
        }
        valtype vch = vSolutions.front();
        qpos_purchase purchase;
        ExtractPurchase(vch, purchase);
        // PURCHASE1 carries exactly one key; PURCHASE4 carries 3 or 4
        if (typetxo == TX_PURCHASE1)
        {
            if (purchase.keys.size() != 1)
            {
                // malformed PURCHASE1
                return DoS(100, error("CheckPurchase() : malformed 1"));
            }
        }
        else if (purchase.keys.size() < 3 || purchase.keys.size() > 4)
        {
            // malformed PURCHASE4
            return DoS(100, error("CheckPurchase() : malformed 4"));
        }
        // disallowing more than 2x price removes any chance of overflow
        if (purchase.value < nStakerPrice)
        {
            // too little paid for registration
            return DoS(100, error("CheckPurchase() : too little paid"));
        }
        if (purchase.value > (nStakerPrice * 2))
        {
            // excessive amount paid for registration
            return DoS(20, error("CheckPurchase() : too much paid"));
        }
        if (purchase.pcm > 100000)
        {
            // can't delegate more than 100%
            return DoS(100, error("CheckPurchase() : too much delegate pay"));
        }
        string sLC;
        // user is selecting an NFT by specifying an NFT ID instead of name
        if (purchase.nft > 0)
        {
            if (!pregistry->NftIsAvailable(purchase.nft, sLC))
            {
                return DoS(100, error("CheckPurchase() : NFT unavailable"));
            }
            // canonicalize the alias to the NFT's nickname
            purchase.alias = mapNfts[purchase.nft].strNickname;
        }
        else if (pregistry->AliasIsAvailable(purchase.alias, sLC))
        {
            // the alias may itself name an NFT; verify consistency
            if (pregistry->GetNftIDForAlias(sLC, purchase.nft))
            {
                string sCharKey;
                if (!pregistry->NftIsAvailable(purchase.nft, sCharKey))
                {
                    return DoS(100, error("CheckPurchase() : NFT unavailable"));
                }
                if (sCharKey != sLC)
                {
                    // the lookup map doesn't match the map by id
                    return error("CheckPurchase(): TSNH NFT mismatch");
                }
                purchase.alias = mapNfts[purchase.nft].strNickname;
            }
        }
        else
        {
            // alias is not valid or is taken (should have checked)
            return DoS(100, error("CheckPurchase() : alias unavailable"));
        }
        if (mapRet.count(sLC) > 0)
        {
            // tx registers same alias more than once
            return DoS(100, error("CheckPurchase() : multiple regs"));
        }
        mapRet[sLC] = purchase;
    }
    // returns true for no registrations too
    // to know if any were registered, check size of mapRet
    return true;
}
// Validates every SETOWNER/SETMANAGER/SETDELEGATE/SETCONTROLLER output
// of this transaction.  This does a full check: it depends on the state
// of the registry.  Allows 1, 2, 3, or 4 distinct key changes in one tx,
// all for the same staker ID; vout order determines the sequence of key
// changes, an owner change must come last, and a manager change must
// follow any other change except owner.  On success vRet holds the
// parsed setkey operations in vout order.
// (Fixed: stray backtick removed from the "no setkeys" log message.)
bool CTransaction::CheckSetKeys(const QPRegistry *pregistry,
                                const MapPrevTx &mapInputs,
                                vector<qpos_setkey> &vRet) const
{
    // one block after the purchase period starts
    int nFork = GetFork(nBestHeight);
    vRet.clear();
    int fKeyTypes = 0;
    unsigned int nStakerID = 0;
    BOOST_FOREACH(const CTxOut &txout, vout)
    {
        txnouttype typetxo;
        vector<valtype> vSolutions;
        if (!Solver(txout.scriptPubKey, typetxo, vSolutions))
        {
            printf("CheckSetKeys(): fail: nonstandard\n");
            // nonstandard
            return false;
        }
        if ((typetxo != TX_SETOWNER) &&
            (typetxo != TX_SETMANAGER) &&
            (typetxo != TX_SETDELEGATE) &&
            (typetxo != TX_SETCONTROLLER))
        {
            continue;
        }
        // one block after the purchase period starts
        if (nFork < XST_FORKPURCHASE)
        {
            // too soon to setkeys
            return DoS(100, error("CheckSetKeys() : too soon"));
        }
        if (txout.nValue != 0)
        {
            // setkey output can't have a value
            return DoS(100, error("CheckSetKeys() : has value"));
        }
        if (vin.size() != 1)
        {
            // key setting transactions have only 1 input
            //    to avoid complex searches through inputs for keys
            return DoS(100, error("CheckSetKeys() : multiple inputs"));
        }
        QPKeyType fThisKeyType = mapQPoSKeyTypes[typetxo];
        if (fKeyTypes & fThisKeyType)
        {
            // each key type may be assigned at most once per tx
            return DoS(100, error("CheckSetKeys() : multiple assignment"));
        }
        if ((fThisKeyType != QPKEY_OWNER) && (QPKEY_OWNER & fKeyTypes))
        {
            // can't change owner then expect any other key change to work
            //    especially since registry won't be updated until the
            //    tx is connected
            return DoS(100, error("CheckSetKeys() : owner already changed"));
        }
        if (((fThisKeyType != QPKEY_MANAGER) &&
             (fThisKeyType != QPKEY_OWNER)) &&
            (QPKEY_MANAGER & fKeyTypes))
        {
            // force manager change to come after other key type changes
            //    except owner, in case the signatory is manager
            // will not go through the complexities of investigating signatory
            //    here just to allow more flexible ordering
            return DoS(100, error("CheckSetKeys() : manager already changed"));
        }
        qpos_setkey setkey;
        setkey.keytype = fThisKeyType;
        ExtractSetKey(vSolutions.front(), setkey);
        if ((fThisKeyType == QPKEY_DELEGATE) && (setkey.pcm > 100000))
        {
            // can't delegate more than 100%
            return DoS(100, error("CheckSetKeys() : pcm too high"));
        }
        if (nStakerID == 0)
        {
            if (!pregistry->IsQualifiedStaker(setkey.id))
            {
                // disqualified stakers get purged before key
                //    changes would have any effect
                return DoS(100, error("CheckSetKeys() : disqualified"));
            }
            nStakerID = setkey.id;
        }
        else if (setkey.id != nStakerID)
        {
            // avoid complexities by disallowing more than one ID
            return DoS(100, error("CheckSetKeys() : changing multiple IDs"));
        }
        vRet.push_back(setkey);
        fKeyTypes |= fThisKeyType;
    }
    if (!fKeyTypes)
    {
        printf("CheckSetKeys(): fail: no setkeys\n");
        return false;
    }
    if (QPKEY_OWNER & fKeyTypes)
    {
        // only owner can change owner
        if (!ValidateSignatory(pregistry, mapInputs, 0, nStakerID, QPKEY_OWNER))
        {
            // signatory doesn't own staker
            return DoS(100, error("CheckSetKeys() : sig not owner"));
        }
    }
    else
    {
        // manager can change everything but owner, including manager
        if (!ValidateSignatory(pregistry, mapInputs, 0, nStakerID, QPKEY_OM))
        {
            // signatory isn't staker or manager
            return DoS(100, error("CheckSetKeys() : sig not owner or manager"));
        }
    }
    return true;
}
// Validates any TX_ENABLE/TX_DISABLE output of this transaction.
// This does a full check: it depends on the state of the registry.
// Only one state change per tx is allowed because only one ID is
// allowed per tx.  A true return with setstateRet.id == 0 means the tx
// may be good, but contains no setstate at all.
bool CTransaction::CheckSetState(const QPRegistry *pregistry,
                                 const MapPrevTx &mapInputs,
                                 qpos_setstate &setstateRet) const
{
    // one block after the purchase period starts
    int nFork = GetFork(nBestHeight);
    qpos_setstate setstate;
    setstate.id = 0;
    setstate.enable = false;
    txnouttype typetxo;
    vector<valtype> vSolutions;
    for (unsigned int i = 0; i < vout.size(); ++i)
    {
        if (!Solver(vout[i].scriptPubKey, typetxo, vSolutions))
        {
            // nonstandard
            printf("CheckSetState(): nonstandard\n");
            return false;
        }
        if ((typetxo != TX_ENABLE) && (typetxo != TX_DISABLE))
        {
            continue;
        }
        // one block after the purchase period starts
        if (nFork < XST_FORKPURCHASE)
        {
            // too soon to set state
            return DoS(100, error("CheckSetState() : too soon"));
        }
        if (vout[i].nValue != 0)
        {
            // setstate output can't have a value
            return DoS(100, error("CheckSetState() : has value"));
        }
        if (setstate.id != 0)
        {
            // only allow one per tx
            return DoS(100, error("CheckSetState() : multiple setstates"));
        }
        if (vin.size() != 1)
        {
            // avoid searching for keys by allowing only 1 input
            return DoS(100, error("CheckSetState() : multiple inputs"));
        }
        setstate.enable = (typetxo == TX_ENABLE);
        ExtractSetState(vSolutions.front(), setstate);
        if (!pregistry->IsQualifiedStaker(setstate.id))
        {
            // disallow setting state of disqualified stakers even if extant
            return DoS(100, error("CheckSetState() : staker disqualified"));
        }
        if (!ValidateSignatory(pregistry, mapInputs, 0, setstate.id, QPKEY_OMC))
        {
            // signatory doesn't own, manage, or control the staker
            return DoS(100, error("CheckSetState() : sig not O/M/C"));
        }
    }
    setstateRet.id = setstate.id;
    setstateRet.enable = setstate.enable;
    return true;
}
// this does a full check: depends on the state of the registry
// false return value means claim is malformed or illegal
//
// Scans the outputs for a TX_CLAIM operation (a withdrawal from the
// registry ledger). A valid claim tx has exactly one input and one output.
// On success claimRet holds the claim; claimRet.value == 0 with a true
// return means the tx is fine but is not a claim.
bool CTransaction::CheckClaim(const QPRegistry *pregistry,
                              const MapPrevTx &mapInputs,
                              qpos_claim &claimRet) const
{
    // 0 claim value with return true means tx is okay, but not a claim
    // any false return value means tx or inputs are bad
    claimRet.value = 0;
    bool fFoundClaim = false;
    txnouttype typetxo;
    vector<valtype> vSolutions;
    // this loop is complicated so it doesn't return false
    // for valid non-claim transactions
    for (unsigned int i = 0; i < vout.size(); ++i)
    {
        if (!Solver(vout[i].scriptPubKey, typetxo, vSolutions))
        {
            // claim outputs must match a template (i.e. nonstandard)
            return false;
        }
        // claims can only have 1 input and 1 output to keep things simple
        if (typetxo == TX_CLAIM)
        {
            if (vin.size() != 1)
            {
                return DoS(100, error("CheckClaim() : qty inputs not 1"));
            }
            if (vout.size() != 1)
            {
                return DoS(100, error("CheckClaim() : qty outputs not 1"));
            }
            fFoundClaim = true;
        }
    }
    if (!fFoundClaim)
    {
        // tx could very well be fine, but it isn't a claim
        return true;
    }
    // on mainnet, claims are valid only one day's worth of blocks
    // after the qPoS fork
    if ((!fTestNet) && (GetFork(nBestHeight - QP_BLOCKS_PER_DAY) < XST_FORKQPOS))
    {
        // too soon to claim
        return DoS(100, error("CheckClaim() : too soon for claim"));
    }
    // fills claimRet.key and claimRet.value from the script solution
    ExtractClaim(vSolutions.front(), claimRet);
    if (claimRet.value < chainParams.MIN_TXOUT_AMOUNT)
    {
        // claim amount is too little for any tx
        return DoS(100, error("CheckClaim() : too little"));
    }
    if (!pregistry->CanClaim(claimRet.key, claimRet.value))
    {
        // account owner (key) cannot claim this amount
        return DoS(100, error("CheckClaim() : illegal amount"));
    }
    // single input (checked above), so signatory is input 0
    if (!ValidateSignatory(mapInputs, 0, claimRet.key))
    {
        // signatory doesn't own any registry ledger account
        return DoS(100, error("CheckClaim() : invalid signatory"));
    }
    return true;
}
// this does a full check: depends on the state of the registry
// lots of checks are done to allow setting multiple keys
// does not allow setting the same key twice in a single transaction
// vout order determines sequence of key changes
// return is vector of setmetas
//
// Scans every output for TX_SETMETA operations and validates each one:
// all setmetas in a tx must target the same qualified staker ID, keys and
// values must pass CheckMetaKey/CheckMetaValue, and the single input's
// signatory must hold a key authority permitted to set that meta key.
// Validated setmetas are appended to vRet in vout order.
bool CTransaction::CheckSetMetas(const QPRegistry *pregistry,
                                 const MapPrevTx &mapInputs,
                                 vector<qpos_setmeta> &vRet) const
{
    // one block after the purchase period starts
    int nFork = GetFork(nBestHeight);
    vRet.clear();
    // ID of the staker whose metas this tx sets (only one ID per tx)
    unsigned int nStakerID = 0;
    // meta keys already set by this tx, to reject duplicates
    map<string, string> mapMetas;
    bool fCheckedSignatory = false;
    BOOST_FOREACH(const CTxOut &txout, vout)
    {
        txnouttype typetxo;
        vector<valtype> vSolutions;
        if (!Solver(txout.scriptPubKey, typetxo, vSolutions))
        {
            printf("CheckSetMetas(): fail: nonstandard\n");
            // nonstandard
            return false;
        }
        if (typetxo != TX_SETMETA)
        {
            continue;
        }
        // one block after the purchase period starts
        if (nFork < XST_FORKPURCHASE)
        {
            // too soon to setmetas
            // (fixed: message formerly misspelled the function name
            //  as "CheckSetMets")
            return DoS(100, error("CheckSetMetas() : too soon"));
        }
        if (txout.nValue != 0)
        {
            // setmeta output can't have a value
            return DoS(100, error("CheckSetMetas() : has value"));
        }
        if (vin.size() != 1)
        {
            // meta setting transactions have only 1 input
            // to avoid complex searches through inputs for keys
            return DoS(100, error("CheckSetMetas() : multiple inputs"));
        }
        qpos_setmeta setmeta;
        // fills setmeta.id, .key, .value from the script solution
        ExtractSetMeta(vSolutions.front(), setmeta);
        if (nStakerID == 0)
        {
            if (!pregistry->IsQualifiedStaker(setmeta.id))
            {
                // disqualified stakers get purged before key
                // changes would have any effect
                return DoS(100, error("CheckSetMetas() : not qualified"));
            }
            nStakerID = setmeta.id;
        }
        else if (setmeta.id != nStakerID)
        {
            // avoid complexities by disallowing more than one ID
            return DoS(100, error("CheckSetMetas() : multiple stakers"));
        }
        // which key authorities (owner/manager/...) may set this meta key
        QPKeyType fAuthorities = CheckMetaKey(setmeta.key);
        if (fAuthorities == QPKEY_NONE)
        {
            return DoS(100, error("CheckSetMetas() : key not valid"));
        }
        if (!CheckMetaValue(setmeta.value))
        {
            return DoS(100, error("CheckSetMetas() : value not valid"));
        }
        if (mapMetas.count(setmeta.key))
        {
            // disallow setting the same key twice in the same transaction
            return DoS(100, error("CheckSetMetas() : changing key twice"));
        }
        mapMetas[setmeta.key] = setmeta.value;
        // only need to do this once per tx because only one ID allowed
        if (!fCheckedSignatory)
        {
            if (!ValidateSignatory(pregistry, mapInputs, 0, setmeta.id, fAuthorities))
            {
                // signatory isn't authorized to set meta
                return DoS(100, error("CheckSetMetas() : sig not authorized"));
            }
            fCheckedSignatory = true;
        }
        vRet.push_back(setmeta);
    }
    return true;
}
// Validates every candidate qPoS operation (vDeets) carried by this tx
// against the temporary registry state, accumulating validated operations
// into the *Ret maps. The per-category Check* helpers (CheckPurchases,
// CheckSetKeys, ...) each re-scan the whole tx, so a per-category latch
// ensures each helper runs at most once per tx.
// Returns false (usually via DoS) on the first invalid operation.
// NOTE(review): the results of CheckSetState and CheckSetMetas are only
// validated here, never copied into mapSetMetasRet (nor is the setstate
// stored anywhere) -- confirm whether callers re-extract them elsewhere.
bool CTransaction::CheckQPoS(const QPRegistry *pregistryTemp,
                             const MapPrevTx &mapInputs,
                             unsigned int nTime,
                             const vector<QPTxDetails> &vDeets,
                             const CBlockIndex *pindexPrev,
                             map<string, qpos_purchase> &mapPurchasesRet,
                             map<unsigned int, vector<qpos_setkey> > &mapSetKeysRet,
                             map<CPubKey, vector<qpos_claim> > &mapClaimsRet,
                             map<unsigned int, vector<qpos_setmeta> > &mapSetMetasRet,
                             vector<QPTxDetails> &vDeetsRet) const
{
    // fork rules for the block this tx would be included in
    int nFork = GetFork(pindexPrev->nHeight + 1);
    // each latch marks that the corresponding tx-wide check already ran
    bool fPurchasesChecked = false;
    bool fSetKeysChecked = false;
    bool fSetStateChecked = false;
    bool fClaimChecked = false;
    bool fSetMetasChecked = false;
    // validate the candidate qPoS transactions
    BOOST_FOREACH(const QPTxDetails &deet, vDeets)
    {
        switch (static_cast<txnouttype>(deet.t))
        {
        case TX_PURCHASE1:
        case TX_PURCHASE4:
          {
            if (fPurchasesChecked)
            {
                break;
            }
            // staker price depends on how many stakers are qualified
            // and on the current money supply
            uint32_t N = static_cast<uint32_t>(
                            pregistryTemp->GetNumberQualified());
            int64_t nStakerPrice = GetStakerPrice(N,
                                                  pindexPrev->nMoneySupply,
                                                  nFork);
            map<string, qpos_purchase> mapTxPrchs;
            if (!CheckPurchases(pregistryTemp, nStakerPrice, mapTxPrchs))
            {
                return error("CheckQPoS(): purchase fail");
            }
            map<string, qpos_purchase>::const_iterator it;
            // two loops because we have to check all first
            // tx must fail if any fail
            for (it = mapTxPrchs.begin(); it != mapTxPrchs.end(); ++it)
            {
                if (mapPurchasesRet.count(it->first))
                {
                    // multiple purchases for name
                    return DoS(100, error("CheckQPoS() : multiple purchases"));
                }
            }
            for (it = mapTxPrchs.begin(); it != mapTxPrchs.end(); ++it)
            {
                mapPurchasesRet[it->first] = it->second;
            }
            fPurchasesChecked = true;
            break;
          }
        case TX_SETOWNER:
        case TX_SETMANAGER:
        case TX_SETDELEGATE:
        case TX_SETCONTROLLER:
          {
            if (fSetKeysChecked)
            {
                break;
            }
            vector<qpos_setkey> vSetKeys;
            if (!CheckSetKeys(pregistryTemp, mapInputs, vSetKeys))
            {
                return error("CheckQPoS(): setkey fail");
            }
            if (vSetKeys.empty())
            {
                // this should never happen (dos anyway just in case)
                return DoS(100, error("CheckQPoS() : TSNH no keys set"));
            }
            // all setkeys in a tx share one staker ID (enforced by
            // CheckSetKeys), so vSetKeys[0].id identifies the tx
            if (mapSetKeysRet.count(vSetKeys[0].id))
            {
                return DoS(100, error("CheckQPoS() : multiple setkeys"));
            }
            mapSetKeysRet[vSetKeys[0].id] = vSetKeys;
            fSetKeysChecked = true;
            break;
          }
        case TX_ENABLE:
        case TX_DISABLE:
          {
            if (fSetStateChecked)
            {
                // this should never happen (dos anyway just in case)
                return DoS(100, error("CheckQPoS() : TSNH multiple setstates"));
            }
            qpos_setstate setstate;
            if (!CheckSetState(pregistryTemp, mapInputs, setstate))
            {
                return error("CheckQPoS(): bad setstate");
            }
            fSetStateChecked = true;
            break;
          }
        case TX_CLAIM:
          {
            if (fClaimChecked)
            {
                // this should never happen (dos anyway just in case)
                return DoS(100, error("CheckQPoS() : TSNH multiple claims"));
            }
            qpos_claim claim;
            if (!CheckClaim(pregistryTemp, mapInputs, claim))
            {
                return DoS(100, error("CheckQPoS() : bad claim"));
            }
            // re-check the aggregate when this pubkey already has
            // pending claims from other txs in the same block
            map<CPubKey, vector<qpos_claim> >::const_iterator it;
            it = mapClaimsRet.find(claim.key);
            if (it != mapClaimsRet.end())
            {
                int64_t nTotal = claim.value;
                BOOST_FOREACH(const qpos_claim &c, it->second)
                {
                    nTotal += c.value;
                }
                if (!pregistryTemp->CanClaim(claim.key, nTotal, nTime))
                {
                    return DoS(100, error("CheckQPoS() : invalid total claimed"));
                }
            }
            mapClaimsRet[claim.key].push_back(claim);
            fClaimChecked = true;
            break;
          }
        case TX_SETMETA:
          {
            if (fSetMetasChecked)
            {
                break;
            }
            vector<qpos_setmeta> vSetMetas;
            if (!CheckSetMetas(pregistryTemp, mapInputs, vSetMetas))
            {
                return error("CheckQPoS(): setmeta fail");
            }
            if (vSetMetas.empty())
            {
                // this should never happen (dos anyway just in case)
                return DoS(100, error("CheckQPoS() : no metas set"));
            }
            fSetMetasChecked = true;
            break;
          }
        default:
          {
            // this should never happen
            // (fixed typo: message formerly read "unknonwn")
            return DoS(100, error("CheckQPoS() : TSNH unknown qPoS op"));
          }
        }
        vDeetsRet.push_back(deet);
    }
    return true;
}
// use only for fully validated transactions, no real checks are performed
// returns true if any of the qPoS transactions need inputs
//
// Extracts a QPTxDetails record (operation type, containing block hash,
// txid, output index, plus operation-specific fields) for every qPoS
// output of this tx and appends them to vDeets in vout order.
// Purchases are the only qPoS operations that do not set fNeedsInputs.
bool CTransaction::GetQPTxDetails(const uint256& hashBlock,
                                  vector<QPTxDetails> &vDeets) const
{
    uint256 hashTx(GetHash());
    bool fNeedsInputs = false;
    vector<valtype> vSolutions;
    txnouttype whichType;
    for (unsigned int nOut = 0; nOut < vout.size(); ++nOut)
    {
        const CTxOut& txout = vout[nOut];
        if (!Solver(txout.scriptPubKey, whichType, vSolutions))
        {
            // insoluble outputs carry no qPoS details
            continue;
        }
        // provenance of this qPoS op: type, block, txid, output index
        QPTxDetails deets;
        deets.t = whichType;
        deets.hash = hashBlock;
        deets.txid = hashTx;
        deets.n = nOut;
        switch (whichType)
        {
        case TX_PURCHASE1:
        case TX_PURCHASE4:
            ExtractPurchase(vSolutions.front(), deets);
            break;
        case TX_SETOWNER:
        case TX_SETMANAGER:
        case TX_SETDELEGATE:
        case TX_SETCONTROLLER:
            fNeedsInputs = true;
            ExtractSetKey(vSolutions.front(), deets);
            break;
        case TX_ENABLE:
        case TX_DISABLE:
            fNeedsInputs = true;
            ExtractSetState(vSolutions.front(), deets);
            break;
        case TX_CLAIM:
            fNeedsInputs = true;
            ExtractClaim(vSolutions.front(), deets);
            break;
        case TX_SETMETA:
            fNeedsInputs = true;
            ExtractSetMeta(vSolutions.front(), deets);
            break;
        default:
            // not a qPoS output: nothing to record
            continue;
        }
        vDeets.push_back(deets);
    }
    return fNeedsInputs;
}
// returns true if any of the outputs are feework
bool CTransaction::HasFeework() const
{
vector<valtype> vSolutions;
txnouttype whichType;
for (unsigned int nOut = 0; nOut < vout.size(); ++nOut)
{
const CTxOut& txout = vout[nOut];
if (!Solver(txout.scriptPubKey, whichType, vSolutions))
{
continue;
}
if (whichType == TX_FEEWORK)
{
return true;
}
}
return false;
}
// Loads from disk the transaction referenced by prevout, filling
// txindexRet with its tx index record. On any failure (missing index,
// read error, or prevout.n out of range) the transaction is left null
// and false is returned.
bool CTransaction::ReadFromDisk(CTxDB& txdb, COutPoint prevout, CTxIndex& txindexRet)
{
    SetNull();
    if (txdb.ReadTxIndex(prevout.hash, txindexRet) &&
        ReadFromDisk(txindexRet.pos))
    {
        // the referenced output must actually exist in the loaded tx
        if (prevout.n < vout.size())
        {
            return true;
        }
        SetNull();
    }
    return false;
}
// Convenience overload: loads the tx referenced by prevout, discarding
// the tx index record.
bool CTransaction::ReadFromDisk(CTxDB& txdb, COutPoint prevout)
{
    CTxIndex txindex;
    return ReadFromDisk(txdb, prevout, txindex);
}
// Convenience overload: opens the tx database read-only and loads the tx
// referenced by prevout, discarding the tx index record.
bool CTransaction::ReadFromDisk(COutPoint prevout)
{
    CTxDB txdb("r");
    CTxIndex txindex;
    return ReadFromDisk(txdb, prevout, txindex);
}
bool CTransaction::IsQPoSTx() const
{
for (unsigned int i = 0; i < vout.size(); ++i)
{
txnouttype typetxo;
vector<valtype> vSolutions;
Solver(vout[i].scriptPubKey, typetxo, vSolutions);
switch (typetxo)
{
case TX_PURCHASE1:
case TX_PURCHASE4:
case TX_SETOWNER:
case TX_SETMANAGER:
case TX_SETDELEGATE:
case TX_SETCONTROLLER:
case TX_ENABLE:
case TX_DISABLE:
case TX_CLAIM:
case TX_SETMETA:
return true;
default:
continue;
}
}
return false;
}
// Returns true if this transaction is "standard" for relay/mining at
// block height nNewHeight: acceptable version, at least one output
// (post-FORK005), final, not timestamped into the future, not too large,
// push-only scriptSigs of sane size, and only standard output scripts
// with no more data-carrier outputs than value-carrying ones.
bool CTransaction::IsStandard(int nNewHeight) const
{
    if (nVersion > CTransaction::CURRENT_VERSION)
    {
        return false;
    }
    int nFork = GetFork(nNewHeight);
    if (nFork >= XST_FORK005)
    {
        if (vout.size() < 1)
        {
            return false;
        }
        // Treat non-final transactions as non-standard to prevent a specific type
        // of double-spend attack, as well as DoS attacks. (if the transaction
        // can't be mined, the attacker isn't expending resources broadcasting it)
        // Basically we don't want to propagate transactions that can't be included in
        // the next block.
        //
        // However, IsFinalTx() is confusing... Without arguments, it uses
        // chainActive.Height() to evaluate nLockTime; when a block is
        // accepted, chainActive.Height()
        // is set to the value of nHeight in the block. However, when IsFinalTx()
        // is called within CBlock::AcceptBlock(), the height of the block *being*
        // evaluated is what is used. Thus if we want to know if a transaction can
        // be part of the *next* block, we need to call IsFinalTx() with one more
        // than chainActive.Height().
        //
        // Timestamps on the other hand don't get any special treatment, because we
        // can't know what timestamp the next block will have, and there aren't
        // timestamp applications where it matters.
        if (!IsFinal(nNewHeight))
        {
            return false;
        }
        // nTime, aka GetTxTime(), has a different purpose from nLockTime
        // but can be used in similar attacks
        // CTransaction gets timestamp from block upon NOTXTIME_VERSION
        // block timestamp checked elsewhere
        if (HasTimestamp() && (GetTxTime() > FutureDrift(GetAdjustedTime())))
        {
            return false;
        }
        // Extremely large transactions with lots of inputs can cost the network
        // almost as much to process as they cost the sender in fees, because
        // computing signature hashes is O(ninputs*txsize). Limiting transactions
        // to MAX_STANDARD_TX_SIZE mitigates CPU exhaustion attacks.
        unsigned int sz = GetSerializeSize(SER_NETWORK, CTransaction::CURRENT_VERSION);
        if (sz >= chainParams.MAX_STANDARD_TX_SIZE)
        {
            return false;
        }
    }
    BOOST_FOREACH(const CTxIn& txin, vin)
    {
        // Biggest 'standard' txin is a 3-signature 3-of-3 CHECKMULTISIG
        // pay-to-script-hash, which is 3 ~80-byte signatures, 3
        // ~65-byte public keys, plus a few script ops.
        if (txin.scriptSig.size() > 500)
        {
            return false;
        }
        // scriptSigs must only push data onto the stack
        if (!txin.scriptSig.IsPushOnly())
        {
            return false;
        }
    }
    // count data-carrier (TX_NULL_DATA) vs. ordinary outputs
    unsigned int nDataOut = 0;
    unsigned int nTxnOut = 0;
    txnouttype whichType;
    BOOST_FOREACH(const CTxOut& txout, vout)
    {
        if (!::IsStandard(txout.scriptPubKey, whichType))
        {
            return false;
        }
        if (whichType == TX_NULL_DATA)
        {
            nDataOut++;
        }
        else
        {
            // zero-value ordinary outputs were nonstandard before
            // the second purchase fork
            if ((txout.nValue == 0) && (nFork < XST_FORKPURCHASE2))
            {
                return false;
            }
            nTxnOut++;
        }
    }
    // disallow txs that are mostly data carriers
    if (nDataOut > nTxnOut)
    {
        return false;
    }
    return true;
}
//
// Check transaction inputs, and make sure any
// pay-to-script-hash transactions are evaluating IsStandard scripts
//
// Why bother? To avoid denial-of-service attacks; an attacker
// can submit a standard HASH... OP_EQUAL transaction,
// which will get accepted into blocks. The redemption
// script can be anything; an attacker could use a very
// expensive-to-check-upon-redemption script like:
// DUP CHECKSIG DROP ... repeated 100 times... OP_1
//
// Returns true if every input's scriptSig is push-only and pushes exactly
// the number of stack items its previous output's script template expects,
// recursing one level into pay-to-script-hash redemption scripts.
// mapInputs must contain the previous transactions for all inputs.
bool CTransaction::AreInputsStandard(const MapPrevTx& mapInputs) const
{
    if (IsCoinBase())
        return true; // Coinbases don't use vin normally
    for (unsigned int i = 0; i < vin.size(); i++)
    {
        const CTxOut& prev = GetOutputFor(vin[i], mapInputs);
        vector<vector<unsigned char> > vSolutions;
        txnouttype whichType;
        // get the scriptPubKey corresponding to this input:
        const CScript& prevScript = prev.scriptPubKey;
        if (!Solver(prevScript, whichType, vSolutions))
            return false;
        int nArgsExpected = ScriptSigArgsExpected(whichType, vSolutions);
        if (nArgsExpected < 0)
            return false;
        // Transactions with extra stuff in their scriptSigs are
        // non-standard. Note that this EvalScript() call will
        // be quick, because if there are any operations
        // beside "push data" in the scriptSig the
        // IsStandard() call returns false
        vector<vector<unsigned char> > stack;
        if (!EvalScript(stack, vin[i].scriptSig, *this, i, SCRIPT_VERIFY_NONE, 0))
            return false;
        if (whichType == TX_SCRIPTHASH)
        {
            if (stack.empty())
                return false;
            // the last stack item is the serialized redemption script;
            // it must itself be standard and must not be another P2SH
            CScript subscript(stack.back().begin(), stack.back().end());
            vector<vector<unsigned char> > vSolutions2;
            txnouttype whichType2;
            if (!Solver(subscript, whichType2, vSolutions2))
                return false;
            if (whichType2 == TX_SCRIPTHASH)
                return false;
            int tmpExpected;
            tmpExpected = ScriptSigArgsExpected(whichType2, vSolutions2);
            if (tmpExpected < 0)
                return false;
            // the subscript's arguments ride on the same stack
            nArgsExpected += tmpExpected;
        }
        // any extra or missing stack items make the input nonstandard
        if (stack.size() != (unsigned int)nArgsExpected)
            return false;
    }
    return true;
}
// Counts signature operations in all scriptSigs and scriptPubKeys the
// legacy (inaccurate) way, i.e. without evaluating pay-to-script-hash
// redemption scripts.
unsigned int
CTransaction::GetLegacySigOpCount() const
{
    unsigned int nCount = 0;
    for (unsigned int i = 0; i < vin.size(); ++i)
    {
        nCount += vin[i].scriptSig.GetSigOpCount(false);
    }
    for (unsigned int i = 0; i < vout.size(); ++i)
    {
        nCount += vout[i].scriptPubKey.GetSigOpCount(false);
    }
    return nCount;
}
// Fills in hashBlock, nIndex, and vMerkleBranch for this transaction from
// pblock (or from disk when pblock is NULL and this is not a client node).
// Returns the transaction's depth in the main chain (1 = in the best
// block), or 0 when the tx or its block cannot be located.
int CMerkleTx::SetMerkleBranch(const CBlock* pblock)
{
    if (fClient)
    {
        // client nodes keep no merkle branch; only report depth
        if (hashBlock == 0)
            return 0;
    }
    else
    {
        CBlock blockTmp;
        if (pblock == NULL)
        {
            // Load the block this tx is in
            CTxIndex txindex;
            if (!CTxDB("r").ReadTxIndex(GetHash(), txindex))
                return 0;
            if (!blockTmp.ReadFromDisk(txindex.pos.nFile, txindex.pos.nBlockPos))
                return 0;
            pblock = &blockTmp;
        }
        // Update the tx's hashBlock
        hashBlock = pblock->GetHash();
        // Locate the transaction
        for (nIndex = 0; nIndex < (int)pblock->vtx.size(); nIndex++)
            if (pblock->vtx[nIndex] == *(CTransaction*)this)
                break;
        if (nIndex == (int)pblock->vtx.size())
        {
            // tx not in the block: reset to "no branch" state
            vMerkleBranch.clear();
            nIndex = -1;
            printf("ERROR: SetMerkleBranch() : couldn't find tx in block\n");
            return 0;
        }
        // Fill in merkle branch
        vMerkleBranch = pblock->GetMerkleBranch(nIndex);
    }
    // Is the tx in a block that's in the main chain
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock);
    if (mi == mapBlockIndex.end())
        return 0;
    CBlockIndex* pindex = (*mi).second;
    if (!pindex || !pindex->IsInMainChain())
        return 0;
    // depth = confirmations relative to the best block
    return pindexBest->nHeight - pindex->nHeight + 1;
}
// Context-free sanity checks on this transaction as of height nNewHeight:
// non-empty vin/vout, serialized size within the block limit, per-output
// value rules (fork-dependent minimums, qPoS/data outputs allowed to be
// zero after the purchase fork), total value in money range, no duplicate
// inputs, sane coinbase script size, and non-null prevouts for non-coinbase
// txs. Does NOT check signatures or look up inputs.
bool CTransaction::CheckTransaction(int nNewHeight) const
{
    // Basic checks that don't depend on any context
    if (vin.empty())
        return DoS(10, error("CheckTransaction() : vin empty %s",
                             GetHash().ToString().c_str()));
    if (vout.empty())
    {
        return DoS(10, error("CheckTransaction() : vout empty %s",
                             GetHash().ToString().c_str()));
    }
    // Size limits
    if (::GetSerializeSize(*this, SER_NETWORK, PROTOCOL_VERSION) >
        chainParams.MAX_BLOCK_SIZE)
    {
        return DoS(100, error("CheckTransaction() : size limits failed %s",
                              GetHash().ToString().c_str()));
    }
    // Check for negative or overflow output values
    int64_t nValueOut = 0;
    for (unsigned int i = 0; i < vout.size(); i++)
    {
        const CTxOut& txout = vout[i];
        // empty outputs are only allowed in coinbase/coinstake txs
        if (txout.IsEmpty() && !IsCoinBase() && !IsCoinStake())
        {
            return DoS(100, error("CheckTransaction() : txout empty for user transaction %s %u",
                                  GetHash().ToString().c_str(), i));
        }
        // minimum-output rules changed at FORK004 and FORKPURCHASE
        int nFork = GetFork(nNewHeight);
        if (nFork < XST_FORK004)
        {
            if ((!txout.IsEmpty()) && // not coinbase or coinstake
                txout.nValue < GetMinOutputAmount(nNewHeight))
            {
                return DoS(100,
                           error("CheckTransaction() : txout.nValue below minimum"));
            }
        }
        else if (nFork < XST_FORKPURCHASE)
        {
            if (txout.nValue < GetMinOutputAmount(nNewHeight))
            {
                return DoS(100,
                           error("CheckTransaction() : txout.nValue negative %s %u",
                                 GetHash().ToString().c_str(), i));
            }
        }
        else
        {
            // after the purchase fork, qPoS/feework/data outputs may be
            // zero-valued; ordinary outputs must still meet the minimum
            vector<valtype> vSolutions;
            txnouttype whichType;
            if (Solver(txout.scriptPubKey, whichType, vSolutions))
            {
                if (((whichType >= TX_PURCHASE1) && (whichType <= TX_DISABLE)) ||
                    (whichType == TX_SETMETA) ||
                    (whichType == TX_FEEWORK) ||
                    (whichType == TX_NULL_DATA))
                {
                    if (txout.nValue < 0)
                    {
                        return DoS(100,
                                   error("CheckTransaction() : txout.nValue negative %s %u",
                                         GetHash().ToString().c_str(), i));
                    }
                }
                // prevent dusting after purchases start
                else if ((whichType >= TX_PUBKEY) && (whichType <= TX_MULTISIG))
                {
                    if (txout.nValue < GetMinOutputAmount(nNewHeight))
                    {
                        return DoS(100,
                                   error("CheckTransaction() : txout.nValue too small %s %u",
                                         GetHash().ToString().c_str(), i));
                    }
                }
            }
        }
        if (txout.nValue > chainParams.MAX_MONEY)
            return DoS(100,
                       error("CheckTransaction() : txout.nValue too high %s %u",
                             GetHash().ToString().c_str(), i));
        nValueOut += txout.nValue;
        // running total must stay in money range (overflow guard)
        if (!MoneyRange(nValueOut))
            return DoS(100,
                       error("CheckTransaction() : txout total out of range %s %u",
                             GetHash().ToString().c_str(), i));
    }
    // Check for duplicate inputs
    set<COutPoint> vInOutPoints;
    BOOST_FOREACH(const CTxIn& txin, vin)
    {
        if (vInOutPoints.count(txin.prevout))
            return false;
        vInOutPoints.insert(txin.prevout);
    }
    if (IsCoinBase())
    {
        if (vin[0].scriptSig.size() < 2 || vin[0].scriptSig.size() > 100)
            return DoS(100,
                       error("CheckTransaction() : coinbase script size is invalid %s",
                             GetHash().ToString().c_str()));
    }
    else
    {
        BOOST_FOREACH(const CTxIn& txin, vin)
            if (txin.prevout.IsNull())
                return DoS(10, error("CheckTransaction() : prevout is null %s",
                                     GetHash().ToString().c_str()));
    }
    return true;
}
// Minimum money fee for a transaction of nBytes joining a block that is
// currently nBlockSize large. mode selects the base rate (relay vs.
// mined); the fee scales per started kilobyte and is raised sharply as
// the block approaches the generation size limit.
int64_t CTransaction::GetMinFee(unsigned int nBlockSize,
                                enum GetMinFee_mode mode,
                                unsigned int nBytes) const
{
    static const unsigned int nMaxBlockSizeGen = chainParams.MAX_BLOCK_SIZE_GEN;
    // Base fee is either MIN_TX_FEE or MIN_RELAY_TX_FEE
    int64_t nBaseFee;
    if (mode == GMF_RELAY)
    {
        nBaseFee = chainParams.MIN_RELAY_TX_FEE;
    }
    else
    {
        nBaseFee = chainParams.MIN_TX_FEE;
    }
    // one base fee per started kilobyte
    int64_t nFee = (1 + (int64_t)nBytes / 1000) * nBaseFee;
    unsigned int nSizeWithTx = nBlockSize + nBytes;
    // Raise the price as the block approaches full
    if ((nBlockSize != 1) && (nSizeWithTx >= nMaxBlockSizeGen / 2))
    {
        if (nSizeWithTx >= nMaxBlockSizeGen)
        {
            // block would be full: effectively unpayable
            return chainParams.MAX_MONEY;
        }
        nFee *= nMaxBlockSizeGen / (nMaxBlockSizeGen - nSizeWithTx);
    }
    // clamp anything out of money range
    if (!MoneyRange(nFee))
    {
        nFee = chainParams.MAX_MONEY;
    }
    return nFee;
}
// Memory hardness (feework memory cost) required of a feework proof for a
// transaction of nBytes joining a block currently nBlockSize large.
// mode selects the base memory cost (relay vs. mined); the cost grows
// linearly with tx size and exponentially (stepwise compounded) with how
// full the feeless portion of the block is.
uint32_t CTransaction::GetFeeworkHardness(unsigned int nBlockSize,
                                          enum GetMinFee_mode mode,
                                          unsigned int nBytes) const
{
    // We reserve the last part of the block for money fee transactions,
    // so there is no way to spam blocks full with feeless transactions.
    static const uint64_t nMaxSize = chainParams.FEELESS_MAX_BLOCK_SIZE;
    static const uint64_t nParts = chainParams.FEEWORK_BLOCK_PARTS;
    // jump of feework memory cost, expressed as a percent
    static const uint64_t nJump = chainParams.FEEWORK_COST_PCT_JUMP_PER_PART;
    // The multiplier allows a simplification of, reducing the complexity
    // of the calculation by one addition operation.
    //    cost' += ((cost * jump * 1000) / 1000) / 100
    // The denominator of 100 reflects that jump is expressed as a percentage
    // I.e. (setting A=1000 and B=100):
    //    cost' = cost + ((cost*jump*A) / A) / B)
    //          = (((B*cost/jump + cost) * jump*A ) / A) / B
    //          = (((B*cost + cost*jump) * A) / A) / B
    //          = ((cost * (B + jump) * A) / A) / B
    //          = (cost * (A * (B + jump)) / A) / B
    //          = (cost * multiplier) / (A*B)
    // Where multiplier = A * (B + jump) = 1000 * (100 + jump)
    // Note that we both multiply and divide by 1000 to get at
    // least three digits of precision in the calculation
    // because we are using integer arithmetic.
    static const uint64_t nMultiplier = 1000 * (100 + nJump);
    // cost is the memory cost of the feework
    uint64_t nBaseCost = chainParams.FEELESS_MCOST_MIN;
    if (mode == GMF_RELAY)
    {
        nBaseCost = chainParams.RELAY_FEELESS_MCOST_MIN;
    }
    // comp time increases linearly with memory cost
    uint64_t nCost = (1 + (int64_t)nBytes / 1000) * nBaseCost;
    uint64_t nNewBlockSize = nBlockSize + nBytes;
    if (nNewBlockSize > nMaxSize)
    {
        // tx can't fit in the feeless portion: demand an unattainable cost
        // (fixed: "numeric_limits<uint32_t>::max" was missing the call
        // parentheses, so a function pointer -- not the value 0xFFFFFFFF --
        // was being cast to the return value)
        return numeric_limits<uint32_t>::max();
    }
    // Exponentially raise the memory hardness of feework as block fills
    // up to 31 steps, at jump percent per step, stepwise compounded.
    // The following loop does steps 2 - 31, counter i expressing which step.
    for (uint64_t i = 2; i <= ((nParts * nNewBlockSize) / nMaxSize); ++i)
    {
        nCost = (nCost * nMultiplier) / 100000;
    }
    // A 1 MB tx in a 1 MB block (4,578,764,800) would slightly
    // exceed the 32 bit max (1<<32 == 4,294,967,296),
    // so clamp to the 32 bit max before returning.
    // (fixed: same missing call parentheses as above)
    return min(nCost, (uint64_t)numeric_limits<uint32_t>::max());
}
// Upper bound on the feework hash value (lower limit = harder work) for a
// transaction of nBytes joining a block currently nBlockSize large.
// mode selects the base limit (relay vs. mined); the limit shrinks
// linearly with tx size and exponentially (stepwise compounded) with how
// full the feeless portion of the block is.
uint64_t CTransaction::GetFeeworkLimit(unsigned int nBlockSize,
                                       enum GetMinFee_mode mode,
                                       unsigned int nBytes) const
{
    // We reserve the last part of the block for money fee transactions,
    // so there is no way to spam blocks full with feeless transactions.
    static const uint64_t nMaxSize = chainParams.FEELESS_MAX_BLOCK_SIZE;
    static const uint64_t nParts = chainParams.FEEWORK_BLOCK_PARTS;
    // decay of the feework limit per filled part, expressed as a percent
    // (less than 100%)
    static const uint64_t nDecay = chainParams.FEEWORK_LIMIT_PCT_DECAY_PER_PART;
    // feework limit is the limit of the hash value
    uint64_t nLimit = (mode == GMF_RELAY) ? chainParams.RELAY_TX_FEEWORK_LIMIT
                                          : chainParams.TX_FEEWORK_LIMIT;
    // difficulty increases (limit decreases) linearly with memory cost
    nLimit /= (1 + (int64_t)nBytes / 1000);
    uint64_t nSizeWithTx = nBlockSize + nBytes;
    if (nSizeWithTx > nMaxSize)
    {
        // tx can't fit in the feeless portion: no hash can qualify
        return 0;
    }
    // Exponentially raise the difficulty of feework as block fills
    // up to 30 steps, at decay percent per step, stepwise compounded
    uint64_t nSteps = (nParts * nSizeWithTx) / nMaxSize;
    for (uint64_t i = 1; i < nSteps; ++i)
    {
        nLimit = (nDecay * nLimit) / 100;
    }
    return nLimit;
}
// Feeless transactions have a work proof (feework) as the last
// output. These have the structure [nonce, height, work], where
// the height specifies a particular block. Work is calculated as
// work: 64 bits argon2d(nonce, hashblock, vin, vout[:-1])
// hashblock: 256 bit hash of the block at the specified height
// vin: all inputs, with signatures stripped
// vout[:-1]: all outputs except for the work proof (last output)
// Checks:
// * work is 8 bytes (work is analogous to the PoW nonce)
// * height
// * 4 bytes
// * height >= best height - 24
// Allows feeless transactions to be cleared from mempool
// after 24 blocks if they don't get into a block.
// This is both a spam prevention measure and allows legit
// senders to try again in times of high tx volume.
// * hash
// * hash == argon2d(work, hashblock, tx*)
// tx* is tx with
// - signatures blanked
// - feework output
// * hash <= max feework limit
// * feework is last output
// * only 1 feework per tx
// Validates the feework proof (if any) on this transaction.
// feework is filled with extraction/validation results and a status code;
// fRequired demands that a feework be present; buffer supplies the hash
// workspace; nBlockSize/mode size the required hardness; fCheckDepth
// enforces the maximum depth of the referenced block.
// Returns true when the tx has a valid feework, or has none and
// fRequired is false.
bool CTransaction::CheckFeework(Feework &feework,
                                bool fRequired,
                                FeeworkBuffer& buffer,
                                unsigned int nBlockSize,
                                enum GetMinFee_mode mode,
                                bool fCheckDepth) const
{
    if (vout.empty())
    {
        feework.status = Feework::EMPTY;
        return DoS(100, error("CheckFeework() : no outputs"));
    }
    if (IsCoinBase())
    {
        feework.status = Feework::COINBASE;
        return DoS(100, error("CheckFeework() : tx is coinbase"));
    }
    if (IsCoinStake())
    {
        feework.status = Feework::COINSTAKE;
        return DoS(100, error("CheckFeework() : tx is coinstake"));
    }
    CTxOut txout;
    txnouttype typetxo;
    vector<valtype> vSolutions;
    // scan all outputs; a feework must be the last (and only) one,
    // so after this loop typetxo/vSolutions describe the last output
    unsigned int nIndexLast = vout.size() - 1;
    for (unsigned int i = 0; i <= nIndexLast; ++i)
    {
        vSolutions.clear();
        txout = vout[i];
        if (!Solver(txout.scriptPubKey, typetxo, vSolutions))
        {
            feework.status = Feework::INSOLUBLE;
            return error("CheckFeework() : output insoluble");
        }
        if (typetxo == TX_FEEWORK)
        {
            if (i != nIndexLast)
            {
                feework.status = Feework::MISPLACED;
                return DoS(100, error("CheckFeework() : misplaced feework"));
            }
        }
    }
    if (typetxo == TX_FEEWORK)
    {
        // mainnet feework requires the feeless tx version
        if (!fTestNet && (nVersion < CTransaction::FEELESS_VERSION))
        {
            feework.status = Feework::BADVERSION;
            return DoS(100, error("CheckFeework() : bad tx version"));
        }
    }
    else
    {
        if (fRequired)
        {
            feework.status = Feework::MISSING;
            return DoS(100, error("CheckFeework() : missing feework"));
        }
        else
        {
            // no feework and none required: tx is fine
            feework.status = Feework::NONE;
            return true;
        }
    }
    // parse [height, work] out of the feework output
    valtype vch = vSolutions.front();
    feework.ExtractFeework(vch);
    // this test is OK because feework.height has to be in the best chain
    if (!fTestNet && (GetFork(feework.height) < XST_FORKFEELESS))
    {
        feework.status = Feework::BADVERSION;
        return DoS(100, error("CheckFeework(): too soon for feeless"));
    }
    CBlockIndex* pblockindex = mapBlockIndex[hashBestChain];
    if (feework.height > pblockindex->nHeight)
    {
        feework.status = Feework::BLOCKUNKNOWN;
        return DoS(34, error("CheckFeework() : unknown block"));
    }
    if (fCheckDepth &&
        feework.height < (pblockindex->nHeight - chainParams.FEELESS_MAX_DEPTH))
    {
        feework.status = Feework::BLOCKTOODEEP;
        return DoS(34, error("CheckFeework() : block is too deep"));
    }
    // NOTE(review): this local is never used below -- confirm it is
    // safe to remove
    CBlock block;
    // walk back from the tip to the block the feework commits to
    while (pblockindex->nHeight > feework.height)
    {
        pblockindex = pblockindex->pprev;
    }
    CTransaction txTmp(*this);
    // Remove the last output (the feework)
    txTmp.vout.pop_back();
    // Blank the sigs.
    // Each will sign the work by virtue of signing the tx hash.
    for (unsigned int i = 0; i < txTmp.vin.size(); i++)
    {
        txTmp.vin[i].scriptSig = CScript();
    }
    // the feework hash commits to the referenced block hash plus the
    // stripped tx
    CDataStream ss(SER_DISK, CLIENT_VERSION);
    ss << *(pblockindex->phashBlock) << txTmp;
    // dynamic difficulty
    //   feework.mcost = chainParams.FEELESS_MCOST_MIN;
    //   feework.limit = GetFeeworkLimit(nBlockSize, mode, feework.bytes);
    // dynamic memory hardness
    // NOTE(review): this ternary pairs GMF_RELAY with TX_FEEWORK_LIMIT and
    // everything else with RELAY_TX_FEEWORK_LIMIT, which is the opposite
    // of the mode->constant pairing used in GetFeeworkLimit() -- confirm
    // whether the selection here is intentionally inverted.
    feework.limit = (mode == GMF_RELAY) ?
                        chainParams.TX_FEEWORK_LIMIT :
                        chainParams.RELAY_TX_FEEWORK_LIMIT;
    feework.GetFeeworkHash(ss, buffer);
    uint32_t mcost = GetFeeworkHardness(nBlockSize, mode, feework.bytes);
    if (!feework.Check(mcost))
    {
        return DoS(100, error("CheckFeework() : insufficient feework"));
    }
    return true;
}
bool CTxMemPool::accept(CTxDB& txdb, CTransaction &tx,
bool fCheckInputs, bool* pfMissingInputs)
{
if (pfMissingInputs)
{
*pfMissingInputs = false;
}
int nNewHeight = nBestHeight + 1;
int nFork = GetFork(nNewHeight);
if (!tx.CheckTransaction(nNewHeight))
{
return error("CTxMemPool::accept() : CheckTransaction failed");
}
// Coinbase is only valid in a block, not as a loose transaction
if (tx.IsCoinBase())
{
return tx.DoS(100, error("CTxMemPool::accept() : coinbase as individual tx"));
}
// ppcoin: coinstake is also only valid in a block, not as a loose transaction
if (tx.IsCoinStake())
{
return tx.DoS(100, error("CTxMemPool::accept() : coinstake as individual tx"));
}
// Rather not work on nonstandard transactions (unless -testnet)
if (!fTestNet && !tx.IsStandard(nNewHeight))
{
return error("CTxMemPool::accept() : nonstandard transaction type");
}
// Do we already have it?
uint256 hash = tx.GetHash();
{
LOCK(cs);
if (mapTx.count(hash))
{
return false;
}
}
if (fCheckInputs)
{
if (txdb.ContainsTx(hash))
{
return false;
}
}
// Check for conflicts with in-memory transactions
CTransaction* ptxOld = NULL;
for (unsigned int i = 0; i < tx.vin.size(); i++)
{
COutPoint outpoint = tx.vin[i].prevout;
if (mapNextTx.count(outpoint))
{
// Disable replacement feature for now
return false;
// TODO: Allow replacement?
#if 0
// Allow replacing with a newer version of the same transaction
if (i != 0)
{
return false;
}
ptxOld = mapNextTx[outpoint].ptx;
if (ptxOld->IsFinal())
{
return false;
}
if (!tx.IsNewerThan(*ptxOld))
{
return false;
}
for (unsigned int i = 0; i < tx.vin.size(); i++)
{
COutPoint outpoint = tx.vin[i].prevout;
if (!mapNextTx.count(outpoint) || mapNextTx[outpoint].ptx != ptxOld)
{
return false;
}
}
break;
#endif
}
}
// Like claims, registrations need to be checked for validity
// to prevent mempool flooding of bad registrations, although
// these are going to take a reserve of a lot more XST.
// The all-v-all check would be expensive, but registrations are
// expensive so they shouldn't cause a non-trivial burden here.
int64_t nValuePurchases = 0;
map<string, qpos_purchase> mapNames;
uint32_t N = static_cast<uint32_t>(
pregistryMain->GetNumberQualified());
int64_t nStakerPrice = GetStakerPrice(N, pindexBest->nMoneySupply, nFork);
if (!tx.CheckPurchases(pregistryMain, nStakerPrice, mapNames))
{
if (fDebugQPoS)
{
printf("Bad purchase:\n%s\n", tx.ToString().c_str());
}
return error("CTxMemPool::accept() : bad purchase");
}
if (!mapNames.empty())
{
if (mapRegistrations.count(hash) != 0)
{
// duplicate hash already checked but check here anyway
return false;
}
map<uint256, vector<string> >::const_iterator it;
for (it = mapRegistrations.begin(); it != mapRegistrations.end(); ++it)
{
vector<string>::const_iterator jt;
for (jt = it->second.begin(); jt != it->second.end(); ++jt)
{
if (mapNames.count(*jt) != 0)
{
// trying to register an alias already in the mempool
return false;
}
}
}
vector<string> vNames;
map<string, qpos_purchase>::const_iterator kt;
for (kt = mapNames.begin(); kt != mapNames.end(); ++kt)
{
vNames.push_back(kt->first);
nValuePurchases += kt->second.value;
}
mapRegistrations[hash] = vNames;
}
MapPrevTx mapInputs;
map<uint256, CTxIndex> mapUnused;
bool fInvalid = false;
if (!tx.FetchInputs(txdb, mapUnused, false, false, mapInputs, fInvalid))
{
if (fInvalid)
{
return error("accept() : FetchInputs found invalid tx %s",
hash.ToString().c_str());
}
if (pfMissingInputs)
{
*pfMissingInputs = true;
}
return false;
}
// The "input" for a claim is stored in the registry ledger which
// cannot be deducted from the ledger until the block is accepted into
// the main chain. Absent any checks, it is possible to spam the mempool
// with duplicate claims for the same pubkey with different txids.
// We protect against this "duplicate claim attack" by
// enforcing that only one claim for a given pubkey
// can exist in the mempool at once.
qpos_claim claim;
if (!tx.CheckClaim(pregistryMain, mapInputs, claim))
{
return false;
}
if (claim.value > 0)
{
map<uint256, CPubKey>::const_iterator it;
for (it = mapClaims.begin(); it != mapClaims.end(); ++it)
{
if (it->second == claim.key)
{
return false;
}
}
if (!pregistryMain->CanClaim(claim.key, claim.value))
{
return false;
}
}
// TODO: refactor some duplicate work on claims here
vector<QPTxDetails> vDeets;
tx.GetQPTxDetails(0, vDeets);
if (!vDeets.empty())
{
if (tx.HasFeework())
{
return error("accept(): qPoS tx rejected: has feework");
}
map<string, qpos_purchase> mapPurchasesTx;
map<unsigned int, vector<qpos_setkey> > mapSetKeysTx;
map<CPubKey, vector<qpos_claim> > mapClaimsTx;
map<unsigned int, vector<qpos_setmeta> > mapSetMetasTx;
vector<QPTxDetails> vDeetsTx;
if (!tx.CheckQPoS(pregistryMain,
mapInputs,
GetAdjustedTime(),
vDeets,
pindexBest,
mapPurchasesTx,
mapSetKeysTx,
mapClaimsTx,
mapSetMetasTx,
vDeetsTx))
{
return error("accept(): checking qPoS failed");
}
}
Feework feework;
feework.bytes = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);
if (fCheckInputs)
{
unsigned int nSize = feework.bytes;
// Check for non-standard pay-to-script-hash in inputs
if (!tx.AreInputsStandard(mapInputs) && !fTestNet)
{
return error("CTxMemPool::accept(): "
"nonstandard transaction input");
}
// Note: if you modify this code to accept non-standard transactions, then
// you should add code here to check that the transaction does a
// reasonable number of ECDSA signature verifications.
int64_t nFees = tx.GetValueIn(mapInputs, claim.value) -
(tx.GetValueOut() + nValuePurchases);
// Don't accept it if it can't get into a block
int64_t txMinFee = tx.GetMinFee(1000, GMF_RELAY, nSize);
if (nFees < txMinFee)
{
tx.CheckFeework(feework, true, bfrFeeworkValidator,
1000, GMF_RELAY);
switch (feework.status)
{
case Feework::OK:
break;
case Feework::BADVERSION:
return tx.DoS(100,
error("CTxMemPool::accept(): feework not allowed %s, "
"%" PRId64 " < %" PRId64,
hash.ToString().c_str(), nFees, txMinFee));
case Feework::NONE:
return tx.DoS(100,
error("CTxMemPool::accept(): not enough fees %s, "
"%" PRId64 " < %" PRId64,
hash.ToString().c_str(), nFees, txMinFee));
case Feework::INSUFFICIENT:
return tx.DoS(100,
error("CTxMemPool::accept(): not enough feework %s, "
"%" PRId64 " < %" PRId64 "\n%s\n",
hash.ToString().c_str(),
nFees, txMinFee,
feework.ToString(" ").c_str()));
default:
// the feework check will produce more error info if applicable
return tx.DoS(100,
error("CTxMemPool::accept(): "
"not enough fees %s, %" PRId64 " < %" PRId64,
hash.ToString().c_str(),
nFees, txMinFee));
}
if (fDebugFeeless)
{
printf("CTxMemPool::accept(): accepting feework\n %s\n"
" %" PRId64 " < %" PRId64 "\n%s\n",
hash.ToString().c_str(), nFees, txMinFee,
feework.ToString(" ").c_str());
}
}
unsigned int flags = STANDARD_SCRIPT_VERIFY_FLAGS;
if (GetFork(nNewHeight) < XST_FORK005)
{
flags = flags & ~SCRIPT_VERIFY_CHECKLOCKTIMEVERIFY;
}
// Check against previous transactions
// This is done last to help prevent CPU exhaustion denial-of-service attacks.
if (!tx.ConnectInputs(txdb, mapInputs, mapUnused, CDiskTxPos(1,1,1),
pindexBest, false, false, flags,
nValuePurchases, claim.value, feework))
{
return error("CTxMemPool::accept() : ConnectInputs failed %s",
hash.ToString().c_str());
}
} // end check inputs
// In case feework hasn't been checked, we check it here anyway to ensure
// the tx is well formed.
if (!feework.IsChecked())
{
if (!tx.CheckFeework(feework, false, bfrFeeworkValidator))
{
return error("CTxMemPool::accept() : feework rejected");
}
}
// Store transaction in memory
{
LOCK(cs);
if (ptxOld)
{
printf("CTxMemPool::accept() : replacing tx %s with new version\n",
ptxOld->GetHash().ToString().c_str());
remove(*ptxOld);
}
addUnchecked(hash, tx);
if (feework.IsOK())
{
mempool.addFeeless(feework.height, hash);
}
}
///// are we sure this is ok when loading transactions or restoring block txes
// If updated, erase old tx from wallet
if (ptxOld)
{
EraseFromWallets(ptxOld->GetHash());
}
printf("CTxMemPool::accept() : accepted %s (poolsz %" PRIszu ")\n",
hash.ToString().c_str(),
mapTx.size());
return true;
}
// Forward this transaction to the global memory pool for validation and
// admission; see CTxMemPool::accept for the full acceptance rules.
//   txdb            - transaction database handle used to fetch inputs
//   fCheckInputs    - when true, inputs are fetched and scripts verified
//   pfMissingInputs - optional out-flag, set when an input tx is unknown
bool CTransaction::AcceptToMemoryPool(CTxDB& txdb, bool fCheckInputs, bool* pfMissingInputs)
{
    return mempool.accept(txdb, *this, fCheckInputs, pfMissingInputs);
}
bool CTxMemPool::addUnchecked(const uint256& hash, CTransaction &tx)
{
    // Add to memory pool without checking anything. Don't call this directly,
    // call CTxMemPool::accept to properly check the transaction first.
    // NOTE(review): no LOCK(cs) is taken here -- callers (e.g. accept())
    // appear to hold the mempool lock already; confirm before adding
    // new call sites.
    {
        mapTx[hash] = tx;
        // Index every spent outpoint so conflicting spends of the same
        // prevout can be detected quickly via mapNextTx.
        for (unsigned int i = 0; i < tx.vin.size(); i++)
            mapNextTx[tx.vin[i].prevout] = CInPoint(&mapTx[hash], i);
        nTransactionsUpdated++;
    }
    return true;
}
// Remove a transaction from the memory pool.
//   tx         - transaction to remove
//   fRecursive - when true, also remove any pool transactions that
//                spend outputs of tx (descendants), recursively
// Always returns true.
bool CTxMemPool::remove(const CTransaction &tx, bool fRecursive)
{
    // Remove transaction from memory pool
    {
        LOCK(cs);
        uint256 hash = tx.GetHash();
        if (mapTx.count(hash))
        {
            if (fRecursive)
            {
                // Descendants first: any pool tx spending one of our
                // outputs is looked up through mapNextTx.
                for (unsigned int i = 0; i < tx.vout.size(); i++)
                {
                    map<COutPoint, CInPoint>:: iterator it =
                                          mapNextTx.find(COutPoint(hash, i));
                    if (it != mapNextTx.end())
                        remove(*it->second.ptx, true);
                }
            }
            // Drop the outpoint index entries for this tx's inputs.
            BOOST_FOREACH(const CTxIn& txin, tx.vin)
            {
                mapNextTx.erase(txin.prevout);
            }
            mapTx.erase(hash);
            nTransactionsUpdated++;
        }
        // The following are cheap and non recursive
        // so they are done without checking mapTx, etc.
        // Note also that mapFeeless is scanned and cleared every block,
        // so there is no need to clear the hash here, which would
        // require expensive all v. all checking.
        mapClaims.erase(hash);
        mapRegistrations.erase(hash);
    }
    return true;
}
bool CTxMemPool::removeConflicts(const CTransaction &tx)
{
// Remove transactions which depend on inputs of tx, recursively
LOCK(cs);
BOOST_FOREACH(const CTxIn &txin, tx.vin)
{
map<COutPoint, CInPoint>::iterator it = mapNextTx.find(txin.prevout);
if (it != mapNextTx.end()) {
const CTransaction &txConflict = *it->second.ptx;
if (txConflict != tx)
remove(txConflict, true);
}
}
return true;
}
void CTxMemPool::clear()
{
LOCK(cs);
mapTx.clear();
mapNextTx.clear();
++nTransactionsUpdated;
}
// Fill vtxid with the txid of every transaction currently in the pool.
// Any previous contents of vtxid are discarded.
void CTxMemPool::queryHashes(vector<uint256>& vtxid)
{
    vtxid.clear();
    LOCK(cs);
    vtxid.reserve(mapTx.size());
    map<uint256, CTransaction>::const_iterator it = mapTx.begin();
    while (it != mapTx.end())
    {
        vtxid.push_back(it->first);
        ++it;
    }
}
// Re-validate all staker-purchase transactions in the pool against the
// current registry state and staker price, removing any that are no
// longer valid (e.g. the price rose or the alias was taken).
// Returns the number of transactions removed.
int CTxMemPool::removeInvalidPurchases()
{
    uint32_t N = static_cast<uint32_t>(
                    pregistryMain->GetNumberQualified());
    // Price is computed for the next block to be connected.
    int nFork = GetFork(pindexBest->nHeight + 1);
    int64_t nStakerPrice = GetStakerPrice(N, pindexBest->nMoneySupply, nFork);
    set<uint256> setToRemove;
    std::map<uint256, CTransaction>::iterator it;
    LOCK(cs);
    for (it = mapTx.begin(); it != mapTx.end(); ++it)
    {
        CTransaction& tx = it->second;
        map<string, qpos_purchase> mapPurchases;
        if (!tx.CheckPurchases(pregistryMain, nStakerPrice, mapPurchases))
        {
            // maps iterate sorted by key, so no need to do more
            setToRemove.insert(it->first);
        }
        // FIXME: this will be unnecessary after FORK_PURCHASE3
        // Even if purchases check out, drop the tx when any requested
        // alias has since become unavailable in the registry.
        BOOST_FOREACH(const PAIRTYPE(string, qpos_purchase)& item,
                      mapPurchases)
        {
            string strUnused;
            if (!pregistryMain->AliasIsAvailable(item.first, strUnused))
            {
                setToRemove.insert(it->first);
            }
        }
    }
    // Removal is deferred so the mapTx iteration above is not invalidated.
    BOOST_FOREACH(const uint256& txid, setToRemove)
    {
        if (exists(txid))
        {
            remove(mapTx[txid]);
            if (fDebugQPoS)
            {
                printf("CTxMempool::removeInvalidPurchase():\n  %s\n",
                       txid.GetHex().c_str());
            }
        }
    }
    return setToRemove.size();
}
// Record a feeless (feework) transaction under the block height its
// proof-of-work references. Returns true if the txid was not already
// registered at that height.
// NOTE(review): no LOCK(cs) here -- the caller (see accept()) appears
// responsible for holding the mempool lock; confirm before reuse.
bool CTxMemPool::addFeeless(const int nHeight, const uint256& txid)
{
    return mapFeeless[nHeight].insert(txid).second;
}
// Expire feeless transactions whose referenced block height is deeper
// than FEELESS_MAX_DEPTH below the current best height, removing them
// from the pool and dropping their height buckets from mapFeeless.
// Returns the number of transactions removed.
int CTxMemPool::removeOldFeeless()
{
    static const int MAXDEPTH = chainParams.FEELESS_MAX_DEPTH;
    set<uint256> setTxToRemove;
    set<int> setHeightToRemove;
    MapFeeless::iterator it;
    LOCK(cs);
    for (it = mapFeeless.begin(); it != mapFeeless.end(); ++it)
    {
        if ((nBestHeight - it->first) <= MAXDEPTH)
        {
            // maps iterate sorted by key, so no need to do more
            break;
        }
        setTxToRemove.insert(it->second.begin(), it->second.end());
        setHeightToRemove.insert(it->first);
    }
    // Removal is deferred so the mapFeeless iteration is not invalidated.
    BOOST_FOREACH(const uint256& txid, setTxToRemove)
    {
        if (exists(txid))
        {
            remove(mapTx[txid]);
            if (fDebugFeeless)
            {
                printf("CTxMempool::removeOldFeeless():\n  %s\n",
                       txid.GetHex().c_str());
            }
        }
    }
    BOOST_FOREACH(const int& height, setHeightToRemove)
    {
        mapFeeless.erase(height);
        if (fDebugFeeless)
        {
            printf("CTxMempool::removeOldFeeless(): height=%d\n", height);
        }
    }
    return setTxToRemove.size();
}
// Return the confirmation depth of this tx in the main chain (1 = in
// the best block), or 0 when it is not in a main-chain block or its
// merkle branch does not verify. On success, pindexRet is set to the
// containing block's index.
int CMerkleTx::GetDepthInMainChainINTERNAL(CBlockIndex* &pindexRet) const
{
    if (hashBlock == 0 || nIndex == -1)
        return 0;
    // Find the block it claims to be in
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock);
    if (mi == mapBlockIndex.end())
        return 0;
    CBlockIndex* pindex = (*mi).second;
    if (!pindex || !pindex->IsInMainChain())
        return 0;
    // Make sure the merkle branch connects to this block
    // (verified lazily and cached via fMerkleVerified).
    if (!fMerkleVerified)
    {
        if (CBlock::CheckMerkleBranch(GetHash(), vMerkleBranch, nIndex) != pindex->hashMerkleRoot)
            return 0;
        fMerkleVerified = true;
    }
    pindexRet = pindex;
    return pindexBest->nHeight - pindex->nHeight + 1;
}
// Confirmation depth, distinguishing the two zero-depth cases:
// returns 0 for a tx waiting in the mempool, -1 for a tx that is in
// neither a main-chain block nor the pool.
int CMerkleTx::GetDepthInMainChain(CBlockIndex *&pindexRet) const
{
    const int nDepth = GetDepthInMainChainINTERNAL(pindexRet);
    if (nDepth != 0)
    {
        return nDepth;
    }
    return mempool.exists(GetHash()) ? 0 : -1;
}
// Number of additional confirmations required before generated coins
// (coinbase/coinstake) may be spent; 0 for ordinary transactions.
int CMerkleTx::GetBlocksToMaturity() const
{
    if (IsCoinBase() || IsCoinStake())
    {
        // Maturity window is nCoinbaseMaturity plus a 20-block margin.
        return max(0, (nCoinbaseMaturity+20) - GetDepthInMainChain());
    }
    return 0;
}
// Submit this merkle transaction to the memory pool.
// The fClient branch is legacy lightweight-client logic that is not
// expected to run (asserts immediately if it does).
bool CMerkleTx::AcceptToMemoryPool(CTxDB& txdb, bool fCheckInputs)
{
    if (fClient)
    {
        printf("CMerkleTx(): fClient was unexpectedly true\n");
        assert(0);  // FIXME: is fClient ever true????
        if (!IsInMainChain() && !ClientConnectInputs())
            return false;
        // Client mode skips full input checking.
        return CTransaction::AcceptToMemoryPool(txdb, false);
    }
    else
    {
        return CTransaction::AcceptToMemoryPool(txdb, fCheckInputs);
    }
}
// Convenience overload: opens the transaction database read-only and
// delegates to the main overload (using its declared default for
// fCheckInputs).
bool CMerkleTx::AcceptToMemoryPool()
{
    CTxDB txdb("r");
    return AcceptToMemoryPool(txdb);
}
// Submit this wallet transaction to the memory pool, first submitting
// any non-generated supporting transactions it carries (vtxPrev) so
// that this tx's inputs can be found.
//   txdb         - transaction database handle used for input lookups
//   fCheckInputs - when true, inputs are fetched and scripts verified
// Note: the original body wrapped everything in an extra scope followed
// by an unreachable "return false;"; the dead code and redundant scope
// are removed here -- the lock is held until return either way.
bool CWalletTx::AcceptWalletTransaction(CTxDB& txdb, bool fCheckInputs)
{
    LOCK(mempool.cs);
    // Add previous supporting transactions first
    BOOST_FOREACH(CMerkleTx& tx, vtxPrev)
    {
        // Generated coins (coinbase/coinstake) are never relayed
        // through the mempool.
        if (!(tx.IsCoinBase() || tx.IsCoinStake()))
        {
            uint256 hash = tx.GetHash();
            if (!mempool.exists(hash) && !txdb.ContainsTx(hash))
                tx.AcceptToMemoryPool(txdb, fCheckInputs);
        }
    }
    return AcceptToMemoryPool(txdb, fCheckInputs);
}
// Convenience overload: opens the transaction database read-only and
// delegates to the main overload (using its declared default for
// fCheckInputs).
bool CWalletTx::AcceptWalletTransaction()
{
    CTxDB txdb("r");
    return AcceptWalletTransaction(txdb);
}
// Confirmation depth of the block containing this tx index entry
// (1 = best block), or 0 if the block cannot be read or is not in the
// main chain.
int CTxIndex::GetDepthInMainChain() const
{
    // Read block header
    CBlock block;
    if (!block.ReadFromDisk(pos.nFile, pos.nBlockPos, false))
        return 0;
    // Find the block in the index
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(block.GetHash());
    if (mi == mapBlockIndex.end())
        return 0;
    CBlockIndex* pindex = (*mi).second;
    if (!pindex || !pindex->IsInMainChain())
        return 0;
    return 1 + nBestHeight - pindex->nHeight;
}
// Return transaction in tx, and if it was found inside a block, its hash is placed in hashBlock
// Return transaction in tx, and if it was found inside a block, its hash is placed in hashBlock
// Lookup order: mempool first, then the on-disk transaction index.
//   nTimeBlock receives the containing block's timestamp (disk hits only).
// NOTE(review): a disk hit still returns true even when the containing
// block fails to read, leaving hashBlock/nTimeBlock unset -- callers
// must tolerate that.
bool GetTransaction(const uint256 &hash, CTransaction &tx,
                    uint256 &hashBlock, unsigned int &nTimeBlock)
{
    {
        LOCK(cs_main);
        {
            LOCK(mempool.cs);
            if (mempool.lookup(hash, tx))
            {
                // Mempool transactions have no containing block.
                return true;
            }
        }
        CTxDB txdb("r");
        CTxIndex txindex;
        if (tx.ReadFromDisk(txdb, COutPoint(hash, 0), txindex))
        {
            CBlock block;
            if (block.ReadFromDisk(txindex.pos.nFile, txindex.pos.nBlockPos, false))
            {
                hashBlock = block.GetHash();
                nTimeBlock = block.GetBlockTime();
            }
            return true;
        }
    }
    return false;
}
//////////////////////////////////////////////////////////////////////////////
//
// CBlock and CBlockIndex
//
// Cache of the last result, so repeated nearby lookups walk few links.
static CBlockIndex* pblockindexFBBHLast;
// Return the main-chain block index at the given height by walking the
// pprev/pnext links from whichever anchor (genesis, tip, or the cached
// last result) is closest.
CBlockIndex* FindBlockByHeight(int nHeight)
{
    CBlockIndex *pblockindex;
    // Start from genesis for the lower half of the chain, tip otherwise.
    if (nHeight < nBestHeight / 2)
        pblockindex = pindexGenesisBlock;
    else
        pblockindex = pindexBest;
    // Prefer the cached index when it is closer to the target height.
    if (pblockindexFBBHLast && abs(nHeight - pblockindex->nHeight) > abs(nHeight - pblockindexFBBHLast->nHeight))
        pblockindex = pblockindexFBBHLast;
    while (pblockindex->nHeight > nHeight)
        pblockindex = pblockindex->pprev;
    while (pblockindex->nHeight < nHeight)
        pblockindex = pblockindex->pnext;
    pblockindexFBBHLast = pblockindex;
    return pblockindex;
}
// Load this block from disk using its index entry.
//   fReadTransactions - when false, only the header is populated
//                       (taken from the index, no disk read).
// Verifies the loaded block's hash against the index when transactions
// are read.
bool CBlock::ReadFromDisk(const CBlockIndex* pindex, bool fReadTransactions)
{
    if (!fReadTransactions)
    {
        *this = pindex->GetBlockHeader();
        return true;
    }
    if (!ReadFromDisk(pindex->nFile, pindex->nBlockPos, fReadTransactions))
        return false;
    if (GetHash() != pindex->GetBlockHash())
        return error("CBlock::ReadFromDisk() : GetHash() doesn't match index");
    return true;
}
// Walk back through the orphan map to the earliest known block of this
// orphan chain and return that block's hash.
uint256 static GetOrphanRoot(const CBlock* pblock)
{
    const CBlock* pcur = pblock;
    while (mapOrphanBlocks.count(pcur->hashPrevBlock) != 0)
    {
        pcur = mapOrphanBlocks[pcur->hashPrevBlock];
    }
    return pcur->GetHash();
}
// ppcoin: find block wanted by given orphan block
// ppcoin: walk to the root of the given orphan's chain and return the
// hash of the (still missing) block it builds on -- i.e. the block we
// need to request to connect the chain.
uint256 WantedByOrphan(const CBlock* pblockOrphan)
{
    const CBlock* pwalk = pblockOrphan;
    while (mapOrphanBlocks.count(pwalk->hashPrevBlock) != 0)
    {
        pwalk = mapOrphanBlocks[pwalk->hashPrevBlock];
    }
    return pwalk->hashPrevBlock;
}
int generateMTRandom(unsigned int s, int range)
{
boost::random::mt19937 gen(s);
boost::random::uniform_int_distribution<> dist(0, range);
return dist(gen);
}
// miner's coin base reward based on nHeight
// note: PoS started before PoW, so not every block was PoW before 5461
// 1 - 10: 23300 XST per block
// Total premine = 233000
// 11 - 260: 16 XST per block
// 261 - 1700: 8000 XST per block
// 1701 - 3140: 4000 XST per block
// 3141 - 4580: 2000 XST per block
// 4581 - 5460: 1000 XST per block
// 5461+ : Proof of Stake with 20% APY earnings on stake
// miner's coin base reward based on nHeight
// note: PoS started before PoW, so not every block was PoW before 5461
//  1 - 10: 23300 XST per block (premine, total 233000)
//  11 - 260: 16 XST per block
//  261 - 1700: 8000 XST per block
//  1701 - 3140: 4000 XST per block
//  3141 - 4580: 2000 XST per block
//  4581 - 5460: 1000 XST per block
//  5461+ : Proof of Stake with 20% APY earnings on stake
int64_t GetProofOfWorkReward(int nHeight, int64_t nFees)
{
    const int nFork = GetFork(nHeight);
    int64_t nReward = 0;
    if (fTestNet)
    {
        if (nHeight == 0)
        {
            nReward = 16 * COIN;
        }
        else if (nFork < XST_FORK002)
        {
            nReward = 90000 * COIN;
        }
    }
    else if (nHeight == 0)
    {
        nReward = 16 * COIN;     // genesis block coinbase is unspendable
    }
    else if (nHeight <= 10)
    {
        nReward = 23300 * COIN;  // Blocks 1-10 are premine
    }
    else if (nHeight <= 260)
    {
        nReward = 16 * COIN;     // 4 hr Low Reward Period for Fairness
    }
    else if (nHeight <= 1700)
    {
        nReward = 8000 * COIN;
    }
    else if (nHeight <= 3140)
    {
        nReward = 4000 * COIN;
    }
    else if (nHeight <= 4580)
    {
        nReward = 2000 * COIN;
    }
    else if (nFork < XST_FORK002)
    {
        nReward = 1000 * COIN;   // was 1 coin
    }
    return nReward + nFees;
}
// miner's coin stake reward based on nBits and coin age spent (coin-days)
// simple algorithm, not depend on the diff
int64_t GetProofOfStakeReward(int64_t nCoinAge, unsigned int nBits)
{
int64_t nRewardCoinYear;
nRewardCoinYear = fTestNet ? chainParams.MAX_STEALTH_PROOF_OF_STAKE_TESTNET :
chainParams.MAX_STEALTH_PROOF_OF_STAKE_MAINNET;
int64_t nSubsidy = nCoinAge * nRewardCoinYear / 365;
if (fDebug && GetBoolArg("-printcreation"))
printf("GetProofOfStakeReward(): create=%s nCoinAge=%" PRId64 " nBits=%d\n",
FormatMoney(nSubsidy).c_str(), nCoinAge, nBits);
return nSubsidy;
}
// Per-block qPoS reward: spread RECIPROCAL_QPOS_INFLATION^-1 (1%) of
// the current money supply over one year of 5-second blocks.
int64_t GetQPoSReward(const CBlockIndex *pindexPrev)
{
    // Blocks per year at one block every 5 seconds:
    // 365.25 * 24 * 60 * 60 / 5 = 6311520.
    static const int64_t BLOCKS_PER_YEAR = 6311520;
    int64_t nDivisor = BLOCKS_PER_YEAR * RECIPROCAL_QPOS_INFLATION;
    return pindexPrev->nMoneySupply / nDivisor;
}
// Price of the next staker given N currently qualified stakers, the
// money supply, and the active fork. Price grows with both supply and
// staker count; fPurchase adds a small supply-growth allowance so a
// quote remains sufficient while a purchase waits for confirmation.
int64_t GetStakerPrice(uint32_t N, int64_t nSupply, int nFork, bool fPurchase)
{
    // testnet
    //   1) stakers 1 to 22: 1st tier price (discount)
    //   2) stakers 23 to 86: 2nd tier price (no discount)
    //   3) stakers 87 to 214: 3rd tier price (premium)
    //   4) stakers 215 to 470: 4th tier price (unaffordable)
    static const uint32_t TIER1_T = 22;
    static const uint32_t TIER2_T = 64;
    static const int64_t K_SCALE_T = 4000;
    static const int64_t K_INCENTIVE_T = 200;
    // mainnet
    //   1) stakers 1 to 11: 1st tier price (big discount)
    //   2) stakers 12 to 43: 2nd tier price (discount)
    //   3) stakers 44 to 107: 3rd tier price (no discount)
    //   4) stakers 108 to 235: 4th tier price (premium)
    //   5) stakers 236 to 491: 5th tier price (unaffordable)
    static const uint32_t TIER1_M = 11;
    static const uint32_t TIER2_M = 32;
    static const int64_t K_SCALE_M = 12000;
    static const int64_t K_INCENTIVE_M = 64;
    // Expected fraction of the money supply increase waiting on a purchase.
    // Based on 5 second blocks, this is about 10 min meaning if you have
    // to wait 10 min for a purchase this estimate will still be enough.
    // Adds less than 0.2 XST to the price of a 50,000 XST staker.
    static const int64_t INVERSE_WAIT_INCREASE = 3153600;
    static const int64_t K_SCALE = fTestNet ? K_SCALE_T : K_SCALE_M;
    static const int64_t K_INCENTIVE = fTestNet ? K_INCENTIVE_T : K_INCENTIVE_M;
    static const uint32_t K_TIER = fTestNet ? (TIER2_T - TIER1_T) :
                                              (TIER2_M - TIER1_M);
    // Tier boundaries double in width, so the tier index is the bit
    // length of the (offset) staker count.
    int64_t blen = static_cast<int64_t>(bit_length(N + K_TIER));
    if (fPurchase)
    {
        nSupply += (nSupply / INVERSE_WAIT_INCREASE);
    }
    if (nFork >= XST_FORKPURCHASE3)
    {
        // Post-fork: compute in whole coins first, then scale by COIN.
        return COIN * ((((nSupply / COIN) / K_SCALE) * (blen - 1)) +
                       (K_INCENTIVE * N));
    }
    return ((nSupply / K_SCALE) * (blen - 1)) + (K_INCENTIVE * N);
}
// Retarget window: 1800 seconds. // 30 blocks
// (presumably 30 blocks at a 60 s target spacing -- TODO confirm
// against nStakeTargetSpacing)
static const int64_t nTargetTimespan = 60 * 30;
// Upper bound on effective PoW block spacing used when retargeting.
static const int64_t nTargetSpacingWorkMax = 3 * nStakeTargetSpacing;
//
// maximum nBits value could possible be required nTime after
// minimum proof-of-work required was nBase
//
//
// maximum nBits value could possible be required nTime after
// minimum proof-of-work required was nBase
//
// bnTargetLimit caps the result (easiest allowed target).
unsigned int ComputeMaxBits(CBigNum bnTargetLimit, unsigned int nBase, int64_t nTime)
{
    CBigNum bnResult;
    bnResult.SetCompact(nBase);
    // Start one doubling above the base target.
    bnResult *= 2;
    while (nTime > 0 && bnResult < bnTargetLimit)
    {
        // Maximum 200% adjustment per day/10...because block times 1/10 PPC
        bnResult *= 2;
        nTime -= 24 * 60 * 6;
    }
    if (bnResult > bnTargetLimit)
        bnResult = bnTargetLimit;
    return bnResult.GetCompact();
}
//
// minimum amount of work that could possibly be required nTime after
// minimum proof-of-work required was nBase
//
//
// minimum amount of work that could possibly be required nTime after
// minimum proof-of-work required was nBase
//
// Thin wrapper applying the proof-of-work target limit.
unsigned int ComputeMinWork(unsigned int nBase, int64_t nTime)
{
    return ComputeMaxBits(bnProofOfWorkLimit, nBase, nTime);
}
//
// minimum amount of stake that could possibly be required nTime after
// minimum proof-of-stake required was nBase
//
//
// minimum amount of stake that could possibly be required nTime after
// minimum proof-of-stake required was nBase
//
// Note: nBlockTime is unused; kept for interface compatibility.
unsigned int ComputeMinStake(unsigned int nBase, int64_t nTime, unsigned int nBlockTime)
{
    return ComputeMaxBits(bnProofOfStakeLimit, nBase, nTime);
}
// ppcoin: find last block index up to pindex
// ppcoin: walk backwards from pindex to the most recent block of the
// requested type (PoS when fProofOfStake, otherwise PoW). Stops at the
// chain start if no such block exists.
const CBlockIndex* GetLastBlockIndex(const CBlockIndex* pindex, bool fProofOfStake)
{
    const CBlockIndex* pwalk = pindex;
    while (pwalk && pwalk->pprev && (pwalk->IsProofOfStake() != fProofOfStake))
    {
        pwalk = pwalk->pprev;
    }
    return pwalk;
}
// Compute the compact difficulty target for the next block of the given
// type (PoS or PoW), retargeting every block with an exponential moving
// average toward the target spacing (ppcoin scheme).
unsigned int GetNextTargetRequired(const CBlockIndex* pindexLast, bool fProofOfStake)
{
    CBigNum bnTargetLimit = bnProofOfWorkLimit;
    if(fProofOfStake)
    {
        // Proof-of-Stake blocks has own target limit since nVersion=3 supermajority on mainNet and always on testNet
        bnTargetLimit = bnProofOfStakeLimit;
    }
    if (pindexLast == NULL)
        return bnTargetLimit.GetCompact(); // genesis block
    // Retargeting needs the spacing between the last two blocks of the
    // same type.
    const CBlockIndex* pindexPrev = GetLastBlockIndex(pindexLast, fProofOfStake);
    if (pindexPrev->pprev == NULL)
        return bnTargetLimit.GetCompact(); // first block
    const CBlockIndex* pindexPrevPrev = GetLastBlockIndex(pindexPrev->pprev, fProofOfStake);
    if (pindexPrevPrev->pprev == NULL)
        return bnTargetLimit.GetCompact(); // second block
    // Clamp the observed spacing to [1, nTargetTimespan].
    int64_t nActualSpacing = pindexPrev->GetBlockTime() - pindexPrevPrev->GetBlockTime();
    if(nActualSpacing < 0)
    {
        // printf(">> nActualSpacing = %" PRI64d " corrected to 1.\n", nActualSpacing);
        nActualSpacing = 1;
    }
    else if(nActualSpacing > nTargetTimespan)
    {
        // printf(">> nActualSpacing = %" PRI64d " corrected to nTargetTimespan (900).\n", nActualSpacing);
        nActualSpacing = nTargetTimespan;
    }
    // ppcoin: target change every block
    // ppcoin: retarget with exponential moving toward target spacing
    CBigNum bnNew;
    bnNew.SetCompact(pindexPrev->nBits);
    // PoW spacing stretches with the gap since the last PoW block,
    // capped at nTargetSpacingWorkMax.
    int64_t nTargetSpacing = fProofOfStake ?
                                 nStakeTargetSpacing :
                                 min(nTargetSpacingWorkMax,
                                     ((int64_t) nStakeTargetSpacing *
                                      (1 + pindexLast->nHeight - pindexPrev->nHeight)));
    int64_t nInterval = nTargetTimespan / nTargetSpacing;
    bnNew *= ((nInterval - 1) * nTargetSpacing + nActualSpacing + nActualSpacing);
    bnNew /= ((nInterval + 1) * nTargetSpacing);
    if (bnNew > bnTargetLimit)
        bnNew = bnTargetLimit;
    return bnNew.GetCompact();
}
// Verify that hash satisfies the proof-of-work target encoded in nBits.
// The genesis block is accepted unconditionally.
bool CheckProofOfWork(uint256 hash, unsigned int nBits)
{
    if (hash == hashGenesisBlock)
    {
        return true;
    }
    CBigNum bnTarget;
    bnTarget.SetCompact(nBits);
    // Check range
    if (bnTarget <= 0 || bnTarget > bnProofOfWorkLimit)
    {
        return error("CheckProofOfWork() : nBits below minimum work");
    }
    // Check proof of work matches claimed amount
    if (hash > bnTarget.getuint256())
    {
        printf("CheckProofOfWork() : block %s\n", hash.GetHex().c_str());
        return error("CheckProofOfWork() : hash doesn't match nBits");
    }
    return true;
}
// Return maximum amount of blocks that other nodes claim to have
// Return maximum amount of blocks that other nodes claim to have:
// the larger of the peers' median reported height and the
// checkpoint-derived estimate of total blocks.
int GetNumBlocksOfPeers()
{
    int nMedian = cPeerBlockCounts.median();
    int nEstimate = Checkpoints::GetTotalBlocksEstimate();
    return (nMedian > nEstimate) ? nMedian : nEstimate;
}
// True while this node appears to still be catching up with the chain:
// either behind the checkpoint estimate, or the tip is both recent to
// us (advanced within the last 10 s) and old in wall-clock terms.
bool IsInitialBlockDownload()
{
    // How stale the tip may be depends on the fork era (qPoS blocks
    // arrive much faster).
    int nBack = (GetFork(nBestHeight) < XST_FORKQPOS) ?
                                chainParams.LATEST_INITIAL_BLOCK_DOWNLOAD_TIME :
                                chainParams.LATEST_INITIAL_BLOCK_DOWNLOAD_TIME_QPOS;
    if (pindexBest == NULL ||
        nBestHeight < Checkpoints::GetTotalBlocksEstimate())
    {
        return true;
    }
    // Track when the tip last advanced (function-local state persists
    // across calls).
    static int64_t nLastUpdate;
    static CBlockIndex* pindexLastBest;
    if (pindexBest != pindexLastBest)
    {
        pindexLastBest = pindexBest;
        nLastUpdate = GetTime();
    }
    return ((GetTime() - nLastUpdate < 10) &&
            (pindexBest->GetBlockTime() < GetTime() - nBack));
}
// Log an invalid chain tip and, when it has more trust than any invalid
// chain seen so far, persist that trust figure and notify the UI.
void static InvalidChainFound(CBlockIndex* pindexNew)
{
    if (pindexNew->bnChainTrust > bnBestInvalidTrust)
    {
        bnBestInvalidTrust = pindexNew->bnChainTrust;
        CTxDB().WriteBestInvalidTrust(bnBestInvalidTrust);
        uiInterface.NotifyBlocksChanged();
    }
    printf("InvalidChainFound: invalid block=%s  height=%d  trust=%s  date=%s\n",
      pindexNew->GetBlockHash().ToString().c_str(), pindexNew->nHeight,
      pindexNew->bnChainTrust.ToString().c_str(), DateTimeStrFormat("%x %H:%M:%S",
      pindexNew->GetBlockTime()).c_str());
    printf("InvalidChainFound:  current best=%s  height=%d  trust=%s  date=%s\n",
      hashBestChain.ToString().c_str(), nBestHeight, bnBestChainTrust.ToString().c_str(),
      DateTimeStrFormat("%x %H:%M:%S", pindexBest->GetBlockTime()).c_str());
}
// Refresh the block timestamp to the later of its current time and the
// network-adjusted time.
// Note: pindexPrev is unused here; kept for interface compatibility.
void CBlock::UpdateTime(const CBlockIndex* pindexPrev)
{
    nTime = max(GetBlockTime(), GetAdjustedTime());
}
// Undo this transaction's effect on the tx index: mark its inputs'
// prevouts unspent again and erase its own index entry. Used when
// disconnecting a block.
bool CTransaction::DisconnectInputs(CTxDB& txdb)
{
    // Relinquish previous transactions' spent pointers
    if (!IsCoinBase())
    {
        BOOST_FOREACH(const CTxIn& txin, vin)
        {
            COutPoint prevout = txin.prevout;
            // Get prev txindex from disk
            CTxIndex txindex;
            if (!txdb.ReadTxIndex(prevout.hash, txindex))
                return error("DisconnectInputs() : ReadTxIndex failed");
            if (prevout.n >= txindex.vSpent.size())
                return error("DisconnectInputs() : prevout.n out of range");
            // Mark outpoint as not spent
            txindex.vSpent[prevout.n].SetNull();
            // Write back
            if (!txdb.UpdateTxIndex(prevout.hash, txindex))
                return error("DisconnectInputs() : UpdateTxIndex failed");
        }
    }
    // Remove transaction from index
    // This can fail if a duplicate of this transaction was in a chain that got
    // reorganized away. This is only possible if this transaction was completely
    // spent, so erasing it would be a no-op anyway.
    txdb.EraseTxIndex(*this);
    return true;
}
// Gather the previous transactions (and their index entries) spent by
// this tx into inputsRet, consulting in order: the proposed changes in
// mapTestPool (when fBlock/fMiner), the on-disk tx index, and finally
// the mempool for still-unconfirmed parents. Also validates that every
// prevout.n is in range.
bool CTransaction::FetchInputs(CTxDB& txdb, const map<uint256, CTxIndex>& mapTestPool,
                               bool fBlock, bool fMiner, MapPrevTx& inputsRet, bool& fInvalid) const
{
    // FetchInputs can return false either because we just haven't seen some inputs
    // (in which case the transaction should be stored as an orphan)
    // or because the transaction is malformed (in which case the transaction should
    // be dropped). If tx is definitely invalid, fInvalid will be set to true.
    fInvalid = false;
    if (IsCoinBase())
    {
        return true; // Coinbase transactions have no inputs to fetch.
    }
    for (unsigned int i = 0; i < vin.size(); i++)
    {
        COutPoint prevout = vin[i].prevout;
        if (inputsRet.count(prevout.hash))
        {
            continue; // Got it already
        }
        // Read txindex
        CTxIndex& txindex = inputsRet[prevout.hash].first;
        bool fFound = true;
        if ((fBlock || fMiner) && mapTestPool.count(prevout.hash))
        {
            // Get txindex from current proposed changes
            txindex = mapTestPool.find(prevout.hash)->second;
        }
        else
        {
            // Read txindex from txdb
            fFound = txdb.ReadTxIndex(prevout.hash, txindex);
        }
        if (!fFound && (fBlock || fMiner))
        {
            // Miners silently skip; block connection reports the error.
            return fMiner ? false :
                      error("FetchInputs() : %s prev tx %s index entry not found",
                            GetHash().ToString().c_str(),
                            prevout.hash.ToString().c_str());
        }
        // Read txPrev
        CTransaction& txPrev = inputsRet[prevout.hash].second;
        if (!fFound || txindex.pos == CDiskTxPos(1,1,1))
        {
            // Get prev tx from single transactions in memory
            {
                LOCK(mempool.cs);
                if (!mempool.lookup(prevout.hash, txPrev))
                    return error("FetchInputs() : %s mempool Tx prev not found %s",
                                 GetHash().ToString().c_str(),
                                 prevout.hash.ToString().c_str());
            }
            if (!fFound)
            {
                // No index entry yet: size the spent vector to match.
                txindex.vSpent.resize(txPrev.vout.size());
            }
        }
        else
        {
            // Get prev tx from disk
            if (!txPrev.ReadFromDisk(txindex.pos))
                return error("FetchInputs() : %s ReadFromDisk prev tx %s failed",
                             GetHash().ToString().c_str(),
                             prevout.hash.ToString().c_str());
        }
    }
    // Make sure all prevout.n indexes are valid:
    for (unsigned int i = 0; i < vin.size(); i++)
    {
        const COutPoint prevout = vin[i].prevout;
        assert(inputsRet.count(prevout.hash) != 0);
        const CTxIndex& txindex = inputsRet[prevout.hash].first;
        const CTransaction& txPrev = inputsRet[prevout.hash].second;
        if (prevout.n >= txPrev.vout.size() || prevout.n >= txindex.vSpent.size())
        {
            // Revisit this if/when transaction replacement is implemented and allows
            // adding inputs:
            fInvalid = true;
            return DoS(100,
                       error("FetchInputs() : tx %s prevout.n %d out of range, "
                                 "prev vout size: %" PRIszu ", spent size: "
                                 "%" PRIszu ", prev tx %s (%s)",
                             GetHash().ToString().c_str(), prevout.n,
                             txPrev.vout.size(), txindex.vSpent.size(),
                             prevout.hash.ToString().c_str(),
                             txPrev.GetHash().ToString().c_str()));
        }
    }
    return true;
}
// Total value entering this transaction: the sum of its inputs' prevout
// values plus any qPoS claim value. When nClaim is 0 the claim value is
// recomputed here via CheckClaim; callers that already know it can pass
// it in to avoid the recheck.
int64_t CTransaction::GetValueIn(const MapPrevTx& inputs, int64_t nClaim) const
{
    if (IsCoinBase())
    {
        return 0;
    }
    if (nClaim == 0)
    {
        qpos_claim claim;
        CheckClaim(pregistryMain, inputs, claim);
        nClaim = claim.value;
    }
    int64_t nResult = nClaim;
    for (unsigned int i = 0; i < vin.size(); i++)
    {
        nResult += GetOutputFor(vin[i], inputs).nValue;
    }
    return nResult;
}
// Sum the values of all TX_CLAIM outputs in this transaction.
// Performs no validation of the claims themselves (extraction only).
int64_t CTransaction::GetClaimIn() const
{
    int64_t nValue = 0;
    txnouttype typetxo;
    vector<valtype> vSolutions;
    for (unsigned int i = 0; i < vout.size(); ++i)
    {
        // seriously, no checks
        Solver(vout[i].scriptPubKey, typetxo, vSolutions);
        if (typetxo == TX_CLAIM)
        {
            qpos_claim claim;
            ExtractClaim(vSolutions.front(), claim);
            nValue += claim.value;
        }
    }
    return nValue;
}
unsigned int CTransaction::GetP2SHSigOpCount(const MapPrevTx& inputs) const
{
if (IsCoinBase())
{
return 0;
}
unsigned int nSigOps = 0;
for (unsigned int i = 0; i < vin.size(); i++)
{
const CTxOut& prevout = GetOutputFor(vin[i], inputs);
if (prevout.scriptPubKey.IsPayToScriptHash())
nSigOps += prevout.scriptPubKey.GetSigOpCount(vin[i].scriptSig);
}
return nSigOps;
}
// Validate this transaction against its fetched inputs and mark those
// inputs spent. Checks maturity of generated inputs, timestamps, value
// ranges, double spends, signatures, stake reward limits, and fees
// (including feework for feeless txs). Spent markers are written into
// mapTestPool when connecting a block or mining.
// Note: `inputs` is taken by value -- txindex entries are mutated
// locally and only propagated via mapTestPool.
bool CTransaction::ConnectInputs(CTxDB& txdb, MapPrevTx inputs,
                                 map<uint256, CTxIndex>& mapTestPool,
                                 const CDiskTxPos& posThisTx,
                                 const CBlockIndex* pindexBlock,
                                 bool fBlock, bool fMiner,
                                 unsigned int flags,
                                 int64_t nValuePurchases, int64_t nClaim,
                                 Feework& feework)
{
    // Take over previous transactions' spent pointers
    // fBlock is true when this is called from AcceptBlock when a new best-block is added to the blockchain
    // fMiner is true when called from the internal bitcoin miner
    // ... both are false when called from CTransaction::AcceptToMemoryPool
    // Transactions without their own timestamp inherit the block's.
    unsigned int nTxTime = HasTimestamp() ? GetTxTime() : pindexBlock->nTime;
    if (!IsCoinBase())
    {
        int64_t nValueIn = 0;
        int64_t nFees = 0;
        for (unsigned int i = 0; i < vin.size(); i++)
        {
            COutPoint prevout = vin[i].prevout;
            assert(inputs.count(prevout.hash) > 0);
            CTxIndex& txindex = inputs[prevout.hash].first;
            CTransaction& txPrev = inputs[prevout.hash].second;
            if (prevout.n >= txPrev.vout.size() ||
                prevout.n >= txindex.vSpent.size())
            {
                return DoS(100, error("ConnectInputs() : %s prevout.n out of range %d %"
                                          PRIszu " %" PRIszu " prev tx %s\n%s",
                                      GetHash().ToString().c_str(), prevout.n,
                                      txPrev.vout.size(), txindex.vSpent.size(),
                                      prevout.hash.ToString().c_str(),
                                      txPrev.ToString().c_str()));
            }
            // If prev is coinbase or coinstake, check that it's matured
            if (txPrev.IsCoinBase() || txPrev.IsCoinStake())
            {
                for (const CBlockIndex* pindex = pindexBlock;
                     pindex && pindexBlock->nHeight - pindex->nHeight < nCoinbaseMaturity;
                     pindex = pindex->pprev)
                {
                    if (pindex->nBlockPos == txindex.pos.nBlockPos && pindex->nFile == txindex.pos.nFile)
                    {
                        return error("ConnectInputs() : tried to spend %s at depth %d",
                                     txPrev.IsCoinBase() ? "coinbase" : "coinstake",
                                     pindexBlock->nHeight - pindex->nHeight);
                    }
                }
            }
            // check is meaningless if one or the other has no timestamp
            if (txPrev.HasTimestamp() && HasTimestamp())
            {
                // ppcoin: check transaction timestamp
                if (txPrev.GetTxTime() > nTxTime)
                {
                    return DoS(100,
                               error("ConnectInputs() : "
                                         "transaction timestamp earlier than input transaction"));
                }
            }
            if (txPrev.vout[prevout.n].IsEmpty() &&
                (GetFork(nBestHeight + 1) >= XST_FORK005))
            {
                return DoS(1, error("ConnectInputs() : special marker is not spendable"));
            }
            // Check for negative or overflow input values
            nValueIn += txPrev.vout[prevout.n].nValue;
            if (!MoneyRange(txPrev.vout[prevout.n].nValue) || !MoneyRange(nValueIn))
            {
                return DoS(100, error("ConnectInputs() : txin values out of range"));
            }
        }
        // The first loop above does all the inexpensive checks.
        // Only if ALL inputs pass do we perform expensive ECDSA signature checks.
        // Helps prevent CPU exhaustion attacks.
        for (unsigned int i = 0; i < vin.size(); i++)
        {
            COutPoint prevout = vin[i].prevout;
            assert(inputs.count(prevout.hash) > 0);
            CTxIndex& txindex = inputs[prevout.hash].first;
            CTransaction& txPrev = inputs[prevout.hash].second;
            // Check for conflicts (double-spend)
            // This doesn't trigger the DoS code on purpose; if it did, it would make it easier
            // for an attacker to attempt to split the network.
            if (!txindex.vSpent[prevout.n].IsNull())
            {
                return fMiner ? false :
                           error("ConnectInputs() : %s prev tx already used at %s",
                                 GetHash().ToString().c_str(),
                                 txindex.vSpent[prevout.n].ToString().c_str());
            }
            // Skip ECDSA signature verification when connecting blocks (fBlock=true)
            // before the last blockchain checkpoint. This is safe because block merkle hashes are
            // still computed and checked, and any change will be caught at the next checkpoint.
            if (!(fBlock && (nBestHeight < Checkpoints::GetTotalBlocksEstimate())))
            {
                // Verify signature
                if (!VerifySignature(txPrev, *this, i, flags, 0))
                {
                    return DoS(100,error("ConnectInputs() : %s VerifySignature failed",
                                         GetHash().ToString().c_str()));
                }
            }
            // Mark outpoints as spent
            txindex.vSpent[prevout.n] = posThisTx;
            // Write back
            if (fBlock || fMiner)
            {
                mapTestPool[prevout.hash] = txindex;
            }
        }
        if (IsCoinStake())
        {
            // ppcoin: coin stake tx earns reward instead of paying fee
            uint64_t nCoinAge;
            if (!GetCoinAge(txdb, pindexBlock->nTime, nCoinAge))
            {
                return error("ConnectInputs() : %s unable to get coin age for coinstake",
                             GetHash().ToString().c_str());
            }
            int64_t nStakeReward = GetValueOut() - nValueIn;
            // Reward may exceed the nominal PoS reward only by the fee
            // allowance (MIN_TX_FEE in lieu of the computed min fee).
            if (nStakeReward > (GetProofOfStakeReward(nCoinAge,
                                                      pindexBlock->nBits) -
                                GetMinFee() + chainParams.MIN_TX_FEE))
            {
                return DoS(100, error("ConnectInputs() : %s stake reward exceeded",
                                      GetHash().ToString().c_str()));
            }
        }
        else
        {
            // Credits include any qPoS claim; debits include purchases.
            int64_t nTxCredit = nValueIn + nClaim;
            int64_t nTxDebit = GetValueOut() + nValuePurchases;
            if (nTxCredit < nTxDebit)
            {
                return DoS(100, error("ConnectInputs() : %s value in < value out",
                                      GetHash().ToString().c_str()));
            }
            // Tally transaction fees
            int64_t nTxFee = nTxCredit - nTxDebit;
            if (nTxFee < 0)
            {
                return DoS(100, error("ConnectInputs() : %s nTxFee < 0",
                                      GetHash().ToString().c_str()));
            }
            // ppcoin: enforce transaction fees for every block
            if (nTxFee < GetMinFee())
            {
                // The feework may have already been checked.
                if (!feework.IsChecked())
                {
                    feework.bytes = ::GetSerializeSize(*this,
                                                       SER_NETWORK,
                                                       PROTOCOL_VERSION);
                    CheckFeework(feework, true, bfrFeeworkValidator);
                }
                if (feework.IsInsufficient())
                {
                    return fBlock
                              ? DoS(100,
                                    error("ConnectInputs() : "
                                              "%s not enough feework\n%s\n",
                                          GetHash().ToString().c_str(),
                                          feework.ToString("   ").c_str()))
                              : false;
                }
                else if (!feework.IsOK())
                {
                    return fBlock
                              ? DoS(100,
                                    error("ConnectInputs() : "
                                              "%s not paying required fee=%s, paid=%s",
                                          GetHash().ToString().c_str(),
                                          FormatMoney(GetMinFee()).c_str(),
                                          FormatMoney(nTxFee).c_str()))
                              : false;
                }
                if (fDebugFeeless)
                {
                    printf("ConnectInputs(): feework\n   %s\n%s\n",
                           GetHash().ToString().c_str(),
                           feework.ToString("   ").c_str());
                }
            }
            nFees += nTxFee;
            if (!MoneyRange(nFees))
            {
                return DoS(100, error("ConnectInputs() : nFees out of range"));
            }
        }
    }
    return true;
}
bool CTransaction::ClientConnectInputs()
{
if (IsCoinBase())
{
return false;
}
// Take over previous transactions' spent pointers
{
LOCK(mempool.cs);
int64_t nValueIn = 0;
for (unsigned int i = 0; i < vin.size(); i++)
{
// Get prev tx from single transactions in memory
COutPoint prevout = vin[i].prevout;
if (!mempool.exists(prevout.hash))
return false;
CTransaction& txPrev = mempool.lookup(prevout.hash);
if (prevout.n >= txPrev.vout.size())
return false;
unsigned int flags = STANDARD_SCRIPT_VERIFY_FLAGS;
if (GetFork(nBestHeight + 1) < XST_FORK005)
{
flags = flags & ~SCRIPT_VERIFY_CHECKLOCKTIMEVERIFY;
}
// Verify signature
if (!VerifySignature(txPrev, *this, i, flags, 0))
return error("ClientConnectInputs() : VerifySignature failed");
nValueIn += txPrev.vout[prevout.n].nValue;
if (!MoneyRange(txPrev.vout[prevout.n].nValue) || !MoneyRange(nValueIn))
return error("ClientConnectInputs() : txin values out of range");
}
if (GetValueOut() > nValueIn)
return false;
}
return true;
}
// Undo this block's effects: disconnect each transaction's inputs in
// reverse order, clear the previous block's on-disk hashNext link, and
// notify wallets (and the optional explore API) of the disconnect.
bool CBlock::DisconnectBlock(CTxDB& txdb, CBlockIndex* pindex)
{
    // Disconnect in reverse order
    for (int nPos = (int)vtx.size() - 1; nPos >= 0; --nPos)
    {
        if (!vtx[nPos].DisconnectInputs(txdb))
        {
            return false;
        }
    }
    // Update block index on disk without changing it in memory.
    // The memory index structure will be changed after the db commits.
    if (pindex->pprev)
    {
        CDiskBlockIndex blockindexPrev(pindex->pprev);
        blockindexPrev.hashNext = 0;
        if (!txdb.WriteBlockIndex(blockindexPrev))
        {
            return error("DisconnectBlock() : WriteBlockIndex failed");
        }
    }
    // ppcoin: clean up wallet after disconnecting coinstake
    for (unsigned int nPos = 0; nPos < vtx.size(); ++nPos)
    {
        SyncWithWallets(vtx[nPos], this, false, false);
    }
    if (fDebugExplore)
    {
        printf("DisconnectBlock(): %s done\n", pindex->GetBlockHash().ToString().c_str());
    }
    if (fWithExploreAPI)
    {
        ExploreDisconnectBlock(txdb, this);
    }
    return true;
}
// Validate this block in full and, unless fJustCheck, connect it to the
// chain state: queue transaction-index updates, record mint and money
// supply totals on pindex, write the block index, link the previous
// block's hashNext on disk, and notify wallets / the explore API.
//   txdb          : database used to fetch inputs and write index changes
//   pindex        : this block's index entry (pprev must be set)
//   pregistryTemp : qPoS registry used for claim/purchase checks
//   fJustCheck    : if true, validate only; nothing is written to disk
// Returns false (possibly via DoS) on any validation failure.
bool CBlock::ConnectBlock(CTxDB& txdb, CBlockIndex* pindex,
                          QPRegistry *pregistryTemp, bool fJustCheck)
{
    vector<QPTxDetails> vDeets;
    // Check it again in case a previous version let a bad block in
    // fCheckSig was always true here by default
    if (!CheckBlock(pregistryTemp, vDeets, pindex->pprev,
                    !fJustCheck, !fJustCheck, true, !fJustCheck))
    {
        return false;
    }
    unsigned int flags = SCRIPT_VERIFY_NOCACHE | STANDARD_SCRIPT_VERIFY_FLAGS;
    unsigned int nFork = GetFork(nBestHeight + 1);
    // CHECKLOCKTIMEVERIFY is only enforced from fork 5 onward
    if (nFork < XST_FORK005)
    {
        flags = flags & ~SCRIPT_VERIFY_CHECKLOCKTIMEVERIFY;
    }
    // Do not allow blocks that contain transactions which 'overwrite' older transactions,
    // unless those are already completely spent.
    // If such overwrites are allowed, coinbases and transactions depending upon those
    // can be duplicated to remove the ability to spend the first instance -- even after
    // being sent to another address.
    // See BIP30 and http://r6.ca/blog/20120206T005236Z.html for more information.
    // This logic is not necessary for memory pool transactions, as AcceptToMemoryPool
    // already refuses previously-known transaction ids entirely.
    // This rule was originally applied all blocks whose timestamp was after March 15, 2012, 0:00 UTC.
    // Now that the whole chain is irreversibly beyond that time it is applied to all blocks except the
    // two in the chain that violate it. This prevents exploiting the issue against nodes in their
    // initial block download.
    bool fEnforceBIP30 = true; // Always active in Stealth
    bool fStrictPayToScriptHash = true; // Always active in Stealth
    //// issue here: it doesn't know the version
    unsigned int nTxPos;
    if (fJustCheck)
    {
        // FetchInputs treats CDiskTxPos(1,1,1) as a special "refer to memorypool" indicator
        // Since we're just checking the block and not actually connecting it,
        // it might not (and probably shouldn't) be on the disk to get the transaction from
        nTxPos = 1;
    }
    else
    {
        // Position of the first tx on disk: block offset plus the size of
        // the serialized header/overhead, adjusted for the vtx count prefix.
        CBlock blockTemp;
        blockTemp.nVersion = pindex->nVersion;
        nTxPos = pindex->nBlockPos +
                 ::GetSerializeSize(blockTemp, SER_DISK, CLIENT_VERSION) -
                 (2 * GetSizeOfCompactSize(0)) +
                 GetSizeOfCompactSize(vtx.size());
    }
    // Queued tx index changes are committed only after full validation.
    map<uint256, CTxIndex> mapQueuedChanges;
    int64_t nFees = 0;
    int64_t nValueIn = 0;
    int64_t nValueOut = 0;
    int64_t nValuePurchases = 0;
    int64_t nValueClaims = 0;
    unsigned int nSigOps = 0;
    // Validate and connect every transaction, accumulating value totals.
    BOOST_FOREACH(CTransaction& tx, vtx)
    {
        uint256 hashTx = tx.GetHash();
        // BIP30: reject a tx whose id already exists with unspent outputs.
        if (fEnforceBIP30) {
            CTxIndex txindexOld;
            if (txdb.ReadTxIndex(hashTx, txindexOld)) {
                BOOST_FOREACH(CDiskTxPos &pos, txindexOld.vSpent)
                    if (pos.IsNull())
                        return false;
            }
        }
        nSigOps += tx.GetLegacySigOpCount();
        if (nSigOps > chainParams.MAX_BLOCK_SIGOPS)
            return DoS(100, error("ConnectBlock() : too many sigops"));
        CDiskTxPos posThisTx(pindex->nFile, pindex->nBlockPos, nTxPos);
        if (!fJustCheck)
            nTxPos += ::GetSerializeSize(tx, SER_DISK, CLIENT_VERSION);
        MapPrevTx mapInputs;
        if (tx.IsCoinBase())
        {
            nValueOut += tx.GetValueOut();
        }
        else
        {
            bool fInvalid;
            if (!tx.FetchInputs(txdb, mapQueuedChanges, true, false, mapInputs, fInvalid))
                return false;
            if (fStrictPayToScriptHash)
            {
                // Add in sigops done by pay-to-script-hash inputs;
                // this is to prevent a "rogue miner" from creating
                // an incredibly-expensive-to-validate block.
                nSigOps += tx.GetP2SHSigOpCount(mapInputs);
                if (nSigOps > chainParams.MAX_BLOCK_SIGOPS)
                    return DoS(100, error("ConnectBlock() : too many sigops"));
            }
            // qPoS claims: only checked for non-coinstake transactions.
            qpos_claim claim;
            claim.value = 0;
            if (!tx.IsCoinStake())
            {
                if (!tx.CheckClaim(pregistryTemp, mapInputs, claim))
                {
                    return error("ConnectBlock() : bad claim");
                }
                if (claim.value != 0)
                {
                    nValueClaims += claim.value;
                }
            }
            int64_t nTxValuePurchases = 0;
            int64_t nTxValueIn = tx.GetValueIn(mapInputs, claim.value);
            int64_t nTxValueOut = tx.GetValueOut();
            nValueIn += nTxValueIn;
            nValueOut += nTxValueOut;
            if (!tx.IsCoinStake())
            {
                // qPoS staker purchases: the price depends on the number of
                // qualified stakers and the prior money supply.
                uint32_t N = static_cast<uint32_t>(
                                pregistryTemp->GetNumberQualified());
                int64_t nStakerPrice = GetStakerPrice(N,
                                                      pindex->pprev->nMoneySupply,
                                                      nFork);
                map<string, qpos_purchase> mapPurchases;
                if (!tx.CheckPurchases(pregistryTemp, nStakerPrice, mapPurchases))
                {
                    if (fDebugQPoS)
                    {
                        printf("Bad purchase:\n%s\n", tx.ToString().c_str());
                    }
                    return error("ConnectBlock() : bad purchase");
                }
                map<string, qpos_purchase>::const_iterator it;
                for (it = mapPurchases.begin(); it != mapPurchases.end(); ++it)
                {
                    nTxValuePurchases += it->second.value;
                }
                nValuePurchases += nTxValuePurchases;
                // Fees are whatever input value is not paid out or spent
                // on purchases.
                nFees += nTxValueIn - (nTxValueOut + nTxValuePurchases);
            }
            // ConnectInputs verifies signatures, double-spends, and
            // fee/feework rules, and marks the inputs spent.
            Feework feework;
            if (!tx.ConnectInputs(txdb, mapInputs, mapQueuedChanges,
                                  posThisTx, pindex, true, false,
                                  flags, nTxValuePurchases, claim.value,
                                  feework))
            {
                return false;
            }
        }
        mapQueuedChanges[hashTx] = CTxIndex(posThisTx, tx.vout.size());
    }
    // XST: track money supply and mint amount info
    // mint & supply: claims are included in nValueIn to make accounting easier
    // elsewhere, so they must be subtracted from nValueIn here
    if (pindex->IsProofOfWork())
    {
        // XST: PoW miners keep fees
        pindex->nMint = nValueOut + nValuePurchases - (nValueIn - nValueClaims);
    }
    else
    {
        // XST: non-PoW blocks burn fees
        pindex->nMint = nValueOut + nValuePurchases + nFees - (nValueIn - nValueClaims);
    }
    // supply
    //   - fees are not considered here because they are captured by values out and in
    if (nFork >= XST_FORKPURCHASE3)
    {
        // FIXME: need to calculate correct money supply retroactively and
        //        move this test to GetStakerPrice
        pindex->nMoneySupply = (pindex->pprev ? pindex->pprev->nMoneySupply : 0) +
                               nValueOut - (nValueIn - nValueClaims);
    }
    else
    {
        // purchases were mistakenly added to nValueIn
        pindex->nMoneySupply = (pindex->pprev ? pindex->pprev->nMoneySupply : 0) +
                               nValueOut - (nValuePurchases + nValueIn - nValueClaims);
    }
    // Persist the qPoS details gathered by CheckBlock on the index entry.
    pindex->vDeets = vDeets;
    if (!txdb.WriteBlockIndex(CDiskBlockIndex(pindex)))
    {
        return error("Connect() : WriteBlockIndex for pindex failed");
    }
    // ppcoin: fees are not collected by miners as in bitcoin
    // ppcoin: fees are destroyed to compensate the entire network
    if (fDebug && GetBoolArg("-printcreation"))
    {
        printf("ConnectBlock() : destroy=%s nFees=%" PRId64 "\n",
               FormatMoney(nFees).c_str(), nFees);
    }
    // Validation-only mode stops here: nothing below touches memory
    // structures or further disk state.
    if (fJustCheck)
    {
        return true;
    }
    // Write queued txindex changes
    for (map<uint256, CTxIndex>::iterator mi = mapQueuedChanges.begin(); mi != mapQueuedChanges.end(); ++mi)
    {
        if (!txdb.UpdateTxIndex((*mi).first, (*mi).second))
            return error("ConnectBlock() : UpdateTxIndex failed");
    }
    uint256 prevHash = 0;
    if(pindex->pprev)
    {
        prevHash = pindex->pprev->GetBlockHash();
    }
    // Update block index on disk without changing it in memory.
    // The memory index structure will be changed after the db commits.
    if (pindex->pprev)
    {
        CDiskBlockIndex blockindexPrev(pindex->pprev);
        blockindexPrev.hashNext = pindex->GetBlockHash();
        if (!txdb.WriteBlockIndex(blockindexPrev))
            return error("ConnectBlock() : WriteBlockIndex failed");
    }
    // Watch for transactions paying to me
    BOOST_FOREACH(CTransaction& tx, vtx)
    {
        SyncWithWallets(tx, this, true);
    }
    if (fDebugExplore)
    {
        printf("ConnectBlock(): %s done\n", pindex->GetBlockHash().ToString().c_str());
    }
    if (fWithExploreAPI)
    {
        ExploreConnectBlock(txdb, this);
    }
    return true;
}
// Switch the best chain from pindexBest onto the branch ending at
// pindexNew. Phases: (1) find the fork point; (2) disconnect the old
// branch, queueing its transactions for resurrection; (3) rewind the
// qPoS registry to the fork; (4) connect and registry-replay the new
// branch; (5) commit the db transaction; (6) update in-memory pnext
// links and height lookup; (7) resurrect/remove mempool transactions.
//   pindexReplayRet : [out] earliest block needed for registry replay
//                     (the fork point).
// Returns false, leaving the db transaction uncommitted, on any failure.
bool static Reorganize(CTxDB& txdb,
                       CBlockIndex* pindexNew,
                       CBlockIndex* &pindexReplayRet)
{
    printf("REORGANIZE\n");
    // Find the fork
    CBlockIndex* pfork = pindexBest;
    CBlockIndex* plonger = pindexNew;
    // Walk both chains back until they meet at the common ancestor.
    while (pfork != plonger)
    {
        while (plonger->nHeight > pfork->nHeight)
            if (!(plonger = plonger->pprev))
                return error("Reorganize() : plonger->pprev is null");
        if (pfork == plonger)
            break;
        if (!(pfork = pfork->pprev))
            return error("Reorganize() : pfork->pprev is null");
    }
    // List of what to disconnect
    vector<CBlockIndex*> vDisconnect;
    for (CBlockIndex* pindex = pindexBest; pindex != pfork; pindex = pindex->pprev)
    {
        vDisconnect.push_back(pindex);
    }
    // List of what to connect
    vector<CBlockIndex*> vConnect;
    for (CBlockIndex* pindex = pindexNew; pindex != pfork; pindex = pindex->pprev)
    {
        vConnect.push_back(pindex);
    }
    // vConnect must be connected oldest-first.
    reverse(vConnect.begin(), vConnect.end());
    if (pfork->pnext)
    {
        printf("REORGANIZE: Disconnect %" PRIszu " blocks\n   %s to\n   %s\n",
               vDisconnect.size(),
               pindexBest->GetBlockHash().ToString().c_str(),
               pfork->pnext->GetBlockHash().ToString().c_str());
        printf("REORGANIZE: Fork at \n   %s\n",
               pfork->GetBlockHash().ToString().c_str());
        printf("REORGANIZE: Connect %" PRIszu " blocks\n   %s to\n   %s\n",
               vConnect.size(),
               pfork->pnext->GetBlockHash().ToString().c_str(),
               pindexNew->GetBlockHash().ToString().c_str());
    }
    else
    {
        printf("REORGANIZE: Disconnect %" PRIszu " blocks\n   %s to\n   %s->next\n",
               vDisconnect.size(),
               pindexBest->GetBlockHash().ToString().c_str(),
               pfork->GetBlockHash().ToString().c_str());
        printf("REORGANIZE: Connect %" PRIszu " blocks\n   %s->next to\n   %s\n",
               vConnect.size(),
               pfork->GetBlockHash().ToString().c_str(),
               pindexNew->GetBlockHash().ToString().c_str());
    }
    // Disconnect shorter branch
    list<CTransaction> vResurrect;
    BOOST_FOREACH(CBlockIndex* pindex, vDisconnect)
    {
        CBlock block;
        if (!block.ReadFromDisk(pindex))
        {
            return error("Reorganize() : ReadFromDisk for disconnect failed");
        }
        if (!block.DisconnectBlock(txdb, pindex))
        {
            return error("Reorganize() : DisconnectBlock %s failed",
                         pindex->GetBlockHash().ToString().c_str());
        }
        // Queue memory transactions to resurrect
        // (skip coinbase/coinstake, and anything at or below the
        //  checkpointed height)
        BOOST_REVERSE_FOREACH(const CTransaction& tx, block.vtx)
        {
            if (!(tx.IsCoinBase() || tx.IsCoinStake()) &&
                pindex->nHeight > Checkpoints::GetTotalBlocksEstimate())
            {
                vResurrect.push_front(tx);
            }
        }
    }
    // registry must replay from at least the fork
    pindexReplayRet = pfork;
    AUTO_PTR<QPRegistry> pregistryTemp(new QPRegistry());
    if (!pregistryTemp.get())
    {
        return error("Reorganize() : creating temp registry failed");
    }
    CBlockIndex *pindexCurrent;
    RewindRegistry(txdb, pfork, pregistryTemp.get(), pindexCurrent);
    // Connect longer branch
    vector<CTransaction> vDelete;
    for (unsigned int i = 0; i < vConnect.size(); i++)
    {
        CBlockIndex* pindex = vConnect[i];
        CBlock block;
        if (!block.ReadFromDisk(pindex))
        {
            return error("Reorganize() : ReadFromDisk for connect failed");
        }
        if (!block.ConnectBlock(txdb, pindex, pregistryTemp.get()))
        {
            // Invalid block
            return error("Reorganize() : ConnectBlock %s failed",
                         pindex->GetBlockHash().ToString().c_str());
        }
        // Queue memory transactions to delete
        BOOST_FOREACH(const CTransaction& tx, block.vtx)
        {
            vDelete.push_back(tx);
        }
        // Advance the temp registry over each newly connected block.
        if (!pregistryTemp->UpdateOnNewBlock(pindex,
                                             QPRegistry::ALL_SNAPS,
                                             true))
        {
            return error("Reorganize() : Update to %s failed",
                         pindex->GetBlockHash().ToString().c_str());
        }
        printf("Reorganize(): connected %s\n",
               pindex->GetBlockHash().ToString().c_str());
    }
    if (!txdb.WriteHashBestChain(pindexNew->GetBlockHash()))
    {
        return error("Reorganize() : WriteHashBestChain failed");
    }
    // Make sure it's successfully written to disk before changing memory structure
    if (!txdb.TxnCommit())
    {
        return error("Reorganize() : TxnCommit failed");
    }
    // Disconnect shorter branch
    BOOST_FOREACH(CBlockIndex* pindex, vDisconnect)
    {
        if (pindex->pprev)
        {
            pindex->pprev->pnext = NULL;
        }
        mapBlockLookup.erase(pindex->nHeight);
    }
    // Connect longer branch
    BOOST_FOREACH(CBlockIndex* pindex, vConnect)
    {
        if (pindex->pprev)
        {
            pindex->pprev->pnext = pindex;
        }
        mapBlockLookup[pindex->nHeight] = pindex;
    }
    // Resurrect memory transactions that were in the disconnected branch
    BOOST_FOREACH(CTransaction& tx, vResurrect)
    {
        tx.AcceptToMemoryPool(txdb, false);
    }
    // Delete redundant memory transactions that are in the connected branch
    BOOST_FOREACH(CTransaction& tx, vDelete)
    {
        mempool.remove(tx);
    }
    printf("REORGANIZE: done\n");
    return true;
}
// Called from inside SetBestChain: attaches a block to the new best chain being built
// Connects the block and records the new best-chain hash inside the
// caller's open db transaction; on failure the transaction is aborted
// and the chain containing pindexNew is flagged invalid. On success the
// in-memory links (pnext, height lookup) are updated and this block's
// transactions are dropped from the mempool.
bool CBlock::SetBestChainInner(CTxDB& txdb,
                               CBlockIndex *pindexNew,
                               QPRegistry *pregistryTemp)
{
    uint256 hash = GetHash();
    // Adding to current best branch
    if (!ConnectBlock(txdb, pindexNew, pregistryTemp) ||
        !txdb.WriteHashBestChain(hash))
    {
        // Roll back any queued writes and mark this chain as bad
        txdb.TxnAbort();
        InvalidChainFound(pindexNew);
        return false;
    }
    if (!txdb.TxnCommit())
        return error("SetBestChainInner() : TxnCommit failed");
    // Add to current best branch
    pindexNew->pprev->pnext = pindexNew;
    mapBlockLookup[pindexNew->nHeight] = pindexNew;
    // Delete redundant memory transactions
    BOOST_FOREACH(CTransaction& tx, vtx)
        mempool.remove(tx);
    return true;
}
// Make pindexNew the tip of the best chain. Three cases are handled:
// the genesis block, a simple extension of the current tip
// (SetBestChainInner), and a reorganization onto another branch
// (Reorganize plus a registry replay from the appropriate snapshot).
//   pregistryTemp  : registry that is brought up to date with the new tip
//   fReorganizedRet: [out] true only if a reorganization occurred
// Afterwards updates global best-chain state, the wallet best-block
// locator, the obsolete-version warning, and runs -blocknotify.
bool CBlock::SetBestChain(CTxDB& txdb,
                          CBlockIndex *pindexNew,
                          QPRegistry *pregistryTemp,
                          bool &fReorganizedRet)
{
    fReorganizedRet = false;
    uint256 hash = GetHash();
    if (!txdb.TxnBegin())
        return error("SetBestChain() : TxnBegin failed");
    if ((pindexGenesisBlock == NULL) &&
        (hash == (fTestNet ? chainParams.hashGenesisBlockTestNet :
                             hashGenesisBlock)))
    {
        // Genesis block: just record it as best.
        txdb.WriteHashBestChain(hash);
        if (!txdb.TxnCommit())
            return error("SetBestChain() : TxnCommit failed");
        pindexGenesisBlock = pindexNew;
    }
    else if (hashPrevBlock == hashBestChain)
    {
        // Simple extension of the current tip.
        if (!SetBestChainInner(txdb, pindexNew, pregistryTemp))
            return error("SetBestChain() : SetBestChainInner failed");
    }
    else
    {
        /**********************************************************************
         * REORGANIZE
         **********************************************************************/
        // the first block in the new chain that will cause it to become the new best chain
        CBlockIndex *pindexIntermediate = pindexNew;
        // list of blocks that need to be connected afterwards
        vector<CBlockIndex*> vpindexSecondary;
        // Reorganize is costly in terms of db load, as it works in a single db transaction.
        // Try to limit how much needs to be done inside
        while (pindexIntermediate->pprev &&
               (pindexIntermediate->pprev->bnChainTrust > pindexBest->bnChainTrust))
        {
            vpindexSecondary.push_back(pindexIntermediate);
            pindexIntermediate = pindexIntermediate->pprev;
        }
        if (!vpindexSecondary.empty())
        {
            printf("Postponing %" PRIszu " reconnects\n", vpindexSecondary.size());
        }
        // Switch to new best branch
        // pindexReplay is the oldest block needed for replay
        CBlockIndex *pindexReplay = pindexIntermediate;
        if (!Reorganize(txdb, pindexIntermediate, pindexReplay))
        {
            txdb.TxnAbort();
            InvalidChainFound(pindexNew);
            return error("SetBestChain() : Reorganize failed");
        }
        AUTO_PTR<QPRegistry> pregistryTempTemp(new QPRegistry());
        if (!pregistryTempTemp.get())
        {
            return error("SetBestChain() : creating temp temp registry failed");
        }
        // FIXME: this could use refactoring with RewindRegistry
        GetRegistrySnapshot(txdb, pindexReplay->nHeight, pregistryTempTemp.get());
        // 1. load snapshot of registry preceding pindexReplay->nHeight
        // 2. make new pindex = pindexReplay
        // 3. roll this back to the snapshotblock+1
        // 4. replay registry from snapshot+1 to vpindexSecondary.back()-1 (earliest)
        if (vpindexSecondary.empty())
        {
            // Replay the registry forward along the (new) main chain,
            // stopping once the next block is off-chain and not pindexNew.
            uint256 blockHash = pregistryTempTemp->GetBlockHash();
            CBlockIndex *pindexCurrent = mapBlockIndex[blockHash];
            while (pindexCurrent->pnext != NULL)
            {
                pindexCurrent = pindexCurrent->pnext;
                pregistryTempTemp->UpdateOnNewBlock(pindexCurrent,
                                                    QPRegistry::ALL_SNAPS,
                                                    true);
                if ((pindexCurrent->pnext != NULL) &&
                    (!(pindexCurrent->pnext->IsInMainChain() ||
                       // pindexNew is not yet in the main chain
                       (pindexCurrent->pnext == pindexNew))))
                {
                    break;
                }
            }
        }
        else
        {
            // Replay forward only as far as the block preceding the
            // earliest postponed reconnect.
            uint256 blockHash = pregistryTempTemp->GetBlockHash();
            CBlockIndex *pindexCurrent = mapBlockIndex[blockHash];
            // note vpindexSecondary is descending block height
            while (pindexCurrent != vpindexSecondary.back()->pprev)
            {
                if (pindexCurrent->pnext == NULL)
                {
                    break;
                }
                pindexCurrent = pindexCurrent->pnext;
                pregistryTempTemp->UpdateOnNewBlock(pindexCurrent,
                                                    QPRegistry::ALL_SNAPS,
                                                    true);
            }
            if (pindexCurrent != vpindexSecondary.back()->pprev)
            {
                return error("SetBestChain() : block index not found for replay");
            }
        }
        // Connect further blocks
        BOOST_REVERSE_FOREACH(CBlockIndex *pindex, vpindexSecondary)
        {
            CBlock block;
            if (!block.ReadFromDisk(pindex))
            {
                printf("SetBestChain() : ReadFromDisk failed\n");
                break;
            }
            if (!txdb.TxnBegin())
            {
                printf("SetBestChain() : TxnBegin 2 failed\n");
                break;
            }
            // errors now are not fatal, we still did a reorganisation to a new chain in a valid way
            if (!block.SetBestChainInner(txdb, pindex, pregistryTempTemp.get()))
            {
                break;
            }
            pregistryTempTemp->UpdateOnNewBlock(pindex,
                                                QPRegistry::ALL_SNAPS,
                                                true);
        }
        // copy rather than assign to retain mutexes, etc.
        bool fExitReplay = !pregistryTemp->IsInReplayMode();
        pregistryTemp->Copy(pregistryTempTemp.get());
        if (fExitReplay)
        {
            pregistryTemp->ExitReplayMode();
        }
        fReorganizedRet = true;
    }
    // Update best block in wallet (so we can detect restored wallets)
    bool fIsInitialDownload = IsInitialBlockDownload();
    if (!fIsInitialDownload)
    {
        const CBlockLocator locator(pindexNew);
        ::SetBestChain(locator);
    }
    // New best block
    hashBestChain = hash;
    pindexBest = pindexNew;
    pblockindexFBBHLast = NULL;
    nBestHeight = pindexBest->nHeight;
    bnBestChainTrust = pindexNew->bnChainTrust;
    nTimeBestReceived = GetTime();
    nTransactionsUpdated++;
    printf("SetBestChain: new best=%s\n"
              "    height=%d staker=%s-%u trust=%s time=%" PRIu64 " (%s)\n",
           hashBestChain.ToString().c_str(),
           nBestHeight,
           pregistryTemp->GetAliasForID(nStakerID).c_str(),
           nStakerID,
           bnBestChainTrust.ToString().c_str(),
           pindexBest->GetBlockTime(),
           DateTimeStrFormat("%x %H:%M:%S", pindexBest->GetBlockTime()).c_str());
    bool fUseSyncCheckpoints = !GetBoolArg("-nosynccheckpoints", true);
    if (fUseSyncCheckpoints)
    {
        printf("Stake checkpoint: %x\n", pindexBest->nStakeModifierChecksum);
    }
    // Check the version of the last 100 blocks to see if we need to upgrade:
    if (!fIsInitialDownload)
    {
        int nUpgraded = 0;
        const CBlockIndex* pindex = pindexBest;
        for (int i = 0; i < 100 && pindex != NULL; i++)
        {
            if (pindex->nVersion > CBlock::CURRENT_VERSION)
                ++nUpgraded;
            pindex = pindex->pprev;
        }
        if (nUpgraded > 0)
            printf("SetBestChain: %d of last 100 blocks above version %d\n", nUpgraded, CBlock::CURRENT_VERSION);
        if (nUpgraded > 100/2)
            // strMiscWarning is read by GetWarnings(), called by Qt and the JSON-RPC code to warn the user:
            strMiscWarning = _("Warning: This version is obsolete, upgrade required!");
    }
    string strCmd = GetArg("-blocknotify", "");
    if (!fIsInitialDownload && !strCmd.empty())
    {
        boost::replace_all(strCmd, "%s", hashBestChain.GetHex());
        boost::thread t(runCommand, strCmd); // thread runs free
    }
    return true;
}
// Convenience overload: promotes pindexNew to best chain using a scratch
// copy of the main registry and discards the reorganization flag.
bool CBlock::SetBestChain(CTxDB &txdb, CBlockIndex *pindexNew)
{
    AUTO_PTR<QPRegistry> pregistryCopy(new QPRegistry(pregistryMain));
    if (!pregistryCopy.get())
    {
        return error("SetBestChain() : creating new temp registry failed");
    }
    bool fDidReorganize;
    return SetBestChain(txdb, pindexNew, pregistryCopy.get(), fDidReorganize);
}
// ppcoin: total coin age spent in transaction, in the unit of coin-days.
// Only those coins meeting minimum age requirement counts. As those
// transactions not in main chain are not currently indexed so we
// might not find out about their coin age. Older transactions are
// guaranteed to be in main chain by sync-checkpoint. This rule is
// introduced to help nodes establish a consistent view of the coin
// age (trust score) of competing branches.
//   txdb       : database used to look up each input's previous tx
//   nBlockTime : time of the block containing this transaction; used as
//                this tx's time when it carries no timestamp of its own
//   nCoinAge   : [out] total coin age in coin-days (0 for coinbase)
// Returns false on a timestamp violation or an unreadable prev block.
bool CTransaction::GetCoinAge(CTxDB& txdb, unsigned int nBlockTime, uint64_t& nCoinAge) const
{
    CBigNum bnCentSecond = 0;  // coin age in the unit of cent-seconds
    nCoinAge = 0;
    CBigNum bnSeconds; // age of coins (not coin age)
    if (IsCoinBase())
    {
        return true;
    }
    unsigned int nTxTime = HasTimestamp() ? GetTxTime() : nBlockTime;
    BOOST_FOREACH(const CTxIn& txin, vin)
    {
        // First try finding the previous transaction in database
        CTransaction txPrev;
        CTxIndex txindex;
        if (!txPrev.ReadFromDisk(txdb, txin.prevout, txindex))
        {
            continue; // previous transaction not in main chain
        }
        // Read block header
        CBlock block;
        if (!block.ReadFromDisk(txindex.pos.nFile, txindex.pos.nBlockPos, false))
        {
            return false; // unable to read block of previous transaction
        }
        // Time of the block holding the PREVIOUS transaction. Named
        // nPrevBlockTime (was nBlockTime) so it no longer shadows this
        // function's nBlockTime parameter.
        unsigned int nPrevBlockTime = block.GetBlockTime();
        unsigned int nTxPrevTime = txPrev.HasTimestamp() ?
                                           txPrev.GetTxTime() : nPrevBlockTime;
        if (nTxTime < nTxPrevTime)
        {
            return false; // Transaction timestamp violation
        }
        if (nPrevBlockTime + nStakeMinAge > nTxTime)
            continue; // only count coins meeting min age requirement
        int64_t nValueIn = txPrev.vout[txin.prevout.n].nValue;
        // Cap each coin's counted age at MAX_COIN_SECONDS.
        bnSeconds = min(CBigNum(nTxTime - nTxPrevTime),
                        chainParams.MAX_COIN_SECONDS);
        bnCentSecond += CBigNum(nValueIn) * bnSeconds / CENT;
        if (fDebug && GetBoolArg("-printcoinage"))
            printf("coin age nValueIn=%" PRId64 " nTimeDiff=%d bnCentSecond=%s\n",
                   nValueIn, nTxTime - nTxPrevTime, bnCentSecond.ToString().c_str());
    }
    // Convert cent-seconds to whole coin-days.
    CBigNum bnCoinDay = bnCentSecond * CENT / COIN / (24 * 60 * 60);
    if (fDebug && GetBoolArg("-printcoinage"))
        printf("coin age bnCoinDay=%s\n", bnCoinDay.ToString().c_str());
    nCoinAge = bnCoinDay.getuint64();
    return true;
}
// ppcoin: total coin age spent in block, in the unit of coin-days.
// Sums the coin age of every transaction in the block; fails if any one
// cannot be computed. The block total is floored at 1 coin-day.
bool CBlock::GetCoinAge(uint64_t& nCoinAge) const
{
    nCoinAge = 0;
    CTxDB txdb("r");
    for (unsigned int nPos = 0; nPos < vtx.size(); ++nPos)
    {
        uint64_t nTxCoinAge;
        if (!vtx[nPos].GetCoinAge(txdb, nTime, nTxCoinAge))
        {
            return false;
        }
        nCoinAge += nTxCoinAge;
    }
    if (nCoinAge == 0) // block coin age minimum 1 coin-day
    {
        nCoinAge = 1;
    }
    if (fDebug && GetBoolArg("-printcoinage"))
    {
        printf("block coin age total nCoinDays=%" PRId64 "\n", nCoinAge);
    }
    return true;
}
// Create and persist a CBlockIndex entry for this block, which is
// already stored on disk at nFile/nBlockPos. Computes chain trust,
// stake entropy bit, stake modifier (with checkpoint check), and qPoS
// pico power; writes the index entry; and, if the new entry has more
// chain trust than the current best, attempts to make it the new best
// chain via SetBestChain. pregistryTemp is advanced over the new block
// in either case (through a copy when setting best chain).
//   hashProof : proof hash recorded for proof-of-stake blocks
// Returns false on duplicate hash, db failure, registry failure, or a
// stake modifier checkpoint mismatch.
bool CBlock::AddToBlockIndex(unsigned int nFile,
                             unsigned int nBlockPos,
                             const uint256& hashProof,
                             QPRegistry *pregistryTemp)
{
    // Check for duplicate
    uint256 hash = GetHash();
    if (mapBlockIndex.count(hash))
        return error("AddToBlockIndex() : %s already exists", hash.ToString().c_str());
    // Construct new block index object
    CBlockIndex* pindexNew = new CBlockIndex(nFile, nBlockPos, *this);
    if (!pindexNew)
        return error("AddToBlockIndex() : new CBlockIndex failed");
    pindexNew->phashBlock = &hash;
    map<uint256, CBlockIndex*>::iterator miPrev = mapBlockIndex.find(hashPrevBlock);
    if (miPrev != mapBlockIndex.end())
    {
        pindexNew->pprev = (*miPrev).second;
        pindexNew->nHeight = pindexNew->pprev->nHeight + 1;
    }
    // ppcoin: compute chain trust score
    pindexNew->bnChainTrust = (pindexNew->pprev ?
                                  pindexNew->pprev->bnChainTrust : 0) +
                              pindexNew->GetBlockTrust(pregistryTemp);
    // ppcoin: compute stake entropy bit for stake modifier
    if (!pindexNew->SetStakeEntropyBit(GetStakeEntropyBit(pindexNew->nHeight)))
        return error("AddToBlockIndex() : SetStakeEntropyBit() failed");
    // Record proof hash value
    if (pindexNew->IsProofOfStake())
    {
        pindexNew->hashProofOfStake = hashProof;
    }
    // ppcoin: compute stake modifier
    uint64_t nStakeModifier = 0;
    bool fGeneratedStakeModifier = false;
    if (!ComputeNextStakeModifier(pindexNew->pprev, nStakeModifier, fGeneratedStakeModifier))
        return error("AddToBlockIndex() : ComputeNextStakeModifier() failed");
    pindexNew->SetStakeModifier(nStakeModifier, fGeneratedStakeModifier);
    pindexNew->nStakeModifierChecksum = GetStakeModifierChecksum(pindexNew);
    if (!CheckStakeModifierCheckpoints(pindexNew->nHeight, pindexNew->nStakeModifierChecksum))
    {
        return error("AddToBlockIndex() : Rejected by stake modifier "
                         "checkpoint height=%d, modifier=0x%016" PRIx64,
                     pindexNew->nHeight, nStakeModifier);
    }
    pindexNew->nPicoPower = pregistryTemp->GetPicoPower();
    // Add to mapBlockIndex
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.insert(make_pair(hash, pindexNew)).first;
    if (pindexNew->IsProofOfStake())
        setStakeSeen.insert(make_pair(pindexNew->prevoutStake, pindexNew->nStakeTime));
    // Repoint phashBlock at the key stored in the map (stable storage).
    pindexNew->phashBlock = &((*mi).first);
    // Write to disk block index
    CTxDB txdb;
    if (!txdb.TxnBegin())
    {
        return error("AddToBlockIndex(): could not begin db transaction");
    }
    txdb.WriteBlockIndex(CDiskBlockIndex(pindexNew));
    if (!txdb.TxnCommit())
    {
        return error("AddToBlockIndex(): could not commit db transaction");
    }
    // New best
    if (pindexNew->bnChainTrust > bnBestChainTrust)
    {
        // Work on a copy so a failed/reorganized SetBestChain does not
        // corrupt pregistryTemp.
        AUTO_PTR<QPRegistry> pregistryTempTemp(new QPRegistry(pregistryTemp));
        if (!pregistryTempTemp.get())
        {
            return error("AddToBlockIndex() : creating temp temp registry failed");
        }
        pregistryTempTemp->CheckSynced();
        int nSnapType = IsInitialBlockDownload() ? QPRegistry::SPARSE_SNAPS :
                                                   QPRegistry::ALL_SNAPS;
        if (!pregistryTempTemp->UpdateOnNewBlock(pindexNew,
                                                 nSnapType,
                                                 fDebugQPoS))
        {
            return error("AddToBlockIndex() : registry couldn't update new block\n   %s",
                         hash.ToString().c_str());
        }
        bool fReorganized;
        if (!SetBestChain(txdb, pindexNew, pregistryTemp, fReorganized))
        {
            printf("AddToBlockIndex() : could not set best chain with block\n");
            return false;
        }
        // The above update on new block should not be applied to the temp registry
        // if set best chain reorganized, orphaning the new block.
        if (!fReorganized)
        {
            bool fExitReplay = !pregistryTemp->IsInReplayMode();
            pregistryTemp->Copy(pregistryTempTemp.get());
            if (fExitReplay)
            {
                pregistryTemp->ExitReplayMode();
            }
        }
    }
    else  // need to update the temp registry just to check
    {
        pregistryTemp->CheckSynced();
        int nSnapType = IsInitialBlockDownload() ? QPRegistry::SPARSE_SNAPS :
                                                   QPRegistry::ALL_SNAPS;
        if (!pregistryTemp->UpdateOnNewBlock(pindexNew,
                                             nSnapType,
                                             fDebugQPoS))
        {
            return error("AddToBlockIndex() : registry couldn't update new block\n   %s",
                         hash.ToString().c_str());
        }
    }
    //txdb.Close();
    if (pindexNew == pindexBest && !IsQuantumProofOfStake())
    {
        // Notify UI to display prev block's coinbase if it was ours
        static uint256 hashPrevBestCoinBase;
        UpdatedTransaction(hashPrevBestCoinBase);
        if (vtx.size() > 0)
        {
            hashPrevBestCoinBase = vtx[0].GetHash();
        }
    }
    uiInterface.NotifyBlocksChanged();
    return true;
}
bool CBlock::CheckBlock(QPRegistry *pregistryTemp,
vector<QPTxDetails> &vDeetsRet,
CBlockIndex* pindexPrev,
bool fCheckPOW,
bool fCheckMerkleRoot,
bool fCheckSig,
bool fCheckQPoS) const
{
int nThisHeight = pindexPrev->nHeight + 1;
int nFork = GetFork(nThisHeight);
int nBlockFork = GetFork(nHeight);
uint256 hashBlock(GetHash());
// These are checks that are independent of context
// that can be verified before saving an orphan block.
// Size limits (note qPoS allows empty blocks)
if (((nBlockFork < XST_FORKQPOS) && vtx.empty()) ||
(vtx.size() > chainParams.MAX_BLOCK_SIZE) ||
(::GetSerializeSize(*this, SER_NETWORK, PROTOCOL_VERSION) >
chainParams.MAX_BLOCK_SIZE))
{
return DoS(100, error("CheckBlock() : size limits failed at height %d", nThisHeight));
}
// Check proof of work matches claimed amount
if (fCheckPOW && IsProofOfWork() && !CheckProofOfWork(hashBlock, nBits))
{
return DoS(50, error("CheckBlock() : proof of work failed"));
}
// Check block timestamp
if ((nBlockFork >= XST_FORKQPOS) && fCheckQPoS)
{
if (!pregistryTemp->IsInReplayMode())
{
pregistryTemp->UpdateOnNewTime(nTime,
pindexBest,
QPRegistry::NO_SNAPS,
fDebugQPoS);
if (!pregistryTemp->TimestampIsValid(nStakerID, nTime))
{
printf("CheckBlock(): now=%" PRId64 ", "
"hash=%s\n height=%d, staker=%u, "
"timestamp=%d, round=%u, seed=%u, "
"queue_start=%u\n "
"slot=%u, current window=(%u, %u)\n",
GetAdjustedTime(),
hashBlock.ToString().c_str(),
nHeight, nStakerID, nTime,
pregistryTemp->GetRound(),
pregistryTemp->GetRoundSeed(),
pregistryTemp->GetQueueMinTime(),
pregistryTemp->GetCurrentSlot(),
pregistryTemp->GetCurrentSlotStart(),
pregistryTemp->GetCurrentSlotEnd());
return error("CheckBlock() : timestamp is invalid for staker");
}
}
}
else if (nFork >= XST_FORK005)
{
if (GetBlockTime() > FutureDrift(GetAdjustedTime()))
{
return error("CheckBlock() : block timestamp too far in the future");
}
}
else
{
if (GetBlockTime() > (GetAdjustedTime() + chainParams.nMaxClockDrift))
{
return error("CheckBlock() : block timestamp too far in the future");
}
}
if (nBlockFork >= XST_FORKQPOS)
{
if (!vtx.empty())
{
for (unsigned int i = 0; i < vtx.size(); i++)
{
if (vtx[i].IsCoinBase() || vtx[i].IsCoinStake())
{
return DoS(100, error("CheckBlock() : no base/stake allowed"));
}
}
}
if (!IsQuantumProofOfStake())
{
return DoS(100, error("CheckBlock() : block must be qPoS"));
}
}
else
{
// First transaction must be coinbase, the rest must not be
if (!vtx[0].IsCoinBase())
{
return DoS(100, error("CheckBlock() : first tx is not coinbase"));
}
for (unsigned int i = 1; i < vtx.size(); i++)
{
if (vtx[i].IsCoinBase())
{
return DoS(100, error("CheckBlock() : more than one coinbase"));
}
}
}
// Check coinbase timestamp
if ((nFork < XST_FORK005) && (nBlockFork < XST_FORKQPOS))
{
// prior to XST_FORK006 CTransactions have timestamps
if (GetBlockTime() > (int64_t)vtx[0].GetTxTime() + chainParams.nMaxClockDrift)
{
return DoS(50, error("CheckBlock() : coinbase timestamp is too early"));
}
}
// exclude pow because
// (1) testnet usually has too few miners to stay in future drift
// (2) main net mining is done, so there is no need to check drift,
// and also the future drift was much more at the time
// this probably should be fixed "the right way" one day
else
{
if (!vtx.empty())
{
if (vtx[0].HasTimestamp() && !IsProofOfWork())
{
if (GetBlockTime() > FutureDrift((int64_t)vtx[0].GetTxTime()))
{
return DoS(50, error("CheckBlock() : coinbase timestamp: %" PRId64 " + 15 sec, "
"is too early for block: %" PRId64,
(int64_t)vtx[0].GetTxTime(), (int64_t)GetBlockTime()));
}
}
}
}
if (IsProofOfStake())
{
if (vtx[0].vout.size() != 1 || !vtx[0].vout[0].IsEmpty())
{
return DoS(100,
error("CheckBlock() : coinbase output not empty for proof-of-stake block"));
}
// Second transaction must be coinstake, the rest must not be
if (!vtx[1].IsCoinStake())
{
return DoS(100, error("CheckBlock() : second tx is not coinstake"));
}
for (unsigned int i = 2; i < vtx.size(); i++)
{
if (vtx[i].IsCoinStake())
{
return DoS(100, error("CheckBlock() : more than one coinstake"));
}
}
// Check coinstake timestamp
// no check upon XST_FORK006 because tx timestamps eliminated,
// effectivly making tx same as block
if (vtx[1].HasTimestamp() &&
(!CheckCoinStakeTimestamp(GetBlockTime(), (int64_t)vtx[1].GetTxTime())))
{
return DoS(50,
error("CheckBlock() : coinstake timestamp violation nTimeBlock=%"
PRId64 " nTimeTx=%u", GetBlockTime(), vtx[1].GetTxTime()));
}
if ((GetFork(nBestHeight + 1) >= XST_FORK002) && (IsProofOfWork()))
{
return DoS(100, error("CheckBlock() : Proof of work (%f XST) at t=%d on or after block %d.\n",
((double) vtx[0].GetValueOut() / (double) COIN),
(int) nTime,
(int) GetPoWCutoff()));
}
if (fCheckSig & !CheckBlockSignature(pregistryTemp))
{
return DoS(100, error("CheckBlock() : bad proof-of-stake block signature"));
}
}
if (IsQuantumProofOfStake())
{
if (fCheckSig & !CheckBlockSignature(pregistryTemp))
{
return DoS(100, error("CheckBlock() : bad qPoS block signature"));
}
}
// Check transactions
BOOST_FOREACH(const CTransaction& tx, vtx)
{
if (!tx.CheckTransaction(nHeight))
{
return DoS(tx.nDoS, error("CheckBlock() : CheckTransaction failed"));
}
// ppcoin: check transaction timestamp
// ignore as of XST_FORK006 as tx timestamps are thereupon eliminated
if (tx.HasTimestamp() && (GetBlockTime() < (int64_t)tx.GetTxTime()))
{
return DoS(50, error("CheckBlock() : block timestamp earlier than transaction timestamp"));
}
}
// Check for duplicate txids. This is caught by ConnectInputs(),
// but catching it earlier avoids a potential DoS attack:
set<uint256> uniqueTx;
BOOST_FOREACH(const CTransaction& tx, vtx)
{
uniqueTx.insert(tx.GetHash());
}
if (uniqueTx.size() != vtx.size())
return DoS(100, error("CheckBlock() : duplicate transaction"));
unsigned int nSigOps = 0;
BOOST_FOREACH(const CTransaction& tx, vtx)
{
nSigOps += tx.GetLegacySigOpCount();
}
if (nSigOps > chainParams.MAX_BLOCK_SIGOPS)
return DoS(100, error("CheckBlock() : out-of-bounds SigOpCount"));
// Check merkle root
if (fCheckMerkleRoot && hashMerkleRoot != BuildMerkleTree())
return DoS(100, error("CheckBlock() : hashMerkleRoot mismatch"));
if (fCheckQPoS)
{
/***********************************************************************
* qPoS checks
***********************************************************************/
// This loop gathers all valid qPoS transactions, keeping their order
// according to order in CBlock.vtx and order in CTransaction.vout
// Each tx.vout is looped over and the candidate qPoS transactions
// gathered, then they are checked within each transaction.
// Other checks herein include
// (1) ensuring the same alias is not attempted
// to be registered for two different stakers
// (2) setkey operations are not attempted on the same staker in
// two different transactions
// (3) claims are gathered by public key and the total claim can
// not exceed the registry balance for the key
// Note that any number and any sequence of valid setstate operations
// are permissible on the block level, even though only one is
// permitted on the transaction level. On the transaction level,
// the other important checks are that a setstate tx has only a
// single input and that the signatory of that input owns the staker.
CTxDB txdb("r");
map<string, qpos_purchase> mapPurchases;
map<unsigned int, vector<qpos_setkey> > mapSetKeys;
map<CPubKey, vector<qpos_claim> > mapClaims;
map<unsigned int, vector<qpos_setmeta> > mapSetMetas;
// holds ordered validated deets that will be applied to registry state
vDeetsRet.clear();
BOOST_FOREACH(const CTransaction &tx, vtx)
{
vector<QPTxDetails> vDeets;
MapPrevTx mapInputs;
// round up candidate qPoS transactions without any validation
bool fNeedsInputs = tx.GetQPTxDetails(hashBlock, vDeets);
if (fNeedsInputs)
{
// pre-fill mapInputs with prevouts in this (same) block
for (unsigned int i = 0; i < tx.vin.size(); ++i)
{
uint256 hash = tx.vin[i].prevout.hash;
BOOST_FOREACH(const CTransaction &tx2, vtx)
{
if (tx2 == tx)
{
// prevouts must be previous in the vtx (?)
break;
}
if (tx2.GetHash() == hash)
{
mapInputs[hash].second = tx;
// only need a dummy CTxIndex to check sig
mapInputs[hash].first.vSpent.resize(tx.vout.size());
continue;
}
}
}
map<uint256, CTxIndex> mapUnused;
bool fInvalid = false;
if (!tx.FetchInputs(txdb, mapUnused,
false, false, mapInputs, fInvalid))
{
// OK to remove from mempool here because the block production
// thread is done with the vecPriority from which the tx came
mempool.remove(tx);
if (fInvalid)
{
return DoS(50, error("CheckBlock(): invalid qPoS inputs\n"));
}
else
{
// no DoS here because inputs validated elsewhere
printf("CheckBlock(): fail fetching qPoS inputs\n");
return false;
}
}
}
if (!tx.CheckQPoS(pregistryTemp, mapInputs,
nTime, vDeets, pindexPrev,
mapPurchases, mapSetKeys,
mapClaims, mapSetMetas,
vDeetsRet))
{
// OK to remove from mempool here because the block production
// thread is done with the vecPriority from which the tx came
mempool.remove(tx);
return error("CheckBlock(): CheckQPoS fail");
}
}
// dry-run to ensure registry can successfully update
CBlock block = *this;
AUTO_PTR<CBlockIndex> pindexTempTemp(new CBlockIndex(0, 0, block));
if (!pindexTempTemp.get())
{
return error("CheckBlock(): TSNH create temp block index failed");
}
pindexTempTemp->pprev = pindexPrev;
pindexTempTemp->phashBlock = &hashBlock;
AUTO_PTR<QPRegistry> pregistryTempTemp(new QPRegistry(pregistryTemp));
if (!pregistryTempTemp.get())
{
return error("CheckBlock(): TSNH create temp*2 registry failed");
}
if (!pregistryTempTemp->UpdateOnNewBlock(pindexTempTemp.get(),
QPRegistry::NO_SNAPS,
false,
true))
{
return error("CheckBlock(): registry update failed");
}
/*** end qPos checks **********************************************/
}
return true;
}
// Contextual validation and acceptance of a block that already passed
// CheckBlock(). Verifies chain linkage, fork-dependent consensus rules,
// timestamps, difficulty, checkpoints, and the stake proof; on success,
// writes the block to disk, adds it to the block index, and relays it to
// peers when it becomes the new best chain tip.
//   pregistryTemp - temporary qPoS registry advanced to this block's parent
//   fIsMine       - true when this node produced the block
//   fIsBootstrap  - true while loading blocks from a bootstrap file
// Returns false on any failure, optionally assigning a DoS penalty.
bool CBlock::AcceptBlock(QPRegistry *pregistryTemp,
                         bool fIsMine,
                         bool fIsBootstrap)
{
    int nFork = GetFork(nBestHeight + 1);
    // Check for duplicate
    uint256 hash = GetHash();
    if (mapBlockIndex.count(hash))
        return error("AcceptBlock() : block already in mapBlockIndex");
    // Get prev block index
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashPrevBlock);
    if (mi == mapBlockIndex.end())
    {
        return DoS(10, error("AcceptBlock() : prev block not found"));
    }
    CBlockIndex* pindexPrev = (*mi).second;
    int nThisHeight = pindexPrev->nHeight + 1;
    // Fork schedule: PoS ends at XST_FORKQPOS, PoW ended at XST_FORK002
    if (IsProofOfStake() && (nFork >= XST_FORKQPOS))
    {
        return DoS(100, error("AcceptBlock() : No more PoS allowed (height = %d)", nThisHeight));
    }
    if (IsProofOfWork() && (nFork >= XST_FORK002))
    {
        return DoS(100, error("AcceptBlock() : No more PoW allowed (height = %d)", nThisHeight));
    }
    if ((nFork < XST_FORKQPOS) && (nFork >= XST_FORK005))
    {
        if (vtx[0].HasTimestamp())
        {
            // Check coinbase timestamp
            if (GetBlockTime() > FutureDrift((int64_t)vtx[0].GetTxTime()))
            {
                return DoS(50, error("AcceptBlock() : coinbase timestamp is too early"));
            }
        }
        // NOTE(review): assumes vtx.size() > 1 here — confirm coinbase-only
        // blocks cannot reach this branch at these forks
        if (vtx[1].HasTimestamp())
        {
            // Check coinstake timestamp
            if (IsProofOfStake() && !CheckCoinStakeTimestamp(GetBlockTime(), (int64_t)vtx[1].GetTxTime()))
            {
                return DoS(50, error("AcceptBlock() : coinstake timestamp violation nTimeBlock=%" PRId64
                                     " nTimeTx=%u", GetBlockTime(), vtx[1].GetTxTime()));
            }
        }
    }
    // Check proof-of-work or proof-of-stake
    // (qPoS has no difficulty target, so nBits is not checked upon XST_FORKQPOS)
    if ((nFork < XST_FORKQPOS) &&
        (nBits != GetNextTargetRequired(pindexPrev, IsProofOfStake())))
    {
        return DoS(100, error("AcceptBlock() : incorrect %s", IsProofOfWork() ? "proof-of-work" : "proof-of-stake"));
    }
    // FIXME: should just move time slot check here (see checkblock)
    // Check timestamp against prev
    if (nFork >= XST_FORKQPOS)
    {
        // qPoS: strictly increasing block times
        if (GetBlockTime() < pindexPrev->nTime)
        {
            return error("AcceptBlock() : block's timestamp is earlier than prev block");
        }
        if (GetBlockTime() == pindexPrev->nTime)
        {
            return error("AcceptBlock() : block's timestamp is same as prev block");
        }
    }
    else if (nFork >= XST_FORK005)
    {
        if ((GetBlockTime() <= pindexPrev->GetPastTimeLimit()) ||
            (FutureDrift(GetBlockTime()) < pindexPrev->GetBlockTime()))
        {
            return error("AcceptBlock() : block's timestamp is too early");
        }
    }
    else if ((GetBlockTime() <= pindexPrev->GetMedianTimePast()) ||
             ((GetBlockTime() + chainParams.nMaxClockDrift) < pindexPrev->GetBlockTime()))
    {
        return error("AcceptBlock() : block's timestamp is too early");
    }
    // Check that all transactions are finalized
    BOOST_FOREACH(const CTransaction& tx, vtx)
    {
        if (!tx.IsFinal(nThisHeight, GetBlockTime()))
        {
            return DoS(10, error("AcceptBlock() : contains a non-final transaction"));
        }
    }
    // Check that the block chain matches the known block chain up to a checkpoint
    if ((nFork < XST_FORKQPOS) && (!Checkpoints::CheckHardened(nThisHeight, hash)))
    {
        return DoS(100, error("AcceptBlock() : rejected by hardened checkpoint lock-in at %d", nThisHeight));
    }
    uint256 hashProof;
    // Verify hash target and signature of coinstake tx
    if (IsProofOfStake())
    {
        uint256 targetProofOfStake;
        if (!CheckProofOfStake(vtx[1], nBits, hashProof, nTime))
        {
            printf("WARNING: AcceptBlock(): check proof-of-stake failed for block %s\n", hash.ToString().c_str());
            return false; // do not error here as we expect this during initial block download
        }
    }
    // PoW is checked in CheckBlock() & qPoS checked by registry
    else
    {
        hashProof = GetHash();
    }
    // ppcoin: check that the block satisfies synchronized checkpoint
    // xst:
    //    1. don't even warn if bootstrapping
    //    2. sync checkpoints are getting phased out, so ignored by default
    if (!(fIsBootstrap || GetBoolArg("-nosynccheckpoints", true)))
    {
        if (!Checkpoints::CheckSync(hash, pindexPrev))
        {
            return error("AcceptBlock() : rejected by synchronized checkpoint");
        }
    }
    // Ensure that block height is serialized somewhere
    if (nFork >= XST_FORKQPOS)
    {
        // qPoS blocks carry the height in the header itself
        if (nHeight != nThisHeight)
        {
            return DoS(100, error("AcceptBlock() : block height mismatch"));
        }
    }
    else
    {
        // Pre-qPoS: Enforce rule that the coinbase starts with serialized block height
        CScript expect = CScript() << nThisHeight;
        if (!equal(expect.begin(), expect.end(), vtx[0].vin[0].scriptSig.begin()))
        {
            return DoS(100, error("AcceptBlock() : block height mismatch in coinbase"));
        }
    }
    // Write block to history file
    if (!CheckDiskSpace(::GetSerializeSize(*this, SER_DISK, CLIENT_VERSION)))
        return error("AcceptBlock() : out of disk space");
    // (unsigned int)-1 is the "no file" sentinel recognized by OpenBlockFile
    unsigned int nFile = -1;
    unsigned int nBlockPos = 0;
    if (!WriteToDisk(nFile, nBlockPos))
        return error("AcceptBlock() : WriteToDisk failed");
    if (!AddToBlockIndex(nFile, nBlockPos, hashProof, pregistryTemp))
        return error("AcceptBlock() : AddToBlockIndex failed");
    // Relay inventory, but
    //    1. don't relay old inventory during initial block download
    //    2. don't relay blocks we produce that should be rolled back
    if (hashBestChain == hash)
    {
        LOCK(cs_vNodes);
        BOOST_FOREACH(CNode* pnode, vNodes)
        {
            // only push to peers that are near the tip (within 2000 blocks)
            if (nBestHeight > (pnode->nStartingHeight - 2000))
            {
                if (fDebugNet)
                {
                    printf("AcceptBlock(): pushing accepted block to %s\n  :%s\n",
                           pnode->addrName.c_str(),
                           hash.ToString().c_str());
                }
                if (!pnode->PushInventory(CInv(MSG_BLOCK, hash)) && fDebugNet)
                {
                    printf("AcceptBlock(): couldn't push accepted block to %s\n  :%s\n",
                           pnode->addrName.c_str(),
                           hash.ToString().c_str());
                }
            }
        }
    }
    // ppcoin: check pending sync-checkpoint
    if (nFork < XST_FORKQPOS)
    {
        Checkpoints::AcceptPendingSyncCheckpoint();
    }
    return true;
}
// Trust this block contributes to cumulative chain trust.
// qPoS blocks contribute the staker's registry weight, PoS blocks the
// standard inverse-target score, and PoW blocks a work ratio floored at 1.
CBigNum CBlockIndex::GetBlockTrust(const QPRegistry *pregistry) const
{
    if (IsQuantumProofOfStake())
    {
        unsigned int nStakerWeight;
        if (!pregistry->GetStakerWeight(nStakerID, nStakerWeight))
        {
            // set weight to 1 if staker is unknown
            printf("GetBlockTrust(): no such staker %u\n", nStakerID);
            nStakerWeight = 1;
        }
        return CBigNum(nStakerWeight);
    }

    CBigNum bnDifficultyTarget;
    bnDifficultyTarget.SetCompact(nBits);
    if (bnDifficultyTarget <= 0)
    {
        // degenerate target yields no trust
        return 0;
    }

    if (IsProofOfStake())
    {
        // Return trust score as usual
        return (CBigNum(1) << 256) / (bnDifficultyTarget + 1);
    }

    // PoW: work relative to the minimum-difficulty target, never below 1
    CBigNum bnWork = bnProofOfWorkLimit / (bnDifficultyTarget + 1);
    return (bnWork > 1) ? bnWork : 1;
}
// True when at least nRequired of the up-to-nToCheck most recent blocks
// ending at pstart (walking back through pprev) have version >= minVersion.
bool CBlockIndex::IsSuperMajority(int minVersion, const CBlockIndex* pstart, unsigned int nRequired, unsigned int nToCheck)
{
    unsigned int nMatches = 0;
    unsigned int nExamined = 0;
    // stop early once the required count is reached or the chain runs out
    while ((pstart != NULL) && (nExamined < nToCheck) && (nMatches < nRequired))
    {
        if (pstart->nVersion >= minVersion)
        {
            ++nMatches;
        }
        pstart = pstart->pprev;
        ++nExamined;
    }
    return (nMatches >= nRequired);
}
// Top-level entry point for a newly received or produced block.
// Runs context-free checks (CheckBlock) against a temporary registry,
// applies checkpoint-based anti-spam heuristics, parks orphans until their
// parent arrives, accepts the block (plus any orphans it unblocks), then
// syncs the main qPoS registry and prunes stale mempool entries.
//   pfrom        - peer that sent the block (NULL for local/bootstrap)
//   fIsBootstrap - loading from a bootstrap file
//   fJustCheck   - validate only; skip merkle/qPoS re-verification details
//   fIsMine      - this node produced the block
// Returns true only when the block (non-orphan) was fully accepted, or when
// it was stored as an orphan awaiting its parent.
bool ProcessBlock(CNode* pfrom, CBlock* pblock,
                  bool fIsBootstrap, bool fJustCheck, bool fIsMine)
{
    bool fAllowDuplicateStake = (fIsBootstrap && GetBoolArg("-permitdirtybootstrap", false));
    // Check for duplicate
    uint256 hash = pblock->GetHash();
    if (hash == (fTestNet ? chainParams.hashGenesisBlockTestNet : hashGenesisBlock))
    {
        // not an error, but return false because it was not processed
        printf("ProcessBlock() : skipping genesis block\n  %s\n", hash.ToString().c_str());
        return false;
    }
    if (mapBlockIndex.count(hash))
        return error("ProcessBlock() : already have block %d %s", mapBlockIndex[hash]->nHeight, hash.ToString().c_str());
    if (mapOrphanBlocks.count(hash))
        return error("ProcessBlock() : already have block (orphan) %s", hash.ToString().c_str());
    // ppcoin: check proof-of-stake
    // Limited duplicity on stake: prevents block flood attack
    // Duplicate stake allowed only when there is orphan child block
    // xst: or on bootstrap, which happens rarely, and only if permitdirtybootstrap
    if (pblock->IsProofOfStake() &&
        setStakeSeen.count(pblock->GetProofOfStake()) &&
        !mapOrphanBlocksByPrev.count(hash) &&
        !Checkpoints::WantedByPendingSyncCheckpoint(hash) && !fAllowDuplicateStake)
    {
        return error("ProcessBlock() : duplicate proof-of-stake (%s, %d) for block %s",
                     pblock->GetProofOfStake().first.ToString().c_str(),
                     pblock->GetProofOfStake().second, hash.ToString().c_str());
    }
    // NOTE(review): a default-constructed CBlockLocator appears to resolve to
    // the best chain tip, making nHeight the current best height — confirm
    // CBlockLocator::GetBlockIndex() semantics
    CBlockLocator locator;
    int nHeight = locator.GetBlockIndex()->nHeight;
    // Preliminary checks
    // Why make a temp registry?
    // The registry clock is basically a Lamport clock, where advance
    // of the local timestamp beyond the network event requires a local
    // event (block production). To validate new blocks, the registry
    // clock needs to advance to the network event (block) timestamp,
    // as blocks are the only sources of time. However, care must be
    // taken not to advance the timestamp just to check a block because
    // that would make it easy for an attacker to advance its peers'
    // clocks with a block that is not fully validated. Therefore, a temp
    // registry must be used to be advanced by the block then check the
    // block. If the block is valid using this registry, then the local
    // clock can be advanced when the block is added to the growing chain.
    AUTO_PTR<QPRegistry> pregistryTemp(new QPRegistry(pregistryMain));
    if (!pregistryTemp.get())
    {
        return error("ProcessBlock() : creating temp registry failed");
    }
    bool fCheckOK;
    if (fJustCheck)
    {
        printf("ProcessBlock(): just checking block\n");
        vector<QPTxDetails> vDeets;
        fCheckOK = pblock->CheckBlock(pregistryTemp.get(), vDeets, pindexBest,
                                      true, true, false, false);
    }
    else
    {
        printf("ProcessBlock(): fully checking block\n");
        vector<QPTxDetails> vDeets;
        fCheckOK = pblock->CheckBlock(pregistryTemp.get(), vDeets, pindexBest);
    }
    if (!fCheckOK)
    {
        return error("ProcessBlock() : CheckBlock FAILED");
    }
    // no more proof of work
    if (pblock->IsProofOfWork() && (GetFork(nHeight) >= XST_FORK002))
    {
        if (pfrom)
        {
            pfrom->Misbehaving(100);
        }
        printf("Proof-of-work on or after block %d.\n", GetPoWCutoff());
        return error("Proof-of-work on or after block %d.\n", GetPoWCutoff());
    }
    // no more proof of stake
    if (pblock->IsProofOfStake() && (GetFork(nHeight) >= XST_FORKQPOS))
    {
        if (pfrom)
        {
            pfrom->Misbehaving(100);
        }
        printf("Proof-of-stake on or after block %d.\n", GetQPoSStart());
        return error("Proof-of-stake on or after block %d.\n", GetQPoSStart());
    }
    bool fUseSyncCheckpoints = !GetBoolArg("-nosynccheckpoints", true);
    /****************************************************************************
     * checkpoint specific code
     *
     * checkpoints are getting phased out
     * this code uses checkpoints for some spam prevention
     ***************************************************************************/
    if (fUseSyncCheckpoints)
    {
        CBlockIndex* pcheckpoint = Checkpoints::GetLastSyncCheckpoint();
        if(pcheckpoint && fDebug)
        {
            const CBlockIndex* pindexLastPos = GetLastBlockIndex(pcheckpoint, true);
            if(pindexLastPos)
            {
                printf("ProcessBlock(): Last POS Block Height: %d \n", pindexLastPos->nHeight);
            }
            else
            {
                printf("ProcessBlock(): Previous POS block not found.\n");
            }
        }
        if (pcheckpoint &&
            pblock->hashPrevBlock != hashBestChain &&
            !Checkpoints::WantedByPendingSyncCheckpoint(hash))
        {
            // Extra checks to prevent "fill up memory by spamming with bogus blocks"
            int64_t deltaTime = pblock->GetBlockTime() - pcheckpoint->nTime;
            CBigNum bnNewBlock;
            bnNewBlock.SetCompact(pblock->nBits);
            CBigNum bnRequired;
            const CBlockIndex* LastBlock = GetLastBlockIndex(pcheckpoint, true);
            int nThisHeight = LastBlock->nHeight + 1;
            unsigned int nLastBits = LastBlock->nBits;
            if (pblock->IsProofOfStake()) {
                bnRequired.SetCompact(ComputeMinStake(nLastBits, deltaTime, pblock->nTime));
                if ((GetFork(nThisHeight) < XST_FORK002) && (bnNewBlock > bnProofOfStakeLimit)) {
                    bnNewBlock = bnNewBlock >> 2; // adjust target for big hashes
                }
            }
            else if (pblock->IsProofOfWork())
            {
                bnRequired.SetCompact(ComputeMinWork(nLastBits, deltaTime));
            }
            if (bnNewBlock > bnRequired)
            {
                if (pfrom)
                    pfrom->Misbehaving(100);
                return error("ProcessBlock() : block with too little %s",
                             pblock->IsProofOfStake() ? "proof-of-stake" :
                                                        "proof-of-work");
            }
        }
        // ppcoin: ask for pending sync-checkpoint if any
        if (!IsInitialBlockDownload())
            Checkpoints::AskForPendingSyncCheckpoint(pfrom);
    }
    /**************************************************************************
     * end of checkpoint specific code
     **************************************************************************/
    // If don't already have its previous block, shunt it off to holding area until we get it
    if (!mapBlockIndex.count(pblock->hashPrevBlock))
    {
        printf("ProcessBlock: ORPHAN BLOCK, %s\n prev=%s\n",
               hash.ToString().c_str(),
               pblock->hashPrevBlock.ToString().c_str());
        // orphan maps own this copy; deleted when the orphan is later processed
        CBlock* pblock2 = new CBlock(*pblock);
        // ppcoin: check proof-of-stake
        if (pblock2->IsProofOfStake())
        {
            // Limited duplicity on stake: prevents block flood attack
            // Duplicate stake allowed only when there is orphan child block
            if (setStakeSeenOrphan.count(pblock2->GetProofOfStake()) &&
                !mapOrphanBlocksByPrev.count(hash) &&
                !(fUseSyncCheckpoints && Checkpoints::WantedByPendingSyncCheckpoint(hash)))
            {
                return error("ProcessBlock() : duplicate proof-of-stake (%s, %d) for orphan block %s",
                             pblock2->GetProofOfStake().first.ToString().c_str(),
                             pblock2->GetProofOfStake().second,
                             hash.ToString().c_str());
            }
            else
            {
                setStakeSeenOrphan.insert(pblock2->GetProofOfStake());
            }
        }
        mapOrphanBlocks.insert(make_pair(hash, pblock2));
        mapOrphanBlocksByPrev.insert(make_pair(pblock2->hashPrevBlock, pblock2));
        // Ask this guy to fill in what we're missing
        if (pfrom)
        {
            pfrom->PushGetBlocks(pindexBest, GetOrphanRoot(pblock2));
            // ppcoin: getblocks may not obtain the ancestor block rejected
            // earlier by duplicate-stake check so we ask for it again directly
            if (IsInitialBlockDownload())
            {
                pfrom->nOrphans += 1;
            }
            else
            {
                pfrom->nOrphans = 0;
                pfrom->AskFor(CInv(MSG_BLOCK, WantedByOrphan(pblock2)));
            }
        }
        return true;
    }
    /**************************************************************************
     * Not an ORPHAN
     **************************************************************************/
    // the registry must be positioned at this block's parent before accepting
    if (pregistryTemp->GetBlockHash() == pblock->hashPrevBlock)
    {
        if (fDebugQPoS)
        {
            printf("ProcessBlock(): no need to rewind registry to %s\n",
                   pblock->hashPrevBlock.ToString().c_str());
        }
    }
    else
    {
        printf("ProcessBlock() rewind registry to accept: %s\n",
               hash.ToString().c_str());
        CBlockIndex *pindexRewind = mapBlockIndex[pblock->hashPrevBlock];
        CTxDB txdb("r");
        CBlockIndex *pindexCurrent;
        pregistryTemp->SetNull();
        if (!RewindRegistry(txdb, pindexRewind, pregistryTemp.get(), pindexCurrent))
        {
            return error("ProcessBlock() : Could not rewind registry to prev of %s",
                         hash.ToString().c_str());
        }
    }
    // Store to disk
    if (!pblock->AcceptBlock(pregistryTemp.get(), fIsMine, fIsBootstrap))
    {
        return error("ProcessBlock() : AcceptBlock FAILED %s", hash.ToString().c_str());
    }
    // Recursively process any orphan blocks that depended on this one
    // (breadth-first via the work queue; no actual recursion)
    vector<uint256> vWorkQueue;
    vWorkQueue.push_back(hash);
    for (unsigned int i = 0; i < vWorkQueue.size(); i++)
    {
        uint256 hashPrev = vWorkQueue[i];
        for (multimap<uint256, CBlock*>::iterator mi = mapOrphanBlocksByPrev.lower_bound(hashPrev);
             mi != mapOrphanBlocksByPrev.upper_bound(hashPrev);
             ++mi)
        {
            CBlock* pblockOrphan = (*mi).second;
            // Ensure that the next orphan would be correct top for this chain.
            if (pblockOrphan->hashPrevBlock == pregistryTemp->GetBlockHash())
            {
                if (fDebugQPoS)
                {
                    printf("ProcessBlock(): trying next orphan at %d\n   %s\n",
                           pblockOrphan->nHeight,
                           pblockOrphan->GetHash().ToString().c_str());
                }
            }
            else
            {
                printf("ProcessBlock() rewind registry to accept orphan\n"
                          "   %s\n   prev: %s\n",
                       pblockOrphan->GetHash().ToString().c_str(),
                       pblockOrphan->hashPrevBlock.ToString().c_str());
                if (!mapBlockIndex.count(pblockOrphan->hashPrevBlock))
                {
                    printf("ProcessBlock() : TSNH hash prev not in block index\n");
                    continue;
                }
                CBlockIndex *pindexRewind = mapBlockIndex[pblockOrphan->hashPrevBlock];
                CTxDB txdb("r");
                CBlockIndex *pindexCurrent;
                pregistryTemp->SetNull();
                if (!RewindRegistry(txdb, pindexRewind, pregistryTemp.get(), pindexCurrent))
                {
                    printf("ProcessBlock() : TSNH could not rewind registry\n");
                    continue;
                }
            }
            if (pblockOrphan->AcceptBlock(pregistryTemp.get(), fIsMine, fIsBootstrap))
            {
                printf("ProcessBlock(): accept orphan %s success\n",
                       pblockOrphan->GetHash().ToString().c_str());
                vWorkQueue.push_back(pblockOrphan->GetHash());
            }
            else
            {
                printf("ProcessBlock(): accept orphan %s fail\n",
                       pblockOrphan->GetHash().ToString().c_str());
            }
            // orphan is removed from tracking whether or not it was accepted
            mapOrphanBlocks.erase(pblockOrphan->GetHash());
            setStakeSeenOrphan.erase(pblockOrphan->GetProofOfStake());
            delete pblockOrphan;
        }
        mapOrphanBlocksByPrev.erase(hashPrev);
    }
    // Only update main registry if on best chain
    if (hashBestChain == pregistryTemp->GetBlockHash())
    {
        // Update main registry with pregistryTemp
        bool fExitReplay = !pregistryMain->IsInReplayMode();
        pregistryMain->Copy(pregistryTemp.get());
        if (fExitReplay)
        {
            pregistryMain->ExitReplayMode();
        }
    }
    // Clear mempool of purchases that have aged so much that
    // the price is too low.
    int nPurchasesRemoved = mempool.removeInvalidPurchases();
    if (fDebugQPoS && nPurchasesRemoved)
    {
        printf("ProcessBlock(): removed %d purchases\n", nPurchasesRemoved);
    }
    // Clear mempool of feeless transactions that reference blocks
    // too deep in the chain.
    int nFeelessRemoved = mempool.removeOldFeeless();
    if (fDebugFeeless && nFeelessRemoved)
    {
        printf("ProcessBlock(): removed %d feeless transactions\n",
               nFeelessRemoved);
    }
    printf("ProcessBlock: ACCEPTED %s\n", hash.ToString().c_str());
    // ppcoin: if responsible for sync-checkpoint send it
    if (pfrom && !CSyncCheckpoint::strMasterPrivKey.empty())
    {
        Checkpoints::SendSyncCheckpoint(Checkpoints::AutoSelectSyncCheckpoint());
    }
    return true;
}
// ppcoin: sign block
// Sign this block into vchBlockSig with the key entitled to sign it:
//   qPoS - the staker's delegate key, looked up in pregistry by nStakerID
//   PoS  - the key paid by the coinstake's second output (vtx[1].vout[1])
//   PoW  - (miner builds only) any coinbase payout key we hold
// Returns true when a signature was produced; false (after logging) otherwise.
bool CBlock::SignBlock(const CKeyStore& keystore,
                       const QPRegistry *pregistry)
{
    vector<valtype> vSolutions;
    txnouttype whichType;
    if (IsQuantumProofOfStake())
    {
        if (pregistry == NULL)
        {
            return error("SignBlock(): pregistry is NULL");
        }
        CPubKey pubkey;
        if (!pregistry->GetDelegateKey(nStakerID, pubkey))
        {
            return false;
        }
        CKey key;
        if (!keystore.GetKey(pubkey.GetID(), key))
        {
            return false;
        }
        // sanity check: keystore key must match the registry's delegate key
        if (key.GetPubKey() != pubkey)
        {
            return false;
        }
        return key.Sign(GetHash(), vchBlockSig);
    }
    else if (IsProofOfStake())
    {
        const CTxOut& txout = vtx[1].vout[1];
        if (!Solver(txout.scriptPubKey, whichType, vSolutions))
            return false;
        if (whichType == TX_PUBKEY)
        {
            // Sign
            valtype& vchPubKey = vSolutions[0];
            CKey key;
            if (!keystore.GetKey(Hash160(vchPubKey), key))
                return false;
            if (key.GetPubKey() != vchPubKey)
                return false;
            return key.Sign(GetHash(), vchBlockSig);
        }
    }
#ifdef WITH_MINER
    else /* PoW */
    {
        // try every coinbase output until one can be signed with a held key
        for(unsigned int i = 0; i < vtx[0].vout.size(); i++)
        {
            const CTxOut& txout = vtx[0].vout[i];
            if (!Solver(txout.scriptPubKey, whichType, vSolutions))
                continue;
            if (whichType == TX_PUBKEY)
            {
                // Sign
                valtype& vchPubKey = vSolutions[0];
                CKey key;
                if (!keystore.GetKey(Hash160(vchPubKey), key))
                    continue;
                if (key.GetPubKey() != vchPubKey)
                    continue;
                if(!key.Sign(GetHash(), vchBlockSig))
                    continue;
                return true;
            }
        }
    }
#endif  /* WITH_MINER */
    printf("Sign failed\n");
    return false;
}
// ppcoin: check block signature
// Verify vchBlockSig against the key entitled to sign this block:
//   qPoS - the staker's delegate key from the registry
//   PoS  - the coinstake payout key (vtx[1].vout[1])
//   PoW  - any coinbase payout key
// The genesis block is valid only with an empty signature.
bool CBlock::CheckBlockSignature(const QPRegistry *pregistry) const
{
    if (GetHash() == (fTestNet ? chainParams.hashGenesisBlockTestNet :
                                 hashGenesisBlock))
        return vchBlockSig.empty();
    vector<valtype> vSolutions;
    txnouttype whichType;
    if(IsQuantumProofOfStake())
    {
        CPubKey vchPubKey;
        if (!pregistry->GetDelegateKey(nStakerID, vchPubKey))
        {
            return false;
        }
        CKey key;
        if (!key.SetPubKey(vchPubKey))
        {
            return false;
        }
        if (vchBlockSig.empty())
        {
            return false;
        }
        if (fDebugQPoS)
        {
            vector<unsigned char> vchHex = vchPubKey.Raw();
            printf("CheckBlockSignature(): key=%s\n  hash=%s\n",
                   HexStr(vchHex.begin(), vchHex.end()).c_str(),
                   GetHash().ToString().c_str());
        }
        return key.Verify(GetHash(), vchBlockSig);
    }
    else if(IsProofOfStake())
    {
        const CTxOut& txout = vtx[1].vout[1];
        if (!Solver(txout.scriptPubKey, whichType, vSolutions))
        {
            return false;
        }
        if (whichType == TX_PUBKEY)
        {
            valtype& vchPubKey = vSolutions[0];
            CKey key;
            if (!key.SetPubKey(vchPubKey))
                return false;
            if (vchBlockSig.empty())
                return false;
            return key.Verify(GetHash(), vchBlockSig);
        }
    }
    else  /* PoW */
    {
        for(unsigned int i = 0; i < vtx[0].vout.size(); i++)
        {
            const CTxOut& txout = vtx[0].vout[i];
            // NOTE(review): unlike SignBlock(), an unsolvable txout aborts
            // the whole loop here rather than continuing — confirm intended
            if (!Solver(txout.scriptPubKey, whichType, vSolutions))
            {
                return false;
            }
            if (whichType == TX_PUBKEY)
            {
                // Verify
                valtype& vchPubKey = vSolutions[0];
                CKey key;
                if (!key.SetPubKey(vchPubKey))
                    continue;
                if (vchBlockSig.empty())
                    continue;
                if(!key.Verify(GetHash(), vchBlockSig))
                    continue;
                return true;
            }
        }
    }
    return false;
}
// Return true when the data directory has room for nAdditionalBytes plus
// the configured minimum reserve; otherwise warn the user and begin a
// clean shutdown, returning false.
bool CheckDiskSpace(uint64_t nAdditionalBytes)
{
    uint64_t nBytesFree = boost::filesystem::space(GetDataDir()).available;
    // Check for nMinDiskSpace bytes (currently 50MB)
    if (nBytesFree >= (chainParams.nMinDiskSpace + nAdditionalBytes))
    {
        return true;
    }
    // insufficient space: flag shutdown and surface a warning to the user
    fShutdown = true;
    string strMessage = _("Warning: Disk space is low!");
    strMiscWarning = strMessage;
    printf("*** %s\n", strMessage.c_str());
    uiInterface.ThreadSafeMessageBox(strMessage,
                                     "Stealth",
                                     (CClientUIInterface::OK |
                                      CClientUIInterface::ICON_EXCLAMATION |
                                      CClientUIInterface::MODAL));
    StartShutdown();
    return false;
}
// Full path of the numbered block data file (blkNNNN.dat) in the data dir.
static boost::filesystem::path BlockFilePath(unsigned int nFile)
{
    return GetDataDir() / strprintf("blk%04u.dat", nFile);
}
// Open block data file nFile with the given stdio mode and, for read modes,
// seek to nBlockPos. Returns NULL for invalid file numbers or on any
// open/seek failure (the handle is closed on seek failure).
FILE* OpenBlockFile(unsigned int nFile, unsigned int nBlockPos, const char* pszMode)
{
    // file numbers start at 1; (unsigned int)-1 is the "no file" sentinel
    if ((nFile < 1) || (nFile == (unsigned int) -1))
    {
        return NULL;
    }
    FILE* pfile = fopen(BlockFilePath(nFile).string().c_str(), pszMode);
    if (pfile == NULL)
    {
        return NULL;
    }
    // append/write modes position the stream themselves; only seek when reading
    bool fWantSeek = (nBlockPos != 0) &&
                     (strchr(pszMode, 'a') == NULL) &&
                     (strchr(pszMode, 'w') == NULL);
    if (fWantSeek && (fseek(pfile, nBlockPos, SEEK_SET) != 0))
    {
        fclose(pfile);
        return NULL;
    }
    return pfile;
}
// Number of the block file currently being appended to (files start at 1).
static unsigned int nCurrentBlockFile = 1;

// Open the current block file for appending, rolling over to a new file
// when the current one approaches the 2GB stdio limit. On success returns
// the FILE* positioned at end-of-file and sets nFileRet to its number;
// returns NULL on failure, leaving nFileRet at 0.
FILE* AppendBlockFile(unsigned int& nFileRet)
{
    nFileRet = 0;
    LOOP
    {
        FILE* file = OpenBlockFile(nCurrentBlockFile, 0, "ab");
        if (!file)
            return NULL;
        if (fseek(file, 0, SEEK_END) != 0)
        {
            // close the handle on a failed seek: the original code
            // returned here without fclose(), leaking the FILE*
            fclose(file);
            return NULL;
        }
        // FAT32 file size max 4GB, fseek and ftell max 2GB, so we must stay under 2GB
        if (ftell(file) < (long)(0x7F000000 - MAX_SIZE))
        {
            nFileRet = nCurrentBlockFile;
            return file;
        }
        // current file is full; move on to the next numbered file
        fclose(file);
        nCurrentBlockFile++;
    }
}
// Load the block index from the transaction database. When the index is
// empty and fAllowNew permits, constructs and writes the genesis block
// (asserting its known merkle root and hash). Also swaps in testnet chain
// parameters when running on testnet, and resets the sync checkpoint if
// the checkpoint master public key changed.
bool LoadBlockIndex(bool fAllowNew)
{
    // these are initialized for mainnet when setting global state above
    if (fTestNet)
    {
        pchMessageStart[0] = chainParams.pchMessageStartTestNet[0];
        pchMessageStart[1] = chainParams.pchMessageStartTestNet[1];
        pchMessageStart[2] = chainParams.pchMessageStartTestNet[2];
        pchMessageStart[3] = chainParams.pchMessageStartTestNet[3];
        bnProofOfStakeLimit = chainParams.bnProofOfStakeLimitTestNet;
        bnProofOfWorkLimit = chainParams.bnProofOfWorkLimitTestNet;
        nStakeMinAge = chainParams.nStakeMinAgeTestNet;
        nStakeMaxAge = chainParams.nStakeMaxAgeTestNet;
        nModifierInterval = chainParams.MODIFIER_INTERVAL_TESTNET;
        nModifierIntervalRatio = chainParams.MODIFIER_INTERVAL_RATIO_TESTNET;
        nCoinbaseMaturity = chainParams.nCoinbaseMaturityTestNet;
        nStakeTargetSpacing = chainParams.nTargetSpacingTestNet;
    }

    //
    // Load block index
    //
    CTxDB txdb("cr+");
    if (!txdb.LoadBlockIndex())
        return false;

    //txdb.Close();

    //
    // Init with genesis block
    //
    if (mapBlockIndex.empty())
    {
        if (!fAllowNew)
            return false;

        // Genesis block
        const char* pszTimestamp = chainParams.strTimestamp.c_str();
        CTransaction txNew;
        txNew.SetTxTime(chainParams.nChainStartTime);
        txNew.vin.resize(1);
        txNew.vout.resize(1);
        txNew.vin[0].scriptSig = CScript() << chainParams.nIgma << chainParams.bnIgma <<
              vector<unsigned char>((const unsigned char*)pszTimestamp,
                                    (const unsigned char*)pszTimestamp + strlen(pszTimestamp));
        txNew.vout[0].SetEmpty();
        CBlock block;
        block.vtx.push_back(txNew);
        block.hashPrevBlock = 0;
        block.hashMerkleRoot = block.BuildMerkleTree();
        block.nVersion = 1;
        block.nTime = chainParams.nTimeGenesisBlock;
        block.nBits    = bnProofOfWorkLimit.GetCompact();
        block.nNonce = chainParams.nNonceGenesisBlock;

        // deliberately disabled ("false &&"): genesis-mining helper kept for
        // bootstrapping a new chain with different parameters
        if (false && (block.GetHash() != hashGenesisBlock))
        {
            // This will figure out a valid hash and Nonce if you're
            // creating a different genesis block:
            uint256 hashTarget = CBigNum().SetCompact(block.nBits).getuint256();
            while (block.GetHash() > hashTarget)
            {
                ++block.nNonce;
                if (block.nNonce == 0)
                {
                    printf("NONCE WRAPPED, incrementing time");
                    ++block.nTime;
                }
            }
        }

        //// debug print
        block.print();
        printf("block.GetHash() == %s\n", block.GetHash().ToString().c_str());
        printf("block.hashMerkleRoot == %s\n", block.hashMerkleRoot.ToString().c_str());
        printf("block.nTime = %u \n", block.nTime);
        printf("block.nNonce = %u \n", block.nNonce);

        // hard sanity checks: the constructed genesis must match the
        // compiled-in chain parameters exactly
        assert(block.hashMerkleRoot ==
               (fTestNet ? chainParams.hashMerkleRootTestNet :
                           chainParams.hashMerkleRootMainNet));

        assert(block.GetHash() ==
               (fTestNet ? chainParams.hashGenesisBlockTestNet :
                           chainParams.hashGenesisBlockMainNet));

        // Start new block file
        unsigned int nFile;
        unsigned int nBlockPos;
        if (!block.WriteToDisk(nFile, nBlockPos))
            return error("LoadBlockIndex() : writing genesis block to disk failed");
        if (!block.AddToBlockIndex(nFile, nBlockPos, hashGenesisBlock, pregistryMain))
            return error("LoadBlockIndex() : genesis block not accepted");

        mapBlockLookup[0] = mapBlockIndex[block.GetHash()];

        // ppcoin: initialize synchronized checkpoint
        if (!Checkpoints::WriteSyncCheckpoint(fTestNet ?
                                                 chainParams.hashGenesisBlockTestNet :
                                                 chainParams.hashGenesisBlockMainNet))
        {
            return error("LoadBlockIndex() : failed to init sync checkpoint");
        }
    }

    // ppcoin: if checkpoint master key changed must reset sync-checkpoint
    {
        CTxDB txdb;
        string strPubKey = "";
        if (!txdb.ReadCheckpointPubKey(strPubKey) || strPubKey != CSyncCheckpoint::strMasterPubKey)
        {
            // write checkpoint master key to db
            txdb.TxnBegin();
            if (!txdb.WriteCheckpointPubKey(CSyncCheckpoint::strMasterPubKey))
                return error("LoadBlockIndex() : failed to write new checkpoint master key to db");
            if (!txdb.TxnCommit())
                return error("LoadBlockIndex() : failed to commit new checkpoint master key to db");
            if ((!fTestNet) && !Checkpoints::ResetSyncCheckpoint())
                return error("LoadBlockIndex() : failed to reset sync-checkpoint");
        }
    }

    return true;
}
// Debug helper: prints the entire block tree (including side chains) as an
// ASCII diagram to the log, with the main time-chain listed first at each
// branch point. Walks the whole index and reads each block from disk.
void PrintBlockTree()
{
    // pre-compute tree structure
    map<CBlockIndex*, vector<CBlockIndex*> > mapNext;
    for (map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.begin(); mi != mapBlockIndex.end(); ++mi)
    {
        CBlockIndex* pindex = (*mi).second;
        mapNext[pindex->pprev].push_back(pindex);
    }

    // depth-first traversal; the stack pairs each node with its column
    vector<pair<int, CBlockIndex*> > vStack;
    vStack.push_back(make_pair(0, pindexGenesisBlock));

    int nPrevCol = 0;
    while (!vStack.empty())
    {
        int nCol = vStack.back().first;
        CBlockIndex* pindex = vStack.back().second;
        vStack.pop_back();

        // print split or gap
        if (nCol > nPrevCol)
        {
            for (int i = 0; i < nCol-1; i++)
                printf("| ");
            printf("|\\\n");
        }
        else if (nCol < nPrevCol)
        {
            for (int i = 0; i < nCol; i++)
                printf("| ");
            printf("|\n");
       }
        nPrevCol = nCol;

        // print columns
        for (int i = 0; i < nCol; i++)
            printf("| ");

        // print item
        CBlock block;
        block.ReadFromDisk(pindex);
        printf("%d (%u,%u) %s  %08x  %s  mint %7s  tx %" PRIszu "",
            pindex->nHeight,
            pindex->nFile,
            pindex->nBlockPos,
            block.GetHash().ToString().c_str(),
            block.nBits,
            DateTimeStrFormat("%x %H:%M:%S", block.GetBlockTime()).c_str(),
            FormatMoney(pindex->nMint).c_str(),
            block.vtx.size());

        PrintWallets(block);

        // put the main time-chain first
        vector<CBlockIndex*>& vNext = mapNext[pindex];
        for (unsigned int i = 0; i < vNext.size(); i++)
        {
            if (vNext[i]->pnext)
            {
                swap(vNext[0], vNext[i]);
                break;
            }
        }

        // iterate children
        for (unsigned int i = 0; i < vNext.size(); i++)
            vStack.push_back(make_pair(nCol+i, vNext[i]));
    }
}
// Import blocks from an external file (e.g. bootstrap.dat).
// The file is scanned for the network magic bytes (pchMessageStart);
// each record found is deserialized as a size-prefixed CBlock and handed
// to ProcessBlock(). Ownership of fileIn passes to CAutoFile, which
// closes it on scope exit. Returns true if at least one block loaded.
// May exit the process if -quitonbootstrap is set.
bool LoadExternalBlockFile(FILE* fileIn)
{
    int64_t nStart = GetTimeMillis();
    int nLoaded = 0;
    {
        LOCK(cs_main);
        try {
            CAutoFile blkdat(fileIn, SER_DISK, CLIENT_VERSION);
            unsigned int nPos = 0;
            // nPos == (unsigned int)-1 is the "end of scan" sentinel
            while (nPos != (unsigned int)-1 && blkdat.good() && !fRequestShutdown)
            {
                unsigned char pchData[65536];
                // inner loop: scan forward in 64 KiB windows for the next
                // occurrence of the full network magic
                do {
                    fseek(blkdat, nPos, SEEK_SET);
                    int nRead = fread(pchData, 1, sizeof(pchData), blkdat);
                    if (nRead <= 8)
                    {
                        // not enough bytes left for magic + size; stop scanning
                        nPos = (unsigned int)-1;
                        break;
                    }
                    void* nFind = memchr(pchData, pchMessageStart[0], nRead+1-sizeof(pchMessageStart));
                    if (nFind)
                    {
                        if (memcmp(nFind, pchMessageStart, sizeof(pchMessageStart))==0)
                        {
                            // full magic found: position nPos just past it
                            nPos += ((unsigned char*)nFind - pchData) + sizeof(pchMessageStart);
                            break;
                        }
                        // first byte matched but not the whole magic; step past it
                        nPos += ((unsigned char*)nFind - pchData) + 1;
                    }
                    else
                        // no match in this window; overlap by the magic length
                        // so a magic straddling the window edge is not missed
                        nPos += sizeof(pchData) - sizeof(pchMessageStart) + 1;
                } while(!fRequestShutdown);
                if (nPos == (unsigned int)-1)
                    break;
                fseek(blkdat, nPos, SEEK_SET);
                unsigned int nSize;
                blkdat >> nSize;
                if (nSize > 0 && nSize <= chainParams.MAX_BLOCK_SIZE)
                {
                    CBlock block;
                    blkdat >> block;
                    if (ProcessBlock(NULL, &block, true))
                    {
                        nLoaded++;
                        // advance past the 4-byte size field plus the payload;
                        // on ProcessBlock failure nPos is left unchanged and the
                        // scanner simply searches for the next magic instead
                        nPos += 4 + nSize;
                    }
                    // stop early when a maximum height was requested
                    if ((nMaxHeight > 0) && (nBestHeight >= nMaxHeight))
                    {
                        break;
                    }
                }
            }
        }
        catch (exception &e) {
            printf("%s() : Deserialize or I/O error caught during load\n",
                   __PRETTY_FUNCTION__);
        }
    }
    printf("Loaded %i blocks from external file in %" PRId64 "ms\n", nLoaded, GetTimeMillis() - nStart);
    if (GetBoolArg("-quitonbootstrap", false))
    {
        printf("Quitting after bootstrap to block %d.\n", nBestHeight);
        exit(EXIT_SUCCESS);
    }
    return nLoaded > 0;
}
//////////////////////////////////////////////////////////////////////////////
//
// CAlert
//
extern map<uint256, CAlert> mapAlerts;
extern CCriticalSection cs_mapAlerts;
// Build the highest-priority warning string for the requested channel.
// strFor must be "statusbar" or "rpc"; any other value asserts.
// Priority order: misc warnings (1000) < invalid checkpoint (3000),
// with network alerts able to override either when their priority is higher.
string GetWarnings(string strFor)
{
    string strStatusBar;
    string strRPC;
    int nPriority = 0;

    if (GetBoolArg("-testsafemode"))
        strRPC = "test";

    // Misc warnings like out of disk space and clock is wrong
    if (!strMiscWarning.empty())
    {
        nPriority = 1000;
        strStatusBar = strMiscWarning;
    }

    // ppcoin: if detected invalid checkpoint enter safe mode
    if (Checkpoints::hashInvalidCheckpoint != 0)
    {
        nPriority = 3000;
        strStatusBar = "WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.";
        strRPC = strStatusBar;
    }

    // Network alerts may raise the priority further
    {
        LOCK(cs_mapAlerts);
        BOOST_FOREACH(PAIRTYPE(const uint256, CAlert)& item, mapAlerts)
        {
            const CAlert& alert = item.second;
            bool fRelevant = alert.AppliesToMe() &&
                             (alert.nPriority > nPriority) &&
                             (alert.nID >= alert.nCancel);
            if (fRelevant)
            {
                nPriority = alert.nPriority;
                strStatusBar = alert.strStatusBar;
                if (nPriority > 1000)
                    strRPC = strStatusBar; // ppcoin: safe mode for high alert
            }
        }
    }

    if (strFor == "statusbar")
        return strStatusBar;
    if (strFor == "rpc")
        return strRPC;

    assert(!"GetWarnings() : invalid parameter");
    return "error";
}
//////////////////////////////////////////////////////////////////////////////
//
// Messages
//
// Return true when we already have the inventory item locally
// (mempool, orphan pools, block index, or transaction database).
// Unknown inventory types report "already have" so that peers cannot
// make us request data we would not know how to handle.
bool static AlreadyHave(CTxDB& txdb, const CInv& inv)
{
    if (inv.type == MSG_TX)
    {
        bool fInMemPool;
        {
            // take the mempool lock only for the existence probe
            LOCK(mempool.cs);
            fInMemPool = mempool.exists(inv.hash);
        }
        if (fInMemPool)
            return true;
        if (mapOrphanTransactions.count(inv.hash))
            return true;
        return txdb.ContainsTx(inv.hash);
    }

    if (inv.type == MSG_BLOCK)
    {
        return (mapBlockIndex.count(inv.hash) > 0) ||
               (mapOrphanBlocks.count(inv.hash) > 0);
    }

    // Don't know what it is, just say we already got one
    return true;
}
// Roll the best chain back to the block the registry records as the
// head three queues ago, disconnecting every block above it.
// Non-coinbase/non-coinstake transactions from the disconnected blocks
// are resurrected into the mempool, and the main registry is rebuilt
// from a fresh snapshot replayed to the rollback point.
// Returns false on database/disk failure; returns true (with a log
// message) when there is nothing to roll back.
bool Rollback()
{
    // rollback to 3 queues ago (3 is not arbitrary)
    uint256 hashRollback = pregistryMain->GetHashLastBlockPrev3Queue();
    printf("ROLLBACK from %s to %s\n",
           pindexBest->GetBlockHash().ToString().c_str(),
           hashRollback.ToString().c_str());
    map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashRollback);
    if (mi == mapBlockIndex.end())
    {
        return error("Rollback(): no block index to rollback to %s\n",
                     hashRollback.ToString().c_str());
    }
    CBlockIndex *pindexRollback = mi->second;
    // List of what to disconnect: walk back from the tip until the
    // rollback target (or genesis) is reached
    vector<CBlockIndex*> vDisconnect;
    for (CBlockIndex* pindex = pindexBest;
         pindex != pindexRollback;
         pindex = pindex->pprev)
    {
        vDisconnect.push_back(pindex);
        if (pindex->pprev == NULL)
        {
            break;
        }
    }
    if (vDisconnect.empty())
    {
        // this is not an error, just unusual
        printf("Rollback(): nothing to roll back to %s\n",
               hashRollback.ToString().c_str());
        return true;
    }
    printf("ROLLBACK: Disconnect %" PRIszu " blocks; %s back to %s\n",
           vDisconnect.size(),
           pindexBest->GetBlockHash().ToString().c_str(),
           hashRollback.ToString().c_str());
    CTxDB txdb;
    if (!txdb.TxnBegin())
        return error("Rollback() : TxnBegin failed");
    // Disconnect each block in the database transaction, collecting
    // memory-pool candidates as we go
    list<CTransaction> vResurrect;
    BOOST_FOREACH(CBlockIndex* pindex, vDisconnect)
    {
        CBlock block;
        if (!block.ReadFromDisk(pindex))
        {
            return error("Rollback() : ReadFromDisk for disconnect %s failed",
                         pindex->GetBlockHash().ToString().c_str());
        }
        if (!block.DisconnectBlock(txdb, pindex))
        {
            return error("Rollback() : DisconnectBlock %s failed",
                         pindex->GetBlockHash().ToString().c_str());
        }
        // Queue memory transactions to resurrect (reverse order so that
        // dependencies re-enter the mempool before their spenders)
        BOOST_REVERSE_FOREACH(const CTransaction& tx, block.vtx)
        {
            if (!(tx.IsCoinBase() || tx.IsCoinStake()) &&
                pindex->nHeight > Checkpoints::GetTotalBlocksEstimate())
            {
                vResurrect.push_front(tx);
            }
        }
    }
    if (!txdb.WriteHashBestChain(hashRollback))
    {
        return error("Rollback() : WriteHashBestChain failed");
    }
    // Make sure it's successfully written to disk before changing memory structure
    if (!txdb.TxnCommit())
    {
        return error("Rollback() : TxnCommit failed");
    }
    // Disconnect: sever pnext links and drop height lookups in memory
    BOOST_FOREACH(CBlockIndex* pindex, vDisconnect)
    {
        if (pindex->pprev)
        {
            pindex->pprev->pnext = NULL;
        }
        mapBlockLookup.erase(pindex->nHeight);
    }
    // Resurrect memory transactions that were in the disconnected branch
    BOOST_FOREACH(CTransaction& tx, vResurrect)
    {
        tx.AcceptToMemoryPool(txdb, false);
    }
    // Get snapshot for temp registry and replay it to pindexRollback
    AUTO_PTR<QPRegistry> pregistryTemp(new QPRegistry());
    if (!pregistryTemp.get())
    {
        return error("Rollback() : creating temp registry failed");
    }
    CBlockIndex *pindexCurrent;
    if (!RewindRegistry(txdb, pindexRollback, pregistryTemp.get(), pindexCurrent))
    {
        // don't fail, just take best chain possible
        printf("Rollback(): could not rewind registry\n");
    }
    // copy rather than assign to retain mutexes, etc.
    bool fExitReplay = !pregistryMain->IsInReplayMode();
    pregistryMain->Copy(pregistryTemp.get());
    if (fExitReplay)
    {
        pregistryMain->ExitReplayMode();
    }
    // Update best block in wallet (so we can detect restored wallets)
    if (!IsInitialBlockDownload())
    {
        const CBlockLocator locator(pindexCurrent);
        ::SetBestChain(locator);
    }
    // New best block: update all the global chain-tip state
    pindexBest = pindexCurrent;
    hashBestChain = pindexBest->GetBlockHash();
    pblockindexFBBHLast = NULL;
    nBestHeight = pindexBest->nHeight;
    bnBestChainTrust = pindexBest->bnChainTrust;
    nTimeBestReceived = pindexBest->nTime;
    nTransactionsUpdated++;
    printf("Rollback(): new best=%s\n"
           "   height=%d staker=%s trust=%s time=%" PRIu64 " (%s)\n",
           hashBestChain.ToString().c_str(),
           nBestHeight,
           pregistryMain->GetAliasForID(pindexBest->nStakerID).c_str(),
           bnBestChainTrust.ToString().c_str(),
           pindexBest->GetBlockTime(),
           DateTimeStrFormat("%x %H:%M:%S", pindexBest->GetBlockTime()).c_str());
    printf("ROLLBACK: done\n");
    return true;
}
// Dispatch one deserialized protocol message from peer pfrom.
// strCommand is the message name from the header; vRecv holds the payload.
// Returns false to signal a protocol violation (caller may penalize or
// disconnect the peer); returns true otherwise, including for unknown
// commands, which are ignored for extensibility.
bool static ProcessMessage(CNode* pfrom, string strCommand, CDataStream& vRecv)
{
    if (fDebug) {
        printf("ProcessMessage(): pfrom-addr %s\n",
               pfrom->addrName.c_str());
    }
    // one reusable receive-by-ip key per peer address (see "checkorder")
    static map<CService, CPubKey> mapReuseKey;
    RandAddSeedPerfmon();
    if (fDebug)
        printf("received: %s (%" PRIszu " bytes)\n", strCommand.c_str(), vRecv.size());
    // -dropmessagestest: randomly drop 1-in-N messages for testing
    if (mapArgs.count("-dropmessagestest") && GetRand(atoi(mapArgs["-dropmessagestest"])) == 0)
    {
        printf("dropmessagestest DROPPING RECV MESSAGE\n");
        return true;
    }
    // The following code has the unintended side-effect of adding
    // seed nodes twice. I'm leaving these comments to remind me
    // to deal with seed nodes' being discovered twice.
    // static const char *(*strOnionSeed)[1] = fTestNet ? strTestNetOnionSeed : strMainNetOnionSeed;
    // seed nodes have license to send version as much as they want
    // bool isOnionSeed;
    // isOnionSeed = false;
    // for (unsigned int seed_idx = 0; strOnionSeed[seed_idx][0] != NULL; seed_idx++) {
    //     if (pfrom->addr.ToString().c_str() == strOnionSeed[seed_idx][0]) {
    //         isOnionSeed = true;
    //         break;
    //     }
    // }
    // if ((strCommand == "version") && (isOnionSeed == false))
    if (strCommand == "version")
    {
        // Each connection can only send one version message
        if (pfrom->nVersion != 0)
        {
            pfrom->Misbehaving(1);
            return false;
        }
        int64_t nTime;
        CAddress addrMe;
        CAddress addrFrom;
        uint64_t nNonce = 1;
        uint64_t verification_token = 0;
        vRecv >> pfrom->nVersion >> pfrom->nServices >> nTime >> addrMe;
        if (pfrom->nVersion < GetMinPeerProtoVersion(nBestHeight))
        {
            // disconnect from peers older than this proto version
            printf("partner %s using obsolete version %i; disconnecting\n",
                   pfrom->addrName.c_str(), pfrom->nVersion);
            pfrom->fDisconnect = true;
            return false;
        }
        // optional fields, present depending on the peer's version
        if (!vRecv.empty())
            vRecv >> addrFrom >> verification_token >> nNonce;
        if (!vRecv.empty()) {
            vRecv >> pfrom->strSubVer;
            pfrom->cleanSubVer = SanitizeString(pfrom->strSubVer);
        }
        if (!vRecv.empty())
            vRecv >> pfrom->nStartingHeight;
        if (pfrom->fInbound && addrMe.IsRoutable())
        {
            pfrom->addrLocal = addrMe;
            SeenLocal(addrMe);
        }
        // Disconnect if we connected to ourself
        if (nNonce == nLocalHostNonce && nNonce > 1)
        {
            printf("connected to self at %s, disconnecting\n", pfrom->addrName.c_str());
            pfrom->fDisconnect = true;
            return true;
        }
        // ppcoin: record my external IP reported by peer
        if (addrFrom.IsRoutable() && addrMe.IsRoutable())
            addrSeenByPeer = addrMe;
        // Be shy and don't send version until we hear
        if (fDebug) {
            printf("ProcessMessage(): %s\n", pfrom->addrName.c_str());
        }
        if (pfrom->fInbound)
            pfrom->PushVersion();
        pfrom->fClient = !(pfrom->nServices & NODE_NETWORK);
        AddTimeData(pfrom->addr, nTime);
        // Change version
        pfrom->PushMessage("verack");
        pfrom->vSend.SetVersion(min(pfrom->nVersion, PROTOCOL_VERSION));
        if (!pfrom->fInbound)
        {
            // Advertise our address
            if (!IsInitialBlockDownload())
            {
                CAddress addr = GetLocalAddress(&pfrom->addr);
                if (addr.IsRoutable())
                    pfrom->PushAddress(addr);
            }
            // Get recent addresses
            if (pfrom->fOneShot || pfrom->nVersion >= CADDR_TIME_VERSION || addrman.size() < 1000)
            {
                pfrom->PushMessage("getaddr");
                pfrom->fGetAddr = true;
            }
            addrman.Good(pfrom->addr);
        } else {
            // inbound: learn the peer's self-reported address and check
            // its verification token before marking it good
            addrFrom.SetPort(GetDefaultPort());
            pfrom->addr = addrFrom;
            if (CNode::IsBanned(addrFrom))
            {
                printf("connection from %s dropped (banned)\n", addrFrom.ToString().c_str());
                pfrom->fDisconnect = true;
                return true;
            }
            if (addrman.CheckVerificationToken(addrFrom, verification_token)) {
                printf("connection from %s verified\n", addrFrom.ToString().c_str());
                pfrom->fVerified = true;
                addrman.Good(pfrom->addr);
            } else {
                printf("couldn't verify %s\n", addrFrom.ToString().c_str());
                addrman.SetReconnectToken(addrFrom, verification_token);
            }
        }
        // Ask the first connected node for block updates
        static int nAskedForBlocks = 0;
        if (!pfrom->fClient && !pfrom->fOneShot &&
            (pfrom->nStartingHeight > (nBestHeight - 144)) &&
            (pfrom->nVersion < NOBLKS_VERSION_START ||
             pfrom->nVersion >= NOBLKS_VERSION_END) &&
            (nAskedForBlocks < 1 || vNodes.size() <= 1))
        {
            nAskedForBlocks++;
            pfrom->PushGetBlocks(pindexBest, uint256(0));
        }
        // Relay alerts
        {
            LOCK(cs_mapAlerts);
            BOOST_FOREACH(PAIRTYPE(const uint256, CAlert)& item, mapAlerts)
                item.second.RelayTo(pfrom);
        }
        // ppcoin: relay sync-checkpoint
        {
            LOCK(Checkpoints::cs_hashSyncCheckpoint);
            if (!Checkpoints::checkpointMessage.IsNull())
                Checkpoints::checkpointMessage.RelayTo(pfrom);
        }
        pfrom->fSuccessfullyConnected = true;
        printf("receive version message: %s: version %d, blocks=%d, us=%s, them=%s, peer=%s, verification=%" PRId64 "\n", pfrom->cleanSubVer.c_str(), pfrom->nVersion, pfrom->nStartingHeight, addrMe.ToString().c_str(), addrFrom.ToString().c_str(), pfrom->addrName.c_str(), verification_token);
        cPeerBlockCounts.input(pfrom->nStartingHeight);
        // ppcoin: ask for pending sync-checkpoint if any
        if (!IsInitialBlockDownload())
            Checkpoints::AskForPendingSyncCheckpoint(pfrom);
    }
    else if (pfrom->nVersion == 0)
    {
        // Must have a version message before anything else
        pfrom->Misbehaving(1);
        return false;
    }
    else if (strCommand == "verack")
    {
        pfrom->vRecv.SetVersion(min(pfrom->nVersion, PROTOCOL_VERSION));
    }
    else if (strCommand == "addr")
    {
        vector<CAddress> vAddr;
        vRecv >> vAddr;
        // Don't want addr from older versions unless seeding
        if (pfrom->nVersion < CADDR_TIME_VERSION && addrman.size() > 1000)
            return true;
        if (vAddr.size() > 1000)
        {
            pfrom->Misbehaving(20);
            return error("message addr size() = %" PRIszu "", vAddr.size());
        }
        // Store the new addresses
        vector<CAddress> vAddrOk;
        int64_t nNow = GetAdjustedTime();
        int64_t nSince = nNow - 10 * 60;
        BOOST_FOREACH(CAddress& addr, vAddr)
        {
            if (fShutdown)
                return true;
            // clamp implausible timestamps to "5 days ago"
            if (addr.nTime <= 100000000 || addr.nTime > nNow + 10 * 60)
                addr.nTime = nNow - 5 * 24 * 60 * 60;
            pfrom->AddAddressKnown(addr);
            bool fReachable = IsReachable(addr);
            if (addr.nTime > nSince && !pfrom->fGetAddr && vAddr.size() <= 10 && addr.IsRoutable())
            {
                // Relay to a limited number of other nodes
                {
                    LOCK(cs_vNodes);
                    // Use deterministic randomness to send to the same nodes for 24 hours
                    // at a time so the setAddrKnowns of the chosen nodes prevent repeats
                    static uint256 hashSalt;
                    if (hashSalt == 0)
                        hashSalt = GetRandHash();
                    uint64_t hashAddr = addr.GetHash();
                    uint256 hashRand = hashSalt ^ (hashAddr<<32) ^ ((GetTime()+hashAddr)/(24*60*60));
                    hashRand = Hash(BEGIN(hashRand), END(hashRand));
                    multimap<uint256, CNode*> mapMix;
                    BOOST_FOREACH(CNode* pnode, vNodes)
                    {
                        if (pnode->nVersion < CADDR_TIME_VERSION)
                            continue;
                        // NOTE(review): only sizeof(unsigned int) bytes of the
                        // pointer are mixed in — presumably sufficient for this
                        // randomization (inherited from upstream Bitcoin), but
                        // it truncates pointers on 64-bit platforms.
                        unsigned int nPointer;
                        memcpy(&nPointer, &pnode, sizeof(nPointer));
                        uint256 hashKey = hashRand ^ nPointer;
                        hashKey = Hash(BEGIN(hashKey), END(hashKey));
                        mapMix.insert(make_pair(hashKey, pnode));
                    }
                    int nRelayNodes = fReachable ? 2 : 1; // limited relaying of addresses outside our network(s)
                    for (multimap<uint256, CNode*>::iterator mi = mapMix.begin(); mi != mapMix.end() && nRelayNodes-- > 0; ++mi)
                        ((*mi).second)->PushAddress(addr);
                }
            }
            // Do not store addresses outside our network
            if (fReachable)
                vAddrOk.push_back(addr);
        }
        addrman.Add(vAddrOk, pfrom->addr, 2 * 60 * 60);
        if (vAddr.size() < 1000)
            pfrom->fGetAddr = false;
        if (pfrom->fOneShot)
            pfrom->fDisconnect = true;
    }
    else if (strCommand == "inv")
    {
        if (fDebugNet)
        {
            printf("ProcessMessage(): inv\n");
        }
        vector<CInv> vInv;
        vRecv >> vInv;
        if (vInv.size() > chainParams.MAX_INV_SZ)
        {
            pfrom->Misbehaving(20);
            return error("message inv size() = %" PRIszu "", vInv.size());
        }
        // find last block in inv vector (searched from the back)
        unsigned int nLastBlock = (unsigned int)(-1);
        for (unsigned int nInv = 0; nInv < vInv.size(); nInv++)
        {
            if (vInv[vInv.size() - 1 - nInv].type == MSG_BLOCK)
            {
                nLastBlock = vInv.size() - 1 - nInv;
                break;
            }
        }
        CTxDB txdb("r");
        for (unsigned int nInv = 0; nInv < vInv.size(); nInv++)
        {
            const CInv &inv = vInv[nInv];
            if (fShutdown)
            {
                return true;
            }
            pfrom->AddInventoryKnown(inv);
            bool fAlreadyHave = AlreadyHave(txdb, inv);
            if (fDebugNet)
            {
                printf("  got inventory: %s  %s\n",
                       inv.ToString().c_str(),
                       fAlreadyHave ? "have" : "new");
            }
            if (!fAlreadyHave)
            {
                if (fDebugNet)
                {
                    printf("   not already have %s\n", inv.ToString().c_str());
                }
                pfrom->AskFor(inv);
            }
            else if (inv.type == MSG_BLOCK && mapOrphanBlocks.count(inv.hash))
            {
                // known orphan: ask for its chain back to the orphan root
                if (fDebugNet)
                {
                    printf("   map orphans count %s inv.hash\n",
                           inv.ToString().c_str());
                }
                pfrom->PushGetBlocks(pindexBest, GetOrphanRoot(mapOrphanBlocks[inv.hash]));
            }
            else if (nInv == nLastBlock)
            {
                if (fDebugNet)
                {
                    printf("   inv is last block %s\n", inv.ToString().c_str());
                }
                // In case we are on a very long side-chain, it is possible that we already have
                // the last block in an inv bundle sent in response to getblocks. Try to detect
                // this situation and push another getblocks to continue.
                pfrom->PushGetBlocks(mapBlockIndex[inv.hash], uint256(0));
                if (fDebugNet)
                {
                    printf("   force request: %s\n", inv.ToString().c_str());
                }
            }
            // Track requests for our stuff
            Inventory(inv.hash);
        }
    }
    else if (strCommand == "getdata")
    {
        if (fDebugNet)
        {
            printf("received getdata from %s\n",
                   pfrom->addrName.c_str());
        }
        vector<CInv> vInv;
        vRecv >> vInv;
        if (vInv.size() > chainParams.MAX_INV_SZ)
        {
            pfrom->Misbehaving(20);
            return error("message getdata size() = %" PRIszu "", vInv.size());
        }
        if (fDebugNet || (vInv.size() != 1))
        {
            printf("  (%" PRIszu " invsz)\n", vInv.size());
        }
        BOOST_FOREACH(const CInv& inv, vInv)
        {
            if (fShutdown)
                return true;
            if (fDebugNet || (vInv.size() == 1))
            {
                printf("  for: %s\n", inv.ToString().c_str());
            }
            if (inv.type == MSG_BLOCK)
            {
                // Send block from disk
                map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(inv.hash);
                if (mi != mapBlockIndex.end())
                {
                    CBlock block;
                    block.ReadFromDisk((*mi).second);
                    if (fDebugNet)
                    {
                        printf("   pushing block message to %s\n    %s\n",
                               pfrom->addrName.c_str(),
                               block.GetHash().ToString().c_str());
                    }
                    pfrom->PushMessage("block", block);
                    // Trigger them to send a getblocks request for the next batch of inventory
                    if (inv.hash == pfrom->hashContinue)
                    {
                        // ppcoin: send latest proof-of-work block to allow the
                        // download node to accept as orphan (proof-of-stake
                        // block might be rejected by stake connection check)
                        // NOTE: this vInv intentionally shadows the outer vInv
                        vector<CInv> vInv;
                        vInv.push_back(CInv(MSG_BLOCK, GetLastBlockIndex(pindexBest, false)->GetBlockHash()));
                        pfrom->PushMessage("inv", vInv);
                        pfrom->hashContinue = 0;
                    }
                }
            }
            else if (inv.IsKnownType())
            {
                // Send stream from relay memory
                bool pushed = false;
                {
                    LOCK(cs_mapRelay);
                    map<CInv, CDataStream>::iterator mi = mapRelay.find(inv);
                    if (mi != mapRelay.end())
                    {
                        if (inv.type == MSG_TX)
                        {
                            // don't act as a transaction relay if we are behind
                            if (((GetFork(nBestHeight) >= XST_FORKQPOS) &&
                                 (pregistryMain->IsInReplayMode())) ||
                                IsInitialBlockDownload())
                            {
                                Inventory(inv.hash);
                                continue;
                            }
                            // NOTE: this vRecv intentionally shadows the parameter
                            CDataStream vRecv((*mi).second);
                            CTransaction tx;
                            vRecv >> tx;
                            // QPoS transactions are state-dependent, so must
                            // be checked against the best state of the
                            // blockchain before they are relayed.
                            if (tx.IsQPoSTx())
                            {
                                if (!mempool.exists(inv.hash))
                                {
                                    // If it doesn't get into mempool, drop it.
                                    // If it does get in, push it from the mempool.
                                    CTxDB txdb("r");
                                    if (!tx.AcceptToMemoryPool(txdb, true))
                                    {
                                        Inventory(inv.hash);
                                        continue;
                                    }
                                }
                            }
                            else
                            {
                                pfrom->PushMessage(inv.GetCommand(), (*mi).second);
                                pushed = true;
                            }
                        }
                        else
                        {
                            pfrom->PushMessage(inv.GetCommand(), (*mi).second);
                            pushed = true;
                        }
                    }
                }
                // fall back to the mempool when the relay map didn't satisfy
                // the request; again, don't act as a transaction relay if we
                // are behind
                if ((!pushed) &&
                    (inv.type == MSG_TX) &&
                    ((GetFork(nBestHeight) < XST_FORKQPOS) ||
                     (!(pregistryMain->IsInReplayMode()))) &&
                    (!IsInitialBlockDownload()))
                {
                    LOCK(mempool.cs);
                    if (mempool.exists(inv.hash))
                    {
                        CTransaction tx = mempool.lookup(inv.hash);
                        // QPoS transactions are checked by
                        // seeing if they can get accepted into the mempool
                        if (tx.IsQPoSTx())
                        {
                            mempool.remove(tx);
                            CTxDB txdb("r");
                            if (!tx.AcceptToMemoryPool(txdb, true))
                            {
                                Inventory(inv.hash);
                                continue;
                            }
                        }
                        CDataStream ss(SER_NETWORK, PROTOCOL_VERSION);
                        ss.reserve(1000);
                        ss << tx;
                        pfrom->PushMessage("tx", ss);
                    }
                }
            }
            // Track requests for our stuff
            Inventory(inv.hash);
        }
    }
    else if (strCommand == "getblocks")
    {
        if (pfrom->nVersion < GetMinPeerProtoVersion(nBestHeight))
        {
            printf("partner %s using obsolete version %i; disconnecting\n", pfrom->addrName.c_str(), pfrom->nVersion);
            pfrom->fDisconnect = true;
            return false;
        }
        CBlockLocator locator;
        uint256 hashStop;
        vRecv >> locator >> hashStop;
        // Find the last block the caller has in the main chain
        CBlockIndex* pindex = locator.GetBlockIndex();
        // Send the rest of the chain
        if (pindex)
            pindex = pindex->pnext;
        int nLimit = chainParams.GETBLOCKS_LIMIT;
        if (fDebugNet)
        {
            printf("getblocks %s: %d to %s limit %d\n",
                   pfrom->addrName.c_str(),
                   (pindex ? pindex->nHeight : -1),
                   hashStop.ToString().c_str(), nLimit);
        }
        for (; pindex; pindex = pindex->pnext)
        {
            if (pindex->GetBlockHash() == hashStop)
            {
                if (fDebugNet)
                {
                    printf("  getblocks stopping at %d\n    %s\n",
                           pindex->nHeight,
                           pindex->GetBlockHash().ToString().c_str());
                }
                // ppcoin: tell downloading node about the latest block if it's
                // without risk of being rejected due to stake connection check
                if ((GetFork(pindexBest->nHeight) >= XST_FORKQPOS) &&
                    (hashStop != hashBestChain) &&
                    (pindex->GetBlockTime() + nStakeMinAge > pindexBest->GetBlockTime()))
                {
                    if (!pfrom->PushInventory(CInv(MSG_BLOCK, hashBestChain)) && fDebugNet)
                    {
                        printf("  couldn't push best chain inventory %d\n    %s\n",
                               pindex->nHeight,
                               pindex->GetBlockHash().ToString().c_str());
                    }
                }
                break;
            }
            if (!pfrom->PushInventory(CInv(MSG_BLOCK, pindex->GetBlockHash())))
            {
                printf("couldn't push inventory %d to %s: stopping\n    %s\n",
                       pindex->nHeight,
                       pfrom->addrName.c_str(),
                       pindex->GetBlockHash().ToString().c_str());
                // When this block is requested, we'll send an inv that'll make them
                // getblocks the next batch of inventory.
                pfrom->hashContinue = pindex->GetBlockHash();
                break;
            }
            if (--nLimit <= 0)
            {
                if (fDebugNet)
                {
                    printf("  getblocks stopping at limit %d %s\n",
                           pindex->nHeight,
                           pindex->GetBlockHash().ToString().c_str());
                }
                // When this block is requested, we'll send an inv that'll make them
                // getblocks the next batch of inventory.
                pfrom->hashContinue = pindex->GetBlockHash();
                break;
            }
        }
    }
    else if (strCommand == "checkpoint")
    {
        CSyncCheckpoint checkpoint;
        vRecv >> checkpoint;
        if (checkpoint.ProcessSyncCheckpoint(pfrom))
        {
            // Relay
            pfrom->hashCheckpointKnown = checkpoint.hashCheckpoint;
            LOCK(cs_vNodes);
            BOOST_FOREACH(CNode* pnode, vNodes)
                checkpoint.RelayTo(pnode);
        }
    }
    else if (strCommand == "getheaders")
    {
        CBlockLocator locator;
        uint256 hashStop;
        vRecv >> locator >> hashStop;
        CBlockIndex* pindex = NULL;
        if (locator.IsNull())
        {
            // If locator is null, return the hashStop block
            map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashStop);
            if (mi == mapBlockIndex.end())
                return true;
            pindex = (*mi).second;
        }
        else
        {
            // Find the last block the caller has in the main chain
            pindex = locator.GetBlockIndex();
            if (pindex)
                pindex = pindex->pnext;
        }
        vector<CBlock> vHeaders;
        int nLimit = 2000;
        printf("getheaders %d to %s\n", (pindex ? pindex->nHeight : -1), hashStop.ToString().c_str());
        for (; pindex; pindex = pindex->pnext)
        {
            vHeaders.push_back(pindex->GetBlockHeader());
            if (--nLimit <= 0 || pindex->GetBlockHash() == hashStop)
                break;
        }
        pfrom->PushMessage("headers", vHeaders);
    }
    else if (strCommand == "tx")
    {
        vector<uint256> vWorkQueue;
        vector<uint256> vEraseQueue;
        CDataStream vMsg(vRecv);
        CTxDB txdb("r");
        CTransaction tx;
        vRecv >> tx;
        CInv inv(MSG_TX, tx.GetHash());
        pfrom->AddInventoryKnown(inv);
        bool fMissingInputs = false;
        if (tx.AcceptToMemoryPool(txdb, true, &fMissingInputs))
        {
            SyncWithWallets(tx, NULL, true);
            RelayTransaction(tx, inv.hash);
            mapAlreadyAskedFor.erase(inv);
            vWorkQueue.push_back(inv.hash);
            vEraseQueue.push_back(inv.hash);
            // Recursively process any orphan transactions that depended on this one
            // (iterative work queue; vWorkQueue may grow during the loop)
            for (unsigned int i = 0; i < vWorkQueue.size(); i++)
            {
                uint256 hashPrev = vWorkQueue[i];
                for (map<uint256, CDataStream*>::iterator mi = mapOrphanTransactionsByPrev[hashPrev].begin();
                     mi != mapOrphanTransactionsByPrev[hashPrev].end();
                     ++mi)
                {
                    const CDataStream& vMsg = *((*mi).second);
                    CTransaction tx;
                    CDataStream(vMsg) >> tx;
                    CInv inv(MSG_TX, tx.GetHash());
                    bool fMissingInputs2 = false;
                    if (tx.AcceptToMemoryPool(txdb, true, &fMissingInputs2))
                    {
                        printf("   accepted orphan tx %s\n", inv.hash.ToString().c_str());
                        SyncWithWallets(tx, NULL, true);
                        RelayMessage(inv, vMsg);
                        mapAlreadyAskedFor.erase(inv);
                        vWorkQueue.push_back(inv.hash);
                        vEraseQueue.push_back(inv.hash);
                    }
                    else if (!fMissingInputs2)
                    {
                        // invalid orphan
                        vEraseQueue.push_back(inv.hash);
                        printf("   removed invalid orphan tx %s\n", inv.hash.ToString().c_str());
                    }
                }
            }
            BOOST_FOREACH(uint256 hash, vEraseQueue)
                EraseOrphanTx(hash);
        }
        else if (fMissingInputs)
        {
            AddOrphanTx(vMsg);
            // DoS prevention: do not allow mapOrphanTransactions to grow unbounded
            unsigned int nEvicted = LimitOrphanTxSize(
                                        chainParams.MAX_ORPHAN_TRANSACTIONS);
            if (nEvicted > 0)
                printf("mapOrphan overflow, removed %u tx\n", nEvicted);
        }
        if (tx.nDoS) pfrom->Misbehaving(tx.nDoS);
    }
    else if ((strCommand == "block") &&
             ((nMaxHeight <= 0) || (nBestHeight < nMaxHeight)))
    {
        CBlock block;
        vRecv >> block;
        if (fDebugNet)
        {
            printf("Received block %s from %s\n",
                   block.GetHash().ToString().c_str(),
                   pfrom->addrName.c_str());
        }
        CInv inv(MSG_BLOCK, block.GetHash());
        pfrom->AddInventoryKnown(inv);
        // fProcessOK is assigned on every reachable path below
        // (full process, nonsequential reorg, or check-only)
        bool fProcessOK;
        bool fCheck = true;
        if (block.hashPrevBlock == hashBestChain)
        {
            // sequential block: process fully
            printf("Processing block fully: %s\n"
                   "  best=%s\n"
                   "  prev=%s, fork=%u (FORKQPOS=%u)\n",
                   block.GetHash().ToString().c_str(),
                   hashBestChain.ToString().c_str(),
                   block.hashPrevBlock.ToString().c_str(),
                   GetFork(block.nHeight), XST_FORKQPOS);
            fProcessOK = ProcessBlock(pfrom, &block);
            fCheck = false;
        }
        else if (mapBlockIndex.count(block.hashPrevBlock))
        {
            CBlockIndex* pindexPrev = mapBlockIndex[block.hashPrevBlock];
            printf("Previous of new nonsequential block %d is known:\n"
                   "  Prev: %s\n  This: %s\n",
                   pindexPrev->nHeight + 1,
                   pindexPrev->GetBlockHash().ToString().c_str(),
                   block.GetHash().ToString().c_str());
            // walk back from the known predecessor to the height of our tip
            CBlockIndex* pindex = pindexPrev;
            while (pindex && (pindex->nHeight > pindexBest->nHeight))
            {
                pindex = pindex->pprev;
            }
            if (pindex && (pindex == pindexBest))
            {
                // The chain from pindexBest to this block has somehow avoided
                // being properly linked, but since it builds on the
                // best chain, we assume it is best and set it as such.
                printf("Setting best chain for prev of nonsequential block:\n"
                       "  this=%s\n"
                       "  best=%s\n"
                       "  prev=%s, fork=%u (FORKQPOS=%u)\n",
                       block.GetHash().ToString().c_str(),
                       hashBestChain.ToString().c_str(),
                       block.hashPrevBlock.ToString().c_str(),
                       GetFork(block.nHeight), XST_FORKQPOS);
                CTxDB txdb;
                AUTO_PTR<QPRegistry> pregistryTemp(new QPRegistry(pregistryMain));
                if (!pregistryTemp.get())
                {
                    return error("ProcessMessage(): could not create temp registry\n");
                }
                pregistryTemp->CheckSynced();
                bool fReorganized;
                CBlock blockPrev;
                if (!blockPrev.ReadFromDisk(pindexPrev))
                {
                    // NOTE(review): typo in this message — "ProcessMessag"
                    // should read "ProcessMessage" (runtime string, left as-is)
                    return error("ProcessMessag(): ReadFromDisk failed");
                }
                if (blockPrev.SetBestChain(txdb,
                                           pindexPrev,
                                           pregistryTemp.get(),
                                           fReorganized))
                {
                    printf("Chain %s reorganized.\n",
                           fReorganized ? "was" : "was not");
                    // copy rather than assign to retain mutexes, etc.
                    bool fExitReplay = !pregistryMain->IsInReplayMode();
                    pregistryMain->Copy(pregistryTemp.get());
                    if (fExitReplay)
                    {
                        pregistryMain->ExitReplayMode();
                    }
                    fProcessOK = ProcessBlock(pfrom, &block);
                }
                else
                {
                    const QPQueue* queue = pregistryTemp->GetQueue();
                    printf("ProcessMessage(): could not set best chain with block\n");
                    printf("   staker: %u\n   Queue: %s\n",
                           block.nStakerID,
                           queue->ToString().c_str());
                    return false;
                }
                fCheck = false;
            }
        }
        if (fCheck)
        {
            // Check nonsequential blocks as much as possible
            // to mitigate certain types of spam attacks.
            // A qPoS block can only fully validate if the registry is synced
            // with the block's immediate predecessor.
            // This full validation happens uppon connecting the block.
            printf("Processing block just check:\n"
                   "  This: %d %s\n  Best: %d %s\n",
                   block.nHeight, block.GetHash().ToString().c_str(),
                   nBestHeight, hashBestChain.ToString().c_str());
            fProcessOK = ProcessBlock(pfrom, &block, false, true);
        }
        // TODO: should this be recursive somehow?
        if (fProcessOK)
        {
            mapAlreadyAskedFor.erase(inv);
        }
        else if (pregistryMain->ShouldRollback())
        {
            // a failed block plus a registry that wants rollback: roll back
            // and retry the block once
            if (Rollback())
            {
                // going to reprocess block so reset its nDoS
                block.nDoS = 0;
                if ((GetFork(block.nHeight) < XST_FORKQPOS) ||
                    (block.nHeight == (nBestHeight + 1)))
                {
                    printf("Processing block fully (should rollback)\n");
                    fProcessOK = ProcessBlock(pfrom, &block);
                }
                else
                {
                    printf("Processing block just check (should rollback)\n");
                    fProcessOK = ProcessBlock(pfrom, &block, false, true);
                }
                if (fProcessOK)
                {
                    mapAlreadyAskedFor.erase(inv);
                }
            }
            else
            {
                // this should never happen
                throw runtime_error(
                        "ProcessMessage(): TSNH ERROR: couldn't roll back");
            }
        }
        if (block.nDoS)
        {
            pfrom->Misbehaving(block.nDoS);
        }
        if (pfrom->nOrphans > 2 * chainParams.GETBLOCKS_LIMIT)
        {
            printf("Node has exceeded max init download orphans.\n");
            pfrom->Misbehaving(100);
        }
    }
    else if (strCommand == "getaddr")
    {
        pfrom->vAddrToSend.clear();
        vector<CAddress> vAddr = addrman.GetAddr();
        BOOST_FOREACH(const CAddress &addr, vAddr)
            pfrom->PushAddress(addr);
    }
    else if (strCommand == "mempool")
    {
        // answer with an inv of mempool contents, capped at MAX_INV_SZ
        vector<uint256> vtxid;
        mempool.queryHashes(vtxid);
        vector<CInv> vInv;
        for (unsigned int i = 0; i < vtxid.size(); i++) {
            CInv inv(MSG_TX, vtxid[i]);
            vInv.push_back(inv);
            if (i == (chainParams.MAX_INV_SZ - 1))
                break;
        }
        if (vInv.size() > 0)
            pfrom->PushMessage("inv", vInv);
    }
    else if (strCommand == "checkorder")
    {
        uint256 hashReply;
        vRecv >> hashReply;
        if (!GetBoolArg("-allowreceivebyip"))
        {
            pfrom->PushMessage("reply", hashReply, (int)2, string(""));
            return true;
        }
        CWalletTx order;
        vRecv >> order;
        /// we have a chance to check the order here
        // Keep giving the same key to the same ip until they use it
        if (!mapReuseKey.count(pfrom->addr))
            pwalletMain->GetKeyFromPool(mapReuseKey[pfrom->addr], true);
        // Send back approval of order and pubkey to use
        CScript scriptPubKey;
        scriptPubKey << mapReuseKey[pfrom->addr] << OP_CHECKSIG;
        pfrom->PushMessage("reply", hashReply, (int)0, scriptPubKey);
    }
    else if (strCommand == "reply")
    {
        uint256 hashReply;
        vRecv >> hashReply;
        CRequestTracker tracker;
        {
            LOCK(pfrom->cs_mapRequests);
            map<uint256, CRequestTracker>::iterator mi = pfrom->mapRequests.find(hashReply);
            if (mi != pfrom->mapRequests.end())
            {
                tracker = (*mi).second;
                pfrom->mapRequests.erase(mi);
            }
        }
        if (!tracker.IsNull())
            tracker.fn(tracker.param1, vRecv);
    }
    else if (strCommand == "ping")
    {
        if (pfrom->nVersion > BIP0031_VERSION)
        {
            uint64_t nonce = 0;
            vRecv >> nonce;
            // Echo the message back with the nonce. This allows for two useful features:
            //
            // 1) A remote node can quickly check if the connection is operational
            // 2) Remote nodes can measure the latency of the network thread. If this node
            //    is overloaded it won't respond to pings quickly and the remote node can
            //    avoid sending us more work, like chain download requests.
            //
            // The nonce stops the remote getting confused between different pings: without
            // it, if the remote node sends a ping once per second and this node takes 5
            // seconds to respond to each, the 5th ping the remote sends would appear to
            // return very quickly.
            pfrom->PushMessage("pong", nonce);
        }
    }
    else if (strCommand == "alert")
    {
        CAlert alert;
        vRecv >> alert;
        uint256 alertHash = alert.GetHash();
        if (pfrom->setKnown.count(alertHash) == 0)
        {
            if (alert.ProcessAlert())
            {
                // Relay
                pfrom->setKnown.insert(alertHash);
                {
                    LOCK(cs_vNodes);
                    BOOST_FOREACH(CNode* pnode, vNodes)
                        alert.RelayTo(pnode);
                }
            }
            else {
                // Small DoS penalty so peers that send us lots of
                // duplicate/expired/invalid-signature/whatever alerts
                // eventually get banned.
                // This isn't a Misbehaving(100) (immediate ban) because the
                // peer might be an older or different implementation with
                // a different signature key, etc.
                pfrom->Misbehaving(10);
            }
        }
    }
    else
    {
        // Ignore unknown commands for extensibility
    }
    // Update the last seen time for this node's address
    if (pfrom->fNetworkNode)
    {
        if (strCommand == "version" || strCommand == "addr" || strCommand == "inv" || strCommand == "getdata" || strCommand == "ping")
            AddressCurrentlyConnected(pfrom->addr);
    }
    return true;
}
// Drain complete, well-formed messages from pfrom's receive buffer and
// dispatch each to ProcessMessage() under cs_main. Partial trailing data
// (split header or body) is left in / rewound into the buffer until more
// bytes arrive. Always returns true: a message that fails validation or
// processing is logged and skipped rather than dropping the connection.
bool ProcessMessages(CNode* pfrom)
{
    // if (fDebug) {
    //     printf("ProcessMessages: %s\n",
    //            pfrom->addr.ToString().c_str());
    // }
    CDataStream& vRecv = pfrom->vRecv;
    if (vRecv.empty()) {
        // if (fDebug)
        // {
        //     printf("ProcessMessages: %s [empty]\n",
        //            pfrom->addr.ToString().c_str());
        // }
        return true;
    }
    //
    // Message format
    //  (4) message start
    //  (12) command
    //  (4) size
    //  (4) checksum
    //  (x) data
    //
    LOOP
    {
        // Don't bother if send buffer is too full to respond anyway
        if (pfrom->vSend.size() >= SendBufferSize())
            break;

        // Scan for message start
        CDataStream::iterator pstart = search(vRecv.begin(), vRecv.end(), BEGIN(pchMessageStart), END(pchMessageStart));
        int nHeaderSize = vRecv.GetSerializeSize(CMessageHeader());
        if (vRecv.end() - pstart < nHeaderSize)
        {
            // No full header after the last magic; keep only the final
            // nHeaderSize bytes so a header split across reads can complete.
            if ((int)vRecv.size() > nHeaderSize)
            {
                printf("\n\nPROCESSMESSAGE MESSAGESTART NOT FOUND\n\n");
                vRecv.erase(vRecv.begin(), vRecv.end() - nHeaderSize);
            }
            break;
        }
        if (pstart - vRecv.begin() > 0)
            printf("\n\nPROCESSMESSAGE SKIPPED %" PRIpdd " BYTES\n\n", pstart - vRecv.begin());
        // Discard garbage bytes preceding the message-start magic
        vRecv.erase(vRecv.begin(), pstart);

        // Read header; keep a copy so we can rewind if the body is short
        vector<char> vHeaderSave(vRecv.begin(), vRecv.begin() + nHeaderSize);
        CMessageHeader hdr;
        vRecv >> hdr;
        if (!hdr.IsValid())
        {
            printf("\n\nPROCESSMESSAGE: ERRORS IN HEADER %s\n\n\n", hdr.GetCommand().c_str());
            continue;
        }
        string strCommand = hdr.GetCommand();
        if (fDebug) {
            printf("ProcessMessages: %s [%s]\n",
                   pfrom->addrName.c_str(), strCommand.c_str());
        }

        // Message size
        unsigned int nMessageSize = hdr.nMessageSize;
        if (nMessageSize > MAX_SIZE)
        {
            printf("ProcessMessages(%s, %u bytes) : nMessageSize > MAX_SIZE\n", strCommand.c_str(), nMessageSize);
            continue;
        }
        if (nMessageSize > vRecv.size())
        {
            // Rewind and wait for rest of message
            vRecv.insert(vRecv.begin(), vHeaderSave.begin(), vHeaderSave.end());
            break;
        }

        // Checksum: first 4 bytes of the double-SHA256 of the payload
        uint256 hash = Hash(vRecv.begin(), vRecv.begin() + nMessageSize);
        unsigned int nChecksum = 0;
        memcpy(&nChecksum, &hash, sizeof(nChecksum));
        if (nChecksum != hdr.nChecksum)
        {
            printf("ProcessMessages(%s, %u bytes) : CHECKSUM ERROR nChecksum=%08x hdr.nChecksum=%08x\n",
                   strCommand.c_str(), nMessageSize, nChecksum, hdr.nChecksum);
            continue;
        }

        // Copy message to its own buffer
        CDataStream vMsg(vRecv.begin(), vRecv.begin() + nMessageSize, vRecv.nType, vRecv.nVersion);
        try
        {
            vRecv.ignore(nMessageSize);
        }
        catch (ios_base::failure& e)
        {
            // can only be end of data, should cause failure in processing below
            printf("ProcessMessages() : Exception '%s' caught, caused by unexpectedly reaching end of buffer\n", e.what());
        }

        // Process message
        bool fRet = false;
        try
        {
            {
                LOCK(cs_main);
                fRet = ProcessMessage(pfrom, strCommand, vMsg);
            }
            if (fShutdown)
                return true;
        }
        catch (ios_base::failure& e)
        {
            if (strstr(e.what(), "end of data"))
            {
                // Allow exceptions from under-length message on vRecv
                printf("ProcessMessages(%s, %u bytes) : Exception '%s' caught, normally caused by a message being shorter than its stated length\n", strCommand.c_str(), nMessageSize, e.what());
            }
            else if (strstr(e.what(), "size too large"))
            {
                // Allow exceptions from over-long size
                printf("ProcessMessages(%s, %u bytes) : Exception '%s' caught\n", strCommand.c_str(), nMessageSize, e.what());
            }
            else
            {
                PrintExceptionContinue(&e, "ProcessMessages()");
            }
        }
        catch (exception& e) {
            PrintExceptionContinue(&e, "ProcessMessages()");
        } catch (...) {
            PrintExceptionContinue(NULL, "ProcessMessages()");
        }
        if (!fRet)
            printf("ProcessMessage(%s, %u bytes) FAILED\n", strCommand.c_str(), nMessageSize);
    }

    // Reclaim buffer space consumed by the processed messages
    vRecv.Compact();
    return true;
}
// Flush pending outbound traffic to a single peer: keep-alive pings,
// wallet-tx rebroadcasts, "addr" trickle, "inv" announcements (with
// privacy-preserving trickling of transaction invs), and queued
// "getdata" requests. fSendTrickle selects this peer as the current
// trickle target. Skips the whole pass (returning true) if cs_main
// cannot be acquired without blocking. Always returns true.
bool SendMessages(CNode* pto, bool fSendTrickle)
{
    TRY_LOCK(cs_main, lockMain);
    if (lockMain) {
        // Don't send anything until we get their version message
        if (pto->nVersion == 0)
        {
            return true;
        }

        // Keep-alive ping. We send a nonce of zero because we don't use it anywhere
        // right now.
        if (pto->nLastSend && GetTime() - pto->nLastSend > 30 * 60 && pto->vSend.empty()) {
            uint64_t nonce = 0;
            // Peers after BIP 31 expect a nonce argument on "ping"
            if (pto->nVersion > BIP0031_VERSION)
                pto->PushMessage("ping", nonce);
            else
                pto->PushMessage("ping");
        }

        // Resend wallet transactions that haven't gotten in a block yet
        ResendWalletTransactions();

        // Address refresh broadcast (at most once per 24h, not during IBD)
        static int64_t nLastRebroadcast;
        if (!IsInitialBlockDownload() && (GetTime() - nLastRebroadcast > 24 * 60 * 60))
        {
            {
                LOCK(cs_vNodes);
                BOOST_FOREACH(CNode* pnode, vNodes)
                {
                    // Periodically clear setAddrKnown to allow refresh broadcasts
                    if (nLastRebroadcast)
                        pnode->setAddrKnown.clear();

                    // Rebroadcast our address
                    if (true)
                    {
                        CAddress addr = GetLocalAddress(&pnode->addr);
                        if (addr.IsRoutable())
                            pnode->PushAddress(addr);
                    }
                }
            }
            nLastRebroadcast = GetTime();
        }

        //
        // Message: addr
        //
        if (fSendTrickle)
        {
            vector<CAddress> vAddr;
            vAddr.reserve(pto->vAddrToSend.size());
            BOOST_FOREACH(const CAddress& addr, pto->vAddrToSend)
            {
                // returns true if wasn't already contained in the set
                if (pto->setAddrKnown.insert(addr).second)
                {
                    vAddr.push_back(addr);
                    // receiver rejects addr messages larger than 1000
                    if (vAddr.size() >= 1000)
                    {
                        pto->PushMessage("addr", vAddr);
                        vAddr.clear();
                    }
                }
            }
            pto->vAddrToSend.clear();
            if (!vAddr.empty())
                pto->PushMessage("addr", vAddr);
        }

        //
        // Message: inventory
        //
        vector<CInv> vInv;
        vector<CInv> vInvWait;
        {
            LOCK(pto->cs_inventory);
            vInv.reserve(pto->vInventoryToSend.size());
            vInvWait.reserve(pto->vInventoryToSend.size());
            BOOST_FOREACH(const CInv& inv, pto->vInventoryToSend)
            {
                if (pto->setInventoryKnown.count(inv))
                    continue;

                // trickle out tx inv to protect privacy
                if (inv.type == MSG_TX && !fSendTrickle)
                {
                    // 1/4 of tx invs blast to all immediately
                    // (per-node salt keeps the 1/4 selection unlinkable)
                    static uint256 hashSalt;
                    if (hashSalt == 0)
                        hashSalt = GetRandHash();
                    uint256 hashRand = inv.hash ^ hashSalt;
                    hashRand = Hash(BEGIN(hashRand), END(hashRand));
                    bool fTrickleWait = ((hashRand & 3) != 0);

                    // always trickle our own transactions
                    if (!fTrickleWait)
                    {
                        CWalletTx wtx;
                        if (GetTransaction(inv.hash, wtx))
                            if (wtx.fFromMe)
                                fTrickleWait = true;
                    }
                    if (fTrickleWait)
                    {
                        vInvWait.push_back(inv);
                        continue;
                    }
                }

                // returns true if wasn't already contained in the set
                if (pto->setInventoryKnown.insert(inv).second)
                {
                    vInv.push_back(inv);
                    if (vInv.size() >= 1000)
                    {
                        pto->PushMessage("inv", vInv);
                        vInv.clear();
                    }
                }
            }
            // Held-back tx invs wait for this peer's next trickle turn
            pto->vInventoryToSend = vInvWait;
        }
        if (!vInv.empty())
            pto->PushMessage("inv", vInv);

        //
        // Message: getdata
        //
        vector<CInv> vGetData;
        int64_t nNow = GetTime() * 1000000;  // microseconds, to match mapAskFor keys
        CTxDB txdb("r");
        while (!pto->mapAskFor.empty())
        {
            // mapAskFor is keyed by request time, so begin() is the earliest
            int64_t nRequestTime = (*pto->mapAskFor.begin()).first;
            const CInv& inv = (*pto->mapAskFor.begin()).second;
            if (nRequestTime > nNow)
            {
                if (fDebugNet)
                {
                    printf("first getdata of %s request for future "
                           "(%s > %s)\n %s\n",
                           pto->addrName.c_str(),
                           DateTimeStrFormat("%H:%M:%S",
                                             nRequestTime/1000000).c_str(),
                           DateTimeStrFormat("%H:%M:%S",
                                             nNow/1000000).c_str(),
                           inv.ToString().c_str());
                }
                break;
            }
            if (AlreadyHave(txdb, inv))
            {
                printf("already have %s\n",
                       inv.ToString().c_str());
            }
            else
            {
                if (fDebugNet)
                {
                    printf("sending getdata to %s:\n %s\n",
                           pto->addrName.c_str(),
                           inv.ToString().c_str());
                }
                vGetData.push_back(inv);
                if (vGetData.size() >= 1000)
                {
                    pto->PushMessage("getdata", vGetData);
                    vGetData.clear();
                }
                mapAlreadyAskedFor[inv] = nNow;
            }
            pto->mapAskFor.erase(pto->mapAskFor.begin());
        }
        if (!vGetData.empty())
            pto->PushMessage("getdata", vGetData);
    }
    return true;
}
//////////////////////////////////////////////////////////////////////////////
//
// StealthMinter
//
#ifdef WITH_MINER
// Apply SHA-256 style padding to a message of `len` bytes held in
// pbuffer: append 0x80, zero-fill up to a 64-byte block boundary (with
// 8 bytes reserved for the length field), and store the bit length
// big-endian in the last 4 bytes (the upper 4 length bytes stay zero
// from the fill). The caller's buffer must hold 64 * (1 + (len+8)/64)
// bytes. Returns the number of 64-byte blocks occupied.
int static FormatHashBlocks(void* pbuffer, unsigned int len)
{
    unsigned char* pBytes = (unsigned char*)pbuffer;
    unsigned int nBlocks = 1 + ((len + 8) / 64);
    unsigned char* pEnd = pBytes + 64 * nBlocks;
    // Zero everything past the message, then mark the end-of-message bit
    memset(pBytes + len, 0, 64 * nBlocks - len);
    pBytes[len] = 0x80;
    // Write the message length in bits, big-endian, into the tail
    unsigned int nBits = len * 8;
    for (int i = 0; i < 4; ++i)
        pEnd[-(i + 1)] = (unsigned char)((nBits >> (8 * i)) & 0xff);
    return nBlocks;
}
// SHA-256 initial hash values H0..H7 (FIPS 180-4, sec. 5.3.3); passed as
// pinit to SHA256Transform when compressing the first 64-byte chunk
// (see FormatHashBuffers below).
static const unsigned int pSHA256InitState[8] =
    {0x6a09e667, 0xbb67ae85,
     0x3c6ef372, 0xa54ff53a,
     0x510e527f, 0x9b05688c,
     0x1f83d9ab, 0x5be0cd19};
void SHA256Transform(void* pstate, void* pinput, const void* pinit)
{
SHA256_CTX ctx;
unsigned char data[64];
SHA256_Init(&ctx);
for (int i = 0; i < 16; i++)
((uint32_t*)data)[i] = ByteReverse(((uint32_t*)pinput)[i]);
for (int i = 0; i < 8; i++)
ctx.h[i] = ((uint32_t*)pinit)[i];
SHA256_Update(&ctx, data, sizeof(data));
for (int i = 0; i < 8; i++)
((uint32_t*)pstate)[i] = ctx.h[i];
}
#endif /* WITH_MINER */
// Tracks a mempool transaction that cannot be added to a block yet
// because it spends outputs of other transactions that are themselves
// still in the mempool. CreateNewBlock keeps one COrphan per such tx;
// when setDependsOn drains empty (all parents were included), the tx is
// pushed onto the priority queue with the cached priority/fee values.
class COrphan
{
public:
    CTransaction* ptx;          // borrowed pointer into mempool storage (not owned)
    set<uint256> setDependsOn;  // hashes of in-mempool parents not yet in the block
    double dPriority;           // cached sum(value-in * depth) / tx size
    double dFeePerKb;           // cached fee rate (includes feework facsimile)
    Feework feework;            // cached feework for feeless transactions
    COrphan(CTransaction* ptxIn)
        : ptx(ptxIn), dPriority(0), dFeePerKb(0)
    {
    }
    void print() const
    {
        printf("COrphan(hash=%s, dPriority=%.1f, dFeePerKb=%.1f)\n",
               ptx->GetHash().ToString().c_str(), dPriority, dFeePerKb);
        BOOST_FOREACH(uint256 hash, setDependsOn)
            printf(" setDependsOn %s\n", hash.ToString().c_str());
    }
};
// Statistics of the most recently assembled block, set at the end of
// CreateNewBlock's transaction-collection pass
uint64_t nLastBlockTx = 0;
uint64_t nLastBlockSize = 0;
// Width of the last coinstake search window in seconds; zeroed while
// the staking loop is idle (no peers, initial download, locked wallet)
int64_t nLastCoinStakeSearchInterval = 0;
// We want to sort transactions by priority and fee, so:
// (priority, fee-per-kb, feework, pointer to the mempool transaction)
typedef boost::tuple<double, double, Feework, CTransaction*> TxPriority;
// Heap comparator for TxPriority tuples. With fSortByFee the primary key
// is fee-per-kb (tuple slot 1) and priority (slot 0) breaks ties;
// otherwise the roles are swapped. Lower keys compare first, so a
// max-heap built with this comparator surfaces the best candidate.
class TxPriorityCompare
{
    bool fSortByFee;
public:
    TxPriorityCompare(bool _byFee) : fSortByFee(_byFee) { }
    bool operator()(const TxPriority& a, const TxPriority& b)
    {
        // Select primary/secondary keys according to the sort mode
        double aPrimary = fSortByFee ? a.get<1>() : a.get<0>();
        double bPrimary = fSortByFee ? b.get<1>() : b.get<0>();
        double aSecondary = fSortByFee ? a.get<0>() : a.get<1>();
        double bSecondary = fSortByFee ? b.get<0>() : b.get<1>();
        if (aPrimary == bPrimary)
            return aSecondary < bSecondary;
        return aPrimary < bPrimary;
    }
};
// Assemble a candidate block of the requested proof type on top of the
// current best block, filling it with mempool transactions ordered by
// priority and then by fee rate. On success pblockRet holds the new
// block (header partially filled; nonce zeroed) and BLOCKCREATION_OK is
// returned. qPoS creation may also return benign statuses (slot already
// produced, wallet is not the scheduled staker, registry replaying).
// WARNING: pblockRet is passed in uninitialized
BlockCreationResult CreateNewBlock(CWallet* pwallet,
                                   ProofTypes fTypeOfProof,
                                   AUTO_PTR<CBlock> &pblockRet)
{
    bool fProofOfStake;
    bool fQuantumPoS;
    switch (fTypeOfProof)
    {
    case PROOFTYPE_POW:
        fProofOfStake = false;
        fQuantumPoS = false;
        break;
    case PROOFTYPE_POS:
        fProofOfStake = true;
        fQuantumPoS = false;
        break;
    case PROOFTYPE_QPOS:
        fQuantumPoS = true;
        fProofOfStake = false;
        break;
    default:
        printf ("CreateNewBlock(): No such type of proof\n");
        return BLOCKCREATION_PROOFTYPE_FAIL;
    }

    CReserveKey reservekey(pwallet);

    CBlockIndex* pindexPrev = pindexBest;
    int nHeight = pindexPrev->nHeight+1;  // height of new block
    int nFork = GetFork(nHeight);

    if (fQuantumPoS)
    {
        // qPoS: only the registry-scheduled staker may produce this slot
        if (pregistryMain->IsInReplayMode())
        {
            return BLOCKCREATION_QPOS_IN_REPLAY;
        }
        unsigned int nID;
        unsigned int nTime;
        bool fShould = pregistryMain->GetIDForCurrentTime(pindexBest, nID, nTime);
        if (nID == 0)
        {
            // this should rarely happen, and never when not in replay
            printf("CreateNewBlock(): TSRH Registry failed with 0 ID\n");
            return BLOCKCREATION_REGISTRY_FAIL;
        }
        if (!fShould)
        {
            return BLOCKCREATION_QPOS_BLOCK_EXISTS;
        }
        CPubKey pubkey;
        if (!pregistryMain->GetDelegateKey(nID, pubkey))
        {
            return BLOCKCREATION_REGISTRY_FAIL;
        }
        if (!pwallet->HaveKey(pubkey.GetID()))
        {
            return BLOCKCREATION_NOT_CURRENTSTAKER;
        }
        pblockRet->nStakerID = nID;
        pblockRet->nHeight = nHeight;
        pblockRet->nTime = nTime;
    }
    else
    {
        // Create coinbase tx paying to a fresh key from the reserve pool
        CTransaction txNew;
        txNew.vin.resize(1);
        txNew.vin[0].prevout.SetNull();
        txNew.vout.resize(1);
        txNew.vout[0].scriptPubKey << reservekey.GetReservedKey() << OP_CHECKSIG;
        // Add our coinbase tx as first transaction
        pblockRet->vtx.push_back(txNew);
    }

    // Largest block you're willing to create:
    unsigned int nBlockMaxSize = GetArg("-blockmaxsize",
                                        chainParams.DEFAULT_BLOCKMAXSIZE);
    // Limit to between 1K and MAX_BLOCK_SIZE-1K for sanity:
    nBlockMaxSize = max((unsigned int)1000,
                        min((unsigned int)(chainParams.MAX_BLOCK_SIZE - 1000),
                            nBlockMaxSize));

    // How much of the block should be dedicated to high-priority transactions,
    // included regardless of the fees they pay
    unsigned int nBlockPrioritySize = GetArg("-blockprioritysize",
                                             chainParams.DEFAULT_BLOCKPRIORITYSIZE);
    nBlockPrioritySize = min(nBlockMaxSize, nBlockPrioritySize);

    // Minimum block size you want to create; block will be filled with free transactions
    // until there are no more or the block reaches this size:
    unsigned int nBlockMinSize = GetArg("-blockminsize",
                                        chainParams.DEFAULT_BLOCKMINSIZE);
    nBlockMinSize = min(nBlockMaxSize, nBlockMinSize);

    // Fee-per-kilobyte amount considered the same as "free"
    // Be careful setting this: if you set it to zero then
    // a transaction spammer can cheaply fill blocks using
    // 1-satoshi-fee transactions. It should be set above the real
    // cost to you of processing a transaction.
    int64_t nMinTxFee = chainParams.MIN_TX_FEE;
    if (mapArgs.count("-mintxfee"))
    {
        ParseMoney(mapArgs["-mintxfee"], nMinTxFee);
    }

    // ppcoin: if coinstake available add coinstake tx
    // only initialized at startup
    unsigned int nCoinStakeTime = (unsigned int) GetAdjustedTime();

    if (fProofOfStake)  // attempt to find a coinstake
    {
        static unsigned int nLastCoinStakeSearchTime = (unsigned int) GetAdjustedTime();
        pblockRet->nBits = GetNextTargetRequired(pindexPrev, true);
        CTransaction txCoinStake;
        // Upon XST_FORK006, transactions don't have meaningful timestamps.
        // However to use the logic of this function for now, nCoinStakeTime will
        // be used to hold the block timestamp. Upon XST_FORK006, the block
        // timestamp will be a permanent record of its transaction timestamps.
        if (txCoinStake.HasTimestamp())
        {
            txCoinStake.SetTxTime(nCoinStakeTime);
        }
        unsigned int nSearchTime = nCoinStakeTime;  // search to current time
        if (nSearchTime > nLastCoinStakeSearchTime)
        {
            if (pwallet->CreateCoinStake(*pwallet,
                                         pblockRet->nBits,
                                         nSearchTime - nLastCoinStakeSearchTime,
                                         txCoinStake,
                                         nCoinStakeTime))
            {
                unsigned int nTimeMax;
                if (nFork < XST_FORK005)
                {
                    nTimeMax = max(pindexPrev->GetPastTimeLimit()+1,
                                   (pindexPrev->GetBlockTime() -
                                    chainParams.nMaxClockDrift));
                }
                else
                {
                    if (nFork >= XST_FORK006)
                    {
                        pblockRet->nTime = nCoinStakeTime;
                    }
                    nTimeMax = max(pblockRet->GetBlockTime(),
                                   pindexPrev->GetBlockTime());
                }
                if (nCoinStakeTime >= nTimeMax)
                {   // make sure coinstake would meet timestamp protocol
                    // as it would be the same as the block timestamp
                    pblockRet->vtx[0].vout[0].SetEmpty();
                    // this test simply marks that assigning tx ntime upon creation
                    // will be eliminated in the future
                    if (pblockRet->vtx[0].HasTimestamp())
                    {
                        pblockRet->vtx[0].SetTxTime(nCoinStakeTime);
                    }
                    pblockRet->vtx.push_back(txCoinStake);
                }
            }
            nLastCoinStakeSearchInterval = nSearchTime - nLastCoinStakeSearchTime;
            nLastCoinStakeSearchTime = nSearchTime;
        }
    }

    if (!fQuantumPoS)
    {
        pblockRet->nBits = GetNextTargetRequired(pindexPrev,
                                                 pblockRet->IsProofOfStake());
    }

    // Collect memory pool transactions into the block
    int64_t nFees = 0;
    {
        LOCK2(cs_main, mempool.cs);
        // shadows the outer pindexPrev; re-read under cs_main
        CBlockIndex* pindexPrev = pindexBest;
        CTxDB txdb("r");

        // Priority order to process transactions
        list<COrphan> vOrphan;  // list memory doesn't move
        map<uint256, vector<COrphan*> > mapDependers;

        // This vector will be sorted into a priority queue:
        vector<TxPriority> vecPriority;
        vecPriority.reserve(mempool.mapTx.size());
        for (map<uint256, CTransaction>::iterator mi = mempool.mapTx.begin(); mi != mempool.mapTx.end(); ++mi)
        {
            CTransaction& tx = (*mi).second;
            if (tx.IsCoinBase() || tx.IsCoinStake() || !tx.IsFinal())
            {
                continue;
            }

            COrphan* porphan = NULL;
            double dPriority = 0;
            int64_t nTotalIn = 0;
            bool fMissingInputs = false;
            BOOST_FOREACH(const CTxIn& txin, tx.vin)
            {
                // Read prev transaction
                CTransaction txPrev;
                CTxIndex txindex;
                if (!txPrev.ReadFromDisk(txdb, txin.prevout, txindex))
                {
                    // This should never happen; all transactions in the memory
                    // pool should connect to either transactions in the chain
                    // or other transactions in the memory pool.
                    if (!mempool.mapTx.count(txin.prevout.hash))
                    {
                        printf("ERROR: TSNH mempool transaction missing input\n");
                        if (fDebug) assert("mempool transaction missing input" == 0);
                        fMissingInputs = true;
                        if (porphan)
                            vOrphan.pop_back();
                        break;
                    }

                    // Has to wait for dependencies
                    if (!porphan)
                    {
                        // Use list for automatic deletion
                        vOrphan.push_back(COrphan(&tx));
                        porphan = &vOrphan.back();
                    }
                    mapDependers[txin.prevout.hash].push_back(porphan);
                    porphan->setDependsOn.insert(txin.prevout.hash);
                    nTotalIn += mempool.mapTx[txin.prevout.hash].vout[txin.prevout.n].nValue;
                    continue;
                }
                int64_t nValueIn = txPrev.vout[txin.prevout.n].nValue;
                nTotalIn += nValueIn;
                int nConf = txindex.GetDepthInMainChain();
                dPriority += (double)nValueIn * nConf;
            }
            if (fMissingInputs)
            {
                continue;
            }

            nTotalIn += tx.GetClaimIn();

            // Priority is sum(valuein * age) / txsize
            unsigned int nTxSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);
            dPriority /= nTxSize;

            // This is a more accurate fee-per-kilobyte than is used by the client code, because the
            // client code rounds up the size to the nearest 1K. That's good, because it gives an
            // incentive to create smaller transactions.
            double dFeePerKb = double(nTotalIn-tx.GetValueOut()) / (double(nTxSize)/1000.0);

            // We can prioritize feeless transactions with a fee per kb facsimile.
            // This means that a money fee transaction could add feework
            // to bump its priority under times of high demand,
            // not that there seems to be anything wrong with that.
            Feework feework;
            feework.bytes = nTxSize;
            if (tx.CheckFeework(feework, false, bfrFeeworkValidator))
            {
                if (feework.IsOK())
                {
                    dFeePerKb += double(feework.GetDiff()) /
                                 (double(nTxSize) / 1000.0);
                    if (fDebugFeeless)
                    {
                        printf("CreateNewBlock(): feework\n %s\n%s\n",
                               tx.GetHash().ToString().c_str(),
                               feework.ToString(" ").c_str());
                    }
                }
            }
            else
            {
                // invalid feework: exclude the transaction entirely
                continue;
            }

            if (porphan)
            {
                // cache values until the orphan's dependencies are met
                porphan->dPriority = dPriority;
                porphan->dFeePerKb = dFeePerKb;
                porphan->feework = feework;
            }
            else
            {
                vecPriority.push_back(TxPriority(dPriority, dFeePerKb,
                                                 feework, &(*mi).second));
            }
        }

        // Collect transactions into block
        map<uint256, CTxIndex> mapTestPool;
        uint64_t nBlockSize = 1000;
        uint64_t nBlockTx = 0;
        int nBlockSigOps = 100;
        bool fSortedByFee = (nBlockPrioritySize <= 0);

        TxPriorityCompare comparer(fSortedByFee);
        make_heap(vecPriority.begin(), vecPriority.end(), comparer);

        while (!vecPriority.empty())
        {
            // Take highest priority transaction off the priority queue:
            double dPriority = vecPriority.front().get<0>();
            double dFeePerKb = vecPriority.front().get<1>();
            Feework feework = vecPriority.front().get<2>();
            CTransaction& tx = *(vecPriority.front().get<3>());
            pop_heap(vecPriority.begin(), vecPriority.end(), comparer);
            vecPriority.pop_back();

            // Size limits
            unsigned int nTxSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);
            if (nBlockSize + nTxSize >= nBlockMaxSize)
            {
                continue;
            }

            // Legacy limits on sigOps:
            unsigned int nTxSigOps = tx.GetLegacySigOpCount();
            if (nBlockSigOps + nTxSigOps >= chainParams.MAX_BLOCK_SIGOPS)
            {
                continue;
            }

            // Timestamp limit: tx may not postdate now, nor the coinstake
            if (tx.HasTimestamp())
            {
                if ( (tx.GetTxTime() > GetAdjustedTime()) ||
                     ( pblockRet->IsProofOfStake() &&
                       (pblockRet->vtx[1].HasTimestamp()) &&
                       (tx.GetTxTime() > pblockRet->vtx[1].GetTxTime()) ) )
                {
                    continue;
                }
            }

            // ppcoin: simplify transaction fee - allow free = false
            int64_t nMinFee = tx.GetMinFee(nBlockSize, GMF_BLOCK);

            // Skip low fee transactions.
            // This appears to be a pointless vestigial test because
            // the mempool should reject low fee transactions.
            if (dFeePerKb < nMinTxFee)
            {
                continue;
            }

            // Prioritize by fee once past the priority size or we run out of high-priority
            // transactions:
            if (!fSortedByFee &&
                ((nBlockSize + nTxSize >= nBlockPrioritySize) || (dPriority < COIN * 144 / 250)))
            {
                fSortedByFee = true;
                comparer = TxPriorityCompare(fSortedByFee);
                make_heap(vecPriority.begin(), vecPriority.end(), comparer);
            }

            // Connecting shouldn't fail due to dependency on other memory pool transactions
            // because we're already processing them in order of dependency
            map<uint256, CTxIndex> mapTestPoolTmp(mapTestPool);
            MapPrevTx mapInputs;
            bool fInvalid;
            if (!tx.FetchInputs(txdb, mapTestPoolTmp, false, true, mapInputs, fInvalid))
            {
                continue;
            }

            vector<QPTxDetails> vDeetsToCheck;
            // use prev block hash for lack of any alternative
            tx.GetQPTxDetails(*pindexBest->phashBlock, vDeetsToCheck);
            if (!vDeetsToCheck.empty())
            {
                map<string, qpos_purchase> mapPurchasesUnused;
                map<unsigned int, vector<qpos_setkey> > mapSetKeysUnused;
                map<CPubKey, vector<qpos_claim> > mapClaimsUnused;
                map<unsigned int, vector<qpos_setmeta> > mapSetMetasUnused;
                vector<QPTxDetails> vDeetsUnused;
                if (!tx.CheckQPoS(pregistryMain,
                                  mapInputs,
                                  GetTime(),
                                  vDeetsToCheck,
                                  pindexBest,
                                  mapPurchasesUnused,
                                  mapSetKeysUnused,
                                  mapClaimsUnused,
                                  mapSetMetasUnused,
                                  vDeetsUnused))
                {
                    printf("CreateNewBlock(): check qPoS failed, skipping:\n %s\n",
                           tx.GetHash().ToString().c_str());
                    continue;
                }
            }

            // FIXME: this will be unnecessary after FORK_PURCHASE3
            map<string, qpos_purchase> mapPurchases;
            uint32_t N = static_cast<uint32_t>(
                            pregistryMain->GetNumberQualified());
            int64_t nStakerPrice = GetStakerPrice(N,
                                                  pindexBest->nMoneySupply,
                                                  nFork);
            if (!tx.CheckPurchases(pregistryMain, nStakerPrice, mapPurchases))
            {
                printf("CreateNewBlock(): purchase failed, skipping:\n %s\n",
                       tx.GetHash().ToString().c_str());
                continue;
            }
            bool fSkip = false;
            BOOST_FOREACH(const PAIRTYPE(string, qpos_purchase)& item,
                          mapPurchases)
            {
                string strUnused;
                if (!pregistryMain->AliasIsAvailable(item.first, strUnused))
                {
                    // this shouldn't happen normally
                    printf("CreateNewBlock(): TSHN alias %s unavailable, "
                           "skipping\n %s\n",
                           item.first.c_str(),
                           tx.GetHash().ToString().c_str());
                    fSkip = true;
                    break;
                }
            }
            if (fSkip)
            {
                continue;
            }
            // end of FIXME

            map<string, qpos_purchase>::const_iterator kt;
            int64_t nValuePurchases = 0;
            for (kt = mapPurchases.begin(); kt != mapPurchases.end(); ++kt)
            {
                nValuePurchases += kt->second.value;
            }

            qpos_claim claim;
            if (!tx.CheckClaim(pregistryMain, mapInputs, claim))
            {
                printf("CreateNewBlock(): claim failed, skipping:\n %s\n",
                       tx.GetHash().ToString().c_str());
                continue;
            }

            int64_t nTxFees = tx.GetValueIn(mapInputs) - tx.GetValueOut();
            // A sub-minimum money fee is acceptable only with valid feework
            if (nTxFees < nMinFee)
            {
                if (!feework.IsOK())
                {
                    continue;
                }
            }

            nTxSigOps += tx.GetP2SHSigOpCount(mapInputs);
            if (nBlockSigOps + nTxSigOps >= chainParams.MAX_BLOCK_SIGOPS)
            {
                continue;
            }

            if (!tx.ConnectInputs(txdb, mapInputs, mapTestPoolTmp,
                                  CDiskTxPos(1,1,1), pindexPrev, false, true,
                                  STANDARD_SCRIPT_VERIFY_FLAGS,
                                  nValuePurchases, claim.value,
                                  feework))
            {
                continue;
            }
            mapTestPoolTmp[tx.GetHash()] = CTxIndex(CDiskTxPos(1,1,1), tx.vout.size());
            swap(mapTestPool, mapTestPoolTmp);

            // Added
            pblockRet->vtx.push_back(tx);
            nBlockSize += nTxSize;
            ++nBlockTx;
            nBlockSigOps += nTxSigOps;
            nFees += nTxFees;

            if (fDebug && GetBoolArg("-printpriority"))
            {
                printf("priority %.1f feeperkb %.1f txid %s\n",
                       dPriority, dFeePerKb, tx.GetHash().ToString().c_str());
            }

            // Add transactions that depend on this one to the priority queue
            uint256 hash = tx.GetHash();
            if (mapDependers.count(hash))
            {
                BOOST_FOREACH(COrphan* porphan, mapDependers[hash])
                {
                    if (!porphan->setDependsOn.empty())
                    {
                        porphan->setDependsOn.erase(hash);
                        if (porphan->setDependsOn.empty())
                        {
                            vecPriority.push_back(TxPriority(porphan->dPriority,
                                                             porphan->dFeePerKb,
                                                             porphan->feework,
                                                             porphan->ptx));
                            push_heap(vecPriority.begin(),
                                      vecPriority.end(),
                                      comparer);
                        }
                    }
                }
            }
        }

        nLastBlockTx = nBlockTx;
        nLastBlockSize = nBlockSize;
        if (fDebug && GetBoolArg("-printpriority"))
        {
            printf("CreateNewBlock(): total size %" PRIu64 "\n", nBlockSize);
        }
        if (pblockRet->IsProofOfWork())
        {
            pblockRet->vtx[0].vout[0].nValue = GetProofOfWorkReward(nHeight,
                                                                    nFees);
        }

        // Fill in header
        pblockRet->hashPrevBlock = pindexPrev->GetBlockHash();
        if (pblockRet->IsProofOfStake())
        {
            pblockRet->nTime = nCoinStakeTime;
        }
        if (nFork < XST_FORK006)
        {
            pblockRet->nTime = max(pindexPrev->GetPastTimeLimit() + 1,
                                   pblockRet->GetMaxTransactionTime());
        }
        // NOTE(review): the next else-if chain tests XST_FORKQPOS before
        // XST_FORK005; if FORK005 precedes FORKQPOS in fork ordering, the
        // else-if branch is unreachable — TODO confirm intended ordering.
        if (nFork < XST_FORKQPOS)
        {
            pblockRet->nTime = max(pblockRet->GetBlockTime(),
                                   pindexPrev->GetPastTimeLimit() + 1);
        }
        else if (nFork < XST_FORK005)
        {
            pblockRet->nTime = max(pblockRet->GetBlockTime(),
                                   (pindexPrev->GetBlockTime() -
                                    chainParams.nMaxClockDrift));
        }
        if (pblockRet->IsProofOfWork())
        {
            pblockRet->UpdateTime(pindexPrev);
        }
        pblockRet->nNonce = 0;
    }

    return BLOCKCREATION_OK;
}
// Bump the coinbase extra nonce (reset whenever the previous-block hash
// changes between calls) and rebuild the block's merkle root to match.
// For non-qPoS blocks the new height and extra nonce are serialized into
// the coinbase scriptSig (height-in-coinbase, required for
// block.version=2).
void IncrementExtraNonce(CBlock* pblock, CBlockIndex* pindexPrev, unsigned int& nExtraNonce)
{
    // Update nExtraNonce
    static uint256 hashPrevBlock;  // prev-block hash seen on the last call
    if (hashPrevBlock != pblock->hashPrevBlock)
    {
        nExtraNonce = 0;
        hashPrevBlock = pblock->hashPrevBlock;
    }
    ++nExtraNonce;
    // qPoS blocks don't have a coinbase transaction for vtx[0]
    if (!pblock->IsQuantumProofOfStake())
    {
        // Height first in coinbase required for block.version=2
        unsigned int nHeight = pindexPrev->nHeight+1;
        pblock->vtx[0].vin[0].scriptSig = (CScript() << nHeight << CBigNum(nExtraNonce)) + COINBASE_FLAGS;
        assert(pblock->vtx[0].vin[0].scriptSig.size() <= 100);
    }
    pblock->hashMerkleRoot = pblock->BuildMerkleTree();
}
#ifdef WITH_MINER
// Lay out the block header in getwork-style hash buffers for an external
// hashing loop: pdata receives the 128-byte padded header (byte-swapped
// to big-endian words), phash1 a 64-byte padded scratch area for the
// second hash, and pmidstate the SHA-256 state after compressing the
// first 64 header bytes (constant while only nNonce changes).
void FormatHashBuffers(CBlock* pblock, char* pmidstate, char* pdata, char* phash1)
{
    //
    // Pre-build hash buffers
    //
    struct
    {
        // mirrors the serialized block header field order
        struct unnamed2
        {
            int nVersion;
            uint256 hashPrevBlock;
            uint256 hashMerkleRoot;
            unsigned int nTime;
            unsigned int nBits;
            unsigned int nNonce;
        }
        block;
        unsigned char pchPadding0[64];  // room for SHA-256 padding of block
        uint256 hash1;
        unsigned char pchPadding1[64];  // room for SHA-256 padding of hash1
    }
    tmp;
    memset(&tmp, 0, sizeof(tmp));

    tmp.block.nVersion = pblock->nVersion;
    tmp.block.hashPrevBlock = pblock->hashPrevBlock;
    tmp.block.hashMerkleRoot = pblock->hashMerkleRoot;
    tmp.block.nTime = pblock->nTime;
    tmp.block.nBits = pblock->nBits;
    tmp.block.nNonce = pblock->nNonce;

    // Append SHA-256 padding after each region
    FormatHashBlocks(&tmp.block, sizeof(tmp.block));
    FormatHashBlocks(&tmp.hash1, sizeof(tmp.hash1));

    // Byte swap all the input buffer
    for (unsigned int i = 0; i < sizeof(tmp)/4; i++)
        ((unsigned int*)&tmp)[i] = ByteReverse(((unsigned int*)&tmp)[i]);

    // Precalc the first half of the first hash, which stays constant
    SHA256Transform(pmidstate, &tmp.block, pSHA256InitState);

    memcpy(pdata, &tmp.block, 128);
    memcpy(phash1, &tmp.hash1, 64);
}
#endif /* WITH_MINER */
// Validate and submit a locally generated block: for PoW, verify the
// hash meets the target; for all types, confirm the block still extends
// the best chain, consume the reserved key, and hand the block to
// ProcessBlock() as if received from a peer. Returns false (via error())
// if the block misses the target, is a dead end, or is rejected.
bool CheckWork(CBlock* pblock, CWallet& wallet,
               CReserveKey& reservekey, const CBlockIndex* pindexPrev)
{
    //// debug print
    printf("CheckWork:\n");

    uint256 hash = pblock->GetHash();

    if (pblock->IsQuantumProofOfStake())
    {
        // printf(" new qPoS block found\n hash: %s\n", hash.GetHex().c_str());
        int64_t nReward = GetQPoSReward(pindexPrev);
        printf(" generated %s\n", FormatMoney(nReward).c_str());
    }
    else
    {
        uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
        if (hash > hashTarget && pblock->IsProofOfWork())
            return error("CheckWork() : proof-of-work not meeting target");
        printf(" new block found\n hash: %s\ntarget: %s\n", hash.GetHex().c_str(), hashTarget.GetHex().c_str());
        pblock->print();
        printf(" generated %s\n", FormatMoney(pblock->vtx[0].vout[0].nValue).c_str());
    }

    // Found a solution
    {
        LOCK(cs_main);
        // The tip may have moved while we were hashing
        if (pblock->hashPrevBlock != hashBestChain)
        {
            return error("CheckWork() : generated block is deadend");
        }
        // Remove key from key pool
        // FIXME: not really necessary for qPoS
        reservekey.KeepKey();
        // Track how many getdata requests this block gets
        {
            LOCK(wallet.cs_wallet);
            wallet.mapRequestCount[pblock->GetHash()] = 0;
        }
        // Process this block the same as if we had received it from
        // another node.
        // Stakers produce qPoS on contingency, meaning the block can only
        // fully validate if the registry is synced with the block's
        // immediate predecessor.
        // This full validation happens upon connecting the block.
        bool fProcessOK = ProcessBlock(NULL, pblock, false, false, true);
        if (!fProcessOK)
        {
            return error("CheckWork() : ProcessBlock, block not accepted");
        }
    }
    return true;
}
#ifdef WITH_MINER
// Forward declaration; defined below alongside the other miner code
void static ThreadStealthMiner(void* parg);
#endif /* WITH_MINER */
// Master on/off switch for PoW generation (toggled by GenerateXST)
static bool fGenerateXST = false;
#ifdef WITH_MINER
// -genproclimit settings: optional cap on miner threads (-1 = no cap)
static bool fLimitProcessors = false;
static int nLimitProcessors = -1;
#endif /* WITH_MINER */
// Main minting loop for one PoW or PoS thread. Waits out unfavorable
// conditions (no peers, initial download, locked wallet), builds a
// candidate block via CreateNewBlock(), and either submits a signed PoS
// block directly or (only when compiled WITH_MINER) grinds nonces for
// PoW. Exits on shutdown, when its proof type is retired by a fork, or
// when generation is switched off.
void StealthMinter(CWallet *pwallet, ProofTypes fTypeOfProof)
{
    bool fProofOfWork = false;
    bool fProofOfStake = false;
    uint64_t nSleepInterval = 0;  // post-iteration sleep, ms
    switch (fTypeOfProof)
    {
    case PROOFTYPE_POW:
        fProofOfWork = true;
        RenameThread("stealth-minter-pow");
        printf("CPUMinter started for proof-of-work\n");
        break;
    case PROOFTYPE_POS:
        nSleepInterval = 60000;
        fProofOfStake = true;
        RenameThread("stealth-minter-pos");
        printf("CPUMinter started for proof-of-stake\n");
        break;
    default:
        printf("StealthMinter(): bad proof type\n");
        return;
    }
    SetThreadPriority(THREAD_PRIORITY_LOWEST);

    // Each thread has its own key and counter
    CReserveKey reservekey(pwallet);
    unsigned int nExtraNonce = 0;

    while (fGenerateXST || fProofOfStake)
    {
        if (fShutdown)
        {
            return;
        }
        if ((nMaxHeight > 0) && (nBestHeight >= nMaxHeight))
        {
            return;
        }
        // rollbacks mean qPoS can keep producing even with 0 connections
        while (vNodes.empty() ||
               IsInitialBlockDownload() ||
               pwallet->IsLocked())
        {
            nLastCoinStakeSearchInterval = 0;
            MilliSleep(1000);
            if (fShutdown)
            {
                return;
            }
            if (!fGenerateXST && !fProofOfStake)
            {
                return;
            }
        }
        while (pindexBest == NULL)
        {
            MilliSleep(1000);
        }

        //
        // Create new block
        //
#ifdef WITH_MINER
        unsigned int nTransactionsUpdatedLast = nTransactionsUpdated;
#endif /* WITH_MINER */
        CBlockIndex* pindexPrev = pindexBest;
        int nHeight = pindexPrev->nHeight + 1;
        // PoS ends with XST_FORKQPOS, so kill the PoS minter thread
        if (fProofOfStake && (GetFork(nHeight) >= XST_FORKQPOS))
        {
            return;
        }
        // PoW ends with XST_FORK002, so kill any PoW minter thread
        if (fProofOfWork && (GetFork(nHeight) >= XST_FORK002))
        {
            return;
        }
        AUTO_PTR<CBlock> pblock(new CBlock());
        if (!pblock.get())
        {
            return;
        }
        {   // TODO: move PoS to ThreadMessageHandler2
            BlockCreationResult nResult = CreateNewBlock(pwallet,
                                                         fTypeOfProof,
                                                         pblock);
            // only unrecoverable results end the thread; benign failures
            // (e.g. no coinstake found) just fall through to the sleep
            if (nResult == BLOCKCREATION_INSTANTIATION_FAIL ||
                nResult == BLOCKCREATION_REGISTRY_FAIL)
            {
                if (fDebugBlockCreation)
                {
                    printf("block creation catastrophic fail with \"%s\"\n",
                           DescribeBlockCreationResult(nResult));
                }
                return;
            }
            IncrementExtraNonce(pblock.get(), pindexPrev, nExtraNonce);
            if (fProofOfStake)
            {
                // ppcoin: if proof-of-stake block found then process block
                if (pblock->IsProofOfStake())
                {
                    if (!pblock->SignBlock(*pwalletMain, pregistryMain))
                    {
                        continue;
                    }
                    printf("StealthMinter : PoS block found %s\n",
                           pblock->GetHash().ToString().c_str());
                    pblock->print();
                    SetThreadPriority(THREAD_PRIORITY_NORMAL);
                    CheckWork(pblock.get(), *pwalletMain, reservekey, pindexPrev);
                    SetThreadPriority(THREAD_PRIORITY_LOWEST);
                }
            }
        }
        // space blocks better, sleep 1 minute after PoS mint, etc
        MilliSleep(nSleepInterval);
        continue;

/****************************************************************************
 * The following preprocessor conditional block will trigger antivirus for
 * hits similar to "bitcoin miner". It is not compiled by default for XST
 * because PoW ended long ago.
 ****************************************************************************/
#ifdef WITH_MINER
        // NOTE: unreachable unless the `continue` above is removed when
        // building WITH_MINER — legacy PoW nonce-grinding loop follows.
        printf("Running StealthMinter with %" PRIszu " "
               "transactions in block (%u bytes)\n",
               pblock->vtx.size(),
               ::GetSerializeSize(*pblock, SER_NETWORK, PROTOCOL_VERSION));

        //
        // Search
        //
        int64_t nStart = GetTime();
        uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
        uint256 hash;
        LOOP
        {
            hash = pblock->GetHash();
            if (hash <= hashTarget)
            {
                if (!pblock->SignBlock(*pwalletMain, pregistryMain))
                {
                    break;
                }
                SetThreadPriority(THREAD_PRIORITY_NORMAL);
                printf("proof-of-work found \n hash: %s \ntarget: %s\n",
                       hash.GetHex().c_str(), hashTarget.GetHex().c_str());
                pblock->print();
                CheckWork(pblock.get(), *pwalletMain, reservekey, pindexPrev);
                SetThreadPriority(THREAD_PRIORITY_LOWEST);
                break;
            }
            ++pblock->nNonce;

            // Meter hashes/sec
            static int64_t nHashCounter;
            if (nHPSTimerStart == 0)
            {
                nHPSTimerStart = GetTimeMillis();
                nHashCounter = 0;
            }
            else
                nHashCounter += 1;
            if (GetTimeMillis() - nHPSTimerStart > 4000)
            {
                // double-checked under the lock: only one thread updates
                // the shared hash-rate counters per 4-second window
                static CCriticalSection cs;
                {
                    LOCK(cs);
                    if (GetTimeMillis() - nHPSTimerStart > 4000)
                    {
                        dHashesPerSec = 1000.0 * nHashCounter /
                                        (GetTimeMillis() - nHPSTimerStart);
                        nHPSTimerStart = GetTimeMillis();
                        nHashCounter = 0;
                        printf("hashmeter %3d CPUs %6.0f khash/s\n",
                               vnThreadsRunning[THREAD_MINER],
                               dHashesPerSec/1000.0);
                    }
                }
            }

            // Check for stop or if block needs to be rebuilt
            if (fShutdown)
                return;
            if (!fGenerateXST)
                return;
            if (fLimitProcessors && vnThreadsRunning[THREAD_MINER] > nLimitProcessors)
                return;
            if (vNodes.empty())
                break;
            if (++pblock->nNonce >= 0xffff0000)
                break;
            if (nTransactionsUpdated != nTransactionsUpdatedLast && GetTime() - nStart > 60)
                break;
            if (pindexPrev != pindexBest)
                break;

            int nFork = GetFork(nHeight);
            // Update nTime every few seconds
            if (nFork < XST_FORK006)
            {
                pblock->nTime = max(pindexPrev->GetPastTimeLimit()+1,
                                    pblock->GetMaxTransactionTime());
            }
            if (nFork < XST_FORK005)
            {
                pblock->nTime = max(pblock->GetBlockTime(),
                                    (pindexPrev->GetBlockTime() -
                                     chainParams.nMaxClockDrift));
            }
            else
            {
                pblock->nTime = max(pblock->GetBlockTime(),
                                    pindexPrev->GetBlockTime()+1);
            }
            pblock->UpdateTime(pindexPrev);
            // coinbase timestamp is irrelevant upon XST_FORK006
            if (nFork < XST_FORK005)
            {
                if (pblock->GetBlockTime() >= ((int64_t)pblock->vtx[0].GetTxTime() +
                                               chainParams.nMaxClockDrift))
                {
                    break;  // need to update coinbase timestamp
                }
            }
            else if (nFork < XST_FORK006)
            {
                if (pblock->GetBlockTime() >= FutureDrift((int64_t)pblock->vtx[0].GetTxTime()))
                {
                    break;  // need to update coinbase timestamp
                }
            }
        }
#endif /* WITH_MINER */
    }
}
#ifdef WITH_MINER
// Thread entry point for one CPU proof-of-work mining thread.
// parg: a CWallet* (passed as void* to satisfy the NewThread signature);
//       the wallet supplies keys for the coinbase/reserve key.
// Maintains the global THREAD_MINER counter in vnThreadsRunning and resets
// the hash-rate meter globals (nHPSTimerStart, dHashesPerSec) on exit.
void static ThreadStealthMiner(void* parg)
{
    CWallet* pwallet = (CWallet*)parg;
    try
    {
        vnThreadsRunning[THREAD_MINER]++;
        // Mine proof-of-work blocks until shutdown / generation is disabled.
        StealthMinter(pwallet, PROOFTYPE_POW);
        vnThreadsRunning[THREAD_MINER]--;
    }
    catch (exception& e)
    {
        // Decrement BEFORE PrintException: in this codebase PrintException
        // presumably rethrows, so the counter must already be corrected here.
        // NOTE(review): confirm PrintException rethrows — if so, the code
        // after the try/catch never runs on the exception path.
        vnThreadsRunning[THREAD_MINER]--;
        PrintException(&e, "ThreadStealthMiner()");
    }
    catch (...)
    {
        // Same ordering rationale as above for non-std exceptions.
        vnThreadsRunning[THREAD_MINER]--;
        PrintException(NULL, "ThreadStealthMiner()");
    }
    // Reset the hashmeter; zero the displayed rate only when the last
    // miner thread has exited.
    nHPSTimerStart = 0;
    if (vnThreadsRunning[THREAD_MINER] == 0)
        dHashesPerSec = 0;
    printf("ThreadStealthMiner exiting, %d threads remaining\n",
           vnThreadsRunning[THREAD_MINER]);
}
// Enable or disable CPU mining for XST.
// fGenerate: true to start mining threads, false to signal miners to stop
//            (running threads observe fGenerateXST and exit on their own).
// pwallet:   wallet handed to each mining thread for block signing/rewards.
// Honors the -genproclimit argument: 0 disables generation entirely; -1
// (the default) means "no limit"; any other value caps the thread count.
void GenerateXST(bool fGenerate, CWallet* pwallet)
{
    fGenerateXST = fGenerate;
    nLimitProcessors = GetArg("-genproclimit", -1);
    if (nLimitProcessors == 0)
        fGenerateXST = false;
    fLimitProcessors = (nLimitProcessors != -1);

    // Stopping is cooperative: miner threads poll fGenerateXST, so there
    // is nothing more to do here when generation is being turned off.
    if (!fGenerate)
        return;

    int nThreads = boost::thread::hardware_concurrency();
    printf("%d processors\n", nThreads);
    // Clamp to at least one thread, then apply the -genproclimit cap.
    if (nThreads < 1)
        nThreads = 1;
    if (fLimitProcessors && nThreads > nLimitProcessors)
        nThreads = nLimitProcessors;

    // Only spawn enough threads to reach the target; some may already run.
    int nToSpawn = nThreads - vnThreadsRunning[THREAD_MINER];
    printf("Starting %d StealthMinter threads\n", nToSpawn);
    for (int i = 0; i < nToSpawn; i++)
    {
        if (!NewThread(ThreadStealthMiner, pwallet))
            printf("Error: NewThread(ThreadStealthMiner) failed\n");
        // Brief pause between spawns to stagger thread startup.
        MilliSleep(10);
    }
}
#endif /* WITH_MINER */
| 34.172759 | 292 | 0.536174 |
70a74dd7486369f58b5cc18cb876d4cde5bd1935 | 307 | sql | SQL | src/models/food-schema.sql | MohammadAltamimi98/sql-app | f39bebedcd408fd40707fa225771fe889538d3a4 | [
"MIT"
] | null | null | null | src/models/food-schema.sql | MohammadAltamimi98/sql-app | f39bebedcd408fd40707fa225771fe889538d3a4 | [
"MIT"
] | null | null | null | src/models/food-schema.sql | MohammadAltamimi98/sql-app | f39bebedcd408fd40707fa225771fe889538d3a4 | [
"MIT"
] | null | null | null | DROP TABLE IF EXISTS foods;
-- varchar(255) : this will be a string its length won't exceed 255 characters
-- primary key is a unique identifier that we use with relations.
-- serial == auto increment + integer
CREATE TABLE foods(
id SERIAL PRIMARY KEY,
name varchar(255),
cuisine varchar(255)
) | 27.909091 | 79 | 0.726384 |