repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
bboyle/grunt-codepainter
Gruntfile.js
2116
/* * grunt-codepainter * https://github.com/bboyle/grunt-codepainter * * Copyright (c) 2014 Ben Boyle * Licensed under the MIT license. */ 'use strict'; module.exports = function(grunt) { // Project configuration. grunt.initConfig({ jshint: { options: { jshintrc: '.jshintrc' }, all: [ 'Gruntfile.js', 'package.json', 'tasks/*.js', '<%= nodeunit.tests %>' ] }, // Before generating any new files, remove any previously-created files. clean: { tests: ['tmp'] }, // Configuration to be run (and then tested). codepainter: { 'static': { options: { predef: 'idiomatic', style: { indent_style: 'tab' } }, files: { 'tmp/whitespace.js' : 'test/fixtures/whitespace.js', 'tmp/idiomatic.js' : 'test/fixtures/idiomatic.js' } }, dynamic: { options: { predef: 'idiomatic', style: { indent_style: 'tab' } }, files: [{ expand: true, cwd: 'test/fixtures/', src: ['*.js'], dest: 'tmp/dynamic/' }] }, editorConfig: { options: { editorConfig: true }, files: { 'tmp/editorconfig/whitespace.js' : 'test/fixtures/whitespace.js', 'tmp/editorconfig/idiomatic.js' : 'test/fixtures/idiomatic.js' } } }, // Unit tests. nodeunit: { tests: ['test/*_test.js'] } }); // Actually load this plugin's task(s). grunt.loadTasks('tasks'); // These plugins provide necessary tasks. grunt.loadNpmTasks('grunt-contrib-jshint'); grunt.loadNpmTasks('grunt-contrib-clean'); grunt.loadNpmTasks('grunt-contrib-nodeunit'); // Whenever the "test" task is run, first clean the "tmp" dir, then run this // plugin's task(s), then test the result. grunt.registerTask('test', ['clean', 'codepainter', 'nodeunit']); // By default, lint and run all tests. grunt.registerTask('default', ['jshint', 'test']); };
mit
infinity-square/angular-chosen
app/scripts/app.js
251
(function() { 'use strict'; /** * @ngdoc overview * @name infinity.angular-chosen * @description * # infinity.angular-chosen * * Main module of the application. */ angular .module('infinity.angular-chosen', []); })();
mit
EugeneZ/dungeonlords
packages/games/server/models/userGames.js
488
'use strict'; /** * Module dependencies. */ var mongoose = require('mongoose'), Schema = mongoose.Schema; var UserGamesSchema = new Schema({ user: { type: Schema.ObjectId, ref: 'User' }, games: [{ game: { type: Schema.ObjectId, ref: 'Game' }, players: [{ name: String }] // Leaving room for 'watched' games or whatever }] }); mongoose.model('UserGames', UserGamesSchema);
mit
TimHuangcheng/CI
application/home/controllers/Email.php
521
<?php defined('BASEPATH') OR exit('No direct script access allowed'); class Email extends MY_Controller { public function index() { //·¢ËÍÓʼþʾÀý $info = array( "user_name"=>'Tim', "data"=>$content, "to"=>'name@email.com' ); $this->load->service('s_email'); $resCode = $this->s_email->sendEmail($info,Constant::$template_forget_password_arr); if($resCode){ return Constant::SUCCESS; } } }
mit
bkahlert/seqan-research
raw/pmsb13/pmsb13-data-20130530/sources/caivjx2geotu92t0/2013-04-10T14-26-24.847+0200/sandbox/my_sandbox/apps/basic_seq_io_example/basic_seq_io_example.cpp
310
#include <iostream> #include <seqan/sequence.h> #include <seqan/seq_io.h> int main() { seqan::CharString id; seqan::Dna5String seq; //se seqan::SequenceStream seqStream("example.fa"); readRecord(id, seq, seqStream); std::cout << id << '\t' << seq << '\n'; return 0; }
mit
JohnLambe/JLCSUtils
JLCSUtils/MvpDemo/Heirarchical/TestHView.cs
524
using System; using System.Collections.Generic; using System.ComponentModel; using System.Drawing; using System.Data; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; using MvpFramework.WinForms; using MvpFramework; namespace MvpDemo.Heirarchical { public interface ITestHView : IView { } [View] public partial class TestHView : WindowViewBase, ITestHView { public TestHView() { InitializeComponent(); } } }
mit
rv8-io/rv8
src/gen/gen-switch.cc
5697
// // gen-switch.cc // #include <cstdio> #include <cstdlib> #include <functional> #include <algorithm> #include <memory> #include <string> #include <vector> #include <deque> #include <map> #include <set> #include "util.h" #include "cmdline.h" #include "model.h" #include "gen.h" std::vector<cmdline_option> rv_gen_switch::get_cmdline_options() { return std::vector<cmdline_option>{ { "-S", "--print-switch-h", cmdline_arg_type_none, "Print switch header", [&](std::string s) { return gen->set_option("print_switch_h"); } }, }; } static void print_switch_decoder_node(rv_gen *gen, rv_codec_node &node, size_t indent) { for (size_t i = 0; i < indent; i++) printf("\t"); printf("switch (%s) {\n", rv_meta_model::format_bitmask(node.bits, "inst", true).c_str()); for (auto &val : node.vals) { auto opcode_list = node.val_opcodes[val]; if (node.val_decodes[val].bits.size() == 0 && opcode_list.size() >= 1) { for (size_t i = 0; i < indent; i++) printf("\t"); if (val == rv_meta_model::DEFAULT) { printf("\tdefault: "); } else { printf("\tcase %lu: ", val); } // resolve distinct number of isa widths for this opcode std::vector<size_t> opcode_widths; for (auto opcode : opcode_list) { for (auto &ext : opcode->extensions) { if (std::find(opcode_widths.begin(), opcode_widths.end(), ext->isa_width) == opcode_widths.end()) { opcode_widths.push_back(ext->isa_width); } } } auto opcode = opcode_list.front(); // different opcodes that share encoding on different isa widths if (opcode_list.size() > 1 && opcode_list.size() == opcode_widths.size()) { // conditionals for different opcodes sharing encoding on different isa widths printf("\n"); for (auto oi = opcode_list.begin(); oi != opcode_list.end(); oi++) { auto opcode = *oi; for (size_t i = 0; i < indent; i++) printf("\t"); printf("\t\t%sif (%s && rv%lu) op = %s;\n", oi != opcode_list.begin() ? "else " : "", rv_meta_model::opcode_isa_shortname(opcode).c_str(), opcode->extensions.front()->isa_width, rv_meta_model::opcode_format("rv_op_", opcode, "_").c_str()); } for (size_t i = 0; i < indent; i++) printf("\t"); printf("\t\tbreak;\n"); } else { // if ambiguous, chooses first opcode if (opcode_widths.size() == 1) { printf("if (%s && rv%lu) op = %s; break;", rv_meta_model::opcode_isa_shortname(opcode).c_str(), opcode->extensions.front()->isa_width, rv_meta_model::opcode_format("rv_op_", opcode, "_").c_str()); } else { printf("if (%s) op = %s; break;", rv_meta_model::opcode_isa_shortname(opcode).c_str(), rv_meta_model::opcode_format("rv_op_", opcode, "_").c_str()); } // if ambiguous, add comment if (opcode_list.size() > 1) { printf(" //"); for (auto &opcode : opcode_list) { printf(" %s", opcode->name.c_str()); } } printf("\n"); } } else { for (size_t i = 0; i < indent; i++) printf("\t"); if (val == rv_meta_model::DEFAULT) { printf("\tdefault: "); } else { printf("\tcase %lu:\n", val); } for (size_t i = 0; i < indent; i++) printf("\t"); printf("\t\t//"); int count = 0; for (auto &opcode : opcode_list) { if (count++ == 12) { printf(" ..."); break; } printf(" %s", opcode->name.c_str()); } printf("\n"); if (node.val_decodes[val].bits.size() > 0) { print_switch_decoder_node(gen, node.val_decodes[val], indent + 2); } for (size_t i = 0; i < indent; i++) printf("\t"); printf("\t\tbreak;\n"); } } for (size_t i = 0; i < indent; i++) printf("\t"); printf("}\n"); } static void print_switch_h(rv_gen *gen) { printf(kCHeader, "switch.h"); printf("#ifndef rv_switch_h\n"); printf("#define rv_switch_h\n"); printf("\n"); // print opcode decoder std::vector<std::string> mnems = gen->get_inst_mnemonics(true, true); printf("/* Decode Instruction Opcode */\n\n"); printf("template <"); for (auto mi = mnems.begin(); mi != mnems.end(); mi++) { if (mi != mnems.begin()) printf(", "); printf("bool %s", mi->c_str()); } printf(">\n"); printf("inline opcode_t decode_inst_op(riscv::inst_t inst)\n"); printf("{\n"); printf("\topcode_t op = rv_op_illegal;\n"); print_switch_decoder_node(gen, gen->root_node, 1); printf("\treturn op;\n"); printf("}\n\n"); // print type decoder printf("/* Decode Instruction Type */\n\n"); printf("template <typename T>\n"); printf("inline void decode_inst_type(T &dec, riscv::inst_t inst)\n"); printf("{\n"); printf("\tdec.codec = rv_inst_codec[dec.op];\n"); printf("\tswitch (dec.codec) {\n"); for (auto &codec : gen->get_unique_codecs()) { printf("\t\tcase %-26s %-50s break;\n", format_string("rv_codec_%s:", codec.c_str()).c_str(), format_string("riscv::decode_%s(dec, inst);", codec.c_str()).c_str()); } printf("\t};\n"); printf("}\n\n"); // print encoder printf("/* Encode Instruction */\n\n"); printf("template <typename T>\n"); printf("inline riscv::inst_t encode_inst(T &dec)\n"); printf("{\n"); printf("\tdec.codec = rv_inst_codec[dec.op];\n"); printf("\triscv::inst_t inst = rv_inst_match[dec.op];\n"); printf("\tswitch (dec.codec) {\n"); for (auto &codec : gen->get_unique_codecs()) { printf("\t\tcase %-26s %-50s break;\n", format_string("rv_codec_%s:", codec.c_str()).c_str(), format_string("return inst |= riscv::encode_%s(dec);", codec.c_str()).c_str()); } printf("\t};\n"); printf("\treturn inst;\n"); printf("}\n"); printf("\n"); printf("#endif\n"); } void rv_gen_switch::generate() { if (gen->has_option("print_switch_h")) { gen->generate_codec(); print_switch_h(gen); } }
mit
johnnyreilly/proverb-angular
src/app/common/modalDialog.ts
1880
interface BootstrapDialogOptions { title: string; message: string; okText: string; cancelText: string; } interface BootstrapDialogScope extends ng.IScope { title: string; message: string; okText: string; cancelText: string; ok: () => void; cancel: () => void; } export const modalDialogServiceName = "modalDialog"; export class ModalDialogService { static $inject = ["$uibModal", "$templateCache"]; constructor( private $uibModal: ng.ui.bootstrap.IModalService) { } deleteDialog(message: string = "Delete item?") { const title = "Confirm"; return this.confirmationDialog(title, message); } confirmationDialog(title: string, msg: string, okText?: string, cancelText?: string) { const modalOptions = { controller: ModalInstance, keyboard: true, resolve: { options: () => ({ title: title, message: msg, okText: okText, cancelText: cancelText }) } as { [index: string]: any }, template: require("./modalDialog.html") }; return this.$uibModal.open(modalOptions).result; } } class ModalInstance { static $inject = ["$scope", "$uibModalInstance", "options"]; constructor ( $scope: BootstrapDialogScope, $uibModalInstance: ng.ui.bootstrap.IModalServiceInstance, options: BootstrapDialogOptions) { $scope.title = options.title || "Title"; $scope.message = options.message || ""; $scope.okText = options.okText || "OK"; $scope.cancelText = options.cancelText || "Cancel"; $scope.ok = function () { $uibModalInstance.close("ok"); }; $scope.cancel = function () { $uibModalInstance.dismiss("cancel"); }; } }
mit
ivelin1936/Studing-SoftUni-
Java WEB/Java MVC Frameworks - Spring/Exercise Unit Testing & Isolation/CarDealer/src/main/java/org/softuni/cardealer/service/CustomerServiceImpl.java
2187
package org.softuni.cardealer.service; import org.modelmapper.ModelMapper; import org.softuni.cardealer.domain.entities.Customer; import org.softuni.cardealer.domain.models.service.CustomerServiceModel; import org.softuni.cardealer.repository.CustomerRepository; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; @Service public class CustomerServiceImpl implements CustomerService { private final CustomerRepository customerRepository; private final ModelMapper modelMapper; @Autowired public CustomerServiceImpl(CustomerRepository customerRepository, ModelMapper modelMapper) { this.customerRepository = customerRepository; this.modelMapper = modelMapper; } @Override public CustomerServiceModel saveCustomer(CustomerServiceModel customerServiceModel) { Customer customer = this.modelMapper.map(customerServiceModel, Customer.class); customer = this.customerRepository.saveAndFlush(customer); return this.modelMapper.map(customer, CustomerServiceModel.class); } @Override public CustomerServiceModel editCustomer(CustomerServiceModel customerServiceModel) { Customer customer = this.customerRepository.findById(customerServiceModel.getId()).orElse(null); customer.setName(customerServiceModel.getName()); customer.setBirthDate(customerServiceModel.getBirthDate()); customer.setYoungDriver(customerServiceModel.isYoungDriver()); customer = this.customerRepository.saveAndFlush(customer); return this.modelMapper.map(customer, CustomerServiceModel.class); } @Override public CustomerServiceModel deleteCustomer(String id) { Customer customer = this.customerRepository.findById(id).orElse(null); this.customerRepository.delete(customer); return this.modelMapper.map(customer, CustomerServiceModel.class); } @Override public CustomerServiceModel findCustomerById(String id) { Customer customer = this.customerRepository.findById(id).orElse(null); return this.modelMapper.map(customer, CustomerServiceModel.class); } }
mit
android-samples/android-hello
src/jp/clockup/hello/MainActivity.java
1066
package jp.clockup.hello; import android.app.Activity; import android.os.Bundle; import android.view.Menu; import android.view.MenuItem; import android.widget.TextView; public class MainActivity extends Activity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); // setText TextView textView = (TextView)findViewById(R.id.textView1); textView.setText("HELLO WORLD!"); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); if (id == R.id.action_settings) { return true; } return super.onOptionsItemSelected(item); } }
mit
mschmidae/analysis-model
src/test/java/edu/hm/hafner/ArchitectureRulesTest.java
4023
package edu.hm.hafner; import javax.xml.parsers.SAXParser; import org.apache.commons.digester3.Digester; import org.apache.commons.digester3.binder.DigesterLoader; import org.junit.jupiter.api.Test; import org.xml.sax.XMLReader; import com.tngtech.archunit.base.DescribedPredicate; import com.tngtech.archunit.core.domain.JavaCall; import com.tngtech.archunit.core.domain.JavaClasses; import com.tngtech.archunit.core.domain.JavaModifier; import com.tngtech.archunit.core.importer.ClassFileImporter; import com.tngtech.archunit.lang.ArchRule; import static com.tngtech.archunit.lang.syntax.ArchRuleDefinition.*; import edu.hm.hafner.util.VisibleForTesting; /** * Defines several architecture rules for the static analysis model and parsers. * * @author Ullrich Hafner */ class ArchitectureRulesTest { private static final DescribedPredicate<JavaCall<?>> ACCESS_IS_RESTRICTED_TO_TESTS = new AccessRestrictedToTests(); /** * Digester must not be used directly, rather use a SecureDigester instance. */ @Test void shouldNotCreateDigesterWithConstructor() { JavaClasses classes = getAnalysisModelClasses(); ArchRule noDigesterConstructor = noClasses().that().dontHaveSimpleName("SecureDigester") .should().callConstructor(Digester.class) .orShould().callConstructor(Digester.class, SAXParser.class) .orShould().callConstructor(Digester.class, XMLReader.class) .orShould().callMethod(DigesterLoader.class, "newDigester"); noDigesterConstructor.check(classes); } /** * Methods or constructors that are annotated with {@link VisibleForTesting} must not be called by other classes. * These methods are meant to be {@code private}. Only test classes are allowed to call these methods. */ @Test void shouldNotCallVisibleForTestingOutsideOfTest() { JavaClasses classes = new ClassFileImporter().importPackages("io.jenkins.plugins.analysis"); ArchRule noTestApiCalled = noClasses() .that().haveSimpleNameNotEndingWith("Test") .should().callCodeUnitWhere(ACCESS_IS_RESTRICTED_TO_TESTS); noTestApiCalled.check(classes); } /** * Test classes should not be public (Junit 5). */ @Test void shouldNotUsePublicInTestCases() { JavaClasses classes = getAnalysisModelClasses(); ArchRule noPublicClasses = noClasses() .that().dontHaveModifier(JavaModifier.ABSTRACT) .and().haveSimpleNameEndingWith("Test") .and().dontHaveSimpleName("IssueTest") .and().dontHaveSimpleName("EclipseParserTest") // base class for warnings-plugin .and().dontHaveSimpleName("Pep8ParserTest") // base class for warnings-plugin .should().bePublic(); noPublicClasses.check(classes); } /** * Prevents that deprecated classes from transitive dependencies are called. */ @Test void shouldNotCallCommonsLang() { JavaClasses classes = getAnalysisModelClasses(); ArchRule noTestApiCalled = noClasses() .should().accessClassesThat().resideInAPackage("org.apache.commons.lang.."); noTestApiCalled.check(classes); } private JavaClasses getAnalysisModelClasses() { return new ClassFileImporter().importPackages("edu.hm.hafner"); } /** * Matches if a call from outside the defining class uses a method or constructor annotated with * {@link VisibleForTesting}. */ private static class AccessRestrictedToTests extends DescribedPredicate<JavaCall<?>> { AccessRestrictedToTests() { super("access is restricted to tests"); } @Override public boolean apply(final JavaCall<?> input) { return input.getTarget().isAnnotatedWith(VisibleForTesting.class) && !input.getOriginOwner().equals(input.getTargetOwner()); } } }
mit
annex-apps/tenant-bundle
BrightpearlApiClient/lib/Model/OrderRowComposition.php
5938
<?php /** * OrderRowComposition * * PHP version 5 * * @category Class * @package BrightpearlApiClient * @author http://github.com/swagger-api/swagger-codegen * @license http://www.apache.org/licenses/LICENSE-2.0 Apache Licene v2 * @link https://github.com/swagger-api/swagger-codegen */ /** * Copyright 2016 SmartBear Software * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen * Do not edit the class manually. */ namespace BrightpearlApiClient\Model; use \ArrayAccess; /** * OrderRowComposition Class Doc Comment * * @category Class * @description * @package BrightpearlApiClient * @author http://github.com/swagger-api/swagger-codegen * @license http://www.apache.org/licenses/LICENSE-2.0 Apache Licene v2 * @link https://github.com/swagger-api/swagger-codegen */ class OrderRowComposition implements ArrayAccess { /** * Array of property to type mappings. Used for (de)serialization * @var string[] */ static $swaggerTypes = array( 'bundle_parent' => 'bool', 'bundle_child' => 'bool', 'parent_order_row_id' => 'int' ); /** * Array of attributes where the key is the local name, and the value is the original name * @var string[] */ static $attributeMap = array( 'bundle_parent' => 'bundleParent', 'bundle_child' => 'bundleChild', 'parent_order_row_id' => 'parentOrderRowId' ); /** * Array of attributes to setter functions (for deserialization of responses) * @var string[] */ static $setters = array( 'bundle_parent' => 'setBundleParent', 'bundle_child' => 'setBundleChild', 'parent_order_row_id' => 'setParentOrderRowId' ); /** * Array of attributes to getter functions (for serialization of requests) * @var string[] */ static $getters = array( 'bundle_parent' => 'getBundleParent', 'bundle_child' => 'getBundleChild', 'parent_order_row_id' => 'getParentOrderRowId' ); /** * $bundle_parent * @var bool */ protected $bundle_parent; /** * $bundle_child * @var bool */ protected $bundle_child; /** * $parent_order_row_id * @var int */ protected $parent_order_row_id; /** * Constructor * @param mixed[] $data Associated array of property value initalizing the model */ public function __construct(array $data = null) { if ($data != null) { $this->bundle_parent = $data["bundle_parent"]; $this->bundle_child = $data["bundle_child"]; $this->parent_order_row_id = $data["parent_order_row_id"]; } } /** * Gets bundle_parent * @return bool */ public function getBundleParent() { return $this->bundle_parent; } /** * Sets bundle_parent * @param bool $bundle_parent * @return $this */ public function setBundleParent($bundle_parent) { $this->bundle_parent = $bundle_parent; return $this; } /** * Gets bundle_child * @return bool */ public function getBundleChild() { return $this->bundle_child; } /** * Sets bundle_child * @param bool $bundle_child * @return $this */ public function setBundleChild($bundle_child) { $this->bundle_child = $bundle_child; return $this; } /** * Gets parent_order_row_id * @return int */ public function getParentOrderRowId() { return $this->parent_order_row_id; } /** * Sets parent_order_row_id * @param int $parent_order_row_id * @return $this */ public function setParentOrderRowId($parent_order_row_id) { $this->parent_order_row_id = $parent_order_row_id; return $this; } /** * Returns true if offset exists. False otherwise. * @param integer $offset Offset * @return boolean */ public function offsetExists($offset) { return isset($this->$offset); } /** * Gets offset. * @param integer $offset Offset * @return mixed */ public function offsetGet($offset) { return $this->$offset; } /** * Sets value based on offset. * @param integer $offset Offset * @param mixed $value Value to be set * @return void */ public function offsetSet($offset, $value) { $this->$offset = $value; } /** * Unsets offset. * @param integer $offset Offset * @return void */ public function offsetUnset($offset) { unset($this->$offset); } /** * Gets the string presentation of the object * @return string */ public function __toString() { if (defined('JSON_PRETTY_PRINT')) { return json_encode(\BrightpearlApiClient\ObjectSerializer::sanitizeForSerialization($this), JSON_PRETTY_PRINT); } else { return json_encode(\BrightpearlApiClient\ObjectSerializer::sanitizeForSerialization($this)); } } }
mit
Kiprosh/jtextshorten
src/best_in_place.js
21840
/* BestInPlace (for jQuery) version: 0.1.0 (01/01/2011) @requires jQuery >= v1.4 @requires jQuery.purr to display pop-up windows By Bernat Farrero based on the work of Jan Varwig. Examples at http://bernatfarrero.com Licensed under the MIT: http://www.opensource.org/licenses/mit-license.php Usage: Attention. The format of the JSON object given to the select inputs is the following: [["key", "value"],["key", "value"]] The format of the JSON object given to the checkbox inputs is the following: ["falseValue", "trueValue"] */ /*global jQuery */ //= require jquery.elastic function BestInPlaceEditor(e) { 'use strict'; this.element = e; this.initOptions(); this.bindForm(); this.initNil(); jQuery(this.activator).bind('click', {editor: this}, this.clickHandler); } BestInPlaceEditor.prototype = { // Public Interface Functions ////////////////////////////////////////////// activate : function () { 'use strict'; var to_display = ""; if (this.isNil()) { to_display = ""; } else if (this.original_content) { to_display = this.original_content; } else { if (this.sanitize) { to_display = this.element.text(); } else { to_display = this.element.html().replace('&amp;', '&'); } } this.oldValue = this.isNil() ? "" : this.element.html(); this.display_value = to_display; jQuery(this.activator).unbind("click", this.clickHandler); this.activateForm(); this.element.trigger(jQuery.Event("best_in_place:activate")); }, abort : function() { this.activateText(this.oldValue); jQuery(this.activator).bind('click', {editor: this}, this.clickHandler); this.element.trigger(jQuery.Event("best_in_place:abort")); this.element.trigger(jQuery.Event("best_in_place:deactivate")); }, abortIfConfirm : function () { if (!this.useConfirm) { this.abort(); return; } if (confirm("Are you sure you want to discard your changes?")) { this.abort(); } }, update : function() { var editor = this; if (this.formType in {"input":1, "textarea":1} && this.getValue() == this.oldValue) { // Avoid request if no change is made this.abort(); return true; } editor.ajax({ "type" : "post", "dataType" : "text", "data" : editor.requestData(), "success" : function(data){ editor.loadSuccessCallback(data); }, "error" : function(request, error){ editor.loadErrorCallback(request, error); } }); if (this.formType == "select") { var value = this.getValue(); this.previousCollectionValue = value; jQuery.each(this.values, function(i, v) { if (value == v[0]) { editor.element.html(v[1]); } } ); } else if (this.formType == "checkbox") { editor.element.html(this.getValue() ? this.values[1] : this.values[0]); } else { if (this.getValue() !== "") { this.sanitize ? editor.element.text(this.getValue()) : editor.element.html(this.getValue()); } else { editor.element.html(this.nil); } } editor.element.trigger(jQuery.Event("best_in_place:update")); }, activateForm : function() { alert("The form was not properly initialized. activateForm is unbound"); }, activateText : function(value){ this.element.html(value); if(this.isNil()) this.element.html(this.nil); }, // Helper Functions //////////////////////////////////////////////////////// initOptions : function() { // Try parent supplied info var self = this; self.element.parents().each(function(){ $parent = jQuery(this); self.url = self.url || $parent.attr("data-url"); self.collection = self.collection || $parent.attr("data-collection"); self.formType = self.formType || $parent.attr("data-type"); self.objectName = self.objectName || $parent.attr("data-object"); self.attributeName = self.attributeName || $parent.attr("data-attribute"); self.activator = self.activator || $parent.attr("data-activator"); self.okButton = self.okButton || $parent.attr("data-ok-button"); self.okButtonClass = self.okButtonClass || $parent.attr("data-ok-button-class"); self.cancelButton = self.cancelButton || $parent.attr("data-cancel-button"); self.cancelButtonClass = self.cancelButtonClass || $parent.attr("data-cancel-button-class"); self.nil = self.nil || $parent.attr("data-nil"); self.inner_class = self.inner_class || $parent.attr("data-inner-class"); self.html_attrs = self.html_attrs || $parent.attr("data-html-attrs"); self.original_content = self.original_content || $parent.attr("data-original-content"); self.collectionValue = self.collectionValue || $parent.attr("data-value"); }); // Try Rails-id based if parents did not explicitly supply something self.element.parents().each(function(){ var res = this.id.match(/^(\w+)_(\d+)$/i); if (res) { self.objectName = self.objectName || res[1]; } }); // Load own attributes (overrides all others) self.url = self.element.attr("data-url") || self.url || document.location.pathname; self.collection = self.element.attr("data-collection") || self.collection; self.formType = self.element.attr("data-type") || self.formtype || "input"; self.objectName = self.element.attr("data-object") || self.objectName; self.attributeName = self.element.attr("data-attribute") || self.attributeName; self.activator = self.element.attr("data-activator") || self.element; self.okButton = self.element.attr("data-ok-button") || self.okButton; self.okButtonClass = self.element.attr("data-ok-button-class") || self.okButtonClass || ""; self.cancelButton = self.element.attr("data-cancel-button") || self.cancelButton; self.cancelButtonClass = self.element.attr("data-cancel-button-class") || self.cancelButtonClass || ""; self.nil = self.element.attr("data-nil") || self.nil || "—"; self.inner_class = self.element.attr("data-inner-class") || self.inner_class || null; self.html_attrs = self.element.attr("data-html-attrs") || self.html_attrs; self.original_content = self.element.attr("data-original-content") || self.original_content; self.collectionValue = self.element.attr("data-value") || self.collectionValue; if (!self.element.attr("data-sanitize")) { self.sanitize = true; } else { self.sanitize = (self.element.attr("data-sanitize") == "true"); } if (!self.element.attr("data-use-confirm")) { self.useConfirm = true; } else { self.useConfirm = (self.element.attr("data-use-confirm") != "false"); } if ((self.formType == "select" || self.formType == "checkbox") && self.collection !== null) { self.values = jQuery.parseJSON(self.collection); } }, bindForm : function() { this.activateForm = BestInPlaceEditor.forms[this.formType].activateForm; this.getValue = BestInPlaceEditor.forms[this.formType].getValue; }, initNil: function() { if (this.element.html() === "") { this.element.html(this.nil); } }, isNil: function() { // TODO: It only work when form is deactivated. // Condition will fail when form is activated return this.element.html() === "" || this.element.html() === this.nil; }, getValue : function() { alert("The form was not properly initialized. getValue is unbound"); }, // Trim and Strips HTML from text sanitizeValue : function(s) { return jQuery.trim(s); }, /* Generate the data sent in the POST request */ requestData : function() { // To prevent xss attacks, a csrf token must be defined as a meta attribute csrf_token = jQuery('meta[name=csrf-token]').attr('content'); csrf_param = jQuery('meta[name=csrf-param]').attr('content'); var data = "_method=put"; data += "&" + this.objectName + '[' + this.attributeName + ']=' + encodeURIComponent(this.getValue()); if (csrf_param !== undefined && csrf_token !== undefined) { data += "&" + csrf_param + "=" + encodeURIComponent(csrf_token); } return data; }, ajax : function(options) { options.url = this.url; options.beforeSend = function(xhr){ xhr.setRequestHeader("Accept", "application/json"); }; return jQuery.ajax(options); }, // Handlers //////////////////////////////////////////////////////////////// loadSuccessCallback : function(data) { data = jQuery.trim(data); if(data && data!=""){ var response = jQuery.parseJSON(jQuery.trim(data)); if (response !== null && response.hasOwnProperty("display_as")) { this.element.attr("data-original-content", this.element.text()); this.original_content = this.element.text(); this.element.html(response["display_as"]); } this.element.trigger(jQuery.Event("best_in_place:success"), data); this.element.trigger(jQuery.Event("ajax:success"), data); } else { this.element.trigger(jQuery.Event("best_in_place:success")); this.element.trigger(jQuery.Event("ajax:success")); } // Binding back after being clicked jQuery(this.activator).bind('click', {editor: this}, this.clickHandler); this.element.trigger(jQuery.Event("best_in_place:deactivate")); if (this.collectionValue !== null && this.formType == "select") { this.collectionValue = this.previousCollectionValue; this.previousCollectionValue = null; } }, loadErrorCallback : function(request, error) { this.activateText(this.oldValue); this.element.trigger(jQuery.Event("best_in_place:error"), [request, error]); this.element.trigger(jQuery.Event("ajax:error"), request, error); // Binding back after being clicked jQuery(this.activator).bind('click', {editor: this}, this.clickHandler); this.element.trigger(jQuery.Event("best_in_place:deactivate")); }, clickHandler : function(event) { event.preventDefault(); event.data.editor.activate(); }, setHtmlAttributes : function() { var formField = this.element.find(this.formType); if(this.html_attrs){ var attrs = jQuery.parseJSON(this.html_attrs); for(var key in attrs){ formField.attr(key, attrs[key]); } } } }; // Button cases: // If no buttons, then blur saves, ESC cancels // If just Cancel button, then blur saves, ESC or clicking Cancel cancels (careful of blur event!) // If just OK button, then clicking OK saves (careful of blur event!), ESC or blur cancels // If both buttons, then clicking OK saves, ESC or clicking Cancel or blur cancels BestInPlaceEditor.forms = { "input" : { activateForm : function() { var output = jQuery(document.createElement('form')) .addClass('form_in_place') .attr('action', 'javascript:void(0);') .attr('style', 'display:inline'); var input_elt = jQuery(document.createElement('input')) .attr('type', 'text') .attr('name', this.attributeName) .val(this.display_value); if(this.inner_class !== null) { input_elt.addClass(this.inner_class); } output.append(input_elt); if(this.okButton) { output.append( jQuery(document.createElement('input')) .attr('type', 'submit') .attr('class', this.okButtonClass) .attr('value', this.okButton) ) } if(this.cancelButton) { output.append( jQuery(document.createElement('input')) .attr('type', 'button') .attr('class', this.cancelButtonClass) .attr('value', this.cancelButton) ) } this.element.html(output); this.setHtmlAttributes(); this.element.find("input[type='text']")[0].select(); this.element.find("form").bind('submit', {editor: this}, BestInPlaceEditor.forms.input.submitHandler); if (this.cancelButton) { this.element.find("input[type='button']").bind('click', {editor: this}, BestInPlaceEditor.forms.input.cancelButtonHandler); } this.element.find("input[type='text']").bind('blur', {editor: this}, BestInPlaceEditor.forms.input.inputBlurHandler); this.element.find("input[type='text']").bind('keyup', {editor: this}, BestInPlaceEditor.forms.input.keyupHandler); this.blurTimer = null; this.userClicked = false; }, getValue : function() { return this.sanitizeValue(this.element.find("input").val()); }, // When buttons are present, use a timer on the blur event to give precedence to clicks inputBlurHandler : function(event) { if (event.data.editor.okButton) { event.data.editor.blurTimer = setTimeout(function () { if (!event.data.editor.userClicked) { event.data.editor.abort(); } }, 500); } else { if (event.data.editor.cancelButton) { event.data.editor.blurTimer = setTimeout(function () { if (!event.data.editor.userClicked) { event.data.editor.update(); } }, 500); } else { event.data.editor.update(); } } }, submitHandler : function(event) { event.data.editor.userClicked = true; clearTimeout(event.data.editor.blurTimer); event.data.editor.update(); }, cancelButtonHandler : function(event) { event.data.editor.userClicked = true; clearTimeout(event.data.editor.blurTimer); event.data.editor.abort(); event.stopPropagation(); // Without this, click isn't handled }, keyupHandler : function(event) { if (event.keyCode == 27) { event.data.editor.abort(); } } }, "date" : { activateForm : function() { var that = this, output = jQuery(document.createElement('form')) .addClass('form_in_place') .attr('action', 'javascript:void(0);') .attr('style', 'display:inline'), input_elt = jQuery(document.createElement('input')) .attr('type', 'text') .attr('name', this.attributeName) .attr('value', this.sanitizeValue(this.display_value)); if(this.inner_class !== null) { input_elt.addClass(this.inner_class); } output.append(input_elt) this.element.html(output); this.setHtmlAttributes(); this.element.find('input')[0].select(); this.element.find("form").bind('submit', {editor: this}, BestInPlaceEditor.forms.input.submitHandler); this.element.find("input").bind('keyup', {editor: this}, BestInPlaceEditor.forms.input.keyupHandler); this.element.find('input') .datepicker({ onClose: function() { that.update(); } }) .datepicker('show'); }, getValue : function() { return this.sanitizeValue(this.element.find("input").val()); }, submitHandler : function(event) { event.data.editor.update(); }, keyupHandler : function(event) { if (event.keyCode == 27) { event.data.editor.abort(); } } }, "select" : { activateForm : function() { var output = jQuery(document.createElement('form')) .attr('action', 'javascript:void(0)') .attr('style', 'display:inline'); selected = '', oldValue = this.oldValue, select_elt = jQuery(document.createElement('select')) .attr('class', this.inned_class !== null ? this.inner_class : '' ), currentCollectionValue = this.collectionValue; jQuery.each(this.values, function (index, value) { var option_elt = jQuery(document.createElement('option')) // .attr('value', value[0]) .val(value[0]) .html(value[1]); if(value[0] == currentCollectionValue) { option_elt.attr('selected', 'selected'); } select_elt.append(option_elt); }); output.append(select_elt); this.element.html(output); this.setHtmlAttributes(); this.element.find("select").bind('change', {editor: this}, BestInPlaceEditor.forms.select.blurHandler); this.element.find("select").bind('blur', {editor: this}, BestInPlaceEditor.forms.select.blurHandler); this.element.find("select").bind('keyup', {editor: this}, BestInPlaceEditor.forms.select.keyupHandler); this.element.find("select")[0].focus(); }, getValue : function() { return this.sanitizeValue(this.element.find("select").val()); // return this.element.find("select").val(); }, blurHandler : function(event) { event.data.editor.update(); }, keyupHandler : function(event) { if (event.keyCode == 27) event.data.editor.abort(); } }, "checkbox" : { activateForm : function() { this.collectionValue = !this.getValue(); this.setHtmlAttributes(); this.update(); }, getValue : function() { return this.collectionValue; } }, "textarea" : { activateForm : function() { // grab width and height of text width = this.element.css('width'); height = this.element.css('height'); // construct form var output = jQuery(document.createElement('form')) .addClass('form_in_place') .attr('action', 'javascript:void(0);') .attr('style', 'display:inline'); var textarea_elt = jQuery(document.createElement('textarea')) .val(this.sanitizeValue(this.display_value)); if(this.inner_class !== null) { textarea_elt.addClass(this.inner_class); } output.append(textarea_elt); if(this.okButton) { output.append( jQuery(document.createElement('input')) .attr('type', 'submit') .attr('value', this.okButton) .attr('class', this.okButtonClass) ); } if(this.cancelButton) { output.append( jQuery(document.createElement('input')) .attr('type', 'button') .attr('value', this.cancelButton) .attr('class', this.cancelButtonClass) ) } this.element.html(output); this.setHtmlAttributes(); // set width and height of textarea jQuery(this.element.find("textarea")[0]).css({ 'min-width': width, 'min-height': height }); jQuery(this.element.find("textarea")[0]).elastic(); this.element.find("textarea")[0].focus(); this.element.find("form").bind('submit', {editor: this}, BestInPlaceEditor.forms.textarea.submitHandler); if (this.cancelButton) { this.element.find("input[type='button']").bind('click', {editor: this}, BestInPlaceEditor.forms.textarea.cancelButtonHandler); } this.element.find("textarea").bind('blur', {editor: this}, BestInPlaceEditor.forms.textarea.blurHandler); this.element.find("textarea").bind('keyup', {editor: this}, BestInPlaceEditor.forms.textarea.keyupHandler); this.blurTimer = null; this.userClicked = false; }, getValue : function() { return this.sanitizeValue(this.element.find("textarea").val()); }, // When buttons are present, use a timer on the blur event to give precedence to clicks blurHandler : function(event) { if (event.data.editor.okButton) { event.data.editor.blurTimer = setTimeout(function () { if (!event.data.editor.userClicked) { event.data.editor.abortIfConfirm(); } }, 500); } else { if (event.data.editor.cancelButton) { event.data.editor.blurTimer = setTimeout(function () { if (!event.data.editor.userClicked) { event.data.editor.update(); } }, 500); } else { event.data.editor.update(); } } }, submitHandler : function(event) { event.data.editor.userClicked = true; clearTimeout(event.data.editor.blurTimer); event.data.editor.update(); }, cancelButtonHandler : function(event) { event.data.editor.userClicked = true; clearTimeout(event.data.editor.blurTimer); event.data.editor.abortIfConfirm(); event.stopPropagation(); // Without this, click isn't handled }, keyupHandler : function(event) { if (event.keyCode == 27) { event.data.editor.abortIfConfirm(); } } } }; jQuery.fn.best_in_place = function() { function setBestInPlace(element) { if (!element.data('bestInPlaceEditor')) { element.data('bestInPlaceEditor', new BestInPlaceEditor(element)); return true; } } jQuery(this.context).delegate(this.selector, 'click', function () { var el = jQuery(this); if (setBestInPlace(el)) el.click(); }); this.each(function () { setBestInPlace(jQuery(this)); }); return this; };
mit
lizardschool/wordbook
tests/test_domain_translation.py
436
from wordbook.domain.models import Translation def test_translation_dto(): t = Translation( id=1, from_language='en', into_language='pl', word='apple', ipa='ejpyl', simplified='epyl', translated='jabłko', ) assert t.dto_autocomplete() == dict( id=1, word='apple', translation='jabłko', ipa='ejpyl', simplified='epyl', )
mit
yinlianwei/mylib
app/cache/dev/twig/6a/1a/b2cbfd05f0e2612a444b6664cd6e.php
5773
<?php /* AcmeDemoBundle::layout.html.twig */ class __TwigTemplate_6a1ab2cbfd05f0e2612a444b6664cd6e extends Twig_Template { public function __construct(Twig_Environment $env) { parent::__construct($env); $this->parent = false; $this->blocks = array( 'title' => array($this, 'block_title'), 'content_header' => array($this, 'block_content_header'), 'content_header_more' => array($this, 'block_content_header_more'), 'content' => array($this, 'block_content'), ); } protected function doDisplay(array $context, array $blocks = array()) { // line 1 echo "<!DOCTYPE html> <html lang=\"en\"> <head> <meta charset=\"UTF-8\" /> <title>"; // line 5 $this->displayBlock('title', $context, $blocks); echo "</title> <link rel=\"icon\" sizes=\"16x16\" href=\""; // line 6 echo twig_escape_filter($this->env, $this->env->getExtension('assets')->getAssetUrl("favicon.ico"), "html", null, true); echo "\" /> <link rel=\"stylesheet\" href=\""; // line 7 echo twig_escape_filter($this->env, $this->env->getExtension('assets')->getAssetUrl("bundles/acmedemo/css/demo.css"), "html", null, true); echo "\" /> </head> <body> <div id=\"symfony-wrapper\"> <div id=\"symfony-header\"> <a href=\""; // line 12 echo twig_escape_filter($this->env, $this->env->getExtension('routing')->getPath("_welcome"), "html", null, true); echo "\"> <img src=\""; // line 13 echo twig_escape_filter($this->env, $this->env->getExtension('assets')->getAssetUrl("bundles/acmedemo/images/logo.gif"), "html", null, true); echo "\" alt=\"Symfony logo\" /> </a> <form id=\"symfony-search\" method=\"GET\" action=\"http://symfony.com/search\"> <label for=\"symfony-search-field\"><span>Search on Symfony Website</span></label> <input name=\"q\" id=\"symfony-search-field\" type=\"search\" placeholder=\"Search on Symfony website\" class=\"medium_txt\" /> <input type=\"submit\" class=\"symfony-button-grey\" value=\"OK\" /> </form> </div> "; // line 22 $context['_parent'] = (array) $context; $context['_seq'] = twig_ensure_traversable($this->getAttribute($this->getAttribute($this->getAttribute($this->getContext($context, "app"), "session"), "flashbag"), "get", array(0 => "notice"), "method")); foreach ($context['_seq'] as $context["_key"] => $context["flashMessage"]) { // line 23 echo " <div class=\"flash-message\"> <em>Notice</em>: "; // line 24 echo twig_escape_filter($this->env, $this->getContext($context, "flashMessage"), "html", null, true); echo " </div> "; } $_parent = $context['_parent']; unset($context['_seq'], $context['_iterated'], $context['_key'], $context['flashMessage'], $context['_parent'], $context['loop']); $context = array_merge($_parent, array_intersect_key($context, $_parent)); // line 27 echo " "; // line 28 $this->displayBlock('content_header', $context, $blocks); // line 37 echo " <div class=\"symfony-content\"> "; // line 39 $this->displayBlock('content', $context, $blocks); // line 41 echo " </div> "; // line 43 if (array_key_exists("code", $context)) { // line 44 echo " <h2>Code behind this page</h2> <div class=\"symfony-content\">"; // line 45 echo $this->getContext($context, "code"); echo "</div> "; } // line 47 echo " </div> </body> </html> "; } // line 5 public function block_title($context, array $blocks = array()) { echo "Demo Bundle"; } // line 28 public function block_content_header($context, array $blocks = array()) { // line 29 echo " <ul id=\"menu\"> "; // line 30 $this->displayBlock('content_header_more', $context, $blocks); // line 33 echo " </ul> <div style=\"clear: both\"></div> "; } // line 30 public function block_content_header_more($context, array $blocks = array()) { // line 31 echo " <li><a href=\""; echo twig_escape_filter($this->env, $this->env->getExtension('routing')->getPath("_demo"), "html", null, true); echo "\">Demo Home</a></li> "; } // line 39 public function block_content($context, array $blocks = array()) { // line 40 echo " "; } public function getTemplateName() { return "AcmeDemoBundle::layout.html.twig"; } public function isTraitable() { return false; } public function getDebugInfo() { return array ( 141 => 40, 138 => 39, 131 => 31, 128 => 30, 121 => 33, 119 => 30, 116 => 29, 113 => 28, 107 => 5, 100 => 47, 95 => 45, 92 => 44, 90 => 43, 86 => 41, 84 => 39, 80 => 37, 78 => 28, 75 => 27, 66 => 24, 63 => 23, 59 => 22, 47 => 13, 43 => 12, 35 => 7, 31 => 6, 27 => 5, 21 => 1, 54 => 17, 48 => 14, 45 => 13, 39 => 11, 37 => 10, 29 => 4, 26 => 3,); } }
mit
ugent-cros/cros-core
drone-api/src/droneapi/model/properties/Rotation.java
784
package droneapi.model.properties; import java.io.Serializable; /** * Created by Cedric on 3/10/2015. */ public class Rotation implements Serializable{ private double roll; private double pitch; private double yaw; /*** * Orientation of drone in a 3D plane * @param roll Angle left/right in radians * @param pitch Angle forward/backward in radians * @param yaw Rotation relative to takeoff orientation in radians */ public Rotation(double roll, double pitch, double yaw) { this.roll = roll; this.pitch = pitch; this.yaw = yaw; } public double getRoll() { return roll; } public double getPitch() { return pitch; } public double getYaw() { return yaw; } }
mit
arvjus/fintrack-java
fintrack-web/src/main/java/org/zv/fintrack/util/Messages.java
1235
package org.zv.fintrack.util; import java.text.MessageFormat; import java.util.MissingResourceException; import java.util.ResourceBundle; import java.util.Locale; /** * Class provides access to message bundle. * * @author arvid.juskaitis */ public class Messages { /** * Class loader. * * @param defaultObject * @return */ protected static ClassLoader getCurrentClassLoader(Object defaultObject) { ClassLoader loader = Thread.currentThread().getContextClassLoader(); if (loader == null) { loader = defaultObject.getClass().getClassLoader(); } return loader; } /** * Retrieve and form a message. * * @param bundleName * @param locale * @param key * @param params * @return */ public static String getMessageResourceString(String bundleName, Locale locale, String key, Object[] params) { ResourceBundle bundle = ResourceBundle.getBundle(bundleName, locale, getCurrentClassLoader(params)); String text = null; try { text = bundle.getString(key); } catch (MissingResourceException e) { text = key; } if (params != null) { MessageFormat mf = new MessageFormat(text, locale); text = mf.format(params, new StringBuffer(), null).toString(); } return text; } }
mit
nessie1980/SharePortfolioManager
SharePortfolioManager/Forms/SalesForm/UsedBuyDetailsList/UserBuyDetailsList.Designer.cs
7851
//MIT License // //Copyright(c) 2017 - 2021 nessie1980(nessie1980 @gmx.de) // //Permission is hereby granted, free of charge, to any person obtaining a copy //of this software and associated documentation files (the "Software"), to deal //in the Software without restriction, including without limitation the rights //to use, copy, modify, merge, publish, distribute, sublicense, and/or sell //copies of the Software, and to permit persons to whom the Software is //furnished to do so, subject to the following conditions: // //The above copyright notice and this permission notice shall be included in all //copies or substantial portions of the Software. // //THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR //IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, //FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE //AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER //LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, //OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE //SOFTWARE. namespace SharePortfolioManager.SalesForm.UsedBuyDetailsList { partial class UsedBuyDetailsList { /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.IContainer components = null; /// <summary> /// Clean up any resources being used. /// </summary> /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param> protected override void Dispose(bool disposing) { if (disposing && (components != null)) { components.Dispose(); } base.Dispose(disposing); } #region Windows Form Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { this.tblLayPnlOnwMessageBox = new System.Windows.Forms.TableLayoutPanel(); this.btnOk = new System.Windows.Forms.Button(); this.rchTxtBoxUsedBuyDetails = new System.Windows.Forms.RichTextBox(); this.grpBoxUsedBuyDetails = new System.Windows.Forms.GroupBox(); this.tblLayPnlOnwMessageBox.SuspendLayout(); this.grpBoxUsedBuyDetails.SuspendLayout(); this.SuspendLayout(); // // tblLayPnlOnwMessageBox // this.tblLayPnlOnwMessageBox.ColumnCount = 3; this.tblLayPnlOnwMessageBox.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Percent, 100F)); this.tblLayPnlOnwMessageBox.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Absolute, 180F)); this.tblLayPnlOnwMessageBox.ColumnStyles.Add(new System.Windows.Forms.ColumnStyle(System.Windows.Forms.SizeType.Absolute, 180F)); this.tblLayPnlOnwMessageBox.Controls.Add(this.btnOk, 2, 1); this.tblLayPnlOnwMessageBox.Controls.Add(this.rchTxtBoxUsedBuyDetails, 0, 0); this.tblLayPnlOnwMessageBox.Dock = System.Windows.Forms.DockStyle.Fill; this.tblLayPnlOnwMessageBox.Location = new System.Drawing.Point(3, 18); this.tblLayPnlOnwMessageBox.Name = "tblLayPnlOnwMessageBox"; this.tblLayPnlOnwMessageBox.RowCount = 1; this.tblLayPnlOnwMessageBox.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Percent, 100F)); this.tblLayPnlOnwMessageBox.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 33F)); this.tblLayPnlOnwMessageBox.RowStyles.Add(new System.Windows.Forms.RowStyle(System.Windows.Forms.SizeType.Absolute, 20F)); this.tblLayPnlOnwMessageBox.Size = new System.Drawing.Size(668, 197); this.tblLayPnlOnwMessageBox.TabIndex = 4; // // btnOk // this.btnOk.Location = new System.Drawing.Point(488, 164); this.btnOk.Margin = new System.Windows.Forms.Padding(0); this.btnOk.Name = "btnOk"; this.btnOk.Size = new System.Drawing.Size(180, 33); this.btnOk.TabIndex = 1; this.btnOk.Text = "_Ok"; this.btnOk.UseVisualStyleBackColor = true; this.btnOk.Click += new System.EventHandler(this.btnOk_Click); // // rchTxtBoxUsedBuyDetails // this.rchTxtBoxUsedBuyDetails.BackColor = System.Drawing.SystemColors.InactiveCaption; this.tblLayPnlOnwMessageBox.SetColumnSpan(this.rchTxtBoxUsedBuyDetails, 3); this.rchTxtBoxUsedBuyDetails.Dock = System.Windows.Forms.DockStyle.Fill; this.rchTxtBoxUsedBuyDetails.Location = new System.Drawing.Point(3, 3); this.rchTxtBoxUsedBuyDetails.Name = "rchTxtBoxUsedBuyDetails"; this.rchTxtBoxUsedBuyDetails.ReadOnly = true; this.rchTxtBoxUsedBuyDetails.Size = new System.Drawing.Size(662, 158); this.rchTxtBoxUsedBuyDetails.TabIndex = 5; this.rchTxtBoxUsedBuyDetails.Text = ""; // // grpBoxUsedBuyDetails // this.grpBoxUsedBuyDetails.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); this.grpBoxUsedBuyDetails.Controls.Add(this.tblLayPnlOnwMessageBox); this.grpBoxUsedBuyDetails.FlatStyle = System.Windows.Forms.FlatStyle.Flat; this.grpBoxUsedBuyDetails.Location = new System.Drawing.Point(5, 5); this.grpBoxUsedBuyDetails.Name = "grpBoxUsedBuyDetails"; this.grpBoxUsedBuyDetails.Size = new System.Drawing.Size(674, 218); this.grpBoxUsedBuyDetails.TabIndex = 5; this.grpBoxUsedBuyDetails.TabStop = false; this.grpBoxUsedBuyDetails.Text = "_UsedBuyDetails"; // // UsedBuyDetailsList // this.AcceptButton = this.btnOk; this.AutoScaleDimensions = new System.Drawing.SizeF(7F, 14F); this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; this.BackColor = System.Drawing.SystemColors.InactiveCaption; this.ClientSize = new System.Drawing.Size(684, 226); this.Controls.Add(this.grpBoxUsedBuyDetails); this.Font = new System.Drawing.Font("Consolas", 9F); this.MaximizeBox = false; this.MaximumSize = new System.Drawing.Size(700, 265); this.MinimizeBox = false; this.MinimumSize = new System.Drawing.Size(700, 265); this.Name = "UsedBuyDetailsList"; this.ShowIcon = false; this.ShowInTaskbar = false; this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent; this.Text = "_UsedBuyDetailsListCaption"; this.TopMost = true; this.Shown += new System.EventHandler(this.UsedBuyDetailsList_Shown); this.tblLayPnlOnwMessageBox.ResumeLayout(false); this.grpBoxUsedBuyDetails.ResumeLayout(false); this.ResumeLayout(false); } #endregion private System.Windows.Forms.TableLayoutPanel tblLayPnlOnwMessageBox; private System.Windows.Forms.Button btnOk; private System.Windows.Forms.RichTextBox rchTxtBoxUsedBuyDetails; private System.Windows.Forms.GroupBox grpBoxUsedBuyDetails; } }
mit
okonek/tidal-cli-client
app/backend/models/Artist.js
187
module.exports = class Artist { constructor(artistObject) { this.id = artistObject.id; this.name = artistObject.name; this.tracks = []; this.picture = artistObject.picture; } };
mit
tuanphpvn/core
tests/Metadata/Property/Factory/FileConfigurationMetadataFactoryProvider.php
2136
<?php /* * This file is part of the API Platform project. * * (c) Kévin Dunglas <dunglas@gmail.com> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ declare(strict_types=1); namespace ApiPlatform\Core\Tests\Metadata\Property\Factory; use ApiPlatform\Core\Metadata\Property\PropertyMetadata; use ApiPlatform\Core\Metadata\Property\SubresourceMetadata; /** * Property metadata provider for file configured factories tests. * * @author Baptiste Meyer <baptiste.meyer@gmail.com> */ abstract class FileConfigurationMetadataFactoryProvider extends \PHPUnit_Framework_TestCase { public function propertyMetadataProvider() { $metadata = [ 'description' => 'The dummy foo', 'readable' => true, 'writable' => true, 'readableLink' => false, 'writableLink' => false, 'required' => true, 'attributes' => [ 'foo' => ['Foo'], 'bar' => [['Bar'], 'baz' => 'Baz'], 'baz' => 'Baz', ], 'subresource' => new SubresourceMetadata('Foo', true), ]; return [[$this->getPropertyMetadata($metadata)]]; } public function decoratedPropertyMetadataProvider() { $metadata = [ 'description' => 'The dummy foo', 'readable' => true, 'writable' => true, 'readableLink' => true, 'writableLink' => false, 'required' => true, 'identifier' => false, 'attributes' => ['Foo'], 'subresource' => new SubresourceMetadata('Foo', true), ]; return [[$this->getPropertyMetadata($metadata)]]; } private function getPropertyMetadata(array $metadata): PropertyMetadata { $propertyMetadata = new PropertyMetadata(); foreach ($metadata as $propertyName => $propertyValue) { $propertyMetadata = $propertyMetadata->{'with'.ucfirst($propertyName)}($propertyValue); } return $propertyMetadata; } }
mit
schneidmaster/gitreports.com
spec/factories/users.rb
295
FactoryGirl.define do factory :user do username { Faker::Internet.user_name nil, %w[_] } name { Faker::Name.name } avatar_url { "https://github.com/identicons/#{username}.png" } access_token { Faker::Bitcoin.address } repositories { [] } organizations { [] } end end
mit
fluxbb/core
lang/en/userlist.php
498
<?php // Language definitions used in userlist.php return array( 'user_find_legend' => 'Find and sort users', 'user_search_info' => 'Enter a username to search for and/or a user group to filter by. The username field can be left blank. Use the wildcard character * for partial matches.', 'user_sort_info' => 'Sort users by name, date registered or number of posts and in ascending/descending order.', 'user_group' => 'User group', 'no_of_posts' => 'Number of posts', 'all_users' => 'All' );
mit
duke-libraries/fcn
signup.php
2766
<?php /** * signup.php: Allow users to sign up for game accounts. It's a simple form that collects the basic data-- * username, password, email--and passes it to signupProcessor.php. Validation occurs via some functions in * game/functions.php. * * @author William Shaw <william.shaw@duke.edu> * @author Katherine Jentleson <katherine.jentleson@duke.edu>, designer * @date 11/2012 */ require_once 'game/db.php'; ob_start( ); require 'game/functions.php'; ob_end_clean( ); ?> <html> <head> <script type="text/javascript" src="https://www.google.com/jsapi"></script><script type="text/javascript"> google.load( "jquery", "1" ); google.load( "jqueryui", "1" ); </script> <link rel="stylesheet" type="text/css" href="new/fcn.css"/> <link rel="stylesheet" type="text/css" href="new/jquery-ui.css"/> <script> $(document).ready( function( ) { $( "button#submit" ).button( ); $( "button#submit" ).click( function( ) { $("#signupForm").submit( ); return false; } ); } ); </script> </head> <body style="background-color:#fff;font-size:1em;"> <div style="width:50%;margin-left:auto;margin-right:auto;"> <form id="signupForm" action="signupProcessor.php" method="post"> <h1>Create a Collector Account</h1> Once you create a collector account, you'll be able to log in, view your collection, explore other users' collections, and buy, sell, or trade works in the marketplace. Collector accounts can belong to individuals or teams, but they can have only one email address associated with them. (This address is used only for verifying the account -- all other communication happens in-game.) <p/> Fantasy Collecting requires that you use a modern Web browser (Chrome, Opera, Firefox, Safari -- Internet Explorer is not supported). It works best with a display of at least 1280 x 800 pixels, which is typical for a 13" laptop. <p/> <center> <table width="500"> <tr> <td align="right">Collector (or Team) Name:</td><td><input name="name" size="30" style="font-size:1em;"/></td></tr> <tr><td align="Right">Password:</td><td><input name="password" type="password" size="30" style="font-size:1em;"/></td></tr> <tr><td align="right">Confirm password:</td><td><input name="confirm_password" type="password" size="30" style="font-size:1em;"/></td></tr> <tr><td align="right">Email:</td><td><input name="email" size="30" style="font-size:1em;"/></td></tr> <tr><td colspan="2"> <input type="checkbox" name="ok_to_use_record" value="1" checked> I give permission for the record of my game play to be used in future reseach, development, and promotion of Fantasy Collecting. </table> </div> <p/> <p/> <p/><center> <button id="submit">Sign Up</button></center> </div> </form> </div> </body> </html>
mit
hckrs/hckrs.io
packages/growth/growth/crawler-client.js
531
if (Meteor.isClient) { Crawler = {} // fetch usernames from github related to all cities from hckrs.io Crawler.fetchGithubUsersInAllCities = function(cb) { return Meteor.call('crawlFetchGithubUsersInAllCities', cb); } // fetch from github all usersnames related to the given city. // These usernames will be stored in the database. Crawler.fetchGithubUsersInCity = function(city, cb) { check(city, Match.In(City.identifiers())); return Meteor.call('crawlFetchGithubUsersInCity', city, cb); } }
mit
evanliomain/presentation_meteor
node_modules/glou/node_modules/err-tree/test/assert.spec.js
1040
'use strict'; var expect = require('chai').expect; var createAssert = require('..').createAssert; describe('errTree.createAssert()', function() { it('is a function', function() { expect(createAssert).to.be.a('Function'); }); describe('-> return value', function() { it('is a function', function() { expect(createAssert()).to.be.a('Function'); }); it('does not throw if its first argument is true', function() { var myAssert = createAssert(Error); function test() { myAssert(true); } expect(test).to.not.throw(); }); it('throws if its first argument is false', function() { var myAssert = createAssert(Error); function test() { myAssert(false); } expect(test).to.throw(Error); }); it('uses subsequent arguments to create the error', function() { var myAssert = createAssert(Error); function test() { myAssert(false, 'message'); } expect(test).to.throw(Error, 'message'); }); }); });
mit
dmytro-polupan/nasha_lepta
public/scripts/controllers/news/show.js
933
'use strict'; /** * @ngdoc function * @name nashaLeptaApp.controller:NewsShowCtrl * @description * # NewsShowCtrl * Controller of the nashaLeptaApp */ angular.module('nashaLeptaApp') .controller('NewsShowCtrl', function ($scope, $routeParams, FireObjects, SubProjectPrefixer, IsNeedToCompileFurther, $sce, model) { var noveltyModelsLocation = SubProjectPrefixer(model); $scope.noveltyModelsLocation=noveltyModelsLocation; FireObjects.find(noveltyModelsLocation, $routeParams.id).$loaded() .then(function(novelty) { var result = novelty.body; if(IsNeedToCompileFurther(result)){ $scope.toShow=null; $scope.toShowCompile=result; }else{ $scope.toShow = $sce.trustAsHtml(result); $scope.toShowCompile=null; } $scope.novelty = novelty; }, function(error) { console.error("NewsShowCtrl Error:", error); }) });
mit
romanornr/viacoin
src/validation.cpp
205207
// Copyright (c) 2009-2010 Satoshi Nakamoto // Copyright (c) 2009-2016 The Bitcoin Core developers // Distributed under the MIT software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include "validation.h" #include "arith_uint256.h" #include "chain.h" #include "chainparams.h" #include "checkpoints.h" #include "checkqueue.h" #include "consensus/consensus.h" #include "consensus/merkle.h" #include "consensus/tx_verify.h" #include "consensus/validation.h" #include "cuckoocache.h" #include "fs.h" #include "hash.h" #include "init.h" #include "policy/fees.h" #include "policy/policy.h" #include "policy/rbf.h" #include "pow.h" #include "primitives/block.h" #include "primitives/transaction.h" #include "random.h" #include "reverse_iterator.h" #include "script/script.h" #include "script/sigcache.h" #include "script/standard.h" #include "timedata.h" #include "tinyformat.h" #include "txdb.h" #include "txmempool.h" #include "ui_interface.h" #include "undo.h" #include "util.h" #include "utilmoneystr.h" #include "utilstrencodings.h" #include "validationinterface.h" #include "versionbits.h" #include "warnings.h" #include <atomic> #include <sstream> #include <boost/algorithm/string/replace.hpp> #include <boost/algorithm/string/join.hpp> #include <boost/thread.hpp> #if defined(NDEBUG) # error "Viacoin cannot be compiled without assertions." #endif /** * Global state */ CCriticalSection cs_main; BlockMap mapBlockIndex; CChain chainActive; CBlockIndex *pindexBestHeader = nullptr; CWaitableCriticalSection csBestBlock; CConditionVariable cvBlockChange; int nScriptCheckThreads = 0; std::atomic_bool fImporting(false); bool fReindex = false; bool fTxIndex = false; bool fHavePruned = false; bool fPruneMode = false; bool fIsBareMultisigStd = DEFAULT_PERMIT_BAREMULTISIG; bool fRequireStandard = true; bool fCheckBlockIndex = false; bool fCheckpointsEnabled = DEFAULT_CHECKPOINTS_ENABLED; size_t nCoinCacheUsage = 5000 * 300; uint64_t nPruneTarget = 0; int64_t nMaxTipAge = DEFAULT_MAX_TIP_AGE; bool fEnableReplacement = DEFAULT_ENABLE_REPLACEMENT; uint256 hashAssumeValid; arith_uint256 nMinimumChainWork; CFeeRate minRelayTxFee = CFeeRate(DEFAULT_MIN_RELAY_TX_FEE); CAmount maxTxFee = DEFAULT_TRANSACTION_MAXFEE; CBlockPolicyEstimator feeEstimator; CTxMemPool mempool(&feeEstimator); static void CheckBlockIndex(const Consensus::Params& consensusParams); /** Constant stuff for coinbase transactions we create: */ CScript COINBASE_FLAGS; const std::string strMessageMagic = "Viacoin Signed Message:\n"; // Internal stuff namespace { struct CBlockIndexWorkComparator { bool operator()(const CBlockIndex *pa, const CBlockIndex *pb) const { // First sort by most total work, ... if (pa->nChainWork > pb->nChainWork) return false; if (pa->nChainWork < pb->nChainWork) return true; // ... then by earliest time received, ... if (pa->nSequenceId < pb->nSequenceId) return false; if (pa->nSequenceId > pb->nSequenceId) return true; // Use pointer address as tie breaker (should only happen with blocks // loaded from disk, as those all have id 0). if (pa < pb) return false; if (pa > pb) return true; // Identical blocks. return false; } }; CBlockIndex *pindexBestInvalid; /** * The set of all CBlockIndex entries with BLOCK_VALID_TRANSACTIONS (for itself and all ancestors) and * as good as our current tip or better. Entries may be failed, though, and pruning nodes may be * missing the data for the block. */ std::set<CBlockIndex*, CBlockIndexWorkComparator> setBlockIndexCandidates; /** All pairs A->B, where A (or one of its ancestors) misses transactions, but B has transactions. * Pruned nodes may have entries where B is missing data. */ std::multimap<CBlockIndex*, CBlockIndex*> mapBlocksUnlinked; CCriticalSection cs_LastBlockFile; std::vector<CBlockFileInfo> vinfoBlockFile; int nLastBlockFile = 0; /** Global flag to indicate we should check to see if there are * block/undo files that should be deleted. Set on startup * or if we allocate more file space when we're in prune mode */ bool fCheckForPruning = false; /** * Every received block is assigned a unique and increasing identifier, so we * know which one to give priority in case of a fork. */ CCriticalSection cs_nBlockSequenceId; /** Blocks loaded from disk are assigned id 0, so start the counter at 1. */ int32_t nBlockSequenceId = 1; /** Decreasing counter (used by subsequent preciousblock calls). */ int32_t nBlockReverseSequenceId = -1; /** chainwork for the last block that preciousblock has been applied to. */ arith_uint256 nLastPreciousChainwork = 0; /** In order to efficiently track invalidity of headers, we keep the set of * blocks which we tried to connect and found to be invalid here (ie which * were set to BLOCK_FAILED_VALID since the last restart). We can then * walk this set and check if a new header is a descendant of something in * this set, preventing us from having to walk mapBlockIndex when we try * to connect a bad block and fail. * * While this is more complicated than marking everything which descends * from an invalid block as invalid at the time we discover it to be * invalid, doing so would require walking all of mapBlockIndex to find all * descendants. Since this case should be very rare, keeping track of all * BLOCK_FAILED_VALID blocks in a set should be just fine and work just as * well. * * Because we alreardy walk mapBlockIndex in height-order at startup, we go * ahead and mark descendants of invalid blocks as FAILED_CHILD at that time, * instead of putting things in this set. */ std::set<CBlockIndex*> g_failed_blocks; /** Dirty block index entries. */ std::set<CBlockIndex*> setDirtyBlockIndex; /** Dirty block file entries. */ std::set<int> setDirtyFileInfo; } // anon namespace CBlockIndex* FindForkInGlobalIndex(const CChain& chain, const CBlockLocator& locator) { // Find the first block the caller has in the main chain for (const uint256& hash : locator.vHave) { BlockMap::iterator mi = mapBlockIndex.find(hash); if (mi != mapBlockIndex.end()) { CBlockIndex* pindex = (*mi).second; if (chain.Contains(pindex)) return pindex; if (pindex->GetAncestor(chain.Height()) == chain.Tip()) { return chain.Tip(); } } } return chain.Genesis(); } CCoinsViewDB *pcoinsdbview = nullptr; CCoinsViewCache *pcoinsTip = nullptr; CBlockTreeDB *pblocktree = nullptr; enum FlushStateMode { FLUSH_STATE_NONE, FLUSH_STATE_IF_NEEDED, FLUSH_STATE_PERIODIC, FLUSH_STATE_ALWAYS }; // See definition for documentation static bool FlushStateToDisk(const CChainParams& chainParams, CValidationState &state, FlushStateMode mode, int nManualPruneHeight=0); static void FindFilesToPruneManual(std::set<int>& setFilesToPrune, int nManualPruneHeight); static void FindFilesToPrune(std::set<int>& setFilesToPrune, uint64_t nPruneAfterHeight); bool CheckInputs(const CTransaction& tx, CValidationState &state, const CCoinsViewCache &inputs, bool fScriptChecks, unsigned int flags, bool cacheSigStore, bool cacheFullScriptStore, PrecomputedTransactionData& txdata, std::vector<CScriptCheck> *pvChecks = nullptr); static FILE* OpenUndoFile(const CDiskBlockPos &pos, bool fReadOnly = false); bool CheckFinalTx(const CTransaction &tx, int flags) { AssertLockHeld(cs_main); // By convention a negative value for flags indicates that the // current network-enforced consensus rules should be used. In // a future soft-fork scenario that would mean checking which // rules would be enforced for the next block and setting the // appropriate flags. At the present time no soft-forks are // scheduled, so no flags are set. flags = std::max(flags, 0); // CheckFinalTx() uses chainActive.Height()+1 to evaluate // nLockTime because when IsFinalTx() is called within // CBlock::AcceptBlock(), the height of the block *being* // evaluated is what is used. Thus if we want to know if a // transaction can be part of the *next* block, we need to call // IsFinalTx() with one more than chainActive.Height(). const int nBlockHeight = chainActive.Height() + 1; // BIP113 will require that time-locked transactions have nLockTime set to // less than the median time of the previous block they're contained in. // When the next block is created its previous block will be the current // chain tip, so we use that to calculate the median time passed to // IsFinalTx() if LOCKTIME_MEDIAN_TIME_PAST is set. const int64_t nBlockTime = (flags & LOCKTIME_MEDIAN_TIME_PAST) ? chainActive.Tip()->GetMedianTimePast() : GetAdjustedTime(); return IsFinalTx(tx, nBlockHeight, nBlockTime); } bool TestLockPointValidity(const LockPoints* lp) { AssertLockHeld(cs_main); assert(lp); // If there are relative lock times then the maxInputBlock will be set // If there are no relative lock times, the LockPoints don't depend on the chain if (lp->maxInputBlock) { // Check whether chainActive is an extension of the block at which the LockPoints // calculation was valid. If not LockPoints are no longer valid if (!chainActive.Contains(lp->maxInputBlock)) { return false; } } // LockPoints still valid return true; } bool CheckSequenceLocks(const CTransaction &tx, int flags, LockPoints* lp, bool useExistingLockPoints) { AssertLockHeld(cs_main); AssertLockHeld(mempool.cs); CBlockIndex* tip = chainActive.Tip(); CBlockIndex index; index.pprev = tip; // CheckSequenceLocks() uses chainActive.Height()+1 to evaluate // height based locks because when SequenceLocks() is called within // ConnectBlock(), the height of the block *being* // evaluated is what is used. // Thus if we want to know if a transaction can be part of the // *next* block, we need to use one more than chainActive.Height() index.nHeight = tip->nHeight + 1; std::pair<int, int64_t> lockPair; if (useExistingLockPoints) { assert(lp); lockPair.first = lp->height; lockPair.second = lp->time; } else { // pcoinsTip contains the UTXO set for chainActive.Tip() CCoinsViewMemPool viewMemPool(pcoinsTip, mempool); std::vector<int> prevheights; prevheights.resize(tx.vin.size()); for (size_t txinIndex = 0; txinIndex < tx.vin.size(); txinIndex++) { const CTxIn& txin = tx.vin[txinIndex]; Coin coin; if (!viewMemPool.GetCoin(txin.prevout, coin)) { return error("%s: Missing input", __func__); } if (coin.nHeight == MEMPOOL_HEIGHT) { // Assume all mempool transaction confirm in the next block prevheights[txinIndex] = tip->nHeight + 1; } else { prevheights[txinIndex] = coin.nHeight; } } lockPair = CalculateSequenceLocks(tx, flags, &prevheights, index); if (lp) { lp->height = lockPair.first; lp->time = lockPair.second; // Also store the hash of the block with the highest height of // all the blocks which have sequence locked prevouts. // This hash needs to still be on the chain // for these LockPoint calculations to be valid // Note: It is impossible to correctly calculate a maxInputBlock // if any of the sequence locked inputs depend on unconfirmed txs, // except in the special case where the relative lock time/height // is 0, which is equivalent to no sequence lock. Since we assume // input height of tip+1 for mempool txs and test the resulting // lockPair from CalculateSequenceLocks against tip+1. We know // EvaluateSequenceLocks will fail if there was a non-zero sequence // lock on a mempool input, so we can use the return value of // CheckSequenceLocks to indicate the LockPoints validity int maxInputHeight = 0; for (int height : prevheights) { // Can ignore mempool inputs since we'll fail if they had non-zero locks if (height != tip->nHeight+1) { maxInputHeight = std::max(maxInputHeight, height); } } lp->maxInputBlock = tip->GetAncestor(maxInputHeight); } } return EvaluateSequenceLocks(index, lockPair); } // Returns the script flags which should be checked for a given block static unsigned int GetBlockScriptFlags(const CBlockIndex* pindex, const Consensus::Params& chainparams); static void LimitMempoolSize(CTxMemPool& pool, size_t limit, unsigned long age) { int expired = pool.Expire(GetTime() - age); if (expired != 0) { LogPrint(BCLog::MEMPOOL, "Expired %i transactions from the memory pool\n", expired); } std::vector<COutPoint> vNoSpendsRemaining; pool.TrimToSize(limit, &vNoSpendsRemaining); for (const COutPoint& removed : vNoSpendsRemaining) pcoinsTip->Uncache(removed); } /** Convert CValidationState to a human-readable message for logging */ std::string FormatStateMessage(const CValidationState &state) { return strprintf("%s%s (code %i)", state.GetRejectReason(), state.GetDebugMessage().empty() ? "" : ", "+state.GetDebugMessage(), state.GetRejectCode()); } static bool IsCurrentForFeeEstimation() { AssertLockHeld(cs_main); if (IsInitialBlockDownload()) return false; if (chainActive.Tip()->GetBlockTime() < (GetTime() - MAX_FEE_ESTIMATION_TIP_AGE)) return false; if (chainActive.Height() < pindexBestHeader->nHeight - 1) return false; return true; } /* Make mempool consistent after a reorg, by re-adding or recursively erasing * disconnected block transactions from the mempool, and also removing any * other transactions from the mempool that are no longer valid given the new * tip/height. * * Note: we assume that disconnectpool only contains transactions that are NOT * confirmed in the current chain nor already in the mempool (otherwise, * in-mempool descendants of such transactions would be removed). * * Passing fAddToMempool=false will skip trying to add the transactions back, * and instead just erase from the mempool as needed. */ void UpdateMempoolForReorg(DisconnectedBlockTransactions &disconnectpool, bool fAddToMempool) { AssertLockHeld(cs_main); std::vector<uint256> vHashUpdate; // disconnectpool's insertion_order index sorts the entries from // oldest to newest, but the oldest entry will be the last tx from the // latest mined block that was disconnected. // Iterate disconnectpool in reverse, so that we add transactions // back to the mempool starting with the earliest transaction that had // been previously seen in a block. auto it = disconnectpool.queuedTx.get<insertion_order>().rbegin(); while (it != disconnectpool.queuedTx.get<insertion_order>().rend()) { // ignore validation errors in resurrected transactions CValidationState stateDummy; if (!fAddToMempool || (*it)->IsCoinBase() || !AcceptToMemoryPool(mempool, stateDummy, *it, false, nullptr, nullptr, true)) { // If the transaction doesn't make it in to the mempool, remove any // transactions that depend on it (which would now be orphans). mempool.removeRecursive(**it, MemPoolRemovalReason::REORG); } else if (mempool.exists((*it)->GetHash())) { vHashUpdate.push_back((*it)->GetHash()); } ++it; } disconnectpool.queuedTx.clear(); // AcceptToMemoryPool/addUnchecked all assume that new mempool entries have // no in-mempool children, which is generally not true when adding // previously-confirmed transactions back to the mempool. // UpdateTransactionsFromBlock finds descendants of any transactions in // the disconnectpool that were added back and cleans up the mempool state. mempool.UpdateTransactionsFromBlock(vHashUpdate); // We also need to remove any now-immature transactions mempool.removeForReorg(pcoinsTip, chainActive.Tip()->nHeight + 1, STANDARD_LOCKTIME_VERIFY_FLAGS); // Re-limit mempool size, in case we added any transactions LimitMempoolSize(mempool, gArgs.GetArg("-maxmempool", DEFAULT_MAX_MEMPOOL_SIZE) * 1000000, gArgs.GetArg("-mempoolexpiry", DEFAULT_MEMPOOL_EXPIRY) * 60 * 60); } // Used to avoid mempool polluting consensus critical paths if CCoinsViewMempool // were somehow broken and returning the wrong scriptPubKeys static bool CheckInputsFromMempoolAndCache(const CTransaction& tx, CValidationState &state, const CCoinsViewCache &view, CTxMemPool& pool, unsigned int flags, bool cacheSigStore, PrecomputedTransactionData& txdata) { AssertLockHeld(cs_main); // pool.cs should be locked already, but go ahead and re-take the lock here // to enforce that mempool doesn't change between when we check the view // and when we actually call through to CheckInputs LOCK(pool.cs); assert(!tx.IsCoinBase()); for (const CTxIn& txin : tx.vin) { const Coin& coin = view.AccessCoin(txin.prevout); // At this point we haven't actually checked if the coins are all // available (or shouldn't assume we have, since CheckInputs does). // So we just return failure if the inputs are not available here, // and then only have to check equivalence for available inputs. if (coin.IsSpent()) return false; const CTransactionRef& txFrom = pool.get(txin.prevout.hash); if (txFrom) { assert(txFrom->GetHash() == txin.prevout.hash); assert(txFrom->vout.size() > txin.prevout.n); assert(txFrom->vout[txin.prevout.n] == coin.out); } else { const Coin& coinFromDisk = pcoinsTip->AccessCoin(txin.prevout); assert(!coinFromDisk.IsSpent()); assert(coinFromDisk.out == coin.out); } } return CheckInputs(tx, state, view, true, flags, cacheSigStore, true, txdata); } static bool AcceptToMemoryPoolWorker(const CChainParams& chainparams, CTxMemPool& pool, CValidationState& state, const CTransactionRef& ptx, bool fLimitFree, bool* pfMissingInputs, int64_t nAcceptTime, std::list<CTransactionRef>* plTxnReplaced, bool fOverrideMempoolLimit, const CAmount& nAbsurdFee, std::vector<COutPoint>& coins_to_uncache) { const CTransaction& tx = *ptx; const uint256 hash = tx.GetHash(); AssertLockHeld(cs_main); if (pfMissingInputs) *pfMissingInputs = false; if (!CheckTransaction(tx, state)) return false; // state filled in by CheckTransaction // Coinbase is only valid in a block, not as a loose transaction if (tx.IsCoinBase()) return state.DoS(100, false, REJECT_INVALID, "coinbase"); // Reject transactions with witness before segregated witness activates (override with -prematurewitness) bool witnessEnabled = IsWitnessEnabled(chainActive.Tip(), chainparams.GetConsensus()); if (!gArgs.GetBoolArg("-prematurewitness", false) && tx.HasWitness() && !witnessEnabled) { return state.DoS(0, false, REJECT_NONSTANDARD, "no-witness-yet", true); } // Rather not work on nonstandard transactions (unless -testnet/-regtest) std::string reason; if (fRequireStandard && !IsStandardTx(tx, reason, witnessEnabled)) return state.DoS(0, false, REJECT_NONSTANDARD, reason); // Only accept nLockTime-using transactions that can be mined in the next // block; we don't want our mempool filled up with transactions that can't // be mined yet. if (!CheckFinalTx(tx, STANDARD_LOCKTIME_VERIFY_FLAGS)) return state.DoS(0, false, REJECT_NONSTANDARD, "non-final"); // is it already in the memory pool? if (pool.exists(hash)) { return state.Invalid(false, REJECT_DUPLICATE, "txn-already-in-mempool"); } // Check for conflicts with in-memory transactions std::set<uint256> setConflicts; { LOCK(pool.cs); // protect pool.mapNextTx for (const CTxIn &txin : tx.vin) { auto itConflicting = pool.mapNextTx.find(txin.prevout); if (itConflicting != pool.mapNextTx.end()) { const CTransaction *ptxConflicting = itConflicting->second; if (!setConflicts.count(ptxConflicting->GetHash())) { // Allow opt-out of transaction replacement by setting // nSequence > MAX_BIP125_RBF_SEQUENCE (SEQUENCE_FINAL-2) on all inputs. // // SEQUENCE_FINAL-1 is picked to still allow use of nLockTime by // non-replaceable transactions. All inputs rather than just one // is for the sake of multi-party protocols, where we don't // want a single party to be able to disable replacement. // // The opt-out ignores descendants as anyone relying on // first-seen mempool behavior should be checking all // unconfirmed ancestors anyway; doing otherwise is hopelessly // insecure. bool fReplacementOptOut = true; if (fEnableReplacement) { for (const CTxIn &_txin : ptxConflicting->vin) { if (_txin.nSequence <= MAX_BIP125_RBF_SEQUENCE) { fReplacementOptOut = false; break; } } } if (fReplacementOptOut) { return state.Invalid(false, REJECT_DUPLICATE, "txn-mempool-conflict"); } setConflicts.insert(ptxConflicting->GetHash()); } } } } { CCoinsView dummy; CCoinsViewCache view(&dummy); CAmount nValueIn = 0; LockPoints lp; { LOCK(pool.cs); CCoinsViewMemPool viewMemPool(pcoinsTip, pool); view.SetBackend(viewMemPool); // do all inputs exist? for (const CTxIn txin : tx.vin) { if (!pcoinsTip->HaveCoinInCache(txin.prevout)) { coins_to_uncache.push_back(txin.prevout); } if (!view.HaveCoin(txin.prevout)) { // Are inputs missing because we already have the tx? for (size_t out = 0; out < tx.vout.size(); out++) { // Optimistically just do efficient check of cache for outputs if (pcoinsTip->HaveCoinInCache(COutPoint(hash, out))) { return state.Invalid(false, REJECT_DUPLICATE, "txn-already-known"); } } // Otherwise assume this might be an orphan tx for which we just haven't seen parents yet if (pfMissingInputs) { *pfMissingInputs = true; } return false; // fMissingInputs and !state.IsInvalid() is used to detect this condition, don't set state.Invalid() } } // Bring the best block into scope view.GetBestBlock(); nValueIn = view.GetValueIn(tx); // we have all inputs cached now, so switch back to dummy, so we don't need to keep lock on mempool view.SetBackend(dummy); // Only accept BIP68 sequence locked transactions that can be mined in the next // block; we don't want our mempool filled up with transactions that can't // be mined yet. // Must keep pool.cs for this unless we change CheckSequenceLocks to take a // CoinsViewCache instead of create its own if (!CheckSequenceLocks(tx, STANDARD_LOCKTIME_VERIFY_FLAGS, &lp)) return state.DoS(0, false, REJECT_NONSTANDARD, "non-BIP68-final"); } // Check for non-standard pay-to-script-hash in inputs if (fRequireStandard && !AreInputsStandard(tx, view)) return state.Invalid(false, REJECT_NONSTANDARD, "bad-txns-nonstandard-inputs"); // Check for non-standard witness in P2WSH if (tx.HasWitness() && fRequireStandard && !IsWitnessStandard(tx, view)) return state.DoS(0, false, REJECT_NONSTANDARD, "bad-witness-nonstandard", true); int64_t nSigOpsCost = GetTransactionSigOpCost(tx, view, STANDARD_SCRIPT_VERIFY_FLAGS); CAmount nValueOut = tx.GetValueOut(); CAmount nFees = nValueIn-nValueOut; // nModifiedFees includes any fee deltas from PrioritiseTransaction CAmount nModifiedFees = nFees; pool.ApplyDelta(hash, nModifiedFees); // Keep track of transactions that spend a coinbase, which we re-scan // during reorgs to ensure COINBASE_MATURITY is still met. bool fSpendsCoinbase = false; for (const CTxIn &txin : tx.vin) { const Coin &coin = view.AccessCoin(txin.prevout); if (coin.IsCoinBase()) { fSpendsCoinbase = true; break; } } CTxMemPoolEntry entry(ptx, nFees, nAcceptTime, chainActive.Height(), fSpendsCoinbase, nSigOpsCost, lp); unsigned int nSize = entry.GetTxSize(); // Check that the transaction doesn't have an excessive number of // sigops, making it impossible to mine. Since the coinbase transaction // itself can contain sigops MAX_STANDARD_TX_SIGOPS is less than // MAX_BLOCK_SIGOPS; we still consider this an invalid rather than // merely non-standard transaction. if (nSigOpsCost > MAX_STANDARD_TX_SIGOPS_COST) return state.DoS(0, false, REJECT_NONSTANDARD, "bad-txns-too-many-sigops", false, strprintf("%d", nSigOpsCost)); CAmount mempoolRejectFee = pool.GetMinFee(gArgs.GetArg("-maxmempool", DEFAULT_MAX_MEMPOOL_SIZE) * 1000000).GetFee(nSize); if (mempoolRejectFee > 0 && nModifiedFees < mempoolRejectFee) { return state.DoS(0, false, REJECT_INSUFFICIENTFEE, "mempool min fee not met", false, strprintf("%d < %d", nFees, mempoolRejectFee)); } // No transactions are allowed below minRelayTxFee except from disconnected blocks if (fLimitFree && nModifiedFees < ::minRelayTxFee.GetFee(nSize)) { return state.DoS(0, false, REJECT_INSUFFICIENTFEE, "min relay fee not met"); } if (nAbsurdFee && nFees > nAbsurdFee) return state.Invalid(false, REJECT_HIGHFEE, "absurdly-high-fee", strprintf("%d > %d", nFees, nAbsurdFee)); // Calculate in-mempool ancestors, up to a limit. CTxMemPool::setEntries setAncestors; size_t nLimitAncestors = gArgs.GetArg("-limitancestorcount", DEFAULT_ANCESTOR_LIMIT); size_t nLimitAncestorSize = gArgs.GetArg("-limitancestorsize", DEFAULT_ANCESTOR_SIZE_LIMIT)*1000; size_t nLimitDescendants = gArgs.GetArg("-limitdescendantcount", DEFAULT_DESCENDANT_LIMIT); size_t nLimitDescendantSize = gArgs.GetArg("-limitdescendantsize", DEFAULT_DESCENDANT_SIZE_LIMIT)*1000; std::string errString; if (!pool.CalculateMemPoolAncestors(entry, setAncestors, nLimitAncestors, nLimitAncestorSize, nLimitDescendants, nLimitDescendantSize, errString)) { return state.DoS(0, false, REJECT_NONSTANDARD, "too-long-mempool-chain", false, errString); } // A transaction that spends outputs that would be replaced by it is invalid. Now // that we have the set of all ancestors we can detect this // pathological case by making sure setConflicts and setAncestors don't // intersect. for (CTxMemPool::txiter ancestorIt : setAncestors) { const uint256 &hashAncestor = ancestorIt->GetTx().GetHash(); if (setConflicts.count(hashAncestor)) { return state.DoS(10, false, REJECT_INVALID, "bad-txns-spends-conflicting-tx", false, strprintf("%s spends conflicting transaction %s", hash.ToString(), hashAncestor.ToString())); } } // Check if it's economically rational to mine this transaction rather // than the ones it replaces. CAmount nConflictingFees = 0; size_t nConflictingSize = 0; uint64_t nConflictingCount = 0; CTxMemPool::setEntries allConflicting; // If we don't hold the lock allConflicting might be incomplete; the // subsequent RemoveStaged() and addUnchecked() calls don't guarantee // mempool consistency for us. LOCK(pool.cs); const bool fReplacementTransaction = setConflicts.size(); if (fReplacementTransaction) { CFeeRate newFeeRate(nModifiedFees, nSize); std::set<uint256> setConflictsParents; const int maxDescendantsToVisit = 100; CTxMemPool::setEntries setIterConflicting; for (const uint256 &hashConflicting : setConflicts) { CTxMemPool::txiter mi = pool.mapTx.find(hashConflicting); if (mi == pool.mapTx.end()) continue; // Save these to avoid repeated lookups setIterConflicting.insert(mi); // Don't allow the replacement to reduce the feerate of the // mempool. // // We usually don't want to accept replacements with lower // feerates than what they replaced as that would lower the // feerate of the next block. Requiring that the feerate always // be increased is also an easy-to-reason about way to prevent // DoS attacks via replacements. // // The mining code doesn't (currently) take children into // account (CPFP) so we only consider the feerates of // transactions being directly replaced, not their indirect // descendants. While that does mean high feerate children are // ignored when deciding whether or not to replace, we do // require the replacement to pay more overall fees too, // mitigating most cases. CFeeRate oldFeeRate(mi->GetModifiedFee(), mi->GetTxSize()); if (newFeeRate <= oldFeeRate) { return state.DoS(0, false, REJECT_INSUFFICIENTFEE, "insufficient fee", false, strprintf("rejecting replacement %s; new feerate %s <= old feerate %s", hash.ToString(), newFeeRate.ToString(), oldFeeRate.ToString())); } for (const CTxIn &txin : mi->GetTx().vin) { setConflictsParents.insert(txin.prevout.hash); } nConflictingCount += mi->GetCountWithDescendants(); } // This potentially overestimates the number of actual descendants // but we just want to be conservative to avoid doing too much // work. if (nConflictingCount <= maxDescendantsToVisit) { // If not too many to replace, then calculate the set of // transactions that would have to be evicted for (CTxMemPool::txiter it : setIterConflicting) { pool.CalculateDescendants(it, allConflicting); } for (CTxMemPool::txiter it : allConflicting) { nConflictingFees += it->GetModifiedFee(); nConflictingSize += it->GetTxSize(); } } else { return state.DoS(0, false, REJECT_NONSTANDARD, "too many potential replacements", false, strprintf("rejecting replacement %s; too many potential replacements (%d > %d)\n", hash.ToString(), nConflictingCount, maxDescendantsToVisit)); } for (unsigned int j = 0; j < tx.vin.size(); j++) { // We don't want to accept replacements that require low // feerate junk to be mined first. Ideally we'd keep track of // the ancestor feerates and make the decision based on that, // but for now requiring all new inputs to be confirmed works. if (!setConflictsParents.count(tx.vin[j].prevout.hash)) { // Rather than check the UTXO set - potentially expensive - // it's cheaper to just check if the new input refers to a // tx that's in the mempool. if (pool.mapTx.find(tx.vin[j].prevout.hash) != pool.mapTx.end()) return state.DoS(0, false, REJECT_NONSTANDARD, "replacement-adds-unconfirmed", false, strprintf("replacement %s adds unconfirmed input, idx %d", hash.ToString(), j)); } } // The replacement must pay greater fees than the transactions it // replaces - if we did the bandwidth used by those conflicting // transactions would not be paid for. if (nModifiedFees < nConflictingFees) { return state.DoS(0, false, REJECT_INSUFFICIENTFEE, "insufficient fee", false, strprintf("rejecting replacement %s, less fees than conflicting txs; %s < %s", hash.ToString(), FormatMoney(nModifiedFees), FormatMoney(nConflictingFees))); } // Finally in addition to paying more fees than the conflicts the // new transaction must pay for its own bandwidth. CAmount nDeltaFees = nModifiedFees - nConflictingFees; if (nDeltaFees < ::incrementalRelayFee.GetFee(nSize)) { return state.DoS(0, false, REJECT_INSUFFICIENTFEE, "insufficient fee", false, strprintf("rejecting replacement %s, not enough additional fees to relay; %s < %s", hash.ToString(), FormatMoney(nDeltaFees), FormatMoney(::incrementalRelayFee.GetFee(nSize)))); } } unsigned int scriptVerifyFlags = STANDARD_SCRIPT_VERIFY_FLAGS; if (!chainparams.RequireStandard()) { scriptVerifyFlags = gArgs.GetArg("-promiscuousmempoolflags", scriptVerifyFlags); } // Check against previous transactions // This is done last to help prevent CPU exhaustion denial-of-service attacks. PrecomputedTransactionData txdata(tx); if (!CheckInputs(tx, state, view, true, scriptVerifyFlags, true, false, txdata)) { // SCRIPT_VERIFY_CLEANSTACK requires SCRIPT_VERIFY_WITNESS, so we // need to turn both off, and compare against just turning off CLEANSTACK // to see if the failure is specifically due to witness validation. CValidationState stateDummy; // Want reported failures to be from first CheckInputs if (!tx.HasWitness() && CheckInputs(tx, stateDummy, view, true, scriptVerifyFlags & ~(SCRIPT_VERIFY_WITNESS | SCRIPT_VERIFY_CLEANSTACK), true, false, txdata) && !CheckInputs(tx, stateDummy, view, true, scriptVerifyFlags & ~SCRIPT_VERIFY_CLEANSTACK, true, false, txdata)) { // Only the witness is missing, so the transaction itself may be fine. state.SetCorruptionPossible(); } return false; // state filled in by CheckInputs } // Check again against the current block tip's script verification // flags to cache our script execution flags. This is, of course, // useless if the next block has different script flags from the // previous one, but because the cache tracks script flags for us it // will auto-invalidate and we'll just have a few blocks of extra // misses on soft-fork activation. // // This is also useful in case of bugs in the standard flags that cause // transactions to pass as valid when they're actually invalid. For // instance the STRICTENC flag was incorrectly allowing certain // CHECKSIG NOT scripts to pass, even though they were invalid. // // There is a similar check in CreateNewBlock() to prevent creating // invalid blocks (using TestBlockValidity), however allowing such // transactions into the mempool can be exploited as a DoS attack. unsigned int currentBlockScriptVerifyFlags = GetBlockScriptFlags(chainActive.Tip(), Params().GetConsensus()); if (!CheckInputsFromMempoolAndCache(tx, state, view, pool, currentBlockScriptVerifyFlags, true, txdata)) { // If we're using promiscuousmempoolflags, we may hit this normally // Check if current block has some flags that scriptVerifyFlags // does not before printing an ominous warning if (!(~scriptVerifyFlags & currentBlockScriptVerifyFlags)) { return error("%s: BUG! PLEASE REPORT THIS! ConnectInputs failed against latest-block but not STANDARD flags %s, %s", __func__, hash.ToString(), FormatStateMessage(state)); } else { if (!CheckInputs(tx, state, view, true, MANDATORY_SCRIPT_VERIFY_FLAGS, true, false, txdata)) { return error("%s: ConnectInputs failed against MANDATORY but not STANDARD flags due to promiscuous mempool %s, %s", __func__, hash.ToString(), FormatStateMessage(state)); } else { LogPrintf("Warning: -promiscuousmempool flags set to not include currently enforced soft forks, this may break mining or otherwise cause instability!\n"); } } } // Remove conflicting transactions from the mempool for (const CTxMemPool::txiter it : allConflicting) { LogPrint(BCLog::MEMPOOL, "replacing tx %s with %s for %s VIA additional fees, %d delta bytes\n", it->GetTx().GetHash().ToString(), hash.ToString(), FormatMoney(nModifiedFees - nConflictingFees), (int)nSize - (int)nConflictingSize); if (plTxnReplaced) plTxnReplaced->push_back(it->GetSharedTx()); } pool.RemoveStaged(allConflicting, false, MemPoolRemovalReason::REPLACED); // This transaction should only count for fee estimation if it isn't a // BIP 125 replacement transaction (may not be widely supported), the // node is not behind, and the transaction is not dependent on any other // transactions in the mempool. bool validForFeeEstimation = !fReplacementTransaction && IsCurrentForFeeEstimation() && pool.HasNoInputsOf(tx); // Store transaction in memory pool.addUnchecked(hash, entry, setAncestors, validForFeeEstimation); // trim mempool and check if tx was trimmed if (!fOverrideMempoolLimit) { LimitMempoolSize(pool, gArgs.GetArg("-maxmempool", DEFAULT_MAX_MEMPOOL_SIZE) * 1000000, gArgs.GetArg("-mempoolexpiry", DEFAULT_MEMPOOL_EXPIRY) * 60 * 60); if (!pool.exists(hash)) return state.DoS(0, false, REJECT_INSUFFICIENTFEE, "mempool full"); } } GetMainSignals().TransactionAddedToMempool(ptx); return true; } /** (try to) add transaction to memory pool with a specified acceptance time **/ static bool AcceptToMemoryPoolWithTime(const CChainParams& chainparams, CTxMemPool& pool, CValidationState &state, const CTransactionRef &tx, bool fLimitFree, bool* pfMissingInputs, int64_t nAcceptTime, std::list<CTransactionRef>* plTxnReplaced, bool fOverrideMempoolLimit, const CAmount nAbsurdFee) { std::vector<COutPoint> coins_to_uncache; bool res = AcceptToMemoryPoolWorker(chainparams, pool, state, tx, fLimitFree, pfMissingInputs, nAcceptTime, plTxnReplaced, fOverrideMempoolLimit, nAbsurdFee, coins_to_uncache); if (!res) { for (const COutPoint& hashTx : coins_to_uncache) pcoinsTip->Uncache(hashTx); } // After we've (potentially) uncached entries, ensure our coins cache is still within its size limits CValidationState stateDummy; FlushStateToDisk(chainparams, stateDummy, FLUSH_STATE_PERIODIC); return res; } bool AcceptToMemoryPool(CTxMemPool& pool, CValidationState &state, const CTransactionRef &tx, bool fLimitFree, bool* pfMissingInputs, std::list<CTransactionRef>* plTxnReplaced, bool fOverrideMempoolLimit, const CAmount nAbsurdFee) { const CChainParams& chainparams = Params(); return AcceptToMemoryPoolWithTime(chainparams, pool, state, tx, fLimitFree, pfMissingInputs, GetTime(), plTxnReplaced, fOverrideMempoolLimit, nAbsurdFee); } /** Return transaction in txOut, and if it was found inside a block, its hash is placed in hashBlock */ bool GetTransaction(const uint256 &hash, CTransactionRef &txOut, const Consensus::Params& consensusParams, uint256 &hashBlock, bool fAllowSlow) { CBlockIndex *pindexSlow = nullptr; LOCK(cs_main); CTransactionRef ptx = mempool.get(hash); if (ptx) { txOut = ptx; return true; } if (fTxIndex) { CDiskTxPos postx; if (pblocktree->ReadTxIndex(hash, postx)) { CAutoFile file(OpenBlockFile(postx, true), SER_DISK, CLIENT_VERSION); if (file.IsNull()) return error("%s: OpenBlockFile failed", __func__); CBlockHeader header; try { file >> header; fseek(file.Get(), postx.nTxOffset, SEEK_CUR); file >> txOut; } catch (const std::exception& e) { return error("%s: Deserialize or I/O error - %s", __func__, e.what()); } hashBlock = header.GetHash(); if (txOut->GetHash() != hash) return error("%s: txid mismatch", __func__); return true; } } if (fAllowSlow) { // use coin database to locate block that contains transaction, and scan it const Coin& coin = AccessByTxid(*pcoinsTip, hash); if (!coin.IsSpent()) pindexSlow = chainActive[coin.nHeight]; } if (pindexSlow) { CBlock block; if (ReadBlockFromDisk(block, pindexSlow, consensusParams)) { for (const auto& tx : block.vtx) { if (tx->GetHash() == hash) { txOut = tx; hashBlock = pindexSlow->GetBlockHash(); return true; } } } } return false; } ////////////////////////////////////////////////////////////////////////////// // // CBlock and CBlockIndex // static bool WriteBlockToDisk(const CBlock& block, CDiskBlockPos& pos, const CMessageHeader::MessageStartChars& messageStart) { // Open history file to append CAutoFile fileout(OpenBlockFile(pos), SER_DISK, CLIENT_VERSION); if (fileout.IsNull()) return error("WriteBlockToDisk: OpenBlockFile failed"); // Write index header unsigned int nSize = GetSerializeSize(fileout, block); fileout << FLATDATA(messageStart) << nSize; // Write block long fileOutPos = ftell(fileout.Get()); if (fileOutPos < 0) return error("WriteBlockToDisk: ftell failed"); pos.nPos = (unsigned int)fileOutPos; fileout << block; return true; } bool ReadBlockFromDisk(CBlock& block, const CDiskBlockPos& pos, const Consensus::Params& consensusParams) { block.SetNull(); // Open history file to read CAutoFile filein(OpenBlockFile(pos, true), SER_DISK, CLIENT_VERSION); if (filein.IsNull()) return error("ReadBlockFromDisk: OpenBlockFile failed for %s", pos.ToString()); // Read block try { filein >> block; } catch (const std::exception& e) { return error("%s: Deserialize or I/O error - %s at %s", __func__, e.what(), pos.ToString()); } // Check the header if (!CheckBlockProofOfWork(&block, consensusParams)) return error("ReadBlockFromDisk: Errors in block header at %s", pos.ToString()); return true; } bool ReadBlockFromDisk(CBlock& block, const CBlockIndex* pindex, const Consensus::Params& consensusParams) { if (!ReadBlockFromDisk(block, pindex->GetBlockPos(), consensusParams)) return false; if (block.GetHash() != pindex->GetBlockHash()) return error("ReadBlockFromDisk(CBlock&, CBlockIndex*): GetHash() doesn't match index for %s at %s", pindex->ToString(), pindex->GetBlockPos().ToString()); return true; } CAmount GetBlockSubsidy(int nHeight, const Consensus::Params& consensusParams) { // In -regtest mode use Bitcoin schedule if (Params().MineBlocksOnDemand() && consensusParams.fPowAllowMinDifficultyBlocks) { int halvings = nHeight / consensusParams.nSubsidyHalvingInterval; // Force block reward to zero when right shift is undefined. if (halvings >= 64) return 0; CAmount nSubsidy = 50 * COIN; // Subsidy is cut in half every 210,000 blocks which will occur approximately every 4 years. nSubsidy >>= halvings; return nSubsidy; } // Viacoin schedule CAmount nSubsidy = 0; // different zero block period for testnet and mainnet // mainnet not fixed until final release int zeroRewardHeight = consensusParams.fPowAllowMinDifficultyBlocks ? 2001 : 10001; int rampHeight = 43200 + zeroRewardHeight; // 4 periods of 10800 if (nHeight == 0) { // no reward for genesis block nSubsidy = 0; } else if (nHeight == 1) { // first distribution nSubsidy = 10000000 * COIN; } else if (nHeight <= zeroRewardHeight) { // no block reward to allow difficulty to scale up and prevent instamining nSubsidy = 0; } else if (nHeight <= (zeroRewardHeight + 10800)) { // first 10800 block after zero reward period is 10 coins per block nSubsidy = 10 * COIN; } else if (nHeight <= rampHeight) { // every 10800 blocks reduce nSubsidy from 8 to 6 nSubsidy = (8 - int((nHeight-zeroRewardHeight-1) / 10800)) * COIN; } else if (nHeight <= 1971000) { nSubsidy = 5 * COIN; } else { // (nHeight > 1971000) int halvings = nHeight / consensusParams.nSubsidyHalvingInterval; // Force block reward to zero when right shift is undefined. if (halvings <= 64) { nSubsidy = 20 * COIN; nSubsidy >>= halvings; } } return nSubsidy; } bool IsInitialBlockDownload() { if (Params().GetConsensus().fPowNoRetargeting) return false; // Once this function has returned false, it must remain false. static std::atomic<bool> latchToFalse{false}; // Optimization: pre-test latch before taking the lock. if (latchToFalse.load(std::memory_order_relaxed)) return false; LOCK(cs_main); if (latchToFalse.load(std::memory_order_relaxed)) return false; if (fImporting || fReindex) return true; if (chainActive.Tip() == nullptr) return true; if (chainActive.Tip()->nChainWork < nMinimumChainWork) return true; if (chainActive.Tip()->GetBlockTime() < (GetTime() - nMaxTipAge)) return true; LogPrintf("Leaving InitialBlockDownload (latching to false)\n"); latchToFalse.store(true, std::memory_order_relaxed); return false; } CBlockIndex *pindexBestForkTip = nullptr, *pindexBestForkBase = nullptr; static void AlertNotify(const std::string& strMessage) { uiInterface.NotifyAlertChanged(); std::string strCmd = gArgs.GetArg("-alertnotify", ""); if (strCmd.empty()) return; // Alert text should be plain ascii coming from a trusted source, but to // be safe we first strip anything not in safeChars, then add single quotes around // the whole string before passing it to the shell: std::string singleQuote("'"); std::string safeStatus = SanitizeString(strMessage); safeStatus = singleQuote+safeStatus+singleQuote; boost::replace_all(strCmd, "%s", safeStatus); boost::thread t(runCommand, strCmd); // thread runs free } static void CheckForkWarningConditions() { AssertLockHeld(cs_main); // Before we get past initial download, we cannot reliably alert about forks // (we assume we don't get stuck on a fork before finishing our initial sync) if (IsInitialBlockDownload()) return; // If our best fork is no longer within 72 blocks (+/- 12 hours if no one mines it) // of our head, drop it if (pindexBestForkTip && chainActive.Height() - pindexBestForkTip->nHeight >= 72) pindexBestForkTip = nullptr; if (pindexBestForkTip || (pindexBestInvalid && pindexBestInvalid->nChainWork > chainActive.Tip()->nChainWork + (GetBlockProof(*chainActive.Tip()) * 6))) { if (!GetfLargeWorkForkFound() && pindexBestForkBase) { std::string warning = std::string("'Warning: Large-work fork detected, forking after block ") + pindexBestForkBase->phashBlock->ToString() + std::string("'"); AlertNotify(warning); } if (pindexBestForkTip && pindexBestForkBase) { LogPrintf("%s: Warning: Large valid fork found\n forking the chain at height %d (%s)\n lasting to height %d (%s).\nChain state database corruption likely.\n", __func__, pindexBestForkBase->nHeight, pindexBestForkBase->phashBlock->ToString(), pindexBestForkTip->nHeight, pindexBestForkTip->phashBlock->ToString()); SetfLargeWorkForkFound(true); } else { LogPrintf("%s: Warning: Found invalid chain at least ~6 blocks longer than our best chain.\nChain state database corruption likely.\n", __func__); SetfLargeWorkInvalidChainFound(true); } } else { SetfLargeWorkForkFound(false); SetfLargeWorkInvalidChainFound(false); } } static void CheckForkWarningConditionsOnNewFork(CBlockIndex* pindexNewForkTip) { AssertLockHeld(cs_main); // If we are on a fork that is sufficiently large, set a warning flag CBlockIndex* pfork = pindexNewForkTip; CBlockIndex* plonger = chainActive.Tip(); while (pfork && pfork != plonger) { while (plonger && plonger->nHeight > pfork->nHeight) plonger = plonger->pprev; if (pfork == plonger) break; pfork = pfork->pprev; } // We define a condition where we should warn the user about as a fork of at least 7 blocks // with a tip within 72 blocks (+/- 12 hours if no one mines it) of ours // We use 7 blocks rather arbitrarily as it represents just under 10% of sustained network // hash rate operating on the fork. // or a chain that is entirely longer than ours and invalid (note that this should be detected by both) // We define it this way because it allows us to only store the highest fork tip (+ base) which meets // the 7-block condition and from this always have the most-likely-to-cause-warning fork if (pfork && (!pindexBestForkTip || pindexNewForkTip->nHeight > pindexBestForkTip->nHeight) && pindexNewForkTip->nChainWork - pfork->nChainWork > (GetBlockProof(*pfork) * 7) && chainActive.Height() - pindexNewForkTip->nHeight < 72) { pindexBestForkTip = pindexNewForkTip; pindexBestForkBase = pfork; } CheckForkWarningConditions(); } void static InvalidChainFound(CBlockIndex* pindexNew) { if (!pindexBestInvalid || pindexNew->nChainWork > pindexBestInvalid->nChainWork) pindexBestInvalid = pindexNew; LogPrintf("%s: invalid block=%s height=%d log2_work=%.8g date=%s\n", __func__, pindexNew->GetBlockHash().ToString(), pindexNew->nHeight, log(pindexNew->nChainWork.getdouble())/log(2.0), DateTimeStrFormat("%Y-%m-%d %H:%M:%S", pindexNew->GetBlockTime())); CBlockIndex *tip = chainActive.Tip(); assert (tip); LogPrintf("%s: current best=%s height=%d log2_work=%.8g date=%s\n", __func__, tip->GetBlockHash().ToString(), chainActive.Height(), log(tip->nChainWork.getdouble())/log(2.0), DateTimeStrFormat("%Y-%m-%d %H:%M:%S", tip->GetBlockTime())); CheckForkWarningConditions(); } void static InvalidBlockFound(CBlockIndex *pindex, const CValidationState &state) { if (!state.CorruptionPossible()) { pindex->nStatus |= BLOCK_FAILED_VALID; g_failed_blocks.insert(pindex); setDirtyBlockIndex.insert(pindex); setBlockIndexCandidates.erase(pindex); InvalidChainFound(pindex); } } void UpdateCoins(const CTransaction& tx, CCoinsViewCache& inputs, CTxUndo &txundo, int nHeight) { // mark inputs spent if (!tx.IsCoinBase()) { txundo.vprevout.reserve(tx.vin.size()); for (const CTxIn &txin : tx.vin) { txundo.vprevout.emplace_back(); bool is_spent = inputs.SpendCoin(txin.prevout, &txundo.vprevout.back()); assert(is_spent); } } // add outputs AddCoins(inputs, tx, nHeight); } void UpdateCoins(const CTransaction& tx, CCoinsViewCache& inputs, int nHeight) { CTxUndo txundo; UpdateCoins(tx, inputs, txundo, nHeight); } bool CScriptCheck::operator()() { const CScript &scriptSig = ptxTo->vin[nIn].scriptSig; const CScriptWitness *witness = &ptxTo->vin[nIn].scriptWitness; return VerifyScript(scriptSig, scriptPubKey, witness, nFlags, CachingTransactionSignatureChecker(ptxTo, nIn, amount, cacheStore, *txdata), &error); } int GetSpendHeight(const CCoinsViewCache& inputs) { LOCK(cs_main); CBlockIndex* pindexPrev = mapBlockIndex.find(inputs.GetBestBlock())->second; return pindexPrev->nHeight + 1; } static CuckooCache::cache<uint256, SignatureCacheHasher> scriptExecutionCache; static uint256 scriptExecutionCacheNonce(GetRandHash()); void InitScriptExecutionCache() { // nMaxCacheSize is unsigned. If -maxsigcachesize is set to zero, // setup_bytes creates the minimum possible cache (2 elements). size_t nMaxCacheSize = std::min(std::max((int64_t)0, gArgs.GetArg("-maxsigcachesize", DEFAULT_MAX_SIG_CACHE_SIZE) / 2), MAX_MAX_SIG_CACHE_SIZE) * ((size_t) 1 << 20); size_t nElems = scriptExecutionCache.setup_bytes(nMaxCacheSize); LogPrintf("Using %zu MiB out of %zu/2 requested for script execution cache, able to store %zu elements\n", (nElems*sizeof(uint256)) >>20, (nMaxCacheSize*2)>>20, nElems); } /** * Check whether all inputs of this transaction are valid (no double spends, scripts & sigs, amounts) * This does not modify the UTXO set. * * If pvChecks is not nullptr, script checks are pushed onto it instead of being performed inline. Any * script checks which are not necessary (eg due to script execution cache hits) are, obviously, * not pushed onto pvChecks/run. * * Setting cacheSigStore/cacheFullScriptStore to false will remove elements from the corresponding cache * which are matched. This is useful for checking blocks where we will likely never need the cache * entry again. * * Non-static (and re-declared) in src/test/txvalidationcache_tests.cpp */ bool CheckInputs(const CTransaction& tx, CValidationState &state, const CCoinsViewCache &inputs, bool fScriptChecks, unsigned int flags, bool cacheSigStore, bool cacheFullScriptStore, PrecomputedTransactionData& txdata, std::vector<CScriptCheck> *pvChecks) { if (!tx.IsCoinBase()) { if (!Consensus::CheckTxInputs(tx, state, inputs, GetSpendHeight(inputs))) return false; if (pvChecks) pvChecks->reserve(tx.vin.size()); // The first loop above does all the inexpensive checks. // Only if ALL inputs pass do we perform expensive ECDSA signature checks. // Helps prevent CPU exhaustion attacks. // Skip script verification when connecting blocks under the // assumevalid block. Assuming the assumevalid block is valid this // is safe because block merkle hashes are still computed and checked, // Of course, if an assumed valid block is invalid due to false scriptSigs // this optimization would allow an invalid chain to be accepted. if (fScriptChecks) { // First check if script executions have been cached with the same // flags. Note that this assumes that the inputs provided are // correct (ie that the transaction hash which is in tx's prevouts // properly commits to the scriptPubKey in the inputs view of that // transaction). uint256 hashCacheEntry; // We only use the first 19 bytes of nonce to avoid a second SHA // round - giving us 19 + 32 + 4 = 55 bytes (+ 8 + 1 = 64) static_assert(55 - sizeof(flags) - 32 >= 128/8, "Want at least 128 bits of nonce for script execution cache"); CSHA256().Write(scriptExecutionCacheNonce.begin(), 55 - sizeof(flags) - 32).Write(tx.GetWitnessHash().begin(), 32).Write((unsigned char*)&flags, sizeof(flags)).Finalize(hashCacheEntry.begin()); AssertLockHeld(cs_main); //TODO: Remove this requirement by making CuckooCache not require external locks if (scriptExecutionCache.contains(hashCacheEntry, !cacheFullScriptStore)) { return true; } for (unsigned int i = 0; i < tx.vin.size(); i++) { const COutPoint &prevout = tx.vin[i].prevout; const Coin& coin = inputs.AccessCoin(prevout); assert(!coin.IsSpent()); // We very carefully only pass in things to CScriptCheck which // are clearly committed to by tx' witness hash. This provides // a sanity check that our caching is not introducing consensus // failures through additional data in, eg, the coins being // spent being checked as a part of CScriptCheck. const CScript& scriptPubKey = coin.out.scriptPubKey; const CAmount amount = coin.out.nValue; // Verify signature CScriptCheck check(scriptPubKey, amount, tx, i, flags, cacheSigStore, &txdata); if (pvChecks) { pvChecks->push_back(CScriptCheck()); check.swap(pvChecks->back()); } else if (!check()) { if (flags & STANDARD_NOT_MANDATORY_VERIFY_FLAGS) { // Check whether the failure was caused by a // non-mandatory script verification check, such as // non-standard DER encodings or non-null dummy // arguments; if so, don't trigger DoS protection to // avoid splitting the network between upgraded and // non-upgraded nodes. CScriptCheck check2(scriptPubKey, amount, tx, i, flags & ~STANDARD_NOT_MANDATORY_VERIFY_FLAGS, cacheSigStore, &txdata); if (check2()) return state.Invalid(false, REJECT_NONSTANDARD, strprintf("non-mandatory-script-verify-flag (%s)", ScriptErrorString(check.GetScriptError()))); } // Failures of other flags indicate a transaction that is // invalid in new blocks, e.g. an invalid P2SH. We DoS ban // such nodes as they are not following the protocol. That // said during an upgrade careful thought should be taken // as to the correct behavior - we may want to continue // peering with non-upgraded nodes even after soft-fork // super-majority signaling has occurred. return state.DoS(100,false, REJECT_INVALID, strprintf("mandatory-script-verify-flag-failed (%s)", ScriptErrorString(check.GetScriptError()))); } } if (cacheFullScriptStore && !pvChecks) { // We executed all of the provided scripts, and were told to // cache the result. Do so now. scriptExecutionCache.insert(hashCacheEntry); } } } return true; } namespace { bool UndoWriteToDisk(const CBlockUndo& blockundo, CDiskBlockPos& pos, const uint256& hashBlock, const CMessageHeader::MessageStartChars& messageStart) { // Open history file to append CAutoFile fileout(OpenUndoFile(pos), SER_DISK, CLIENT_VERSION); if (fileout.IsNull()) return error("%s: OpenUndoFile failed", __func__); // Write index header unsigned int nSize = GetSerializeSize(fileout, blockundo); fileout << FLATDATA(messageStart) << nSize; // Write undo data long fileOutPos = ftell(fileout.Get()); if (fileOutPos < 0) return error("%s: ftell failed", __func__); pos.nPos = (unsigned int)fileOutPos; fileout << blockundo; // calculate & write checksum CHashWriter hasher(SER_GETHASH, PROTOCOL_VERSION); hasher << hashBlock; hasher << blockundo; fileout << hasher.GetHash(); return true; } bool UndoReadFromDisk(CBlockUndo& blockundo, const CDiskBlockPos& pos, const uint256& hashBlock) { // Open history file to read CAutoFile filein(OpenUndoFile(pos, true), SER_DISK, CLIENT_VERSION); if (filein.IsNull()) return error("%s: OpenUndoFile failed", __func__); // Read block uint256 hashChecksum; CHashVerifier<CAutoFile> verifier(&filein); // We need a CHashVerifier as reserializing may lose data try { verifier << hashBlock; verifier >> blockundo; filein >> hashChecksum; } catch (const std::exception& e) { return error("%s: Deserialize or I/O error - %s", __func__, e.what()); } // Verify checksum if (hashChecksum != verifier.GetHash()) return error("%s: Checksum mismatch", __func__); return true; } /** Abort with a message */ bool AbortNode(const std::string& strMessage, const std::string& userMessage="") { SetMiscWarning(strMessage); LogPrintf("*** %s\n", strMessage); uiInterface.ThreadSafeMessageBox( userMessage.empty() ? _("Error: A fatal internal error occurred, see debug.log for details") : userMessage, "", CClientUIInterface::MSG_ERROR); StartShutdown(); return false; } bool AbortNode(CValidationState& state, const std::string& strMessage, const std::string& userMessage="") { AbortNode(strMessage, userMessage); return state.Error(strMessage); } } // namespace enum DisconnectResult { DISCONNECT_OK, // All good. DISCONNECT_UNCLEAN, // Rolled back, but UTXO set was inconsistent with block. DISCONNECT_FAILED // Something else went wrong. }; /** * Restore the UTXO in a Coin at a given COutPoint * @param undo The Coin to be restored. * @param view The coins view to which to apply the changes. * @param out The out point that corresponds to the tx input. * @return A DisconnectResult as an int */ int ApplyTxInUndo(Coin&& undo, CCoinsViewCache& view, const COutPoint& out) { bool fClean = true; if (view.HaveCoin(out)) fClean = false; // overwriting transaction output if (undo.nHeight == 0) { // Missing undo metadata (height and coinbase). Older versions included this // information only in undo records for the last spend of a transactions' // outputs. This implies that it must be present for some other output of the same tx. const Coin& alternate = AccessByTxid(view, out.hash); if (!alternate.IsSpent()) { undo.nHeight = alternate.nHeight; undo.fCoinBase = alternate.fCoinBase; } else { return DISCONNECT_FAILED; // adding output for transaction without known metadata } } // The potential_overwrite parameter to AddCoin is only allowed to be false if we know for // sure that the coin did not already exist in the cache. As we have queried for that above // using HaveCoin, we don't need to guess. When fClean is false, a coin already existed and // it is an overwrite. view.AddCoin(out, std::move(undo), !fClean); return fClean ? DISCONNECT_OK : DISCONNECT_UNCLEAN; } /** Undo the effects of this block (with given index) on the UTXO set represented by coins. * When FAILED is returned, view is left in an indeterminate state. */ static DisconnectResult DisconnectBlock(const CBlock& block, const CBlockIndex* pindex, CCoinsViewCache& view) { bool fClean = true; CBlockUndo blockUndo; CDiskBlockPos pos = pindex->GetUndoPos(); if (pos.IsNull()) { error("DisconnectBlock(): no undo data available"); return DISCONNECT_FAILED; } if (!UndoReadFromDisk(blockUndo, pos, pindex->pprev->GetBlockHash())) { error("DisconnectBlock(): failure reading undo data"); return DISCONNECT_FAILED; } if (blockUndo.vtxundo.size() + 1 != block.vtx.size()) { error("DisconnectBlock(): block and undo data inconsistent"); return DISCONNECT_FAILED; } // undo transactions in reverse order for (int i = block.vtx.size() - 1; i >= 0; i--) { const CTransaction &tx = *(block.vtx[i]); uint256 hash = tx.GetHash(); bool is_coinbase = tx.IsCoinBase(); // Check that all outputs are available and match the outputs in the block itself // exactly. for (size_t o = 0; o < tx.vout.size(); o++) { if (!tx.vout[o].scriptPubKey.IsUnspendable()) { COutPoint out(hash, o); Coin coin; bool is_spent = view.SpendCoin(out, &coin); if (!is_spent || tx.vout[o] != coin.out || pindex->nHeight != coin.nHeight || is_coinbase != coin.fCoinBase) { fClean = false; // transaction output mismatch } } } // restore inputs if (i > 0) { // not coinbases CTxUndo &txundo = blockUndo.vtxundo[i-1]; if (txundo.vprevout.size() != tx.vin.size()) { error("DisconnectBlock(): transaction and undo data inconsistent"); return DISCONNECT_FAILED; } for (unsigned int j = tx.vin.size(); j-- > 0;) { const COutPoint &out = tx.vin[j].prevout; int res = ApplyTxInUndo(std::move(txundo.vprevout[j]), view, out); if (res == DISCONNECT_FAILED) return DISCONNECT_FAILED; fClean = fClean && res != DISCONNECT_UNCLEAN; } // At this point, all of txundo.vprevout should have been moved out. } } // move best block pointer to prevout block view.SetBestBlock(pindex->pprev->GetBlockHash()); return fClean ? DISCONNECT_OK : DISCONNECT_UNCLEAN; } void static FlushBlockFile(bool fFinalize = false) { LOCK(cs_LastBlockFile); CDiskBlockPos posOld(nLastBlockFile, 0); FILE *fileOld = OpenBlockFile(posOld); if (fileOld) { if (fFinalize) TruncateFile(fileOld, vinfoBlockFile[nLastBlockFile].nSize); FileCommit(fileOld); fclose(fileOld); } fileOld = OpenUndoFile(posOld); if (fileOld) { if (fFinalize) TruncateFile(fileOld, vinfoBlockFile[nLastBlockFile].nUndoSize); FileCommit(fileOld); fclose(fileOld); } } static bool FindUndoPos(CValidationState &state, int nFile, CDiskBlockPos &pos, unsigned int nAddSize); static CCheckQueue<CScriptCheck> scriptcheckqueue(128); void ThreadScriptCheck() { RenameThread("bitcoin-scriptch"); scriptcheckqueue.Thread(); } // Protected by cs_main VersionBitsCache versionbitscache; int32_t ComputeBlockVersion(const CBlockIndex* pindexPrev, const Consensus::Params& params) { LOCK(cs_main); int32_t nVersion = VERSIONBITS_TOP_BITS; for (int i = 0; i < (int)Consensus::MAX_VERSION_BITS_DEPLOYMENTS; i++) { ThresholdState state = VersionBitsState(pindexPrev, params, (Consensus::DeploymentPos)i, versionbitscache); if (state == THRESHOLD_LOCKED_IN || state == THRESHOLD_STARTED) { nVersion |= VersionBitsMask(params, (Consensus::DeploymentPos)i); } } return nVersion; } /** * Threshold condition checker that triggers when unknown versionbits are seen on the network. */ class WarningBitsConditionChecker : public AbstractThresholdConditionChecker { private: int bit; public: WarningBitsConditionChecker(int bitIn) : bit(bitIn) {} int64_t BeginTime(const Consensus::Params& params) const override { return 0; } int64_t EndTime(const Consensus::Params& params) const override { return std::numeric_limits<int64_t>::max(); } int Period(const Consensus::Params& params) const override { return params.nMinerConfirmationWindow; } int Threshold(const Consensus::Params& params) const override { return params.nRuleChangeActivationThreshold; } bool Condition(const CBlockIndex* pindex, const Consensus::Params& params) const override { return ((pindex->nVersion & VERSIONBITS_TOP_MASK) == VERSIONBITS_TOP_BITS) && ((pindex->nVersion >> bit) & 1) != 0 && ((ComputeBlockVersion(pindex->pprev, params) >> bit) & 1) == 0; } }; // Protected by cs_main static ThresholdConditionCache warningcache[VERSIONBITS_NUM_BITS]; static unsigned int GetBlockScriptFlags(const CBlockIndex* pindex, const Consensus::Params& consensusparams) { AssertLockHeld(cs_main); // BIP16 didn't become active until Apr 1 2012 int64_t nBIP16SwitchTime = 1333238400; bool fStrictPayToScriptHash = (pindex->GetBlockTime() >= nBIP16SwitchTime); unsigned int flags = fStrictPayToScriptHash ? SCRIPT_VERIFY_P2SH : SCRIPT_VERIFY_NONE; // Start enforcing the DERSIG (BIP66) rule if (pindex->nHeight >= consensusparams.BIP66Height) { flags |= SCRIPT_VERIFY_DERSIG; } // Start enforcing CHECKLOCKTIMEVERIFY (BIP65) rule if (pindex->nHeight >= consensusparams.BIP65Height) { flags |= SCRIPT_VERIFY_CHECKLOCKTIMEVERIFY; } // Start enforcing BIP68 (sequence locks) and BIP112 (CHECKSEQUENCEVERIFY) using versionbits logic. if (pindex->nHeight >= consensusparams.nWitnessStartHeight || VersionBitsState(pindex->pprev, consensusparams, Consensus::DEPLOYMENT_CSV, versionbitscache) == THRESHOLD_ACTIVE) { flags |= SCRIPT_VERIFY_CHECKSEQUENCEVERIFY; } // Start enforcing WITNESS rules using versionbits logic. if (IsWitnessEnabled(pindex->pprev, consensusparams)) { flags |= SCRIPT_VERIFY_WITNESS; flags |= SCRIPT_VERIFY_NULLDUMMY; } return flags; } static int64_t nTimeCheck = 0; static int64_t nTimeForks = 0; static int64_t nTimeVerify = 0; static int64_t nTimeConnect = 0; static int64_t nTimeIndex = 0; static int64_t nTimeCallbacks = 0; static int64_t nTimeTotal = 0; /** Apply the effects of this block (with given index) on the UTXO set represented by coins. * Validity checks that depend on the UTXO set are also done; ConnectBlock() * can fail if those validity checks fail (among other reasons). */ static bool ConnectBlock(const CBlock& block, CValidationState& state, CBlockIndex* pindex, CCoinsViewCache& view, const CChainParams& chainparams, bool fJustCheck = false) { AssertLockHeld(cs_main); assert(pindex); // pindex->phashBlock can be null if called by CreateNewBlock/TestBlockValidity assert((pindex->phashBlock == nullptr) || (*pindex->phashBlock == block.GetHash())); int64_t nTimeStart = GetTimeMicros(); // Check it again in case a previous version let a bad block in if (!CheckBlock(block, state, chainparams.GetConsensus(), !fJustCheck, !fJustCheck)) return error("%s: Consensus::CheckBlock: %s", __func__, FormatStateMessage(state)); // verify that the view's current state corresponds to the previous block uint256 hashPrevBlock = pindex->pprev == nullptr ? uint256() : pindex->pprev->GetBlockHash(); assert(hashPrevBlock == view.GetBestBlock()); // Special case for the genesis block, skipping connection of its transactions // (its coinbase is unspendable) if (block.GetHash() == chainparams.GetConsensus().hashGenesisBlock) { if (!fJustCheck) view.SetBestBlock(pindex->GetBlockHash()); return true; } bool fScriptChecks = true; if (!hashAssumeValid.IsNull()) { // We've been configured with the hash of a block which has been externally verified to have a valid history. // A suitable default value is included with the software and updated from time to time. Because validity // relative to a piece of software is an objective fact these defaults can be easily reviewed. // This setting doesn't force the selection of any particular chain but makes validating some faster by // effectively caching the result of part of the verification. BlockMap::const_iterator it = mapBlockIndex.find(hashAssumeValid); if (it != mapBlockIndex.end()) { if (it->second->GetAncestor(pindex->nHeight) == pindex && pindexBestHeader->GetAncestor(pindex->nHeight) == pindex && pindexBestHeader->nChainWork >= nMinimumChainWork) { // This block is a member of the assumed verified chain and an ancestor of the best header. // The equivalent time check discourages hash power from extorting the network via DOS attack // into accepting an invalid block through telling users they must manually set assumevalid. // Requiring a software change or burying the invalid block, regardless of the setting, makes // it hard to hide the implication of the demand. This also avoids having release candidates // that are hardly doing any signature verification at all in testing without having to // artificially set the default assumed verified block further back. // The test against nMinimumChainWork prevents the skipping when denied access to any chain at // least as good as the expected chain. fScriptChecks = (GetBlockProofEquivalentTime(*pindexBestHeader, *pindex, *pindexBestHeader, chainparams.GetConsensus()) <= 60 * 60 * 24 * 7 * 2); } } } int64_t nTime1 = GetTimeMicros(); nTimeCheck += nTime1 - nTimeStart; LogPrint(BCLog::BENCH, " - Sanity checks: %.2fms [%.2fs]\n", 0.001 * (nTime1 - nTimeStart), nTimeCheck * 0.000001); // Do not allow blocks that contain transactions which 'overwrite' older transactions, // unless those are already completely spent. // If such overwrites are allowed, coinbases and transactions depending upon those // can be duplicated to remove the ability to spend the first instance -- even after // being sent to another address. // See BIP30 and http://r6.ca/blog/20120206T005236Z.html for more information. // This logic is not necessary for memory pool transactions, as AcceptToMemoryPool // already refuses previously-known transaction ids entirely. // This rule was originally applied to all blocks with a timestamp after March 15, 2012, 0:00 UTC. // Now that the whole chain is irreversibly beyond that time it is applied to all blocks except the // two in the chain that violate it. This prevents exploiting the issue against nodes during their // initial block download. bool fEnforceBIP30 = (!pindex->phashBlock) || // Enforce on CreateNewBlock invocations which don't have a hash. !((pindex->nHeight==91842 && pindex->GetBlockHash() == uint256S("0x00000000000a4d0a398161ffc163c503763b1f4360639393e0e4c8e300e0caec")) || (pindex->nHeight==91880 && pindex->GetBlockHash() == uint256S("0x00000000000743f190a18c5577a3c2d2a1f610ae9601ac046a38084ccb7cd721"))); // Once BIP34 activated it was not possible to create new duplicate coinbases and thus other than starting // with the 2 existing duplicate coinbase pairs, not possible to create overwriting txs. But by the // time BIP34 activated, in each of the existing pairs the duplicate coinbase had overwritten the first // before the first had been spent. Since those coinbases are sufficiently buried its no longer possible to create further // duplicate transactions descending from the known pairs either. // If we're on the known chain at height greater than where BIP34 activated, we can save the db accesses needed for the BIP30 check. CBlockIndex *pindexBIP34height = pindex->pprev->GetAncestor(chainparams.GetConsensus().BIP34Height); //Only continue to enforce if we're below BIP34 activation height or the block hash at that height doesn't correspond. fEnforceBIP30 = fEnforceBIP30 && (!pindexBIP34height || !(pindexBIP34height->GetBlockHash() == chainparams.GetConsensus().BIP34Hash)); if (fEnforceBIP30) { for (const auto& tx : block.vtx) { for (size_t o = 0; o < tx->vout.size(); o++) { if (view.HaveCoin(COutPoint(tx->GetHash(), o))) { return state.DoS(100, error("ConnectBlock(): tried to overwrite transaction"), REJECT_INVALID, "bad-txns-BIP30"); } } } } // Get the script flags for this block unsigned int flags = GetBlockScriptFlags(pindex, chainparams.GetConsensus()); // Start enforcing BIP68 (sequence locks) and BIP112 (CHECKSEQUENCEVERIFY) using versionbits logic. int nLockTimeFlags = 0; if (flags & SCRIPT_VERIFY_CHECKSEQUENCEVERIFY) { nLockTimeFlags |= LOCKTIME_VERIFY_SEQUENCE; } int64_t nTime2 = GetTimeMicros(); nTimeForks += nTime2 - nTime1; LogPrint(BCLog::BENCH, " - Fork checks: %.2fms [%.2fs]\n", 0.001 * (nTime2 - nTime1), nTimeForks * 0.000001); CBlockUndo blockundo; CCheckQueueControl<CScriptCheck> control(fScriptChecks && nScriptCheckThreads ? &scriptcheckqueue : nullptr); std::vector<int> prevheights; CAmount nFees = 0; int nInputs = 0; int64_t nSigOpsCost = 0; CDiskTxPos pos(pindex->GetBlockPos(), GetSizeOfCompactSize(block.vtx.size())); std::vector<std::pair<uint256, CDiskTxPos> > vPos; vPos.reserve(block.vtx.size()); blockundo.vtxundo.reserve(block.vtx.size() - 1); std::vector<PrecomputedTransactionData> txdata; txdata.reserve(block.vtx.size()); // Required so that pointers to individual PrecomputedTransactionData don't get invalidated for (unsigned int i = 0; i < block.vtx.size(); i++) { const CTransaction &tx = *(block.vtx[i]); nInputs += tx.vin.size(); if (!tx.IsCoinBase()) { if (!view.HaveInputs(tx)) return state.DoS(100, error("ConnectBlock(): inputs missing/spent"), REJECT_INVALID, "bad-txns-inputs-missingorspent"); // Check that transaction is BIP68 final // BIP68 lock checks (as opposed to nLockTime checks) must // be in ConnectBlock because they require the UTXO set prevheights.resize(tx.vin.size()); for (size_t j = 0; j < tx.vin.size(); j++) { prevheights[j] = view.AccessCoin(tx.vin[j].prevout).nHeight; } if (!SequenceLocks(tx, nLockTimeFlags, &prevheights, *pindex)) { return state.DoS(100, error("%s: contains a non-BIP68-final transaction", __func__), REJECT_INVALID, "bad-txns-nonfinal"); } } // GetTransactionSigOpCost counts 3 types of sigops: // * legacy (always) // * p2sh (when P2SH enabled in flags and excludes coinbase) // * witness (when witness enabled in flags and excludes coinbase) nSigOpsCost += GetTransactionSigOpCost(tx, view, flags); if (nSigOpsCost > MAX_BLOCK_SIGOPS_COST) return state.DoS(100, error("ConnectBlock(): too many sigops"), REJECT_INVALID, "bad-blk-sigops"); txdata.emplace_back(tx); if (!tx.IsCoinBase()) { nFees += view.GetValueIn(tx)-tx.GetValueOut(); std::vector<CScriptCheck> vChecks; bool fCacheResults = fJustCheck; /* Don't cache results if we're actually connecting blocks (still consult the cache, though) */ if (!CheckInputs(tx, state, view, fScriptChecks, flags, fCacheResults, fCacheResults, txdata[i], nScriptCheckThreads ? &vChecks : nullptr)) return error("ConnectBlock(): CheckInputs on %s failed with %s", tx.GetHash().ToString(), FormatStateMessage(state)); control.Add(vChecks); } CTxUndo undoDummy; if (i > 0) { blockundo.vtxundo.push_back(CTxUndo()); } UpdateCoins(tx, view, i == 0 ? undoDummy : blockundo.vtxundo.back(), pindex->nHeight); vPos.push_back(std::make_pair(tx.GetHash(), pos)); pos.nTxOffset += ::GetSerializeSize(tx, SER_DISK, CLIENT_VERSION); } int64_t nTime3 = GetTimeMicros(); nTimeConnect += nTime3 - nTime2; LogPrint(BCLog::BENCH, " - Connect %u transactions: %.2fms (%.3fms/tx, %.3fms/txin) [%.2fs]\n", (unsigned)block.vtx.size(), 0.001 * (nTime3 - nTime2), 0.001 * (nTime3 - nTime2) / block.vtx.size(), nInputs <= 1 ? 0 : 0.001 * (nTime3 - nTime2) / (nInputs-1), nTimeConnect * 0.000001); CAmount blockReward = nFees + GetBlockSubsidy(pindex->nHeight, chainparams.GetConsensus()); if (block.vtx[0]->GetValueOut() > blockReward) return state.DoS(100, error("ConnectBlock(): coinbase pays too much (actual=%d vs limit=%d)", block.vtx[0]->GetValueOut(), blockReward), REJECT_INVALID, "bad-cb-amount"); if (!control.Wait()) return state.DoS(100, error("%s: CheckQueue failed", __func__), REJECT_INVALID, "block-validation-failed"); int64_t nTime4 = GetTimeMicros(); nTimeVerify += nTime4 - nTime2; LogPrint(BCLog::BENCH, " - Verify %u txins: %.2fms (%.3fms/txin) [%.2fs]\n", nInputs - 1, 0.001 * (nTime4 - nTime2), nInputs <= 1 ? 0 : 0.001 * (nTime4 - nTime2) / (nInputs-1), nTimeVerify * 0.000001); if (fJustCheck) return true; // Write undo information to disk if (pindex->GetUndoPos().IsNull() || !pindex->IsValid(BLOCK_VALID_SCRIPTS)) { if (pindex->GetUndoPos().IsNull()) { CDiskBlockPos _pos; if (!FindUndoPos(state, pindex->nFile, _pos, ::GetSerializeSize(blockundo, SER_DISK, CLIENT_VERSION) + 40)) return error("ConnectBlock(): FindUndoPos failed"); if (!UndoWriteToDisk(blockundo, _pos, pindex->pprev->GetBlockHash(), chainparams.MessageStart())) return AbortNode(state, "Failed to write undo data"); // update nUndoPos in block index pindex->nUndoPos = _pos.nPos; pindex->nStatus |= BLOCK_HAVE_UNDO; } pindex->RaiseValidity(BLOCK_VALID_SCRIPTS); setDirtyBlockIndex.insert(pindex); } if (fTxIndex) if (!pblocktree->WriteTxIndex(vPos)) return AbortNode(state, "Failed to write transaction index"); // add this block to the view's block chain view.SetBestBlock(pindex->GetBlockHash()); int64_t nTime5 = GetTimeMicros(); nTimeIndex += nTime5 - nTime4; LogPrint(BCLog::BENCH, " - Index writing: %.2fms [%.2fs]\n", 0.001 * (nTime5 - nTime4), nTimeIndex * 0.000001); int64_t nTime6 = GetTimeMicros(); nTimeCallbacks += nTime6 - nTime5; LogPrint(BCLog::BENCH, " - Callbacks: %.2fms [%.2fs]\n", 0.001 * (nTime6 - nTime5), nTimeCallbacks * 0.000001); return true; } /** * Update the on-disk chain state. * The caches and indexes are flushed depending on the mode we're called with * if they're too large, if it's been a while since the last write, * or always and in all cases if we're in prune mode and are deleting files. */ bool static FlushStateToDisk(const CChainParams& chainparams, CValidationState &state, FlushStateMode mode, int nManualPruneHeight) { int64_t nMempoolUsage = mempool.DynamicMemoryUsage(); LOCK(cs_main); static int64_t nLastWrite = 0; static int64_t nLastFlush = 0; static int64_t nLastSetChain = 0; std::set<int> setFilesToPrune; bool fFlushForPrune = false; bool fDoFullFlush = false; int64_t nNow = 0; try { { LOCK(cs_LastBlockFile); if (fPruneMode && (fCheckForPruning || nManualPruneHeight > 0) && !fReindex) { if (nManualPruneHeight > 0) { FindFilesToPruneManual(setFilesToPrune, nManualPruneHeight); } else { FindFilesToPrune(setFilesToPrune, chainparams.PruneAfterHeight()); fCheckForPruning = false; } if (!setFilesToPrune.empty()) { fFlushForPrune = true; if (!fHavePruned) { pblocktree->WriteFlag("prunedblockfiles", true); fHavePruned = true; } } } nNow = GetTimeMicros(); // Avoid writing/flushing immediately after startup. if (nLastWrite == 0) { nLastWrite = nNow; } if (nLastFlush == 0) { nLastFlush = nNow; } if (nLastSetChain == 0) { nLastSetChain = nNow; } int64_t nMempoolSizeMax = gArgs.GetArg("-maxmempool", DEFAULT_MAX_MEMPOOL_SIZE) * 1000000; int64_t cacheSize = pcoinsTip->DynamicMemoryUsage(); int64_t nTotalSpace = nCoinCacheUsage + std::max<int64_t>(nMempoolSizeMax - nMempoolUsage, 0); // The cache is large and we're within 10% and 10 MiB of the limit, but we have time now (not in the middle of a block processing). bool fCacheLarge = mode == FLUSH_STATE_PERIODIC && cacheSize > std::max((9 * nTotalSpace) / 10, nTotalSpace - MAX_BLOCK_COINSDB_USAGE * 1024 * 1024); // The cache is over the limit, we have to write now. bool fCacheCritical = mode == FLUSH_STATE_IF_NEEDED && cacheSize > nTotalSpace; // It's been a while since we wrote the block index to disk. Do this frequently, so we don't need to redownload after a crash. bool fPeriodicWrite = mode == FLUSH_STATE_PERIODIC && nNow > nLastWrite + (int64_t)DATABASE_WRITE_INTERVAL * 1000000; // It's been very long since we flushed the cache. Do this infrequently, to optimize cache usage. bool fPeriodicFlush = mode == FLUSH_STATE_PERIODIC && nNow > nLastFlush + (int64_t)DATABASE_FLUSH_INTERVAL * 1000000; // Combine all conditions that result in a full cache flush. fDoFullFlush = (mode == FLUSH_STATE_ALWAYS) || fCacheLarge || fCacheCritical || fPeriodicFlush || fFlushForPrune; // Write blocks and block index to disk. if (fDoFullFlush || fPeriodicWrite) { // Depend on nMinDiskSpace to ensure we can write block index if (!CheckDiskSpace(0)) return state.Error("out of disk space"); // First make sure all block and undo data is flushed to disk. FlushBlockFile(); // Then update all block file information (which may refer to block and undo files). { std::vector<std::pair<int, const CBlockFileInfo*> > vFiles; vFiles.reserve(setDirtyFileInfo.size()); for (std::set<int>::iterator it = setDirtyFileInfo.begin(); it != setDirtyFileInfo.end(); ) { vFiles.push_back(std::make_pair(*it, &vinfoBlockFile[*it])); setDirtyFileInfo.erase(it++); } std::vector<const CBlockIndex*> vBlocks; vBlocks.reserve(setDirtyBlockIndex.size()); for (std::set<CBlockIndex*>::iterator it = setDirtyBlockIndex.begin(); it != setDirtyBlockIndex.end(); ) { vBlocks.push_back(*it); setDirtyBlockIndex.erase(it++); } if (!pblocktree->WriteBatchSync(vFiles, nLastBlockFile, vBlocks, mapDirtyAuxPow)) { return AbortNode(state, "Failed to write to block index database"); } for (std::vector<const CBlockIndex*>::const_iterator it = vBlocks.begin(); it != vBlocks.end(); it++) { mapDirtyAuxPow.erase((*it)->GetBlockHash()); } } // Finally remove any pruned files if (fFlushForPrune) UnlinkPrunedFiles(setFilesToPrune); nLastWrite = nNow; } // Flush best chain related state. This can only be done if the blocks / block index write was also done. if (fDoFullFlush) { // Typical Coin structures on disk are around 48 bytes in size. // Pushing a new one to the database can cause it to be written // twice (once in the log, and once in the tables). This is already // an overestimation, as most will delete an existing entry or // overwrite one. Still, use a conservative safety factor of 2. if (!CheckDiskSpace(48 * 2 * 2 * pcoinsTip->GetCacheSize())) return state.Error("out of disk space"); // Flush the chainstate (which may refer to block index entries). if (!pcoinsTip->Flush()) return AbortNode(state, "Failed to write to coin database"); nLastFlush = nNow; } } if (fDoFullFlush || ((mode == FLUSH_STATE_ALWAYS || mode == FLUSH_STATE_PERIODIC) && nNow > nLastSetChain + (int64_t)DATABASE_WRITE_INTERVAL * 1000000)) { // Update best block in wallet (so we can detect restored wallets). GetMainSignals().SetBestChain(chainActive.GetLocator()); nLastSetChain = nNow; } } catch (const std::runtime_error& e) { return AbortNode(state, std::string("System error while flushing: ") + e.what()); } return true; } void FlushStateToDisk() { CValidationState state; const CChainParams& chainparams = Params(); FlushStateToDisk(chainparams, state, FLUSH_STATE_ALWAYS); } void PruneAndFlush() { CValidationState state; fCheckForPruning = true; const CChainParams& chainparams = Params(); FlushStateToDisk(chainparams, state, FLUSH_STATE_NONE); } static void DoWarning(const std::string& strWarning) { static bool fWarned = false; SetMiscWarning(strWarning); if (!fWarned) { AlertNotify(strWarning); fWarned = true; } } /** Update chainActive and related internal data structures. */ void static UpdateTip(CBlockIndex *pindexNew, const CChainParams& chainParams) { chainActive.SetTip(pindexNew); // New best block mempool.AddTransactionsUpdated(1); cvBlockChange.notify_all(); std::vector<std::string> warningMessages; if (!IsInitialBlockDownload()) { int nUpgraded = 0; const CBlockIndex* pindex = chainActive.Tip(); for (int bit = 0; bit < VERSIONBITS_NUM_BITS; bit++) { WarningBitsConditionChecker checker(bit); ThresholdState state = checker.GetStateFor(pindex, chainParams.GetConsensus(), warningcache[bit]); if (state == THRESHOLD_ACTIVE || state == THRESHOLD_LOCKED_IN) { const std::string strWarning = strprintf(_("Warning: unknown new rules activated (versionbit %i)"), bit); if (state == THRESHOLD_ACTIVE) { DoWarning(strWarning); } else { warningMessages.push_back(strWarning); } } } // Check the version of the last 100 blocks to see if we need to upgrade: for (int i = 0; i < 100 && pindex != nullptr; i++) { int32_t nExpectedVersion = ComputeBlockVersion(pindex->pprev, chainParams.GetConsensus()); if ((pindex->nVersion & 0xff) > VERSIONBITS_LAST_OLD_BLOCK_VERSION && (pindex->nVersion & 0xff & ~nExpectedVersion) != 0) ++nUpgraded; pindex = pindex->pprev; } if (nUpgraded > 0) warningMessages.push_back(strprintf(_("%d of last 100 blocks have unexpected version"), nUpgraded)); if (nUpgraded > 100/2) { std::string strWarning = _("Warning: Unknown block versions being mined! It's possible unknown rules are in effect"); // notify GetWarnings(), called by Qt and the JSON-RPC code to warn the user: DoWarning(strWarning); } } LogPrintf("%s: new best=%s height=%d version=0x%08x log2_work=%.8g tx=%lu date='%s' progress=%f cache=%.1fMiB(%utxo)", __func__, chainActive.Tip()->GetBlockHash().ToString(), chainActive.Height(), chainActive.Tip()->nVersion, log(chainActive.Tip()->nChainWork.getdouble())/log(2.0), (unsigned long)chainActive.Tip()->nChainTx, DateTimeStrFormat("%Y-%m-%d %H:%M:%S", chainActive.Tip()->GetBlockTime()), GuessVerificationProgress(chainParams.TxData(), chainActive.Tip()), pcoinsTip->DynamicMemoryUsage() * (1.0 / (1<<20)), pcoinsTip->GetCacheSize()); if (!warningMessages.empty()) LogPrintf(" warning='%s'", boost::algorithm::join(warningMessages, ", ")); LogPrintf("\n"); } /** Disconnect chainActive's tip. * After calling, the mempool will be in an inconsistent state, with * transactions from disconnected blocks being added to disconnectpool. You * should make the mempool consistent again by calling UpdateMempoolForReorg. * with cs_main held. * * If disconnectpool is nullptr, then no disconnected transactions are added to * disconnectpool (note that the caller is responsible for mempool consistency * in any case). */ bool static DisconnectTip(CValidationState& state, const CChainParams& chainparams, DisconnectedBlockTransactions *disconnectpool) { CBlockIndex *pindexDelete = chainActive.Tip(); assert(pindexDelete); // Read block from disk. std::shared_ptr<CBlock> pblock = std::make_shared<CBlock>(); CBlock& block = *pblock; if (!ReadBlockFromDisk(block, pindexDelete, chainparams.GetConsensus())) return AbortNode(state, "Failed to read block"); // Apply the block atomically to the chain state. int64_t nStart = GetTimeMicros(); { CCoinsViewCache view(pcoinsTip); assert(view.GetBestBlock() == pindexDelete->GetBlockHash()); if (DisconnectBlock(block, pindexDelete, view) != DISCONNECT_OK) return error("DisconnectTip(): DisconnectBlock %s failed", pindexDelete->GetBlockHash().ToString()); bool flushed = view.Flush(); assert(flushed); } LogPrint(BCLog::BENCH, "- Disconnect block: %.2fms\n", (GetTimeMicros() - nStart) * 0.001); // Write the chain state to disk, if necessary. if (!FlushStateToDisk(chainparams, state, FLUSH_STATE_IF_NEEDED)) return false; if (disconnectpool) { // Save transactions to re-add to mempool at end of reorg for (auto it = block.vtx.rbegin(); it != block.vtx.rend(); ++it) { disconnectpool->addTransaction(*it); } while (disconnectpool->DynamicMemoryUsage() > MAX_DISCONNECTED_TX_POOL_SIZE * 1000) { // Drop the earliest entry, and remove its children from the mempool. auto it = disconnectpool->queuedTx.get<insertion_order>().begin(); mempool.removeRecursive(**it, MemPoolRemovalReason::REORG); disconnectpool->removeEntry(it); } } // Update chainActive and related variables. UpdateTip(pindexDelete->pprev, chainparams); // Let wallets know transactions went from 1-confirmed to // 0-confirmed or conflicted: GetMainSignals().BlockDisconnected(pblock); return true; } static int64_t nTimeReadFromDisk = 0; static int64_t nTimeConnectTotal = 0; static int64_t nTimeFlush = 0; static int64_t nTimeChainState = 0; static int64_t nTimePostConnect = 0; struct PerBlockConnectTrace { CBlockIndex* pindex = nullptr; std::shared_ptr<const CBlock> pblock; std::shared_ptr<std::vector<CTransactionRef>> conflictedTxs; PerBlockConnectTrace() : conflictedTxs(std::make_shared<std::vector<CTransactionRef>>()) {} }; /** * Used to track blocks whose transactions were applied to the UTXO state as a * part of a single ActivateBestChainStep call. * * This class also tracks transactions that are removed from the mempool as * conflicts (per block) and can be used to pass all those transactions * through SyncTransaction. * * This class assumes (and asserts) that the conflicted transactions for a given * block are added via mempool callbacks prior to the BlockConnected() associated * with those transactions. If any transactions are marked conflicted, it is * assumed that an associated block will always be added. * * This class is single-use, once you call GetBlocksConnected() you have to throw * it away and make a new one. */ class ConnectTrace { private: std::vector<PerBlockConnectTrace> blocksConnected; CTxMemPool &pool; public: ConnectTrace(CTxMemPool &_pool) : blocksConnected(1), pool(_pool) { pool.NotifyEntryRemoved.connect(boost::bind(&ConnectTrace::NotifyEntryRemoved, this, _1, _2)); } ~ConnectTrace() { pool.NotifyEntryRemoved.disconnect(boost::bind(&ConnectTrace::NotifyEntryRemoved, this, _1, _2)); } void BlockConnected(CBlockIndex* pindex, std::shared_ptr<const CBlock> pblock) { assert(!blocksConnected.back().pindex); assert(pindex); assert(pblock); blocksConnected.back().pindex = pindex; blocksConnected.back().pblock = std::move(pblock); blocksConnected.emplace_back(); } std::vector<PerBlockConnectTrace>& GetBlocksConnected() { // We always keep one extra block at the end of our list because // blocks are added after all the conflicted transactions have // been filled in. Thus, the last entry should always be an empty // one waiting for the transactions from the next block. We pop // the last entry here to make sure the list we return is sane. assert(!blocksConnected.back().pindex); assert(blocksConnected.back().conflictedTxs->empty()); blocksConnected.pop_back(); return blocksConnected; } void NotifyEntryRemoved(CTransactionRef txRemoved, MemPoolRemovalReason reason) { assert(!blocksConnected.back().pindex); if (reason == MemPoolRemovalReason::CONFLICT) { blocksConnected.back().conflictedTxs->emplace_back(std::move(txRemoved)); } } }; /** * Connect a new block to chainActive. pblock is either nullptr or a pointer to a CBlock * corresponding to pindexNew, to bypass loading it again from disk. * * The block is added to connectTrace if connection succeeds. */ bool static ConnectTip(CValidationState& state, const CChainParams& chainparams, CBlockIndex* pindexNew, const std::shared_ptr<const CBlock>& pblock, ConnectTrace& connectTrace, DisconnectedBlockTransactions &disconnectpool) { assert(pindexNew->pprev == chainActive.Tip()); // Read block from disk. int64_t nTime1 = GetTimeMicros(); std::shared_ptr<const CBlock> pthisBlock; if (!pblock) { std::shared_ptr<CBlock> pblockNew = std::make_shared<CBlock>(); if (!ReadBlockFromDisk(*pblockNew, pindexNew, chainparams.GetConsensus())) return AbortNode(state, "Failed to read block"); pthisBlock = pblockNew; } else { pthisBlock = pblock; } const CBlock& blockConnecting = *pthisBlock; // Apply the block atomically to the chain state. int64_t nTime2 = GetTimeMicros(); nTimeReadFromDisk += nTime2 - nTime1; int64_t nTime3; LogPrint(BCLog::BENCH, " - Load block from disk: %.2fms [%.2fs]\n", (nTime2 - nTime1) * 0.001, nTimeReadFromDisk * 0.000001); { CCoinsViewCache view(pcoinsTip); bool rv = ConnectBlock(blockConnecting, state, pindexNew, view, chainparams); GetMainSignals().BlockChecked(blockConnecting, state); if (!rv) { if (state.IsInvalid()) InvalidBlockFound(pindexNew, state); return error("ConnectTip(): ConnectBlock %s failed", pindexNew->GetBlockHash().ToString()); } nTime3 = GetTimeMicros(); nTimeConnectTotal += nTime3 - nTime2; LogPrint(BCLog::BENCH, " - Connect total: %.2fms [%.2fs]\n", (nTime3 - nTime2) * 0.001, nTimeConnectTotal * 0.000001); bool flushed = view.Flush(); assert(flushed); } int64_t nTime4 = GetTimeMicros(); nTimeFlush += nTime4 - nTime3; LogPrint(BCLog::BENCH, " - Flush: %.2fms [%.2fs]\n", (nTime4 - nTime3) * 0.001, nTimeFlush * 0.000001); // Write the chain state to disk, if necessary. if (!FlushStateToDisk(chainparams, state, FLUSH_STATE_IF_NEEDED)) return false; int64_t nTime5 = GetTimeMicros(); nTimeChainState += nTime5 - nTime4; LogPrint(BCLog::BENCH, " - Writing chainstate: %.2fms [%.2fs]\n", (nTime5 - nTime4) * 0.001, nTimeChainState * 0.000001); // Remove conflicting transactions from the mempool.; mempool.removeForBlock(blockConnecting.vtx, pindexNew->nHeight); disconnectpool.removeForBlock(blockConnecting.vtx); // Update chainActive & related variables. UpdateTip(pindexNew, chainparams); int64_t nTime6 = GetTimeMicros(); nTimePostConnect += nTime6 - nTime5; nTimeTotal += nTime6 - nTime1; LogPrint(BCLog::BENCH, " - Connect postprocess: %.2fms [%.2fs]\n", (nTime6 - nTime5) * 0.001, nTimePostConnect * 0.000001); LogPrint(BCLog::BENCH, "- Connect block: %.2fms [%.2fs]\n", (nTime6 - nTime1) * 0.001, nTimeTotal * 0.000001); connectTrace.BlockConnected(pindexNew, std::move(pthisBlock)); return true; } /** * Return the tip of the chain with the most work in it, that isn't * known to be invalid (it's however far from certain to be valid). */ static CBlockIndex* FindMostWorkChain() { do { CBlockIndex *pindexNew = nullptr; // Find the best candidate header. { std::set<CBlockIndex*, CBlockIndexWorkComparator>::reverse_iterator it = setBlockIndexCandidates.rbegin(); if (it == setBlockIndexCandidates.rend()) return nullptr; pindexNew = *it; } // Check whether all blocks on the path between the currently active chain and the candidate are valid. // Just going until the active chain is an optimization, as we know all blocks in it are valid already. CBlockIndex *pindexTest = pindexNew; bool fInvalidAncestor = false; while (pindexTest && !chainActive.Contains(pindexTest)) { assert(pindexTest->nChainTx || pindexTest->nHeight == 0); // Pruned nodes may have entries in setBlockIndexCandidates for // which block files have been deleted. Remove those as candidates // for the most work chain if we come across them; we can't switch // to a chain unless we have all the non-active-chain parent blocks. bool fFailedChain = pindexTest->nStatus & BLOCK_FAILED_MASK; bool fMissingData = !(pindexTest->nStatus & BLOCK_HAVE_DATA); if (fFailedChain || fMissingData) { // Candidate chain is not usable (either invalid or missing data) if (fFailedChain && (pindexBestInvalid == nullptr || pindexNew->nChainWork > pindexBestInvalid->nChainWork)) pindexBestInvalid = pindexNew; CBlockIndex *pindexFailed = pindexNew; // Remove the entire chain from the set. while (pindexTest != pindexFailed) { if (fFailedChain) { pindexFailed->nStatus |= BLOCK_FAILED_CHILD; } else if (fMissingData) { // If we're missing data, then add back to mapBlocksUnlinked, // so that if the block arrives in the future we can try adding // to setBlockIndexCandidates again. mapBlocksUnlinked.insert(std::make_pair(pindexFailed->pprev, pindexFailed)); } setBlockIndexCandidates.erase(pindexFailed); pindexFailed = pindexFailed->pprev; } setBlockIndexCandidates.erase(pindexTest); fInvalidAncestor = true; break; } pindexTest = pindexTest->pprev; } if (!fInvalidAncestor) return pindexNew; } while(true); } /** Delete all entries in setBlockIndexCandidates that are worse than the current tip. */ static void PruneBlockIndexCandidates() { // Note that we can't delete the current block itself, as we may need to return to it later in case a // reorganization to a better block fails. std::set<CBlockIndex*, CBlockIndexWorkComparator>::iterator it = setBlockIndexCandidates.begin(); while (it != setBlockIndexCandidates.end() && setBlockIndexCandidates.value_comp()(*it, chainActive.Tip())) { setBlockIndexCandidates.erase(it++); } // Either the current tip or a successor of it we're working towards is left in setBlockIndexCandidates. assert(!setBlockIndexCandidates.empty()); } /** * Try to make some progress towards making pindexMostWork the active block. * pblock is either nullptr or a pointer to a CBlock corresponding to pindexMostWork. */ static bool ActivateBestChainStep(CValidationState& state, const CChainParams& chainparams, CBlockIndex* pindexMostWork, const std::shared_ptr<const CBlock>& pblock, bool& fInvalidFound, ConnectTrace& connectTrace) { AssertLockHeld(cs_main); const CBlockIndex *pindexOldTip = chainActive.Tip(); const CBlockIndex *pindexFork = chainActive.FindFork(pindexMostWork); // Disconnect active blocks which are no longer in the best chain. bool fBlocksDisconnected = false; DisconnectedBlockTransactions disconnectpool; while (chainActive.Tip() && chainActive.Tip() != pindexFork) { if (!DisconnectTip(state, chainparams, &disconnectpool)) { // This is likely a fatal error, but keep the mempool consistent, // just in case. Only remove from the mempool in this case. UpdateMempoolForReorg(disconnectpool, false); return false; } fBlocksDisconnected = true; } // Build list of new blocks to connect. std::vector<CBlockIndex*> vpindexToConnect; bool fContinue = true; int nHeight = pindexFork ? pindexFork->nHeight : -1; while (fContinue && nHeight != pindexMostWork->nHeight) { // Don't iterate the entire list of potential improvements toward the best tip, as we likely only need // a few blocks along the way. int nTargetHeight = std::min(nHeight + 32, pindexMostWork->nHeight); vpindexToConnect.clear(); vpindexToConnect.reserve(nTargetHeight - nHeight); CBlockIndex *pindexIter = pindexMostWork->GetAncestor(nTargetHeight); while (pindexIter && pindexIter->nHeight != nHeight) { vpindexToConnect.push_back(pindexIter); pindexIter = pindexIter->pprev; } nHeight = nTargetHeight; // Connect new blocks. for (CBlockIndex *pindexConnect : reverse_iterate(vpindexToConnect)) { if (!ConnectTip(state, chainparams, pindexConnect, pindexConnect == pindexMostWork ? pblock : std::shared_ptr<const CBlock>(), connectTrace, disconnectpool)) { if (state.IsInvalid()) { // The block violates a consensus rule. if (!state.CorruptionPossible()) InvalidChainFound(vpindexToConnect.back()); state = CValidationState(); fInvalidFound = true; fContinue = false; break; } else { // A system error occurred (disk space, database error, ...). // Make the mempool consistent with the current tip, just in case // any observers try to use it before shutdown. UpdateMempoolForReorg(disconnectpool, false); return false; } } else { PruneBlockIndexCandidates(); if (!pindexOldTip || chainActive.Tip()->nChainWork > pindexOldTip->nChainWork) { // We're in a better position than we were. Return temporarily to release the lock. fContinue = false; break; } } } } if (fBlocksDisconnected) { // If any blocks were disconnected, disconnectpool may be non empty. Add // any disconnected transactions back to the mempool. UpdateMempoolForReorg(disconnectpool, true); } mempool.check(pcoinsTip); // Callbacks/notifications for a new best chain. if (fInvalidFound) CheckForkWarningConditionsOnNewFork(vpindexToConnect.back()); else CheckForkWarningConditions(); return true; } static void NotifyHeaderTip() { bool fNotify = false; bool fInitialBlockDownload = false; static CBlockIndex* pindexHeaderOld = nullptr; CBlockIndex* pindexHeader = nullptr; { LOCK(cs_main); pindexHeader = pindexBestHeader; if (pindexHeader != pindexHeaderOld) { fNotify = true; fInitialBlockDownload = IsInitialBlockDownload(); pindexHeaderOld = pindexHeader; } } // Send block tip changed notifications without cs_main if (fNotify) { uiInterface.NotifyHeaderTip(fInitialBlockDownload, pindexHeader); } } /** * Make the best chain active, in multiple steps. The result is either failure * or an activated best chain. pblock is either nullptr or a pointer to a block * that is already loaded (to avoid loading it again from disk). */ bool ActivateBestChain(CValidationState &state, const CChainParams& chainparams, std::shared_ptr<const CBlock> pblock) { // Note that while we're often called here from ProcessNewBlock, this is // far from a guarantee. Things in the P2P/RPC will often end up calling // us in the middle of ProcessNewBlock - do not assume pblock is set // sanely for performance or correctness! CBlockIndex *pindexMostWork = nullptr; CBlockIndex *pindexNewTip = nullptr; int nStopAtHeight = gArgs.GetArg("-stopatheight", DEFAULT_STOPATHEIGHT); do { boost::this_thread::interruption_point(); if (ShutdownRequested()) break; const CBlockIndex *pindexFork; bool fInitialDownload; { LOCK(cs_main); ConnectTrace connectTrace(mempool); // Destructed before cs_main is unlocked CBlockIndex *pindexOldTip = chainActive.Tip(); if (pindexMostWork == nullptr) { pindexMostWork = FindMostWorkChain(); } // Whether we have anything to do at all. if (pindexMostWork == nullptr || pindexMostWork == chainActive.Tip()) return true; bool fInvalidFound = false; std::shared_ptr<const CBlock> nullBlockPtr; if (!ActivateBestChainStep(state, chainparams, pindexMostWork, pblock && pblock->GetHash() == pindexMostWork->GetBlockHash() ? pblock : nullBlockPtr, fInvalidFound, connectTrace)) return false; if (fInvalidFound) { // Wipe cache, we may need another branch now. pindexMostWork = nullptr; } pindexNewTip = chainActive.Tip(); pindexFork = chainActive.FindFork(pindexOldTip); fInitialDownload = IsInitialBlockDownload(); for (const PerBlockConnectTrace& trace : connectTrace.GetBlocksConnected()) { assert(trace.pblock && trace.pindex); GetMainSignals().BlockConnected(trace.pblock, trace.pindex, *trace.conflictedTxs); } } // When we reach this point, we switched to a new tip (stored in pindexNewTip). // Notifications/callbacks that can run without cs_main // Notify external listeners about the new tip. GetMainSignals().UpdatedBlockTip(pindexNewTip, pindexFork, fInitialDownload); // Always notify the UI if a new block tip was connected if (pindexFork != pindexNewTip) { uiInterface.NotifyBlockTip(fInitialDownload, pindexNewTip); } if (nStopAtHeight && pindexNewTip && pindexNewTip->nHeight >= nStopAtHeight) StartShutdown(); } while (pindexNewTip != pindexMostWork); CheckBlockIndex(chainparams.GetConsensus()); // Write changes periodically to disk, after relay. if (!FlushStateToDisk(chainparams, state, FLUSH_STATE_PERIODIC)) { return false; } return true; } bool PreciousBlock(CValidationState& state, const CChainParams& params, CBlockIndex *pindex) { { LOCK(cs_main); if (pindex->nChainWork < chainActive.Tip()->nChainWork) { // Nothing to do, this block is not at the tip. return true; } if (chainActive.Tip()->nChainWork > nLastPreciousChainwork) { // The chain has been extended since the last call, reset the counter. nBlockReverseSequenceId = -1; } nLastPreciousChainwork = chainActive.Tip()->nChainWork; setBlockIndexCandidates.erase(pindex); pindex->nSequenceId = nBlockReverseSequenceId; if (nBlockReverseSequenceId > std::numeric_limits<int32_t>::min()) { // We can't keep reducing the counter if somebody really wants to // call preciousblock 2**31-1 times on the same set of tips... nBlockReverseSequenceId--; } if (pindex->IsValid(BLOCK_VALID_TRANSACTIONS) && pindex->nChainTx) { setBlockIndexCandidates.insert(pindex); PruneBlockIndexCandidates(); } } return ActivateBestChain(state, params); } bool InvalidateBlock(CValidationState& state, const CChainParams& chainparams, CBlockIndex *pindex) { AssertLockHeld(cs_main); // We first disconnect backwards and then mark the blocks as invalid. // This prevents a case where pruned nodes may fail to invalidateblock // and be left unable to start as they have no tip candidates (as there // are no blocks that meet the "have data and are not invalid per // nStatus" criteria for inclusion in setBlockIndexCandidates). bool pindex_was_in_chain = false; CBlockIndex *invalid_walk_tip = chainActive.Tip(); DisconnectedBlockTransactions disconnectpool; while (chainActive.Contains(pindex)) { pindex_was_in_chain = true; // ActivateBestChain considers blocks already in chainActive // unconditionally valid already, so force disconnect away from it. if (!DisconnectTip(state, chainparams, &disconnectpool)) { // It's probably hopeless to try to make the mempool consistent // here if DisconnectTip failed, but we can try. UpdateMempoolForReorg(disconnectpool, false); return false; } } // Now mark the blocks we just disconnected as descendants invalid // (note this may not be all descendants). while (pindex_was_in_chain && invalid_walk_tip != pindex) { invalid_walk_tip->nStatus |= BLOCK_FAILED_CHILD; setDirtyBlockIndex.insert(invalid_walk_tip); setBlockIndexCandidates.erase(invalid_walk_tip); invalid_walk_tip = invalid_walk_tip->pprev; } // Mark the block itself as invalid. pindex->nStatus |= BLOCK_FAILED_VALID; setDirtyBlockIndex.insert(pindex); setBlockIndexCandidates.erase(pindex); g_failed_blocks.insert(pindex); // DisconnectTip will add transactions to disconnectpool; try to add these // back to the mempool. UpdateMempoolForReorg(disconnectpool, true); // The resulting new best tip may not be in setBlockIndexCandidates anymore, so // add it again. BlockMap::iterator it = mapBlockIndex.begin(); while (it != mapBlockIndex.end()) { if (it->second->IsValid(BLOCK_VALID_TRANSACTIONS) && it->second->nChainTx && !setBlockIndexCandidates.value_comp()(it->second, chainActive.Tip())) { setBlockIndexCandidates.insert(it->second); } it++; } InvalidChainFound(pindex); uiInterface.NotifyBlockTip(IsInitialBlockDownload(), pindex->pprev); return true; } bool ResetBlockFailureFlags(CBlockIndex *pindex) { AssertLockHeld(cs_main); int nHeight = pindex->nHeight; // Remove the invalidity flag from this block and all its descendants. BlockMap::iterator it = mapBlockIndex.begin(); while (it != mapBlockIndex.end()) { if (!it->second->IsValid() && it->second->GetAncestor(nHeight) == pindex) { it->second->nStatus &= ~BLOCK_FAILED_MASK; setDirtyBlockIndex.insert(it->second); if (it->second->IsValid(BLOCK_VALID_TRANSACTIONS) && it->second->nChainTx && setBlockIndexCandidates.value_comp()(chainActive.Tip(), it->second)) { setBlockIndexCandidates.insert(it->second); } if (it->second == pindexBestInvalid) { // Reset invalid block marker if it was pointing to one of those. pindexBestInvalid = nullptr; } g_failed_blocks.erase(it->second); } it++; } // Remove the invalidity flag from all ancestors too. while (pindex != nullptr) { if (pindex->nStatus & BLOCK_FAILED_MASK) { pindex->nStatus &= ~BLOCK_FAILED_MASK; setDirtyBlockIndex.insert(pindex); } pindex = pindex->pprev; } return true; } static CBlockIndex* AddToBlockIndex(const CBlockHeader& block) { // Check for duplicate uint256 hash = block.GetHash(); BlockMap::iterator it = mapBlockIndex.find(hash); if (it != mapBlockIndex.end()) return it->second; // Construct new block index object CBlockIndex* pindexNew = new CBlockIndex(block); assert(pindexNew); // We assign the sequence id to blocks only when the full data is available, // to avoid miners withholding blocks but broadcasting headers, to get a // competitive advantage. pindexNew->nSequenceId = 0; BlockMap::iterator mi = mapBlockIndex.insert(std::make_pair(hash, pindexNew)).first; pindexNew->phashBlock = &((*mi).first); BlockMap::iterator miPrev = mapBlockIndex.find(block.hashPrevBlock); if (miPrev != mapBlockIndex.end()) { pindexNew->pprev = (*miPrev).second; pindexNew->nHeight = pindexNew->pprev->nHeight + 1; pindexNew->BuildSkip(); } pindexNew->nTimeMax = (pindexNew->pprev ? std::max(pindexNew->pprev->nTimeMax, pindexNew->nTime) : pindexNew->nTime); pindexNew->nChainWork = (pindexNew->pprev ? pindexNew->pprev->nChainWork : 0) + GetBlockProof(*pindexNew); pindexNew->RaiseValidity(BLOCK_VALID_TREE); if (pindexBestHeader == nullptr || pindexBestHeader->nChainWork < pindexNew->nChainWork) pindexBestHeader = pindexNew; setDirtyBlockIndex.insert(pindexNew); mapDirtyAuxPow.insert(std::make_pair(block.GetHash(), block.auxpow)); return pindexNew; } /** Mark a block as having its data received and checked (up to BLOCK_VALID_TRANSACTIONS). */ static bool ReceivedBlockTransactions(const CBlock &block, CValidationState& state, CBlockIndex *pindexNew, const CDiskBlockPos& pos, const Consensus::Params& consensusParams) { pindexNew->nTx = block.vtx.size(); pindexNew->nChainTx = 0; pindexNew->nFile = pos.nFile; pindexNew->nDataPos = pos.nPos; pindexNew->nUndoPos = 0; pindexNew->nStatus |= BLOCK_HAVE_DATA; if (IsWitnessEnabled(pindexNew->pprev, consensusParams)) { pindexNew->nStatus |= BLOCK_OPT_WITNESS; } pindexNew->RaiseValidity(BLOCK_VALID_TRANSACTIONS); setDirtyBlockIndex.insert(pindexNew); mapDirtyAuxPow.insert(std::make_pair(block.GetHash(), block.auxpow)); if (pindexNew->pprev == nullptr || pindexNew->pprev->nChainTx) { // If pindexNew is the genesis block or all parents are BLOCK_VALID_TRANSACTIONS. std::deque<CBlockIndex*> queue; queue.push_back(pindexNew); // Recursively process any descendant blocks that now may be eligible to be connected. while (!queue.empty()) { CBlockIndex *pindex = queue.front(); queue.pop_front(); pindex->nChainTx = (pindex->pprev ? pindex->pprev->nChainTx : 0) + pindex->nTx; { LOCK(cs_nBlockSequenceId); pindex->nSequenceId = nBlockSequenceId++; } if (chainActive.Tip() == nullptr || !setBlockIndexCandidates.value_comp()(pindex, chainActive.Tip())) { setBlockIndexCandidates.insert(pindex); } std::pair<std::multimap<CBlockIndex*, CBlockIndex*>::iterator, std::multimap<CBlockIndex*, CBlockIndex*>::iterator> range = mapBlocksUnlinked.equal_range(pindex); while (range.first != range.second) { std::multimap<CBlockIndex*, CBlockIndex*>::iterator it = range.first; queue.push_back(it->second); range.first++; mapBlocksUnlinked.erase(it); } } } else { if (pindexNew->pprev && pindexNew->pprev->IsValid(BLOCK_VALID_TREE)) { mapBlocksUnlinked.insert(std::make_pair(pindexNew->pprev, pindexNew)); } } return true; } static bool FindBlockPos(CValidationState &state, CDiskBlockPos &pos, unsigned int nAddSize, unsigned int nHeight, uint64_t nTime, bool fKnown = false) { LOCK(cs_LastBlockFile); unsigned int nFile = fKnown ? pos.nFile : nLastBlockFile; if (vinfoBlockFile.size() <= nFile) { vinfoBlockFile.resize(nFile + 1); } if (!fKnown) { while (vinfoBlockFile[nFile].nSize + nAddSize >= MAX_BLOCKFILE_SIZE) { nFile++; if (vinfoBlockFile.size() <= nFile) { vinfoBlockFile.resize(nFile + 1); } } pos.nFile = nFile; pos.nPos = vinfoBlockFile[nFile].nSize; } if ((int)nFile != nLastBlockFile) { if (!fKnown) { LogPrintf("Leaving block file %i: %s\n", nLastBlockFile, vinfoBlockFile[nLastBlockFile].ToString()); } FlushBlockFile(!fKnown); nLastBlockFile = nFile; } vinfoBlockFile[nFile].AddBlock(nHeight, nTime); if (fKnown) vinfoBlockFile[nFile].nSize = std::max(pos.nPos + nAddSize, vinfoBlockFile[nFile].nSize); else vinfoBlockFile[nFile].nSize += nAddSize; if (!fKnown) { unsigned int nOldChunks = (pos.nPos + BLOCKFILE_CHUNK_SIZE - 1) / BLOCKFILE_CHUNK_SIZE; unsigned int nNewChunks = (vinfoBlockFile[nFile].nSize + BLOCKFILE_CHUNK_SIZE - 1) / BLOCKFILE_CHUNK_SIZE; if (nNewChunks > nOldChunks) { if (fPruneMode) fCheckForPruning = true; if (CheckDiskSpace(nNewChunks * BLOCKFILE_CHUNK_SIZE - pos.nPos)) { FILE *file = OpenBlockFile(pos); if (file) { LogPrintf("Pre-allocating up to position 0x%x in blk%05u.dat\n", nNewChunks * BLOCKFILE_CHUNK_SIZE, pos.nFile); AllocateFileRange(file, pos.nPos, nNewChunks * BLOCKFILE_CHUNK_SIZE - pos.nPos); fclose(file); } } else return state.Error("out of disk space"); } } setDirtyFileInfo.insert(nFile); return true; } static bool FindUndoPos(CValidationState &state, int nFile, CDiskBlockPos &pos, unsigned int nAddSize) { pos.nFile = nFile; LOCK(cs_LastBlockFile); unsigned int nNewSize; pos.nPos = vinfoBlockFile[nFile].nUndoSize; nNewSize = vinfoBlockFile[nFile].nUndoSize += nAddSize; setDirtyFileInfo.insert(nFile); unsigned int nOldChunks = (pos.nPos + UNDOFILE_CHUNK_SIZE - 1) / UNDOFILE_CHUNK_SIZE; unsigned int nNewChunks = (nNewSize + UNDOFILE_CHUNK_SIZE - 1) / UNDOFILE_CHUNK_SIZE; if (nNewChunks > nOldChunks) { if (fPruneMode) fCheckForPruning = true; if (CheckDiskSpace(nNewChunks * UNDOFILE_CHUNK_SIZE - pos.nPos)) { FILE *file = OpenUndoFile(pos); if (file) { LogPrintf("Pre-allocating up to position 0x%x in rev%05u.dat\n", nNewChunks * UNDOFILE_CHUNK_SIZE, pos.nFile); AllocateFileRange(file, pos.nPos, nNewChunks * UNDOFILE_CHUNK_SIZE - pos.nPos); fclose(file); } } else return state.Error("out of disk space"); } return true; } static bool CheckBlockHeader(const CBlockHeader& block, CValidationState& state, const Consensus::Params& consensusParams, bool fCheckPOW = true) { // Check proof of work matches claimed amount if (fCheckPOW && !CheckBlockProofOfWork(&block, consensusParams)) return state.DoS(50, false, REJECT_INVALID, "high-hash", false, "proof of work failed"); return true; } bool CheckBlock(const CBlock& block, CValidationState& state, const Consensus::Params& consensusParams, bool fCheckPOW, bool fCheckMerkleRoot) { // These are checks that are independent of context. if (block.fChecked) return true; // Check that the header is valid (particularly PoW). This is mostly // redundant with the call in AcceptBlockHeader. if (!CheckBlockHeader(block, state, consensusParams, fCheckPOW)) return false; // Check the merkle root. if (fCheckMerkleRoot) { bool mutated; uint256 hashMerkleRoot2 = BlockMerkleRoot(block, &mutated); if (block.hashMerkleRoot != hashMerkleRoot2) return state.DoS(100, false, REJECT_INVALID, "bad-txnmrklroot", true, "hashMerkleRoot mismatch"); // Check for merkle tree malleability (CVE-2012-2459): repeating sequences // of transactions in a block without affecting the merkle root of a block, // while still invalidating it. if (mutated) return state.DoS(100, false, REJECT_INVALID, "bad-txns-duplicate", true, "duplicate transaction"); } // All potential-corruption validation must be done before we do any // transaction validation, as otherwise we may mark the header as invalid // because we receive the wrong transactions for it. // Note that witness malleability is checked in ContextualCheckBlock, so no // checks that use witness data may be performed here. // Size limits if (block.vtx.empty() || block.vtx.size() * WITNESS_SCALE_FACTOR > MAX_BLOCK_WEIGHT || ::GetSerializeSize(block, SER_NETWORK, PROTOCOL_VERSION | SERIALIZE_TRANSACTION_NO_WITNESS) * WITNESS_SCALE_FACTOR > MAX_BLOCK_WEIGHT) return state.DoS(100, false, REJECT_INVALID, "bad-blk-length", false, "size limits failed"); // First transaction must be coinbase, the rest must not be if (block.vtx.empty() || !block.vtx[0]->IsCoinBase()) return state.DoS(100, false, REJECT_INVALID, "bad-cb-missing", false, "first tx is not coinbase"); for (unsigned int i = 1; i < block.vtx.size(); i++) if (block.vtx[i]->IsCoinBase()) return state.DoS(100, false, REJECT_INVALID, "bad-cb-multiple", false, "more than one coinbase"); // Check transactions for (const auto& tx : block.vtx) if (!CheckTransaction(*tx, state, false)) return state.Invalid(false, state.GetRejectCode(), state.GetRejectReason(), strprintf("Transaction check failed (tx hash %s) %s", tx->GetHash().ToString(), state.GetDebugMessage())); unsigned int nSigOps = 0; for (const auto& tx : block.vtx) { nSigOps += GetLegacySigOpCount(*tx); } if (nSigOps * WITNESS_SCALE_FACTOR > MAX_BLOCK_SIGOPS_COST) return state.DoS(100, false, REJECT_INVALID, "bad-blk-sigops", false, "out-of-bounds SigOpCount"); if (fCheckPOW && fCheckMerkleRoot) block.fChecked = true; return true; } bool IsWitnessEnabled(const CBlockIndex* pindexPrev, const Consensus::Params& params) { LOCK(cs_main); const int nHeight = pindexPrev == nullptr ? 0 : pindexPrev->nHeight + 1; return (nHeight >= params.nWitnessStartHeight || VersionBitsState(pindexPrev, params, Consensus::DEPLOYMENT_SEGWIT, versionbitscache) == THRESHOLD_ACTIVE); } // Compute at which vout of the block's coinbase transaction the witness // commitment occurs, or -1 if not found. static int GetWitnessCommitmentIndex(const CBlock& block) { int commitpos = -1; if (!block.vtx.empty()) { for (size_t o = 0; o < block.vtx[0]->vout.size(); o++) { if (block.vtx[0]->vout[o].scriptPubKey.size() >= 38 && block.vtx[0]->vout[o].scriptPubKey[0] == OP_RETURN && block.vtx[0]->vout[o].scriptPubKey[1] == 0x24 && block.vtx[0]->vout[o].scriptPubKey[2] == 0xaa && block.vtx[0]->vout[o].scriptPubKey[3] == 0x21 && block.vtx[0]->vout[o].scriptPubKey[4] == 0xa9 && block.vtx[0]->vout[o].scriptPubKey[5] == 0xed) { commitpos = o; } } } return commitpos; } void UpdateUncommittedBlockStructures(CBlock& block, const CBlockIndex* pindexPrev, const Consensus::Params& consensusParams) { int commitpos = GetWitnessCommitmentIndex(block); static const std::vector<unsigned char> nonce(32, 0x00); if (commitpos != -1 && IsWitnessEnabled(pindexPrev, consensusParams) && !block.vtx[0]->HasWitness()) { CMutableTransaction tx(*block.vtx[0]); tx.vin[0].scriptWitness.stack.resize(1); tx.vin[0].scriptWitness.stack[0] = nonce; block.vtx[0] = MakeTransactionRef(std::move(tx)); } } std::vector<unsigned char> GenerateCoinbaseCommitment(CBlock& block, const CBlockIndex* pindexPrev, const Consensus::Params& consensusParams) { std::vector<unsigned char> commitment; int commitpos = GetWitnessCommitmentIndex(block); std::vector<unsigned char> ret(32, 0x00); if (consensusParams.vDeployments[Consensus::DEPLOYMENT_SEGWIT].nTimeout != 0) { if (commitpos == -1) { uint256 witnessroot = BlockWitnessMerkleRoot(block, nullptr); CHash256().Write(witnessroot.begin(), 32).Write(ret.data(), 32).Finalize(witnessroot.begin()); CTxOut out; out.nValue = 0; out.scriptPubKey.resize(38); out.scriptPubKey[0] = OP_RETURN; out.scriptPubKey[1] = 0x24; out.scriptPubKey[2] = 0xaa; out.scriptPubKey[3] = 0x21; out.scriptPubKey[4] = 0xa9; out.scriptPubKey[5] = 0xed; memcpy(&out.scriptPubKey[6], witnessroot.begin(), 32); commitment = std::vector<unsigned char>(out.scriptPubKey.begin(), out.scriptPubKey.end()); CMutableTransaction tx(*block.vtx[0]); tx.vout.push_back(out); block.vtx[0] = MakeTransactionRef(std::move(tx)); } } UpdateUncommittedBlockStructures(block, pindexPrev, consensusParams); return commitment; } /** Context-dependent validity checks. * By "context", we mean only the previous block headers, but not the UTXO * set; UTXO-related validity checks are done in ConnectBlock(). */ static bool ContextualCheckBlockHeader(const CBlockHeader& block, CValidationState& state, const CChainParams& params, const CBlockIndex* pindexPrev, int64_t nAdjustedTime) { assert(pindexPrev != nullptr); const int nHeight = pindexPrev->nHeight + 1; const Consensus::Params& consensusParams = params.GetConsensus(); // Check and validate auxpow if (block.auxpow && block.auxpow.get() != nullptr) { if (nHeight < consensusParams.nAuxPowStartHeight) return state.DoS(100, error("%s : premature auxpow block", __func__), REJECT_INVALID, "time-too-new"); if (!CheckAuxPowValidity(&block, params.GetConsensus())) return state.DoS(100, error("%s : invalid auxpow block", __func__), REJECT_INVALID, "bad-auxpow"); } // Check proof of work if (block.nBits != GetNextWorkRequired(pindexPrev, &block, consensusParams)) return state.DoS(100, false, REJECT_INVALID, "bad-diffbits", false, "incorrect proof of work"); // Check against checkpoints if (fCheckpointsEnabled) { // Don't accept any forks from the main chain prior to last checkpoint. // GetLastCheckpoint finds the last checkpoint in MapCheckpoints that's in our // MapBlockIndex. CBlockIndex* pcheckpoint = Checkpoints::GetLastCheckpoint(params.Checkpoints()); if (pcheckpoint && nHeight < pcheckpoint->nHeight) return state.DoS(100, error("%s: forked chain older than last checkpoint (height %d)", __func__, nHeight), REJECT_CHECKPOINT, "bad-fork-prior-to-checkpoint"); } // Check timestamp against prev if (block.GetBlockTime() <= pindexPrev->GetMedianTimePast()) return state.Invalid(false, REJECT_INVALID, "time-too-old", "block's timestamp is too early"); // Check timestamp if (block.GetBlockTime() > nAdjustedTime + MAX_FUTURE_BLOCK_TIME) return state.Invalid(false, REJECT_INVALID, "time-too-new", "block timestamp too far in the future"); // Reject outdated version blocks when 95% (75% on testnet) of the network has upgraded: // check for version 3, 4 and 5 upgrades if(((block.nVersion & 0xFF) < VERSIONBITS_TOP_BITS && (block.nVersion & 0xFF) < 3 && nHeight >= consensusParams.BIP65Height) || ((block.nVersion & 0xFF) < VERSIONBITS_TOP_BITS && (block.nVersion & 0xFF) < 4 && nHeight >= consensusParams.BIP66Height) || ((block.nVersion & 0xFF) < VERSIONBITS_TOP_BITS && (block.nVersion & 0xFF) < 5 && nHeight >= consensusParams.BlockVer5Height)) return state.Invalid(false, REJECT_OBSOLETE, strprintf("bad-version(0x%08x)", block.nVersion), strprintf("rejected nVersion=0x%08x block", block.nVersion)); return true; } static bool ContextualCheckBlock(const CBlock& block, CValidationState& state, const Consensus::Params& consensusParams, const CBlockIndex* pindexPrev) { const int nHeight = pindexPrev == nullptr ? 0 : pindexPrev->nHeight + 1; // Start enforcing BIP113 (Median Time Past) using versionbits logic. int nLockTimeFlags = 0; if (nHeight >= consensusParams.nWitnessStartHeight || VersionBitsState(pindexPrev, consensusParams, Consensus::DEPLOYMENT_CSV, versionbitscache) == THRESHOLD_ACTIVE) { nLockTimeFlags |= LOCKTIME_MEDIAN_TIME_PAST; } int64_t nLockTimeCutoff = (nLockTimeFlags & LOCKTIME_MEDIAN_TIME_PAST) ? pindexPrev->GetMedianTimePast() : block.GetBlockTime(); // Check that all transactions are finalized for (const auto& tx : block.vtx) { if (!IsFinalTx(*tx, nHeight, nLockTimeCutoff)) { return state.DoS(10, false, REJECT_INVALID, "bad-txns-nonfinal", false, "non-final transaction"); } } // Enforce rule that the coinbase starts with serialized block height if (block.nVersion >= 2 && nHeight >= consensusParams.BIP34Height) { CScript expect = CScript() << nHeight; if (block.vtx[0]->vin[0].scriptSig.size() < expect.size() || !std::equal(expect.begin(), expect.end(), block.vtx[0]->vin[0].scriptSig.begin())) { return state.DoS(100, false, REJECT_INVALID, "bad-cb-height", false, "block height mismatch in coinbase"); } } // Validation for witness commitments. // * We compute the witness hash (which is the hash including witnesses) of all the block's transactions, except the // coinbase (where 0x0000....0000 is used instead). // * The coinbase scriptWitness is a stack of a single 32-byte vector, containing a witness nonce (unconstrained). // * We build a merkle tree with all those witness hashes as leaves (similar to the hashMerkleRoot in the block header). // * There must be at least one output whose scriptPubKey is a single 36-byte push, the first 4 bytes of which are // {0xaa, 0x21, 0xa9, 0xed}, and the following 32 bytes are SHA256^2(witness root, witness nonce). In case there are // multiple, the last one is used. bool fHaveWitness = false; if (VersionBitsState(pindexPrev, consensusParams, Consensus::DEPLOYMENT_SEGWIT, versionbitscache) == THRESHOLD_ACTIVE) { int commitpos = GetWitnessCommitmentIndex(block); if (commitpos != -1) { bool malleated = false; uint256 hashWitness = BlockWitnessMerkleRoot(block, &malleated); // The malleation check is ignored; as the transaction tree itself // already does not permit it, it is impossible to trigger in the // witness tree. if (block.vtx[0]->vin[0].scriptWitness.stack.size() != 1 || block.vtx[0]->vin[0].scriptWitness.stack[0].size() != 32) { return state.DoS(100, false, REJECT_INVALID, "bad-witness-nonce-size", true, strprintf("%s : invalid witness nonce size", __func__)); } CHash256().Write(hashWitness.begin(), 32).Write(&block.vtx[0]->vin[0].scriptWitness.stack[0][0], 32).Finalize(hashWitness.begin()); if (memcmp(hashWitness.begin(), &block.vtx[0]->vout[commitpos].scriptPubKey[6], 32)) { return state.DoS(100, false, REJECT_INVALID, "bad-witness-merkle-match", true, strprintf("%s : witness merkle commitment mismatch", __func__)); } fHaveWitness = true; } } // No witness data is allowed in blocks that don't commit to witness data, as this would otherwise leave room for spam if (!fHaveWitness) { for (const auto& tx : block.vtx) { if (tx->HasWitness()) { return state.DoS(100, false, REJECT_INVALID, "unexpected-witness", true, strprintf("%s : unexpected witness data found", __func__)); } } } // After the coinbase witness nonce and commitment are verified, // we can check if the block weight passes (before we've checked the // coinbase witness, it would be possible for the weight to be too // large by filling up the coinbase witness, which doesn't change // the block hash, so we couldn't mark the block as permanently // failed). if (GetBlockWeight(block) > MAX_BLOCK_WEIGHT) { return state.DoS(100, false, REJECT_INVALID, "bad-blk-weight", false, strprintf("%s : weight limit failed", __func__)); } return true; } static bool AcceptBlockHeader(const CBlockHeader& block, CValidationState& state, const CChainParams& chainparams, CBlockIndex** ppindex) { AssertLockHeld(cs_main); // Check for duplicate uint256 hash = block.GetHash(); BlockMap::iterator miSelf = mapBlockIndex.find(hash); CBlockIndex *pindex = nullptr; if (hash != chainparams.GetConsensus().hashGenesisBlock) { if (miSelf != mapBlockIndex.end()) { // Block header is already known. pindex = miSelf->second; if (ppindex) *ppindex = pindex; if (pindex->nStatus & BLOCK_FAILED_MASK) return state.Invalid(error("%s: block %s is marked invalid", __func__, hash.ToString()), 0, "duplicate"); return true; } if (!CheckBlockHeader(block, state, chainparams.GetConsensus())) return error("%s: Consensus::CheckBlockHeader: %s, %s", __func__, hash.ToString(), FormatStateMessage(state)); // Get prev block index CBlockIndex* pindexPrev = nullptr; BlockMap::iterator mi = mapBlockIndex.find(block.hashPrevBlock); if (mi == mapBlockIndex.end()) return state.DoS(10, error("%s: prev block not found", __func__), 0, "prev-blk-not-found"); pindexPrev = (*mi).second; if (pindexPrev->nStatus & BLOCK_FAILED_MASK) return state.DoS(100, error("%s: prev block invalid", __func__), REJECT_INVALID, "bad-prevblk"); if (!ContextualCheckBlockHeader(block, state, chainparams, pindexPrev, GetAdjustedTime())) return error("%s: Consensus::ContextualCheckBlockHeader: %s, %s", __func__, hash.ToString(), FormatStateMessage(state)); if (!pindexPrev->IsValid(BLOCK_VALID_SCRIPTS)) { for (const CBlockIndex* failedit : g_failed_blocks) { if (pindexPrev->GetAncestor(failedit->nHeight) == failedit) { assert(failedit->nStatus & BLOCK_FAILED_VALID); CBlockIndex* invalid_walk = pindexPrev; while (invalid_walk != failedit) { invalid_walk->nStatus |= BLOCK_FAILED_CHILD; setDirtyBlockIndex.insert(invalid_walk); invalid_walk = invalid_walk->pprev; } return state.DoS(100, error("%s: prev block invalid", __func__), REJECT_INVALID, "bad-prevblk"); } } } } if (pindex == nullptr) pindex = AddToBlockIndex(block); if (ppindex) *ppindex = pindex; CheckBlockIndex(chainparams.GetConsensus()); return true; } // Exposed wrapper for AcceptBlockHeader bool ProcessNewBlockHeaders(const std::vector<CBlockHeader>& headers, CValidationState& state, const CChainParams& chainparams, const CBlockIndex** ppindex, CBlockHeader *first_invalid) { if (first_invalid != nullptr) first_invalid->SetNull(); { LOCK(cs_main); for (const CBlockHeader& header : headers) { CBlockIndex *pindex = nullptr; // Use a temp pindex instead of ppindex to avoid a const_cast if (!AcceptBlockHeader(header, state, chainparams, &pindex)) { if (first_invalid) *first_invalid = header; return false; } if (ppindex) { *ppindex = pindex; } } } NotifyHeaderTip(); return true; } /** Store block on disk. If dbp is non-nullptr, the file is known to already reside on disk */ static bool AcceptBlock(const std::shared_ptr<const CBlock>& pblock, CValidationState& state, const CChainParams& chainparams, CBlockIndex** ppindex, bool fRequested, const CDiskBlockPos* dbp, bool* fNewBlock) { const CBlock& block = *pblock; if (fNewBlock) *fNewBlock = false; AssertLockHeld(cs_main); CBlockIndex *pindexDummy = nullptr; CBlockIndex *&pindex = ppindex ? *ppindex : pindexDummy; if (!AcceptBlockHeader(block, state, chainparams, &pindex)) return false; // Try to process all requested blocks that we don't have, but only // process an unrequested block if it's new and has enough work to // advance our tip, and isn't too many blocks ahead. bool fAlreadyHave = pindex->nStatus & BLOCK_HAVE_DATA; bool fHasMoreOrSameWork = (chainActive.Tip() ? pindex->nChainWork >= chainActive.Tip()->nChainWork : true); // Blocks that are too out-of-order needlessly limit the effectiveness of // pruning, because pruning will not delete block files that contain any // blocks which are too close in height to the tip. Apply this test // regardless of whether pruning is enabled; it should generally be safe to // not process unrequested blocks. bool fTooFarAhead = (pindex->nHeight > int(chainActive.Height() + MIN_BLOCKS_TO_KEEP)); // TODO: Decouple this function from the block download logic by removing fRequested // This requires some new chain data structure to efficiently look up if a // block is in a chain leading to a candidate for best tip, despite not // being such a candidate itself. // TODO: deal better with return value and error conditions for duplicate // and unrequested blocks. if (fAlreadyHave) return true; if (!fRequested) { // If we didn't ask for it: if (pindex->nTx != 0) return true; // This is a previously-processed block that was pruned if (!fHasMoreOrSameWork) return true; // Don't process less-work chains if (fTooFarAhead) return true; // Block height is too high // Protect against DoS attacks from low-work chains. // If our tip is behind, a peer could try to send us // low-work blocks on a fake chain that we would never // request; don't process these. if (pindex->nChainWork < nMinimumChainWork) return true; } if (fNewBlock) *fNewBlock = true; if (!CheckBlock(block, state, chainparams.GetConsensus()) || !ContextualCheckBlock(block, state, chainparams.GetConsensus(), pindex->pprev)) { if (state.IsInvalid() && !state.CorruptionPossible()) { pindex->nStatus |= BLOCK_FAILED_VALID; setDirtyBlockIndex.insert(pindex); } return error("%s: %s", __func__, FormatStateMessage(state)); } // Header is valid/has work, merkle tree and segwit merkle tree are good...RELAY NOW // (but if it does not build on our best tip, let the SendMessages loop relay it) if (!IsInitialBlockDownload() && chainActive.Tip() == pindex->pprev) GetMainSignals().NewPoWValidBlock(pindex, pblock); int nHeight = pindex->nHeight; // Write block to history file try { unsigned int nBlockSize = ::GetSerializeSize(block, SER_DISK, CLIENT_VERSION); CDiskBlockPos blockPos; if (dbp != nullptr) blockPos = *dbp; if (!FindBlockPos(state, blockPos, nBlockSize+8, nHeight, block.GetBlockTime(), dbp != nullptr)) return error("AcceptBlock(): FindBlockPos failed"); if (dbp == nullptr) if (!WriteBlockToDisk(block, blockPos, chainparams.MessageStart())) AbortNode(state, "Failed to write block"); if (!ReceivedBlockTransactions(block, state, pindex, blockPos, chainparams.GetConsensus())) return error("AcceptBlock(): ReceivedBlockTransactions failed"); } catch (const std::runtime_error& e) { return AbortNode(state, std::string("System error: ") + e.what()); } if (fCheckForPruning) FlushStateToDisk(chainparams, state, FLUSH_STATE_NONE); // we just allocated more disk space for block files return true; } bool ProcessNewBlock(const CChainParams& chainparams, const std::shared_ptr<const CBlock> pblock, bool fForceProcessing, bool *fNewBlock) { { CBlockIndex *pindex = nullptr; if (fNewBlock) *fNewBlock = false; CValidationState state; // Ensure that CheckBlock() passes before calling AcceptBlock, as // belt-and-suspenders. bool ret = CheckBlock(*pblock, state, chainparams.GetConsensus()); LOCK(cs_main); if (ret) { // Store to disk ret = AcceptBlock(pblock, state, chainparams, &pindex, fForceProcessing, nullptr, fNewBlock); } CheckBlockIndex(chainparams.GetConsensus()); if (!ret) { GetMainSignals().BlockChecked(*pblock, state); return error("%s: AcceptBlock FAILED", __func__); } } NotifyHeaderTip(); CValidationState state; // Only used to report errors, not invalidity - ignore it if (!ActivateBestChain(state, chainparams, pblock)) return error("%s: ActivateBestChain failed", __func__); return true; } bool TestBlockValidity(CValidationState& state, const CChainParams& chainparams, const CBlock& block, CBlockIndex* pindexPrev, bool fCheckPOW, bool fCheckMerkleRoot) { AssertLockHeld(cs_main); assert(pindexPrev && pindexPrev == chainActive.Tip()); CCoinsViewCache viewNew(pcoinsTip); CBlockIndex indexDummy(block); indexDummy.pprev = pindexPrev; indexDummy.nHeight = pindexPrev->nHeight + 1; // NOTE: CheckBlockHeader is called by CheckBlock if (!ContextualCheckBlockHeader(block, state, chainparams, pindexPrev, GetAdjustedTime())) return error("%s: Consensus::ContextualCheckBlockHeader: %s", __func__, FormatStateMessage(state)); if (!CheckBlock(block, state, chainparams.GetConsensus(), fCheckPOW, fCheckMerkleRoot)) return error("%s: Consensus::CheckBlock: %s", __func__, FormatStateMessage(state)); if (!ContextualCheckBlock(block, state, chainparams.GetConsensus(), pindexPrev)) return error("%s: Consensus::ContextualCheckBlock: %s", __func__, FormatStateMessage(state)); if (!ConnectBlock(block, state, &indexDummy, viewNew, chainparams, true)) return false; assert(state.IsValid()); return true; } /** * BLOCK PRUNING CODE */ /* Calculate the amount of disk space the block & undo files currently use */ static uint64_t CalculateCurrentUsage() { uint64_t retval = 0; for (const CBlockFileInfo &file : vinfoBlockFile) { retval += file.nSize + file.nUndoSize; } return retval; } /* Prune a block file (modify associated database entries)*/ void PruneOneBlockFile(const int fileNumber) { for (BlockMap::iterator it = mapBlockIndex.begin(); it != mapBlockIndex.end(); ++it) { CBlockIndex* pindex = it->second; if (pindex->nFile == fileNumber) { pindex->nStatus &= ~BLOCK_HAVE_DATA; pindex->nStatus &= ~BLOCK_HAVE_UNDO; pindex->nFile = 0; pindex->nDataPos = 0; pindex->nUndoPos = 0; setDirtyBlockIndex.insert(pindex); // Prune from mapBlocksUnlinked -- any block we prune would have // to be downloaded again in order to consider its chain, at which // point it would be considered as a candidate for // mapBlocksUnlinked or setBlockIndexCandidates. std::pair<std::multimap<CBlockIndex*, CBlockIndex*>::iterator, std::multimap<CBlockIndex*, CBlockIndex*>::iterator> range = mapBlocksUnlinked.equal_range(pindex->pprev); while (range.first != range.second) { std::multimap<CBlockIndex *, CBlockIndex *>::iterator _it = range.first; range.first++; if (_it->second == pindex) { mapBlocksUnlinked.erase(_it); } } } } vinfoBlockFile[fileNumber].SetNull(); setDirtyFileInfo.insert(fileNumber); } void UnlinkPrunedFiles(const std::set<int>& setFilesToPrune) { for (std::set<int>::iterator it = setFilesToPrune.begin(); it != setFilesToPrune.end(); ++it) { CDiskBlockPos pos(*it, 0); fs::remove(GetBlockPosFilename(pos, "blk")); fs::remove(GetBlockPosFilename(pos, "rev")); LogPrintf("Prune: %s deleted blk/rev (%05u)\n", __func__, *it); } } /* Calculate the block/rev files to delete based on height specified by user with RPC command pruneblockchain */ static void FindFilesToPruneManual(std::set<int>& setFilesToPrune, int nManualPruneHeight) { assert(fPruneMode && nManualPruneHeight > 0); LOCK2(cs_main, cs_LastBlockFile); if (chainActive.Tip() == nullptr) return; // last block to prune is the lesser of (user-specified height, MIN_BLOCKS_TO_KEEP from the tip) unsigned int nLastBlockWeCanPrune = std::min((unsigned)nManualPruneHeight, chainActive.Tip()->nHeight - MIN_BLOCKS_TO_KEEP); int count=0; for (int fileNumber = 0; fileNumber < nLastBlockFile; fileNumber++) { if (vinfoBlockFile[fileNumber].nSize == 0 || vinfoBlockFile[fileNumber].nHeightLast > nLastBlockWeCanPrune) continue; PruneOneBlockFile(fileNumber); setFilesToPrune.insert(fileNumber); count++; } LogPrintf("Prune (Manual): prune_height=%d removed %d blk/rev pairs\n", nLastBlockWeCanPrune, count); } /* This function is called from the RPC code for pruneblockchain */ void PruneBlockFilesManual(int nManualPruneHeight) { CValidationState state; const CChainParams& chainparams = Params(); FlushStateToDisk(chainparams, state, FLUSH_STATE_NONE, nManualPruneHeight); } /** * Prune block and undo files (blk???.dat and undo???.dat) so that the disk space used is less than a user-defined target. * The user sets the target (in MB) on the command line or in config file. This will be run on startup and whenever new * space is allocated in a block or undo file, staying below the target. Changing back to unpruned requires a reindex * (which in this case means the blockchain must be re-downloaded.) * * Pruning functions are called from FlushStateToDisk when the global fCheckForPruning flag has been set. * Block and undo files are deleted in lock-step (when blk00003.dat is deleted, so is rev00003.dat.) * Pruning cannot take place until the longest chain is at least a certain length (100000 on mainnet, 1000 on testnet, 1000 on regtest). * Pruning will never delete a block within a defined distance (currently 288) from the active chain's tip. * The block index is updated by unsetting HAVE_DATA and HAVE_UNDO for any blocks that were stored in the deleted files. * A db flag records the fact that at least some block files have been pruned. * * @param[out] setFilesToPrune The set of file indices that can be unlinked will be returned */ static void FindFilesToPrune(std::set<int>& setFilesToPrune, uint64_t nPruneAfterHeight) { LOCK2(cs_main, cs_LastBlockFile); if (chainActive.Tip() == nullptr || nPruneTarget == 0) { return; } if ((uint64_t)chainActive.Tip()->nHeight <= nPruneAfterHeight) { return; } unsigned int nLastBlockWeCanPrune = chainActive.Tip()->nHeight - MIN_BLOCKS_TO_KEEP; uint64_t nCurrentUsage = CalculateCurrentUsage(); // We don't check to prune until after we've allocated new space for files // So we should leave a buffer under our target to account for another allocation // before the next pruning. uint64_t nBuffer = BLOCKFILE_CHUNK_SIZE + UNDOFILE_CHUNK_SIZE; uint64_t nBytesToPrune; int count=0; if (nCurrentUsage + nBuffer >= nPruneTarget) { for (int fileNumber = 0; fileNumber < nLastBlockFile; fileNumber++) { nBytesToPrune = vinfoBlockFile[fileNumber].nSize + vinfoBlockFile[fileNumber].nUndoSize; if (vinfoBlockFile[fileNumber].nSize == 0) continue; if (nCurrentUsage + nBuffer < nPruneTarget) // are we below our target? break; // don't prune files that could have a block within MIN_BLOCKS_TO_KEEP of the main chain's tip but keep scanning if (vinfoBlockFile[fileNumber].nHeightLast > nLastBlockWeCanPrune) continue; PruneOneBlockFile(fileNumber); // Queue up the files for removal setFilesToPrune.insert(fileNumber); nCurrentUsage -= nBytesToPrune; count++; } } LogPrint(BCLog::PRUNE, "Prune: target=%dMiB actual=%dMiB diff=%dMiB max_prune_height=%d removed %d blk/rev pairs\n", nPruneTarget/1024/1024, nCurrentUsage/1024/1024, ((int64_t)nPruneTarget - (int64_t)nCurrentUsage)/1024/1024, nLastBlockWeCanPrune, count); } bool CheckDiskSpace(uint64_t nAdditionalBytes) { uint64_t nFreeBytesAvailable = fs::space(GetDataDir()).available; // Check for nMinDiskSpace bytes (currently 50MB) if (nFreeBytesAvailable < nMinDiskSpace + nAdditionalBytes) return AbortNode("Disk space is low!", _("Error: Disk space is low!")); return true; } static FILE* OpenDiskFile(const CDiskBlockPos &pos, const char *prefix, bool fReadOnly) { if (pos.IsNull()) return nullptr; fs::path path = GetBlockPosFilename(pos, prefix); fs::create_directories(path.parent_path()); FILE* file = fsbridge::fopen(path, "rb+"); if (!file && !fReadOnly) file = fsbridge::fopen(path, "wb+"); if (!file) { LogPrintf("Unable to open file %s\n", path.string()); return nullptr; } if (pos.nPos) { if (fseek(file, pos.nPos, SEEK_SET)) { LogPrintf("Unable to seek to position %u of %s\n", pos.nPos, path.string()); fclose(file); return nullptr; } } return file; } FILE* OpenBlockFile(const CDiskBlockPos &pos, bool fReadOnly) { return OpenDiskFile(pos, "blk", fReadOnly); } /** Open an undo file (rev?????.dat) */ static FILE* OpenUndoFile(const CDiskBlockPos &pos, bool fReadOnly) { return OpenDiskFile(pos, "rev", fReadOnly); } fs::path GetBlockPosFilename(const CDiskBlockPos &pos, const char *prefix) { return GetDataDir() / "blocks" / strprintf("%s%05u.dat", prefix, pos.nFile); } CBlockIndex * InsertBlockIndex(uint256 hash) { if (hash.IsNull()) return nullptr; // Return existing BlockMap::iterator mi = mapBlockIndex.find(hash); if (mi != mapBlockIndex.end()) return (*mi).second; // Create new CBlockIndex* pindexNew = new CBlockIndex(); if (!pindexNew) throw std::runtime_error(std::string(__func__) + ": new CBlockIndex failed"); mi = mapBlockIndex.insert(std::make_pair(hash, pindexNew)).first; pindexNew->phashBlock = &((*mi).first); return pindexNew; } bool static LoadBlockIndexDB(const CChainParams& chainparams) { if (!pblocktree->LoadBlockIndexGuts(chainparams.GetConsensus(), InsertBlockIndex)) return false; boost::this_thread::interruption_point(); // Calculate nChainWork std::vector<std::pair<int, CBlockIndex*> > vSortedByHeight; vSortedByHeight.reserve(mapBlockIndex.size()); for (const std::pair<uint256, CBlockIndex*>& item : mapBlockIndex) { CBlockIndex* pindex = item.second; vSortedByHeight.push_back(std::make_pair(pindex->nHeight, pindex)); } sort(vSortedByHeight.begin(), vSortedByHeight.end()); for (const std::pair<int, CBlockIndex*>& item : vSortedByHeight) { CBlockIndex* pindex = item.second; pindex->nChainWork = (pindex->pprev ? pindex->pprev->nChainWork : 0) + GetBlockProof(*pindex); pindex->nTimeMax = (pindex->pprev ? std::max(pindex->pprev->nTimeMax, pindex->nTime) : pindex->nTime); // We can link the chain of blocks for which we've received transactions at some point. // Pruned nodes may have deleted the block. if (pindex->nTx > 0) { if (pindex->pprev) { if (pindex->pprev->nChainTx) { pindex->nChainTx = pindex->pprev->nChainTx + pindex->nTx; } else { pindex->nChainTx = 0; mapBlocksUnlinked.insert(std::make_pair(pindex->pprev, pindex)); } } else { pindex->nChainTx = pindex->nTx; } } if (!(pindex->nStatus & BLOCK_FAILED_MASK) && pindex->pprev && (pindex->pprev->nStatus & BLOCK_FAILED_MASK)) { pindex->nStatus |= BLOCK_FAILED_CHILD; setDirtyBlockIndex.insert(pindex); } if (pindex->IsValid(BLOCK_VALID_TRANSACTIONS) && (pindex->nChainTx || pindex->pprev == nullptr)) setBlockIndexCandidates.insert(pindex); if (pindex->nStatus & BLOCK_FAILED_MASK && (!pindexBestInvalid || pindex->nChainWork > pindexBestInvalid->nChainWork)) pindexBestInvalid = pindex; if (pindex->pprev) pindex->BuildSkip(); if (pindex->IsValid(BLOCK_VALID_TREE) && (pindexBestHeader == nullptr || CBlockIndexWorkComparator()(pindexBestHeader, pindex))) pindexBestHeader = pindex; } // Load block file info pblocktree->ReadLastBlockFile(nLastBlockFile); vinfoBlockFile.resize(nLastBlockFile + 1); LogPrintf("%s: last block file = %i\n", __func__, nLastBlockFile); for (int nFile = 0; nFile <= nLastBlockFile; nFile++) { pblocktree->ReadBlockFileInfo(nFile, vinfoBlockFile[nFile]); } LogPrintf("%s: last block file info: %s\n", __func__, vinfoBlockFile[nLastBlockFile].ToString()); for (int nFile = nLastBlockFile + 1; true; nFile++) { CBlockFileInfo info; if (pblocktree->ReadBlockFileInfo(nFile, info)) { vinfoBlockFile.push_back(info); } else { break; } } // Check presence of blk files LogPrintf("Checking all blk files are present...\n"); std::set<int> setBlkDataFiles; for (const std::pair<uint256, CBlockIndex*>& item : mapBlockIndex) { CBlockIndex* pindex = item.second; if (pindex->nStatus & BLOCK_HAVE_DATA) { setBlkDataFiles.insert(pindex->nFile); } } for (std::set<int>::iterator it = setBlkDataFiles.begin(); it != setBlkDataFiles.end(); it++) { CDiskBlockPos pos(*it, 0); if (CAutoFile(OpenBlockFile(pos, true), SER_DISK, CLIENT_VERSION).IsNull()) { return false; } } // Check whether we have ever pruned block & undo files pblocktree->ReadFlag("prunedblockfiles", fHavePruned); if (fHavePruned) LogPrintf("LoadBlockIndexDB(): Block files have previously been pruned\n"); // Check whether we need to continue reindexing bool fReindexing = false; pblocktree->ReadReindexing(fReindexing); fReindex |= fReindexing; // Check whether we have a transaction index pblocktree->ReadFlag("txindex", fTxIndex); LogPrintf("%s: transaction index %s\n", __func__, fTxIndex ? "enabled" : "disabled"); return true; } bool LoadChainTip(const CChainParams& chainparams) { if (chainActive.Tip() && chainActive.Tip()->GetBlockHash() == pcoinsTip->GetBestBlock()) return true; if (pcoinsTip->GetBestBlock().IsNull() && mapBlockIndex.size() == 1) { // In case we just added the genesis block, connect it now, so // that we always have a chainActive.Tip() when we return. LogPrintf("%s: Connecting genesis block...\n", __func__); CValidationState state; if (!ActivateBestChain(state, chainparams)) { return false; } } // Load pointer to end of best chain BlockMap::iterator it = mapBlockIndex.find(pcoinsTip->GetBestBlock()); if (it == mapBlockIndex.end()) return false; chainActive.SetTip(it->second); PruneBlockIndexCandidates(); LogPrintf("Loaded best chain: hashBestChain=%s height=%d date=%s progress=%f\n", chainActive.Tip()->GetBlockHash().ToString(), chainActive.Height(), DateTimeStrFormat("%Y-%m-%d %H:%M:%S", chainActive.Tip()->GetBlockTime()), GuessVerificationProgress(chainparams.TxData(), chainActive.Tip())); return true; } CVerifyDB::CVerifyDB() { uiInterface.ShowProgress(_("Verifying blocks..."), 0); } CVerifyDB::~CVerifyDB() { uiInterface.ShowProgress("", 100); } bool CVerifyDB::VerifyDB(const CChainParams& chainparams, CCoinsView *coinsview, int nCheckLevel, int nCheckDepth) { LOCK(cs_main); if (chainActive.Tip() == nullptr || chainActive.Tip()->pprev == nullptr) return true; // Verify blocks in the best chain if (nCheckDepth <= 0 || nCheckDepth > chainActive.Height()) nCheckDepth = chainActive.Height(); nCheckLevel = std::max(0, std::min(4, nCheckLevel)); LogPrintf("Verifying last %i blocks at level %i\n", nCheckDepth, nCheckLevel); CCoinsViewCache coins(coinsview); CBlockIndex* pindexState = chainActive.Tip(); CBlockIndex* pindexFailure = nullptr; int nGoodTransactions = 0; CValidationState state; int reportDone = 0; LogPrintf("[0%%]..."); for (CBlockIndex* pindex = chainActive.Tip(); pindex && pindex->pprev; pindex = pindex->pprev) { boost::this_thread::interruption_point(); int percentageDone = std::max(1, std::min(99, (int)(((double)(chainActive.Height() - pindex->nHeight)) / (double)nCheckDepth * (nCheckLevel >= 4 ? 50 : 100)))); if (reportDone < percentageDone/10) { // report every 10% step LogPrintf("[%d%%]...", percentageDone); reportDone = percentageDone/10; } uiInterface.ShowProgress(_("Verifying blocks..."), percentageDone); if (pindex->nHeight < chainActive.Height()-nCheckDepth) break; if (fPruneMode && !(pindex->nStatus & BLOCK_HAVE_DATA)) { // If pruning, only go back as far as we have data. LogPrintf("VerifyDB(): block verification stopping at height %d (pruning, no data)\n", pindex->nHeight); break; } CBlock block; // check level 0: read from disk if (!ReadBlockFromDisk(block, pindex, chainparams.GetConsensus())) return error("VerifyDB(): *** ReadBlockFromDisk failed at %d, hash=%s", pindex->nHeight, pindex->GetBlockHash().ToString()); // check level 1: verify block validity if (nCheckLevel >= 1 && !CheckBlock(block, state, chainparams.GetConsensus())) return error("%s: *** found bad block at %d, hash=%s (%s)\n", __func__, pindex->nHeight, pindex->GetBlockHash().ToString(), FormatStateMessage(state)); // check level 2: verify undo validity if (nCheckLevel >= 2 && pindex) { CBlockUndo undo; CDiskBlockPos pos = pindex->GetUndoPos(); if (!pos.IsNull()) { if (!UndoReadFromDisk(undo, pos, pindex->pprev->GetBlockHash())) return error("VerifyDB(): *** found bad undo data at %d, hash=%s\n", pindex->nHeight, pindex->GetBlockHash().ToString()); } } // check level 3: check for inconsistencies during memory-only disconnect of tip blocks if (nCheckLevel >= 3 && pindex == pindexState && (coins.DynamicMemoryUsage() + pcoinsTip->DynamicMemoryUsage()) <= nCoinCacheUsage) { assert(coins.GetBestBlock() == pindex->GetBlockHash()); DisconnectResult res = DisconnectBlock(block, pindex, coins); if (res == DISCONNECT_FAILED) { return error("VerifyDB(): *** irrecoverable inconsistency in block data at %d, hash=%s", pindex->nHeight, pindex->GetBlockHash().ToString()); } pindexState = pindex->pprev; if (res == DISCONNECT_UNCLEAN) { nGoodTransactions = 0; pindexFailure = pindex; } else { nGoodTransactions += block.vtx.size(); } } if (ShutdownRequested()) return true; } if (pindexFailure) return error("VerifyDB(): *** coin database inconsistencies found (last %i blocks, %i good transactions before that)\n", chainActive.Height() - pindexFailure->nHeight + 1, nGoodTransactions); // check level 4: try reconnecting blocks if (nCheckLevel >= 4) { CBlockIndex *pindex = pindexState; while (pindex != chainActive.Tip()) { boost::this_thread::interruption_point(); uiInterface.ShowProgress(_("Verifying blocks..."), std::max(1, std::min(99, 100 - (int)(((double)(chainActive.Height() - pindex->nHeight)) / (double)nCheckDepth * 50)))); pindex = chainActive.Next(pindex); CBlock block; if (!ReadBlockFromDisk(block, pindex, chainparams.GetConsensus())) return error("VerifyDB(): *** ReadBlockFromDisk failed at %d, hash=%s", pindex->nHeight, pindex->GetBlockHash().ToString()); if (!ConnectBlock(block, state, pindex, coins, chainparams)) return error("VerifyDB(): *** found unconnectable block at %d, hash=%s", pindex->nHeight, pindex->GetBlockHash().ToString()); } } LogPrintf("[DONE].\n"); LogPrintf("No coin database inconsistencies in last %i blocks (%i transactions)\n", chainActive.Height() - pindexState->nHeight, nGoodTransactions); return true; } /** Apply the effects of a block on the utxo cache, ignoring that it may already have been applied. */ static bool RollforwardBlock(const CBlockIndex* pindex, CCoinsViewCache& inputs, const CChainParams& params) { // TODO: merge with ConnectBlock CBlock block; if (!ReadBlockFromDisk(block, pindex, params.GetConsensus())) { return error("ReplayBlock(): ReadBlockFromDisk failed at %d, hash=%s", pindex->nHeight, pindex->GetBlockHash().ToString()); } for (const CTransactionRef& tx : block.vtx) { if (!tx->IsCoinBase()) { for (const CTxIn &txin : tx->vin) { inputs.SpendCoin(txin.prevout); } } // Pass check = true as every addition may be an overwrite. AddCoins(inputs, *tx, pindex->nHeight, true); } return true; } bool ReplayBlocks(const CChainParams& params, CCoinsView* view) { LOCK(cs_main); CCoinsViewCache cache(view); std::vector<uint256> hashHeads = view->GetHeadBlocks(); if (hashHeads.empty()) return true; // We're already in a consistent state. if (hashHeads.size() != 2) return error("ReplayBlocks(): unknown inconsistent state"); uiInterface.ShowProgress(_("Replaying blocks..."), 0); LogPrintf("Replaying blocks\n"); const CBlockIndex* pindexOld = nullptr; // Old tip during the interrupted flush. const CBlockIndex* pindexNew; // New tip during the interrupted flush. const CBlockIndex* pindexFork = nullptr; // Latest block common to both the old and the new tip. if (mapBlockIndex.count(hashHeads[0]) == 0) { return error("ReplayBlocks(): reorganization to unknown block requested"); } pindexNew = mapBlockIndex[hashHeads[0]]; if (!hashHeads[1].IsNull()) { // The old tip is allowed to be 0, indicating it's the first flush. if (mapBlockIndex.count(hashHeads[1]) == 0) { return error("ReplayBlocks(): reorganization from unknown block requested"); } pindexOld = mapBlockIndex[hashHeads[1]]; pindexFork = LastCommonAncestor(pindexOld, pindexNew); assert(pindexFork != nullptr); } // Rollback along the old branch. while (pindexOld != pindexFork) { if (pindexOld->nHeight > 0) { // Never disconnect the genesis block. CBlock block; if (!ReadBlockFromDisk(block, pindexOld, params.GetConsensus())) { return error("RollbackBlock(): ReadBlockFromDisk() failed at %d, hash=%s", pindexOld->nHeight, pindexOld->GetBlockHash().ToString()); } LogPrintf("Rolling back %s (%i)\n", pindexOld->GetBlockHash().ToString(), pindexOld->nHeight); DisconnectResult res = DisconnectBlock(block, pindexOld, cache); if (res == DISCONNECT_FAILED) { return error("RollbackBlock(): DisconnectBlock failed at %d, hash=%s", pindexOld->nHeight, pindexOld->GetBlockHash().ToString()); } // If DISCONNECT_UNCLEAN is returned, it means a non-existing UTXO was deleted, or an existing UTXO was // overwritten. It corresponds to cases where the block-to-be-disconnect never had all its operations // applied to the UTXO set. However, as both writing a UTXO and deleting a UTXO are idempotent operations, // the result is still a version of the UTXO set with the effects of that block undone. } pindexOld = pindexOld->pprev; } // Roll forward from the forking point to the new tip. int nForkHeight = pindexFork ? pindexFork->nHeight : 0; for (int nHeight = nForkHeight + 1; nHeight <= pindexNew->nHeight; ++nHeight) { const CBlockIndex* pindex = pindexNew->GetAncestor(nHeight); LogPrintf("Rolling forward %s (%i)\n", pindex->GetBlockHash().ToString(), nHeight); if (!RollforwardBlock(pindex, cache, params)) return false; } cache.SetBestBlock(pindexNew->GetBlockHash()); cache.Flush(); uiInterface.ShowProgress("", 100); return true; } bool RewindBlockIndex(const CChainParams& params) { LOCK(cs_main); // Note that during -reindex-chainstate we are called with an empty chainActive! int nHeight = 1; while (nHeight <= chainActive.Height()) { if (IsWitnessEnabled(chainActive[nHeight - 1], params.GetConsensus()) && !(chainActive[nHeight]->nStatus & BLOCK_OPT_WITNESS)) { break; } nHeight++; } // nHeight is now the height of the first insufficiently-validated block, or tipheight + 1 CValidationState state; CBlockIndex* pindex = chainActive.Tip(); while (chainActive.Height() >= nHeight) { if (fPruneMode && !(chainActive.Tip()->nStatus & BLOCK_HAVE_DATA)) { // If pruning, don't try rewinding past the HAVE_DATA point; // since older blocks can't be served anyway, there's // no need to walk further, and trying to DisconnectTip() // will fail (and require a needless reindex/redownload // of the blockchain). break; } if (!DisconnectTip(state, params, nullptr)) { return error("RewindBlockIndex: unable to disconnect block at height %i", pindex->nHeight); } // Occasionally flush state to disk. if (!FlushStateToDisk(params, state, FLUSH_STATE_PERIODIC)) return false; } // Reduce validity flag and have-data flags. // We do this after actual disconnecting, otherwise we'll end up writing the lack of data // to disk before writing the chainstate, resulting in a failure to continue if interrupted. for (BlockMap::iterator it = mapBlockIndex.begin(); it != mapBlockIndex.end(); it++) { CBlockIndex* pindexIter = it->second; // Note: If we encounter an insufficiently validated block that // is on chainActive, it must be because we are a pruning node, and // this block or some successor doesn't HAVE_DATA, so we were unable to // rewind all the way. Blocks remaining on chainActive at this point // must not have their validity reduced. if (IsWitnessEnabled(pindexIter->pprev, params.GetConsensus()) && !(pindexIter->nStatus & BLOCK_OPT_WITNESS) && !chainActive.Contains(pindexIter)) { // Reduce validity pindexIter->nStatus = std::min<unsigned int>(pindexIter->nStatus & BLOCK_VALID_MASK, BLOCK_VALID_TREE) | (pindexIter->nStatus & ~BLOCK_VALID_MASK); // Remove have-data flags. pindexIter->nStatus &= ~(BLOCK_HAVE_DATA | BLOCK_HAVE_UNDO); // Remove storage location. pindexIter->nFile = 0; pindexIter->nDataPos = 0; pindexIter->nUndoPos = 0; // Remove various other things pindexIter->nTx = 0; pindexIter->nChainTx = 0; pindexIter->nSequenceId = 0; // Make sure it gets written. setDirtyBlockIndex.insert(pindexIter); // Update indexes setBlockIndexCandidates.erase(pindexIter); std::pair<std::multimap<CBlockIndex*, CBlockIndex*>::iterator, std::multimap<CBlockIndex*, CBlockIndex*>::iterator> ret = mapBlocksUnlinked.equal_range(pindexIter->pprev); while (ret.first != ret.second) { if (ret.first->second == pindexIter) { mapBlocksUnlinked.erase(ret.first++); } else { ++ret.first; } } } else if (pindexIter->IsValid(BLOCK_VALID_TRANSACTIONS) && pindexIter->nChainTx) { setBlockIndexCandidates.insert(pindexIter); } } if (chainActive.Tip() != nullptr) { // We can't prune block index candidates based on our tip if we have // no tip due to chainActive being empty! PruneBlockIndexCandidates(); CheckBlockIndex(params.GetConsensus()); // FlushStateToDisk can possibly read chainActive. Be conservative // and skip it here, we're about to -reindex-chainstate anyway, so // it'll get called a bunch real soon. if (!FlushStateToDisk(params, state, FLUSH_STATE_ALWAYS)) { return false; } } return true; } // May NOT be used after any connections are up as much // of the peer-processing logic assumes a consistent // block index state void UnloadBlockIndex() { LOCK(cs_main); setBlockIndexCandidates.clear(); chainActive.SetTip(nullptr); pindexBestInvalid = nullptr; pindexBestHeader = nullptr; mempool.clear(); mapBlocksUnlinked.clear(); vinfoBlockFile.clear(); nLastBlockFile = 0; nBlockSequenceId = 1; setDirtyBlockIndex.clear(); mapDirtyAuxPow.clear(); g_failed_blocks.clear(); setDirtyFileInfo.clear(); versionbitscache.Clear(); for (int b = 0; b < VERSIONBITS_NUM_BITS; b++) { warningcache[b].clear(); } for (BlockMap::value_type& entry : mapBlockIndex) { delete entry.second; } mapBlockIndex.clear(); fHavePruned = false; } bool LoadBlockIndex(const CChainParams& chainparams) { // Load block index from databases bool needs_init = fReindex; if (!fReindex) { bool ret = LoadBlockIndexDB(chainparams); if (!ret) return false; needs_init = mapBlockIndex.empty(); } if (needs_init) { // Everything here is for *new* reindex/DBs. Thus, though // LoadBlockIndexDB may have set fReindex if we shut down // mid-reindex previously, we don't check fReindex and // instead only check it prior to LoadBlockIndexDB to set // needs_init. LogPrintf("Initializing databases...\n"); // Use the provided setting for -txindex in the new database fTxIndex = gArgs.GetBoolArg("-txindex", DEFAULT_TXINDEX); pblocktree->WriteFlag("txindex", fTxIndex); } return true; } bool LoadGenesisBlock(const CChainParams& chainparams) { LOCK(cs_main); // Check whether we're already initialized by checking for genesis in // mapBlockIndex. Note that we can't use chainActive here, since it is // set based on the coins db, not the block index db, which is the only // thing loaded at this point. if (mapBlockIndex.count(chainparams.GenesisBlock().GetHash())) return true; try { CBlock &block = const_cast<CBlock&>(chainparams.GenesisBlock()); // Start new block file unsigned int nBlockSize = ::GetSerializeSize(block, SER_DISK, CLIENT_VERSION); CDiskBlockPos blockPos; CValidationState state; if (!FindBlockPos(state, blockPos, nBlockSize+8, 0, block.GetBlockTime())) return error("%s: FindBlockPos failed", __func__); if (!WriteBlockToDisk(block, blockPos, chainparams.MessageStart())) return error("%s: writing genesis block to disk failed", __func__); CBlockIndex *pindex = AddToBlockIndex(block); if (!ReceivedBlockTransactions(block, state, pindex, blockPos, chainparams.GetConsensus())) return error("%s: genesis block not accepted", __func__); } catch (const std::runtime_error& e) { return error("%s: failed to write genesis block: %s", __func__, e.what()); } return true; } bool LoadExternalBlockFile(const CChainParams& chainparams, FILE* fileIn, CDiskBlockPos *dbp) { // Map of disk positions for blocks with unknown parent (only used for reindex) static std::multimap<uint256, CDiskBlockPos> mapBlocksUnknownParent; int64_t nStart = GetTimeMillis(); int nLoaded = 0; try { // This takes over fileIn and calls fclose() on it in the CBufferedFile destructor CBufferedFile blkdat(fileIn, 2*MAX_BLOCK_SERIALIZED_SIZE, MAX_BLOCK_SERIALIZED_SIZE+8, SER_DISK, CLIENT_VERSION); uint64_t nRewind = blkdat.GetPos(); while (!blkdat.eof()) { boost::this_thread::interruption_point(); blkdat.SetPos(nRewind); nRewind++; // start one byte further next time, in case of failure blkdat.SetLimit(); // remove former limit unsigned int nSize = 0; try { // locate a header unsigned char buf[CMessageHeader::MESSAGE_START_SIZE]; blkdat.FindByte(chainparams.MessageStart()[0]); nRewind = blkdat.GetPos()+1; blkdat >> FLATDATA(buf); if (memcmp(buf, chainparams.MessageStart(), CMessageHeader::MESSAGE_START_SIZE)) continue; // read size blkdat >> nSize; if (nSize < 80 || nSize > MAX_BLOCK_SERIALIZED_SIZE) continue; } catch (const std::exception&) { // no valid block header found; don't complain break; } try { // read block uint64_t nBlockPos = blkdat.GetPos(); if (dbp) dbp->nPos = nBlockPos; blkdat.SetLimit(nBlockPos + nSize); blkdat.SetPos(nBlockPos); std::shared_ptr<CBlock> pblock = std::make_shared<CBlock>(); CBlock& block = *pblock; blkdat >> block; nRewind = blkdat.GetPos(); // detect out of order blocks, and store them for later uint256 hash = block.GetHash(); if (hash != chainparams.GetConsensus().hashGenesisBlock && mapBlockIndex.find(block.hashPrevBlock) == mapBlockIndex.end()) { LogPrint(BCLog::REINDEX, "%s: Out of order block %s, parent %s not known\n", __func__, hash.ToString(), block.hashPrevBlock.ToString()); if (dbp) mapBlocksUnknownParent.insert(std::make_pair(block.hashPrevBlock, *dbp)); continue; } // process in case the block isn't known yet if (mapBlockIndex.count(hash) == 0 || (mapBlockIndex[hash]->nStatus & BLOCK_HAVE_DATA) == 0) { LOCK(cs_main); CValidationState state; if (AcceptBlock(pblock, state, chainparams, nullptr, true, dbp, nullptr)) nLoaded++; if (state.IsError()) break; } else if (hash != chainparams.GetConsensus().hashGenesisBlock && mapBlockIndex[hash]->nHeight % 1000 == 0) { LogPrint(BCLog::REINDEX, "Block Import: already had block %s at height %d\n", hash.ToString(), mapBlockIndex[hash]->nHeight); } // Activate the genesis block so normal node progress can continue if (hash == chainparams.GetConsensus().hashGenesisBlock) { CValidationState state; if (!ActivateBestChain(state, chainparams)) { break; } } NotifyHeaderTip(); // Recursively process earlier encountered successors of this block std::deque<uint256> queue; queue.push_back(hash); while (!queue.empty()) { uint256 head = queue.front(); queue.pop_front(); std::pair<std::multimap<uint256, CDiskBlockPos>::iterator, std::multimap<uint256, CDiskBlockPos>::iterator> range = mapBlocksUnknownParent.equal_range(head); while (range.first != range.second) { std::multimap<uint256, CDiskBlockPos>::iterator it = range.first; std::shared_ptr<CBlock> pblockrecursive = std::make_shared<CBlock>(); if (ReadBlockFromDisk(*pblockrecursive, it->second, chainparams.GetConsensus())) { LogPrint(BCLog::REINDEX, "%s: Processing out of order child %s of %s\n", __func__, pblockrecursive->GetHash().ToString(), head.ToString()); LOCK(cs_main); CValidationState dummy; if (AcceptBlock(pblockrecursive, dummy, chainparams, nullptr, true, &it->second, nullptr)) { nLoaded++; queue.push_back(pblockrecursive->GetHash()); } } range.first++; mapBlocksUnknownParent.erase(it); NotifyHeaderTip(); } } } catch (const std::exception& e) { LogPrintf("%s: Deserialize or I/O error - %s\n", __func__, e.what()); } } } catch (const std::runtime_error& e) { AbortNode(std::string("System error: ") + e.what()); } if (nLoaded > 0) LogPrintf("Loaded %i blocks from external file in %dms\n", nLoaded, GetTimeMillis() - nStart); return nLoaded > 0; } void static CheckBlockIndex(const Consensus::Params& consensusParams) { if (!fCheckBlockIndex) { return; } LOCK(cs_main); // During a reindex, we read the genesis block and call CheckBlockIndex before ActivateBestChain, // so we have the genesis block in mapBlockIndex but no active chain. (A few of the tests when // iterating the block tree require that chainActive has been initialized.) if (chainActive.Height() < 0) { assert(mapBlockIndex.size() <= 1); return; } // Build forward-pointing map of the entire block tree. std::multimap<CBlockIndex*,CBlockIndex*> forward; for (BlockMap::iterator it = mapBlockIndex.begin(); it != mapBlockIndex.end(); it++) { forward.insert(std::make_pair(it->second->pprev, it->second)); } assert(forward.size() == mapBlockIndex.size()); std::pair<std::multimap<CBlockIndex*,CBlockIndex*>::iterator,std::multimap<CBlockIndex*,CBlockIndex*>::iterator> rangeGenesis = forward.equal_range(nullptr); CBlockIndex *pindex = rangeGenesis.first->second; rangeGenesis.first++; assert(rangeGenesis.first == rangeGenesis.second); // There is only one index entry with parent nullptr. // Iterate over the entire block tree, using depth-first search. // Along the way, remember whether there are blocks on the path from genesis // block being explored which are the first to have certain properties. size_t nNodes = 0; int nHeight = 0; CBlockIndex* pindexFirstInvalid = nullptr; // Oldest ancestor of pindex which is invalid. CBlockIndex* pindexFirstMissing = nullptr; // Oldest ancestor of pindex which does not have BLOCK_HAVE_DATA. CBlockIndex* pindexFirstNeverProcessed = nullptr; // Oldest ancestor of pindex for which nTx == 0. CBlockIndex* pindexFirstNotTreeValid = nullptr; // Oldest ancestor of pindex which does not have BLOCK_VALID_TREE (regardless of being valid or not). CBlockIndex* pindexFirstNotTransactionsValid = nullptr; // Oldest ancestor of pindex which does not have BLOCK_VALID_TRANSACTIONS (regardless of being valid or not). CBlockIndex* pindexFirstNotChainValid = nullptr; // Oldest ancestor of pindex which does not have BLOCK_VALID_CHAIN (regardless of being valid or not). CBlockIndex* pindexFirstNotScriptsValid = nullptr; // Oldest ancestor of pindex which does not have BLOCK_VALID_SCRIPTS (regardless of being valid or not). while (pindex != nullptr) { nNodes++; if (pindexFirstInvalid == nullptr && pindex->nStatus & BLOCK_FAILED_VALID) pindexFirstInvalid = pindex; if (pindexFirstMissing == nullptr && !(pindex->nStatus & BLOCK_HAVE_DATA)) pindexFirstMissing = pindex; if (pindexFirstNeverProcessed == nullptr && pindex->nTx == 0) pindexFirstNeverProcessed = pindex; if (pindex->pprev != nullptr && pindexFirstNotTreeValid == nullptr && (pindex->nStatus & BLOCK_VALID_MASK) < BLOCK_VALID_TREE) pindexFirstNotTreeValid = pindex; if (pindex->pprev != nullptr && pindexFirstNotTransactionsValid == nullptr && (pindex->nStatus & BLOCK_VALID_MASK) < BLOCK_VALID_TRANSACTIONS) pindexFirstNotTransactionsValid = pindex; if (pindex->pprev != nullptr && pindexFirstNotChainValid == nullptr && (pindex->nStatus & BLOCK_VALID_MASK) < BLOCK_VALID_CHAIN) pindexFirstNotChainValid = pindex; if (pindex->pprev != nullptr && pindexFirstNotScriptsValid == nullptr && (pindex->nStatus & BLOCK_VALID_MASK) < BLOCK_VALID_SCRIPTS) pindexFirstNotScriptsValid = pindex; // Begin: actual consistency checks. if (pindex->pprev == nullptr) { // Genesis block checks. assert(pindex->GetBlockHash() == consensusParams.hashGenesisBlock); // Genesis block's hash must match. assert(pindex == chainActive.Genesis()); // The current active chain's genesis block must be this block. } if (pindex->nChainTx == 0) assert(pindex->nSequenceId <= 0); // nSequenceId can't be set positive for blocks that aren't linked (negative is used for preciousblock) // VALID_TRANSACTIONS is equivalent to nTx > 0 for all nodes (whether or not pruning has occurred). // HAVE_DATA is only equivalent to nTx > 0 (or VALID_TRANSACTIONS) if no pruning has occurred. if (!fHavePruned) { // If we've never pruned, then HAVE_DATA should be equivalent to nTx > 0 assert(!(pindex->nStatus & BLOCK_HAVE_DATA) == (pindex->nTx == 0)); assert(pindexFirstMissing == pindexFirstNeverProcessed); } else { // If we have pruned, then we can only say that HAVE_DATA implies nTx > 0 if (pindex->nStatus & BLOCK_HAVE_DATA) assert(pindex->nTx > 0); } if (pindex->nStatus & BLOCK_HAVE_UNDO) assert(pindex->nStatus & BLOCK_HAVE_DATA); assert(((pindex->nStatus & BLOCK_VALID_MASK) >= BLOCK_VALID_TRANSACTIONS) == (pindex->nTx > 0)); // This is pruning-independent. // All parents having had data (at some point) is equivalent to all parents being VALID_TRANSACTIONS, which is equivalent to nChainTx being set. assert((pindexFirstNeverProcessed != nullptr) == (pindex->nChainTx == 0)); // nChainTx != 0 is used to signal that all parent blocks have been processed (but may have been pruned). assert((pindexFirstNotTransactionsValid != nullptr) == (pindex->nChainTx == 0)); assert(pindex->nHeight == nHeight); // nHeight must be consistent. assert(pindex->pprev == nullptr || pindex->nChainWork >= pindex->pprev->nChainWork); // For every block except the genesis block, the chainwork must be larger than the parent's. assert(nHeight < 2 || (pindex->pskip && (pindex->pskip->nHeight < nHeight))); // The pskip pointer must point back for all but the first 2 blocks. assert(pindexFirstNotTreeValid == nullptr); // All mapBlockIndex entries must at least be TREE valid if ((pindex->nStatus & BLOCK_VALID_MASK) >= BLOCK_VALID_TREE) assert(pindexFirstNotTreeValid == nullptr); // TREE valid implies all parents are TREE valid if ((pindex->nStatus & BLOCK_VALID_MASK) >= BLOCK_VALID_CHAIN) assert(pindexFirstNotChainValid == nullptr); // CHAIN valid implies all parents are CHAIN valid if ((pindex->nStatus & BLOCK_VALID_MASK) >= BLOCK_VALID_SCRIPTS) assert(pindexFirstNotScriptsValid == nullptr); // SCRIPTS valid implies all parents are SCRIPTS valid if (pindexFirstInvalid == nullptr) { // Checks for not-invalid blocks. assert((pindex->nStatus & BLOCK_FAILED_MASK) == 0); // The failed mask cannot be set for blocks without invalid parents. } if (!CBlockIndexWorkComparator()(pindex, chainActive.Tip()) && pindexFirstNeverProcessed == nullptr) { if (pindexFirstInvalid == nullptr) { // If this block sorts at least as good as the current tip and // is valid and we have all data for its parents, it must be in // setBlockIndexCandidates. chainActive.Tip() must also be there // even if some data has been pruned. if (pindexFirstMissing == nullptr || pindex == chainActive.Tip()) { assert(setBlockIndexCandidates.count(pindex)); } // If some parent is missing, then it could be that this block was in // setBlockIndexCandidates but had to be removed because of the missing data. // In this case it must be in mapBlocksUnlinked -- see test below. } } else { // If this block sorts worse than the current tip or some ancestor's block has never been seen, it cannot be in setBlockIndexCandidates. assert(setBlockIndexCandidates.count(pindex) == 0); } // Check whether this block is in mapBlocksUnlinked. std::pair<std::multimap<CBlockIndex*,CBlockIndex*>::iterator,std::multimap<CBlockIndex*,CBlockIndex*>::iterator> rangeUnlinked = mapBlocksUnlinked.equal_range(pindex->pprev); bool foundInUnlinked = false; while (rangeUnlinked.first != rangeUnlinked.second) { assert(rangeUnlinked.first->first == pindex->pprev); if (rangeUnlinked.first->second == pindex) { foundInUnlinked = true; break; } rangeUnlinked.first++; } if (pindex->pprev && (pindex->nStatus & BLOCK_HAVE_DATA) && pindexFirstNeverProcessed != nullptr && pindexFirstInvalid == nullptr) { // If this block has block data available, some parent was never received, and has no invalid parents, it must be in mapBlocksUnlinked. assert(foundInUnlinked); } if (!(pindex->nStatus & BLOCK_HAVE_DATA)) assert(!foundInUnlinked); // Can't be in mapBlocksUnlinked if we don't HAVE_DATA if (pindexFirstMissing == nullptr) assert(!foundInUnlinked); // We aren't missing data for any parent -- cannot be in mapBlocksUnlinked. if (pindex->pprev && (pindex->nStatus & BLOCK_HAVE_DATA) && pindexFirstNeverProcessed == nullptr && pindexFirstMissing != nullptr) { // We HAVE_DATA for this block, have received data for all parents at some point, but we're currently missing data for some parent. assert(fHavePruned); // We must have pruned. // This block may have entered mapBlocksUnlinked if: // - it has a descendant that at some point had more work than the // tip, and // - we tried switching to that descendant but were missing // data for some intermediate block between chainActive and the // tip. // So if this block is itself better than chainActive.Tip() and it wasn't in // setBlockIndexCandidates, then it must be in mapBlocksUnlinked. if (!CBlockIndexWorkComparator()(pindex, chainActive.Tip()) && setBlockIndexCandidates.count(pindex) == 0) { if (pindexFirstInvalid == nullptr) { assert(foundInUnlinked); } } } // assert(pindex->GetBlockHash() == pindex->GetBlockHeader().GetHash()); // Perhaps too slow // End: actual consistency checks. // Try descending into the first subnode. std::pair<std::multimap<CBlockIndex*,CBlockIndex*>::iterator,std::multimap<CBlockIndex*,CBlockIndex*>::iterator> range = forward.equal_range(pindex); if (range.first != range.second) { // A subnode was found. pindex = range.first->second; nHeight++; continue; } // This is a leaf node. // Move upwards until we reach a node of which we have not yet visited the last child. while (pindex) { // We are going to either move to a parent or a sibling of pindex. // If pindex was the first with a certain property, unset the corresponding variable. if (pindex == pindexFirstInvalid) pindexFirstInvalid = nullptr; if (pindex == pindexFirstMissing) pindexFirstMissing = nullptr; if (pindex == pindexFirstNeverProcessed) pindexFirstNeverProcessed = nullptr; if (pindex == pindexFirstNotTreeValid) pindexFirstNotTreeValid = nullptr; if (pindex == pindexFirstNotTransactionsValid) pindexFirstNotTransactionsValid = nullptr; if (pindex == pindexFirstNotChainValid) pindexFirstNotChainValid = nullptr; if (pindex == pindexFirstNotScriptsValid) pindexFirstNotScriptsValid = nullptr; // Find our parent. CBlockIndex* pindexPar = pindex->pprev; // Find which child we just visited. std::pair<std::multimap<CBlockIndex*,CBlockIndex*>::iterator,std::multimap<CBlockIndex*,CBlockIndex*>::iterator> rangePar = forward.equal_range(pindexPar); while (rangePar.first->second != pindex) { assert(rangePar.first != rangePar.second); // Our parent must have at least the node we're coming from as child. rangePar.first++; } // Proceed to the next one. rangePar.first++; if (rangePar.first != rangePar.second) { // Move to the sibling. pindex = rangePar.first->second; break; } else { // Move up further. pindex = pindexPar; nHeight--; continue; } } } // Check that we actually traversed the entire map. assert(nNodes == forward.size()); } std::string CBlockFileInfo::ToString() const { return strprintf("CBlockFileInfo(blocks=%u, size=%u, heights=%u...%u, time=%s...%s)", nBlocks, nSize, nHeightFirst, nHeightLast, DateTimeStrFormat("%Y-%m-%d", nTimeFirst), DateTimeStrFormat("%Y-%m-%d", nTimeLast)); } CBlockFileInfo* GetBlockFileInfo(size_t n) { return &vinfoBlockFile.at(n); } ThresholdState VersionBitsTipState(const Consensus::Params& params, Consensus::DeploymentPos pos) { LOCK(cs_main); return VersionBitsState(chainActive.Tip(), params, pos, versionbitscache); } BIP9Stats VersionBitsTipStatistics(const Consensus::Params& params, Consensus::DeploymentPos pos) { LOCK(cs_main); return VersionBitsStatistics(chainActive.Tip(), params, pos); } int VersionBitsTipStateSinceHeight(const Consensus::Params& params, Consensus::DeploymentPos pos) { LOCK(cs_main); return VersionBitsStateSinceHeight(chainActive.Tip(), params, pos, versionbitscache); } static const uint64_t MEMPOOL_DUMP_VERSION = 1; bool LoadMempool(void) { const CChainParams& chainparams = Params(); int64_t nExpiryTimeout = gArgs.GetArg("-mempoolexpiry", DEFAULT_MEMPOOL_EXPIRY) * 60 * 60; FILE* filestr = fsbridge::fopen(GetDataDir() / "mempool.dat", "rb"); CAutoFile file(filestr, SER_DISK, CLIENT_VERSION); if (file.IsNull()) { LogPrintf("Failed to open mempool file from disk. Continuing anyway.\n"); return false; } int64_t count = 0; int64_t skipped = 0; int64_t failed = 0; int64_t nNow = GetTime(); try { uint64_t version; file >> version; if (version != MEMPOOL_DUMP_VERSION) { return false; } uint64_t num; file >> num; while (num--) { CTransactionRef tx; int64_t nTime; int64_t nFeeDelta; file >> tx; file >> nTime; file >> nFeeDelta; CAmount amountdelta = nFeeDelta; if (amountdelta) { mempool.PrioritiseTransaction(tx->GetHash(), amountdelta); } CValidationState state; if (nTime + nExpiryTimeout > nNow) { LOCK(cs_main); AcceptToMemoryPoolWithTime(chainparams, mempool, state, tx, true, nullptr, nTime, nullptr, false, 0); if (state.IsValid()) { ++count; } else { ++failed; } } else { ++skipped; } if (ShutdownRequested()) return false; } std::map<uint256, CAmount> mapDeltas; file >> mapDeltas; for (const auto& i : mapDeltas) { mempool.PrioritiseTransaction(i.first, i.second); } } catch (const std::exception& e) { LogPrintf("Failed to deserialize mempool data on disk: %s. Continuing anyway.\n", e.what()); return false; } LogPrintf("Imported mempool transactions from disk: %i successes, %i failed, %i expired\n", count, failed, skipped); return true; } void DumpMempool(void) { int64_t start = GetTimeMicros(); std::map<uint256, CAmount> mapDeltas; std::vector<TxMempoolInfo> vinfo; { LOCK(mempool.cs); for (const auto &i : mempool.mapDeltas) { mapDeltas[i.first] = i.second; } vinfo = mempool.infoAll(); } int64_t mid = GetTimeMicros(); try { FILE* filestr = fsbridge::fopen(GetDataDir() / "mempool.dat.new", "wb"); if (!filestr) { return; } CAutoFile file(filestr, SER_DISK, CLIENT_VERSION); uint64_t version = MEMPOOL_DUMP_VERSION; file << version; file << (uint64_t)vinfo.size(); for (const auto& i : vinfo) { file << *(i.tx); file << (int64_t)i.nTime; file << (int64_t)i.nFeeDelta; mapDeltas.erase(i.tx->GetHash()); } file << mapDeltas; FileCommit(file.Get()); file.fclose(); RenameOver(GetDataDir() / "mempool.dat.new", GetDataDir() / "mempool.dat"); int64_t last = GetTimeMicros(); LogPrintf("Dumped mempool: %gs to copy, %gs to dump\n", (mid-start)*0.000001, (last-mid)*0.000001); } catch (const std::exception& e) { LogPrintf("Failed to dump mempool: %s. Continuing anyway.\n", e.what()); } } //! Guess how far we are in the verification process at the given block index double GuessVerificationProgress(const ChainTxData& data, CBlockIndex *pindex) { if (pindex == nullptr) return 0.0; int64_t nNow = time(nullptr); double fTxTotal; if (pindex->nChainTx <= data.nTxCount) { fTxTotal = data.nTxCount + (nNow - data.nTime) * data.dTxRate; } else { fTxTotal = pindex->nChainTx + (nNow - pindex->GetBlockTime()) * data.dTxRate; } return pindex->nChainTx / fTxTotal; } class CMainCleanup { public: CMainCleanup() {} ~CMainCleanup() { // block headers BlockMap::iterator it1 = mapBlockIndex.begin(); for (; it1 != mapBlockIndex.end(); it1++) delete (*it1).second; mapBlockIndex.clear(); } } instance_of_cmaincleanup;
mit
frasaleksander/simpletheme
inc/simpletheme-customizer/view/custom-javascript/custom-javascript.php
98
<script id="simpletheme-custom-javascript"> <?php echo $custom_javascript_javascript; ?> </script>
mit
rastaman/semaphore
public/js/controllers/projects/templates.js
3539
define(['controllers/projects/taskRunner'], function () { app.registerController('ProjectTemplatesCtrl', ['$scope', '$http', '$uibModal', 'Project', '$rootScope', function ($scope, $http, $modal, Project, $rootScope) { $http.get(Project.getURL() + '/keys?type=ssh').success(function (keys) { $scope.sshKeys = keys; $scope.sshKeysAssoc = {}; keys.forEach(function (k) { if (k.removed) k.name = '[removed] - ' + k.name; $scope.sshKeysAssoc[k.id] = k; }); }); $http.get(Project.getURL() + '/inventory').success(function (inv) { $scope.inventory = inv; $scope.inventoryAssoc = {}; inv.forEach(function (i) { if (i.removed) i.name = '[removed] - ' + i.name; $scope.inventoryAssoc[i.id] = i; }); }); $http.get(Project.getURL() + '/repositories').success(function (repos) { $scope.repos = repos; $scope.reposAssoc = {}; repos.forEach(function (i) { if (i.removed) i.name = '[removed] - ' + i.name; $scope.reposAssoc[i.id] = i; }); }); $http.get(Project.getURL() + '/environment').success(function (env) { $scope.environment = env; $scope.environmentAssoc = {}; env.forEach(function (i) { if (i.removed) i.name = '[removed] - ' + i.name; $scope.environmentAssoc[i.id] = i; }); }); $scope.reload = function () { $http.get(Project.getURL() + '/templates').success(function (templates) { $scope.templates = templates; }); } $scope.remove = function (template) { $http.delete(Project.getURL() + '/templates/' + template.id).success(function () { $scope.reload(); }).error(function () { swal('error', 'could not delete template..', 'error'); }); } $scope.add = function () { var scope = $rootScope.$new(); scope.keys = $scope.sshKeys; scope.inventory = $scope.inventory; scope.repositories = $scope.repos; scope.environment = $scope.environment; $modal.open({ templateUrl: '/tpl/projects/templates/add.html', scope: scope }).result.then(function (opts) { var tpl = opts.template; $http.post(Project.getURL() + '/templates', tpl).success(function () { $scope.reload(); }).error(function (_, status) { swal('error', 'could not add template:' + status, 'error'); }); }); } $scope.update = function (template) { var scope = $rootScope.$new(); scope.tpl = template; scope.keys = $scope.sshKeys; scope.inventory = $scope.inventory; scope.repositories = $scope.repos; scope.environment = $scope.environment; $modal.open({ templateUrl: '/tpl/projects/templates/add.html', scope: scope }).result.then(function (opts) { if (opts.remove) { return $scope.remove(template); } var tpl = opts.template; $http.put(Project.getURL() + '/templates/' + template.id, tpl).success(function () { $scope.reload(); }).error(function (_, status) { swal('error', 'could not add template:' + status, 'error'); }); }); } $scope.run = function (tpl) { $modal.open({ templateUrl: '/tpl/projects/createTaskModal.html', controller: 'CreateTaskCtrl', resolve: { Project: function () { return Project; }, Template: function () { return tpl; } } }).result.then(function (task) { var scope = $rootScope.$new(); scope.task = task; scope.project = Project; $modal.open({ templateUrl: '/tpl/projects/taskModal.html', controller: 'TaskCtrl', scope: scope, size: 'lg' }); }) } $scope.reload(); }]); });
mit
jeremymcrae/denovoFilter
tests/test_exclude_segdups.py
1988
''' Copyright (c) 2016 Genome Research Ltd. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ''' import unittest from pandas import DataFrame, Series from denovoFilter.exclude_segdups import check_segdups class TestExcludeSegdups(unittest.TestCase): def test_exclude_segdups(self): ''' check that counting alleles from DP4 entries works correctly ''' # define variants that lie around the boundaries of a segdup region variants = DataFrame({'person_stable_id': ['a', 'a', 'a', 'a', 'a', 'a'], 'chrom': ['1', '1', '1', '1', '1', '1'], 'pos': [1379893, 1379895, 1379894, 1384309, 1384310, 1384311], 'ref': ['A', 'G', 'A', 'G', 'A', 'G'], 'alt': ['C', 'T', 'C', 'T', 'C', 'T'], 'symbol': ['TEST1', 'TEST1', 'TEST1', 'TEST1', 'TEST1', 'TEST1'], }) expected = [True, False, False, False, False, True] self.assertEqual(check_segdups(variants), expected)
mit
brewdente/AutoWebPerf
MasterDevs.ChromeDevTools/Protocol/Debugger/StepOutCommandResponse.cs
310
using MasterDevs.ChromeDevTools; using Newtonsoft.Json; using System.Collections.Generic; namespace MasterDevs.ChromeDevTools.Protocol.Debugger { /// <summary> /// Steps out of the function call. /// </summary> [CommandResponse(ProtocolName.Debugger.StepOut)] public class StepOutCommandResponse { } }
mit
evanova/eve-crest-java
api/src/main/java/org/devfleet/crest/model/CrestCharacter.java
675
package org.devfleet.crest.model; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; public class CrestCharacter extends CrestItem { @JsonProperty private boolean isNPC; //FIXME maybe portraits @JsonProperty private CrestCorporation corporation; @JsonProperty("capsuleer") @JsonDeserialize(using = RefDeserializer.class) private String capsuleerRef; public boolean getNPC() { return isNPC; } public CrestCorporation getCorporation() { return corporation; } public String getCapsuleerRef() { return capsuleerRef; } }
mit
OFFLINE-GmbH/oc-snipcartshop-plugin
components/CurrencyPicker.php
1147
<?php namespace OFFLINE\SnipcartShop\Components; use Cms\Classes\ComponentBase; use Illuminate\Support\Collection; use OFFLINE\SnipcartShop\Models\CurrencySettings; use Redirect; class CurrencyPicker extends ComponentBase { use SetsVars; /** * Configured currencies for this shop. * @var Collection */ public $currencies = []; /** * Currently active currency. * @var string */ public $activeCurrency = []; public function componentDetails() { return [ 'name' => 'offline.snipcartshop::lang.components.currencyPicker.details.name', 'description' => 'offline.snipcartshop::lang.components.currencyPicker.details.description', ]; } public function onRun() { $this->setVar('currencies', CurrencySettings::currencies()); $this->setVar('activeCurrency', CurrencySettings::activeCurrency()); } public function onSwitchCurrency() { if ( ! $locale = post('currency')) { return; } CurrencySettings::setActiveCurrency($locale); return Redirect::back(); } }
mit
voks/accounting
application/views/report/trial_summary_report.php
753
<div class='jumbotron'> <span>Trial Balance Summary Report</span> </div> </div> <div class='content row'> <table class='table text-tbody table-bordered'> <thead> <tr > <th class=''>Account Code</th> <th class=''>Account Name</th> <th class=''>Debit</th> <th class=''>Credit</th> </tr> </thead> <tbody> <?php foreach($trial_balance as $key){ echo "<tr>"; echo " <td class='padding-left-10'>".$key->account_code." - ".$key->sub_code."</td>"; echo " <td class='padding-left-10'>".$key->account_name."</td>"; echo " <td class='padding-left-10'>".$key->trans_dr."</td>"; echo " <td class='padding-right-5 text-right'>".$key->trans_cr."</td>"; echo "</tr>"; } ?> </tbody> </table> </div>
mit
wachterjohannes/task-library
src/Tasks/Naming/NamingFactory.php
412
<?php namespace Tasks\Naming; use Tasks\Scheduler\TaskInterface; use Tasks\TaskRunner\WorkerInterface; class NamingFactory implements NamingFactoryInterface { public function fromWorker(WorkerInterface $worker) { return sprintf('%s.%s', $worker->getNamespace(), $worker->getName()); } public function fromTask(TaskInterface $task) { return $task->getWorkerName(); } }
mit
zhenhua-lee/tools
algorithm/beauty/getMaxAndMin.js
1031
/* * 得到最大、最小数 */ // 该方法将求最大与最小割裂开来了,没有充分利用二者的比较 function getMaxAndMin(arr) { if (arr.length < 1) return new Error('can not be empty array'); var max = arr[0], min = arr[0]; var len = arr.length; for (var i=0; i<len; i++) { if (arr[i] > max) { max = arr[i]; continue; } if (arr[i] < min) { min = arr[i]; } } return {max: max, min: min}; } function getMaxAndMin2(arr) { if (arr.length < 1) return new Error('can not be empty array'); var max = arr[0], min = arr[0]; var len = arr.length; for(var i=0; i<len; i+=2) { var m = n = arr[i]; if (arr[i] > arr[i+1] && i+1 < len) { m = arr[i]; n = arr[i+1]; } else if (arr[i] < arr[i+1] && i+1 < len) { m = arr[i+1]; n = arr[i]; } if (max < m) { max = m; } if (min > n) { min = n; } } return {max: max, min: min}; } module.exports = { method1: getMaxAndMin, method2: getMaxAndMin2, };
mit
sharad4u/testrepo
LibGit2Sharp.Tests/SignatureFixture.cs
1670
using System; using LibGit2Sharp.Tests.TestHelpers; using Xunit; using Xunit.Extensions; namespace LibGit2Sharp.Tests { public class SignatureFixture : BaseFixture { [Theory] [InlineData("\0Leading zero")] [InlineData("Trailing zero\0")] [InlineData("Zero \0inside")] [InlineData("\0")] [InlineData("\0\0\0")] public void CreatingASignatureWithANameContainingZerosThrows(string name) { Assert.Throws<ArgumentException>(() => new Signature(name, "me@there.com", DateTimeOffset.Now)); } [Theory] [InlineData("\0Leading@zero.com")] [InlineData("Trailing@zero.com\0")] [InlineData("Zero@\0inside.com")] [InlineData("\0")] [InlineData("\0\0\0")] public void CreatingASignatureWithAnEmailContainingZerosThrows(string email) { Assert.Throws<ArgumentException>(() => new Signature("Me", email, DateTimeOffset.Now)); } [Fact] public void CreatingASignatureWithBadParamsThrows() { Assert.Throws<ArgumentNullException>(() => new Signature(null, "me@there.com", DateTimeOffset.Now)); Assert.Throws<ArgumentException>(() => new Signature(string.Empty, "me@there.com", DateTimeOffset.Now)); Assert.Throws<ArgumentNullException>(() => new Signature("Me", null, DateTimeOffset.Now)); } [Fact] public void CanCreateASignatureWithAnEmptyEmail() { var sig = new Signature("Me", string.Empty, DateTimeOffset.Now); Assert.Equal(string.Empty, sig.Email); } } } /* This is extra327 */
mit
BUPT-OJ-V4/BOJ-V4
submission/views.py
6703
from rest_framework import viewsets from rest_framework.permissions import IsAuthenticated from rest_framework.decorators import detail_route from bojv4.conf import LANGUAGE from .abstract_models import NormalSubmission as Submission from .forms import SubmissionForm from .serializers import SubmissionSerializer from .tables import SubmissionTable from .filters import SubmissionFilter from django.core.urlresolvers import reverse from django.views.generic import ListView, DetailView from django.views.generic.edit import CreateView from django.http import Http404, HttpResponseForbidden from django.utils.decorators import method_decorator from django.core.exceptions import PermissionDenied from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User from django.contrib.messages.views import SuccessMessageMixin from django.db.models.query import EmptyQuerySet from rest_framework.permissions import BasePermission from django.shortcuts import get_object_or_404 from guardian.shortcuts import get_objects_for_user from django_tables2 import RequestConfig from django.core.cache import cache from problem.models import Problem from ojuser.models import GroupProfile import logging logger = logging.getLogger('django') # from guardian.shortcuts import get_objects_for_user def disable_normal_submission(time_minutes): cache.set("disable_normal_submission", True, time_minutes * 60) def is_normal_submission_disabled(): if cache.get("disable_normal_submission"): return True return False def is_submission_enabled_or_privileged(user): if user.is_superuser or user.is_staff or user.profile.is_teacher: return if is_normal_submission_disabled(): raise PermissionDenied class CaseResultPermission(BasePermission): def has_object_permission(self, request, view, obj): if request.user == obj.user: return True return obj.submission.problem.view_by_user(user=request.user) class SubmissionViewSet(viewsets.ModelViewSet): queryset = Submission.objects.all() serializer_class = SubmissionSerializer permission_classes = (IsAuthenticated,) class SubmissionListView(ListView): model = Submission paginate_by = 15 template_name = 'submission/submission_list.html' def get_queryset(self): groups = get_objects_for_user(self.user, 'ojuser.view_groupprofile', GroupProfile) res = Problem.objects.filter(groups__in=groups).all() ans = Submission.objects.filter(problem__groups__in=groups).order_by('-pk').distinct() self.filter = SubmissionFilter( self.request.GET, queryset=ans, problems=res ) return self.filter.qs @method_decorator(login_required) def dispatch(self, request, *args, **kwargs): is_submission_enabled_or_privileged(request.user) self.user = request.user return super(SubmissionListView, self).dispatch(request, *args, **kwargs) def get_context_data(self, **kwargs): context = super(SubmissionListView, self).get_context_data(**kwargs) submissions_table = SubmissionTable(self.get_queryset()) RequestConfig(self.request, paginate={'per_page': self.paginate_by}).configure(submissions_table) # add filter here context['submissions_table'] = submissions_table # add filter here context['filter'] = self.filter return context class SubmissionDetailView(DetailView): model = Submission template_name = 'submission/submission_detail.html' @method_decorator(login_required) def dispatch(self, request, pk=None, *args, **kwargs): is_submission_enabled_or_privileged(request.user) self.user = request.user problem = self.get_object().problem if not problem or not problem.view_by_user(request.user): raise PermissionDenied try: csub = self.get_object().contest_submission raise PermissionDenied except: pass return super(SubmissionDetailView, self).dispatch(request, *args, **kwargs) def get_context_data(self, **kwargs): status = self.object.get_status_display() context = super(SubmissionDetailView, self).get_context_data(**kwargs) context['status'] = status if self.request.user.is_superuser or self.request.user.is_staff: context['show_submit_ip'] = True ce = self.object.get_info('compile-message') context['compile_message'] = ce cases = self.object.cases if self.object.status == 'JD' and len(cases) < self.object.problem.cases.count(): cases.append({ 'status': 'Judging', 'position': len(cases), 'time': 0, 'memory': 0, }) context['cases'] = cases return context class SubmissionCreateView(SuccessMessageMixin, CreateView): model = Submission form_class = SubmissionForm template_name = 'submission/submission_create_form.html' success_message = "your submission has been created successfully" @method_decorator(login_required) def dispatch(self, request, pid=None, *args, **kwargs): is_submission_enabled_or_privileged(request.user) pid = self.kwargs['pid'] self.problem = Problem.objects.filter(pk=pid).first() if not self.problem or not self.problem.view_by_user(request.user): raise PermissionDenied if not self.problem.is_checked: raise PermissionDenied self.user = request.user return super(SubmissionCreateView, self).dispatch(request, *args, **kwargs) def get_form_kwargs(self): kw = super(SubmissionCreateView, self).get_form_kwargs() kw['qs'] = LANGUAGE.choice() return kw def get_context_data(self, **kwargs): context = super(SubmissionCreateView, self).get_context_data(**kwargs) context['problem'] = self.problem return context def form_valid(self, form): if self.problem.forbid(self.request.user): raise PermissionDenied self.object = form.save(commit=False) self.object.problem = self.problem self.object.user = self.request.user self.object.submit_ip = self.request.META.get('REMOTE_ADDR', None) self.object.save() # self.object.code_file.write(str(self.object.pk), i self.object.judge(form.cleaned_data['code']) return super(SubmissionCreateView, self).form_valid(form) def get_success_url(self): return reverse('submission:submission-list')
mit
alsatian-test/tap-bark
test/unit-tests/src/output/set-progress.test.ts
2809
import { Test, TestCase, Expect, SpyOn } from "alsatian"; import { StreamBuilder } from "../../../_builders/stream-builder"; import { OutputProviderBuilder } from "../../../_builders/output-provider-builder"; import { Output } from "../../../../src/output/output"; export default class SetProgressTests { @Test() public streamCursorStartsByMovingToZeroNegativeOne() { const stream = new StreamBuilder().build(); const outputProvider = new OutputProviderBuilder().build(); const output = new Output(stream, outputProvider); SpyOn(stream, "moveCursor"); output.setProgress(100, 100); Expect(stream.moveCursor).toHaveBeenCalledWith(0, -1); } @Test() public streamCursorClearsLine() { const stream = new StreamBuilder().build(); const outputProvider = new OutputProviderBuilder().build(); const output = new Output(stream, outputProvider); SpyOn(stream, "clearLine"); output.setProgress(100, 100); Expect(stream.clearLine).toHaveBeenCalled(); } @TestCase(0, 1, "| |") @TestCase(0, 10, "| |") @TestCase(0, 100, "| |") @TestCase(1, 1, "|====================|") @TestCase(1, 10, "|== |") @TestCase(1, 100, "| |") @TestCase(4, 10, "|======== |") @TestCase(42, 100, "|======== |") @TestCase(10, 10, "|====================|") @TestCase(100, 100, "|====================|") public streamIsWrittenWithCorrectProgressBar(currentProgress: number, total: number, expectedProgressBar: string) { const stream = new StreamBuilder().build(); const outputProvider = new OutputProviderBuilder().build(); const output = new Output(stream, outputProvider); SpyOn(stream, "write"); output.setProgress(currentProgress, total); Expect(stream.write).toHaveBeenCalledWith(expectedProgressBar); } @Test() public streamCursorMovesToZeroOne() { const stream = new StreamBuilder().build(); const outputProvider = new OutputProviderBuilder().build(); const output = new Output(stream, outputProvider); SpyOn(stream, "moveCursor"); output.setProgress(100, 100); Expect(stream.moveCursor).toHaveBeenCalledWith(0, 1); } @Test() public streamCursorFinishesByPositioningCursorAtZeroUndefined() { const stream = new StreamBuilder().build(); const outputProvider = new OutputProviderBuilder().build(); const output = new Output(stream, outputProvider); SpyOn(stream, "cursorTo"); output.setProgress(100, 100); Expect(stream.cursorTo).toHaveBeenCalledWith(0, undefined); } }
mit
xcrespo/FDM
app/src/main/java/com/caracocha/fdm/EventDetailFragment.java
5039
package com.caracocha.fdm; import android.content.Intent; import android.net.Uri; import android.os.Bundle; import android.support.v7.widget.CardView; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.app.Fragment; import android.widget.ImageView; import android.widget.TextView; import com.bumptech.glide.Glide; /** * A fragment representing a single Event detail screen. * This fragment is either contained in a {@link EventListActivity} */ public class EventDetailFragment extends Fragment implements View.OnClickListener { private static final String DEBUG_TAG = "EventDetailFragment"; private Item event; /** * Mandatory empty constructor for the fragment manager to instantiate the * fragment (e.g. upon screen orientation changes). */ public EventDetailFragment() { } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Bundle bundle = getArguments(); event = bundle.getParcelable("event"); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.fragment_event_detail, container, false); CardView cvShare = (CardView) rootView.findViewById(R.id.fragment_event_detail_share); cvShare.setOnClickListener(this); ImageView ivHeader = (ImageView) rootView.findViewById(R.id.fragment_event_detail_header); int imgHeaderID = getActivity().getResources().getIdentifier("header_" + event.sCategory.toLowerCase(), "drawable", getActivity().getPackageName()); if(event.sImgURL != null) { Glide.with(getActivity()).load(event.sImgURL) //.placeholder(R.drawable.loading_spinner_md) .error(imgHeaderID).into(ivHeader); ivHeader.setOnClickListener(this); } else { if (imgHeaderID != 0) { ivHeader.setImageResource(imgHeaderID); } } TextView tvTitle = (TextView) rootView.findViewById(R.id.fragment_event_detail_title); tvTitle.setText(event.sTitle); TextView tvPlace = (TextView) rootView.findViewById(R.id.fragment_event_detail_place); tvPlace.setText(event.sPlace); CardView cvMap = (CardView) rootView.findViewById(R.id.fragment_event_detail_map); if(event.sLatitude != null && event.sLongitude != null) { cvMap.setOnClickListener(this); } else { cvMap.setVisibility(View.GONE); } TextView tvTime = (TextView) rootView.findViewById(R.id.fragment_event_detail_time); tvTime.setText(event.sStartTime); TextView tvDate = (TextView) rootView.findViewById(R.id.fragment_event_detail_date); tvDate.setText(event.sDate); CardView cvWeb = (CardView) rootView.findViewById(R.id.fragment_event_detail_web); if (event.sURL != null) { cvWeb.setOnClickListener(this); } else { cvWeb.setVisibility(View.GONE); } if (event.sPrice != null) { TextView tvPrice = (TextView) rootView.findViewById(R.id.fragment_event_detail_price); tvPrice.setText(event.sPrice); } TextView tvDetails = (TextView) rootView.findViewById(R.id.fragment_event_detail_details); if (event.sDescription != null) { tvDetails.setText(event.sDescription); } else { tvDetails.setVisibility(View.GONE); } return rootView; } @Override public void onClick(View view) { Intent intent; switch (view.getId()) { case R.id.fragment_event_detail_map: intent = new Intent(Intent.ACTION_VIEW, Uri.parse((new StringBuilder("geo:0,0?q=")).append(event.sLatitude).append(",") .append(event.sLongitude).append("(").append(event.sPlace).append(")").toString())); startActivity(intent); break; case R.id.fragment_event_detail_web: intent = new Intent(Intent.ACTION_VIEW, Uri.parse(event.sURL)); startActivity(intent); break; case R.id.fragment_event_detail_share: intent = new Intent(Intent.ACTION_SEND); intent.setType("text/plain"); intent.putExtra(Intent.EXTRA_TEXT, event.sTitle + " @ " + event.sPlace + " (" + event.sDate + ")" + " #fdmapp"); startActivity(Intent.createChooser(intent, getResources().getString(R.string.ab_share))); break; case R.id.fragment_event_detail_header: intent = new Intent(getActivity(), HeaderImageActivity.class); intent.putExtra("IMAGE", event.sImgURL); startActivity(intent); break; } } }
mit
buriy/artmind3
src/engine/InnerNode.java
2191
package engine; import java.util.HashSet; import util.Utils; public class InnerNode extends Node { protected final Columns neurons; protected final Field output; protected final Sensors sensors; protected final int layer; public InnerNode(Field input, Field output, Options opt, int layer) { super(input, opt); this.output = output; this.layer = layer; this.neurons = new Columns(opt, output, layer); if (layer == 0) { this.sensors = new AdaptiveSensors(opt, input); } else { this.sensors = new FieldSensors(opt, input); } } public void addSecondaryInput(Field input){ this.sensors.addSecondaryInput(input); } @Override public NetState operate() { NetState state = NetState.LEARNING; if (learnTime < opt.learnTime()) { int[] bits = sensors.learn(); incLearnTime(); output.reset(); for(int b: bits){ output.set(b, 0, 255); } state = neurons.learn(bits); if (layer == 1) { state = NetState.LEARNING; } if (layer == 2) { state = NetState.LEARNING; } if (layer == 0) { if (neurons.prediction()) { state = NetState.LEARNING; } } if (learnTime == opt.learnTime() - 1) { state = NetState.RESTART; } } else { int[] bits = sensors.run(); state = neurons.run(bits); } return state; } public boolean isLearning() { return neurons.isLearning(); } @Override public String toString() { int[] active = firedSensors(); return input.toString() + " -> \n" + sensors.restoreWinners(active, input); } private int[] firedSensors() { HashSet<Integer> active = new HashSet<Integer>(); for (int i = 0; i < output.width(); i++) { for (int j = 0; j < output.height(); j++) { if(output.test(i, j)){ active.add(i); } } } return Utils.toIntArray(active); } public int[] restore(int[] field) { int width = output.width(); int active[] = new int[width]; for (int i = 0; i < width; i++) { int actives = 0; for (int j = 0; j < output.height(); j++) { actives += field[i + j * width]; } active[i] = actives; } int divider = Utils.maximum(active, 1); int[] data = sensors.restore(active, divider, input); return data; } }
mit
radare/bitcointools
util.py
809
# # Misc util routines # from bsddb3.db import * def long_hex(bytes): return bytes.encode('hex_codec') def short_hex(bytes): t = bytes.encode('hex_codec') if len(t) < 11: return t return t[0:4]+"..."+t[-4:] def determine_db_dir(): import os import os.path import platform if platform.system() == "Darwin": return os.path.expanduser("~/Library/Application Support/Bitcoin/") elif platform.system() == "Windows": return os.path.join(os.environ['APPDATA'], "Bitcoin") return os.path.expanduser("~/.bitcoin") def create_env(db_dir=None): if db_dir is None: db_dir = determine_db_dir() db_env = DBEnv(0) r = db_env.open(db_dir, (DB_CREATE|DB_INIT_LOCK|DB_INIT_LOG|DB_INIT_MPOOL| DB_INIT_TXN|DB_THREAD|DB_RECOVER)) return db_env
mit
javiermvaldecantos/video-player
server.js
1669
var http = require("http"), url = require("url"), path = require("path"), fs = require("fs"), port = process.argv[2] || 8888; var offset = "/www/" http.createServer(function(request, response) { var uri = url.parse(request.url).pathname , filename = path.join(process.cwd() + offset, uri); fs.exists(filename, function(exists) { if(!exists) { response.writeHead(404, {"Content-Type": "text/plain"}); response.write("404 Not Found\n"); response.end(); console.log("couldn't find: " + filename); return; } if (fs.statSync(filename).isDirectory()) filename += '/index.html'; fs.readFile(filename, "binary", function(err, file) { if(err) { response.writeHead(500, {"Content-Type": "text/plain"}); response.write(err + "\n"); response.end(); console.log("error while loading: " + filename); return; } var type = "text/plain"; var extn = path.extname(filename); switch(extn) { case ".json": type = "application/json" break; case ".js": type = "text/javascript" break; case ".html": type = "text/html"; break; case ".css": type = "text/css"; break; default: type = "text/plain"; } response.writeHead(200, {"Content-Type": type}); response.write(file, "binary"); response.end(); }); }); }).listen(parseInt(port, 10)); console.log("Go to\n => http://localhost:" + port + "/index.html" + "\nCTRL + C to shutdown");
mit
fguini/leathergoods
Data/DataAccessComponent.cs
1021
using System; using System.Data; using System.Configuration; using System.Text.RegularExpressions; using Framework; using Framework.Db; namespace Data { /// <summary> /// Base data access component class. /// </summary> public abstract class DataAccessComponent { private static string ConnectionString = AppSettings.DbConnectionString; private static string DbProvider = AppSettings.DbProvider; internal static Database Db; static DataAccessComponent() { Db = DatabaseFactory.CreateDatabase(DbProvider, ConnectionString); } protected static T GetDataValue<T>(IDataReader dr, string columnName) { var i = dr.GetOrdinal(columnName); if (!dr.IsDBNull(i)) return (T)dr.GetValue(i); return default(T); } protected string FormatFilterStatement(string filter) { return Regex.Replace(filter, "^(AND|OR)", string.Empty); } } }
mit
regou/angular-nav-plus-hammer
libs/mobile-nav.js
15730
/* * angular-mobile-nav by Andy Joslin && regou * https://github.com/regou/angular-mobile-nav * @license MIT License http://goo.gl/Z8Nlo * * add navigateing list route-info support by regou * Adjust back Action strategy by regou */ angular.module('ajoslin.mobile-navigate', ['ngAnimate','ngRoute']) .run(['$navigate', '$rootScope', function($navigate, $rootScope) { //Android back button functionality for phonegap document.addEventListener("deviceready", function() { document.addEventListener("backbutton", function() { $rootScope.$apply(function() { var backSuccess = $navigate.back(); if (!backSuccess) { navigator.app.exitApp(); } }); }); }); }]); /* * $change * Service to transition between two elements */ angular.module('ajoslin.mobile-navigate') .provider('$change', function() { var transitionPresets = { //[nextClass, prevClass] //Modal: new page pops up, old page sits there until new page is over it 'modal': ['modal', ''], 'none': ['', ''] }; var defaultOptions = { 'prefix': 'mb-' }; var IN_CLASS = "in"; var OUT_CLASS = "out"; var REVERSE_CLASS = "reverse"; var DONE_CLASS = "done"; var ANIMATION_END = "webkitAnimationName" in document.documentElement.style ? "webkitAnimationEnd" : "animationend"; this.setTransitionPreset = function(transitionName, inClass, outClass) { inClass = inClass || ''; outClass = outClass || inClass; //Default to outClass same as inClass transitionPresets[transitionName] = [inClass, outClass]; }; this.options = function(opts) { defaultOptions = angular.extend(defaultOptions, opts || {}); }; this.$get = ['$q', '$rootScope', function($q, $rootScope) { return function change(next, prev, transType, reverse, options) { options = angular.extend(options || {}, defaultOptions); var deferred = $q.defer(), nextTransClass, prevTransClass; //buildClassString //Transforms array of classes into prefixed class string //(better for performance than multiple .addClass() //@param classes: Array{string} //@return string classNames function buildClassString(classes) { return classes.reduce(function(accumulator, cls) { return accumulator + (cls ? (' ' + options.prefix + cls) : ''); }, ''); } //Convert a preset (eg 'modal') to its array of preset classes if it exists //else, just convert eg 'slide' to ['slide', 'slide'], so both elements get it //The array layout is [nextinationClass, prevClass] var transition = transitionPresets[transType] ? transitionPresets[transType] : [transType, transType]; //Hack for white flash: z-index stops flash, offsetWidth thing forces z-index to apply next.css('z-index','-100'); next[0].offsetWidth += 0; var nextClasses = buildClassString([ reverse ? OUT_CLASS : IN_CLASS, (nextTransClass = transition[reverse ? 1 : 0]), reverse && REVERSE_CLASS || '' ]); next.addClass(nextClasses); var prevClasses; if (prev) { prevClasses = buildClassString([ reverse ? IN_CLASS : OUT_CLASS, (prevTransClass = transition[reverse ? 0 : 1]), reverse && REVERSE_CLASS || '' ]); prev.addClass(prevClasses); } next.css('z-index', ''); next[0].offsetWidth += 0; function done() { $rootScope.$apply(function() { deferred.resolve(); }); } //Find which element (sometimes none) to bind for ending var boundElement; if (nextTransClass && nextTransClass.length) { (boundElement = next).bind(ANIMATION_END, done); } else if (prev && prevTransClass && prevTransClass.length) { (boundElement = prev).bind(ANIMATION_END, done); } else { deferred.resolve(); } deferred.promise.then(function() { boundElement && boundElement.unbind(ANIMATION_END, done); next.removeClass(nextClasses); prev && prev.removeClass(prevClasses); }); //Let the user of change 'cancel' to finish transition early if they wish deferred.promise.cancel = function() { deferred.resolve(); }; return deferred.promise; }; }]; }); angular.module('ajoslin.mobile-navigate') .provider('$navigate', function() { this.$get = ['$rootScope', '$location', '$route', function($rootScope, $location, $route) { var nav = {}, navHistory = []; //we keep our own version of history and ignore window.history function Page(path, transition, isReverse) { var _path = path, _transition = transition || 'slide', _isReverse = isReverse, _onceTransition; this.transition = function() { var trans; if (_onceTransition) { trans = _onceTransition; _onceTransition = null; } else { trans = _transition; } return trans; }; this.path = function() { return _path; }; this.reverse = function() { return _isReverse; }; //For setting a transition on a page - but only one time //Eg say on startup, we want to transition in with 'none', //but want to be 'slide' after that this.transitionOnce = function(trans) { _onceTransition = trans; }; } function navigate(destination, source, isReverse,isBack) { $rootScope.$broadcast('$pageTransitionStart', destination, source, isReverse,isBack); nav.current = nav.next; } /* * Will listen for a route change success and call the selected callback * Only one listen is ever active, so if you press for example * /link1 then press back before /link1 is done, it will go listen for the back */ nav.onRouteSuccess = null; //Add a default onroutesuccess for the very first page function defaultRouteSuccess($event, next, last) { nav.current && navHistory.push([nav.current,next]); nav.next = new Page($location.path()); nav.next.transitionOnce('none'); navigate(nav.next); nav.onRouteSuccess = null; } $rootScope.$on('$routeChangeSuccess', function($event, next, last) { // Only navigate if it's a valid route and it's not gonna just redirect immediately if (!next.$$route || !next.$$route.redirectTo) { (nav.onRouteSuccess || defaultRouteSuccess)($event, next, last); } //Make route history accessible by regou $rootScope.$broadcast('$pageNaved',navHistory,next, last); }); /* * go -transitions to new page * @param path - new path * @param {optional} String transition * @param {optional} boolean isReverse, default false */ nav.go = function go(path, transition, isReverse) { if (typeof transition == 'boolean') { isReverse = transition; transition = null; } $location.path(path); //Wait for successful route change before actually doing stuff nav.onRouteSuccess = function($event, next, last) { nav.current && navHistory.push([nav.current,next]); nav.next = new Page(path, transition || (next.$$route && next.$$route.transition), isReverse); navigate(nav.next, nav.current, false); }; }; //Sometimes you want to erase history nav.eraseHistory = function(str,routeObj) { navHistory=[]; if(routeObj){ navHistory.push([str,routeObj]) } return navHistory; }; nav.getHistory=function(){ return navHistory; }; nav.back = function() { try{ if (navHistory.length > 0) { var previous = navHistory[navHistory.length-1][0]; $location.path(previous.path()); nav.onRouteSuccess = function() { navHistory.pop(); nav.next = previous; navigate(nav.next, nav.current, true,true); }; return true; } return false; }catch(e){return false} }; return nav; }]; }); angular.module('ajoslin.mobile-navigate') .directive('mobileView', ['$rootScope', '$compile', '$controller', '$route', '$change', '$q', function($rootScope, $compile, $controller, $route, $change, $q) { function link(scope, viewElement, attrs) { //Insert page into dom function insertPage(page) { var current = $route.current, locals = current && current.locals; page.element = angular.element(document.createElement("div")); page.element.html(locals.$template); page.element.addClass('mb-page'); //always has to have page class page.scope = scope.$new(); if (current.controller) { locals.$scope = page.scope; page.controller = $controller(current.controller, locals); page.element.contents().data('$ngControllerController', page.controller); } $compile(page.element.contents())(page.scope); if (locals && locals.$template) { // only append page element if a template exists viewElement.append(page.element); } page.scope.$emit('$viewContentLoaded'); page.scope.$eval(attrs.onLoad); return page; } var currentTrans; scope.$on('$pageTransitionStart', function ($event, dest, source, reverse,isBack) { function changePage() { var current = $route.current && $route.current.$$route || {}; if(isBack){reverse=true;} var transition = reverse ? source.transition() : dest.transition(); insertPage(dest); //If the page is marked as reverse, reverse the direction //But,if it's a nav.back Action, keep reverse==true regou@2013.9.9 if (dest.reverse() || current.reverse) { if(!isBack){reverse = !reverse;} } function doTransition() { var promise = $change(dest.element, (source ? source.element : null), transition, reverse); promise.then(function() { if (source) { $rootScope.$broadcast('$pageTransitionSuccess', dest, source); source.scope.$destroy(); source.element.remove(); source = undefined; } }); return promise; } //Set next element to display: none, then wait until transition is //ready, then show it again. dest.element.css('display', 'none'); //Allow a deferTransition expression, which is allowed to return a promise. //The next page will be inserted, but not transitioned in until the promise //is fulfilled. var deferTransitionPromise = scope.$eval(attrs.deferTransition) || $q.when(); deferTransitionPromise.cancel = function() { cancelled = true; //Undo display none from waiting for transition dest.element.css('display', ''); }; var cancelled = false; deferTransitionPromise.then(function() { if (!cancelled) { //Undo display none from waiting for transition dest.element.css('display', ''); return doTransition(); } }); return deferTransitionPromise; } currentTrans && currentTrans.cancel(); currentTrans = changePage(dest, source, reverse); }); } return { restrict: 'EA', link: link }; }]) .directive('scrollable', ['$route', function($route) { var scrollCache = {}; return { restrict: 'EA', link: function(scope, elm, attrs) { var route = $route.current ? $route.current.$$route : {}; var template = route.templateUrl || route.template; var rawElm = elm[0]; //On scope creation, see if we remembered any scroll for this templateUrl //If we did, set it if (template) { //Set oldScroll after a timeout so the page has time to fully load setTimeout(function() { var oldScroll = scrollCache[template]; if (oldScroll) { rawElm.scrollTop = oldScroll; } }); scope.$on('$destroy', function() { scrollCache[template] = rawElm.scrollTop; }); } } }; }]);
mit
estin/django-json-rpc
jsonrpc/site.py
9609
import datetime, decimal from functools import wraps from uuid import uuid1 from jsonrpc._json import loads, dumps from jsonrpc.exceptions import * from jsonrpc.types import * from django.core import signals empty_dec = lambda f: f try: from django.views.decorators.csrf import csrf_exempt except (NameError, ImportError): csrf_exempt = empty_dec from django.core.serializers.json import DjangoJSONEncoder NoneType = type(None) encode_kw = lambda p: dict([(str(k), v) for k, v in p.iteritems()]) def encode_kw11(p): if not type(p) is dict: return {} ret = p.copy() removes = [] for k, v in ret.iteritems(): try: int(k) except ValueError: pass else: removes.append(k) for k in removes: ret.pop(k) return ret def encode_arg11(p): if type(p) is list: return p elif not type(p) is dict: return [] else: pos = [] d = encode_kw(p) for k, v in d.iteritems(): try: pos.append(int(k)) except ValueError: pass pos = list(set(pos)) pos.sort() return [d[str(i)] for i in pos] def validate_params(method, D): if type(D['params']) == Object: keys = method.json_arg_types.keys() if len(keys) != len(D['params']): raise InvalidParamsError('Not enough params provided for %s' % method.json_sig) for k in keys: if not k in D['params']: raise InvalidParamsError('%s is not a valid parameter for %s' % (k, method.json_sig)) if not Any.kind(D['params'][k]) == method.json_arg_types[k]: raise InvalidParamsError('%s is not the correct type %s for %s' % (type(D['params'][k]), method.json_arg_types[k], method.json_sig)) elif type(D['params']) == Array: arg_types = method.json_arg_types.values() try: for i, arg in enumerate(D['params']): if not Any.kind(arg) == arg_types[i]: raise InvalidParamsError('%s is not the correct type %s for %s' % (type(arg), arg_types[i], method.json_sig)) except IndexError: raise InvalidParamsError('Too many params provided for %s' % method.json_sig) else: if len(D['params']) != len(arg_types): raise InvalidParamsError('Not enough params provided for %s' % method.json_sig) class JSONRPCSite(object): "A JSON-RPC Site" def __init__(self, json_encoder=DjangoJSONEncoder): self.urls = {} self.uuid = str(uuid1()) self.version = '1.0' self.name = 'django-json-rpc' self.register('system.describe', self.describe) self.set_json_encoder(json_encoder) def set_json_encoder(self, json_encoder=DjangoJSONEncoder): self.json_encoder = json_encoder def register(self, name, method): self.urls[unicode(name)] = method def empty_response(self, version='1.0'): resp = {'id': None} if version == '1.1': resp['version'] = version return resp if version == '2.0': resp['jsonrpc'] = version resp.update({'error': None, 'result': None}) return resp def validate_get(self, request, method): encode_get_params = lambda r: dict([(k, v[0] if len(v) == 1 else v) for k, v in r]) if request.method == 'GET': method = unicode(method) if method in self.urls and getattr(self.urls[method], 'json_safe', False): D = { 'params': encode_get_params(request.GET.lists()), 'method': method, 'id': 'jsonrpc', 'version': '1.1' } return True, D return False, {} def response_dict(self, request, D, is_batch=False, version_hint='1.0', json_encoder=None): json_encoder = json_encoder or self.json_encoder version = version_hint response = self.empty_response(version=version) apply_version = {'2.0': lambda f, r, p: f(r, **encode_kw(p)) if type(p) is dict else f(r, *p), '1.1': lambda f, r, p: f(r, *encode_arg11(p), **encode_kw(encode_kw11(p))), '1.0': lambda f, r, p: f(r, *p)} try: # params: An Array or Object, that holds the actual parameter values # for the invocation of the procedure. Can be omitted if empty. if 'params' not in D: D['params'] = [] if 'method' not in D or 'params' not in D: raise InvalidParamsError('Request requires str:"method" and list:"params"') if D['method'] not in self.urls: raise MethodNotFoundError('Method not found') if 'jsonrpc' in D: if str(D['jsonrpc']) not in apply_version: raise InvalidRequestError('JSON-RPC version %s not supported.' % D['jsonrpc']) version = request.jsonrpc_version = response['jsonrpc'] = str(D['jsonrpc']) elif 'version' in D: if str(D['version']) not in apply_version: raise InvalidRequestError('JSON-RPC version %s not supported.' % D['version']) version = request.jsonrpc_version = response['version'] = str(D['version']) else: request.jsonrpc_version = '1.0' method = self.urls[str(D['method'])] if getattr(method, 'json_validate', False): validate_params(method, D) if 'id' in D and D['id'] is not None: # regular request response['id'] = D['id'] if version in ('1.1', '2.0') and 'error' in response: response.pop('error') elif is_batch: # notification, not ok in a batch format, but happened anyway raise InvalidRequestError R = apply_version[version](method, request, D['params']) if 'id' not in D or ('id' in D and D['id'] is None): # notification return None, 204 encoder = json_encoder() if not sum(map(lambda e: isinstance(R, e), # type of `R` should be one of these or... (dict, str, unicode, int, long, list, set, NoneType, bool))): try: rs = encoder.default(R) # ...or something this thing supports except TypeError as exc: raise TypeError("Return type not supported, for %r" % R) response['result'] = R status = 200 except Error as e: signals.got_request_exception.send(sender=self.__class__, request=request) response['error'] = e.json_rpc_format if version in ('1.1', '2.0') and 'result' in response: response.pop('result') status = e.status except Exception as e: # exception missed by others signals.got_request_exception.send(sender=self.__class__, request=request) other_error = OtherError(e) response['error'] = other_error.json_rpc_format status = other_error.status if version in ('1.1', '2.0') and 'result' in response: response.pop('result') # Exactly one of result or error MUST be specified. It's not # allowed to specify both or none. if version in ('1.1', '2.0') and 'error' in response and not response['error']: response.pop('error') return response, status @csrf_exempt def dispatch(self, request, method='', json_encoder=None): from django.http import HttpResponse json_encoder = json_encoder or self.json_encoder try: # in case we do something json doesn't like, we always get back valid json-rpc response response = self.empty_response() if request.method.lower() == 'get': valid, D = self.validate_get(request, method) if not valid: raise InvalidRequestError('The method you are trying to access is ' 'not available by GET requests') elif not request.method.lower() == 'post': raise RequestPostError else: try: if hasattr(request, "body"): D = loads(request.body) else: D = loads(request.raw_post_data) except: raise InvalidRequestError if type(D) is list: response = [self.response_dict(request, d, is_batch=True, json_encoder=json_encoder)[0] for d in D] status = 200 else: response, status = self.response_dict(request, D, json_encoder=json_encoder) if response is None and (not u'id' in D or D[u'id'] is None): # a notification return HttpResponse('', status=status) json_rpc = dumps(response, cls=json_encoder) except Error as e: signals.got_request_exception.send(sender=self.__class__, request=request) response['error'] = e.json_rpc_format status = e.status json_rpc = dumps(response, cls=json_encoder) except Exception as e: # exception missed by others signals.got_request_exception.send(sender=self.__class__, request=request) other_error = OtherError(e) response['result'] = None response['error'] = other_error.json_rpc_format status = other_error.status json_rpc = dumps(response,cls=json_encoder) return HttpResponse(json_rpc, status=status, content_type='application/json-rpc') def procedure_desc(self, key): M = self.urls[key] return { 'name': M.json_method, 'summary': M.__doc__, 'idempotent': M.json_safe, 'params': [{'type': str(Any.kind(t)), 'name': k} for k, t in M.json_arg_types.iteritems()], 'return': {'type': M.json_return_type}} def service_desc(self): return { 'sdversion': '1.0', 'name': self.name, 'id': 'urn:uuid:%s' % str(self.uuid), 'summary': self.__doc__, 'version': self.version, 'procs': [self.procedure_desc(k) for k in self.urls.iterkeys() if self.urls[k] != self.describe]} def describe(self, request): return self.service_desc() jsonrpc_site = JSONRPCSite()
mit
PaulMcMillan/hue
hue/__init__.py
21
from base import Hue
mit
yogeshsaroya/new-cdnjs
ajax/libs/jquery-ui-map/3.0-rc1/min/jquery.ui.map.full.min.js
129
version https://git-lfs.github.com/spec/v1 oid sha256:e32536a888928f9c0388689d30b6e673e0d6f36c8bb146a7692dfc4886059b12 size 3959
mit
JeroMiya/androidmono
MonoJavaBridge/android/generated/android/provider/SearchRecentSuggestions.cs
4646
namespace android.provider { [global::MonoJavaBridge.JavaClass()] public partial class SearchRecentSuggestions : java.lang.Object { internal new static global::MonoJavaBridge.JniGlobalHandle staticClass; protected SearchRecentSuggestions(global::MonoJavaBridge.JNIEnv @__env) : base(@__env) { } private static global::MonoJavaBridge.MethodId _m0; protected virtual void truncateHistory(android.content.ContentResolver arg0, int arg1) { global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::android.provider.SearchRecentSuggestions.staticClass, "truncateHistory", "(Landroid/content/ContentResolver;I)V", ref global::android.provider.SearchRecentSuggestions._m0, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static global::MonoJavaBridge.MethodId _m1; public virtual void clearHistory() { global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::android.provider.SearchRecentSuggestions.staticClass, "clearHistory", "()V", ref global::android.provider.SearchRecentSuggestions._m1); } private static global::MonoJavaBridge.MethodId _m2; public virtual void saveRecentQuery(java.lang.String arg0, java.lang.String arg1) { global::MonoJavaBridge.JavaBridge.CallVoidMethod(this, global::android.provider.SearchRecentSuggestions.staticClass, "saveRecentQuery", "(Ljava/lang/String;Ljava/lang/String;)V", ref global::android.provider.SearchRecentSuggestions._m2, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1)); } private static global::MonoJavaBridge.MethodId _m3; public SearchRecentSuggestions(android.content.Context arg0, java.lang.String arg1, int arg2) : base(global::MonoJavaBridge.JNIEnv.ThreadEnv) { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; if (global::android.provider.SearchRecentSuggestions._m3.native == global::System.IntPtr.Zero) global::android.provider.SearchRecentSuggestions._m3 = @__env.GetMethodIDNoThrow(global::android.provider.SearchRecentSuggestions.staticClass, "<init>", "(Landroid/content/Context;Ljava/lang/String;I)V"); global::MonoJavaBridge.JniLocalHandle handle = @__env.NewObject(android.provider.SearchRecentSuggestions.staticClass, global::android.provider.SearchRecentSuggestions._m3, global::MonoJavaBridge.JavaBridge.ConvertToValue(arg0), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg1), global::MonoJavaBridge.JavaBridge.ConvertToValue(arg2)); Init(@__env, handle); } internal static global::MonoJavaBridge.FieldId _QUERIES_PROJECTION_1LINE4787; public static global::java.lang.String[] QUERIES_PROJECTION_1LINE { get { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; return global::MonoJavaBridge.JavaBridge.WrapJavaArrayObject<java.lang.String>(@__env.GetStaticObjectField(global::android.provider.SearchRecentSuggestions.staticClass, _QUERIES_PROJECTION_1LINE4787)) as java.lang.String[]; } } internal static global::MonoJavaBridge.FieldId _QUERIES_PROJECTION_2LINE4788; public static global::java.lang.String[] QUERIES_PROJECTION_2LINE { get { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; return global::MonoJavaBridge.JavaBridge.WrapJavaArrayObject<java.lang.String>(@__env.GetStaticObjectField(global::android.provider.SearchRecentSuggestions.staticClass, _QUERIES_PROJECTION_2LINE4788)) as java.lang.String[]; } } public static int QUERIES_PROJECTION_DATE_INDEX { get { return 1; } } public static int QUERIES_PROJECTION_QUERY_INDEX { get { return 2; } } public static int QUERIES_PROJECTION_DISPLAY1_INDEX { get { return 3; } } public static int QUERIES_PROJECTION_DISPLAY2_INDEX { get { return 4; } } static SearchRecentSuggestions() { global::MonoJavaBridge.JNIEnv @__env = global::MonoJavaBridge.JNIEnv.ThreadEnv; global::android.provider.SearchRecentSuggestions.staticClass = @__env.NewGlobalRef(@__env.FindClass("android/provider/SearchRecentSuggestions")); global::android.provider.SearchRecentSuggestions._QUERIES_PROJECTION_1LINE4787 = @__env.GetStaticFieldIDNoThrow(global::android.provider.SearchRecentSuggestions.staticClass, "QUERIES_PROJECTION_1LINE", "[Ljava/lang/String;"); global::android.provider.SearchRecentSuggestions._QUERIES_PROJECTION_2LINE4788 = @__env.GetStaticFieldIDNoThrow(global::android.provider.SearchRecentSuggestions.staticClass, "QUERIES_PROJECTION_2LINE", "[Ljava/lang/String;"); } } }
mit
rinvex/cortex-fort
src/Http/Requests/Managerarea/MemberAttributesFormRequest.php
1417
<?php declare(strict_types=1); namespace Cortex\Auth\Http\Requests\Managerarea; use Rinvex\Support\Traits\Escaper; use Illuminate\Foundation\Http\FormRequest; class MemberAttributesFormRequest extends FormRequest { use Escaper; /** * Determine if the user is authorized to make this request. * * @return bool */ public function authorize(): bool { return true; } /** * Get the validation rules that apply to the request. * * @return array */ public function rules(): array { $member = $this->route('member') ?? app('cortex.auth.member'); // Attach attribute rules $member->getEntityAttributes()->each(function ($attribute, $attributeName) use (&$rules) { switch ($attribute->type) { case 'datetime': $type = 'date'; break; case 'text': case 'check': case 'select': case 'varchar': $type = 'string'; break; default: $type = $attribute->type; break; } $rule = ($attribute->is_required ? 'required|' : 'nullable|').$type; $rules[$attributeName.($attribute->is_collection ? '.*' : '')] = $rule; }); return $rules ?? []; } }
mit
t-zuehlsdorff/gitlabhq
spec/features/merge_requests/diff_notes_resolve_spec.rb
15277
require 'spec_helper' feature 'Diff notes resolve', js: true do let(:user) { create(:user) } let(:project) { create(:project, :public, :repository) } let(:merge_request) { create(:merge_request_with_diffs, source_project: project, author: user, title: "Bug NS-04") } let!(:note) { create(:diff_note_on_merge_request, project: project, noteable: merge_request) } let(:path) { "files/ruby/popen.rb" } let(:position) do Gitlab::Diff::Position.new( old_path: path, new_path: path, old_line: nil, new_line: 9, diff_refs: merge_request.diff_refs ) end context 'no discussions' do before do project.team << [user, :master] sign_in user note.destroy visit_merge_request end it 'displays no discussion resolved data' do expect(page).not_to have_content('discussion resolved') expect(page).not_to have_selector('.discussion-next-btn') end end context 'as authorized user' do before do project.team << [user, :master] sign_in user visit_merge_request end context 'single discussion' do it 'shows text with how many discussions' do page.within '.line-resolve-all-container' do expect(page).to have_content('0/1 discussion resolved') end end it 'allows user to mark a note as resolved' do page.within '.diff-content .note' do find('.line-resolve-btn').click expect(page).to have_selector('.line-resolve-btn.is-active') expect(find('.line-resolve-btn')['data-original-title']).to eq("Resolved by #{user.name}") end page.within '.diff-content' do expect(page).to have_selector('.btn', text: 'Unresolve discussion') end page.within '.line-resolve-all-container' do expect(page).to have_content('1/1 discussion resolved') expect(page).to have_selector('.line-resolve-btn.is-active') end end it 'allows user to mark discussion as resolved' do page.within '.diff-content' do click_button 'Resolve discussion' end page.within '.diff-content .note' do expect(page).to have_selector('.line-resolve-btn.is-active') end page.within '.line-resolve-all-container' do expect(page).to have_content('1/1 discussion resolved') expect(page).to have_selector('.line-resolve-btn.is-active') end end it 'allows user to unresolve discussion' do page.within '.diff-content' do click_button 'Resolve discussion' click_button 'Unresolve discussion' end page.within '.line-resolve-all-container' do expect(page).to have_content('0/1 discussion resolved') end end it 'hides resolved discussion' do page.within '.diff-content' do click_button 'Resolve discussion' end visit_merge_request expect(page).to have_selector('.discussion-body', visible: false) end it 'allows user to resolve from reply form without a comment' do page.within '.diff-content' do click_button 'Reply...' click_button 'Resolve discussion' end page.within '.line-resolve-all-container' do expect(page).to have_content('1/1 discussion resolved') expect(page).to have_selector('.line-resolve-btn.is-active') end end it 'allows user to unresolve from reply form without a comment' do page.within '.diff-content' do click_button 'Resolve discussion' sleep 1 click_button 'Reply...' click_button 'Unresolve discussion' end page.within '.line-resolve-all-container' do expect(page).to have_content('0/1 discussion resolved') expect(page).not_to have_selector('.line-resolve-btn.is-active') end end it 'allows user to comment & resolve discussion' do page.within '.diff-content' do click_button 'Reply...' find('.js-note-text').set 'testing' click_button 'Comment & resolve discussion' end page.within '.line-resolve-all-container' do expect(page).to have_content('1/1 discussion resolved') expect(page).to have_selector('.line-resolve-btn.is-active') end end it 'allows user to comment & unresolve discussion' do page.within '.diff-content' do click_button 'Resolve discussion' click_button 'Reply...' find('.js-note-text').set 'testing' click_button 'Comment & unresolve discussion' end page.within '.line-resolve-all-container' do expect(page).to have_content('0/1 discussion resolved') end end it 'allows user to quickly scroll to next unresolved discussion' do page.within '.line-resolve-all-container' do page.find('.discussion-next-btn').click end expect(page.evaluate_script("$('body').scrollTop()")).to be > 0 end it 'hides jump to next button when all resolved' do page.within '.diff-content' do click_button 'Resolve discussion' end expect(page).to have_selector('.discussion-next-btn', visible: false) end it 'updates updated text after resolving note' do page.within '.diff-content .note' do find('.line-resolve-btn').click end expect(page).to have_content("Resolved by #{user.name}") end it 'hides jump to next discussion button' do page.within '.discussion-reply-holder' do expect(page).not_to have_selector('.discussion-next-btn') end end end context 'multiple notes' do before do create(:diff_note_on_merge_request, project: project, noteable: merge_request, in_reply_to: note) visit_merge_request end it 'does not mark discussion as resolved when resolving single note' do page.first '.diff-content .note' do first('.line-resolve-btn').click expect(page).to have_selector('.note-action-button .loading') expect(first('.line-resolve-btn')['data-original-title']).to eq("Resolved by #{user.name}") end expect(page).to have_content('Last updated') page.within '.line-resolve-all-container' do expect(page).to have_content('0/1 discussion resolved') end end it 'resolves discussion' do page.all('.note').each do |note| note.all('.line-resolve-btn').each do |button| button.click end end expect(page).to have_content('Resolved by') page.within '.line-resolve-all-container' do expect(page).to have_content('1/1 discussion resolved') end end end context 'muliple discussions' do before do create(:diff_note_on_merge_request, project: project, position: position, noteable: merge_request) visit_merge_request end it 'shows text with how many discussions' do page.within '.line-resolve-all-container' do expect(page).to have_content('0/2 discussions resolved') end end it 'allows user to mark a single note as resolved' do click_button('Resolve discussion', match: :first) page.within '.line-resolve-all-container' do expect(page).to have_content('1/2 discussions resolved') end end it 'allows user to mark all notes as resolved' do page.all('.line-resolve-btn').each do |btn| btn.click end page.within '.line-resolve-all-container' do expect(page).to have_content('2/2 discussions resolved') expect(page).to have_selector('.line-resolve-btn.is-active') end end it 'allows user user to mark all discussions as resolved' do page.all('.discussion-reply-holder').each do |reply_holder| page.within reply_holder do click_button 'Resolve discussion' end end page.within '.line-resolve-all-container' do expect(page).to have_content('2/2 discussions resolved') expect(page).to have_selector('.line-resolve-btn.is-active') end end it 'allows user to quickly scroll to next unresolved discussion' do page.within first('.discussion-reply-holder') do click_button 'Resolve discussion' end page.within '.line-resolve-all-container' do page.find('.discussion-next-btn').trigger('click') end expect(page.evaluate_script("$('body').scrollTop()")).to be > 0 end it 'updates updated text after resolving note' do page.within first('.diff-content .note') do find('.line-resolve-btn').click end expect(page).to have_content("Resolved by #{user.name}") end it 'shows jump to next discussion button' do page.all('.discussion-reply-holder').each do |holder| expect(holder).to have_selector('.discussion-next-btn') end end it 'displays next discussion even if hidden' do page.all('.note-discussion').each do |discussion| page.within discussion do click_button 'Toggle discussion' end end page.within('.issuable-discussion #notes') do expect(page).not_to have_selector('.btn', text: 'Resolve discussion') end page.within '.line-resolve-all-container' do page.find('.discussion-next-btn').click end expect(find('.discussion-with-resolve-btn')).to have_selector('.btn', text: 'Resolve discussion') end end context 'changes tab' do it 'shows text with how many discussions' do page.within '.line-resolve-all-container' do expect(page).to have_content('0/1 discussion resolved') end end it 'allows user to mark a note as resolved' do page.within '.diff-content .note' do find('.line-resolve-btn').click expect(page).to have_selector('.line-resolve-btn.is-active') end page.within '.diff-content' do expect(page).to have_selector('.btn', text: 'Unresolve discussion') end page.within '.line-resolve-all-container' do expect(page).to have_content('1/1 discussion resolved') expect(page).to have_selector('.line-resolve-btn.is-active') end end it 'allows user to mark discussion as resolved' do page.within '.diff-content' do click_button 'Resolve discussion' end page.within '.diff-content .note' do expect(page).to have_selector('.line-resolve-btn.is-active') end page.within '.line-resolve-all-container' do expect(page).to have_content('1/1 discussion resolved') expect(page).to have_selector('.line-resolve-btn.is-active') end end it 'allows user to unresolve discussion' do page.within '.diff-content' do click_button 'Resolve discussion' click_button 'Unresolve discussion' end page.within '.line-resolve-all-container' do expect(page).to have_content('0/1 discussion resolved') end end it 'allows user to comment & resolve discussion' do page.within '.diff-content' do click_button 'Reply...' find('.js-note-text').set 'testing' click_button 'Comment & resolve discussion' end page.within '.line-resolve-all-container' do expect(page).to have_content('1/1 discussion resolved') expect(page).to have_selector('.line-resolve-btn.is-active') end end it 'allows user to comment & unresolve discussion' do page.within '.diff-content' do click_button 'Resolve discussion' click_button 'Reply...' find('.js-note-text').set 'testing' click_button 'Comment & unresolve discussion' end page.within '.line-resolve-all-container' do expect(page).to have_content('0/1 discussion resolved') end end end end context 'as a guest' do let(:guest) { create(:user) } before do project.team << [guest, :guest] sign_in guest end context 'someone elses merge request' do before do visit_merge_request end it 'does not allow user to mark note as resolved' do page.within '.diff-content .note' do expect(page).not_to have_selector('.line-resolve-btn') end page.within '.line-resolve-all-container' do expect(page).to have_content('0/1 discussion resolved') end end it 'does not allow user to mark discussion as resolved' do page.within '.diff-content .note' do expect(page).not_to have_selector('.btn', text: 'Resolve discussion') end end end context 'guest users merge request' do before do mr = create(:merge_request_with_diffs, source_project: project, source_branch: 'markdown', author: guest, title: "Bug") create(:diff_note_on_merge_request, project: project, noteable: mr) visit_merge_request(mr) end it 'allows user to mark a note as resolved' do page.within '.diff-content .note' do find('.line-resolve-btn').click expect(page).to have_selector('.line-resolve-btn.is-active') end page.within '.diff-content' do expect(page).to have_selector('.btn', text: 'Unresolve discussion') end page.within '.line-resolve-all-container' do expect(page).to have_content('1/1 discussion resolved') expect(page).to have_selector('.line-resolve-btn.is-active') end end end end context 'unauthorized user' do context 'no resolved comments' do before do visit_merge_request end it 'does not allow user to mark note as resolved' do page.within '.diff-content .note' do expect(page).not_to have_selector('.line-resolve-btn') end page.within '.line-resolve-all-container' do expect(page).to have_content('0/1 discussion resolved') end end end context 'resolved comment' do before do note.resolve!(user) visit_merge_request end it 'shows resolved icon' do expect(page).to have_content '1/1 discussion resolved' click_button 'Toggle discussion' expect(page).to have_selector('.line-resolve-btn.is-active') end it 'does not allow user to click resolve button' do expect(page).to have_selector('.line-resolve-btn.is-disabled') click_button 'Toggle discussion' expect(page).to have_selector('.line-resolve-btn.is-disabled') end end end def visit_merge_request(mr = nil) mr = mr || merge_request visit project_merge_request_path(mr.project, mr) end end
mit
wmira/react-icons-kit
src/oct/tools.js
563
export const tools = {"viewBox":"0 0 16 16","children":[{"name":"path","attribs":{"fill-rule":"evenodd","d":"M4.48 7.27c.26.26 1.28 1.33 1.28 1.33l.56-.58-.88-.91 1.69-1.8s-.76-.74-.43-.45c.32-1.19.03-2.51-.87-3.44C4.93.5 3.66.2 2.52.51l1.93 2-.51 1.96-1.89.52-1.93-2C-.19 4.17.1 5.48 1 6.4c.94.98 2.29 1.26 3.48.87zm6.44 1.94l-2.33 2.3 3.84 3.98c.31.33.73.49 1.14.49.41 0 .82-.16 1.14-.49.63-.65.63-1.7 0-2.35l-3.79-3.93zM16 2.53L13.55 0 6.33 7.46l.88.91-4.31 4.46-.99.53-1.39 2.27.35.37 2.2-1.44.51-1.02L7.9 9.08l.88.91L16 2.53z"},"children":[]}],"attribs":{}};
mit
SecurityInnovation/YASAT
YASATEngine/SourceFile.cs
1364
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.IO; namespace YASATEngine { public class SourceFile { public int lines; public int commentedLines; public string path; public List<SourceCodeIssue> issues; public SourceFile(string file) { issues = new List<SourceCodeIssue>(); path = file; try { foreach (String line in File.ReadAllLines(path)) { bool inComment = false; if (line.StartsWith("//")) { commentedLines++; } else if (line.StartsWith("/*")) { inComment = true; commentedLines++; } else if (line.Contains("*/")) { inComment = false; commentedLines++; } else { if (inComment) commentedLines++; } lines++; } } catch (Exception) { } } } }
mit
pallavishende/angular-starter-master
node_modules/@ngstarter/systemjs-extension/index.js
1137
var gulp = require('gulp'); var util = require('gulp-util'); var runSequence = require('run-sequence'); var Builder = require('systemjs-builder'); function SystemJsExtension(config, systemJsConfig) { var defaultSystemJsConfig = config.src + 'systemjs.conf.js'; systemJsConfig = systemJsConfig || defaultSystemJsConfig; gulp.task('build-systemjs', function (done) { runSequence('tsc-app', buildSJS); function buildSJS () { var builder = new Builder(); builder.loadConfig(systemJsConfig) .then(function() { var path = config.tmpApp; return builder .buildStatic( path + 'main.js', path + 'bundle.js', config.systemJs.builder); }) .then(function() { util.log('Build complete'); done(); }) .catch(function (ex) { util.log('Build failed', ex); done('Build failed.'); }); } }); } module.exports = SystemJsExtension;
mit
Squarific/Hanoi
Hanoi.py
3163
class Piece: def __init__ (self, size): self.size = size def __str__ (self): return (self.size * 2 - 1) * "=" def __gt__ (self, piece): return self.size > piece.size def __lt__ (self, piece): return self.size < piece.size class Tower: def __init__ (self, height, pieces): self.height = height self.pieces = [Piece(i) for i in range(1, pieces + 1)] def __str__ (self): """ Return a string representation of the tower where the tower exists of | that are replaced by the string representation of a piece if there is a piece on that place in the tower. """ halfwidth = self.height - 1 stringtower = halfwidth * " " + "|" + halfwidth * " " + "\n" # Fill in with | if there is no piece for i in range(0, self.height - len(self.pieces)): stringtower += halfwidth * " " + "|" + halfwidth * " " + "\n" # Now add the string representations of the pieces for piece in self.pieces: stringpiece = str(piece) whitespace = halfwidth - int((len(stringpiece) - 1) / 2) stringtower += whitespace * " " + stringpiece + whitespace * " " + "\n" return stringtower def add_piece (self, piece): """ Add a piece to the tower only if the added piece is smaller than the top most piece """ if len(self.pieces) == 0 or self.pieces[0] > piece: self.pieces.insert(0, piece) return True return False def remove_piece (self): """ Remove the top most piece of the tower""" if len(self.pieces) > 0: return self.pieces.pop(0) return False class Hanoi: def __init__ (self, pieces): height = pieces self.pieces = pieces self.first = Tower(height, pieces) self.middle = Tower(height, 0) self.last = Tower(height, 0) def __str__ (self): """ Print the towers next to eachother """ hanoistring = str(self.first) hanoistring = self.add_tower_to_string(hanoistring, str(self.middle)) hanoistring = self.add_tower_to_string(hanoistring, str(self.last)) return hanoistring def add_tower_to_string (self, multilinestring, towerstring): """ Take a string that has a tower in it and add another tower to the string """ lines = multilinestring.split("\n") towerlines = towerstring.split("\n") if not len(towerlines) == len(lines): print("Can't add tower to string, strings are not of equel height") return False returnstring = "" for i in range(0, len(lines)): returnstring += lines[i] + "\t" + towerlines[i] + "\n" # We added a \n too many, remove again return returnstring[:-2] def solve (self): """ Solve the problem """ self.step(self.pieces, self.first, self.last, self.middle) def step (self, pieces, first, last, middle): """ Move pieces pieces from first to last via middle in a recursive way using 2 ^ n - 1 steps""" if (pieces == 1): self.swap(first, last) #Base case print(self) else: self.step(pieces - 1, first, middle, last) # Solve for n-1 self.step(1, first, last, middle) # Swap the biggest self.step(pieces - 1, middle, last, first) # Bring them all to the last tower def swap (self, tower1, tower2): """ Swap piece from tower1 to tower2 """ tower2.add_piece(tower1.remove_piece()) Hanoi(8).solve()
mit
pedrocavalero/metadata
src/main/java/net/sf/esfinge/metadata/annotation/validator/method/ValidMethodParameterTypes.java
597
package net.sf.esfinge.metadata.annotation.validator.method; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import net.sf.esfinge.metadata.annotation.validator.ToValidate; import net.sf.esfinge.metadata.validate.method.ValidatorValidMethodParameterTypes; @ToValidate(validationClass = ValidatorValidMethodParameterTypes.class) @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.ANNOTATION_TYPE) public @interface ValidMethodParameterTypes{ Parameters[] validParameters(); }
mit
gabriprat/hoshinplan
app/assets/javascripts/application/hjq-select-all-checkbox.js
1193
/* hjq-select_all_checkbox */ (function ($) { var methods = { init: function (annotations) { var opts = this.hjq('getOptions', annotations); var selector = opts.selector; var cs = $(selector); methods.changeStatus(cs); cs.on('click.selectAllCheckbox', methods.changeStatus.bind(this, cs)); $(this).on('click.selectAllCheckbox', methods.click.bind(this, cs)); }, click: function (cs) { var checked = $(this).is(":checked"); cs.prop('checked', checked); }, changeStatus: function (cs) { var allChecked = cs.filter(':checked').length === cs.length; $(this).attr('checked', allChecked); } }; $.fn.hjq_select_all_checkbox = function (method) { if (methods[method]) { return methods[method].apply(this, Array.prototype.slice.call(arguments, 1)); } else if (typeof method === 'object' || !method) { return methods.init.apply(this, arguments); } else { $.error('Method ' + method + ' does not exist on hjq_select_all_checkbox'); } }; })(jQuery);
mit
gmessner/gitlab4j-api
src/main/java/org/gitlab4j/api/webhook/NoteEvent.java
6564
package org.gitlab4j.api.webhook; import java.util.Date; import org.gitlab4j.api.models.Diff; import org.gitlab4j.api.models.User; import org.gitlab4j.api.utils.JacksonJson; import org.gitlab4j.api.utils.JacksonJsonEnumHelper; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonValue; public class NoteEvent extends AbstractEvent { public static final String X_GITLAB_EVENT = "Note Hook"; public static final String OBJECT_KIND = "note"; private User user; private Integer projectId; private EventProject project; private EventRepository repository; private ObjectAttributes objectAttributes; private EventCommit commit; private EventIssue issue; private EventMergeRequest mergeRequest; private EventSnippet snippet; public String getObjectKind() { return (OBJECT_KIND); } public void setObjectKind(String objectKind) { if (!OBJECT_KIND.equals(objectKind)) throw new RuntimeException("Invalid object_kind (" + objectKind + "), must be '" + OBJECT_KIND + "'"); } public User getUser() { return user; } public void setUser(User user) { this.user = user; } public Integer getProjectId() { return this.projectId; } public void setProjectId(Integer projectId) { this.projectId = projectId; } public EventProject getProject() { return project; } public void setProject(EventProject project) { this.project = project; } public EventRepository getRepository() { return repository; } public void setRepository(EventRepository repository) { this.repository = repository; } public ObjectAttributes getObjectAttributes() { return this.objectAttributes; } public void setObjectAttributes(ObjectAttributes objectAttributes) { this.objectAttributes = objectAttributes; } public EventCommit getCommit() { return commit; } public void setCommit(EventCommit commit) { this.commit = commit; } public EventIssue getIssue() { return issue; } public void setIssue(EventIssue issue) { this.issue = issue; } public EventMergeRequest getMergeRequest() { return mergeRequest; } public void setMergeRequest(EventMergeRequest mergeRequest) { this.mergeRequest = mergeRequest; } public EventSnippet getSnippet() { return snippet; } public void setSnippet(EventSnippet snippet) { this.snippet = snippet; } public static enum NoteableType { ISSUE, MERGE_REQUEST, SNIPPET, COMMIT; private static JacksonJsonEnumHelper<NoteableType> enumHelper = new JacksonJsonEnumHelper<>(NoteableType.class, true, true); @JsonCreator public static NoteableType forValue(String value) { return enumHelper.forValue(value); } @JsonValue public String toValue() { return (enumHelper.toString(this)); } @Override public String toString() { return (enumHelper.toString(this)); } } public static class ObjectAttributes { private Integer id; private String note; private NoteableType noteableType; private Integer authorId; private Date createdAt; private Date updatedAt; private Integer projectId; private String attachment; private String lineCode; private String commitId; private Integer noteableId; private Boolean system; private Diff stDiff; private String url; public Integer getId() { return this.id; } public void setId(Integer id) { this.id = id; } public String getNote() { return note; } public void setNote(String note) { this.note = note; } public NoteableType getNoteableType() { return noteableType; } public void NoteableType(NoteableType notableType) { this.noteableType = notableType; } public Integer getAuthorId() { return this.authorId; } public void setAuthorId(Integer authorId) { this.authorId = authorId; } public Date getCreatedAt() { return this.createdAt; } public void setCreatedAt(Date createdAt) { this.createdAt = createdAt; } public Date getUpdatedAt() { return this.updatedAt; } public void setUpdatedAt(Date updatedAt) { this.updatedAt = updatedAt; } public Integer getProjectId() { return this.projectId; } public void setProjectId(Integer projectId) { this.projectId = projectId; } public String getAttachment() { return attachment; } public void setAttachment(String attachment) { this.attachment = attachment; } public String getLineCode() { return lineCode; } public void setLineCode(String lineCode) { this.lineCode = lineCode; } public String getCommitId() { return commitId; } public void setCommitId(String commitId) { this.commitId = commitId; } public Integer getNoteableId() { return noteableId; } public void setNoteableId(Integer noteableId) { this.noteableId = noteableId; } public Boolean getSystem() { return system; } public void setSystem(Boolean system) { this.system = system; } public Diff getStDiff() { return stDiff; } public void setStDiff(Diff stDiff) { this.stDiff = stDiff; } public String getUrl() { return url; } public void setUrl(String url) { this.url = url; } } @Override public String toString() { return (JacksonJson.toJsonString(this)); } }
mit
PaulGregor/crowdin-cli
crowdin/cli.py
10255
# -*- coding: utf-8 -*- from __future__ import division, print_function, unicode_literals try: from crowdin.__init__ import __version__ from crowdin import methods except ImportError: from __init__ import __version__ import methods import argparse import gettext import logging import os import sys import yaml class Main: def __init__(self): level = logging.INFO formatter = logging.Formatter('%(message)s') self.logger = logging.getLogger('crowdin') self.logger.setLevel(level) self.console = logging.StreamHandler() self.console.setLevel(level) self.console.setFormatter(formatter) self.logger.addHandler(self.console) l_dir = os.path.dirname(os.path.realpath(__file__)) + "/locales" loc = gettext.translation('cli', l_dir, languages=['en']) # _ = loc.ugettext _ = loc.gettext loc.install() def main(self): parser = argparse.ArgumentParser(prog='crowdin-cli-py', add_help=False, usage=argparse.SUPPRESS, formatter_class=argparse.RawDescriptionHelpFormatter, description=('''\ NAME: Crowdin-cli-py {0} This tool requires configuration file to be created. See https://crowdin.com/page/cli-tool#configuration-file for more details. SYNOPSIS: crowdin-cli-py [global options] command [command option] [arguments...] VERSION: {1} ''').format(_("desc"), __version__)) parser._optionals.title = 'GLOBAL OPTIONS' parser.add_argument('-c', '--config', action='store', metavar='', dest='config', help='- Project-specific configuration file') parser.add_argument('--identity', action='store', dest='identity', metavar='', help='- User-specific configuration file with ' 'API credentials') parser.add_argument('--version', action='version', version="%(prog)s {0}".format(__version__), help='- Display the program version') parser.add_argument('-v', '--verbose', action='store_true', default=False, dest='verbose', help='- Be verbose') parser.add_argument('--help', action='help', help='- Show this message') subparsers = parser.add_subparsers(title='COMMANDS', metavar='') # A help command help_parser = subparsers.add_parser('help', help='Shows a list of commands or help for one command') # A upload command upload_parser = subparsers.add_parser('upload', help='Upload files to the server') upload_parser.add_argument('sources', help='This argument uploads sources files', nargs='?') upload_parser.add_argument('translations', help='This argument uploads translations files', nargs='?') upload_parser.add_argument('-l', '--language', action='store', metavar='', dest='language', help='- Defines the language translations should be uploaded to.') upload_parser.add_argument('-b', '--branch', action='store', metavar='', dest='branch', help='- Defines the brahcn should be uploaded to.') upload_parser.add_argument('--import-duplicates', action='store_const', dest='duplicates', const='1', help='- Defines whether to add translation if there is the same translation previously added.') upload_parser.add_argument('--no-import-duplicates', action='store_false', dest='duplicates', help='- Defines whether to add translation if there is the same translation previously added.') upload_parser.add_argument('--import-eq-suggestions', action='store_const', dest='suggestions', const='1', help='- Defines whether to add translation if it is equal to source string at Crowdin.') upload_parser.add_argument('--no-import-eq-suggestions', action='store_false', dest='suggestions', help='- Defines whether to add translation if it is equal to source string at Crowdin.') upload_parser.add_argument('--auto-approve-imported', action='store_const', dest='imported', const='1', help='- Mark uploaded translations as approved.') upload_parser.add_argument('--no-auto-approve-imported', action='store_false', dest='imported', help='- Mark uploaded translations as approved.') upload_parser.set_defaults(func=self.upload_files) # A list command list_parser = subparsers.add_parser('list', help='List information about the files') list_parser.add_argument('sources', action='store', help='List information about the sources files in current ' 'project.', nargs='?') list_parser.add_argument('translations', action='store', help='List information about the translations ' 'files in current project.', nargs='?') list_parser.add_argument('project', action='store', help='List information about the files that already ' 'exists in current project', nargs='?') list_parser.add_argument('--tree', action='store_true', dest='tree', default=False, help='Built a tree like view') list_parser.set_defaults(func=self.list_files) # A download command download_parser = subparsers.add_parser('download', help='Download projects files') download_parser.add_argument('-l', '--language', action='store', metavar='', dest='dlanguage', help='- If the option is defined the ' 'translations will be downloaded for single specified language.' 'Otherwise (by default) translations are downloaded for all languages') download_parser.add_argument('-b', '--branch', action='store', metavar='', dest='branch', help='- Defines the brahcn should be downloaded to.') download_parser.set_defaults(func=self.download_project) # A test command # test_parser = subparsers.add_parser('test', help='Test Crowdin project.') # test_parser.add_argument('dirname', action='store', help='New directory to create') # test_parser.set_defaults(func=self.test) if len(sys.argv) == 1 or "help" in sys.argv: if "upload" in sys.argv: upload_parser.print_help() elif "download" in sys.argv: download_parser.print_help() else: parser.print_help() sys.exit(1) # results = parser.parse_args() # print results.config if "upload" in sys.argv and not "sources" in sys.argv and not "translations" in sys.argv: upload_parser.print_help() sys.exit(1) if "list" in sys.argv and not "sources" in sys.argv and not "translations" in \ sys.argv and not "project" in sys.argv: list_parser.print_help() sys.exit(1) # print args.identity # print "I'm method main" args = parser.parse_args() if args.verbose: self.logger.setLevel(logging.DEBUG) self.console.setLevel(logging.DEBUG) self.logger.addHandler(self.console) args.func(args) def test(self, test): return methods.Methods(test, self.open_file(test)).test() # Can't Take My Eyes Off You def upload_files(self, upload): # print(upload) if upload.sources == "sources": return methods.Methods(upload, self.open_file(upload)).upload_sources() if upload.sources == "translations": return methods.Methods(upload, self.open_file(upload)).upload_translations() def list_files(self, list_f): # print(list_f) return methods.Methods(list_f, self.open_file(list_f)).list_project_files() def download_project(self, download): # print(download) return methods.Methods(download, self.open_file(download)).download_project() def open_file(self, options_config): # reading configuration file location_to_configuration_file = 'crowdin.yaml' home = os.path.expanduser(b"~").decode(sys.getfilesystemencoding()) + "/.crowdin.yaml" if options_config.config: location_to_configuration_file = options_config.config if options_config.identity: home = options_config.identity try: fh = open(location_to_configuration_file, "r") try: config = yaml.load(fh) except yaml.YAMLError as e: print(e, '\n Could not parse YAML. ' 'We were unable to successfully parse the crowdin.yaml file that you provided - ' 'it most likely is not well-formed YAML. ' '\n Please check whether your crowdin.yaml is valid YAML - you can use ' 'the http://yamllint.com/ validator to do this - and make any necessary changes to fix it.') exit() if os.path.isfile(home): fhh = open(home, "r") config_api = yaml.load(fhh) if config_api.get('api_key'): config['api_key'] = config_api.get('api_key') if config_api.get('project_identifier'): config['project_identifier'] = config_api.get('project_identifier') fhh.close() # print "I'M robot method open file" fh.close() except(OSError, IOError) as e: print(e, '\nCan''t find configuration file (default `crowdin.yaml`). Type `crowdin-cli-py help` ' 'to know how to specify custom configuration file. \nSee ' 'http://crowdin.com/page/cli-tool#configuration-file for more details') exit() else: if not config.get('base_path'): print("Warning: Configuration file misses parameter `base_path` that defines " "your project root directory. Using current directory as a root directory.") return config if __name__ == "__main__": Main().main() def start_cli(): Main().main()
mit
brainstorm/bcbio-nextgen
bcbio/distributed/ipythontasks.py
16109
"""Ipython parallel ready entry points for parallel execution """ import contextlib import os try: from ipyparallel import require except ImportError: from IPython.parallel import require from bcbio import heterogeneity, hla, chipseq, structural, upload from bcbio.bam import callable from bcbio.rnaseq import (sailfish, rapmap, salmon, umi, kallisto) from bcbio.distributed import ipython from bcbio.ngsalign import alignprep from bcbio.srna import sample as srna from bcbio.srna import group as seqcluster from bcbio.chipseq import peaks from bcbio.pipeline import (archive, config_utils, disambiguate, sample, qcsummary, shared, variation, run_info, rnaseq) from bcbio.provenance import system from bcbio.qc import multiqc, qsignature from bcbio.variation import (bamprep, genotype, ensemble, joint, multi, population, recalibrate, validate, vcfutils) from bcbio.log import logger, setup_local_logging @contextlib.contextmanager def _setup_logging(args): # Set environment to standard to use periods for decimals and avoid localization os.environ["LC_ALL"] = "C" os.environ["LC"] = "C" os.environ["LANG"] = "C" config = None if len(args) == 1 and isinstance(args[0], (list, tuple)): args = args[0] for arg in args: if config_utils.is_nested_config_arg(arg): config = arg["config"] break elif config_utils.is_std_config_arg(arg): config = arg break elif isinstance(arg, (list, tuple)) and config_utils.is_nested_config_arg(arg[0]): config = arg[0]["config"] break if config is None: raise NotImplementedError("No config found in arguments: %s" % args[0]) handler = setup_local_logging(config, config.get("parallel", {})) try: yield config except: logger.exception("Unexpected error") raise finally: if hasattr(handler, "close"): handler.close() # Potential wrapper to avoid boilerplate if we can get dill working for closures from functools import wraps def _pack_n_log(f): from bcbio.distributed import ipython @wraps(f) def wrapper(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(fn(*args)) return wrapper @require(sample) def prepare_sample(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(sample.prepare_sample, *args)) @require(sample) def prepare_bcbio_samples(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(sample.prepare_bcbio_samples, *args)) @require(sample) def trim_sample(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(sample.trim_sample, *args)) @require(srna) def trim_srna_sample(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(srna.trim_srna_sample, *args)) @require(srna) def srna_annotation(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(srna.sample_annotation, *args)) @require(seqcluster) def seqcluster_prepare(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(seqcluster.run_prepare, *args)) @require(seqcluster) def seqcluster_cluster(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(seqcluster.run_cluster, *args)) @require(seqcluster) def srna_alignment(* args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(seqcluster.run_align, *args)) @require(peaks) def peakcalling(* args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(peaks.calling, *args)) @require(sailfish) def run_sailfish(*args): args = ipython.unzip_args(args) with _setup_logging(args): return ipython.zip_args(apply(sailfish.run_sailfish, *args)) @require(sailfish) def run_sailfish_index(*args): args = ipython.unzip_args(args) with _setup_logging(args): return ipython.zip_args(apply(sailfish.run_sailfish_index, *args)) @require(rapmap) def run_rapmap_align(*args): args = ipython.unzip_args(args) with _setup_logging(args): return ipython.zip_args(apply(rapmap.run_rapmap_align, *args)) @require(umi) def run_umi_transform(*args): args = ipython.unzip_args(args) with _setup_logging(args): return ipython.zip_args(apply(umi.umi_transform, *args)) @require(umi) def demultiplex_samples(*args): args = ipython.unzip_args(args) with _setup_logging(args): return ipython.zip_args(apply(umi.demultiplex_samples, *args)) @require(umi) def run_filter_barcodes(*args): args = ipython.unzip_args(args) with _setup_logging(args): return ipython.zip_args(apply(umi.filter_barcodes, *args)) @require(umi) def run_tagcount(*args): args = ipython.unzip_args(args) with _setup_logging(args): return ipython.zip_args(apply(umi.tagcount, *args)) @require(umi) def run_barcode_histogram(*args): args = ipython.unzip_args(args) with _setup_logging(args): return ipython.zip_args(apply(umi.barcode_histogram, *args)) @require(kallisto) def run_kallisto_singlecell(*args): args = ipython.unzip_args(args) with _setup_logging(args): return ipython.zip_args(apply(kallisto.run_kallisto_singlecell, *args)) @require(salmon) def run_salmon_bam(*args): args = ipython.unzip_args(args) with _setup_logging(args): return ipython.zip_args(apply(salmon.run_salmon_bam, *args)) @require(salmon) def run_salmon_reads(*args): args = ipython.unzip_args(args) with _setup_logging(args): return ipython.zip_args(apply(salmon.run_salmon_reads, *args)) @require(salmon) def run_salmon_index(*args): args = ipython.unzip_args(args) with _setup_logging(args): return ipython.zip_args(apply(salmon.run_salmon_index, *args)) @require(sample) def process_alignment(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(sample.process_alignment, *args)) @require(alignprep) def prep_align_inputs(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(alignprep.create_inputs, *args)) @require(sample) def postprocess_alignment(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(sample.postprocess_alignment, *args)) @require(sample) def prep_samples(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(sample.prep_samples, *args)) @require(sample) def merge_sample(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(sample.merge_sample, *args)) @require(sample) def delayed_bam_merge(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(sample.delayed_bam_merge, *args)) @require(sample) def merge_split_alignments(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(sample.merge_split_alignments, *args)) @require(sample) def recalibrate_sample(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(sample.recalibrate_sample, *args)) @require(recalibrate) def prep_recal(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(recalibrate.prep_recal, *args)) @require(multi) def split_variants_by_sample(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(multi.split_variants_by_sample, *args)) @require(bamprep) def piped_bamprep(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(bamprep.piped_bamprep, *args)) @require(variation) def postprocess_variants(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(variation.postprocess_variants, *args)) @require(qcsummary) def pipeline_summary(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(qcsummary.pipeline_summary, *args)) @require(qsignature) def qsignature_summary(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(qsignature.summary, *args)) @require(multiqc) def multiqc_summary(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(multiqc.summary, *args)) @require(rnaseq) def generate_transcript_counts(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(rnaseq.generate_transcript_counts, *args)) @require(rnaseq) def run_cufflinks(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(rnaseq.run_cufflinks, *args)) @require(rnaseq) def run_stringtie_expression(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(rnaseq.run_stringtie_expression, *args)) @require(rnaseq) def run_rnaseq_variant_calling(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(rnaseq.run_rnaseq_variant_calling, *args)) @require(rnaseq) def run_rnaseq_joint_genotyping(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(rnaseq.run_rnaseq_joint_genotyping, *args)) @require(rnaseq) def run_express(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(rnaseq.run_express, *args)) @require(rnaseq) def run_dexseq(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(rnaseq.run_dexseq, *args)) @require(shared) def combine_bam(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(shared.combine_bam, *args)) @require(callable) def combine_sample_regions(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(callable.combine_sample_regions, *args)) @require(genotype) def variantcall_sample(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(genotype.variantcall_sample, *args)) @require(vcfutils) def combine_variant_files(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(vcfutils.combine_variant_files, *args)) @require(vcfutils) def concat_variant_files(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(vcfutils.concat_variant_files, *args)) @require(vcfutils) def merge_variant_files(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(vcfutils.merge_variant_files, *args)) @require(population) def prep_gemini_db(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(population.prep_gemini_db, *args)) @require(hla) def call_hla(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(hla.call_hla, *args)) @require(structural) def detect_sv(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(structural.detect_sv, *args)) @require(structural) def validate_sv(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(structural.validate_sv, *args)) @require(structural) def finalize_sv(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(structural.finalize_sv, *args)) @require(heterogeneity) def heterogeneity_estimate(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(heterogeneity.estimate, *args)) @require(ensemble) def combine_calls(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(ensemble.combine_calls, *args)) @require(validate) def compare_to_rm(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(validate.compare_to_rm, *args)) @require(disambiguate) def run_disambiguate(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(disambiguate.run, *args)) @require(disambiguate) def disambiguate_split(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(disambiguate.split, *args)) @require(disambiguate) def disambiguate_merge_extras(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(disambiguate.merge_extras, *args)) @require(system) def machine_info(*args): args = ipython.unzip_args(args) return ipython.zip_args(system.machine_info()) @require(chipseq) def clean_chipseq_alignment(*args): args = ipython.unzip_args(args) return ipython.zip_args(apply(chipseq.clean_chipseq_alignment, *args)) @require(archive) def archive_to_cram(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(archive.to_cram, *args)) @require(joint) def square_batch_region(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(joint.square_batch_region, *args)) @require(rnaseq) def cufflinks_assemble(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(rnaseq.cufflinks_assemble, *args)) @require(rnaseq) def cufflinks_merge(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(rnaseq.cufflinks_merge, *args)) @require(rnaseq) def stringtie_merge(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(rnaseq.stringtie_merge, *args)) @require(run_info) def organize_samples(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(run_info.organize, *args)) @require(run_info) def prep_system(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(run_info.prep_system, *args)) @require(upload) def upload_samples(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(upload.from_sample, *args)) @require(upload) def upload_samples_project(*args): args = ipython.unzip_args(args) with _setup_logging(args) as config: return ipython.zip_args(apply(upload.project_from_sample, *args))
mit
cadabloom/RethinkDb.Driver
Source/RethinkDb.Driver/Generated/ReqlUserError.cs
682
//AUTOGENERATED, DO NOTMODIFY. //Do not edit this file directly. #pragma warning disable 1591 // ReSharper disable CheckNamespace using System; using RethinkDb.Driver.Model; using RethinkDb.Driver.Ast; namespace RethinkDb.Driver { public class ReqlUserError : ReqlRuntimeError { public ReqlUserError () { } public ReqlUserError (Exception e) : this(e.Message, e) { } public ReqlUserError (string message) : base(message) { } public ReqlUserError (string message, Exception innerException) : base(message, innerException) { } } }
mit
shankymunjal/noty
lib/noty/view_helpers.rb
1547
module Noty module ViewHelpers def notification(_arg = nil) _arg ||= flash.to_hash case _arg.class.name when 'String' javascript_tag do "noty({'text':'#{_arg}','layout':'bottom','type':'alert','animateOpen':{'height':'toggle'},'animateClose':{'height':'toggle'},'speed':500,'timeout':5000,'closeButton':false,'closeOnSelfClick':true,'closeOnSelfOver':false});" end when 'Hash' javascript_tag do _arg.collect do |_key, _value| _value = ( (_value.class.name == 'Array') ? _value.compact.uniq : [_value]) case _key when :alert _value.collect do |v| "noty({'text':'#{v}','layout':'bottom','type':'alert','animateOpen':{'height':'toggle'},'animateClose':{'height':'toggle'},'speed':500,'timeout':0,'closeButton':false,'closeOnSelfClick':true,'closeOnSelfOver':false});" end when :notice _value.collect do |v| _type = (( v =~ /success/i) ? 'success' : 'information') "noty({'text':'#{v}','layout':'bottom','type':'#{_type}','animateOpen':{'height':'toggle'},'animateClose':{'height':'toggle'},'speed':500,'timeout':0,'closeButton':false,'closeOnSelfClick':true,'closeOnSelfOver':false});" end when :error _value.collect do |v| "noty({'text':'#{v}','layout':'bottom','type':'error','animateOpen':{'height':'toggle'},'animateClose':{'height':'toggle'},'speed':100,'timeout':0,'closeButton':false,'closeOnSelfClick':true,'closeOnSelfOver':false});" end end end.join(' ') end end end end end
mit
slantdotnet/data-structures
example/SortingVisualizer/Properties/Resources.Designer.cs
2799
//------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // Runtime Version:4.0.30319.42000 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ namespace SortingVisualizer.Properties { using System; /// <summary> /// A strongly-typed resource class, for looking up localized strings, etc. /// </summary> // This class was auto-generated by the StronglyTypedResourceBuilder // class via a tool like ResGen or Visual Studio. // To add or remove a member, edit your .ResX file then rerun ResGen // with the /str option, or rebuild your VS project. [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")] [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] internal class Resources { private static global::System.Resources.ResourceManager resourceMan; private static global::System.Globalization.CultureInfo resourceCulture; [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] internal Resources() { } /// <summary> /// Returns the cached ResourceManager instance used by this class. /// </summary> [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] internal static global::System.Resources.ResourceManager ResourceManager { get { if (object.ReferenceEquals(resourceMan, null)) { global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("SortingVisualizer.Properties.Resources", typeof(Resources).Assembly); resourceMan = temp; } return resourceMan; } } /// <summary> /// Overrides the current thread's CurrentUICulture property for all /// resource lookups using this strongly typed resource class. /// </summary> [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] internal static global::System.Globalization.CultureInfo Culture { get { return resourceCulture; } set { resourceCulture = value; } } } }
mit
firekesti/Pin-Number-Picker
pinnumberpicker/src/main/java/net/firekesti/pinnumberpicker/PinNumberPicker.java
15061
package net.firekesti.pinnumberpicker; import android.animation.Animator; import android.animation.AnimatorInflater; import android.content.Context; import android.content.res.Resources; import android.util.AttributeSet; import android.util.TypedValue; import android.view.KeyEvent; import android.view.View; import android.widget.FrameLayout; import android.widget.OverScroller; import android.widget.TextView; /** * Ripped from AOSP source by kkelly on 11/2/15. */ public final class PinNumberPicker extends FrameLayout { private static final int NUMBER_VIEWS_RES_ID[] = { R.id.previous2_number, R.id.previous_number, R.id.current_number, R.id.next_number, R.id.next2_number}; private static final int CURRENT_NUMBER_VIEW_INDEX = 2; private static Animator sFocusedNumberEnterAnimator; private static Animator sFocusedNumberExitAnimator; private static Animator sAdjacentNumberEnterAnimator; private static Animator sAdjacentNumberExitAnimator; private static float sAlphaForFocusedNumber; private static float sAlphaForAdjacentNumber; private int mMinValue; private int mMaxValue; private int mCurrentValue; private int mNextValue; private final int mNumberViewHeight; private PinNumberPicker mNextNumberPicker; private boolean mCancelAnimation; private final View mNumberViewHolder; private final View mBackgroundView; private boolean mArrowsEnabled = false; private final View mNumberUpView; private final View mNumberDownView; private final View mPasswordDotView; private final TextView[] mNumberViews; private final OverScroller mScroller; private OnFinalNumberDoneListener mListener; private boolean allowPlaceholder; private String placeholderChar; private boolean mPasswordModeEnabled = false; public PinNumberPicker(Context context) { this(context, null); } public PinNumberPicker(Context context, AttributeSet attrs) { this(context, attrs, 0); } public PinNumberPicker(Context context, AttributeSet attrs, int defStyleAttr) { this(context, attrs, defStyleAttr, 0); } public PinNumberPicker(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) { super(context, attrs, defStyleAttr, defStyleRes); View view = inflate(context, R.layout.pin_number_picker, this); mNumberViewHolder = view.findViewById(R.id.number_view_holder); mBackgroundView = view.findViewById(R.id.focused_background); mNumberUpView = view.findViewById(R.id.number_up_arrow); mNumberDownView = view.findViewById(R.id.number_down_arrow); mPasswordDotView = view.findViewById(R.id.password_dot); mNumberViews = new TextView[NUMBER_VIEWS_RES_ID.length]; for (int i = 0; i < NUMBER_VIEWS_RES_ID.length; ++i) { mNumberViews[i] = (TextView) view.findViewById(NUMBER_VIEWS_RES_ID[i]); } Resources resources = context.getResources(); mNumberViewHeight = resources.getDimensionPixelOffset( R.dimen.pin_number_picker_text_view_height); mScroller = new OverScroller(context); mNumberViewHolder.setOnFocusChangeListener(new OnFocusChangeListener() { @Override public void onFocusChange(View v, boolean hasFocus) { updateFocus(); } }); mNumberViewHolder.setOnKeyListener(new OnKeyListener() { @Override public boolean onKey(View v, int keyCode, KeyEvent event) { if (event.getAction() == KeyEvent.ACTION_DOWN) { switch (keyCode) { case KeyEvent.KEYCODE_DPAD_UP: case KeyEvent.KEYCODE_DPAD_DOWN: { if (!mScroller.isFinished() || mCancelAnimation) { endScrollAnimation(); } if (mScroller.isFinished() || mCancelAnimation) { mCancelAnimation = false; if (keyCode == KeyEvent.KEYCODE_DPAD_DOWN) { mNextValue = adjustValueInValidRange(mCurrentValue + 1); startScrollAnimation(true); mScroller.startScroll(0, 0, 0, mNumberViewHeight, getResources().getInteger( R.integer.pin_number_scroll_duration)); } else { mNextValue = adjustValueInValidRange(mCurrentValue - 1); startScrollAnimation(false); mScroller.startScroll(0, 0, 0, -mNumberViewHeight, getResources().getInteger( R.integer.pin_number_scroll_duration)); } updateText(); invalidate(); } return true; } } } else if (event.getAction() == KeyEvent.ACTION_UP) { switch (keyCode) { case KeyEvent.KEYCODE_DPAD_UP: case KeyEvent.KEYCODE_DPAD_DOWN: { mCancelAnimation = true; return true; } } } return false; } }); mNumberViewHolder.setScrollY(mNumberViewHeight); mListener = new OnFinalNumberDoneListener(); } public static void loadResources(Context context) { if (sFocusedNumberEnterAnimator == null) { TypedValue outValue = new TypedValue(); context.getResources().getValue( R.dimen.pin_alpha_for_focused_number, outValue, true); sAlphaForFocusedNumber = outValue.getFloat(); context.getResources().getValue( R.dimen.pin_alpha_for_adjacent_number, outValue, true); sAlphaForAdjacentNumber = outValue.getFloat(); sFocusedNumberEnterAnimator = AnimatorInflater.loadAnimator(context, R.animator.pin_focused_number_enter); sFocusedNumberExitAnimator = AnimatorInflater.loadAnimator(context, R.animator.pin_focused_number_exit); sAdjacentNumberEnterAnimator = AnimatorInflater.loadAnimator(context, R.animator.pin_adjacent_number_enter); sAdjacentNumberExitAnimator = AnimatorInflater.loadAnimator(context, R.animator.pin_adjacent_number_exit); } } @Override public void computeScroll() { super.computeScroll(); if (mScroller.computeScrollOffset()) { mNumberViewHolder.setScrollY(mScroller.getCurrY() + mNumberViewHeight); updateText(); invalidate(); } else if (mCurrentValue != mNextValue) { mCurrentValue = mNextValue; } } @Override public boolean dispatchKeyEvent(KeyEvent event) { if (event.getAction() == KeyEvent.ACTION_DOWN) { int keyCode = event.getKeyCode(); if (keyCode >= KeyEvent.KEYCODE_0 && keyCode <= KeyEvent.KEYCODE_9) { setNextValue(keyCode - KeyEvent.KEYCODE_0); updateFocus(); } else if (keyCode >= KeyEvent.KEYCODE_NUMPAD_0 && keyCode <= KeyEvent.KEYCODE_NUMPAD_9) { setNextValue(keyCode - KeyEvent.KEYCODE_NUMPAD_0); updateFocus(); } else if (keyCode != KeyEvent.KEYCODE_DPAD_CENTER && keyCode != KeyEvent.KEYCODE_ENTER && keyCode != KeyEvent.KEYCODE_DPAD_RIGHT) { return super.dispatchKeyEvent(event); } if (mNextNumberPicker == null) { // The user is done - they pressed DPAD_CENTER or ENTER or RIGHT and there's no next number picker. mListener.onDone(); } else if (keyCode == KeyEvent.KEYCODE_DPAD_RIGHT) { // If the next one isn't null and the user pressed Right, return super return super.dispatchKeyEvent(event); } else { // Use the enter/center press to request focus on the next one mNextNumberPicker.requestFocus(); } return true; } return super.dispatchKeyEvent(event); } @Override public void setEnabled(boolean enabled) { super.setEnabled(enabled); mNumberViewHolder.setFocusable(enabled); for (int i = 0; i < NUMBER_VIEWS_RES_ID.length; ++i) { mNumberViews[i].setEnabled(enabled); } } public void startScrollAnimation(boolean scrollUp) { if (scrollUp) { sAdjacentNumberExitAnimator.setTarget(mNumberViews[1]); sFocusedNumberExitAnimator.setTarget(mNumberViews[2]); sFocusedNumberEnterAnimator.setTarget(mNumberViews[3]); sAdjacentNumberEnterAnimator.setTarget(mNumberViews[4]); } else { sAdjacentNumberEnterAnimator.setTarget(mNumberViews[0]); sFocusedNumberEnterAnimator.setTarget(mNumberViews[1]); sFocusedNumberExitAnimator.setTarget(mNumberViews[2]); sAdjacentNumberExitAnimator.setTarget(mNumberViews[3]); } sAdjacentNumberExitAnimator.start(); sFocusedNumberExitAnimator.start(); sFocusedNumberEnterAnimator.start(); sAdjacentNumberEnterAnimator.start(); } public void endScrollAnimation() { sAdjacentNumberExitAnimator.end(); sFocusedNumberExitAnimator.end(); sFocusedNumberEnterAnimator.end(); sAdjacentNumberEnterAnimator.end(); mCurrentValue = mNextValue; mNumberViews[1].setAlpha(sAlphaForAdjacentNumber); mNumberViews[2].setAlpha(sAlphaForFocusedNumber); mNumberViews[3].setAlpha(sAlphaForAdjacentNumber); } public void setValueRange(int min, int max) { if (min > max) { throw new IllegalArgumentException( "The min value should be greater than or equal to the max value"); } mMinValue = min; mMaxValue = max; mNextValue = mCurrentValue = mMinValue - 1; clearText(); mNumberViews[CURRENT_NUMBER_VIEW_INDEX].setText("—"); } public void setArrowsEnabled(boolean enabled) { mArrowsEnabled = enabled; } public void setPasswordModeEnabled(boolean mPasswordModeEnabled) { this.mPasswordModeEnabled = mPasswordModeEnabled; } public void setNextNumberPicker(PinNumberPicker picker) { mNextNumberPicker = picker; } public int getValue() { if (mCurrentValue < mMinValue || mCurrentValue > mMaxValue) { throw new IllegalStateException("Value is not set"); } return mCurrentValue; } // Will take effect when the focus is updated. public void setNextValue(int value) { if (value < mMinValue || value > mMaxValue) { throw new IllegalStateException("Value is not set"); } mNextValue = adjustValueInValidRange(value); } public void setCurrentValue(int value) { setNextValue(value); mCurrentValue = mNextValue; clearText(); } public void updateFocus() { endScrollAnimation(); if (mNumberViewHolder.isFocused()) { mBackgroundView.setVisibility(View.VISIBLE); if (mPasswordModeEnabled) { mNumberViewHolder.setAlpha(1.0f); mPasswordDotView.setVisibility(View.GONE); } if (mArrowsEnabled) { mNumberUpView.setVisibility(View.VISIBLE); mNumberDownView.setVisibility(View.VISIBLE); } updateText(); } else { mBackgroundView.setVisibility(View.GONE); if (mPasswordModeEnabled) { mNumberViewHolder.setAlpha(0f); mPasswordDotView.setVisibility(View.VISIBLE); } if (mArrowsEnabled) { mNumberUpView.setVisibility(View.GONE); mNumberDownView.setVisibility(View.GONE); } if (!mScroller.isFinished()) { mCurrentValue = mNextValue; mScroller.abortAnimation(); } clearText(); mNumberViewHolder.setScrollY(mNumberViewHeight); } } private void clearText() { for (int i = 0; i < NUMBER_VIEWS_RES_ID.length; ++i) { if (i != CURRENT_NUMBER_VIEW_INDEX) { mNumberViews[i].setText(""); } else if (mCurrentValue >= mMinValue && mCurrentValue <= mMaxValue) { String value = String.valueOf(mCurrentValue); if (allowPlaceholder && value.length() > 1) { value = placeholderChar; } mNumberViews[i].setText(value); } } } private void updateText() { if (mNumberViewHolder.isFocused()) { if (mCurrentValue < mMinValue || mCurrentValue > mMaxValue) { mNextValue = mCurrentValue = mMinValue; } int value = adjustValueInValidRange(mCurrentValue - CURRENT_NUMBER_VIEW_INDEX); for (int i = 0; i < NUMBER_VIEWS_RES_ID.length; ++i) { String text = String.valueOf(adjustValueInValidRange(value)); if (allowPlaceholder && text.length() > 1) { text = placeholderChar; } mNumberViews[i].setText(text); value = adjustValueInValidRange(value + 1); } } } private int adjustValueInValidRange(int value) { int interval = mMaxValue - mMinValue + 1; if (value < mMinValue - interval || value > mMaxValue + interval) { throw new IllegalArgumentException("The value( " + value + ") is too small or too big to adjust"); } return (value < mMinValue) ? value + interval : (value > mMaxValue) ? value - interval : value; } public void setAllowPlaceholder(boolean allowPlaceholder) { this.allowPlaceholder = allowPlaceholder; } public void setPlaceholderCharacter(String val) { placeholderChar = val; } /* Interface code for a callback when CENTER/ENTER is pressed on the last picker */ public void setOnFinalNumberDoneListener(OnFinalNumberDoneListener listener) { this.mListener = listener; } public interface DoneListener { void onDone(); } }
mit
FrancoisJ/ShareCoin
src/qt/locale/bitcoin_ca_ES.ts
118780
<?xml version="1.0" ?><!DOCTYPE TS><TS language="ca_ES" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Sharecoin</source> <translation>Sobre Sharecoin</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Sharecoin&lt;/b&gt; version</source> <translation>&lt;b&gt;Sharecoin&lt;/b&gt; versió</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.</source> <translation>\n Aquest és software experimental.\n\n Distribuït sota llicència de software MIT/11, veure l&apos;arxiu COPYING o http://www.opensource.org/licenses/mit-license.php.\n\nAquest producte inclou software desarrollat pel projecte OpenSSL per a l&apos;ús de OppenSSL Toolkit (http://www.openssl.org/) i de softwqre criptogràfic escrit per l&apos;Eric Young (eay@cryptsoft.com) i software UPnP escrit per en Thomas Bernard.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation>Copyright</translation> </message> <message> <location line="+0"/> <source>The Sharecoin developers</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Llibreta d&apos;adreces</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Feu doble clic per editar l&apos;adreça o l&apos;etiqueta</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Crear una nova adreça</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Copiar l&apos;adreça seleccionada al porta-retalls del sistema</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Nova adreça</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Sharecoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Aquestes són les teves adreces Sharecoin per a rebre pagaments. Pot interesar-te proveïr diferents adreces a cadascun dels enviadors així pots identificar qui et va pagant.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Copiar adreça</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Mostrar codi &amp;QR</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Sharecoin address</source> <translation>Signa el missatge per provar que ets propietari de l&apos;adreça Sharecoin</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Signar &amp;Missatge</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Esborrar l&apos;adreça sel·leccionada</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Sharecoin address</source> <translation>Verificar un missatge per asegurar-se que ha estat signat amb una adreça Sharecoin específica</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Verificar el missatge</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Esborrar</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Sharecoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>Aquestes són la seva adreça de Sharecoin per enviar els pagaments. Sempre revisi la quantitat i l&apos;adreça del destinatari abans transferència de monedes.</translation> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Copiar &amp;Etiqueta</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Editar</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation>Enviar &amp;Monedes</translation> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Exporta llibreta d&apos;adreces</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Arxiu de separació per comes (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Error en l&apos;exportació</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>No s&apos;ha pogut escriure a l&apos;arxiu %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adreça</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(sense etiqueta)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Dialeg de contrasenya</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Introdueix contrasenya</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Nova contrasenya</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Repeteix la nova contrasenya</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Introdueixi la nova contrasenya al moneder&lt;br/&gt;Si us plau useu una contrasenya de &lt;b&gt;10 o més caracters aleatoris&lt;/b&gt;, o &lt;b&gt;vuit o més paraules&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Xifrar la cartera</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Aquesta operació requereix la seva contrasenya del moneder per a desbloquejar-lo.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Desbloqueja el moneder</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Aquesta operació requereix la seva contrasenya del moneder per a desencriptar-lo.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Desencripta el moneder</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Canviar la contrasenya</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Introdueixi tant l&apos;antiga com la nova contrasenya de moneder.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Confirmar l&apos;encriptació del moneder</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR SHARECOINS&lt;/b&gt;!</source> <translation>Advertència: Si encripteu el vostre moneder i perdeu la constrasenya, &lt;b&gt;PERDREU TOTS ELS VOSTRES SHARECOINS&lt;/b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Esteu segur que voleu encriptar el vostre moneder?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>IMPORTANT: Tota copia de seguretat que hagis realitzat hauria de ser reemplaçada pel, recentment generat, arxiu encriptat del moneder.</translation> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Advertència: Les lletres majúscules estàn activades!</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Moneder encriptat</translation> </message> <message> <location line="-56"/> <source>Sharecoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your sharecoins from being stolen by malware infecting your computer.</source> <translation>Sharecoin es tancarà ara per acabar el procés d&apos;encriptació. Recorda que encriptar el teu moneder no protegeix completament els teus sharecoins de ser robades per programari maliciós instal·lat al teu ordinador.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>L&apos;encriptació del moneder ha fallat</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>L&apos;encriptació del moneder ha fallat per un error intern. El seu moneder no ha estat encriptat.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>La contrasenya introduïda no coincideix.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>El desbloqueig del moneder ha fallat</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>La contrasenya introduïda per a desencriptar el moneder és incorrecte.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>La desencriptació del moneder ha fallat</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>La contrasenya del moneder ha estat modificada correctament.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>Signar &amp;missatge...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Sincronitzant amb la xarxa ...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Panorama general</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Mostra panorama general del moneder</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Transaccions</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Cerca a l&apos;historial de transaccions</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Edita la llista d&apos;adreces emmagatzemada i etiquetes</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Mostra el llistat d&apos;adreces per rebre pagaments</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>S&amp;ortir</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Sortir de l&apos;aplicació</translation> </message> <message> <location line="+4"/> <source>Show information about Sharecoin</source> <translation>Mostra informació sobre Sharecoin</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Sobre &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Mostra informació sobre Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Opcions...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Xifrar moneder</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Realitzant copia de seguretat del moneder...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Canviar contrasenya...</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation>Important blocs del disc..</translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation>Re-indexant blocs al disc...</translation> </message> <message> <location line="-347"/> <source>Send coins to a Sharecoin address</source> <translation>Enviar monedes a una adreça Sharecoin</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for Sharecoin</source> <translation>Modificar les opcions de configuració per sharecoin</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>Realitzar còpia de seguretat del moneder a un altre directori</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Canviar la constrasenya d&apos;encriptació del moneder</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>&amp;Finestra de debug</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Obrir la consola de diagnòstic i debugging</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>&amp;Verifica el missatge..</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>Sharecoin</source> <translation>Sharecoin</translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>Moneder</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation>&amp;Enviar</translation> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation>&amp;Rebre</translation> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation>&amp;Adreces</translation> </message> <message> <location line="+22"/> <source>&amp;About Sharecoin</source> <translation>&amp;Sobre Sharecoin</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Mostrar / Amagar</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation>Mostrar o amagar la finestra principal</translation> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation>Xifrar les claus privades pertanyents al seu moneder</translation> </message> <message> <location line="+7"/> <source>Sign messages with your Sharecoin addresses to prove you own them</source> <translation>Signa el missatges amb la seva adreça de Sharecoin per provar que les poseeixes</translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Sharecoin addresses</source> <translation>Verificar els missatges per assegurar-te que han estat signades amb una adreça Sharecoin específica.</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Arxiu</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Configuració</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Ajuda</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Barra d&apos;eines de seccions</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+47"/> <source>Sharecoin client</source> <translation>Client Sharecoin</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to Sharecoin network</source> <translation><numerusform>%n connexió activa a la xarxa Sharecoin</numerusform><numerusform>%n connexions actives a la xarxa Sharecoin</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation>Processat el %1 de %2 (estimat) dels blocs del històric de transaccions.</translation> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>Proccessats %1 blocs del històric de transaccions.</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation><numerusform>%n hora</numerusform><numerusform>%n hores</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>%n dia</numerusform><numerusform>%n dies</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation><numerusform>%n setmana</numerusform><numerusform>%n setmanes</numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation>%1 radera</translation> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation>Lúltim bloc rebut ha estat generat fa %1.</translation> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation>Les transaccions a partir d&apos;això no seràn visibles.</translation> </message> <message> <location line="+22"/> <source>Error</source> <translation>Error</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation>Avís</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation>Informació</translation> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>Aquesta transacció supera el límit de tamany. Tot i així pots enviar-la amb una comissió de %1, que es destinen als nodes que processen la seva transacció i ajuda a donar suport a la xarxa. Vols pagar la comissió?</translation> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Al dia</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Posar-se al dia ...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Confirmar comisió de transacció</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Transacció enviada</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Transacció entrant</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Data: %1\nQuantitat %2\n Tipus: %3\n Adreça: %4\n</translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>Manejant URI</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid Sharecoin address or malformed URI parameters.</source> <translation>la URI no pot ser processada! Això es pot ser causat per una adreça Sharecoin invalida o paràmetres URI malformats.</translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>El moneder està &lt;b&gt;encriptat&lt;/b&gt; i actualment &lt;b&gt;desbloquejat&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>El moneder està &lt;b&gt;encriptat&lt;/b&gt; i actualment &lt;b&gt;bloquejat&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. Sharecoin can no longer continue safely and will quit.</source> <translation>Ha tingut lloc un error fatal. Sharecoin no pot continuar executant-se de manera segura i es tancará.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation>Alerta de xarxa</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Editar Adreça</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Etiqueta</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Etiqueta associada amb aquesta entrada de la llibreta d&apos;adreces</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Direcció</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Adreça associada amb aquesta entrada de la llibreta d&apos;adreces. Només pot ser modificat per a enviar adreces.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Nova adreça de recepció.</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Nova adreça d&apos;enviament</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Editar adreces de recepció</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Editar adreces d&apos;enviament</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>L&apos;adreça introduïda &quot;%1&quot; ja és present a la llibreta d&apos;adreces.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Sharecoin address.</source> <translation>L&apos;adreça introduida &quot;%1&quot; no és una adreça Sharecoin valida.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>No s&apos;ha pogut desbloquejar el moneder.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Ha fallat la generació d&apos;una nova clau.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Sharecoin-Qt</source> <translation>Sharecoin-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>versió</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Ús:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>Opcions de la línia d&apos;ordres</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>Opcions de IU</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Definir llenguatge, per exemple &quot;de_DE&quot; (per defecte: Preferències locals de sistema)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Iniciar minimitzat</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Mostrar finestra de benvinguda a l&apos;inici (per defecte: 1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Opcions</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Principal</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Pagar &amp;comisió de transacció</translation> </message> <message> <location line="+31"/> <source>Automatically start Sharecoin after logging in to the system.</source> <translation>Iniciar automàticament Sharecoin després de l&apos;inici de sessió del sistema.</translation> </message> <message> <location line="+3"/> <source>&amp;Start Sharecoin on system login</source> <translation>&amp;Iniciar Sharecoin al inici de sessió del sistema.</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation>Reestablir totes les opcions del client.</translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation>&amp;Reestablir Opcions</translation> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>&amp;Xarxa</translation> </message> <message> <location line="+6"/> <source>Automatically open the Sharecoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Obrir el port del client de Sharecoin al router de forma automàtica. Això només funciona quan el teu router implementa UPnP i l&apos;opció està activada.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Port obert amb &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the Sharecoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Connectar a la xarxa Sharecoin a través de un SOCKS proxy (per exemple connectant a través de Tor).</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Connecta a través de un proxy SOCKS:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP del proxy:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>Adreça IP del proxy (per exemple 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Port:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Port del proxy (per exemple 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>&amp;Versió de SOCKS:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>Versió SOCKS del proxy (per exemple 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Finestra</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Mostrar només l&apos;icona de la barra al minimitzar l&apos;aplicació.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimitzar a la barra d&apos;aplicacions</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimitza en comptes de sortir de la aplicació al tancar la finestra. Quan aquesta opció està activa, la aplicació només es tancarà al seleccionar Sortir al menú.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>M&amp;inimitzar al tancar</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Pantalla</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>Llenguatge de la Interfície d&apos;Usuari:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting Sharecoin.</source> <translation>Aquí pots definir el llenguatge de l&apos;aplicatiu. Aquesta configuració tindrà efecte un cop es reiniciï Sharecoin.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Unitats per mostrar les quantitats en:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Sel·lecciona la unitat de subdivisió per defecte per mostrar en la interficie quan s&apos;envien monedes.</translation> </message> <message> <location line="+9"/> <source>Whether to show Sharecoin addresses in the transaction list or not.</source> <translation>Mostrar adreces Sharecoin als llistats de transaccions o no.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Mostrar adreces al llistat de transaccions</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Cancel·la</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Aplicar</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>Per defecte</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation>Confirmi el reestabliment de les opcions</translation> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation>Algunes configuracions poden requerir reiniciar el client per a que tinguin efecte.</translation> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation>Vols procedir?</translation> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Avís</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Sharecoin.</source> <translation>Aquesta configuració tindrà efecte un cop es reiniciï Sharecoin.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>L&apos;adreça proxy introduïda és invalida.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Formulari</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Sharecoin network after a connection is established, but this process has not completed yet.</source> <translation>La informació mostrada pot no estar al día. El teu moneder es sincronitza automàticament amb la xarxa Sharecoin un cop s&apos;ha establert connexió, però aquest proces no s&apos;ha completat encara.</translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Balanç:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Sense confirmar:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Moneder</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation>Immatur:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Balanç minat que encara no ha madurat</translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Transaccions recents&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>El seu balanç actual</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Total de transaccions encara sense confirmar, que encara no es content en el balanç actual</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>Fora de sincronia</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start sharecoin: click-to-pay handler</source> <translation>No es pot iniciar sharecoin: manejador clicla-per-pagar</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>Dialeg del codi QR</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Reclamar pagament</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Quantitat:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Etiqueta:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Missatge:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Desar com...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Error codificant la URI en un codi QR.</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>La quantitat introduïda és invalida, si us plau comprovi-la.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>URI resultant massa llarga, intenta reduir el text per a la etiqueta / missatge</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Desar codi QR</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>Imatges PNG (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Nom del client</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>N/A</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Versió del client</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Informació</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Utilitzant OpenSSL versió</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>&amp;Temps d&apos;inici</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Xarxa</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Nombre de connexions</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>A testnet</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Bloquejar cadena</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Nombre de blocs actuals</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Total estimat de blocs</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Últim temps de bloc</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Obrir</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>Opcions de línia d&apos;ordres</translation> </message> <message> <location line="+7"/> <source>Show the Sharecoin-Qt help message to get a list with possible Sharecoin command-line options.</source> <translation>Mostrar el missatge d&apos;ajuda de Sharecoin-Qt per a obtenir un llistat de possibles ordres per a la línia d&apos;ordres de Sharecoin.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Mostrar</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Consola</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Data de compilació</translation> </message> <message> <location line="-104"/> <source>Sharecoin - Debug window</source> <translation>Sharecoin -Finestra de debug</translation> </message> <message> <location line="+25"/> <source>Sharecoin Core</source> <translation>Nucli de Sharecoin</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Dietàri de debug</translation> </message> <message> <location line="+7"/> <source>Open the Sharecoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Obrir el dietari de debug de Sharecoin del directori de dades actual. Aixó pot trigar uns quants segons per a dietàris grossos.</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Netejar consola</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Sharecoin RPC console.</source> <translation>Benvingut a la consola RPC de Sharecoin</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Utilitza les fletxes d&apos;amunt i avall per navegar per l&apos;històric, i &lt;b&gt;Ctrl-L&lt;\b&gt; per netejar la pantalla.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Escriu &lt;b&gt;help&lt;\b&gt; per a obtenir una llistat de les ordres disponibles.</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Enviar monedes</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Enviar a multiples destinataris al mateix temps</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>Affegir &amp;Destinatari</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Netejar tots els camps de la transacció</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Esborrar &amp;Tot</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Balanç:</translation> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation>123.456 BTC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Confirmi l&apos;acció d&apos;enviament</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>E&amp;nviar</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Confirmar l&apos;enviament de monedes</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Estas segur que vols enviar %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation>i</translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>L&apos;adreça remetent no és vàlida, si us plau comprovi-la.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>La quantitat a pagar ha de ser major que 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Import superi el saldo de la seva compte.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>El total excedeix el teu balanç quan s&apos;afegeix la comisió a la transacció %1.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>S&apos;ha trobat una adreça duplicada, tan sols es pot enviar a cada adreça un cop per ordre de enviament.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation>Error: La ceació de la transacció ha fallat!</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Error: La transacció ha estat rebutjada. Això pot passar si alguna de les monedes del teu moneder ja s&apos;han gastat, com si haguesis usat una copia de l&apos;arxiu wallet.dat i s&apos;haguessin gastat monedes de la copia però sense marcar com gastades en aquest.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Formulari</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>Q&amp;uantitat:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Pagar &amp;A:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>La adreça a on envia el pagament (per exemple: Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Introdueixi una etiquera per a aquesta adreça per afegir-la a la llibreta d&apos;adreces</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Etiqueta:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Escollir adreça del llibre d&apos;adreces</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alta+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Enganxar adreça del porta-retalls</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Eliminar aquest destinatari</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Sharecoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Introdueixi una adreça de Sharecoin (per exemple Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Signatures .Signar/Verificar un Missatge</translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Signar Missatge</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Pots signar missatges amb la teva adreça per provar que són teus. Sigues cautelòs al signar qualsevol cosa, ja que els atacs phising poden intentar confondre&apos;t per a que els hi signis amb la teva identitat. Tan sols signa als documents completament detallats amb els que hi estàs d&apos;acord.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>La adreça amb la que signat els missatges (per exemple Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Escollir una adreça de la llibreta de direccions</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alta+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Enganxar adreça del porta-retalls</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Introdueix aqui el missatge que vols signar</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Signatura</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>Copiar la signatura actual al porta-retalls del sistema</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Sharecoin address</source> <translation>Signa el missatge per provar que ets propietari d&apos;aquesta adreça Sharecoin</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Signar &amp;Missatge</translation> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>Neteja tots els camps de clau</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Esborrar &amp;Tot</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>&amp;Verificar el missatge</translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Introdueixi l&apos;adreça signant, missatge (assegura&apos;t que copies salts de línia, espais, tabuladors, etc excactament tot el text) i la signatura a sota per verificar el missatge. Per evitar ser enganyat per un atac home-entre-mig, vés amb compte de no llegir més en la signatura del que hi ha al missatge signat mateix.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>La adreça amb el que el missatge va ser signat (per exemple Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Sharecoin address</source> <translation>Verificar el missatge per assegurar-se que ha estat signat amb una adreça Sharecoin específica</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation>Verificar &amp;Missatge</translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>Neteja tots els camps de verificació de missatge</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Sharecoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Introdueixi una adreça de Sharecoin (per exemple Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Clica &quot;Signar Missatge&quot; per a generar una signatura</translation> </message> <message> <location line="+3"/> <source>Enter Sharecoin signature</source> <translation>Introduïr una clau Sharecoin</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>L&apos;adreça intoduïda és invàlida.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Siu us plau, comprovi l&apos;adreça i provi de nou.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>L&apos;adreça introduïda no referencia a cap clau.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>El desbloqueig del moneder ha estat cancelat.</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>La clau privada per a la adreça introduïda no està disponible.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>El signat del missatge ha fallat.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Missatge signat.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>La signatura no s&apos;ha pogut decodificar .</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Su us plau, comprovi la signatura i provi de nou.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>La signatura no coincideix amb el resum del missatge.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Ha fallat la verificació del missatge.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Missatge verificat.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The Sharecoin developers</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Obert fins %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/sense confirmar</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 confrimacions</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Estat</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, difusió a través de %n node</numerusform><numerusform>, difusió a través de %n nodes</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Font</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Generat</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Des de</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>A</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>Adreça pròpia</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>etiqueta</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Crèdit</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>disponible en %n bloc més</numerusform><numerusform>disponibles en %n blocs més</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>no acceptat</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Dèbit</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Comissió de transacció</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Quantitat neta</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Missatge</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Comentar</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID de transacció</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Les monedes generades han de madurar 120 blocs abans de poder ser gastades. Quan has generat aquest bloc, aquest ha estat transmés a la xarxa per a ser afegit a la cadena de blocs. Si no arriba a ser acceptat a la cadena, el seu estat passará a &quot;no acceptat&quot; i no podrá ser gastat. Això pot ocòrrer ocasionalment si un altre node genera un bloc a pocs segons del teu.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Informació de debug</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transacció</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation>Entrades</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>cert</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>fals</translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, encara no ha estat emès correctement</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation><numerusform>Obre per %n bloc més</numerusform><numerusform>Obre per %n blocs més</numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>desconegut</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Detall de la transacció</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Aquest panell mostra una descripció detallada de la transacció</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Tipus</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Direcció</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation><numerusform>Obre per %n bloc més</numerusform><numerusform>Obre per %n blocs més</numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Obert fins %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Sense connexió (%1 confirmacions)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Sense confirmar (%1 de %2 confirmacions)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Confirmat (%1 confirmacions)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation><numerusform>El saldo recent minat estarà disponible quan venci el termini en %n bloc més</numerusform><numerusform>El saldo recent minat estarà disponible quan venci el termini en %n blocs més</numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Aquest bloc no ha estat rebut per cap altre node i probablement no serà acceptat!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Generat però no acceptat</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Rebut amb</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Rebut de</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Enviat a</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Pagament a un mateix</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Minat</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Estat de la transacció. Desplaça&apos;t per aquí sobre per mostrar el nombre de confirmacions.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Data i hora en que la transacció va ser rebuda.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Tipus de transacció.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Adreça del destinatari de la transacció.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Quantitat extreta o afegida del balanç.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Tot</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Avui</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Aquesta setmana</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Aquest mes</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>El mes passat</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Enguany</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Rang...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Rebut amb</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Enviat a</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>A tu mateix</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Minat</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Altres</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Introdueix una adreça o una etiqueta per cercar</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Quantitat mínima</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Copiar adreça </translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copiar etiqueta</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copiar quantitat</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Copiar ID de transacció</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Editar etiqueta</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Mostra detalls de la transacció</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Exportar detalls de la transacció </translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Arxiu de separació per comes (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Confirmat</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Tipus</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Direcció</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Error en l&apos;exportació</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>No s&apos;ha pogut escriure a l&apos;arxiu %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Rang:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>a</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Enviar monedes</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation>Realitzar còpia de seguretat del moneder</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Dades del moneder (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Còpia de seguretat faillida</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Hi ha hagut un error intentant desar les dades del moneder al nou directori</translation> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation>Copia de seguretat realitzada correctament</translation> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation>Les dades del moneder han estat desades cirrectament al nou emplaçament.</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>Sharecoin version</source> <translation>Versió de Sharecoin</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Ús:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or sharecoind</source> <translation>Enviar comanda a -servidor o sharecoind</translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Llista d&apos;ordres</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Obtenir ajuda per a un ordre.</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Opcions:</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: sharecoin.conf)</source> <translation>Especificat arxiu de configuració (per defecte: sharecoin.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: sharecoind.pid)</source> <translation>Especificar arxiu pid (per defecte: sharecoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Especificar directori de dades</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Establir tamany de la memoria cau en megabytes (per defecte: 25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 9333 or testnet: 19333)</source> <translation>Escoltar connexions a &lt;port&gt; (per defecte: 9333 o testnet: 19333)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Mantenir com a molt &lt;n&gt; connexions a peers (per defecte: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Connectar al node per obtenir les adreces de les connexions, i desconectar</translation> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation>Especificar la teva adreça pública</translation> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Límit per a desconectar connexions errònies (per defecte: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Nombre de segons abans de reconectar amb connexions errònies (per defecte: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Ha sorgit un error al configurar el port RPC %u escoltant a IPv4: %s</translation> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 9332 or testnet: 19332)</source> <translation>Escoltar connexions JSON-RPC al port &lt;port&gt; (per defecte: 9332 o testnet:19332)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Acceptar línia d&apos;ordres i ordres JSON-RPC </translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Executar en segon pla com a programa dimoni i acceptar ordres</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Usar la xarxa de prova</translation> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Aceptar connexions d&apos;afora (per defecte: 1 si no -proxy o -connect)</translation> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=sharecoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Sharecoin Alert&quot; admin@foo.com </source> <translation>%s has de establir una contrasenya RPC a l&apos;arxiu de configuració:\n%s\nEs recomana que useu la següent constrasenya aleatòria:\nrpcuser=sharecoinrpc\nrpcpassword=%s\n(no necesiteu recordar aquesta contrsenya)\nEl nom d&apos;usuari i contrasenya NO HAN de ser els mateixos.\nSi l&apos;arxiu no existeix, crea&apos;l amb els permisos d&apos;arxiu de només lectura per al propietari.\nTambé es recomana establir la notificació d&apos;alertes i així seràs notificat de les incidències;\nper exemple: alertnotify=echo %%s | mail -s &quot;Sharecoin Alert&quot; admin@foo.com</translation> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Ha sorgit un error al configurar el port RPC %u escoltant a IPv6, retrocedint a IPv4: %s</translation> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Vincular a una adreça específica i sempre escoltar-hi. Utilitza la notació [host]:port per IPv6</translation> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Sharecoin is probably already running.</source> <translation>No es pot bloquejar el directori de dades %s. Probablement Sharecoin ja estigui en execució.</translation> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Error: La transacció ha estat rebutjada. Això pot passar si alguna de les monedes del teu moneder ja s&apos;han gastat, com si haguesis usat una copia de l&apos;arxiu wallet.dat i s&apos;haguessin gastat monedes de la copia però sense marcar com gastades en aquest.</translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Error: Aquesta transacció requereix una comissió d&apos;almenys %s degut al seu import, complexitat o per l&apos;ús de fons recentment rebuts!</translation> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation>Executar ordre al rebre una alerta rellevant (%s al cmd es reemplaça per message)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Executar una ordre quan una transacció del moneder canviï (%s in cmd es canvia per TxID)</translation> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Establir una mida màxima de transaccions d&apos;alta prioritat/baixa comisió en bytes (per defecte: 27000)</translation> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation>Aquesta és una versió de pre-llançament - utilitza-la sota la teva responsabilitat - No usar per a minería o aplicacions de compra-venda</translation> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Advertència: el -paytxfee és molt elevat! Aquesta és la comissió de transacció que pagaràs quan enviis una transacció.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Advertència: Les transaccions mostrades poden no ser correctes! Pot esser que necessitis actualitzar, o bé que altres nodes ho necessitin.</translation> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Sharecoin will not work properly.</source> <translation>Advertència: Si us plau comprovi que la data i hora del seu computador siguin correctes! Si el seu rellotge està mal configurat, Sharecoin no funcionará de manera apropiada.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Advertència: Error llegint l&apos;arxiu wallet.dat!! Totes les claus es llegeixen correctament, però hi ha dades de transaccions o entrades del llibre d&apos;adreces absents o bé son incorrectes.</translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Advertència: L&apos;arxiu wallet.dat és corrupte, dades rescatades! L&apos;arxiu wallet.dat original ha estat desat com wallet.{estampa_temporal}.bak al directori %s; si el teu balanç o transaccions son incorrectes hauries de restaurar-lo de un backup.</translation> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Intentar recuperar les claus privades d&apos;un arxiu wallet.dat corrupte</translation> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation>Opcions de la creació de blocs:</translation> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Connectar només al(s) node(s) especificats</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation>S&apos;ha detectat una base de dades de blocs corrupta</translation> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Descobrir la pròpia adreça IP (per defecte: 1 quan escoltant i no -externalip)</translation> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation>Vols reconstruir la base de dades de blocs ara?</translation> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation>Error carregant la base de dades de blocs</translation> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation>Error inicialitzant l&apos;entorn de la base de dades del moneder %s!</translation> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation>Error carregant la base de dades del bloc</translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation>Error obrint la base de dades de blocs</translation> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation>Error: Espai al disc baix!</translation> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Error: El moneder està blocat, no és possible crear la transacció!</translation> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation>Error: error de sistema:</translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Error al escoltar a qualsevol port. Utilitza -listen=0 si vols això.</translation> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation>Ha fallat la lectura de la informació del bloc</translation> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation>Ha fallat la lectura del bloc</translation> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation>Ha fallat la sincronització de l&apos;índex de bloc</translation> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation>Ha fallat la escriptura de l&apos;índex de blocs</translation> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation>Ha fallat la escriptura de la informació de bloc</translation> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation>Ha fallat l&apos;escriptura del bloc</translation> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation>Ha fallat l&apos;escriptura de l&apos;arxiu info</translation> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation>Ha fallat l&apos;escriptura de la basse de dades de monedes</translation> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation>Ha fallat l&apos;escriptura de l&apos;índex de transaccions</translation> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation>Ha fallat el desfer de dades</translation> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation>Cerca punts de connexió usant rastreig de DNS (per defecte: 1 tret d&apos;usar -connect)</translation> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Quants blocs s&apos;han de confirmar a l&apos;inici (per defecte: 288, 0 = tots)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation>Com verificar el bloc (0-4, per defecte 3)</translation> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Reconstruir l&apos;índex de la cadena de blocs dels arxius actuals blk000??.dat</translation> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation>Estableix el nombre de fils per atendre trucades RPC (per defecte: 4)</translation> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation>Verificant blocs...</translation> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation>Verificant moneder...</translation> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Importa blocs de un fitxer blk000??.dat extern</translation> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation>&amp;Informació</translation> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Adreça -tor invàlida: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation>Mantenir tot l&apos;índex de transaccions (per defecte: 0)</translation> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Mida màxima del buffer de recepció per a cada connexió, &lt;n&gt;*1000 bytes (default: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Mida màxima del buffer d&apos;enviament per a cada connexió, &lt;n&gt;*1000 bytes (default: 5000)</translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation>Tan sols acceptar cadenes de blocs que coincideixin amb els punts de prova (per defecte: 1)</translation> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Només connectar als nodes de la xarxa &lt;net&gt; (IPv4, IPv6 o Tor)</translation> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Sortida de la informació extra de debugging. Implica totes les demés opcions -debug*</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Sortida de la informació extra de debugging de xarxa.</translation> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Anteposar estampa temporal a les dades de debug</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the Sharecoin Wiki for SSL setup instructions)</source> <translation>Opcions SSL: (veure la Wiki de Sharecoin per a instruccions de configuració SSL)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Selecciona la versió de socks proxy a utilitzar (4-5, per defecte: 5)</translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Enviar informació de traça/debug a la consola en comptes del arxiu debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Enviar informació de traça/debug a un debugger</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Establir una mida màxima de bloc en bytes (per defecte: 250000)</translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Establir una mida mínima de bloc en bytes (per defecte: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Reduir l&apos;arxiu debug.log al iniciar el client (per defecte 1 quan no -debug)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Especificar el temps limit per a un intent de connexió en milisegons (per defecte: 5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation>Error de sistema:</translation> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Utilitza UPnP per a mapejar els ports d&apos;escolta (per defecte: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Utilitza UPnP per a mapejar els ports d&apos;escolta (per defecte: 1 quan s&apos;escolta)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Utilitzar proxy per arribar als serveis tor amagats (per defecte: el mateix que -proxy)</translation> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Nom d&apos;usuari per a connexions JSON-RPC</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation>Avís</translation> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Advertència: Aquetsa versió està obsoleta, és necessari actualitzar!</translation> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation>Necessiteu reconstruir les bases de dades usant -reindex per canviar -txindex</translation> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation>L&apos;arxiu wallet.data és corrupte, el rescat de les dades ha fallat</translation> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Contrasenya per a connexions JSON-RPC</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Permetre connexions JSON-RPC d&apos;adreces IP específiques</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Enviar ordre al node en execució a &lt;ip&gt; (per defecte: 127.0.0.1)</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Executar orde quan el millor bloc canviï (%s al cmd es reemplaça per un bloc de hash)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Actualitzar moneder a l&apos;últim format</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Establir límit de nombre de claus a &lt;n&gt; (per defecte: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Re-escanejar cadena de blocs en cerca de transaccions de moneder perdudes</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Utilitzar OpenSSL (https) per a connexions JSON-RPC</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Arxiu del certificat de servidor (per defecte: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Clau privada del servidor (per defecte: server.pem)</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Xifrats acceptats (per defecte: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Aquest misatge d&apos;ajuda</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Impossible d&apos;unir %s a aquest ordinador (s&apos;ha retornat l&apos;error %d, %s)</translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Connectar a través de socks proxy</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Permetre consultes DNS per a -addnode, -seednode i -connect</translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>Carregant adreces...</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Error carregant wallet.dat: Moneder corrupte</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Sharecoin</source> <translation>Error carregant wallet.dat: El moneder requereix una versió de Sharecoin més moderna</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart Sharecoin to complete</source> <translation>El moneder necesita ser re-escrit: re-inicia Sharecoin per a completar la tasca</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation>Error carregant wallet.dat</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Adreça -proxy invalida: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Xarxa desconeguda especificada a -onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>S&apos;ha demanat una versió desconeguda de -socks proxy: %i</translation> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>No es pot resoldre l&apos;adreça -bind: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>No es pot resoldre l&apos;adreça -externalip: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Quantitat invalida per a -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Quanitat invalida</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Balanç insuficient</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Carregant índex de blocs...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Afegir un node per a connectar&apos;s-hi i intentar mantenir la connexió oberta</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. Sharecoin is probably already running.</source> <translation>Impossible d&apos;unir %s en aquest ordinador. Probablement Sharecoin ja estigui en execució.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Comisió a afegir per cada KB de transaccions que enviïs</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Carregant moneder...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation>No es pot reduir la versió del moneder</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>No es pot escriure l&apos;adreça per defecte</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Re-escanejant...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Càrrega acabada</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation>Utilitza la opció %s</translation> </message> <message> <location line="-74"/> <source>Error</source> <translation>Error</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Has de configurar el rpcpassword=&lt;password&gt; a l&apos;arxiu de configuració:\n %s\n Si l&apos;arxiu no existeix, crea&apos;l amb els permís owner-readable-only.</translation> </message> </context> </TS>
mit
mono0926/Ice
typings/express/express.d.ts
63731
// Type definitions for Express 3.1 // Project: http://expressjs.com // Definitions by: Boris Yankov <https://github.com/borisyankov/> // DefinitelyTyped: https://github.com/borisyankov/DefinitelyTyped /* =================== USAGE =================== import express = require('express'); var app = express(); =============================================== */ /// <reference path="../node/node.d.ts" /> declare module Express { // These open interfaces may be extended in an application-specific manner via declaration merging. // See for example passport.d.ts (https://github.com/borisyankov/DefinitelyTyped/blob/master/passport/passport.d.ts) export interface Request { } export interface Response { } export interface Application { } } declare module "express" { import http = require('http'); // Merged declaration, e is both a callable function and a namespace function e(): e.Express; module e { interface IRoute { path: string; method: string; callbacks: Function[]; regexp: any; /** * Check if this route matches `path`, if so * populate `.params`. */ match(path: string): boolean; } class Route implements IRoute { path: string; method: string; callbacks: Function[]; regexp: any; match(path: string): boolean; /** * Initialize `Route` with the given HTTP `method`, `path`, * and an array of `callbacks` and `options`. * * Options: * * - `sensitive` enable case-sensitive routes * - `strict` enable strict matching for trailing slashes * * @param method * @param path * @param callbacks * @param options */ new (method: string, path: string, callbacks: Function[], options: any): Route; } interface IRouter<T> { /** * Map the given param placeholder `name`(s) to the given callback(s). * * Parameter mapping is used to provide pre-conditions to routes * which use normalized placeholders. For example a _:user_id_ parameter * could automatically load a user's information from the database without * any additional code, * * The callback uses the samesignature as middleware, the only differencing * being that the value of the placeholder is passed, in this case the _id_ * of the user. Once the `next()` function is invoked, just like middleware * it will continue on to execute the route, or subsequent parameter functions. * * app.param('user_id', function(req, res, next, id){ * User.find(id, function(err, user){ * if (err) { * next(err); * } else if (user) { * req.user = user; * next(); * } else { * next(new Error('failed to load user')); * } * }); * }); * * @param name * @param fn */ param(name: string, fn: Function): T; param(name: string[], fn: Function): T; /** * Special-cased "all" method, applying the given route `path`, * middleware, and callback to _every_ HTTP method. * * @param path * @param fn */ all(path: string, fn?: (req: Request, res: Response, next: Function) => any): T; all(path: string, ...callbacks: Function[]): void; get(name: string, ...handlers: RequestFunction[]): T; get(name: RegExp, ...handlers: RequestFunction[]): T; post(name: string, ...handlers: RequestFunction[]): T; post(name: RegExp, ...handlers: RequestFunction[]): T; put(name: string, ...handlers: RequestFunction[]): T; put(name: RegExp, ...handlers: RequestFunction[]): T; del(name: string, ...handlers: RequestFunction[]): T; del(name: RegExp, ...handlers: RequestFunction[]): T; patch(name: string, ...handlers: RequestFunction[]): T; patch(name: RegExp, ...handlers: RequestFunction[]): T; } export class Router implements IRouter<Router> { new (options?: any): Router; middleware (): any; param(name: string, fn: Function): Router; param(name: any[], fn: Function): Router; all(path: string, fn?: (req: Request, res: Response, next: Function) => any): Router; all(path: string, ...callbacks: Function[]): void; get(name: string, ...handlers: RequestFunction[]): Router; get(name: RegExp, ...handlers: RequestFunction[]): Router; post(name: string, ...handlers: RequestFunction[]): Router; post(name: RegExp, ...handlers: RequestFunction[]): Router; put(name: string, ...handlers: RequestFunction[]): Router; put(name: RegExp, ...handlers: RequestFunction[]): Router; delete(name: string, ...handlers: RequestFunction[]): Router; delete(name: RegExp, ...handlers: RequestFunction[]): Router; patch(name: string, ...handlers: RequestFunction[]): Router; patch(name: RegExp, ...handlers: RequestFunction[]): Router; } interface Handler { (req: Request, res: Response, next?: Function): void; } interface CookieOptions { maxAge?: number; signed?: boolean; expires?: Date; httpOnly?: boolean; path?: string; domain?: string; secure?: boolean; } interface Errback { (err: Error): void; } interface Session { /** * Update reset `.cookie.maxAge` to prevent * the cookie from expiring when the * session is still active. * * @return {Session} for chaining * @api public */ touch(): Session; /** * Reset `.maxAge` to `.originalMaxAge`. */ resetMaxAge(): Session; /** * Save the session data with optional callback `fn(err)`. */ save(fn: Function): Session; /** * Re-loads the session data _without_ altering * the maxAge properties. Invokes the callback `fn(err)`, * after which time if no exception has occurred the * `req.session` property will be a new `Session` object, * although representing the same session. */ reload(fn: Function): Session; /** * Destroy `this` session. */ destroy(fn: Function): Session; /** * Regenerate this request's session. */ regenerate(fn: Function): Session; user: any; error: string; success: string; views: any; count: number; } interface Request extends http.ServerRequest, Express.Request { session: Session; /** * Return request header. * * The `Referrer` header field is special-cased, * both `Referrer` and `Referer` are interchangeable. * * Examples: * * req.get('Content-Type'); * // => "text/plain" * * req.get('content-type'); * // => "text/plain" * * req.get('Something'); * // => undefined * * Aliased as `req.header()`. * * @param name */ get (name: string): string; header(name: string): string; headers: { [key: string]: string; }; /** * Check if the given `type(s)` is acceptable, returning * the best match when true, otherwise `undefined`, in which * case you should respond with 406 "Not Acceptable". * * The `type` value may be a single mime type string * such as "application/json", the extension name * such as "json", a comma-delimted list such as "json, html, text/plain", * or an array `["json", "html", "text/plain"]`. When a list * or array is given the _best_ match, if any is returned. * * Examples: * * // Accept: text/html * req.accepts('html'); * // => "html" * * // Accept: text/*, application/json * req.accepts('html'); * // => "html" * req.accepts('text/html'); * // => "text/html" * req.accepts('json, text'); * // => "json" * req.accepts('application/json'); * // => "application/json" * * // Accept: text/*, application/json * req.accepts('image/png'); * req.accepts('png'); * // => undefined * * // Accept: text/*;q=.5, application/json * req.accepts(['html', 'json']); * req.accepts('html, json'); * // => "json" */ accepts(type: string): string; accepts(type: string[]): string; /** * Check if the given `charset` is acceptable, * otherwise you should respond with 406 "Not Acceptable". * * @param charset */ acceptsCharset(charset: string): boolean; /** * Check if the given `lang` is acceptable, * otherwise you should respond with 406 "Not Acceptable". * * @param lang */ acceptsLanguage(lang: string): boolean; /** * Parse Range header field, * capping to the given `size`. * * Unspecified ranges such as "0-" require * knowledge of your resource length. In * the case of a byte range this is of course * the total number of bytes. If the Range * header field is not given `null` is returned, * `-1` when unsatisfiable, `-2` when syntactically invalid. * * NOTE: remember that ranges are inclusive, so * for example "Range: users=0-3" should respond * with 4 users when available, not 3. * * @param size */ range(size: number): any[]; /** * Return an array of Accepted media types * ordered from highest quality to lowest. */ accepted: MediaType[]; /** * Return an array of Accepted languages * ordered from highest quality to lowest. * * Examples: * * Accept-Language: en;q=.5, en-us * ['en-us', 'en'] */ acceptedLanguages: any[]; /** * Return an array of Accepted charsets * ordered from highest quality to lowest. * * Examples: * * Accept-Charset: iso-8859-5;q=.2, unicode-1-1;q=0.8 * ['unicode-1-1', 'iso-8859-5'] */ acceptedCharsets: any[]; /** * Return the value of param `name` when present or `defaultValue`. * * - Checks route placeholders, ex: _/user/:id_ * - Checks body params, ex: id=12, {"id":12} * - Checks query string params, ex: ?id=12 * * To utilize request bodies, `req.body` * should be an object. This can be done by using * the `connect.bodyParser()` middleware. * * @param name * @param defaultValue */ param(name: string, defaultValue?: any): string; /** * Check if the incoming request contains the "Content-Type" * header field, and it contains the give mime `type`. * * Examples: * * // With Content-Type: text/html; charset=utf-8 * req.is('html'); * req.is('text/html'); * req.is('text/*'); * // => true * * // When Content-Type is application/json * req.is('json'); * req.is('application/json'); * req.is('application/*'); * // => true * * req.is('html'); * // => false * * @param type */ is(type: string): boolean; /** * Return the protocol string "http" or "https" * when requested with TLS. When the "trust proxy" * setting is enabled the "X-Forwarded-Proto" header * field will be trusted. If you're running behind * a reverse proxy that supplies https for you this * may be enabled. */ protocol: string; /** * Short-hand for: * * req.protocol == 'https' */ secure: boolean; /** * Return the remote address, or when * "trust proxy" is `true` return * the upstream addr. */ ip: string; /** * When "trust proxy" is `true`, parse * the "X-Forwarded-For" ip address list. * * For example if the value were "client, proxy1, proxy2" * you would receive the array `["client", "proxy1", "proxy2"]` * where "proxy2" is the furthest down-stream. */ ips: string[]; /** * Return basic auth credentials. * * Examples: * * // http://tobi:hello@example.com * req.auth * // => { username: 'tobi', password: 'hello' } */ auth: any; /** * Return subdomains as an array. * * Subdomains are the dot-separated parts of the host before the main domain of * the app. By default, the domain of the app is assumed to be the last two * parts of the host. This can be changed by setting "subdomain offset". * * For example, if the domain is "tobi.ferrets.example.com": * If "subdomain offset" is not set, req.subdomains is `["ferrets", "tobi"]`. * If "subdomain offset" is 3, req.subdomains is `["tobi"]`. */ subdomains: string[]; /** * Short-hand for `url.parse(req.url).pathname`. */ path: string; /** * Parse the "Host" header field hostname. */ host: string; /** * Check if the request is fresh, aka * Last-Modified and/or the ETag * still match. */ fresh: boolean; /** * Check if the request is stale, aka * "Last-Modified" and / or the "ETag" for the * resource has changed. */ stale: boolean; /** * Check if the request was an _XMLHttpRequest_. */ xhr: boolean; //body: { username: string; password: string; remember: boolean; title: string; }; body: any; //cookies: { string; remember: boolean; }; cookies: any; /** * Used to generate an anti-CSRF token. * Placed by the CSRF protection middleware. */ csrfToken(): string; method: string; params: any; user: any; authenticatedUser: any; files: any; /** * Clear cookie `name`. * * @param name * @param options */ clearCookie(name: string, options?: any): Response; query: any; route: any; signedCookies: any; originalUrl: string; url: string; } interface MediaType { value: string; quality: number; type: string; subtype: string; } interface Send { (status: number, body?: any): Response; (body: any): Response; } interface Response extends http.ServerResponse, Express.Response { /** * Set status `code`. * * @param code */ status(code: number): Response; /** * Set Link header field with the given `links`. * * Examples: * * res.links({ * next: 'http://api.example.com/users?page=2', * last: 'http://api.example.com/users?page=5' * }); * * @param links */ links(links: any): Response; /** * Send a response. * * Examples: * * res.send(new Buffer('wahoo')); * res.send({ some: 'json' }); * res.send('<p>some html</p>'); * res.send(404, 'Sorry, cant find that'); * res.send(404); */ send: Send; /** * Send JSON response. * * Examples: * * res.json(null); * res.json({ user: 'tj' }); * res.json(500, 'oh noes!'); * res.json(404, 'I dont have that'); */ json: Send; /** * Send JSON response with JSONP callback support. * * Examples: * * res.jsonp(null); * res.jsonp({ user: 'tj' }); * res.jsonp(500, 'oh noes!'); * res.jsonp(404, 'I dont have that'); */ jsonp: Send; /** * Transfer the file at the given `path`. * * Automatically sets the _Content-Type_ response header field. * The callback `fn(err)` is invoked when the transfer is complete * or when an error occurs. Be sure to check `res.sentHeader` * if you wish to attempt responding, as the header and some data * may have already been transferred. * * Options: * * - `maxAge` defaulting to 0 * - `root` root directory for relative filenames * * Examples: * * The following example illustrates how `res.sendfile()` may * be used as an alternative for the `static()` middleware for * dynamic situations. The code backing `res.sendfile()` is actually * the same code, so HTTP cache support etc is identical. * * app.get('/user/:uid/photos/:file', function(req, res){ * var uid = req.params.uid * , file = req.params.file; * * req.user.mayViewFilesFrom(uid, function(yes){ * if (yes) { * res.sendfile('/uploads/' + uid + '/' + file); * } else { * res.send(403, 'Sorry! you cant see that.'); * } * }); * }); */ sendfile(path: string): void; sendfile(path: string, options: any): void; sendfile(path: string, fn: Errback): void; sendfile(path: string, options: any, fn: Errback): void; /** * Transfer the file at the given `path` as an attachment. * * Optionally providing an alternate attachment `filename`, * and optional callback `fn(err)`. The callback is invoked * when the data transfer is complete, or when an error has * ocurred. Be sure to check `res.headerSent` if you plan to respond. * * This method uses `res.sendfile()`. */ download(path: string): void; download(path: string, filename: string): void; download(path: string, fn: Errback): void; download(path: string, filename: string, fn: Errback): void; /** * Set _Content-Type_ response header with `type` through `mime.lookup()` * when it does not contain "/", or set the Content-Type to `type` otherwise. * * Examples: * * res.type('.html'); * res.type('html'); * res.type('json'); * res.type('application/json'); * res.type('png'); * * @param type */ contentType(type: string): Response; /** * Set _Content-Type_ response header with `type` through `mime.lookup()` * when it does not contain "/", or set the Content-Type to `type` otherwise. * * Examples: * * res.type('.html'); * res.type('html'); * res.type('json'); * res.type('application/json'); * res.type('png'); * * @param type */ type(type: string): Response; /** * Respond to the Acceptable formats using an `obj` * of mime-type callbacks. * * This method uses `req.accepted`, an array of * acceptable types ordered by their quality values. * When "Accept" is not present the _first_ callback * is invoked, otherwise the first match is used. When * no match is performed the server responds with * 406 "Not Acceptable". * * Content-Type is set for you, however if you choose * you may alter this within the callback using `res.type()` * or `res.set('Content-Type', ...)`. * * res.format({ * 'text/plain': function(){ * res.send('hey'); * }, * * 'text/html': function(){ * res.send('<p>hey</p>'); * }, * * 'appliation/json': function(){ * res.send({ message: 'hey' }); * } * }); * * In addition to canonicalized MIME types you may * also use extnames mapped to these types: * * res.format({ * text: function(){ * res.send('hey'); * }, * * html: function(){ * res.send('<p>hey</p>'); * }, * * json: function(){ * res.send({ message: 'hey' }); * } * }); * * By default Express passes an `Error` * with a `.status` of 406 to `next(err)` * if a match is not made. If you provide * a `.default` callback it will be invoked * instead. * * @param obj */ format(obj: any): Response; /** * Set _Content-Disposition_ header to _attachment_ with optional `filename`. * * @param filename */ attachment(filename?: string): Response; /** * Set header `field` to `val`, or pass * an object of header fields. * * Examples: * * res.set('Foo', ['bar', 'baz']); * res.set('Accept', 'application/json'); * res.set({ Accept: 'text/plain', 'X-API-Key': 'tobi' }); * * Aliased as `res.header()`. */ set (field: any): Response; set (field: string, value?: string): Response; header(field: any): Response; header(field: string, value?: string): Response; /** * Get value for header `field`. * * @param field */ get (field: string): string; /** * Clear cookie `name`. * * @param name * @param options */ clearCookie(name: string, options?: any): Response; /** * Set cookie `name` to `val`, with the given `options`. * * Options: * * - `maxAge` max-age in milliseconds, converted to `expires` * - `signed` sign the cookie * - `path` defaults to "/" * * Examples: * * // "Remember Me" for 15 minutes * res.cookie('rememberme', '1', { expires: new Date(Date.now() + 900000), httpOnly: true }); * * // save as above * res.cookie('rememberme', '1', { maxAge: 900000, httpOnly: true }) */ cookie(name: string, val: string, options: CookieOptions): Response; cookie(name: string, val: any, options: CookieOptions): Response; cookie(name: string, val: any): Response; /** * Set the location header to `url`. * * The given `url` can also be the name of a mapped url, for * example by default express supports "back" which redirects * to the _Referrer_ or _Referer_ headers or "/". * * Examples: * * res.location('/foo/bar').; * res.location('http://example.com'); * res.location('../login'); // /blog/post/1 -> /blog/login * * Mounting: * * When an application is mounted and `res.location()` * is given a path that does _not_ lead with "/" it becomes * relative to the mount-point. For example if the application * is mounted at "/blog", the following would become "/blog/login". * * res.location('login'); * * While the leading slash would result in a location of "/login": * * res.location('/login'); * * @param url */ location(url: string): Response; /** * Redirect to the given `url` with optional response `status` * defaulting to 302. * * The resulting `url` is determined by `res.location()`, so * it will play nicely with mounted apps, relative paths, * `"back"` etc. * * Examples: * * res.redirect('/foo/bar'); * res.redirect('http://example.com'); * res.redirect(301, 'http://example.com'); * res.redirect('http://example.com', 301); * res.redirect('../login'); // /blog/post/1 -> /blog/login */ redirect(url: string): void; redirect(status: number, url: string): void; redirect(url: string, status: number): void; /** * Render `view` with the given `options` and optional callback `fn`. * When a callback function is given a response will _not_ be made * automatically, otherwise a response of _200_ and _text/html_ is given. * * Options: * * - `cache` boolean hinting to the engine it should cache * - `filename` filename of the view being rendered */ render(view: string, options?: Object, callback?: (err: Error, html: string) => void ): void; render(view: string, callback?: (err: Error, html: string) => void ): void; locals: any; charset: string; } interface RequestFunction { (req: Request, res: Response, next: Function): any; } interface Application extends IRouter<Application>, Express.Application { /** * Initialize the server. * * - setup default configuration * - setup default middleware * - setup route reflection methods */ init(): void; /** * Initialize application configuration. */ defaultConfiguration(): void; /** * Proxy `connect#use()` to apply settings to * mounted applications. **/ use(route: string, callback?: Function): Application; use(route: string, server: Application): Application; use(route: string, router: Router) : Application; use(callback: Function): Application; use(server: Application): Application; /** * Register the given template engine callback `fn` * as `ext`. * * By default will `require()` the engine based on the * file extension. For example if you try to render * a "foo.jade" file Express will invoke the following internally: * * app.engine('jade', require('jade').__express); * * For engines that do not provide `.__express` out of the box, * or if you wish to "map" a different extension to the template engine * you may use this method. For example mapping the EJS template engine to * ".html" files: * * app.engine('html', require('ejs').renderFile); * * In this case EJS provides a `.renderFile()` method with * the same signature that Express expects: `(path, options, callback)`, * though note that it aliases this method as `ejs.__express` internally * so if you're using ".ejs" extensions you dont need to do anything. * * Some template engines do not follow this convention, the * [Consolidate.js](https://github.com/visionmedia/consolidate.js) * library was created to map all of node's popular template * engines to follow this convention, thus allowing them to * work seamlessly within Express. */ engine(ext: string, fn: Function): Application; param(name: string, fn: Function): Application; param(name: string[], fn: Function): Application; /** * Assign `setting` to `val`, or return `setting`'s value. * * app.set('foo', 'bar'); * app.get('foo'); * // => "bar" * * Mounted servers inherit their parent server's settings. * * @param setting * @param val */ set (setting: string, val: string): Application; get(name: string): string; get(name: string, ...handlers: RequestFunction[]): Application; get(name: RegExp, ...handlers: RequestFunction[]): Application; /** * Return the app's absolute pathname * based on the parent(s) that have * mounted it. * * For example if the application was * mounted as "/admin", which itself * was mounted as "/blog" then the * return value would be "/blog/admin". */ path(): string; /** * Check if `setting` is enabled (truthy). * * app.enabled('foo') * // => false * * app.enable('foo') * app.enabled('foo') * // => true */ enabled(setting: string): boolean; /** * Check if `setting` is disabled. * * app.disabled('foo') * // => true * * app.enable('foo') * app.disabled('foo') * // => false * * @param setting */ disabled(setting: string): boolean; /** * Enable `setting`. * * @param setting */ enable(setting: string): Application; /** * Disable `setting`. * * @param setting */ disable(setting: string): Application; /** * Configure callback for zero or more envs, * when no `env` is specified that callback will * be invoked for all environments. Any combination * can be used multiple times, in any order desired. * * Examples: * * app.configure(function(){ * // executed for all envs * }); * * app.configure('stage', function(){ * // executed staging env * }); * * app.configure('stage', 'production', function(){ * // executed for stage and production * }); * * Note: * * These callbacks are invoked immediately, and * are effectively sugar for the following: * * var env = process.env.NODE_ENV || 'development'; * * switch (env) { * case 'development': * ... * break; * case 'stage': * ... * break; * case 'production': * ... * break; * } * * @param env * @param fn */ configure(env: string, fn: Function): Application; configure(env0: string, env1: string, fn: Function): Application; configure(env0: string, env1: string, env2: string, fn: Function): Application; configure(env0: string, env1: string, env2: string, env3: string, fn: Function): Application; configure(env0: string, env1: string, env2: string, env3: string, env4: string, fn: Function): Application; configure(fn: Function): Application; /** * Render the given view `name` name with `options` * and a callback accepting an error and the * rendered template string. * * Example: * * app.render('email', { name: 'Tobi' }, function(err, html){ * // ... * }) * * @param name * @param options or fn * @param fn */ render(name: string, options?: Object, callback?: (err: Error, html: string) => void): void; render(name: string, callback: (err: Error, html: string) => void): void; /** * Listen for connections. * * A node `http.Server` is returned, with this * application (which is a `Function`) as its * callback. If you wish to create both an HTTP * and HTTPS server you may do so with the "http" * and "https" modules as shown here: * * var http = require('http') * , https = require('https') * , express = require('express') * , app = express(); * * http.createServer(app).listen(80); * https.createServer({ ... }, app).listen(443); */ listen(port: number, hostname: string, backlog: number, callback?: Function): http.Server; listen(port: number, hostname: string, callback?: Function): http.Server; listen(port: number, callback?: Function): http.Server; listen(path: string, callback?: Function): http.Server; listen(handle: any, listeningListener?: Function): http.Server; route: IRoute; router: string; settings: any; resource: any; map: any; locals: any; /** * The app.routes object houses all of the routes defined mapped by the * associated HTTP verb. This object may be used for introspection * capabilities, for example Express uses this internally not only for * routing but to provide default OPTIONS behaviour unless app.options() * is used. Your application or framework may also remove routes by * simply by removing them from this object. */ routes: any; } interface Express extends Application { /** * Framework version. */ version: string; /** * Expose mime. */ mime: string; (): Application; /** * Create an express application. */ createApplication(): Application; createServer(): Application; application: any; request: Request; response: Response; } /** * Body parser: * * Parse request bodies, supports _application/json_, * _application/x-www-form-urlencoded_, and _multipart/form-data_. * * This is equivalent to: * * app.use(connect.json()); * app.use(connect.urlencoded()); * app.use(connect.multipart()); * * Examples: * * connect() * .use(connect.bodyParser()) * .use(function(req, res) { * res.end('viewing user ' + req.body.user.name); * }); * * $ curl -d 'user[name]=tj' http://local/ * $ curl -d '{"user":{"name":"tj"}}' -H "Content-Type: application/json" http://local/ * * View [json](json.html), [urlencoded](urlencoded.html), and [multipart](multipart.html) for more info. * * @param options */ function bodyParser(options?: any): Handler; /** * Error handler: * * Development error handler, providing stack traces * and error message responses for requests accepting text, html, * or json. * * Text: * * By default, and when _text/plain_ is accepted a simple stack trace * or error message will be returned. * * JSON: * * When _application/json_ is accepted, connect will respond with * an object in the form of `{ "error": error }`. * * HTML: * * When accepted connect will output a nice html stack trace. */ function errorHandler(opts?: any): Handler; /** * Method Override: * * Provides faux HTTP method support. * * Pass an optional `key` to use when checking for * a method override, othewise defaults to _\_method_. * The original method is available via `req.originalMethod`. * * @param key */ function methodOverride(key?: string): Handler; /** * Cookie parser: * * Parse _Cookie_ header and populate `req.cookies` * with an object keyed by the cookie names. Optionally * you may enabled signed cookie support by passing * a `secret` string, which assigns `req.secret` so * it may be used by other middleware. * * Examples: * * connect() * .use(connect.cookieParser('optional secret string')) * .use(function(req, res, next){ * res.end(JSON.stringify(req.cookies)); * }) * * @param secret */ function cookieParser(secret?: string): Handler; /** * Session: * * Setup session store with the given `options`. * * Session data is _not_ saved in the cookie itself, however * cookies are used, so we must use the [cookieParser()](cookieParser.html) * middleware _before_ `session()`. * * Examples: * * connect() * .use(connect.cookieParser()) * .use(connect.session({ secret: 'keyboard cat', key: 'sid', cookie: { secure: true }})) * * Options: * * - `key` cookie name defaulting to `connect.sid` * - `store` session store instance * - `secret` session cookie is signed with this secret to prevent tampering * - `cookie` session cookie settings, defaulting to `{ path: '/', httpOnly: true, maxAge: null }` * - `proxy` trust the reverse proxy when setting secure cookies (via "x-forwarded-proto") * * Cookie option: * * By default `cookie.maxAge` is `null`, meaning no "expires" parameter is set * so the cookie becomes a browser-session cookie. When the user closes the * browser the cookie (and session) will be removed. * * ## req.session * * To store or access session data, simply use the request property `req.session`, * which is (generally) serialized as JSON by the store, so nested objects * are typically fine. For example below is a user-specific view counter: * * connect() * .use(connect.favicon()) * .use(connect.cookieParser()) * .use(connect.session({ secret: 'keyboard cat', cookie: { maxAge: 60000 }})) * .use(function(req, res, next){ * var sess = req.session; * if (sess.views) { * res.setHeader('Content-Type', 'text/html'); * res.write('<p>views: ' + sess.views + '</p>'); * res.write('<p>expires in: ' + (sess.cookie.maxAge / 1000) + 's</p>'); * res.end(); * sess.views++; * } else { * sess.views = 1; * res.end('welcome to the session demo. refresh!'); * } * } * )).listen(3000); * * ## Session#regenerate() * * To regenerate the session simply invoke the method, once complete * a new SID and `Session` instance will be initialized at `req.session`. * * req.session.regenerate(function(err){ * // will have a new session here * }); * * ## Session#destroy() * * Destroys the session, removing `req.session`, will be re-generated next request. * * req.session.destroy(function(err){ * // cannot access session here * }); * * ## Session#reload() * * Reloads the session data. * * req.session.reload(function(err){ * // session updated * }); * * ## Session#save() * * Save the session. * * req.session.save(function(err){ * // session saved * }); * * ## Session#touch() * * Updates the `.maxAge` property. Typically this is * not necessary to call, as the session middleware does this for you. * * ## Session#cookie * * Each session has a unique cookie object accompany it. This allows * you to alter the session cookie per visitor. For example we can * set `req.session.cookie.expires` to `false` to enable the cookie * to remain for only the duration of the user-agent. * * ## Session#maxAge * * Alternatively `req.session.cookie.maxAge` will return the time * remaining in milliseconds, which we may also re-assign a new value * to adjust the `.expires` property appropriately. The following * are essentially equivalent * * var hour = 3600000; * req.session.cookie.expires = new Date(Date.now() + hour); * req.session.cookie.maxAge = hour; * * For example when `maxAge` is set to `60000` (one minute), and 30 seconds * has elapsed it will return `30000` until the current request has completed, * at which time `req.session.touch()` is called to reset `req.session.maxAge` * to its original value. * * req.session.cookie.maxAge; * // => 30000 * * Session Store Implementation: * * Every session store _must_ implement the following methods * * - `.get(sid, callback)` * - `.set(sid, session, callback)` * - `.destroy(sid, callback)` * * Recommended methods include, but are not limited to: * * - `.length(callback)` * - `.clear(callback)` * * For an example implementation view the [connect-redis](http://github.com/visionmedia/connect-redis) repo. * * @param options */ function session(options?: any): Handler; /** * Hash the given `sess` object omitting changes * to `.cookie`. * * @param sess */ function hash(sess: string): string; /** * Static: * * Static file server with the given `root` path. * * Examples: * * var oneDay = 86400000; * * connect() * .use(connect.static(__dirname + '/public')) * * connect() * .use(connect.static(__dirname + '/public', { maxAge: oneDay })) * * Options: * * - `maxAge` Browser cache maxAge in milliseconds. defaults to 0 * - `hidden` Allow transfer of hidden files. defaults to false * - `redirect` Redirect to trailing "/" when the pathname is a dir. defaults to true * * @param root * @param options */ function static(root: string, options?: any): Handler; /** * Basic Auth: * * Enfore basic authentication by providing a `callback(user, pass)`, * which must return `true` in order to gain access. Alternatively an async * method is provided as well, invoking `callback(user, pass, callback)`. Populates * `req.user`. The final alternative is simply passing username / password * strings. * * Simple username and password * * connect(connect.basicAuth('username', 'password')); * * Callback verification * * connect() * .use(connect.basicAuth(function(user, pass){ * return 'tj' == user & 'wahoo' == pass; * })) * * Async callback verification, accepting `fn(err, user)`. * * connect() * .use(connect.basicAuth(function(user, pass, fn){ * User.authenticate({ user: user, pass: pass }, fn); * })) * * @param callback or username * @param realm */ export function basicAuth(callback: (user: string, pass: string, fn : Function) => void, realm?: string): Handler; export function basicAuth(callback: (user: string, pass: string) => boolean, realm?: string): Handler; export function basicAuth(user: string, pass: string, realm?: string): Handler; /** * Compress: * * Compress response data with gzip/deflate. * * Filter: * * A `filter` callback function may be passed to * replace the default logic of: * * exports.filter = function(req, res){ * return /json|text|javascript/.test(res.getHeader('Content-Type')); * }; * * Options: * * All remaining options are passed to the gzip/deflate * creation functions. Consult node's docs for additional details. * * - `chunkSize` (default: 16*1024) * - `windowBits` * - `level`: 0-9 where 0 is no compression, and 9 is slow but best compression * - `memLevel`: 1-9 low is slower but uses less memory, high is fast but uses more * - `strategy`: compression strategy * * @param options */ function compress(options?: any): Handler; /** * Cookie Session: * * Cookie session middleware. * * var app = connect(); * app.use(connect.cookieParser()); * app.use(connect.cookieSession({ secret: 'tobo!', cookie: { maxAge: 60 * 60 * 1000 }})); * * Options: * * - `key` cookie name defaulting to `connect.sess` * - `secret` prevents cookie tampering * - `cookie` session cookie settings, defaulting to `{ path: '/', httpOnly: true, maxAge: null }` * - `proxy` trust the reverse proxy when setting secure cookies (via "x-forwarded-proto") * * Clearing sessions: * * To clear the session simply set its value to `null`, * `cookieSession()` will then respond with a 1970 Set-Cookie. * * req.session = null; * * @param options */ function cookieSession(options?: any): Handler; /** * Anti CSRF: * * CSRF protection middleware. * * This middleware adds a `req.csrfToken()` function to make a token * which should be added to requests which mutate * state, within a hidden form field, query-string etc. This * token is validated against the visitor's session. * * The default `value` function checks `req.body` generated * by the `bodyParser()` middleware, `req.query` generated * by `query()`, and the "X-CSRF-Token" header field. * * This middleware requires session support, thus should be added * somewhere _below_ `session()` and `cookieParser()`. * * Options: * * - `value` a function accepting the request, returning the token * * @param options */ export function csrf(options?: {value?: Function}): Handler; /** * Directory: * * Serve directory listings with the given `root` path. * * Options: * * - `hidden` display hidden (dot) files. Defaults to false. * - `icons` display icons. Defaults to false. * - `filter` Apply this filter function to files. Defaults to false. * * @param root * @param options */ function directory(root: string, options?: any): Handler; /** * Favicon: * * By default serves the connect favicon, or the favicon * located by the given `path`. * * Options: * * - `maxAge` cache-control max-age directive, defaulting to 1 day * * Examples: * * Serve default favicon: * * connect() * .use(connect.favicon()) * * Serve favicon before logging for brevity: * * connect() * .use(connect.favicon()) * .use(connect.logger('dev')) * * Serve custom favicon: * * connect() * .use(connect.favicon('public/favicon.ico)) * * @param path * @param options */ export function favicon(path?: string, options?: any): Handler; /** * JSON: * * Parse JSON request bodies, providing the * parsed object as `req.body`. * * Options: * * - `strict` when `false` anything `JSON.parse()` accepts will be parsed * - `reviver` used as the second "reviver" argument for JSON.parse * - `limit` byte limit disabled by default * * @param options */ function json(options?: any): Handler; /** * Limit: * * Limit request bodies to the given size in `bytes`. * * A string representation of the bytesize may also be passed, * for example "5mb", "200kb", "1gb", etc. * * connect() * .use(connect.limit('5.5mb')) * .use(handleImageUpload) */ function limit(bytes: number): Handler; function limit(bytes: string): Handler; /** * Logger: * * Log requests with the given `options` or a `format` string. * * Options: * * - `format` Format string, see below for tokens * - `stream` Output stream, defaults to _stdout_ * - `buffer` Buffer duration, defaults to 1000ms when _true_ * - `immediate` Write log line on request instead of response (for response times) * * Tokens: * * - `:req[header]` ex: `:req[Accept]` * - `:res[header]` ex: `:res[Content-Length]` * - `:http-version` * - `:response-time` * - `:remote-addr` * - `:date` * - `:method` * - `:url` * - `:referrer` * - `:user-agent` * - `:status` * * Formats: * * Pre-defined formats that ship with connect: * * - `default` ':remote-addr - - [:date] ":method :url HTTP/:http-version" :status :res[content-length] ":referrer" ":user-agent"' * - `short` ':remote-addr - :method :url HTTP/:http-version :status :res[content-length] - :response-time ms' * - `tiny` ':method :url :status :res[content-length] - :response-time ms' * - `dev` concise output colored by response status for development use * * Examples: * * connect.logger() // default * connect.logger('short') * connect.logger('tiny') * connect.logger({ immediate: true, format: 'dev' }) * connect.logger(':method :url - :referrer') * connect.logger(':req[content-type] -> :res[content-type]') * connect.logger(function(tokens, req, res){ return 'some format string' }) * * Defining Tokens: * * To define a token, simply invoke `connect.logger.token()` with the * name and a callback function. The value returned is then available * as ":type" in this case. * * connect.logger.token('type', function(req, res){ return req.headers['content-type']; }) * * Defining Formats: * * All default formats are defined this way, however it's public API as well: * * connect.logger.format('name', 'string or function') */ function logger(options: string): Handler; function logger(options: Function): Handler; function logger(options?: any): Handler; /** * Compile `fmt` into a function. * * @param fmt */ function compile(fmt: string): Handler; /** * Define a token function with the given `name`, * and callback `fn(req, res)`. * * @param name * @param fn */ function token(name: string, fn: Function): any; /** * Define a `fmt` with the given `name`. */ function format(name: string, str: string): any; function format(name: string, str: Function): any; /** * Query: * * Automatically parse the query-string when available, * populating the `req.query` object. * * Examples: * * connect() * .use(connect.query()) * .use(function(req, res){ * res.end(JSON.stringify(req.query)); * }); * * The `options` passed are provided to qs.parse function. */ function query(options: any): Handler; /** * Reponse time: * * Adds the `X-Response-Time` header displaying the response * duration in milliseconds. */ function responseTime(): Handler; /** * Static cache: * * Enables a memory cache layer on top of * the `static()` middleware, serving popular * static files. * * By default a maximum of 128 objects are * held in cache, with a max of 256k each, * totalling ~32mb. * * A Least-Recently-Used (LRU) cache algo * is implemented through the `Cache` object, * simply rotating cache objects as they are * hit. This means that increasingly popular * objects maintain their positions while * others get shoved out of the stack and * garbage collected. * * Benchmarks: * * static(): 2700 rps * node-static: 5300 rps * static() + staticCache(): 7500 rps * * Options: * * - `maxObjects` max cache objects [128] * - `maxLength` max cache object length 256kb */ function staticCache(options: any): Handler; /** * Timeout: * * Times out the request in `ms`, defaulting to `5000`. The * method `req.clearTimeout()` is added to revert this behaviour * programmatically within your application's middleware, routes, etc. * * The timeout error is passed to `next()` so that you may customize * the response behaviour. This error has the `.timeout` property as * well as `.status == 408`. */ function timeout(ms: number): Handler; /** * Vhost: * * Setup vhost for the given `hostname` and `server`. * * connect() * .use(connect.vhost('foo.com', fooApp)) * .use(connect.vhost('bar.com', barApp)) * .use(connect.vhost('*.com', mainApp)) * * The `server` may be a Connect server or * a regular Node `http.Server`. * * @param hostname * @param server */ function vhost(hostname: string, server: any): Handler; function urlencoded(): any; function multipart(): any; } export = e; }
mit
jakubpawlowicz/clean-css
lib/optimizer/level-1/optimize.js
9392
var sortSelectors = require('./sort-selectors'); var tidyRules = require('./tidy-rules'); var tidyBlock = require('./tidy-block'); var tidyAtRule = require('./tidy-at-rule'); var Hack = require('../hack'); var removeUnused = require('../remove-unused'); var restoreFromOptimizing = require('../restore-from-optimizing'); var wrapForOptimizing = require('../wrap-for-optimizing').all; var configuration = require('../configuration'); var optimizers = require('./value-optimizers'); var OptimizationLevel = require('../../options/optimization-level').OptimizationLevel; var Token = require('../../tokenizer/token'); var Marker = require('../../tokenizer/marker'); var formatPosition = require('../../utils/format-position'); var serializeRules = require('../../writer/one-time').rules; var CHARSET_TOKEN = '@charset'; var CHARSET_REGEXP = new RegExp('^' + CHARSET_TOKEN, 'i'); var DEFAULT_ROUNDING_PRECISION = require('../../options/rounding-precision').DEFAULT; var PROPERTY_NAME_PATTERN = /^(?:\-chrome\-|\-[\w\-]+\w|\w[\w\-]+\w|\w{1,}|\-\-\S+)$/; var IMPORT_PREFIX_PATTERN = /^@import/i; var URL_PREFIX_PATTERN = /^url\(/i; var VARIABLE_NAME_PATTERN = /^--\S+$/; function startsAsUrl(value) { return URL_PREFIX_PATTERN.test(value); } function isImport(token) { return IMPORT_PREFIX_PATTERN.test(token[1]); } function isLegacyFilter(property) { var value; if (property.name == 'filter' || property.name == '-ms-filter') { value = property.value[0][1]; return value.indexOf('progid') > -1 || value.indexOf('alpha') === 0 || value.indexOf('chroma') === 0; } else { return false; } } function noop() {} function optimizeBody(rule, properties, context) { var options = context.options; var valueOptimizers; var property, name, type, value; var propertyToken; var propertyOptimizer; var serializedRule = serializeRules(rule); var _properties = wrapForOptimizing(properties); var pluginValueOptimizers = context.options.plugins.level1Value; var pluginPropertyOptimizers = context.options.plugins.level1Property; var i, l; propertyLoop: for (i = 0, l = _properties.length; i < l; i++) { var j, k, m, n; property = _properties[i]; name = property.name; propertyOptimizer = configuration[name] && configuration[name].propertyOptimizer || noop; valueOptimizers = configuration[name] && configuration[name].valueOptimizers || [optimizers.whiteSpace]; if (!PROPERTY_NAME_PATTERN.test(name)) { propertyToken = property.all[property.position]; context.warnings.push('Invalid property name \'' + name + '\' at ' + formatPosition(propertyToken[1][2][0]) + '. Ignoring.'); property.unused = true; continue; } if (property.value.length === 0) { propertyToken = property.all[property.position]; context.warnings.push('Empty property \'' + name + '\' at ' + formatPosition(propertyToken[1][2][0]) + '. Ignoring.'); property.unused = true; continue; } if (property.hack && ( (property.hack[0] == Hack.ASTERISK || property.hack[0] == Hack.UNDERSCORE) && !options.compatibility.properties.iePrefixHack || property.hack[0] == Hack.BACKSLASH && !options.compatibility.properties.ieSuffixHack || property.hack[0] == Hack.BANG && !options.compatibility.properties.ieBangHack)) { property.unused = true; continue; } if (!options.compatibility.properties.ieFilters && isLegacyFilter(property)) { property.unused = true; continue; } if (property.block) { optimizeBody(rule, property.value[0][1], context); continue; } if (VARIABLE_NAME_PATTERN.test(name)) { continue; } valuesLoop: for (j = 0, m = property.value.length; j < m; j++) { type = property.value[j][0]; value = property.value[j][1]; if (type == Token.PROPERTY_BLOCK) { property.unused = true; context.warnings.push('Invalid value token at ' + formatPosition(value[0][1][2][0]) + '. Ignoring.'); break; } if (startsAsUrl(value) && !context.validator.isUrl(value)) { property.unused = true; context.warnings.push('Broken URL \'' + value + '\' at ' + formatPosition(property.value[j][2][0]) + '. Ignoring.'); break; } for (k = 0, n = valueOptimizers.length; k < n; k++) { value = valueOptimizers[k](name, value, options); } for (k = 0, n = pluginValueOptimizers.length; k < n; k++) { value = pluginValueOptimizers[k](name, value, options); } property.value[j][1] = value; } propertyOptimizer(serializedRule, property, options); for (j = 0, m = pluginPropertyOptimizers.length; j < m; j++) { pluginPropertyOptimizers[j](serializedRule, property, options); } } restoreFromOptimizing(_properties); removeUnused(_properties); removeComments(properties, options); } function removeComments(tokens, options) { var token; var i; for (i = 0; i < tokens.length; i++) { token = tokens[i]; if (token[0] != Token.COMMENT) { continue; } optimizeComment(token, options); if (token[1].length === 0) { tokens.splice(i, 1); i--; } } } function optimizeComment(token, options) { if (token[1][2] == Marker.EXCLAMATION && (options.level[OptimizationLevel.One].specialComments == 'all' || options.commentsKept < options.level[OptimizationLevel.One].specialComments)) { options.commentsKept++; return; } token[1] = []; } function cleanupCharsets(tokens) { var hasCharset = false; for (var i = 0, l = tokens.length; i < l; i++) { var token = tokens[i]; if (token[0] != Token.AT_RULE) continue; if (!CHARSET_REGEXP.test(token[1])) continue; if (hasCharset || token[1].indexOf(CHARSET_TOKEN) == -1) { tokens.splice(i, 1); i--; l--; } else { hasCharset = true; tokens.splice(i, 1); tokens.unshift([Token.AT_RULE, token[1].replace(CHARSET_REGEXP, CHARSET_TOKEN)]); } } } function buildUnitRegexp(options) { var units = ['px', 'em', 'ex', 'cm', 'mm', 'in', 'pt', 'pc', '%']; var otherUnits = ['ch', 'rem', 'vh', 'vm', 'vmax', 'vmin', 'vw']; otherUnits.forEach(function (unit) { if (options.compatibility.units[unit]) { units.push(unit); } }); return new RegExp('(^|\\s|\\(|,)0(?:' + units.join('|') + ')(\\W|$)', 'g'); } function buildPrecisionOptions(roundingPrecision) { var precisionOptions = { matcher: null, units: {}, }; var optimizable = []; var unit; var value; for (unit in roundingPrecision) { value = roundingPrecision[unit]; if (value != DEFAULT_ROUNDING_PRECISION) { precisionOptions.units[unit] = {}; precisionOptions.units[unit].value = value; precisionOptions.units[unit].multiplier = Math.pow(10, value); optimizable.push(unit); } } if (optimizable.length > 0) { precisionOptions.enabled = true; precisionOptions.decimalPointMatcher = new RegExp('(\\d)\\.($|' + optimizable.join('|') + ')($|\\W)', 'g'); precisionOptions.zeroMatcher = new RegExp('(\\d*)(\\.\\d+)(' + optimizable.join('|') + ')', 'g'); } return precisionOptions; } function level1Optimize(tokens, context) { var options = context.options; var levelOptions = options.level[OptimizationLevel.One]; var ie7Hack = options.compatibility.selectors.ie7Hack; var adjacentSpace = options.compatibility.selectors.adjacentSpace; var spaceAfterClosingBrace = options.compatibility.properties.spaceAfterClosingBrace; var format = options.format; var mayHaveCharset = false; var afterRules = false; options.unitsRegexp = options.unitsRegexp || buildUnitRegexp(options); options.precision = options.precision || buildPrecisionOptions(levelOptions.roundingPrecision); options.commentsKept = options.commentsKept || 0; for (var i = 0, l = tokens.length; i < l; i++) { var token = tokens[i]; switch (token[0]) { case Token.AT_RULE: token[1] = isImport(token) && afterRules ? '' : token[1]; token[1] = levelOptions.tidyAtRules ? tidyAtRule(token[1]) : token[1]; mayHaveCharset = true; break; case Token.AT_RULE_BLOCK: optimizeBody(token[1], token[2], context); afterRules = true; break; case Token.NESTED_BLOCK: token[1] = levelOptions.tidyBlockScopes ? tidyBlock(token[1], spaceAfterClosingBrace) : token[1]; level1Optimize(token[2], context); afterRules = true; break; case Token.COMMENT: optimizeComment(token, options); break; case Token.RULE: token[1] = levelOptions.tidySelectors ? tidyRules(token[1], !ie7Hack, adjacentSpace, format, context.warnings) : token[1]; token[1] = token[1].length > 1 ? sortSelectors(token[1], levelOptions.selectorsSortingMethod) : token[1]; optimizeBody(token[1], token[2], context); afterRules = true; break; } if (token[0] == Token.COMMENT && token[1].length === 0 || levelOptions.removeEmpty && (token[1].length === 0 || (token[2] && token[2].length === 0))) { tokens.splice(i, 1); i--; l--; } } if (levelOptions.cleanupCharsets && mayHaveCharset) { cleanupCharsets(tokens); } return tokens; } module.exports = level1Optimize;
mit
Eelco81/server-test-project
Lib/AppUtils/Src/HoldingThreadTester.cpp
409
#include "gmock/gmock.h" #include "HoldingThread.h" namespace { class TestObject { public: MOCK_METHOD0 (Hold, void ()); }; } TEST (HoldingThreadTester, Hold) { TestObject object; EXPECT_CALL (object, Hold ()) .WillOnce (::testing::Return ()); APP::HoldingThread <TestObject> thread ("HoldingThread", &object, &TestObject::Hold); thread.Spawn (); }
mit
jakutis/httpinvoke
test/converters.js
850
var cfg = require('../dummyserver-config'); var httpinvoke = require('../httpinvoke-node'); describe('"converters" option', function() { 'use strict'; this.timeout(10000); it('basically works', function(done) { httpinvoke(cfg.sameOriginUrl + 'boolean', 'POST', { input: true, inputType: 'boolean', outputType: 'boolean', converters: { 'text boolean': Boolean, 'boolean text': String }, finished: function(err, output) { if(err) { return done(err); } if(output !== true) { return done(new Error('Expected output to be converted to a boolean value "true"')); } done(); } }); }); });
mit
ls1intum/ArTEMiS
src/test/javascript/spec/component/programming-exercise/programming-exercise-instruction-testcase-status.spec.ts
3671
import { DebugElement, SimpleChange } from '@angular/core'; import { async, ComponentFixture, TestBed } from '@angular/core/testing'; import { AceEditorModule } from 'ng2-ace-editor'; import * as chai from 'chai'; import { ParticipationType, ProgrammingExerciseInstructorStatusComponent } from 'app/entities/programming-exercise'; import { ArTEMiSTestModule } from '../../test.module'; import { TranslateModule } from '@ngx-translate/core'; import { By } from '@angular/platform-browser'; import { NgbModule } from '@ng-bootstrap/ng-bootstrap'; import { ProgrammingExerciseInstructionTestcaseStatusComponent } from '../../../../../main/webapp/app/entities/programming-exercise'; import { TaskCommand } from '../../../../../main/webapp/app/markdown-editor/domainCommands/programming-exercise/task.command'; const expect = chai.expect; describe('ProgrammingExerciseInstructionTestcaseStatusComponent', () => { let comp: ProgrammingExerciseInstructorStatusComponent; let fixture: ComponentFixture<ProgrammingExerciseInstructorStatusComponent>; let debugElement: DebugElement; const taskCommand = new TaskCommand(); const taskRegex = taskCommand.getTagRegex('g'); const exerciseTestCases = ['test1', 'test2', 'test6', 'test7']; const problemStatement = '1. [task][SortStrategy Interface](test1,test2) \n 2. [task][SortStrategy Interface](test3) \n lorem ipsum \n lorem \n 3. [task][SortStrategy Interface](test2,test4)'; const problemStatementTasks = ['[task][SortStrategy Interface](test1,test2)', '[task][SortStrategy Interface](test3)', '[task][SortStrategy Interface](test2,test4)']; const problemStatementTestCases = ['test1', 'test2', 'test3', 'test4']; const missingTestCases = ['test6', 'test7']; const invalidTestCases = ['test3', 'test4']; beforeEach(async(() => { TestBed.configureTestingModule({ imports: [TranslateModule.forRoot(), ArTEMiSTestModule, AceEditorModule, NgbModule], declarations: [ProgrammingExerciseInstructionTestcaseStatusComponent], }) .compileComponents() .then(() => { fixture = TestBed.createComponent(ProgrammingExerciseInstructionTestcaseStatusComponent); debugElement = fixture.debugElement; comp = fixture.componentInstance as ProgrammingExerciseInstructionTestcaseStatusComponent; }); })); it('should not render if no test cases were provided', () => { comp.problemStatement = problemStatement; comp.taskRegex = taskRegex; const changes = [{ problemStatement: new SimpleChange(undefined, problemStatement, true) }]; comp.ngOnChanges(changes); fixture.detectChanges(); expect(debugElement.nativeElement.innerHtml).to.be.undefined; expect(comp.missingTestCases).to.be.empty; expect(comp.invalidTestCases).to.be.empty; }); it('should render warnings on missing and invalid test cases', () => { comp.problemStatement = problemStatement; comp.taskRegex = taskRegex; comp.exerciseTestCases = exerciseTestCases; const changes = { problemStatement: new SimpleChange(undefined, problemStatement, false), exerciseTestCases: new SimpleChange(undefined, exerciseTestCases, false) }; comp.ngOnChanges(changes); fixture.detectChanges(); expect(debugElement.nativeElement.innerHtml).not.to.equal(''); expect(debugElement.query(By.css('fa-icon'))).to.exist; expect(comp.missingTestCases).to.be.deep.equal(missingTestCases); expect(comp.invalidTestCases).to.be.deep.equal(invalidTestCases); }); });
mit
bondcs/sistema-agp
src/Agp/AdminBundle/Entity/EntradaProduto.php
4143
<?php namespace Agp\AdminBundle\Entity; use Doctrine\ORM\Mapping as ORM; use Symfony\Component\Validator\Constraints as Assert; /** * Agp\AdminBundle\Entity\EntradaProduto * * @ORM\Table(name="entrada_produto") * @ORM\Entity(repositoryClass="Agp\AdminBundle\Repository\EntradaProdutoRepository") */ class EntradaProduto { /** * @var integer $codEntradaProduto * * @ORM\Column(name="cod_entrada_produto", type="integer", nullable=false) * @ORM\Id * @ORM\GeneratedValue(strategy="AUTO") */ protected $codEntradaProduto; /** * @var float $vlrCusto * * @ORM\Column(name="vlr_custo", type="float", nullable=true) * @Assert\NotBlank */ protected $vlrCusto; /** * @var float $qtde * * @ORM\Column(name="qtde", type="float", nullable=true) * @Assert\NotBlank */ protected $qtde; /** * @var \DateTime $dtEntrada * * @ORM\Column(name="dt_entrada", type="datetime", nullable=true) */ protected $dtEntrada; /** * * @ORM\ManyToOne(targetEntity="Produto", cascade={"persist", "merge"}) * @ORM\JoinColumn(name="cod_produto", referencedColumnName="cod_produto") */ protected $produto; /** * * @ORM\ManyToOne(targetEntity="Fornecedor", cascade={"persist", "merge"}) * @ORM\JoinColumn(name="cod_fornecedor", referencedColumnName="cod_fornecedor") * @Assert\NotBlank */ protected $fornecedor; public function __construct() { $this->dtEntrada = new \DateTime; } public function atualizaProdutoEstoque($qtdeOld){ $diff = $this->getQtde() - $qtdeOld; $this->getProduto()->atualizaQtde($diff); } public function getCustoTotal(){ return $this->vlrCusto * $this->qtde; } /** * Set codEntradaProduto * * @param integer $codEntradaProduto * @return EntradaProduto */ public function setCodEntradaProduto($codEntradaProduto) { $this->codEntradaProduto = $codEntradaProduto; return $this; } /** * Get codEntradaProduto * * @return integer */ public function getCodEntradaProduto() { return $this->codEntradaProduto; } /** * Set vlrCusto * * @param float $vlrCusto * @return EntradaProduto */ public function setVlrCusto($vlrCusto) { $this->vlrCusto = substr(str_replace(",", ".", $vlrCusto),3); return $this; } /** * Get vlrCusto * * @return float */ public function getVlrCusto() { return $this->vlrCusto; } /** * Set qtde * * @param int $qtde * @return EntradaProduto */ public function setQtde($qtde) { $this->qtde = $qtde; return $this; } /** * Get qtde * * @return int */ public function getQtde() { return $this->qtde; } /** * Set dtEntrada * * @param float $dtEntrada * @return EntradaProduto */ public function setDtEntrada($dtEntrada) { $this->dtEntrada = $dtEntrada; return $this; } /** * Get dtEntrada * * @return float */ public function getDtEntrada() { return $this->dtEntrada; } /** * Set fornecedor * * @param string $fornecedor * @return EntradaProduto */ public function setFornecedor($fornecedor) { $this->fornecedor = $fornecedor; return $this; } /** * Get fornecedor * * @return string */ public function getFornecedor() { return $this->fornecedor; } /** * Set produto * * @param string $produto * @return Produto */ public function setProduto($produto) { $this->produto = $produto; return $this; } /** * Get produto * * @return string */ public function getProduto() { return $this->produto; } }
mit
goodwinxp/Yorozuya
library/ATF/_attack_trap_inform_zoclRegister.hpp
715
// This file auto generated by plugin for ida pro. Generated code only for x64. Please, dont change manually #pragma once #include <common/common.h> #include <_attack_trap_inform_zoclDetail.hpp> #include <common/ATFCore.hpp> START_ATF_NAMESPACE namespace Register { class _attack_trap_inform_zoclRegister : public IRegister { public: void Register() override { auto& hook_core = CATFCore::get_instance(); for (auto& r : Detail::_attack_trap_inform_zocl_functions) hook_core.reg_wrapper(r.pBind, r); } }; }; // end namespace Register END_ATF_NAMESPACE
mit
mind0n/hive
History/Samples/Website/PmSys/VisaExpress/checkcode.aspx.cs
3586
using System; using System.Data; using System.Configuration; using System.Collections; using System.Web; using System.Web.Security; using System.Web.UI; using System.Web.UI.WebControls; using System.Web.UI.WebControls.WebParts; using System.Web.UI.HtmlControls; using System.Drawing; public partial class checkcode : System.Web.UI.Page { private void Page_Load(object sender, System.EventArgs e) { this.CreateCheckCodeImage(GenerateCheckCode()); } #region Web 窗体设计器生成的代码 override protected void OnInit(EventArgs e) { /// <summary> /// CODEGEN: 该调用是 ASP.NET Web 窗体设计器所必需的。 /// </summary> InitializeComponent(); base.OnInit(e); } /// <summary> /// 设计器支持所需的方法 /// </summary> private void InitializeComponent() { this.Load += new System.EventHandler(this.Page_Load); } #endregion private string GenerateCheckCode() { int number; char code; string checkCode = String.Empty; System.Random random = new Random(); for (int i = 0; i < 5; i++) { number = random.Next(); if (number % 2 == 0) code = (char)('0' + (char)(number % 10)); else code = (char)('A' + (char)(number % 26)); checkCode += code.ToString(); } Response.Cookies.Add(new HttpCookie("CheckCode", checkCode)); return checkCode; } private void CreateCheckCodeImage(string checkCode) { if (checkCode == null || checkCode.Trim() == String.Empty) return; System.Drawing.Bitmap image = new System.Drawing.Bitmap((int)Math.Ceiling((checkCode.Length * 12.5)), 22); Graphics g = Graphics.FromImage(image); try { //生成随机生成器 Random random = new Random(); //清空图片背景色 g.Clear(Color.White); //画图片的背景噪音线 for (int i = 0; i < 25; i++) { int x1 = random.Next(image.Width); int x2 = random.Next(image.Width); int y1 = random.Next(image.Height); int y2 = random.Next(image.Height); g.DrawLine(new Pen(Color.Silver), x1, y1, x2, y2); } Font font = new System.Drawing.Font("Arial", 12, (System.Drawing.FontStyle.Bold | System.Drawing.FontStyle.Italic)); System.Drawing.Drawing2D.LinearGradientBrush brush = new System.Drawing.Drawing2D.LinearGradientBrush(new Rectangle(0, 0, image.Width, image.Height), Color.Blue, Color.DarkRed, 1.2f, true); g.DrawString(checkCode, font, brush, 2, 2); //画图片的前景噪音点 for (int i = 0; i < 100; i++) { int x = random.Next(image.Width); int y = random.Next(image.Height); image.SetPixel(x, y, Color.FromArgb(random.Next())); } //画图片的边框线 g.DrawRectangle(new Pen(Color.Silver), 0, 0, image.Width - 1, image.Height - 1); System.IO.MemoryStream ms = new System.IO.MemoryStream(); image.Save(ms, System.Drawing.Imaging.ImageFormat.Gif); Response.ClearContent(); Response.ContentType = "image/Gif"; Response.BinaryWrite(ms.ToArray()); } finally { g.Dispose(); image.Dispose(); } } }
mit
msamuel47/TP2-Prog-Web-1
template.php
960
<!doctype html> <html lang="en"> <head> <meta charset="UTF-8"> <title>Nous joindre</title> <link rel="stylesheet" href="CSS/styles.css"> <link href="https://fonts.googleapis.com/css?family=Open+Sans" rel="stylesheet"> </head> <body> <div id="container"> <div id="entete"> <img src="img/logo.png" alt="TheLogo"> <img src="img/banner.png" alt="TheBanner"> <div class="clear"></div> </div> <div id="menu"> <div class="linkitem"><a href="index.html">Acceuil</a></div> <div class="linkitem"><a href="Evenement.html">Nous joindre ...</a></div> <div class="linkitem"><a href=""></a></div> <div class="linkitem"><a href=""></a></div> <div class="clear"></div> </div> <div id="contenu"> <div id="gauche"> <p class="title">Pour nous joindre</p> </div> <div id="droite"> </div> <div id="droite2"> </div> <div class="clear"></div> </div> <div id="pied"> </div> </div> </body> </html>
mit
zefei/remonit
src/client/ngAttr.js
2013
// patched to be used as a regular module // 'use strict'; var name = 'ngAttr'; var selector = true; angular.module(name, []).directive(name, function() { var ATTR_MATCH = /\s*([^=]+)(=\s*(\S+))?/; return { restrict: 'A', link: function(scope, element, attr) { var oldVal; scope.$watch(attr[name], function(value) { ngAttrWatchAction(scope.$eval(attr[name])); }, true); attr.$observe(name, function() { ngAttrWatchAction(scope.$eval(attr[name])); }); function ngAttrWatchAction(newVal) { if (selector === true || scope.$index % 2 === selector) { if (oldVal && !angular.equals(newVal,oldVal)) { attrWorker(oldVal, removeAttr); } attrWorker(newVal, setAttr); } oldVal = angular.copy(newVal); } function splitAttr(value) { var m = ATTR_MATCH.exec(value); return m && [m[1].replace(/\s+$/, ''), m[3]]; } function setAttr(value) { if (value) { if (value[0] === 'undefined' || value[0] === 'null') { return; } element.attr(value[0], angular.isDefined(value[1]) ? value[1] : ''); } } function removeAttr(value) { if (value) { element.removeAttr(value[0]); } } function attrWorker(attrVal, action, compare) { if(angular.isString(attrVal)) { attrVal = attrVal.split(/\s+/); } if(angular.isArray(attrVal)) { forEach(attrVal, function(v) { v = splitAttr(v); action(v); }); } else if (angular.isObject(attrVal)) { var attrs = []; angular.forEach(attrVal, function(v, k) { k = splitAttr(k); if (v) { action(k); } }); } } } }; } );
mit
niho/libwave
doc/private/html/search/all_e.js
11077
var searchData= [ ['wave_5faudio_5froute_5fchange_5fcallback',['wave_audio_route_change_callback',['../host__ios_8h.html#abf4b66fafb54fa7fbce6caa0d715087f',1,'host_ios.h']]], ['wave_5faudio_5fsession_5finterruption_5fcallback',['wave_audio_session_interruption_callback',['../host__ios_8h.html#aa5c8a80aa712398f0882ee1b65fe192c',1,'host_ios.h']]], ['wave_5fcore_5faudio_5finput_5fcallback',['wave_core_audio_input_callback',['../host__ios_8h.html#a4551ae05e5c54313c2ba46633ab07b16',1,'host_ios.h']]], ['wave_5fcore_5faudio_5foutput_5fcallback',['wave_core_audio_output_callback',['../host__ios_8h.html#af6b511d0af185a3e57d105726e6f7826',1,'host_ios.h']]], ['wave_5fcreate_5fremote_5fio_5finstance',['wave_create_remote_io_instance',['../host__ios_8h.html#a5f3d2f3ee0683fbd00219fbdcad8b1e0',1,'host_ios.h']]], ['wave_5fdebug_5fprint_5faudio_5fsession_5finfo',['wave_debug_print_audio_session_info',['../host__ios_8h.html#ada41eb5cb27f9dc96b82cd4094b61f21',1,'host_ios.h']]], ['wave_5fdebug_5fprint_5fremote_5fio_5finfo',['wave_debug_print_remote_io_info',['../host__ios_8h.html#a5eca5bf86d71540a79895e4003628f02',1,'host_ios.h']]], ['wave_5fdeinit',['wave_deinit',['../wave__recorder_8h.html#a72e6f45626b1d70d0c5d181c7fd9e15b',1,'recorder.c']]], ['wave_5fensure_5fno_5faudio_5fsession_5ferror',['wave_ensure_no_audio_session_error',['../host__ios_8h.html#af9b52c9f8f472ea4d9f521f5d238844e',1,'host_ios.h']]], ['wave_5fensure_5fno_5faudio_5funit_5ferror',['wave_ensure_no_audio_unit_error',['../host__ios_8h.html#abd08d1d7632e21af7809fec134c40eef',1,'host_ios.h']]], ['wave_5ferror_5fcodes_2eh',['wave_error_codes.h',['../wave__error__codes_8h.html',1,'']]], ['wave_5ferror_5fstr',['wave_error_str',['../wave__error__codes_8h.html#ab9085a0aa982f903d481c9f22e3cf825',1,'error_codes.c']]], ['wave_5ffree',['WAVE_FREE',['../mem_8h.html#a47b0990520fa7b618897e0ecafb417f5',1,'mem.h']]], ['wave_5finit',['wave_init',['../wave__recorder_8h.html#a97b52c4e6e4920d18aec93bcd65e4ddb',1,'recorder.c']]], ['wave_5finit_5fand_5fstart_5fremote_5fio',['wave_init_and_start_remote_io',['../host__ios_8h.html#ab93450125752b0dae23c96be53621928',1,'host_ios.h']]], ['wave_5finit_5faudio_5fsession',['wave_init_audio_session',['../host__ios_8h.html#a21346e415b83d5540167989d0d28ff87',1,'host_ios.h']]], ['wave_5finput_5favailable_5fchange_5fcallback',['wave_input_available_change_callback',['../host__ios_8h.html#ac19d667c60593324d17238a771bff616',1,'host_ios.h']]], ['wave_5finstance_5fadd_5finput_5fanalyzer',['wave_instance_add_input_analyzer',['../instance_8h.html#a3fe21c6fd263f5898feb7c8570c7e292',1,'instance.c']]], ['wave_5finstance_5fenqueue_5fcontrol_5fevent',['wave_instance_enqueue_control_event',['../instance_8h.html#a7bc70cb21e1275f37e11b46f1d95cbe2',1,'instance.c']]], ['wave_5finstance_5fenqueue_5fcontrol_5fevent_5fof_5ftype',['wave_instance_enqueue_control_event_of_type',['../instance_8h.html#a16bcd56bc66e3ad1c39f1831182b035d',1,'instance.c']]], ['wave_5finstance_5fenqueue_5ferror',['wave_instance_enqueue_error',['../instance_8h.html#a9c687815575bdb2d77c5c1eaa900126b',1,'instance.c']]], ['wave_5finstance_5fenqueue_5fnotification',['wave_instance_enqueue_notification',['../instance_8h.html#a32d1e4bccee541e6b5ea1ba51b009884',1,'instance.c']]], ['wave_5finstance_5fenqueue_5fnotification_5fof_5ftype',['wave_instance_enqueue_notification_of_type',['../instance_8h.html#a5fe5802b908611ca27d83e1e01578482',1,'instance.c']]], ['wave_5finstance_5fhost_5fspecific_5fdeinit',['wave_instance_host_specific_deinit',['../instance_8h.html#af1a81dda2a4aeeafd3531d454d53456b',1,'host_portaudio.c']]], ['wave_5finstance_5fhost_5fspecific_5finit',['wave_instance_host_specific_init',['../instance_8h.html#aacbfdcf64472fb907631ca9ac4c9afc1',1,'host_portaudio.c']]], ['wave_5finstance_5fis_5fon_5fmain_5fthread',['wave_instance_is_on_main_thread',['../instance_8h.html#ad943ad0074c9ea7445962f2d88cd76a2',1,'instance.c']]], ['wave_5finstance_5fon_5faudio_5fthread_5fcontrol_5fevent',['wave_instance_on_audio_thread_control_event',['../instance_8h.html#aa3f848dfed040d5c3a58d552027b5030',1,'instance.c']]], ['wave_5finstance_5fon_5fmain_5fthread_5fnotification',['wave_instance_on_main_thread_notification',['../instance_8h.html#afba03fbce99882575699a68810ddd37a',1,'instance.c']]], ['wave_5flevel_5fadvisor_5fdeinit',['wave_level_advisor_deinit',['../level__advisor_8h.html#a2358b4ec331ab1780dd781af3b502aed',1,'level_advisor.c']]], ['wave_5flevel_5fadvisor_5fprocess_5fbuffer',['wave_level_advisor_process_buffer',['../level__advisor_8h.html#ae4aa58de4c778140790f862e287667e3',1,'level_advisor.c']]], ['wave_5flevel_5fmeter_5fdeinit',['wave_level_meter_deinit',['../level__meter_8h.html#abad3e6ae00c48e46571a929e20b9605d',1,'level_meter.c']]], ['wave_5flevel_5fmeter_5fprocess_5fbuffer',['wave_level_meter_process_buffer',['../level__meter_8h.html#ada325810a9654831b039d4b4da42637e',1,'level_meter.c']]], ['wave_5flock_5ffree_5ffifo_5fpop',['wave_lock_free_fifo_pop',['../lock__free__fifo_8h.html#a20de23599e870604db78abaeda5c3aa7',1,'lock_free_fifo.c']]], ['wave_5flock_5ffree_5ffifo_5fpush',['wave_lock_free_fifo_push',['../lock__free__fifo_8h.html#a390835ff2f3aae267a098a8ebf753754',1,'lock_free_fifo.c']]], ['wave_5fmalloc',['WAVE_MALLOC',['../mem_8h.html#a41a5411ed2ca777248ece4fcffd32847',1,'mem.h']]], ['wave_5fnotification_2eh',['wave_notification.h',['../wave__notification_8h.html',1,'']]], ['wave_5fnotification_5ftype_5fstr',['wave_notification_type_str',['../wave__notification_8h.html#a8d705c8fa79ed714296a7a0ae7db4318',1,'recorder.c']]], ['wave_5fpause_5frecording',['wave_pause_recording',['../wave__recorder_8h.html#a2c0ae68dcfd5cb0607e9aa8529a0275a',1,'recorder.c']]], ['wave_5frecorder_2eh',['wave_recorder.h',['../wave__recorder_8h.html',1,'']]], ['wave_5fresume',['wave_resume',['../host__ios_8h.html#ad874b2a5f991731ad7dcd1052af93900',1,'host_ios.h']]], ['wave_5fresume_5frecording',['wave_resume_recording',['../wave__recorder_8h.html#a856f40c1e5b2db2d546cd6052fd5db89',1,'recorder.c']]], ['wave_5fset_5fasbd',['wave_set_asbd',['../host__ios_8h.html#add20ece2d17581bddc36c43bc3c36b47',1,'host_ios.h']]], ['wave_5fsettings_2eh',['wave_settings.h',['../wave__settings_8h.html',1,'']]], ['wave_5fsettings_5finit',['wave_settings_init',['../wave__settings_8h.html#a127abd5756db30d74c6ba038dc66a62e',1,'settings.c']]], ['wave_5fstart_5frecording',['wave_start_recording',['../wave__recorder_8h.html#aa2cd71b447bc75c0580ffb2c7f7419d5',1,'recorder.c']]], ['wave_5fstop_5fand_5fdeinit_5fremote_5fio',['wave_stop_and_deinit_remote_io',['../host__ios_8h.html#ac1ba25327287effa4324cdbad628658b',1,'host_ios.h']]], ['wave_5fstop_5frecording',['wave_stop_recording',['../wave__recorder_8h.html#a25a821282b0fa77ac81b2ceecb7ec98b',1,'recorder.c']]], ['wave_5fupdate',['wave_update',['../wave__recorder_8h.html#a652fa70600a44314ffb44048ed3d3f40',1,'recorder.c']]], ['waveanalyzerslot',['WaveAnalyzerSlot',['../struct_wave_analyzer_slot.html',1,'WaveAnalyzerSlot'],['../instance_8h.html#ac02f3afec936747f3649fe566b81d111',1,'WaveAnalyzerSlot():&#160;instance.h']]], ['waveaudiothreadstate',['WaveAudioThreadState',['../instance_8h.html#ad31ed175aa8d9fecdf673386cd00147b',1,'instance.h']]], ['wavecontrolevent',['WaveControlEvent',['../struct_wave_control_event.html',1,'WaveControlEvent'],['../instance_8h.html#a512d9cb4b89baa78f9613a8ee7ed7c1e',1,'WaveControlEvent():&#160;instance.h']]], ['wavecontroleventtype',['WaveControlEventType',['../instance_8h.html#ac36261f769a2ab0eba4dd706242ed9ab',1,'WaveControlEventType():&#160;instance.h'],['../instance_8h.html#a89ff1efa1f3ece18916f7450ff06ea82',1,'WaveControlEventType():&#160;instance.h']]], ['wavedevinfo',['WaveDevInfo',['../struct_wave_dev_info.html',1,'WaveDevInfo'],['../wave__recorder_8h.html#aa9ce5dca94211d6de05a6c68d2597af1',1,'WaveDevInfo():&#160;wave_recorder.h']]], ['waveencoder',['WaveEncoder',['../struct_wave_encoder.html',1,'']]], ['waveencoderformat',['WaveEncoderFormat',['../wave__settings_8h.html#a31eebd72e5f711b2e8f8b59ef5beacf2',1,'WaveEncoderFormat():&#160;wave_settings.h'],['../wave__settings_8h.html#a2b7e9df5598a65c4278e84caf84fb43f',1,'WaveEncoderFormat():&#160;wave_settings.h']]], ['waveerror',['WaveError',['../wave__error__codes_8h.html#ad710357474f1fa4e71b7ffc5f7f0127a',1,'WaveError():&#160;wave_error_codes.h'],['../wave__error__codes_8h.html#a24ae5e9cf263b5b48097e45823cd7331',1,'WaveError():&#160;wave_error_codes.h']]], ['waveerrorcallback',['WaveErrorCallback',['../wave__recorder_8h.html#a1d4dca5224732e9d904e02f57f7f5acc',1,'wave_recorder.h']]], ['waveinstance',['WaveInstance',['../struct_wave_instance.html',1,'WaveInstance'],['../instance_8h.html#ad746c740cba7777d8def3d7014f53958',1,'WaveInstance():&#160;instance.h']]], ['waveiosaacencoder',['WaveiOSAACEncoder',['../struct_wavei_o_s_a_a_c_encoder.html',1,'']]], ['waveleveladvisor',['WaveLevelAdvisor',['../struct_wave_level_advisor.html',1,'']]], ['wavelevelmeter',['WaveLevelMeter',['../struct_wave_level_meter.html',1,'WaveLevelMeter'],['../level__meter_8h.html#a6fc2ced1b0116d750fe593d3b1263f04',1,'WaveLevelMeter():&#160;level_meter.h']]], ['wavelockfreefifo',['WaveLockFreeFIFO',['../struct_wave_lock_free_f_i_f_o.html',1,'WaveLockFreeFIFO'],['../lock__free__fifo_8h.html#aa3ff5a6baee99e6846635423f3b5506e',1,'WaveLockFreeFIFO():&#160;lock_free_fifo.h']]], ['wavenotification',['WaveNotification',['../struct_wave_notification.html',1,'WaveNotification'],['../wave__notification_8h.html#ab1317ce69b3aca51b906d05e86caf71f',1,'WaveNotification():&#160;wave_notification.h']]], ['wavenotificationcallback',['WaveNotificationCallback',['../wave__recorder_8h.html#ac230cce4819b72f8fbe55dbcaf283300',1,'wave_recorder.h']]], ['wavenotificationtype',['WaveNotificationType',['../wave__notification_8h.html#a21236b2104a9731c87f5b02703c45dbd',1,'WaveNotificationType():&#160;wave_notification.h'],['../wave__notification_8h.html#a1c159fc39ea1ebe5b28ee68f8c42ddc9',1,'WaveNotificationType():&#160;wave_notification.h']]], ['waveopusencoder',['WaveOpusEncoder',['../struct_wave_opus_encoder.html',1,'']]], ['waverawencoder',['WaveRawEncoder',['../struct_wave_raw_encoder.html',1,'WaveRawEncoder'],['../raw__encoder_8h.html#aa2f4c747aa4ed6bdd32435a0a5997f3f',1,'WaveRawEncoder():&#160;raw_encoder.h']]], ['waverealtimeinfo',['WaveRealtimeInfo',['../struct_wave_realtime_info.html',1,'WaveRealtimeInfo'],['../wave__recorder_8h.html#a9ffe997fb2f175f03c3b1c783d2ed5cf',1,'WaveRealtimeInfo():&#160;wave_recorder.h']]], ['waverecordedchunk',['WaveRecordedChunk',['../struct_wave_recorded_chunk.html',1,'']]], ['waverecordingsession',['WaveRecordingSession',['../struct_wave_recording_session.html',1,'']]], ['wavesettings',['WaveSettings',['../struct_wave_settings.html',1,'WaveSettings'],['../wave__settings_8h.html#ad48a702b16b6d7f59fb279b0d68b4553',1,'WaveSettings():&#160;wave_settings.h']]], ['wavestate',['WaveState',['../instance_8h.html#a71c46212fdeed080e778b0262cee6f6d',1,'instance.h']]] ];
mit
slnz/prymv
app/controllers/api/base.rb
244
# app/controllers/api/base.rb Dir['/app/representers/*.rb'].each { |file| require file } module API class Dispatch < Grape::API mount API::V1::Base end Base = Rack::Builder.new do use API::Logger run API::Dispatch end end
mit
Cobbleopolis/MonsterTruckBot
modules/common/app/common/api/bitTracking/BitTrackingMode.scala
418
package common.api.bitTracking import scala.collection.SortedMap object BitTrackingMode extends Enumeration { type BitTrackingMode = Value val COLLECTIVE: BitTrackingMode = Value("collective") val SINGLE_CHEER: BitTrackingMode = Value("singleCheer") val map: SortedMap[String, String] = SortedMap(values.map(p => p.id.toString -> s"dashboard.bitTracking.cheerModes.$p" ).toSeq: _*) }
mit
mandino/www.bloggingshakespeare.com
wp-content/plugins/advanced-access-manager/Application/Backend/tmpl/settings/core.php
1121
<?php /** @version 6.0.0 */ ?> <?php if (defined('AAM_KEY')) { ?> <div class="aam-feature settings" id="settings-core-content"> <table class="table table-striped table-bordered"> <tbody> <?php foreach($this->getList() as $id => $option) { ?> <tr> <td> <span class='aam-setting-title'><?php echo $option['title']; ?></span> <p class="aam-setting-description"> <?php echo $option['description']; ?> </p> </td> <td class="text-center"> <input data-toggle="toggle" name="<?php echo $id; ?>" id="utility-<?php echo $id; ?>" <?php echo ($option['value'] ? 'checked' : ''); ?> type="checkbox" data-on="<?php echo __('Enabled', AAM_KEY); ?>" data-off="<?php echo __('Disabled', AAM_KEY); ?>" data-size="small" /> </td> </tr> <?php } ?> </tbody> </table> </div> <?php }
mit
ecomfe/veui
packages/babel-plugin-veui/test/fixtures/rewrite/source.js
84
import config from "veui/managers/config"; import Icon from "veui/components/Icon";
mit
cminorframework/utilitybelt-wordpress
src/CminorFramework/UtilityBelt/Wordpress/Components/Models/PayloadModel.php
799
<?php namespace CminorFramework\UtilityBelt\Wordpress\Extendables\Components\Models; use CminorFramework\UtilityBelt\Contracts\Wordpress\Extendables\Components\Models\PayloadModel; use Elasticsearch\Common\Exceptions\InvalidArgumentException; class PayloadModel implements PayloadModel { protected $payload = []; public function getPayload($item_name){ if(!$item_name){ throw new InvalidArgumentException(__CLASS__.'::'.__FUNCTION__.'()'.__LINE__.' Invalid item name' ); } } public function setPayload($item_name, $value) { if(!$item_name){ throw new InvalidArgumentException(__CLASS__.'::'.__FUNCTION__.'()'.__LINE__.' Invalid item name' ); } $this->payload[$item_name] = $value; return $this; } }
mit
eriknelson/gam3
gam3/world.py
3520
# -*- test-case-name: gam3.test.test_world -*- r""" ____ _____ __ __ _ _ / ___| __ _ _ __ ___ |___ / \ \ / /__ _ __| | __| | | | _ / _` | '_ ` _ \ |_ \ \ \ /\ / / _ \| '__| |/ _` | | |_| | (_| | | | | | |___) | \ V V / (_) | | | | (_| | \____|\__,_|_| |_| |_|____/ \_/\_/ \___/|_| |_|\__,_| """ import random from twisted.application.service import Service from game.vector import Vector from game.player import Player from game.environment import SimulationTime from game.terrain import Terrain from epsilon.structlike import record TCP_SERVICE_NAME = 'tcp-service-name' GAM3_SERVICE_NAME = 'gam3-service-name' point = record('x y') class World(SimulationTime): """ All-encompassing model object for the state of a Gam3 game (until we get some distribution up ins). @ivar random: An object like L{random.Random}, used for entropic things. @ivar playerCreationRectangle: A two-tuple of points giving the southwestern (lower bounds on x and y axis) and northeastern (upper bounds on x and y axis) corners of a rectangle within which new players will be created. @ivar observers: A C{list} of objects notified about state changes of this object. @ivar players: A C{list} of L{Player}s in this world. @ivar terrain: A C{dict} mapping x, y coordinate tuples to a terrain type for that location. """ def __init__(self, random=random, playerCreationRectangle=None, granularity=1, platformClock=None): SimulationTime.__init__(self, granularity, platformClock) if playerCreationRectangle is None: playerCreationRectangle = point(1, 1), point(5, 5) self.random = random self.playerCreationRectangle = playerCreationRectangle self.observers = [] self.players = [] self.terrain = Terrain() def createPlayer(self): """ Make a new L{Player}. """ sw, ne = self.playerCreationRectangle x = self.random.randrange(sw.x, ne.x) y = 1.0 z = self.random.randrange(sw.y, ne.y) player = Player(Vector(x, y, z), 2, self.seconds) for observer in self.observers: observer.playerCreated(player) self.players.append(player) return player def removePlayer(self, player): """ Stop tracking the given L{Player} and notify observers via the C{playerRemoved} method. """ self.players.remove(player) for observer in self.observers: observer.playerRemoved(player) def addObserver(self, observer): """ Add the given object to the list of those notified about state changes in this world. """ self.observers.append(observer) def getPlayers(self): """ Return an iterator of all L{Player}s in this L{World}. """ return iter(self.players) class Gam3Service(Service): """ An L{IService<twisted.application.service.IService>} which starts and stops simulation time on a L{World}. @ivar world: The L{World} to start and stop. """ def __init__(self, world): self.world = world def startService(self): """ Start simulation time on the wrapped world. """ self.world.start() def stopService(self): """ Stop simulation time on the wrapped world. """ self.world.stop()
mit
Zhanat1987/yii2advanced
backend/modules/auth/controllers/AuthItemChildController.php
3567
<?php namespace backend\modules\auth\controllers; use Yii; use backend\modules\auth\models\AuthItemChild; use backend\modules\auth\models\search\AuthItemChildSearch; use common\components\MyController; use yii\web\NotFoundHttpException; use yii\filters\VerbFilter; /** * AuthItemChildController implements the CRUD actions for AuthItemChild model. */ class AuthItemChildController extends MyController { public function behaviors() { return [ 'verbs' => [ 'class' => VerbFilter::className(), 'actions' => [ 'delete' => ['post'], ], ], ]; } /** * Lists all AuthItemChild models. * @return mixed */ public function actionIndex() { $searchModel = new AuthItemChildSearch; $dataProvider = $searchModel->search(Yii::$app->request->getQueryParams()); return $this->render('index', [ 'dataProvider' => $dataProvider, 'searchModel' => $searchModel, ]); } /** * Displays a single AuthItemChild model. * @param string $parent * @param string $child * @return mixed */ public function actionView($parent, $child) { return $this->render('view', [ 'model' => $this->findModel($parent, $child), ]); } /** * Creates a new AuthItemChild model. * If creation is successful, the browser will be redirected to the 'view' page. * @return mixed */ public function actionCreate() { $model = new AuthItemChild; if ($model->load(Yii::$app->request->post()) && $model->save()) { return $this->redirect(['view', 'parent' => $model->parent, 'child' => $model->child]); } else { return $this->render('create', [ 'model' => $model, ]); } } /** * Updates an existing AuthItemChild model. * If update is successful, the browser will be redirected to the 'view' page. * @param string $parent * @param string $child * @return mixed */ public function actionUpdate($parent, $child) { $model = $this->findModel($parent, $child); if ($model->load(Yii::$app->request->post()) && $model->save()) { return $this->redirect(['view', 'parent' => $model->parent, 'child' => $model->child]); } else { return $this->render('update', [ 'model' => $model, ]); } } /** * Deletes an existing AuthItemChild model. * If deletion is successful, the browser will be redirected to the 'index' page. * @param string $parent * @param string $child * @return mixed */ public function actionDelete($parent, $child) { $this->findModel($parent, $child)->delete(); return $this->redirect(['index']); } /** * Finds the AuthItemChild model based on its primary key value. * If the model is not found, a 404 HTTP exception will be thrown. * @param string $parent * @param string $child * @return AuthItemChild the loaded model * @throws NotFoundHttpException if the model cannot be found */ protected function findModel($parent, $child) { if (($model = AuthItemChild::find(['parent' => $parent, 'child' => $child])) !== null) { return $model; } else { throw new NotFoundHttpException('The requested page does not exist.'); } } }
mit
HasakiUI/hsk-garen
test/workspace/builder-test/node_modules/_@hasaki-ui_hsk-lulu@1.0.7@@hasaki-ui/hsk-lulu/src/array.js
826
/** * Created by joyer on 17/10/9. */ const _ = require('lodash'); const ArrayUtl = {}; /** * 将值包装成一个数组,如果这个值原先就是数组的话,会原值返回。 * @param value * @return {*} */ ArrayUtl.wrapArray = (value) => { if(Array.isArray(value)) return value; return [value]; }; /** * 往数组左边填数组的最后一个值值,不会创建新的数组 * @param array * @param length */ ArrayUtl.paddingLeftLastValue = (array,length) => { if(!Array.isArray(array)) throw new TypeError("The first params should be a Array"); const arrayLength = array.length; if(arrayLength > length) return array; const lastValue = _.last(array); for(let i = arrayLength;i < length;i++) { array.push(lastValue); } return array; }; module.exports = ArrayUtl;
mit
jasancheg/electronApps
sailsAsRestAPI/app/client/main.js
525
'use strict'; var ipc = require('ipc'); var remote = require('remote'); var Menu = remote.require('menu'); var MenuItem = remote.require('menu-item'); // create a context menu var menu = new Menu(); menu.append(new MenuItem({ label: 'Toggle Dev Tools', click: function() { ipc.send('toggle-dev-tools'); } })); window.addEventListener('contextmenu', function (e) { e.preventDefault(); menu.popup(remote.getCurrentWindow()); }, false); // web contens loaded setTimeout(function () { ipc.send('ready'); }, 10);
mit
w-vision/omnipay-datatrans
src/Message/XmlSettlementCreditRequest.php
665
<?php /** * w-vision * * LICENSE * * This source file is subject to the MIT License * For the full copyright and license information, please view the LICENSE.md * file that are distributed with this source code. * * @copyright Copyright (c) 2016 Woche-Pass AG (http://www.w-vision.ch) * @license MIT License */ namespace Omnipay\Datatrans\Message; /** * Class XmlSettlementCreditRequest * * @package Omnipay\Datatrans\Message */ class XmlSettlementCreditRequest extends XmlSettlementRequest { /** * @return string */ public function getTransactionType() { return static::DATATRANS_TRANSACTION_TYPE_CREDIT; } }
mit
alawatthe/MathLib
src/Polynomial/prototype/times.ts
644
/** * Multiplies the polynomial by a number or an other polynomial * * @param {number|Polynomial} a The multiplicator * @return {Polynomial} */ times(a) : Polynomial { var i, ii, j, jj, product = []; if (a.type === 'polynomial') { for (i = 0, ii = this.deg; i <= ii; i++) { for (j = 0, jj = a.deg; j <= jj; j++) { product[i + j] = MathLib.plus((product[i + j] ? product[i + j] : 0), MathLib.times(this[i], a[j])); } } return new MathLib.Polynomial(product); } else if (a.type === 'rational') { a = a.coerceTo('number'); } // we we multiply it to every coefficient return this.map(b => MathLib.times(a, b)); }
mit
sshh12/StockMarketML
backtest/algoA.py
20865
# coding: utf-8 # In[1]: # Imports from datetime import datetime, timedelta import numpy as np import pickle import os import matplotlib.pyplot as plt from keras.optimizers import RMSprop from keras.models import Sequential, load_model, Model from keras.preprocessing.text import Tokenizer from keras.preprocessing.sequence import pad_sequences from keras.layers import Input, concatenate, SpatialDropout1D, GRU from keras.layers import Dense, Flatten, Embedding, LSTM, Activation, BatchNormalization, Dropout, Conv1D, MaxPooling1D from keras.callbacks import ReduceLROnPlateau, EarlyStopping, ModelCheckpoint, TensorBoard import keras.backend as K from keras.utils import plot_model from contextlib import contextmanager import sqlite3 @contextmanager def db(db_filename='stock.db'): conn = sqlite3.connect(os.path.join('..', 'data', db_filename), detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES) cur = conn.cursor() yield conn, cur conn.close() # In[2]: # Options stocks = ['AMD', 'INTC'] all_sources = ['reddit', 'reuters', 'twitter', 'seekingalpha', 'fool', 'wsj', 'thestreet'] tick_window = 30 max_length = 50 vocab_size = None # Set by tokenizer emb_size = 300 model_type = 'regression' epochs = 250 batch_size = 128 test_cutoff = datetime(2018, 3, 1) # In[3]: def add_time(date, days): return (date + timedelta(days=days)).strftime('%Y-%m-%d') def clean(sentence): sentence = sentence.lower() sentence = sentence.replace('-', ' ').replace('_', ' ').replace('&', ' ') sentence = re.sub('\$?\d+%?\w?', 'numbertoken', sentence) sentence = ''.join(c for c in sentence if c in "abcdefghijklmnopqrstuvwxyz ") sentence = re.sub('\s+', ' ', sentence) return sentence.strip() def make_headline_to_effect_data(): """ Headline -> Effect Creates essentially the X, Y data for the embedding model to use when analyzing/encoding headlines. Returns a list of headlines and a list of corresponding 'effects' which represent a change in the stock price. """ meta, headlines, tick_hists, effects, test_indexes = [], [], [], [], [] with db() as (conn, cur): for stock in stocks: print("Fetching Stock..." + stock) ## Go through all the headlines ## cur.execute("SELECT date, source, rawcontent FROM headlines WHERE stock=?", [stock]) headline_query = cur.fetchall() for (date, source, content) in headline_query: if not content: continue content = clean(content) if not (5 <= content.count(' ') <= 40): continue event_date = datetime.strptime(date, '%Y-%m-%d') # The date of headline ## Find corresponding tick data ## cur.execute("""SELECT open, high, low, adjclose, volume FROM ticks WHERE stock=? AND date BETWEEN ? AND ? ORDER BY date DESC""", [stock, add_time(event_date, -30 - tick_window), add_time(event_date, 0)]) before_headline_ticks = cur.fetchall()[:tick_window] if len(before_headline_ticks) != tick_window: continue cur.execute("""SELECT AVG(adjclose) FROM ticks WHERE stock=? AND date BETWEEN ? AND ? ORDER BY date""", [stock, add_time(event_date, 1), add_time(event_date, 3)]) after_headline_ticks = cur.fetchall() cur.execute("SELECT adjclose FROM ticks WHERE stock=? AND date BETWEEN ? and ? ORDER BY date DESC LIMIT 50", [stock, add_time(event_date, -100 - tick_window), add_time(event_date, 0)]) fifty_day = np.array([x[0] for x in cur.fetchall()]) ## Create training example ## previous_tick = before_headline_ticks[0][3] result_tick = after_headline_ticks[0][0] if previous_tick and result_tick and len(after_headline_ticks) > 0: tick_hist = np.array(before_headline_ticks) tick_hist -= np.mean(fifty_day, axis=0) tick_hist /= np.std(fifty_day, axis=0) # Percent Diff (+Normalization Constant) effect = [(result_tick - previous_tick) / previous_tick / 0.023] if event_date > test_cutoff: # Mark as Test Example test_indexes.append(len(headlines)) meta.append((source, event_date.weekday())) headlines.append(content) tick_hists.append(tick_hist) effects.append(effect) return meta, headlines, np.array(tick_hists), np.array(effects), np.array(test_indexes) # In[4]: def encode_sentences(meta, sentences, tokenizer=None, max_length=100, vocab_size=100): """ Encoder Takes a list of headlines and converts them into vectors """ ## Encoding Sentences if not tokenizer: tokenizer = Tokenizer(num_words=vocab_size, filters='', lower=False) # Already Preprocessed tokenizer.fit_on_texts(sentences) encoded_headlines = tokenizer.texts_to_sequences(sentences) padded_headlines = pad_sequences(encoded_headlines, maxlen=max_length, padding='post') ## Encoding Meta Data # OneHot(Source [reddit/twitter/reuters etc..]) + OneHot(WeekDay) meta_matrix = np.zeros((len(sentences), len(all_sources) + 7)) index = 0 for (source, weekday) in meta: meta_matrix[index, all_sources.index(source)] = 1 meta_matrix[index, len(all_sources) + weekday] = 1 index += 1 return meta_matrix, padded_headlines, tokenizer # In[5]: def split_data(X, X2, X3, Y, test_indexes): """ Splits X/Y to Train/Test """ indexes = np.arange(X.shape[0]) np.random.shuffle(indexes) train_indexes = np.setdiff1d(indexes, test_indexes, assume_unique=True) trainX, testX = X[train_indexes], X[test_indexes] trainX2, testX2 = X2[train_indexes], X2[test_indexes] trainX3, testX3 = X3[train_indexes], X3[test_indexes] trainY, testY = Y[train_indexes], Y[test_indexes] return trainX, trainX2, trainX3, trainY, testX, testX2, testX3, testY # In[6]: def get_embedding_matrix(tokenizer, pretrained_file='glove.840B.300d.txt', purge=False): """Load Vectors from Glove File""" print("Loading WordVecs...") embedding_matrix = np.zeros((vocab_size + 1, emb_size)) if not pretrained_file: return embedding_matrix, None ## Load Glove File (Super Slow) ## glove_db = dict() with open(os.path.join('..', 'data', pretrained_file), 'r', encoding="utf-8") as glove: for line in glove: values = line.split(' ') word = values[0].replace('-', '').lower() coefs = np.asarray(values[1:], dtype='float32') glove_db[word] = coefs print('Loaded WordVectors...' + str(len(glove_db))) ## Set Embeddings ## for word, i in tokenizer.word_index.items(): embedding_vector = glove_db.get(word) if embedding_vector is not None: embedding_matrix[i] = embedding_vector elif purge: with db() as (conn, cur): cur.execute("SELECT 1 FROM dictionary WHERE word=? AND stock=?", [word, "none"]) if len(cur.fetchall()) == 0: print("Purge..." + word) cur.execute("DELETE FROM headlines WHERE content LIKE ?", ["%" + word + "%"]) conn.commit() return embedding_matrix, glove_db def correct_sign_acc(y_true, y_pred): """ Accuracy of Being Positive or Negative """ diff = K.equal(y_true > 0, y_pred > 0) return K.mean(diff, axis=-1) def get_model(emb_matrix): ## Headline ## headline_input = Input(shape=(max_length,), name="headlines") emb = Embedding(vocab_size + 1, emb_size, input_length=max_length, weights=[emb_matrix], trainable=True)(headline_input) emb = SpatialDropout1D(.2)(emb) text_conv = Conv1D(filters=128, kernel_size=3, padding='same', activation='selu')(emb) text_conv = MaxPooling1D(pool_size=2)(text_conv) text_conv = Dropout(0.3)(text_conv) text_rnn = LSTM(200, recurrent_dropout=0.3, return_sequences=False)(emb) text_rnn = Activation('selu')(text_rnn) text_rnn = BatchNormalization()(text_rnn) text_rnn = Dropout(0.5)(text_rnn) ## Ticks ## tick_input = Input(shape=(tick_window, 5), name="stockticks") tick_conv = Conv1D(filters=128, kernel_size=3, padding='same', activation='selu')(tick_input) tick_conv = MaxPooling1D(pool_size=2)(tick_conv) tick_conv = Dropout(0.3)(tick_conv) tick_conv = Conv1D(filters=128, kernel_size=3, padding='same', activation='selu')(tick_input) tick_conv = MaxPooling1D(pool_size=2)(tick_conv) tick_conv = Dropout(0.3)(tick_conv) tick_rnn = LSTM(200, dropout=0.3, recurrent_dropout=0.3, return_sequences=False)(tick_conv) tick_rnn = Activation('selu')(tick_rnn) tick_rnn = BatchNormalization()(tick_rnn) ## Meta ## meta_input = Input(shape=(len(all_sources) + 7,), name="metadata") ## Combined ## merged = concatenate([text_rnn, tick_rnn, meta_input]) final_dense = Dense(400)(merged) final_dense = Activation('selu')(final_dense) final_dense = BatchNormalization()(final_dense) final_dense = Dropout(0.5)(final_dense) final_dense = Dense(200)(merged) final_dense = Activation('selu')(final_dense) final_dense = BatchNormalization()(final_dense) final_dense = Dropout(0.5)(final_dense) final_dense = Dense(200)(merged) final_dense = Activation('selu')(final_dense) final_dense = BatchNormalization()(final_dense) final_dense = Dropout(0.5)(final_dense) pred_dense = Dense(1)(final_dense) out = pred_dense model = Model(inputs=[headline_input, tick_input, meta_input], outputs=out) model.compile(optimizer=RMSprop(lr=0.001), loss='mse', metrics=[correct_sign_acc]) return model # In[7]: if __name__ == "__main__": meta, headlines, tick_hists, effects, test_indexes = make_headline_to_effect_data() encoded_meta, encoded_headlines, toke = encode_sentences(meta, headlines, max_length=max_length, vocab_size=vocab_size) vocab_size = len(toke.word_counts) print("Found Words......" + str(vocab_size)) emb_matrix, glove_db = get_embedding_matrix(toke, purge=False) trainX, trainX2, trainX3, trainY, testX, testX2, testX3, testY = split_data(encoded_headlines, tick_hists, encoded_meta, effects, test_indexes) print(trainX.shape, trainX2.shape, trainX3.shape, testY.shape) # In[8]: # TRAIN MODEL if __name__ == "__main__": ## Save Tokenizer ## with open(os.path.join('..', 'models', 'toke-tick.pkl'), 'wb') as toke_file: pickle.dump(toke, toke_file, protocol=pickle.HIGHEST_PROTOCOL) ## Create Model ## model = get_model(emb_matrix) monitor_mode = 'correct_sign_acc' tensorboard = TensorBoard(log_dir="logs/{}".format(datetime.now().strftime("%Y,%m,%d-%H,%M,%S,tick," + model_type))) e_stopping = EarlyStopping(monitor='val_loss', patience=50) checkpoint = ModelCheckpoint(os.path.join('..', 'models', 'media-headlines-ticks-' + model_type + '.h5'), monitor=monitor_mode, verbose=0, save_best_only=True) plot_model(model, to_file='model.png', show_shapes=True) ## Train ## history = model.fit([trainX, trainX2, trainX3], trainY, epochs=epochs, batch_size=batch_size, validation_data=([testX, testX2, testX3], testY), verbose=0, callbacks=[e_stopping, checkpoint, tensorboard]) ## Display Train History ## plt.plot(np.log(history.history['loss'])) plt.plot(np.log(history.history['val_loss'])) plt.legend(['LogTrainLoss', 'LogTestLoss']) plt.show() plt.plot(history.history[monitor_mode]) plt.plot(history.history['val_' + monitor_mode]) plt.legend(['TrainAcc', 'TestAcc']) plt.show() # In[9]: # Predict (TEST) def predict(stock, model=None, toke=None, current_date=None, predict_date=None, look_back=None): import keras.metrics keras.metrics.correct_sign_acc = correct_sign_acc if not model or not toke: with open(os.path.join('..', 'models', 'toke-tick.pkl'), 'rb') as toke_file: toke = pickle.load(toke_file) model = load_model(os.path.join('..', 'models', 'media-headlines-ticks-' + model_type + '.h5')) vocab_size = len(toke.word_counts) if not current_date: current_date = datetime.today() if not predict_date: predict_date = current_date + timedelta(days=1) if not look_back: look_back = 3 pretick_date = add_time(current_date, -look_back) with db() as (conn, cur): ## Select Actual Stock Values ## cur.execute("""SELECT open, high, low, adjclose, volume FROM ticks WHERE stock=? AND date BETWEEN ? AND ? ORDER BY date DESC""", [stock, add_time(current_date, -30 - tick_window), add_time(current_date, 0)]) before_headline_ticks = cur.fetchall()[:tick_window] actual_current = before_headline_ticks[0][3] cur.execute("""SELECT adjclose FROM ticks WHERE stock=? AND date BETWEEN ? AND ? ORDER BY date ASC LIMIT 1""", [stock, add_time(predict_date, 1), add_time(predict_date, 5)]) after_headline_ticks = cur.fetchall() cur.execute("SELECT adjclose FROM ticks WHERE stock=? AND date BETWEEN ? and ? ORDER BY date DESC LIMIT 50", [stock, add_time(current_date, -100 - tick_window), add_time(current_date, 0)]) fifty_day = np.array([x[0] for x in cur.fetchall()]) tick_hist = np.array(before_headline_ticks) tick_hist -= np.mean(fifty_day, axis=0) tick_hist /= np.std(fifty_day, axis=0) ## Find Headlines ## cur.execute("SELECT date, source, content FROM headlines WHERE date BETWEEN ? AND ? AND stock=?", [pretick_date, current_date, stock]) headlines = cur.fetchall() ## Process ## meta, test_sents = [], [] for (date, source, content) in headlines: meta.append([source, datetime.strptime(date, '%Y-%m-%d').weekday()]) test_sents.append(content) encoded_meta, test_encoded, _ = encode_sentences(meta, test_sents, tokenizer=toke, max_length=max_length, vocab_size=vocab_size) tick_hists = np.array([tick_hist] * len(headlines)) predictions = model.predict([test_encoded, tick_hists, encoded_meta])[:, 0] prices = predictions * 0.023 * actual_current + actual_current return predictions, prices # In[10]: # [TEST] ROC if __name__ == "__main__": from sklearn.metrics import roc_auc_score try: actualY = testY predictY = model.predict([testX, testX2, testX3]) print("ROC", roc_auc_score((actualY > 0) * 2 - 1, predictY)) except NameError: print("Test Data and Model Required!") # In[11]: # [TEST] Spot Testing if __name__ == "__main__": ## **This Test May Overlap w/Train Data** ## ## Options ## stock = 'AMD' look_back = 3 current_date = '2018-03-22' predict_date = '2018-03-23' ## Run ## predictions, prices = predict(stock, current_date=datetime.strptime(current_date, '%Y-%m-%d'), predict_date=datetime.strptime(predict_date, '%Y-%m-%d'), look_back=look_back) ## Find Actual Value ## with db() as (conn, cur): cur.execute("""SELECT adjclose FROM ticks WHERE stock=? AND date BETWEEN ? AND ? ORDER BY date ASC LIMIT 1""", [stock, add_time(datetime.strptime(predict_date, '%Y-%m-%d'), 0), add_time(datetime.strptime(predict_date, '%Y-%m-%d'), 6)]) after_headline_ticks = cur.fetchall() try: actual_result = after_headline_ticks[0][0] except: actual_result = -1 ## Display ## parse = lambda num: str(round(num, 2)) print("Predicting Change Coef: " + parse(np.mean(predictions))) print("Predicting Price: " + parse(np.mean(prices))) print("Actual Price: " + parse(actual_result)) # In[12]: # [TEST] Range Test if __name__ == "__main__": ## Load Model For Manual Testing ## import keras.metrics keras.metrics.correct_sign_acc = correct_sign_acc with open(os.path.join('..', 'models', 'toke-tick.pkl'), 'rb') as toke_file: toke = pickle.load(toke_file) model = load_model(os.path.join('..', 'models', 'media-headlines-ticks-' + model_type + '.h5')) ## **This Test May Overlap w/Train Data** ## ## Settings ## stock = 'AMD' start_date = '2017-02-25' end_date = '2018-02-25' ## Run ## with db() as (conn, cur): cur.execute("""SELECT date, adjclose FROM ticks WHERE stock=? AND date BETWEEN ? AND ? ORDER BY date ASC""", [stock, datetime.strptime(start_date, '%Y-%m-%d'), datetime.strptime(end_date, '%Y-%m-%d')]) real_ticks = cur.fetchall() dates = sorted([ date for date, _ in real_ticks]) real_ticks = { date: close for (date, close) in real_ticks } fake_ticks = { date: -1 for date in real_ticks } for date in dates: predict_date = datetime.strptime(date, '%Y-%m-%d') predictions, prices = predict(stock, model=model, toke=toke, current_date=predict_date + timedelta(days=-1), predict_date=predict_date, look_back=3) fake_ticks[date] = np.mean(prices) real_ticks = np.array([real_ticks[date] for date in dates]) fake_ticks = np.array([fake_ticks[date] for date in dates]) plt.plot(real_ticks) plt.plot(fake_ticks) plt.show() plt.plot(fake_ticks - real_ticks) plt.show() acc_image = np.array([np.sign(fake_ticks[1:] - fake_ticks[:-1]) == np.sign(real_ticks[1:] - real_ticks[:-1])]) * 1.0 acc_image = acc_image.reshape((25, 10)) plt.imshow(acc_image, interpolation='none', cmap='RdBu') plt.show() print("Acc: ", np.mean(acc_image))
mit
TeamSPoon/logicmoo_workspace
packs_web/swish/web/node_modules/monaco-editor/esm/vs/editor/contrib/suggest/suggestWidgetDetails.js
16317
/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; var __param = (this && this.__param) || function (paramIndex, decorator) { return function (target, key) { decorator(target, key, paramIndex); } }; import * as nls from '../../../nls.js'; import { DisposableStore } from '../../../base/common/lifecycle.js'; import * as dom from '../../../base/browser/dom.js'; import { DomScrollableElement } from '../../../base/browser/ui/scrollbar/scrollableElement.js'; import { MarkdownRenderer } from '../../browser/core/markdownRenderer.js'; import { MarkdownString } from '../../../base/common/htmlContent.js'; import { Codicon } from '../../../base/common/codicons.js'; import { Emitter } from '../../../base/common/event.js'; import { ResizableHTMLElement } from './resizable.js'; import { IInstantiationService } from '../../../platform/instantiation/common/instantiation.js'; export function canExpandCompletionItem(item) { return !!item && Boolean(item.completion.documentation || item.completion.detail && item.completion.detail !== item.completion.label); } let SuggestDetailsWidget = class SuggestDetailsWidget { constructor(_editor, instaService) { this._editor = _editor; this._onDidClose = new Emitter(); this.onDidClose = this._onDidClose.event; this._onDidChangeContents = new Emitter(); this.onDidChangeContents = this._onDidChangeContents.event; this._disposables = new DisposableStore(); this._renderDisposeable = new DisposableStore(); this._borderWidth = 1; this._size = new dom.Dimension(330, 0); this.domNode = dom.$('.suggest-details'); this.domNode.classList.add('no-docs'); this._markdownRenderer = instaService.createInstance(MarkdownRenderer, { editor: _editor }); this._body = dom.$('.body'); this._scrollbar = new DomScrollableElement(this._body, {}); dom.append(this.domNode, this._scrollbar.getDomNode()); this._disposables.add(this._scrollbar); this._header = dom.append(this._body, dom.$('.header')); this._close = dom.append(this._header, dom.$('span' + Codicon.close.cssSelector)); this._close.title = nls.localize('details.close', "Close"); this._type = dom.append(this._header, dom.$('p.type')); this._docs = dom.append(this._body, dom.$('p.docs')); this._configureFont(); this._disposables.add(this._editor.onDidChangeConfiguration(e => { if (e.hasChanged(40 /* fontInfo */)) { this._configureFont(); } })); } dispose() { this._disposables.dispose(); this._renderDisposeable.dispose(); } _configureFont() { const options = this._editor.getOptions(); const fontInfo = options.get(40 /* fontInfo */); const fontFamily = fontInfo.fontFamily; const fontSize = options.get(105 /* suggestFontSize */) || fontInfo.fontSize; const lineHeight = options.get(106 /* suggestLineHeight */) || fontInfo.lineHeight; const fontWeight = fontInfo.fontWeight; const fontSizePx = `${fontSize}px`; const lineHeightPx = `${lineHeight}px`; this.domNode.style.fontSize = fontSizePx; this.domNode.style.lineHeight = lineHeightPx; this.domNode.style.fontWeight = fontWeight; this.domNode.style.fontFeatureSettings = fontInfo.fontFeatureSettings; this._type.style.fontFamily = fontFamily; this._close.style.height = lineHeightPx; this._close.style.width = lineHeightPx; } getLayoutInfo() { const lineHeight = this._editor.getOption(106 /* suggestLineHeight */) || this._editor.getOption(40 /* fontInfo */).lineHeight; const borderWidth = this._borderWidth; const borderHeight = borderWidth * 2; return { lineHeight, borderWidth, borderHeight, verticalPadding: 22, horizontalPadding: 14 }; } renderLoading() { this._type.textContent = nls.localize('loading', "Loading..."); this._docs.textContent = ''; this.domNode.classList.remove('no-docs', 'no-type'); this.layout(this.size.width, this.getLayoutInfo().lineHeight * 2); this._onDidChangeContents.fire(this); } renderItem(item, explainMode) { var _a, _b; this._renderDisposeable.clear(); let { detail, documentation } = item.completion; if (explainMode) { let md = ''; md += `score: ${item.score[0]}\n`; md += `prefix: ${(_a = item.word) !== null && _a !== void 0 ? _a : '(no prefix)'}\n`; md += `word: ${item.completion.filterText ? item.completion.filterText + ' (filterText)' : item.textLabel}\n`; md += `distance: ${item.distance} (localityBonus-setting)\n`; md += `index: ${item.idx}, based on ${item.completion.sortText && `sortText: "${item.completion.sortText}"` || 'label'}\n`; md += `commit_chars: ${(_b = item.completion.commitCharacters) === null || _b === void 0 ? void 0 : _b.join('')}\n`; documentation = new MarkdownString().appendCodeblock('empty', md); detail = `Provider: ${item.provider._debugDisplayName}`; } if (!explainMode && !canExpandCompletionItem(item)) { this.clearContents(); return; } this.domNode.classList.remove('no-docs', 'no-type'); // --- details if (detail) { const cappedDetail = detail.length > 100000 ? `${detail.substr(0, 100000)}…` : detail; this._type.textContent = cappedDetail; this._type.title = cappedDetail; dom.show(this._type); this._type.classList.toggle('auto-wrap', !/\r?\n^\s+/gmi.test(cappedDetail)); } else { dom.clearNode(this._type); this._type.title = ''; dom.hide(this._type); this.domNode.classList.add('no-type'); } // --- documentation dom.clearNode(this._docs); if (typeof documentation === 'string') { this._docs.classList.remove('markdown-docs'); this._docs.textContent = documentation; } else if (documentation) { this._docs.classList.add('markdown-docs'); dom.clearNode(this._docs); const renderedContents = this._markdownRenderer.render(documentation); this._docs.appendChild(renderedContents.element); this._renderDisposeable.add(renderedContents); this._renderDisposeable.add(this._markdownRenderer.onDidRenderAsync(() => { this.layout(this._size.width, this._type.clientHeight + this._docs.clientHeight); this._onDidChangeContents.fire(this); })); } this.domNode.style.userSelect = 'text'; this.domNode.tabIndex = -1; this._close.onmousedown = e => { e.preventDefault(); e.stopPropagation(); }; this._close.onclick = e => { e.preventDefault(); e.stopPropagation(); this._onDidClose.fire(); }; this._body.scrollTop = 0; this.layout(this._size.width, this._type.clientHeight + this._docs.clientHeight); this._onDidChangeContents.fire(this); } clearContents() { this.domNode.classList.add('no-docs'); this._type.textContent = ''; this._docs.textContent = ''; } get size() { return this._size; } layout(width, height) { const newSize = new dom.Dimension(width, height); if (!dom.Dimension.equals(newSize, this._size)) { this._size = newSize; dom.size(this.domNode, width, height); } this._scrollbar.scanDomNode(); } scrollDown(much = 8) { this._body.scrollTop += much; } scrollUp(much = 8) { this._body.scrollTop -= much; } scrollTop() { this._body.scrollTop = 0; } scrollBottom() { this._body.scrollTop = this._body.scrollHeight; } pageDown() { this.scrollDown(80); } pageUp() { this.scrollUp(80); } set borderWidth(width) { this._borderWidth = width; } get borderWidth() { return this._borderWidth; } }; SuggestDetailsWidget = __decorate([ __param(1, IInstantiationService) ], SuggestDetailsWidget); export { SuggestDetailsWidget }; export class SuggestDetailsOverlay { constructor(widget, _editor) { this.widget = widget; this._editor = _editor; this._disposables = new DisposableStore(); this._added = false; this._resizable = new ResizableHTMLElement(); this._resizable.domNode.classList.add('suggest-details-container'); this._resizable.domNode.appendChild(widget.domNode); this._resizable.enableSashes(false, true, true, false); let topLeftNow; let sizeNow; let deltaTop = 0; let deltaLeft = 0; this._disposables.add(this._resizable.onDidWillResize(() => { topLeftNow = this._topLeft; sizeNow = this._resizable.size; })); this._disposables.add(this._resizable.onDidResize(e => { if (topLeftNow && sizeNow) { this.widget.layout(e.dimension.width, e.dimension.height); let updateTopLeft = false; if (e.west) { deltaLeft = sizeNow.width - e.dimension.width; updateTopLeft = true; } if (e.north) { deltaTop = sizeNow.height - e.dimension.height; updateTopLeft = true; } if (updateTopLeft) { this._applyTopLeft({ top: topLeftNow.top + deltaTop, left: topLeftNow.left + deltaLeft, }); } } if (e.done) { topLeftNow = undefined; sizeNow = undefined; deltaTop = 0; deltaLeft = 0; this._userSize = e.dimension; } })); this._disposables.add(this.widget.onDidChangeContents(() => { var _a; if (this._anchorBox) { this._placeAtAnchor(this._anchorBox, (_a = this._userSize) !== null && _a !== void 0 ? _a : this.widget.size); } })); } dispose() { this._resizable.dispose(); this._disposables.dispose(); this.hide(); } getId() { return 'suggest.details'; } getDomNode() { return this._resizable.domNode; } getPosition() { return null; } show() { if (!this._added) { this._editor.addOverlayWidget(this); this.getDomNode().style.position = 'fixed'; this._added = true; } } hide(sessionEnded = false) { this._resizable.clearSashHoverState(); if (this._added) { this._editor.removeOverlayWidget(this); this._added = false; this._anchorBox = undefined; this._topLeft = undefined; } if (sessionEnded) { this._userSize = undefined; this.widget.clearContents(); } } placeAtAnchor(anchor) { var _a; const anchorBox = dom.getDomNodePagePosition(anchor); this._anchorBox = anchorBox; this._placeAtAnchor(this._anchorBox, (_a = this._userSize) !== null && _a !== void 0 ? _a : this.widget.size); } _placeAtAnchor(anchorBox, size) { const bodyBox = dom.getClientArea(document.body); const info = this.widget.getLayoutInfo(); let maxSizeTop; let maxSizeBottom; let minSize = new dom.Dimension(220, 2 * info.lineHeight); let left = 0; let top = anchorBox.top; let bottom = anchorBox.top + anchorBox.height - info.borderHeight; let alignAtTop; let alignEast; // position: EAST, west, south let width = bodyBox.width - (anchorBox.left + anchorBox.width + info.borderWidth + info.horizontalPadding); left = -info.borderWidth + anchorBox.left + anchorBox.width; alignEast = true; maxSizeTop = new dom.Dimension(width, bodyBox.height - anchorBox.top - info.borderHeight - info.verticalPadding); maxSizeBottom = maxSizeTop.with(undefined, anchorBox.top + anchorBox.height - info.borderHeight - info.verticalPadding); // find a better place if the widget is wider than there is space available if (size.width > width) { // position: east, WEST, south if (anchorBox.left > width) { // pos = SuggestDetailsPosition.West; width = anchorBox.left - info.borderWidth - info.horizontalPadding; alignEast = false; left = Math.max(info.horizontalPadding, anchorBox.left - size.width - info.borderWidth); maxSizeTop = maxSizeTop.with(width); maxSizeBottom = maxSizeTop.with(undefined, maxSizeBottom.height); } // position: east, west, SOUTH if (anchorBox.width > width * 1.3 && bodyBox.height - (anchorBox.top + anchorBox.height) > anchorBox.height) { width = anchorBox.width; left = anchorBox.left; top = -info.borderWidth + anchorBox.top + anchorBox.height; maxSizeTop = new dom.Dimension(anchorBox.width - info.borderHeight, bodyBox.height - anchorBox.top - anchorBox.height - info.verticalPadding); maxSizeBottom = maxSizeTop.with(undefined, anchorBox.top - info.verticalPadding); minSize = minSize.with(maxSizeTop.width); } } // top/bottom placement let height = size.height; let maxHeight = Math.max(maxSizeTop.height, maxSizeBottom.height); if (height > maxHeight) { height = maxHeight; } let maxSize; if (height <= maxSizeTop.height) { alignAtTop = true; maxSize = maxSizeTop; } else { alignAtTop = false; maxSize = maxSizeBottom; } this._applyTopLeft({ left, top: alignAtTop ? top : bottom - height }); this.getDomNode().style.position = 'fixed'; this._resizable.enableSashes(!alignAtTop, alignEast, alignAtTop, !alignEast); this._resizable.minSize = minSize; this._resizable.maxSize = maxSize; this._resizable.layout(height, Math.min(maxSize.width, size.width)); this.widget.layout(this._resizable.size.width, this._resizable.size.height); } _applyTopLeft(topLeft) { this._topLeft = topLeft; this.getDomNode().style.left = `${this._topLeft.left}px`; this.getDomNode().style.top = `${this._topLeft.top}px`; } }
mit
marcaube/phprest
tests/ErrorHandler/Formatter/JsonXmlTest.php
2119
<?php namespace Phprest\ErrorHandler\Formatter; use Phprest\Application; use Phprest\Config; use Doctrine\Common\Annotations\AnnotationRegistry; use Phprest\Exception\BadRequest; use Symfony\Component\HttpFoundation\Request; class JsonXmlTest extends \PHPUnit_Framework_TestCase { /** * @var Config */ protected $config; public function setUp() { $this->config = new Config('phprest', 1, true); $this->setContainerElements($this->config); } public function testFormatWithSimpleException() { $jsonXmlFormatter = new JsonXml($this->config); $this->assertContains( '"code":9,"message":"test","details":[]', $jsonXmlFormatter->format(new \LogicException('test', 9)) ); } public function testFormatWithDetailedException() { $jsonXmlFormatter = new JsonXml($this->config); $this->assertContains( '"code":11,"message":"Bad Request","details":[1,2,3,["a","b"]]', $jsonXmlFormatter->format(new BadRequest(11, [1,2,3,['a','b']])) ); } public function testFormatWithNotAcceptable() { $request = Request::createFromGlobals(); $request->headers->set('Accept', 'yaml'); $jsonXmlFormatter = new JsonXml($this->config, $request); $this->assertContains( '"code":0,"message":"Not Acceptable","details":["yaml is not supported"]', $jsonXmlFormatter->format(new \Exception()) ); } /** * @param Config $config */ protected function setContainerElements(Config $config) { AnnotationRegistry::registerLoader('class_exists'); $config->getHateoasService()->register( $config->getContainer(), $config->getHateoasConfig() ); $config->getContainer()->add(Application::CONTAINER_ID_VENDOR, $config->getVendor()); $config->getContainer()->add(Application::CONTAINER_ID_API_VERSION, $config->getApiVersion()); $config->getContainer()->add(Application::CONTAINER_ID_DEBUG, $config->isDebug()); } }
mit
willks/react-native-eximage
ExImage.ios.js
8345
'use strict'; var EdgeInsetsPropType = require('../react-native/Libraries/StyleSheet/EdgeInsetsPropType'); var ImageResizeMode = require('../react-native/Libraries/Image/ImageResizeMode'); var ImageStylePropTypes = require('../react-native/Libraries/Image/ImageStylePropTypes'); var NativeMethodsMixin = require('../react-native/Libraries/ReactIOS/NativeMethodsMixin'); var PropTypes = require('../react/lib/ReactPropTypes'); var React = require('../react'); var ReactNative = require('../react-native'); var ReactNativeViewAttributes = require('../react-native/Libraries/Components/View/ReactNativeViewAttributes'); var StyleSheet = require('../react-native/Libraries/StyleSheet/StyleSheet'); var StyleSheetPropType = require('../react-native/Libraries/StyleSheet/StyleSheetPropType'); var flattenStyle = require('../react-native/Libraries/StyleSheet/flattenStyle'); var requireNativeComponent = require('../react-native/Libraries/ReactIOS/requireNativeComponent'); var resolveAssetSource = require('../react-native/Libraries/Image/resolveAssetSource'); const _ = require('lodash'); // var ImageResizeMode = require('ImageResizeMode'); // var EdgeInsetsPropType = require('EdgeInsetsPropType'); // var ImageStylePropTypes = require('ImageStylePropTypes'); // var NativeMethodsMixin = require('NativeMethodsMixin'); // var NativeModules = require('NativeModules'); // var PropTypes = require('ReactPropTypes'); // var React = require('React'); // var ReactNativeViewAttributes = require('ReactNativeViewAttributes'); // var StyleSheet = require('StyleSheet'); // var StyleSheetPropType = require('StyleSheetPropType'); // // var flattenStyle = require('flattenStyle'); var invariant = require('invariant'); var merge = require('merge'); // var requireNativeComponent = require('requireNativeComponent'); // var resolveAssetSource = require('resolveAssetSource'); var verifyPropTypes = require('../react-native/Libraries/ReactIOS/verifyPropTypes'); var warning = require('warning'); var ExImage = React.createClass({ propTypes: { /** * `uri` is a string representing the resource identifier for the image, which * could be an http address, a local file path, or the name of a static image * resource (which should be wrapped in the `require('image!name')` function). */ source: PropTypes.shape({ uri: PropTypes.string, }), /** * A static image to display while downloading the final image off the * network. */ defaultSource: PropTypes.shape({ uri: PropTypes.string, }), /** * Whether this element should be revealed as an accessible element. */ accessible: PropTypes.bool, /** * Custom string to display for accessibility. */ accessibilityLabel: PropTypes.string, /** * When the image is resized, the corners of the size specified * by capInsets will stay a fixed size, but the center content and borders * of the image will be stretched. This is useful for creating resizable * rounded buttons, shadows, and other resizable assets. More info on * [Apple documentation](https://developer.apple.com/library/ios/documentation/UIKit/Reference/UIImage_Class/index.html#//apple_ref/occ/instm/UIImage/resizableImageWithCapInsets) */ capInsets: EdgeInsetsPropType, /** * Determines how to resize the image when the frame doesn't match the raw * image dimensions. */ resizeMode: PropTypes.oneOf(['cover', 'contain', 'stretch']), style: StyleSheetPropType(ImageStylePropTypes), /** * A unique identifier for this element to be used in UI Automation * testing scripts. */ testID: PropTypes.string, /** * Invoked on mount and layout changes with * * {nativeEvent: { layout: {x, y, width, height}}}. */ onLayout: PropTypes.func, /** * Invoked on load start */ onLoadStart: PropTypes.func, /** * Invoked on download progress with * * {nativeEvent: { written, total}}. */ onLoadProgress: PropTypes.func, /** * Invoked on load abort */ onLoadAbort: PropTypes.func, /** * Invoked on load error * * {nativeEvent: { error}}. */ onLoadError: PropTypes.func, /** * Invoked on load end */ onLoaded: PropTypes.func, /** * Progress Indicator background color */ loadingBackgroundColor: PropTypes.string, /** * Progress Indicator foreground color */ loadingForegroundColor: PropTypes.string, /** * Whether Progress Indicator should be display */ progressIndicate: PropTypes.bool, /** * Whether cache StaticImage thumbnail */ cacheThumbnail: PropTypes.bool, }, getDefaultProps: function() { return { loadingBackgroundColor: '#E3E3E3', loadingForegroundColor: '#F53341', }; }, statics: { resizeMode: ImageResizeMode, }, mixins: [NativeMethodsMixin], /** * `NativeMethodsMixin` will look for this when invoking `setNativeProps`. We * make `this` look like an actual native component class. */ viewConfig: { uiViewClassName: 'UIView', validAttributes: ReactNativeViewAttributes.UIView }, render: function() { for (var prop in nativeOnlyProps) { if (this.props[prop] !== undefined) { console.warn('Prop `' + prop + ' = ' + this.props[prop] + '` should ' + 'not be set directly on Image.'); } } var source = resolveAssetSource(this.props.source) || {}; var {width, height} = source; var style = flattenStyle([{width, height}, styles.base, this.props.style]); invariant(style, 'style must be initialized'); var isNetwork = source.uri && source.uri.match(/^https?:/); invariant( !(isNetwork && source.isStatic), 'static image uris cannot start with "http": "' + source.uri + '"' ); var isStored = !source.isStatic && !isNetwork; var RawImage = isNetwork ? RCTExNetworkImage : RCTExStaticImage; if (this.props.style && this.props.style.tintColor) { warning(RawImage === RCTExStaticImage, 'tintColor style only supported on static images.'); } var resizeMode = this.props.resizeMode || style.resizeMode || 'cover'; var clonedProps = _.clone(this.props); var nativeProps = merge(clonedProps, { style, tintColor: style.tintColor, resizeMode: resizeMode, }); if (nativeProps.cacheThumbnail === undefined) { nativeProps.cacheThumbnail = false; } if (isStored) { nativeProps.imageInfo = { imageTag: source.uri, prezSize: { width: style.width || 0, height: style.height || 0, }, cacheThumbnail: nativeProps.cacheThumbnail, } } else { nativeProps.src = source.uri; } if (this.props.defaultSource) { nativeProps.defaultImageSrc = this.props.defaultSource.uri; } nativeProps.onExLoadStart = nativeProps.onLoadStart; nativeProps.onExLoadProgress = nativeProps.onLoadProgress; nativeProps.onExLoadError = nativeProps.onLoadError; nativeProps.onExLoaded = nativeProps.onLoaded; delete nativeProps.onLoadStart; delete nativeProps.onLoadProgress; delete nativeProps.onLoadError; delete nativeProps.onLoaded; return <RawImage {...nativeProps} />; } }); var styles = StyleSheet.create({ base: { overflow: 'hidden', backgroundColor: '#EFEFEF', }, }); var RCTExNetworkImage = requireNativeComponent('RCTExNetworkImage', null); var RCTExStaticImage = requireNativeComponent('RCTExStaticImage', null); var nativeOnlyProps = { src: true, defaultImageSrc: true, imageTag: true, contentMode: true, imageInfo: true, }; if (__DEV__) { verifyPropTypes(ExImage, RCTExStaticImage.viewConfig, nativeOnlyProps); verifyPropTypes(ExImage, RCTExNetworkImage.viewConfig, nativeOnlyProps); } ExImage.calculateCacheSize = function(callback) { NativeModules.ExNetworkImageManager.calculateCacheSize(callback); } ExImage.clearCache = function(callback) { NativeModules.ExNetworkImageManager.clearCache(callback); } ExImage.clearThumbnailCache = function(callback) { NativeModules.RCTExStaticImageManager.clearThumbnailCache(callback); } module.exports = ExImage;
mit
qrux/phputils
inc/phpseclib/Crypt/DSA/Formats/Keys/XML.php
4985
<?php /** * XML Formatted DSA Key Handler * * While XKMS defines a private key format for RSA it does not do so for DSA. Quoting that standard: * * "[XKMS] does not specify private key parameters for the DSA signature algorithm since the algorithm only * supports signature modes and so the application of server generated keys and key recovery is of limited * value" * * PHP version 5 * * @category Crypt * @package DSA * @author Jim Wigginton <terrafrost@php.net> * @copyright 2015 Jim Wigginton * @license http://www.opensource.org/licenses/mit-license.html MIT License * @link http://phpseclib.sourceforge.net */ namespace phpseclib\Crypt\DSA\Formats\Keys; use ParagonIE\ConstantTime\Base64; use phpseclib\Math\BigInteger; /** * XML Formatted DSA Key Handler * * @package DSA * @author Jim Wigginton <terrafrost@php.net> * @access public */ abstract class XML { /** * Break a public or private key down into its constituent components * * @access public * @param string $key * @param string $password optional * @return array */ public static function load($key, $password = '') { if (!is_string($key)) { throw new \UnexpectedValueException('Key should be a string - not a ' . gettype($key)); } $use_errors = libxml_use_internal_errors(true); $dom = new \DOMDocument(); if (substr($key, 0, 5) != '<?xml') { $key = '<xml>' . $key . '</xml>'; } if (!$dom->loadXML($key)) { throw new \UnexpectedValueException('Key does not appear to contain XML'); } $xpath = new \DOMXPath($dom); $keys = ['p', 'q', 'g', 'y', 'j', 'seed', 'pgencounter']; foreach ($keys as $key) { // $dom->getElementsByTagName($key) is case-sensitive $temp = $xpath->query("//*[translate(local-name(), 'ABCDEFGHIJKLMNOPQRSTUVWXYZ','abcdefghijklmnopqrstuvwxyz')='$key']"); if (!$temp->length) { continue; } $value = new BigInteger(Base64::decode($temp->item(0)->nodeValue), 256); switch ($key) { case 'p': // a prime modulus meeting the [DSS] requirements // Parameters P, Q, and G can be public and common to a group of users. They might be known // from application context. As such, they are optional but P and Q must either both appear // or both be absent $components['p'] = $value; break; case 'q': // an integer in the range 2**159 < Q < 2**160 which is a prime divisor of P-1 $components['q'] = $value; break; case 'g': // an integer with certain properties with respect to P and Q $components['g'] = $value; break; case 'y': // G**X mod P (where X is part of the private key and not made public) $components['y'] = $value; // the remaining options do not do anything case 'j': // (P - 1) / Q // Parameter J is available for inclusion solely for efficiency as it is calculatable from // P and Q case 'seed': // a DSA prime generation seed // Parameters seed and pgenCounter are used in the DSA prime number generation algorithm // specified in [DSS]. As such, they are optional but must either both be present or both // be absent case 'pgencounter': // a DSA prime generation counter } } libxml_use_internal_errors($use_errors); if (!isset($components['y'])) { throw new \UnexpectedValueException('Key is missing y component'); } switch (true) { case !isset($components['p']): case !isset($components['q']): case !isset($components['g']): return ['y' => $components['y']]; } return $components; } /** * Convert a public key to the appropriate format * * See https://www.w3.org/TR/xmldsig-core/#sec-DSAKeyValue * * @access public * @param \phpseclib\Math\BigInteger $p * @param \phpseclib\Math\BigInteger $q * @param \phpseclib\Math\BigInteger $g * @param \phpseclib\Math\BigInteger $y * @return string */ public static function savePublicKey(BigInteger $p, BigInteger $q, BigInteger $g, BigInteger $y) { return "<DSAKeyValue>\r\n" . ' <P>' . Base64::encode($p->toBytes()) . "</P>\r\n" . ' <Q>' . Base64::encode($q->toBytes()) . "</Q>\r\n" . ' <G>' . Base64::encode($g->toBytes()) . "</G>\r\n" . ' <Y>' . Base64::encode($y->toBytes()) . "</Y>\r\n" . '</DSAKeyValue>'; } }
mit
edusegzy/walmart_open
spec/walmart_open/config_spec.rb
877
require "spec_helper" require "walmart_open/config" require "walmart_open/client" require "walmart_open/errors" describe WalmartOpen::Config do context ".new" do it "sets default configs" do config = WalmartOpen::Config.new expect(config.debug).to be(false) expect(config.product_domain).to eq("walmartlabs.api.mashery.com") expect(config.product_version).to eq("v1") expect(config.product_calls_per_second).to be(5) end it "allows setting configs" do config = WalmartOpen::Config.new({debug: true, product_domain: "test", product_version: "test", product_calls_per_second: 1, }) expect(config.debug).to be(true) expect(config.product_domain).to eq("test") expect(config.product_version).to eq("test") expect(config.product_calls_per_second).to be(1) end end end
mit
Elusivehawk/HawkUtils
src/java/com/elusivehawk/util/storage/Bitmask.java
518
package com.elusivehawk.util.storage; /** * * * * @author Elusivehawk */ public class Bitmask { private final long mask; private final int offset; @SuppressWarnings("unqualified-field-access") public Bitmask(long bitmask, int off) { assert off >= 0; mask = bitmask << off; offset = off; } public long getValue(long n) { return (n & this.mask) >> this.offset; } public long setValue(long n, long value) { return (n & ~this.mask) | ((value << this.offset) & this.mask); } }
mit
xlfishbone/Nancy-Marionette-Seed
NancyBackboneSeed/Scripts/app/app.js
406
var Marionette = require('backbone.marionette'), Backbone = require('backbone'), router = require('./router'), master = require('./layout/masterView.js'), app; app = new Marionette.Application({ onStart: function () { var lv = new master(); lv.render(); var rn = new router; Backbone.history.start(); } }); module.exports = app;
mit
GreenMelon/Avalon-Notes
node_modules/grunt-replace-files/tasks/index.js
1248
/* * 基于grunt的文本内容替换组件 * https://github.com/lh2907883/grunt-replace-files * * Copyright (c) 2015-6-29 lihao * Licensed under the MIT. */ 'use strict'; module.exports = function(grunt) { // Please see the Grunt documentation for more information regarding task // creation: http://gruntjs.com/creating-tasks grunt.registerMultiTask('regexReplace', '基于grunt的文本内容替换组件', function() { // Merge task-specific and/or target-specific options with these defaults. // var options = this.options({ // punctuation: '.', // separator: ', ' // }); //console.log(JSON.stringify(this.data.options.regex)); var regexOption = this.data.options.regex; // // Iterate over all specified file groups. this.files.forEach(function(f) { if(f.src.length > 0){ var content = grunt.file.read(f.src[0]); for(var r in regexOption){ if(r){ var tar = regexOption[r]; var regex = new RegExp(r, "gm"); content = content.replace(regex, tar); } } grunt.file.write(f.dest, content); //console.log(content); } }); }); };
mit
NejTech/Ingenuity
Ingenuity/Ingenuity/obj/ARM/Release/ilc/intermediate/PreInitStructs.cs
24052
using System; using System.Runtime.InteropServices; using Internal.Runtime.CompilerServices; using System.Runtime.Serialization; enum DataContractKind { Unknown , BooleanDataContract , ByteArrayDataContract , CharDataContract , DateTimeDataContract , DecimalDataContract , DoubleDataContract , FloatDataContract , GuidDataContract , IntDataContract , LongDataContract , ObjectDataContract , QNameDataContract , ShortDataContract , SignedByteDataContract , StringDataContract , TimeSpanDataContract , UnsignedByteDataContract , UnsignedIntDataContract , UnsignedLongDataContract , UnsignedShortDataContract, UriDataContract , InvalidDataContract , }; struct CommonContractEntry { public bool HasRoot; public bool IsBuiltInDataContract; public bool IsISerializable; public bool IsReference; public bool IsValueType; public bool TypeIsCollectionInterface; public bool TypeIsInterface; public int NameIndex; public int NamespaceIndex; public int StableNameIndex; public int StableNameNamespaceIndex; public int TopLevelElementNameIndex; public int TopLevelElementNamespaceIndex; public int KnownDataContractsListIndex; // indexes into s_knownContractsLists array, -1 terminated public FixupRuntimeTypeHandle OriginalUnderlyingType; public FixupRuntimeTypeHandle UnderlyingType; public FixupRuntimeTypeHandle GenericTypeDefinition; } // PrimitiveDataContract / InvalidDataContract struct DataContractEntry { public CommonContractEntry Common; public int ErrorMessageIndex; public DataContractKind Kind; } struct ClassDataContractEntry { public CommonContractEntry Common; // ClasssDataContract / CollectionDataContract public IntPtr XmlFormatReaderDelegate; public IntPtr XmlFormatWriterDelegate; // ClassDataContract public bool HasDataContract; public bool HasExtensionData; public int ChildElementNamespacesListIndex; // indexes into s_xmlDictionaryStrings array, -1 terminated public int ContractNamespacesListIndex; // indexes into s_xmlDictionaryStrings array, -1 terminated public int MemberNamesListIndex; // indexes into s_xmlDictionaryStrings array, -1 terminated public int MemberNamespacesListIndex; // indexes into s_xmlDictionaryStrings array, -1 terminated } struct CollectionDataContractEntry { public CommonContractEntry Common; // ClasssDataContract / CollectionDataContract public IntPtr XmlFormatReaderDelegate; public IntPtr XmlFormatWriterDelegate; // CollectionDataContract public CollectionKind CollectionContractKind; public int CollectionItemNameIndex; public int KeyNameIndex; public int ItemNameIndex; public int ValueNameIndex; public FixupRuntimeTypeHandle ItemType; public IntPtr XmlFormatGetOnlyCollectionReaderDelegate; } struct EnumDataContractEntry { public CommonContractEntry Common; // EnumDataContract public bool IsFlags; public bool IsULong; public int BaseContractNameIndex; public int BaseContractNamespaceIndex; public int ChildElementNamesListIndex; // indexes into s_xmlDictionaryStrings array, -1 terminated public int MemberCount; public int MemberListIndex; } struct XmlDataContractEntry { public CommonContractEntry Common; // XmlDataContract public IntPtr CreateXmlSerializableDelegate; } struct MemberEntry { public bool EmitDefaultValue; public bool HasConflictingNameAndType; public bool IsGetOnlyCollection; public bool IsNullable; public bool IsRequired; public int Order; public int NameIndex; public long Value; } struct DataContractMapEntry { public FixupRuntimeTypeHandle UserCodeType; public int TableIndex; // (index_in_table << 4) | table_index } namespace System.Runtime.CompilerServices { internal sealed class __BlockReflectionAttribute : global::System.Attribute { } } namespace System.Runtime.InteropServices { [AttributeUsage((System.AttributeTargets.Method | System.AttributeTargets.Class))] internal class McgIntrinsicsAttribute : Attribute { } } [McgIntrinsics] static class SgIntrinsics { internal static IntPtr AddrOf<T>(T ftn) { // This method is implemented elsewhere in the toolchain return default(IntPtr); } } namespace System.Runtime.Serialization.Generated { using System.Diagnostics; using System.Collections.Generic; using System.Runtime.Serialization.Json; using System.Threading; using System.Xml; using System.Runtime.CompilerServices; using TodoList = System.Collections.Generic.List<System.Collections.Generic.KeyValuePair<DataContract, int>>; public static partial class DataContractSerializerHelper { // ROOTS // s_dataContractMap // s_dataContracts // s_classDataContracts // s_collectionDataContracts // s_enumDataContracts // s_xmlDataContracts // s_jsonDelegatesList // // POOLS // s_stringPool // s_xmlDictionaryStrings // s_dataMemberLists // s_knownContractsLists static DataContractMapEntry[] DataContractMap { get { return s_dataContractMap; } } static DataContractEntry[] DataContracts { get { return s_dataContracts; } } static ClassDataContractEntry[] ClassDataContracts { get { return s_classDataContracts; } } static CollectionDataContractEntry[] CollectionDataContracts { get { return s_collectionDataContracts; } } static EnumDataContractEntry[] EnumDataContracts { get { return s_enumDataContracts; } } static XmlDataContractEntry[] XmlDataContracts { get { return s_xmlDataContracts; } } static char[] StringPool { get { return s_stringPool; } } static int[] XmlDictionaryStrings { get { return s_xmlDictionaryStrings; } } static MemberEntry[] DataMemberLists { get { return s_dataMemberLists; } } static int[] KnownContractLists { get { return s_knownContractsLists; } } public static void PopulateContractDictionary( Dictionary<Type, DataContract> dataContracts) { // fill in dictionaries from pre-initialized data XmlDictionary xmlDict = new XmlDictionary(); TodoList knownContractsTodoList = new TodoList(); for (int i = 0; i < DataContractMap.Length; i++) { int encodedTableIndex = DataContractMap[i].TableIndex; Type type = Type.GetTypeFromHandle(DataContractMap[i].UserCodeType.RuntimeTypeHandle); DataContract decodedContract = DecodeContract(encodedTableIndex, xmlDict, knownContractsTodoList); if (!type.IsRemovedByDR()) dataContracts.Add(type, decodedContract); // Silly optimization to mimick previous implementation: If the next entry refers to the same data // contract, let's add that one to the map too without decoding a new DataContract instance. if (((i + 1) < DataContractMap.Length) && (DataContractMap[i + 1].TableIndex == encodedTableIndex)) { Type otherType = Type.GetTypeFromHandle(DataContractMap[i + 1].UserCodeType.RuntimeTypeHandle); if (!otherType.IsRemovedByDR()) dataContracts.Add(otherType, decodedContract); i++; } } foreach (KeyValuePair<DataContract, int> entry in knownContractsTodoList) { DataContract contract = entry.Key; int knownDataContractsListIndex = entry.Value; contract.KnownDataContracts = DecodeKnownContractsList(dataContracts, knownDataContractsListIndex); } } static int DecodeTableIndex(int encodedTableIndex, out int whichTable) { whichTable = encodedTableIndex & 0xF; return encodedTableIndex >> 4; } static DataContract DecodeContract(int encodedTableIndex, XmlDictionary xmlDict, TodoList knownContractsTodoList) { int whichTable; int indexWithinTable = DecodeTableIndex(encodedTableIndex, out whichTable); switch (whichTable) { case 0: return DecodeDataContract(indexWithinTable, xmlDict, knownContractsTodoList); case 1: return DecodeClassDataContract(indexWithinTable, xmlDict, knownContractsTodoList); case 2: return DecodeCollectionDataContract(indexWithinTable, xmlDict, knownContractsTodoList); case 3: return DecodeEnumDataContract(indexWithinTable, xmlDict, knownContractsTodoList); case 4: return DecodeXmlDataContract(indexWithinTable, xmlDict, knownContractsTodoList); default: throw new Exception("unexpected table index"); } } static DataContract DecodeDataContract(int index, XmlDictionary xmlDict, TodoList knownContractsTodoList) { DataContract contract = AllocateSimpleDataContract(DataContracts[index].Kind); DecodeCommon(xmlDict, contract, ref DataContracts[index].Common, knownContractsTodoList); if (contract is InvalidDataContract) { ((InvalidDataContract)contract).ErrorMessage = GetString(DataContracts[index].ErrorMessageIndex); } return contract; } static DataContract DecodeClassDataContract(int index, XmlDictionary xmlDict, TodoList knownContractsTodoList) { ClassDataContract contract = new ClassDataContract(); DecodeCommon(xmlDict, contract, ref ClassDataContracts[index].Common, knownContractsTodoList); contract.XmlFormatReaderDelegate = (XmlFormatClassReaderDelegate)DelegateFromIntPtr(typeof(XmlFormatClassReaderDelegate), ClassDataContracts[index].XmlFormatReaderDelegate); contract.XmlFormatWriterDelegate = (XmlFormatClassWriterDelegate)DelegateFromIntPtr(typeof(XmlFormatClassWriterDelegate), ClassDataContracts[index].XmlFormatWriterDelegate); contract.HasDataContract = ClassDataContracts[index].HasDataContract; contract.HasExtensionData = ClassDataContracts[index].HasExtensionData; contract.ChildElementNamespaces = DecodeDictStringList(xmlDict, ClassDataContracts[index].ChildElementNamespacesListIndex); contract.ContractNamespaces = DecodeDictStringList(xmlDict, ClassDataContracts[index].ContractNamespacesListIndex); contract.MemberNames = DecodeDictStringList(xmlDict, ClassDataContracts[index].MemberNamesListIndex); contract.MemberNamespaces = DecodeDictStringList(xmlDict, ClassDataContracts[index].MemberNamespacesListIndex); return contract; } static DataContract DecodeCollectionDataContract(int index, XmlDictionary xmlDict, TodoList knownContractsTodoList) { CollectionDataContract contract = new CollectionDataContract(CollectionDataContracts[index].CollectionContractKind); DecodeCommon(xmlDict, contract, ref CollectionDataContracts[index].Common, knownContractsTodoList); contract.XmlFormatReaderDelegate = (XmlFormatCollectionReaderDelegate)DelegateFromIntPtr(typeof(XmlFormatCollectionReaderDelegate), CollectionDataContracts[index].XmlFormatReaderDelegate); contract.XmlFormatWriterDelegate = (XmlFormatCollectionWriterDelegate)DelegateFromIntPtr(typeof(XmlFormatCollectionWriterDelegate), CollectionDataContracts[index].XmlFormatWriterDelegate); contract.CollectionItemName = GetDictString(xmlDict, CollectionDataContracts[index].CollectionItemNameIndex); contract.KeyName = GetString(CollectionDataContracts[index].KeyNameIndex); contract.ItemName = GetString(CollectionDataContracts[index].ItemNameIndex); contract.ValueName = GetString(CollectionDataContracts[index].ValueNameIndex); contract.ItemType = Type.GetTypeFromHandle(CollectionDataContracts[index].ItemType.RuntimeTypeHandle); contract.XmlFormatGetOnlyCollectionReaderDelegate = (XmlFormatGetOnlyCollectionReaderDelegate)DelegateFromIntPtr(typeof(XmlFormatGetOnlyCollectionReaderDelegate), CollectionDataContracts[index].XmlFormatGetOnlyCollectionReaderDelegate); return contract; } static DataContract DecodeEnumDataContract(int index, XmlDictionary xmlDict, TodoList knownContractsTodoList) { List<long> values; EnumDataContract contract = new EnumDataContract(); DecodeCommon(xmlDict, contract, ref EnumDataContracts[index].Common, knownContractsTodoList); contract.IsFlags = EnumDataContracts[index].IsFlags; contract.IsULong = EnumDataContracts[index].IsULong; contract.BaseContractName = GetQualifiedName(EnumDataContracts[index].BaseContractNameIndex, EnumDataContracts[index].BaseContractNamespaceIndex); contract.ChildElementNames = DecodeDictStringList(xmlDict, EnumDataContracts[index].ChildElementNamesListIndex); contract.Members = DecodeMembersAndValues(EnumDataContracts[index].MemberCount, EnumDataContracts[index].MemberListIndex, out values); contract.Values = values; return contract; } static DataContract DecodeXmlDataContract(int index, XmlDictionary xmlDict, TodoList knownContractsTodoList) { XmlDataContract contract = new XmlDataContract(); DecodeCommon(xmlDict, contract, ref XmlDataContracts[index].Common, knownContractsTodoList); contract.CreateXmlSerializableDelegate = (CreateXmlSerializableDelegate)DelegateFromIntPtr(typeof(CreateXmlSerializableDelegate), XmlDataContracts[index].CreateXmlSerializableDelegate); return contract; } static Delegate DelegateFromIntPtr(Type delegateType, IntPtr pfnStaticManagedMethod) { if (pfnStaticManagedMethod == IntPtr.Zero) return null; return FunctionPointerHelpers.UnsafeDelegateFromStaticMethodFunctionPointer(delegateType, pfnStaticManagedMethod); } // // These decode methods are factored out in the hopes that the compiler will fold them together, as they should // be identical code since they only vary by the type of the second parameter and the layout of those structs // should match for the fields being accessed // static void DecodeCommon(XmlDictionary xmlDict, DataContract contract, ref CommonContractEntry entry, TodoList knownContractsTodoList) { contract.HasRoot = entry.HasRoot; contract.IsBuiltInDataContract = entry.IsBuiltInDataContract; contract.IsISerializable = entry.IsISerializable; contract.IsReference = entry.IsReference; contract.IsValueType = entry.IsValueType; contract.TypeIsCollectionInterface = entry.TypeIsCollectionInterface; contract.TypeIsInterface = entry.TypeIsInterface; contract.Name = GetDictString(xmlDict, entry.NameIndex); contract.Namespace = GetDictString(xmlDict, entry.NamespaceIndex); contract.StableName = GetQualifiedName(entry.StableNameIndex, entry.StableNameNamespaceIndex); contract.TopLevelElementName = GetDictString(xmlDict, entry.TopLevelElementNameIndex); contract.TopLevelElementNamespace = GetDictString(xmlDict, entry.TopLevelElementNamespaceIndex); contract.OriginalUnderlyingType = Type.GetTypeFromHandle(entry.OriginalUnderlyingType.RuntimeTypeHandle); contract.UnderlyingType = Type.GetTypeFromHandle(entry.UnderlyingType.RuntimeTypeHandle); contract.GenericTypeDefinition = Type.GetTypeFromHandle(entry.GenericTypeDefinition.RuntimeTypeHandle); knownContractsTodoList.Add(new KeyValuePair<DataContract, int>(contract, entry.KnownDataContractsListIndex)); } unsafe static string GetString(int stringPoolIndex) { if (stringPoolIndex == -1) return null; fixed (char* pData = &StringPool[stringPoolIndex]) { return new string(pData); } } static XmlDictionaryString GetDictString(XmlDictionary xmlDict, int stringPoolIndex) { if (stringPoolIndex == -1) return null; return xmlDict.Add(GetString(stringPoolIndex)); } static XmlQualifiedName GetQualifiedName(int nameIndex, int namespaceIndex) { if (nameIndex == -1) return null; return new XmlQualifiedName(GetString(nameIndex), GetString(namespaceIndex)); } static XmlDictionaryString[] DecodeDictStringList(XmlDictionary xmlDict, int listIndex) { int curIndex = listIndex; int length = XmlDictionaryStrings[curIndex++]; XmlDictionaryString[] result = new XmlDictionaryString[length]; for (int i = 0; i < length; i++) { result[i] = GetDictString(xmlDict, XmlDictionaryStrings[curIndex++]); } return result; } static List<DataMember> DecodeMembersAndValues(int memberCount, int memberListIndex, out List<long> values) { List<DataMember> members = new List<DataMember>(memberCount); values = new List<long>(memberCount); for (int i = memberListIndex; i < (memberListIndex + memberCount); i++) { DataMember member = new DataMember(); member.EmitDefaultValue = DataMemberLists[i].EmitDefaultValue; member.HasConflictingNameAndType = DataMemberLists[i].HasConflictingNameAndType; member.IsGetOnlyCollection = DataMemberLists[i].IsGetOnlyCollection; member.IsNullable = DataMemberLists[i].IsNullable; member.IsRequired = DataMemberLists[i].IsRequired; member.Order = DataMemberLists[i].Order; member.Name = GetString(DataMemberLists[i].NameIndex); members.Add(member); values.Add(DataMemberLists[i].Value); } return members; } static Dictionary<XmlQualifiedName, DataContract> DecodeKnownContractsList( Dictionary<Type, DataContract> dataContracts, int knownDataContractsListIndex) { Dictionary<XmlQualifiedName, DataContract> result = new Dictionary<XmlQualifiedName, DataContract>(); int curListIndex = knownDataContractsListIndex; int mapIndex = KnownContractLists[curListIndex++]; while (mapIndex != -1) { // The list entries are indexes into the DataContractMap Type type = Type.GetTypeFromHandle(DataContractMap[mapIndex].UserCodeType.RuntimeTypeHandle); // All contracts have been loaded now, so we fetch the referenced contract from the passed-in collection // using it's user type as the key. DataContract otherContract = dataContracts[type]; // Now form the key for the KnownDataContracts dictionary XmlQualifiedName otherContractKey = new XmlQualifiedName(otherContract.Name.Value, otherContract.Namespace.Value); // And add the entry to the KnownDataContracts dictionary result.Add(otherContractKey, otherContract); mapIndex = KnownContractLists[curListIndex++]; } return result; } static DataContract AllocateSimpleDataContract(DataContractKind kind) { switch (kind) { case DataContractKind.BooleanDataContract : return new BooleanDataContract (); case DataContractKind.ByteArrayDataContract : return new ByteArrayDataContract (); case DataContractKind.CharDataContract : return new CharDataContract (); case DataContractKind.DateTimeDataContract : return new DateTimeDataContract (); case DataContractKind.DecimalDataContract : return new DecimalDataContract (); case DataContractKind.DoubleDataContract : return new DoubleDataContract (); case DataContractKind.FloatDataContract : return new FloatDataContract (); case DataContractKind.GuidDataContract : return new GuidDataContract (); case DataContractKind.IntDataContract : return new IntDataContract (); case DataContractKind.LongDataContract : return new LongDataContract (); case DataContractKind.ObjectDataContract : return new ObjectDataContract (); case DataContractKind.QNameDataContract : return new QNameDataContract (); case DataContractKind.ShortDataContract : return new ShortDataContract (); case DataContractKind.SignedByteDataContract : return new SignedByteDataContract (); case DataContractKind.StringDataContract : return new StringDataContract (); case DataContractKind.TimeSpanDataContract : return new TimeSpanDataContract (); case DataContractKind.UnsignedByteDataContract : return new UnsignedByteDataContract (); case DataContractKind.UnsignedIntDataContract : return new UnsignedIntDataContract (); case DataContractKind.UnsignedLongDataContract : return new UnsignedLongDataContract (); case DataContractKind.UnsignedShortDataContract : return new UnsignedShortDataContract(); case DataContractKind.UriDataContract : return new UriDataContract (); case DataContractKind.InvalidDataContract : return new InvalidDataContract (); } throw new Exception("unknown data contract kind"); } static bool IsRemovedByDR(this Type type) { return (type == typeof(System.Runtime.CompilerServices.DependencyReductionTypeRemoved)); } } }
mit
shemerey/coderepo
algorithms/bitwise_and_of_numbers_range.rb
525
# https://leetcode.com/problems/bitwise-and-of-numbers-range/ # # Given a range [m, n] where 0 <= m <= n <= 2147483647, return the bitwise AND # of all numbers in this range, inclusive. # # For example, given the range [5, 7], you should return 4. # # Credits: # # Special thanks to @amrsaqr for adding this problem and creating all # test cases. # @param {Integer} m # @param {Integer} n # @return {Integer} def range_bitwise_and(m, n) c = 0 while m != n m >>= 1 n >>= 1 c += 1 end m << c end
mit
lgollut/material-ui
packages/material-ui-utils/src/index.js
630
export { default as chainPropTypes } from './chainPropTypes'; export { default as deepmerge } from './deepmerge'; export { default as elementAcceptingRef } from './elementAcceptingRef'; export { default as elementTypeAcceptingRef } from './elementTypeAcceptingRef'; export { default as exactProp } from './exactProp'; export { default as formatMuiErrorMessage } from './formatMuiErrorMessage'; export { default as getDisplayName } from './getDisplayName'; export { default as HTMLElementType } from './HTMLElementType'; export { default as ponyfillGlobal } from './ponyfillGlobal'; export { default as refType } from './refType';
mit
naimkhalifa/codeur.co
resources/views/auth/register.blade.php
3474
@extends('layouts.front.master') @section('content') <section class="hero is-bold"> <div class="hero-body"> <div class="container"> <div class="columns is-vcentered"> <div class="column is-4 is-offset-4"> <h1 class="title"> S'enregistrer </h1> <div class="box"> <form class="form-horizontal" role="form" method="POST" action="{{ route('register') }}"> {{ csrf_field() }} <div class="field"> <label class="label">Nom d'utilisateur</label> <p class="control"> <input class="input {{ $errors->has('name') ? ' is-danger' : '' }}" type="text" name="name" value="{{old('name')}}" placeholder="John Doe" required="required"> </p> @if ($errors->has('name')) <p class="help is-danger"><strong>{{ $errors->first('name') }}</strong></p> @endif </div> <div class="field"> <label class="label">E-mail</label> <p class="control"> <input class="input {{ $errors->has('email') ? ' is-danger' : '' }}" type="email" name="email" value="{{old('email')}}" placeholder="john@example.com" required="required"> </p> @if ($errors->has('email')) <p class="help is-danger"><strong>{{ $errors->first('email') }}</strong></p> @endif </div> <div class="field"> <label class="label">Mot de passe</label> <p class="control"> <input class="input" type="password" name="password" placeholder="●●●●●●●" required="required"> </p> @if ($errors->has('password')) <p class="help is-danger"><strong>{{ $errors->first('password') }}</strong></p> @endif </div> <div class="field"> <label class="label">Confirmation Mot de passe</label> <p class="control"> <input class="input" type="password" name="password_confirmation" placeholder="●●●●●●●" required="required"> </p> </div> <p class="control"> <button type="submit" class="button is-primary">S'enregistrer</button> </p> </form> </div> <p class="has-text-centered"> <a href="/login">Se connecter</a> </p> </div> </div> </div> </div> </section> @endsection
mit