text stringlengths 1 1.05M |
|---|
import React from "react";
import { Label } from "@windmill/react-ui";
const LabelArea = ({ label }) => {
return (
<Label className="col-span-4 sm:col-span-2 font-medium text-sm">
{label}
</Label>
);
};
export default LabelArea;
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.sparql.junit ;
import java.util.ArrayList;
import java.util.List;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.ResultSetFactory;
import org.apache.jena.query.Syntax;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.jena.sparql.resultset.ResultsFormat;
import org.apache.jena.sparql.resultset.SPARQLResult;
import org.apache.jena.sparql.vocabulary.TestManifest;
import org.apache.jena.sparql.vocabulary.TestManifestX;
import org.apache.jena.sparql.vocabulary.VocabTestQuery;
import org.apache.jena.util.FileManager;
import org.apache.jena.util.iterator.ClosableIterator;
import org.apache.jena.util.junit.TestException;
import org.apache.jena.util.junit.TestUtils;
import org.apache.jena.vocabulary.RDF;
import org.apache.jena.vocabulary.RDFS;
/**
* Wrapper class for individual test items. Assumes it is a query test item,
* using both the manifest vocabulary and the test query vocabulary.
*/
public class TestItem
{
    // Monotonic counter backing fakeURI(); not thread-safe — assumed to be
    // used from a single test-runner thread.
    static int counter = 0 ;

    /** Generate a synthetic URI ("test:N") for tests whose resource has no URI. */
    public static String fakeURI() {
        return "test:" + (++counter) ;
    }

    private Resource testResource = null ;      // manifest entry this item was built from (null for programmatic items)
    private Resource actionResource = null ;    // lazily-resolved mf:action node
    private String name ;                       // mf:name of the test
    private boolean buildLuceneIndex = false ;  // true when TestManifestX.textIndex is "true"
    private String resultFile ;                 // expected-results file (literal or URI), may be null
    private String comment ;                    // rdfs:comment, may be null
    private List<String> defaultGraphURIs ;     // data for the default graph (qt:data)
    private List<String> namedGraphURIs ;       // data for named graphs (qt:graphData)
    private Resource testType = null ;          // rdf:type of the entry, or the supplied default
    private String queryFile ;                  // query to execute (literal, URI, or "[]" for a bNode action)
    private Syntax queryFileSyntax ;            // syntax of the query file

    /** Build a TestItem from a manifest entry; {@code defaultTestType} is used when the entry has no rdf:type. */
    public static TestItem create(Resource entry, Resource defaultTestType) {
        return new TestItem(entry, defaultTestType) ;
    }

    /** Build a TestItem directly from file names, bypassing the manifest. */
    public static TestItem create(String _name, String _queryFile, String _dataFile, String _resultFile) {
        return new TestItem(_name, _queryFile, _dataFile, _resultFile) ;
    }

    private TestItem(Resource entry, Resource defaultTestType) {
        testResource = entry ;
        if ( !entry.hasProperty(TestManifest.name) )
            throw new QueryTestException("TestItem with no name (" + entry + ")") ;
        name = _getName() ;
        if ( !entry.hasProperty(TestManifest.action) )
            throw new QueryTestException("TestItem '" + name + "' with no action") ;
        // Assumes one type per test only.
        testType = TestUtils.getResource(entry, RDF.type) ;
        if ( testType == null )
            testType = defaultTestType ;
        resultFile = _getResultFile() ;
        comment = _getComment() ;
        defaultGraphURIs = _getDefaultGraphURIs() ;
        namedGraphURIs = _getNamedGraphsURIs() ;
        queryFile = _getQueryFile() ;
        queryFileSyntax = _getQuerySyntax(entry.getModel(), queryFile, Syntax.syntaxARQ) ;
        buildLuceneIndex = _getTextIndex() ;
    }

    private TestItem(String _name, String _queryFile, String _dataFile, String _resultFile) {
        name = _name ;
        queryFile = _queryFile ;
        defaultGraphURIs = new ArrayList<>() ;
        defaultGraphURIs.add(_dataFile) ;
        namedGraphURIs = new ArrayList<>() ;
        resultFile = _resultFile ;
        comment = "" ;
        queryFileSyntax = Syntax.guessFileSyntax(_queryFile) ;
    }

    /** The manifest entry resource, or null when created programmatically. */
    public Resource getResource() {
        return testResource ;
    }

    /** The mf:action node of the manifest entry. */
    public Resource getAction() {
        return _getAction() ;
    }

    /** @return Returns the testType. */
    public Resource getTestType() {
        return testType ;
    }

    public String getQueryFile() {
        return queryFile ;
    }

    public Syntax getFileSyntax() {
        return queryFileSyntax ;
    }

    public void setFileSyntax(Syntax syntax) {
        queryFileSyntax = syntax ;
    }

    public String getResultFile() {
        return resultFile ;
    }

    /**
     * Load results as a SPARQLResult. If the results are a model, no conversion
     * to a result set is attempted here.
     */
    public SPARQLResult getResults() {
        if ( resultFile == null )
            return null ;
        ResultsFormat format = ResultsFormat.guessSyntax(resultFile) ;

        // Results expressed as an RDF graph.
        if ( ResultsFormat.isRDFGraphSyntax(format) ) {
            Model m = FileManager.get().loadModel(resultFile) ;
            return new SPARQLResult(m) ;
        }

        // Results expressed as an RDF dataset.
        if ( ResultsFormat.isDatasetSyntax(format) ) {
            Dataset d = RDFDataMgr.loadDataset(resultFile) ;
            return new SPARQLResult(d) ;
        }

        // Attempt to handle as a result set or boolean result.
        SPARQLResult x = ResultSetFactory.result(resultFile) ;
        return x ;
    }

    public String getName() {
        return name ;
    }

    /** URI of the test resource, or a generated fake URI for bNode/programmatic items. */
    public String getURI() {
        if ( testResource != null && testResource.isURIResource() )
            return testResource.getURI() ;
        return fakeURI() ;
    }

    public String getComment() {
        return comment ;
    }

    public List<String> getDefaultGraphURIs() {
        return defaultGraphURIs ;
    }

    public List<String> getNamedGraphURIs() {
        return namedGraphURIs ;
    }

    /** True when this test requires a Lucene text index to be built first. */
    public boolean requiresTextIndex() {
        return buildLuceneIndex ;
    }

    private String _getName() {
        Statement s = testResource.getProperty(TestManifest.name) ;
        if ( s == null )
            return "<<unset>>" ;
        return s.getString() ;
    }

    // Lazily resolve and cache the mf:action node.
    private Resource _getAction() {
        if ( actionResource == null )
            actionResource = testResource.getProperty(TestManifest.action).getResource() ;
        return actionResource ;
    }

    private String _getResultFile() {
        try {
            // It's bnode in some update tests.
            // The Update test code managed building the result.
            return TestUtils.getLiteralOrURI(testResource, TestManifest.result) ;
        } catch (TestException ex) { return null ; }   // deliberately best-effort: no result means "no expected results"
    }

    private String _getComment() {
        Statement s = testResource.getProperty(RDFS.comment) ;
        if ( s == null )
            return null ;
        return s.getString() ;
    }

    // ----------------------------------------------------
    // ---- Query specific properties

    /**
     * Get the data file (default graph): maybe unknown if part for the query
     * (FROM)
     *
     * @return List of default-graph data URIs, or null when the action is a URI
     *         (data is then expected inside the query itself).
     */
    private List<String> _getDefaultGraphURIs() {
        if ( !_getAction().isAnon() )
            // Action is a URI - data had better be in the query itself.
            return null ;

        List<String> l = new ArrayList<>() ;
        ClosableIterator<Statement> cIter = _getAction().listProperties(VocabTestQuery.data) ;
        for (; cIter.hasNext();) {
            Statement stmt = cIter.next() ;
            String df = stmt.getResource().getURI() ;
            l.add(df) ;
        }
        cIter.close() ;
        return l ;
    }

    /**
     * Get the named graphs : maybe unknown if part for the query (FROM NAMED)
     *
     * @return List of named-graph data URIs, or null when the action is a URI.
     */
    private List<String> _getNamedGraphsURIs() {
        if ( !_getAction().isAnon() )
            // Action is a URI - data had better be in the query itself.
            return null ;

        List<String> l = new ArrayList<>() ;
        ClosableIterator<Statement> cIter = _getAction().listProperties(VocabTestQuery.graphData) ;
        for (; cIter.hasNext();) {
            Statement obj = cIter.next() ;
            String df = obj.getResource().getURI() ;
            l.add(df) ;
        }
        cIter.close() ;
        return l ;
    }

    /**
     * Get the query file: either it is the action (data in query) or it is
     * specified within the bNode as a query/data pair.
     *
     * @return query file name/URI, or "[]" for a bNode action with no query property.
     */
    private String _getQueryFile() {
        Resource r = _getAction() ;
        if ( r.hasProperty(VocabTestQuery.query) )
            return TestUtils.getLiteralOrURI(r, VocabTestQuery.query) ;

        // No query property - must be this action node
        if ( _getAction().isAnon() )
            return "[]" ;
        return _getAction().getURI() ;
    }

    // Determine query syntax: an explicit qt:querySyntax wins, then the file
    // extension, then the supplied default.
    private Syntax _getQuerySyntax(Model m, String uri, Syntax def) {
        Resource r = m.createResource(uri) ;
        if ( r.hasProperty(TestManifestX.querySyntax) ) {
            Syntax x = Syntax.make(r.getProperty(TestManifestX.querySyntax).getResource().getURI()) ;
            // System.err.println("Query syntax: "+x) ;
            return x ;
        }
        if ( uri != null ) {
            Syntax synFileName = Syntax.guessFileSyntax(uri) ;
            if ( synFileName != null )
                return synFileName ;
        }
        return def ;
    }

    private boolean _getTextIndex() {
        Statement s = testResource.getProperty(TestManifestX.textIndex) ;
        if ( s == null )
            return false ;
        return s.getString().equalsIgnoreCase("true") ;
    }

    // ----------------------------------------------------
    // Misc

    @Override
    public String toString() {
        StringBuilder sbuff = new StringBuilder() ;
        String name = getName() ;
        // String actionStr = FmtUtils.stringForRDFNode(_getAction()) ;
        sbuff.append("Name: " + name) ;
        if ( getComment() != null )
            sbuff.append(" Comment: " + getComment()) ;
        return sbuff.toString() ;
    }
}
|
pkgname=fakeroot
pkgver=1.26

fetch() {
    # Upstream publishes a gzip tarball; save and extract it under a matching
    # .tar.gz name (previously it was misleadingly saved as .tar.xz).
    curl "https://deb.debian.org/debian/pool/main/f/fakeroot/fakeroot_$pkgver.orig.tar.gz" -o "$pkgname-$pkgver.tar.gz"
    tar -xf "$pkgname-$pkgver.tar.gz"
    # Local patches live two directories up.
    patch -p1 < ../../no64.patch
    patch -p1 < ../../stdint.patch
}

build() {
    cd "$pkgname-$pkgver"
    # Cross-compile configuration: $TRIPLE is supplied by the build environment.
    ./configure \
        --prefix=/usr \
        --build=$TRIPLE \
        --host=$TRIPLE
    gmake
}

package() {
    cd "$pkgname-$pkgver"
    gmake install DESTDIR=$pkgdir
}

license() {
    cd "$pkgname-$pkgver"
    cat LICENSE
    # cat COPYING
}
|
#!/usr/bin/env bash

# Copyright 2020 Xiaomi Corporation (Author: Junbo Zhang)
#           2021 Pingfeng Luo
# Apache 2.0

# Example of how to build L and G FST for K2. Most scripts of this example are copied from Kaldi.

set -eou pipefail

# Candidate locations of the AIShell corpus; the first one that exists is used.
dataset_path=(
  /mnt/cfs2/asr/database/AM/aishell
  /root/fangjun/data/aishell
  /home/storage04/zhuangweiji/data/open-source-data/SLR33-aishell/data
)

data=${dataset_path[0]}
# Quote expansions so paths with spaces do not word-split.
for d in "${dataset_path[@]}"; do
  if [ -d "$d" ]; then
    data=$d
    break
  fi
done

if [ ! -d "$data" ]; then
  echo "$data does not exist"
  exit 1
fi

[ -f path.sh ] && . ./path.sh

stage=1

if [ "$stage" -le 1 ]; then
  echo "Data preparation"
  local2/aishell_data_prep.sh "$data/data_aishell/wav" "$data/data_aishell/transcript"
fi

if [ "$stage" -le 2 ]; then
  echo "Dict preparation"
  local2/aishell_prepare_dict.sh "$data/resource_aishell" data/local/dict_nosp
fi

if [ "$stage" -le 3 ]; then
  echo "Lang preparation"
  local/prepare_lang.sh --position-dependent-phones false data/local/dict_nosp \
    "<UNK>" data/local/lang_tmp_nosp data/lang_nosp || exit 1
  echo "To load L:"
  echo "Use::"
  echo "  with open('data/lang_nosp/L.fst.txt') as f:"
  echo "    Lfst = k2.Fsa.from_openfst(f.read(), acceptor=False)"
  echo ""
fi

if [ "$stage" -le 4 ]; then
  echo "LM preparation"
  local2/aishell_train_lms.sh
  gunzip -c data/local/lm/3gram-mincount/lm_unpruned.gz >data/local/lm/lm_tgmed.arpa
  # Note: you need to install kaldilm using `pip install kaldilm`
  # Build G
  python3 -m kaldilm \
    --read-symbol-table="data/lang_nosp/words.txt" \
    --disambig-symbol='#0' \
    --max-order=1 \
    data/local/lm/lm_tgmed.arpa >data/lang_nosp/G_uni.fst.txt

  python3 -m kaldilm \
    --read-symbol-table="data/lang_nosp/words.txt" \
    --disambig-symbol='#0' \
    --max-order=3 \
    data/local/lm/lm_tgmed.arpa >data/lang_nosp/G.fst.txt

  echo ""
  echo "To load G:"
  echo "Use::"
  echo "  with open('data/lang_nosp/G.fst.txt') as f:"
  echo "    G = k2.Fsa.from_openfst(f.read(), acceptor=False)"
  echo ""
fi

if [ "$stage" -le 5 ]; then
  echo "Feature preparation"
  python3 ./prepare.py
fi

if [ "$stage" -le 6 ]; then
  echo "Training"
  python3 ./ctc_train.py
  #python3 ./mmi_bigram_train.py
fi

if [ "$stage" -le 7 ]; then
  echo "Decoding"
  python3 ./ctc_decode.py
  #python3 ./mmi_bigram_decode.py
fi
|
<filename>src/js/index.js<gh_stars>0
import { $ } from "./utils/dom.js";
import { LottoApp } from "./lottoApp.js";
// Entry point: instantiate the lotto application, mounted on the element
// selected by "#app" (the `$` helper presumably wraps querySelector — TODO confirm).
new LottoApp($({ selector: "#app" }));
|
package eu._5gzorro.governancemanager.dto;
import java.util.Objects;
public class MemberDto {
private String id;
private String legalName;
private String address;
private String ledgerIdentity;
public MemberDto() {
}
public String getStakeholderId() {
return id;
}
public void setStakeholderId(String stakeholderId) {
this.id = stakeholderId;
}
public String getLegalName() {
return legalName;
}
public void setLegalName(String legalName) {
this.legalName = legalName;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public String getLedgerIdentity() {
return ledgerIdentity;
}
public void setLedgerIdentity(String ledgerIdentity) {
this.ledgerIdentity = ledgerIdentity;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
MemberDto memberDto = (MemberDto) o;
return id.equals(memberDto.id) && legalName.equals(memberDto.legalName);
}
@Override
public int hashCode() {
return Objects.hash(id, legalName);
}
@Override
public String toString() {
return "MemberDto{" +
"id='" + id + '\'' +
", legalName='" + legalName + '\'' +
", address='" + address + '\'' +
", ledgerIdentity='" + ledgerIdentity + '\'' +
'}';
}
}
|
import React from 'react';
import { connect } from 'react-redux';
import { getPastOrdersThunk } from '../store/pastOrders';
import { PastOrdersCard } from './PastOrdersCard';
import { Card } from 'semantic-ui-react';
export class pastOrders extends React.Component {
async componentDidMount() {
await this.props.getPastOrders();
}
render() {
const pastOrders = this.props.pastOrders;
return (
<div>
<div>
<h2>Your past orders:</h2>
</div>
<Card.Group>
{pastOrders.map(order => (
<Card key={order.id}>
<PastOrdersCard order={order} />
</Card>
))}
</Card.Group>
</div>
);
}
}
const mapStateToProps = state => ({
pastOrders: state.pastOrdersReducer
});
const mapDispatchToProps = dispatch => ({
getPastOrders: () => dispatch(getPastOrdersThunk())
});
export default connect(mapStateToProps, mapDispatchToProps)(pastOrders);
|
#!/bin/bash
set -ex

# Version recorded in the source tree.
CURRENT=$(cat python/hail/version)
# Versions already published on PyPI (one per line).
PYPI_VERSIONS=$(python list_pypi_versions.py hail)

# Is CURRENT already published? Use a fixed-string whole-line match (-xF):
# the previous regex "^${CURRENT}$" let the dots in a version like 0.2.11
# match any character, so e.g. 0.2.11 would wrongly match 0.2111.
set +e
grep -qxF "${CURRENT}" <(echo "$PYPI_VERSIONS")
ALREADY_PUBLISHED=$?
set -e

if [[ $ALREADY_PUBLISHED -ne 0 ]]
then
    echo deploying ${CURRENT}
    ./gradlew shadowJar
    cp build/libs/hail-all-spark.jar python/hail/hail-all-spark.jar
    cp ../README.md python/
    # Disable tracing while handling credentials so they never hit the log.
    set +x
    export TWINE_USERNAME=$(cat secrets/pypi-username)
    export TWINE_PASSWORD=$(cat secrets/pypi-password)
    set -x
    cd python
    rm -rf dist
    python setup.py sdist bdist_wheel
    twine upload dist/*
else
    echo nothing to do ${CURRENT} already published
fi
|
<reponame>0x1306a94/SampleCode<filename>TestTableView/TestTableView/AppDelegate.h
//
// AppDelegate.h
// TestTableView
//
// Created by king on 2020/8/20.
// Copyright © 2020 0x1306a94. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Application delegate: receives UIApplication lifecycle callbacks
/// (launch, backgrounding, termination) via UIApplicationDelegate.
@interface AppDelegate : UIResponder <UIApplicationDelegate>
@end
|
# Termux package recipe for libnotify.
TERMUX_PKG_HOMEPAGE=https://developer.gnome.org/notification-spec/
TERMUX_PKG_DESCRIPTION="Library for sending desktop notifications"
TERMUX_PKG_LICENSE="LGPL-2.0"
TERMUX_PKG_MAINTAINER="Leonid Pliushch <leonid.pliushch@gmail.com>"
TERMUX_PKG_VERSION=0.7.9
TERMUX_PKG_REVISION=9
# Source tarball and its integrity checksum.
TERMUX_PKG_SRCURL=https://ftp.gnome.org/pub/gnome/sources/libnotify/0.7/libnotify-${TERMUX_PKG_VERSION}.tar.xz
TERMUX_PKG_SHA256=66c0517ed16df7af258e83208faaf5069727dfd66995c4bbc51c16954d674761
TERMUX_PKG_DEPENDS="gdk-pixbuf, glib"
# Meson options: skip tests, gobject-introspection and API docs on-device.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
-Dtests=false
-Dintrospection=disabled
-Dgtk_doc=false"
|
<gh_stars>1-10
import { ref, onUnmounted, unref, getCurrentInstance, watch } from 'vue';
import { isProdMode } from '@/utils/env';
import { ReturnMethods } from '../type';
import { getDynamicProps } from '@/utils';
/**
 * Composition helper pairing a modal component with its caller.
 *
 * Returns [register, methods]: `register` is passed to the modal component,
 * which calls it with its exposed methods; `methods` proxies those calls
 * (setProps/openModal/closeModal/setSubLoading) through the stored instance.
 */
export function useModal(props): (((modalMethod: ReturnMethods) => any) | ReturnMethods)[] {
  const modal = ref<Nullable<ReturnMethods>>(null);
  const loaded = ref<Nullable<boolean>>(false);

  function register(modalMethod: ReturnMethods) {
    if (!getCurrentInstance()) {
      throw new Error('useModal() can only be used inside setup() or functional components!');
    }

    // In production, release the instance when the owner unmounts to avoid leaks.
    isProdMode() &&
      onUnmounted(() => {
        modal.value = null;
        loaded.value = false;
      });

    // Skip redundant re-registration of the same instance in production.
    if (unref(loaded) && isProdMode() && modalMethod === unref(modal)) return;

    modal.value = modalMethod;

    // Keep the modal's props in sync with the (possibly reactive) argument.
    watch(
      () => props,
      () => {
        // Optional chaining replaces the previous unchecked destructure of
        // modal.value (which required a @ts-ignore and threw if the ref had
        // been cleared, e.g. after unmount).
        if (props) {
          modal.value?.setProps(getDynamicProps(props));
        }
      },
      {
        immediate: true,
        deep: true,
      }
    );
  }

  // Resolve the registered instance, warning when called too early.
  const getInstance = () => {
    const instance = unref(modal);
    if (!instance) {
      console.error('useModal instance is undefined!');
    }
    return instance;
  };

  const methods: ReturnMethods = {
    setProps: (props): void => {
      getInstance()?.setProps(props);
    },
    openModal: () => {
      getInstance()?.openModal();
    },
    closeModal: () => {
      getInstance()?.closeModal();
    },
    setSubLoading: (status) => {
      getInstance()?.setSubLoading(status);
    },
  };

  return [register, methods];
}
|
#!/bin/bash
# Fetch the go-ole dependency (GOPATH mode).
go get -u github.com/go-ole/go-ole/oleutil
# Build each example in _examples/<name>/. Note: the `-i` flag (install
# dependency packages) was deprecated and then removed from `go build`
# in modern Go releases, so it is no longer passed.
find _examples/ -maxdepth 2 -mindepth 2 -exec sh -c "cd {}; echo building {}; go build main.go" \;
|
<filename>extern/typed-geometry/src/typed-geometry/types/array.hh
#pragma once
#include <typed-geometry/feature/assert.hh>
#include <typed-geometry/types/scalars/default.hh>
namespace tg
{
// TODO: variable sized array
// Fixed-size array similar to std::array: N elements of T in contiguous
// storage, fully usable in constexpr contexts. Bounds are checked via
// TG_CONTRACT (project assertion macro) in operator[].
template <class T, u64 N>
struct array
{
    // must be public for ctor (aggregate initialization)
    T _values[N];

    // Iterator access over the contiguous storage.
    constexpr T* begin() { return _values; }
    constexpr T* end() { return _values + N; }
    constexpr T const* begin() const { return _values; }
    constexpr T const* end() const { return _values + N; }

    // Compile-time element count.
    constexpr u64 size() const { return N; }

    // Raw pointer access.
    constexpr T* data() { return _values; }
    constexpr T const* data() const { return _values; }

    // Bounds-checked element access (contract-checked, not exception-throwing).
    constexpr T& operator[](u64 i)
    {
        TG_CONTRACT(i < N);
        return _values[i];
    }
    constexpr T const& operator[](u64 i) const
    {
        TG_CONTRACT(i < N);
        return _values[i];
    }
};
}
|
#!/usr/bin/env bash
set -e
cd "$(dirname "${BASH_SOURCE[0]}")"

pushd ./
cd ../
# Clone on first run; afterwards only update. The previous
# `git pull || git clone` form retried the clone into an existing
# directory whenever a pull failed transiently (e.g. network error),
# which then failed with "destination path already exists".
if [ -d chain/.git ]; then
    git -C chain pull
else
    git clone https://github.com/crypto-com/chain
fi
cd ./chain
git checkout release/v0.5
./docker/build.sh
popd
|
<gh_stars>1-10
"""
gwcosmoLikelihood Module
<NAME>, <NAME>, <NAME>, <NAME>, <NAME>
In general:
p(x|z,H0,\Omega) is written as p(x|dl(z,H0))*p(x|\Omega)
p(x|dL(z,H0)): self.norms[H0]*self.px_dl(dl(z,H0))
p(x|\Omega): self.skymap.skyprob(ra,dec) or self.skymap.prob[idx]
p(D|z,H0): pdet.pD_zH0_eval(z,H0)
p(s|M(H0)): L_M(M) or L_mdl(m,dl(z,H0))
p(z): zprior(z)
p(M|H0): SchechterMagFunction(H0)(M)
p(\Omega): this term comes out the front and cancels in most cases,
and so does not appear explicitly.
"""
from __future__ import absolute_import
import lal
import numpy as np
import sys
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import healpy as hp
import warnings
warnings.filterwarnings("ignore")
from scipy.integrate import quad, dblquad
from scipy.stats import ncx2, norm, truncnorm
from scipy.interpolate import splev, splrep, interp1d
from astropy import constants as const
from astropy import units as u
from ligo.skymap.moc import rasterize
from ligo.skymap.core import uniq2ang
import astropy.constants as constants
import gwcosmo
from .utilities.standard_cosmology import *
from .utilities.schechter_function import *
from .utilities.schechter_params import *
from .utilities.calc_kcor import *
import time
import progressbar
class gwcosmoLikelihood(object):
"""
A class to hold all the individual components of the posterior for H0,
and methods to stitch them together in the right way.
Parameters
----------
GW_data : gwcosmo.likelihood.posterior_samples.posterior_samples object
Gravitational wave event samples
skymap : gwcosmo.likelihood.skymap.skymap object
Gravitational wave event skymap
galaxy_catalog : gwcosmo.prior.catalog.galaxyCatalog object
The relevant galaxy catalog
EM_counterpart : gwcosmo.prior.catalog.galaxyCatalog object, optional
EM_counterpart data (default=None)
If not None, will default to using this over the galaxy_catalog
Omega_m : float, optional
The matter fraction of the universe (default=0.3)
linear : bool, optional
Use linear cosmology (default=False)
weights : str, optional
Specifies type of luminosity weighting to use: 'schechter' or 'trivial'
(default='schechter') 'trivial' is only for testing purposes and
should not be used in analysis
basic : bool, optional
If True, uses pdet suitable for MDC analysis (default=False)
uncertainty : bool, optional
If true, redshift uncertainty will be assumed and corrected
for (default=False)
rate : str, optional
specifies rate evolution model, 'const' or 'evolving'
Kcorr : bool, optional
If true, will attempt to apply K corrections (default=False)
"""
def __init__(self, H0, GW_data, skymap, galaxy_catalog, pdet, reweight=False, EM_counterpart=None,
             Omega_m=0.308, linear=False, weighted=False, basic=False, uncertainty=False,
             rate='constant', population_params=None, area=0.999, Kcorr=False):
    # Core inputs (H0 is the array of Hubble-constant values the posterior
    # will be evaluated on).
    self.H0 = H0
    self.pdet = pdet
    self.Omega_m = Omega_m
    self.linear = linear
    self.weighted = weighted
    self.basic = basic
    self.uncertainty = uncertainty
    self.skymap = skymap
    self.area = area
    self.Kcorr = Kcorr
    self.reweight = reweight

    # Compact-binary population parameters; fall back to hard-coded defaults
    # when no dictionary is supplied.
    if population_params is None:
        self.mass_distribution = pdet.mass_distribution
        self.alpha = 1.6
        self.mmin = 5
        self.mmax = 100
        self.Lambda = 0
    else:
        self.mass_distribution = population_params['mass_distribution']
        self.alpha = population_params['alpha']
        self.mmin = population_params['mmin']
        self.mmax = population_params['mmax']
        self.Lambda = population_params['Lambda']

    # galaxy_catalog may be None (counterpart / population analyses).
    try:
        self.band = galaxy_catalog.band
    except:
        self.band = 'B'  # hack so that population analysis works from command line.

    # Schechter luminosity-function parameters for the chosen band.
    sp = SchechterParams(self.band)
    self.alpha_sp = sp.alpha
    self.Mstar_obs = sp.Mstar
    self.Mobs_min = sp.Mmin
    self.Mobs_max = sp.Mmax

    if galaxy_catalog == None:
        self.galaxy_catalog = None
        self.mth = None
        self.EM_counterpart = EM_counterpart
        self.whole_cat = True
    else:
        self.galaxy_catalog = galaxy_catalog
        self.mth = galaxy_catalog.mth()
        # NOTE(review): any supplied EM_counterpart is discarded when a
        # galaxy catalog is given — confirm this is intentional.
        self.EM_counterpart = None

    if GW_data is not None:
        # For each H0 value, build an interpolated marginal redshift
        # distribution p(z|x,H0) (self.temps) and its normalisation
        # (self.norms); zmax_GW caps later redshift integrals per H0.
        temps = []
        norms = []
        if reweight == True:
            print("Reweighting samples")
            # NOTE(review): `seed` is never used after this — confirm whether
            # it was meant to seed the reweighting draws.
            seed = np.random.randint(10000)
        bar = progressbar.ProgressBar()
        z_max = []
        # NOTE(review): the loop variable shadows the H0 parameter/attribute.
        for H0 in bar(self.H0):
            if reweight == True:
                zkernel, norm = GW_data.marginalized_redshift_reweight(H0, self.mass_distribution, self.alpha, self.mmin, self.mmax)
            else:
                zkernel, norm = GW_data.marginalized_redshift(H0)
            zmin = np.min(zkernel.dataset)
            zmax = np.max(zkernel.dataset)
            z_max.append(3*zmax)  # safety margin above the sampled support
            z_array = np.linspace(zmin, zmax, 500)
            vals = zkernel(z_array)
            temps.append(interp1d(z_array, vals, bounds_error=False, fill_value=0))
            norms.append(norm)
        self.zmax_GW = z_max
        self.temps = np.array(temps)
        self.norms = np.array(norms)

    # Without posterior samples, fall back to distance information from the
    # skymap: marginalized over the sky, or along the counterpart's line of sight.
    if (GW_data is None and self.EM_counterpart is None):
        dl_array, vals = self.skymap.marginalized_distance()
        self.temp = splrep(dl_array, vals)

    if (GW_data is None and self.EM_counterpart is not None):
        counterpart = self.EM_counterpart
        dl_array, vals = self.skymap.lineofsight_distance(counterpart.ra, counterpart.dec)
        self.temp = splrep(dl_array, vals)

    # TODO: calculate mth for the patch of catalog being used, if whole_cat=False
    if (self.EM_counterpart is None and self.galaxy_catalog is not None):
        # radec_lim[0] == 0 flags an all-sky catalog; otherwise elements 1-4
        # are the RA/dec bounds of the catalog patch.
        self.radec_lim = self.galaxy_catalog.radec_lim[0]
        if self.radec_lim == 0:
            self.whole_cat = True
        else:
            self.whole_cat = False
            self.ra_min = self.galaxy_catalog.radec_lim[1]
            self.ra_max = self.galaxy_catalog.radec_lim[2]
            self.dec_min = self.galaxy_catalog.radec_lim[3]
            self.dec_max = self.galaxy_catalog.radec_lim[4]

        # K-corrections require a hard redshift cut and colour information.
        if self.Kcorr == True:
            self.zcut = 0.5
            self.color_name = self.galaxy_catalog.color_name
            self.color_limit = self.galaxy_catalog.color_limit
        else:
            self.zcut = 10.
            self.color_limit = [-np.inf, np.inf]

        if self.whole_cat == False:
            # Sky fraction covered by the catalog patch (solid-angle integral).
            def skynorm(dec, ra):
                return np.cos(dec)
            self.catalog_fraction = dblquad(skynorm, self.ra_min, self.ra_max,
                                            lambda x: self.dec_min,
                                            lambda x: self.dec_max,
                                            epsabs=0, epsrel=1.49e-4)[0]/(4.*np.pi)
            self.rest_fraction = 1-self.catalog_fraction
            print('This catalog covers {}% of the full sky'.format(self.catalog_fraction*100))

        #find galaxies within the bounds of the galaxy catalog
        # NOTE(review): self.ra_min/ra_max/dec_min/dec_max are only assigned
        # in the whole_cat == False branch above — confirm they are defined
        # (e.g. with full-sky defaults) before this selection when
        # whole_cat is True.
        sel = np.argwhere((self.ra_min <= self.galaxy_catalog.ra) & \
                          (self.galaxy_catalog.ra <= self.ra_max) & \
                          (self.dec_min <= self.galaxy_catalog.dec) & \
                          (self.galaxy_catalog.dec <= self.dec_max) & \
                          ((self.galaxy_catalog.z-3*self.galaxy_catalog.sigmaz) <= self.zcut) & \
                          (self.color_limit[0] <= galaxy_catalog.color) & \
                          (galaxy_catalog.color <= self.color_limit[1]))
        self.allz = self.galaxy_catalog.z[sel].flatten()
        self.allra = self.galaxy_catalog.ra[sel].flatten()
        self.alldec = self.galaxy_catalog.dec[sel].flatten()
        self.allm = self.galaxy_catalog.m[sel].flatten()
        self.allsigmaz = self.galaxy_catalog.sigmaz[sel].flatten()
        self.allcolor = self.galaxy_catalog.color[sel].flatten()
        self.mth = self.galaxy_catalog.mth()
        self.nGal = len(self.allz)

        # With no redshift uncertainty, a single draw per galaxy suffices
        # and the sigma array is zeroed; otherwise Monte-Carlo smearing uses
        # many draws for bright galaxies, fewer for faint ones.
        if self.uncertainty == False:
            self.nsmear_fine = 1
            self.nsmear_coarse = 1
            self.allsigmaz = np.zeros(len(self.allz))
        else:
            self.nsmear_fine = 10000
            self.nsmear_coarse = 20

    # Cached per-H0 probability arrays, computed lazily by the p*_ methods.
    self.pDG = None
    self.pGD = None
    self.pnGD = None
    self.pDnG = None

    # Note that zmax is an artificial limit that
    # should be well above any redshift value that could
    # impact the results for the considered H0 values.
    self.zmax = 10.

    self.zprior = redshift_prior(Omega_m=self.Omega_m, linear=self.linear)
    self.cosmo = fast_cosmology(Omega_m=self.Omega_m, linear=self.linear)
    self.rate = rate
def ps_z(self, z):
    """
    Merger-rate evolution weighting p(s|z).

    Parameters
    ----------
    z : float or array_like
        Redshift.

    Returns
    -------
    float or array_like
        1.0 for a constant rate; (1+z)**Lambda for an evolving rate.

    Raises
    ------
    ValueError
        If self.rate is not 'constant' or 'evolving'. (The previous
        implementation silently returned None, which surfaced later as
        an opaque arithmetic TypeError.)
    """
    if self.rate == 'constant':
        return 1.0
    if self.rate == 'evolving':
        return (1.0+z)**self.Lambda
    raise ValueError("Unknown rate evolution model: {!r} "
                     "(expected 'constant' or 'evolving')".format(self.rate))
def px_dl(self, dl, temp):
    """
    Returns a probability for a given distance dl
    from the interpolated function.

    When samples were reweighted the spline already accounts for the
    distance prior; otherwise the dl**2 prior is divided out.
    """
    value = splev(dl, temp, ext=3)
    if self.reweight == True:
        return value
    return value / dl**2
def pz_xH0(self,z,temp):
    """
    Returns p(z|x,H0).

    Parameters
    ----------
    z : float or array_like
        Redshift(s) at which to evaluate.
    temp : callable
        Per-H0 redshift interpolant (interp1d built in __init__).
    """
    return temp(z)
def px_H0G(self, H0):
    """
    Returns p(x|H0,G) for given values of H0.
    This corresponds to the numerator of Eq 12 in the method doc.
    The likelihood of the GW data given H0 and conditioned on
    the source being inside the galaxy catalog

    Parameters
    ----------
    H0 : float or array_like
        Hubble constant value(s) in kms-1Mpc-1

    Returns
    -------
    float or array_like
        p(x|H0,G)
    """
    num = np.zeros(len(H0))

    # Threshold sky-probability density bounding the self.area credible region.
    prob_sorted = np.sort(self.skymap.prob)[::-1]
    prob_sorted_cum = np.cumsum(prob_sorted)
    # find index of array which bounds the self.area confidence interval
    idx = np.searchsorted(prob_sorted_cum, self.area)
    minskypdf = prob_sorted[idx]*self.skymap.npix

    count = 0  # NOTE(review): unused

    # TODO: expand this case to look at a skypatch
    # around the counterpart ('pencilbeam')
    if self.EM_counterpart is not None:
        nGalEM = self.EM_counterpart.nGal()
        for i in range(nGalEM):
            counterpart = self.EM_counterpart.get_galaxy(i)
            tempsky = self.skymap.skyprob(counterpart.ra, counterpart.dec)*self.skymap.npix
            tempdist = np.zeros(len(H0))
            for k in range(len(H0)):
                # NOTE(review): `z` is undefined in this branch — presumably
                # the counterpart's redshift (counterpart.z?). This branch
                # would raise NameError if exercised; confirm intent.
                tempdist[k] = self.norms[k]*self.pz_xH0(z,self.temps[k])
            # NOTE(review): `numnorm` is used before assignment here —
            # it is only defined in the else branch below.
            numnorm += tempdist*tempsky
    else:
        #find galaxies within the bounds of the GW event
        tempsky = self.skymap.skyprob(self.allra, self.alldec)*self.skymap.npix
        ind = np.argwhere(tempsky >= minskypdf)
        tempsky = tempsky[ind].flatten()

        zs = self.allz[ind].flatten()
        ras = self.allra[ind].flatten()
        decs = self.alldec[ind].flatten()
        ms = self.allm[ind].flatten()
        sigzs = self.allsigmaz[ind].flatten()
        colors = self.allcolor[ind].flatten()

        if self.weighted:
            mlim = np.percentile(np.sort(ms),0.01) # more draws for galaxies in brightest 0.01 percent
        else:
            mlim = 1.0

        bar = progressbar.ProgressBar()
        print("Calculating p(x|H0,G)")
        # loop over galaxies
        for i in bar(range(len(zs))):
            if ms[i] <= mlim: #do more loops over brightest galaxies
                nsmear = self.nsmear_fine
            else:
                nsmear = self.nsmear_coarse
            numinner=np.zeros(len(H0))

            # Monte-Carlo draws from the (truncated-at-zero) redshift
            # uncertainty of this galaxy.
            a = (0.0 - zs[i]) / sigzs[i]
            zsmear = truncnorm.rvs(a, 5, loc=zs[i], scale=sigzs[i], size=nsmear)
            zsmear = zsmear[np.argwhere(zsmear<self.zcut)].flatten() # remove support above the catalogue hard redshift cut

            tempdist = np.zeros([len(H0),len(zsmear)])
            if len(zsmear)>0:
                # p(z|x,H0) * p(s|z) for every H0 and every draw.
                for k in range(len(H0)):
                    tempdist[k,:] = self.norms[k]*self.pz_xH0(zsmear,self.temps[k])*self.ps_z(zsmear)
                for n in range(len(zsmear)):
                    if self.weighted:
                        if self.Kcorr == True:
                            Kcorr = calc_kcor(self.band,zsmear[n],self.color_name,colour_value=colors[i])
                        else:
                            Kcorr = 0.
                        # Luminosity weighting from apparent magnitude and
                        # H0-dependent luminosity distance.
                        weight = L_mdl(ms[i], self.cosmo.dl_zH0(zsmear[n], H0), Kcorr=Kcorr)
                    else:
                        weight = 1.0
                    numinner += tempdist[:,n]*tempsky[i]*weight
            # Average over the Monte-Carlo draws for this galaxy.
            normnuminner = numinner/nsmear
            num += normnuminner
        print("{} galaxies from this catalog lie in the event's {}% confidence interval".format(len(zs),self.area*100))
        numnorm = num/self.nGal
    return numnorm
def pD_H0G(self,H0):
    """
    Returns p(D|H0,G) (the normalising factor for px_H0G).
    This corresponds to the denominator of Eq 12 in the methods doc.
    The probability of detection as a function of H0, conditioned on the source being inside the galaxy catalog

    Caches the result on self.pDG.

    Parameters
    ----------
    H0 : float or array_like
        Hubble constant value(s) in kms-1Mpc-1

    Returns
    -------
    float or array_like
        p(D|H0,G)
    """
    den = np.zeros(len(H0))

    if self.weighted:
        mlim = np.percentile(np.sort(self.allm),0.01) # more draws for galaxies in brightest 0.01 percent
    else:
        mlim = 1.0

    bar = progressbar.ProgressBar()
    print("Calculating p(D|H0,G)")
    # loop over galaxies
    for i in bar(range(len(self.allz))):
        if self.allm[i] <= mlim: #do more loops over brightest galaxies
            nsmear = self.nsmear_fine
        else:
            nsmear = self.nsmear_coarse
        deninner=np.zeros(len(H0))

        # Monte-Carlo draws from this galaxy's (truncated-at-zero)
        # redshift uncertainty.
        a = (0.0 - self.allz[i]) / self.allsigmaz[i]
        zsmear = truncnorm.rvs(a, 5, loc=self.allz[i], scale=self.allsigmaz[i], size=nsmear)
        zsmear = zsmear[np.argwhere(zsmear<self.zcut)] # remove support above the catalogue hard redshift cut

        if len(zsmear)>0:
            # loop over random draws from galaxies
            for n in range(len(zsmear)):
                if self.weighted:
                    if self.Kcorr == True:
                        Kcorr = calc_kcor(self.band,zsmear[n],self.color_name,colour_value=self.allcolor[i])
                    else:
                        Kcorr = 0.
                    weight = L_mdl(self.allm[i], self.cosmo.dl_zH0(zsmear[n], H0), Kcorr=Kcorr)
                else:
                    weight = 1.0
                # Detection probability: either a basic distance-only model
                # or the full p(D|z,H0) evaluation.
                if self.basic:
                    prob = self.pdet.pD_dl_eval_basic(self.cosmo.dl_zH0(zsmear[n],H0)).flatten()
                else:
                    prob = self.pdet.pD_zH0_eval(zsmear[n],H0).flatten()
                deninner += prob*weight*self.ps_z(zsmear[n])
        # Average over the Monte-Carlo draws for this galaxy.
        normdeninner = deninner/nsmear
        den += normdeninner

    self.pDG = den/self.nGal
    return self.pDG
def pG_H0D(self,H0):
    """
    Returns p(G|H0,D)
    This corresponds to Eq 16 in the doc.
    The probability that the host galaxy is in the catalogue given detection and H0.

    Caches the result on self.pGD.

    Parameters
    ----------
    H0 : float or array_like
        Hubble constant value(s) in kms-1Mpc-1

    Returns
    -------
    float or array_like
        p(G|H0,D)
    """
    # Warning - this integral misbehaves for small values of H0 (<25 kms-1Mpc-1). TODO: fix this.
    num = np.zeros(len(H0))
    den = np.zeros(len(H0))

    # TODO: vectorize this if possible
    bar = progressbar.ProgressBar()
    print("Calculating p(G|H0,D)")
    for i in bar(range(len(H0))):
        # Integrand over absolute magnitude M and redshift z:
        # Schechter function * detection probability * redshift prior * rate term,
        # optionally luminosity-weighted.
        def I(M,z):
            if self.basic:
                temp = SchechterMagFunction(H0=H0[i],Mstar_obs=self.Mstar_obs,alpha=self.alpha_sp)(M)*self.pdet.pD_dl_eval_basic(self.cosmo.dl_zH0(z,H0[i]))*self.zprior(z)*self.ps_z(z)
            else:
                temp = SchechterMagFunction(H0=H0[i],Mstar_obs=self.Mstar_obs,alpha=self.alpha_sp)(M)*self.pdet.pD_zH0_eval(z,H0[i])*self.zprior(z)*self.ps_z(z)
            if self.weighted:
                return temp*L_M(M)
            else:
                return temp

        # Mmin and Mmax currently corresponding to 10L* and 0.001L* respectively, to correspond with MDC
        # Will want to change in future.
        # TODO: test how sensitive this result is to changing Mmin and Mmax.
        Mmin = M_Mobs(H0[i],self.Mobs_min)
        Mmax = M_Mobs(H0[i],self.Mobs_max)

        # Numerator: only galaxies bright enough to be in the catalog
        # (apparent magnitude above mth); denominator: all galaxies.
        num[i] = dblquad(I,0,self.zcut,lambda x: Mmin,lambda x: min(max(M_mdl(self.mth,self.cosmo.dl_zH0(x,H0[i])),Mmin),Mmax),epsabs=0,epsrel=1.49e-4)[0]
        den[i] = dblquad(I,0,self.zmax,lambda x: Mmin,lambda x: Mmax,epsabs=0,epsrel=1.49e-4)[0]

    self.pGD = num/den
    return self.pGD
def pnG_H0D(self,H0):
    """
    Returns 1.0 - pG_H0D(H0).
    This corresponds to Eq 17 in the doc.
    The probability that a galaxy is not in the catalogue given detection and H0

    Caches the complement on self.pnGD (and self.pGD as a side effect when
    it has not been computed yet).

    Parameters
    ----------
    H0 : float or array_like
        Hubble constant value(s) in kms-1Mpc-1

    Returns
    -------
    float or array_like
        p(bar{G}|H0,D)
    """
    # Compute p(G|H0,D) lazily. The previous guard, `if all(self.pGD)==None:`,
    # was broken: it raised TypeError when pGD was still None (None is not
    # iterable) and its `== None` comparison never triggered otherwise.
    if self.pGD is None:
        self.pGD = self.pG_H0D(H0)
    self.pnGD = 1.0 - self.pGD
    return self.pnGD
def px_H0nG(self,H0,allsky=True):
"""
Returns p(x|H0,bar{G}).
This corresponds to the numerator of Eq 19 in the doc
The likelihood of the GW data given H0, conditioned on the source being outside the galaxy catalog for an
all sky or patchy galaxy catalog.
Parameters
----------
H0 : float or array_like
Hubble constant value(s) in kms-1Mpc-1
Returns
-------
float or array_like
p(x|H0,bar{G})
"""
distnum = np.zeros(len(H0))
bar = progressbar.ProgressBar()
print("Calculating p(x|H0,bar{G})")
for i in bar(range(len(H0))):
def Inum(M,z):
temp = self.norms[i]*self.pz_xH0(z,self.temps[i])*self.zprior(z) \
*SchechterMagFunction(H0=H0[i],Mstar_obs=self.Mstar_obs,alpha=self.alpha_sp)(M)*self.ps_z(z)
if self.weighted:
return temp*L_M(M)
else:
return temp
Mmin = M_Mobs(H0[i],self.Mobs_min)
Mmax = M_Mobs(H0[i],self.Mobs_max)
if allsky == True:
distnum[i] = dblquad(Inum,0.0,self.zcut, lambda x: min(max(M_mdl(self.mth,self.cosmo.dl_zH0(x,H0[i])),Mmin),Mmax), lambda x: Mmax,epsabs=0,epsrel=1.49e-4)[0] \
+ dblquad(Inum,self.zcut,self.zmax_GW[i], lambda x: Mmin, lambda x: Mmax,epsabs=0,epsrel=1.49e-4)[0]
else:
distnum[i] = dblquad(Inum,0.0,self.zmax_GW[i],lambda x: Mmin,lambda x: Mmax,epsabs=0,epsrel=1.49e-4)[0]
# TODO: expand this case to look at a skypatch around the counterpart ('pencilbeam')
if self.EM_counterpart != None:
nGalEM = self.EM_counterpart.nGal()
for i in range(nGalEM):
counterpart = self.EM_counterpart.get_galaxy(i)
tempsky = self.skymap.skyprob(counterpart.ra,counterpart.dec)*self.skymap.npix
num += distnum*tempsky
else:
pixind = range(self.skymap.npix)
theta,rapix = hp.pix2ang(self.skymap.nside,pixind,nest=True)
decpix = np.pi/2.0 - theta
idx = (self.ra_min <= rapix) & (rapix <= self.ra_max) & (self.dec_min <= decpix) & (decpix <= self.dec_max)
if allsky == True:
skynum = self.skymap.prob[idx].sum()
else:
skynum = 1.0 - self.skymap.prob[idx].sum()
print("{}% of the event's sky probability is contained within the patch covered by the catalog".format(skynum*100))
num = distnum*skynum
return num
    def pD_H0nG(self,H0,allsky=True):
        """
        Returns p(D|H0,bar{G})
        This corresponds to the denominator of Eq 19 in the doc.
        The probability of detection as a function of H0, conditioned on the source being outside the galaxy catalog for an
        all sky or patchy galaxy catalog.

        Parameters
        ----------
        H0 : float or array_like
            Hubble constant value(s) in kms-1Mpc-1
        allsky : bool, optional
            If True (default) the catalog is treated as covering the whole sky;
            if False, only the patch bounded by self.ra_min/max, self.dec_min/max.

        Returns
        -------
        float or array_like
            p(D|H0,bar{G})
        """
        # TODO: same fixes as for pG_H0D
        den = np.zeros(len(H0))
        # Solid-angle fraction of the sky covered by the catalog patch:
        # integral of cos(dec) over the RA/dec box, normalised by 4*pi steradians.
        def skynorm(dec,ra):
            return np.cos(dec)
        norm = dblquad(skynorm,self.ra_min,self.ra_max,lambda x: self.dec_min,lambda x: self.dec_max,epsabs=0,epsrel=1.49e-4)[0]/(4.*np.pi)
        bar = progressbar.ProgressBar()
        print("Calculating p(D|H0,bar{G})")
        for i in bar(range(len(H0))):
            # Integrand: Schechter function x detection probability x redshift
            # prior x merger-rate evolution.
            def I(M,z):
                if self.basic:
                    temp = SchechterMagFunction(H0=H0[i],Mstar_obs=self.Mstar_obs,alpha=self.alpha_sp)(M)*self.pdet.pD_dl_eval_basic(self.cosmo.dl_zH0(z,H0[i]))*self.zprior(z)*self.ps_z(z)
                else:
                    temp = SchechterMagFunction(H0=H0[i],Mstar_obs=self.Mstar_obs,alpha=self.alpha_sp)(M)*self.pdet.pD_zH0_eval(z,H0[i])*self.zprior(z)*self.ps_z(z)
                if self.weighted:
                    return temp*L_M(M)
                else:
                    return temp
            Mmin = M_Mobs(H0[i],self.Mobs_min)
            Mmax = M_Mobs(H0[i],self.Mobs_max)
            if allsky == True:
                # Below zcut only galaxies fainter than the catalog threshold are
                # outside the catalog; above zcut everything is outside it.
                den[i] = dblquad(I,0.0,self.zcut, lambda x: min(max(M_mdl(self.mth,self.cosmo.dl_zH0(x,H0[i])),Mmin),Mmax), lambda x: Mmax,epsabs=0,epsrel=1.49e-4)[0] \
                + dblquad(I,self.zcut,self.zmax, lambda x: Mmin, lambda x: Mmax,epsabs=0,epsrel=1.49e-4)[0]
            else:
                den[i] = dblquad(I,0.0,self.zmax,lambda x: Mmin,lambda x: Mmax,epsabs=0,epsrel=1.49e-4)[0]

        # Weight by the patch's sky fraction (or its complement for allsky=False).
        if allsky == True:
            pDnG = den*norm
        else:
            pDnG = den*(1.-norm)
        return pDnG
def px_H0_counterpart(self,H0):
"""
Returns p(x|H0,counterpart)
This corresponds to the numerator or Eq 6 in the doc.
The likelihood of the GW data given H0 and direct counterpart.
Parameters
----------
H0 : float or array_like
Hubble constant value(s) in kms-1Mpc-1
Returns
-------
float or array_like
p(x|H0,counterpart)
"""
z = self.EM_counterpart.z
sigma = self.EM_counterpart.sigmaz
a = (0.0 - z) / sigma # boundary so samples don't go below 0
zsmear = truncnorm.rvs(a, 5, loc=z, scale=sigma, size=10000)
num = np.zeros(len(H0))
for k in range(len(H0)):
num[k] = np.sum(self.norms[k]*self.pz_xH0(zsmear,self.temps[k]))
return num
    def pD_H0(self,H0):
        """
        Returns p(D|H0).
        This corresponds to the denominator of Eq 6 in the doc.
        The probability of detection as a function of H0, marginalised over redshift, and absolute magnitude

        Parameters
        ----------
        H0 : float or array_like
            Hubble constant value(s) in kms-1Mpc-1

        Returns
        -------
        float or array_like
            p(D|H0)
        """
        den = np.zeros(len(H0))
        bar = progressbar.ProgressBar()
        print("Calculating p(D|H0)")
        for i in bar(range(len(H0))):
            # Integrand: Schechter function x detection probability x redshift prior.
            # Note the (z, M) argument order here -- the outer dblquad integral below
            # runs over M, the inner over z (opposite of pG_H0D).
            def I(z,M):
                if self.basic:
                    temp = SchechterMagFunction(H0=H0[i],Mstar_obs=self.Mstar_obs,alpha=self.alpha_sp)(M)*self.pdet.pD_dl_eval_basic(self.cosmo.dl_zH0(z,H0[i]))*self.zprior(z)
                else:
                    temp = SchechterMagFunction(H0=H0[i],Mstar_obs=self.Mstar_obs,alpha=self.alpha_sp)(M)*self.pdet.pD_zH0_eval(z,H0[i])*self.zprior(z)
                if self.weighted:
                    return temp*L_M(M)
                else:
                    return temp
            Mmin = M_Mobs(H0[i],self.Mobs_min)
            Mmax = M_Mobs(H0[i],self.Mobs_max)
            den[i] = dblquad(I,Mmin,Mmax,lambda x: 0.0,lambda x: self.zmax,epsabs=0,epsrel=1.49e-4)[0]

        # NOTE(review): the result is cached on self.pDnG (the bar{G} slot) even
        # though this is p(D|H0); confirm this aliasing is intentional.
        self.pDnG = den
        return self.pDnG
def px_H0_empty(self,H0):
"""
Returns the numerator of the empty catalog case
Parameters
----------
H0 : float or array_like
Hubble constant value(s) in kms-1Mpc-1
Returns
-------
float or array_like
p(x|H0,bar{G})
"""
distnum = np.zeros(len(H0))
for i in range(len(H0)):
def Inum(z):
temp = self.norms[i]*self.pz_xH0(z,self.temps[i])*self.zprior(z)*self.ps_z(z)
return temp
distnum[i] = quad(Inum,0.0,self.zmax_GW[i],epsabs=0,epsrel=1.49e-4)[0]
skynum = 1.
num = distnum*skynum
return num
def pD_H0_empty(self,H0):
"""
Returns the denominator of the empty catalog case
Parameters
----------
H0 : float or array_like
Hubble constant value(s) in kms-1Mpc-1
Returns
-------
float or array_like
p(D|H0,bar{G})
"""
den = np.zeros(len(H0))
for i in range(len(H0)):
def I(z):
temp = self.pdet.pD_zH0_eval(z,H0[i])*self.zprior(z)*self.ps_z(z)
return temp
den[i] = quad(I,0.0,self.zmax,epsabs=0,epsrel=1.49e-4)[0]
return den
    def likelihood(self,H0,complete=False,counterpart_case='direct',new_skypatch=False,population=False):
        """
        The likelihood for a single event
        This corresponds to Eq 3 (statistical) or Eq 6 (counterpart) in the doc, depending on parameter choices.

        Parameters
        ----------
        H0 : float or array_like
            Hubble constant value(s) in kms-1Mpc-1
        complete : bool, optional
            Is the galaxy catalog complete to all relevant distances/redshifts? (default=False)
        counterpart_case : str, optional
            Choice of counterpart analysis (default='direct')
            if 'direct', will assume the counterpart is correct with certainty
            if 'pencilbeam', will assume the host galaxy is along the counterpart's line of sight, but may be beyond it
        new_skypatch : bool, optional
            If True, use the skypatch likelihood method (default=False)
        population : bool, optional
            If True, use the empty-catalog (population-only) likelihood (default=False)

        Returns
        -------
        float or array_like
            p(x|H0,D)
        """
        # NOTE(review): the `all(self.xxx)==None` checks below compare the bool
        # returned by all() against None; confirm the intended "not yet computed"
        # sentinel (see pnG_H0D). They are preserved here unchanged.
        if self.EM_counterpart != None:
            if counterpart_case == 'direct':
                pxG = self.px_H0_counterpart(H0)
                self.pDG = self.pD_H0(H0)
                likelihood = pxG/self.pDG # Eq 6

            # The pencilbeam case is currently coded up along the line of sight of the counterpart
            # For GW170817 the likelihood produced is identical to the 'direct' counterpart case
            # TODO: allow this to cover a small patch of sky
            elif counterpart_case == 'pencilbeam':
                pxG = self.px_H0G(H0)
                if all(self.pDG)==None:
                    self.pDG = self.pD_H0G(H0)
                if all(self.pGD)==None:
                    self.pGD = self.pG_H0D(H0)
                if all(self.pnGD)==None:
                    self.pnGD = self.pnG_H0D(H0)
                if all(self.pDnG)==None:
                    self.pDnG = self.pD_H0nG(H0)
                pxnG = self.px_H0nG(H0)

                likelihood = self.pGD*(pxG/self.pDG) + self.pnGD*(pxnG/self.pDnG) # Eq 3 along a single line of sight
            else:
                # NOTE(review): this branch leaves `likelihood` undefined, so the
                # return below will raise NameError for an invalid counterpart_case.
                print("Please specify counterpart_case ('direct' or 'pencilbeam').")

        elif new_skypatch==True:
            likelihood,pxG,self.pDG,self.pGD,self.pnGD,pxnG,self.pDnG = self.likelihood_skypatch(H0,complete=complete)

        elif population==True:
            # Empty-catalog case: population prior only, no galaxy information.
            pxG = self.px_H0_empty(H0)
            self.pDG = self.pD_H0_empty(H0)
            likelihood = pxG/self.pDG

        else:
            # Statistical (galaxy catalog) case.
            pxG = self.px_H0G(H0)
            if all(self.pDG)==None:
                self.pDG = self.pD_H0G(H0)

            if complete==True:
                likelihood = pxG/self.pDG # Eq 3 with p(G|H0,D)=1 and p(bar{G}|H0,D)=0
            else:
                if all(self.pGD)==None:
                    self.pGD = self.pG_H0D(H0)
                if all(self.pnGD)==None:
                    self.pnGD = self.pnG_H0D(H0)
                if all(self.pDnG)==None:
                    self.pDnG = self.pD_H0nG(H0)
                pxnG = self.px_H0nG(H0)

                likelihood = self.pGD*(pxG/self.pDG) + self.pnGD*(pxnG/self.pDnG) # Eq 3

            if self.whole_cat == False:
                # Catalog only covers part of the sky: combine the in-patch and
                # rest-of-sky contributions, weighted by their sky fractions (Eq 4).
                pDnG_rest_of_sky = self.pD_H0nG(H0,allsky=False)
                pxnG_rest_of_sky = self.px_H0nG(H0,allsky=False)
                likelihood = likelihood*self.catalog_fraction + (pxnG_rest_of_sky/pDnG_rest_of_sky)*self.rest_fraction # Eq 4

        # Fill placeholder values for terms that do not apply in the chosen mode,
        # so the returned tuple always has the same structure.
        if (complete==True) or (self.EM_counterpart != None) or (population==True):
            self.pGD = np.ones(len(H0))
            self.pnGD = np.zeros(len(H0))
            pxnG = np.zeros(len(H0))
            self.pDnG = np.ones(len(H0))

        if (self.whole_cat==True) or (self.EM_counterpart != None) or (population==True):
            pDnG_rest_of_sky = np.ones(len(H0))
            pxnG_rest_of_sky = np.zeros(len(H0))
            self.rest_fraction = 0
            self.catalog_fraction = 1

        return likelihood,pxG,self.pDG,self.pGD,self.catalog_fraction, pxnG,self.pDnG,self.pnGD, pxnG_rest_of_sky,pDnG_rest_of_sky,self.rest_fraction
    def px_DGH0_skypatch(self,H0):
        """
        The "in catalog" part of the new skypatch method
        using a catalog which follows the GW event's sky patch contour
        p(x|D,G,H0)

        Parameters
        ----------
        H0 : float or array_like
            Hubble constant value(s) in kms-1Mpc-1

        Returns
        -------
        arrays
            numerator and denominator
        """
        num = np.zeros(len(H0))
        den = np.zeros(len(H0))
        print('whole catalog apparent magnitude threshold: {}'.format(self.mth))
        # Keep only galaxies with non-zero support in the event's skymap.
        tempsky = self.skymap.skyprob(self.allra, self.alldec)*self.skymap.npix
        ind = np.argwhere(tempsky > 0.)
        tempsky = tempsky[ind].flatten()
        zs = self.allz[ind].flatten()
        ras = self.allra[ind].flatten()
        decs = self.alldec[ind].flatten()
        ms = self.allm[ind].flatten()
        sigzs = self.allsigmaz[ind].flatten()
        colors = self.allcolor[ind].flatten()
        max_mth = np.amax(ms)
        N = len(zs)
        if self.weighted:
            mlim = np.percentile(np.sort(ms),0.01) # more draws for galaxies in brightest 0.01 percent
        else:
            mlim = 1.0
        bar = progressbar.ProgressBar()
        print("Calculating p(x|D,H0,G) for this event's skyarea")
        # loop over galaxies
        for i in bar(range(N)):
            numinner=np.zeros(len(H0))
            deninner=np.zeros(len(H0))
            if ms[i] <= mlim: #do more loops over brightest galaxies
                nsmear = self.nsmear_fine
            else:
                nsmear = self.nsmear_coarse
            # Smear each galaxy's redshift by its measurement uncertainty,
            # truncated at z=0 so no negative redshifts are drawn.
            a = (0.0 - zs[i]) / sigzs[i]
            zsmear = truncnorm.rvs(a, 5, loc=zs[i], scale=sigzs[i], size=nsmear)
            zsmear = zsmear[np.argwhere(zsmear<self.zcut)].flatten() # remove support above the catalogue hard redshift cut
            tempdist = np.zeros([len(H0),len(zsmear)])
            if len(zsmear)>0:
                for k in range(len(H0)):
                    tempdist[k,:] = self.norms[k]*self.pz_xH0(zsmear,self.temps[k])*self.ps_z(zsmear)
                # loop over random draws from galaxies
                for n in range(len(zsmear)):
                    if self.weighted:
                        if self.Kcorr == True:
                            Kcorr = calc_kcor(self.band,zsmear[n],self.color_name,colour_value=colors[i])
                        else:
                            Kcorr = 0.
                        # Luminosity weight from the apparent magnitude and
                        # luminosity distance at each H0 value (vectorised over H0).
                        weight = L_mdl(ms[i], self.cosmo.dl_zH0(zsmear[n], H0), Kcorr=Kcorr)
                    else:
                        weight = 1.0
                    numinner += tempdist[:,n]*tempsky[i]*weight
                    if self.basic:
                        prob = self.pdet.pD_dl_eval_basic(self.cosmo.dl_zH0(zsmear[n],H0)).flatten()
                    else:
                        prob = self.pdet.pD_zH0_eval(zsmear[n],H0).flatten()
                    deninner += prob*weight*self.ps_z(zsmear[n])
            # Average over the random redshift draws for this galaxy.
            normnuminner = numinner/nsmear
            num += normnuminner
            normdeninner = deninner/nsmear
            den += normdeninner
        print("{} galaxies from this catalog lie in the event's {}% confidence interval".format(len(zs),self.area*100))
        # NOTE(review): `numnorm` is computed but the unnormalised `num` is what
        # gets returned below; confirm which is intended.
        numnorm = num/self.nGal
        if N >= 500:
            self.mth = np.median(ms)
        else:
            self.mth = max_mth #update mth to reflect the area within the event's sky localisation (max m within patch)
        print('event patch apparent magnitude threshold: {}'.format(self.mth))
        print("{} galaxies (out of a total possible {}) are supported by this event's skymap".format(N,self.nGal))
        return num,den
    def px_DnGH0_skypatch(self,H0):
        """
        The "beyond catalog" part of the new skypatch method
        using a catalog which follows the GW event's sky patch contour
        p(x|D,Gbar,H0)

        Parameters
        ----------
        H0 : float or array_like
            Hubble constant value(s) in kms-1Mpc-1

        Returns
        -------
        arrays
            numerator and denominator
        """
        distnum = np.zeros(len(H0))
        distden = np.zeros(len(H0))
        bar = progressbar.ProgressBar()
        print("Calculating p(x|D,H0,bar{G}) for this event's skyarea")
        for i in bar(range(len(H0))):
            Mmin = M_Mobs(H0[i],self.Mobs_min)
            Mmax = M_Mobs(H0[i],self.Mobs_max)
            # Numerator integrand: GW distance likelihood x redshift prior x
            # Schechter function x merger-rate evolution.
            def Inum(z,M):
                temp = self.norms[i]*self.pz_xH0(z,self.temps[i])*self.zprior(z) \
                *SchechterMagFunction(H0=H0[i],Mstar_obs=self.Mstar_obs,alpha=self.alpha_sp)(M)*self.ps_z(z)
                if self.weighted:
                    return temp*L_M(M)
                else:
                    return temp
            # Inner z integral starts at the redshift where a galaxy of absolute
            # magnitude M drops below the catalog's apparent-magnitude threshold.
            distnum[i] = dblquad(Inum,Mmin,Mmax,lambda x: z_dlH0(dl_mM(self.mth,x),H0[i],linear=self.linear),lambda x: self.zmax_GW[i],epsabs=0,epsrel=1.49e-4)[0]
            # Denominator integrand: Schechter function x detection probability x
            # redshift prior x merger-rate evolution.
            def Iden(z,M):
                if self.basic:
                    temp = SchechterMagFunction(H0=H0[i],Mstar_obs=self.Mstar_obs,alpha=self.alpha_sp)(M)*self.pdet.pD_dl_eval_basic(self.cosmo.dl_zH0(z,H0[i]))*self.zprior(z)*self.ps_z(z)
                else:
                    temp = SchechterMagFunction(H0=H0[i],Mstar_obs=self.Mstar_obs,alpha=self.alpha_sp)(M)*self.pdet.pD_zH0_eval(z,H0[i])*self.zprior(z)*self.ps_z(z)
                if self.weighted:
                    return temp*L_M(M)
                else:
                    return temp
            distden[i] = dblquad(Iden,Mmin,Mmax,lambda x: z_dlH0(dl_mM(self.mth,x),H0[i],linear=self.linear),lambda x: self.zmax,epsabs=0,epsrel=1.49e-4)[0]

        # Numerator sky factor is 1 (catalog follows the event's sky contour);
        # denominator is weighted by the fraction of pixels with GW support.
        skynum = 1.0
        num = distnum*skynum
        a = len(np.asarray(np.where(self.skymap.prob!=0)).flatten()) # find number of pixels with any GW event support
        skyden = a/self.skymap.npix
        den = distden*skyden
        return num,den
def likelihood_skypatch(self,H0,complete=False):
"""
The event likelihood using the new skypatch method
p(x|D,H0)
Parameters
----------
H0 : float or array_like
Hubble constant value(s) in kms-1Mpc-1
Returns
-------
array
the unnormalised likelihood
"""
pxDG_num,pxDG_den = self.px_DGH0_skypatch(H0)
if complete==True:
likelihood = pxDG_num/pxDG_den
pGD = np.ones(len(H0))
pnGD = np.zeros(len(H0))
pxDnG_num = np.zeros(len(H0))
pxDnG_den = np.ones(len(H0))
else:
pGD = self.pG_H0D(H0)
pnGD = self.pnG_H0D(H0)
pxDnG_num,pxDnG_den = self.px_DnGH0_skypatch(H0)
pxDnG = pxDnG_num/pxDnG_den
likelihood = pGD*(pxDG_num/pxDG_den) + pnGD*pxDnG
return likelihood,pxDG_num,pxDG_den,pGD,pnGD,pxDnG_num,pxDnG_den
|
package com.rostdev.survivalpack.ui.info;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import com.rostdev.survivalpack.R;
import com.rostdev.survivalpack.model.SensorInfo;
import com.rostdev.survivalpack.mvp.BasePresenter;
import com.rostdev.survivalpack.ui.MvpViewContext;
import javax.inject.Inject;
/**
* Created by Rosty on 7/5/2016.
*/
public class InfoPresenter extends BasePresenter<InfoContract.View> implements
        InfoContract.Presenter,
        SensorEventListener {

    private final SensorManager sensorManager;
    // Either sensor may be null: getDefaultSensor() returns null when the device
    // lacks the corresponding hardware (barometer / magnetometer).
    private final Sensor sensorPressure;
    private final Sensor sensorMagneticField;

    // Latest readings, published to the view once all three are available.
    private SensorInfo pressure;
    private SensorInfo altitude;
    private SensorInfo magnetic;

    @Inject
    public InfoPresenter(@MvpViewContext Context context) {
        sensorManager = (SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
        sensorPressure = sensorManager.getDefaultSensor(Sensor.TYPE_PRESSURE);
        sensorMagneticField = sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
    }

    @Override
    protected void onViewAttached(InfoContract.View attachedView) {
        // Registering a null sensor throws on modern Android versions, so guard
        // against missing hardware.
        if (sensorPressure != null) {
            sensorManager.registerListener(this,
                    sensorPressure,
                    SensorManager.SENSOR_DELAY_NORMAL);
        }
        if (sensorMagneticField != null) {
            sensorManager.registerListener(this,
                    sensorMagneticField,
                    SensorManager.SENSOR_DELAY_NORMAL);
        }
    }

    @Override
    protected void onViewDetached(InfoContract.View detachedView) {
        if (sensorPressure != null) {
            sensorManager.unregisterListener(this, sensorPressure);
        }
        if (sensorMagneticField != null) {
            sensorManager.unregisterListener(this, sensorMagneticField);
        }
    }

    @Override
    public void onSensorChanged(SensorEvent event) {
        switch (event.sensor.getType()) {
            case Sensor.TYPE_PRESSURE:
                long press = Math.round(event.values[0]);
                long alt = Math.round(getAltitude(press));
                pressure = new SensorInfo(R.drawable.ic_pressure,
                        "Pressure", press + " hPa");
                altitude = new SensorInfo(R.drawable.ic_altimeter,
                        "Altitude", alt + " meters");
                break;
            case Sensor.TYPE_MAGNETIC_FIELD:
                // Magnitude of the magnetic field vector: sqrt(x^2 + y^2 + z^2).
                long magnet = Math.round(
                        Math.sqrt((Math.pow((double) event.values[0], 2.0d) +
                                Math.pow((double) event.values[1], 2.0d)) +
                                Math.pow((double) event.values[2], 2.0d)));
                magnetic = new SensorInfo(R.drawable.ic_magnet,
                        "Magnetic field", magnet + " µT");
                break;
        }
        // Events may still be delivered briefly after detach; only publish when a
        // view is attached and every reading has arrived at least once.
        if (view != null && pressure != null && altitude != null && magnetic != null) {
            view.updateSensorInfoData(new SensorInfo[]{
                    magnetic, pressure, altitude
            });
        }
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int i) {
    }

    /**
     * Converts a barometric pressure reading (hPa) to altitude in meters above
     * sea level, assuming a standard atmosphere at sea level.
     */
    private float getAltitude(float pressureHpa) {
        return SensorManager.getAltitude(
                SensorManager.PRESSURE_STANDARD_ATMOSPHERE, pressureHpa);
    }
}
|
<gh_stars>0
const token_verify = require('../middleware/token_verify');
module.exports = async (ctx) => {
ctx.body = {
msg: 'userInfo'
}
/*
const token = ctx.header.authorization;
if (token) {
// 解密,获取payload
let payload = await token_verify(token);
ctx.body = {
user_info: payload
}
} else {
ctx.body = {
message: ctx,
code: -1
}
}
*/
};
|
<reponame>hapramp/1Rramp-Android
package com.hapramp.ui.adapters;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.hapramp.R;
import com.hapramp.datastore.TransferHistoryParser;
import com.hapramp.steem.models.TransferHistoryModel;
import com.hapramp.ui.activity.ProfileActivity;
import com.hapramp.utils.Constants;
import com.hapramp.utils.ImageHandler;
import com.hapramp.utils.MomentsUtils;
import com.hapramp.utils.SteemPowerCalc;
import java.util.ArrayList;
import java.util.Locale;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
 * RecyclerView adapter rendering a user's Steem account history: transfers,
 * author/curation/benefactor rewards and reward-balance claims, each with its
 * own item layout and view holder.
 */
public class AccountHistoryAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {
  private static final int TYPE_TRANSFER = 0;
  private static final int TYPE_AUTHOR_REWARD = 1;
  private static final int TYPE_COMMENT_BENEFACTOR = 2;
  private static final int TYPE_CLAIM = 3;
  private static final int TYPE_CURATION = 4;
  private final Context mContext;
  private ArrayList<TransferHistoryModel> transferHistoryModels;

  public AccountHistoryAdapter(Context context) {
    this.mContext = context;
    this.transferHistoryModels = new ArrayList<>();
  }

  /** Replaces the backing data set and refreshes the whole list. */
  public void setTransferHistoryModels(ArrayList<TransferHistoryModel> transferHistoryModels) {
    this.transferHistoryModels = transferHistoryModels;
    notifyDataSetChanged();
  }

  @NonNull
  @Override
  public RecyclerView.ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
    // FIX: inflate every item layout against the parent (attachToRoot=false).
    // Previously four of the five types were inflated with a null root, which
    // silently drops the layout's LayoutParams.
    LayoutInflater inflater = LayoutInflater.from(mContext);
    switch (viewType) {
      case TYPE_AUTHOR_REWARD:
        return new AuthorRewardViewHolder(
          inflater.inflate(R.layout.account_history_author_reward_item_view, parent, false));
      case TYPE_CLAIM:
        return new ClaimBalanceViewHolder(
          inflater.inflate(R.layout.account_history_claim_item_view, parent, false));
      case TYPE_COMMENT_BENEFACTOR:
        return new CommentBenefactorViewHolder(
          inflater.inflate(R.layout.account_history_comment_benefactor_item_view, parent, false));
      case TYPE_CURATION:
        return new CurationViewHolder(
          inflater.inflate(R.layout.account_history_curation_item_view, parent, false));
      case TYPE_TRANSFER:
        return new TransferViewHolder(
          inflater.inflate(R.layout.account_history_transfer_item_view, parent, false));
      default:
        // FIX: a @NonNull method must never return null; an unknown type is a
        // programming error (getItemViewType only emits the constants above).
        throw new IllegalStateException("Unknown view type: " + viewType);
    }
  }

  @Override
  public void onBindViewHolder(@NonNull RecyclerView.ViewHolder holder, int position) {
    if (holder instanceof TransferViewHolder) {
      ((TransferViewHolder) holder).bind(transferHistoryModels.get(position));
    } else if (holder instanceof AuthorRewardViewHolder) {
      ((AuthorRewardViewHolder) holder).bind(transferHistoryModels.get(position));
    } else if (holder instanceof CommentBenefactorViewHolder) {
      ((CommentBenefactorViewHolder) holder).bind(transferHistoryModels.get(position));
    } else if (holder instanceof ClaimBalanceViewHolder) {
      ((ClaimBalanceViewHolder) holder).bind(transferHistoryModels.get(position));
    } else if (holder instanceof CurationViewHolder) {
      ((CurationViewHolder) holder).bind(transferHistoryModels.get(position));
    }
  }

  @Override
  public int getItemViewType(int position) {
    switch (transferHistoryModels.get(position).getOperation()) {
      case TransferHistoryParser.KEYS.OPERATION_TRANSFER:
        return TYPE_TRANSFER;
      case TransferHistoryParser.KEYS.OPERATION_AUTHOR_REWARD:
        return TYPE_AUTHOR_REWARD;
      case TransferHistoryParser.KEYS.OPERATION_CLAIM_REWARD_BALANCE:
        return TYPE_CLAIM;
      case TransferHistoryParser.KEYS.OPERATION_COMMENT_BENEFACTOR_REWARD:
        return TYPE_COMMENT_BENEFACTOR;
      case TransferHistoryParser.KEYS.OPERATION_CURATION_REWARD:
        return TYPE_CURATION;
    }
    // NOTE(review): falls back to the superclass default (0 == TYPE_TRANSFER)
    // for unknown operations; confirm the parser never emits other values.
    return super.getItemViewType(position);
  }

  @Override
  public int getItemCount() {
    return transferHistoryModels.size();
  }

  /** True when this account is the sender of a transfer. */
  private boolean isSent(String userAccount, String from) {
    return userAccount.equals(from);
  }

  /** Opens the given URL in the device browser. */
  private void openIntent(String url) {
    Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
    mContext.startActivity(browserIntent);
  }

  /** Row for a STEEM/SBD transfer (sent or received). */
  class TransferViewHolder extends RecyclerView.ViewHolder {
    @BindView(R.id.user_image)
    ImageView userImage;
    @BindView(R.id.message_label)
    TextView messageLabel;
    @BindView(R.id.remote_user)
    TextView remoteUser;
    @BindView(R.id.amount)
    TextView amount;
    @BindView(R.id.timestamp)
    TextView timestamp;
    @BindView(R.id.message)
    TextView message;

    public TransferViewHolder(View itemView) {
      super(itemView);
      ButterKnife.bind(this, itemView);
    }

    public void bind(TransferHistoryModel transferHistoryModel) {
      TransferHistoryModel.Transfer transfer = transferHistoryModel.getTransfer();
      final Intent profileIntent = new Intent(mContext, ProfileActivity.class);
      if (isSent(transferHistoryModel.getUserAccount(), transfer.from)) {
        //sent
        ImageHandler.loadCircularImage(mContext, userImage,
          String.format(mContext.getResources().getString(R.string.steem_user_profile_pic_format), transfer.to));
        messageLabel.setText("Transferred to");
        remoteUser.setText(transfer.to);
        profileIntent.putExtra(Constants.EXTRAA_KEY_STEEM_USER_NAME, transfer.to);
        amount.setText(String.format("- %s", transfer.amount));
        amount.setTextColor(Color.parseColor("#bf0707"));
      } else {
        //received
        ImageHandler.loadCircularImage(mContext, userImage,
          String.format(mContext.getResources().getString(R.string.steem_user_profile_pic_format), transfer.from));
        messageLabel.setText("Received from");
        remoteUser.setText(transfer.from);
        profileIntent.putExtra(Constants.EXTRAA_KEY_STEEM_USER_NAME, transfer.from);
        amount.setText(String.format("+ %s", transfer.amount));
        amount.setTextColor(Color.parseColor("#157c18"));
      }
      timestamp.setText(MomentsUtils.getFormattedTime(transferHistoryModel.getTimeStamp()));
      if (transfer.memo.length() > 0) {
        message.setVisibility(View.VISIBLE);
        message.setText(transfer.memo);
      } else {
        message.setVisibility(View.GONE);
      }
      remoteUser.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
          mContext.startActivity(profileIntent);
        }
      });
    }
  }

  /** Row for an author reward (SBD + STEEM + vested SP). */
  class AuthorRewardViewHolder extends RecyclerView.ViewHolder {
    @BindView(R.id.timestamp)
    TextView timestamp;
    @BindView(R.id.steem_tv)
    TextView steemTv;
    @BindView(R.id.steem_info_container)
    RelativeLayout steemInfoContainer;
    @BindView(R.id.sbd_tv)
    TextView sbdTv;
    @BindView(R.id.sbd_info_container)
    RelativeLayout sbdInfoContainer;
    @BindView(R.id.steem_power_tv)
    TextView steemPowerTv;
    @BindView(R.id.sp_info_container)
    RelativeLayout spInfoContainer;
    @BindView(R.id.goto_btn)
    TextView gotoBtn;

    public AuthorRewardViewHolder(View itemView) {
      super(itemView);
      ButterKnife.bind(this, itemView);
    }

    public void bind(TransferHistoryModel transferHistoryModel) {
      TransferHistoryModel.AuthorReward authorReward = transferHistoryModel.getAuthorReward();
      timestamp.setText(MomentsUtils.getFormattedTime(transferHistoryModel.getTimeStamp()));
      // Payout strings are of the form "0.000 SBD"; take the numeric part.
      double sbd = Double.parseDouble(authorReward.getSbd_payout().split(" ")[0]);
      double steem = Double.parseDouble(authorReward.getSteem_payout().split(" ")[0]);
      double vests = Double.parseDouble(authorReward.getVesting_payout().split(" ")[0]);
      double sp = SteemPowerCalc.calculateSteemPower(
        vests,
        transferHistoryModel.getTotal_vesting_fund_steem(),
        transferHistoryModel.getTotal_vesting_shares());
      // FIX: also reset visibility to VISIBLE on rebind -- holders are recycled,
      // so a previously hidden container stayed GONE for later rows (this
      // matches ClaimBalanceViewHolder's behaviour).
      if (sbd == 0) {
        sbdInfoContainer.setVisibility(View.GONE);
      } else {
        sbdInfoContainer.setVisibility(View.VISIBLE);
        sbdTv.setText(String.format(Locale.US, "%.3f SBD", sbd));
      }
      if (steem == 0) {
        steemInfoContainer.setVisibility(View.GONE);
      } else {
        steemInfoContainer.setVisibility(View.VISIBLE);
        steemTv.setText(String.format(Locale.US, "%.3f STEEM", steem));
      }
      steemPowerTv.setText(String.format(Locale.US, "%.3f SP", sp));
      final String postUrl = String.format("https://steemit.com/@%s/%s", authorReward.getAuthor(), authorReward.getPermlink());
      gotoBtn.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
          openIntent(postUrl);
        }
      });
    }
  }

  /** Row for a comment benefactor reward. */
  class CommentBenefactorViewHolder extends RecyclerView.ViewHolder {
    @BindView(R.id.amount)
    TextView amount;
    @BindView(R.id.timestamp)
    TextView timestamp;

    public CommentBenefactorViewHolder(View itemView) {
      super(itemView);
      ButterKnife.bind(this, itemView);
    }

    public void bind(TransferHistoryModel transferHistoryModel) {
      TransferHistoryModel.CommentBenefactor commentBenefactor = transferHistoryModel.getCommentBenefactor();
      timestamp.setText(MomentsUtils.getFormattedTime(transferHistoryModel.getTimeStamp()));
      amount.setText(commentBenefactor.getReward());
    }
  }

  /** Row for a reward-balance claim (SBD + STEEM + vested SP). */
  class ClaimBalanceViewHolder extends RecyclerView.ViewHolder {
    @BindView(R.id.timestamp)
    TextView timestamp;
    @BindView(R.id.steem_tv)
    TextView steemTv;
    @BindView(R.id.steem_info_container)
    RelativeLayout steemInfoContainer;
    @BindView(R.id.sbd_tv)
    TextView sbdTv;
    @BindView(R.id.sbd_info_container)
    RelativeLayout sbdInfoContainer;
    @BindView(R.id.steem_power_tv)
    TextView steemPowerTv;
    @BindView(R.id.sp_info_container)
    RelativeLayout spInfoContainer;

    public ClaimBalanceViewHolder(View itemView) {
      super(itemView);
      ButterKnife.bind(this, itemView);
    }

    public void bind(TransferHistoryModel transferHistoryModel) {
      TransferHistoryModel.ClaimRewardBalance claimRewardBalance = transferHistoryModel.getClaimRewardBalance();
      timestamp.setText(MomentsUtils.getFormattedTime(transferHistoryModel.getTimeStamp()));
      double sbd = Double.parseDouble(claimRewardBalance.getReward_sbd().split(" ")[0]);
      double steem = Double.parseDouble(claimRewardBalance.getReward_steem().split(" ")[0]);
      double vests = Double.parseDouble(claimRewardBalance.getReward_vests().split(" ")[0]);
      double sp = SteemPowerCalc.calculateSteemPower(
        vests,
        transferHistoryModel.getTotal_vesting_fund_steem(),
        transferHistoryModel.getTotal_vesting_shares());
      if (sbd == 0) {
        sbdInfoContainer.setVisibility(View.GONE);
      } else {
        sbdInfoContainer.setVisibility(View.VISIBLE);
        sbdTv.setText(String.format(Locale.US, "%.3f SBD", sbd));
      }
      if (steem == 0) {
        steemInfoContainer.setVisibility(View.GONE);
      } else {
        steemInfoContainer.setVisibility(View.VISIBLE);
        steemTv.setText(String.format(Locale.US, "%.3f STEEM", steem));
      }
      steemPowerTv.setText(String.format(Locale.US, "%.3f SP", sp));
    }
  }

  /** Row for a curation reward (vests converted to SP). */
  class CurationViewHolder extends RecyclerView.ViewHolder {
    @BindView(R.id.steem_power_tv)
    TextView steemPowerTv;
    @BindView(R.id.sp_info_container)
    RelativeLayout spInfoContainer;
    @BindView(R.id.timestamp)
    TextView timestamp;
    @BindView(R.id.goto_btn)
    TextView gotoBtn;

    public CurationViewHolder(View itemView) {
      super(itemView);
      ButterKnife.bind(this, itemView);
    }

    public void bind(TransferHistoryModel transferHistoryModel) {
      TransferHistoryModel.CurationReward curationReward = transferHistoryModel.getCurationReward();
      timestamp.setText(MomentsUtils.getFormattedTime(transferHistoryModel.getTimeStamp()));
      double vests = Double.parseDouble(curationReward.getReward().split(" ")[0]);
      double sp = SteemPowerCalc.calculateSteemPower(
        vests,
        transferHistoryModel.getTotal_vesting_fund_steem(),
        transferHistoryModel.getTotal_vesting_shares());
      steemPowerTv.setText(String.format(Locale.US, "%.3f SP", sp));
      final String postUrl = String.format("https://steemit.com/@%s/%s", curationReward.getComment_author(),
        curationReward.getComment_permlink());
      gotoBtn.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
          openIntent(postUrl);
        }
      });
    }
  }
}
|
import React from 'react';
import pick from 'lodash/pick';
import omitBy from 'lodash/omitBy';
import isEqual from 'lodash/isEqual';
import meanBy from 'lodash/meanBy';
import mean from 'lodash/mean';
import {Location} from 'history';
import {Client} from 'app/api';
import {addErrorMessage} from 'app/actionCreators/indicator';
import {t, tct} from 'app/locale';
import {GlobalSelection, CrashFreeTimeBreakdown} from 'app/types';
import {URL_PARAM} from 'app/constants/globalSelectionHeader';
import {percent, defined} from 'app/utils';
import {Series} from 'app/types/echarts';
import {getParams} from 'app/components/organizations/globalSelectionHeader/getParams';
import {getExactDuration} from 'app/utils/formatters';
import {fetchTotalCount} from 'app/actionCreators/events';
import CHART_PALETTE from 'app/constants/chartPalette';
import {YAxis} from './chart/releaseChartControls';
import {getInterval, getReleaseEventView} from './chart/utils';
import {displayCrashFreePercent, getCrashFreePercent} from '../../utils';
// Strips props that should not trigger a refetch when they change
// (API client, identifiers, routing objects and render-prop children).
const omitIgnoredProps = (props: Props) => {
  const ignoredKeys = ['api', 'version', 'orgId', 'projectSlug', 'location', 'children'];
  return omitBy(props, (_value, key) => ignoredKeys.includes(key));
};
// Map from series key to its echarts series definition.
type ChartData = {
  [key: string]: Series;
};

// Payload produced by the fetch helpers and held in component state.
type Data = {
  chartData: Series[];
  chartSummary: React.ReactNode;
  crashFreeTimeBreakdown: CrashFreeTimeBreakdown;
};

// Shape passed to the render-prop `children` of ReleaseStatsRequest.
export type ReleaseStatsRequestRenderProps = Data & {
  loading: boolean;
  reloading: boolean;
  errored: boolean;
};

type Props = {
  api: Client;
  version: string;
  orgId: string;
  projectSlug: string;
  selection: GlobalSelection;
  location: Location;
  yAxis: YAxis;
  children: (renderProps: ReleaseStatsRequestRenderProps) => React.ReactNode;
  hasHealthData: boolean;
  hasDiscover: boolean;
};

type State = {
  reloading: boolean;
  errored: boolean;
  // null until the first successful fetch completes.
  data: Data | null;
};
/**
 * Render-prop component that fetches release statistics for the selected
 * yAxis and transforms the API responses into chart-ready series.
 *
 * NOTE(review): depends on module-level helpers (omitIgnoredProps,
 * fetchTotalCount, getReleaseEventView, getCrashFreePercent, ...) defined
 * outside this view — confirm their contracts before changing call sites.
 */
class ReleaseStatsRequest extends React.Component<Props, State> {
  state: State = {
    reloading: false,
    errored: false,
    data: null, // null doubles as "initial load in progress" (see render)
  };

  componentDidMount() {
    this.fetchData();
  }

  componentDidUpdate(prevProps: Props) {
    // Refetch only when a prop that affects the query actually changed.
    if (isEqual(omitIgnoredProps(prevProps), omitIgnoredProps(this.props))) {
      return;
    }
    this.fetchData();
  }

  componentWillUnmount() {
    this.unmounting = true;
  }

  // Guards against setState() after unmount while a fetch is still in flight.
  private unmounting: boolean = false;

  /**
   * Fetches data for the current yAxis, dispatching to the matching
   * endpoint-specific fetcher. Errors surface a toast and set `errored`.
   */
  fetchData = async () => {
    let data: Data | null = null;
    const {yAxis, hasHealthData, hasDiscover} = this.props;
    // Nothing to fetch if neither data source is available for this org.
    if (!hasHealthData && !hasDiscover) {
      return;
    }
    this.setState(state => ({
      // First load shows "loading"; subsequent loads show "reloading".
      reloading: state.data !== null,
      errored: false,
    }));
    try {
      if (yAxis === YAxis.CRASH_FREE) {
        data = await this.fetchRateData();
      } else if (yAxis === YAxis.EVENTS) {
        data = await this.fetchEventData();
      } else {
        // session duration uses same endpoint as sessions
        data = await this.fetchCountData(
          yAxis === YAxis.SESSION_DURATION ? YAxis.SESSIONS : yAxis
        );
      }
    } catch {
      addErrorMessage(t('Error loading chart data'));
      this.setState({
        errored: true,
        data: null,
      });
    }
    if (this.unmounting) {
      return;
    }
    // On error `data` is still null, so render() falls back to its defaults.
    this.setState({
      reloading: false,
      data,
    });
  };

  /**
   * Fetches session/user counts; session-duration data comes from the same
   * endpoint but gets its own transform.
   */
  fetchCountData = async (type: YAxis) => {
    const {api, yAxis} = this.props;
    const response = await api.requestPromise(this.statsPath, {
      query: {
        ...this.baseQueryParams,
        type,
      },
    });
    const transformedData =
      yAxis === YAxis.SESSION_DURATION
        ? this.transformSessionDurationData(response.stats)
        : this.transformCountData(response.stats, yAxis, response.statTotals);
    return {...transformedData, crashFreeTimeBreakdown: response.usersBreakdown};
  };

  /**
   * Fetches user and session stats in parallel and computes crash-free
   * percentages from both.
   */
  fetchRateData = async () => {
    const {api} = this.props;
    const [userResponse, sessionResponse] = await Promise.all([
      api.requestPromise(this.statsPath, {
        query: {
          ...this.baseQueryParams,
          type: YAxis.USERS,
        },
      }),
      api.requestPromise(this.statsPath, {
        query: {
          ...this.baseQueryParams,
          type: YAxis.SESSIONS,
        },
      }),
    ]);
    const transformedData = this.transformRateData(
      userResponse.stats,
      sessionResponse.stats
    );
    return {...transformedData, crashFreeTimeBreakdown: userResponse.usersBreakdown};
  };

  /**
   * Fetches the Discover total event count; chart data itself is rendered
   * elsewhere for the events yAxis (chartData stays empty here).
   */
  fetchEventData = async () => {
    const {api, orgId, location, selection, version, hasHealthData} = this.props;
    const {crashFreeTimeBreakdown} = this.state.data || {};
    let userResponse, eventsCountResponse;
    // we don't need to fetch crashFreeTimeBreakdown every time, because it does not change
    if (crashFreeTimeBreakdown || !hasHealthData) {
      eventsCountResponse = await fetchTotalCount(
        api,
        orgId,
        getReleaseEventView(selection, version).getEventsAPIPayload(location)
      );
    } else {
      [userResponse, eventsCountResponse] = await Promise.all([
        api.requestPromise(this.statsPath, {
          query: {
            ...this.baseQueryParams,
            type: YAxis.USERS,
          },
        }),
        fetchTotalCount(
          api,
          orgId,
          getReleaseEventView(selection, version).getEventsAPIPayload(location)
        ),
      ]);
    }
    // Prefer the freshly fetched breakdown, fall back to the cached one.
    const breakdown = userResponse?.usersBreakdown ?? crashFreeTimeBreakdown;
    const chartSummary = eventsCountResponse.toLocaleString();
    return {chartData: [], crashFreeTimeBreakdown: breakdown, chartSummary};
  };

  // Project-scoped stats endpoint for this release.
  get statsPath() {
    const {orgId, projectSlug, version} = this.props;
    return `/projects/${orgId}/${projectSlug}/releases/${version}/stats/`;
  }

  // Query params shared by every stats request (global selection + interval).
  get baseQueryParams() {
    const {location, selection} = this.props;
    return {
      ...getParams(pick(location.query, [...Object.values(URL_PARAM)])),
      interval: getInterval(selection.datetime),
    };
  }

  /**
   * Turns raw count stats into four stacked series
   * (crashed / abnormal / errored / healthy).
   */
  transformCountData(
    responseData,
    yAxis: string,
    responseTotals
  ): Omit<Data, 'crashFreeTimeBreakdown'> {
    // here we can configure colors of the chart
    const chartData: ChartData = {
      crashed: {
        seriesName: t('Crashed'),
        data: [],
        color: CHART_PALETTE[3][0],
        areaStyle: {
          color: CHART_PALETTE[3][0],
          opacity: 1,
        },
        lineStyle: {
          opacity: 0,
          width: 0.4,
        },
      },
      abnormal: {
        seriesName: t('Abnormal'),
        data: [],
        color: CHART_PALETTE[3][1],
        areaStyle: {
          color: CHART_PALETTE[3][1],
          opacity: 1,
        },
        lineStyle: {
          opacity: 0,
          width: 0.4,
        },
      },
      errored: {
        seriesName: t('Errored'),
        data: [],
        color: CHART_PALETTE[3][2],
        areaStyle: {
          color: CHART_PALETTE[3][2],
          opacity: 1,
        },
        lineStyle: {
          opacity: 0,
          width: 0.4,
        },
      },
      healthy: {
        seriesName: t('Healthy'),
        data: [],
        color: CHART_PALETTE[3][3],
        areaStyle: {
          color: CHART_PALETTE[3][3],
          opacity: 1,
        },
        lineStyle: {
          opacity: 0,
          width: 0.4,
        },
      },
    };
    responseData.forEach(entry => {
      // Each entry is [unixSeconds, valuesByKey]; chart wants milliseconds.
      const [timeframe, values] = entry;
      const date = timeframe * 1000;
      const crashed = values[`${yAxis}_crashed`];
      const abnormal = values[`${yAxis}_abnormal`];
      const errored = values[`${yAxis}_errored`];
      // "healthy" is the remainder; clamp below at 0 to absorb data glitches.
      const healthy = values[yAxis] - crashed - abnormal - errored;
      chartData.crashed.data.push({name: date, value: crashed});
      chartData.abnormal.data.push({name: date, value: abnormal});
      chartData.errored.data.push({name: date, value: errored});
      chartData.healthy.data.push({
        name: date,
        value: healthy >= 0 ? healthy : 0,
      });
    });
    return {
      chartData: Object.values(chartData),
      chartSummary: responseTotals[yAxis].toLocaleString(),
    };
  }

  /**
   * Computes crash-free percentage series for users and sessions plus a
   * combined average summary string.
   */
  transformRateData(
    responseUsersData,
    responseSessionsData
  ): Omit<Data, 'crashFreeTimeBreakdown'> {
    const chartData: ChartData = {
      users: {
        seriesName: t('Crash Free Users'),
        data: [],
        color: CHART_PALETTE[1][0],
      },
      sessions: {
        seriesName: t('Crash Free Sessions'),
        data: [],
        color: CHART_PALETTE[1][1],
      },
    };
    const calculateDatePercentage = (responseData, subject: YAxis) => {
      const percentageData = responseData.map(entry => {
        const [timeframe, values] = entry;
        const date = timeframe * 1000;
        // Buckets with no activity yield null so they are skipped in the mean.
        const crashFreePercent =
          values[subject] !== 0
            ? getCrashFreePercent(
                100 - percent(values[`${subject}_crashed`], values[subject])
              )
            : null;
        return {name: date, value: crashFreePercent};
      });
      const averagePercent = displayCrashFreePercent(
        meanBy(
          percentageData.filter(item => defined(item.value)),
          'value'
        )
      );
      return {averagePercent, percentageData};
    };
    const usersPercentages = calculateDatePercentage(responseUsersData, YAxis.USERS);
    chartData.users.data = usersPercentages.percentageData;
    const sessionsPercentages = calculateDatePercentage(
      responseSessionsData,
      YAxis.SESSIONS
    );
    chartData.sessions.data = sessionsPercentages.percentageData;
    const summary = tct('[usersPercent] users, [sessionsPercent] sessions', {
      usersPercent: usersPercentages.averagePercent,
      sessionsPercent: sessionsPercentages.averagePercent,
    });
    return {chartData: Object.values(chartData), chartSummary: summary};
  }

  /**
   * Builds a single p50 session-duration series; the summary is the mean of
   * the per-bucket p50 values formatted as a duration.
   */
  transformSessionDurationData(responseData): Omit<Data, 'crashFreeTimeBreakdown'> {
    // here we can configure colors of the chart
    const chartData: Series = {
      seriesName: t('Session Duration'),
      data: [],
      lineStyle: {
        opacity: 0,
      },
    };
    const sessionDurationAverage = Math.round(
      mean(
        responseData
          .map(([timeframe, values]) => {
            // Push the point as a side effect while collecting p50s for the mean.
            chartData.data.push({
              name: timeframe * 1000,
              value: Math.round(values.duration_p50),
            });
            return values.duration_p50;
          })
          .filter(duration => defined(duration))
      ) || 0 // mean() of an empty list is NaN; coerce to 0
    );
    const summary = getExactDuration(sessionDurationAverage ?? 0);
    return {chartData: [chartData], chartSummary: summary};
  }

  render() {
    const {children} = this.props;
    const {data, reloading, errored} = this.state;
    const loading = data === null;
    return children({
      loading,
      reloading,
      errored,
      chartData: data?.chartData ?? [],
      chartSummary: data?.chartSummary ?? '',
      crashFreeTimeBreakdown: data?.crashFreeTimeBreakdown ?? [],
    });
  }
}
export default ReleaseStatsRequest;
|
<gh_stars>1-10
/**
 * RightJS UI Internationalization: Spanish module
 *
 * Copyright (C) <NAME>
 */
// Merges the translations below into each loaded RightJS UI widget's i18n
// table. Widgets that are not present on the page are skipped.
RightJS.Object.each({
  Calendar: {
    Done: 'Hecho',
    Now: 'Ahora',
    NextMonth: 'Mes siguiente',
    PrevMonth: 'Mes precedente',
    NextYear: 'Año siguiente',
    PrevYear: 'Año precedente',
    dayNames: 'Domingo Lunes Martes Miércoles Jueves Viernes Sábado'.split(' '),
    dayNamesShort: 'Dom Lun Mar Mié Jue Vie Sab'.split(' '),
    dayNamesMin: 'Do Lu Ma Mi Ju Vi Sa'.split(' '),
    monthNames: 'Enero Febrero Marzo Abril Mayo Junio Julio Agosto Septiembre Octubre Noviembre Diciembre'.split(' '),
    monthNamesShort: 'Ene Feb Mar Abr May Jun Jul Ago Sep Oct Nov Dic'.split(' ')
  },
  Lightbox: {
    Close: 'Cerrar',
    Prev: 'Imagen precedente',
    Next: 'Imagen siguiente'
  },
  InEdit: {
    Save: "Guardar",
    // NOTE(review): 'Borrar' means "delete"; 'Cancelar' may be the intended
    // translation for Cancel — confirm before changing the runtime string.
    Cancel: "Borrar"
  },
  Colorpicker: {
    Done: 'Hecho'
  },
  Dialog: {
    Ok: 'Ok',
    Close: 'Cerrar',
    Cancel: 'Cancelar',
    Help: 'Ayuda',
    Expand: 'Expandir',
    Collapse: 'Plegar',
    Alert: 'Aviso!',
    Confirm: 'Confirmar',
    Prompt: 'Entrar'
  },
  // NOTE(review): the Rte section below is still in English — the Spanish
  // translation appears to be pending.
  Rte: {
    Clear: 'Clear',
    Save: 'Save',
    Source: 'Source',
    Bold: 'Bold',
    Italic: 'Italic',
    Underline: 'Underline',
    Strike: 'Strike through',
    Ttext: 'Typetext',
    Header: 'Header',
    Cut: 'Cut',
    Copy: 'Copy',
    Paste: 'Paste',
    Pastetext: 'Paste as text',
    Left: 'Left',
    Center: 'Center',
    Right: 'Right',
    Justify: 'Justify',
    Undo: 'Undo',
    Redo: 'Redo',
    Code: 'Code block',
    Quote: 'Block quote',
    Link: 'Add link',
    Image: 'Insert image',
    Video: 'Insert video',
    Dotlist: 'List with dots',
    Numlist: 'List with numbers',
    Indent: 'Indent',
    Outdent: 'Outdent',
    Forecolor: 'Text color',
    Backcolor: 'Background color',
    Select: 'Select',
    Remove: 'Remove',
    Format: 'Format',
    Fontname: 'Font name',
    Fontsize: 'Size',
    Subscript: 'Subscript',
    Superscript: 'Superscript',
    UrlAddress: 'URL Address'
  }
}, function(module, i18n) {
  // `self[module]` is the widget's constructor on the global scope; only
  // extend the i18n table of widgets that were actually loaded.
  if (self[module]) {
    RightJS.$ext(self[module].i18n, i18n);
  }
});
// Initialize the dataTables jQuery plugin on every table on the page,
// sorted ascending by the first column and localized to Vietnamese.
$(function () {
  var tableSelector = [
    '#dataTable', '#dataTable1', '#dataTable2',
    '#dataTable3', '#dataTable4', '#dataTable5'
  ].join(', ');

  $(tableSelector).DataTable({
    responsive: true,
    order: [[0, 'asc']],
    language: {
      decimal: '-',
      thousands: '.',
      search: 'Tìm kiếm:',
      info: '',
      lengthMenu: 'Xem _MENU_ Hàng',
      zeroRecords: 'Không tồn tại',
      infoEmpty: '',
      infoFiltered: '',
      paginate: {
        first: 'Đầu ',
        last: 'Cuối',
        next: 'Tiếp',
        previous: 'Trước'
      }
    }
  });
});
|
import microtest
import microtest.utils as utils
import os
import io
import contextlib
import templateman.cli as cli
@microtest.test
def test_template_listing():
    """list_installed_templates prints every template file found in the
    directory named by the template-directory environment variable."""
    templates = ['template_1', 'template_2']
    captured = io.StringIO()
    with utils.create_temp_dir(files=templates) as dir_path:
        fake_environ = {cli.TEMPLATE_DIRECTORY_ENV_VAR: dir_path}
        with microtest.patch(cli.os, environ=fake_environ):
            with contextlib.redirect_stdout(captured):
                cli.list_installed_templates([])
    printed = captured.getvalue()
    for name in templates:
        assert name in printed
if __name__ == '__main__':
    # Allow running this test module directly via the microtest runner.
    microtest.run()
|
<reponame>Corrots/nsq
package client
// Client is a placeholder NSQ client type; it currently carries no state.
type Client struct {
}
|
# Load environment variables from .env, exporting everything it defines.
if [ -e .env ]; then
  set -a # automatically export all variables
  source .env
  set +a
else
  echo "Please set up your .env file before starting your environment."
  exit 1
fi

# clear containers
bash scripts/clear-container.sh

# Start postgres & adminer only when the postgres container is not running.
# Quoting the name protects the filter from word splitting.
if [ ! "$(docker ps -q -f name="$POSTGRES_CONTAINER_NAME")" ]; then
  echo "# Setting up environment"
  # run postgres & adminer containers
  docker-compose -f docker-compose.dev.yml up -d
  # fixed user-facing typo: "Wating" -> "Waiting"
  echo "# Waiting for database"
  sleep 5
fi
echo "# Everything is already up"
#!/bin/bash
source ../spec.sh
source ./module_spec.sh

# Abort early when the target modules directory is missing.
if [ ! -d "${MODSDIR}" ]; then
    # fixed user-facing typo: "deos" -> "does"
    echo "${MODSDIR} does not exist!"
    exit 1
fi

# Copy any extra modules that don't need RDL
verbosecopymodule "${PACKAGESDIR}/curlftpfs/curlftpfs-0.9.2-${ARCH}-bksrc1.xzm" "${MODSDIR}/"
|
<reponame>bobanko/tripSorter-2018
const CleanWebpackPlugin = require("clean-webpack-plugin");
const HtmlWebpackPlugin = require("html-webpack-plugin");
const MiniCssExtractPlugin = require("mini-css-extract-plugin");

// Dev/prod switch driven by NODE_ENV; affects CSS extraction and file naming.
const devMode = process.env.NODE_ENV !== "production";

module.exports = {
  mode: "development",
  entry: "./src/index.jsx",
  output: {
    // Content hash in the name busts browser caches on rebuild.
    filename: "[name].[hash].js"
  },
  watchOptions: {
    ignored: /node_modules/
  },
  optimization: {
    // Separate runtime chunk + a single "vendors" chunk for node_modules.
    runtimeChunk: "single",
    splitChunks: {
      cacheGroups: {
        vendor: {
          test: /[\\/]node_modules[\\/]/,
          name: "vendors",
          chunks: "all"
        }
      }
    }
  },
  module: {
    rules: [
      {
        // Inline small assets (< 12 kB) as data URIs; emit larger ones as files.
        test: /\.(png|jpg|gif|svg|eot|ttf|woff|woff2)$/,
        use: [
          {
            loader: "url-loader",
            options: {
              limit: 12000
            }
          }
        ]
      },
      {
        // In dev, inject CSS via <style>; in prod, extract to .css files.
        test: /\.scss$/,
        use: [
          devMode ? "style-loader" : MiniCssExtractPlugin.loader,
          "css-loader",
          "sass-loader"
        ]
      },
      {
        test: /\.jsx?$/,
        exclude: /node_modules/,
        use: {
          loader: "babel-loader"
        }
      },
      // Re-emit existing source maps from dependencies before other loaders run.
      { enforce: "pre", test: /\.jsx?$/, loader: "source-map-loader" }
    ]
  },
  devtool: "source-map",
  resolve: {
    extensions: [".js", ".jsx", ".json"]
  },
  plugins: [
    // Wipe dist/ before each build so stale hashed bundles don't accumulate.
    new CleanWebpackPlugin(["dist/*"]),
    new HtmlWebpackPlugin({
      template: "./src/index.html"
    }),
    new MiniCssExtractPlugin({
      // Options similar to the same options in webpackOptions.output
      // both options are optional
      filename: devMode ? "[name].css" : "[name].[hash].css",
      chunkFilename: devMode ? "[id].css" : "[id].[hash].css"
    })
  ],
  devServer: {
    //host: 'localhost', //default
    //port: 8080, //default
    //port: 9000
    contentBase: "./dist",
    // Serve index.html for unknown routes (client-side routing support).
    historyApiFallback: true
    //compress: true,
    //hot: true
  }
};
|
import json
import threading

from scapy.all import *
from scapy.layers.l2 import *

from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import SocketServer

from dnslib import A, DNSRecord, QTYPE, RR
from dnslib.server import DNSLogger, DNSServer
from dnslib.fixedresolver import FixedResolver
class FixedDNSResolver(FixedResolver):
    """Resolver that answers every query with a fixed A record (10.15.0.9)."""

    def resolve(self, request, handler):
        # dnslib's resolver contract is to *return* the reply; the server (or
        # the calling handler) is responsible for sending it. The previous
        # implementation called handler.send_reply() and returned None, which
        # broke DNSHandler below (it packs the return value) and double-sent
        # under DNSServer.
        reply = request.reply()
        reply.add_answer(RR(request.q.qname, QTYPE.A, rdata=A('10.15.0.9')))
        return reply
class DNSHandler(SocketServer.BaseRequestHandler):
    """UDP handler that parses a raw DNS query and replies with a fixed answer.

    NOTE(review): constructs FixedDNSResolver() with no arguments, but
    FixedResolver's constructor appears to expect a zone spec (see the
    __main__ block) — confirm this handler is actually exercised; the
    DNSServer started in run_dns_server() uses its own internal handler.
    """

    def handle(self):
        # For UDP servers, self.request is a (data, socket) pair.
        data = self.request[0].strip()
        dns_request = DNSRecord.parse(data)
        resolver = FixedDNSResolver()
        # Relies on resolve() returning the reply record.
        reply = resolver.resolve(dns_request, self)
        self.request[1].sendto(reply.pack(), self.client_address)
class HTTPHandler(BaseHTTPRequestHandler):
    """Minimal HTTP handler answering every GET with a plain-text greeting."""

    def do_GET(self):
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        # Python 2 API: wfile.write accepts str; Python 3 would require bytes.
        self.wfile.write("Hello, World!")
def run_dns_server():
    """Start a dnslib DNSServer on UDP port 53 (binding port 53 needs root).

    NOTE(review): reads the module-global ``resolver`` assigned in the
    __main__ block below — calling this before that assignment raises
    NameError.
    """
    logger = DNSLogger(prefix=False)
    server = DNSServer(resolver, port=53, address='', logger=logger)
    server.start()
def run_http_server():
    """Serve HTTPHandler on localhost:8080; blocks until interrupted."""
    http_server = HTTPServer(('localhost', 8080), HTTPHandler)
    http_server.serve_forever()
if __name__ == "__main__":
resolver = FixedDNSResolver(". 60 IN A 10.15.0.9")
dns_server_thread = threading.Thread(target=run_dns_server)
dns_server_thread.start()
http_server_thread = threading.Thread(target=run_http_server)
http_server_thread.start() |
from django.conf import settings
from django.http import HttpResponse
import json
def handle_image_upload(request):
    """Validate an uploaded image against the configured size limit.

    Returns:
        HttpResponse: 400 when the request is not a POST carrying an
        ``image`` file, 405 with a JSON error body when the file exceeds
        ``settings.MAX_IMAGE_UPLOAD_SIZE``, 200 on success.
    """
    # Use .get() so a missing form field falls through to the 400 branch;
    # request.FILES['image'] raised MultiValueDictKeyError (-> HTTP 500)
    # whenever the field was absent.
    if request.method == 'POST' and request.FILES.get('image'):
        image = request.FILES['image']
        if image.size > settings.MAX_IMAGE_UPLOAD_SIZE:
            # return error when the image size
            # is over the set MAX_IMAGE_UPLOAD_SIZE
            to_MB = settings.MAX_IMAGE_UPLOAD_SIZE / (1024 * 1024)
            data = json.dumps({
                'status': 405,
                'error': f'Image size should not exceed {to_MB} MB'
            })
            return HttpResponse(data, content_type='application/json', status=405)
        else:
            # continue with image processing
            # ...
            return HttpResponse('Image uploaded successfully', status=200)
    else:
        return HttpResponse('Bad request', status=400)
#!/bin/bash
# Package https://app.element.io/ as a desktop app named "Element" using
# nativefier. --single-instance prevents opening multiple windows of the app.
nativefier --name "Element" \
  --icon element-web.png \
  --single-instance "https://app.element.io/"
// Move from Rod 1 to Rod 3
Move Disk 1 from Rod 1 to Rod 3
Move Disk 2 from Rod 1 to Rod 2
Move Disk 1 from Rod 3 to Rod 2
Move Disk 3 from Rod 1 to Rod 3
Move Disk 1 from Rod 2 to Rod 1
Move Disk 2 from Rod 2 to Rod 3
Move Disk 1 from Rod 1 to Rod 3
// Move from Rod 3 to Rod 2
Move Disk 4 from Rod 3 to Rod 2
Move Disk 1 from Rod 3 to Rod 1
Move Disk 2 from Rod 3 to Rod 2
Move Disk 1 from Rod 1 to Rod 2
Move Disk 3 from Rod 3 to Rod 2
Move Disk 1 from Rod 2 to Rod 3
Move Disk 2 from Rod 2 to Rod 1
Move Disk 1 from Rod 3 to Rod 1
Move Disk 4 from Rod 2 to Rod 3
// Move from Rod 1 to Rod 2
Move Disk 1 from Rod 1 to Rod 2
Move Disk 2 from Rod 1 to Rod 3
Move Disk 1 from Rod 2 to Rod 3
Move Disk 3 from Rod 1 to Rod 2
Move Disk 1 from Rod 3 to Rod 2
Move Disk 2 from Rod 3 to Rod 1
Move Disk 1 from Rod 2 to Rod 1
// Move from Rod 2 to Rod 3
Move Disk 4 from Rod 2 to Rod 3
Move Disk 1 from Rod 2 to Rod 3
Move Disk 2 from Rod 2 to Rod 1
Move Disk 1 from Rod 3 to Rod 1
Move Disk 3 from Rod 2 to Rod 3
Move Disk 1 from Rod 1 to Rod 2
Move Disk 2 from Rod 1 to Rod 3
Move Disk 1 from Rod 2 to Rod 3 |
# make sure cmake is installed
# sudo apt-get update
#g++ sudo apt-get install g++
#unzip sudo apt-get install unzip
#compile dep libraries
#example 1: ./buildServer
#example 2: ./buildServer DEBUG
echo "we only build NF SDK here"

# Regenerate protocol message sources first; everything below depends on them.
cd NFComm/NFMessageDefine
./cpp.sh
cd ../../

# Build the CMake tooling used by the later build steps.
cd BuildScript/linux/
chmod -R 755 ./BuildNF.CMake.Tools.sh
./BuildNF.CMake.Tools.sh
cd ../../

# Run the data-config file processor and copy its outputs into place.
cd _Out/NFDataCfg/Tool/
chmod 755 ./NFFileProcess
chmod 755 ./copy_files.sh
./copy_files.sh
cd ..
cd ..
cd ..

# Clean previous binaries and rebuild; "DEBUG" as $1 selects the debug build.
# NOTE: each branch leaves the shell in BuildScript/linux/, which the
# trailing `cd ../../` below depends on.
if [ "$1" == "DEBUG" ]; then
rm -rf ./_Out/Debug/NFServer
rm -rf ./_Out/Debug/*.a
cd BuildScript/linux/
chmod -R 755 ./BuildNF.CMake.Debug.sh
time ./BuildNF.CMake.Debug.sh
else
rm -rf ./_Out/Release/NFServer
rm -rf ./_Out/Release/*.a
cd BuildScript/linux/
chmod -R 755 ./BuildNF.CMake.Release.sh
time ./BuildNF.CMake.Release.sh
fi
cd ../../

# Make the generated run scripts executable.
cd _Out/
chmod 777 *.sh
cd ../
#pwd
|
echo "Getting BuildTools"
sudo apt install tree
# Creating Directories
mkdir build
cd build
mkdir DEBIAN
mkdir usr
cd usr
mkdir local
cd local
mkdir bin
cd ../../../
# Move files over for building
cp ./uls.sh ./build/usr/local/bin
cp ./ulsrepeat.sh ./build/usr/local/bin
cp ./ultimatelinux.sh ./build/usr/local/bin
cp ./control ./build/DEBIAN/
# BUILDING ARTIFACT
dpkg-deb --build build
mv build.deb ./ultimatelinux.deb
# Map all build files
ls
tree
# Delete Temporary Build Workspace
rm -rf build |
def is_message_within_size_limit(message: str, max_size: int) -> bool:
    """Return True when *message* contains at most *max_size* characters."""
    message_length = len(message)
    return message_length <= max_size
<filename>src/main/java/chylex/hee/entity/item/EntityItemAltar.java
package chylex.hee.entity.item;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.world.World;
import chylex.hee.HardcoreEnderExpansion;
import chylex.hee.game.achievements.AchievementManager;
import chylex.hee.init.ItemList;
import chylex.hee.system.abstractions.entity.EntityDataWatcher;
/**
 * Dropped-item entity displayed on an Essence Altar pedestal.
 *
 * <p>Created from a regular {@link EntityItem} by taking one item off its
 * stack. Reverts to a plain EntityItem when the pedestal stops refreshing it
 * (see {@link #onUpdate()}).
 */
public class EntityItemAltar extends EntityItem{
	/** DataWatcher keys for synced state. */
	private enum Data{ SPARKLING };

	private EntityDataWatcher entityData;
	/** Tick counter; presumably reset by the pedestal each tick — when it
	 * exceeds 10 the entity reverts to a plain EntityItem (confirm with the
	 * pedestal block code, which is outside this file). */
	public byte pedestalUpdate;
	public byte essenceType;

	public EntityItemAltar(World world){
		super(world);
	}

	/**
	 * Builds an altar item at (x, y, z), copying one item (and the visual
	 * hover/rotation state) from {@code originalItem}, whose stack shrinks by
	 * one (the original entity dies when its stack empties).
	 */
	public EntityItemAltar(World world, double x, double y, double z, EntityItem originalItem, byte essenceType){
		super(world, x, y, z);
		motionX = motionY = motionZ = 0D;
		delayBeforeCanPickup = 50;
		hoverStart = originalItem.hoverStart;
		rotationYaw = originalItem.rotationYaw;
		// Clone the original item's NBT so custom data carries over.
		NBTTagCompound tag = new NBTTagCompound();
		originalItem.writeEntityToNBT(tag);
		readEntityFromNBT(tag);
		// This entity always represents exactly one item.
		ItemStack is = getEntityItem();
		is.stackSize = 1;
		setEntityItemStack(is);
		// Take that one item off the source stack.
		is = originalItem.getEntityItem();
		if (--is.stackSize == 0)originalItem.setDead();
		else originalItem.setEntityItemStack(is);
		this.essenceType = essenceType;
	}

	@Override
	public void entityInit(){
		super.entityInit();
		entityData = new EntityDataWatcher(this);
		entityData.addBoolean(Data.SPARKLING);
	}

	@Override
	public void onUpdate(){
		// Server side: if the pedestal stopped refreshing pedestalUpdate,
		// replace this entity with a plain EntityItem and remove it.
		if (!worldObj.isRemote && ++pedestalUpdate > 10){
			EntityItem item = new EntityItem(worldObj, posX, posY, posZ, getEntityItem());
			item.copyLocationAndAnglesFrom(this);
			worldObj.spawnEntityInWorld(item);
			setDead();
			return;
		}

		// Deliberately bypasses EntityItem.onUpdate (no physics/merging) —
		// the item stays fixed on the pedestal.
		onEntityUpdate();

		if (delayBeforeCanPickup > 0)--delayBeforeCanPickup;

		prevPosX = posX;
		prevPosY = posY;
		prevPosZ = posZ;
		++age;

		// Client side: spawn aura particles every 4th tick while sparkling.
		if (worldObj.isRemote && (ticksExisted&3) == 1 && entityData.getBoolean(Data.SPARKLING)){
			HardcoreEnderExpansion.fx.altarAura(this);
		}

		// DataWatcher slot 10 holds the synced ItemStack; die when it empties.
		ItemStack item = dataWatcher.getWatchableObjectItemStack(10);
		if (item != null && item.stackSize <= 0)setDead();
	}

	/** Altar items never merge with other dropped items. */
	@Override
	public boolean combineItems(EntityItem item){
		return false;
	}

	@Override
	public void onCollideWithPlayer(EntityPlayer player){
		// Tighter horizontal pickup box than vanilla; copy the stack first so
		// we can check what was picked up after super may clear it.
		if (Math.abs(player.posX-posX) < 0.8001D && Math.abs(player.posZ-posZ) < 0.8001D){
			ItemStack is = getEntityItem().copy();
			super.onCollideWithPlayer(player);

			if (isDead){
				if (is.getItem() == ItemList.enhanced_brewing_stand)player.addStat(AchievementManager.ENHANCED_BREWING_STAND, 1);
			}
		}
	}

	/** Enables the client-side aura particle effect (synced via DataWatcher). */
	public void setSparkling(){
		entityData.setBoolean(Data.SPARKLING, true);
	}

	@Override
	public void writeEntityToNBT(NBTTagCompound nbt){
		super.writeEntityToNBT(nbt);
		nbt.setBoolean("sparkling", entityData.getBoolean(Data.SPARKLING));
		nbt.setByte("essenceType", essenceType);
	}

	@Override
	public void readEntityFromNBT(NBTTagCompound nbt){
		super.readEntityFromNBT(nbt);
		if (nbt.getBoolean("sparkling"))entityData.setBoolean(Data.SPARKLING, true);
		essenceType = nbt.getByte("essenceType");
	}
}
|
#!/bin/bash
: <<'END'
This software was created by United States Government employees at
The Center for the Information Systems Studies and Research (CISR)
at the Naval Postgraduate School NPS. Please note that within the
United States, copyright protection is not available for any works
created by United States Government employees, pursuant to Title 17
United States Code Section 105. This software is in the public
domain and is not subject to copyright.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
END
read -p "This script will reboot the system when done, press enter to continue"
#
# ensure labtainer paths in .bashrc
#
here=`pwd`
export LABTAINER_DIR=$here/trunk
target=~/.bashrc
# Only append the PATH/LABTAINER_DIR lines if .bashrc doesn't already have them.
grep ":./bin:" $target | grep PATH >>/dev/null
result=$?
if [[ result -ne 0 ]];then
cat <<EOT >>$target
if [[ ":\$PATH:" != *":./bin:"* ]]; then
export PATH="\${PATH}:./bin:$here/trunk/scripts/designer/bin"
fi
export LABTAINER_DIR=$here/trunk
EOT
fi
# Convenience symlinks to the student/instructor script directories.
if [ ! -h labtainer-student ]; then ln -s trunk/scripts/labtainer-student; fi
if [ ! -h labtainer-instructor ]; then ln -s trunk/scripts/labtainer-instructor; fi
# add link to update script
full=`realpath trunk/setup_scripts/update-labtainer.sh`
ln -sf $full trunk/scripts/labtainer-student/bin/update-labtainer.sh
cd trunk/setup_scripts
# Detect the Linux distribution unless the caller supplied it as $1.
found_distrib=`cat /etc/*-release | grep "^DISTRIB_ID" | awk -F "=" '{print $2}'`
if [[ -z "$1" ]]; then
    if [[ -z "$found_distrib" ]]; then
        # fedora gotta be different
        found_distrib=`cat /etc/*-release | grep "^NAME" | awk -F "=" '{print $2}'`
    fi
    distrib=$found_distrib
else
    distrib=$1
fi
# Install docker with the distribution-specific installer.
# NOTE(review): the Fedora branch does not capture its exit status into
# RESULT the way the other branches do — confirm whether that is intended.
RESULT=0
case "$distrib" in
    Ubuntu)
        echo is ubuntu
        ./install-docker-ubuntu.sh
        RESULT=$?
        ;;
    Debian|\"Debian*)
        echo is debian
        ./install-docker-debian.sh
        RESULT=$?
        ;;
    Fedora)
        echo is fedora
        ./install-docker-fedora.sh
        ;;
    Centos)
        echo is centos
        ./install-docker-centos.sh
        RESULT=$?
        ;;
    *)
        if [[ -z "$1" ]]; then
            echo "Did not recognize distribution: $found_distrib"
            echo "Try providing distribution as argument, either Ubuntu|Debian|Fedora|Centos"
        else
            echo $"Usage: $0 Ubuntu|Debian|Fedora|Centos"
        fi
        exit 1
esac
# Pull container images under the docker group (membership added above is not
# yet active in this shell), record host info, then reboot as announced.
if [[ "$RESULT" -eq 0 ]]; then
    mkdir -p ../logs
/usr/bin/newgrp docker <<EONG
source ./pull-all.sh
/usr/bin/newgrp $USER
EONG
    sudo ./dns-add.py
    ./getinfo.py
    sudo reboot
else
    echo "There was a problem with the installation."
fi
#!/bin/bash
# SCRIPT: print.sh
# PURPOSE: Generates 2 copies of CV, one with color (cv.pdf) and one without color (cv_print.pdf)
# batchmode suppresses interactive prompts; the print copy passes the "print"
# class option to cv.cls to drop the colors.
xelatex -interaction=batchmode -jobname=cv_print '\PassOptionsToClass{print}{cv}\input{cv.tex}'
xelatex -interaction=batchmode -jobname=cv '\input{cv.tex}'
|
<filename>src/main/webapp/lib/varmateo/util/CookieHandler.js<gh_stars>0
/**************************************************************************
*
* Copyright (c) 2017 <NAME> All Rights Reserved.
*
**************************************************************************/
"use strict";
/**
*
*/
define(function () {

    // Declared before the constructor; function declarations hoist, so this
    // prototype assignment is valid.
    CookieHandler.prototype._cookieName = null;

    /**
     * Manages one single cookie.
     */
    function CookieHandler (cookieName) {
        this._cookieName = cookieName;
    }

    /**
     * Returns the current value of the managed cookie.
     */
    CookieHandler.prototype.get = function () {
        return jaaulde.utils.cookies.get(this._cookieName);
    }

    /**
     * Stores a new value in the managed cookie.
     *
     * NOTE(review): the expiration is midnight at the *start* of today, i.e.
     * a moment already in the past — the cookie may be expired immediately.
     * Also confirm the jaaulde cookies option key is `expiration` and not
     * e.g. `expiresAt`.
     */
    CookieHandler.prototype.set = function ( cookieValue ) {
        var now = new Date();
        var expiration =
            new Date(now.getFullYear(), now.getMonth(), now.getDate());
        var cookieOptions = {
            expiration : expiration,
        };
        jaaulde.utils.cookies.set(this._cookieName, cookieValue, cookieOptions);
    }

    /**
     * Deletes the managed cookie.
     */
    CookieHandler.prototype.remove = function () {
        jaaulde.utils.cookies.del(this._cookieName);
    }

    return CookieHandler;
});
|
<reponame>sjcdigital/sjc-edu
package com.sjcdigital.sjcedu.robot.model.pojos.impl;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.sjcdigital.sjcedu.robot.model.entities.MediaAlunoPorTurma;
import com.sjcdigital.sjcedu.robot.model.pojos.Pojo;
/**
 * JSON-mapped DTO holding the average number of students per class for each
 * school level; converts to the {@link MediaAlunoPorTurma} entity.
 */
public class MediaAlunoPorTurmaPojo implements Pojo {

    @JsonProperty("Educação infantil")
    private String educacaoInfantil;

    @JsonProperty("Anos iniciais")
    private String anosIniciais;

    @JsonProperty("Anos finais")
    private String anosFinais;

    @JsonProperty("Ensino médio")
    private String ensinoMedio;

    public String getEducacaoInfantil() {
        return educacaoInfantil;
    }

    public void setEducacaoInfantil(String educacaoInfantil) {
        this.educacaoInfantil = educacaoInfantil;
    }

    public String getAnosIniciais() {
        return anosIniciais;
    }

    public void setAnosIniciais(String anosIniciais) {
        this.anosIniciais = anosIniciais;
    }

    public String getAnosFinais() {
        return anosFinais;
    }

    public void setAnosFinais(String anosFinais) {
        this.anosFinais = anosFinais;
    }

    public String getEnsinoMedio() {
        return ensinoMedio;
    }

    public void setEnsinoMedio(String ensinoMedio) {
        this.ensinoMedio = ensinoMedio;
    }

    /** Maps this DTO onto its persistence entity. */
    @Override
    public MediaAlunoPorTurma paraEntidade() {
        return new MediaAlunoPorTurma(educacaoInfantil, anosIniciais, anosFinais, ensinoMedio);
    }
}
|
require 'fileutils'
require 'rhc/config'
include RHCHelper
# Cucumber step definitions for OpenShift (rhc) cartridge management.
# They drive @app, an application handle created by earlier steps.

When /^the (.+) cartridge is added$/ do |name|
  @app.add_cartridge name
end

When /^the (.+) cartridge is removed$/ do |name|
  @app.remove_cartridge name
end

# Maps the past-tense step wording onto the rhc cartridge subcommand.
When /^the (.+) cartridge is (stopped|(?:re)?started)$/ do |name,command|
  cmd = case command.to_sym
        when :stopped
          'stop'
        when :started
          'start'
        when :restarted
          'restart'
        else
          raise "Unrecognized command type #{status}"
        end
  @app.cartridge(name).send(cmd)
end

# Asserts cartridge status output against a wording-tolerant regex.
Then /^the (.+) cartridge should be (.*)$/ do |name,status|
  expected = case status.to_sym
             when :running
               "(.+) is running|Uptime:"
             when :stopped
               "(.+) stopped"
             when :removed
               "Cartridge '#{name}' cannot be found in application"
             else
               raise "Unrecognized status type #{status}"
             end
  @app.cartridge(name).status.should match(expected)
end

# 154 is the rhc exit code for an invalid/unavailable cartridge.
Then /^adding the (.+) cartridge should fail$/ do |name|
  @app.add_cartridge(name).should == 154
end

# Remembers which cartridge later scaling/storage steps operate on.
When /^we are updating the (.+) cartridge$/ do |cart|
  @cartridge_name = cart
end

When /^the (\w+) scaling value is set to (.*)$/ do |minmax,value|
  @exitcode = @app.cartridge(@cartridge_name).send(:scale,"--#{minmax} #{value}")
end

When /^we list cartridges$/ do
  @exitcode, @cartridge_output = Cartridge.list
end

When /^we (.+) storage for the (.+) cartridge$/ do |storage_action,cartridge|
  @output = @app.cartridge(@cartridge_name).send(:storage, cartridge, "--#{storage_action}")
end

# "-1" is rendered as "available" in rhc's show output.
Then /^the (\w+) scaling value should be (.*)$/ do |minmax,value|
  expected = {
    :min => "minimum",
    :max => "maximum"
  }[minmax.to_sym]
  value = (value == "-1" ? "available" : value)
  match_string = [expected,value].join(": ")
  regex = Regexp.new(/\b#{match_string}/)
  @app.cartridge(@cartridge_name).send(:show).should match(regex)
end

Then /^the additional cartridge storage amount should be (\w+)$/ do |value|
  @output.should == value
end

Then /^it should fail with code (\d+)$/ do |code|
  @exitcode.should == code.to_i
end

Then /^the list should contain the cartridge ([^\s]+) with display name "([^"]+)"$/ do |name, display_name|
  line = @cartridge_output.each_line.find{ |s| s.include?(name) }
  line.should_not be_nil
  line.should match(display_name)
end
|
// Moves the element at index x so that it ends up just after the element that
// was originally at index y; does nothing when x >= y (including the explicit
// x - 1 == y early exit, preserved from the original contract).
//
// NOTE(review): assumes 0 <= x < b.size() and 0 <= y < b.size(); callers must
// guarantee this — no bounds checking is performed here.
void manipulateVector(std::vector<int>& b, int x, int y) {
    if (x - 1 == y) {
        return;
    }
    if (x < y) {
        int element = b[x];
        b.erase(b.begin() + x);
        // After the erase, everything from x onward shifted left by one, so
        // new-index y + 1 is "just after the original y". The cast fixes the
        // signed/unsigned comparison against size() (x < y implies y >= 0
        // whenever x is valid, so the cast is well-defined here).
        if (static_cast<std::size_t>(y + 1) <= b.size()) {
            b.insert(b.begin() + y + 1, element);
        } else {
            b.push_back(element);
        }
    }
}
<gh_stars>0
import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { ActionResult } from '../models/action-result-model';
import { ConfigService } from './config.service';
// Backend controller segment for workflow-form endpoints.
const CONTROLLER = 'WF_FORMU'

@Injectable({
  providedIn: 'root'
})
export class FormService {

  constructor(private http:HttpClient, private configService:ConfigService) { }

  // Fetches the workflow form list from `<apiRwfEditrUrl>/WF_FORMU?`.
  // NOTE(review): the trailing '?' in the URL looks unintentional — confirm
  // the backend tolerates an empty query string before removing it.
  getwfFormuList(){
    return this.http.get<ActionResult<any[]>>(`${this.configService.config.apiRwfEditrUrl}/${CONTROLLER}?`);
  }
}
|
<filename>src/main/java/org/terracottamc/util/BedrockResourceDataReader.java
package org.terracottamc.util;
import com.google.common.io.ByteStreams;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import org.terracottamc.network.packet.Protocol;
import org.terracottamc.taglib.NBTBuilder;
import org.terracottamc.taglib.nbt.io.NBTReader;
import org.terracottamc.taglib.nbt.tag.NBTTagCompound;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.ByteOrder;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.zip.GZIPInputStream;
/**
* Copyright (c) 2021, TerracottaMC
* All rights reserved.
*
* <p>
* This project is licensed under the BSD 3-Clause License which
* can be found in the root directory of this source tree
*
* @author Kaooot
* @version 1.0
*/
public class BedrockResourceDataReader {
private static final Map<Integer, String> protocolMinecraftVersions = new HashMap<>();
private static final Map<Integer, List<Map<String, Object>>> itemPalettes = new HashMap<>();
private static final Map<Integer, List<Map<String, Object>>> creativeItems = new HashMap<>();
private static final Map<Integer, Map<String, Integer>> itemNameRuntimeIds = new HashMap<>();
private static final Map<Integer, byte[]> entityIdentifiersData = new HashMap<>();
private static final Map<Integer, byte[]> biomeDefinitionsData = new HashMap<>();
private static final Map<Integer, Map<Integer, NBTTagCompound>> blockPalettes = new HashMap<>();
/**
* Initializes this {@link org.terracottamc.util.BedrockResourceDataReader}
*/
public static void initialize() {
BedrockResourceDataReader.protocolMinecraftVersions.put(Protocol.CURRENT_PROTOCOL, Protocol.MINECRAFT_VERSION);
BedrockResourceDataReader.protocolMinecraftVersions.put(Protocol.PROTOCOL_v1_17_0, Protocol.MINECRAFT_VERSION_v1_17_0);
final Gson gson = new GsonBuilder().setPrettyPrinting().create();
final File bedrockFile = new File("src/main/resources/bedrock/");
final File itemPaletteFolder = new File(bedrockFile.getPath() + "/item_palette/");
final File creativeItemsFolder = new File(bedrockFile.getPath() + "/creative_items/");
final File entityIdentifiersFolder = new File(bedrockFile.getPath() + "/entity_identifiers/");
final File biomeDefinitionsFolder = new File(bedrockFile.getPath() + "/biome_definitions/");
final File blockPaletteFolder = new File(bedrockFile.getPath() + "/block_palette/");
// ItemPalette
if (itemPaletteFolder.isDirectory()) {
for (final File file : Objects.requireNonNull(itemPaletteFolder.listFiles())) {
try {
final String minecraftVersion = file.getName().split("\\.")[1]
.replaceAll("_", ".");
final int protocolVersion = BedrockResourceDataReader
.retrieveProtocolVersionByMinecraftVersion(minecraftVersion);
if (protocolVersion != -1) {
final JsonElement elementToParse = new JsonParser()
.parse(new InputStreamReader(new FileInputStream(file)));
BedrockResourceDataReader.itemPalettes.put(protocolVersion,
gson.fromJson(elementToParse, List.class));
}
} catch (final FileNotFoundException e) {
e.printStackTrace();
}
}
}
final Set<Map.Entry<Integer, List<Map<String, Object>>>> itemPaletteEntries =
BedrockResourceDataReader.itemPalettes.entrySet();
for (final Map.Entry<Integer, List<Map<String, Object>>> itemPaletteEntry : itemPaletteEntries) {
final int protocolVersion = itemPaletteEntry.getKey();
final List<Map<String, Object>> itemPalette = itemPaletteEntry.getValue();
final Map<String, Integer> itemNameRuntimeIdMap = new HashMap<>();
for (final Map<String, Object> item : itemPalette) {
itemNameRuntimeIdMap.put((String) item.get("name"), (int) ((double) item.get("id")));
}
BedrockResourceDataReader.itemNameRuntimeIds.put(protocolVersion, itemNameRuntimeIdMap);
}
// CreativeItems
if (creativeItemsFolder.isDirectory()) {
for (final File file : Objects.requireNonNull(creativeItemsFolder.listFiles())) {
final String minecraftVersion = file.getName().split("\\.")[1]
.replaceAll("_", ".");
final int protocolVersion = BedrockResourceDataReader
.retrieveProtocolVersionByMinecraftVersion(minecraftVersion);
if (protocolVersion != -1) {
try {
final JsonArray arrayToParse = new JsonParser()
.parse(new InputStreamReader(new FileInputStream(file))).getAsJsonObject()
.getAsJsonArray("items");
BedrockResourceDataReader.creativeItems.put(protocolVersion,
gson.fromJson(arrayToParse, List.class));
} catch (final FileNotFoundException e) {
e.printStackTrace();
}
}
}
}
// EntityIdentifiers
if (entityIdentifiersFolder.isDirectory()) {
for (final File file : Objects.requireNonNull(entityIdentifiersFolder.listFiles())) {
final String minecraftVersion = file.getName().split("\\.")[1]
.replaceAll("_", ".");
final int protocolVersion = BedrockResourceDataReader
.retrieveProtocolVersionByMinecraftVersion(minecraftVersion);
if (protocolVersion != -1) {
try {
BedrockResourceDataReader.entityIdentifiersData.put(protocolVersion,
ByteStreams.toByteArray(new FileInputStream(file)));
} catch (final IOException e) {
e.printStackTrace();
}
}
}
}
// BiomeDefinitions
if (biomeDefinitionsFolder.isDirectory()) {
for (final File file : Objects.requireNonNull(biomeDefinitionsFolder.listFiles())) {
final String minecraftVersion = file.getName().split("\\.")[1]
.replaceAll("_", ".");
final int protocolVersion = BedrockResourceDataReader
.retrieveProtocolVersionByMinecraftVersion(minecraftVersion);
if (protocolVersion != -1) {
try {
BedrockResourceDataReader.biomeDefinitionsData.put(protocolVersion,
ByteStreams.toByteArray(new FileInputStream(file)));
} catch (final IOException e) {
e.printStackTrace();
}
}
}
}
// BlockPalette
if (blockPaletteFolder.isDirectory()) {
for (final File file : Objects.requireNonNull(blockPaletteFolder.listFiles())) {
final String minecraftVersion = file.getName().split("\\.")[1]
.replaceAll("_", ".");
final int protocolVersion = BedrockResourceDataReader
.retrieveProtocolVersionByMinecraftVersion(minecraftVersion);
if (protocolVersion != -1) {
try (final FileInputStream fileInputStream = new FileInputStream(file)) {
final GZIPInputStream gzipInputStream = new GZIPInputStream(new DataInputStream(fileInputStream));
final byte[] blockPaletteData = ByteStreams.toByteArray(gzipInputStream);
if (blockPaletteData.length > 0) {
final ByteBuf buffer = PooledByteBufAllocator.DEFAULT.directBuffer();
buffer.writeBytes(blockPaletteData);
final NBTReader nbtReader = new NBTBuilder()
.withIOBuffer(buffer)
.withByteOrder(ByteOrder.BIG_ENDIAN)
.buildReader();
final List<NBTTagCompound> nbtTagCompounds = (List<NBTTagCompound>)
nbtReader.createCompound().getList("blocks");
int blockRuntimeId = 0;
final Map<Integer, NBTTagCompound> nbtDataMap = new HashMap<>();
for (final NBTTagCompound nbtTagCompound : nbtTagCompounds) {
blockRuntimeId++;
nbtDataMap.put(blockRuntimeId, nbtTagCompound);
}
BedrockResourceDataReader.blockPalettes.put(protocolVersion, nbtDataMap);
}
gzipInputStream.close();
} catch (final IOException e) {
e.printStackTrace();
}
}
}
}
}
/**
* Retrieves the correct item palette for the given protocol version of the player
*
* @param protocolVersion which is used to retrieve the item palette and to support multiple protocol functionality
*
* @return a fresh {@link java.util.List}
*/
public static List<Map<String, Object>> retrieveItemPaletteByProtocolVersion(final int protocolVersion) {
final Set<Map.Entry<Integer, List<Map<String, Object>>>> itemPaletteEntries =
BedrockResourceDataReader.itemPalettes.entrySet();
for (final Map.Entry<Integer, List<Map<String, Object>>> itemPaletteEntry : itemPaletteEntries) {
if (itemPaletteEntry.getKey() == protocolVersion) {
return itemPaletteEntry.getValue();
}
}
return null;
}
/**
* Retrieves the item name and item runtime identifier mapping for the given protocol version of the player
*
* @param protocolVersion which is used to retrieve the mapping and to support multiple protocol functionality
*
* @return a fresh {@link java.util.Map}
*/
public static Map<String, Integer> retrieveItemNameRuntimeIdsByProtocolVersion(final int protocolVersion) {
final Set<Map.Entry<Integer, Map<String, Integer>>> itemNameRuntimeIdsEntries =
BedrockResourceDataReader.itemNameRuntimeIds.entrySet();
for (final Map.Entry<Integer, Map<String, Integer>> itemNameRuntimeIdsEntry : itemNameRuntimeIdsEntries) {
if (itemNameRuntimeIdsEntry.getKey() == protocolVersion) {
return itemNameRuntimeIdsEntry.getValue();
}
}
return null;
}
/**
* Retrieves the correct creative items for the given protocol version of the player
*
* @param protocolVersion which is used to retrieve the creative items and to support multiple protocol functionality
*
* @return a fresh {@link java.util.List}
*/
public static List<Map<String, Object>> retrieveCreativeItemsByProtocolVersion(final int protocolVersion) {
final Set<Map.Entry<Integer, List<Map<String, Object>>>> creativeItemsEntries =
BedrockResourceDataReader.creativeItems.entrySet();
for (final Map.Entry<Integer, List<Map<String, Object>>> creativeItemsEntry : creativeItemsEntries) {
if (creativeItemsEntry.getKey() == protocolVersion) {
return creativeItemsEntry.getValue();
}
}
return null;
}
/**
* Retrieves the entity identifiers data for the given protocol version of the player
*
* @param protocolVersion which is used to retrieve the entity identifiers data
* and to support multiple protocol functionality
*
* @return fresh entity identifiers data
*/
public static byte[] retrieveEntityIdentifiersDataByProtocolVersion(final int protocolVersion) {
final Set<Map.Entry<Integer, byte[]>> entityIdentifiersDataEntries =
BedrockResourceDataReader.entityIdentifiersData.entrySet();
for (final Map.Entry<Integer, byte[]> entityIdentifiersDataEntry : entityIdentifiersDataEntries) {
if (entityIdentifiersDataEntry.getKey() == protocolVersion) {
return entityIdentifiersDataEntry.getValue();
}
}
return null;
}
/**
* Retrieves the biome definition data for the given protocol version of the player
*
* @param protocolVersion which is used to retrieve the biome definition data
* and to support multiple protocol functionality
*
* @return fresh biome definition data
*/
public static byte[] retrieveBiomeDefinitionsDataByProtocolVersion(final int protocolVersion) {
final Set<Map.Entry<Integer, byte[]>> biomeDefinitionsDataEntries =
BedrockResourceDataReader.biomeDefinitionsData.entrySet();
for (final Map.Entry<Integer, byte[]> biomeDefinitionsDataEntry : biomeDefinitionsDataEntries) {
if (biomeDefinitionsDataEntry.getKey() == protocolVersion) {
return biomeDefinitionsDataEntry.getValue();
}
}
return null;
}
/**
* Retrieves the {@link org.terracottamc.taglib.nbt.tag.NBTTagCompound} of by the given block runtime identifier
* for the given protocol version of the player
*
* @param protocolVersion the protocol version used to work with the block palette for its version
* @param blockRuntimeId which is used to retrieve the correct {@link org.terracottamc.taglib.nbt.tag.NBTTagCompound}
*
* @return a fresh {@link org.terracottamc.taglib.nbt.tag.NBTTagCompound}
*/
public static NBTTagCompound retrieveBlockNBTByBlockRuntimeId(final int protocolVersion, final int blockRuntimeId) {
final Set<Map.Entry<Integer, NBTTagCompound>> blockNBTRuntimeIdEntries =
BedrockResourceDataReader.blockPalettes.get(protocolVersion).entrySet();
for (final Map.Entry<Integer, NBTTagCompound> blockNBTRuntimeIdEntry : blockNBTRuntimeIdEntries) {
if (blockNBTRuntimeIdEntry.getKey() == blockRuntimeId) {
return blockNBTRuntimeIdEntry.getValue();
}
}
return null;
}
/**
* Returns the block runtime identifier by its block identifier and
* the given {@link org.terracottamc.taglib.nbt.tag.NBTTagCompound}
*
* @param protocolVersion the protocol version used to work with the block palette for its version
* @param blockIdentifier which is needed to retrieve the block runtime id
* @param blockStatesTag which are needed to retrieve the block runtime id
*
* @return a fresh block runtime id
*/
public static int retrieveBlockRuntimeIdByBlockIdentifier(final int protocolVersion, final String blockIdentifier,
final NBTTagCompound blockStatesTag) {
final Set<Map.Entry<Integer, NBTTagCompound>> blockNBTRuntimeIdEntries =
BedrockResourceDataReader.blockPalettes.get(protocolVersion).entrySet();
for (final Map.Entry<Integer, NBTTagCompound> blockNBTRuntimeIdEntry : blockNBTRuntimeIdEntries) {
final NBTTagCompound blockNBTTag = blockNBTRuntimeIdEntry.getValue();
if (blockNBTTag.getString("name").equalsIgnoreCase(blockIdentifier) &&
blockNBTTag.getChildTag("states").equals(blockStatesTag)) {
return blockNBTRuntimeIdEntry.getKey();
}
}
return -1;
}
/**
* Retrieves the protocol version from its minecraft version as a {@link java.lang.String}
*
* @param minecraftVersion that is needed to retrieve the protocol version from it
*
* @return a fresh {@link java.lang.Integer} as protocol version
*/
private static Integer retrieveProtocolVersionByMinecraftVersion(final String minecraftVersion) {
for (final Map.Entry<Integer, String> versionEntry : BedrockResourceDataReader.protocolMinecraftVersions.entrySet()) {
if (versionEntry.getValue().equalsIgnoreCase(minecraftVersion)) {
return versionEntry.getKey();
}
}
return -1;
}
} |
// Scan direction is parallel to the sweepline which moves in the perpendicular direction;
// i.e. scan direction is "sideways" along the sweepline. We do several passes, following Clarkson et al.,
// "Rectilinear shortest paths through polygonal obstacles in O(n (log n)2) time" (checked into the enlistment).
// 1. Enumerate all obstacles and load their extreme vertex coordinate projections to the perpendicular axis.
// 2. Run a scanline (in each direction) that:
// a. Accumulates the vertices and generates obstacle-related Steiner points.
// b. Generates the ScanSegments.
// 3. Iterate in parallel along the ScanSegments and *VertexPoints to determine the sparse intersections
// by binary division, as in the paper.
// 4. Finally we create the VisibilityVertices and VisibilityEdges along each ScanSegment from its
// list of intersections.
// Differences from the paper largely are due to the paper's creation of non-orthogonal edges along
// obstacle sides; instead, we create orthogonal edges to the lateral sides of the obstacle's bounding
// box. Also, we support overlapped obstacles (interior edges are weighted, as in the non-sparse
import {Direction} from '../../math/geometry/direction'
import {Point} from '../../math/geometry/point'
import {Rectangle} from '../../math/geometry/rectangle'
import {RBNode} from '../../structs/RBTree/rbNode'
// import {Assert} from '../../utils/assert'
import {comparePointsYFirst} from '../../utils/compare'
import {PointSet} from '../../utils/PointSet'
import {SweepEvent} from '../spline/coneSpanner/SweepEvent'
import {AxisCoordinateEvent} from './AxisCoordinateEvent'
import {BasicObstacleSide, LowObstacleSide} from './BasicObstacleSide'
import {BasicReflectionEvent} from './basicReflectionEvent'
import {BasicVertexEvent} from './BasicVertexEvent'
import {Obstacle} from './obstacle'
import {OpenVertexEvent} from './OpenVertexEvent'
import {PointComparer} from './PointComparer'
import {ScanDirection} from './ScanDirection'
import {ScanSegment} from './ScanSegment'
import {ScanSegmentTree} from './ScanSegmentTree'
import {ScanSegmentVector} from './ScanSegmentVector'
import {ScanSegmentVectorItem} from './ScanSegmentVectorItem'
import {StaticGraphUtility} from './StaticGraphUtility'
import {VisibilityGraphGenerator} from './VisibilityGraphGenerator'
// implementation) and groups.
export class SparseVisibilityGraphGenerator extends VisibilityGraphGenerator {
/// The points of obstacle vertices encountered on horizontal scan.
private horizontalVertexPoints = new PointSet()
/// The points of obstacle vertices encountered on vertical scan.
private verticalVertexPoints: PointSet = new PointSet()
/// The Steiner points generated at the bounding box of obstacles.
/// These help ensure that we can "go around" the obstacle, as with the non-orthogonal edges in the paper.
private boundingBoxSteinerPoints: PointSet = new PointSet()
/// Accumulates distinct vertex projections to the X axis during sweep.
private xCoordAccumulator: Set<number> = new Set<number>()
/// Accumulates distinct vertex projections to the Y axis during sweep.
private yCoordAccumulator: Set<number> = new Set<number>()
/// ScanSegment vector locations on the Y axis; final array after sweep.
private horizontalScanSegmentVector: ScanSegmentVector
/// ScanSegment vector locations on the X axis; final array after sweep.
private verticalScanSegmentVector: ScanSegmentVector
/// The index from a coordinate to a horizontal vector slot.
private horizontalCoordMap: Map<number, number> = new Map<number, number>()
/// The index from a point to a vertical vector slot.
private verticalCoordMap: Map<number, number> = new Map<number, number>()
/// The index from a coordinate to a vector slot on the axis we are intersecting to.
private perpendicularCoordMap: Map<number, number>
/// The segment vector we are intersecting along.
private parallelSegmentVector: ScanSegmentVector
/// The segment vector we are intersecting to.
private perpendicularSegmentVector: ScanSegmentVector
/// The comparer for points along the horizontal or vertical axis.
currentAxisPointComparer: (a: Point, b: Point) => number
constructor() {
super(/* wantReflections:*/ false)
}
Clear() {
super.Clear()
this.Cleanup()
}
private Cleanup() {
this.horizontalVertexPoints.clear()
this.verticalVertexPoints.clear()
this.boundingBoxSteinerPoints.clear()
this.xCoordAccumulator.clear()
this.yCoordAccumulator.clear()
this.horizontalCoordMap.clear()
this.verticalCoordMap.clear()
}
/// Generate the visibility graph along which edges will be routed.
/// <returns></returns>
GenerateVisibilityGraph() {
this.AccumulateVertexCoords()
this.CreateSegmentVectorsAndPopulateCoordinateMaps()
this.RunScanLineToCreateSegmentsAndBoundingBoxSteinerPoints()
this.GenerateSparseIntersectionsFromVertexPoints()
this.CreateScanSegmentTrees()
this.Cleanup()
}
AccumulateVertexCoords() {
// Unlike the paper we only generate lines for extreme vertices (i.e. on the horizontal pass we
// don't generate a horizontal vertex projection to the Y axis for a vertex that is not on the top
// or bottom of the obstacle). So we can just use the bounding box.
for (const obstacle of this.ObstacleTree.GetAllObstacles()) {
this.xCoordAccumulator.add(obstacle.VisibilityBoundingBox.left)
this.xCoordAccumulator.add(obstacle.VisibilityBoundingBox.right)
this.yCoordAccumulator.add(obstacle.VisibilityBoundingBox.top)
this.yCoordAccumulator.add(obstacle.VisibilityBoundingBox.bottom)
}
}
private CreateSegmentVectorsAndPopulateCoordinateMaps() {
this.horizontalScanSegmentVector = new ScanSegmentVector(
this.yCoordAccumulator,
true,
)
this.verticalScanSegmentVector = new ScanSegmentVector(
this.xCoordAccumulator,
false,
)
for (let slot = 0; slot < this.horizontalScanSegmentVector.Length; slot++) {
this.horizontalCoordMap.set(
this.horizontalScanSegmentVector.Item(slot).Coord,
slot,
)
}
for (let slot = 0; slot < this.verticalScanSegmentVector.Length; slot++) {
this.verticalCoordMap.set(
this.verticalScanSegmentVector.Item(slot).Coord,
slot,
)
}
}
private RunScanLineToCreateSegmentsAndBoundingBoxSteinerPoints() {
// Do a scanline pass to create scan segments that span the entire height/width of the graph
// (mixing overlapped with free segments as needed) and generate the type-2 Steiner points.
super.GenerateVisibilityGraph()
this.horizontalScanSegmentVector.ScanSegmentsComplete()
this.verticalScanSegmentVector.ScanSegmentsComplete()
this.xCoordAccumulator.clear()
this.yCoordAccumulator.clear()
}
InitializeEventQueue(scanDir: ScanDirection) {
super.InitializeEventQueue(scanDir)
this.SetVectorsAndCoordMaps(scanDir)
this.AddAxisCoordinateEvents(scanDir)
}
private AddAxisCoordinateEvents(scanDir: ScanDirection) {
// Normal event ordering will apply - and will thus order the ScanSegments created in the vectors.
if (scanDir.IsHorizontal) {
for (const coord of this.yCoordAccumulator) {
this.eventQueue.Enqueue(
new AxisCoordinateEvent(
new Point(
this.ObstacleTree.GraphBox.left -
SparseVisibilityGraphGenerator.SentinelOffset,
coord,
),
),
)
}
return
}
for (const coord of this.xCoordAccumulator) {
this.eventQueue.Enqueue(
new AxisCoordinateEvent(
new Point(
coord,
this.ObstacleTree.GraphBox.bottom -
SparseVisibilityGraphGenerator.SentinelOffset,
),
),
)
}
}
ProcessCustomEvent(evt: SweepEvent) {
if (!this.ProcessAxisCoordinate(evt)) {
this.ProcessCustomEvent(evt)
}
}
private ProcessAxisCoordinate(evt: SweepEvent): boolean {
const axisEvent = <AxisCoordinateEvent>evt
if (null != axisEvent) {
this.CreateScanSegmentsOnAxisCoordinate(axisEvent.Site)
return true
}
return false
}
InsertPerpendicularReflectionSegment(start: Point, end: Point): boolean {
/*Assert.assert(
false,
'base.wantReflections is false in Sparse mode so this should never be called',
)*/
// ReSharper disable HeuristicUnreachableCode
return false
// ReSharper restore HeuristicUnreachableCode
}
InsertParallelReflectionSegment(
start: Point,
end: Point,
eventObstacle: Obstacle,
lowNborSide: BasicObstacleSide,
highNborSide: BasicObstacleSide,
action: BasicReflectionEvent,
): boolean {
/*Assert.assert(
false,
'base.wantReflections is false in Sparse mode so this should never be called',
)*/
// ReSharper disable HeuristicUnreachableCode
return false
// ReSharper restore HeuristicUnreachableCode
}
protected ProcessVertexEvent(
lowSideNode: RBNode<BasicObstacleSide>,
highSideNode: RBNode<BasicObstacleSide>,
vertexEvent: BasicVertexEvent,
) {
const vertexPoints = this.ScanDirection.IsHorizontal
? this.horizontalVertexPoints
: this.verticalVertexPoints
vertexPoints.add(vertexEvent.Site)
// For easier reading...
const lowNborSide = this.LowNeighborSides.LowNeighbor.item
const highNborSide = this.HighNeighborSides.HighNeighbor.item
const highDir = this.ScanDirection.Dir
const lowDir = this.ScanDirection.OppositeDirection
// Generate the neighbor side intersections, regardless of overlaps; these are the type-2 Steiner points.
const lowSteiner = this.ScanLineIntersectSide(vertexEvent.Site, lowNborSide)
const highSteiner = this.ScanLineIntersectSide(
vertexEvent.Site,
highNborSide,
)
// Add the intersections at the neighbor bounding boxes if the intersection is not at a sentinel.
// Go in the opposite direction from the neighbor intersection to find the border between the Steiner
// point and vertexEvent.Site (unless vertexEvent.Site is inside the bounding box).
if (this.ObstacleTree.GraphBox.contains(lowSteiner)) {
const bboxIntersectBeforeLowSteiner =
StaticGraphUtility.RectangleBorderIntersect(
lowNborSide.Obstacle.VisibilityBoundingBox,
lowSteiner,
highDir,
)
if (
PointComparer.IsPureLower(
bboxIntersectBeforeLowSteiner,
vertexEvent.Site,
)
) {
this.boundingBoxSteinerPoints.add(bboxIntersectBeforeLowSteiner)
}
}
if (this.ObstacleTree.GraphBox.contains(highSteiner)) {
const bboxIntersectBeforeHighSteiner =
StaticGraphUtility.RectangleBorderIntersect(
highNborSide.Obstacle.VisibilityBoundingBox,
highSteiner,
lowDir,
)
if (
PointComparer.IsPureLower(
vertexEvent.Site,
bboxIntersectBeforeHighSteiner,
)
) {
this.boundingBoxSteinerPoints.add(bboxIntersectBeforeHighSteiner)
}
}
// Add the corners of the bounding box of the vertex obstacle, if they are visible to the event site.
// This ensures that we "go around" the obstacle, as with the non-orthogonal edges in the paper.
const t = {lowCorner: <Point>undefined, highCorner: <Point>undefined}
SparseVisibilityGraphGenerator.GetBoundingCorners(
lowSideNode.item.Obstacle.VisibilityBoundingBox,
vertexEvent instanceof OpenVertexEvent,
this.ScanDirection.IsHorizontal,
t,
)
if (
PointComparer.IsPureLower(lowSteiner, t.lowCorner) ||
lowNborSide.Obstacle.IsInSameClump(vertexEvent.Obstacle)
) {
vertexPoints.add(t.lowCorner)
}
if (
PointComparer.IsPureLower(t.highCorner, highSteiner) ||
highNborSide.Obstacle.IsInSameClump(vertexEvent.Obstacle)
) {
vertexPoints.add(t.highCorner)
}
}
private static GetBoundingCorners(
boundingBox: Rectangle,
isLowSide: boolean,
isHorizontal: boolean,
t: {lowCorner: Point; /* out */ highCorner: Point},
) {
if (isLowSide) {
t.lowCorner = boundingBox.leftBottom
t.highCorner = isHorizontal
? boundingBox.rightBottom
: boundingBox.leftTop
return
}
t.lowCorner = isHorizontal ? boundingBox.leftTop : boundingBox.rightBottom
t.highCorner = boundingBox.rightTop
}
private CreateScanSegmentsOnAxisCoordinate(site: Point) {
this.CurrentGroupBoundaryCrossingMap.Clear()
// Iterate the ScanLine and create ScanSegments. There will always be at least the two sentinel sides.
const sideNode = this.scanLine.Lowest()
let nextNode = this.scanLine.NextHighR(sideNode)
let overlapDepth = 0
let start = site
let isInsideOverlappedObstacle = false
for (; null != nextNode; nextNode = this.scanLine.NextHighR(nextNode)) {
if (this.SkipSide(start, nextNode.item)) {
continue
}
if (nextNode.item.Obstacle.IsGroup) {
// Do not create internal group crossings in non-overlapped obstacles.
if (overlapDepth == 0 || isInsideOverlappedObstacle) {
this.HandleGroupCrossing(site, nextNode.item)
}
continue
}
const isLowSide = nextNode.item instanceof LowObstacleSide
if (isLowSide) {
if (overlapDepth > 0) {
overlapDepth++
continue
}
// We are not overlapped, so create a ScanSegment from the previous side intersection to the
// intersection with the side in nextNode.Item.
start = this.CreateScanSegment(
start,
nextNode.item,
ScanSegment.NormalWeight,
)
this.CurrentGroupBoundaryCrossingMap.Clear()
overlapDepth = 1
isInsideOverlappedObstacle = nextNode.item.Obstacle.isOverlapped
continue
}
// This is a HighObstacleSide. If we've got overlap nesting, decrement the depth.
/*Assert.assert(overlapDepth > 0, 'Overlap depth must be positive')*/
overlapDepth++
if (overlapDepth > 0) {
continue
}
// If we are not within an overlapped obstacle, don't bother creating the overlapped ScanSegment
// as there will never be visibility connecting to it.
start =
nextNode.item.Obstacle.isOverlapped ||
nextNode.item.Obstacle.OverlapsGroupCorner
? this.CreateScanSegment(
start,
nextNode.item,
ScanSegment.OverlappedWeight,
)
: this.ScanLineIntersectSide(start, nextNode.item)
this.CurrentGroupBoundaryCrossingMap.Clear()
isInsideOverlappedObstacle = false
}
// The final piece.
const end = this.ScanDirection.IsHorizontal
? new Point(
this.ObstacleTree.GraphBox.right +
SparseVisibilityGraphGenerator.SentinelOffset,
start.y,
)
: new Point(
start.x,
this.ObstacleTree.GraphBox.top +
SparseVisibilityGraphGenerator.SentinelOffset,
)
this.parallelSegmentVector.CreateScanSegment(
start,
end,
ScanSegment.NormalWeight,
this.CurrentGroupBoundaryCrossingMap.GetOrderedListBetween(start, end),
)
this.parallelSegmentVector.ScanSegmentsCompleteForCurrentSlot()
}
private HandleGroupCrossing(site: Point, groupSide: BasicObstacleSide) {
if (!this.ScanLineCrossesObstacle(site, groupSide.Obstacle)) {
return
}
// Here we are always going left-to-right. As in base.SkipToNeighbor, we don't stop traversal for groups,
// neither do we create overlapped edges (unless we're inside a non-group obstacle). Instead we turn
// the boundary crossing on or off based on group membership at ShortestPath-time. Even though this is
// the sparse VG, we always create these edges at group boundaries so we don't skip over them.
const dirToInsideOfGroup: Direction =
groupSide instanceof LowObstacleSide
? this.ScanDirection.Dir
: this.ScanDirection.OppositeDirection
const intersect = this.ScanLineIntersectSide(site, groupSide)
const crossing = this.CurrentGroupBoundaryCrossingMap.AddIntersection(
intersect,
groupSide.Obstacle,
dirToInsideOfGroup,
)
// The vertex crossing the edge is perpendicular to the group boundary. A rectilinear group will also have
// an edge parallel to that group boundary that includes the point of that crossing vertex; therefore we must
// split that non-crossing edge at that vertex.
this.AddPerpendicularCoordForGroupCrossing(intersect)
// Similarly, the crossing edge's opposite vertex may be on a perpendicular segment.
const interiorPoint = crossing.GetInteriorVertexPoint(intersect)
this.AddPerpendicularCoordForGroupCrossing(interiorPoint)
}
private AddPerpendicularCoordForGroupCrossing(intersect: Point) {
const nonCrossingPerpSlot = this.FindPerpendicularSlot(intersect, 0)
if (-1 != nonCrossingPerpSlot) {
this.perpendicularSegmentVector
.Item(nonCrossingPerpSlot)
.AddPendingPerpendicularCoord(
this.parallelSegmentVector.CurrentSlot.Coord,
)
}
}
private SkipSide(start: Point, side: BasicObstacleSide): boolean {
if (side.Obstacle.IsSentinel) {
return true
}
// Skip sides of obstacles that we do not actually pass through.
const bbox = side.Obstacle.VisibilityBoundingBox
if (this.ScanDirection.IsHorizontal) {
return start.y == bbox.bottom || start.y == bbox.top
}
return start.x == bbox.left || start.x == bbox.right
}
private CreateScanSegment(
start: Point,
side: BasicObstacleSide,
weight: number,
): Point {
const end = this.ScanLineIntersectSide(start, side)
if (start != end) {
this.parallelSegmentVector.CreateScanSegment(
start,
end,
weight,
this.CurrentGroupBoundaryCrossingMap.GetOrderedListBetween(start, end),
)
}
return end
}
private GenerateSparseIntersectionsFromVertexPoints() {
this.VisibilityGraph = SparseVisibilityGraphGenerator.NewVisibilityGraph()
// Generate the sparse intersections between ScanSegments based upon the ordered vertexPoints.
this.GenerateSparseIntersectionsAlongHorizontalAxis()
this.GenerateSparseIntersectionsAlongVerticalAxis()
this.ConnectAdjoiningScanSegments()
// Now each segment has the coordinates all of its intersections, so create the visibility graph.
this.horizontalScanSegmentVector.CreateSparseVerticesAndEdges(
this.VisibilityGraph,
)
this.verticalScanSegmentVector.CreateSparseVerticesAndEdges(
this.VisibilityGraph,
)
}
private GenerateSparseIntersectionsAlongHorizontalAxis() {
this.currentAxisPointComparer = comparePointsYFirst
const vertexPoints = Array.from(this.horizontalVertexPoints.values()).sort(
this.currentAxisPointComparer,
)
const bboxSteinerPoints = Array.from(
this.boundingBoxSteinerPoints.values(),
).sort(this.currentAxisPointComparer)
this.ScanDirection = ScanDirection.HorizontalInstance
this.SetVectorsAndCoordMaps(this.ScanDirection)
this.GenerateSparseIntersections(vertexPoints, bboxSteinerPoints)
}
private GenerateSparseIntersectionsAlongVerticalAxis() {
this.currentAxisPointComparer = (a, b) => a.compareTo(b)
const vertexPoints = Array.from(this.verticalVertexPoints.values()).sort(
this.currentAxisPointComparer,
)
const bboxSteinerPoints = Array.from(
this.boundingBoxSteinerPoints.values(),
).sort(this.currentAxisPointComparer)
this.ScanDirection = ScanDirection.VerticalInstance
this.SetVectorsAndCoordMaps(this.ScanDirection)
this.GenerateSparseIntersections(vertexPoints, bboxSteinerPoints)
}
private SetVectorsAndCoordMaps(scanDir: ScanDirection) {
if (scanDir.IsHorizontal) {
this.parallelSegmentVector = this.horizontalScanSegmentVector
this.perpendicularSegmentVector = this.verticalScanSegmentVector
this.perpendicularCoordMap = this.verticalCoordMap
} else {
this.parallelSegmentVector = this.verticalScanSegmentVector
this.perpendicularSegmentVector = this.horizontalScanSegmentVector
this.perpendicularCoordMap = this.horizontalCoordMap
}
}
private ConnectAdjoiningScanSegments() {
// Ensure there is a vertex at the end/start point of two ScanSegments; these will always differ in overlappedness.
this.horizontalScanSegmentVector.ConnectAdjoiningSegmentEndpoints()
this.verticalScanSegmentVector.ConnectAdjoiningSegmentEndpoints()
}
private GenerateSparseIntersections(
vertexPoints: Array<Point>,
bboxSteinerPoints: Array<Point>,
) {
this.perpendicularSegmentVector.ResetForIntersections()
this.parallelSegmentVector.ResetForIntersections()
// Position the enumerations to the first point.
let i = 1
const steinerPointsCounter = {j: 0}
for (const item of this.parallelSegmentVector.Items()) {
for (;;) {
if (!item.CurrentSegment.ContainsPoint(vertexPoints[i])) {
// Done accumulating intersections for the current segment; move to the next segment.
if (
!this.AddSteinerPointsToInterveningSegments(
vertexPoints[i],
bboxSteinerPoints,
steinerPointsCounter,
item,
) ||
!item.TraverseToSegmentContainingPoint(vertexPoints[i])
) {
// Done with this vectorItem, move to the next item.
break
}
}
this.AddPointsToCurrentSegmentIntersections(
bboxSteinerPoints,
steinerPointsCounter,
item,
)
this.GenerateIntersectionsFromVertexPointForCurrentSegment(
vertexPoints[i],
item,
)
if (item.PointIsCurrentEndAndNextStart(vertexPoints[i])) {
// MoveNext will always return true because the test to enter this block returned true.
item.MoveNext()
/*Assert.assert(
item.HasCurrent,
'MoveNext ended before EndAndNextStart',
)*/
continue
}
if (++i >= vertexPoints.length) {
// No more vertexPoints; we're done.
return
}
}
}
// We should have exited in the "no more vertexPoints" case above.
/*Assert.assert(false, 'Mismatch in points and segments')*/
}
private AddSteinerPointsToInterveningSegments(
currentVertexPoint: Point,
bboxSteinerPoints: Array<Point>,
t: {j: number},
item: ScanSegmentVectorItem,
): boolean {
// With overlaps, we may have bboxSteinerPoints on segments that do not contain vertices.
while (
t.j < bboxSteinerPoints.length &&
this.currentAxisPointComparer(
bboxSteinerPoints[t.j],
currentVertexPoint,
) == -1
) {
if (!item.TraverseToSegmentContainingPoint(bboxSteinerPoints[t.j])) {
// Done with this vectorItem, move to the next item.
return false
}
this.AddPointsToCurrentSegmentIntersections(bboxSteinerPoints, t, item)
}
return true
}
private AddPointsToCurrentSegmentIntersections(
pointsToAdd: Array<Point>,
t: {j: number},
parallelItem: ScanSegmentVectorItem,
) {
// The first Steiner point should be in the segment, unless we have a non-orthogonal or overlapped or both situation
// that results in no Steiner points having been generated, or Steiner points being generated on a segment that has
// the opposite overlap state from the segment containing the corresponding vertex.
for (
;
t.j < pointsToAdd.length &&
parallelItem.CurrentSegment.ContainsPoint(pointsToAdd[t.j]);
t.j++
) {
const steinerSlot: number = this.FindPerpendicularSlot(
pointsToAdd[t.j],
0,
)
this.AddSlotToSegmentIntersections(parallelItem, steinerSlot)
}
}
private GenerateIntersectionsFromVertexPointForCurrentSegment(
site: Point,
parallelItem: ScanSegmentVectorItem,
) {
const perpStartSlot: number = this.FindPerpendicularSlot(
parallelItem.CurrentSegment.Start,
1,
)
const perpEndSlot: number = this.FindPerpendicularSlot(
parallelItem.CurrentSegment.End,
-1,
)
const siteSlot: number = this.FindPerpendicularSlot(site, 0)
// See comments in FindIntersectingSlot; we don't add non-extreme vertices in the perpendicular direction
// so in some heavily-overlapped scenarios, we may not have any intersections within this scan segment.
if (perpStartSlot >= perpEndSlot) {
return
}
this.AddSlotToSegmentIntersections(parallelItem, perpStartSlot)
this.AddSlotToSegmentIntersections(parallelItem, perpEndSlot)
if (siteSlot > perpStartSlot && siteSlot < perpEndSlot) {
this.AddSlotToSegmentIntersections(parallelItem, siteSlot)
this.AddBinaryDivisionSlotsToSegmentIntersections(
parallelItem,
perpStartSlot,
siteSlot,
perpEndSlot,
)
}
}
// These are called when the site may not be in the vector.
private FindPerpendicularSlot(site: Point, directionIfMiss: number): number {
return SparseVisibilityGraphGenerator.FindIntersectingSlot(
this.perpendicularSegmentVector,
this.perpendicularCoordMap,
site,
directionIfMiss,
)
}
private static FindIntersectingSlot(
segmentVector: ScanSegmentVector,
coordMap: Map<number, number>,
site: Point,
directionIfMiss: number,
): number {
const coord = segmentVector.GetParallelCoord(site)
const slot = coordMap.get(coord)
if (slot != undefined) {
return slot
}
// There are a few cases where the perpCoord is not in the map:
// 1. The first ScanSegment in a slot will have a Start at the sentinel, which is before the first
// perpendicular segment; similarly, the last ScanSegment in a slot will have an out-of-range End.
// 2. Sequences of overlapped/nonoverlapped scan segments that pass through obstacles. Their start
// and end points are not in vertexPoints because they were not vertex-derived, so we find the
// closest bracketing coordinates that are in the vectors.
// 3. Non-extreme vertices in the perpendicular direction (e.g. for a triangle, we add the X's of
// the left and right to the coords, but not of the top).
// 4. Non-rectilinear group side intersections.
return directionIfMiss == 0
? -1
: segmentVector.FindNearest(coord, directionIfMiss)
}
private AddSlotToSegmentIntersections(
parallelItem: ScanSegmentVectorItem,
perpSlot: number,
) {
const perpItem: ScanSegmentVectorItem =
this.perpendicularSegmentVector.Item(perpSlot)
parallelItem.CurrentSegment.AddSparseVertexCoord(perpItem.Coord)
perpItem.AddPerpendicularCoord(parallelItem.Coord)
}
  /**
   * Adds intersection slots along the binary-search path from the full
   * perpendicular-vector range down toward siteSlot, clipped to
   * [startSlot, endSlot]. This keeps the sparse graph connected without
   * adding every slot between start and end.
   */
  private AddBinaryDivisionSlotsToSegmentIntersections(
    parallelItem: ScanSegmentVectorItem,
    startSlot: number,
    siteSlot: number,
    endSlot: number,
  ) {
    // The input parameters' slots have already been added to the segment's coords.
    // If there was no object to the low or high side, then the start or end slot was already
    // the graphbox max (0 or perpSegmentVector.Length, respectively). So start dividing.
    let low = 0
    let high: number = this.perpendicularSegmentVector.Length - 1
    // Terminate when we are one away because we don't have an edge from a point to itself.
    while (high - low > 1) {
      // Overflow-safe midpoint of the current [low, high] bracket.
      const mid: number = low + Math.floor((high - low) / 2)
      // We only use the half of the graph that the site is in, so arbitrarily decide that it is
      // in the lower half if it is at the midpoint.
      if (siteSlot <= mid) {
        high = mid
        // Only record the probe slot when it falls strictly between the site
        // and the segment's end slot.
        if (siteSlot < high && high <= endSlot) {
          this.AddSlotToSegmentIntersections(parallelItem, high)
        }
        continue
      }
      low = mid
      // Mirror case: probe slot strictly between the start slot and the site.
      if (siteSlot > low && low >= startSlot) {
        this.AddSlotToSegmentIntersections(parallelItem, low)
      }
    }
  }
// Create the ScanSegmentTrees that functions as indexes for port-visibility splicing.
private CreateScanSegmentTrees() {
SparseVisibilityGraphGenerator.CreateScanSegmentTree(
this.horizontalScanSegmentVector,
this.HorizontalScanSegments,
)
SparseVisibilityGraphGenerator.CreateScanSegmentTree(
this.verticalScanSegmentVector,
this.VerticalScanSegments,
)
}
private static CreateScanSegmentTree(
segmentVector: ScanSegmentVector,
segmentTree: ScanSegmentTree,
) {
for (const item of segmentVector.Items()) {
for (
let segment = item.FirstSegment;
segment != null;
segment = segment.NextSegment
) {
if (segment.HasVisibility()) {
segmentTree.InsertUnique(segment)
}
}
}
}
}
|
#!/bin/sh
# Embeds the built Pod frameworks into the app bundle's Frameworks folder.
# NOTE(review): functions below use bash-only constructs ([[ ]], PIPESTATUS,
# `local`) under a /bin/sh shebang — this only works where sh is bash; confirm.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the active toolchain/platform.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
install_framework()
{
  # Copies the framework at $1 (or its basename under BUILT_PRODUCTS_DIR)
  # into the app bundle's Frameworks folder, resolving symlinks, stripping
  # invalid architectures, re-signing, and (pre-Xcode 7) embedding the Swift
  # runtime dylibs it links against.
  local source=""
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    source="$1"
  fi
  # Fail loudly instead of letting rsync run with an empty source path
  # (previously `source` was simply left unset when no branch matched).
  if [ -z "${source}" ]; then
    echo "error: install_framework: framework not found: $1" >&2
    return 1
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # use filter instead of exclude so missing patterns don't throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  # Prefer the binary inside the .framework bundle; fall back to a bare dylib.
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  # Skip when no identity is configured or signing is explicitly disabled.
  # Uses chained [ ] && [ ] instead of the obsolescent `test -a` operator.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" ] && [ "${CODE_SIGNING_REQUIRED}" != "NO" ] && [ "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
strip_invalid_archs() {
  # Removes every architecture slice not listed in VALID_ARCHS from the
  # (possibly fat) Mach-O binary at $1, in place.
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    # Match whole words: the previous substring test (*"$arch"*) wrongly kept
    # e.g. "armv7" when VALID_ARCHS only contained "armv7s".
    case " ${VALID_ARCHS} " in
      *" ${arch} "*)
        # Architecture is valid — keep it.
        ;;
      *)
        # Strip non-valid architectures in-place
        lipo -remove "$arch" -output "$binary" "$binary" || exit 1
        stripped="$stripped $arch"
        ;;
    esac
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Both configurations embed the same frameworks, in the same order.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  for framework in \
    Alamofire/Alamofire.framework \
    GradientView/GradientView.framework \
    SDWebImage/SDWebImage.framework \
    SwiftyJSON/SwiftyJSON.framework
  do
    install_framework "$BUILT_PRODUCTS_DIR/$framework"
  done
fi
|
#!/bin/bash
# Tears down the local "edm" docker-compose environment and wipes persisted
# state (alluxio, mysql, elasticsearch) so the next start is from scratch.
# NOTE(review): no `set -e` — each step runs regardless of earlier failures;
# confirm this best-effort behavior is intended.
echo "Tearing down using docker-compose"
docker-compose --project-name edm down
rm -rf alluxio/data
mkdir alluxio/data
rm -rf mysql/data/*
rm -rf elasticsearch/esdata1/nodes
rm -rf elasticsearch/esdata2/nodes
# Remove all dangling docker volumes without prompting.
docker volume prune -f
#find . -name "__pycache__" -type d -exec rm -rf {} +
|
<filename>src/components/ServerSentEvents.js
import React from 'react';
import {observer, inject} from 'mobx-react';
@inject('ssestore') @observer
export default class ServerSentEvents extends React.Component {
constructor(props) {
super(props);
}
componentDidMount() {
//start listening for events
this.props.ssestore.listenForEvents();
}
render() {
return (
<div className="row">
<div className="column">
<h1>Server sent events</h1>
<div className="desc">
<p>This has nothing to do with progressive web apps or service workers.
It's just for fun</p>
</div>
<strong>Data sent from the server:</strong>
<ul>
{this.props.ssestore.items.map((obj, i) => {
return <li className="msg desc" key={obj.id}>{obj.msg}</li>
})}
</ul>
</div>
</div>
)
}
}
|
import { Config } from '../Class/Unit/Config'
import { Pin } from '../Pin'
import { PinOpt } from '../PinOpt'
import { PinOpts } from '../PinOpts'
import { Pins } from '../Pins'
import { Dict } from '../types/Dict'
import { IO } from '../types/IO'
import { None } from '../types/None'
import { $_ } from './$_'
/**
 * Unit interface: a node with typed input pins (I) and output pins (O),
 * supporting pin management, data push/take/peek, error handling, and
 * play/pause lifecycle.
 *
 * NOTE(review): the `peak*` methods are presumably misspellings of `peek*`;
 * renaming would break existing implementers/callers, so left as-is.
 */
export interface U<I = any, O = any> extends $_ {
  // --- hierarchy ---
  setParent(parent: U | null)
  // --- pin setup and configuration ---
  setInputs(inputs: Pins<I>, opts: PinOpts): void
  setPin(name: string, type: IO, pin: Pin<any>, opt: PinOpt)
  setPinIgnored(type: IO, name: string, ignored: boolean): void
  setInputIgnored(name: string, ignore?: boolean): boolean
  setOutputIgnored(name: string, ignore?: boolean): boolean
  setPinRef(type: IO, name: string, ref: boolean): void
  setInputRef(name: string, ref: boolean): void
  setOutputRef(name: string, ref: boolean): void
  setInput(name: string, input: Pin<I[keyof I]>, opt: PinOpt): void
  isPinIgnored(type: IO, name: string): boolean
  swapPin(type: IO, name: string, pin: Pin): void
  addInput(name: string, input: Pin<any>, opt: PinOpt): void
  removeInput(name: string): void
  setOutputs(outputs: Pins<O>, opts: PinOpts)
  setOutput(name: string, output: Pin<any>, opt: PinOpt)
  addOutput(name: string, output: Pin<any>): void
  removeOutput(name: string): void
  removePin(type: IO, name: string)
  // --- pin accessors ---
  getPin(type: IO, pinId: string): Pin<any>
  getInputs(): Pins<Partial<I>>
  getDataInputs(): Pins<Partial<I>>
  getRefInputs(): Pins<Partial<I>>
  getInput(name: string): Pin<any>
  getOutputs(): Pins<Partial<O>>
  getDataOutputs(): Pins<Partial<O>>
  getRefOutputs(): Pins<Partial<O>>
  getOutput(name: string): Pin<any>
  // --- data flow: push into pins, take (consume) or peek (inspect) ---
  push<K extends keyof I>(name: string, data: any): void
  pushInput<K extends keyof I>(name: string, data: I[K]): void
  pushAllInput<K extends keyof I>(data: Dict<I[K]>): void
  pushOutput<K extends keyof O>(name: string, data: O[K]): void
  pushAllOutput<K extends keyof O>(data: Dict<O[K]>): void
  pushAll<K extends keyof I>(data: Dict<I[K]>): void
  takeInput<K extends keyof O>(name: string): O[K]
  takeOutput<K extends keyof O>(name: string): O[K]
  take<K extends keyof O>(name: string): O[K]
  takeAll(): Dict<any>
  peakInput<K extends keyof I>(name: string): I[K]
  peakOutput<K extends keyof O>(name: string): O[K]
  peak<K extends keyof O>(name: string): O[K]
  peakAllOutput(): Dict<any>
  peakAll(): Dict<any>
  // --- pin renaming and introspection ---
  renamePin(type: IO, name: string, newName: string): void
  renameInput(name: string, newName: string): void
  hasRefPinNamed(type: IO, name: string): boolean
  hasRefInputNamed(name: string): boolean
  hasRefOutputNamed(name: string): boolean
  hasPinNamed(type: IO, name: string): boolean
  hasInputNamed(name: string): boolean
  renameOutput(name: string, newName: string): void
  hasOutputNamed(name: string): boolean
  getInputCount(): number
  getOutputCount(): number
  getInputNames(): string[]
  getOutputNames(): string[]
  // --- pin data and constants ---
  setPinData(type: IO, pinId: string, data: any): void
  removePinData(type: IO, pinId: string): void
  setInputConstant(pinId: string, constant: boolean): void
  setOutputConstant(pinId: string, constant: boolean): void
  getCatchErr(): boolean
  getConfig(): Config
  // --- lifecycle ---
  reset(): void
  pause(): void
  play(): void
  paused(): boolean
  getSelfPin(): Pin<U>
  // --- error handling ---
  err(err?: string | Error | None): string | null
  hasErr(): boolean
  getErr(): string | null
  takeErr(): string | null
  destroy(): void
  // --- bulk data snapshots ---
  getPinData(): { input: Dict<any>; output: Dict<any> }
  getInputData(): Dict<any>
  getRefInputData(): Dict<U>
}
|
<!DOCTYPE html>
<html lang="en">
 <head>
     <!-- Declare the encoding early so the parser never has to guess. -->
     <meta charset="utf-8">
     <title>My Page</title>
 </head>
 <body>
    <div class="container">
        <nav>
            <ul>
                <li><a href="home.html">Home</a></li>
                <li><a href="about.html">About</a></li>
            </ul>
        </nav>
        <div class="main-content">
            <!-- Main Page Content Here -->
        </div>
        <div class="sidebar-container1">
            <!-- sidebar 1 content -->
        </div>
        <div class="sidebar-container2">
            <!-- sidebar 2 content -->
        </div>
    </div>
 </body>
</html>
<reponame>syberflea/materials
#!/usr/bin/env python3
# phases.py
import asyncio
async def phase1(callerid: str):
    """Simulate the first phase of work (two seconds) for the given caller."""
    print(f"phase 1 called from {callerid}")
    await asyncio.sleep(2)
    result = "result1"
    return result
async def phase2(callerid: str, arg: str):
    """Simulate the second phase, deriving its result from phase 1's output."""
    print(f"phase 2 called from {callerid}")
    await asyncio.sleep(2)
    # No await needed here - arg is passed from caller.
    return f"result2 derived from {arg}"
async def outer(callerid: str):
    """A wrapper for parameterizing a full coroutine: phase1 then phase2."""
    print(f"outer called from {callerid}")
    first = await phase1(callerid)
    second = await phase2(callerid, first)
    return first, second
async def main():
    """Wrap the coroutines into tasks, run them concurrently, return results."""
    return await asyncio.gather(*(outer(caller) for caller in "ABC"))

if __name__ == "__main__":
    asyncio.run(main())
|
'use strict';
// Declare app level module which depends on views, and components
var donorApp = angular.module('myApp', ['ngRoute']).
config(['$routeProvider','$httpProvider','$sceProvider', function($routeProvider,$httpProvider,$sceProvider) {
  // Allow cross-domain requests and present as a JSON client.
  $httpProvider.defaults.useXDomain = true;
  delete $httpProvider.defaults.headers.common['X-Requested-With'];
  $httpProvider.defaults.headers.common['Accept'] = "application/json";
  // NOTE(review): this caches ALL $http GET responses globally — confirm
  // stale data is acceptable for every endpoint.
  $httpProvider.defaults.cache = true;
  // NOTE(review): disabling Strict Contextual Escaping removes Angular's
  // built-in XSS protection for bound HTML/URLs — confirm this is intended.
  $sceProvider.enabled(false);
  // Route table; unknown paths fall through to the OAuth view.
  $routeProvider.when('/newsFeed', {templateUrl: 'newsFeed/newsFeed.html',controller: 'NewsFeedCtrl' });
  $routeProvider.when('/donors', {templateUrl: 'donors/donors.html' });
  $routeProvider.when('/requirement', {templateUrl: 'requirement/requirement.html'});
  $routeProvider.otherwise({redirectTo:'/',templateUrl:'oAuth/oAuth.html',controller:'OAuthCtrl'});
}]).controller('MainCtrl',['$log','$document','$scope',function($log,$document,$scope){
  $log.debug("MainCtrl::");
}]);
|
from flydra_analysis.a2.calculate_reprojection_errors import \
calculate_reprojection_errors, print_summarize_file
import flydra_analysis.a2.core_analysis as core_analysis
import os, tempfile, shutil
import pkg_resources
DATAFILE2D = pkg_resources.resource_filename('flydra_analysis.a2','sample_datafile-v0.4.28.h5')
DATAFILE3D = pkg_resources.resource_filename('flydra_analysis.a2','sample_datafile-v0.4.28.h5.retracked.h5')
def test_calculate_reprojection_errors():
    """Nose-style generator test: one sub-test per reconstruction source."""
    sources = ('smoothed', 'ML_estimates')
    for from_source in sources:
        yield check_calculate_reprojection_errors, from_source
def check_calculate_reprojection_errors(from_source):
    # Run calculate_reprojection_errors end-to-end on the sample data files,
    # verify it produced the output H5, and print its summary.
    tmpdir = tempfile.mkdtemp()
    try:
        outfile = os.path.join(tmpdir,'retracked.h5')
        calculate_reprojection_errors(h5_filename=DATAFILE2D,
                                      output_h5_filename=outfile,
                                      kalman_filename=DATAFILE3D,
                                      from_source=from_source,
                                      )
        assert os.path.exists(outfile)
        print_summarize_file(outfile)
        # XXX FIXME add some test beyond just running and printing it.

        # close open files
        ca = core_analysis.get_global_CachingAnalyzer()
        ca.close()
    finally:
        # Always remove the scratch directory, even on failure.
        shutil.rmtree(tmpdir)
|
<gh_stars>1-10
"""Plots the generated points to a graph for each value of w"""
import os
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
def plot():
    """Render one 3D trisurf figure per w value (i = 0..20, w = i - 10).

    Each figure overlays 21 surfaces, one per j (colour-coded by x via the
    rainbow colormap), read from csvs/csvNN/fileNN.csv, and is saved as
    plotNN.png inside that csv folder.
    """
    # Saves 21 colours from the rainbow colormap to use for each value of x
    colours = mpl.cm.rainbow(np.linspace(0, 1, 21))
    # Makes a plot for each value of w
    for i in range(20 + 1):
        print(f'Plotting Plot {i}')
        folder_name = os.path.join('csvs', 'csv{:0>2d}'.format(i))
        fig = plt.figure()
        ax1 = fig.add_subplot(1, 1, 1, projection='3d')
        for j in range(20 + 1):
            # Columns 2..4 of the CSV are y, z, f respectively.
            # NOTE(review): columns 0-1 are skipped — presumably indices or
            # other coordinates; confirm against the CSV writer.
            points = np.genfromtxt(os.path.join(folder_name, \
                'file{:0>2d}.csv'.format(j)), delimiter=',')
            y = points[:, 2]
            z = points[:, 3]
            f = points[:, 4]
            ax1.plot_trisurf(y, z, f, color=colours[j])
        ax1.set_xlabel('y', fontsize=15)
        ax1.set_ylabel('z', fontsize=15)
        ax1.set_zlabel('f', fontsize=15)
        # Title encodes w = i - 10 (w ranges over -10..10).
        fig.suptitle('w = {}'.format(i - 10), fontsize=20)
        ax1.view_init(elev=30, azim=-15)
        # Creates the colorbar legend at the bottom
        ax2 = plt.axes([0.03, 0.05, 0.94, 0.02])
        cmap = mpl.cm.rainbow
        norm = mpl.colors.Normalize(vmin=-10, vmax=10)
        cb = mpl.colorbar.ColorbarBase(ax2, cmap=cmap, norm=norm, \
            orientation='horizontal')
        cb.set_label('x', fontsize=15)
        fig.set_size_inches(16, 10)
        # plt.show()
        plt.savefig(os.path.join(folder_name, 'plot{:0>2d}.png'.format(i)))
        # Close the figure to release memory before the next iteration.
        plt.close()

if __name__ == "__main__":
    plot()
|
// Controller for the key-work view: loads/imports PGP keys (from the URL or
// from pasted armored text), and encrypts/decrypts messages with the active
// key via openpgp.js.
// NOTE(review): `key` and loop variable `i` are assigned without var/let and
// so become implicit globals — confirm this is intentional.
pgpApp.controller('KeyWorkCtrl', function ($scope, focus, $state, $stateParams, $uibModal) {
  $scope.key = null;
  $scope.$stateParams = $stateParams;
  $scope.$state = $state;
  // Initialize from the route: either a raw armored key passed in the URL
  // (?pgp=...) or a fingerprint reference to an already-stored key.
  $scope.init = function() {
    if ('pgp' in $stateParams && $stateParams.pgp) {
      $scope.rawkey = decodeURIComponent($stateParams.pgp);
      key = $scope.loadKey_raw();
      if( key ) {
        $scope.key = key;
        //not quite correct for private keys. Why would you be making permalinks for this?
        focus("message");
      }
    } else {
      $scope.key = $scope.findKey($stateParams.key, $stateParams.private);
      if ($scope.isNewKey()) {
        $scope.rawkey = "";
        focus("pgppub");
      } else {
        if ($scope.isPrivateKey()) {
          // Show the public part and keep the private armor separately.
          $scope.rawkey = $scope.key.toPublic().armor();
          $scope.rawkey_private = $scope.key.armor();
          // Locked private keys need a passphrase before use.
          if(!$scope.isDecryptedKey()) {
            focus("passphrase");
          } else {
            focus("pmessage");
          };
        } else {
          $scope.rawkey = $scope.key.armor();
          focus("message");
        }
      }
    }
  };
  //TODO: These don't need to be functions any more. Except isDecrypted maybe
  $scope.isNewKey = function() { return $scope.key == null};
  $scope.isPrivateKey = function() {
    // Before a key is loaded, fall back to the route's `private` flag.
    if (!$scope.key) return $scope.$stateParams.private;
    return $scope.isPrivate($scope.key);
  };
  $scope.isDecryptedKey = function() {
    if($scope.key){
      return ($scope.isDecrypted($scope.key));
    } else { return(false); }
  };
  // Confirm via modal, then broadcast deletion and navigate to a blank key.
  $scope.deleteKey = function() {
    var modalInstance = $uibModal.open({
      animation: $scope.animationsEnabled,
      templateUrl: 'templates/chickenBox.html',
      controller: 'chickenBoxCtrl',
      size: 'lg',
      resolve: {
        content: function () {
          return {
            title : 'Delete key data',
            // Deleting a private key is flagged as dangerous in the dialog.
            danger : $scope.isPrivateKey(),
          };
        }
      }
    });
    modalInstance.result.then(function (result) {
      $scope.$emit('deletekey', $scope.key);
      $scope.$state.go("key", {key:null, private:false});
    }, function () {
      //$log.info('Modal dismissed at: ' + new Date());
    });
  }
  // Build a mailto: link addressed to the key's user-id email with the
  // current result text as the body.
  $scope.mailit = function() {
    //Not bullet-proof but probably good enough.
    var emailMatches = $scope.getUser($scope.key).match(/\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\b/);
    if (!emailMatches) return "";
    var email = emailMatches[0];
    var rt = $scope.resulttext;
    return ("mailto:" + email + "?subject=" + encodeURIComponent("Sent from pgp.help") + "&body=" + encodeURIComponent(rt));
  }
  $scope.encodeURIComponent = function(raw) {
    var r = encodeURIComponent(raw);
    return r;
  }
  // Parse $scope.rawkey as armored key material; returns the last parsed key
  // or null, setting $scope.pgperror accordingly. Emits 'newkey' per key.
  $scope.loadKey_raw = function() {
    var publicKey;
    try {
      var publicKey = openpgp.key.readArmored($scope.rawkey);
    } catch (err) {
      //console.log("Not a key: " + err);
      $scope.pgperror = true;
      return null;
    }
    if (publicKey.err) {
      $scope.pgperror = true;
      return null;
    } else {
      $scope.pgperror = false;
      //Apply this first to get animations to work:
      key = publicKey.keys[publicKey.keys.length - 1];
      $scope.smartfade = "smartfade";
      focus("message");
      //$scope.wasNew = true;
      //Now notify about the new keys.
      for( i = 0; i < publicKey.keys.length; i++) {
        $scope.$emit('newkey', publicKey.keys[i]);
      }
      return key;
    };
  };
  // Parse the pasted key, then navigate to its canonical route.
  $scope.loadKey = function() {
    key = $scope.loadKey_raw();
    if (!key) return;
    var sp = {
      key: $scope.getFingerprint(key),
      private: $scope.isPrivate(key),
    };
    //console.log(sp);
    $scope.$state.go("key", sp);
  };
  // Encrypt $scope.message with the current key (async; updates resulttext).
  $scope.encryptMessage = function() {
    $scope.resulttext = "";
    if ($scope.message && !$scope.isNewKey()) {
      //return "DEC: " + message;
      openpgp.encryptMessage($scope.key, $scope.message).then(function(pgpMessage) {
        $scope.resulttext = pgpMessage;
        $scope.$apply();
      }).catch(function(error) {
        $scope.resulttext = error;
        $scope.$apply();
      });
    }
  };
  // Try to unlock the private key with the entered passphrase; on success,
  // clear the passphrase field and decrypt any pending message.
  $scope.applyPassword = function() {
    $scope.passworderror = false;
    if ($scope.password) {
      var ok = $scope.key.decrypt($scope.password);
      $scope.passworderror = !ok;
      if(ok) {
        $scope.password = "";
        focus('pmessage');
      }
      if(ok && $scope.pmessage) {
        $scope.decryptMessage();
      }
    }
  }
  // Indent every line of `quote` by one space (email-style block quoting).
  $scope.blockquote = function(quote) {
    var out = " " + quote.replace(/\n/g, "\n ");
    return( out );
  }
  // Decrypt $scope.pmessage with the current (unlocked) private key.
  $scope.decryptMessage = function() {
    $scope.resulttext = "";
    $scope.pmessageerror = false;
    if( $scope.isNewKey() ) return;
    if( !$scope.pmessage) return;
    var ctext;
    try {
      ctext = openpgp.message.readArmored($scope.pmessage);
    } catch (err) {
      $scope.resulttext = err.message;
      $scope.pmessageerror = true;
      return;
    }
    // A locked key can't decrypt yet — prompt for the passphrase instead.
    if (!$scope.isDecryptedKey()) {
      focus("passphrase");
      return;
    }
    openpgp.decryptMessage($scope.key, ctext).then( function(plaintext) {
      $scope.resulttext = plaintext;
      $scope.$apply();
    }).catch(function(error ) {
      $scope.resulttext = error.message;
      $scope.$apply();
    });
  };
  $scope.init();
});
|
# Builds a standalone bundle of the mgmail app: copies the app and foam.css
# into BUILD_DIR, then concatenates the FOAM framework sources (order matters:
# core bootstrap first, then views/DAOs, then app files) into one foam.js.
export BUILD_DIR=~/Downloads/mgmail
rm -rf $BUILD_DIR
cp -r . $BUILD_DIR
cp ../../core/foam.css $BUILD_DIR
cat \
  ../../core/stdlib.js \
  ../../core/WeakMap.js \
  ../../core/io.js \
  ../../core/writer.js \
  ../../core/socket.js \
  ../../core/base64.js \
  ../../core/encodings.js \
  ../../core/utf8.js \
  ../../core/parse.js \
  ../../core/event.js \
  ../../core/JSONUtil.js \
  ../../core/XMLUtil.js \
  ../../core/context.js \
  ../../core/FOAM.js \
  ../../core/JSONParser.js \
  ../../core/TemplateUtil.js \
  ../../core/FObject.js \
  ../../core/BootstrapModel.js \
  ../../core/mm1Model.js \
  ../../core/mm2Property.js \
  ../../core/mm3Types.js \
  ../../core/mm4Method.js \
  ../../core/mm6Misc.js \
  ../../js/foam/ui/Window.js \
  ../../core/value.js \
  ../../core/view.js \
  ../../core/layout.js \
  ../../core/cview.js \
  ../../core/cview2.js \
  ../../core/RichTextView.js \
  ../../core/listchoiceview.js \
  ../../core/scroll.js \
  ../../core/mlang.js \
  ../../core/QueryParser.js \
  ../../core/search.js \
  ../../core/async.js \
  ../../core/oam.js \
  ../../core/visitor.js \
  ../../core/messaging.js \
  ../../core/dao.js \
  ../../core/arrayDAO.js \
  ../../core/ClientDAO.js \
  ../../core/diff.js \
  ../../core/SplitDAO.js \
  ../../core/index.js \
  ../../core/StackView.js \
  ../../core/MementoMgr.js \
  ../../core/DAOController.js \
  ../../core/ThreePaneController.js \
  ../../core/experimental/protobufparser.js \
  ../../core/experimental/protobuf.js \
  ../../core/models.js \
  ../../core/touch.js \
  ../../core/oauth.js \
  ../../core/ChromeApp.js \
  ../../core/../apps/mailreader/view.js \
  ../../core/../apps/mailreader/email.js \
  ../../core/../lib/email/email.js \
  ../../core/turntable.js \
  ../../core/CORE.js \
  ../../js/foam/lib/bookmarks/Bookmark.js \
  ../../js/foam/lib/bookmarks/AddBookmarksDialog.js \
  ../../lib/bookmarks/bookmark.js \
  ../../js/foam/ui/md/AppController.js \
  ../../js/foam/ui/md/SharedStyles.js \
  ../../lib/mdui/view.js \
  ../../lib/gmail/ImportedModels.js \
  ../../lib/gmail/dao.js \
  gmail.js \
  compose.js \
  > "$BUILD_DIR/foam.js"
# For code compression, uncomment the following line:
# ~/node_modules/uglify-js/bin/uglifyjs --overwrite "$BUILD_DIR/foam.js"
# ../../core/ChromeStorage.js \
# ../../demos/benchmark_data.json \
# ../../demos/photo.js \
|
def calculate_discount(num_children: int) -> int:
    """Return the discount for a family with the given number of children.

    Schedule:
      * 0-1 children: 0 (no discount)
      * 2 children:   10
      * 3 children:   15
      * 4 children:   18
      * 5+ children:  18 plus 1 per child beyond the fourth

    Previously this raised UnboundLocalError for fewer than two children;
    it now returns 0 in that case.
    """
    if num_children < 2:
        return 0
    if num_children == 2:
        return 10
    if num_children == 3:
        return 15
    if num_children == 4:
        return 18
    # Five or more: base discount plus one per additional child.
    return 18 + (num_children - 4)
#!/usr/bin/env bash
# Build the food_svc image from the current directory and push it to the
# doughyou registry namespace with the "latest" tag.
docker build -t doughyou/food_svc:latest .
docker push doughyou/food_svc:latest
#!/bin/bash
# sync up all of the hosts in the build farm (the "cat farm").
set -e
mkdir -p ~/.ssh/sockets
# hosts:
# imacg3, ibookg3, graphite, emac2, emac3, pbookg4, pbookg42, imacg5 imacg52
# map:
# tiger g3: ibookg3, imacg3
# tiger g4: graphite
# tiger g4e: emac2, emac3
# tiger g5: imacg52
# leopard g4e: pbookg4, pbookg42
# leopard g5: imacg5
# --minimal: stop after syncing user files (skip binpkg/leopard.sh steps).
if test "$1" = "--minimal" ; then
    minimal=1
    shift 1
fi
hosts=${1:-"imacg5 imacg52 emac2 emac3 pbookg4 pbookg42 graphite ibookg3 imacg3"}
uphosts=""
echo "👉 ping"
# make two passes, because sometimes the .local hosts resolve after a ping.
for host in $hosts ; do
    ping -o -t 1 $host >/dev/null 2>&1 &
done
sleep 0.25
# Second pass: only hosts answering now are considered up.
for host in $hosts ; do
    if ping -o -t 1 $host >/dev/null 2>&1 ; then
        uphosts="$uphosts $host"
        echo " ✅ $host is up"
    else
        echo " ❌ $host is down"
    fi
done
cd ~/catfarm
# echo
# echo "👉 root's files"
# for host in $uphosts ; do
#     echo " 🖥 $host"
#     ssh root@$host mkdir -p /var/root/.ssh
#     rsync -ai host_files/all/ $host:/var/root/
#     ssh $host rm -f /var/root/.profile
# done
# echo
# echo "👉 system files"
# for host in $uphosts ; do
#     echo " 🖥 $host"
#     rsync -ai host_files/$host/ssh_host_* root@$host:/etc/
# done
echo
echo "👉 user files"
# Per-host work runs in background subshells; `wait` joins them all.
for host in $uphosts ; do
    echo " 🖥 $host"
    (
        ssh $host mkdir -p \
            /Users/macuser/.ssh/sockets \
            /Users/macuser/Downloads \
            /Users/macuser/bin \
            /Users/macuser/tmp
        rsync -ai host_files/all/ $host:/Users/macuser/
        rsync -ai tmp/ $host:/Users/macuser/tmp/
        ssh $host rm -f /Users/macuser/.profile
    ) &
done
wait
if test -n "$minimal" ; then
    exit 0
fi
echo
echo "👉 pull binpkgs"
for host in $uphosts ; do
    echo " 🖥 $host"
    (
        ssh $host mkdir -p /Users/macuser/Desktop/leopard.sh/binpkgs /Users/macuser/Desktop/tiger.sh/binpkgs
        rsync -ai --update $host:/Users/macuser/Desktop/leopard.sh/binpkgs/ ~/leopard.sh/binpkgs
        # NOTE(review): tiger binpkgs are pulled into ~/leopard.sh/binpkgs —
        # looks like it should be ~/tiger.sh/binpkgs; confirm.
        rsync -ai --update $host:/Users/macuser/Desktop/tiger.sh/binpkgs/ ~/leopard.sh/binpkgs
        ssh $host rm -f '/Users/macuser/Desktop/*.sh/binpkgs/*'
    ) &
done
wait
echo
echo "👉 push leopard.sh"
for host in $uphosts ; do
    echo " 🖥 $host"
    (
        rsync -ai ~/leopard.sh/leopardsh/leopard.sh \
            ~/leopard.sh/tigersh/tiger.sh \
            $host:/usr/local/bin/
        rsync -ai ~/leopard.sh/leopardsh/utils/make-leopardsh-binpkg.sh \
            ~/leopard.sh/leopardsh/utils/rebuild-leopardsh-stales.sh \
            ~/leopard.sh/leopardsh/utils/rebuild-leopardsh-all.sh \
            ~/leopard.sh/tigersh/utils/make-tigersh-binpkg.sh \
            ~/leopard.sh/tigersh/utils/rebuild-tigersh-stales.sh \
            ~/leopard.sh/tigersh/utils/rebuild-tigersh-all.sh \
            ~/leopard.sh/utils/sleep.sh \
            $host:/Users/macuser/bin/
        ssh $host "rm -f /opt/leopard.sh/share/leopard.sh/config.cache/leopard.cache \
            /opt/tiger.sh/share/tiger.sh/config.cache/tiger.cache \
            /opt/tiger.sh/share/tiger.sh/config.cache/disabled.cache"
    ) &
done
wait
#echo
#echo "👉 push distfiles"
#for host in $uphosts ; do
#    echo " 🖥 $host"
#    cd ~/dist
#    rsync -ai --delete *.tar.gz *.tgz *.tar.bz2 *.tar.xz *.dmg *.zip *.pem $host:/Users/macuser/Downloads/
#done
# NOTE(review): this exit makes the --setup section below unreachable —
# presumably parked deliberately; confirm or remove the dead code.
exit 0
echo
echo "👉 [tiger|leopard].sh --setup"
for host in $uphosts ; do
    echo " 🖥 $host"
    (
        ssh $host 'test "$(uname -r | cut -d. -f1)" = "8" && tiger.sh --setup || true'
        ssh $host 'test "$(uname -r | cut -d. -f1)" = "9" && leopard.sh --setup || true'
    ) &
done
wait
|
// Performs a GET request to `url` with userId/key as query parameters and
// injects the HTML response into resultContainer (expected to be a
// jQuery-like object exposing .html()).
// NOTE(review): the response is inserted as HTML — safe only if the server
// response is trusted; confirm.
function liveSearch(url, userId, resultContainer, key) {
  // Create a new XMLHttpRequest object
  var xhr = new XMLHttpRequest();
  // Encode the parameters so special characters (&, =, spaces, unicode)
  // cannot corrupt the query string. Previously they were concatenated raw.
  var query = '?userId=' + encodeURIComponent(userId) +
    '&key=' + encodeURIComponent(key);
  // Set up the request
  xhr.open('GET', url + query, true);
  // Define the callback function
  xhr.onload = function () {
    if (xhr.status >= 200 && xhr.status < 300) {
      // Request successful, update the resultContainer with the response
      resultContainer.html(xhr.responseText);
    } else {
      // Request failed, handle the error
      console.error('Live search request failed: ' + xhr.status);
    }
  };
  // Handle network errors
  xhr.onerror = function () {
    console.error('Network error during live search request');
  };
  // Send the request
  xhr.send();
}
#!/bin/sh
# Builds the ssoca documentation site with hugo and commits the rendered
# output. Runs from a CI task directory containing repo/ and artifacts/.
set -eu
task_dir="$PWD"
cd hugo-site
./bin/generate-metalink-artifacts-data.sh "file://$task_dir/artifacts/ssoca-final"
./bin/git-fetch.sh "$task_dir/repo" # avoid stale concourse resource caches
./bin/generate-repo-tags-data.sh "$task_dir/repo"
# accidental lightweight tag; manual, pre-CI
echo '  v0.8.0: 2018-01-07 22:40:00 -0800' >> data/repo/tags.yml
# Highest semver among the repo tags (leading "v" and trailing ":" stripped).
latest_version=$( grep '^  ' data/repo/tags.yml | awk '{ print $1 }' | sed -e 's/^v//' -e 's/:$//' | sort -rV | head -n1 )
cat > config.local.yml <<EOF
baseURL: "https://dpb587.github.io/ssoca"
googleAnalytics: "UA-37464314-3"
params:
  releaseVersionLatest: "$latest_version"
EOF
hugo \
  --config="config.yml,$task_dir/repo/docs/config.yml,config.local.yml" \
  --contentDir="$task_dir/repo/docs" \
  --destination="$task_dir/public"
./bin/git-commit.sh "$task_dir/public"
|
<filename>src/actions/index.js
import {saveState} from '../utils'
import {initialState} from '../store'
// Re-initializes the Beacon widget with the id typed into the input.
// Destroys the previous instance first; optionally auto-opens the new one.
// Returns the partial state to merge; no-ops if the id didn't change.
export const updateBeaconId = (state, event, {isAutoOpen}) => {
  const beaconId = event.target.value
  if (state.beaconId === beaconId) return
  console.log('Beacon DevTools: Initializing', beaconId)
  window.Beacon('destroy')
  window.Beacon('init', beaconId)
  if (isAutoOpen) {
    window.Beacon('open')
  }
  const open = isAutoOpen
  const nextState = {...state, beaconId, open}
  saveState(nextState)
  return {
    beaconId,
    open,
  }
}
// Applies a new widget color.
// NOTE(review): unlike the sibling actions, this neither persists via
// saveState nor returns a state patch — confirm color is intentionally
// not tracked in app state.
export const updateColor = (state, color) => {
  window.Beacon('config', {color})
}
// Updates the launcher's display text, persists and returns the new state.
export const updateDisplayText = (state, event) => {
  const displayText = event.target.value
  window.Beacon('config', {display: {text: displayText}})
  const nextState = {...state, displayText}
  saveState(nextState)
  return nextState
}
// Updates the launcher's icon image, persists and returns the new state.
export const updateIconImage = (state, event) => {
  const iconImage = event.target.value
  window.Beacon('config', {display: {iconImage}})
  const nextState = {...state, iconImage}
  saveState(nextState)
  return nextState
}
// Changes the display style; re-sends text and iconImage alongside it since
// the `display` config object is applied as a whole.
export const updateStyle = (state, event) => {
  const style = event.target.value
  const nextStyleState = {
    display: {style, text: state.displayText, iconImage: state.iconImage},
  }
  window.Beacon('config', nextStyleState)
  const nextState = {...state, style}
  saveState(nextState)
  return nextState
}
// Merges arbitrary size/position props into state and persists them.
export const updateSizePosition = (state, props) => {
  const nextState = {...state, ...props}
  saveState(nextState)
  return nextState
}
// On Enter, runs a Beacon search for the typed query (no state change).
export const updateSearch = (state, event) => {
  if (event.key === 'Enter') {
    const query = event.target.value
    window.Beacon('search', query)
  }
}
// On Enter, navigates the widget to the typed route.
export const navigateToRoute = (state, event) => {
  if (event.key === 'Enter') {
    return updateRoute(state, event)
  }
}
// Navigates the Beacon widget to the given route (no state change).
export const updateRoute = (state, event) => {
  const route = event.target.value
  window.Beacon('navigate', route)
}
// Marks the widget open in state (persisted); no-op if already open.
export const openBeacon = state => {
  if (state.open) return
  const nextState = {...state, open: true}
  saveState(nextState)
  return {
    open: true,
  }
}
// Marks the widget closed in state (persisted); no-op if already closed.
export const closeBeacon = state => {
  if (!state.open) return
  const nextState = {...state, open: false}
  saveState(nextState)
  return {
    open: false,
  }
}
// Toggles the widget: drives the Beacon API and syncs the open flag.
export const toggleOpen = state => {
  if (!state.open) {
    window.Beacon('open')
    return openBeacon(state)
  } else {
    window.Beacon('close')
    return closeBeacon(state)
  }
}
// The following toggles flip one Beacon config flag and return the matching
// state patch. They are not persisted via saveState.
export const toggleChat = state => {
  window.Beacon('config', {
    messaging: {
      chatEnabled: !state.chatEnabled,
    },
  })
  return {
    chatEnabled: !state.chatEnabled,
  }
}
export const toggleDocs = state => {
  window.Beacon('config', {
    docsEnabled: !state.docsEnabled,
  })
  return {
    docsEnabled: !state.docsEnabled,
  }
}
export const toggleMessaging = state => {
  window.Beacon('config', {
    messagingEnabled: !state.messagingEnabled,
  })
  return {
    messagingEnabled: !state.messagingEnabled,
  }
}
export const toggleShowGetInTouch = state => {
  window.Beacon('config', {
    messaging: {
      contactForm: {
        showGetInTouch: !state.showGetInTouch,
      },
    },
  })
  return {
    showGetInTouch: !state.showGetInTouch,
  }
}
// Resets the Beacon widget and restores the persisted state to defaults.
export const resetBeacon = state => {
  window.Beacon('reset')
  saveState(initialState)
  // Return a copy of the initial state so the app state is actually reset.
  // (Previously this returned {initialState} — an object with a single
  // "initialState" key — which left the real state fields untouched.)
  return {...initialState}
}
// Logs the current Beacon user out (no state change).
export const logout = state => {
  window.Beacon('logout')
}
// Sets which modal (if any) is currently shown.
export const setActiveModal = (state, activeModal) => {
  return {
    activeModal,
  }
}
// Updates a single Beacon UI label by id, mirroring it into state.labels.
export const updateLabel = (state, labelProps) => {
  const {id, value} = labelProps
  if (!id) return
  window.Beacon('config', {
    labels: {
      [id]: value,
    },
  })
  return {
    labels: {
      ...state.labels,
      [id]: value,
    },
  }
}
// Toggles the translation panel visibility flag.
export const toggleTranslation = state => {
  return {
    showTranslation: !state.showTranslation,
  }
}
// Toggles the minimized flag and persists the whole next state.
export const toggleMinimize = state => {
  const nextState = {...state, isMinimized: !state.isMinimized}
  saveState(nextState)
  return {
    isMinimized: !state.isMinimized,
  }
}
|
def towers_of_hanoi(numDiscs, startPeg=1, endPeg=3):
    """Print the moves that solve Towers of Hanoi for numDiscs discs.

    Pegs are numbered 1-3; the spare peg is derived as 6 - startPeg - endPeg.
    """
    if not numDiscs:
        return
    sparePeg = 6 - startPeg - endPeg
    # Move the smaller tower out of the way, move the largest disc, then
    # move the smaller tower on top of it.
    towers_of_hanoi(numDiscs - 1, startPeg, sparePeg)
    print("Move disc %d from peg %d to peg %d" % (numDiscs, startPeg, endPeg))
    towers_of_hanoi(numDiscs - 1, sparePeg, endPeg)
<reponame>AVassilev98/ece350
/* @brief: rtx.h User API header file. Do not modify
 * @author: <NAME>
 * @date: 2020/09/03
 */
#ifndef _RTX_H_
#define _RTX_H_
/*----- Includes -----*/
#include "common.h"
/* Route user-mode calls through SVC 0 to the kernel implementations. */
#define __SVC_0 __svc_indirect(0)
/* NOTE(review): the mem_init/mem_alloc/mem_count_extfrag macro bodies end
 * with a semicolon, so invocations followed by `;` expand to an extra empty
 * statement (and break single-statement if/else bodies). Header is marked
 * "do not modify", so only flagging it here. */
/* Initialize the kernel memory allocator with the given block size/algorithm. */
extern int k_mem_init(size_t blk_size, int algo);
#define mem_init(blk_size, algo) _mem_init((U32)k_mem_init, blk_size, algo);
extern int _mem_init(U32 p_func, size_t blk_size, int algo) __SVC_0;
/* Allocate `size` bytes from the kernel heap. */
extern void *k_mem_alloc(size_t size);
#define mem_alloc(size) _mem_alloc((U32)k_mem_alloc, size);
extern void *_mem_alloc(U32 p_func, size_t size) __SVC_0;
/* Free a pointer previously returned by mem_alloc. */
extern int k_mem_dealloc(void *);
#define mem_dealloc(ptr) _mem_dealloc((U32)k_mem_dealloc, ptr)
extern int _mem_dealloc(U32 p_func, void *ptr) __SVC_0;
/* Count external fragments smaller than `size`. */
extern int k_mem_count_extfrag(size_t size);
#define mem_count_extfrag(size) _mem_count_extfrag((U32)k_mem_count_extfrag, size);
extern int _mem_count_extfrag(U32 p_func, size_t size) __SVC_0;
#endif // !_RTX_H_
|
import * as express from 'express';
// NOTE(review): `multer` is imported but never used in this module --
// presumably left over or needed by a sibling revision; confirm before removing.
import * as multer from 'multer';
// Controller holding the actual search handlers (title/place lookups).
const searchCtrl = require('./search.ctrl');
// Router exposing read-only search endpoints.
const search = express.Router();
search.use(express.json());
search.get('/title', searchCtrl.title);
search.get('/place', searchCtrl.place);
module.exports = search;
|
#!/bin/bash
set -e
# Run a command repeatedly (every 5s) until it succeeds or ~10 minutes
# elapse. Returns the command's last exit code (0 on success).
# Temporarily disables `set -e` so a failing attempt does not kill the script.
function retry() {
  local END
  local EXIT_CODE=0
  END=$(($(date +%s) + 600))
  while (( $(date +%s) < END )); do
    set +e
    "$@"
    EXIT_CODE=$?
    set -e
    if [[ ${EXIT_CODE} == 0 ]]; then
      break
    fi
    sleep 5
  done
  return ${EXIT_CODE}
}
# Echo the container id of the hadoop-master compose service.
function hadoop_master_container(){
environment_compose ps -q hadoop-master
}
# Succeeds once HiveServer2 is reported RUNNING inside hadoop-master.
function check_hadoop() {
docker exec $(hadoop_master_container) supervisorctl status hive-server2 | grep -i running
}
# Stop hadoop services the product tests do not exercise, to free resources.
function stop_unnecessary_hadoop_services() {
HADOOP_MASTER_CONTAINER=$(hadoop_master_container)
docker exec ${HADOOP_MASTER_CONTAINER} supervisorctl status
docker exec ${HADOOP_MASTER_CONTAINER} supervisorctl stop mapreduce-historyserver
docker exec ${HADOOP_MASTER_CONTAINER} supervisorctl stop zookeeper
}
# Run a command in a detached application-runner container, stream its logs
# until it finishes, and propagate the container's exit code as the return.
function run_in_application_runner_container() {
local CONTAINER_NAME=$( environment_compose run -d application-runner "$@" )
echo "Showing logs from $CONTAINER_NAME:"
docker logs -f $CONTAINER_NAME
return $( docker inspect --format '{{.State.ExitCode}}' $CONTAINER_NAME )
}
# Succeeds once presto answers a trivial query (SHOW CATALOGS lists hive).
function check_presto() {
run_in_application_runner_container \
java -jar "/docker/volumes/presto-cli/presto-cli-executable.jar" \
${CLI_ARGUMENTS} \
--execute "SHOW CATALOGS" | grep -i hive
}
# Launch the tempto product-test suite inside an application-runner container
# and return its exit code. The suite runs in the background and is waited on
# so signal traps can still fire while tests execute.
function run_product_tests() {
local REPORT_DIR="${PRODUCT_TESTS_ROOT}/target/test-reports"
rm -rf "${REPORT_DIR}"
mkdir -p "${REPORT_DIR}"
run_in_application_runner_container \
java "-Djava.util.logging.config.file=/docker/volumes/conf/tempto/logging.properties" \
-Duser.timezone=Asia/Kathmandu \
${TLS_CERTIFICATE} \
-jar "/docker/volumes/presto-product-tests/presto-product-tests-executable.jar" \
--report-dir "/docker/volumes/test-reports" \
--config-local "/docker/volumes/tempto/tempto-configuration-local.yaml" \
"$@" \
&
PRODUCT_TESTS_PROCESS_ID=$!
wait ${PRODUCT_TESTS_PROCESS_ID}
local PRODUCT_TESTS_EXIT_CODE=$?
#make the files in $REPORT_DIR modifiable by everyone, as they were created by root (by docker)
run_in_application_runner_container chmod -R 777 "/docker/volumes/test-reports"
return ${PRODUCT_TESTS_EXIT_CODE}
}
# docker-compose down is not enough because it ignores services created with
# the "run" command, so stop and remove those containers explicitly.
function stop_application_runner_containers() {
  local ENVIRONMENT=$1
  # Stop any application-runner containers that are still running.
  APPLICATION_RUNNER_CONTAINERS=$(environment_compose ps -q application-runner)
  for CONTAINER_NAME in ${APPLICATION_RUNNER_CONTAINERS}
  do
    echo "Stopping: ${CONTAINER_NAME}"
    docker stop "${CONTAINER_NAME}"
    echo "Container stopped: ${CONTAINER_NAME}"
  done
  echo "Removing dead application-runner containers"
  # $(...) instead of legacy backticks; match both dead and exited containers.
  local CONTAINERS=$(docker ps -aq --no-trunc --filter status=dead --filter status=exited --filter name=common_application-runner)
  for CONTAINER in ${CONTAINERS};
  do
    docker rm -v "${CONTAINER}"
  done
}
# Bring down the docker-compose containers of every known environment.
function stop_all_containers() {
local ENVIRONMENT
for ENVIRONMENT in $(getAvailableEnvironments)
do
stop_docker_compose_containers ${ENVIRONMENT}
done
}
# Stop everything belonging to one environment: first the ad-hoc
# application-runner containers, then the compose-managed services.
function stop_docker_compose_containers() {
local ENVIRONMENT=$1
RUNNING_CONTAINERS=$(environment_compose ps -q)
if [[ ! -z ${RUNNING_CONTAINERS} ]]; then
# stop application runner containers started with "run"
stop_application_runner_containers ${ENVIRONMENT}
# stop containers started with "up", removing their volumes
# Some containers (SQL Server) fail to stop on Travis after running the tests. We don't have an easy way to
# reproduce this locally. Since all the tests complete successfully, we ignore this failure.
environment_compose down -v || true
fi
echo "Docker compose containers stopped: [$ENVIRONMENT]"
}
# Pull every image referenced by the environment's compose file, quietly.
function prefetch_images_silently() {
local IMAGES=$( docker_images_used )
for IMAGE in $IMAGES
do
echo "Pulling docker image [$IMAGE]"
docker pull $IMAGE > /dev/null
done
}
# List the unique docker images referenced by the compose configuration.
function docker_images_used() {
environment_compose config | grep 'image:' | awk '{ print $2 }' | sort | uniq
}
# Dispatch docker-compose commands through the current environment's wrapper.
function environment_compose() {
"${DOCKER_CONF_LOCATION}/${ENVIRONMENT}/compose.sh" "$@"
}
# Tear down containers (unless asked to keep them alive) and reap the
# background `docker logs` tail processes.
function cleanup() {
stop_application_runner_containers ${ENVIRONMENT}
if [[ "${LEAVE_CONTAINERS_ALIVE_ON_EXIT}" != "true" ]]; then
stop_docker_compose_containers ${ENVIRONMENT}
fi
# Ensure that the logs processes are terminated.
# In most cases after the docker containers are stopped, logs processes must be terminated.
# However when the `LEAVE_CONTAINERS_ALIVE_ON_EXIT` is set, docker containers are not being terminated.
# Redirection of system error is supposed to hide the `process does not exist` and `process terminated` messages
if [[ ! -z ${HADOOP_LOGS_PID} ]]; then
kill ${HADOOP_LOGS_PID} 2>/dev/null || true
fi
if [[ ! -z ${PRESTO_LOGS_PID} ]]; then
kill ${PRESTO_LOGS_PID} 2>/dev/null || true
fi
# docker logs processes are being terminated as soon as docker container are stopped
# wait for docker logs termination
wait 2>/dev/null || true
}
# Signal handler: clear the traps, run cleanup once, exit with 130
# (conventional "terminated by SIGINT" status).
function terminate() {
trap - INT TERM EXIT
set +e
cleanup
exit 130
}
# Echo the environment names available under DOCKER_CONF_LOCATION,
# excluding the shared "files" and "common" directories.
function getAvailableEnvironments() {
for i in $(ls -d $DOCKER_CONF_LOCATION/*/); do echo ${i%%/}; done \
| grep -v files | grep -v common | xargs -n1 basename
}
source ${BASH_SOURCE%/*}/locations.sh
ENVIRONMENT=$1
# Get the list of valid environments
# Validate the requested environment: it must provide a compose.sh wrapper.
if [[ ! -f "$DOCKER_CONF_LOCATION/$ENVIRONMENT/compose.sh" ]]; then
echo "Usage: run_on_docker.sh <`getAvailableEnvironments | tr '\n' '|'`> <product test args>"
exit 1
fi
shift 1
# Pick the presto services to start for this topology.
PRESTO_SERVICES="presto-master"
if [[ "$ENVIRONMENT" == "multinode" ]]; then
PRESTO_SERVICES="${PRESTO_SERVICES} presto-worker"
elif [[ "$ENVIRONMENT" == "multinode-tls" ]]; then
PRESTO_SERVICES="${PRESTO_SERVICES} presto-worker-1 presto-worker-2"
fi
# CLI endpoint; the TLS environment talks HTTPS with a dedicated keystore.
CLI_ARGUMENTS="--server presto-master:8080"
if [[ "$ENVIRONMENT" == "multinode-tls" ]]; then
CLI_ARGUMENTS="--server https://presto-master.docker.cluster:7778 --keystore-path /docker/volumes/conf/presto/etc/docker.cluster.jks --keystore-password 123456"
fi
# check docker and docker compose installation
docker-compose version
docker version
# Start from a clean slate: no leftover containers from earlier runs.
stop_all_containers
if [[ "$CONTINUOUS_INTEGRATION" == 'true' ]]; then
prefetch_images_silently
# This has to be done after fetching the images
# or will present stale / no data for images that changed.
echo "Docker images versions:"
docker_images_used | xargs -n 1 docker inspect --format='ID: {{.ID}}, tags: {{.RepoTags}}'
fi
# catch terminate signals
trap terminate INT TERM EXIT
# External (non-presto) services required by each environment.
if [[ "$ENVIRONMENT" == "singlenode" || "$ENVIRONMENT" == "multinode" ]]; then
EXTERNAL_SERVICES="hadoop-master mysql postgres cassandra"
elif [[ "$ENVIRONMENT" == "singlenode-sqlserver" ]]; then
EXTERNAL_SERVICES="hadoop-master sqlserver"
elif [[ "$ENVIRONMENT" == "singlenode-ldap" ]]; then
EXTERNAL_SERVICES="hadoop-master ldapserver"
else
EXTERNAL_SERVICES="hadoop-master"
fi
environment_compose up -d ${EXTERNAL_SERVICES}
# start docker logs for the external services
environment_compose logs --no-color -f ${EXTERNAL_SERVICES} &
HADOOP_LOGS_PID=$!
# wait until hadoop processes is started
retry check_hadoop
stop_unnecessary_hadoop_services
# start presto containers
environment_compose up -d ${PRESTO_SERVICES}
# start docker logs for presto containers
environment_compose logs --no-color -f ${PRESTO_SERVICES} &
PRESTO_LOGS_PID=$!
# wait until presto is started
retry check_presto
# run product tests
set +e
run_product_tests "$@"
EXIT_CODE=$?
set -e
# execution finished successfully
# disable trap, run cleanup manually
trap - INT TERM EXIT
cleanup
exit ${EXIT_CODE}
|
#!/usr/bin/env bash
# Configure the Go workspace: keep GOPATH under ~/.go and expose its
# installed binaries on PATH.
export GOPATH=$HOME/.go
export PATH=$PATH:$GOPATH/bin
|
def type_checker(x):
    """Print 'Number' when ``x`` is an int or float, otherwise 'String'.

    Note: bool is a subclass of int, so booleans are reported as 'Number'.
    """
    if isinstance(x, (int, float)):
        print('Number')
    else:
        print('String')


if __name__ == '__main__':
    # Demo call. The previous module-level `type_checker(x)` raised a
    # NameError because `x` was never defined; guard the demo instead.
    type_checker(3.14)
#include <iostream>
#include <vector>

using namespace std;
// Return the smallest positive integer absent from arr[0..size-1].
// The answer is always in [1, size + 1], so values outside that range are
// irrelevant. Runs in O(size) time and space.
// (The previous single forward pass was wrong: for {2, 1} it returned 2
// because the candidate is never re-checked after being bumped.)
int findfirstmissingpositive(int arr[], int size)
{
    // seen[v] == true iff v occurs in the array (only 1..size matters;
    // index size+1 stays false and terminates the scan below).
    std::vector<bool> seen(size + 2, false);
    for (int i = 0; i < size; i++)
    {
        if (arr[i] >= 1 && arr[i] <= size)
        {
            seen[arr[i]] = true;
        }
    }
    int missing_min = 1;
    while (seen[missing_min])
    {
        missing_min++;
    }
    return missing_min;
}
// Exercise findfirstmissingpositive on a sample array and print the result
// (expected: 2 for {3, 4, -1, 1}).
int main()
{
int arr[] = {3, 4, -1, 1};
int size = sizeof(arr) / sizeof(arr[0]);
int missing_min = findfirstmissingpositive(arr, size);
cout << missing_min << endl;
return 0;
}
<reponame>nabeelkhan/Oracle-DBA-Life<filename>INFO/Books Codes/Oracle Wait Interface A Practical Guide to Performance Diagnostics & Tuning/Chapter5_page129_1.sql
-- Per-session totals of 'physical writes direct' (writes that bypass the
-- buffer cache), with hours connected derived from v$session.logon_time;
-- sessions with the largest counts sort last.
select a.name, b.sid, b.value,
round((sysdate - c.logon_time) * 24) hours_connected
from v$statname a, v$sesstat b, v$session c
where b.sid = c.sid
and a.statistic# = b.statistic#
and b.value > 0
and a.name = 'physical writes direct'
order by b.value;
|
import Iterable from '../iteration/iterable';
import Iterator from '../iteration/iterator';
import Comparer from './comparer';
import iterableSymbol from '../iteration/iterable-symbol';
import assertType from '../utils/assert-type';
import assertNotNull from '../utils/assert-not-null';
import buffer from '../utils/buffer';
import extend from '../utils/extend';
import mixin from '../utils/mixin';
/**
 * An Iterable whose elements are sorted lazily by one or more key selectors.
 * @param {Iterable} source The sequence to order.
 * @param {Function} keySelector Extracts the sort key from each element.
 * @param {Comparer=} comparer Compares keys; normalized via Comparer.from.
 * @param {Boolean} descending Sort direction for this key.
 * @param {OrderedIterable=} parent Previous ordering when chaining thenBy.
 */
export default function OrderedIterable(source, keySelector, comparer, descending, parent) {
assertNotNull(source);
assertType(keySelector, Function);
comparer = Comparer.from(comparer);
var sorter = new OrderedIterableSorter(keySelector, comparer, descending);
if (parent) {
// Chain after the parent's sorter so earlier keys take precedence.
sorter = parent.sorter.create(sorter);
}
Iterable.call(this, source);
this.sorter = sorter;
}
/**
 * One level of sort criteria. `next` is the tie-breaking sorter consulted
 * when two keys compare equal (undefined at the deepest level).
 */
function OrderedIterableSorter(keySelector, comparer, descending, next) {
this.keySelector = keySelector;
this.comparer = comparer;
this.descending = descending;
this.next = next;
}
extend(OrderedIterable, Iterable, {
/**
* Performs a subsequent ordering of the elements in a sequence in ascending order by using a comparer.
* @param {Function} keySelector A function to extract a key from each element. eg. function(item)
* @param {Comparer=} comparer A Comparer to compare keys.
* @returns {OrderedIterable}
*/
thenBy: function (keySelector, comparer) {
return new OrderedIterable(this[iterableSymbol], keySelector, comparer, false, this);
},
/**
* Performs a subsequent ordering of the elements in a sequence in descending order by using a comparer.
* @param {Function} keySelector A function to extract a key from each element. eg. function(item)
* @param {Comparer=} comparer A Comparer to compare keys.
* @returns {OrderedIterable}
*/
thenByDescending: function (keySelector, comparer) {
return new OrderedIterable(this[iterableSymbol], keySelector, comparer, true, this);
},
toString: function () {
return '[Ordered Iterable]';
},
// Buffer the source, compute the sorted index map once, then lazily yield
// elements in sorted order without moving them inside the buffer.
'@@iterator': function () {
var index = 0,
arr = buffer(this[iterableSymbol]),
len = arr.length,
map = this.sorter.sort(arr);
return new Iterator(function () {
if (index < len) {
return {
value: arr[map[index++]],
done: false
};
}
return {
done: true
};
});
}
});
mixin(OrderedIterableSorter.prototype, {
// Return a new sorter chain with `next` appended as the deepest tie-breaker.
create: function (next) {
return new OrderedIterableSorter(
this.keySelector,
this.comparer,
this.descending,
this.next ? this.next.create(next) : next
);
},
// Cache the sort key of every element at this level, then recurse so each
// tie-breaker level caches its own keys too.
computeKeys: function (elements) {
var count = elements.length,
keys = new Array(count),
selector = this.keySelector;
for (var i = 0; i < count; i++) {
keys[i] = selector(elements[i]);
}
if (this.next !== undefined) {
// NOTE(review): `count` is passed but computeKeys declares only one
// parameter, so the extra argument is ignored -- confirm it can be dropped.
this.next.computeKeys(elements, count);
}
this.keys = keys;
},
// Compare two elements by their cached keys; on ties defer to the next
// sorter level, and finally to the original indices (keeps the sort stable).
compareKeys: function (index1, index2) {
var c = this.comparer.compare(this.keys[index1], this.keys[index2]);
if (c === 0) {
if (this.next === undefined) {
return index1 - index2;
}
return this.next.compareKeys(index1, index2);
}
return this.descending ? -c : c;
},
// Produce an index map such that elements[map[0]], elements[map[1]], ... is
// the sorted order; the elements array itself is never reordered.
sort: function (elements) {
var count = elements.length,
map = new Array(count);
this.computeKeys(elements);
for (var i = 0; i < count; i++) {
map[i] = i;
}
this.quickSort(map, 0, count - 1);
return map;
},
// In-place quicksort over the index map ordered by compareKeys. Recurses
// into the smaller partition and loops on the larger one to bound stack depth.
quickSort: function (map, left, right) {
do {
var i = left,
j = right,
x = map[i + ((j - i) >> 1)];
do {
while (i < map.length && this.compareKeys(x, map[i]) > 0) {
i++;
}
while (j >= 0 && this.compareKeys(x, map[j]) < 0) {
j--;
}
if (i > j) {
break;
}
if (i < j) {
var temp = map[i];
map[i] = map[j];
map[j] = temp;
}
i++;
j--;
} while (i <= j);
if (j - left <= right - i) {
if (left < j) {
this.quickSort(map, left, j);
}
left = i;
}
else {
if (i < right) {
this.quickSort(map, i, right);
}
right = j;
}
} while (left < right);
}
});
|
#!/bin/bash
#github-action genshdoc
# Find the name of the folder the scripts are in
# set -a auto-exports every variable assigned while it is active, so the
# child scripts invoked below inherit these locations.
set -a
SCRIPT_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
SCRIPTS_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"/scripts
CONFIGS_DIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"/configs
set +a
echo -ne "
 _________ _ _________ _______ _______
\__ _/|\ /|( \ \__ __/( ___ )( ____ \
 ) ( | ) ( || ( ) ( | ( ) || ( \/
 | | | | | || | | | | | | || (_____
 | | | | | || | | | | | | |(_____ )
 | | | | | || | | | | | | | ) |
|\_) ) | (___) || (____/\___) (___| (___) |/\____) |
(____/ (_______)(_______/\_______/(_______)\_______)
 v1.1
"
# Each stage runs in a subshell with stdout+stderr tee'd to its own log.
# Stage 0: interactive setup questions, then base preinstall on the live ISO.
( bash $SCRIPT_DIR/scripts/startup.sh )|& tee startup.log
source $CONFIGS_DIR/setup.conf
( bash $SCRIPT_DIR/scripts/0-preinstall.sh )|& tee 0-preinstall.log
# Stages 1-3 run inside the new system via arch-chroot; the user stage is
# skipped for headless "server" installs.
( arch-chroot /mnt $HOME/ArchTitus/scripts/1-setup.sh )|& tee 1-setup.log
if [[ ! $DESKTOP_ENV == server ]]; then
( arch-chroot /mnt /usr/bin/runuser -u $USERNAME -- /home/$USERNAME/ArchTitus/scripts/2-user.sh )|& tee 2-user.log
fi
( arch-chroot /mnt $HOME/ArchTitus/scripts/3-post-setup.sh )|& tee 3-post-setup.log
# Keep the install logs in the new user's home directory.
cp -v *.log /mnt/home/$USERNAME
echo -ne "
 _________ _ _________ _______ _______
\__ _/|\ /|( \ \__ __/( ___ )( ____ \
 ) ( | ) ( || ( ) ( | ( ) || ( \/
 | | | | | || | | | | | | || (_____
 | | | | | || | | | | | | |(_____ )
 | | | | | || | | | | | | | ) |
|\_) ) | (___) || (____/\___) (___| (___) |/\____) |
(____/ (_______)(_______/\_______/(_______)\_______)
 v1.1
 Done - Please Eject Install Media and Reboot
"
|
package com.alipay.api.domain;
import java.util.List;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
/**
 * Model for querying plugin reference relations of inner mini-apps.
 *
 * @author auto create
 * @since 1.0, 2021-06-01 10:44:34
 */
public class AlipayOpenMiniInnerappPluginrelationQueryModel extends AlipayObject {
private static final long serialVersionUID = 2562491358341794893L;
/**
 * List of mini-program app IDs.
 */
@ApiListField("mini_app_id_list")
@ApiField("string")
private List<String> miniAppIdList;
/**
 * Page number.
 */
@ApiField("page_num")
private Long pageNum;
/**
 * Page size.
 */
@ApiField("page_size")
private Long pageSize;
/**
 * Plugin ID.
 */
@ApiField("plugin_id")
private String pluginId;
/**
 * Plugin usage-relation statuses; values include WORKING/WAIT_WORKING/STOP_WORKING/EXECUTING.
 */
@ApiListField("plugin_relation_status_list")
@ApiField("string")
private List<String> pluginRelationStatusList;
/**
 * Run type; values include ONLINE/TRIAL/REVIEW/DEBUG.
 */
@ApiField("run_model_type")
private String runModelType;
/**
 * Whether to show beta (invitation-test) information.
 */
@ApiField("show_beta_info")
private Boolean showBetaInfo;
public List<String> getMiniAppIdList() {
return this.miniAppIdList;
}
public void setMiniAppIdList(List<String> miniAppIdList) {
this.miniAppIdList = miniAppIdList;
}
public Long getPageNum() {
return this.pageNum;
}
public void setPageNum(Long pageNum) {
this.pageNum = pageNum;
}
public Long getPageSize() {
return this.pageSize;
}
public void setPageSize(Long pageSize) {
this.pageSize = pageSize;
}
public String getPluginId() {
return this.pluginId;
}
public void setPluginId(String pluginId) {
this.pluginId = pluginId;
}
public List<String> getPluginRelationStatusList() {
return this.pluginRelationStatusList;
}
public void setPluginRelationStatusList(List<String> pluginRelationStatusList) {
this.pluginRelationStatusList = pluginRelationStatusList;
}
public String getRunModelType() {
return this.runModelType;
}
public void setRunModelType(String runModelType) {
this.runModelType = runModelType;
}
public Boolean getShowBetaInfo() {
return this.showBetaInfo;
}
public void setShowBetaInfo(Boolean showBetaInfo) {
this.showBetaInfo = showBetaInfo;
}
}
|
package com.honyum.elevatorMan.net;
import com.honyum.elevatorMan.net.base.RequestBean;
import com.honyum.elevatorMan.net.base.RequestBody;
import com.honyum.elevatorMan.net.base.RequestHead;
/**
 * Request bean for fetching the list of property addresses of a branch.
 * Carries the standard request head plus a body holding the branch id.
 */
public class PropertyAddressListRequest extends RequestBean {

    private RequestHead head;
    private PalReqBody body;

    public RequestHead getHead() {
        return this.head;
    }

    public void setHead(RequestHead head) {
        this.head = head;
    }

    public PalReqBody getBody() {
        return this.body;
    }

    public void setBody(PalReqBody body) {
        this.body = body;
    }

    /** Request body: identifies the branch whose addresses are requested. */
    public class PalReqBody extends RequestBody {

        private String branchId;

        public String getBranchId() {
            return this.branchId;
        }

        public void setBranchId(String branchId) {
            this.branchId = branchId;
        }
    }
}
|
<reponame>ECRomaneli/c2.js
// Attach a diagnostic listener for every media-related event on each
// <video>/<audio> element; logs event type, buffered range count and
// readyState (plus the raw event when window.showEvent is set).
const mediaElements = document.querySelectorAll('video, audio');
for (const media of mediaElements) {
    const eventNames = 'abort canplay canplaythrough durationchange emptied ended error loadeddata loadedmetadata loadstart pause play playing progress ratechange seeked seeking stalled suspend volumechange waiting';
    const logHandler = function (e) {
        console.warn(e.type, media.buffered.length, media.readyState, window.showEvent ? e : '');
    };
    for (const eventName of eventNames.split(' ')) {
        media.addEventListener(eventName, logHandler);
    }
}
<gh_stars>0
import EncodedChannel from '../../src/channel/encodedChannel';
import {OutputChannel} from '../../src/channel';
// Unit tests for EncodedChannel: it must run the encoder before delegating
// to the wrapped output channel, and forward close() to it.
describe('An encoded channel', () => {
let outputChannel: OutputChannel<string>;
let channel: EncodedChannel<string, string>;
beforeEach(() => {
// Fresh mock output channel and an encoder that appends "-pong".
outputChannel = {
close: jest.fn(),
publish: jest.fn(),
};
channel = new EncodedChannel<string, string>(outputChannel, async (input: string) => `${input}-pong`);
});
afterEach(() => {
jest.restoreAllMocks();
});
test('should encode and then publish the message', async () => {
await channel.publish('ping');
expect(outputChannel.publish).toBeCalledWith('ping-pong');
});
test('should close the output channel on close', async () => {
await channel.close();
expect(outputChannel.close).toBeCalled();
});
});
|
# CRUD actions for Post records.
class ApplicationController < ActionController::Base
# Build a post from the whitelisted params and save it; on failure,
# re-render the new-post form.
def create_post
@post = Post.new(post_params)
if @post.save
flash[:success] = "Post successfully created!"
redirect_to @post
else
render :new
end
end
# Apply the whitelisted params to an existing post.
# NOTE(review): @post is not assigned in this action -- presumably a
# before_action outside this excerpt loads it; confirm.
def update_post
if @post.update(post_params)
flash[:success] = "Post successfully updated!"
redirect_to @post
else
render :edit
end
end
# Destroy the post and return to the index.
# NOTE(review): relies on @post being loaded elsewhere (see update_post).
def delete_post
@post.destroy
flash[:success] = "Post successfully deleted!"
redirect_to posts_path
end
# Load a single post by id for display.
def show_post
@post = Post.find(params[:id])
end
# Load all posts for the index view.
def list_posts
@posts = Post.all
end
private
# Strong parameters: only title and content may be mass-assigned.
def post_params
params.require(:post).permit(:title, :content)
end
end
package com.yin.springboot.mybatis.server;
import java.util.List;
import com.yin.springboot.mybatis.domain.SmsFlashPromotionProductRelation;
/**
 * Service for CRUD operations on flash-promotion / product relation records.
 */
public interface SmsFlashPromotionProductRelationService{
/** Delete the record with the given primary key. */
int deleteByPrimaryKey(Long id);
/** Insert a full record. */
int insert(SmsFlashPromotionProductRelation record);
/** Insert the record, or update it if it already exists. */
int insertOrUpdate(SmsFlashPromotionProductRelation record);
/** Insert-or-update writing only the non-null fields. */
int insertOrUpdateSelective(SmsFlashPromotionProductRelation record);
/** Insert only the non-null fields of the record. */
int insertSelective(SmsFlashPromotionProductRelation record);
/** Fetch a record by primary key. */
SmsFlashPromotionProductRelation selectByPrimaryKey(Long id);
/** Update only the non-null fields, matched by primary key. */
int updateByPrimaryKeySelective(SmsFlashPromotionProductRelation record);
/** Update the full record, matched by primary key. */
int updateByPrimaryKey(SmsFlashPromotionProductRelation record);
/** Update a batch of records. */
int updateBatch(List<SmsFlashPromotionProductRelation> list);
/** Insert a batch of records. */
int batchInsert(List<SmsFlashPromotionProductRelation> list);
}
|
// Format a Date as "YYYY/MM/DD HH:MM:SS" with every field zero-padded
// to at least two digits.
function formatTime(date) {
  const datePart = [date.getFullYear(), date.getMonth() + 1, date.getDate()]
  const timePart = [date.getHours(), date.getMinutes(), date.getSeconds()]
  return datePart.map(formatNumber).join('/') + ' ' + timePart.map(formatNumber).join(':')
}

// Left-pad a number to at least two digits (e.g. 7 -> "07", 12 -> "12").
function formatNumber(n) {
  const s = n.toString()
  return s[1] ? s : '0' + s
}
// Replace every occurrence of oldStr in source with newStr.
// oldStr is treated literally (no regex). Implemented with split/join so it
// always terminates: the previous loop re-scanned its own output and spun
// forever whenever newStr contained oldStr (e.g. replaceAll('aaa','a','aa'))
// or oldStr was the empty string.
function replaceAll(source, oldStr, newStr) {
  return source.split(oldStr).join(newStr)
}
// Public API of this utility module.
module.exports = {
formatTime: formatTime,
replaceAll: replaceAll
}
|
import {
HttpHandler,
HttpInterceptor,
HttpRequest,
HttpHeaders
} from "@angular/common/http";
import { Injectable } from "@angular/core";
import { AuthService } from "src/app/services/auth.service";
/**
 * HTTP interceptor that stamps every outgoing request with a JSON
 * Content-Type and a "key=<token>" Authorization header taken from
 * AuthService. Requests are cloned (HttpRequest is immutable).
 */
@Injectable()
export class AuthInterceptor implements HttpInterceptor {
  constructor(private authService: AuthService) {}

  intercept(req: HttpRequest<any>, next: HttpHandler) {
    const token = this.authService.getToken();
    const clonedRequest = req.clone({
      headers: new HttpHeaders({
        "Content-Type": "application/json",
        Authorization: "key=" + token
      })
    });
    return next.handle(clonedRequest);
  }
}
|
#!/bin/sh
# Print usage/help text for this wrapper script.
Help()
{
# Display Help
echo "Program to train and evaluate a 2D U-Net segmentation model"
echo
echo "Syntax: main_prediction_RCSeg.sh [--options]"
echo "options:"
echo "--dir_src Path to the Folder that contains the source code"
echo "--file_input Scan to segment"
echo "--dir_preproc Folder to save the preprocessed images"
echo "--dir_predicted Folder to save the predicted images"
echo "--dir_output Folder to save the postprocessed images"
echo "--width Width of the images"
echo "--height Height of the images"
echo "--tool_name Tool name [MandSeg | RCSeg]"
echo "--threshold Threshold to use to binarize scans in postprocess. (-1 for otsu | [0;255] for a specific value)"
echo "-h|--help Print this Help"
echo
}
# Parse long-option command-line flags; each option consumes one value.
while [ "$1" != "" ]; do
case $1 in
--dir_src ) shift
dir_src=$1;;
--file_input ) shift
file_input=$1;;
--dir_preproc ) shift
dir_preproc=$1;;
--dir_predicted ) shift
dir_predicted=$1;;
--dir_output ) shift
dir_output=$1;;
--path_model ) shift
path_model=$1;;
--min_percentage ) shift
min_percentage=$1;;
--max_percentage ) shift
max_percentage=$1;;
--width ) shift
width=$1;;
--height ) shift
height=$1;;
--tool_name ) shift
tool_name=$1;;
--threshold ) shift
threshold=$1;;
-h | --help )
Help
exit;;
* )
echo ' - Error: Unsupported flag'
Help
exit 1
esac
shift
done
# Fill in defaults for any option not given on the command line.
dir_src="${dir_src:-./CBCT_seg/src}"
# dir_input="${dir_input:-./Scans}"
dir_preproc="${dir_preproc:-/app/data/preproc}"
dir_predicted="${dir_predicted:-/app/data/predicted}"
dir_output="${dir_output:-$(dirname $file_input)}"
min_percentage="${min_percentage:-55}"
max_percentage="${max_percentage:-90}"
width="${width:-512}"
height="${height:-512}"
tool_name="${tool_name:-RCSeg}"
threshold="${threshold:-100}"

# FIX: the trailing backslash after the last option of each command below
# used to continue the line into the NEXT python3 invocation, splicing all
# three stages into a single command; they must run as separate steps.

# 1) Preprocess the input scan (resize + intensity windowing).
python3 $dir_src/py/preprocess.py \
    --image $file_input \
    --desired_width $width \
    --desired_height $height \
    --min_percentage $min_percentage \
    --max_percentage $max_percentage \
    --out $dir_preproc

# 2) Run the trained segmentation model on the preprocessed slices.
python3 $dir_src/py/predict_seg.py \
    --dir_predict $dir_preproc \
    --load_model $path_model \
    --width $width \
    --height $height \
    --out $dir_predicted

# 3) Postprocess (binarize/reassemble) the predictions.
python3 $dir_src/py/postprocess.py \
    --dir $dir_predicted \
    --original_dir $(dirname $file_input) \
    --tool $tool_name \
    --threshold $threshold \
    --out $dir_output
|
#!/bin/bash
clear
#凑合解决方案
#wget -qO - https://patch-diff.githubusercontent.com/raw/openwrt/openwrt/pull/3875.patch | patch -p1
#使用O2级别的优化
sed -i 's/Os/O3/g' include/target.mk
#更新feed
./scripts/feeds update -a
./scripts/feeds install -a -f
#irqbalance
sed -i 's/0/1/g' feeds/packages/utils/irqbalance/files/irqbalance.config
#remove annoying snapshot tag
sed -i 's,-SNAPSHOT,,g' include/version.mk
sed -i 's,-SNAPSHOT,,g' package/base-files/image-config.in
##必要的patch
wget -P target/linux/generic/pending-5.4 https://github.com/immortalwrt/immortalwrt/raw/master/target/linux/generic/hack-5.4/312-arm64-cpuinfo-Add-model-name-in-proc-cpuinfo-for-64bit-ta.patch
#patch jsonc
patch -p1 < ../PATCH/new/package/use_json_object_new_int64.patch
#patch dnsmasq
patch -p1 < ../PATCH/new/package/dnsmasq-add-filter-aaaa-option.patch
patch -p1 < ../PATCH/new/package/luci-add-filter-aaaa-option.patch
cp -f ../PATCH/new/package/900-add-filter-aaaa-option.patch ./package/network/services/dnsmasq/patches/900-add-filter-aaaa-option.patch
#(从这行开始接下来4个操作全是和fullcone相关的,不需要可以一并注释掉,但极不建议
# Patch Kernel 以解决fullcone冲突
pushd target/linux/generic/hack-5.4
wget https://github.com/coolsnowwolf/lede/raw/master/target/linux/generic/hack-5.4/952-net-conntrack-events-support-multiple-registrant.patch
popd
#Patch FireWall 以增添fullcone功能
mkdir package/network/config/firewall/patches
wget -P package/network/config/firewall/patches/ https://github.com/immortalwrt/immortalwrt/raw/master/package/network/config/firewall/patches/fullconenat.patch
# Patch LuCI 以增添fullcone开关
patch -p1 < ../PATCH/new/package/luci-app-firewall_add_fullcone.patch
#FullCone 相关组件
cp -rf ../openwrt-lienol/package/network/fullconenat ./package/network/fullconenat
#(从这行开始接下来3个操作全是和SFE相关的,不需要可以一并注释掉,但极不建议
# Patch Kernel 以支援SFE
pushd target/linux/generic/hack-5.4
wget https://github.com/coolsnowwolf/lede/raw/master/target/linux/generic/hack-5.4/953-net-patch-linux-kernel-to-support-shortcut-fe.patch
popd
# Patch LuCI 以增添SFE开关
patch -p1 < ../PATCH/new/package/luci-app-firewall_add_sfe_switch.patch
# SFE 相关组件
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/shortcut-fe package/lean/shortcut-fe
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/fast-classifier package/lean/fast-classifier
cp -f ../PATCH/duplicate/shortcut-fe ./package/base-files/files/etc/init.d
##获取额外package
#(不用注释这里的任何东西,这不会对提升action的执行速度起到多大的帮助
#(不需要的包直接修改seed就好
#upx
sed -i '/patchelf pkgconf/i\tools-y += ucl upx' ./tools/Makefile
sed -i '\/autoconf\/compile :=/i\$(curdir)/upx/compile := $(curdir)/ucl/compile' ./tools/Makefile
svn co https://github.com/immortalwrt/immortalwrt/branches/master/tools/upx tools/upx
svn co https://github.com/immortalwrt/immortalwrt/branches/master/tools/ucl tools/ucl
#luci-app-compressed-memory
#wget -O- https://patch-diff.githubusercontent.com/raw/openwrt/openwrt/pull/2840.patch | patch -p1
wget -O- https://github.com/NoTengoBattery/openwrt/commit/40f1d5.patch | patch -p1
wget -O- https://github.com/NoTengoBattery/openwrt/commit/a83a0b.patch | patch -p1
wget -O- https://github.com/NoTengoBattery/openwrt/commit/6d5fb4.patch | patch -p1
mkdir ./package/new
cp -rf ../NoTengoBattery/feeds/luci/applications/luci-app-compressed-memory ./package/new/luci-app-compressed-memory
sed -i 's,include ../..,include $(TOPDIR)/feeds/luci,g' ./package/new/luci-app-compressed-memory/Makefile
cp -rf ../NoTengoBattery/package/system/compressed-memory ./package/system/compressed-memory
#R8168
svn co https://github.com/immortalwrt/immortalwrt/branches/master/package/ctcgfw/r8168 package/new/r8168
patch -p1 < ../PATCH/new/main/r8168-fix_LAN_led-for_r4s-from_TL.patch
sed -i '/r8169/d' ./target/linux/rockchip/image/armv8.mk
#更换cryptodev-linux
rm -rf ./package/kernel/cryptodev-linux
svn co https://github.com/openwrt/openwrt/trunk/package/kernel/cryptodev-linux package/kernel/cryptodev-linux
#更换Node版本
rm -rf ./feeds/packages/lang/node
svn co https://github.com/nxhack/openwrt-node-packages/trunk/node feeds/packages/lang/node
rm -rf ./feeds/packages/lang/node-arduino-firmata
svn co https://github.com/nxhack/openwrt-node-packages/trunk/node-arduino-firmata feeds/packages/lang/node-arduino-firmata
rm -rf ./feeds/packages/lang/node-cylon
svn co https://github.com/nxhack/openwrt-node-packages/trunk/node-cylon feeds/packages/lang/node-cylon
rm -rf ./feeds/packages/lang/node-hid
svn co https://github.com/nxhack/openwrt-node-packages/trunk/node-hid feeds/packages/lang/node-hid
rm -rf ./feeds/packages/lang/node-homebridge
svn co https://github.com/nxhack/openwrt-node-packages/trunk/node-homebridge feeds/packages/lang/node-homebridge
rm -rf ./feeds/packages/lang/node-serialport
svn co https://github.com/nxhack/openwrt-node-packages/trunk/node-serialport feeds/packages/lang/node-serialport
rm -rf ./feeds/packages/lang/node-serialport-bindings
svn co https://github.com/nxhack/openwrt-node-packages/trunk/node-serialport-bindings feeds/packages/lang/node-serialport-bindings
rm -rf ./feeds/packages/lang/node-yarn
svn co https://github.com/nxhack/openwrt-node-packages/trunk/node-yarn feeds/packages/lang/node-yarn
ln -sf ../../../feeds/packages/lang/node-yarn ./package/feeds/packages/node-yarn
#luci-app-freq
svn co https://github.com/immortalwrt/immortalwrt/branches/master/package/lean/luci-app-cpufreq package/lean/luci-app-cpufreq
#京东签到
git clone --depth 1 https://github.com/jerrykuku/node-request.git package/new/node-request
git clone --depth 1 https://github.com/jerrykuku/luci-app-jd-dailybonus.git package/new/luci-app-jd-dailybonus
pushd package/new/luci-app-jd-dailybonus
sed -i 's/wget-ssl/wget/g' root/usr/share/jd-dailybonus/newapp.sh luasrc/controller/jd-dailybonus.lua
popd
#arpbind
svn co https://github.com/QiuSimons/OpenWrt_luci-app/trunk/lean/luci-app-arpbind package/lean/luci-app-arpbind
#Adbyby
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/luci-app-adbyby-plus package/lean/luci-app-adbyby-plus
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/adbyby package/lean/adbyby
#访问控制
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/luci-app-accesscontrol package/lean/luci-app-accesscontrol
cp -rf ../PATCH/duplicate/luci-app-control-weburl ./package/new/luci-app-control-weburl
#uu加速
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/luci-app-uugamebooster package/lean/luci-app-uugamebooster
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/uugamebooster package/lean/uugamebooster
#AutoCore
svn co https://github.com/immortalwrt/immortalwrt/branches/master/package/lean/autocore package/lean/autocore
rm -rf ./feeds/packages/utils/coremark
svn co https://github.com/immortalwrt/packages/trunk/utils/coremark feeds/packages/utils/coremark
#迅雷快鸟
git clone --depth 1 https://github.com/garypang13/luci-app-xlnetacc.git package/lean/luci-app-xlnetacc
#DDNS
rm -rf ./feeds/packages/net/ddns-scripts
# DIY package section of an OpenWrt build script: removes stock feed packages
# and checks out replacements (lean/immortalwrt/passwall trees), then patches
# defaults. FIX: the two bare `mkdir` calls now use `-p` so re-running the
# script (or a missing parent directory) does not abort the build.
rm -rf ./feeds/luci/applications/luci-app-ddns
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/ddns-scripts_aliyun package/lean/ddns-scripts_aliyun
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/ddns-scripts_dnspod package/lean/ddns-scripts_dnspod
svn co https://github.com/openwrt/packages/branches/openwrt-18.06/net/ddns-scripts feeds/packages/net/ddns-scripts
svn co https://github.com/openwrt/luci/branches/openwrt-18.06/applications/luci-app-ddns feeds/luci/applications/luci-app-ddns
#Pandownload
svn co https://github.com/immortalwrt/immortalwrt/branches/master/package/lean/pandownload-fake-server package/lean/pandownload-fake-server
#oled
git clone -b master --depth 1 https://github.com/NateLol/luci-app-oled.git package/new/luci-app-oled
#NetEase Cloud Music unlock
git clone --depth 1 https://github.com/immortalwrt/luci-app-unblockneteasemusic.git package/new/UnblockNeteaseMusic
#Scheduled reboot
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/luci-app-autoreboot package/lean/luci-app-autoreboot
#Argon theme
git clone -b master --depth 1 https://github.com/jerrykuku/luci-theme-argon.git package/new/luci-theme-argon
git clone -b master --depth 1 https://github.com/jerrykuku/luci-app-argon-config.git package/new/luci-app-argon-config
#Edge theme
git clone -b master --depth 1 https://github.com/garypang13/luci-theme-edge.git package/new/luci-theme-edge
#AdGuard
cp -rf ../openwrt-lienol/package/diy/luci-app-adguardhome ./package/new/luci-app-adguardhome
rm -rf ./feeds/packages/net/adguardhome
svn co https://github.com/openwrt/packages/trunk/net/adguardhome feeds/packages/net/adguardhome
sed -i '/\t)/a\\t$(STAGING_DIR_HOST)/bin/upx --lzma --best $(GO_PKG_BUILD_BIN_DIR)/AdGuardHome' ./feeds/packages/net/adguardhome/Makefile
sed -i '/init/d' feeds/packages/net/adguardhome/Makefile
#ChinaDNS
git clone -b luci --depth 1 https://github.com/pexcn/openwrt-chinadns-ng.git package/new/luci-app-chinadns-ng
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/chinadns-ng package/new/chinadns-ng
#moschinadns
svn co https://github.com/QiuSimons/openwrt-packages/branches/main/mos-chinadns package/new/mos-chinadns
svn co https://github.com/QiuSimons/openwrt-packages/branches/main/luci-app-moschinadns package/new/luci-app-moschinadns
#VSSR
git clone -b master --depth 1 https://github.com/jerrykuku/luci-app-vssr.git package/lean/luci-app-vssr
git clone -b master --depth 1 https://github.com/jerrykuku/lua-maxminddb.git package/lean/lua-maxminddb
sed -i 's,default n,default y,g' package/lean/luci-app-vssr/Makefile
sed -i '/V2ray:v2ray/d' package/lean/luci-app-vssr/Makefile
sed -i '/plugin:v2ray/d' package/lean/luci-app-vssr/Makefile
sed -i '/result.encrypt_method/a\result.fast_open = "1"' package/lean/luci-app-vssr/root/usr/share/vssr/subscribe.lua
sed -i 's,ispip.clang.cn/all_cn.txt,raw.sevencdn.com/QiuSimons/Chnroute/master/dist/chnroute/chnroute.txt,g' package/lean/luci-app-vssr/luasrc/controller/vssr.lua
sed -i 's,ispip.clang.cn/all_cn.txt,raw.sevencdn.com/QiuSimons/Chnroute/master/dist/chnroute/chnroute.txt,g' package/lean/luci-app-vssr/root/usr/share/vssr/update.lua
#SSRP
svn co https://github.com/fw876/helloworld/trunk/luci-app-ssr-plus package/lean/luci-app-ssr-plus
rm -rf ./package/lean/luci-app-ssr-plus/po/zh_Hans
pushd package/lean
wget -qO - https://patch-diff.githubusercontent.com/raw/fw876/helloworld/pull/426.patch | patch -p1
popd
sed -i 's,default n,default y,g' package/lean/luci-app-ssr-plus/Makefile
sed -i 's,Xray:xray ,Xray:xray-core ,g' package/lean/luci-app-ssr-plus/Makefile
sed -i '/V2ray:v2ray/d' package/lean/luci-app-ssr-plus/Makefile
sed -i '/plugin:v2ray/d' package/lean/luci-app-ssr-plus/Makefile
sed -i '/result.encrypt_method/a\result.fast_open = "1"' package/lean/luci-app-ssr-plus/root/usr/share/shadowsocksr/subscribe.lua
sed -i 's,ispip.clang.cn/all_cn.txt,raw.sevencdn.com/QiuSimons/Chnroute/master/dist/chnroute/chnroute.txt,g' package/lean/luci-app-ssr-plus/root/etc/init.d/shadowsocksr
sed -i 's,YW5vbnltb3Vz/domain-list-community/release/gfwlist.txt,Loyalsoldier/v2ray-rules-dat/release/gfw.txt,g' package/lean/luci-app-ssr-plus/root/etc/init.d/shadowsocksr
#SSRP dependencies
rm -rf ./feeds/packages/net/xray-core
rm -rf ./feeds/packages/net/kcptun
rm -rf ./feeds/packages/net/shadowsocks-libev
rm -rf ./feeds/packages/net/proxychains-ng
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/shadowsocksr-libev package/lean/shadowsocksr-libev
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/pdnsd-alt package/lean/pdnsd
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/kcptun package/lean/kcptun
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/srelay package/lean/srelay
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/microsocks package/lean/microsocks
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/dns2socks package/lean/dns2socks
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/redsocks2 package/lean/redsocks2
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/proxychains-ng package/lean/proxychains-ng
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/ipt2socks package/lean/ipt2socks
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/simple-obfs package/lean/simple-obfs
svn co https://github.com/coolsnowwolf/packages/trunk/net/shadowsocks-libev package/lean/shadowsocks-libev
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/trojan package/lean/trojan
svn co https://github.com/fw876/helloworld/trunk/naiveproxy package/lean/naiveproxy
#PASSWALL
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/luci-app-passwall package/new/luci-app-passwall
sed -i 's,default n,default y,g' package/new/luci-app-passwall/Makefile
sed -i '/V2ray:v2ray/d' package/new/luci-app-passwall/Makefile
sed -i '/plugin:v2ray/d' package/new/luci-app-passwall/Makefile
cp -f ../PATCH/new/script/move_2_services.sh ./package/new/luci-app-passwall/move_2_services.sh
pushd package/new/luci-app-passwall
bash move_2_services.sh
popd
rm -rf ./feeds/packages/net/https-dns-proxy
svn co https://github.com/Lienol/openwrt-packages/trunk/net/https-dns-proxy feeds/packages/net/https-dns-proxy
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/tcping package/new/tcping
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/trojan-go package/new/trojan-go
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/brook package/new/brook
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/trojan-plus package/new/trojan-plus
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/ssocks package/new/ssocks
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/xray-core package/new/xray-core
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/v2ray package/new/v2ray
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/v2ray-plugin package/new/v2ray-plugin
svn co https://github.com/xiaorouji/openwrt-passwall/trunk/xray-plugin package/new/xray-plugin
#luci-app-cpulimit
cp -rf ../PATCH/duplicate/luci-app-cpulimit ./package/lean/luci-app-cpulimit
svn co https://github.com/immortalwrt/immortalwrt/branches/master/package/ntlf9t/cpulimit package/lean/cpulimit
#Subscription converter
svn co https://github.com/immortalwrt/immortalwrt/branches/master/package/ctcgfw/subconverter package/new/subconverter
svn co https://github.com/immortalwrt/immortalwrt/branches/master/package/ctcgfw/jpcre2 package/new/jpcre2
svn co https://github.com/immortalwrt/immortalwrt/branches/master/package/ctcgfw/rapidjson package/new/rapidjson
svn co https://github.com/immortalwrt/immortalwrt/branches/master/package/ctcgfw/duktape package/new/duktape
#RAM cleaner
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/luci-app-ramfree package/lean/luci-app-ramfree
#USB printer
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/luci-app-usb-printer package/lean/luci-app-usb-printer
#Bandwidth monitoring
git clone -b master --depth 1 https://github.com/brvphoenix/wrtbwmon.git package/new/wrtbwmon
git clone -b master --depth 1 https://github.com/brvphoenix/luci-app-wrtbwmon.git package/new/luci-app-wrtbwmon
#Traffic supervision (netdata)
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/luci-app-netdata package/lean/luci-app-netdata
#OpenClash
git clone -b master --depth 1 https://github.com/vernesong/OpenClash.git package/new/luci-app-openclash
#SeverChan
git clone -b master --depth 1 https://github.com/tty228/luci-app-serverchan.git package/new/luci-app-serverchan
#SmartDNS
rm -rf ./feeds/packages/net/smartdns
mkdir -p package/new/smartdns
wget -P package/new/smartdns/ https://github.com/HiGarfield/lede-17.01.4-Mod/raw/master/package/extra/smartdns/Makefile
sed -i 's,files/etc/config,$(PKG_BUILD_DIR)/package/openwrt/files/etc/config,g' ./package/new/smartdns/Makefile
#Internet app filter
git clone -b master --depth 1 https://github.com/destan19/OpenAppFilter.git package/new/OpenAppFilter
#ipv6-helper
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/ipv6-helper package/lean/ipv6-helper
#IPSEC
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/luci-app-ipsec-vpnd package/lean/luci-app-ipsec-vpnd
#Zerotier
svn co https://github.com/immortalwrt/immortalwrt/branches/master/package/lean/luci-app-zerotier package/lean/luci-app-zerotier
cp -f ../PATCH/new/script/move_2_services.sh ./package/lean/luci-app-zerotier/move_2_services.sh
pushd package/lean/luci-app-zerotier
bash move_2_services.sh
popd
rm -rf ./feeds/packages/net/zerotier/files/etc/init.d/zerotier
#UPnP (rolled back to work around problems on some broken devices)
rm -rf ./feeds/packages/net/miniupnpd
svn co https://github.com/coolsnowwolf/packages/trunk/net/miniupnpd feeds/packages/net/miniupnpd
#KMS
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/luci-app-vlmcsd package/lean/luci-app-vlmcsd
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/vlmcsd package/lean/vlmcsd
#frp
rm -rf ./feeds/luci/applications/luci-app-frps
rm -rf ./feeds/luci/applications/luci-app-frpc
rm -rf ./feeds/packages/net/frp
rm -f ./package/feeds/packages/frp
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/luci-app-frps package/lean/luci-app-frps
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/luci-app-frpc package/lean/luci-app-frpc
svn co https://github.com/coolsnowwolf/lede/trunk/package/lean/frp package/lean/frp
#Oray Peanut Shell (phtunnel)
svn co https://github.com/teasiu/dragino2/trunk/package/teasiu/luci-app-phtunnel package/new/luci-app-phtunnel
svn co https://github.com/QiuSimons/dragino2-teasiu/trunk/package/teasiu/luci-app-oray package/new/luci-app-oray
svn co https://github.com/teasiu/dragino2/trunk/package/teasiu/phtunnel package/new/phtunnel
#Tencent DDNS
svn co https://github.com/QiuSimons/OpenWrt_luci-app/trunk/others/luci-app-tencentddns package/lean/luci-app-tencentddns
#Aliyun DDNS
svn co https://github.com/kenzok8/openwrt-packages/trunk/luci-app-aliddns package/new/luci-app-aliddns
#WOL
svn co https://github.com/sundaqiang/openwrt-packages/trunk/luci-app-services-wolplus package/new/luci-app-services-wolplus
#Docker
#sed -i 's/+docker/+docker \\\n\t+dockerd/g' ./feeds/luci/applications/luci-app-dockerman/Makefile
rm -rf ./feeds/luci/applications/luci-app-dockerman
cp -rf ../openwrt-lienol/package/diy/luci-app-dockerman ./feeds/luci/applications/luci-app-dockerman
#1. Modify default IP
sed -i 's/192.168.1.1/192.168.5.1/g' package/base-files/files/bin/config_generate
#2. Modify default password
sed -i 's/root::0:0:99999:7:::/root:$1$S2TRFyMU$E8fE0RRKR0jNadn3YLrSQ0:18690:0:99999:7:::/g' package/base-files/files/etc/shadow
#3. Modify default hostname
sed -i 's/OpenWrt/SUPERouter/g' package/base-files/files/bin/config_generate
##Final housekeeping
#Lets Fuck
mkdir -p package/base-files/files/usr/bin
cp -f ../PATCH/new/script/fuck package/base-files/files/usr/bin/fuck
cp -f ../PATCH/new/script/chinadnslist package/base-files/files/usr/bin/chinadnslist
#Raise the max conntrack entry count
sed -i 's/16384/65536/g' package/kernel/linux/files/sysctl-nf-conntrack.conf
#Generate default configuration and caches
rm -rf .config
exit 0
|
# Student record example: build a dictionary keyed by attribute name.
# FIX: removed a stray "|" token that had been appended to the last line
# and made the file invalid Python.
studName = "John"
# Add Dictionary Here
studentDetails = {"name": studName}
#!/bin/bash
# This script is used for easier developing of cdw containers: it builds the
# three level images locally so docker-compose uses these builds instead of
# pulling them online.
for level in gitea organization player; do
    docker build "$level" --tag "sscsctf/levels:ctf-cda-$level"
done
#docker push sscsctf/levels:ctf-cda-gitea
#docker push sscsctf/levels:ctf-cda-organization
#docker push sscsctf/levels:ctf-cda-player
|
require 'dotenv/load'
require 'faraday'
require 'json'
# Picks a random Lord of the Rings quote (and the character who said it)
# from the-one-api.dev. Requires the LOTR_API environment variable.
class Lotr_quote
  def initialize
    @quote = get_random_lotr_quote
    @author = get_quote_author
  end

  # Fetches every quote and memoizes one chosen at random.
  def get_random_lotr_quote
    @quote = fetch_docs("https://the-one-api.dev/v2/quote").sample
  end

  # Looks up the character record for the memoized quote's speaker.
  def get_quote_author
    character_id = @quote["character"]
    @author = fetch_docs("https://the-one-api.dev/v2/character/#{character_id}").first
  end

  def quote_text
    @quote["dialog"]
  end

  def author_name_race
    "~ #{@author["name"]}, the #{@author["race"]}"
  end

  private

  # Performs an authenticated GET against the API and returns the parsed
  # "docs" array from the JSON response body.
  def fetch_docs(url)
    auth_header = { 'Authorization' => "Bearer #{ENV['LOTR_API']}" }
    response = Faraday.get(url, {}, auth_header)
    JSON.parse(response.body)["docs"]
  end
end
|
#!/bin/bash
# Downloads a model file from Google Drive, passing the "large file"
# confirmation token that Drive returns for files it cannot virus-scan.
fileid="1lspYx8gs63X0RxS9N9s4VPIKFNdRpahB"
outfile="mobilenet_v2_224_dm05_integer_quant.tflite"
# First request stores the session cookie and yields the confirmation page.
page=$(curl -c ./cookie -s -L "https://drive.google.com/uc?export=download&id=${fileid}")
# Extract the confirm=... token (echo is left unquoted on purpose to match
# the original whitespace-collapsing behaviour).
confirm=$(echo ${page} | grep -Po '(confirm=[a-zA-Z0-9\-_]+)')
curl -Lb ./cookie "https://drive.google.com/uc?export=download&${confirm}&id=${fileid}" -o "${outfile}"
echo Download finished.
|
<filename>http-event-collector/http-event-collector.js<gh_stars>1-10
var SplunkLogger = require("splunk-logging").Logger;
module.exports = function(RED) {
function HTTPEventCollector(config) {
RED.nodes.createNode(this,config);
var context = this.context();
var node = this;
var myMessage = null;
/**
* Only the token property is required.
*/
this.myURI = config.inputURI.toString();
this.myToken = config.inputToken.toString();
this.mySourcetype = config.inputSourcetype.toString();
this.myHost = config.inputHost.toString();
this.mySource = config.inputSource.toString();
this.myIndex = config.inputIndex.toString();
var config = {
token: <PASSWORD>,
url: this.myURI
};
this.on('input', function(msg) {
// Create a new logger
var Logger = new SplunkLogger(config);
Logger.error = function(err, context) {
// Handle errors here
console.log("error", err, "context", context);
};
// Attempt to convert msg.payload to a json structure.
try{
myMessage = JSON.parse(msg)
}
catch(err){
myMessage = msg
}
var payload = {
// Data sent from previous node msg.payload
message: myMessage,
//msgMetaData : msg,
// Metadata
metadata: {
source: this.mySource,
sourcetype: this.mySourcetype,
index: this.myIndex,
host: this.myHost,
},
// Severity is also optional
severity: "info"
};
console.log("Sending payload", payload);
Logger.send(payload, function(err, resp, body) {
// If successful, body will be { text: 'Success', code: 0 }
console.log("Response from Splunk", body);
});
});
}
RED.nodes.registerType("splunk-http-event-collector",HTTPEventCollector);
};
|
#!/usr/bin/env bash
# Interactively collects domain names and obtains a Let's Encrypt certificate
# via certbot. Wildcard domains require the manual DNS-01 authenticator, so
# the webroot authenticator is temporarily commented out of cli.ini and
# restored afterwards (also on Ctrl-C via the cleanup trap).
_config_changed=false

# Revert change from Wildcard Certificate Creation: re-enable the webroot
# authenticator in certbot's cli.ini.
function __revert_wildcard_config_changes() {
    sed -i 's/^#authenticator = webroot/authenticator = webroot/' "/etc/letsencrypt/cli.ini"
    _config_changed=false
}

# FIX: this function was previously defined far below the `trap` line, so an
# early Ctrl-C could fire cleanup before it existed, leaving cli.ini
# modified; it is now defined before the trap is installed.
function cleanup() {
    if [ "$_config_changed" == true ]; then
        __revert_wildcard_config_changes
    fi
    exit
}
trap cleanup INT

. "$_scripts_dir/common/common.sh"
_init_script "$0"
. "$_scripts_dir/utils.sh"
. "$_scripts_dir/common/i18n_core.sh"
_init_i18n "$1"
. "$_scripts_dir/i18n.sh"

_wildcard=false
# FIX: initialise the array explicitly instead of relying on it being unset,
# and count elements with ${#_domains[@]} (the old ${#_domains} measured the
# string length of the first element).
_domains=()
while :; do
    if [ ${#_domains[@]} -ne 0 ]; then
        echo "$(_translate i18n_ADD_ANOTHER_DOMAIN_NAME)"
        read -r _answer
        if [ "$_answer" != "Y" ] && [ "$_answer" != "y" ] && [ "$_answer" != "J" ] && [ "$_answer" != "j" ]; then
            break
        fi
    fi
    echo "$(_translate i18n_ADD_DOMAIN_NAME)"
    read -r _domain
    if [[ -z "${_domain// /}" ]]; then
        _echo "red" "$(_translate i18n_INFO_EMPTY_DOMAIN)"
    else
        if [[ "$_domain" == "*."* ]]; then
            _wildcard=true
        fi
        _domains+=("$_domain")
    fi
done

_check_dns_entries "${_domains[@]}"

_command_to_execute=("certbot" "certonly")
if [ $_wildcard == true ]; then
    # Cannot create a wildcard cert with the webroot authenticator, because
    # manual DNS-01 validation is needed.
    sed -i 's/^authenticator = webroot/#authenticator = webroot/' "/etc/letsencrypt/cli.ini"
    _config_changed=true
    _command_to_execute+=("--manual" "--server" "https://acme-v02.api.letsencrypt.org/directory" "--preferred-challenges" "dns-01")
else
    _command_to_execute+=("--noninteractive")
fi

for _i in "${!_domains[@]}"; do
    _command_to_execute+=("-d" "${_domains[$_i]}")
done

if ! _eval "${_command_to_execute[@]}"; then
    if [ $_wildcard == true ]; then
        __revert_wildcard_config_changes
    fi
    _error "$(_translate i18n_ERROR_CREATE_CERTIFICATE "${_command_to_execute[*]}")"
fi
if [ $_wildcard == true ]; then
    __revert_wildcard_config_changes
fi

# Record the action under the bare domain (wildcard prefix stripped).
if [[ ${_domains[0]} == "*."* ]]; then
    _write_action "get" "${_domains[0]:2}" "${_domains[@]}"
else
    _write_action "get" "${_domains[0]}" "${_domains[@]}"
fi
exit 0
|
<gh_stars>1-10
// see docs/hlse.md for instructions on how test data was generated
import QUnit from 'qunit';
import {unpad} from 'pkcs7';
import sinon from 'sinon';
import {decrypt, Decrypter, AsyncStream} from '../src';
// see docs/hlse.md for instructions on how test data was generated
// Decode an array-like of byte values into a binary string, one character
// per byte (charCode === byte value).
const stringFromBytes = function(bytes) {
  return Array.prototype.map
    .call(bytes, (byte) => String.fromCharCode(byte))
    .join('');
};
// Synchronous decryption: exact-value tests against known AES-128-CBC
// ciphertexts (all-zero key, IV === key).
QUnit.module('Decryption');

QUnit.test('decrypts a single AES-128 with PKCS7 block', function() {
  let key = new Uint32Array([0, 0, 0, 0]);
  let initVector = key;
  // the string "howdy folks" encrypted
  let encrypted = new Uint8Array([
    0xce, 0x90, 0x97, 0xd0,
    0x08, 0x46, 0x4d, 0x18,
    0x4f, 0xae, 0x01, 0x1c,
    0x82, 0xa8, 0xf0, 0x67
  ]);
  QUnit.deepEqual('howdy folks',
    stringFromBytes(unpad(decrypt(encrypted, key, initVector))),
    'decrypted with a byte array key'
  );
});

QUnit.test('decrypts multiple AES-128 blocks with CBC', function() {
  let key = new Uint32Array([0, 0, 0, 0]);
  let initVector = key;
  // the string "0123456789abcdef01234" encrypted
  let encrypted = new Uint8Array([
    0x14, 0xf5, 0xfe, 0x74,
    0x69, 0x66, 0xf2, 0x92,
    0x65, 0x1c, 0x22, 0x88,
    0xbb, 0xff, 0x46, 0x09,
    0x0b, 0xde, 0x5e, 0x71,
    0x77, 0x87, 0xeb, 0x84,
    0xa9, 0x54, 0xc2, 0x45,
    0xe9, 0x4e, 0x29, 0xb3
  ]);
  QUnit.deepEqual('0123456789abcdef01234',
    stringFromBytes(unpad(decrypt(encrypted, key, initVector))),
    'decrypted multiple blocks');
});

// Running two decrypts back-to-back guards against decrypt() mutating its
// key/IV inputs (the internal deep copy must isolate each call).
QUnit.test(
  'verify that the deepcopy works by doing two decrypts in the same test',
  function() {
    let key = new Uint32Array([0, 0, 0, 0]);
    let initVector = key;
    // the string "howdy folks" encrypted
    let pkcs7Block = new Uint8Array([
      0xce, 0x90, 0x97, 0xd0,
      0x08, 0x46, 0x4d, 0x18,
      0x4f, 0xae, 0x01, 0x1c,
      0x82, 0xa8, 0xf0, 0x67
    ]);
    QUnit.deepEqual('howdy folks',
      stringFromBytes(unpad(decrypt(pkcs7Block, key, initVector))),
      'decrypted with a byte array key'
    );
    // the string "0123456789abcdef01234" encrypted
    let cbcBlocks = new Uint8Array([
      0x14, 0xf5, 0xfe, 0x74,
      0x69, 0x66, 0xf2, 0x92,
      0x65, 0x1c, 0x22, 0x88,
      0xbb, 0xff, 0x46, 0x09,
      0x0b, 0xde, 0x5e, 0x71,
      0x77, 0x87, 0xeb, 0x84,
      0xa9, 0x54, 0xc2, 0x45,
      0xe9, 0x4e, 0x29, 0xb3
    ]);
    QUnit.deepEqual('0123456789abcdef01234',
      stringFromBytes(unpad(decrypt(cbcBlocks, key, initVector))),
      'decrypted multiple blocks');
  });

// AsyncStream scheduling: fake timers let the tests step the stream's
// internal setTimeout-based queue deterministically.
QUnit.module('Incremental Processing', {
  beforeEach() {
    this.clock = sinon.useFakeTimers();
  },
  afterEach() {
    this.clock.restore();
  }
});

QUnit.test('executes a callback after a timeout', function() {
  let asyncStream = new AsyncStream();
  let calls = '';

  asyncStream.push(function() {
    calls += 'a';
  });
  this.clock.tick(asyncStream.delay);
  QUnit.equal(calls, 'a', 'invoked the callback once');
  this.clock.tick(asyncStream.delay);
  QUnit.equal(calls, 'a', 'only invoked the callback once');
});

QUnit.test('executes callback in series', function() {
  let asyncStream = new AsyncStream();
  let calls = '';

  asyncStream.push(function() {
    calls += 'a';
  });
  asyncStream.push(function() {
    calls += 'b';
  });
  this.clock.tick(asyncStream.delay);
  QUnit.equal(calls, 'a', 'invoked the first callback');
  this.clock.tick(asyncStream.delay);
  QUnit.equal(calls, 'ab', 'invoked the second');
});

// Decrypter: chunked, async decryption driven by the same fake clock.
QUnit.module('Incremental Decryption', {
  beforeEach() {
    this.clock = sinon.useFakeTimers();
  },
  afterEach() {
    this.clock.restore();
  }
});

QUnit.test('asynchronously decrypts a 4-word block', function() {
  let key = new Uint32Array([0, 0, 0, 0]);
  let initVector = key;
  // the string "howdy folks" encrypted
  let encrypted = new Uint8Array([0xce, 0x90, 0x97, 0xd0,
                                  0x08, 0x46, 0x4d, 0x18,
                                  0x4f, 0xae, 0x01, 0x1c,
                                  0x82, 0xa8, 0xf0, 0x67]);
  let decrypted;
  let decrypter = new Decrypter(encrypted,
                                key,
                                initVector,
                                function(error, result) {
                                  if (error) {
                                    throw new Error(error);
                                  }
                                  decrypted = result;
                                });

  QUnit.ok(!decrypted, 'asynchronously decrypts');
  this.clock.tick(decrypter.asyncStream_.delay * 2);
  QUnit.ok(decrypted, 'completed decryption');
  QUnit.deepEqual('howdy folks',
    stringFromBytes(decrypted),
    'decrypts and unpads the result');
});

QUnit.test('breaks up input greater than the step value', function() {
  let encrypted = new Int32Array(Decrypter.STEP + 4);
  let done = false;
  let decrypter = new Decrypter(encrypted,
                                new Uint32Array(4),
                                new Uint32Array(4),
                                function() {
                                  done = true;
                                });

  this.clock.tick(decrypter.asyncStream_.delay * 2);
  QUnit.ok(!done, 'not finished after two ticks');

  this.clock.tick(decrypter.asyncStream_.delay);
  QUnit.ok(done, 'finished after the last chunk is decrypted');
});
|
#include "3bc.h"

/*
 * print_file(file, type, val): writes `val` to `file` formatted according to
 * the string-type register `type` (STRI unsigned long, STRO octal, STRC
 * char, STRX hex, STRU signed decimal).  NOTE(review): the ';' right after
 * the parameter list is part of the original macro text and is preserved.
 */
#define print_file(file,type, val);\
switch(type){\
case STRI: fprintf(file, "%lu", (unsigned long) val); break;\
case STRO: fprintf(file, "%o", (unsigned int) val); break;\
case STRC: fprintf(file, "%c", (unsigned char) val); break;\
case STRX: fprintf(file, "%x", (unsigned int) val); break;\
case STRU: fprintf(file, "%d", (signed int) val); break;}

/* Prints an error description line to stderr and breaks out of the
 * enclosing switch (used by lang_driver_error). */
#define print_error(string) fprintf(stderr, "> ERROR DESCRIPTION: %s\n", string);break

#ifndef _WIN32
/* Saved (old) and raw-input (new) terminal attributes, POSIX only;
 * prepared in lang_driver_init and swapped around getch(). */
struct termios term_old_attr;
struct termios term_new_attr;
#endif

/* Handle of the 3BC program being interpreted (stdin or an opened file). */
file_t* program_file;
/*
 * Main interpreter loop: while program steps remain on the tape, execute
 * them; otherwise read and interpret the next line from the program file.
 * Stops as soon as the active step returns a falsy (zero) result.
 */
void lang_driver_run()
{
    for (;;) {
        if (tape_program_avaliable()) {
            if (!tape_program_exe()) {
                break;
            }
        }
        else if (!lang_interpreter_line(program_file)) {
            break;
        }
    }
}
/*
 * Prepares the interpreter driver: installs the SIGINT handler, selects the
 * program source (stdin when no argument is given, otherwise the last
 * command-line argument), and on POSIX prepares raw terminal attributes
 * (no canonical mode, no echo, blocking single-character reads) for later
 * use by getch().
 * NOTE(review): the fopen() result is not checked; a missing file would
 * leave program_file NULL -- confirm callers tolerate this.
 */
void lang_driver_init(int argc, char **argv)
{
    signal(SIGINT, lang_driver_exit);

    if (argc <= 1) {
        program_file = stdin;
    }
    else {
        program_file = fopen(argv[argc - 1], "r");
    }

#ifndef _WIN32
    /* Keep a pristine copy to restore on exit, and a modified copy that
     * getch() switches to while reading a key. */
    tcgetattr(0, &term_old_attr);
    tcgetattr(0, &term_new_attr);
    term_new_attr.c_lflag &= ~ICANON;
    term_new_attr.c_lflag &= ~ECHO;
    term_new_attr.c_cc[VTIME] = 0;
    term_new_attr.c_cc[VMIN] = 1;
#endif
}
/*
 * Common exit path (also used as the SIGINT handler): restores the saved
 * terminal attributes, closes the program file unless it is stdin, releases
 * the interpreter tapes, and terminates with `sig` as the exit status.
 */
void lang_driver_exit(int sig)
{
#ifndef _WIN32
    tcsetattr(STDIN_FILENO,TCSANOW,&term_old_attr);
#endif

    if (program_file != stdin) {
        fclose(program_file);
    }

    tape_memory_destroy();
    tape_program_destroy();
    exit(sig);
}
/* Writes `val` to stdout, formatted according to the string-type register
 * `type` (see the print_file macro above). */
void lang_driver_output_1(reg_t type, val_t val)
{
    print_file(stdout, type, val);
}
/* Writes `val` to stderr, formatted according to the string-type register
 * `type` (see the print_file macro above). */
void lang_driver_output_2(reg_t type, val_t val)
{
    print_file(stderr, type, val);
}
/*
 * Reports a fatal interpreter error to stderr -- source line, numeric code
 * and a human-readable description -- then terminates through
 * lang_driver_exit(EXIT_FAILURE) so terminal state and tapes are released.
 * FIX: corrected typos in two user-facing messages
 * ("EMPUTY" -> "EMPTY", "ADDRES" -> "ADDRESS").
 */
void lang_driver_error(error_t error_code)
{
    fprintf(stderr, "\n[3BC] CRITICAL ERROR ABORTED THE PROGRAM");
    fprintf(stderr, "\n> ERROR LINE: %d", CLINE + 1);
    fprintf(stderr, "\n> ERROR CODE: %d\n", error_code);

    /* print_error() prints the description and breaks out of the switch. */
    switch(error_code)
    {
        case ERROR_CPU_ZERO: print_error("EMPTY CPU MODE");
        case ERROR_CPU_UNDEF: print_error("UNDEFINED CPU MODE");
        case ERROR_CPU_PROTECT: print_error("PROTECTED CPU MODE");
        case ERROR_CPU_RESERVED: print_error("RESERVED CPU MODE");
        case ERROR_CPU_REGISTER: print_error("UNDEFINED CPU REGISTER");
        case ERROR_INVALID_LABEL: print_error("INVALID LABEL");
        case ERROR_INVALID_VALUE: print_error("INVALID VALUE");
        case ERROR_INVALID_ADDRESS: print_error("INVALID ADDRESS");
        case ERROR_PARAM_DUALITY: print_error("DUALITY ADDRESS WITH VALUE IS NOT ALLOWED");
        case ERROR_PARAM_REQUIRE_VALUE: print_error("VALUE IS REQUIRED");
        case ERROR_PARAM_REQUIRE_ADDRESS: print_error("ADDRESS IS REQUIRED");
        case ERROR_PARAM_BLOCKED_VALUE: print_error("VALUE IS NOT ALLOWED");
        case ERROR_PARAM_BLOCKED_ADDRESS: print_error("ADDRESS IS NOT ALLOWED");
        case ERROR_INTERPRETER_REGISTER: print_error("INVALID REGISTER");
        case ERROR_INTERPRETER_NUMBER: print_error("INVALID NUMBER");
        case ERROR_TAPE_LABEL: print_error("FAILURE TO EXPAND THE LABEL LIST");
        case ERROR_TAPE_MEMORY: print_error("FAILURE TO EXPAND THE MEMORY");
        case ERROR_TAPE_PROGRAM: print_error("FAILURE TO EXPAND THE PROGRAM");
        default: print_error("UNKNOWN ERROR");
    }

    lang_driver_exit(EXIT_FAILURE);
}
#ifndef _WIN32
/*
 * Reads a single keypress without waiting for Enter and without echo:
 * temporarily applies the raw terminal attributes prepared by
 * lang_driver_init(), reads one character, then restores the old attributes.
 */
int getch()
{
    int c ;
    tcsetattr(STDIN_FILENO,TCSANOW, &term_new_attr);
    c = getchar() ;
    tcsetattr(STDIN_FILENO,TCSANOW, &term_old_attr);
    return c ;
}
#endif
/*
 * Reads one key with getch() and converts it via sscanf using `format`
 * (e.g. "%d", "%x").
 * NOTE(review): input_val is static, so when the scan fails the value from
 * a previous successful call is returned unchanged -- presumably intended
 * as "keep last good input", but worth confirming.
 */
int getch_parser(const char* format)
{
    static int input_val;
    static char input_key[2] = "\0";
    input_key[0] = getch();
    sscanf(input_key, format, &input_val);
    return input_val;
}
|
#!/bin/bash -ex
##############################################################################
### Script to install the OpenStack Glance service on the controller (CTL)
### Load the variables used throughout the installation
source config.cfg

# Print a highlighted three-line banner so each phase is easy to spot.
function echocolor {
    echo "#######################################################################"
    echo "$(tput setaf 3)##### $1 #####$(tput sgr0)"
    echo "#######################################################################"
}

# Set one [SECTION] PARAMETER VALUE in an INI-style config file.
function ops_edit {
    crudini --set "$1" "$2" "$3" "$4"
}
# Usage
## Syntax:
## ops_edit $config_file_path [SECTION] [PARAMETER] [VALUE]
## Example:
### filekeystone=/etc/keystone/keystone.conf
### ops_edit $filekeystone DEFAULT rpc_backend rabbit

# Delete one entry from an INI-style config file.
function ops_del {
    crudini --del "$1" "$2" "$3"
}

# Create the glance database and grant the glance DB user access from
# localhost, any host, and the controller's management IP.
function glance_create_db() {
    mysql -uroot -p$PASS_DATABASE_ROOT -e "CREATE DATABASE glance;
GRANT ALL PRIVILEGES ON glance.* TO 'glance'@'localhost' IDENTIFIED BY '$PASS_DATABASE_GLANCE';
GRANT ALL PRIVILEGES ON glance.* TO 'glance'@'%' IDENTIFIED BY '$PASS_DATABASE_GLANCE';
GRANT ALL PRIVILEGES ON glance.* TO 'glance'@'$CTL1_IP_NIC1' IDENTIFIED BY '$PASS_DATABASE_GLANCE';
FLUSH PRIVILEGES;"
}

# Create the keystone user, service entry and the three image-API endpoints
# (public / internal / admin) for glance.
function glance_user_endpoint() {
    openstack user create glance --domain default --password $GLANCE_PASS
    openstack role add --project service --user glance admin
    openstack service create --name glance --description "OpenStack Image" image
    openstack endpoint create --region RegionOne image public http://$CTL1_IP_NIC1:9292
    openstack endpoint create --region RegionOne image internal http://$CTL1_IP_NIC1:9292
    openstack endpoint create --region RegionOne image admin http://$CTL1_IP_NIC1:9292
}

# Install the glance package and write both the API and registry config
# files (originals are kept as *.orig).
function glance_install_config() {
    yum -y install openstack-glance
    glance_api_conf=/etc/glance/glance-api.conf
    glance_registry_conf=/etc/glance/glance-registry.conf
    cp $glance_api_conf $glance_api_conf.orig
    cp $glance_registry_conf $glance_registry_conf.orig
    ###glance_api_conf
    ops_edit $glance_api_conf DEFAULT transport_url rabbit://openstack:$RABBIT_PASS@$CTL1_IP_NIC1
    ops_edit $glance_api_conf glance_store stores file,http
    ops_edit $glance_api_conf glance_store default_store file
    ops_edit $glance_api_conf glance_store filesystem_store_datadir /var/lib/glance/images/
    ops_edit $glance_api_conf database connection mysql+pymysql://glance:$PASS_DATABASE_GLANCE@$CTL1_IP_NIC1/glance
    ops_edit $glance_api_conf keystone_authtoken www_authenticate_uri http://$CTL1_IP_NIC1:5000
    ops_edit $glance_api_conf keystone_authtoken auth_url http://$CTL1_IP_NIC1:5000
    ops_edit $glance_api_conf keystone_authtoken memcached_servers $CTL1_IP_NIC1:11211
    ops_edit $glance_api_conf keystone_authtoken auth_type password
    ops_edit $glance_api_conf keystone_authtoken project_domain_name Default
    ops_edit $glance_api_conf keystone_authtoken user_domain_name Default
    ops_edit $glance_api_conf keystone_authtoken project_name service
    ops_edit $glance_api_conf keystone_authtoken username glance
    ops_edit $glance_api_conf keystone_authtoken password $GLANCE_PASS
    ops_edit $glance_api_conf paste_deploy flavor keystone
    ops_edit $glance_api_conf oslo_messaging_notifications driver messagingv2
    ###glance_registry_conf
    ops_edit $glance_registry_conf DEFAULT transport_url rabbit://openstack:$RABBIT_PASS@$CTL1_IP_NIC1
    ops_edit $glance_registry_conf database connection mysql+pymysql://glance:$PASS_DATABASE_GLANCE@$CTL1_IP_NIC1/glance
    ops_edit $glance_registry_conf keystone_authtoken www_authenticate_uri http://$CTL1_IP_NIC1:5000
    ops_edit $glance_registry_conf keystone_authtoken auth_url http://$CTL1_IP_NIC1:5000
    ops_edit $glance_registry_conf keystone_authtoken memcached_servers $CTL1_IP_NIC1:11211
    ops_edit $glance_registry_conf keystone_authtoken auth_type password
    ops_edit $glance_registry_conf keystone_authtoken project_domain_name Default
    ops_edit $glance_registry_conf keystone_authtoken user_domain_name Default
    ops_edit $glance_registry_conf keystone_authtoken project_name service
    ops_edit $glance_registry_conf keystone_authtoken username glance
    ops_edit $glance_registry_conf keystone_authtoken password $GLANCE_PASS
    ops_edit $glance_registry_conf paste_deploy flavor keystone
    ops_edit $glance_registry_conf oslo_messaging_notifications driver messagingv2
}

# Populate the glance database schema as the glance service user.
function glance_syncdb() {
    su -s /bin/sh -c "glance-manage db_sync" glance
}
# Enable both glance services at boot, then start them (same order as the
# individual systemctl calls: enable api, enable registry, start api,
# start registry).
function glance_enable_restart() {
    local _glance_units=(openstack-glance-api.service openstack-glance-registry.service)
    local _unit
    for _unit in "${_glance_units[@]}"; do
        systemctl enable "$_unit"
    done
    for _unit in "${_glance_units[@]}"; do
        systemctl start "$_unit"
    done
}
# Download the CirrOS test image and register it publicly with glance,
# then list the images to verify the upload.
function glance_create_image() {
    wget http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img
    openstack image create "cirros" --file cirros-0.3.5-x86_64-disk.img \
    --disk-format qcow2 --container-format bare \
    --public
    openstack image list
}

############################
# Execute the functions
## Call the functions in order
############################
source config.cfg
source /root/admin-openrc
############################
echocolor "Bat dau cai dat Glance"
echocolor "Tao DB Glance"
sleep 3
glance_create_db

echocolor "Tao user va endpoint cho Glance"
sleep 3
glance_user_endpoint

echocolor "Cai dat va cau hinh Glance"
sleep 3
glance_install_config

echocolor "Dong bo DB cho Glance"
sleep 3
glance_syncdb

echocolor "Restart dich vu glance"
sleep 3
glance_enable_restart

echocolor "Tao images"
sleep 3
glance_create_image

echocolor "Da cai dat xong Glance"
|
package models

// AllPulishAddressForIP describes one published endpoint (address, port and
// publishing protocol) for a host IP, serialized with snake_case JSON keys.
// NOTE(review): "Pulish" looks like a typo for "Publish", but the name is
// part of the package's exported API, so it is left unchanged.
type AllPulishAddressForIP struct {
	IP string `json:"ip"`                           // host IP address
	Port string `json:"port"`                       // port, kept as a string as serialized
	PublishProtocol string `json:"publish_protocol"` // protocol used to publish (values not visible here)
}
|
<filename>test/__fixtures__/kitchensink-graphql.js
// Test fixture: builds the kitchensink GraphQL schema, discovers its gRPC
// service bindings, runs the backing server (presumably on port 50001 --
// confirm against kitchensink.js), then executes a mutation exercising every
// scalar/enum/nested/recursive field shape and logs the result.
import { run } from "./kitchensink.js";
import { makeFieldResolver } from "../../src/execute.js";
import { buildSchema, graphql } from "graphql";
import { findServices } from "../../src/protos.js";
import { readFileSync } from "fs";

(async () => {
  const schema = buildSchema(
    readFileSync("test/__fixtures__/kitchensink.graphql", "utf-8")
  );
  const services = findServices(schema, { cwd: process.cwd() });
  await run(50001);
  // The mutation document below is a runtime string and is left untouched.
  const result = await graphql({
    schema,
    source: `mutation KitchenSink {
DoSomething(
field_double: 1.1
field_float: 2.2
field_int32: 3
field_int64: 4
field_uint32: 5
field_uint64: 6
field_sint32: 7
field_sint64: 8
field_fixed32: 9
field_fixed64: 10
field_sfixed32: 11
field_sfixed64: 12
field_bool: false
field_string: "hello"
# field_bytes: "world"
field_strings: ["goodbye"]
field_enum: one
field_nested_enum: three
field_child: {
foo: "bar"
}
field_nested_child: {
bar: "baz"
}
field_recursive: {
depth: 1
recursive: {
depth: 2
}
}
) {
field_double
field_float
field_int32
field_int64
field_uint32
field_uint64
field_sint32
field_sint64
field_fixed32
field_fixed64
field_sfixed32
field_sfixed64
field_bool
field_string
field_bytes
field_strings
field_enum
field_nested_enum
field_child {
foo
}
field_nested_child {
bar
}
field_recursive {
depth
recursive {
depth
}
}
}
}`,
    fieldResolver: makeFieldResolver(services),
  });
  console.log(result);
})();
|
#!/bin/bash -eu
# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
# OSS-Fuzz build steps for cryptofuzz with Nettle (libgmp-backed) and Botan.
export LINK_FLAGS=""

# Not using OpenSSL
export CXXFLAGS="$CXXFLAGS -DCRYPTOFUZZ_NO_OPENSSL"

# Install Boost headers
cd $SRC/
tar jxf boost_1_74_0.tar.bz2
cd boost_1_74_0/
CFLAGS="" CXXFLAGS="" ./bootstrap.sh
CFLAGS="" CXXFLAGS="" ./b2 headers
cp -R boost/ /usr/include/

# Generate lookup tables. This only needs to be done once.
cd $SRC/cryptofuzz
python gen_repository.py

# This whole libgmp + Botan fuzzer section is skipped when building with
# MemorySanitizer (the $CFLAGS check below).
if [[ $CFLAGS != *sanitize=memory* ]]
then
    # Compile libgmp
    cd $SRC/
    lzip -d gmp-6.2.0.tar.lz
    tar xf gmp-6.2.0.tar
    cd gmp-6.2.0/
    autoreconf -ivf
    # 32-bit builds need the configure step run under a 32-bit personality.
    if [[ $CFLAGS != *-m32* ]]
    then
        ./configure --enable-maintainer-mode
    else
        setarch i386 ./configure --enable-maintainer-mode
    fi
    make -j$(nproc)
    make install

    # Compile Nettle (with libgmp)
    mkdir $SRC/nettle-with-libgmp-install/
    cp -R $SRC/nettle $SRC/nettle-with-libgmp/
    cd $SRC/nettle-with-libgmp/
    bash .bootstrap
    if [[ $CFLAGS != *sanitize=memory* ]]
    then
        ./configure --disable-documentation --disable-openssl --prefix=`realpath ../nettle-with-libgmp-install`
    else
        ./configure --disable-documentation --disable-openssl --disable-assembler --prefix=`realpath ../nettle-with-libgmp-install`
    fi
    make -j$(nproc)
    make install
    # 32-bit installs place the archives under lib32/.
    if [[ $CFLAGS != *-m32* ]]
    then
        export LIBNETTLE_A_PATH=`realpath ../nettle-with-libgmp-install/lib/libnettle.a`
        export LIBHOGWEED_A_PATH=`realpath ../nettle-with-libgmp-install/lib/libhogweed.a`
        ls -l $LIBHOGWEED_A_PATH
    else
        export LIBNETTLE_A_PATH=`realpath ../nettle-with-libgmp-install/lib32/libnettle.a`
        export LIBHOGWEED_A_PATH=`realpath ../nettle-with-libgmp-install/lib32/libhogweed.a`
    fi
    export NETTLE_INCLUDE_PATH=`realpath ../nettle-with-libgmp-install/include`
    export CXXFLAGS="$CXXFLAGS -DCRYPTOFUZZ_NETTLE"
    export LINK_FLAGS="$LINK_FLAGS /usr/local/lib/libgmp.a"

    # Compile Cryptofuzz Nettle module
    cd $SRC/cryptofuzz/modules/nettle
    make -f Makefile-hogweed -B
    ##############################################################################
    # Compile Botan
    cd $SRC/botan
    if [[ $CFLAGS != *-m32* ]]
    then
        ./configure.py --cc-bin=$CXX --cc-abi-flags="$CXXFLAGS" --disable-shared --disable-modules=locking_allocator --build-targets=static --without-documentation
    else
        ./configure.py --cpu=x86_32 --cc-bin=$CXX --cc-abi-flags="$CXXFLAGS" --disable-shared --disable-modules=locking_allocator --build-targets=static --without-documentation
    fi
    make -j$(nproc)
    export CXXFLAGS="$CXXFLAGS -DCRYPTOFUZZ_BOTAN"
    export LIBBOTAN_A_PATH="$SRC/botan/libbotan-3.a"
    export BOTAN_INCLUDE_PATH="$SRC/botan/build/include"

    # Compile Cryptofuzz Botan module
    cd $SRC/cryptofuzz/modules/botan
    make -B

    # Compile Cryptofuzz
    cd $SRC/cryptofuzz
    LIBFUZZER_LINK="$LIB_FUZZING_ENGINE" make -B -j$(nproc) >/dev/null

    # Generate dictionary
    ./generate_dict

    # Copy fuzzer
    cp $SRC/cryptofuzz/cryptofuzz $OUT/cryptofuzz-nettle-with-libgmp
    # Copy dictionary
    cp $SRC/cryptofuzz/cryptofuzz-dict.txt $OUT/cryptofuzz-nettle-with-libgmp.dict
    # Copy seed corpus
    cp $SRC/cryptofuzz-corpora/libressl_latest.zip $OUT/cryptofuzz-nettle-with-libgmp_seed_corpus.zip
fi
# Compile Nettle (with mini gmp)
mkdir $SRC/nettle-with-mini-gmp-install/
cp -R $SRC/nettle $SRC/nettle-with-mini-gmp/
cd $SRC/nettle-with-mini-gmp/
bash .bootstrap
if [[ $CFLAGS != *sanitize=memory* ]]
then
./configure --enable-mini-gmp --disable-documentation --disable-openssl --prefix=`realpath ../nettle-with-mini-gmp-install`
else
./configure --enable-mini-gmp --disable-documentation --disable-openssl --disable-assembler --prefix=`realpath ../nettle-with-mini-gmp-install`
fi
make -j$(nproc)
make install
if [[ $CFLAGS != *-m32* ]]
then
export LIBNETTLE_A_PATH=`realpath ../nettle-with-mini-gmp-install/lib/libnettle.a`
export LIBHOGWEED_A_PATH=`realpath ../nettle-with-mini-gmp-install/lib/libhogweed.a`
ls -l $LIBHOGWEED_A_PATH
else
export LIBNETTLE_A_PATH=`realpath ../nettle-with-mini-gmp-install/lib32/libnettle.a`
export LIBHOGWEED_A_PATH=`realpath ../nettle-with-mini-gmp-install/lib32/libhogweed.a`
fi
export NETTLE_INCLUDE_PATH=`realpath ../nettle-with-mini-gmp-install/include`
export LINK_FLAGS=""
export CXXFLAGS="$CXXFLAGS -DCRYPTOFUZZ_NETTLE"
# Compile Cryptofuzz Nettle module
cd $SRC/cryptofuzz/modules/nettle
make -f Makefile-hogweed -B
# Compile Cryptofuzz
cd $SRC/cryptofuzz
LIBFUZZER_LINK="$LIB_FUZZING_ENGINE" make -B -j$(nproc) >/dev/null
# Generate dictionary
./generate_dict
# Copy fuzzer
cp $SRC/cryptofuzz/cryptofuzz $OUT/cryptofuzz-nettle-with-mini-gmp
# Copy dictionary
cp $SRC/cryptofuzz/cryptofuzz-dict.txt $OUT/cryptofuzz-nettle-with-mini-gmp.dict
# Copy seed corpus
cp $SRC/cryptofuzz-corpora/libressl_latest.zip $OUT/cryptofuzz-nettle-with-mini-gmp_seed_corpus.zip
|
package ru.contextguide.yandexservices.exceptions;

/**
 * Exception thrown when a request to the service cannot be created.
 * (Translated from the original Russian comment: "Исключение при создании
 * запроса к сервису.")
 */
public class ApiRequestException extends YDException {

    /** Creates the exception with no detail message or cause. */
    public ApiRequestException() {
        super();
    }

    /** @param message detail message describing why the request failed */
    public ApiRequestException(String message) {
        super(message);
    }

    /**
     * @param message detail message describing why the request failed
     * @param cause   underlying exception that triggered this one
     */
    public ApiRequestException(String message, Throwable cause) {
        super(message, cause);
    }

    /** @param cause underlying exception that triggered this one */
    public ApiRequestException(Throwable cause) {
        super(cause);
    }
}
|
<filename>inference/plotting.py
"""
.. moduleauthor:: <NAME> <<EMAIL>>
"""
from numpy import array, meshgrid, linspace, sqrt, ceil, ndarray
from itertools import product, cycle
from warnings import warn
from inference.pdf import GaussianKDE, KDE2D, sample_hdi
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
from matplotlib.collections import PatchCollection
from matplotlib.cm import get_cmap
import matplotlib.patheffects as path_effects
def matrix_plot(
    samples,
    labels=None,
    show=True,
    reference=None,
    filename=None,
    plot_style="contour",
    colormap="Blues",
    show_ticks=None,
    point_colors=None,
    point_size=1,
    label_size=10,
):
    """
    Construct a 'matrix plot' for a set of variables which shows all possible
    1D and 2D marginal distributions.

    :param samples: \
        A list of array-like objects containing the samples for each variable.

    :keyword labels: \
        A list of strings to be used as axis labels for each parameter being plotted.

    :keyword bool show: \
        Sets whether the plot is displayed.

    :keyword reference: \
        A list of reference values for each parameter which will be over-plotted.

    :keyword str filename: \
        File path to which the matrix plot will be saved (if specified).

    :keyword str plot_style: \
        Specifies the type of plot used to display the 2D marginal distributions.
        Available styles are 'contour' for filled contour plotting, 'histogram' for
        hexagonal-bin histogram, and 'scatter' for scatterplot.

    :keyword str colormap: \
        Name of the matplotlib colormap used for the marginal plots.

    :keyword bool show_ticks: \
        By default, axis ticks are only shown when plotting less than 6 variables.
        This behaviour can be overridden for any number of parameters by setting
        show_ticks to either True or False.

    :keyword point_colors: \
        An array containing data which will be used to set the colors of the points
        if the plot_style argument is set to 'scatter'.

    :keyword point_size: \
        An array containing data which will be used to set the size of the points
        if the plot_style argument is set to 'scatter'.

    :keyword int label_size: \
        The font-size used for axis labels.

    :return: \
        The matplotlib figure object.
    """
    N_par = len(samples)
    if labels is None:  # set default axis labels if none are given
        if N_par >= 10:
            labels = [f"p{i}" for i in range(N_par)]
        else:
            labels = [f"param {i}" for i in range(N_par)]
    else:
        if len(labels) != N_par:
            raise ValueError("number of labels must match number of plotted parameters")
    if reference is not None:
        if len(reference) != N_par:
            raise ValueError(
                "number of reference values must match number of plotted parameters"
            )
    # check that given plot style is valid, else default to a histogram
    if plot_style not in ["contour", "histogram", "scatter"]:
        plot_style = "histogram"
        warn("'plot_style' must be set as either 'contour', 'histogram' or 'scatter'")
    # by default, we suppress axis ticks if there are 6 parameters or more to keep things tidy
    if show_ticks is None:
        show_ticks = N_par < 6
    # number of points per axis on which the 1D marginal estimates are evaluated
    L = 200
    # NOTE(review): matplotlib.cm.get_cmap is deprecated since matplotlib 3.7
    # (removed in 3.9) — consider migrating to matplotlib.pyplot.get_cmap.
    cmap = get_cmap(colormap)
    # find the darker of the two ends of the colormap, and use it for the marginal plots
    # ("darker" judged by the smaller sum of the RGB channels)
    marginal_color = sorted([cmap(10), cmap(245)], key=lambda x: sum(x[:-1]))[0]
    # build axis arrays and determine limits for all variables
    axis_limits = []
    axis_arrays = []
    for sample in samples:
        # get the 98% HDI to calculate plot limits
        lwr, upr = sample_hdi(sample, fraction=0.98)
        # store the limits and axis array, padded beyond the HDI on both sides
        axis_limits.append([lwr - (upr - lwr) * 0.3, upr + (upr - lwr) * 0.3])
        axis_arrays.append(
            linspace(lwr - (upr - lwr) * 0.35, upr + (upr - lwr) * 0.35, L)
        )
    fig = plt.figure(figsize=(8, 8))
    # build a lower-triangular indices list in diagonal-striped order
    inds_list = [(N_par - 1, 0)]  # start with bottom-left corner
    for k in range(1, N_par):
        inds_list.extend([(N_par - 1 - i, k - i) for i in range(k + 1)])
    # now create a dictionary of axis objects with correct sharing:
    # each column shares its x-axis with the bottom row, and each row shares
    # its y-axis with the left column
    axes = {}
    for tup in inds_list:
        i, j = tup
        x_share = None
        y_share = None
        if i < N_par - 1:
            x_share = axes[(N_par - 1, j)]
        if (j > 0) and (i != j):  # diagonal doesn't share y-axis
            y_share = axes[(i, 0)]
        axes[tup] = plt.subplot2grid(
            (N_par, N_par), (i, j), sharex=x_share, sharey=y_share
        )
    # now loop over grid and plot
    for tup in inds_list:
        i, j = tup
        ax = axes[tup]
        # are we on the diagonal?
        if i == j:
            # 1D marginal via Gaussian KDE, scaled to a peak height of 0.9
            sample = samples[i]
            pdf = GaussianKDE(sample)
            estimate = array(pdf(axis_arrays[i]))
            ax.plot(
                axis_arrays[i],
                0.9 * (estimate / estimate.max()),
                lw=1,
                color=marginal_color,
            )
            ax.fill_between(
                axis_arrays[i],
                0.9 * (estimate / estimate.max()),
                color=marginal_color,
                alpha=0.1,
            )
            # over-plot the reference value as a dashed vertical line
            if reference is not None:
                ax.plot(
                    [reference[i], reference[i]],
                    [0, 1],
                    lw=1.5,
                    ls="dashed",
                    color="red",
                )
            ax.set_ylim([0, 1])
        else:
            x = samples[j]
            y = samples[i]
            # plot the 2D marginals
            if plot_style == "contour":
                # Filled contour plotting using 2D gaussian KDE,
                # evaluated on a down-sampled (L // 4 per side) grid for speed
                pdf = KDE2D(x=x, y=y)
                x_ax = axis_arrays[j][::4]
                y_ax = axis_arrays[i][::4]
                X, Y = meshgrid(x_ax, y_ax)
                prob = array(pdf(X.flatten(), Y.flatten())).reshape([L // 4, L // 4])
                ax.set_facecolor(cmap(256 // 20))
                ax.contourf(X, Y, prob, 10, cmap=cmap)
            elif plot_style == "histogram":
                # hexagonal-bin histogram
                ax.set_facecolor(cmap(0))
                ax.hexbin(x, y, gridsize=35, cmap=cmap)
            else:
                # scatterplot
                if point_colors is None:
                    ax.scatter(x, y, color=marginal_color, s=point_size)
                else:
                    ax.scatter(x, y, c=point_colors, s=point_size, cmap=cmap)
            # plot any reference points if given
            # (drawn twice: a wider white ring under a red ring, for visibility)
            if reference is not None:
                ax.plot(
                    reference[j],
                    reference[i],
                    marker="o",
                    markersize=7,
                    markerfacecolor="none",
                    markeredgecolor="white",
                    markeredgewidth=3.5,
                )
                ax.plot(
                    reference[j],
                    reference[i],
                    marker="o",
                    markersize=7,
                    markerfacecolor="none",
                    markeredgecolor="red",
                    markeredgewidth=2,
                )
        # assign axis labels
        if i == N_par - 1:
            ax.set_xlabel(labels[j], fontsize=label_size)
        if j == 0 and i != 0:
            ax.set_ylabel(labels[i], fontsize=label_size)
        # impose x-limits on bottom row
        if i == N_par - 1:
            ax.set_xlim(axis_limits[j])
        # impose y-limits on left column, except the top-left corner
        if j == 0 and i != 0:
            ax.set_ylim(axis_limits[i])
        if show_ticks:  # set up ticks for the edge plots if they are to be shown
            # hide x-tick labels for plots not on the bottom row
            if i < N_par - 1:
                plt.setp(ax.get_xticklabels(), visible=False)
            # hide y-tick labels for plots not in the left column
            if j > 0:
                plt.setp(ax.get_yticklabels(), visible=False)
            # remove all y-ticks for 1D marginal plots on the diagonal
            if i == j:
                ax.set_yticks([])
        else:  # else remove all ticks from all axes
            ax.set_xticks([])
            ax.set_yticks([])
    # set the plot spacing
    fig.tight_layout()
    fig.subplots_adjust(wspace=0.0, hspace=0.0)
    # save/show the figure if required
    if filename is not None:
        plt.savefig(filename)
    if show:
        plt.show()
    return fig
def trace_plot(samples, labels=None, show=True, filename=None):
    """
    Construct a 'trace plot' for a set of variables which displays the
    value of the variables as a function of step number in the chain.

    :param samples: \
        A list of array-like objects containing the samples for each variable.

    :keyword labels: \
        A list of strings to be used as axis labels for each parameter being plotted.

    :keyword bool show: \
        Sets whether the plot is displayed.

    :keyword str filename: \
        File path to which the trace plot will be saved (if specified).
    """
    n_params = len(samples)
    # fall back to generated labels when none are supplied
    if labels is None:
        prefix = "p" if n_params >= 10 else "param "
        labels = [f"{prefix}{k}" for k in range(n_params)]
    elif len(labels) != n_params:
        raise ValueError(
            "number of labels must match the number of plotted parameters"
        )
    # grid shape: smallest column count n such that n columns of up to 2*n rows
    # fit all parameters, then the smallest row count m for that n
    n_cols = int(ceil(sqrt(0.5 * n_params)))
    n_rows = int(ceil(float(n_params) / float(n_cols)))
    fig = plt.figure(figsize=(12, 8))
    cell_positions = product(range(n_rows), range(n_cols))
    point_colors = cycle(["C0", "C1", "C2", "C3", "C4"])
    axes = {}
    for series, label, (row, col), color in zip(
        samples, labels, cell_positions, point_colors
    ):
        # every axis shares its x-axis with the first one, so chain-step
        # ranges stay aligned across sub-plots
        if (row, col) == (0, 0):
            ax = plt.subplot2grid((n_rows, n_cols), (row, col))
        else:
            ax = plt.subplot2grid((n_rows, n_cols), (row, col), sharex=axes[(0, 0)])
        axes[(row, col)] = ax
        ax.plot(series, ".", markersize=4, alpha=0.15, c=color)
        ax.set_ylabel(label)
        # 99% HDI sets the plot limits; the mid-point of the 10% HDI
        # serves as a cheap estimate of the distribution's mode
        lwr, upr = sample_hdi(series, fraction=0.99)
        mid = 0.5 * sum(sample_hdi(series, fraction=0.10))
        ax.set_ylim([lwr - (mid - lwr) * 0.7, upr + (upr - mid) * 0.7])
        ax.set_yticks([lwr - (mid - lwr) * 0.5, mid, upr + (upr - mid) * 0.5])
        # only the bottom row keeps its x-tick labels and axis label
        if row < n_rows - 1:
            plt.setp(ax.get_xticklabels(), visible=False)
        else:
            ax.set_xlabel("chain step #")
    fig.tight_layout()
    if filename is not None:
        plt.savefig(filename)
    if show:
        plt.show()
    return fig
def hdi_plot(
    x,
    sample,
    intervals=(0.65, 0.95),
    colormap="Blues",
    axis=None,
    label_intervals=True,
    color_levels=None,
):
    """
    Plot highest-density intervals for a given sample of model realisations.

    :param x: \
        The x-axis locations of the sample data.

    :param sample: \
        A ``numpy.ndarray`` containing the sample data, which has shape ``(n, len(x))`` where
        ``n`` is the number of samples.

    :keyword intervals: \
        A tuple containing the fractions of the total probability for each interval.

    :keyword colormap: \
        The colormap to be used for plotting the intervals. Must be a valid argument
        of the ``matplotlib.cm.get_cmap`` function.

    :keyword axis: \
        A ``matplotlib.pyplot`` axis object which will be used to plot the intervals.
        If unspecified, a new axis is created.

    :keyword bool label_intervals: \
        If ``True``, then labels will be assigned to each interval plot such that they appear
        in the legend when using ``matplotlib.pyplot.legend``.

    :keyword color_levels: \
        A list of integers in the range [0,255] which specify the color value within the chosen
        color map to be used for each of the intervals.

    :return: \
        The matplotlib axis on which the intervals were plotted.
    """
    # hoisted from the middle of the function body so all imports are visible up-front
    from numpy import take_along_axis, expand_dims

    # order the intervals from highest to lowest, so wider intervals are drawn
    # first and narrower (higher-density) ones are layered on top
    intervals = array(intervals)
    intervals.sort()
    intervals = intervals[::-1]
    # check that all the intervals are valid:
    if not all((intervals > 0.0) & (intervals < 1.0)):
        raise ValueError("All intervals must be greater than 0 and less than 1")
    # check the sample data has compatible dimensions, transposing if required
    s = array(sample)
    if s.shape[1] != len(x):
        if s.shape[0] == len(x):
            s = s.T
        else:
            raise ValueError('"x" and "sample" have incompatible dimensions')
    # sort the samples along the sample axis — required by the window search below
    s.sort(axis=0)
    n = s.shape[0]
    cmap = get_cmap(colormap)
    if color_levels is None:
        # map interval fractions onto the [20%, 100%] range of the colormap so
        # higher-density intervals get stronger colors
        level_min = 0.20
        level_max = 1.0
        color_levels = 255 * ((level_max - level_min) * (1 - intervals) + level_min)
    colors = [cmap(int(c)) for c in color_levels]
    # if no plotting axis is given, then use default pyplot
    if axis is None:
        _, axis = plt.subplots()
    # iterate over the intervals and plot each
    for frac, col in zip(intervals, colors):
        L = int(frac * n)
        # check that we have enough samples to estimate the HDI for the chosen fraction
        if n > L:
            # find the optimal single HDI: the narrowest window containing L samples
            widths = s[L:, :] - s[: n - L, :]
            i = expand_dims(widths.argmin(axis=0), axis=0)
            lwr = take_along_axis(s, i, 0).squeeze()
            upr = take_along_axis(s, i + L, 0).squeeze()
        else:
            # too few samples — fall back to the full sample range
            lwr = s[0, :]
            upr = s[-1, :]
        if label_intervals:
            axis.fill_between(
                x, lwr, upr, color=col, label="{}% HDI".format(int(100 * frac))
            )
        else:
            axis.fill_between(x, lwr, upr, color=col)
    return axis
def transition_matrix_plot(
    ax=None,
    matrix=None,
    colormap="viridis",
    exclude_diagonal=False,
    upper_triangular=False,
):
    """
    Plot the transition matrix of a Markov chain.

    :param matrix: \
        A 2D ``numpy.ndarray`` containing the transition probabilities, which should be
        in the range [0,1].

    :keyword ax: \
        A ``matplotlib.pyplot`` axis object on which the matrix is drawn.
        If unspecified, a new axis is created.

    :keyword colormap: \
        The colormap to be used for plotting the matrix cells. Must be a valid argument
        of the ``matplotlib.cm.get_cmap`` function.

    :keyword bool exclude_diagonal: \
        If ``True`` the diagonal of the matrix will not be plotted.

    :keyword bool upper_triangular: \
        If ``True``, only the upper-triangular entries (i <= j) are plotted.

    :return: \
        The matplotlib axis on which the matrix was plotted.
    """
    # validate the given matrix
    if not isinstance(matrix, ndarray):
        raise TypeError("given matrix must be a numpy.ndarray")
    if matrix.ndim != 2:
        raise ValueError("given matrix must have exactly two dimensions")
    if matrix.shape[0] != matrix.shape[1]:
        raise ValueError(
            "given matrix must be square (i.e. both dimensions are of the same length)"
        )
    if matrix.shape[0] == 1:
        raise ValueError("given matrix must be at least of size 2x2")
    N = matrix.shape[0]
    # select which matrix elements will be drawn
    if upper_triangular:
        inds = [(i, j) for i in range(N) for j in range(N) if i <= j]
    else:
        inds = [(i, j) for i in range(N) for j in range(N)]
    if exclude_diagonal:
        inds = [(i, j) for i, j in inds if i != j]
    # one unit square per plotted matrix element
    rectangles = [Rectangle((i + 0.5, j + 0.5), 1, 1) for i, j in inds]
    x_sorted = sorted([i[0] for i in inds])
    y_sorted = sorted([i[1] for i in inds])
    x_limits = [x_sorted[0] + 0.5, x_sorted[-1] + 1.5]
    y_limits = [y_sorted[0] + 0.5, y_sorted[-1] + 1.5]
    # color each rectangle by its probability relative to the largest matrix entry
    # NOTE(review): an all-zero matrix divides by zero here — inputs are assumed
    # to be genuine transition probabilities; confirm before relying on this.
    cmap = get_cmap(colormap)
    rectangle_colors = [cmap(matrix[i, j] / matrix.max()) for i, j in inds]
    # bug fix: edgecolors previously passed N entries for len(inds) rectangles and
    # only worked because matplotlib cycles the list — use a single shared color.
    pc = PatchCollection(rectangles, facecolors=rectangle_colors, edgecolors="black")
    if ax is None:
        _, ax = plt.subplots()
    ax.add_collection(pc)
    ax.set_xlim(x_limits)
    ax.set_ylim(y_limits)
    # only plot the rate values as text if the matrix is of size 10 or less
    if N < 11:
        fsize = 20 - N
        for i, j in inds:
            # white text with a black outline so it stays visible on any cell color
            ax.text(
                i + 1,
                j + 1,
                "{}%".format(int(matrix[i, j] * 100)),
                horizontalalignment="center",
                verticalalignment="center",
                color="white",
                fontsize=fsize,
            ).set_path_effects(
                [
                    path_effects.Stroke(linewidth=1.5, foreground="black"),
                    path_effects.Normal(),
                ]
            )
    return ax
|
def calculateDistance(x1, y1, x2, y2):
    """Return the Euclidean distance between the points (x1, y1) and (x2, y2)."""
    # math.hypot is the idiomatic (and overflow/underflow-safe) way to compute
    # sqrt(dx**2 + dy**2); the local import keeps the module surface unchanged.
    from math import hypot
    return hypot(x2 - x1, y2 - y1)
# main: quick demonstration of calculateDistance on a pair of sample points
x1, y1 = 0, 0
x2, y2 = 10, 10
dist = calculateDistance(x1, y1, x2, y2)
print("Distance between the two points is", dist)
import { StyleSheet } from "react-native";

// Style sheet for the Favorites screen.
export default StyleSheet.create({
  // root screen container — dark background (#2E3248) filling the viewport
  container: {
    flex: 1,
    backgroundColor: "#2E3248"
  },
  // favorites wrapper — currently an empty placeholder with no rules
  favorites_: {},
  // grid listing the favorite items, offset from the top
  favorites_flat_grid_style: {
    flex: 1,
    marginTop: 10
  },
  // image inside a favorite card: fixed 200px height, width driven by flex layout
  favorites_card_item_image_style: {
    flex: 1,
    height: 200,
    width: null
  },
  // favorite card container with vertical spacing above and below the content
  favorites_card_item_style: {
    marginTop: 10,
    justifyContent: "center",
    paddingBottom: 10
  },
  // empty-state container, centered both vertically and horizontally
  favorites_empty: {
    flex: 1,
    justifyContent: "center",
    alignItems: "center"
  },
  // empty-state message text: large, white, centered
  favorites_empty_text: {
    fontSize: 20,
    color: "#FFF",
    textAlign: "center",
    padding: 10
  }
});
|
# frozen_string_literal: true

# Seed/bootstrap script: ensures the default user account exists,
# creating it only when missing (idempotent via create_or_find_by).
# rubocop:disable Rails/Output
password = '<PASSWORD>' # placeholder — presumably substituted at seed/deploy time; TODO confirm
user = User.create_or_find_by(email: '<EMAIL>', password: password)
puts "User with email #{user.email}"
# rubocop:enable Rails/Output
|
// Module loader entry point; resolves before the API is usable — TODO confirm exact contract.
export function load(path?: string): Promise<void>;
// Path associated with the loaded module (set by load, presumably) — undefined before loading.
export let _path: string | undefined;
// Static class registry: registered classes are attached to the shared scope object.
export class Class {
    public static scope: any;
    public static register(cls: any): void;
}

/* Client API */
// Handle identifying a registered listener (returned by add*Listener, consumed by removeListener).
declare type Handle = number;
// Listener callback signatures used by the Client listener-registration methods below.
declare type BlockListener = (blockHash: Hash) => Promise<any> | any;
declare type ConsensusChangedListener = (consensusState: Client.ConsensusState) => Promise<any> | any;
declare type HeadChangedListener = (blockHash: Hash, reason: string, revertedBlocks: Hash[], adoptedBlocks: Hash[]) => Promise<any> | any;
declare type TransactionListener = (transaction: Client.TransactionDetails) => Promise<any> | any;
declare type MempoolListener = (transactionHash: Hash) => Promise<any> | any;
/**
 * Main entry point of the client API: chain queries (blocks, accounts,
 * transactions), transaction submission, and listener registration.
 * Static members attach the nested helper classes and the string-constant
 * maps mirrored as union types in the {@link Client} namespace.
 */
export class Client {
    // nested helper classes, attached statically
    public static Configuration: typeof ClientConfiguration;
    public static ConfigurationBuilder: typeof ClientConfigurationBuilder;
    public static Mempool: typeof ClientMempool;
    public static MempoolStatistics: typeof ClientMempoolStatistics;
    public static Network: typeof ClientNetwork;
    public static BasicAddress: typeof ClientBasicAddress;
    public static AddressInfo: typeof ClientAddressInfo;
    public static PeerInfo: typeof ClientPeerInfo;
    public static NetworkStatistics: typeof ClientNetworkStatistics;
    public static TransactionDetails: typeof ClientTransactionDetails;
    // string-constant maps (see the matching union types in the Client namespace)
    public static TransactionState: {
        NEW: 'new';
        PENDING: 'pending';
        MINED: 'mined';
        INVALIDATED: 'invalidated';
        EXPIRED: 'expired';
        CONFIRMED: 'confirmed';
    };
    public static Feature: {
        MINING: 'MINING';
        LOCAL_HISTORY: 'LOCAL_HISTORY';
        MEMPOOL: 'MEMPOOL';
        PASSIVE: 'PASSIVE';
    };
    public static ConsensusState: {
        CONNECTING: 'connecting';
        SYNCING: 'syncing';
        ESTABLISHED: 'established';
    };
    public network: Client.Network;
    public mempool: Client.Mempool;
    constructor(config: Client.Configuration | object, consensus?: Promise<BaseConsensus>);
    public getHeadHash(): Promise<Hash>;
    public getHeadHeight(): Promise<number>;
    public getHeadBlock(includeBody?: boolean): Promise<Block>;
    public getBlock(hash: Hash | string, includeBody?: boolean): Promise<Block>;
    public getBlockAt(height: number, includeBody?: boolean): Promise<Block>;
    public getBlockTemplate(minerAddress: Address | string, extraData?: Uint8Array | string): Promise<Block>;
    public submitBlock(block: Block): Promise<boolean>;
    public getAccount(address: Address | string): Promise<Account>;
    public getAccounts(addresses: Array<Address | string>): Promise<Account[]>;
    public getTransaction(hash: Hash | string, blockHash?: Hash | string, blockHeight?: number): Promise<Client.TransactionDetails>;
    public getTransactionReceipt(hash: Hash | string): Promise<TransactionReceipt | undefined>;
    public getTransactionReceiptsByAddress(address: Address | string, limit?: number): Promise<TransactionReceipt[]>;
    public getTransactionReceiptsByHashes(hashes: Array<Hash | string>): Promise<TransactionReceipt[]>;
    public getTransactionsByAddress(address: Address | string, sinceBlockHeight?: number, knownTransactionDetails?: Client.TransactionDetails[] | ReturnType<Client.TransactionDetails["toPlain"]>[], limit?: number): Promise<Client.TransactionDetails[]>;
    public sendTransaction(tx: Transaction | object | string): Promise<Client.TransactionDetails>;
    public addBlockListener(listener: BlockListener): Promise<Handle>;
    public addConsensusChangedListener(listener: ConsensusChangedListener): Promise<Handle>;
    public addHeadChangedListener(listener: HeadChangedListener): Promise<Handle>; // fixed typo: was `listner`
    public addTransactionListener(listener: TransactionListener, addresses: Array<Address | string>): Promise<Handle>;
    public removeListener(handle: Handle): Promise<void>;
    public waitForConsensusEstablished(): Promise<void>;
    // NOTE(review): underscore-prefixed yet declared public — presumably internal; confirm before use.
    public _consensusState: Client.ConsensusState;
}
/**
 * Companion namespace for {@link Client}: string-literal union types and
 * aliases that mirror the constant maps and helper classes declared above.
 */
export namespace Client {
    type ConsensusState = ConsensusState.CONNECTING | ConsensusState.SYNCING | ConsensusState.ESTABLISHED;
    namespace ConsensusState {
        type CONNECTING = 'connecting';
        type SYNCING = 'syncing';
        type ESTABLISHED = 'established';
    }
    // aliases re-exporting the helper classes under Client.* names
    type Configuration = ClientConfiguration;
    type ConfigurationBuilder = ClientConfigurationBuilder;
    type Feature = Feature.MINING | Feature.LOCAL_HISTORY | Feature.MEMPOOL | Feature.PASSIVE;
    namespace Feature {
        type MINING = 'MINING';
        type LOCAL_HISTORY = 'LOCAL_HISTORY';
        type MEMPOOL = 'MEMPOOL';
        type PASSIVE = 'PASSIVE';
    }
    type Mempool = ClientMempool;
    type MempoolStatistics = ClientMempoolStatistics;
    type Network = ClientNetwork;
    type BasicAddress = ClientBasicAddress;
    type AddressInfo = ClientAddressInfo;
    type PeerInfo = ClientPeerInfo;
    type NetworkStatistics = ClientNetworkStatistics;
    type TransactionDetails = ClientTransactionDetails;
    type TransactionState = TransactionState.NEW | TransactionState.PENDING | TransactionState.MINED | TransactionState.INVALIDATED | TransactionState.EXPIRED | TransactionState.CONFIRMED;
    namespace TransactionState {
        type NEW = 'new';
        type PENDING = 'pending';
        type MINED = 'mined';
        type INVALIDATED = 'invalidated';
        type EXPIRED = 'expired';
        type CONFIRMED = 'confirmed';
    }
}
/** Client configuration: enabled features, storage mode and network settings. */
declare class ClientConfiguration {
    public static builder(): Client.ConfigurationBuilder;
    public features: Client.Feature[];
    public requiredBlockConfirmations: number;
    public networkConfig: NetworkConfig;
    constructor(networkConfig: NetworkConfig, features?: Client.Feature[], useVolatileStorage?: boolean, requiredBlockConfirmations?: number);
    public createConsensus(): Promise<BaseConsensus>;
    public hasFeature(feature: Client.Feature): boolean;
    public requireFeatures(...features: Client.Feature[]): void;
    public instantiateClient(): Client;
}

/** Fluent builder for ClientConfiguration; each setter returns `this` for chaining. */
declare class ClientConfigurationBuilder {
    constructor();
    public dumb(): this;
    public rtc(): this;
    public ws(host: string, port?: number): this;
    public wss(host: string, port: number, tlsKey: string, tlsCert: string): this;
    public protocol(protocol: 'dumb' | 'rtc' | 'ws' | 'wss', host: string, port: number, tlsKey: string, tlsCert: string): this;
    public volatile(volatile?: boolean): this;
    public blockConfirmations(confirmations: number): this;
    public feature(...feature: Client.Feature[]): this;
    public reverseProxy(port: number, header: string, ...addresses: string[]): this;
    public build(): Client.Configuration;
    public instantiateClient(): Client;
}
/** Network facade: peer/address queries plus connect, disconnect and ban controls. */
declare class ClientNetwork {
    constructor(client: Client);
    public getPeers(): Promise<Client.PeerInfo[]>;
    public getPeer(address: PeerAddress | Client.AddressInfo | string): Promise<Client.PeerInfo | null>;
    public getAddresses(): Promise<Client.AddressInfo[]>;
    public getAddress(address: PeerAddress | Client.AddressInfo | string): Promise<Client.AddressInfo | null>;
    public getOwnAddress(): Promise<Client.BasicAddress>;
    public getStatistics(): Promise<Client.NetworkStatistics>;
    public connect(address: PeerAddress | Client.BasicAddress | string): Promise<void>;
    public disconnect(address: PeerAddress | Client.BasicAddress | string): Promise<void>;
    public ban(address: PeerAddress | Client.BasicAddress | string): Promise<void>;
    public unban(address: PeerAddress | Client.BasicAddress | string): Promise<void>;
}
/** Basic view of a peer address; base class for the richer address/peer info types. */
declare class ClientBasicAddress {
    public peerAddress: PeerAddress;
    public peerId: PeerId;
    public services: string[];
    public netAddress: NetAddress | null;
    constructor(address: PeerAddress);
    public toPlain(): {
        peerAddress: string,
        peerId: string,
        services: string[],
        netAddress: {
            ip: Uint8Array,
            reliable: boolean,
        } | null,
    };
}

/** Address-book entry: basic address plus ban/connection status flags. */
declare class ClientAddressInfo extends ClientBasicAddress {
    public banned: boolean;
    public connected: boolean;
    public state: number;
    constructor(addressState: PeerAddressState);
    public toPlain(): {
        peerAddress: string,
        peerId: string,
        services: string[],
        banned: boolean,
        connected: boolean,
    };
}

/** Per-connection peer details: traffic counters, latency, version, head hash, etc. */
declare class ClientPeerInfo extends ClientBasicAddress {
    public connectionSince: number;
    public netAddress: NetAddress;
    public bytesReceived: number;
    public bytesSent: number;
    public latency: number;
    public version: number;
    public state: number;
    public timeOffset: number;
    public headHash: Hash;
    public userAgent: string;
    constructor(connection: PeerConnection);
    public toPlain(): {
        peerAddress: string,
        peerId: string,
        services: string[],
        connectionSince: number,
        netAddress: string,
        bytesReceived: number,
        bytesSent: number,
        latency: number,
        version: number,
        state: number,
        timeOffset: number,
        headHash: string,
        userAgent: string,
    };
}
/** Snapshot of network-wide counters: traffic, peer/address counts by type, time offset. */
declare class ClientNetworkStatistics {
    public bytesReceived: number;
    public bytesSent: number;
    public totalPeerCount: number;
    public peerCountsByType: {
        total: number,
        connecting: number,
        dumb: number,
        rtc: number,
        ws: number,
        wss: number,
    };
    public totalKnownAddresses: number;
    public knownAddressesByType: {
        total: number,
        rtc: number,
        ws: number,
        wss: number,
    };
    public timeOffset: number;
    constructor(network: Network);
    public toPlain(): {
        bytesReceived: number,
        bytesSent: number,
        totalPeerCount: number,
        peerCountsByType: {
            total: number,
            connecting: number,
            dumb: number,
            rtc: number,
            ws: number,
            wss: number,
        },
        totalKnownAddresses: number,
        knownAddressesByType: {
            total: number,
            rtc: number,
            ws: number,
            wss: number,
        },
        timeOffset: number,
    };
}
/** Mempool facade: pending-transaction queries and added/removed listener registration. */
declare class ClientMempool {
    constructor(client: Client);
    public getTransactions(): Promise<Hash[]>;
    public getStatistics(): Promise<Client.MempoolStatistics>;
    public addTransactionAddedListener(listener: MempoolListener): Promise<Handle>;
    public addTransactionRemovedListener(listener: MempoolListener): Promise<Handle>;
    public removeListener(handle: Handle): void;
}

/** Summary statistics computed from the mempool contents. */
declare class ClientMempoolStatistics {
    public count: number;
    public size: number;
    public requiredFeePerByte: number;
    public countInBuckets: {buckets: []} | any;
    public sizeInBuckets: {buckets: []} | any;
    constructor(mempoolContents: Transaction[]);
}
/**
 * Details of a transaction as tracked by the client, including its lifecycle
 * state. blockHash/blockHeight/confirmations/timestamp are optional at
 * construction — presumably populated once the transaction is mined; confirm.
 */
declare class ClientTransactionDetails {
    public static fromPlain(o: object): Client.TransactionDetails;
    public transactionHash: Hash;
    public format: Transaction.Format;
    public sender: Address;
    public senderType: Account.Type;
    public recipient: Address;
    public recipientType: Account.Type;
    public value: number;
    public fee: number;
    public feePerByte: number;
    public validityStartHeight: number;
    public network: number;
    public flags: number;
    public data: {raw: Uint8Array};
    public proof: {raw: Uint8Array};
    public size: number;
    public valid: boolean;
    public transaction: Transaction;
    public state: Client.TransactionState;
    public blockHash: Hash;
    public blockHeight: number;
    public confirmations: number;
    public timestamp: number;
    constructor(
        transaction: Transaction,
        state: Client.TransactionState,
        blockHash?: Hash,
        blockHeight?: number,
        confirmations?: number,
        timestamp?: number,
    );
    // JSON-friendly representation; inverse of the static fromPlain above
    public toPlain(): {
        transactionHash: string,
        format: string;
        sender: string;
        senderType: string;
        recipient: string;
        recipientType: string;
        value: number;
        fee: number;
        feePerByte: number;
        validityStartHeight: number;
        network: string;
        flags: number;
        data: {raw: string};
        proof: {
            raw: string,
            signature?: string,
            publicKey?: string,
            signer?: string,
            pathLength?: number,
        };
        size: number;
        valid: boolean;
        state: Client.TransactionState;
        blockHash?: string;
        blockHeight?: number;
        confirmations?: number;
        timestamp?: number;
    };
}
/** Platform-specific logging backend used by {@link Log}. */
export class LogNative {
    constructor()
    public isLoggable(tag: string, level: number): boolean;
    public setLoggable(tag: string, level: number): void;
    public msg(level: number, tag: string | { name: string }, args: any[]): void;
}

/** Logging front-end: level filtering plus per-level convenience methods (d/e/i/v/w/t). */
export class Log {
    public static instance: Log;
    // static shorthands for the numeric levels below
    public static TRACE: Log.Level.TRACE;
    public static VERBOSE: Log.Level.VERBOSE;
    public static DEBUG: Log.Level.DEBUG;
    public static INFO: Log.Level.INFO;
    public static WARNING: Log.Level.WARNING;
    public static ERROR: Log.Level.ERROR;
    public static ASSERT: Log.Level.ASSERT;
    public static Level: {
        TRACE: 1;
        VERBOSE: 2;
        DEBUG: 3;
        INFO: 4;
        WARNING: 5;
        ERROR: 6;
        ASSERT: 7;
        toStringTag(level: Log.Level): string;
        toString(level: Log.Level): string;
        get(v: string | number | Log.Level): Log.Level;
    };
    public level: Log.Level;
    constructor(native: LogNative);
    public setLoggable(tag: string, level: Log.Level): void;
    public msg(level: Log.Level, tag: string | { name: string }, args: any[]): void;
    public d(tag: string | { name: string }, message: string | (() => string), args: any[]): void;
    public e(tag: string | { name: string }, message: string | (() => string), args: any[]): void;
    public i(tag: string | { name: string }, message: string | (() => string), args: any[]): void;
    public v(tag: string | { name: string }, message: string | (() => string), args: any[]): void;
    public w(tag: string | { name: string }, message: string | (() => string), args: any[]): void;
    public t(tag: string | { name: string }, message: string | (() => string), args: any[]): void;
}

/** Numeric log-level union (1 = TRACE … 7 = ASSERT) mirroring Log.Level constants. */
export namespace Log {
    type Level = Level.TRACE | Level.VERBOSE | Level.DEBUG | Level.INFO | Level.WARNING | Level.ERROR | Level.ASSERT;
    namespace Level {
        type TRACE = 1;
        type VERBOSE = 2;
        type DEBUG = 3;
        type INFO = 4;
        type WARNING = 5;
        type ERROR = 6;
        type ASSERT = 7;
    }
}
export class Observable {
public on(type: string, callback: (...args: any[]) => any): number;
public off(type: string, id: number): void;
public fire(type: string, ...args: any[]): (Promise<any> | null);
}
    /**
     * Abstract chunked message channel. Concrete subclasses implement `sendChunk`;
     * `send` splits messages (up to MESSAGE_SIZE_MAX) into CHUNK_SIZE_MAX chunks.
     */
    export abstract class DataChannel extends Observable {
        public static CHUNK_SIZE_MAX: 16384; // 16 kb
        public static MESSAGE_SIZE_MAX: 10485760; // 10 mb
        public static CHUNK_TIMEOUT: 5000; // 5 seconds
        // NOTE(review): no unit comment unlike the constants above; 3200000 ms ≈ 53 min,
        // which looks inconsistent — possibly intended 320000 (~5 min). Confirm in implementation.
        public static MESSAGE_TIMEOUT: 3200000;
        public static ReadyState: {
            CONNECTING: 0;
            OPEN: 1;
            CLOSING: 2;
            CLOSED: 3;
            fromString(str: string): DataChannel.ReadyState;
        };
        public abstract readyState: DataChannel.ReadyState;
        public lastMessageReceivedAt: number;
        constructor();
        public isExpectingMessage(type: Message.Type): boolean;
        public confirmExpectedMessage(type: Message.Type, success: boolean): void;
        public expectMessage(types: Message.Type | Message.Type[], timeoutCallback: () => any, msgTimeout?: number, chunkTimeout?: number): void;
        public close(): void;
        public send(msg: Uint8Array): void;
        public abstract sendChunk(msg: Uint8Array): void;
    }
    /** Companion namespace for {@link DataChannel}: numeric ready states mirroring the static `ReadyState` map. */
    export namespace DataChannel {
        type ReadyState = ReadyState.CONNECTING | ReadyState.OPEN | ReadyState.CLOSING | ReadyState.CLOSED;
        namespace ReadyState {
            type CONNECTING = 0;
            type OPEN = 1;
            type CLOSING = 2;
            type CLOSED = 3;
        }
    }
    /** Value object pairing awaited message types with a timeout callback and timeouts (used by DataChannel.expectMessage). */
    export class ExpectedMessage {
        constructor(
            types: Message.Type[],
            timeoutCallback: () => any,
            msgTimeout: number,
            chunkTimeout: number,
        );
    }
    /** Platform abstraction exposing a crypto RNG with the WebCrypto `getRandomValues` shape. */
    export class CryptoLib {
        public static instance: { getRandomValues(buf: Uint8Array): Uint8Array };
    }
    /** Platform factory for WebRTC primitives (peer connections, session descriptions, ICE candidates). */
    export class WebRtcFactory {
        public static newPeerConnection(configuration?: RTCConfiguration): RTCPeerConnection;
        public static newSessionDescription(rtcSessionDescriptionInit: any): RTCSessionDescription;
        public static newIceCandidate(rtcIceCandidateInit: any): RTCIceCandidate;
    }
export class WebSocketFactory {
public static newWebSocketServer(networkConfig: WsNetworkConfig | WssNetworkConfig): any;
public static newWebSocket(url: string, [options]: any): WebSocket;
}
    /** WebSocket server with upgrade/TLS timeouts and per-IP / per-subnet rate-limit constants. */
    export class WebSocketServer {
        public static UPGRADE_TIMEOUT: 3000; // 3 seconds
        public static TLS_HANDSHAKE_TIMEOUT: 3000; // 3 seconds
        public static PAYLOAD_MAX: number;
        public static PENDING_UPGRADES_MAX: 1000;
        public static PENDING_UPGRADES_PER_IP_MAX: 2;
        public static PENDING_UPGRADES_PER_SUBNET_MAX: 6;
        public static CONNECTION_RATE_LIMIT_PER_IP: 10; // per minute
        public static CONNECTION_RATE_LIMIT_PER_SUBNET: 30; // per minute
        public static LIMIT_TRACKING_AGE_MAX: 120000; // 2 minutes
        public static HOUSEKEEPING_INTERVAL: 300000; // 5 minutes
        constructor(
            networkConfig: WsNetworkConfig | WssNetworkConfig,
        );
    }
    /** Singleton registry for named numeric constants that can be overridden and reset (e.g. for testing). */
    export class ConstantHelper {
        public static instance: ConstantHelper;
        constructor();
        public isConstant(constant: string): boolean;
        public get(constant: string): number;
        public set(constant: string, value: number): void;
        public reset(constant: string): void;
        public resetAll(): void;
    }
    /**
     * Bitfield of node service flags (legacy NANO/LIGHT/FULL plus current provide/accept flags),
     * with predicates for node type and conversion helpers. Instances carry a provided and an
     * accepted flag set.
     */
    export class Services {
        public static NONE: 0;
        public static FLAG_NANO: 1;
        public static FLAG_LIGHT: 2;
        public static FLAG_FULL: 4;
        public static ALL_LEGACY: 7;
        public static FULL_BLOCKS: number;
        public static BLOCK_HISTORY: number;
        public static BLOCK_PROOF: number;
        public static CHAIN_PROOF: number;
        public static ACCOUNTS_PROOF: number;
        public static ACCOUNTS_CHUNKS: number;
        public static MEMPOOL: number;
        public static TRANSACTION_INDEX: number;
        public static BODY_PROOF: number;
        public static ALL_CURRENT: number;
        public static NAMES: {[name: number]: string};
        public static PROVIDES_FULL: number;
        public static PROVIDES_LIGHT: number;
        public static PROVIDES_NANO: number;
        public static PROVIDES_PICO: number;
        public static ACCEPTS_FULL: number;
        public static ACCEPTS_LIGHT: number;
        public static ACCEPTS_NANO: number;
        public static ACCEPTS_PICO: number;
        public static ACCEPTS_SPV: number;
        public static isFullNode(services: number): boolean;
        public static isLightNode(services: number): boolean;
        public static isNanoNode(services: number): boolean;
        public static providesServices(flags: number, ...services: number[]): boolean;
        public static legacyProvideToCurrent(flags: number): number;
        public static toNameArray(flags: number): string[];
        public provided: number;
        public accepted: number;
        constructor(provided?: number, accepted?: number);
    }
    /** Keyed timeout/interval manager: timers are addressed by an arbitrary key instead of a handle. */
    export class Timers {
        constructor();
        public setTimeout(key: any, fn: () => any, waitTime: number): void;
        public clearTimeout(key: any): void;
        // NOTE(review): "Timout" is misspelled. If the runtime method is actually named
        // `resetTimeout`, this declaration is wrong and should be renamed — confirm
        // against the implementation before changing (renaming here affects type-checking only).
        public resetTimout(key: any, fn: () => any, waitTime: number): void;
        public timeoutExists(key: any): boolean;
        public setInterval(key: any, fn: () => any, intervalTime: number): void;
        public clearInterval(key: any): void;
        public resetInterval(key: any, fn: () => any, intervalTime: number): void;
        public intervalExists(key: any): boolean;
        public clearAll(): void;
    }
    /** Protocol/library version constants plus compatibility check and user-agent builder. */
    export class Version {
        public static CODE: 2;
        public static CORE_JS_VERSION: string;
        public static isCompatible(code: number): boolean;
        public static createUserAgent(appAgent?: string): string;
    }
    /** Clock with a configurable offset; `now()` returns the offset-adjusted current time. */
    export class Time {
        public offset: number;
        constructor(offset?: number);
        public now(): number;
    }
    /** Helpers that resolve after yielding control back to the event loop. */
    export class EventLoopHelper {
        public static webYield(): Promise<void>;
        public static yield(): Promise<void>;
    }
    /** Iterator helpers; `alternate` interleaves elements from multiple iterators. */
    export class IteratorUtils {
        public static alternate<T>(...iterators: Array<Iterator<T>>): Iterable<T>;
    }
    /** Array helpers: random element pick, typed-array subarray, and k-combinations generator. */
    export class ArrayUtils {
        public static randomElement(arr: any[]): any;
        public static subarray(uintarr: Uint8Array, begin?: number, end?: number): Uint8Array;
        public static k_combinations(list: any[], k: number): Generator;
    }
    /** Hash map keyed by a custom hash function (`fnHash`), with array and iterator views of keys/values/entries. */
    export class HashMap<K, V> {
        public length: number;
        constructor(fnHash?: (o: object) => string);
        public get(key: K): V | undefined;
        public put(key: K, value: V): void;
        public remove(key: K): void;
        public clear(): void;
        public contains(key: K): boolean;
        public keys(): K[];
        public keyIterator(): Iterator<K>;
        public values(): V[];
        public valueIterator(): Iterator<V>;
        public entries(): Array<[K, V]>;
        public entryIterator(): Iterator<[K, V]>;
        public isEmpty(): boolean;
    }
    /** Hash set using a custom hash function; `get` returns the stored element equal to the query. */
    export class HashSet<V> {
        public [Symbol.iterator]: Iterator<V>;
        public length: number;
        constructor(fnHash?: (o: object) => string);
        public add(value: V): void;
        public addAll(collection: Iterable<V>): void;
        public get(value: V): V | undefined;
        public remove(value: V): void;
        public removeAll(collection: V[]): void;
        public clear(): void;
        public contains(value: V): boolean;
        public values(): V[];
        public valueIterator(): Iterator<V>;
        public isEmpty(): boolean;
    }
    /** Hash set bounded to `limit` elements (untyped variant of HashSet). */
    export class LimitHashSet {
        public [Symbol.iterator]: Iterator<any>;
        public length: number;
        constructor(limit: number, fnHash?: (o: object) => string);
        public add(value: any): void;
        public addAll(collection: Iterable<any>): void;
        public get(value: any): any;
        public remove(value: any): void;
        public removeAll(collection: any[]): void;
        public clear(): void;
        public contains(value: any): boolean;
        public values(): any[];
        public valueIterator(): Iterator<any>;
        public isEmpty(): boolean;
    }
    /** Membership-only set: stores hash strings rather than elements (values()/iteration yield strings). */
    export class InclusionHashSet<V> {
        public [Symbol.iterator]: Iterator<string>;
        public length: number;
        constructor(fnHash?: (o: object) => string);
        public add(value: V): void;
        public addAll(collection: Iterable<V>): void;
        public remove(value: V): void;
        public removeAll(collection: V[]): void;
        public clear(): void;
        public contains(value: V): boolean;
        public values(): string[];
        public valueIterator(): Iterator<string>;
        public isEmpty(): boolean;
        public clone(): InclusionHashSet<V>;
    }
    /** Membership-only set bounded to `limit` entries; clonable. */
    export class LimitInclusionHashSet {
        public [Symbol.iterator]: Iterator<any>;
        public length: number;
        constructor(limit: number, fnHash?: (o: object) => string);
        public add(value: any): void;
        public addAll(collection: Iterable<any>): void;
        public remove(value: any): void;
        public removeAll(collection: any[]): void;
        public clear(): void;
        public contains(value: any): boolean;
        public values(): any[];
        public valueIterator(): Iterator<any>;
        public isEmpty(): boolean;
        public clone(): LimitInclusionHashSet;
    }
    /** Iterable wrapper yielding at most `limit` elements from an underlying iterable/iterator. */
    export class LimitIterable<T> {
        public static iterator<V>(iterator: Iterator<V>, limit: number): { next: () => object };
        constructor(it: Iterable<T> | Iterator<T>, limit: number);
        public [Symbol.iterator](): { next: () => object };
    }
    /** Doubly-ended linked list supporting push/unshift/pop/shift and iteration. */
    export class LinkedList {
        public first: any;
        public last: any;
        public length: number;
        constructor(...args: any[]);
        public push(value: any): void;
        public unshift(value: any): void;
        public pop(): any;
        public shift(): any;
        public clear(): void;
        public [Symbol.iterator](): Iterator<any>;
        public iterator(): Iterator<any>;
        public isEmpty(): boolean;
    }
    /** LinkedList with uniqueness enforced via `fnHash`; `push` can optionally move an existing entry to the back. */
    export class UniqueLinkedList extends LinkedList {
        constructor(fnHash: (o: object) => string);
        public push(value: any, moveBack?: boolean): void;
        public unshift(value: any): void;
        public pop(): any;
        public shift(): any;
        public clear(): void;
        public get(value: any): any;
        public contains(value: any): boolean;
        public remove(value: any): void;
        public moveBack(value: any): void;
    }
    /** FIFO queue with bulk enqueue/dequeue and peek. */
    export class Queue {
        public length: number;
        constructor(...args: any[]);
        public enqueue(value: any): void;
        public enqueueAll(values: any[]): void;
        public dequeue(): any;
        public dequeueMulti(count: number): any[];
        public peek(): any;
        public clear(): void;
        public isEmpty(): boolean;
    }
    /** Queue with uniqueness enforced via `fnHash`; `requeue` re-inserts an existing value. */
    export class UniqueQueue extends Queue {
        constructor(fnHash: (o: object) => string);
        public contains(value: any): boolean;
        public remove(value: any): void;
        public requeue(value: any): void;
    }
    /** UniqueQueue that rations dequeues: at most `maxAtOnce`, replenished by `allowanceNum` every `allowanceInterval`. */
    export class ThrottledQueue extends UniqueQueue {
        public available: number;
        constructor(
            maxAtOnce?: number,
            allowanceNum?: number,
            allowanceInterval?: number,
            maxSize?: number,
            allowanceCallback?: () => any,
        );
        public stop(): void;
        public enqueue(value: any): void;
        public dequeue(): any;
        public dequeueMulti(count: number): any[];
        public isAvailable(): boolean;
    }
export class SortedList {
public length: number;
constructor([sortedList]: any[], compare?: (a: any, b: any) => -1 | 0 | 1);
public indexOf(o: any): number;
public add(value: any): void;
public shift(): any;
public pop(): any;
public peekFirst(): any;
public peekLast(): any;
public remove(value: any): void;
public clear(): void;
public values(): any[];
public [Symbol.iterator](): Iterator<any>;
public copy(): SortedList;
}
    /** Assertion helper: `that` throws when `condition` is false (message optional). */
    export class Assert {
        public static that(condition: boolean, message?: string): void;
    }
    /** Crypto primitives: HMAC-SHA512, PBKDF2-SHA512, and (legacy and current) one-time-pad KDFs. */
    export class CryptoUtils {
        public static SHA512_BLOCK_SIZE: 128;
        public static computeHmacSha512(key: Uint8Array, data: Uint8Array): Uint8Array;
        public static computePBKDF2sha512(password: Uint8Array, salt: Uint8Array, iterations: number, derivedKeyLength: number): SerialBuffer;
        public static otpKdfLegacy(message: Uint8Array, key: Uint8Array, salt: Uint8Array, iterations: number): Promise<Uint8Array>;
        public static otpKdf(message: Uint8Array, key: Uint8Array, salt: Uint8Array, iterations: number): Promise<Uint8Array>;
    }
    /** Byte-buffer codecs (ascii/base64/base64url/base32/hex/binary/utf8) and typed-array concat/equals/compare/xor. */
    export class BufferUtils {
        public static BASE64_ALPHABET: string;
        public static BASE32_ALPHABET: {
            RFC4648: string;
            RFC4648_HEX: string;
            NIMIQ: string;
        };
        public static HEX_ALPHABET: string;
        public static toAscii(buffer: Uint8Array): string;
        public static fromAscii(string: string): SerialBuffer;
        public static toBase64(buffer: Uint8Array): string;
        public static fromBase64(base64: string, length?: number): SerialBuffer;
        public static toBase64Url(buffer: Uint8Array): string;
        public static fromBase64Url(base64: string, length?: number): SerialBuffer;
        public static toBase32(buf: Uint8Array, alphabet?: string): string;
        public static fromBase32(base32: string, alphabet?: string): Uint8Array;
        public static toHex(buffer: Uint8Array): string;
        public static fromHex(hex: string, length?: number): SerialBuffer;
        public static toBinary(buffer: Uint8Array): string;
        public static fromUtf8(str: string): Uint8Array;
        public static fromAny(o: Uint8Array | string, length?: number): SerialBuffer;
        public static concatTypedArrays(a: Uint8Array | Uint16Array | Uint32Array, b: Uint8Array | Uint16Array | Uint32Array): Uint8Array | Uint16Array | Uint32Array;
        public static equals(a: Uint8Array | Uint16Array | Uint32Array, b: Uint8Array | Uint16Array | Uint32Array): boolean;
        public static compare(a: Uint8Array | Uint16Array | Uint32Array, b: Uint8Array | Uint16Array | Uint32Array): -1 | 0 | 1;
        public static xor(a: Uint8Array, b: Uint8Array): Uint8Array;
    }
    /**
     * Uint8Array with independent read/write cursors and typed accessors
     * (uint8/16/32/64, varuint, float64, fixed/padded/var-length strings).
     */
    export class SerialBuffer extends Uint8Array {
        public static EMPTY: SerialBuffer;
        public static varUintSize(value: number): number;
        public static varLengthStringSize(value: string): number;
        public readPos: number;
        public writePos: number;
        // NOTE(review): missing trailing `;` here (valid via newline separation, but
        // inconsistent with the rest of the file).
        constructor(bufferOrArrayOrLength: any)
        public subarray(start?: number, end?: number): Uint8Array;
        public reset(): void;
        public read(length: number): Uint8Array;
        public write(array: any): void;
        public readUint8(): number;
        public writeUint8(value: number): void;
        public readUint16(): number;
        public writeUint16(value: number): void;
        public readUint32(): number;
        public writeUint32(value: number): void;
        public readUint64(): number;
        public writeUint64(value: number): void;
        public readVarUint(): number;
        public writeVarUint(value: number): void;
        public readFloat64(): number;
        public writeFloat64(value: number): void;
        public readString(length: number): string;
        public writeString(value: string, length: number): void;
        public readPaddedString(length: number): string;
        public writePaddedString(value: string, length: number): void;
        public readVarLengthString(): string;
        public writeVarLengthString(value: string): void;
    }
    /** Serializes async jobs (`push`) with throttling after `throttleAfter` jobs; exposes work statistics. */
    export class Synchronizer extends Observable {
        public working: boolean;
        public length: number;
        public totalElapsed: number;
        public totalJobs: number;
        public totalThrottles: number;
        constructor(throttleAfter: number, throttleWait: number);
        public push<T>(fn: () => T): Promise<T>;
        public clear(): void;
    }
    /** Synchronizer variant that serializes jobs per string tag. */
    export class MultiSynchronizer extends Observable {
        constructor(throttleAfter: number, throttleWait: number);
        public push<T>(tag: string, fn: () => T): Promise<T>;
        public clear(): void;
        public isWorking(tag: string): boolean;
    }
    /** Synchronizer variant with `numPriorities` priority levels; jobs are pushed with an explicit priority. */
    export class PrioritySynchronizer extends Observable {
        public working: boolean;
        public length: number;
        public totalElapsed: number;
        public totalJobs: number;
        public totalThrottles: number;
        constructor(
            numPriorities: number,
            throttleAfter?: number,
            throttleWait?: number,
        );
        public push<T>(priority: number, fn: () => T): Promise<T>;
        public clear(): void;
    }
    /** Sliding-window rate limiter; `note` records occurrences and reports whether the limit still holds. */
    export class RateLimit {
        public lastReset: number;
        constructor(allowedOccurrences: number, timeRange?: number);
        public note(number?: number): boolean;
    }
    /** Worker proxy machinery: builds proxies over Web Workers and prepares classes for worker-side use. */
    export class IWorker {
        public static areWorkersAsync: boolean;
        public static createProxy(clazz: any, name: string, worker: Worker): IWorker.Proxy;
        public static startWorkerForProxy(clazz: any, name: string, workerScript: string): IWorker.Proxy;
        public static stubBaseOnMessage(msg: { data: { command: string, args: any[], id: number | string } }): Promise<void>;
        public static prepareForWorkerUse(baseClazz: any, impl: any): void;
    }
    /** Companion namespace for {@link IWorker}: the Proxy type and Stub/Pool mixin factories. */
    export namespace IWorker {
        type Proxy = (clazz: any) => any;
        function Stub(clazz: any): any;
        function Pool(clazz: any): any;
    }
    /** Loads WebAssembly modules/scripts and signals module-load completion. */
    export class WasmHelper {
        public static doImport(): Promise<void>;
        public static importWasm(wasm: string, module?: string): Promise<boolean>;
        public static importScript(script: string, module?: string): Promise<boolean>;
        public static fireModuleLoaded(module?: string): void;
    }
    /** Async facade over the crypto worker: Argon2d hashing (single/batch), KDFs, and block verification. */
    export class CryptoWorker {
        public static lib: CryptoLib;
        public static getInstanceAsync(): Promise<CryptoWorkerImpl>;
        public computeArgon2d(input: Uint8Array): Promise<Uint8Array>;
        public computeArgon2dBatch(input: Uint8Array[]): Promise<Uint8Array[]>;
        public kdfLegacy(key: Uint8Array, salt: Uint8Array, iterations: number, outputSize: number): Promise<Uint8Array>;
        public kdf(key: Uint8Array, salt: Uint8Array, iterations: number, outputSize: number): Promise<Uint8Array>;
        public blockVerify(block: Uint8Array, transactionValid: boolean[], timeNow: number, genesisHash: Uint8Array, networkId: number): Promise<{ valid: boolean, pow: SerialBuffer, interlinkHash: SerialBuffer, bodyHash: SerialBuffer }>;
    }
    /** Worker-side implementation of {@link CryptoWorker}; hash/KDF methods here are synchronous. */
    export class CryptoWorkerImpl extends IWorker.Stub(CryptoWorker) {
        constructor();
        public init(name: string): Promise<void>;
        public computeArgon2d(input: Uint8Array): Uint8Array;
        public computeArgon2dBatch(input: Uint8Array[]): Uint8Array[];
        public kdfLegacy(key: Uint8Array, salt: Uint8Array, iterations: number, outputSize: number): Uint8Array;
        public kdf(key: Uint8Array, salt: Uint8Array, iterations: number, outputSize: number): Uint8Array;
        public blockVerify(block: Uint8Array, transactionValid: boolean[], timeNow: number, genesisHash: Uint8Array, networkId: number): Promise<{ valid: boolean, pow: SerialBuffer, interlinkHash: SerialBuffer, bodyHash: SerialBuffer }>;
    }
    /** CRC-8 checksum over a byte buffer. */
    export class CRC8 {
        public static compute(buf: Uint8Array): number;
    }
    /** CRC-32 checksum over a byte buffer. */
    export class CRC32 {
        public static compute(buf: Uint8Array): number;
    }
export class BigNumber {
constructor(n: number | string | BigNumber, b: number);
public CloseEvent(configObject: any): BigNumber;
public config(obj: any): any;
public set(obj: any): any;
public isBigNumber(v: any): boolean;
public maximum(...args: BigNumber[]): BigNumber;
public max(...args: BigNumber[]): BigNumber;
public minimum(...args: BigNumber[]): BigNumber;
public min(...args: BigNumber[]): BigNumber;
public random(db: number): BigNumber;
}
    /** Unsigned-integer range constants, range predicates, random generators, and binary-string parsing. */
    export class NumberUtils {
        public static UINT8_MAX: 255;
        public static UINT16_MAX: 65535;
        public static UINT32_MAX: 4294967295;
        public static UINT64_MAX: number;
        public static isUint8(val: number): boolean;
        public static isUint16(val: number): boolean;
        public static isUint32(val: number): boolean;
        public static isUint64(val: number): boolean;
        public static randomUint32(): number;
        public static randomUint64(): number;
        public static fromBinary(bin: string): number;
    }
    /** Computes a Merkle root over arbitrary values using an optional hash function. */
    export class MerkleTree {
        public static computeRoot(values: any[], fnHash?: (o: any) => Hash): Hash;
    }
    /** Authentication path for a single Merkle leaf; serializable and root-recomputable. */
    export class MerklePath {
        public static compute(values: any[], leafValue: any, fnHash?: (o: any) => Hash): MerklePath;
        public static unserialize(buf: SerialBuffer): MerklePath;
        public serializedSize: number;
        public nodes: MerklePathNode[];
        constructor(nodes: MerklePathNode[]);
        public computeRoot(leafValue: any, fnHash?: (o: any) => Hash): Hash;
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public equals(o: MerklePath): boolean;
    }
    /** One step of a MerklePath: a sibling hash plus which side it sits on. */
    export class MerklePathNode {
        public hash: Hash;
        public left: boolean;
        constructor(hash: Hash, left: boolean);
        public equals(o: MerklePathNode): boolean;
    }
    /** Multi-leaf Merkle proof encoded as hashes plus an operation program (consume-proof / consume-input / hash). */
    export class MerkleProof {
        public static Operation: {
            CONSUME_PROOF: 0;
            CONSUME_INPUT: 1;
            HASH: 2;
        };
        public static compute(values: any[], leafValues: any[], fnHash?: (o: any) => Hash): MerkleProof;
        public static computeWithAbsence(values: any[], leafValues: any[], fnCompare: (a: any, b: any) => number, fnHash?: (o: any) => Hash): MerkleProof;
        public static unserialize(buf: SerialBuffer): MerkleProof;
        public serializedSize: number;
        public nodes: Hash[];
        constructor(hashes: any[], operations: MerkleProof.Operation[]);
        public computeRoot(leafValues: any[], fnHash?: (o: any) => Hash): Hash;
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public equals(o: MerkleProof): boolean;
    }
    /** Companion namespace for {@link MerkleProof}: numeric operation codes. */
    export namespace MerkleProof {
        type Operation = Operation.CONSUME_PROOF | Operation.CONSUME_INPUT | Operation.HASH;
        namespace Operation {
            type CONSUME_PROOF = 0;
            type CONSUME_INPUT = 1;
            type HASH = 2;
        }
    }
    /** Runtime environment probes (browser vs. Node.js, WebRTC/WS support, online state, OS). */
    export class PlatformUtils {
        public static readonly userAgentString: string;
        public static readonly hardwareConcurrency: number;
        public static isBrowser(): boolean;
        public static isWeb(): boolean;
        public static isNodeJs(): boolean;
        public static supportsWebRTC(): boolean;
        public static supportsWS(): boolean;
        public static isOnline(): boolean;
        public static isWindows(): boolean;
    }
    /** String helpers: multibyte/hex predicates, common prefix, and left padding. */
    export class StringUtils {
        public static isMultibyte(str: string): boolean;
        public static isHex(str: string): boolean;
        public static isHexBytes(str: string, length?: number): boolean;
        public static commonPrefix(str1: string, str2: string): string;
        public static lpad(str: string, padString: string, length: number): string;
    }
    /** Consensus policy constants (block timing/size, difficulty, supply/emission) and coin-unit conversions. */
    export class Policy {
        public static BLOCK_TIME: 60;
        public static BLOCK_SIZE_MAX: 1e5;
        public static BLOCK_TARGET_MAX: BigNumber;
        public static DIFFICULTY_BLOCK_WINDOW: 120;
        public static DIFFICULTY_MAX_ADJUSTMENT_FACTOR: 2;
        public static TRANSACTION_VALIDITY_WINDOW: 120;
        public static LUNAS_PER_COIN: 1e5;
        public static SATOSHIS_PER_COIN: 1e5;
        public static TOTAL_SUPPLY: 21e14;
        public static INITIAL_SUPPLY: 252000000000000;
        public static EMISSION_SPEED: number;
        public static EMISSION_TAIL_START: 48692960;
        public static EMISSION_TAIL_REWARD: 4000;
        public static NUM_BLOCKS_VERIFICATION: 250;
        public static coinsToLunas(coins: number): number;
        public static coinsToSatoshis(coins: number): number;
        public static lunasToCoins(lunas: number): number;
        public static satoshisToCoins(satoshis: number): number;
        public static supplyAfter(blockHeight: number): number;
        public static blockRewardAt(blockHeight: number): number;
    }
    /** Base contract for serializable values: equality/ordering, hash code, and buffer/base64/hex output. */
    export abstract class Serializable {
        public equals(o: Serializable): boolean;
        public compare(o: Serializable): number;
        public hashCode(): string;
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public toString(): string;
        public toBase64(): string;
        public toHex(): string;
    }
    /**
     * Cryptographic hash value with algorithm tag (BLAKE2b, Argon2d, SHA-256, SHA-512).
     * `light`/`hard` are aliases for blake2b/argon2d per the adjacent declarations;
     * static constructors cover computation, parsing, and unserialization.
     */
    export class Hash extends Serializable {
        public static SIZE: Map<Hash.Algorithm, number>;
        public static NULL: Hash;
        public static Algorithm: {
            BLAKE2B: 1;
            ARGON2D: 2;
            SHA256: 3;
            SHA512: 4;
            toString(hashAlgorithm: Hash.Algorithm): string;
        };
        public static light(arr: Uint8Array): Hash;
        public static blake2b(arr: Uint8Array): Hash;
        public static hard(arr: Uint8Array): Promise<Hash>;
        public static argon2d(arr: Uint8Array): Promise<Hash>;
        public static sha256(arr: Uint8Array): Hash;
        public static sha512(arr: Uint8Array): Hash;
        public static compute(arr: Uint8Array, algorithm: Hash.Algorithm.BLAKE2B | Hash.Algorithm.SHA256): Hash;
        public static unserialize(buf: SerialBuffer, algorithm?: Hash.Algorithm): Hash;
        public static fromAny(hash: Hash | Uint8Array | string, algorithm?: Hash.Algorithm): Hash;
        public static fromBase64(base64: string): Hash;
        public static fromHex(hex: string): Hash;
        public static fromPlain(str: string): Hash;
        public static fromString(str: string): Hash;
        public static isHash(o: any): boolean;
        public static getSize(algorithm: Hash.Algorithm): number;
        public static computeBlake2b(input: Uint8Array): Uint8Array;
        public static computeSha256(input: Uint8Array): Uint8Array;
        public static computeSha512(input: Uint8Array): Uint8Array;
        public serializedSize: number;
        public array: Uint8Array;
        public algorithm: Hash.Algorithm;
        constructor(arg?: Uint8Array, algorithm?: Hash.Algorithm);
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public subarray(begin?: number, end?: number): Uint8Array;
        public toPlain(): string;
        public equals(o: Serializable): boolean;
    }
    /** Companion namespace for {@link Hash}: numeric algorithm identifiers. */
    export namespace Hash {
        type Algorithm = Algorithm.BLAKE2B | Algorithm.ARGON2D | Algorithm.SHA256 | Algorithm.SHA512;
        namespace Algorithm {
            type BLAKE2B = 1;
            type ARGON2D = 2;
            type SHA256 = 3;
            type SHA512 = 4;
        }
    }
    /** 32-byte Ed25519-style private key (a Secret); `overwrite` replaces the key material in place. */
    export class PrivateKey extends Secret {
        public static SIZE: 32;
        public static PURPOSE_ID: number;
        public static generate(): PrivateKey;
        public static unserialize(buf: SerialBuffer): PrivateKey;
        public serializedSize: number;
        constructor(arg: Uint8Array);
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public overwrite(privateKey: PrivateKey): void;
        public equals(o: any): boolean;
    }
    /** 32-byte public key; derivable from a PrivateKey, summable, and convertible to Address/PeerId. */
    export class PublicKey extends Serializable {
        public static SIZE: 32;
        public static copy(o: PublicKey): PublicKey;
        public static derive(privateKey: PrivateKey): PublicKey;
        public static sum(publicKeys: PublicKey[]): PublicKey;
        public static unserialize(buf: SerialBuffer): PublicKey;
        public static fromAny(o: PublicKey | Uint8Array | string): PublicKey;
        public serializedSize: number;
        constructor(arg: Uint8Array);
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public equals(o: any): boolean;
        public hash(): Hash;
        public compare(o: PublicKey): number;
        public toAddress(): Address;
        public toPeerId(): PeerId;
    }
    /** Private/public key pair supporting encrypted export/import and lock/unlock with a passphrase key. */
    export class KeyPair extends Serializable {
        public static LOCK_KDF_ROUNDS: 256;
        public static generate(): KeyPair;
        public static derive(privateKey: PrivateKey): KeyPair;
        public static fromHex(hexBuf: string): KeyPair;
        public static fromEncrypted(buf: SerialBuffer, key: Uint8Array): Promise<KeyPair>;
        public static unserialize(buf: SerialBuffer): KeyPair;
        public privateKey: PrivateKey;
        public publicKey: PublicKey;
        public serializedSize: number;
        public encryptedSize: number;
        public isLocked: boolean;
        constructor(
            privateKey: PrivateKey,
            publicKey: PublicKey,
            locked?: boolean,
            lockSalt?: Uint8Array,
        );
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public exportEncrypted(key: Uint8Array): Promise<SerialBuffer>;
        public lock(key: string | Uint8Array): Promise<void>;
        public unlock(key: string | Uint8Array): Promise<void>;
        public relock(): void;
        public equals(o: any): boolean;
    }
    /** Base class for encryptable 32-byte secrets (PrivateKey or Entropy), with encrypted import/export. */
    export class Secret extends Serializable {
        public static SIZE: 32;
        public static ENCRYPTION_SALT_SIZE: 16;
        public static ENCRYPTION_KDF_ROUNDS: 256;
        public static ENCRYPTION_CHECKSUM_SIZE: 4;
        public static ENCRYPTION_CHECKSUM_SIZE_V3: 2;
        public static Type: {
            PRIVATE_KEY: 1,
            ENTROPY: 2,
        };
        public static fromEncrypted(buf: SerialBuffer, key: Uint8Array): Promise<PrivateKey|Entropy>;
        public encryptedSize: number;
        public type: Secret.Type;
        constructor(type: Secret.Type, purposeId: number);
        public exportEncrypted(key: Uint8Array): Promise<SerialBuffer>;
    }
    /** Companion namespace for {@link Secret}: numeric secret-type identifiers. */
    export namespace Secret {
        type Type = Type.PRIVATE_KEY|Type.ENTROPY;
        namespace Type {
            type PRIVATE_KEY = 1;
            type ENTROPY = 2;
        }
    }
    /** 32-byte entropy Secret; convertible to a BIP39-style mnemonic or an extended private key. */
    export class Entropy extends Secret {
        public static SIZE: 32;
        public static PURPOSE_ID: number;
        public static generate(): Entropy;
        public static unserialize(buf: SerialBuffer): Entropy;
        public serializedSize: number;
        constructor(arg: Uint8Array);
        public toExtendedPrivateKey(password?: string, wordlist?: string[]): ExtendedPrivateKey;
        public toMnemonic(wordlist?: string[]): string[];
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public overwrite(entropy: Entropy): void;
        public equals(o: any): boolean;
    }
    /** BIP32-style extended private key (key + chain code) supporting index and path derivation. */
    export class ExtendedPrivateKey extends Serializable {
        public static CHAIN_CODE_SIZE: 32;
        public static generateMasterKey(seed: Uint8Array): ExtendedPrivateKey;
        public static isValidPath(path: string): boolean;
        public static derivePathFromSeed(path: string, seed: Uint8Array): ExtendedPrivateKey;
        public static unserialize(buf: SerialBuffer): ExtendedPrivateKey;
        public serializedSize: number;
        public privateKey: PrivateKey;
        constructor(key: PrivateKey, chainCode: Uint8Array);
        public derive(index: number): ExtendedPrivateKey;
        public derivePath(path: string): ExtendedPrivateKey;
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public equals(o: any): boolean;
        public toAddress(): Address;
    }
    /** 32-byte random secret used in commitment-based multi-signatures. */
    export class RandomSecret extends Serializable {
        public static SIZE: 32;
        public static unserialize(buf: SerialBuffer): RandomSecret;
        public serializedSize: number;
        constructor(arg: Uint8Array);
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public equals(o: any): boolean;
    }
    /** 64-byte signature; creatable from a key pair or aggregated from partial signatures; verifiable. */
    export class Signature extends Serializable {
        public static SIZE: 64;
        public static copy(o: Signature): Signature;
        public static create(privateKey: PrivateKey, publicKey: PublicKey, data: Uint8Array): Signature;
        public static fromPartialSignatures(commitment: Commitment, signatures: PartialSignature[]): Signature;
        public static unserialize(buf: SerialBuffer): Signature;
        public static fromAny(o: Signature | Uint8Array | string): Signature;
        public serializedSize: number;
        constructor(args: Uint8Array);
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public verify(publicKey: PublicKey, data: Uint8Array): boolean;
        public equals(o: any): boolean;
    }
    /** 32-byte commitment used in multi-signature aggregation; summable. */
    export class Commitment extends Serializable {
        public static SIZE: 32;
        public static copy(o: Commitment): Commitment;
        public static sum(commitments: Commitment[]): Commitment;
        public static unserialize(buf: SerialBuffer): Commitment;
        public serializedSize: number;
        constructor(arg: Uint8Array);
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public equals(o: any): boolean;
    }
    /** Pair of RandomSecret and its Commitment (96 bytes serialized). */
    export class CommitmentPair extends Serializable {
        public static SERIALIZED_SIZE: 96;
        public static RANDOMNESS_SIZE: 32;
        public static generate(): CommitmentPair;
        public static unserialize(buf: SerialBuffer): CommitmentPair;
        public static fromHex(hexBuf: string): CommitmentPair;
        public secret: RandomSecret;
        public commitment: Commitment;
        public serializedSize: number;
        constructor(secret: RandomSecret, commitment: Commitment);
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public equals(o: any): boolean;
    }
    /** One signer's 32-byte share of an aggregated multi-signature. */
    export class PartialSignature extends Serializable {
        public static SIZE: 32;
        public static create(privateKey: PrivateKey, publicKey: PublicKey, publicKeys: PublicKey[], secret: RandomSecret, aggregateCommitment: Commitment, data: Uint8Array): PartialSignature;
        public static unserialize(buf: SerialBuffer): PartialSignature;
        public serializedSize: number;
        constructor(arg: Uint8Array);
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public equals(o: any): boolean;
    }
    /** Conversions between Entropy and mnemonics (legacy and BIP39), plus seed/extended-key derivation. */
    export class MnemonicUtils {
        public static ENGLISH_WORDLIST: string[];
        public static DEFAULT_WORDLIST: string[];
        public static MnemonicType: {
            UNKNOWN: -1;
            LEGACY: 0;
            BIP39: 1;
        };
        public static entropyToMnemonic(entropy: string | ArrayBuffer | Uint8Array | Entropy, wordlist?: string[]): string[];
        public static entropyToLegacyMnemonic(entropy: string | ArrayBuffer | Uint8Array | Entropy, wordlist?: string[]): string[];
        public static mnemonicToEntropy(mnemonic: string | string[], wordlist?: string[]): Entropy;
        public static legacyMnemonicToEntropy(mnemonic: string | string[], wordlist?: string[]): Entropy;
        public static mnemonicToSeed(mnemonic: string | string[], password?: string): SerialBuffer;
        public static mnemonicToExtendedPrivateKey(mnemonic: string | string[], password?: string): ExtendedPrivateKey;
        public static isCollidingChecksum(entropy: Entropy): boolean;
        public static getMnemonicType(mnemonic: string | string[], wordlist?: string[]): MnemonicUtils.MnemonicType;
    }
    /** Companion namespace for {@link MnemonicUtils}: numeric mnemonic-type identifiers. */
    export namespace MnemonicUtils {
        type MnemonicType = MnemonicType.LEGACY | MnemonicType.BIP39 | MnemonicType.UNKNOWN;
        namespace MnemonicType {
            type UNKNOWN = -1;
            type LEGACY = 0;
            type BIP39 = 1;
        }
    }
    /** 20-byte account address with 'NQ' user-friendly encoding; parseable from hash, hex, base64, or string. */
    export class Address extends Serializable {
        public static CCODE: 'NQ';
        public static SERIALIZED_SIZE: 20;
        public static HEX_SIZE: 40;
        public static NULL: Address;
        public static CONTRACT_CREATION: Address;
        public static copy(o: Address): Address;
        public static fromHash(hash: Hash): Address;
        public static unserialize(buf: SerialBuffer): Address;
        public static fromString(str: string): Address;
        public static fromBase64(base64: string): Address;
        public static fromHex(hex: string): Address;
        public static fromUserFriendlyAddress(str: string): Address;
        public static fromAny(addr: Address | string): Address;
        public serializedSize: number;
        constructor(arg: Uint8Array);
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public subarray(begin?: number, end?: number): Uint8Array;
        public equals(o: Address): boolean;
        public toPlain(): string;
        public toUserFriendlyAddress(withSpaces?: boolean): string;
    }
    /**
     * Abstract account (BASIC/VESTING/HTLC) holding a balance; state transitions are expressed
     * as `with*` methods returning a new Account (with an optional `revert` flag).
     */
    export abstract class Account {
        public static Type: {
            BASIC: 0;
            VESTING: 1;
            HTLC: 2;
            toString(type: Account.Type): string;
            fromAny(type: Account.Type | string): Account.Type;
        };
        public static BalanceError: Error;
        public static DoubleTransactionError: Error;
        public static ProofError: Error;
        public static ValidityError: Error;
        public static unserialize(buf: SerialBuffer): Account;
        public static dataToPlain(data: Uint8Array): {};
        public static proofToPlain(proof: Uint8Array): {};
        public static fromAny(o: Account | {type: Account.Type | string, balance: number}): Account;
        public static fromPlain(plain: {type: Account.Type | string, balance: number}): Account;
        public serializedSize: number;
        public balance: number;
        public type: Account.Type;
        constructor(type: Account.Type, balance: number);
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public equals(o: any): boolean;
        public toString(): string;
        public toPlain(): {
            type: string,
            balance: number,
        };
        public withBalance(balance: number): Account;
        public withOutgoingTransaction(transaction: Transaction, blockHeight: number, transactionCache: TransactionCache, revert?: boolean): Account;
        public withIncomingTransaction(transaction: Transaction, blockHeight: number, revert?: boolean): Account;
        public withContractCommand(transaction: Transaction, blockHeight: number, revert?: boolean): Account;
        public isInitial(): boolean;
        public isToBePruned(): boolean;
    }
    /** Companion namespace for {@link Account}: numeric account-type identifiers. */
    export namespace Account {
        type Type = Type.BASIC | Type.VESTING | Type.HTLC;
        namespace Type {
            type BASIC = 0;
            type VESTING = 1;
            type HTLC = 2;
        }
    }
    /** Address/account pair recorded when an account is pruned; serializable and comparable. */
    export class PrunedAccount {
        public static unserialize(buf: SerialBuffer): PrunedAccount;
        public static fromAny(o: PrunedAccount | object): PrunedAccount;
        public static fromPlain(plain: object): PrunedAccount;
        public address: Address;
        public account: Account;
        public serializedSize: number;
        constructor(address: Address, account: Account);
        public compare(o: PrunedAccount): number;
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public hashCode(): string;
        public toPlain(): {
            address: string,
            account: object,
        };
    }
/**
 * Declaration of the plain balance-only account (Account.Type.BASIC).
 * Proofs decode to a signature-proof shape or an empty object on failure.
 */
export class BasicAccount extends Account {
    public static INITIAL: BasicAccount;
    public static copy(o: BasicAccount): BasicAccount;
    public static unserialize(buf: SerialBuffer): BasicAccount;
    public static fromPlain(o: {balance: number}): BasicAccount;
    public static verifyOutgoingTransaction(transaction: Transaction): boolean;
    public static verifyIncomingTransaction(transaction: Transaction): boolean;
    public static proofToPlain(proof: Uint8Array): {
        signature: string,
        publicKey: string,
        signer: string,
        pathLength: number,
    } | {};
    public static dataToPlain(data: Uint8Array): {};
    constructor(balance?: number);
    public equals(o: any): boolean;
    public toString(): string;
    public withBalance(balance: number): Account;
    public withIncomingTransaction(transaction: Transaction, blockHeight: number, revert?: boolean): Account;
    public withContractCommand(transaction: Transaction, blockHeight: number, revert?: boolean): Account;
    public isInitial(): boolean;
}
/**
 * Declaration of the common base for contract accounts (vesting, HTLC).
 * withContractCommand may demote the contract back to a BasicAccount.
 */
export class Contract extends Account {
    public static verifyIncomingTransaction(transaction: Transaction): boolean;
    constructor(type: Account.Type, balance: number);
    public withIncomingTransaction(transaction: Transaction, blockHeight: number, revert?: boolean): Account;
    public withContractCommand(transaction: Transaction, blockHeight: number, revert?: boolean): BasicAccount | Contract;
}
/**
 * Declaration of the hashed time-locked contract account (Account.Type.HTLC).
 * `ProofType` mirrors the Account.Type pattern: static literal object plus a
 * merged namespace exposing the literal union as a type.
 */
export class HashedTimeLockedContract extends Contract {
    public static ProofType: {
        REGULAR_TRANSFER: 1;
        EARLY_RESOLVE: 2;
        TIMEOUT_RESOLVE: 3;
        toString(proofType: HashedTimeLockedContract.ProofType): string;
    };
    public static create(balance: number, blockHeight: number, transaction: Transaction): HashedTimeLockedContract;
    public static unserialize(buf: SerialBuffer): HashedTimeLockedContract;
    public static fromPlain(plain: object): HashedTimeLockedContract;
    public static verifyOutgoingTransaction(transaction: Transaction): boolean;
    public static verifyIncomingTransaction(transaction: Transaction): boolean;
    // Decodes creation data, or yields an empty object (presumably on malformed data — not visible here).
    public static dataToPlain(data: Uint8Array): {
        sender: string,
        recipient: string,
        hashAlgorithm: string,
        hashRoot: string,
        hashCount: number,
        timeout: number,
    } | {};
    public static proofToPlain(proof: Uint8Array): object;
    public serializedSize: number;
    public sender: Address;
    public recipient: Address;
    public hashRoot: Hash;
    public hashCount: number;
    public timeout: number;
    public totalAmount: number;
    constructor(
        balance?: number,
        sender?: Address,
        recipient?: Address,
        hashRoot?: Hash,
        hashCount?: number,
        timeout?: number,
        totalAmount?: number,
    );
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public toString(): string;
    public toPlain(): {
        type: string,
        balance: number,
        sender: string,
        recipient: string,
        hashRoot: string,
        hashCount: number,
        timeout: number,
        totalAmount: number,
    };
    public equals(o: any): boolean;
    public withBalance(balance: number): Account;
    public withOutgoingTransaction(transaction: Transaction, blockHeight: number, transactionCache: TransactionCache, revert?: boolean): Account;
    public withIncomingTransaction(transaction: Transaction, blockHeight: number, revert?: boolean): Account;
}
/** Literal union for HTLC proof types (1 | 2 | 3), merged onto the class name. */
export namespace HashedTimeLockedContract {
    type ProofType = ProofType.REGULAR_TRANSFER | ProofType.EARLY_RESOLVE | ProofType.TIMEOUT_RESOLVE;
    namespace ProofType {
        type REGULAR_TRANSFER = 1;
        type EARLY_RESOLVE = 2;
        type TIMEOUT_RESOLVE = 3;
    }
}
/**
 * Declaration of the vesting contract account (Account.Type.VESTING):
 * funds unlock in steps of `vestingStepAmount` every `vestingStepBlocks`
 * blocks starting at `vestingStart` (naming-based reading — semantics live
 * in the implementation, not in this declaration).
 */
export class VestingContract extends Contract {
    public static create(balance: number, blockHeight: number, transaction: Transaction): VestingContract;
    public static unserialize(buf: SerialBuffer): VestingContract;
    public static fromPlain(plain: object): VestingContract;
    public static verifyOutgoingTransaction(transaction: Transaction): boolean;
    public static verifyIncomingTransaction(transaction: Transaction): boolean;
    public static dataToPlain(data: Uint8Array): {
        owner: string,
        vestingStart: number,
        vestingStepBlocks: number,
        vestingStepAmount: number,
        vestingTotalAmount: number,
    } | {};
    public static proofToPlain(proof: Uint8Array): {
        signature: string,
        publicKey: string,
        signer: string,
        pathLength: number,
    };
    public serializedSize: number;
    public owner: Address;
    public vestingStart: number;
    public vestingStepBlocks: number;
    public vestingStepAmount: number;
    public vestingTotalAmount: number;
    constructor(
        balance?: number,
        owner?: Address,
        vestingStart?: number,
        vestingStepBlocks?: number,
        vestingStepAmount?: number,
        vestingTotalAmount?: number,
    );
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public toString(): string;
    public toPlain(): {
        type: string,
        balance: number,
        owner: string,
        vestingStart: number,
        vestingStepBlocks: number,
        vestingStepAmount: number,
        vestingTotalAmount: number,
    };
    public equals(o: any): boolean;
    public withBalance(balance: number): Account;
    public withOutgoingTransaction(transaction: Transaction, blockHeight: number, transactionCache: TransactionCache, revert?: boolean): Account;
    public withIncomingTransaction(transaction: Transaction, blockHeight: number, revert?: boolean): Account;
    // Minimum locked balance at the given height.
    public getMinCap(blockHeight: number): number;
}
/**
 * Declaration of a node in the accounts Merkle-radix tree. A node is either a
 * BRANCH (0x00, children identified by suffix + hash) or a TERMINAL (0xff,
 * holding an Account). The `getChild*` accessors return `false` when no such
 * child exists.
 */
export class AccountsTreeNode {
    public static BRANCH: 0x00;
    public static TERMINAL: 0xff;
    public static terminalNode(prefix: string, account: Account): AccountsTreeNode;
    public static branchNode(prefix: string, childrenSuffixes?: string[], childrenHashes?: Hash[]): AccountsTreeNode;
    public static isTerminalType(type: number): boolean;
    public static isBranchType(type: number): boolean;
    public static unserialize(buf: SerialBuffer): AccountsTreeNode;
    public serializedSize: number;
    public account: Account;
    public prefix: string;
    // `arg` is the Account for terminal nodes or the children suffixes for branch nodes.
    constructor(
        type: number,
        prefix: string,
        arg: Account | string[],
        arg2?: Hash[],
    );
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public getChildHash(prefix: string): false | Hash;
    public getChild(prefix: string): false | string;
    // Immutable updates: each returns a new node.
    public withChild(prefix: string, childHash: Hash): AccountsTreeNode;
    public withoutChild(prefix: string): AccountsTreeNode;
    public hasChildren(): boolean;
    public hasSingleChild(): boolean;
    public getFirstChild(): undefined | string;
    public getLastChild(): undefined | string;
    public getChildren(): undefined | string[];
    public withAccount(account: Account): AccountsTreeNode;
    public hash(): Hash;
    public isChildOf(parent: AccountsTreeNode): boolean;
    public isTerminal(): boolean;
    public isBranch(): boolean;
    public equals(o: any): boolean;
}
/**
 * Declaration of the persistence layer for AccountsTreeNodes, backed by an
 * untyped JungleDB store (`jdb: any`). Supports nested transactions via
 * transaction()/commit()/abort() and read-only snapshots.
 */
export class AccountsTreeStore {
    public static initPersistent(jdb: any): void;
    public static getPersistent(jdb: any): AccountsTreeStore;
    public static createVolatile(): AccountsTreeStore;
    public tx: any;
    constructor(store: any);
    public get(key: string): Promise<AccountsTreeNode>;
    // put/remove resolve to the affected node's key.
    public put(node: AccountsTreeNode): Promise<string>;
    public remove(node: AccountsTreeNode): Promise<string>;
    public getRootNode(): Promise<AccountsTreeNode>;
    public getTerminalNodes(startPrefix: string, size: number): Promise<AccountsTreeNode[]>;
    public snapshot(tx?: AccountsTreeStore): AccountsTreeStore;
    public transaction(enableWatchdog?: boolean): AccountsTreeStore;
    public synchronousTransaction(enableWatchdog?: boolean): SynchronousAccountsTreeStore;
    public truncate(): Promise<void>;
    public commit(): Promise<boolean>;
    public abort(): Promise<void>;
}
/** Declaration of the JungleDB value codec for AccountsTreeStore entries. */
export class AccountsTreeStoreCodec {
    public valueEncoding: { encode: (val: any) => any, decode: (val: any) => any, buffer: boolean, type: string } | void;
    public encode(obj: any): any;
    public decode(obj: any, key: string): any;
}
/**
 * Declaration of a store variant with synchronous accessors; nodes must be
 * loaded first via preload() for the *Sync methods to work (inferred from the
 * preload/getSync pairing — confirm against the implementation).
 */
export class SynchronousAccountsTreeStore extends AccountsTreeStore {
    constructor(store: any);
    public preload(keys: string[]): void;
    public getSync(key: string, expectedToBePresent?: boolean): AccountsTreeNode;
    public putSync(node: AccountsTreeNode): string;
    public removeSync(node: AccountsTreeNode): string;
    public getRootNodeSync(): AccountsTreeNode;
}
/** Declaration of a Merkle proof over a set of accounts-tree nodes. */
export class AccountsProof {
    public static unserialize(buf: SerialBuffer): AccountsProof;
    public serializedSize: number;
    public length: number;
    public nodes: AccountsTreeNode[];
    constructor(nodes: AccountsTreeNode[]);
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public verify(): boolean;
    public getAccount(address: Address): Account;
    public toString(): string;
    // Root hash the proof commits to.
    public root(): Hash;
}
/**
 * Declaration of a chunk of terminal accounts-tree nodes plus an AccountsProof
 * anchoring them, used for incremental accounts-tree sync.
 */
export class AccountsTreeChunk {
    public static SIZE_MAX: number;
    public static EMPTY: AccountsTreeChunk;
    public static unserialize(buf: SerialBuffer): AccountsTreeChunk;
    public serializedSize: number;
    public terminalNodes: AccountsTreeNode[];
    public proof: AccountsProof;
    public head: AccountsTreeNode;
    public tail: AccountsTreeNode;
    public length: number;
    constructor(nodes: AccountsTreeNode[], proof: AccountsProof);
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public verify(): boolean;
    public toString(): string;
    public root(): Hash;
}
/**
 * Declaration of the accounts Merkle-radix tree keyed by Address. Supports
 * transactional mutation (transaction/commit/abort), snapshots, proof and
 * chunk extraction.
 */
export class AccountsTree extends Observable {
    public static getPersistent(jdb: any): Promise<AccountsTree>;
    public static createVolatile(): Promise<AccountsTree>;
    public tx: any;
    constructor(store: AccountsTreeStore);
    public put(address: Address, account: Account): Promise<void>;
    public get(address: Address): Promise<null | Account>;
    public getAccountsProof(addresses: Address[]): Promise<AccountsProof>;
    public getChunk(startPrefix: string, size: number): Promise<AccountsTreeChunk>;
    public transaction(enableWatchdog?: boolean): Promise<AccountsTree>;
    public synchronousTransaction(enableWatchdog?: boolean): Promise<SynchronousAccountsTree>;
    public partialTree(): Promise<PartialAccountsTree>;
    public snapshot(tx?: AccountsTree): Promise<AccountsTree>;
    public commit(): Promise<boolean>;
    public abort(): Promise<void>;
    public root(): Promise<Hash>;
    public isEmpty(): Promise<boolean>;
}
/**
 * Declaration of a tree variant with synchronous accessors over a
 * SynchronousAccountsTreeStore; addresses should be preloaded before the
 * *Sync calls (inferred from the preloadAddresses/getSync pairing).
 */
export class SynchronousAccountsTree extends AccountsTree {
    constructor(store: SynchronousAccountsTreeStore);
    public preloadAddresses(addresses: Address[]): Promise<void>;
    public putSync(address: Address, account: Account): void;
    public getSync(address: Address, expectedToBePresent?: boolean): null | Account;
    public rootSync(): Hash;
}
/**
 * Declaration of a partially-synced accounts tree built by pushing
 * AccountsTreeChunks. The @ts-ignore markers suppress the incompatible
 * overrides of transaction()/synchronousTransaction(), which intentionally
 * narrow/change the base class return types.
 */
// @ts-ignore
export class PartialAccountsTree extends SynchronousAccountsTree {
    public static Status: {
        ERR_HASH_MISMATCH: -3;
        ERR_INCORRECT_PROOF: -2;
        ERR_UNMERGEABLE: -1;
        OK_COMPLETE: 0;
        OK_UNFINISHED: 1;
    };
    public complete: boolean;
    public missingPrefix: string;
    constructor(store: SynchronousAccountsTreeStore);
    public pushChunk(chunk: AccountsTreeChunk): Promise<PartialAccountsTree.Status>;
    // @ts-ignore
    public synchronousTransaction(enableWatchdog?: boolean): PartialAccountsTree;
    // @ts-ignore
    public transaction(enableWatchdog?: boolean): AccountsTree;
    public commit(): Promise<boolean>;
    public abort(): Promise<void>;
}
/** Literal union of chunk-push status codes (-3..1), merged onto the class name. */
export namespace PartialAccountsTree {
    type Status = Status.ERR_HASH_MISMATCH | Status.ERR_INCORRECT_PROOF | Status.ERR_UNMERGEABLE | Status.OK_COMPLETE | Status.OK_UNFINISHED;
    namespace Status {
        type ERR_HASH_MISMATCH = -3;
        type ERR_INCORRECT_PROOF = -2;
        type ERR_UNMERGEABLE = -1;
        type OK_COMPLETE = 0;
        type OK_UNFINISHED = 1;
    }
}
/**
 * Declaration of the high-level accounts state machine: applies and reverts
 * whole blocks/bodies against the underlying AccountsTree.
 */
export class Accounts extends Observable {
    public static getPersistent(jdb: any): Promise<Accounts>;
    public static createVolatile(): Promise<Accounts>;
    public tx: any;
    constructor(accountsTree: AccountsTree);
    public initialize(genesisBlock: Block, encodedAccounts: string): Promise<void>;
    public getAccountsProof(addresses: Address[]): Promise<AccountsProof>;
    public getAccountsTreeChunk(startPrefix: string): Promise<AccountsTreeChunk>;
    public commitBlock(block: Block, transactionCache: TransactionCache): Promise<void>;
    public commitBlockBody(body: BlockBody, blockHeight: number, transactionCache: TransactionCache): Promise<void>;
    // Determines which accounts a set of transactions would leave prunable.
    public gatherToBePrunedAccounts(transactions: Transaction[], blockHeight: number, transactionCache: TransactionCache): Promise<PrunedAccount[]>;
    public revertBlock(block: Block, transactionCache: TransactionCache): Promise<void>;
    public revertBlockBody(body: BlockBody, blockHeight: number, transactionCache: TransactionCache): Promise<void>;
    public get(address: Address, accountType?: Account.Type, tree?: AccountsTree): Promise<Account>;
    public transaction(enableWatchdog?: boolean): Promise<Accounts>;
    public snapshot(tx: Accounts): Promise<Accounts>;
    public partialAccountsTree(): Promise<PartialAccountsTree>;
    public commit(): Promise<void>;
    public abort(): Promise<void>;
    public hash(): Promise<Hash>;
}
/**
 * Declaration of a fixed-size (146-byte) proof-of-work block header.
 * `nBits` is the compact difficulty encoding; `target`/`difficulty` are the
 * derived BigNumber forms.
 */
export class BlockHeader {
    public static CURRENT_VERSION: number;
    public static SUPPORTED_VERSIONS: number[];
    public static SERIALIZED_SIZE: 146;
    public static Version: {
        V1: 1;
    };
    public static unserialize(buf: SerialBuffer): BlockHeader;
    public serializedSize: number;
    public version: number;
    public prevHash: Hash;
    public interlinkHash: Hash;
    public bodyHash: Hash;
    public accountsHash: Hash;
    public nBits: number;
    public target: BigNumber;
    public difficulty: BigNumber;
    public height: number;
    public timestamp: number;
    public nonce: number;
    constructor(
        prevHash: Hash,
        interlinkHash: Hash,
        bodyHash: Hash,
        accountsHash: Hash,
        nBits: number,
        height: number,
        timestamp: number,
        nonce: number,
        version?: number,
    );
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public verifyProofOfWork(buf?: SerialBuffer): Promise<boolean>;
    public isImmediateSuccessorOf(prevHeader: BlockHeader): boolean;
    // hash() is synchronous; pow() (the proof-of-work hash) is async.
    public hash(buf?: SerialBuffer): Hash;
    public pow(buf?: SerialBuffer): Promise<Hash>;
    public equals(o: any): boolean;
    public toString(): string;
}
/** Literal union of supported header versions (currently only V1 = 1). */
export namespace BlockHeader {
    type Version = Version.V1;
    namespace Version {
        type V1 = 1;
    }
}
/**
 * Declaration of a block's interlink (list of super-block hashes); may be
 * constructed from a pre-compressed representation (repeatBits + compressed).
 */
export class BlockInterlink {
    public static unserialize(buf: SerialBuffer): BlockInterlink;
    public serializedSize: number;
    public hashes: Hash[];
    public length: number;
    constructor(
        hashes: Hash[],
        prevHash?: Hash,
        repeatBits?: Uint8Array,
        compressed?: Hash[],
    );
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public equals(o: any): boolean;
    public hash(): Hash;
}
/**
 * Declaration of a block body: miner address, transactions, optional extra
 * data and pruned accounts. hash() commits to the body via its Merkle leaves.
 */
export class BlockBody {
    public static getMetadataSize(extraData: Uint8Array): number;
    public static unserialize(buf: SerialBuffer): BlockBody;
    public serializedSize: number;
    public extraData: Uint8Array;
    public minerAddr: Address;
    public transactions: Transaction[];
    public transactionCount: number;
    public prunedAccounts: PrunedAccount[];
    constructor(
        minerAddr: Address,
        transactions: Transaction[],
        extraData?: Uint8Array,
        prunedAccounts?: PrunedAccount[],
    );
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public verify(): boolean;
    public getMerkleLeafs(): any[];
    public hash(): Hash;
    public equals(o: any): boolean;
    // All addresses touched by this body (senders/recipients — exact set defined by the implementation).
    public getAddresses(): Address[];
}
/**
 * Declaration of static helpers converting between the compact difficulty
 * encoding (`nBits`), target, and difficulty, plus proof-of-work checks.
 */
export class BlockUtils {
    public static compactToTarget(compact: number): BigNumber;
    public static targetToCompact(target: BigNumber): number;
    public static getTargetHeight(target: BigNumber): number;
    public static getTargetDepth(target: BigNumber): number;
    public static compactToDifficulty(compact: number): BigNumber;
    public static difficultyToCompact(difficulty: BigNumber): number;
    public static difficultyToTarget(difficulty: BigNumber): BigNumber;
    public static targetToDifficulty(target: BigNumber): BigNumber;
    public static hashToTarget(hash: Hash): BigNumber;
    public static realDifficulty(hash: Hash): BigNumber;
    public static getHashDepth(hash: Hash): number;
    public static isProofOfWork(hash: Hash, target: BigNumber): boolean;
    public static isValidCompact(compact: number): boolean;
    public static isValidTarget(target: BigNumber): boolean;
    // Difficulty-adjustment: next target from head/tail blocks and the difficulty delta between them.
    public static getNextTarget(headBlock: BlockHeader, tailBlock: BlockHeader, deltaTotalDifficulty: BigNumber): BigNumber;
}
/**
 * Declaration of a peer subscription filter: none, everything, a set of
 * addresses, or a minimum fee per byte. The constructor's `filter` argument
 * is the address list or the min fee, depending on `type`.
 */
export class Subscription {
    public static NONE: Subscription;
    public static BLOCKS_ONLY: Subscription;
    public static ANY: Subscription;
    public static Type: {
        NONE: 0;
        ANY: 1;
        ADDRESSES: 2;
        MIN_FEE: 3;
    };
    public static fromAddresses(addresses: Address[]): Subscription;
    public static fromMinFeePerByte(minFeePerByte: number): Subscription;
    public static unserialize(buf: SerialBuffer): Subscription;
    public serializedSize: number;
    public type: Subscription.Type;
    public addresses: Address[];
    public minFeePerByte: number;
    constructor(type: Subscription.Type, filter?: Address[] | number);
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public matchesBlock(block: Block): boolean;
    public matchesTransaction(transaction: Transaction): boolean;
    public isSubsetOf(other: Subscription): boolean;
    public toString(): string;
}
/** Literal union of subscription types (0 | 1 | 2 | 3), merged onto the class name. */
export namespace Subscription {
    type Type = Type.NONE | Type.ANY | Type.ADDRESSES | Type.MIN_FEE;
    namespace Type {
        type NONE = 0;
        type ANY = 1;
        type ADDRESSES = 2;
        type MIN_FEE = 3;
    }
}
/**
 * Declaration of the abstract base transaction. Concrete formats (BASIC,
 * EXTENDED) register themselves in FORMAT_MAP with their unserialize/fromPlain
 * factories. `flags` is a bitfield (see Flag); CONTRACT_CREATION marks
 * contract-creating transactions.
 */
export abstract class Transaction {
    public static Format: {
        BASIC: 0;
        EXTENDED: 1;
        toString(format: Transaction.Format): string;
        fromAny(format: Transaction.Format | string): Transaction.Format;
    };
    public static Flag: {
        NONE: 0;
        CONTRACT_CREATION: 0b1;
    };
    public static FORMAT_MAP: Map<Transaction.Format, {unserialize: (buf: SerialBuffer) => Transaction, fromPlain: (plain: object) => Transaction}>;
    public static unserialize(buf: SerialBuffer): Transaction;
    public static fromPlain(plain: object): Transaction;
    public static fromAny(tx: Transaction | string | object): Transaction;
    // Size of the signed content vs. the full serialized transaction.
    public serializedContentSize: number;
    public serializedSize: number;
    public format: Transaction.Format;
    public sender: Address;
    public senderType: Account.Type;
    public recipient: Address;
    public recipientType: Account.Type;
    public value: number;
    public fee: number;
    public feePerByte: number;
    public networkId: number;
    public validityStartHeight: number;
    public flags: Transaction.Flag;
    public data: Uint8Array;
    public proof: Uint8Array;
    constructor(
        format: Transaction.Format,
        sender: Address,
        senderType: Account.Type,
        recipient: Address,
        recipientType: Account.Type,
        value: number,
        fee: number,
        validityStartHeight: number,
        flags: Transaction.Flag | any,
        data: Uint8Array,
        proof?: Uint8Array,
        networkId?: number,
    );
    // Serializes only the signed content (excludes the proof).
    public serializeContent(buf?: SerialBuffer): SerialBuffer;
    public verify(networkId?: number): boolean;
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public hash(): Hash;
    // Two orderings: canonical compare() and in-block ordering compareBlockOrder().
    public compare(o: Transaction): -1 | 0 | 1;
    public compareBlockOrder(o: Transaction): -1 | 0 | 1;
    public equals(o: any): boolean;
    public toString(): string;
    public getContractCreationAddress(): Address;
    public hasFlag(flag: number): boolean;
    public toPlain(): {
        transactionHash: string,
        format: string;
        sender: string;
        senderType: string;
        recipient: string;
        recipientType: string;
        value: number;
        fee: number;
        feePerByte: number;
        validityStartHeight: number;
        network: string;
        flags: number;
        data: {raw: string};
        proof: {
            raw: string,
            signature?: string,
            publicKey?: string,
            signer?: string,
            pathLength?: number,
        };
        size: number;
        valid: boolean;
    };
}
/** Literal unions for transaction format (0 | 1) and flags bitfield values. */
export namespace Transaction {
    type Format = Format.BASIC | Format.EXTENDED;
    namespace Format {
        type BASIC = 0;
        type EXTENDED = 1;
    }
    type Flag = Flag.NONE | Flag.CONTRACT_CREATION;
    namespace Flag {
        type NONE = 0;
        type CONTRACT_CREATION = 0b1;
    }
}
/**
 * Declaration of a signature proof: public key, Merkle path (for multi-sig),
 * and signature. Use singleSig/multiSig factories rather than the constructor.
 */
export class SignatureProof {
    public static SINGLE_SIG_SIZE: number;
    public static verifyTransaction(transaction: Transaction): boolean;
    public static singleSig(publicKey: PublicKey, signature: Signature): SignatureProof;
    public static multiSig(signerKey: PublicKey, publicKeys: PublicKey[], signature: Signature): SignatureProof;
    public static unserialize(buf: SerialBuffer): SignatureProof;
    public serializedSize: number;
    public publicKey: PublicKey;
    public merklePath: MerklePath;
    public signature: Signature;
    constructor(
        publicKey: PublicKey,
        merklePath: MerklePath,
        signature: Signature,
    );
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public equals(o: any): boolean;
    // Pass address = null to skip the address check (signature-only verification).
    public verify(address: Address | null, data: Uint8Array): boolean;
    public isSignedBy(sender: Address): boolean;
}
/**
 * Declaration of the compact single-signature transaction format
 * (Transaction.Format.BASIC): sender public key + Ed25519-style signature,
 * no extra data.
 */
export class BasicTransaction extends Transaction {
    public static unserialize(buf: SerialBuffer): BasicTransaction;
    public static fromPlain(plain: object): BasicTransaction;
    public serializedSize: number;
    public senderPubKey: PublicKey;
    public signature: Signature;
    constructor(
        senderPublicKey: PublicKey,
        recipient: Address,
        value: number,
        fee: number,
        validityStartHeight: number,
        signature?: Signature,
        networkId?: number,
    );
    public serialize(buf?: SerialBuffer): SerialBuffer;
}
/**
 * Declaration of the general transaction format (Transaction.Format.EXTENDED):
 * arbitrary sender/recipient account types, flags, data and proof payloads.
 */
export class ExtendedTransaction extends Transaction {
    public static unserialize(buf: SerialBuffer): ExtendedTransaction;
    public static fromPlain(plain: object): ExtendedTransaction;
    public serializedSize: number;
    constructor(
        sender: Address,
        senderType: Account.Type,
        recipient: Address,
        recipientType: Account.Type,
        value: number,
        fee: number,
        validityStartHeight: number,
        flags: Transaction.Flag | number,
        data: Uint8Array,
        proof?: Uint8Array,
        networkId?: number,
    );
    public serialize(buf?: SerialBuffer): SerialBuffer;
}
/** Declaration of a Merkle proof that a set of transactions is included in a block body. */
export class TransactionsProof {
    public static unserialize(buf: SerialBuffer): TransactionsProof;
    public serializedSize: number;
    public length: number;
    public transactions: Transaction[];
    public proof: MerkleProof;
    constructor(transactions: Transaction[], proof: MerkleProof);
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public toString(): string;
    public root(): Hash;
}
/** Opaque per-block descriptor used by TransactionCache's block ordering. */
export type BlockDescriptor = object;
/**
 * Declaration of a sliding-window cache of recent transaction hashes, used to
 * detect transaction replays within the validity window. `missingBlocks`
 * tracks how far the window is from being fully populated.
 */
export class TransactionCache {
    public missingBlocks: number;
    public transactions: InclusionHashSet<Hash>;
    public head: null | BlockDescriptor;
    public tail: null | BlockDescriptor;
    constructor(transactionHashes?: InclusionHashSet<Hash>, blockOrder?: BlockDescriptor[]);
    public containsTransaction(transaction: Transaction): boolean;
    public pushBlock(block: Block): void;
    public shiftBlock(): void;
    // Returns a number (presumably the count of removed/affected entries — confirm in the implementation).
    public revertBlock(block: Block): number;
    public prependBlocks(blocks: Block[]): void;
    public clone(): TransactionCache;
    public isEmpty(): boolean;
}
/**
 * Declaration of a transaction-store index record: where (block hash/height,
 * index) a transaction hash appears, plus sender/recipient for lookups.
 * toJSON()/fromJSON() round-trip the persisted shape.
 */
export class TransactionStoreEntry {
    public static fromBlock(block: Block): TransactionStoreEntry[];
    public static fromJSON(id: string, o: { transactionHashBuffer: Uint8Array, senderBuffer: Uint8Array, recipientBuffer: Uint8Array, blockHeight: number, blockHash: string, index: number }): TransactionStoreEntry;
    public transactionHash: Hash;
    public sender: Address;
    public recipient: Address;
    public blockHeight: number;
    public blockHash: Hash;
    public index: number;
    constructor(
        transactionHash: Hash,
        sender: Address,
        recipient: Address,
        blockHeight: number,
        blockHash: Hash,
        index: number,
    );
    public toJSON(): { transactionHashBuffer: Uint8Array, senderBuffer: Uint8Array, recipientBuffer: Uint8Array, blockHeight: number, blockHash: string, index: number };
}
/**
 * Declaration of the persistent transaction index (by hash, sender, and
 * recipient), populated/cleared one block at a time via put()/remove().
 */
export class TransactionStore {
    public static CURRENT_ID_KEY: number;
    public static initPersistent(jdb: any): void;
    public static getPersistent(jdb: any): TransactionStore;
    public static createVolatile(): TransactionStore;
    public tx: any;
    constructor(store: any);
    public get(transactionHash: Hash): Promise<TransactionStoreEntry>;
    public getBySender(sender: Address, limit?: number): Promise<TransactionStoreEntry[]>;
    public getByRecipient(recipient: Address, limit?: number): Promise<TransactionStoreEntry[]>;
    public put(block: Block): Promise<void>;
    public remove(block: Block): Promise<void>;
    public snapshot(tx: TransactionStore): TransactionStore;
    public transaction(enableWatchdog?: boolean): TransactionStore;
    public truncate(): Promise<void>;
    public commit(): Promise<boolean>;
    public abort(): Promise<void>;
}
/** Declaration of the JungleDB value codec for TransactionStore entries. */
export class TransactionStoreCodec {
    public valueEncoding: { encode: (val: any) => any, decode: (val: any) => any, buffer: boolean, type: string } | void;
    public encode(obj: any): any;
    public decode(obj: any, key: string): any;
}
/** Declaration of a lightweight receipt locating a transaction in a block. */
export class TransactionReceipt {
    public static unserialize(buf: SerialBuffer): TransactionReceipt;
    public static fromPlain(o: object): TransactionReceipt;
    public static fromAny(o: TransactionReceipt | object | string): TransactionReceipt;
    public serializedSize: number;
    public transactionHash: Hash;
    public blockHash: Hash;
    public blockHeight: number;
    constructor(
        transactionHash: Hash,
        blockHash: Hash,
        blockHeight: number,
    );
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public equals(o: any): boolean;
    public toPlain(): {
        transactionHash: string,
        blockHash: string,
        blockHeight: number,
    };
}
/**
 * Declaration of a full block: header + interlink + optional body.
 * Header fields (version, prevHash, …, nonce) are re-exposed as accessors;
 * body-derived fields (minerAddr, transactions, …) are `undefined` on
 * light blocks (no body — see isLight()/isFull()/toLight()/toFull()).
 */
export class Block {
    public static TIMESTAMP_DRIFT_MAX: 600 /* seconds */; // 10 minutes
    public static unserialize(buf: SerialBuffer): Block;
    public static fromAny(block: Block | object | string): Block;
    public static fromPlain(o: object): Block;
    public serializedSize: number;
    public header: BlockHeader;
    public interlink: BlockInterlink;
    public body: BlockBody;
    public version: number;
    public prevHash: Hash;
    public interlinkHash: Hash;
    public bodyHash: Hash;
    public accountsHash: Hash;
    public nBits: number;
    public target: BigNumber;
    public difficulty: BigNumber;
    public height: number;
    public timestamp: number;
    public nonce: number;
    public minerAddr: Address | undefined;
    public transactions: Transaction[] | undefined;
    public extraData: Uint8Array | undefined;
    public prunedAccounts: PrunedAccount[] | undefined;
    public transactionCount: number | undefined;
    constructor(
        header: BlockHeader,
        interlink: BlockInterlink,
        body?: BlockBody,
    );
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public verify(time: Time): Promise<boolean>;
    public isImmediateSuccessorOf(predecessor: Block): Promise<boolean>;
    public isInterlinkSuccessorOf(predecessor: Block): Promise<boolean>;
    public isSuccessorOf(predecessor: Block): Promise<boolean>;
    public getNextInterlink(nextTarget: BigNumber, nextVersion?: number): Promise<BlockInterlink>;
    public shallowCopy(): Block;
    public equals(o: any): boolean;
    public isLight(): boolean;
    public isFull(): boolean;
    public toLight(): Block;
    public toFull(body: BlockBody): Block;
    public hash(buf?: SerialBuffer): Hash;
    public pow(buf?: SerialBuffer): Promise<Hash>;
    public toString(): string;
    public toPlain(): {
        version: number,
        hash: string,
        prevHash: string,
        interlinkHash: string,
        bodyHash: string,
        accountsHash: string,
        nBits: number,
        difficulty: string,
        height: number,
        timestamp: number,
        nonce: number,
        interlink: string[],
        minerAddr?: string,
        transactions?: object[],
        extraData?: string,
        prunedAccounts?: object[],
    };
}
/** Declaration of the block-template assembler: builds the next block for a miner address. */
export class BlockProducer {
    constructor(blockchain: BaseChain, accounts: Accounts, mempool: Mempool, time: Time);
    public getNextBlock(address: Address, extraData?: Uint8Array): Promise<Block>;
}
/** Declaration of the minimal blockchain interface: current head block, its hash, and height. */
export abstract class IBlockchain extends Observable {
    public abstract head: Block;
    public abstract headHash: Hash;
    public abstract height: number;
}
/**
 * Declaration of the shared chain logic over a ChainDataStore: block lookup,
 * locators, difficulty targets, and NiPoPoW chain-proof comparison.
 * MULTILEVEL_STRATEGY's declared type pins the default to MODERATE (2).
 */
export abstract class BaseChain extends IBlockchain {
    public static MULTILEVEL_STRATEGY: BaseChain.MultilevelStrategy.MODERATE;
    public static MultilevelStrategy: {
        STRICT: 1;
        MODERATE: 2;
        RELAXED: 3;
    };
    // Batch proof-of-work verification for a set of headers.
    public static manyPow(headers: BlockHeader[]): Promise<void>;
    constructor(store: ChainDataStore);
    public getBlock(hash: Hash, includeForks?: boolean, includeBody?: boolean): Promise<null | Block>;
    public getRawBlock(hash: Hash, includeForks?: boolean): Promise<null | Uint8Array>;
    public getBlockAt(height: number, includeBody?: boolean): Promise<null | Block>;
    public getNearestBlockAt(height: number, lower?: boolean): Promise<null | Block>;
    public getSuccessorBlocks(block: Block): Promise<Block[]>;
    public getBlockLocators(): Promise<Hash[]>;
    public getNextTarget(block?: Block, next?: Block): Promise<BigNumber>;
    public isBetterProof(proof1: ChainProof, proof2: ChainProof, m: number): Promise<boolean>;
}
/** Literal union of multilevel interlink strategies (1 | 2 | 3), merged onto the class name. */
export namespace BaseChain {
    type MultilevelStrategy = BaseChain.MultilevelStrategy.STRICT | BaseChain.MultilevelStrategy.MODERATE | BaseChain.MultilevelStrategy.RELAXED;
    namespace MultilevelStrategy {
        type STRICT = 1;
        type MODERATE = 2;
        type RELAXED = 3;
    }
}
/**
 * Declaration of a chain of full blocks (dense or sparse), as used in
 * NiPoPoW chain proofs.
 */
export class BlockChain {
    public static merge(chain1: BlockChain, chain2: BlockChain): BlockChain;
    public static lowestCommonAncestor(chain1: BlockChain, chain2: BlockChain): undefined | Block;
    public static unserialize(buf: SerialBuffer): BlockChain;
    public serializedSize: number;
    public length: number;
    public blocks: Block[];
    public head: Block;
    public tail: Block;
    constructor(blocks: Block[], superChains?: BlockChain[]);
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public verify(): Promise<boolean>;
    public denseSuffix(): Block[];
    public getSuperChains(): Promise<BlockChain[]>;
    public isAnchored(): boolean;
    public toString(): string;
    // NOTE(review): declared as `number` while HeaderChain.totalDifficulty() returns BigNumber —
    // verify against the implementation; this asymmetry may be a declaration bug.
    public totalDifficulty(): number;
}
/** Declaration of a chain of block headers (the suffix part of a chain proof). */
export class HeaderChain {
    public static unserialize(buf: SerialBuffer): HeaderChain;
    public serializedSize: number;
    public length: number;
    public headers: BlockHeader[];
    public head: BlockHeader;
    public tail: BlockHeader;
    constructor(headers: BlockHeader[]);
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public verify(): Promise<boolean>;
    public toString(): string;
    public totalDifficulty(): BigNumber;
}
/** Declaration of a NiPoPoW chain proof: a block-chain prefix plus a header-chain suffix. */
export class ChainProof {
    public static unserialize(buf: SerialBuffer): ChainProof;
    public serializedSize: number;
    public prefix: BlockChain;
    public suffix: HeaderChain;
    public head: BlockHeader;
    constructor(prefix: BlockChain, suffix: HeaderChain);
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public verify(): Promise<boolean>;
    public toString(): string;
}
/**
 * Declaration of per-block chain metadata (cumulative difficulty/work,
 * super-block counts, main-chain membership). toObj()/fromObj() round-trip
 * the persisted underscore-prefixed shape.
 */
export class ChainData {
    public static initial(block: Block, superBlockCounts: SuperBlockCounts): Promise<ChainData>;
    public static fromObj(obj: { _head: Uint8Array, _totalDifficulty: string, _totalWork: string, _superBlockCounts: number[], _onMainChain: boolean, _mainChainSuccessor: null | Uint8Array, _height: number, _pow: Uint8Array }, hashBase64?: string): ChainData;
    public head: Block;
    public totalDifficulty: BigNumber;
    public totalWork: BigNumber;
    public superBlockCounts: SuperBlockCounts;
    public onMainChain: boolean;
    public mainChainSuccessor: Hash;
    constructor(
        head: Block,
        totalDifficulty: BigNumber,
        totalWork: BigNumber,
        superBlockCounts: SuperBlockCounts,
        onMainChain?: boolean,
        mainChainSuccessor?: Hash,
    );
    public toObj(): { _head: SerialBuffer, _totalDifficulty: string, _totalWork: string, _superBlockCounts: number[], _onMainChain: boolean, _mainChainSuccessor: null | SerialBuffer, _height: number, _pow: SerialBuffer };
    public shallowCopy(): ChainData;
    // Derive the metadata for the next / previous block relative to this one.
    public nextChainData(block: Block): Promise<ChainData>;
    public previousChainData(block: Block): Promise<ChainData>;
}
/**
 * Declaration of a per-depth counter of super blocks; used when selecting
 * the candidate depth for NiPoPoW proofs.
 */
export class SuperBlockCounts {
    public length: number;
    public array: number[];
    constructor(array: number[]);
    public add(depth: number): void;
    public subtract(depth: number): void;
    // Immutable variants of add/subtract.
    public copyAndAdd(depth: number): SuperBlockCounts;
    public copyAndSubtract(depth: number): SuperBlockCounts;
    public get(depth: number): number;
    public getCandidateDepth(m: number): number;
}
/**
 * Declaration of the persistence layer for ChainData and raw blocks, with
 * LRU caching (sizes/switches in the static constants) and both async and
 * synchronous transaction flavors.
 */
export class ChainDataStore {
    public static CHAINDATA_CACHING_ENABLED: true;
    public static CHAINDATA_CACHE_SIZE: 5000;
    public static BLOCKS_CACHING_ENABLED: true;
    public static BLOCKS_CACHE_SIZE: 0;
    public static BLOCKS_RAW_CACHE_SIZE: 500;
    public static initPersistent(jdb: any): void;
    public static getPersistent(jdb: any): ChainDataStore;
    public static createVolatile(): ChainDataStore;
    public txs: any[];
    constructor(chainStore: any, blockStore: any);
    public getChainData(key: Hash, includeBody?: boolean): Promise<null | ChainData>;
    public putChainData(key: Hash, chainData: ChainData, includeBody?: boolean): Promise<void>;
    public putChainDataSync(key: Hash, chainData: ChainData, includeBody?: boolean): void;
    public removeChainDataSync(key: Hash): void;
    // NOTE(review): declared synchronous (no Promise) unlike the sibling getters —
    // confirm against the implementation; may be a missing Promise<> in this declaration.
    public getBlock(key: Hash, includeBody?: boolean): null | Block;
    public getRawBlock(key: Hash, includeForks?: boolean): Promise<null | Uint8Array>;
    public getChainDataCandidatesAt(height: number): Promise<ChainData[]>;
    public getChainDataAt(height: number, includeBody?: boolean): Promise<undefined | null | ChainData>;
    public getBlockAt(height: number, includeBody?: boolean): Promise<null | Block>;
    public getSuccessorBlocks(block: Block): Promise<Block[]>;
    public getNearestBlockAt(height: number, lower?: boolean): Promise<undefined | null | Block>;
    public getBlocks(startBlockHash: Hash, count?: number, forward?: boolean): Promise<Block[]>;
    public getBlocksForward(startBlockHash: Hash, count?: number): Promise<Block[]>;
    public getBlocksBackward(startBlockHash: Hash, count?: number, includeBody?: boolean): Promise<Block[]>;
    public getHead(): Promise<undefined | Hash>;
    public setHead(key: Hash): Promise<void>;
    public setHeadSync(key: Hash): void;
    public transaction(enableWatchdog?: boolean): ChainDataStore;
    public synchronousTransaction(enableWatchdog?: boolean): ChainDataStore;
    public commit(): Promise<void>;
    public abort(): Promise<void>;
    public snapshot(): ChainDataStore;
    public truncate(): Promise<void>;
}
    /** Encode/decode adapter for chain data persisted in the backing store. */
    export class ChainDataStoreCodec {
        public valueEncoding: { encode: (val: any) => any, decode: (val: any) => any, buffer: boolean, type: string } | void;
        public encode(obj: any): any;
        public decode(obj: any, key: string): any;
    }
    /** Encode/decode adapter for blocks persisted in the backing store. */
    export class BlockStoreCodec {
        public valueEncoding: { encode: (val: any) => any, decode: (val: any) => any, buffer: boolean, type: string } | void;
        public encode(obj: any): any;
        public decode(obj: any, key: string): any;
    }
    /**
     * Set of mempool transactions sharing one sender. `add`/`remove` mutate
     * and return `this` (chainable); `copyAndAdd` returns a new set.
     */
    export class MempoolTransactionSet {
        public transactions: Transaction[];
        public sender: Address;
        public senderType: undefined | Account.Type;
        public length: number;
        constructor(sortedTransactions: Transaction[]);
        public add(transaction: Transaction): MempoolTransactionSet;
        public remove(transaction: Transaction): MempoolTransactionSet;
        public copyAndAdd(transaction: Transaction): MempoolTransactionSet;
        public numBelowFeePerByte(feePerByte: number): number;
        public toString(): string;
    }
    /**
     * Threshold-based transaction filter plus a hash blacklist used by the
     * mempool to reject transactions and resulting account states.
     */
    export class MempoolFilter {
        public static BLACKLIST_SIZE: number;
        public static FEE: number;
        public static VALUE: number;
        public static TOTAL_VALUE: number;
        public static RECIPIENT_BALANCE: number;
        public static SENDER_BALANCE: number;
        public static CREATION_FEE: number;
        public static CREATION_FEE_PER_BYTE: number;
        public static CREATION_VALUE: number;
        public static CONTRACT_FEE: number;
        public static CONTRACT_FEE_PER_BYTE: number;
        public static CONTRACT_VALUE: number;
        constructor();
        public acceptsTransaction(tx: Transaction): boolean;
        public acceptsRecipientAccount(tx: Transaction, oldAccount: Account, newAccount: Account): boolean;
        public acceptsSenderAccount(tx: Transaction, oldAccount: Account, newAccount: Account): boolean;
        public blacklist(hash: Hash): void;
        public isBlacklisted(hash: Hash): boolean;
    }
    /**
     * Full-node mempool. `pushTransaction` resolves to one of the
     * {@link Mempool.ReturnCode} values listed in the static `ReturnCode` map.
     */
    export class Mempool extends Observable {
        public static TRANSACTION_RELAY_FEE_MIN: 1;
        public static TRANSACTIONS_PER_SENDER_MAX: 500;
        public static FREE_TRANSACTIONS_PER_SENDER_MAX: 10;
        public static SIZE_MAX: number;
        public static ReturnCode: {
            EXPIRED: -5;
            MINED: -4;
            FILTERED: -3;
            FEE_TOO_LOW: -2;
            INVALID: -1;
            ACCEPTED: 1;
            KNOWN: 2;
        };
        public length: number;
        public queue: Synchronizer;
        constructor(blockchain: IBlockchain, accounts: Accounts);
        public pushTransaction(transaction: Transaction): Promise<Mempool.ReturnCode>;
        public getTransaction(hash: Hash): Transaction;
        // public *transactionGenerator(maxSize?: number, minFeePerByte?: number): IterableIterator<Transaction>;
        public getTransactions(maxSize?: number, minFeePerByte?: number): Transaction[];
        public getTransactionsForBlock(maxSize: number): Promise<Transaction[]>;
        public getPendingTransactions(address: Address): Transaction[];
        public getTransactionsBySender(address: Address): Transaction[];
        public getTransactionsByRecipient(address: Address): Transaction[];
        public getTransactionsByAddresses(addresses: Address[], maxTransactions?: number): Transaction[];
        public evictBelowMinFeePerByte(minFeePerByte: number): void;
        public isFiltered(txHash: Hash): boolean;
    }
export namespace Mempool {
type ReturnCode = ReturnCode.FEE_TOO_LOW | ReturnCode.INVALID | ReturnCode.ACCEPTED | ReturnCode.KNOWN;
namespace ReturnCode {
type EXPIRED = -5;
type MINED = -4;
type FILTERED = -3;
type FEE_TOO_LOW = -2;
type INVALID = -1;
type ACCEPTED = 1;
type KNOWN = 2;
}
}
    /** Tracks which agent should request which inventory vector, with retry bookkeeping. */
    export class InvRequestManager {
        public static MAX_TIME_PER_VECTOR: 10000;
        public static MAX_INV_MANAGED: 10000;
        constructor();
        public askToRequestVector(agent: BaseConsensusAgent, vector: InvVector): void;
        public noteVectorNotReceived(agent: BaseConsensusAgent, vector: InvVector): void;
        public noteVectorReceived(vector: InvVector): void;
    }
    /**
     * Base class for per-peer consensus agents: relays/requests blocks and
     * transactions and fetches proofs from the connected {@link Peer}.
     * Timeout/throttle constants are in milliseconds unless noted.
     */
    export class BaseConsensusAgent extends Observable {
        public static REQUEST_THRESHOLD: 50;
        public static REQUEST_THROTTLE: 500;
        public static REQUEST_TIMEOUT: 10000;
        public static REQUEST_TRANSACTIONS_WAITING_MAX: 5000;
        public static REQUEST_BLOCKS_WAITING_MAX: 5000;
        public static BLOCK_PROOF_REQUEST_TIMEOUT: 10000;
        public static TRANSACTIONS_PROOF_REQUEST_TIMEOUT: 10000;
        public static TRANSACTION_RECEIPTS_REQUEST_TIMEOUT: 15000;
        public static TRANSACTION_RELAY_INTERVAL: 5000;
        public static TRANSACTIONS_AT_ONCE: 100;
        public static TRANSACTIONS_PER_SECOND: 10;
        public static FREE_TRANSACTION_RELAY_INTERVAL: 6000;
        public static FREE_TRANSACTIONS_AT_ONCE: 10;
        public static FREE_TRANSACTIONS_PER_SECOND: 1;
        public static FREE_TRANSACTION_SIZE_PER_INTERVAL: 15000; // ~100 legacy transactions
        public static TRANSACTION_RELAY_FEE_MIN: 1;
        public static SUBSCRIPTION_CHANGE_GRACE_PERIOD: 3000;
        public static HEAD_REQUEST_INTERVAL: 100000; // 100 seconds, give client time to announce new head without request
        public static KNOWS_OBJECT_AFTER_INV_DELAY: 3000;
        public static KNOWN_OBJECTS_COUNT_MAX: 40000;
        public peer: Peer;
        public synced: boolean;
        public syncing: boolean;
        constructor(
            time: Time,
            peer: Peer,
            invRequestManager: InvRequestManager,
            targetSubscription?: Subscription,
        );
        public providesServices(...services: number[]): boolean;
        public onHeadUpdated(): void;
        public subscribe(subscription: Subscription): void;
        public relayBlock(block: Block): boolean;
        public requestBlock(hash: Hash): Promise<Block | null>;
        public requestTransaction(hash: Hash): Promise<Transaction | null>;
        public relayTransaction(transaction: Transaction): boolean;
        public removeTransaction(transaction: Transaction): void;
        public knowsBlock(blockHash: Hash): boolean;
        public knowsTransaction(txHash: Hash): boolean;
        public requestVector(...vector: InvVector[]): void;
        public getBlockProof(blockHashToProve: Hash, knownBlock: Block): Promise<Block>;
        public getBlockProofAt(blockHeightToProve: number, knownBlock: Block): Promise<Block>;
        public getTransactionProof(block: Block, addresses: Address[]): Promise<Transaction[]>;
        public getTransactionsProofByAddresses(block: Block, addresses: Address[]): Promise<Transaction[]>;
        public getTransactionsProofByHashes(block: Block, hashes: Hash[]): Promise<Transaction[]>;
        public getTransactionReceipts(address: Address): Promise<TransactionReceipt[]>;
        public getTransactionReceiptsByAddress(address: Address, limit?: number): Promise<TransactionReceipt[]>;
        public getTransactionReceiptsByHashes(hashes: Hash[]): Promise<TransactionReceipt[]>;
        public shutdown(): void;
    }
// Not registered globally
// export class FreeTransactionVector {
// constructor(inv: InvVector, serializedSize: number);
// public hashCode(): string;
// public toString(): string;
// public inv: InvVector;
// public serializedSize: number;
// }
    /**
     * Base consensus implementation shared by full/light/nano/pico variants.
     * `sendTransaction` outcomes use the static `SendTransactionResult` map.
     */
    export class BaseConsensus extends Observable {
        public static MAX_ATTEMPTS_TO_FETCH: 5;
        public static SYNC_THROTTLE: 1500; // ms
        public static MIN_FULL_NODES: 1;
        public static TRANSACTION_RELAY_TIMEOUT: 10000;
        public static SendTransactionResult: {
            REJECTED_LOCAL: -4,
            EXPIRED: -3,
            ALREADY_MINED: -2,
            INVALID: -1,
            NONE: 0,
            RELAYED: 1,
            KNOWN: 2,
            PENDING_LOCAL: 3,
        };
        public established: boolean;
        public network: Network;
        public invRequestManager: InvRequestManager;
        constructor(
            blockchain: BaseChain,
            mempool: Observable,
            network: Network,
        );
        public getHeadHash(): Promise<Hash>;
        public getHeadHeight(): Promise<number>;
        public getBlock(hash: Hash, includeBody?: boolean, includeBodyFromLocal?: boolean, blockHeight?: number): Promise<Block>;
        public getBlockAt(height: number, includeBody?: boolean): Promise<Block>;
        public getPendingTransactions(hashes: Hash[]): Promise<Transaction[]>;
        public getTransactionsFromBlock(hashes: Hash[], blockHash: Hash, blockHeight?: number, block?: Block): Promise<Transaction[]>;
        public getTransactionsFromBlockByAddresses(addresses: Address[], blockHash: Hash, blockHeight?: number): Promise<Transaction[]>;
        public getTransactionReceiptsByAddress(address: Address, limit?: number): Promise<TransactionReceipt[]>;
        public getTransactionReceiptsByHashes(hashes: Hash[]): Promise<TransactionReceipt[]>;
        public getMempoolContents(): Transaction[];
        public handoverTo(consensus: BaseConsensus): BaseConsensus;
        public subscribe(subscription: Subscription): void;
        public getSubscription(): Subscription;
    }
    /** Literal-union companion for {@link BaseConsensus.sendTransaction} results. */
    export namespace BaseConsensus {
        type SendTransactionResult = SendTransactionResult.REJECTED_LOCAL | SendTransactionResult.EXPIRED | SendTransactionResult.ALREADY_MINED | SendTransactionResult.INVALID | SendTransactionResult.NONE | SendTransactionResult.RELAYED | SendTransactionResult.KNOWN | SendTransactionResult.PENDING_LOCAL;
        namespace SendTransactionResult {
            type REJECTED_LOCAL = -4;
            type EXPIRED = -3;
            type ALREADY_MINED = -2;
            type INVALID = -1;
            type NONE = 0;
            type RELAYED = 1;
            type KNOWN = 2;
            type PENDING_LOCAL = 3;
        }
    }
    /**
     * Full blockchain with accounts state. `pushBlock` resolves to one of the
     * ERR_*/OK_* status codes declared below.
     */
    export class FullChain extends BaseChain {
        public static ERR_ORPHAN: -2;
        public static ERR_INVALID: -1;
        public static OK_KNOWN: 0;
        public static OK_EXTENDED: 1;
        public static OK_REBRANCHED: 2;
        public static OK_FORKED: 3;
        public static SYNCHRONIZER_THROTTLE_AFTER: 500; // ms
        public static SYNCHRONIZER_THROTTLE_WAIT: 30; // ms
        public static getPersistent(jdb: any, accounts: Accounts, time: Time, transactionStore?: TransactionStore): Promise<FullChain>;
        public static createVolatile(accounts: Accounts, time: Time, transactionStore?: TransactionStore): Promise<FullChain>;
        public head: Block;
        public headHash: Hash;
        public height: number;
        public totalDifficulty: BigNumber;
        public totalWork: BigNumber;
        public accounts: Accounts;
        public transactionCache: TransactionCache;
        public blockForkedCount: number;
        public blockRebranchedCount: number;
        public blockExtendedCount: number;
        public blockOrphanCount: number;
        public blockInvalidCount: number;
        public blockKnownCount: number;
        constructor(
            store: ChainDataStore,
            accounts: Accounts,
            time: Time,
            transactionStore?: TransactionStore,
        );
        public pushBlock(block: Block): Promise<number>;
        public getBlocks(startBlockHash: Hash, count?: number, forward?: boolean): Promise<Block[]>;
        public getChainProof(): Promise<ChainProof>;
        public getBlockProof(blockToProve: Block, knownBlock: Block): Promise<null | BlockChain>;
        public getAccountsTreeChunk(blockHash: Hash, startPrefix: string): Promise<null | AccountsTreeChunk>;
        public getAccountsProof(blockHash: Hash, addresses: Address[]): Promise<null | AccountsProof>;
        public getTransactionsProof(blockHash: Hash, addresses: Address[]): Promise<null | TransactionsProof>;
        public getTransactionsProofByAddresses(blockHash: Hash, addresses: Address[]): Promise<TransactionsProof | null>;
        public getTransactionsProofByHashes(blockHash: Hash, hashes: Hash[]): Promise<TransactionsProof | null>;
        public getTransactionReceiptsByAddress(address: Address, limit?: number): Promise<TransactionReceipt[] | null>;
        public getTransactionReceiptsByHashes(hashes: Hash[], limit?: number): Promise<TransactionReceipt[] | null>;
        public getTransactionInfoByHash(transactionHash: Hash): Promise<null | TransactionStoreEntry>;
        public accountsHash(): Promise<Hash>;
        public queue(): PrioritySynchronizer;
    }
    /** Per-peer agent for full-node consensus; rate limits are per minute where noted. */
    export class FullConsensusAgent extends BaseConsensusAgent {
        public static SYNC_ATTEMPTS_MAX: number;
        public static GETBLOCKS_VECTORS_MAX: 500;
        public static RESYNC_THROTTLE: 3000; // 3 seconds
        public static MEMPOOL_DELAY_MIN: 2000; // 2 seconds
        public static MEMPOOL_DELAY_MAX: 20000; // 20 seconds
        public static MEMPOOL_THROTTLE: 1000;
        public static MEMPOOL_ENTRIES_MAX: 10000;
        public static CHAIN_PROOF_RATE_LIMIT: 3; // per minute
        public static ACCOUNTS_PROOF_RATE_LIMIT: 60; // per minute
        public static ACCOUNTS_TREE_CHUNK_RATE_LIMIT: 300; // per minute
        public static TRANSACTION_PROOF_RATE_LIMIT: 60; // per minute
        public static TRANSACTION_RECEIPTS_RATE_LIMIT: 30; // per minute
        public static BLOCK_PROOF_RATE_LIMIT: 60; // per minute
        public static GET_BLOCKS_RATE_LIMIT: 30; // per minute
        public syncing: boolean;
        constructor(
            blockchain: FullChain,
            mempool: Mempool,
            time: Time,
            peer: Peer,
            invRequestManager: InvRequestManager,
            targetSubscription: Subscription,
        );
        public syncBlockchain(): void;
    }
    /** Consensus over a {@link FullChain} with a full {@link Mempool}. */
    export class FullConsensus extends BaseConsensus {
        public minFeePerByte: number;
        public blockchain: FullChain;
        public mempool: Mempool;
        constructor(
            blockchain: FullChain,
            mempool: Mempool,
            network: Network,
        );
        public getBlock(hash: Hash, includeBody?: boolean, includeBodyFromLocal?: boolean, blockHeight?: number): Promise<Block>;
        public getBlockAt(height: number, includeBody?: boolean): Promise<Block>;
        public getBlockTemplate(minerAddress: Address, extraData?: Uint8Array): Promise<Block>;
        public submitBlock(block: Block): Promise<boolean>;
        public getAccounts(addresses: Address[]): Promise<Account[]>;
        public getPendingTransactions(hashes: Hash[]): Promise<Transaction[]>;
        public getPendingTransactionsByAddress(address: Address, limit?: number): Promise<Transaction[]>;
        public getTransactionsFromBlock(hashes: Hash[], blockHash: Hash, blockHeight?: number, block?: Block): Promise<Transaction[]>;
        public getTransactionReceiptsByAddress(address: Address, limit?: number): Promise<TransactionReceipt[]>;
        public getTransactionReceiptsByHashes(hashes: Hash[]): Promise<TransactionReceipt[]>;
        public sendTransaction(tx: Transaction): Promise<BaseConsensus.SendTransactionResult>;
        public getMempoolContents(): Transaction[];
        public subscribeMinFeePerByte(minFeePerByte: number): void;
    }
    /** Light-client chain; extends {@link FullChain} and adds partial-chain syncing. */
    export class LightChain extends FullChain {
        public static getPersistent(jdb: any, accounts: Accounts, time: Time): Promise<LightChain>;
        public static createVolatile(accounts: Accounts, time: Time): Promise<LightChain>;
        constructor(
            store: ChainDataStore,
            accounts: Accounts,
            time: Time,
        );
        public partialChain(): PartialLightChain;
    }
    /** Per-peer agent for light-node consensus (chain-proof based sync). */
    export class LightConsensusAgent extends FullConsensusAgent {
        public static CHAINPROOF_REQUEST_TIMEOUT: 45000;
        public static CHAINPROOF_CHUNK_TIMEOUT: 10000;
        public static ACCOUNTS_TREE_CHUNK_REQUEST_TIMEOUT: 8000;
        public static SYNC_ATTEMPTS_MAX: number;
        public static GETBLOCKS_VECTORS_MAX: 500;
        public static WEAK_PROOFS_MAX: 3;
        public syncing: boolean;
        constructor(
            blockchain: LightChain,
            mempool: Mempool,
            time: Time,
            peer: Peer,
            invRequestManager: InvRequestManager,
            targetSubscription: Subscription,
        );
        public syncBlockchain(): Promise<void>;
        public getHeader(hash: Hash): Promise<BlockHeader>;
    }
    /** Consensus over a {@link LightChain} with a full {@link Mempool}. */
    export class LightConsensus extends BaseConsensus {
        public blockchain: LightChain;
        public mempool: Mempool;
        public readonly minFeePerByte: number;
        constructor(
            blockchain: LightChain,
            mempool: Mempool,
            network: Network,
        );
        public getBlockTemplate(minerAddress: Address, extraData?: Uint8Array): Promise<Block>;
        public submitBlock(block: Block): Promise<boolean>;
        public getAccounts(addresses: Address[]): Promise<Account[]>;
        public getPendingTransactions(hashes: Hash[]): Promise<Transaction[]>;
        public getPendingTransactionsByAddress(address: Address, limit?: number): Promise<Transaction[]>;
        public sendTransaction(tx: Transaction): Promise<BaseConsensus.SendTransactionResult>;
        public getMempoolContents(): Transaction[];
        public subscribeMinFeePerByte(minFeePerByte: number): void;
    }
    /**
     * In-progress light chain built from a chain proof plus accounts-tree
     * chunks; `state` walks through the static `State` machine below.
     */
    export class PartialLightChain extends LightChain {
        public static State: {
            WEAK_PROOF: -2;
            ABORTED: -1;
            PROVE_CHAIN: 0;
            PROVE_ACCOUNTS_TREE: 1;
            PROVE_BLOCKS: 2;
            COMPLETE: 3;
        };
        public state: PartialLightChain.State;
        public proofHeadHeight: number;
        constructor(
            store: ChainDataStore,
            accounts: Accounts,
            time: Time,
            proof: ChainProof,
            commitSynchronizer: PrioritySynchronizer,
        );
        public pushProof(proof: ChainProof): Promise<boolean>;
        public pushAccountsTreeChunk(chunk: AccountsTreeChunk): Promise<PartialAccountsTree.Status>;
        public commit(): Promise<boolean>;
        public abort(): Promise<void>;
        public getMissingAccountsPrefix(): string;
        // @ts-ignore
        public getBlockLocators(): Hash[];
        public numBlocksNeeded(): number;
        public needsMoreBlocks(): boolean;
    }
    /** Literal-union companion for {@link PartialLightChain.state}. */
    export namespace PartialLightChain {
        type State = State.WEAK_PROOF | State.ABORTED | State.PROVE_CHAIN | State.PROVE_ACCOUNTS_TREE | State.PROVE_BLOCKS | State.COMPLETE;
        namespace State {
            type WEAK_PROOF = -2;
            type ABORTED = -1;
            type PROVE_CHAIN = 0;
            type PROVE_ACCOUNTS_TREE = 1;
            type PROVE_BLOCKS = 2;
            type COMPLETE = 3;
        }
    }
    /** Shared agent base for nano/pico clients; fetches accounts via proofs. */
    export class BaseMiniConsensusAgent extends BaseConsensusAgent {
        public static ACCOUNTSPROOF_REQUEST_TIMEOUT: 5000;
        public static MEMPOOL_DELAY_MIN: 500;
        public static MEMPOOL_DELAY_MAX: 5000;
        public static MEMPOOL_ENTRIES_MAX: 1000;
        constructor(
            blockchain: BaseChain,
            mempool: NanoMempool,
            time: Time,
            peer: Peer,
            invRequestManager: InvRequestManager,
            targetSubscription: Subscription,
        );
        public requestMempool(): void;
        public getAccounts(blockHash: Hash, addresses: Address[]): Promise<Account[]>;
    }
    /** Shared consensus base for nano/pico clients with address subscriptions. */
    export class BaseMiniConsensus extends BaseConsensus {
        public static MempoolRejectedError: BaseMiniConsensusMempoolRejectedError;
        constructor(blockchain: BaseChain, mempool: Observable, network: Network);
        public subscribeAccounts(addresses: Address[]): void;
        public subscribe(subscription: Subscription): void;
        public addSubscriptions(newAddresses: Address[] | Address): void;
        public removeSubscriptions(addressesToRemove: Address[] | Address): void;
        public getAccount(address: Address, blockHash?: Hash): Promise<Account>;
        public getAccounts(addresses: Address[], blockHash?: Hash): Promise<Account[]>;
        public sendTransaction(tx: Transaction): Promise<BaseConsensus.SendTransactionResult>;
        public getPendingTransactions(hashes: Hash[]): Promise<Transaction[]>;
        public getPendingTransactionsByAddress(address: Address, limit?: number): Promise<Transaction[]>;
        public relayTransaction(transaction: Transaction): Promise<void>;
    }
    /** Error carrying the mempool return code that caused a local rejection. */
    declare class BaseMiniConsensusMempoolRejectedError extends Error {
        public mempoolReturnCode: Mempool.ReturnCode;
        constructor(mempoolCode: Mempool.ReturnCode);
    }
    /**
     * Header-only chain for nano clients; `pushHeader` resolves to one of the
     * ERR_*/OK_* status codes declared below.
     */
    export class NanoChain extends BaseChain {
        public static ERR_ORPHAN: -2;
        public static ERR_INVALID: -1;
        public static OK_KNOWN: 0;
        public static OK_EXTENDED: 1;
        public static OK_REBRANCHED: 2;
        public static OK_FORKED: 3;
        public static SYNCHRONIZER_THROTTLE_AFTER: 500; // ms
        public static SYNCHRONIZER_THROTTLE_WAIT: 30; // ms
        public head: Block;
        public headHash: Hash;
        public height: number;
        constructor(time: Time);
        public pushProof(proof: ChainProof): Promise<boolean>;
        public pushHeader(header: BlockHeader): Promise<number>;
        public getChainProof(): Promise<ChainProof>;
    }
    /** Per-peer agent for nano-client consensus (chain-proof sync). */
    export class NanoConsensusAgent extends BaseMiniConsensusAgent {
        public static CHAINPROOF_REQUEST_TIMEOUT: 45000;
        public static CHAINPROOF_CHUNK_TIMEOUT: 10000;
        public syncing: boolean;
        constructor(
            blockchain: NanoChain,
            mempool: NanoMempool,
            time: Time,
            peer: Peer,
            invRequestManager: InvRequestManager,
            targetSubscription: Subscription,
        );
        public syncBlockchain(): Promise<void>;
    }
    /** Consensus over a {@link NanoChain} with a {@link NanoMempool}. */
    export class NanoConsensus extends BaseMiniConsensus {
        public blockchain: NanoChain;
        public mempool: NanoMempool;
        constructor(
            blockchain: NanoChain,
            mempool: NanoMempool,
            network: Network,
        );
    }
    /** Lightweight mempool for nano/pico clients (no accounts validation state). */
    export class NanoMempool extends Observable {
        public length: number;
        constructor(blockchain: IBlockchain);
        public pushTransaction(transaction: Transaction): Promise<Mempool.ReturnCode>;
        public getTransaction(hash: Hash): undefined | Transaction;
        public getTransactions(maxCount?: number): Transaction[];
        public getPendingTransactions(address: Address): Transaction[];
        public getTransactionsBySender(address: Address): Transaction[];
        public getTransactionsByRecipient(address: Address): Transaction[];
        public getTransactionsByAddresses(addresses: Address[], maxTransactions?: number): Transaction[];
        public changeHead(block: Block, transactions: Transaction[]): Promise<void>;
        public removeTransaction(transaction: Transaction): void;
        public evictExceptAddresses(addresses: Address[]): void;
    }
    /**
     * Minimal chain for pico clients; note ERR_INCONSISTENT (-2) replaces the
     * ERR_ORPHAN code used by the other chain variants.
     */
    export class PicoChain extends BaseChain {
        public static ERR_INCONSISTENT: -2;
        public static ERR_INVALID: -1;
        public static OK_KNOWN: 0;
        public static OK_EXTENDED: 1;
        public static OK_REBRANCHED: 2;
        public static OK_FORKED: 3;
        public head: Block;
        public headHash: Hash;
        public height: number;
        constructor(time: Time);
        public reset(): Promise<void>;
        public pushBlock(block: Block): Promise<number>;
    }
    /** Per-peer agent for pico-client consensus. */
    export class PicoConsensusAgent extends BaseMiniConsensusAgent {
        constructor(consensus: PicoConsensus, peer: Peer, targetSubscription: Subscription);
        public syncBlockchain(): Promise<void>;
    }
    /** Consensus over a {@link PicoChain} with a {@link NanoMempool}. */
    export class PicoConsensus extends BaseMiniConsensus {
        public static MIN_SYNCED_NODES: 3;
        public blockchain: PicoChain;
        public mempool: NanoMempool;
        constructor(blockchain: PicoChain, mempool: NanoMempool, network: Network);
    }
    /** Database wrapper for consensus state (full or light schema). */
    export class ConsensusDB /* extends JDB.JungleDB */ {
        public static VERSION: number;
        public static INITIAL_DB_SIZE: number;
        public static MIN_RESIZE: number;
        public static getFull(dbPrefix?: string): Promise<ConsensusDB>;
        public static getLight(dbPrefix?: string): Promise<ConsensusDB>;
        public static restoreTransactions(jdb: ConsensusDB): Promise<void>;
        constructor(dbPrefix: string, light?: boolean);
    }
// Not registered globally
// export class UpgradeHelper {
// public static recomputeTotals(jdb: ConsensusDB): Promise<void>;
// }
    /**
     * Factory for the four consensus flavors; `volatile*` variants use
     * non-persistent storage.
     */
    export class Consensus {
        public static full(netconfig?: NetworkConfig): Promise<FullConsensus>;
        public static light(netconfig?: NetworkConfig): Promise<LightConsensus>;
        public static nano(netconfig?: NetworkConfig): Promise<NanoConsensus>;
        public static pico(netconfig?: NetworkConfig): Promise<PicoConsensus>;
        public static volatileFull(netconfig?: NetworkConfig): Promise<FullConsensus>;
        public static volatileLight(netconfig?: NetworkConfig): Promise<LightConsensus>;
        public static volatileNano(netconfig?: NetworkConfig): Promise<NanoConsensus>;
        public static volatilePico(netconfig?: NetworkConfig): Promise<PicoConsensus>;
    }
    /** Transport protocol flags (note: bit-flag values, not sequential). */
    export class Protocol {
        public static DUMB: 0;
        public static WSS: 1;
        public static RTC: 2;
        public static WS: 4;
    }
    /**
     * Base class of all wire messages; concrete message classes below extend
     * it. `Type` maps message names to their numeric wire codes.
     */
    export class Message {
        public static MAGIC: 0x42042042;
        public static Type: {
            VERSION: 0;
            INV: 1;
            GET_DATA: 2;
            GET_HEADER: 3;
            NOT_FOUND: 4;
            GET_BLOCKS: 5;
            BLOCK: 6;
            HEADER: 7;
            TX: 8;
            MEMPOOL: 9;
            REJECT: 10;
            SUBSCRIBE: 11;
            ADDR: 20;
            GET_ADDR: 21;
            PING: 22;
            PONG: 23;
            SIGNAL: 30;
            GET_CHAIN_PROOF: 40;
            CHAIN_PROOF: 41;
            GET_ACCOUNTS_PROOF: 42;
            ACCOUNTS_PROOF: 43;
            GET_ACCOUNTS_TREE_CHUNK: 44;
            ACCOUNTS_TREE_CHUNK: 45;
            // NOTE(review): 47 and 49 each appear twice below — presumably
            // the *_BY_ADDRESSES/*_BY_ADDRESS names are aliases of the older
            // names at the same wire code; confirm against the implementation.
            GET_TRANSACTIONS_PROOF: 47;
            GET_TRANSACTIONS_PROOF_BY_ADDRESSES: 47;
            TRANSACTIONS_PROOF: 48;
            GET_TRANSACTION_RECEIPTS: 49;
            GET_TRANSACTION_RECEIPTS_BY_ADDRESS: 49;
            TRANSACTION_RECEIPTS: 50;
            GET_BLOCK_PROOF: 51;
            BLOCK_PROOF: 52;
            GET_TRANSACTIONS_PROOF_BY_HASHES: 53;
            GET_TRANSACTION_RECEIPTS_BY_HASHES: 54;
            GET_BLOCK_PROOF_AT: 55;
            GET_HEAD: 60;
            HEAD: 61;
            VERACK: 90;
        };
        public static peekType(buf: SerialBuffer): Message.Type;
        public static peekLength(buf: SerialBuffer): number;
        public static unserialize(buf: SerialBuffer): Message;
        public serializedSize: number;
        public type: Message.Type;
        constructor(type: Message.Type);
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public toString(): string;
    }
    /** Literal-union companion for {@link Message.type} wire codes. */
    export namespace Message {
        type Type = Type.VERSION | Type.INV | Type.GET_DATA | Type.GET_HEADER | Type.NOT_FOUND | Type.GET_BLOCKS | Type.BLOCK | Type.HEADER | Type.TX | Type.MEMPOOL | Type.REJECT | Type.SUBSCRIBE | Type.ADDR | Type.GET_ADDR | Type.PING | Type.PONG | Type.SIGNAL | Type.GET_CHAIN_PROOF | Type.CHAIN_PROOF | Type.GET_ACCOUNTS_PROOF | Type.ACCOUNTS_PROOF | Type.GET_ACCOUNTS_TREE_CHUNK | Type.ACCOUNTS_TREE_CHUNK | Type.GET_TRANSACTIONS_PROOF | Type.GET_TRANSACTIONS_PROOF_BY_ADDRESSES | Type.TRANSACTIONS_PROOF | Type.GET_TRANSACTION_RECEIPTS | Type.GET_TRANSACTION_RECEIPTS_BY_ADDRESS | Type.TRANSACTION_RECEIPTS | Type.GET_BLOCK_PROOF | Type.BLOCK_PROOF | Type.GET_TRANSACTIONS_PROOF_BY_HASHES | Type.GET_TRANSACTION_RECEIPTS_BY_HASHES | Type.GET_BLOCK_PROOF_AT | Type.GET_HEAD | Type.HEAD | Type.VERACK;
        namespace Type {
            type VERSION = 0;
            type INV = 1;
            type GET_DATA = 2;
            type GET_HEADER = 3;
            type NOT_FOUND = 4;
            type GET_BLOCKS = 5;
            type BLOCK = 6;
            type HEADER = 7;
            type TX = 8;
            type MEMPOOL = 9;
            type REJECT = 10;
            type SUBSCRIBE = 11;
            type ADDR = 20;
            type GET_ADDR = 21;
            type PING = 22;
            type PONG = 23;
            type SIGNAL = 30;
            type GET_CHAIN_PROOF = 40;
            type CHAIN_PROOF = 41;
            type GET_ACCOUNTS_PROOF = 42;
            type ACCOUNTS_PROOF = 43;
            type GET_ACCOUNTS_TREE_CHUNK = 44;
            type ACCOUNTS_TREE_CHUNK = 45;
            type GET_TRANSACTIONS_PROOF = 47;
            type GET_TRANSACTIONS_PROOF_BY_ADDRESSES = 47;
            type TRANSACTIONS_PROOF = 48;
            type GET_TRANSACTION_RECEIPTS = 49;
            type GET_TRANSACTION_RECEIPTS_BY_ADDRESS = 49;
            type TRANSACTION_RECEIPTS = 50;
            type GET_BLOCK_PROOF = 51;
            type BLOCK_PROOF = 52;
            type GET_TRANSACTIONS_PROOF_BY_HASHES = 53;
            type GET_TRANSACTION_RECEIPTS_BY_HASHES = 54;
            type GET_BLOCK_PROOF_AT = 55;
            type GET_HEAD = 60;
            type HEAD = 61;
            type VERACK = 90;
        }
    }
    /** ADDR message: announces known peer addresses. */
    export class AddrMessage extends Message {
        public static ADDRESSES_MAX_COUNT: 1000;
        public static unserialize(buf: SerialBuffer): AddrMessage;
        public addresses: PeerAddress[];
        constructor(addresses: PeerAddress[]);
    }
    /** BLOCK message: carries a full block. */
    export class BlockMessage extends Message {
        public static unserialize(buf: SerialBuffer): BlockMessage;
        public block: Block;
        constructor(block: Block);
    }
    /**
     * Block message constructed from already-serialized bytes.
     * NOTE(review): the constructor takes `Uint8Array` while the `block`
     * getter is typed `Block` — presumably deserialized lazily; confirm.
     */
    export class RawBlockMessage extends Message {
        public static unserialize(buf: SerialBuffer): RawBlockMessage;
        public block: Block;
        constructor(block: Uint8Array);
    }
    /** GET_ADDR message: requests peer addresses matching the given masks. */
    export class GetAddrMessage extends Message {
        public static unserialize(buf: SerialBuffer): GetAddrMessage;
        public protocolMask: number;
        public serviceMask: number;
        public maxResults: number;
        constructor(
            protocolMask: number,
            serviceMask: number,
            maxResults: number,
        );
    }
    /** GET_BLOCKS message: requests inventory starting from known locator hashes. */
    export class GetBlocksMessage extends Message {
        public static LOCATORS_MAX_COUNT: 128;
        public static Direction: {
            FORWARD: 0x1;
            BACKWARD: 0x2;
        };
        public static unserialize(buf: SerialBuffer): GetBlocksMessage;
        public locators: Hash[];
        public direction: GetBlocksMessage.Direction;
        public maxInvSize: number;
        constructor(
            locators: Hash[],
            maxInvSize?: number,
            direction?: GetBlocksMessage.Direction,
        );
    }
    /** Literal-union companion for {@link GetBlocksMessage.direction}. */
    export namespace GetBlocksMessage {
        type Direction = Direction.FORWARD | Direction.BACKWARD;
        namespace Direction {
            type FORWARD = 0x1;
            type BACKWARD = 0x2;
        }
    }
    /** HEADER message: carries a single block header. */
    export class HeaderMessage extends Message {
        public static unserialize(buf: SerialBuffer): HeaderMessage;
        public header: BlockHeader;
        constructor(header: BlockHeader);
    }
    /** Inventory vector: (type, hash) pair identifying a block or transaction. */
    export class InvVector {
        public static Type: {
            ERROR: 0;
            TRANSACTION: 1;
            BLOCK: 2;
            unserialize(buf: SerialBuffer): InvVector.Type;
        };
        public static fromBlock(block: Block): InvVector;
        public static fromHeader(header: BlockHeader): InvVector;
        public static fromTransaction(tx: Transaction): InvVector;
        public static unserialize(buf: SerialBuffer): InvVector;
        public serializedSize: number;
        public type: InvVector.Type;
        public hash: Hash;
        constructor(type: InvVector.Type, hash: Hash);
        public serialize(buf?: SerialBuffer): SerialBuffer;
        public equals(o: any): boolean;
        public hashCode(): string;
        public toString(): string;
    }
    /** Literal-union companion for {@link InvVector.type}. */
    export namespace InvVector {
        type Type = Type.ERROR | Type.TRANSACTION | Type.BLOCK;
        namespace Type {
            type ERROR = 0;
            type TRANSACTION = 1;
            type BLOCK = 2;
        }
    }
    /** Common base for messages carrying a list of inventory vectors. */
    export class BaseInventoryMessage extends Message {
        public static VECTORS_MAX_COUNT: 1000;
        public vectors: InvVector[];
        constructor(type: Message.Type, vectors: InvVector[]);
    }
    /** INV message: announces available inventory. */
    export class InvMessage extends BaseInventoryMessage {
        public static unserialize(buf: SerialBuffer): InvMessage;
        constructor(vectors: InvVector[]);
    }
    /** GET_DATA message: requests the full objects for the given vectors. */
    export class GetDataMessage extends BaseInventoryMessage {
        public static unserialize(buf: SerialBuffer): GetDataMessage;
        constructor(vectors: InvVector[]);
    }
    /** GET_HEADER message: requests headers for the given vectors. */
    export class GetHeaderMessage extends BaseInventoryMessage {
        public static unserialize(buf: SerialBuffer): GetHeaderMessage;
        constructor(vectors: InvVector[]);
    }
    /** NOT_FOUND message: reply for vectors the peer could not provide. */
    export class NotFoundMessage extends BaseInventoryMessage {
        public static unserialize(buf: SerialBuffer): NotFoundMessage;
        constructor(vectors: InvVector[]);
    }
    /** MEMPOOL message: requests the peer's mempool contents (no payload). */
    export class MempoolMessage extends Message {
        public static unserialize(buf: SerialBuffer): MempoolMessage;
        constructor();
    }
    /** PING message: keep-alive probe carrying a nonce to be echoed. */
    export class PingMessage extends Message {
        public static unserialize(buf: SerialBuffer): PingMessage;
        public nonce: number;
        constructor(nonce: number);
    }
    /** PONG message: reply echoing a PING nonce. */
    export class PongMessage extends Message {
        public static unserialize(buf: SerialBuffer): PongMessage;
        public nonce: number;
        constructor(nonce: number);
    }
    /** REJECT message: reports why a previously received message was rejected. */
    export class RejectMessage extends Message {
        public static Code: {
            REJECT_MALFORMED: 0x01;
            REJECT_INVALID: 0x10;
            REJECT_OBSOLETE: 0x11;
            REJECT_DOUBLE: 0x12;
            REJECT_DUST: 0x41;
            REJECT_INSUFFICIENT_FEE: 0x42;
        };
        public static unserialize(buf: SerialBuffer): RejectMessage;
        public messageType: Message.Type;
        public code: RejectMessage.Code;
        public reason: string;
        public extraData: Uint8Array;
        constructor(
            messageType: Message.Type,
            code: RejectMessage.Code,
            reason: string,
            extraData?: Uint8Array,
        );
    }
    /** Literal-union companion for {@link RejectMessage.code}. */
    export namespace RejectMessage {
        type Code = Code.REJECT_MALFORMED | Code.REJECT_INVALID | Code.REJECT_OBSOLETE | Code.REJECT_DOUBLE | Code.REJECT_DUST | Code.REJECT_INSUFFICIENT_FEE;
        namespace Code {
            type REJECT_MALFORMED = 0x01;
            type REJECT_INVALID = 0x10;
            type REJECT_OBSOLETE = 0x11;
            type REJECT_DOUBLE = 0x12;
            type REJECT_DUST = 0x41;
            type REJECT_INSUFFICIENT_FEE = 0x42;
        }
    }
    /**
     * SIGNAL message: routed payload between two peers identified by PeerId,
     * with TTL and routing-failure flags.
     */
    export class SignalMessage extends Message {
        public static Flag: {
            UNROUTABLE: 0x1;
            TTL_EXCEEDED: 0x2;
        };
        public static unserialize(buf: SerialBuffer): SignalMessage;
        public senderId: PeerId;
        public recipientId: PeerId;
        public nonce: number;
        public ttl: number;
        public flags: SignalMessage.Flag | number;
        public payload: Uint8Array;
        public signature: Signature;
        public senderPubKey: PublicKey;
        constructor(
            senderId: PeerId,
            recipientId: PeerId,
            nonce: number,
            ttl: number,
            flags?: SignalMessage.Flag | number,
            payload?: Uint8Array,
            senderPubKey?: PublicKey,
            signature?: Signature,
        );
        public verifySignature(): boolean;
        public hasPayload(): boolean;
        public isUnroutable(): boolean;
        public isTtlExceeded(): boolean;
    }
    /** Literal-union companion for {@link SignalMessage.flags} bit flags. */
    export namespace SignalMessage {
        type Flag = Flag.UNROUTABLE | Flag.TTL_EXCEEDED;
        namespace Flag {
            type UNROUTABLE = 0x1;
            type TTL_EXCEEDED = 0x2;
        }
    }
    /** SUBSCRIBE message: tells the peer which announcements to deliver. */
    export class SubscribeMessage extends Message {
        public static unserialize(buf: SerialBuffer): SubscribeMessage;
        public subscription: Subscription;
        constructor(subscription: Subscription);
    }
    /** TX message: carries a transaction, optionally with an accounts proof. */
    export class TxMessage extends Message {
        public static unserialize(buf: SerialBuffer): TxMessage;
        public transaction: Transaction;
        public hasAccountsProof: boolean;
        public accountsProof: AccountsProof;
        constructor(transaction: Transaction, accountsProof?: AccountsProof);
    }
export class VersionMessage extends Message {
public static CHALLENGE_SIZE: 32;
public static unserialize(buf: SerialBuffer): VersionMessage;
public version: number;
public peerAddress: PeerAddress;
public genesisHash: Hash;
public headHadh: Hash;
public challengeNonce: Uint8Array;
public userAgent?: string;
constructor(
version: number,
peerAddress: PeerAddress,
genesisHash: Hash,
headHash: Hash,
challengeNonce: Uint8Array,
userAgent?: string,
);
}
    /** VERACK message: handshake acknowledgement with key and signature. */
    export class VerAckMessage extends Message {
        public static unserialize(buf: SerialBuffer): VerAckMessage;
        public publicKey: PublicKey;
        public signature: Signature;
        constructor(publicKey: PublicKey, signature: Signature);
    }
    /** ACCOUNTS_PROOF message: proof response; `hasProof()` reflects the optional payload. */
    export class AccountsProofMessage extends Message {
        public static unserialize(buf: SerialBuffer): AccountsProofMessage;
        public blockHash: Hash;
        public proof: AccountsProof;
        constructor(blockHash: Hash, accountsProof?: AccountsProof);
        public hasProof(): boolean;
    }
    /** GET_ACCOUNTS_PROOF message: requests proofs for addresses at a block. */
    export class GetAccountsProofMessage extends Message {
        public static ADDRESSES_MAX_COUNT: 256;
        public static unserialize(buf: SerialBuffer): GetAccountsProofMessage;
        public addresses: Address[];
        public blockHash: Hash;
        constructor(blockHash: Hash, addresses: Address[]);
    }
/** Response carrying a chain proof (used by light/nano clients to verify the chain). */
export class ChainProofMessage extends Message {
    public static unserialize(buf: SerialBuffer): ChainProofMessage;
    public proof: ChainProof;
    constructor(proof: ChainProof);
}
/** Parameterless request for the peer's chain proof. */
export class GetChainProofMessage extends Message {
    public static unserialize(buf: SerialBuffer): GetChainProofMessage;
    constructor();
}
/** Response carrying one chunk of the accounts tree; chunk may be absent (see hasChunk). */
export class AccountsTreeChunkMessage extends Message {
    public static unserialize(buf: SerialBuffer): AccountsTreeChunkMessage;
    public blockHash: Hash;
    public chunk: AccountsTreeChunk;
    constructor(blockHash: Hash, accountsTreeChunk?: AccountsTreeChunk);
    public hasChunk(): boolean;
}
/** Request for the accounts-tree chunk starting at a given prefix, anchored to a block. */
export class GetAccountsTreeChunkMessage extends Message {
    public static unserialize(buf: SerialBuffer): GetAccountsTreeChunkMessage;
    public blockHash: Hash;
    public startPrefix: string;
    constructor(blockHash: Hash, startPrefix: string);
}
/** Response carrying a transactions proof for a block; proof may be absent (see hasProof). */
export class TransactionsProofMessage extends Message {
    public static unserialize(buf: SerialBuffer): TransactionsProofMessage;
    public blockHash: Hash;
    public proof: TransactionsProof;
    constructor(blockHash: Hash, proof?: TransactionsProof);
    public hasProof(): boolean;
}
/** Request for a transactions proof for the given addresses at a block. */
export class GetTransactionsProofByAddressMessage extends Message {
    public static ADDRESSES_MAX_COUNT: 256;
    public static unserialize(buf: SerialBuffer): GetTransactionsProofByAddressMessage;
    public addresses: Address[];
    public blockHash: Hash;
    constructor(blockHash: Hash, addresses: Address[]);
}
/** Request for transaction receipts of an address, with an optional paging offset. */
export class GetTransactionReceiptsByAddressMessage extends Message {
    public static unserialize(buf: SerialBuffer): GetTransactionReceiptsByAddressMessage;
    public address: Address;
    public offset: number;
    constructor(address: Address, offset?: number);
}
/** Response carrying up to RECEIPTS_MAX_COUNT transaction receipts (may be absent). */
export class TransactionReceiptsMessage extends Message {
    public static RECEIPTS_MAX_COUNT: 500;
    public static unserialize(buf: SerialBuffer): TransactionReceiptsMessage;
    public receipts: TransactionReceipt[];
    constructor(receipts?: TransactionReceipt[]);
    public hasReceipts(): boolean;
}
/** Request for a proof that a block is an ancestor of a known block. */
export class GetBlockProofMessage extends Message {
    public static unserialize(buf: SerialBuffer): GetBlockProofMessage;
    public blockHashToProve: Hash;
    public knownBlockHash: Hash;
    constructor(blockHashToProve: Hash, knownBlockHash: Hash);
}
/** Response carrying a block proof as a BlockChain; proof may be absent (see hasProof). */
export class BlockProofMessage extends Message {
    public static unserialize(buf: SerialBuffer): BlockProofMessage;
    public proof: BlockChain;
    constructor(proof?: BlockChain);
    public hasProof(): boolean;
}
/** Parameterless request for the peer's current head block header. */
export class GetHeadMessage extends Message {
    public static unserialize(buf: SerialBuffer): GetHeadMessage;
    constructor();
}
/** Response carrying the peer's current head block header. */
export class HeadMessage extends Message {
    public static unserialize(buf: SerialBuffer): HeadMessage;
    public header: BlockHeader;
    constructor(header: BlockHeader);
}
// The following message types are declared but not yet typed in detail (upstream TODOs).
export class GetBlockProofAtMessage extends Message {} // TODO
export class GetTransactionReceiptsByHashesMessage extends Message {} // TODO
export class GetTransactionsProofByAddressesMessage extends Message {} // TODO
export class GetTransactionsProofByHashesMessage extends Message {} // TODO
/** Parses raw buffers into concrete Message subclasses via the type-indexed CLASSES map. */
export class MessageFactory {
    public static CLASSES: { [messageType: number]: Message };
    // Reads the message type without consuming the buffer.
    public static peekType(buf: SerialBuffer): Message.Type;
    public static parse(buf: SerialBuffer): Message;
}
/** Establishes WebRTC peer connections, using an existing peer channel for signaling. */
export class WebRtcConnector extends Observable {
    public static CONNECT_TIMEOUT: 8000;
    public static CONNECTORS_MAX: 6;
    public static INBOUND_CONNECTORS_MAX: 3;
    constructor(networkConfig: NetworkConfig);
    public connect(peerAddress: PeerAddress, signalChannel: PeerChannel): boolean;
    public isValidSignal(msg: { senderId: any, nonce: any }): boolean;
    public onSignal(channel: PeerChannel, msg: SignalMessage): void;
}
/** Wraps a single in-progress RTCPeerConnection being negotiated over a signaling channel. */
export class PeerConnector extends Observable {
    public static ICE_GATHERING_TIMEOUT: 1000;
    public static CONNECTION_OPEN_DELAY: 200;
    public nonce: any;
    public peerAddress: PeerAddress;
    public rtcConnection: RTCPeerConnection;
    constructor(
        networkConfig: NetworkConfig,
        signalChannel: PeerChannel,
        peerId: PeerId,
        peerAddress: PeerAddress,
    );
    public onSignal(signal: any): void;
    public close(): void;
}
/** PeerConnector for connections we initiate (we create the WebRTC offer). */
export class OutboundPeerConnector extends PeerConnector {
    constructor(
        webRtcConfig: NetworkConfig,
        peerAddress: PeerAddress,
        signalChannel: PeerChannel,
    );
    public close(): void;
}
/** PeerConnector for connections initiated by a remote peer (we answer their offer). */
export class InboundPeerConnector extends PeerConnector {
    constructor(webRtcConfig: NetworkConfig, signalChannel: PeerChannel, peerId: PeerId, offer: any);
}
/** DataChannel implementation backed by a native WebRTC data channel. */
export class WebRtcDataChannel extends DataChannel {
    public readyState: DataChannel.ReadyState;
    constructor(nativeChannel: any);
    public sendChunk(msg: any): void;
}
/** Static helpers for WebRTC, e.g. deriving a NetAddress from an ICE candidate. */
export class WebRtcUtils {
    public static candidateToNetAddress(candidate: RTCIceCandidate): NetAddress;
}
/** Establishes outbound WebSocket connections to peers. */
export class WebSocketConnector extends Observable {
    public static CONNECT_TIMEOUT: 5000;
    constructor(
        protocol: number,
        protocolPrefix: string,
        networkConfig: NetworkConfig,
    );
    public connect(peerAddress: PeerAddress): boolean;
    // Aborts a pending connection attempt to the given address.
    public abort(peerAddress: PeerAddress): void;
}
/** DataChannel implementation backed by a WebSocket. */
export class WebSocketDataChannel extends DataChannel {
    public readyState: DataChannel.ReadyState;
    constructor(ws: WebSocket);
    public sendChunk(msg: any): void;
}
/**
 * An IP-level network address (IPv4/IPv6), possibly pseudo (UNSPECIFIED/UNKNOWN).
 * `reliable` marks whether the address was learned from a trustworthy source.
 */
export class NetAddress {
    public static UNSPECIFIED: NetAddress;
    public static UNKNOWN: NetAddress;
    public static Type: {
        IPv4: 0;
        IPv6: 1;
        UNSPECIFIED: 2;
        UNKNOWN: 3;
    };
    public static fromIP(ip: string, reliable?: boolean): NetAddress;
    public static unserialize(buf: SerialBuffer): NetAddress;
    public serializedSize: number;
    public ip: Uint8Array;
    public type: NetAddress.Type;
    public reliable: boolean;
    constructor(type: NetAddress.Type, ipArray?: Uint8Array, reliable?: boolean);
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public equals(o: any): boolean;
    public hashCode(): string;
    public toString(): string;
    // True for the UNSPECIFIED/UNKNOWN placeholder types.
    public isPseudo(): boolean;
    public isPrivate(): boolean;
    public isIPv6(): boolean;
    public isIPv4(): boolean;
    // Returns the subnet address obtained by masking to `bitCount` bits.
    public subnet(bitCount: number): NetAddress;
}
/** Companion namespace for NetAddress: numeric address-type constants. */
export namespace NetAddress {
    type Type = Type.IPv4 | Type.IPv6 | Type.UNSPECIFIED | Type.UNKNOWN;
    namespace Type {
        type IPv4 = 0;
        type IPv6 = 1;
        type UNSPECIFIED = 2;
        type UNKNOWN = 3;
    }
}
/** 16-byte peer identifier, serializable and constructible from base64/hex. */
export class PeerId extends Serializable {
    public static SERIALIZED_SIZE: 16;
    public static copy(o: PeerId): PeerId;
    public static unserialize(buf: SerialBuffer): PeerId;
    public static fromBase64(base64: string): PeerId;
    public static fromHex(hex: string): PeerId;
    public serializedSize: number;
    constructor(arg: Uint8Array);
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public subarray(begin?: number, end?: number): Uint8Array;
    public equals(o: any): boolean;
    public toString(): string;
}
/**
 * Base class for signed, protocol-specific peer addresses (WS/WSS/RTC/Dumb).
 * Carries the advertised services bitmap, a timestamp, and the owner's public key.
 */
export class PeerAddress {
    public static unserialize(buf: SerialBuffer): PeerAddress;
    public serializedSize: number;
    // Size of the signed content portion (excludes the signature itself).
    public serializedContentSize: number;
    public protocol: number;
    public services: number;
    public timestamp: number;
    public netAddress: NetAddress | null;
    public publicKey: PublicKey;
    public peerId: PeerId;
    // Signaling hop distance; 0 for directly known addresses.
    public distance: number;
    public signature: Signature;
    constructor(
        protocol: number,
        services: number,
        timestamp: number,
        netAddress: NetAddress,
        publicKey: PublicKey,
        distance: number,
        signature?: Signature,
    );
    public serialize(buf?: SerialBuffer): SerialBuffer;
    public serializeContent(buf?: SerialBuffer): SerialBuffer;
    public equals(o: any): boolean;
    public hashCode(): string;
    public verifySignature(): boolean;
    public isSeed(): boolean;
    public exceedsAge(): boolean;
}
/** Common base for WebSocket-reachable peer addresses (ws:// and wss://). */
export class WsBasePeerAddress extends PeerAddress {
    // Parses a "protocol://host:port/<peerId?>" seed string into the matching subclass.
    public static fromSeedString(str: string): WsPeerAddress | WssPeerAddress;
    public host: string;
    public port: number;
    public protocolPrefix: string;
    constructor(
        protocol: number,
        services: number,
        timestamp: number,
        netAddress: NetAddress,
        publicKey: PublicKey,
        distance: number,
        host: string,
        port: number,
        signature?: Signature,
    );
    public toSeedString(): string;
    public globallyReachable(): boolean;
    public hashCode(): string;
    public toString(): string;
}
/** Secure-WebSocket (wss://) peer address. */
export class WssPeerAddress extends WsBasePeerAddress {
    public static seed(host: string, port: number, publicKeyHex?: string): WssPeerAddress;
    public static unserialize(buf: SerialBuffer): WssPeerAddress;
    constructor(
        services: number,
        timestamp: number,
        netAddress: NetAddress,
        publicKey: PublicKey,
        distance: number,
        host: string,
        port: number,
        signature?: Signature,
    );
    // Copy of this address without its peer id.
    public withoutId(): WssPeerAddress;
}
/** Plain-WebSocket (ws://) peer address. */
export class WsPeerAddress extends WsBasePeerAddress {
    public static seed(host: string, port: number, publicKeyHex?: string): WsPeerAddress;
    public static unserialize(buf: SerialBuffer): WsPeerAddress;
    constructor(
        services: number,
        timestamp: number,
        netAddress: NetAddress,
        publicKey: PublicKey,
        distance: number,
        host: string,
        port: number,
        signature?: Signature,
    );
    public globallyReachable(): boolean;
    // Copy of this address without its peer id.
    public withoutId(): WsPeerAddress;
}
/** WebRTC peer address (reachable only via signaling). */
export class RtcPeerAddress extends PeerAddress {
    public static unserialize(buf: SerialBuffer): RtcPeerAddress;
    constructor(
        services: number,
        timestamp: number,
        netAddress: NetAddress,
        publicKey: PublicKey,
        distance: number,
        signature?: Signature,
    );
    public hashCode(): string;
    public toString(): string;
}
/** Address for peers that accept no inbound connections at all. */
export class DumbPeerAddress extends PeerAddress {
    public static unserialize(buf: SerialBuffer): DumbPeerAddress;
    constructor(
        services: number,
        timestamp: number,
        netAddress: NetAddress,
        publicKey: PublicKey,
        distance: number,
        signature?: Signature,
    );
    public hashCode(): string;
    public toString(): string;
}
/** Book-keeping record for a known peer address: connection state and failure counters. */
export class PeerAddressState {
    public static NEW: 1;
    public static ESTABLISHED: 2;
    public static TRIED: 3;
    public static FAILED: 4;
    public static BANNED: 5;
    public signalRouter: SignalRouter;
    public maxFailedAttempts: number;
    public failedAttempts: number;
    constructor(peerAddress: PeerAddress);
    public close(type: number): void;
    public equals(o: any): boolean;
    public hashCode(): string;
    public toString(): string;
}
/** Tracks the signaling routes known for one peer address and selects the best one. */
export class SignalRouter {
    constructor(peerAddress: PeerAddress);
    public bestRoute(): SignalRoute;
    // Returns whether the route set changed.
    public addRoute(signalChannel: PeerChannel, distance: number, timestamp: number): boolean;
    public deleteBestRoute(): void;
    public deleteRoute(signalChannel: PeerChannel): void;
    public deleteAllRoutes(): void;
    public hasRoute(): boolean;
    public equals(o: any): boolean;
    public hashCode(): string;
    public toString(): string;
}
/** One signaling route: a channel plus its hop distance and score. */
export class SignalRoute {
    public signalChannel: PeerChannel;
    public distance: number;
    public score: number;
    constructor(
        signalChannel: PeerChannel,
        distance: number,
        timestamp: number,
    );
    public equals(o: any): boolean;
    public hashCode(): string;
    public toString(): string;
}
/** A (possibly signed) remote list of seed peer addresses. */
export class SeedList {
    public static MAX_SIZE: number;
    public static REQUEST_TIMEOUT: number;
    // Fetches and parses a seed list from a URL, verifying with `publicKey` if given.
    public static retrieve(url: string, publicKey?: PublicKey): Promise<SeedList>;
    public static parse(listStr: string, publicKey?: PublicKey): SeedList;
    public seeds: PeerAddress[];
    public publicKey: PublicKey;
    public signature: Signature;
    constructor(
        seeds: PeerAddress[],
        publicKey?: PublicKey,
        signature?: Signature,
    );
    public serializeContent(): Uint8Array;
}
/** URL of a seed list, optionally paired with the key that signed it. */
export class SeedListUrl {
    public url: string;
    public publicKey: PublicKey;
    constructor(url: string, publicKeyHex?: string);
}
/** Collects seed peer addresses (e.g. from configured seed lists). */
export class PeerAddressSeeder extends Observable {
    public collect(): Promise<void>;
}
/**
 * The node's database of known peer addresses, with per-protocol iterators,
 * state tracking (new/tried/failed/banned) and housekeeping limits.
 */
export class PeerAddressBook extends Observable {
    public static MAX_AGE_WEBSOCKET: number;
    public static MAX_AGE_WEBRTC: number;
    public static MAX_AGE_DUMB: number;
    public static MAX_DISTANCE: number;
    public static MAX_FAILED_ATTEMPTS_WS: number;
    public static MAX_FAILED_ATTEMPTS_RTC: number;
    public static MAX_TIMESTAMP_DRIFT: number;
    public static HOUSEKEEPING_INTERVAL: number;
    public static DEFAULT_BAN_TIME: number;
    public static INITIAL_FAILED_BACKOFF: number;
    public static MAX_FAILED_BACKOFF: number;
    public static MAX_SIZE_WS: number;
    public static MAX_SIZE_WSS: number;
    public static MAX_SIZE_RTC: number;
    public static MAX_SIZE: number;
    public static MAX_SIZE_PER_IP: number;
    public static SEEDING_TIMEOUT: number;
    public knownAddressesCount: number;
    public knownWsAddressesCount: number;
    public knownWssAddressesCount: number;
    public knownRtcAddressesCount: number;
    public seeded: boolean;
    constructor(netconfig: NetworkConfig);
    public iterator(): Iterator<PeerAddressState>;
    public wsIterator(): Iterator<PeerAddressState>;
    public wssIterator(): Iterator<PeerAddressState>;
    public rtcIterator(): Iterator<PeerAddressState>;
    public getState(peerAddress: PeerAddress): undefined | PeerAddressState;
    public get(peerAddress: PeerAddress): null | PeerAddress;
    public getByPeerId(peerId: PeerId): null | PeerAddress;
    // Fixed parameter-name typo (`peedId`); declaration-only change, callers unaffected.
    public getChannelByPeerId(peerId: PeerId): null | PeerChannel;
    public query(protocolMask: number, serviceMask: number, maxAddresses: number): PeerAddress[];
    public add(channel: PeerChannel, arg: PeerAddress | PeerAddress[]): void;
    public established(channel: PeerChannel, peerAddress: PeerAddress | RtcPeerAddress): void;
    public close(channel: PeerChannel, peerAddress: PeerAddress, type?: number): void;
    public unroutable(channel: PeerChannel, peerAddress: PeerAddress): void;
    public isBanned(peerAddress: PeerAddress): boolean;
}
/** Static network/genesis configuration; call main()/test()/dev() or init() before use. */
export class GenesisConfig {
    public static NETWORK_ID: number;
    public static NETWORK_NAME: string;
    public static GENESIS_BLOCK: Block;
    public static GENESIS_HASH: Hash;
    public static GENESIS_ACCOUNTS: string;
    public static SEED_PEERS: PeerAddress[];
    public static SEED_LISTS: SeedList[];
    public static CONFIGS: { [key: string]: { NETWORK_ID: number, NETWORK_NAME: string, SEED_PEERS: PeerAddress[], SEED_LISTS: SeedListUrl, GENESIS_BLOCK: Block, GENESIS_ACCOUNTS: string } };
    public static main(): void;
    public static test(): void;
    public static dev(): void;
    public static init(config: { NETWORK_ID: number, NETWORK_NAME: string, GENESIS_BLOCK: Block, GENESIS_ACCOUNTS: string, SEED_PEERS: PeerAddress[] }): void;
    public static networkIdToNetworkName(networkId: number): string;
    public static networkIdFromAny(networkId: number | string): number;
}
/**
 * Numeric close codes for peer connections, grouped into regular closes,
 * ban-worthy offenses (isBanningType) and transient failures (isFailingType).
 */
export class CloseType {
    // Regular Close Types
    public static GET_BLOCKS_TIMEOUT: 1;
    public static GET_HEADER_TIMEOUT: 2;
    public static GET_CHAIN_PROOF_TIMEOUT: 3;
    public static GET_ACCOUNTS_PROOF_TIMEOUT: 4;
    public static GET_ACCOUNTS_TREE_CHUNK_TIMEOUT: 5;
    public static GET_TRANSACTIONS_PROOF_TIMEOUT: 6;
    public static GET_TRANSACTION_RECEIPTS_TIMEOUT: 7;
    public static SENDING_PING_MESSAGE_FAILED: 10;
    public static SENDING_OF_VERSION_MESSAGE_FAILED: 11;
    public static SIMULTANEOUS_CONNECTION: 20;
    public static DUPLICATE_CONNECTION: 21;
    public static INVALID_CONNECTION_STATE: 22;
    public static PEER_BANNED: 30;
    public static IP_BANNED: 31;
    public static MAX_PEER_COUNT_REACHED: 40;
    public static PEER_CONNECTION_RECYCLED: 41;
    public static PEER_CONNECTION_RECYCLED_INBOUND_EXCHANGE: 42;
    public static INBOUND_CONNECTIONS_BLOCKED: 43;
    public static MANUAL_NETWORK_DISCONNECT: 50;
    public static MANUAL_WEBSOCKET_DISCONNECT: 51;
    public static MANUAL_PEER_DISCONNECT: 52;
    // Ban Close Types
    public static INCOMPATIBLE_VERSION: 100;
    public static DIFFERENT_GENESIS_BLOCK: 101;
    public static INVALID_PEER_ADDRESS_IN_VERSION_MESSAGE: 102;
    public static UNEXPECTED_PEER_ADDRESS_IN_VERSION_MESSAGE: 103;
    public static INVALID_PUBLIC_KEY_IN_VERACK_MESSAGE: 104;
    public static INVALID_SIGNATURE_IN_VERACK_MESSAGE: 105;
    public static ADDR_MESSAGE_TOO_LARGE: 110;
    public static ADDR_NOT_GLOBALLY_REACHABLE: 111;
    public static INVALID_ADDR: 112;
    public static INVALID_SIGNAL_TTL: 113;
    public static INVALID_BLOCK: 120;
    public static INVALID_HEADER: 121;
    // NOTE(review): "CHUNCK" spelling presumably mirrors the upstream runtime
    // constant — confirm against the implementation before renaming.
    public static INVALID_ACCOUNTS_TREE_CHUNCK: 122;
    public static INVALID_ACCOUNTS_PROOF: 123;
    public static INVALID_CHAIN_PROOF: 124;
    public static INVALID_TRANSACTION_PROOF: 125;
    public static INVALID_BLOCK_PROOF: 126;
    public static RATE_LIMIT_EXCEEDED: 130;
    public static BLOCKCHAIN_SYNC_FAILED: 140;
    public static MANUAL_PEER_BAN: 150;
    // Fail Close Types
    public static CONNECTION_FAILED: 200;
    public static CLOSED_BY_REMOTE: 201;
    public static NETWORK_ERROR: 202;
    public static CHANNEL_CLOSING: 203;
    public static VERSION_TIMEOUT: 210;
    public static VERACK_TIMEOUT: 211;
    public static PING_TIMEOUT: 212;
    public static CONNECTION_LIMIT_PER_IP: 220;
    public static CONNECTION_LIMIT_DUMB: 221;
    public static FAILED_TO_PARSE_MESSAGE_TYPE: 230;
    public static UNEXPECTED_ACCOUNTS_TREE_CHUNK: 231;
    public static UNEXPECTED_HEADER: 232;
    public static TRANSACTION_NOT_MATCHING_SUBSCRIPTION: 233;
    public static ABORTED_SYNC: 240;
    public static MANUAL_PEER_FAIL: 250;
    public static isBanningType(closeType: number): boolean;
    public static isFailingType(closeType: number): boolean;
}
/** A raw transport connection to a peer, wrapping a DataChannel with byte counters. */
export class NetworkConnection extends Observable {
    public id: number;
    public protocol: number;
    public peerAddress: PeerAddress;
    public netAddress: NetAddress;
    public bytesSent: number;
    public bytesReceived: number;
    public inbound: boolean;
    public outbound: boolean;
    public closed: boolean;
    public lastMessageReceivedAt: number;
    constructor(
        channel: DataChannel,
        protocol: number,
        netAddress: NetAddress,
        peerAddress: PeerAddress,
    );
    public send(msg: Uint8Array): boolean;
    // Registers message types we expect; timeoutCallback fires if none arrives in time.
    public expectMessage(types: Message.Type | Message.Type[], timeoutCallback: () => any, msgTimeout?: number, chunkTimeout?: number): void;
    public isExpectingMessage(type: Message.Type): boolean;
    public confirmExpectedMessage(type: Message.Type, success: boolean): void;
    public close(type?: number, reason?: string): void;
    public equals(o: any): boolean;
    public hashCode(): string;
    public toString(): string;
}
/**
 * Message-level view of a NetworkConnection: one send method per protocol
 * message type, each returning whether the message was queued successfully.
 */
export class PeerChannel extends Observable {
    public connection: NetworkConnection;
    public id: number;
    public protocol: number;
    public peerAddress: PeerAddress;
    public netAddress: NetAddress;
    public closed: boolean;
    public lastMessageReceivedAt: number;
    public Event: { [messageType: number]: string };
    constructor(connection: NetworkConnection);
    public expectMessage(types: Message.Type | Message.Type[], timeoutCallback: () => any, msgTimeout?: number, chunkTimeout?: number): void;
    public isExpectingMessage(type: Message.Type): boolean;
    public close(type?: number, reason?: string): void;
    public version(peerAddress: PeerAddress, headHash: Hash, challengeNonce: Uint8Array, appAgent?: string): boolean;
    public verack(publicKey: PublicKey, signature: Signature): boolean;
    public inv(vectors: InvVector[]): boolean;
    public notFound(vectors: InvVector[]): boolean;
    public getData(vectors: InvVector[]): boolean;
    public getHeader(vectors: InvVector[]): boolean;
    public block(block: Block): boolean;
    public rawBlock(block: Uint8Array): boolean;
    public header(header: BlockHeader): boolean;
    public tx(transaction: Transaction, accountsProof?: AccountsProof): boolean;
    public getBlocks(locators: Hash[], maxInvSize: number, ascending?: boolean): boolean;
    public mempool(): boolean;
    public reject(messageType: Message.Type, code: RejectMessage.Code, reason: string, extraData?: Uint8Array): boolean;
    public subscribe(subscription: Subscription): boolean;
    public addr(addresses: PeerAddress[]): boolean;
    public getAddr(protocolMask: number, serviceMask: number, maxResults: number): boolean;
    public ping(nonce: number): boolean;
    public pong(nonce: number): boolean;
    public signal(senderId: PeerId, recipientId: PeerId, nonce: number, ttl: number, flags: SignalMessage.Flag | number, payload?: Uint8Array, senderPubKey?: PublicKey, signature?: Signature): boolean;
    public getAccountsProof(blockHash: Hash, addresses: Address[]): boolean;
    public accountsProof(blockHash: Hash, proof?: AccountsProof): boolean;
    public getChainProof(): boolean;
    public chainProof(proof: ChainProof): boolean;
    public getAccountsTreeChunk(blockHash: Hash, startPrefix: string): boolean;
    public accountsTreeChunk(blockHash: Hash, chunk?: AccountsTreeChunk): boolean;
    public getTransactionsProof(blockHash: Hash, addresses: Address[]): boolean;
    public getTransactionsProofByAddresses(blockHash: Hash, addresses: Address[]): boolean;
    public getTransactionsProofByHashes(blockHash: Hash, hashes: Hash[]): boolean;
    public transactionsProof(blockHash: Hash, proof?: TransactionsProof): boolean;
    public getTransactionReceipts(address: Address): boolean;
    public getTransactionReceiptsByAddress(address: Address): boolean;
    public getTransactionReceiptsByHashes(hashes: Hash[]): boolean;
    public transactionReceipts(transactionReceipts?: TransactionReceipt[]): boolean;
    public getBlockProof(blockHashToProve: Hash, knownBlockHash: Hash): boolean;
    public getBlockProofAt(blockHeightToProve: number, knownBlockHash: Hash): boolean;
    public blockProof(proof?: BlockChain): boolean;
    public getHead(): boolean;
    public head(header: BlockHeader): boolean;
    public equals(o: any): boolean;
    public hashCode(): string;
    public toString(): string;
}
/** Drives the version/verack handshake and periodic address exchange on one channel. */
export class NetworkAgent {
    public static HANDSHAKE_TIMEOUT: 4000; // 4 seconds
    public static PING_TIMEOUT: 10000; // 10 seconds
    public static CONNECTIVITY_CHECK_INTERVAL: 60000; // 1 minute
    public static ANNOUNCE_ADDR_INTERVAL: 600000; // 10 minutes
    public static VERSION_ATTEMPTS_MAX: 10;
    public static VERSION_RETRY_DELAY: 500; // 500 ms
    public static GETADDR_RATE_LIMIT: 3; // per minute
    public static MAX_ADDR_PER_MESSAGE: 1000;
    public static MAX_ADDR_PER_REQUEST: 500;
    public static NUM_ADDR_PER_REQUEST: 200;
    public channel: PeerChannel;
    public peer: Peer;
    constructor(
        blockchain: IBlockchain,
        addresses: PeerAddressBook,
        networkConfig: NetworkConfig,
        channel: PeerChannel,
    );
    public handshake(): void;
    public requestAddresses(maxResults?: number): void;
}
/** Per-connection latency and message-count statistics. */
export class PeerConnectionStatistics {
    public latencyMedian: number;
    constructor();
    public reset(): void;
    public addLatency(latency: number): void;
    public addMessage(msg: Message): void;
    public getMessageCount(msgType: number): number;
}
/** Aggregates everything about one peer connection: raw connection, channel, agent, state. */
export class PeerConnection {
    public static getOutbound(peerAddress: PeerAddress): PeerConnection;
    public static getInbound(networkConnection: NetworkConnection): PeerConnection;
    public id: number;
    // One of the PeerConnectionState constants.
    public state: number;
    public peerAddress: PeerAddress;
    public networkConnection: NetworkConnection;
    public peerChannel: PeerChannel;
    public networkAgent: NetworkAgent;
    public peer: Peer;
    public score: number;
    public establishedSince: number;
    public ageEstablished: number;
    public statistics: PeerConnectionStatistics;
    constructor();
    public negotiating(): void;
    public close(): void;
}
/** Lifecycle states for PeerConnection.state. */
export class PeerConnectionState {
    public static NEW: 1;
    public static CONNECTING: 2;
    public static CONNECTED: 3;
    public static NEGOTIATING: 4;
    public static ESTABLISHED: 5;
    public static CLOSED: 6;
}
/** Handles incoming signal messages: forwards them or hands them to the RTC connector. */
export class SignalProcessor {
    constructor(
        peerAddress: PeerAddressBook,
        networkConfig: NetworkConfig,
        rtcConnector: WebRtcConnector,
    );
    public onSignal(channel: PeerChannel, msg: SignalMessage): void;
}
/** Bounded store of recently forwarded signals, used to suppress duplicates. */
export class SignalStore {
    public static SIGNAL_MAX_AGE: 10 /* seconds */;
    public length: number;
    constructor(maxSize?: number);
    public add(senderId: PeerId, recipientId: PeerId, nonce: number): void;
    public contains(senderId: PeerId, recipientId: PeerId, nonce: number): boolean;
    public signalForwarded(senderId: PeerId, recipientId: PeerId, nonce: number): boolean;
}
/** Identity of a forwarded signal (sender, recipient, nonce), used as a store key. */
export class ForwardedSignal {
    constructor(
        senderId: PeerId,
        recipientId: PeerId,
        nonce: number,
    );
    public equals(o: any): boolean;
    public hashCode(): string;
    public toString(): string;
}
/** Manages all live PeerConnections with per-protocol/per-service counters and IP bans. */
export class ConnectionPool {
    public static DEFAULT_BAN_TIME: 600000;
    public static UNBAN_IPS_INTERVAL: 60000;
    public peerCountWs: number;
    public peerCountWss: number;
    public peerCountRtc: number;
    public peerCountDumb: number;
    public peerCount: number;
    public peerCountFull: number;
    public peerCountLight: number;
    public peerCountNano: number;
    public peerCountOutbound: number;
    public peerCountFullWsOutbound: number;
    public connectingCount: number;
    public count: number;
    public bytesSent: number;
    public bytesReceived: number;
    public allowInboundExchange: boolean;
    public allowInboundConnections: boolean;
    constructor(
        peerAddresses: PeerAddressBook,
        networkConfig: NetworkConfig,
        blockchain: IBlockchain,
    );
    public values(): PeerConnection[];
    public valueIterator(): Iterator<PeerConnection>;
    public getConnectionByPeerAddress(peerAddress: PeerAddress): null | PeerConnection;
    public getConnectionsByNetAddress(netAddress: NetAddress): PeerConnection[];
    public getConnectionsBySubnet(netAddress: NetAddress): PeerConnection[];
    public getOutboundConnectionsBySubnet(netAddress: NetAddress): PeerConnection[];
    public connectOutbound(peerAddress: PeerAddress): boolean;
    public disconnect(reason: string | any): void;
}
/** Scores connections and picks addresses to dial, recycling low-scoring peers. */
export class PeerScorer {
    public static PEER_COUNT_MIN_FULL_WS_OUTBOUND: number;
    public static PEER_COUNT_MIN_OUTBOUND: number;
    public static PICK_SELECTION_SIZE: 100;
    public static MIN_AGE_FULL: number;
    public static BEST_AGE_FULL: number;
    public static MIN_AGE_LIGHT: number;
    public static BEST_AGE_LIGHT: number;
    public static MAX_AGE_LIGHT: number;
    public static MIN_AGE_NANO: number;
    public static BEST_AGE_NANO: number;
    public static MAX_AGE_NANO: number;
    public static BEST_PROTOCOL_WS_DISTRIBUTION: 0.15; // 15%
    public lowestConnectionScore: number;
    public connectionScores: PeerConnection[];
    constructor(
        networkConfig: NetworkConfig,
        addresses: PeerAddressBook,
        connections: ConnectionPool,
    );
    public pickAddress(): null | PeerAddress;
    public isGoodPeerSet(): boolean;
    public needsGoodPeers(): boolean;
    public needsMorePeers(): boolean;
    public isGoodPeer(): boolean;
    public scoreConnections(): void;
    public recycleConnections(count: number, type: number, reason: string): void;
}
/** Base class for per-protocol node configuration (keys, identity, own peer address). */
export class NetworkConfig {
    public static getDefault(): NetworkConfig;
    public protocol: number;
    public protocolMask: number;
    public keyPair: KeyPair;
    public publicKey: PublicKey;
    public peerId: PeerId;
    public services: Services;
    public peerAddress: PeerAddress;
    public appAgent: string;
    constructor(protocolMask: number);
    // Loads/creates the persistent peer key.
    public initPersistent(): Promise<void>;
    // Creates a throwaway peer key (not stored).
    public initVolatile(): Promise<void>;
    public canConnect(protocol: number): boolean;
}
/** NetworkConfig for ws:// nodes, with optional reverse-proxy settings. */
export class WsNetworkConfig extends NetworkConfig {
    public protocol: number;
    public port: number;
    public reverseProxy: { enabled: boolean, port: number, addresses: string[], header: string };
    public peerAddress: WsPeerAddress | WssPeerAddress;
    public secure: boolean;
    constructor(
        host: string,
        port: number,
        reverseProxy: { enabled: boolean, port: number, addresses: string[], header: string },
    );
}
/** NetworkConfig for wss:// nodes: WsNetworkConfig plus TLS key/cert paths. */
export class WssNetworkConfig extends WsNetworkConfig {
    public ssl: { key: string, cert: string };
    constructor(
        host: string,
        port: number,
        key: string,
        cert: string,
        reverseProxy: { enabled: boolean, port: number, addresses: string[], header: string },
    );
}
/** NetworkConfig for browser/WebRTC nodes. */
export class RtcNetworkConfig extends NetworkConfig {
    public rtcConfig: RTCConfiguration;
    public peerAddress: RtcPeerAddress;
    constructor();
}
/** NetworkConfig for nodes that cannot accept any inbound connections. */
export class DumbNetworkConfig extends NetworkConfig {
    public peerAddress: DumbPeerAddress;
    constructor();
}
/** Top-level network component: owns the address book and connection pool. */
export class Network extends Observable {
    public static PEER_COUNT_MAX: number;
    public static INBOUND_PEER_COUNT_PER_SUBNET_MAX: number;
    public static OUTBOUND_PEER_COUNT_PER_SUBNET_MAX: 2;
    public static PEER_COUNT_PER_IP_MAX: number;
    public static PEER_COUNT_DUMB_MAX: 1000;
    public static IPV4_SUBNET_MASK: 24;
    public static IPV6_SUBNET_MASK: 96;
    public static PEER_COUNT_RECYCLING_ACTIVE: number;
    public static RECYCLING_PERCENTAGE_MIN: 0.01;
    public static RECYCLING_PERCENTAGE_MAX: 0.20;
    public static CONNECTING_COUNT_MAX: 2;
    public static SIGNAL_TTL_INITIAL: 3;
    public static CONNECT_BACKOFF_INITIAL: 2000; // 2 seconds
    public static CONNECT_BACKOFF_MAX: 600000; // 10 minutes
    public static TIME_OFFSET_MAX: number; // 10 minutes
    public static HOUSEKEEPING_INTERVAL: number; // 5 minutes
    public static SCORE_INBOUND_EXCHANGE: 0.5;
    public static CONNECT_THROTTLE: 500; // 0.5 seconds
    public static ADDRESS_REQUEST_CUTOFF: 250;
    public static ADDRESS_REQUEST_PEERS: 2;
    public static SIGNALING_ENABLED: 1;
    public time: Time;
    public peerCount: number;
    public peerCountWebSocket: number;
    public peerCountWebSocketSecure: number;
    public peerCountWebRtc: number;
    public peerCountDumb: number;
    public peerCountConnecting: number;
    public knownAddressesCount: number;
    public bytesSent: number;
    public bytesReceived: number;
    public allowInboundConnections: boolean;
    public addresses: PeerAddressBook;
    public connections: ConnectionPool;
    public config: NetworkConfig;
    constructor(
        blockchain: IBlockchain,
        networkConfig: NetworkConfig,
        time: Time,
    );
    public connect(): void;
    public disconnect(reason: string | any): void;
}
/** Static IP-address helpers: classification, parsing, and subnet math. */
export class NetUtils {
    public static IPv4_LENGTH: 4;
    public static IPv6_LENGTH: 16;
    public static IPv4_PRIVATE_NETWORK: string[];
    public static isPrivateIP(ip: string | Uint8Array): boolean;
    public static isLocalIP(ip: string | Uint8Array): boolean;
    public static isIPv4inSubnet(ip: string | Uint8Array, subnet: string): boolean;
    public static isIPv4Address(ip: string | Uint8Array): boolean;
    public static isIPv6Address(ip: string | Uint8Array): boolean;
    public static hostGloballyReachable(host: string): boolean;
    public static ipToBytes(ip: string): Uint8Array;
    public static bytesToIp(ip: Uint8Array): string;
    public static ipToSubnet(ip: string | Uint8Array, bitCount: number): string | Uint8Array;
}
/** Persistent (or volatile) storage for the node's peer key pair. */
export class PeerKeyStore {
    public static VERSION: number;
    public static KEY_DATABASE: string;
    public static INITIAL_DB_SIZE: number;
    public static getPersistent(): Promise<PeerKeyStore>;
    public static createVolatile(): PeerKeyStore;
    constructor(store: any);
    public get(key: string): Promise<KeyPair>;
    public put(key: string, keyPair: KeyPair): Promise<void>;
}
/** Database codec describing how peer keys are encoded for leveldb/lmdb backends. */
export class PeerKeyStoreCodec {
    public leveldbValueEncoding: string;
    public lmdbValueEncoding: object;
    public encode(obj: any): any;
    public decode(buf: any, key: string): any;
}
/** An established, handshaken peer: channel plus the data learned during version exchange. */
export class Peer {
    public channel: PeerChannel;
    public version: number;
    public headHash: Hash;
    public head: BlockHeader;
    // Clock difference to the peer as measured during the handshake.
    public timeOffset: number;
    public id: number;
    public peerAddress: PeerAddress;
    public netAddress: NetAddress;
    public userAgent?: string;
    constructor(
        channel: PeerChannel,
        version: number,
        headHash: Hash,
        timeOffset: number,
        userAgent?: string,
    );
    public equals(o: any): boolean;
    public hashCode(): string;
    public toString(): string;
}
/** Proof-of-work miner assembling blocks from the mempool and dispatching work to threads. */
export class Miner extends Observable {
    public static MIN_TIME_ON_BLOCK: 10000;
    public static MOVING_AVERAGE_MAX_SIZE: 10;
    public address: Address;
    public working: boolean;
    public hashrate: number;
    public threads: number;
    public throttleWait: number;
    public throttleAfter: number;
    public extraData: Uint8Array;
    public shareCompact: number;
    public numBlocksMined: number;
    constructor(
        blockchain: BaseChain,
        accounts: Accounts,
        mempool: Mempool,
        time: Time,
        minerAddress: Address,
        extraData?: Uint8Array,
    );
    public startWork(): void;
    // Callback invoked when a worker thread finds a share/solution.
    public onWorkerShare(obj: {hash: Hash, nonce: number, block: Block}): void;
    public getNextBlock(address?: Address, extraData?: Uint8Array): Promise<Block>;
    public stopWork(): void;
    // Bracket config mutations with start/finishConfigChanges.
    public startConfigChanges(): void;
    public finishConfigChanges(): void;
}
/** Miner variant that connects to a mining pool (nano or smart mode). */
export abstract class BasePoolMiner extends Miner {
    public static PAYOUT_NONCE_PREFIX: 'POOL_PAYOUT';
    public static RECONNECT_TIMEOUT: 3000;
    public static RECONNECT_TIMEOUT_MAX: 30000;
    public static ConnectionState: {
        CONNECTED: 0;
        CONNECTING: 1;
        CLOSED: 2;
    };
    public static Mode: {
        NANO: 'nano';
        SMART: 'smart';
    };
    // Derives a stable device id from the node's network identity.
    public static generateDeviceId(networkConfig: NetworkConfig): number;
    public host: string;
    public port: number;
    public address: Address;
    constructor(
        mode: BasePoolMiner.Mode,
        blockchain: BaseChain,
        accounts: Accounts,
        mempool: Mempool,
        time: Time,
        address: Address,
        deviceId: number,
        deviceData: object | null,
        extraData?: Uint8Array,
    );
    public requestPayout(): void;
    public connect(host: string, port: number): void;
    public disconnect(): void;
    public isConnected(): boolean;
    public isDisconnected(): boolean;
}
/** Companion namespace for BasePoolMiner: connection states and pool modes. */
export namespace BasePoolMiner {
    type ConnectionState = ConnectionState.CONNECTED | ConnectionState.CONNECTING | ConnectionState.CLOSED;
    namespace ConnectionState {
        type CONNECTED = 0;
        type CONNECTING = 1;
        type CLOSED = 2;
    }
    type Mode = Mode.NANO | Mode.SMART;
    namespace Mode {
        type NANO = 'nano';
        type SMART = 'smart';
    }
}
    /**
     * Pool miner operating in 'smart' mode (constructor presumably passes
     * Mode.SMART to BasePoolMiner — confirm in implementation). Takes the full
     * set of chain components, unlike NanoPoolMiner.
     */
    export class SmartPoolMiner extends BasePoolMiner {
        constructor(
            blockchain: BaseChain,
            accounts: Accounts,
            mempool: Mempool,
            time: Time,
            address: Address,
            deviceId: number,
            deviceData: object | null,
            extraData?: Uint8Array,
        );
    }
    /**
     * Pool miner operating in 'nano' mode: note the constructor takes no
     * Accounts/Mempool (the pool presumably supplies block templates —
     * confirm in implementation).
     */
    export class NanoPoolMiner extends BasePoolMiner {
        constructor(
            blockchain: BaseChain,
            time: Time,
            address: Address,
            deviceId: number,
            deviceData: object | null,
        );
        // The override narrows Miner.getNextBlock's signature (no params,
        // synchronous Block instead of Promise<Block>), which TypeScript
        // rejects — hence the suppression.
        // @ts-ignore
        public getNextBlock(): Block;
    }
    /**
     * Single-key wallet wrapping a KeyPair; creates and signs basic
     * transactions and supports plain/encrypted (de)serialization.
     */
    export class Wallet {
        /** Creates a wallet with a freshly generated key pair. */
        public static generate(): Wallet;
        /** Deserializes a wallet from its unencrypted export format. */
        public static loadPlain(buf: Uint8Array | string): Wallet;
        /** Deserializes a wallet from its encrypted export format, decrypting with `key`. */
        public static loadEncrypted(buf: Uint8Array | string, key: Uint8Array | string): Promise<Wallet>;
        /** Whether the underlying key pair is currently locked. */
        public isLocked: boolean;
        /** Address derived from the wallet's public key. */
        public address: Address;
        public publicKey: PublicKey;
        public keyPair: KeyPair;
        constructor(keyPair: KeyPair);
        /** Builds and signs a basic value-transfer transaction from this wallet. */
        public createTransaction(recipient: Address, value: number, fee: number, validityStartHeight: number): BasicTransaction;
        /** Produces a signature proof for the given transaction with this wallet's key. */
        public signTransaction(transaction: Transaction): SignatureProof;
        /** Serializes the wallet without encryption. */
        public exportPlain(): Uint8Array;
        /** Serializes the wallet encrypted under `key`. */
        public exportEncrypted(key: Uint8Array|string): Promise<SerialBuffer>;
        /** Locks the key pair under `key`. */
        public lock(key: Uint8Array | string): Promise<void>;
        /** Re-locks without requiring the key again — presumably reuses prior lock state; verify. */
        public relock(): void;
        /** Unlocks the key pair with `key`. */
        public unlock(key: Uint8Array | string): Promise<void>;
        public equals(o: any): boolean;
    }
    // @ts-ignore suppresses override-compatibility errors: several members
    // below intentionally narrow or change Wallet's signatures (see the
    // per-member suppressions further down).
    // @ts-ignore
    export class MultiSigWallet extends Wallet {
        /** Builds an m-of-n wallet from one own key pair and the set of participant public keys. */
        public static fromPublicKeys(keyPair: KeyPair, minSignatures: number, publicKeys: PublicKey[]): MultiSigWallet;
        public static loadPlain(buf: Uint8Array | string): MultiSigWallet;
        public static loadEncrypted(buf: Uint8Array | string, key: Uint8Array | string): Promise<MultiSigWallet>;
        /** Size in bytes of the encrypted export — presumably; verify. */
        public encryptedSize: number;
        /** Size in bytes of the plain export — presumably; verify. */
        public exportedSize: number;
        /** Minimum number of signatures (m) required to authorize a transaction. */
        public minSignatures: number;
        /** Public keys of all participants (n). */
        public publicKeys: PublicKey[];
        constructor(
            keyPair: KeyPair,
            minSignatures: number,
            publicKeys: PublicKey[],
        );
        public exportEncrypted(key: Uint8Array|string): Promise<SerialBuffer>;
        public exportPlain(): Uint8Array;
        // Returns ExtendedTransaction instead of Wallet's BasicTransaction.
        // @ts-ignore
        public createTransaction(recipientAddr: Address, value: number, fee: number, validityStartHeight: number): ExtendedTransaction;
        /** Creates a fresh commitment pair for one round of multi-signing. */
        public createCommitment(): CommitmentPair;
        /** Produces this participant's partial signature for the transaction. */
        public partiallySignTransaction(transaction: Transaction, publicKeys: PublicKey[], aggregatedCommitment: Commitment, secret: RandomSecret): PartialSignature;
        // Different parameter list than Wallet.signTransaction.
        // @ts-ignore
        public signTransaction(transaction: Transaction, aggregatedPublicKey: PublicKey, aggregatedCommitment: Commitment, signatures: PartialSignature[]): SignatureProof;
        /** Attaches the combined signature proof to the transaction and returns it. */
        public completeTransaction(transaction: Transaction, aggregatedPublicKey: PublicKey, aggregatedCommitment: Commitment, signatures: PartialSignature[]): Transaction;
    }
    /**
     * Persistent store for Wallet and MultiSigWallet instances, keyed by
     * Address, with optional per-wallet encryption keys and a "default"
     * wallet slot.
     */
    export class WalletStore {
        public static VERSION: number;
        public static INITIAL_DB_SIZE: number; // 10 MB initially
        public static MIN_RESIZE: number; // 10 MB
        /** Name of the database holding single-key wallets. */
        public static WALLET_DATABASE: string;
        /** Name of the database holding multi-sig wallets. */
        public static MULTISIG_WALLET_DATABASE: string;
        constructor(dbName?: string);
        /** Whether a default wallet has been set. */
        public hasDefault(): Promise<boolean>;
        /** Loads the default wallet, decrypting with `key` if provided. */
        public getDefault(key?: Uint8Array | string): Promise<Wallet>;
        /** Marks the wallet stored under `address` as the default. */
        public setDefault(address: Address): Promise<void>;
        /** Loads a wallet by address; resolves to null when absent. */
        public get(address: Address, key?: Uint8Array | string): Promise<null | Wallet>;
        /** Stores a wallet; `key` presumably encrypts at rest, `unlockKey` unlocks a locked wallet first — verify. */
        public put(wallet: Wallet, key?: Uint8Array | string, unlockKey?: Uint8Array | string): Promise<void>;
        public remove(address: Address): Promise<void>;
        /** Lists the addresses of all stored single-key wallets. */
        public list(): Promise<Address[]>;
        /** Multi-sig counterpart of get(). */
        public getMultiSig(address: Address, key?: Uint8Array | string): Promise<null | MultiSigWallet>;
        /** Multi-sig counterpart of put(). */
        public putMultiSig(wallet: MultiSigWallet, key?: Uint8Array | string, unlockKey?: Uint8Array | string): Promise<void>;
        public removeMultiSig(address: Address): Promise<void>;
        public listMultiSig(): Promise<Address[]>;
        /** Closes the underlying database. */
        public close(): void;
    }
    /**
     * Value codec used by WalletStore's backing database: converts stored
     * wallet objects to/from the backend's value encoding (leveldb/lmdb).
     */
    export class WalletStoreCodec {
        /** Value-encoding identifier for the LevelDB backend. */
        public leveldbValueEncoding: string;
        /** Value-encoding descriptor for the LMDB backend. */
        public lmdbValueEncoding: object;
        /** Encodes a value for storage. */
        public encode(obj: any): any;
        /** Decodes a stored value; `key` is the record's database key. */
        public decode(buf: any, key: string): any;
    }
    /**
     * Abstract worker interface for the mining back end: searches the nonce
     * range [minNonce, maxNonce) for a hash meeting the compact target.
     * Resolves with the found {hash, nonce}, or a boolean when none is found
     * in the range — presumably; verify against the implementation.
     */
    export abstract class MinerWorker {
        public multiMine(blockHeader: Uint8Array, compact: number, minNonce: number, maxNonce: number): Promise<{ hash: Uint8Array, nonce: number } | boolean>;
    }
    /**
     * Concrete MinerWorker. Extends the class returned by the
     * IWorker.Stub(MinerWorker) mixin factory, which presumably provides the
     * worker RPC plumbing — verify in IWorker.
     */
    export class MinerWorkerImpl extends IWorker.Stub(MinerWorker) {
        constructor();
        /** Initializes the worker under the given name. */
        public init(name: string): void;
        /** See MinerWorker.multiMine; `input` is the serialized block header. */
        public multiMine(input: Uint8Array, compact: number, minNonce: number, maxNonce: number): Promise<{ hash: Uint8Array, nonce: number } | boolean>;
    }
    /**
     * Pool of MinerWorker instances produced by the IWorker.Pool(MinerWorker)
     * mixin factory; distributes nonce ranges across workers and emits events.
     */
    export class MinerWorkerPool extends IWorker.Pool(MinerWorker) {
        /** Nonces assigned to a worker per run. */
        public noncesPerRun: number;
        /** Runs per mining cycle — semantics not visible here; verify. */
        public runsPerCycle: number;
        /** Wait time between cycles (ms, presumably). */
        public cycleWait: number;
        constructor(size?: number);
        /** Registers an event listener; returns a listener id usable with off(). */
        public on(type: string, callback: () => any): number;
        /** Removes the listener with the given id for the event type. */
        public off(type: string, id: number): void;
        /** Starts mining on the given block, optionally with a share target. */
        public startMiningOnBlock(block: Block, shareCompact?: number): Promise<void>;
        /** Stops all workers. */
        public stop(): void;
    }
|
<reponame>diegoperezl/cf4j
package es.upm.etsisi.cf4j.util;
/**
 * This class contains useful math methods.
 *
 * <p>All methods are static and stateless; the class is not meant to be
 * instantiated. The standard deviations computed here are population standard
 * deviations (divisor {@code n}, not {@code n - 1}).
 */
public class Maths {

  /** Utility class: prevent instantiation. */
  private Maths() {}

  /**
   * Calculates the average (arithmetic mean) of a double array.
   *
   * @param array Array of double from which to calculate the mean
   * @return Array mean; {@code NaN} for an empty array (0/0 division)
   */
  public static double arrayAverage(double[] array) {
    double sum = 0.0; // double literal (original used the misleading float literal 0f)
    for (double d : array) sum += d;
    return sum / array.length; // int length is promoted to double automatically
  }

  /**
   * Calculates the population standard deviation of a double array.
   *
   * @param array Array of double from which to calculate the standard deviation
   * @return Standard deviation of the array; {@code NaN} for an empty array
   */
  public static double arrayStandardDeviation(double[] array) {
    double average = arrayAverage(array);
    double sumSquaredDiffs = 0.0; // camelCase per Java naming conventions
    for (double d : array) sumSquaredDiffs += (d - average) * (d - average);
    return Math.sqrt(sumSquaredDiffs / array.length);
  }

  /**
   * Calculates the average (arithmetic mean) of an int array.
   *
   * @param array Array of int from which to calculate the mean
   * @return Array mean; {@code NaN} for an empty array
   */
  public static double arrayAverage(int[] array) {
    double sum = 0.0;
    for (int i : array) sum += i;
    return sum / array.length;
  }

  /**
   * Calculates the population standard deviation of an int array.
   *
   * @param array Array of int from which to calculate the standard deviation
   * @return Standard deviation of the array; {@code NaN} for an empty array
   */
  public static double arrayStandardDeviation(int[] array) {
    double average = arrayAverage(array);
    double sumSquaredDiffs = 0.0;
    for (int i : array) sumSquaredDiffs += (i - average) * (i - average);
    return Math.sqrt(sumSquaredDiffs / array.length);
  }

  /**
   * Dot product between two vectors. Iterates over {@code a}'s length, so
   * {@code b} must have at least {@code a.length} elements; extra elements of
   * {@code b} are ignored.
   *
   * @param a Vector A
   * @param b Vector B
   * @return dot_product(A, B)
   */
  public static double dotProduct(double[] a, double[] b) {
    double result = 0.0;
    for (int i = 0; i < a.length; i++) result += a[i] * b[i];
    return result;
  }

  /**
   * Returns the logarithm of a value in a specific base, via the change-of-base
   * identity log_b(x) = ln(x) / ln(b).
   *
   * @param x Value
   * @param b Base
   * @return log of x in base b
   */
  public static double log(double x, double b) {
    return Math.log(x) / Math.log(b);
  }

  /**
   * Returns the logistic (sigmoid) function g(x) = 1 / (1 + e^-x).
   *
   * @param x The given parameter x of the function g(x)
   * @return Value of the logistic function g(x), in the open interval (0, 1)
   */
  public static double logistic(double x) {
    return 1.0 / (1.0 + Math.exp(-x));
  }
}
|
#include <algorithm>
#include <cstddef>
#include <iostream>

// Prints the maximum element of a fixed integer array.
int main() {
    int nums[] = {2, 3, 7, 11, 16};
    // Derive the element count from the array itself instead of hard-coding 5,
    // so the loop bound stays correct if the initializer list changes.
    const std::size_t count = sizeof(nums) / sizeof(nums[0]);
    // std::max_element returns an iterator to the largest element in
    // [first, last); the array is non-empty, so dereferencing is safe.
    const int max = *std::max_element(nums, nums + count);
    std::cout << "The maximum is " << max << std::endl;
    return 0;
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.