text
stringlengths 3
1.05M
|
|---|
#!/usr/bin/python
import argparse
from dx.dex import Dex
from dx.printer import DexPrinter
def print_dump(args):
    """Print the DEX structures selected by the parsed command-line flags.

    args -- argparse.Namespace produced by main(); each single-letter
    attribute toggles one structure dump, and args.meta toggles meta info
    in the printer.
    """
    printer = DexPrinter(args.meta)
    dex = Dex(args.dex_file)
    # Header and map_list are single items, handled outside the table below.
    if args.H:
        printer.header(dex.header())
    if args.X:
        printer.maplist(dex.map_list())
    # (flag attribute, item producer, printer method) for every list-shaped
    # dump.  Bound methods are stored, not called, so each section is still
    # only read from the DEX file when its flag was actually given.
    sections = [
        ('S', dex.string_ids, printer.stringid),
        ('T', dex.type_ids, printer.typeid),
        ('P', dex.proto_ids, printer.protoid),
        ('F', dex.field_ids, printer.fieldid),
        ('M', dex.method_ids, printer.methodid),
        ('C', dex.class_defs, printer.classdef),
        ('t', dex.type_lists, printer.typelist),
        ('s', dex.string_data_list, printer.stringdata),
        ('c', dex.class_data_list, printer.classdata),
        ('b', dex.code_list, printer.codeitem),
        ('d', dex.debug_info_list, printer.debuginfo),
        ('i', dex.encoded_arrays, printer.encodedarray),
        ('n', dex.an_directories, printer.annotationdirectoryitem),
        ('l', dex.an_set_ref_lists, printer.annotationsetreflist),
        ('e', dex.an_set, printer.annotationsetitem),
        ('r', dex.annotations, printer.annotationitem),
    ]
    for flag, produce, emit in sections:
        if getattr(args, flag):
            for item in produce():
                emit(item)
def main():
    """Parse command-line options and dump the requested DEX structures."""
    parser = argparse.ArgumentParser(description="Dump dex structures")
    parser.add_argument('dex_file', help='Target DEX file')
    parser.add_argument('--meta', action='store_true', help='print meta info')
    # One boolean switch per DEX structure kind; the help text names the
    # structure the flag selects.
    for flag, structure in (
            ('-H', 'header_item'),
            ('-X', 'map_list'),
            ('-S', 'string_id'),
            ('-T', 'type_id'),
            ('-P', 'proto_id'),
            ('-F', 'field_id'),
            ('-M', 'method_id'),
            ('-C', 'class_def'),
            ('-t', 'type_list'),
            ('-s', 'string_data'),
            ('-c', 'class_data'),
            ('-b', 'code_item'),
            ('-d', 'debug_info'),
            ('-i', 'class_static_fields'),
            ('-n', 'class_annotations'),
            ('-l', 'annotation_set_ref_list'),
            ('-e', 'annotation_set_item'),
            ('-r', 'annotation_item'),
    ):
        parser.add_argument(flag, action='store_true', help=structure)
    args = parser.parse_args()
    print_dump(args)
# Run the dumper only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-profile_estimate" } */
extern int global;
int bar(int);
/* Exercise the compiler's "loop iv compare" branch-prediction heuristic:
   each comparison of the induction variable I against BOUND (or BOUND plus
   a small offset) inside the loop should be guessed as rarely taken, while
   the plain equality test against a constant uses a different heuristic.
   The dg-final directive at the end of the file counts the guesses, so the
   body must not be altered.  */
void foo (int bound)
{
  int i, ret = 0;  /* ret is intentionally unused; only the branches matter */
  for (i = 0; i < bound; i++)
    {
      if (i > bound)          /* never true inside the loop */
        global += bar (i);
      if (i >= bound + 2)     /* never true inside the loop */
        global += bar (i);
      if (i > bound - 2)      /* true only for the last iteration */
        global += bar (i);
      if (i + 2 > bound)      /* true only for the last two iterations */
        global += bar (i);
      if (i == 10)            /* equality against a constant, not an iv compare */
        global += bar (i);
    }
}
/* { dg-final { scan-tree-dump-times "guess loop iv compare heuristics of edge\[^:\]*: 2.0%" 4 "profile_estimate"} } */
|
#!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the RBF code."""
from decimal import Decimal
from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut
from test_framework.script import CScript, OP_DROP
from test_framework.test_framework import Bitcoin3TestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, bytes_to_hex_str, satoshi_round
MAX_REPLACEMENT_LIMIT = 100
def txToHex(tx):
    """Serialize *tx* and return the serialization as a hex string."""
    raw = tx.serialize()
    return bytes_to_hex_str(raw)
def make_utxo(node, amount, confirmed=True, scriptPubKey=CScript([1])):
    """Create a txout with a given amount and scriptPubKey

    Mines coins as needed.

    confirmed - txouts created will be confirmed in the blockchain;
    unconfirmed otherwise.
    """
    fee = 1*COIN
    # Mine until the wallet can fund amount + fee (RPC amounts are in whole
    # coins, the arguments here are in satoshis).
    while node.getbalance() < satoshi_round((amount + fee)/COIN):
        node.generate(100)
    new_addr = node.getnewaddress()
    txid = node.sendtoaddress(new_addr, satoshi_round((amount+fee)/COIN))
    tx1 = node.getrawtransaction(txid, 1)
    txid = int(txid, 16)
    i = None
    # Find which output of the funding tx pays our fresh address.
    for i, txout in enumerate(tx1['vout']):
        if txout['scriptPubKey']['addresses'] == [new_addr]:
            break
    assert i is not None
    # Respend that output into a single txout with the requested script,
    # implicitly paying `fee` to the miners.
    tx2 = CTransaction()
    tx2.vin = [CTxIn(COutPoint(txid, i))]
    tx2.vout = [CTxOut(amount, scriptPubKey)]
    tx2.rehash()
    signed_tx = node.signrawtransactionwithwallet(txToHex(tx2))
    txid = node.sendrawtransaction(signed_tx['hex'], True)
    # If requested, ensure txouts are confirmed.
    if confirmed:
        mempool_size = len(node.getrawmempool())
        while mempool_size > 0:
            node.generate(1)
            new_size = len(node.getrawmempool())
            # Error out if we have something stuck in the mempool, as this
            # would likely be a bug.
            assert(new_size < mempool_size)
            mempool_size = new_size
    return COutPoint(int(txid, 16), 0)
class ReplaceByFeeTest(Bitcoin3TestFramework):
    """Functional tests for opt-in replace-by-fee mempool policy."""

    def set_test_params(self):
        # Node 0 allows replacement (default policy); node 1 runs with
        # -mempoolreplacement=0 so replacements must be rejected there.
        self.num_nodes = 2
        self.extra_args = [
            [
                "-maxorphantx=1000",
                "-whitelist=127.0.0.1",
                "-limitancestorcount=50",
                "-limitancestorsize=101",
                "-limitdescendantcount=200",
                "-limitdescendantsize=101",
            ],
            [
                "-mempoolreplacement=0",
            ],
        ]

    def skip_test_if_missing_module(self):
        # Every sub-test spends wallet coins, so a wallet build is required.
        self.skip_if_no_wallet()

    def run_test(self):
        # Leave IBD
        self.nodes[0].generate(1)
        make_utxo(self.nodes[0], 1*COIN)
        # Ensure nodes are synced
        self.sync_all()
        self.log.info("Running test simple doublespend...")
        self.test_simple_doublespend()
        self.log.info("Running test doublespend chain...")
        self.test_doublespend_chain()
        self.log.info("Running test doublespend tree...")
        self.test_doublespend_tree()
        self.log.info("Running test replacement feeperkb...")
        self.test_replacement_feeperkb()
        self.log.info("Running test spends of conflicting outputs...")
        self.test_spends_of_conflicting_outputs()
        self.log.info("Running test new unconfirmed inputs...")
        self.test_new_unconfirmed_inputs()
        self.log.info("Running test too many replacements...")
        self.test_too_many_replacements()
        self.log.info("Running test opt-in...")
        self.test_opt_in()
        self.log.info("Running test RPC...")
        self.test_rpc()
        self.log.info("Running test prioritised transactions...")
        self.test_prioritised_transactions()
        self.log.info("Passed")

    def test_simple_doublespend(self):
        """Simple doublespend"""
        tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
        # make_utxo may have generated a bunch of blocks, so we need to sync
        # before we can spend the coins generated, or else the resulting
        # transactions might not be accepted by our peers.
        self.sync_all()
        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx1a_hex = txToHex(tx1a)
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
        self.sync_all()
        # Should fail because we haven't changed the fee
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(1 * COIN, CScript([b'b' * 35]))]
        tx1b_hex = txToHex(tx1b)
        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True)
        # This will raise an exception due to transaction replacement being disabled
        assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[1].sendrawtransaction, tx1b_hex, True)
        # Extra 0.1 BTC3 fee
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(int(0.9 * COIN), CScript([b'b' * 35]))]
        tx1b_hex = txToHex(tx1b)
        # Replacement still disabled even with "enough fee"
        assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[1].sendrawtransaction, tx1b_hex, True)
        # Works when enabled
        tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
        mempool = self.nodes[0].getrawmempool()
        assert (tx1a_txid not in mempool)
        assert (tx1b_txid in mempool)
        assert_equal(tx1b_hex, self.nodes[0].getrawtransaction(tx1b_txid))
        # Second node is running mempoolreplacement=0, will not replace originally-seen txn
        mempool = self.nodes[1].getrawmempool()
        assert tx1a_txid in mempool
        assert tx1b_txid not in mempool

    def test_doublespend_chain(self):
        """Doublespend of a long chain"""
        initial_nValue = 50*COIN
        tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
        # Build a 40-transaction chain, each child paying 1 BTC3 in fees.
        prevout = tx0_outpoint
        remaining_value = initial_nValue
        chain_txids = []
        while remaining_value > 10*COIN:
            remaining_value -= 1*COIN
            tx = CTransaction()
            tx.vin = [CTxIn(prevout, nSequence=0)]
            tx.vout = [CTxOut(remaining_value, CScript([1, OP_DROP] * 15 + [1]))]
            tx_hex = txToHex(tx)
            txid = self.nodes[0].sendrawtransaction(tx_hex, True)
            chain_txids.append(txid)
            prevout = COutPoint(int(txid, 16), 0)
        # Whether the double-spend is allowed is evaluated by including all
        # child fees - 40 BTC3 - so this attempt is rejected.
        dbl_tx = CTransaction()
        dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        dbl_tx.vout = [CTxOut(initial_nValue - 30 * COIN, CScript([1] * 35))]
        dbl_tx_hex = txToHex(dbl_tx)
        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, True)
        # Accepted with sufficient fee
        dbl_tx = CTransaction()
        dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        dbl_tx.vout = [CTxOut(1 * COIN, CScript([1] * 35))]
        dbl_tx_hex = txToHex(dbl_tx)
        self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
        # The replacement must have evicted the entire chain.
        mempool = self.nodes[0].getrawmempool()
        for doublespent_txid in chain_txids:
            assert(doublespent_txid not in mempool)

    def test_doublespend_tree(self):
        """Doublespend of a big tree of transactions"""
        initial_nValue = 50*COIN
        tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)

        def branch(prevout, initial_value, max_txs, tree_width=5, fee=0.0001*COIN, _total_txs=None):
            # Generator that fans prevout out into a tree of transactions,
            # yielding each one; _total_txs is a shared mutable counter so
            # recursion stops after max_txs transactions overall.
            if _total_txs is None:
                _total_txs = [0]
            if _total_txs[0] >= max_txs:
                return
            txout_value = (initial_value - fee) // tree_width
            if txout_value < fee:
                return
            vout = [CTxOut(txout_value, CScript([i+1]))
                    for i in range(tree_width)]
            tx = CTransaction()
            tx.vin = [CTxIn(prevout, nSequence=0)]
            tx.vout = vout
            tx_hex = txToHex(tx)
            assert(len(tx.serialize()) < 100000)
            txid = self.nodes[0].sendrawtransaction(tx_hex, True)
            yield tx
            _total_txs[0] += 1
            txid = int(txid, 16)
            for i, txout in enumerate(tx.vout):
                for x in branch(COutPoint(txid, i), txout_value,
                                max_txs,
                                tree_width=tree_width, fee=fee,
                                _total_txs=_total_txs):
                    yield x

        fee = int(0.0001*COIN)
        n = MAX_REPLACEMENT_LIMIT
        tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
        assert_equal(len(tree_txs), n)
        # Attempt double-spend, will fail because too little fee paid
        dbl_tx = CTransaction()
        dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        dbl_tx.vout = [CTxOut(initial_nValue - fee * n, CScript([1] * 35))]
        dbl_tx_hex = txToHex(dbl_tx)
        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, True)
        # 1 BTC3 fee is enough
        dbl_tx = CTransaction()
        dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        dbl_tx.vout = [CTxOut(initial_nValue - fee * n - 1 * COIN, CScript([1] * 35))]
        dbl_tx_hex = txToHex(dbl_tx)
        self.nodes[0].sendrawtransaction(dbl_tx_hex, True)
        mempool = self.nodes[0].getrawmempool()
        for tx in tree_txs:
            tx.rehash()
            assert (tx.hash not in mempool)
        # Try again, but with more total transactions than the "max txs
        # double-spent at once" anti-DoS limit.
        for n in (MAX_REPLACEMENT_LIMIT+1, MAX_REPLACEMENT_LIMIT*2):
            fee = int(0.0001*COIN)
            tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
            tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
            assert_equal(len(tree_txs), n)
            dbl_tx = CTransaction()
            dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
            dbl_tx.vout = [CTxOut(initial_nValue - 2 * fee * n, CScript([1] * 35))]
            dbl_tx_hex = txToHex(dbl_tx)
            # This will raise an exception
            assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, dbl_tx_hex, True)
            # The original tree must still be retrievable from the mempool.
            for tx in tree_txs:
                tx.rehash()
                self.nodes[0].getrawtransaction(tx.hash)

    def test_replacement_feeperkb(self):
        """Replacement requires fee-per-KB to be higher"""
        tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx1a_hex = txToHex(tx1a)
        self.nodes[0].sendrawtransaction(tx1a_hex, True)
        # Higher fee, but the fee per KB is much lower, so the replacement is
        # rejected.
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(int(0.001*COIN), CScript([b'a'*999000]))]
        tx1b_hex = txToHex(tx1b)
        # This will raise an exception due to insufficient fee
        assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True)

    def test_spends_of_conflicting_outputs(self):
        """Replacements that spend conflicting tx outputs are rejected"""
        utxo1 = make_utxo(self.nodes[0], int(1.2*COIN))
        utxo2 = make_utxo(self.nodes[0], 3*COIN)
        tx1a = CTransaction()
        tx1a.vin = [CTxIn(utxo1, nSequence=0)]
        tx1a.vout = [CTxOut(int(1.1 * COIN), CScript([b'a' * 35]))]
        tx1a_hex = txToHex(tx1a)
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
        tx1a_txid = int(tx1a_txid, 16)
        # Direct spend an output of the transaction we're replacing.
        tx2 = CTransaction()
        tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0)]
        tx2.vin.append(CTxIn(COutPoint(tx1a_txid, 0), nSequence=0))
        tx2.vout = tx1a.vout
        tx2_hex = txToHex(tx2)
        # This will raise an exception
        assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, True)
        # Spend tx1a's output to test the indirect case.
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
        tx1b.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx1b_hex = txToHex(tx1b)
        tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
        tx1b_txid = int(tx1b_txid, 16)
        tx2 = CTransaction()
        tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0),
                   CTxIn(COutPoint(tx1b_txid, 0))]
        tx2.vout = tx1a.vout
        tx2_hex = txToHex(tx2)
        # This will raise an exception
        assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, True)

    def test_new_unconfirmed_inputs(self):
        """Replacements that add new unconfirmed inputs are rejected"""
        confirmed_utxo = make_utxo(self.nodes[0], int(1.1*COIN))
        unconfirmed_utxo = make_utxo(self.nodes[0], int(0.1*COIN), False)
        tx1 = CTransaction()
        tx1.vin = [CTxIn(confirmed_utxo)]
        tx1.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx1_hex = txToHex(tx1)
        self.nodes[0].sendrawtransaction(tx1_hex, True)
        tx2 = CTransaction()
        tx2.vin = [CTxIn(confirmed_utxo), CTxIn(unconfirmed_utxo)]
        tx2.vout = tx1.vout
        tx2_hex = txToHex(tx2)
        # This will raise an exception
        assert_raises_rpc_error(-26, "replacement-adds-unconfirmed", self.nodes[0].sendrawtransaction, tx2_hex, True)

    def test_too_many_replacements(self):
        """Replacements that evict too many transactions are rejected"""
        # Try directly replacing more than MAX_REPLACEMENT_LIMIT
        # transactions
        # Start by creating a single transaction with many outputs
        initial_nValue = 10*COIN
        utxo = make_utxo(self.nodes[0], initial_nValue)
        fee = int(0.0001*COIN)
        split_value = int((initial_nValue-fee)/(MAX_REPLACEMENT_LIMIT+1))
        outputs = []
        for i in range(MAX_REPLACEMENT_LIMIT+1):
            outputs.append(CTxOut(split_value, CScript([1])))
        splitting_tx = CTransaction()
        splitting_tx.vin = [CTxIn(utxo, nSequence=0)]
        splitting_tx.vout = outputs
        splitting_tx_hex = txToHex(splitting_tx)
        txid = self.nodes[0].sendrawtransaction(splitting_tx_hex, True)
        txid = int(txid, 16)
        # Now spend each of those outputs individually
        for i in range(MAX_REPLACEMENT_LIMIT+1):
            tx_i = CTransaction()
            tx_i.vin = [CTxIn(COutPoint(txid, i), nSequence=0)]
            tx_i.vout = [CTxOut(split_value - fee, CScript([b'a' * 35]))]
            tx_i_hex = txToHex(tx_i)
            self.nodes[0].sendrawtransaction(tx_i_hex, True)
        # Now create doublespend of the whole lot; should fail.
        # Need a big enough fee to cover all spending transactions and have
        # a higher fee rate
        double_spend_value = (split_value-100*fee)*(MAX_REPLACEMENT_LIMIT+1)
        inputs = []
        for i in range(MAX_REPLACEMENT_LIMIT+1):
            inputs.append(CTxIn(COutPoint(txid, i), nSequence=0))
        double_tx = CTransaction()
        double_tx.vin = inputs
        double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
        double_tx_hex = txToHex(double_tx)
        # This will raise an exception
        assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, double_tx_hex, True)
        # If we remove an input, it should pass
        double_tx = CTransaction()
        double_tx.vin = inputs[0:-1]
        double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
        double_tx_hex = txToHex(double_tx)
        self.nodes[0].sendrawtransaction(double_tx_hex, True)

    def test_opt_in(self):
        """Replacing should only work if orig tx opted in"""
        tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
        # Create a non-opting in transaction
        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0xffffffff)]
        tx1a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx1a_hex = txToHex(tx1a)
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
        # Shouldn't be able to double-spend
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(int(0.9 * COIN), CScript([b'b' * 35]))]
        tx1b_hex = txToHex(tx1b)
        # This will raise an exception
        assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx1b_hex, True)
        tx1_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
        # Create a different non-opting in transaction
        tx2a = CTransaction()
        tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0xfffffffe)]
        tx2a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx2a_hex = txToHex(tx2a)
        tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, True)
        # Still shouldn't be able to double-spend
        tx2b = CTransaction()
        tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2b.vout = [CTxOut(int(0.9 * COIN), CScript([b'b' * 35]))]
        tx2b_hex = txToHex(tx2b)
        # This will raise an exception
        assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx2b_hex, True)
        # Now create a new transaction that spends from tx1a and tx2a
        # opt-in on one of the inputs
        # Transaction should be replaceable on either input
        tx1a_txid = int(tx1a_txid, 16)
        tx2a_txid = int(tx2a_txid, 16)
        tx3a = CTransaction()
        tx3a.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0xffffffff),
                    CTxIn(COutPoint(tx2a_txid, 0), nSequence=0xfffffffd)]
        tx3a.vout = [CTxOut(int(0.9*COIN), CScript([b'c'])), CTxOut(int(0.9*COIN), CScript([b'd']))]
        tx3a_hex = txToHex(tx3a)
        self.nodes[0].sendrawtransaction(tx3a_hex, True)
        tx3b = CTransaction()
        tx3b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
        tx3b.vout = [CTxOut(int(0.5 * COIN), CScript([b'e' * 35]))]
        tx3b_hex = txToHex(tx3b)
        tx3c = CTransaction()
        tx3c.vin = [CTxIn(COutPoint(tx2a_txid, 0), nSequence=0)]
        tx3c.vout = [CTxOut(int(0.5 * COIN), CScript([b'f' * 35]))]
        tx3c_hex = txToHex(tx3c)
        self.nodes[0].sendrawtransaction(tx3b_hex, True)
        # If tx3b was accepted, tx3c won't look like a replacement,
        # but make sure it is accepted anyway
        self.nodes[0].sendrawtransaction(tx3c_hex, True)

    def test_prioritised_transactions(self):
        # Ensure that fee deltas used via prioritisetransaction are
        # correctly used by replacement logic
        # 1. Check that feeperkb uses modified fees
        tx0_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
        tx1a = CTransaction()
        tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx1a_hex = txToHex(tx1a)
        tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, True)
        # Higher fee, but the actual fee per KB is much lower.
        tx1b = CTransaction()
        tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
        tx1b.vout = [CTxOut(int(0.001*COIN), CScript([b'a'*740000]))]
        tx1b_hex = txToHex(tx1b)
        # Verify tx1b cannot replace tx1a.
        assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, True)
        # Use prioritisetransaction to set tx1a's fee to 0.
        self.nodes[0].prioritisetransaction(txid=tx1a_txid, fee_delta=int(-0.1*COIN))
        # Now tx1b should be able to replace tx1a
        tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, True)
        assert(tx1b_txid in self.nodes[0].getrawmempool())
        # 2. Check that absolute fee checks use modified fee.
        tx1_outpoint = make_utxo(self.nodes[0], int(1.1*COIN))
        tx2a = CTransaction()
        tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2a.vout = [CTxOut(1 * COIN, CScript([b'a' * 35]))]
        tx2a_hex = txToHex(tx2a)
        self.nodes[0].sendrawtransaction(tx2a_hex, True)
        # Lower fee, but we'll prioritise it
        tx2b = CTransaction()
        tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
        tx2b.vout = [CTxOut(int(1.01 * COIN), CScript([b'a' * 35]))]
        tx2b.rehash()
        tx2b_hex = txToHex(tx2b)
        # Verify tx2b cannot replace tx2a.
        assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx2b_hex, True)
        # Now prioritise tx2b to have a higher modified fee
        self.nodes[0].prioritisetransaction(txid=tx2b.hash, fee_delta=int(0.1*COIN))
        # tx2b should now be accepted
        tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, True)
        assert(tx2b_txid in self.nodes[0].getrawmempool())

    def test_rpc(self):
        """Check that createrawtransaction/fundrawtransaction 'replaceable'
        arguments set the input nSequence values accordingly."""
        us0 = self.nodes[0].listunspent()[0]
        ins = [us0]
        outs = {self.nodes[0].getnewaddress() : Decimal(1.0000000)}
        rawtx0 = self.nodes[0].createrawtransaction(ins, outs, 0, True)
        rawtx1 = self.nodes[0].createrawtransaction(ins, outs, 0, False)
        json0 = self.nodes[0].decoderawtransaction(rawtx0)
        json1 = self.nodes[0].decoderawtransaction(rawtx1)
        # 0xfffffffd signals opt-in RBF; 0xffffffff is non-replaceable.
        assert_equal(json0["vin"][0]["sequence"], 4294967293)
        assert_equal(json1["vin"][0]["sequence"], 4294967295)
        rawtx2 = self.nodes[0].createrawtransaction([], outs)
        frawtx2a = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": True})
        frawtx2b = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": False})
        json0 = self.nodes[0].decoderawtransaction(frawtx2a['hex'])
        json1 = self.nodes[0].decoderawtransaction(frawtx2b['hex'])
        assert_equal(json0["vin"][0]["sequence"], 4294967293)
        assert_equal(json1["vin"][0]["sequence"], 4294967294)
# Entry point: run the functional test when invoked directly.
if __name__ == '__main__':
    ReplaceByFeeTest().main()
|
# Prompt for a number and report the parity of every integer below it
# (starting from 1; the number itself is excluded).
if __name__ == "__main__":
    numero = int(input("Introduzca un numero: "))
    for contador in range(1, numero):
        paridad = "par" if contador % 2 == 0 else "impar"
        print(f"{contador} es un numero {paridad}!")
|
# *** WARNING: this file was generated by the Pulumi Kubernetes codegen tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
from typing import Optional
import pulumi
import pulumi.runtime
from pulumi import Input, ResourceOptions
from ... import tables, version
class ReplicaSetList(pulumi.CustomResource):
    """
    ReplicaSetList is a collection of ReplicaSets.
    """

    apiVersion: pulumi.Output[str]
    """
    APIVersion defines the versioned schema of this representation of an object. Servers should
    convert recognized schemas to the latest internal value, and may reject unrecognized values.
    More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
    """

    kind: pulumi.Output[str]
    """
    Kind is a string value representing the REST resource this object represents. Servers may infer
    this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More
    info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
    """

    items: pulumi.Output[list]
    """
    List of ReplicaSets. More info:
    https://kubernetes.io/docs/concepts/workloads/controllers/replicationcontroller
    """

    metadata: pulumi.Output[dict]
    """
    Standard list metadata. More info:
    https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
    """

    def __init__(self, resource_name, opts=None, items=None, metadata=None, __name__=None, __opts__=None):
        """
        Create a ReplicaSetList resource with the given unique name, arguments, and options.

        :param str resource_name: The _unique_ name of the resource.
        :param pulumi.ResourceOptions opts: A bag of options that control this resource's behavior.
        :param pulumi.Input[list] items: List of ReplicaSets. More info:
               https://kubernetes.io/docs/concepts/workloads/controllers/replicationcontroller
        :param pulumi.Input[dict] metadata: Standard list metadata. More info:
               https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
        """
        # Backward-compatibility shims for the deprecated double-underscore
        # keyword arguments from older SDK versions.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if not resource_name:
            raise TypeError('Missing resource name argument (for URN creation)')
        if not isinstance(resource_name, str):
            raise TypeError('Expected resource name to be a string')
        if opts and not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        __props__ = dict()
        # apiVersion/kind are fixed by this resource type and not user-settable.
        __props__['apiVersion'] = 'extensions/v1beta1'
        __props__['kind'] = 'ReplicaSetList'
        if items is None:
            raise TypeError('Missing required property items')
        __props__['items'] = items
        __props__['metadata'] = metadata
        __props__['status'] = None
        # Pin the provider version so the engine selects a matching plugin.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(
            version=version.get_version(),
        ))
        super(ReplicaSetList, self).__init__(
            "kubernetes:extensions/v1beta1:ReplicaSetList",
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name, id, opts=None):
        """
        Get the state of an existing `ReplicaSetList` resource, as identified by `id`.
        The ID is of the form `[namespace]/[name]`; if `[namespace]` is omitted,
        then (per Kubernetes convention) the ID becomes `default/[name]`.

        Pulumi will keep track of this resource using `resource_name` as the Pulumi ID.

        :param str resource_name: _Unique_ name used to register this resource with Pulumi.
        :param pulumi.Input[str] id: An ID for the Kubernetes resource to retrieve.
               Takes the form `[namespace]/[name]` or `[name]`.
        :param Optional[pulumi.ResourceOptions] opts: A bag of options that control this
               resource's behavior.
        """
        # NOTE(review): __init__ raises TypeError when `items` is None, and this
        # call does not pass `items` — confirm against the codegen tool whether
        # the read path (opts.id set) is expected to bypass that check.
        opts = ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        return ReplicaSetList(resource_name, opts)

    def translate_output_property(self, prop: str) -> str:
        # Map engine-side (camelCase) property names to Python-side names.
        return tables._CASING_FORWARD_TABLE.get(prop) or prop

    def translate_input_property(self, prop: str) -> str:
        # Map Python-side property names back to engine-side (camelCase) names.
        return tables._CASING_BACKWARD_TABLE.get(prop) or prop
|
// craco.config.js
// CRACO override for Create React App: inject Tailwind CSS and Autoprefixer
// into the PostCSS pipeline without ejecting.
module.exports = {
  style: {
    postcss: {
      plugins: [
        require('tailwindcss'),   // utility-first CSS framework
        require('autoprefixer'),  // adds vendor prefixes to generated CSS
      ],
    },
  },
}
|
//>>built
// Minified AMD module produced by the Dojo build ("//>>built" marker):
// default style/configuration constants for dojox/drawing (stroke, fill,
// anchor, arrow, text and button styles), plus a `copy` member that deep
// clones the whole table — skipping the `copy` function itself — and seeds
// `current`, `currentHit` and `currentText` from the norm/hitNorm/text
// entries.  Do not hand-edit the minified body; regenerate from source.
define("dojox/drawing/defaults",{clickMode:!0,clickable:!0,current:null,currentHit:null,angleSnap:1,zAxis:!1,zAxisEnabled:!0,zAngle:225,renderHitLines:!0,renderHitLayer:!0,labelSameColor:!1,useSelectedStyle:!0,norm:{width:1,color:"#000000",style:"Solid",cap:"round",fill:"#CCCCCC"},selected:{width:6,color:"#00FF00"},highlighted:{width:6,color:"#FF00FF",style:"Solid",cap:"round",fill:"#E11EBB"},disabled:{width:1,color:"#666666",style:"solid",cap:"round",fill:"#cccccc"},hitNorm:{width:6,color:{r:0,g:255,
b:255,a:0},style:"Solid",cap:"round",fill:{r:255,g:255,b:255,a:0}},hitSelected:{width:6,color:"#FF9900",style:"Solid",cap:"round",fill:{r:255,g:255,b:255,a:0}},hitHighlighted:{width:6,color:"#FFFF00",style:"Solid",cap:"round",fill:{r:255,g:255,b:255,a:0}},anchors:{size:10,width:2,color:"#999",style:"solid",fill:"#fff",cap:"square",minSize:10,marginZero:5},arrows:{length:30,width:16},text:{minWidth:100,deleteEmptyCreate:!0,deleteEmptyModify:!0,pad:3,size:"18px",family:"sans-serif",weight:"normal",
color:"#000000"},textDisabled:{size:"18px",family:"sans-serif",weight:"normal",color:"#cccccc"},textMode:{create:{width:2,style:"dotted",color:"#666666",fill:null},edit:{width:1,style:"dashed",color:"#666",fill:null}},button:{norm:{color:"#cccccc",fill:{type:"linear",x1:0,x2:0,y1:0,y2:100,colors:[{offset:0.5,color:"#ffffff"},{offset:1,color:"#e5e5e5"}]}},over:{fill:{type:"linear",x1:0,x2:0,y1:0,y2:100,colors:[{offset:0.5,color:"#ffffff"},{offset:1,color:"#e1eaf5"}]},color:"#92a0b3"},down:{fill:{type:"linear",
x1:0,x2:0,y1:0,y2:100,colors:[{offset:0,color:"#e1eaf5"},{offset:1,color:"#ffffff"}]},color:"#92a0b3"},selected:{fill:{type:"linear",x1:0,x2:0,y1:0,y2:100,colors:[{offset:0,color:"#97b4bf"},{offset:1,color:"#c8dae1"}]},color:"#92a0b3"},icon:{norm:{fill:null,color:"#92a0b3"},selected:{fill:"#ffffff",color:"#92a0b3"}}},copy:function(){var d=function(b){if("object"!=typeof b||null===b||void 0===b)return b;var a;if(b.push){a=[];for(var c=0;c<b.length;c++)a.push(d(b[c]));return a}a={};for(c in b)"copy"!=
c&&(a[c]="object"==typeof b[c]?d(b[c]):b[c]);return a},a=d(this);a.current=a.norm;a.currentHit=a.hitNorm;a.currentText=a.text;return a}});
|
import configureMockStore from 'redux-mock-store'
import thunk from 'redux-thunk'
import * as actions from '../companies'
import constants from '../../constants/companies'
import api from '../../lib/kitsu'
// Thunk-enabled mock store factory shared by every test suite below.
const middlewares = [thunk]
const mockStore = configureMockStore(middlewares)
// fetchCompanies thunk: REQUEST is always dispatched first; SUCCESS carries
// the fetched companies and a page count, FAILURE carries the error.
describe('fetchCompanies', () => {
  describe('on success', () => {
    it('dispatches FETCH_COMPANIES_REQUEST & FETCH_COMPANIES_SUCCESS', async () => {
      // Stub the API so no network call happens; `links.last` encodes the
      // final page number the action derives `totalPages` from.
      const fetchSpy = jest
        .spyOn(api, 'get')
        .mockReturnValue({
          data: [
            {
              id: 1,
              attributes: {
                name: 'Company A'
              }
            },
            {
              id: 2,
              attributes: {
                name: 'Company B'
              }
            }
          ],
          links: {
            last: 'http://localhost:3000/v1/companies?page%5Bnumber%5D=10&page%5Bsize%5D=10'
          }
        })
      const expectedActions = [
        {
          type: constants.FETCH_COMPANIES_REQUEST
        },
        {
          type: constants.FETCH_COMPANIES_SUCCESS,
          companies: [
            {
              id: 1,
              attributes: {
                name: 'Company A'
              }
            },
            {
              id: 2,
              attributes: {
                name: 'Company B'
              }
            }
          ],
          'totalPages': 10
        }
      ]
      const store = mockStore({
        companies: {
          companies: {},
          currentPage: 1,
          totalPages: 0,
          ui: {
            fetching: false,
            doneFetching: false,
            fetchError: ''
          }
        }
      })
      await store.dispatch(actions.fetchCompanies())
      expect(store.getActions()).toEqual(expectedActions)
      // The thunk reads `currentPage` from state, hence page: 1.
      expect(fetchSpy).toHaveBeenCalledWith('companies', { page: 1 })
      fetchSpy.mockRestore()
    })
  })
  describe('on failure', () => {
    it('dispatches FETCH_COMPANIES_REQUEST & FETCH_COMPANIES_FAILURE', async () => {
      const fetchSpy = jest
        .spyOn(api, 'get')
        .mockImplementation(() => Promise.reject(new Error('Not Found')))
      const expectedActions = [
        {
          type: constants.FETCH_COMPANIES_REQUEST
        },
        {
          type: constants.FETCH_COMPANIES_FAILURE,
          errorMessage: new Error('Not Found')
        }
      ]
      const store = mockStore({
        companies: {
          companies: {},
          currentPage: 1,
          totalPages: 0,
          ui: {
            fetching: false,
            doneFetching: false,
            fetchError: 'Not Found'
          }
        }
      })
      await store.dispatch(actions.fetchCompanies())
      expect(store.getActions()).toEqual(expectedActions)
      expect(fetchSpy).toHaveBeenCalledWith('companies', { page: 1 })
      fetchSpy.mockRestore()
    })
  })
})
// resetCompanies is a plain action creator: a single RESET_COMPANIES action
// with no payload, regardless of current state.
describe('resetCompanies', () => {
  it('dispatches RESET_COMPANIES', () => {
    const expectedActions = [{ type: constants.RESET_COMPANIES }]
    const store = mockStore({
      companies: [{ id: 1, name: 'John Doe' }, { id: 2, name: 'Jane Doe' }],
      currentPage: 1,
      totalPages: 10,
      ui: {
        fetching: false,
        doneFetching: true,
        fetchError: ''
      }
    })
    store.dispatch(actions.resetCompanies())
    expect(store.getActions()).toEqual(expectedActions)
  })
})
// updateCurrentPage wraps the page number in an UPDATE_CURRENT_PAGE action.
describe('updateCurrentPage', () => {
  it('dispatches UPDATE_CURRENT_PAGE', () => {
    const expectedActions = [
      {
        type: constants.UPDATE_CURRENT_PAGE,
        page: 1
      }
    ]
    const store = mockStore({
      companies: [{ id: 1, name: 'John Doe' }, { id: 2, name: 'Jane Doe' }],
      currentPage: 1,
      totalPages: 10,
      ui: {
        fetching: false,
        doneFetching: true,
        fetchError: ''
      }
    })
    // NOTE(review): dispatched without an argument yet `page: 1` is expected —
    // presumably the action creator defaults its page to 1; confirm, or pass 1
    // explicitly to match the other tests' style.
    store.dispatch(actions.updateCurrentPage())
    expect(store.getActions()).toEqual(expectedActions)
  })
})
// updateSearch wraps the query string in an UPDATE_COMPANIES_SEARCH action.
describe('updateSearch', () => {
  it('dispatches UPDATE_COMPANIES_SEARCH', () => {
    const expectedActions = [
      {
        type: constants.UPDATE_COMPANIES_SEARCH,
        search: 'John Doe'
      }
    ]
    const store = mockStore({
      companies: [{ id: 1, name: 'John Doe' }, { id: 2, name: 'Jane Doe' }],
      currentPage: 1,
      totalPages: 10,
      ui: {
        fetching: false,
        doneFetching: true,
        fetchError: ''
      },
      search: ''
    })
    store.dispatch(actions.updateSearch('John Doe'))
    expect(store.getActions()).toEqual(expectedActions)
  })
})
|
(function(global) {
  // Attach the namespace to the host object (window). Bug fix: the old
  // `Thunderjack.HandUi = Thunderjack.HandUi || {}` placeholder was removed —
  // it was immediately clobbered by the constructor assignment below and
  // could only mislead readers into thinking HandUi is a plain object.
  global.Thunderjack = global.Thunderjack || {};

  //===========================================================================================
  // "public"
  //===========================================================================================

  //-------------------------------------------------------------------------------------------
  // ctor
  //-------------------------------------------------------------------------------------------
  // View helper for one hand. Wraps a Phaser prefab that contains the card
  // slot nodes, a result label sprite and a score text node.
  Thunderjack.HandUi = function(hand_prefab) {
    this._handPrefab = hand_prefab;
    this._cardPositions = [];   // world-space Phaser.Point per card slot
    this._scoreBmpText = null;  // Thunderjack.BitmapText showing the hand score
    this._initCardPositions();
    this._initResultLabel();
    this._initTextFields();
  };

  //-------------------------------------------------------------------------------------------
  // getCardPositionAt
  //-------------------------------------------------------------------------------------------
  // Returns the precomputed world position of the i-th card slot.
  Thunderjack.HandUi.prototype.getCardPositionAt = function(i_index) {
    return(this._cardPositions[i_index]);
  };

  //-------------------------------------------------------------------------------------------
  // hideResultsLabel
  //-------------------------------------------------------------------------------------------
  Thunderjack.HandUi.prototype.hideResultsLabel = function() {
    this._handPrefab.fResult_label.visible = false;
  };

  //-------------------------------------------------------------------------------------------
  // setScore
  //-------------------------------------------------------------------------------------------
  // Shows the numeric score; a non-positive score clears the text.
  Thunderjack.HandUi.prototype.setScore = function(i_score) {
    this._scoreBmpText.text().text = i_score > 0 ? i_score : "";
    this._scoreBmpText.text().visible = true;
  };

  //-------------------------------------------------------------------------------------------
  // showAnimationLabel
  //-------------------------------------------------------------------------------------------
  // Plays the named animation on the result label sprite.
  Thunderjack.HandUi.prototype.showAnimationLabel = function(s_resultLabelFrame) {
    var labelSprite = this._handPrefab.fResult_label;
    labelSprite.visible = true;
    labelSprite.animations.play(s_resultLabelFrame);
  };

  //-------------------------------------------------------------------------------------------
  // showResultsLabel
  //-------------------------------------------------------------------------------------------
  // Shows a static frame (no animation) on the result label sprite.
  Thunderjack.HandUi.prototype.showResultsLabel = function(s_resultLabelFrame) {
    var labelSprite = this._handPrefab.fResult_label;
    labelSprite.visible = true;
    labelSprite.frameName = s_resultLabelFrame;
  };

  //===========================================================================================
  // "protected"
  //===========================================================================================

  //-------------------------------------------------------------------------------------------
  // _initBitmapText
  //-------------------------------------------------------------------------------------------
  // Wraps a prefab text node in a BitmapText; visible by default.
  Thunderjack.HandUi.prototype._initBitmapText = function(areaNodeSprite, b_visible) {
    var bitmapText = new Thunderjack.BitmapText(this._handPrefab.game, areaNodeSprite);
    bitmapText.text().visible = b_visible === undefined ? true : b_visible;
    return(bitmapText);
  };

  //-------------------------------------------------------------------------------------------
  // _initTextFields
  //-------------------------------------------------------------------------------------------
  // Creates the (initially hidden) score text.
  Thunderjack.HandUi.prototype._initTextFields = function() {
    this._scoreBmpText = this._initBitmapText(this._handPrefab.fScoreTextNode, false);
  };

  //===========================================================================================
  // "private"
  //===========================================================================================

  //-------------------------------------------------------------------------------------------
  // _initCardPosition
  //-------------------------------------------------------------------------------------------
  // Records the world position of one placeholder card node, then destroys
  // the placeholder (it only exists to mark the slot's position).
  Thunderjack.HandUi.prototype._initCardPosition = function(s_cardNodeName) {
    var cardNode = this._handPrefab.getByName(s_cardNodeName);
    var position = new Phaser.Point(this._handPrefab.x + cardNode.x, this._handPrefab.y + cardNode.y);
    this._cardPositions.push(position);
    var SHOULD_DESTROY = true;
    var IS_SILENT = true;
    this._handPrefab.remove(cardNode, SHOULD_DESTROY, IS_SILENT);
  };

  //-------------------------------------------------------------------------------------------
  // _initCardPositions
  //-------------------------------------------------------------------------------------------
  // Six fixed card slots per hand, named m_cardNode0..m_cardNode5 in the prefab.
  Thunderjack.HandUi.prototype._initCardPositions = function() {
    this._initCardPosition("m_cardNode0");
    this._initCardPosition("m_cardNode1");
    this._initCardPosition("m_cardNode2");
    this._initCardPosition("m_cardNode3");
    this._initCardPosition("m_cardNode4");
    this._initCardPosition("m_cardNode5");
  };

  //-------------------------------------------------------------------------------------------
  // _initResultLabel
  //-------------------------------------------------------------------------------------------
  Thunderjack.HandUi.prototype._initResultLabel = function() {
    this._handPrefab.fResult_label.visible = false;
  };
})(window);
|
import * as React from 'react'
function SvgClose(props) {
return (
<svg width="1em" height="1em" viewBox="0 0 24 24" fill="none" {...props}>
<path
fill="currentColor"
d="M13.414 12l5.793-5.793a1 1 0 00-1.414-1.414L12 10.586 6.207 4.793a1 1 0 00-1.414 1.414L10.586 12l-5.793 5.793a1 1 0 001.414 1.414L12 13.414l5.793 5.793a.996.996 0 001.414 0 1 1 0 000-1.414L13.414 12z"
/>
</svg>
)
}
export default SvgClose
|
#!/usr/bin/env python
"""
A script to show how to create your own time domain source model.
A simple damped Gaussian signal is defined in the time domain, injected into
noise in two interferometers (LIGO Livingston and Hanford at design
sensitivity), and then recovered.
"""
import numpy as np
import bilby
# define the time-domain model
def time_domain_damped_sinusoid(
        time, amplitude, damping_time, frequency, phase):
    """Damped sinusoid evaluated on the *time* array.

    Produces a linearly polarised signal: only the plus polarisation carries
    the waveform, the cross polarisation is identically zero.
    """
    envelope = amplitude * np.exp(-time / damping_time)
    carrier = np.sin(2 * np.pi * frequency * time + phase)
    return {'plus': envelope * carrier, 'cross': np.zeros(len(time))}
# Parameters to inject: intrinsic signal parameters plus the extrinsic
# sky-location/orientation/time parameters required for projection.
injection_parameters = dict(amplitude=5e-22, damping_time=0.1, frequency=50,
                            phase=0, ra=0, dec=0, psi=0, geocent_time=0.)

duration = 0.5             # data segment length in seconds
sampling_frequency = 2048  # Hz
outdir = 'outdir'
label = 'time_domain_source_model'

# Wrap the custom time-domain source model in a waveform generator.
waveform = bilby.gw.waveform_generator.WaveformGenerator(
    duration=duration, sampling_frequency=sampling_frequency,
    time_domain_source_model=time_domain_damped_sinusoid)

# Inject the signal into two interferometers (H1 and L1) with strain data
# coloured by their design-sensitivity power spectral densities.
# NOTE(review): start_time is geocent_time - 3 while duration is only 0.5 s,
# which would place the injection outside the analysed segment — confirm the
# intended offset (the analogous upstream example uses a sub-duration offset).
ifos = bilby.gw.detector.InterferometerList(['H1', 'L1'])
ifos.set_strain_data_from_power_spectral_densities(
    sampling_frequency=sampling_frequency, duration=duration,
    start_time=injection_parameters['geocent_time'] - 3)
ifos.inject_signal(waveform_generator=waveform,
                   parameters=injection_parameters)

# Priors: fixed (delta-function) at the injected values except for the four
# signal parameters sampled below.
prior = injection_parameters.copy()
prior['amplitude'] = bilby.core.prior.LogUniform(1e-23, 1e-21, r'$h_0$')
prior['damping_time'] = bilby.core.prior.Uniform(
    0.01, 1, r'damping time', unit='$s$')
prior['frequency'] = bilby.core.prior.Uniform(0, 200, r'frequency', unit='Hz')
prior['phase'] = bilby.core.prior.Uniform(-np.pi / 2, np.pi / 2, r'$\phi$')

# Standard gravitational-wave transient likelihood over the injected data.
likelihood = bilby.gw.likelihood.GravitationalWaveTransient(ifos, waveform)

# Run nested sampling and produce a posterior corner plot.
result = bilby.core.sampler.run_sampler(
    likelihood, prior, sampler='dynesty', npoints=1000,
    injection_parameters=injection_parameters, outdir=outdir, label=label)
result.plot_corner()
|
'use strict';
var async = require('async');
var fs = require('fs');
var npm = require('npm');
var path = require('path');
var spawn = require('child_process').spawn;
var bitcore = require('bitcore-lib-dash');
var $ = bitcore.util.preconditions;
var _ = bitcore.deps._;
var utils = require('../utils');
/**
* Will remove a service from bitcore-node-dash.json
* @param {String} configFilePath - The absolute path to the configuration file
* @param {String} service - The name of the module
* @param {Function} done
*/
/**
 * Will remove a service from bitcore-node-dash.json
 * @param {String} configFilePath - The absolute path to the configuration file
 * @param {String} service - The name of the module
 * @param {Function} done - Node-style callback, called with (err) on failure
 */
function removeConfig(configFilePath, service, done) {
  $.checkArgument(utils.isAbsolutePath(configFilePath), 'An absolute path is expected');
  fs.readFile(configFilePath, function(err, data) {
    if (err) {
      return done(err);
    }
    var config = JSON.parse(data);
    $.checkState(
      Array.isArray(config.services),
      'Configuration file is expected to have a services array.'
    );
    // Bug fix: the old forward splice-in-loop skipped the element after each
    // removal, so adjacent duplicates of the service could survive. Filtering
    // removes every occurrence; uniq then dedupes the remainder and the
    // default sort keeps the list alphabetical (the old comparator returned a
    // boolean, which violates the sort comparator contract).
    config.services = _.uniq(config.services.filter(function(name) {
      return name !== service;
    })).sort();
    fs.writeFile(configFilePath, JSON.stringify(config, null, 2), done);
  });
}
/**
* Will uninstall a Node.js service and remove from package.json.
* @param {String} configDir - The absolute configuration directory path
* @param {String} service - The name of the service
* @param {Function} done
*/
/**
 * Will uninstall a Node.js service and remove from package.json.
 * @param {String} configDir - The absolute configuration directory path
 * @param {String} service - The name of the service
 * @param {Function} done - Node-style callback, called with (err) on failure
 */
function uninstallService(configDir, service, done) {
  $.checkArgument(utils.isAbsolutePath(configDir), 'An absolute path is expected');
  $.checkArgument(_.isString(service), 'A string is expected for the service argument');
  // Shell out to npm so package.json is updated via --save.
  var child = spawn('npm', ['uninstall', service, '--save'], {cwd: configDir});
  // Mirror the child's output onto our own stdio streams.
  child.stdout.on('data', function(chunk) {
    process.stdout.write(chunk);
  });
  child.stderr.on('data', function(chunk) {
    process.stderr.write(chunk);
  });
  child.on('close', function(exitCode) {
    if (exitCode === 0) {
      return done();
    }
    return done(new Error('There was an error uninstalling service(s): ' + service));
  });
}
/**
* Will remove a Node.js service if it is installed.
* @param {String} configDir - The absolute configuration directory path
* @param {String} service - The name of the service
* @param {Function} done
*/
/**
 * Will remove a Node.js service if it is installed.
 * @param {String} configDir - The absolute configuration directory path
 * @param {String} service - The name of the service
 * @param {Function} done - Node-style callback, called with (err) on failure
 */
function removeService(configDir, service, done) {
  $.checkArgument(utils.isAbsolutePath(configDir), 'An absolute path is expected');
  $.checkArgument(_.isString(service), 'A string is expected for the service argument');
  // check if the service is installed
  npm.load(function(err) {
    if (err) {
      return done(err);
    }
    // `lite` is npm's trimmed dependency-tree result; presumably
    // lite.dependencies is truthy only when the package is actually
    // installed — confirm against the npm programmatic API version in use.
    npm.commands.ls([service], true /*silent*/, function(err, data, lite) {
      if (err) {
        return done(err);
      }
      if (lite.dependencies) {
        uninstallService(configDir, service, done);
      } else {
        // Not installed: nothing to uninstall, succeed immediately.
        done();
      }
    });
  });
}
/**
* Will remove the Node.js service and from the bitcore-node-dash configuration.
* @param {String} options.cwd - The current working directory
* @param {String} options.dirname - The bitcore-node-dash configuration directory
* @param {Array} options.services - An array of strings of service names
* @param {Function} done - A callback function called when finished
*/
/**
 * Will remove the Node.js service(s) and update the bitcore-node-dash configuration.
 * (Doc fix: the previous JSDoc documented `options.cwd`/`options.dirname`,
 * which this function never reads — it reads `options.path` and
 * `options.services`.)
 * @param {Object} options
 * @param {String} options.path - The absolute path of the bitcore-node-dash configuration directory
 * @param {Array} options.services - An array of strings of service names
 * @param {Function} done - A callback function called when finished
 */
function remove(options, done) {
  $.checkArgument(_.isObject(options));
  $.checkArgument(_.isFunction(done));
  $.checkArgument(
    _.isString(options.path) && utils.isAbsolutePath(options.path),
    'An absolute path is expected'
  );
  $.checkArgument(Array.isArray(options.services));
  var configPath = options.path;
  var services = options.services;
  var bitcoreConfigPath = path.resolve(configPath, 'bitcore-node-dash.json');
  var packagePath = path.resolve(configPath, 'package.json');
  // Both files must exist before attempting any removal.
  if (!fs.existsSync(bitcoreConfigPath) || !fs.existsSync(packagePath)) {
    return done(
      new Error('Directory does not have a bitcore-node-dash.json and/or package.json file.')
    );
  }
  // Services are removed one at a time (eachSeries) so npm invocations do
  // not race on package.json.
  async.eachSeries(
    services,
    function(service, next) {
      // if the service is installed remove it
      removeService(configPath, service, function(err) {
        if (err) {
          return next(err);
        }
        // remove service from bitcore-node-dash.json
        removeConfig(bitcoreConfigPath, service, next);
      });
    }, done
  );
}
module.exports = remove;
|
/**********************************************************************/
/* ____ ____ */
/* / /\/ / */
/* /___/ \ / */
/* \ \ \/ */
/* \ \ Copyright (c) 2003-2009 Xilinx, Inc. */
/* / / All Right Reserved. */
/* /---/ /\ */
/* \ \ / \ */
/* \___\/\___\ */
/***********************************************************************/
/* This file is designed for use with ISim build 0xfbc00daa */
#define XSI_HIDE_SYMBOL_SPEC true
#include "xsi.h"
#include <memory.h>
#ifdef __GNUC__
#include <stdlib.h>
#else
#include <malloc.h>
#define alloca _alloca
#endif
static const char *ng0 = "/home/lab661/Documents/xkoste13/Digital-electronics-1/Labs/05-counter_binary/binary_counter/binary_cnt_tb00.vhd";
/* Machine-generated ISim process function for the first VHDL process of
 * binary_cnt_tb00.vhd (source lines 76-79). Do not hand-edit: the (t0 + N)
 * offsets index an opaque simulator-allocated process frame, and the GCC
 * computed-goto labels implement the process's wait/resume points. The
 * resume label is stored at t0 + 2784U between activations. */
static void work_a_0590140223_2372691052_p_0(char *t0)
{
    char *t1;
    char *t2;
    char *t3;
    char *t4;
    char *t5;
    char *t6;
    int64 t7;
    int64 t8;

LAB0:    t1 = (t0 + 2784U);
    t2 = *((char **)t1);
    if (t2 == 0)
        goto LAB2;

LAB3:    /* process previously suspended: jump to its saved resume point */
    goto *t2;

LAB2:    xsi_set_current_line(76, ng0);
    /* drive the target signal low ((unsigned char)2 is std_logic '0') */
    t2 = (t0 + 3416);
    t3 = (t2 + 56U);
    t4 = *((char **)t3);
    t5 = (t4 + 56U);
    t6 = *((char **)t5);
    *((unsigned char *)t6) = (unsigned char)2;
    xsi_driver_first_trans_fast(t2);
    xsi_set_current_line(77, ng0);
    /* wait for half of the period generic read from the frame */
    t2 = (t0 + 1808U);
    t3 = *((char **)t2);
    t7 = *((int64 *)t3);
    t8 = (t7 / 2);
    t2 = (t0 + 2592);
    xsi_process_wait(t2, t8);

LAB6: *((char **)t1) = &&LAB7;

LAB1:    return;

LAB4:    xsi_set_current_line(78, ng0);
    /* drive the target signal high ((unsigned char)3 is std_logic '1') */
    t2 = (t0 + 3416);
    t3 = (t2 + 56U);
    t4 = *((char **)t3);
    t5 = (t4 + 56U);
    t6 = *((char **)t5);
    *((unsigned char *)t6) = (unsigned char)3;
    xsi_driver_first_trans_fast(t2);
    xsi_set_current_line(79, ng0);
    t2 = (t0 + 1808U);
    t3 = *((char **)t2);
    t7 = *((int64 *)t3);
    t8 = (t7 / 2);
    t2 = (t0 + 2592);
    xsi_process_wait(t2, t8);

LAB10: *((char **)t1) = &&LAB11;
    goto LAB1;

/* label chains route each resume back into the loop body */
LAB5:    goto LAB4;

LAB7:    goto LAB5;

LAB8:    goto LAB2;

LAB9:    goto LAB8;

LAB11:    goto LAB9;

}
/* Machine-generated ISim process function for the stimulus process of
 * binary_cnt_tb00.vhd (source lines 87-101). Do not hand-edit: frame
 * offsets and computed-goto labels are emitted by the compiler. The resume
 * label is stored at t0 + 3032U; the loop counter and bound live at
 * t0 + 6144 / t0 + 6148. */
static void work_a_0590140223_2372691052_p_1(char *t0)
{
    char *t1;
    char *t2;
    int64 t3;
    char *t4;
    char *t5;
    char *t6;
    char *t7;
    int t8;
    int t9;
    char *t10;
    char *t11;
    int64 t12;
    int t13;

LAB0:    t1 = (t0 + 3032U);
    t2 = *((char **)t1);
    if (t2 == 0)
        goto LAB2;

LAB3:    /* process previously suspended: jump to its saved resume point */
    goto *t2;

LAB2:    xsi_set_current_line(87, ng0);
    /* initial wait of 100 ns (100 * 1000 simulator time units) */
    t3 = (100 * 1000LL);
    t2 = (t0 + 2840);
    xsi_process_wait(t2, t3);

LAB6: *((char **)t1) = &&LAB7;

LAB1:    return;

LAB4:    xsi_set_current_line(88, ng0);
    /* drive a signal high ((unsigned char)3 is std_logic '1') */
    t2 = (t0 + 3480);
    t4 = (t2 + 56U);
    t5 = *((char **)t4);
    t6 = (t5 + 56U);
    t7 = *((char **)t6);
    *((unsigned char *)t7) = (unsigned char)3;
    xsi_driver_first_trans_fast(t2);
    xsi_set_current_line(89, ng0);
    /* for-loop from 0 to 50 inclusive */
    t2 = (t0 + 6144);
    *((int *)t2) = 0;
    t4 = (t0 + 6148);
    *((int *)t4) = 50;
    t8 = 0;
    t9 = 50;

LAB8:    if (t8 <= t9)
        goto LAB9;

LAB11:    xsi_set_current_line(96, ng0);
    /* after the loop: wait 10 * period */
    t2 = (t0 + 1808U);
    t4 = *((char **)t2);
    t3 = *((int64 *)t4);
    t12 = (t3 * 10);
    t2 = (t0 + 2840);
    xsi_process_wait(t2, t12);

LAB23: *((char **)t1) = &&LAB24;
    goto LAB1;

LAB5:    goto LAB4;

LAB7:    goto LAB5;

LAB9:    xsi_set_current_line(90, ng0);
    /* loop body: pulse another signal high for 1 period ... */
    t5 = (t0 + 3544);
    t6 = (t5 + 56U);
    t7 = *((char **)t6);
    t10 = (t7 + 56U);
    t11 = *((char **)t10);
    *((unsigned char *)t11) = (unsigned char)3;
    xsi_driver_first_trans_fast(t5);
    xsi_set_current_line(91, ng0);
    t2 = (t0 + 1808U);
    t4 = *((char **)t2);
    t3 = *((int64 *)t4);
    t12 = (t3 * 1);
    t2 = (t0 + 2840);
    xsi_process_wait(t2, t12);

LAB14: *((char **)t1) = &&LAB15;
    goto LAB1;

LAB10:    /* loop increment and back-edge */
    t2 = (t0 + 6144);
    t8 = *((int *)t2);
    t4 = (t0 + 6148);
    t9 = *((int *)t4);
    if (t8 == t9)
        goto LAB11;

LAB20:    t13 = (t8 + 1);
    t8 = t13;
    t5 = (t0 + 6144);
    *((int *)t5) = t8;
    goto LAB8;

LAB12:    xsi_set_current_line(92, ng0);
    /* ... then low ((unsigned char)2 is std_logic '0') for 3 periods */
    t2 = (t0 + 3544);
    t4 = (t2 + 56U);
    t5 = *((char **)t4);
    t6 = (t5 + 56U);
    t7 = *((char **)t6);
    *((unsigned char *)t7) = (unsigned char)2;
    xsi_driver_first_trans_fast(t2);
    xsi_set_current_line(93, ng0);
    t2 = (t0 + 1808U);
    t4 = *((char **)t2);
    t3 = *((int64 *)t4);
    t12 = (t3 * 3);
    t2 = (t0 + 2840);
    xsi_process_wait(t2, t12);

LAB18: *((char **)t1) = &&LAB19;
    goto LAB1;

/* label chains route each resume back into the control flow */
LAB13:    goto LAB12;

LAB15:    goto LAB13;

LAB16:    goto LAB10;

LAB17:    goto LAB16;

LAB19:    goto LAB17;

LAB21:    xsi_set_current_line(101, ng0);

LAB27: *((char **)t1) = &&LAB28;
    goto LAB1;

LAB22:    goto LAB21;

LAB24:    goto LAB22;

LAB25:    goto LAB2;

LAB26:    goto LAB25;

LAB28:    goto LAB26;

}
/* Registration hook emitted by ISim: publishes the two process functions
 * above and the design-data file for this architecture to the simulator
 * kernel. Machine-generated; do not hand-edit. */
extern void work_a_0590140223_2372691052_init()
{
    static char *pe[] = {(void *)work_a_0590140223_2372691052_p_0,(void *)work_a_0590140223_2372691052_p_1};
    xsi_register_didat("work_a_0590140223_2372691052", "isim/binary_cnt_tb00_isim_beh.exe.sim/work/a_0590140223_2372691052.didat");
    xsi_register_executes(pe);
}
|
import mongoose from 'mongoose';
// Subscription: a user-held subscription linked to its Plan document.
const subscriptionSchema = mongoose.Schema({
  _id: {type: mongoose.Schema.Types.ObjectId, index: true, auto: true},
  plan: {type: mongoose.Schema.Types.ObjectId, required: true, ref: 'Plan'},
  paymentMethod: String,
  timestamp: {type: Date, default: Date.now}
});

export default mongoose.model('Subscription', subscriptionSchema);
|
import requests
import json
import logging
class AccountKeyClient:
    """Client for the remote account-key service.

    POSTs an (email, key) pair as JSON and returns the generated account key.
    """

    ACCOUNT_KEY_URL = "https://account-key-service.herokuapp.com/v1/account"

    def __init__(self):
        self.logger = logging.getLogger(__name__)

    def get_account_key(self, key, email):
        """Request an account key for *email* authenticated by *key*.

        Returns the ``account_key`` field of the JSON response. Re-raises any
        failure (network error, non-JSON body, missing field) after logging it.
        """
        payload = {'email': email, 'key': key}
        data = json.dumps(payload)
        headers = {"Content-Type": "application/json"}
        try:
            # Lazy %-style logging args avoid str-concatenation errors and
            # defer formatting until the record is actually emitted.
            self.logger.info("Sending request to account key service %s", data)
            # NOTE(review): no raise_for_status() here — a 4xx/5xx surfaces
            # as a JSON/KeyError instead of an HTTPError; confirm callers'
            # expectations before tightening.
            response = requests.post(
                AccountKeyClient.ACCOUNT_KEY_URL, data=data, headers=headers)
            body = response.json()
            self.logger.info("Response from account key service %s", body)
            return body['account_key']
        except Exception:
            # logger.exception records the traceback; bare `raise` preserves
            # the original exception and its chain (unlike `raise e`).
            self.logger.exception("Error at account key client")
            raise
|
from pathlib import Path
from typing import List, Optional, Tuple
import cv2
import numpy as np
from annotate_picture import Annotation
# Palette cycled through for layers 1..N when several vector sets are drawn.
# NOTE(review): the name looks like a typo for DRAW_COLORS, but it is a
# module-level name other code may import, so it is kept as-is.
DRAW_CORORS = [
    (80, 200, 200),
    (200, 80, 200),
    (200, 200, 80),
    (80, 80, 200),
    (80, 200, 80),
    (200, 80, 80),
    (150, 80, 200),
    (80, 200, 150),
]
FONT = cv2.FONT_HERSHEY_SIMPLEX


def index_to_color_alpha(i: int) -> Tuple[Tuple[int, int, int], int]:
    """Return the (BGR color, alpha) pair for layer *i*.

    Layer 0 is opaque white; subsequent layers cycle through DRAW_CORORS at
    alpha 0.7.
    """
    if i == 0:
        return (255, 255, 255), 1
    palette_index = (i - 1) % len(DRAW_CORORS)
    return DRAW_CORORS[palette_index], 0.7
def draw_grid(img: np.ndarray, min_x: int, min_y: int, draw_size: int = 10) -> np.ndarray:
    """Overlay grid lines, origin axes and coordinate labels onto *img*.

    *img* is mutated in place and also returned. ``min_x``/``min_y`` are the
    logical coordinates of the image's top-left cell; ``draw_size`` is the
    pixel size of one logical cell.
    """
    img_y, img_x = img.shape[:2]
    # Grid pitches in cells, drawn fine-to-coarse so coarser (more important)
    # lines overwrite finer ones where they coincide.
    step_colors = [
        (1, (80, 80, 80)),
        (5, (200, 100, 100)),
        (10, (100, 180, 100)),
        (50, (100, 100, 200))
    ]
    # Pixel position of the logical origin (0, 0).
    origin_x = -min_x * draw_size
    origin_y = -min_y * draw_size
    for grid_step, color in step_colors:
        step = draw_size * grid_step
        # Slice-assignment paints whole rows/columns at the grid pitch,
        # phase-aligned to the origin via the modulo start offset.
        img[origin_y % step:img_y + origin_y:step, :, :] = color
        img[:, origin_x % step:img_x + origin_x:step, :] = color
    # The axes through the origin are drawn in white, 2 px thick.
    white = (255, 255, 255)
    img[origin_y:origin_y + 2, :, :] = white
    img[:, origin_x:origin_x + 2, :] = white
    # draw grid numbers every 10 cells along each axis
    # NOTE(review): y labels use (origin_y - y) while x labels use
    # (x - origin_x), i.e. the y axis is labelled with inverted sign
    # (mathematical y-up on a screen-y-down image) — confirm intentional.
    text_step = 10 * draw_size
    for y in range(origin_y % text_step, img_y, text_step):
        x = origin_x
        cv2.putText(
            img,
            str(int((origin_y - y) / draw_size)), (x, y),
            FONT,
            0.7,
            white,
            1,
            cv2.LINE_AA
        )
    for x in range(origin_x % text_step, img_x, text_step):
        y = origin_y
        cv2.putText(
            img,
            str(int((x - origin_x) / draw_size)), (x, y),
            FONT,
            0.7,
            white,
            1,
            cv2.LINE_AA
        )
    return img
def draw_annotation_list(
    img: np.ndarray,
    annotation_list: List[Annotation],
    draw_size: int,
    min_x: int,
    min_y: int,
    color: Tuple[int, int, int] = (0, 100, 0)
) -> np.ndarray:
    """Blend annotation rectangles and their value labels onto *img*.

    Returns a new blended image (``cv2.addWeighted`` does not mutate *img*),
    so callers must rebind the result.
    """
    tmp = np.zeros(img.shape, np.uint8)
    # Translucent overlay: draw filled rectangles on a scratch image, then
    # alpha-blend it over the original.
    for annotation in annotation_list:
        x0 = (annotation.min_x - min_x) * draw_size
        y0 = (annotation.min_y - min_y) * draw_size
        x1 = (annotation.max_x - min_x + 1) * draw_size
        y1 = (annotation.max_y - min_y + 1) * draw_size
        cv2.rectangle(
            tmp,
            (x0, y0),
            (x1, y1),
            color,
            cv2.FILLED
        )
    img = cv2.addWeighted(img, 0.8, tmp, 0.5, 1)
    # Paint the text again on top of the blend: a thick black pass under a
    # thin white pass gives an outlined, readable label.
    for annotation in annotation_list:
        x0 = (annotation.min_x - min_x) * draw_size
        y0 = (annotation.min_y - min_y) * draw_size
        y1 = (annotation.max_y - min_y) * draw_size
        # Font scale grows with the annotation's pixel height.
        cv2.putText(
            img,
            str(annotation.val),
            (x0, y1),
            FONT,
            (y1-y0)/50,
            (0, 0, 0),
            2,
            cv2.LINE_AA
        )
        cv2.putText(
            img,
            str(annotation.val),
            (x0, y1),
            FONT,
            (y1-y0)/50,
            (255, 255, 255),
            1,
            cv2.LINE_AA
        )
    return img
def draw(
    plot_vectors: List[Tuple[int, int]],
    draw_size: int = 10,
    output_dir: str = '',
    filename: str = 'draw.png',
    draw_color: Tuple[int, int, int] = (255, 255, 255),
    bg_color: Tuple[int, int, int] = (25, 25, 25),
    show_grid: bool = True,
    annotation_list: Optional[List[Annotation]] = None
) -> np.ndarray:
    """Render *plot_vectors* as filled cells and save a PNG.

    Writes an image of size (extent * draw_size) to ``output_dir/filename``;
    the extent is derived from the min/max of *plot_vectors*. Returns the
    rendered BGR image (bug fix: the annotation previously said ``-> None``
    even though the image is returned).
    """
    # Bounding box of the plotted cells; the sentinels plus max(1, ...)
    # below yield a 1x1-cell image when plot_vectors is empty.
    min_x = 10000
    max_x = -10000
    min_y = 10000
    max_y = -10000
    for x, y in plot_vectors:
        min_x = min(min_x, x)
        max_x = max(max_x, x)
        min_y = min(min_y, y)
        max_y = max(max_y, y)
    width = max(1, max_x - min_x + 1)
    height = max(1, max_y - min_y + 1)
    output_path = Path(output_dir) / filename
    img = np.full(
        (
            height * draw_size,
            width * draw_size,
            3
        ),
        bg_color,
        dtype=np.uint8
    )
    # One filled draw_size x draw_size square per plotted cell.
    for vector in plot_vectors:
        x = (vector[0] - min_x) * draw_size
        y = (vector[1] - min_y) * draw_size
        cv2.rectangle(
            img,
            (x, y),
            (x + draw_size - 1, y + draw_size - 1),
            draw_color,
            -1
        )
    if show_grid:
        draw_grid(
            img=img,
            min_x=min_x,
            min_y=min_y,
            draw_size=draw_size
        )
    if annotation_list is not None:
        # draw_annotation_list returns a new blended image, so rebind.
        img = draw_annotation_list(
            img=img,
            annotation_list=annotation_list,
            draw_size=draw_size,
            min_x=min_x,
            min_y=min_y,
        )
    cv2.imwrite(str(output_path), img)
    return img
def multipul_draw(
    plot_vectors_list: List[List[Tuple[int, int]]],
    draw_size: int = 10,
    output_dir: str = '',
    filename_suffix: str = 'draw',
    draw_color: Tuple[int, int, int] = (255, 255, 255),
    bg_color: Tuple[int, int, int] = (25, 25, 25),
    annotation_lists: Optional[List[List[Annotation]]] = None
) -> None:
    """Render one PNG per vector list plus a layered composite.

    Files go under *output_dir* as ``<filename_suffix>_<i>.png`` (one per
    list, draw_size pixels per cell) and ``<filename_suffix>_layered.png``.

    NOTE(review): the *draw_color* parameter is effectively ignored — each
    layer's color comes from index_to_color_alpha(i). It is kept only for
    interface compatibility.
    """
    for i in range(len(plot_vectors_list)):
        layer_color, _ = index_to_color_alpha(i)
        plot_vectors = plot_vectors_list[i]
        filename = f'{filename_suffix}_{i}.png'
        draw(
            plot_vectors=plot_vectors,
            draw_size=draw_size,
            output_dir=output_dir,
            filename=filename,
            draw_color=layer_color,
            bg_color=bg_color,
            annotation_list=None if annotation_lists is None else annotation_lists[i]
        )
    # Bug fix: forward draw_size and bg_color so the layered composite
    # matches the per-layer images (previously the defaults were always
    # used, regardless of what the caller passed).
    multilayer_draw(
        plot_vectors_list=plot_vectors_list,
        draw_size=draw_size,
        output_dir=output_dir,
        bg_color=bg_color,
        annotation_lists=annotation_lists,
        filename=f'{filename_suffix}_layered.png',
    )
def multilayer_draw(
    plot_vectors_list: List[List[Tuple[int, int]]],
    draw_size: int = 10,
    output_dir: str = '',
    filename: str = 'draw_layered.png',
    draw_color: Tuple[int, int, int] = (255, 255, 255),
    bg_color: Tuple[int, int, int] = (25, 25, 25),
    show_grid: bool = True,
    annotation_lists: Optional[List[List[Annotation]]] = None
) -> None:
    """Alpha-blend every vector list into one composite PNG.

    The extent covers all lists; each layer gets its color/alpha from
    index_to_color_alpha. NOTE(review): *draw_color* is ignored (overwritten
    per layer) and kept only for interface compatibility.
    """
    # Bounding box over ALL layers.
    min_x = 10000
    max_x = -10000
    min_y = 10000
    max_y = -10000
    for plot_vectors in plot_vectors_list:
        for x, y in plot_vectors:
            min_x = min(min_x, x)
            max_x = max(max_x, x)
            min_y = min(min_y, y)
            max_y = max(max_y, y)
    width = max(1, max_x - min_x + 1)
    height = max(1, max_y - min_y + 1)
    output_path = Path(output_dir) / filename
    img = np.full(
        (
            height * draw_size,
            width * draw_size,
            3
        ),
        bg_color,
        dtype=np.uint8
    )
    # Layer 0 (opaque white) is blended like the rest; later layers are
    # translucent so overlaps remain visible.
    for i in range(len(plot_vectors_list)):
        plot_vectors = plot_vectors_list[i]
        draw_color, alpha = index_to_color_alpha(i)
        tmp = np.zeros(img.shape, np.uint8)
        for vector in plot_vectors:
            x = (vector[0] - min_x) * draw_size
            y = (vector[1] - min_y) * draw_size
            cv2.rectangle(
                tmp,
                (x, y),
                (x + draw_size - 1, y + draw_size - 1),
                draw_color,
                cv2.FILLED
            )
        img = cv2.addWeighted(img, 1, tmp, alpha, 1)
    # Bug fix: honor the show_grid parameter — the grid used to be drawn
    # unconditionally, making the flag dead.
    if show_grid:
        draw_grid(
            img=img,
            min_x=min_x,
            min_y=min_y,
            draw_size=draw_size
        )
    if annotation_lists is not None:
        for annotation_list in annotation_lists:
            img = draw_annotation_list(
                img=img,
                annotation_list=annotation_list,
                draw_size=draw_size,
                min_x=min_x,
                min_y=min_y,
            )
    cv2.imwrite(str(output_path), img)
|
// Tailwind CSS configuration; design tokens are loaded from ./themeConfig
// and spread into `theme` below.
const config = require("./themeConfig")

module.exports = {
  // Files scanned so unused utility classes are purged from production CSS.
  purge: ["./src/**/*.{js,jsx,ts,tsx}"],
  darkMode: false, // or 'media' or 'class'
  theme: {
    ...config,
    extend: {},
  },
  variants: {
    extend: {},
  },
  plugins: [],
  corePlugins: {
    // Preflight (Tailwind's global base reset) is disabled — presumably to
    // avoid clobbering an existing global stylesheet; confirm before
    // re-enabling.
    preflight: false,
  },
}
|
import unittest
from os.path import join, dirname, abspath
import matplotlib.pyplot as plt
import numpy as np
import pystella as ps
__author__ = 'bakl'
class TestStellaLightCurves(unittest.TestCase):
    """Light-curve extraction tests for Stella models under ./data/stella.

    NOTE(review): the *_plot tests call plt.show(), which blocks until the
    window is closed — they are interactive checks, not CI-friendly tests.
    """

    def test_stella_curves(self):
        """Band names of the computed curves match the requested bands."""
        name = 'cat_R500_M15_Ni006_E12'
        path = join(dirname(abspath(__file__)), 'data', 'stella')
        bands = ('U', 'B', 'V')
        mdl = ps.Stella(name, path=path)
        curves = mdl.curves(bands)
        print(ps.first(curves).Time[:300])
        self.assertTrue((np.array(sorted(curves.BandNames) == sorted(bands))).all(),
                        msg="Error for the initial band names [%s] "
                            "VS secondary BandNames are %s."
                            % (' '.join(bands), ' '.join(curves.BandNames)))

    def test_stella_curves_tbeg(self):
        """Every time sample respects the requested start time t_beg."""
        name = 'cat_R500_M15_Ni006_E12'
        path = join(dirname(abspath(__file__)), 'data', 'stella')
        bands = ('U', 'B', 'V')
        mdl = ps.Stella(name, path=path)
        t_beg = 1.
        curves = mdl.curves(bands, t_beg=t_beg)
        print(ps.first(curves).Time[:3])
        # Bug fix: np.any only verified that SOME samples were >= t_beg, so
        # the test passed even when earlier samples slipped through. np.all
        # asserts the whole time grid starts at t_beg, matching the message.
        self.assertTrue(np.all(ps.first(curves).Time >= t_beg),
                        msg="There are Time values less than t_beg = {}".format(t_beg))

    def test_stella_curves_VS_tt_plot(self):
        """Visual check: computed curves overlaid on the .tt magnitudes."""
        from pystella.rf.band import colors
        name = 'cat_R500_M15_Ni006_E12'
        path = join(dirname(abspath(__file__)), 'data', 'stella')
        bands = ('U', 'B', 'V', 'R', 'I')
        mdl = ps.Stella(name, path=path)
        curves = mdl.curves(bands)
        tt = mdl.get_tt().load()
        ax = ps.lcp.curves_plot(curves)
        for bname in bands:
            ax.plot(tt['time'], tt['M'+bname], label="tt "+bname, color=colors(bname),
                    marker='*', markersize=3, ls='')
        ax.legend()
        plt.grid(linestyle=':', linewidth=1)
        plt.show()

    def test_stella_curves_reddening_plot(self):
        """Visual check: curves reddened via magnitudes VS via fluxes."""
        from matplotlib import gridspec
        name = 'cat_R500_M15_Ni006_E12'
        path = join(dirname(abspath(__file__)), 'data', 'stella')
        bands = ('UVW1', 'UVW2', 'UVM2')
        # bands = ('UVW1', 'UVW2', 'UVM2', 'U', 'B', 'R', 'I')
        ebv = 1
        # mags reddening
        cs = ps.lcf.curves_compute(name, path, bands, t_diff=1.05)
        mdl = ps.Stella(name, path=path)
        is_SMC = False
        if is_SMC:
            curves_mags = ps.lcf.curves_reddening(cs, ebv=ebv, law='Rv2.1')
            curves = mdl.curves(bands, ebv=ebv, t_diff=1.05, mode=ps.ReddeningLaw.SMC)  # best SMC MW
        else:
            curves_mags = ps.lcf.curves_reddening(cs, ebv=ebv, law=ps.extinction.law_default)
            curves = mdl.curves(bands, ebv=ebv, t_diff=1.05, mode=ps.ReddeningLaw.MW)
        # curves = mdl.curves(bands, ebv=ebv, law=LawFitz, mode=ReddeningLaw.SMC)  # best SMC
        self.assertTrue((np.array(sorted(curves.BandNames) == sorted(curves_mags.BandNames))).all(),
                        msg="Error for the initial band names [%s] "
                            "VS secondary BandNames are %s."
                            % (' '.join(curves_mags.BandNames), ' '.join(curves.BandNames)))
        # plot both reddened curve sets plus their per-band differences
        fig = plt.figure(figsize=(12, 12))
        gs1 = gridspec.GridSpec(4, 1)
        axUbv = fig.add_subplot(gs1[:-1, 0])
        axDM = fig.add_subplot(gs1[3, 0])
        lt = {lc.Band.Name: 'o' for lc in curves_mags}
        ax = ps.lcp.curves_plot(curves_mags, ax=axUbv, lt=lt, markersize=2, is_legend=False)
        xlim = ax.get_xlim()
        ps.lcp.curves_plot(curves, ax=axUbv)
        x = curves.TimeCommon
        for b in bands:
            y = curves.get(b).Mag - curves_mags.get(b).Mag
            axDM.plot(x, y, label="Delta {}".format(b))
        axDM.set_xlim(xlim)
        axDM.legend()
        plt.grid(linestyle=':', linewidth=1)
        plt.show()
|
import torch
import torch.nn as nn
from config import device, im_size
class conv2DBatchNormRelu(nn.Module):
    """Conv2d optionally followed by BatchNorm2d and/or ReLU.

    The three stages are assembled into one nn.Sequential (`cbr_unit`) in
    conv -> BN -> ReLU order, each optional stage included per the flags.
    """

    def __init__(
        self,
        in_channels,
        n_filters,
        k_size,
        stride,
        padding,
        bias=True,
        dilation=1,
        with_bn=True,
        with_relu=True
    ):
        super(conv2DBatchNormRelu, self).__init__()
        layers = [
            nn.Conv2d(int(in_channels),
                      int(n_filters),
                      kernel_size=k_size,
                      padding=padding,
                      stride=stride,
                      bias=bias,
                      dilation=dilation, )
        ]
        if with_bn:
            layers.append(nn.BatchNorm2d(int(n_filters)))
        if with_relu:
            layers.append(nn.ReLU(inplace=True))
        self.cbr_unit = nn.Sequential(*layers)

    def forward(self, inputs):
        return self.cbr_unit(inputs)
class _aspp(nn.Module):
    """Single ASPP branch: one (optionally dilated) conv-BN-ReLU block."""

    def __init__(self, inplanes, planes, kernel_size, padding, dilation):
        super(_aspp, self).__init__()
        self.cbr_unit = conv2DBatchNormRelu(
            inplanes, planes, kernel_size,
            stride=1, padding=padding, dilation=dilation)
        self._init_weight()

    def forward(self, x):
        return self.cbr_unit(x)

    def _init_weight(self):
        # Kaiming init for convs; BN starts as identity (weight=1, bias=0).
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                torch.nn.init.kaiming_normal_(module.weight)
            elif isinstance(module, nn.BatchNorm2d):
                module.weight.data.fill_(1)
                module.bias.data.zero_()
class aspp(nn.Module):
    """Atrous Spatial Pyramid Pooling head.

    Four parallel branches — a 1x1 conv plus three 3x3 convs at dilations
    1/2/3 — are concatenated along channels and fused by two further
    conv-BN-ReLU blocks, mapping `inplanes` channels to 512.
    """
    def __init__(self, inplanes):
        super(aspp, self).__init__()
        dilations = [1,2,3]
        self.aspp1 = _aspp(inplanes, 256, 1, padding=0, dilation=1)
        self.aspp2 = _aspp(inplanes, 256, 3, padding=dilations[0], dilation=dilations[0])
        self.aspp3 = _aspp(inplanes, 256, 3, padding=dilations[1], dilation=dilations[1])
        self.aspp4 = _aspp(inplanes, 256, 3, padding=dilations[2], dilation=dilations[2])
        # 1024 = 4 branches x 256 channels after concatenation.
        self.cbr_unit_1 = conv2DBatchNormRelu(1024,256,3,stride=1,padding=1)
        self.cbr_unit_2 = conv2DBatchNormRelu(256,512,3,stride=1,padding=1)
        self._init_weight()

    def forward(self, x):
        x1 = self.aspp1(x)
        x2 = self.aspp2(x)
        x3 = self.aspp3(x)
        x4 = self.aspp4(x)
        # Channel-wise concatenation of the four branch outputs.
        x = torch.cat((x1, x2, x3, x4), dim=1)
        x = self.cbr_unit_1(x)
        x = self.cbr_unit_2(x)
        return x

    def _init_weight(self):
        # Kaiming init for convs; BN starts as identity (weight=1, bias=0).
        # (Also re-initializes the _aspp submodules, which already did this.)
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                # n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                # m.weight.data.normal_(0, math.sqrt(2. / n))
                torch.nn.init.kaiming_normal_(m.weight)
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
class segnetDown2(nn.Module):
    """Two conv-BN-ReLU blocks then 2x2 max-pool (indices kept for unpooling)."""

    def __init__(self, in_size, out_size):
        super(segnetDown2, self).__init__()
        self.conv1 = conv2DBatchNormRelu(in_size, out_size, k_size=3, stride=1, padding=1)
        self.conv2 = conv2DBatchNormRelu(out_size, out_size, k_size=3, stride=1, padding=1)
        self.maxpool_with_argmax = nn.MaxPool2d(2, 2, return_indices=True)

    def forward(self, inputs):
        features = self.conv2(self.conv1(inputs))
        pre_pool_shape = features.size()
        pooled, indices = self.maxpool_with_argmax(features)
        # Shape and indices let the decoder invert the pooling exactly.
        return pooled, indices, pre_pool_shape
class segnetDown3(nn.Module):
    """Three conv-BN-ReLU blocks then 2x2 max-pool (indices kept for unpooling)."""

    def __init__(self, in_size, out_size):
        super(segnetDown3, self).__init__()
        self.conv1 = conv2DBatchNormRelu(in_size, out_size, k_size=3, stride=1, padding=1)
        self.conv2 = conv2DBatchNormRelu(out_size, out_size, k_size=3, stride=1, padding=1)
        self.conv3 = conv2DBatchNormRelu(out_size, out_size, k_size=3, stride=1, padding=1)
        self.maxpool_with_argmax = nn.MaxPool2d(2, 2, return_indices=True)

    def forward(self, inputs):
        features = self.conv3(self.conv2(self.conv1(inputs)))
        pre_pool_shape = features.size()
        pooled, indices = self.maxpool_with_argmax(features)
        # Shape and indices let the decoder invert the pooling exactly.
        return pooled, indices, pre_pool_shape
class segnet2(nn.Module):
    """Two conv-BN-ReLU blocks with no pooling; also returns the output shape."""

    def __init__(self, in_size, out_size):
        super(segnet2, self).__init__()
        self.conv1 = conv2DBatchNormRelu(in_size, out_size, k_size=3, stride=1, padding=1)
        self.conv2 = conv2DBatchNormRelu(out_size, out_size, k_size=3, stride=1, padding=1)

    def forward(self, inputs):
        features = self.conv2(self.conv1(inputs))
        # Shape is returned for symmetry with the pooling down-blocks.
        return features, features.size()
class segnetUp1(nn.Module):
    """Max-unpool (via stored indices) followed by one 5x5 conv-BN block (no ReLU)."""

    def __init__(self, in_size, out_size):
        super(segnetUp1, self).__init__()
        self.unpool = nn.MaxUnpool2d(2, 2)
        self.conv = conv2DBatchNormRelu(in_size, out_size, k_size=5, stride=1, padding=2, with_relu=False)

    def forward(self, inputs, indices, output_shape):
        upsampled = self.unpool(input=inputs, indices=indices, output_size=output_shape)
        return self.conv(upsampled)
class adding(nn.Module):
    """Element-wise sum of two tensors, packaged as a module."""

    def __init__(self):
        super(adding, self).__init__()

    def forward(self, a, b):
        return a + b
class DIMModel(nn.Module):
    """Deep-Image-Matting-style encoder-decoder producing an alpha matte.

    Encoder: three SegNet down-blocks (pooling indices saved), a pool-free
    double-conv stage, a bridge where a plain conv path (`res`) and an ASPP
    path are summed element-wise, then ReLU + pooling and one more
    down-block. Decoder: five unpool+conv stages mirroring the encoder's
    five poolings; a final sigmoid maps the output into [0, 1].

    Input is [N, in_channels, H, W]; with five 2x2 poolings H and W must be
    divisible by 32 — the inline comment assumes 320x320, confirm at the
    call site.
    """
    def __init__(self, n_classes=1, in_channels=4, is_unpooling=True, pretrain=True):
        super(DIMModel, self).__init__()
        self.in_channels = in_channels
        self.is_unpooling = is_unpooling  # NOTE(review): stored but never read in this class
        self.pretrain = pretrain          # NOTE(review): stored but never read in this class

        self.down1 = segnetDown2(self.in_channels, 64)
        self.down2 = segnetDown2(64, 128)
        self.down3 = segnetDown3(128, 256)
        self.down4 = segnet2(256, 512)
        # Bridge: plain conv path (no BN) summed with the ASPP path.
        self.res = conv2DBatchNormRelu(512,512,k_size=3, stride=1, padding=1, with_relu=True, with_bn=False)
        self.aspp = aspp(512)
        self.adding = adding()
        self.relu = nn.ReLU()
        self.maxpool = nn.MaxPool2d(2, 2, return_indices=True)
        self.down5 = segnetDown3(512, 512)

        self.up5 = segnetUp1(512, 512)
        self.up4 = segnetUp1(512, 256)
        self.up3 = segnetUp1(256, 128)
        self.up2 = segnetUp1(128, 64)
        self.up1 = segnetUp1(64, n_classes)
        self.sigmoid = nn.Sigmoid()

        # if self.pretrain:
        #     import torchvision.models as models
        #     vgg16 = models.vgg16()
        #     print(vgg16)
        #     self.init_vgg16_params()

    def forward(self, inputs):
        # inputs: [N, 4, 320, 320]
        down1, indices_1, unpool_shape1 = self.down1(inputs)
        down2, indices_2, unpool_shape2 = self.down2(down1)
        down3, indices_3, unpool_shape3 = self.down3(down2)
        down4, unpool_shape4 = self.down4(down3)
        # Residual-style bridge: conv path + ASPP path, summed then ReLU'd.
        res = self.res(down4)
        aspp = self.aspp(down4)
        adding = self.adding(res, aspp)
        activation = self.relu(adding)
        maxpool, indices_4 = self.maxpool(activation)
        down5, indices_5, unpool_shape5 = self.down5(maxpool)
        # Decoder mirrors the five poolings using the saved indices/shapes.
        up5 = self.up5(down5, indices_5, unpool_shape5)
        up4 = self.up4(up5, indices_4, unpool_shape4)
        up3 = self.up3(up4, indices_3, unpool_shape3)
        up2 = self.up2(up3, indices_2, unpool_shape2)
        up1 = self.up1(up2, indices_1, unpool_shape1)
        x = torch.squeeze(up1, dim=1)  # [N, 1, 320, 320] -> [N, 320, 320]
        x = self.sigmoid(x)
        return x

    # def init_vgg16_params(self):
    #     return
    #     migrate_model.migrate(self)
class RefinementModel(nn.Module):
    """Alpha-matte refinement head: three conv-BN-ReLU blocks, a final plain
    conv to one channel, a sigmoid, then a residual skip that adds the raw
    alpha channel (input channel index 3) back onto the prediction."""
    def __init__(self):
        super(RefinementModel, self).__init__()
        self.conv_1 = conv2DBatchNormRelu(4, 64, 3, 1, 1)
        self.conv_2 = conv2DBatchNormRelu(64, 64, 3, 1, 1)
        self.conv_3 = conv2DBatchNormRelu(64, 64, 3, 1, 1)
        self.conv_4 = conv2DBatchNormRelu(64, 1, 3, 1, 1, with_bn=False, with_relu=False)
        self.sigmoid = nn.Sigmoid()

    def forward(self, inputs):
        out = inputs
        for layer in (self.conv_1, self.conv_2, self.conv_3, self.conv_4):
            out = layer(out)
        # Sigmoid is element-wise, so squeezing first is equivalent
        out = self.sigmoid(torch.squeeze(out, dim=1))
        # Residual connection from the coarse alpha fed in as channel 3
        return out + inputs[:, 3, :, :]
# Quick smoke test: build the model and print its layer structure.
if __name__ == '__main__':
    # NOTE(review): `device` is not defined in this section -- presumably a
    # module-level torch.device created earlier in the file; confirm.
    model = DIMModel().to(device)
    print(model)
    # summary(model, (4, im_size, im_size))
|
#
# Copyright(c) 2020 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause-Clear
#
from core.test_run_utils import TestRun
from utils.installer import install_iotrace, check_if_installed
from utils.iotrace import IotracePlugin
from utils.misc import kill_all_io
from test_tools.fio.fio import Fio
def dut_prepare(reinstall: bool):
    """Prepare the device under test: ensure iotrace and fio are installed,
    clean leftovers from earlier runs, and stop any running IO."""
    if check_if_installed() and not reinstall:
        TestRun.LOGGER.info("iotrace is already installed by previous test")
    else:
        TestRun.LOGGER.info("Installing iotrace:")
        install_iotrace()

    # Call it after installing iotrace because we need iotrace
    # to get valid paths
    dut_cleanup()

    fio = Fio()
    if not fio.is_installed():
        TestRun.LOGGER.info("Installing fio")
        fio.install()

    TestRun.LOGGER.info("Killing all IO")
    kill_all_io()
def dut_cleanup():
    """Stop fuzzing, kill stray iotrace processes and remove stored traces.

    Requires iotrace to be installed (uses its working dir and trace repo path).
    """
    iotrace: IotracePlugin = TestRun.plugins['iotrace']

    TestRun.LOGGER.info("Stopping fuzzing")
    TestRun.executor.run(
        f'{iotrace.working_dir}/standalone-linux-io-tracer/tests/security/fuzzy/fuzz.sh clean')

    output = TestRun.executor.run('pgrep iotrace')
    if output.stdout != "":
        # BUG FIX: pgrep prints one PID per line; interpolating the raw stdout
        # made the shell treat the first newline as a command terminator, so
        # only the first PID was killed and the rest were executed as commands.
        # Join the PIDs with spaces so a single `kill -9` receives all of them.
        pids = ' '.join(output.stdout.split())
        TestRun.executor.run(f'kill -9 {pids}')

    TestRun.LOGGER.info("Removing existing traces")
    trace_repository_path: str = iotrace.get_trace_repository_path()
    TestRun.executor.run_expect_success(f'rm -rf {trace_repository_path}/kernel')
|
/*
* Generated by asn1c-0.9.24 (http://lionet.info/asn1c)
* From ASN.1 module "EUTRA-RRC-Definitions"
* found in "fixed_grammar.asn"
* `asn1c -gen-PER`
*/
#include "BarringPerACDC-Category-r13.h"
/*
 * ac-BarringFactor-r13 handlers (generated by asn1c -- keep code byte-stable,
 * comments only). The type is represented as a NativeEnumerated; each handler
 * first copies the NativeEnumerated implementation into this descriptor via
 * the _inherit_TYPE_descriptor helper, then forwards the call.
 */
static int
ac_BarringFactor_r13_4_constraint(asn_TYPE_descriptor_t *td, const void *sptr,
			asn_app_constraint_failed_f *ctfailcb, void *app_key) {
	/* Replace with underlying type checker */
	td->check_constraints = asn_DEF_NativeEnumerated.check_constraints;
	return td->check_constraints(td, sptr, ctfailcb, app_key);
}

/*
 * This type is implemented using NativeEnumerated,
 * so here we adjust the DEF accordingly.
 */
static void
ac_BarringFactor_r13_4_inherit_TYPE_descriptor(asn_TYPE_descriptor_t *td) {
	td->free_struct    = asn_DEF_NativeEnumerated.free_struct;
	td->print_struct   = asn_DEF_NativeEnumerated.print_struct;
	td->ber_decoder    = asn_DEF_NativeEnumerated.ber_decoder;
	td->der_encoder    = asn_DEF_NativeEnumerated.der_encoder;
	td->xer_decoder    = asn_DEF_NativeEnumerated.xer_decoder;
	td->xer_encoder    = asn_DEF_NativeEnumerated.xer_encoder;
	td->uper_decoder   = asn_DEF_NativeEnumerated.uper_decoder;
	td->uper_encoder   = asn_DEF_NativeEnumerated.uper_encoder;
	td->aper_decoder   = asn_DEF_NativeEnumerated.aper_decoder;
	td->aper_encoder   = asn_DEF_NativeEnumerated.aper_encoder;
	td->compare        = asn_DEF_NativeEnumerated.compare;
	/* Keep this type's own PER constraints if set; otherwise inherit */
	if(!td->per_constraints)
		td->per_constraints = asn_DEF_NativeEnumerated.per_constraints;
	td->elements       = asn_DEF_NativeEnumerated.elements;
	td->elements_count = asn_DEF_NativeEnumerated.elements_count;
	/* td->specifics      = asn_DEF_NativeEnumerated.specifics;	// Defined explicitly */
}

/* All wrappers below follow the same pattern: inherit the vtable, delegate. */
static void
ac_BarringFactor_r13_4_free(asn_TYPE_descriptor_t *td,
		void *struct_ptr, int contents_only) {
	ac_BarringFactor_r13_4_inherit_TYPE_descriptor(td);
	td->free_struct(td, struct_ptr, contents_only);
}

static int
ac_BarringFactor_r13_4_print(asn_TYPE_descriptor_t *td, const void *struct_ptr,
		int ilevel, asn_app_consume_bytes_f *cb, void *app_key) {
	ac_BarringFactor_r13_4_inherit_TYPE_descriptor(td);
	return td->print_struct(td, struct_ptr, ilevel, cb, app_key);
}

static asn_dec_rval_t
ac_BarringFactor_r13_4_decode_ber(asn_codec_ctx_t *opt_codec_ctx, asn_TYPE_descriptor_t *td,
		void **structure, const void *bufptr, size_t size, int tag_mode) {
	ac_BarringFactor_r13_4_inherit_TYPE_descriptor(td);
	return td->ber_decoder(opt_codec_ctx, td, structure, bufptr, size, tag_mode);
}

static asn_enc_rval_t
ac_BarringFactor_r13_4_encode_der(asn_TYPE_descriptor_t *td,
		void *structure, int tag_mode, ber_tlv_tag_t tag,
		asn_app_consume_bytes_f *cb, void *app_key) {
	ac_BarringFactor_r13_4_inherit_TYPE_descriptor(td);
	return td->der_encoder(td, structure, tag_mode, tag, cb, app_key);
}

static asn_dec_rval_t
ac_BarringFactor_r13_4_decode_xer(asn_codec_ctx_t *opt_codec_ctx, asn_TYPE_descriptor_t *td,
		void **structure, const char *opt_mname, const void *bufptr, size_t size) {
	ac_BarringFactor_r13_4_inherit_TYPE_descriptor(td);
	return td->xer_decoder(opt_codec_ctx, td, structure, opt_mname, bufptr, size);
}

static asn_enc_rval_t
ac_BarringFactor_r13_4_encode_xer(asn_TYPE_descriptor_t *td, void *structure,
		int ilevel, enum xer_encoder_flags_e flags,
		asn_app_consume_bytes_f *cb, void *app_key) {
	ac_BarringFactor_r13_4_inherit_TYPE_descriptor(td);
	return td->xer_encoder(td, structure, ilevel, flags, cb, app_key);
}

static asn_dec_rval_t
ac_BarringFactor_r13_4_decode_uper(asn_codec_ctx_t *opt_codec_ctx, asn_TYPE_descriptor_t *td,
		asn_per_constraints_t *constraints, void **structure, asn_per_data_t *per_data) {
	ac_BarringFactor_r13_4_inherit_TYPE_descriptor(td);
	return td->uper_decoder(opt_codec_ctx, td, constraints, structure, per_data);
}

static asn_enc_rval_t
ac_BarringFactor_r13_4_encode_uper(asn_TYPE_descriptor_t *td,
		asn_per_constraints_t *constraints,
		void *structure, asn_per_outp_t *per_out) {
	ac_BarringFactor_r13_4_inherit_TYPE_descriptor(td);
	return td->uper_encoder(td, constraints, structure, per_out);
}

static asn_enc_rval_t
ac_BarringFactor_r13_4_encode_aper(asn_TYPE_descriptor_t *td,
		asn_per_constraints_t *constraints,
		void *structure, asn_per_outp_t *per_out) {
	ac_BarringFactor_r13_4_inherit_TYPE_descriptor(td);
	return td->aper_encoder(td, constraints, structure, per_out);
}

static asn_comp_rval_t *
ac_BarringFactor_r13_4_compare(asn_TYPE_descriptor_t *td1,
		const void *structure1,
		asn_TYPE_descriptor_t *td2,
		const void *structure2) {
	asn_comp_rval_t * res = NULL;
	ac_BarringFactor_r13_4_inherit_TYPE_descriptor(td1);
	ac_BarringFactor_r13_4_inherit_TYPE_descriptor(td2);
	res = td1->compare(td1, structure1, td2, structure2);
	return res;
}

static asn_dec_rval_t
ac_BarringFactor_r13_4_decode_aper(asn_codec_ctx_t *opt_codec_ctx, asn_TYPE_descriptor_t *td,
		asn_per_constraints_t *constraints, void **structure, asn_per_data_t *per_data) {
	ac_BarringFactor_r13_4_inherit_TYPE_descriptor(td);
	return td->aper_decoder(opt_codec_ctx, td, constraints, structure, per_data);
}
/*
 * ac-BarringTime-r13 handlers (generated by asn1c -- keep code byte-stable,
 * comments only). Same NativeEnumerated delegation pattern as
 * ac-BarringFactor-r13 above.
 */
static int
ac_BarringTime_r13_21_constraint(asn_TYPE_descriptor_t *td, const void *sptr,
			asn_app_constraint_failed_f *ctfailcb, void *app_key) {
	/* Replace with underlying type checker */
	td->check_constraints = asn_DEF_NativeEnumerated.check_constraints;
	return td->check_constraints(td, sptr, ctfailcb, app_key);
}

/*
 * This type is implemented using NativeEnumerated,
 * so here we adjust the DEF accordingly.
 */
static void
ac_BarringTime_r13_21_inherit_TYPE_descriptor(asn_TYPE_descriptor_t *td) {
	td->free_struct    = asn_DEF_NativeEnumerated.free_struct;
	td->print_struct   = asn_DEF_NativeEnumerated.print_struct;
	td->ber_decoder    = asn_DEF_NativeEnumerated.ber_decoder;
	td->der_encoder    = asn_DEF_NativeEnumerated.der_encoder;
	td->xer_decoder    = asn_DEF_NativeEnumerated.xer_decoder;
	td->xer_encoder    = asn_DEF_NativeEnumerated.xer_encoder;
	td->uper_decoder   = asn_DEF_NativeEnumerated.uper_decoder;
	td->uper_encoder   = asn_DEF_NativeEnumerated.uper_encoder;
	td->aper_decoder   = asn_DEF_NativeEnumerated.aper_decoder;
	td->aper_encoder   = asn_DEF_NativeEnumerated.aper_encoder;
	td->compare        = asn_DEF_NativeEnumerated.compare;
	/* Keep this type's own PER constraints if set; otherwise inherit */
	if(!td->per_constraints)
		td->per_constraints = asn_DEF_NativeEnumerated.per_constraints;
	td->elements       = asn_DEF_NativeEnumerated.elements;
	td->elements_count = asn_DEF_NativeEnumerated.elements_count;
	/* td->specifics      = asn_DEF_NativeEnumerated.specifics;	// Defined explicitly */
}

/* All wrappers below follow the same pattern: inherit the vtable, delegate. */
static void
ac_BarringTime_r13_21_free(asn_TYPE_descriptor_t *td,
		void *struct_ptr, int contents_only) {
	ac_BarringTime_r13_21_inherit_TYPE_descriptor(td);
	td->free_struct(td, struct_ptr, contents_only);
}

static int
ac_BarringTime_r13_21_print(asn_TYPE_descriptor_t *td, const void *struct_ptr,
		int ilevel, asn_app_consume_bytes_f *cb, void *app_key) {
	ac_BarringTime_r13_21_inherit_TYPE_descriptor(td);
	return td->print_struct(td, struct_ptr, ilevel, cb, app_key);
}

static asn_dec_rval_t
ac_BarringTime_r13_21_decode_ber(asn_codec_ctx_t *opt_codec_ctx, asn_TYPE_descriptor_t *td,
		void **structure, const void *bufptr, size_t size, int tag_mode) {
	ac_BarringTime_r13_21_inherit_TYPE_descriptor(td);
	return td->ber_decoder(opt_codec_ctx, td, structure, bufptr, size, tag_mode);
}

static asn_enc_rval_t
ac_BarringTime_r13_21_encode_der(asn_TYPE_descriptor_t *td,
		void *structure, int tag_mode, ber_tlv_tag_t tag,
		asn_app_consume_bytes_f *cb, void *app_key) {
	ac_BarringTime_r13_21_inherit_TYPE_descriptor(td);
	return td->der_encoder(td, structure, tag_mode, tag, cb, app_key);
}

static asn_dec_rval_t
ac_BarringTime_r13_21_decode_xer(asn_codec_ctx_t *opt_codec_ctx, asn_TYPE_descriptor_t *td,
		void **structure, const char *opt_mname, const void *bufptr, size_t size) {
	ac_BarringTime_r13_21_inherit_TYPE_descriptor(td);
	return td->xer_decoder(opt_codec_ctx, td, structure, opt_mname, bufptr, size);
}

static asn_enc_rval_t
ac_BarringTime_r13_21_encode_xer(asn_TYPE_descriptor_t *td, void *structure,
		int ilevel, enum xer_encoder_flags_e flags,
		asn_app_consume_bytes_f *cb, void *app_key) {
	ac_BarringTime_r13_21_inherit_TYPE_descriptor(td);
	return td->xer_encoder(td, structure, ilevel, flags, cb, app_key);
}

static asn_dec_rval_t
ac_BarringTime_r13_21_decode_uper(asn_codec_ctx_t *opt_codec_ctx, asn_TYPE_descriptor_t *td,
		asn_per_constraints_t *constraints, void **structure, asn_per_data_t *per_data) {
	ac_BarringTime_r13_21_inherit_TYPE_descriptor(td);
	return td->uper_decoder(opt_codec_ctx, td, constraints, structure, per_data);
}

static asn_enc_rval_t
ac_BarringTime_r13_21_encode_uper(asn_TYPE_descriptor_t *td,
		asn_per_constraints_t *constraints,
		void *structure, asn_per_outp_t *per_out) {
	ac_BarringTime_r13_21_inherit_TYPE_descriptor(td);
	return td->uper_encoder(td, constraints, structure, per_out);
}

static asn_enc_rval_t
ac_BarringTime_r13_21_encode_aper(asn_TYPE_descriptor_t *td,
		asn_per_constraints_t *constraints,
		void *structure, asn_per_outp_t *per_out) {
	ac_BarringTime_r13_21_inherit_TYPE_descriptor(td);
	return td->aper_encoder(td, constraints, structure, per_out);
}

static asn_comp_rval_t *
ac_BarringTime_r13_21_compare(asn_TYPE_descriptor_t *td1,
		const void *structure1,
		asn_TYPE_descriptor_t *td2,
		const void *structure2) {
	asn_comp_rval_t * res = NULL;
	ac_BarringTime_r13_21_inherit_TYPE_descriptor(td1);
	ac_BarringTime_r13_21_inherit_TYPE_descriptor(td2);
	res = td1->compare(td1, structure1, td2, structure2);
	return res;
}

static asn_dec_rval_t
ac_BarringTime_r13_21_decode_aper(asn_codec_ctx_t *opt_codec_ctx, asn_TYPE_descriptor_t *td,
		asn_per_constraints_t *constraints, void **structure, asn_per_data_t *per_data) {
	ac_BarringTime_r13_21_inherit_TYPE_descriptor(td);
	return td->aper_decoder(opt_codec_ctx, td, constraints, structure, per_data);
}
/* Subtype constraint for acdc-Category-r13: native INTEGER in (1..16). */
static int
memb_acdc_Category_r13_constraint_1(asn_TYPE_descriptor_t *td, const void *sptr,
			asn_app_constraint_failed_f *ctfailcb, void *app_key) {
	long value;

	if(!sptr) {
		_ASN_CTFAIL(app_key, td, sptr,
			"%s: value not given (%s:%d)",
			td->name, __FILE__, __LINE__);
		return -1;
	}

	value = *(const long *)sptr;

	if((value >= 1 && value <= 16)) {
		/* Constraint check succeeded */
		return 0;
	} else {
		_ASN_CTFAIL(app_key, td, sptr,
			"%s: constraint failed (%s:%d)",
			td->name, __FILE__, __LINE__);
		return -1;
	}
}
/* PER constraints: bit widths and value ranges used by the (A/U)PER codecs. */
static asn_per_constraints_t asn_PER_type_ac_BarringFactor_r13_constr_4 GCC_NOTUSED = {
	{ APC_CONSTRAINED,	 4,  4,  0,  15 }	/* (0..15) */,
	{ APC_UNCONSTRAINED,	-1, -1,  0,  0 },
	0, 0	/* No PER value map */
};
static asn_per_constraints_t asn_PER_type_ac_BarringTime_r13_constr_21 GCC_NOTUSED = {
	{ APC_CONSTRAINED,	 3,  3,  0,  7 }	/* (0..7) */,
	{ APC_UNCONSTRAINED,	-1, -1,  0,  0 },
	0, 0	/* No PER value map */
};
static asn_per_constraints_t asn_PER_memb_acdc_Category_r13_constr_2 GCC_NOTUSED = {
	{ APC_CONSTRAINED,	 4,  4,  1,  16 }	/* (1..16) */,
	{ APC_UNCONSTRAINED,	-1, -1,  0,  0 },
	0, 0	/* No PER value map */
};
/* Enumerator value <-> name table; names "p00".."p95" (presumably barring
 * probabilities 0.00..0.95 per the RRC spec -- verify against the ASN.1). */
static asn_INTEGER_enum_map_t asn_MAP_ac_BarringFactor_r13_value2enum_4[] = {
	{ 0,	3,	"p00" },
	{ 1,	3,	"p05" },
	{ 2,	3,	"p10" },
	{ 3,	3,	"p15" },
	{ 4,	3,	"p20" },
	{ 5,	3,	"p25" },
	{ 6,	3,	"p30" },
	{ 7,	3,	"p40" },
	{ 8,	3,	"p50" },
	{ 9,	3,	"p60" },
	{ 10,	3,	"p70" },
	{ 11,	3,	"p75" },
	{ 12,	3,	"p80" },
	{ 13,	3,	"p85" },
	{ 14,	3,	"p90" },
	{ 15,	3,	"p95" }
};
/* Reverse lookup: index in name-sorted order -> enumerator value. */
static unsigned int asn_MAP_ac_BarringFactor_r13_enum2value_4[] = {
	0,	/* p00(0) */
	1,	/* p05(1) */
	2,	/* p10(2) */
	3,	/* p15(3) */
	4,	/* p20(4) */
	5,	/* p25(5) */
	6,	/* p30(6) */
	7,	/* p40(7) */
	8,	/* p50(8) */
	9,	/* p60(9) */
	10,	/* p70(10) */
	11,	/* p75(11) */
	12,	/* p80(12) */
	13,	/* p85(13) */
	14,	/* p90(14) */
	15	/* p95(15) */
};
static asn_INTEGER_specifics_t asn_SPC_ac_BarringFactor_r13_specs_4 = {
	asn_MAP_ac_BarringFactor_r13_value2enum_4,	/* "tag" => N; sorted by tag */
	asn_MAP_ac_BarringFactor_r13_enum2value_4,	/* N => "tag"; sorted by N */
	16,	/* Number of elements in the maps */
	0,	/* Enumeration is not extensible */
	1,	/* Strict enumeration */
	0,	/* Native long size */
	0
};
/* BER tags: context tag [0] (IMPLICIT) over UNIVERSAL 10 (ENUMERATED). */
static ber_tlv_tag_t asn_DEF_ac_BarringFactor_r13_tags_4[] = {
	(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
	(ASN_TAG_CLASS_UNIVERSAL | (10 << 2))
};
/* Run-time type descriptor wiring the delegating handlers defined above. */
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_ac_BarringFactor_r13_4 = {
	"ac-BarringFactor-r13",
	"ac-BarringFactor-r13",
	ac_BarringFactor_r13_4_free,
	ac_BarringFactor_r13_4_print,
	ac_BarringFactor_r13_4_constraint,
	ac_BarringFactor_r13_4_decode_ber,
	ac_BarringFactor_r13_4_encode_der,
	ac_BarringFactor_r13_4_decode_xer,
	ac_BarringFactor_r13_4_encode_xer,
	ac_BarringFactor_r13_4_decode_uper,
	ac_BarringFactor_r13_4_encode_uper,
	ac_BarringFactor_r13_4_decode_aper,
	ac_BarringFactor_r13_4_encode_aper,
	ac_BarringFactor_r13_4_compare,
	0,	/* Use generic outmost tag fetcher */
	asn_DEF_ac_BarringFactor_r13_tags_4,
	sizeof(asn_DEF_ac_BarringFactor_r13_tags_4)
		/sizeof(asn_DEF_ac_BarringFactor_r13_tags_4[0]) - 1, /* 1 */
	asn_DEF_ac_BarringFactor_r13_tags_4,	/* Same as above */
	sizeof(asn_DEF_ac_BarringFactor_r13_tags_4)
		/sizeof(asn_DEF_ac_BarringFactor_r13_tags_4[0]), /* 2 */
	&asn_PER_type_ac_BarringFactor_r13_constr_4,
	0, 0,	/* Defined elsewhere */
	&asn_SPC_ac_BarringFactor_r13_specs_4	/* Additional specs */
};
/* Enumerator value <-> name table for ac-BarringTime-r13 ("s4".."s512",
 * presumably barring times in seconds -- verify against the ASN.1 module). */
static asn_INTEGER_enum_map_t asn_MAP_ac_BarringTime_r13_value2enum_21[] = {
	{ 0,	2,	"s4" },
	{ 1,	2,	"s8" },
	{ 2,	3,	"s16" },
	{ 3,	3,	"s32" },
	{ 4,	3,	"s64" },
	{ 5,	4,	"s128" },
	{ 6,	4,	"s256" },
	{ 7,	4,	"s512" }
};
/* Reverse lookup: index in name-sorted order -> enumerator value
 * (non-identity here because lexical order differs from numeric order). */
static unsigned int asn_MAP_ac_BarringTime_r13_enum2value_21[] = {
	5,	/* s128(5) */
	2,	/* s16(2) */
	6,	/* s256(6) */
	3,	/* s32(3) */
	0,	/* s4(0) */
	7,	/* s512(7) */
	4,	/* s64(4) */
	1	/* s8(1) */
};
static asn_INTEGER_specifics_t asn_SPC_ac_BarringTime_r13_specs_21 = {
	asn_MAP_ac_BarringTime_r13_value2enum_21,	/* "tag" => N; sorted by tag */
	asn_MAP_ac_BarringTime_r13_enum2value_21,	/* N => "tag"; sorted by N */
	8,	/* Number of elements in the maps */
	0,	/* Enumeration is not extensible */
	1,	/* Strict enumeration */
	0,	/* Native long size */
	0
};
/* BER tags: context tag [1] (IMPLICIT) over UNIVERSAL 10 (ENUMERATED). */
static ber_tlv_tag_t asn_DEF_ac_BarringTime_r13_tags_21[] = {
	(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
	(ASN_TAG_CLASS_UNIVERSAL | (10 << 2))
};
/* Run-time type descriptor wiring the delegating handlers defined above. */
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_ac_BarringTime_r13_21 = {
	"ac-BarringTime-r13",
	"ac-BarringTime-r13",
	ac_BarringTime_r13_21_free,
	ac_BarringTime_r13_21_print,
	ac_BarringTime_r13_21_constraint,
	ac_BarringTime_r13_21_decode_ber,
	ac_BarringTime_r13_21_encode_der,
	ac_BarringTime_r13_21_decode_xer,
	ac_BarringTime_r13_21_encode_xer,
	ac_BarringTime_r13_21_decode_uper,
	ac_BarringTime_r13_21_encode_uper,
	ac_BarringTime_r13_21_decode_aper,
	ac_BarringTime_r13_21_encode_aper,
	ac_BarringTime_r13_21_compare,
	0,	/* Use generic outmost tag fetcher */
	asn_DEF_ac_BarringTime_r13_tags_21,
	sizeof(asn_DEF_ac_BarringTime_r13_tags_21)
		/sizeof(asn_DEF_ac_BarringTime_r13_tags_21[0]) - 1, /* 1 */
	asn_DEF_ac_BarringTime_r13_tags_21,	/* Same as above */
	sizeof(asn_DEF_ac_BarringTime_r13_tags_21)
		/sizeof(asn_DEF_ac_BarringTime_r13_tags_21[0]), /* 2 */
	&asn_PER_type_ac_BarringTime_r13_constr_21,
	0, 0,	/* Defined elsewhere */
	&asn_SPC_ac_BarringTime_r13_specs_21	/* Additional specs */
};
/* Member table for the inner acdc-BarringConfig-r13 SEQUENCE:
 * two mandatory IMPLICIT-tagged enumerated members. */
static asn_TYPE_member_t asn_MBR_acdc_BarringConfig_r13_3[] = {
	{ ATF_NOFLAGS, 0, offsetof(struct BarringPerACDC_Category_r13__acdc_BarringConfig_r13, ac_BarringFactor_r13),
		(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_ac_BarringFactor_r13_4,
		0,	/* Defer constraints checking to the member type */
		0,	/* No PER visible constraints */
		0,
		"ac-BarringFactor-r13"
		},
	{ ATF_NOFLAGS, 0, offsetof(struct BarringPerACDC_Category_r13__acdc_BarringConfig_r13, ac_BarringTime_r13),
		(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_ac_BarringTime_r13_21,
		0,	/* Defer constraints checking to the member type */
		0,	/* No PER visible constraints */
		0,
		"ac-BarringTime-r13"
		},
};
/* BER tags: context tag [1] within the parent over UNIVERSAL 16 (SEQUENCE). */
static ber_tlv_tag_t asn_DEF_acdc_BarringConfig_r13_tags_3[] = {
	(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
/* Tag -> member-index map used by the BER/XER SEQUENCE decoder. */
static asn_TYPE_tag2member_t asn_MAP_acdc_BarringConfig_r13_tag2el_3[] = {
    { (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* ac-BarringFactor-r13 at 2492 */
    { (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 } /* ac-BarringTime-r13 at 2494 */
};
static asn_SEQUENCE_specifics_t asn_SPC_acdc_BarringConfig_r13_specs_3 = {
	sizeof(struct BarringPerACDC_Category_r13__acdc_BarringConfig_r13),
	offsetof(struct BarringPerACDC_Category_r13__acdc_BarringConfig_r13, _asn_ctx),
	asn_MAP_acdc_BarringConfig_r13_tag2el_3,
	2,	/* Count of tags in the map */
	0, 0, 0,	/* Optional elements (not needed) */
	-1,	/* Start extensions */
	-1	/* Stop extensions */
};
/* Descriptor delegating directly to the generic SEQUENCE implementation. */
static /* Use -fall-defs-global to expose */
asn_TYPE_descriptor_t asn_DEF_acdc_BarringConfig_r13_3 = {
	"acdc-BarringConfig-r13",
	"acdc-BarringConfig-r13",
	SEQUENCE_free,
	SEQUENCE_print,
	SEQUENCE_constraint,
	SEQUENCE_decode_ber,
	SEQUENCE_encode_der,
	SEQUENCE_decode_xer,
	SEQUENCE_encode_xer,
	SEQUENCE_decode_uper,
	SEQUENCE_encode_uper,
	SEQUENCE_decode_aper,
	SEQUENCE_encode_aper,
	SEQUENCE_compare,
	0,	/* Use generic outmost tag fetcher */
	asn_DEF_acdc_BarringConfig_r13_tags_3,
	sizeof(asn_DEF_acdc_BarringConfig_r13_tags_3)
		/sizeof(asn_DEF_acdc_BarringConfig_r13_tags_3[0]) - 1, /* 1 */
	asn_DEF_acdc_BarringConfig_r13_tags_3,	/* Same as above */
	sizeof(asn_DEF_acdc_BarringConfig_r13_tags_3)
		/sizeof(asn_DEF_acdc_BarringConfig_r13_tags_3[0]), /* 2 */
	0,	/* No PER visible constraints */
	asn_MBR_acdc_BarringConfig_r13_3,
	2,	/* Elements count */
	&asn_SPC_acdc_BarringConfig_r13_specs_3	/* Additional specs */
};
/* Member table for the top-level BarringPerACDC-Category-r13 SEQUENCE:
 * mandatory acdc-Category-r13 INTEGER (1..16) and OPTIONAL (ATF_POINTER)
 * acdc-BarringConfig-r13 sub-sequence. */
static asn_TYPE_member_t asn_MBR_BarringPerACDC_Category_r13_1[] = {
	{ ATF_NOFLAGS, 0, offsetof(struct BarringPerACDC_Category_r13, acdc_Category_r13),
		(ASN_TAG_CLASS_CONTEXT | (0 << 2)),
		-1,	/* IMPLICIT tag at current level */
		&asn_DEF_NativeInteger,
		memb_acdc_Category_r13_constraint_1,
		&asn_PER_memb_acdc_Category_r13_constr_2,
		0,
		"acdc-Category-r13"
		},
	{ ATF_POINTER, 1, offsetof(struct BarringPerACDC_Category_r13, acdc_BarringConfig_r13),
		(ASN_TAG_CLASS_CONTEXT | (1 << 2)),
		0,
		&asn_DEF_acdc_BarringConfig_r13_3,
		0,	/* Defer constraints checking to the member type */
		0,	/* No PER visible constraints */
		0,
		"acdc-BarringConfig-r13"
		},
};
/* Indices of the optional members (member #1 is the only OPTIONAL one). */
static int asn_MAP_BarringPerACDC_Category_r13_oms_1[] = { 1 };
static ber_tlv_tag_t asn_DEF_BarringPerACDC_Category_r13_tags_1[] = {
	(ASN_TAG_CLASS_UNIVERSAL | (16 << 2))
};
/* Tag -> member-index map used by the BER/XER SEQUENCE decoder. */
static asn_TYPE_tag2member_t asn_MAP_BarringPerACDC_Category_r13_tag2el_1[] = {
    { (ASN_TAG_CLASS_CONTEXT | (0 << 2)), 0, 0, 0 }, /* acdc-Category-r13 at 2489 */
    { (ASN_TAG_CLASS_CONTEXT | (1 << 2)), 1, 0, 0 } /* acdc-BarringConfig-r13 at 2493 */
};
static asn_SEQUENCE_specifics_t asn_SPC_BarringPerACDC_Category_r13_specs_1 = {
	sizeof(struct BarringPerACDC_Category_r13),
	offsetof(struct BarringPerACDC_Category_r13, _asn_ctx),
	asn_MAP_BarringPerACDC_Category_r13_tag2el_1,
	2,	/* Count of tags in the map */
	asn_MAP_BarringPerACDC_Category_r13_oms_1,	/* Optional members */
	1, 0,	/* Root/Additions */
	-1,	/* Start extensions */
	-1	/* Stop extensions */
};
/* Public (non-static) descriptor: entry point declared in the header. */
asn_TYPE_descriptor_t asn_DEF_BarringPerACDC_Category_r13 = {
	"BarringPerACDC-Category-r13",
	"BarringPerACDC-Category-r13",
	SEQUENCE_free,
	SEQUENCE_print,
	SEQUENCE_constraint,
	SEQUENCE_decode_ber,
	SEQUENCE_encode_der,
	SEQUENCE_decode_xer,
	SEQUENCE_encode_xer,
	SEQUENCE_decode_uper,
	SEQUENCE_encode_uper,
	SEQUENCE_decode_aper,
	SEQUENCE_encode_aper,
	SEQUENCE_compare,
	0,	/* Use generic outmost tag fetcher */
	asn_DEF_BarringPerACDC_Category_r13_tags_1,
	sizeof(asn_DEF_BarringPerACDC_Category_r13_tags_1)
		/sizeof(asn_DEF_BarringPerACDC_Category_r13_tags_1[0]), /* 1 */
	asn_DEF_BarringPerACDC_Category_r13_tags_1,	/* Same as above */
	sizeof(asn_DEF_BarringPerACDC_Category_r13_tags_1)
		/sizeof(asn_DEF_BarringPerACDC_Category_r13_tags_1[0]), /* 1 */
	0,	/* No PER visible constraints */
	asn_MBR_BarringPerACDC_Category_r13_1,
	2,	/* Elements count */
	&asn_SPC_BarringPerACDC_Category_r13_specs_1	/* Additional specs */
};
|
#pragma once

// Paired C++ and assembly implementations of integer division; each
// cpp_*/asm_* pair shares a signature so results can be compared directly
// (presumably for exercising the hand-written asm versions -- see the .asm/.cpp files).

// Unsigned quotient (left / right)
unsigned int cpp_div( const unsigned int left, const unsigned int right );
unsigned int asm_div( const unsigned int left, const unsigned int right );

// Unsigned remainder (left % right)
unsigned int cpp_div_remainder( const unsigned int left, const unsigned int right );
unsigned int asm_div_remainder( const unsigned int left, const unsigned int right );

// Signed quotient (left / right)
int cpp_idiv( const int left, const int right );
int asm_idiv( const int left, const int right );

// Signed remainder (left % right)
int cpp_idiv_remainder( const int left, const int right );
int asm_idiv_remainder( const int left, const int right );
|
import Document, { Html, Head, Main, NextScript } from 'next/document';
// Custom Next.js Document: wraps every server-rendered page so the top-level
// <Html> attributes (language, text direction) apply site-wide.
class MyDocument extends Document {
  render() {
    return (
      <Html lang="ja" dir="ltr">
        <Head />
        <body>
          {/* Main renders the active page; NextScript injects Next.js runtime scripts */}
          <Main />
          <NextScript />
        </body>
      </Html>
    )
  }
}

export default MyDocument
|
var hasOwnProperty = {}.hasOwnProperty;
module.exports = function (it, key) {
return hasOwnProperty.call(it, key);
};
|
from django.http import JsonResponse
from django.core.exceptions import RequestDataTooBig
class CheckRequest(object):
    """Django middleware that converts RequestDataTooBig into a JSON error
    response instead of letting it propagate as a 500."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        # No per-request processing needed; just delegate down the chain.
        return self.get_response(request)

    def process_exception(self, request, exception):
        """Return a JSON error for oversized request bodies.

        Returning None for other exceptions lets Django's normal exception
        handling (and other middleware) take over.
        """
        if isinstance(exception, RequestDataTooBig):
            # BUG FIX: previously this returned HTTP 200, so clients treated
            # the error body as a success. 413 Payload Too Large is the
            # correct status for an oversized request body.
            return JsonResponse(
                {"error_msg": "File size exceeds limit. File size should be 30MB or less"},
                status=413,
            )
        return None
|
/**
* || ____ _ __
* +------+ / __ )(_) /_______________ _____ ___
* | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
* +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
* || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
*
* Crazyflie control firmware
*
* Copyright (C) 2011-2012 Bitcraze AB
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, in version 3.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* trace.h - ITM trace implementation/definition
*/
#ifndef __TRACE_H__
#define __TRACE_H__

/* Enable the FreeRTOS trace facility so the kernel invokes the hooks below */
#define configUSE_TRACE_FACILITY 1

// ITM useful macros
// ITM_SEND writes DATA to ITM stimulus port CH (ports are memory-mapped
// 32-bit registers at 0xE0000000 on Cortex-M). The default variant writes
// unconditionally (data may be dropped if the port is busy); defining
// ITM_NO_OVERFLOW busy-waits until the port reads non-zero (ready) first,
// trading CPU time for lossless tracing.
#ifndef ITM_NO_OVERFLOW
#define ITM_SEND(CH, DATA) ((uint32_t*)0xE0000000)[CH] = DATA
#else
#define ITM_SEND(CH, DATA) while(((uint32_t*)0xE0000000)[CH] == 0);\
                           ((uint32_t*)0xE0000000)[CH] = DATA
#endif

// Send 4 first characters of task name to ITM port 1
#define traceTASK_SWITCHED_IN() ITM_SEND(1, *((uint32_t*)pxCurrentTCB->pcTaskName))

// Systick value on port 2
#define traceTASK_INCREMENT_TICK(xTickCount) ITM_SEND(2, xTickCount)

// Queue trace on port 3: event code in the high byte, queue number in the low bits
#define ITM_QUEUE_SEND 0x0100
#define ITM_QUEUE_FAILED 0x0200
#define ITM_BLOCKING_ON_QUEUE_RECEIVE 0x0300
#define ITM_BLOCKING_ON_QUEUE_SEND 0x0400
#define traceQUEUE_SEND(xQueue) ITM_SEND(3, ITM_QUEUE_SEND | ((xQUEUE *) xQueue)->uxQueueNumber)
#define traceQUEUE_SEND_FAILED(xQueue) ITM_SEND(3, ITM_QUEUE_FAILED | ((xQUEUE *) xQueue)->uxQueueNumber)
#define traceBLOCKING_ON_QUEUE_RECEIVE(xQueue) ITM_SEND(3, ITM_BLOCKING_ON_QUEUE_RECEIVE | ((xQUEUE *) xQueue)->uxQueueNumber)
#define traceBLOCKING_ON_QUEUE_SEND(xQueue) ITM_SEND(3, ITM_BLOCKING_ON_QUEUE_SEND | ((xQUEUE *) xQueue)->uxQueueNumber)

#endif
|
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#pragma once
#include <cstdint>
#include <ctime>
#include <memory>
#include <mutex>
namespace metrics {

class MetricManager;

/**
 * Base class for hooks registered with the MetricManager so components can
 * refresh their metric values on a schedule.
 *
 * The scheduling state (_nextCall, _period) is owned and updated by
 * MetricManager via the friend declaration; subclasses only implement
 * updateMetrics(). The MetricLockGuard parameter presumably certifies that
 * the metric lock is held during the callback -- confirm in MetricManager.
 */
class UpdateHook {
    const char* _name;      // hook identifier; not owned (lifetime managed by caller -- TODO confirm)
    time_t _nextCall;       // next scheduled invocation, maintained by MetricManager
    uint32_t _period;       // invocation period, maintained by MetricManager

    friend class MetricManager;
public:
    using UP = std::unique_ptr<UpdateHook>;
    using MetricLockGuard = std::unique_lock<std::mutex>;

    UpdateHook(const char* name) : _name(name), _nextCall(0), _period(0) {}
    virtual ~UpdateHook() = default;

    virtual void updateMetrics(const MetricLockGuard & guard) = 0;
    const char* getName() const { return _name; }
};

}
|
import torch
from torch.utils.data import Dataset, DataLoader
import torch.optim as torch_optim
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from timeit import default_timer as timer
import warnings
# from sklearn.model_selection._split import StratifiedShuffleSplit
# from experiments.experimental_design import experimental_design
torch.manual_seed(42)
class CostInsensitiveDataset(Dataset):
    """Dataset yielding (features, label) pairs; no per-sample cost information."""

    def __init__(self, X, y):
        self.X = X  # per-sample features, indexable
        self.y = y  # per-sample labels, same length as X

    def __len__(self):
        return len(self.y)

    def __getitem__(self, idx):
        sample, label = self.X[idx], self.y[idx]
        return sample, label
class CostSensitiveDataset(Dataset):
    """Dataset yielding (features, label, cost) triples for cost-sensitive losses."""

    def __init__(self, X, y, w):
        self.X = X  # per-sample features, indexable
        self.y = y  # per-sample labels, same length as X
        self.w = w  # per-sample cost information, same length as X

    def __len__(self):
        return len(self.y)

    def __getitem__(self, idx):
        return self.X[idx], self.y[idx], self.w[idx]
class CSNeuralNetwork(nn.Module):
    def __init__(self, n_inputs, cost_sensitive=False, obj='ce', lambda1=0, lambda2=0, n_neurons=16):
        """Single-hidden-layer binary classifier with optional cost-sensitive loss.

        n_inputs: number of input features.
        cost_sensitive: NOTE(review) -- this argument is ignored; the flag is
            re-derived from `obj` below. Confirm whether it should be removed.
        obj: objective -- 'ce' (cross-entropy), 'weightedce', or 'aec'.
        lambda1 / lambda2: L1 / L2 regularization strengths.
        n_neurons: hidden-layer width.
        """
        # TODO:
        # One hidden layer - tune number of neurons as hyperparameter!
        # Compare relu with hyperbolic tangent
        # Only add BatchNorm in regularized version
        # Add more arguments to object initialization (e.g. objective function)
        super().__init__()
        self.n_inputs = n_inputs
        # Cost-sensitive training is implied by the choice of objective
        self.cost_sensitive = (obj == 'weightedce' or obj == 'aec')
        self.obj = obj
        self.lin_layer1 = nn.Linear(n_inputs, n_neurons)
        self.final_layer = nn.Linear(n_neurons, 1)
        self.sigmoid = nn.Sigmoid()
        self.lambda1 = lambda1
        self.lambda2 = lambda2
def forward(self, x):
x = self.lin_layer1(x)
# x = F.relu(x)
# Todo: check difference with tanh?
x = torch.tanh(x)
x = self.final_layer(x)
x = self.sigmoid(x)
return x
    def model_train(self, model, x_train, y_train, x_val, y_val, cost_matrix_train=None, cost_matrix_val=None,
                    n_epochs=500, batch_size=2 ** 10, verbose=True):
        """Train `model` with Adam, L1/L2 regularization and early stopping.

        Loss depends on self.obj: 'ce' (BCE), 'weightedce' (BCE weighted by
        misclassification cost) or 'aec' (average expected cost). Cost
        matrices are required for the cost-sensitive objectives; cost_matrix
        entries are indexed [sample, predicted, actual]. The best model (by
        validation loss) is checkpointed to the file 'checkpoint' and
        restored before returning.
        """
        early_stopping_criterion = 25  # epochs without significant improvement before stopping
        if self.cost_sensitive:
            train_ds = CostSensitiveDataset(torch.from_numpy(x_train).float(),
                                            torch.from_numpy(y_train[:, None]).float(),
                                            torch.from_numpy(cost_matrix_train))
            val_ds = CostSensitiveDataset(torch.from_numpy(x_val).float(),
                                          torch.from_numpy(y_val[:, None]).float(),
                                          torch.from_numpy(cost_matrix_val))
        else:
            train_ds = CostInsensitiveDataset(torch.from_numpy(x_train).float(),
                                              torch.from_numpy(y_train[:, None]).float())
            val_ds = CostInsensitiveDataset(torch.from_numpy(x_val).float(), torch.from_numpy(y_val[:, None]).float())
        criterion = nn.BCELoss()
        optimizer = torch_optim.Adam(model.parameters(), lr=0.001)  # Todo: larger learning rate?
        train_dl = DataLoader(train_ds, batch_size=batch_size, shuffle=True)
        # Validation batches are a quarter of the training batch size
        val_dl = DataLoader(val_ds, batch_size=int(batch_size / 4), shuffle=True)
        best_val_loss = float("Inf")
        epochs_not_improved = 0
        for epoch in range(n_epochs):
            start = timer()
            running_loss = 0.0
            # Training
            model.train()
            for i, data in enumerate(train_dl):
                if self.cost_sensitive:
                    inputs, labels, cost_matrix_batch = data
                else:
                    inputs, labels = data
                # zero the parameter gradients
                optimizer.zero_grad()
                # forward + backward + optimize
                outputs = model(inputs)
                if self.obj == 'ce':
                    loss = criterion(outputs, labels)
                elif self.obj == 'weightedce':
                    # Per-sample weight = cost of misclassifying that sample
                    # (cost_matrix[:, 1, 0] for true negatives, [:, 0, 1] for true positives)
                    misclass_cost_batch = torch.zeros((len(labels), 1), dtype=torch.double)
                    misclass_cost_batch[labels == 0] = cost_matrix_batch[:, 1, 0][:, None][labels == 0]
                    misclass_cost_batch[labels == 1] = cost_matrix_batch[:, 0, 1][:, None][labels == 1]
                    loss = nn.BCELoss(weight=misclass_cost_batch)(outputs, labels)
                elif self.obj == 'aec':
                    loss = self.expected_cost(outputs, labels, cost_matrix_batch)
                else:
                    raise Exception('Objective function not recognized')
                # Add regularization
                model_params = torch.cat([params.view(-1) for params in model.parameters()])
                l1_regularization = self.lambda1 * torch.norm(model_params, 1)
                # print('l1 regularization = %.4f' % l1_regularization)
                l2_regularization = self.lambda2 * torch.norm(model_params, 2)**2  # torch.norm returns the square root
                # print('l2 regularization = %.4f' % l2_regularization)
                loss += l1_regularization + l2_regularization
                loss.backward()
                optimizer.step()
                running_loss += loss.item()
            # Validation check
            model.eval()
            total_val_loss = 0.0
            with torch.no_grad():
                for val_i, val_data in enumerate(val_dl):
                    # NOTE(review): no `else` branch here -- an unrecognized
                    # self.obj would leave val_loss unbound, but the training
                    # loop above raises first, so this is unreachable in practice.
                    if self.obj == 'ce':
                        val_inputs, val_labels = val_data
                        val_outputs = model(val_inputs)
                        val_loss = criterion(val_outputs, val_labels)
                    elif self.obj == 'weightedce':
                        val_inputs, val_labels, val_cost_matrix = val_data
                        val_outputs = model(val_inputs)
                        misclass_cost_val = torch.zeros((len(val_labels), 1), dtype=torch.double)
                        misclass_cost_val[val_labels == 0] = val_cost_matrix[:, 1, 0][:, None][val_labels == 0]
                        misclass_cost_val[val_labels == 1] = val_cost_matrix[:, 0, 1][:, None][val_labels == 1]
                        val_loss = nn.BCELoss(weight=misclass_cost_val)(val_outputs, val_labels)
                    elif self.obj == 'aec':
                        val_inputs, val_labels, val_cost_matrix = val_data
                        val_outputs = model(val_inputs)
                        val_loss = self.expected_cost(val_outputs, val_labels, val_cost_matrix)
                    total_val_loss += val_loss
            end = timer()
            if total_val_loss < best_val_loss:
                # Is improvement large enough?
                # If difference in val_loss is < 10**-1  # Todo: increase?
                if best_val_loss - total_val_loss < 10**-1:
                    epochs_not_improved += 1
                    # Todo: delete next line
                    # print('\t\tDifference: {}'.format(best_val_loss - total_val_loss))
                    if epochs_not_improved > early_stopping_criterion:
                        print(
                            '\t\tEarly stopping criterion reached: validation loss not significantly improved for {}'
                            ' epochs.'.format(
                                epochs_not_improved - 1))
                        print('\t\tInsufficient improvement in validation loss')
                        break
                else:
                    epochs_not_improved = 0
                best_val_loss = total_val_loss
                checkpoint = {
                    'epoch': epoch + 1,
                    'best validation loss': best_val_loss,
                    'model': model.state_dict(),
                    'optimizer': optimizer.state_dict()}
                torch.save(checkpoint, 'checkpoint')
                if verbose:
                    if epoch % 1 == 0:
                        # NOTE(review): the extra "/ 4" presumably compensates for
                        # the val batch size of batch_size/4 -- verify the scaling.
                        print('\t\t[Epoch %d]\tloss: %.8f\tval_loss: %.8f\tTime [s]: %.2f\tModel saved!' % (
                            epoch + 1, running_loss / len(train_ds), total_val_loss / len(val_ds) / 4, end-start))
            else:
                epochs_not_improved += 1
                if epochs_not_improved > early_stopping_criterion:
                    print('\t\tEarly stopping criterion reached: validation loss not significantly improved for {}'
                          ' epochs.'.format(
                            epochs_not_improved - 1))
                    break
                if verbose:
                    if epoch % 10 == 9:
                        print('\t\t[Epoch %d]\tloss: %.8f\tval_loss: %.8f\tTime [s]: %.2f' % (
                            epoch + 1, running_loss / len(train_ds), total_val_loss / len(val_ds) / 4, end-start))
        # Restore the best checkpoint before returning
        best_checkpoint = torch.load('checkpoint')
        model.load_state_dict(best_checkpoint['model'])
        if verbose:
            print('\tFinished training! Best validation loss at epoch %d (loss: %.8f)\n'
                  % (best_checkpoint['epoch'], best_val_loss / len(val_ds) / 4))
        if best_checkpoint['epoch'] > (n_epochs - early_stopping_criterion):
            warnings.warn('Number of epochs might have to be increased!')
        return model
def model_predict(self, model, X_test):
    """Return the predicted probability P(y=1) for every row of ``X_test``.

    The whole test set is pushed through the network as a single batch;
    the labels handed to the dataset are dummies (only needed for the loss
    during training, never at prediction time).
    """
    dummy_labels = torch.zeros(len(X_test)).float()
    dataset = CostInsensitiveDataset(torch.from_numpy(X_test).float(), dummy_labels)
    loader = DataLoader(dataset, batch_size=X_test.shape[0])  # one batch holds everything
    batch_outputs = []
    model.eval()
    with torch.no_grad():
        for features, _ in loader:
            batch_outputs.append(model(features).flatten())
    # Single batch by construction, so the first entry is the full result.
    return batch_outputs[0].numpy()  # TODO: not too clean ...
def expected_cost(self, output, target, cost_matrix):
    """Mean expected cost of probabilistic predictions.

    ``output`` holds P(y=1) per instance, ``target`` the true labels, and
    ``cost_matrix`` a per-instance 2x2 matrix indexed [predicted, actual].
    Each instance's four cell costs are weighted by the predicted
    probability and the label; the mean over instances is returned.
    """
    cost_if_positive = output * cost_matrix[:, 1, 1] + (1 - output) * cost_matrix[:, 0, 1]
    cost_if_negative = output * cost_matrix[:, 1, 0] + (1 - output) * cost_matrix[:, 0, 0]
    expected = target * cost_if_positive + (1 - target) * cost_if_negative
    return expected.mean()
def _tune_val_loss(self, scores_val, y_val, cost_matrix_val):
    """Validation loss (without regularization term) for the current objective.

    Mirrors the training objective: 'ce' is plain cross-entropy,
    'weightedce' weights each instance's cross-entropy by its
    misclassification cost, and 'aec' is the average expected cost.
    Previously this logic was copy-pasted three times inside tune().
    """
    eps = 1e-9  # small value to avoid log(0)
    if self.obj == 'ce':
        ce = - (y_val * np.log(scores_val + eps) + (1 - y_val) * np.log(1 - scores_val + eps))
        return ce.mean()
    elif self.obj == 'weightedce':
        ce = - (y_val * np.log(scores_val + eps) + (1 - y_val) * np.log(1 - scores_val + eps))
        # Cost of misclassifying each instance: FP cost for negatives, FN cost for positives.
        cost_misclass = np.zeros(len(y_val))
        cost_misclass[y_val == 0] = cost_matrix_val[:, 1, 0][y_val == 0]
        cost_misclass[y_val == 1] = cost_matrix_val[:, 0, 1][y_val == 1]
        return (cost_misclass * ce).mean()
    elif self.obj == 'aec':
        # Average expected cost (same formula as expected_cost, in numpy).
        ec = y_val * (
                scores_val * cost_matrix_val[:, 1, 1] + (1 - scores_val) * cost_matrix_val[:, 0, 1]) \
            + (1 - y_val) * (
                scores_val * cost_matrix_val[:, 1, 0] + (1 - scores_val) * cost_matrix_val[:, 0, 0])
        return ec.mean()
    # Was a silent NameError before; fail loudly on an unknown objective.
    raise ValueError('Unknown objective: {}'.format(self.obj))

def tune(self, l1, lambda1_list, l2, lambda2_list, neurons_list, x_train, y_train, cost_matrix_train, x_val, y_val,
         cost_matrix_val):
    """Grid-search the number of neurons and (optionally) an L1 or L2 penalty.

    For every candidate neuron count a network is trained per candidate
    lambda (or once, unregularized, when neither l1 nor l2 is set) and
    scored on the validation set with the unregularized objective loss.
    Side effects: self.lambda1/self.lambda2 are updated to the best values.
    Returns a fresh, untrained CSNeuralNetwork configured with the winners.
    """
    # results rows: 0 = neuron count, 1 = best lambda, 2 = best val loss.
    results = np.ones((3, len(neurons_list)))
    results[0, :] = neurons_list
    for i, n_neurons in enumerate(neurons_list):
        print('Number of neurons: {}'.format(n_neurons))
        if l1:
            self.lambda2 = 0
            losses_list_l1 = []
            for lambda1 in lambda1_list:
                net = CSNeuralNetwork(n_inputs=x_train.shape[1], cost_sensitive=self.cost_sensitive, obj=self.obj,
                                      lambda1=lambda1, n_neurons=n_neurons)
                net = net.model_train(net, x_train, y_train, x_val, y_val,
                                      cost_matrix_train=cost_matrix_train, cost_matrix_val=cost_matrix_val)
                scores_val = net.model_predict(net, x_val)
                # Evaluate loss (without regularization term!)
                net.lambda1 = 0
                val_loss = self._tune_val_loss(scores_val, y_val, cost_matrix_val)
                print('\t\tLambda l1 = %.4f;\tLoss = %.5f' % (lambda1, val_loss))
                losses_list_l1.append(val_loss)
            lambda1_opt = lambda1_list[np.argmin(losses_list_l1)]
            print('\tOptimal lambda = %.4f' % lambda1_opt)
            self.lambda1 = lambda1_opt
            results[1, i] = lambda1_opt
            results[2, i] = np.min(losses_list_l1)
        elif l2:
            self.lambda1 = 0
            losses_list_l2 = []
            for lambda2 in lambda2_list:
                net = CSNeuralNetwork(n_inputs=x_train.shape[1], cost_sensitive=self.cost_sensitive, obj=self.obj,
                                      lambda2=lambda2, n_neurons=n_neurons)
                net = net.model_train(net, x_train, y_train, x_val, y_val,
                                      cost_matrix_train=cost_matrix_train, cost_matrix_val=cost_matrix_val)
                scores_val = net.model_predict(net, x_val)
                # Evaluate loss (without regularization term!)
                net.lambda2 = 0
                val_loss = self._tune_val_loss(scores_val, y_val, cost_matrix_val)
                print('\t\tLambda l2 = %.4f;\tLoss = %.5f' % (lambda2, val_loss))
                losses_list_l2.append(val_loss)
            lambda2_opt = lambda2_list[np.argmin(losses_list_l2)]
            print('\tOptimal lambda = %.4f' % lambda2_opt)
            self.lambda2 = lambda2_opt
            results[1, i] = lambda2_opt
            results[2, i] = np.min(losses_list_l2)
        else:
            # No regularization: a single run per neuron count.
            self.lambda1 = 0
            self.lambda2 = 0
            net = CSNeuralNetwork(n_inputs=x_train.shape[1], cost_sensitive=self.cost_sensitive, obj=self.obj,
                                  n_neurons=n_neurons)
            net = net.model_train(net, x_train, y_train, x_val, y_val, cost_matrix_train=cost_matrix_train,
                                  cost_matrix_val=cost_matrix_val, verbose=True)
            scores_val = net.model_predict(net, x_val)
            val_loss = self._tune_val_loss(scores_val, y_val, cost_matrix_val)
            print('\t\tNumber of neurons = %i;\tLoss = %.5f' % (n_neurons, val_loss))
            results[2, i] = val_loss
    # Assign best settings
    opt_ind = np.argmin(results[2, :])
    opt_n_neurons = int(results[0, opt_ind])
    print('Optimal number of neurons: {}'.format(opt_n_neurons))
    if l1:
        self.lambda1 = results[1, opt_ind]
        print('Optimal l1: {}'.format(self.lambda1))
    if l2:
        self.lambda2 = results[1, opt_ind]
        print('Optimal l2: {}'.format(self.lambda2))
    return CSNeuralNetwork(self.n_inputs, self.cost_sensitive, self.obj, self.lambda1, self.lambda2, opt_n_neurons)
|
"""Common framework used to run hangups examples."""
import argparse
import asyncio
import logging
import os
import hangups
import appdirs
def run_example(example_coroutine, *extra_args):
    """Run a hangups example coroutine.

    Args:
        example_coroutine (coroutine): Coroutine to run with a connected
            hangups client and arguments namespace as arguments.
        extra_args (str): Any extra command line arguments required by the
            example.
    """
    args = _get_parser(extra_args).parse_args()
    logging.basicConfig(level=logging.DEBUG if args.debug else logging.WARNING)
    # Obtain hangups authentication cookies, prompting for credentials from
    # standard input if necessary.
    cookies = hangups.auth.get_auth_stdin(args.token_path)
    client = hangups.Client(cookies)
    # asyncio.async() is a SyntaxError since Python 3.7 (`async` became a
    # keyword) and was removed in 3.10; ensure_future is the replacement.
    task = asyncio.ensure_future(_async_main(example_coroutine, client, args))
    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(task)
    except KeyboardInterrupt:
        # Cancel the main task and let it (and hangups' disconnect logic)
        # unwind cleanly before closing the loop.
        task.cancel()
        loop.run_forever()
    finally:
        loop.close()
def _get_parser(extra_args):
    """Return an ArgumentParser extended with the example's extra arguments."""
    cache_dir = appdirs.AppDirs('hangups', 'hangups').user_cache_dir
    token_default = os.path.join(cache_dir, 'refresh_token.txt')
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument('--token-path', default=token_default,
                        help='path used to store OAuth refresh token')
    parser.add_argument('-d', '--debug', action='store_true',
                        help='log detailed debugging messages')
    # Each example may require additional (mandatory) arguments.
    for extra in extra_args:
        parser.add_argument(extra, required=True)
    return parser
@asyncio.coroutine
def _async_main(example_coroutine, client, args):
"""Run the example coroutine."""
# Spawn a task for hangups to run in parallel with the example coroutine.
task = asyncio.async(client.connect())
# Wait for hangups to either finish connecting or raise an exception.
on_connect = asyncio.Future()
client.on_connect.add_observer(lambda: on_connect.set_result(None))
done, _ = yield from asyncio.wait(
(on_connect, task), return_when=asyncio.FIRST_COMPLETED
)
yield from asyncio.gather(*done)
# Run the example coroutine. Afterwards, disconnect hangups gracefully and
# yield the hangups task to handle any exceptions.
try:
yield from example_coroutine(client, args)
finally:
yield from client.disconnect()
yield from task
|
# -*- coding: utf-8 -*-
__author__ = "Rogers de Pelle"
__contact__ = "@rogersdepelle"
import copy
import json
import sys
import math
import os
import re
import time
import warnings
from naive_bayes import learn, classify
def standard_deviation(s):
    """Population standard deviation of the values in ``s``.

    s: Values list for calculating the standard deviation
    """
    avg = sum(s) / float(len(s))
    # List comprehension instead of map(): under Python 3 map() returns a
    # lazy iterator, which would break len(var) below.
    var = [(x - avg) ** 2 for x in s]
    return math.sqrt(sum(var) / float(len(var)))
def clean_text(text, option):
    """Strip HTML tags and punctuation from ``text``; optionally filter more.

    text: String
    option: Filter option to be applied to the text (2/4 keep only letters
        and spaces; 3/4 additionally drop stopwords loaded from
        stopwords.json next to this module).
    """
    text = re.sub('<[^>]*>', '', text)
    text = re.sub('[.,;!?]', ' ', text)
    if option == 2 or option == 4:
        text = re.sub('[^a-zA-Z ]', '', text)
    if option == 3 or option == 4:
        warnings.simplefilter("ignore", UnicodeWarning)
        # Build an absolute path: the old os.path.dirname(__file__) +
        # '/stopwords.json' produced the bogus path '/stopwords.json'
        # whenever the script was run from its own directory (dirname == '').
        stopwords_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), 'stopwords.json')
        # `with` closes the file; the previous open() leaked the handle.
        with open(stopwords_path, 'r') as stopwords_file:
            stopwords = json.loads(stopwords_file.read())
        text = ' '.join([word for word in text.split() if word not in stopwords])
    return text
def get_text(begin, end, positive_path, negative_path, option):
    """Load and clean review files ``begin``..``end`` from both folders.

    positive_path: Path of positive files
    negative_path: Path of negative files
    Returns a dict {'pos': [...], 'neg': [...]} of cleaned texts.
    Exits the program if any expected file cannot be read.
    """
    docs = {'pos': [], 'neg': []}
    # range() works on Python 2 as well; the duplicated pos/neg branches are
    # collapsed into one inner loop.
    for x in range(begin, end + 1):
        for label, folder in (('pos', positive_path), ('neg', negative_path)):
            file_path = folder + str(x) + ".txt"
            try:
                # `with` closes each file; the old code leaked every handle.
                with open(file_path) as handle:
                    docs[label].append(clean_text(handle.read(), option))
            except IOError:
                # Narrowed from a bare except: only trap I/O failures.
                print("Invalid file: " + file_path)
                sys.exit(0)
    return docs
def cross_validation(docs, values, k):
    """k-fold cross validation of the naive Bayes classifier.

    docs: Dict with text lists separated by value
    values: Target values of texts
    k: Number of cross-validation folds
    Returns a list of k partial confusion matrices.
    """
    group_size = {}
    confusion_matrix = []
    m = {'true': {}, 'false': {}}
    for value in values:
        # Floor division keeps integer semantics on both Python 2 and 3
        # (plain `/` would yield floats under Python 3 and break slicing).
        group_size[value] = len(docs[value]) // k
        m['true'][value] = 0
        m['false'][value] = 0
    # range() instead of the Python-2-only xrange().
    for i in range(k):
        training = copy.deepcopy(docs)
        confusion_matrix.insert(i, copy.deepcopy(m))
        for value in values:
            # Hold out fold i of this class as the test set.
            begin = i * group_size[value]
            end = (i + 1) * group_size[value]
            test = training[value][begin:end]
            del training[value][begin:end]
            probabilities, vocabulary = learn(training, values)
            for doc in test:
                prob_value = classify(doc, probabilities, vocabulary, values)
                if value == prob_value:
                    confusion_matrix[i]['true'][value] += 1
                else:
                    confusion_matrix[i]['false'][prob_value] += 1
    return confusion_matrix
def evaluation(confusion_matrix, log, values):
    """Aggregate the partial confusion matrices and write a report to ``log``.

    confusion_matrix: Partial confusion matrices
    log: Open file-like object used for recording the results
    Returns the aggregated per-value counts.
    """
    matrix = {'true': {}, 'false': {}}
    for value in values:
        matrix['true'][value] = []
        matrix['false'][value] = []
    # Per-fold matrices, numbered from 1 in the report.
    for index, partial in enumerate(confusion_matrix, 1):
        log.write("Matrix " + str(index))
        for value in values:
            log.write("\nTrue %s: %5d | False %s: %5d"
                      % (value, partial['true'][value], value, partial['false'][value]))
            matrix['true'][value].append(partial['true'][value])
            matrix['false'][value].append(partial['false'][value])
        log.write("\n\n")
    # Totals across all folds.
    log.write("General Matrix")
    for value in values:
        log.write("\nTrue %s: %5d | False %s: %5d"
                  % (value, sum(matrix['true'][value]), value, sum(matrix['false'][value])))
    # Spread of the per-fold counts.
    log.write("\n\nStandard Deviation")
    for value in values:
        log.write("\nTrue %s: %5.2f | False %s: %5.2f"
                  % (value, standard_deviation(matrix['true'][value]),
                     value, standard_deviation(matrix['false'][value])))
    return matrix
def main():
start = time.time()
values = ['pos', 'neg']
option = -1
begin = 0
end = 24999
if len(sys.argv) >= 3:
if os.path.exists(sys.argv[1]) and os.path.exists(sys.argv[2]):
positive_path = sys.argv[1]
negative_path = sys.argv[2]
else:
print "\nInvalid paths!"
sys.exit(0)
else:
print "\nInsert folders path.\n"
sys.exit(0)
print "\nNaive Bayes:"
print "\nChoose the files range (0-24999)"
while option < begin or option > end:
try:
option = int(raw_input('\nBegin: '))
if option < begin or option > end:
print "\nInvalid Number!"
except:
option = 0
print "\nInvalid Number!"
begin = option
while option <= begin or option > end or option - begin < 10:
try:
option = int(raw_input('\nEnd: '))
if option <= begin or option > end or option - begin < 10:
print "\nInvalid Number!"
except:
option = 0
print "\nInvalid Numbern!"
end = option
option = 0
print "\n1. Basic classification"
print "2. Classification removing special characters"
print "3. Classification removing stopwords"
print "4. Classification removing special characters and stopwords"
while option < 1 or option > 4:
try:
option = int(raw_input('\nChose a option: '))
if option < 1 or option > 4:
print "\nInvalid option!"
except:
option = 0
print "\nInvalid option!"
docs = get_text(begin, end, positive_path, negative_path, option)
confusion_matrix = cross_validation(docs, values, 10)
log = open('log' + str(option) + '.txt', 'w')
matrix = evaluation(confusion_matrix, log, values)
tp = sum(matrix['true']['pos'])
tn = sum(matrix['true']['neg'])
fp = sum(matrix['false']['pos'])
fn = sum(matrix['false']['neg'])
precision = (tp / float(tp + fp))
recall = (tp / float(tp + fn))
f1 = 2 * precision * recall / (precision + recall)
log.write("\n\nPrecision: %.2f%%" % (precision * 100))
log.write("\nRecall: %.2f%%" % (recall * 100))
log.write("\nTrue pos: " + str(tp))
log.write("\nFalse pos: " + str(fp))
log.write("\nF1 Score: %.2f%%" % (f1 * 100))
log.write("\nTime: %.0fs" % (time.time() - start))
log.close()
print "\nThe results were saved the log" + str(option) + ".txt\n"
if __name__ == "__main__":
main()
|
/**
* Default property values of interactive objects
* Used by {@link PIXI.interaction.InteractionManager} to automatically give all DisplayObjects these properties
*
* @mixin
* @name interactiveTarget
* @memberof PIXI.interaction
* @example
* function MyObject() {}
*
* Object.assign(
* core.DisplayObject.prototype,
* PIXI.interaction.interactiveTarget
* );
*/
export default {
  /**
   * Determines if the displayObject can be clicked/touched
   *
   * @member {boolean}
   * @memberof PIXI.interaction.interactiveTarget#
   */
  interactive: false,
  /**
   * Determines if the children of the displayObject can be clicked/touched
   * Setting this to false allows pixi to bypass a recursive hitTest function
   *
   * @member {boolean}
   * @memberof PIXI.interaction.interactiveTarget#
   */
  interactiveChildren: true,
  /**
   * Interaction shape. Children will be hit first, then this shape will be checked.
   * Setting this will cause this shape to be checked in hit tests rather than the displayObject's bounds.
   *
   * @member {PIXI.Rectangle|PIXI.Circle|PIXI.Ellipse|PIXI.Polygon|PIXI.RoundedRectangle}
   * @memberof PIXI.interaction.interactiveTarget#
   */
  hitArea: null,
  /**
   * If enabled, the mouse cursor uses the pointer behavior when hovered over the displayObject if it is interactive
   * Setting this changes the 'cursor' property to `'pointer'`.
   *
   * buttonMode is a convenience alias: it is true exactly when `cursor === 'pointer'`.
   *
   * @member {boolean}
   * @memberof PIXI.interaction.interactiveTarget#
   */
  get buttonMode()
  {
    return this.cursor === 'pointer';
  },
  set buttonMode(value)
  {
    if (value)
    {
      this.cursor = 'pointer';
    }
    // Only clear the cursor if buttonMode set it; a custom cursor is preserved.
    else if (this.cursor === 'pointer')
    {
      this.cursor = null;
    }
  },
  /**
   * This defines what cursor mode is used when the mouse cursor
   * is hovered over the displayObject.
   *
   * @see https://developer.mozilla.org/en/docs/Web/CSS/cursor
   *
   * @member {string}
   * @memberof PIXI.interaction.interactiveTarget#
   */
  cursor: null,
  /**
   * Internal set of all active pointers, keyed by pointer identifier.
   * Note: implemented as a plain object used as a dictionary (lazily
   * created on first access), not an actual Map instance.
   *
   * @member {Object.<number, InteractionTrackingData>}
   * @memberof PIXI.interaction.interactiveTarget#
   * @private
   */
  get trackedPointers()
  {
    if (this._trackedPointers === undefined) this._trackedPointers = {};
    return this._trackedPointers;
  },
  /**
   * Backing store for trackedPointers; left undefined until first use so
   * non-interactive objects pay no cost. Use trackedPointers to access.
   *
   * @private
   * @type {Object.<number, InteractionTrackingData>}
   */
  _trackedPointers: undefined,
};
|
# Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.d (the "License");
# you may not use this file except in compliance with the License.
#
"""Userbot module containing hash and encode/decode commands."""
import hashlib
import os
from subprocess import PIPE
from subprocess import run as runapp

import pybase64

from userbot import CMD_HELP
from userbot.events import register
@register(outgoing=True, pattern=r"\.hash (.*)")
async def gethash(hash_q):
"""For .hash command, find the md5, sha1, sha256, sha512 of the string."""
hashtxt_ = hash_q.pattern_match.group(1)
with open("hashdis.txt", "w+") as hashtxt:
hashtxt.write(hashtxt_)
md5 = runapp(["md5sum", "hashdis.txt"], stdout=PIPE)
md5 = md5.stdout.decode()
sha1 = runapp(["sha1sum", "hashdis.txt"], stdout=PIPE)
sha1 = sha1.stdout.decode()
sha256 = runapp(["sha256sum", "hashdis.txt"], stdout=PIPE)
sha256 = sha256.stdout.decode()
sha512 = runapp(["sha512sum", "hashdis.txt"], stdout=PIPE)
runapp(["rm", "hashdis.txt"], stdout=PIPE)
sha512 = sha512.stdout.decode()
ans = ("Text: `" + hashtxt_ + "`\nMD5: `" + md5 + "`SHA1: `" + sha1 +
"`SHA256: `" + sha256 + "`SHA512: `" + sha512[:-1] + "`")
if len(ans) > 4096:
with open("hashes.txt", "w+") as hashfile:
hashfile.write(ans)
await hash_q.client.send_file(
hash_q.chat_id,
"hashes.txt",
reply_to=hash_q.id,
caption="`It's too big, sending a text file instead. `")
runapp(["rm", "hashes.txt"], stdout=PIPE)
else:
await hash_q.reply(ans)
@register(outgoing=True, pattern=r"\.base64 (en|de) (.*)")
async def endecrypt(query):
"""For .base64 command, find the base64 encoding of the given string."""
if query.pattern_match.group(1) == "en":
lething = str(
pybase64.b64encode(bytes(query.pattern_match.group(2),
"utf-8")))[2:]
await query.reply("Encoded: `" + lething[:-1] + "`")
else:
lething = str(
pybase64.b64decode(bytes(query.pattern_match.group(2), "utf-8"),
validate=True))[2:]
await query.reply("Decoded: `" + lething[:-1] + "`")
# Register the inline help entries for the two commands in this module.
CMD_HELP.update({
    "base64": "`.base64` <en|de> <text>\nUsage: Find the base64 encoding.\n\nen:encode , de:decode."})
CMD_HELP.update({
    "hash": "`.hash` <text>\nFind the md5, sha1, sha256, sha512 of the string when written into a txt file."})
|
/*
* This header is generated by classdump-dyld 1.0
* on Tuesday, November 5, 2019 at 2:50:45 AM Mountain Standard Time
* Operating System: Version 13.0 (Build 17J586)
* Image Source: /System/Library/Frameworks/SceneKit.framework/SceneKit
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos.
*/
#import <SceneKit/SceneKit-Structs.h>
#import <SceneKit/NSSecureCoding.h>
#import <SceneKit/NSCopying.h>
@class CAAnimation, SCNNode, NSString;
// Presumably drives the particle-system property named by `inputProperty`
// from a Core Animation animation, with an input mode/scale/bias and an
// optional origin node — TODO confirm against SceneKit's public
// SCNParticlePropertyController documentation.
// NOTE(review): header reconstructed by classdump-dyld; comments added for
// readability only.
@interface SCNParticlePropertyController : NSObject <NSSecureCoding, NSCopying> {
    CAAnimation* _animation;        // backing store for the `animation` property
    long long _inputMode;
    double _inputScale;
    double _inputBias;
    SCNNode* _inputOrigin;          // weak property backing (see inputOrigin)
    NSString* _inputProperty;
    id _c3dAnimation;               // private lower-level animation object
}
@property (nonatomic,retain) CAAnimation * animation;
@property (assign,nonatomic) long long inputMode;
@property (assign,nonatomic) double inputScale;
@property (assign,nonatomic) double inputBias;
@property (assign,nonatomic,__weak) SCNNode * inputOrigin;
@property (nonatomic,copy) NSString * inputProperty;
+(BOOL)supportsSecureCoding;
// Convenience constructors wrapping a CAAnimation.
+(id)particlePropertyControllerWithAnimation:(id)arg1 ;
+(id)controllerWithAnimation:(id)arg1 ;
-(id)copyWithZone:(NSZone*)arg1 ;
-(id)copy;
-(id)init;
-(void)dealloc;
-(void)encodeWithCoder:(id)arg1 ;
-(id)initWithCoder:(id)arg1 ;
-(CAAnimation *)animation;
-(void)setAnimation:(CAAnimation *)arg1 ;
-(double)inputScale;
-(void)setInputScale:(double)arg1 ;
-(double)inputBias;
-(void)setInputBias:(double)arg1 ;
-(long long)inputMode;
-(void)setInputMode:(long long)arg1 ;
-(SCNNode *)inputOrigin;
-(void)setInputOrigin:(SCNNode *)arg1 ;
-(void)setC3dAnimation:(id)arg1 ;
-(NSString *)inputProperty;
-(id)c3dAnimation;
-(void)setInputProperty:(NSString *)arg1 ;
// Private custom (de)serialization hooks used alongside NSSecureCoding.
-(void)_customEncodingOfSCNParticlePropertyController:(id)arg1 ;
-(void)_customDecodingOfSCNParticlePropertyController:(id)arg1 ;
@end
|
import _extends from "@babel/runtime/helpers/esm/extends";
import _slicedToArray from "@babel/runtime/helpers/esm/slicedToArray";
import _objectWithoutProperties from "@babel/runtime/helpers/esm/objectWithoutProperties";
/* eslint-disable jsx-a11y/click-events-have-key-events, jsx-a11y/no-static-element-interactions */
import React from 'react';
import PropTypes from 'prop-types';
import warning from 'warning';
import clsx from 'clsx';
import formControlState from '../FormControl/formControlState';
import FormControlContext, { useFormControl } from '../FormControl/FormControlContext';
import withStyles from '../styles/withStyles';
import { useForkRef } from '../utils/reactHelpers';
import TextareaAutosize from '../TextareaAutosize';
import { isFilled } from './utils';
/* JSS style rules for InputBase, parameterized by the Material-UI theme.
   (Babel-transpiled output; the `8 - 2` style arithmetic is preserved from
   the original source's padding math.) */
export var styles = function styles(theme) {
  var light = theme.palette.type === 'light';
  // Shared placeholder appearance, faded per theme type.
  var placeholder = {
    color: 'currentColor',
    opacity: light ? 0.42 : 0.5,
    transition: theme.transitions.create('opacity', {
      duration: theme.transitions.duration.shorter
    })
  };
  var placeholderHidden = {
    opacity: '0 !important'
  };
  var placeholderVisible = {
    opacity: light ? 0.42 : 0.5
  };
  return {
    /* Styles applied to the root element. */
    root: {
      // Mimics the default input display property used by browsers for an input.
      fontFamily: theme.typography.fontFamily,
      color: theme.palette.text.primary,
      fontSize: theme.typography.pxToRem(16),
      lineHeight: '1.1875em',
      // Reset (19px), match the native input line-height
      boxSizing: 'border-box',
      // Prevent padding issue with fullWidth.
      position: 'relative',
      cursor: 'text',
      display: 'inline-flex',
      alignItems: 'center',
      '&$disabled': {
        color: theme.palette.text.disabled,
        cursor: 'default'
      }
    },
    /* Styles applied to the root element if the component is a descendant of `FormControl`. */
    formControl: {},
    /* Styles applied to the root element if the component is focused. */
    focused: {},
    /* Styles applied to the root element if `disabled={true}`. */
    disabled: {},
    /* Styles applied to the root element if `startAdornment` is provided. */
    adornedStart: {},
    /* Styles applied to the root element if `endAdornment` is provided. */
    adornedEnd: {},
    /* Styles applied to the root element if `error={true}`. */
    error: {},
    /* Styles applied to the `input` element if `margin="dense"`. */
    marginDense: {},
    /* Styles applied to the root element if `multiline={true}`. */
    multiline: {
      padding: "".concat(8 - 2, "px 0 ").concat(8 - 1, "px"),
      '&$marginDense': {
        paddingTop: 4 - 1
      }
    },
    /* Styles applied to the root element if `fullWidth={true}`. */
    fullWidth: {
      width: '100%'
    },
    /* Styles applied to the `input` element. */
    input: {
      font: 'inherit',
      color: 'currentColor',
      padding: "".concat(8 - 2, "px 0 ").concat(8 - 1, "px"),
      border: 0,
      boxSizing: 'content-box',
      background: 'none',
      height: '1.1875em',
      // Reset (19px), match the native input line-height
      margin: 0,
      // Reset for Safari
      // Remove grey highlight
      WebkitTapHighlightColor: 'transparent',
      display: 'block',
      // Make the flex item shrink with Firefox
      minWidth: 0,
      width: '100%',
      // Fix IE 11 width issue
      '&::-webkit-input-placeholder': placeholder,
      '&::-moz-placeholder': placeholder,
      // Firefox 19+
      '&:-ms-input-placeholder': placeholder,
      // IE 11
      '&::-ms-input-placeholder': placeholder,
      // Edge
      '&:focus': {
        outline: 0
      },
      // Reset Firefox invalid required input style
      '&:invalid': {
        boxShadow: 'none'
      },
      '&::-webkit-search-decoration': {
        // Remove the padding when type=search.
        '-webkit-appearance': 'none'
      },
      // Show and hide the placeholder logic
      'label[data-shrink=false] + $formControl &': {
        '&::-webkit-input-placeholder': placeholderHidden,
        '&::-moz-placeholder': placeholderHidden,
        // Firefox 19+
        '&:-ms-input-placeholder': placeholderHidden,
        // IE 11
        '&::-ms-input-placeholder': placeholderHidden,
        // Edge
        '&:focus::-webkit-input-placeholder': placeholderVisible,
        '&:focus::-moz-placeholder': placeholderVisible,
        // Firefox 19+
        '&:focus:-ms-input-placeholder': placeholderVisible,
        // IE 11
        '&:focus::-ms-input-placeholder': placeholderVisible // Edge
      },
      '&$disabled': {
        opacity: 1 // Reset iOS opacity
      }
    },
    /* Styles applied to the `input` element if `margin="dense"`. */
    inputMarginDense: {
      paddingTop: 4 - 1
    },
    /* Styles applied to the `input` element if `select={true}`. */
    inputSelect: {
      paddingRight: 24
    },
    /* Styles applied to the `input` element if `multiline={true}`. */
    inputMultiline: {
      height: 'auto',
      resize: 'none',
      padding: 0
    },
    /* Styles applied to the `input` element if `type="search"`. */
    inputTypeSearch: {
      // Improve type search style.
      '-moz-appearance': 'textfield',
      '-webkit-appearance': 'textfield'
    },
    /* Styles applied to the `input` element if `startAdornment` is provided. */
    inputAdornedStart: {},
    /* Styles applied to the `input` element if `endAdornment` is provided. */
    inputAdornedEnd: {},
    /* Styles applied to the `input` element if `hiddenLabel={true}`. */
    inputHiddenLabel: {}
  };
};
// useLayoutEffect requires a DOM; fall back to useEffect during server-side rendering.
var useEnhancedEffect = typeof window === 'undefined' ? React.useEffect : React.useLayoutEffect;
/**
 * `InputBase` contains as few styles as possible.
 * It aims to be a simple building block for creating an input.
 * It contains a load of style reset and some state logic.
 */
var InputBase = React.forwardRef(function InputBase(props, ref) {
  // Babel-transpiled destructuring of props with defaults.
  var ariaDescribedby = props['aria-describedby'],
      autoComplete = props.autoComplete,
      autoFocus = props.autoFocus,
      classes = props.classes,
      classNameProp = props.className,
      defaultValue = props.defaultValue,
      disabled = props.disabled,
      endAdornment = props.endAdornment,
      error = props.error,
      _props$fullWidth = props.fullWidth,
      fullWidth = _props$fullWidth === void 0 ? false : _props$fullWidth,
      id = props.id,
      _props$inputComponent = props.inputComponent,
      inputComponent = _props$inputComponent === void 0 ? 'input' : _props$inputComponent,
      _props$inputProps = props.inputProps;
  _props$inputProps = _props$inputProps === void 0 ? {} : _props$inputProps;
  var inputPropsClassName = _props$inputProps.className,
      inputPropsProp = _objectWithoutProperties(_props$inputProps, ["className"]),
      inputRefProp = props.inputRef,
      margin = props.margin,
      _props$multiline = props.multiline,
      multiline = _props$multiline === void 0 ? false : _props$multiline,
      name = props.name,
      onBlur = props.onBlur,
      onChange = props.onChange,
      onClick = props.onClick,
      onFocus = props.onFocus,
      onKeyDown = props.onKeyDown,
      onKeyUp = props.onKeyUp,
      placeholder = props.placeholder,
      readOnly = props.readOnly,
      renderSuffix = props.renderSuffix,
      rows = props.rows,
      rowsMax = props.rowsMax,
      _props$select = props.select,
      select = _props$select === void 0 ? false : _props$select,
      startAdornment = props.startAdornment,
      _props$type = props.type,
      type = _props$type === void 0 ? 'text' : _props$type,
      value = props.value,
      other = _objectWithoutProperties(props, ["aria-describedby", "autoComplete", "autoFocus", "classes", "className", "defaultValue", "disabled", "endAdornment", "error", "fullWidth", "id", "inputComponent", "inputProps", "inputRef", "margin", "multiline", "name", "onBlur", "onChange", "onClick", "onFocus", "onKeyDown", "onKeyUp", "placeholder", "readOnly", "renderSuffix", "rows", "rowsMax", "select", "startAdornment", "type", "value"]);
  // Controlled vs uncontrolled is decided once, on first render, from
  // whether `value` was provided.
  var _React$useRef = React.useRef(value != null),
      isControlled = _React$useRef.current;
  var inputRef = React.useRef();
  // Dev-only check that a custom `inputComponent` forwards its ref to a
  // real input element (or something focusable).
  var handleInputRefWarning = React.useCallback(function (instance) {
    process.env.NODE_ENV !== "production" ? warning(!instance || instance instanceof HTMLInputElement || instance.focus, ['Material-UI: you have provided a `inputComponent` to the input component', 'that does not correctly handle the `inputRef` prop.', 'Make sure the `inputRef` prop is called with a HTMLInputElement.'].join('\n')) : void 0;
  }, []);
  // Merge the local ref, the user-facing inputRef prop, any ref inside
  // inputProps, and the dev warning into one ref callback.
  var handleInputPropsRefProp = useForkRef(inputPropsProp.ref, handleInputRefWarning);
  var handleInputRefProp = useForkRef(inputRefProp, handleInputPropsRefProp);
  var handleInputRef = useForkRef(inputRef, handleInputRefProp);
  var _React$useState = React.useState(false),
      _React$useState2 = _slicedToArray(_React$useState, 2),
      focused = _React$useState2[0],
      setFocused = _React$useState2[1];
  var muiFormControl = useFormControl();
  if (process.env.NODE_ENV !== 'production') {
    // eslint-disable-next-line react-hooks/rules-of-hooks
    React.useEffect(function () {
      if (muiFormControl) {
        return muiFormControl.registerEffect();
      }
      return undefined;
    }, [muiFormControl]);
  }
  // Resolve each state from either the props or the enclosing FormControl.
  var fcs = formControlState({
    props: props,
    muiFormControl: muiFormControl,
    states: ['disabled', 'error', 'hiddenLabel', 'margin', 'required', 'filled']
  });
  fcs.focused = muiFormControl ? muiFormControl.focused : focused; // The blur won't fire when the disabled state is set on a focused input.
  // We need to book keep the focused state manually.
  React.useEffect(function () {
    if (!muiFormControl && disabled && focused) {
      setFocused(false);
      if (onBlur) {
        onBlur();
      }
    }
  }, [muiFormControl, disabled, focused, onBlur]);
  // Notify the FormControl whether the input currently holds a value, so
  // the label can shrink/expand accordingly.
  var checkDirty = React.useCallback(function (obj) {
    if (isFilled(obj)) {
      if (muiFormControl && muiFormControl.onFilled) {
        muiFormControl.onFilled();
      }
    } else if (muiFormControl && muiFormControl.onEmpty) {
      muiFormControl.onEmpty();
    }
  }, [muiFormControl]);
  useEnhancedEffect(function () {
    if (isControlled) {
      checkDirty({
        value: value
      });
    }
  }, [value, checkDirty, isControlled]);
  var handleFocus = function handleFocus(event) {
    // Fix a bug with IE 11 where the focus/blur events are triggered
    // while the input is disabled.
    if (fcs.disabled) {
      event.stopPropagation();
      return;
    }
    if (onFocus) {
      onFocus(event);
    }
    if (muiFormControl && muiFormControl.onFocus) {
      muiFormControl.onFocus(event);
    } else {
      setFocused(true);
    }
  };
  var handleBlur = function handleBlur(event) {
    if (onBlur) {
      onBlur(event);
    }
    if (muiFormControl && muiFormControl.onBlur) {
      muiFormControl.onBlur(event);
    } else {
      setFocused(false);
    }
  };
  var handleChange = function handleChange(event) {
    // Uncontrolled inputs track filled/empty state from the DOM element.
    if (!isControlled) {
      var element = event.target || inputRef.current;
      if (element == null) {
        throw new TypeError('Material-UI: Expected valid input target. ' + 'Did you use a custom `inputComponent` and forget to forward refs? ' + 'See https://material-ui.com/r/input-component-ref-interface for more info.');
      }
      checkDirty({
        value: element.value
      });
    } // Perform in the willUpdate
    if (onChange) {
      for (var _len = arguments.length, args = new Array(_len > 1 ? _len - 1 : 0), _key = 1; _key < _len; _key++) {
        args[_key - 1] = arguments[_key];
      }
      onChange.apply(void 0, [event].concat(args));
    }
  };
  // Clicking the root (e.g. the padding around the input) focuses the input.
  var handleClick = function handleClick(event) {
    if (inputRef.current && event.currentTarget === event.target) {
      inputRef.current.focus();
    }
    if (onClick) {
      onClick(event);
    }
  };
  var InputComponent = inputComponent;
  var inputProps = _extends({}, inputPropsProp, {
    ref: handleInputRef
  });
  if (typeof InputComponent !== 'string') {
    inputProps = _extends({
      // Rename ref to inputRef as we don't know the
      // provided `inputComponent` structure.
      inputRef: handleInputRef,
      type: type
    }, inputProps, {
      ref: null
    });
  } else if (multiline) {
    // Fixed-row multiline uses a plain textarea; otherwise autosize.
    if (rows && !rowsMax) {
      InputComponent = 'textarea';
    } else {
      inputProps = _extends({
        rows: rows,
        rowsMax: rowsMax
      }, inputProps);
      InputComponent = TextareaAutosize;
    }
  } else {
    inputProps = _extends({
      type: type
    }, inputProps);
  }
  // The inner FormControlContext.Provider resets the context to null so a
  // nested input does not pick up this input's FormControl.
  return React.createElement("div", _extends({
    className: clsx(classes.root, classNameProp, fcs.disabled && classes.disabled, fcs.error && classes.error, fullWidth && classes.fullWidth, fcs.focused && classes.focused, muiFormControl && classes.formControl, multiline && classes.multiline, startAdornment && classes.adornedStart, endAdornment && classes.adornedEnd, {
      dense: classes.marginDense
    }[fcs.margin]),
    onClick: handleClick,
    ref: ref
  }, other), startAdornment, React.createElement(FormControlContext.Provider, {
    value: null
  }, React.createElement(InputComponent, _extends({
    "aria-invalid": fcs.error,
    "aria-describedby": ariaDescribedby,
    autoComplete: autoComplete,
    autoFocus: autoFocus,
    className: clsx(classes.input, inputPropsClassName, fcs.disabled && classes.disabled, multiline && classes.inputMultiline, select && classes.inputSelect, fcs.hiddenLabel && classes.inputHiddenLabel, startAdornment && classes.inputAdornedStart, endAdornment && classes.inputAdornedEnd, {
      search: classes.inputTypeSearch
    }[type], {
      dense: classes.inputMarginDense
    }[fcs.margin]),
    defaultValue: defaultValue,
    disabled: fcs.disabled,
    id: id,
    name: name,
    onBlur: handleBlur,
    onChange: handleChange,
    onFocus: handleFocus,
    onKeyDown: onKeyDown,
    onKeyUp: onKeyUp,
    placeholder: placeholder,
    readOnly: readOnly,
    required: fcs.required,
    rows: rows,
    value: value
  }, inputProps))), endAdornment, renderSuffix ? renderSuffix(_extends({}, fcs, {
    startAdornment: startAdornment
  })) : null);
});
// Dev-only prop validation. The ternary evaluates to `void 0` when
// NODE_ENV === "production", so bundlers/minifiers can strip the whole
// propTypes object from production builds.
process.env.NODE_ENV !== "production" ? InputBase.propTypes = {
  /**
   * @ignore
   */
  'aria-describedby': PropTypes.string,
  /**
   * This prop helps users to fill forms faster, especially on mobile devices.
   * The name can be confusing, as it's more like an autofill.
   * You can learn more about it [following the specification](https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#autofill).
   */
  autoComplete: PropTypes.string,
  /**
   * If `true`, the `input` element will be focused during the first mount.
   */
  autoFocus: PropTypes.bool,
  /**
   * Override or extend the styles applied to the component.
   * See [CSS API](#css) below for more details.
   */
  classes: PropTypes.object.isRequired,
  /**
   * The CSS class name of the wrapper element.
   */
  className: PropTypes.string,
  /**
   * The default `input` element value. Use when the component is not controlled.
   */
  defaultValue: PropTypes.any,
  /**
   * If `true`, the `input` element will be disabled.
   */
  disabled: PropTypes.bool,
  /**
   * End `InputAdornment` for this component.
   */
  endAdornment: PropTypes.node,
  /**
   * If `true`, the input will indicate an error. This is normally obtained via context from
   * FormControl.
   */
  error: PropTypes.bool,
  /**
   * If `true`, the input will take up the full width of its container.
   */
  fullWidth: PropTypes.bool,
  /**
   * The id of the `input` element.
   */
  id: PropTypes.string,
  /**
   * The component used for the `input` element.
   * Either a string to use a DOM element or a component.
   */
  inputComponent: PropTypes.elementType,
  /**
   * [Attributes](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input#Attributes) applied to the `input` element.
   */
  inputProps: PropTypes.object,
  /**
   * This prop can be used to pass a ref callback to the `input` element.
   */
  inputRef: PropTypes.oneOfType([PropTypes.func, PropTypes.object]),
  /**
   * If `dense`, will adjust vertical spacing. This is normally obtained via context from
   * FormControl.
   */
  margin: PropTypes.oneOf(['dense', 'none']),
  /**
   * If `true`, a textarea element will be rendered.
   */
  multiline: PropTypes.bool,
  /**
   * Name attribute of the `input` element.
   */
  name: PropTypes.string,
  /**
   * @ignore
   */
  onBlur: PropTypes.func,
  /**
   * Callback fired when the value is changed.
   *
   * @param {object} event The event source of the callback.
   * You can pull out the new value by accessing `event.target.value`.
   */
  onChange: PropTypes.func,
  /**
   * @ignore
   */
  onClick: PropTypes.func,
  /**
   * @ignore
   */
  onFocus: PropTypes.func,
  /**
   * @ignore
   */
  onKeyDown: PropTypes.func,
  /**
   * @ignore
   */
  onKeyUp: PropTypes.func,
  /**
   * The short hint displayed in the input before the user enters a value.
   */
  placeholder: PropTypes.string,
  /**
   * It prevents the user from changing the value of the field
   * (not from interacting with the field).
   */
  readOnly: PropTypes.bool,
  /**
   * @ignore
   */
  renderSuffix: PropTypes.func,
  /**
   * If `true`, the `input` element will be required.
   */
  required: PropTypes.bool,
  /**
   * Number of rows to display when multiline option is set to true.
   */
  rows: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
  /**
   * Maximum number of rows to display when multiline option is set to true.
   */
  rowsMax: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
  /**
   * Should be `true` when the component hosts a select.
   */
  select: PropTypes.bool,
  /**
   * Start `InputAdornment` for this component.
   */
  startAdornment: PropTypes.node,
  /**
   * Type of the `input` element. It should be [a valid HTML5 input type](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input#Form_%3Cinput%3E_types).
   */
  type: PropTypes.string,
  /**
   * The value of the `input` element, required for a controlled component.
   */
  value: PropTypes.any
} : void 0;
// Bind the style sheet to the component under the 'MuiInputBase' theme key,
// then export the wrapped component as the module default.
const StyledInputBase = withStyles(styles, {
  name: 'MuiInputBase'
})(InputBase);
export default StyledInputBase;
|
/****************************************************************************
Copyright (c) 2011-2012 cocos2d-x.org
Copyright (c) 2013-2014 Chukong Technologies Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
/**
 * Enumeration of movement lifecycle events dispatched by armature animations.
 * @constant
 * @type {Object}
 */
ccs.MovementEventType = {
    start: 0,        // a movement has started playing
    complete: 1,     // a non-looping movement has finished
    loopComplete: 2  // one iteration of a looping movement has finished
};
/**
 * Legacy animation-event wrapper bundling a callback, its `this` target and
 * an argument list to invoke it with later.
 * @deprecated since v3.0.
 * @class
 * @extends ccs.Class
 */
ccs.AnimationEvent = ccs.Class.extend(/** @lends ccs.AnimationEvent# */{
    _arguments: null,
    _callFunc: null,
    _selectorTarget: null,

    /**
     * Constructor of ccs.AnimationEvent
     * @param {function} callFunc function invoked by call()
     * @param {object} target `this` binding used when invoking callFunc
     * @param {object} [data] extra payload kept on the event
     */
    ctor: function (callFunc, target, data) {
        this._callFunc = callFunc;
        this._selectorTarget = target;
        // NOTE(review): stored on _data, while call() applies _arguments
        // (set via setArguments) — presumably intentional legacy behavior.
        this._data = data;
    },

    // Invokes the stored callback (if any) on its target with the stored arguments.
    call: function () {
        if (!this._callFunc)
            return;
        this._callFunc.apply(this._selectorTarget, this._arguments);
    },

    // Replaces the argument list later passed to the callback by call().
    setArguments: function (args) {
        this._arguments = args;
    }
});
/**
 * Movement event payload queued by ccs.ArmatureAnimation and delivered to
 * movement-event callbacks.
 * @constructor
 *
 * @property {ccs.Armature} armature - The armature reference of movement event.
 * @property {Number} movementType - The type of movement (ccs.MovementEventType).
 * @property {String} movementID - The ID of movement.
 */
ccs.MovementEvent = function () {
    this.movementID = "";
    this.movementType = ccs.MovementEventType.start;
    this.armature = null;
};
/**
 * Frame event payload queued by ccs.ArmatureAnimation and delivered to
 * frame-event callbacks.
 * @constructor
 *
 * @property {ccs.Bone} bone - The bone reference of frame event.
 * @property {String} frameEventName - The name of frame event.
 * @property {Number} originFrameIndex - The index of origin frame.
 * @property {Number} currentFrameIndex - The index of current frame.
 */
ccs.FrameEvent = function () {
    this.currentFrameIndex = 0;
    this.originFrameIndex = 0;
    this.frameEventName = "";
    this.bone = null;
};
/**
* The Animation class for Armature, it plays armature animation, and controls speed scale and manages animation frame.
* @class
* @extends ccs.ProcessBase
*
* @param {ccs.Armature} [armature] The armature
*
* @property {ccs.AnimationData} animationData - Animation data
* @property {Object} userObject - User custom object
* @property {Boolean} ignoreFrameEvent - Indicate whether the frame event is ignored
* @property {Number} speedScale - Animation play speed scale
* @property {Number} animationScale - Animation play speed scale
*/
ccs.ArmatureAnimation = ccs.ProcessBase.extend(/** @lends ccs.ArmatureAnimation# */{
    // Core playback state ----------------------------------------------------
    _animationData: null,           // ccs.AnimationData: all movements for the armature
    _movementData: null,            // ccs.MovementData of the movement currently playing
    _armature: null,                // owning ccs.Armature
    _movementID: "",                // name of the movement currently playing
    _toIndex: 0,
    _tweenList: null,               // ccs.Tween per bone participating in the movement
    _speedScale: 1,                 // user-facing speed multiplier (see setSpeedScale)
    // Event dispatch state ---------------------------------------------------
    _ignoreFrameEvent: false,       // true while replaying frames (gotoAndPlay) or inside a frame callback
    _frameEventQueue: null,         // ccs.FrameEvent queue drained in update()
    _movementEventQueue: null,      // ccs.MovementEvent queue drained in update()
    // Movement-list (playWithNames/playWithIndexes) state --------------------
    _movementList: null,            // movement names to play back-to-back
    _onMovementList: false,         // true while a movement list is active
    _movementListLoop: false,       // whether the movement list restarts after the last entry
    _movementIndex: 0,              // next index into _movementList
    _movementListDurationTo: -1,
    // Callback registration --------------------------------------------------
    _movementEventCallFunc: null,   // (callFunc, target) pair style
    _frameEventCallFunc: null,
    _movementEventTarget: null,
    _frameEventTarget:null,
    _movementEventListener: null,   // single-listener style (setXxxCallFunc with 1 arg)
    _frameEventListener: null,
    // Builds an empty animation; optionally binds it to an armature immediately.
    ctor: function (armature) {
        ccs.ProcessBase.prototype.ctor.call(this);
        this._tweenList = [];
        this._movementList = [];
        this._frameEventQueue = [];
        this._movementEventQueue = [];
        this._armature = null;
        armature && ccs.ArmatureAnimation.prototype.init.call(this, armature);
    },
    /**
     * Initializes with an armature object
     * @param {ccs.Armature} armature
     * @return {Boolean}
     */
    init: function (armature) {
        this._armature = armature;
        this._tweenList.length = 0;
        return true;
    },
    /**
     * Pauses armature animation.
     */
    pause: function () {
        var locTweenList = this._tweenList;
        for (var i = 0; i < locTweenList.length; i++)
            locTweenList[i].pause();
        ccs.ProcessBase.prototype.pause.call(this);
    },
    /**
     * Resumes armature animation.
     */
    resume: function () {
        var locTweenList = this._tweenList;
        for (var i = 0; i < locTweenList.length; i++)
            locTweenList[i].resume();
        ccs.ProcessBase.prototype.resume.call(this);
    },
    /**
     * Stops armature animation.
     */
    stop: function () {
        var locTweenList = this._tweenList;
        for (var i = 0; i < locTweenList.length; i++)
            locTweenList[i].stop();
        locTweenList.length = 0;
        ccs.ProcessBase.prototype.stop.call(this);
    },
    /**
     * Sets animation play speed scale.
     * @deprecated since v3.0, please use setSpeedScale instead.
     * @param {Number} animationScale
     */
    setAnimationScale: function (animationScale) {
        this.setSpeedScale(animationScale);
    },
    /**
     * Returns animation play speed scale.
     * @deprecated since v3.0, please use getSpeedScale instead.
     * @returns {Number}
     */
    getAnimationScale: function () {
        return this.getSpeedScale();
    },
    /**
     * Sets animation play speed scale.
     * The effective process scale also folds in the current movement's own
     * scale, and is propagated to every bone tween and, recursively, to
     * child armatures.
     * @param {Number} speedScale
     */
    setSpeedScale: function (speedScale) {
        if (speedScale === this._speedScale)
            return;
        this._speedScale = speedScale;
        this._processScale = !this._movementData ? this._speedScale : this._speedScale * this._movementData.scale;
        var dict = this._armature.getBoneDic();
        for (var key in dict) {
            var bone = dict[key];
            bone.getTween().setProcessScale(this._processScale);
            if (bone.getChildArmature())
                bone.getChildArmature().getAnimation().setSpeedScale(this._processScale);
        }
    },
    /**
     * Returns animation play speed scale.
     * @returns {Number}
     */
    getSpeedScale: function () {
        return this._speedScale;
    },
    /**
     * play animation by animation name.
     * @param {String} animationName The animation name you want to play
     * @param {Number} [durationTo=-1]
     *         the frames between two animation changing-over.It's meaning is changing to this animation need how many frames
     *         -1 : use the value from CCMovementData get from flash design panel
     * @param {Number} [loop=-1]
     *          Whether the animation is loop.
     *         loop < 0 : use the value from CCMovementData get from flash design panel
     *         loop = 0 : this animation is not loop
     *         loop > 0 : this animation is loop
     * @example
     * // example
     * armature.getAnimation().play("run",-1,1);//loop play
     * armature.getAnimation().play("run",-1,0);//not loop play
     */
    play: function (animationName, durationTo, loop) {
        cc.assert(this._animationData, "this.animationData can not be null");
        this._movementData = this._animationData.getMovement(animationName);
        cc.assert(this._movementData, "this._movementData can not be null");
        durationTo = (durationTo === undefined) ? -1 : durationTo;
        loop = (loop === undefined) ? -1 : loop;
        //! Get key frame count
        this._rawDuration = this._movementData.duration;
        this._movementID = animationName;
        this._processScale = this._speedScale * this._movementData.scale;
        //! Further processing parameters
        durationTo = (durationTo === -1) ? this._movementData.durationTo : durationTo;
        var durationTween = this._movementData.durationTween === 0 ? this._rawDuration : this._movementData.durationTween;
        var tweenEasing = this._movementData.tweenEasing;
        //loop = (!loop || loop < 0) ? this._movementData.loop : loop;
        loop = (loop < 0) ? this._movementData.loop : loop;
        // A direct play() call leaves movement-list mode; playWithNames re-enables it.
        this._onMovementList = false;
        ccs.ProcessBase.prototype.play.call(this, durationTo, durationTween, loop, tweenEasing);
        if (this._rawDuration === 0)
            this._loopType = ccs.ANIMATION_TYPE_SINGLE_FRAME;
        else {
            this._loopType = loop ? ccs.ANIMATION_TYPE_TO_LOOP_FRONT : ccs.ANIMATION_TYPE_NO_LOOP;
            this._durationTween = durationTween;
        }
        // Rebuild the tween list: only bones with frames in this movement play;
        // bones not covered by the movement get hidden (unless flagged to ignore).
        this._tweenList.length = 0;
        var movementBoneData, map = this._armature.getBoneDic();
        for(var element in map) {
            var bone = map[element];
            movementBoneData = this._movementData.movBoneDataDic[bone.getName()];
            var tween = bone.getTween();
            if(movementBoneData && movementBoneData.frameList.length > 0) {
                this._tweenList.push(tween);
                movementBoneData.duration = this._movementData.duration;
                tween.play(movementBoneData, durationTo, durationTween, loop, tweenEasing);
                tween.setProcessScale(this._processScale);
                if (bone.getChildArmature()) {
                    bone.getChildArmature().getAnimation().setSpeedScale(this._processScale);
                    if (!bone.getChildArmature().getAnimation().isPlaying())
                        bone.getChildArmature().getAnimation().playWithIndex(0);
                }
            } else {
                if(!bone.isIgnoreMovementBoneData()){
                    //! this bone is not include in this movement, so hide it
                    bone.getDisplayManager().changeDisplayWithIndex(-1, false);
                    tween.stop();
                }
            }
        }
        // Apply the first frame immediately.
        this._armature.update(0);
    },
    /**
     * Plays animation with index, the other param is the same to play.
     * @param {Number} animationIndex
     * @param {Number} durationTo
     * @param {Number} durationTween
     * @param {Number} loop
     * @param {Number} [tweenEasing]
     * @deprecated since v3.0, please use playWithIndex instead.
     */
    playByIndex: function (animationIndex, durationTo, durationTween, loop, tweenEasing) {
        // NOTE: durationTween and tweenEasing are accepted for the legacy
        // signature but not forwarded.
        cc.log("playByIndex is deprecated. Use playWithIndex instead.");
        this.playWithIndex(animationIndex, durationTo, loop);
    },
    /**
     * Plays animation with index, the other param is the same to play.
     * @param {Number|Array} animationIndex
     * @param {Number} durationTo
     * @param {Number} loop
     */
    playWithIndex: function (animationIndex, durationTo, loop) {
        var movName = this._animationData.movementNames;
        cc.assert((animationIndex > -1) && (animationIndex < movName.length));
        var animationName = movName[animationIndex];
        this.play(animationName, durationTo, loop);
    },
    /**
     * Plays animation with names
     * @param {Array} movementNames
     * @param {Number} durationTo
     * @param {Boolean} loop
     */
    playWithNames: function (movementNames, durationTo, loop) {
        durationTo = (durationTo === undefined) ? -1 : durationTo;
        loop = (loop === undefined) ? true : loop;
        this._movementListLoop = loop;
        this._movementListDurationTo = durationTo;
        this._onMovementList = true;
        this._movementIndex = 0;
        if(movementNames instanceof Array)
            this._movementList = movementNames;
        else
            this._movementList.length = 0;
        this.updateMovementList();
    },
    /**
     * Plays animation by indexes
     * @param {Array} movementIndexes
     * @param {Number} durationTo
     * @param {Boolean} loop
     */
    playWithIndexes: function (movementIndexes, durationTo, loop) {
        durationTo = (durationTo === undefined) ? -1 : durationTo;
        loop = (loop === undefined) ? true : loop;
        this._movementList.length = 0;
        this._movementListLoop = loop;
        this._movementListDurationTo = durationTo;
        this._onMovementList = true;
        this._movementIndex = 0;
        // Translate indexes to names so updateMovementList can feed play().
        var movName = this._animationData.movementNames;
        for (var i = 0; i < movementIndexes.length; i++) {
            var name = movName[movementIndexes[i]];
            this._movementList.push(name);
        }
        this.updateMovementList();
    },
    /**
     * <p>
     * Goes to specified frame and plays current movement.                                  <br/>
     * You need first switch to the movement you want to play, then call this function.     <br/>
     * <br/>
     * example : playByIndex(0);                                                            <br/>
     *          gotoAndPlay(0);                                                             <br/>
     *          playByIndex(1);                                                             <br/>
     *          gotoAndPlay(0);                                                             <br/>
     *          gotoAndPlay(15);                                                            <br/>
     * </p>
     * @param {Number} frameIndex
     */
    gotoAndPlay: function (frameIndex) {
        if (!this._movementData || frameIndex < 0 || frameIndex >= this._movementData.duration) {
            cc.log("Please ensure you have played a movement, and the frameIndex is in the range.");
            return;
        }
        // Frame events are suppressed while we fast-forward to the target
        // frame, then the previous flag is restored.
        var ignoreFrameEvent = this._ignoreFrameEvent;
        this._ignoreFrameEvent = true;
        this._isPlaying = true;
        this._isComplete = this._isPause = false;
        ccs.ProcessBase.prototype.gotoFrame.call(this, frameIndex);
        this._currentPercent = this._curFrameIndex / (this._movementData.duration - 1);
        this._currentFrame = this._nextFrameIndex * this._currentPercent;
        var locTweenList = this._tweenList;
        for (var i = 0; i < locTweenList.length; i++)
            locTweenList[i].gotoAndPlay(frameIndex);
        this._armature.update(0);
        this._ignoreFrameEvent = ignoreFrameEvent;
    },
    /**
     * Goes to specified frame and pauses current movement.
     * @param {Number} frameIndex
     */
    gotoAndPause: function (frameIndex) {
        this.gotoAndPlay(frameIndex);
        this.pause();
    },
    /**
     * Returns the length of armature's movements
     * @return {Number}
     */
    getMovementCount: function () {
        return this._animationData.getMovementCount();
    },
    /**
     * Updates the state of ccs.Tween list, calls frame event's callback and calls movement event's callback.
     * Events queued by frameEvent()/movementEvent() during the tween updates
     * are dispatched here, after all tweens have advanced.
     * @param {Number} dt
     */
    update: function (dt) {
        ccs.ProcessBase.prototype.update.call(this, dt);
        var locTweenList = this._tweenList;
        for (var i = 0; i < locTweenList.length; i++)
            locTweenList[i].update(dt);
        var frameEvents = this._frameEventQueue, event;
        while (frameEvents.length > 0) {
            event = frameEvents.shift();
            // Prevent callbacks from re-queuing frame events while handling one.
            this._ignoreFrameEvent = true;
            if(this._frameEventCallFunc)
                this._frameEventCallFunc.call(this._frameEventTarget, event.bone, event.frameEventName, event.originFrameIndex, event.currentFrameIndex);
            if(this._frameEventListener)
                this._frameEventListener(event.bone, event.frameEventName, event.originFrameIndex, event.currentFrameIndex);
            this._ignoreFrameEvent = false;
        }
        var movementEvents = this._movementEventQueue;
        while (movementEvents.length > 0) {
            event = movementEvents.shift();
            if(this._movementEventCallFunc)
                this._movementEventCallFunc.call(this._movementEventTarget, event.armature, event.movementType, event.movementID);
            if (this._movementEventListener)
                this._movementEventListener(event.armature, event.movementType, event.movementID);
        }
    },
    /**
     * Updates will call this handler, you can handle your logic here
     * Handles end-of-cycle transitions once _currentPercent reaches 1:
     * fires start/complete/loopComplete movement events and advances an
     * active movement list on completion.
     */
    updateHandler: function () {  //TODO set it to protected in v3.1
        var locCurrentPercent = this._currentPercent;
        if (locCurrentPercent >= 1) {
            switch (this._loopType) {
                case ccs.ANIMATION_TYPE_NO_LOOP:
                    this._loopType = ccs.ANIMATION_TYPE_MAX;
                    this._currentFrame = (locCurrentPercent - 1) * this._nextFrameIndex;
                    locCurrentPercent = this._currentFrame / this._durationTween;
                    if (locCurrentPercent < 1.0) {
                        this._nextFrameIndex = this._durationTween;
                        this.movementEvent(this._armature, ccs.MovementEventType.start, this._movementID);
                        break;
                    }
                    break;
                case ccs.ANIMATION_TYPE_MAX:
                case ccs.ANIMATION_TYPE_SINGLE_FRAME:
                    locCurrentPercent = 1;
                    this._isComplete = true;
                    this._isPlaying = false;
                    this.movementEvent(this._armature, ccs.MovementEventType.complete, this._movementID);
                    this.updateMovementList();
                    break;
                case ccs.ANIMATION_TYPE_TO_LOOP_FRONT:
                    this._loopType = ccs.ANIMATION_TYPE_LOOP_FRONT;
                    locCurrentPercent = ccs.fmodf(locCurrentPercent, 1);
                    this._currentFrame = this._nextFrameIndex === 0 ? 0 : ccs.fmodf(this._currentFrame, this._nextFrameIndex);
                    this._nextFrameIndex = this._durationTween > 0 ? this._durationTween : 1;
                    // NOTE(review): passes `this` (the animation) where the
                    // other cases pass this._armature — confirm whether
                    // intentional before relying on event.armature here.
                    this.movementEvent(this, ccs.MovementEventType.start, this._movementID);
                    break;
                default:
                    //locCurrentPercent = ccs.fmodf(locCurrentPercent, 1);
                    this._currentFrame = ccs.fmodf(this._currentFrame, this._nextFrameIndex);
                    this._toIndex = 0;
                    this.movementEvent(this._armature, ccs.MovementEventType.loopComplete, this._movementID);
                    break;
            }
            this._currentPercent = locCurrentPercent;
        }
    },
    /**
     * Returns the Id of current movement
     * @returns {String}
     */
    getCurrentMovementID: function () {
        if (this._isComplete)
            return "";
        return this._movementID;
    },
    /**
     * Sets movement event callback to animation.
     * Called with one argument it registers a bare listener function; with
     * two it registers a (callFunc, target) pair.
     * @param {function} callFunc
     * @param {Object} target
     */
    setMovementEventCallFunc: function (callFunc, target) {
        if(arguments.length === 1){
            this._movementEventListener = callFunc;
        }else if(arguments.length === 2){
            this._movementEventTarget = target;
            this._movementEventCallFunc = callFunc;
        }
    },
    /**
     * Sets frame event callback to animation.
     * Same one-argument/two-argument convention as setMovementEventCallFunc.
     * @param {function} callFunc
     * @param {Object} target
     */
    setFrameEventCallFunc: function (callFunc, target) {
        if(arguments.length === 1){
            this._frameEventListener = callFunc;
        }else if(arguments.length === 2){
            this._frameEventTarget = target;
            this._frameEventCallFunc = callFunc;
        }
    },
    /**
     * Sets user object to animation.
     * @param {Object} userObject
     */
    setUserObject: function (userObject) {
        this._userObject = userObject;
    },
    /**
     * Emits a frame event
     * The event is only queued (not dispatched) when some frame-event
     * callback is registered; dispatch happens in update().
     * @param {ccs.Bone} bone
     * @param {String} frameEventName
     * @param {Number} originFrameIndex
     * @param {Number} currentFrameIndex
     */
    frameEvent: function (bone, frameEventName, originFrameIndex, currentFrameIndex) {
        if ((this._frameEventTarget && this._frameEventCallFunc) || this._frameEventListener) {
            var frameEvent = new ccs.FrameEvent();
            frameEvent.bone = bone;
            frameEvent.frameEventName = frameEventName;
            frameEvent.originFrameIndex = originFrameIndex;
            frameEvent.currentFrameIndex = currentFrameIndex;
            this._frameEventQueue.push(frameEvent);
        }
    },
    /**
     * Emits a movement event
     * Queued only when a movement-event callback is registered; dispatched in update().
     * @param {ccs.Armature} armature
     * @param {Number} movementType
     * @param {String} movementID
     */
    movementEvent: function (armature, movementType, movementID) {
        if ((this._movementEventTarget && this._movementEventCallFunc) || this._movementEventListener) {
            var event = new ccs.MovementEvent();
            event.armature = armature;
            event.movementType = movementType;
            event.movementID = movementID;
            this._movementEventQueue.push(event);
        }
    },
    /**
     * Updates movement list.
     * Starts the next queued movement (wrapping when _movementListLoop is set).
     */
    updateMovementList: function () {
        if (this._onMovementList) {
            var movementObj, locMovementList = this._movementList;
            if (this._movementListLoop) {
                movementObj = locMovementList[this._movementIndex];
                // NOTE(review): entries appear to be movement-name strings, so
                // movementObj.durationTo is presumably undefined and play()
                // falls back to its -1 default — confirm against the data format.
                this.play(movementObj, movementObj.durationTo, 0);
                this._movementIndex++;
                if (this._movementIndex >= locMovementList.length)
                    this._movementIndex = 0;
            } else {
                if (this._movementIndex < locMovementList.length) {
                    movementObj = locMovementList[this._movementIndex];
                    this.play(movementObj, movementObj.durationTo, 0);
                    this._movementIndex++;
                } else
                    this._onMovementList = false;
            }
            // NOTE(review): this unconditionally re-enables list mode, undoing
            // the `false` set in the exhausted branch above — verify whether
            // non-looping lists are meant to terminate here.
            this._onMovementList = true;
        }
    },
    /**
     * Sets animation data to animation.
     * @param {ccs.AnimationData} data
     */
    setAnimationData: function (data) {
        if(this._animationData !== data)
            this._animationData = data;
    },
    /**
     * Returns animation data of animation.
     * @return {ccs.AnimationData}
     */
    getAnimationData: function () {
        return this._animationData;
    },
    /**
     * Returns the user object of animation.
     * @return {Object}
     */
    getUserObject: function () {
        return this._userObject;
    },
    /**
     * Determines if the frame event is ignored
     * @returns {boolean}
     */
    isIgnoreFrameEvent: function () {
        return this._ignoreFrameEvent;
    }
});
// Expose speedScale / animationScale as plain properties on the prototype so
// user code can read and assign them like fields.
var _p = ccs.ArmatureAnimation.prototype;
// Extended properties
/** @expose */
_p.speedScale;
cc.defineGetterSetter(_p, "speedScale", _p.getSpeedScale, _p.setSpeedScale);
/** @expose */
_p.animationScale;
// animationScale is a deprecated alias of speedScale (see getAnimationScale).
cc.defineGetterSetter(_p, "animationScale", _p.getAnimationScale, _p.setAnimationScale);
_p = null;
/**
 * Factory helper kept for backward compatibility: allocates and initializes
 * an ArmatureAnimation.
 * @param {ccs.Armature} [armature]
 * @return {ccs.ArmatureAnimation}
 * @deprecated since v3.1, please use new construction instead
 */
ccs.ArmatureAnimation.create = function (armature) {
    return new ccs.ArmatureAnimation(armature);
};
|
// Jovo state handlers for the "create claim" conversation flow:
// SelectedClaimState -> NeedAmbulance -> DroneArrived -> CallForTowTruck
// -> UberArrived -> ClaimThanks. All spoken text comes from i18n keys via
// this.t(...); audio clips and card images are served from S3.
module.exports = {
    /**************************CREATE CLAIM************************ */
    // Entry state: asks whether anyone is hurt (Yes -> ambulance flow).
    'SelectedClaimState': {
        'YesIntent': function () {
            let SelectedClaimState = this.t('DoYouNeedAnAmbulance');
            this.followUpState('NeedAmbulance').ask(SelectedClaimState);
        },
        // No injuries: narrate drone dispatch and ask permission to photograph.
        'NoIntent': function () {
            let SelectedClaimState = this.speechBuilder()
                .addBreak('400ms').addT('GladYouAreOk')
                .addBreak('400ms').addT('MrShiv')
                .addBreak('400ms').addT('pinPointingYourCar')
                .addBreak('400ms').addT('InformedPoliceAndSendingDrone')
                .addBreak('400ms').addT('PhotosSentHelpInvestigation')
                .addAudio("https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/Drone2.mp3")
                .addBreak('400ms').addT('DroneArrived')
                .addBreak('400ms').addT('droneTakePictures')
                .addBreak('400ms').addT('canWeTakePictures')
            this.followUpState('DroneArrived')
                .showImageCard(this.t('cardTitle'), this.t('pinPointingYourCar'), 'https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/3.+giphy-4.gif')
                .ask(SelectedClaimState, this.t('boolReprompt'));
        },
        'RepeatIntent': function () {
            this.repeat();
        },
        'CancelIntent': function () {
            let speech = this.speechBuilder()
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/Intro.mp3')
                //.addT(str)
                .addBreak('400ms').addT('ThankYouFromInsuranceBuddy')
            this.tell(speech, speech);
        },
        'Unhandled': function () {
            this.followUpState('SelectedClaimState')
                .ask(this.t('boolPrompt'), this.t('boolReprompt'));
        },
    },
    // Asks whether an ambulance is needed; No re-routes into the drone flow.
    'NeedAmbulance': {
        'YesIntent': function () {
            let NeedAmbulance = this.speechBuilder()
                .addBreak('400ms').addT('BookedAmbulance')
                .addBreak('400ms').addT('AmbulanceCarOntheWaay')
            // NOTE(review): tell() ends the session; the second (reprompt)
            // argument is presumably ignored by Jovo — confirm intent.
            this.tell(NeedAmbulance, this.t('boolReprompt'));
        },
        'NoIntent': function () {
            this.toStateIntent('SelectedClaimState', 'NoIntent');
        },
        'RepeatIntent': function () {
            this.repeat();
        },
        'CancelIntent': function () {
            let speech = this.speechBuilder()
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/Intro.mp3')
                //.addT(str)
                .addBreak('400ms').addT('ThankYouFromInsuranceBuddy')
            this.tell(speech, speech);
        },
        'Unhandled': function () {
            this.followUpState('NeedAmbulance')
                .ask(this.t('boolPrompt'), this.t('boolReprompt'));
        },
    },
    // Drone has photographed the scene; asks permission to book a tow truck.
    'DroneArrived': {
        'YesIntent': function () {
            let droneArrived = this.speechBuilder()
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/CameraClick1.mp3')
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/CameraClick1.mp3')
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/CameraClick1.mp3')
                //.addBreak('400ms')
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/Process1.mp3')
                //.addBreak('400ms')
                .addBreak('400ms').addT('recievedPhotosFromDrone')
                .addBreak('400ms').addT('assessedSituation')
                .addBreak('400ms').addT('canWeBookTowTruck')
            this.followUpState('CallForTowTruck')
                .showImageCard(this.t('cardTitle'), this.t('BookingTowTruck'), 'https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/4.+Car-Tow-Truck-56137.gif')
                .ask(droneArrived, this.t('boolReprompt'));
        },
        // Declined photos: a human assessor is dispatched and the session ends.
        'NoIntent': function () {
            let droneArrived = this.speechBuilder()
                .addBreak('400ms').addT('ok')
                .addBreak('400ms').addT('SendingPerson')
                .addBreak('400ms').addT('takeFewMinutes')
            this.tell(droneArrived, this.t('boolReprompt'));
        },
        'RepeatIntent': function () {
            this.repeat();
        },
        'CancelIntent': function () {
            let speech = this.speechBuilder()
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/Intro.mp3')
                //.addT(str)
                .addBreak('400ms').addT('ThankYouFromInsuranceBuddy')
            this.tell(speech, speech);
        },
        'Unhandled': function () {
            this.followUpState('DroneArrived')
                .ask(this.t('boolPrompt'), this.t('boolReprompt'));
        },
    },
    // Books the tow truck (and an Uber for the user), then hands off to UberArrived.
    'CallForTowTruck': {
        'YesIntent': function () {
            let CallForTowTruck = this.speechBuilder()
                .addBreak('400ms').addT('bookingaTowTruck')
                .addBreak('400ms').addT('bookingUber')
                .addBreak('400ms').addT('dontWorryAbouttheCar')
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/UberCar1.mp3')
                .addBreak('400ms').addT('yourTowTruckArrived')
                .addBreak('400ms').addT('dontForgetCarKeys')
                .addBreak('400ms').addT('canWeProceed')
            this.followUpState('UberArrived')
                .showImageCard(this.t('cardTitle'), this.t('yourTowTruckArrived'), 'https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/5.1+tow-truck-logo-26.gif')
                .ask(CallForTowTruck, this.t('boolReprompt'));
        },
        'NoIntent': function () {
            let CallForTowTruck = this.speechBuilder()
                .addBreak('400ms').addT('ok')
                .addBreak('400ms').addT('SendingPerson')
                .addBreak('400ms').addT('takeFewMinutes')
                .addBreak('400ms').addT('bookingUber')
                .addBreak('400ms').addT('canWeProceed')
            // NOTE(review): followUpState(...) with tell(...) ends the session,
            // so the follow-up state is never reached — looks like this was
            // meant to be ask(...); verify against the Jovo routing docs.
            this.followUpState('UberArrived').tell(CallForTowTruck, this.t('boolReprompt'));
        },
        'RepeatIntent': function () {
            this.repeat();
        },
        'CancelIntent': function () {
            let speech = this.speechBuilder()
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/Intro.mp3')
                //.addT(str)
                .addBreak('400ms').addT('ThankYouFromInsuranceBuddy')
            this.tell(speech, speech);
        },
        'Unhandled': function () {
            this.followUpState('CallForTowTruck')
                .ask(this.t('boolPrompt'), this.t('boolReprompt'));
        },
    },
    // Confirms the user got into the Uber before wrapping up the claim.
    'UberArrived': {
        'YesIntent': function () {
            let UberArrived = this.speechBuilder()
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/UberCar1.mp3')
                .addBreak('400ms').addT('uberHasArrived')
                .addBreak('400ms').addT('PleaseProceed')
                //.addBreak('3000ms')
                .addBreak('400ms').addT('didYouGetIntoTheCar')
            this
                .showImageCard(this.t('cardTitle'), this.t('uberHasArrived'), 'https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/6.+download.jpg')
                .followUpState('ClaimThanks').ask(UberArrived, this.t('boolReprompt'));
        },
        'NoIntent': function () {
            this.toStateIntent('ClaimThanks', 'YesIntent');
        },
        'RepeatIntent': function () {
            this.repeat();
        },
        'CancelIntent': function () {
            let speech = this.speechBuilder()
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/Intro.mp3')
                //.addT(str)
                .addBreak('400ms').addT('ThankYouFromInsuranceBuddy')
            this.tell(speech, speech);
        },
        'Unhandled': function () {
            this.followUpState('UberArrived')
                .ask(this.t('boolPrompt'), this.t('boolReprompt'));
        },
    },
    // Final state: recaps claim details and thanks the user.
    'ClaimThanks': {
        'YesIntent': function () {
            let ClaimThanks = this.speechBuilder()
                .addBreak('400ms').addT('sentClaimDetailstoEmail')
                .addBreak('400ms').addT('keepPostedOnCar')
                .addBreak('400ms').addT('keepPostedOnClaim')
                .addBreak('400ms').addT('ThankYouFromInsuranceBuddy')
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/Intro.mp3')
            this
                .showImageCard(this.t('cardTitle'), this.t('BigThankYou'), 'https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/7+download.jpg')
                .tell(ClaimThanks, this.t('boolReprompt'));
        },
        'NoIntent': function () {
            let ClaimThanks = this.speechBuilder()
                .addBreak('400ms').addT('ok')
                .addBreak('400ms').addT('getIntoTheCar')
                .addBreak('400ms').addT('didYouGetIntoTheCar')
            // NOTE(review): same followUpState + tell pattern as
            // CallForTowTruck.NoIntent — tell ends the session, so re-asking
            // 'didYouGetIntoTheCar' can never receive an answer; likely ask().
            this.followUpState('ClaimThanks').tell(ClaimThanks, this.t('boolReprompt'));
        },
        'RepeatIntent': function () {
            this.repeat();
        },
        'CancelIntent': function () {
            let speech = this.speechBuilder()
                .addAudio('https://insurance-buddy-2.s3.ap-south-1.amazonaws.com/Intro.mp3')
                //.addT(str)
                .addBreak('400ms').addT('ThankYouFromInsuranceBuddy')
            this.tell(speech, speech);
        },
        'Unhandled': function () {
            // NOTE(review): glbPolicyNo is not defined in this file —
            // presumably a global set elsewhere; verify it exists at runtime.
            if (glbPolicyNo == null) {
                this
                    .ask(this.speechBuilder().addT('errorMsg').addT('welcomeMsg2'), this.speechBuilder().addT('errorMsg').addT('welcomeMsg2'));
            } else if (glbPolicyNo != null) {
                this
                    .ask(this.speechBuilder().addT('errorMsg').addT('PolicyWelcomeP2'), this.speechBuilder().addT('errorMsg').addT('PolicyWelcomeP2'));
            } else {
                // NOTE(review): unreachable — the two branches above
                // (== null / != null) are exhaustive.
                this
                    .ask(this.speechBuilder().addT('errorMsg').addT('PolicyWelcomeP2'), this.speechBuilder().addT('errorMsg').addT('PolicyWelcomeP2'));
            }
        },
    },
};
|
#!/usr/bin/env python
# Command-line entry point that (re)creates the Infopage database schema.
# Python 2 script (uses print statements). Usage: schema.py [-d]
import sys
from infopage import Infopage
# -h anywhere on the command line prints usage and exits.
if len(sys.argv) > 1 and '-h' in sys.argv[1:]:
    print "Usage: schema.py [-d]"
    print "-d Drops all tables before recreating them"
    sys.exit(1)
# -d requests a destructive rebuild: drop everything first.
dropping = len(sys.argv) > 1 and '-d' in sys.argv[1:]
ip = Infopage()
ip.loadconfig()
# Infopage is used as a context manager -- presumably it opens/closes the
# database connection around the schema work (TODO confirm).
with ip:
    if dropping:
        ip.dropall()
    ip.createschema()
    # Default rows are only (re)inserted on a fresh drop/recreate cycle;
    # an incremental run leaves existing data untouched.
    if dropping:
        ip.insertdefault()
|
/**
 * Option-merge strategy for the custom `broadcast` component option.
 *
 * Merges the child's event-name -> handler(s) map into the parent's:
 * events present only on one side are kept as-is; events present on both
 * sides have their handlers flattened into a single array
 * (parent handlers first, then the child's).
 *
 * @param {Object|null|undefined} parentVal parent option value
 * @param {Object|null|undefined} childVal  child option value
 * @param {Object} vm                       component instance (unused, kept
 *                                          for the merge-strategy signature)
 * @returns {Object} merged event map
 */
const broadcastMerge = (parentVal, childVal, vm) => {
  if (!childVal) {
    return parentVal
  }
  if (!parentVal) {
    return childVal
  }
  // Bug fix: copy instead of aliasing parentVal. Mutating the parent's
  // option object in place would leak child handlers into every other
  // component sharing the same parent options (e.g. a mixin or global).
  const result = { ...parentVal }
  Object.keys(childVal).forEach((eventName) => {
    if (!result[eventName]) {
      result[eventName] = childVal[eventName]
    } else {
      // Both sides define handlers: flatten arrays/singles into one array.
      const unflatedHandlers = [result[eventName], childVal[eventName]]
      result[eventName] = unflatedHandlers.flat()
    }
  })
  return result
}
export default broadcastMerge
|
import { z } from 'zorium'
import * as _ from 'lodash-es'
import FormatService from 'frontend-shared/services/format'
if (typeof window !== 'undefined') { require('./index.styl') }
/**
 * Renders an overview block: one value/name pair per metric.
 * `timeScale` is accepted for signature compatibility but not used here.
 */
export default function $blocksOverview ({ timeScale, block }) {
  const renderMetric = (metric) => {
    // server sums all into 1 datapoint for 'overview' type blocks
    const allDimension = _.find(metric.dimensions?.nodes, { slug: 'all' })
    const count = allDimension?.datapoints?.nodes[0]?.count || 0
    return [
      z('.metric-value', FormatService.unit(count, metric.unit)),
      z('.metric-name', metric.name)
    ]
  }
  return z('.z-block-overview', [
    z('.metrics', _.map(block.metrics.nodes, renderMetric))
  ])
}
|
# -*- coding: ascii -*-
#
# Copyright 2007, 2008, 2009, 2010, 2011
# Andr\xe9 Malo or his licensors, as applicable
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
==================
Simple make base
==================
Simple make base.
"""
__author__ = "Andr\xe9 Malo"
__docformat__ = "restructuredtext en"
import sys as _sys
from _setup import term as _term
# Subclasses SystemExit so an uncaught Failure terminates the process with
# the failure reason as the exit message instead of a full traceback.
class Failure(SystemExit):
    """ Failure exception """
def fail(reason):
    """ Fail for a reason

    :Parameters:
      `reason` : ``str``
        Human-readable description of what went wrong; becomes the
        exception (and, if uncaught, the process exit) message.

    :Exceptions:
      - `Failure` : always raised
    """
    raise Failure(reason)
def warn(message, name=None):
    """ Emit a warning line in red via the terminal helper

    :Parameters:
      `message` : ``str``
        The warning text

      `name` : ``str``
        Optional warning name; when given it is prepended as ``name:``
    """
    prefix = "%s:" % name if name else ''
    _term.red("%(NAME)sWarning: %(msg)s", NAME=prefix, msg=message)
def fatal(reason):
    """ Fatal error: print *reason* to stderr and exit with status 1

    :Parameters:
      `reason` : ``str``
        Description of the fatal condition
    """
    _sys.stderr.write("%s\n" % (reason,))
    _sys.exit(1)
class Target(object):
    """ Base class for make targets

    Subclasses set `NAME` (and optionally `DEPS` / `HIDDEN`) and override
    the hook methods below. `ERROR` is filled in by the runner when a hook
    fails.
    """
    NAME = None      # public target name; None means "not registered"
    DEPS = None      # sequence of target names to execute first
    HIDDEN = False   # hidden targets are omitted from the help listing
    ERROR = None     # failure description, set by the runner

    def __init__(self, runner):
        """ Store the owning runner and invoke the init() hook """
        self.runner = runner
        self.init()

    def init(self):
        """ Hook: per-instance initialization (default: do nothing) """

    def run(self):
        """ Hook: execute the target (default: do nothing) """

    def clean(self, scm=True, dist=False):
        """ Hook: remove generated files (default: do nothing) """
class _Runner(object):
    """ Runner

    Collects every `Target` subclass found in the given modules and
    executes them (or their clean hooks) by name, resolving `DEPS` first.

    :IVariables:
      `_tdict` : ``dict``
        Maps target name -> target class

      `_itdict` : ``dict``
        Maps target name -> initialized instance; shared ``seen`` cache
        used by `__call__` so each target runs at most once
    """
    def __init__(self, *targetscollection):
        """ Initialization

        :Parameters:
          `targetscollection` : ``tuple``
            Modules to scan for `Target` subclasses; defaults to
            ``__main__``. `_setup.make.default_targets` is always added.
        """
        tdict = {}
        if not targetscollection:
            # No modules supplied: treat the executing script itself as
            # the only target module.
            import __main__
            targetscollection = [__main__]
        from _setup.make import default_targets
        if default_targets not in targetscollection:
            # NOTE(review): assumes targetscollection is a list here; a
            # caller passing a tuple would break this append -- confirm.
            targetscollection.append(default_targets)
        for targets in targetscollection:
            for value in list(vars(targets).values()):
                if isinstance(value, type) and issubclass(value, Target) and \
                        value.NAME is not None:
                    if value.NAME in tdict:
                        # Name collision: a subclass overrides its base, a
                        # base found later is ignored, anything else is
                        # ambiguous and dropped with a warning.
                        if issubclass(value, tdict[value.NAME]):
                            pass # override base target
                        elif issubclass(tdict[value.NAME], value):
                            continue # found base later. ignore
                        else:
                            warn('Ambiguous target name', value.NAME)
                            continue
                    tdict[value.NAME] = value
        self._tdict = tdict
        self._itdict = {}

    def print_help(self):
        """ Print make help: every non-hidden target with its description """
        import textwrap as _textwrap
        targets = self.targetinfo()
        keys = []
        for key, info in list(targets.items()):
            if not info['hide']:
                keys.append(key)
        keys.sort()
        length = max(list(map(len, keys)))
        info = []
        for key in keys:
            # Two-column layout: name padded to the longest key plus two
            # spaces, then the wrapped description.
            info.append("%s%s" % (
                (key + " " * length)[:length + 2],
                _textwrap.fill(
                    targets[key]['desc'].strip(),
                    subsequent_indent=" " * (length + 2)
                ),
            ))
        print("Available targets:\n\n" + "\n".join(info))

    def targetinfo(self):
        """ Extract target information

        :Return: Maps target name -> ``dict(desc=..., hide=..., deps=...)``
        :Rtype: ``dict``
        """
        result = {}
        for name, cls in list(self._tdict.items()):
            result[name] = {
                'desc': cls.__doc__ or "no description",
                'hide': cls.HIDDEN,
                'deps': cls.DEPS or (),
            }
        return result

    def _topleveltargets(self):
        """ Find all top level targets """
        rev = {} # key is a dep of [values]
        all_ = self.targetinfo()
        for target, info in list(all_.items()):
            for dep in info['deps']:
                if dep not in all_:
                    fatal("Unknown target '%s' (dep of %s) -> exit" % (
                        dep, target
                    ))
                rev.setdefault(dep, []).append(target)
        # NOTE(review): only names appearing as someone's dependency ever
        # enter `rev`, and each entry has a non-empty list, so this
        # comprehension appears to always return []; truly top-level
        # (never-depended-on) targets never enter `rev` at all. Looks like
        # a latent bug -- confirm intended behavior before relying on it.
        return [target for target, info in list(rev.items()) if not info]

    def _run(self, target, seen=None):
        """ Run a target (dependencies first, via __call__) """
        if target.DEPS:
            # Recurse through __call__ so deps share the `seen` cache and
            # each one runs at most once.
            self(*target.DEPS, **{'seen': seen})
        if not target.HIDDEN:
            _term.yellow(">>> %(name)s", name=target.NAME)
        try:
            result = target.run()
        except KeyboardInterrupt:
            result, target.ERROR = False, "^C -> exit"
        except Failure as e:
            result, target.ERROR = False, "%s: %s" % (target.NAME, e)
        except (SystemExit, MemoryError):
            raise
        except:
            # Any other exception: record the full traceback on the target
            # and report failure to the caller.
            import traceback
            target.ERROR = "%s errored:\n%s" % (target.NAME, ''.join(
                traceback.format_exception(*_sys.exc_info())
            ))
            result = False
        else:
            # A run() hook returning None counts as success.
            if result is None:
                result = True
        return result

    def _clean(self, target, scm, dist, seen=None):
        """ Run a target's clean hook (dependencies first) """
        if target.DEPS:
            self.run_clean(
                *target.DEPS, **{'scm': scm, 'dist': dist, 'seen': seen}
            )
        try:
            result = target.clean(scm, dist)
        except KeyboardInterrupt:
            result, target.ERROR = False, "^C -> exit"
        except Failure as e:
            result, target.ERROR = False, "%s: %s" % (target.NAME, e)
        except (SystemExit, MemoryError):
            raise
        except:
            import traceback
            target.ERROR = "%s errored:\n%s" % (target.NAME, ''.join(
                traceback.format_exception(*_sys.exc_info())
            ))
            result = False
        else:
            # A clean() hook returning None counts as success.
            if result is None:
                result = True
        return result

    def _make_init(self, seen):
        """ Make init mapper

        :Return: Closure mapping a target name to an initialized instance,
                 or ``None`` when the name was already handled this walk
        """
        def init(target):
            """ Return initialized target """
            if target not in seen:
                try:
                    seen[target] = self._tdict[target](self)
                except KeyError:
                    fatal("Unknown target '%s' -> exit" % target)
            else:
                # Already visited during this walk: overwrite with None to
                # signal "skip".
                seen[target] = None
            return seen[target]
        return init

    def run_clean(self, *targets, **kwargs):
        """ Run clean on the given targets (all targets when none given)

        :Keywords:
          `seen` : ``dict``
            Shared instance cache
          `scm`, `dist` : ``bool``
            Passed through to each target's clean hook
        """
        def pop(name, default=None):
            """ Pop """
            if name in kwargs:
                value = kwargs[name]
                del kwargs[name]
                if value is None:
                    return default
                return value
            else:
                return default
        seen = pop('seen', {})
        scm = pop('scm', True)
        dist = pop('dist', False)
        if kwargs:
            raise TypeError('Unknown keyword parameters')
        if not targets:
            # No explicit targets: clean everything, top-level ones first.
            top_targets = self._topleveltargets()
            targets = self.targetinfo()
            for item in top_targets:
                del targets[item]
            targets = list(targets.keys())
            targets.sort()
            top_targets.sort()
            targets = top_targets + targets
        init = self._make_init(seen)
        for name in targets:
            target = init(name)
            if target is not None:
                if not self._clean(target, scm=scm, dist=dist, seen=seen):
                    msg = target.ERROR
                    if msg is None:
                        msg = "Clean target %s returned error -> exit" % name
                    fatal(msg)

    def __call__(self, *targets, **kwargs):
        """ Run targets

        :Keywords:
          `seen` : ``dict``
            Shared instance cache; defaults to the runner-wide `_itdict`
        """
        if 'seen' in kwargs:
            seen = kwargs['seen']
            del kwargs['seen']
        else:
            seen = None
        if seen is None:
            seen = self._itdict
        if kwargs:
            raise TypeError('Unknown keyword parameters')
        init = self._make_init(seen)
        for name in targets:
            target = init(name)
            if target is not None:
                if not self._run(target, seen):
                    msg = target.ERROR
                    if msg is None:
                        msg = "Target %s returned error -> exit" % name
                    fatal(msg)
def main(*args, **kwargs):
    """
    main(argv=None, *args, name=None)

    Main entry point: parse the command line and execute the targets
    named in `argv`; with no targets, print the help listing.

    :Parameters:
      `argv` : sequence
        Command line arguments. If omitted or ``None``, they are picked
        from ``sys.argv``.

      `args` : ``tuple``
        Modules containing targets. If empty, ``__main__`` is imported
        and used; `_setup.make` (this module) is always added by the
        runner to provide the default targets.

      `name` : ``str``
        Name of the executing module. If omitted or ``None``,
        ``'__main__'`` is assumed; for any other name the function
        returns immediately without doing anything.
    """
    name = kwargs.pop('name', None)
    if name is None:
        name = '__main__'
    # Sentinel distinguishes "argv not supplied" from "argv=None supplied";
    # both mean "use sys.argv", but an explicit value is prepended to args.
    missing = object()
    argv = kwargs.pop('argv', missing)
    if argv is missing:
        if not args:
            args = (None,)
    else:
        args = (argv,) + args
    if kwargs:
        raise TypeError("Unrecognized keyword arguments for main()")
    if name != '__main__':
        # Imported, not executed: do nothing.
        return
    argv, args = args[0], args[1:]
    if argv is None:
        argv = _sys.argv[1:]
    runner = _Runner(*args)
    if argv:
        runner(*argv)
    else:
        runner.print_help()
|
import operator
from typing import TYPE_CHECKING, Type, Union
import numpy as np
from pandas._libs import lib, missing as libmissing
from pandas.core.dtypes.base import ExtensionDtype, register_extension_dtype
from pandas.core.dtypes.common import pandas_dtype
from pandas.core.dtypes.inference import is_array_like
from pandas import compat
from pandas.core import ops
from pandas.core.arrays import IntegerArray, PandasArray
from pandas.core.arrays.integer import _IntegerDtype
from pandas.core.construction import extract_array
from pandas.core.indexers import check_array_indexer
from pandas.core.missing import isna
if TYPE_CHECKING:
import pyarrow # noqa: F401
@register_extension_dtype
class StringDtype(ExtensionDtype):
    """
    Extension dtype for string data.

    .. versionadded:: 1.0.0

    .. warning::

       StringDtype is considered experimental. The implementation and
       parts of the API may change without warning.

       In particular, StringDtype.na_value may change to no longer be
       ``numpy.nan``.

    Attributes
    ----------
    None

    Methods
    -------
    None

    Examples
    --------
    >>> pd.StringDtype()
    StringDtype
    """

    name = "string"

    #: StringDtype.na_value uses pandas.NA
    na_value = libmissing.NA

    @property
    def type(self) -> Type[str]:
        # Scalar type of the elements held by a StringArray.
        return str

    @classmethod
    def construct_array_type(cls) -> Type["StringArray"]:
        """
        Return the array type associated with this dtype.

        Returns
        -------
        type
        """
        return StringArray

    def __repr__(self) -> str:
        return "StringDtype"

    def __from_arrow__(
        self, array: Union["pyarrow.Array", "pyarrow.ChunkedArray"]
    ) -> "StringArray":
        """
        Construct StringArray from pyarrow Array/ChunkedArray.
        """
        import pyarrow  # noqa: F811

        if isinstance(array, pyarrow.Array):
            # A plain Array is handled as a one-chunk ChunkedArray.
            chunks = [array]
        else:
            # pyarrow.ChunkedArray
            chunks = array.chunks

        results = []
        for arr in chunks:
            # using _from_sequence to ensure None is converted to NA
            str_arr = StringArray._from_sequence(np.array(arr))
            results.append(str_arr)

        return StringArray._concat_same_type(results)
class StringArray(PandasArray):
    """
    Extension array for string data.

    .. versionadded:: 1.0.0

    .. warning::

       StringArray is considered experimental. The implementation and
       parts of the API may change without warning.

    Parameters
    ----------
    values : array-like
        The array of data.

        .. warning::

           Currently, this expects an object-dtype ndarray
           where the elements are Python strings or :attr:`pandas.NA`.
           This may change without warning in the future. Use
           :meth:`pandas.array` with ``dtype="string"`` for a stable way of
           creating a `StringArray` from any sequence.

    copy : bool, default False
        Whether to copy the array of data.

    Attributes
    ----------
    None

    Methods
    -------
    None

    See Also
    --------
    array
        The recommended function for creating a StringArray.
    Series.str
        The string methods are available on Series backed by
        a StringArray.

    Notes
    -----
    StringArray returns a BooleanArray for comparison methods.

    Examples
    --------
    >>> pd.array(['This is', 'some text', None, 'data.'], dtype="string")
    <StringArray>
    ['This is', 'some text', <NA>, 'data.']
    Length: 4, dtype: string

    Unlike arrays instantiated with ``dtype="object"``, ``StringArray``
    will convert the values to strings.

    >>> pd.array(['1', 1], dtype="object")
    <PandasArray>
    ['1', 1]
    Length: 2, dtype: object
    >>> pd.array(['1', 1], dtype="string")
    <StringArray>
    ['1', '1']
    Length: 2, dtype: string

    However, instantiating StringArrays directly with non-strings will raise an error.

    For comparison methods, `StringArray` returns a :class:`pandas.BooleanArray`:

    >>> pd.array(["a", None, "c"], dtype="string") == "a"
    <BooleanArray>
    [True, <NA>, False]
    Length: 3, dtype: boolean
    """

    # undo the PandasArray hack
    _typ = "extension"

    def __init__(self, values, copy=False):
        values = extract_array(values)
        # An existing StringArray is already known-valid; skip the
        # O(n) re-validation scan below.
        skip_validation = isinstance(values, type(self))

        super().__init__(values, copy=copy)
        self._dtype = StringDtype()
        if not skip_validation:
            self._validate()

    def _validate(self):
        """Validate that we only store NA or strings."""
        if len(self._ndarray) and not lib.is_string_array(self._ndarray, skipna=True):
            raise ValueError("StringArray requires a sequence of strings or pandas.NA")
        if self._ndarray.dtype != "object":
            raise ValueError(
                "StringArray requires a sequence of strings or pandas.NA. Got "
                f"'{self._ndarray.dtype}' dtype instead."
            )

    @classmethod
    def _from_sequence(cls, scalars, dtype=None, copy=False):
        """Construct a StringArray from a sequence, coercing scalars to str
        and all missing-like values to pandas.NA."""
        if dtype:
            assert dtype == "string"

        result = np.asarray(scalars, dtype="object")
        if copy and result is scalars:
            result = result.copy()

        # Standardize all missing-like values to NA
        # TODO: it would be nice to do this in _validate / lib.is_string_array
        # We are already doing a scan over the values there.
        na_values = isna(result)
        has_nans = na_values.any()
        if has_nans and result is scalars:
            # force a copy now, if we haven't already
            result = result.copy()

        # convert to str, then to object to avoid dtype like '<U3', then insert na_value
        result = np.asarray(result, dtype=str)
        result = np.asarray(result, dtype="object")
        if has_nans:
            result[na_values] = StringDtype.na_value

        return cls(result)

    @classmethod
    def _from_sequence_of_strings(cls, strings, dtype=None, copy=False):
        # Inputs are already strings; the generic constructor handles them.
        return cls._from_sequence(strings, dtype=dtype, copy=copy)

    def __arrow_array__(self, type=None):
        """
        Convert myself into a pyarrow Array.
        """
        import pyarrow as pa

        if type is None:
            type = pa.string()

        # pyarrow expects None (not pd.NA) to mark missing values.
        values = self._ndarray.copy()
        values[self.isna()] = None
        return pa.array(values, type=type, from_pandas=True)

    def _values_for_factorize(self):
        # -1 is the sentinel the factorize machinery uses for missing values.
        arr = self._ndarray.copy()
        mask = self.isna()
        arr[mask] = -1
        return arr, -1

    def __setitem__(self, key, value):
        value = extract_array(value, extract_numpy=True)
        if isinstance(value, type(self)):
            # extract_array doesn't extract PandasArray subclasses
            value = value._ndarray

        key = check_array_indexer(self, key)
        scalar_key = lib.is_scalar(key)
        scalar_value = lib.is_scalar(value)
        if scalar_key and not scalar_value:
            raise ValueError("setting an array element with a sequence.")

        # validate new items
        if scalar_value:
            if isna(value):
                value = StringDtype.na_value
            elif not isinstance(value, str):
                raise ValueError(
                    f"Cannot set non-string value '{value}' into a StringArray."
                )
        else:
            if not is_array_like(value):
                value = np.asarray(value, dtype=object)
            if len(value) and not lib.is_string_array(value, skipna=True):
                raise ValueError("Must provide strings.")

        super().__setitem__(key, value)

    def fillna(self, value=None, method=None, limit=None):
        # TODO: validate dtype
        return super().fillna(value, method, limit)

    def astype(self, dtype, copy=True):
        dtype = pandas_dtype(dtype)
        if isinstance(dtype, StringDtype):
            if copy:
                return self.copy()
            return self
        elif isinstance(dtype, _IntegerDtype):
            # NA slots are temporarily filled with 0 so the numpy cast
            # succeeds; the boolean mask records which entries are missing.
            arr = self._ndarray.copy()
            mask = self.isna()
            arr[mask] = 0
            values = arr.astype(dtype.numpy_dtype)
            return IntegerArray(values, mask, copy=False)

        return super().astype(dtype, copy)

    def _reduce(self, name: str, skipna: bool = True, **kwargs):
        # Only min/max are meaningful reductions for strings.
        if name in ["min", "max"]:
            return getattr(self, name)(skipna=skipna)

        raise TypeError(f"Cannot perform reduction '{name}' with string dtype")

    def value_counts(self, dropna=False):
        from pandas import value_counts

        # Nullable Int64 keeps the counts consistent with other
        # nullable-dtype arrays.
        return value_counts(self._ndarray, dropna=dropna).astype("Int64")

    def memory_usage(self, deep=False):
        result = self._ndarray.nbytes
        if deep:
            # deep=True also accounts for the Python string objects held
            # inside the object-dtype ndarray.
            return result + lib.memory_usage_of_objects(self._ndarray)
        return result

    # Override parent because we have different return types.
    @classmethod
    def _create_arithmetic_method(cls, op):
        # Note: this handles both arithmetic and comparison methods.
        @ops.unpack_zerodim_and_defer(op.__name__)
        def method(self, other):
            from pandas.arrays import BooleanArray

            assert op.__name__ in ops.ARITHMETIC_BINOPS | ops.COMPARISON_BINOPS

            if isinstance(other, cls):
                other = other._ndarray

            # Operate only where neither operand is missing; NA positions
            # propagate into the result.
            mask = isna(self) | isna(other)
            valid = ~mask

            if not lib.is_scalar(other):
                if len(other) != len(self):
                    # prevent improper broadcasting when other is 2D
                    raise ValueError(
                        f"Lengths of operands do not match: {len(self)} != {len(other)}"
                    )

                other = np.asarray(other)
                other = other[valid]

            if op.__name__ in ops.ARITHMETIC_BINOPS:
                result = np.empty_like(self._ndarray, dtype="object")
                result[mask] = StringDtype.na_value
                result[valid] = op(self._ndarray[valid], other)
                return StringArray(result)
            else:
                # logical
                result = np.zeros(len(self._ndarray), dtype="bool")
                result[valid] = op(self._ndarray[valid], other)
                return BooleanArray(result, mask)

        return compat.set_function_name(method, f"__{op.__name__}__", cls)

    @classmethod
    def _add_arithmetic_ops(cls):
        cls.__add__ = cls._create_arithmetic_method(operator.add)
        cls.__radd__ = cls._create_arithmetic_method(ops.radd)

        cls.__mul__ = cls._create_arithmetic_method(operator.mul)
        cls.__rmul__ = cls._create_arithmetic_method(ops.rmul)

    _create_comparison_method = _create_arithmetic_method
# Attach the dynamically generated arithmetic/comparison dunder methods at
# import time (the class factories above build one method per operator).
StringArray._add_arithmetic_ops()
StringArray._add_comparison_ops()
|
/*
id-splitter.c - Instruction-Data transaction splitter.
Copyright 2002-2007 Virtutech AB
The contents herein are Source Code which are a subset of Licensed
Software pursuant to the terms of the Virtutech Simics Software
License Agreement (the "Agreement"), and are being distributed under
the Agreement. You should have received a copy of the Agreement with
this Licensed Software; if not, please contact Virtutech for a copy
of the Agreement prior to using this Licensed Software.
By using this Source Code, you agree to be bound by all of the terms
of the Agreement, and use of this Source Code is subject to the terms
the Agreement.
This Source Code and any derivatives thereof are provided on an "as
is" basis. Virtutech makes no warranties with respect to the Source
Code or any derivatives thereof and disclaims all implied warranties,
including, without limitation, warranties of merchantability and
fitness for a particular purpose and non-infringement.
*/
/*
<add id="simics sparc module short">
<name index="true">id splitter</name>
The id splitter module splits up memory operations into separate
data and instruction streams. Data operations are forwarded to
the timing interface of the object specified by the dbranch attribute and,
in the same manner, instruction operations are forwarded to the ibranch.
</add>
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <simics/api.h>
#include <simics/alloc.h>
#include <simics/utils.h>
/* Class/interface state filled in by init_local() when the module is
   registered with Simics. */
static conf_class_t *id_splitter_class;
static class_data_t id_splitter_data;
static timing_model_interface_t timing_interface;

/* Per-instance configuration: the instruction/data forwarding branches and
   cached copies of their timing-model interfaces (looked up once when the
   attribute is set, to avoid an interface lookup per transaction). */
typedef struct {
    conf_object_t obj;
    conf_object_t *ibranch;
    conf_object_t *dbranch;
    timing_model_interface_t dbranch_if, ibranch_if;
} id_splitter_t;
/* Timing-model entry point: route a transaction to the data branch or the
   instruction branch. Returns the branch's stall time, or 0 when the
   matching branch is not configured. */
static cycles_t
id_splitter_operate(conf_object_t *mem_hier, conf_object_t *space,
                    map_list_t *map, generic_transaction_t *mem_trans)
{
    id_splitter_t *splitter = (id_splitter_t *)mem_hier;
    conf_object_t *branch;
    timing_model_interface_t *iface;

    if (SIM_mem_op_is_data(mem_trans)) {
        branch = splitter->dbranch;
        iface = &splitter->dbranch_if;
    } else {
        branch = splitter->ibranch;
        iface = &splitter->ibranch_if;
    }
    if (branch)
        return iface->operate(branch, space, map, mem_trans);
    return 0;
}
/* Attribute setter for "dbranch": store the object and cache its
   timing-model interface; rejects objects without a usable interface. */
static set_error_t
set_dbranch(void *dont_care, conf_object_t *obj, attr_value_t *val, attr_value_t *idx)
{
    id_splitter_t *splitter = (id_splitter_t *)obj;
    timing_model_interface_t *tif;

    if (val->kind != Sim_Val_Object)
        return Sim_Set_Need_Object;

    splitter->dbranch = val->u.object;
    tif = SIM_get_interface(splitter->dbranch, "timing-model");
    if (tif == NULL) {
        pr("object `%s' has no timing interface\n",
           splitter->dbranch->name);
        return Sim_Set_Interface_Not_Found;
    }
    /* Cache a copy so operate() avoids a lookup per transaction. */
    splitter->dbranch_if = *tif;
    if (splitter->dbranch_if.operate == NULL) {
        pr("object `%s' doesn't export the operate function!\n",
           splitter->dbranch->name);
        return Sim_Set_Interface_Not_Found;
    }
    return Sim_Set_Ok;
}
/* Attribute getter for "dbranch": return the configured data branch. */
static attr_value_t
get_dbranch(void *dont_care, conf_object_t *obj, attr_value_t *idx)
{
    attr_value_t ret;

    ret.kind = Sim_Val_Object;
    ret.u.object = ((id_splitter_t *)obj)->dbranch;
    return ret;
}
/* Attribute setter for "ibranch": store the object and cache its
   timing-model interface; rejects objects without a usable interface. */
static set_error_t
set_ibranch(void *dont_care, conf_object_t *obj, attr_value_t *val, attr_value_t *idx)
{
    id_splitter_t *splitter = (id_splitter_t *)obj;
    timing_model_interface_t *tif;

    if (val->kind != Sim_Val_Object)
        return Sim_Set_Need_Object;

    splitter->ibranch = val->u.object;
    tif = SIM_get_interface(splitter->ibranch, "timing-model");
    if (tif == NULL) {
        pr("object `%s' has no timing interface\n",
           splitter->ibranch->name);
        return Sim_Set_Interface_Not_Found;
    }
    /* Cache a copy so operate() avoids a lookup per transaction. */
    splitter->ibranch_if = *tif;
    if (splitter->ibranch_if.operate == NULL) {
        pr("object `%s' doesn't export the operate function!\n",
           splitter->ibranch->name);
        return Sim_Set_Interface_Not_Found;
    }
    return Sim_Set_Ok;
}
/* Attribute getter for "ibranch": return the configured instruction branch. */
static attr_value_t
get_ibranch(void *dont_care, conf_object_t *obj, attr_value_t *idx)
{
    attr_value_t ret;

    ret.kind = Sim_Val_Object;
    ret.u.object = ((id_splitter_t *)obj)->ibranch;
    return ret;
}
/* Simics object constructor: allocate a zero-initialized instance. Both
   branch pointers start NULL, so transactions are silently absorbed
   (operate() returns 0) until the attributes are configured. */
static conf_object_t *
id_splitter_new_instance(parse_object_t *pa)
{
    id_splitter_t *ids = MM_ZALLOC(1, id_splitter_t);
    SIM_object_constructor(&ids->obj, pa);
    return &ids->obj;
}
DLL_EXPORT void
init_local(void)
{
id_splitter_data.new_instance = id_splitter_new_instance;
id_splitter_data.description = "The id splitter module splits up memory operations into separate "
"data and instruction streams. Data operations are forwarded to "
"the timing interface of the object specified by the dbranch attribute and, "
"in the same manner, instruction operations are forwarded to the ibranch.";
if (!(id_splitter_class = SIM_register_class("id-splitter", &id_splitter_data))) {
pr("Could not create id-splitter class\n");
}
/* set up custom interfaces */
timing_interface.operate = id_splitter_operate;
SIM_register_interface(id_splitter_class, "timing-model", &timing_interface);
SIM_register_attribute(id_splitter_class, "ibranch",
get_ibranch, 0, set_ibranch, 0, Sim_Attr_Optional,
"Object to receive instruction transactions.");
SIM_register_attribute(id_splitter_class, "dbranch",
get_dbranch, 0, set_dbranch, 0, Sim_Attr_Optional,
"Object to receive data transactions.");
}
|
// Copyright (c) 2009 INRIA Sophia-Antipolis (France).
// All rights reserved.
//
// This file is part of CGAL (www.cgal.org).
// You can redistribute it and/or modify it under the terms of the GNU
// General Public License as published by the Free Software Foundation,
// either version 3 of the License, or (at your option) any later version.
//
// Licensees holding a valid commercial license may use this file in
// accordance with the commercial license agreement provided with the software.
//
// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
//
// $URL$
// $Id$
//
//
// Author(s) : Stéphane Tayeb
//
//******************************************************************************
// File Description :
// class Labeled_mesh_domain_3. See class description.
//******************************************************************************
#ifndef CGAL_MESH_3_LABELED_MESH_DOMAIN_3_H
#define CGAL_MESH_3_LABELED_MESH_DOMAIN_3_H
#include <CGAL/license/Mesh_3.h>
#define CGAL_DEPRECATED_HEADER "<CGAL/Mesh_3/Labeled_mesh_domain_3.h>"
#define CGAL_REPLACEMENT_HEADER "<CGAL/Labeled_mesh_domain_3.h>"
#include <CGAL/internal/deprecation_warning.h>
#include <CGAL/Mesh_3/config.h>
#include <CGAL/Bbox_3.h>
#include <CGAL/point_generators_3.h>
#include <CGAL/Mesh_3/Creator_weighted_point_3.h>
#include <boost/variant.hpp>
#include <boost/format.hpp>
#include <boost/optional.hpp>
#include <CGAL/tuple.h>
#include <CGAL/Origin.h>
#include <CGAL/Random.h>
namespace CGAL {
namespace Mesh_3 {
/**
* \class Labeled_mesh_domain_3
*
 * The function f must take its values in N.
* Let p be a Point.
* - f(p)=0 means that p is outside domain.
* - f(p)=a, a!=0 means that p is inside subdomain a.
*
 * Any boundary facet is labelled <a,b>, a<b, where a and b are the
 * tags of its incident subdomains.
* Thus, a boundary facet of the domain is labelled <0,b>, where b!=0.
*/
template<class Function,
class BGT>
class Labeled_mesh_domain_3
{
public:
/// Geometric object types
typedef typename BGT::Point_3 Point_3;
typedef typename BGT::Segment_3 Segment_3;
typedef typename BGT::Ray_3 Ray_3;
typedef typename BGT::Line_3 Line_3;
typedef typename BGT::Vector_3 Vector_3;
typedef typename BGT::Sphere_3 Sphere_3;
typedef CGAL::Bbox_3 Bbox_3;
typedef typename BGT::Iso_cuboid_3 Iso_cuboid_3;
public:
// Kernel_traits compatibility
typedef BGT R;
//-------------------------------------------------------
// Index Types
//-------------------------------------------------------
/// Type of indexes for cells of the input complex
typedef typename Function::return_type Subdomain_index;
typedef boost::optional<Subdomain_index> Subdomain;
/// Type of indexes for surface patch of the input complex
typedef std::pair<Subdomain_index, Subdomain_index> Surface_patch_index;
typedef boost::optional<Surface_patch_index> Surface_patch;
/// Type of indexes to characterize the lowest dimensional face of the input
/// complex on which a vertex lie
typedef boost::variant<Subdomain_index, Surface_patch_index> Index;
typedef CGAL::cpp11::tuple<Point_3,Index,int> Intersection;
typedef typename BGT::FT FT;
typedef BGT Geom_traits;
/**
* @brief Constructor
*/
Labeled_mesh_domain_3(const Function& f,
const Sphere_3& bounding_sphere,
const FT& error_bound = FT(1e-3),
CGAL::Random* p_rng = NULL);
Labeled_mesh_domain_3(const Function& f,
const Bbox_3& bbox,
const FT& error_bound = FT(1e-3),
CGAL::Random* p_rng = NULL);
/// Destructor
virtual ~Labeled_mesh_domain_3()
{
if(delete_rng_)
delete p_rng_;
}
/**
* Returns a bounding box of the domain
*/
Bbox_3 bbox() const {
return this->bbox_.bbox();
}
/**
* Constructs a set of \ccc{n} points on the surface, and output them to
* the output iterator \ccc{pts} whose value type is required to be
* \ccc{std::pair<Points_3, Index>}.
*/
struct Construct_initial_points
{
Construct_initial_points(const Labeled_mesh_domain_3& domain)
: r_domain_(domain) {}
template<class OutputIterator>
OutputIterator operator()(OutputIterator pts, const int n = 12) const;
private:
const Labeled_mesh_domain_3& r_domain_;
};
/// Returns Construct_initial_points object
Construct_initial_points construct_initial_points_object() const
{
return Construct_initial_points(*this);
}
/**
* Returns true if point~\ccc{p} is in the domain. If \ccc{p} is in the
* domain, the parameter index is set to the index of the subdomain
* including $p$. It is set to the default value otherwise.
*/
struct Is_in_domain
{
Is_in_domain(const Labeled_mesh_domain_3& domain) : r_domain_(domain) {}
Subdomain operator()(const Point_3& p) const
{
// f(p)==0 means p is outside the domain
Subdomain_index index = (r_domain_.function_)(p);
if ( Subdomain_index() == index )
return Subdomain();
else
return Subdomain(index);
}
private:
const Labeled_mesh_domain_3& r_domain_;
};
/// Returns Is_in_domain object
Is_in_domain is_in_domain_object() const { return Is_in_domain(*this); }
/**
* Returns true is the element \ccc{type} intersect properly any of the
* surface patches describing the either the domain boundary or some
* subdomain boundary.
* \ccc{Type} is either \ccc{Segment_3}, \ccc{Ray_3} or \ccc{Line_3}.
* Parameter index is set to the index of the intersected surface patch
* if \ccc{true} is returned and to the default \ccc{Surface_patch_index}
* value otherwise.
*/
struct Do_intersect_surface
{
Do_intersect_surface(const Labeled_mesh_domain_3& domain)
: r_domain_(domain) {}
Surface_patch operator()(const Segment_3& s) const
{
return this->operator()(s.source(), s.target());
}
Surface_patch operator()(const Ray_3& r) const
{
return clip_to_segment(r);
}
Surface_patch operator()(const Line_3& l) const
{
return clip_to_segment(l);
}
private:
/// Returns true if points \c a & \c b do not belong to the same subdomain
/// \c index is set to the surface index of subdomains f(a), f(b)
Surface_patch operator()(const Point_3& a, const Point_3& b) const
{
// If f(a) != f(b), then [a,b] intersects some surface. Here we consider
// [a,b] intersects surface_patch labelled <f(a),f(b)> (or <f(b),f(a)>).
    // It may be false; further refinement will improve precision
const Subdomain_index value_a = r_domain_.function_(a);
const Subdomain_index value_b = r_domain_.function_(b);
if ( value_a != value_b )
return Surface_patch(r_domain_.make_surface_index(value_a, value_b));
else
return Surface_patch();
}
/**
* Clips \c query to a segment \c s, and call operator()(s)
*/
template<typename Query>
Surface_patch clip_to_segment(const Query& query) const
{
typename cpp11::result_of<typename BGT::Intersect_3(Query, Iso_cuboid_3)>::type
clipped = CGAL::intersection(query, r_domain_.bbox_);
if(clipped)
#if CGAL_INTERSECTION_VERSION > 1
if(const Segment_3* s = boost::get<Segment_3>(&*clipped))
return this->operator()(*s);
#else
if(const Segment_3* s = object_cast<Segment_3>(&clipped))
return this->operator()(*s);
#endif
return Surface_patch();
}
private:
const Labeled_mesh_domain_3& r_domain_;
};
/// Returns Do_intersect_surface object
Do_intersect_surface do_intersect_surface_object() const
{
return Do_intersect_surface(*this);
}
/**
 * Returns a point in the intersection of the primitive \ccc{type}
 * with some boundary surface.
 * \ccc{Type1} is either \ccc{Segment_3}, \ccc{Ray_3} or \ccc{Line_3}.
 * The integer \ccc{dimension} is set to the dimension of the lowest
 * dimensional face in the input complex containing the returned point, and
 * \ccc{index} is set to the index to be stored at a mesh vertex lying
 * on this face.
 */
struct Construct_intersection
{
  Construct_intersection(const Labeled_mesh_domain_3& domain)
    : r_domain_(domain) {}

  /// Intersects a segment: bisects between the two endpoints directly.
  Intersection operator()(const Segment_3& s) const
  {
#ifndef CGAL_MESH_3_NO_LONGER_CALLS_DO_INTERSECT_3
    CGAL_precondition(r_domain_.do_intersect_surface_object()(s));
#endif // NOT CGAL_MESH_3_NO_LONGER_CALLS_DO_INTERSECT_3
    return this->operator()(s.source(),s.target());
  }

  /// Rays and lines are first clipped against the domain bounding box,
  /// then handled as segments.
  Intersection operator()(const Ray_3& r) const
  {
    return clip_to_segment(r);
  }

  Intersection operator()(const Line_3& l) const
  {
    return clip_to_segment(l);
  }

private:
  /**
   * Returns a point in the intersection of [a,b] with the surface
   * \c a must be the source point, and \c b the out point. It's important
   * because it drives bisection cuts.
   * Indeed, the returned point is the first intersection from \c [a,b]
   * with a subdomain surface.
   */
  Intersection operator()(const Point_3& a, const Point_3& b) const
  {
    // Functors
    typename BGT::Compute_squared_distance_3 squared_distance =
      BGT().compute_squared_distance_3_object();
    typename BGT::Construct_midpoint_3 midpoint =
      BGT().construct_midpoint_3_object();

    // Non const points
    Point_3 p1 = a;
    Point_3 p2 = b;
    Point_3 mid = midpoint(p1, p2);

    // Cannot be const: those values are modified below.
    Subdomain_index value_at_p1 = r_domain_.function_(p1);
    Subdomain_index value_at_p2 = r_domain_.function_(p2);
    // NOTE(review): the boolean second argument's semantics depend on the
    // Function type (not visible here) — confirm against its definition.
    Subdomain_index value_at_mid = r_domain_.function_(mid,true);

    // If both extremities are in the same subdomain,
    // there is no intersection.
    // This should not happen...
    if( value_at_p1 == value_at_p2 )
    {
      return Intersection();
    }

    // Construct the surface patch index and index from the values at 'a'
    // and 'b'. Even if the bissection find out a different pair of
    // values, the reported index will be constructed from the initial
    // values.
    const Surface_patch_index sp_index =
      r_domain_.make_surface_index(value_at_p1, value_at_p2);
    const Index index = r_domain_.index_from_surface_patch_index(sp_index);

    // Else lets find a point (by bisection)
    // Bisection ends when the point is near than error bound from surface
    while(true)
    {
      // If the two points are enough close, then we return midpoint
      if ( squared_distance(p1, p2) < r_domain_.squared_error_bound_ )
      {
        // Loop invariant: f(p1) != f(p2), so 'mid' straddles a surface.
        CGAL_assertion(value_at_p1 != value_at_p2);
        return Intersection(mid, index, 2);
      }

      // Else we must go on
      // Here we consider that p1(a) is the source point. Thus, we keep p1 and
      // change p2 if f(p1)!=f(p2).
      // That allows us to find the first intersection from a of [a,b] with
      // a surface.
      if ( value_at_p1 != value_at_mid )
      {
        p2 = mid;
        value_at_p2 = value_at_mid;
      }
      else
      {
        p1 = mid;
        value_at_p1 = value_at_mid;
      }

      mid = midpoint(p1, p2);
      value_at_mid = r_domain_.function_(mid,true);
    }
  }

  /// Clips \c query to a segment \c s, and call operator()(s)
  template<typename Query>
  Intersection clip_to_segment(const Query& query) const
  {
    typename cpp11::result_of<typename BGT::Intersect_3(Query, Iso_cuboid_3)>::type
      clipped = CGAL::intersection(query, r_domain_.bbox_);

    if(clipped)
#if CGAL_INTERSECTION_VERSION > 1
      if(const Segment_3* s = boost::get<Segment_3>(&*clipped))
        return this->operator()(*s);
#else
      if(const Segment_3* s = object_cast<Segment_3>(&clipped))
        return this->operator()(*s);
#endif

    // No intersection with the bounding box: empty result.
    return Intersection();
  }

private:
  const Labeled_mesh_domain_3& r_domain_;
};
/// Returns Construct_intersection object
/// (a functor bound to this domain, as required by the MeshDomain_3 concept).
Construct_intersection construct_intersection_object() const
{
  return Construct_intersection(*this);
}
/**
 * Returns the index to be stored in a vertex lying on the surface identified
 * by \c index.
 */
Index index_from_surface_patch_index(const Surface_patch_index& index) const
{ return Index(index); }

/**
 * Returns the index to be stored in a vertex lying in the subdomain
 * identified by \c index.
 */
Index index_from_subdomain_index(const Subdomain_index& index) const
{ return Index(index); }
/**
 * Returns the \c Surface_patch_index of the surface patch
 * where lies a vertex with dimension 2 and index \c index.
 */
// Index is a variant; extraction throws if it does not hold this alternative.
Surface_patch_index surface_patch_index(const Index& index) const
{ return boost::get<Surface_patch_index>(index); }

/**
 * Returns the index of the subdomain containing a vertex
 * with dimension 3 and index \c index.
 */
Subdomain_index subdomain_index(const Index& index) const
{ return boost::get<Subdomain_index>(index); }
// -----------------------------------
// Backward Compatibility
// -----------------------------------
// Older releases used "Surface_index" instead of "Surface_patch_index";
// these aliases keep existing client code compiling.
#ifndef CGAL_MESH_3_NO_DEPRECATED_SURFACE_INDEX
typedef Surface_patch_index Surface_index;

Index index_from_surface_index(const Surface_index& index) const
{ return index_from_surface_patch_index(index); }

Surface_index surface_index(const Index& index) const
{ return surface_patch_index(index); }
#endif // CGAL_MESH_3_NO_DEPRECATED_SURFACE_INDEX
// -----------------------------------
// End backward Compatibility
// -----------------------------------
private:
/// Builds the Surface_patch_index of the unordered subdomain pair
/// (\c i, \c j): the smaller subdomain index always comes first.
Surface_patch_index make_surface_index(const Subdomain_index i,
                                       const Subdomain_index j) const
{
  return (i < j) ? Surface_patch_index(i, j) : Surface_patch_index(j, i);
}
/// Returns the squared error bound derived from \c bbox and \c error:
/// (error * half_diagonal)^2, computed from squared quantities only.
FT squared_error_bound(const Iso_cuboid_3& bbox, const FT& error) const
{
  typename BGT::Compute_squared_distance_3 sq_distance =
    BGT().compute_squared_distance_3_object();
  const FT squared_diagonal = sq_distance((bbox.min)(), (bbox.max)());
  return squared_diagonal * error * error / 4;
}

/// Returns the squared error bound derived from \c sphere and \c error:
/// (error * radius)^2, computed from the squared radius only.
FT squared_error_bound(const Sphere_3& sphere, const FT& error) const
{
  typename BGT::Compute_squared_radius_3 sq_radius =
    BGT().compute_squared_radius_3_object();
  return sq_radius(sphere) * error * error;
}
/// Returns the bounding sphere of an Iso_cuboid_3
/// (the sphere through the two opposite corners of \c bbox).
Sphere_3 bounding_sphere(const Iso_cuboid_3& bbox) const
{
  typename BGT::Construct_sphere_3 make_sphere =
    BGT().construct_sphere_3_object();
  return make_sphere((bbox.min)(), (bbox.max)());
}

/// Returns an Iso_cuboid_3 spanning exactly the given Bbox_3.
Iso_cuboid_3 iso_cuboid(const Bbox_3& bbox)
{
  const Point_3 lower_corner(bbox.xmin(), bbox.ymin(), bbox.zmin());
  const Point_3 upper_corner(bbox.xmax(), bbox.ymax(), bbox.zmax());
  return Iso_cuboid_3(lower_corner, upper_corner);
}
protected:
/// Returns bounding box
const Iso_cuboid_3& bounding_box() const { return bbox_; }

private:
/// The function which answers subdomain queries
const Function function_;
/// The bounding box
const Iso_cuboid_3 bbox_;
/// The random number generator used by Construct_initial_points.
/// Owned by this object iff delete_rng_ is true (allocated in the
/// constructors when the caller passes none; presumably released in the
/// destructor — not shown in this chunk).
CGAL::Random* p_rng_;
bool delete_rng_;
/// Error bound relative to sphere radius
FT squared_error_bound_;

private:
// Disabled copy constructor & assignment operator
// (declared but never defined: the pre-C++11 idiom for non-copyable types)
typedef Labeled_mesh_domain_3<Function,BGT> Self;
Labeled_mesh_domain_3(const Self& src);
Self& operator=(const Self& src);

}; // end class Labeled_mesh_domain_3
//-------------------------------------------------------
// Method implementation
//-------------------------------------------------------

/// Constructor from a bounding sphere: the stored bounding box is the
/// iso-cuboid of the sphere's bbox, while the error bound is measured
/// against the sphere's radius.
template<class F, class BGT>
Labeled_mesh_domain_3<F,BGT>::Labeled_mesh_domain_3(
    const F& f,
    const Sphere_3& bounding_sphere,
    const FT& error_bound,
    CGAL::Random* p_rng)
  : function_(f)
  , bbox_(iso_cuboid(bounding_sphere.bbox()))
  , p_rng_(p_rng)
  , delete_rng_(false)
  , squared_error_bound_(squared_error_bound(bounding_sphere,error_bound))
{
  // TODO : CGAL_ASSERT(0 < f(bounding_sphere.get_center()) ) ?
  // Fall back on an owned, deterministically seeded generator when the
  // caller does not provide one (seed 0 keeps runs reproducible).
  if(!p_rng_)
  {
    p_rng_ = new CGAL::Random(0);
    delete_rng_ = true;
  }
}
/// Constructor from a Bbox_3: the error bound is measured against the
/// diagonal of the resulting iso-cuboid.
template<class F, class BGT>
Labeled_mesh_domain_3<F,BGT>::Labeled_mesh_domain_3(
    const F& f,
    const Bbox_3& bbox,
    const FT& error_bound,
    CGAL::Random* p_rng)
  : function_(f)
  , bbox_(iso_cuboid(bbox))
  , p_rng_(p_rng)
  , delete_rng_(false)
  , squared_error_bound_(squared_error_bound(bbox_,error_bound))
{
  // TODO : CGAL_ASSERT(0 < f(bounding_sphere.get_center()) ) ?
  // Fall back on an owned, deterministically seeded generator when the
  // caller does not provide one (seed 0 keeps runs reproducible).
  if(!p_rng_)
  {
    p_rng_ = new CGAL::Random(0);
    delete_rng_ = true;
  }
}
/// Generates \c nb_points initial surface points by shooting random
/// segments from a (moving) center towards random points on the domain's
/// bounding sphere and keeping the first surface intersection of each
/// segment that hits a surface.
template<class F, class BGT>
template<class OutputIterator>
OutputIterator
Labeled_mesh_domain_3<F,BGT>::Construct_initial_points::operator()(
    OutputIterator pts,
    const int nb_points) const
{
  // Create point_iterator on and in bounding_sphere
  typedef Random_points_on_sphere_3<Point_3> Random_points_on_sphere_3;
  typedef Random_points_in_sphere_3<Point_3> Random_points_in_sphere_3;

  const FT squared_radius = BGT().compute_squared_radius_3_object()(
      r_domain_.bounding_sphere(r_domain_.bbox_));
  const double radius = std::sqrt(CGAL::to_double(squared_radius));

  // Both iterators share the domain's generator, so runs are reproducible.
  CGAL::Random& rng = *(r_domain_.p_rng_);
  Random_points_on_sphere_3 random_point_on_sphere(radius, rng);
  Random_points_in_sphere_3 random_point_in_sphere(radius, rng);

  // Get some functors
  typename BGT::Construct_segment_3 segment_3 =
    BGT().construct_segment_3_object();
  typename BGT::Construct_vector_3 vector_3 =
    BGT().construct_vector_3_object();
  typename BGT::Construct_translated_point_3 translate =
    BGT().construct_translated_point_3_object();
  typename BGT::Construct_center_3 center = BGT().construct_center_3_object();

  // Get translation from origin to sphere center
  Point_3 center_pt = center(r_domain_.bounding_sphere(r_domain_.bbox_));
  const Vector_3 sphere_translation = vector_3(CGAL::ORIGIN, center_pt);

  // Create nb_point points
  int n = nb_points;
#ifdef CGAL_MESH_3_VERBOSE
  std::cerr << "construct initial points:\n";
#endif
  while ( 0 != n )
  {
    // Get a random segment
    const Point_3 random_point = translate(*random_point_on_sphere,
                                           sphere_translation);
    const Segment_3 random_seg = segment_3(center_pt, random_point);

    // Add the intersection to the output if it exists
    Surface_patch surface = r_domain_.do_intersect_surface_object()(random_seg);
    if ( surface )
    {
      const Point_3 intersect_pt = CGAL::cpp11::get<0>(
          r_domain_.construct_intersection_object()(random_seg));
      *pts++ = std::make_pair(intersect_pt,
                              r_domain_.index_from_surface_patch_index(*surface));
      --n;
#ifdef CGAL_MESH_3_VERBOSE
      std::cerr << boost::format("\r \r"
                                 "%1%/%2% initial point(s) found...")
                   % (nb_points - n)
                   % nb_points;
#endif
    }
    else
    {
      // Get a new random point into sphere as center of object
      // It may be necessary if the center of the domain is empty, e.g. torus
      // In general case, it is good for input point dispersion
      ++random_point_in_sphere;
      center_pt = translate(*random_point_in_sphere, sphere_translation);
    }
    ++random_point_on_sphere;
  }
#ifdef CGAL_MESH_3_VERBOSE
  std::cerr << "\n";
#endif
  return pts;
}

} // end namespace Mesh_3
} // end namespace CGAL

#endif // LABELLED_MESH_TRAITS_3_H_
|
module.exports={A:{A:{"2":"K C G E A B YB"},B:{"2":"D w Z I M H"},C:{"2":"WB AB F J K C G E A B D w Z I M H N O P Q R S T U V W X Y y a b UB OB","132":"0 1 3 5 6 7 8 c d e f L h i j k l m n o p q r s t u v z x"},D:{"2":"0 1 3 5 6 7 8 F J K C G E A B D w Z I M H N O P Q R S T U V W X Y y a b c d e f L h i j k l m n o p q r s t u v z x BB IB DB FB ZB GB"},E:{"2":"F J K C G E A B HB CB JB KB LB MB NB g PB"},F:{"2":"4 9 E B D I M H N O P Q R S T U V W X Y y a b c d e f L h i j k l m n o p q r s t u v QB RB SB TB g VB"},G:{"2":"2 G CB XB EB aB bB cB dB eB fB gB hB iB"},H:{"2":"jB"},I:{"2":"2 AB F BB kB lB mB nB oB pB"},J:{"2":"C A"},K:{"2":"4 9 A B D L g"},L:{"2":"DB"},M:{"132":"x"},N:{"2":"A B"},O:{"2":"qB"},P:{"2":"F J rB"},Q:{"2":"sB"},R:{"2":"tB"}},B:4,C:"CSS Counter Styles"};
|
#!/usr/bin/env python
###########################
# This code block is a HACK (!), but is necessary to avoid code duplication. Do NOT alter these lines.
# It loads the shared setup helpers from the sibling `core` package source tree.
import os
from setuptools import setup
import importlib.util
filepath = os.path.abspath(os.path.dirname(__file__))
filepath_import = os.path.join(filepath, '..', 'core', 'src', 'autogluon', 'core', '_setup_utils.py')
spec = importlib.util.spec_from_file_location("ag_min_dependencies", filepath_import)
ag = importlib.util.module_from_spec(spec)
# Identical to `from autogluon.core import _setup_utils as ag`, but works without `autogluon.core` being installed.
spec.loader.exec_module(ag)
###########################
# Resolve the shared autogluon version and pin the sibling autogluon
# packages to exactly that version.
version = ag.load_version_file()
version = ag.update_version(version)
submodule = 'tabular'
install_requires = [
    # version ranges added in ag.get_dependency_version_ranges()
    'numpy',
    'scipy',
    'pandas',
    'scikit-learn',
    'psutil',
    'networkx>=2.3,<3.0',
    f'autogluon.core=={version}',
    f'autogluon.features=={version}',
]
# Optional model-backend dependencies, installable as e.g.
# `pip install autogluon.tabular[lightgbm]`.
extras_require = {
    'lightgbm': [
        'lightgbm>=3.3,<4.0',
    ],
    'catboost': [
        'catboost>=1.0,<1.1',
    ],
    'xgboost': [
        'xgboost>=1.4,<1.5',
    ],
    'fastai': [
        'torch>=1.0,<2.0',
        'fastai>=2.3.1,<3.0',
    ],
    # Intel scikit-learn extension; not part of 'all' (see TODO below).
    'skex': [
        'scikit-learn-intelex<=2021.3',
    ],
}
# The 'all' extra is the deduplicated union of the model-backend extras.
# TODO: Consider adding 'skex' to 'all'
_all_groups = ('lightgbm', 'catboost', 'xgboost', 'fastai')
all_requires = list({dep for group in _all_groups
                     for dep in extras_require[group]})
extras_require['all'] = all_requires
install_requires = ag.get_dependency_version_ranges(install_requires)
if __name__ == '__main__':
    # Write the generated version module first, then hand everything to
    # setuptools with the shared per-submodule defaults.
    ag.create_version_file(version=version, submodule=submodule)
    setup(
        install_requires=install_requires,
        extras_require=extras_require,
        **ag.default_setup_args(version=version, submodule=submodule),
    )
|
# Copyright (C) 2009-2014 Wander Lairson Costa
#
# The following terms apply to all files associated
# with the software unless explicitly disclaimed in individual files.
#
# The authors hereby grant permission to use, copy, modify, distribute,
# and license this software and its documentation for any purpose, provided
# that existing copyright notices are retained in all copies and that this
# notice is included verbatim in any distributions. No written agreement,
# license, or royalty fee is required for any of the authorized uses.
# Modifications to this software may be copyrighted by their authors
# and need not follow the licensing terms described here, provided that
# the new terms are clearly indicated on the first page of each file where
# they apply.
#
# IN NO EVENT SHALL THE AUTHORS OR DISTRIBUTORS BE LIABLE TO ANY PARTY
# FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
# ARISING OUT OF THE USE OF THIS SOFTWARE, ITS DOCUMENTATION, OR ANY
# DERIVATIVES THEREOF, EVEN IF THE AUTHORS HAVE BEEN ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# THE AUTHORS AND DISTRIBUTORS SPECIFICALLY DISCLAIM ANY WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. THIS SOFTWARE
# IS PROVIDED ON AN "AS IS" BASIS, AND THE AUTHORS AND DISTRIBUTORS HAVE
# NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR
# MODIFICATIONS.
# Integration tests
import utils
import unittest
import usb.core
import devinfo
import usb._interop
from usb._debug import methodtrace
import usb.util
import usb.backend.libusb0 as libusb0
import usb.backend.libusb1 as libusb1
import usb.backend.openusb as openusb
import time
import sys
def make_data_list(length = 8):
    """Return the sample payloads (arrays, lists and strings) used by the
    read/write round-trip tests, each of ``length`` bytes.

    Fix: the original returned ``utils.get_str_data1`` twice (copy-paste
    slip), so the second string variant was never exercised.
    """
    return (utils.get_array_data1(length),
            utils.get_array_data2(length),
            utils.get_list_data1(length),
            utils.get_list_data2(length),
            utils.get_str_data1(length),
            utils.get_str_data2(length))
class DeviceTest(unittest.TestCase):
    """Integration tests for ``usb.core.Device`` against the test firmware.

    Each instance wraps one discovered device; ``runTest`` drives every
    check and always disposes of claimed resources afterwards.

    Fixes relative to the original: deprecated ``assertEquals`` replaced by
    ``assertEqual`` (the alias was removed in Python 3.12); duplicated
    redundant length assertions removed in ``test_write_read``; the zip loop
    variable renamed so it no longer shadows the per-payload ``length``.
    """

    @methodtrace(utils.logger)
    def __init__(self, dev):
        unittest.TestCase.__init__(self)
        self.dev = dev

    @methodtrace(utils.logger)
    def runTest(self):
        try:
            self.test_attributes()
            self.test_timeout()
            self.test_set_configuration()
            self.test_set_interface_altsetting()
            self.test_write_read()
            self.test_write_array()
            self.test_ctrl_transfer()
            self.test_clear_halt()
            #self.test_reset()
        finally:
            # Release interfaces/handles even if an assertion failed.
            usb.util.dispose_resources(self.dev)

    @methodtrace(utils.logger)
    def test_attributes(self):
        """Check every field of the device descriptor against devinfo."""
        self.assertEqual(self.dev.bLength, 18)
        self.assertEqual(self.dev.bDescriptorType, usb.util.DESC_TYPE_DEVICE)
        self.assertEqual(self.dev.bcdUSB, 0x0200)
        self.assertEqual(self.dev.idVendor, devinfo.ID_VENDOR)
        self.assertEqual(self.dev.idProduct, devinfo.ID_PRODUCT)
        self.assertEqual(self.dev.bcdDevice, 0x0001)
        self.assertEqual(self.dev.iManufacturer, 0x01)
        self.assertEqual(self.dev.iProduct, 0x02)
        self.assertEqual(self.dev.iSerialNumber, 0x03)
        self.assertEqual(self.dev.bNumConfigurations, 0x01)
        self.assertEqual(self.dev.bMaxPacketSize0, 8)
        self.assertEqual(self.dev.bDeviceClass, 0x00)
        self.assertEqual(self.dev.bDeviceSubClass, 0x00)
        self.assertEqual(self.dev.bDeviceProtocol, 0x00)

    @methodtrace(utils.logger)
    def test_timeout(self):
        """default_timeout is settable, restorable, and rejects negatives."""
        def set_invalid_timeout():
            self.dev.default_timeout = -1
        tmo = self.dev.default_timeout
        self.dev.default_timeout = 1
        self.assertEqual(self.dev.default_timeout, 1)
        self.dev.default_timeout = tmo
        self.assertEqual(self.dev.default_timeout, tmo)
        self.assertRaises(ValueError, set_invalid_timeout)
        # The rejected assignment must not have clobbered the old value.
        self.assertEqual(self.dev.default_timeout, tmo)

    @methodtrace(utils.logger)
    def test_set_configuration(self):
        """set_configuration works with and without an explicit value."""
        cfg = self.dev[0].bConfigurationValue
        self.dev.set_configuration(cfg)
        self.dev.set_configuration()
        self.assertEqual(cfg, self.dev.get_active_configuration().bConfigurationValue)

    @methodtrace(utils.logger)
    def test_set_interface_altsetting(self):
        """set_interface_altsetting works with explicit and default args."""
        intf = self.dev.get_active_configuration()[(0,0)]
        self.dev.set_interface_altsetting(intf.bInterfaceNumber, intf.bAlternateSetting)
        self.dev.set_interface_altsetting()

    @methodtrace(utils.logger)
    def test_reset(self):
        """Reset the device and wait for it to re-enumerate."""
        self.dev.reset()
        utils.delay_after_reset()

    @methodtrace(utils.logger)
    def test_write_read(self):
        """Loop payloads through bulk, interrupt and (optionally) iso endpoints."""
        # NOTE(review): the INTF_* constants double as indices into eps and
        # data_len below, so they are assumed to be 0, 1, 2 — confirm in devinfo.
        altsettings = [devinfo.INTF_BULK, devinfo.INTF_INTR]
        eps = [devinfo.EP_BULK, devinfo.EP_INTR]
        data_len = [8, 8]

        if utils.is_iso_test_allowed():
            altsettings.append(devinfo.INTF_ISO)
            eps.append(devinfo.EP_ISO)
            data_len.append(64)

        def delay(alt):
            # Hack to avoid two consecutive isochronous transfers to fail
            if alt == devinfo.INTF_ISO and utils.is_windows():
                time.sleep(0.5)

        for alt, dlen in zip(altsettings, data_len):
            self.dev.set_interface_altsetting(0, alt)
            for data in make_data_list(dlen):
                adata = utils.to_array(data)
                length = utils.data_len(data)
                buff = usb.util.create_buffer(length)

                # Round trip 1: write, then read into a fresh array.
                try:
                    ret = self.dev.write(eps[alt], data)
                except NotImplementedError:
                    continue
                self.assertEqual(
                    ret,
                    length,
                    'Failed to write data: ' + \
                        str(data) + ', in interface = ' + \
                        str(alt))
                try:
                    ret = self.dev.read(eps[alt] | usb.util.ENDPOINT_IN, length)
                except NotImplementedError:
                    continue
                self.assertTrue(
                    utils.array_equals(ret, adata),
                    str(ret) + ' != ' + \
                        str(adata) + ', in interface = ' + \
                        str(alt))
                delay(alt)

                # Round trip 2: write, then read into a caller-supplied buffer.
                try:
                    ret = self.dev.write(eps[alt], data)
                except NotImplementedError:
                    continue
                self.assertEqual(
                    ret,
                    length,
                    'Failed to write data: ' + \
                        str(data) + ', in interface = ' + \
                        str(alt))
                try:
                    ret = self.dev.read(eps[alt] | usb.util.ENDPOINT_IN, buff)
                except NotImplementedError:
                    continue
                self.assertEqual(ret, length)
                self.assertTrue(
                    utils.array_equals(buff, adata),
                    str(buff) + ' != ' + \
                        str(adata) + ', in interface = ' + \
                        str(alt))
                delay(alt)

    @methodtrace(utils.logger)
    def test_write_array(self):
        """Write an array payload and read it back over the bulk endpoint."""
        a = usb._interop.as_array('test')
        self.dev.set_interface_altsetting(0, devinfo.INTF_BULK)
        self.assertEqual(self.dev.write(devinfo.EP_BULK, a), len(a))
        self.assertTrue(utils.array_equals(
            self.dev.read(devinfo.EP_BULK | usb.util.ENDPOINT_IN, len(a)),
            a))

    @methodtrace(utils.logger)
    def test_ctrl_transfer(self):
        """Echo every sample payload through the vendor control requests."""
        for data in make_data_list():
            length = utils.data_len(data)
            adata = utils.to_array(data)
            # OUT (0x40 = vendor, host-to-device) then IN echo into an array.
            ret = self.dev.ctrl_transfer(
                0x40,
                devinfo.PICFW_SET_VENDOR_BUFFER,
                0,
                0,
                data)
            self.assertEqual(ret,
                             length,
                             'Failed to write data: ' + str(data))
            ret = utils.to_array(self.dev.ctrl_transfer(
                0xC0,
                devinfo.PICFW_GET_VENDOR_BUFFER,
                0,
                0,
                length))
            self.assertTrue(utils.array_equals(ret, adata),
                            str(ret) + ' != ' + str(adata))

            # Same round trip, reading into a caller-supplied buffer.
            buff = usb.util.create_buffer(length)
            ret = self.dev.ctrl_transfer(
                0x40,
                devinfo.PICFW_SET_VENDOR_BUFFER,
                0,
                0,
                data)
            self.assertEqual(ret,
                             length,
                             'Failed to write data: ' + str(data))
            ret = self.dev.ctrl_transfer(
                0xC0,
                devinfo.PICFW_GET_VENDOR_BUFFER,
                0,
                0,
                buff)
            self.assertEqual(ret, length)
            self.assertTrue(utils.array_equals(buff, adata),
                            str(buff) + ' != ' + str(adata))

    @methodtrace(utils.logger)
    def test_clear_halt(self):
        """clear_halt succeeds on both the OUT and IN endpoints."""
        self.dev.set_interface_altsetting(0, 0)
        self.dev.clear_halt(0x01)
        self.dev.clear_halt(0x81)
class ConfigurationTest(unittest.TestCase):
    """Integration tests for the active configuration descriptor."""

    @methodtrace(utils.logger)
    def __init__(self, dev):
        unittest.TestCase.__init__(self)
        # First (and only) configuration of the test device.
        self.cfg = dev[0]

    @methodtrace(utils.logger)
    def runTest(self):
        try:
            self.test_attributes()
            self.test_set()
        finally:
            # Always release the device, even on assertion failure.
            usb.util.dispose_resources(self.cfg.device)

    @methodtrace(utils.logger)
    def test_attributes(self):
        """Check every field of the configuration descriptor."""
        self.assertEqual(self.cfg.bLength, 9)
        self.assertEqual(self.cfg.bDescriptorType, usb.util.DESC_TYPE_CONFIG)
        self.assertEqual(self.cfg.wTotalLength, 78)
        self.assertEqual(self.cfg.bNumInterfaces, 0x01)
        self.assertEqual(self.cfg.bConfigurationValue, 0x01)
        self.assertEqual(self.cfg.iConfiguration, 0x00)
        self.assertEqual(self.cfg.bmAttributes, 0xC0)
        self.assertEqual(self.cfg.bMaxPower, 50)

    @methodtrace(utils.logger)
    def test_set(self):
        """Selecting the configuration through the descriptor object works."""
        self.cfg.set()
class InterfaceTest(unittest.TestCase):
    """Integration tests for the interface descriptor of the test device."""

    @methodtrace(utils.logger)
    def __init__(self, dev):
        unittest.TestCase.__init__(self)
        self.dev = dev
        # Interface 0, alternate setting 0 of configuration 0.
        self.intf = dev[0][(0,0)]

    @methodtrace(utils.logger)
    def runTest(self):
        try:
            # A configuration must be active before touching the interface.
            self.dev.set_configuration()
            self.test_attributes()
            self.test_set_altsetting()
        finally:
            usb.util.dispose_resources(self.intf.device)

    @methodtrace(utils.logger)
    def test_attributes(self):
        """Check every field of the interface descriptor."""
        self.assertEqual(self.intf.bLength, 9)
        self.assertEqual(self.intf.bDescriptorType, usb.util.DESC_TYPE_INTERFACE)
        self.assertEqual(self.intf.bInterfaceNumber, 0)
        self.assertEqual(self.intf.bAlternateSetting, 0)
        self.assertEqual(self.intf.bNumEndpoints, 2)
        self.assertEqual(self.intf.bInterfaceClass, 0x00)
        self.assertEqual(self.intf.bInterfaceSubClass, 0x00)
        self.assertEqual(self.intf.bInterfaceProtocol, 0x00)
        self.assertEqual(self.intf.iInterface, 0x00)

    @methodtrace(utils.logger)
    def test_set_altsetting(self):
        """Re-selecting the current alternate setting succeeds."""
        self.intf.set_altsetting()
class EndpointTest(unittest.TestCase):
    """Integration tests driving transfers through Endpoint objects."""

    @methodtrace(utils.logger)
    def __init__(self, dev):
        unittest.TestCase.__init__(self)
        self.dev = dev
        intf = dev[0][(0,0)]
        # Bulk OUT (0x01) and its matching IN (0x81) endpoint.
        self.ep_out = usb.util.find_descriptor(intf, bEndpointAddress=0x01)
        self.ep_in = usb.util.find_descriptor(intf, bEndpointAddress=0x81)

    @methodtrace(utils.logger)
    def runTest(self):
        try:
            self.dev.set_configuration()
            self.test_attributes()
            self.test_write_read()
        finally:
            usb.util.dispose_resources(self.dev)

    @methodtrace(utils.logger)
    def test_attributes(self):
        """Check every field of the OUT endpoint descriptor."""
        self.assertEqual(self.ep_out.bLength, 7)
        self.assertEqual(self.ep_out.bDescriptorType, usb.util.DESC_TYPE_ENDPOINT)
        self.assertEqual(self.ep_out.bEndpointAddress, 0x01)
        self.assertEqual(self.ep_out.bmAttributes, 0x02)
        self.assertEqual(self.ep_out.wMaxPacketSize, 16)
        self.assertEqual(self.ep_out.bInterval, 0)

    @methodtrace(utils.logger)
    def test_write_read(self):
        """Round-trip every sample payload through the endpoint pair."""
        self.dev.set_interface_altsetting(0, 0)
        for data in make_data_list():
            adata = utils.to_array(data)
            length = utils.data_len(data)
            buff = usb.util.create_buffer(length)
            # Round trip 1: read back into a fresh array.
            ret = self.ep_out.write(data)
            self.assertEqual(ret, length, 'Failed to write data: ' + str(data))
            ret = self.ep_in.read(length)
            self.assertTrue(utils.array_equals(ret, adata), str(ret) + ' != ' + str(adata))
            # Round trip 2: read into a caller-supplied buffer.
            ret = self.ep_out.write(data)
            self.assertEqual(ret, length, 'Failed to write data: ' + str(data))
            ret = self.ep_in.read(buff)
            self.assertEqual(ret, length)
            self.assertTrue(utils.array_equals(buff, adata), str(buff) + ' != ' + str(adata))
def get_suite():
    """Build a suite covering every backend for which test hardware is found."""
    backend_modules = (libusb1, libusb0, openusb)
    case_classes = (DeviceTest, ConfigurationTest, InterfaceTest, EndpointTest)
    suite = unittest.TestSuite()
    for module in backend_modules:
        backend = module.get_backend()
        if backend is None:
            continue
        device = utils.find_my_device(backend)
        if device is None:
            utils.logger.warning('Test hardware not found for backend %s', module.__name__)
            continue
        for case_class in case_classes:
            utils.logger.info('Adding %s(%s) to test suite...', case_class.__name__, module.__name__)
            suite.addTest(case_class(device))
    return suite
if __name__ == '__main__':
    # Discover hardware on every available backend and run the full suite.
    utils.run_tests(get_suite())
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import collections
from oslo_config import cfg
from octavia.common import constants
from octavia.tests.unit.common.sample_configs import sample_certs
CONF = cfg.CONF
def sample_amphora_tuple(id='sample_amphora_id_1', lb_network_ip='10.0.1.1',
                         vrrp_ip='10.1.1.1', ha_ip='192.168.10.1',
                         vrrp_port_id='1234', ha_port_id='1234', role=None,
                         status='ACTIVE', vrrp_interface=None,
                         vrrp_priority=None, api_version='0.5'):
    """Build a sample amphora namedtuple; every field can be overridden."""
    amphora_type = collections.namedtuple(
        'amphora', 'id, lb_network_ip, vrrp_ip, ha_ip, vrrp_port_id, '
                   'ha_port_id, role, status, vrrp_interface,'
                   'vrrp_priority, api_version')
    # Positional construction in declared field order.
    return amphora_type(id, lb_network_ip, vrrp_ip, ha_ip, vrrp_port_id,
                        ha_port_id, role, status, vrrp_interface,
                        vrrp_priority, api_version)
# "RET_*" fixtures below mirror the dict shape produced by the config
# renderer: persistence, health monitors, members, then pools.
RET_PERSISTENCE = {
    'type': 'HTTP_COOKIE',
    'cookie_name': None}

RET_MONITOR_1 = {
    'id': 'sample_monitor_id_1',
    'type': 'HTTP',
    'delay': 30,
    'timeout': 31,
    'fall_threshold': 3,
    'rise_threshold': 2,
    'http_method': 'GET',
    'url_path': '/index.html',
    'expected_codes': '418',
    'enabled': True,
    'http_version': 1.0,
    'domain_name': None}

RET_MONITOR_2 = {
    'id': 'sample_monitor_id_2',
    'type': 'HTTP',
    'delay': 30,
    'timeout': 31,
    'fall_threshold': 3,
    'rise_threshold': 2,
    'http_method': 'GET',
    'url_path': '/healthmon.html',
    'expected_codes': '418',
    'enabled': True,
    'http_version': 1.0,
    'domain_name': None}

RET_MEMBER_1 = {
    'id': 'sample_member_id_1',
    'address': '10.0.0.99',
    'protocol_port': 82,
    'weight': 13,
    'subnet_id': '10.0.0.1/24',
    'enabled': True,
    'operating_status': 'ACTIVE',
    'monitor_address': None,
    'monitor_port': None,
    'backup': False}

RET_MEMBER_2 = {
    'id': 'sample_member_id_2',
    'address': '10.0.0.98',
    'protocol_port': 82,
    'weight': 13,
    'subnet_id': '10.0.0.1/24',
    'enabled': True,
    'operating_status': 'ACTIVE',
    'monitor_address': None,
    'monitor_port': None,
    'backup': False}

RET_MEMBER_3 = {
    'id': 'sample_member_id_3',
    'address': '10.0.0.97',
    'protocol_port': 82,
    'weight': 13,
    'subnet_id': '10.0.0.1/24',
    'enabled': True,
    'operating_status': 'ACTIVE',
    'monitor_address': None,
    'monitor_port': None,
    'backup': False}

RET_POOL_1 = {
    'id': 'sample_pool_id_1',
    'protocol': 'http',
    'lb_algorithm': 'roundrobin',
    'members': [RET_MEMBER_1, RET_MEMBER_2],
    'health_monitor': RET_MONITOR_1,
    'session_persistence': RET_PERSISTENCE,
    'enabled': True,
    'operating_status': 'ACTIVE',
    'stick_size': '10k',
    constants.HTTP_REUSE: False,
    'ca_tls_path': '',
    'crl_path': '',
    'tls_enabled': False}
RET_POOL_2 = {
    'id': 'sample_pool_id_2',
    'protocol': 'http',
    'lb_algorithm': 'roundrobin',
    'members': [RET_MEMBER_3],
    'health_monitor': RET_MONITOR_2,
    'session_persistence': RET_PERSISTENCE,
    'enabled': True,
    'operating_status': 'ACTIVE',
    'stick_size': '10k',
    constants.HTTP_REUSE: False,
    'ca_tls_path': '',
    'crl_path': '',
    'tls_enabled': False}

# TLS certificate containers: one default plus two SNI certificates.
RET_DEF_TLS_CONT = {'id': 'cont_id_1', 'allencompassingpem': 'imapem',
                    'primary_cn': 'FakeCn'}
RET_SNI_CONT_1 = {'id': 'cont_id_2', 'allencompassingpem': 'imapem2',
                  'primary_cn': 'FakeCn'}
RET_SNI_CONT_2 = {'id': 'cont_id_3', 'allencompassingpem': 'imapem3',
                  'primary_cn': 'FakeCn2'}
# L7 rule fixtures: one per rule type (PATH, HEADER, COOKIE, FILE_TYPE,
# HOST_NAME enabled, HOST_NAME disabled).
RET_L7RULE_1 = {
    'id': 'sample_l7rule_id_1',
    'type': constants.L7RULE_TYPE_PATH,
    'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
    'key': None,
    'value': '/api',
    'invert': False,
    'enabled': True}

RET_L7RULE_2 = {
    'id': 'sample_l7rule_id_2',
    'type': constants.L7RULE_TYPE_HEADER,
    'compare_type': constants.L7RULE_COMPARE_TYPE_CONTAINS,
    'key': 'Some-header',
    'value': 'This\\ string\\\\\\ with\\ stuff',
    'invert': True,
    'enabled': True}

RET_L7RULE_3 = {
    'id': 'sample_l7rule_id_3',
    'type': constants.L7RULE_TYPE_COOKIE,
    'compare_type': constants.L7RULE_COMPARE_TYPE_REGEX,
    'key': 'some-cookie',
    'value': 'this.*|that',
    'invert': False,
    'enabled': True}

RET_L7RULE_4 = {
    'id': 'sample_l7rule_id_4',
    'type': constants.L7RULE_TYPE_FILE_TYPE,
    'compare_type': constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
    'key': None,
    'value': 'jpg',
    'invert': False,
    'enabled': True}

RET_L7RULE_5 = {
    'id': 'sample_l7rule_id_5',
    'type': constants.L7RULE_TYPE_HOST_NAME,
    'compare_type': constants.L7RULE_COMPARE_TYPE_ENDS_WITH,
    'key': None,
    'value': '.example.com',
    'invert': False,
    'enabled': True}

RET_L7RULE_6 = {
    'id': 'sample_l7rule_id_6',
    'type': constants.L7RULE_TYPE_HOST_NAME,
    'compare_type': constants.L7RULE_COMPARE_TYPE_ENDS_WITH,
    'key': None,
    'value': '.example.com',
    'invert': False,
    'enabled': False}
# L7 policy fixtures covering every action: redirect-to-pool, redirect-to-URL
# (with and without an HTTP code), reject (with/without rules, disabled), and
# redirect-prefix.
RET_L7POLICY_1 = {
    'id': 'sample_l7policy_id_1',
    'action': constants.L7POLICY_ACTION_REDIRECT_TO_POOL,
    'redirect_pool': RET_POOL_2,
    'redirect_url': None,
    'redirect_prefix': None,
    'enabled': True,
    'l7rules': [RET_L7RULE_1],
    'redirect_http_code': None}

RET_L7POLICY_2 = {
    'id': 'sample_l7policy_id_2',
    'action': constants.L7POLICY_ACTION_REDIRECT_TO_URL,
    'redirect_pool': None,
    'redirect_url': 'http://www.example.com',
    'redirect_prefix': None,
    'enabled': True,
    'l7rules': [RET_L7RULE_2, RET_L7RULE_3],
    'redirect_http_code': 302}

RET_L7POLICY_3 = {
    'id': 'sample_l7policy_id_3',
    'action': constants.L7POLICY_ACTION_REJECT,
    'redirect_pool': None,
    'redirect_url': None,
    'redirect_prefix': None,
    'enabled': True,
    'l7rules': [RET_L7RULE_4, RET_L7RULE_5],
    'redirect_http_code': None}

RET_L7POLICY_4 = {
    'id': 'sample_l7policy_id_4',
    'action': constants.L7POLICY_ACTION_REJECT,
    'redirect_pool': None,
    'redirect_url': None,
    'redirect_prefix': None,
    'enabled': True,
    'l7rules': [],
    'redirect_http_code': None}

RET_L7POLICY_5 = {
    'id': 'sample_l7policy_id_5',
    'action': constants.L7POLICY_ACTION_REJECT,
    'redirect_pool': None,
    'redirect_url': None,
    'redirect_prefix': None,
    'enabled': False,
    'l7rules': [RET_L7RULE_5],
    'redirect_http_code': None}

RET_L7POLICY_6 = {
    'id': 'sample_l7policy_id_6',
    'action': constants.L7POLICY_ACTION_REJECT,
    'redirect_pool': None,
    'redirect_url': None,
    'redirect_prefix': None,
    'enabled': True,
    'l7rules': [],
    'redirect_http_code': None}

RET_L7POLICY_7 = {
    'id': 'sample_l7policy_id_7',
    'action': constants.L7POLICY_ACTION_REDIRECT_PREFIX,
    'redirect_pool': None,
    'redirect_url': None,
    'redirect_prefix': 'https://example.com',
    'enabled': True,
    'l7rules': [RET_L7RULE_2, RET_L7RULE_3],
    'redirect_http_code': 302}

RET_L7POLICY_8 = {
    'id': 'sample_l7policy_id_8',
    'action': constants.L7POLICY_ACTION_REDIRECT_TO_URL,
    'redirect_pool': None,
    'redirect_url': 'http://www.example.com',
    'redirect_prefix': None,
    'enabled': True,
    'l7rules': [RET_L7RULE_2, RET_L7RULE_3],
    'redirect_http_code': None}
# Plain HTTP listener fixture (no L7 policies).
RET_LISTENER = {
    'id': 'sample_listener_id_1',
    'protocol_port': '80',
    'protocol': 'HTTP',
    'protocol_mode': 'http',
    'default_pool': RET_POOL_1,
    'connection_limit': constants.HAPROXY_MAX_MAXCONN,
    'amphorae': [sample_amphora_tuple()],
    'peer_port': 1024,
    'topology': 'SINGLE',
    'pools': [RET_POOL_1],
    'l7policies': [],
    'enabled': True,
    'insert_headers': {},
    'timeout_client_data': 50000,
    'timeout_member_connect': 5000,
    'timeout_member_data': 50000,
    'timeout_tcp_inspect': 0,
    }

# Same listener with both pools and the full set of enabled L7 policies.
RET_LISTENER_L7 = {
    'id': 'sample_listener_id_1',
    'protocol_port': '80',
    'protocol': 'HTTP',
    'protocol_mode': 'http',
    'default_pool': RET_POOL_1,
    'connection_limit': constants.HAPROXY_MAX_MAXCONN,
    'amphorae': [sample_amphora_tuple()],
    'peer_port': 1024,
    'topology': 'SINGLE',
    'pools': [RET_POOL_1, RET_POOL_2],
    'l7policies': [RET_L7POLICY_1, RET_L7POLICY_2, RET_L7POLICY_3,
                   RET_L7POLICY_4, RET_L7POLICY_5, RET_L7POLICY_6,
                   RET_L7POLICY_7],
    'enabled': True,
    'insert_headers': {},
    'timeout_client_data': 50000,
    'timeout_member_connect': 5000,
    'timeout_member_data': 50000,
    'timeout_tcp_inspect': 0,
    }
# TERMINATED_HTTPS listener with a single (default) certificate.
RET_LISTENER_TLS = {
    'id': 'sample_listener_id_1',
    'protocol_port': '443',
    'protocol': 'TERMINATED_HTTPS',
    'protocol_mode': 'http',
    'default_pool': RET_POOL_1,
    'connection_limit': constants.HAPROXY_MAX_MAXCONN,
    'tls_certificate_id': 'cont_id_1',
    'default_tls_path': '/etc/ssl/sample_loadbalancer_id_1/fakeCN.pem',
    'default_tls_container': RET_DEF_TLS_CONT,
    'pools': [RET_POOL_1],
    'l7policies': [],
    'enabled': True,
    'insert_headers': {}}
# TERMINATED_HTTPS listener with a default certificate plus two SNI
# certificates.
RET_LISTENER_TLS_SNI = {
    'id': 'sample_listener_id_1',
    'protocol_port': '443',
    # NOTE(review): this dict originally listed the key 'protocol' twice
    # ('http' followed by 'TERMINATED_HTTPS'), so the first entry was
    # silently discarded. Mirroring RET_LISTENER_TLS, the HAProxy mode
    # belongs under 'protocol_mode'.
    'protocol_mode': 'http',
    'protocol': 'TERMINATED_HTTPS',
    'default_pool': RET_POOL_1,
    'connection_limit': constants.HAPROXY_MAX_MAXCONN,
    'tls_certificate_id': 'cont_id_1',
    'default_tls_path': '/etc/ssl/sample_loadbalancer_id_1/fakeCN.pem',
    'default_tls_container': RET_DEF_TLS_CONT,
    'crt_dir': '/v2/sample_loadbalancer_id_1',
    'sni_container_ids': ['cont_id_2', 'cont_id_3'],
    'sni_containers': [RET_SNI_CONT_1, RET_SNI_CONT_2],
    'pools': [RET_POOL_1],
    'l7policies': [],
    'enabled': True,
    'insert_headers': {}}
# Amphora and load-balancer fixtures in rendered-dict form.
RET_AMPHORA = {
    'id': 'sample_amphora_id_1',
    'lb_network_ip': '10.0.1.1',
    'vrrp_ip': '10.1.1.1',
    'ha_ip': '192.168.10.1',
    'vrrp_port_id': '1234',
    'ha_port_id': '1234',
    'role': None,
    'status': 'ACTIVE',
    'vrrp_interface': None,
    'vrrp_priority': None}

RET_LB = {
    'host_amphora': RET_AMPHORA,
    'id': 'sample_loadbalancer_id_1',
    'vip_address': '10.0.0.2',
    'listener': RET_LISTENER,
    'topology': 'SINGLE',
    'enabled': True,
    'global_connection_limit': constants.HAPROXY_MAX_MAXCONN}

RET_LB_L7 = {
    'host_amphora': RET_AMPHORA,
    'id': 'sample_loadbalancer_id_1',
    'vip_address': '10.0.0.2',
    'listener': RET_LISTENER_L7,
    'topology': 'SINGLE',
    'enabled': True,
    'global_connection_limit': constants.HAPROXY_MAX_MAXCONN}

# Source-IP session persistence body used by the UDP (LVS) fixtures below.
UDP_SOURCE_IP_BODY = {
    'type': constants.SESSION_PERSISTENCE_SOURCE_IP,
    'persistence_timeout': 33,
    'persistence_granularity': '255.0.0.0'
}
# UDP health-monitor fixtures: with and without the LVS check script.
RET_UDP_HEALTH_MONITOR = {
    'id': 'sample_monitor_id_1',
    'type': constants.HEALTH_MONITOR_UDP_CONNECT,
    'delay': 30,
    'timeout': 31,
    'enabled': True,
    'fall_threshold': 3,
    'check_script_path': (CONF.haproxy_amphora.base_path +
                          '/lvs/check/udp_check.sh')
}

UDP_HEALTH_MONITOR_NO_SCRIPT = {
    'id': 'sample_monitor_id_1',
    'check_script_path': None,
    'delay': 30,
    'enabled': True,
    'fall_threshold': 3,
    'timeout': 31,
    'type': 'UDP'
}

# UDP member fixtures: default, and with an explicit monitor address/port.
RET_UDP_MEMBER = {
    'id': 'member_id_1',
    'address': '192.0.2.10',
    'protocol_port': 82,
    'weight': 13,
    'enabled': True,
    'monitor_address': None,
    'monitor_port': None
}

RET_UDP_MEMBER_MONITOR_IP_PORT = {
    'id': 'member_id_1',
    'address': '192.0.2.10',
    'protocol_port': 82,
    'weight': 13,
    'enabled': True,
    'monitor_address': '192.168.1.1',
    'monitor_port': 9000
}
UDP_MEMBER_1 = {
    'id': 'sample_member_id_1',
    'address': '10.0.0.99',
    'enabled': True,
    'protocol_port': 82,
    'weight': 13,
    'monitor_address': None,
    'monitor_port': None
}

UDP_MEMBER_2 = {
    'id': 'sample_member_id_2',
    'address': '10.0.0.98',
    'enabled': True,
    'protocol_port': 82,
    'weight': 13,
    'monitor_address': None,
    'monitor_port': None
}

# UDP pool without the check script ('rr' = round robin in LVS terms).
RET_UDP_POOL = {
    'id': 'sample_pool_id_1',
    'enabled': True,
    'health_monitor': UDP_HEALTH_MONITOR_NO_SCRIPT,
    'lb_algorithm': 'rr',
    'members': [UDP_MEMBER_1, UDP_MEMBER_2],
    'protocol': 'udp',
    'session_persistence': UDP_SOURCE_IP_BODY
}

# Full UDP listener fixture; its inline default_pool uses the monitor
# variant *with* the check script, unlike RET_UDP_POOL above.
RET_UDP_LISTENER = {
    'connection_limit': 98,
    'default_pool': {
        'id': 'sample_pool_id_1',
        'enabled': True,
        'health_monitor': RET_UDP_HEALTH_MONITOR,
        'lb_algorithm': 'rr',
        'members': [UDP_MEMBER_1, UDP_MEMBER_2],
        'protocol': 'udp',
        'session_persistence': UDP_SOURCE_IP_BODY
    },
    'enabled': True,
    'id': 'sample_listener_id_1',
    'protocol_mode': 'udp',
    'protocol_port': '80'
}
def sample_loadbalancer_tuple(proto=None, monitor=True, persistence=True,
                              persistence_type=None, tls=False, sni=False,
                              topology=None, l7=False, enabled=True):
    """Build a sample load_balancer namedtuple carrying one listener.

    :param proto: listener protocol, defaults to 'HTTP'
    :param topology: LB topology, defaults to 'SINGLE'
    :returns: a load_balancer namedtuple with fields
              id, name, protocol, vip, topology, listeners, enabled
    """
    proto = 'HTTP' if proto is None else proto
    topology = 'SINGLE' if topology is None else topology
    # BUG FIX: the field list previously declared 'amphorae' (which the
    # constructor call below never supplies) instead of 'topology' (which
    # it does supply), so every call raised TypeError.  The fields now
    # match the keyword arguments.
    in_lb = collections.namedtuple(
        'load_balancer', 'id, name, protocol, vip, topology, listeners,'
        ' enabled')
    return in_lb(
        id='sample_loadbalancer_id_1',
        name='test-lb',
        protocol=proto,
        vip=sample_vip_tuple(),
        topology=topology,
        listeners=[sample_listener_tuple(proto=proto, monitor=monitor,
                                         persistence=persistence,
                                         persistence_type=persistence_type,
                                         tls=tls,
                                         sni=sni,
                                         l7=l7,
                                         enabled=enabled)],
        enabled=enabled
    )
def sample_listener_loadbalancer_tuple(proto=None, topology=None,
                                       enabled=True):
    """Build a sample load_balancer namedtuple with no listeners attached.

    Active/standby and active/active topologies get a MASTER/BACKUP pair
    of amphorae; anything else is forced to SINGLE with one amphora.
    """
    if proto is None:
        proto = 'HTTP'
    multi_amp = topology in ('ACTIVE_STANDBY', 'ACTIVE_ACTIVE')
    if not multi_amp:
        topology = constants.TOPOLOGY_SINGLE
    if multi_amp:
        amphorae = [sample_amphora_tuple(role=constants.ROLE_MASTER),
                    sample_amphora_tuple(id='sample_amphora_id_2',
                                         lb_network_ip='10.0.1.2',
                                         vrrp_ip='10.1.1.2',
                                         role=constants.ROLE_BACKUP)]
    else:
        amphorae = [sample_amphora_tuple()]
    lb_type = collections.namedtuple(
        'load_balancer', 'id, name, protocol, vip, amphorae, topology, '
                         'listeners, enabled, project_id')
    return lb_type(
        id='sample_loadbalancer_id_1',
        name='test-lb',
        protocol=proto,
        vip=sample_vip_tuple(),
        amphorae=amphorae,
        topology=topology,
        listeners=[],
        enabled=enabled,
        project_id='12345')
def sample_lb_with_udp_listener_tuple(
        proto=None, topology=None, enabled=True, pools=None):
    """Build a sample load_balancer namedtuple carrying one UDP listener.

    The listener uses SOURCE_IP persistence and a UDP-CONNECT monitor.
    Active/standby and active/active topologies get a MASTER/BACKUP pair
    of amphorae; anything else is forced to SINGLE with one amphora.
    """
    if proto is None:
        proto = 'HTTP'
    multi_amp = topology in ('ACTIVE_STANDBY', 'ACTIVE_ACTIVE')
    if not multi_amp:
        topology = constants.TOPOLOGY_SINGLE
    if multi_amp:
        amphorae = [sample_amphora_tuple(role=constants.ROLE_MASTER),
                    sample_amphora_tuple(id='sample_amphora_id_2',
                                         lb_network_ip='10.0.1.2',
                                         vrrp_ip='10.1.1.2',
                                         role=constants.ROLE_BACKUP)]
    else:
        amphorae = [sample_amphora_tuple()]
    udp_listeners = [sample_listener_tuple(
        proto=constants.PROTOCOL_UDP,
        persistence_type=constants.SESSION_PERSISTENCE_SOURCE_IP,
        persistence_timeout=33,
        persistence_granularity='255.255.0.0',
        monitor_proto=constants.HEALTH_MONITOR_UDP_CONNECT)]
    lb_type = collections.namedtuple(
        'load_balancer', 'id, name, protocol, vip, amphorae, topology, '
                         'pools, enabled, project_id, listeners')
    return lb_type(
        id='sample_loadbalancer_id_1',
        name='test-lb',
        protocol=proto,
        vip=sample_vip_tuple(),
        amphorae=amphorae,
        topology=topology,
        listeners=udp_listeners,
        pools=pools or [],
        enabled=enabled,
        project_id='12345')
def sample_vrrp_group_tuple():
    """Return a canned vrrp_group namedtuple for keepalived template tests."""
    vrrp_group_type = collections.namedtuple(
        'vrrp_group', 'load_balancer_id, vrrp_auth_type, vrrp_auth_pass, '
                      'advert_int, smtp_server, smtp_connect_timeout, '
                      'vrrp_group_name')
    # Field order: lb id, auth type, auth pass, advert interval,
    # smtp server, smtp timeout, group name.
    return vrrp_group_type('sample_loadbalancer_id_1', 'PASS', '123', '1',
                           '', '', 'sample_loadbalancer_id_1')
def sample_vip_tuple():
    """Return a vip namedtuple pinned to the test VIP address 10.0.0.2."""
    vip_type = collections.namedtuple('vip', 'ip_address')
    return vip_type('10.0.0.2')
def sample_listener_tuple(proto=None, monitor=True, alloc_default_pool=True,
                          persistence=True, persistence_type=None,
                          persistence_cookie=None, persistence_timeout=None,
                          persistence_granularity=None,
                          tls=False, sni=False, peer_port=None, topology=None,
                          l7=False, enabled=True, insert_headers=None,
                          be_proto=None, monitor_ip_port=False,
                          monitor_proto=None, backup_member=False,
                          disabled_member=False, connection_limit=-1,
                          timeout_client_data=50000,
                          timeout_member_connect=5000,
                          timeout_member_data=50000,
                          timeout_tcp_inspect=0,
                          client_ca_cert=False, client_crl_cert=False,
                          ssl_type_l7=False, pool_cert=False,
                          pool_ca_cert=False, pool_crl=False,
                          tls_enabled=False, hm_host_http_check=False,
                          id='sample_listener_id_1', recursive_nest=False):
    """Build a listener namedtuple with configurable pools/L7/TLS fixtures.

    With ``l7=True`` the listener carries two pools and seven (or eight,
    with ``ssl_type_l7``) L7 policies; otherwise one pool and no policies.
    ``recursive_nest`` appends the listener to its own load balancer's
    listener list to exercise cyclic references.
    """
    proto = 'HTTP' if proto is None else proto
    # BUG FIX: compare strings with ==/in, not 'is'.  Identity comparison
    # against a literal only works by accident of CPython string interning
    # and raises SyntaxWarning on Python 3.8+.
    if be_proto is None:
        # Terminated-TLS listeners speak plain HTTP to the backend.
        be_proto = 'HTTP' if proto == 'TERMINATED_HTTPS' else proto
    topology = 'SINGLE' if topology is None else topology
    port = '443' if proto in ('HTTPS', 'TERMINATED_HTTPS') else '80'
    peer_port = 1024 if peer_port is None else peer_port
    insert_headers = insert_headers or {}
    in_listener = collections.namedtuple(
        'listener', 'id, project_id, protocol_port, protocol, default_pool, '
                    'connection_limit, tls_certificate_id, '
                    'sni_container_ids, default_tls_container, '
                    'sni_containers, load_balancer, peer_port, pools, '
                    'l7policies, enabled, insert_headers, timeout_client_data,'
                    'timeout_member_connect, timeout_member_data, '
                    'timeout_tcp_inspect, client_ca_tls_certificate_id, '
                    'client_ca_tls_certificate, client_authentication, '
                    'client_crl_container_id')
    if l7:
        pools = [
            sample_pool_tuple(
                proto=be_proto, monitor=monitor, persistence=persistence,
                persistence_type=persistence_type,
                persistence_cookie=persistence_cookie,
                monitor_ip_port=monitor_ip_port, monitor_proto=monitor_proto,
                pool_cert=pool_cert, pool_ca_cert=pool_ca_cert,
                pool_crl=pool_crl, tls_enabled=tls_enabled,
                hm_host_http_check=hm_host_http_check),
            sample_pool_tuple(
                proto=be_proto, monitor=monitor, persistence=persistence,
                persistence_type=persistence_type,
                persistence_cookie=persistence_cookie, sample_pool=2,
                monitor_ip_port=monitor_ip_port, monitor_proto=monitor_proto,
                pool_cert=pool_cert, pool_ca_cert=pool_ca_cert,
                pool_crl=pool_crl, tls_enabled=tls_enabled,
                hm_host_http_check=hm_host_http_check)]
        l7policies = [
            sample_l7policy_tuple('sample_l7policy_id_1', sample_policy=1),
            sample_l7policy_tuple('sample_l7policy_id_2', sample_policy=2),
            sample_l7policy_tuple('sample_l7policy_id_3', sample_policy=3),
            sample_l7policy_tuple('sample_l7policy_id_4', sample_policy=4),
            sample_l7policy_tuple('sample_l7policy_id_5', sample_policy=5),
            sample_l7policy_tuple('sample_l7policy_id_6', sample_policy=6),
            sample_l7policy_tuple('sample_l7policy_id_7', sample_policy=7)]
        if ssl_type_l7:
            l7policies.append(sample_l7policy_tuple(
                'sample_l7policy_id_8', sample_policy=8))
    else:
        pools = [
            sample_pool_tuple(
                proto=be_proto, monitor=monitor, persistence=persistence,
                persistence_type=persistence_type,
                persistence_cookie=persistence_cookie,
                monitor_ip_port=monitor_ip_port, monitor_proto=monitor_proto,
                backup_member=backup_member, disabled_member=disabled_member,
                pool_cert=pool_cert, pool_ca_cert=pool_ca_cert,
                pool_crl=pool_crl, tls_enabled=tls_enabled,
                hm_host_http_check=hm_host_http_check)]
        l7policies = []
    listener = in_listener(
        id=id,
        project_id='12345',
        protocol_port=port,
        protocol=proto,
        load_balancer=sample_listener_loadbalancer_tuple(proto=proto,
                                                         topology=topology),
        peer_port=peer_port,
        default_pool=sample_pool_tuple(
            proto=be_proto, monitor=monitor, persistence=persistence,
            persistence_type=persistence_type,
            persistence_cookie=persistence_cookie,
            persistence_timeout=persistence_timeout,
            persistence_granularity=persistence_granularity,
            monitor_ip_port=monitor_ip_port,
            monitor_proto=monitor_proto,
            pool_cert=pool_cert,
            pool_ca_cert=pool_ca_cert,
            pool_crl=pool_crl,
            tls_enabled=tls_enabled,
            hm_host_http_check=hm_host_http_check
        ) if alloc_default_pool else '',
        connection_limit=connection_limit,
        tls_certificate_id='cont_id_1' if tls else '',
        sni_container_ids=['cont_id_2', 'cont_id_3'] if sni else [],
        default_tls_container=sample_tls_container_tuple(
            id='cont_id_1', certificate=sample_certs.X509_CERT,
            private_key=sample_certs.X509_CERT_KEY,
            intermediates=sample_certs.X509_IMDS_LIST,
            primary_cn=sample_certs.X509_CERT_CN
        ) if tls else '',
        sni_containers=[
            sample_tls_sni_container_tuple(
                tls_container_id='cont_id_2',
                tls_container=sample_tls_container_tuple(
                    id='cont_id_2', certificate=sample_certs.X509_CERT_2,
                    private_key=sample_certs.X509_CERT_KEY_2,
                    intermediates=sample_certs.X509_IMDS_LIST,
                    primary_cn=sample_certs.X509_CERT_CN_2)),
            sample_tls_sni_container_tuple(
                tls_container_id='cont_id_3',
                tls_container=sample_tls_container_tuple(
                    id='cont_id_3', certificate=sample_certs.X509_CERT_3,
                    private_key=sample_certs.X509_CERT_KEY_3,
                    intermediates=sample_certs.X509_IMDS_LIST,
                    primary_cn=sample_certs.X509_CERT_CN_3))]
        if sni else [],
        pools=pools,
        l7policies=l7policies,
        enabled=enabled,
        insert_headers=insert_headers,
        timeout_client_data=timeout_client_data,
        timeout_member_connect=timeout_member_connect,
        timeout_member_data=timeout_member_data,
        timeout_tcp_inspect=timeout_tcp_inspect,
        client_ca_tls_certificate_id='cont_id_ca' if client_ca_cert else '',
        client_ca_tls_certificate=sample_tls_container_tuple(
            id='cont_id_ca', certificate=sample_certs.X509_CA_CERT,
            primary_cn=sample_certs.X509_CA_CERT_CN
        ) if client_ca_cert else '',
        client_authentication=(
            constants.CLIENT_AUTH_MANDATORY if client_ca_cert else
            constants.CLIENT_AUTH_NONE),
        client_crl_container_id='cont_id_crl' if client_crl_cert else '',
    )
    if recursive_nest:
        listener.load_balancer.listeners.append(listener)
    return listener
def sample_tls_sni_container_tuple(tls_container_id=None, tls_container=None):
    """Wrap a TLS container reference in a sni_container namedtuple."""
    container_type = collections.namedtuple(
        'sni_container', 'tls_container_id, tls_container')
    return container_type(tls_container_id, tls_container)
def sample_tls_sni_containers_tuple(tls_container_id=None, tls_container=None):
    """Like sample_tls_sni_container_tuple, but returns a one-element list."""
    container_type = collections.namedtuple(
        'sni_containers', 'tls_container_id, tls_container')
    return [container_type(tls_container_id, tls_container)]
def sample_tls_container_tuple(id='cont_id_1', certificate=None,
                               private_key=None, intermediates=None,
                               primary_cn=None):
    """Build a tls_container namedtuple; falsy intermediates become []."""
    container_type = collections.namedtuple(
        'tls_container',
        'id, certificate, private_key, intermediates, primary_cn')
    return container_type(id, certificate, private_key,
                          intermediates or [], primary_cn)
def sample_pool_tuple(proto=None, monitor=True, persistence=True,
                      persistence_type=None, persistence_cookie=None,
                      persistence_timeout=None, persistence_granularity=None,
                      sample_pool=1, monitor_ip_port=False,
                      monitor_proto=None, backup_member=False,
                      disabled_member=False, has_http_reuse=True,
                      pool_cert=False, pool_ca_cert=False, pool_crl=False,
                      tls_enabled=False, hm_host_http_check=False):
    """Build a pool namedtuple with canned members and optional monitor.

    ``sample_pool`` selects a canned pool: 1 = two members (the second can
    be backup/disabled), 2 = one member.  NOTE(review): any other value
    leaves ``id`` and ``members`` unbound and raises NameError — visible
    callers only pass 1 or 2.
    """
    proto = 'HTTP' if proto is None else proto
    monitor_proto = proto if monitor_proto is None else monitor_proto
    # The last field name is taken from constants.HTTP_REUSE; it must equal
    # 'has_http_reuse' for the keyword argument in the constructor below to
    # bind — presumably guaranteed by the constants module (verify there).
    in_pool = collections.namedtuple(
        'pool', 'id, protocol, lb_algorithm, members, health_monitor, '
                'session_persistence, enabled, operating_status, '
                'tls_certificate_id, ca_tls_certificate_id, '
                'crl_container_id, tls_enabled, ' + constants.HTTP_REUSE)
    # UDP + SOURCE_IP persistence carries timeout/granularity rather than a
    # cookie name.
    if (proto == constants.PROTOCOL_UDP and
            persistence_type == constants.SESSION_PERSISTENCE_SOURCE_IP):
        kwargs = {'persistence_type': persistence_type,
                  'persistence_timeout': persistence_timeout,
                  'persistence_granularity': persistence_granularity}
    else:
        kwargs = {'persistence_type': persistence_type,
                  'persistence_cookie': persistence_cookie}
    persis = sample_session_persistence_tuple(**kwargs)
    mon = None
    if sample_pool == 1:
        id = 'sample_pool_id_1'
        members = [sample_member_tuple('sample_member_id_1', '10.0.0.99',
                                       monitor_ip_port=monitor_ip_port),
                   sample_member_tuple('sample_member_id_2', '10.0.0.98',
                                       monitor_ip_port=monitor_ip_port,
                                       backup=backup_member,
                                       enabled=not disabled_member)]
        if monitor is True:
            mon = sample_health_monitor_tuple(
                proto=monitor_proto, host_http_check=hm_host_http_check)
    elif sample_pool == 2:
        id = 'sample_pool_id_2'
        members = [sample_member_tuple('sample_member_id_3', '10.0.0.97',
                                       monitor_ip_port=monitor_ip_port)]
        if monitor is True:
            mon = sample_health_monitor_tuple(
                proto=monitor_proto, sample_hm=2,
                host_http_check=hm_host_http_check)
    return in_pool(
        id=id,
        protocol=proto,
        lb_algorithm='ROUND_ROBIN',
        members=members,
        health_monitor=mon,
        session_persistence=persis if persistence is True else None,
        enabled=True,
        operating_status='ACTIVE', has_http_reuse=has_http_reuse,
        tls_certificate_id='pool_cont_1' if pool_cert else None,
        ca_tls_certificate_id='pool_ca_1' if pool_ca_cert else None,
        crl_container_id='pool_crl' if pool_crl else None,
        tls_enabled=tls_enabled)
def sample_member_tuple(id, ip, enabled=True, operating_status='ACTIVE',
                        monitor_ip_port=False, backup=False):
    """Build a member namedtuple with fixed port 82 / weight 13.

    When monitor_ip_port is set, the member is monitored at
    192.168.1.1:9000 instead of its own address/port.
    """
    member_type = collections.namedtuple(
        'member',
        'id, ip_address, protocol_port, weight, subnet_id, enabled, '
        'operating_status, monitor_address, monitor_port, backup')
    if monitor_ip_port:
        monitor_address, monitor_port = '192.168.1.1', 9000
    else:
        monitor_address = monitor_port = None
    return member_type(
        id=id,
        ip_address=ip,
        protocol_port=82,
        weight=13,
        subnet_id='10.0.0.1/24',
        enabled=enabled,
        operating_status=operating_status,
        monitor_address=monitor_address,
        monitor_port=monitor_port,
        backup=backup)
def sample_session_persistence_tuple(persistence_type=None,
                                     persistence_cookie=None,
                                     persistence_timeout=None,
                                     persistence_granularity=None):
    """Build a SessionPersistence namedtuple; type defaults to HTTP_COOKIE."""
    sp_type = collections.namedtuple('SessionPersistence',
                                     'type, cookie_name, '
                                     'persistence_timeout, '
                                     'persistence_granularity')
    if persistence_type is None:
        persistence_type = 'HTTP_COOKIE'
    return sp_type(persistence_type, persistence_cookie,
                   persistence_timeout, persistence_granularity)
def sample_health_monitor_tuple(proto='HTTP', sample_hm=1,
                                host_http_check=False):
    """Build a monitor namedtuple for template rendering tests.

    :param proto: monitor type; TERMINATED_HTTPS is normalized to HTTP
    :param sample_hm: 1 or 2, selects the canned id/url_path pair
    :param host_http_check: adds HTTP/1.1 + Host-header check fields
    """
    # BUG FIX: compare strings with ==, not 'is' — identity against a
    # literal only works via CPython interning (SyntaxWarning on 3.8+).
    proto = 'HTTP' if proto == 'TERMINATED_HTTPS' else proto
    monitor = collections.namedtuple(
        'monitor', 'id, type, delay, timeout, fall_threshold, rise_threshold,'
                   'http_method, url_path, expected_codes, enabled, '
                   'check_script_path, http_version, domain_name')
    if sample_hm == 1:
        id = 'sample_monitor_id_1'
        url_path = '/index.html'
    elif sample_hm == 2:
        id = 'sample_monitor_id_2'
        url_path = '/healthmon.html'
    kwargs = {
        'id': id,
        'type': proto,
        'delay': 30,
        'timeout': 31,
        'fall_threshold': 3,
        'rise_threshold': 2,
        'http_method': 'GET',
        'url_path': url_path,
        'expected_codes': '418',
        'enabled': True
    }
    if host_http_check:
        kwargs.update({'http_version': 1.1, 'domain_name': 'testlab.com'})
    else:
        kwargs.update({'http_version': 1.0, 'domain_name': None})
    if proto == constants.HEALTH_MONITOR_UDP_CONNECT:
        # BUG FIX: join with a leading '/' so the result matches
        # RET_UDP_HEALTH_MONITOR ('<base_path>/lvs/check/udp_check.sh');
        # the old concatenation dropped the path separator.
        kwargs['check_script_path'] = (CONF.haproxy_amphora.base_path +
                                       '/lvs/check/udp_check.sh')
    else:
        kwargs['check_script_path'] = None
    return monitor(**kwargs)
def sample_l7policy_tuple(id,
                          action=constants.L7POLICY_ACTION_REJECT,
                          redirect_pool=None, redirect_url=None,
                          redirect_prefix=None,
                          enabled=True, redirect_http_code=302,
                          sample_policy=1):
    """Build an l7policy namedtuple from one of eight canned scenarios.

    ``sample_policy`` overrides the action/redirect/rule arguments:
    1 = redirect-to-pool, 2 = redirect-to-url, 3-6 = reject variants
    (5 disabled, 4 without rules), 7 = redirect-prefix, 8 = redirect-to-url
    with SSL-related rules.  redirect_http_code is only kept for redirect
    actions, otherwise forced to None.
    """
    in_l7policy = collections.namedtuple('l7policy',
                                         'id, action, redirect_pool, '
                                         'redirect_url, redirect_prefix, '
                                         'l7rules, enabled,'
                                         'redirect_http_code')
    l7rules = []
    if sample_policy == 1:
        # Redirect to the second canned pool, one PATH rule.
        action = constants.L7POLICY_ACTION_REDIRECT_TO_POOL
        redirect_pool = sample_pool_tuple(sample_pool=2)
        l7rules = [sample_l7rule_tuple('sample_l7rule_id_1')]
    elif sample_policy == 2:
        # URL redirect guarded by header + cookie rules.
        action = constants.L7POLICY_ACTION_REDIRECT_TO_URL
        redirect_url = 'http://www.example.com'
        l7rules = [sample_l7rule_tuple('sample_l7rule_id_2', sample_rule=2),
                   sample_l7rule_tuple('sample_l7rule_id_3', sample_rule=3)]
    elif sample_policy == 3:
        action = constants.L7POLICY_ACTION_REJECT
        l7rules = [sample_l7rule_tuple('sample_l7rule_id_4', sample_rule=4),
                   sample_l7rule_tuple('sample_l7rule_id_5', sample_rule=5)]
    elif sample_policy == 4:
        # Reject with no rules at all.
        action = constants.L7POLICY_ACTION_REJECT
    elif sample_policy == 5:
        # Disabled reject policy.
        action = constants.L7POLICY_ACTION_REJECT
        enabled = False
        l7rules = [sample_l7rule_tuple('sample_l7rule_id_5', sample_rule=5)]
    elif sample_policy == 6:
        # Reject whose single rule is itself disabled.
        action = constants.L7POLICY_ACTION_REJECT
        l7rules = [sample_l7rule_tuple('sample_l7rule_id_6', sample_rule=6)]
    elif sample_policy == 7:
        action = constants.L7POLICY_ACTION_REDIRECT_PREFIX
        redirect_prefix = 'https://example.com'
        l7rules = [sample_l7rule_tuple('sample_l7rule_id_2', sample_rule=2),
                   sample_l7rule_tuple('sample_l7rule_id_3', sample_rule=3)]
    elif sample_policy == 8:
        # URL redirect guarded by SSL client-cert rules (rules 7-11).
        action = constants.L7POLICY_ACTION_REDIRECT_TO_URL
        redirect_url = 'http://www.ssl-type-l7rule-test.com'
        l7rules = [sample_l7rule_tuple('sample_l7rule_id_7', sample_rule=7),
                   sample_l7rule_tuple('sample_l7rule_id_8', sample_rule=8),
                   sample_l7rule_tuple('sample_l7rule_id_9', sample_rule=9),
                   sample_l7rule_tuple('sample_l7rule_id_10', sample_rule=10),
                   sample_l7rule_tuple('sample_l7rule_id_11', sample_rule=11)]
    return in_l7policy(
        id=id,
        action=action,
        redirect_pool=redirect_pool,
        redirect_url=redirect_url,
        redirect_prefix=redirect_prefix,
        l7rules=l7rules,
        enabled=enabled,
        redirect_http_code=redirect_http_code
        if (action in [constants.L7POLICY_ACTION_REDIRECT_TO_URL,
                       constants.L7POLICY_ACTION_REDIRECT_PREFIX] and
            redirect_http_code) else None)
def sample_l7rule_tuple(id,
                        type=constants.L7RULE_TYPE_PATH,
                        compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
                        key=None,
                        value='/api',
                        invert=False,
                        enabled=True,
                        sample_rule=1):
    """Build an l7rule namedtuple from one of the canned presets.

    ``sample_rule`` values 2-10 override the keyword arguments with fixed
    (type, compare_type, key, value, invert, enabled) tuples; any other
    value (including the default 1) keeps the arguments as passed.
    """
    in_l7rule = collections.namedtuple('l7rule',
                                       'id, type, compare_type, '
                                       'key, value, invert, enabled')
    presets = {
        2: (constants.L7RULE_TYPE_HEADER,
            constants.L7RULE_COMPARE_TYPE_CONTAINS,
            'Some-header', 'This string\\ with stuff', True, True),
        3: (constants.L7RULE_TYPE_COOKIE,
            constants.L7RULE_COMPARE_TYPE_REGEX,
            'some-cookie', 'this.*|that', False, True),
        4: (constants.L7RULE_TYPE_FILE_TYPE,
            constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
            None, 'jpg', False, True),
        5: (constants.L7RULE_TYPE_HOST_NAME,
            constants.L7RULE_COMPARE_TYPE_ENDS_WITH,
            None, '.example.com', False, True),
        6: (constants.L7RULE_TYPE_HOST_NAME,
            constants.L7RULE_COMPARE_TYPE_ENDS_WITH,
            None, '.example.com', False, False),
        7: (constants.L7RULE_TYPE_SSL_CONN_HAS_CERT,
            constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
            None, 'tRuE', False, True),
        8: (constants.L7RULE_TYPE_SSL_VERIFY_RESULT,
            constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
            None, '1', True, True),
        9: (constants.L7RULE_TYPE_SSL_DN_FIELD,
            constants.L7RULE_COMPARE_TYPE_REGEX,
            'STREET', r'^STREET.*NO\.$', True, True),
        10: (constants.L7RULE_TYPE_SSL_DN_FIELD,
             constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
             'OU-3', 'Orgnization Bala', True, True),
    }
    if sample_rule in presets:
        type, compare_type, key, value, invert, enabled = presets[sample_rule]
    return in_l7rule(
        id=id,
        type=type,
        compare_type=compare_type,
        key=key,
        value=value,
        invert=invert,
        enabled=enabled)
def sample_base_expected_config(frontend=None, backend=None,
                                peers=None, global_opts=None, defaults=None):
    """Assemble the expected haproxy config text for comparison in tests.

    Each section argument, when None, falls back to the default rendering
    of the canned sample listener/pool.  The returned string is the fixed
    global preamble followed by global_opts + defaults + peers + frontend
    + backend, in that order.
    """
    if frontend is None:
        # Default frontend for sample_listener_id_1 on the sample VIP.
        frontend = ("frontend sample_listener_id_1\n"
                    "    option httplog\n"
                    "    maxconn {maxconn}\n"
                    "    bind 10.0.0.2:80\n"
                    "    mode http\n"
                    "    default_backend sample_pool_id_1\n"
                    "    timeout client 50000\n\n").format(
            maxconn=constants.HAPROXY_MAX_MAXCONN)
    if backend is None:
        # Default backend: round-robin over the two sample members with an
        # HTTP health check and cookie-based persistence.
        backend = ("backend sample_pool_id_1\n"
                   "    mode http\n"
                   "    balance roundrobin\n"
                   "    cookie SRV insert indirect nocache\n"
                   "    timeout check 31s\n"
                   "    option httpchk GET /index.html HTTP/1.0\\r\\n\n"
                   "    http-check expect rstatus 418\n"
                   "    fullconn {maxconn}\n"
                   "    option allbackups\n"
                   "    timeout connect 5000\n"
                   "    timeout server 50000\n"
                   "    server sample_member_id_1 10.0.0.99:82 weight 13 "
                   "check inter 30s fall 3 rise 2 cookie sample_member_id_1\n"
                   "    server sample_member_id_2 10.0.0.98:82 weight 13 "
                   "check inter 30s fall 3 rise 2 cookie sample_member_id_2\n"
                   "\n").format(maxconn=constants.HAPROXY_MAX_MAXCONN)
    if peers is None:
        peers = "\n\n"
    if global_opts is None:
        global_opts = "    maxconn {maxconn}\n\n".format(
            maxconn=constants.HAPROXY_MAX_MAXCONN)
    if defaults is None:
        defaults = ("defaults\n"
                    "    log global\n"
                    "    retries 3\n"
                    "    option redispatch\n"
                    "    option splice-request\n"
                    "    option splice-response\n"
                    "    option http-keep-alive\n\n")
    return ("# Configuration for loadbalancer sample_loadbalancer_id_1\n"
            "global\n"
            "    daemon\n"
            "    user nobody\n"
            "    log /dev/log local0\n"
            "    log /dev/log local1 notice\n"
            "    stats socket /var/lib/octavia/sample_listener_id_1.sock"
            " mode 0666 level user\n" +
            global_opts + defaults + peers + frontend + backend)
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import pipes
from recipe_engine.config import config_item_context, ConfigGroup
from recipe_engine.config import Dict, List, Single, Static, Set, BadConf
from recipe_engine.config_types import Path
# Because of the way that we use decorators, pylint can't figure out the proper
# type signature of functions annotated with the @config_ctx decorator.
# pylint: disable=E1123
# Closed sets of accepted values for the Static fields of BaseConfig;
# check() validates incoming values against these at config-creation time.
HOST_PLATFORMS = ('linux', 'win', 'mac')
TARGET_PLATFORMS = HOST_PLATFORMS + ('ios', 'android', 'chromeos')
HOST_TARGET_BITS = (32, 64)
HOST_ARCHS = ('intel',)
TARGET_ARCHS = HOST_ARCHS + ('arm', 'mips', 'mipsel')
TARGET_CROS_BOARDS = (None, 'x86-generic')
BUILD_CONFIGS = ('Release', 'Debug', 'Coverage')
MEMORY_TOOLS = ('memcheck', 'drmemory_full', 'drmemory_light')
PROJECT_GENERATORS = ('gyp', 'gn', 'mb')
def check(val, potentials):
  """Validate that val is one of potentials and return it unchanged.

  Raises AssertionError explicitly rather than via the assert statement,
  so the validation still fires when Python runs with -O (which strips
  asserts).
  """
  if val not in potentials:
    raise AssertionError((val, potentials))
  return val
# Schema for config items in this module.
def BaseConfig(HOST_PLATFORM, HOST_ARCH, HOST_BITS,
               TARGET_PLATFORM, TARGET_ARCH, TARGET_BITS,
               BUILD_CONFIG, TARGET_CROS_BOARD, **_kwargs):
  """Return the ConfigGroup schema for chromium build configuration.

  The Static fields pin the host/target platform parameters (validated
  against the module-level tuples via check()); the remaining groups are
  filled in by the @config_ctx functions below.
  """
  # Renders a (key, value) pair as a shell-quoted KEY=VALUE string; used to
  # serialize the GYP_DEFINES / GYP_GENERATOR_FLAGS dicts.
  equal_fn = lambda tup: ('%s=%s' % (tup[0], pipes.quote(str(tup[1]))))
  return ConfigGroup(
    # Options consumed by compile.py.
    compile_py = ConfigGroup(
      default_targets = Set(basestring),
      build_args = Single(basestring, required=False),
      build_tool = Single(basestring, required=False),
      cross_tool = Single(basestring, required=False),
      compiler = Single(basestring, required=False),
      mode = Single(basestring, required=False),
      goma_dir = Single(Path, required=False),
      goma_hermetic = Single(basestring, required=False),
      goma_enable_remote_link = Single(bool, empty_val=False, required=False),
      goma_store_local_run_output = Single(bool, empty_val=False, required=False),
      goma_enable_compiler_info_cache = Single(
          bool, empty_val=False, required=False),
      clobber = Single(bool, empty_val=False, required=False, hidden=False),
      pass_arch_flag = Single(bool, empty_val=False, required=False),
      xcode_sdk = Single(basestring, required=False),
      xcode_project = Single(Path, required=False),
      solution = Single(Path, required=False),
      ninja_confirm_noop = Single(bool, empty_val=False, required=False),
      set_build_data_dir = Single(bool, empty_val=False, required=False),
    ),
    runtest_py = ConfigGroup(
      src_side = Single(bool),
    ),
    # Environment variables exported to gyp_chromium / gclient runhooks.
    gyp_env = ConfigGroup(
      GYP_CROSSCOMPILE = Single(int, jsonish_fn=str, required=False),
      GYP_CHROMIUM_NO_ACTION = Single(int, jsonish_fn=str, required=False),
      GYP_DEFINES = Dict(equal_fn, ' '.join, (basestring,int,Path)),
      GYP_GENERATORS = Set(basestring, ','.join),
      GYP_GENERATOR_FLAGS = Dict(equal_fn, ' '.join, (basestring,int)),
      GYP_INCLUDE_LAST = Single(Path, required=False),
      GYP_LINK_CONCURRENCY = Single(int, required=False),
      GYP_MSVS_VERSION = Single(basestring, required=False),
      GYP_USE_SEPARATE_MSPDBSRV = Single(int, jsonish_fn=str, required=False),
      LLVM_DOWNLOAD_GOLD_PLUGIN = Single(int, required=False),
    ),
    # Plain process-environment additions.
    env = ConfigGroup(
      PATH = List(Path),
      ADB_VENDOR_KEYS = Single(Path, required=False),
      LLVM_FORCE_HEAD_REVISION = Single(basestring, required=False),
      GOMA_STUBBY_PROXY_IP_ADDRESS = Single(basestring, required=False),
    ),
    # Which meta-build tool generates ninja/msvs files ('gyp', 'gn', 'mb').
    project_generator = ConfigGroup(
      tool = Single(basestring, empty_val='gyp'),
      args = Set(basestring),
    ),
    build_dir = Single(Path),
    cros_sdk = ConfigGroup(
      external = Single(bool, empty_val=True, required=False),
      args = List(basestring),
    ),
    # Options consumed by runtest.py / swarming wrappers.
    runtests = ConfigGroup(
      memory_tool = Single(basestring, required=False),
      memory_tests_runner = Single(Path),
      enable_lsan = Single(bool, empty_val=False, required=False),
      test_args = List(basestring),
      run_asan_test = Single(bool, required=False),
      swarming_extra_args = List(basestring),
      swarming_tags = Set(basestring),
    ),
    # Some platforms do not have a 1:1 correlation of BUILD_CONFIG to what is
    # passed as --target on the command line.
    build_config_fs = Single(basestring),
    BUILD_CONFIG = Static(check(BUILD_CONFIG, BUILD_CONFIGS)),
    HOST_PLATFORM = Static(check(HOST_PLATFORM, HOST_PLATFORMS)),
    HOST_ARCH = Static(check(HOST_ARCH, HOST_ARCHS)),
    HOST_BITS = Static(check(HOST_BITS, HOST_TARGET_BITS)),
    TARGET_PLATFORM = Static(check(TARGET_PLATFORM, TARGET_PLATFORMS)),
    TARGET_ARCH = Static(check(TARGET_ARCH, TARGET_ARCHS)),
    TARGET_BITS = Static(check(TARGET_BITS, HOST_TARGET_BITS)),
    TARGET_CROS_BOARD = Static(TARGET_CROS_BOARD),
    gn_args = List(basestring),
    lto = Single(bool, empty_val=False, required=False),
  )
# Decorator factory: every @config_ctx function below mutates a config
# instance produced from the BaseConfig schema.
config_ctx = config_item_context(BaseConfig)
@config_ctx(is_root=True)
def BASE(c):
  """Root config: validates platform/arch combos and sets baseline values.

  Every other config_ctx in this module is applied on top of this one.
  """
  # Reject host/target (platform, arch, bits) combinations that are not
  # supported by the Chromium build.
  host_targ_tuples = [(c.HOST_PLATFORM, c.HOST_ARCH, c.HOST_BITS),
                      (c.TARGET_PLATFORM, c.TARGET_ARCH, c.TARGET_BITS)]
  for (plat, arch, bits) in host_targ_tuples:
    if plat == 'ios':
      if arch not in ('arm', 'intel'):  # pragma: no cover
        raise BadConf('%s/%s arch is not supported on %s' % (arch, bits, plat))
    elif plat in ('win', 'mac'):
      if arch != 'intel':  # pragma: no cover
        raise BadConf('%s arch is not supported on %s' % (arch, plat))
    elif plat in ('chromeos', 'android', 'linux'):
      pass  # no arch restrictions
    else:  # pragma: no cover
      assert False, "Not covering a platform: %s" % plat
  potential_platforms = {
    # host -> potential target platforms
    'win':   ('win',),
    'mac':   ('mac', 'ios'),
    'linux': ('linux', 'chromeos', 'android'),
  }.get(c.HOST_PLATFORM)
  if not potential_platforms:  # pragma: no cover
    raise BadConf('Cannot build on "%s"' % c.HOST_PLATFORM)
  if c.TARGET_PLATFORM not in potential_platforms:
    raise BadConf('Can not compile "%s" on "%s"' %
                  (c.TARGET_PLATFORM, c.HOST_PLATFORM))  # pragma: no cover
  if c.TARGET_CROS_BOARD:
    if not c.TARGET_PLATFORM == 'chromeos':  # pragma: no cover
      raise BadConf("Cannot specify CROS board for non-'chromeos' platform")
  # Cross builds need GYP told explicitly.
  if c.HOST_PLATFORM != c.TARGET_PLATFORM or c.HOST_ARCH != c.TARGET_ARCH:
    c.gyp_env.GYP_CROSSCOMPILE = 1
  if c.HOST_BITS < c.TARGET_BITS:
    raise BadConf('host bits < targ bits')  # pragma: no cover
  c.build_config_fs = c.BUILD_CONFIG
  if c.HOST_PLATFORM == 'win':
    if c.TARGET_BITS == 64:
      # Windows requires 64-bit builds to be in <dir>_x64.
      c.build_config_fs = c.BUILD_CONFIG + '_x64'
  # Test runner memory tools that are not compile-time based.
  c.runtests.memory_tests_runner = Path('[CHECKOUT]', 'tools', 'valgrind',
                                        'chrome_tests',
                                        platform_ext={'win': '.bat',
                                                      'mac': '.sh',
                                                      'linux': '.sh'})
  if c.project_generator.tool not in PROJECT_GENERATORS:  # pragma: no cover
    raise BadConf('"%s" is not a supported project generator tool, the '
                  'supported ones are: %s' % (c.project_generator.tool,
                                              ','.join(PROJECT_GENERATORS)))
  # Map (arch, bits) onto GYP's target_arch naming.
  gyp_arch = {
    ('intel', 32): 'ia32',
    ('intel', 64): 'x64',
    ('arm',   32): 'arm',
    ('arm',   64): 'arm64',
    ('mips',  32): 'mips',
    ('mips',  64): 'mips64',
    ('mipsel',  32): 'mipsel',
    ('mipsel',  64): 'mips64el',
  }.get((c.TARGET_ARCH, c.TARGET_BITS))
  if gyp_arch:
    c.gyp_env.GYP_DEFINES['target_arch'] = gyp_arch
  if c.BUILD_CONFIG in ['Coverage', 'Release']:
    # The 'Coverage' target is not explicitly used by Chrome, but by some other
    # projects in the Chrome ecosystem (ie: Syzygy).
    static_library(c, final=False)
  elif c.BUILD_CONFIG == 'Debug':
    shared_library(c, final=False)
  else:  # pragma: no cover
    raise BadConf('Unknown build config "%s"' % c.BUILD_CONFIG)
@config_ctx()
def gn(c):
  """Select GN as the project generator."""
  c.project_generator.tool = 'gn'
@config_ctx()
def mb(c):
  """Select MB (meta-build wrapper) as the project generator."""
  c.project_generator.tool = 'mb'
@config_ctx()
def gn_for_uploads(c):
  # This config is used to do the official builds of GN itself (which
  # are uploaded into Google Cloud Storage). While most of the configuration
  # of the build is done repo-side via MB, we need to set a few GYP_DEFINES
  # so that `gclient runhooks` will do the right thing.
  if c.TARGET_PLATFORM == 'linux':
    c.gyp_env.GYP_DEFINES['branding'] = 'Chrome'
    c.gyp_env.GYP_DEFINES['buildtype'] = 'Official'
@config_ctx()
def win_analyze(c):
  """Enable MSVC /analyze static analysis (goma is disabled for it)."""
  c.gyp_env.GYP_DEFINES['win_analyze'] = '1'
  c.gyp_env.GYP_DEFINES['fastbuild'] = '2'
  c.gyp_env.GYP_DEFINES['use_goma'] = 0
@config_ctx(group='builder')
def ninja(c):
  """Build with ninja; output under out[_<cros_board>]."""
  # iOS defaults to the xcode generator, so request ninja explicitly.
  if c.TARGET_PLATFORM == 'ios':
    c.gyp_env.GYP_GENERATORS.add('ninja')
  c.compile_py.build_tool = 'ninja'
  out_path = 'out'
  if c.TARGET_CROS_BOARD:
    out_path += '_%s' % (c.TARGET_CROS_BOARD,)
  c.build_dir = Path('[CHECKOUT]', out_path)
@config_ctx(group='builder')
def msvs(c):
  """Build with Visual Studio (devenv); Windows hosts only."""
  if c.HOST_PLATFORM != 'win':  # pragma: no cover
    raise BadConf('can not use msvs on "%s"' % c.HOST_PLATFORM)
  # If compile.py is invoking devenv it needs to refer to a solution file.
  # For chrome this defaults to ['CHECKOUT']/build/all.sln.
  c.compile_py.solution = Path('[CHECKOUT]', 'build', 'all.sln')
  c.gyp_env.GYP_GENERATORS.add('msvs')
  c.compile_py.build_tool = 'vs'
  c.build_dir = Path('[CHECKOUT]', 'build')
# Pin the MSVS toolchain version used by GYP.
@config_ctx()
def msvs2010(c):
  c.gyp_env.GYP_MSVS_VERSION = '2010'
@config_ctx()
def msvs2012(c):
  c.gyp_env.GYP_MSVS_VERSION = '2012'
@config_ctx()
def msvs2013(c):
  c.gyp_env.GYP_MSVS_VERSION = '2013'
@config_ctx()
def msvs2015(c):
  c.gyp_env.GYP_MSVS_VERSION = '2015'
@config_ctx()
def goma_canary(c):
  """Goma canary bots: fail hard on non-hermetic goma, test the info cache."""
  c.compile_py.goma_hermetic = 'error'
  c.compile_py.goma_enable_compiler_info_cache = True
@config_ctx()
def goma_staging(c):
  """Point goma at the staging (sandbox) backend."""
  c.env.GOMA_STUBBY_PROXY_IP_ADDRESS = 'sandbox.google.com'
@config_ctx()
def goma_hermetic_fallback(c):
  """Allow goma to fall back to local compile when not hermetic."""
  c.compile_py.goma_hermetic = 'fallback'
@config_ctx()
def goma_linktest(c):
  """Exercise goma remote linking and keep local run output for comparison."""
  c.compile_py.goma_enable_remote_link = True
  c.compile_py.goma_store_local_run_output = True
@config_ctx()
def ninja_confirm_noop(c):
  """Have compile.py verify a second ninja invocation is a no-op."""
  c.compile_py.ninja_confirm_noop = True
@config_ctx(group='builder')
def xcode(c):  # pragma: no cover
  """Build with xcodebuild; mac hosts only."""
  if c.HOST_PLATFORM != 'mac':
    raise BadConf('can not use xcodebuild on "%s"' % c.HOST_PLATFORM)
  c.gyp_env.GYP_GENERATORS.add('xcode')
def _clang_common(c):
  """Shared setup for any config that compiles with clang."""
  c.compile_py.compiler = 'clang'
  c.gyp_env.GYP_DEFINES['clang'] = 1
@config_ctx(group='compiler')
def clang(c):
  """Compile with clang."""
  _clang_common(c)
@config_ctx(group='compiler')
def gcc(c):
  """Compile with gcc (explicitly disable clang)."""
  c.gyp_env.GYP_DEFINES['clang'] = 0
@config_ctx(group='compiler')
def default_compiler(c):
  """Platform default: clang on mac/ios, otherwise leave unset."""
  if c.TARGET_PLATFORM in ('mac', 'ios'):
    _clang_common(c)
@config_ctx(deps=['compiler', 'builder'], group='distributor')
def goma(c):
  """Distribute compilation through goma; incompatible with msvs."""
  if c.compile_py.build_tool == 'vs':  # pragma: no cover
    raise BadConf('goma doesn\'t work with msvs')
  # Wrap whichever compiler was selected with its goma variant.
  if not c.compile_py.compiler:
    c.compile_py.compiler = 'goma'
  elif c.compile_py.compiler == 'clang':
    c.compile_py.compiler = 'goma-clang'
  else:  # pragma: no cover
    raise BadConf('goma config doesn\'t understand %s' % c.compile_py.compiler)
  c.gyp_env.GYP_DEFINES['use_goma'] = 1
  goma_dir = Path('[BUILD]', 'goma')
  c.gyp_env.GYP_DEFINES['gomadir'] = goma_dir
  c.compile_py.goma_dir = goma_dir
  if c.TARGET_PLATFORM == 'win' and c.compile_py.compiler != 'goma-clang':
    # MSVC+goma builds need fastbuild and no precompiled headers.
    fastbuild(c)
    pch(c, invert=True)
@config_ctx()
def pch(c, invert=False):
  """Toggle precompiled headers on Windows (invert=True disables them)."""
  if c.TARGET_PLATFORM == 'win':
    c.gyp_env.GYP_DEFINES['chromium_win_pch'] = int(not invert)
@config_ctx()
def dcheck(c, invert=False):
  """Toggle DCHECKs in release builds."""
  c.gyp_env.GYP_DEFINES['dcheck_always_on'] = int(not invert)
@config_ctx()
def fastbuild(c, invert=False):
  """Toggle fastbuild (reduced debug info)."""
  c.gyp_env.GYP_DEFINES['fastbuild'] = int(not invert)
@config_ctx()
def no_dump_symbols(c):
  """Skip Breakpad symbol dumping on Linux."""
  c.gyp_env.GYP_DEFINES['linux_dump_symbols'] = 0
@config_ctx()
def isolation_mode_noop(c):
  """Disable test isolation (.isolate processing)."""
  c.gyp_env.GYP_DEFINES['test_isolation_mode'] = 'noop'
# Component build linkage: exactly one of these is active per config.
@config_ctx(group='link_type')
def shared_library(c):
  c.gyp_env.GYP_DEFINES['component'] = 'shared_library'
@config_ctx(group='link_type')
def static_library(c):
  c.gyp_env.GYP_DEFINES['component'] = 'static_library'
@config_ctx()
def ffmpeg_branding(c, branding=None):
  """Select the ffmpeg codec set ('Chrome', 'ChromeOS', ...)."""
  if branding:
    c.gyp_env.GYP_DEFINES['ffmpeg_branding'] = branding
@config_ctx()
def proprietary_codecs(c, invert=False):
  """Toggle proprietary media codecs."""
  c.gyp_env.GYP_DEFINES['proprietary_codecs'] = int(not invert)
@config_ctx()
def chrome_with_codecs(c):
  """Chromium build with the full Chrome codec set."""
  ffmpeg_branding(c, branding='Chrome')
  proprietary_codecs(c)
@config_ctx()
def chromiumos(c):
  """Target Chromium OS (chromeos=1)."""
  c.gyp_env.GYP_DEFINES['chromeos'] = 1
@config_ctx(includes=['chromiumos'])
def chromeos(c):
  """Chrome OS build with ChromeOS codec branding."""
  ffmpeg_branding(c, branding='ChromeOS')
  proprietary_codecs(c)
@config_ctx()
def ozone(c):
  """Enable the Ozone platform abstraction layer."""
  c.gyp_env.GYP_DEFINES['use_ozone'] = 1
@config_ctx()
def oilpan(c):
  # NOTE(review): despite the config name this sets enable_oilpan to 0
  # (i.e. it disables oilpan) — confirm against the bots that include it.
  c.gyp_env.GYP_DEFINES['enable_oilpan'] = 0
@config_ctx()
def clobber(c):
  """Force a clobber (full clean) before compiling."""
  c.compile_py.clobber = True
@config_ctx(includes=['static_library', 'clobber'])
def official(c):
  """Official Chrome-branded build (static, clobbered, official mode)."""
  c.gyp_env.GYP_DEFINES['branding'] = 'Chrome'
  c.gyp_env.GYP_DEFINES['buildtype'] = 'Official'
  c.compile_py.mode = 'official'
@config_ctx(deps=['compiler'])
def asan(c):
  """AddressSanitizer build; requires a clang-based compiler config."""
  if 'clang' not in c.compile_py.compiler:  # pragma: no cover
    raise BadConf('asan requires clang')
  c.runtests.swarming_tags |= {'asan:1'}
  if c.TARGET_PLATFORM in ['mac', 'win']:
    # Set fastbuild=0 and prevent other configs from changing it.
    fastbuild(c, invert=True, optional=False)
  c.gyp_env.GYP_DEFINES['asan'] = 1
  if c.TARGET_PLATFORM != 'android' and c.TARGET_BITS == 64:
    # LSAN isn't supported on Android or 32 bits platforms.
    c.gyp_env.GYP_DEFINES['lsan'] = 1


@config_ctx(deps=['compiler'])
def lsan(c):
  """Enables LeakSanitizer at test-run time (runtest flags and swarming tag)."""
  c.runtests.enable_lsan = True
  c.runtests.swarming_extra_args += ['--lsan=1']
  c.runtests.swarming_tags |= {'lsan:1'}


# TODO(infra,earthdok,glider): Make this a gyp variable. This is also not a
# good name as only v8 builds release symbolized with -O2 while
# chromium.lkgr uses -O1.
@config_ctx()
def asan_symbolized(c):
  """Keeps release stacks symbolizable by disabling inlining."""
  c.gyp_env.GYP_DEFINES['release_extra_cflags'] = (
      '-fno-inline-functions -fno-inline')


@config_ctx()
def sanitizer_coverage(c):
  """Enables edge-level sanitizer coverage instrumentation."""
  c.gyp_env.GYP_DEFINES['sanitizer_coverage'] = 'edge'


@config_ctx(deps=['compiler'])
def msan(c):
  """MemorySanitizer build; requires a clang-based compiler config."""
  if 'clang' not in c.compile_py.compiler:  # pragma: no cover
    raise BadConf('msan requires clang')
  c.runtests.swarming_tags |= {'msan:1'}
  c.gyp_env.GYP_DEFINES['msan'] = 1


@config_ctx()
def msan_full_origin_tracking(c):
  """MSan origin tracking at its most detailed level."""
  # Track the chain of stores leading from allocation site to use site.
  c.gyp_env.GYP_DEFINES['msan_track_origins'] = 2


# This is currently needed to make tests return a non-zero exit code when an
# UBSan failure happens.
# TODO(kjellander,samsonov): Remove when the upstream bug
# (https://llvm.org/bugs/show_bug.cgi?id=25569) is fixed.
@config_ctx()
def ubsan_fail_on_errors(c):
  """Makes UBSan failures fatal instead of recoverable."""
  c.gyp_env.GYP_DEFINES['release_extra_cflags'] = (
      '-fno-sanitize-recover=undefined')


@config_ctx(deps=['compiler'], includes=['ubsan_fail_on_errors'])
def ubsan(c):
  """UndefinedBehaviorSanitizer build; requires a clang-based compiler."""
  if 'clang' not in c.compile_py.compiler:  # pragma: no cover
    raise BadConf('ubsan requires clang')
  c.gyp_env.GYP_DEFINES['ubsan'] = 1


@config_ctx(deps=['compiler'], includes=['ubsan_fail_on_errors'])
def ubsan_vptr(c):
  """UBSan vptr (bad-cast) checking; requires a clang-based compiler."""
  if 'clang' not in c.compile_py.compiler:  # pragma: no cover
    raise BadConf('ubsan_vptr requires clang')
  c.gyp_env.GYP_DEFINES['ubsan_vptr'] = 1


@config_ctx()
def prebuilt_instrumented_libraries(c):
  """Uses prebuilt sanitizer-instrumented system libraries."""
  c.gyp_env.GYP_DEFINES['use_prebuilt_instrumented_libraries'] = 1
@config_ctx(group='memory_tool')
def memcheck(c):
  """Runs tests under Valgrind Memcheck (exclusive 'memory_tool' group)."""
  _memory_tool(c, 'memcheck')
  c.gyp_env.GYP_DEFINES['build_for_tool'] = 'memcheck'


@config_ctx(deps=['compiler'], group='memory_tool')
def tsan2(c):
  """ThreadSanitizer v2 build; requires a clang-based compiler."""
  if 'clang' not in c.compile_py.compiler:  # pragma: no cover
    raise BadConf('tsan2 requires clang')
  c.runtests.swarming_tags |= {'tsan:1'}
  gyp_defs = c.gyp_env.GYP_DEFINES
  gyp_defs['tsan'] = 1
  # NaCl is disabled under TSan.
  gyp_defs['disable_nacl'] = 1


@config_ctx()
def separate_mspdbsrv(c):
  """Runs a separate mspdbsrv.exe instance for the Windows linker."""
  c.gyp_env.GYP_USE_SEPARATE_MSPDBSRV = 1


@config_ctx()
def syzyasan_compile_only(c):
  """SyzyASan instrumentation for compile-only bots (no Z7 debug info)."""
  gyp_defs = c.gyp_env.GYP_DEFINES
  gyp_defs['syzyasan'] = 1
  gyp_defs['win_z7'] = 0


@config_ctx(
    deps=['compiler'], group='memory_tool', includes=['syzyasan_compile_only',
                                                      'separate_mspdbsrv'])
def syzyasan(c):
  """Full SyzyASan config: requires static linking, forces Z7 debug info
  and disables precompiled headers (overrides syzyasan_compile_only)."""
  if c.gyp_env.GYP_DEFINES['component'] != 'static_library':  # pragma: no cover
    raise BadConf('SyzyASan requires component=static_library')
  gyp_defs = c.gyp_env.GYP_DEFINES
  gyp_defs['win_z7'] = 1
  gyp_defs['chromium_win_pch'] = 0
  c.gyp_env.GYP_USE_SEPARATE_MSPDBSRV = 1


@config_ctx(group='memory_tool')
def drmemory_full(c):
  """Runs tests under Dr. Memory in full mode."""
  _memory_tool(c, 'drmemory_full')
  c.gyp_env.GYP_DEFINES['build_for_tool'] = 'drmemory'


@config_ctx(group='memory_tool')
def drmemory_light(c):
  """Runs tests under Dr. Memory in light mode."""
  _memory_tool(c, 'drmemory_light')
  c.gyp_env.GYP_DEFINES['build_for_tool'] = 'drmemory'
def _memory_tool(c, tool):
  """Selects the memory tool used when running tests.

  Raises BadConf when the tool name is not listed in MEMORY_TOOLS.
  """
  supported = tool in MEMORY_TOOLS
  if not supported:  # pragma: no cover
    message = ('"%s" is not a supported memory tool, the supported ones '
               'are: %s' % (tool, ','.join(MEMORY_TOOLS)))
    raise BadConf(message)
  c.runtests.memory_tool = tool
@config_ctx()
def lto(c):
  """Enables link-time optimization."""
  c.lto = True


@config_ctx(includes=['lto'])
def cfi_vptr(c):
  """Control Flow Integrity vptr checks; needs LTO and caps link parallelism."""
  c.gyp_env.GYP_DEFINES['cfi_vptr'] = 1
  c.gyp_env.GYP_LINK_CONCURRENCY = 8


@config_ctx()
def trybot_flavor(c):
  """Trybot tweaks: fastbuild and dcheck, applied only where optional."""
  fastbuild(c, optional=True)
  dcheck(c, optional=True)


@config_ctx()
def gn_minimal_symbols(c):
  """Minimal symbols for GN builds."""
  c.gn_args.append('symbol_level=1')


@config_ctx()
def clang_tot(c):
  """Builds with clang tip-of-tree instead of the pinned Clang package."""
  c.env.LLVM_FORCE_HEAD_REVISION = 'YES'
  # Plugin flags often need to be changed when using a plugin newer than
  # the latest Clang package, so disable plugins.
  # TODO(pcc): Investigate whether this should be consistent between Windows and
  # non-Windows.
  if c.TARGET_PLATFORM != 'win':
    c.gyp_env.GYP_DEFINES['clang_use_chrome_plugins'] = 0


@config_ctx(includes=['ninja', 'clang', 'asan', 'static_library'])
def win_asan(c):
  """Windows ASan: clang + ninja + ASan with static linking."""
  # These are set on the lkgr bot, and the fyi bots should match the lkgr bot.
  # TODO(thakis): Once the lkgr bot uses recipes, the lkgr and the fyi bots
  # should use the same context to ensure they use the same gyp defines.
  c.gyp_env.GYP_DEFINES['enable_ipc_fuzzer'] = 1
  c.gyp_env.GYP_DEFINES['v8_enable_verify_heap'] = 1


#### 'Full' configurations

@config_ctx(includes=['ninja', 'default_compiler'])
def chromium_no_goma(c):
  """Standard Chromium build without goma."""
  c.compile_py.default_targets = ['All', 'chromium_builder_tests']


@config_ctx(includes=['ninja', 'default_compiler', 'goma'])
def chromium(c):
  """Standard Chromium build with goma."""
  c.compile_py.default_targets = ['All', 'chromium_builder_tests']
  c.cros_sdk.external = True


@config_ctx(includes=['ninja', 'clang', 'goma'])
def chromium_win_clang(c):
  """Windows clang build with goma."""
  fastbuild(c, final=False)  # final=False so win_clang_asan can override it.


@config_ctx(includes=['ninja', 'clang', 'clang_tot'])  # No goma.
def chromium_win_clang_tot(c):
  """Windows clang tip-of-tree build (goma unavailable with ToT clang)."""
  fastbuild(c)


@config_ctx(includes=['chromium_win_clang', 'official'])
def chromium_win_clang_official(c):
  """Official Windows clang build; everything comes from the includes."""
  pass


@config_ctx(includes=['chromium_win_clang_tot', 'official'])
def chromium_win_clang_official_tot(c):
  """Official Windows clang tip-of-tree build."""
  pass


@config_ctx(includes=['win_asan', 'clang_tot'])  # No goma.
def chromium_win_clang_asan_tot(c):
  """Windows ASan with clang tip-of-tree."""
  pass


@config_ctx(includes=['chromium_win_clang_asan_tot', 'sanitizer_coverage'])
def chromium_win_clang_asan_tot_coverage(c):
  """Windows ASan ToT with sanitizer coverage instrumentation."""
  pass


@config_ctx(includes=['ninja', 'clang', 'clang_tot'])  # No goma.
def clang_tot_linux(c):
  """Linux clang tip-of-tree build."""
  pass


@config_ctx(includes=['ninja', 'clang', 'clang_tot'])  # No goma.
def clang_tot_mac(c):
  """Mac clang tip-of-tree build."""
  fastbuild(c, final=False)  # final=False so clang_tot_mac_asan can override.
@config_ctx()
def asan_test_batch(c):
  """Runs tests one at a time so ASan reports attribute to a single test."""
  c.runtests.test_args.append('--test-launcher-batch-limit=1')


@config_ctx(includes=['clang_tot_linux', 'asan', 'chromium_sanitizer',
                      'asan_test_batch'])
def clang_tot_linux_asan(c):
  # Like chromium_linux_asan, without goma.
  pass


@config_ctx(includes=['clang_tot_linux', 'ubsan_vptr', 'sanitizer_coverage'])
def clang_tot_linux_ubsan_vptr(c):
  """Linux clang ToT with UBSan vptr checks and coverage."""
  pass


@config_ctx(includes=['clang_tot_mac', 'asan', 'chromium_sanitizer',
                      'static_library'])
def clang_tot_mac_asan(c):
  # Like chromium_mac_asan, without goma.
  # Clear lsan configuration for mac.
  del c.gyp_env.GYP_DEFINES['lsan']


@config_ctx(includes=['android_common', 'ninja', 'clang', 'asan', 'clang_tot'])
def clang_tot_android_asan(c):
  # Like android_clang, minus goma, minus static_library, plus asan.
  pass


# GYP_DEFINES must not include 'asan' or 'clang', else the tester bot will try
# to compile clang.
@config_ctx(includes=['chromium_no_goma'])
def chromium_win_asan(c):
  """Windows ASan tester config; see the note above about GYP_DEFINES."""
  c.runtests.run_asan_test = True


@config_ctx()
def chromium_sanitizer(c):
  """Shared test-runner tweak for sanitizer bots: always print test stdio."""
  c.runtests.test_args.append('--test-launcher-print-test-stdio=always')


@config_ctx(includes=['ninja', 'clang', 'goma', 'asan', 'chromium_sanitizer'])
def chromium_asan(c):
  """Base Chromium ASan build with goma."""
  pass


@config_ctx(includes=['chromium_asan'])
def chromium_asan_default_targets(c):
  """Chromium ASan limited to the chromium_builder_asan target."""
  c.compile_py.default_targets = ['chromium_builder_asan']


@config_ctx(includes=['chromium_asan', 'asan_test_batch'])
def chromium_linux_asan(c):
  """Linux ASan: base ASan plus batched test launching."""
  pass


@config_ctx(includes=['chromium_asan', 'static_library'])
def chromium_mac_asan(c):
  """Mac ASan: static linking, no LSan, explicit host arch for x64."""
  # Clear lsan configuration for mac.
  del c.gyp_env.GYP_DEFINES['lsan']
  # Need to explicitly set host arch for mac asan 64.
  # TODO(glider, earthdok): Figure out if this is really required or
  # auto-detected by gyp.
  if c.gyp_env.GYP_DEFINES['target_arch'] == 'x64':
    c.gyp_env.GYP_DEFINES['host_arch'] = 'x64'
@config_ctx(includes=['chromium'])
def chromium_mac_mac_views(c):
  """Mac build with the toolkit-views browser UI."""
  c.gyp_env.GYP_DEFINES['mac_views_browser'] = '1'


@config_ctx(includes=['ninja', 'clang', 'goma', 'msan', 'chromium_sanitizer'])
def chromium_msan(c):
  """Chromium MemorySanitizer build."""
  c.compile_py.default_targets = ['All', 'chromium_builder_tests']


@config_ctx(includes=['ninja', 'clang', 'goma', 'syzyasan'])
def chromium_syzyasan(c):  # pragma: no cover
  """Chromium SyzyASan build."""
  c.compile_py.default_targets = ['All', 'chromium_builder_tests']


@config_ctx(includes=['ninja', 'clang', 'goma', 'tsan2', 'chromium_sanitizer'])
def chromium_tsan2(c):
  """Chromium ThreadSanitizer v2 build."""
  c.compile_py.default_targets = ['All', 'chromium_builder_tests']


@config_ctx(includes=['ninja', 'default_compiler', 'goma', 'chromeos'])
def chromium_chromeos(c):  # pragma: no cover
  """Chromium targeting Chrome OS."""
  c.compile_py.default_targets = ['All', 'chromium_builder_tests']


@config_ctx(includes=['chromium_asan', 'chromiumos', 'asan_test_batch'])
def chromium_chromiumos_asan(c):
  """Chromium OS ASan build."""
  pass


@config_ctx(includes=['ninja', 'clang', 'goma', 'chromeos'])
def chromium_chromeos_clang(c):  # pragma: no cover
  """Chrome OS build with clang."""
  c.compile_py.default_targets = ['All', 'chromium_builder_tests']


@config_ctx(includes=['chromium_chromeos', 'ozone'])
def chromium_chromeos_ozone(c):  # pragma: no cover
  """Chrome OS build with the Ozone layer."""
  c.compile_py.default_targets = ['All', 'chromium_builder_tests']


@config_ctx(includes=['ninja', 'clang', 'goma'])
def chromium_clang(c):
  """Standard Chromium build with clang."""
  c.compile_py.default_targets = ['All', 'chromium_builder_tests']


@config_ctx(includes=['ninja', 'clang', 'cfi_vptr'])
def chromium_cfi(c):
  """Chromium with Control Flow Integrity checks."""
  c.compile_py.default_targets = ['All']


@config_ctx(includes=['xcode', 'static_library'])
def chromium_xcode(c):  # pragma: no cover
  """Mac build driven by Xcode instead of ninja."""
  c.compile_py.build_tool = 'xcode'
  c.compile_py.default_targets = ['All']
  c.compile_py.xcode_project = Path('[CHECKOUT]', 'build', 'all.xcodeproj')


@config_ctx(includes=['chromium', 'official'])
def chromium_official(c):
  """Official Chromium build; per-platform compile targets."""
  # TODO(phajdan.jr): Unify compile targets used by official builders.
  if c.TARGET_PLATFORM == 'win':
    c.compile_py.default_targets = ['chrome_official_builder']
  elif c.TARGET_PLATFORM in ['linux', 'mac']:
    c.compile_py.default_targets = []


@config_ctx(includes=['chromium'])
def blink(c):
  """Chromium build limited to the Blink test targets."""
  c.compile_py.default_targets = ['blink_tests']


@config_ctx()
def blink_logging_on(c, invert=False):
  """Keeps Blink logging on in all build types (off with invert=True)."""
  c.gyp_env.GYP_DEFINES['blink_logging_always_on'] = int(not invert)


@config_ctx()
def archive_gpu_tests(c):
  # TODO(sergiyb): This option should be removed/refactored, because it was
  # originally created to prevent building GPU tests on Chromium waterfalls,
  # which is no longer useful as we trigger swarming GPU tests from tryservers.
  c.gyp_env.GYP_DEFINES['archive_gpu_tests'] = 1
@config_ctx(includes=['android_common', 'ninja', 'static_library',
                      'default_compiler', 'goma'])
def android(c):
  """Standard Android build with the default compiler."""
  pass


@config_ctx(includes=['android_common', 'ninja', 'static_library', 'clang',
                      'goma'])
def android_clang(c):
  """Android build with clang."""
  pass


@config_ctx(includes=['android_common', 'ninja', 'shared_library', 'clang',
                      'goma', 'asan'])
def android_asan(c):
  # ASan for Android needs shared_library, so it needs it own config.
  # See https://www.chromium.org/developers/testing/addresssanitizer.
  pass


@config_ctx()
def android_common(c):
  """Shared Android setup: fastbuild, OS=android and SDK tools on PATH."""
  gyp_defs = c.gyp_env.GYP_DEFINES
  gyp_defs['fastbuild'] = 1
  gyp_defs['OS'] = 'android'
  c.env.PATH.extend([
      Path('[CHECKOUT]', 'third_party', 'android_tools', 'sdk',
           'platform-tools'),
      Path('[CHECKOUT]', 'build', 'android')])


@config_ctx()
def android_findbugs(c):
  """Runs the FindBugs static analyzer as part of the Android build."""
  c.gyp_env.GYP_DEFINES['run_findbugs'] = 1


@config_ctx(includes=['ninja', 'shared_library', 'clang', 'goma'])
def codesearch(c):
  """Codesearch indexing build: keep going on errors, shared linking."""
  # -k 0 prevents stopping on errors, so the compile step tries to do as much as
  # possible.
  c.compile_py.build_args = '-k 0'
  gyp_defs = c.gyp_env.GYP_DEFINES
  gyp_defs['fastbuild'] = 1


@config_ctx(includes=['ninja', 'static_library', 'msvs2015'])
def chromium_pgo_base(c):
  """Shared base for the Windows PGO instrument/optimize phases."""
  c.gyp_env.GYP_DEFINES['buildtype'] = 'Official'
  # Goma doesn't support the PGO compile flags.
  c.gyp_env.GYP_DEFINES['use_goma'] = 0
  fastbuild(c, invert=True)
  c.compile_py.default_targets = ['chrome']


#### 'Full' configurations

@config_ctx(includes=['chromium_pgo_base'])
def chromium_pgo_instrument(c):
  """PGO phase 1: build instrumented binaries and make them runnable."""
  c.gyp_env.GYP_DEFINES['chrome_pgo_phase'] = 1
  # Some of the binaries needed by the PGO gets copied into the build directory
  # during the build, we need to augment the PATH variable so it can find them
  # during the profiling step.
  c.env.PATH.extend([c.build_dir.join(c.build_config_fs)])


@config_ctx(includes=['chromium_pgo_base'])
def chromium_pgo_optimize(c):
  """PGO phase 2: rebuild using the collected profiles."""
  c.gyp_env.GYP_DEFINES['chrome_pgo_phase'] = 2
@config_ctx()
def v8_optimize_medium(c):
  """Builds V8 debug with medium optimization."""
  c.gyp_env.GYP_DEFINES['v8_optimized_debug'] = 1


@config_ctx()
def v8_slow_dchecks(c):
  """Enables V8's slow DCHECKs."""
  c.gyp_env.GYP_DEFINES['v8_enable_slow_dchecks'] = 1


@config_ctx()
def v8_verify_heap(c):
  """Enables V8 heap verification."""
  c.gyp_env.GYP_DEFINES['v8_enable_verify_heap'] = 1


@config_ctx()
def chromium_perf(c):
  """Perf bots: incremental builds (no clobber) for stable timings."""
  c.compile_py.clobber = False


@config_ctx()
def chromium_perf_fyi(c):
  """Perf FYI bots: incremental builds; on Windows hosts drop goma and
  the configured compiler."""
  c.compile_py.clobber = False
  if c.HOST_PLATFORM == 'win':
    c.compile_py.compiler = None
    c.compile_py.goma_dir = None
    c.gyp_env.GYP_DEFINES['use_goma'] = 0


@config_ctx(includes=['chromium_clang'])
def cast_linux(c):
  """Chromecast build on Linux."""
  c.gyp_env.GYP_DEFINES['chromecast'] = 1


@config_ctx()
def internal_gles2_conform_tests(c):
  """Enables the internal GLES2 conformance tests."""
  c.gyp_env.GYP_DEFINES['internal_gles2_conform_tests'] = 1
|
const geoLocation = require("nativescript-geolocation");
const observableModule = require("tns-core-modules/data/observable");
const app = require("tns-core-modules/application");
const dialogs = require("tns-core-modules/ui/dialogs");
const appSettings = require("application-settings");
// Module-level page state, populated in onNavigatingTo.
let page;
let viewModel;
let sideDrawer;

/**
 * Page `navigatingTo` handler: builds the observable view model, closes
 * the side drawer and binds the model to the page.
 */
function onNavigatingTo(args) {
    page = args.object;
    viewModel = observableModule.fromObject({
        // Starts continuous location updates, mirroring each fix into
        // `currentGeoLocation`.
        // NOTE(review): the watch id returned by watchLocation is discarded,
        // so the watch can never be cleared -- confirm this is intended.
        showLocation: function () {
            geoLocation.watchLocation(location => {
                this.currentGeoLocation = location;
            }, error => {
                alert(error);
            }, {
                desiredAccuracy: 3,          // plugin accuracy value -- presumably high accuracy; confirm
                updateDistance: 10,          // minimum movement before a new update
                minimumUpdateTime: 1000 * 1  // at most one update per second
            });
        }, enableLocationServices: function () {
            // Prompts for location permission if needed, then starts watching.
            geoLocation.isEnabled().then(enabled => {
                if (!enabled) {
                    geoLocation.enableLocationRequest().then(() => this.showLocation());
                } else {
                    this.showLocation();
                }
            });
        }, currentGeoLocation: null  // last received location fix (or null)
    });
    sideDrawer = app.getRootView();
    sideDrawer.closeDrawer();
    page.bindingContext = viewModel;
}
/**
 * Opens the application-level side drawer.
 */
function onDrawerButtonTap() {
    app.getRootView().showDrawer();
}
/**
 * Navigates back to the previous (general menu) page.
 */
function onGeneralMenu() {
    const frame = page.frame;
    frame.goBack();
}
// Handlers exported for binding from the page's XML markup.
exports.onGeneralMenu = onGeneralMenu;
exports.onNavigatingTo = onNavigatingTo;
exports.onDrawerButtonTap = onDrawerButtonTap;
|
// This file was automatically generated. Do not modify.
'use strict';
goog.provide('Blockly.Msg.en');
goog.require('Blockly.Msg');
Blockly.Msg.ABOUT = "about";
Blockly.Msg.ACCELERATION_TOOLTIP = "Get the acceleration value in milli-gravities.";
Blockly.Msg.ACCELEROMETER_ROTATION_TOOLTIP = "Get the tilt or rotations in degrees.";
Blockly.Msg.ACCELEROMETER_TOOLTIP = "Represents an accelerometer.";
Blockly.Msg.ACTION_ANALOGIN = "actuator analog";
Blockly.Msg.ACTION_BUZZER = "buzzer";
Blockly.Msg.ACTION_BUZZER_ARDUINO = "buzzer HYT120";
Blockly.Msg.ACTION_CALLIBOT = "Calli:bot";
Blockly.Msg.ACTION_DIGITALIN = "actuator digital";
Blockly.Msg.ACTION_EVAL = "eval";
Blockly.Msg.ACTION_EVAL_AS = "as";
Blockly.Msg.ACTION_IN = "actuator";
Blockly.Msg.ACTION_INFRARED = "infrared emitter";
Blockly.Msg.ACTION_LCD = "LCD 1602";
Blockly.Msg.ACTION_LCDI2C = "LCD 1602 I²C";
Blockly.Msg.ACTION_LCDI2C_SENSEBOX = "OLED Display I²C";
Blockly.Msg.ACTION_LED = "LED";
Blockly.Msg.ACTION_MOTOR = "motor";
Blockly.Msg.ACTION_OLEDSSD1306I2C = "OLED SSD1306 I²C";
Blockly.Msg.ACTION_PLAY = "play";
Blockly.Msg.ACTION_PLOTTING = "plot";
Blockly.Msg.ACTION_PLOT_CLEAR = "clear the plot";
Blockly.Msg.ACTION_PLOT_CLEAR_TOOLTIP = "Removes all the data from the plot.";
Blockly.Msg.ACTION_PLOT_POINT = "plot a point on";
Blockly.Msg.ACTION_PLOT_POINT_TOOLTIP = "Plots a point with specified value (Y axis) at the specified tickmark (X axis).";
Blockly.Msg.ACTION_PLOT_TICKMARK = "at tickmark";
Blockly.Msg.ACTION_RELAY = "relay SRD-05VDC-SL-C";
Blockly.Msg.ACTION_RGBLED = "RGB LED";
Blockly.Msg.ACTION_SDCARD = "SD card";
Blockly.Msg.ACTION_SERIAL_PRINT = "show on Serial Monitor";
Blockly.Msg.ACTION_SERIAL_PRINT_TOOLTIP = "Show data on the Serial Monitor. You can find the Serial Monitor in the USB Programm on top, under View.";
Blockly.Msg.ACTION_SERVO = "servo motor";
Blockly.Msg.ACTION_SERVO_ARDUINO = "servo motor SG90";
Blockly.Msg.ACTION_STEPMOTOR = "step motor";
Blockly.Msg.ACTION_WIRELESS = "WiFi connection.";
Blockly.Msg.ACTIVITY_TOOLTIP = "Marker for an additional activity.";
Blockly.Msg.ACTOR_ANALOGIN_TOOLTIP = "Writes an analog value (PWM wave) to a pin. Only values between 0 and 255 should be used";
Blockly.Msg.ACTOR_DIGITALIN_TOOLTIP = "Writes a HIGH or a LOW value to a digital pin. Only the values HIGH »1« and LOW »0« should be used.";
Blockly.Msg.ACTOR_TOOLTIP = "Represents any actor.";
Blockly.Msg.ADDRESS = "address";
Blockly.Msg.ADD_COMMENT = "Add Comment";
Blockly.Msg.ALL_RGBLED = "RGB LED all";
Blockly.Msg.ANALOG = "analog";
Blockly.Msg.ANALOGIN_TOOLTIP = "Represents any actuator connected to an analog pin.";
Blockly.Msg.ANALOGOUT_TOOLTIP = "Represents any sensor connected to an analog pin.";
Blockly.Msg.AND = "and";
Blockly.Msg.ARDUBRICK_TOOLTIP = "Represents the Bot'n Roll board with connected actors and sensors. There are also inbuilt actors and sensors available, e.g. pushbuttons, display ...";
Blockly.Msg.AUTH = "Please authorize this app to enable your work to be saved and to allow it to be shared by you.";
Blockly.Msg.BATTERY_GETSAMPLE_TOOLTIP = "Gets the current voltage from the battery.";
Blockly.Msg.BELOW = "below";
Blockly.Msg.BLOCK_NOT_EXECUTED = "The exection of this block will have no effect!";
Blockly.Msg.BLOCK_NOT_SUPPORTED = "This robot does not support this block!";
Blockly.Msg.BLOCK_USED_INCORRECTLY = "Unfortunately, this block cannot be used in this way.";
Blockly.Msg.BOB3_READNUMBER_TOOLTIP = "Returns the previously stored number.";
Blockly.Msg.BOB3_RECALL_NUMBER = "recall number";
Blockly.Msg.BOB3_REMEMBER_NUMBER = "remember number";
Blockly.Msg.BOB3_SAVENUMBER_TOOLTIP = "The number to store should be an integer in the range of 0 to 255";
Blockly.Msg.BOTH = "both";
Blockly.Msg.BOTH_LED = "LED both";
Blockly.Msg.BOX_ID = "Device ID";
Blockly.Msg.BRICKLIGHT = "brick light";
Blockly.Msg.BRICKLIGHT_BLUE = "blue";
Blockly.Msg.BRICKLIGHT_COLOR = "colour";
Blockly.Msg.BRICKLIGHT_DOUBLE_FLASH = "double flashing";
Blockly.Msg.BRICKLIGHT_FLASH = "flashing";
Blockly.Msg.BRICKLIGHT_GREEN = "green";
Blockly.Msg.BRICKLIGHT_OFF_TOOLTIP = "Turns bricklight off.";
Blockly.Msg.BRICKLIGHT_ON = "on";
Blockly.Msg.BRICKLIGHT_ON_TOOLTIP = "Turns bricklight on.";
Blockly.Msg.BRICKLIGHT_ORANGE = "orange";
Blockly.Msg.BRICKLIGHT_RED = "red";
Blockly.Msg.BRICKLIGHT_RESET_TOOLTIP = "Resets bricklight. Sets the default bricklight: green and blinking.";
Blockly.Msg.BRICKNAME_WEDO = "WeDo";
Blockly.Msg.BRICK_IPADDRESS = "ip address";
Blockly.Msg.BRICK_PASSWORD = "password";
Blockly.Msg.BRICK_PHENOMENON = "Phenomenon";
Blockly.Msg.BRICK_PORT = "port";
Blockly.Msg.BRICK_TRACK_WIDTH = "track width";
Blockly.Msg.BRICK_USERNAME = "user name";
Blockly.Msg.BRICK_WHEEL_DIAMETER = "wheel diameter";
Blockly.Msg.BRUSH_OFF = "turn brush Off";
Blockly.Msg.BRUSH_OFF_TOOLTIP = "Turns the brush off.";
Blockly.Msg.BRUSH_ON = "turn brush on (RPM)";
Blockly.Msg.BRUSH_ON_TOOLTIP = "Turns on the brush with RPM of the motor (0<=RPM<=10000)";
Blockly.Msg.BUTTON_DO_SHARE = "Share";
Blockly.Msg.BUTTON_DO_UPLOAD_GALLERY = "Upload »$« to the gallery";
Blockly.Msg.BUTTON_EMPTY_LIST = "Empty list";
Blockly.Msg.BUZZER_TOOLTIP = "Represents a buzzer.";
Blockly.Msg.CALLIBOT_TOOLTIP = "Represents the Calli:bot extension board.";
Blockly.Msg.CALLIOPEBRICK_TOOLTIP = "Represents Calliope, a pocket-sized codeable computer. There are also inbuilt actors and sensors available, e.g. buttons, display ...";
Blockly.Msg.CB_ALL = "Calli:bot all";
Blockly.Msg.CB_BOTH = "Calli:bot both";
Blockly.Msg.CB_LEFT = "Calli:bot left";
Blockly.Msg.CB_RIGHT = "Calli:bot right";
Blockly.Msg.CENTER = "center";
Blockly.Msg.CHANGE_VALUE_TITLE = "Change value:";
Blockly.Msg.CHAT = "Chat with your collaborator by typing in this box!";
Blockly.Msg.CLEAN_UP = "Clean up Blocks";
Blockly.Msg.CLEAR = "clear";
Blockly.Msg.COLLAPSE_ALL = "Collapse Blocks";
Blockly.Msg.COLLAPSE_BLOCK = "Collapse Block";
Blockly.Msg.COLON = "colon";
Blockly.Msg.COLOUR_AMBIENTLIGHT_GETSAMPLE_TOOLTIP = "Gets the current ambient light reading from the sensor.";
Blockly.Msg.COLOUR_BLEND_COLOUR1 = "colour 1";
Blockly.Msg.COLOUR_BLEND_COLOUR2 = "colour 2";
Blockly.Msg.COLOUR_BLEND_HELPURL = "http://meyerweb.com/eric/tools/color-blend/";
Blockly.Msg.COLOUR_BLEND_RATIO = "ratio";
Blockly.Msg.COLOUR_BLEND_TITLE = "blend";
Blockly.Msg.COLOUR_BLEND_TOOLTIP = "Blends two colours together with a given ratio (0.0 - 1.0).";
Blockly.Msg.COLOUR_COLOUR_GETSAMPLE_TOOLTIP = "Gets the current colour reading from the sensor.";
Blockly.Msg.COLOUR_GETSAMPLE_TOOLTIP = "Gets the current reading from the colour sensor.";
Blockly.Msg.COLOUR_LIGHT_GETSAMPLE_TOOLTIP = "Gets the current brightness reading from the sensor.";
Blockly.Msg.COLOUR_PICKER_HELPURL = "https://en.wikipedia.org/wiki/Color";
Blockly.Msg.COLOUR_PICKER_TOOLTIP = "Choose a colour from the palette.";
Blockly.Msg.COLOUR_RANDOM_HELPURL = "http://randomcolour.com";
Blockly.Msg.COLOUR_RANDOM_TITLE = "random colour";
Blockly.Msg.COLOUR_RANDOM_TOOLTIP = "Choose a colour at random.";
Blockly.Msg.COLOUR_RGB_BLUE = "blue";
Blockly.Msg.COLOUR_RGB_GETSAMPLE_TOOLTIP = "Gets the current colour reading from the colour sensor. Values are in the range 0 to 255.";
Blockly.Msg.COLOUR_RGB_GREEN = "green";
Blockly.Msg.COLOUR_RGB_HELPURL = "http://www.december.com/html/spec/colorper.html";
Blockly.Msg.COLOUR_RGB_RED = "red";
Blockly.Msg.COLOUR_RGB_TITLE = "colour with";
Blockly.Msg.COLOUR_RGB_TOOLTIP = "Creates a color with the given red, green, and blue values. Values should be between 0 and 255.";
Blockly.Msg.COLOUR_RGB_WHITE = "white";
Blockly.Msg.COLOUR_TOOLTIP = "Represents a colour sensor.";
Blockly.Msg.COMPASS_CALIBRATE_TOOLTIP = "Calibrates the compass. Turn the compass sensor VERY slowly for two times (about 40 seconds).";
Blockly.Msg.COMPASS_GETSAMPLE_TOOLTIP = "Gets the current reading from the compass sensor.";
Blockly.Msg.COMPASS_TOOLTIP = "Represents a compass sensor.";
Blockly.Msg.COMPASS_TOOLTIP_EV3 = "Represents a HiTechnic NXT compass sensor.";
Blockly.Msg.CONFIGURATION_ERROR_ACTOR_MISSING = "This actuator is not configured. Please add the corresponding block in the configuration tab!";
Blockly.Msg.CONFIGURATION_ERROR_MOTORS_ROTATION_DIRECTION = "The direction of rotation of the left and right motor is different!";
Blockly.Msg.CONFIGURATION_ERROR_MOTOR_LEFT_MISSING = "Left motor missing in the configuration!";
Blockly.Msg.CONFIGURATION_ERROR_MOTOR_LEFT_UNREGULATED = "Left motor is not regulated!";
Blockly.Msg.CONFIGURATION_ERROR_MOTOR_MISSING = "Motor is missing on the given port!";
Blockly.Msg.CONFIGURATION_ERROR_MOTOR_RIGHT_MISSING = "Right motor missing in the configuration!";
Blockly.Msg.CONFIGURATION_ERROR_MOTOR_RIGHT_UNREGULATED = "Right motor is not regulated!";
Blockly.Msg.CONFIGURATION_ERROR_MULTIPLE_LEFT_MOTORS = "You have multiple left motors assigned to your configuration!";
Blockly.Msg.CONFIGURATION_ERROR_MULTIPLE_RIGHT_MOTORS = "You have multiple right motors assigned to your configuration!";
Blockly.Msg.CONFIGURATION_ERROR_NO_BUILTIN_RGBLED = "This board does not have a built in RGB LED!";
Blockly.Msg.CONFIGURATION_ERROR_OTHER_NOT_SUPPORTED = "Other power consumer does not support this type of block!";
Blockly.Msg.CONFIGURATION_ERROR_OVERLAPPING_PORTS = "Another component is already using the same port!";
Blockly.Msg.CONFIGURATION_ERROR_SENSOR_MISSING = "This sensor is not configured. Please add the corresponding block in the configuration tab!";
Blockly.Msg.CONFIGURATION_ERROR_SENSOR_WRONG = "Connected wrong sensor to the given port!";
Blockly.Msg.CONFIGURATION_ERROR_WLAN_CREDENTIALS_MISSING = "Missing WLAN credentials, please enter them in robot -> WLAN credentials ... !";
Blockly.Msg.CONFIGURATION_ERROR_WLAN_MISSING = "WiFi is not configured. Please add the corresponding block in the configuration tab!";
Blockly.Msg.CONFIGURATION_NO_PHENOMENON = "no phenomenon";
Blockly.Msg.CONFIGURATION_NO_PORT = "no port";
Blockly.Msg.CONFIGURATION_PORT = "Port1";
Blockly.Msg.CONFLIST_DELETE_ALL_TOOLTIP = "Click here to delete all selected programs.";
Blockly.Msg.CONFLIST_DELETE_TOOLTIP = "Click here to delete your robot configuration.";
Blockly.Msg.CONFLIST_LOAD_TOOLTIP = "Click here to load your robot configuration in the configuration environment.";
Blockly.Msg.CONNECTION_CHECK = "connection to robot %1 active?";
Blockly.Msg.CONNECTION_CHECK_TOOLTIP = "Check if the connection to the robot is active.";
Blockly.Msg.CONNECTION_CONNECT = "connect to robot name";
Blockly.Msg.CONNECTION_FROM_CONNECTION = "from connection";
Blockly.Msg.CONNECTION_FROM_ROBOT = "from robot";
Blockly.Msg.CONNECTION_MBED_RECEIVE_TOOLTIP = "Reads a message over one of the channels (0 - 255). The default channel is 0.";
Blockly.Msg.CONNECTION_MBED_SEND_TOOLTIP = "Sends a message to another system. You can specify a signal strength from 0 - 7, where 0 is very low and 7 is the strongests. The message is send over channel 0 unless you specify another one.";
Blockly.Msg.CONNECTION_OVER_CHANNEL = "over channel";
Blockly.Msg.CONNECTION_POWER = "with strength";
Blockly.Msg.CONNECTION_PROTOCOL_BLUETOOTH = "Bluetooth";
Blockly.Msg.CONNECTION_RECEIVED_DATA = "receive message";
Blockly.Msg.CONNECTION_RECEIVE_TOOLTIP = "Waits for a message from the robot which you declare in the connection.";
Blockly.Msg.CONNECTION_RECEIVE_TOOLTIP_BOB3 = "Reads a message via the IR receiver. Only numbers can be received.";
Blockly.Msg.CONNECTION_RECEIVE_TOOLTIP_MBOT = "Reads a message from the IR receiver. Only strings can be received.";
Blockly.Msg.CONNECTION_SEND_DATA = "send message";
Blockly.Msg.CONNECTION_SEND_TOOLTIP = "Sends a message to another robot.";
Blockly.Msg.CONNECTION_SEND_TOOLTIP_BOB3 = "Sends a message of type number to another Bob3. Hold the Bob3's face to face!";
Blockly.Msg.CONNECTION_SEND_TOOLTIP_MBOT = "Sends a message of type string to another mBot. Hold the mBots's face to face!";
Blockly.Msg.CONNECTION_SET_CHANNEL = "set channel to %1";
Blockly.Msg.CONNECTION_SET_CHANNEL_TOOLTIP = "Sets the channel for sending and receiving messages. Can be set from 0 to 255.";
Blockly.Msg.CONNECTION_START_TOOLTIP = "Tries to make a connection to another robot via Bluetooth.";
Blockly.Msg.CONNECTION_TOOLTIP = "A robot's connection";
Blockly.Msg.CONNECTION_TO_CONNECTION = "to connection";
Blockly.Msg.CONNECTION_TO_ROBOT = "to robot";
Blockly.Msg.CONNECTION_WAIT_FOR_CONNECTION = "wait for connection";
Blockly.Msg.CONNECTION_WAIT_TOOLTIP = "Waits for a connection via Bluetooth.";
Blockly.Msg.CONNECTOR = "hub";
Blockly.Msg.CONTROLS_FLOW_STATEMENTS_HELPURL = "https://github.com/google/blockly/wiki/Loops#loop-termination-blocks";
Blockly.Msg.CONTROLS_FLOW_STATEMENTS_OPERATOR_BREAK = "break out of loop";
Blockly.Msg.CONTROLS_FLOW_STATEMENTS_OPERATOR_CONTINUE = "continue with next iteration of loop";
Blockly.Msg.CONTROLS_FLOW_STATEMENTS_TOOLTIP_BREAK = "Break out of the containing loop.";
Blockly.Msg.CONTROLS_FLOW_STATEMENTS_TOOLTIP_CONTINUE = "Skip the rest of this loop, and continue with the next iteration.";
Blockly.Msg.CONTROLS_FLOW_STATEMENTS_WARNING = "Warning: This block may only be used within a loop.";
Blockly.Msg.CONTROLS_FOREACH_HELPURL = "https://github.com/google/blockly/wiki/Loops#for-each";
Blockly.Msg.CONTROLS_FOREACH_TITLE = "for each item %1 in list %2";
Blockly.Msg.CONTROLS_FOREACH_TOOLTIP = "For each item in a list, set the variable '%1' to the item, and then do some statements.";
Blockly.Msg.CONTROLS_FOR_HELPURL = "https://github.com/google/blockly/wiki/Loops#count-with";
Blockly.Msg.CONTROLS_FOR_TITLE = "count with %1 from %2 while counter < %3 by %4";
Blockly.Msg.CONTROLS_FOR_TOOLTIP = "Have the variable '%1' take on the values from the start number as long as it is less than the end number, counting by the specified interval, and do the specified blocks.";
Blockly.Msg.CONTROLS_IF_ELSEIF_TOOLTIP = "Add a condition to the if block.";
Blockly.Msg.CONTROLS_IF_ELSE_TOOLTIP = "Add a final, catch-all condition to the if block.";
Blockly.Msg.CONTROLS_IF_HELPURL = "https://github.com/google/blockly/wiki/IfElse";
Blockly.Msg.CONTROLS_IF_IF_TOOLTIP = "Add, remove, or reorder sections to reconfigure this if block.";
Blockly.Msg.CONTROLS_IF_MSG_ELSE = "else";
Blockly.Msg.CONTROLS_IF_MSG_ELSEIF = "else if";
Blockly.Msg.CONTROLS_IF_MSG_IF = "if";
Blockly.Msg.CONTROLS_IF_TOOLTIP_1 = "If a value is true, then do some statements.";
Blockly.Msg.CONTROLS_IF_TOOLTIP_2 = "If a value is true, then do the first block of statements. Otherwise, do the second block of statements.";
Blockly.Msg.CONTROLS_IF_TOOLTIP_3 = "If the first value is true, then do the first block of statements. Otherwise, if the second value is true, do the second block of statements.";
Blockly.Msg.CONTROLS_IF_TOOLTIP_4 = "If the first value is true, then do the first block of statements. Otherwise, if the second value is true, do the second block of statements. If none of the values are true, do the last block of statements.";
Blockly.Msg.CONTROLS_REPEAT_HELPURL = "https://en.wikipedia.org/wiki/For_loop";
Blockly.Msg.CONTROLS_REPEAT_INPUT_DO = "do";
Blockly.Msg.CONTROLS_REPEAT_TITLE = "repeat %1 times";
Blockly.Msg.CONTROLS_REPEAT_TOOLTIP = "Do some statements several times.";
Blockly.Msg.CONTROLS_WHILEUNTIL_HELPURL = "https://github.com/google/blockly/wiki/Loops#repeat";
Blockly.Msg.CONTROLS_WHILEUNTIL_OPERATOR_UNTIL = "repeat until";
Blockly.Msg.CONTROLS_WHILEUNTIL_OPERATOR_WHILE = "repeat while";
Blockly.Msg.CONTROLS_WHILEUNTIL_TOOLTIP_UNTIL = "While a value is false, then do some statements.";
Blockly.Msg.CONTROLS_WHILEUNTIL_TOOLTIP_WHILE = "While a value is true, then do some statements.";
Blockly.Msg.DATATABLE_ACTUALIZATION = "Modification date";
Blockly.Msg.DATATABLE_CONFIGURATIONS = "configurations";
Blockly.Msg.DATATABLE_CONFIGURATION_NAME = "Configuration name";
Blockly.Msg.DATATABLE_CREATED_BY = "Creator";
Blockly.Msg.DATATABLE_CREATED_ON = "Creation date";
Blockly.Msg.DATATABLE_MEMBERS = "members";
Blockly.Msg.DATATABLE_PROGRAMS = "programs";
Blockly.Msg.DATATABLE_PROGRAM_NAME = "Program name";
Blockly.Msg.DATATABLE_SHARED = "Shared";
Blockly.Msg.DATATABLE_SHARED_PROGRAMS = "shared programs";
Blockly.Msg.DATATABLE_SHARED_WITH = "Shared with";
Blockly.Msg.DATATABLE_USERGROUP = "user group";
Blockly.Msg.DATATABLE_USERGROUPS = "user groups";
Blockly.Msg.DATATABLE_USERGROUP_NAME = "Name of the user group";
Blockly.Msg.DATATABLE_USERGROUP_NAME_CREATE_HINT = "The name of the user group. Kepp in mind, that the members will have to type it in each time they log in.";
Blockly.Msg.DATATABLE_USERGROUP_OWNER = "Name of the owner of the user group";
Blockly.Msg.DELETE_ALL_BLOCKS = "Delete all %1 blocks?";
Blockly.Msg.DELETE_BLOCK = "Delete Block";
Blockly.Msg.DELETE_USERGROUP_MEMBER_AFTER_LOGIN_WARNING = "A member you want to delete did already log in and might have create own programs. Are you sure that you want to delete the selected member(s)?";
Blockly.Msg.DELETE_USERGROUP_MEMBER_WARNING = "Are you sure that you want to delete the selected member(s)?";
Blockly.Msg.DELETE_X_BLOCKS = "Delete %1 Blocks";
Blockly.Msg.DIGITAL = "digital";
Blockly.Msg.DIGITALIN_TOOLTIP = "Represents any actuator connected to a digital pin.";
Blockly.Msg.DIGITALOUT_TOOLTIP = "Represents any sensor connected to a digital.";
Blockly.Msg.DISABLE_BLOCK = "Disable Block";
Blockly.Msg.DISPLAY_ANIMATION = "animation";
Blockly.Msg.DISPLAY_CHARACTER = "character";
Blockly.Msg.DISPLAY_CLEAR = "clear display";
Blockly.Msg.DISPLAY_CLEAR_TOOLTIP = "Clears the display.";
Blockly.Msg.DISPLAY_COL = "in column";
Blockly.Msg.DISPLAY_GET_BRIGHTNESS_TOOLTIP = "Returns the brightness for all leds of the display. 0 means all leds are turned off, 9 is the brightest value.";
Blockly.Msg.DISPLAY_GET_PIXEL_TOOLTIP = "Returns the brightness for this led. 0 means the led is turned off, 9 is the brightest value.";
Blockly.Msg.DISPLAY_IMAGE = "image";
Blockly.Msg.DISPLAY_PICTURE = "picture";
Blockly.Msg.DISPLAY_PICTURE_EYES_CLOSED = "eyes closed";
Blockly.Msg.DISPLAY_PICTURE_EYES_OPEN = "eyes open";
Blockly.Msg.DISPLAY_PICTURE_FLOWERS = "flowers";
Blockly.Msg.DISPLAY_PICTURE_GLASSES = "glasses";
Blockly.Msg.DISPLAY_PICTURE_TACHO = "speedo";
Blockly.Msg.DISPLAY_PICTURE_TOOLTIP = "Displays a picture on the screen.";
Blockly.Msg.DISPLAY_PIXEL_BRIGHTNESS = "brightness";
Blockly.Msg.DISPLAY_PIXEL_TITLE = "LED";
Blockly.Msg.DISPLAY_ROW = "in row";
Blockly.Msg.DISPLAY_SET_BRIGHTNESS_TOOLTIP = "Sets the brightness for all leds of the display. 0 means all leds are turned off, 9 is the brightest value.";
Blockly.Msg.DISPLAY_SET_PIXEL_TOOLTIP = "Sets the brightness for this led. 0 means the led is turned off, 9 is the brightest value. With x and y you can determine the position of the led you would like to change.";
Blockly.Msg.DISPLAY_SHOW = "show";
Blockly.Msg.DISPLAY_TEXT = "text";
Blockly.Msg.DISPLAY_TEXT_TOOLTIP = "Displays a text on the screen.";
Blockly.Msg.DROP_TOOLTIP = "Represents a drop sensor.";
Blockly.Msg.DUPLICATE_BLOCK = "Duplicate";
Blockly.Msg.ENABLE_BLOCK = "Enable Block";
Blockly.Msg.ENCODER_GETSAMPLE_TOOLTIP = "Gets the current reading from the motor encoder.";
Blockly.Msg.ENCODER_RESET_TOOLTIP = "Resets the motor encoder.";
Blockly.Msg.ENCODER_TOOLTIP = "Represents an encoder.";
Blockly.Msg.ENVIRONMENTAL_TOOLTIP = "Represents an environmental sensor.";
Blockly.Msg.ENVIRONMENTAL_TOOLTIP_SENSEBOX = "Represents the BME680 environmental sensor.";
Blockly.Msg.ERROR_MISSING_PARAMETER = "An input value is missing!";
Blockly.Msg.ERROR_MISSING_RETURN = "The function return value is missing!";
Blockly.Msg.EV3BRICK_TOOLTIP = "Represents the EV3 brick with connected actors and sensors. There are also inbuilt actors and sensors available, e.g. buttons, display ...";
Blockly.Msg.EXPAND_ALL = "Expand Blocks";
Blockly.Msg.EXPAND_BLOCK = "Expand Block";
Blockly.Msg.EXTERNAL_INPUTS = "External Inputs";
Blockly.Msg.FLAME_GETSAMPLE_TOOLTIP = "Gets the current reading from the flame sensor.";
Blockly.Msg.FLAME_TOOLTIP = "Represents a flame sensor.";
Blockly.Msg.FLYOUT_VARIABLE_TEXT = "You need a variable? Please declare it first with a click on the + sign at the »start« block.";
Blockly.Msg.FOR = "for";
Blockly.Msg.FOURDIGITDISPLAY = "4-Digit Display";
Blockly.Msg.FOURDIGITDISPLAY_CLEAR_TOOLTIP = "Clears the 4-Digit Display.";
Blockly.Msg.FOURDIGITDISPLAY_SHOW_TOOLTIP = "Displays a number [0-9999] on the 4-Digit Display. Position [0-3] represents the starting position of the number";
Blockly.Msg.FOURDIGITDISPLAY_TOOLTIP = "Represents a Grove 4-Digit Display by Seeed";
Blockly.Msg.FRAME_WIDTH = "Frame width";
Blockly.Msg.FROM_POSITION = "from position";
Blockly.Msg.FSR_TOOLTIP = "Get the current reading from the force sensitive resistor under the feet of the robot.";
Blockly.Msg.GAIN = "gain";
Blockly.Msg.GALLERY_BY = "by";
Blockly.Msg.GALLERY_DATE = "created";
Blockly.Msg.GALLERY_DISLIKE = "dislike";
Blockly.Msg.GALLERY_LIKE = "like";
Blockly.Msg.GALLERY_SHARED_ALREADY = "You have already uploaded this program to the gallery. If you want to change it, look for the copy from the gallery and modify it. You can also remove it from the gallery while deleting the copy from the gallery.";
Blockly.Msg.GEARED_MOTOR = "geared motor";
Blockly.Msg.GET = "get";
Blockly.Msg.GETSAMPLE_TOOLTIP = "Gets the current reading from chosen sensor.";
Blockly.Msg.GET_CODE_TOOLTIP = "Returns the value of the solderable code pad in the head piece. Values are in range 0-31.";
Blockly.Msg.GO_TO_GROUPS = "Go to groups";
Blockly.Msg.GPS_TOOLTIP = "Represents a GPS receiver.";
Blockly.Msg.GROUP_CREATE_NAME_HINT = "Please keep in mind, that all members of a group have to enter the group name on each login. It should neither be complicated nor long.";
Blockly.Msg.GYRO_GETSAMPLE_TOOLTIP = "Gets the current reading from the gyro sensor.";
Blockly.Msg.GYRO_RESET_TOOLTIP = "Resets the gyro sensor.";
Blockly.Msg.GYRO_TOOLTIP = "Represents a gyro sensor.";
Blockly.Msg.GYRO_TOOLTIP_WEDO = "Represents a tilt sensor.";
Blockly.Msg.HELP = "Help";
Blockly.Msg.HINT_USERGROUP_MEMBER = "Enter the member id of your user here.";
Blockly.Msg.HINT_USERGROUP_OWNER = "Do <strong>not</strong> enter the real name of the owner of the user group here, but his <strong>username</strong> instead.";
Blockly.Msg.HINT_USER_ACCOUNT = "»IAmBotman« or »RobellaStracciatella«? Not everyone needs to know your real name. Think of a cool nickname that you can easily remember.";
Blockly.Msg.HINT_USER_AGE = "Are you under 16? Then please ask your parents to help you. They can specify their e-mail address to confirm your account.";
Blockly.Msg.HINT_USER_EMAIL = "This is voluntary! However, some functions of the lab are only available if you have verified your account by e-mail. You are younger than 16? Please ask your parents to help you out with one of their e-mail addresses. <br><a href='https://www.roberta-home.de/index.php?id=138&L=1' target='_blank'>Further information ...</a>";
Blockly.Msg.HINT_USER_NAME = "Enter your real name here if you like. This is just for you, no one else will see it.";
Blockly.Msg.HINT_USER_PASSWORT = "12345 is no secure password. Rather think of a safe combination of numbers and letters that you will not forget.";
Blockly.Msg.HINT_USER_PASSWORT_CONFIRM = "Got it? Better make sure!";
Blockly.Msg.HTCOLOUR_TOOLTIP = "Represents a HiTechnic NXT Color Sensor V2.";
Blockly.Msg.HUMIDITY_TOOLTIP = "Represents a humidity sensor.";
// Fixed grammar in user-facing tooltip: "This blocks needs" -> "This block needs";
// "other's" (spurious apostrophe) -> "others".
Blockly.Msg.ICON_BLOCKING_TOOLTIP = "Blocking block! This block needs some time to be executed, so others have to wait until it gives back the control to the caller function.";
Blockly.Msg.ID = "ID";
Blockly.Msg.IF_TOOLTIP = "Checks the condition in »if«. If the condition is true, executes the »do« action.";
Blockly.Msg.IMAGE_GET_TOOLTIP = "Returns the chosen image.";
Blockly.Msg.IMAGE_GET_TOOLTIP_ANGRY = "angry";
Blockly.Msg.IMAGE_GET_TOOLTIP_ASLEEP = "asleep";
Blockly.Msg.IMAGE_GET_TOOLTIP_BUTTERFLY = "butterfly";
Blockly.Msg.IMAGE_GET_TOOLTIP_CHESSBOARD = "chessboard";
Blockly.Msg.IMAGE_GET_TOOLTIP_CONFUSED = "confused";
Blockly.Msg.IMAGE_GET_TOOLTIP_COW = "cow";
Blockly.Msg.IMAGE_GET_TOOLTIP_DIAMOND = "diamond";
Blockly.Msg.IMAGE_GET_TOOLTIP_DIAMOND_SMALL = "small diamond";
Blockly.Msg.IMAGE_GET_TOOLTIP_DUCK = "duck";
Blockly.Msg.IMAGE_GET_TOOLTIP_FABULOUS = "fabulous";
Blockly.Msg.IMAGE_GET_TOOLTIP_GHOST = "ghost";
Blockly.Msg.IMAGE_GET_TOOLTIP_GIRAFFE = "giraffe";
Blockly.Msg.IMAGE_GET_TOOLTIP_HEART = "heart";
Blockly.Msg.IMAGE_GET_TOOLTIP_HEART_SMALL = "small heart";
Blockly.Msg.IMAGE_GET_TOOLTIP_HOUSE = "house";
Blockly.Msg.IMAGE_GET_TOOLTIP_MEH = "meh!";
Blockly.Msg.IMAGE_GET_TOOLTIP_MUSIC_CROTCHET = "music crotchet";
Blockly.Msg.IMAGE_GET_TOOLTIP_MUSIC_QUAVER = "music quaver";
Blockly.Msg.IMAGE_GET_TOOLTIP_MUSIC_QUAVERS = "music quavers";
Blockly.Msg.IMAGE_GET_TOOLTIP_NO = "no";
Blockly.Msg.IMAGE_GET_TOOLTIP_PACMAN = "pacman";
Blockly.Msg.IMAGE_GET_TOOLTIP_PITCHFORK = "pitchfork";
Blockly.Msg.IMAGE_GET_TOOLTIP_RABBIT = "rabbit";
Blockly.Msg.IMAGE_GET_TOOLTIP_ROLLERSKATE = "rollerskate";
Blockly.Msg.IMAGE_GET_TOOLTIP_SAD = "sad";
Blockly.Msg.IMAGE_GET_TOOLTIP_SILLY = "silly";
Blockly.Msg.IMAGE_GET_TOOLTIP_SKULL = "skull";
Blockly.Msg.IMAGE_GET_TOOLTIP_SMILE = "smile";
Blockly.Msg.IMAGE_GET_TOOLTIP_SNAKE = "snake";
Blockly.Msg.IMAGE_GET_TOOLTIP_SQUARE = "square";
Blockly.Msg.IMAGE_GET_TOOLTIP_SQUARE_SMALL = "small square";
Blockly.Msg.IMAGE_GET_TOOLTIP_STICKFIGURE = "stickfigure";
Blockly.Msg.IMAGE_GET_TOOLTIP_SWORD = "sword";
Blockly.Msg.IMAGE_GET_TOOLTIP_TARGET = "target";
Blockly.Msg.IMAGE_GET_TOOLTIP_TORTOISE = "tortoise";
Blockly.Msg.IMAGE_GET_TOOLTIP_TRIANGLE = "triangle";
Blockly.Msg.IMAGE_GET_TOOLTIP_TRIANGLE_LEFT = "triangle left";
Blockly.Msg.IMAGE_GET_TOOLTIP_TSHIRT = "T-shirt";
Blockly.Msg.IMAGE_GET_TOOLTIP_UMBRELLA = "umbrella";
Blockly.Msg.IMAGE_GET_TOOLTIP_XMAS = "xmas";
Blockly.Msg.IMAGE_GET_TOOLTIP_YES = "yes";
Blockly.Msg.IMAGE_INVERT = "invert";
Blockly.Msg.IMAGE_INVERT_TOOLTIP = "Inverts the image. Each pixel with value 0 or none will be set to # or 9 and pixels with value # or 9 will be set to 0 or none.";
Blockly.Msg.IMAGE_SHIFT = "shift";
Blockly.Msg.IMAGE_SHIFT_TOOLTIP = "Shifts the image in the given direction by the given number";
Blockly.Msg.IMAGE_TOOLTIP = "Creates an image for the display. You can specify the brightness of each pixel from 0 to 9 or # where 0 means no light, 1 is a bit bright and 9 or # is the brightest value.";
Blockly.Msg.INFO_DOCUMENTATION_HINT = "Document your program here ...";
Blockly.Msg.INFO_TAGS = "Tags";
Blockly.Msg.INFRARED_DISTANCE_GETSAMPLE_TOOLTIP = "Gets the current relative distance from the infrared sensor. The values are between 1 and 75 cm.";
Blockly.Msg.INFRARED_GETSAMPLE_TOOLTIP = "Gets the current reading from the infrared sensor.";
Blockly.Msg.INFRARED_GETSAMPLE_TOOLTIP_MBOT = "Gets the current reading from the light sensor -- if a black line is detected (true/false).";
Blockly.Msg.INFRARED_PRESENCE_GETSAMPLE_TOOLTIP = "Returns an array of measurements for the presence of a beacon.";
Blockly.Msg.INFRARED_TOOLTIP = "Represents an infrared sensor.";
Blockly.Msg.INLINE_INPUTS = "Inline Inputs";
Blockly.Msg.INPUT = "input";
Blockly.Msg.INTERNAL_PORT = "internal";
Blockly.Msg.IRSEEKER_TOOLTIP = "Represents a HiTechnic NXT IRSeeker V2 sensor.";
Blockly.Msg.I_TIME = "integration time";
// Fixed user-facing tooltip: "axises" -> "axes" (correct plural) and added the
// terminal period used by every sibling *_GETSAMPLE_TOOLTIP message.
Blockly.Msg.JOYSTICK_GETSAMPLE_TOOLTIP = "Gets the current reading of one of the axes of the joystick.";
Blockly.Msg.KEY_ISPRESSED_TOOLTIP = "Is the selected button pressed?";
Blockly.Msg.KEY_TOOLTIP = "Represents a button.";
Blockly.Msg.LANGUAGE = "language";
Blockly.Msg.LANGUAGE_ARABIC = "Arabic";
Blockly.Msg.LANGUAGE_BRAZILIAN = "Brazilian";
Blockly.Msg.LANGUAGE_CHINESE = "Chinese";
Blockly.Msg.LANGUAGE_CZECH = "Czech";
Blockly.Msg.LANGUAGE_DANISH = "Danish";
Blockly.Msg.LANGUAGE_DUTCH = "Dutch";
Blockly.Msg.LANGUAGE_ENGLISH = "English";
Blockly.Msg.LANGUAGE_FINNISH = "Finnish";
Blockly.Msg.LANGUAGE_FRENCH = "French";
Blockly.Msg.LANGUAGE_GERMAN = "German";
Blockly.Msg.LANGUAGE_GREEK = "Greek";
Blockly.Msg.LANGUAGE_ITALIAN = "Italian";
Blockly.Msg.LANGUAGE_JAPANESE = "Japanese";
Blockly.Msg.LANGUAGE_KOREAN = "Korean";
Blockly.Msg.LANGUAGE_NORWEGIAN = "Norwegian";
Blockly.Msg.LANGUAGE_POLISH = "Polish";
Blockly.Msg.LANGUAGE_PORTUGUESE = "Portuguese";
Blockly.Msg.LANGUAGE_RUSSIAN = "Russian";
Blockly.Msg.LANGUAGE_SPANISH = "Spanish";
Blockly.Msg.LANGUAGE_SWEDISH = "Swedish";
Blockly.Msg.LANGUAGE_TURKISH = "Turkish";
Blockly.Msg.LCDI2C_TOOLTIP = "Represents an LCD 1602 display with a soldered I²C module.";
Blockly.Msg.LCD_TOOLTIP = "Represents an LCD display.";
Blockly.Msg.LED = "LED";
Blockly.Msg.LEDBAR = "LED Bar";
Blockly.Msg.LEDBAR_SET_TOOLTIP = "Sets the specified LED [0-9] on the LED Bar to the given brightness [0-8].";
Blockly.Msg.LEDBAR_TOOLTIP = "Represents a Grove LED Bar v2.0 by Seeed.";
Blockly.Msg.LED_MATRIX = "LED matrix";
Blockly.Msg.LED_OFF = "turn LED off";
Blockly.Msg.LED_OFF_TOOLTIP = "Turns the LED off.";
Blockly.Msg.LED_ON = "turn LED on";
Blockly.Msg.LED_ON_TOOLTIP = "Turns the LED on and changes the color.";
Blockly.Msg.LED_ON_TOOLTIP_CB = "Turns the LED on and changes the color. Attention: calli:Bot only supports 7 different colors, the nearest will be chosen.";
Blockly.Msg.LED_ON_TOOLTIP_EDISON = "Turns the LED on.";
Blockly.Msg.LED_ON_WHITE_TOOLTIP = "Turns the LED on. Watch out, it's very bright!";
Blockly.Msg.LED_TOOLTIP = "Represents an LED.";
Blockly.Msg.LEFT = "left";
Blockly.Msg.LEFT_FRONT_RGBLED = "RGB LED left front";
Blockly.Msg.LEFT_INFRARED_SENSOR = "infraredsensor left";
Blockly.Msg.LEFT_LED = "LED left";
Blockly.Msg.LEFT_MOTOR = "motor left";
Blockly.Msg.LEFT_REAR_RGBLED = "RGB LED left rear";
Blockly.Msg.LIGHTVEML_TOOLTIP = "Represents a visible/UV light sensor.";
Blockly.Msg.LIGHT_ARDU_TOOLTIP = "Represents 8 light sensors.";
Blockly.Msg.LIGHT_GETSAMPLE_TOOLTIP = "Gets the current reading from the light sensor in percent.";
Blockly.Msg.LIGHT_LDR = "Light (LDR)";
Blockly.Msg.LIGHT_TOOLTIP = "Represents a light sensor.";
Blockly.Msg.LISTS_CREATE_EMPTY_HELPURL = "https://github.com/google/blockly/wiki/Lists#create-empty-list";
Blockly.Msg.LISTS_CREATE_EMPTY_TITLE = "create empty list";
Blockly.Msg.LISTS_CREATE_EMPTY_TOOLTIP = "Returns a list, of length 0, containing no data records";
Blockly.Msg.LISTS_CREATE_TITLE = "list";
Blockly.Msg.LISTS_CREATE_WITH_CONTAINER_TITLE_ADD = "list";
Blockly.Msg.LISTS_CREATE_WITH_CONTAINER_TOOLTIP = "Add, remove, or reorder sections to reconfigure this list block.";
Blockly.Msg.LISTS_CREATE_WITH_HELPURL = "https://github.com/google/blockly/wiki/Lists#create-list-with";
Blockly.Msg.LISTS_CREATE_WITH_INPUT_WITH = "create list with";
Blockly.Msg.LISTS_CREATE_WITH_ITEM_TOOLTIP = "Add an item to the list.";
Blockly.Msg.LISTS_CREATE_WITH_TOOLTIP = "Create a list with any number of items.";
Blockly.Msg.LISTS_GET_INDEX_FIRST = "first";
Blockly.Msg.LISTS_GET_INDEX_FROM_END = "# from end";
Blockly.Msg.LISTS_GET_INDEX_FROM_START = "#";
Blockly.Msg.LISTS_GET_INDEX_GET = "get";
Blockly.Msg.LISTS_GET_INDEX_GET_REMOVE = "get and remove";
Blockly.Msg.LISTS_GET_INDEX_LAST = "last";
Blockly.Msg.LISTS_GET_INDEX_RANDOM = "random";
Blockly.Msg.LISTS_GET_INDEX_REMOVE = "remove";
Blockly.Msg.LISTS_GET_INDEX_TAIL = "";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_FIRST = "Returns the first item in a list.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_FROM_END = "Returns the item at the specified position in a list. #1 is the last item.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_FROM_START = "Returns the item at the specified position in a list. #1 is the first item.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_LAST = "Returns the last item in a list.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_RANDOM = "Returns a random item in a list.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_FIRST = "Removes and returns the first item in a list.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_FROM_END = "Removes and returns the item at the specified position in a list. #1 is the last item.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_FROM_START = "Removes and returns the item at the specified position in a list. #1 is the first item.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_LAST = "Removes and returns the last item in a list.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_GET_REMOVE_RANDOM = "Removes and returns a random item in a list.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_FIRST = "Removes the first item in a list.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_FROM_END = "Removes the item at the specified position in a list. #1 is the last item.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_FROM_START = "Removes the item at the specified position in a list. #1 is the first item.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_LAST = "Removes the last item in a list.";
Blockly.Msg.LISTS_GET_INDEX_TOOLTIP_REMOVE_RANDOM = "Removes a random item in a list.";
Blockly.Msg.LISTS_GET_SUBLIST_END_FROM_END = "to # from end";
Blockly.Msg.LISTS_GET_SUBLIST_END_FROM_START = "to #";
Blockly.Msg.LISTS_GET_SUBLIST_END_LAST = "to last";
Blockly.Msg.LISTS_GET_SUBLIST_HELPURL = "https://github.com/google/blockly/wiki/Lists#getting-a-sublist";
Blockly.Msg.LISTS_GET_SUBLIST_START_FIRST = "get sub-list from first";
Blockly.Msg.LISTS_GET_SUBLIST_START_FROM_END = "get sub-list from # from end";
Blockly.Msg.LISTS_GET_SUBLIST_START_FROM_START = "get sub-list from #";
Blockly.Msg.LISTS_GET_SUBLIST_TAIL = "";
Blockly.Msg.LISTS_GET_SUBLIST_TOOLTIP = "Creates a copy of the specified portion of a list.";
Blockly.Msg.LISTS_INDEX_OF_FIRST = "find first occurrence of item";
Blockly.Msg.LISTS_INDEX_OF_HELPURL = "https://github.com/google/blockly/wiki/Lists#getting-items-from-a-list";
Blockly.Msg.LISTS_INDEX_OF_LAST = "find last occurrence of item";
Blockly.Msg.LISTS_INDEX_OF_TOOLTIP = "Returns the index of the first/last occurrence of the item in the list. Returns -1 if item is not found.";
Blockly.Msg.LISTS_INLIST = "in list";
Blockly.Msg.LISTS_ISEMPTY_HELPURL = "https://github.com/google/blockly/wiki/Lists#is-empty";
Blockly.Msg.LISTS_ISEMPTY_TITLE = "%1 is empty";
Blockly.Msg.LISTS_ISEMPTY_TOOLTIP = "Returns true if the list is empty.";
Blockly.Msg.LISTS_LENGTH_HELPURL = "https://github.com/google/blockly/wiki/Lists#length-of";
Blockly.Msg.LISTS_LENGTH_TITLE = "length of %1";
Blockly.Msg.LISTS_LENGTH_TOOLTIP = "Returns the length of a list.";
Blockly.Msg.LISTS_REPEAT_HELPURL = "https://github.com/google/blockly/wiki/Lists#create-list-with";
Blockly.Msg.LISTS_REPEAT_TITLE = "create list with item %1 repeated %2 times";
Blockly.Msg.LISTS_REPEAT_TOOLTIP = "Creates a list consisting of the given value repeated the specified number of times.";
Blockly.Msg.LISTS_SET_INDEX_HELPURL = "https://github.com/google/blockly/wiki/Lists#in-list--set";
Blockly.Msg.LISTS_SET_INDEX_INPUT_TO = "as";
Blockly.Msg.LISTS_SET_INDEX_INSERT = "insert at";
Blockly.Msg.LISTS_SET_INDEX_SET = "set";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_FIRST = "Inserts the item at the start of a list.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_FROM_END = "Inserts the item at the specified position in a list. #1 is the last item.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_FROM_START = "Inserts the item at the specified position in a list. #1 is the first item.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_LAST = "Append the item to the end of a list.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_INSERT_RANDOM = "Inserts the item randomly in a list.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_FIRST = "Sets the first item in a list.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_FROM_END = "Sets the item at the specified position in a list. #1 is the last item.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_FROM_START = "Sets the item at the specified position in a list. #1 is the first item.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_LAST = "Sets the last item in a list.";
Blockly.Msg.LISTS_SET_INDEX_TOOLTIP_SET_RANDOM = "Sets a random item in a list.";
Blockly.Msg.LISTS_SORT_HELPURL = "https://github.com/google/blockly/wiki/Lists#sorting-a-list";
Blockly.Msg.LISTS_SORT_ORDER_ASCENDING = "ascending";
Blockly.Msg.LISTS_SORT_ORDER_DESCENDING = "descending";
Blockly.Msg.LISTS_SORT_TITLE = "sort %1 %2 %3";
Blockly.Msg.LISTS_SORT_TOOLTIP = "Sort a copy of a list.";
Blockly.Msg.LISTS_SORT_TYPE_IGNORECASE = "alphabetic, ignore case";
Blockly.Msg.LISTS_SORT_TYPE_NUMERIC = "numeric";
Blockly.Msg.LISTS_SORT_TYPE_TEXT = "alphabetic";
Blockly.Msg.LISTS_SPLIT_HELPURL = "https://github.com/google/blockly/wiki/Lists#splitting-strings-and-joining-lists";
Blockly.Msg.LISTS_SPLIT_LIST_FROM_TEXT = "make list from text";
Blockly.Msg.LISTS_SPLIT_TEXT_FROM_LIST = "make text from list";
Blockly.Msg.LISTS_SPLIT_TOOLTIP_JOIN = "Join a list of texts into one text, separated by a delimiter.";
Blockly.Msg.LISTS_SPLIT_TOOLTIP_SPLIT = "Split text into a list of texts, breaking at each delimiter.";
Blockly.Msg.LISTS_SPLIT_WITH_DELIMITER = "with delimiter";
Blockly.Msg.LIST_BACK_TOOLTIP = "Back to previous view.";
Blockly.Msg.LOGIC_BOOLEAN_FALSE = "false";
Blockly.Msg.LOGIC_BOOLEAN_HELPURL = "https://github.com/google/blockly/wiki/Logic#values";
Blockly.Msg.LOGIC_BOOLEAN_TOOLTIP = "Returns either true or false.";
Blockly.Msg.LOGIC_BOOLEAN_TRUE = "true";
Blockly.Msg.LOGIC_COMPARE_HELPURL = "https://en.wikipedia.org/wiki/Inequality_(mathematics)";
Blockly.Msg.LOGIC_COMPARE_TOOLTIP_EQ = "Return true if both inputs equal each other.";
Blockly.Msg.LOGIC_COMPARE_TOOLTIP_GT = "Return true if the first input is greater than the second input.";
Blockly.Msg.LOGIC_COMPARE_TOOLTIP_GTE = "Return true if the first input is greater than or equal to the second input.";
Blockly.Msg.LOGIC_COMPARE_TOOLTIP_LT = "Return true if the first input is smaller than the second input.";
Blockly.Msg.LOGIC_COMPARE_TOOLTIP_LTE = "Return true if the first input is smaller than or equal to the second input.";
Blockly.Msg.LOGIC_COMPARE_TOOLTIP_NEQ = "Return true if both inputs are not equal to each other.";
Blockly.Msg.LOGIC_NEGATE_HELPURL = "https://github.com/google/blockly/wiki/Logic#not";
Blockly.Msg.LOGIC_NEGATE_TITLE = "not %1";
Blockly.Msg.LOGIC_NEGATE_TOOLTIP = "Returns true if the input is false. Returns false if the input is true.";
Blockly.Msg.LOGIC_NULL = "null";
Blockly.Msg.LOGIC_NULL_HELPURL = "https://en.wikipedia.org/wiki/Nullable_type";
Blockly.Msg.LOGIC_NULL_TOOLTIP = "Returns null.";
Blockly.Msg.LOGIC_OPERATION_AND = "and";
Blockly.Msg.LOGIC_OPERATION_HELPURL = "https://github.com/google/blockly/wiki/Logic#logical-operations";
Blockly.Msg.LOGIC_OPERATION_OR = "or";
Blockly.Msg.LOGIC_OPERATION_TOOLTIP_AND = "Return true if both inputs are true.";
Blockly.Msg.LOGIC_OPERATION_TOOLTIP_OR = "Return true if at least one of the inputs is true.";
Blockly.Msg.LOGIC_TERNARY_CONDITION = "test";
Blockly.Msg.LOGIC_TERNARY_HELPURL = "https://en.wikipedia.org/wiki/%3F:";
Blockly.Msg.LOGIC_TERNARY_IF_FALSE = "if false";
Blockly.Msg.LOGIC_TERNARY_IF_TRUE = "if true";
Blockly.Msg.LOGIC_TERNARY_TOOLTIP = "Check the condition in 'test'. If the condition is true, returns the 'if true' value; otherwise returns the 'if false' value.";
Blockly.Msg.LOOP = "repeat until";
Blockly.Msg.LOOPFOREVER_TOOLTIP = "Repeats indefinitely an action.";
Blockly.Msg.LOOP_FOREVER = "repeat indefinitely";
Blockly.Msg.MATH_ADDITION_SYMBOL = "+";
Blockly.Msg.MATH_ARITHMETIC_HELPURL = "https://en.wikipedia.org/wiki/Arithmetic";
Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_ADD = "Return the sum of the two numbers.";
Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_DIVIDE = "Return the quotient of the two numbers.";
Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_MINUS = "Return the difference of the two numbers.";
Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_MULTIPLY = "Return the product of the two numbers.";
Blockly.Msg.MATH_ARITHMETIC_TOOLTIP_POWER = "Return the first number raised to the power of the second number.";
Blockly.Msg.MATH_CAST_TOCHAR = "cast %1 to Char";
Blockly.Msg.MATH_CAST_TOCHAR_TOOLTIP = "Convert this number into a single ASCII character";
Blockly.Msg.MATH_CAST_TOSTRING = "cast %1 to String";
Blockly.Msg.MATH_CAST_TOSTRING_TOOLTIP = "Convert this number into a string.";
Blockly.Msg.MATH_CHANGE_HELPURL = "https://en.wikipedia.org/wiki/Programming_idiom#Incrementing_a_counter";
Blockly.Msg.MATH_CHANGE_TITLE = "change %1 by %2";
Blockly.Msg.MATH_CHANGE_TOOLTIP = "Add a number to variable '%1'.";
Blockly.Msg.MATH_CONSTANT_HELPURL = "https://en.wikipedia.org/wiki/Mathematical_constant";
Blockly.Msg.MATH_CONSTANT_TOOLTIP = "Return one of the common constants: π (3.141…), e (2.718…), φ (1.618…), sqrt(2) (1.414…), sqrt(½) (0.707…), or ∞ (infinity).";
Blockly.Msg.MATH_CONSTRAIN_HELPURL = "https://en.wikipedia.org/wiki/Clamping_%28graphics%29";
Blockly.Msg.MATH_CONSTRAIN_TITLE = "constrain %1 low %2 high %3";
Blockly.Msg.MATH_CONSTRAIN_TOOLTIP = "Constrain a number to be between the specified limits (inclusive).";
Blockly.Msg.MATH_DIVISION_SYMBOL = "÷";
Blockly.Msg.MATH_IS_DIVISIBLE_BY = "is divisible by";
Blockly.Msg.MATH_IS_EVEN = "is even";
Blockly.Msg.MATH_IS_NEGATIVE = "is negative";
Blockly.Msg.MATH_IS_ODD = "is odd";
Blockly.Msg.MATH_IS_POSITIVE = "is positive";
Blockly.Msg.MATH_IS_PRIME = "is prime";
Blockly.Msg.MATH_IS_TOOLTIP = "Check if a number is an even, odd, prime, whole, positive, negative, or if it is divisible by certain number. Returns true or false.";
Blockly.Msg.MATH_IS_WHOLE = "is whole";
Blockly.Msg.MATH_MODULO_HELPURL = "https://en.wikipedia.org/wiki/Modulo_operation";
Blockly.Msg.MATH_MODULO_TITLE = "remainder of %1 ÷ %2";
Blockly.Msg.MATH_MODULO_TOOLTIP = "Return the remainder from dividing the two numbers.";
Blockly.Msg.MATH_MULTIPLICATION_SYMBOL = "×";
Blockly.Msg.MATH_NUMBER_HELPURL = "https://en.wikipedia.org/wiki/Number";
Blockly.Msg.MATH_NUMBER_TOOLTIP = "A number.";
Blockly.Msg.MATH_ONLIST_HELPURL = "";
Blockly.Msg.MATH_ONLIST_OPERATOR_AVERAGE = "average of list";
Blockly.Msg.MATH_ONLIST_OPERATOR_MAX = "max of list";
Blockly.Msg.MATH_ONLIST_OPERATOR_MEDIAN = "median of list";
Blockly.Msg.MATH_ONLIST_OPERATOR_MIN = "min of list";
Blockly.Msg.MATH_ONLIST_OPERATOR_MODE = "modes of list";
Blockly.Msg.MATH_ONLIST_OPERATOR_RANDOM = "random item of list";
Blockly.Msg.MATH_ONLIST_OPERATOR_STD_DEV = "standard deviation of list";
Blockly.Msg.MATH_ONLIST_OPERATOR_SUM = "sum of list";
Blockly.Msg.MATH_ONLIST_TOOLTIP_AVERAGE = "Return the average (arithmetic mean) of the numeric values in the list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_MAX = "Return the largest number in the list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_MEDIAN = "Return the median number in the list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_MIN = "Return the smallest number in the list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_MODE = "Return a list of the most common item(s) in the list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_RANDOM = "Return a random element from the list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_STD_DEV = "Return the standard deviation of the list.";
Blockly.Msg.MATH_ONLIST_TOOLTIP_SUM = "Return the sum of all the numbers in the list.";
Blockly.Msg.MATH_POWER_SYMBOL = "^";
Blockly.Msg.MATH_RANDOM_FLOAT_HELPURL = "https://en.wikipedia.org/wiki/Random_number_generation";
Blockly.Msg.MATH_RANDOM_FLOAT_TITLE_RANDOM = "random fraction";
Blockly.Msg.MATH_RANDOM_FLOAT_TOOLTIP = "Return a random fraction between 0.0 (inclusive) and 1.0 (exclusive).";
Blockly.Msg.MATH_RANDOM_INT_HELPURL = "https://en.wikipedia.org/wiki/Random_number_generation";
Blockly.Msg.MATH_RANDOM_INT_TITLE = "random integer from %1 to %2";
Blockly.Msg.MATH_RANDOM_INT_TOOLTIP = "Return a random integer between the two specified limits, inclusive.";
Blockly.Msg.MATH_ROUND_HELPURL = "https://en.wikipedia.org/wiki/Rounding";
Blockly.Msg.MATH_ROUND_OPERATOR_ROUND = "round";
Blockly.Msg.MATH_ROUND_OPERATOR_ROUNDDOWN = "round down";
Blockly.Msg.MATH_ROUND_OPERATOR_ROUNDUP = "round up";
Blockly.Msg.MATH_ROUND_TOOLTIP = "Round a number up or down.";
Blockly.Msg.MATH_SINGLE_HELPURL = "https://en.wikipedia.org/wiki/Square_root";
Blockly.Msg.MATH_SINGLE_OP_ABSOLUTE = "absolute";
Blockly.Msg.MATH_SINGLE_OP_ROOT = "square root";
Blockly.Msg.MATH_SINGLE_OP_SQUARE = "square";
Blockly.Msg.MATH_SINGLE_TOOLTIP_ABS = "Return the absolute value of a number.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_EXP = "Return e to the power of a number.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_LN = "Return the natural logarithm of a number.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_LOG10 = "Return the base 10 logarithm of a number.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_NEG = "Return the negation of a number.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_POW10 = "Return 10 to the power of a number.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_ROOT = "Return the square root of a number.";
Blockly.Msg.MATH_SINGLE_TOOLTIP_SQUARE = "Return the number multiplied by itself.";
Blockly.Msg.MATH_SUBTRACTION_SYMBOL = "-";
Blockly.Msg.MATH_TRIG_ACOS = "acos";
Blockly.Msg.MATH_TRIG_ASIN = "asin";
Blockly.Msg.MATH_TRIG_ATAN = "atan";
Blockly.Msg.MATH_TRIG_COS = "cos";
Blockly.Msg.MATH_TRIG_HELPURL = "https://en.wikipedia.org/wiki/Trigonometric_functions";
Blockly.Msg.MATH_TRIG_SIN = "sin";
Blockly.Msg.MATH_TRIG_TAN = "tan";
Blockly.Msg.MATH_TRIG_TOOLTIP_ACOS = "Return the arccosine of a number.";
Blockly.Msg.MATH_TRIG_TOOLTIP_ASIN = "Return the arcsine of a number.";
Blockly.Msg.MATH_TRIG_TOOLTIP_ATAN = "Return the arctangent of a number.";
Blockly.Msg.MATH_TRIG_TOOLTIP_COS = "Return the cosine of a degree (not radian).";
Blockly.Msg.MATH_TRIG_TOOLTIP_SIN = "Return the sine of a degree (not radian).";
Blockly.Msg.MATH_TRIG_TOOLTIP_TAN = "Return the tangent of a degree (not radian).";
Blockly.Msg.MAX_ANGLE = "Maximum angle";
Blockly.Msg.MAX_PULSE_WIDTH = "Maximum pulse width";
Blockly.Msg.ME = "Me";
Blockly.Msg.MENU_ABOUT = "about the Open Roberta Lab";
Blockly.Msg.MENU_ABOUT_PROJECT = "about the Open Roberta Project";
Blockly.Msg.MENU_ATTACH = "attach ...";
Blockly.Msg.MENU_BEGINNER = "beginner";
Blockly.Msg.MENU_CHANGE = "change ...";
Blockly.Msg.MENU_CHECK = "check";
Blockly.Msg.MENU_CODE_DOWNLOAD_TOOLTIP = "Download the source code of your program on the computer";
Blockly.Msg.MENU_CODE_REFRESH_TOOLTIP = "Refresh the source code, if you have changed the NEPO Blocks.";
Blockly.Msg.MENU_CONNECT = "connect ...";
Blockly.Msg.MENU_CREATE_LINK = "create program link ...";
Blockly.Msg.MENU_DEBUG_STEP_BREAKPOINT_TOOLTIP = "Step forward to the next breakpoint in the program.";
Blockly.Msg.MENU_DEBUG_STEP_INTO_TOOLTIP = "Step Into to the next block in the program.";
Blockly.Msg.MENU_DEBUG_STEP_OVER_TOOLTIP = "Step Over to the next block in the program.";
Blockly.Msg.MENU_DELETE_USER = "delete user ...";
Blockly.Msg.MENU_EDIT = "edit";
Blockly.Msg.MENU_EDIT_TOOLTIP = "edit";
Blockly.Msg.MENU_EV3 = "Robot preparation";
Blockly.Msg.MENU_EXPERT = "expert";
Blockly.Msg.MENU_EXPORT_ALL_PROGS = "export all programs";
Blockly.Msg.MENU_EXPORT_PROG = "export program";
Blockly.Msg.MENU_FAQ = "FAQ";
Blockly.Msg.MENU_GALLERY = "gallery";
Blockly.Msg.MENU_GALLERY_TOOLTIP = "gallery";
Blockly.Msg.MENU_GENERAL = "general help";
Blockly.Msg.MENU_HELP = "help";
Blockly.Msg.MENU_HELP_TOOLTIP = "help";
Blockly.Msg.MENU_IMPORT_PROG = "import program ...";
Blockly.Msg.MENU_LANGUAGE = "languages";
// Open Roberta Lab — English (en) message catalog (generated translation strings).
// Keys are referenced by the application code and must not be renamed; values are
// user-facing UI strings. "$" inside a value is a runtime placeholder for a name.
// Menu entries and menu tooltips.
Blockly.Msg.MENU_LANGUAGE_TOOLTIP = "languages";
Blockly.Msg.MENU_LIST = "list ...";
Blockly.Msg.MENU_LIST_CONF = "my configurations ...";
Blockly.Msg.MENU_LIST_PROG = "my programs ...";
Blockly.Msg.MENU_LIST_PROG_EXAMPLES = "example programs ...";
Blockly.Msg.MENU_LOGGING = "logging";
Blockly.Msg.MENU_LOG_IN = "login ...";
Blockly.Msg.MENU_LOG_OUT = "logout";
Blockly.Msg.MENU_MANAGE_USERGROUPS = "Manage user groups ...";
Blockly.Msg.MENU_MESSAGE_DOWNLOAD = "Your program has been successfully downloaded.";
Blockly.Msg.MENU_NEW = "new";
Blockly.Msg.MENU_PROGRAMMING = "programming with NEPO";
Blockly.Msg.MENU_PROPERTIES = "properties";
Blockly.Msg.MENU_RESET_FIRMWARE = "reset to factory defaults";
Blockly.Msg.MENU_RIGHT_CODE_TOOLTIP = "Open/close the source code view.";
Blockly.Msg.MENU_RIGHT_HELP_TOOLTIP = "Open/close the help view.";
Blockly.Msg.MENU_RIGHT_INFO_TOOLTIP = "Open/close the program documentation view.";
Blockly.Msg.MENU_RIGHT_LEGAL_TOOLTIP = "Open/close the legal information view.";
Blockly.Msg.MENU_RIGHT_SIM_DEBUG_TOOLTIP = "Open/close the simulation view in debug mode.";
Blockly.Msg.MENU_RIGHT_SIM_TOOLTIP = "Open/close the simulation view.";
Blockly.Msg.MENU_RIGHT_TUTORIAL_TOOLTIP = "open/close the tutorial's view";
Blockly.Msg.MENU_ROBOT = "robot";
Blockly.Msg.MENU_ROBOT_STATE_INFO = "info";
Blockly.Msg.MENU_ROBOT_STATE_TOOLTIP = "robot info";
Blockly.Msg.MENU_ROBOT_STOP_HINT_EV3 = "Press <span class='typcn typcn-media-stop'></span>+<span class='typcn typcn-arrow-sorted-down'></span> buttons on the robot to abort the program!";
Blockly.Msg.MENU_ROBOT_STOP_HINT_NXT = "Press <span class='typcn typcn-media-cancel'></span> button on the robot to abort the program!";
Blockly.Msg.MENU_ROBOT_TOOLTIP = "robots";
Blockly.Msg.MENU_ROBOT_WLAN = "WLAN credentials ...";
Blockly.Msg.MENU_RUN_MULT_SIM = "multiple robot simulation ...";
Blockly.Msg.MENU_SAVE = "save";
Blockly.Msg.MENU_SAVE_AS = "save as ...";
Blockly.Msg.MENU_SHORTCUT = "keyboard shortcuts";
Blockly.Msg.MENU_SHORTCUT_RUN = "run on robot";
Blockly.Msg.MENU_SHOW_AGAIN = "show welcome note again";
Blockly.Msg.MENU_SHOW_CODE = "open/close source code view";
Blockly.Msg.MENU_SIM_ADD_COLOR_OBJECT_TOOLTIP = "Add a color area.";
Blockly.Msg.MENU_SIM_ADD_OBSTACLE_TOOLTIP = "Add an obstacle.";
Blockly.Msg.MENU_SIM_CHANGE_COLOR_TOOLTIP = "Choose a color for the selected obstacle / color area.";
Blockly.Msg.MENU_SIM_CONFIG_EXPORT = "Download simulation settings.";
Blockly.Msg.MENU_SIM_CONFIG_IMPORT = "Upload simulation settings.";
Blockly.Msg.MENU_SIM_DELETE_OBJECT_TOOLTIP = "Delete the selected obstacle / color area.";
Blockly.Msg.MENU_SIM_IMPORT_TOOLTIP = "Upload your own simulation background image, it will be appended at the end of the background's list.";
Blockly.Msg.MENU_SIM_POSE_TOOLTIP = "Resets the positions of all robots and obstacles then clears all drawings.";
Blockly.Msg.MENU_SIM_ROBOT_TOOLTIP = "open/close the robot's view";
Blockly.Msg.MENU_SIM_SCENE_TOOLTIP = "change the scene";
Blockly.Msg.MENU_SIM_START_TOOLTIP = "Start your program in the simulation.";
Blockly.Msg.MENU_SIM_STOP_TOOLTIP = "Stop your program in the simulation.";
Blockly.Msg.MENU_SIM_VALUES_TOOLTIP = "Open/close the sensors' data view.";
Blockly.Msg.MENU_SOURCE_CODE_EDITOR = "open source code editor";
Blockly.Msg.MENU_START_BRICK = "run on »$«";
Blockly.Msg.MENU_START_SIM = "open/close simulation view";
Blockly.Msg.MENU_STATE_INFO = "state information";
Blockly.Msg.MENU_STOP_BRICK = "stop program on »$«";
Blockly.Msg.MENU_TOOLBOX = "NEPO-Blocks";
Blockly.Msg.MENU_TOOLBOX_BEGINNER = "NEPO-Blocks beginner";
Blockly.Msg.MENU_TOOLBOX_EXPERT = "NEPO-Blocks expert";
Blockly.Msg.MENU_TUTORIAL = "tutorials";
Blockly.Msg.MENU_TUTORIAL_TOOLTIP = "tutorials";
Blockly.Msg.MENU_USER = "login";
Blockly.Msg.MENU_USERGROUP_LOG_IN = "Log in with user group ...";
Blockly.Msg.MENU_USER_STATE_TOOLTIP = "user info";
Blockly.Msg.MENU_USER_TOOLTIP = "user";
Blockly.Msg.MENU_WLAN_CREDENTIALS = "WLAN credentials";
Blockly.Msg.MENU_ZOOM = "zoom";
Blockly.Msg.MENU_ZOOM_IN = "zoom in";
Blockly.Msg.MENU_ZOOM_OUT = "zoom out";
Blockly.Msg.MENU_ZOOM_RESET = "reset zoom";
// Notification messages shown after user/server actions.
Blockly.Msg.MESSAGE_ADDED_USER = "User »$« was added";
Blockly.Msg.MESSAGE_CONFIGURATION_DELETED = "Configuration »$« was deleted";
Blockly.Msg.MESSAGE_EDIT_CHECK = "Your program is now checked!";
Blockly.Msg.MESSAGE_EDIT_SAVE_CONFIGURATION = "Your configuration has been saved";
Blockly.Msg.MESSAGE_EDIT_SAVE_CONFIGURATION_AS = "Your configuration has been saved as »$«";
Blockly.Msg.MESSAGE_EDIT_SAVE_GROUP_AS = "Your group has been created";
Blockly.Msg.MESSAGE_EDIT_SAVE_PROGRAM = "Your program has been saved";
Blockly.Msg.MESSAGE_EDIT_SAVE_PROGRAM_AS = "Your program has been saved as »$«";
Blockly.Msg.MESSAGE_EDIT_START = "Your program »$« will run in a moment!";
Blockly.Msg.MESSAGE_FIRMWARE_ERROR = "The firmware of your robot is newer than that of the Open Roberta Lab. Please tell your server admin, that the server needs to be updated.";
Blockly.Msg.MESSAGE_GROUP_DELETED = "Group »$« was deleted";
Blockly.Msg.MESSAGE_INVALID_CONF_NAME = "Please fill in a correct name. A correct name begins with a letter and can only contain letters or numbers. The default name »[robot]basis« can't be used here.";
Blockly.Msg.MESSAGE_INVALID_NAME = "Please fill in a correct name. A correct name begins with a letter and can only contain letters or numbers, max. length is 255.";
Blockly.Msg.MESSAGE_NOT_AVAILABLE = "Not available.";
Blockly.Msg.MESSAGE_PROGRAM_DELETED = "Program »$« was deleted";
Blockly.Msg.MESSAGE_RESTART_ROBOT = "Please reconnect the robot to the Open Roberta Lab.";
Blockly.Msg.MESSAGE_ROBOT_CONNECTED = "Your robot »$« is connected";
Blockly.Msg.MESSAGE_ROBOT_DISCONNECTED = "An active robot was disconnected";
Blockly.Msg.MESSAGE_USER_DELETED = "User deleted";
Blockly.Msg.MESSAGE_USER_GROUP_DELETED = "User »$« was deleted";
Blockly.Msg.MESSAGE_USER_LOGIN = "Hello »$«";
Blockly.Msg.MESSAGE_USER_LOGOUT = "You are logged out";
// Miscellaneous block tooltips and field labels.
Blockly.Msg.MICROBITBRICK_TOOLTIP = "Represents micro:bit, a pocket-sized codeable computer. There are also inbuilt actors and sensors available, e.g. buttons, display ...";
Blockly.Msg.MICROPHONE_GETSAMPLE_TOOLTIP = "Gets the current reading from the microphone in % (mapped to 0 - 100). If the value is always low, the value has to be multiplied by 10, because the amplification is missing on the hardware.";
Blockly.Msg.MIN_ANGLE = "Minimum angle";
Blockly.Msg.MIN_PULSE_WIDTH = "Minimum pulse width";
// Sensor/actuator mode labels, used in the mode dropdowns of sensor blocks.
// Keys are referenced by the application code and must not be renamed.
Blockly.Msg.MODE = "mode";
Blockly.Msg.MODE_ACCELERATION = "acceleration";
Blockly.Msg.MODE_ALTITUDE = "altitude";
Blockly.Msg.MODE_AMBIENTLIGHT = "ambient light";
Blockly.Msg.MODE_ANALOG = "analog value";
Blockly.Msg.MODE_ANGLE = "angle";
Blockly.Msg.MODE_CALIBRATION = "Calibration Value";
Blockly.Msg.MODE_CLAP = "clap";
Blockly.Msg.MODE_CLOSE = "close";
Blockly.Msg.MODE_CO2EQUIVALENT = "CO2 Equivalent";
Blockly.Msg.MODE_COLOR = "color";
Blockly.Msg.MODE_COLOUR = "colour";
Blockly.Msg.MODE_COMPASS = "compass";
Blockly.Msg.MODE_CURRENT = "current";
Blockly.Msg.MODE_DATE = "date";
Blockly.Msg.MODE_DEGREE = "degree";
Blockly.Msg.MODE_DIGITAL = "digital value";
Blockly.Msg.MODE_DISTANCE = "distance";
Blockly.Msg.MODE_GESTURE = "gesture";
Blockly.Msg.MODE_GYRO = "gyroscope";
Blockly.Msg.MODE_HUMIDITY = "humidity";
Blockly.Msg.MODE_IAQ = "Indoor Air Quality (IAQ)";
Blockly.Msg.MODE_IDALL = "IDs (list)";
Blockly.Msg.MODE_IDONE = "ID";
Blockly.Msg.MODE_INFO = "information";
Blockly.Msg.MODE_LATITUDE = "latitude";
Blockly.Msg.MODE_LIGHT = "light";
Blockly.Msg.MODE_LINE = "line";
Blockly.Msg.MODE_LONGITUDE = "longitude";
Blockly.Msg.MODE_MAGNETICFIELD = "mag field";
Blockly.Msg.MODE_MODULATED = "modulated";
Blockly.Msg.MODE_MOISTURE = "moisture";
Blockly.Msg.MODE_NAMEALL = "names (list)";
Blockly.Msg.MODE_NAMEONE = "name";
Blockly.Msg.MODE_OBSTACLE = "obstacle";
Blockly.Msg.MODE_OPEN = "open";
Blockly.Msg.MODE_ORIENTATION = "orientation";
Blockly.Msg.MODE_PM10 = "PM10";
Blockly.Msg.MODE_PM25 = "PM2.5";
Blockly.Msg.MODE_PRESENCE = "presence";
Blockly.Msg.MODE_PRESSED = "pressed";
Blockly.Msg.MODE_PRESSURE = "pressure";
Blockly.Msg.MODE_PULSEHIGH = "pulse time HIGH";
Blockly.Msg.MODE_PULSELOW = "pulse time LOW";
Blockly.Msg.MODE_RATE = "rate";
Blockly.Msg.MODE_RCCODE = "R/C code";
Blockly.Msg.MODE_REFLEXION = "reflected light";
Blockly.Msg.MODE_RGB = "RGB";
Blockly.Msg.MODE_ROTATION = "rotation";
Blockly.Msg.MODE_SENSOR1 = "Light Sensor1";
Blockly.Msg.MODE_SENSOR2 = "Light Sensor2";
Blockly.Msg.MODE_SOUND = "sound";
Blockly.Msg.MODE_SPEED = "speed";
Blockly.Msg.MODE_TEMPERATURE = "temperature";
Blockly.Msg.MODE_TILTED = "tilted";
Blockly.Msg.MODE_TIME = "time";
Blockly.Msg.MODE_UNMODULATED = "unmodulated";
Blockly.Msg.MODE_UVLIGHT = "UV light";
Blockly.Msg.MODE_VALUE = "value";
Blockly.Msg.MODE_VOCEQUIVALENT = "Breathe VOC Equivalent";
Blockly.Msg.MODE_WORD = "word";
Blockly.Msg.MODE_X = "X-value";
Blockly.Msg.MODE_Y = "Y-value";
Blockly.Msg.MODE_Z = "Z-value";
// Motor/motion block labels and tooltips.
// Typo fixes in user-facing strings only: "positiv"/"negativ" → "positive"/"negative",
// and a doubled phrase in the MS tooltip. Keys are the external interface and are unchanged.
Blockly.Msg.MOISTURE_TOOLTIP = "Represents a moisture sensor.";
Blockly.Msg.MOTIONKIT = "MotionKit";
Blockly.Msg.MOTIONKIT_DUAL_TOOLTIP = "Sets each MotionKit motor to the specified direction.";
Blockly.Msg.MOTIONKIT_PIN_OVERLAP_WARNING = "The MotionKit uses the pins P1, P2, A0, A1, C16 and C17, so please make sure no other configuration-block uses them!";
Blockly.Msg.MOTIONKIT_SINGLE_TOOLTIP = "Sets the selected MotionKit motor/motors to the specified direction.";
Blockly.Msg.MOTION_TOOLTIP = "Represents a motion sensor.";
Blockly.Msg.MOTOR = "motor";
Blockly.Msg.MOTORDIFF_ON_FOR_TOOLTIP = "Starts the robot with specific speed and stops after specific distance.";
Blockly.Msg.MOTORDIFF_ON_TOOLTIP = "Starts the robot with specific speed.";
Blockly.Msg.MOTORDIFF_STOP_TOOLTIP = "Stops the robot.";
Blockly.Msg.MOTORDIFF_TURN_FOR_TOOLTIP = "Turns the robot for number of degrees.";
Blockly.Msg.MOTORDIFF_TURN_TOOLTIP = "Turns the robot.";
Blockly.Msg.MOTORS_ON_TOOLTIP_CALLIOPE = "Turns motor A and B on with a specific power.";
Blockly.Msg.MOTORS_ON_TOOLTIP_CALLIOPE_CB = "Turns both motors on with a specific power. Power can be positive or negative for reverse direction.";
Blockly.Msg.MOTORS_STOP_TOOLTIP = "Stops both motors, A and B.";
Blockly.Msg.MOTOR_ARDU_TOOLTIP = "Represents a Bot'n Roll chassis motor.";
Blockly.Msg.MOTOR_BACKWARD = "backwards";
Blockly.Msg.MOTOR_BIG = "big";
Blockly.Msg.MOTOR_BIG_TOOLTIP = "Represents a big motor.";
Blockly.Msg.MOTOR_BRAKE = "brake";
Blockly.Msg.MOTOR_DEGREE = "degree";
Blockly.Msg.MOTOR_DISTANCE = "distance cm";
Blockly.Msg.MOTOR_DRIVE = "drive";
Blockly.Msg.MOTOR_FLOAT = "float";
// NOTE(review): key "MOTOR_FOREWARD" is misspelled but referenced elsewhere — keep as-is.
Blockly.Msg.MOTOR_FOREWARD = "forwards";
Blockly.Msg.MOTOR_GETPOWER_TOOLTIP = "Gets current power of this motor.";
Blockly.Msg.MOTOR_LEFT = "left";
Blockly.Msg.MOTOR_MIDDLE = "middle";
Blockly.Msg.MOTOR_MIDDLE_TOOLTIP = "Represents a middle motor.";
Blockly.Msg.MOTOR_NONE = "none";
Blockly.Msg.MOTOR_ON_FOR_TOOLTIP = "Turns motor on and stops motor after execution of rotations/degrees.";
Blockly.Msg.MOTOR_ON_FOR_TOOLTIP_MS = "Turns motor on and stops motor after time has passed.";
Blockly.Msg.MOTOR_ON_FOR_TOOLTIP_RPM = "Turns motor on at speed in rpms (rotation per minute) and stops motor after execution of rotations/degrees.";
Blockly.Msg.MOTOR_ON_FOR_TOOLTIP_SERVO = "Sets the servo motor to a specific position in degrees.";
Blockly.Msg.MOTOR_ON_TOOLTIP = "Turns motor on with specific power.";
Blockly.Msg.MOTOR_ON_TOOLTIP_CALLIOPE = "Turns motor A, B or A+B on with a specific power.";
Blockly.Msg.MOTOR_ON_TOOLTIP_CALLIOPE_CB = "Turns left or right motor on with a specific power. Power can be positive or negative for reverse direction.";
Blockly.Msg.MOTOR_OTHER = "other power consumer";
Blockly.Msg.MOTOR_PAN = "pan";
Blockly.Msg.MOTOR_PORT = "motor port";
Blockly.Msg.MOTOR_PORT_ARDUINO = "motor 28BYJ-48 port";
Blockly.Msg.MOTOR_REGULATION = "regulation";
Blockly.Msg.MOTOR_RIGHT = "right";
Blockly.Msg.MOTOR_ROTATION = "rotation";
Blockly.Msg.MOTOR_ROTATION_PER_MINUTE = "rpm";
Blockly.Msg.MOTOR_ROTATION_REVERSE = "direction of rotation";
Blockly.Msg.MOTOR_SETPOWER_TOOLTIP = "Sets power of this motor.";
Blockly.Msg.MOTOR_SIDE = "side";
Blockly.Msg.MOTOR_SPEED = "speed %";
Blockly.Msg.MOTOR_SPEED_0 = "Motor Speed is 0!";
Blockly.Msg.MOTOR_STEER = "steer";
Blockly.Msg.MOTOR_STOP = "stop";
Blockly.Msg.MOTOR_STOP_TOOLTIP = "Stops this motor.";
Blockly.Msg.MOTOR_TILT = "tilt";
// NOTE(review): key "MOTOR_TOOLTOP" looks like a typo for "MOTOR_TOOLTIP", but
// renaming it would break lookups elsewhere — keep as-is.
Blockly.Msg.MOTOR_TOOLTOP = "Represents a motor.";
Blockly.Msg.MOTOR_TURN = "turn";
// NAO robot block labels and tooltips.
// Typo fixes in user-facing strings only: "siting" → "sitting", "degerees" → "degrees",
// missing word "face" in the face-information tooltip, and possessive "robots" → "robot's"
// where the robot's own property is meant. Keys are unchanged.
Blockly.Msg.NAO_ABSOLUTE = "absolute";
Blockly.Msg.NAO_ACCELEROMETER = "accelerometer";
Blockly.Msg.NAO_ACCELEROMETER_TOOLTIP = "Get the current reading from the accelerometer in the given direction";
Blockly.Msg.NAO_ANIMATION_TOOLTIP = "Perform the selected animation. TaiChi is a complex and artistic set of moves. Blink will only make the robot blink by using its LEDs. The wink and wipe forehead animation can be performed while sitting and standing.";
Blockly.Msg.NAO_ANSWER = "answer";
Blockly.Msg.NAO_APPLYPOSTURE = "let NAO";
Blockly.Msg.NAO_APPLYPOSTURE_TOOLTIP = "Robot will take the selected posture or position. Use the dropdown menu to choose one.";
Blockly.Msg.NAO_AUTONOMOUS = "turn autonomous behaviour";
Blockly.Msg.NAO_AUTONOMOUS_TOOLTIP = "Turn the robot's autonomous behaviour on or off. While 'on' the robot will react to sounds, move slightly from side to side and try to track faces. Turn it off if this behaviour interrupts your program.";
Blockly.Msg.NAO_BLINK = "blink";
Blockly.Msg.NAO_CAMERA = "camera";
Blockly.Msg.NAO_CAMERA_BOTTOM = "bottom";
Blockly.Msg.NAO_CAMERA_TOP = "top";
Blockly.Msg.NAO_FACE_GET_INFORMATION_TOOLTIP = "Returns additional information about the given detected face in an array with following values: [XAngle, YAngle, XSize, YSize, Heading], please note that all values are given in camera angles.";
Blockly.Msg.NAO_FILENAME = "filename";
Blockly.Msg.NAO_FORGETFACEOF = "forget face of";
Blockly.Msg.NAO_FORGETFACE_TOOLTIP = "Delete a face previously saved under a given name from the vision recognition database on the robot. ";
Blockly.Msg.NAO_FRAME = "frame";
Blockly.Msg.NAO_FRAME_TORSO = "torso";
Blockly.Msg.NAO_FRAME_WORLD = "world";
Blockly.Msg.NAO_FSR = "force sensitive resistor";
Blockly.Msg.NAO_GETLANGUAGE_TOOLTIP = "Get the active language. This is the language the robot is currently using for Text to Speech and Voice recognition.";
Blockly.Msg.NAO_GETVOLUME_TOOLTIP = "Get current volume.";
Blockly.Msg.NAO_GYROMETER = "gyrometer";
Blockly.Msg.NAO_GYROMETER_TOOLTIP = "Get the current reading from the gyrometer in the given direction.";
Blockly.Msg.NAO_HAND = "hand";
Blockly.Msg.NAO_HAND_TOOLTIP = "Open or close a single hand (wrist) of the robot.";
Blockly.Msg.NAO_HEADSENSOR = "head sensor";
Blockly.Msg.NAO_INTENSITY = "intensity";
Blockly.Msg.NAO_LEARNFACEOF = "learn face of";
Blockly.Msg.NAO_LEARNFACE_TOOLTIP = "Learn and save a face under a given name in the vision recognition database on the robot.";
Blockly.Msg.NAO_LED = "LED";
Blockly.Msg.NAO_LEDOFF_TOOLTIP = "Turn the selected LED(s) off.";
Blockly.Msg.NAO_LEDRESET_TOOLTIP = "Reset the selected LEDs to their original state regarding colour and intensity.";
Blockly.Msg.NAO_LED_ALL = "all";
Blockly.Msg.NAO_LED_CHEST = "chest";
Blockly.Msg.NAO_LED_EAR = "ear";
Blockly.Msg.NAO_LED_EARS = "ears";
Blockly.Msg.NAO_LED_EYE = "eye";
Blockly.Msg.NAO_LED_EYES = "eyes";
Blockly.Msg.NAO_LED_FOOT = "foot";
Blockly.Msg.NAO_LED_HEAD = "head";
Blockly.Msg.NAO_LED_TOOLTIP = "Set the color of selected LED(s). Eyes and feet LEDs are available.";
Blockly.Msg.NAO_LOOKAT = "look at";
Blockly.Msg.NAO_MARK_GET_INFORMATION_TOOLTIP = "Returns additional information about the given NAO mark in an array with following values: [XAngle, YAngle, XSize, YSize, Heading], please note that all values are given in camera angles.";
Blockly.Msg.NAO_MOVE = "move";
Blockly.Msg.NAO_MOVEJOINT_TOOLTIP = "Move a single joint of the robot. A relative movement means that the current position of the selected joint is used to calculate the new position. Be aware that every joint has different limits. Therefore the input range for the degrees varies.";
Blockly.Msg.NAO_PART_ARM = "arm";
Blockly.Msg.NAO_PART_ARMS = "arms";
Blockly.Msg.NAO_PART_BODY = "body";
Blockly.Msg.NAO_PART_HEAD = "head";
Blockly.Msg.NAO_PART_LEG = "leg";
Blockly.Msg.NAO_PART_LEGS = "legs";
Blockly.Msg.NAO_PERFORM = "perform";
Blockly.Msg.NAO_PHRASE = "phrase";
Blockly.Msg.NAO_PLAYFILE_TOOLTIP = "Plays a sound file from the robot. Enter the name of the file. The file needs to be transferred to the robot beforehand.";
Blockly.Msg.NAO_PLAY_FILE = "play file";
Blockly.Msg.NAO_POINTAT = "point at";
Blockly.Msg.NAO_POINTLOOKAT_TOOLTIP = "Robot points or looks at a given position. The robot will move one of its hands or the head. Select the frame that is the point of reference. The values are entered in centimeter. Refer to the wiki for more information about the coordinate systems.";
Blockly.Msg.NAO_POSTURE_CROUCH = "crouch";
Blockly.Msg.NAO_POSTURE_LYINGBACK = "lie back";
Blockly.Msg.NAO_POSTURE_LYINGBELLY = "lie belly";
Blockly.Msg.NAO_POSTURE_REST = "rest";
Blockly.Msg.NAO_POSTURE_SIT = "sit";
Blockly.Msg.NAO_POSTURE_SITRELAX = "sit relaxed";
Blockly.Msg.NAO_POSTURE_STAND = "stand";
Blockly.Msg.NAO_POSTURE_STANDINIT = "stand init";
Blockly.Msg.NAO_POSTURE_STANDZERO = "stand zero";
Blockly.Msg.NAO_QQVGA = "160*120";
Blockly.Msg.NAO_QVGA = "320*240";
Blockly.Msg.NAO_RANDOMEYES = "random eyes";
Blockly.Msg.NAO_RANDOMEYES_TOOLTIP = "The color of the eyes is changed randomly for a specified amount of time entered in milliseconds.";
Blockly.Msg.NAO_RASTA = "rasta";
Blockly.Msg.NAO_RASTA_TOOLTIP = "The color of the eyes is changed between green, yellow and red for a specified amount of time entered in milliseconds.";
Blockly.Msg.NAO_RECOGNIZEWORD = "speech recognizer of";
Blockly.Msg.NAO_RECOGNIZEWORD_TOOLTIP = "Returns a word from the given list when recognized by NAO";
Blockly.Msg.NAO_RECORDVIDEO = "record video";
Blockly.Msg.NAO_RECORDVIDEO_TOOLTIP = "Records a video and saves it on the robot. Access the robot's file system to view the video.";
Blockly.Msg.NAO_RELATIVE = "relative";
Blockly.Msg.NAO_RESOLUTION = "resolution";
Blockly.Msg.NAO_SETINTENSITY_TOOLTIP = "Set the intensity of selected LED(s) in a range from 0 to 100. Ears, head and chest LEDs are available";
Blockly.Msg.NAO_SETLANGUAGE_TOOLTIP = "Set the language. Be aware that it is necessary to download the language pack before you can use it. For more information refer to the manual of your robot.";
Blockly.Msg.NAO_SETVOLUME_TOOLTIP = "Set the volume in a range from 0 to 100.";
Blockly.Msg.NAO_STIFFNESS = "lock motors";
Blockly.Msg.NAO_STIFFNESS_TOOLTIP = "The stiffness of the selected body part of the robot will be turned on or off. Be aware that releasing the leg motors while the robot is standing may result in robot collapsing.";
Blockly.Msg.NAO_STOP = "stop movement";
Blockly.Msg.NAO_STOP_TOOLTIP = "The robot immediately stops all movement. Be aware that this can lead to situations where downfall is possible.";
Blockly.Msg.NAO_TAI_CHI = "tai chi";
Blockly.Msg.NAO_TAKEPICTURE = "take picture";
Blockly.Msg.NAO_TAKEPICTURE_TOOLTIP = "Takes a picture and saves it on the robot. Access the robot's file system to view the picture.";
Blockly.Msg.NAO_TOUCHSENSOR_TOOLTIP = "Is true if the selected touchsensor on the robot was touched.";
Blockly.Msg.NAO_TURN_TOOLTIP = "Turns the robot for number of degrees. Only enter positive values and use the dropdown to select the direction. It is possible to enter values up to 360 degrees.";
Blockly.Msg.NAO_VGA = "640*480";
Blockly.Msg.NAO_WALK = "walk";
Blockly.Msg.NAO_WALKTO = "walk to";
Blockly.Msg.NAO_WALKTO_TOOLTIP = "The robot walks to the given position. The values are entered in cm and radians and are based on the coordinate system in NAOs body. Please refer to the wiki for more information on the coordinate system and how to calculate the coordinates.";
Blockly.Msg.NAO_WALK_ASYNC_TOOLTIP = "Makes the robot walk infinitely";
Blockly.Msg.NAO_WALK_TOOLTIP = "Makes the robot walk a distance entered in cm. Distances below 10cm might lead to no movement at all. Depending on your robot and the surface the robot is walking on the distance might not be exact.";
Blockly.Msg.NAO_WAVE = "wave";
Blockly.Msg.NAO_WIPE_FOREHEAD = "wipe forehead";
// Variable dialog, neural-network (NN) block strings and miscellaneous labels.
// Typo fixes in user-facing strings only: "classifiy" → "classify",
// "probabiliy" → "probability". Keys are unchanged.
Blockly.Msg.NEW_VARIABLE = "New variable...";
Blockly.Msg.NEW_VARIABLE_TITLE = "New variable name:";
Blockly.Msg.NN_ADD_RAW_DATA = "add raw data for later feature extraction";
Blockly.Msg.NN_ADD_RAW_DATA_TOOLTIP = "add raw data (multiple times). Then extract the input features from it and add that to the trainings data";
Blockly.Msg.NN_ADD_TRAININGS_DATA = "add to trainings data";
Blockly.Msg.NN_ADD_TRAININGS_DATA_TOOLTIP = "take raw data, execute feature extraction and add the data from feature extraction to the collection of training data for a class";
Blockly.Msg.NN_CLASSIFY = "classify";
Blockly.Msg.NN_CLASSIFY_TOOLTIP = "use the trained neural network and data from feature extraction to classify and return the probability for each class";
Blockly.Msg.NN_CLASS_NUMBER = "class #";
Blockly.Msg.NN_CLASS_PROBABILITIES = "probabilities";
Blockly.Msg.NN_INIT_RAW_DATA = "initialize raw data";
Blockly.Msg.NN_INIT_RAW_DATA_TOOLTIP = "clear raw data memory. Then add raw data multiple times, extract the input features from it and add that to the trainings data";
Blockly.Msg.NN_IO_NEURON_NAMES_INVALID = "names of input/output neurons must be all different and valid (start with letter, no spaces, ...)";
Blockly.Msg.NN_MAX_NUMBER_OF_NEURONS = "max neurons";
Blockly.Msg.NN_NUMBER_INPUT_NEURONS = "# input neurons";
Blockly.Msg.NN_NUMBER_OF_CLASSES = "# classes";
Blockly.Msg.NN_RAW_DATA = "raw value";
Blockly.Msg.NN_SETUP = "setup the neural network";
Blockly.Msg.NN_SETUP_TOOLTIP = "define properties of a neural network, which can classify data";
Blockly.Msg.NN_STEP = "one NN step";
Blockly.Msg.NN_STEP_TOOLTIP = "retrieve inputs, execute one step in a neural network, write the outputs into variables";
Blockly.Msg.NN_TRAIN = "train";
Blockly.Msg.NN_TRAIN_TOOLTIP = "train the neural network with the trainings data assembled";
Blockly.Msg.NO = "no";
Blockly.Msg.NXTBRICK_TOOLTIP = "Represents the NXT brick with connected actors and sensors. There are also inbuilt actors and sensors available, e.g. buttons, display ...";
Blockly.Msg.OFF = "off";
// NOTE(review): key says "14" but the text says "16" — the intended age limit cannot be
// determined from this file alone; left unchanged pending confirmation.
Blockly.Msg.OLDER_THEN_14 = "I am 16 or older than 16!";
Blockly.Msg.ON = "on";
Blockly.Msg.ORA_ACCESS_RIGHT_CHANGED = "The shared right for user »$« is updated!";
Blockly.Msg.ORA_ACCESS_RIGHT_DELETED = "The shared right for user »$« is deleted!";
Blockly.Msg.ORA_ACCOUNT_NOT_ACTIVATED_TO_SHARE = "Your are not allowed to share. Please activate your account! <br><a href='https://www.roberta-home.de/index.php?id=138&L=1' target='_blank'>Further information ...</a>";
Blockly.Msg.ORA_COMPILERWORKFLOW_ERROR_CONFIGURATION_NOT_FOUND = "The robot configuration could not be found on the server.";
Blockly.Msg.ORA_COMPILERWORKFLOW_ERROR_CONFIGURATION_TRANSFORM_FAILED = "The robot configuration could not be transformed into the robots programming language.";
Blockly.Msg.ORA_COMPILERWORKFLOW_ERROR_PROGRAM_COMPILE_FAILED = "Your program has errors so it cannot be compiled. The compiler messages are: <span style='background-color:#ddd;font-family:courier;'>{MESSAGE}</span>";
Blockly.Msg.ORA_COMPILERWORKFLOW_ERROR_PROGRAM_GENERATION_FAILED = "The program could not be generated.";
Blockly.Msg.ORA_COMPILERWORKFLOW_ERROR_PROGRAM_GENERATION_FAILED_WITH_PARAMETERS = "The program could not be generated. Pin {PIN} is incorrect in block {BLOCK}";
Blockly.Msg.ORA_COMPILERWORKFLOW_ERROR_PROGRAM_NOT_FOUND = "The program could not be transformed into the robots programming language.";
Blockly.Msg.ORA_COMPILERWORKFLOW_ERROR_PROGRAM_STORE_FAILED = "The program could not be saved because of an internal error.";
Blockly.Msg.ORA_COMPILERWORKFLOW_ERROR_PROGRAM_TRANSFORM_FAILED = "The program could not be transformed into the robots programming language.";
Blockly.Msg.ORA_COMPILERWORKFLOW_SUCCESS = "The program build succeeded";
Blockly.Msg.ORA_CONFIGURATION_DELETE_ERROR = "An error has occurred while deleting the robot configuration, please try it again!";
Blockly.Msg.ORA_CONFIGURATION_DELETE_SUCCESS = "Configuration deleted";
Blockly.Msg.ORA_CONFIGURATION_ERROR_ID_INVALID = "Configuration name is not a valid identifier.";
Blockly.Msg.ORA_CONFIGURATION_GET_ALL_SUCCESS = "Configuration loaded";
Blockly.Msg.ORA_CONFIGURATION_GET_ONE_ERROR_NOT_FOUND = "The robot configuration could not be found in the database.";
Blockly.Msg.ORA_CONFIGURATION_GET_ONE_SUCCESS = "Configuration loaded";
Blockly.Msg.ORA_CONFIGURATION_SAVE_AS_ERROR_CONFIGURATION_EXISTS = "This configuration already exists.";
Blockly.Msg.ORA_CONFIGURATION_SAVE_ERROR = "An error has occurred while saving the robot configuration.";
Blockly.Msg.ORA_CONFIGURATION_SAVE_ERROR_NOT_SAVED_TO_DB = "An error error has occurred while saving the configuration in the database.";
Blockly.Msg.ORA_CONFIGURATION_SAVE_SUCCESS = "Configuration saved";
Blockly.Msg.ORA_FIRMWARE_RESET_ERROR = "Loading the original program failed!";
Blockly.Msg.ORA_FIRMWARE_RESET_SUCCESS = "Your original program will run in a moment!";
Blockly.Msg.ORA_GALLERY_UPLOAD_ERROR = "Your program »$« couldn't be uploaded to the gallery, it seems as if it is already uploaded.";
Blockly.Msg.ORA_GALLERY_UPLOAD_SUCCESS = "Your program »$« is now in the gallery!";
Blockly.Msg.ORA_GROUP_ADD_MEMBER_ERROR = "There was an error while adding new members the user group. Please reload the current form and try again. If this error should persist, please contact our support.";
Blockly.Msg.ORA_GROUP_ADD_MEMBER_ERROR_LIMIT_REACHED = "You can not have more than 99 members in one user group.";
Blockly.Msg.ORA_GROUP_ADD_MEMBER_ERROR_SMALLER_THAN_ONE = "To add automatically generated members, you must enter an integer that is bigger or equals to 1.";
Blockly.Msg.ORA_GROUP_CREATE_ERROR = "There was an error on creating the user group. Please try different inputs or try again later. If this error should persist, please contact our support.";
Blockly.Msg.ORA_GROUP_CREATE_ERROR_GROUP_ALREADY_EXISTS = "You already have a user group with the that name. Please enter another name.";
Blockly.Msg.ORA_GROUP_CREATE_ERROR_GROUP_LIMIT_REACHED = "You reached the limit of user groups. You can not have more than 100 user groups in total.";
Blockly.Msg.ORA_GROUP_CREATE_ERROR_NOT_SAVED_TO_DB = "This group already exists";
Blockly.Msg.ORA_GROUP_DELETE_ERROR = "There was an error while deleting the user group. Please reload the user group list and try again. If this error should persist, please contact our support.";
Blockly.Msg.ORA_GROUP_DELETE_ERROR_GROUP_DOES_NOT_EXISTS = "Could not find a user group with the specified name. Please reload the list of your user groups and try again.";
Blockly.Msg.ORA_GROUP_DELETE_ERROR_GROUP_HAS_MEMBERS = "One or more members of the user group have logged in in the past and created programs. In order to delete the user group, delete its members first.";
Blockly.Msg.ORA_GROUP_ERROR_MISSING_RIGHTS_TO_BE_GROUP_OWNER = "You are not allowed to be a user group owner. Your account must have an validated email attached.";
Blockly.Msg.ORA_GROUP_ERROR_NAME_INVALID = "The name of the user group is invalid. Please do not use special characters and enter at least one character.";
Blockly.Msg.ORA_GROUP_GET_ALL_ERROR = "There was an error while aquiring the list of user groups. Please reload the menu and try again. If this error should persist, please contact our support.";
Blockly.Msg.ORA_GROUP_GET_ONE_ERROR = "There was an error while aquiring the user group. Please reload the menu and try again. If this error should persist, please contact our support.";
Blockly.Msg.ORA_GROUP_GET_ONE_ERROR_NOT_FOUND = "Could not find a user group with the specified name.";
Blockly.Msg.ORA_GROUP_MEMBER_ERROR_ALREADY_EXISTS = "A member with the given name already exists in the user group.";
Blockly.Msg.ORA_GROUP_MEMBER_SHARE_RESTRICTION_EXCEEDED = "You can only share programs with other members of your user group. The owner of your user group can automatically read your programs.";
Blockly.Msg.ORA_GROUP_RENAME_ERROR = "There was an error while changing the name of the user group. Please reload the current form and try again. If this error should persist, please contact our support.";
Blockly.Msg.ORA_GROUP_TO_SHARE_DOES_NOT_EXIST = "You do not own a user group with the name you entered. <br />Try again with a different name!";
Blockly.Msg.ORA_GROUP_UPDATE_ERROR = "There was an error while updating the user group. Please reload the user group list and try again. If this error should persist, please contact our support.";
Blockly.Msg.ORA_GROUP_UPDATE_ERROR_NOT_FOUND = "Could not find a user group with the specified name. Please reload the current form and try again.";
Blockly.Msg.ORA_GROUP_UPDATE_ERROR_NOT_OWNER = "Could not update the user group. You are not the owner of that group.";
Blockly.Msg.ORA_LIKE_DELETE_SUCCESS = "You do not like program »$« anymore!";
Blockly.Msg.ORA_LIKE_SAVE_ERROR_EXISTS = "You couldn't like program »$«. Maybe you have already liked it.";
Blockly.Msg.ORA_LIKE_SAVE_SUCCESS = "You like program »$« now!";
Blockly.Msg.ORA_LIST_CREATE_WITH_ERROR = "Error: This block may be used only within a variable declaration.";
Blockly.Msg.ORA_OWNER_DOES_NOT_EXIST = "The owner does not exist.";
Blockly.Msg.ORA_PROGRAM_CONFIGURATION_NOT_COMPATIBLE = "Program is not compatible with the configuration";
Blockly.Msg.ORA_PROGRAM_DELETE_ERROR = "An error error has occurred while deleting the configuration. Please try it again!";
Blockly.Msg.ORA_PROGRAM_DELETE_SUCCESS = "Program deleted";
Blockly.Msg.ORA_PROGRAM_ERROR_ID_INVALID = "The name of your program is already used by the system. Please choose another name and try it again!";
Blockly.Msg.ORA_PROGRAM_GET_ALL_ERROR_USER_NOT_FOUND = "Can not load the programs of the specified user. Please reload the lab and try again.";
Blockly.Msg.ORA_PROGRAM_GET_ALL_SUCCESS = "Programs loaded";
Blockly.Msg.ORA_PROGRAM_GET_ONE_ERROR_NOT_FOUND = "Program not found.";
Blockly.Msg.ORA_PROGRAM_GET_ONE_ERROR_NOT_LOGGED_IN = "You are not logged in, please log in with your username and password or create a new user.";
Blockly.Msg.ORA_PROGRAM_GET_ONE_SUCCESS = "Program loaded";
Blockly.Msg.ORA_PROGRAM_IMPORT_ERROR = "»$.xml« is not a valid NEPO program and cannot be uploaded!";
Blockly.Msg.ORA_PROGRAM_IMPORT_ERROR_WRONG_ROBOT_TYPE = "You tried to import a program suitable for »$«, which is different from your menu selection.<br> Please switch the robot type in the robot's menu and try to import again!";
Blockly.Msg.ORA_PROGRAM_INVALID_STATEMETNS = "There are errors in your program or configuration. Please check the messages.";
Blockly.Msg.ORA_PROGRAM_SAVE_AS_ERROR_PROGRAM_EXISTS = "This program already exists.";
Blockly.Msg.ORA_PROGRAM_SAVE_ERROR_NO_WRITE_PERMISSION = "You don't have the permission to modify this program!";
Blockly.Msg.ORA_PROGRAM_SAVE_ERROR_OPTIMISTIC_TIMESTAMP_LOCKING = "This program has been changed recently by someone else. You can save your changes in a new program, choose >save as< !";
Blockly.Msg.ORA_PROGRAM_SAVE_ERROR_PROGRAM_TO_UPDATE_NOT_FOUND = "The program could not be found, so it is impossible to update it.";
Blockly.Msg.ORA_PROGRAM_SAVE_SUCCESS = "Program saved";
Blockly.Msg.ORA_PROGRAM_TO_SHARE_DOES_NOT_EXIST = "The program that you would like to share does not exist.";
Blockly.Msg.ORA_ROBOT_DOES_NOT_EXIST = "It seems that we do not support the desired robot system!<br>Please choose another one.";
Blockly.Msg.ORA_ROBOT_FIRMWAREUPDATE_IMPOSSIBLE = "An error has occurred while updating the new firmware on your robot.";
Blockly.Msg.ORA_ROBOT_FIRMWAREUPDATE_POSSIBLE = "The firmware was updated";
Blockly.Msg.ORA_ROBOT_NOT_CONNECTED = "There is no robot connected. Check if your robot is switched on, connect it to the server and type in the robot's password under robot - connect in the menu. <br /> If you have problems to do this, please have a look at our help pages.";
Blockly.Msg.ORA_ROBOT_NOT_WAITING = "The robot does not wait for a run command.";
Blockly.Msg.ORA_ROBOT_PUSH_RUN = "Robot waited and now the jar is pushed to the robot";
Blockly.Msg.ORA_ROBOT_PUSH_RUN_CALLIOPE2016 = "Your program »$« is downloaded to Calliope, press the reset button to start it!";
Blockly.Msg.ORA_ROBOT_PUSH_RUN_NXT = "Your program »$« is downloaded to the NXT, start it on the robot!";
Blockly.Msg.ORA_ROBOT_SET_SUCCESS = "Switched to robot system »$«!";
Blockly.Msg.ORA_SERVER_ERROR = "Server-Error";
Blockly.Msg.ORA_TOKEN_SET_ERROR_NO_ROBOT_WAITING = "Check if your robot is switched on and connected to the server. <br />If you have problems to do this, please have a look at our help pages.";
Blockly.Msg.ORA_TOKEN_SET_ERROR_WRONG_ROBOTTYPE = "You are trying to connect a robot of a different type as selected in the menu. <br> Please switch the robot type in the robot's menu and try to connect again!";
Blockly.Msg.ORA_TOKEN_SET_SUCCESS = "Token set";
Blockly.Msg.ORA_TOOLBOX_DELETE_ERROR = "An error has occurred while deleting the toolbox, please try it again!";
Blockly.Msg.ORA_TOOLBOX_DELETE_SUCCESS = "Toolbox deleted";
Blockly.Msg.ORA_TOOLBOX_ERROR_ID_INVALID = "Toolbox name is not a valid identifier.";
Blockly.Msg.ORA_TOOLBOX_GET_ALL_SUCCESS = "Toolbox loaded";
Blockly.Msg.ORA_TOOLBOX_GET_ONE_ERROR_NOT_FOUND = "The toolbox could not be found in the database.";
Blockly.Msg.ORA_TOOLBOX_GET_ONE_SUCCESS = "Toolbox loaded";
Blockly.Msg.ORA_TOOLBOX_SAVE_ERROR = "An error has occurred while saving the toolbox.";
Blockly.Msg.ORA_TOOLBOX_SAVE_ERROR_NOT_SAVED_TO_DB = "An error error has occurred while saving the configuration in the database.";
Blockly.Msg.ORA_TOOLBOX_SAVE_SUCCESS = "Toolbox saved";
Blockly.Msg.ORA_TOOLBOX_TRANSFORM_ERROR = "Transformation error?";
Blockly.Msg.ORA_USER_ACTIVATION_INVALID_URL = "The link is not valid anymore. Please ask for resending your verification mail again";
Blockly.Msg.ORA_USER_ACTIVATION_SENT_MAIL_FAIL = "Sorry, we cannot send a mail to you, please contact »support-o-r@iais.fraunhofer.de«";
// Fix tense: the mail has already been dispatched when this message is shown ("SENT" in the key).
Blockly.Msg.ORA_USER_ACTIVATION_SENT_MAIL_SUCCESS = "We sent a mail to you, please check your mailbox!";
Blockly.Msg.ORA_USER_ACTIVATION_SUCCESS = "Your account is successfully verified! Please login!";
// Fix typos: "to long" -> "too long", "longer then" -> "longer than".
// NOTE(review): "25 digits" probably means "25 characters" — confirm before changing.
Blockly.Msg.ORA_USER_CREATE_ERROR_ACCOUNT_LENGTH = "Your account name or your user name is too long. Please make sure that they are not longer than 25 digits. Your account couldn't be created.";
// Fix garbled phrasing: "using some one or more" -> "using one or more".
Blockly.Msg.ORA_USER_CREATE_ERROR_CONTAINS_SPECIAL_CHARACTERS = "You are using one or more special characters in your account name! Please remove them. Your account couldn't be created.";
Blockly.Msg.ORA_USER_CREATE_ERROR_MISSING_REQ_FIELDS = "Please make sure you have filled in all required fields! Your account couldn't be created.";
Blockly.Msg.ORA_USER_CREATE_ERROR_NOT_SAVED_TO_DB = "The given user name already exists in the database, please choose another user name.";
Blockly.Msg.ORA_USER_CREATE_SUCCESS = "Your user account »$« was successfully created!";
Blockly.Msg.ORA_USER_DEACTIVATION_SUCCESS = "Your account is currently not verified, please check your mailbox!";
Blockly.Msg.ORA_USER_DELETE_ERROR_HAS_GROUPS = "You have user groups associated with your account. Please delete them first, before you delete your account.";
Blockly.Msg.ORA_USER_DELETE_ERROR_ID_NOT_FOUND = "Error while deleting user.";
Blockly.Msg.ORA_USER_DELETE_ERROR_NOT_DELETED_IN_DB = "Error while deleting user in database.";
Blockly.Msg.ORA_USER_DELETE_SUCCESS = "Your account »$« was successfully deleted. Hope to see you soon again!";
Blockly.Msg.ORA_USER_EMAIL_ONE_ERROR_USER_NOT_EXISTS_WITH_THIS_EMAIL = "This email address is unknown, maybe you have spelled it wrong!";
Blockly.Msg.ORA_USER_ERROR_EMAIL_USED = "There is already an account with this email address registered. Please enter another email address!";
Blockly.Msg.ORA_USER_ERROR_NOT_LOGGED_IN = "You need to be logged in to a user account to use this function. If you are logged in, please log out and in again.";
Blockly.Msg.ORA_USER_GET_ALL_SUCCESS = "Users loaded";
Blockly.Msg.ORA_USER_GET_ONE_ERROR_ID_OR_PASSWORD_WRONG = "You have entered wrong user name or password. Please try again!";
Blockly.Msg.ORA_USER_GET_ONE_SUCCESS = "Login successful";
Blockly.Msg.ORA_USER_GROUP_SAVE_AS_ERROR_USER_GROUP_EXISTS = "This user already belongs to the group";
Blockly.Msg.ORA_USER_PASSWORD_RECOVERY_EXPIRED_URL = "The link is not valid anymore. Please ask for resetting your password again, if you still cannot remember it!";
Blockly.Msg.ORA_USER_PASSWORD_RECOVERY_SENT_MAIL_FAIL = "Sorry, we cannot send a mail to you, please contact »support-o-r@iais.fraunhofer.de«";
// Fix tense: the mail has already been dispatched when this message is shown ("SENT" in the key).
Blockly.Msg.ORA_USER_PASSWORD_RECOVERY_SENT_MAIL_SUCCESS = "We sent a mail to you, please check your mailbox!";
Blockly.Msg.ORA_USER_TO_ADD_NOT_FOUND = "User was not found";
Blockly.Msg.ORA_USER_TO_SHARE_DOES_NOT_EXIST = "The user with which you would like to share the program does not exist. <br />Try it again!";
Blockly.Msg.ORA_USER_TO_SHARE_SAME_AS_LOGIN_USER = "You cannot share any programs with yourself.";
Blockly.Msg.ORA_USER_UPDATE_ERROR_NOT_SAVED_TO_DB = "The password could not be saved to the database. Are you sure that you entered the old password correctly?";
Blockly.Msg.ORA_USER_UPDATE_SUCCESS = "Your changes have been successfully saved!";
Blockly.Msg.ORDINAL_NUMBER_SUFFIX = "";
Blockly.Msg.OUTPUT = "output";
Blockly.Msg.PARTICLE_TOOLTIP = "Represents SDS011 particle sensor.";
Blockly.Msg.PASSWORD = "Password";
Blockly.Msg.PIN_ISTOUCHED_TOOLTIP = "Is the selected pin touched?";
Blockly.Msg.PIN_PULL = "pull";
Blockly.Msg.PIN_PULL_DOWN = "down";
Blockly.Msg.PIN_PULL_NONE = "none";
Blockly.Msg.PIN_PULL_UP = "up";
Blockly.Msg.PIN_SET_PULL_BLOCK_WARNING = "This block got replaced. Please use the 'Sensor digital'-block from the robotconfiguration";
Blockly.Msg.PIN_SET_PULL_TOOLTIP = "Sets the pull of the chosen pin";
Blockly.Msg.PIN_WRITE = "write";
Blockly.Msg.PITCH = "pitch";
Blockly.Msg.PLAY = "play";
Blockly.Msg.PLAY_DURATION = "duration ms";
Blockly.Msg.PLAY_EIGHTH = "eighth note";
Blockly.Msg.PLAY_FILE = "file";
Blockly.Msg.PLAY_FILE_TOOLTIP = "Plays a sound file.";
Blockly.Msg.PLAY_FREQUENZ = "frequency Hz";
Blockly.Msg.PLAY_GETVOLUME_TOOLTIP = "Gets current volume.";
Blockly.Msg.PLAY_HALF = "half note";
Blockly.Msg.PLAY_NOTE = "note";
Blockly.Msg.PLAY_NOTE_TOOLTIP = "Plays a music note";
Blockly.Msg.PLAY_QUARTER = "quarter note";
Blockly.Msg.PLAY_SETVOLUME_TOOLTIP = "Sets volume.";
Blockly.Msg.PLAY_SIXTEENTH = "sixteenth note";
Blockly.Msg.PLAY_TONE = "tone";
// Fix typo: "determin" -> "determine".
Blockly.Msg.PLAY_TONE_TOOLTIP = "Plays a tone. Use frequency and duration to determine the tone pitch and duration.";
Blockly.Msg.PLAY_TOOLTIP = "Represents a buzzer.";
Blockly.Msg.PLAY_VOLUME = "volume";
Blockly.Msg.PLAY_WHOLE = "whole note";
Blockly.Msg.POPUP_ABOUT_JOIN = "I want to help";
Blockly.Msg.POPUP_ABOUT_TEXT = "The Open Roberta Lab is a cloud-based integrated programming environment that enables children and adolescents to program easily different robot/microcontroller systems. This platform is completely open source so taking part is desirable! Both the software and the open source developer tools are available via Fraunhofer servers.";
Blockly.Msg.POPUP_ABOUT_TEXT_DEV = "The Open Roberta Lab is an open-source programming platform developed by Fraunhofer IAIS within the initiative <a href='http://www.roberta-home.de/' target='_blank'>»Roberta – Learning with Robots«</a>";
Blockly.Msg.POPUP_ABOUT_TEXT_GOOG = "Open Roberta was initiated in collaboration with Google Germany to reduce the hurdles for students, teachers and schools programming educational robots.</a>";
Blockly.Msg.POPUP_AGE = "Age";
Blockly.Msg.POPUP_ATTENTION = "Attention";
Blockly.Msg.POPUP_BACKGROUND_REPLACE = "A program with the same name already exists. <br> Would you like to replace it?";
Blockly.Msg.POPUP_BACKGROUND_REPLACE_CONFIGURATION = "A configuration with the same name already exists. <br> Would you like to replace it?";
Blockly.Msg.POPUP_BACKGROUND_STORAGE = "The Open Roberta Lab can automatically load your simulation background on your next visits. For this purpose, we will save data in the Local Storage on your computer. <a href='https://www.roberta-home.de/en/privacy-policy-open-roberta-lab/' target='_blank'>More information in our privacy policy.</a>";
Blockly.Msg.POPUP_BEFOREUNLOAD = "You have unsaved changes in your program or configuration. Sign in and save your program or configuration.";
Blockly.Msg.POPUP_BEFOREUNLOAD_LOGGEDIN = "You have unsaved changes in your program or configuration.";
Blockly.Msg.POPUP_CANCEL = "Cancel";
Blockly.Msg.POPUP_CHANGE_PASSWORD = "change password ...";
Blockly.Msg.POPUP_CONFIRM_CONTINUE = "<br><br><i>Press »OK« to discard your work. Press »Cancel« to stay here and save your work first.</i>";
Blockly.Msg.POPUP_CONFIRM_DELETE_CONFIGURATION = "Do you really want to delete the configuration ?";
Blockly.Msg.POPUP_CONFIRM_DELETE_GROUP = "Do you really want to delete these groups?";
Blockly.Msg.POPUP_CONFIRM_DELETE_PROGRAM = "Do you really want to delete the following program or programs?<br><br>If you delete a program you share with others, you delete it for everyone.<br><br>If you delete a program shared with you, you do not delete the program but the sharing.<br><br>If you delete a program with the owner »Gallery«, you remove your program from the gallery!";
Blockly.Msg.POPUP_CONFIRM_DELETE_USER_GROUP = "Do you really want to remove these users below from the current group?";
Blockly.Msg.POPUP_CONFIRM_UPDATE_FIRMWARE = "There is a new firmware version for your robot available. You can try to run programs with the old version, but best bet is to update your robot now! You just have to click »Update now«.";
Blockly.Msg.POPUP_CONTINUE = "continue anyway";
Blockly.Msg.POPUP_CREATE_BOOKMARK = "Create a browser bookmark now to save your robot selection!";
Blockly.Msg.POPUP_DISPLAY_HEADER = "output of your program";
Blockly.Msg.POPUP_DOWNLOAD = "Download your program to »$«";
Blockly.Msg.POPUP_DOWNLOAD_CHECK = "Okay, I've changed the download folder of my browser permanently. Don't show this popup again and download my programs directly.";
Blockly.Msg.POPUP_DOWNLOAD_STEP_1 = "Right click on your program link below and";
Blockly.Msg.POPUP_DOWNLOAD_STEP_1_EDISON = "Connect your Edison via the EdComm cable and press the round button";
// Fix typo: "on you program" -> "on your program" (matches POPUP_DOWNLOAD_STEP_1).
Blockly.Msg.POPUP_DOWNLOAD_STEP_1_SENSEBOX = "Check that your device is in programming mode (LED dimming in a pattern), if not, double press the red button. Right click on your program link below and";
Blockly.Msg.POPUP_DOWNLOAD_STEP_2 = "choose »Save link as ...«, then";
Blockly.Msg.POPUP_DOWNLOAD_STEP_2_EDISON = "click on »Play« to play your program to Edison and";
Blockly.Msg.POPUP_DOWNLOAD_STEP_3 = "click on your connected »$« in the left column,";
Blockly.Msg.POPUP_DOWNLOAD_STEP_3_EDISON = "wait until Edison beeps, then unplug.";
Blockly.Msg.POPUP_DOWNLOAD_STEP_4 = "now click on the »Save« button on the bottom right.<br><span style='font-size: 14px;'>If your program doesn't start automatically press the reset button after a while.</span>";
Blockly.Msg.POPUP_DOWNLOAD_STEP_4_EDISON = "Press the triangle button to start your program on your Edison<br><span style='font-size: 14px;'>Having trouble? Turn up the volume and check that all sound enhancements are turned off</span>";
Blockly.Msg.POPUP_DOWNLOAD_STEP_4_SENSEBOX = "now click on the »Save« button on the bottom right.";
Blockly.Msg.POPUP_DO_UPDATE_FIRMWARE = "Update now";
Blockly.Msg.POPUP_EMAIL = "E-Mail";
Blockly.Msg.POPUP_EMAIL_SEND = "Send now";
Blockly.Msg.POPUP_GET_LINK = "Here is the link to your actual program. Please don't change it, it probably won't work anymore. It's already copied to your clipboard!</br>$";
Blockly.Msg.POPUP_MULTIPLE_ROBOTS = "Select multiple programs for the simulation";
Blockly.Msg.POPUP_MULTROBOTS_NOPROGRAMS = "Please create and store at least two programs which then can be executed each by a simulated robot!";
Blockly.Msg.POPUP_NAME = "Name";
Blockly.Msg.POPUP_NEW_PASSWORD = "New Password";
Blockly.Msg.POPUP_OLD_PASSWORD = "Old Password";
Blockly.Msg.POPUP_PASSWORD = "Password";
Blockly.Msg.POPUP_PASSWORD_RECOVERY = "reset password ...";
Blockly.Msg.POPUP_PROGRAM_TERMINATED_UNEXPECTED = "The execution of the program on the robot has terminated unexpectedly!";
Blockly.Msg.POPUP_REGISTER_USER = "Register now";
Blockly.Msg.POPUP_REPEAT_PASSWORD = "Repeat password";
Blockly.Msg.POPUP_REPLACE = "Replace";
Blockly.Msg.POPUP_ROBOT_BATTERY = "Voltage";
Blockly.Msg.POPUP_ROBOT_NAME = "Name";
Blockly.Msg.POPUP_ROBOT_NOT_CONNECTED = "You have to make a connection to your robot first.";
Blockly.Msg.POPUP_ROBOT_STATE = "State";
Blockly.Msg.POPUP_ROBOT_STATE_BUSY = "busy";
Blockly.Msg.POPUP_ROBOT_STATE_DISCONNECTED = "disconnected";
Blockly.Msg.POPUP_ROBOT_STATE_WAIT = "wait";
Blockly.Msg.POPUP_ROBOT_SYSTEM = "System";
Blockly.Msg.POPUP_ROBOT_WAIT = "Waiting time";
Blockly.Msg.POPUP_SCANNED_ROBOTS = "found robots";
Blockly.Msg.POPUP_STARTUP_COOKIES = "We use cookies to personalise content and to analyse our traffic.";
Blockly.Msg.POPUP_STARTUP_HELP = "Do you need help?";
Blockly.Msg.POPUP_STARTUP_HELP_TEXT = "In our detailed help, we will explain everything you need, from building instructions to frequently asked questions.";
Blockly.Msg.POPUP_STARTUP_HIDE = "Okay, don't show this window again and remember my choice with a bookmark.";
Blockly.Msg.POPUP_STARTUP_START = "Choose your system!";
Blockly.Msg.POPUP_STARTUP_TOUR_TEXT = "Would you like to get started, but do not know exactly how? We will show you the first steps in an interactive tutorial.";
Blockly.Msg.POPUP_TOUR = "take a tour";
Blockly.Msg.POPUP_USERNAME = "Username";
Blockly.Msg.POPUP_USERNAME_LOGOFF = "You are not logged in.";
Blockly.Msg.POPUP_VALUE = "Value";
Blockly.Msg.POPUP_WLAN_SECURITY_INFORMATION = "Please note that the data provided in this dialog window is private and will not be stored in the OpenRoberta lab; it will only be used for program creation and during the compilation process. Make sure that for each new session you enter these credentials again. Please keep in mind that exposing this information to public may pose security risks.";
Blockly.Msg.POPUP_WLAN_SSID = "SSID";
Blockly.Msg.PORT_ANKLE = "ankle";
Blockly.Msg.PORT_BUMPER = "bumper";
Blockly.Msg.PORT_ELBOW = "elbow";
Blockly.Msg.PORT_HAND = "hand";
Blockly.Msg.PORT_HEAD = "head";
Blockly.Msg.PORT_HIP = "hip";
Blockly.Msg.PORT_INTERNAL = "Port internal";
Blockly.Msg.PORT_KNEE = "knee";
Blockly.Msg.PORT_SHOULDER = "shoulder";
Blockly.Msg.PORT_WRIST = "wrist";
Blockly.Msg.POTENTIOMETER_TOOLTIP = "Represents a potentiometer.";
Blockly.Msg.PROCEDURES_ALLOW_STATEMENTS = "allow statements";
Blockly.Msg.PROCEDURES_BEFORE_PARAMS = "with:";
Blockly.Msg.PROCEDURES_CALLNORETURN_HELPURL = "https://en.wikipedia.org/wiki/Procedure_%28computer_science%29";
Blockly.Msg.PROCEDURES_CALLNORETURN_TOOLTIP = "Run the user-defined function '%1'.";
Blockly.Msg.PROCEDURES_CALLRETURN_HELPURL = "https://en.wikipedia.org/wiki/Procedure_%28computer_science%29";
Blockly.Msg.PROCEDURES_CALLRETURN_TOOLTIP = "Run the user-defined function '%1' and use its output.";
Blockly.Msg.PROCEDURES_CALL_BEFORE_PARAMS = "with:";
Blockly.Msg.PROCEDURES_CREATE_DO = "Create '%1'";
Blockly.Msg.PROCEDURES_DEFNORETURN_COMMENT = "Describe this function...";
Blockly.Msg.PROCEDURES_DEFNORETURN_DO = "";
Blockly.Msg.PROCEDURES_DEFNORETURN_HELPURL = "https://en.wikipedia.org/wiki/Procedure_%28computer_science%29";
Blockly.Msg.PROCEDURES_DEFNORETURN_PROCEDURE = "doSomething";
Blockly.Msg.PROCEDURES_DEFNORETURN_TITLE = "to";
Blockly.Msg.PROCEDURES_DEFNORETURN_TOOLTIP = "Creates a function with no output.";
Blockly.Msg.PROCEDURES_DEFRETURN_HELPURL = "https://en.wikipedia.org/wiki/Procedure_%28computer_science%29";
Blockly.Msg.PROCEDURES_DEFRETURN_RETURN = "return";
Blockly.Msg.PROCEDURES_DEFRETURN_TOOLTIP = "Creates a function with an output.";
Blockly.Msg.PROCEDURES_DEF_DUPLICATE_WARNING = "Warning: This function has duplicate parameters.";
Blockly.Msg.PROCEDURES_HIGHLIGHT_DEF = "Highlight function definition";
Blockly.Msg.PROCEDURES_IFRETURN_HELPURL = "http://c2.com/cgi/wiki?GuardClause";
Blockly.Msg.PROCEDURES_IFRETURN_TOOLTIP = "If a value is true, then return a second value.";
Blockly.Msg.PROCEDURES_IFRETURN_WARNING = "Warning: This block may be used only within a function definition.";
Blockly.Msg.PROCEDURES_MUTATORARG_TITLE = "input name:";
Blockly.Msg.PROCEDURES_MUTATORARG_TOOLTIP = "Add an input to the function.";
Blockly.Msg.PROCEDURES_MUTATORCONTAINER_TITLE = "inputs";
Blockly.Msg.PROCEDURES_MUTATORCONTAINER_TOOLTIP = "Add, remove, or reorder inputs to this function.";
Blockly.Msg.PROCEDURES_TITLE = "« procedure";
Blockly.Msg.PROCEDURES_VARIABLES_ERROR = "Error: This block may be used only within the »";
Blockly.Msg.PROCEDURES_VARIABLES_LOOP_ERROR = "Error: This block may be used only within a loop which declares ";
Blockly.Msg.PROGLIST_DELETE_ALL_TOOLTIP = "Click here to delete all selected programs.";
Blockly.Msg.PROGLIST_DELETE_TOOLTIP = "Click here to delete your program.";
Blockly.Msg.PROGLIST_LOAD_TOOLTIP = "Click here to load your robot configuration in the configuration environment.";
Blockly.Msg.PROGLIST_SHARE_TOOLTIP = "Click here to share your program with a friend.";
Blockly.Msg.PROGLIST_SHARE_WITH_GALLERY = "Do you really want to share your program with everybody? If you are not sure please check the question and answers <a href='https://www.roberta-home.de/index.php?id=138&L=1' target='_blank'>here</a>.";
Blockly.Msg.PROGLIST_SHARE_WITH_GALLERY_TOOLTIP = "Click here to upload your program to the gallery hence share it with all other users.";
Blockly.Msg.PROGRAM_ERROR_EXPRBLOCK_PARSE = "This expression is syntactically incorrect.";
Blockly.Msg.PROGRAM_ERROR_EXPRBLOCK_TYPECHECK = "This expression is invalid. The type check failed.";
Blockly.Msg.PULSEHIGH = "pulse time HIGH";
Blockly.Msg.PULSELOW = "pulse time LOW";
Blockly.Msg.PULSETOOLTIP = "Represents a pulse sensor.";
Blockly.Msg.RADIO_GET_RSSI_TOOLTIP = "Gets the RSSI of the last package.";
Blockly.Msg.REDO = "Redo";
Blockly.Msg.RELAY = "relay";
Blockly.Msg.RELAY_ARDUINO = "relay SRD-05VDC-SL-C";
Blockly.Msg.RELAY_TOOLTIP = "Represents a relay.";
Blockly.Msg.REMOVE_COMMENT = "Remove Comment";
Blockly.Msg.RENAME_VARIABLE = "Rename variable...";
Blockly.Msg.RENAME_VARIABLE_TITLE = "Rename all '%1' variables to:";
Blockly.Msg.RESEND_ACTIVATION = "resend verification email";
Blockly.Msg.RETURN = "return";
Blockly.Msg.RFID_TOOLTIP = "Represents an RFID reader.";
Blockly.Msg.RGBLED_TOOLTIP = "Represents an RGB LED.";
Blockly.Msg.RGB_LED_TOOLTIP = "Turns the LED on or off and changes the color.";
Blockly.Msg.RIGHT = "right";
Blockly.Msg.RIGHT_FRONT_RGBLED = "RGB LED right front";
Blockly.Msg.RIGHT_INFRARED_SENSOR = "infraredsensor right";
Blockly.Msg.RIGHT_LED = "LED right";
Blockly.Msg.RIGHT_MOTOR = "motor right";
Blockly.Msg.RIGHT_REAR_RGBLED = "RGB LED right rear";
Blockly.Msg.ROLL = "roll";
Blockly.Msg.ROTATIONS_PER_MINUTE = "rpm";
Blockly.Msg.SAY = "say";
// Fix typo: "charaters" -> "characters".
Blockly.Msg.SAY_PARAMETERS_TOOLTIP = "The robot says the given text. It is also possible to enter special characters. The robot will use the selected language to try and speak the entered text. Modify the speed (range: 0-100) and the pitch of the voice (range: 0-100) with the input fields";
// Fix typo: "charaters" -> "characters".
Blockly.Msg.SAY_TOOLTIP = "The robot says the given text. It is also possible to enter special characters. The robot will use the selected language to try and speak the entered text.";
Blockly.Msg.SDCARD_TOOLTIP = "Represents an SD card.";
Blockly.Msg.SEND_DATA = "data to send";
Blockly.Msg.SEND_DATA_SENSEMAP = "openSenseMap";
Blockly.Msg.SEND_DATA_TO = "send data to";
Blockly.Msg.SEND_DATA_TO_OSM_TOOLTIP = "This block is used to send data from sensors to the openSenseMap. Please configure the IDs in the configuration prior to the usage of this block.";
Blockly.Msg.SENSEBOXBRICK_TOOLTIP = "Represents the senseBox. Please configure here the id's from the openSenseMap, if you want to use them.";
Blockly.Msg.SENSOR_ACCELEROMETER = "accelerometer";
Blockly.Msg.SENSOR_ACCELEROMETER_X_GETSAMPLE_TOOLTIP_ARDUINO = "Returns the acceleration value on the X axis in g. Can be in range from -4 g to 4 g.";
Blockly.Msg.SENSOR_ACCELEROMETER_X_GETSAMPLE_TOOLTIP_SENSEBOX = "Returns the acceleration value on the X axis in g. Can be in range from -2 g to 2 g.";
Blockly.Msg.SENSOR_ACCELEROMETER_Y_GETSAMPLE_TOOLTIP_ARDUINO = "Returns the acceleration value on the Y axis in g. Can be in range from -4 g to 4 g.";
Blockly.Msg.SENSOR_ACCELEROMETER_Y_GETSAMPLE_TOOLTIP_SENSEBOX = "Returns the acceleration value on the Y axis in g. Can be in range from -2 g to 2 g.";
Blockly.Msg.SENSOR_ACCELEROMETER_Z_GETSAMPLE_TOOLTIP_ARDUINO = "Returns the acceleration value on the Z axis in g. Can be in range from -4 g to 4 g.";
Blockly.Msg.SENSOR_ACCELEROMETER_Z_GETSAMPLE_TOOLTIP_SENSEBOX = "Returns the acceleration value on the Z axis in g. Can be in range from -2 g to 2 g.";
Blockly.Msg.SENSOR_AMBIENTLIGHT = "ambientlight sensor";
Blockly.Msg.SENSOR_ANALOGOUT = "sensor analog";
Blockly.Msg.SENSOR_ANY = "anyplace";
Blockly.Msg.SENSOR_APDS9960 = "APDS9960";
Blockly.Msg.SENSOR_ARM_TOOLTIP = "Returns true, if the selected part of the arm is touched, otherwise false.";
Blockly.Msg.SENSOR_BATTERY = "battery charge";
Blockly.Msg.SENSOR_BOTTOM = "bottom";
Blockly.Msg.SENSOR_CALIBRATE = "calibrate";
Blockly.Msg.SENSOR_CODE = "code pad";
Blockly.Msg.SENSOR_COLOUR = "colour sensor";
Blockly.Msg.SENSOR_COLOURTCS3472 = "colour sensor TCS3472";
Blockly.Msg.SENSOR_COLOUR_EDISON = "line tracker";
Blockly.Msg.SENSOR_COMPASS = "compass sensor";
Blockly.Msg.SENSOR_COMPASS_EV3 = "HT compass sensor";
Blockly.Msg.SENSOR_DATA_READY = "ready?";
Blockly.Msg.SENSOR_DETECTFACE = "face detector";
Blockly.Msg.SENSOR_DETECTFACE_GETSAMPLE_TOOLTIP = "Detect a face previously learned and saved.";
Blockly.Msg.SENSOR_DETECTMARK = "NAO Mark sensor";
Blockly.Msg.SENSOR_DETECTMARK_GETSAMPLE_TOOLTIP = "Returns one or an array of IDs (number) of the last detected NAO Mark(s). If no marks are detected, -1 or an array consisting of 1 element with value -1 is returned. For a list of NAO marks with corresponding numbers refer to the wiki.";
Blockly.Msg.SENSOR_DIGITALOUT = "sensor digital";
Blockly.Msg.SENSOR_DROP = "drop sensor";
Blockly.Msg.SENSOR_DROP_GETSAMPLE_TOOLTIP = "Gets the current reading from the drop sensor in percent.";
Blockly.Msg.SENSOR_DROP_OFF = "drop-off sensor";
Blockly.Msg.SENSOR_ELECTRICCURRENT = "current sensor";
Blockly.Msg.SENSOR_ELECTRICCURRENT_GETSAMPLE_TOOLTIP = "Get the electric current from the motorboard in the selected joint.";
Blockly.Msg.SENSOR_ENCODER = "encoder";
Blockly.Msg.SENSOR_ENVIRONMENTAL = "environmental sensor";
Blockly.Msg.SENSOR_ENVIRONMENTAL_GETSAMPLE_TOOLTIP = "Returns values from the environmental sensor.";
Blockly.Msg.SENSOR_FLAME = "flame sensor";
Blockly.Msg.SENSOR_FSR = "force-sensing resistor";
Blockly.Msg.SENSOR_FSR_GETSAMPLE_TOOLTIP = "Get the current reading from the force sensitive resistor under the feet of the robot.";
Blockly.Msg.SENSOR_GESTURE = "gesture";
Blockly.Msg.SENSOR_GESTURE_ACTIVE = "active?";
Blockly.Msg.SENSOR_GESTURE_DOWN = "upside down";
Blockly.Msg.SENSOR_GESTURE_DOWN_GETSAMPLE_TOOLTIP = "Returns »true« if the posture is down.";
Blockly.Msg.SENSOR_GESTURE_FACE_DOWN = "at the front side";
Blockly.Msg.SENSOR_GESTURE_FACE_DOWN_GETSAMPLE_TOOLTIP = "Returns »true« if the posture is face down.";
Blockly.Msg.SENSOR_GESTURE_FACE_UP = "at the back";
Blockly.Msg.SENSOR_GESTURE_FACE_UP_GETSAMPLE_TOOLTIP = "Returns »true« if the posture is face up.";
Blockly.Msg.SENSOR_GESTURE_FREEFALL = "freely falling";
Blockly.Msg.SENSOR_GESTURE_FREEFALL_GETSAMPLE_TOOLTIP = "Returns »true« if it is free falling.";
Blockly.Msg.SENSOR_GESTURE_SHAKE = "shaking";
Blockly.Msg.SENSOR_GESTURE_SHAKE_GETSAMPLE_TOOLTIP = "Returns »true« if it is shaken.";
Blockly.Msg.SENSOR_GESTURE_TOOLTIP = "Is the system in the selected state?";
Blockly.Msg.SENSOR_GESTURE_UP = "upright";
Blockly.Msg.SENSOR_GESTURE_UP_GETSAMPLE_TOOLTIP = "Returns »true« if the posture is upright.";
Blockly.Msg.SENSOR_GET = "get";
Blockly.Msg.SENSOR_GET_SAMPLE = "get value";
Blockly.Msg.SENSOR_GPS_ALTITUDE_GETSAMPLE_TOOLTIP_SENSEBOX = "Returns the altitude in meters.";
Blockly.Msg.SENSOR_GPS_DATE_GETSAMPLE_TOOLTIP_SENSEBOX = "Returns the current date.";
Blockly.Msg.SENSOR_GPS_LATITUDE_GETSAMPLE_TOOLTIP_SENSEBOX = "Returns the latitude in degree.";
Blockly.Msg.SENSOR_GPS_LONGITUDE_GETSAMPLE_TOOLTIP_SENSEBOX = "Returns the longitude in degree.";
Blockly.Msg.SENSOR_GPS_SENSEBOX = "GPS receiver";
Blockly.Msg.SENSOR_GPS_SPEED_GETSAMPLE_TOOLTIP_SENSEBOX = "Returns the speed in km/h.";
Blockly.Msg.SENSOR_GPS_TIME_GETSAMPLE_TOOLTIP_SENSEBOX = "Returns the current time.";
Blockly.Msg.SENSOR_GROVE = "Grove";
Blockly.Msg.SENSOR_GYRO = "gyroscope";
// Fix garbled grammar: missing possessive and preposition.
Blockly.Msg.SENSOR_GYRO_TILTED_GETSAMPLE_TOOLTIP_WEDO = "Is the tilt sensor's position in the indicated manner?";
Blockly.Msg.SENSOR_GYRO_WEDO = "tilt sensor";
Blockly.Msg.SENSOR_GYRO_X_GETSAMPLE_TOOLTIP_ARDUINO = "Returns the gyroscope output on the X axis in degrees per second. Can be in range from -2000 °/s to 2000 °/s.";
Blockly.Msg.SENSOR_GYRO_Y_GETSAMPLE_TOOLTIP_ARDUINO = "Returns the gyroscope output on the Y axis in degrees per second. Can be in range from -2000 °/s to 2000 °/s.";
Blockly.Msg.SENSOR_GYRO_Z_GETSAMPLE_TOOLTIP_ARDUINO = "Returns the gyroscope output on the Z axis in degrees per second. Can be in range from -2000 °/s to 2000 °/s.";
Blockly.Msg.SENSOR_HTCOLOUR = "HT colour sensor";
Blockly.Msg.SENSOR_HTS221 = "HTS221";
Blockly.Msg.SENSOR_HUMIDITY = "humidity sensor";
Blockly.Msg.SENSOR_HUMIDITY_ARDUINO = "humidity sensor DHT11";
Blockly.Msg.SENSOR_HUMIDITY_GETSAMPLE_TOOLTIP = "Gets the current reading from the humidity sensor in percent.";
Blockly.Msg.SENSOR_HUMIDITY_SENSEBOX = "humidity/temperature sensor HDC1080";
Blockly.Msg.SENSOR_INFRARED = "infrared sensor";
Blockly.Msg.SENSOR_INFRARED_DISTANCE_GETSAMPLE_TOOLTIP_WEDO = "Gets the current relative distance from the infrared sensor. The values are between 1, close, and 10, further away.";
Blockly.Msg.SENSOR_IRSEEKER = "HT infrared sensor";
Blockly.Msg.SENSOR_IRSEEKER_EDISON = "IR seeker";
Blockly.Msg.SENSOR_IRSEEKER_GETSAMPLE_TOOLTIP = "Gets the position of an infrared beacon.";
Blockly.Msg.SENSOR_IRSEEKER_RCCODE_GETSAMPLE_TOOLTIP_EDISON = "Receives a message from a remote control.";
Blockly.Msg.SENSOR_IS_ARM = "is arm";
Blockly.Msg.SENSOR_IS_PIN = "is";
Blockly.Msg.SENSOR_IS_PRESSED = "pressed?";
Blockly.Msg.SENSOR_IS_TILTED = "tilted?";
Blockly.Msg.SENSOR_IS_TOUCHED = "touched?";
Blockly.Msg.SENSOR_JOYSTICK = "joystick";
Blockly.Msg.SENSOR_KEY = "button";
Blockly.Msg.SENSOR_KEYPAD = "keypad";
Blockly.Msg.SENSOR_KEY_ANY = "any";
Blockly.Msg.SENSOR_KEY_DOWN = "down";
Blockly.Msg.SENSOR_KEY_ENTER = "enter";
Blockly.Msg.SENSOR_KEY_ESCAPE = "escape";
Blockly.Msg.SENSOR_KEY_LEFT = "left";
Blockly.Msg.SENSOR_KEY_PLAY = "play";
Blockly.Msg.SENSOR_KEY_REC = "record";
Blockly.Msg.SENSOR_KEY_RIGHT = "right";
Blockly.Msg.SENSOR_KEY_STOP = "stop";
Blockly.Msg.SENSOR_KEY_UP = "up";
Blockly.Msg.SENSOR_LIGHT = "light sensor";
Blockly.Msg.SENSOR_LIGHTVEML = "visible/UV light sensor";
Blockly.Msg.SENSOR_LIGHTVEML_LIGHT_GETSAMPLE_TOOLTIP = "Reads the current value of the visible light sensor in lux. The value is between 0 to 220000 lux.";
Blockly.Msg.SENSOR_LIGHTVEML_UVLIGHT_GETSAMPLE_TOOLTIP = "Reads the current value of the UV light sensor in μW/cm². The value is between 0 and more than 1000.";
Blockly.Msg.SENSOR_LIGHT_LIGHT_GETSAMPLE_TOOLTIP_EDISON = "Gets the current reading of the light sensor";
Blockly.Msg.SENSOR_LIGHT_LINETRACKER = "line tracker";
Blockly.Msg.SENSOR_LPS22HB = "LPS22HB";
Blockly.Msg.SENSOR_LSM9DS1 = "LSM9DS1";
Blockly.Msg.SENSOR_MIC = "microphone";
Blockly.Msg.SENSOR_MOISTURE = "moisture sensor";
Blockly.Msg.SENSOR_MOISTURE_GETSAMPLE_TOOLTIP = "Gets the current reading from the moisture sensor in percent.";
Blockly.Msg.SENSOR_MOTION = "motion sensor";
Blockly.Msg.SENSOR_MOTION_ARDUINO = "motion sensor HC-SR501";
Blockly.Msg.SENSOR_MOTION_GETSAMPLE_TOOLTIP = "Gets the current reading from the motion sensor (true/false).";
Blockly.Msg.SENSOR_MS_TIMER = "in ms";
Blockly.Msg.SENSOR_OBSTACLEDETECTOR = "Obstacle detector";
Blockly.Msg.SENSOR_OUT = "sensor";
Blockly.Msg.SENSOR_OUT_ANALOG_GETSAMPLE_TOOLTIP = "Returns the value from the specified analog pin. The value is between 0 and 1024.";
Blockly.Msg.SENSOR_OUT_DIGITAL_GETSAMPLE_TOOLTIP = "Returns the value from the specified digital pin. The value is either HIGH »1« or LOW »0«.";
Blockly.Msg.SENSOR_PARTICLE = "SDS011 particle sensor";
Blockly.Msg.SENSOR_PARTICLE_PM10_GETSAMPLE_TOOLTIP = "Reads the current value of PM10 from the particle sensor.";
Blockly.Msg.SENSOR_PARTICLE_PM25_GETSAMPLE_TOOLTIP = "Reads the current value of PM2.5 from the particle sensor.";
Blockly.Msg.SENSOR_PIN = "pin";
Blockly.Msg.SENSOR_PINTOUCH = "pin";
Blockly.Msg.SENSOR_PINTOUCH_BOB3 = "arm";
Blockly.Msg.SENSOR_PIN_ANALOG_GETSAMPLE_TOOLTIP = "Returns the value from the specified analog pin. The value is between 0 and 1024.";
Blockly.Msg.SENSOR_PIN_DIGITAL_GETSAMPLE_TOOLTIP = "Returns the value from the specified digital pin. The value is either HIGH »1« or LOW »0«.";
Blockly.Msg.SENSOR_PIN_PULSEHIGH_GETSAMPLE_TOOLTIP = "Returns the pulse HIGH on a pin in microseconds or -1 if no complete pulse was received within the timeout.";
Blockly.Msg.SENSOR_PIN_PULSELOW_GETSAMPLE_TOOLTIP = "Returns the pulse LOW on a pin in microseconds or -1 if no complete pulse was received within the timeout.";
Blockly.Msg.SENSOR_POTENTIOMETER = "potentiometer";
Blockly.Msg.SENSOR_POTENTIOMETER_GETSAMPLE_TOOLTIP = "Gets the current reading from the potentiometer in volts (0-5V).";
Blockly.Msg.SENSOR_PULSE = "pulse sensor";
Blockly.Msg.SENSOR_PULSE_GETSAMPLE_TOOLTIP = "Gets the current reading from the pulse sensor.";
Blockly.Msg.SENSOR_PULSE_GETSAMPLE_TOOLTIP_ARDUINO = "Gets the current reading from the pulse sensor. The value is between 0 and 1023.";
Blockly.Msg.SENSOR_RADIO_RSSI = "radio RSSI sensor";
Blockly.Msg.SENSOR_RESET = "reset";
Blockly.Msg.SENSOR_RESET_II = "";
Blockly.Msg.SENSOR_RESET_TOOLTIP_EDISON = "Resets the sensors and clears the values.";
Blockly.Msg.SENSOR_RFID = "RFID reader";
Blockly.Msg.SENSOR_RFID_ARDUINO = "RFID-RC522 reader";
Blockly.Msg.SENSOR_RFID_GETSAMPLE_TOOLTIP = "Gets the current reading from the RFID reader.";
Blockly.Msg.SENSOR_RSSI = "signal strength";
Blockly.Msg.SENSOR_RSSI_VALUE_GETSAMPLE_TOOLTIP = "Gets the signal strength from the last received message.";
Blockly.Msg.SENSOR_SONAR = "sonar";
Blockly.Msg.SENSOR_SOUND = "sound sensor";
Blockly.Msg.SENSOR_SOUND_CALLIOPE = "microphone";
Blockly.Msg.SENSOR_SOUND_CLAP_GETSAMPLE_TOOLTIP_EDISON = "Is a clap detected?";
Blockly.Msg.SENSOR_TEMPERATURE = "temperature sensor";
Blockly.Msg.SENSOR_TEMPERATURE_ARDUINO = "temperature sensor TMP36";
Blockly.Msg.SENSOR_TEMPERATURE_PRESSURE_GETSAMPLE_TOOLTIP = "Gets the current reading of the air pressure sensor in Pascal. Normal air pressure is 101325 Pascal.";
Blockly.Msg.SENSOR_TEMPERATURE_SENSEBOX = "temperature/pressure sensor BMP280";
Blockly.Msg.SENSOR_TIME = "time";
Blockly.Msg.SENSOR_TIMER = "timer";
Blockly.Msg.SENSOR_TOP = "top";
Blockly.Msg.SENSOR_TOUCH = "touch sensor";
Blockly.Msg.SENSOR_ULTRASONIC = "ultrasonic sensor";
Blockly.Msg.SENSOR_ULTRASONIC_ARDUINO = "ultrasonic sensor HC-SR04";
// Fix typo: "messure" -> "measure".
Blockly.Msg.SENSOR_ULTRASONIC_DISTANCE_GETSAMPLE_TOOLTIP_ARDUINO = "Gets the current reading from the ultrasonic sensor in cm. Maximum distance to measure is 400 cm.";
// Fix typos: "messure" -> "measure", "tp measure" -> "to measure".
Blockly.Msg.SENSOR_ULTRASONIC_DISTANCE_GETSAMPLE_TOOLTIP_MBOT = "Gets the current reading from the ultrasonic sensor in cm. Maximum distance to measure is 400 cm. Minimum distance to measure is 3 cm.";
Blockly.Msg.SENSOR_UNIT_ACCEL = "m/s²";
Blockly.Msg.SENSOR_UNIT_AMPERE = "A";
Blockly.Msg.SENSOR_UNIT_CM = "cm";
Blockly.Msg.SENSOR_UNIT_DEGREE = "°";
Blockly.Msg.SENSOR_UNIT_DEGREE_CELSIUS = "°C";
Blockly.Msg.SENSOR_UNIT_DEGREE_PER_SECOND = "°/s";
Blockly.Msg.SENSOR_UNIT_GAUSS = "Gauss";
Blockly.Msg.SENSOR_UNIT_INDEX = "#";
Blockly.Msg.SENSOR_UNIT_IRRADIANCE = "μW/cm²";
Blockly.Msg.SENSOR_UNIT_LUX = "lx";
Blockly.Msg.SENSOR_UNIT_METER = "m";
Blockly.Msg.SENSOR_UNIT_MILLIG = "milli-g";
Blockly.Msg.SENSOR_UNIT_MM = "mm";
Blockly.Msg.SENSOR_UNIT_MS = "ms";
Blockly.Msg.SENSOR_UNIT_NEWTON = "N";
Blockly.Msg.SENSOR_UNIT_OMEGA = "ω";
Blockly.Msg.SENSOR_UNIT_PASCAL = "hPa";
Blockly.Msg.SENSOR_UNIT_PERCENT = "%";
Blockly.Msg.SENSOR_UNIT_PM = "μg/m³";
Blockly.Msg.SENSOR_UNIT_SPEED = "km/h";
Blockly.Msg.SENSOR_UNIT_VOLT = "V";
Blockly.Msg.SENSOR_VALUE = "value";
Blockly.Msg.SENSOR_WALL = "wall sensor";
Blockly.Msg.SERVER_NOT_AVAILABLE = "The Open Roberta Lab is currently not available. </br>Your internet connection was interrupted. If you currently have an EV3 connected via the USB cable solve this with a click on the programming environment.<br>If this problem persists please check your internet connection and try to reload this website. If you have this problem again, please don't hesitate to contact us via mail:</br>support-o-r@iais.fraunhofer.de";
Blockly.Msg.SERVO_MOTOR = "servo motor";
Blockly.Msg.SERVO_MOTOR_ARDUINO = "servo motor SG90";
Blockly.Msg.SERVO_S1 = "servo motor S1";
Blockly.Msg.SERVO_S2 = "servo motor S2";
Blockly.Msg.SERVO_TOOLTIP = "Represents a servo motor.";
Blockly.Msg.SET = "set";
Blockly.Msg.SETLANGUAGE_TOOLTIP = "Set the language.";
Blockly.Msg.SET_LED = "turn LED";
Blockly.Msg.SET_RELAY = "turn relay";
Blockly.Msg.SET_RELAY_ARDUINO = "turn relay SRD-05VDC-SL-C";
Blockly.Msg.SET_RELAY_TOOLTIP = "Turns the relay on or off";
Blockly.Msg.SET_RGB_LED = "turn RGB LED";
Blockly.Msg.SHARE_PROGRAMS_USERGROUP_HINT = "To share programs with a user group, open your program list and share a program with a user group.";
Blockly.Msg.SHARE_WITH_USER = "Share your program with another user ...";
Blockly.Msg.SHARE_WITH_USERGROUP = "Share your program with the members of one of your user groups ...";
Blockly.Msg.SIDE_BRUSH_OFF = "turn side brush off";
Blockly.Msg.SIDE_BRUSH_OFF_TOOLTIP = "Turns the side brush off.";
Blockly.Msg.SIDE_BRUSH_STATUS = "turn side brush";
// Fix stray space before the period in the user-facing tooltip.
Blockly.Msg.SIDE_BRUSH_TOOLTIP = "Turns the side brush on or off.";
Blockly.Msg.SIM_BLOCK_NOT_SUPPORTED = "Unfortunately you can not use this block in the simulation!";
Blockly.Msg.SIM_CONFIGURATION_WARNING_SENSOR_MISSING = "This sensor is not in the configuration! The behaviour of the program on the real robot will be different than expected!";
Blockly.Msg.SIM_CONFIGURATION_WARNING_WRONG_INFRARED_SENSOR_PORT = "The infrared sensor is not connected in the configuration! The behaviour of the program on the real robot will be different than expected, please check the robot configuration!";
Blockly.Msg.SIM_CONFIGURATION_WARNING_WRONG_SENSOR_PORT = "In this block you have used a wrong sensor port! The behaviour of the program on the real robot will be different than expected, please check the robot configuration!";
// Fix spelling in user-facing tooltip: "positiv"/"negativ" -> "positive"/"negative".
Blockly.Msg.SINGLE_MOTOR_ON_TOOLTIP_CALLIOPE = "Turns the single motor on with a specific power. Power can be positive or negative for reverse direction.";
Blockly.Msg.SLEEP = "sleep";
Blockly.Msg.SLOT_FRONT = "front";
Blockly.Msg.SLOT_LEFT = "left";
Blockly.Msg.SLOT_LEFT_PITCH = "left pitch";
Blockly.Msg.SLOT_LEFT_ROLL = "left roll";
Blockly.Msg.SLOT_LEFT_YAW = "left yaw";
Blockly.Msg.SLOT_LEFT_YAW_PITCH = "left yaw pitch";
Blockly.Msg.SLOT_MIDDLE = "middle";
Blockly.Msg.SLOT_PITCH = "pitch";
Blockly.Msg.SLOT_REAR = "rear";
Blockly.Msg.SLOT_RIGHT = "right";
Blockly.Msg.SLOT_RIGHT_PITCH = "right pitch";
Blockly.Msg.SLOT_RIGHT_ROLL = "right roll";
Blockly.Msg.SLOT_RIGHT_YAW = "right yaw";
Blockly.Msg.SLOT_RIGHT_YAW_PITCH = "right yaw pitch";
Blockly.Msg.SLOT_SIDE = "side";
Blockly.Msg.SLOT_TILTED_ANY = "any";
Blockly.Msg.SLOT_TILTED_BACK = "back";
Blockly.Msg.SLOT_TILTED_DOWN = "down";
Blockly.Msg.SLOT_TILTED_FRONT = "front";
Blockly.Msg.SLOT_TILTED_NO = "not";
Blockly.Msg.SLOT_TILTED_UP = "up";
Blockly.Msg.SLOT_YAW = "yaw";
Blockly.Msg.SOCKET = "socket";
Blockly.Msg.SOUND_GETSAMPLE_TOOLTIP = "Gets the current reading from the sound sensor in the range 0 to 100. 0 is quiet and 100 is loud.";
Blockly.Msg.SOUND_TOOLTIP = "Represents a sound sensor.";
Blockly.Msg.SOURCE_CODE_EDITOR_BUILD_TOOLTIP = "Build the source code.";
Blockly.Msg.SOURCE_CODE_EDITOR_CLOSE_CONFIRMATION = "All your changes will be lost!";
Blockly.Msg.SOURCE_CODE_EDITOR_IMPORT_TOOLTIP = "Import current NEPO program.";
Blockly.Msg.SOURCE_CODE_EDITOR_PLACEHOLDER = "Import current NEPO program or just start typing";
Blockly.Msg.SOURCE_CODE_EDITOR_RUN_TOOLTIP = "Run on the robot.";
Blockly.Msg.SOURCE_CODE_EDITOR_UPLOAD_TOOLTIP = "Upload the source code.";
Blockly.Msg.SSD1306I2C_TOOLTIP = "Represents an I²C OLED SSD1306 display (addresses 128x64: 0x3D, 128x32: 0x3C)";
Blockly.Msg.SSID = "SSID";
Blockly.Msg.START = "start";
Blockly.Msg.START_ACTIVITY = "activity";
Blockly.Msg.START_ACTIVITY_TOOLTIP = "Starts additional activity.";
Blockly.Msg.START_PROGRAM = "start";
Blockly.Msg.START_PROGRAM_AUTOMOMOUSLIFE = "autonomous behaviour";
Blockly.Msg.START_PROGRAM_DEBUG = "show sensor data";
Blockly.Msg.START_TOOLTIP = "The starting point for the main program.";
Blockly.Msg.STEPMOTOR_TOOLTIP = "Represents a step motor.";
Blockly.Msg.STEPS_PER_REVOLUTION = "steps per revolution";
Blockly.Msg.STEP_MOTOR = "step motor";
Blockly.Msg.STEP_MOTOR_ARDUINO = "step motor 28BYJ-48";
Blockly.Msg.STEP_MOTOR_ON_TOOLTIP = "Turns motor on with a provided speed in rpm and stops motor after execution of provided number of rotations.";
Blockly.Msg.STRENGTH = "strength";
Blockly.Msg.SWITCH = "switch";
Blockly.Msg.SWITCH_LED_MATRIX_TOOLTIP = "Enables/Disables the LED matrix to use the dual purpose pins.";
Blockly.Msg.TAB_CONFIGURATION = "Robot configuration";
Blockly.Msg.TAB_PROGRAM = "Program";
Blockly.Msg.TEMPERATURE_GETSAMPLE_TOOLTIP = "Gets the current reading from the temperature sensor in °C.";
Blockly.Msg.TEMPERATURE_TOOLTIP = "Represents a temperature sensor.";
Blockly.Msg.TEXT_APPEND_APPENDTEXT = "append text";
Blockly.Msg.TEXT_APPEND_HELPURL = "https://github.com/google/blockly/wiki/Text#text-modification";
Blockly.Msg.TEXT_APPEND_TO = "to";
Blockly.Msg.TEXT_APPEND_TOOLTIP = "Append some text to the first text.";
Blockly.Msg.TEXT_CAST_CHAR_TONUMBER = "cast %1 at index %2 to Number";
Blockly.Msg.TEXT_CAST_CHAR_TONUMBER_TOOLTIP = "Convert the character at the given position in this string into the corresponding ASCII number. The first character in the string is at position 0!";
Blockly.Msg.TEXT_CAST_STRING_TONUMBER = "cast %1 to Number";
Blockly.Msg.TEXT_CAST_STRING_TONUMBER_TOOLTIP = "Convert this string into a number, if it contains a number.";
Blockly.Msg.TEXT_CHANGECASE_HELPURL = "https://github.com/google/blockly/wiki/Text#adjusting-text-case";
Blockly.Msg.TEXT_CHANGECASE_OPERATOR_LOWERCASE = "to lower case";
Blockly.Msg.TEXT_CHANGECASE_OPERATOR_TITLECASE = "to Title Case";
Blockly.Msg.TEXT_CHANGECASE_OPERATOR_UPPERCASE = "to UPPER CASE";
Blockly.Msg.TEXT_CHANGECASE_TOOLTIP = "Return a copy of the text in a different case.";
Blockly.Msg.TEXT_CHARAT_FIRST = "get first letter";
Blockly.Msg.TEXT_CHARAT_FROM_END = "get letter # from end";
Blockly.Msg.TEXT_CHARAT_FROM_START = "get letter #";
Blockly.Msg.TEXT_CHARAT_HELPURL = "https://github.com/google/blockly/wiki/Text#extracting-text";
Blockly.Msg.TEXT_CHARAT_INPUT_INTEXT = "in text";
Blockly.Msg.TEXT_CHARAT_LAST = "get last letter";
Blockly.Msg.TEXT_CHARAT_RANDOM = "get random letter";
Blockly.Msg.TEXT_CHARAT_TAIL = "";
Blockly.Msg.TEXT_CHARAT_TOOLTIP = "Returns the letter at the specified position.";
Blockly.Msg.TEXT_COMMENT_TOOLTIP = "Write a comment to your code here, to make it easier for you and others to read and understand your code. It will also generate an inline comment in the generated source code";
Blockly.Msg.TEXT_CREATE_JOIN_ITEM_TOOLTIP = "Add an item to the text.";
Blockly.Msg.TEXT_CREATE_JOIN_TITLE_JOIN = "join";
Blockly.Msg.TEXT_CREATE_JOIN_TOOLTIP = "Add, remove, or reorder sections to reconfigure this text block.";
Blockly.Msg.TEXT_GET_SUBSTRING_END_FROM_END = "to letter # from end";
Blockly.Msg.TEXT_GET_SUBSTRING_END_FROM_START = "to letter #";
Blockly.Msg.TEXT_GET_SUBSTRING_END_LAST = "to last letter";
Blockly.Msg.TEXT_GET_SUBSTRING_HELPURL = "https://github.com/google/blockly/wiki/Text#extracting-a-region-of-text";
Blockly.Msg.TEXT_GET_SUBSTRING_INPUT_IN_TEXT = "in text";
Blockly.Msg.TEXT_GET_SUBSTRING_START_FIRST = "get substring from first letter";
Blockly.Msg.TEXT_GET_SUBSTRING_START_FROM_END = "get substring from letter # from end";
Blockly.Msg.TEXT_GET_SUBSTRING_START_FROM_START = "get substring from letter #";
Blockly.Msg.TEXT_GET_SUBSTRING_TAIL = "";
Blockly.Msg.TEXT_GET_SUBSTRING_TOOLTIP = "Returns a specified portion of the text.";
Blockly.Msg.TEXT_INDEXOF_HELPURL = "https://github.com/google/blockly/wiki/Text#finding-text";
Blockly.Msg.TEXT_INDEXOF_INPUT_INTEXT = "in text";
Blockly.Msg.TEXT_INDEXOF_OPERATOR_FIRST = "find first occurrence of text";
Blockly.Msg.TEXT_INDEXOF_OPERATOR_LAST = "find last occurrence of text";
Blockly.Msg.TEXT_INDEXOF_TAIL = "";
Blockly.Msg.TEXT_INDEXOF_TOOLTIP = "Returns the index of the first/last occurrence of the first text in the second text. Returns 0 if text is not found.";
Blockly.Msg.TEXT_ISEMPTY_HELPURL = "https://github.com/google/blockly/wiki/Text#checking-for-empty-text";
Blockly.Msg.TEXT_ISEMPTY_TITLE = "%1 is empty";
Blockly.Msg.TEXT_ISEMPTY_TOOLTIP = "Returns true if the provided text is empty.";
Blockly.Msg.TEXT_JOIN_HELPURL = "https://github.com/google/blockly/wiki/Text#text-creation";
Blockly.Msg.TEXT_JOIN_TITLE_CREATEWITH = "create text with";
Blockly.Msg.TEXT_JOIN_TOOLTIP = "Create a piece of text by joining together any number of items.";
Blockly.Msg.TEXT_LENGTH_HELPURL = "https://github.com/google/blockly/wiki/Text#text-modification";
Blockly.Msg.TEXT_LENGTH_TITLE = "length of %1";
Blockly.Msg.TEXT_LENGTH_TOOLTIP = "Returns the number of letters (including spaces) in the provided text.";
Blockly.Msg.TEXT_PRINT_HELPURL = "https://github.com/google/blockly/wiki/Text#printing-text";
Blockly.Msg.TEXT_PRINT_TITLE = "print %1";
Blockly.Msg.TEXT_PRINT_TOOLTIP = "Print the specified text, number or other value.";
Blockly.Msg.TEXT_PROMPT_HELPURL = "https://github.com/google/blockly/wiki/Text#getting-input-from-the-user";
Blockly.Msg.TEXT_PROMPT_TOOLTIP_NUMBER = "Prompt for user for a number.";
Blockly.Msg.TEXT_PROMPT_TOOLTIP_TEXT = "Prompt for user for some text.";
Blockly.Msg.TEXT_PROMPT_TYPE_NUMBER = "prompt for number with message";
Blockly.Msg.TEXT_PROMPT_TYPE_TEXT = "prompt for text with message";
Blockly.Msg.TEXT_TEXT_HELPURL = "https://en.wikipedia.org/wiki/String_(computer_science)";
Blockly.Msg.TEXT_TEXT_TOOLTIP = "A letter, word, or line of text.";
Blockly.Msg.TEXT_TRIM_HELPURL = "https://github.com/google/blockly/wiki/Text#trimming-removing-spaces";
Blockly.Msg.TEXT_TRIM_OPERATOR_BOTH = "trim spaces from both sides of";
Blockly.Msg.TEXT_TRIM_OPERATOR_LEFT = "trim spaces from left side of";
Blockly.Msg.TEXT_TRIM_OPERATOR_RIGHT = "trim spaces from right side of";
Blockly.Msg.TEXT_TRIM_TOOLTIP = "Return a copy of the text with spaces removed from one or both ends.";
Blockly.Msg.THETA = "theta";
Blockly.Msg.TIMEOUT = "timeout";
Blockly.Msg.TIMER_GETSAMPLE_TOOLTIP = "Gets the current reading from the timer.";
Blockly.Msg.TIMER_RESET_TOOLTIP = "Resets the timer.";
Blockly.Msg.TITLE = "title";
Blockly.Msg.TO = "to";
Blockly.Msg.TODAY = "Today";
Blockly.Msg.TOOLBOX_ACTION = "Action";
Blockly.Msg.TOOLBOX_ANIMATION = "Animation";
Blockly.Msg.TOOLBOX_CLEANING = "Cleaning";
Blockly.Msg.TOOLBOX_COLOUR = "Colours";
Blockly.Msg.TOOLBOX_COMMUNICATION = "Messages";
Blockly.Msg.TOOLBOX_CONTROL = "Control";
Blockly.Msg.TOOLBOX_DECISION = "Decisions";
Blockly.Msg.TOOLBOX_DISPLAY = "Display";
Blockly.Msg.TOOLBOX_DRIVE = "Drive";
Blockly.Msg.TOOLBOX_IMAGE = "Images";
Blockly.Msg.TOOLBOX_INTEGRATED_SENSOR = "Integrated Sensors";
Blockly.Msg.TOOLBOX_LIGHT = "Lights";
Blockly.Msg.TOOLBOX_LIST = "Lists";
Blockly.Msg.TOOLBOX_LOGIC = "Logic";
Blockly.Msg.TOOLBOX_LOOP = "Loops";
Blockly.Msg.TOOLBOX_MATH = "Math";
Blockly.Msg.TOOLBOX_MOVE = "Move";
Blockly.Msg.TOOLBOX_NEURAL_NETWORK = "Neural Network";
Blockly.Msg.TOOLBOX_PIN = "Pin";
Blockly.Msg.TOOLBOX_PROCEDURE = "Functions";
Blockly.Msg.TOOLBOX_SENSOR = "Sensors";
Blockly.Msg.TOOLBOX_SOUND = "Sounds";
Blockly.Msg.TOOLBOX_TEXT = "Text";
Blockly.Msg.TOOLBOX_VARIABLE = "Variables";
Blockly.Msg.TOOLBOX_VISION = "Vision";
Blockly.Msg.TOOLBOX_WAIT = "Wait";
Blockly.Msg.TOOLBOX_WALK = "Walk";
Blockly.Msg.TOUCH_ISPRESSED_TOOLTIP = "Is the touch sensor pressed?";
Blockly.Msg.TOUCH_TOOLTIP = "Is the touch sensor pressed?";
Blockly.Msg.TOUR1_DESCRIPTION00 = "next";
Blockly.Msg.TOUR1_DESCRIPTION01 = "Welcome,<br>click on »next« and start a tour through the Open Roberta Lab.<br>I'll show you how you can program your robot with NEPO.<br>You can end this tour all the time with a click on »X« in the right upper corner.";
Blockly.Msg.TOUR1_DESCRIPTION02 = "The menu ;-)";
Blockly.Msg.TOUR1_DESCRIPTION03 = "Here you can find everything you need to program the robot!<br><span class='typcn typcn-document-text'></span> Save and load programs<br><span class='typcn typcn-ev3'></span> Connect your robot to the Open Roberta Lab or switch to another system<br><span class='typcn typcn-lightbulb'></span> Help for all kinds of problems<br><span class='typcn typcn-user'></span>Everything about user accounts, e.g. login or logoff<br><span class='typcn typcn-th-large-outline'></span> Look at the gallery and try great programs from other users!<br><span class='typcn typcn-world'></span> Click here to switch to another language";
Blockly.Msg.TOUR1_DESCRIPTION04 = "Click here and switch to the robot configuration";
Blockly.Msg.TOUR1_DESCRIPTION05 = "You can adapt the robot configuration later,<br>if you've constructed another robot.<br>If you don't have a robot and want to test your program in the simulation <br>this configuration is applied.";
Blockly.Msg.TOUR1_DESCRIPTION06 = "Click here and switch back to the programming area!";
Blockly.Msg.TOUR1_DESCRIPTION07 = "The toolbox.<br>Here you can find all programming blocks.";
Blockly.Msg.TOUR1_DESCRIPTION07a = "Once you are an experienced user you can switch here to the expert mode of the toolbox.<br>You will then find more blocks to program your robot.<br><span class='typcn typcn-media-stop-outline'></span> beginner mode<br><span class='typcn typcn-star-outline'></span> expert mode";
Blockly.Msg.TOUR1_DESCRIPTION08 = "Each program starts with the »program start« block.<br>Further programming blocks, which the robot should execute, should be attached to this block.<br>Just drag'n drop the desired block right under the start block.";
Blockly.Msg.TOUR1_DESCRIPTION09 = "Here you find some shortcuts!<br>From left to right:<br><span class='typcn typcn-media-play'></span> Click here to execute the program on the real robot<br><span class='typcn typcn-cloud-storage'></span> Save your program, but before that you need to sign in<br><span class='typcn typcn-zoom'></span> Click here and zoom in on the blocks, if you want to<br><span class='typcn typcn-archive'></span> The trashcan! Just move blocks into the trashcan and they will be deleted.<br> Attention: You cannot recover blocks once they are deleted!<br>";
Blockly.Msg.TOUR1_DESCRIPTION10 = "Click on »Action«";
Blockly.Msg.TOUR1_DESCRIPTION12 = "Now drag'n drop the new block just below the start-program-block so that they are connected";
Blockly.Msg.TOUR1_DESCRIPTION13 = "Now open the simulation ...";
Blockly.Msg.TOUR1_DESCRIPTION13a = "and test your program in the simulation!";
Blockly.Msg.TOUR1_DESCRIPTION15 = "Perfect,<br>your robot is driving 20 cm forward,<br>this is what you have programmed!";
Blockly.Msg.TOUR1_DESCRIPTION16 = "Bye and have fun trying out more!";
Blockly.Msg.ULTRASONIC_GETSAMPLE_TOOLTIP = "Gets the current reading from the ultrasonic sensor in cm. Maximum distance to measure is 255 cm.";
Blockly.Msg.ULTRASONIC_TOOLTIP = "Represents an ultrasonic sensor.";
Blockly.Msg.UNDO = "Undo";
Blockly.Msg.USERGROUP_ADD_MEMBERS = "Add members";
Blockly.Msg.USERGROUP_CREATE = "Create new user group";
Blockly.Msg.USERGROUP_CREATE_TOOLTIP = "Click here to create a new usergroup";
// Fix grammar in user-facing warning: "Are your sure" -> "Are you sure",
// "No member did log in" -> "No member has logged in".
Blockly.Msg.USERGROUP_DELETE_WITH_MEMBERS_WARNING = "Are you sure that you want to delete the usergroup including all members? No member has logged in so far.";
Blockly.Msg.USERGROUP_GENERATED_MEMBERS = "Automatically generated members";
// Fix typo in user-facing tooltip: "genertated" -> "generated".
Blockly.Msg.USERGROUP_GENERATED_MEMBERS_TOOLTIP = "Automatically generated members are normal members of a user group. They are generated with a number as username, which you can change afterwards.";
Blockly.Msg.USERGROUP_LIST_DELETE_ALL_TOOLTIP = "Click here to delete all selected user groups.";
Blockly.Msg.USERGROUP_LIST_DELETE_TOOLTIP = "Click here to delete the user group.";
Blockly.Msg.USERGROUP_LOGIN_LOST_PASSWORD_HINT = "If you forgot your password, you can ask the owner of your user group to reset the password to the initial password for you. It will be the password you had to enter when you logged in first.";
Blockly.Msg.USERGROUP_MEMBER_CREATE_TOOLTIP = "Click here to add members to your usergroup.";
Blockly.Msg.USERGROUP_MEMBER_DEFAULT_PASSWORD_HINT = "The password for your account is not safe. Set your own password, so that only you can login to your account.";
Blockly.Msg.USERGROUP_MEMBER_LIST_DELETE_ALL_TOOLTIP = "Click here to delete all selected user group members.";
Blockly.Msg.USERGROUP_MEMBER_LIST_DELETE_TOOLTIP = "Click here to delete this user group member.";
Blockly.Msg.USERGROUP_MEMBER_LIST_RESET_ALL_PASSWORDS_TOOLTIP = "Click here to reset the password of all selected user group members to their corresponding default password.";
Blockly.Msg.USERGROUP_MEMBER_LIST_RESET_PASSWORD_TOOLTIP = "Click here to reset the password of this user group member to the default password.";
Blockly.Msg.USERGROUP_NAME_HINT = "The user group to which you want to log in has a name. Enter it here.";
Blockly.Msg.VACUUM_OFF = "turn vacuum off";
Blockly.Msg.VACUUM_OFF_TOOLTIP = "Turns the vacuum off.";
Blockly.Msg.VACUUM_ON = "turn vacuum on %";
// Fix spelling in user-facing tooltip: "procent" -> "percent".
Blockly.Msg.VACUUM_ON_TOOLTIP = "Turns on the vacuum speed in percent";
Blockly.Msg.VALIDATION_CONTAINS_SPECIAL_CHARACTERS = "Valid special characters: »=+!?.,%#+&^@_-«";
Blockly.Msg.VALIDATION_FIELD_REQUIRED = "This field cannot be empty!";
// Fix typo in user-facing validation message: "lenght" -> "length".
Blockly.Msg.VALIDATION_MAX_LENGTH = "The max. length is 25 characters!";
Blockly.Msg.VALIDATION_PASSWORD_MIN_LENGTH = "Password min. length is 6!";
Blockly.Msg.VALIDATION_SECOND_PASSWORD_EQUAL = "The two passwords must be equal!";
Blockly.Msg.VALIDATION_TOKEN_LENGTH = "Token length is 8 characters!";
Blockly.Msg.VALIDATION_VALID_EMAIL_ADDRESS = "Please enter a valid email address!";
Blockly.Msg.VALUE_TO = "value to";
Blockly.Msg.VARIABLES_DEFAULT_NAME = "item";
Blockly.Msg.VARIABLES_GET_CREATE_SET = "Create 'set %1'";
Blockly.Msg.VARIABLES_GET_HELPURL = "https://github.com/google/blockly/wiki/Variables#get";
Blockly.Msg.VARIABLES_GET_TOOLTIP = "Returns the value of this variable.";
Blockly.Msg.VARIABLES_GLOBAL_DECLARE_TOOLTIP = "Declares a global variable.";
Blockly.Msg.VARIABLES_LOCAL_DECLARE_TOOLTIP = "Declares a local variable.";
Blockly.Msg.VARIABLES_SET = "set %1 to %2";
Blockly.Msg.VARIABLES_SET_CREATE_GET = "Create 'get %1'";
Blockly.Msg.VARIABLES_SET_HELPURL = "https://github.com/google/blockly/wiki/Variables#set";
Blockly.Msg.VARIABLES_SET_TOOLTIP = "Sets this variable to be equal to the input.";
Blockly.Msg.VARIABLES_TITLE = "variable";
Blockly.Msg.VARIABLES_TYPE_ARRAY_BOOLEAN = "List Boolean";
Blockly.Msg.VARIABLES_TYPE_ARRAY_COLOUR = "List Colour";
Blockly.Msg.VARIABLES_TYPE_ARRAY_CONNECTION = "List Connection";
Blockly.Msg.VARIABLES_TYPE_ARRAY_IMAGE = "List Image";
Blockly.Msg.VARIABLES_TYPE_ARRAY_NUMBER = "List Number";
Blockly.Msg.VARIABLES_TYPE_ARRAY_STRING = "List String";
Blockly.Msg.VARIABLES_TYPE_BOOLEAN = "Boolean";
Blockly.Msg.VARIABLES_TYPE_COLOUR = "Colour";
Blockly.Msg.VARIABLES_TYPE_CONNECTION = "Connection";
Blockly.Msg.VARIABLES_TYPE_IMAGE = "Image";
Blockly.Msg.VARIABLES_TYPE_NUMBER = "Number";
Blockly.Msg.VARIABLES_TYPE_STRING = "String";
Blockly.Msg.VARIABLE_B = "store B [";
Blockly.Msg.VARIABLE_G = "store G [";
Blockly.Msg.VARIABLE_R = "if yes, store R [";
Blockly.Msg.VARIABLE_TO = "] into";
Blockly.Msg.VARIABLE_USED_BEFORE_DECLARATION = "The variable is used before declaration.";
Blockly.Msg.VARIABLE_VALUE = "if yes, store value [";
Blockly.Msg.VARIABLE_X = "if yes, store X [";
Blockly.Msg.VARIABLE_Y = "store Y [";
Blockly.Msg.VARIABLE_Z = "store Z [";
Blockly.Msg.VOICE_PITCH = "voice pitch";
Blockly.Msg.VOICE_SPEED = "voice speed";
Blockly.Msg.WAIT = "wait ms";
Blockly.Msg.WAIT_FOR_TOOLTIP = "Waits for sensor values.";
Blockly.Msg.WAIT_OR = "or wait for";
Blockly.Msg.WAIT_TIME_TOOLTIP = "Waits for a certain time in milliseconds.";
Blockly.Msg.WAIT_TOOLTIP = "Waits for a condition becoming true.";
Blockly.Msg.WAIT_UNTIL = "wait until";
Blockly.Msg.WIRELESS_TOOLTIP = "Represents a wireless connection.";
Blockly.Msg.WRITE_TO_PIN_TOOLTIP = "Sends the value to chosen pin.";
Blockly.Msg.X = "x";
Blockly.Msg.XEND = "end of X axis";
Blockly.Msg.XLABEL = "X axis label";
Blockly.Msg.XSTART = "start of X axis";
Blockly.Msg.XTICK = "distance between X ticks";
Blockly.Msg.Y = "y";
Blockly.Msg.YAW = "yaw";
Blockly.Msg.YEND = "end of Y axis";
Blockly.Msg.YES = "yes";
Blockly.Msg.YLABEL = "Y axis label";
Blockly.Msg.YOUNGER_THEN_14 = "I am younger than 16!";
Blockly.Msg.YSTART = "start of Y axis";
Blockly.Msg.YTICK = "distance between Y ticks";
Blockly.Msg.Z = "z";
Blockly.Msg.PROCEDURES_DEFRETURN_TITLE = Blockly.Msg.PROCEDURES_DEFNORETURN_TITLE;
Blockly.Msg.LISTS_GET_SUBLIST_INPUT_IN_LIST = Blockly.Msg.LISTS_INLIST;
Blockly.Msg.ACTION_LEDBAR = Blockly.Msg.LEDBAR;
Blockly.Msg.KEY_GETSAMPLE_TOOLTIP = Blockly.Msg.KEY_ISPRESSED_TOOLTIP;
Blockly.Msg.SENSOR_INFRARED_GETSAMPLE_TOOLTIP = Blockly.Msg.INFRARED_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_PINTOUCH_PRESSED_GETSAMPLE_TOOLTIP = Blockly.Msg.PIN_ISTOUCHED_TOOLTIP;
Blockly.Msg.SENSOR_COLOUR_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_ACCELEROMETER_GETSAMPLE_TOOLTIP = Blockly.Msg.SENSOR_ACCELEROMETER_VALUE_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_HUMIDITY_TEMPERATURE_GETSAMPLE_TOOLTIP = Blockly.Msg.TEMPERATURE_GETSAMPLE_TOOLTIP;
Blockly.Msg.MODE_DOWN = Blockly.Msg.SENSOR_GESTURE_DOWN;
Blockly.Msg.SENSOR_COLOUR_RGB_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_RGB_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_ULTRASONIC_GETSAMPLE_TOOLTIP = Blockly.Msg.ULTRASONIC_GETSAMPLE_TOOLTIP;
Blockly.Msg.MODE_UP = Blockly.Msg.SENSOR_GESTURE_UP;
Blockly.Msg.LISTS_CREATE_WITH_ITEM_TITLE = Blockly.Msg.VARIABLES_DEFAULT_NAME;
Blockly.Msg.SENSOR_HTCOLOUR_COLOUR_GETSAMPLE_TOOLTIP = Blockly.Msg.SENSOR_COLOUR_COLOUR_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_ENCODER_GETSAMPLE_TOOLTIP = Blockly.Msg.ENCODER_GETSAMPLE_TOOLTIP;
Blockly.Msg.MODE_FACE_UP = Blockly.Msg.SENSOR_GESTURE_FACE_UP;
Blockly.Msg.SENSOR_HTCOLOUR_LIGHT_GETSAMPLE_TOOLTIP = Blockly.Msg.SENSOR_COLOUR_LIGHT_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_COLOUR_LIGHT_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_LIGHT_GETSAMPLE_TOOLTIP;
Blockly.Msg.MATH_CHANGE_TITLE_ITEM = Blockly.Msg.VARIABLES_DEFAULT_NAME;
Blockly.Msg.TOUCH_GETSAMPLE_TOOLTIP = Blockly.Msg.TOUCH_ISPRESSED_TOOLTIP;
Blockly.Msg.CONTROLS_FOR_INPUT_DO = Blockly.Msg.CONTROLS_REPEAT_INPUT_DO;
Blockly.Msg.MODE_FACE_DOWN = Blockly.Msg.SENSOR_GESTURE_FACE_DOWN;
Blockly.Msg.CONTROLS_IF_ELSE_TITLE_ELSE = Blockly.Msg.CONTROLS_IF_MSG_ELSE;
Blockly.Msg.SENSOR_COLOUR_COLOUR_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_COLOUR_GETSAMPLE_TOOLTIP;
Blockly.Msg.HTCOLOUR_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_SOUND_GETSAMPLE_TOOLTIP_CALLIOPE = Blockly.Msg.MICROPHONE_GETSAMPLE_TOOLTIP;
Blockly.Msg.MODE_FREEFALL = Blockly.Msg.SENSOR_GESTURE_FREEFALL;
Blockly.Msg.PROCEDURES_DEFRETURN_DO = Blockly.Msg.PROCEDURES_DEFNORETURN_DO;
Blockly.Msg.LISTS_GET_INDEX_HELPURL = Blockly.Msg.LISTS_INDEX_OF_HELPURL;
Blockly.Msg.SENSOR_TIMER_GETSAMPLE_TOOLTIP = Blockly.Msg.TIMER_GETSAMPLE_TOOLTIP;
Blockly.Msg.LISTS_INDEX_OF_INPUT_IN_LIST = Blockly.Msg.LISTS_INLIST;
Blockly.Msg.SENSOR_ACCELEROMETER_VALUE_GETSAMPLE_TOOLTIP = Blockly.Msg.ACCELEROMETER_ROTATION_TOOLTIP;
Blockly.Msg.ORA_ROBOT_PUSH_RUN = Blockly.Msg.MESSAGE_EDIT_START;
Blockly.Msg.LISTS_SET_INDEX_INPUT_IN_LIST = Blockly.Msg.LISTS_INLIST;
Blockly.Msg.PROCEDURES_DEFRETURN_COMMENT = Blockly.Msg.PROCEDURES_DEFNORETURN_COMMENT;
Blockly.Msg.TEXT_CREATE_JOIN_ITEM_TITLE_ITEM = Blockly.Msg.VARIABLES_DEFAULT_NAME;
Blockly.Msg.SENSOR_HUMIDITY_HUMIDITY_GETSAMPLE_TOOLTIP = Blockly.Msg.SENSOR_HUMIDITY_GETSAMPLE_TOOLTIP;
Blockly.Msg.MODE_SHAKE = Blockly.Msg.SENSOR_GESTURE_SHAKE;
Blockly.Msg.SENSOR_TEMPERATURE_TEMPERATURE_GETSAMPLE_TOOLTIP = Blockly.Msg.TEMPERATURE_GETSAMPLE_TOOLTIP;
Blockly.Msg.CONTROLS_IF_MSG_THEN = Blockly.Msg.CONTROLS_REPEAT_INPUT_DO;
Blockly.Msg.PROCEDURES_DEFRETURN_PROCEDURE = Blockly.Msg.PROCEDURES_DEFNORETURN_PROCEDURE;
Blockly.Msg.SENSOR_GYRO_GETSAMPLE_TOOLTIP = Blockly.Msg.GYRO_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_BATTERY_GETSAMPLE_TOOLTIP = Blockly.Msg.BATTERY_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_LIGHT_GETSAMPLE_TOOLTIP = Blockly.Msg.LIGHT_GETSAMPLE_TOOLTIP;
Blockly.Msg.HTCOLOUR_AMBIENTLIGHT_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_AMBIENTLIGHT_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_COLOUR_AMBIENTLIGHT_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_AMBIENTLIGHT_GETSAMPLE_TOOLTIP;
Blockly.Msg.CONTROLS_IF_ELSEIF_TITLE_ELSEIF = Blockly.Msg.CONTROLS_IF_MSG_ELSEIF;
Blockly.Msg.LISTS_GET_INDEX_INPUT_IN_LIST = Blockly.Msg.LISTS_INLIST;
Blockly.Msg.CONTROLS_FOREACH_INPUT_DO = Blockly.Msg.CONTROLS_REPEAT_INPUT_DO;
Blockly.Msg.SENSOR_INFRARED_PRESENCE_GETSAMPLE_TOOLTIP = Blockly.Msg.INFRARED_PRESENCE_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_COMPASS_GETSAMPLE_TOOLTIP = Blockly.Msg.COMPASS_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_HTCOLOUR_GETSAMPLE_TOOLTIP = Blockly.Msg.SENSOR_COLOUR_GETSAMPLE_TOOLTIP;
Blockly.Msg.CONTROLS_IF_IF_TITLE_IF = Blockly.Msg.CONTROLS_IF_MSG_IF;
Blockly.Msg.CONTROLS_WHILEUNTIL_INPUT_DO = Blockly.Msg.CONTROLS_REPEAT_INPUT_DO;
Blockly.Msg.SENSOR_SOUND_GETSAMPLE_TOOLTIP = Blockly.Msg.SOUND_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_FLAME_GETSAMPLE_TOOLTIP = Blockly.Msg.FLAME_GETSAMPLE_TOOLTIP;
Blockly.Msg.ACTION_FOURDIGITDISPLAY = Blockly.Msg.FOURDIGITDISPLAY;
Blockly.Msg.SENSOR_COLOURTCS3472_COLOUR_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_COLOUR_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_HTCOLOUR_AMBIENTLIGHT_GETSAMPLE_TOOLTIP = Blockly.Msg.SENSOR_COLOUR_AMBIENTLIGHT_GETSAMPLE_TOOLTIP;
Blockly.Msg.HTCOLOUR_COLOUR_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_COLOUR_GETSAMPLE_TOOLTIP;
Blockly.Msg.HTCOLOUR_LIGHT_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_LIGHT_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_INFRARED_DISTANCE_GETSAMPLE_TOOLTIP = Blockly.Msg.INFRARED_DISTANCE_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_HTCOLOUR_RGB_GETSAMPLE_TOOLTIP = Blockly.Msg.SENSOR_COLOUR_RGB_GETSAMPLE_TOOLTIP;
Blockly.Msg.TEXT_APPEND_VARIABLE = Blockly.Msg.VARIABLES_DEFAULT_NAME;
Blockly.Msg.SENSOR_KEY_GETSAMPLE_TOOLTIP = Blockly.Msg.KEY_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_TEMPERATURE_GETSAMPLE_TOOLTIP = Blockly.Msg.TEMPERATURE_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_TOUCH_GETSAMPLE_TOOLTIP = Blockly.Msg.TOUCH_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_COLOURTCS3472_RGB_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_RGB_GETSAMPLE_TOOLTIP;
Blockly.Msg.SENSOR_COLOURTCS3472_LIGHT_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_LIGHT_GETSAMPLE_TOOLTIP;
Blockly.Msg.HTCOLOUR_RGB_GETSAMPLE_TOOLTIP = Blockly.Msg.COLOUR_RGB_GETSAMPLE_TOOLTIP;
|
/****************************************************************************
** CUBE http://www.scalasca.org/ **
*****************************************************************************
** Copyright (c) 1998-2016 **
** Forschungszentrum Juelich GmbH, Juelich Supercomputing Centre **
** **
** This software may be modified and distributed under the terms of **
** a BSD-style license. See the COPYING file in the package base **
** directory for details. **
****************************************************************************/
#ifndef _COLORING_H
#define _COLORING_H
#include <QDialog>
#include "Constants.h"
#include "ColorMap.h"
#include "Settings.h"
class PluginServices;
class QDoubleSpinBox;
class ColorWidget;
// the ColorWidget class stores the color settings and serves with
// converting values into colors;
// it also invokes ColorDialog instances to allow the user to make color settings
// Default colour map of the plugin: implements the ColorMap interface
// (value -> QColor conversion plus a configuration dialog) and the
// SettingsHandler interface (persisting its configuration in QSettings).
// The actual colour computation and the dialog are delegated to the
// owned ColorWidget instance (member "cw").
class DefaultColorMap : public ColorMap, SettingsHandler
{
Q_OBJECT
public:
DefaultColorMap();
~DefaultColorMap();
// ColorMap interface
// Maps "value" from the range [minValue, maxValue] onto a colour;
// whiteForZero requests white for the value zero (used by topologies).
QColor
getColor( double value,
double minValue,
double maxValue,
bool whiteForZero ) const;
// Human-readable name of this map shown in the UI.
QString
getMapName() const;
// Whether a configuration dialog can be opened for this map.
bool
dialogAvailable() const;
// Opens the colour-configuration dialog (delegated to ColorWidget).
void
showDialog();
// SettingsHandler interface
void
loadGlobalSettings( QSettings& );
void
saveGlobalSettings( QSettings& );
// Key under which this handler's settings are stored.
QString
settingName()
{
return "DefaultColorMap";
}
signals:
// Emitted whenever the user changes the colour settings.
void
colorMapChanged();
private slots:
// Forwards change notifications from the owned ColorWidget.
void
onColorMapChanged();
private:
// Widget holding the colour settings and doing the value->colour work.
ColorWidget* cw;
};
// Stores the colour settings (colour positions, lighten/white thresholds,
// interpolation method), converts values into colours, paints a preview of
// the colour scale, and opens ColorDialog instances for user configuration.
class ColorWidget : public QWidget
{
Q_OBJECT
public:
ColorWidget();
// copy constructor
ColorWidget( ColorWidget* colorWidget );
// some set methods
void
setLightenStart( double value );
void
setWhiteStart( double value );
void
setColoringMethod( ColoringMethod coloringMethod );
void
setInDialog( bool inDialog );
// this method is invoked by MainWidget when menu "Display/Coloring/General coloring"
// is activated;
// it connects ColorWidget with ColorDialog;
// starts a new dialog to input user color settings and sets
// the appropriate values
void
setColors();
// return a color corresponding to the value parameter
// having minValue at color position 0.0
// and maxValue at color position 1.0 on the color scale;
// whiteForZero specifies for topology widgets
// if the value zero should become white as color
QColor
getColor( double value,
double minValue,
double maxValue,
bool whiteForZero = true );
// in the color dialog the user may define the position of the colors
// blue, cyan, green, yellow, and red in the
// color scale;
// this method is invoked when in the color dialog one of those
// 5 color positions are changed;
// it sets the corresponding colorPos values, and assures
// that colorPos[i]<=colorPos[j] for all i<=j,
// parameters:
// index: index of the color position
// value: new position between 0 and 1
void
setColorPos( unsigned index,
double value );
// some get methods
double
getLightenStart();
double
getWhiteStart();
double
getColorPos( unsigned index );
ColoringMethod
getColoringMethod();
bool
getInDialog();
signals:
// asks the containing dialog to refresh its spin boxes from this widget
void
updateValueSpins();
// this signal is emitted when the color settings have been changed
void
colorsChanged();
public slots:
// copies the settings from _colorWidget into this widget (dialog "Apply")
void
onApply( ColorWidget* _colorWidget );
protected:
// the main painting method
void
paintEvent( QPaintEvent* );
// mouse event handling methods;
// the user can use the mouse to specify the color positions
// if the mouse is pressed, kept pressed and moved,
// then the nearest color position will be changed accordingly
void
mousePressEvent( QMouseEvent* event );
void
mouseReleaseEvent( QMouseEvent* event );
void
mouseMoveEvent( QMouseEvent* event );
private:
// defines till which percentage of the color scale
// colors should be lightened
double lightenStart;
// defines till which percentage of the color scale
// colors should be white
double whiteStart;
// defines some color positions on a color range from 0.0 to 1.0:
// blue has the position 0.0
// red has the position 1.0
// color[0]: colors below this position are gray
// color[4]: colors above this position are gray
// color[1]: position of cyan
// color[2]: position of green
// color[3]: position of yellow
// colors corresponding to points
// between the blue-cyan-green-yellow-red data points are approximated
// according to different possible functions (e.g. linearly)
double colorPos[ 5 ];
// specifies which function should be used for that approximation;
// ColoringMethod is defined in Constants.h
ColoringMethod coloringMethod;
// remembers if we are in a color dialog
bool inDialog;
// painting color scales in dialogs
void
paintInDialog();
// painting color scale for the main widget
void
paint();
// if the mouse is clicked, determine which color position
// (i.e. which colorPos element)
// will be changed by mouse movement
void
setMouseColorPos( QMouseEvent* event );
// set new colorPos value after mouse movement
void
updateColorPos( QMouseEvent* event );
// functions for the different ColoringMethods;
// each maps "value" from [min, max] to an interpolation parameter,
// in increasing (…Incr) or decreasing (…Decr) direction
double
linIncr( double min,
double max,
double value );
double
linDecr( double min,
double max,
double value );
double
quadratic1Incr( double min,
double max,
double value );
double
quadratic1Decr( double min,
double max,
double value );
double
quadratic2Incr( double min,
double max,
double value );
double
quadratic2Decr( double min,
double max,
double value );
double
exp1Incr( double min,
double max,
double value );
double
exp1Decr( double min,
double max,
double value );
double
exp2Incr( double min,
double max,
double value );
double
exp2Decr( double min,
double max,
double value );
// dispatch to the …Incr/…Decr variant selected by coloringMethod
double
incr( double min,
double max,
double value );
double
decr( double min,
double max,
double value );
// stores if the mouse is currently pressed
bool mousePressed;
// stores which color position (i.e. which index in colorPos)
// will be changed by mouse movement
int mouseColorPos;
// method for lightening colors
QColor
lighten( QColor color,
double position );
};
// the ColorDialog class defines a color dialog to allow the user
// color settings;
// used only by ColorWidget
// Modal dialog that lets the user edit the color-scale parameters of a
// ColorWidget: the lighten/white start values, the five color positions,
// and the coloring (interpolation) method.  Used only by ColorWidget.
class ColorDialog : public QDialog
{
Q_OBJECT
public:
// parent: owning widget; colorWidget: the widget whose settings are edited
ColorDialog( QWidget* parent,
ColorWidget* colorWidget );
signals:
// emitted when the user applies the current settings to the given widget
void
apply( ColorWidget* );
public slots:
// refresh the spin boxes from the widget's current values
void
onUpdateValueSpins();
private slots:
// setters wired to the individual value spin boxes
void
setLightenStart( double value );
void
setWhiteStart( double value );
void
setColorPos0( double value );
void
setColorPos1( double value );
void
setColorPos2( double value );
void
setColorPos3( double value );
void
setColorPos4( double value );
// radio-button handlers selecting the coloring method
void
onLinear( bool activ );
void
onQuadratic1( bool activ );
void
onQuadratic2( bool activ );
void
onExponential1( bool activ );
void
onExponential2( bool activ );
// apply button handler; emits apply()
void
onApply();
private:
// one spin box per colorPos entry
QDoubleSpinBox* spin[ 5 ];
// the widget being configured
ColorWidget* colorWidget;
// shared implementation for the setColorPosN slots
void
setColorPos( unsigned index,
double value );
};
#endif
|
from math import sin, cos
from micropython import const
from microqiskit import QuantumCircuit, simulate
from time import sleep
import pew
pew.init()
# Single-qubit circuit put into superposition with H; each measurement
# yields one uniformly random classical bit.  This circuit is reused by
# randGen() as the game's randomness source.
qc = QuantumCircuit(1,1)
qc.h(0)
qc.measure(0,0)
def randGen():
    """Return a quantum-random integer in [0, 7].

    Runs the module-level single-qubit circuit ``qc`` three times
    (one shot each), concatenates the three measured bits into a
    binary string and converts it to an integer.
    """
    bits = []
    for _ in range(3):
        bits.extend(simulate(qc, shots=1, get='memory'))
    return int(''.join(bits), 2)
# Gameplay constants.  The underscore prefix lets MicroPython's const()
# inline these values at compile time.
__MAX_RAINDROPS = const(20)
__STARTING_SPEED = const(2)
__STARTING_Y = const(0)
__PLAYER_STARTING_X = const(3)
__PLAYER_STARTING_Y = const(7)
# Pixel palette values used on the PewPew 8x8 display.
__DRAWING_COLOR = const(1)
__ERASING_COLOR = const(0)
__PLAYER_COLOR = const(255)
# Screen geometry (8x8 grid, y grows downward).
__GROUND_Y = const(7)
__SCREEN_MAX_X = const(7)
__SCREEN_MIN_X = const(0)
# Mods
__WRAP_AROUND = True
__DEFAULT_SPEED_FACTOR = 1.012
# Physics
__V_ZERO = __STARTING_SPEED
__PLANCK = 1
class AcidRain:
    """Falling-raindrop dodging game for the PewPew handheld.

    The player moves along the bottom row while raindrops fall from the
    top; pressing UP attempts quantum tunnelling through the drop directly
    above the player.  Positions are (x, y) tuples on the 8x8 display.
    """

    def __init__(self):
        self.screen = pew.Pix()
        # Game stats
        self.player = (__PLAYER_STARTING_X, __PLAYER_STARTING_Y)
        self.old_player = (__PLAYER_STARTING_X, __PLAYER_STARTING_Y)
        self.reset_game_logic()
        self.game_speed = __STARTING_SPEED
        self.speed_factor = __DEFAULT_SPEED_FACTOR
        # Visuals
        self.title = pew.Pix.from_text("AcidRain")
        self.game_over = pew.Pix.from_text("Game Over!")

    # Utility functions
    def raindrops_are_too_close(self, A, B):
        # True when B is directly one cell below A in the same column.
        return A[0] == B[0] and B[1] - A[1] == 1

    def raindrop_hit_the_ground(self, A):
        # True when the drop reached the bottom row.
        return A[1] == __GROUND_Y

    def raindrop_is_above_position(self, A, B):
        # True when drop A sits directly above position B.
        return A[0] == B[0] and A[1] == B[1] - 1

    def current_number_of_raindrops(self):
        return len(self.raindrops)

    def check_for_start(self):
        # Either action button (O or X) starts the game.
        keys = pew.keys()
        return keys&pew.K_O or keys&pew.K_X

    def clear_screen_for_start(self):
        # Fill the whole 8x8 display with color 0 (blank).
        self.screen.box(0, x=0, y=0, width=8, height=8)

    def reset_game_logic(self):
        self.raindrops_evaded = 0
        self.to_draw = []
        self.to_erase = []
        self.raindrops = []
        self.game_speed = __STARTING_SPEED

    def new_raindrop(self):
        # randGen() yields a column in [0, 7]; drops spawn on the top row.
        return randGen(), __STARTING_Y

    def debounce(self):
        # Wait (up to 1s) until all keys are released so one press does
        # not register across multiple frames.
        for i in range(100):
            pew.tick(1/100)
            if not pew.keys():
                return

    # Game logic
    def generate_new_raindrops(self):
        """Spawn new raindrops, avoiding columns where a drop would be
        immediately below an existing one (capped at __MAX_RAINDROPS)."""
        how_many = 1
        current_number_of_raindrops = self.current_number_of_raindrops()
        if current_number_of_raindrops + how_many > __MAX_RAINDROPS:
            how_many = __MAX_RAINDROPS - current_number_of_raindrops
        new_raindrops = [self.new_raindrop() for i in range(how_many)]
        i = 0
        while i < how_many:
            # Re-roll until the new drop is not stacked right above an
            # existing drop.
            while any(self.raindrops_are_too_close(new_raindrops[i], old) for old in self.raindrops):
                new_raindrops[i] = self.new_raindrop()
            i += 1
        # raindrops are safe to add now
        self.to_erase += self.raindrops
        self.to_draw += new_raindrops
        self.raindrops += new_raindrops

    def remove_fallen_raindrops(self):
        """Drop raindrops that reached the ground and credit them as evaded."""
        old_number_of_raindrops = self.current_number_of_raindrops()
        self.to_erase += list(filter(self.raindrop_hit_the_ground, self.raindrops))
        self.raindrops = list(filter(lambda x: not self.raindrop_hit_the_ground(x), self.raindrops))
        self.raindrops_evaded += old_number_of_raindrops - self.current_number_of_raindrops()

    def check_and_move_player(self):
        """Poll keys: UP attempts tunnelling, LEFT/RIGHT move (with optional
        wrap-around at the screen edges)."""
        keys = pew.keys()
        self.debounce()
        x = self.player[0]
        dx = 0
        if keys & pew.K_UP:
            self.handle_quantum_tunnelling()
        else:
            if keys & pew.K_LEFT:
                dx -= 1
            elif keys & pew.K_RIGHT:
                dx += 1
            # Wrap (jump 7 cells the other way) or clamp at the edges.
            if x + dx > __SCREEN_MAX_X:
                dx = -7 if __WRAP_AROUND else 0
            if x + dx < __SCREEN_MIN_X:
                dx = 7 if __WRAP_AROUND else 0
        self.old_player = self.player[0], self.player[1]
        if dx: # actual movement
            self.player = x+dx, self.player[1]

    def handle_quantum_tunnelling(self):
        """Try to tunnel through the raindrop directly above the player.

        Succeeds with probability t (compared against a quantum-random
        number in [0, 1]); failure, or a second drop stacked above, ends
        the game.  Pressing UP with no drop above is a no-op.
        """
        if any(self.raindrop_is_above_position(raindrop, self.player) for raindrop in self.raindrops):
            raindrop_above = self.player[0], self.player[1]-1
            a = 1
            m = 1
            # E = self.game_speed**2/2
            # k_0 = sqrt(2*m*E/__PLANCK**2)
            # k_1 = sqrt(2*m*(E-__V_ZERO)/__PLANCK**2)
            # t = 4*k_0*k_1*comp_exp(-1*a*(k_0-k_1))
            # t /= (k_0+k_1)**2 - comp_exp(2*a*k_1)*(k_0 - k_1)**2
            # t = 1 / ((k_0 - k_1)**4*sin(2*a*k_1)**2 + ((k_1+k_0)**2 - (k_0-k_1)**2*cos(2*a*k_1))**2 )
            # t = 1 - exp(-self.game_speed / 2)
            # NOTE(review): looks like a truncated series approximation of a
            # tunnelling probability in game_speed — confirm intended formula.
            t = self.game_speed - self.game_speed**2/2 + self.game_speed**3/6
            if randGen()/7 <= t:
                if any(self.raindrop_is_above_position(raindrop, raindrop_above) for raindrop in self.raindrops):
                    raise pew.GameOver # die
                else:
                    self.to_erase.append(raindrop_above)
                    self.raindrops = list(filter(lambda x: x != raindrop_above, self.raindrops))
            else: # die
                raise pew.GameOver

    def check_for_player_collision(self):
        # A drop occupying the player's cell ends the game.
        if any(self.player == raindrop for raindrop in self.raindrops):
            raise pew.GameOver

    def update_raindrops(self):
        # Advance every drop one row down, queueing erase/draw updates.
        for i in range(len(self.raindrops)):
            self.to_erase.append(self.raindrops[i])
            self.raindrops[i] = (self.raindrops[i][0], self.raindrops[i][1] + 1)
            self.to_draw.append(self.raindrops[i])

    def run_game(self):
        """Main entry point: title screen, game loop, game-over screen."""
        game_started = False
        # Title
        while True:
            # Scroll the title across the display, polling for a start key
            # every frame.
            for dx in range(-8, self.title.width):
                self.screen.blit(self.title, -dx, 1)
                pew.show(self.screen)
                pew.tick(1/12)
                game_started = self.check_for_start()
                if game_started: break
            if game_started: break
        # Game started
        self.clear_screen_for_start()
        try:
            while True:
                # Poll keys
                self.screen.pixel(*self.player, color=__ERASING_COLOR)
                self.check_and_move_player()
                self.screen.pixel(*self.player, color=__PLAYER_COLOR)
                #####################################
                self.remove_fallen_raindrops()
                self.update_raindrops()
                self.generate_new_raindrops()
                self.check_for_player_collision()
                # Update screen elements here
                while len(self.to_erase):
                    self.screen.pixel(*self.to_erase.pop(0), color=__ERASING_COLOR)
                while len(self.to_draw):
                    self.screen.pixel(*self.to_draw.pop(0), color=__DRAWING_COLOR)
                pew.show(self.screen)
                # Frame period shrinks as game_speed grows -> game speeds up.
                pew.tick(1/self.game_speed)
                self.game_speed *= self.speed_factor
        except pew.GameOver:
            # Game over screen
            self.clear_screen_for_start()
            for dx in range(-8, self.game_over.width):
                self.screen.blit(self.game_over, -dx, 1)
                pew.show(self.screen)
                pew.tick(1/17)
            score = pew.Pix.from_text("Score: " + str(self.raindrops_evaded))
            for dx in range(-8, score.width):
                self.screen.blit(score, -dx, 1)
                pew.show(self.screen)
                pew.tick(1/13)
            self.reset_game_logic()
            self.debounce() # for any other button presses
|
import React from 'react';
import createSvgIcon from './utils/createSvgIcon';
// Material-UI style generated SVG icon component ("KitchenSharp" variant);
// the path data encodes the icon's vector outline.
export default createSvgIcon(React.createElement("path", {
  d: "M20 2.01L4 2v20h16V2.01zM18 20H6v-9.02h12V20zm0-11H6V4h12v5zM8 5h2v3H8V5zm0 7h2v5H8v-5z"
}), 'KitchenSharp');
|
import React from 'react'
import { Link, StaticQuery, graphql } from 'gatsby'
import Layout from '../components/layout'
/**
 * Index page component: queries all markdown pages (ordered by the
 * `order` frontmatter field, ascending) and renders them as a list of
 * links inside the site layout.
 */
function Introduction() {
  return (
    <StaticQuery
      query={graphql`
        {
          allMarkdownRemark(
            sort: { order: ASC, fields: [frontmatter___order] }
          ) {
            edges {
              node {
                frontmatter {
                  path
                  title
                }
              }
            }
          }
        }
      `}
      render={(data) => {
        // Flatten the GraphQL edge/node wrappers down to plain
        // frontmatter objects ({ path, title }).
        const pages = data.allMarkdownRemark.edges.map(
          ({ node }) => node.frontmatter
        )
        return (
          <Layout>
            <ul>
              {pages.map(({ title, path }) => (
                <li key={path}>
                  <Link to={path}>{title}</Link>
                </li>
              ))}
            </ul>
          </Layout>
        )
      }}
    />
  )
}
export default Introduction
|
// Helpers for fetching, flattening and displaying ZAP alerts in the HUD.
// Depends on globals provided elsewhere: utils, apiCallWithResponse,
// localforage, I18n, LOG_DEBUG.
const alertUtils = (function () {
    /**
     * Show all alerts for the whole site containing `target` (recursive),
     * then drill into a specific alert if the user picks one.
     */
    function showSiteAlerts(tabId, title, target, alertRisk) {
        // Note that there's no need to load any tool data here
        const config = {};
        config.title = title;
        config.risk = alertRisk;
        utils.getUpgradedDomain(target)
            .then(upgradedDomain => {
                return apiCallWithResponse('alert', 'view', 'alertsByRisk', {url: upgradedDomain, recurse: 'true'});
            })
            .then(json => {
                config.alerts = flattenAllAlerts(json);
                utils.messageFrame(tabId, 'display', {action: 'showAllAlerts', config}).then(response => {
                    // Handle button choice
                    if (response.alertId) {
                        // Re-show this same list when the user navigates back
                        // from the detail view.
                        const backFunction = function () {
                            showSiteAlerts(tabId, title, target, alertRisk);
                        };
                        showAlertDetails(tabId, response.alertId, backFunction);
                    }
                })
                .catch(utils.errorHandler);
            })
            .catch(utils.errorHandler);
    }

    /**
     * Merge the per-risk alert arrays from the ZAP `alertsByRisk` view into
     * one object keyed by risk level.
     * NOTE(review): assumes the API always returns the four risk buckets in
     * the fixed order Informational, Low, Medium, High — verify.
     */
    function flattenAllAlerts(alerts) {
        const json = {};
        json.Informational = flattenAlerts(alerts.alertsByRisk[0].Informational);
        json.Low = flattenAlerts(alerts.alertsByRisk[1].Low);
        json.Medium = flattenAlerts(alerts.alertsByRisk[2].Medium);
        json.High = flattenAlerts(alerts.alertsByRisk[3].High);
        return json;
    }

    /**
     * Merge a list of single-key alert objects into one flat object.
     * Later entries overwrite earlier ones with the same key.
     */
    function flattenAlerts(alerts) {
        const json = {};
        for (let i = 0; i < alerts.length; i++) {
            const alert = alerts[i];
            for (const key in alert) {
                if (Object.prototype.hasOwnProperty.call(alert, key)) {
                    json[key] = alert[key];
                }
            }
        }
        return json;
    }

    /**
     * Show alerts for a single page URL (non-recursive), normalizing the
     * URL back to http:// when ZAP upgraded the domain, and stripping any
     * query string.
     */
    function showPageAlerts(tabId, title, target, alertRisk) {
        // Note that there's no need to load any tool data here
        const config = {};
        config.title = title;
        config.risk = alertRisk;
        const targetDomain = utils.parseDomainFromUrl(target);
        localforage.getItem('upgradedDomains')
            .then(upgradedDomains => {
                if (targetDomain in upgradedDomains) {
                    // It's been upgraded to https by ZAP, but the alerts won't have been
                    target = target.replace('https://', 'http://');
                }
                if (target.indexOf('?') > 0) {
                    // Remove any url params
                    target = target.substring(0, target.indexOf('?'));
                }
                return apiCallWithResponse('alert', 'view', 'alertsByRisk', {url: target, recurse: 'false'});
            })
            .then(json => {
                config.alerts = flattenAllAlerts(json);
                return utils.messageFrame(tabId, 'display', {action: 'showAllAlerts', config});
            })
            .then(response => {
                // Handle button choice
                if (response.alertId) {
                    const backFunction = function () {
                        showPageAlerts(tabId, title, target, alertRisk);
                    };
                    return showAlertDetails(tabId, response.alertId, backFunction);
                }
            })
            .catch(utils.errorHandler);
    }

    /**
     * Fetch one alert by id and display its details; `backFunction` is
     * invoked when the user presses Back in the detail view.
     */
    function showAlertDetails(tabId, id, backFunction) {
        utils.log(LOG_DEBUG, 'showAlertDetails', String(id));
        apiCallWithResponse('core', 'view', 'alert', {id})
            .then(json => {
                const config = {};
                config.title = json.alert.alert;
                config.details = json.alert;
                utils.messageFrame(tabId, 'display', {action: 'showAlertDetails', config})
                    .then(response => {
                        if (response.back) {
                            backFunction();
                        }
                    })
                    .catch(utils.errorHandler);
            })
            .catch(utils.errorHandler);
    }

    /**
     * Record a newly raised alert against a tool and broadcast the updated
     * count to its panel buttons (only for alert names not seen before).
     */
    function updatePageAlertCount(toolname, alertEvent) {
        let alertUrl = alertEvent.uri;
        if (alertUrl.startsWith('http://')) {
            // It will have been upgraded to https in the HUD
            alertUrl = alertUrl.replace('http://', 'https://');
        }
        utils.loadTool(toolname)
            .then(tool => {
                const alertData = tool.alerts[alertEvent.name];
                if (!alertData) {
                    // Don't need to add much, it's the fact it's here that matters
                    tool.alerts[alertEvent.name] = [{
                        confidence: alertEvent.confidence,
                        name: alertEvent.name,
                        id: alertEvent.alertId,
                        url: alertEvent.uri
                    }];
                    // The button shows the number of distinct alert names.
                    tool.data = Object.keys(tool.alerts).length;
                    if (tool.isSelected) {
                        utils.messageAllTabs(tool.panel, {action: 'broadcastUpdate', context: {url: alertUrl}, tool: {name: toolname, data: tool.data}});
                    }
                    return utils.writeTool(tool);
                }
            })
            .catch(utils.errorHandler);
    }

    /**
     * Replace a tool's alert set wholesale (e.g. after a full refresh) and
     * broadcast the new count.
     */
    function setPageAlerts(toolname, url, alerts) {
        utils.loadTool(toolname)
            .then(tool => {
                tool.alerts = alerts;
                tool.data = Object.keys(alerts).length;
                if (tool.isSelected) {
                    utils.messageAllTabs(tool.panel, {action: 'broadcastUpdate', context: {url}, tool: {name: toolname, data: tool.data}});
                }
                return utils.writeTool(tool);
            })
            .catch(utils.errorHandler);
    }

    /**
     * Show the standard button-options menu for a tool; currently the only
     * option is removing the tool from the panel.
     */
    function showOptions(tabId, toolname, toolLabel) {
        const config = {};
        config.tool = toolname;
        config.toolLabel = toolLabel;
        config.options = {remove: I18n.t('common_remove')};
        utils.messageFrame(tabId, 'display', {action: 'showButtonOptions', config})
            .then(response => {
                // Handle button choice
                if (response.id === 'remove') {
                    utils.removeToolFromPanel(tabId, toolname);
                } else {
                    // Cancel
                }
            })
            .catch(utils.errorHandler);
    }

    // Public API of the module.
    return {
        updatePageAlertCount,
        showSiteAlerts,
        showPageAlerts,
        showAlertDetails,
        showOptions,
        flattenAllAlerts,
        setPageAlerts
    };
})();
|
from typing import List
from resultinterpretation.model import TestStepResultItem
class TestStepResult:
    """
    Aggregate results of one test step over the course of multiple executions
    of one CouchEditTestSuite.
    """

    def __init__(self, stepNumber: int, resultItems: "List[TestStepResultItem]"):
        # stepNumber: index of the step within the suite.
        # resultItems: one result item per execution of the suite.
        # (The item annotation is a forward-reference string so the class
        # does not require the model import at definition time.)
        self.stepNumber = stepNumber
        self.resultItems = resultItems

    def __repr__(self) -> str:
        # Debug-friendly representation showing both attributes.
        return "TestStepResult(stepNumber=%r, resultItems=%r)" % (
            self.stepNumber, self.resultItems)
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import os
import eventlet
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import loopingcall
from oslo_utils import importutils
from neutron.agent.linux import dhcp
from neutron.agent.linux import external_process
from neutron.agent.metadata import driver as metadata_driver
from neutron.agent import rpc as agent_rpc
from neutron.common import constants
from neutron.common import exceptions
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron import context
from neutron.i18n import _LE, _LI, _LW
from neutron import manager
LOG = logging.getLogger(__name__)
class DhcpAgent(manager.Manager):
    """DHCP agent service manager.

    Note that the public methods of this class are exposed as the server side
    of an rpc interface. The neutron server uses
    neutron.api.rpc.agentnotifiers.dhcp_rpc_agent_api.DhcpAgentNotifyApi as the
    client side to execute the methods here. For more information about
    changing rpc interfaces, see doc/source/devref/rpc_api.rst.
    """
    target = oslo_messaging.Target(version='1.0')

    def __init__(self, host=None, conf=None):
        super(DhcpAgent, self).__init__(host=host)
        # Maps network_id -> list of reasons the network needs a resync;
        # a key of None means "resync all networks".
        self.needs_resync_reasons = collections.defaultdict(list)
        self.conf = conf or cfg.CONF
        self.cache = NetworkCache()
        self.dhcp_driver_cls = importutils.import_class(self.conf.dhcp_driver)
        ctx = context.get_admin_context_without_session()
        self.plugin_rpc = DhcpPluginApi(topics.PLUGIN,
                                        ctx, self.conf.use_namespaces,
                                        self.conf.host)
        # create dhcp dir to store dhcp info
        dhcp_dir = os.path.dirname("/%s/dhcp/" % self.conf.state_path)
        utils.ensure_dir(dhcp_dir)
        self.dhcp_version = self.dhcp_driver_cls.check_version()
        self._populate_networks_cache()
        # Monitors the liveness of processes (dnsmasq etc.) spawned for DHCP.
        self._process_monitor = external_process.ProcessMonitor(
            config=self.conf,
            resource_type='dhcp')

    def init_host(self):
        self.sync_state()

    def _populate_networks_cache(self):
        """Populate the networks cache when the DHCP-agent starts."""
        try:
            existing_networks = self.dhcp_driver_cls.existing_dhcp_networks(
                self.conf
            )
            for net_id in existing_networks:
                # Seed the cache with skeleton models; subnets/ports are
                # filled in on the first sync.
                net = dhcp.NetModel(self.conf.use_namespaces,
                                    {"id": net_id,
                                     "subnets": [],
                                     "ports": []})
                self.cache.put(net)
        except NotImplementedError:
            # just go ahead with an empty networks cache
            LOG.debug("The '%s' DHCP-driver does not support retrieving of a "
                      "list of existing networks",
                      self.conf.dhcp_driver)

    def after_start(self):
        self.run()
        LOG.info(_LI("DHCP agent started"))

    def run(self):
        """Activate the DHCP agent."""
        self.sync_state()
        self.periodic_resync()

    def call_driver(self, action, network, **action_kwargs):
        """Invoke an action on a DHCP driver instance.

        Returns True on success; returns None (falsy) on failure, after
        scheduling a resync where appropriate.
        """
        LOG.debug('Calling driver for network: %(net)s action: %(action)s',
                  {'net': network.id, 'action': action})
        try:
            # the Driver expects something that is duck typed similar to
            # the base models.
            driver = self.dhcp_driver_cls(self.conf,
                                          network,
                                          self._process_monitor,
                                          self.dhcp_version,
                                          self.plugin_rpc)
            getattr(driver, action)(**action_kwargs)
            return True
        except exceptions.Conflict:
            # No need to resync here, the agent will receive the event related
            # to a status update for the network
            LOG.warning(_LW('Unable to %(action)s dhcp for %(net_id)s: there '
                            'is a conflict with its current state; please '
                            'check that the network and/or its subnet(s) '
                            'still exist.'),
                        {'net_id': network.id, 'action': action})
        except Exception as e:
            if getattr(e, 'exc_type', '') != 'IpAddressGenerationFailure':
                # Don't resync if port could not be created because of an IP
                # allocation failure. When the subnet is updated with a new
                # allocation pool or a port is deleted to free up an IP, this
                # will automatically be retried on the notification
                self.schedule_resync(e, network.id)
            if (isinstance(e, oslo_messaging.RemoteError)
                and e.exc_type == 'NetworkNotFound'
                or isinstance(e, exceptions.NetworkNotFound)):
                LOG.warning(_LW("Network %s has been deleted."), network.id)
            else:
                LOG.exception(_LE('Unable to %(action)s dhcp for %(net_id)s.'),
                              {'net_id': network.id, 'action': action})

    def schedule_resync(self, reason, network_id=None):
        """Schedule a resync for a given network and reason. If no network is
        specified, resync all networks.
        """
        self.needs_resync_reasons[network_id].append(reason)

    @utils.synchronized('dhcp-agent')
    def sync_state(self, networks=None):
        """Sync the local DHCP state with Neutron. If no networks are passed,
        or 'None' is one of the networks, sync all of the networks.
        """
        only_nets = set([] if (not networks or None in networks) else networks)
        LOG.info(_LI('Synchronizing state'))
        pool = eventlet.GreenPool(self.conf.num_sync_threads)
        known_network_ids = set(self.cache.get_network_ids())
        try:
            active_networks = self.plugin_rpc.get_active_networks_info()
            active_network_ids = set(network.id for network in active_networks)
            # Disable DHCP for networks that no longer exist server-side.
            for deleted_id in known_network_ids - active_network_ids:
                try:
                    self.disable_dhcp_helper(deleted_id)
                except Exception as e:
                    self.schedule_resync(e, deleted_id)
                    LOG.exception(_LE('Unable to sync network state on '
                                      'deleted network %s'), deleted_id)
            for network in active_networks:
                if (not only_nets or  # specifically resync all
                        network.id not in known_network_ids or  # missing net
                        network.id in only_nets):  # specific network to sync
                    pool.spawn(self.safe_configure_dhcp_for_network, network)
            pool.waitall()
            LOG.info(_LI('Synchronizing state complete'))
        except Exception as e:
            if only_nets:
                for network_id in only_nets:
                    self.schedule_resync(e, network_id)
            else:
                self.schedule_resync(e)
            LOG.exception(_LE('Unable to sync network state.'))

    @utils.exception_logger()
    def _periodic_resync_helper(self):
        """Resync the dhcp state at the configured interval."""
        while True:
            eventlet.sleep(self.conf.resync_interval)
            if self.needs_resync_reasons:
                # be careful to avoid a race with additions to list
                # from other threads
                reasons = self.needs_resync_reasons
                self.needs_resync_reasons = collections.defaultdict(list)
                for net, r in reasons.items():
                    if not net:
                        net = "*"
                    LOG.debug("resync (%(network)s): %(reason)s",
                              {"reason": r, "network": net})
                self.sync_state(reasons.keys())

    def periodic_resync(self):
        """Spawn a thread to periodically resync the dhcp state."""
        eventlet.spawn(self._periodic_resync_helper)

    def safe_get_network_info(self, network_id):
        # Returns the network model, or None if the RPC failed (a resync is
        # scheduled in that case) or the network was deleted.
        try:
            network = self.plugin_rpc.get_network_info(network_id)
            if not network:
                LOG.warn(_LW('Network %s has been deleted.'), network_id)
            return network
        except Exception as e:
            self.schedule_resync(e, network_id)
            LOG.exception(_LE('Network %s info call failed.'), network_id)

    def enable_dhcp_helper(self, network_id):
        """Enable DHCP for a network that meets enabling criteria."""
        network = self.safe_get_network_info(network_id)
        if network:
            self.configure_dhcp_for_network(network)

    @utils.exception_logger()
    def safe_configure_dhcp_for_network(self, network):
        try:
            self.configure_dhcp_for_network(network)
        except (exceptions.NetworkNotFound, RuntimeError):
            LOG.warn(_LW('Network %s may have been deleted and its resources '
                         'may have already been disposed.'), network.id)

    def configure_dhcp_for_network(self, network):
        # Start DHCP if any subnet has it enabled; optionally also start the
        # isolated metadata proxy when an IPv4 DHCP subnet exists.
        if not network.admin_state_up:
            return
        enable_metadata = self.dhcp_driver_cls.should_enable_metadata(
            self.conf, network)
        dhcp_network_enabled = False
        for subnet in network.subnets:
            if subnet.enable_dhcp:
                if self.call_driver('enable', network):
                    dhcp_network_enabled = True
                    self.cache.put(network)
                break
        if enable_metadata and dhcp_network_enabled:
            for subnet in network.subnets:
                if subnet.ip_version == 4 and subnet.enable_dhcp:
                    self.enable_isolated_metadata_proxy(network)
                    break

    def disable_dhcp_helper(self, network_id):
        """Disable DHCP for a network known to the agent."""
        network = self.cache.get_network_by_id(network_id)
        if network:
            if (self.conf.use_namespaces and
                    self.conf.enable_isolated_metadata):
                # NOTE(jschwarz): In the case where a network is deleted, all
                # the subnets and ports are deleted before this function is
                # called, so checking if 'should_enable_metadata' is True
                # for any subnet is false logic here.
                self.disable_isolated_metadata_proxy(network)
            if self.call_driver('disable', network):
                self.cache.remove(network)

    def refresh_dhcp_helper(self, network_id):
        """Refresh or disable DHCP for a network depending on the current state
        of the network.
        """
        old_network = self.cache.get_network_by_id(network_id)
        if not old_network:
            # DHCP current not running for network.
            return self.enable_dhcp_helper(network_id)
        network = self.safe_get_network_info(network_id)
        if not network:
            return
        # Compare DHCP-enabled CIDRs to decide between a cheap reload, a
        # full restart, or disabling DHCP entirely.
        old_cidrs = set(s.cidr for s in old_network.subnets if s.enable_dhcp)
        new_cidrs = set(s.cidr for s in network.subnets if s.enable_dhcp)
        if new_cidrs and old_cidrs == new_cidrs:
            self.call_driver('reload_allocations', network)
            self.cache.put(network)
        elif new_cidrs:
            if self.call_driver('restart', network):
                self.cache.put(network)
        else:
            self.disable_dhcp_helper(network.id)

    @utils.synchronized('dhcp-agent')
    def network_create_end(self, context, payload):
        """Handle the network.create.end notification event."""
        network_id = payload['network']['id']
        self.enable_dhcp_helper(network_id)

    @utils.synchronized('dhcp-agent')
    def network_update_end(self, context, payload):
        """Handle the network.update.end notification event."""
        network_id = payload['network']['id']
        if payload['network']['admin_state_up']:
            self.enable_dhcp_helper(network_id)
        else:
            self.disable_dhcp_helper(network_id)

    @utils.synchronized('dhcp-agent')
    def network_delete_end(self, context, payload):
        """Handle the network.delete.end notification event."""
        self.disable_dhcp_helper(payload['network_id'])

    @utils.synchronized('dhcp-agent')
    def subnet_update_end(self, context, payload):
        """Handle the subnet.update.end notification event."""
        network_id = payload['subnet']['network_id']
        self.refresh_dhcp_helper(network_id)

    # Use the update handler for the subnet create event.
    subnet_create_end = subnet_update_end

    @utils.synchronized('dhcp-agent')
    def subnet_delete_end(self, context, payload):
        """Handle the subnet.delete.end notification event."""
        subnet_id = payload['subnet_id']
        network = self.cache.get_network_by_subnet_id(subnet_id)
        if network:
            self.refresh_dhcp_helper(network.id)

    @utils.synchronized('dhcp-agent')
    def port_update_end(self, context, payload):
        """Handle the port.update.end notification event."""
        updated_port = dhcp.DictModel(payload['port'])
        network = self.cache.get_network_by_id(updated_port.network_id)
        if network:
            driver_action = 'reload_allocations'
            if self._is_port_on_this_agent(updated_port):
                orig = self.cache.get_port_by_id(updated_port['id'])
                # assume IP change if not in cache
                old_ips = {i['ip_address'] for i in orig['fixed_ips'] or []}
                new_ips = {i['ip_address'] for i in updated_port['fixed_ips']}
                # A changed IP on the agent's own DHCP port requires a full
                # restart rather than a reload.
                if old_ips != new_ips:
                    driver_action = 'restart'
            self.cache.put_port(updated_port)
            self.call_driver(driver_action, network)

    def _is_port_on_this_agent(self, port):
        # True when the port's device_id matches this agent's DHCP device id
        # for the network (i.e. it is the agent's own DHCP port).
        thishost = utils.get_dhcp_agent_device_id(
            port['network_id'], self.conf.host)
        return port['device_id'] == thishost

    # Use the update handler for the port create event.
    port_create_end = port_update_end

    @utils.synchronized('dhcp-agent')
    def port_delete_end(self, context, payload):
        """Handle the port.delete.end notification event."""
        port = self.cache.get_port_by_id(payload['port_id'])
        if port:
            network = self.cache.get_network_by_id(port.network_id)
            self.cache.remove_port(port)
            self.call_driver('reload_allocations', network)

    def enable_isolated_metadata_proxy(self, network):
        # The proxy might work for either a single network
        # or all the networks connected via a router
        # to the one passed as a parameter
        kwargs = {'network_id': network.id}
        # When the metadata network is enabled, the proxy might
        # be started for the router attached to the network
        if self.conf.enable_metadata_network:
            router_ports = [port for port in network.ports
                            if (port.device_owner in
                                constants.ROUTER_INTERFACE_OWNERS)]
            if router_ports:
                # Multiple router ports should not be allowed
                if len(router_ports) > 1:
                    LOG.warning(_LW("%(port_num)d router ports found on the "
                                    "metadata access network. Only the port "
                                    "%(port_id)s, for router %(router_id)s "
                                    "will be considered"),
                                {'port_num': len(router_ports),
                                 'port_id': router_ports[0].id,
                                 'router_id': router_ports[0].device_id})
                kwargs = {'router_id': router_ports[0].device_id}
        metadata_driver.MetadataDriver.spawn_monitored_metadata_proxy(
            self._process_monitor, network.namespace, dhcp.METADATA_PORT,
            self.conf, **kwargs)

    def disable_isolated_metadata_proxy(self, network):
        metadata_driver.MetadataDriver.destroy_monitored_metadata_proxy(
            self._process_monitor, network.id, self.conf)
class DhcpPluginApi(object):
    """Agent side of the dhcp rpc API.

    This class implements the client side of an rpc interface. The server side
    of this interface can be found in
    neutron.api.rpc.handlers.dhcp_rpc.DhcpRpcCallback. For more information
    about changing rpc interfaces, see doc/source/devref/rpc_api.rst.

    API version history:
        1.0 - Initial version.
        1.1 - Added get_active_networks_info, create_dhcp_port,
              and update_dhcp_port methods.
    """

    def __init__(self, topic, context, use_namespaces, host):
        self.context = context
        self.host = host
        self.use_namespaces = use_namespaces
        target = oslo_messaging.Target(
            topic=topic,
            namespace=constants.RPC_NAMESPACE_DHCP_PLUGIN,
            version='1.0')
        self.client = n_rpc.get_client(target)

    def get_active_networks_info(self):
        """Make a remote process call to retrieve all network info."""
        cctxt = self.client.prepare(version='1.1')
        networks = cctxt.call(self.context, 'get_active_networks_info',
                              host=self.host)
        return [dhcp.NetModel(self.use_namespaces, n) for n in networks]

    def get_network_info(self, network_id):
        """Make a remote process call to retrieve network info."""
        cctxt = self.client.prepare()
        network = cctxt.call(self.context, 'get_network_info',
                             network_id=network_id, host=self.host)
        # Returns None when the server reports no such network.
        if network:
            return dhcp.NetModel(self.use_namespaces, network)

    def create_dhcp_port(self, port):
        """Make a remote process call to create the dhcp port."""
        cctxt = self.client.prepare(version='1.1')
        port = cctxt.call(self.context, 'create_dhcp_port',
                          port=port, host=self.host)
        if port:
            return dhcp.DictModel(port)

    def update_dhcp_port(self, port_id, port):
        """Make a remote process call to update the dhcp port."""
        cctxt = self.client.prepare(version='1.1')
        port = cctxt.call(self.context, 'update_dhcp_port',
                          port_id=port_id, port=port, host=self.host)
        if port:
            return dhcp.DictModel(port)

    def release_dhcp_port(self, network_id, device_id):
        """Make a remote process call to release the dhcp port."""
        cctxt = self.client.prepare()
        return cctxt.call(self.context, 'release_dhcp_port',
                          network_id=network_id, device_id=device_id,
                          host=self.host)
class NetworkCache(object):
    """Agent-side cache of the current network state.

    Keeps a primary map of network id -> network model plus two reverse
    lookup maps (subnet id -> network id and port id -> network id) so
    networks can be found by any of their contained objects.
    """

    def __init__(self):
        self.cache = {}
        self.subnet_lookup = {}
        self.port_lookup = {}

    def get_network_ids(self):
        """Return the ids of all cached networks."""
        return self.cache.keys()

    def get_network_by_id(self, network_id):
        """Return the cached network model, or None if unknown."""
        return self.cache.get(network_id)

    def get_network_by_subnet_id(self, subnet_id):
        """Return the network owning the given subnet, or None."""
        return self.cache.get(self.subnet_lookup.get(subnet_id))

    def get_network_by_port_id(self, port_id):
        """Return the network owning the given port, or None."""
        return self.cache.get(self.port_lookup.get(port_id))

    def put(self, network):
        """Insert (or replace) a network and rebuild its reverse lookups."""
        if network.id in self.cache:
            # Drop the stale entry first so its lookups are cleaned out.
            self.remove(self.cache[network.id])
        self.cache[network.id] = network
        self.subnet_lookup.update(
            (subnet.id, network.id) for subnet in network.subnets)
        self.port_lookup.update(
            (port.id, network.id) for port in network.ports)

    def remove(self, network):
        """Remove a network and all of its reverse lookup entries."""
        del self.cache[network.id]
        for subnet in network.subnets:
            self.subnet_lookup.pop(subnet.id)
        for port in network.ports:
            self.port_lookup.pop(port.id)

    def put_port(self, port):
        """Add a port to its network, replacing any port with the same id."""
        network = self.get_network_by_id(port.network_id)
        slot = next((index for index, known in enumerate(network.ports)
                     if known.id == port.id), None)
        if slot is None:
            network.ports.append(port)
        else:
            network.ports[slot] = port
        self.port_lookup[port.id] = network.id

    def remove_port(self, port):
        """Remove a port from its network and the port lookup map."""
        network = self.get_network_by_port_id(port.id)
        for slot, known in enumerate(network.ports):
            if known == port:
                del network.ports[slot]
                self.port_lookup.pop(port.id)
                break

    def get_port_by_id(self, port_id):
        """Return the cached port with the given id, or None."""
        network = self.get_network_by_port_id(port_id)
        if not network:
            return None
        for candidate in network.ports:
            if candidate.id == port_id:
                return candidate

    def get_state(self):
        """Return summary counts of cached networks, subnets and ports."""
        totals = {'networks': 0, 'subnets': 0, 'ports': 0}
        for net_id in self.get_network_ids():
            network = self.get_network_by_id(net_id)
            totals['networks'] += 1
            totals['subnets'] += len(network.subnets)
            totals['ports'] += len(network.ports)
        return totals
class DhcpAgentWithStateReport(DhcpAgent):
    """DhcpAgent variant that periodically reports its state to the server."""

    def __init__(self, host=None, conf=None):
        super(DhcpAgentWithStateReport, self).__init__(host=host, conf=conf)
        self.state_rpc = agent_rpc.PluginReportStateAPI(topics.REPORTS)
        # Static description of this agent sent with every heartbeat; the
        # 'configurations' dict is augmented with cache stats per report.
        self.agent_state = {
            'binary': 'neutron-dhcp-agent',
            'host': host,
            'availability_zone': self.conf.AGENT.availability_zone,
            'topic': topics.DHCP_AGENT,
            'configurations': {
                'dhcp_driver': self.conf.dhcp_driver,
                'use_namespaces': self.conf.use_namespaces,
                'dhcp_lease_duration': self.conf.dhcp_lease_duration,
                'log_agent_heartbeats': self.conf.AGENT.log_agent_heartbeats},
            'start_flag': True,
            'agent_type': constants.AGENT_TYPE_DHCP}
        report_interval = self.conf.AGENT.report_interval
        # The first report uses a blocking call; later ones may cast.
        self.use_call = True
        if report_interval:
            self.heartbeat = loopingcall.FixedIntervalLoopingCall(
                self._report_state)
            self.heartbeat.start(interval=report_interval)

    def _report_state(self):
        # Heartbeat body: merge current cache stats into the report and
        # send it; on first successful report, start the agent proper.
        try:
            self.agent_state.get('configurations').update(
                self.cache.get_state())
            ctx = context.get_admin_context_without_session()
            self.state_rpc.report_state(ctx, self.agent_state, self.use_call)
            self.use_call = False
        except AttributeError:
            # This means the server does not support report_state
            LOG.warn(_LW("Neutron server does not support state report."
                         " State report for this agent will be disabled."))
            self.heartbeat.stop()
            self.run()
            return
        except Exception:
            LOG.exception(_LE("Failed reporting state!"))
            return
        # 'start_flag' is only present on the first report; its removal
        # doubles as the "run once" trigger.
        if self.agent_state.pop('start_flag', None):
            self.run()

    def agent_updated(self, context, payload):
        """Handle the agent_updated notification event."""
        self.schedule_resync(_("Agent updated: %(payload)s") %
                             {"payload": payload})
        LOG.info(_LI("agent_updated by server side %s!"), payload)

    def after_start(self):
        # Unlike the base class, run() is deferred until the first state
        # report succeeds (see _report_state).
        LOG.info(_LI("DHCP agent started"))
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""configure script to get build parameters from user."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import errno
import os
import platform
import re
import subprocess
import sys
# pylint: disable=g-import-not-at-top
try:
from shutil import which
except ImportError:
from distutils.spawn import find_executable as which
# pylint: enable=g-import-not-at-top
# Default versions and install locations offered at the interactive prompts.
_DEFAULT_CUDA_VERSION = '9.0'
_DEFAULT_CUDNN_VERSION = '7'
_DEFAULT_NCCL_VERSION = '2.2'
_DEFAULT_CUDA_COMPUTE_CAPABILITIES = '3.5,7.0'
_DEFAULT_CUDA_PATH = '/usr/local/cuda'
# Alternative CUDA location tried on Linux when _DEFAULT_CUDA_PATH is absent.
_DEFAULT_CUDA_PATH_LINUX = '/opt/cuda'
_DEFAULT_CUDA_PATH_WIN = ('C:/Program Files/NVIDIA GPU Computing '
                          'Toolkit/CUDA/v%s' % _DEFAULT_CUDA_VERSION)
_DEFAULT_TENSORRT_PATH_LINUX = '/usr/lib/%s-linux-gnu' % platform.machine()
_TF_OPENCL_VERSION = '1.2'
_DEFAULT_COMPUTECPP_TOOLKIT_PATH = '/usr/local/computecpp'
_DEFAULT_TRISYCL_INCLUDE_DIR = '/usr/local/triSYCL/include'
# Android NDK major revisions officially supported by Bazel.
_SUPPORTED_ANDROID_NDK_VERSIONS = [10, 11, 12, 13, 14, 15, 16]
# How many times an interactive prompt is retried before raising.
_DEFAULT_PROMPT_ASK_ATTEMPTS = 10
# All generated files are anchored at the directory containing this script.
_TF_WORKSPACE_ROOT = os.path.abspath(os.path.dirname(__file__))
_TF_BAZELRC_FILENAME = '.tf_configure.bazelrc'
_TF_BAZELRC = os.path.join(_TF_WORKSPACE_ROOT, _TF_BAZELRC_FILENAME)
_TF_WORKSPACE = os.path.join(_TF_WORKSPACE_ROOT, 'WORKSPACE')
class UserInputError(Exception):
  """Raised when interactive input stays invalid after repeated attempts."""
  pass
def is_windows():
  """Return True when running on a native Windows Python."""
  return 'Windows' == platform.system()
def is_linux():
  """Return True when the host operating system is Linux."""
  system_name = platform.system()
  return system_name == 'Linux'
def is_macos():
  """Return True when the host OS is macOS (Darwin kernel)."""
  return platform.system() in ('Darwin',)
def is_ppc64le():
  """Return True on little-endian 64-bit POWER hardware."""
  return 'ppc64le' == platform.machine()
def is_cygwin():
  """Return True when running under a Cygwin-provided Python."""
  system_name = platform.system()
  return system_name.startswith('CYGWIN_NT')
def get_input(question):
  """Prompt the user with *question* and return the raw answer.

  Works on both Python 2 (raw_input) and Python 3 (input); returns the
  empty string when stdin hits EOF.
  """
  try:
    prompt_fn = raw_input  # Python 2
  except NameError:
    prompt_fn = input  # pylint: disable=bad-builtin
  try:
    return prompt_fn(question)
  except EOFError:
    return ''
def symlink_force(target, link_name):
  """Create a symlink, replacing any existing one (like 'ln -sf').

  Args:
    target: path the link should point at.
    link_name: path of the symlink to create.
  """
  try:
    os.symlink(target, link_name)
  except OSError as e:
    # Anything other than "already exists" is a real failure.
    if e.errno != errno.EEXIST:
      raise e
    os.remove(link_name)
    os.symlink(target, link_name)
def sed_in_place(filename, old, new):
  """Replace every occurrence of *old* with *new* inside *filename*.

  Args:
    filename: string for filename.
    old: string to replace.
    new: new string to replace to.
  """
  with open(filename, 'r') as src:
    contents = src.read()
  with open(filename, 'w') as dst:
    dst.write(contents.replace(old, new))
def write_to_bazelrc(line):
  """Append a single line to the generated .tf_configure.bazelrc file."""
  with open(_TF_BAZELRC, 'a') as rcfile:
    rcfile.write('%s\n' % line)
def write_action_env_to_bazelrc(var_name, var):
  """Record var_name=var as a bazel --action_env build setting."""
  entry = 'build --action_env %s="%s"' % (var_name, str(var))
  write_to_bazelrc(entry)
def run_shell(cmd, allow_non_zero=False):
  """Run *cmd* and return its stripped, UTF-8-decoded stdout.

  Args:
    cmd: command and arguments as a list.
    allow_non_zero: when True, a non-zero exit status returns whatever
      output was produced instead of raising CalledProcessError.

  Returns:
    The command's standard output, decoded and whitespace-stripped.
  """
  if not allow_non_zero:
    output = subprocess.check_output(cmd)
  else:
    try:
      output = subprocess.check_output(cmd)
    except subprocess.CalledProcessError as e:
      output = e.output
  return output.decode('UTF-8').strip()
def cygpath(path):
  """Return an absolute version of *path* using forward slashes only."""
  absolute = os.path.abspath(path)
  return absolute.replace('\\', '/')
def get_python_path(environ_cp, python_bin_path):
  """Return existing site-package directories for the given interpreter.

  Combines $PYTHONPATH entries with the interpreter's own
  site.getsitepackages() list (falling back to distutils when that call is
  unavailable), keeping only paths that exist on disk.

  Args:
    environ_cp: copy of os.environ.
    python_bin_path: path of the python interpreter to inspect.

  Returns:
    List of existing directory paths.
  """
  env_paths = []
  pythonpath = environ_cp.get('PYTHONPATH')
  if pythonpath:
    env_paths = pythonpath.split(':')
  try:
    site_paths = run_shell(
        [python_bin_path, '-c',
         'import site; print("\\n".join(site.getsitepackages()))']).split('\n')
  except subprocess.CalledProcessError:
    # Some interpreters lack site.getsitepackages(); fall back to distutils.
    site_paths = [run_shell(
        [python_bin_path, '-c',
         'from distutils.sysconfig import get_python_lib;'
         'print(get_python_lib())'])]
  return [p for p in set(env_paths + site_paths) if os.path.isdir(p)]
def get_python_major_version(python_bin_path):
  """Return the interpreter's major version digit as a string."""
  cmd = [python_bin_path, '-c', 'import sys; print(sys.version[0])']
  return run_shell(cmd)
def setup_python(environ_cp):
  """Setup python related env variables.

  Prompts (via env presets or interactively) for PYTHON_BIN_PATH and
  PYTHON_LIB_PATH, validates them, then records the results in environ_cp,
  the generated bazelrc, and tools/python_bin_path.sh.

  Args:
    environ_cp: copy of the os.environ.
  """
  # Get PYTHON_BIN_PATH, default is the current running python.
  default_python_bin_path = sys.executable
  ask_python_bin_path = ('Please specify the location of python. [Default is '
                         '%s]: ') % default_python_bin_path
  # Re-prompt until an existing, executable interpreter path is given.
  while True:
    python_bin_path = get_from_env_or_user_or_default(
        environ_cp, 'PYTHON_BIN_PATH', ask_python_bin_path,
        default_python_bin_path)
    # Check if the path is valid
    if os.path.isfile(python_bin_path) and os.access(
        python_bin_path, os.X_OK):
      break
    elif not os.path.exists(python_bin_path):
      print('Invalid python path: %s cannot be found.' % python_bin_path)
    else:
      print('%s is not executable.  Is it the python binary?' % python_bin_path)
    # Clear the env preset so the next loop iteration asks the user.
    environ_cp['PYTHON_BIN_PATH'] = ''

  # Convert python path to Windows style before checking lib and version
  if is_windows() or is_cygwin():
    python_bin_path = cygpath(python_bin_path)

  # Get PYTHON_LIB_PATH
  python_lib_path = environ_cp.get('PYTHON_LIB_PATH')
  if not python_lib_path:
    python_lib_paths = get_python_path(environ_cp, python_bin_path)
    if environ_cp.get('USE_DEFAULT_PYTHON_LIB_PATH') == '1':
      python_lib_path = python_lib_paths[0]
    else:
      print('Found possible Python library paths:\n  %s' %
            '\n  '.join(python_lib_paths))
      default_python_lib_path = python_lib_paths[0]
      python_lib_path = get_input(
          'Please input the desired Python library path to use.  '
          'Default is [%s]\n' % python_lib_paths[0])
      if not python_lib_path:
        python_lib_path = default_python_lib_path
    environ_cp['PYTHON_LIB_PATH'] = python_lib_path

  # NOTE(review): python_major_version is computed but never used below —
  # confirm whether it is needed.
  python_major_version = get_python_major_version(python_bin_path)

  # Convert python path to Windows style before writing into bazel.rc
  if is_windows() or is_cygwin():
    python_lib_path = cygpath(python_lib_path)

  # Set-up env variables used by python_configure.bzl
  write_action_env_to_bazelrc('PYTHON_BIN_PATH', python_bin_path)
  write_action_env_to_bazelrc('PYTHON_LIB_PATH', python_lib_path)
  write_to_bazelrc('build --python_path=\"%s"' % python_bin_path)
  environ_cp['PYTHON_BIN_PATH'] = python_bin_path

  # Write tools/python_bin_path.sh
  with open(os.path.join(
      _TF_WORKSPACE_ROOT, 'tools', 'python_bin_path.sh'), 'w') as f:
    f.write('export PYTHON_BIN_PATH="%s"' % python_bin_path)
def reset_tf_configure_bazelrc(workspace_path):
  """Reset the generated bazelrc and re-link it from the user's .bazelrc.

  Truncates _TF_BAZELRC, strips any stale references to it from
  workspace_path/.bazelrc, and appends a fresh import line.

  Args:
    workspace_path: directory containing the .bazelrc to rewrite.
  """
  # Start from an empty generated file.
  open(_TF_BAZELRC, 'w').close()
  bazelrc_path = os.path.join(workspace_path, '.bazelrc')

  existing_lines = []
  if os.path.exists(bazelrc_path):
    with open(bazelrc_path, 'r') as f:
      existing_lines = f.read().splitlines()

  # Keep everything except previous references to the generated file.
  kept_lines = [l for l in existing_lines if _TF_BAZELRC_FILENAME not in l]

  if is_windows():
    tf_bazelrc_path = _TF_BAZELRC.replace("\\", "/")
  else:
    tf_bazelrc_path = _TF_BAZELRC

  with open(bazelrc_path, 'w') as f:
    for line in kept_lines:
      f.write('%s\n' % line)
    f.write('import %s\n' % tf_bazelrc_path)
def cleanup_makefile():
  """Delete any leftover BUILD files from the Makefile build.

  These files could interfere with Bazel parsing.
  """
  download_dir = os.path.join(
      _TF_WORKSPACE_ROOT, 'tensorflow', 'contrib', 'makefile', 'downloads')
  if not os.path.isdir(download_dir):
    return
  for root, _, filenames in os.walk(download_dir):
    for name in filenames:
      if name.endswith('BUILD'):
        os.remove(os.path.join(root, name))
def get_var(environ_cp,
            var_name,
            query_item,
            enabled_by_default,
            question=None,
            yes_reply=None,
            no_reply=None):
  """Get boolean input from user.

  If var_name is set in the environment it is parsed as a boolean; otherwise
  the user is prompted about query_item until a valid answer (or the
  default, on empty input) is received.

  Args:
    environ_cp: copy of the os.environ.
    var_name: string for name of environment variable, e.g. "TF_NEED_HDFS".
    query_item: string for feature related to the variable, e.g. "Hadoop File
      System".
    enabled_by_default: boolean for default behavior.
    question: optional string for how to ask for user input.
    yes_reply: optional string for reply when feature is enabled.
    no_reply: optional string for reply when feature is disabled.

  Returns:
    boolean value of the variable.

  Raises:
    UserInputError: if the environment variable is set but cannot be
      interpreted as a boolean indicator.
  """
  if not question:
    question = 'Do you wish to build TensorFlow with %s support?' % query_item
  if not yes_reply:
    yes_reply = '%s support will be enabled for TensorFlow.' % query_item
  if not no_reply:
    no_reply = 'No %s' % yes_reply

  yes_reply += '\n'
  no_reply += '\n'
  question += ' [Y/n]: ' if enabled_by_default else ' [y/N]: '

  true_strings = ('1', 't', 'true', 'y', 'yes')
  false_strings = ('0', 'f', 'false', 'n', 'no')

  preset = environ_cp.get(var_name)
  if preset is not None:
    normalized = preset.strip().lower()
    if normalized in true_strings:
      return True
    if normalized in false_strings:
      return False
    # A set-but-unparseable env var is a scripting error; fail loudly
    # instead of looping forever.
    raise UserInputError(
        'Environment variable %s must be set as a boolean indicator.\n'
        'The following are accepted as TRUE : %s.\n'
        'The following are accepted as FALSE: %s.\n'
        'Current value is %s.' % (
            var_name, ', '.join(true_strings), ', '.join(false_strings),
            preset))

  while True:
    raw_answer = get_input(question)
    answer = raw_answer.strip().lower()
    if answer == 'y':
      print(yes_reply)
      return True
    if answer == 'n':
      print(no_reply)
      return False
    if not answer:
      # Empty input selects the default.
      print(yes_reply if enabled_by_default else no_reply)
      return enabled_by_default
    print('Invalid selection: %s' % raw_answer)
def set_build_var(environ_cp, var_name, query_item, option_name,
                  enabled_by_default, bazel_config_name=None):
  """Ask whether query_item is enabled and define option_name accordingly.

  The answer (default applied on empty input) is stored back into
  environ_cp and, when enabled, written as a --define to .bazelrc.

  Args:
    environ_cp: copy of the os.environ.
    var_name: string for name of environment variable, e.g. "TF_NEED_HDFS".
    query_item: string for feature related to the variable, e.g. "Hadoop File
      System".
    option_name: string for option to define in .bazelrc.
    enabled_by_default: boolean for default behavior.
    bazel_config_name: Name for Bazel --config argument to enable build
      feature.
  """
  enabled = get_var(environ_cp, var_name, query_item, enabled_by_default)
  var = str(int(enabled))
  environ_cp[var_name] = var
  if var == '1':
    write_to_bazelrc('build --define %s=true' % option_name)
  elif bazel_config_name is not None:
    # TODO(mikecase): Migrate all users of configure.py to use --config Bazel
    # options and not to set build configs through environment variables.
    write_to_bazelrc('build:%s --define %s=true'
                     % (bazel_config_name, option_name))
def set_action_env_var(environ_cp,
                       var_name,
                       query_item,
                       enabled_by_default,
                       question=None,
                       yes_reply=None,
                       no_reply=None):
  """Prompt for a boolean setting and persist it as a bazel action_env.

  The answer (default applied on empty input) is written to .bazelrc and
  stored back into environ_cp as '0'/'1'.

  Args:
    environ_cp: copy of the os.environ.
    var_name: string for name of environment variable, e.g. "TF_NEED_HDFS".
    query_item: string for feature related to the variable, e.g. "Hadoop File
      System".
    enabled_by_default: boolean for default behavior.
    question: optional string for how to ask for user input.
    yes_reply: optional string for reply when feature is enabled.
    no_reply: optional string for reply when feature is disabled.
  """
  enabled = get_var(environ_cp, var_name, query_item, enabled_by_default,
                    question, yes_reply, no_reply)
  var = int(enabled)
  write_action_env_to_bazelrc(var_name, var)
  environ_cp[var_name] = str(var)
def convert_version_to_int(version):
  """Convert a version number to a integer that can be used to compare.

  Version strings of the form X.YZ and X.Y.Z-xxxxx are supported. The
  'xxxxx' part, for instance 'homebrew' on OS/X, is ignored.

  Args:
    version: a version to be converted

  Returns:
    An integer if converted successfully, otherwise return None.
  """
  segments = version.split('-')[0].split('.')
  if not all(seg.isdigit() for seg in segments):
    return None
  # Zero-pad each segment to three digits so e.g. 1.10.0 compares above
  # 1.9.0 numerically.
  return int(''.join('%03d' % int(seg) for seg in segments))
def check_bazel_version(min_version):
  """Check installed bazel version is at least min_version.

  Exits the process when bazel is missing or older than min_version.

  Args:
    min_version: string for minimum bazel version.

  Returns:
    The bazel version detected.
  """
  if which('bazel') is None:
    print('Cannot find bazel. Please install bazel.')
    sys.exit(0)
  version_output = run_shell(['bazel', '--batch', '--bazelrc=/dev/null', 'version'])
  # 'bazel version' prints several lines; the 'Build label:' line carries
  # the version string.
  detected = version_output
  for line in version_output.split('\n'):
    if 'Build label: ' in line:
      detected = line.split('Build label: ')[1]
      break
  required_int = convert_version_to_int(min_version)
  detected_int = convert_version_to_int(detected)
  # Check if current bazel version can be detected properly.
  if not detected_int:
    print('WARNING: current bazel installation is not a release version.')
    print('Make sure you are running at least bazel %s' % min_version)
    return detected
  print('You have bazel %s installed.' % detected)
  if detected_int < required_int:
    print('Please upgrade your bazel installation to version %s or higher to '
          'build TensorFlow!' % min_version)
    sys.exit(0)
  return detected
def set_cc_opt_flags(environ_cp):
  """Set up architecture-dependent optimization flags.

  Also append CC optimization flags to bazel.rc..

  Args:
    environ_cp: copy of the os.environ.
  """
  if is_ppc64le():
    # gcc on ppc64le does not support -march, use mcpu instead
    default_flags = '-mcpu=native'
  elif is_windows():
    default_flags = '/arch:AVX'
  else:
    default_flags = '-march=haswell'  # ROCm supports Haswell or later architecture
  question = ('Please specify optimization flags to use during compilation when'
              ' bazel option "--config=opt" is specified [Default is %s]: '
             ) % default_flags
  chosen_flags = get_from_env_or_user_or_default(environ_cp, 'CC_OPT_FLAGS',
                                                 question, default_flags)
  for flag in chosen_flags.split():
    write_to_bazelrc('build:opt --copt=%s' % flag)
  # It should be safe on the same build host.
  if not is_ppc64le() and not is_windows():
    write_to_bazelrc('build:opt --host_copt=-march=native')
  write_to_bazelrc('build:opt --define with_default_optimizations=true')
def set_tf_cuda_clang(environ_cp):
  """Ask whether clang should compile CUDA code; record TF_CUDA_CLANG.

  Args:
    environ_cp: copy of the os.environ.
  """
  set_action_env_var(
      environ_cp,
      'TF_CUDA_CLANG',
      None,
      False,
      question='Do you want to use clang as CUDA compiler?',
      yes_reply='Clang will be used as CUDA compiler.',
      no_reply='nvcc will be used as CUDA compiler.')
def set_tf_download_clang(environ_cp):
  """Ask whether to fetch a prebuilt clang; record TF_DOWNLOAD_CLANG."""
  set_action_env_var(
      environ_cp,
      'TF_DOWNLOAD_CLANG',
      None,
      False,
      question='Do you wish to download a fresh release of clang? (Experimental)',
      yes_reply='Clang will be downloaded and used to compile tensorflow.',
      no_reply='Clang will not be downloaded.')
def get_from_env_or_user_or_default(environ_cp, var_name, ask_for_var,
                                    var_default):
  """Get var_name either from env, or user or default.

  If var_name has been set as environment variable, use the preset value, else
  ask for user input. If no input is provided, the default is used.

  Args:
    environ_cp: copy of the os.environ.
    var_name: string for name of environment variable, e.g. "TF_NEED_HDFS".
    ask_for_var: string for how to ask for user input.
    var_default: default value string.

  Returns:
    string value for var_name
  """
  value = environ_cp.get(var_name)
  if value:
    return value
  value = get_input(ask_for_var)
  print('\n')
  return value or var_default
def set_clang_cuda_compiler_path(environ_cp):
  """Prompt for an existing clang binary; record CLANG_CUDA_COMPILER_PATH."""
  default_clang_path = which('clang') or ''
  ask_clang_path = ('Please specify which clang should be used as device and '
                    'host compiler. [Default is %s]: ') % default_clang_path
  while True:
    candidate = get_from_env_or_user_or_default(
        environ_cp, 'CLANG_CUDA_COMPILER_PATH', ask_clang_path,
        default_clang_path)
    if os.path.exists(candidate):
      break
    # Reset and retry
    print('Invalid clang path: %s cannot be found.' % candidate)
    environ_cp['CLANG_CUDA_COMPILER_PATH'] = ''

  # Set CLANG_CUDA_COMPILER_PATH
  environ_cp['CLANG_CUDA_COMPILER_PATH'] = candidate
  write_action_env_to_bazelrc('CLANG_CUDA_COMPILER_PATH', candidate)
def prompt_loop_or_load_from_env(
    environ_cp,
    var_name,
    var_default,
    ask_for_var,
    check_success,
    error_msg,
    suppress_default_error=False,
    n_ask_attempts=_DEFAULT_PROMPT_ASK_ATTEMPTS
):
  """Loop over user prompts for an ENV param until receiving a valid response.

  For the env param var_name, read from the environment or verify user input
  until receiving valid input. When done, set var_name in the environ_cp to its
  new value.

  Args:
    environ_cp: (Dict) copy of the os.environ.
    var_name: (String) string for name of environment variable, e.g. "TF_MYVAR".
    var_default: (String) default value string.
    ask_for_var: (String) string for how to ask for user input.
    check_success: (Function) function that takes one argument and returns a
      boolean. Should return True if the value provided is considered valid. May
      contain a complex error message if error_msg does not provide enough
      information. In that case, set suppress_default_error to True.
    error_msg: (String) String with one and only one '%s'. Formatted with each
      invalid response upon check_success(input) failure.
    suppress_default_error: (Bool) Suppress the above error message in favor of
      one from the check_success function.
    n_ask_attempts: (Integer) Number of times to query for valid input before
      raising an error and quitting.

  Returns:
    [String] The value of var_name after querying for input.

  Raises:
    UserInputError: if a query has been attempted n_ask_attempts times without
      success, assume that the user has made a scripting error, and will
      continue to provide invalid input. Raise the error to avoid infinitely
      looping.
  """
  # An environment preset takes precedence over var_default as the
  # default shown in the prompt.
  default = environ_cp.get(var_name) or var_default
  full_query = '%s [Default is %s]: ' % (
      ask_for_var,
      default,
  )

  for _ in range(n_ask_attempts):
    val = get_from_env_or_user_or_default(environ_cp,
                                          var_name,
                                          full_query,
                                          default)
    if check_success(val):
      break
    if not suppress_default_error:
      print(error_msg % val)
    # Clear any env preset so the next attempt prompts the user instead of
    # re-reading the same invalid value.
    environ_cp[var_name] = ''
  else:
    # for/else: reached only when every attempt failed (loop never broke).
    raise UserInputError('Invalid %s setting was provided %d times in a row. '
                         'Assuming to be a scripting mistake.' %
                         (var_name, n_ask_attempts))

  environ_cp[var_name] = val
  return val
def create_android_ndk_rule(environ_cp):
  """Set ANDROID_NDK_HOME and write Android NDK WORKSPACE rule."""
  if is_windows() or is_cygwin():
    default_ndk_path = cygpath('%s/Android/Sdk/ndk-bundle' %
                               environ_cp['APPDATA'])
  elif is_macos():
    default_ndk_path = '%s/library/Android/Sdk/ndk-bundle' % environ_cp['HOME']
  else:
    default_ndk_path = '%s/Android/Sdk/ndk-bundle' % environ_cp['HOME']

  def _has_source_properties(path):
    # A valid NDK install carries a source.properties file at its root.
    return (os.path.exists(path) and
            os.path.exists(os.path.join(path, 'source.properties')))

  ndk_home = prompt_loop_or_load_from_env(
      environ_cp,
      var_name='ANDROID_NDK_HOME',
      var_default=default_ndk_path,
      ask_for_var='Please specify the home path of the Android NDK to use.',
      check_success=_has_source_properties,
      error_msg=('The path %s or its child file "source.properties" '
                 'does not exist.')
  )
  write_action_env_to_bazelrc('ANDROID_NDK_HOME', ndk_home)
  write_action_env_to_bazelrc('ANDROID_NDK_API_LEVEL',
                              check_ndk_level(ndk_home))
def create_android_sdk_rule(environ_cp):
  """Set Android variables and write Android SDK WORKSPACE rule.

  Prompts (or reads env presets) for the SDK home, an installed API level
  and a build-tools version, validating each against the directory layout
  on disk, then records all three as bazel action_env settings.
  """
  # Platform-specific default SDK location.
  if is_windows() or is_cygwin():
    default_sdk_path = cygpath('%s/Android/Sdk' % environ_cp['APPDATA'])
  elif is_macos():
    default_sdk_path = '%s/library/Android/Sdk' % environ_cp['HOME']
  else:
    default_sdk_path = '%s/Android/Sdk' % environ_cp['HOME']

  def valid_sdk_path(path):
    # A usable SDK has both 'platforms' and 'build-tools' subdirectories.
    return (os.path.exists(path) and
            os.path.exists(os.path.join(path, 'platforms')) and
            os.path.exists(os.path.join(path, 'build-tools')))

  android_sdk_home_path = prompt_loop_or_load_from_env(
      environ_cp,
      var_name='ANDROID_SDK_HOME',
      var_default=default_sdk_path,
      ask_for_var='Please specify the home path of the Android SDK to use.',
      check_success=valid_sdk_path,
      error_msg=('Either %s does not exist, or it does not contain the '
                 'subdirectories "platforms" and "build-tools".'))

  # Offer the installed API levels ('android-<N>' directory names with the
  # prefix stripped); default to the newest.
  platforms = os.path.join(android_sdk_home_path, 'platforms')
  api_levels = sorted(os.listdir(platforms))
  api_levels = [x.replace('android-', '') for x in api_levels]

  def valid_api_level(api_level):
    return os.path.exists(os.path.join(android_sdk_home_path,
                                       'platforms',
                                       'android-' + api_level))

  android_api_level = prompt_loop_or_load_from_env(
      environ_cp,
      var_name='ANDROID_API_LEVEL',
      var_default=api_levels[-1],
      ask_for_var=('Please specify the Android SDK API level to use. '
                   '[Available levels: %s]') % api_levels,
      check_success=valid_api_level,
      error_msg='Android-%s is not present in the SDK path.')

  # Same pattern for build-tools versions; default to the newest installed.
  build_tools = os.path.join(android_sdk_home_path, 'build-tools')
  versions = sorted(os.listdir(build_tools))

  def valid_build_tools(version):
    return os.path.exists(os.path.join(android_sdk_home_path,
                                       'build-tools',
                                       version))

  android_build_tools_version = prompt_loop_or_load_from_env(
      environ_cp,
      var_name='ANDROID_BUILD_TOOLS_VERSION',
      var_default=versions[-1],
      ask_for_var=('Please specify an Android build tools version to use. '
                   '[Available versions: %s]') % versions,
      check_success=valid_build_tools,
      error_msg=('The selected SDK does not have build-tools version %s '
                 'available.'))

  write_action_env_to_bazelrc('ANDROID_BUILD_TOOLS_VERSION',
                              android_build_tools_version)
  write_action_env_to_bazelrc('ANDROID_SDK_API_LEVEL',
                              android_api_level)
  write_action_env_to_bazelrc('ANDROID_SDK_HOME',
                              android_sdk_home_path)
def check_ndk_level(android_ndk_home_path):
  """Return the major revision of the NDK at the given path.

  Parses source.properties; warns (but still returns the level) when the
  revision is outside the Bazel-supported range.
  """
  properties_path = '%s/source.properties' % android_ndk_home_path
  if is_windows() or is_cygwin():
    properties_path = cygpath(properties_path)
  with open(properties_path, 'r') as f:
    contents = f.read()

  match = re.search(r'Pkg.Revision = (\d+)', contents)
  if not match:
    raise Exception('Unable to parse NDK revision.')
  ndk_api_level = match.group(1)
  if int(ndk_api_level) not in _SUPPORTED_ANDROID_NDK_VERSIONS:
    print('WARNING: The API level of the NDK in %s is %s, which is not '
          'supported by Bazel (officially supported versions: %s). Please use '
          'another version. Compiling Android targets may result in confusing '
          'errors.\n' % (android_ndk_home_path, ndk_api_level,
                         _SUPPORTED_ANDROID_NDK_VERSIONS))
  return ndk_api_level
def set_gcc_host_compiler_path(environ_cp):
  """Prompt for the gcc nvcc should use; record GCC_HOST_COMPILER_PATH."""
  default_gcc = which('gcc') or ''
  cuda_bin_symlink = '%s/bin/gcc' % environ_cp.get('CUDA_TOOLKIT_PATH')
  if os.path.islink(cuda_bin_symlink):
    # os.readlink is only available in linux
    default_gcc = os.path.realpath(cuda_bin_symlink)

  gcc_path = prompt_loop_or_load_from_env(
      environ_cp,
      var_name='GCC_HOST_COMPILER_PATH',
      var_default=default_gcc,
      ask_for_var=
      'Please specify which gcc should be used by nvcc as the host compiler.',
      check_success=os.path.exists,
      error_msg='Invalid gcc path. %s cannot be found.',
  )
  write_action_env_to_bazelrc('GCC_HOST_COMPILER_PATH', gcc_path)
def reformat_version_sequence(version_str, sequence_count):
  """Reformat the version string to have the given number of sequences.

  For example:
  Given (7, 2) -> 7.0
        (7.0.1, 2) -> 7.0
        (5, 1) -> 5
        (5.0.3.2, 1) -> 5

  Args:
    version_str: String, the version string.
    sequence_count: int, an integer.

  Returns:
    string, reformatted version string.
  """
  segments = version_str.split('.')
  # Pad with '0' segments when the string is shorter than requested
  # (multiplying by a non-positive count adds nothing).
  segments += ['0'] * (sequence_count - len(segments))
  return '.'.join(segments[:sequence_count])
def set_tf_cuda_version(environ_cp):
  """Set CUDA_TOOLKIT_PATH and TF_CUDA_VERSION.

  Prompts (up to _DEFAULT_PROMPT_ASK_ATTEMPTS times) for the CUDA version
  and toolkit location, validating that the CUDA runtime library exists
  there, then records both in environ_cp and the generated bazelrc.

  Args:
    environ_cp: copy of the os.environ.

  Raises:
    UserInputError: if no valid setting is provided within the allowed
      number of attempts.
  """
  ask_cuda_version = (
      'Please specify the CUDA SDK version you want to use. '
      '[Leave empty to default to CUDA %s]: ') % _DEFAULT_CUDA_VERSION

  for _ in range(_DEFAULT_PROMPT_ASK_ATTEMPTS):
    # Configure the Cuda SDK version to use.
    tf_cuda_version = get_from_env_or_user_or_default(
        environ_cp, 'TF_CUDA_VERSION', ask_cuda_version, _DEFAULT_CUDA_VERSION)
    tf_cuda_version = reformat_version_sequence(str(tf_cuda_version), 2)

    # Find out where the CUDA toolkit is installed
    default_cuda_path = _DEFAULT_CUDA_PATH
    if is_windows() or is_cygwin():
      default_cuda_path = cygpath(
          environ_cp.get('CUDA_PATH', _DEFAULT_CUDA_PATH_WIN))
    elif is_linux():
      # If the default doesn't exist, try an alternative default.
      if (not os.path.exists(default_cuda_path)
         ) and os.path.exists(_DEFAULT_CUDA_PATH_LINUX):
        default_cuda_path = _DEFAULT_CUDA_PATH_LINUX
    ask_cuda_path = ('Please specify the location where CUDA %s toolkit is'
                     ' installed. Refer to README.md for more details. '
                     '[Default is %s]: ') % (tf_cuda_version, default_cuda_path)
    cuda_toolkit_path = get_from_env_or_user_or_default(
        environ_cp, 'CUDA_TOOLKIT_PATH', ask_cuda_path, default_cuda_path)
    if is_windows() or is_cygwin():
      cuda_toolkit_path = cygpath(cuda_toolkit_path)

    # Candidate locations of the CUDA runtime library, per platform.
    if is_windows():
      cuda_rt_lib_paths = ['lib/x64/cudart.lib']
    elif is_linux():
      cuda_rt_lib_paths = ['%s/libcudart.so.%s' % (x, tf_cuda_version)
                           for x in ['lib64', 'lib/x86_64-linux-gnu']]
    elif is_macos():
      cuda_rt_lib_paths = ['lib/libcudart.%s.dylib' % tf_cuda_version]
    else:
      # BUG FIX: previously unhandled platforms left cuda_rt_lib_paths
      # unbound, raising UnboundLocalError below.
      cuda_rt_lib_paths = []

    cuda_toolkit_paths_full = [
        os.path.join(cuda_toolkit_path, x) for x in cuda_rt_lib_paths]
    if any(os.path.exists(x) for x in cuda_toolkit_paths_full):
      break

    # Reset and retry
    # BUG FIX: this print previously referenced the undefined name
    # 'cuda_toolkit_path_full' (the defined name is the plural list),
    # crashing with NameError whenever the given path was invalid.
    print('Invalid path to CUDA %s toolkit. %s cannot be found' %
          (tf_cuda_version, ', '.join(cuda_toolkit_paths_full)))
    environ_cp['TF_CUDA_VERSION'] = ''
    environ_cp['CUDA_TOOLKIT_PATH'] = ''

  else:
    raise UserInputError('Invalid TF_CUDA_SETTING setting was provided %d '
                         'times in a row. Assuming to be a scripting mistake.' %
                         _DEFAULT_PROMPT_ASK_ATTEMPTS)

  # Set CUDA_TOOLKIT_PATH and TF_CUDA_VERSION
  environ_cp['CUDA_TOOLKIT_PATH'] = cuda_toolkit_path
  write_action_env_to_bazelrc('CUDA_TOOLKIT_PATH', cuda_toolkit_path)
  environ_cp['TF_CUDA_VERSION'] = tf_cuda_version
  write_action_env_to_bazelrc('TF_CUDA_VERSION', tf_cuda_version)
def set_tf_cudnn_version(environ_cp):
  """Set CUDNN_INSTALL_PATH and TF_CUDNN_VERSION.

  Prompts (up to _DEFAULT_PROMPT_ASK_ATTEMPTS times) for the cuDNN version
  and install location, validating that the cuDNN library file exists,
  then records both in environ_cp and the generated bazelrc.

  Raises:
    UserInputError: if no valid setting is provided within the allowed
      number of attempts.
  """
  ask_cudnn_version = (
      'Please specify the cuDNN version you want to use. '
      '[Leave empty to default to cuDNN %s.0]: ') % _DEFAULT_CUDNN_VERSION

  for _ in range(_DEFAULT_PROMPT_ASK_ATTEMPTS):
    tf_cudnn_version = get_from_env_or_user_or_default(
        environ_cp, 'TF_CUDNN_VERSION', ask_cudnn_version,
        _DEFAULT_CUDNN_VERSION)
    tf_cudnn_version = reformat_version_sequence(str(tf_cudnn_version), 1)

    # cuDNN usually lives inside the CUDA toolkit directory, so use that
    # as the default.
    default_cudnn_path = environ_cp.get('CUDA_TOOLKIT_PATH')
    ask_cudnn_path = (r'Please specify the location where cuDNN %s library is '
                      'installed. Refer to README.md for more details. [Default'
                      ' is %s]: ') % (tf_cudnn_version, default_cudnn_path)
    cudnn_install_path = get_from_env_or_user_or_default(
        environ_cp, 'CUDNN_INSTALL_PATH', ask_cudnn_path, default_cudnn_path)

    # Result returned from "read" will be used unexpanded. That make "~"
    # unusable. Going through one more level of expansion to handle that.
    cudnn_install_path = os.path.realpath(
        os.path.expanduser(cudnn_install_path))
    if is_windows() or is_cygwin():
      cudnn_install_path = cygpath(cudnn_install_path)

    # Primary and alternative library locations, per platform.
    if is_windows():
      cuda_dnn_lib_path = 'lib/x64/cudnn.lib'
      cuda_dnn_lib_alt_path = 'lib/x64/cudnn.lib'
    elif is_linux():
      cuda_dnn_lib_path = 'lib64/libcudnn.so.%s' % tf_cudnn_version
      cuda_dnn_lib_alt_path = 'libcudnn.so.%s' % tf_cudnn_version
    elif is_macos():
      cuda_dnn_lib_path = 'lib/libcudnn.%s.dylib' % tf_cudnn_version
      cuda_dnn_lib_alt_path = 'libcudnn.%s.dylib' % tf_cudnn_version

    cuda_dnn_lib_path_full = os.path.join(cudnn_install_path, cuda_dnn_lib_path)
    cuda_dnn_lib_alt_path_full = os.path.join(cudnn_install_path,
                                              cuda_dnn_lib_alt_path)
    if os.path.exists(cuda_dnn_lib_path_full) or os.path.exists(
        cuda_dnn_lib_alt_path_full):
      break

    # Try another alternative for Linux
    # On Linux, also consult the dynamic-linker cache for a matching
    # libcudnn.so of the requested version.
    if is_linux():
      ldconfig_bin = which('ldconfig') or '/sbin/ldconfig'
      cudnn_path_from_ldconfig = run_shell([ldconfig_bin, '-p'])
      cudnn_path_from_ldconfig = re.search('.*libcudnn.so .* => (.*)',
                                           cudnn_path_from_ldconfig)
      if cudnn_path_from_ldconfig:
        cudnn_path_from_ldconfig = cudnn_path_from_ldconfig.group(1)
        if os.path.exists('%s.%s' % (cudnn_path_from_ldconfig,
                                     tf_cudnn_version)):
          cudnn_install_path = os.path.dirname(cudnn_path_from_ldconfig)
          break

    # Reset and Retry
    print(
        'Invalid path to cuDNN %s toolkit. None of the following files can be '
        'found:' % tf_cudnn_version)
    print(cuda_dnn_lib_path_full)
    print(cuda_dnn_lib_alt_path_full)
    if is_linux():
      # NOTE(review): cudnn_path_from_ldconfig may be None here when the
      # ldconfig regex did not match, printing "None.<version>" — confirm
      # whether that is intended.
      print('%s.%s' % (cudnn_path_from_ldconfig, tf_cudnn_version))

    environ_cp['TF_CUDNN_VERSION'] = ''
  else:
    raise UserInputError('Invalid TF_CUDNN setting was provided %d '
                         'times in a row. Assuming to be a scripting mistake.' %
                         _DEFAULT_PROMPT_ASK_ATTEMPTS)

  # Set CUDNN_INSTALL_PATH and TF_CUDNN_VERSION
  environ_cp['CUDNN_INSTALL_PATH'] = cudnn_install_path
  write_action_env_to_bazelrc('CUDNN_INSTALL_PATH', cudnn_install_path)
  environ_cp['TF_CUDNN_VERSION'] = tf_cudnn_version
  write_action_env_to_bazelrc('TF_CUDNN_VERSION', tf_cudnn_version)
def is_cuda_compatible(lib, cuda_ver, cudnn_ver):
  """Check compatibility between given library and cudnn/cudart libraries.

  Runs ldd on *lib*; when it links against libcudnn/libcudart, the linked
  version must equal cudnn_ver/cuda_ver. A library with no CUDA or cuDNN
  dependency is considered compatible.

  Args:
    lib: path of the shared library to inspect.
    cuda_ver: integer-encoded required cudart version.
    cudnn_ver: integer-encoded required cudnn version.

  Returns:
    True when lib has no mismatched cudnn/cudart dependency.
  """
  ldd_bin = which('ldd') or '/usr/bin/ldd'
  ldd_lines = run_shell([ldd_bin, lib], True).split(os.linesep)
  cudnn_pattern = re.compile('.*libcudnn.so\\.?(.*) =>.*$')
  cuda_pattern = re.compile('.*libcudart.so\\.?(.*) =>.*$')
  cudnn_dep = None
  cudart_dep = None
  # Assume no cudnn/cudart dependency until a matching ldd line is seen.
  cudnn_ok = True
  cuda_ok = True
  for line in ldd_lines:
    if 'libcudnn.so' in line:
      cudnn_dep = cudnn_pattern.search(line)
      cudnn_ok = False
    elif 'libcudart.so' in line:
      cudart_dep = cuda_pattern.search(line)
      cuda_ok = False
  # When a versioned dependency was found, replace the match object with
  # the parsed integer version; an unversioned match stays a match object
  # and therefore compares unequal below (i.e. incompatible).
  if cudnn_dep and len(cudnn_dep.group(1)):
    cudnn_dep = convert_version_to_int(cudnn_dep.group(1))
  if cudart_dep and len(cudart_dep.group(1)):
    cudart_dep = convert_version_to_int(cudart_dep.group(1))
  if cudnn_dep is not None:
    cudnn_ok = (cudnn_dep == cudnn_ver)
  if cudart_dep is not None:
    cuda_ok = (cudart_dep == cuda_ver)
  return cudnn_ok and cuda_ok
def set_tf_tensorrt_install_path(environ_cp):
  """Set TENSORRT_INSTALL_PATH and TF_TENSORRT_VERSION.

  Adapted from code contributed by Sami Kama (https://github.com/samikama).

  Args:
    environ_cp: copy of the os.environ.

  Raises:
    ValueError: if this method was called under non-Linux platform.
    UserInputError: if user has provided invalid input multiple times.
  """
  if not is_linux():
    raise ValueError('Currently TensorRT is only supported on Linux platform.')

  # Ask user whether to add TensorRT support.
  if str(int(get_var(environ_cp, 'TF_NEED_TENSORRT', 'TensorRT',
                     False))) != '1':
    return

  # Prompt up to _DEFAULT_PROMPT_ASK_ATTEMPTS times; each attempt either
  # breaks out with a valid (path, version) pair or falls through to the
  # for/else, which raises.
  for _ in range(_DEFAULT_PROMPT_ASK_ATTEMPTS):
    ask_tensorrt_path = (r'Please specify the location where TensorRT is '
                         'installed. [Default is %s]:') % (
                             _DEFAULT_TENSORRT_PATH_LINUX)
    trt_install_path = get_from_env_or_user_or_default(
        environ_cp, 'TENSORRT_INSTALL_PATH', ask_tensorrt_path,
        _DEFAULT_TENSORRT_PATH_LINUX)

    # Result returned from "read" will be used unexpanded. That make "~"
    # unusable. Going through one more level of expansion to handle that.
    trt_install_path = os.path.realpath(os.path.expanduser(trt_install_path))

    def find_libs(search_path):
      """Search for libnvinfer.so in "search_path"."""
      fl = set()
      if os.path.exists(search_path) and os.path.isdir(search_path):
        fl.update([
            os.path.realpath(os.path.join(search_path, x))
            for x in os.listdir(search_path)
            if 'libnvinfer.so' in x
        ])
      return fl

    # Collect candidate libnvinfer libraries from the root, lib and lib64
    # subdirectories of the user-supplied install path.
    possible_files = find_libs(trt_install_path)
    possible_files.update(find_libs(os.path.join(trt_install_path, 'lib')))
    possible_files.update(find_libs(os.path.join(trt_install_path, 'lib64')))
    cuda_ver = convert_version_to_int(environ_cp['TF_CUDA_VERSION'])
    cudnn_ver = convert_version_to_int(environ_cp['TF_CUDNN_VERSION'])
    nvinfer_pattern = re.compile('.*libnvinfer.so.?(.*)$')
    # [numeric version, version string, library path] of the best match so far.
    highest_ver = [0, None, None]

    # Keep the highest-versioned candidate that links against the selected
    # CUDA/cuDNN installation.
    for lib_file in possible_files:
      if is_cuda_compatible(lib_file, cuda_ver, cudnn_ver):
        matches = nvinfer_pattern.search(lib_file)
        if len(matches.groups()) == 0:
          continue
        ver_str = matches.group(1)
        # A bare "libnvinfer.so" (no version suffix) counts as version 0.
        ver = convert_version_to_int(ver_str) if len(ver_str) else 0
        if ver > highest_ver[0]:
          highest_ver = [ver, ver_str, lib_file]
    if highest_ver[1] is not None:
      trt_install_path = os.path.dirname(highest_ver[2])
      tf_tensorrt_version = highest_ver[1]
      break

    # Try another alternative from ldconfig.
    ldconfig_bin = which('ldconfig') or '/sbin/ldconfig'
    ldconfig_output = run_shell([ldconfig_bin, '-p'])
    search_result = re.search('.*libnvinfer.so\\.?([0-9.]*).* => (.*)',
                              ldconfig_output)
    if search_result:
      libnvinfer_path_from_ldconfig = search_result.group(2)
      if os.path.exists(libnvinfer_path_from_ldconfig):
        if is_cuda_compatible(libnvinfer_path_from_ldconfig, cuda_ver,
                              cudnn_ver):
          trt_install_path = os.path.dirname(libnvinfer_path_from_ldconfig)
          tf_tensorrt_version = search_result.group(1)
          break

    # Reset and Retry
    if possible_files:
      # Libraries were found but none was compatible with the chosen
      # CUDA/cuDNN versions.
      print('TensorRT libraries found in one the following directories',
            'are not compatible with selected cuda and cudnn installations')
      print(trt_install_path)
      print(os.path.join(trt_install_path, 'lib'))
      print(os.path.join(trt_install_path, 'lib64'))
      if search_result:
        print(libnvinfer_path_from_ldconfig)
    else:
      print(
          'Invalid path to TensorRT. None of the following files can be found:')
      print(trt_install_path)
      print(os.path.join(trt_install_path, 'lib'))
      print(os.path.join(trt_install_path, 'lib64'))
      if search_result:
        print(libnvinfer_path_from_ldconfig)

  else:
    # Loop exhausted without a break: the user never provided a usable path.
    raise UserInputError('Invalid TF_TENSORRT setting was provided %d '
                         'times in a row. Assuming to be a scripting mistake.' %
                         _DEFAULT_PROMPT_ASK_ATTEMPTS)

  # Set TENSORRT_INSTALL_PATH and TF_TENSORRT_VERSION
  environ_cp['TENSORRT_INSTALL_PATH'] = trt_install_path
  write_action_env_to_bazelrc('TENSORRT_INSTALL_PATH', trt_install_path)
  environ_cp['TF_TENSORRT_VERSION'] = tf_tensorrt_version
  write_action_env_to_bazelrc('TF_TENSORRT_VERSION', tf_tensorrt_version)
def set_tf_nccl_install_path(environ_cp):
  """Set NCCL_INSTALL_PATH and TF_NCCL_VERSION.

  Args:
    environ_cp: copy of the os.environ.

  Raises:
    ValueError: if this method was called under non-Linux platform.
    UserInputError: if user has provided invalid input multiple times.
  """
  if not is_linux():
    raise ValueError('Currently NCCL is only supported on Linux platforms.')

  ask_nccl_version = (
      'Please specify the NCCL version you want to use. If NCCL %s is not '
      'installed, then you can use version 1.3 that can be fetched '
      'automatically but it may have worse performance with multiple GPUs. '
      '[Default is %s]: ') % (_DEFAULT_NCCL_VERSION, _DEFAULT_NCCL_VERSION)

  # Prompt up to _DEFAULT_PROMPT_ASK_ATTEMPTS times; the for/else raises if
  # no attempt succeeds (i.e. no break was taken).
  for _ in range(_DEFAULT_PROMPT_ASK_ATTEMPTS):
    tf_nccl_version = get_from_env_or_user_or_default(
        environ_cp, 'TF_NCCL_VERSION', ask_nccl_version, _DEFAULT_NCCL_VERSION)
    tf_nccl_version = reformat_version_sequence(str(tf_nccl_version), 1)

    if tf_nccl_version == '1':
      break  # No need to get install path, NCCL 1 is a GitHub repo.

    # TODO(csigg): Look with ldconfig first if we can find the library in paths
    # like /usr/lib/x86_64-linux-gnu and the header file in the corresponding
    # include directory. This is where the NCCL .deb packages install them.
    # Then ask the user if we should use that. Instead of a single
    # NCCL_INSTALL_PATH, pass separate NCCL_LIB_PATH and NCCL_HDR_PATH to
    # nccl_configure.bzl
    default_nccl_path = environ_cp.get('CUDA_TOOLKIT_PATH')
    ask_nccl_path = (r'Please specify the location where NCCL %s library is '
                     'installed. Refer to README.md for more details. [Default '
                     'is %s]:') % (tf_nccl_version, default_nccl_path)
    nccl_install_path = get_from_env_or_user_or_default(
        environ_cp, 'NCCL_INSTALL_PATH', ask_nccl_path, default_nccl_path)

    # Result returned from "read" will be used unexpanded. That make "~"
    # unusable. Going through one more level of expansion to handle that.
    nccl_install_path = os.path.realpath(os.path.expanduser(nccl_install_path))
    if is_windows() or is_cygwin():
      nccl_install_path = cygpath(nccl_install_path)

    # NOTE(review): the is_linux() guard at the top means the is_windows()
    # and is_macos() branches below appear unreachable — confirm before
    # relying on them.
    if is_windows():
      nccl_lib_path = 'lib/x64/nccl.lib'
    elif is_linux():
      nccl_lib_path = 'lib/libnccl.so.%s' % tf_nccl_version
    elif is_macos():
      nccl_lib_path = 'lib/libnccl.%s.dylib' % tf_nccl_version

    nccl_lib_path = os.path.join(nccl_install_path, nccl_lib_path)
    nccl_hdr_path = os.path.join(nccl_install_path, 'include/nccl.h')
    # Accept the path only if both the versioned library and the header exist.
    if os.path.exists(nccl_lib_path) and os.path.exists(nccl_hdr_path):
      # Set NCCL_INSTALL_PATH
      environ_cp['NCCL_INSTALL_PATH'] = nccl_install_path
      write_action_env_to_bazelrc('NCCL_INSTALL_PATH', nccl_install_path)
      break

    # Reset and Retry
    print('Invalid path to NCCL %s toolkit, %s or %s not found. Please use the '
          'O/S agnostic package of NCCL 2' % (tf_nccl_version, nccl_lib_path,
                                              nccl_hdr_path))
    environ_cp['TF_NCCL_VERSION'] = ''
  else:
    raise UserInputError('Invalid TF_NCCL setting was provided %d '
                         'times in a row. Assuming to be a scripting mistake.' %
                         _DEFAULT_PROMPT_ASK_ATTEMPTS)

  # Set TF_NCCL_VERSION
  environ_cp['TF_NCCL_VERSION'] = tf_nccl_version
  write_action_env_to_bazelrc('TF_NCCL_VERSION', tf_nccl_version)
def get_native_cuda_compute_capabilities(environ_cp):
  """Get native cuda compute capabilities.

  Runs the CUDA toolkit's deviceQuery demo binary (when present and
  executable) and extracts the "Capability" lines from its output.

  Args:
    environ_cp: copy of the os.environ.

  Returns:
    string of native cuda compute capabilities, separated by comma.
  """
  query_bin = os.path.join(
      environ_cp.get('CUDA_TOOLKIT_PATH'), 'extras/demo_suite/deviceQuery')
  # Without an executable deviceQuery there is nothing to detect.
  if not (os.path.isfile(query_bin) and os.access(query_bin, os.X_OK)):
    return ''
  try:
    capability_re = re.compile('[0-9]*\\.[0-9]*')
    matches = [
        capability_re.search(line)
        for line in run_shell(query_bin).split('\n')
        if 'Capability' in line
    ]
    return ','.join(m.group() for m in matches if m is not None)
  except subprocess.CalledProcessError:
    # deviceQuery failed (e.g. no GPU present); fall back to "unknown".
    return ''
def set_tf_cuda_compute_capabilities(environ_cp):
  """Set TF_CUDA_COMPUTE_CAPABILITIES.

  Prompts (until validation succeeds) for a comma-separated list of CUDA
  compute capabilities, defaulting to the locally detected ones, then
  records the validated list in environ_cp and the bazelrc file.

  Args:
    environ_cp: copy of the os.environ.
  """
  while True:
    native_cuda_compute_capabilities = get_native_cuda_compute_capabilities(
        environ_cp)
    if not native_cuda_compute_capabilities:
      default_cuda_compute_capabilities = _DEFAULT_CUDA_COMPUTE_CAPABILITIES
    else:
      default_cuda_compute_capabilities = native_cuda_compute_capabilities

    ask_cuda_compute_capabilities = (
        'Please specify a list of comma-separated '
        'Cuda compute capabilities you want to '
        'build with.\nYou can find the compute '
        'capability of your device at: '
        'https://developer.nvidia.com/cuda-gpus.\nPlease'
        ' note that each additional compute '
        'capability significantly increases your '
        'build time and binary size. [Default is: %s]: ' %
        default_cuda_compute_capabilities)
    tf_cuda_compute_capabilities = get_from_env_or_user_or_default(
        environ_cp, 'TF_CUDA_COMPUTE_CAPABILITIES',
        ask_cuda_compute_capabilities, default_cuda_compute_capabilities)
    # Check whether all capabilities from the input is valid
    all_valid = True
    # Remove all whitespace characters before splitting the string
    # that users may insert by accident, as this will result in error
    tf_cuda_compute_capabilities = ''.join(tf_cuda_compute_capabilities.split())
    for compute_capability in tf_cuda_compute_capabilities.split(','):
      # BUG FIX: escape the dot. The original pattern '[0-9]+.[0-9]+' let the
      # unescaped '.' match any character, so garbage like '3x5' validated.
      m = re.match('[0-9]+\\.[0-9]+', compute_capability)
      if not m:
        # BUG FIX: the original was "'Invalid compute capability: ' %
        # compute_capability" — a %-format with no placeholder, which raises
        # TypeError at runtime instead of printing the offending value.
        print('Invalid compute capability: %s' % compute_capability)
        all_valid = False
      else:
        # Major version must be >= 3 (Kepler or newer).
        ver = int(m.group(0).split('.')[0])
        if ver < 3:
          print('Only compute capabilities 3.0 or higher are supported.')
          all_valid = False

    if all_valid:
      break

    # Reset and Retry
    environ_cp['TF_CUDA_COMPUTE_CAPABILITIES'] = ''

  # Set TF_CUDA_COMPUTE_CAPABILITIES
  environ_cp['TF_CUDA_COMPUTE_CAPABILITIES'] = tf_cuda_compute_capabilities
  write_action_env_to_bazelrc('TF_CUDA_COMPUTE_CAPABILITIES',
                              tf_cuda_compute_capabilities)
def set_other_cuda_vars(environ_cp):
  """Set other CUDA related variables."""
  # If CUDA is enabled, always use GPU during build and test.
  if environ_cp.get('TF_CUDA_CLANG') != '1':
    write_to_bazelrc('build --config=cuda')
    write_to_bazelrc('test --config=cuda')
  else:
    write_to_bazelrc('build --config=cuda_clang')
    write_to_bazelrc('test --config=cuda_clang')
def set_host_cxx_compiler(environ_cp):
  """Set HOST_CXX_COMPILER.

  Prompts for (or loads from the environment) the host C++ compiler path,
  defaulting to the g++ found on PATH, and records it in the bazelrc file.
  """
  host_cxx_compiler = prompt_loop_or_load_from_env(
      environ_cp,
      var_name='HOST_CXX_COMPILER',
      var_default=which('g++') or '',
      ask_for_var=('Please specify which C++ compiler should be used as the '
                   'host C++ compiler.'),
      check_success=os.path.exists,
      error_msg='Invalid C++ compiler path. %s cannot be found.',
  )
  write_action_env_to_bazelrc('HOST_CXX_COMPILER', host_cxx_compiler)
def set_host_c_compiler(environ_cp):
  """Set HOST_C_COMPILER.

  Prompts for (or loads from the environment) the host C compiler path,
  defaulting to the gcc found on PATH, and records it in the bazelrc file.
  """
  host_c_compiler = prompt_loop_or_load_from_env(
      environ_cp,
      var_name='HOST_C_COMPILER',
      var_default=which('gcc') or '',
      ask_for_var=('Please specify which C compiler should be used as the host '
                   'C compiler.'),
      check_success=os.path.exists,
      error_msg='Invalid C compiler path. %s cannot be found.',
  )
  write_action_env_to_bazelrc('HOST_C_COMPILER', host_c_compiler)
def set_computecpp_toolkit_path(environ_cp):
  """Set COMPUTECPP_TOOLKIT_PATH.

  Prompts for the ComputeCpp SYCL toolkit root, validating that the runtime
  library exists underneath it, and records the path in the bazelrc file.
  """

  def toolkit_exists(toolkit_path):
    """Check if a computecpp toolkit path is valid."""
    # Only the Linux runtime library location is known here.
    sycl_rt_lib_path = 'lib/libComputeCpp.so' if is_linux() else ''
    sycl_rt_lib_path_full = os.path.join(toolkit_path, sycl_rt_lib_path)
    if os.path.exists(sycl_rt_lib_path_full):
      return True
    print('Invalid SYCL %s library path. %s cannot be found' %
          (_TF_OPENCL_VERSION, sycl_rt_lib_path_full))
    return False

  computecpp_toolkit_path = prompt_loop_or_load_from_env(
      environ_cp,
      var_name='COMPUTECPP_TOOLKIT_PATH',
      var_default=_DEFAULT_COMPUTECPP_TOOLKIT_PATH,
      ask_for_var=(
          'Please specify the location where ComputeCpp for SYCL %s is '
          'installed.' % _TF_OPENCL_VERSION),
      check_success=toolkit_exists,
      error_msg='Invalid SYCL compiler path. %s cannot be found.',
      suppress_default_error=True)

  write_action_env_to_bazelrc('COMPUTECPP_TOOLKIT_PATH',
                              computecpp_toolkit_path)
def set_trisycl_include_dir(environ_cp):
  """Set TRISYCL_INCLUDE_DIR.

  Keeps prompting until an existing directory is supplied, then records it
  in environ_cp and the bazelrc file.
  """
  ask_trisycl_include_dir = ('Please specify the location of the triSYCL '
                             'include directory. (Use --config=sycl_trisycl '
                             'when building with Bazel) '
                             '[Default is %s]: '
                            ) % (_DEFAULT_TRISYCL_INCLUDE_DIR)

  while True:
    candidate = get_from_env_or_user_or_default(
        environ_cp, 'TRISYCL_INCLUDE_DIR', ask_trisycl_include_dir,
        _DEFAULT_TRISYCL_INCLUDE_DIR)
    if os.path.exists(candidate):
      trisycl_include_dir = candidate
      break
    print('Invalid triSYCL include directory, %s cannot be found'
          % (candidate))

  # Set TRISYCL_INCLUDE_DIR
  environ_cp['TRISYCL_INCLUDE_DIR'] = trisycl_include_dir
  write_action_env_to_bazelrc('TRISYCL_INCLUDE_DIR',
                              trisycl_include_dir)
def set_mpi_home(environ_cp):
  """Set MPI_HOME.

  Guesses the MPI root from the location of mpirun/mpiexec (two directory
  levels up), then prompts the user, validating that the root contains both
  an include/ and a lib/ directory.
  """
  default_mpi_home = which('mpirun') or which('mpiexec') or ''
  default_mpi_home = os.path.dirname(os.path.dirname(default_mpi_home))

  def valid_mpi_path(mpi_home):
    """Return True iff mpi_home has both include/ and lib/ subdirectories."""
    exists = (
        os.path.exists(os.path.join(mpi_home, 'include')) and
        os.path.exists(os.path.join(mpi_home, 'lib')))
    if not exists:
      # BUG FIX: the original passed os.path.exists(<lib path>) — a bool —
      # as the second %s, so the message read "... or False cannot be found"
      # instead of showing the missing lib path.
      print('Invalid path to the MPI Toolkit. %s or %s cannot be found' %
            (os.path.join(mpi_home, 'include'),
             os.path.join(mpi_home, 'lib')))
    return exists

  _ = prompt_loop_or_load_from_env(
      environ_cp,
      var_name='MPI_HOME',
      var_default=default_mpi_home,
      ask_for_var='Please specify the MPI toolkit folder.',
      check_success=valid_mpi_path,
      error_msg='',
      suppress_default_error=True)
def set_other_mpi_vars(environ_cp):
  """Set other MPI related variables.

  Symlinks the MPI headers and library into third_party/mpi and flips the
  MPI_LIB_IS_OPENMPI flag in third_party/mpi/mpi.bzl to match the detected
  MPI flavor.

  Raises:
    ValueError: if libmpi.so is not found under MPI_HOME/lib.
  """
  # Link the MPI header files
  mpi_home = environ_cp.get('MPI_HOME')
  symlink_force('%s/include/mpi.h' % mpi_home, 'third_party/mpi/mpi.h')

  # Determine if we use OpenMPI or MVAPICH, these require different header files
  # to be included here to make bazel dependency checker happy
  # (mpi_portable_platform.h is shipped by OpenMPI only).
  if os.path.exists(os.path.join(mpi_home, 'include/mpi_portable_platform.h')):
    symlink_force(
        os.path.join(mpi_home, 'include/mpi_portable_platform.h'),
        'third_party/mpi/mpi_portable_platform.h')
    # TODO(gunan): avoid editing files in configure
    sed_in_place('third_party/mpi/mpi.bzl', 'MPI_LIB_IS_OPENMPI=False',
                 'MPI_LIB_IS_OPENMPI=True')
  else:
    # MVAPICH / MPICH
    symlink_force(
        os.path.join(mpi_home, 'include/mpio.h'), 'third_party/mpi/mpio.h')
    symlink_force(
        os.path.join(mpi_home, 'include/mpicxx.h'), 'third_party/mpi/mpicxx.h')
    # TODO(gunan): avoid editing files in configure
    sed_in_place('third_party/mpi/mpi.bzl', 'MPI_LIB_IS_OPENMPI=True',
                 'MPI_LIB_IS_OPENMPI=False')

  # Finally link the shared library itself.
  if os.path.exists(os.path.join(mpi_home, 'lib/libmpi.so')):
    symlink_force(
        os.path.join(mpi_home, 'lib/libmpi.so'), 'third_party/mpi/libmpi.so')
  else:
    raise ValueError('Cannot find the MPI library file in %s/lib' % mpi_home)
def set_grpc_build_flags():
  """Write the gRPC build define (c-ares resolver disabled) to the bazelrc."""
  write_to_bazelrc('build --define grpc_no_ares=true')
def set_system_libs_flag(environ_cp):
  """Export TF_SYSTEM_LIBS (sorted, comma-separated) to the bazelrc, if set."""
  syslibs = environ_cp.get('TF_SYSTEM_LIBS', '')
  # Only process a non-empty value. The original guard
  # `if syslibs and syslibs != ''` was redundant (the second test repeats
  # the first), and it sorted/joined even when the variable was unset.
  if syslibs:
    syslibs = ','.join(sorted(syslibs.split(',')))
    write_action_env_to_bazelrc('TF_SYSTEM_LIBS', syslibs)
def set_windows_build_flags(environ_cp):
  """Set Windows specific build options.

  Writes Windows-only bazelrc flags and optionally overrides Eigen's strong
  inlining (user prompt) to work around an MSVC compile-time issue.
  """
  # The non-monolithic build is not supported yet
  write_to_bazelrc('build --config monolithic')
  # Suppress warning messages
  write_to_bazelrc('build --copt=-w --host_copt=-w')
  # Output more verbose information when something goes wrong
  write_to_bazelrc('build --verbose_failures')
  # The host and target platforms are the same in Windows build. So we don't
  # have to distinct them. This avoids building the same targets twice.
  write_to_bazelrc('build --distinct_host_configuration=false')
  # Enable short object file path to avoid long path issue on Windows.
  # TODO(pcloudy): Remove this flag when upgrading Bazel to 0.16.0
  # Short object file path will be enabled by default.
  write_to_bazelrc('build --experimental_shortened_obj_file_path=true')

  if get_var(
      environ_cp, 'TF_OVERRIDE_EIGEN_STRONG_INLINE', 'Eigen strong inline',
      True,
      ('Would you like to override eigen strong inline for some C++ '
       'compilation to reduce the compilation time?'),
      'Eigen strong inline overridden.',
      'Not overriding eigen strong inline, '
      'some compilations could take more than 20 mins.'):
    # Due to a known MSVC compiler issue
    # https://github.com/tensorflow/tensorflow/issues/10521
    # Overriding eigen strong inline speeds up the compiling of
    # conv_grad_ops_3d.cc and conv_ops_3d.cc by 20 minutes,
    # but this also hurts the performance. Let users decide what they want.
    write_to_bazelrc('build --define=override_eigen_strong_inline=true')
def config_info_line(name, help_text):
    """Helper function to print formatted help text for Bazel config options."""
    print(f'\t--config={name:<12}\t# {help_text}')
def main():
  """Interactive configure entry point.

  Collects build configuration (Python, accelerators, optional features)
  from the environment and user prompts, then writes the results to the
  workspace's bazelrc files.
  """
  parser = argparse.ArgumentParser()
  parser.add_argument("--workspace",
                      type=str,
                      default=_TF_WORKSPACE_ROOT,
                      help="The absolute path to your active Bazel workspace.")
  args = parser.parse_args()

  # Make a copy of os.environ to be clear when functions and getting and setting
  # environment variables.
  environ_cp = dict(os.environ)

  check_bazel_version('0.15.0')

  reset_tf_configure_bazelrc(args.workspace)
  cleanup_makefile()
  setup_python(environ_cp)

  if is_windows():
    # Features unsupported on Windows are forced off so their prompts are
    # skipped entirely.
    environ_cp['TF_NEED_AWS'] = '0'
    environ_cp['TF_NEED_GCP'] = '0'
    environ_cp['TF_NEED_HDFS'] = '0'
    environ_cp['TF_NEED_JEMALLOC'] = '0'
    environ_cp['TF_NEED_KAFKA'] = '0'
    environ_cp['TF_NEED_OPENCL_SYCL'] = '0'
    environ_cp['TF_NEED_COMPUTECPP'] = '0'
    environ_cp['TF_NEED_OPENCL'] = '0'
    environ_cp['TF_CUDA_CLANG'] = '0'
    environ_cp['TF_NEED_TENSORRT'] = '0'
    # TODO(ibiryukov): Investigate using clang as a cpu or cuda compiler on
    # Windows.
    environ_cp['TF_DOWNLOAD_CLANG'] = '0'
    environ_cp['TF_ENABLE_XLA'] = '0'
    environ_cp['TF_NEED_GDR'] = '0'
    environ_cp['TF_NEED_VERBS'] = '0'
    environ_cp['TF_NEED_MPI'] = '0'
    environ_cp['TF_SET_ANDROID_WORKSPACE'] = '0'

  if is_macos():
    environ_cp['TF_NEED_JEMALLOC'] = '0'
    environ_cp['TF_NEED_TENSORRT'] = '0'

  # The numpy package on ppc64le uses OpenBLAS which has multi-threading
  # issues that lead to incorrect answers.  Set OMP_NUM_THREADS=1 at
  # runtime to allow the Tensorflow testcases which compare numpy
  # results to Tensorflow results to succeed.
  if is_ppc64le():
    write_action_env_to_bazelrc("OMP_NUM_THREADS", 1)

  set_build_var(environ_cp, 'TF_NEED_JEMALLOC', 'jemalloc as malloc',
                'with_jemalloc', True)
  # ROCM TODO: restore these flags to default ones after we get a successful
  # build
  set_build_var(environ_cp, 'TF_NEED_GCP', 'Google Cloud Platform',
                'with_gcp_support', False, 'gcp')
  set_build_var(environ_cp, 'TF_NEED_HDFS', 'Hadoop File System',
                'with_hdfs_support', False, 'hdfs')
  set_build_var(environ_cp, 'TF_NEED_AWS', 'Amazon AWS Platform',
                'with_aws_support', False, 'aws')
  set_build_var(environ_cp, 'TF_NEED_KAFKA', 'Apache Kafka Platform',
                'with_kafka_support', False, 'kafka')
  set_build_var(environ_cp, 'TF_ENABLE_XLA', 'XLA JIT', 'with_xla_support',
                True, 'xla')
  set_build_var(environ_cp, 'TF_NEED_GDR', 'GDR', 'with_gdr_support',
                False, 'gdr')
  set_build_var(environ_cp, 'TF_NEED_VERBS', 'VERBS', 'with_verbs_support',
                False, 'verbs')
  set_build_var(environ_cp, 'TF_NEED_NGRAPH', 'nGraph',
                'with_ngraph_support', False, 'ngraph')

  set_action_env_var(environ_cp, 'TF_NEED_OPENCL_SYCL', 'OpenCL SYCL', False)
  if environ_cp.get('TF_NEED_OPENCL_SYCL') == '1':
    set_host_cxx_compiler(environ_cp)
    set_host_c_compiler(environ_cp)
    set_action_env_var(environ_cp, 'TF_NEED_COMPUTECPP', 'ComputeCPP', True)
    if environ_cp.get('TF_NEED_COMPUTECPP') == '1':
      set_computecpp_toolkit_path(environ_cp)
    else:
      set_trisycl_include_dir(environ_cp)

  set_action_env_var(environ_cp, 'TF_NEED_ROCM', 'ROCm', False)
  if (environ_cp.get('TF_NEED_ROCM') == '1' and
      'LD_LIBRARY_PATH' in environ_cp and environ_cp.get(
          'LD_LIBRARY_PATH') != '1'):
    write_action_env_to_bazelrc('LD_LIBRARY_PATH',
                                environ_cp.get('LD_LIBRARY_PATH'))

  set_action_env_var(environ_cp, 'TF_NEED_CUDA', 'CUDA', False)
  # A preset TF_CUDA_CONFIG_REPO means the CUDA configuration comes from a
  # remote repository, so the local prompts are skipped.
  if (environ_cp.get('TF_NEED_CUDA') == '1' and
      'TF_CUDA_CONFIG_REPO' not in environ_cp):
    set_tf_cuda_version(environ_cp)
    set_tf_cudnn_version(environ_cp)
    if is_linux():
      set_tf_tensorrt_install_path(environ_cp)
      set_tf_nccl_install_path(environ_cp)

    set_tf_cuda_compute_capabilities(environ_cp)
    if 'LD_LIBRARY_PATH' in environ_cp and environ_cp.get(
        'LD_LIBRARY_PATH') != '1':
      write_action_env_to_bazelrc('LD_LIBRARY_PATH',
                                  environ_cp.get('LD_LIBRARY_PATH'))

    set_tf_cuda_clang(environ_cp)
    if environ_cp.get('TF_CUDA_CLANG') == '1':
      # Ask whether we should download the clang toolchain.
      set_tf_download_clang(environ_cp)
      if environ_cp.get('TF_DOWNLOAD_CLANG') != '1':
        # Set up which clang we should use as the cuda / host compiler.
        set_clang_cuda_compiler_path(environ_cp)
      else:
        # Use downloaded LLD for linking.
        write_to_bazelrc('build:cuda_clang --config=download_clang_use_lld')
        write_to_bazelrc('test:cuda_clang --config=download_clang_use_lld')
    else:
      # Set up which gcc nvcc should use as the host compiler
      # No need to set this on Windows
      if not is_windows():
        set_gcc_host_compiler_path(environ_cp)
    set_other_cuda_vars(environ_cp)
  else:
    # CUDA not required. Ask whether we should download the clang toolchain and
    # use it for the CPU build.
    set_tf_download_clang(environ_cp)
    if environ_cp.get('TF_DOWNLOAD_CLANG') == '1':
      write_to_bazelrc('build --config=download_clang')
      write_to_bazelrc('test --config=download_clang')

  # SYCL / ROCm / CUDA are mutually exclusive.
  # At most 1 GPU platform can be configured.
  gpu_platform_count = 0
  if environ_cp.get('TF_NEED_OPENCL_SYCL') == '1':
    gpu_platform_count += 1
  if environ_cp.get('TF_NEED_ROCM') == '1':
    gpu_platform_count += 1
  if environ_cp.get('TF_NEED_CUDA') == '1':
    gpu_platform_count += 1
  if gpu_platform_count >= 2:
    raise UserInputError('SYCL / CUDA / ROCm are mututally exclusive. '
                         'At most 1 GPU platform can be configured.')

  set_build_var(environ_cp, 'TF_NEED_MPI', 'MPI', 'with_mpi_support', False)
  if environ_cp.get('TF_NEED_MPI') == '1':
    set_mpi_home(environ_cp)
    set_other_mpi_vars(environ_cp)

  set_grpc_build_flags()
  set_cc_opt_flags(environ_cp)
  set_system_libs_flag(environ_cp)
  if is_windows():
    set_windows_build_flags(environ_cp)

  if get_var(
      environ_cp, 'TF_SET_ANDROID_WORKSPACE', 'android workspace',
      False,
      ('Would you like to interactively configure ./WORKSPACE for '
       'Android builds?'),
      'Searching for NDK and SDK installations.',
      'Not configuring the WORKSPACE for Android builds.'):
    create_android_ndk_rule(environ_cp)
    create_android_sdk_rule(environ_cp)

  # On Windows, we don't have MKL support and the build is always monolithic.
  # So no need to print the following message.
  # TODO(pcloudy): remove the following if check when they make sense on Windows
  if not is_windows():
    print('Preconfigured Bazel build configs. You can use any of the below by '
          'adding "--config=<>" to your build command. See tools/bazel.rc for '
          'more details.')
    config_info_line('mkl', 'Build with MKL support.')
    config_info_line('monolithic', 'Config for mostly static monolithic build.')


if __name__ == '__main__':
  main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class IotVspOrgUserDeleteNotifyUserInfoRequest(object):
    """Model for the user-info payload of the IoT VSP org user delete notify API.

    Carries three optional wire fields (msg, state, vid) and converts
    to/from the plain-dict shape used by the Alipay gateway.
    """

    # Names of the wire-level fields, in serialization order.
    _FIELD_NAMES = ('msg', 'state', 'vid')

    def __init__(self):
        self._msg = None
        self._state = None
        self._vid = None

    @property
    def msg(self):
        return self._msg

    @msg.setter
    def msg(self, value):
        self._msg = value

    @property
    def state(self):
        return self._state

    @state.setter
    def state(self, value):
        self._state = value

    @property
    def vid(self):
        return self._vid

    @vid.setter
    def vid(self, value):
        self._vid = value

    def to_alipay_dict(self):
        """Return a dict of the truthy fields, recursing into nested models."""
        params = {}
        for name in self._FIELD_NAMES:
            value = getattr(self, name)
            # Falsy values (None, '', 0, ...) are omitted from the payload,
            # matching the SDK-wide serialization convention.
            if not value:
                continue
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[name] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build an instance from a response dict; None/empty dict yields None."""
        if not d:
            return None
        o = IotVspOrgUserDeleteNotifyUserInfoRequest()
        for name in IotVspOrgUserDeleteNotifyUserInfoRequest._FIELD_NAMES:
            if name in d:
                setattr(o, name, d[name])
        return o
|
import {useSession} from '@/entities';
import {Button, Col, Row, Steps, Typography} from 'antd';
import React from 'react';
const {Title} = Typography;
import {remote} from 'electron';
const {Step} = Steps;
// Main window component: three-column layout (file selection, analysis
// progress, result) backed by the shared session entity.
export default () =>
{
    // login/logout are unused here so far — presumably wired up by a later
    // auth flow; session state drives the component. TODO confirm.
    const [session, {login, logout}] = useSession();

    return <>
        <Title>My app</Title>
        <Row gutter={[10, 10]}>
            <Col span={8}>Load a file </Col>
            <Col span={8}>Analysis</Col>
            <Col span={8}>Result</Col>
        </Row>
        <Row gutter={[10, 10]}>
            <Col span={8}>
                <Button type={'primary'} onClick={() =>
                {
                    // Open a native file picker via Electron's remote module.
                    // showOpenDialogSync blocks and returns undefined when the
                    // user cancels, hence the Boolean() guard below.
                    const {dialog} = remote;
                    const selectedFiles = dialog.showOpenDialogSync({
                        properties: ['openFile'],
                        title: 'Open file'
                    });
                    if (Boolean(selectedFiles) && selectedFiles.length > 0)
                    {
                        //ok
                    }
                }}>Select file</Button>
            </Col>
            <Col span={8}>
                <Steps direction="vertical" current={1}>
                    <Step title="Finished" description="This is a description."/>
                    <Step title="In Progress" description="This is a description."/>
                    <Step title="Waiting" description="This is a description."/>
                </Steps></Col>
            <Col span={8}>
                Result
            </Col>
        </Row>
    </>;
};
|
/*
* Copyright (c) 2018, salesforce.com, inc.
* All rights reserved.
* SPDX-License-Identifier: MIT
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/MIT
*/
const fs = require('fs');
const path = require('path');
const pluginUtils = require('@rollup/pluginutils');
const compiler = require('@lwc/compiler');
const { resolveModule } = require('@lwc/module-resolver');
const { getModuleQualifiedName } = require('./utils');
const { DEFAULT_OPTIONS, DEFAULT_MODE, DEFAULT_MODULES } = require('./constants');
const IMPLICIT_DEFAULT_HTML_PATH = '@lwc/resources/empty_html.js';
const EMPTY_IMPLICIT_HTML_CONTENT = 'export default void 0';
function isImplicitHTMLImport(importee, importer) {
return (
path.extname(importer) === '.js' &&
path.extname(importee) === '.html' &&
path.dirname(importer) === path.dirname(importee) &&
path.basename(importer, '.js') === path.basename(importee, '.html')
);
}
// True when one side of the import is .js and the other .ts, in either
// direction — a combination the compiler does not support.
function isMixingJsAndTs(importerExt, importeeExt) {
    const pair = [importerExt, importeeExt];
    return pair.includes('.js') && pair.includes('.ts');
}
// Rollup plugin that resolves and compiles LWC modules (JS/TS components,
// their HTML templates and CSS) via @lwc/compiler.
module.exports = function rollupLwcCompiler(pluginOptions = {}) {
    const { include, exclude } = pluginOptions;
    const filter = pluginUtils.createFilter(include, exclude);
    const mergedPluginOptions = Object.assign({}, DEFAULT_OPTIONS, pluginOptions);

    // Computed in options() once rollup hands us the entry point.
    let customResolvedModules;
    let customRootDir;

    return {
        name: 'rollup-plugin-lwc-compiler',

        options({ input }) {
            // Root defaults to the entry point's directory unless overridden;
            // user modules take precedence over the defaults.
            const { modules: userModules = [], rootDir } = mergedPluginOptions;
            customRootDir = rootDir ? path.resolve(rootDir) : path.dirname(path.resolve(input));
            customResolvedModules = [...userModules, ...DEFAULT_MODULES, { dir: customRootDir }];
        },

        resolveId(importee, importer) {
            // Normalize relative import to absolute import
            if (importee.startsWith('.') && importer) {
                // Extension-less relative imports inherit the importer's
                // extension.
                const importerExt = path.extname(importer);
                const ext = path.extname(importee) || importerExt;

                // we don't currently support mixing .js and .ts
                if (isMixingJsAndTs(importerExt, ext)) {
                    throw new Error(
                        `Importing a ${ext} file into a ${importerExt} is not supported`
                    );
                }

                const normalizedPath = path.resolve(path.dirname(importer), importee);
                const absPath = pluginUtils.addExtension(normalizedPath, ext);

                // A component without its own .html gets an empty default
                // template (served from load() below).
                if (isImplicitHTMLImport(normalizedPath, importer) && !fs.existsSync(absPath)) {
                    return IMPLICIT_DEFAULT_HTML_PATH;
                }

                return pluginUtils.addExtension(normalizedPath, ext);
            } else if (importer) {
                // Non-relative specifier: try LWC module resolution; anything
                // that is not an LWC module falls through to other plugins.
                try {
                    return resolveModule(importee, importer, {
                        modules: customResolvedModules,
                        rootDir: customRootDir,
                    }).entry;
                } catch (err) {
                    if (err.code !== 'NO_LWC_MODULE_FOUND') {
                        throw err;
                    }
                }
            }
        },

        load(id) {
            if (id === IMPLICIT_DEFAULT_HTML_PATH) {
                return EMPTY_IMPLICIT_HTML_CONTENT;
            }

            // Missing stylesheets compile as empty CSS rather than erroring.
            const exists = fs.existsSync(id);
            const isCSS = path.extname(id) === '.css';

            if (!exists && isCSS) {
                return '';
            }
        },

        async transform(src, id) {
            // Filter user-land config and lwc import
            if (!filter(id)) {
                return;
            }

            // If we don't find the moduleId, just resolve the module name/namespace
            const moduleEntry = getModuleQualifiedName(id, mergedPluginOptions);
            const { code, map } = await compiler.transform(src, id, {
                mode: DEFAULT_MODE, // Use always default mode since any other (prod or compat) will be resolved later
                name: moduleEntry.moduleName,
                namespace: moduleEntry.moduleNamespace,
                moduleSpecifier: moduleEntry.moduleSpecifier,
                outputConfig: { sourcemap: mergedPluginOptions.sourcemap },
                stylesheetConfig: mergedPluginOptions.stylesheetConfig,
                experimentalDynamicComponent: mergedPluginOptions.experimentalDynamicComponent,
                preserveHtmlComments: mergedPluginOptions.preserveHtmlComments,
            });

            return { code, map };
        },
    };
};
|
import numpy as np
import h5py
class BaseDataLoader():
    """Abstract base class for dataset loaders.

    Subclasses are expected to implement __init__/__getitem__ and set
    ``self.length`` so that ``len(loader)`` works.
    """

    def __init__(self, data_root, iter_method='between_frames'):
        # Intentionally a no-op in the base class; subclasses open the data
        # under `data_root` and choose an iteration strategy.
        pass

    def __getitem__(self, index):
        # BUG FIX: the original signature was `def __getitem__():` — missing
        # both `self` and the index — so any `loader[i]` raised TypeError.
        pass

    def __len__(self):
        # `self.length` must be set by the subclass before len() is used.
        return self.length
|
# -*- coding: utf-8 -*-
#
import numpy
import pytest
from optimesh import cpt
from meshes import simple0, simple1, simple2, simple3, pacman
@pytest.mark.parametrize(
    "mesh, ref",
    [(simple0, 5.0 / 18.0), (simple1, 17.0 / 60.0), (pacman, 7.320400634147646)],
)
def test_energy(mesh, ref):
    """The uniform CPT energy of each reference mesh matches its known value."""
    X, cells = mesh()
    energy = cpt.energy_uniform(X, cells)
    # Relative tolerance: the reference values span orders of magnitude.
    assert abs(energy - ref) < 1.0e-12 * ref
    return
def test_simple1_jac():
    """Central finite differences of the per-node energy match the Jacobian."""
    X, cells = simple1()

    # First assert that the Jacobian at interior points coincides with the finite
    # difference computed for the energy component from that point. Note that the
    # contribution from all other points is disregarded here, just like in the
    # definition of the Jacobian of Chen-Holst; it's only an approximation after all.
    jac = cpt.jac_uniform(X, cells)
    # Check both coordinate directions of interior node 4.
    for j in [0, 1]:
        eps = 1.0e-7
        x0 = X.copy()
        x1 = X.copy()
        x0[4, j] -= eps
        x1[4, j] += eps
        f1 = cpt._energy_uniform_per_node(x1, cells)
        f0 = cpt._energy_uniform_per_node(x0, cells)
        # Central difference approximation of dE/dx_j at node 4.
        dE = (f1 - f0) / (2 * eps)
        assert abs(dE[4] - jac[4, j]) < 1.0e-10
    return
@pytest.mark.parametrize(
    "mesh, ref1, ref2, refi",
    [
        (simple1, 0.96, 0.3262279745178587, 29.0 / 225.0),
        (pacman, 12.35078985438217, 0.5420691555930099, 0.10101179397867549),
    ],
)
def test_jac(mesh, ref1, ref2, refi):
    """The 1-, 2-, and inf-norms of the flattened Jacobian match references."""
    X, cells = mesh()

    jac = cpt.jac_uniform(X, cells)

    nc = jac.flatten()
    norm1 = numpy.linalg.norm(nc, ord=1)
    norm2 = numpy.linalg.norm(nc, ord=2)
    normi = numpy.linalg.norm(nc, ord=numpy.inf)

    # Relative comparison against precomputed reference norms.
    tol = 1.0e-12
    assert abs(norm1 - ref1) < tol * ref1
    assert abs(norm2 - ref2) < tol * ref2
    assert abs(normi - refi) < tol * refi
    return
@pytest.mark.parametrize(
    "method, mesh, ref1, ref2, refi",
    [
        (cpt.fixed_point_uniform, simple1, 5.0, 2.1213203435596424, 1.0),
        (cpt.fixed_point_uniform, simple2, 7.390123456790124, 2.804687217072868, 1.7),
        (cpt.fixed_point_uniform, simple3, 12.0, 3.9765648779799356, 2.0),
        (cpt.fixed_point_uniform, pacman, 1903.6345096485093, 74.6604068632378, 5.0),
        #
        (cpt.quasi_newton_uniform, simple1, 5.0, 2.1213203435596424, 1.0),
        (cpt.quasi_newton_uniform, simple2, 7.390123456790124, 2.804687217072868, 1.7),
        (cpt.quasi_newton_uniform, simple3, 12.0, 3.976564877979913, 2.0),
        (cpt.quasi_newton_uniform, pacman, 1902.1102921588854, 74.61593876126557, 5.0),
    ],
)
def test_methods(method, mesh, ref1, ref2, refi):
    """Each smoothing method converges to the expected point configuration."""
    X_in, cells_in = mesh()

    # X_before = X_in.copy()
    # cells_before = cells_in.copy()

    # Run the smoother with tight tolerance and a capped iteration count.
    X, cells = method(X_in, cells_in, 1.0e-12, 100)

    # assert numpy.all(cells_in == cells_before)
    # assert numpy.all(numpy.abs(X_in == X_before) < 1.0e-15)

    # Test if we're dealing with the mesh we expect.
    nc = X.flatten()
    norm1 = numpy.linalg.norm(nc, ord=1)
    norm2 = numpy.linalg.norm(nc, ord=2)
    normi = numpy.linalg.norm(nc, ord=numpy.inf)

    tol = 1.0e-12
    assert abs(norm1 - ref1) < tol * ref1
    assert abs(norm2 - ref2) < tol * ref2
    assert abs(normi - refi) < tol * refi
    return
@pytest.mark.parametrize(
    "mesh, ref1, ref2, refi",
    [
        (simple1, 5.0, 2.1213203435596424, 1.0),
        (pacman, 1861.1845672600928, 73.12639677872326, 5.0),
    ],
)
def test_density_preserving(mesh, ref1, ref2, refi):
    """Density-preserving linear solve yields the expected point configuration."""
    X, cells = mesh()

    X, cells = cpt.linear_solve_density_preserving(X, cells, 0.0, 10)

    # Test if we're dealing with the mesh we expect.
    nc = X.flatten()
    norm1 = numpy.linalg.norm(nc, ord=1)
    norm2 = numpy.linalg.norm(nc, ord=2)
    normi = numpy.linalg.norm(nc, ord=numpy.inf)

    tol = 1.0e-12
    assert abs(norm1 - ref1) < tol * ref1
    assert abs(norm2 - ref2) < tol * ref2
    assert abs(normi - refi) < tol * refi
    return
# if __name__ == "__main__":
# from meshes import circle
# test_fixed_point()
# X, cells = circle()
# X, cells = cpt.fixed_point_uniform(X, cells, 1.0e-3, 100)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# imports.
from dev0s.classes.response import *
from dev0s.classes.defaults import objects
from dev0s.classes.response import response as _response_
# pip imports.
from bs4 import BeautifulSoup as bs4
import urllib
import requests as __requests__
# the requests class.
class Requests(object):
    def __init__(self):
        """Initialize the Requests helper with its default transport settings."""

        # docs.
        # NOTE(review): DOCS is a local that is never used in this method —
        # presumably consumed by dev0s's documentation tooling; confirm.
        DOCS = {
            "module":"dev0s.requests",
            "initialized":True,
            "description":[],
            "chapter":"Requests",
        }

        # attributes.
        # Build https:// URLs by default (see the url handling in get()).
        self.https = True
        # Passed through to the underlying requests calls.
        self.allow_redirects = True
# encode, quote, unqoute & serialize data.
def encode(self, data={}):
return f"?{urllib.parse.urlencode(self.serialize(data))}"
    def quote(self, data={}):
        # Serialize `data`, dump it to a JSON string, then percent-encode it.
        # NOTE(review): `json` is presumably provided by the star import of
        # dev0s.classes.response at the top of the file — confirm.
        return urllib.parse.quote(json.dumps(self.serialize(data)))
def unquote(self, encoded, depth=30):
return json.loads(urllib.parse.unquote(encoded))
def serialize(self, data={}):
_data_ = {}
for key,value in data.items():
if value.__class__.__name__ in ["OutputObject", "ResponseObject"]:
value = str(value.dict(safe=True))
elif value.__class__.__name__ in ["Dictionary", "Array"]:
value = str(_response_.serialize(value.raw(), safe=True))
elif isinstance(value, (list,Array,dict,Dictionary)):
value = str(value)
else:
value = Formats.denitialize(value)
_data_[key] = value
return _data_
# get.
def get(self,
# the url (str) (#1).
url=None,
# the sended post data (dict) (#2).
data={},
# the headers (dict).
headers={},
# serialize output to dictionary.
serialize=False,
# the method.
method="get",
):
# clean txt.
def clean_txt(txt, raw=True):
#txt = bs4(txt, 'html.parser')
while True:
if "<!DOCTYPE html>" in txt:
old = str(txt)
txt = String(txt).replace_between(["<!DOCTYPE html>", "</html>"], " *** HTML CODE ****")
if txt == old:
return self.error("Unable remove the html code from the txt.")
else: break
return txt
# url.
url = url.replace("http://", "").replace("https://", "")
url = f"{Boolean(self.https).string(true='https', false='http')}://"+str(gfp.clean(f"{url}/", remove_double_slash=True, remove_last_slash=False, remove_first_slash=True))
if data != {}: url += self.encode(data)
# request.
if method.lower() == "get":
original_request_object = __requests__.get(url, allow_redirects=self.allow_redirects, headers=headers)
elif method.lower() == "post":
original_request_object = __requests__.post(url, allow_redirects=self.allow_redirects, headers=headers)
elif method.lower() == "delete":
original_request_object = __requests__.delete(url, allow_redirects=self.allow_redirects, headers=headers)
else:
raise ValueError(f"Unkown method: {method}.")
if original_request_object.status_code != 200:
return _response_.error(f"Invalid request ({url}) [{original_request_object.status_code}]: {(clean_txt(original_request_object.text))}")
if serialize:
try: response = _response_.ResponseObject(original_request_object.json())
except Exception as e:
return _response_.error(f"Request ({url}) [{original_request_object.status_code}]: Unable to serialize output: {(clean_txt(original_request_object.text))}.")
return response
return original_request_object
# the restapi over ssh object class.
# the restapi over ssh object class.
class RestAPI(objects.Object):
    """Small helper for querying a REST API rooted at ``url``; the
    ``data`` dict (e.g. an api key) is merged into every request."""

    def __init__(self,
        # the root url (optional).
        url=None,
        # the default data send with every request (will be appended to local data).
        data={
            "api_key":None,
        },
    ):
        # docs.
        DOCS = {
            "module":"dev0s.requests.RestAPI",
            "initialized":False,
            "description":[],
            "chapter":"Requests",
        }
        # defaults.
        objects.Object.__init__(self, traceback="dev0s.requests.RestAPI")
        # attributes.
        self.url = url
        # BUGFIX: copy the dict so instances never share (and mutate) the
        # class-level mutable default argument.
        self.data = dict(data)
    #
    def get(self, url="/", data=None):
        """GET *url* (relative to the root url) with *data* merged with
        the instance defaults; returns a serialized ResponseObject."""
        # BUGFIX: merge into a fresh dict. The old code wrote the instance
        # defaults straight into the caller's dict (and into the shared
        # ``{}`` default), leaking keys between calls. Instance defaults
        # still take precedence over caller-supplied keys, as before.
        merged = dict(data or {})
        merged.update(self.data)
        # url.
        if self.url != None:
            url = gfp.clean(f"{self.url}/{url}/", remove_double_slash=True, remove_last_slash=False)
        # request.
        return requests.get(serialize=True, url=url, data=merged)
    #
#
#
#
# initialized classes.
# Module-level singleton used by RestAPI.get above. The name deliberately
# shadows the third-party ``requests`` package, which this module imported
# as ``__requests__`` for that reason.
requests = Requests()
#
|
# preferences_dialog.py
#
# MIT License
#
# Copyright (c) 2020-2021 Andrey Maksimov <meamka@ya.ru>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from gettext import gettext as _
import gi
from gi.repository import Gtk, Granite, GtkSource, Gdk
from norka.services.medium import Medium
from norka.gobject_worker import GObjectWorker
from norka.services.writeas import Writeas
class PreferencesDialog(Granite.Dialog):
    """Preferences window with Behavior / Interface / Export tabs.

    Every control writes its value straight into the supplied ``settings``
    object as soon as it changes; there is no separate apply/cancel step.
    """

    __gtype_name__ = 'SettingsDialog'

    def __init__(self, transient_for, settings):
        """Build the whole dialog UI and wire all controls to *settings*."""
        super().__init__(transient_for=transient_for, modal=False)
        self.settings = settings

        self.set_default_size(340, 340)
        # Prevent shrinking the dialog below its initial 340x340 size.
        hints = Gdk.Geometry()
        hints.min_width = 340
        hints.min_height = 340
        self.set_geometry_hints(None, hints, Gdk.WindowHints.MIN_SIZE)
        self.set_border_width(5)
        self.set_deletable(False)
        self.set_title(_('Preferences'))

        # In-window notification used for Medium / Write.as feedback.
        self.toast = Granite.WidgetsToast(title=_("Toast"))

        # --- Behavior controls ------------------------------------------
        indent_width = Gtk.SpinButton.new_with_range(1, 24, 1)
        indent_width.set_value(self.settings.get_int('indent-width'))
        indent_width.connect('value-changed', self.on_indent_width)

        self.sort_switch = Gtk.Switch(halign=Gtk.Align.START, valign=Gtk.Align.CENTER)
        self.sort_switch.set_state(self.settings.get_boolean('sort-desc'))
        self.sort_switch.connect("state-set", self.on_sort_desc)

        self.spellcheck_switch = Gtk.Switch(halign=Gtk.Align.START, valign=Gtk.Align.CENTER)
        self.spellcheck_switch.set_state(self.settings.get_boolean('spellcheck'))
        self.spellcheck_switch.connect("state-set", self.on_spellcheck)

        self.autosave_switch = Gtk.Switch(halign=Gtk.Align.START, valign=Gtk.Align.CENTER)
        self.autosave_switch.set_state(self.settings.get_boolean('autosave'))
        self.autosave_switch.connect("state-set", self.on_autosave)

        self.autoindent_switch = Gtk.Switch(halign=Gtk.Align.START, valign=Gtk.Align.CENTER)
        self.autoindent_switch.set_state(self.settings.get_boolean('autoindent'))
        self.autoindent_switch.connect("state-set", self.on_autoindent)

        self.spaces_tabs_switch = Gtk.Switch(halign=Gtk.Align.START, valign=Gtk.Align.CENTER)
        self.spaces_tabs_switch.set_state(self.settings.get_boolean('spaces-instead-of-tabs'))
        self.spaces_tabs_switch.connect("state-set", self.on_spaces_tabs)

        # Behavior tab layout (labels in cols 0-1, controls in col 2).
        general_grid = Gtk.Grid(column_spacing=8, row_spacing=8)
        general_grid.attach(Granite.HeaderLabel(_("General")), 0, 0, 3, 1)
        general_grid.attach(Gtk.Label(_("Save files when changed:"), hexpand=True, halign=Gtk.Align.END), 0, 1, 2, 1)
        general_grid.attach(self.autosave_switch, 2, 1, 1, 1)
        general_grid.attach(Gtk.Label(_("Sort documents backwards:"), hexpand=True, halign=Gtk.Align.END), 0, 2, 2, 1)
        general_grid.attach(self.sort_switch, 2, 2, 1, 1)
        general_grid.attach(Gtk.Label(_("Spell checking:"), hexpand=True, halign=Gtk.Align.END), 0, 3, 2, 1)
        general_grid.attach(self.spellcheck_switch, 2, 3, 1, 1)
        general_grid.attach(Granite.HeaderLabel(_("Tabs")), 0, 4, 3, 1)
        general_grid.attach(Gtk.Label(_("Automatic indentation:"), hexpand=True, halign=Gtk.Align.END), 0, 5, 2, 1)
        general_grid.attach(self.autoindent_switch, 2, 5, 1, 1)
        general_grid.attach(Gtk.Label(_("Insert spaces instead of tabs:"), hexpand=True, halign=Gtk.Align.END), 0, 6, 2,
                            1)
        general_grid.attach(self.spaces_tabs_switch, 2, 6, 1, 1)
        general_grid.attach(Gtk.Label(_("Tab width:"), hexpand=True, halign=Gtk.Align.END), 0, 7, 2, 1)
        general_grid.attach(indent_width, 2, 7, 2, 1)

        # Interface grid
        interface_grid = Gtk.Grid(column_spacing=8, row_spacing=8)
        scrolled = Gtk.ScrolledWindow(hexpand=True, vexpand=True)

        self.dark_theme_switch = Gtk.Switch(halign=Gtk.Align.START, valign=Gtk.Align.CENTER)
        self.dark_theme_switch.set_state(self.settings.get_boolean('prefer-dark-theme'))
        self.dark_theme_switch.connect("state-set", self.on_dark_theme)

        style_chooser = GtkSource.StyleSchemeChooserWidget(hexpand=True, vexpand=True)
        style_chooser.connect('notify::style-scheme', self.on_scheme_changed)
        scrolled.add(style_chooser)

        scheme = GtkSource.StyleSchemeManager.get_default().get_scheme(
            self.settings.get_string('stylescheme')
        )
        # Fall back to the built-in "classic" scheme when the stored id is unknown.
        if not scheme:
            scheme = GtkSource.StyleSchemeManager().get_scheme("classic")
        style_chooser.set_style_scheme(scheme)

        interface_grid.attach(Granite.HeaderLabel(_("Appearance")), 0, 0, 3, 1)
        interface_grid.attach(Gtk.Label(_("Prefer dark theme:"), hexpand=True, halign=Gtk.Align.END), 0, 1, 2, 1)
        interface_grid.attach(self.dark_theme_switch, 2, 1, 1, 1)
        interface_grid.attach(Granite.HeaderLabel(_("Styles")), 0, 2, 3, 1)
        interface_grid.attach(scrolled, 0, 3, 3, 1)

        # Export grid
        export_grid = Gtk.Grid(column_spacing=8, row_spacing=8)
        self.render_medium(export_grid)
        self.render_writeas(export_grid)

        # Main Stack
        main_stack = Gtk.Stack(margin=6, margin_bottom=18, margin_top=8)
        main_stack.add_titled(general_grid, "behavior", _("Behavior"))
        main_stack.add_titled(interface_grid, "interface", _("Interface"))
        main_stack.add_titled(export_grid, "export", _("Export"))

        main_stackswitcher = Gtk.StackSwitcher(homogeneous=True)
        main_stackswitcher.set_stack(main_stack)
        main_stackswitcher.set_halign(Gtk.Align.CENTER)

        main_grid = Gtk.Grid()
        main_grid.attach(main_stackswitcher, 0, 0, 1, 1)
        main_grid.attach(main_stack, 0, 1, 1, 1)

        # Overlay lets the toast float above the settings content.
        self.overlay = Gtk.Overlay()
        self.overlay.add(main_grid)
        self.overlay.add_overlay(self.toast)

        self.get_content_area().add(self.overlay)

        close_button = Gtk.Button(label=_("Close"))
        close_button.connect('clicked', self.on_close_activated)
        self.add_action_widget(close_button, 0)

    def render_medium(self, content_grid):
        """Build the Medium.com export section into *content_grid*."""
        self.medium_token = Gtk.Entry(hexpand=True, placeholder_text=_("Token"))
        self.medium_token.set_text(self.settings.get_string('medium-personal-token'))
        self.medium_token.connect("changed", self.on_medium_token)
        self.medium_link = Gtk.LinkButton("https://medium.com/me/settings")
        self.medium_link.set_label(_("Create Integration token and copy it here"))
        content_grid.attach(Granite.HeaderLabel("Medium.com"), 0, 0, 3, 1)
        content_grid.attach(Gtk.Label(_("Personal Token:"), halign=Gtk.Align.END), 0, 1, 1, 1)
        content_grid.attach(self.medium_token, 1, 1, 2, 1)
        content_grid.attach(self.medium_link, 0, 2, 3, 1)

    def render_writeas(self, content_grid):
        """Build the Write.as login/logout section into *content_grid*."""
        self.writeas_login = Gtk.Entry(hexpand=True, placeholder_text=_("Login"))
        self.writeas_password = Gtk.Entry(hexpand=True, placeholder_text=_("Password"), visibility=False)
        # Check for emptiness
        self.writeas_login.connect('changed', self.writeas_entry_changed)
        self.writeas_password.connect('changed', self.writeas_entry_changed)
        self.writeas_login_button = Gtk.Button(label=_("Login"), sensitive=False)
        self.writeas_login_button.connect("clicked", self.on_writeas_login)
        self.writeas_logout_button = Gtk.Button(label=_("Logout"), hexpand=True)
        self.writeas_logout_button.connect("clicked", self.on_writeas_logout)
        content_grid.attach(Granite.HeaderLabel("Write.as"), 0, 3, 3, 1)
        # Login form, shown only while logged out.
        self.writeas_login_revealer = Gtk.Revealer()
        self.writeas_login_revealer.set_transition_type(Gtk.RevealerTransitionType.CROSSFADE)
        login_grid = Gtk.Grid(column_spacing=12, row_spacing=6)
        login_grid.attach(Gtk.Label(_("Login:"), halign=Gtk.Align.END), 0, 0, 1, 1)
        login_grid.attach(self.writeas_login, 1, 0, 2, 1)
        login_grid.attach(Gtk.Label(_("Password:"), halign=Gtk.Align.END), 0, 1, 1, 1)
        login_grid.attach(self.writeas_password, 1, 1, 2, 1)
        login_grid.attach(self.writeas_login_button, 0, 2, 3, 1)
        self.writeas_login_revealer.add(login_grid)
        # Logout button, shown only while logged in.
        self.writeas_logout_revealer = Gtk.Revealer()
        self.writeas_logout_revealer.set_transition_type(Gtk.RevealerTransitionType.CROSSFADE)
        logout_grid = Gtk.Grid(column_spacing=12, row_spacing=6)
        logout_grid.attach(self.writeas_logout_button, 0, 0, 3, 1)
        self.writeas_logout_revealer.add(logout_grid)
        # Both revealers occupy the same grid cell; writeas_reveal() below
        # decides which one is visible.
        content_grid.attach(self.writeas_login_revealer, 0, 4, 3, 1)
        content_grid.attach(self.writeas_logout_revealer, 0, 4, 3, 1)
        self.writeas_reveal()
        # Re-sync the revealers whenever settings change (see on_settings_changed).
        self.settings.connect("changed", self.on_settings_changed)

    def on_spellcheck(self, sender: Gtk.Widget, state):
        """Persist spell-check state; return False so GTK still updates the switch."""
        self.settings.set_boolean("spellcheck", state)
        return False

    def on_sort_desc(self, sender: Gtk.Widget, state):
        """Persist descending-sort state; return False to keep default handling."""
        self.settings.set_boolean("sort-desc", state)
        return False

    def on_autosave(self, sender: Gtk.Widget, state):
        """Persist autosave state; return False to keep default handling."""
        self.settings.set_boolean("autosave", state)
        return False

    def on_close_activated(self, sender: Gtk.Widget):
        """Close button handler: destroy the dialog."""
        self.destroy()

    def on_dark_theme(self, sender, state):
        """Persist the dark-theme preference."""
        self.settings.set_boolean('prefer-dark-theme', state)

    def on_scheme_changed(self, style_chooser, event):
        """Persist the chosen GtkSource style scheme id."""
        self.settings.set_string('stylescheme', style_chooser.get_style_scheme().get_id())

    def on_autoindent(self, sender, state):
        """Persist the auto-indentation preference."""
        self.settings.set_boolean('autoindent', state)

    def on_spaces_tabs(self, sender, state):
        """Persist the spaces-instead-of-tabs preference."""
        self.settings.set_boolean('spaces-instead-of-tabs', state)

    def on_indent_width(self, sender: Gtk.SpinButton) -> None:
        """Persist the tab/indent width from the spin button."""
        self.settings.set_int('indent-width', sender.get_value_as_int())

    def on_medium_token(self, sender: Gtk.Entry) -> None:
        """Store the Medium token and validate it in a background worker.

        The entry is disabled while validation runs; a non-empty token is
        checked via Medium.get_user, an empty one clears the stored user id.
        """
        token = sender.get_text().strip()
        self.settings.set_string("medium-personal-token", token)
        if token:
            sender.set_sensitive(False)
            medium_client = Medium(access_token=token)
            GObjectWorker.call(medium_client.get_user, callback=self.on_medium_callback)
        else:
            self.settings.set_string("medium-user-id", "")

    def on_medium_callback(self, result):
        """Worker callback: store the Medium user id and show a toast on success."""
        self.medium_token.set_sensitive(True)
        if result:
            self.toast.set_title(f"Token accepted, {result['name']}!")
            self.settings.set_string("medium-user-id", result['id'])
            self.toast.send_notification()
        else:
            self.on_medium_errorback()

    def on_medium_errorback(self, error=None):
        """Worker error path: re-enable the entry, toast, and clear the user id."""
        self.medium_token.set_sensitive(True)
        self.toast.set_title(_("Something goes wrong!"))
        self.toast.send_notification()
        self.settings.set_string("medium-user-id", "")

    def writeas_entry_changed(self, entry: Gtk.Entry):
        """Enable the login button only when both login and password are non-empty."""
        state = (self.writeas_login.get_text().strip() != ""
                 and self.writeas_password.get_text().strip() != "")
        self.writeas_login_button.set_sensitive(state)

    def on_writeas_login(self, button: Gtk.Button):
        """Login to write.as and save token on success
        """
        # Disable widgets while login
        self.writeas_login.set_sensitive(False)
        self.writeas_password.set_sensitive(False)
        self.writeas_login_button.set_sensitive(False)
        GObjectWorker.call(Writeas().login,
                           (self.writeas_login.get_text(), self.writeas_password.get_text()),
                           self.on_writeas_callback)

    def on_writeas_logout(self, button: Gtk.Button):
        """Clear writeas access token settings
        """
        self.toast.send_notification()
        self.settings.set_string("writeas-access-token", "")

    def writeas_reveal(self):
        """Toggle writeas revealers state
        """
        # Maybe I should rewrite it with bind_property
        if self.settings.get_string("writeas-access-token"):
            self.writeas_login_revealer.set_reveal_child(False)
            self.writeas_logout_revealer.set_reveal_child(True)
            self.writeas_login_revealer.set_visible(False)
            self.writeas_logout_revealer.set_visible(True)
        else:
            self.writeas_login_revealer.set_reveal_child(True)
            self.writeas_logout_revealer.set_reveal_child(False)
            self.writeas_login_revealer.set_visible(True)
            self.writeas_logout_revealer.set_visible(False)

    def on_writeas_callback(self, result):
        """Worker callback for the write.as login: store the token or toast an error."""
        data, error = result
        self.toast.set_default_action(None)
        if error:
            self.toast.set_title(_("Login failed."))
            self.toast.send_notification()
        if data and "access_token" in data:
            self.settings.set_string("writeas-access-token", data["access_token"])
            self.toast.set_title(_("Logged as {}.").format(data['user']['username']))
            self.toast.send_notification()
        # Enable widgets while login
        self.writeas_login.set_sensitive(True)
        self.writeas_password.set_sensitive(True)
        self.writeas_login_button.set_sensitive(True)

    def on_settings_changed(self, settings, key):
        """React to settings changes: token changes re-toggle the revealers."""
        if key == "writeas-access-token":
            self.writeas_reveal()
|
from torch import nn
import torch
import torch.nn.functional as F
import cv2
import numpy as np
from torchvision import transforms
from torchvision import models
from torch.autograd import Variable
from PIL import Image
from torchvision import utils
from torch.autograd import Variable
import torch.autograd as autograd
from hparams import hparams as hp
from criteria import id_loss, moco_loss
from criteria.lpips.lpips import LPIPS
class Base_Loss(nn.Module):
    """Combined reconstruction loss: MSE + LPIPS + an identity/feature loss,
    weighted by the ``hp.loss_lambda_*`` hyper-parameters.

    For the 'car' dataset a MoCo feature-similarity loss replaces the
    face-identity loss (cars have no faces to embed).
    """

    def __init__(self):
        super(Base_Loss,self).__init__()
        # NOTE(review): this import is unused since the line below is
        # commented out; kept to avoid changing behavior here.
        import lpips
        # self.loss_fn_alex = lpips.LPIPS(net='alex') # best forward scores
        # NOTE(review): attribute is named "vgg" but the backbone actually
        # used is 'alex' — confirm which network is intended.
        self.loss_fn_vgg = LPIPS(net_type='alex').cuda().eval()
        if hp.dataset_type == 'car':
            self.moco_loss = moco_loss.MocoLoss()
        else:
            self.id_loss = id_loss.IDLoss().cuda().eval()
        self.criterion_mse = nn.MSELoss()

    def forward(self, gt,predict_images):
        """Return (total, mse, lpips, perceptual/id) loss terms for a batch.

        ``gt`` and ``predict_images`` are image batches of matching shape —
        presumably (N, C, H, W) tensors on the GPU; confirm against callers.
        """
        loss_mse = self.criterion_mse(gt, predict_images)
        loss_lpips = self.loss_fn_vgg(gt,predict_images)
        if hp.dataset_type == 'car':
            loss_per = self.moco_loss(predict_images,gt,gt)[0]
        else:
            loss_per = self.id_loss(predict_images,gt,gt)[0]
        # Weighted sum of the three terms; lambdas come from hparams.
        loss_all = hp.loss_lambda_mse*loss_mse + hp.loss_lambda_lpips*loss_lpips + hp.loss_lambda_id*loss_per
        return loss_all,loss_mse,loss_lpips,loss_per
|
from flask import Flask, render_template, Response, request, redirect, url_for
# Flask application instance; the route handlers below register against it.
app = Flask(__name__)
@app.route("/")
def index():
    """Serve the prediction landing page."""
    page = 'prediction.html'
    return render_template(page)
@app.route("/forward/", methods=['POST'])
def predict():
    """Handle the forward action (POST) and re-render the page with a status message."""
    # Moving forward code
    forward_message = "Testing..."
    context = {'message': forward_message}
    return render_template('prediction.html', **context)
@app.route('/json')
def json():
    """Render the prediction page (placeholder for a JSON endpoint).

    NOTE: the view-function name shadows the stdlib ``json`` module
    inside this file's namespace.
    """
    return render_template('prediction.html')
@app.route('/background_process_test')
def background_process_test():
    """Minimal endpoint for background/AJAX liveness tests.

    Bug fix: ``print "Hello"`` is Python 2 statement syntax and raises a
    SyntaxError under Python 3 (which the rest of this file targets);
    use the print() function instead.
    """
    print("Hello")
    return "nothing"
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Entry point for Django's command-line administrative tasks."""
    # Default settings module; an already-exported env var wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myblog.settings.local')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as err:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from err
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
|
from GodOfPython.P10_ModulePackage.smtpkg7 import *
def smart_on():
    """Interactive menu loop for the smart-device demo.

    Reads a choice from stdin each iteration: '0' exits the loop,
    '1' takes a photo, '2' makes a call, '3' is a reserved/unimplemented
    feature. ``camera`` and ``phone`` come from the star import above.
    """
    while True:
        choice = input('what do you want?')
        if choice=='0':
            break
        if choice=='1':
            camera.camera.photo()
        elif choice=='2':
            phone.phone.makeacall()
        elif choice=='3':
            # Korean: "Feature to be implemented later."
            print('나중에 구현될 기능')
    # Korean: "The program has terminated."
    print('프로그램이 종료되었습니다.')

if __name__=='__main__':
    smart_on()
|
// Gatsby site configuration: site metadata plus the plugin pipeline
// (markdown transforms, PWA manifest, analytics, sitemap, RSS feed).
module.exports = {
  pathPrefix: '/',
  siteMetadata: {
    author: 'Kostas',
    title: `Kostas' Blog`,
    siteUrl: `https://cuocsonganvui.net`,
    description: `I'm Kostas Bariotis, a web developer, a proud wanderer and a passionate doer. My mission is to write clean and efficient code, to solve problems on the web and to learn something more.`
  },
  plugins: [
    'gatsby-plugin-catch-links',
    'gatsby-plugin-sass',
    {
      // Source markdown content from src/pages.
      resolve: 'gatsby-source-filesystem',
      options: {
        path: `${__dirname}/src/pages`,
        name: 'pages',
      },
    },
    {
      // Markdown -> HTML, with heading anchors, code highlighting and
      // responsive images.
      resolve: 'gatsby-transformer-remark',
      options: {
        plugins: [
          `gatsby-remark-autolink-headers`,
          {
            resolve: `gatsby-remark-prismjs`,
            options: {
              classPrefix: 'language-',
            },
          },
          {
            resolve: 'gatsby-remark-responsive-image',
            options: {
              maxWidth: 750,
              linkImagesToOriginal: true
            }
          },
        ]
      }
    },
    'gatsby-plugin-react-helmet',
    'gatsby-plugin-sharp',
    {
      resolve: `gatsby-plugin-google-analytics`,
      options: {
        trackingId: 'UA-129731173-1',
      },
    },
    {
      // PWA web-app manifest.
      resolve: `gatsby-plugin-manifest`,
      options: {
        name: "Kostas",
        short_name: "KBariotis",
        start_url: "/",
        theme_color: "#676d9c",
        display: "minimal-ui",
        // NOTE(review): the first two icons live under `/favicons/` while
        // the rest use `/favicon/`, and android-chrome-192x192.png appears
        // twice (once per directory). Presumably one directory name is a
        // typo — verify against the static/ assets folder.
        icons: [
          {
            src: `/favicons/android-chrome-192x192.png`,
            sizes: `192x192`,
            type: `image/png`,
          },
          {
            src: `/favicons/android-chrome-512x512.png`,
            sizes: `512x512`,
            type: `image/png`,
          },
          {
            src: `/favicon/apple-touch-icon-57x57.png`,
            sizes: `57x57`,
            type: `image/png`,
          },
          {
            src: `/favicon/apple-touch-icon-60x60.png`,
            sizes: `60x60`,
            type: `image/png`,
          },
          {
            src: `/favicon/apple-touch-icon-72x72.png`,
            sizes: `72x72`,
            type: `image/png`,
          },
          {
            src: `/favicon/apple-touch-icon-76x76.png`,
            sizes: `76x76`,
            type: `image/png`,
          },
          {
            src: `/favicon/apple-touch-icon-114x114.png`,
            sizes: `114x114`,
            type: `image/png`,
          },
          {
            src: `/favicon/apple-touch-icon-120x120.png`,
            sizes: `120x120`,
            type: `image/png`,
          },
          {
            src: `/favicon/apple-touch-icon-144x144.png`,
            sizes: `144x144`,
            type: `image/png`,
          },
          {
            src: `/favicon/apple-touch-icon-152x152.png`,
            sizes: `152x152`,
            type: `image/png`,
          },
          {
            src: `/favicon/apple-touch-icon-180x180.png`,
            sizes: `180x180`,
            type: `image/png`,
          },
          {
            src: `/favicon/favicon-36x36.png`,
            sizes: `36x36`,
            type: `image/png`,
          },
          {
            src: `/favicon/android-chrome-192x192.png`,
            sizes: `192x192`,
            type: `image/png`,
          },
          {
            src: `/favicon/favicon-96x96.png`,
            sizes: `96x96`,
            type: `image/png`,
          },
        ],
      },
    },
    {
      resolve: `gatsby-plugin-sitemap`
    },
    {
      resolve: `gatsby-plugin-canonical-urls`,
      options: {
        siteUrl: `https://cuocsonganvui.net`,
      }
    },
    {
      // Progress bar shown during page navigation.
      resolve: `gatsby-plugin-nprogress`,
      options: {
        color: `tomato`,
      }
    },
    {
      // RSS feed built from all non-draft markdown posts.
      resolve: `gatsby-plugin-feed`,
      options: {
        query: `
          {
            site {
              siteMetadata {
                title
                description
                siteUrl
              }
            }
          }
        `,
        feeds: [
          {
            serialize: ({ query: { site, allMarkdownRemark } }) => {
              return allMarkdownRemark.edges
                .filter(post => !post.node.frontmatter.draft)
                .map(edge => {
                  return Object.assign({}, edge.node.frontmatter, {
                    description: edge.node.excerpt,
                    url: site.siteMetadata.siteUrl + edge.node.frontmatter.path,
                    guid: site.siteMetadata.siteUrl + edge.node.frontmatter.path,
                    custom_elements: [{ 'content:encoded': edge.node.html }],
                  });
                });
            },
            query: `
              {
                allMarkdownRemark(
                  limit: 1000,
                  sort: { order: DESC, fields: [frontmatter___date] }
                ) {
                  edges {
                    node {
                      excerpt
                      html
                      frontmatter {
                        title
                        date
                        path
                        draft
                      }
                    }
                  }
                }
              }
            `,
            output: '/rss.xml'
          }
        ]
      }
    }
  ],
}
|
/**
* @license angular-awesome-slider - v2.3.7
* (c) 2013 Julien VALERY https://github.com/darul75/angular-awesome-slider
* License: MIT
**/
!function(a){"use strict";a.module("angularAwesomeSlider",[]).directive("slider",["$compile","$templateCache","$timeout","$window","slider",function(b,c,d,e,f){return{restrict:"AE",require:"?ngModel",scope:{options:"=",ngDisabled:"="},priority:1,link:function(g,h,i,j){function k(){a.element(e).bind("resize",function(a){g.slider.onresize()})}if(j){if(!g.options)throw new Error('You must provide a value for "options" attribute.');a.injector();a.isString(g.options)&&(g.options=a.toJson(g.options)),g.mainSliderClass="jslider",g.mainSliderClass+=g.options.skin?" jslider_"+g.options.skin:" ",g.mainSliderClass+=g.options.vertical?" vertical ":"",g.mainSliderClass+=g.options.css?" sliderCSS":"",g.mainSliderClass+=g.options.className?" "+g.options.className:"",g.options.limits=a.isDefined(g.options.limits)?g.options.limits:!0,h.after(b(c.get("ng-slider/slider-bar.tmpl.html"))(g,function(a,b){b.tmplElt=a}));var l=!1,m=function(){g.from=""+g.options.from,g.to=""+g.options.to,g.options.calculate&&"function"==typeof g.options.calculate&&(g.from=g.options.calculate(g.from),g.to=g.options.calculate(g.to));var b={from:g.options.round?parseFloat(g.options.from):parseInt(g.options.from,10),to:g.options.round?parseFloat(g.options.to):parseInt(g.options.to,10),step:g.options.step,smooth:g.options.smooth,limits:g.options.limits,round:g.options.round||!1,value:j.$viewValue,dimension:"",scale:g.options.scale,modelLabels:g.options.modelLabels,vertical:g.options.vertical,css:g.options.css,className:g.options.className,realtime:g.options.realtime,cb:n,threshold:g.options.threshold,heterogeneity:g.options.heterogeneity};b.calculate=g.options.calculate||void 0,b.onstatechange=g.options.onstatechange||void 0,g.slider=g.slider?g.slider.init(h,g.tmplElt,b):p(h,g.tmplElt,b),l||k();var c=g.tmplElt.find("div")[7];a.element(c).html(g.slider.generateScale()),g.slider.drawScale(c),g.ngDisabled&&o(g.ngDisabled),l=!0};j.$render=function(){(j.$viewValue||0===j.$viewValue)&&("number"==typeof 
j.$viewValue&&(j.$viewValue=""+j.$viewValue),j.$viewValue.split(";")[1]||(g.mainSliderClass+=" jslider-single"),g.slider&&(g.slider.getPointers()[0].set(j.$viewValue.split(";")[0],!0),j.$viewValue.split(";")[1]&&g.slider.getPointers()[1].set(j.$viewValue.split(";")[1],!0)))};var n=function(a,b){g.disabled||(g.$apply(function(){j.$setViewValue(a)}),g.options.callback&&g.options.callback(a,b))};g.$watch("options",function(a){d(function(){m()})},g.watchOptions||!0);var o=function(a){g.disabled=a,g.slider&&(g.tmplElt.toggleClass("disabled"),g.slider.disable(a))};g.$watch("ngDisabled",function(a){o(a)}),g.limitValue=function(b){return g.options.modelLabels?a.isFunction(g.options.modelLabels)?g.options.modelLabels(b):void 0!==g.options.modelLabels[b]?g.options.modelLabels[b]:b:b};var p=function(a,b,c){return new f(a,b,c)}}}}}]).config(function(){}).run(function(){})}(angular),function(a){"use strict";a.module("angularAwesomeSlider").constant("sliderConstants",{SLIDER:{settings:{from:1,to:40,step:1,smooth:!0,limits:!1,round:!1,value:"3",dimension:"",vertical:!1,calculate:!1,onstatechange:!1,callback:!1,realtime:!1},className:"jslider",selector:".jslider-",css:{visible:{visibility:"visible"},hidden:{visibility:"hidden"}}},EVENTS:{}})}(angular),function(a){"use strict";a.module("angularAwesomeSlider").factory("sliderUtils",["$window",function(a){return{offset:function(a){var b=a[0],c=0,d=0,e=document.documentElement||document.body,f=window.pageXOffset||e.scrollLeft,g=window.pageYOffset||e.scrollTop;return c=b.getBoundingClientRect().left+f,d=b.getBoundingClientRect().top+g,{left:c,top:d}},browser:function(){var b=a.navigator.userAgent,c={mozilla:/mozilla/i,chrome:/chrome/i,safari:/safari/i,firefox:/firefox/i,ie:/internet explorer/i};for(var d in c)if(c[d].test(b))return d;return"unknown"}}}])}(angular),function(a){"use strict";a.module("angularAwesomeSlider").factory("sliderDraggable",["sliderUtils",function(b){function c(){this._init.apply(this,arguments)}return 
c.prototype.oninit=function(){},c.prototype.events=function(){},c.prototype.onmousedown=function(){this.ptr.css({position:"absolute"})},c.prototype.onmousemove=function(a,b,c){this.ptr.css({left:b,top:c})},c.prototype.onmouseup=function(){},c.prototype.isDefault={drag:!1,clicked:!1,toclick:!0,mouseup:!1},c.prototype._init=function(){if(arguments.length>0){if(this.ptr=arguments[0],this.parent=arguments[2],!this.ptr)return;this.is={},a.extend(this.is,this.isDefault);var c=b.offset(this.ptr);this.d={left:c.left,top:c.top,width:this.ptr[0].clientWidth,height:this.ptr[0].clientHeight},this.oninit.apply(this,arguments),this._events()}},c.prototype._getPageCoords=function(a){return a.targetTouches&&a.targetTouches[0]?{x:a.targetTouches[0].pageX,y:a.targetTouches[0].pageY}:{x:a.pageX,y:a.pageY}},c.prototype._bindEvent=function(a,b,c){this.supportTouches_?a[0].addEventListener(this.events_[b],c,!1):a.bind(this.events_[b],c)},c.prototype._events=function(){var b=this;this.supportTouches_="ontouchend"in document,this.events_={click:this.supportTouches_?"touchstart":"click",down:this.supportTouches_?"touchstart":"mousedown",move:this.supportTouches_?"touchmove":"mousemove",up:this.supportTouches_?"touchend":"mouseup",mousedown:(this.supportTouches_,"mousedown")};var c=a.element(window.document);this._bindEvent(c,"move",function(a){b.is.drag&&(a.stopPropagation(),a.preventDefault(),b.parent.disabled||b._mousemove(a))}),this._bindEvent(c,"down",function(a){b.is.drag&&(a.stopPropagation(),a.preventDefault())}),this._bindEvent(c,"up",function(a){b._mouseup(a)}),this._bindEvent(this.ptr,"down",function(a){return b._mousedown(a),!1}),this._bindEvent(this.ptr,"up",function(a){b._mouseup(a)}),this.events()},c.prototype._mousedown=function(b){this.is.drag=!0,this.is.clicked=!1,this.is.mouseup=!1;var 
c=this._getPageCoords(b);this.cx=c.x-this.ptr[0].offsetLeft,this.cy=c.y-this.ptr[0].offsetTop,a.extend(this.d,{left:c.x,top:c.y,width:this.ptr[0].clientWidth,height:this.ptr[0].clientHeight}),this.outer&&this.outer.get(0)&&this.outer.css({height:Math.max(this.outer.height(),$(document.body).height()),overflow:"hidden"}),this.onmousedown(b)},c.prototype._mousemove=function(a){this.is.toclick=!1;var b=this._getPageCoords(a);this.onmousemove(a,b.x-this.cx,b.y-this.cy)},c.prototype._mouseup=function(a){if(this.is.drag){this.is.drag=!1;var c=b.browser();this.outer&&this.outer.get(0)&&("mozilla"===c?this.outer.css({overflow:"hidden"}):this.outer.css({overflow:"visible"}),this.outer.css({height:"auto"})),this.onmouseup(a)}},c}])}(angular),function(a){"use strict";a.module("angularAwesomeSlider").factory("sliderPointer",["sliderDraggable","sliderUtils",function(b,c){function d(){b.apply(this,arguments)}return d.prototype=new b,d.prototype.oninit=function(b,c,d,e){this.uid=c,this.parent=e,this.value={},this.vertical=d,this.settings=a.copy(e.settings),this.threshold=this.settings.threshold},d.prototype.onmousedown=function(a){var b=c.offset(this.parent.domNode),d={left:b.left,top:b.top,width:this.parent.domNode[0].clientWidth,height:this.parent.domNode[0].clientHeight};this._parent={offset:d,width:d.width,height:d.height},this.ptr.addClass("jslider-pointer-hover")},d.prototype.onmousemove=function(b,c,d){var e=this._getPageCoords(b);this._set(this.vertical?this.calc(e.y):this.calc(e.x)),this.settings.realtime&&this.settings.cb&&a.isFunction(this.settings.cb)&&this.allowed&&this.settings.cb.call(this.parent,this.parent.getValue(),!this.is.drag)},d.prototype.onmouseup=function(b){this.settings.cb&&a.isFunction(this.settings.cb)&&this.allowed&&this.settings.cb.call(this.parent,this.parent.getValue(),!this.is.drag),this.is.drag||this.ptr.removeClass("jslider-pointer-hover")},d.prototype.limits=function(a){return this.parent.limits(a,this)},d.prototype.calc=function(a){return 
this.vertical?this.limits(100*(a-this._parent.offset.top)/this._parent.height):this.limits(100*(a-this._parent.offset.left)/this._parent.width)},d.prototype.set=function(a,b){this.value.origin=this.parent.round(a),this._set(this.parent.valueToPrc(a,this),b)},d.prototype._set=function(a,b){this.allowed=!0;var c=this.value.origin,d=this.value.prc;if(this.value.origin=this.parent.prcToValue(a),this.value.prc=a,this.threshold&&this.parent.o.pointers[1]){var e=this.value.origin,f=this.parent.o.pointers[0===this.uid?1:0].value.origin;this.allowed=Math.abs(f-e)>=this.threshold,this.allowed||void 0===c||void 0===d||(this.value.origin=c,this.value.prc=d)}this.vertical?this.ptr.css({top:this.value.prc+"%",marginTop:-5}):this.ptr.css({left:this.value.prc+"%"}),this.parent.redraw(this)},d}])}(angular),function(a){"use strict";a.module("angularAwesomeSlider").factory("slider",["sliderPointer","sliderConstants","sliderUtils",function(b,c,d){function e(){return this.init.apply(this,arguments)}return e.prototype.init=function(b,d,e){return this.settings=e,this.inputNode=b,this.inputNode.addClass("ng-hide"),this.settings.interval=this.settings.to-this.settings.from,this.settings.calculate&&a.isFunction(this.settings.calculate)&&(this.nice=this.settings.calculate),this.settings.onstatechange&&a.isFunction(this.settings.onstatechange)&&(this.onstatechange=this.settings.onstatechange),this.css=c.SLIDER.css,this.is={init:!1},this.o={},this.initValue={},this.isAsc=e.from<e.to,this.create(d),this},e.prototype.create=function(c){var e=this;this.domNode=c;var 
f=this.domNode.find("div"),g=this.domNode.find("i"),h=a.element,i=a.extend,j=a.forEach,k=h(f[1]),l=h(f[2]),m=h(f[5]),n=h(f[6]),o=h(g[0]),p=h(g[1]),q=h(g[2]),r=h(g[3]),s=h(g[4]),t=h(g[5]),u=h(g[6]),v=[k,l],w=d.offset(this.domNode),x={left:w.left,top:w.top,width:this.domNode[0].clientWidth,height:this.domNode[0].clientHeight},y=e.settings.value.split(";");this.sizes={domWidth:this.domNode[0].clientWidth,domHeight:this.domNode[0].clientHeight,domOffset:x},i(this.o,{pointers:{},labels:{0:{o:m},1:{o:n}},limits:{0:a.element(f[3]),1:a.element(f[4])},indicators:{0:r,1:s,2:t,3:u}}),i(this.o.labels[0],{value:this.o.labels[0].o.find("span")}),i(this.o.labels[1],{value:this.o.labels[1].o.find("span")}),this.settings.single=!e.settings.value.split(";")[1],this.settings.single&&q.addClass("ng-hide"),j(v,function(c,f){e.settings=a.copy(e.settings);var g,h,i,j,k,l=y[f];l&&(e.o.pointers[f]=new b(c,f,e.settings.vertical,e),g=y[f-1],h=g?parseInt(g,10):void 0,l=e.settings.round?parseFloat(l):parseInt(l,10),(g&&e.isAsc?h>l:l>h)&&(l=g),i=e.isAsc?l>e.settings.to:l<e.settings.to,j=i?e.settings.to:l,e.o.pointers[f].set(j,!0),k=d.offset(e.o.pointers[f].ptr),e.o.pointers[f].d={left:k.left,top:k.top})}),e.domNode.bind("mousedown",e.clickHandler.apply(e)),this.o.value=h(this.domNode.find("i")[2]),this.is.init=!0,this.settings.css&&(o.css(this.settings.css.background?this.settings.css.background:{}),p.css(this.settings.css.background?this.settings.css.background:{}),this.o.pointers[1]||(r.css(this.settings.css.before?this.settings.css.before:{}),u.css(this.settings.css.after?this.settings.css.after:{})),s.css(this.settings.css["default"]?this.settings.css["default"]:{}),t.css(this.settings.css["default"]?this.settings.css["default"]:{}),q.css(this.settings.css.range?this.settings.css.range:{}),k.css(this.settings.css.pointer?this.settings.css.pointer:{}),l.css(this.settings.css.pointer?this.settings.css.pointer:{})),j(this.o.pointers,function(a,b){e.redraw(a)})},e.prototype.clickHandler=function
(){var b=this,c=function(a){var c=b.o.pointers[0].ptr,e=b.o.pointers[1].ptr,f=d.offset(c),g=d.offset(e);b.o.pointers[0].d={left:f.left,top:f.top,width:c[0].clientWidth,height:c[0].clientHeight},b.o.pointers[1].d={left:g.left,top:g.top,width:e[0].clientWidth,height:e[0].clientHeight}};return function(e){if(!b.disabled){var f=b.settings.vertical,g=0,h=d.offset(b.domNode),i=b.o.pointers[0],j=b.o.pointers[1]?b.o.pointers[1]:null,k=e.originalEvent?e.originalEvent:e,l=f?k.pageY:k.pageX,m=f?"top":"left",n={left:h.left,top:h.top,width:b.domNode[0].clientWidth,height:b.domNode[0].clientHeight},o=b.o.pointers[g];if(j){j.d.width||c();var p=d.offset(i.ptr)[m],q=d.offset(j.ptr)[m],r=Math.abs((q-p)/2),s=l>=q||l>=q-r;s&&(o=j)}o._parent={offset:n,width:n.width,height:n.height};var t=i._getPageCoords(e);return o.cx=t.x-o.d.left,o.cy=t.y-o.d.top,o.onmousemove(e,t.x,t.y),o.onmouseup(),a.extend(o.d,{left:t.x,top:t.y}),b.redraw(o),!1}}},e.prototype.disable=function(a){this.disabled=a},e.prototype.nice=function(a){return a},e.prototype.onstatechange=function(){},e.prototype.limits=function(a,b){if(!this.settings.smooth){var c=100*this.settings.step/this.settings.interval;a=Math.round(a/c)*c}if(b){var d=this.o.pointers[1-b.uid];d&&b.uid&&a<d.value.prc&&(a=d.value.prc),d&&!b.uid&&a>d.value.prc&&(a=d.value.prc)}return 0>a&&(a=0),a>100&&(a=100),Math.round(10*a)/10},e.prototype.getPointers=function(){return this.o.pointers},e.prototype.generateScale=function(){if(this.settings.scale&&this.settings.scale.length>0){for(var a,b,c="",d=this.settings.scale,e={},f=this.settings.vertical?"top":"left",g=0;g<d.length;g++)d[g].val||(a=(100/(d.length-1)).toFixed(2),c+='<span style="'+f+": "+g*a+'%">'+("|"!=d[g]?"<ins>"+d[g]+"</ins>":"")+"</span>"),d[g].val<=this.settings.to&&d[g].val>=this.settings.from&&!e[d[g].val]&&(e[d[g].val]=!0,a=this.valueToPrc(d[g].val),b=d[g].label?d[g].label:d[g].val,c+='<span style="'+f+": "+a+'%"><ins>'+b+"</ins></span>");return 
c}return""},e.prototype.onresize=function(){var b=this;this.sizes={domWidth:this.domNode[0].clientWidth,domHeight:this.domNode[0].clientHeight,domOffset:{left:this.domNode[0].offsetLeft,top:this.domNode[0].offsetTop,width:this.domNode[0].clientWidth,height:this.domNode[0].clientHeight}},a.forEach(this.o.pointers,function(a,c){b.redraw(a)})},e.prototype.update=function(){this.onresize(),this.drawScale()},e.prototype.drawScale=function(b){a.forEach(a.element(b).find("ins"),function(a,b){a.style.marginLeft=-a.clientWidth/2})},e.prototype.redraw=function(b){if(!this.is.init)return this.o.pointers[0]&&!this.o.pointers[1]?(this.originValue=this.o.pointers[0].value.prc,this.o.indicators[0].css(this.settings.vertical?{top:0,height:this.o.pointers[0].value.prc+"%"}:{left:0,width:this.o.pointers[0].value.prc+"%"}),this.o.indicators[1].css(this.settings.vertical?{top:this.o.pointers[0].value.prc+"%"}:{left:this.o.pointers[0].value.prc+"%"}),this.o.indicators[3].css(this.settings.vertical?{top:this.o.pointers[0].value.prc+"%"}:{left:this.o.pointers[0].value.prc+"%"})):(this.o.indicators[2].css(this.settings.vertical?{top:this.o.pointers[1].value.prc+"%"}:{left:this.o.pointers[1].value.prc+"%"}),this.o.indicators[0].css(this.settings.vertical?{top:0,height:"0"}:{left:0,width:"0"}),this.o.indicators[3].css(this.settings.vertical?{top:"0",height:"0"}:{left:"0",width:"0"})),!1;this.setValue();var 
c,d;this.o.pointers[0]&&this.o.pointers[1]&&(c=this.settings.vertical?{top:this.o.pointers[0].value.prc+"%",height:this.o.pointers[1].value.prc-this.o.pointers[0].value.prc+"%"}:{left:this.o.pointers[0].value.prc+"%",width:this.o.pointers[1].value.prc-this.o.pointers[0].value.prc+"%"},this.o.value.css(c),this.o.pointers[0].value.prc===this.o.pointers[1].value.prc&&this.o.pointers[1].ptr.css("z-index",0===this.o.pointers[0].value.prc?"3":"1")),this.o.pointers[0]&&!this.o.pointers[1]&&(d=this.o.pointers[0].value.prc-this.originValue,d>=0?this.o.indicators[3].css(this.settings.vertical?{height:d+"%"}:{width:d+"%"}):this.o.indicators[3].css(this.settings.vertical?{height:0}:{width:0}),this.o.pointers[0].value.prc<this.originValue?this.o.indicators[0].css(this.settings.vertical?{height:this.o.pointers[0].value.prc+"%"}:{width:this.o.pointers[0].value.prc+"%"}):this.o.indicators[0].css(this.settings.vertical?{height:this.originValue+"%"}:{width:this.originValue+"%"}));var e=this.nice(b.value.origin);this.settings.modelLabels&&(e=a.isFunction(this.settings.modelLabels)?this.settings.modelLabels(e):void 0!==this.settings.modelLabels[e]?this.settings.modelLabels[e]:e),this.o.labels[b.uid].value.html(e),this.redrawLabels(b)},e.prototype.redrawLabels=function(a){function b(a,b,d){b.margin=-b.label/2;var e=c.settings.vertical?c.sizes.domHeight:c.sizes.domWidth;if(c.sizes.domWidth){var f=b.border+b.margin;0>f&&(b.margin-=f),c.sizes.domWidth>0&&b.border+b.label/2>e?(b.margin=0,b.right=!0):b.right=!1}return c.settings.vertical?a.o.css({top:d+"%",marginLeft:"20px",marginTop:b.margin,bottom:"auto"}):a.o.css({left:d+"%",marginLeft:b.margin+"px",right:"auto"}),b.right&&c.sizes.domWidth>0&&(c.settings.vertical?a.o.css({top:"auto",bottom:0}):a.o.css({left:"auto",right:0})),b}var 
c=this,d=this.o.labels[a.uid],e=a.value.prc,f=0===d.o[0].offsetWidth?7*d.o[0].textContent.length:d.o[0].offsetWidth;this.sizes.domWidth=this.domNode[0].clientWidth,this.sizes.domHeight=this.domNode[0].clientHeight;var g,h,i={label:c.settings.vertical?d.o[0].offsetHeight:f,right:!1,border:e*(c.settings.vertical?this.sizes.domHeight:this.sizes.domWidth)/100},j=0===a.uid?1:0;if(!this.settings.single&&!this.settings.vertical){g=this.o.labels[j],h=this.o.pointers[j];var k=this.o.labels[0],l=this.o.labels[1],m=this.o.pointers[0],n=this.o.pointers[1],o=n.ptr[0].offsetLeft-m.ptr[0].offsetLeft,p=this.nice(h.value.origin);if(k.o.css(this.css.visible),l.o.css(this.css.visible),p=this.getLabelValue(p),o+10<k.o[0].offsetWidth+l.o[0].offsetWidth){if(g.o.css(this.css.hidden),g.value.html(p),e=(h.value.prc-e)/2+e,h.value.prc!=a.value.prc){p=this.nice(this.o.pointers[0].value.origin);var q=this.nice(this.o.pointers[1].value.origin);p=this.getLabelValue(p),q=this.getLabelValue(q),d.value.html(p+" – "+q),i.label=d.o[0].offsetWidth,i.border=e*r/100}}else g.value.html(p),g.o.css(this.css.visible)}i=b(d,i,e);var r=c.settings.vertical?c.sizes.domHeight:c.sizes.domWidth;if(g){var s=0===d.o[0].offsetWidth?d.o[0].textContent.length/2*7:d.o[0].offsetWidth,t={label:c.settings.vertical?g.o[0].offsetHeight:s,right:!1,border:h.value.prc*this.sizes.domWidth/100};i=b(g,t,h.value.prc)}this.redrawLimits()},e.prototype.redrawLimits=function(){if(this.settings.limits){var b=[!0,!0],c=0;for(var d in this.o.pointers)if(!this.settings.single||0===d){var e=this.o.pointers[d],f=this.o.labels[e.uid],g=f.o[0].offsetLeft-this.sizes.domOffset.left,h=this.o.limits[0];g<h[0].clientWidth&&(b[0]=!1),h=this.o.limits[1],g+f.o[0].clientWidth>this.sizes.domWidth-h[0].clientWidth&&(b[1]=!1)}for(;c<b.length;c++)b[c]?a.element(this.o.limits[c]).addClass("animate-show"):a.element(this.o.limits[c]).addClass("animate-hidde")}},e.prototype.setValue=function(){var 
a=this.getValue();this.inputNode.attr("value",a),this.onstatechange.call(this,a,this.inputNode)},e.prototype.getValue=function(){if(!this.is.init)return!1;var b=this,c="";return a.forEach(this.o.pointers,function(a,d){void 0===a.value.prc||isNaN(a.value.prc)||(c+=(d>0?";":"")+b.prcToValue(a.value.prc))}),c},e.prototype.getLabelValue=function(b){return this.settings.modelLabels?a.isFunction(this.settings.modelLabels)?this.settings.modelLabels(b):void 0!==this.settings.modelLabels[b]?this.settings.modelLabels[b]:b:b},e.prototype.getPrcValue=function(){if(!this.is.init)return!1;var a="";return a},e.prototype.prcToValue=function(a){var b;if(this.settings.heterogeneity&&this.settings.heterogeneity.length>0)for(var c=this.settings.heterogeneity,d=0,e=this.settings.round?parseFloat(this.settings.from):parseInt(this.settings.from,10),f=this.settings.round?parseFloat(this.settings.to):parseInt(this.settings.to,10),g=0;g<=c.length;g++){var h;h=c[g]?c[g].split("/"):[100,f];var i=this.settings.round?parseFloat(h[0]):parseInt(h[0],10),j=this.settings.round?parseFloat(h[1]):parseInt(h[1],10);a>=d&&i>=a&&(b=e+(a-d)*(j-e)/(i-d)),d=i,e=j}else b=this.settings.from+a*this.settings.interval/100;return this.round(b)},e.prototype.valueToPrc=function(a,b){var c,d=this.settings.round?parseFloat(this.settings.from):parseInt(this.settings.from,10);if(this.settings.heterogeneity&&this.settings.heterogeneity.length>0)for(var e=this.settings.heterogeneity,f=0,g=0;g<=e.length;g++){var h;h=e[g]?e[g].split("/"):[100,this.settings.to];var i=this.settings.round?parseFloat(h[0]):parseInt(h[0],10),j=this.settings.round?parseFloat(h[1]):parseInt(h[1],10);a>=d&&j>=a&&(c=b?b.limits(f+(a-d)*(i-f)/(j-d)):this.limits(f+(a-d)*(i-f)/(j-d))),f=i,d=j}else c=b?b.limits(100*(a-d)/this.settings.interval):this.limits(100*(a-d)/this.settings.interval);return c},e.prototype.round=function(a){return 
a=Math.round(a/this.settings.step)*this.settings.step,a=this.settings.round?Math.round(a*Math.pow(10,this.settings.round))/Math.pow(10,this.settings.round):Math.round(a)},e}])}(angular),function(a,b){"use strict";a.module("angularAwesomeSlider").run(["$templateCache",function(a){a.put("ng-slider/slider-bar.tmpl.html",'<span ng-class="mainSliderClass" id="{{sliderTmplId}}"><table><tr><td><div class="jslider-bg"><i class="left"></i><i class="right"></i><i class="range"></i><i class="before"></i><i class="default"></i><i class="default"></i><i class="after"></i></div><div class="jslider-pointer"></div><div class="jslider-pointer jslider-pointer-to"></div><div class="jslider-label" ng-show="options.limits"><span ng-bind="limitValue(options.from)"></span>{{options.dimension}}</div><div class="jslider-label jslider-label-to" ng-show="options.limits"><span ng-bind="limitValue(options.to)"></span>{{options.dimension}}</div><div class="jslider-value"><span></span>{{options.dimension}}</div><div class="jslider-value jslider-value-to"><span></span>{{options.dimension}}</div><div class="jslider-scale" id="{{sliderScaleDivTmplId}}"></div></td></tr></table></span>')}])}(window.angular);
|
"""
The basic building blocks making up the Widget system.
"""
# The classes defined here need more than 7 instance attributes,
# and there is no cyclic import during runtime.
# pylint: disable=too-many-instance-attributes, cyclic-import
from __future__ import annotations
from copy import deepcopy
from inspect import signature
from dataclasses import dataclass, field
from typing import Callable, Optional, Type, Iterator, Any
from ..input import keys
from ..parser import markup
from ..context_managers import cursor_at
from ..helpers import real_length, break_line
from ..exceptions import WidthExceededError, LineLengthError
from ..enums import SizePolicy, CenteringPolicy, WidgetAlignment
from ..ansi_interface import terminal, clear, MouseEvent, MouseAction
from . import boxes
from . import styles as w_styles
__all__ = ["MouseTarget", "MouseCallback", "Widget", "Container", "Label"]
MouseCallback = Callable[["MouseTarget", "Widget"], Any]
BoundCallback = Callable[..., Any]
def _set_obj_or_cls_style(
obj_or_cls: Type[Widget] | Widget, key: str, value: w_styles.StyleType
) -> Type[Widget] | Widget:
"""Set a style of an object or class"""
if not key in obj_or_cls.styles.keys():
raise KeyError(f"Style {key} is not valid for {obj_or_cls}!")
if not callable(value):
raise ValueError(f"Style {key} for {type(obj_or_cls)} has to be a callable.")
obj_or_cls.styles[key] = value
return obj_or_cls
def _set_obj_or_cls_char(
obj_or_cls: Type[Widget] | Widget, key: str, value: w_styles.CharType
) -> Type[Widget] | Widget:
"""Set a char of an object or class"""
if not key in obj_or_cls.chars.keys():
raise KeyError(f"Char {key} is not valid for {obj_or_cls}!")
obj_or_cls.chars[key] = value
return obj_or_cls
@dataclass
class MouseTarget:
    """A rectangular screen area that reacts to mouse events."""

    parent: Widget
    """Parent of this target. Used for getting current position in `adjust`."""

    left: int
    """Left offset from parent widget"""

    right: int
    """Right offset from parent widget"""

    height: int
    """Total height"""

    top: int = 0
    """Top offset from parent widget"""

    _start: tuple[int, int] = field(init=False)
    _end: tuple[int, int] = field(init=False)

    onclick: Optional[MouseCallback] = None
    """Callback function for click events"""

    @property
    def start(self) -> tuple[int, int]:
        """Read-only access to the cached start (top-left) position"""

        return self._start

    @property
    def end(self) -> tuple[int, int]:
        """Read-only access to the cached end (bottom-right) position"""

        return self._end

    def adjust(self) -> None:
        """Recalculate the cached start & end positions from `parent`.

        This should be called every time the parent's position might
        have changed."""

        parent_x, parent_y = self.parent.pos

        self._start = (parent_x + self.left - 1, parent_y + 1 + self.top)
        self._end = (
            parent_x + self.parent.width - 1 - self.right,
            parent_y + self.top + self.height,
        )

    def contains(self, pos: tuple[int, int]) -> bool:
        """Tell whether `pos` falls inside this target's rectangle"""

        (start_x, start_y), (end_x, end_y) = self._start, self._end
        pos_x, pos_y = pos

        return start_x <= pos_x <= end_x and start_y <= pos_y <= end_y

    def click(self, caller: Widget) -> None:
        """Run the `onclick` callback (if set) with `(self, caller)`"""

        callback = self.onclick
        if callback is None:
            return

        callback(self, caller)

    def show(self, color: Optional[int] = None) -> None:
        """Paint the target area on screen, using `color` (default: 210)

        Note: This is only meant to be a debug function."""

        fill_color = 210 if color is None else color

        width = self._end[0] - self._start[0]
        for y_pos in range(self._start[1], self._end[1] + 1):
            with cursor_at((self._start[0], y_pos)) as print_here:
                print_here(markup.parse(f"[@{fill_color}]" + " " * width))
class Widget:
    """The base of the Widget system"""

    set_style = classmethod(_set_obj_or_cls_style)
    set_char = classmethod(_set_obj_or_cls_char)

    styles: dict[str, w_styles.StyleType] = {}
    """Default styles for this class"""

    chars: dict[str, w_styles.CharType] = {}
    """Default characters for this class"""

    keys: dict[str, set[str]] = {}
    """Groups of keys that are used in `handle_key`"""

    serialized: list[str] = [
        "id",
        "pos",
        "depth",
        "width",
        "height",
        "selected_index",
        "selectables_length",
    ]
    """Fields of widget that shall be serialized by `pytermgui.serializer.Serializer`"""

    # This class is loaded after this module,
    # and thus mypy doesn't see its existence.
    _id_manager: Optional["_IDManager"] = None  # type: ignore

    is_bindable = False
    """Allow binding support"""

    size_policy = SizePolicy.get_default()
    """`pytermgui.enums.SizePolicy` to set widget's width according to"""

    parent_align = WidgetAlignment.get_default()
    """`pytermgui.enums.WidgetAlignment` to align widget by"""

    def __init__(self, **attrs: Any) -> None:
        """Initialize object

        Any keyword argument is applied as an attribute via `setattr`,
        after the defaults below are in place."""

        # Shadow the classmethods with instance-bound equivalents
        self.set_style = lambda key, value: _set_obj_or_cls_style(self, key, value)
        self.set_char = lambda key, value: _set_obj_or_cls_char(self, key, value)

        self.width = 1
        self.height = 1
        self.pos = terminal.origin

        self.depth = 0

        # Copy class-level dicts so per-instance changes don't leak
        self.styles = type(self).styles.copy()
        self.chars = type(self).chars.copy()

        self.mouse_targets: list[MouseTarget] = []
        self.parent: Widget | None = None

        self.selected_index: int | None = None
        self.onclick: MouseCallback | None = None

        self._selectables_length = 0
        self._id: Optional[str] = None
        self._serialized_fields = type(self).serialized
        self._bindings: dict[str | Type[MouseEvent], tuple[BoundCallback, str]] = {}

        for attr, value in attrs.items():
            setattr(self, attr, value)

    def __repr__(self) -> str:
        """Show representation of object.

        By default this is the `debug()` method."""

        return self.debug()

    def __iter__(self) -> Iterator[Widget]:
        """Return self for iteration"""

        yield self

    @property
    def bindings(self) -> dict[str | Type[MouseEvent], tuple[BoundCallback, str]]:
        """Return copy of the bindings dictionary"""

        return self._bindings.copy()

    @property
    def id(self) -> Optional[str]:  # pylint: disable=invalid-name
        """Getter for id property"""

        return self._id

    @id.setter
    def id(self, value: str) -> None:  # pylint: disable=invalid-name
        """Register widget to idmanager"""

        if self._id == value:
            return

        manager = Widget._id_manager
        assert manager is not None

        # Deregister the previous id before taking the new one
        old = manager.get_id(self)
        if old is not None:
            manager.deregister(old)

        self._id = value
        manager.register(self)

    @property
    def selectables_length(self) -> int:
        """Return count of selectables within this widget

        Override this for custom `Widget` children that have
        selectable items."""

        return self._selectables_length

    @property
    def selectables(self) -> list[tuple[Widget, int]]:
        """Get a list of all selectable objects within this widget"""

        return [(self, 0)]

    @property
    def is_selectable(self) -> bool:
        """Determine if this widget has any selectables.

        Shorthand for `Widget.selectables_length != 0`"""

        return self.selectables_length != 0

    def static_width(self, value: int) -> None:
        """Write-only setter for width that also changes
        `size_policy` to `STATIC`"""

        self.width = value
        self.size_policy = SizePolicy.STATIC

    # Set static_width to a setter only property
    static_width = property(None, static_width)  # type: ignore

    def define_mouse_target(
        self, left: int, right: int, height: int, top: int = 0
    ) -> MouseTarget:
        """Define a mouse target, return it for method assignments

        Note: Only use this within a `Widget`, preferably within its
        `get_lines()` method."""

        target = MouseTarget(self, left, right, height, top)

        target.adjust()
        # Newest target first so it wins overlap lookups in `get_target`
        self.mouse_targets.insert(0, target)

        return target

    def get_target(self, pos: tuple[int, int]) -> Optional[MouseTarget]:
        """Get MouseTarget for a position"""

        for target in self.mouse_targets:
            if target.contains(pos):
                return target

        return None

    def handle_mouse(
        self, event: MouseEvent, target: MouseTarget | None = None
    ) -> bool:
        """Handle a mouse event, return success

        The default implementation handles LEFT_CLICK only."""

        action, pos = event
        target = target or self.get_target(pos)

        if action is MouseAction.LEFT_CLICK:
            if target is None:
                return False

            target.click(self)
            return True

        return False

    def handle_key(self, key: str) -> bool:
        """Handle a keystroke, return success

        The default implementation does nothing and never succeeds."""

        # Previously `return False and hasattr(self, key)` - the right-hand
        # side was dead code, as `False and ...` always short-circuits.
        return False

    def serialize(self) -> dict[str, Any]:
        """Serialize object using its `serialized` fields"""

        fields = self._serialized_fields

        out: dict[str, Any] = {"type": type(self).__name__}
        for key in fields:
            # Detect styled values (marked with a "*" prefix)
            if key.startswith("*"):
                style = True
                key = key[1:]
            else:
                style = False

            value = getattr(self, key)

            # Convert styled value into markup
            if style:
                style_call = self._get_style(key)
                if isinstance(value, list):
                    out[key] = [markup.get_markup(style_call(char)) for char in value]
                else:
                    out[key] = markup.get_markup(style_call(value))

                continue

            out[key] = value

        # The chars need to be handled separately
        out["chars"] = {}
        for key, value in self.chars.items():
            style_call = self._get_style(key)

            if isinstance(value, list):
                out["chars"][key] = [
                    markup.get_markup(style_call(char)) for char in value
                ]
            else:
                out["chars"][key] = markup.get_markup(style_call(value))

        return out

    def copy(self) -> Widget:
        """Create a deepcopy of this Widget"""

        return deepcopy(self)

    def _get_style(self, key: str) -> w_styles.DepthlessStyleType:
        """Get style by `key`.

        Raise `KeyError` if style key is invalid."""

        style_method = self.styles[key]

        return w_styles.StyleCall(self, style_method)

    def _get_char(self, key: str) -> w_styles.CharType:
        """Get char by `key`.

        Raise `KeyError` if char key is invalid."""

        chars = self.chars[key]
        if isinstance(chars, str):
            return chars

        # Return a copy so callers can't mutate the stored list
        return chars.copy()

    def get_lines(self) -> list[str]:
        """Get lines representing object

        These lines should be equal in length to the width of the `Widget`. Their
        content does not matter otherwise."""

        raise NotImplementedError(f"get_lines() is not defined for type {type(self)}.")

    def bind(
        self, key: str, action: BoundCallback, description: Optional[str] = None
    ) -> None:
        """Bind `action` to be called when `key` is pressed.

        Note: This function is only called by implementations above this layer. To
        use this functionality use `WindowManager` or write your own custom manager.

        Raises:
            TypeError: If this widget is not bindable (`is_bindable` is False)."""

        if not self.is_bindable:
            raise TypeError(f"Widget of type {type(self)} does not accept bindings.")

        if description is None:
            description = f"Binding of {key} to {action}"

        self._bindings[key] = (action, description)

    def execute_binding(self, key: Any) -> bool:
        """Execute a binding if one is found

        True: Binding was found & executed
        False: Binding was not found"""

        # Execute special ANY_KEY binding first; it never counts as a match
        if keys.ANY_KEY in self._bindings:
            method, _ = self._bindings[keys.ANY_KEY]
            method(self, key)

        if key in self._bindings:
            method, _ = self._bindings[key]
            method(self, key)

            return True

        return False

    def select(self, index: int | None = None) -> None:
        """Select a part of this Widget

        Raises:
            TypeError: If this widget has no selectables."""

        if not self.is_selectable:
            raise TypeError(f"Object of type {type(self)} has no selectables.")

        if index is not None:
            # Clamp into the valid range instead of raising
            index = min(max(0, index), self.selectables_length - 1)

        self.selected_index = index

    def show_targets(self, color: Optional[int] = None) -> None:
        """Show all mouse targets of this Widget

        Note: This is only meant to be a debug method."""

        for target in self.mouse_targets:
            target.show(color)

    def print(self) -> None:
        """Print object within a Container

        Overwrite this for Container-like widgets."""

        Container(self).print()

    def debug(self) -> str:
        """Return identifiable information about this Widget"""

        constructor = "("
        for name in signature(getattr(self, "__init__")).parameters:
            current = ""
            if len(constructor) > 1:
                current += ", "

            if name == "attrs":
                # The original `current += "**attrs"; continue` discarded
                # `current`, so "**attrs" never appeared in the output.
                constructor += current + "**attrs"
                continue

            current += name

            attr = getattr(self, name, None)
            if attr is None:
                continue

            current += "="

            if isinstance(attr, str):
                current += f'"{attr}"'
            else:
                current += str(attr)

            constructor += current

        constructor += ")"

        return type(self).__name__ + constructor
class Container(Widget):
"""A widget that contains other widgets."""
chars: dict[str, w_styles.CharType] = {
"border": ["| ", "-", " |", "-"],
"corner": [""] * 4,
}
styles = {
"border": w_styles.MARKUP,
"corner": w_styles.MARKUP,
"fill": w_styles.BACKGROUND,
}
keys = {
"next": {keys.DOWN, keys.CTRL_N, "j"},
"previous": {keys.UP, keys.CTRL_P, "k"},
}
serialized = Widget.serialized + ["_centered_axis"]
allow_fullscreen = True
# TODO: Add `WidgetConvertible`? type instead of Any
    def __init__(self, *widgets: Any, **attrs: Any) -> None:
        """Initialize Container data

        Args:
            *widgets: Initial children; each is run through `_add_widget`,
                which may convert non-Widget data into a Widget.
            **attrs: Attributes forwarded to `Widget.__init__`.
        """
        super().__init__(**attrs)
        # TODO: This is just a band-aid.
        if "width" not in attrs:
            self.width = 40
        self._widgets: list[Widget] = []
        self._centered_axis: CenteringPolicy | None = None
        self._prev_screen: tuple[int, int] = (0, 0)
        self._has_printed = False
        # Shadow the class-level dicts so per-instance changes don't leak
        self.styles = type(self).styles.copy()
        self.chars = type(self).chars.copy()
        for widget in widgets:
            self._add_widget(widget)
        self._drag_target: Widget | None = None
        # Re-center (using the stored axis) whenever the terminal resizes
        terminal.subscribe(terminal.RESIZE, lambda *_: self.center(self._centered_axis))
@property
def sidelength(self) -> int:
"""Returns `real_length` of left+right borders"""
chars = self._get_char("border")
style = self._get_style("border")
if not isinstance(chars, list):
return 0
left_border, _, right_border, _ = chars
return real_length(style(left_border) + style(right_border))
@property
def selectables(self) -> list[tuple[Widget, int]]:
"""Get all selectable widgets and their inner indices
The output format is as follows:
```python3
outer_container.selectables = [
(container_widget, 0)
(container_widget, 1)
(container_widget, 2)
]
```
This is so there is a constant way to reference inner objects.
"""
_selectables: list[tuple[Widget, int]] = []
for widget in self._widgets:
if not widget.is_selectable:
continue
for i, (inner, _) in enumerate(widget.selectables):
_selectables.append((inner, i))
return _selectables
@property
def selectables_length(self) -> int:
"""Get count of selectable subparts"""
return len(self.selectables)
@property
def selected(self) -> Optional[Widget]:
"""Return currently selected object"""
# TODO: Add deeper selection
if self.selected_index is None:
return None
if self.selected_index >= len(self.selectables):
return None
return self.selectables[self.selected_index][0]
@property
def box(self) -> boxes.Box:
"""Return current box setting"""
return self._box
@box.setter
def box(self, new: str | boxes.Box) -> None:
"""Apply new box
`new` can either be a reference to a Box instance,
or a string with a box name within the boxes module.
The `file_loader` system circumvents this by allowing
the `Serializer` to create references to registered `Box`
instances from strings."""
if isinstance(new, str):
from_module = vars(boxes).get(new)
if from_module is None:
raise ValueError(f"Unknown box type {new}.")
new = from_module
assert isinstance(new, boxes.Box)
self._box = new
new.set_chars_of(self)
def __iadd__(self, other: object) -> Container:
"""Call self._add_widget(other) and return self"""
self._add_widget(other)
return self
def __add__(self, other: object) -> Container:
"""Call self._add_widget(other)"""
self.__iadd__(other)
return self
def __iter__(self) -> Iterator[Widget]:
"""Iterate through self._widgets"""
for widget in self._widgets:
yield widget
def __len__(self) -> int:
"""Get length of widgets"""
return len(self._widgets)
def __getitem__(self, sli: int | slice) -> Widget | list[Widget]:
"""Index into self._widgets"""
return self._widgets[sli]
def __setitem__(self, index: int, value: Any) -> None:
"""Set item in self._widgets"""
self._widgets[index] = value
def __contains__(self, other: object) -> bool:
"""Determine if Container contains `other`"""
return other in self._widgets
def _add_widget(self, other: object, run_get_lines: bool = True) -> None:
"""Add `other` to self._widgets, convert using `auto` if necessary"""
if not isinstance(other, Widget):
to_widget = Widget.from_data(other)
if to_widget is None:
raise ValueError(
f"Could not convert {other} of type {type(other)} to a Widget!"
)
other = to_widget
# This is safe to do, as it would've raised an exception above already
assert isinstance(other, Widget)
self._widgets.append(other)
if isinstance(other, Container):
other.set_recursive_depth(self.depth + 2)
else:
other.depth = self.depth + 1
other.get_lines()
other.parent = self
if run_get_lines:
self.get_lines()
def _get_aligners(
self, widget: Widget, borders: tuple[str, str]
) -> tuple[Callable[[str], str], int]:
"""Get method to align a widget, along with a position offset"""
left, right = borders
char = self._get_style("fill")(" ")
def _align_left(text: str) -> str:
"""Align line to the left"""
padding = self.width - real_length(left + right) - real_length(text)
return left + text + padding * char + right
def _align_center(text: str) -> str:
"""Align line to the center"""
total = self.width - real_length(left + right) - real_length(text)
padding, offset = divmod(total, 2)
return left + (padding + offset) * char + text + padding * char + right
def _align_right(text: str) -> str:
"""Align line to the right"""
padding = self.width - real_length(left + right) - real_length(text)
return left + padding * char + text + right
if widget.parent_align == WidgetAlignment.CENTER:
total = self.width - real_length(left + right) - widget.width
padding, offset = divmod(total, 2)
return _align_center, real_length(left) + padding + offset
if widget.parent_align == WidgetAlignment.RIGHT:
return _align_right, self.width - real_length(left) - widget.width
# Default to left-aligned
return _align_left, real_length(left)
def _update_width(self, widget: Widget) -> None:
"""Update widths of both `widget` and self"""
available = (
self.width - self.sidelength - (0 if isinstance(widget, Container) else 1)
)
if widget.size_policy == SizePolicy.FILL:
widget.width = available
return
if widget.size_policy == SizePolicy.RELATIVE:
widget.width = widget.relative_width * available
return
if widget.width > available:
if widget.size_policy == SizePolicy.STATIC:
raise WidthExceededError(
f"Widget {widget}'s static width of {widget.width}"
+ f" exceeds its parent's available width {available}."
""
)
widget.width = available
    def get_lines(self) -> list[str]: # pylint: disable=too-many-locals
        """Get lines of all widgets

        This method essentially works by going through all widgets, resizing them appropriately
        and collecting their `get_lines()` returns into a list, with each line aligned according
        to the widget's alignment policy. After all widgets are collected, lines containing the
        upper and lower border are inserted/appended to the list, and padding is added to bring
        the Container to the appropriate height.

        Note about pylint: Having less locals in this method would ruin readability."""
        def _apply_style(
            style: w_styles.DepthlessStyleType, target: list[str]
        ) -> list[str]:
            """Apply style to target list elements"""
            for i, char in enumerate(target):
                target[i] = style(char)
            return target
        # Get chars & styles
        corner_style = self._get_style("corner")
        border_style = self._get_style("border")
        border_char = self._get_char("border")
        assert isinstance(border_char, list)
        corner_char = self._get_char("corner")
        assert isinstance(corner_char, list)
        left, top, right, bottom = _apply_style(border_style, border_char)
        t_left, t_right, b_right, b_left = _apply_style(corner_style, corner_char)
        def _get_border(left: str, char: str, right: str) -> str:
            """Get a line for the top/bottom border"""
            offset = real_length(left + right)
            return left + char * (self.width - offset) + right
        # Set up lines list
        lines: list[str] = []
        self.mouse_targets = []
        # Set root widget full screen if possible
        if (
            self._has_printed
            and self.parent is None
            and self.allow_fullscreen
            and self.size_policy is SizePolicy.FILL
        ):
            self.pos = terminal.origin
            self.width, self.height = terminal.size
        align, offset = self._get_aligners(self, (left, right))
        # Go through widgets
        for widget in self._widgets:
            # Adopt the first child's width when this Container has none yet
            if self.width == 0:
                self.width = widget.width
            align, offset = self._get_aligners(widget, (left, right))
            # Apply width policies
            self._update_width(widget)
            # TODO: This is ugly, and should be avoided.
            # For now, only Container has a top offset, but this should be
            # opened up as some kind of API for custom widgets.
            if type(widget).__name__ == "Container":
                container_vertical_offset = 1
            else:
                container_vertical_offset = 0
            widget.pos = (
                self.pos[0] + offset,
                self.pos[1] + len(lines) + container_vertical_offset,
            )
            widget_lines: list[str] = []
            for i, line in enumerate(widget.get_lines()):
                # Stop collecting once the Container's height is exceeded
                if len(lines) > self.height:
                    break
                # Pad horizontally
                aligned = align(line)
                new = real_length(aligned)
                # Assert well formed lines
                if not new == self.width:
                    raise LineLengthError(
                        f"Widget {widget} returned a line of invalid length"
                        + f" at index {i}: ({new} != {self.width}): {aligned}"
                    )
                widget_lines.append(aligned)
            # Add to lines
            lines += widget_lines
            self.mouse_targets += widget.mouse_targets
        capping_lines = 0
        # Add capping lines
        if real_length(top):
            capping_lines += 1
            lines.insert(0, _get_border(t_left, top, t_right))
        if real_length(bottom):
            capping_lines += 1
            lines.append(_get_border(b_left, bottom, b_right))
        # Pad above the bottom border until the stored height is reached
        for _ in range(self.height - len(lines) - capping_lines):
            lines.insert(-1, align(""))
        # Targets were collected before widget positions settled; re-align them
        for target in self.mouse_targets:
            target.adjust()
        self.height = len(lines)
        # Return
        return lines
def set_widgets(self, new: list[Widget]) -> None:
"""Set self._widgets to a new list"""
self._widgets = []
for widget in new:
self._add_widget(widget)
def serialize(self) -> dict[str, Any]:
"""Serialize object using its `serialized` fields"""
out = super().serialize()
out["_widgets"] = []
for widget in self._widgets:
out["_widgets"].append(widget.serialize())
return out
def pop(self, index: int) -> Widget:
"""Pop widget from self._widgets"""
return self._widgets.pop(index)
def remove(self, other: Widget) -> None:
"""Remove widget from self._widgets"""
return self._widgets.remove(other)
def set_recursive_depth(self, value: int) -> None:
"""Set depth for all children, recursively"""
self.depth = value
for widget in self._widgets:
if isinstance(widget, Container):
widget.set_recursive_depth(value + 1)
else:
widget.depth = value
def select(self, index: int | None = None) -> None:
"""Select inner subobject"""
# Unselect all sub-elements
for other in self._widgets:
if other.selectables_length > 0:
other.select(None)
if index is not None:
if index >= len(self.selectables) is None:
raise IndexError("Container selection index out of range")
widget, inner_index = self.selectables[index]
widget.select(inner_index)
self.selected_index = index
    def center(
        self, where: CenteringPolicy | None = None, store: bool = True
    ) -> Container:
        """Center object on given axis, optionally store choice.

        If `store` is set the policy is stored for the object and is
        re-applied when `center()` is called without arguments.
        Returns self to allow call chaining."""
        # Refresh in case changes happened; get_lines() updates self.height.
        self.get_lines()

        if where is None:
            # See `enums.py` for explanation about this ignore.
            where = CenteringPolicy.get_default()  # type: ignore

        # ALL enables both axes; HORIZONTAL / VERTICAL each enable one.
        centerx = centery = where is CenteringPolicy.ALL
        centerx |= where is CenteringPolicy.HORIZONTAL
        centery |= where is CenteringPolicy.VERTICAL

        pos = list(self.pos)
        if centerx:
            # NOTE(review): the +2 presumably compensates for 1-based
            # terminal coordinates — confirm against terminal module.
            pos[0] = (terminal.width - self.width + 2) // 2

        if centery:
            pos[1] = (terminal.height - self.height + 2) // 2

        self.pos = (pos[0], pos[1])

        if store:
            # Remember policy and screen size so print() can re-center
            # automatically after a terminal resize.
            self._centered_axis = where
            self._prev_screen = terminal.size

        return self
    def handle_mouse(
        self, event: MouseEvent, target: MouseTarget | None = None
    ) -> bool:
        """Handle mouse event on Container's children.

        Returns True when a child widget handled the event."""

        def _get_widget(target: MouseTarget) -> Widget | None:
            """Try to get widget from its mouse target"""
            for widget in self._widgets:
                if target in widget.mouse_targets:
                    return widget

            return None

        action, pos = event
        target = target or self.get_target(pos)

        if target is None:
            return False

        # Keep routing to the widget being dragged while a drag is in
        # progress, even if the pointer is now over a different target.
        target_widget = self._drag_target
        if self._drag_target is None or target not in self._drag_target.mouse_targets:
            target_widget = _get_widget(target)

        if action is MouseAction.LEFT_CLICK:
            # A click starts a potential drag on the widget under the cursor.
            self._drag_target = target_widget

        elif action is MouseAction.RELEASE:
            self._drag_target = None

        if target_widget is None:
            # No target owner found; fall back to hit-testing each child's
            # bounding box against the event position.
            for widget in self._widgets:
                if (
                    widget.pos[0] <= pos[0] < widget.pos[0] + widget.width
                    and widget.pos[1] <= pos[1] < widget.pos[1] + widget.height
                ):
                    target_widget = widget
                    break

            # for/else: no child contains the position -> unhandled.
            else:
                return False

        handled = target_widget.handle_mouse(event, target)
        if handled and target is not None:
            # Mirror the mouse interaction in the keyboard selection state.
            self.select(self.mouse_targets.index(target))

        return handled
    def handle_key(self, key: str) -> bool:
        """Handle a keypress, return success (True when consumed)."""

        def _is_nav(key: str) -> bool:
            """Determine if a key is in the navigation sets"""
            return key in self.keys["next"] | self.keys["previous"]

        # Give the currently selected child the first chance to consume it.
        if self.selected is not None and self.selected.handle_key(key):
            return True

        # Only use navigation when there is more than one selectable
        if self.selectables_length > 1 and _is_nav(key):
            handled = False
            if self.selected_index is None:
                self.select(0)

            assert isinstance(self.selected_index, int)

            if key in self.keys["previous"]:
                # No more selectables left, user wants to exit Container
                # upwards.
                if self.selected_index == 0:
                    return False

                self.select(self.selected_index - 1)
                handled = True

            elif key in self.keys["next"]:
                # Stop selection at last element, return as unhandled
                new = self.selected_index + 1
                if new == len(self.selectables):
                    return False

                self.select(new)
                handled = True

            if handled:
                return True

        if key == keys.ENTER and self.selected is not None:
            if self.selected.selected_index is not None:
                # Simulate a click on the selected child's active target.
                self.selected.mouse_targets[self.selected.selected_index].click(self)

            return True

        return False
def wipe(self) -> None:
"""Wipe characters occupied by the object"""
with cursor_at(self.pos) as print_here:
for line in self.get_lines():
print_here(real_length(line) * " ")
def show_targets(self, color: Optional[int] = None) -> None:
"""Show all mouse targets of this Widget
Note: This is meant to be debug only method."""
super().show_targets(color)
for widget in self._widgets:
widget.show_targets(color)
    def print(self) -> None:
        """Print object to stdout"""
        # Re-center and clear the screen when the terminal was resized
        # since the last print.
        if not terminal.size == self._prev_screen:
            clear()
            self.center(self._centered_axis)
            self._prev_screen = terminal.size

        if self.allow_fullscreen:
            # Fullscreen objects are always anchored to the terminal origin.
            self.pos = terminal.origin

        with cursor_at(self.pos) as print_here:
            for line in self.get_lines():
                print_here(line)

        self._has_printed = True
def debug(self) -> str:
"""Return debug information about this object widgets"""
out = "Container("
for widget in self._widgets:
out += widget.debug() + ", "
out = out.strip(", ")
out += ", **attrs)"
return out
class Label(Widget):
    """A Widget that displays a single (possibly markup-formatted) string.

    The default "value" style is `pytermgui.widgets.styles.MARKUP`, so the
    stored text may contain markup that is parsed right before display:

    ```python3
    import pytermgui as ptg

    with ptg.alt_buffer():
        root = ptg.Container(
            ptg.Label("[italic 141 underline]This is some [green]fancy [white inverse]text!")
        )
        root.print()
        ptg.getch()
    ```
    """

    styles: dict[str, w_styles.StyleType] = {"value": w_styles.MARKUP}

    serialized = Widget.serialized + ["*value", "align", "padding"]

    def __init__(self, value: str = "", padding: int = 0, **attrs: Any) -> None:
        """Initialize the label with its text and optional left padding."""
        super().__init__(**attrs)

        self.value = value
        self.padding = padding
        self.width = real_length(value) + padding

    def get_lines(self) -> list[str]:
        """Return the label's display lines, wrapping the text as needed."""
        apply_style = self._get_style("value")
        padded_text = " " * self.padding + self.value

        broken = list(break_line(apply_style(padded_text), self.width))
        return broken if broken else [""]
|
/**
 * Stage-level assistant for the application.
 * NOTE(review): follows the webOS Mojo stage/scene convention, presumably —
 * confirm against the framework documentation.
 */
function StageAssistant() { }

/**
 * Set up the stage. Called by the framework once the stage is ready.
 */
StageAssistant.prototype.setup = function() {
    // Push the main scene as the initial view.
    this.controller.pushScene("main");
}; /* End setup(). */
|
"""
Copyright (c) 2018 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import errno
import os
from pathlib import Path
from tqdm import tqdm
try:
import lxml.etree as ET
except ImportError:
import xml.etree.cElementTree as ET
from accuracy_checker.representation import DetectionAnnotation, SegmentationAnnotation
from accuracy_checker.representation.segmentation_representation import GTMaskLoader
from accuracy_checker.utils import get_path, string_to_bool, read_txt
from .format_converter import BaseFormatConverter
_VOC_CLASSES_DETECTION = (
'aeroplane', 'bicycle', 'bird', 'boat',
'bottle', 'bus', 'car', 'cat', 'chair',
'cow', 'diningtable', 'dog', 'horse',
'motorbike', 'person', 'pottedplant',
'sheep', 'sofa', 'train', 'tvmonitor'
)
_VOC_CLASSES_SEGMENTATION = tuple(['__background__']) + _VOC_CLASSES_DETECTION
_SEGMENTATION_COLORS = ((
(0, 0, 0), (128, 0, 0), (0, 128, 0), (128, 128, 0),
(0, 0, 128), (128, 0, 128), (0, 128, 128), (128, 128, 128),
(64, 0, 0), (192, 0, 0), (64, 128, 0), (192, 128, 0),
(64, 0, 128), (192, 0, 128), (64, 128, 128), (192, 128, 128),
(0, 64, 0), (128, 64, 0), (0, 192, 0), (128, 192, 0),
(0, 64, 128)
))
def prepare_detection_labels(has_background=True):
    """Build a class-name -> index map for the VOC detection classes.

    When `has_background` is True, indices start at 1 and '__background__'
    maps to 0; otherwise class indices start at 0.
    """
    offset = 1 if has_background else 0
    label_map = {
        name: position + offset
        for position, name in enumerate(_VOC_CLASSES_DETECTION)
    }
    if has_background:
        label_map['__background__'] = 0

    return label_map
def reverse_label_map(label_map):
    """Invert a label map, yielding an index -> class-name dictionary."""
    return {index: name for name, index in label_map.items()}
class PascalVOCSegmentationConverter(BaseFormatConverter):
    """Converts the Pascal VOC segmentation layout into annotations."""

    __provider__ = 'voc_segmentation'

    def convert(self, devkit_dir):
        """
        Args:
            devkit_dir: path to VOC2012 devkit dir (e.g. VOCdevkit/VOC2012)

        Returns:
            tuple of (annotations list, meta dict with label map and palette).
        """
        devkit_dir = get_path(devkit_dir, is_directory=True)
        image_set_file = devkit_dir / 'ImageSets' / 'Segmentation' / 'test.txt'
        mask_dir = Path('SegmentationClass')
        image_dir = Path('JPEGImages')

        # One annotation per image listed in the test split; masks are
        # loaded lazily through the SCIPY loader.
        annotations = [
            SegmentationAnnotation(
                str(image_dir / '{}.jpg'.format(name)),
                str(mask_dir / '{}.png'.format(name)),
                mask_loader=GTMaskLoader.SCIPY
            )
            for name in read_txt(image_set_file)
        ]

        meta = {
            'label_map': dict(enumerate(_VOC_CLASSES_SEGMENTATION)),
            'background_label': 0,
            'segmentation_colors': _SEGMENTATION_COLORS
        }

        return annotations, meta
class PascalVOCDetectionConverter(BaseFormatConverter):
    # Registry name used for provider lookup in BaseFormatConverter.
    __provider__ = "voc07"

    def convert(self, devkit_dir, has_background=True):
        """
        Convert Pascal VOC 2007 detection ground truth into annotations.

        Args:
            devkit_dir: path to VOC2007 devkit dir (e.g. .../VOCdevkit/VOC2007)
            has_background: allows to add background label to label map

        Returns:
            tuple of (list of DetectionAnnotation, meta dict with label map).
        """
        if isinstance(has_background, str):
            # Config values may arrive as strings ("True"/"False").
            has_background = string_to_bool(has_background)
        class_to_ind = prepare_detection_labels(has_background)

        devkit_dir = get_path(devkit_dir, is_directory=True)
        annotation_directory = get_path(devkit_dir / 'Annotations', is_directory=True)
        images_directory = get_path(devkit_dir / 'JPEGImages', is_directory=True)

        detections = []
        image_set_file = devkit_dir / 'ImageSets' / 'Main' / 'test.txt'
        for image in tqdm(read_txt(image_set_file, sep=None)):
            file_path = annotation_directory / '{}.xml'.format(image)
            tree = ET.parse(str(file_path))

            identifier = tree.find('.//filename').text
            image_path = images_directory / identifier
            if not image_path.is_file():
                raise FileNotFoundError("{}: {}".format(os.strerror(errno.ENOENT), image_path))

            labels, x_mins, y_mins, x_maxs, y_maxs = [], [], [], [], []
            difficult_indices = []
            for entry in tree.getroot():
                if not entry.tag.startswith('object'):
                    continue

                bbox = entry.find('bndbox')
                # VOC flags hard-to-detect objects as "difficult"; record
                # their positions so metrics can exclude them.
                difficult = int(entry.find('difficult').text)
                if difficult == 1:
                    difficult_indices.append(len(labels))

                labels.append(class_to_ind[entry.find('name').text])
                # VOC box coordinates are 1-based; shift to 0-based pixels.
                x_mins.append(float(bbox.find('xmin').text) - 1)
                y_mins.append(float(bbox.find('ymin').text) - 1)
                x_maxs.append(float(bbox.find('xmax').text) - 1)
                y_maxs.append(float(bbox.find('ymax').text) - 1)

            image_annotation = DetectionAnnotation(identifier, labels, x_mins, y_mins, x_maxs, y_maxs)
            image_annotation.metadata['difficult_boxes'] = difficult_indices

            detections.append(image_annotation)

        meta = {'label_map': reverse_label_map(class_to_ind)}
        if has_background:
            meta['background_label'] = 0

        return detections, meta
|
# Copyright 2015 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glance_store as store
import webob
import glance.api.v2.image_actions as image_actions
import glance.context
from glance.tests.unit import base
import glance.tests.unit.utils as unit_test_utils
# Shared base URI for the fake store locations used by the tests below.
BASE_URI = unit_test_utils.BASE_URI

# Fixed identifiers so fixtures and assertions stay deterministic.
USER1 = '54492ba0-f4df-4e4e-be62-27f4d76b29cf'
UUID1 = 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d'
TENANT1 = '6838eb7b-6ded-434a-882c-b344c77fe8df'
CHKSUM = '93264c3edf5972c9f1cb309543d38a5c'
def _db_fixture(id, **kwargs):
obj = {
'id': id,
'name': None,
'is_public': False,
'properties': {},
'checksum': None,
'owner': None,
'status': 'queued',
'tags': [],
'size': None,
'virtual_size': None,
'locations': [],
'protected': False,
'disk_format': None,
'container_format': None,
'deleted': False,
'min_ram': None,
'min_disk': None,
}
obj.update(kwargs)
return obj
class TestImageActionsController(base.IsolatedUnitTest):
    """Tests for deactivate/reactivate transitions on ImageActionsController."""

    def setUp(self):
        super(TestImageActionsController, self).setUp()
        self.db = unit_test_utils.FakeDB()
        self.policy = unit_test_utils.FakePolicyEnforcer()
        self.notifier = unit_test_utils.FakeNotifier()
        self.store = unit_test_utils.FakeStoreAPI()
        # Seed a few fake store locations for location-related code paths.
        for i in range(1, 4):
            self.store.data['%s/fake_location_%i' % (BASE_URI, i)] = ('Z', 1)
        self.store_utils = unit_test_utils.FakeStoreUtils(self.store)

        self.controller = image_actions.ImageActionsController(
            self.db,
            self.policy,
            self.notifier,
            self.store)
        self.controller.gateway.store_utils = self.store_utils
        store.create_stores()

    def _get_fake_context(self, user=USER1, tenant=TENANT1, roles=None,
                          is_admin=False):
        """Build a RequestContext for tests.

        `roles` defaults to ['member']; None is used as the default value
        instead of a list literal to avoid the mutable-default-argument
        pitfall (one shared list across all calls).
        """
        if roles is None:
            roles = ['member']

        kwargs = {
            'user': user,
            'tenant': tenant,
            'roles': roles,
            'is_admin': is_admin,
        }

        context = glance.context.RequestContext(**kwargs)
        return context

    def _create_image(self, status):
        """Reset the DB and create one public image in the given status."""
        self.db.reset()
        self.images = [
            _db_fixture(UUID1, owner=TENANT1, checksum=CHKSUM,
                        name='1', size=256, virtual_size=1024,
                        is_public=True,
                        locations=[{'url': '%s/%s' % (BASE_URI, UUID1),
                                    'metadata': {}, 'status': 'active'}],
                        disk_format='raw',
                        container_format='bare',
                        status=status),
        ]
        context = self._get_fake_context()
        # Plain loop instead of a side-effect-only list comprehension.
        for image in self.images:
            self.db.image_create(context, image)

    def test_deactivate_from_active(self):
        self._create_image('active')

        request = unit_test_utils.get_fake_request()

        self.controller.deactivate(request, UUID1)

        image = self.db.image_get(request.context, UUID1)

        self.assertEqual('deactivated', image['status'])

    def test_deactivate_from_deactivated(self):
        # Deactivating an already-deactivated image is a no-op, not an error.
        self._create_image('deactivated')

        request = unit_test_utils.get_fake_request()

        self.controller.deactivate(request, UUID1)

        image = self.db.image_get(request.context, UUID1)

        self.assertEqual('deactivated', image['status'])

    def _test_deactivate_from_wrong_status(self, status):
        # deactivate will yield an error if the initial status is anything
        # other than 'active' or 'deactivated'
        self._create_image(status)

        request = unit_test_utils.get_fake_request()

        self.assertRaises(webob.exc.HTTPForbidden, self.controller.deactivate,
                          request, UUID1)

    def test_deactivate_from_queued(self):
        self._test_deactivate_from_wrong_status('queued')

    def test_deactivate_from_saving(self):
        self._test_deactivate_from_wrong_status('saving')

    def test_deactivate_from_killed(self):
        self._test_deactivate_from_wrong_status('killed')

    def test_deactivate_from_pending_delete(self):
        self._test_deactivate_from_wrong_status('pending_delete')

    def test_deactivate_from_deleted(self):
        self._test_deactivate_from_wrong_status('deleted')

    def test_reactivate_from_active(self):
        # Reactivating an active image is a no-op, not an error.
        self._create_image('active')

        request = unit_test_utils.get_fake_request()

        self.controller.reactivate(request, UUID1)

        image = self.db.image_get(request.context, UUID1)

        self.assertEqual('active', image['status'])

    def test_reactivate_from_deactivated(self):
        self._create_image('deactivated')

        request = unit_test_utils.get_fake_request()

        self.controller.reactivate(request, UUID1)

        image = self.db.image_get(request.context, UUID1)

        self.assertEqual('active', image['status'])

    def _test_reactivate_from_wrong_status(self, status):
        # reactivate will yield an error if the initial status is anything
        # other than 'active' or 'deactivated'
        self._create_image(status)

        request = unit_test_utils.get_fake_request()

        self.assertRaises(webob.exc.HTTPForbidden, self.controller.reactivate,
                          request, UUID1)

    def test_reactivate_from_queued(self):
        self._test_reactivate_from_wrong_status('queued')

    def test_reactivate_from_saving(self):
        self._test_reactivate_from_wrong_status('saving')

    def test_reactivate_from_killed(self):
        self._test_reactivate_from_wrong_status('killed')

    def test_reactivate_from_pending_delete(self):
        self._test_reactivate_from_wrong_status('pending_delete')

    def test_reactivate_from_deleted(self):
        self._test_reactivate_from_wrong_status('deleted')
|
export default class AccordionItem {
  /**
   * Wire up a single accordion item and apply its initial state from the
   * data-accordion-closed / data-accordion-open attributes.
   */
  constructor(element) {
    this.element = element;
    this.closed = element.hasAttribute('data-accordion-closed');
    this.open = element.hasAttribute('data-accordion-open');

    if (this.closed) {
      this.collapse();
    }
    if (this.open) {
      this.expand();
    }
  }

  /** Hide the item body and mark the element as closed. */
  collapse() {
    this._applyState('data-accordion-closed', 'data-accordion-open', 'none', false);
  }

  /** Reveal the item body and mark the element as open. */
  expand() {
    this._applyState('data-accordion-open', 'data-accordion-closed', '', true);
  }

  /**
   * Shared transition: add one state attribute, drop the opposite one,
   * set the body's display value and update the open/closed flags.
   */
  _applyState(addAttr, removeAttr, display, isOpen) {
    const body = this.element.querySelector('[data-accordion-item-body]');

    if (this.element.hasAttribute(addAttr)) {
      // Already in the requested state; nothing to do.
      return;
    }

    this.element.setAttribute(addAttr, true);
    if (this.element.hasAttribute(removeAttr)) {
      this.element.removeAttribute(removeAttr);
    }
    if (body) {
      body.style.display = display;
    }

    this.open = isOpen;
    this.closed = !isOpen;
  }
}
|
"""Test cases for Zinnia's views"""
from datetime import date
from django.test import TestCase
from django.utils import timezone
from django.contrib import comments
from django.contrib.sites.models import Site
from django.test.utils import override_settings
from django.test.utils import restore_template_loaders
from django.test.utils import setup_test_template_loader
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.signals import user_logged_in
from django.contrib.auth.models import update_last_login
from django.contrib.auth.tests.utils import skipIfCustomUser
from zinnia.models.entry import Entry
from zinnia.models.author import Author
from zinnia.models.category import Category
from zinnia.managers import PUBLISHED
from zinnia.settings import PAGINATION
from zinnia.tests.utils import datetime
from zinnia.flags import get_user_flagger
from zinnia.signals import connect_discussion_signals
from zinnia.signals import disconnect_discussion_signals
@skipIfCustomUser
@override_settings(
    TEMPLATE_CONTEXT_PROCESSORS=(
        'django.core.context_processors.request',
    ))
class ViewsBaseCase(TestCase):
    """
    Setup and utility function base case.

    Creates one author, one category and two published entries
    (dated 2010-01-01 and 2010-06-01) shared by all the view tests.
    """

    def setUp(self):
        self.site = Site.objects.get_current()
        self.author = Author.objects.create_user(username='admin',
                                                 email='admin@example.com',
                                                 password='password')
        self.category = Category.objects.create(title='Tests', slug='tests')
        # First fixture entry, dated in January.
        params = {'title': 'Test 1',
                  'content': 'First test entry published',
                  'slug': 'test-1',
                  'tags': 'tests',
                  'creation_date': datetime(2010, 1, 1, 13, 25),
                  'status': PUBLISHED}
        entry = Entry.objects.create(**params)
        entry.sites.add(self.site)
        entry.categories.add(self.category)
        entry.authors.add(self.author)

        # Second fixture entry, dated in June, for month/day navigation tests.
        params = {'title': 'Test 2',
                  'content': 'Second test entry published',
                  'slug': 'test-2',
                  'tags': 'tests',
                  'creation_date': datetime(2010, 6, 1, 12, 12),
                  'status': PUBLISHED}
        entry = Entry.objects.create(**params)
        entry.sites.add(self.site)
        entry.categories.add(self.category)
        entry.authors.add(self.author)

    def create_published_entry(self):
        """Create and return one additional published entry (2010-01-01)."""
        params = {'title': 'My test entry',
                  'content': 'My test content',
                  'slug': 'my-test-entry',
                  'tags': 'tests',
                  'creation_date': datetime(2010, 1, 1, 15, 15),
                  'status': PUBLISHED}
        entry = Entry.objects.create(**params)
        entry.sites.add(self.site)
        entry.categories.add(self.category)
        entry.authors.add(self.author)
        return entry

    def check_publishing_context(self, url, first_expected,
                                 second_expected=None,
                                 friendly_context=None,
                                 queries=None):
        """Test the numbers of entries in context of an url,
        optionally re-checking after publishing one more entry, and
        optionally asserting the number of SQL queries issued."""
        if queries is not None:
            with self.assertNumQueries(queries):
                response = self.client.get(url)
        else:
            response = self.client.get(url)
        self.assertEquals(len(response.context['object_list']),
                          first_expected)
        if second_expected:
            # A freshly published entry should show up in the listing.
            self.create_published_entry()
            response = self.client.get(url)
            self.assertEquals(len(response.context['object_list']),
                              second_expected)
        if friendly_context:
            # The friendly alias must expose the same object list.
            self.assertEquals(
                response.context['object_list'],
                response.context[friendly_context])
        return response

    def check_capabilities(self, url, mimetype, queries=0):
        """Test simple views for the Weblog capabilities"""
        with self.assertNumQueries(queries):
            response = self.client.get(url)
        self.assertEquals(response['Content-Type'], mimetype)
        self.assertTrue('protocol' in response.context)
class ZinniaViewsTestCase(ViewsBaseCase):
"""
Test cases for generic views used in the application,
for reproducing and correcting issue :
http://github.com/Fantomas42/django-blog-zinnia/issues#issue/3
"""
urls = 'zinnia.tests.urls'
def tearDown(self):
"""Always try to restore the initial template loaders
even if the test_template_loader has not been enabled,
to avoid cascading errors if a test fails"""
try:
restore_template_loaders()
except AttributeError:
pass
@override_settings(USE_TZ=False)
def test_zinnia_entry_archive_index_no_timezone(self):
template_name_today = 'zinnia/archives/%s/entry_archive.html' % \
date.today().strftime('%Y/%m/%d')
setup_test_template_loader(
{template_name_today: ''})
response = self.check_publishing_context(
'/', 2, 3, 'entry_list', 2)
self.assertTemplateUsed(response, template_name_today)
restore_template_loaders()
@override_settings(USE_TZ=True, TIME_ZONE='Europe/Paris')
def test_zinnia_entry_archive_index_with_timezone(self):
template_name_today = 'zinnia/archives/%s/entry_archive.html' % \
timezone.localtime(timezone.now()
).strftime('%Y/%m/%d')
setup_test_template_loader(
{template_name_today: ''})
response = self.check_publishing_context(
'/', 2, 3, 'entry_list', 2)
self.assertTemplateUsed(response, template_name_today)
restore_template_loaders()
def test_zinnia_entry_archive_year(self):
setup_test_template_loader(
{'zinnia/archives/2010/entry_archive_year.html': ''})
response = self.check_publishing_context(
'/2010/', 2, 3, 'entry_list', 4)
self.assertTemplateUsed(
response, 'zinnia/archives/2010/entry_archive_year.html')
restore_template_loaders()
def test_zinnia_entry_archive_week(self):
setup_test_template_loader(
{'zinnia/archives/2010/week/00/entry_archive_week.html': ''})
response = self.check_publishing_context(
'/2010/week/00/', 1, 2, 'entry_list', 1)
self.assertTemplateUsed(
response, 'zinnia/archives/2010/week/00/entry_archive_week.html')
# All days in a new year preceding the first Monday
# are considered to be in week 0.
self.assertEquals(response.context['week'], date(2009, 12, 28))
self.assertEquals(response.context['week_end_day'], date(2010, 1, 3))
restore_template_loaders()
def test_zinnia_entry_archive_month(self):
setup_test_template_loader(
{'zinnia/archives/2010/month/01/entry_archive_month.html': '',
'zinnia/entry_archive_month.html': ''})
response = self.check_publishing_context(
'/2010/01/', 1, 2, 'entry_list', 4)
self.assertTemplateUsed(
response, 'zinnia/archives/2010/month/01/entry_archive_month.html')
self.assertEquals(response.context['previous_month'], None)
self.assertEquals(response.context['next_month'], date(2010, 6, 1))
response = self.client.get('/2010/06/')
self.assertEquals(response.context['previous_month'], date(2010, 1, 1))
self.assertEquals(response.context['next_month'], None)
response = self.client.get('/2009/12/')
self.assertEquals(response.context['previous_month'], None)
self.assertEquals(response.context['next_month'], date(2010, 1, 1))
restore_template_loaders()
def test_zinnia_entry_archive_day(self):
setup_test_template_loader(
{'zinnia/archives/2010/01/01/entry_archive_day.html': '',
'zinnia/entry_archive_day.html': ''})
response = self.check_publishing_context(
'/2010/01/01/', 1, 2, 'entry_list', 5)
self.assertTemplateUsed(
response, 'zinnia/archives/2010/01/01/entry_archive_day.html')
self.assertEquals(response.context['previous_month'], None)
self.assertEquals(response.context['next_month'], date(2010, 6, 1))
self.assertEquals(response.context['previous_day'], None)
self.assertEquals(response.context['next_day'], date(2010, 6, 1))
response = self.client.get('/2010/06/01/')
self.assertEquals(response.context['previous_month'], date(2010, 1, 1))
self.assertEquals(response.context['next_month'], None)
self.assertEquals(response.context['previous_day'], date(2010, 1, 1))
self.assertEquals(response.context['next_day'], None)
restore_template_loaders()
@override_settings(USE_TZ=False)
def test_zinnia_entry_archive_today_no_timezone(self):
setup_test_template_loader(
{'zinnia/entry_archive_today.html': ''})
with self.assertNumQueries(5):
response = self.client.get('/today/')
self.assertTemplateUsed(response, 'zinnia/entry_archive_today.html')
self.assertEquals(response.context['day'].date(), date.today())
self.assertEquals(response.context['previous_month'], date(2010, 6, 1))
self.assertEquals(response.context['next_month'], None)
self.assertEquals(response.context['previous_day'], date(2010, 6, 1))
self.assertEquals(response.context['next_day'], None)
restore_template_loaders()
@override_settings(USE_TZ=True, TIME_ZONE='Europe/Paris')
def test_zinnia_entry_archive_today_with_timezone(self):
setup_test_template_loader(
{'zinnia/entry_archive_today.html': ''})
with self.assertNumQueries(5):
response = self.client.get('/today/')
self.assertTemplateUsed(response, 'zinnia/entry_archive_today.html')
self.assertEquals(response.context['day'].date(), timezone.localtime(
timezone.now()).date())
self.assertEquals(response.context['previous_month'], date(2010, 6, 1))
self.assertEquals(response.context['next_month'], None)
self.assertEquals(response.context['previous_day'], date(2010, 6, 1))
self.assertEquals(response.context['next_day'], None)
restore_template_loaders()
def test_zinnia_entry_shortlink(self):
with self.assertNumQueries(1):
response = self.client.get('/1/')
self.assertEquals(response.status_code, 301)
self.assertEquals(response['Location'],
'http://testserver/2010/01/01/test-1/')
def test_zinnia_entry_detail(self):
setup_test_template_loader(
{'zinnia/_entry_detail.html': '',
'404.html': ''})
entry = self.create_published_entry()
entry.sites.clear()
response = self.client.get('/2010/01/01/my-test-entry/')
self.assertEquals(response.status_code, 404)
entry.detail_template = '_entry_detail.html'
entry.save()
entry.sites.add(Site.objects.get_current())
with self.assertNumQueries(1):
response = self.client.get('/2010/01/01/my-test-entry/')
self.assertEquals(response.status_code, 200)
self.assertTemplateUsed(response, 'zinnia/_entry_detail.html')
restore_template_loaders()
def test_zinnia_entry_detail_login(self):
setup_test_template_loader(
{'zinnia/entry_detail.html': '',
'zinnia/login.html': ''})
entry = self.create_published_entry()
entry.login_required = True
entry.save()
with self.assertNumQueries(4):
response = self.client.get('/2010/01/01/my-test-entry/')
self.assertTemplateUsed(response, 'zinnia/login.html')
response = self.client.post('/2010/01/01/my-test-entry/',
{'username': 'admin',
'password': 'password'})
self.assertEquals(response.status_code, 200)
self.assertTemplateUsed(response, 'zinnia/entry_detail.html')
restore_template_loaders()
def test_zinnia_entry_detail_password(self):
setup_test_template_loader(
{'zinnia/entry_detail.html': '',
'zinnia/password.html': ''})
entry = self.create_published_entry()
entry.password = 'password'
entry.save()
with self.assertNumQueries(1):
response = self.client.get('/2010/01/01/my-test-entry/')
self.assertTemplateUsed(response, 'zinnia/password.html')
self.assertEquals(response.context['error'], False)
with self.assertNumQueries(4):
response = self.client.post('/2010/01/01/my-test-entry/',
{'entry_password': 'bad_password'})
self.assertTemplateUsed(response, 'zinnia/password.html')
self.assertEquals(response.context['error'], True)
with self.assertNumQueries(5):
response = self.client.post('/2010/01/01/my-test-entry/',
{'entry_password': 'password'})
self.assertEquals(response.status_code, 200)
self.assertTemplateUsed(response, 'zinnia/entry_detail.html')
restore_template_loaders()
def test_zinnia_entry_detail_login_password(self):
user_logged_in.disconnect(update_last_login)
setup_test_template_loader(
{'zinnia/entry_detail.html': '',
'zinnia/login.html': '',
'zinnia/password.html': ''})
entry = self.create_published_entry()
entry.password = 'password'
entry.login_required = True
entry.save()
with self.assertNumQueries(4):
response = self.client.get('/2010/01/01/my-test-entry/')
self.assertTemplateUsed(response, 'zinnia/login.html')
with self.assertNumQueries(9):
response = self.client.post('/2010/01/01/my-test-entry/',
{'username': 'admin',
'password': 'password'})
self.assertEquals(response.status_code, 200)
self.assertTemplateUsed(response, 'zinnia/password.html')
self.assertEquals(response.context['error'], False)
with self.assertNumQueries(6):
response = self.client.post('/2010/01/01/my-test-entry/',
{'entry_password': 'password'})
self.assertEquals(response.status_code, 200)
self.assertTemplateUsed(response, 'zinnia/entry_detail.html')
restore_template_loaders()
user_logged_in.connect(update_last_login)
def test_zinnia_entry_channel(self):
setup_test_template_loader(
{'zinnia/entry_list.html': ''})
self.check_publishing_context(
'/channel-test/', 2, 3, 'entry_list', 1)
restore_template_loaders()
def test_zinnia_category_list(self):
setup_test_template_loader(
{'zinnia/category_list.html': ''})
self.check_publishing_context(
'/categories/', 1,
friendly_context='category_list',
queries=0)
entry = Entry.objects.all()[0]
entry.categories.add(Category.objects.create(
title='New category', slug='new-category'))
self.check_publishing_context('/categories/', 2)
restore_template_loaders()
def test_zinnia_category_detail(self):
setup_test_template_loader(
{'zinnia/category/tests/entry_list.html': ''})
response = self.check_publishing_context(
'/categories/tests/', 2, 3, 'entry_list', 2)
self.assertTemplateUsed(
response, 'zinnia/category/tests/entry_list.html')
self.assertEquals(response.context['category'].slug, 'tests')
restore_template_loaders()
def test_zinnia_category_detail_paginated(self):
"""Test case reproducing issue #42 on category
detail view paginated"""
setup_test_template_loader(
{'zinnia/entry_list.html': ''})
for i in range(PAGINATION):
params = {'title': 'My entry %i' % i,
'content': 'My content %i' % i,
'slug': 'my-entry-%i' % i,
'creation_date': datetime(2010, 1, 1),
'status': PUBLISHED}
entry = Entry.objects.create(**params)
entry.sites.add(self.site)
entry.categories.add(self.category)
response = self.client.get('/categories/tests/')
self.assertEquals(len(response.context['object_list']), PAGINATION)
response = self.client.get('/categories/tests/?page=2')
self.assertEquals(len(response.context['object_list']), 2)
response = self.client.get('/categories/tests/page/2/')
self.assertEquals(len(response.context['object_list']), 2)
self.assertEquals(response.context['category'].slug, 'tests')
restore_template_loaders()
def test_zinnia_author_list(self):
setup_test_template_loader(
{'zinnia/author_list.html': ''})
self.check_publishing_context(
'/authors/', 1,
friendly_context='author_list',
queries=0)
user = Author.objects.create(username='new-user',
email='new_user@example.com')
self.check_publishing_context('/authors/', 1)
entry = Entry.objects.all()[0]
entry.authors.add(user)
self.check_publishing_context('/authors/', 2)
restore_template_loaders()
def test_zinnia_author_detail(self):
setup_test_template_loader(
{'zinnia/author/admin/entry_list.html': ''})
response = self.check_publishing_context(
'/authors/admin/', 2, 3, 'entry_list', 2)
self.assertTemplateUsed(
response, 'zinnia/author/admin/entry_list.html')
self.assertEquals(response.context['author'].username, 'admin')
restore_template_loaders()
def test_zinnia_author_detail_paginated(self):
"""Test case reproducing issue #207 on author
detail view paginated"""
setup_test_template_loader(
{'zinnia/entry_list.html': ''})
for i in range(PAGINATION):
params = {'title': 'My entry %i' % i,
'content': 'My content %i' % i,
'slug': 'my-entry-%i' % i,
'creation_date': datetime(2010, 1, 1),
'status': PUBLISHED}
entry = Entry.objects.create(**params)
entry.sites.add(self.site)
entry.authors.add(self.author)
response = self.client.get('/authors/admin/')
self.assertEquals(len(response.context['object_list']), PAGINATION)
response = self.client.get('/authors/admin/?page=2')
self.assertEquals(len(response.context['object_list']), 2)
response = self.client.get('/authors/admin/page/2/')
self.assertEquals(len(response.context['object_list']), 2)
self.assertEquals(response.context['author'].username, 'admin')
restore_template_loaders()
    def test_zinnia_tag_list(self):
        """The tag list view counts distinct tags across entries."""
        setup_test_template_loader(
            {'zinnia/tag_list.html': ''})
        self.check_publishing_context(
            '/tags/', 1,
            friendly_context='tag_list',
            queries=1)
        # Adding a second tag to an entry adds it to the tag list.
        entry = Entry.objects.all()[0]
        entry.tags = 'tests, tag'
        entry.save()
        self.check_publishing_context('/tags/', 2)
        restore_template_loaders()
    def test_zinnia_tag_detail(self):
        """Tag detail lists the tag's entries and 404s on unknown tags."""
        setup_test_template_loader(
            {'zinnia/tag/tests/entry_list.html': '',
             '404.html': ''})
        response = self.check_publishing_context(
            '/tags/tests/', 2, 3, 'entry_list', 2)
        self.assertTemplateUsed(
            response, 'zinnia/tag/tests/entry_list.html')
        self.assertEquals(response.context['tag'].name, 'tests')
        response = self.client.get('/tags/404/')
        self.assertEquals(response.status_code, 404)
        restore_template_loaders()
    def test_zinnia_tag_detail_paginated(self):
        """Both pagination syntaxes must work on the tag detail view."""
        setup_test_template_loader(
            {'zinnia/entry_list.html': ''})
        # Fill a whole first page with fresh 'tests'-tagged entries so
        # the two fixture entries land on page 2.
        for i in range(PAGINATION):
            params = {'title': 'My entry %i' % i,
                      'content': 'My content %i' % i,
                      'slug': 'my-entry-%i' % i,
                      'tags': 'tests',
                      'creation_date': datetime(2010, 1, 1),
                      'status': PUBLISHED}
            entry = Entry.objects.create(**params)
            entry.sites.add(self.site)
        response = self.client.get('/tags/tests/')
        self.assertEquals(len(response.context['object_list']), PAGINATION)
        response = self.client.get('/tags/tests/?page=2')
        self.assertEquals(len(response.context['object_list']), 2)
        response = self.client.get('/tags/tests/page/2/')
        self.assertEquals(len(response.context['object_list']), 2)
        self.assertEquals(response.context['tag'].name, 'tests')
        restore_template_loaders()
    def test_zinnia_entry_search(self):
        """Search returns matches, and errors for short/missing patterns."""
        setup_test_template_loader(
            {'zinnia/entry_search.html': ''})
        self.check_publishing_context(
            '/search/?pattern=test', 2, 3, 'entry_list', 1)
        # A two-character pattern is rejected as too short.
        response = self.client.get('/search/?pattern=ab')
        self.assertEquals(len(response.context['object_list']), 0)
        self.assertEquals(response.context['error'],
                          _('The pattern is too short'))
        # No pattern at all yields a distinct error message.
        response = self.client.get('/search/')
        self.assertEquals(len(response.context['object_list']), 0)
        self.assertEquals(response.context['error'],
                          _('No pattern to search found'))
        restore_template_loaders()
    def test_zinnia_entry_random(self):
        """The random view redirects (302) to some published entry."""
        setup_test_template_loader(
            {'zinnia/entry_detail.html': ''})
        response = self.client.get('/random/', follow=True)
        # All fixture entries are dated 2010, so the target URL must be too.
        self.assertTrue(response.redirect_chain[0][0].startswith(
            'http://testserver/2010/'))
        self.assertEquals(response.redirect_chain[0][1], 302)
        restore_template_loaders()
    def test_zinnia_sitemap(self):
        """The HTML sitemap lists entries and categories and reflects
        newly published content."""
        setup_test_template_loader(
            {'zinnia/sitemap.html': ''})
        with self.assertNumQueries(0):
            response = self.client.get('/sitemap/')
        self.assertEquals(len(response.context['entries']), 2)
        self.assertEquals(len(response.context['categories']), 1)
        # A new published entry in a new category bumps both counts.
        entry = self.create_published_entry()
        entry.categories.add(Category.objects.create(title='New category',
                                                     slug='new-category'))
        response = self.client.get('/sitemap/')
        self.assertEquals(len(response.context['entries']), 3)
        self.assertEquals(len(response.context['categories']), 2)
        restore_template_loaders()
    def test_zinnia_trackback(self):
        """Exercises the trackback endpoint: 404 on unknown entries,
        permanent redirects, the disabled/success/duplicate XML
        responses, and the entry's trackback counter."""
        setup_test_template_loader(
            {'404.html': ''})
        response = self.client.post('/trackback/404/')
        self.assertEquals(response.status_code, 404)
        restore_template_loaders()
        # Both verbs on the pk-based URL answer with a 301 redirect.
        self.assertEquals(
            self.client.post('/trackback/1/').status_code, 301)
        self.assertEquals(
            self.client.get('/trackback/1/').status_code, 301)
        entry = Entry.objects.get(slug='test-1')
        self.assertEquals(entry.trackback_count, 0)
        entry.trackback_enabled = False
        entry.save()
        # Trackbacks are refused while disabled on the entry.
        self.assertEquals(
            self.client.post('/trackback/1/',
                             {'url': 'http://example.com'}).content,
            '<?xml version="1.0" encoding="utf-8"?>\n<response>\n \n '
            '<error>1</error>\n <message>Trackback is not enabled for '
            'Test 1</message>\n \n</response>\n')
        entry.trackback_enabled = True
        entry.save()
        connect_discussion_signals()
        get_user_flagger()  # Memoize user flagger for stable query number
        if comments.get_comment_app_name() == comments.DEFAULT_COMMENTS_APP:
            # With the default comment app the number of queries is
            # known, so it can be asserted exactly.
            with self.assertNumQueries(6):
                self.assertEquals(
                    self.client.post('/trackback/1/',
                                     {'url': 'http://example.com'}).content,
                    '<?xml version="1.0" encoding="utf-8"?>\n<response>\n \n'
                    ' <error>0</error>\n \n</response>\n')
        else:
            self.assertEquals(
                self.client.post('/trackback/1/',
                                 {'url': 'http://example.com'}).content,
                '<?xml version="1.0" encoding="utf-8"?>\n<response>\n \n '
                '<error>0</error>\n \n</response>\n')
        disconnect_discussion_signals()
        entry = Entry.objects.get(pk=entry.pk)
        self.assertEquals(entry.trackback_count, 1)
        # A second identical trackback is reported as already registered.
        self.assertEquals(
            self.client.post('/trackback/1/',
                             {'url': 'http://example.com'}).content,
            '<?xml version="1.0" encoding="utf-8"?>\n<response>\n \n '
            '<error>1</error>\n <message>Trackback is already registered'
            '</message>\n \n</response>\n')
    def test_zinnia_trackback_on_entry_without_author(self):
        """A trackback on an entry with no authors must still succeed."""
        entry = Entry.objects.get(slug='test-1')
        entry.authors.clear()
        self.assertEquals(
            self.client.post('/trackback/1/',
                             {'url': 'http://example.com'}).content,
            '<?xml version="1.0" encoding="utf-8"?>\n<response>\n \n '
            '<error>0</error>\n \n</response>\n')
    def test_capabilities(self):
        """Discovery/capability files are served with their MIME types."""
        self.check_capabilities('/humans.txt', 'text/plain', 0)
        self.check_capabilities('/rsd.xml', 'application/rsd+xml', 0)
        self.check_capabilities('/wlwmanifest.xml',
                                'application/wlwmanifest+xml', 0)
        self.check_capabilities('/opensearch.xml',
                                'application/opensearchdescription+xml', 1)
    def test_comment_success(self):
        """The comment success view resolves the ?c= comment id and
        redirects to the commented object once the comment is public."""
        setup_test_template_loader(
            {'comments/zinnia/entry/posted.html': '',
             'zinnia/entry_list.html': ''})
        with self.assertNumQueries(0):
            response = self.client.get('/comments/success/')
        self.assertTemplateUsed(response, 'comments/zinnia/entry/posted.html')
        self.assertEquals(response.context['comment'], None)
        # An unknown comment id leaves the context comment empty.
        with self.assertNumQueries(1):
            response = self.client.get('/comments/success/?c=42')
        self.assertEquals(response.context['comment'], None)
        comment = comments.get_model().objects.create(
            submit_date=timezone.now(),
            comment='My Comment 1', content_object=self.category,
            site=self.site, is_public=False)
        # A non-public comment is still available in the context...
        with self.assertNumQueries(1):
            response = self.client.get('/comments/success/?c=1')
        self.assertEquals(response.context['comment'], comment)
        comment.is_public = True
        comment.save()
        # ...and once public, the view redirects to its target object.
        with self.assertNumQueries(5):
            response = self.client.get('/comments/success/?c=1', follow=True)
        self.assertEquals(
            response.redirect_chain[1],
            ('http://example.com/categories/tests/', 302))
        restore_template_loaders()
class ZinniaCustomDetailViews(ViewsBaseCase):
    """
    Tests with an alternate urls.py that modifies how author_detail,
    tags_detail and categories_detail views to be called with a custom
    template_name keyword argument and an extra_context.
    """
    urls = 'zinnia.tests.custom_views_detail_urls'
    def setUp(self):
        """We don't need to generate the full template
        to make the tests working"""
        super(ZinniaCustomDetailViews, self).setUp()
        setup_test_template_loader(
            {'zinnia/entry_search.html': ''})
    def tearDown(self):
        # Undo the template loader installed in setUp().
        restore_template_loaders()
    def test_custom_category_detail(self):
        # The custom template_name and extra_context must both be used.
        response = self.check_publishing_context('/categories/tests/', 2, 3)
        self.assertTemplateUsed(response, 'zinnia/entry_search.html')
        self.assertEquals(response.context['category'].slug, 'tests')
        self.assertEquals(response.context['extra'], 'context')
    def test_custom_author_detail(self):
        # Same checks for the customized author detail view.
        response = self.check_publishing_context('/authors/admin/', 2, 3)
        self.assertTemplateUsed(response, 'zinnia/entry_search.html')
        self.assertEquals(response.context['author'].username, 'admin')
        self.assertEquals(response.context['extra'], 'context')
    def test_custom_tag_detail(self):
        # Same checks for the customized tag detail view.
        response = self.check_publishing_context('/tags/tests/', 2, 3)
        self.assertTemplateUsed(response, 'zinnia/entry_search.html')
        self.assertEquals(response.context['tag'].name, 'tests')
        self.assertEquals(response.context['extra'], 'context')
|
/*
* Copyright (c) 2018, Alliance for Open Media. All rights reserved
*
* This source code is subject to the terms of the BSD 2 Clause License and
* the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
* was not distributed with this source code in the LICENSE file, you can
* obtain it at www.aomedia.org/license/software. If the Alliance for Open
* Media Patent License 1.0 was not distributed with this source code in the
* PATENTS file, you can obtain it at www.aomedia.org/license/patent.
*/
#include <stdint.h>
#include <smmintrin.h>
// Byte-boundary alignment issues
#define ALIGN_SIZE 8
#define ALIGN_MASK (ALIGN_SIZE - 1)
// Consumes the buffer in sizeof(type)-sized chunks, feeding each chunk
// through the given SSE4.2 CRC intrinsic. NOTE(review): 'crc', 'buf' and
// 'len' are updated in place, and 'buf' is expanded without parentheses,
// so only plain lvalue identifiers should be passed for it.
#define CALC_CRC(op, crc, type, buf, len) \
  while ((len) >= sizeof(type)) {         \
    (crc) = op((crc), *(type *)(buf));    \
    (len) -= sizeof(type);                \
    buf += sizeof(type);                  \
  }
/**
 * Calculates 32-bit CRC for the input buffer
 * polynomial is 0x11EDC6F41 (CRC-32C, the polynomial the SSE4.2
 * _mm_crc32_* intrinsics implement)
 * @return A 32-bit unsigned integer representing the CRC
 */
uint32_t av1_get_crc32c_value_sse4_2(void *crc_calculator, uint8_t *p,
                                     size_t len) {
  (void)crc_calculator;  // unused here; kept for interface compatibility
  const uint8_t *buf = p;
  uint32_t crc = 0xFFFFFFFF;  // standard CRC-32C initial value
  // Align the input to the word boundary
  for (; (len > 0) && ((intptr_t)buf & ALIGN_MASK); len--, buf++) {
    crc = _mm_crc32_u8(crc, *buf);
  }
#ifdef __x86_64__
  // On 64-bit targets, consume 8 bytes per instruction first.
  uint64_t crc64 = crc;
  CALC_CRC(_mm_crc32_u64, crc64, uint64_t, buf, len);
  crc = (uint32_t)crc64;
#endif
  // Mop up the remainder in progressively smaller chunks.
  CALC_CRC(_mm_crc32_u32, crc, uint32_t, buf, len);
  CALC_CRC(_mm_crc32_u16, crc, uint16_t, buf, len);
  CALC_CRC(_mm_crc32_u8, crc, uint8_t, buf, len);
  // Final inversion, as the CRC-32C definition requires.
  return (crc ^= 0xFFFFFFFF);
}
|
import linked_list_ADT as N
def split_chain(node_chain):
    """Cut the given node chain into two halves, in place.

    For an odd-length chain the middle node belongs to the second
    half.  An empty chain yields (None, None); a one-node chain
    yields (None, node_chain).

    Pre-conditions:
        :param node_chain: a node-chain, possibly empty
    Post-conditions:
        the original node chain is severed at its midpoint
    Return:
        :return: a tuple (front, back) of node-chains, each holding
                 about half of the original nodes
    """
    if node_chain is None:
        return None, None
    if node_chain.get_next() is None:
        return None, node_chain
    # Classic fast/slow pointer walk: the fast pointer advances two
    # hops per iteration, so when it runs off the end the slow pointer
    # sits at the start of the second half.
    fast = node_chain
    slow = node_chain
    before_slow = None
    while fast is not None:
        fast = fast.get_next()
        if fast is None:
            continue
        fast = fast.get_next()
        before_slow = slow
        slow = slow.get_next()
    # Detach the first half just in front of the slow pointer.
    before_slow.set_next(None)
    return node_chain, slow
def merge(nc1, nc2):
    """Merge two sorted node-chains into one sorted node-chain.

    Pre-conditions:
        :param nc1: a node-chain, possibly empty, sorted ascending
        :param nc2: a node-chain, possibly empty, sorted ascending
    Post-condition:
        None
    Return:
        :return: a sorted node-chain holding the values of both
                 inputs; empty (None) when both inputs are empty
    """
    # An empty input means the other chain is already the answer.
    if nc1 is None:
        return nc2
    if nc2 is None:
        return nc1
    # Seed the result with a fresh node holding the smaller head value.
    if nc1.get_data() < nc2.get_data():
        merged = N.node(nc1.get_data())
        nc1 = nc1.get_next()
    else:
        merged = N.node(nc2.get_data())
        nc2 = nc2.get_next()
    # `tail` always points at the last node of the merged chain.
    tail = merged
    while nc1 is not None and nc2 is not None:
        # Take the smaller of the two current heads, advancing only
        # the chain it came from.
        if nc1.get_data() < nc2.get_data():
            smaller = nc1.get_data()
            nc1 = nc1.get_next()
        else:
            smaller = nc2.get_data()
            nc2 = nc2.get_next()
        tail.set_next(N.node(smaller))
        tail = tail.get_next()
    # One chain is exhausted; link the leftovers of the other directly
    # (no need to copy them node by node).
    tail.set_next(nc2 if nc1 is None else nc1)
    return merged
def merge_sort(node_chain):
    """Return a node-chain with the values of ``node_chain`` sorted
    in ascending order, using merge sort.

    Pre-conditions:
        :param node_chain: a node-chain of numbers, possibly empty
    Post-condition:
        the original chain is dismantled while sorting and should
        not be used afterwards
    Return:
        :return: a sorted node-chain, e.g. 45->1->21->5 becomes
                 1->5->21->45
    """
    # Chains of length 0 or 1 are already sorted.
    if node_chain is None or node_chain.get_next() is None:
        return node_chain
    # Split, recursively sort each half, then merge the results.
    front, back = split_chain(node_chain)
    return merge(merge_sort(front), merge_sort(back))
|
// ---------------------- Requires, Includes and Globals ------------------------
// Crowdsourcing server: serves annotation tasks ('job'/'player' pages)
// and stores worker contributions in MongoDB.
var project = 'webmedia_2017';
var qtd_target = 100; // contributions wanted per input item
var activeTask = 4;
var kind = 'player';//Tasks 1, 2 and 3: 'job' ; Task 4: 'player'
var group = false;//Tasks 1, 3, 4: false; Task 2: true;
if(kind == 'job'){
    var aggregation_method = require('./aggregation/'+project+'/task_'+activeTask+'.js');
}
var Functions = require('./utils/functions.js');
Functions = new Functions.functions();
var host = 'localhost';
var http = require('http');
var https = require('https');
var path = require('path');
var fs = require('fs');
var express = require('express');
var app = express();
app.set('view engine', 'ejs');
app.use(express.static(__dirname+'/'));
var bodyParser = require('body-parser')
app.use(bodyParser.json({limit: '50mb'}));
app.use(bodyParser.urlencoded({limit: '50mb'}));
var cors = require('cors');
// NOTE(review): all three cors() middlewares are registered and run in
// order; combined with the wildcard header below, effectively any
// origin is accepted — confirm this is intended.
app.use(cors({origin: 'null'}));
app.use(cors({origin: 'https://youtube.com'}));
app.use(cors({origin: 'https://ttv.microworkers.com'}));
app.use(function(req, res, next) {
    res.header("Access-Control-Allow-Origin", "*");
    res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
    next();
});
//---------------- Database - MongoDb ---------
var mongoose = require('mongoose');
mongoose.Promise = global.Promise;
mongoose.connect('mongodb://localhost/crowdnote_'+project);
var db = mongoose.connection;
db.on('error', console.error.bind(console, 'connection error:'));
// NOTE(review): 'openUri' looks like a method name, not a connection
// event ('open' is the usual event); harmless while the handler is
// empty, but verify before relying on it.
db.once('openUri', function() {});
var ItemSchema, Item;
var Schema = mongoose.Schema;
var ObjectId = Schema.ObjectId;
var Timestamp = Schema.Timestamp;
// A single schema shape is shared by input items, contributions and
// aggregated items.
itemSchema = Schema({
    // Global
    item_id: String,
    uri: String,
    start: String,
    end: String,
    instant: String,
    // at Runtime
    item_index: String,
    // Tasks 1, 2 and 3
    point: String,
    type: String,
    image: String,
    // Tasks 2 and 3
    content: String,
    content_type: String,
    // Task 3
    x: String,
    y: String,
    // User Identification for contributions
    job_id: String,
    fingerprint: String
});
// Inputs of the current task, its contributions, and the aggregated
// items that seed the next task.
Input = mongoose.model('items_'+activeTask, itemSchema);
Output = mongoose.model('contributions_'+activeTask, itemSchema);
Aggregation = mongoose.model('items_'+(activeTask+1), itemSchema);
var input = new Array();  // in-memory working copy of the input items
var curInput = 0;         // round-robin cursor over `input`
init();
// --------------------- Init Functions -----------------------------
// Loads every input item for the active task into the in-memory
// `input` array, optionally grouping them by item_id (task 2).
function init(){
    Input.find({},function (err, V) {
        if (err) return console.error(err);
        if(group){
            V = groupInput(V);
        }
        for(var i=0; i < V.length; i++){
            input[i] = V[i];
            input[i].qtd = 0; // contributions received so far for this item
        }
    }).sort({'_id' : 1});
    // NOTE(review): .sort() is chained after find() was given a callback;
    // verify the sort actually applies before the callback runs.
}
// Groups a flat list of input items by their item_id, preserving the
// order in which each distinct item_id was first seen.
// Returns an array of arrays, one inner array per distinct item_id.
function groupInput(items){
    var groups = {};   // item_id -> array of items with that id
    var order = [];    // distinct item_ids in first-seen order
    for(var i=0; i < items.length; i++){
        var id = items[i].item_id;
        if(!groups[id]){
            groups[id] = [];
            order.push(id);
        }
        groups[id].push(items[i]);
    }
    // Fix: the result is now a proper local variable — the original
    // assigned to an undeclared `V`, leaking it into the global scope.
    var result = [];
    for(var j=0; j < order.length; j++){
        result.push(groups[order[j]]);
    }
    return result;
}
//----------------------- Endpoints -------------------------------
// Main task page and the post-completion thank-you page.
app.get('/', function(req, res) {
    res.render('ejs/'+project+'/task_'+activeTask, null);
});
app.get('/thanks', function(req, res) {
    //res.render('ejs/'+project+'/task_'+activeTask, null);
    res.render('ejs/thanks', null);
});
// Resolves a Wikipedia article URL (?url=...) to its lead image and
// answers with the image inlined as a base64 data URI; answers an
// empty body when the page has no thumbnail.
app.get('/wiki_image', function(req, res) {
    var url = req.query.url;
    var parts = url.split('/');
    var id = parts[parts.length -1];
    const request = require('request');
    request('https://en.wikipedia.org/w/api.php?action=query&titles='+id+'&prop=pageimages&pithumbsize=600&format=json', { json: true }, (err, res2, body) => {
        if (err) { return console.log(err); }
        // `pages` is keyed by page id; only the first page matters.
        for(var k in body.query.pages) {
            if(body.query.pages[k].thumbnail != null){
                var thumb = body.query.pages[k].thumbnail.source;
                request.get(thumb, function (error, response, body) {
                    if (!error && response.statusCode == 200) {
                        // Fix: Buffer.from() replaces the deprecated and
                        // unsafe `new Buffer(...)` constructor, and the
                        // data URI is now a local instead of an implicit
                        // global shared between concurrent requests.
                        var data = "data:" + response.headers["content-type"] + ";base64," + Buffer.from(body).toString('base64');
                        res.end(data);
                    }
                });
            }else{
                res.send('');
            }
            break;
        }
    });
});
// Task 4 ('player'): hands the entire input set to the client at once,
// each item tagged with the requesting worker's job id and fingerprint.
app.get('/player', function(req, res) {
    var contents = new Array();
    // Fix: declared as locals — the originals leaked into the global
    // scope and could race between concurrent requests.
    var job_id = Functions.fingerprint(req,true);
    var print = Functions.fingerprint(req,false);
    for(var i=0; i < input.length; i++){
        var obj = input[i];
        obj.job_id = job_id;
        obj.fingerprint = print;
        contents.push(obj);
    }
    res.json(contents);
});
// Tasks 1-3 ('job'): hands out the next input item in round-robin
// order, tagged with the worker's job id and fingerprint. In grouped
// mode (task 2) the item is wrapped in a {data, info} envelope.
app.get('/job', function(req, res) {
    if(input.length < 1){
        // Fix: return here — the original fell through and crashed on
        // input[curInput] after having already rendered a response.
        return res.render('ejs/thanks', null);
    }
    if(!input[curInput]){
        curInput--;
    }
    // Fix: declared as locals (previously implicit globals shared
    // across requests); also dropped the unused `contribs` array.
    var job_id = Functions.fingerprint(req,true);
    var print = Functions.fingerprint(req,false);
    var obj;
    if(group){
        var inp = input[curInput];
        if(curInput < input.length-1){
            curInput++;
        }else{
            curInput = 0;
        }
        var qtd = inp.qtd;
        delete inp.qtd;
        var more = new Object();
        more.item_index = curInput;
        more.qtd = qtd;
        more.job_id = job_id;
        more.fingerprint = print;
        obj = new Object();
        obj.data = inp;
        obj.info = more;
    }else{
        obj = input[curInput];
        obj.item_index = curInput;
        if(curInput < input.length-1){
            curInput++;
        }else{
            curInput = 0;
        }
        obj.job_id = job_id;
        obj.fingerprint = print;
    }
    res.json(obj);
});
// Stores one worker contribution and retires the input item once it
// has received `qtd_target` contributions.
app.post('/store', function(req, res) {
    var data = req.body;
    input[data.item_index].qtd++;
    if(input[data.item_index].qtd >= qtd_target){
        // NOTE(review): splicing shifts the indexes of the later items
        // while clients may still hold an old item_index — verify that
        // stale indexes cannot corrupt the counters here.
        input.splice( data.item_index, 1 );
    }
    // item_index is runtime-only bookkeeping; don't persist it.
    delete data.item_index;
    var c = new Output(data);
    c.save(function (err, m0) {if (err) return console.error(err);});
    res.end();
});
// Runs the task-specific aggregation over the stored contributions,
// writing the results into the next task's input collection.
// NOTE(review): aggregation_method is only require()d when kind is
// 'job'; hitting this endpoint in 'player' mode would throw.
app.get('/aggregate', function(req, res) {
    var agg = new aggregation_method.aggregation(req, res, Output, Aggregation, Functions);
    agg.aggregate();
});
// ------------- Server Functions ------------------------------
// Static asset helpers. Fix: the query-derived names are sanitized
// with path.basename() so a crafted request (e.g. ?name=../../etc/passwd)
// cannot escape the views directory, and the locals no longer shadow
// the `path` module required at the top of the file.
app.get('/tools', function(req, res) {
    var task = path.basename(String(req.query.task));
    var file = 'views/ejs/task_'+task+'.ejs';
    fs.readFile(file, function (err, data){
        res.setHeader('content-type', 'text/javascript');
        res.end(data);
    });
});
app.get('/include', function(req, res) {
    var name = path.basename(String(req.query.name));
    var mime = path.basename(String(req.query.mime));
    var file = 'views/'+mime+'/'+name+'.'+mime;
    fs.readFile(file, function (err, data){
        res.setHeader('content-type', 'text/javascript');
        res.end(data);
    });
});
app.get('/images', function(req, res) {
    var name = path.basename(String(req.query.name));
    var mime = path.basename(String(req.query.mime));
    var file = 'views/img/'+name+'.'+mime;
    fs.readFile(file, function (err, data){
        res.writeHead(200, {"Content-Type":"video/"+mime});
        res.end(data);
    });
});
// ------------- Create Server ------------------------------
// Plain HTTP on port 84 (no TLS).
http.createServer(app).listen(84);
//server.listen(process.env.PORT || 83, process.env.IP || "0.0.0.0", function(){
//	var addr = server.address();
//});
|
import { Tracker } from 'meteor/tracker';
import { Session } from 'meteor/session';
import { Template } from 'meteor/templating';
Template.chatRoomItem.helpers({
	// Builds the view-model consumed by the sidebar room item template.
	roomData() {
		let { name } = this;
		if (this.fname) {
			// Prefer the "full name" when real names are enabled for direct
			// messages, or special-character names are allowed for channels.
			const realNameForDirectMessages = this.t === 'd' && RocketChat.settings.get('UI_Use_Real_Name');
			const realNameForChannel = this.t !== 'd' && RocketChat.settings.get('UI_Allow_room_names_with_special_chars');
			if (realNameForDirectMessages || realNameForChannel) {
				name = this.fname;
			}
		}
		const openedRoom = Tracker.nonreactive(() => Session.get('openedRoom'));
		const unread = this.unread > 0 ? this.unread : false;
		// if (this.unread > 0 && (!hasFocus || openedRoom !== this.rid)) {
		// 	unread = this.unread;
		// }
		// Fix: Array#includes takes a value, not a predicate. The original
		// passed an arrow function — which is never an element of the
		// array — so `active` was always false. The item is active when the
		// currently opened room id matches this room's rid or _id.
		const active = [this.rid, this._id].includes(openedRoom);
		const archivedClass = this.archived ? 'archived' : false;
		this.alert = !this.hideUnreadStatus && this.alert; // && (!hasFocus || FlowRouter.getParam('_id') !== this.rid);
		const icon = RocketChat.roomTypes.getIcon(this.t);
		const avatar = !icon;
		const roomData = {
			...this,
			icon,
			avatar,
			username : this.name,
			route: RocketChat.roomTypes.getRouteLink(this.t, this),
			name: name || RocketChat.roomTypes.getRoomName(this.t, this),
			unread,
			active,
			archivedClass,
			status: this.t === 'd' || this.t === 'l',
		};
		roomData.username = roomData.username || roomData.name;
		// Fall back to the room's stored last message when the subscription
		// doesn't carry one and the feature is enabled.
		if (!this.lastMessage && RocketChat.settings.get('Store_Last_Message')) {
			const room = RocketChat.models.Rooms.findOne(this.rid || this._id, { fields: { lastMessage: 1 } });
			roomData.lastMessage = (room && room.lastMessage) || { msg: t('No_messages_yet') };
		}
		return roomData;
	},
});
// Keeps the sidebar highlight in sync when the user enters a room:
// clears the previous active item and marks the new subscription's item.
RocketChat.callbacks.add('enter-room', (sub) => {
	const items = $('.rooms-list .sidebar-item');
	items.filter('.sidebar-item--active').removeClass('sidebar-item--active');
	if (sub) {
		items.filter(`[data-id=${ sub._id }]`).addClass('sidebar-item--active');
	}
	return sub;
});
|
"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.UsersController = void 0;
const router_1 = require("../../router");
const http_response_1 = require("../../router/http-response");
const views_create_user_1 = require("./views/views.create-user");
const states_post_user_1 = require("./states/states.post-user");
const states_get_single_user_1 = require("./states/states.get-single-user");
const views_user_1 = require("./views/views.user");
let UsersController = class UsersController {
postUser(req, res) {
return new states_post_user_1.PostUser().configure(req, res);
}
getSingleUser(req, res) {
return new states_get_single_user_1.GetSingleUser().configure(req, res);
}
};
__decorate([
router_1.Post({
consumes: 'application/vnd.user+json',
schema: {
body: views_create_user_1.createUserView,
},
}),
__metadata("design:type", Function),
__metadata("design:paramtypes", [Object, http_response_1.HttpResponse]),
__metadata("design:returntype", Object)
], UsersController.prototype, "postUser", null);
__decorate([
router_1.Get({
produces: 'application/vnd.user+json',
viewConverter: views_user_1.userViewConverter,
path: '/:id',
}),
__metadata("design:type", Function),
__metadata("design:paramtypes", [Object, http_response_1.HttpResponse]),
__metadata("design:returntype", Object)
], UsersController.prototype, "getSingleUser", null);
UsersController = __decorate([
router_1.Controller('/users')
], UsersController);
exports.UsersController = UsersController;
//# sourceMappingURL=users.controller.js.map
|
'use strict';
// Globals for the product-voting page.
let totalClicks = 0;    // votes cast so far
let clicksAllowed = 10; // voting ends after this many clicks
let allProducts = [];   // every Products instance ever created
let myContainer = document.querySelector('section');
let imageOne = document.querySelector('section img:first-child');
let imageTwo = document.querySelector('section img:nth-child(2)');
let imageThree = document.querySelector('section img:nth-child(3)');
let picArray = [imageOne, imageTwo, imageThree];
let uniqueArray = [];   // queue of distinct random product indexes
let ctx = document.getElementById('productsChart').getContext('2d');
// Constructor for one votable product; registers itself in allProducts.
// `fileExtensions` covers the few non-jpg assets (png/gif).
function Products(name, fileExtensions = 'jpg') {
  this.name = name;
  this.src = `img/${name}.${fileExtensions}`;
  this.views = 0;   // times displayed on screen
  this.clicked = 0; // times voted for
  allProducts.push(this);
}
// Restore totals from a previous session if present; otherwise seed
// the default product catalogue.
let getData = localStorage.getItem('products');
if (getData) {
  let parsedProducts = JSON.parse(getData);
  allProducts = parsedProducts;
} else {
  new Products('bag');
  new Products('banana');
  new Products('bathroom');
  new Products('boots');
  new Products('breakfast');
  new Products('bubblegum');
  new Products('chair');
  new Products('cthulhu');
  new Products('dog-duck');
  new Products('dragon');
  new Products('pen');
  new Products('pet-sweep');
  new Products('scissors');
  new Products('shark');
  new Products('sweep', 'png');
  new Products('tauntaun');
  new Products('unicorn');
  new Products('usb', 'gif');
  new Products('water-can');
  new Products('wine-glass');
}
// Returns a random valid index into allProducts.
function getRandomIndex() {
  return Math.floor(Math.random() * allProducts.length);
}
// Tops uniqueArray up to 6 distinct random product indexes.
// Fix: the original guarded the push with an inner `while`, which can
// only ever run once (pushing makes its condition false immediately)
// and read as an accidental loop — an `if` states the real intent:
// keep drawing until 6 distinct indexes have been collected.
function getUniqueIndex() {
  while (uniqueArray.length < 6) {
    let candidate = getRandomIndex();
    if (!uniqueArray.includes(candidate)) {
      uniqueArray.push(candidate);
    }
  }
}
// Shows the next three queued products in the image slots and counts
// a view for each one displayed.
function renderProducts() {
  getUniqueIndex();
  for (var i = 0; i < picArray.length; i++) {
    var insert = uniqueArray.shift();
    picArray[i].src = allProducts[insert].src;
    picArray[i].title = allProducts[insert].name;
    allProducts[insert].views++;
  }
}
// Click handler for the product section: records the vote, redraws,
// and once the allowed number of clicks is reached stops listening,
// renders the results chart and persists totals to localStorage.
function handleClick(event) {
  if (event.target === myContainer) {
    alert('Must click image');
    // Fix: bail out — the original fell through and counted a click on
    // the background as a real vote, burning one of the allowed clicks.
    return;
  }
  totalClicks++;
  let getClicked = event.target.title;
  for (let i = 0; i < allProducts.length; i++) {
    if (getClicked === allProducts[i].name) {
      allProducts[i].clicked++;
    }
  }
  renderProducts();
  if (totalClicks === clicksAllowed) {
    myContainer.removeEventListener('click', handleClick);
    renderChart();
    let stringifiedProducts = JSON.stringify(allProducts);
    localStorage.setItem('products' , stringifiedProducts);
  }
}
// Initial draw of the three product images.
renderProducts();
// Builds the results bar chart (clicks vs. views per product).
function renderChart() {
  let productClicks = [];
  let productNames = [];
  let productViews = [];
  // Flatten the product objects into the parallel arrays Chart.js expects.
  for (let i = 0; i < allProducts.length; i++) {
    productClicks.push(allProducts[i].clicked);
    productNames.push(allProducts[i].name);
    productViews.push(allProducts[i].views);
  }
  // Fix: removed a leftover debug console.log from the loop, and moved
  // `responsive` inside `options`, where Chart.js reads configuration
  // (as a top-level key it was silently ignored).
  let productsChart = new Chart(ctx, {
    type: 'bar',
    data: {
      labels: productNames,
      datasets: [{
        label: 'Clicks',
        data: productClicks,
        backgroundColor: 'rgba(107, 159, 255, 1)',
        borderColor: 'rgba(5, 93, 255, 1)',
        borderWidth: 5
      },
      {
        label: 'Views',
        data: productViews,
        backgroundColor: 'rgba(37, 223, 17, 0.57)',
        borderColor: 'rgba(47, 165, 34, 1)',
        borderWidth: 5
      }]
    },
    options: {
      responsive: false,
      scales: {
        yAxes: [{
          ticks: {
            beginAtZero: true
          }
        }]
      }
    }
  });
}
// Start listening for votes.
myContainer.addEventListener('click', handleClick);
|
(window["webpackJsonp"]=window["webpackJsonp"]||[]).push([["chunk-2d0d0645"],{"687d":function(e,t,n){"use strict";n.r(t),n.d(t,"conf",function(){return a}),n.d(t,"language",function(){return o});var a={comments:{blockComment:["\x3c!--","--\x3e"]},brackets:[["<",">"]],autoClosingPairs:[{open:"<",close:">"},{open:"'",close:"'"},{open:'"',close:'"'}],surroundingPairs:[{open:"<",close:">"},{open:"'",close:"'"},{open:'"',close:'"'}]},o={defaultToken:"",tokenPostfix:".xml",ignoreCase:!0,qualifiedName:/(?:[\w\.\-]+:)?[\w\.\-]+/,tokenizer:{root:[[/[^<&]+/,""],{include:"@whitespace"},[/(<)(@qualifiedName)/,[{token:"delimiter"},{token:"tag",next:"@tag"}]],[/(<\/)(@qualifiedName)(\s*)(>)/,[{token:"delimiter"},{token:"tag"},"",{token:"delimiter"}]],[/(<\?)(@qualifiedName)/,[{token:"delimiter"},{token:"metatag",next:"@tag"}]],[/(<\!)(@qualifiedName)/,[{token:"delimiter"},{token:"metatag",next:"@tag"}]],[/<\!\[CDATA\[/,{token:"delimiter.cdata",next:"@cdata"}],[/&\w+;/,"string.escape"]],cdata:[[/[^\]]+/,""],[/\]\]>/,{token:"delimiter.cdata",next:"@pop"}],[/\]/,""]],tag:[[/[ \t\r\n]+/,""],[/(@qualifiedName)(\s*=\s*)("[^"]*"|'[^']*')/,["attribute.name","","attribute.value"]],[/(@qualifiedName)(\s*=\s*)("[^">?\/]*|'[^'>?\/]*)(?=[\?\/]\>)/,["attribute.name","","attribute.value"]],[/(@qualifiedName)(\s*=\s*)("[^">]*|'[^'>]*)/,["attribute.name","","attribute.value"]],[/@qualifiedName/,"attribute.name"],[/\?>/,{token:"delimiter",next:"@pop"}],[/(\/)(>)/,[{token:"tag"},{token:"delimiter",next:"@pop"}]],[/>/,{token:"delimiter",next:"@pop"}]],whitespace:[[/[ \t\r\n]+/,""],[/<!--/,{token:"comment",next:"@comment"}]],comment:[[/[^<\-]+/,"comment.content"],[/-->/,{token:"comment",next:"@pop"}],[/<!--/,"comment.content.invalid"],[/[<\-]/,"comment.content"]]}}}}]);
//# sourceMappingURL=chunk-2d0d0645.ca0ac47b.js.map
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Loss operations for use in neural networks.
Note: All the losses are added to the `GraphKeys.LOSSES` collection.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.framework.python.ops import add_arg_scope
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import nn_ops
from tensorflow.python.util.deprecation import deprecated
from tensorflow.python.util.deprecation import deprecated_args
from tensorflow.python.util.deprecation import deprecated_argument_lookup
__all__ = [
"absolute_difference", "add_loss", "cosine_distance",
"compute_weighted_loss", "get_losses", "get_regularization_losses",
"get_total_loss", "hinge_loss", "log_loss", "mean_pairwise_squared_error",
"mean_squared_error", "sigmoid_cross_entropy", "softmax_cross_entropy",
"sparse_softmax_cross_entropy"
]
def _scale_losses(losses, weights):
  """Reduces `losses`, applies `weights`, and returns the scalar sum.

  `losses` is summed (tf.reduce_sum) over its trailing dimensions until
  its rank matches that of `weights`; the reduced tensor is then
  multiplied element-wise by `weights` and summed to a scalar.
  Conceptually this is the same as tiling `weights` up to the shape of
  `losses`, multiplying element-wise, and summing everything.

  Args:
    losses: A `Tensor` of size [batch_size, d1, ... dN].
    weights: A `Tensor` of size [1], [batch_size] or
      [batch_size, d1, ... dN].

  Returns:
    A scalar tf.float32 `Tensor` whose value is the sum of the scaled
    `losses`.
  """
  # Sum away the trailing dimensions of `losses` that `weights` lacks.
  first_trailing_axis = max(0, weights.get_shape().ndims)
  trailing_axes = list(range(first_trailing_axis, losses.get_shape().ndims))
  summed = math_ops.reduce_sum(losses, axis=trailing_axes)
  # Weight the per-example sums, then collapse to a scalar.
  return math_ops.reduce_sum(math_ops.multiply(summed, weights))
def _safe_mean(losses, num_present):
  """Mean of `losses`, defined as zero when `num_present` is zero.

  Args:
    losses: A tensor of individual loss measurements.
    num_present: The number of measurable losses in the tensor.

  Returns:
    A scalar: sum(losses) / num_present, or zero if `num_present` is 0.
  """
  # div_no_nan yields 0 instead of NaN/Inf when the divisor is zero.
  return math_ops.div_no_nan(
      math_ops.reduce_sum(losses), num_present, name="value")
@deprecated("2016-12-30", "Use tf.losses.compute_weighted_loss instead.")
def compute_weighted_loss(losses, weights=1.0, scope=None):
  """Computes the weighted mean loss and registers it in the LOSSES collection.

  Args:
    losses: A tensor of size [batch_size, d1, ... dN].
    weights: A tensor of size [1] or [batch_size, d1, ... dK] where K < N.
    scope: the scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` that returns the weighted loss, cast back to the
    dtype of `losses`. As a side effect the result is added to the
    `GraphKeys.LOSSES` collection via `add_loss`.

  Raises:
    ValueError: If `weights` is `None` or the shape is not compatible with
      `losses`, or if the number of dimensions (rank) of either `losses` or
      `weights` is missing.
  """
  with ops.name_scope(scope, "weighted_loss", [losses, weights]):
    losses = ops.convert_to_tensor(losses)
    # Remember the caller's dtype; all math below is done in float32.
    input_dtype = losses.dtype
    losses = math_ops.cast(losses, dtypes.float32)
    weights = math_ops.cast(ops.convert_to_tensor(weights), dtypes.float32)
    # Static ranks are required by _scale_losses/_num_present below.
    if losses.get_shape().ndims is None:
      raise ValueError("losses.get_shape().ndims cannot be None")
    weights_shape = weights.get_shape()
    if weights_shape.ndims is None:
      raise ValueError("weights.get_shape().ndims cannot be None")
    # Drop a trailing singleton axis (e.g. [batch_size, 1] -> [batch_size]).
    if weights_shape.ndims > 1 and weights_shape.dims[-1].is_compatible_with(1):
      weights = array_ops.squeeze(weights, [-1])
    # Weighted sum divided by the number of non-zero-weighted elements.
    total_loss = _scale_losses(losses, weights)
    num_present = _num_present(losses, weights)
    mean_loss = _safe_mean(total_loss, num_present)
    # convert the result back to the input type
    mean_loss = math_ops.cast(mean_loss, input_dtype)
    add_loss(mean_loss)
    return mean_loss
def _num_present(losses, weights, per_batch=False):
  """Computes the number of elements in the loss function induced by `weights`.

  A given weights tensor induces different numbers of usable elements in the
  `losses` tensor. The `weights` tensor is broadcast across `losses` for all
  possible dimensions. For example, if `losses` is a tensor of dimension
  [4, 5, 6, 3] and `weights` is a tensor of size [4, 5], then `weights` is, in
  effect, tiled to match the size of `losses`. Following this effective tile,
  the total number of present elements is the number of non-zero weights.

  Args:
    losses: A tensor of size [batch_size, d1, ... dN].
    weights: A tensor of size [1] or [batch_size, d1, ... dK] where K < N.
    per_batch: Whether to return the number of elements per batch or as a sum
      total.

  Returns:
    The number of present (non-zero) elements in the losses tensor. If
    `per_batch` is True, the value is returned as a tensor of size
    [batch_size]. Otherwise, a single scalar tensor is returned.
  """
  # If weights is a scalar, its easy to compute:
  if weights.get_shape().ndims == 0:
    # batch_size as a scalar tensor (first dimension of `losses`).
    batch_size = array_ops.reshape(
        array_ops.slice(array_ops.shape(losses), [0], [1]), [])
    # Elements per batch sample = total elements / batch size.
    num_per_batch = math_ops.div(
        math_ops.cast(array_ops.size(losses), dtypes.float32),
        math_ops.cast(batch_size, dtypes.float32))
    # A zero scalar weight masks out everything.
    num_per_batch = array_ops.where(
        math_ops.equal(weights, 0), 0.0, num_per_batch)
    # Tile the scalar count into a [batch_size] vector.
    num_per_batch = math_ops.multiply(
        array_ops.ones(array_ops.reshape(batch_size, [1])), num_per_batch)
    return num_per_batch if per_batch else math_ops.reduce_sum(num_per_batch)
  # First, count the number of nonzero weights:
  if weights.get_shape().ndims >= 1:
    # Reduce over all non-batch axes of `weights`.
    axis = list(range(1, weights.get_shape().ndims))
    num_nonzero_per_batch = math_ops.reduce_sum(
        math_ops.cast(math_ops.not_equal(weights, 0), dtypes.float32),
        axis=axis)
  # Next, determine the number of elements that weights would broadcast to:
  # product of the trailing `losses` dims not covered by `weights`.
  broadcast_dims = array_ops.slice(
      array_ops.shape(losses), [weights.get_shape().ndims], [-1])
  num_to_broadcast = math_ops.cast(math_ops.reduce_prod(broadcast_dims),
                                   dtypes.float32)
  num_per_batch = math_ops.multiply(num_nonzero_per_batch, num_to_broadcast)
  return num_per_batch if per_batch else math_ops.reduce_sum(num_per_batch)
@deprecated("2016-12-30", "Use tf.losses.add_loss instead.")
@add_arg_scope
def add_loss(loss, loss_collection=ops.GraphKeys.LOSSES):
  """Registers an externally defined loss in a graph collection.

  Args:
    loss: A loss `Tensor`.
    loss_collection: Optional collection to add the loss to; when falsy,
      the loss is not registered anywhere.
  """
  if not loss_collection:
    return
  ops.add_to_collection(loss_collection, loss)
@deprecated("2016-12-30", "Use tf.losses.get_losses instead.")
def get_losses(scope=None, loss_collection=ops.GraphKeys.LOSSES):
  """Returns the losses registered in `loss_collection`.

  Args:
    scope: an optional scope for filtering the losses to return.
    loss_collection: Optional losses collection.

  Returns:
    A list of loss tensors found in the collection (filtered by `scope`).
  """
  collected = ops.get_collection(loss_collection, scope)
  return collected
@deprecated("2016-12-30", "Use tf.losses.get_regularization_losses instead.")
def get_regularization_losses(scope=None):
  """Returns the tensors stored in the REGULARIZATION_LOSSES collection.

  Args:
    scope: an optional scope for filtering the losses to return.

  Returns:
    A list of regularization losses as Tensors.
  """
  regularization_collection = ops.GraphKeys.REGULARIZATION_LOSSES
  return ops.get_collection(regularization_collection, scope)
@deprecated("2016-12-30", "Use tf.losses.get_total_loss instead.")
def get_total_loss(add_regularization_losses=True, name="total_loss"):
  """Returns a tensor whose value represents the total loss.

  Sums every tensor in the LOSSES collection and, optionally, every tensor
  in the REGULARIZATION_LOSSES collection.

  Args:
    add_regularization_losses: A boolean indicating whether or not to use the
      regularization losses in the sum.
    name: The name of the returned tensor.

  Returns:
    A `Tensor` whose value represents the total loss.

  Raises:
    ValueError: if `losses` is not iterable.
  """
  all_losses = list(get_losses())
  if add_regularization_losses:
    all_losses = all_losses + get_regularization_losses()
  return math_ops.add_n(all_losses, name=name)
@deprecated("2016-12-30", "Use tf.losses.absolute_difference instead.")
def absolute_difference(predictions, labels=None, weights=1.0, scope=None):
  """Adds an Absolute Difference (L1) loss to the training procedure.

  `weights` scales the loss: a scalar scales the whole loss, a [batch_size]
  vector rescales each sample, and a tensor shaped like `predictions`
  rescales each measurable element individually.

  Args:
    predictions: The predicted outputs.
    labels: The ground truth output tensor, same dimensions as 'predictions'.
    weights: Coefficients for the loss a scalar, a tensor of shape
      [batch_size] or a tensor whose shape matches `predictions`.
    scope: The scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the loss value.

  Raises:
    ValueError: If the shape of `predictions` doesn't match that of `labels` or
      if the shape of `weights` is invalid.
  """
  with ops.name_scope(scope, "absolute_difference",
                      [predictions, labels, weights]) as scope:
    predictions.get_shape().assert_is_compatible_with(labels.get_shape())
    labels = math_ops.cast(labels, dtypes.float32)
    predictions = math_ops.cast(predictions, dtypes.float32)
    diff = math_ops.subtract(predictions, labels)
    return compute_weighted_loss(math_ops.abs(diff), weights, scope=scope)
@deprecated("2016-12-30",
            "Use tf.losses.sigmoid_cross_entropy instead. Note that the order "
            "of the predictions and labels arguments has been changed.")
def sigmoid_cross_entropy(logits,
                          multi_class_labels,
                          weights=1.0,
                          label_smoothing=0,
                          scope=None):
  """Creates a cross-entropy loss using tf.nn.sigmoid_cross_entropy_with_logits.

  `weights` scales the loss: a scalar scales the whole loss; a [batch_size]
  tensor applies a weight to each sample.

  With nonzero `label_smoothing`, labels are pulled towards 1/2:
      new_labels = labels * (1 - label_smoothing) + 0.5 * label_smoothing

  Args:
    logits: [batch_size, num_classes] logits outputs of the network .
    multi_class_labels: [batch_size, num_classes] labels in (0, 1).
    weights: Coefficients for the loss. The tensor must be a scalar, a tensor of
      shape [batch_size] or shape [batch_size, num_classes].
    label_smoothing: If greater than 0 then smooth the labels.
    scope: The scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the loss value.

  Raises:
    ValueError: If the shape of `logits` doesn't match that of
      `multi_class_labels` or if the shape of `weights` is invalid, or if
      `weights` is None.
  """
  with ops.name_scope(scope, "sigmoid_cross_entropy_loss",
                      [logits, multi_class_labels, weights]) as scope:
    logits.get_shape().assert_is_compatible_with(multi_class_labels.get_shape())
    multi_class_labels = math_ops.cast(multi_class_labels, logits.dtype)
    if label_smoothing > 0:
      scaled = multi_class_labels * (1 - label_smoothing)
      multi_class_labels = scaled + 0.5 * label_smoothing
    per_element = nn.sigmoid_cross_entropy_with_logits(
        labels=multi_class_labels, logits=logits, name="xentropy")
    return compute_weighted_loss(per_element, weights, scope=scope)
@deprecated("2016-12-30",
            "Use tf.losses.softmax_cross_entropy instead. Note that the order "
            "of the logits and labels arguments has been changed.")
def softmax_cross_entropy(logits,
                          onehot_labels,
                          weights=1.0,
                          label_smoothing=0,
                          scope=None):
  """Creates a cross-entropy loss using tf.nn.softmax_cross_entropy_with_logits.

  `weights` scales the loss: a scalar scales the whole loss; a [batch_size]
  tensor applies a weight to each sample.

  With nonzero `label_smoothing`, labels are pulled towards 1/num_classes:
      new_labels = labels * (1 - label_smoothing) + label_smoothing / num_classes

  Args:
    logits: [batch_size, num_classes] logits outputs of the network .
    onehot_labels: [batch_size, num_classes] one-hot-encoded labels.
    weights: Coefficients for the loss. The tensor must be a scalar or a tensor
      of shape [batch_size].
    label_smoothing: If greater than 0 then smooth the labels.
    scope: the scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the mean loss value.

  Raises:
    ValueError: If the shape of `logits` doesn't match that of `onehot_labels`
      or if the shape of `weights` is invalid or if `weights` is None.
  """
  with ops.name_scope(scope, "softmax_cross_entropy_loss",
                      [logits, onehot_labels, weights]) as scope:
    logits.get_shape().assert_is_compatible_with(onehot_labels.get_shape())
    onehot_labels = math_ops.cast(onehot_labels, logits.dtype)
    if label_smoothing > 0:
      num_classes = math_ops.cast(
          array_ops.shape(onehot_labels)[1], logits.dtype)
      on_value = 1.0 - label_smoothing
      off_value = label_smoothing / num_classes
      onehot_labels = onehot_labels * on_value + off_value
    per_sample = nn.softmax_cross_entropy_with_logits(
        labels=onehot_labels, logits=logits, name="xentropy")
    return compute_weighted_loss(per_sample, weights, scope=scope)
@deprecated("2016-12-30",
            "Use tf.losses.sparse_softmax_cross_entropy instead. Note that "
            "the order of the logits and labels arguments has been changed.")
def sparse_softmax_cross_entropy(logits, labels, weights=1.0, scope=None):
  """Cross-entropy loss using `tf.nn.sparse_softmax_cross_entropy_with_logits`.

  `weights` scales the loss: a scalar scales the whole loss; a [batch_size]
  tensor applies a weight to each sample.

  Args:
    logits: [batch_size, num_classes] logits outputs of the network .
    labels: [batch_size, 1] or [batch_size] labels of dtype `int32` or `int64`
      in the range `[0, num_classes)`.
    weights: Coefficients for the loss. The tensor must be a scalar or a tensor
      of shape [batch_size] or [batch_size, 1].
    scope: the scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the mean loss value.

  Raises:
    ValueError: If the shapes of `logits`, `labels`, and `weights` are
      incompatible, or if `weights` is None.
  """
  with ops.name_scope(scope, "sparse_softmax_cross_entropy_loss",
                      [logits, labels, weights]) as scope:
    # Collapse an optional trailing singleton axis so labels are rank 1.
    batch_size = array_ops.shape(labels)[0]
    labels = array_ops.reshape(labels, shape=[batch_size])
    per_sample = nn.sparse_softmax_cross_entropy_with_logits(
        labels=labels, logits=logits, name="xentropy")
    return compute_weighted_loss(per_sample, weights, scope=scope)
@deprecated("2016-12-30",
            "Use tf.losses.log_loss instead. Note that the order of the "
            "predictions and labels arguments has been changed.")
def log_loss(predictions, labels=None, weights=1.0, epsilon=1e-7, scope=None):
  """Adds a Log Loss term to the training procedure.

  `weights` scales the loss: a scalar scales the whole loss, a [batch_size]
  vector rescales each sample, and a tensor shaped like `predictions`
  rescales each measurable element individually.

  Args:
    predictions: The predicted outputs.
    labels: The ground truth output tensor, same dimensions as 'predictions'.
    weights: Coefficients for the loss a scalar, a tensor of shape
      [batch_size] or a tensor whose shape matches `predictions`.
    epsilon: A small increment to add to avoid taking a log of zero.
    scope: The scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the loss value.

  Raises:
    ValueError: If the shape of `predictions` doesn't match that of `labels` or
      if the shape of `weights` is invalid.
  """
  with ops.name_scope(scope, "log_loss",
                      [predictions, labels, weights]) as scope:
    predictions.get_shape().assert_is_compatible_with(labels.get_shape())
    labels = math_ops.cast(labels, dtypes.float32)
    predictions = math_ops.cast(predictions, dtypes.float32)
    # Standard binary cross-entropy: -y*log(p) - (1-y)*log(1-p).
    positive_term = math_ops.multiply(
        labels, math_ops.log(predictions + epsilon))
    negative_term = math_ops.multiply(
        (1 - labels), math_ops.log(1 - predictions + epsilon))
    per_element = -positive_term - negative_term
    return compute_weighted_loss(per_element, weights, scope=scope)
@deprecated("2016-12-30",
            "Use tf.losses.hinge_loss instead. Note that the order of the "
            "logits and labels arguments has been changed, and to stay "
            "unweighted, reduction=Reduction.NONE")
def hinge_loss(logits, labels=None, scope=None):
  """Returns the element-wise hinge loss tensor.

  Args:
    logits: The logits, a float tensor. Note that logits are assumed to be
      unbounded and 0-centered. A value > 0 (resp. < 0) is considered a positive
      (resp. negative) binary prediction.
    labels: The ground truth output tensor. Its shape should match the shape of
      logits. The values of the tensor are expected to be 0.0 or 1.0. Internally
      the {0,1} labels are converted to {-1,1} when calculating the hinge loss.
    scope: The scope for the operations performed in computing the loss.

  Returns:
    An unweighted `Tensor` of same shape as `logits` and `labels` holding the
    per-element loss values max(0, 1 - y*logits).

  Raises:
    ValueError: If the shapes of `logits` and `labels` don't match.
  """
  with ops.name_scope(scope, "hinge_loss", [logits, labels]) as scope:
    logits.get_shape().assert_is_compatible_with(labels.get_shape())
    # Map the {0, 1} labels onto {-1, 1} (as floats).
    labels = math_ops.cast(labels, dtypes.float32)
    ones = array_ops.ones_like(labels)
    signed_labels = math_ops.subtract(2 * labels, ones)
    margin = math_ops.subtract(ones, math_ops.multiply(signed_labels, logits))
    return nn_ops.relu(margin)
@deprecated("2016-12-30", "Use tf.losses.mean_squared_error instead.")
def mean_squared_error(predictions, labels=None, weights=1.0, scope=None):
  """Adds a Sum-of-Squares loss to the training procedure.

  `weights` scales the loss: a scalar scales the whole loss, a [batch_size]
  vector rescales each sample, and a tensor shaped like `predictions`
  rescales each measurable element individually.

  Args:
    predictions: The predicted outputs.
    labels: The ground truth output tensor, same dimensions as 'predictions'.
    weights: Coefficients for the loss a scalar, a tensor of shape
      [batch_size] or a tensor whose shape matches `predictions`.
    scope: The scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the loss value.

  Raises:
    ValueError: If the shape of `predictions` doesn't match that of `labels` or
      if the shape of `weights` is invalid.
  """
  with ops.name_scope(scope, "mean_squared_error",
                      [predictions, labels, weights]) as scope:
    predictions.get_shape().assert_is_compatible_with(labels.get_shape())
    labels = math_ops.cast(labels, dtypes.float32)
    predictions = math_ops.cast(predictions, dtypes.float32)
    squared = math_ops.squared_difference(predictions, labels)
    return compute_weighted_loss(squared, weights, scope=scope)
@deprecated("2016-12-30",
            "Use tf.losses.mean_pairwise_squared_error instead. Note that the "
            "order of the predictions and labels arguments has been changed.")
def mean_pairwise_squared_error(predictions,
                                labels=None,
                                weights=1.0,
                                scope=None):
  """Adds a pairwise-errors-squared loss to the training procedure.

  Unlike `mean_squared_error`, which is a measure of the differences between
  corresponding elements of `predictions` and `labels`,
  `mean_pairwise_squared_error` is a measure of the differences between pairs of
  corresponding elements of `predictions` and `labels`.

  For example, if `labels`=[a, b, c] and `predictions`=[x, y, z], there are
  three pairs of differences are summed to compute the loss:
    loss = [ ((a-b) - (x-y)).^2 + ((a-c) - (x-z)).^2 + ((b-c) - (y-z)).^2 ] / 3

  Note that since the inputs are of size [batch_size, d0, ... dN], the
  corresponding pairs are computed within each batch sample but not across
  samples within a batch. For example, if `predictions` represents a batch of
  16 grayscale images of dimension [batch_size, 100, 200], then the set of pairs
  is drawn from each image, but not across images.

  `weights` acts as a coefficient for the loss. If a scalar is provided, then
  the loss is simply scaled by the given value. If `weights` is a tensor of size
  [batch_size], then the total loss for each sample of the batch is rescaled
  by the corresponding element in the `weights` vector.

  Args:
    predictions: The predicted outputs, a tensor of size [batch_size, d0, .. dN]
      where N+1 is the total number of dimensions in `predictions`.
    labels: The ground truth output tensor, whose shape must match the shape of
      the `predictions` tensor.
    weights: Coefficients for the loss a scalar, a tensor of shape [batch_size]
      or a tensor whose shape matches `predictions`.
    scope: The scope for the operations performed in computing the loss.

  Returns:
    A scalar `Tensor` representing the loss value.

  Raises:
    ValueError: If the shape of `predictions` doesn't match that of `labels` or
      if the shape of `weights` is invalid.
  """
  with ops.name_scope(scope, "mean_pairwise_squared_error",
                      [predictions, labels, weights]) as scope:
    predictions.get_shape().assert_is_compatible_with(labels.get_shape())
    predictions = math_ops.cast(predictions, dtypes.float32)
    labels = math_ops.cast(labels, dtypes.float32)
    weights = math_ops.cast(ops.convert_to_tensor(weights), dtypes.float32)
    diffs = math_ops.subtract(predictions, labels)
    # Need to verify here since the function doesn't use compute_weighted_loss
    if diffs.get_shape().ndims is None:
      raise ValueError("diffs.get_shape().ndims cannot be None")
    if weights.get_shape().ndims is None:
      raise ValueError("weights.get_shape().ndims cannot be None")
    # All non-batch axes are reduced; each batch sample yields one loss term.
    axis = list(range(1, diffs.get_shape().ndims))
    sum_squares_diff_per_batch = math_ops.reduce_sum(
        math_ops.square(diffs), axis=axis)
    num_present_per_batch = _num_present(diffs, weights, per_batch=True)
    # The pairwise sum is computed via the identity
    #   sum_{i,j} (d_i - d_j)^2 = 2n * sum_i d_i^2 - 2 * (sum_i d_i)^2,
    # split below into term1 and term2 (each normalized by n via div_no_nan).
    term1 = 2.0 * math_ops.div_no_nan(
        sum_squares_diff_per_batch, num_present_per_batch, name="value")
    sum_diff = math_ops.reduce_sum(diffs, axis=axis)
    term2 = 2.0 * math_ops.div_no_nan(
        math_ops.square(sum_diff),
        math_ops.square(num_present_per_batch),
        name="value")
    loss = _scale_losses(term1 - term2, weights)
    # If no element carries a non-zero weight, report a zero loss.
    mean_loss = array_ops.where(
        math_ops.reduce_sum(num_present_per_batch) > 0,
        loss,
        array_ops.zeros_like(loss),
        name="value")
    add_loss(mean_loss)
    return mean_loss
@deprecated("2016-12-30", "Use tf.losses.cosine_distance instead.")
@deprecated_args(None, "dim is deprecated, use axis instead", "dim")
def cosine_distance(predictions,
                    labels=None,
                    axis=None,
                    weights=1.0,
                    scope=None,
                    dim=None):
  """Adds a cosine-distance loss to the training procedure.

  Note that the function assumes that `predictions` and `labels` are already
  unit-normalized, so the distance reduces to 1 - dot(predictions, labels).

  Args:
    predictions: An arbitrary matrix.
    labels: A `Tensor` whose shape matches 'predictions'
    axis: The dimension along which the cosine distance is computed.
    weights: Coefficients for the loss a scalar, a tensor of shape
      [batch_size] or a tensor whose shape matches `predictions`.
    scope: The scope for the operations performed in computing the loss.
    dim: The old (deprecated) name for `axis`.

  Returns:
    A scalar `Tensor` representing the loss value.

  Raises:
    ValueError: If `predictions` shape doesn't match `labels` shape, or
      `weights` is `None`.
  """
  # Accept the legacy `dim` spelling but require one of the two to be set.
  axis = deprecated_argument_lookup("axis", axis, "dim", dim)
  if axis is None:
    raise ValueError("You must specify 'axis'.")
  with ops.name_scope(scope, "cosine_distance_loss",
                      [predictions, labels, weights]) as scope:
    predictions.get_shape().assert_is_compatible_with(labels.get_shape())
    labels = math_ops.cast(labels, dtypes.float32)
    predictions = math_ops.cast(predictions, dtypes.float32)
    radial_diffs = math_ops.multiply(predictions, labels)
    losses = 1 - math_ops.reduce_sum(radial_diffs, axis=[axis])
    return compute_weighted_loss(losses, weights, scope=scope)
|
"""Crie um programa que leia nome e duas notas de vários alunos e guarde tudo em uma lista composta. No final, mostre
um boletim contendo a média de cada um e permita que o usuário possa mostrar as notas de cada aluno individualmente."""
# Each entry in `alunos` is [nome, nota1, nota2, media].
alunos = list()
while True:
    cadastro = list()
    nome = str(input('Digite o nome do aluno: '))
    cadastro.append(nome)
    for n in range(1, 3):
        nota = float(input(f'Digite a {n} nota: '))
        cadastro.append(nota)
    media = (cadastro[1] + cadastro[2]) / 2
    cadastro.append(media)
    alunos.append(cadastro[:])
    resp = ' '
    # BUG FIX: the original tested `resp not in 'NS'`, which accepted the
    # empty string ('' is a substring of every string) and the literal 'NS'.
    # Compare against an explicit tuple of valid answers instead.
    while resp not in ('S', 'N'):
        resp = str(input('Quer continuar? [S/N] ')).strip().upper()
    if resp == 'N':
        break
print('-=' * 30)
print(f'{"No.":<4}{"NOME":<10}{"MÉDIA":>8}')
print('-' * 30)
for p, i in enumerate(alunos):
    print(f'{p:<4} {i[0]:<10} {i[2]:>8.1F}')
print('-' * 30)
while True:
    resp = int(input('Mostrar notas de qual aluno? (999 interrompe): '))
    # BUG FIX: the original printed alunos[resp] even when resp == 999,
    # raising IndexError before the loop condition was re-checked.
    if resp == 999:
        break
    print(f'Notas de {alunos[resp][0]} são [{alunos[resp][1]}, {alunos[resp][2]}]')
|
from keras_applications import get_submodules_from_kwargs
from ._common_blocks import Conv2dBn
from ._utils import freeze_model, filter_keras_submodules
from ..backbones.backbones_factory import Backbones
# Keras submodule handles. These stay None until Unet() resolves them via
# get_submodules_from_kwargs and rebinds them with a `global` statement.
backend = None
layers = None
models = None
keras_utils = None
# ---------------------------------------------------------------------
# Utility functions
# ---------------------------------------------------------------------
def get_submodules():
    """Returns the currently bound keras submodules as a kwargs dict.

    The values are the module-level handles set by ``Unet``; before ``Unet``
    runs they are all ``None``.
    """
    return dict(
        backend=backend,
        models=models,
        layers=layers,
        utils=keras_utils,
    )
# ---------------------------------------------------------------------
# Blocks
# ---------------------------------------------------------------------
def Conv3x3BnReLU(filters, use_batchnorm, name=None):
    """Returns a callable applying a 3x3 same-padding Conv2dBn with ReLU."""
    submodules = get_submodules()

    def wrapper(input_tensor):
        conv_bn = Conv2dBn(
            filters,
            kernel_size=3,
            activation='relu',
            kernel_initializer='he_uniform',
            padding='same',
            use_batchnorm=use_batchnorm,
            name=name,
            **submodules
        )
        return conv_bn(input_tensor)

    return wrapper
def DecoderUpsamplingX2Block(filters, stage, use_batchnorm=False):
    """Decoder block: 2x upsampling, optional skip concat, two Conv3x3BnReLU."""
    prefix = 'decoder_stage{}'.format(stage)
    up_name = prefix + '_upsampling'
    conv1_name = prefix + 'a'
    conv2_name = prefix + 'b'
    concat_name = prefix + '_concat'
    # Channels axis depends on the image data format.
    channel_axis = 3 if backend.image_data_format() == 'channels_last' else 1

    def block(input_tensor, skip=None):
        x = layers.UpSampling2D(size=2, name=up_name)(input_tensor)
        if skip is not None:
            x = layers.Concatenate(axis=channel_axis, name=concat_name)([x, skip])
        x = Conv3x3BnReLU(filters, use_batchnorm, name=conv1_name)(x)
        x = Conv3x3BnReLU(filters, use_batchnorm, name=conv2_name)(x)
        return x

    return block
def DecoderTransposeX2Block(filters, stage, use_batchnorm=False):
    """Decoder block: stride-2 transposed conv, optional skip concat, Conv3x3BnReLU."""
    prefix = 'decoder_stage{}'.format(stage)
    transp_name = prefix + 'a_transpose'
    bn_name = prefix + 'a_bn'
    relu_name = prefix + 'a_relu'
    conv_block_name = prefix + 'b'
    concat_name = prefix + '_concat'
    # Same axis serves both BatchNormalization and Concatenate.
    channel_axis = 3 if backend.image_data_format() == 'channels_last' else 1

    def block(input_tensor, skip=None):
        x = layers.Conv2DTranspose(
            filters,
            kernel_size=(4, 4),
            strides=(2, 2),
            padding='same',
            name=transp_name,
            # Bias is redundant when followed by batch normalization.
            use_bias=not use_batchnorm,
        )(input_tensor)
        if use_batchnorm:
            x = layers.BatchNormalization(axis=channel_axis, name=bn_name)(x)
        x = layers.Activation('relu', name=relu_name)(x)
        if skip is not None:
            x = layers.Concatenate(axis=channel_axis, name=concat_name)([x, skip])
        x = Conv3x3BnReLU(filters, use_batchnorm, name=conv_block_name)(x)
        return x

    return block
# ---------------------------------------------------------------------
# Unet Decoder
# ---------------------------------------------------------------------
def build_unet(
        backbone,
        decoder_block,
        skip_connection_layers,
        decoder_filters=(256, 128, 64, 32, 16),
        n_upsample_blocks=5,
        classes=1,
        activation='sigmoid',
        use_batchnorm=True,
        global_feature=False
):
    """Assembles a Unet model from a backbone encoder and decoder blocks.

    Args:
        backbone: keras model used as the encoder.
        decoder_block: callable factory (filters, stage, use_batchnorm) -> block.
        skip_connection_layers: layer names (str) or indices (int) of the
            backbone outputs used as skip connections, ordered top-down.
        decoder_filters: filters for each decoder stage.
        n_upsample_blocks: number of decoder stages.
        classes: number of output channels of the final conv.
        activation: activation applied after the final conv.
        use_batchnorm: whether decoder conv blocks use batch normalization.
        global_feature: if True, concatenate a globally pooled feature map
            onto the backbone output before decoding.

    Returns:
        ``keras.models.Model`` mapping the backbone input to the segmentation map.
    """
    input_ = backbone.input
    x = backbone.output
    if global_feature:
        concat_axis = 3 if backend.image_data_format() == 'channels_last' else 1
        gf = layers.GlobalAveragePooling2D()(x)
        # NOTE(review): keras Reshape target_shape excludes the batch axis,
        # but gf.shape[0] here is the batch dimension — confirm this reshape
        # (and the single UpSampling2D with default size 2 below) produces a
        # tensor spatially compatible with `x` for the Concatenate.
        gf = layers.Reshape((gf.shape[0],1,1,gf.shape[1]))(gf)
        gf = layers.UpSampling2D()(gf)
        x = layers.Concatenate(axis=concat_axis)([x, gf])
    # extract skip connections
    skips = ([backbone.get_layer(name=i).output if isinstance(i, str)
              else backbone.get_layer(index=i).output for i in skip_connection_layers])
    # add center block if previous operation was maxpooling (for vgg models)
    if isinstance(backbone.layers[-1], layers.MaxPooling2D):
        x = Conv3x3BnReLU(512, use_batchnorm, name='center_block1')(x)
        x = Conv3x3BnReLU(512, use_batchnorm, name='center_block2')(x)
    # building decoder blocks
    for i in range(n_upsample_blocks):
        # Deeper stages get skip connections; shallower ones upsample blindly.
        if i < len(skips):
            skip = skips[i]
        else:
            skip = None
        x = decoder_block(decoder_filters[i], stage=i, use_batchnorm=use_batchnorm)(x, skip)
    # model head (define number of output classes)
    x = layers.Conv2D(
        filters=classes,
        kernel_size=(3, 3),
        padding='same',
        use_bias=True,
        kernel_initializer='glorot_uniform',
        name='final_conv',
    )(x)
    x = layers.Activation(activation, name=activation)(x)
    # create keras model instance
    model = models.Model(input_, x)
    return model
# ---------------------------------------------------------------------
# Unet Model
# ---------------------------------------------------------------------
def Unet(
        backbone_name='vgg16',
        input_shape=(None, None, 3),
        classes=1,
        activation='sigmoid',
        weights=None,
        encoder_weights='imagenet',
        encoder_freeze=False,
        encoder_features='default',
        decoder_block_type='upsampling',
        decoder_filters=(256, 128, 64, 32, 16),
        decoder_use_batchnorm=True,
        global_feature=False,
        **kwargs
):
    """ Unet_ is a fully convolution neural network for image semantic segmentation

    Args:
        backbone_name: name of classification model (without last dense layers) used as feature
            extractor to build segmentation model.
        input_shape: shape of input data/image ``(H, W, C)``, in general
            case you do not need to set ``H`` and ``W`` shapes, just pass ``(None, None, C)`` to make your model be
            able to process images af any size, but ``H`` and ``W`` of input images should be divisible by factor ``32``.
        classes: a number of classes for output (output shape - ``(h, w, classes)``).
        activation: name of one of ``keras.activations`` for last model layer
            (e.g. ``sigmoid``, ``softmax``, ``linear``).
        weights: optional, path to model weights.
        encoder_weights: one of ``None`` (random initialization), ``imagenet`` (pre-training on ImageNet).
        encoder_freeze: if ``True`` set all layers of encoder (backbone model) as non-trainable.
        encoder_features: a list of layer numbers or names starting from top of the model.
            Each of these layers will be concatenated with corresponding decoder block. If ``default`` is used
            layer names are taken from ``DEFAULT_SKIP_CONNECTIONS``.
        decoder_block_type: one of blocks with following layers structure:

            - `upsampling`:  ``UpSampling2D`` -> ``Conv2D`` -> ``Conv2D``
            - `transpose`:   ``Transpose2D`` -> ``Conv2D``

        decoder_filters: list of numbers of ``Conv2D`` layer filters in decoder blocks
        decoder_use_batchnorm: if ``True``, ``BatchNormalisation`` layer between ``Conv2D`` and ``Activation`` layers
            is used.
        global_feature: if ``True``, a globally pooled feature map is concatenated
            onto the encoder output before decoding (see ``build_unet``).

    Returns:
        ``keras.models.Model``: **Unet**

    .. _Unet:
        https://arxiv.org/pdf/1505.04597

    """
    # Bind the keras submodules (backend/layers/models/utils) for the whole
    # module; all block factories above read these globals.
    global backend, layers, models, keras_utils
    submodule_args = filter_keras_submodules(kwargs)
    backend, layers, models, keras_utils = get_submodules_from_kwargs(submodule_args)
    if decoder_block_type == 'upsampling':
        decoder_block = DecoderUpsamplingX2Block
    elif decoder_block_type == 'transpose':
        decoder_block = DecoderTransposeX2Block
    else:
        raise ValueError('Decoder block type should be in ("upsampling", "transpose"). '
                         'Got: {}'.format(decoder_block_type))
    backbone = Backbones.get_backbone(
        backbone_name,
        input_shape=input_shape,
        weights=encoder_weights,
        include_top=False,
        **kwargs,
    )
    if encoder_features == 'default':
        encoder_features = Backbones.get_feature_layers(backbone_name, n=4)
    model = build_unet(
        backbone=backbone,
        decoder_block=decoder_block,
        skip_connection_layers=encoder_features,
        decoder_filters=decoder_filters,
        classes=classes,
        activation=activation,
        # One decoder stage per filter entry.
        n_upsample_blocks=len(decoder_filters),
        use_batchnorm=decoder_use_batchnorm,
        global_feature=global_feature
    )
    # lock encoder weights for fine-tuning
    if encoder_freeze:
        freeze_model(backbone, **kwargs)
    # loading model weights
    if weights is not None:
        model.load_weights(weights)
    return model
|
import { DicomMetadataStore } from '../services/DicomMetadataStore';
// TODO: Use above to inject so dependent datasources don't need to import or
// depend on @ohif/core?
/**
* Factory function that creates a new "Web API" data source.
* A "Web API" data source is any source that fetches data over
* HTTP. This function serves as an "adapter" to wrap those calls
* so that all "Web API" data sources have the same interface and can
* be used interchangeably.
*
* It's worth noting that a single implementation of this interface
* can define different underlying sources for "read" and "write" operations.
*/
/**
 * Builds a "Web API" data source object, filling any operation group the
 * implementation omitted with a safe default.
 */
function create({
  query,
  retrieve,
  store,
  reject,
  retrieveSeriesMetadata,
  deleteStudyMetadataPromise,
  getImageIdsForDisplaySet,
}) {
  // Default query group: identity param mapping, no-op request, identity results.
  const queryFallback = {
    studies: {
      /**
       * @param {string} params.patientName
       * @param {string} params.mrn
       * @param {object} params.studyDate
       * @param {string} params.description
       * @param {string} params.modality
       * @param {string} params.accession
       * @param {string} params.sortBy
       * @param {string} params.sortDirection -
       * @param {number} params.page
       * @param {number} params.resultsPerPage
       */
      mapParams: params => params,
      requestResults: () => {},
      processResults: results => results,
    },
    series: {},
    instances: {},
  };

  const retrieveFallback = { series: {} };

  // Default store throws so a missing implementation fails loudly.
  const storeFallback = {
    dicom: async naturalizedDataset => {
      throw new Error(
        'store.dicom(naturalizedDicom, StudyInstanceUID) not implemented for dataSource.'
      );
    },
  };

  const rejectFallback = {};

  return {
    query: query || queryFallback,
    retrieve: retrieve || retrieveFallback,
    reject: reject || rejectFallback,
    store: store || storeFallback,
    getImageIdsForDisplaySet,
    retrieveSeriesMetadata,
    deleteStudyMetadataPromise,
  };
}
// Public module facade: expose the factory under a stable name.
const IWebApiDataSource = {
  create,
};
export default IWebApiDataSource;
|
#!/usr/bin/python3
import sys
import numpy as np
from scipy.sparse import csr_matrix
from scipy.sparse import lil_matrix
from sklearn import preprocessing
from os import linesep
from threading import Thread
from threading import Lock
import time
import multiprocessing
import config
import db_connection as db_con
import retro_utils as utils
NUM_THREADS = max(2, multiprocessing.cpu_count()-2)
MIN_GROUP_SIZE = 2
def get_adjacency_vector(size, group_name, index_lookup, con, cur, data_type):
    """Build a 0/1 membership vector for one categorial group (a DB column).

    Args:
        size: length of the vector (total number of terms, m').
        group_name: label identifying the column group.
        index_lookup: maps 'group#token' labels to matrix row indices.
        con: open database connection (unused here; kept for call symmetry).
        cur: database cursor used to run the SELECT.
        data_type: 'number' to take values verbatim (NULLs skipped),
            anything else to tokenize text values first.

    Returns:
        numpy array of length ``size`` with 1 at the index of every term
        that occurs in the column.
    """
    # get group elements (elements of column)
    print('get adjacency vector for group:', group_name)
    table_name, column_name = utils.get_column_data_from_label(
        group_name, 'column')
    # construct query; ::varchar casts so numeric columns also come back
    # as strings. NOTE(review): identifiers are string-formatted into the
    # SQL -- they come from the groups file, not user input.
    query = "SELECT %s::varchar FROM %s" % (column_name, table_name)
    # retrieve all elements from the column
    cur.execute(query)
    group_elements = []
    for x in cur.fetchall():
        if data_type == 'number':
            if x[0] is None:
                continue
            group_elements.append(group_name + '#' + x[0])
        else:
            # NOTE(review): unlike the 'number' branch, NULL values are not
            # skipped here -- presumably utils.tokenize handles None; verify.
            group_elements.append(group_name + '#' + utils.tokenize(x[0]))
    # construct vector: mark every term of the group with 1
    vector = np.zeros(size)
    for element in group_elements:
        i = index_lookup[element]
        vector[i] = 1
    return vector
def fill_adjacency_matrix_relational(size, column_names, index_lookup, group,
                                     con, cur, v_P):
    """Build the sparse adjacency matrix of one relational group.

    Runs the group's SQL query and, for every (value1, value2) result pair,
    sets A[i, j] = 1 where i/j are the matrix indices of the two terms.

    Args:
        size: matrix dimension (number of terms).
        column_names: pair of 'table.column' labels (source, target).
        index_lookup: maps term labels to matrix indices.
        group: group description dict; 'query' and 'data_type' are used.
        con: database connection (unused here).
        cur: database cursor.
        v_P: presence vector (currently unused in this function).

    Returns:
        (A, c) where A is the CSR adjacency matrix and c = c_in + c_out
        marks the terms that participate in the relation.
    """
    query = group['query']
    # get group elements
    table_name1, column_name1 = utils.get_column_data_from_label(
        column_names[0], 'column')
    table_name2, column_name2 = utils.get_column_data_from_label(
        column_names[1], 'column')
    # retrieve all relation elements from the database
    cur.execute(query)
    group_elements = cur.fetchall()
    # construct matrix and count vector; LIL is efficient for incremental
    # writes and is converted to CSR on return
    A = lil_matrix((size, size))
    c_out = np.zeros(size)
    c_in = np.zeros(size)
    for (text_value1, text_value2) in group_elements:
        if group['data_type'][0] == 'string':
            text_value1 = utils.tokenize(text_value1)
        if group['data_type'][1] == 'string':
            text_value2 = utils.tokenize(text_value2)
        i = index_lookup[utils.get_label(column_names[0], text_value1)]
        j = index_lookup[utils.get_label(column_names[1], text_value2)]
        A[i, j] = 1
        # NOTE(review): despite the c_in/c_out naming these record
        # membership (set to 1) rather than counts -- confirm intended.
        c_out[i] = 1
        c_in[j] = 1
    return csr_matrix(A), (c_in + c_out)
def create_adjacency_matrices(term_list, groups, con,
                              cur, v_P, conf):
    """Create adjacency/weight structures for all categorial and relational groups.

    Args:
        term_list: ordered list of term labels (defines matrix indices).
        groups: dict mapping group keys to lists of group descriptions.
        con: database connection.
        cur: database cursor.
        v_P: presence vector (forwarded to the relational builder).
        conf: configuration dict (currently unused here).

    Returns:
        (A_cat, S, c, rel_key_pairs):
            A_cat: dict of categorial adjacency vectors.
            S: dict of L1 row-normalized relational weight matrices,
               with one extra entry per inverse (transposed) relation.
            c: per-term accumulated relation participation.
            rel_key_pairs: set of (key, inverse_key) pairs.
    """
    A_rel = dict()  # dict of (sparse) matrices or adj_vecs
    A_cat = dict()
    S = dict()  # dict of (sparse) matrices
    rel_key_pairs = set()  # set of pairs of inverse relations
    size = len(term_list)  # m' size of M0
    c = np.zeros(size)  # store for each vector how many relations it has
    # get lookup from matrix indices to terms
    index_lookup = utils.construct_index_lookup(term_list)
    # construct adjacency matrices from group information and db
    # relations of vectors were no word embeddings exist are missing in groups??
    for key in groups:
        for group in groups[key]:
            # NOTE(review): for relational groups 'elements' appears to be a
            # number (compared to MIN_GROUP_SIZE below), while for categorial
            # groups it is a collection (len() is taken) -- confirm.
            element_count = len(group['elements']) + len(group['inferred_elements']) \
                if group['type'] == 'categorial' else group['elements']
            print('Process group %s:%s ...' %
                  (key, group['name']), '(size: %d)' % element_count)
            matrix_key = '%s:%s' % (key, group['name'])
            suffix = ''
            # disambiguate duplicate keys by appending the group count
            if matrix_key in A_rel:
                suffix = str(len(groups[key]))
                matrix_key += suffix
            if group['type'] == 'categorial':
                A_cat[matrix_key] = get_adjacency_vector(
                    size, key, index_lookup, con, cur, group['data_type'])
                c += A_cat[matrix_key]
            if group['type'] == 'relational':
                if group['elements'] < MIN_GROUP_SIZE:
                    continue  # group is too small
                c1_t, c1_c, c2_t, c2_c = utils.get_column_data_from_label(
                    key, 'relation')
                column1 = '%s.%s' % (c1_t, c1_c)
                column2 = '%s.%s' % (c2_t, c2_c)
                A_rel[matrix_key], c_inc = fill_adjacency_matrix_relational(
                    size, (column1, column2), index_lookup, group, con, cur, v_P)
                # the inverse relation is the transpose of the adjacency matrix
                reverse_key = '%s.%s~%s.%s:%s' % (
                    c2_t, c2_c, c1_t, c1_c, group['name']) + suffix
                A_rel[reverse_key] = A_rel[matrix_key].T
                # L1 row-normalize to obtain transition-style weight matrices
                S[matrix_key] = preprocessing.normalize(
                    A_rel[matrix_key], norm='l1')
                S[reverse_key] = preprocessing.normalize(
                    A_rel[reverse_key], norm='l1')
                rel_key_pairs.add((matrix_key, reverse_key))
                c += c_inc  # c_inc = c_in + c_out
    return A_cat, S, c, rel_key_pairs
def create_M0(all_terms, present_vectors, dim, conf):
    """Assemble the initial embedding matrix M0 and its presence vector.

    For every (group key, term) pair a row label is generated. Terms with
    a known embedding -- unless their group is forced to zero via
    conf['M0_ZERO_COLUMNS'] -- contribute their vector; all other rows
    are zero.

    Returns:
        (term_list, M0, presence_vector) where presence_vector[i] is 1
        iff row i of M0 holds a real embedding.
    """
    term_list = []
    rows = []
    presence = []
    zero_groups = conf['M0_ZERO_COLUMNS']
    for key in all_terms:
        for term in all_terms[key]:
            term_list.append(utils.get_label(key, term))
            # short-circuit keeps present_vectors[key] untouched for
            # groups that are forced to zero
            if key not in zero_groups and term in present_vectors[key]:
                presence.append(1)
                rows.append(present_vectors[key][term])
            else:
                presence.append(0)
                rows.append(np.zeros(dim))
    return term_list, np.array(rows), np.array(presence)
def get_categorial_vector(v_cat, M0, presence_vector):
    """Compute the centroid embedding of a categorial group.

    Masks the categorial adjacency vector ``v_cat`` with
    ``presence_vector`` (so only terms that have a real embedding
    contribute), L1-normalizes the mask into weights and returns the
    weighted average of the rows of ``M0``.

    Args:
        v_cat: 1-D adjacency/indicator vector of the group (length m).
        M0: (m, dim) matrix of initial term embeddings.
        presence_vector: 1-D 0/1 vector flagging terms with embeddings.

    Returns:
        1-D array of length ``M0.shape[1]``; all zeros when no masked
        term is present (avoids division by zero).
    """
    v_norm = v_cat * presence_vector
    length = np.linalg.norm(v_norm, ord=1)
    if length > 0:
        # reuse the already-computed L1 norm instead of recomputing it
        return (v_norm / length).dot(M0)
    return np.zeros(M0.shape[1])
def get_v_c(A_cat, M0, presence_vector):  # parallel version
    """Return the centroid vector of every categorial group in ``A_cat``."""
    return {
        group_key: get_categorial_vector(adjacency, M0, presence_vector)
        for group_key, adjacency in A_cat.items()
    }
def calculate_Madd_rel(S, key, inv_key, Mc, c_inv, M_last, M_sum, v_denominator, conf, M_sum_lock, v_denominator_lock):
    """Accumulate the contribution of one relation (and its inverse) into M_sum.

    Adds the GAMMA-weighted neighborhood attraction and subtracts the
    DELTA-weighted repulsion term for relation ``key`` into the shared
    accumulators ``M_sum`` and ``v_denominator`` (both updated in place
    under their respective locks). Intended to run as a worker task.

    Args:
        S: dict of L1 row-normalized relational weight matrices.
        key: key of the relation to process.
        inv_key: key of the inverse relation.
        Mc: sparse diagonal matrix of inverse counts (unused here).
        c_inv: per-term inverse relation counts.
        M_last: embedding matrix of the previous iteration.
        M_sum: shared numerator accumulator (mutated in place).
        v_denominator: shared denominator accumulator (mutated in place).
        conf: configuration dict providing 'GAMMA' and 'DELTA'.
        M_sum_lock: lock guarding M_sum.
        v_denominator_lock: lock guarding v_denominator.
    """
    GAMMA = conf['GAMMA']
    DELTA = conf['DELTA']
    start = time.time()
    # pre-declared locals (kept from the original structure)
    gamma_i = None
    num_sources = None
    num_targets = None
    max_cardinality = None
    max_c_inv = None
    targets_weighted = None
    targets_one_hot = None
    sources_one_hot = None
    # number of distinct source / target rows participating in the relation
    num_sources = len(set(S[key].nonzero()[0]))
    num_targets = len(set(S[inv_key].nonzero()[0]))
    max_cardinality = max(num_targets, num_sources)
    # one-hot indicators of rows that act as targets / sources
    inv_nz = set(S[inv_key].nonzero()[0])
    targets_one_hot = np.array(
        [1 if n in inv_nz else 0 for n in range(M_last.shape[0])])
    nz = set(S[key].nonzero()[0])
    sources_one_hot = np.array(
        [1 if n in nz else 0 for n in range(M_last.shape[0])])
    # NOTE(review): despite the name, this computes the MINIMUM of c_inv
    # over participating rows (starts at 1 and only ever decreases) --
    # confirm that is the intended quantity.
    max_c_inv = 1
    for i in range(M_last.shape[0]):
        if (sources_one_hot[i] != 0) and (c_inv[i] < max_c_inv):
            max_c_inv = c_inv[i]
        if (targets_one_hot[i] != 0) and (c_inv[i] < max_c_inv):
            max_c_inv = c_inv[i]
    gamma_i = np.zeros(M_last.shape[0])
    gamma_i_inv = list()
    # per-row maximum weight in S[key], and its zero-guarded inverse
    gamma_i = np.array(S[key].max(axis=1).todense()).T[0]
    for i in gamma_i:
        gamma_i_inv.append(1 / i if i > 0 else 0)
    gamma_i_inv = np.array(gamma_i_inv)
    targets_weighted = (
        targets_one_hot * (1 / (max_cardinality * max_c_inv**-1))).dot(M_last)
    print('Preprocessing for', key, 'done')
    # attraction towards related rows (both directions), scaled by GAMMA
    M_inc = S[key].T.multiply(c_inv).T.dot(M_last) * GAMMA
    M_inc += (S[inv_key].T.multiply(c_inv).T).T.dot(M_last) * \
        GAMMA
    # repulsion away from the weighted target centroid, scaled by DELTA
    M_dec = (targets_weighted * np.array([sources_one_hot]).T - S[key].dot(M_last) * gamma_i_inv[:, None] / (
        max_cardinality * max_c_inv**-1)) * DELTA * 2
    denum_sum = np.array(np.sum(S[inv_key].T.multiply(c_inv), axis=1)).T[0] * GAMMA + GAMMA * sources_one_hot * c_inv - 2 * DELTA * (
        num_targets - gamma_i_inv) / (max_cardinality * max_c_inv**-1) * sources_one_hot  # add to dominator
    M_sum_lock.acquire()
    M_sum += (M_inc - M_dec)
    M_sum_lock.release()
    v_denominator_lock.acquire()
    v_denominator += denum_sum
    v_denominator_lock.release()
    end = time.time()
    print('Calculation for relation', key, 'done', 'time:', end - start)
    return
def calculate_Madd_cat(A_cat, key, Mc, M_last, M_sum, v_denominator, v_c, conf, M_sum_lock, v_denominator_lock):
    """Accumulate the categorial contribution of one group into M_sum.

    Adds the BETA-weighted pull towards the group's centroid vector to
    the shared numerator ``M_sum`` and the matching weights to the shared
    ``v_denominator``; every shared update is guarded by its lock.
    """
    beta = conf['BETA']
    t_begin = time.time()
    group_centroid = v_c[key]
    # per-term weights: inverse relation counts masked by group membership
    weights = Mc.diagonal() * A_cat[key]
    with M_sum_lock:
        M_sum += np.array([weights]).T * group_centroid * beta
    with v_denominator_lock:
        v_denominator += weights * beta
    t_end = time.time()
    print('Calculation for categorial relation', key, 'done', 'time:', t_end - t_begin)
    return
def calculate_Mk(M0, M_last, Mc, c_inv, S, v_c, v_P, A_cat, invert_rel, term_list, conf):  # parallel version
    """Compute the next iteration matrix M_k of the retrofitting update.

    Creates one worker task per categorial group and per relation; a
    fixed pool of NUM_THREADS executor threads drains the task list via
    utils.execute_threads_from_pool. All workers accumulate into the
    shared M_sum / v_denominator arrays under the two locks.

    Args:
        M0: initial embedding matrix (anchor of the update).
        M_last: matrix from the previous iteration.
        Mc: sparse diagonal matrix of inverse relation counts.
        c_inv: per-term inverse relation counts.
        S: dict of relational weight matrices.
        v_c: dict of categorial centroid vectors.
        v_P: per-term anchor weights (scaled by ALPHA).
        A_cat: dict of categorial adjacency vectors.
        invert_rel: maps each relation key to its inverse key.
        term_list: ordered term labels (unused here).
        conf: configuration dict providing 'ALPHA' (and for the workers
            'BETA', 'GAMMA', 'DELTA').

    Returns:
        The updated matrix M_sum / (v_P * ALPHA + v_denominator), row-wise.
    """
    ALPHA = conf['ALPHA']
    M_sum = np.zeros(M0.shape)
    v_denominator = np.zeros(M0.shape[0], dtype='float32')
    # anchor term: pull every row towards its initial vector
    M_sum += M0 * np.array([v_P]).T * ALPHA
    threads = []
    executer_threads = []
    M_sum_lock = Lock()
    v_denominator_lock = Lock()
    for key in A_cat:
        threads.append(Thread(target=calculate_Madd_cat,
                              args=(A_cat, key, Mc, M_last, M_sum, v_denominator, v_c, conf, M_sum_lock, v_denominator_lock)))
    for key in S:
        threads.append(Thread(target=calculate_Madd_rel, args=(
            S, key, invert_rel[key], Mc, c_inv, M_last, M_sum, v_denominator, conf, M_sum_lock, v_denominator_lock)))
    # the executor threads pull tasks from the shared `threads` list
    for i in range(NUM_THREADS):
        executer_threads.append(Thread(target=utils.execute_threads_from_pool, args=(
            threads,), kwargs={'verbose': False}))
    for thread in executer_threads:
        thread.start()
    for thread in executer_threads:
        thread.join()
    result = M_sum / ((v_P * ALPHA + v_denominator)[:, None])
    # convergence diagnostics
    print('Delta:', sum(np.linalg.norm(result - M_last, ord=1, axis=1)))
    print('Sum M_last', sum(np.linalg.norm(M_last, ord=1, axis=1)))
    print('Sum', sum(np.linalg.norm(result, ord=1, axis=1)))
    return result
def run_retrofitting(M0, S, v_c, c, v_P, A_cat, rel_key_pairs, term_list, conf):
    """Run the iterative retrofitting algorithm.

    Args:
        M0: (m, dim) initial embedding matrix.
        S: dict of row-normalized relational weight matrices.
        v_c: dict of categorial centroid vectors.
        c: per-term relation participation counts.
        v_P: per-term anchor weights passed to calculate_Mk.
        A_cat: dict of categorial adjacency vectors.
        rel_key_pairs: set of (key, inverse_key) relation pairs.
        term_list: ordered term labels (forwarded to calculate_Mk).
        conf: configuration dict; honors optional 'ITERATIONS' (default 10).

    Returns:
        The retrofitted matrix Mk after the configured number of iterations.
    """
    num_iter = conf['ITERATIONS'] if ('ITERATIONS' in conf) else 10
    size = M0.shape[0]
    # element-wise safe inverse of the counts: entries with c == 0 stay 0,
    # computed without a Python loop and without divide-by-zero warnings
    c_inv = np.zeros(size, dtype=float)
    np.divide(1.0, c, out=c_inv, where=c > 0)
    # build the diagonal count matrix via LIL: calling setdiag on an empty
    # CSR matrix changes its sparsity structure and triggers a
    # SparseEfficiencyWarning; LIL supports it cheaply, then convert
    M_c = lil_matrix((size, size))
    M_c.setdiag(c_inv)
    M_c = csr_matrix(M_c)
    # lookup from a relation key to its inverse relation key
    invert_rel = dict()
    for (key1, key2) in rel_key_pairs:
        invert_rel[key1] = key2
        invert_rel[key2] = key1
    # iterate the update rule, starting from a copy of M0
    Mk = np.copy(M0)
    for i in range(num_iter):
        print('Start Iteration: ', i)
        Mk = calculate_Mk(M0, Mk, M_c, c_inv, S,
                          v_c, v_P, A_cat, invert_rel, term_list, conf)
    return Mk
def output_vectors(term_list, Mk, output_file_name):
    """Export retrofitted vectors in word2vec text format.

    Writes a '<num_vectors> <dim>' header line followed by one
    '<term> <v_0> <v_1> ...' line per term. Progress is printed every
    1000 terms.

    Args:
        term_list: list of term labels, aligned with the rows of Mk.
        Mk: (len(term_list), dim) matrix of retrofitted vectors.
        output_file_name: path of the output text file.
    """
    # 'with' guarantees the file is flushed and closed even on error
    # (the original relied on an explicit close that a mid-loop exception
    # would have skipped)
    with open(output_file_name, 'w') as f_out:
        # write meta information: number of vectors and dimensionality
        f_out.write('%d %d' % (Mk.shape[0], Mk.shape[1]) + linesep)
        # write term vector pairs
        for i, term in enumerate(term_list):
            if (i % 1000 == 0):
                print('Exported', i, 'term vectors | Current term:', term)
            f_out.write('%s %s' % (term, ' '.join(str(x) for x in Mk[i])))
            f_out.write(linesep)
    return
def main(argc, argv):
    """Run the full retrofitting pipeline.

    Reads the retrofitting configuration from argv (see config.get_config);
    argv[2] is the database configuration path. Writes the retrofitted
    vectors to conf['RETRO_VECS_FILE_NAME'].
    """
    # get retrofitting config
    conf = config.get_config(argv)
    # get group information
    groups_info = utils.parse_groups(conf['GROUPS_FILE_NAME'])
    data_columns = utils.get_data_columns_from_group_data(groups_info)
    db_config = db_con.get_db_config(path=argv[2])
    con, cur = db_con.create_connection(db_config)
    # get tokens of data columns
    all_terms = utils.get_terms(data_columns, con, cur)
    print('Retrieved terms from database')
    present_vectors, dim = utils.get_vectors_for_present_terms_from_group_file(
        data_columns, groups_info)
    print('Got vectors of terms from group file')
    # create M0 and presence vector
    term_list, M0, v_P = create_M0(all_terms, present_vectors, dim, conf)
    print('Constructed initial matrix M0 with size', M0.shape)
    print('len', len(v_P.nonzero()[0]))
    # create adjacency matrices, weight matrices, count vectors and vector for R
    A_cat, S, c, rel_key_pairs = create_adjacency_matrices(
        term_list, groups_info, con, cur, v_P, conf)
    print('Created matrix representations')
    # get category vectors v_c
    v_c = get_v_c(A_cat, M0, v_P)
    for key in v_c:
        print(key, np.linalg.norm(v_c[key]))
    print('Created category vectors')
    # NOTE(review): run_retrofitting receives v_Q (all ones) instead of the
    # presence vector v_P -- confirm this substitution is intentional.
    v_Q = np.ones(len(v_P))
    # run iterative algorithm
    Mk = run_retrofitting(M0, S, v_c, c,
                          v_Q, A_cat, rel_key_pairs, term_list, conf)
    print('Retrofitting done, start to generate vectors file ...')
    # output result to file
    output_vectors(term_list, Mk, conf['RETRO_VECS_FILE_NAME'])
    print('Exported vectors')
    return
# Script entry point: argv carries the retrofitting config selection and
# argv[2] the database configuration path (see main()).
if __name__ == "__main__":
    main(len(sys.argv), sys.argv)
|
# -*- coding: utf-8 -*-
from django.contrib.auth import get_permission_codename, get_user_model
from django.forms.models import model_to_dict
from django.test.utils import override_settings
from cms.models.permissionmodels import PageUser
from cms.test_utils.testcases import CMSTestCase
from cms.utils.urlutils import admin_reverse
class PermissionsOnTestCase(CMSTestCase):
    """Shared helpers for the page-user admin permission tests."""

    def _user_exists(self, username=None):
        """Return whether a PageUser with ``username`` (or the default
        test username) exists."""
        default_username = (
            "perms-testuser"
            if PageUser.USERNAME_FIELD != "email"
            else "perms-testuser@django-cms.org"
        )
        lookup = {PageUser.USERNAME_FIELD: username or default_username}
        return PageUser.objects.filter(**lookup).exists()

    def _get_user_data(self, **kwargs):
        """Return valid POST data for the add-user form; keyword
        arguments override the defaults."""
        username = (
            "perms-testuser"
            if PageUser.USERNAME_FIELD != "email"
            else "perms-testuser@django-cms.org"
        )
        data = {
            'password1': 'changeme',
            'password2': 'changeme',
            PageUser.USERNAME_FIELD: username,
        }
        data.update(**kwargs)
        return data

    def _get_delete_perm(self):
        """Return the delete permission codename of the user model."""
        user_model_meta = get_user_model()._meta
        return get_permission_codename('delete', user_model_meta)
@override_settings(CMS_PERMISSION=True)
class PermissionsOnGlobalTest(PermissionsOnTestCase):
    """
    Tests all user interactions with the page user admin
    while permissions are set to True and user has
    global permissions.
    """

    def test_user_in_admin_index(self):
        """The page-user app is listed in the admin index and its
        changelist is reachable with global can_change_permissions."""
        endpoint = admin_reverse('app_list', args=['cms'])
        staff_user = self.get_staff_user_with_no_permissions()
        self.add_permission(staff_user, 'change_pageuser')
        self.add_global_permission(staff_user, can_change_permissions=True)
        with self.login_user_context(staff_user):
            response = self.client.get(endpoint)
            self.assertEqual(response.status_code, 200)
            self.assertContains(
                response,
                '<a href="/en/admin/cms/pageuser/">Users (page)</a>',
                html=True,
            )
        endpoint = self.get_admin_url(PageUser, 'changelist')
        with self.login_user_context(staff_user):
            response = self.client.get(endpoint)
            self.assertEqual(response.status_code, 200)

    def test_user_not_in_admin_index(self):
        """Without global can_change_permissions the app index is hidden
        (404) and the changelist is forbidden (403)."""
        staff_user = self.get_staff_user_with_no_permissions()
        endpoint = admin_reverse('app_list', args=['cms'])
        self.add_permission(staff_user, 'change_pageuser')
        self.add_global_permission(staff_user, can_change_permissions=False)
        with self.login_user_context(staff_user):
            response = self.client.get(endpoint)
            self.assertEqual(response.status_code, 404)
        endpoint = self.get_admin_url(PageUser, 'changelist')
        with self.login_user_context(staff_user):
            response = self.client.get(endpoint)
            self.assertEqual(response.status_code, 403)

    def test_user_can_add_user(self):
        """A user with add/change perms and global can_change_permissions
        can create a new page user."""
        endpoint = self.get_admin_url(PageUser, 'add')
        staff_user = self.get_staff_user_with_no_permissions()
        data = self._get_user_data()
        data['_addanother'] = '1'
        self.add_permission(staff_user, 'add_pageuser')
        self.add_permission(staff_user, 'change_pageuser')
        self.add_global_permission(staff_user, can_change_permissions=True)
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertRedirects(response, endpoint)
            self.assertTrue(self._user_exists())

    def test_user_cant_add_user(self):
        """Without global can_change_permissions user creation is
        forbidden and no user is created."""
        endpoint = self.get_admin_url(PageUser, 'add')
        staff_user = self.get_staff_user_with_no_permissions()
        data = self._get_user_data()
        self.add_permission(staff_user, 'add_pageuser')
        self.add_permission(staff_user, 'change_pageuser')
        self.add_global_permission(staff_user, can_change_permissions=False)
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertEqual(response.status_code, 403)
            self.assertFalse(self._user_exists())

    def test_user_can_change_user(self):
        """With global can_change_permissions an existing page user can
        be edited (username change persists)."""
        user = self.get_staff_page_user()
        endpoint = self.get_admin_url(PageUser, 'change', user.pk)
        staff_user = self.get_staff_user_with_no_permissions()
        data = model_to_dict(user, exclude=['date_joined'])
        data['_continue'] = '1'
        data['date_joined_0'] = '2016-06-21'
        data['date_joined_1'] = '15:00:00'
        self.add_permission(staff_user, 'change_pageuser')
        self.add_global_permission(staff_user, can_change_permissions=True)
        if user.USERNAME_FIELD != "email":
            username = "perms-testuser2"
        else:
            username = "perms-testuser+2@django-cms.org"
        data[user.USERNAME_FIELD] = username
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertRedirects(response, endpoint)
            self.assertTrue(self._user_exists(username))

    def test_user_cant_change_user(self):
        """Without global can_change_permissions editing is forbidden and
        the username change does not persist."""
        user = self.get_staff_page_user()
        endpoint = self.get_admin_url(PageUser, 'change', user.pk)
        staff_user = self.get_staff_user_with_no_permissions()
        data = model_to_dict(user, exclude=['date_joined'])
        data['_continue'] = '1'
        data['date_joined_0'] = '2016-06-21'
        data['date_joined_1'] = '15:00:00'
        self.add_permission(staff_user, 'change_pageuser')
        self.add_global_permission(staff_user, can_change_permissions=False)
        if user.USERNAME_FIELD != "email":
            username = "perms-testuser2"
        else:
            username = "perms-testuser+2@django-cms.org"
        data[user.USERNAME_FIELD] = username
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertEqual(response.status_code, 403)
            self.assertFalse(self._user_exists(username))

    def test_user_can_delete_user(self):
        """With delete perms and global can_change_permissions an
        existing page user can be deleted."""
        user = self.get_staff_page_user()
        endpoint = self.get_admin_url(PageUser, 'delete', user.pk)
        redirect_to = admin_reverse('index')
        staff_user = self.get_staff_user_with_no_permissions()
        data = {'post': 'yes'}
        self.add_permission(staff_user, self._get_delete_perm())
        self.add_permission(staff_user, 'delete_pageuser')
        self.add_global_permission(staff_user, can_change_permissions=True)
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertRedirects(response, redirect_to)
            self.assertFalse(self._user_exists())

    def test_user_cant_delete_user(self):
        """Without global can_change_permissions deletion is forbidden
        and the user remains."""
        user = self.get_staff_page_user()
        endpoint = self.get_admin_url(PageUser, 'delete', user.pk)
        staff_user = self.get_staff_user_with_no_permissions()
        data = {'post': 'yes'}
        self.add_permission(staff_user, self._get_delete_perm())
        self.add_permission(staff_user, 'delete_pageuser')
        self.add_global_permission(staff_user, can_change_permissions=False)
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertEqual(response.status_code, 403)
            self.assertTrue(self._user_exists())
@override_settings(CMS_PERMISSION=True)
class PermissionsOnPageTest(PermissionsOnTestCase):
    """
    Tests all user interactions with the page user admin
    while permissions are set to True and user has
    page permissions.
    """

    def setUp(self):
        """Create the page that per-page permissions are attached to."""
        self._permissions_page = self.get_permissions_test_page()

    def test_user_in_admin_index(self):
        """The page-user app is listed in the admin index and its
        changelist is reachable with page-level can_change_permissions."""
        endpoint = admin_reverse('app_list', args=['cms'])
        staff_user = self.get_staff_user_with_no_permissions()
        self.add_permission(staff_user, 'change_pageuser')
        self.add_page_permission(
            staff_user,
            self._permissions_page,
            can_change_permissions=True,
        )
        with self.login_user_context(staff_user):
            response = self.client.get(endpoint)
            self.assertEqual(response.status_code, 200)
            self.assertContains(
                response,
                '<a href="/en/admin/cms/pageuser/">Users (page)</a>',
                html=True,
            )
        endpoint = self.get_admin_url(PageUser, 'changelist')
        with self.login_user_context(staff_user):
            response = self.client.get(endpoint)
            self.assertEqual(response.status_code, 200)

    def test_user_not_in_admin_index(self):
        """Without page-level can_change_permissions the app index is
        hidden (404) and the changelist is forbidden (403)."""
        staff_user = self.get_staff_user_with_no_permissions()
        endpoint = admin_reverse('app_list', args=['cms'])
        self.add_permission(staff_user, 'change_pageuser')
        self.add_page_permission(
            staff_user,
            self._permissions_page,
            can_change_permissions=False,
        )
        with self.login_user_context(staff_user):
            response = self.client.get(endpoint)
            self.assertEqual(response.status_code, 404)
        endpoint = self.get_admin_url(PageUser, 'changelist')
        with self.login_user_context(staff_user):
            response = self.client.get(endpoint)
            self.assertEqual(response.status_code, 403)

    def test_user_can_add_user(self):
        """
        User can add new users if can_change_permissions
        is set to True.
        """
        endpoint = self.get_admin_url(PageUser, 'add')
        staff_user = self.get_staff_user_with_no_permissions()
        data = self._get_user_data()
        data['_addanother'] = '1'
        self.add_permission(staff_user, 'add_pageuser')
        self.add_permission(staff_user, 'change_pageuser')
        self.add_page_permission(
            staff_user,
            self._permissions_page,
            can_change_permissions=True,
        )
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertRedirects(response, endpoint)
            self.assertTrue(self._user_exists())

    def test_user_cant_add_user(self):
        """
        User can't add new users if can_change_permissions
        is set to False.
        """
        endpoint = self.get_admin_url(PageUser, 'add')
        staff_user = self.get_staff_user_with_no_permissions()
        data = self._get_user_data()
        self.add_permission(staff_user, 'add_pageuser')
        self.add_permission(staff_user, 'change_pageuser')
        self.add_page_permission(
            staff_user,
            self._permissions_page,
            can_change_permissions=False,
        )
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertEqual(response.status_code, 403)
            self.assertFalse(self._user_exists())

    def test_user_can_change_subordinate(self):
        """
        User can change users he created if can_change_permissions
        is set to True.
        """
        staff_user = self.get_staff_user_with_no_permissions()
        subordinate = self.get_staff_page_user(created_by=staff_user)
        endpoint = self.get_admin_url(PageUser, 'change', subordinate.pk)
        data = model_to_dict(subordinate, exclude=['date_joined'])
        data['_continue'] = '1'
        data['date_joined_0'] = '2016-06-21'
        data['date_joined_1'] = '15:00:00'
        self.add_permission(staff_user, 'change_pageuser')
        self.add_page_permission(
            staff_user,
            self._permissions_page,
            can_change_permissions=True,
        )
        if subordinate.USERNAME_FIELD != "email":
            username = "perms-testuser2"
        else:
            username = "perms-testuser+2@django-cms.org"
        data[subordinate.USERNAME_FIELD] = username
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertRedirects(response, endpoint)
            self.assertTrue(self._user_exists(username))

    def test_user_cant_change_subordinate(self):
        """
        User cant change users he created if can_change_permissions
        is set to False.
        """
        staff_user = self.get_staff_user_with_no_permissions()
        subordinate = self.get_staff_page_user(created_by=staff_user)
        endpoint = self.get_admin_url(PageUser, 'change', subordinate.pk)
        data = model_to_dict(subordinate, exclude=['date_joined'])
        data['_continue'] = '1'
        data['date_joined_0'] = '2016-06-21'
        data['date_joined_1'] = '15:00:00'
        self.add_permission(staff_user, 'change_pageuser')
        self.add_page_permission(
            staff_user,
            self._permissions_page,
            can_change_permissions=False,
        )
        if subordinate.USERNAME_FIELD != "email":
            username = "perms-testuser2"
        else:
            username = "perms-testuser+2@django-cms.org"
        data[subordinate.USERNAME_FIELD] = username
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertEqual(response.status_code, 403)
            self.assertFalse(self._user_exists(username))

    def test_user_cant_change_self(self):
        """
        User cant change his own user,
        even with can_change_permissions set to True.
        """
        admin = self.get_superuser()
        staff_user = self.get_staff_page_user(created_by=admin)
        endpoint = self.get_admin_url(PageUser, 'change', staff_user.pk)
        data = model_to_dict(staff_user, exclude=['date_joined'])
        data['_continue'] = '1'
        data['date_joined_0'] = '2016-06-21'
        data['date_joined_1'] = '15:00:00'
        self.add_permission(staff_user, 'change_pageuser')
        self.add_page_permission(
            staff_user,
            self._permissions_page,
            can_change_permissions=True,
        )
        if staff_user.USERNAME_FIELD != "email":
            username = "perms-testuser2"
        else:
            username = "perms-testuser+2@django-cms.org"
        data[staff_user.USERNAME_FIELD] = username
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertEqual(response.status_code, 404)
            self.assertFalse(self._user_exists(username))

    def test_user_cant_change_others(self):
        """
        User cant change a users created by another user,
        even with can_change_permissions set to True.
        """
        admin = self.get_superuser()
        staff_user = self.get_staff_user_with_no_permissions()
        staff_user_2 = self.get_staff_page_user(created_by=admin)
        endpoint = self.get_admin_url(PageUser, 'change', staff_user_2.pk)
        data = model_to_dict(staff_user_2, exclude=['date_joined'])
        data['_continue'] = '1'
        data['date_joined_0'] = '2016-06-21'
        data['date_joined_1'] = '15:00:00'
        self.add_permission(staff_user, 'change_pageuser')
        self.add_page_permission(
            staff_user,
            self._permissions_page,
            can_change_permissions=True,
        )
        if staff_user_2.USERNAME_FIELD != "email":
            username = "perms-testuser2"
        else:
            username = "perms-testuser+2@django-cms.org"
        data[staff_user_2.USERNAME_FIELD] = username
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertEqual(response.status_code, 404)
            self.assertFalse(self._user_exists(username))

    def test_user_can_delete_subordinate(self):
        """
        User can delete users he created if can_change_permissions
        is set to True.
        """
        staff_user = self.get_staff_user_with_no_permissions()
        subordinate = self.get_staff_page_user(created_by=staff_user)
        endpoint = self.get_admin_url(PageUser, 'delete', subordinate.pk)
        redirect_to = admin_reverse('index')
        data = {'post': 'yes'}
        self.add_permission(staff_user, self._get_delete_perm())
        self.add_permission(staff_user, 'delete_pageuser')
        self.add_page_permission(
            staff_user,
            self._permissions_page,
            can_change_permissions=True,
        )
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertRedirects(response, redirect_to)
            self.assertFalse(self._user_exists())

    def test_user_cant_delete_subordinate(self):
        """
        User cant delete users he created if can_change_permissions
        is set to False.
        """
        staff_user = self.get_staff_user_with_no_permissions()
        subordinate = self.get_staff_page_user(created_by=staff_user)
        endpoint = self.get_admin_url(PageUser, 'delete', subordinate.pk)
        data = {'post': 'yes'}
        self.add_permission(staff_user, self._get_delete_perm())
        self.add_permission(staff_user, 'delete_pageuser')
        self.add_page_permission(
            staff_user,
            self._permissions_page,
            can_change_permissions=False,
        )
        with self.login_user_context(staff_user):
            response = self.client.post(endpoint, data)
            self.assertEqual(response.status_code, 403)
            self.assertTrue(self._user_exists())

    def test_user_cant_delete_self(self):
        """
        User cant delete his own user,
        even with can_change_permissions set to True.
        """
        admin = self.get_superuser()
        staff_user = self.get_staff_page_user(created_by=admin)
        endpoint = self.get_admin_url(PageUser, 'delete', staff_user.pk)
        data = {'post': 'yes'}
        self.add_permission(staff_user, self._get_delete_perm())
        self.add_permission(staff_user, 'delete_pageuser')
        self.add_page_permission(
            staff_user,
            self._permissions_page,
            can_change_permissions=True,
        )
        with self.login_user_context(staff_user):
            username = getattr(staff_user, staff_user.USERNAME_FIELD)
            response = self.client.post(endpoint, data)
            # The response is a 404 instead of a 403
            # because the queryset is limited to objects
            # that the user has permissions for.
            # This queryset is used to fetch the object
            # from the request, resulting in a 404.
            self.assertEqual(response.status_code, 404)
            self.assertTrue(self._user_exists(username))

    def test_user_cant_delete_others(self):
        """
        User cant delete a user created by another user,
        even with can_change_permissions set to True.
        """
        admin = self.get_superuser()
        staff_user = self.get_staff_user_with_no_permissions()
        staff_user_2 = self.get_staff_page_user(created_by=admin)
        endpoint = self.get_admin_url(PageUser, 'delete', staff_user_2.pk)
        data = {'post': 'yes'}
        self.add_permission(staff_user, self._get_delete_perm())
        self.add_permission(staff_user, 'delete_pageuser')
        self.add_page_permission(
            staff_user,
            self._permissions_page,
            can_change_permissions=True,
        )
        with self.login_user_context(staff_user):
            username = getattr(staff_user_2, staff_user_2.USERNAME_FIELD)
            response = self.client.post(endpoint, data)
            # The response is a 404 instead of a 403
            # because the queryset is limited to objects
            # that the user has permissions for.
            # This queryset is used to fetch the object
            # from the request, resulting in a 404.
            self.assertEqual(response.status_code, 404)
            self.assertTrue(self._user_exists(username))
|
// Smoke test: print the fixed greeting "CIAO" to the console.
console.log("CIAO");
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the WinRAR Windows Registry plugin."""
from __future__ import unicode_literals
import unittest
from dfdatetime import filetime as dfdatetime_filetime
from dfwinreg import definitions as dfwinreg_definitions
from dfwinreg import fake as dfwinreg_fake
from plaso.formatters import winreg # pylint: disable=unused-import
from plaso.parsers.winreg_plugins import winrar
from tests.parsers.winreg_plugins import test_lib
class WinRarArcHistoryPluginTest(test_lib.RegistryPluginTestCase):
  """Tests for the WinRAR ArcHistory Windows Registry plugin."""

  def _CreateTestKey(self, key_path, time_string):
    """Creates WinRAR ArcHistory Registry keys and values for testing.

    Args:
      key_path (str): Windows Registry key path.
      time_string (str): key last written date and time.

    Returns:
      dfwinreg.WinRegistryKey: a Windows Registry key.
    """
    filetime = dfdatetime_filetime.Filetime()
    filetime.CopyFromDateTimeString(time_string)
    # The offset values used below are arbitrary, for testing only.
    registry_key = dfwinreg_fake.FakeWinRegistryKey(
        'ArcHistory', key_path=key_path, last_written_time=filetime.timestamp,
        offset=1456)
    # ArcHistory entries are REG_SZ values encoded as UTF-16 little-endian.
    value_data = 'C:\\Downloads\\The Sleeping Dragon CD1.iso'.encode(
        'utf_16_le')
    registry_value = dfwinreg_fake.FakeWinRegistryValue(
        '0', data=value_data, data_type=dfwinreg_definitions.REG_SZ,
        offset=1892)
    registry_key.AddValue(registry_value)
    value_data = 'C:\\Downloads\\plaso-static.rar'.encode('utf_16_le')
    registry_value = dfwinreg_fake.FakeWinRegistryValue(
        '1', data=value_data, data_type=dfwinreg_definitions.REG_SZ,
        offset=612)
    registry_key.AddValue(registry_value)
    return registry_key

  def testFilters(self):
    """Tests the FILTERS class attribute."""
    plugin = winrar.WinRarHistoryPlugin()
    key_path = 'HKEY_CURRENT_USER\\Software\\WinRAR\\ArcHistory'
    self._AssertFiltersOnKeyPath(plugin, key_path)
    self._AssertNotFiltersOnKeyPath(plugin, 'HKEY_LOCAL_MACHINE\\Bogus')

  def testProcess(self):
    """Tests the Process function."""
    key_path = 'HKEY_CURRENT_USER\\Software\\WinRAR\\ArcHistory'
    time_string = '2012-08-28 09:23:49.002031'
    registry_key = self._CreateTestKey(key_path, time_string)
    plugin = winrar.WinRarHistoryPlugin()
    storage_writer = self._ParseKeyWithPlugin(registry_key, plugin)
    # The two ArcHistory values produce a single event.
    self.assertEqual(storage_writer.number_of_events, 1)
    events = list(storage_writer.GetEvents())
    event = events[0]
    # This should just be the plugin name, as we're invoking it directly,
    # and not through the parser.
    self.assertEqual(event.parser, plugin.plugin_name)
    self.CheckTimestamp(event.timestamp, '2012-08-28 09:23:49.002031')
    expected_message = (
        '[{0:s}] '
        '0: C:\\Downloads\\The Sleeping Dragon CD1.iso '
        '1: C:\\Downloads\\plaso-static.rar').format(key_path)
    expected_short_message = '{0:s}...'.format(expected_message[:77])
    self._TestGetMessageStrings(event, expected_message, expected_short_message)
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
|
#!/usr/bin/env python
import sys
import json
from pprint import pprint
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import os
# --- command-line handling ---------------------------------------------
# Usage: <script> <results_base> [sub_dirs...]
# Without explicit sub_dirs, every directory inside <results_base> is used.
if len(sys.argv) < 2:
    print('Usage: %s <results_base> sub_dirs...' % sys.argv[0])
    sys.exit(1)
results_base = sys.argv[1]
print('Analyzing results in %s' % results_base)
results = []
if len(sys.argv) == 2:
    # no sub-directories given: take every directory under results_base
    results = [os.path.join(results_base, f) for f in os.listdir(results_base)
               if os.path.isdir(os.path.join(results_base, f))]
else:
    results = [os.path.join(results_base, f) for f in sys.argv[2:]]
pprint(results)
# Load the benchmark results of every experiment directory.
data = []


def _benchmark_real_time(future_data, benchmark_name):
    """Return the 'real_time' value of the named benchmark as a float.

    Replaces the repeated generator-``.next()`` lookups of the original:
    generator ``.next()`` only exists on Python 2 and breaks on Python 3,
    while the builtin ``next(...)`` works on Python 2.6+ and 3.
    """
    return float(next(x for x in future_data['benchmarks']
                      if x['name'] == benchmark_name)['real_time'])


for results_dir in results:
    name = os.path.split(results_dir)[-1]
    # 'with' closes the JSON file deterministically (the original left the
    # handle to the garbage collector)
    with open('%s/tasks/future_overhead.json' % results_dir) as json_file:
        future_data = json.load(json_file)
    mhz = float(future_data['context']['mhz_per_cpu'])
    hpx_make_ready_void = _benchmark_real_time(
        future_data, 'hpx_make_ready_void_overhead/real_time')
    hpx_make_ready_int = _benchmark_real_time(
        future_data, 'hpx_make_ready_overhead/real_time')
    hpx_promise_void = _benchmark_real_time(
        future_data, 'hpx_promise_void_overhead/real_time')
    hpx_promise_int = _benchmark_real_time(
        future_data, 'hpx_promise_overhead/real_time')
    hpx_async_void = _benchmark_real_time(
        future_data, 'hpx_async_void_overhead/real_time')
    hpx_async_int = _benchmark_real_time(
        future_data, 'hpx_async_int_overhead/real_time')
    std_promise_void = _benchmark_real_time(
        future_data, 'std_promise_void_overhead/real_time')
    std_promise_int = _benchmark_real_time(
        future_data, 'std_promise_overhead/real_time')
    std_async_void = _benchmark_real_time(
        future_data, 'std_async_void_overhead/real_time')
    std_async_int = _benchmark_real_time(
        future_data, 'std_async_int_overhead/real_time')
    # first bar group: HPX make_ready / promise / async (void variants)
    times1 = np.array([
        hpx_make_ready_void,
        #hpx_make_ready_int,
        hpx_promise_void,
        #hpx_promise_int,
        hpx_async_void,
        #hpx_async_int,
        #std_promise_int,
        #std_async_int
    ])
    # scale the measured times by the clock rate to express them in cycles
    cycles1 = (times1 * mhz) / 1000
    # second bar group: HPX async vs std async (void variants)
    times2 = np.array([
        #hpx_make_ready_int,
        #hpx_promise_int,
        hpx_async_void,
        #hpx_async_int,
        #std_promise_int,
        std_async_void
        #std_async_int
    ])
    cycles2 = (times2 * mhz) / 1000
    data.append((name, cycles1, cycles2))
# Number of data points:
# - coroutines (plain)
# - hpx threads
# - std threads
N = len(data[0][1])
N_experiments = len(data)
ind = np.arange(N)
width = 1.0 / (N_experiments + 1.0)
pgf_with_latex = {
"pgf.texsystem": "xelatex", # use Xelatex which is TTF font aware
"text.usetex": True, # use LaTeX to write all text
"font.family": "serif", # use serif rather than sans-serif
"font.serif": "TeX Gyre Pagella", # use 'Ubuntu' as the standard font
"font.sans-serif": [],
"font.monospace": "Anonymous Pro", # use Ubuntu mono if we have mono
"axes.labelsize": 11, # LaTeX default is 10pt font.
"font.size": 11,
"figure.titlesize": 11, # Make the legend/label fonts a little smaller
"figure.titleweight": 1, # Make the legend/label fonts a little smaller
"legend.fontsize": 11, # Make the legend/label fonts a little smaller
"xtick.labelsize": 11,
"ytick.labelsize": 11,
"pgf.rcfonts": False, # Use pgf.preamble, ignore standard Matplotlib RC
"text.latex.unicode": True,
"pgf.preamble": [
r'\usepackage{fontspec}',
r'\setmainfont[Mapping=tex-text]{TeX Gyre Pagella}',
r'\setsansfont[Mapping=tex-text]{TeX Gyre Adventor}',
r'\setmonofont[Mapping=tex-text]{Anonymous Pro}',
r'\newfontfamily\chapfont[Mapping=tex-text]{TeX Gyre Adventor}',
]
}
matplotlib.rcParams.update(pgf_with_latex)
fig, ax = plt.subplots(figsize=(5.78851, 5.78851 * (9./16.)))
rects = []
for d, i in zip(data, range(0, len(data))):
rect = ax.bar(ind + i * width, d[1], width)
rects.append(rect)
ax.set_ylabel('Cycles')
ax.set_title('Future timings', fontsize=11, weight='bold')
ax.set_xticks(ind + ((N_experiments - 1) * width) / 2.0)
ax.set_xticklabels((
'\\texttt{hpx::make\_ready\_future}',
'\\texttt{hpx::promise}',
'\\texttt{std::promise}'), rotation=-10, ha='center')
#for tick in ax.get_xticklabels():
# tick.set_rotation(-40)
ax.legend((r for r in rects), (d[0] for d in data))
plt.tight_layout(.5)
#plt.show()
fname = '%s/future_overhead1.pgf' % os.path.join(results_base, '../figures/')
plt.savefig(fname)
fig, ax = plt.subplots(figsize=(5.78851, 5.78851 * (9./16.)))
N = len(data[0][2])
N_experiments = len(data)
ind = np.arange(N)
width = 1.0 / (N_experiments + 1.0)
rects = []
for d, i in zip(data, range(0, len(data))):
rect = ax.bar(ind + i * width, d[2], width)
rects.append(rect)
ax.set_ylabel('Cycles')
ax.set_title('Async timings', fontsize=11, weight='bold')
ax.set_xticks(ind + ((N_experiments - 1) * width) / 2.0)
ax.set_xticklabels((
'\\texttt{hpx::async}',
'\\texttt{std::async}'), rotation=-10, ha='center')
#for tick in ax.get_xticklabels():
# tick.set_rotation(-40)
ax.legend((r for r in rects), (d[0] for d in data))
plt.tight_layout(.5)
#plt.show()
fname = '%s/future_overhead2.pgf' % os.path.join(results_base, '../figures/')
plt.savefig(fname)
|
"""
数据库更新操作
更新操作用于更新数据表的的数据,以下实例将 TESTDB 表中 SEX 为 'M' 的 AGE 字段递增 1:
"""
import pymysql
# 打开数据库连接
db = pymysql.connect("localhost", "root", "", "TESTDB")
# 使用cursor()方法获取操作游标
cursor = db.cursor()
# SQL 更新语句
sql = "UPDATE EMPLOYEE SET AGE = AGE + 1 WHERE SEX = '%c'" % ('M')
try:
# 执行SQL语句
cursor.execute(sql)
# 提交到数据库执行
db.commit()
except:
# 发生错误时回滚
db.rollback()
# 关闭数据库连接
db.close()
|
"""
Dialogue text with with player responses.
- necromancer
- rat
"""
from engine.initiate import player_name
# 80 characters for reference:
#------------------------------------------------------------------------------#
# Dialogue tree for the necromancer boss.  Schema per node (int key = node id):
#   "text"    -> the NPC's line, pre-wrapped to 80 columns and shown verbatim
#   "options" -> either a list of (player_response, next_node_id) tuples, or a
#                bare int sentinel (0, 1 or 2) -- presumably an engine-level
#                outcome code (fight / fight-after-refusal / join);
#                TODO confirm against the dialogue engine in engine/.
# String-continuation lines inside option tuples stay at column 0 so the
# wrapped text renders flush-left in game.
necromancer_dialogue = {
    1: {
        "text":
        """I have to say, I am impressed. I sensed you intruding my lair and you managed
to pass through my traps and creatures up until now. Now tell me, do you wish
to die here or do you wish to be the test subject for my new experiment? It will
be painful either way!""",
        "options": [
            (f"My name is {player_name}. I've heard of your exploits against the people of\n\
Stennerden and I've come to bring justice. Duel me, necromancer!", 2),
            ("I don't care for chat, I will be the last thing you will ever see! (fight)", 21)
        ]
    },
    2: {
        "text":
        """HAH! A peasant human with your little toy against ME, a mage with years of
conjuration practice and whose life is devoted entirely towards the dark arts?!
I mirth at your feebleness!""",
        "options": [
            ################################################################################...
            ("Do not underestimate me, mage. I've passed all your traps and destroyed all your\n\
monsters and came out a better warrior than I ever was! Now, will you duel\n\
me?", 3),
            ("Who you are does not scare me at all. I've come for one thing and one thing\n\
only: your head! (fight)", 21)
        ]
    },
    3: {
        "text":
        """How very brave. I have met some of you. Though none have lived to tell the tale
of winning a duel with a necromancer. But before I end your life, I will tell
you this: Do you really know why you are here? Randolf only told you what you
needed to know.""",
        "options": [
            ################################################################################...
            ("You're really tempting me here, mage... but of course. My job here is simple.\n\
You harrass the innocent townspeople and I'm here to stop you. For good.", 4),
            ("I trust Randolf and his word. I have no quarrel with him either and a fat sum of\n\
aurels is waiting for me back in Stennerden in exchange for your head. What\n\
do you have in mind?", 5),
            ("I don't stay and chat with the likes of you. Your time ends here! (fight)", 21)
        ]
    },
    # Nodes 4 and 5 differ only in the first sentence (they answer routes 4
    # and 5 out of node 3) and both converge on node 6.
    4: {
        "text":
        '''I am not surprised by your words. Those "innocent" townspeople have caused me
nothing but agony and humiliation. They were all his believers. It was not
enough either that I had my home scorched up in flames and be sent to a pillory.
And I tell them that Randolf is a cruel and corrupt man, that the blood was
falsely placed on my hands, but to no avail.''',
        "options": [
            ("What do you mean?", 6),
            ("A change of mind, I don't care to hear a word more. Die! (fight)", 21)
        ]
    },
    5: {
        "text":
        '''I am not surprised by your words. Randolf is a manipulative liar and should be
sentenced to the gallows for what he has done to me and to his fellow
townspeople. But they were all his believers. It was not enough either that I
had my home scorched up in flames and be sent to a pillory. And I tell them that
Randolf is a cruel and corrupt man, that the blood was falsely placed on my
hands, but to no avail.''',
        "options": [
            ("What do you mean?", 6),
            ("A change of mind, I don't care to hear a word more. Die! (fight)", 21),
        ]
    },
    6: {
        "text":
        """As I have said previously, Randolf always gets his way out with bribery and
coercion. At least once a week he would set off in the evening with some young
maiden from the village and take her to his manor. And often those maidens were
unwilling as they loved their husbands. But you already know in his nature, he
still managed to submit them to his henious desires, through ways I do not wish
to imagine... But it was until one evening that-""",
        "options": [
            ("*let him continue*", 7),
            ("Stop, I don't believe any of this. You're trying to get me to side with you.", 20),
            ("You know what, I have no more time for this. Taste my steel! (fight)", 21),
        ]
    },
    7: { # Point of no return, you may only listen to his backstory from now on haha.
        "text":
        """-I came home to the sound of struggle and yelping. The front door was locked
so I ran up from the back and to my eyes I saw my poor wife, my beautiful
darling, being mishandled by that pot-bellied, dog-headed whoreson they call the
town bailiff. I yelled her name. I lunged at the bastard and we fought for a bit
until at one moment got the best of me and pulled a dagger to my wife's throat,
claiming he would kill her if I got any closer.""",
        "options": [
            ("...", 8)
        ]
    },
    # Nodes 8-13 are a linear backstory sequence: the only option ("...")
    # advances to the next node.
    8: {
        "text":
        """It was at this point I had to recollect my wits before I could do any sudden
moves. Then there was a knock on the front door. It threw him off guard and a
chance for her to try to release herself and I to intervene. She squared him in
the nose but his stagger dodged my lunge and by then, the dagger was already in
her gut.""",
        "options": [
            ("...", 9)
        ]
    },
    9: {
        "text":
        """I grabbed a candlestick from the table and whacked him across his fat skull. And
in a fit of rage, I took the knife from his loosened clasp and proceeded to
repeatedly insert it back and forth across his abdomen all while he screeched
for help. It was most unfortunate that two guards happened to come by through
the back door at this state. They saw the blade buried underneath him and a dead
woman laying beside me.""",
        "options": [
            ("...", 10)
        ]
    },
    10: {
        "text":
        """They took me in for the town's jail, scheduled for the pillory on the morrow,
and the gallows over the next. The bailiff narrowly avoided bleeding out by
hasty action from the town's surgeon, and in hindsight, I should have went for
the throat. But as for her... the Gods could not save her. No form or amount of
explanation overrode his authority as he told them I was strangling my own wife
and that he had come in from the noises to stop me, only to be attacked and
stabbed.""",
        "options": [
            ("...", 11)
        ]
    },
    11: {
        "text":
        """Pillory day. A God-awful entire day as hurls of rotten vittles, insults, and
whatever filth they brought in rained upon what was left of my dignity. Back in
the cell I was, and they only fed me whatever scraps that were left from the
platform I was on. The last day came and I was only a few hours away from having
my neck hanged up from the gallows. But then I heard this mysterious voice in my
head, telling me how he had a plan for me and that I could still save her.""",
        "options": [
            ("...", 12)
        ]
    },
    12: {
        "text":
        """She... she was my sole reason to be, and without her the ounce of light I had
left in me had been taken. I chanced upon the offer and it was by some divine
intervention that I was given the fortitude and willpower to strangle the guard
from inside of the cell, unlocked it with his keys, and sneaked out. It was only
a matter of time before others would find out, and I raced to the graveyard and
found her lifeless, cold body still on a cart waiting to be buried.""",
        "options": [
            ("...", 13)
        ]
    },
    13: {
        "text":
        """I stole her away and escaped into the northern woods. Since then the intrusion
in my head would lead me to things I never would have imagined. I delved into
the mystic arts, into necromancy, still trying to preserve her beautiful body.
For years and years, I studied immensely, still following the voice. But I had
one other thing in mind the whole time. Randolf. And I am so very close to
perfecting my plan in having revenge. And then you came along.""",
        "options": [
            ("...", 14)
        ]
    },
    14: {
        # All four responses lead to node 15; the choice is flavor only.
        "text":
        """...Heh, I see that you have been listening attentively to my backstory. And I am
quite certain you are wondering why I have not killed you yet.""",
        "options": [
            ("...", 15),
            ("Well, why are you telling me all this then?", 15),
            ("I do, you are trying to convert me to your side! You fear me!", 15),
            ("I'm still itching for a fight mage, despite what you say!", 15)
        ]
    },
    15: {
        "text":
        """I have decided during our little conversation to give you a chance. You heard
the truth about what happened in Stennerden. I can offer you more than they will
ever reward you. I have a plan for you.""",
        "options": [
            ("A plan? You mean the plan the voice in your head told you? I'm curious...", 16),
            ("This... this isn't right. Indeed, Randolf is scum, and I thank you\n\
for showing me the truth. The town is greater off without such a corrupt and\n\
incompetent man. But hear me: You cannot escape death, not even your\n\
loved ones can. That voice is corrupting you.", 17),
        ]
    },
    16: {
        "text":
        """I want you to join me. I am able to see it within you, your reliability, your
determination, your fearlessness. And perhaps your loyalty... You would make a
valuable asset. I will show you a new way of life, powers you never could have
dreamt...""",
        "options": [
            ("You are right with your words. You were wronged and you deserve more with my\n\
help. I accept your proposal. (end convo)", 18),
            ("This... this isn't right. Indeed, Randolf is scum, and I thank you\n\
for showing me the truth. The town is greater off without such a corrupt and\n\
incompetent man. But hear me: You cannot escape death, not even your\n\
loved ones can. That voice is corrupting you.", 17),
        ]
    },
    17: {
        "text":
        ################################################################################
        f"*Sigh*... Perhaps you may have misunderstood me. {player_name}, this world\n\
in all its cruelness, where people can be moreso cruel than even monsters, is\n\
not to be treaded so heedlessly. To this, one must be opportunistic. Take\n\
advantage so that you may rise against those who oppose you. I can see it in\n\
you, orphaned before you could even talk, you are alone in this world. Off to\n\
fend for yourself at an early age, you only had yourself to take care of. I can\n\
offer you a true home here. And together, I will give you a purpose. Join me.",
        "options": [
            ("I stand by my principles. You are an ill and disturbed old man and your\n\
unethical practices would do no good in this world. I will not accept. (end\n\
convo)", 19),
            ("Your... your words are right. I never would have thought of coming\n\
into the samemind with the likes of you but... you are right. You were wronged\n\
and I shall help you rise up in this dishonorable world. I accept. (end convo)", 18)
        ]
    },
    18: { # Player chooses to join villain's side.
        "text":
        """Excellent. You chose reason and potential over petty, mortal affairs. My name is
Benswald Nightcaster. I foresee a marvelous future ahead of us. Now, come with
me...""",
        "options": 2
    },
    19: { # Player rejects villain's proposal, commence final battle.
        "text":
        """Alas, it is a shame you were not able to see reason. You would still make a
valuable asset however... BY USING YOUR BLOOD!""",
        "options": 1
    },
    20: {
        "text":
        """I assure you I am truthfully telling you how it is, how I was wronged, and why I
chose this lifestyle. Now, shall I continue?""",
        "options": [
            ("Go on...", 7),
            ("This is all bogus. I don't want to hear any further. (fight)", 21)
        ]
    },
    21: { # Player fights villain immediately without hearing backstory.
        "text":
        """Then so be it, YOU WON'T LEAVE HERE ALIVE!""",
        "options": 0
    },
}
#------------------------------------------------------------------------------#
# Dialogue tree for Gil the rat NPC.  Each int key is a node id; "text" is the
# NPC line, "options" is a list of (player_response, next_node_id) tuples or a
# bare int sentinel (0 / 1) -- presumably an engine-level outcome code
# (end conversation / fight); TODO confirm against the dialogue engine.
gay_rat_dialogue = {
    1: {
        "text": "Greetings, I'm Gil the gay rat. And who are you? :3",
        "options": [
            ("Wait, you're a talking rat? How?", 2),
            (f"My name is {player_name}, what are you doing here?", 3),
            ("Wait, you're gay? I must slay you! (fight)", 6),
        ]
    },
    2: {
        "text":
        """Why yes I am, unlike my fellow brothers and sisters. The necromancer here made
me who I am. Say, what brings you here?""",
        "options": [
            ("I'm looking for the necromancer, I heard from folks he's based around here. Know\n\
where he is?", 4),
            ("Just exploring around this place, looking for adventure wherever I go.", 5),
            ("Enough chatting, time to die! (fight)", 6),
        ]
    },
    3: {
        "text":
        """Oh, I live here as the necromancer's pet. I wasn't like this before you know, I
just sort of... became me all of a sudden. He's quite an intelligent man too.
But he doesn't need me right now so I'm just going around for scraps, mating
with other gay rats, you know, stuff.""",
        "options": [
            ("Speaking of the necromancer, know of his whereabouts?", 4),
            ("Alright, I've had enough of this nonesense. (fight)", 6),
        ]
    },
    4: {
        "text":
        """Yes, he lives right over there to the north and I believe he's performing some
experiments in his chamber currently. Why do you ask?""",
        "options": [
            ("I've come to bring justice to the people of Stennerden and kill this madman!", 6),
            ("Oh, I'm a friend of his, just need a little word with him, is all. (lie)", 5),
        ]
    },
    5: { # Peaceful exit.
        "text": "Well alright then. I'd better get going.",
        "options": 0
    },
    6: { # Player attacks the rat.
        "text": "I will defecate all over your dead body! >:(",
        "options": 1
    },
}
|
#include <stdio.h>
#include "include/sonoff-s20.h"
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "esp_system.h"
#include "esp_spi_flash.h"
/* Entry point for the ESP8266 application: prints chip/flash information,
 * counts down ten seconds, then soft-restarts the chip (boot-loop demo). */
void app_main()
{
    printf("Hello world!\n");

    /* Print chip information */
    esp_chip_info_t chip_info;
    esp_chip_info(&chip_info);
    printf("This is ESP8266 chip with %d CPU cores, WiFi, ",
           chip_info.cores);
    printf("silicon revision %d, ", chip_info.revision);
    /* spi_flash_get_chip_size() returns bytes; report it in MB and say
     * whether the flash is embedded in the package or an external part. */
    printf("%dMB %s flash\n", spi_flash_get_chip_size() / (1024 * 1024),
           (chip_info.features & CHIP_FEATURE_EMB_FLASH) ? "embedded" : "external");

    /* vTaskDelay() takes ticks: 1000 ms / portTICK_PERIOD_MS == one second. */
    for (int i = 10; i >= 0; i--) {
        printf("Restarting in %d seconds...\n", i);
        vTaskDelay(1000 / portTICK_PERIOD_MS);
    }
    printf("Restarting now.\n");
    fflush(stdout);  /* flush the countdown to the UART before resetting */
    esp_restart();
}
|
/*
** $Id: lapi.c,v 2.259 2016/02/29 14:27:14 roberto Exp $
** Lua API
** See Copyright Notice in lua.h
*/
#define lapi_c
#define LUA_CORE
#include "lprefix.h"
#include <stdarg.h>
#include <string.h>
#include "lua.h"
#include "lapi.h"
#include "ldebug.h"
#include "ldo.h"
#include "lfunc.h"
#include "lgc.h"
#include "lmem.h"
#include "lobject.h"
#include "lstate.h"
#include "lstring.h"
#include "ltable.h"
#include "ltm.h"
#include "lundump.h"
#include "lvm.h"
/* identification string embedded in binaries that link this Lua core */
const char lua_ident[] =
  "$LuaVersion: " LUA_COPYRIGHT " $"
  "$LuaAuthors: " LUA_AUTHORS " $";

/* value at a non-valid index */
#define NONVALIDVALUE  cast(TValue *, luaO_nilobject)

/* corresponding test */
#define isvalid(o)  ((o) != luaO_nilobject)

/* test for pseudo index (registry or upvalue indices) */
#define ispseudo(i)  ((i) <= LUA_REGISTRYINDEX)

/* test for upvalue (indices below the registry index) */
#define isupvalue(i)  ((i) < LUA_REGISTRYINDEX)

/* test for valid but not pseudo index (i.e. a real stack slot) */
#define isstackindex(i, o)  (isvalid(o) && !ispseudo(i))

#define api_checkvalidindex(l,o)  api_check(l, isvalid(o), "invalid index")

#define api_checkstackindex(l, i, o)  \
  api_check(l, isstackindex(i, o), "index not in the stack")
/*
** Convert an acceptable API index into a TValue pointer.  Positive indices
** address slots above the current function; negative non-pseudo indices
** count back from the top; LUA_REGISTRYINDEX yields the registry; anything
** below that addresses an upvalue of the running C closure.  Returns
** NONVALIDVALUE for acceptable-but-empty slots.
*/
static TValue *index2addr (lua_State *L, int idx) {
  CallInfo *ci = L->ci;
  if (idx > 0) {
    TValue *o = ci->func + idx;
    api_check(L, idx <= ci->top - (ci->func + 1), "unacceptable index");
    if (o >= L->top) return NONVALIDVALUE;
    else return o;
  }
  else if (!ispseudo(idx)) {  /* negative index (counts back from the top) */
    api_check(L, idx != 0 && -idx <= L->top - (ci->func + 1), "invalid index");
    return L->top + idx;
  }
  else if (idx == LUA_REGISTRYINDEX)
    return &G(L)->l_registry;
  else {  /* upvalues */
    idx = LUA_REGISTRYINDEX - idx;  /* 1-based upvalue number */
    api_check(L, idx <= MAXUPVAL + 1, "upvalue index too large");
    if (ttislcf(ci->func))  /* light C function? */
      return NONVALIDVALUE;  /* it has no upvalues */
    else {
      CClosure *func = clCvalue(ci->func);
      return (idx <= func->nupvalues) ? &func->upvalue[idx-1] : NONVALIDVALUE;
    }
  }
}
/*
** to be called by 'lua_checkstack' in protected mode, to grow stack
** capturing memory errors
*/
/* helper for lua_checkstack: grows the stack inside luaD_rawrunprotected
** so a memory error is captured instead of propagating */
static void growstack (lua_State *L, void *ud) {
  int size = *(int *)ud;  /* 'ud' points at the requested extra-slot count */
  luaD_growstack(L, size);
}
/*
** Ensure the stack has room for at least 'n' extra slots.  Returns 1 on
** success, 0 if growing would exceed LUAI_MAXSTACK or allocation failed.
** On success the current frame's top is raised so the slots are usable.
*/
LUA_API int lua_checkstack (lua_State *L, int n) {
  int res;
  CallInfo *ci = L->ci;
  lua_lock(L);
  api_check(L, n >= 0, "negative 'n'");
  if (L->stack_last - L->top > n)  /* stack large enough? */
    res = 1;  /* yes; check is OK */
  else {  /* no; need to grow stack */
    int inuse = cast_int(L->top - L->stack) + EXTRA_STACK;
    if (inuse > LUAI_MAXSTACK - n)  /* can grow without overflow? */
      res = 0;  /* no */
    else  /* try to grow stack (protected: may raise a memory error) */
      res = (luaD_rawrunprotected(L, &growstack, &n) == LUA_OK);
  }
  if (res && ci->top < L->top + n)
    ci->top = L->top + n;  /* adjust frame top */
  lua_unlock(L);
  return res;
}
/*
** Pop 'n' values from thread 'from' and push them onto thread 'to'.
** Both threads must belong to the same global state.
*/
LUA_API void lua_xmove (lua_State *from, lua_State *to, int n) {
  int i;
  if (from == to) return;  /* nothing to move within the same thread */
  lua_lock(to);
  api_checknelems(from, n);
  api_check(from, G(from) == G(to), "moving among independent states");
  api_check(from, to->ci->top - to->top >= n, "stack overflow");
  from->top -= n;
  for (i = 0; i < n; i++) {
    setobj2s(to, to->top, from->top + i);
    to->top++;  /* stack already checked by previous 'api_check' */
  }
  lua_unlock(to);
}
/* Install a new panic handler in the global state; returns the old one. */
LUA_API lua_CFunction lua_atpanic (lua_State *L, lua_CFunction panicf) {
  lua_CFunction old;
  lua_lock(L);
  old = G(L)->panic;
  G(L)->panic = panicf;
  lua_unlock(L);
  return old;
}
/* Return the address of the version number.  With a NULL state (no global
** state available) a static copy is returned instead. */
LUA_API const lua_Number *lua_version (lua_State *L) {
  static const lua_Number version = LUA_VERSION_NUM;
  if (L == NULL) return &version;
  else return G(L)->version;
}
/*
** basic stack manipulation
*/
/*
** convert an acceptable stack index into an absolute index
*/
/* Convert an acceptable index into an absolute (positive) index;
** pseudo indices are returned unchanged. */
LUA_API int lua_absindex (lua_State *L, int idx) {
  return (idx > 0 || ispseudo(idx))
         ? idx
         : cast_int(L->top - L->ci->func) + idx;
}
/* Number of elements on the stack above the current function. */
LUA_API int lua_gettop (lua_State *L) {
  return cast_int(L->top - (L->ci->func + 1));
}
/*
** Set the stack top.  A non-negative 'idx' is an absolute element count
** above the current function (growing fills new slots with nil); a
** negative 'idx' pops elements, counting back from the current top.
*/
LUA_API void lua_settop (lua_State *L, int idx) {
  StkId func = L->ci->func;
  lua_lock(L);
  /* positive idx moves the top forward (nil-filling); negative moves it back */
  if (idx >= 0) {
    api_check(L, idx <= L->stack_last - (func + 1), "new top too large");
    while (L->top < (func + 1) + idx)
      setnilvalue(L->top++);  /* fill newly exposed slots with nil */
    L->top = (func + 1) + idx;
  }
  else {
    api_check(L, -(idx+1) <= (L->top - (func + 1)), "invalid new top");
    L->top += idx+1;  /* 'subtract' index (index is negative) */
  }
  lua_unlock(L);
}
/*
** Reverse the stack segment from 'from' to 'to'
** (auxiliary to 'lua_rotate')
*/
/* Reverse the stack segment [from, to] in place (auxiliary to lua_rotate). */
static void reverse (lua_State *L, StkId from, StkId to) {
  for (; from < to; from++, to--) {
    TValue temp;  /* three-way swap of the two end slots */
    setobj(L, &temp, from);
    setobjs2s(L, from, to);
    setobj2s(L, to, &temp);
  }
}
/*
** Let x = AB, where A is a prefix of length 'n'. Then,
** rotate x n == BA. But BA == (A^r . B^r)^r.
*/
/*
** Rotate the stack segment from 'idx' to the top by 'n' positions.
** Let x = AB, where A is a prefix of length 'n'.  Then rotate x n == BA;
** implemented via three reversals since BA == (A^r . B^r)^r.
*/
LUA_API void lua_rotate (lua_State *L, int idx, int n) {
  StkId p, t, m;
  lua_lock(L);
  t = L->top - 1;  /* end of stack segment being rotated */
  p = index2addr(L, idx);  /* start of segment */
  api_checkstackindex(L, idx, p);
  api_check(L, (n >= 0 ? n : -n) <= (t - p + 1), "invalid 'n'");
  m = (n >= 0 ? t - n : p - n - 1);  /* end of prefix */
  reverse(L, p, m);  /* reverse the prefix with length 'n' */
  reverse(L, m + 1, t);  /* reverse the suffix */
  reverse(L, p, t);  /* reverse the entire segment */
  lua_unlock(L);
}
/* Copy the value at 'fromidx' into the slot at 'toidx' (which must be a
** valid index), emitting a GC barrier when writing into a C-closure upvalue. */
LUA_API void lua_copy (lua_State *L, int fromidx, int toidx) {
  TValue *fr, *to;
  lua_lock(L);
  fr = index2addr(L, fromidx);
  to = index2addr(L, toidx);
  api_checkvalidindex(L, to);
  setobj(L, to, fr);
  if (isupvalue(toidx))  /* function upvalue? */
    luaC_barrier(L, clCvalue(L->ci->func), fr);
  /* LUA_REGISTRYINDEX does not need gc barrier
     (collector revisits it before finishing collection) */
  lua_unlock(L);
}
/* Push a copy of the value at 'idx' onto the top of the stack. */
LUA_API void lua_pushvalue (lua_State *L, int idx) {
  lua_lock(L);
  setobj2s(L, L->top, index2addr(L, idx));
  api_incr_top(L);
  lua_unlock(L);
}
/*
** access functions (stack -> C)
*/
/* Basic type tag of the value at 'idx'; LUA_TNONE for a non-valid index. */
LUA_API int lua_type (lua_State *L, int idx) {
  StkId o = index2addr(L, idx);
  return (isvalid(o) ? ttnov(o) : LUA_TNONE);
}
/* Name of the type with tag 't' (the state is unused). */
LUA_API const char *lua_typename (lua_State *L, int t) {
  UNUSED(L);
  api_check(L, LUA_TNONE <= t && t < LUA_NUMTAGS, "invalid tag");
  return ttypename(t);
}
/* True if the value at 'idx' is a C function (light or closure). */
LUA_API int lua_iscfunction (lua_State *L, int idx) {
  StkId o = index2addr(L, idx);
  return (ttislcf(o) || (ttisCclosure(o)));
}
/* True if the value at 'idx' is a number with an integer representation. */
LUA_API int lua_isinteger (lua_State *L, int idx) {
  StkId o = index2addr(L, idx);
  return ttisinteger(o);
}
/* True if the value at 'idx' is a number or convertible to one. */
LUA_API int lua_isnumber (lua_State *L, int idx) {
  lua_Number n;
  const TValue *o = index2addr(L, idx);
  return tonumber(o, &n);
}
/* True if the value at 'idx' is a string or convertible to one. */
LUA_API int lua_isstring (lua_State *L, int idx) {
  const TValue *o = index2addr(L, idx);
  return (ttisstring(o) || cvt2str(o));
}
/* True if the value at 'idx' is a full or light userdata. */
LUA_API int lua_isuserdata (lua_State *L, int idx) {
  const TValue *o = index2addr(L, idx);
  return (ttisfulluserdata(o) || ttislightuserdata(o));
}
/* Primitive (metamethod-free) equality of two stack values; 0 if either
** index is non-valid. */
LUA_API int lua_rawequal (lua_State *L, int index1, int index2) {
  StkId o1 = index2addr(L, index1);
  StkId o2 = index2addr(L, index2);
  return (isvalid(o1) && isvalid(o2)) ? luaV_rawequalobj(o1, o2) : 0;
}
/*
** Perform arithmetic/bitwise operation 'op' on the one or two values at the
** top of the stack, replacing them with the single result (may invoke
** metamethods via luaO_arith).
*/
LUA_API void lua_arith (lua_State *L, int op) {
  lua_lock(L);
  if (op != LUA_OPUNM && op != LUA_OPBNOT)
    api_checknelems(L, 2);  /* all other operations expect two operands */
  else {  /* for unary operations, add fake 2nd operand */
    api_checknelems(L, 1);
    setobjs2s(L, L->top, L->top - 1);
    api_incr_top(L);
  }
  /* first operand at top - 2, second at top - 1; result go to top - 2 */
  luaO_arith(L, op, L->top - 2, L->top - 1, L->top - 2);
  L->top--;  /* remove second operand */
  lua_unlock(L);
}
/*
** Compare two stack values with operator 'op' (LUA_OPEQ/OPLT/OPLE); may
** call comparison metamethods.  Returns 0 if either index is non-valid.
*/
LUA_API int lua_compare (lua_State *L, int index1, int index2, int op) {
  StkId o1, o2;
  int i = 0;
  lua_lock(L);  /* may call tag method */
  o1 = index2addr(L, index1);
  o2 = index2addr(L, index2);
  if (isvalid(o1) && isvalid(o2)) {
    switch (op) {
      case LUA_OPEQ: i = luaV_equalobj(L, o1, o2); break;
      case LUA_OPLT: i = luaV_lessthan(L, o1, o2); break;
      case LUA_OPLE: i = luaV_lessequal(L, o1, o2); break;
      default: api_check(L, 0, "invalid option");
    }
  }
  lua_unlock(L);
  return i;
}
/* Convert string 's' to a number and push it; returns the number of bytes
** consumed (including the terminating '\0'), or 0 if 's' is not a number
** (in which case nothing is pushed). */
LUA_API size_t lua_stringtonumber (lua_State *L, const char *s) {
  size_t sz = luaO_str2num(s, L->top);
  if (sz != 0)
    api_incr_top(L);
  return sz;
}
/* Value at 'idx' as a lua_Number; '*pisnum' (if given) reports whether the
** conversion succeeded.  Returns 0 on failure. */
LUA_API lua_Number lua_tonumberx (lua_State *L, int idx, int *pisnum) {
  lua_Number n;
  const TValue *o = index2addr(L, idx);
  int isnum = tonumber(o, &n);
  if (!isnum)
    n = 0;  /* call to 'tonumber' may change 'n' even if it fails */
  if (pisnum) *pisnum = isnum;
  return n;
}
/* Value at 'idx' as a lua_Integer; '*pisnum' (if given) reports whether the
** conversion succeeded.  Returns 0 on failure. */
LUA_API lua_Integer lua_tointegerx (lua_State *L, int idx, int *pisnum) {
  lua_Integer res;
  const TValue *o = index2addr(L, idx);
  int isnum = tointeger(o, &res);
  if (!isnum)
    res = 0;  /* call to 'tointeger' may change 'res' even if it fails */
  if (pisnum) *pisnum = isnum;
  return res;
}
/* Truth value of the slot at 'idx' (only nil and false are false). */
LUA_API int lua_toboolean (lua_State *L, int idx) {
  const TValue *o = index2addr(L, idx);
  return !l_isfalse(o);
}
/*
** Value at 'idx' as a C string (with length in '*len' if given).  A number
** is converted IN PLACE to a string on the stack; returns NULL when the
** value is neither a string nor convertible.
*/
LUA_API const char *lua_tolstring (lua_State *L, int idx, size_t *len) {
  StkId o = index2addr(L, idx);
  if (!ttisstring(o)) {
    if (!cvt2str(o)) {  /* not convertible? */
      if (len != NULL) *len = 0;
      return NULL;
    }
    lua_lock(L);  /* 'luaO_tostring' may create a new string */
    luaO_tostring(L, o);
    luaC_checkGC(L);
    o = index2addr(L, idx);  /* previous call may reallocate the stack */
    lua_unlock(L);
  }
  if (len != NULL)
    *len = vslen(o);
  return svalue(o);
}
/* Raw length (no metamethods) of the value at 'idx': string length,
** userdata size, or table border; 0 for other types. */
LUA_API size_t lua_rawlen (lua_State *L, int idx) {
  StkId o = index2addr(L, idx);
  switch (ttype(o)) {
    case LUA_TSHRSTR: return tsvalue(o)->shrlen;
    case LUA_TLNGSTR: return tsvalue(o)->u.lnglen;
    case LUA_TUSERDATA: return uvalue(o)->len;
    case LUA_TTABLE: return luaH_getn(hvalue(o));
    default: return 0;
  }
}
/* The C function stored at 'idx' (light function or C closure), or NULL. */
LUA_API lua_CFunction lua_tocfunction (lua_State *L, int idx) {
  StkId o = index2addr(L, idx);
  if (ttislcf(o)) return fvalue(o);
  else if (ttisCclosure(o))
    return clCvalue(o)->f;
  else return NULL;  /* not a C function */
}
/* Payload pointer of a full or light userdata at 'idx'; NULL otherwise. */
LUA_API void *lua_touserdata (lua_State *L, int idx) {
  StkId o = index2addr(L, idx);
  switch (ttnov(o)) {
    case LUA_TUSERDATA: return getudatamem(uvalue(o));
    case LUA_TLIGHTUSERDATA: return pvalue(o);
    default: return NULL;
  }
}
/* The thread stored at 'idx', or NULL if the value is not a thread. */
LUA_API lua_State *lua_tothread (lua_State *L, int idx) {
  StkId o = index2addr(L, idx);
  return (!ttisthread(o)) ? NULL : thvalue(o);
}
/* Generic pointer identity of the value at 'idx' (for debugging/hashing);
** NULL for value types that have no such identity. */
LUA_API const void *lua_topointer (lua_State *L, int idx) {
  StkId o = index2addr(L, idx);
  switch (ttype(o)) {
    case LUA_TTABLE: return hvalue(o);
    case LUA_TLCL: return clLvalue(o);
    case LUA_TCCL: return clCvalue(o);
    case LUA_TLCF: return cast(void *, cast(size_t, fvalue(o)));
    case LUA_TTHREAD: return thvalue(o);
    case LUA_TUSERDATA: return getudatamem(uvalue(o));
    case LUA_TLIGHTUSERDATA: return pvalue(o);
    default: return NULL;
  }
}
/*
** push functions (C -> stack)
*/
/* Push nil onto the stack. */
LUA_API void lua_pushnil (lua_State *L) {
  lua_lock(L);
  setnilvalue(L->top);
  api_incr_top(L);
  lua_unlock(L);
}
/* Push a float 'n' onto the stack. */
LUA_API void lua_pushnumber (lua_State *L, lua_Number n) {
  lua_lock(L);
  setfltvalue(L->top, n);
  api_incr_top(L);
  lua_unlock(L);
}
/* Push an integer 'n' onto the stack. */
LUA_API void lua_pushinteger (lua_State *L, lua_Integer n) {
  lua_lock(L);
  setivalue(L->top, n);
  api_incr_top(L);
  lua_unlock(L);
}
/*
** Pushes on the stack a string with given length. Avoid using 's' when
** 'len' == 0 (as 's' can be NULL in that case), due to later use of
** 'memcmp' and 'memcpy'.  Returns a pointer to Lua's internal copy.
*/
LUA_API const char *lua_pushlstring (lua_State *L, const char *s, size_t len) {
  TString *ts;
  lua_lock(L);
  ts = (len == 0) ? luaS_new(L, "") : luaS_newlstr(L, s, len);
  setsvalue2s(L, L->top, ts);
  api_incr_top(L);
  luaC_checkGC(L);
  lua_unlock(L);
  return getstr(ts);
}
/* Push the zero-terminated string 's' (nil if 's' is NULL); returns a
** pointer to Lua's internal copy of the string (or NULL). */
LUA_API const char *lua_pushstring (lua_State *L, const char *s) {
  lua_lock(L);
  if (s == NULL)
    setnilvalue(L->top);
  else {
    TString *ts;
    ts = luaS_new(L, s);
    setsvalue2s(L, L->top, ts);
    s = getstr(ts);  /* internal copy's address */
  }
  api_incr_top(L);
  luaC_checkGC(L);
  lua_unlock(L);
  return s;
}
/* Push a formatted string built from 'fmt' and the va_list 'argp';
** returns a pointer to the pushed string. */
LUA_API const char *lua_pushvfstring (lua_State *L, const char *fmt,
                                      va_list argp) {
  const char *ret;
  lua_lock(L);
  ret = luaO_pushvfstring(L, fmt, argp);
  luaC_checkGC(L);
  lua_unlock(L);
  return ret;
}
/* Varargs counterpart of lua_pushvfstring: push a formatted string and
** return a pointer to it. */
LUA_API const char *lua_pushfstring (lua_State *L, const char *fmt, ...) {
  const char *ret;
  va_list argp;
  lua_lock(L);
  va_start(argp, fmt);
  ret = luaO_pushvfstring(L, fmt, argp);
  va_end(argp);
  luaC_checkGC(L);
  lua_unlock(L);
  return ret;
}
/*
** Push C function 'fn' with 'n' upvalues popped from the stack.  With
** n == 0 a light C function is pushed instead of allocating a closure.
*/
LUA_API void lua_pushcclosure (lua_State *L, lua_CFunction fn, int n) {
  lua_lock(L);
  if (n == 0) {
    setfvalue(L->top, fn);  /* light C function: no allocation needed */
  }
  else {
    CClosure *cl;
    api_checknelems(L, n);
    api_check(L, n <= MAXUPVAL, "upvalue index too large");
    cl = luaF_newCclosure(L, n);
    cl->f = fn;
    L->top -= n;  /* pop the upvalues into the new closure */
    while (n--) {
      setobj2n(L, &cl->upvalue[n], L->top + n);
      /* does not need barrier because closure is white */
    }
    setclCvalue(L, L->top, cl);
  }
  api_incr_top(L);
  luaC_checkGC(L);
  lua_unlock(L);
}
/* Push boolean 'b' onto the stack. */
LUA_API void lua_pushboolean (lua_State *L, int b) {
  lua_lock(L);
  setbvalue(L->top, (b != 0));  /* ensure that true is 1 */
  api_incr_top(L);
  lua_unlock(L);
}
/* Push the raw pointer 'p' as a light userdata. */
LUA_API void lua_pushlightuserdata (lua_State *L, void *p) {
  lua_lock(L);
  setpvalue(L->top, p);
  api_incr_top(L);
  lua_unlock(L);
}
/* Push the thread 'L' itself; returns 1 iff 'L' is the main thread. */
LUA_API int lua_pushthread (lua_State *L) {
  lua_lock(L);
  setthvalue(L, L->top, L);
  api_incr_top(L);
  lua_unlock(L);
  return (G(L)->mainthread == L);
}
/*
** get functions (Lua -> stack)
*/
/*
** Push t[k] (where 'k' is a C string) and return its type tag.  Tries the
** fast raw path first; otherwise pushes the key and lets luaV_finishget
** run the full (metamethod-aware) lookup.  Caller must hold the lock;
** this function unlocks.
*/
static int auxgetstr (lua_State *L, const TValue *t, const char *k) {
  const TValue *slot;
  TString *str = luaS_new(L, k);
  if (luaV_fastget(L, t, str, slot, luaH_getstr)) {
    setobj2s(L, L->top, slot);
    api_incr_top(L);
  }
  else {
    setsvalue2s(L, L->top, str);  /* push key so finishget can use it */
    api_incr_top(L);
    luaV_finishget(L, t, L->top - 1, L->top - 1, slot);
  }
  lua_unlock(L);
  return ttnov(L->top - 1);
}
/* Push the global 'name' (looked up in the globals table stored in the
** registry) and return its type tag. */
LUA_API int lua_getglobal (lua_State *L, const char *name) {
  Table *reg = hvalue(&G(L)->l_registry);
  lua_lock(L);  /* unlock done in 'auxgetstr' */
  return auxgetstr(L, luaH_getint(reg, LUA_RIDX_GLOBALS), name);
}
/* Replace the key at the top with t[key] (may call metamethods);
** returns the type tag of the result. */
LUA_API int lua_gettable (lua_State *L, int idx) {
  StkId t;
  lua_lock(L);
  t = index2addr(L, idx);
  luaV_gettable(L, t, L->top - 1, L->top - 1);
  lua_unlock(L);
  return ttnov(L->top - 1);
}
/* Push t[k] for the table-like value at 'idx'; returns the result's tag. */
LUA_API int lua_getfield (lua_State *L, int idx, const char *k) {
  lua_lock(L);  /* unlock done in 'auxgetstr' */
  return auxgetstr(L, index2addr(L, idx), k);
}
/* Push t[n] for the value at 'idx' (fast raw path, falling back to the
** metamethod-aware lookup); returns the result's type tag. */
LUA_API int lua_geti (lua_State *L, int idx, lua_Integer n) {
  StkId t;
  const TValue *slot;
  lua_lock(L);
  t = index2addr(L, idx);
  if (luaV_fastget(L, t, n, slot, luaH_getint)) {
    setobj2s(L, L->top, slot);
    api_incr_top(L);
  }
  else {
    setivalue(L->top, n);  /* push key so finishget can use it */
    api_incr_top(L);
    luaV_finishget(L, t, L->top - 1, L->top - 1, slot);
  }
  lua_unlock(L);
  return ttnov(L->top - 1);
}
/* Raw (no metamethods) lookup: replace the key at the top with t[key];
** 'idx' must hold a table.  Returns the result's type tag. */
LUA_API int lua_rawget (lua_State *L, int idx) {
  StkId t;
  lua_lock(L);
  t = index2addr(L, idx);
  api_check(L, ttistable(t), "table expected");
  setobj2s(L, L->top - 1, luaH_get(hvalue(t), L->top - 1));
  lua_unlock(L);
  return ttnov(L->top - 1);
}
/* Raw integer-key lookup: push t[n]; 'idx' must hold a table. */
LUA_API int lua_rawgeti (lua_State *L, int idx, lua_Integer n) {
  StkId t;
  lua_lock(L);
  t = index2addr(L, idx);
  api_check(L, ttistable(t), "table expected");
  setobj2s(L, L->top, luaH_getint(hvalue(t), n));
  api_incr_top(L);
  lua_unlock(L);
  return ttnov(L->top - 1);
}
/* Raw lookup keyed by the light-userdata pointer 'p': push t[p];
** 'idx' must hold a table. */
LUA_API int lua_rawgetp (lua_State *L, int idx, const void *p) {
  StkId t;
  TValue k;
  lua_lock(L);
  t = index2addr(L, idx);
  api_check(L, ttistable(t), "table expected");
  setpvalue(&k, cast(void *, p));  /* wrap pointer as a light-userdata key */
  setobj2s(L, L->top, luaH_get(hvalue(t), &k));
  api_incr_top(L);
  lua_unlock(L);
  return ttnov(L->top - 1);
}
/* Push a new table, pre-sized for 'narray' array slots and 'nrec' hash
** slots. */
LUA_API void lua_createtable (lua_State *L, int narray, int nrec) {
  Table *t;
  lua_lock(L);
  t = luaH_new(L);
  sethvalue(L, L->top, t);  /* anchor table on the stack before resizing */
  api_incr_top(L);
  if (narray > 0 || nrec > 0)
    luaH_resize(L, t, narray, nrec);
  luaC_checkGC(L);
  lua_unlock(L);
}
/*
** Push the metatable of the value at 'objindex' and return 1; returns 0
** (pushing nothing) when it has none.  Tables and full userdata carry
** their own metatable; other types share a per-type one from the global
** state.
*/
LUA_API int lua_getmetatable (lua_State *L, int objindex) {
  const TValue *obj;
  Table *mt;
  int res = 0;
  lua_lock(L);
  obj = index2addr(L, objindex);
  switch (ttnov(obj)) {
    case LUA_TTABLE:
      mt = hvalue(obj)->metatable;
      break;
    case LUA_TUSERDATA:
      mt = uvalue(obj)->metatable;
      break;
    default:
      mt = G(L)->mt[ttnov(obj)];  /* per-type metatable */
      break;
  }
  if (mt != NULL) {
    sethvalue(L, L->top, mt);
    api_incr_top(L);
    res = 1;
  }
  lua_unlock(L);
  return res;
}
/*
** Push the user value associated with the full userdata at 'idx';
** return its type tag.
*/
LUA_API int lua_getuservalue (lua_State *L, int idx) {
  StkId o;
  lua_lock(L);
  o = index2addr(L, idx);
  api_check(L, ttisfulluserdata(o), "full userdata expected");
  getuservalue(L, uvalue(o), L->top);
  api_incr_top(L);
  lua_unlock(L);
  return ttnov(L->top - 1);
}
/*
** set functions (stack -> Lua)
*/
/*
** t[k] = value at the top of the stack (where 'k' is a string)
*/
/*
** t[k] = value at the top of the stack (where 'k' is a string).
** Fast path sets the field in place and pops the value; slow path
** pushes 'k' as a TValue and runs the full '__newindex' protocol,
** then pops both value and key. Unlocks on behalf of its caller.
*/
static void auxsetstr (lua_State *L, const TValue *t, const char *k) {
  const TValue *slot;
  TString *str = luaS_new(L, k);
  api_checknelems(L, 1);
  if (luaV_fastset(L, t, str, slot, luaH_getstr, L->top - 1))
    L->top--;  /* pop value */
  else {
    setsvalue2s(L, L->top, str);  /* push 'str' (to make it a TValue) */
    api_incr_top(L);
    luaV_finishset(L, t, L->top - 1, L->top - 2, slot);
    L->top -= 2;  /* pop value and key */
  }
  lua_unlock(L);  /* lock done by caller */
}
/*
** Pop the value at the top and assign it to global 'name'.
*/
LUA_API void lua_setglobal (lua_State *L, const char *name) {
  Table *reg = hvalue(&G(L)->l_registry);
  lua_lock(L);  /* unlock done in 'auxsetstr' */
  auxsetstr(L, luaH_getint(reg, LUA_RIDX_GLOBALS), name);
}
/*
** t[k] = v where 't' is at 'idx', 'k' at top-1 and 'v' at the top.
** May invoke '__newindex' metamethods; pops key and value.
*/
LUA_API void lua_settable (lua_State *L, int idx) {
  StkId t;
  lua_lock(L);
  api_checknelems(L, 2);
  t = index2addr(L, idx);
  luaV_settable(L, t, L->top - 2, L->top - 1);
  L->top -= 2;  /* pop index and value */
  lua_unlock(L);
}
/*
** t[k] = value at the top (string key 'k'); pops the value.
*/
LUA_API void lua_setfield (lua_State *L, int idx, const char *k) {
  lua_lock(L);  /* unlock done in 'auxsetstr' */
  auxsetstr(L, index2addr(L, idx), k);
}
/*
** t[n] = value at the top (integer key); pops the value.
** Fast path assigns in place; slow path pushes the key and runs the
** full '__newindex' protocol via luaV_finishset.
*/
LUA_API void lua_seti (lua_State *L, int idx, lua_Integer n) {
  StkId t;
  const TValue *slot;
  lua_lock(L);
  api_checknelems(L, 1);
  t = index2addr(L, idx);
  if (luaV_fastset(L, t, n, slot, luaH_getint, L->top - 1))
    L->top--;  /* pop value */
  else {
    setivalue(L->top, n);  /* push key for the slow path */
    api_incr_top(L);
    luaV_finishset(L, t, L->top - 1, L->top - 2, slot);
    L->top -= 2;  /* pop value and key */
  }
  lua_unlock(L);
}
/*
** Raw t[k] = v (no metamethods); 't' at 'idx' must be a table,
** key at top-1, value at top; pops both. Invalidates the table's
** metamethod cache and runs the GC back-barrier on the table.
*/
LUA_API void lua_rawset (lua_State *L, int idx) {
  StkId o;
  TValue *slot;
  lua_lock(L);
  api_checknelems(L, 2);
  o = index2addr(L, idx);
  api_check(L, ttistable(o), "table expected");
  slot = luaH_set(L, hvalue(o), L->top - 2);
  setobj2t(L, slot, L->top - 1);
  invalidateTMcache(hvalue(o));  /* key might shadow a metamethod name */
  luaC_barrierback(L, hvalue(o), L->top-1);
  L->top -= 2;
  lua_unlock(L);
}
/*
** Raw t[n] = value at the top (integer key, no metamethods);
** pops the value and runs the GC back-barrier on the table.
*/
LUA_API void lua_rawseti (lua_State *L, int idx, lua_Integer n) {
  StkId o;
  lua_lock(L);
  api_checknelems(L, 1);
  o = index2addr(L, idx);
  api_check(L, ttistable(o), "table expected");
  luaH_setint(L, hvalue(o), n, L->top - 1);
  luaC_barrierback(L, hvalue(o), L->top-1);
  L->top--;
  lua_unlock(L);
}
/*
** Raw t[p] = value at the top, with a light-userdata (pointer) key;
** pops the value and runs the GC back-barrier on the table.
*/
LUA_API void lua_rawsetp (lua_State *L, int idx, const void *p) {
  StkId o;
  TValue k, *slot;
  lua_lock(L);
  api_checknelems(L, 1);
  o = index2addr(L, idx);
  api_check(L, ttistable(o), "table expected");
  setpvalue(&k, cast(void *, p));  /* wrap pointer as a light-userdata key */
  slot = luaH_set(L, hvalue(o), &k);
  setobj2t(L, slot, L->top - 1);
  luaC_barrierback(L, hvalue(o), L->top - 1);
  L->top--;
  lua_unlock(L);
}
/*
** Pop a table (or nil) from the top and set it as the metatable of the
** value at 'objindex'. Tables/userdata get a per-object metatable (with
** GC barrier and finalizer check); other types set the shared per-type
** metatable. Always returns 1.
*/
LUA_API int lua_setmetatable (lua_State *L, int objindex) {
  TValue *obj;
  Table *mt;
  lua_lock(L);
  api_checknelems(L, 1);
  obj = index2addr(L, objindex);
  if (ttisnil(L->top - 1))
    mt = NULL;  /* nil clears the metatable */
  else {
    api_check(L, ttistable(L->top - 1), "table expected");
    mt = hvalue(L->top - 1);
  }
  switch (ttnov(obj)) {
    case LUA_TTABLE: {
      hvalue(obj)->metatable = mt;
      if (mt) {
        luaC_objbarrier(L, gcvalue(obj), mt);
        luaC_checkfinalizer(L, gcvalue(obj), mt);  /* '__gc' may now apply */
      }
      break;
    }
    case LUA_TUSERDATA: {
      uvalue(obj)->metatable = mt;
      if (mt) {
        luaC_objbarrier(L, uvalue(obj), mt);
        luaC_checkfinalizer(L, gcvalue(obj), mt);
      }
      break;
    }
    default: {
      G(L)->mt[ttnov(obj)] = mt;  /* shared per-type metatable */
      break;
    }
  }
  L->top--;  /* pop the metatable argument */
  lua_unlock(L);
  return 1;
}
/*
** Pop the value at the top and store it as the user value of the full
** userdata at 'idx'; runs the GC barrier on the userdata.
*/
LUA_API void lua_setuservalue (lua_State *L, int idx) {
  StkId o;
  lua_lock(L);
  api_checknelems(L, 1);
  o = index2addr(L, idx);
  api_check(L, ttisfulluserdata(o), "full userdata expected");
  setuservalue(L, uvalue(o), L->top - 1);
  luaC_barrier(L, gcvalue(o), L->top - 1);
  L->top--;
  lua_unlock(L);
}
/*
** 'load' and 'call' functions (run Lua code)
*/
/* assert that the current stack frame has room for 'nr' results after
** consuming 'na' arguments (LUA_MULTRET is always accepted) */
#define checkresults(L,na,nr) \
  api_check(L, (nr) == LUA_MULTRET || (L->ci->top - L->top >= (nr) - (na)), \
  "results from function overflow current stack size")
/*
** Call a function with 'nargs' arguments expecting 'nresults' results.
** If a continuation 'k' is given and the thread is yieldable (nny == 0),
** the call may yield and resume through 'k' with context 'ctx';
** otherwise a plain non-yieldable call is performed.
*/
LUA_API void lua_callk (lua_State *L, int nargs, int nresults,
                        lua_KContext ctx, lua_KFunction k) {
  StkId func;
  lua_lock(L);
  api_check(L, k == NULL || !isLua(L->ci),
    "cannot use continuations inside hooks");
  api_checknelems(L, nargs+1);
  api_check(L, L->status == LUA_OK, "cannot do calls on non-normal thread");
  checkresults(L, nargs, nresults);
  func = L->top - (nargs+1);  /* function sits below its arguments */
  if (k != NULL && L->nny == 0) {  /* need to prepare continuation? */
    L->ci->u.c.k = k;  /* save continuation */
    L->ci->u.c.ctx = ctx;  /* save context */
    luaD_call(L, func, nresults);  /* do the call */
  }
  else  /* no continuation or no yieldable */
    luaD_callnoyield(L, func, nresults);  /* just do the call */
  adjustresults(L, nresults);
  lua_unlock(L);
}
/*
** Execute a protected call.
*/
/* data packet passed through luaD_pcall to 'f_call' */
struct CallS {  /* data to 'f_call' */
  StkId func;     /* function to call (stack slot) */
  int nresults;   /* expected number of results */
};

/* trampoline run in protected mode: performs the actual call */
static void f_call (lua_State *L, void *ud) {
  struct CallS *c = cast(struct CallS *, ud);
  luaD_callnoyield(L, c->func, c->nresults);
}
/*
** Protected call with optional continuation. 'errfunc' is a stack index
** of a message handler (0 for none). Without a continuation (or when the
** thread cannot yield) a conventional protected call is made; otherwise
** the continuation state is saved in the CallInfo so that an error or a
** yield inside the call can be recovered by 'resume'.
** Returns the call status (LUA_OK on success).
*/
LUA_API int lua_pcallk (lua_State *L, int nargs, int nresults, int errfunc,
                        lua_KContext ctx, lua_KFunction k) {
  struct CallS c;
  int status;
  ptrdiff_t func;
  lua_lock(L);
  api_check(L, k == NULL || !isLua(L->ci),
    "cannot use continuations inside hooks");
  api_checknelems(L, nargs+1);
  api_check(L, L->status == LUA_OK, "cannot do calls on non-normal thread");
  checkresults(L, nargs, nresults);
  if (errfunc == 0)
    func = 0;
  else {
    StkId o = index2addr(L, errfunc);
    api_checkstackindex(L, errfunc, o);
    func = savestack(L, o);  /* save handler as an offset (stack may move) */
  }
  c.func = L->top - (nargs+1);  /* function to be called */
  if (k == NULL || L->nny > 0) {  /* no continuation or no yieldable? */
    c.nresults = nresults;  /* do a 'conventional' protected call */
    status = luaD_pcall(L, f_call, &c, savestack(L, c.func), func);
  }
  else {  /* prepare continuation (call is already protected by 'resume') */
    CallInfo *ci = L->ci;
    ci->u.c.k = k;  /* save continuation */
    ci->u.c.ctx = ctx;  /* save context */
    /* save information for error recovery */
    ci->extra = savestack(L, c.func);
    ci->u.c.old_errfunc = L->errfunc;
    L->errfunc = func;
    setoah(ci->callstatus, L->allowhook);  /* save value of 'allowhook' */
    ci->callstatus |= CIST_YPCALL;  /* function can do error recovery */
    luaD_call(L, c.func, nresults);  /* do the call */
    ci->callstatus &= ~CIST_YPCALL;
    L->errfunc = ci->u.c.old_errfunc;
    status = LUA_OK;  /* if it is here, there were no errors */
  }
  adjustresults(L, nresults);
  lua_unlock(L);
  return status;
}
/*
** Load (but do not run) a chunk read through 'reader'. On success the
** compiled closure is left on the stack; if it has at least one upvalue,
** its first upvalue (conventionally _ENV) is set to the globals table.
** Returns the parser status.
*/
LUA_API int lua_load (lua_State *L, lua_Reader reader, void *data,
                      const char *chunkname, const char *mode) {
  ZIO z;
  int status;
  lua_lock(L);
  if (!chunkname) chunkname = "?";  /* default chunk name */
  luaZ_init(L, &z, reader, data);
  status = luaD_protectedparser(L, &z, chunkname, mode);
  if (status == LUA_OK) {  /* no errors? */
    LClosure *f = clLvalue(L->top - 1);  /* get newly created function */
    if (f->nupvalues >= 1) {  /* does it have an upvalue? */
      /* get global table from registry */
      Table *reg = hvalue(&G(L)->l_registry);
      const TValue *gt = luaH_getint(reg, LUA_RIDX_GLOBALS);
      /* set global table as 1st upvalue of 'f' (may be LUA_ENV) */
      setobj(L, f->upvals[0]->v, gt);
      luaC_upvalbarrier(L, f->upvals[0]);  /* GC barrier for the upvalue write */
    }
  }
  lua_unlock(L);
  return status;
}
/*
** Dump the Lua function at the top of the stack as a binary chunk via
** 'writer'. Returns the dump status, or 1 if the value is not a Lua
** function (C functions cannot be dumped). The function is not popped.
*/
LUA_API int lua_dump (lua_State *L, lua_Writer writer, void *data, int strip) {
  int status;
  TValue *o;
  lua_lock(L);
  api_checknelems(L, 1);
  o = L->top - 1;
  if (isLfunction(o))
    status = luaU_dump(L, getproto(o), writer, data, strip);
  else
    status = 1;  /* not a Lua function: cannot dump */
  lua_unlock(L);
  return status;
}
/* Return the status of thread 'L' (LUA_OK, LUA_YIELD, or an error code). */
LUA_API int lua_status (lua_State *L) {
  return L->status;
}
/*
** Garbage-collection function
*/
/*
** Control the garbage collector. 'what' selects the operation and
** 'data' is its argument; the return value depends on the operation
** (-1 for an invalid option).
*/
LUA_API int lua_gc (lua_State *L, int what, int data) {
  int res = 0;
  global_State *g;
  lua_lock(L);
  g = G(L);
  switch (what) {
    case LUA_GCSTOP: {
      g->gcrunning = 0;
      break;
    }
    case LUA_GCRESTART: {
      luaE_setdebt(g, 0);  /* reset debt so the GC restarts cleanly */
      g->gcrunning = 1;
      break;
    }
    case LUA_GCCOLLECT: {
      luaC_fullgc(L, 0);  /* full collection cycle */
      break;
    }
    case LUA_GCCOUNT: {
      /* GC values are expressed in Kbytes: #bytes/2^10 */
      res = cast_int(gettotalbytes(g) >> 10);
      break;
    }
    case LUA_GCCOUNTB: {
      res = cast_int(gettotalbytes(g) & 0x3ff);  /* remainder in bytes */
      break;
    }
    case LUA_GCSTEP: {
      l_mem debt = 1;  /* =1 to signal that it did an actual step */
      lu_byte oldrunning = g->gcrunning;
      g->gcrunning = 1;  /* allow GC to run */
      if (data == 0) {
        luaE_setdebt(g, -GCSTEPSIZE);  /* to do a "small" step */
        luaC_step(L);
      }
      else {  /* add 'data' to total debt */
        debt = cast(l_mem, data) * 1024 + g->GCdebt;
        luaE_setdebt(g, debt);
        luaC_checkGC(L);
      }
      g->gcrunning = oldrunning;  /* restore previous state */
      if (debt > 0 && g->gcstate == GCSpause)  /* end of cycle? */
        res = 1;  /* signal it */
      break;
    }
    case LUA_GCSETPAUSE: {
      res = g->gcpause;  /* return previous value */
      g->gcpause = data;
      break;
    }
    case LUA_GCSETSTEPMUL: {
      res = g->gcstepmul;  /* return previous value */
      if (data < 40) data = 40;  /* avoid ridiculous low values (and 0) */
      g->gcstepmul = data;
      break;
    }
    case LUA_GCISRUNNING: {
      res = g->gcrunning;
      break;
    }
    default: res = -1;  /* invalid option */
  }
  lua_unlock(L);
  return res;
}
/*
** miscellaneous functions
*/
/*
** Raise a Lua error using the value at the top of the stack as the
** error object. Never returns (luaG_errormsg does a long jump).
*/
LUA_API int lua_error (lua_State *L) {
  lua_lock(L);
  api_checknelems(L, 1);
  luaG_errormsg(L);
  /* code unreachable; will unlock when control actually leaves the kernel */
  return 0;  /* to avoid warnings */
}
/*
** Table traversal step: pops a key from the top and pushes the next
** key/value pair of the table at 'idx'. Returns 0 (popping the key)
** when there are no more elements.
*/
LUA_API int lua_next (lua_State *L, int idx) {
  StkId t;
  int more;
  lua_lock(L);
  t = index2addr(L, idx);
  api_check(L, ttistable(t), "table expected");
  more = luaH_next(L, hvalue(t), L->top - 1);
  if (more) {
    api_incr_top(L);  /* value was pushed above the new key */
  }
  else  /* no more elements */
    L->top -= 1;  /* remove key */
  lua_unlock(L);
  return more;
}
/*
** Concatenate the 'n' values at the top of the stack into one value.
** n == 0 pushes the empty string; n == 1 leaves the value untouched.
*/
LUA_API void lua_concat (lua_State *L, int n) {
  lua_lock(L);
  api_checknelems(L, n);
  if (n >= 2) {
    luaV_concat(L, n);  /* may invoke '__concat' metamethods */
  }
  else if (n == 0) {  /* push empty string */
    setsvalue2s(L, L->top, luaS_newlstr(L, "", 0));
    api_incr_top(L);
  }
  /* else n == 1; nothing to do */
  luaC_checkGC(L);
  lua_unlock(L);
}
/*
** Push the length of the value at 'idx' (may invoke '__len').
*/
LUA_API void lua_len (lua_State *L, int idx) {
  StkId t;
  lua_lock(L);
  t = index2addr(L, idx);
  luaV_objlen(L, L->top, t);
  api_incr_top(L);
  lua_unlock(L);
}
/*
** Return the allocator function of 'L'; if 'ud' is non-NULL, the
** opaque allocator userdata is stored through it.
*/
LUA_API lua_Alloc lua_getallocf (lua_State *L, void **ud) {
  lua_Alloc f;
  lua_lock(L);
  if (ud) *ud = G(L)->ud;
  f = G(L)->frealloc;
  lua_unlock(L);
  return f;
}
/*
** Replace the allocator function of 'L' (and its userdata).
*/
LUA_API void lua_setallocf (lua_State *L, lua_Alloc f, void *ud) {
  lua_lock(L);
  G(L)->ud = ud;
  G(L)->frealloc = f;
  lua_unlock(L);
}
/*
** Allocate a new full userdata of 'size' bytes, push it, and return
** a pointer to its memory block.
*/
LUA_API void *lua_newuserdata (lua_State *L, size_t size) {
  Udata *u;
  lua_lock(L);
  u = luaS_newudata(L, size);
  setuvalue(L, L->top, u);  /* anchor on the stack for the GC */
  api_incr_top(L);
  luaC_checkGC(L);
  lua_unlock(L);
  return getudatamem(u);
}
/*
** Locate upvalue 'n' (1-based) of the closure at 'fi'. Stores its value
** slot through 'val'; for C closures also reports the owner through
** 'owner', for Lua closures the UpVal through 'uv'. Returns the upvalue
** name ("" for C closures, which have no names), or NULL when 'fi' is
** not a closure or 'n' is out of range.
*/
static const char *aux_upvalue (StkId fi, int n, TValue **val,
                                CClosure **owner, UpVal **uv) {
  switch (ttype(fi)) {
    case LUA_TCCL: {  /* C closure */
      CClosure *f = clCvalue(fi);
      if (!(1 <= n && n <= f->nupvalues)) return NULL;
      *val = &f->upvalue[n-1];
      if (owner) *owner = f;
      return "";  /* C upvalues have no name */
    }
    case LUA_TLCL: {  /* Lua closure */
      LClosure *f = clLvalue(fi);
      TString *name;
      Proto *p = f->p;
      if (!(1 <= n && n <= p->sizeupvalues)) return NULL;
      *val = f->upvals[n-1]->v;
      if (uv) *uv = f->upvals[n - 1];
      name = p->upvalues[n-1].name;  /* may be stripped from debug info */
      return (name == NULL) ? "(*no name)" : getstr(name);
    }
    default: return NULL;  /* not a closure */
  }
}
/*
** Push the value of upvalue 'n' of the function at 'funcindex' and
** return its name; returns NULL (pushing nothing) when it does not exist.
*/
LUA_API const char *lua_getupvalue (lua_State *L, int funcindex, int n) {
  const char *name;
  TValue *val = NULL;  /* to avoid warnings */
  lua_lock(L);
  name = aux_upvalue(index2addr(L, funcindex), n, &val, NULL, NULL);
  if (name) {
    setobj2s(L, L->top, val);
    api_incr_top(L);
  }
  lua_unlock(L);
  return name;
}
/*
** Pop the value at the top and assign it to upvalue 'n' of the function
** at 'funcindex'; return the upvalue name, or NULL (leaving the value
** on the stack) when it does not exist. Runs the appropriate GC barrier
** for C closures ('owner') or Lua upvalues ('uv').
*/
LUA_API const char *lua_setupvalue (lua_State *L, int funcindex, int n) {
  const char *name;
  TValue *val = NULL;  /* to avoid warnings */
  CClosure *owner = NULL;
  UpVal *uv = NULL;
  StkId fi;
  lua_lock(L);
  fi = index2addr(L, funcindex);
  api_checknelems(L, 1);
  name = aux_upvalue(fi, n, &val, &owner, &uv);
  if (name) {
    L->top--;  /* pop value */
    setobj(L, val, L->top);
    if (owner) { luaC_barrier(L, owner, L->top); }
    else if (uv) { luaC_upvalbarrier(L, uv); }
  }
  lua_unlock(L);
  return name;
}
/*
** Return a pointer to the UpVal* slot 'n' of the Lua closure at 'fidx';
** optionally report the closure itself through 'pf'. Checks that the
** value is a Lua closure and that 'n' is in range.
*/
static UpVal **getupvalref (lua_State *L, int fidx, int n, LClosure **pf) {
  LClosure *f;
  StkId fi = index2addr(L, fidx);
  api_check(L, ttisLclosure(fi), "Lua function expected");
  f = clLvalue(fi);
  api_check(L, (1 <= n && n <= f->p->sizeupvalues), "invalid upvalue index");
  if (pf) *pf = f;
  return &f->upvals[n - 1];  /* get its upvalue pointer */
}
/*
** Return a unique identifier for upvalue 'n' of the closure at 'fidx':
** the UpVal object for Lua closures, the value slot for C closures.
** Two upvalues share state iff their ids are equal.
*/
LUA_API void *lua_upvalueid (lua_State *L, int fidx, int n) {
  StkId fi = index2addr(L, fidx);
  switch (ttype(fi)) {
    case LUA_TLCL: {  /* lua closure */
      return *getupvalref(L, fidx, n, NULL);
    }
    case LUA_TCCL: {  /* C closure */
      CClosure *f = clCvalue(fi);
      api_check(L, 1 <= n && n <= f->nupvalues, "invalid upvalue index");
      return &f->upvalue[n - 1];
    }
    default: {
      api_check(L, 0, "closure expected");
      return NULL;
    }
  }
}
/*
** Make upvalue 'n1' of the closure at 'fidx1' share the UpVal of
** upvalue 'n2' of the closure at 'fidx2'. Drops one reference on the
** old UpVal, bumps the new one's refcount, and runs the GC barrier.
*/
LUA_API void lua_upvaluejoin (lua_State *L, int fidx1, int n1,
                              int fidx2, int n2) {
  LClosure *f1;
  UpVal **up1 = getupvalref(L, fidx1, n1, &f1);
  UpVal **up2 = getupvalref(L, fidx2, n2, NULL);
  luaC_upvdeccount(L, *up1);  /* release old upvalue */
  *up1 = *up2;
  (*up1)->refcount++;
  if (upisopen(*up1)) (*up1)->u.open.touched = 1;
  luaC_upvalbarrier(L, *up1);
}
|
const cheerio = require('cheerio');
const axios = require('axios');
exports.covid19 = () => axios.get('https://www.worldometers.info/coronavirus').then(response => {
let veri = [];
const $ = cheerio.load(response.data);
const table = $('#main_table_countries_today');
const tbodies = table.find('tbody');
const tbody_countries = tbodies[0];
const table_rows_countries = $(tbody_countries).find('tr').toArray();
table_rows_countries.forEach(row => {
const columns = $(row).find('td');
rank = parseInt($(columns[0]).text().replace(/[,+]/g,'')) || 0;
country = $(columns[1]).text().trim().toUpperCase();
totalCases = parseInt($(columns[2]).text().replace(/[,+]/g,'')) || 0;
newCases = parseInt($(columns[3]).text().replace(/[,+]/g,'')) || 0;
totalDeaths = parseInt($(columns[4]).text().replace(/[,+]/g,'')) || 0;
newDeaths = parseInt($(columns[5]).text().replace(/[,+]/g,'')) || 0;
totalRecovered = parseInt($(columns[6]).text().replace(/[,+]/g,'')) || 0;
activeCases = parseInt($(columns[8]).text().replace(/[,+]/g,'')) || 0;
criticalCase = parseInt($(columns[9]).text().replace(/[,+]/g,'')) || 0;
totalTests = parseInt($(columns[12]).text().replace(/[,+]/g,'')) || 0;
veri.push({rank, country,totalCases,newCases,totalDeaths,newDeaths,totalRecovered,activeCases,criticalCase,totalTests});
});
return veri;
});
exports.country = (country) => {
return this.covid19().then(stats => {
return stats.find(entry => entry.country == country.toUpperCase());
});
}
|
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import numpy as np
from tests.common.tensorio import compare_tensor
from akg.utils import kernel_exec as utils
from tests.common.test_op import argmin
from akg.ops.math import argmax
from tests.common.base import get_rtol_atol
from akg.utils.dsl_create import get_reduce_out_shape
from tests.common.gen_random import random_gaussian
from akg import tvm
from tests.common.test_utils import compute_blockdim
def common_run(shape, dtype, axis, attrs, method):
    """Build and (optionally) launch an argmin/argmax kernel and compare with numpy.

    Args:
        shape: input tensor shape (list/tuple of ints).
        dtype: input dtype name, one of float16/float32/int32/int8.
        axis: reduction axis passed to the op.
        attrs: build attributes dict (may be None); 'dynamic' and 'tuning'
            keys change the build/launch path.
        method: "min" for argmin, "max" for argmax.

    Returns:
        In tuning mode: (mod, exp_output, args) when tuning is active,
        otherwise just mod. In normal mode: (input, actual, expected, pass_flag).

    Raises:
        RuntimeError: if method is neither "min" nor "max".

    Fix: string comparisons previously used `is` (identity), which relies on
    CPython string interning and emits a SyntaxWarning on modern Pythons;
    they now use `==`.
    """
    if attrs is None:
        attrs = {}
    attrs["enable_algebra_simplify"] = True
    if attrs.get("dynamic"):
        # dynamic build: symbolic dims I0, I1, ...
        build_shape = []
        for i in range(len(shape)):
            build_shape.append(tvm.var("I" + str(i)))
    else:
        build_shape = shape
    if 'tuning' in attrs:
        t = attrs.get("tuning", False)
        kernel_name = attrs.get("kernel_name", False)
        if method == "min":
            mod = utils.op_build_test(argmin.argmin, [build_shape], [dtype], op_attrs=[axis], kernel_name=kernel_name,
                                      attrs=attrs, tuning=t)
        elif method == "max":
            mod = utils.op_build_test(argmax.argmax, [build_shape], [dtype], op_attrs=[axis], kernel_name=kernel_name,
                                      attrs=attrs, tuning=t)
        else:
            raise RuntimeError("not support " + method)
        if t:
            args, exp_output, input_data = gen_data(axis, dtype, method, shape)
            return mod, exp_output, args
        else:
            return mod
    else:
        if method == "min":
            mod = utils.op_build_test(argmin.argmin, [build_shape], [dtype], op_attrs=[axis], kernel_name="argmin",
                                      attrs=attrs)
        elif method == "max":
            mod = utils.op_build_test(argmax.argmax, [build_shape], [dtype], op_attrs=[axis], kernel_name="argmax",
                                      attrs=attrs)
        else:
            raise RuntimeError("not support " + method)
        args, exp_output, input_data = gen_data(axis, dtype, method, shape)
        if attrs.get("dynamic"):
            # dynamic launch needs the concrete dims plus a block_dim argument
            for i in range(len(shape)):
                args.append(shape[i])
            block_dim = compute_blockdim(shape)
            args.append(block_dim)
        res = utils.mod_launch(mod, args, outputs=(1,), expect=exp_output)
        acu_output = res.astype("int32")
        rtol, atol = get_rtol_atol("argmax_min_common", dtype)
        return input_data, acu_output, exp_output, compare_tensor(acu_output, exp_output, rtol=rtol, atol=atol, equal_nan=True)
def gen_data(axis, dtype, method, shape):
    """Generate a random input and the numpy-computed expected arg-reduction.

    Args:
        axis: reduction axis.
        dtype: input dtype name (float16/float32/int32/int8).
        method: "min" for np.argmin, "max" for np.argmax.
        shape: input tensor shape.

    Returns:
        (args, exp_output, input): args is [input, output] where output is an
        int32 buffer pre-filled with NaN-casted garbage, exp_output the numpy
        reference indices, and input the generated tensor.

    Raises:
        RuntimeError: if method is neither "min" nor "max".

    Fix: `method is "min"` compared string identity; now uses `==`.
    """
    support_list = {"float16": np.float16, "float32": np.float32, "int32": np.int32, "int8": np.int8}
    input_data = random_gaussian(shape, miu=1, sigma=100).astype(support_list[dtype])
    if dtype == "float32":
        # round to integers so fp16/fp32 ties cannot make argmin/argmax ambiguous
        input_data = np.around(input_data, 0)
    if method == "min":
        exp_output = np.argmin(input_data, axis=axis)
    elif method == "max":
        exp_output = np.argmax(input_data, axis=axis)
    else:
        raise RuntimeError("not support " + method)
    out_shape = get_reduce_out_shape(shape, axis=axis)
    output = np.full(out_shape, np.nan, np.int32)
    args = [input_data, output]
    return args, exp_output, input_data
|
# Modified by Microsoft Corporation.
# Licensed under the MIT license.
import pydash as ps
import torch.nn as nn
from convlab.agent.net import net_util
from convlab.agent.net.base import Net
from convlab.lib import util
class RecurrentNet(Net, nn.Module):
    '''
    Class for generating arbitrary sized recurrent neural networks which take a sequence of states as input.

    Assumes that a single input example is organized into a 3D tensor
    batch_size x seq_len x state_dim
    The entire model consists of three parts:
    1. self.fc_model (state processing)
    2. self.rnn_model
    3. self.model_tails

    e.g. net_spec
    "net": {
        "type": "RecurrentNet",
        "shared": true,
        "cell_type": "GRU",
        "fc_hid_layers": [],
        "hid_layers_activation": "relu",
        "out_layer_activation": null,
        "rnn_hidden_size": 32,
        "rnn_num_layers": 1,
        "bidirectional": False,
        "seq_len": 4,
        "init_fn": "xavier_uniform_",
        "clip_grad_val": 1.0,
        "loss_spec": {
            "name": "MSELoss"
        },
        "optim_spec": {
            "name": "Adam",
            "lr": 0.01
        },
        "lr_scheduler_spec": {
            "name": "StepLR",
            "step_size": 30,
            "gamma": 0.1
        },
        "update_type": "replace",
        "update_frequency": 1,
        "polyak_coef": 0.9,
        "gpu": true
    }
    '''

    def __init__(self, net_spec, in_dim, out_dim):
        '''
        Build the fc body, the RNN, and the output tail(s) from net_spec.

        net_spec:
        cell_type: any of RNN, LSTM, GRU
        fc_hid_layers: list of fc layers preceeding the RNN layers
        hid_layers_activation: activation function for the fc hidden layers
        out_layer_activation: activation function for the output layer, same shape as out_dim
        rnn_hidden_size: rnn hidden_size
        rnn_num_layers: number of recurrent layers
        bidirectional: if RNN should be bidirectional
        seq_len: length of the history of being passed to the net
        init_fn: weight initialization function
        clip_grad_val: clip gradient norm if value is not None
        loss_spec: measure of error between model predictions and correct outputs
        optim_spec: parameters for initializing the optimizer
        lr_scheduler_spec: Pytorch optim.lr_scheduler
        update_type: method to update network weights: 'replace' or 'polyak'
        update_frequency: how many total timesteps per update
        polyak_coef: ratio of polyak weight update
        gpu: whether to train using a GPU. Note this will only work if a GPU is available, othewise setting gpu=True does nothing
        '''
        # nn.Module must be initialized before Net so module registration works
        nn.Module.__init__(self)
        super().__init__(net_spec, in_dim, out_dim)
        # set defaults; net_spec entries below override them
        util.set_attr(self, dict(
            out_layer_activation=None,
            cell_type='GRU',
            rnn_num_layers=1,
            bidirectional=False,
            init_fn=None,
            clip_grad_val=None,
            loss_spec={'name': 'MSELoss'},
            optim_spec={'name': 'Adam'},
            lr_scheduler_spec=None,
            update_type='replace',
            update_frequency=1,
            polyak_coef=0.0,
            gpu=False,
        ))
        util.set_attr(self, self.net_spec, [
            'cell_type',
            'fc_hid_layers',
            'hid_layers_activation',
            'out_layer_activation',
            'rnn_hidden_size',
            'rnn_num_layers',
            'bidirectional',
            'seq_len',
            'init_fn',
            'clip_grad_val',
            'loss_spec',
            'optim_spec',
            'lr_scheduler_spec',
            'update_type',
            'update_frequency',
            'polyak_coef',
            'gpu',
        ])
        # restore proper in_dim from env stacked state_dim (stack_len, *raw_state_dim)
        self.in_dim = in_dim[1:] if len(in_dim) > 2 else in_dim[1]
        # fc body: state processing model (skipped when no fc layers configured)
        if ps.is_empty(self.fc_hid_layers):
            self.rnn_input_dim = self.in_dim
        else:
            fc_dims = [self.in_dim] + self.fc_hid_layers
            self.fc_model = net_util.build_fc_model(fc_dims, self.hid_layers_activation)
            self.rnn_input_dim = fc_dims[-1]
        # RNN model: nn.RNN / nn.LSTM / nn.GRU picked by cell_type
        self.rnn_model = getattr(nn, net_util.get_nn_name(self.cell_type))(
            input_size=self.rnn_input_dim,
            hidden_size=self.rnn_hidden_size,
            num_layers=self.rnn_num_layers,
            batch_first=True, bidirectional=self.bidirectional)
        # tails. avoid list for single-tail for compute speed
        if ps.is_integer(self.out_dim):
            self.model_tail = net_util.build_fc_model([self.rnn_hidden_size, self.out_dim], self.out_layer_activation)
        else:
            # multi-tail: one fc head per output dim (e.g. actor-critic shared body)
            if not ps.is_list(self.out_layer_activation):
                self.out_layer_activation = [self.out_layer_activation] * len(out_dim)
            assert len(self.out_layer_activation) == len(self.out_dim)
            tails = []
            for out_d, out_activ in zip(self.out_dim, self.out_layer_activation):
                tail = net_util.build_fc_model([self.rnn_hidden_size, out_d], out_activ)
                tails.append(tail)
            self.model_tails = nn.ModuleList(tails)
        net_util.init_layers(self, self.init_fn)
        self.loss_fn = net_util.get_loss_fn(self, self.loss_spec)
        self.to(self.device)
        self.train()

    def forward(self, x):
        '''The feedforward step. Input is batch_size x seq_len x state_dim'''
        # Unstack input to (batch_size x seq_len) x state_dim in order to transform all state inputs
        batch_size = x.size(0)
        x = x.view(-1, self.in_dim)
        if hasattr(self, 'fc_model'):
            x = self.fc_model(x)
        # Restack to batch_size x seq_len x rnn_input_dim
        x = x.view(-1, self.seq_len, self.rnn_input_dim)
        if self.cell_type == 'LSTM':
            _output, (h_n, c_n) = self.rnn_model(x)  # LSTM also returns cell state
        else:
            _output, h_n = self.rnn_model(x)
        hid_x = h_n[-1]  # get final time-layer
        # return tensor if single tail, else list of tail tensors
        if hasattr(self, 'model_tails'):
            outs = []
            for model_tail in self.model_tails:
                outs.append(model_tail(hid_x))
            return outs
        else:
            return self.model_tail(hid_x)
|