hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
acf8c06e8ff70cebe0c1c9c8fc95a724f943b47b | 49,344 | py | Python | test/functional/test_framework/test_framework.py | lulworm/Cashhandv2 | 974ff588a4217d85b7ed5d307a0fa749b660453e | [
"MIT"
] | 41 | 2015-02-25T20:29:32.000Z | 2021-05-10T11:54:32.000Z | test/functional/test_framework/test_framework.py | lulworm/Cashhandv2 | 974ff588a4217d85b7ed5d307a0fa749b660453e | [
"MIT"
] | 42 | 2017-09-12T03:09:56.000Z | 2021-01-27T18:43:28.000Z | test/functional/test_framework/test_framework.py | lulworm/Cashhandv2 | 974ff588a4217d85b7ed5d307a0fa749b660453e | [
"MIT"
] | 37 | 2015-10-02T19:33:04.000Z | 2021-04-21T22:26:23.000Z | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Base class for RPC testing."""
from enum import Enum
from io import BytesIO
import logging
import optparse
import os
import pdb
import shutil
from struct import pack
import sys
import tempfile
import time
from . import coverage
from .address import wif_to_privkey
from .authproxy import JSONRPCException
from .blocktools import (
create_block,
create_coinbase_pos,
create_transaction_from_outpoint,
is_zerocoin,
)
from .key import CECKey
from .messages import (
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxOut,
hash256,
)
from .script import (
CScript,
OP_CHECKSIG,
)
from .test_node import TestNode
from .util import (
MAX_NODES,
PortSeed,
assert_equal,
assert_greater_than,
check_json_precision,
connect_nodes_bi,
connect_nodes_clique,
disconnect_nodes,
DEFAULT_FEE,
get_datadir_path,
hex_str_to_bytes,
bytes_to_hex_str,
initialize_datadir,
set_node_times,
SPORK_ACTIVATION_TIME,
SPORK_DEACTIVATION_TIME,
sync_blocks,
sync_mempools,
vZC_DENOMS,
)
class TestStatus(Enum):
    """Overall outcome of a functional-test run, tracked by PivxTestFramework.main()."""
    PASSED = 1
    FAILED = 2
    SKIPPED = 3
# Process exit codes reported back to the test runner (see main()).
TEST_EXIT_PASSED = 0
TEST_EXIT_FAILED = 1
TEST_EXIT_SKIPPED = 77  # conventional exit code signalling a skipped test
# Prefix for auto-created temporary datadir roots (tempfile.mkdtemp in main()).
TMPDIR_PREFIX = "pivx_func_test_"
class PivxTestFramework():
"""Base class for a pivx test script.
Individual pivx test scripts should subclass this class and override the set_test_params() and run_test() methods.
Individual tests can also override the following methods to customize the test setup:
- add_options()
- setup_chain()
- setup_network()
- setup_nodes()
The __init__() and main() methods should not be overridden.
This class also contains various public and private helper methods."""
    def __init__(self):
        """Sets test framework defaults. Do not override this method. Instead, override the set_test_params() method"""
        self.setup_clean_chain = False  # True -> start from empty datadirs instead of the cached chain
        self.nodes = []                 # TestNode objects, populated by add_nodes()
        self.mocktime = 0               # 0 means "use real time"; see enable_mocktime()
        self.supports_cli = False       # set True in subclasses that support --usecli
        # Subclass hook runs *during* construction; it must set self.num_nodes.
        self.set_test_params()
        assert hasattr(self, "num_nodes"), "Test must set self.num_nodes in set_test_params()"
    def main(self):
        """Main function. This should not be overridden by the subclass test scripts.

        Parses command-line options, sets up the temp directory and logging,
        runs setup_chain()/setup_network()/run_test(), then shuts nodes down,
        optionally cleans up, and exits with one of the TEST_EXIT_* codes.
        """
        parser = optparse.OptionParser(usage="%prog [options]")
        parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
                          help="Leave pivxds and test.* datadir on exit or error")
        parser.add_option("--noshutdown", dest="noshutdown", default=False, action="store_true",
                          help="Don't stop pivxds after the test execution")
        parser.add_option("--srcdir", dest="srcdir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__))+"/../../../src"),
                          help="Source directory containing pivxd/pivx-cli (default: %default)")
        parser.add_option("--cachedir", dest="cachedir", default=os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../../cache"),
                          help="Directory for caching pregenerated datadirs")
        parser.add_option("--tmpdir", dest="tmpdir", help="Root directory for datadirs")
        parser.add_option("-l", "--loglevel", dest="loglevel", default="INFO",
                          help="log events at this level and higher to the console. Can be set to DEBUG, INFO, WARNING, ERROR or CRITICAL. Passing --loglevel DEBUG will output all logs to console. Note that logs at all levels are always written to the test_framework.log file in the temporary test directory.")
        parser.add_option("--tracerpc", dest="trace_rpc", default=False, action="store_true",
                          help="Print out all RPC calls as they are made")
        parser.add_option("--portseed", dest="port_seed", default=os.getpid(), type='int',
                          help="The seed to use for assigning port numbers (default: current process id)")
        parser.add_option("--coveragedir", dest="coveragedir",
                          help="Write tested RPC commands into this directory")
        parser.add_option("--configfile", dest="configfile",
                          help="Location of the test framework config file")
        parser.add_option("--pdbonfailure", dest="pdbonfailure", default=False, action="store_true",
                          help="Attach a python debugger if test fails")
        parser.add_option("--usecli", dest="usecli", default=False, action="store_true",
                          help="use pivx-cli instead of RPC for all commands")
        self.add_options(parser)  # subclass hook for extra options
        (self.options, self.args) = parser.parse_args()
        PortSeed.n = self.options.port_seed
        # Make the in-tree binaries (pivxd / pivx-qt) take precedence on PATH.
        os.environ['PATH'] = self.options.srcdir + ":" + self.options.srcdir + "/qt:" + os.environ['PATH']
        check_json_precision()
        self.options.cachedir = os.path.abspath(self.options.cachedir)
        # Set up temp directory and start logging
        if self.options.tmpdir:
            self.options.tmpdir = os.path.abspath(self.options.tmpdir)
            os.makedirs(self.options.tmpdir, exist_ok=False)
        else:
            self.options.tmpdir = tempfile.mkdtemp(prefix=TMPDIR_PREFIX)
        self._start_logging()
        success = TestStatus.FAILED
        try:
            if self.options.usecli and not self.supports_cli:
                # NOTE(review): SkipTest is not defined or imported in this file;
                # presumably provided elsewhere (it must expose a .message attr,
                # see the handler below) -- confirm, else this is a NameError.
                raise SkipTest("--usecli specified but test does not support using CLI")
            self.setup_chain()
            self.setup_network()
            time.sleep(5)
            self.run_test()
            success = TestStatus.PASSED
        except JSONRPCException as e:
            self.log.exception("JSONRPC error")
        except SkipTest as e:
            self.log.warning("Test Skipped: %s" % e.message)
            success = TestStatus.SKIPPED
        except AssertionError as e:
            self.log.exception("Assertion failed")
        except KeyError as e:
            self.log.exception("Key error")
        except Exception as e:
            self.log.exception("Unexpected exception caught during testing")
        except KeyboardInterrupt as e:
            self.log.warning("Exiting after keyboard interrupt")
        if success == TestStatus.FAILED and self.options.pdbonfailure:
            print("Testcase failed. Attaching python debugger. Enter ? for help")
            pdb.set_trace()
        if not self.options.noshutdown:
            self.log.info("Stopping nodes")
            if self.nodes:
                self.stop_nodes()
        else:
            # Leave daemons running; mark nodes so they don't kill them on GC.
            for node in self.nodes:
                node.cleanup_on_exit = False
            self.log.info("Note: pivxds were not stopped and may still be running")
        # Keep the datadirs on failure (or when asked) for post-mortem debugging.
        if not self.options.nocleanup and not self.options.noshutdown and success != TestStatus.FAILED:
            self.log.info("Cleaning up")
            shutil.rmtree(self.options.tmpdir)
        else:
            self.log.warning("Not cleaning up dir %s" % self.options.tmpdir)
        if success == TestStatus.PASSED:
            self.log.info("Tests successful")
            exit_code = TEST_EXIT_PASSED
        elif success == TestStatus.SKIPPED:
            self.log.info("Test skipped")
            exit_code = TEST_EXIT_SKIPPED
        else:
            self.log.error("Test failed. Test logging available at %s/test_framework.log", self.options.tmpdir)
            self.log.error("Hint: Call {} '{}' to consolidate all logs".format(os.path.normpath(os.path.dirname(os.path.realpath(__file__)) + "/../combine_logs.py"), self.options.tmpdir))
            exit_code = TEST_EXIT_FAILED
        logging.shutdown()
        sys.exit(exit_code)
# Methods to override in subclass test scripts.
    def set_test_params(self):
        """Tests must override this method to change default values for number of nodes, topology, etc"""
        raise NotImplementedError
    def add_options(self, parser):
        """Override this method to add command-line options to the test

        :param parser: the optparse.OptionParser built in main()
        """
        pass
def setup_chain(self):
"""Override this method to customize blockchain setup"""
self.log.info("Initializing test directory " + self.options.tmpdir)
if self.setup_clean_chain:
self._initialize_chain_clean()
else:
self._initialize_chain()
def setup_network(self):
"""Override this method to customize test network topology"""
self.setup_nodes()
# Connect the nodes as a "chain". This allows us
# to split the network between nodes 1 and 2 to get
# two halves that can work on competing chains.
for i in range(self.num_nodes - 1):
connect_nodes_bi(self.nodes, i, i + 1)
self.sync_all()
def setup_nodes(self):
"""Override this method to customize test node setup"""
extra_args = None
if hasattr(self, "extra_args"):
extra_args = self.extra_args
self.add_nodes(self.num_nodes, extra_args)
self.start_nodes()
    def run_test(self):
        """Tests must override this method to define test logic"""
        raise NotImplementedError
# Public helper methods. These can be accessed by the subclass test scripts.
def add_nodes(self, num_nodes, extra_args=None, rpchost=None, timewait=None, binary=None):
"""Instantiate TestNode objects"""
if extra_args is None:
extra_args = [[]] * num_nodes
if binary is None:
binary = [None] * num_nodes
assert_equal(len(extra_args), num_nodes)
assert_equal(len(binary), num_nodes)
for i in range(num_nodes):
self.nodes.append(TestNode(i, self.options.tmpdir, extra_args[i], rpchost, timewait=timewait, binary=binary[i], stderr=None, mocktime=self.mocktime, coverage_dir=self.options.coveragedir, use_cli=self.options.usecli))
def start_node(self, i, *args, **kwargs):
"""Start a pivxd"""
node = self.nodes[i]
node.start(*args, **kwargs)
node.wait_for_rpc_connection()
time.sleep(10)
if self.options.coveragedir is not None:
coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
def start_nodes(self, extra_args=None, *args, **kwargs):
"""Start multiple pivxds"""
if extra_args is None:
extra_args = [None] * self.num_nodes
assert_equal(len(extra_args), self.num_nodes)
try:
for i, node in enumerate(self.nodes):
node.start(extra_args[i], *args, **kwargs)
for node in self.nodes:
node.wait_for_rpc_connection()
except:
# If one node failed to start, stop the others
self.stop_nodes()
raise
time.sleep(10)
if self.options.coveragedir is not None:
for node in self.nodes:
coverage.write_all_rpc_commands(self.options.coveragedir, node.rpc)
def stop_node(self, i):
"""Stop a pivxd test node"""
self.nodes[i].stop_node()
self.nodes[i].wait_until_stopped()
def stop_nodes(self):
"""Stop multiple pivxd test nodes"""
for node in self.nodes:
# Issue RPC to stop nodes
node.stop_node()
for node in self.nodes:
# Wait for nodes to stop
time.sleep(5)
node.wait_until_stopped()
def restart_node(self, i, extra_args=None):
"""Stop and start a test node"""
self.stop_node(i)
self.start_node(i, extra_args)
    def assert_start_raises_init_error(self, i, extra_args=None, expected_msg=None, *args, **kwargs):
        """Start node i expecting it to fail during initialization.

        If expected_msg is given, the daemon's stderr must contain it.
        Raises AssertionError if the node starts successfully instead.
        """
        with tempfile.SpooledTemporaryFile(max_size=2**16) as log_stderr:
            try:
                self.start_node(i, extra_args, stderr=log_stderr, *args, **kwargs)
                self.stop_node(i)
            except Exception as e:
                assert 'pivxd exited' in str(e)  # node must have shutdown
                self.nodes[i].running = False
                self.nodes[i].process = None
                if expected_msg is not None:
                    log_stderr.seek(0)
                    stderr = log_stderr.read().decode('utf-8')
                    if expected_msg not in stderr:
                        raise AssertionError("Expected error \"" + expected_msg + "\" not found in:\n" + stderr)
            else:
                # start_node succeeded: the daemon did NOT fail to initialize.
                if expected_msg is None:
                    assert_msg = "pivxd should have exited with an error"
                else:
                    assert_msg = "pivxd should have exited with expected error " + expected_msg
                raise AssertionError(assert_msg)
def wait_for_node_exit(self, i, timeout):
self.nodes[i].process.wait(timeout)
def split_network(self):
"""
Split the network of four nodes into nodes 0/1 and 2/3.
"""
disconnect_nodes(self.nodes[1], 2)
disconnect_nodes(self.nodes[2], 1)
self.sync_all([self.nodes[:2], self.nodes[2:]])
def join_network(self):
"""
Join the (previously split) network halves together.
"""
connect_nodes_bi(self.nodes, 1, 2)
self.sync_all()
def sync_all(self, node_groups=None):
if not node_groups:
node_groups = [self.nodes]
for group in node_groups:
sync_blocks(group)
sync_mempools(group)
    def enable_mocktime(self):
        """Enable mocktime for the script.
        mocktime may be needed for scripts that use the cached version of the
        blockchain. If the cached version of the blockchain is used without
        mocktime then the mempools will not sync due to IBD.
        Sets mocktime to Thursday, October 31, 2019 6:21:20 PM GMT (1572546080)
        """
        self.mocktime = 1572546080
    def disable_mocktime(self):
        """Reset mocktime to 0 so nodes follow the real system clock again."""
        self.mocktime = 0
# Private helper methods. These should not be accessed by the subclass test scripts.
def _start_logging(self):
# Add logger and logging handlers
self.log = logging.getLogger('TestFramework')
self.log.setLevel(logging.DEBUG)
# Create file handler to log all messages
fh = logging.FileHandler(self.options.tmpdir + '/test_framework.log')
fh.setLevel(logging.DEBUG)
# Create console handler to log messages to stderr. By default this logs only error messages, but can be configured with --loglevel.
ch = logging.StreamHandler(sys.stdout)
# User can provide log level as a number or string (eg DEBUG). loglevel was caught as a string, so try to convert it to an int
ll = int(self.options.loglevel) if self.options.loglevel.isdigit() else self.options.loglevel.upper()
ch.setLevel(ll)
# Format logs the same as pivxd's debug.log with microprecision (so log files can be concatenated and sorted)
formatter = logging.Formatter(fmt='%(asctime)s.%(msecs)03d000 %(name)s (%(levelname)s): %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
formatter.converter = time.gmtime
fh.setFormatter(formatter)
ch.setFormatter(formatter)
# add the handlers to the logger
self.log.addHandler(fh)
self.log.addHandler(ch)
if self.options.trace_rpc:
rpc_logger = logging.getLogger("BitcoinRPC")
rpc_logger.setLevel(logging.DEBUG)
rpc_handler = logging.StreamHandler(sys.stdout)
rpc_handler.setLevel(logging.DEBUG)
rpc_logger.addHandler(rpc_handler)
    def _initialize_chain(self, toPosPhase=False):
        """Initialize a pre-mined blockchain for use by the test.

        Builds (or reuses) a cached 200-block PoW chain and, when
        toPosPhase=True, a cached 330-block chain that has entered PoS,
        then copies the requested cache into self.options.tmpdir for the
        first self.num_nodes nodes.

        :param toPosPhase: (bool) copy/generate the PoS cache instead of the PoW one
        """
        def create_cachedir(cachedir):
            # Start from an empty cache directory.
            if os.path.isdir(cachedir):
                shutil.rmtree(cachedir)
            os.makedirs(cachedir)
        def copy_cachedir(origin, destination, num_nodes=MAX_NODES):
            # Copy each node's datadir, then rewrite its config for new ports.
            for i in range(num_nodes):
                from_dir = get_datadir_path(origin, i)
                to_dir = get_datadir_path(destination, i)
                shutil.copytree(from_dir, to_dir)
                initialize_datadir(destination, i)  # Overwrite port/rpcport in pivx.conf
        def clone_cache_from_node_1(cachedir, from_num=4):
            """ Clones cache subdir from node 1 to nodes from 'from_num' to MAX_NODES"""
            def copy_and_overwrite(from_path, to_path):
                if os.path.exists(to_path):
                    shutil.rmtree(to_path)
                shutil.copytree(from_path, to_path)
            assert from_num < MAX_NODES
            # NOTE(review): despite the name, the data is cloned from node 0's datadir.
            node_0_datadir = os.path.join(get_datadir_path(cachedir, 0), "regtest")
            for i in range(from_num, MAX_NODES):
                node_i_datadir = os.path.join(get_datadir_path(cachedir, i), "regtest")
                for subdir in ["blocks", "chainstate", "sporks", "zerocoin"]:
                    copy_and_overwrite(os.path.join(node_0_datadir, subdir),
                                       os.path.join(node_i_datadir, subdir))
                initialize_datadir(cachedir, i)  # Overwrite port/rpcport in pivx.conf
        def cachedir_valid(cachedir):
            # A cache is valid when every node dir exists and generation finished.
            for i in range(MAX_NODES):
                if not os.path.isdir(get_datadir_path(cachedir, i)):
                    return False
            # nodes directories exist. check if the first one has the .incomplete flagfile
            return (not os.path.exists(os.path.join(get_datadir_path(cachedir, 0), ".incomplete")))
        def clean_cache_subdir(cachedir):
            # Generation finished: drop the flagfile and everything that should
            # not be part of the cache (logs, peers.dat, etc.).
            os.remove(os.path.join(get_datadir_path(cachedir, 0), ".incomplete"))
            def cache_path(n, *paths):
                return os.path.join(get_datadir_path(cachedir, n), "regtest", *paths)
            for i in range(MAX_NODES):
                for entry in os.listdir(cache_path(i)):
                    if entry not in ['wallet.dat', 'chainstate', 'blocks', 'sporks', 'zerocoin', 'backups']:
                        os.remove(cache_path(i, entry))
        def clean_cache_dir():
            if os.path.isdir(self.options.cachedir):
                # migrate old cache dir
                if cachedir_valid(self.options.cachedir):
                    powcachedir = os.path.join(self.options.cachedir, "pow")
                    self.log.info("Found old cachedir. Migrating to %s" % str(powcachedir))
                    copy_cachedir(self.options.cachedir, powcachedir)
                # remove everything except pow and pos subdirs
                for entry in os.listdir(self.options.cachedir):
                    if entry not in ['pow', 'pos']:
                        entry_path = os.path.join(self.options.cachedir, entry)
                        if os.path.isfile(entry_path):
                            os.remove(entry_path)
                        elif os.path.isdir(entry_path):
                            shutil.rmtree(entry_path)
            # no cachedir found
            else:
                os.makedirs(self.options.cachedir)
        def start_nodes_from_dir(ddir, num_nodes=MAX_NODES):
            self.log.info("Starting %d nodes..." % num_nodes)
            for i in range(num_nodes):
                datadir = initialize_datadir(ddir, i)
                if i == 0:
                    # Add .incomplete flagfile
                    # (removed at the end during clean_cache_subdir)
                    open(os.path.join(datadir, ".incomplete"), 'a').close()
                args = [os.getenv("BITCOIND", "pivxd"), "-spendzeroconfchange=1", "-server", "-keypool=1",
                        "-datadir=" + datadir, "-discover=0"]
                self.nodes.append(
                    TestNode(i, ddir, extra_args=[], rpchost=None, timewait=None, binary=None, stderr=None,
                             mocktime=self.mocktime, coverage_dir=None))
                self.nodes[i].args = args
                self.start_node(i)
                self.log.info("Node %d started." % i)
            # Wait for RPC connections to be ready
            self.log.info("Nodes started. Waiting for RPC connections...")
            for node in range(4):
                self.nodes[node].wait_for_rpc_connection()
            self.log.info("Connecting nodes")
            connect_nodes_clique(self.nodes)
        def stop_and_clean_cache_dir(ddir):
            self.stop_nodes()
            self.nodes = []
            # Copy cache for nodes 5 to MAX_NODES
            self.log.info("Copying cache dir to non-started nodes")
            clone_cache_from_node_1(ddir)
            self.log.info("Cleaning up.")
            clean_cache_subdir(ddir)
        def generate_pow_cache():
            ### POW Cache ###
            # Create a 200-block-long chain; each of the 4 first nodes
            # gets 25 mature blocks and 25 immature.
            # Note: To preserve compatibility with older versions of
            # initialize_chain, only 4 nodes will generate coins.
            #
            # blocks are created with timestamps 1 minutes apart
            # starting from 331 minutes in the past
            # Create cache directories, run pivxds:
            create_cachedir(powcachedir)
            self.log.info("Creating 'PoW-chain': 200 blocks")
            start_nodes_from_dir(powcachedir, 4)
            # Mine the blocks
            self.log.info("Mining 200 blocks")
            self.enable_mocktime()
            block_time = self.mocktime - (331 * 60)
            for i in range(2):
                for peer in range(4):
                    for j in range(25):
                        set_node_times(self.nodes, block_time)
                        self.nodes[peer].generate(1)
                        block_time += 60
                    # Must sync before next peer starts generating blocks
                    sync_blocks(self.nodes)
            # Shut them down, and clean up cache directories:
            self.log.info("Stopping nodes")
            stop_and_clean_cache_dir(powcachedir)
            self.log.info("---> pow cache created")
            self.disable_mocktime()
        assert self.num_nodes <= MAX_NODES
        clean_cache_dir()
        powcachedir = os.path.join(self.options.cachedir, "pow")
        is_powcache_valid = cachedir_valid(powcachedir)
        poscachedir = os.path.join(self.options.cachedir, "pos")
        is_poscache_valid = cachedir_valid(poscachedir)
        if not toPosPhase and not is_powcache_valid:
            self.log.info("PoW-CACHE NOT FOUND or INVALID.")
            self.log.info("Creating new cached blockchain data.")
            generate_pow_cache()
        elif toPosPhase and not is_poscache_valid:
            self.log.info("PoS-CACHE NOT FOUND or INVALID.")
            self.log.info("Creating new cached blockchain data.")
            # check if first 200 blocks (pow cache) is present. if not generate it.
            if not is_powcache_valid:
                self.log.info("PoW-CACHE NOT FOUND or INVALID. Generating it first.")
                generate_pow_cache()
            self.enable_mocktime()
            block_time = self.mocktime - (131 * 60)
            ### POS Cache ###
            # Create a 330-block-long chain
            # First 200 PoW blocks are copied from PoW chain.
            # The next 48 PoW blocks are mined in 12-blocks bursts by the first 4 nodes.
            # The last 2 PoW blocks are then mined by the last node (Node 3).
            # Then 80 PoS blocks are generated in 20-blocks bursts by the first 4 nodes.
            #
            # - Node 0 and node 1 get 62 mature blocks (pow) + 20 immmature (pos)
            # 42 rewards spendable (62 mature blocks - 20 spent rewards)
            # - Node 2 gets 56 mature blocks (pow) + 26 immmature (6 pow + 20 pos)
            # 35 rewards spendable (55 mature blocks - 20 spent rewards)
            # - Node 3 gets 50 mature blocks (pow) + 34 immmature (14 pow + 20 pos)
            # 30 rewards spendable (50 mature blocks - 20 spent rewards)
            # - Nodes 2 and 3 mint one zerocoin for each denom (tot 6666 PIV) on block 301/302
            # 8 mature zc + 8/3 rewards spendable (35/30 - 27 spent) + change 83.92
            #
            # Block 331-336 will mature last 6 pow blocks mined by node 2.
            # Then 337-350 will mature last 14 pow blocks mined by node 3.
            # Then staked blocks start maturing at height 351.
            # Create cache directories, run pivxds:
            create_cachedir(poscachedir)
            self.log.info("Creating 'PoS-chain': 330 blocks")
            self.log.info("Copying 200 initial blocks from pow cache")
            copy_cachedir(powcachedir, poscachedir)
            # Change datadir and restart the nodes (only 4 of them)
            start_nodes_from_dir(poscachedir, 4)
            # Mine 50 more blocks to reach PoS start.
            self.log.info("Mining 50 more blocks to reach PoS phase")
            for peer in range(4):
                for j in range(12):
                    set_node_times(self.nodes, block_time)
                    self.nodes[peer].generate(1)
                    block_time += 60
                # Must sync before next peer starts generating blocks
                if peer < 3:
                    sync_blocks(self.nodes)
            set_node_times(self.nodes, block_time)
            self.nodes[3].generate(2)
            block_time += 60
            sync_blocks(self.nodes)
            # Then stake 80 blocks.
            self.log.info("Staking 80 blocks...")
            nBlocks = 250
            res = []  # used to save the two txids for change outputs of mints (locked)
            for peer in range(4):
                for j in range(20):
                    # Stake block
                    # NOTE(review): generate_pos is not defined in this chunk of
                    # the file; presumably defined elsewhere in this class.
                    block_time = self.generate_pos(peer, block_time)
                    nBlocks += 1
                    # Mint zerocoins with node-2 at block 301 and with node-3 at block 302
                    if nBlocks == 301 or nBlocks == 302:
                        # mints 7 zerocoins, one for each denom (tot 6666 PIV), fee = 0.01 * 8
                        # consumes 27 utxos (tot 6750 PIV), change = 6750 - 6666 - fee
                        res.append(self.nodes[nBlocks-299].mintzerocoin(6666))
                        self.sync_all()
                        # lock the change output (so it's not used as stake input in generate_pos)
                        assert (self.nodes[nBlocks-299].lockunspent(False, [{"txid": res[-1]['txid'], "vout": 8}]))
                # Must sync before next peer starts generating blocks
                sync_blocks(self.nodes)
                time.sleep(1)
            self.log.info("80 blocks staked")
            # Unlock previously locked change outputs
            for i in [2, 3]:
                assert (self.nodes[i].lockunspent(True, [{"txid": res[i-2]['txid'], "vout": 8}]))
            # Verify height and balances
            self.test_PoS_chain_balances()
            # Shut nodes down, and clean up cache directories:
            self.log.info("Stopping nodes")
            stop_and_clean_cache_dir(poscachedir)
            self.log.info("--> pos cache created")
            self.disable_mocktime()
        else:
            self.log.info("CACHE FOUND.")
        # Copy requested cache to tempdir
        if toPosPhase:
            self.log.info("Copying datadir from %s to %s" % (poscachedir, self.options.tmpdir))
            copy_cachedir(poscachedir, self.options.tmpdir, self.num_nodes)
        else:
            self.log.info("Copying datadir from %s to %s" % (powcachedir, self.options.tmpdir))
            copy_cachedir(powcachedir, self.options.tmpdir, self.num_nodes)
def _initialize_chain_clean(self):
"""Initialize empty blockchain for use by the test.
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization."""
for i in range(self.num_nodes):
initialize_datadir(self.options.tmpdir, i)
### PIVX Specific TestFramework ###
###################################
def init_dummy_key(self):
self.DUMMY_KEY = CECKey()
self.DUMMY_KEY.set_secretbytes(hash256(pack('<I', 0xffff)))
    def test_PoS_chain_balances(self):
        """Sanity-check the generated PoS cache: all nodes share the same
        330-block tip and wallet balances match the mining/staking/minting
        schedule laid out in _initialize_chain."""
        from .util import DecimalAmt
        # 330 blocks
        # - Nodes 0 and 1 get 82 blocks:
        # 62 pow + 20 pos (20 immature)
        # - Nodes 2 gets 82 blocks:
        # 62 pow + 20 pos (26 immature)
        # - Nodes 3 gets 84 blocks:
        # 64 pow + 20 pos (34 immature)
        # - Nodes 2 and 3 have 6666 PIV worth of zerocoins
        zc_tot = sum(vZC_DENOMS)
        zc_fee = len(vZC_DENOMS) * 0.01
        # Each consumed utxo is a 250-PIV block reward.
        used_utxos = (zc_tot // 250) + 1
        zc_change = 250 * used_utxos - zc_tot - zc_fee
        # check at least 1 node and at most 5
        num_nodes = min(5, len(self.nodes))
        assert_greater_than(num_nodes, 0)
        # each node has the same height and tip
        best_block = self.nodes[0].getbestblockhash()
        for i in range(num_nodes):
            assert_equal(self.nodes[i].getblockcount(), 330)
            if i > 0:
                assert_equal(self.nodes[i].getbestblockhash(), best_block)
        # balance is mature pow blocks rewards minus stake inputs (spent)
        w_info = [self.nodes[i].getwalletinfo() for i in range(num_nodes)]
        assert_equal(w_info[0]["balance"], DecimalAmt(250.0 * (62 - 20)))
        assert_equal(w_info[1]["balance"], DecimalAmt(250.0 * (62 - 20)))
        assert_equal(w_info[2]["balance"], DecimalAmt(250.0 * (56 - 20) - (used_utxos * 250) + zc_change))
        assert_equal(w_info[3]["balance"], DecimalAmt(250.0 * (50 - 20) - (used_utxos * 250) + zc_change))
        for i in range(4, num_nodes):
            # only first 4 nodes have mined/staked
            assert_equal(w_info[i]["balance"], DecimalAmt(0))
        # immature balance is immature pow blocks rewards plus
        # immature stakes (outputs=inputs+rewards)
        assert_equal(w_info[0]["immature_balance"], DecimalAmt(500.0 * 20))
        assert_equal(w_info[1]["immature_balance"], DecimalAmt(500.0 * 20))
        assert_equal(w_info[2]["immature_balance"], DecimalAmt((250.0 * 6) + (500.0 * 20)))
        assert_equal(w_info[3]["immature_balance"], DecimalAmt((250.0 * 14) + (500.0 * 20)))
        for i in range(4, num_nodes):
            # only first 4 nodes have mined/staked
            assert_equal(w_info[i]["immature_balance"], DecimalAmt(0))
        # check zerocoin balances / mints
        for peer in [2, 3]:
            if num_nodes > peer:
                zcBalance = self.nodes[peer].getzerocoinbalance()
                zclist = self.nodes[peer].listmintedzerocoins(True)
                zclist_spendable = self.nodes[peer].listmintedzerocoins(True, True)
                assert_equal(len(zclist), len(vZC_DENOMS))
                assert_equal(zcBalance['Total'], 6666)
                assert_equal(zcBalance['Immature'], 0)
                if peer == 2:
                    assert_equal(len(zclist), len(zclist_spendable))
                assert_equal(set([x['denomination'] for x in zclist]), set(vZC_DENOMS))
                assert_equal([x['confirmations'] for x in zclist], [30-peer] * len(vZC_DENOMS))
        self.log.info("Balances of first %d nodes check out" % num_nodes)
    def get_prevouts(self, node_id, utxo_list, zpos=False, nHeight=-1):
        """ get prevouts (map) for each utxo in a list
        :param node_id: (int) index of the CTestNode used as rpc connection. Must own the utxos.
               utxo_list: <if zpos=False> (JSON list) utxos returned from listunspent used as input
                          <if zpos=True> (JSON list) mints returned from listmintedzerocoins used as input
               zpos: (bool) type of utxo_list
               nHeight: (int) height of the previous block. used only if zpos=True for
                        stake checksum. Optional, if not provided rpc_conn's height is used.
        :return: prevouts: ({bytes --> (int, bytes, int)} dictionary)
                 maps CStake "uniqueness" (i.e. serialized COutPoint -or hash stake, for zpiv-)
                 to (amount, prevScript, timeBlockFrom).
                 For zpiv prevScript is replaced with serialHash hex string.
        """
        assert_greater_than(len(self.nodes), node_id)
        rpc_conn = self.nodes[node_id]
        prevouts = {}
        for utxo in utxo_list:
            if not zpos:
                outPoint = COutPoint(int(utxo['txid'], 16), utxo['vout'])
                outValue = int(utxo['amount']) * COIN
                prevtx_json = rpc_conn.getrawtransaction(utxo['txid'], 1)
                prevTx = CTransaction()
                prevTx.deserialize(BytesIO(hex_str_to_bytes(prevtx_json['hex'])))
                if (prevTx.is_coinbase() or prevTx.is_coinstake()) and utxo['confirmations'] < 100:
                    # skip immature coins
                    continue
                prevScript = prevtx_json['vout'][utxo['vout']]['scriptPubKey']['hex']
                prevTime = prevtx_json['blocktime']
                prevouts[outPoint.serialize_uniqueness()] = (outValue, prevScript, prevTime)
            else:
                # zPIV: uniqueness is the reversed hash-stake bytes.
                uniqueness = bytes.fromhex(utxo['hash stake'])[::-1]
                prevouts[uniqueness] = (int(utxo["denomination"]) * COIN, utxo["serial hash"], 0)
        return prevouts
    def make_txes(self, node_id, spendingPrevOuts, to_pubKey):
        """ makes a list of CTransactions each spending an input from spending PrevOuts to an output to_pubKey
        :param node_id: (int) index of the CTestNode used as rpc connection. Must own spendingPrevOuts.
               spendingPrevouts: ({bytes --> (int, bytes, int)} dictionary)
                                 maps CStake "uniqueness" (i.e. serialized COutPoint -or hash stake, for zpiv-)
                                 to (amount, prevScript, timeBlockFrom).
                                 For zpiv prevScript is replaced with serialHash hex string.
               to_pubKey (bytes) recipient public key
        :return: block_txes: ([CTransaction] list)
        """
        assert_greater_than(len(self.nodes), node_id)
        rpc_conn = self.nodes[node_id]
        block_txes = []
        for uniqueness in spendingPrevOuts:
            if is_zerocoin(uniqueness):
                # spend zPIV
                _, serialHash, _ = spendingPrevOuts[uniqueness]
                raw_spend = rpc_conn.createrawzerocoinspend(serialHash, "", False)
            else:
                # spend PIV
                value_out = int(spendingPrevOuts[uniqueness][0] - DEFAULT_FEE * COIN)
                scriptPubKey = CScript([to_pubKey, OP_CHECKSIG])
                prevout = COutPoint()
                prevout.deserialize_uniqueness(BytesIO(uniqueness))
                tx = create_transaction_from_outpoint(prevout, b"", value_out, scriptPubKey)
                # sign tx
                raw_spend = rpc_conn.signrawtransaction(bytes_to_hex_str(tx.serialize()))['hex']
            # add signed tx to the list
            signed_tx = CTransaction()
            signed_tx.from_hex(raw_spend)
            block_txes.append(signed_tx)
        return block_txes
    def stake_block(self, node_id,
                    nHeight,
                    prevHhash,
                    prevModifier,
                    stakeableUtxos,
                    startTime=None,
                    privKeyWIF=None,
                    vtx=[],
                    fDoubleSpend=False):
        """ manually stakes a block selecting the coinstake input from a list of candidates
        :param node_id: (int) index of the CTestNode used as rpc connection. Must own stakeableUtxos.
               nHeight: (int) height of the block being produced
               prevHhash: (string) hex string of the previous block hash
               prevModifier (string) hex string of the previous block stake modifier
               stakeableUtxos: ({bytes --> (int, bytes, int)} dictionary)
                               maps CStake "uniqueness" (i.e. serialized COutPoint -or hash stake, for zpiv-)
                               to (amount, prevScript, timeBlockFrom).
                               For zpiv prevScript is replaced with serialHash hex string.
               startTime: (int) epoch time to be used as blocktime (iterated in solve_stake)
               privKeyWIF: (string) private key to be used for staking/signing
                           If empty string, it will be used the pk from the stake input
                           (dumping the sk from rpc_conn). If None, then the DUMMY_KEY will be used.
               vtx: ([CTransaction] list) transactions to add to block.vtx
                    NOTE(review): mutable default argument; safe only because vtx
                    is never mutated in this method.
               fDoubleSpend: (bool) wether any tx in vtx is allowed to spend the coinstake input
        :return: block: (CBlock) block produced, must be manually relayed
        """
        assert_greater_than(len(self.nodes), node_id)
        rpc_conn = self.nodes[node_id]
        if not len(stakeableUtxos) > 0:
            raise Exception("Need at least one stakeable utxo to stake a block!")
        # Get start time to stake
        if startTime is None:
            startTime = time.time()
        # Create empty block with coinbase
        nTime = int(startTime) & 0xfffffff0
        coinbaseTx = create_coinbase_pos(nHeight)
        block = create_block(int(prevHhash, 16), coinbaseTx, nTime)
        # Find valid kernel hash - iterates stakeableUtxos, then block.nTime
        block.solve_stake(stakeableUtxos, int(prevModifier, 16))
        # Check if this is a zPoS block or regular/cold stake - sign stake tx
        block_sig_key = CECKey()
        isZPoS = is_zerocoin(block.prevoutStake)
        if isZPoS:
            # !TODO: remove me
            raise Exception("zPOS tests discontinued")
        else:
            # Build the (unsigned) coinstake: empty vout[0] marker, then the
            # stake output = input amount + 250 PIV reward.
            coinstakeTx_unsigned = CTransaction()
            prevout = COutPoint()
            prevout.deserialize_uniqueness(BytesIO(block.prevoutStake))
            coinstakeTx_unsigned.vin.append(CTxIn(prevout, b"", 0xffffffff))
            coinstakeTx_unsigned.vout.append(CTxOut())
            amount, prevScript, _ = stakeableUtxos[block.prevoutStake]
            outNValue = int(amount + 250 * COIN)
            coinstakeTx_unsigned.vout.append(CTxOut(outNValue, hex_str_to_bytes(prevScript)))
            if privKeyWIF == "":
                # Use dummy key
                if not hasattr(self, 'DUMMY_KEY'):
                    self.init_dummy_key()
                block_sig_key = self.DUMMY_KEY
                # replace coinstake output script
                coinstakeTx_unsigned.vout[1].scriptPubKey = CScript([block_sig_key.get_pubkey(), OP_CHECKSIG])
            else:
                if privKeyWIF == None:
                    # Use pk of the input. Ask sk from rpc_conn
                    rawtx = rpc_conn.getrawtransaction('{:064x}'.format(prevout.hash), True)
                    privKeyWIF = rpc_conn.dumpprivkey(rawtx["vout"][prevout.n]["scriptPubKey"]["addresses"][0])
                # Use the provided privKeyWIF (cold staking).
                # export the corresponding private key to sign block
                privKey, compressed = wif_to_privkey(privKeyWIF)
                block_sig_key.set_compressed(compressed)
                block_sig_key.set_secretbytes(bytes.fromhex(privKey))
            # Sign coinstake TX and add it to the block
            stake_tx_signed_raw_hex = rpc_conn.signrawtransaction(
                bytes_to_hex_str(coinstakeTx_unsigned.serialize()))['hex']
            # Add coinstake to the block
            coinstakeTx = CTransaction()
            coinstakeTx.from_hex(stake_tx_signed_raw_hex)
            block.vtx.append(coinstakeTx)
        # Add provided transactions to the block.
        # Don't add tx doublespending the coinstake input, unless fDoubleSpend=True
        for tx in vtx:
            if not fDoubleSpend:
                # assume txes don't double spend zPIV inputs when fDoubleSpend is false. It needs to
                # be checked outside until a convenient tx.spends(zerocoin) is added to the framework.
                if not isZPoS and tx.spends(prevout):
                    continue
            block.vtx.append(tx)
        # Get correct MerkleRoot and rehash block
        block.hashMerkleRoot = block.calc_merkle_root()
        block.rehash()
        # sign block with block signing key and return it
        block.sign_block(block_sig_key)
        return block
def stake_next_block(self, node_id,
stakeableUtxos,
btime=None,
privKeyWIF=None,
vtx=[],
fDoubleSpend=False):
""" Calls stake_block appending to the current tip"""
assert_greater_than(len(self.nodes), node_id)
nHeight = self.nodes[node_id].getblockcount()
prevHhash = self.nodes[node_id].getblockhash(nHeight)
prevModifier = self.nodes[node_id].getblock(prevHhash)['stakeModifier']
return self.stake_block(node_id,
nHeight+1,
prevHhash,
prevModifier,
stakeableUtxos,
btime,
privKeyWIF,
vtx,
fDoubleSpend)
    def check_tx_in_chain(self, node_id, txid):
        """ Asserts that `txid` is confirmed (>= 1 confirmation) on nodes[node_id]. """
        assert_greater_than(len(self.nodes), node_id)
        rawTx = self.nodes[node_id].getrawtransaction(txid, 1)
        assert_greater_than(rawTx["confirmations"], 0)
def spend_inputs(self, node_id, inputs, outputs):
""" auxiliary function used by spend_utxo / spend_utxos """
assert_greater_than(len(self.nodes), node_id)
rpc_conn = self.nodes[node_id]
spendingTx = rpc_conn.createrawtransaction(inputs, outputs)
spendingTx_signed = rpc_conn.signrawtransaction(spendingTx)
if spendingTx_signed["complete"]:
txhash = rpc_conn.sendrawtransaction(spendingTx_signed["hex"])
return txhash
else:
return ""
def spend_utxo(self, node_id, utxo, recipient=''):
""" spend amount from previously unspent output to a provided address
:param node_id: (int) index of the CTestNode used as rpc connection. Must own the utxo.
utxo: (JSON) returned from listunspent used as input
recipient: (string) destination address (new one if not provided)
:return: txhash: (string) tx hash if successful, empty string otherwise
"""
assert_greater_than(len(self.nodes), node_id)
rpc_conn = self.nodes[node_id]
inputs = [{"txid": utxo["txid"], "vout": utxo["vout"]}]
out_amount = float(utxo["amount"]) - DEFAULT_FEE
outputs = {}
if recipient == '':
recipient = rpc_conn.getnewaddress()
outputs[recipient] = out_amount
return self.spend_inputs(node_id, inputs, outputs)
def spend_utxos(self, node_id, utxo_list, recipient='', fMultiple=False):
""" spend utxos to provided list of addresses or 10 new generate ones.
:param node_id: (int) index of the CTestNode used as rpc connection. Must own the utxo.
utxo_list: (JSON list) returned from listunspent used as input
recipient: (string, optional) destination address (new one if not provided)
fMultiple: (boolean, optional, default=false) spend each utxo on a different tx
:return: txHashes: (string list) list of hashes of completed txs
"""
assert_greater_than(len(self.nodes), node_id)
rpc_conn = self.nodes[node_id]
txHashes = []
# If no recipient is given, create a new one
if recipient == '':
recipient = rpc_conn.getnewaddress()
# If fMultiple=True send one tx for each utxo
if fMultiple:
for utxo in utxo_list:
txHash = self.spend_utxo(node_id, utxo, recipient)
if txHash != "":
txHashes.append(txHash)
# Otherwise make a single tx with all the inputs
else:
inputs = [{"txid": x["txid"], "vout": x["vout"]} for x in utxo_list]
out_amount = sum([float(x["amount"]) for x in utxo_list]) - DEFAULT_FEE
outputs = {}
if recipient == '':
recipient = rpc_conn.getnewaddress()
outputs[recipient] = out_amount
txHash = self.spend_inputs(node_id, inputs, outputs)
if txHash != "":
txHashes.append(txHash)
return txHashes
    def generate_pos(self, node_id, btime=None):
        """ stakes a block using generate on nodes[node_id].

        Retries until a block is staked; every failed attempt bumps the mocked
        node time by one second (or sleeps 1s when no btime is given). After 60
        consecutive failures it re-checks that the node is still able to stake.
        :param node_id: (int) index of the CTestNode used as rpc connection
               btime: (int, optional) current mocked block time
        :return: (int) the adjusted block time, or None if btime was None
        """
        assert_greater_than(len(self.nodes), node_id)
        rpc_conn = self.nodes[node_id]
        # Preconditions: wallet unlocked, with coins eligible for staking.
        ss = rpc_conn.getstakingstatus()
        assert ss["walletunlocked"]
        assert ss["stakeablecoins"] > 0
        assert ss["stakingbalance"] > 0.0
        if btime is not None:
            next_btime = btime + 60
        fStaked = False
        failures = 0
        while not fStaked:
            try:
                rpc_conn.generate(1)
                fStaked = True
            except JSONRPCException as e:
                if ("Couldn't create new block" in str(e)):
                    failures += 1
                    # couldn't generate block. check that this node can still stake (after 60 failures)
                    if failures > 60:
                        ss = rpc_conn.getstakingstatus()
                        if not (ss["walletunlocked"] and ss["stakeablecoins"] > 0 and ss["stakingbalance"] > 0.0):
                            raise AssertionError("Node %d unable to stake!" % node_id)
                    # try to stake one sec in the future
                    if btime is not None:
                        btime += 1
                        set_node_times(self.nodes, btime)
                    else:
                        time.sleep(1)
                else:
                    raise e
        # block generated. adjust block time
        if btime is not None:
            btime = max(btime + 1, next_btime)
            set_node_times(self.nodes, btime)
            return btime
        else:
            return None
    def generate_pow(self, node_id, btime=None):
        """ mines a PoW block using generate on nodes[node_id]
        (the previous docstring said "stakes" — copy-paste from generate_pos).
        When btime is given, advances mocktime by one block spacing (60s). """
        assert_greater_than(len(self.nodes), node_id)
        self.nodes[node_id].generate(1)
        if btime is not None:
            btime += 60
            set_node_times(self.nodes, btime)
        return btime
    def set_spork(self, node_id, sporkName, value):
        """ Sets the given spork to `value` via rpc on nodes[node_id]. """
        assert_greater_than(len(self.nodes), node_id)
        return self.nodes[node_id].spork(sporkName, value)
    def get_spork(self, node_id, sporkName):
        """ Returns the current value of the given spork on nodes[node_id]. """
        assert_greater_than(len(self.nodes), node_id)
        return self.nodes[node_id].spork("show")[sporkName]
    def activate_spork(self, node_id, sporkName):
        """ Activates the spork by setting it to SPORK_ACTIVATION_TIME. """
        return self.set_spork(node_id, sporkName, SPORK_ACTIVATION_TIME)
    def deactivate_spork(self, node_id, sporkName):
        """ Deactivates the spork by setting it to SPORK_DEACTIVATION_TIME. """
        return self.set_spork(node_id, sporkName, SPORK_DEACTIVATION_TIME)
    def is_spork_active(self, node_id, sporkName):
        """ Returns whether the given spork is currently active on nodes[node_id]. """
        assert_greater_than(len(self.nodes), node_id)
        return self.nodes[node_id].spork("active")[sporkName]
### ------------------------------------------------------
class ComparisonTestFramework(PivxTestFramework):
    """Test framework for doing p2p comparison testing

    Sets up some pivxd binaries:
    - 1 binary: test binary
    - 2 binaries: 1 test binary, 1 ref binary
    - n>2 binaries: 1 test binary, n-1 ref binaries"""
    def set_test_params(self):
        # Default: one node under test plus one reference node, fresh chain.
        self.num_nodes = 2
        self.setup_clean_chain = True
    def add_options(self, parser):
        # NOTE(review): uses optparse-style add_option — presumably the parser
        # handed in here is optparse-based; confirm before porting to argparse.
        parser.add_option("--testbinary", dest="testbinary",
                          default=os.getenv("BITCOIND", "pivxd"),
                          help="pivxd binary to test")
        parser.add_option("--refbinary", dest="refbinary",
                          default=os.getenv("BITCOIND", "pivxd"),
                          help="pivxd binary to use for reference nodes (if any)")
    def setup_network(self):
        # Whitelist localhost on every node unless the test supplied its own args.
        extra_args = [['-whitelist=127.0.0.1']] * self.num_nodes
        if hasattr(self, "extra_args"):
            extra_args = self.extra_args
        # First node runs the test binary, the rest run the reference binary.
        self.add_nodes(self.num_nodes, extra_args,
                       binary=[self.options.testbinary] +
                       [self.options.refbinary] * (self.num_nodes - 1))
        self.start_nodes()
class SkipTest(Exception):
    """This exception is raised to skip a test.

    The skip reason is stored both in ``self.message`` (kept for backward
    compatibility) and in the standard exception args, so ``str(exc)``
    prints the reason instead of an empty string.
    """
    def __init__(self, message):
        # Initialize the Exception base so args/str() behave normally.
        super().__init__(message)
        self.message = message
| 44.215054 | 310 | 0.592757 |
acf8c0873975ebab70fb499fe58501bc6e6ccb0a | 4,106 | py | Python | dtwalign/window.py | mateuszpieniak/dtwalign | a20c2c1d3e8b14edd60b77f9c0f20c7c5dfadd28 | [
"MIT"
] | null | null | null | dtwalign/window.py | mateuszpieniak/dtwalign | a20c2c1d3e8b14edd60b77f9c0f20c7c5dfadd28 | [
"MIT"
] | null | null | null | dtwalign/window.py | mateuszpieniak/dtwalign | a20c2c1d3e8b14edd60b77f9c0f20c7c5dfadd28 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import numpy as np
from numba import jit
import matplotlib.pyplot as plt
import seaborn as sns
class BaseWindow():
    """Common behaviour shared by all window (global-constraint) classes.

    Subclasses are expected to set ``self.matrix`` (boolean admissibility
    matrix), ``self.list`` (admissible index pairs) and a ``label``.
    """

    def __init__(self):
        pass

    def plot(self):
        """Show window.

        Renders the boolean window matrix as a heatmap with the query index
        on the x axis and the reference index on the y axis.
        """
        mat = self.matrix
        _, axis = plt.subplots(1)
        sns.heatmap(mat.T, vmin=0, vmax=1,
                    xticklabels=mat.shape[0] // 10,
                    yticklabels=mat.shape[1] // 10,
                    ax=axis)
        axis.invert_yaxis()
        axis.set_title(self.label)
        axis.set_xlabel("query index")
        axis.set_ylabel("reference index")
        plt.show()
class NoWindow(BaseWindow):
    label = "no window"

    def __init__(self, len_x, len_y):
        """No window class which will be used for no constraint.

        Parameters
        ----------
        len_x : int
            Length of query.
        len_y : int
            Length of reference.
        """
        self._gen_window(len_x, len_y)

    def _gen_window(self, len_x, len_y):
        # Every (query, reference) index pair is admissible: all-True matrix.
        self.matrix = np.ones([len_x, len_y], dtype=bool)
        # Boolean arrays are used directly; comparing "== True" was redundant.
        self.list = np.argwhere(self.matrix)
class SakoechibaWindow(BaseWindow):
    label = "sakoechiba window"

    def __init__(self, len_x, len_y, size):
        """Sakoechiba window.

        Parameters
        ----------
        len_x : int
            Length of query.
        len_y : int
            Length of reference.
        size : int
            Size of window width.
        """
        self._gen_window(len_x, len_y, size)

    def _gen_window(self, len_x, len_y, size):
        # |i - j| <= size defines the admissible diagonal band.
        xx = np.arange(len_x)
        yy = np.arange(len_y)
        self.matrix = np.abs(xx[:, np.newaxis] - yy[np.newaxis, :]) <= size
        # The matrix is already boolean; "== True" comparison was redundant.
        self.list = np.argwhere(self.matrix)
class DiagonalWindow(BaseWindow):
    label = "diagonal"

    def __init__(self, len_x, len_y, size):
        """Diagonal window.

        Admits cells within ``size`` of the rescaled main diagonal, so the
        band follows the corner-to-corner diagonal even when query and
        reference have different lengths.

        Parameters
        ----------
        len_x : int
            Length of query.
        len_y : int
            Length of reference.
        size : int
            Size of window width.
        """
        super().__init__()
        self._gen_window(len_x, len_y, size)

    def _gen_window(self, len_x, len_y, size):
        xx = np.arange(len_x)
        yy = np.arange(len_y)
        # Scale the reference index so the band tracks the full diagonal.
        ratio = len_x / len_y
        self.matrix = np.abs(xx[:, np.newaxis] - ratio * yy[np.newaxis, :]).astype(int) <= size
        self.list = np.argwhere(self.matrix)
class ItakuraWindow(BaseWindow):
    label = "itakura window"

    def __init__(self, len_x, len_y):
        """Itakura window (parallelogram global constraint).

        Parameters
        ----------
        len_x : int
            Length of query.
        len_y : int
            Length of reference.
        """
        self._gen_window(len_x, len_y)

    def _gen_window(self, len_x, len_y):
        # ``np.bool`` was deprecated in NumPy 1.20 and removed in 1.24; the
        # builtin ``bool`` is the supported, equivalent spelling.
        self.matrix = _gen_itakura_window(len_x, len_y).astype(bool)
        # Boolean arrays index directly; "== True" comparison was redundant.
        self.list = np.argwhere(self.matrix)
@jit(nopython=True)
def _gen_itakura_window(len_x, len_y):
    # Builds the Itakura parallelogram as an int8 mask (numba nopython mode
    # compiles the double loop; int8 keeps the array compact).
    matrix = np.zeros((len_x, len_y), dtype=np.int8)
    for xidx in range(len_x):
        for yidx in range(len_y):
            # Four half-plane constraints: slope-2 bounds anchored at the
            # (0, 0) corner and at the (len_x-1, len_y-1) corner.
            if (yidx < 2 * xidx + 1) and (xidx <= 2 * yidx + 1) and \
                (xidx >= len_x - 2 * (len_y - yidx)) and \
                (yidx > len_y - 2 * (len_x - xidx)):
                matrix[xidx, yidx] = 1
    return matrix
class UserWindow(BaseWindow):
    label = "user defined window"

    def __init__(self, len_x, len_y, win_func, *args, **kwargs):
        """Initialize user defined window.

        Parameters
        ----------
        len_x : int
            Length of query.
        len_y : int
            Length of reference.
        win_func : callable
            Any function which returns bool.
        *args, **kwargs :
            Arguments for win_func
        """
        self._gen_window(len_x, len_y, win_func, *args, **kwargs)

    def _gen_window(self, len_x, len_y, win_func, *args, **kwargs):
        # ``np.bool`` was removed in NumPy 1.24; the builtin ``bool`` is the
        # supported, equivalent dtype spelling.
        matrix = np.zeros((len_x, len_y), dtype=bool)
        for xidx in range(len_x):
            for yidx in range(len_y):
                if win_func(xidx, yidx, *args, **kwargs):
                    matrix[xidx, yidx] = True
        self.matrix = matrix
        # Boolean arrays index directly; "== True" comparison was redundant.
        self.list = np.argwhere(self.matrix)
acf8c0e7d994bd0d1c1f288d686820f745081b35 | 17,719 | py | Python | spellchecker/spellchecker.py | sagorbrur/bengali_pyspellchecker | 6a7533b1cd147c4708d6828dd01a5e96892dda89 | [
"MIT"
] | null | null | null | spellchecker/spellchecker.py | sagorbrur/bengali_pyspellchecker | 6a7533b1cd147c4708d6828dd01a5e96892dda89 | [
"MIT"
] | null | null | null | spellchecker/spellchecker.py | sagorbrur/bengali_pyspellchecker | 6a7533b1cd147c4708d6828dd01a5e96892dda89 | [
"MIT"
] | null | null | null | """ SpellChecker Module; simple, intuitive spell checker based on the post by
Peter Norvig. See: https://norvig.com/spell-correct.html """
from __future__ import absolute_import, division, unicode_literals
import os
import json
import string
from collections import Counter
from .utils import load_file, write_file, _parse_into_words, ENSURE_UNICODE
class SpellChecker(object):
    """ The SpellChecker class encapsulates the basics needed to accomplish a
        simple spell checking algorithm. It is based on the work by
        Peter Norvig (https://norvig.com/spell-correct.html)

        Args:
            language (str): The language of the dictionary to load or None \
                for no dictionary. Defaults to `bn` (Bengali) in this fork.
            local_dictionary (str): The path to a locally stored word \
                frequency dictionary; if provided, no language will be loaded
            distance (int): The edit distance to use. Defaults to 2.
            tokenizer (callable): Optional function used to split text into words.
            case_sensitive (bool): Flag to use a case sensitive dictionary or \
                not, only available when not using a language dictionary.
        Note:
            Using a case sensitive dictionary can be slow to correct words."""
    __slots__ = ["_distance", "_word_frequency", "_tokenizer", "_case_sensitive"]
    def __init__(
        self,
        language="bn",
        local_dictionary="bn.json",
        distance=2,
        tokenizer=None,
        case_sensitive=False,
    ):
        self._distance = None
        self.distance = distance  # use the setter value check
        self._tokenizer = _parse_into_words
        if tokenizer is not None:
            self._tokenizer = tokenizer
        # case sensitivity is only honoured when no bundled language is loaded
        self._case_sensitive = case_sensitive if not language else False
        self._word_frequency = WordFrequency(self._tokenizer, self._case_sensitive)
        # a local dictionary takes precedence over a bundled language resource
        if local_dictionary:
            self._word_frequency.load_dictionary(local_dictionary)
        elif language:
            filename = "{}.json.gz".format(language.lower())
            here = os.path.dirname(__file__)
            full_filename = os.path.join(here, "resources", filename)
            if not os.path.exists(full_filename):
                msg = (
                    "The provided dictionary language ({}) does not " "exist!"
                ).format(language.lower())
                raise ValueError(msg)
            self._word_frequency.load_dictionary(full_filename)
    def __contains__(self, key):
        """ setup easier known checks """
        key = ENSURE_UNICODE(key)
        return key in self._word_frequency
    def __getitem__(self, key):
        """ setup easier frequency checks """
        key = ENSURE_UNICODE(key)
        return self._word_frequency[key]
    @property
    def word_frequency(self):
        """ WordFrequency: An encapsulation of the word frequency `dictionary`

            Note:
                Not settable """
        return self._word_frequency
    @property
    def distance(self):
        """ int: The maximum edit distance to calculate

            Note:
                Valid values are 1 or 2; if an invalid value is passed, \
                defaults to 2 """
        return self._distance
    @distance.setter
    def distance(self, val):
        """ set the distance parameter (silently falls back to 2 on bad input) """
        tmp = 2
        try:
            int(val)
            if val > 0 and val <= 2:
                tmp = val
        except (ValueError, TypeError):
            pass
        self._distance = tmp
    def split_words(self, text):
        """ Split text into individual `words` using either a simple whitespace
            regex or the passed in tokenizer

            Args:
                text (str): The text to split into individual words
            Returns:
                list(str): A listing of all words in the provided text """
        text = ENSURE_UNICODE(text)
        return self._tokenizer(text)
    def export(self, filepath, encoding="utf-8", gzipped=True):
        """ Export the word frequency list for import in the future

            Args:
                filepath (str): The filepath to the exported dictionary
                encoding (str): The encoding of the resulting output
                gzipped (bool): Whether to gzip the dictionary or not """
        data = json.dumps(self.word_frequency.dictionary, sort_keys=True)
        write_file(filepath, encoding, gzipped, data)
    def word_probability(self, word, total_words=None):
        """ Calculate the probability of the `word` being the desired, correct
            word

            Args:
                word (str): The word for which the word probability is \
                    calculated
                total_words (int): The total number of words to use in the \
                    calculation; use the default for using the whole word \
                    frequency
            Returns:
                float: The probability that the word is the correct word """
        if total_words is None:
            total_words = self._word_frequency.total_words
        word = ENSURE_UNICODE(word)
        # Counter lookup: unknown words have count 0, so probability 0.0
        return self._word_frequency.dictionary[word] / total_words
    def correction(self, word):
        """ The most probable correct spelling for the word

            Args:
                word (str): The word to correct
            Returns:
                str: The most likely candidate """
        word = ENSURE_UNICODE(word)
        # sorted() makes the max() tie-break deterministic across runs
        candidates = list(self.candidates(word))
        return max(sorted(candidates), key=self.word_probability)
    def candidates(self, word):
        """ Generate possible spelling corrections for the provided word up to
            an edit distance of two, if and only when needed

            Args:
                word (str): The word for which to calculate candidate spellings
            Returns:
                set: The set of words that are possible candidates """
        word = ENSURE_UNICODE(word)
        if self.known([word]):  # short-cut if word is correct already
            return {word}
        # get edit distance 1...
        res = [x for x in self.edit_distance_1(word)]
        tmp = self.known(res)
        if tmp:
            return tmp
        # if still not found, use the edit distance 1 to calc edit distance 2
        if self._distance == 2:
            tmp = self.known([x for x in self.__edit_distance_alt(res)])
            if tmp:
                return tmp
        # no candidate found: fall back to the word itself
        return {word}
    def known(self, words):
        """ The subset of `words` that appear in the dictionary of words

            Args:
                words (list): List of words to determine which are in the \
                    corpus
            Returns:
                set: The set of those words from the input that are in the \
                    corpus """
        words = [ENSURE_UNICODE(w) for w in words]
        tmp = [w if self._case_sensitive else w.lower() for w in words]
        # words that should not be spell-checked (numbers, punctuation) are
        # treated as "known" so they never get corrected
        return set(
            w
            for w in tmp
            if w in self._word_frequency.dictionary
            or not self._check_if_should_check(w)
        )
    def unknown(self, words):
        """ The subset of `words` that do not appear in the dictionary

            Args:
                words (list): List of words to determine which are not in the \
                    corpus
            Returns:
                set: The set of those words from the input that are not in \
                    the corpus """
        words = [ENSURE_UNICODE(w) for w in words]
        tmp = [
            w if self._case_sensitive else w.lower()
            for w in words
            if self._check_if_should_check(w)
        ]
        return set(w for w in tmp if w not in self._word_frequency.dictionary)
    def edit_distance_1(self, word):
        """ Compute all strings that are one edit away from `word` using only
            the letters in the corpus

            Args:
                word (str): The word for which to calculate the edit distance
            Returns:
                set: The set of strings that are edit distance one from the \
                    provided word """
        word = ENSURE_UNICODE(word).lower()
        if self._check_if_should_check(word) is False:
            return {word}
        # restrict candidate letters to those actually seen in the corpus
        letters = self._word_frequency.letters
        splits = [(word[:i], word[i:]) for i in range(len(word) + 1)]
        deletes = [L + R[1:] for L, R in splits if R]
        transposes = [L + R[1] + R[0] + R[2:] for L, R in splits if len(R) > 1]
        replaces = [L + c + R[1:] for L, R in splits if R for c in letters]
        inserts = [L + c + R for L, R in splits for c in letters]
        return set(deletes + transposes + replaces + inserts)
    def edit_distance_2(self, word):
        """ Compute all strings that are two edits away from `word` using only
            the letters in the corpus

            Args:
                word (str): The word for which to calculate the edit distance
            Returns:
                list: The strings that are edit distance two from the \
                    provided word (may contain duplicates) """
        word = ENSURE_UNICODE(word).lower()
        return [
            e2 for e1 in self.edit_distance_1(word) for e2 in self.edit_distance_1(e1)
        ]
    def __edit_distance_alt(self, words):
        """ Compute all strings that are 1 edits away from all the words using
            only the letters in the corpus

            Args:
                words (list): The words for which to calculate the edit distance
            Returns:
                list: The strings that are edit distance one from the \
                    provided words """
        words = [ENSURE_UNICODE(w) for w in words]
        tmp = [
            w if self._case_sensitive else w.lower()
            for w in words
            if self._check_if_should_check(w)
        ]
        return [e2 for e1 in tmp for e2 in self.edit_distance_1(e1)]
    @staticmethod
    def _check_if_should_check(word):
        # single punctuation characters and numbers are never spell-checked
        if len(word) == 1 and word in string.punctuation:
            return False
        try:  # check if it is a number (int, float, etc)
            float(word)
            return False
        except ValueError:
            pass
        return True
class WordFrequency(object):
    """ Store the `dictionary` as a word frequency list while allowing for
        different methods to load the data and update over time """
    __slots__ = [
        "_dictionary",
        "_total_words",
        "_unique_words",
        "_letters",
        "_tokenizer",
        "_case_sensitive",
    ]
    def __init__(self, tokenizer=None, case_sensitive=False):
        self._dictionary = Counter()
        self._total_words = 0
        self._unique_words = 0
        self._letters = set()
        self._case_sensitive = case_sensitive
        self._tokenizer = _parse_into_words
        if tokenizer is not None:
            self._tokenizer = tokenizer
    def __contains__(self, key):
        """ turn on contains """
        key = ENSURE_UNICODE(key)
        key = key if self._case_sensitive else key.lower()
        return key in self._dictionary
    def __getitem__(self, key):
        """ turn on getitem """
        key = ENSURE_UNICODE(key)
        key = key if self._case_sensitive else key.lower()
        return self._dictionary[key]
    def pop(self, key, default=None):
        """ Remove the key and return the associated value or default if not
            found

            Args:
                key (str): The key to remove
                default (obj): The value to return if key is not present """
        key = ENSURE_UNICODE(key)
        key = key if self._case_sensitive else key.lower()
        return self._dictionary.pop(key, default)
    @property
    def dictionary(self):
        """ Counter: A counting dictionary of all words in the corpus and the \
                number of times each has been seen

            Note:
                Not settable """
        return self._dictionary
    @property
    def total_words(self):
        """ int: The sum of all word occurances in the word frequency \
                dictionary

            Note:
                Not settable """
        return self._total_words
    @property
    def unique_words(self):
        """ int: The total number of unique words in the word frequency list

            Note:
                Not settable """
        return self._unique_words
    @property
    def letters(self):
        """ str: The listing of all letters found within the corpus

            Note:
                Not settable """
        return self._letters
    def tokenize(self, text):
        """ Tokenize the provided string object into individual words

            Args:
                text (str): The string object to tokenize
            Yields:
                str: The next `word` in the tokenized string
            Note:
                This is the same as the `spellchecker.split_words()` """
        text = ENSURE_UNICODE(text)
        for word in self._tokenizer(text):
            yield word if self._case_sensitive else word.lower()
    def keys(self):
        """ Iterator over the key of the dictionary

            Yields:
                str: The next key in the dictionary
            Note:
                This is the same as `spellchecker.words()` """
        for key in self._dictionary.keys():
            yield key
    def words(self):
        """ Iterator over the words in the dictionary

            Yields:
                str: The next word in the dictionary
            Note:
                This is the same as `spellchecker.keys()` """
        for word in self._dictionary.keys():
            yield word
    def items(self):
        """ Iterator over the words in the dictionary

            Yields:
                str: The next word in the dictionary
                int: The number of instances in the dictionary
            Note:
                This is the same as `dict.items()` """
        for word in self._dictionary.keys():
            yield word, self._dictionary[word]
    def load_dictionary(self, filename, encoding="utf-8"):
        """ Load in a pre-built word frequency list

            Args:
                filename (str): The filepath to the json (optionally gzipped) \
                    file to be loaded
                encoding (str): The encoding of the dictionary """
        with load_file(filename, encoding) as data:
            data = data if self._case_sensitive else data.lower()
            # json.loads() dropped its `encoding` argument (deprecated since
            # Python 3.1, removed in 3.9); `data` is already a decoded str.
            self._dictionary.update(json.loads(data))
            self._update_dictionary()
    def load_text_file(self, filename, encoding="utf-8", tokenizer=None):
        """ Load in a text file from which to generate a word frequency list

            Args:
                filename (str): The filepath to the text file to be loaded
                encoding (str): The encoding of the text file
                tokenizer (function): The function to use to tokenize a string
        """
        with load_file(filename, encoding=encoding) as data:
            self.load_text(data, tokenizer)
    def load_text(self, text, tokenizer=None):
        """ Load text from which to generate a word frequency list

            Args:
                text (str): The text to be loaded
                tokenizer (function): The function to use to tokenize a string
        """
        text = ENSURE_UNICODE(text)
        if tokenizer:
            words = [x if self._case_sensitive else x.lower() for x in tokenizer(text)]
        else:
            words = self.tokenize(text)
        self._dictionary.update(words)
        self._update_dictionary()
    def load_words(self, words):
        """ Load a list of words from which to generate a word frequency list

            Args:
                words (list): The list of words to be loaded """
        words = [ENSURE_UNICODE(w) for w in words]
        self._dictionary.update(
            [word if self._case_sensitive else word.lower() for word in words]
        )
        self._update_dictionary()
    def add(self, word):
        """ Add a word to the word frequency list

            Args:
                word (str): The word to add """
        word = ENSURE_UNICODE(word)
        self.load_words([word])
    def remove_words(self, words):
        """ Remove a list of words from the word frequency list

            Args:
                words (list): The list of words to remove """
        words = [ENSURE_UNICODE(w) for w in words]
        for word in words:
            self._dictionary.pop(word if self._case_sensitive else word.lower())
        self._update_dictionary()
    def remove(self, word):
        """ Remove a word from the word frequency list

            Args:
                word (str): The word to remove """
        word = ENSURE_UNICODE(word)
        self._dictionary.pop(word if self._case_sensitive else word.lower())
        self._update_dictionary()
    def remove_by_threshold(self, threshold=5):
        """ Remove all words at, or below, the provided threshold

            Args:
                threshold (int): The threshold at which a word is to be \
                    removed """
        # materialize keys first: the dict is mutated during iteration
        keys = [x for x in self._dictionary.keys()]
        for key in keys:
            if self._dictionary[key] <= threshold:
                self._dictionary.pop(key)
        self._update_dictionary()
    def _update_dictionary(self):
        """ Update the word frequency object's derived statistics """
        self._total_words = sum(self._dictionary.values())
        self._unique_words = len(self._dictionary.keys())
        self._letters = set()
        for key in self._dictionary:
            self._letters.update(key)
| 35.868421 | 87 | 0.582539 |
acf8c12e3292dceb7a09dd004dbbea0ee58ab683 | 2,710 | py | Python | airflow/providers/amazon/aws/example_dags/example_sqs.py | takuti/airflow | 0ac3b8c3dd749c59e60cf0169580b9e7c5049d9e | [
"Apache-2.0"
] | 4 | 2021-06-26T13:37:35.000Z | 2022-01-11T15:49:44.000Z | airflow/providers/amazon/aws/example_dags/example_sqs.py | takuti/airflow | 0ac3b8c3dd749c59e60cf0169580b9e7c5049d9e | [
"Apache-2.0"
] | 33 | 2021-07-25T10:29:30.000Z | 2022-03-30T04:39:06.000Z | airflow/providers/amazon/aws/example_dags/example_sqs.py | takuti/airflow | 0ac3b8c3dd749c59e60cf0169580b9e7c5049d9e | [
"Apache-2.0"
] | 6 | 2018-04-09T07:46:05.000Z | 2019-07-16T00:13:15.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime, timedelta
from airflow import DAG
from airflow.decorators import task
from airflow.providers.amazon.aws.hooks.sqs import SqsHook
from airflow.providers.amazon.aws.operators.sqs import SqsPublishOperator
from airflow.providers.amazon.aws.sensors.sqs import SqsSensor
QUEUE_NAME = 'Airflow-Example-Queue'
AWS_CONN_ID = 'aws_default'
@task(task_id="create_queue")
def create_queue_fn():
    """This is a Python function that creates an SQS queue"""
    hook = SqsHook()
    result = hook.create_queue(queue_name=QUEUE_NAME)
    # Return just the URL: downstream tasks reference the queue by URL.
    return result['QueueUrl']
@task(task_id="delete_queue")
def delete_queue_fn(queue_url):
    """This is a Python function that deletes an SQS queue"""
    hook = SqsHook()
    hook.get_conn().delete_queue(QueueUrl=queue_url)
with DAG(
    dag_id='example_sqs',
    schedule_interval=None,
    start_date=datetime(2021, 1, 1),
    dagrun_timeout=timedelta(minutes=60),
    tags=['example'],
    catchup=False,
) as dag:
    # [START howto_sqs_operator_and_sensor]

    # Using a task-decorated function to create an SQS queue
    create_queue = create_queue_fn()

    publish_to_queue = SqsPublishOperator(
        task_id='publish_to_queue',
        sqs_queue=create_queue,
        message_content="{{ task_instance }}-{{ execution_date }}",
        message_attributes=None,
        delay_seconds=0,
    )

    read_from_queue = SqsSensor(
        task_id='read_from_queue',
        sqs_queue=create_queue,
        max_messages=5,
        wait_time_seconds=1,
        visibility_timeout=None,
        message_filtering=None,
        message_filtering_match_values=None,
        message_filtering_config=None,
    )

    # Using a task-decorated function to delete the SQS queue we created earlier
    delete_queue = delete_queue_fn(create_queue)

    # Dependency chain: create -> publish -> sense -> delete
    create_queue >> publish_to_queue >> read_from_queue >> delete_queue
    # [END howto_sqs_operator_and_sensor]
| 33.04878 | 80 | 0.736162 |
acf8c2d834e9c5365552c41f2aadc877dfe88196 | 1,035 | py | Python | setup.py | corbinfanning/mustaine | 3750d2d17a39a308e5c5044553a91b0f4cf48bb0 | [
"BSD-3-Clause"
] | null | null | null | setup.py | corbinfanning/mustaine | 3750d2d17a39a308e5c5044553a91b0f4cf48bb0 | [
"BSD-3-Clause"
] | null | null | null | setup.py | corbinfanning/mustaine | 3750d2d17a39a308e5c5044553a91b0f4cf48bb0 | [
"BSD-3-Clause"
import os, sys

from setuptools import find_packages, setup

from mustaine import __version__

if sys.version_info < (2,6):
    raise NotImplementedError("mustaine requires Python 2.6 or later")

# Read the long description with open() instead of the file() builtin,
# which only exists on Python 2; the context manager also guarantees the
# handle is closed instead of leaking until GC.
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
    long_description = readme.read()

setup(
    name         = "mustaine",
    version      = __version__,
    description  = "Hessian RPC Library",
    long_description = long_description,
    classifiers  = [
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Software Development :: Object Brokering',
        'Topic :: Software Development :: Libraries',
    ],
    url          = "http://github.com/bgilmore/mustaine",
    author       = "Brandon Gilmore",
    author_email = "brandon@mg2.org",
    license      = "BSD",
    platforms    = "any",
    packages     = find_packages(exclude=["test"]),
    zip_safe     = True,
)
| 25.875 | 70 | 0.61256 |
acf8c37ebf53120180dcfb52c9b5c94728c8f0d0 | 336 | py | Python | website/markov.py | xPowerz/Chase-Morgan | b0cbb997076cfc2698f9337ad9ca155d9f00ac3e | [
"MIT"
] | 1 | 2021-08-22T22:50:00.000Z | 2021-08-22T22:50:00.000Z | website/markov.py | birdsoup/Chase-Morgan | b0cbb997076cfc2698f9337ad9ca155d9f00ac3e | [
"MIT"
] | null | null | null | website/markov.py | birdsoup/Chase-Morgan | b0cbb997076cfc2698f9337ad9ca155d9f00ac3e | [
"MIT"
] | null | null | null | import markovify
def generateMemeText(input):
    """Generate a meme sentence from the corpus Markov model.

    Attempts to start the sentence with *input*; when the model has no
    state for that word (KeyError), falls back to a generic sentence.
    """
    with open("../meme_corpus.txt") as corpus_file:
        corpus = corpus_file.read()
    model = markovify.NewlineText(corpus)
    try:
        sentence = model.make_sentence_with_start(input)
    except KeyError:
        # no chain starts with the given word: generate without a seed
        sentence = model.make_sentence(tries=100)
    return sentence
acf8c37f1003384f0782d28909f4fbba9f02b729 | 3,249 | py | Python | nicos_mlz/nectar/setups/special/monitor-html.py | jkrueger1/nicos | 5f4ce66c312dedd78995f9d91e8a6e3c891b262b | [
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 12 | 2019-11-06T15:40:36.000Z | 2022-01-01T16:23:00.000Z | nicos_mlz/nectar/setups/special/monitor-html.py | jkrueger1/nicos | 5f4ce66c312dedd78995f9d91e8a6e3c891b262b | [
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 91 | 2020-08-18T09:20:26.000Z | 2022-02-01T11:07:14.000Z | nicos_mlz/nectar/setups/special/monitor-html.py | jkrueger1/nicos | 5f4ce66c312dedd78995f9d91e8a6e3c891b262b | [
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 6 | 2020-01-11T10:52:30.000Z | 2022-02-25T12:35:23.000Z | description = 'setup for the status monitor'
group = 'special'
# Column with the experiment/proposal info and reactor power.
_expcolumn = Column(
    Block('Experiment', [
        BlockRow(
            Field(name='Proposal', key='exp/proposal', width=10),
            Field(name='Title', key='exp/title', width=50, istext=True),
            Field(name='Current status', key='exp/action', width=30, istext=True),
        ),
        BlockRow(
            Field(name='Sample', key='sample/samplename', width=40,
                  istext=True, maxlen=40),
            Field(name='Remark', key='exp/remark', width=40,
                  istext=True, maxlen=40),
        ),
        ],
    ),
    Block('Reactor', [
        BlockRow(
            Field(name='Reactor Power', dev='ReactorPower'),
        ),
    ]),
)
# Sample translation/rotation axes; only shown with the 'servostar' setup.
_translationColumn = Column(
    Block('Sample translation', [
        BlockRow(
            Field(dev='stx'),
        ),
        BlockRow(
            Field(dev='sty'),
        ),
        BlockRow(
            Field(dev='sry'),
        ),
        ],
        setups='servostar',
    ),
)
# Detector block for the Andor iKon-L camera setups.
_detectorikonlblock = Block('Detector', [
    BlockRow(
        Field(name='Last Image', key='exp/lastpoint'),
    ),
    BlockRow(
        Field(dev='ccdTemp'),
        Field(name='CCD status', key='ccd/status[1]', width=15),
    ),
    BlockRow(
        Field(name='bin', key='ccd.bin'),
        Field(name='flip (H,V)', key='ccd.flip'),
        Field(name='rotation', key='ccd.rotation'),
    ),
    BlockRow(
        Field(name='roi', key='ccd.roi'),
        Field(name='hsspeed', key='ccd.hsspeed', width=4),
        Field(name='vsspeed', key='ccd.vsspeed', width=4),
        Field(name='pgain', key='ccd.pgain', width=4),
    ),
    ],
    setups='detector_ikonl*',
)
# Detector block for the Andor Neo camera setup.
_detectorneoblock = Block('Detector', [
    BlockRow(
        Field(name='Last Image', key='exp/lastpoint'),
    ),
    BlockRow(
        Field(dev='temp_neo'),
        Field(name='CCD status', key='neo/status[1]', width=15),
    ),
    BlockRow(
        Field(name='bin', key='neo.bin'),
        Field(name='flip (H,V)', key='neo.flip'),
        Field(name='rotation', key='neo.rotation'),
    ),
    BlockRow(
        Field(name='roi', key='neo.roi'),
        Field(name='elshuttermode', key='neo.elshuttermode', width=6),
        Field(name='readoutrate MHz', key='neo.readoutrate', width=4),
    ),
    ],
    setups='detector_neo',
)
_detectorColumn = Column(
    _detectorikonlblock,
    _detectorneoblock,
)
_ubahnColumn = Column(
    Block('U-Bahn', [
        BlockRow(
            Field(dev='UBahn'),
        ),
    ],
    setups='ubahn',
    ),
)
# The HTML monitor writes the rendered status page to `filename` every
# `interval` seconds, reading values from the given cache server.
devices = dict(
    Monitor = device('nicos.services.monitor.html.Monitor',
        title = 'NICOS status monitor',
        loglevel = 'info',
        filename = '/nectarcontrol/webroot/status.html',
        interval = 10,
        prefix = 'nicos/',
        cache = 'nectarhw.nectar.frm2',
        font = 'Luxi Sans',
        valuefont = 'Consolas',
        padding = 0,
        layout = [[_expcolumn],
                  [_translationColumn, _detectorColumn, _ubahnColumn]],
        noexpired = True,
    ),
)
| 27.302521 | 82 | 0.503847 |
acf8c3c31b5d253fac209ad42302a3ca7401e0f4 | 1,510 | py | Python | test.py | jmilliaan/smarthome-3tier | 62ec80439834c2f0158775467d4a1c92153ae6a8 | [
"MIT"
] | null | null | null | test.py | jmilliaan/smarthome-3tier | 62ec80439834c2f0158775467d4a1c92153ae6a8 | [
"MIT"
] | null | null | null | test.py | jmilliaan/smarthome-3tier | 62ec80439834c2f0158775467d4a1c92153ae6a8 | [
"MIT"
] | null | null | null | import pytest
from MySmartHomeService import MainService
from MySmartHomeService import Room
test_service = MainService()
test_room = Room()
@pytest.mark.checkuser
def test_check_user_in_db_1():
    # Known user with the matching password -> authentication succeeds.
    assert test_service.check_user_in_db(username="Johan", password="10102190103") is True
@pytest.mark.checkuser
def test_check_user_in_db_2():
    # Known user, wrong password -> the service returns a message string
    # rather than a boolean.
    assert test_service.check_user_in_db(username="Johan", password="wrongpassword") == "Wrong Password"
@pytest.mark.checkuser
def test_check_user_in_db_3():
    # Unknown user -> False regardless of the supplied password.
    assert test_service.check_user_in_db(username="Joy", password="notrelevantpassword") is False
@pytest.mark.log
def test_log_():
    # log() echoes back the (sensor, actuator, details) triple it recorded.
    assert test_service.log(sensor="LDR", actuator="Lampu", details="Turning on Lamp") == \
        ("LDR", "Lampu", "Turning on Lamp")
@pytest.mark.addsensor
def test_addsensor():
    # add_sensor appears to return the accumulated sensor list after each
    # call, so the third call yields all three registrations.
    # NOTE: test_room is module-level, so this test shares state with
    # test_addactuator below.
    test_room.add_sensor(1, "Infrared", 2)
    test_room.add_sensor(2, "Thermometer", 2)
    assert test_room.add_sensor(3, "LDR", 2) == [(1, "Infrared", 2),
                                                 (2, "Thermometer", 2),
                                                 (3, "LDR", 2)]
@pytest.mark.addactuator
def test_addactuator():
    # Same pattern as test_addsensor: the return value accumulates all
    # actuators registered so far on the shared test_room instance.
    test_room.add_actuator(1, "AC", 3)
    test_room.add_actuator(2, "Speaker", 3)
    assert test_room.add_actuator(3, "Lampu", 3) == [(1, "AC", 3),
                                                     (2, "Speaker", 3),
                                                     (3, "Lampu", 3)]
| 32.12766 | 105 | 0.595364 |
acf8c5714eb8ee793fd0e3b704294f724c7153d0 | 189 | py | Python | CortaCinta.tests.py | manureta/segmentacion | 74f67dbd0f84189d620e95a9ba777f3aa6f65f4b | [
"MIT"
] | 1 | 2019-06-05T12:21:43.000Z | 2019-06-05T12:21:43.000Z | CortaCinta.tests.py | manureta/segmentacion | 74f67dbd0f84189d620e95a9ba777f3aa6f65f4b | [
"MIT"
] | 9 | 2019-06-05T18:20:10.000Z | 2019-11-20T20:04:49.000Z | CortaCinta.tests.py | manureta/segmentacion | 74f67dbd0f84189d620e95a9ba777f3aa6f65f4b | [
"MIT"
] | 3 | 2016-12-06T21:07:41.000Z | 2019-06-04T20:59:44.000Z | from CortaCinta import CortaCinta
# Sample tapes of increasing length, including the empty tape and
# single-cell tapes, used to exercise CortaCinta.
cintas = [
    [],
    [1],
    [1, 1],
    [4, 5],
    [2, 2, 2, 2],
    [3, 2, 1, 1, 2],
]
# Cut parameters passed to CortaCinta; presumably n pieces of length m --
# confirm against the CortaCinta implementation.
n, m = 2, 3
# Print each tape alongside the result of cutting it.
for cinta in cintas:
    print (cinta, ':', CortaCinta(cinta, n, m))
| 10.5 | 45 | 0.513228 |
acf8c5f02a3042e67829f46745dd0cfc8b9c0198 | 391 | py | Python | backend/muutoca/wsgi.py | relsi/muutoca | 8db8c7783558ac1f71cd0a257c23ddc8737e1cdf | [
"MIT"
] | 1 | 2021-03-06T23:27:47.000Z | 2021-03-06T23:27:47.000Z | backend/muutoca/wsgi.py | relsi/muutoca | 8db8c7783558ac1f71cd0a257c23ddc8737e1cdf | [
"MIT"
] | 1 | 2021-03-09T23:06:24.000Z | 2021-03-09T23:06:24.000Z | backend/muutoca/wsgi.py | relsi/muutoca | 8db8c7783558ac1f71cd0a257c23ddc8737e1cdf | [
"MIT"
] | 1 | 2021-03-06T18:49:10.000Z | 2021-03-06T18:49:10.000Z | """
WSGI config for muutoca project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Fall back to this project's settings module if the environment does not
# already define DJANGO_SETTINGS_MODULE.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'muutoca.settings')
# Module-level WSGI callable that WSGI servers (gunicorn, uWSGI, ...) import.
application = get_wsgi_application()
| 23 | 78 | 0.785166 |
acf8c64fa96c1bb9e2a77b9c7fe1c385d5cdab50 | 13,477 | py | Python | scattertext/termscoring/ScaledFScore.py | Nimesh-Patel/Python_TextAnal_Visualization | aaa78b06698b4adf6a19b58b3804524ca88d91ff | [
"Apache-2.0"
] | null | null | null | scattertext/termscoring/ScaledFScore.py | Nimesh-Patel/Python_TextAnal_Visualization | aaa78b06698b4adf6a19b58b3804524ca88d91ff | [
"Apache-2.0"
] | null | null | null | scattertext/termscoring/ScaledFScore.py | Nimesh-Patel/Python_TextAnal_Visualization | aaa78b06698b4adf6a19b58b3804524ca88d91ff | [
"Apache-2.0"
] | null | null | null | import numpy as np
import pandas as pd
from scipy.stats import norm, rankdata
from scattertext.Common import DEFAULT_SCALER_ALGO, DEFAULT_BETA
class InvalidScalerException(Exception):
    """Raised when an unknown scaler-algorithm name is requested."""
    pass
class ScoreBalancer(object):
    """Combine per-category and per-complement term scores into one scale.

    For each term, the side with the larger score wins: category scores are
    kept as positive values, not-category scores as negative values, and ties
    become 0.  The signed scores are then squashed into [0, 1], where 0.5
    means "no preference for either category".
    """

    @staticmethod
    def balance_scores(cat_scores, not_cat_scores):
        """Return balanced scores rescaled into [0, 1] (0.5 is neutral)."""
        scores = ScoreBalancer.balance_scores_and_dont_scale(cat_scores,
                                                             not_cat_scores)
        return ScoreBalancer._zero_centered_scale(scores)

    @staticmethod
    def balance_scores_and_dont_scale(cat_scores, not_cat_scores):
        """Return raw signed scores.

        Entries where the category score dominates carry +cat score, entries
        where the complement dominates carry -not_cat score, ties are 0.
        """
        # np.float was removed in NumPy 1.24; the builtin float is equivalent.
        scores = np.zeros(len(cat_scores)).astype(float)
        scores[cat_scores > not_cat_scores] = cat_scores[cat_scores > not_cat_scores]
        scores[cat_scores < not_cat_scores] = -not_cat_scores[cat_scores < not_cat_scores]
        return scores

    @staticmethod
    def _zero_centered_scale(ar):
        """Min-max scale positives and negatives separately, then map the
        resulting [-1, 1] range onto [0, 1] (zeros land on 0.5)."""
        ar[ar > 0] = ScoreBalancer._scale(ar[ar > 0])
        ar[ar < 0] = -ScoreBalancer._scale(-ar[ar < 0])
        return (ar + 1) / 2.

    @staticmethod
    def _scale(ar):
        """Min-max scale ``ar`` into [0, 1]; a constant array maps to 0.5."""
        if len(ar) == 0:
            return ar
        if ar.min() == ar.max():
            return np.full(len(ar), 0.5)
        return (ar - ar.min()) / (ar.max() - ar.min())
class ScaledFScorePresets(object):
    """Configurable Scaled F-Score term scorer.

    Scores each term by the weighted harmonic mean (F-beta) of the scaled
    precision P(category|word) and scaled recall P(word|category), then
    balances category and not-category scores via ScoreBalancer.
    """

    def __init__(self,
                 scaler_algo=DEFAULT_SCALER_ALGO,
                 beta=DEFAULT_BETA,
                 one_to_neg_one=False,
                 priors=None,
                 use_score_difference=False,
                 ):
        # scaler_algo: name understood by ScaledFScore._safe_scaler
        #   (e.g. 'normcdf', 'percentile').
        # beta: F-beta weight; must be > 0.
        # one_to_neg_one: map final scores from [0, 1] onto [-1, 1].
        # priors: optional per-term prior counts added to both categories.
        # use_score_difference: combine the two sides by score difference
        #   instead of ScoreBalancer.
        self.scaler_algo_ = scaler_algo
        self.beta_ = beta
        self.one_to_neg_one_ = one_to_neg_one
        self.priors_ = priors
        self.use_score_difference_ = use_score_difference
        assert self.beta_ > 0

    def get_name(self):
        """Human-readable name of this scorer."""
        return 'Scaled F-Score'

    def get_default_score(self):
        """Neutral score: 0 on the [-1, 1] scale, 0.5 on [0, 1]."""
        if self.one_to_neg_one_:
            return 0
        return 0.5

    def get_scores(self, cat_word_counts, not_cat_word_counts):
        '''
        Parameters
        ----------
        cat_word_counts : np.array
            category counts
        not_cat_word_counts : np.array
            not category counts
        Returns
        -------
        np.array
            scores
        '''
        cat_scores = self.get_scores_for_category(cat_word_counts,
                                                  not_cat_word_counts)
        not_cat_scores = self.get_scores_for_category(not_cat_word_counts,
                                                      cat_word_counts)
        if self.use_score_difference_:
            # Difference of the two sides, shifted back into [0, 1].
            scores = ((cat_scores - not_cat_scores) + 1.) / 2.
        else:
            scores = ScoreBalancer.balance_scores(cat_scores, not_cat_scores)
        if self.one_to_neg_one_:
            return 2 * scores - 1
        else:
            return scores

    def get_scores_for_category(self, cat_word_counts, not_cat_word_counts):
        '''
        Parameters
        ----------
        cat_word_counts : np.array
            category counts
        not_cat_word_counts : np.array
            not category counts
        Returns
        -------
        np.array
            scores
        '''
        beta = self.beta_
        assert len(cat_word_counts) == len(not_cat_word_counts)
        # Remember the pandas index (if any) so it can be restored on the
        # result; computation itself runs on raw numpy arrays.
        old_cat_word_counts = None
        if type(cat_word_counts) == pd.Series:
            assert all(cat_word_counts.index == not_cat_word_counts.index)
            old_cat_word_counts = cat_word_counts
            cat_word_counts = cat_word_counts.values
        if type(not_cat_word_counts) == pd.Series:
            not_cat_word_counts = not_cat_word_counts.values
        if self.priors_ is not None:
            # Smooth precision and recall with the supplied prior counts.
            p = self.priors_
            assert len(p) == len(cat_word_counts)
            precision = ((cat_word_counts + p * 1.) /
                         (cat_word_counts + not_cat_word_counts + 2 * p))
            recall = (cat_word_counts + p) * 1. / (cat_word_counts.sum() + p.sum())
        else:
            precision = (cat_word_counts * 1. / (cat_word_counts + not_cat_word_counts))
            recall = cat_word_counts * 1. / cat_word_counts.sum()
        precision_normcdf = ScaledFScore._safe_scaler(self.scaler_algo_, precision)
        recall_normcdf = ScaledFScore._safe_scaler(self.scaler_algo_, recall)
        scores = self._weighted_h_mean(precision_normcdf, recall_normcdf)
        # Terms with 0/0 contributions produce NaN; treat them as no signal.
        scores[np.isnan(scores)] = 0.
        if old_cat_word_counts is not None:
            return pd.Series(scores, index=old_cat_word_counts.index)
        return scores

    def _weighted_h_mean(self, precision_normcdf, recall_normcdf):
        # F-beta: (1 + B^2) * P * R / (B^2 * P + R).
        scores = (1 + self.beta_ ** 2) * (precision_normcdf * recall_normcdf) \
                 / ((self.beta_ ** 2) * precision_normcdf + recall_normcdf)
        return scores
class ScaledFScorePresetsNeg1To1(ScaledFScorePresets):
    """ScaledFScorePresets variant whose final scores are mapped from
    [0, 1] onto [-1, 1], with 0 as the neutral score."""

    @staticmethod
    def get_default_score():
        return 0

    def get_scores(self, cat_word_counts, not_cat_word_counts):
        scores = ScaledFScorePresets.get_scores(self, cat_word_counts, not_cat_word_counts)
        # Linear map [0, 1] -> [-1, 1].
        return scores * 2 - 1
class ScaledFZScore(ScaledFScorePresets):
    """Scaled F-Scores standardized into z-scores (mean 0, unit variance)."""

    @staticmethod
    def get_default_score():
        return 0

    def get_scores(self, cat_word_counts, not_cat_word_counts):
        """Return the parent Scaled F-Scores standardized to z-scores."""
        sfs = ScaledFScorePresets.get_scores(self, cat_word_counts, not_cat_word_counts)
        return (sfs - sfs.mean()) / np.std(sfs)

    def get_name(self):
        return "Scaled F-Score Z-Score"

    def get_score_deltas(self, cat_word_counts, not_cat_word_counts):
        """Return log-ratio of category vs. not-category per-term SFS."""
        cat_scores = ScaledFScorePresets.get_scores_for_category(
            self, cat_word_counts, not_cat_word_counts)
        not_cat_scores = ScaledFScorePresets.get_scores_for_category(
            self, not_cat_word_counts, cat_word_counts)
        return np.log(cat_scores) - np.log(not_cat_scores)

    def get_p_vals(self, X):
        '''
        Parameters
        ----------
        X : np.array
            Array of word counts, shape (N, 2) where N is the vocab size. X[:,0] is the
            positive class, while X[:,1] is the negative class. None by default
        Returns
        -------
        np.array of p-values
        '''
        # Z-scores are mapped through the standard normal CDF.
        z_scores = self.get_scores(X[:, 0], X[:, 1])
        return norm.cdf(z_scores)
class ScaledFZScorePrior(ScaledFZScore):
    """Scaled F-Score z-scores computed after blending an informative prior
    into both categories' term counts."""

    def __init__(self, prior, alpha=1, scaler_algo=DEFAULT_SCALER_ALGO, beta=DEFAULT_BETA):
        """
        Parameters
        ----------
        prior : np.array
            Per-term prior counts, aligned with the count vectors passed to
            ``get_scores``/``get_score_deltas``.
        alpha : float
            Weight of the prior relative to the observed counts.
        scaler_algo : str
            Scaler name forwarded to ScaledFZScore.
        beta : float
            F-beta weight forwarded to ScaledFZScore.
        """
        self.prior = prior
        self.alpha = alpha
        ScaledFZScore.__init__(self, scaler_algo, beta)

    def get_name(self):
        # NOTE: the original class defined get_name twice; the second
        # definition silently shadowed the first, so "SFS Z-Scores" is the
        # behavior callers actually observed and the one preserved here.
        return "SFS Z-Scores"

    def apply_prior(self, c):
        """Return counts ``c`` blended with the prior, scaled so the prior
        contributes ``alpha`` times the observed count mass."""
        prior_scale = (np.sum(c) * self.alpha * 1. / np.sum(self.prior))
        return c + (self.prior * prior_scale)

    def get_scores(self, cat_word_counts, not_cat_word_counts):
        """Return z-scored Scaled F-Scores of the prior-smoothed counts."""
        sfs = ScaledFScorePresets.get_scores(self, self.apply_prior(cat_word_counts),
                                             self.apply_prior(not_cat_word_counts))
        return (sfs - sfs.mean()) / np.std(sfs)

    def get_score_deltas(self, cat_word_counts, not_cat_word_counts):
        """Return log-ratio of category vs. not-category SFS on the
        prior-smoothed counts."""
        cat_scores = ScaledFScorePresets.get_scores_for_category(
            self,
            self.apply_prior(cat_word_counts),
            self.apply_prior(not_cat_word_counts))
        not_cat_scores = ScaledFScorePresets.get_scores_for_category(
            self,
            self.apply_prior(not_cat_word_counts),
            self.apply_prior(cat_word_counts))
        return np.log(cat_scores) - np.log(not_cat_scores)
class ScaledFScore(object):
    """Static-method interface for computing (balanced) Scaled F-Scores and
    the scaler functions they rely on."""

    @staticmethod
    def get_default_score():
        """Neutral score: the mid-point of the [0, 1] range."""
        return 0.5

    @staticmethod
    def get_scores(cat_word_counts, not_cat_word_counts,
                   scaler_algo=DEFAULT_SCALER_ALGO, beta=DEFAULT_BETA):
        ''' Computes balanced scaled f-scores
        Parameters
        ----------
        cat_word_counts : np.array
            category counts
        not_cat_word_counts : np.array
            not category counts
        scaler_algo : str
            Function that scales an array to the range [0, 1]. Use 'percentile', 'normcdf'. Default.
        beta : float
            Beta in (1+B^2) * (Scale(P(w|c)) * Scale(P(c|w)))/(B^2*Scale(P(w|c)) + Scale(P(c|w))). Default.
        Returns
        -------
        np.array
            Harmonic means of scaled P(word|category)
            and scaled P(category|word) for >median half of scores. Low scores are harmonic means
            of scaled P(word|~category) and scaled P(~category|word). Array is squashed to between
            0 and 1, with 0.5 indicating a median score.
        '''
        cat_scores = ScaledFScore.get_scores_for_category(cat_word_counts,
                                                          not_cat_word_counts,
                                                          scaler_algo,
                                                          beta)
        not_cat_scores = ScaledFScore.get_scores_for_category(not_cat_word_counts,
                                                              cat_word_counts,
                                                              scaler_algo, beta)
        return ScoreBalancer.balance_scores(cat_scores, not_cat_scores)

    @staticmethod
    def get_scores_for_category(cat_word_counts,
                                not_cat_word_counts,
                                scaler_algo=DEFAULT_SCALER_ALGO,
                                beta=DEFAULT_BETA):
        ''' Computes unbalanced scaled-fscores
        Parameters
        ----------
        cat_word_counts : np.array or pd.Series
            category counts
        not_cat_word_counts : np.array or pd.Series
            not category counts
        scaler_algo : str
            Function that scales an array to the range [0, 1]. Use 'percentile', 'normcdf'. Default normcdf
        beta : float
            Beta in (1+B^2) * (Scale(P(w|c)) * Scale(P(c|w)))/(B^2*Scale(P(w|c)) + Scale(P(c|w))). Defaults to 1.
        Returns
        -------
        np.array of harmonic means of scaled P(word|category) and scaled P(category|word).
        '''
        assert beta > 0
        # Remember the pandas index (if any) so it can be restored on the
        # result; computation runs on raw numpy arrays.
        old_cat_word_counts = None
        if type(cat_word_counts) == pd.Series:
            old_cat_word_counts = cat_word_counts
            cat_word_counts = cat_word_counts.values
        if type(not_cat_word_counts) == pd.Series:
            not_cat_word_counts = not_cat_word_counts.values
        precision = (cat_word_counts * 1. / (cat_word_counts + not_cat_word_counts))
        recall = cat_word_counts * 1. / cat_word_counts.sum()
        precision_normcdf = ScaledFScore._safe_scaler(scaler_algo, precision)
        recall_normcdf = ScaledFScore._safe_scaler(scaler_algo, recall)
        # F-beta weighted harmonic mean of the two scaled probabilities.
        scores = (1 + beta ** 2) * (precision_normcdf * recall_normcdf) \
                 / ((beta ** 2) * precision_normcdf + recall_normcdf)
        # 0/0 contributions yield NaN; treat them as no signal.
        scores[np.isnan(scores)] = 0.
        if old_cat_word_counts is None:
            return scores
        else:
            return pd.Series(scores, index=old_cat_word_counts.index)

    @staticmethod
    def _get_scaled_f_score_from_counts(cat_word_counts, not_cat_word_counts, scaler_algo, beta=DEFAULT_BETA):
        # NOTE(review): the helper invoked below is not defined on this class
        # in this module, so calling this method raises AttributeError.  It
        # appears to be dead legacy code -- confirm before relying on it.
        # np.float was removed in NumPy 1.24; the builtin float is equivalent.
        p_word_given_category = cat_word_counts.astype(float) / cat_word_counts.sum()
        p_category_given_word = cat_word_counts * 1. / (cat_word_counts + not_cat_word_counts)
        scores \
            = ScaledFScore._get_harmonic_mean_of_probabilities_over_non_zero_in_category_count_terms \
            (cat_word_counts, p_category_given_word, p_word_given_category, scaler_algo, beta)
        return scores

    @staticmethod
    def _safe_scaler(algo, ar):
        """Scale ``ar`` with ``algo``; fall back to percentile ranks if the
        primary scaler produced any NaNs (e.g. zero variance under
        'normcdf')."""
        if algo == 'none':
            return ar
        scaled_ar = ScaledFScore._get_scaler_function(algo)(ar)
        if np.isnan(scaled_ar).any():
            return ScaledFScore._get_scaler_function('percentile')(scaled_ar)
        return scaled_ar

    @staticmethod
    def _get_scaler_function(scaler_algo):
        """Return the scaling callable registered under ``scaler_algo``.

        Raises
        ------
        InvalidScalerException
            If ``scaler_algo`` is not a supported algorithm name.
        """
        scaler = None
        if scaler_algo == 'normcdf':
            scaler = lambda x: norm.cdf(x, x.mean(), x.std())
        elif scaler_algo == 'lognormcdf':
            scaler = lambda x: norm.cdf(np.log(x), np.log(x).mean(), np.log(x).std())
        elif scaler_algo == 'percentile':
            scaler = lambda x: rankdata(x).astype(np.float64) / len(x)
        elif scaler_algo == 'percentiledense':
            scaler = lambda x: rankdata(x, method='dense').astype(np.float64) / len(x)
        elif scaler_algo == 'ecdf':
            # Optional dependency; only imported when this scaler is chosen.
            from statsmodels.distributions import ECDF
            scaler = lambda x: ECDF(x)
        elif scaler_algo == 'none':
            scaler = lambda x: x
        else:
            raise InvalidScalerException("Invalid scaler alogrithm. Must be either percentile or normcdf.")
        return scaler
| 39.177326 | 113 | 0.608741 |
acf8c6b2830f0359c7063395000a13c0e028d041 | 2,166 | py | Python | geopandas/tests/test_merge.py | schilli/geopandas | 29add0a735b00dc20c79e0fccc8e6a775c4997b0 | [
"BSD-3-Clause"
] | 1 | 2022-01-12T09:00:54.000Z | 2022-01-12T09:00:54.000Z | geopandas/tests/test_merge.py | samuelduchesne/geopandas | 29add0a735b00dc20c79e0fccc8e6a775c4997b0 | [
"BSD-3-Clause"
] | null | null | null | geopandas/tests/test_merge.py | samuelduchesne/geopandas | 29add0a735b00dc20c79e0fccc8e6a775c4997b0 | [
"BSD-3-Clause"
] | null | null | null | from __future__ import absolute_import
import pandas as pd
from shapely.geometry import Point
from geopandas import GeoDataFrame, GeoSeries
class TestMerging:
    """Tests that pandas merge/concat on GeoDataFrames preserve the
    GeoDataFrame class, the geometry column name and the CRS metadata."""

    def setup_method(self):
        # A 3-point GeoDataFrame plus a plain DataFrame to merge against.
        self.gseries = GeoSeries([Point(i, i) for i in range(3)])
        self.series = pd.Series([1, 2, 3])
        self.gdf = GeoDataFrame({'geometry': self.gseries, 'values': range(3)})
        self.df = pd.DataFrame({'col1': [1, 2, 3], 'col2': [0.1, 0.2, 0.3]})

    def _check_metadata(self, gdf, geometry_column_name='geometry', crs=None):
        # Helper asserting the GeoDataFrame-specific metadata survived.
        assert gdf._geometry_column_name == geometry_column_name
        assert gdf.crs == crs

    def test_merge(self):
        res = self.gdf.merge(self.df, left_on='values', right_on='col1')

        # check result is a GeoDataFrame
        assert isinstance(res, GeoDataFrame)

        # check geometry property gives GeoSeries
        assert isinstance(res.geometry, GeoSeries)

        # check metadata
        self._check_metadata(res)

        ## test that crs and other geometry name are preserved
        self.gdf.crs = {'init' :'epsg:4326'}
        self.gdf = (self.gdf.rename(columns={'geometry': 'points'})
                    .set_geometry('points'))
        res = self.gdf.merge(self.df, left_on='values', right_on='col1')
        assert isinstance(res, GeoDataFrame)
        assert isinstance(res.geometry, GeoSeries)
        self._check_metadata(res, 'points', self.gdf.crs)

    def test_concat_axis0(self):
        # Row-wise concatenation.
        # frame
        res = pd.concat([self.gdf, self.gdf])
        assert res.shape == (6, 2)
        assert isinstance(res, GeoDataFrame)
        assert isinstance(res.geometry, GeoSeries)
        self._check_metadata(res)
        # series
        res = pd.concat([self.gdf.geometry, self.gdf.geometry])
        assert res.shape == (6, )
        assert isinstance(res, GeoSeries)
        assert isinstance(res.geometry, GeoSeries)

    def test_concat_axis1(self):
        # Column-wise concatenation with a plain DataFrame.
        res = pd.concat([self.gdf, self.df], axis=1)
        assert res.shape == (3, 4)
        assert isinstance(res, GeoDataFrame)
        assert isinstance(res.geometry, GeoSeries)
        self._check_metadata(res)
| 32.328358 | 79 | 0.632041 |
acf8c6ea149637e3e7d6cc3058d17b8d21c1f1fb | 1,807 | py | Python | day11/main.py | henriknh/advent-of-code-2021 | f35750179dbcb005611e01fd931f528062f0652d | [
"MIT"
] | null | null | null | day11/main.py | henriknh/advent-of-code-2021 | f35750179dbcb005611e01fd931f528062f0652d | [
"MIT"
] | null | null | null | day11/main.py | henriknh/advent-of-code-2021 | f35750179dbcb005611e01fd931f528062f0652d | [
"MIT"
def day11(lines):
    """Advent of Code 2021 day 11 driver: parse the octopus grid from digit
    strings, then simulate energy steps, printing progress and the running
    flash count.

    NOTE(review): the loop runs a fixed 2000 steps; part 1 of the puzzle asks
    for 100 steps, while count_flashes raises NameError when every octopus
    flashes simultaneously (the part-2 stopping condition).
    """
    flashes = 0
    # Convert each line of digit characters into a list of ints (in place).
    for i in range(len(lines)):
        lines[i] = [int(c) for c in lines[i]]
    print_grid(lines)
    for i in range(2000):
        print('Step', i+1)
        increase_energy_level(lines)
        handle_flashes(lines)
        flashes += count_flashes(lines)
    print_grid(lines)
    print('Flashes:', flashes)
def print_grid(lines):
    """Print the grid, one row of concatenated digits per line, preceded by
    a 'GRID:' header."""
    print('GRID:')
    for row in lines:
        print(''.join(str(level) for level in row))
def increase_energy_level(lines):
    """Add one unit of energy to every octopus on the grid, in place."""
    for row in lines:
        for col, level in enumerate(row):
            row[col] = level + 1
def handle_flashes(lines):
    """Trigger a flash for every octopus whose energy just reached 10."""
    ready = [(row_idx, col_idx)
             for row_idx, row in enumerate(lines)
             for col_idx, level in enumerate(row)
             if level == 10]
    for row_idx, col_idx in ready:
        do_flash(lines, row_idx, col_idx)
def do_flash(lines, y, x):
    """Propagate a flash at (y, x): every in-bounds neighbour (including
    diagonals) gains one energy, and any neighbour whose energy thereby
    reaches exactly 10 flashes recursively.  Each cell can cross the 9->10
    boundary at most once, so the recursion terminates."""
    height, width = len(lines), len(lines[0])
    for dy in (-1, 0, 1):
        for dx in (-1, 0, 1):
            if dy == 0 and dx == 0:
                continue  # skip the flashing cell itself
            ny, nx = y + dy, x + dx
            if not (0 <= ny < height and 0 <= nx < width):
                continue  # neighbour is off the grid
            lines[ny][nx] += 1
            if lines[ny][nx] == 10:
                do_flash(lines, ny, nx)
def count_flashes(lines):
    """Reset every octopus that flashed (energy > 9) to 0 and return how
    many flashed.  Raises NameError('ALL FLASHES') when the whole grid
    flashed at once (the day-11 part-2 stop condition)."""
    flashed = 0
    cell_total = len(lines) * len(lines[0])
    for row_idx, row in enumerate(lines):
        for col_idx, level in enumerate(row):
            if level > 9:
                flashed += 1
                lines[row_idx][col_idx] = 0
    print(flashed, cell_total)
    if flashed == cell_total:
        raise NameError('ALL FLASHES')
    return flashed
| 25.814286 | 55 | 0.490315 |
acf8c6fe1d69ed1a94cfb07eb8b5a70c434a8fab | 18,018 | py | Python | neutron/tests/unit/db/test_ipam_backend_mixin.py | tankertyp/openstack-learning | d729672663f170d0138ecf23b3c23df225c1b1b8 | [
"Apache-2.0"
] | null | null | null | neutron/tests/unit/db/test_ipam_backend_mixin.py | tankertyp/openstack-learning | d729672663f170d0138ecf23b3c23df225c1b1b8 | [
"Apache-2.0"
] | null | null | null | neutron/tests/unit/db/test_ipam_backend_mixin.py | tankertyp/openstack-learning | d729672663f170d0138ecf23b3c23df225c1b1b8 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2015 Infoblox Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
import netaddr
from neutron_lib.api.definitions import portbindings
from neutron_lib import constants
from neutron_lib import exceptions as exc
from neutron_lib.exceptions import address_scope as addr_scope_exc
from oslo_utils import uuidutils
import webob.exc
from neutron.db import db_base_plugin_v2
from neutron.db import ipam_backend_mixin
from neutron.db import portbindings_db
from neutron.objects import subnet as subnet_obj
from neutron.tests import base
from neutron.tests.unit.db import test_db_base_plugin_v2
class TestIpamBackendMixin(base.BaseTestCase):
    """Unit tests for IpamBackendMixin's fixed-IP change computation.

    The mixin's _get_subnet_object is mocked throughout so SLAAC
    (auto-addressed) vs. regular subnets can be simulated without a DB.
    """

    def setUp(self):
        super(TestIpamBackendMixin, self).setUp()
        self.mixin = ipam_backend_mixin.IpamBackendMixin()
        self.ctx = mock.Mock()
        # (subnet_id, ip_address) tuples used as common fixtures.
        self.default_new_ips = (('id-1', '192.168.1.1'),
                                ('id-2', '192.168.1.2'))
        self.default_original_ips = (('id-1', '192.168.1.1'),
                                     ('id-5', '172.20.16.5'))
        self.owner_non_router = constants.DEVICE_OWNER_DHCP
        self.owner_router = constants.DEVICE_OWNER_ROUTER_INTF

    def _prepare_ips(self, ips):
        # Convert (subnet_id, ip[, delete_subnet]) tuples into the dict
        # format _get_changed_ips_for_port expects.
        results = []
        for ip in ips:
            ip_dict = {'ip_address': ip[1],
                       'subnet_id': ip[0]}
            if len(ip) > 2:
                ip_dict['delete_subnet'] = ip[2]
            results.append(ip_dict)
        return results

    def _mock_slaac_subnet_on(self):
        # Every subnet lookup returns a SLAAC (auto-addressed) subnet.
        slaac_subnet_obj = subnet_obj.Subnet(
            self.ctx,
            ipv6_address_mode=constants.IPV6_SLAAC,
            ipv6_ra_mode=constants.IPV6_SLAAC)
        self.mixin._get_subnet_object = mock.Mock(
            return_value=slaac_subnet_obj)

    def _mock_slaac_subnet_off(self):
        # Every subnet lookup returns a regular (non-SLAAC) subnet.
        non_slaac_subnet_obj = subnet_obj.Subnet(
            self.ctx,
            ipv6_address_mode=None,
            ipv6_ra_mode=None)
        self.mixin._get_subnet_object = mock.Mock(
            return_value=non_slaac_subnet_obj)

    def _mock_slaac_for_subnet_ids(self, subnet_ids):
        """Mock incoming subnets as autoaddressed."""
        def _get_subnet_object(context, subnet_id):
            if subnet_id in subnet_ids:
                return subnet_obj.Subnet(
                    self.ctx,
                    ipv6_address_mode=constants.IPV6_SLAAC,
                    ipv6_ra_mode=constants.IPV6_SLAAC)
            else:
                return subnet_obj.Subnet(
                    self.ctx,
                    ipv6_address_mode=None,
                    ipv6_ra_mode=None)

        self.mixin._get_subnet_object = mock.Mock(
            side_effect=_get_subnet_object)

    def test__is_distributed_service(self):
        # Only DEVICE_OWNER_DISTRIBUTED combined with an 'ovnmeta-' device id
        # counts as a distributed service port.
        port = {'device_owner':
                '%snova' % constants.DEVICE_OWNER_COMPUTE_PREFIX,
                'device_id': uuidutils.generate_uuid()}
        self.assertFalse(self.mixin._is_distributed_service(port))
        port = {'device_owner': constants.DEVICE_OWNER_DHCP,
                'device_id': uuidutils.generate_uuid()}
        self.assertFalse(self.mixin._is_distributed_service(port))
        port = {'device_owner': constants.DEVICE_OWNER_DHCP,
                'device_id': 'ovnmeta-%s' % uuidutils.generate_uuid()}
        self.assertFalse(self.mixin._is_distributed_service(port))
        port = {'device_owner': constants.DEVICE_OWNER_DISTRIBUTED,
                'device_id': 'ovnmeta-%s' % uuidutils.generate_uuid()}
        self.assertTrue(self.mixin._is_distributed_service(port))

    def _test_get_changed_ips_for_port(self, expected, original_ips,
                                       new_ips, owner):
        # NOTE(review): assertItemsEqual is the Python 2 name
        # (assertCountEqual in Python 3); presumably provided by the neutron
        # test base class -- confirm.
        change = self.mixin._get_changed_ips_for_port(self.ctx,
                                                      original_ips,
                                                      new_ips,
                                                      owner)
        self.assertItemsEqual(expected.add, change.add)
        self.assertItemsEqual(expected.original, change.original)
        self.assertItemsEqual(expected.remove, change.remove)

    def test__get_changed_ips_for_port(self):
        new_ips = self._prepare_ips(self.default_new_ips)
        original_ips = self._prepare_ips(self.default_original_ips)

        expected_change = self.mixin.Changes(add=[new_ips[1]],
                                             original=[original_ips[0]],
                                             remove=[original_ips[1]])
        self._test_get_changed_ips_for_port(expected_change, original_ips,
                                            new_ips, self.owner_router)

    def test__get_changed_ips_for_port_autoaddress(self):
        # SLAAC addresses on a non-router port are kept, not removed.
        new_ips = self._prepare_ips(self.default_new_ips)

        original = (('id-1', '192.168.1.1'),
                    ('id-5', '2000:1234:5678::12FF:FE34:5678'))
        original_ips = self._prepare_ips(original)

        self._mock_slaac_subnet_on()

        expected_change = self.mixin.Changes(add=[new_ips[1]],
                                             original=original_ips,
                                             remove=[])
        self._test_get_changed_ips_for_port(expected_change, original_ips,
                                            new_ips, self.owner_non_router)

    def test__get_changed_ips_for_port_remove_autoaddress(self):
        new = (('id-5', '2000:1234:5678::12FF:FE34:5678', True),
               ('id-1', '192.168.1.1'))
        new_ips = self._prepare_ips(new)
        reference_ips = [ip for ip in new_ips
                         if ip['subnet_id'] == 'id-1']

        original = (('id-5', '2000:1234:5678::12FF:FE34:5678'),)
        original_ips = self._prepare_ips(original)

        # mock ipv6 subnet as auto addressed and leave ipv4 as regular
        self._mock_slaac_for_subnet_ids([new[0][0]])
        # Autoaddressed ip allocation has to be removed
        # if it has 'delete_subnet' flag set to True
        expected_change = self.mixin.Changes(add=reference_ips,
                                             original=[],
                                             remove=original_ips)
        self._test_get_changed_ips_for_port(expected_change, original_ips,
                                            new_ips, self.owner_non_router)

    def test__get_changed_ips_for_port_autoaddress_ipv6_pd_enabled(self):
        owner_not_router = constants.DEVICE_OWNER_DHCP
        new_ips = self._prepare_ips(self.default_new_ips)

        original = (('id-1', '192.168.1.1'),
                    ('id-5', '2000:1234:5678::12FF:FE34:5678'))
        original_ips = self._prepare_ips(original)

        # mock to test auto address part
        pd_subnet_obj = subnet_obj.Subnet(
            self.ctx,
            id=uuidutils.generate_uuid(),
            subnetpool_id=constants.IPV6_PD_POOL_ID,
            ipv6_address_mode=constants.IPV6_SLAAC,
            ipv6_ra_mode=constants.IPV6_SLAAC)
        self.mixin._get_subnet_object = mock.Mock(return_value=pd_subnet_obj)

        # make a copy of original_ips
        # since it is changed by _get_changed_ips_for_port
        expected_change = self.mixin.Changes(add=[new_ips[1]],
                                             original=[original_ips[0]],
                                             remove=[original_ips[1]])

        self._test_get_changed_ips_for_port(expected_change, original_ips,
                                            new_ips, owner_not_router)

    def _test_get_changed_ips_for_port_no_ip_address(self):
        # IP address should be added if only subnet_id is provided,
        # independently from auto_address status for subnet
        new_ips = [{'subnet_id': 'id-3'}]
        original_ips = []

        expected_change = self.mixin.Changes(add=[new_ips[0]],
                                             original=[],
                                             remove=[])
        self._test_get_changed_ips_for_port(expected_change, original_ips,
                                            new_ips, self.owner_non_router)

    def test__get_changed_ips_for_port_no_ip_address_no_slaac(self):
        self._mock_slaac_subnet_off()
        self._test_get_changed_ips_for_port_no_ip_address()

    def test__get_changed_ips_for_port_no_ip_address_slaac(self):
        self._mock_slaac_subnet_on()
        self._test_get_changed_ips_for_port_no_ip_address()

    def test__get_changed_ips_for_port_subnet_id_no_ip(self):
        # If a subnet is specified without an IP address only allocate a new
        # address if one doesn't exist
        self._mock_slaac_subnet_off()
        new_ips = [{'subnet_id': 'id-3'}]
        original_ips = [{'subnet_id': 'id-3', 'ip_address': '4.3.2.1'}]

        expected_change = self.mixin.Changes(
            add=[],
            original=[{'subnet_id': 'id-3', 'ip_address': '4.3.2.1'}],
            remove=[])
        self._test_get_changed_ips_for_port(expected_change, original_ips,
                                            new_ips, self.owner_non_router)

    def test__get_changed_ips_for_port_multiple_ips_one_subnet_add_third(self):
        # If a subnet is specified without an IP address only allocate a new
        # address if one doesn't exist
        self._mock_slaac_subnet_off()
        new_ips = [{'subnet_id': 'id-3', 'ip_address': '4.3.2.1'},
                   {'subnet_id': 'id-3'},
                   {'subnet_id': 'id-3', 'ip_address': '4.3.2.10'}]
        original_ips = [{'subnet_id': 'id-3', 'ip_address': '4.3.2.1'},
                        {'subnet_id': 'id-3', 'ip_address': '4.3.2.10'}]

        expected_change = self.mixin.Changes(
            add=[{'subnet_id': 'id-3'}],
            original=[{'subnet_id': 'id-3', 'ip_address': '4.3.2.1'},
                      {'subnet_id': 'id-3', 'ip_address': '4.3.2.10'}],
            remove=[])
        self._test_get_changed_ips_for_port(expected_change, original_ips,
                                            new_ips, self.owner_non_router)

    def test__get_changed_ips_for_port_multiple_ips_one_subnet_noip(self):
        # If a subnet is specified without an IP address only allocate a new
        # address if one doesn't exist
        self._mock_slaac_subnet_off()
        new_ips = [{'subnet_id': 'id-3'},
                   {'subnet_id': 'id-3'}]
        original_ips = [{'subnet_id': 'id-3', 'ip_address': '4.3.2.1'},
                        {'subnet_id': 'id-3', 'ip_address': '4.3.2.10'}]

        expected_change = self.mixin.Changes(
            add=[],
            original=[{'subnet_id': 'id-3', 'ip_address': '4.3.2.1'},
                      {'subnet_id': 'id-3', 'ip_address': '4.3.2.10'}],
            remove=[])
        self._test_get_changed_ips_for_port(expected_change, original_ips,
                                            new_ips, self.owner_non_router)

    def test__get_changed_ips_for_port_subnet_id_no_ip_ipv6(self):
        # If a subnet is specified without an IP address only allocate a new
        # address if one doesn't exist
        self._mock_slaac_subnet_off()
        new_ips = [{'subnet_id': 'id-3'}]
        original_ips = [{'subnet_id': 'id-3', 'ip_address': '2001:db8::8'}]

        expected_change = self.mixin.Changes(
            add=[],
            original=[{'subnet_id': 'id-3', 'ip_address': '2001:db8::8'}],
            remove=[])
        self._test_get_changed_ips_for_port(expected_change, original_ips,
                                            new_ips, self.owner_non_router)

    def test__get_changed_ips_for_port_subnet_id_no_ip_eui64(self):
        # If a subnet is specified without an IP address allocate a new address
        # if the address is eui-64. This supports changing prefix when prefix
        # delegation is in use.
        self._mock_slaac_subnet_off()
        new_ips = [{'subnet_id': 'id-3'}]
        original_ips = [{'subnet_id': 'id-3',
                         'ip_address': '2001::eeb1:d7ff:fe2c:9c5f'}]

        expected_change = self.mixin.Changes(
            add=[{'subnet_id': 'id-3'}],
            original=[],
            remove=[{'subnet_id': 'id-3',
                     'ip_address': '2001::eeb1:d7ff:fe2c:9c5f'}])
        self._test_get_changed_ips_for_port(expected_change, original_ips,
                                            new_ips, self.owner_non_router)

    def test__is_ip_required_by_subnet_for_router_port(self):
        # Owner -> router:
        # _get_subnet_object should not be called,
        # expected True
        self._mock_slaac_subnet_off()

        result = self.mixin._is_ip_required_by_subnet(self.ctx, 'id',
                                                      self.owner_router)
        self.assertTrue(result)
        self.assertFalse(self.mixin._get_subnet_object.called)

    def test__is_ip_required_by_subnet_for_non_router_port(self):
        # Owner -> not router:
        # _get_subnet_object should be called,
        # expected True, because subnet is not slaac
        self._mock_slaac_subnet_off()

        result = self.mixin._is_ip_required_by_subnet(self.ctx, 'id',
                                                      self.owner_non_router)
        self.assertTrue(result)
        self.assertTrue(self.mixin._get_subnet_object.called)

    def test__is_ip_required_by_subnet_for_non_router_port_and_slaac(self):
        # Owner -> not router:
        # _get_subnet_object should be called,
        # expected False, because subnet is slaac
        self._mock_slaac_subnet_on()

        result = self.mixin._is_ip_required_by_subnet(self.ctx, 'id',
                                                      self.owner_non_router)
        self.assertFalse(result)
        self.assertTrue(self.mixin._get_subnet_object.called)

    def test__validate_network_subnetpools_mismatch_address_scopes(self):
        # A subnetpool whose address scope differs from the network's
        # must be rejected.
        address_scope_id = "dummy-scope"
        subnetpool = mock.MagicMock()
        address_scope = mock.MagicMock()
        subnetpool.address_scope.return_value = address_scope_id
        address_scope.id.return_value = address_scope_id
        self.assertRaises(addr_scope_exc.NetworkAddressScopeAffinityError,
                          self.mixin._validate_network_subnetpools,
                          mock.MagicMock(),
                          constants.IP_VERSION_4,
                          subnetpool,
                          address_scope)

    def test__validate_network_subnetpools_subnetpool_mismatch(self):
        # A subnet already on the network is bound to a different subnetpool
        # of the same IP version -> affinity error.
        subnet = mock.MagicMock(ip_version=constants.IP_VERSION_4)
        subnet.subnetpool_id = 'fake-subnetpool'
        network = mock.MagicMock(subnets=[subnet])
        subnetpool = mock.MagicMock(id=uuidutils.generate_uuid())
        subnetpool.ip_version = constants.IP_VERSION_4
        self.assertRaises(exc.NetworkSubnetPoolAffinityError,
                          self.mixin._validate_network_subnetpools,
                          network,
                          constants.IP_VERSION_4,
                          subnetpool,
                          None)
class TestPlugin(db_base_plugin_v2.NeutronDbPluginV2,
                 portbindings_db.PortBindingMixin):
    """Minimal core plugin combining the base DB plugin with port-binding
    support, used by the port-update IPAM tests below."""

    __native_pagination_support = True
    __native_sorting_support = True

    supported_extension_aliases = [portbindings.ALIAS]

    def get_plugin_description(self):
        return "Test Plugin"

    @classmethod
    def get_plugin_type(cls):
        return "test_plugin"

    def create_port(self, context, port):
        # Create the port as usual, then persist its binding attributes.
        port_dict = super(TestPlugin, self).create_port(context, port)
        self._process_portbindings_create_and_update(
            context, port['port'], port_dict)
        return port_dict
class TestPortUpdateIpam(test_db_base_plugin_v2.NeutronDbPluginV2TestCase):
    """IPAM behavior on port update, run against the TestPlugin above by
    default (subclasses may supply another plugin)."""

    def setUp(self, plugin=None):
        if not plugin:
            plugin = 'neutron.tests.unit.db.test_ipam_backend_mixin.TestPlugin'
        super(TestPortUpdateIpam, self).setUp(plugin=plugin)

    def test_port_update_allocate_from_net_subnet(self):
        """Tests that a port can get address by updating fixed_ips"""
        with self.network() as network:
            pass

        # Create a bound port with no IP address (since there is not subnet)
        response = self._create_port(self.fmt,
                                     net_id=network['network']['id'],
                                     tenant_id=network['network']['tenant_id'],
                                     arg_list=(portbindings.HOST_ID,),
                                     **{portbindings.HOST_ID: 'fakehost'})
        port = self.deserialize(self.fmt, response)

        # Create the subnet and try to update the port to get an IP
        with self.subnet(network=network) as subnet:
            data = {'port': {
                'fixed_ips': [{'subnet_id': subnet['subnet']['id']}]}}
            port_id = port['port']['id']
            port_req = self.new_update_request('ports', data, port_id)
            response = port_req.get_response(self.api)
            res = self.deserialize(self.fmt, response)
            self.assertEqual(webob.exc.HTTPOk.code, response.status_int)
            self.assertEqual(1, len(res['port']['fixed_ips']))
            # The allocated address must come from the new subnet's CIDR.
            ip = res['port']['fixed_ips'][0]['ip_address']
            ip_net = netaddr.IPNetwork(subnet['subnet']['cidr'])
            self.assertIn(ip, ip_net)
class TestPortUpdateIpamML2(TestPortUpdateIpam):
    """Re-run the port-update IPAM tests against the 'ml2' core plugin."""
    def setUp(self):
        super(TestPortUpdateIpamML2, self).setUp(plugin='ml2')
| 44.27027 | 79 | 0.603175 |
acf8c71269ffe2ad238e47f75a61cb2a5b8ec8aa | 1,740 | py | Python | packages/monomanage/src/monomanage/app/wheels.py | 0mars/ektebly-api | 79e77c73d39e30cf37ec08097d5d3a7fec7a7c54 | [
"Apache-2.0"
] | 2 | 2019-08-19T06:57:46.000Z | 2021-06-02T06:10:24.000Z | packages/monomanage/src/monomanage/app/wheels.py | 0mars/ektebly-api | 79e77c73d39e30cf37ec08097d5d3a7fec7a7c54 | [
"Apache-2.0"
] | 1 | 2019-09-26T11:20:50.000Z | 2019-09-26T11:20:50.000Z | packages/monomanage/src/monomanage/app/wheels.py | 0mars/ektebly-api | 79e77c73d39e30cf37ec08097d5d3a7fec7a7c54 | [
"Apache-2.0"
] | 1 | 2019-09-24T07:42:21.000Z | 2019-09-24T07:42:21.000Z | # Copyright (C) 2019 Simon Biggs
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
from glob import glob
import subprocess
import json
# Distribution names (the wheel filename prefix before the first dash)
# that are eligible for building and copying.
WHITELIST = (
    'pymedphys_base',
    'pymedphys_coordsandscales',
    'pymedphys_dicom',
    'pymedphys_fileformats',
    'pymedphys_utilities',
    'pymedphys_mudensity',
    'pymedphys_gamma',
    'pymedphys')
def build_wheels_with_yarn():
    """Build the wheel for every whitelisted package via yarn/lerna.

    Runs ``yarn pypi:clean`` once, then ``yarn lerna run pypi:build``
    scoped to each package in ``WHITELIST``.

    Raises:
        RuntimeError: if the ``yarn`` executable cannot be found on PATH.
    """
    yarn = shutil.which("yarn")
    if yarn is None:
        # shutil.which returns None when the binary is missing; without this
        # guard subprocess.call([None, ...]) fails with an opaque TypeError.
        raise RuntimeError("The 'yarn' executable was not found on PATH.")
    subprocess.call([yarn, "pypi:clean"])
    for package in WHITELIST:
        subprocess.call(
            [yarn, "lerna", "run", "pypi:build", "--scope={}".format(package)])
def copy_wheels(packages_dir, new_dir):
    """Copy whitelisted wheels into *new_dir* and write a paths.json manifest.

    Scans ``<packages_dir>/*/dist/*.whl``, copies every wheel whose
    distribution name (filename prefix before the first dash) appears in
    ``WHITELIST``, and records the copied file names as a JSON list in
    ``<new_dir>/paths.json``.
    """
    copied_names = []
    for wheel_path in glob(os.path.join(packages_dir, '*', 'dist', '*.whl')):
        wheel_name = os.path.basename(wheel_path)
        if wheel_name.split('-')[0] in WHITELIST:
            copied_names.append(wheel_name)
            shutil.copy(wheel_path, os.path.join(new_dir, wheel_name))
    manifest_path = os.path.join(new_dir, 'paths.json')
    with open(manifest_path, 'w') as manifest_file:
        json.dump(copied_names, manifest_file)
| 30 | 79 | 0.702299 |
acf8c7478f230a0e1dec83a106d8bb3e808dca79 | 2,022 | py | Python | Spritesheet.py | FearlessClock/RobotFactory | 8b3602d62cfb1656d6bf376fda636d856cda8607 | [
"MIT"
] | null | null | null | Spritesheet.py | FearlessClock/RobotFactory | 8b3602d62cfb1656d6bf376fda636d856cda8607 | [
"MIT"
] | null | null | null | Spritesheet.py | FearlessClock/RobotFactory | 8b3602d62cfb1656d6bf376fda636d856cda8607 | [
"MIT"
] | null | null | null | # https://www.pygame.org/wiki/Spritesheet
# This class handles sprite sheets
# This was taken from www.scriptefun.com/transcript-2-using
# sprite-sheets-and-drawing-the-background
# I've added some code to fail if the file wasn't found..
# Note: When calling images_at the rect is the format:
# (x, y, x + offset, y + offset)
import pygame
class SpriteSheet(object):
    """Helper for slicing individual sprites out of a sprite-sheet image."""
    def __init__(self, filename):
        """Load the sheet image; exits the program if loading fails."""
        try:
            self.sheet = pygame.image.load(filename).convert()
        except pygame.error as message:
            # Fixed a 2to3 artifact: print((a, b)) printed a tuple repr.
            print('Unable to load spritesheet image:', filename)
            raise SystemExit(message)
    # Load a specific image from a specific rectangle
    def image_at(self, rectangle, scale, colorkey=None):
        """Return the sprite at *rectangle*, scaled to (scale.x, scale.y).

        If *colorkey* is -1, the top-left pixel's color becomes the
        transparent color; any other non-None value is used directly.
        """
        rect = pygame.Rect(rectangle)
        image = pygame.Surface(rect.size)
        image.blit(self.sheet, (0, 0), rect)
        if colorkey is not None:
            # BUGFIX: was "colorkey is -1", an identity comparison that only
            # works via CPython's small-int cache; compare by value.
            if colorkey == -1:
                colorkey = image.get_at((0, 0))
            image.set_colorkey(colorkey, pygame.RLEACCEL)
        image = pygame.transform.scale(image, (int(scale.x), int(scale.y)))
        return image
    # Load a whole bunch of images and return them as a list
    def images_at(self, rects, scale, colorkey=None):
        """Return one sprite per rectangle in *rects*."""
        return [self.image_at(rect, scale, colorkey) for rect in rects]
    # Load a whole strip of images
    def load_strip(self, rect, image_count, scale, colorkey=None):
        """Return *image_count* sprites from a horizontal strip starting at *rect*."""
        tups = [(rect[0] + rect[2] * x, rect[1], rect[2], rect[3])
                for x in range(image_count)]
        return self.images_at(tups, scale, colorkey)
    def load_grid(self, rect, image_count, row_count, scale, colorkey=None):
        """Return *row_count* rows, each a strip of *image_count* sprites."""
        strips = [(rect[0], rect[1] + rect[3] * y, rect[2], rect[3]) for y in range(row_count)]
        return [self.load_strip(strip, image_count, scale, colorkey) for strip in strips]
| 41.265306 | 95 | 0.648863 |
acf8c772af3dd0c3a95fda51d35b4eb4fa1053c6 | 4,083 | py | Python | tb_rest_client/models/models_pe/entity_count_query.py | samson0v/python_tb_rest_client | 08ff7898740f7cec2170e85d5c3c89e222e967f7 | [
"Apache-2.0"
] | 30 | 2020-06-19T06:42:50.000Z | 2021-08-23T21:16:36.000Z | tb_rest_client/models/models_pe/entity_count_query.py | samson0v/python_tb_rest_client | 08ff7898740f7cec2170e85d5c3c89e222e967f7 | [
"Apache-2.0"
] | 25 | 2021-08-30T01:17:27.000Z | 2022-03-16T14:10:14.000Z | tb_rest_client/models/models_pe/entity_count_query.py | samson0v/python_tb_rest_client | 08ff7898740f7cec2170e85d5c3c89e222e967f7 | [
"Apache-2.0"
] | 23 | 2020-07-06T13:41:54.000Z | 2021-08-23T21:04:50.000Z | # coding: utf-8
"""
ThingsBoard REST API
ThingsBoard Professional Edition IoT platform REST API documentation. # noqa: E501
OpenAPI spec version: 3.3.3PAAS-RC1
Contact: info@thingsboard.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class EntityCountQuery(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    from tb_rest_client.api_client import ApiClient
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'entity_filter': 'EntityFilter',
        'key_filters': 'list[KeyFilter]'
    }
    attribute_map = {
        'entity_filter': 'entityFilter',
        'key_filters': 'keyFilters'
    }
    def __init__(self, entity_filter=None, key_filters=None):  # noqa: E501
        """EntityCountQuery - a model defined in Swagger"""  # noqa: E501
        self._entity_filter = None
        self._key_filters = None
        self.discriminator = None
        if entity_filter is not None:
            self.entity_filter = entity_filter
        if key_filters is not None:
            self.key_filters = key_filters
    @property
    def entity_filter(self):
        """Gets the entity_filter of this EntityCountQuery.  # noqa: E501
        :return: The entity_filter of this EntityCountQuery.  # noqa: E501
        :rtype: EntityFilter
        """
        return self._entity_filter
    @entity_filter.setter
    def entity_filter(self, entity_filter):
        """Sets the entity_filter of this EntityCountQuery.
        :param entity_filter: The entity_filter of this EntityCountQuery.  # noqa: E501
        :type: EntityFilter
        """
        self._entity_filter = entity_filter
    @property
    def key_filters(self):
        """Gets the key_filters of this EntityCountQuery.  # noqa: E501
        :return: The key_filters of this EntityCountQuery.  # noqa: E501
        :rtype: list[KeyFilter]
        """
        return self._key_filters
    @key_filters.setter
    def key_filters(self, key_filters):
        """Sets the key_filters of this EntityCountQuery.
        :param key_filters: The key_filters of this EntityCountQuery.  # noqa: E501
        :type: list[KeyFilter]
        """
        self._key_filters = key_filters
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Walk the declared swagger attributes, recursively serializing any
        # nested model objects (anything exposing to_dict).
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        # Generated boilerplate: merge dict items when the model subclasses dict.
        if issubclass(EntityCountQuery, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, EntityCountQuery):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 29.80292 | 88 | 0.593926 |
acf8c7c05acb97de0d948cf86e787cd55db5280a | 1,276 | py | Python | src/terra/contracts/spectrum.py | fentas/staketaxcsv | ad37a32d8864111dbf88e926b80eb4ccacb921c6 | [
"MIT"
] | null | null | null | src/terra/contracts/spectrum.py | fentas/staketaxcsv | ad37a32d8864111dbf88e926b80eb4ccacb921c6 | [
"MIT"
] | null | null | null | src/terra/contracts/spectrum.py | fentas/staketaxcsv | ad37a32d8864111dbf88e926b80eb4ccacb921c6 | [
"MIT"
] | null | null | null | # known contracts from protocol
# Known Spectrum-protocol contract addresses.
# NOTE(review): presumably the dispatcher matches a transaction's contract
# against this list before calling handle() -- confirm against the caller.
CONTRACTS = [
    # SPEC Platform
    "terra1vvu80qnl0yn94stkc9sy2f5xcqcscu2fercgzq",
    # SPEC Token
    "terra1s5eczhe0h0jutf46re52x5z4r03c8hupacxmdr",
    # SPEC Governance
    "terra1dpe4fmcz2jqk6t50plw0gqa2q3he2tj6wex5cl",
    # SPEC Staking
    "terra1fxwelge6mf5l6z0rjpylzcfq9w9tw2q7tewaf5",
    # SPEC Farm
    "terra17hjvrkcwn3jk2qf69s5ldxx5rjccchu35assga",
    # SPEC Mirror Farm
    "terra1kehar0l76kzuvrrcwj5um72u3pjq2uvp62aruf",
    # SPEC Anchor Farm
    "terra1fqzczuddqsdml37a20pysjx5wk9dh4tdzu2mrw",
    # SPEC Valkyrie Farm
    "terra1xt4ugaxds6wjehjckqchzg4e99n3cjd2rtfw4f",
    # SPEC Nexus Psi-UST Farm
    "terra1j2hdp4jelqe9tkfwnsx5mlheqagaryxhqwr4h2",
    # SPEC Orion Farm
    "terra106en784zr4kpe6phlaj8c8t3aeqgn3xsugaelx",
    # SPEC Terraworld Farm
    "terra1cdyw7fydevn372re7xjgfh8kqrrf2lxm5k6ve3",
    # SPEC Nexus nEth-Psi Farm,
    "terra1lmm7xjareer3fd040kz2epw93hg20p9f64uh98",
    # SPEC Nexus nLuna-Psi Farm,
    "terra19kzel57gvx42e628k6frh624x5vm2kpck9cr9c",
    # SPEC bPsiDP-24m Farm
    "terra1kr82wxlvg773vjay95epyckna9g4vppjyfxgd0",
    # Spectrum SPEC-UST Pair
    "terra1tn8ejzw8kpuc87nu42f6qeyen4c7qy35tl8t20",
]
def handle(exporter, elem, txinfo, contract):
    """Handle a Spectrum-protocol transaction element.

    Currently a stub: it only prints a marker to the console; no rows are
    exported and all four parameters are unused for now.
    """
    print('SPEC!')
    #print(elem)
| 33.578947 | 51 | 0.762539 |
acf8c9a5ae36c4095f9e4289d7e51e61bc5bcdd7 | 704 | py | Python | core/tests/test_manager_mocks.py | grzes5003/LicensePlateRec | c051db15000e8165aeaf75bed8bc1bdb32914576 | [
"MIT"
] | null | null | null | core/tests/test_manager_mocks.py | grzes5003/LicensePlateRec | c051db15000e8165aeaf75bed8bc1bdb32914576 | [
"MIT"
] | 1 | 2021-01-03T15:50:56.000Z | 2021-01-09T18:28:46.000Z | core/tests/test_manager_mocks.py | grzes5003/LicensePlateRec | c051db15000e8165aeaf75bed8bc1bdb32914576 | [
"MIT"
] | null | null | null | import filecmp
import os
import time
from core.manager.Manager import Manager
def test_manager_mocks():
    """End-to-end check: run Manager from a test config and compare the
    produced log against a target log file.

    NOTE: currently disabled via the early ``return`` below.
    """
    import toml
    # TODO temporary test disable
    return
    with open("core/tests/test_config.toml") as file:
        config = toml.load(file)
    manager = Manager(config)
    manager.run()
    # Busy-wait until the manager reports it has finished.
    while manager.get_status():
        pass
    # TODO add signal from generator it has generated output
    time.sleep(1)
    print(os.listdir('core/tests/'))
    assert os.path.isfile('core/tests/test_target_log.log') is True
    assert os.path.isfile('core/tests/test_log.log') is True
    assert filecmp.cmp('core/tests/test_target_log.log', 'core/tests/test_log.log', shallow=True) is True
| 24.275862 | 105 | 0.700284 |
acf8ca004410908b5070494f7227a69359f2712c | 1,146 | py | Python | djangocms_content_expiry/cache.py | Aiky30/djangocms-content-expiry | da7d348bcdafbf1a9862e4cc69a8363b3305a31a | [
"BSD-3-Clause"
] | null | null | null | djangocms_content_expiry/cache.py | Aiky30/djangocms-content-expiry | da7d348bcdafbf1a9862e4cc69a8363b3305a31a | [
"BSD-3-Clause"
] | 4 | 2021-09-27T10:15:13.000Z | 2021-11-23T17:18:04.000Z | djangocms_content_expiry/cache.py | Aiky30/djangocms-content-expiry | da7d348bcdafbf1a9862e4cc69a8363b3305a31a | [
"BSD-3-Clause"
] | 4 | 2021-09-06T20:13:45.000Z | 2021-10-02T15:00:58.000Z | from django.core.cache import cache
from djangocms_content_expiry.conf import (
DEFAULT_CONTENT_EXPIRY_CHANGELIST_PAGECONTENT_EXCLUSION_CACHE_EXPIRY,
)
from djangocms_content_expiry.constants import (
CONTENT_EXPIRY_CHANGELIST_PAGECONTENT_EXCLUSION_CACHE_KEY,
)
def _get_cache_key(site_id):
    """Build the per-site cache key for the page-content exclusion cache."""
    return "{}_{}".format(
        CONTENT_EXPIRY_CHANGELIST_PAGECONTENT_EXCLUSION_CACHE_KEY, site_id)
def set_changelist_page_content_exclusion_cache(value, site_id):
    """
    Store *value* in the page-content exclusion cache for the given site.

    The entry is stored with the configured
    DEFAULT_CONTENT_EXPIRY_CHANGELIST_PAGECONTENT_EXCLUSION_CACHE_EXPIRY
    timeout.
    :param value: A value to set the cache object with
    :param site_id: The site id to get the correct cache entry
    """
    cache.set(
        _get_cache_key(site_id),
        value,
        timeout=DEFAULT_CONTENT_EXPIRY_CHANGELIST_PAGECONTENT_EXCLUSION_CACHE_EXPIRY
    )
def get_changelist_page_content_exclusion_cache(site_id):
    """
    Get the cached value if it exists.
    :returns: the cached value, or None when the key is not set.
    :param site_id: The site id to get the correct cache entry
    """
    return cache.get(_get_cache_key(site_id))
| 29.384615 | 84 | 0.759162 |
acf8ca4ee2c19d808524a66c8026a95d5a626a66 | 1,042 | py | Python | BlueTest/Report/demo.py | liufeng3486/BuleTest | b6aa746ff4785444192a0237716a6515c7795bbf | [
"MIT"
] | 1 | 2018-11-20T08:07:38.000Z | 2018-11-20T08:07:38.000Z | BlueTest/Report/demo.py | liufeng3486/GreenTest | b6aa746ff4785444192a0237716a6515c7795bbf | [
"MIT"
] | null | null | null | BlueTest/Report/demo.py | liufeng3486/GreenTest | b6aa746ff4785444192a0237716a6515c7795bbf | [
"MIT"
] | null | null | null |
# Demo for the report module -- incomplete / work in progress
import BlueTest
import random
class Resualt():
    """Stand-in for a unittest result/test-case object.

    NOTE: instantiation renames the class itself to "123123" through
    ``self.__class__.__name__`` (side effect preserved from the original).
    """
    def __init__(self):
        self.__class__.__name__ = "123123"
        self.result = 2
    def id(self):
        """Return a fake dotted test id."""
        return ".test1"
    def shortDescription(self):
        """Return a fake short description."""
        return "shortDescription"
class Test():
    """Fake test runner producing five randomized result tuples."""
    def __init__(self):
        self.results = []
    def solorun(self):
        """Produce one fake result tuple; bumps the run counter ``self.i``."""
        self.i += 1
        outcome = Resualt()
        return (random.randint(0, 1), outcome, "error message", "ps")
    def run(self):
        """Reset the counter, then collect five fake results."""
        self.i = 1
        for _ in range(5):
            self.results.append(self.solorun())
    def Result(self):
        """Run and return the accumulated results list."""
        self.run()
        return self.results
if __name__ == '__main__':
    # Build an HTML report writer targeting a fixed output path.
    runner = BlueTest.HTMLTestRunner(
        stream='D:\\HTMLTestReportCN4.html',
        title='title222',
        description='',
        tester='tester'
    )
    # Run the test cases
    d = BlueTest.Report()
    d.Result()
    d.result = Test().Result()
    d.Arrangement()
    runner.run(d)
    # Close the file, otherwise the report cannot be generated
    #fp.close()
acf8ca8bbf2cacd5c5481da0a8f70b8dce6380fa | 2,250 | py | Python | components/alibi-explain-server/alibiexplainer/__main__.py | juldou/seldon-core | 34021ee3ead41c729ff57efd1964ab3f0d37861e | [
"Apache-2.0"
] | 1 | 2018-03-22T03:12:47.000Z | 2018-03-22T03:12:47.000Z | components/alibi-explain-server/alibiexplainer/__main__.py | juldou/seldon-core | 34021ee3ead41c729ff57efd1964ab3f0d37861e | [
"Apache-2.0"
] | 59 | 2021-05-18T09:04:28.000Z | 2022-03-28T07:07:08.000Z | components/alibi-explain-server/alibiexplainer/__main__.py | juldou/seldon-core | 34021ee3ead41c729ff57efd1964ab3f0d37861e | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 kubeflow.org.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copied from https://github.com/kubeflow/kfserving/blob/master/python/alibiexplainer
# /alibiexplainer/__main__.py
# and modified since
#
import logging
import sys
from alibiexplainer import AlibiExplainer
from alibiexplainer.constants import SELDON_LOGLEVEL
from alibiexplainer.parser import parse_args
from alibiexplainer.server import ExplainerServer
from alibiexplainer.utils import (
ExplainerMethod,
Protocol,
construct_predict_fn,
get_persisted_explainer,
get_persisted_keras,
is_persisted_explainer,
is_persisted_keras,
)
logging.basicConfig(level=SELDON_LOGLEVEL)
def main():
    """Parse CLI arguments, assemble an AlibiExplainer and serve it."""
    args, extra = parse_args(sys.argv[1:])
    protocol = Protocol(args.protocol)
    predict_fn = construct_predict_fn(
        predictor_host=args.predictor_host,
        model_name=args.model_name,
        protocol=protocol,
        tf_data_type=args.tf_data_type,
    )
    # Optionally pick up pre-trained artifacts from storage.
    alibi_model = None
    keras_model = None
    path = args.storage_uri
    if path is not None:
        # we assume here that model is local
        if is_persisted_explainer(path):
            alibi_model = get_persisted_explainer(predict_fn=predict_fn, dirname=path)
        if is_persisted_keras(path):
            keras_model = get_persisted_keras(path)
    explainer = AlibiExplainer(
        name=args.model_name,
        predict_fn=predict_fn,
        method=ExplainerMethod(args.command),
        config=extra,
        explainer=alibi_model,
        protocol=protocol,
        keras_model=keras_model,
    )
    explainer.load()
    ExplainerServer(args.http_port).start(explainer)
if __name__ == "__main__":
    # Script entry point.
    main()
| 28.481013 | 86 | 0.729333 |
acf8cb070e9debb24688b9064105325065b4529e | 1,031 | py | Python | python-twisted/examples/here-now.py | Dareen/pubnub-python | 713e98b53f6623a8abca2cee8a47fd92ceb7a75b | [
"MIT"
] | null | null | null | python-twisted/examples/here-now.py | Dareen/pubnub-python | 713e98b53f6623a8abca2cee8a47fd92ceb7a75b | [
"MIT"
] | null | null | null | python-twisted/examples/here-now.py | Dareen/pubnub-python | 713e98b53f6623a8abca2cee8a47fd92ceb7a75b | [
"MIT"
] | 1 | 2019-09-10T04:07:35.000Z | 2019-09-10T04:07:35.000Z | ## www.pubnub.com - PubNub Real-time push service in the cloud.
# coding=utf8
## PubNub Real-time Push APIs and Notifications Framework
## Copyright (c) 2010 Stephen Blum
## http://www.pubnub.com/
import sys
from pubnub import PubnubTwisted as Pubnub
# Positional CLI overrides; each key falls back to the 'demo' defaults.
# (Uses the pre-ternary "cond and a or b" idiom; note it yields the
# fallback whenever the argument value itself is falsy.)
publish_key = len(sys.argv) > 1 and sys.argv[1] or 'demo'
subscribe_key = len(sys.argv) > 2 and sys.argv[2] or 'demo'
secret_key = len(sys.argv) > 3 and sys.argv[3] or 'demo'
cipher_key = len(sys.argv) > 4 and sys.argv[4] or ''
# NOTE(review): bool(sys.argv[5]) is True for any non-empty string,
# including "0" or "false" -- confirm this parsing is intended.
ssl_on = len(sys.argv) > 5 and bool(sys.argv[5]) or False
## -----------------------------------------------------------------------
## Initiate Pubnub State
## -----------------------------------------------------------------------
pubnub = Pubnub(publish_key=publish_key, subscribe_key=subscribe_key,
                secret_key=secret_key, cipher_key=cipher_key, ssl_on=ssl_on)
channel = 'hello_world'
# Asynchronous usage
def callback(message):
    # Prints the here_now response; also registered as the error handler.
    print(message)
pubnub.here_now(channel, callback=callback, error=callback)
pubnub.start()
| 30.323529 | 76 | 0.619787 |
acf8cc58e2da7a2393635721ea606ccf2a312d74 | 3,376 | py | Python | barbican/context.py | stanzikratel/barbican-2 | 10fae57c1cae3e140c19069a48f562d62ca53663 | [
"Apache-2.0"
] | null | null | null | barbican/context.py | stanzikratel/barbican-2 | 10fae57c1cae3e140c19069a48f562d62ca53663 | [
"Apache-2.0"
] | null | null | null | barbican/context.py | stanzikratel/barbican-2 | 10fae57c1cae3e140c19069a48f562d62ca53663 | [
"Apache-2.0"
] | null | null | null | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011-2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from barbican.common import utils
from barbican.openstack.common import local
from barbican.openstack.common import policy
class RequestContext(object):
    """Stores information about the security context under which the user
    accesses the system, as well as additional request information.
    """
    def __init__(self, auth_tok=None, user=None, tenant=None, roles=None,
                 is_admin=False, read_only=False, show_deleted=False,
                 owner_is_tenant=True, service_catalog=None,
                 policy_enforcer=None):
        # NOTE: show_deleted is currently unused (see TODO below).
        self.auth_tok = auth_tok
        self.user = user
        self.tenant = tenant
        self.roles = roles or []
        self.read_only = read_only
        # TODO(jwood): self._show_deleted = show_deleted
        # (mkbhanda) possibly domain could be owner
        # brings us to the key scope question
        self.owner_is_tenant = owner_is_tenant
        self.request_id = utils.generate_uuid()
        self.service_catalog = service_catalog
        # Fall back to a default policy enforcer when none is injected.
        self.policy_enforcer = policy_enforcer or policy.Enforcer()
        self.is_admin = is_admin
        # TODO(jwood): Is this needed?
        # if not self.is_admin:
        #     self.is_admin = \
        #         self.policy_enforcer.check_is_admin(self)
        # Publish this context to the thread-local store only when no
        # context has been stored there yet.
        if not hasattr(local.store, 'context'):
            self.update_store()
    def to_dict(self):
        """Return the context as a dict suitable for logging."""
        # NOTE(ameade): These keys are named to correspond with the default
        # format string for logging the context in openstack common
        return {
            'request_id': self.request_id,
            #NOTE(bcwaldon): openstack-common logging expects 'user'
            'user': self.user,
            'user_id': self.user,
            #NOTE(bcwaldon): openstack-common logging expects 'tenant'
            'tenant': self.tenant,
            'tenant_id': self.tenant,
            'project_id': self.tenant,
            # TODO(jwood): 'is_admin': self.is_admin,
            # TODO(jwood): 'read_deleted': self.show_deleted,
            'roles': self.roles,
            'auth_token': self.auth_tok,
            'service_catalog': self.service_catalog,
        }
    @classmethod
    def from_dict(cls, values):
        """Alternate constructor: rebuild a context from to_dict() output."""
        return cls(**values)
    def update_store(self):
        """Store this context in the thread-local store."""
        local.store.context = self
    @property
    def owner(self):
        """Return the owner to correlate with key."""
        return self.tenant if self.owner_is_tenant else self.user
    # TODO(jwood):
    # @property
    # def show_deleted(self):
    #     """Admins can see deleted by default"""
    #     if self._show_deleted or self.is_admin:
    #         return True
    #     return False
| 36.301075 | 78 | 0.634775 |
acf8cdd4d6f180dfb5771a348c02c587f697ed4f | 113,068 | py | Python | airflow/www/views.py | rfrenoy/incubator-airflow | 33d604b9f3a006f27579a233563280a0893fefbd | [
"Apache-2.0"
] | null | null | null | airflow/www/views.py | rfrenoy/incubator-airflow | 33d604b9f3a006f27579a233563280a0893fefbd | [
"Apache-2.0"
] | null | null | null | airflow/www/views.py | rfrenoy/incubator-airflow | 33d604b9f3a006f27579a233563280a0893fefbd | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import ast
import codecs
import copy
import datetime as dt
import itertools
import json
import logging
import math
import os
import traceback
from collections import defaultdict
from datetime import timedelta
from functools import wraps
from textwrap import dedent
import bleach
import markdown
import nvd3
import pendulum
import sqlalchemy as sqla
from flask import (
abort, jsonify, redirect, url_for, request, Markup, Response,
current_app, render_template, make_response)
from flask import flash
from flask._compat import PY2
from flask_admin import BaseView, expose, AdminIndexView
from flask_admin.actions import action
from flask_admin.babel import lazy_gettext
from flask_admin.contrib.sqla import ModelView
from flask_admin.form.fields import DateTimeField
from flask_admin.tools import iterdecode
from jinja2 import escape
from jinja2.sandbox import ImmutableSandboxedEnvironment
from past.builtins import basestring, unicode
from pygments import highlight, lexers
from pygments.formatters import HtmlFormatter
from sqlalchemy import or_, desc, and_, union_all
from wtforms import (
Form, SelectField, TextAreaField, PasswordField,
StringField, validators)
import airflow
from airflow.orm import DagBag
from airflow import configuration as conf
from airflow import models
from airflow import settings
from airflow.api.common.experimental.mark_tasks import (set_dag_run_state_to_running,
set_dag_run_state_to_success,
set_dag_run_state_to_failed)
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.models import XCom, DagRun
from airflow.operators.subdag_operator import SubDagOperator
from airflow.ti_deps.dep_context import DepContext, QUEUE_DEPS, SCHEDULER_DEPS
from airflow.utils import timezone
from airflow.utils.dates import infer_time_unit, scale_time_units, parse_execution_date
from airflow.utils.db import create_session, provide_session
from airflow.utils.helpers import alchemy_to_dict
from airflow.utils.json import json_ser
from airflow.utils.net import get_hostname
from airflow.utils.state import State
from airflow.utils.timezone import datetime
from airflow.www import utils as wwwutils
from airflow.www.forms import (DateTimeForm, DateTimeWithNumRunsForm,
DateTimeWithNumRunsWithDagRunsForm)
from airflow.www.validators import GreaterEqualThan
# Hard caps on rows fetched for ad-hoc queries and chart data.
QUERY_LIMIT = 100000
CHART_LIMIT = 200000
UTF8_READER = codecs.getreader('utf-8')
# Module-level DagBag shared by all view handlers.
dagbag = DagBag(settings.DAGS_FOLDER)
# Re-export the auth helpers from the configured login backend.
login_required = airflow.login.login_required
current_user = airflow.login.current_user
logout_user = airflow.login.logout_user
FILTER_BY_OWNER = False
PAGE_SIZE = conf.getint('webserver', 'page_size')
if conf.getboolean('webserver', 'FILTER_BY_OWNER'):
    # filter_by_owner if authentication is enabled and filter_by_owner is true
    FILTER_BY_OWNER = not current_app.config['LOGIN_DISABLED']
def dag_link(v, c, m, p):
    """Column formatter: render the row's dag_id as a link to its graph view.

    Returns empty markup when the row has no dag_id.
    """
    if m.dag_id is None:
        return Markup()
    dag_id = bleach.clean(m.dag_id)
    graph_url = url_for(
        'airflow.graph',
        dag_id=dag_id,
        execution_date=m.execution_date)
    return Markup('<a href="{}">{}</a>'.format(graph_url, dag_id))
def log_url_formatter(v, c, m, p):
    """Column formatter: render a book icon linking to ``m.log_url``."""
    template = (
        '<a href="{m.log_url}">'
        ' <span class="glyphicon glyphicon-book" aria-hidden="true">'
        '</span></a>')
    # Markup(...).format escapes the interpolated attribute value.
    return Markup(template).format(m=m)
def dag_run_link(v, c, m, p):
    """Column formatter: render a DagRun's run_id as a link to its graph view.

    Note the interpolation happens with str.format before wrapping in
    Markup, so run_id is not HTML-escaped (same as the original).
    """
    # Removed an unused local: the original computed bleach.clean(m.dag_id)
    # into a variable that was never referenced.
    url = url_for(
        'airflow.graph',
        dag_id=m.dag_id,
        run_id=m.run_id,
        execution_date=m.execution_date)
    return Markup('<a href="{url}">{m.run_id}</a>'.format(url=url, m=m))
def task_instance_link(v, c, m, p):
    """Column formatter: link a task instance to its task view, plus a
    filter icon linking to the graph view rooted at this task.
    """
    dag_id = bleach.clean(m.dag_id)
    task_id = bleach.clean(m.task_id)
    url = url_for(
        'airflow.task',
        dag_id=dag_id,
        task_id=task_id,
        execution_date=m.execution_date.isoformat())
    url_root = url_for(
        'airflow.graph',
        dag_id=dag_id,
        root=task_id,
        execution_date=m.execution_date.isoformat())
    # The HTML template is formatted with str.format before the Markup
    # wrap, so the interpolated values are not re-escaped here.
    return Markup(
        """
        <span style="white-space: nowrap;">
        <a href="{url}">{task_id}</a>
        <a href="{url_root}" title="Filter on this task and upstream">
        <span class="glyphicon glyphicon-filter" style="margin-left: 0px;"
            aria-hidden="true"></span>
        </a>
        </span>
        """.format(**locals()))
def state_token(state):
    """Render *state* as a colored HTML label (color from State.color)."""
    return Markup(
        '<span class="label" style="background-color:{color};">'
        '{state}</span>'.format(color=State.color(state), state=state))
def parse_datetime_f(value):
    """Make a datetime timezone-aware; pass any other value through unchanged."""
    if isinstance(value, dt.datetime):
        return timezone.make_aware(value)
    return value
def state_f(v, c, m, p):
    """Column formatter: render the row's state via state_token."""
    return state_token(m.state)
def duration_f(v, c, m, p):
    """Column formatter: return the row's duration as a timedelta.

    Returns None when the row has no end_date or no (truthy) duration.
    """
    if not (m.end_date and m.duration):
        return None
    return timedelta(seconds=m.duration)
def datetime_f(v, c, m, p):
    """Column formatter: ISO-format the datetime attribute *p* of the row.

    The leading "YYYY-" is dropped when the timestamp is in the current
    (UTC) year; a falsy attribute renders as an empty cell.
    """
    value = getattr(m, p)
    if value:
        text = value.isoformat()
    else:
        text = ''
    if text[:4] == timezone.utcnow().isoformat()[:4]:
        text = text[5:]
    return Markup("<nobr>{}</nobr>".format(text))
def nobr_f(v, c, m, p):
    """Column formatter: wrap the raw attribute value in <nobr>.

    Note: the value is interpolated with str.format before the Markup
    wrap, so it is not HTML-escaped here.
    """
    return Markup("<nobr>{}</nobr>".format(getattr(m, p)))
def label_link(v, c, m, p):
    """Column formatter: render a chart row's label as a link to its chart.

    m.default_params is stored as a Python-literal string; it is parsed
    with ast.literal_eval and spread into the chart URL's query args.
    """
    try:
        default_params = ast.literal_eval(m.default_params)
    except Exception:
        # Missing/unparseable params: fall back to a bare chart link.
        default_params = {}
    url = url_for(
        'airflow.chart', chart_id=m.id, iteration_no=m.iteration_no,
        **default_params)
    return Markup("<a href='{url}'>{m.label}</a>".format(**locals()))
def pool_link(v, c, m, p):
    """Column formatter: link the pool name to its task-instance list."""
    filter_url = '/admin/taskinstance/?flt1_pool_equals=' + m.pool
    return Markup("<a href='{url}'>{m.pool}</a>".format(url=filter_url, m=m))
def pygment_html_render(s, lexer=lexers.TextLexer):
    """Syntax-highlight *s* with Pygments, returning HTML with line numbers."""
    return highlight(
        s,
        lexer(),
        HtmlFormatter(linenos=True),
    )
def render(obj, lexer):
    """Render *obj* as syntax-highlighted HTML using *lexer*.

    Strings are highlighted directly; lists/tuples and dicts get a small
    header div per item. Any other type yields an empty string.
    """
    out = ""
    # basestring (from past.builtins) keeps py2/py3 compatibility.
    if isinstance(obj, basestring):
        out += pygment_html_render(obj, lexer)
    elif isinstance(obj, (tuple, list)):
        for i, s in enumerate(obj):
            out += "<div>List item #{}</div>".format(i)
            out += "<div>" + pygment_html_render(s, lexer) + "</div>"
    elif isinstance(obj, dict):
        for k, v in obj.items():
            out += '<div>Dict item "{}"</div>'.format(k)
            out += "<div>" + pygment_html_render(v, lexer) + "</div>"
    return out
def wrapped_markdown(s):
    """Convert markdown *s* to HTML wrapped in a .rich_doc div."""
    return '<div class="rich_doc">' + markdown.markdown(s) + "</div>"
# Maps task-attribute names to HTML renderers: code/doc fields are
# syntax-highlighted with the matching lexer, doc_md goes through markdown,
# and python_callable is rendered from its extracted source.
attr_renderer = {
    'bash_command': lambda x: render(x, lexers.BashLexer),
    'hql': lambda x: render(x, lexers.SqlLexer),
    'sql': lambda x: render(x, lexers.SqlLexer),
    'doc': lambda x: render(x, lexers.TextLexer),
    'doc_json': lambda x: render(x, lexers.JsonLexer),
    'doc_rst': lambda x: render(x, lexers.RstLexer),
    'doc_yaml': lambda x: render(x, lexers.YamlLexer),
    'doc_md': wrapped_markdown,
    'python_callable': lambda x: render(
        wwwutils.get_python_source(x),
        lexers.PythonLexer,
    ),
}
def data_profiling_required(f):
    """Decorator for views requiring data profiling access"""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        # Allow when authentication is disabled entirely, or when the
        # logged-in user carries the data-profiling permission.
        if (
                current_app.config['LOGIN_DISABLED'] or
                (not current_user.is_anonymous and current_user.data_profiling())
        ):
            return f(*args, **kwargs)
        else:
            flash("This page requires data profiling privileges", "error")
            return redirect(url_for('admin.index'))
    return decorated_function
def fused_slots(v, c, m, p):
    """Column formatter: link showing how many slots of this pool are
    occupied by running task instances.
    """
    filter_url = ('/admin/taskinstance/'
                  '?flt1_pool_equals=' + m.pool +
                  '&flt2_state_equals=running')
    return Markup("<a href='{0}'>{1}</a>".format(filter_url, m.used_slots()))
def fqueued_slots(v, c, m, p):
    """Column formatter: link showing how many slots of this pool are
    taken by queued task instances.
    """
    filter_url = ('/admin/taskinstance/'
                  '?flt1_pool_equals=' + m.pool +
                  '&flt2_state_equals=queued&sort=10&desc=1')
    return Markup("<a href='{0}'>{1}</a>".format(filter_url, m.queued_slots()))
def recurse_tasks(tasks, task_ids, dag_ids, task_id_to_dag):
    """Recursively collect task ids and subdag ids from *tasks*.

    Accumulates into the mutable arguments: task_ids (unique subdag task
    ids), dag_ids (subdag dag ids) and task_id_to_dag (task id -> dag).

    Note: a SubDagOperator is also a BaseOperator, so both isinstance
    branches below apply to it -- its subdag is descended into AND the
    operator itself is recorded in task_id_to_dag.
    """
    if isinstance(tasks, list):
        for task in tasks:
            recurse_tasks(task, task_ids, dag_ids, task_id_to_dag)
        return
    if isinstance(tasks, SubDagOperator):
        subtasks = tasks.subdag.tasks
        dag_ids.append(tasks.subdag.dag_id)
        for subtask in subtasks:
            if subtask.task_id not in task_ids:
                task_ids.append(subtask.task_id)
                task_id_to_dag[subtask.task_id] = tasks.subdag
        recurse_tasks(subtasks, task_ids, dag_ids, task_id_to_dag)
    if isinstance(tasks, BaseOperator):
        task_id_to_dag[tasks.task_id] = tasks.dag
def get_chart_height(dag):
    """Heuristic chart height for *dag*: 600px base plus 10px per task.

    TODO(aoen): See [AIRFLOW-1263] -- the task count is used to approximate
    the chart size, since nvd3 charts are otherwise tiny and unreadable for
    DAGs with many tasks; ideally nvd3 would allow dynamic-height charts
    that size themselves to their contents.
    """
    base_height = 600
    pixels_per_task = 10
    return base_height + pixels_per_task * len(dag.tasks)
def get_date_time_num_runs_dag_runs_form_data(request, session, dag):
    """Collect the form state shared by the date/num-runs/dag-run pickers.

    Reads execution_date, base_date and num_runs from the request args
    (falling back to the dag's latest execution date / configured default),
    queries up to num_runs DagRuns at or before base_date, and returns a
    dict with the selected date, the run choices and the selected state.
    """
    dttm = request.args.get('execution_date')
    if dttm:
        dttm = pendulum.parse(dttm)
    else:
        dttm = dag.latest_execution_date or timezone.utcnow()
    base_date = request.args.get('base_date')
    if base_date:
        base_date = timezone.parse(base_date)
    else:
        # The DateTimeField widget truncates milliseconds and would loose
        # the first dag run. Round to next second.
        base_date = (dttm + timedelta(seconds=1)).replace(microsecond=0)
    default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
    num_runs = request.args.get('num_runs')
    num_runs = int(num_runs) if num_runs else default_dag_run
    DR = models.DagRun
    # Most recent num_runs runs at or before base_date.
    drs = (
        session.query(DR)
        .filter(
            DR.dag_id == dag.dag_id,
            DR.execution_date <= base_date)
        .order_by(desc(DR.execution_date))
        .limit(num_runs)
        .all()
    )
    dr_choices = []
    dr_state = None
    for dr in drs:
        dr_choices.append((dr.execution_date.isoformat(), dr.run_id))
        if dttm == dr.execution_date:
            dr_state = dr.state
    # Happens if base_date was changed and the selected dag run is not in result
    if not dr_state and drs:
        dr = drs[0]
        dttm = dr.execution_date
        dr_state = dr.state
    return {
        'dttm': dttm,
        'base_date': base_date,
        'num_runs': num_runs,
        'execution_date': dttm.isoformat(),
        'dr_choices': dr_choices,
        'dr_state': dr_state,
    }
class Airflow(BaseView):
    """Primary flask-admin view serving the Airflow web UI pages."""
    def is_visible(self):
        # Hide this view from the flask-admin navigation menu.
        return False
    @expose('/')
    @login_required
    def index(self):
        # Home page: the DAG listing.
        return self.render('airflow/dags.html')
    @expose('/chart_data')
    @data_profiling_required
    @wwwutils.gzipped
    # @cache.cached(timeout=3600, key_prefix=wwwutils.make_cache_key)
    def chart_data(self):
        """
        JSON/CSV endpoint backing the chart page: renders the chart's
        templated SQL in a sandbox, runs it through the connection's hook
        into a pandas DataFrame, and returns either raw CSV or an nvd3
        chart payload depending on query args.

        Query args: chart_id (models.Chart PK), csv ("true" for CSV output),
        plus arbitrary keys merged into the SQL template context.
        """
        from airflow import macros
        import pandas as pd
        if conf.getboolean('core', 'secure_mode'):
            abort(404)
        with create_session() as session:
            chart_id = request.args.get('chart_id')
            csv = request.args.get('csv') == "true"
            chart = session.query(models.Chart).filter_by(id=chart_id).first()
            db = session.query(
                models.Connection).filter_by(conn_id=chart.conn_id).first()
        payload = {
            "state": "ERROR",
            "error": ""
        }
        # Processing templated fields
        try:
            args = ast.literal_eval(chart.default_params)
            if not isinstance(args, dict):
                raise AirflowException('Not a dict')
        except Exception:
            args = {}
            payload['error'] += (
                "Default params is not valid, string has to evaluate as "
                "a Python dictionary. ")
        request_dict = {k: request.args.get(k) for k in request.args}
        args.update(request_dict)
        args['macros'] = macros
        # Sandboxed Jinja rendering of user-provided SQL/label templates.
        sandbox = ImmutableSandboxedEnvironment()
        sql = sandbox.from_string(chart.sql).render(**args)
        label = sandbox.from_string(chart.label).render(**args)
        payload['sql_html'] = Markup(highlight(
            sql,
            lexers.SqlLexer(),  # Lexer call
            HtmlFormatter(noclasses=True))
        )
        payload['label'] = label
        pd.set_option('display.max_colwidth', 100)
        hook = db.get_hook()
        try:
            df = hook.get_pandas_df(
                wwwutils.limit_sql(sql, CHART_LIMIT, conn_type=db.conn_type))
            df = df.fillna(0)
        except Exception as e:
            # NOTE(review): if get_pandas_df raised, `df` is undefined in the
            # branches below (pre-existing behavior; the error string is set).
            payload['error'] += "SQL execution failed. Details: " + str(e)
        if csv:
            return Response(
                response=df.to_csv(index=False),
                status=200,
                mimetype="application/text")
        if not payload['error'] and len(df) == CHART_LIMIT:
            payload['warning'] = (
                "Data has been truncated to {0}"
                " rows. Expect incomplete results.").format(CHART_LIMIT)
        if not payload['error'] and len(df) == 0:
            payload['error'] += "Empty result set. "
        elif (
                not payload['error'] and
                chart.sql_layout == 'series' and
                chart.chart_type != "datatable" and
                len(df.columns) < 3):
            payload['error'] += "SQL needs to return at least 3 columns. "
        elif (
                not payload['error'] and
                chart.sql_layout == 'columns' and
                len(df.columns) < 2):
            payload['error'] += "SQL needs to return at least 2 columns. "
        elif not payload['error']:
            import numpy as np
            chart_type = chart.chart_type
            data = None
            if chart.show_datatable or chart_type == "datatable":
                data = df.to_dict(orient="split")
                data['columns'] = [{'title': c} for c in data['columns']]
                payload['data'] = data
            # Trying to convert time to something Highcharts likes
            x_col = 1 if chart.sql_layout == 'series' else 0
            if chart.x_is_date:
                try:
                    # From string to datetime
                    df[df.columns[x_col]] = pd.to_datetime(
                        df[df.columns[x_col]])
                    # NOTE(review): strftime("%s") is a platform-specific
                    # (glibc) extension — not portable; epoch millis intended.
                    df[df.columns[x_col]] = df[df.columns[x_col]].apply(
                        lambda x: int(x.strftime("%s")) * 1000)
                except Exception as e:
                    payload['error'] = "Time conversion failed"
            if chart_type == 'datatable':
                payload['state'] = 'SUCCESS'
                return wwwutils.json_response(payload)
            else:
                if chart.sql_layout == 'series':
                    # User provides columns (series, x, y)
                    df[df.columns[2]] = df[df.columns[2]].astype(np.float)
                    df = df.pivot_table(
                        index=df.columns[1],
                        columns=df.columns[0],
                        values=df.columns[2], aggfunc=np.sum)
                else:
                    # User provides columns (x, y, metric1, metric2, ...)
                    df.index = df[df.columns[0]]
                    df = df.sort_values(by=df.columns[0])
                    del df[df.columns[0]]
                    for col in df.columns:
                        df[col] = df[col].astype(np.float)
                df = df.fillna(0)
                NVd3ChartClass = chart_mapping.get(chart.chart_type)
                NVd3ChartClass = getattr(nvd3, NVd3ChartClass)
                nvd3_chart = NVd3ChartClass(x_is_date=chart.x_is_date)
                for col in df.columns:
                    nvd3_chart.add_serie(name=col, y=df[col].tolist(), x=df[col].index.tolist())
                try:
                    nvd3_chart.buildcontent()
                    payload['chart_type'] = nvd3_chart.__class__.__name__
                    payload['htmlcontent'] = nvd3_chart.htmlcontent
                except Exception as e:
                    payload['error'] = str(e)
            payload['state'] = 'SUCCESS'
            payload['request_dict'] = request_dict
        return wwwutils.json_response(payload)
@expose('/chart')
@data_profiling_required
def chart(self):
if conf.getboolean('core', 'secure_mode'):
abort(404)
with create_session() as session:
chart_id = request.args.get('chart_id')
embed = request.args.get('embed')
chart = session.query(models.Chart).filter_by(id=chart_id).first()
NVd3ChartClass = chart_mapping.get(chart.chart_type)
if not NVd3ChartClass:
flash(
"Not supported anymore as the license was incompatible, "
"sorry",
"danger")
redirect('/admin/chart/')
sql = ""
if chart.show_sql:
sql = Markup(highlight(
chart.sql,
lexers.SqlLexer(), # Lexer call
HtmlFormatter(noclasses=True))
)
return self.render(
'airflow/nvd3.html',
chart=chart,
title="Airflow - Chart",
sql=sql,
label=chart.label,
embed=embed)
@expose('/dag_stats')
@login_required
@provide_session
def dag_stats(self, session=None):
ds = models.DagStat
ds.update(
dag_ids=[dag.dag_id for dag in dagbag.dags.values() if not dag.is_subdag]
)
qry = (
session.query(ds.dag_id, ds.state, ds.count)
)
data = {}
for dag_id, state, count in qry:
if dag_id not in data:
data[dag_id] = {}
data[dag_id][state] = count
payload = {}
for dag in dagbag.dags.values():
payload[dag.safe_dag_id] = []
for state in State.dag_states:
count = data.get(dag.dag_id, {}).get(state, 0)
payload[dag.safe_dag_id].append({
'state': state,
'count': count,
'dag_id': dag.dag_id,
'color': State.color(state)
})
return wwwutils.json_response(payload)
    @expose('/task_stats')
    @login_required
    @provide_session
    def task_stats(self, session=None):
        """
        Return per-DAG task-instance state counts as JSON, taken from the
        currently running dag run(s), or from the most recent completed
        run when none is running.
        """
        TI = models.TaskInstance
        DagRun = models.DagRun
        Dag = models.DagModel
        # Most recent *completed* run per active, non-subdag DAG.
        LastDagRun = (
            session.query(DagRun.dag_id, sqla.func.max(DagRun.execution_date).label('execution_date'))
            .join(Dag, Dag.dag_id == DagRun.dag_id)
            .filter(DagRun.state != State.RUNNING)
            .filter(Dag.is_active == True)  # noqa: E712
            .filter(Dag.is_subdag == False)  # noqa: E712
            .group_by(DagRun.dag_id)
            .subquery('last_dag_run')
        )
        # All currently running runs per active, non-subdag DAG.
        RunningDagRun = (
            session.query(DagRun.dag_id, DagRun.execution_date)
            .join(Dag, Dag.dag_id == DagRun.dag_id)
            .filter(DagRun.state == State.RUNNING)
            .filter(Dag.is_active == True)  # noqa: E712
            .filter(Dag.is_subdag == False)  # noqa: E712
            .subquery('running_dag_run')
        )
        # Select all task_instances from active dag_runs.
        # If no dag_run is active, return task instances from most recent dag_run.
        LastTI = (
            session.query(TI.dag_id.label('dag_id'), TI.state.label('state'))
            .join(LastDagRun, and_(
                LastDagRun.c.dag_id == TI.dag_id,
                LastDagRun.c.execution_date == TI.execution_date))
        )
        RunningTI = (
            session.query(TI.dag_id.label('dag_id'), TI.state.label('state'))
            .join(RunningDagRun, and_(
                RunningDagRun.c.dag_id == TI.dag_id,
                RunningDagRun.c.execution_date == TI.execution_date))
        )
        UnionTI = union_all(LastTI, RunningTI).alias('union_ti')
        qry = (
            session.query(UnionTI.c.dag_id, UnionTI.c.state, sqla.func.count())
            .group_by(UnionTI.c.dag_id, UnionTI.c.state)
        )
        data = {}
        for dag_id, state, count in qry:
            if dag_id not in data:
                data[dag_id] = {}
            data[dag_id][state] = count
        session.commit()
        # Emit a fixed-order entry per task state for every DAG, defaulting
        # missing states to a count of 0.
        payload = {}
        for dag in dagbag.dags.values():
            payload[dag.safe_dag_id] = []
            for state in State.task_states:
                count = data.get(dag.dag_id, {}).get(state, 0)
                payload[dag.safe_dag_id].append({
                    'state': state,
                    'count': count,
                    'dag_id': dag.dag_id,
                    'color': State.color(state)
                })
        return wwwutils.json_response(payload)
@expose('/code')
@login_required
def code(self):
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
title = dag_id
try:
with wwwutils.open_maybe_zipped(dag.fileloc, 'r') as f:
code = f.read()
html_code = highlight(
code, lexers.PythonLexer(), HtmlFormatter(linenos=True))
except IOError as e:
html_code = str(e)
return self.render(
'airflow/dag_code.html', html_code=html_code, dag=dag, title=title,
root=request.args.get('root'),
demo_mode=conf.getboolean('webserver', 'demo_mode'))
@expose('/dag_details')
@login_required
@provide_session
def dag_details(self, session=None):
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
title = "DAG details"
TI = models.TaskInstance
states = session\
.query(TI.state, sqla.func.count(TI.dag_id))\
.filter(TI.dag_id == dag_id)\
.group_by(TI.state)\
.all()
return self.render(
'airflow/dag_details.html',
dag=dag, title=title, states=states, State=State)
    @current_app.errorhandler(404)
    def circles(self):
        """Flask 404 handler: render the 'circles' not-found page."""
        return render_template(
            'airflow/circles.html', hostname=get_hostname()), 404
    @current_app.errorhandler(500)
    def show_traceback(self):
        """Flask 500 handler: render the traceback page with ascii art."""
        from airflow.utils import asciiart as ascii_
        return render_template(
            'airflow/traceback.html',
            hostname=get_hostname(),
            nukular=ascii_.nukular,
            info=traceback.format_exc()), 500
    @expose('/noaccess')
    def noaccess(self):
        """Render the 'no access' page for unauthorized users."""
        return self.render('airflow/noaccess.html')
@expose('/pickle_info')
@login_required
def pickle_info(self):
d = {}
dag_id = request.args.get('dag_id')
dags = [dagbag.dags.get(dag_id)] if dag_id else dagbag.dags.values()
for dag in dags:
if not dag.is_subdag:
d[dag.dag_id] = dag.pickle_info()
return wwwutils.json_response(d)
    @expose('/login', methods=['GET', 'POST'])
    def login(self):
        """Delegate authentication to the configured airflow.login backend."""
        return airflow.login.login(self, request)
    @expose('/logout')
    def logout(self):
        """Log the current user out and redirect to the admin index page."""
        logout_user()
        flash('You have been logged out.')
        return redirect(url_for('admin.index'))
@expose('/rendered')
@login_required
@wwwutils.action_logging
def rendered(self):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dttm = pendulum.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
dag = dagbag.get_dag(dag_id)
task = copy.copy(dag.get_task(task_id))
ti = models.TaskInstance(task=task, execution_date=dttm)
try:
ti.render_templates()
except Exception as e:
flash("Error rendering template: " + str(e), "error")
title = "Rendered Template"
html_dict = {}
for template_field in task.__class__.template_fields:
content = getattr(task, template_field)
if template_field in attr_renderer:
html_dict[template_field] = attr_renderer[template_field](content)
else:
html_dict[template_field] = (
"<pre><code>" + str(content) + "</pre></code>")
return self.render(
'airflow/ti_code.html',
html_dict=html_dict,
dag=dag,
task_id=task_id,
execution_date=execution_date,
form=form,
title=title, )
    @expose('/get_logs_with_metadata')
    @login_required
    @wwwutils.action_logging
    @provide_session
    def get_logs_with_metadata(self, session=None):
        """
        AJAX endpoint: fetch one try's log chunk for a task instance via
        the configured task-log handler, returning the log text plus the
        handler's continuation metadata as JSON.
        """
        dag_id = request.args.get('dag_id')
        task_id = request.args.get('task_id')
        execution_date = request.args.get('execution_date')
        dttm = pendulum.parse(execution_date)
        try_number = int(request.args.get('try_number'))
        metadata = request.args.get('metadata')
        metadata = json.loads(metadata)
        # metadata may be null
        if not metadata:
            metadata = {}
        # Convert string datetime into actual datetime
        try:
            execution_date = timezone.parse(execution_date)
        except ValueError:
            error_message = (
                'Given execution date, {}, could not be identified '
                'as a date. Example date format: 2015-11-16T14:34:15+00:00'.format(
                    execution_date))
            response = jsonify({'error': error_message})
            response.status_code = 400
            return response
        # Find the log handler named by [core] task_log_reader.
        logger = logging.getLogger('airflow.task')
        task_log_reader = conf.get('core', 'task_log_reader')
        handler = next((handler for handler in logger.handlers
                        if handler.name == task_log_reader), None)
        ti = session.query(models.TaskInstance).filter(
            models.TaskInstance.dag_id == dag_id,
            models.TaskInstance.task_id == task_id,
            models.TaskInstance.execution_date == dttm).first()
        try:
            if ti is None:
                logs = ["*** Task instance did not exist in the DB\n"]
                metadata['end_of_log'] = True
            else:
                dag = dagbag.get_dag(dag_id)
                ti.task = dag.get_task(ti.task_id)
                logs, metadatas = handler.read(ti, try_number, metadata=metadata)
                metadata = metadatas[0]
            # Python 2: ensure each chunk is unicode before jsonify.
            for i, log in enumerate(logs):
                if PY2 and not isinstance(log, unicode):
                    logs[i] = log.decode('utf-8')
            message = logs[0]
            return jsonify(message=message, metadata=metadata)
        except AttributeError as e:
            # Raised when the resolved handler has no read() support
            # (or no handler matched and `handler` is None).
            error_message = ["Task log handler {} does not support read logs.\n{}\n"
                             .format(task_log_reader, str(e))]
            metadata['end_of_log'] = True
            return jsonify(message=error_message, error=True, metadata=metadata)
@expose('/log')
@login_required
@wwwutils.action_logging
@provide_session
def log(self, session=None):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dttm = pendulum.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
dag = dagbag.get_dag(dag_id)
ti = session.query(models.TaskInstance).filter(
models.TaskInstance.dag_id == dag_id,
models.TaskInstance.task_id == task_id,
models.TaskInstance.execution_date == dttm).first()
logs = [''] * (ti.next_try_number - 1 if ti is not None else 0)
return self.render(
'airflow/ti_log.html',
logs=logs, dag=dag, title="Log by attempts",
dag_id=dag.dag_id, task_id=task_id,
execution_date=execution_date, form=form)
    @expose('/task')
    @login_required
    @wwwutils.action_logging
    def task(self):
        """
        Show the details page for one task instance: its attributes, the
        task's attributes, rendered code attributes, and the dependency
        reasons keeping it from running.
        """
        TI = models.TaskInstance
        dag_id = request.args.get('dag_id')
        task_id = request.args.get('task_id')
        # Carrying execution_date through, even though it's irrelevant for
        # this context
        execution_date = request.args.get('execution_date')
        dttm = pendulum.parse(execution_date)
        form = DateTimeForm(data={'execution_date': dttm})
        dag = dagbag.get_dag(dag_id)
        if not dag or task_id not in dag.task_ids:
            flash(
                "Task [{}.{}] doesn't seem to exist"
                " at the moment".format(dag_id, task_id),
                "error")
            return redirect('/admin/')
        task = copy.copy(dag.get_task(task_id))
        task.resolve_template_files()
        ti = TI(task=task, execution_date=dttm)
        ti.refresh_from_db()
        # `type(self.task)` is the type of this view's own bound `task`
        # method; comparing against it filters out bound methods so only
        # data attributes are displayed.
        ti_attrs = []
        for attr_name in dir(ti):
            if not attr_name.startswith('_'):
                attr = getattr(ti, attr_name)
                if type(attr) != type(self.task):  # noqa: E721
                    ti_attrs.append((attr_name, str(attr)))
        task_attrs = []
        for attr_name in dir(task):
            if not attr_name.startswith('_'):
                attr = getattr(task, attr_name)
                if type(attr) != type(self.task) and \
                        attr_name not in attr_renderer:  # noqa: E721
                    task_attrs.append((attr_name, str(attr)))
        # Color coding the special attributes that are code
        special_attrs_rendered = {}
        for attr_name in attr_renderer:
            if hasattr(task, attr_name):
                source = getattr(task, attr_name)
                special_attrs_rendered[attr_name] = attr_renderer[attr_name](source)
        # Fallback message shown when no failed dependency is reported.
        no_failed_deps_result = [(
            "Unknown",
            dedent("""\
            All dependencies are met but the task instance is not running.
            In most cases this just means that the task will probably
            be scheduled soon unless:<br/>
            - The scheduler is down or under heavy load<br/>
            - The following configuration values may be limiting the number
            of queueable processes:
            <code>parallelism</code>,
            <code>dag_concurrency</code>,
            <code>max_active_dag_runs_per_dag</code>,
            <code>non_pooled_task_slot_count</code><br/>
            {}
            <br/>
            If this task instance does not start soon please contact your Airflow """
            """administrator for assistance."""
            .format(
                "- This task instance already ran and had its state changed "
                "manually (e.g. cleared in the UI)<br/>"
                if ti.state == State.NONE else "")))]
        # Use the scheduler's context to figure out which dependencies are not met
        dep_context = DepContext(SCHEDULER_DEPS)
        failed_dep_reasons = [(dep.dep_name, dep.reason) for dep in
                              ti.get_failed_dep_statuses(
                                  dep_context=dep_context)]
        title = "Task Instance Details"
        return self.render(
            'airflow/task.html',
            task_attrs=task_attrs,
            ti_attrs=ti_attrs,
            failed_dep_reasons=failed_dep_reasons or no_failed_deps_result,
            task_id=task_id,
            execution_date=execution_date,
            special_attrs_rendered=special_attrs_rendered,
            form=form,
            dag=dag, title=title)
@expose('/xcom')
@login_required
@wwwutils.action_logging
@provide_session
def xcom(self, session=None):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
# Carrying execution_date through, even though it's irrelevant for
# this context
execution_date = request.args.get('execution_date')
dttm = pendulum.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
dag = dagbag.get_dag(dag_id)
if not dag or task_id not in dag.task_ids:
flash(
"Task [{}.{}] doesn't seem to exist"
" at the moment".format(dag_id, task_id),
"error")
return redirect('/admin/')
xcomlist = session.query(XCom).filter(
XCom.dag_id == dag_id, XCom.task_id == task_id,
XCom.execution_date == dttm).all()
attributes = []
for xcom in xcomlist:
if not xcom.key.startswith('_'):
attributes.append((xcom.key, xcom.value))
title = "XCom"
return self.render(
'airflow/xcom.html',
attributes=attributes,
task_id=task_id,
execution_date=execution_date,
form=form,
dag=dag, title=title)
    @expose('/run')
    @login_required
    @wwwutils.action_logging
    @wwwutils.notify_owner
    def run(self):
        """
        Queue a single task instance for immediate execution through the
        configured executor (Celery or Kubernetes only), optionally
        ignoring dependencies and current state.
        """
        dag_id = request.args.get('dag_id')
        task_id = request.args.get('task_id')
        origin = request.args.get('origin')
        dag = dagbag.get_dag(dag_id)
        task = dag.get_task(task_id)
        execution_date = request.args.get('execution_date')
        execution_date = pendulum.parse(execution_date)
        ignore_all_deps = request.args.get('ignore_all_deps') == "true"
        ignore_task_deps = request.args.get('ignore_task_deps') == "true"
        ignore_ti_state = request.args.get('ignore_ti_state') == "true"
        from airflow.executors import GetDefaultExecutor
        executor = GetDefaultExecutor()
        # Only remote executors can pick the task up from the web process;
        # executor modules may be uninstalled, hence the guarded imports.
        valid_celery_config = False
        valid_kubernetes_config = False
        try:
            from airflow.executors.celery_executor import CeleryExecutor
            valid_celery_config = isinstance(executor, CeleryExecutor)
        except ImportError:
            pass
        try:
            from airflow.contrib.executors.kubernetes_executor import KubernetesExecutor
            valid_kubernetes_config = isinstance(executor, KubernetesExecutor)
        except ImportError:
            pass
        if not valid_celery_config and not valid_kubernetes_config:
            flash("Only works with the Celery or Kubernetes executors, sorry", "error")
            return redirect(origin)
        ti = models.TaskInstance(task=task, execution_date=execution_date)
        ti.refresh_from_db()
        # Make sure the task instance can be queued
        dep_context = DepContext(
            deps=QUEUE_DEPS,
            ignore_all_deps=ignore_all_deps,
            ignore_task_deps=ignore_task_deps,
            ignore_ti_state=ignore_ti_state)
        failed_deps = list(ti.get_failed_dep_statuses(dep_context=dep_context))
        if failed_deps:
            failed_deps_str = ", ".join(
                ["{}: {}".format(dep.dep_name, dep.reason) for dep in failed_deps])
            flash("Could not queue task instance for execution, dependencies not met: "
                  "{}".format(failed_deps_str),
                  "error")
            return redirect(origin)
        executor.start()
        executor.queue_task_instance(
            ti,
            ignore_all_deps=ignore_all_deps,
            ignore_task_deps=ignore_task_deps,
            ignore_ti_state=ignore_ti_state)
        executor.heartbeat()
        flash(
            "Sent {} to the message queue, "
            "it should start any moment now.".format(ti))
        return redirect(origin)
@expose('/delete')
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def delete(self):
from airflow.api.common.experimental import delete_dag
from airflow.exceptions import DagNotFound, DagFileExists
dag_id = request.args.get('dag_id')
origin = request.args.get('origin') or "/admin/"
try:
delete_dag.delete_dag(dag_id)
except DagNotFound:
flash("DAG with id {} not found. Cannot delete".format(dag_id))
return redirect(request.referrer)
except DagFileExists:
flash("Dag id {} is still in DagBag. "
"Remove the DAG file first.".format(dag_id))
return redirect(request.referrer)
flash("Deleting DAG with id {}. May take a couple minutes to fully"
" disappear.".format(dag_id))
# Upon successful delete return to origin
return redirect(origin)
@expose('/trigger')
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def trigger(self):
dag_id = request.args.get('dag_id')
origin = request.args.get('origin') or "/admin/"
dag = dagbag.get_dag(dag_id)
if not dag:
flash("Cannot find dag {}".format(dag_id))
return redirect(origin)
execution_date = timezone.utcnow()
run_id = "manual__{0}".format(execution_date.isoformat())
dr = DagRun.find(dag_id=dag_id, run_id=run_id)
if dr:
flash("This run_id {} already exists".format(run_id))
return redirect(origin)
run_conf = {}
dag.create_dagrun(
run_id=run_id,
execution_date=execution_date,
state=State.RUNNING,
conf=run_conf,
external_trigger=True
)
flash(
"Triggered {}, "
"it should start any moment now.".format(dag_id))
return redirect(origin)
def _clear_dag_tis(self, dag, start_date, end_date, origin,
recursive=False, confirmed=False):
if confirmed:
count = dag.clear(
start_date=start_date,
end_date=end_date,
include_subdags=recursive,
include_parentdag=recursive,
)
flash("{0} task instances have been cleared".format(count))
return redirect(origin)
tis = dag.clear(
start_date=start_date,
end_date=end_date,
include_subdags=recursive,
dry_run=True,
include_parentdag=recursive,
)
if not tis:
flash("No task instances to clear", 'error')
response = redirect(origin)
else:
details = "\n".join([str(t) for t in tis])
response = self.render(
'airflow/confirm.html',
message=("Here's the list of task instances you are about "
"to clear:"),
details=details)
return response
@expose('/clear')
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def clear(self):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
origin = request.args.get('origin')
dag = dagbag.get_dag(dag_id)
execution_date = request.args.get('execution_date')
execution_date = pendulum.parse(execution_date)
confirmed = request.args.get('confirmed') == "true"
upstream = request.args.get('upstream') == "true"
downstream = request.args.get('downstream') == "true"
future = request.args.get('future') == "true"
past = request.args.get('past') == "true"
recursive = request.args.get('recursive') == "true"
dag = dag.sub_dag(
task_regex=r"^{0}$".format(task_id),
include_downstream=downstream,
include_upstream=upstream)
end_date = execution_date if not future else None
start_date = execution_date if not past else None
return self._clear_dag_tis(dag, start_date, end_date, origin,
recursive=recursive, confirmed=confirmed)
@expose('/dagrun_clear')
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def dagrun_clear(self):
dag_id = request.args.get('dag_id')
origin = request.args.get('origin')
execution_date = request.args.get('execution_date')
confirmed = request.args.get('confirmed') == "true"
dag = dagbag.get_dag(dag_id)
execution_date = pendulum.parse(execution_date)
start_date = execution_date
end_date = execution_date
return self._clear_dag_tis(dag, start_date, end_date, origin,
recursive=True, confirmed=confirmed)
@expose('/blocked')
@login_required
@provide_session
def blocked(self, session=None):
DR = models.DagRun
dags = session\
.query(DR.dag_id, sqla.func.count(DR.id))\
.filter(DR.state == State.RUNNING)\
.group_by(DR.dag_id)\
.all()
payload = []
for dag_id, active_dag_runs in dags:
max_active_runs = 0
if dag_id in dagbag.dags:
max_active_runs = dagbag.dags[dag_id].max_active_runs
payload.append({
'dag_id': dag_id,
'active_dag_run': active_dag_runs,
'max_active_runs': max_active_runs,
})
return wwwutils.json_response(payload)
def _mark_dagrun_state_as_failed(self, dag_id, execution_date, confirmed, origin):
if not execution_date:
flash('Invalid execution date', 'error')
return redirect(origin)
execution_date = pendulum.parse(execution_date)
dag = dagbag.get_dag(dag_id)
if not dag:
flash('Cannot find DAG: {}'.format(dag_id), 'error')
return redirect(origin)
new_dag_state = set_dag_run_state_to_failed(dag, execution_date, commit=confirmed)
if confirmed:
flash('Marked failed on {} task instances'.format(len(new_dag_state)))
return redirect(origin)
else:
details = '\n'.join([str(t) for t in new_dag_state])
response = self.render('airflow/confirm.html',
message=("Here's the list of task instances you are "
"about to mark as failed"),
details=details)
return response
def _mark_dagrun_state_as_success(self, dag_id, execution_date, confirmed, origin):
if not execution_date:
flash('Invalid execution date', 'error')
return redirect(origin)
execution_date = pendulum.parse(execution_date)
dag = dagbag.get_dag(dag_id)
if not dag:
flash('Cannot find DAG: {}'.format(dag_id), 'error')
return redirect(origin)
new_dag_state = set_dag_run_state_to_success(dag, execution_date,
commit=confirmed)
if confirmed:
flash('Marked success on {} task instances'.format(len(new_dag_state)))
return redirect(origin)
else:
details = '\n'.join([str(t) for t in new_dag_state])
response = self.render('airflow/confirm.html',
message=("Here's the list of task instances you are "
"about to mark as success"),
details=details)
return response
@expose('/dagrun_failed')
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def dagrun_failed(self):
dag_id = request.args.get('dag_id')
execution_date = request.args.get('execution_date')
confirmed = request.args.get('confirmed') == 'true'
origin = request.args.get('origin')
return self._mark_dagrun_state_as_failed(dag_id, execution_date,
confirmed, origin)
@expose('/dagrun_success')
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def dagrun_success(self):
dag_id = request.args.get('dag_id')
execution_date = request.args.get('execution_date')
confirmed = request.args.get('confirmed') == 'true'
origin = request.args.get('origin')
return self._mark_dagrun_state_as_success(dag_id, execution_date,
confirmed, origin)
def _mark_task_instance_state(self, dag_id, task_id, origin, execution_date,
confirmed, upstream, downstream,
future, past, state):
dag = dagbag.get_dag(dag_id)
task = dag.get_task(task_id)
task.dag = dag
execution_date = pendulum.parse(execution_date)
if not dag:
flash("Cannot find DAG: {}".format(dag_id))
return redirect(origin)
if not task:
flash("Cannot find task {} in DAG {}".format(task_id, dag.dag_id))
return redirect(origin)
from airflow.api.common.experimental.mark_tasks import set_state
if confirmed:
altered = set_state(task=task, execution_date=execution_date,
upstream=upstream, downstream=downstream,
future=future, past=past, state=state,
commit=True)
flash("Marked {} on {} task instances".format(state, len(altered)))
return redirect(origin)
to_be_altered = set_state(task=task, execution_date=execution_date,
upstream=upstream, downstream=downstream,
future=future, past=past, state=state,
commit=False)
details = "\n".join([str(t) for t in to_be_altered])
response = self.render("airflow/confirm.html",
message=("Here's the list of task instances you are "
"about to mark as {}:".format(state)),
details=details)
return response
@expose('/failed')
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def failed(self):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
origin = request.args.get('origin')
execution_date = request.args.get('execution_date')
confirmed = request.args.get('confirmed') == "true"
upstream = request.args.get('upstream') == "true"
downstream = request.args.get('downstream') == "true"
future = request.args.get('future') == "true"
past = request.args.get('past') == "true"
return self._mark_task_instance_state(dag_id, task_id, origin, execution_date,
confirmed, upstream, downstream,
future, past, State.FAILED)
@expose('/success')
@login_required
@wwwutils.action_logging
@wwwutils.notify_owner
def success(self):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
origin = request.args.get('origin')
execution_date = request.args.get('execution_date')
confirmed = request.args.get('confirmed') == "true"
upstream = request.args.get('upstream') == "true"
downstream = request.args.get('downstream') == "true"
future = request.args.get('future') == "true"
past = request.args.get('past') == "true"
return self._mark_task_instance_state(dag_id, task_id, origin, execution_date,
confirmed, upstream, downstream,
future, past, State.SUCCESS)
    @expose('/tree')
    @login_required
    @wwwutils.gzipped
    @wwwutils.action_logging
    @provide_session
    def tree(self, session=None):
        """
        Render the tree view: a D3 tree of the DAG's task hierarchy with
        one instance entry per recent dag run.
        """
        default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
        dag_id = request.args.get('dag_id')
        blur = conf.getboolean('webserver', 'demo_mode')
        dag = dagbag.get_dag(dag_id)
        if dag_id not in dagbag.dags:
            flash('DAG "{0}" seems to be missing.'.format(dag_id), "error")
            return redirect('/admin/')
        # Optional filter: show only `root` and its upstream tasks.
        root = request.args.get('root')
        if root:
            dag = dag.sub_dag(
                task_regex=root,
                include_downstream=False,
                include_upstream=True)
        base_date = request.args.get('base_date')
        num_runs = request.args.get('num_runs')
        num_runs = int(num_runs) if num_runs else default_dag_run
        if base_date:
            base_date = timezone.parse(base_date)
        else:
            base_date = dag.latest_execution_date or timezone.utcnow()
        DR = models.DagRun
        dag_runs = (
            session.query(DR)
            .filter(
                DR.dag_id == dag.dag_id,
                DR.execution_date <= base_date)
            .order_by(DR.execution_date.desc())
            .limit(num_runs)
            .all()
        )
        dag_runs = {
            dr.execution_date: alchemy_to_dict(dr) for dr in dag_runs}
        dates = sorted(list(dag_runs.keys()))
        max_date = max(dates) if dates else None
        min_date = min(dates) if dates else None
        tis = dag.get_task_instances(
            session, start_date=min_date, end_date=base_date)
        # Index task instances by (task_id, execution_date) for fast lookup
        # inside recurse_nodes below.
        task_instances = {}
        for ti in tis:
            tid = alchemy_to_dict(ti)
            dr = dag_runs.get(ti.execution_date)
            tid['external_trigger'] = dr['external_trigger'] if dr else False
            task_instances[(ti.task_id, ti.execution_date)] = tid
        expanded = []
        # The default recursion traces every path so that tree view has full
        # expand/collapse functionality. After 5,000 nodes we stop and fall
        # back on a quick DFS search for performance. See PR #320.
        node_count = [0]
        node_limit = 5000 / max(1, len(dag.roots))
        def recurse_nodes(task, visited):
            # Build one D3 node for `task`, recursing into its upstream list.
            visited.add(task)
            node_count[0] += 1
            children = [
                recurse_nodes(t, visited) for t in task.upstream_list
                if node_count[0] < node_limit or t not in visited]
            # D3 tree uses children vs _children to define what is
            # expanded or not. The following block makes it such that
            # repeated nodes are collapsed by default.
            children_key = 'children'
            if task.task_id not in expanded:
                expanded.append(task.task_id)
            elif children:
                children_key = "_children"
            def set_duration(tid):
                # Running instances get a live duration computed from now.
                if isinstance(tid, dict) and tid.get("state") == State.RUNNING \
                        and tid["start_date"] is not None:
                    d = timezone.utcnow() - pendulum.parse(tid["start_date"])
                    tid["duration"] = d.total_seconds()
                return tid
            return {
                'name': task.task_id,
                'instances': [
                    set_duration(task_instances.get((task.task_id, d))) or {
                        'execution_date': d.isoformat(),
                        'task_id': task.task_id
                    }
                    for d in dates],
                children_key: children,
                'num_dep': len(task.upstream_list),
                'operator': task.task_type,
                'retries': task.retries,
                'owner': task.owner,
                'start_date': task.start_date,
                'end_date': task.end_date,
                'depends_on_past': task.depends_on_past,
                'ui_color': task.ui_color,
            }
        data = {
            'name': '[DAG]',
            'children': [recurse_nodes(t, set()) for t in dag.roots],
            'instances': [dag_runs.get(d) or {'execution_date': d.isoformat()} for d in dates],
        }
        # minimize whitespace as this can be huge for bigger dags
        data = json.dumps(data, default=json_ser, separators=(',', ':'))
        session.commit()
        form = DateTimeWithNumRunsForm(data={'base_date': max_date,
                                             'num_runs': num_runs})
        return self.render(
            'airflow/tree.html',
            operators=sorted(
                list(set([op.__class__ for op in dag.tasks])),
                key=lambda x: x.__name__
            ),
            root=root,
            form=form,
            dag=dag, data=data, blur=blur, num_runs=num_runs)
    @expose('/graph')
    @login_required
    @wwwutils.gzipped
    @wwwutils.action_logging
    @provide_session
    def graph(self, session=None):
        """
        Render the graph view: dagre-d3 nodes/edges for the DAG plus task
        instance states for the selected dag run.
        """
        dag_id = request.args.get('dag_id')
        blur = conf.getboolean('webserver', 'demo_mode')
        dag = dagbag.get_dag(dag_id)
        if dag_id not in dagbag.dags:
            flash('DAG "{0}" seems to be missing.'.format(dag_id), "error")
            return redirect('/admin/')
        # Optional filter: show only `root` and its upstream tasks.
        root = request.args.get('root')
        if root:
            dag = dag.sub_dag(
                task_regex=root,
                include_upstream=True,
                include_downstream=False)
        arrange = request.args.get('arrange', dag.orientation)
        nodes = []
        edges = []
        for task in dag.tasks:
            nodes.append({
                'id': task.task_id,
                'value': {
                    'label': task.task_id,
                    'labelStyle': "fill:{0};".format(task.ui_fgcolor),
                    'style': "fill:{0};".format(task.ui_color),
                }
            })
        def get_upstream(task):
            # Walk upstream edges depth-first, de-duplicating as we go.
            for t in task.upstream_list:
                edge = {
                    'u': t.task_id,
                    'v': task.task_id,
                }
                if edge not in edges:
                    edges.append(edge)
                    get_upstream(t)
        for t in dag.roots:
            get_upstream(t)
        dt_nr_dr_data = get_date_time_num_runs_dag_runs_form_data(request, session, dag)
        dt_nr_dr_data['arrange'] = arrange
        dttm = dt_nr_dr_data['dttm']
        class GraphForm(DateTimeWithNumRunsWithDagRunsForm):
            # Extends the shared form with a layout-direction selector.
            arrange = SelectField("Layout", choices=(
                ('LR', "Left->Right"),
                ('RL', "Right->Left"),
                ('TB', "Top->Bottom"),
                ('BT', "Bottom->Top"),
            ))
        form = GraphForm(data=dt_nr_dr_data)
        form.execution_date.choices = dt_nr_dr_data['dr_choices']
        task_instances = {
            ti.task_id: alchemy_to_dict(ti)
            for ti in dag.get_task_instances(session, dttm, dttm)}
        tasks = {
            t.task_id: {
                'dag_id': t.dag_id,
                'task_type': t.task_type,
            }
            for t in dag.tasks}
        if not tasks:
            flash("No tasks found", "error")
        session.commit()
        doc_md = markdown.markdown(dag.doc_md) if hasattr(dag, 'doc_md') and dag.doc_md else ''
        return self.render(
            'airflow/graph.html',
            dag=dag,
            form=form,
            width=request.args.get('width', "100%"),
            height=request.args.get('height', "800"),
            execution_date=dttm.isoformat(),
            state_token=state_token(dt_nr_dr_data['dr_state']),
            doc_md=doc_md,
            arrange=arrange,
            operators=sorted(
                list(set([op.__class__ for op in dag.tasks])),
                key=lambda x: x.__name__
            ),
            blur=blur,
            root=root or '',
            task_instances=json.dumps(task_instances, indent=2),
            tasks=json.dumps(tasks, indent=2),
            nodes=json.dumps(nodes, indent=2),
            edges=json.dumps(edges, indent=2), )
    @expose('/duration')
    @login_required
    @wwwutils.action_logging
    @provide_session
    def duration(self, session=None):
        """Render the task duration chart view.

        Produces two nvd3 line charts over the last ``num_runs`` runs: raw
        task durations, and cumulative durations that also include time spent
        in failed attempts (from the TaskFail table).
        """
        default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
        dag_id = request.args.get('dag_id')
        dag = dagbag.get_dag(dag_id)
        base_date = request.args.get('base_date')
        num_runs = request.args.get('num_runs')
        num_runs = int(num_runs) if num_runs else default_dag_run
        if dag is None:
            flash('DAG "{0}" seems to be missing.'.format(dag_id), "error")
            return redirect('/admin/')
        if base_date:
            base_date = pendulum.parse(base_date)
        else:
            base_date = dag.latest_execution_date or timezone.utcnow()
        # Walk the schedule backwards num_runs steps to find the window start.
        dates = dag.date_range(base_date, num=-abs(num_runs))
        min_date = dates[0] if dates else datetime(2000, 1, 1)
        root = request.args.get('root')
        if root:
            dag = dag.sub_dag(
                task_regex=root,
                include_upstream=True,
                include_downstream=False)
        chart_height = get_chart_height(dag)
        chart = nvd3.lineChart(
            name="lineChart", x_is_date=True, height=chart_height, width="1200")
        cum_chart = nvd3.lineChart(
            name="cumLineChart", x_is_date=True, height=chart_height, width="1200")
        y = defaultdict(list)
        x = defaultdict(list)
        cum_y = defaultdict(list)
        tis = dag.get_task_instances(
            session, start_date=min_date, end_date=base_date)
        TF = models.TaskFail
        ti_fails = (
            session
            .query(TF)
            .filter(
                TF.dag_id == dag.dag_id,
                TF.execution_date >= min_date,
                TF.execution_date <= base_date,
                TF.task_id.in_([t.task_id for t in dag.tasks]))
            .all()
        )
        # Sum failed-attempt durations per (dag, task, execution_date) so
        # they can be added to the cumulative series below.
        fails_totals = defaultdict(int)
        for tf in ti_fails:
            dict_key = (tf.dag_id, tf.task_id, tf.execution_date)
            fails_totals[dict_key] += tf.duration
        for ti in tis:
            if ti.duration:
                dttm = wwwutils.epoch(ti.execution_date)
                x[ti.task_id].append(dttm)
                y[ti.task_id].append(float(ti.duration))
                fails_dict_key = (ti.dag_id, ti.task_id, ti.execution_date)
                fails_total = fails_totals[fails_dict_key]
                cum_y[ti.task_id].append(float(ti.duration + fails_total))
        # determine the most relevant time unit for the set of task instance
        # durations for the DAG
        y_unit = infer_time_unit([d for t in y.values() for d in t])
        cum_y_unit = infer_time_unit([d for t in cum_y.values() for d in t])
        # update the y Axis on both charts to have the correct time units
        chart.create_y_axis('yAxis', format='.02f', custom_format=False,
                            label='Duration ({})'.format(y_unit))
        chart.axislist['yAxis']['axisLabelDistance'] = '40'
        cum_chart.create_y_axis('yAxis', format='.02f', custom_format=False,
                                label='Duration ({})'.format(cum_y_unit))
        cum_chart.axislist['yAxis']['axisLabelDistance'] = '40'
        for task in dag.tasks:
            if x[task.task_id]:
                chart.add_serie(name=task.task_id, x=x[task.task_id],
                                y=scale_time_units(y[task.task_id], y_unit))
                cum_chart.add_serie(name=task.task_id, x=x[task.task_id],
                                    y=scale_time_units(cum_y[task.task_id],
                                                       cum_y_unit))
        dates = sorted(list({ti.execution_date for ti in tis}))
        max_date = max([ti.execution_date for ti in tis]) if dates else None
        session.commit()
        form = DateTimeWithNumRunsForm(data={'base_date': max_date,
                                             'num_runs': num_runs})
        chart.buildcontent()
        cum_chart.buildcontent()
        # Splice a 'chartload' trigger into the generated JS so the page can
        # react once the cumulative chart has rendered.
        s_index = cum_chart.htmlcontent.rfind('});')
        cum_chart.htmlcontent = (cum_chart.htmlcontent[:s_index] +
                                 "$(function() {$( document ).trigger('chartload') })" +
                                 cum_chart.htmlcontent[s_index:])
        return self.render(
            'airflow/duration_chart.html',
            dag=dag,
            demo_mode=conf.getboolean('webserver', 'demo_mode'),
            root=root,
            form=form,
            chart=chart.htmlcontent,
            cum_chart=cum_chart.htmlcontent
        )
@expose('/tries')
@login_required
@wwwutils.action_logging
@provide_session
def tries(self, session=None):
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
base_date = request.args.get('base_date')
num_runs = request.args.get('num_runs')
num_runs = int(num_runs) if num_runs else default_dag_run
if base_date:
base_date = pendulum.parse(base_date)
else:
base_date = dag.latest_execution_date or timezone.utcnow()
dates = dag.date_range(base_date, num=-abs(num_runs))
min_date = dates[0] if dates else datetime(2000, 1, 1)
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
chart_height = get_chart_height(dag)
chart = nvd3.lineChart(
name="lineChart", x_is_date=True, y_axis_format='d', height=chart_height,
width="1200")
for task in dag.tasks:
y = []
x = []
for ti in task.get_task_instances(session, start_date=min_date,
end_date=base_date):
dttm = wwwutils.epoch(ti.execution_date)
x.append(dttm)
y.append(ti.try_number)
if x:
chart.add_serie(name=task.task_id, x=x, y=y)
tis = dag.get_task_instances(
session, start_date=min_date, end_date=base_date)
tries = sorted(list({ti.try_number for ti in tis}))
max_date = max([ti.execution_date for ti in tis]) if tries else None
session.commit()
form = DateTimeWithNumRunsForm(data={'base_date': max_date,
'num_runs': num_runs})
chart.buildcontent()
return self.render(
'airflow/chart.html',
dag=dag,
demo_mode=conf.getboolean('webserver', 'demo_mode'),
root=root,
form=form,
chart=chart.htmlcontent
)
@expose('/landing_times')
@login_required
@wwwutils.action_logging
@provide_session
def landing_times(self, session=None):
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
base_date = request.args.get('base_date')
num_runs = request.args.get('num_runs')
num_runs = int(num_runs) if num_runs else default_dag_run
if base_date:
base_date = pendulum.parse(base_date)
else:
base_date = dag.latest_execution_date or timezone.utcnow()
dates = dag.date_range(base_date, num=-abs(num_runs))
min_date = dates[0] if dates else datetime(2000, 1, 1)
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
chart_height = get_chart_height(dag)
chart = nvd3.lineChart(
name="lineChart", x_is_date=True, height=chart_height, width="1200")
y = {}
x = {}
for task in dag.tasks:
y[task.task_id] = []
x[task.task_id] = []
for ti in task.get_task_instances(session, start_date=min_date,
end_date=base_date):
if ti.end_date:
ts = ti.execution_date
following_schedule = dag.following_schedule(ts)
if dag.schedule_interval and following_schedule:
ts = following_schedule
dttm = wwwutils.epoch(ti.execution_date)
secs = (ti.end_date - ts).total_seconds()
x[ti.task_id].append(dttm)
y[ti.task_id].append(secs)
# determine the most relevant time unit for the set of landing times
# for the DAG
y_unit = infer_time_unit([d for t in y.values() for d in t])
# update the y Axis to have the correct time units
chart.create_y_axis('yAxis', format='.02f', custom_format=False,
label='Landing Time ({})'.format(y_unit))
chart.axislist['yAxis']['axisLabelDistance'] = '40'
for task in dag.tasks:
if x[task.task_id]:
chart.add_serie(name=task.task_id, x=x[task.task_id],
y=scale_time_units(y[task.task_id], y_unit))
tis = dag.get_task_instances(
session, start_date=min_date, end_date=base_date)
dates = sorted(list({ti.execution_date for ti in tis}))
max_date = max([ti.execution_date for ti in tis]) if dates else None
form = DateTimeWithNumRunsForm(data={'base_date': max_date,
'num_runs': num_runs})
chart.buildcontent()
return self.render(
'airflow/chart.html',
dag=dag,
chart=chart.htmlcontent,
height=str(chart_height + 100) + "px",
demo_mode=conf.getboolean('webserver', 'demo_mode'),
root=root,
form=form,
)
@expose('/paused', methods=['POST'])
@login_required
@wwwutils.action_logging
@provide_session
def paused(self, session=None):
DagModel = models.DagModel
dag_id = request.args.get('dag_id')
orm_dag = session.query(
DagModel).filter(DagModel.dag_id == dag_id).first()
if request.args.get('is_paused') == 'false':
orm_dag.is_paused = True
else:
orm_dag.is_paused = False
session.merge(orm_dag)
session.commit()
dagbag.get_dag(dag_id)
return "OK"
@expose('/refresh')
@login_required
@wwwutils.action_logging
@provide_session
def refresh(self, session=None):
DagModel = models.DagModel
dag_id = request.args.get('dag_id')
orm_dag = session.query(
DagModel).filter(DagModel.dag_id == dag_id).first()
if orm_dag:
orm_dag.last_expired = timezone.utcnow()
session.merge(orm_dag)
session.commit()
models.DagStat.update([dag_id], session=session, dirty_only=False)
dagbag.get_dag(dag_id)
flash("DAG [{}] is now fresh as a daisy".format(dag_id))
return redirect(request.referrer)
    @expose('/refresh_all')
    @login_required
    @wwwutils.action_logging
    def refresh_all(self):
        """Re-collect every DAG file, even if it has not changed on disk."""
        dagbag.collect_dags(only_if_updated=False)
        flash("All DAGs are now up to date")
        return redirect('/')
    @expose('/gantt')
    @login_required
    @wwwutils.action_logging
    @provide_session
    def gantt(self, session=None):
        """Render the Gantt chart view for one dag run.

        Builds one bar per task attempt: successful/current attempts come
        from TaskInstance rows, earlier failed attempts from TaskFail rows,
        and reschedules are folded into the bar of their attempt.
        """
        dag_id = request.args.get('dag_id')
        dag = dagbag.get_dag(dag_id)
        demo_mode = conf.getboolean('webserver', 'demo_mode')
        root = request.args.get('root')
        if root:
            dag = dag.sub_dag(
                task_regex=root,
                include_upstream=True,
                include_downstream=False)
        dt_nr_dr_data = get_date_time_num_runs_dag_runs_form_data(request, session, dag)
        dttm = dt_nr_dr_data['dttm']
        form = DateTimeWithNumRunsWithDagRunsForm(data=dt_nr_dr_data)
        form.execution_date.choices = dt_nr_dr_data['dr_choices']
        # Only instances that actually started can be drawn as bars.
        tis = [
            ti for ti in dag.get_task_instances(session, dttm, dttm)
            if ti.start_date]
        tis = sorted(tis, key=lambda ti: ti.start_date)
        TF = models.TaskFail
        ti_fails = list(itertools.chain(*[(
            session
            .query(TF)
            .filter(TF.dag_id == ti.dag_id,
                    TF.task_id == ti.task_id,
                    TF.execution_date == ti.execution_date)
            .all()
        ) for ti in tis]))
        TR = models.TaskReschedule
        ti_reschedules = list(itertools.chain(*[(
            session
            .query(TR)
            .filter(TR.dag_id == ti.dag_id,
                    TR.task_id == ti.task_id,
                    TR.execution_date == ti.execution_date)
            .all()
        ) for ti in tis]))
        # determine bars to show in the gantt chart
        # all reschedules of one attempt are combined into one bar
        gantt_bar_items = []
        for task_id, items in itertools.groupby(
                sorted(tis + ti_fails + ti_reschedules, key=lambda ti: ti.task_id),
                key=lambda ti: ti.task_id):
            start_date = None
            for i in sorted(items, key=lambda ti: ti.start_date):
                # start_date carries across reschedule rows until a real
                # TaskInstance or TaskFail closes the bar.
                start_date = start_date or i.start_date
                end_date = i.end_date or timezone.utcnow()
                if type(i) == models.TaskInstance:
                    gantt_bar_items.append((task_id, start_date, end_date, i.state))
                    start_date = None
                elif type(i) == TF and (len(gantt_bar_items) == 0 or
                                        end_date != gantt_bar_items[-1][2]):
                    gantt_bar_items.append((task_id, start_date, end_date, State.FAILED))
                    start_date = None
        tasks = []
        for gantt_bar_item in gantt_bar_items:
            task_id = gantt_bar_item[0]
            start_date = gantt_bar_item[1]
            end_date = gantt_bar_item[2]
            state = gantt_bar_item[3]
            tasks.append({
                'startDate': wwwutils.epoch(start_date),
                'endDate': wwwutils.epoch(end_date),
                # [:-4] trims sub-millisecond digits for display.
                'isoStart': start_date.isoformat()[:-4],
                'isoEnd': end_date.isoformat()[:-4],
                'taskName': task_id,
                'duration': "{}".format(end_date - start_date)[:-4],
                'status': state,
                'executionDate': dttm.isoformat(),
            })
        states = {task['status']: task['status'] for task in tasks}
        data = {
            'taskNames': [ti.task_id for ti in tis],
            'tasks': tasks,
            'taskStatus': states,
            'height': len(tis) * 25 + 25,
        }
        session.commit()
        return self.render(
            'airflow/gantt.html',
            dag=dag,
            execution_date=dttm.isoformat(),
            form=form,
            data=json.dumps(data, indent=2),
            base_date='',
            demo_mode=demo_mode,
            root=root,
        )
@expose('/object/task_instances')
@login_required
@wwwutils.action_logging
@provide_session
def task_instances(self, session=None):
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
dttm = request.args.get('execution_date')
if dttm:
dttm = pendulum.parse(dttm)
else:
return "Error: Invalid execution_date"
task_instances = {
ti.task_id: alchemy_to_dict(ti)
for ti in dag.get_task_instances(session, dttm, dttm)}
return json.dumps(task_instances)
    @expose('/variables/<form>', methods=["GET", "POST"])
    @login_required
    @wwwutils.action_logging
    def variables(self, form):
        """Serve a custom variable-editing form, or store its POSTed JSON.

        ``form`` names a template under airflow/variables/; any failure
        (including a missing template) falls through to a 404 response.
        """
        try:
            if request.method == 'POST':
                data = request.json
                if data:
                    with create_session() as session:
                        var = models.Variable(key=form, val=json.dumps(data))
                        session.add(var)
                        session.commit()
                return ""
            else:
                return self.render(
                    'airflow/variables/{}.html'.format(form)
                )
        except Exception:
            # prevent XSS
            form = escape(form)
            return ("Error: form airflow/variables/{}.html "
                    "not found.").format(form), 404
@expose('/varimport', methods=["GET", "POST"])
@login_required
@wwwutils.action_logging
def varimport(self):
try:
d = json.load(UTF8_READER(request.files['file']))
except Exception as e:
flash("Missing file or syntax error: {}.".format(e))
else:
suc_count = fail_count = 0
for k, v in d.items():
try:
models.Variable.set(k, v, serialize_json=isinstance(v, dict))
except Exception as e:
logging.info('Variable import failed: {}'.format(repr(e)))
fail_count += 1
else:
suc_count += 1
flash("{} variable(s) successfully updated.".format(suc_count), 'info')
if fail_count:
flash(
"{} variables(s) failed to be updated.".format(fail_count), 'error')
return redirect('/admin/variable')
class HomeView(AdminIndexView):
    """Landing page: the paginated, searchable DAG list."""
    @expose("/")
    @login_required
    @provide_session
    def index(self, session=None):
        """Render the DAG list, combining DB state (DagModel rows) with the
        in-process dagbag, applying owner/paused/search filters and paging."""
        DM = models.DagModel
        # restrict the dags shown if filter_by_owner and current user is not superuser
        do_filter = FILTER_BY_OWNER and (not current_user.is_superuser())
        owner_mode = conf.get('webserver', 'OWNER_MODE').strip().lower()
        hide_paused_dags_by_default = conf.getboolean('webserver',
                                                      'hide_paused_dags_by_default')
        show_paused_arg = request.args.get('showPaused', 'None')
        def get_int_arg(value, default=0):
            # Parse an int query-string arg, falling back on bad input.
            try:
                return int(value)
            except ValueError:
                return default
        arg_current_page = request.args.get('page', '0')
        arg_search_query = request.args.get('search', None)
        dags_per_page = PAGE_SIZE
        current_page = get_int_arg(arg_current_page, default=0)
        # Explicit showPaused overrides the configured default.
        if show_paused_arg.strip().lower() == 'false':
            hide_paused = True
        elif show_paused_arg.strip().lower() == 'true':
            hide_paused = False
        else:
            hide_paused = hide_paused_dags_by_default
        # read orm_dags from the db
        sql_query = session.query(DM)
        if do_filter and owner_mode == 'ldapgroup':
            sql_query = sql_query.filter(
                ~DM.is_subdag,
                DM.is_active,
                DM.owners.in_(current_user.ldap_groups)
            )
        elif do_filter and owner_mode == 'user':
            sql_query = sql_query.filter(
                ~DM.is_subdag, DM.is_active,
                DM.owners == current_user.user.username
            )
        else:
            sql_query = sql_query.filter(
                ~DM.is_subdag, DM.is_active
            )
        # optionally filter out "paused" dags
        if hide_paused:
            sql_query = sql_query.filter(~DM.is_paused)
        orm_dags = {dag.dag_id: dag for dag
                    in sql_query
                    .all()}
        # Surface DAG-file parse errors as flash messages.
        import_errors = session.query(models.ImportError).all()
        for ie in import_errors:
            flash(
                "Broken DAG: [{ie.filename}] {ie.stacktrace}".format(ie=ie),
                "error")
        # get a list of all non-subdag dags visible to everyone
        # optionally filter out "paused" dags
        if hide_paused:
            unfiltered_webserver_dags = [dag for dag in dagbag.dags.values() if
                                         not dag.parent_dag and not dag.is_paused]
        else:
            unfiltered_webserver_dags = [dag for dag in dagbag.dags.values() if
                                         not dag.parent_dag]
        # optionally filter to get only dags that the user should see
        if do_filter and owner_mode == 'ldapgroup':
            # only show dags owned by someone in @current_user.ldap_groups
            webserver_dags = {
                dag.dag_id: dag
                for dag in unfiltered_webserver_dags
                if dag.owner in current_user.ldap_groups
            }
        elif do_filter and owner_mode == 'user':
            # only show dags owned by @current_user.user.username
            webserver_dags = {
                dag.dag_id: dag
                for dag in unfiltered_webserver_dags
                if dag.owner == current_user.user.username
            }
        else:
            webserver_dags = {
                dag.dag_id: dag
                for dag in unfiltered_webserver_dags
            }
        # Case-insensitive search over dag_id and owner.
        if arg_search_query:
            lower_search_query = arg_search_query.lower()
            # filter by dag_id
            webserver_dags_filtered = {
                dag_id: dag
                for dag_id, dag in webserver_dags.items()
                if (lower_search_query in dag_id.lower() or
                    lower_search_query in dag.owner.lower())
            }
            all_dag_ids = (set([dag.dag_id for dag in orm_dags.values()
                               if lower_search_query in dag.dag_id.lower() or
                               lower_search_query in dag.owners.lower()]) |
                           set(webserver_dags_filtered.keys()))
            sorted_dag_ids = sorted(all_dag_ids)
        else:
            webserver_dags_filtered = webserver_dags
            sorted_dag_ids = sorted(set(orm_dags.keys()) | set(webserver_dags.keys()))
        start = current_page * dags_per_page
        end = start + dags_per_page
        num_of_all_dags = len(sorted_dag_ids)
        page_dag_ids = sorted_dag_ids[start:end]
        num_of_pages = int(math.ceil(num_of_all_dags / float(dags_per_page)))
        # Feed the search box's autocomplete with every id and owner seen.
        auto_complete_data = set()
        for dag in webserver_dags_filtered.values():
            auto_complete_data.add(dag.dag_id)
            auto_complete_data.add(dag.owner)
        for dag in orm_dags.values():
            auto_complete_data.add(dag.dag_id)
            auto_complete_data.add(dag.owners)
        return self.render(
            'airflow/dags.html',
            webserver_dags=webserver_dags_filtered,
            orm_dags=orm_dags,
            hide_paused=hide_paused,
            current_page=current_page,
            search_query=arg_search_query if arg_search_query else '',
            page_size=dags_per_page,
            num_of_pages=num_of_pages,
            num_dag_from=start + 1,
            num_dag_to=min(end, num_of_all_dags),
            num_of_all_dags=num_of_all_dags,
            paging=wwwutils.generate_pages(current_page, num_of_pages,
                                           search=arg_search_query,
                                           showPaused=not hide_paused),
            dag_ids_in_page=page_dag_ids,
            auto_complete_data=auto_complete_data)
class QueryView(wwwutils.DataProfilingMixin, BaseView):
    """Ad-hoc SQL query UI: run user-supplied SQL against a Connection."""
    @expose('/', methods=['POST', 'GET'])
    @wwwutils.gzipped
    @provide_session
    def query(self, session=None):
        """Execute the posted SQL (LIMIT-capped) and render the result table,
        or stream it back as CSV when requested."""
        # Only connections that resolve to a hook are selectable.
        dbs = session.query(models.Connection).order_by(
            models.Connection.conn_id).all()
        session.expunge_all()
        db_choices = list(
            ((db.conn_id, db.conn_id) for db in dbs if db.get_hook()))
        conn_id_str = request.form.get('conn_id')
        csv = request.form.get('csv') == "true"
        sql = request.form.get('sql')
        class QueryForm(Form):
            conn_id = SelectField("Layout", choices=db_choices)
            sql = TextAreaField("SQL", widget=wwwutils.AceEditorWidget())
        data = {
            'conn_id': conn_id_str,
            'sql': sql,
        }
        results = None
        has_data = False
        error = False
        if conn_id_str:
            db = [db for db in dbs if db.conn_id == conn_id_str][0]
            hook = db.get_hook()
            try:
                # The SQL is wrapped with a LIMIT to cap the result size.
                df = hook.get_pandas_df(wwwutils.limit_sql(sql, QUERY_LIMIT, conn_type=db.conn_type))
                # df = hook.get_pandas_df(sql)
                has_data = len(df) > 0
                df = df.fillna('')
                results = df.to_html(
                    classes=[
                        'table', 'table-bordered', 'table-striped', 'no-wrap'],
                    index=False,
                    na_rep='',
                ) if has_data else ''
            except Exception as e:
                flash(str(e), 'error')
                error = True
        if has_data and len(df) == QUERY_LIMIT:
            flash(
                "Query output truncated at " + str(QUERY_LIMIT) +
                " rows", 'info')
        if not has_data and error:
            flash('No data', 'error')
        if csv:
            # NOTE(review): if the query raised or no conn_id was posted,
            # `df` is unbound here and this raises NameError — confirm
            # whether csv requests are only ever sent with a valid query.
            return Response(
                response=df.to_csv(index=False),
                status=200,
                mimetype="application/text")
        form = QueryForm(request.form, data=data)
        session.commit()
        return self.render(
            'airflow/query.html', form=form,
            title="Ad Hoc Query",
            results=results or '',
            has_data=has_data)
class AirflowModelView(ModelView):
    """Base flask-admin ModelView: Airflow templates plus shared page size."""
    list_template = 'airflow/model_list.html'
    edit_template = 'airflow/model_edit.html'
    create_template = 'airflow/model_create.html'
    column_display_actions = True
    page_size = PAGE_SIZE
class ModelViewOnly(wwwutils.LoginMixin, AirflowModelView):
    """
    Modifying the base ModelView class for non edit, browse only operations
    """
    named_filter_urls = True
    # Read-only: disable all create/edit/delete actions.
    can_create = False
    can_edit = False
    can_delete = False
    column_display_pk = True
class PoolModelView(wwwutils.SuperUserMixin, AirflowModelView):
    """CRUD view for worker slot Pools (superuser only)."""
    column_list = ('pool', 'slots', 'used_slots', 'queued_slots')
    # used/queued slot counts are rendered via custom link formatters.
    column_formatters = dict(
        pool=pool_link, used_slots=fused_slots, queued_slots=fqueued_slots)
    named_filter_urls = True
    form_args = {
        'pool': {
            'validators': [
                validators.DataRequired(),
            ]
        }
    }
class SlaMissModelView(wwwutils.SuperUserMixin, ModelViewOnly):
    """Browse-only view over recorded SLA misses (superuser only)."""
    verbose_name_plural = "SLA misses"
    verbose_name = "SLA miss"
    column_list = (
        'dag_id', 'task_id', 'execution_date', 'email_sent', 'timestamp')
    column_formatters = dict(
        task_id=task_instance_link,
        execution_date=datetime_f,
        timestamp=datetime_f,
        dag_id=dag_link)
    named_filter_urls = True
    column_searchable_list = ('dag_id', 'task_id',)
    column_filters = (
        'dag_id', 'task_id', 'email_sent', 'timestamp', 'execution_date')
    filter_converter = wwwutils.UtcFilterConverter()
    # These fields are system-managed; show them but block editing.
    form_widget_args = {
        'email_sent': {'disabled': True},
        'timestamp': {'disabled': True},
    }
@provide_session
def _connection_ids(session=None):
    """Return (conn_id, conn_id) choice tuples for every distinct connection."""
    distinct_conns = (session
                      .query(models.Connection.conn_id)
                      .group_by(models.Connection.conn_id))
    return [(row.conn_id, row.conn_id) for row in distinct_conns]
class ChartModelView(wwwutils.DataProfilingMixin, AirflowModelView):
    """CRUD view for user-defined Charts (ad-hoc SQL visualizations)."""
    verbose_name = "chart"
    verbose_name_plural = "charts"
    form_columns = (
        'label',
        'owner',
        'conn_id',
        'chart_type',
        'show_datatable',
        'x_is_date',
        'y_log_scale',
        'show_sql',
        'height',
        'sql_layout',
        'sql',
        'default_params',
    )
    column_list = (
        'label',
        'conn_id',
        'chart_type',
        'owner',
        'last_modified',
    )
    column_sortable_list = (
        'label',
        'conn_id',
        'chart_type',
        ('owner', 'owner.username'),
        'last_modified',
    )
    column_formatters = dict(label=label_link, last_modified=datetime_f)
    column_default_sort = ('last_modified', True)
    create_template = 'airflow/chart/create.html'
    edit_template = 'airflow/chart/edit.html'
    column_filters = ('label', 'owner.username', 'conn_id')
    column_searchable_list = ('owner.username', 'label', 'sql')
    # Help texts shown in the form UI for each field.
    column_descriptions = {
        'label': "Can include {{ templated_fields }} and {{ macros }}",
        'chart_type': "The type of chart to be displayed",
        'sql': "Can include {{ templated_fields }} and {{ macros }}.",
        'height': "Height of the chart, in pixels.",
        'conn_id': "Source database to run the query against",
        'x_is_date': (
            "Whether the X axis should be casted as a date field. Expect most "
            "intelligible date formats to get casted properly."
        ),
        'owner': (
            "The chart's owner, mostly used for reference and filtering in "
            "the list view."
        ),
        'show_datatable':
            "Whether to display an interactive data table under the chart.",
        'default_params': (
            'A dictionary of {"key": "values",} that define what the '
            'templated fields (parameters) values should be by default. '
            'To be valid, it needs to "eval" as a Python dict. '
            'The key values will show up in the url\'s querystring '
            'and can be altered there.'
        ),
        'show_sql': "Whether to display the SQL statement as a collapsible "
                    "section in the chart page.",
        'y_log_scale': "Whether to use a log scale for the Y axis.",
        'sql_layout': (
            "Defines the layout of the SQL that the application should "
            "expect. Depending on the tables you are sourcing from, it may "
            "make more sense to pivot / unpivot the metrics."
        ),
    }
    column_labels = {
        'sql': "SQL",
        'height': "Chart Height",
        'sql_layout': "SQL Layout",
        'show_sql': "Display the SQL Statement",
        'default_params': "Default Parameters",
    }
    form_choices = {
        'chart_type': [
            ('line', 'Line Chart'),
            ('spline', 'Spline Chart'),
            ('bar', 'Bar Chart'),
            ('column', 'Column Chart'),
            ('area', 'Overlapping Area Chart'),
            ('stacked_area', 'Stacked Area Chart'),
            ('percent_area', 'Percent Area Chart'),
            ('datatable', 'No chart, data table only'),
        ],
        'sql_layout': [
            ('series', 'SELECT series, x, y FROM ...'),
            ('columns', 'SELECT x, y (series 1), y (series 2), ... FROM ...'),
        ],
        # NOTE: evaluated once at class definition time, not per request.
        'conn_id': _connection_ids()
    }
    def on_model_change(self, form, model, is_created=True):
        """Stamp bookkeeping fields (iteration counter, owner, mtime) on save."""
        if model.iteration_no is None:
            model.iteration_no = 0
        else:
            model.iteration_no += 1
        if not model.user_id and current_user and hasattr(current_user, 'id'):
            model.user_id = current_user.id
        model.last_modified = timezone.utcnow()
# Maps a Chart.chart_type value to the nvd3 chart model used to render it.
chart_mapping = {
    'line': 'lineChart',
    'spline': 'lineChart',
    'bar': 'multiBarChart',
    'column': 'multiBarChart',
    'area': 'stackedAreaChart',
    'stacked_area': 'stackedAreaChart',
    'percent_area': 'stackedAreaChart',
    'datatable': 'datatable',
}
class KnownEventView(wwwutils.DataProfilingMixin, AirflowModelView):
    """CRUD view for Known Events (annotated time windows, e.g. outages)."""
    verbose_name = "known event"
    verbose_name_plural = "known events"
    form_columns = (
        'label',
        'event_type',
        'start_date',
        'end_date',
        'reported_by',
        'description',
    )
    form_args = {
        'label': {
            'validators': [
                validators.DataRequired(),
            ],
        },
        'event_type': {
            'validators': [
                validators.DataRequired(),
            ],
        },
        'start_date': {
            'validators': [
                validators.DataRequired(),
            ],
            'filters': [
                parse_datetime_f,
            ],
        },
        'end_date': {
            'validators': [
                validators.DataRequired(),
                # Reject windows that end before they start.
                GreaterEqualThan(fieldname='start_date'),
            ],
            'filters': [
                parse_datetime_f,
            ]
        },
        'reported_by': {
            'validators': [
                validators.DataRequired(),
            ],
        }
    }
    column_list = (
        'label',
        'event_type',
        'start_date',
        'end_date',
        'reported_by',
    )
    column_default_sort = ("start_date", True)
    column_sortable_list = (
        'label',
        # todo: yes this has a spelling error
        ('event_type', 'event_type.know_event_type'),
        'start_date',
        'end_date',
        ('reported_by', 'reported_by.username'),
    )
    filter_converter = wwwutils.UtcFilterConverter()
    form_overrides = dict(start_date=DateTimeField, end_date=DateTimeField)
class KnownEventTypeView(wwwutils.DataProfilingMixin, AirflowModelView):
    """Plain CRUD view with no customization beyond the base defaults."""
    pass
# NOTE: For debugging / troubleshooting
# mv = KnownEventTypeView(
# models.KnownEventType,
# Session, name="Known Event Types", category="Manage")
# admin.add_view(mv)
# class DagPickleView(SuperUserMixin, ModelView):
# pass
# mv = DagPickleView(
# models.DagPickle,
# Session, name="Pickles", category="Manage")
# admin.add_view(mv)
class VariableView(wwwutils.DataProfilingMixin, AirflowModelView):
    """CRUD view for Variables; masks values of sensitive-looking keys."""
    verbose_name = "Variable"
    verbose_name_plural = "Variables"
    list_template = 'airflow/variable_list.html'
    def hidden_field_formatter(view, context, model, name):
        """Render the value column, masking keys flagged as sensitive."""
        if wwwutils.should_hide_value_for_key(model.key):
            return Markup('*' * 8)
        val = getattr(model, name)
        if val:
            return val
        else:
            # Typically indicates an undecryptable encrypted value.
            return Markup('<span class="label label-danger">Invalid</span>')
    form_columns = (
        'key',
        'val',
    )
    column_list = ('key', 'val', 'is_encrypted',)
    column_filters = ('key', 'val')
    column_searchable_list = ('key', 'val', 'is_encrypted',)
    column_default_sort = ('key', False)
    form_widget_args = {
        'is_encrypted': {'disabled': True},
        'val': {
            'rows': 20,
        }
    }
    # NOTE(review): 'validators' is a set literal here, a list elsewhere in
    # this file — works because it only needs to be iterable, but confirm.
    form_args = {
        'key': {
            'validators': {
                validators.DataRequired(),
            },
        },
    }
    column_sortable_list = (
        'key',
        'val',
        'is_encrypted',
    )
    column_formatters = {
        'val': hidden_field_formatter,
    }
    # Default flask-admin export functionality doesn't handle serialized json
    @action('varexport', 'Export', None)
    @provide_session
    def action_varexport(self, ids, session=None):
        """Download the selected Variables as a JSON attachment."""
        V = models.Variable
        qry = session.query(V).filter(V.id.in_(ids)).all()
        var_dict = {}
        d = json.JSONDecoder()
        for var in qry:
            val = None
            # Values that decode as JSON are exported structured; others raw.
            try:
                val = d.decode(var.val)
            except Exception:
                val = var.val
            var_dict[var.key] = val
        response = make_response(json.dumps(var_dict, sort_keys=True, indent=4))
        response.headers["Content-Disposition"] = "attachment; filename=variables.json"
        return response
    def on_form_prefill(self, form, id):
        """Mask sensitive values in the edit form before rendering."""
        if wwwutils.should_hide_value_for_key(form.key.data):
            form.val.data = '*' * 8
class XComView(wwwutils.SuperUserMixin, AirflowModelView):
    """CRUD view over XCom records (superuser only)."""
    verbose_name = "XCom"
    verbose_name_plural = "XComs"
    form_columns = (
        'key',
        'value',
        'execution_date',
        'task_id',
        'dag_id',
    )
    # Edit the (pickled/binary) value through a plain string field.
    form_extra_fields = {
        'value': StringField('Value'),
    }
    form_args = {
        'execution_date': {
            'filters': [
                parse_datetime_f,
            ]
        }
    }
    column_filters = ('key', 'timestamp', 'execution_date', 'task_id', 'dag_id')
    column_searchable_list = ('key', 'timestamp', 'execution_date', 'task_id', 'dag_id')
    filter_converter = wwwutils.UtcFilterConverter()
    form_overrides = dict(execution_date=DateTimeField)
class JobModelView(ModelViewOnly):
    """Browse-only view over scheduler/webserver/worker Job records."""
    verbose_name_plural = "jobs"
    verbose_name = "job"
    column_display_actions = False
    column_default_sort = ('start_date', True)
    column_filters = (
        'job_type', 'dag_id', 'state',
        'unixname', 'hostname', 'start_date', 'end_date', 'latest_heartbeat')
    column_formatters = dict(
        start_date=datetime_f,
        end_date=datetime_f,
        hostname=nobr_f,
        state=state_f,
        latest_heartbeat=datetime_f)
    filter_converter = wwwutils.UtcFilterConverter()
class DagRunModelView(ModelViewOnly):
    """DAG Run list with bulk actions to delete runs or force their state.

    State changes cascade to the run's task instances via the
    set_dag_run_state_to_* helpers, and DagStat is refreshed afterwards.
    """
    verbose_name_plural = "DAG Runs"
    # Unlike the read-only base, dag runs may be created and edited.
    can_edit = True
    can_create = True
    column_editable_list = ('state',)
    verbose_name = "dag run"
    column_default_sort = ('execution_date', True)
    form_choices = {
        'state': [
            ('success', 'success'),
            ('running', 'running'),
            ('failed', 'failed'),
        ],
    }
    form_args = dict(
        dag_id=dict(validators=[validators.DataRequired()])
    )
    column_list = (
        'state', 'dag_id', 'execution_date', 'run_id', 'external_trigger')
    column_filters = column_list
    filter_converter = wwwutils.UtcFilterConverter()
    column_searchable_list = ('dag_id', 'state', 'run_id')
    column_formatters = dict(
        execution_date=datetime_f,
        state=state_f,
        start_date=datetime_f,
        dag_id=dag_link,
        run_id=dag_run_link
    )
    @action('new_delete', "Delete", "Are you sure you want to delete selected records?")
    @provide_session
    def action_new_delete(self, ids, session=None):
        """Delete the selected dag runs and refresh stats for their DAGs."""
        # Capture the rows first so dag_ids are known after the bulk delete.
        deleted = set(session.query(models.DagRun)
                      .filter(models.DagRun.id.in_(ids))
                      .all())
        session.query(models.DagRun) \
            .filter(models.DagRun.id.in_(ids)) \
            .delete(synchronize_session='fetch')
        session.commit()
        dirty_ids = []
        for row in deleted:
            dirty_ids.append(row.dag_id)
        models.DagStat.update(dirty_ids, dirty_only=False, session=session)
    @action('set_running', "Set state to 'running'", None)
    @provide_session
    def action_set_running(self, ids, session=None):
        """Force the selected dag runs to RUNNING and reset their start date."""
        try:
            DR = models.DagRun
            count = 0
            dirty_ids = []
            for dr in session.query(DR).filter(DR.id.in_(ids)).all():
                dirty_ids.append(dr.dag_id)
                count += 1
                dr.state = State.RUNNING
                dr.start_date = timezone.utcnow()
            models.DagStat.update(dirty_ids, session=session)
            flash(
                "{count} dag runs were set to running".format(**locals()))
        except Exception as ex:
            if not self.handle_view_exception(ex):
                raise Exception("Ooops")
            flash('Failed to set state', 'error')
    @action('set_failed', "Set state to 'failed'",
            "All running task instances would also be marked as failed, are you sure?")
    @provide_session
    def action_set_failed(self, ids, session=None):
        """Force the selected dag runs (and their running TIs) to FAILED."""
        try:
            DR = models.DagRun
            count = 0
            dirty_ids = []
            altered_tis = []
            for dr in session.query(DR).filter(DR.id.in_(ids)).all():
                dirty_ids.append(dr.dag_id)
                count += 1
                altered_tis += \
                    set_dag_run_state_to_failed(dagbag.get_dag(dr.dag_id),
                                                dr.execution_date,
                                                commit=True,
                                                session=session)
            models.DagStat.update(dirty_ids, session=session)
            altered_ti_count = len(altered_tis)
            flash(
                "{count} dag runs and {altered_ti_count} task instances "
                "were set to failed".format(**locals()))
        except Exception as ex:
            if not self.handle_view_exception(ex):
                raise Exception("Ooops")
            flash('Failed to set state', 'error')
    @action('set_success', "Set state to 'success'",
            "All task instances would also be marked as success, are you sure?")
    @provide_session
    def action_set_success(self, ids, session=None):
        """Force the selected dag runs (and their TIs) to SUCCESS."""
        try:
            DR = models.DagRun
            count = 0
            dirty_ids = []
            altered_tis = []
            for dr in session.query(DR).filter(DR.id.in_(ids)).all():
                dirty_ids.append(dr.dag_id)
                count += 1
                altered_tis += \
                    set_dag_run_state_to_success(dagbag.get_dag(dr.dag_id),
                                                 dr.execution_date,
                                                 commit=True,
                                                 session=session)
            models.DagStat.update(dirty_ids, session=session)
            altered_ti_count = len(altered_tis)
            flash(
                "{count} dag runs and {altered_ti_count} task instances "
                "were set to success".format(**locals()))
        except Exception as ex:
            if not self.handle_view_exception(ex):
                raise Exception("Ooops")
            flash('Failed to set state', 'error')
    # Called after editing DagRun model in the UI.
    @provide_session
    def after_model_change(self, form, dagrun, is_created, session=None):
        """Propagate a state edited in the UI down to the run's task instances."""
        altered_tis = []
        if dagrun.state == State.SUCCESS:
            altered_tis = set_dag_run_state_to_success(
                dagbag.get_dag(dagrun.dag_id),
                dagrun.execution_date,
                commit=True,
                session=session)
        elif dagrun.state == State.FAILED:
            altered_tis = set_dag_run_state_to_failed(
                dagbag.get_dag(dagrun.dag_id),
                dagrun.execution_date,
                commit=True,
                session=session)
        elif dagrun.state == State.RUNNING:
            altered_tis = set_dag_run_state_to_running(
                dagbag.get_dag(dagrun.dag_id),
                dagrun.execution_date,
                commit=True,
                session=session)
        altered_ti_count = len(altered_tis)
        models.DagStat.update([dagrun.dag_id], session=session)
        flash(
            "1 dag run and {altered_ti_count} task instances "
            "were set to '{dagrun.state}'".format(**locals()))
class LogModelView(ModelViewOnly):
    """Read-only Flask-Admin view over the audit ``log`` table."""
    verbose_name_plural = "logs"
    verbose_name = "log"
    column_display_actions = False
    # Newest events first: descending sort on the event timestamp column.
    column_default_sort = ('dttm', True)
    column_filters = ('dag_id', 'task_id', 'execution_date', 'extra')
    filter_converter = wwwutils.UtcFilterConverter()
    column_formatters = dict(
        dttm=datetime_f, execution_date=datetime_f, dag_id=dag_link)
class TaskInstanceModelView(ModelViewOnly):
    """Read-only admin view over task instances with bulk state actions.

    Row ids are composite strings (task_id, dag_id, execution_date) decoded
    via ``iterdecode`` throughout the actions below.
    """
    verbose_name_plural = "task instances"
    verbose_name = "task instance"
    column_filters = (
        'state', 'dag_id', 'task_id', 'execution_date', 'hostname',
        'queue', 'pool', 'operator', 'start_date', 'end_date')
    filter_converter = wwwutils.UtcFilterConverter()
    named_filter_urls = True
    column_formatters = dict(
        log_url=log_url_formatter,
        task_id=task_instance_link,
        hostname=nobr_f,
        state=state_f,
        execution_date=datetime_f,
        start_date=datetime_f,
        end_date=datetime_f,
        queued_dttm=datetime_f,
        dag_id=dag_link,
        run_id=dag_run_link,
        duration=duration_f)
    column_searchable_list = ('dag_id', 'task_id', 'state')
    # Most recently scheduled jobs first (descending job_id).
    column_default_sort = ('job_id', True)
    form_choices = {
        'state': [
            ('success', 'success'),
            ('running', 'running'),
            ('failed', 'failed'),
        ],
    }
    column_list = (
        'state', 'dag_id', 'task_id', 'execution_date', 'operator',
        'start_date', 'end_date', 'duration', 'job_id', 'hostname',
        'unixname', 'priority_weight', 'queue', 'queued_dttm', 'try_number',
        'pool', 'log_url')
    page_size = PAGE_SIZE

    @action('set_running', "Set state to 'running'", None)
    def action_set_running(self, ids):
        # Bulk action: force the selected task instances to RUNNING.
        self.set_task_instance_state(ids, State.RUNNING)

    @action('set_failed', "Set state to 'failed'", None)
    def action_set_failed(self, ids):
        # Bulk action: force the selected task instances to FAILED.
        self.set_task_instance_state(ids, State.FAILED)

    @action('set_success', "Set state to 'success'", None)
    def action_set_success(self, ids):
        # Bulk action: force the selected task instances to SUCCESS.
        self.set_task_instance_state(ids, State.SUCCESS)

    @action('set_retry', "Set state to 'up_for_retry'", None)
    def action_set_retry(self, ids):
        # Bulk action: force the selected task instances to UP_FOR_RETRY.
        self.set_task_instance_state(ids, State.UP_FOR_RETRY)

    @provide_session
    @action('clear',
            lazy_gettext('Clear'),
            lazy_gettext(
                'Are you sure you want to clear the state of the selected task instance(s)'
                ' and set their dagruns to the running state?'))
    def action_clear(self, ids, session=None):
        """Bulk action: clear the selected TIs so the scheduler re-runs them."""
        try:
            TI = models.TaskInstance

            dag_to_task_details = {}
            dag_to_tis = {}

            # Collect dags upfront as dagbag.get_dag() will reset the session
            for id_str in ids:
                task_id, dag_id, execution_date = iterdecode(id_str)
                dag = dagbag.get_dag(dag_id)
                task_details = dag_to_task_details.setdefault(dag, [])
                task_details.append((task_id, execution_date))

            # Re-query each TI in the current session before clearing.
            for dag, task_details in dag_to_task_details.items():
                for task_id, execution_date in task_details:
                    execution_date = parse_execution_date(execution_date)

                    ti = session.query(TI).filter(TI.task_id == task_id,
                                                  TI.dag_id == dag.dag_id,
                                                  TI.execution_date == execution_date).one()

                    tis = dag_to_tis.setdefault(dag, [])
                    tis.append(ti)

            for dag, tis in dag_to_tis.items():
                models.clear_task_instances(tis, session, dag=dag)

            session.commit()

            flash("{0} task instances have been cleared".format(len(ids)))

        except Exception as ex:
            if not self.handle_view_exception(ex):
                raise Exception("Ooops")
            flash('Failed to clear task instances', 'error')

    @provide_session
    def set_task_instance_state(self, ids, target_state, session=None):
        """Set every selected task instance to ``target_state`` in one commit.

        ``.one()`` raises if a decoded id no longer matches a row; that (and
        any other failure) is routed through handle_view_exception.
        """
        try:
            TI = models.TaskInstance
            count = len(ids)
            for id in ids:
                task_id, dag_id, execution_date = iterdecode(id)
                execution_date = parse_execution_date(execution_date)

                ti = session.query(TI).filter(TI.task_id == task_id,
                                              TI.dag_id == dag_id,
                                              TI.execution_date == execution_date).one()
                ti.state = target_state
            session.commit()
            flash(
                "{count} task instances were set to '{target_state}'".format(**locals()))
        except Exception as ex:
            if not self.handle_view_exception(ex):
                raise Exception("Ooops")
            flash('Failed to set state', 'error')

    def get_one(self, id):
        """
        As a workaround for AIRFLOW-252, this method overrides Flask-Admin's ModelView.get_one().

        TODO: this method should be removed once the below bug is fixed on Flask-Admin side.
        https://github.com/flask-admin/flask-admin/issues/1226
        """
        task_id, dag_id, execution_date = iterdecode(id)
        execution_date = pendulum.parse(execution_date)
        return self.session.query(self.model).get((task_id, dag_id, execution_date))
class ConnectionModelView(wwwutils.SuperUserMixin, AirflowModelView):
    """Superuser CRUD view for Connections.

    Type-specific ``extra__*`` form fields are packed into / unpacked from
    the JSON ``extra`` column for jdbc and google_cloud_platform types.
    """
    create_template = 'airflow/conn_create.html'
    edit_template = 'airflow/conn_edit.html'
    list_template = 'airflow/conn_list.html'
    form_columns = (
        'conn_id',
        'conn_type',
        'host',
        'schema',
        'login',
        'password',
        'port',
        'extra',
        'extra__jdbc__drv_path',
        'extra__jdbc__drv_clsname',
        'extra__google_cloud_platform__project',
        'extra__google_cloud_platform__key_path',
        'extra__google_cloud_platform__keyfile_dict',
        'extra__google_cloud_platform__scope',
    )
    verbose_name = "Connection"
    verbose_name_plural = "Connections"
    column_default_sort = ('conn_id', False)
    column_list = ('conn_id', 'conn_type', 'host', 'port', 'is_encrypted', 'is_extra_encrypted',)
    form_overrides = dict(_password=PasswordField, _extra=TextAreaField)
    form_widget_args = {
        'is_extra_encrypted': {'disabled': True},
        'is_encrypted': {'disabled': True},
    }
    # Used to customize the form; the form elements get rendered
    # and results are stored in the extra field as json. All of these
    # need to be prefixed with extra__ and then the conn_type ___ as in
    # extra__{conn_type}__name. You can also hide form elements and rename
    # others from the connection_form.js file
    form_extra_fields = {
        'extra__jdbc__drv_path': StringField('Driver Path'),
        'extra__jdbc__drv_clsname': StringField('Driver Class'),
        'extra__google_cloud_platform__project': StringField('Project Id'),
        'extra__google_cloud_platform__key_path': StringField('Keyfile Path'),
        'extra__google_cloud_platform__keyfile_dict': PasswordField('Keyfile JSON'),
        'extra__google_cloud_platform__scope': StringField('Scopes (comma separated)'),
    }
    form_choices = {
        'conn_type': models.Connection._types
    }

    def on_model_change(self, form, model, is_created):
        # Serialize the type-specific extra__* form fields into the
        # connection's JSON ``extra`` column on create/edit.
        formdata = form.data
        if formdata['conn_type'] in ['jdbc', 'google_cloud_platform']:
            extra = {
                key: formdata[key]
                for key in self.form_extra_fields.keys() if key in formdata}
            model.extra = json.dumps(extra)

    @classmethod
    def alert_fernet_key(cls):
        """Return True when no ``fernet_key`` is configured, i.e. a warning
        about unencrypted credentials should be shown."""
        fk = None
        try:
            fk = conf.get('core', 'fernet_key')
        except Exception:
            pass
        return fk is None

    @classmethod
    def is_secure(cls):
        """
        Return True when the ``cryptography`` package is importable and a
        ``fernet_key`` is configured — i.e. passwords and the ``extra``
        field can be stored encrypted.
        """
        is_secure = False
        try:
            import cryptography  # noqa F401
            conf.get('core', 'fernet_key')
            is_secure = True
        except Exception:
            pass
        return is_secure

    def on_form_prefill(self, form, id):
        # Unpack the JSON ``extra`` column back into the extra__* form
        # fields when the edit form is shown.
        try:
            d = json.loads(form.data.get('extra', '{}'))
        except Exception:
            d = {}
        for field in list(self.form_extra_fields.keys()):
            value = d.get(field, '')
            if value:
                field = getattr(form, field)
                field.data = value
class UserModelView(wwwutils.SuperUserMixin, AirflowModelView):
    """Superuser admin view over the users table, sorted by username."""
    verbose_name = "User"
    verbose_name_plural = "Users"
    column_default_sort = 'username'
class VersionView(wwwutils.SuperUserMixin, BaseView):
    """Superuser-only page showing the Airflow version and git revision."""
    @expose('/')
    def version(self):
        # Package version as recorded by setup.py; tolerate a missing attribute.
        try:
            airflow_version = airflow.__version__
        except Exception as e:
            airflow_version = None
            logging.error(e)

        # Git hash written into <AIRFLOW_HOME>/airflow/git_version at
        # build/install time, if present.
        git_version = None
        version_path = os.path.join(settings.AIRFLOW_HOME, 'airflow', 'git_version')
        try:
            with open(version_path) as f:
                git_version = f.readline()
        except Exception as e:
            logging.error(e)

        # Render information
        title = "Version Info"
        return self.render('airflow/version.html',
                           title=title,
                           airflow_version=airflow_version,
                           git_version=git_version)
class ConfigurationView(wwwutils.SuperUserMixin, BaseView):
    """Superuser-only page rendering airflow.cfg, when exposure is enabled.

    ``?raw=true`` returns the plain text; otherwise the config is rendered
    with syntax highlighting plus a (section, key, value, source) table.
    """
    @expose('/')
    def conf(self):
        raw = request.args.get('raw') == "true"
        title = "Airflow Configuration"
        subtitle = conf.AIRFLOW_CONFIG
        if conf.getboolean("webserver", "expose_config"):
            with open(conf.AIRFLOW_CONFIG, 'r') as f:
                config = f.read()
            table = [
                (section, key, value, source)
                for section, parameters in conf.as_dict(True, True).items()
                for key, (value, source) in parameters.items()
            ]
        else:
            config = (
                "# Your Airflow administrator chose not to expose the "
                "configuration, most likely for security reasons.")
            table = None

        if raw:
            return Response(
                response=config,
                status=200,
                mimetype="application/text")

        code_html = Markup(highlight(
            config,
            lexers.IniLexer(),  # Lexer call
            HtmlFormatter(noclasses=True))
        )
        return self.render(
            'airflow/config.html',
            pre_subtitle=settings.HEADER + " v" + airflow.__version__,
            code_html=code_html, title=title, subtitle=subtitle,
            table=table)
class DagModelView(wwwutils.SuperUserMixin, ModelView):
    """Superuser admin view over DagModel rows (per-DAG metadata)."""
    column_list = ('dag_id', 'owners')
    column_editable_list = ('is_paused',)
    form_excluded_columns = ('is_subdag', 'is_active')
    column_searchable_list = ('dag_id',)
    column_filters = (
        'dag_id', 'owners', 'is_paused', 'is_active', 'is_subdag',
        'last_scheduler_run', 'last_expired')
    filter_converter = wwwutils.UtcFilterConverter()
    # Scheduler-maintained columns are shown but not editable.
    form_widget_args = {
        'last_scheduler_run': {'disabled': True},
        'fileloc': {'disabled': True},
        'is_paused': {'disabled': True},
        'last_pickled': {'disabled': True},
        'pickle_id': {'disabled': True},
        'last_loaded': {'disabled': True},
        'last_expired': {'disabled': True},
        'pickle_size': {'disabled': True},
        'scheduler_lock': {'disabled': True},
        'owners': {'disabled': True},
    }
    column_formatters = dict(
        dag_id=dag_link,
    )
    can_delete = False
    can_create = False
    page_size = PAGE_SIZE
    list_template = 'airflow/list_dags.html'
    named_filter_urls = True

    def get_query(self):
        """
        Default filters for model: active-or-paused DAGs, excluding subdags.
        """
        return super(DagModelView, self)\
            .get_query()\
            .filter(or_(models.DagModel.is_active, models.DagModel.is_paused))\
            .filter(~models.DagModel.is_subdag)

    def get_count_query(self):
        """
        Default filters for model.

        NOTE(review): this filter differs from get_query (no is_paused
        term), so the reported count can disagree with the listed rows —
        confirm this is intended.
        """
        return super(DagModelView, self)\
            .get_count_query()\
            .filter(models.DagModel.is_active)\
            .filter(~models.DagModel.is_subdag)
| 35.86045 | 102 | 0.57216 |
acf8cf80aebb39e642d5ba02b21b5ceff92f3969 | 23,605 | py | Python | tests/test_modeling_tf_xlnet.py | moscow25/transformers | a25c9fc8e14f3e8914116e6142af2a9589dc8e63 | [
"Apache-2.0"
] | null | null | null | tests/test_modeling_tf_xlnet.py | moscow25/transformers | a25c9fc8e14f3e8914116e6142af2a9589dc8e63 | [
"Apache-2.0"
] | null | null | null | tests/test_modeling_tf_xlnet.py | moscow25/transformers | a25c9fc8e14f3e8914116e6142af2a9589dc8e63 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import unittest
from transformers import XLNetConfig, is_tf_available
from transformers.testing_utils import require_tf, slow
from .test_configuration_common import ConfigTester
from .test_modeling_tf_common import TFModelTesterMixin, ids_tensor
if is_tf_available():
import tensorflow as tf
from transformers.modeling_tf_xlnet import (
TF_XLNET_PRETRAINED_MODEL_ARCHIVE_LIST,
TFXLNetForMultipleChoice,
TFXLNetForQuestionAnsweringSimple,
TFXLNetForSequenceClassification,
TFXLNetForTokenClassification,
TFXLNetLMHeadModel,
TFXLNetModel,
)
class TFXLNetModelTester:
    """Builds a small random XLNetConfig plus matching inputs, and provides
    one ``create_and_check_*`` method per TF XLNet head class that asserts
    the expected output/mems shapes."""

    def __init__(
        self, parent,
    ):
        # ``parent`` is the unittest.TestCase the assertions are routed to.
        self.parent = parent
        self.batch_size = 13
        self.seq_length = 7
        self.mem_len = 10
        # self.key_len = seq_length + mem_len
        self.clamp_len = -1
        self.reuse_len = 15
        self.is_training = True
        self.use_labels = True
        self.vocab_size = 99
        self.cutoffs = [10, 50, 80]
        self.hidden_size = 32
        self.num_attention_heads = 4
        self.d_inner = 128
        self.num_hidden_layers = 5
        self.type_sequence_label_size = 2
        self.untie_r = True
        self.bi_data = False
        self.same_length = False
        self.initializer_range = 0.05
        self.seed = 1
        self.type_vocab_size = 2
        self.bos_token_id = 1
        self.eos_token_id = 2
        self.pad_token_id = 5
        self.num_choices = 4

    def prepare_config_and_inputs(self):
        """Return a config plus random ids/masks/labels shared by all checks."""
        input_ids_1 = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)
        input_ids_2 = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)
        segment_ids = ids_tensor([self.batch_size, self.seq_length], self.type_vocab_size)
        input_mask = ids_tensor([self.batch_size, self.seq_length], 2, dtype=tf.float32)

        input_ids_q = ids_tensor([self.batch_size, self.seq_length + 1], self.vocab_size)
        perm_mask = tf.zeros((self.batch_size, self.seq_length + 1, self.seq_length), dtype=tf.float32)
        perm_mask_last = tf.ones((self.batch_size, self.seq_length + 1, 1), dtype=tf.float32)
        perm_mask = tf.concat([perm_mask, perm_mask_last], axis=-1)
        # perm_mask[:, :, -1] = 1.0  # Previous tokens don't see last token
        target_mapping = tf.zeros((self.batch_size, 1, self.seq_length), dtype=tf.float32)
        target_mapping_last = tf.ones((self.batch_size, 1, 1), dtype=tf.float32)
        target_mapping = tf.concat([target_mapping, target_mapping_last], axis=-1)
        # target_mapping[:, 0, -1] = 1.0  # predict last token

        sequence_labels = None
        lm_labels = None
        is_impossible_labels = None
        if self.use_labels:
            lm_labels = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)
            sequence_labels = ids_tensor([self.batch_size], self.type_sequence_label_size)
            is_impossible_labels = ids_tensor([self.batch_size], 2, dtype=tf.float32)

        config = XLNetConfig(
            vocab_size=self.vocab_size,
            d_model=self.hidden_size,
            n_head=self.num_attention_heads,
            d_inner=self.d_inner,
            n_layer=self.num_hidden_layers,
            untie_r=self.untie_r,
            mem_len=self.mem_len,
            clamp_len=self.clamp_len,
            same_length=self.same_length,
            reuse_len=self.reuse_len,
            bi_data=self.bi_data,
            initializer_range=self.initializer_range,
            num_labels=self.type_sequence_label_size,
            bos_token_id=self.bos_token_id,
            pad_token_id=self.pad_token_id,
            eos_token_id=self.eos_token_id,
            return_dict=True,
        )

        return (
            config,
            input_ids_1,
            input_ids_2,
            input_ids_q,
            perm_mask,
            input_mask,
            target_mapping,
            segment_ids,
            lm_labels,
            sequence_labels,
            is_impossible_labels,
        )

    def set_seed(self):
        # Fix python and TF RNGs so shape/value checks are reproducible.
        random.seed(self.seed)
        tf.random.set_seed(self.seed)

    def create_and_check_xlnet_base_model(
        self,
        config,
        input_ids_1,
        input_ids_2,
        input_ids_q,
        perm_mask,
        input_mask,
        target_mapping,
        segment_ids,
        lm_labels,
        sequence_labels,
        is_impossible_labels,
    ):
        model = TFXLNetModel(config)

        inputs = {"input_ids": input_ids_1, "input_mask": input_mask, "token_type_ids": segment_ids}
        result = model(inputs)

        inputs = [input_ids_1, input_mask]
        result = model(inputs)

        # With mem_len == 0 the model must return only the hidden states.
        config.mem_len = 0
        model = TFXLNetModel(config)
        no_mems_outputs = model(inputs)
        self.parent.assertEqual(len(no_mems_outputs), 1)

        self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))
        self.parent.assertListEqual(
            [mem.shape for mem in result.mems],
            [(self.seq_length, self.batch_size, self.hidden_size)] * self.num_hidden_layers,
        )

    def create_and_check_xlnet_lm_head(
        self,
        config,
        input_ids_1,
        input_ids_2,
        input_ids_q,
        perm_mask,
        input_mask,
        target_mapping,
        segment_ids,
        lm_labels,
        sequence_labels,
        is_impossible_labels,
    ):
        model = TFXLNetLMHeadModel(config)

        inputs_1 = {"input_ids": input_ids_1, "token_type_ids": segment_ids}
        all_logits_1, mems_1 = model(inputs_1).to_tuple()

        # Second pass feeds the first pass's mems back in.
        inputs_2 = {"input_ids": input_ids_2, "mems": mems_1, "token_type_ids": segment_ids}
        all_logits_2, mems_2 = model(inputs_2).to_tuple()

        inputs_3 = {"input_ids": input_ids_q, "perm_mask": perm_mask, "target_mapping": target_mapping}
        logits, _ = model(inputs_3).to_tuple()

        self.parent.assertEqual(all_logits_1.shape, (self.batch_size, self.seq_length, self.vocab_size))
        self.parent.assertListEqual(
            [mem.shape for mem in mems_1],
            [(self.seq_length, self.batch_size, self.hidden_size)] * self.num_hidden_layers,
        )

        self.parent.assertEqual(all_logits_2.shape, (self.batch_size, self.seq_length, self.vocab_size))
        self.parent.assertListEqual(
            [mem.shape for mem in mems_2],
            [(self.mem_len, self.batch_size, self.hidden_size)] * self.num_hidden_layers,
        )

    def create_and_check_xlnet_qa(
        self,
        config,
        input_ids_1,
        input_ids_2,
        input_ids_q,
        perm_mask,
        input_mask,
        target_mapping,
        segment_ids,
        lm_labels,
        sequence_labels,
        is_impossible_labels,
    ):
        model = TFXLNetForQuestionAnsweringSimple(config)

        inputs = {"input_ids": input_ids_1, "attention_mask": input_mask, "token_type_ids": segment_ids}
        result = model(inputs)

        self.parent.assertEqual(result.start_logits.shape, (self.batch_size, self.seq_length))
        self.parent.assertEqual(result.end_logits.shape, (self.batch_size, self.seq_length))
        self.parent.assertListEqual(
            [mem.shape for mem in result.mems],
            [(self.seq_length, self.batch_size, self.hidden_size)] * self.num_hidden_layers,
        )

    def create_and_check_xlnet_sequence_classif(
        self,
        config,
        input_ids_1,
        input_ids_2,
        input_ids_q,
        perm_mask,
        input_mask,
        target_mapping,
        segment_ids,
        lm_labels,
        sequence_labels,
        is_impossible_labels,
    ):
        model = TFXLNetForSequenceClassification(config)

        result = model(input_ids_1)

        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.type_sequence_label_size))
        self.parent.assertListEqual(
            [mem.shape for mem in result.mems],
            [(self.seq_length, self.batch_size, self.hidden_size)] * self.num_hidden_layers,
        )

    def create_and_check_xlnet_for_token_classification(
        self,
        config,
        input_ids_1,
        input_ids_2,
        input_ids_q,
        perm_mask,
        input_mask,
        target_mapping,
        segment_ids,
        lm_labels,
        sequence_labels,
        is_impossible_labels,
    ):
        config.num_labels = input_ids_1.shape[1]
        model = TFXLNetForTokenClassification(config)
        inputs = {
            "input_ids": input_ids_1,
            "attention_mask": input_mask,
            # 'token_type_ids': token_type_ids
        }
        result = model(inputs)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, config.num_labels))
        self.parent.assertListEqual(
            [mem.shape for mem in result.mems],
            [(self.seq_length, self.batch_size, self.hidden_size)] * self.num_hidden_layers,
        )

    def create_and_check_xlnet_for_multiple_choice(
        self,
        config,
        input_ids_1,
        input_ids_2,
        input_ids_q,
        perm_mask,
        input_mask,
        target_mapping,
        segment_ids,
        lm_labels,
        sequence_labels,
        is_impossible_labels,
    ):
        config.num_choices = self.num_choices
        model = TFXLNetForMultipleChoice(config=config)
        # Tile each (batch, seq) input to (batch, num_choices, seq).
        multiple_choice_inputs_ids = tf.tile(tf.expand_dims(input_ids_1, 1), (1, self.num_choices, 1))
        multiple_choice_input_mask = tf.tile(tf.expand_dims(input_mask, 1), (1, self.num_choices, 1))
        multiple_choice_token_type_ids = tf.tile(tf.expand_dims(segment_ids, 1), (1, self.num_choices, 1))
        inputs = {
            "input_ids": multiple_choice_inputs_ids,
            "attention_mask": multiple_choice_input_mask,
            "token_type_ids": multiple_choice_token_type_ids,
        }
        result = model(inputs)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.num_choices))
        self.parent.assertListEqual(
            [mem.shape for mem in result.mems],
            [(self.seq_length, self.batch_size * self.num_choices, self.hidden_size)] * self.num_hidden_layers,
        )

    def prepare_config_and_inputs_for_common(self):
        """Adapter for TFModelTesterMixin: (config, {"input_ids": ...})."""
        config_and_inputs = self.prepare_config_and_inputs()
        (
            config,
            input_ids_1,
            input_ids_2,
            input_ids_q,
            perm_mask,
            input_mask,
            target_mapping,
            segment_ids,
            lm_labels,
            sequence_labels,
            is_impossible_labels,
        ) = config_and_inputs
        inputs_dict = {"input_ids": input_ids_1}
        return config, inputs_dict
@require_tf
class TFXLNetModelTest(TFModelTesterMixin, unittest.TestCase):
    """Runs the shared TF model-tester suite plus per-head shape checks
    for the TF XLNet model classes."""

    all_model_classes = (
        (
            TFXLNetModel,
            TFXLNetLMHeadModel,
            TFXLNetForSequenceClassification,
            TFXLNetForTokenClassification,
            TFXLNetForQuestionAnsweringSimple,
            TFXLNetForMultipleChoice,
        )
        if is_tf_available()
        else ()
    )
    all_generative_model_classes = (
        (TFXLNetLMHeadModel,) if is_tf_available() else ()
    )  # TODO (PVP): Check other models whether language generation is also applicable
    test_pruning = False

    def setUp(self):
        self.model_tester = TFXLNetModelTester(self)
        self.config_tester = ConfigTester(self, config_class=XLNetConfig, d_inner=37)

    def test_config(self):
        self.config_tester.run_common_tests()

    def test_xlnet_base_model(self):
        self.model_tester.set_seed()
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_xlnet_base_model(*config_and_inputs)

    def test_xlnet_lm_head(self):
        self.model_tester.set_seed()
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_xlnet_lm_head(*config_and_inputs)

    def test_xlnet_sequence_classif(self):
        self.model_tester.set_seed()
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_xlnet_sequence_classif(*config_and_inputs)

    def test_xlnet_token_classification(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_xlnet_for_token_classification(*config_and_inputs)

    def test_xlnet_qa(self):
        self.model_tester.set_seed()
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_xlnet_qa(*config_and_inputs)

    def test_xlnet_for_multiple_choice(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_xlnet_for_multiple_choice(*config_and_inputs)

    @slow
    def test_model_from_pretrained(self):
        # Smoke-test loading the first published checkpoint.
        for model_name in TF_XLNET_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
            model = TFXLNetModel.from_pretrained(model_name)
            self.assertIsNotNone(model)
@require_tf
class TFXLNetModelLanguageGenerationTest(unittest.TestCase):
    @slow
    def test_lm_generate_xlnet_base_cased(self):
        """Greedy generation from the pretrained xlnet-base-cased checkpoint
        must reproduce a fixed, known continuation of the prompt."""
        model = TFXLNetLMHeadModel.from_pretrained("xlnet-base-cased")

        # Prompt (161 tokens):
        # In 1991, the remains of Russian Tsar Nicholas II and his family
        # (except for Alexei and Maria) are discovered.
        # The voice of Nicholas's young son, Tsarevich Alexei Nikolaevich, narrates the
        # remainder of the story. 1883 Western Siberia,
        # a young Grigori Rasputin is asked by his father and a group of men to perform magic.
        # Rasputin has a vision and denounces one of the men as a horse thief. Although his
        # father initially slaps him for making such an accusation, Rasputin watches as the
        # man is chased outside and beaten. Twenty years later, Rasputin sees a vision of
        # the Virgin Mary, prompting him to become a priest. Rasputin quickly becomes famous,
        # with people, even a bishop, begging for his blessing. """
        # fmt: off
        prompt_tokens = [
            67, 2840, 19, 18, 1484, 20, 965, 29077, 8719, 1273, 21, 45, 273,
            17, 10, 15048, 28, 27511, 21, 4185, 11, 41, 2444, 9, 32, 1025,
            20, 8719, 26, 23, 673, 966, 19, 29077, 20643, 27511, 20822,
            20643, 19, 17, 6616, 17511, 18, 8978, 20, 18, 777, 9, 19233,
            1527, 17669, 19, 24, 673, 17, 28756, 150, 12943, 4354, 153, 27,
            442, 37, 45, 668, 21, 24, 256, 20, 416, 22, 2771, 4901, 9,
            12943, 4354, 153, 51, 24, 3004, 21, 28142, 23, 65, 20, 18, 416,
            34, 24, 2958, 22947, 9, 1177, 45, 668, 3097, 13768, 23, 103,
            28, 441, 148, 48, 20522, 19, 12943, 4354, 153, 12860, 34, 18,
            326, 27, 17492, 684, 21, 6709, 9, 8585, 123, 266, 19, 12943,
            4354, 153, 6872, 24, 3004, 20, 18, 9225, 2198, 19, 12717, 103,
            22, 401, 24, 6348, 9, 12943, 4354, 153, 1068, 2768, 2286, 19,
            33, 104, 19, 176, 24, 9313, 19, 20086, 28, 45, 10292, 9, 4, 3,
        ]
        # Greedy decoding continues the passage with exactly this suffix
        # (39 tokens, so len(prompt) + len(continuation) == max_length == 200):
        # <sep><cls>, Rasputin is asked to perform magic.
        # He is not able to perform magic, and his father and
        # the men are forced to leave the monastery. Rasputin is forced to return to
        continuation_tokens = [
            19, 12943, 4354, 153, 27, 442, 22, 2771, 4901, 9, 69, 27, 50,
            551, 22, 2771, 4901, 19, 21, 45, 668, 21, 18, 416, 41, 1499,
            22, 755, 18, 14285, 9, 12943, 4354, 153, 27, 1499, 22, 642, 22,
        ]
        # fmt: on

        input_ids = tf.convert_to_tensor([prompt_tokens], dtype=tf.int32)
        expected_output_ids = prompt_tokens + continuation_tokens

        output_ids = model.generate(input_ids, max_length=200, do_sample=False)
        self.assertListEqual(output_ids[0].numpy().tolist(), expected_output_ids)
| 29.691824 | 117 | 0.49642 |
acf8cfd125ee87e3784708b9b2873a7983ba89b9 | 986 | py | Python | tests/test_codecs.py | petnet-independence-project/hbmq | 72947007235040720cc2d1ecf51fa61ffa63c347 | [
"MIT"
] | null | null | null | tests/test_codecs.py | petnet-independence-project/hbmq | 72947007235040720cc2d1ecf51fa61ffa63c347 | [
"MIT"
] | 2 | 2020-12-31T06:37:20.000Z | 2021-01-07T21:36:02.000Z | tests/test_codecs.py | petnet-independence-project/hbmqtt | 72947007235040720cc2d1ecf51fa61ffa63c347 | [
"MIT"
] | null | null | null | # Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
import unittest
import asyncio
from hbmqtt.codecs import (
bytes_to_hex_str,
bytes_to_int,
decode_string,
encode_string,
)
class TestCodecs(unittest.TestCase):
    """Unit tests for the low-level MQTT codec helpers."""

    def setUp(self):
        # Each test gets a private event loop to drive the coroutine codecs.
        self.loop = asyncio.new_event_loop()

    def tearDown(self):
        # Close the loop so its selector/file descriptors are released;
        # the original leaked one loop per test (ResourceWarning).
        self.loop.close()

    def test_bytes_to_hex_str(self):
        ret = bytes_to_hex_str(b"\x7f")
        self.assertEqual(ret, "0x7f")

    def test_bytes_to_int(self):
        ret = bytes_to_int(b"\x7f")
        self.assertEqual(ret, 127)
        ret = bytes_to_int(b"\xff\xff")
        self.assertEqual(ret, 65535)

    def test_decode_string(self):
        # MQTT strings are length-prefixed (2-byte big-endian) UTF-8.
        stream = asyncio.StreamReader(loop=self.loop)
        stream.feed_data(b"\x00\x02AA")
        ret = self.loop.run_until_complete(decode_string(stream))
        self.assertEqual(ret, "AA")

    def test_encode_string(self):
        encoded = encode_string("AA")
        self.assertEqual(b"\x00\x02AA", encoded)
acf8d03908880ef13280581da3d26be97883a1b6 | 7,859 | py | Python | seismic/utils.py | slimgroup/Devito-Examples | 449e1286a18ebc4172069372ba2bf3cd2ec99a2f | [
"MIT"
] | 7 | 2020-08-19T18:23:08.000Z | 2022-02-18T19:19:24.000Z | seismic/utils.py | slimgroup/Devito-Examples | 449e1286a18ebc4172069372ba2bf3cd2ec99a2f | [
"MIT"
] | null | null | null | seismic/utils.py | slimgroup/Devito-Examples | 449e1286a18ebc4172069372ba2bf3cd2ec99a2f | [
"MIT"
] | 3 | 2020-12-01T22:17:09.000Z | 2021-05-21T11:29:07.000Z | import numpy as np
from argparse import ArgumentParser, Action
from devito import error, configuration, warning
from devito.tools import Pickable
from .source import *
__all__ = ['AcquisitionGeometry', 'setup_geometry', 'seismic_args']
def setup_geometry(model, tn, f0=0.010):
    """Build a default acquisition geometry for ``model``.

    One Ricker source is placed at the horizontal center of the domain, one
    grid point below the top (for dim > 1), with receivers laid out by
    ``setup_rec_coords``. Recording runs from t0=0 to ``tn`` with peak
    frequency ``f0`` (kHz).
    """
    # Single source: domain center, then snapped to just below the surface.
    src_coordinates = np.empty((1, model.dim))
    src_coordinates[0, :] = np.array(model.domain_size) * .5
    if model.dim > 1:
        src_coordinates[0, -1] = model.origin[-1] + model.spacing[-1]

    rec_coordinates = setup_rec_coords(model)

    return AcquisitionGeometry(model, rec_coordinates, src_coordinates,
                               t0=0.0, tn=tn, src_type='Ricker', f0=f0)
def setup_rec_coords(model):
    """Lay out receivers evenly along the top of the model.

    1D: one receiver per grid point along the only axis, shape (nx, 1).
    2D: a line of nx receivers at depth origin[-1] + 2*spacing[-1].
    3D: a full nx*ny surface grid at that same depth.
    """
    nrecx = model.shape[0]
    recx = np.linspace(model.origin[0], model.domain_size[0], nrecx)

    if model.dim == 1:
        return recx.reshape((nrecx, 1))

    # Receiver depth: two grid points below the top of the domain.
    depth = model.origin[-1] + 2 * model.spacing[-1]

    if model.dim == 2:
        rec_coordinates = np.empty((nrecx, 2))
        rec_coordinates[:, 0] = recx
        rec_coordinates[:, 1] = depth
        return rec_coordinates

    nrecy = model.shape[1]
    recy = np.linspace(model.origin[1], model.domain_size[1], nrecy)
    # 'ij' indexing + ravel reproduces x varying slowest, y fastest.
    xx, yy = np.meshgrid(recx, recy, indexing='ij')
    rec_coordinates = np.empty((nrecx * nrecy, 3))
    rec_coordinates[:, 0] = xx.ravel()
    rec_coordinates[:, 1] = yy.ravel()
    rec_coordinates[:, 2] = depth
    return rec_coordinates
class AcquisitionGeometry(Pickable):
    """
    Encapsulate the geometry of an acquisition:
    - receiver positions and number
    - source positions and number

    In practice this would only point to a segy file with the
    necessary information
    """

    def __init__(self, model, rec_positions, src_positions, t0, tn, **kwargs):
        """
        In practice would be __init__(segyfile) and all below parameters
        would come from a segy_read (at property call rather than at init).

        Positions are reshaped to (npoint, model.dim). Optional kwargs:
        ``src_type`` (one of the ``sources`` keys), ``f0`` (peak frequency,
        required when src_type is given), ``a`` and ``t0w`` (wavelet
        amplitude / time shift, passed through to the source class).
        """
        src_positions = np.reshape(src_positions, (-1, model.dim))
        rec_positions = np.reshape(rec_positions, (-1, model.dim))
        self.rec_positions = rec_positions
        self._nrec = rec_positions.shape[0]
        self.src_positions = src_positions
        self._nsrc = src_positions.shape[0]
        self._src_type = kwargs.get('src_type')
        assert self.src_type in sources
        self._f0 = kwargs.get('f0')
        self._a = kwargs.get('a', None)
        self._t0w = kwargs.get('t0w', None)
        if self._src_type is not None and self._f0 is None:
            error("Peak frequency must be provided in KH" +
                  " for source of type %s" % self._src_type)

        self._model = model
        # Time sampling defaults to the model's stable (critical) dt.
        self._dt = model.critical_dt
        self._t0 = t0
        self._tn = tn

    def resample(self, dt):
        """Change the time sampling interval in place; returns self."""
        self._dt = dt
        return self

    @property
    def time_axis(self):
        # Rebuilt on access so it always reflects the current dt.
        return TimeAxis(start=self.t0, stop=self.tn, step=self.dt)

    @property
    def model(self):
        return self._model

    @model.setter
    def model(self, model):
        self._model = model

    @property
    def src_type(self):
        return self._src_type

    @property
    def grid(self):
        return self.model.grid

    @property
    def f0(self):
        return self._f0

    @property
    def tn(self):
        return self._tn

    @property
    def t0(self):
        return self._t0

    @property
    def dt(self):
        return self._dt

    @property
    def nt(self):
        # Number of time steps implied by (t0, tn, dt).
        return self.time_axis.num

    @property
    def nrec(self):
        return self._nrec

    @property
    def nsrc(self):
        return self._nsrc

    @property
    def dtype(self):
        return self.grid.dtype

    @property
    def rec(self):
        return self.new_rec()

    def new_rec(self, name='rec'):
        """Create a fresh (zero-data) Receiver at the receiver positions."""
        return Receiver(name=name, grid=self.grid,
                        time_range=self.time_axis, npoint=self.nrec,
                        coordinates=self.rec_positions)

    @property
    def adj_src(self):
        """A time-reversed source wavelet injected at the receiver positions,
        for use as an adjoint-mode source."""
        if self.src_type is None:
            warning("No surce type defined, returning uninitiallized (zero) shot record")
            return self.rec
        adj_src = sources[self.src_type](name='rec', grid=self.grid, f0=self.f0,
                                         time_range=self.time_axis, npoint=self.nrec,
                                         coordinates=self.rec_positions,
                                         t0=self._t0w, a=self._a)
        # Revert time axis to have a proper shot record and not compute on zeros
        for i in range(self.nrec):
            adj_src.data[:, i] = adj_src.wavelet[::-1]
        return adj_src

    @property
    def src(self):
        """The source term: a typed wavelet source when src_type is set,
        otherwise a zero-data PointSource at the source positions."""
        if self.src_type is None:
            warning("No surce type defined, returning uninistiallized (zero) source")
            return PointSource(name='src', grid=self.grid,
                               time_range=self.time_axis, npoint=self.nsrc,
                               coordinates=self.src_positions)
        else:
            return sources[self.src_type](name='src', grid=self.grid, f0=self.f0,
                                          time_range=self.time_axis, npoint=self.nsrc,
                                          coordinates=self.src_positions,
                                          t0=self._t0w, a=self._a)

    # Pickling support (Pickable): positional args and kwargs to rebuild.
    _pickle_args = ['model', 'rec_positions', 'src_positions', 't0', 'tn']
    _pickle_kwargs = ['f0', 'src_type']
sources = {'Wavelet': WaveletSource, 'Ricker': RickerSource, 'Gabor': GaborSource}
def seismic_args(description):
    """
    Command line options for the seismic examples.

    Returns an :class:`argparse.ArgumentParser` pre-populated with the
    options shared by the seismic example scripts.
    """
    class _dtype_store(Action):
        # Map the dtype name given on the command line to the numpy dtype.
        def __call__(self, parser, args, values, option_string=None):
            values = {'float32': np.float32, 'float64': np.float64}[values]
            setattr(args, self.dest, values)
    parser = ArgumentParser(description=description)
    parser.add_argument("-nd", dest="ndim", default=3, type=int,
                        help="Number of dimensions")
    parser.add_argument("-d", "--shape", default=(51, 51, 51), type=int, nargs="+",
                        help="Number of grid points along each axis")
    parser.add_argument('-f', '--full', default=False, action='store_true',
                        help="Execute all operators and store forward wavefield")
    parser.add_argument("-so", "--space_order", default=4,
                        type=int, help="Space order of the simulation")
    parser.add_argument("--nbl", default=40,
                        type=int, help="Number of boundary layers around the domain")
    parser.add_argument("--constant", default=False, action='store_true',
                        help="Constant velocity model, default is a two layer model")
    # Fixed: the help text was copy-pasted from --constant and described the
    # wrong option.
    parser.add_argument("--checkpointing", default=False, action='store_true',
                        help="Use checkpointing when computing the gradient")
    parser.add_argument("-opt", default="advanced",
                        choices=configuration._accepted['opt'],
                        help="Performance optimization level")
    parser.add_argument('-a', '--autotune', default='off',
                        choices=(configuration._accepted['autotuning']),
                        help="Operator auto-tuning mode")
    parser.add_argument("-tn", "--tn", default=0,
                        type=float, help="Simulation time in millisecond")
    parser.add_argument("-dtype", action=_dtype_store, dest="dtype", default=np.float32,
                        choices=['float32', 'float64'])
    return parser
| 35.722727 | 89 | 0.589642 |
acf8d10cd4c15176345e0a743ea4a26a111100e2 | 18,573 | py | Python | python3-virtualenv/Lib/python3.6/site-packages/alembic/autogenerate/api.py | LindaNayeli104/mlh-orientation-hackathon-project | d86b58f76721a9d5f3374399bfc6d3b1445d16ca | [
"MIT"
] | null | null | null | python3-virtualenv/Lib/python3.6/site-packages/alembic/autogenerate/api.py | LindaNayeli104/mlh-orientation-hackathon-project | d86b58f76721a9d5f3374399bfc6d3b1445d16ca | [
"MIT"
] | null | null | null | python3-virtualenv/Lib/python3.6/site-packages/alembic/autogenerate/api.py | LindaNayeli104/mlh-orientation-hackathon-project | d86b58f76721a9d5f3374399bfc6d3b1445d16ca | [
"MIT"
] | 1 | 2021-06-20T19:28:37.000Z | 2021-06-20T19:28:37.000Z | """Provide the 'autogenerate' feature which can produce migration operations
automatically."""
import contextlib
from sqlalchemy import inspect
from . import compare
from . import render
from .. import util
from ..operations import ops
def compare_metadata(context, metadata):
    """Compare a database schema to that given in a
    :class:`~sqlalchemy.schema.MetaData` instance.

    The database connection is presented in the context
    of a :class:`.MigrationContext` object, which
    provides database connectivity as well as optional
    comparison functions to use for datatypes and
    server defaults - see the "autogenerate" arguments
    at :meth:`.EnvironmentContext.configure`
    for details on these.

    The return format is a list of "diff" directives,
    each representing individual differences::

        from alembic.migration import MigrationContext
        from alembic.autogenerate import compare_metadata
        from sqlalchemy.schema import SchemaItem
        from sqlalchemy.types import TypeEngine
        from sqlalchemy import (create_engine, MetaData, Column,
            Integer, String, Table, text)
        import pprint
        engine = create_engine("sqlite://")
        with engine.begin() as conn:
            conn.execute(text('''
                create table foo (
                    id integer not null primary key,
                    old_data varchar,
                    x integer
                )'''))
            conn.execute(text('''
                create table bar (
                    data varchar
                )'''))
        metadata = MetaData()
        Table('foo', metadata,
            Column('id', Integer, primary_key=True),
            Column('data', Integer),
            Column('x', Integer, nullable=False)
        )
        Table('bat', metadata,
            Column('info', String)
        )
        mc = MigrationContext.configure(engine.connect())
        diff = compare_metadata(mc, metadata)
        pprint.pprint(diff, indent=2, width=20)

    Output::

        [ ( 'add_table',
            Table('bat', MetaData(bind=None),
                Column('info', String(), table=<bat>), schema=None)),
          ( 'remove_table',
            Table(u'bar', MetaData(bind=None),
                Column(u'data', VARCHAR(), table=<bar>), schema=None)),
          ( 'add_column',
            None,
            'foo',
            Column('data', Integer(), table=<foo>)),
          ( 'remove_column',
            None,
            'foo',
            Column(u'old_data', VARCHAR(), table=None)),
          [ ( 'modify_nullable',
              None,
              'foo',
              u'x',
              { 'existing_server_default': None,
                'existing_type': INTEGER()},
              True,
              False)]]

    :param context: a :class:`.MigrationContext`
     instance.
    :param metadata: a :class:`~sqlalchemy.schema.MetaData`
     instance.

    .. seealso::

        :func:`.produce_migrations` - produces a :class:`.MigrationScript`
        structure based on metadata comparison.

    """
    # Build the full MigrationScript, then flatten its upgrade operations
    # into the legacy list-of-diff-tuples format.
    migration_script = produce_migrations(context, metadata)
    return migration_script.upgrade_ops.as_diffs()
def produce_migrations(context, metadata):
    """Produce a :class:`.MigrationScript` structure based on schema
    comparison.

    Equivalent to :func:`.compare_metadata`, except the comparison results
    are assembled into a complete :class:`.MigrationScript` object instead
    of being returned as raw diff tuples.  For an example of what this looks
    like, see the example in :ref:`customizing_revision`.

    .. seealso::

        :func:`.compare_metadata` - returns more fundamental "diff"
        data from comparing a schema.

    """
    gen_ctx = AutogenContext(context, metadata=metadata)
    # Start from an empty script and let the comparison step fill it in.
    script = ops.MigrationScript(
        rev_id=None,
        upgrade_ops=ops.UpgradeOps([]),
        downgrade_ops=ops.DowngradeOps([]),
    )
    compare._populate_migration_script(gen_ctx, script)
    return script
def render_python_code(
    up_or_down_op,
    sqlalchemy_module_prefix="sa.",
    alembic_module_prefix="op.",
    render_as_batch=False,
    imports=(),
    render_item=None,
    migration_context=None,
):
    """Render Python code given an :class:`.UpgradeOps` or
    :class:`.DowngradeOps` object.

    This is a convenience function that can be used to test the
    autogenerate output of a user-defined :class:`.MigrationScript`
    structure.
    """
    if migration_context is None:
        # No context supplied: fall back to one bound to a generic dialect.
        from ..runtime.migration import MigrationContext
        from sqlalchemy.engine.default import DefaultDialect

        migration_context = MigrationContext.configure(
            dialect=DefaultDialect()
        )
    autogen_context = AutogenContext(
        migration_context,
        opts={
            "sqlalchemy_module_prefix": sqlalchemy_module_prefix,
            "alembic_module_prefix": alembic_module_prefix,
            "render_item": render_item,
            "render_as_batch": render_as_batch,
        },
    )
    autogen_context.imports = set(imports)
    rendered = render._render_cmd_body(up_or_down_op, autogen_context)
    return render._indent(rendered)
def _render_migration_diffs(context, template_args):
    """legacy, used by test_autogen_composition at the moment"""
    autogen_context = AutogenContext(context)
    up_ops = ops.UpgradeOps([])
    compare._produce_net_changes(autogen_context, up_ops)
    # Downgrade ops are simply the reversal of the computed upgrade ops.
    script = ops.MigrationScript(
        rev_id=None,
        upgrade_ops=up_ops,
        downgrade_ops=up_ops.reverse(),
    )
    render._render_python_into_templatevars(
        autogen_context, script, template_args
    )
class AutogenContext(object):
    """Maintains configuration and state that's specific to an
    autogenerate operation."""
    metadata = None
    """The :class:`~sqlalchemy.schema.MetaData` object
    representing the destination.
    This object is the one that is passed within ``env.py``
    to the :paramref:`.EnvironmentContext.configure.target_metadata`
    parameter. It represents the structure of :class:`.Table` and other
    objects as stated in the current database model, and represents the
    destination structure for the database being examined.
    While the :class:`~sqlalchemy.schema.MetaData` object is primarily
    known as a collection of :class:`~sqlalchemy.schema.Table` objects,
    it also has an :attr:`~sqlalchemy.schema.MetaData.info` dictionary
    that may be used by end-user schemes to store additional schema-level
    objects that are to be compared in custom autogeneration schemes.
    """
    connection = None
    """The :class:`~sqlalchemy.engine.base.Connection` object currently
    connected to the database backend being compared.
    This is obtained from the :attr:`.MigrationContext.bind` and is
    ultimately set up in the ``env.py`` script.
    """
    dialect = None
    """The :class:`~sqlalchemy.engine.Dialect` object currently in use.
    This is normally obtained from the
    :attr:`~sqlalchemy.engine.base.Connection.dialect` attribute.
    """
    imports = None
    """A ``set()`` which contains string Python import directives.
    The directives are to be rendered into the ``${imports}`` section
    of a script template. The set is normally empty and can be modified
    within hooks such as the
    :paramref:`.EnvironmentContext.configure.render_item` hook.
    .. seealso::
        :ref:`autogen_render_types`
    """
    migration_context = None
    """The :class:`.MigrationContext` established by the ``env.py`` script."""
    def __init__(
        self, migration_context, metadata=None, opts=None, autogenerate=True
    ):
        # Offline (--sql) mode can't introspect a live database, so
        # autogenerate is disallowed there.
        if (
            autogenerate
            and migration_context is not None
            and migration_context.as_sql
        ):
            raise util.CommandError(
                "autogenerate can't use as_sql=True as it prevents querying "
                "the database for schema information"
            )
        if opts is None:
            opts = migration_context.opts
        # Explicit metadata argument wins; otherwise fall back to the
        # env.py-configured target_metadata.
        self.metadata = metadata = (
            opts.get("target_metadata", None) if metadata is None else metadata
        )
        if (
            autogenerate
            and metadata is None
            and migration_context is not None
            and migration_context.script is not None
        ):
            raise util.CommandError(
                "Can't proceed with --autogenerate option; environment "
                "script %s does not provide "
                "a MetaData object or sequence of objects to the context."
                % (migration_context.script.env_py_location)
            )
        # Collect the user-supplied include_object / include_name hooks
        # into filter lists consulted by run_object_filters / run_name_filters.
        include_object = opts.get("include_object", None)
        include_name = opts.get("include_name", None)
        object_filters = []
        name_filters = []
        if include_object:
            object_filters.append(include_object)
        if include_name:
            name_filters.append(include_name)
        self._object_filters = object_filters
        self._name_filters = name_filters
        self.migration_context = migration_context
        if self.migration_context is not None:
            self.connection = self.migration_context.bind
            self.dialect = self.migration_context.dialect
        self.imports = set()
        self.opts = opts
        self._has_batch = False
    @util.memoized_property
    def inspector(self):
        # Memoized SQLAlchemy inspector over the bound connection.
        return inspect(self.connection)
    @contextlib.contextmanager
    def _within_batch(self):
        # Temporarily raise the _has_batch flag for the duration of the block.
        self._has_batch = True
        yield
        self._has_batch = False
    def run_name_filters(self, name, type_, parent_names):
        """Run the context's name filters and return True if the targets
        should be part of the autogenerate operation.
        This method should be run for every kind of name encountered within the
        reflection side of an autogenerate operation, giving the environment
        the chance to filter what names should be reflected as database
        objects. The filters here are produced directly via the
        :paramref:`.EnvironmentContext.configure.include_name` parameter.
        """
        # Augment parent_names with a "schema.table" convenience key before
        # handing it to the user-supplied filters.
        if "schema_name" in parent_names:
            if type_ == "table":
                table_name = name
            else:
                table_name = parent_names.get("table_name", None)
            if table_name:
                schema_name = parent_names["schema_name"]
                if schema_name:
                    parent_names["schema_qualified_table_name"] = "%s.%s" % (
                        schema_name,
                        table_name,
                    )
                else:
                    parent_names["schema_qualified_table_name"] = table_name
        # All filters must accept the name (logical AND).
        for fn in self._name_filters:
            if not fn(name, type_, parent_names):
                return False
        else:
            return True
    def run_object_filters(self, object_, name, type_, reflected, compare_to):
        """Run the context's object filters and return True if the targets
        should be part of the autogenerate operation.
        This method should be run for every kind of object encountered within
        an autogenerate operation, giving the environment the chance
        to filter what objects should be included in the comparison.
        The filters here are produced directly via the
        :paramref:`.EnvironmentContext.configure.include_object` parameter.
        """
        # All filters must accept the object (logical AND).
        for fn in self._object_filters:
            if not fn(object_, name, type_, reflected, compare_to):
                return False
        else:
            return True
    # Legacy alias retained for backwards compatibility.
    run_filters = run_object_filters
    @util.memoized_property
    def sorted_tables(self):
        """Return an aggregate of the :attr:`.MetaData.sorted_tables` collection(s).
        For a sequence of :class:`.MetaData` objects, this
        concatenates the :attr:`.MetaData.sorted_tables` collection
        for each individual :class:`.MetaData` in the order of the
        sequence. It does **not** collate the sorted tables collections.
        """
        result = []
        for m in util.to_list(self.metadata):
            result.extend(m.sorted_tables)
        return result
    @util.memoized_property
    def table_key_to_table(self):
        """Return an aggregate of the :attr:`.MetaData.tables` dictionaries.
        The :attr:`.MetaData.tables` collection is a dictionary of table key
        to :class:`.Table`; this method aggregates the dictionary across
        multiple :class:`.MetaData` objects into one dictionary.
        Duplicate table keys are **not** supported; if two :class:`.MetaData`
        objects contain the same table key, an exception is raised.
        """
        result = {}
        for m in util.to_list(self.metadata):
            intersect = set(result).intersection(set(m.tables))
            if intersect:
                raise ValueError(
                    "Duplicate table keys across multiple "
                    "MetaData objects: %s"
                    % (", ".join('"%s"' % key for key in sorted(intersect)))
                )
            result.update(m.tables)
        return result
class RevisionContext(object):
    """Maintains configuration and state that's specific to a revision
    file generation operation."""
    def __init__(
        self,
        config,
        script_directory,
        command_args,
        process_revision_directives=None,
    ):
        self.config = config
        self.script_directory = script_directory
        self.command_args = command_args
        self.process_revision_directives = process_revision_directives
        self.template_args = {
            "config": config  # Let templates use config for
            # e.g. multiple databases
        }
        # Starts with one skeleton revision; hooks may append more.
        self.generated_revisions = [self._default_revision()]
    def _to_script(self, migration_script):
        """Render a MigrationScript directive into an actual revision file."""
        template_args = {}
        for k, v in self.template_args.items():
            template_args.setdefault(k, v)
        if getattr(migration_script, "_needs_render", False):
            autogen_context = self._last_autogen_context
            # clear out existing imports if we are doing multiple
            # renders
            autogen_context.imports = set()
            if migration_script.imports:
                autogen_context.imports.update(migration_script.imports)
            render._render_python_into_templatevars(
                autogen_context, migration_script, template_args
            )
        return self.script_directory.generate_revision(
            migration_script.rev_id,
            migration_script.message,
            refresh=True,
            head=migration_script.head,
            splice=migration_script.splice,
            branch_labels=migration_script.branch_label,
            version_path=migration_script.version_path,
            depends_on=migration_script.depends_on,
            **template_args
        )
    def run_autogenerate(self, rev, migration_context):
        """Run the environment with autogenerate (schema comparison) enabled."""
        self._run_environment(rev, migration_context, True)
    def run_no_autogenerate(self, rev, migration_context):
        """Run the environment without autogenerate."""
        self._run_environment(rev, migration_context, False)
    def _run_environment(self, rev, migration_context, autogenerate):
        # Autogenerate requires a live database at the current head revision.
        if autogenerate:
            if self.command_args["sql"]:
                raise util.CommandError(
                    "Using --sql with --autogenerate does not make any sense"
                )
            if set(self.script_directory.get_revisions(rev)) != set(
                self.script_directory.get_revisions("heads")
            ):
                raise util.CommandError("Target database is not up to date.")
        upgrade_token = migration_context.opts["upgrade_token"]
        downgrade_token = migration_context.opts["downgrade_token"]
        migration_script = self.generated_revisions[-1]
        # First run populates the existing ops lists; subsequent runs (e.g.
        # multiple databases) append additional op containers per token.
        if not getattr(migration_script, "_needs_render", False):
            migration_script.upgrade_ops_list[-1].upgrade_token = upgrade_token
            migration_script.downgrade_ops_list[
                -1
            ].downgrade_token = downgrade_token
            migration_script._needs_render = True
        else:
            migration_script._upgrade_ops.append(
                ops.UpgradeOps([], upgrade_token=upgrade_token)
            )
            migration_script._downgrade_ops.append(
                ops.DowngradeOps([], downgrade_token=downgrade_token)
            )
        self._last_autogen_context = autogen_context = AutogenContext(
            migration_context, autogenerate=autogenerate
        )
        if autogenerate:
            compare._populate_migration_script(
                autogen_context, migration_script
            )
        # Give both the constructor-supplied and opts-supplied hooks a chance
        # to rewrite the generated revision directives.
        if self.process_revision_directives:
            self.process_revision_directives(
                migration_context, rev, self.generated_revisions
            )
        hook = migration_context.opts["process_revision_directives"]
        if hook:
            hook(migration_context, rev, self.generated_revisions)
        for migration_script in self.generated_revisions:
            migration_script._needs_render = True
    def _default_revision(self):
        """Build the initial MigrationScript skeleton from command arguments."""
        op = ops.MigrationScript(
            rev_id=self.command_args["rev_id"] or util.rev_id(),
            message=self.command_args["message"],
            upgrade_ops=ops.UpgradeOps([]),
            downgrade_ops=ops.DowngradeOps([]),
            head=self.command_args["head"],
            splice=self.command_args["splice"],
            branch_label=self.command_args["branch_label"],
            version_path=self.command_args["version_path"],
            depends_on=self.command_args["depends_on"],
        )
        return op
    def generate_scripts(self):
        """Yield a rendered revision file for each generated revision directive."""
        for generated_revision in self.generated_revisions:
            yield self._to_script(generated_revision)
| 34.780899 | 85 | 0.614548 |
acf8d2c0a2aa13e652984f907f95c39ce968dcb0 | 10,992 | py | Python | harness/determined/cli/cli.py | gh-determined-ai/determined | 9a1ab33a3a356b69681b3351629fef4ab98ddb56 | [
"Apache-2.0"
] | null | null | null | harness/determined/cli/cli.py | gh-determined-ai/determined | 9a1ab33a3a356b69681b3351629fef4ab98ddb56 | [
"Apache-2.0"
] | null | null | null | harness/determined/cli/cli.py | gh-determined-ai/determined | 9a1ab33a3a356b69681b3351629fef4ab98ddb56 | [
"Apache-2.0"
] | null | null | null | import hashlib
import os
import socket
import ssl
import sys
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, FileType, Namespace
from typing import List, Sequence, Union, cast
import argcomplete
import argcomplete.completers
import requests
import tabulate
from OpenSSL import SSL, crypto
from termcolor import colored
import determined
import determined.cli
from determined.cli import render
from determined.cli.agent import args_description as agent_args_description
from determined.cli.checkpoint import args_description as checkpoint_args_description
from determined.cli.experiment import args_description as experiment_args_description
from determined.cli.job import args_description as job_args_description
from determined.cli.master import args_description as master_args_description
from determined.cli.model import args_description as model_args_description
from determined.cli.notebook import args_description as notebook_args_description
from determined.cli.oauth import args_description as oauth_args_description
from determined.cli.project import args_description as project_args_description
from determined.cli.remote import args_description as remote_args_description
from determined.cli.resources import args_description as resources_args_description
from determined.cli.shell import args_description as shell_args_description
from determined.cli.sso import args_description as auth_args_description
from determined.cli.task import args_description as task_args_description
from determined.cli.template import args_description as template_args_description
from determined.cli.tensorboard import args_description as tensorboard_args_description
from determined.cli.top_arg_descriptions import deploy_cmd
from determined.cli.trial import args_description as trial_args_description
from determined.cli.user import args_description as user_args_description
from determined.cli.version import args_description as version_args_description
from determined.cli.version import check_version
from determined.cli.workspace import args_description as workspace_args_description
from determined.common import api, yaml
from determined.common.api import authentication, certs
from determined.common.check import check_not_none
from determined.common.declarative_argparse import Arg, Cmd, add_args, generate_aliases
from determined.common.util import (
chunks,
debug_mode,
get_default_master_address,
safe_load_yaml_with_exceptions,
)
from .errors import EnterpriseOnlyError
@authentication.required
def preview_search(args: Namespace) -> None:
    """Preview how a hyperparameter search would unfold.

    Posts the experiment config to the master's ``searcher/preview``
    endpoint and prints a per-trial breakdown of the planned operations.
    """
    experiment_config = safe_load_yaml_with_exceptions(args.config_file)
    args.config_file.close()
    if "searcher" not in experiment_config:
        print("Experiment configuration must have 'searcher' section")
        sys.exit(1)
    r = api.post(args.master, "searcher/preview", json=experiment_config)
    j = r.json()
    def to_full_name(kind: str) -> str:
        # Translate one searcher op code (e.g. "100B", "V") into prose.
        try:
            # The unitless searcher case, for masters newer than 0.17.6.
            length = int(kind)
            return f"train for {length}"
        except ValueError:
            pass
        if kind[-1] == "R":
            return "train {} records".format(kind[:-1])
        if kind[-1] == "B":
            return "train {} batch(es)".format(kind[:-1])
        if kind[-1] == "E":
            return "train {} epoch(s)".format(kind[:-1])
        if kind == "V":
            return "validation"
        raise ValueError("unexpected kind: {}".format(kind))
    def render_sequence(sequence: List[str]) -> str:
        # Run-length encode consecutive identical ops for compact display.
        if not sequence:
            return "N/A"
        instructions = []
        current = sequence[0]
        count = 0
        for k in sequence:
            if k != current:
                instructions.append("{} x {}".format(count, to_full_name(current)))
                current = k
                count = 1
            else:
                count += 1
        instructions.append("{} x {}".format(count, to_full_name(current)))
        return ", ".join(instructions)
    headers = ["Trials", "Breakdown"]
    values = [
        (count, render_sequence(operations.split())) for operations, count in j["results"].items()
    ]
    print(colored("Using search configuration:", "green"))
    yml = yaml.YAML()
    yml.indent(mapping=2, sequence=4, offset=2)
    yml.dump(experiment_config["searcher"], sys.stdout)
    print()
    print("This search will create a total of {} trial(s).".format(sum(j["results"].values())))
    print(tabulate.tabulate(values, headers, tablefmt="presto"), flush=False)
# fmt: off
# Top-level CLI options and subcommands available on every invocation.
args_description = [
    Arg("-u", "--user",
        help="run as the given user", metavar="username",
        default=None),
    Arg("-m", "--master",
        help="master address", metavar="address",
        default=get_default_master_address()),
    Arg("-v", "--version",
        action="version", help="print CLI version and exit",
        version="%(prog)s {}".format(determined.__version__)),
    Cmd("preview-search", preview_search, "preview search", [
        Arg("config_file", type=FileType("r"),
            help="experiment config file (.yaml)")
    ]),
    deploy_cmd,
] # type: List[object]
# fmt: on
# Aggregate of every subcommand group's argument descriptions; attached to the
# parser for all invocations except `det deploy` (which is lazily imported).
all_args_description = (
    args_description
    + experiment_args_description
    + checkpoint_args_description
    + master_args_description
    + model_args_description
    + agent_args_description
    + notebook_args_description
    + job_args_description
    + resources_args_description
    + project_args_description
    + shell_args_description
    + task_args_description
    + template_args_description
    + tensorboard_args_description
    + trial_args_description
    + remote_args_description
    + user_args_description
    + version_args_description
    + workspace_args_description
    + auth_args_description
    + oauth_args_description
)
def make_parser() -> ArgumentParser:
    """Build the bare top-level argument parser for the Determined CLI."""
    parser = ArgumentParser(
        description="Determined command-line client",
        formatter_class=ArgumentDefaultsHelpFormatter,
    )
    return parser
def main(
    args: List[str] = sys.argv[1:],
) -> None:
    """CLI entry point: build the parser, dispatch the subcommand, and map
    failures to user-friendly exits.

    NOTE(review): the default ``args`` is evaluated once at import time;
    callers that mutate sys.argv afterwards must pass args explicitly.
    """
    if sys.platform == "win32":
        # Magic incantation to make a Windows 10 cmd.exe process color-related ANSI escape codes.
        os.system("")
    # TODO: we lazily import "det deploy" but in the future we'd want to lazily import everything.
    parser = make_parser()
    full_cmd, aliases = generate_aliases(deploy_cmd.name)
    is_deploy_cmd = len(args) > 0 and any(args[0] == alias for alias in [*aliases, full_cmd])
    if is_deploy_cmd:
        from determined.deploy.cli import args_description as deploy_args_description
        add_args(parser, [deploy_args_description])
    else:
        add_args(parser, all_args_description)
    try:
        argcomplete.autocomplete(parser)
        parsed_args = parser.parse_args(args)
        def die(message: str, always_print_traceback: bool = False) -> None:
            # Exit with status 1, optionally dumping the traceback first.
            if always_print_traceback or debug_mode():
                import traceback
                traceback.print_exc(file=sys.stderr)
            parser.exit(1, colored(message + "\n", "red"))
        v = vars(parsed_args)
        if not v.get("func"):
            parser.print_usage()
            parser.exit(2, "{}: no subcommand specified\n".format(parser.prog))
        try:
            # For `det deploy`, skip interaction with master.
            if is_deploy_cmd:
                parsed_args.func(parsed_args)
                return
            # Configure the CLI's Cert singleton.
            certs.cli_cert = certs.default_load(parsed_args.master)
            try:
                check_version(parsed_args)
            except requests.exceptions.SSLError:
                # An SSLError usually means that we queried a master over HTTPS and got an untrusted
                # cert, so allow the user to store and trust the current cert. (It could also mean
                # that we tried to talk HTTPS on the HTTP port, but distinguishing that based on the
                # exception is annoying, and we'll figure that out in the next step anyway.)
                addr = api.parse_master_address(parsed_args.master)
                check_not_none(addr.hostname)
                check_not_none(addr.port)
                try:
                    # Fetch the master's certificate chain directly so the user
                    # can inspect its fingerprint and decide whether to trust it.
                    ctx = SSL.Context(SSL.TLSv1_2_METHOD)
                    conn = SSL.Connection(ctx, socket.socket())
                    conn.set_tlsext_host_name(cast(str, addr.hostname).encode())
                    conn.connect(cast(Sequence[Union[str, int]], (addr.hostname, addr.port)))
                    conn.do_handshake()
                    cert_pem_data = "".join(
                        crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode()
                        for cert in conn.get_peer_cert_chain()
                    )
                except crypto.Error:
                    die(
                        "Tried to connect over HTTPS but couldn't get a certificate from the "
                        "master; consider using HTTP"
                    )
                cert_hash = hashlib.sha256(ssl.PEM_cert_to_DER_cert(cert_pem_data)).hexdigest()
                cert_fingerprint = ":".join(chunks(cert_hash, 2))
                if not render.yes_or_no(
                    "The master sent an untrusted certificate chain with this SHA256 fingerprint:\n"
                    "{}\nDo you want to trust this certificate from now on?".format(
                        cert_fingerprint
                    )
                ):
                    die("Unable to verify master certificate")
                certs.CertStore(certs.default_store()).set_cert(parsed_args.master, cert_pem_data)
                # Reconfigure the CLI's Cert singleton, but preserve the certificate name.
                old_cert_name = certs.cli_cert.name
                certs.cli_cert = certs.Cert(cert_pem=cert_pem_data, name=old_cert_name)
                check_version(parsed_args)
            parsed_args.func(parsed_args)
        except KeyboardInterrupt as e:
            raise e
        except (api.errors.BadRequestException, api.errors.BadResponseException) as e:
            die("Failed to {}: {}".format(parsed_args.func.__name__, e))
        except api.errors.CorruptTokenCacheException:
            die(
                "Failed to login: Attempted to read a corrupted token cache. "
                "The store has been deleted; please try again."
            )
        except EnterpriseOnlyError as e:
            die(f"Determined Enterprise Edition is required for this functionality: {e}")
        except Exception:
            die("Failed to {}".format(parsed_args.func.__name__), always_print_traceback=True)
    except KeyboardInterrupt:
        # die() may not be defined yet.
        if debug_mode():
            import traceback
            traceback.print_exc(file=sys.stderr)
        print(colored("Interrupting...\n", "red"), file=sys.stderr)
        exit(3)
| 39.397849 | 100 | 0.664483 |
acf8d337afcd4c9ef72c80f4ae770a9bda4d0f52 | 20,715 | py | Python | grid_search_parameters.py | LinChen-65/utility-equity-covid-vac | 9194ee0e019b3160254401b84d369900a527da7e | [
"MIT"
] | 1 | 2021-12-30T08:02:33.000Z | 2021-12-30T08:02:33.000Z | grid_search_parameters.py | LinChen-65/utility-equity-covid-vac | 9194ee0e019b3160254401b84d369900a527da7e | [
"MIT"
] | null | null | null | grid_search_parameters.py | LinChen-65/utility-equity-covid-vac | 9194ee0e019b3160254401b84d369900a527da7e | [
"MIT"
] | null | null | null | # python grid_search_parameters.py MSA_NAME quick_test p_sick_at_t0
# python grid_search_parameters.py Atlanta False 5e-4
import setproctitle
setproctitle.setproctitle("covid-19-vac@chenlin")  # label this process in ps/top
import sys
import os
import datetime
import pandas as pd
import numpy as np
import pickle
import time
import constants
import helper
import disease_model #disease_model_original
from math import sqrt
from sklearn.metrics import mean_squared_error
############################################################
# Constants
# Root directory of all input data files.
root = '/data/chenlin/COVID-19/Data'
MSA_NAME_LIST = ['Atlanta','Chicago','Dallas','Houston', 'LosAngeles','Miami','NewYorkCity','Philadelphia','SanFrancisco','WashingtonDC']
# Short MSA name -> full SafeGraph MSA identifier.
MSA_NAME_FULL_DICT = {
    'Atlanta':'Atlanta_Sandy_Springs_Roswell_GA',
    'Chicago':'Chicago_Naperville_Elgin_IL_IN_WI',
    'Dallas':'Dallas_Fort_Worth_Arlington_TX',
    'Houston':'Houston_The_Woodlands_Sugar_Land_TX',
    'LosAngeles':'Los_Angeles_Long_Beach_Anaheim_CA',
    'Miami':'Miami_Fort_Lauderdale_West_Palm_Beach_FL',
    'NewYorkCity':'New_York_Newark_Jersey_City_NY_NJ_PA',
    'Philadelphia':'Philadelphia_Camden_Wilmington_PA_NJ_DE_MD',
    'SanFrancisco':'San_Francisco_Oakland_Hayward_CA',
    'WashingtonDC':'Washington_Arlington_Alexandria_DC_VA_MD_WV'
    }
# Simulation window; matches the mobility data range loaded further below.
MIN_DATETIME = datetime.datetime(2020, 3, 1, 0)
MAX_DATETIME = datetime.datetime(2020, 5, 2, 23)
min_datetime=MIN_DATETIME
max_datetime=MAX_DATETIME
# beta_and_psi_plausible_range is output of make_param_plausibility_plot and should be updated whenever you recalibrate R0. These numbers allow R0_base to range from 0.1 - 2 and R0_PSI to range from 1-3.
BETA_AND_PSI_PLAUSIBLE_RANGE = {"min_home_beta": 0.0011982272027079982,
                                "max_home_beta": 0.023964544054159966,
                                "max_poi_psi": 4886.41659532027,
                                "min_poi_psi": 515.4024854336667}
print('Constants loaded.')
############################################################
# Main variable settings
MSA_NAME = sys.argv[1]; print('MSA_NAME: ',MSA_NAME)  # e.g. 'Atlanta'
MSA_NAME_FULL = MSA_NAME_FULL_DICT[MSA_NAME]
# how_to_select_best_grid_search_models = ['cases','cases_smooth','deaths','deaths_smooth]
how_to_select_best_grid_search_models = 'cases'
# Quick Test: prototyping.
# FIX: sys.argv values are strings, so the previous `quick_test == True`
# comparison could never be true; parse the flag explicitly.
quick_test = (sys.argv[2] == 'True')
# Which part of parameters to test (ranging from 1 to 10).
# FIX: parse as float (e.g. '5e-4') so it can be used numerically downstream.
p_sick_at_t0 = float(sys.argv[3])
# Parameters to experiment
if quick_test:
    NUM_SEEDS = 2
    p_sick_at_t0_list = [1e-2, 5e-3]
    home_beta_list = np.linspace(BETA_AND_PSI_PLAUSIBLE_RANGE['min_home_beta'],BETA_AND_PSI_PLAUSIBLE_RANGE['max_home_beta'], 2)
    poi_psi_list = np.linspace(BETA_AND_PSI_PLAUSIBLE_RANGE['min_poi_psi'], BETA_AND_PSI_PLAUSIBLE_RANGE['max_poi_psi'], 2)
else:
    NUM_SEEDS = 30
    #p_sick_at_t0_list = [1e-2, 5e-3, 2e-3, 1e-3, 5e-4, 2e-4, 1e-4, 5e-5, 2e-5, 1e-5]
    p_sick_at_t0_list = [p_sick_at_t0]
    home_beta_list = np.linspace(BETA_AND_PSI_PLAUSIBLE_RANGE['min_home_beta'],BETA_AND_PSI_PLAUSIBLE_RANGE['max_home_beta'], 10)
    poi_psi_list = np.linspace(BETA_AND_PSI_PLAUSIBLE_RANGE['min_poi_psi'], BETA_AND_PSI_PLAUSIBLE_RANGE['max_poi_psi'], 15)
STARTING_SEED = range(NUM_SEEDS)
############################################################
# functions
def match_msa_name_to_msas_in_acs_data(msa_name, acs_msas):
    '''
    Matches the MSA name from our annotated SafeGraph data to the
    MSA name in the external datasource in MSA_COUNTY_MAPPING.

    msa_name: underscore-delimited name ending in state codes,
        e.g. 'Atlanta_Sandy_Springs_Roswell_GA'.
    acs_msas: iterable of ACS-style names, e.g.
        'Atlanta-Sandy Springs-Roswell, GA'.
    Returns the first ACS name with exactly the same states and at least
    one overlapping city token, or None if nothing matches.
    '''
    msa_pieces = msa_name.split('_')
    # Trailing pieces consisting of two uppercase letters are state codes.
    query_states = set()
    i = len(msa_pieces) - 1
    while i >= 0:  # guard: don't wrap around if every piece is a state code
        piece = msa_pieces[i]
        if len(piece) == 2 and piece.upper() == piece:
            query_states.add(piece)
            i -= 1
        else:
            break
    query_cities = set(msa_pieces[:i+1])
    for msa in acs_msas:
        if ', ' in msa:
            city_string, state_string = msa.split(', ')
            states = set(state_string.split('-'))
            if states == query_states:
                cities = city_string.split('-')
                overlap = set(cities).intersection(query_cities)
                if len(overlap) > 0:  # same states and at least one city matched
                    return msa
    return None
def get_fips_codes_from_state_and_county_fp(state, county):
    """Combine state and county FIPS parts into one 5-digit code, as an int.

    The state part is zero-padded to 2 digits and the county part to 3,
    e.g. (6, 75) -> '06' + '075' -> 6075.
    """
    # zfill replaces the hand-rolled length-based padding of the original.
    state = str(int(state)).zfill(2)
    county = str(int(county)).zfill(3)
    return int(state + county)
# Average history records across random seeds
def average_across_random_seeds(policy, history_C2, history_D2, num_cbgs, cbg_idxs, print_results=False, draw_results=True):
    """
    Average per-seed simulation histories and aggregate them over selected CBGs.

    :param policy: policy label; kept for interface compatibility (unused here)
    :param history_C2: length-num_days sequence; element i is an array of shape
                       (num_seeds, num_cbgs) with cumulative cases
    :param history_D2: same layout as history_C2, cumulative deaths
    :param num_cbgs: total number of CBGs (columns)
    :param cbg_idxs: iterable of CBG column indices to sum into the MSA totals
    :param print_results: if True, print the aggregated series
    :param draw_results: kept for interface compatibility (unused here)
    :return: (avg_history_C2, avg_history_D2, cases_msa, deaths_msa) where the
             avg histories have shape (num_days, num_cbgs) and the *_msa
             series have shape (num_days,)
    """
    num_days = len(history_C2)
    # Average across seeds (axis 0 of each per-day array).
    avg_history_C2 = np.zeros((num_days, num_cbgs))
    avg_history_D2 = np.zeros((num_days, num_cbgs))
    for i in range(num_days):
        avg_history_C2[i] = np.mean(history_C2[i], axis=0)
        avg_history_D2[i] = np.mean(history_D2[i], axis=0)
    # Sum the selected CBG columns with one vectorized operation per array
    # instead of the previous Python double loop over days x indices.
    idxs = np.asarray(list(cbg_idxs), dtype=int)
    cases_msa = avg_history_C2[:, idxs].sum(axis=1)
    deaths_msa = avg_history_D2[:, idxs].sum(axis=1)
    if(print_results==True):
        print('Cases: ',cases_msa)
        print('Deaths: ',deaths_msa)
    return avg_history_C2, avg_history_D2,cases_msa,deaths_msa
def apply_smoothing(x, agg_func=np.mean, before=3, after=3):
    """
    Sliding-window smoothing: element i becomes agg_func over the window
    x[i-before : i+after+1], clipped to the sequence bounds at both ends.
    Returns a NumPy array of the same length as x.
    """
    n = len(x)
    smoothed = [
        agg_func(x[max(0, i - before):min(n, i + after + 1)])
        for i in range(n)
    ]
    return np.array(smoothed)
############################################################
# Load Data
# Load POI-CBG visiting matrices
f = open(os.path.join(root, MSA_NAME, '%s_2020-03-01_to_2020-05-02.pkl'%MSA_NAME_FULL), 'rb')
poi_cbg_visits_list = pickle.load(f)
f.close()
# Load precomputed parameters to adjust(clip) POI dwell times
#d = pd.read_csv(os.path.join(root,'data_after_process(gao)\\parameters1.csv')) # Philadelphia MSA
d = pd.read_csv(os.path.join(root,MSA_NAME, 'parameters_%s.csv' % MSA_NAME))
#d.rename(columns={"safegraph_computed_area_in_square_feet":"feet"},inplace=True)
#d.rename(columns={"avg_median_dwell":"median"},inplace=True)
# No clipping
new_d = d
all_hours = helper.list_hours_in_range(min_datetime, max_datetime)
#poi_areas = new_d['safegraph_computed_area_in_square_feet'].values#面积
#poi_dwell_times = new_d['avg_median_dwell'].values#平均逗留时间
poi_areas = new_d['feet'].values#面积
poi_dwell_times = new_d['median'].values#平均逗留时间
poi_dwell_time_correction_factors = (poi_dwell_times / (poi_dwell_times+60)) ** 2
# Load ACS Data for MSA-county matching
acs_data = pd.read_csv(os.path.join(root,'list1.csv'),header=2)
acs_msas = [msa for msa in acs_data['CBSA Title'].unique() if type(msa) == str]
msa_match = match_msa_name_to_msas_in_acs_data(MSA_NAME_FULL, acs_msas)
msa_data = acs_data[acs_data['CBSA Title'] == msa_match].copy()
msa_data['FIPS Code'] = msa_data.apply(lambda x : get_fips_codes_from_state_and_county_fp((x['FIPS State Code']),x['FIPS County Code']), axis=1)
good_list = list(msa_data['FIPS Code'].values)
print(good_list)
# Load CBG ids for the MSA
cbg_ids_msa = pd.read_csv(os.path.join(root,MSA_NAME,'%s_cbg_ids.csv'%MSA_NAME_FULL))
cbg_ids_msa.rename(columns={"cbg_id":"census_block_group"}, inplace=True)
M = len(cbg_ids_msa)
# Mapping from cbg_ids to columns in hourly visiting matrices
cbgs_to_idxs = dict(zip(cbg_ids_msa['census_block_group'].values, range(M)))
x = {}
for i in cbgs_to_idxs:
x[str(i)] = cbgs_to_idxs[i]
print('Number of CBGs in this metro area:', M)
# Load SafeGraph data to obtain CBG sizes (i.e., populations)
filepath = os.path.join(root,"safegraph_open_census_data/data/cbg_b01.csv")
cbg_agesex = pd.read_csv(filepath)
# Extract CBGs belonging to the MSA
# https://covid-mobility.stanford.edu//datasets/
cbg_agesex_msa = pd.merge(cbg_ids_msa, cbg_agesex, on='census_block_group', how='left')
cbg_age_msa = cbg_agesex_msa.copy()
# Add up males and females of the same age, according to the detailed age list (DETAILED_AGE_LIST)
# which is defined in Constants.py
for i in range(3,25+1): # 'B01001e3'~'B01001e25'
male_column = 'B01001e'+str(i)
female_column = 'B01001e'+str(i+24)
cbg_age_msa[constants.DETAILED_AGE_LIST[i-3]] = cbg_age_msa.apply(lambda x : x[male_column]+x[female_column],axis=1)
# Rename
cbg_age_msa.rename(columns={'B01001e1':'Sum'},inplace=True)
# Extract columns of interest
columns_of_interest = ['census_block_group','Sum'] + constants.DETAILED_AGE_LIST
cbg_age_msa = cbg_age_msa[columns_of_interest].copy()
# Deal with CBGs with 0 populations
print(cbg_age_msa[cbg_age_msa['Sum']==0]['census_block_group'])
cbg_age_msa['Sum'] = cbg_age_msa['Sum'].apply(lambda x : x if x!=0 else 1)
M = len(cbg_age_msa)
cbg_sizes = cbg_age_msa['Sum'].values
cbg_sizes = np.array(cbg_sizes,dtype='int32')
print('Total population: ',np.sum(cbg_sizes))
# Select counties belonging to the MSA
'''
good_list = {
'Atlanta':[13013, 13015, 13035, 13045, 13057, 13063, 13067, 13077, 13085, 13089, 13097, 13113, 13117, 13121,
13135, 13143, 13149, 13151, 13159, 13171, 13199, 13211, 13217, 13223, 13227, 13231, 13247, 13255, 13297],
'Chicago':[17031, 17037, 17043, 17063, 17089, 17093, 17097, 17111, 17197, 18073, 18089, 18111, 18127, 55059],
'Dallas': [48085, 48113, 48121, 48139, 48221, 48231, 48251, 48257, 48367, 48397, 48425, 48439, 48497],
'Houston':[48015, 48039, 48071, 48157, 48167, 48201, 48291, 48339, 48473],
'LosAngeles':[6111, 6071, 6065, 6037, 6059],
'Miami': [12011, 12086, 12099],
'NewYorkCity': [34003, 34013, 34017, 34019, 34023, 34025, 34027, 34029, 34031, 34035, 34037, 34039, 36005, 36027,
36047, 36059, 36061, 36071, 36079, 36081, 36085, 36087, 36103, 36119, 42103],
'Philadelphia':[34005,34007,34015,42017,42029,42091,42045,42101,10003,24015,34033,42011,10001,34001,34009,34011],
'SanFrancisco': [6001, 6013, 6041, 6075, 6081],
'WashingtonDC': [24009, 24017, 24021, 24031, 24033, 51013, 51043, 51047, 51059, 51061, 51107, 51153, 51157, 51177,
51179, 51187, 51510, 51600, 51610, 51630, 51683, 51683, 51685]
}
'''
#good_list = [6111, 6071, 6065, 6037, 6059] # Los Angeles
y = []
for i in x:
if((len(i)==12) & (int(i[0:5])in good_list)):
y.append(x[i])
if((len(i)==11) & (int(i[0:4])in good_list)):
y.append(x[i])
idxs_msa = list(x.values())
idxs_county = y
print('Number of CBGs in this metro area:', len(idxs_msa))
print('Number of CBGs in to compare with NYT data:', len(idxs_county))
# Load ground truth: NYT Data
nyt_data = pd.read_csv(os.path.join(root, 'us-counties.csv'))
nyt_data['in_msa'] = nyt_data.apply(lambda x : x['fips'] in good_list , axis=1)
nyt_data_msa = nyt_data[nyt_data['in_msa']==True].copy()
# Extract data according to simulation time range
nyt_data_msa['in_simu_period'] = nyt_data_msa['date'].apply(lambda x : True if (x<'2020-05-10') & (x>'2020-03-07') else False)
nyt_data_msa_in_simu_period = nyt_data_msa[nyt_data_msa['in_simu_period']==True].copy()
nyt_data_msa_in_simu_period.reset_index(inplace=True)
# Group by date
nyt_data_group = nyt_data_msa_in_simu_period.groupby(nyt_data_msa_in_simu_period["date"])
# Sum up cases/deaths from different counties
# Cumulative
nyt_data_cumulative = nyt_data_group.sum()[['cases','deaths']]
# From cumulative to daily
# Cases
cases_daily = [0]
for i in range(1,len(nyt_data_cumulative)):
cases_daily.append(nyt_data_cumulative['cases'].values[i]-nyt_data_cumulative['cases'].values[i-1])
# Smoothed ground truth
cases_daily_smooth = apply_smoothing(cases_daily, agg_func=np.mean, before=3, after=3)
'''
if(len(cases_daily_smooth)<len(cases_total_no_vaccination)):
cases_daily_smooth = [0]*(len(cases_total_no_vaccination)-len(cases_daily_smooth)) + list(cases_daily_smooth)
'''
# Deaths
deaths_daily = [0]
for i in range(1,len(nyt_data_cumulative)):
deaths_daily.append(nyt_data_cumulative['deaths'].values[i]-nyt_data_cumulative['deaths'].values[i-1])
# Smoothed ground truth
deaths_daily_smooth = apply_smoothing(deaths_daily, agg_func=np.mean, before=3, after=3)
'''
if(len(deaths_daily_smooth)<len(deaths_total_no_vaccination)):
deaths_daily_smooth = [0]*(len(deaths_total_no_vaccination)-len(deaths_daily_smooth)) + list(deaths_daily_smooth)
'''
# Initialization: only need to be performed once
m = disease_model_original.Model(starting_seed=STARTING_SEED,
num_seeds=NUM_SEEDS,
debug=False,
clip_poisson_approximation=True,
ipf_final_match='poi',
ipf_num_iter=100)
rmse_dict_cases_agnostic = dict()
rmse_dict_cases_smooth_agnostic = dict()
rmse_dict_deaths_agnostic = dict()
rmse_dict_deaths_smooth_agnostic = dict()
# Grid search
isfirst = True
start = time.time()
for idx_p_sick_at_t0 in range(len(p_sick_at_t0_list)):
for idx_home_beta in range(len(home_beta_list)):
for idx_poi_psi in range(len(poi_psi_list)):
p_sick_at_t0=p_sick_at_t0_list[idx_p_sick_at_t0]
home_beta=home_beta_list[idx_home_beta]
poi_psi=poi_psi_list[idx_poi_psi]
print('\nCurrent parameter set: [%s,%s,%s].'%(p_sick_at_t0, home_beta, poi_psi))
m.init_exogenous_variables(poi_areas=poi_areas,
poi_dwell_time_correction_factors=poi_dwell_time_correction_factors,
cbg_sizes=cbg_sizes,
poi_cbg_visits_list=poi_cbg_visits_list,
all_hours=all_hours,
p_sick_at_t0=p_sick_at_t0,
home_beta=home_beta,
poi_psi=poi_psi,
just_compute_r0=False,
latency_period=96, # 4 days
infectious_period=84, # 3.5 days
confirmation_rate=.1,
confirmation_lag=168, # 7 days
death_rate=.0066,
death_lag=432)
m.init_endogenous_variables()
T1,L_1,I_1,R_1,C2,D2, history_C2, history_D2, total_affected_cbg = m.simulate_disease_spread()
total_affected_cbg_age_agnostic = total_affected_cbg
history_C2_age_agnostic = history_C2.copy()
history_D2_age_agnostic = history_D2.copy()
# Average history records across random seeds
policy = 'Age_Agnostic'
_, _, cases_total_age_agnostic, deaths_total_age_agnostic = average_across_random_seeds(policy, history_C2_age_agnostic, history_D2_age_agnostic, M, idxs_county,
print_results=False,draw_results=False)
print(cases_total_age_agnostic[-1], deaths_total_age_agnostic[-1])
cases_total_age_agnostic_final = cases_total_age_agnostic[-1]
deaths_total_age_agnostic_final = deaths_total_age_agnostic[-1]
# From cumulative to daily
cases_daily_total_age_agnostic = [0]
for i in range(1,len(cases_total_age_agnostic)):
cases_daily_total_age_agnostic.append(cases_total_age_agnostic[i]-cases_total_age_agnostic[i-1])
deaths_daily_total_age_agnostic = [0]
for i in range(1,len(deaths_total_age_agnostic)):
deaths_daily_total_age_agnostic.append(deaths_total_age_agnostic[i]-deaths_total_age_agnostic[i-1])
cases = nyt_data_cumulative['cases'].values
cases_smooth = apply_smoothing(cases, agg_func=np.mean, before=3, after=3)
'''
if(len(cases_smooth)<len(cases_total_no_vaccination)):
cases_smooth = [0]*(len(cases_total_no_vaccination)-len(cases_smooth)) + list(cases_smooth)
if(len(cases)<len(cases_total_no_vaccination)):
cases = [0]*(len(cases_total_no_vaccination)-len(cases)) + list(cases)
'''
deaths = nyt_data_cumulative['deaths'].values
deaths_smooth = apply_smoothing(deaths, agg_func=np.mean, before=3, after=3)
'''
if(len(deaths_smooth)<len(deaths_total_no_vaccination)):
deaths_smooth = [0]*(len(deaths_total_no_vaccination)-len(deaths_smooth)) + list(deaths_smooth)
print(len(deaths_smooth))
if(len(deaths)<len(deaths_total_no_vaccination)):
deaths = [0]*(len(deaths_total_no_vaccination)-len(deaths)) + list(deaths)
'''
# RMSE across random seeds
rmse_dict_cases_agnostic['%s,%s,%s'%(p_sick_at_t0, home_beta, poi_psi)] = sqrt(mean_squared_error(cases,cases_total_age_agnostic))
rmse_dict_cases_smooth_agnostic['%s,%s,%s'%(p_sick_at_t0, home_beta, poi_psi)] = sqrt(mean_squared_error(cases_smooth,cases_total_age_agnostic))
rmse_dict_deaths_agnostic['%s,%s,%s'%(p_sick_at_t0, home_beta, poi_psi)] = sqrt(mean_squared_error(deaths,deaths_total_age_agnostic))
rmse_dict_deaths_smooth_agnostic['%s,%s,%s'%(p_sick_at_t0, home_beta, poi_psi)] = sqrt(mean_squared_error(deaths_smooth,deaths_total_age_agnostic))
if(how_to_select_best_grid_search_models == 'cases'):
if(isfirst==True):
best_rmse = sqrt(mean_squared_error(cases,cases_total_age_agnostic))
best_parameters = [p_sick_at_t0, home_beta, poi_psi]
print('Current best: ', best_rmse, '\nCurrent best parameter set: [%s,%s,%s].'%(p_sick_at_t0, home_beta, poi_psi))
else:
print('Current mse: ', sqrt(mean_squared_error(cases,cases_total_age_agnostic)))
print('Previous best: ',best_rmse)
if(best_rmse > sqrt(mean_squared_error(cases,cases_total_age_agnostic))):
best_rmse = sqrt(mean_squared_error(cases,cases_total_age_agnostic))
best_parameters = [p_sick_at_t0, home_beta, poi_psi]
print('Current best: ', best_rmse, '\nCurrent best parameter set: [%s,%s,%s].'%(p_sick_at_t0, home_beta, poi_psi))
else:
print('Current best not changed. \nCurrent best parameter set:',best_parameters)
isfirst = False
# Save rmse dicts
# NOTE(review): fixed two crashes here — the first np.save call had an
# unbalanced parenthesis (SyntaxError), and the second used '%s' with two
# format arguments (TypeError); its filename now matches its siblings.
np.save(os.path.join(root, MSA_NAME, '20210127_rmse_cases_%s_%s' % (MSA_NAME, p_sick_at_t0)), rmse_dict_cases_agnostic)
np.save(os.path.join(root, MSA_NAME, '20210127_rmse_cases_smooth_%s_%s' % (MSA_NAME, p_sick_at_t0)), rmse_dict_cases_smooth_agnostic)
np.save(os.path.join(root, MSA_NAME, '20210127_rmse_deaths_%s_%s' % (MSA_NAME, p_sick_at_t0)), rmse_dict_deaths_agnostic)
np.save(os.path.join(root, MSA_NAME, '20210127_rmse_deaths_smooth_%s_%s' % (MSA_NAME, p_sick_at_t0)), rmse_dict_deaths_smooth_agnostic)
# Save best results (best RMSE and the parameter triple that achieved it).
# NOTE(review): best_rmse/best_parameters are only assigned when
# how_to_select_best_grid_search_models == 'cases' — confirm other modes.
best_results = dict()
best_results['rmse'] = best_rmse
best_results['parameters'] = best_parameters
np.save(os.path.join(root, MSA_NAME, '20210127_best_results_%s_%s_%s' % (how_to_select_best_grid_search_models, MSA_NAME, p_sick_at_t0)), best_results)
end = time.time()
print('Total Time:', (end - start))
| 46.033333 | 204 | 0.649336 |
acf8d3d55fe1cbcc7d3c8a2203b30f085de715cf | 35,850 | py | Python | booltest/testjobsproc.py | sobuch/polynomial-distinguishers | 5a007abd222d00cbf99f1083c3b537343d2fff56 | [
"MIT"
] | 5 | 2017-03-03T13:53:51.000Z | 2019-05-09T09:47:28.000Z | booltest/testjobsproc.py | sobuch/polynomial-distinguishers | 5a007abd222d00cbf99f1083c3b537343d2fff56 | [
"MIT"
] | 5 | 2017-10-07T11:15:09.000Z | 2021-01-25T17:03:59.000Z | booltest/testjobsproc.py | sobuch/polynomial-distinguishers | 5a007abd222d00cbf99f1083c3b537343d2fff56 | [
"MIT"
] | 6 | 2017-03-26T17:06:20.000Z | 2021-11-15T22:22:33.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# graphing dependencies: matplotlib, seaborn, pandas
__author__ = 'dusanklinec'
from past.builtins import cmp
import argparse
import fileinput
import json
import time
import re
import os
import copy
import shutil
import hashlib
import sys
import collections
import itertools
import traceback
import logging
import math
import coloredlogs
from typing import List, Dict, Tuple, Any, Optional
from booltest import common, egenerator, timer
logger = logging.getLogger(__name__)
coloredlogs.CHROOT_FILES = []
coloredlogs.install(level=logging.DEBUG, use_chroot=False)
# Method used for generating reference looking data stream
REFERENCE_METHOD = 'inctr-krnd-ri0'
class Checkpoint(object):
    """
    Persistent snapshot of the result-processing state so long runs can
    resume without re-reading every result file. Only all_records is the
    authoritative cache; the remaining aggregates are rebuilt from it.
    """
    def __init__(self, **kwargs):
        self.all_records = {}  # primary caching DB
        self.args = None  # CLI args (stored as dict after serialization)
        self.time = time.time()  # creation timestamp
        # Secondary data for re-cache, not checkpointed as it is recreated from all_records
        self.test_records = []
        self.total_functions = []
        self.ref_bins = collections.defaultdict(lambda: [])
        self.timing_bins = collections.defaultdict(lambda: [])
        self.skipped = 0
        self.invalid_results = []
        self.invalid_results_num = 0
        for kw in kwargs:
            setattr(self, kw, kwargs[kw])

    def get_cached(self, bname):
        """Return the cached TestRecord for a result base name, or None."""
        return self.all_records[bname] if bname in self.all_records else None

    def get_cached_keys(self):
        """Return the set of result base names already cached."""
        return set(self.all_records.keys())

    def add_record(self, tr):
        """Insert a record into the cache; ignores None and duplicates."""
        if tr is None or tr.bname in self.all_records:
            return
        self.all_records[tr.bname] = tr

    def recache(self):
        """Fold all current test_records into the primary cache."""
        for tr in self.test_records:
            self.add_record(tr)

    def to_json(self):
        """Serialize to a JSON-compatible dict. ref_bins/timing_bins and
        test_records are intentionally dropped (emptied) — they are
        recreated from all_records on load via recache()."""
        dct = dict(self.__dict__)
        dct['args'] = args_to_dict(self.args) if not isinstance(self.args, dict) else self.args
        dct['ref_bins'] = {}  # {json.dumps(x): self.ref_bins[x] for x in self.ref_bins}
        dct['timing_bins'] = {}  # {json.dumps(x): self.timing_bins[x] for x in self.timing_bins}
        dct['total_functions'] = list(self.total_functions)
        dct['test_records'] = []
        return dct

    def from_json(self, data):
        """Restore state from a deserialized checkpoint dict, migrating old
        checkpoints (pre-all_records) and reviving TestRecord objects."""
        for k in data:
            setattr(self, k, data[k])
        # Migration: older checkpoints only stored test_records; rebuild the
        # primary cache from them (still raw dicts at this point).
        if len(self.all_records) == 0:
            self.all_records = {x['bname']: x for x in self.test_records}
            logger.info('Checkpoint migrated')
        self.all_records = {x: TestRecord.new_from_json(self.all_records[x]) for x in self.all_records}
        self.total_functions = set(self.total_functions)
        self.test_records = [TestRecord.new_from_json(x) for x in self.test_records]
        # Bin keys were serialized as JSON strings of tuples; decode back.
        ref_data = {tuple(json.loads(x)): [TestRecord.new_from_json(y) for y in self.ref_bins[x]] for x in self.ref_bins}
        self.ref_bins = collections.defaultdict(lambda: [], ref_data)
        timing_data = {tuple(json.loads(x)): self.timing_bins[x] for x in self.timing_bins}
        self.timing_bins = collections.defaultdict(lambda: [], timing_data)
class TestRecord(object):
    """
    Represents one performed test and its result.
    """
    def __init__(self, **kwargs):
        self.function = None  # tested function name
        self.round = None  # number of rounds of the tested function
        self.block = None  # booltest block length
        self.deg = None  # booltest degree
        self.comb_deg = None  # booltest combination degree
        self.data = None  # data size in MB (converted in process_file)
        self.data_bytes = None  # raw bytes read
        self.data_hash = None  # hash of the input data
        self.elapsed = None
        self.iteration = 0  # test repetition index (from '-N.json' suffix)
        self.strategy = None  # full strategy string from the config
        self.method = None  # strategy reduced by get_method()
        self.ref = False  # True when this is an AES reference run
        self.time_process = None
        self.bname = None  # result file base name
        self.fhash = None  # sha1 of the result file contents
        self.mtime = None  # result file modification time
        self.cfg_file_name = None
        self.seed = None
        self.zscore = None  # best z-score found by booltest
        self.best_poly = None
        self.is_halving = False  # True when the halving variant was used
        self.pvalue = None  # p-value (halving variant only)
        self.halving_zscore = None
        for kw in kwargs:
            setattr(self, kw, kwargs[kw])

    def __cmp__(self, other):
        """
        Compare: function, round, data, block, deg, k.
        NOTE(review): __cmp__ is a Python 2 protocol; cmp here comes from
        past.builtins. Under Python 3 this is only invoked explicitly.
        :param other:
        :return:
        """
        a = (self.function, self.round, self.data, self.block, self.deg, self.comb_deg)
        b = (other.function, other.round, other.data, other.block, other.deg, other.comb_deg)
        return cmp(a, b)

    def method_unhw(self):
        """Method string with Hamming-weight variant suffixes collapsed to 'hw'."""
        return re.sub(r'hw[0-9]+[rsi]{0,3}', 'hw', self.method)

    def method_generic(self):
        """Fully generic reference method name (see REFERENCE_METHOD)."""
        return REFERENCE_METHOD

    def __repr__(self):
        return '%s-r%d-d%s_bl%d-deg%d-k%d' % (self.function, self.round, self.data, self.block, self.deg, self.comb_deg)

    def ref_category(self):
        """Exact reference bin key: (method, block, deg, comb_deg, data)."""
        return self.method, self.block, self.deg, self.comb_deg, self.data

    def bool_category(self):
        """Booltest-parameter bin key, method-independent."""
        return self.block, self.deg, self.comb_deg, self.data

    def ref_category_unhw(self):
        """Reference bin key with hw variants collapsed."""
        return self.method_unhw(), self.block, self.deg, self.comb_deg, self.data

    def ref_category_generic(self):
        """Reference bin key with the generic reference method."""
        return self.method_generic(), self.block, self.deg, self.comb_deg, self.data

    def to_json(self):
        return self.__dict__

    def from_json(self, data):
        for k in data:
            setattr(self, k, data[k])

    @classmethod
    def new_from_json(cls, data):
        """Alternate constructor: build a TestRecord from a plain dict."""
        r = cls()
        r.from_json(data)
        return r
class PvalDb(object):
    """
    Lookup table of z-score extremes, keyed by (block, deg, comb_deg),
    loaded from a JSON file of records with 'block', 'deg', 'comb_deg'
    and 'extremes' fields.
    """
    def __init__(self, fname=None):
        self.fname = fname
        self.data = None
        # Three-level mapping block -> deg -> comb_deg -> extremes (None default).
        self.map = collections.defaultdict(
            lambda: collections.defaultdict(
                lambda: collections.defaultdict(lambda: None)))

    def load(self):
        """Parse the JSON table into self.map; no-op when fname is unset."""
        if self.fname is None:
            return
        self.data = json.load(open(self.fname))
        for record in self.data:
            # pvalrec = record['pvals'][0]
            block, deg, cdeg = record['block'], record['deg'], record['comb_deg']
            self.map[block][deg][cdeg] = record['extremes']

    def eval(self, block, deg, cdeg, zscore):
        """True when |zscore| lies outside the [min, max] extreme band,
        False when inside, None when no table entry exists."""
        extremes = self.map[block][deg][cdeg]
        if extremes is None:
            return None
        low = extremes[0][0]
        high = extremes[1][0]
        magnitude = abs(zscore)
        return magnitude < low or magnitude > high
def args_to_dict(args):
    """Shallow-copy an argparse.Namespace-like object into a plain dict;
    returns None for a falsy argument."""
    if not args:
        return None
    return {name: getattr(args, name, None) for name in args.__dict__}
def get_method(strategy):
    """
    Parses the data-generation method out of a full strategy string by
    stripping booltest parameters, function-specific parts and krnd
    iteration suffixes.
    :param strategy: full strategy string, e.g. '...-100MB-256bl-2deg-2k-...'
    :return: normalized method string ('static' for static test files)
    """
    # strip booltest params (data size / block length / degree / comb-k)
    method = re.sub(r'[\d]{1,4}MB-[\d]{3}bl-[\d]deg-[\d]k(-\d+)?', '', strategy)
    # strip function dependent info (function name, rounds, test vectors)
    method = re.sub(r'tp[\w]+-[\w-]+?-r\d+-tv\d+', '', method)
    # trim leftover leading/trailing dashes
    method = re.sub(r'^[-]+', '', method)
    method = re.sub(r'[-]+$', '', method)
    # strip krnd iteration so all krndN variants collapse to 'krnd'
    method = method.replace('krnd0', 'krnd')
    method = method.replace('krnd1', 'krnd')
    method = method.replace('krnd-1', 'krnd')
    method = re.sub(r'-krnd[0-9]+-$', 'krnd', method)
    # collapse double dashes left by the removals above
    method = method.replace('--', '-')
    if method.endswith('-static'):
        return 'static'
    return method
def process_file(js, fname, args=None):
    """
    Convert one parsed booltest result JSON into a TestRecord.
    :param js: parsed result JSON (booltest output)
    :param fname: base file name of the result; used for bname and to
                  extract the iteration index from a '-N.json' suffix
    :param args: parsed CLI args; only zscore_shape is consulted here
                 (NOTE(review): args=None would crash on args.zscore_shape —
                 callers always pass args; confirm before relying on default)
    :return: populated TestRecord
    """
    tr = TestRecord()
    tr.zscore = common.defvalkey(js, 'best_zscore')
    if tr.zscore:
        tr.zscore = round(tr.zscore, 6)
        if args.zscore_shape:
            # Collapse to absolute rounded integer (histogram "shape" mode).
            tr.zscore = int(abs(round(tr.zscore)))
    tr.best_poly = common.defvalkey(js, 'best_poly')
    tr.function = common.defvalkeys(js, 'config.config.spec.fnc')
    tr.round = common.defvalkeys(js, 'config.config.spec.c_round')
    tr.data = common.defvalkeys(js, 'config.config.spec.data_size')
    tr.strategy = common.defvalkeys(js, 'config.config.spec.strategy')
    tr.method = get_method(tr.strategy)
    tr.time_process = common.defvalkeys(js, 'time_process')
    tr.cfg_file_name = common.defvalkeys(js, 'config.config.spec.gen_cfg.file_name')
    tr.data_bytes = common.defvalkeys(js, 'data_read')
    tr.data_hash = common.defvalkeys(js, 'data_hash')
    tr.seed = common.defvalkeys(js, 'config.config.spec.gen_cfg.seed')
    if tr.data:
        # Convert bytes to whole megabytes (rounded up).
        tr.data = int(math.ceil(math.ceil(tr.data/1024.0)/1024.0))
    if not tr.cfg_file_name:
        tr.cfg_file_name = common.defvalkeys(js, 'config.config.gen_file')
    # Normalize config file name: drop 'gen-' prefix and '.json' suffix.
    if tr.cfg_file_name and tr.cfg_file_name.startswith('gen-'):
        tr.cfg_file_name = tr.cfg_file_name[4:]
    if tr.cfg_file_name and tr.cfg_file_name.endswith('json'):
        tr.cfg_file_name = tr.cfg_file_name[:-5]  # also drops the dot
    tr.bname = fname
    mtch = re.search(r'-(\d+)\.json$', fname)
    if mtch:
        tr.iteration = int(mtch.group(1))
    # if 'stream' in js['generator']:
    #     tr.function = js['generator']['stream']['algorithm']
    #     tr.round = js['generator']['stream']['round']
    #
    # else:
    #     tr.function = js['generator']['algorithm']
    #     tr.round = js['generator']['round']
    tr.block = js['blocklen']
    tr.deg = js['degree']
    tr.comb_deg = js['comb_degree']
    # if 'elapsed' in js:
    #     tr.elapsed = js['elapsed']
    tr.is_halving = common.defvalkeys(js, 'config.halving')
    if tr.is_halving:
        tr.pvalue = common.defvalkeys(js, 'best_pval')
    return tr
def fls(x):
    """
    Render a value as a string with ',' as the decimal separator
    (the Excel/European convention used by the CSV outputs).
    """
    return ','.join(str(x).split('.'))
def get_ref_value(ref_avg, tr):
    """
    Look up the reference average closest to the given test record,
    trying progressively more generic category keys:
    exact -> hw-collapsed -> fully generic reference method.
    Returns None when no reference exists at any level.
    """
    lookup_order = (
        tr.ref_category(),
        tr.ref_category_unhw(),
        tr.ref_category_generic(),
    )
    for key in lookup_order:
        if key in ref_avg:
            return ref_avg[key]
    return None
def is_over_threshold(ref_avg, tr):
    """
    Decide whether a test record's |z-score| is at least 1.0 above the
    matching reference average (i.e. a likely distinguisher).
    :param ref_avg: dict of reference averages keyed by category tuples
    :param tr: TestRecord
    :return: False when no reference is available, else the comparison result
    """
    reference = get_ref_value(ref_avg, tr)
    if reference is None:
        return False
    return abs(tr.zscore) >= reference + 1.0
def get_ref_val_def(ref_avg, block, deg, comb_deg, data):
    """Reference average for the generic REFERENCE_METHOD at the given
    (block, deg, comb_deg, data) point, or None when absent."""
    return ref_avg.get((REFERENCE_METHOD, block, deg, comb_deg, data))
def is_narrow(fname, narrow_type=0):
    """
    Returns true if function is in the narrow set.
    Thin wrapper over egenerator.is_narrow; narrow_type selects the set
    (0 = --narrow, 1 = --narrow2, 2 = --benchmark, per the CLI flags).
    :param fname: function/file name to classify
    :param narrow_type: which narrow set to test against
    :return: bool
    """
    return egenerator.is_narrow(fname, narrow_type)
def average(it):
    """
    Arithmetic mean of an iterable. Generalized to consume any iterable
    (including generators), not only sized sequences; behavior on lists
    is unchanged. Raises ZeroDivisionError on empty input.
    """
    values = list(it)
    return sum(values) / float(len(values))
class Processor(object):
CHECKPOINT_NAME = 'booltest_proc_checkpoint.json'
REF_NAME = '-aAES-r10-'
    def __init__(self):
        self.args = None  # parsed CLI arguments
        self.checkpoint = Checkpoint()
        self.time_checkpoint = timer.Timer(start=False)  # time spent in checkpointing
        self.checkpointed_files = set()  # result names already in the checkpoint
        self.last_checkpoint = time.time()  # do not re-create checkpoint right from the start
        self.last_checkpoint_new_rec = 0
        self.last_checkpoint_cached_rec = 0
        self.tf = None  # tarfile
        # ref bins: method, bl, deg, comb, data
        self.skipped = None
        self.total_functions = None
        self.ref_bins = None
        self.timing_bins = None
        self.test_records = None  # type: Optional[List[TestRecord]]
        self.invalid_results = None
        self.invalid_results_num = None
        self.reset_state()
    def reset_state(self):
        """Reset all per-run aggregation state to empty/zero values."""
        self.skipped = 0
        self.total_functions = set()
        self.ref_bins = collections.defaultdict(lambda: [])
        self.timing_bins = collections.defaultdict(lambda: [])
        self.test_records = []
        self.invalid_results = []
        self.invalid_results_num = 0
    def get_parser(self):
        """Build and return the argparse parser for the processor CLI."""
        parser = argparse.ArgumentParser(description='Process battery of tests')
        parser.add_argument('--json', dest='json', default=False, action='store_const', const=True,
                            help='JSON output')
        parser.add_argument('--zscore-shape', dest='zscore_shape', default=False, action='store_const', const=True,
                            help='abs(round(zscore))')
        parser.add_argument('--out-dir', dest='out_dir', default='.',
                            help='dir for results')
        parser.add_argument('--file-suffix', dest='file_suffix', default='',
                            help='suffix for result files')
        parser.add_argument('--delim', dest='delim', default=';',
                            help='CSV delimiter')
        parser.add_argument('--tar', dest='tar', default=False, action='store_const', const=True,
                            help='Rad tar archive instead of the folder')
        parser.add_argument('--narrow', dest='narrow', default=False, action='store_const', const=True,
                            help='Process only smaller set of functions')
        parser.add_argument('--narrow2', dest='narrow2', default=False, action='store_const', const=True,
                            help='Process only smaller set of functions2')
        parser.add_argument('--benchmark', dest='benchmark', default=False, action='store_const', const=True,
                            help='Process only smaller set of fnc: benchmark')
        parser.add_argument('--static', dest='static', default=False, action='store_const', const=True,
                            help='Process only static test files')
        parser.add_argument('--aes-ref', dest='aes_ref', default=False, action='store_const', const=True,
                            help='Process only AES reference')
        parser.add_argument('--pval-data', dest='pval_data', default=None,
                            help='file with pval tables')
        parser.add_argument('--num-inp', dest='num_inp', default=None, type=int,
                            help='Max number of inputs, for testing')
        parser.add_argument('--checkpoint', dest='checkpoint', default=False, action='store_const', const=True,
                            help='Dump checkpoints')
        parser.add_argument('--checkpoint-period', dest='checkpoint_period', default=50000, type=int,
                            help='Checkpoint period (create after X reads)')
        parser.add_argument('--checkpoint-file', dest='checkpoint_file', default=self.CHECKPOINT_NAME,
                            help='Checkpoint file name')
        parser.add_argument('--delete-invalid', dest='delete_invalid', default=False, action='store_const', const=True,
                            help='Delete invalid results')
        parser.add_argument('--append-seed', dest='append_seed', default=False, action='store_const', const=True,
                            help='Append seed to the data file')
        parser.add_argument('folder', nargs=argparse.ZERO_OR_MORE, default=[],
                            help='folder with test matrix resutls - result dir of testbed.py')
        return parser
    def should_checkpoint(self, idx):
        """True when a checkpoint is due: every checkpoint_period processed
        files, or at least every 10 minutes of wall time."""
        return (idx > 0 and idx % self.args.checkpoint_period == 0) or (time.time() - self.last_checkpoint > 10*60)
    def save_checkpoint(self):
        """Persist the current state to the checkpoint file. Writes via a
        temp file and keeps a .backup1 copy of the previous checkpoint so a
        crash mid-write cannot destroy the only copy. Failures are logged,
        never raised."""
        with self.time_checkpoint:
            try:
                # Sync aggregates into the checkpoint object before dumping.
                self.checkpoint.test_records = self.test_records
                self.checkpoint.total_functions = self.total_functions
                self.checkpoint.timing_bins = self.timing_bins
                self.checkpoint.invalid_results = self.invalid_results
                self.checkpoint.invalid_results_num = self.invalid_results_num
                self.checkpoint.skipped = self.skipped
                self.checkpoint.ref_bins = self.ref_bins
                self.checkpoint.recache()
                tmpfile1 = self.args.checkpoint_file + '.backup1'
                tmpfile2 = self.args.checkpoint_file + '.backup2'
                logger.info('Creating checkpoint %s ...' % self.args.checkpoint_file)
                shutil.copyfile(self.args.checkpoint_file, tmpfile1)
                json.dump(self.checkpoint.to_json(), open(tmpfile2, 'w+'), cls=common.AutoJSONEncoder, indent=2)
                shutil.copyfile(tmpfile2, self.args.checkpoint_file)
                os.remove(tmpfile2)
                logger.info('Checkpoint saved %s' % self.args.checkpoint_file)
                self.last_checkpoint = time.time()
                self.last_checkpoint_new_rec = 0
                self.last_checkpoint_cached_rec = 0
            except Exception as e:
                logger.exception('Could not create a checkpoint %s' % self.args.checkpoint_file, exc_info=e)
    def load_checkpoint(self):
        """Load state from the checkpoint file if it exists.
        :return: True on success, False when missing or unreadable."""
        try:
            logger.info('Loading checkpoint %s ...' % self.args.checkpoint_file)
            if not os.path.exists(self.args.checkpoint_file):
                return False
            js = json.load(open(self.args.checkpoint_file))
            self.checkpoint = Checkpoint()
            self.checkpoint.from_json(js)
            self.checkpointed_files = self.checkpoint.get_cached_keys()
            return True
        except Exception as e:
            logger.exception('Could not load a checkpoint %s' % self.args.checkpoint_file, exc_info=e)
            return False
    def move_invalid(self, fname=None):
        """Rename an unparseable/invalid result file to '<name>.invalid'.
        No-op for tar inputs or unless --delete-invalid was given."""
        if fname is None or self.args.tar or not self.args.delete_invalid:
            return
        try:
            os.rename(fname, '%s.invalid' % fname)
        except Exception as e:
            logger.exception('Could not move invalid file', exc_info=e)
    def accepting_file(self, tfile, bname):
        """Apply the CLI filters (--static, --aes-ref, --narrow, --narrow2,
        --benchmark) to decide whether a result file should be processed.
        Reference files (REF_NAME in name) always pass the static filter."""
        if self.args.static and ('static' not in bname and self.REF_NAME not in bname):
            return False
        if self.args.aes_ref and self.REF_NAME not in bname:
            return False
        if self.args.narrow and not is_narrow(bname):
            return False
        if self.args.narrow2 and not is_narrow(bname, 1):
            return False
        if self.args.benchmark and not is_narrow(bname, 2):
            return False
        return True
    def read_file(self, tfile, bname):
        """Read and JSON-parse one result file from the tar archive or the
        filesystem.
        :return: (parsed_json, raw_data, stat_result) — stats only available
                 for filesystem reads; all three None-able on failure."""
        js = None
        data = None
        stats = None
        try:
            if self.args.tar:
                with self.tf.extractfile(tfile) as fh:
                    data = fh.read()
                    js = json.loads(data)
            else:
                fd = os.open(tfile.path, os.O_RDONLY)
                try:
                    fh = os.fdopen(fd, 'r')
                    stats = os.fstat(fd)
                    data = fh.read()
                    js = json.loads(data)
                finally:
                    os.close(fd)
        except Exception as e:
            logger.error('Exception during processing %s: %s' % (tfile, e))
            logger.debug(traceback.format_exc())
        return js, data, stats
    def process_tr(self, tr, tfile, bname):
        """Classify one TestRecord into the aggregation structures.
        Records without a z-score or with zero data are counted as invalid
        (and optionally moved aside). Reference runs are additionally binned
        into ref_bins under both exact and hw-collapsed category keys.
        :return: True when the record was accepted."""
        if tr.zscore is None or tr.data == 0:
            self.invalid_results_num += 1
            self.invalid_results.append(bname)
            self.move_invalid(tfile.path if not self.args.tar else None)
            return False
        if self.REF_NAME in bname:
            tr.ref = True
            ref_cat = tr.ref_category()
            ref_cat_unhw = tr.ref_category_unhw()
            self.ref_bins[ref_cat].append(tr)
            if ref_cat != ref_cat_unhw:
                self.ref_bins[ref_cat_unhw].append(tr)
        self.test_records.append(tr)
        self.total_functions.add(tr.function)
        if tr.time_process:
            self.timing_bins[tr.bool_category()].append(tr.time_process)
        return True
    def read_file_tr(self, tfile, bname):
        """Read one result file and build its TestRecord (with content hash
        and mtime). Returns None (and moves the file aside) on parse errors.
        NOTE(review): returns False — not None — when the JSON is unreadable;
        callers treat both as falsy, but confirm before relying on the type."""
        # File read & parse
        js, data, stats = self.read_file(tfile, bname)
        # File process
        if js is None:
            self.move_invalid(tfile.path if not self.args.tar else None)
            return False
        try:
            hasher = hashlib.sha1()
            if isinstance(data, str):
                hasher.update(data.encode())
            else:
                hasher.update(data)
            tr = process_file(js, bname, self.args)
            tr.fhash = hasher.hexdigest()
            tr.mtime = stats.st_mtime if stats else None
            return tr
        except Exception as e:
            logger.exception('Could not process file', exc_info=e)
            return None
def main(self):
    """Parse testbed result files and emit aggregate CSV/JSON reports.

    Reads booltest result JSONs from a folder or a tar archive, filters
    and aggregates them (optionally through a checkpoint cache), builds
    reference z-score statistics, and writes reference / results /
    timing files into args.out_dir.

    Fields of interest in the parsed records:
      "best_zscore"
      "blocklen": 256,
      "degree": 2,
      "comb_degree": 2,
      "top_k": 128,
      config.config.spec.fnc
      config.config.spec.c_round
      config.config.spec.data_size
      config.config.spec.strategy
      config.config.spec.gen_cfg.stream.scode
      config.config.spec.gen_cfg.stream.type
      config.config.spec.gen_cfg.stream.source.type
    :return:
    """
    parser = self.get_parser()
    args = parser.parse_args()
    self.args = args
    tstart = time.time()

    # Process the input
    if len(args.folder) == 0:
        print('Error; no input given')
        sys.exit(1)

    ctr = -1  # NOTE(review): appears unused — kept as-is
    main_dir = args.folder[0]
    self.tf = None
    self.checkpoint = Checkpoint()
    self.checkpoint.args = args

    # Optional z-score -> rejection lookup table (see zscoreref below).
    pval_db = PvalDb(args.pval_data)
    pval_db.load()

    if args.tar:
        import tarfile
        logger.info('Loading tar file: %s' % main_dir)
        self.tf = tarfile.open(main_dir, 'r')
        test_files = [x for x in self.tf.getmembers() if x.isfile()]
        logger.info('Totally %d files found in the tar file' % len(test_files))
    else:
        # Read all files in the folder.
        # scandir iterates lazily, so very large folders do not have to be
        # materialized up front.
        logger.info('Reading all testfiles list')
        # test_files = [f for f in os.listdir(main_dir) if os.path.isfile(os.path.join(main_dir, f))]
        test_files = os.scandir(main_dir)
        # logger.info('Totally %d tests were performed, parsing...' % len(test_files))

    # Test matrix definition — the booltest parameter grid.
    total_block = [128, 256, 384, 512]
    total_deg = [1, 2, 3]
    total_comb_deg = [1, 2, 3]
    total_sizes = [10, 100]
    total_cases = [total_block, total_deg, total_comb_deg]
    total_cases_size = total_cases + [total_sizes]

    # Load checkpoint, restore state
    if args.checkpoint and self.load_checkpoint():
        logger.info('Checkpoint loaded, files read: %s' % len(self.checkpointed_files))

    num_cached = 0
    for idx, tfile in enumerate(test_files):
        bname = os.path.basename(tfile.name)
        if not args.tar and not tfile.is_file():
            continue

        if idx % 1000 == 0:
            logger.debug('Progress: %d, cur: %s skipped: %s, time: %.2f, #rec: %s, #fnc: %s, #cachedr: %s, #lcc: %s, lcr: %s'
                         % (idx, tfile.name, self.skipped, time.time() - tstart,
                            len(self.test_records), len(self.total_functions), num_cached,
                            self.last_checkpoint_cached_rec, self.last_checkpoint_new_rec))

        is_file_checkpointed = bname in self.checkpointed_files
        num_cached += 1 if is_file_checkpointed else 0

        # Persist a checkpoint only when new (non-cached) records arrived.
        if args.checkpoint and self.should_checkpoint(idx) and self.last_checkpoint_cached_rec < self.last_checkpoint_new_rec:
            self.save_checkpoint()

        # Optional cap on the number of processed inputs.
        if args.num_inp is not None and args.num_inp < idx:
            break
        if not bname.endswith('json'):
            continue
        if not self.accepting_file(tfile, bname):
            self.skipped += 1
            continue

        # Cached records come from the checkpoint; fresh ones are parsed.
        tr = self.read_file_tr(tfile, bname) if not is_file_checkpointed else self.checkpoint.get_cached(bname)
        if tr is None:
            self.move_invalid(tfile.path if not args.tar else None)
            continue

        try:
            if not self.process_tr(tr, tfile, bname):
                continue
            self.last_checkpoint_cached_rec += 1 if is_file_checkpointed else 0
            self.last_checkpoint_new_rec += 1
        except Exception as e:
            logger.error('Exception during processing %s: %s' % (tfile, e))
            logger.debug(traceback.format_exc())

    logger.info('Invalid results: %s' % self.invalid_results_num)
    logger.info('Num records: %s' % len(self.test_records))
    logger.info('Num functions: %s' % len(self.total_functions))
    logger.info('Num ref bins: %s' % len(self.ref_bins.keys()))

    logger.info('Post processing')
    # Sort so itertools.groupby below sees contiguous groups per
    # (function, round, method, data).
    self.test_records.sort(key=lambda x: (x.function, x.round, x.method, x.data))

    if not args.json:
        print(args.delim.join(['function', 'round', 'data'] +
                              ['%s-%s-%s' % (x[0], x[1], x[2]) for x in itertools.product(*total_cases)]))

    # Reference statistics.
    ref_avg = {}
    for mthd in list(self.ref_bins.keys()):
        samples = self.ref_bins[mthd]
        ref_avg[mthd] = sum([abs(x.zscore) for x in samples]) / float(len(samples))

    # Stats files — output file name prefix depends on the filtering mode.
    fname_narrow = 'nw_' if args.narrow else ''
    if args.narrow2:
        fname_narrow = 'nw2_'
    elif args.benchmark:
        fname_narrow = 'bench_'

    fsuffix = self.args.file_suffix
    fname_time = int(time.time())
    fname_ref_json = os.path.join(args.out_dir, 'ref_%s%s%s.json' % (fname_narrow, fname_time, fsuffix))
    fname_ref_csv = os.path.join(args.out_dir, 'ref_%s%s%s.csv' % (fname_narrow, fname_time, fsuffix))
    fname_results_json = os.path.join(args.out_dir, 'results_%s%s%s.json' % (fname_narrow, fname_time, fsuffix))
    fname_results_bat_json = os.path.join(args.out_dir, 'results_bat_%s%s%s.json' % (fname_narrow, fname_time, fsuffix))
    fname_results_csv = os.path.join(args.out_dir, 'results_%s%s%s.csv' % (fname_narrow, fname_time, fsuffix))
    fname_results_rf_csv = os.path.join(args.out_dir, 'results_rf_%s%s%s.csv' % (fname_narrow, fname_time, fsuffix))
    fname_results_rfd_csv = os.path.join(args.out_dir, 'results_rfd_%s%s%s.csv' % (fname_narrow, fname_time, fsuffix))
    fname_results_rfr_csv = os.path.join(args.out_dir, 'results_rfr_%s%s%s.csv' % (fname_narrow, fname_time, fsuffix))
    fname_timing_csv = os.path.join(args.out_dir, 'results_time_%s%s%s.csv' % (fname_narrow, fname_time, fsuffix))

    # Reference bins — one CSV line / JSON object per reference category.
    ref_keys = sorted(list(self.ref_bins.keys()))
    with open(fname_ref_csv, 'w+') as fh_csv, open(fname_ref_json, 'w+') as fh_json:
        fh_json.write('[\n')
        for rf_key in ref_keys:
            method, block, deg, comb_deg, data = rf_key
            ref_cur = self.ref_bins[rf_key]

            csv_line = args.delim.join([
                method, fls(block), fls(deg), fls(comb_deg), fls(data), fls(ref_avg[rf_key])
            ] + [fls(x.zscore) for x in ref_cur])
            fh_csv.write(csv_line+'\n')

            js_cur = collections.OrderedDict()
            js_cur['method'] = method
            js_cur['block'] = block
            js_cur['deg'] = deg
            js_cur['comb_deg'] = comb_deg
            js_cur['data_size'] = data
            js_cur['zscore_avg'] = ref_avg[rf_key]
            js_cur['zscores'] = [x.zscore for x in ref_cur]
            json.dump(js_cur, fh_json, indent=2)
            fh_json.write(', \n')
        # Trailing null keeps the JSON array valid after the last comma.
        fh_json.write('\n null\n]\n')

    # Timing resuls — one line per full (block, deg, comb_deg, size) case.
    with open(fname_timing_csv, 'w+') as fh:
        hdr = ['block', 'degree', 'combdeg', 'data', 'num_samples', 'avg', 'stddev', 'data']
        fh.write(args.delim.join(hdr) + '\n')
        for case in itertools.product(*total_cases_size):
            cur_data = list(case)
            time_arr = self.timing_bins[case]
            num_samples = len(time_arr)
            if num_samples == 0:
                cur_data += [0, None, None, None]
            else:
                cur_data.append(num_samples)
                avg_ = sum(time_arr) / float(num_samples)
                # Sample standard deviation; undefined for a single sample.
                stddev = math.sqrt(sum([(x-avg_)**2 for x in time_arr])/(float(num_samples) - 1)) if num_samples > 1 else None
                cur_data += [avg_, stddev]
                cur_data += time_arr
            fh.write(args.delim.join([str(x) for x in cur_data]) + '\n')

    # Close old
    # NOTE(review): redundant — the `with` blocks above already closed these.
    fh_json.close()
    fh_csv.close()

    # Result processing
    fh_json = open(fname_results_json, 'w+')
    fh_bat_json = open(fname_results_bat_json, 'w+')
    fh_csv = open(fname_results_csv, 'w+')
    fh_rf_csv = open(fname_results_rf_csv, 'w+')
    fh_rfd_csv = open(fname_results_rfd_csv, 'w+')
    fh_rfr_csv = open(fname_results_rfr_csv, 'w+')

    fh_json.write('[\n')
    fh_bat_json.write('[\n')

    # Headers
    hdr = ['fnc_name', 'fnc_round', 'method', 'data_mb']
    for cur_key in itertools.product(*total_cases):
        hdr.append('%s-%s-%s' % (cur_key[0], cur_key[1], cur_key[2]))

    fh_csv.write(args.delim.join(hdr) + '\n')
    fh_rf_csv.write(args.delim.join(hdr) + '\n')
    fh_rfd_csv.write(args.delim.join(hdr) + '\n')
    fh_rfr_csv.write(args.delim.join(hdr) + '\n')

    # Processing, one per group
    js_out = []
    ref_added = set()
    for k, g in itertools.groupby(self.test_records, key=lambda x: (x.function, x.round, x.method, x.data)):
        logger.info('Key: %s' % list(k))
        fnc_name = k[0]
        fnc_round = k[1]
        method = k[2]
        data_mb = k[3]
        prefix_cols = [fnc_name, fls(fnc_round), method, fls(data_mb)]

        # CSV grouping, avg all results
        csv_grouper = lambda x: (x.block, x.deg, x.comb_deg)
        group_expanded = sorted(list(g), key=csv_grouper)  # type: List[TestRecord]

        # Collapse duplicate parameter combinations into one averaged record.
        results_map = {}
        for ssk, ssg in itertools.groupby(group_expanded, key=csv_grouper):
            ssg = list(ssg)
            if len(ssg) > 1:
                cp = copy.deepcopy(ssg[0])
                cp.zscore = average([x.zscore for x in ssg])
            else:
                cp = ssg[0]
            results_map[ssk] = cp

        # Add line with reference values so one can compare
        # (emitted once per data size).
        if data_mb not in ref_added:
            ref_added.add(data_mb)
            results_list = []
            for cur_key in itertools.product(*total_cases):
                results_list.append(get_ref_val_def(ref_avg, *cur_key, data=data_mb))
            csv_line = args.delim.join(['ref-AES', '10', REFERENCE_METHOD, fls(data_mb)]
                                       + [(fls(x) if x is not None else '-') for x in results_list])
            fh_csv.write(csv_line + '\n')

        # Grid list for booltest params — None for missing combinations.
        results_list = []  # type: List[TestRecord]
        for cur_key in itertools.product(*total_cases):
            if cur_key in results_map:
                results_list.append(results_map[cur_key])
            else:
                results_list.append(None)

        # CSV result — halving runs report a p-value, others a z-score.
        res_selector = lambda x: (x.pvalue if x.is_halving else x.zscore)
        csv_line = args.delim.join(prefix_cols + [(fls(res_selector(x)) if x is not None else '-') for x in results_list])
        fh_csv.write(csv_line+'\n')

        # CSV only if above threshold
        def zscoreref(x, retmode=0):
            # Render one grid cell for the thresholded (rf/rfd/rfr) CSVs:
            # '-' missing, '?' no threshold available, '.' below threshold,
            # otherwise a value depending on retmode
            # (0: raw z-score, 1: distance over threshold, 2: ratio).
            if x is None:
                return '-'
            is_over = False
            thr = 0
            if args.pval_data:
                is_over = pval_db.eval(x.block, x.deg, x.comb_deg, x.zscore)
                if is_over is None:
                    return '?'
            else:
                thr = get_ref_value(ref_avg, x)
                if thr is None:
                    return '?'
                is_over = is_over_threshold(ref_avg, x)
            if is_over:
                if retmode == 0:
                    return fls(x.zscore)
                elif retmode == 1:
                    return fls(abs(x.zscore) - abs(thr))
                elif retmode == 2:
                    thr = thr if thr != 0 else 1.
                    return fls(abs(x.zscore) / abs(thr))
            return '.'

        csv_line_rf = args.delim.join(
            prefix_cols + [zscoreref(x) for x in results_list])
        fh_rf_csv.write(csv_line_rf + '\n')

        csv_line_rfd = args.delim.join(
            prefix_cols + [zscoreref(x, 1) for x in results_list])
        fh_rfd_csv.write(csv_line_rfd + '\n')

        csv_line_rfr = args.delim.join(
            prefix_cols + [zscoreref(x, 2) for x in results_list])
        fh_rfr_csv.write(csv_line_rfr + '\n')

        # JSON result
        cur_js = collections.OrderedDict()
        cur_js['function'] = fnc_name
        cur_js['round'] = fnc_round
        cur_js['method'] = method
        cur_js['data_mb'] = data_mb
        cur_js['tests'] = [[x.block, x.deg, x.comb_deg, res_selector(x)] for x in group_expanded]
        json.dump(cur_js, fh_json, indent=2)
        fh_json.write(',\n')

        # JSON battery format result — one object per individual record.
        for cur_res in group_expanded:
            cdatafile = cur_res.cfg_file_name
            if self.args.append_seed:
                cdatafile = "%s_seed_%s" % (cur_res.cfg_file_name, cur_res.seed)

            cur_js = collections.OrderedDict()
            cur_js['battery'] = 'booltest'
            cur_js['function'] = fnc_name
            cur_js['round'] = fnc_round
            cur_js['method'] = method
            cur_js['data_bytes'] = cur_res.data_bytes
            cur_js['deg'] = cur_res.deg
            cur_js['k'] = cur_res.comb_deg
            cur_js['m'] = cur_res.block
            cur_js['data_file'] = cdatafile
            cur_js['data_hash'] = cur_res.data_hash
            cur_js['seed'] = cur_res.seed
            cur_js['zscore'] = cur_res.zscore
            cur_js['halving'] = cur_res.is_halving
            if cur_res.is_halving:
                cur_js['pvalue'] = cur_res.pvalue
                # Fixed rejection level 1/40000 for halving p-values.
                cur_js['pval0_rej'] = cur_res.pvalue < (1./40000)
            else:
                cur_js['pval0_rej'] = pval_db.eval(cur_res.block, cur_res.deg, cur_res.comb_deg, cur_res.zscore) if args.pval_data else None
            json.dump(cur_js, fh_bat_json, indent=2)
            fh_bat_json.write(',\n')

        if not args.json:
            print(csv_line)
        else:
            js_out.append(cur_js)

    # Trailing null keeps the JSON arrays valid after the last comma.
    fh_json.write('\nnull\n]\n')
    fh_bat_json.write('\nnull\n]\n')

    if args.json:
        print(json.dumps(js_out, indent=2))

    fh_json.close()
    fh_bat_json.close()
    fh_csv.close()
    fh_rf_csv.close()
    fh_rfd_csv.close()
    fh_rfr_csv.close()

    logger.info(fname_ref_json)
    logger.info(fname_ref_csv)
    logger.info(fname_results_json)
    logger.info(fname_results_bat_json)
    logger.info(fname_results_csv)
    logger.info(fname_results_rf_csv)
    logger.info(fname_results_rfd_csv)
    logger.info(fname_results_rfr_csv)
    logger.info(fname_timing_csv)
    logger.info('Processing finished in %s s' % (time.time() - tstart))
def main():
    """CLI entry point: build a Processor and run the full pipeline."""
    Processor().main()


if __name__ == '__main__':
    main()
| 36.359026 | 144 | 0.576457 |
acf8d435a882cedb55cd4903c23b29c9a0baa6fd | 1,267 | py | Python | CRNN/compute_stds_means.py | yingbiaoluo/OCR_deployment | a2f4635d328e7bd484fb1c86da3ca79306d852a7 | [
"MIT"
] | null | null | null | CRNN/compute_stds_means.py | yingbiaoluo/OCR_deployment | a2f4635d328e7bd484fb1c86da3ca79306d852a7 | [
"MIT"
] | null | null | null | CRNN/compute_stds_means.py | yingbiaoluo/OCR_deployment | a2f4635d328e7bd484fb1c86da3ca79306d852a7 | [
"MIT"
] | null | null | null | import os
import cv2
import random
import numpy as np
from tqdm import tqdm
from config import config
from dataset import dataset
from utils import utils
def cal_std_mean(dataset, num):
    """Compute the global mean and std of pixel intensities over a dataset.

    Pixel values are scaled to [0, 1] (divided by 255) before the
    statistics are taken, matching the normalisation used at train time.

    :param dataset: indexable yielding (image, label) pairs; image is a
        CHW uint8 array of a fixed shape.
    :param num: number of leading samples to aggregate.
    :return: (mean, std) as Python floats; both values are also printed.
    """
    # Collect the samples in a list and stack once at the end: the former
    # np.concatenate inside the loop copied the whole accumulator every
    # iteration (O(n^2) bytes moved).
    samples = []
    for i in range(num):
        if i and i % 100 == 0:
            print(i)  # progress indicator
        img, _ = dataset[i]
        samples.append(img)
    imgs = np.stack(samples, axis=0)
    print(imgs.shape)

    img_flat = imgs.astype(np.float32).flatten() / 255.
    mean = float(np.mean(img_flat))
    std = float(np.std(img_flat))
    print('mean:', mean)
    print('std:', std)
    # Previously returned the constant 0; returning the statistics makes the
    # function usable programmatically (the existing caller ignores it).
    return mean, std
if __name__ == '__main__':
    #### compute stds and means
    # Previously used sentence-level dataset:
    # image_root = '/home/lyb/ocr/CRNN/dataset/images_sentences/images'
    # label_path = '/home/lyb/ocr/CRNN/dataset/images_sentences/labels/sentences_label.txt'
    # Hard-coded local paths for the synthetic Chinese character dataset.
    image_root = '/home/lyb/dataset/OCR/Sythetic_Chinese_Character_Dataset/images'
    label_path = '/home/lyb/crnn_chinese_characters_rec/train.txt'
    alphabet_path = '/home/lyb/ocr/CRNN/dataset/alphabets.txt'
    alphabet = utils.generate_alphabets(alphabet_path)
    # Target (height, width) the loader resizes every image to.
    resize_shape = (32, 560)
    # NOTE(review): this rebinds the imported `dataset` module name to an
    # instance — works here, but shadows the module for later code.
    dataset = dataset.Dataset_OCR(image_root, label_path, alphabet, resize_shape)
    # Sample the first 2000 images to estimate normalisation statistics.
    cal_std_mean(dataset, num=2000)
| 29.465116 | 91 | 0.674822 |
acf8d4903086414a99b463cb8089c273bba0328a | 1,892 | bzl | Python | gometalinter/deps.bzl | sthussey/bazel-tools | e80a71aa4398b3635067a142a57f7121f414e19f | [
"Apache-2.0"
] | null | null | null | gometalinter/deps.bzl | sthussey/bazel-tools | e80a71aa4398b3635067a142a57f7121f414e19f | [
"Apache-2.0"
] | null | null | null | gometalinter/deps.bzl | sthussey/bazel-tools | e80a71aa4398b3635067a142a57f7121f414e19f | [
"Apache-2.0"
] | null | null | null | load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
# Release artifacts for gometalinter v2.0.10, keyed by host platform
# ("<os>_<arch>").  Each entry is a tuple of:
#   (tarball filename, prefix directory stripped on extraction,
#    expected SHA-256 of the tarball).
_GOMETALINTER_TARBALLS = {
    "darwin_amd64": (
        "gometalinter-2.0.10-darwin-amd64.tar.gz",
        "gometalinter-2.0.10-darwin-amd64",
        "b21c28a236f05d1cd1a394240388afdc3a20a2ddfeb34acb19c651d5d1936523",
    ),
    "linux_amd64": (
        "gometalinter-2.0.10-linux-amd64.tar.gz",
        "gometalinter-2.0.10-linux-amd64",
        "111f656a8599349168544b9ae0dbc93240edcb28a81a92e9810ceaa40575545a",
    ),
}
def _gometalinter_download_impl(ctx):
    """Repository rule implementation fetching a prebuilt gometalinter.

    Selects the release tarball matching the host platform, writes a
    BUILD file for the extracted tree, then downloads and verifies the
    archive against its pinned SHA-256.
    """
    os_name = ctx.os.name
    if os_name == "linux":
        host = "linux_amd64"
    elif os_name == "mac os x":
        host = "darwin_amd64"
    else:
        fail("Unsupported operating system: " + os_name)

    tarball = _GOMETALINTER_TARBALLS.get(host)
    if tarball == None:
        fail("Unsupported host {}".format(host))
    filename, prefix, sha256 = tarball

    url = "https://github.com/alecthomas/gometalinter/releases/download/v2.0.10/" + filename
    ctx.template(
        "BUILD.bazel",
        Label("@com_github_atlassian_bazel_tools//gometalinter:gometalinter.build.bazel"),
        executable = False,
    )
    ctx.download_and_extract(
        url = url,
        sha256 = sha256,
        stripPrefix = prefix,
    )
# Repository rule wiring up the implementation above; instantiated from
# gometalinter_dependencies() below.
_gometalinter_download = repository_rule(
    implementation = _gometalinter_download_impl,
)
def gometalinter_dependencies():
    """Declare the external repositories the gometalinter rules need.

    Safe to call from a WORKSPACE that may already define bazel_skylib:
    _maybe() skips repositories that are already declared.  Versions and
    checksums are pinned (skylib 0.8.0, gometalinter 2.0.10 via the
    repository rule above).
    """
    _maybe(
        http_archive,
        name = "bazel_skylib",
        sha256 = "2ef429f5d7ce7111263289644d233707dba35e39696377ebab8b0bc701f7818e",
        strip_prefix = "bazel-skylib-0.8.0",
        urls = ["https://github.com/bazelbuild/bazel-skylib/archive/0.8.0.tar.gz"],
    )
    _gometalinter_download(
        name = "com_github_atlassian_bazel_tools_gometalinter",
    )
def _maybe(repo_rule, name, **kwargs):
    """Instantiate repo_rule under `name` unless it is already declared."""
    if name in native.existing_rules():
        return
    repo_rule(name = name, **kwargs)
| 32.067797 | 92 | 0.67019 |
acf8d4a60a4489b997a50b0855f2090beecef595 | 1,394 | py | Python | demonslayer/demonslayer/spiders/manga.py | mathematiguy/manga-scrapers | 294b150dccfb79e043b13d83b781c8f9ad844fdd | [
"MIT"
] | null | null | null | demonslayer/demonslayer/spiders/manga.py | mathematiguy/manga-scrapers | 294b150dccfb79e043b13d83b781c8f9ad844fdd | [
"MIT"
] | null | null | null | demonslayer/demonslayer/spiders/manga.py | mathematiguy/manga-scrapers | 294b150dccfb79e043b13d83b781c8f9ad844fdd | [
"MIT"
] | null | null | null | import os
import scrapy
import logging
from urllib.request import urlretrieve
class MangaSpider(scrapy.Spider):
    """Scrape chapter pages from demon-slayer.online and emit one item
    per manga page image (issue name, chapter label, local path, URL)."""

    name = 'manga'
    allowed_domains = ['demon-slayer.online', 'cdn.readkakegurui.com']
    start_urls = ['http://demon-slayer.online/']

    def parse(self, response):
        """Collect chapter links from the index and schedule each chapter.

        The index lists chapters newest-first; both lists are reversed so
        issue_num counts up in reading order.
        """
        manga_urls = response.xpath('//figure/ul[contains(@class, "su-posts-list-loop")]/li/a/@href').extract()[::-1]
        chapter_names = response.xpath('//figure/ul[contains(@class, "su-posts-list-loop")]/li/a/text()').extract()[::-1]
        for i, (url, chapter) in enumerate(zip(manga_urls, chapter_names)):
            yield scrapy.Request(url, callback=self.parse_manga,
                                 meta={'issue_num': i, 'chapter': chapter})

    def parse_manga(self, response):
        """Emit one item per page image found in a chapter page."""
        issue_name = response.url.split('/')[-2]
        image_urls = response.xpath('//div[@class="entry-content"]/div[@class="separator"]/a/img/@src').extract()
        # The target directory is invariant per issue — compute and create it
        # once instead of re-checking the filesystem for every image URL.
        folder_dir = os.path.join('manga', issue_name)
        if image_urls:
            os.makedirs(folder_dir, exist_ok=True)
        for url in image_urls:
            fp = os.path.join(folder_dir, url.rsplit('/', 1)[-1])
            yield {
                'issue_name': issue_name,
                'chapter': f'{response.meta["issue_num"]} - {response.meta["chapter"]}',
                'fp': fp,
                'url': url
            }
| 41 | 121 | 0.596126 |
acf8d4bcfa7604038ae55efdcac4484da4811fb6 | 2,215 | py | Python | legged_gym/envs/anymal_b/anymal_b_config.py | SimarKareer/legged_gym | 3d43422ff8276fb830f31a356adb8699d5664f08 | [
"BSD-3-Clause"
] | null | null | null | legged_gym/envs/anymal_b/anymal_b_config.py | SimarKareer/legged_gym | 3d43422ff8276fb830f31a356adb8699d5664f08 | [
"BSD-3-Clause"
] | null | null | null | legged_gym/envs/anymal_b/anymal_b_config.py | SimarKareer/legged_gym | 3d43422ff8276fb830f31a356adb8699d5664f08 | [
"BSD-3-Clause"
] | null | null | null | # SPDX-FileCopyrightText: Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: BSD-3-Clause
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Copyright (c) 2021 ETH Zurich, Nikita Rudin
from legged_gym.envs import AnymalCRoughCfg, AnymalCRoughCfgPPO
class AnymalBRoughCfg(AnymalCRoughCfg):
class asset(AnymalCRoughCfg.asset):
file = "{LEGGED_GYM_ROOT_DIR}/resources/robots/anymal_b/urdf/anymal_b.urdf"
foot_name = "FOOT"
class rewards(AnymalCRoughCfg.rewards):
class scales(AnymalCRoughCfg.rewards.scales):
pass
class AnymalBRoughCfgPPO(AnymalCRoughCfgPPO):
class runner(AnymalCRoughCfgPPO.runner):
run_name = ""
experiment_name = "rough_anymal_b"
load_run = -1
| 45.204082 | 98 | 0.772009 |
acf8d5474520bb0a54fe5a16bd192995d6155856 | 1,221 | py | Python | posts/models.py | b31a0a35ec5d8c54/site_posts | 57be30efc310ab5df1922792d70da414d400a399 | [
"MIT"
] | null | null | null | posts/models.py | b31a0a35ec5d8c54/site_posts | 57be30efc310ab5df1922792d70da414d400a399 | [
"MIT"
] | null | null | null | posts/models.py | b31a0a35ec5d8c54/site_posts | 57be30efc310ab5df1922792d70da414d400a399 | [
"MIT"
] | null | null | null | import datetime
from django.db import models
from django.contrib.auth.models import User as BaseUser
class User(BaseUser):
    """Proxy over the stock Django user adding post-related helpers."""

    class Meta:
        proxy = True

    def __str__(self):
        return '{}' .format(self.username)

    def get_my_posts(self, title=None):
        """Posts authored by this user, optionally filtered by title substring."""
        posts = Post.objects.filter(user=self)
        return posts.filter(title__contains=title) if title else posts

    @property
    def my_posts(self):
        # Convenience attribute form of get_my_posts() without a filter.
        return self.get_my_posts()

    def registered_at(self):
        """Registration timestamp rendered as a string."""
        return str(self.date_joined)

    def posts_number(self):
        """Total count of this user's posts."""
        return self.get_my_posts().count()

    def posts_per_day(self):
        """Average posts per day since registration (0 on the first day)."""
        days = (datetime.datetime.now(datetime.timezone.utc) - self.date_joined).days
        if days == 0:
            return 0
        return self.posts_number() / days
class Post(models.Model):
    """A user-authored post: short title plus free-form body."""

    user = models.ForeignKey(BaseUser, on_delete=models.CASCADE, related_name='posts')
    title = models.CharField(max_length=64)
    body = models.TextField()

    class Meta:
        verbose_name = "Пост"
        verbose_name_plural = "Посты"

    def __str__(self):
        return f'{self.title}'
| 23.037736 | 86 | 0.636364 |
acf8d669f2414d224e4961cbba2cc3cf12424c84 | 259 | py | Python | manage.py | satyadevi-nyros/circle-ci | 9f26e9e233e6251a35b1d0260e836f93dba63374 | [
"Apache-2.0"
] | null | null | null | manage.py | satyadevi-nyros/circle-ci | 9f26e9e233e6251a35b1d0260e836f93dba63374 | [
"Apache-2.0"
] | null | null | null | manage.py | satyadevi-nyros/circle-ci | 9f26e9e233e6251a35b1d0260e836f93dba63374 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Default the settings module so `python manage.py <cmd>` works
    # without any environment setup; an explicit env var still wins.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "werckers_example.settings")
    # Imported after the env var is set so Django picks up the settings.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| 23.545455 | 80 | 0.779923 |
acf8d66c2038204bb578370d358d54f415bf739e | 611 | py | Python | agc/agc029/agc029b.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | 1 | 2019-08-21T00:49:34.000Z | 2019-08-21T00:49:34.000Z | agc/agc029/agc029b.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | null | null | null | agc/agc029/agc029b.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
def solve(values):
    """Return the maximum number of disjoint pairs whose sum is a power of two.

    Greedy from the largest value: each a (descending) can only be paired
    with t = 2**a.bit_length() - a, the unique partner completing the
    smallest power of two strictly greater than a.  (AtCoder AGC 029 B.)

    :param values: list of positive integers.
    :return: maximum number of pairs.
    """
    counts = {}
    for v in values:
        counts[v] = counts.get(v, 0) + 1

    pairs = 0
    for a in sorted(counts, reverse=True):
        if a not in counts:  # already consumed by a larger partner
            continue
        t = (1 << a.bit_length()) - a  # partner completing next power of two
        if t not in counts:
            continue
        if a != t:
            take = min(counts[a], counts[t])
        else:
            # Pairing a value with itself needs at least two occurrences.
            if counts[a] == 1:
                continue
            take = counts[a] // 2
        pairs += take
        # Consume matched occurrences; drop empty buckets so the
        # `a not in counts` guard works for later (smaller) keys.
        if counts[a] == take:
            del counts[a]
        else:
            counts[a] -= take
        if t in counts:
            if counts[t] == take:
                del counts[t]
            else:
                counts[t] -= take
    return pairs


if __name__ == '__main__':
    # Input format: first line N (redundant — len of the list suffices),
    # second line the N integers.  Guarded so importing has no side effects.
    input()
    print(solve(list(map(int, input().split()))))
| 16.513514 | 35 | 0.369885 |
acf8d6d7e1744bead58cba4e2c65c893795c0e7a | 22,666 | py | Python | heat/engine/resources/openstack/neutron/port.py | ISCAS-VDI/heat-base | ca8390434edfd8396c7e46651e1e31ff488b2307 | [
"Apache-2.0"
] | 1 | 2015-12-18T21:46:55.000Z | 2015-12-18T21:46:55.000Z | heat/engine/resources/openstack/neutron/port.py | ISCAS-VDI/heat-base | ca8390434edfd8396c7e46651e1e31ff488b2307 | [
"Apache-2.0"
] | null | null | null | heat/engine/resources/openstack/neutron/port.py | ISCAS-VDI/heat-base | ca8390434edfd8396c7e46651e1e31ff488b2307 | [
"Apache-2.0"
] | null | null | null | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from oslo_serialization import jsonutils
import six
from heat.common.i18n import _
from heat.common.i18n import _LW
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
from heat.engine.resources.openstack.neutron import neutron
from heat.engine.resources.openstack.neutron import subnet
from heat.engine import support
from heat.engine import translation
LOG = logging.getLogger(__name__)
class Port(neutron.NeutronResource):
"""A resource for managing Neutron ports.
A port represents a virtual switch port on a logical network switch.
Virtual instances attach their interfaces into ports. The logical port also
defines the MAC address and the IP address(es) to be assigned to the
interfaces plugged into them. When IP addresses are associated to a port,
this also implies the port is associated with a subnet, as the IP address
was taken from the allocation pool for a specific subnet.
"""
PROPERTIES = (
NAME, NETWORK_ID, NETWORK, FIXED_IPS, SECURITY_GROUPS,
REPLACEMENT_POLICY, DEVICE_ID, DEVICE_OWNER
) = (
'name', 'network_id', 'network', 'fixed_ips', 'security_groups',
'replacement_policy', 'device_id', 'device_owner'
)
EXTRA_PROPERTIES = (
VALUE_SPECS, ADMIN_STATE_UP, MAC_ADDRESS,
ALLOWED_ADDRESS_PAIRS, VNIC_TYPE, QOS_POLICY,
PORT_SECURITY_ENABLED,
) = (
'value_specs', 'admin_state_up', 'mac_address',
'allowed_address_pairs', 'binding:vnic_type', 'qos_policy',
'port_security_enabled',
)
_FIXED_IP_KEYS = (
FIXED_IP_SUBNET_ID, FIXED_IP_SUBNET, FIXED_IP_IP_ADDRESS,
) = (
'subnet_id', 'subnet', 'ip_address',
)
_ALLOWED_ADDRESS_PAIR_KEYS = (
ALLOWED_ADDRESS_PAIR_MAC_ADDRESS, ALLOWED_ADDRESS_PAIR_IP_ADDRESS,
) = (
'mac_address', 'ip_address',
)
ATTRIBUTES = (
ADMIN_STATE_UP_ATTR, DEVICE_ID_ATTR, DEVICE_OWNER_ATTR, FIXED_IPS_ATTR,
MAC_ADDRESS_ATTR, NAME_ATTR, NETWORK_ID_ATTR, SECURITY_GROUPS_ATTR,
STATUS, TENANT_ID, ALLOWED_ADDRESS_PAIRS_ATTR, SUBNETS_ATTR,
PORT_SECURITY_ENABLED_ATTR, QOS_POLICY_ATTR,
) = (
'admin_state_up', 'device_id', 'device_owner', 'fixed_ips',
'mac_address', 'name', 'network_id', 'security_groups',
'status', 'tenant_id', 'allowed_address_pairs', 'subnets',
'port_security_enabled', 'qos_policy_id',
)
properties_schema = {
NAME: properties.Schema(
properties.Schema.STRING,
_('A symbolic name for this port.'),
update_allowed=True
),
NETWORK_ID: properties.Schema(
properties.Schema.STRING,
support_status=support.SupportStatus(
status=support.HIDDEN,
version='5.0.0',
message=_('Use property %s.') % NETWORK,
previous_status=support.SupportStatus(
status=support.DEPRECATED,
version='2014.2'
)
),
constraints=[
constraints.CustomConstraint('neutron.network')
],
),
NETWORK: properties.Schema(
properties.Schema.STRING,
_('Network this port belongs to. If you plan to use current port '
'to assign Floating IP, you should specify %(fixed_ips)s '
'with %(subnet)s. Note if this changes to a different network '
'update, the port will be replaced.') %
{'fixed_ips': FIXED_IPS, 'subnet': FIXED_IP_SUBNET},
support_status=support.SupportStatus(version='2014.2'),
required=True,
constraints=[
constraints.CustomConstraint('neutron.network')
],
),
DEVICE_ID: properties.Schema(
properties.Schema.STRING,
_('Device ID of this port.'),
update_allowed=True
),
DEVICE_OWNER: properties.Schema(
properties.Schema.STRING,
_('Name of the network owning the port. '
'The value is typically network:floatingip '
'or network:router_interface or network:dhcp.'),
update_allowed=True
),
FIXED_IPS: properties.Schema(
properties.Schema.LIST,
_('Desired IPs for this port.'),
schema=properties.Schema(
properties.Schema.MAP,
schema={
FIXED_IP_SUBNET_ID: properties.Schema(
properties.Schema.STRING,
support_status=support.SupportStatus(
status=support.HIDDEN,
version='5.0.0',
message=_('Use property %s.') % FIXED_IP_SUBNET,
previous_status=support.SupportStatus(
status=support.DEPRECATED,
version='2014.2 '
)
),
constraints=[
constraints.CustomConstraint('neutron.subnet')
]
),
FIXED_IP_SUBNET: properties.Schema(
properties.Schema.STRING,
_('Subnet in which to allocate the IP address for '
'this port.'),
support_status=support.SupportStatus(version='2014.2'),
constraints=[
constraints.CustomConstraint('neutron.subnet')
]
),
FIXED_IP_IP_ADDRESS: properties.Schema(
properties.Schema.STRING,
_('IP address desired in the subnet for this port.'),
constraints=[
constraints.CustomConstraint('ip_addr')
]
),
},
),
update_allowed=True
),
SECURITY_GROUPS: properties.Schema(
properties.Schema.LIST,
_('Security group IDs to associate with this port.'),
update_allowed=True
),
REPLACEMENT_POLICY: properties.Schema(
properties.Schema.STRING,
_('Policy on how to respond to a stack-update for this resource. '
'REPLACE_ALWAYS will replace the port regardless of any '
'property changes. AUTO will update the existing port for any '
'changed update-allowed property.'),
default='AUTO',
constraints=[
constraints.AllowedValues(['REPLACE_ALWAYS', 'AUTO']),
],
update_allowed=True,
support_status=support.SupportStatus(
status=support.DEPRECATED,
version='6.0.0',
message=_('Replacement policy used to work around flawed '
'nova/neutron port interaction which has been '
'fixed since Liberty.'),
previous_status=support.SupportStatus(version='2014.2'))
),
}
# NOTE(prazumovsky): properties_schema has been separated because some
# properties used in server for creating internal port.
extra_properties_schema = {
VALUE_SPECS: properties.Schema(
properties.Schema.MAP,
_('Extra parameters to include in the request.'),
default={},
update_allowed=True
),
ADMIN_STATE_UP: properties.Schema(
properties.Schema.BOOLEAN,
_('The administrative state of this port.'),
default=True,
update_allowed=True
),
MAC_ADDRESS: properties.Schema(
properties.Schema.STRING,
_('MAC address to give to this port.'),
constraints=[
constraints.CustomConstraint('mac_addr')
]
),
ALLOWED_ADDRESS_PAIRS: properties.Schema(
properties.Schema.LIST,
_('Additional MAC/IP address pairs allowed to pass through the '
'port.'),
schema=properties.Schema(
properties.Schema.MAP,
schema={
ALLOWED_ADDRESS_PAIR_MAC_ADDRESS: properties.Schema(
properties.Schema.STRING,
_('MAC address to allow through this port.'),
constraints=[
constraints.CustomConstraint('mac_addr')
]
),
ALLOWED_ADDRESS_PAIR_IP_ADDRESS: properties.Schema(
properties.Schema.STRING,
_('IP address to allow through this port.'),
required=True,
constraints=[
constraints.CustomConstraint('net_cidr')
]
),
},
)
),
VNIC_TYPE: properties.Schema(
properties.Schema.STRING,
_('The vnic type to be bound on the neutron port. '
'To support SR-IOV PCI passthrough networking, you can request '
'that the neutron port to be realized as normal (virtual nic), '
'direct (pci passthrough), or macvtap '
'(virtual interface with a tap-like software interface). Note '
'that this only works for Neutron deployments that support '
'the bindings extension.'),
constraints=[
constraints.AllowedValues(['normal', 'direct', 'macvtap']),
],
support_status=support.SupportStatus(version='2015.1'),
update_allowed=True
),
PORT_SECURITY_ENABLED: properties.Schema(
properties.Schema.BOOLEAN,
_('Flag to enable/disable port security on the port. '
'When disable this feature(set it to False), there will be no '
'packages filtering, like security-group and address-pairs.'),
update_allowed=True,
support_status=support.SupportStatus(version='5.0.0')
),
QOS_POLICY: properties.Schema(
properties.Schema.STRING,
_('The name or ID of QoS policy to attach to this port.'),
constraints=[
constraints.CustomConstraint('neutron.qos_policy')
],
update_allowed=True,
support_status=support.SupportStatus(version='6.0.0')
),
}
# Need to update properties_schema with other properties before
# initialisation, because resource should contain all properties before
# creating. Also, documentation should correctly resolves resource
# properties schema.
properties_schema.update(extra_properties_schema)
attributes_schema = {
ADMIN_STATE_UP_ATTR: attributes.Schema(
_("The administrative state of this port."),
type=attributes.Schema.STRING
),
DEVICE_ID_ATTR: attributes.Schema(
_("Unique identifier for the device."),
type=attributes.Schema.STRING
),
DEVICE_OWNER: attributes.Schema(
_("Name of the network owning the port."),
type=attributes.Schema.STRING
),
FIXED_IPS_ATTR: attributes.Schema(
_("Fixed IP addresses."),
type=attributes.Schema.LIST
),
MAC_ADDRESS_ATTR: attributes.Schema(
_("MAC address of the port."),
type=attributes.Schema.STRING
),
NAME_ATTR: attributes.Schema(
_("Friendly name of the port."),
type=attributes.Schema.STRING
),
NETWORK_ID_ATTR: attributes.Schema(
_("Unique identifier for the network owning the port."),
type=attributes.Schema.STRING
),
SECURITY_GROUPS_ATTR: attributes.Schema(
_("A list of security groups for the port."),
type=attributes.Schema.LIST
),
STATUS: attributes.Schema(
_("The status of the port."),
type=attributes.Schema.STRING
),
TENANT_ID: attributes.Schema(
_("Tenant owning the port."),
type=attributes.Schema.STRING
),
ALLOWED_ADDRESS_PAIRS_ATTR: attributes.Schema(
_("Additional MAC/IP address pairs allowed to pass through "
"a port."),
type=attributes.Schema.LIST
),
SUBNETS_ATTR: attributes.Schema(
_("A list of all subnet attributes for the port."),
type=attributes.Schema.LIST
),
PORT_SECURITY_ENABLED_ATTR: attributes.Schema(
_("Port security enabled of the port."),
support_status=support.SupportStatus(version='5.0.0'),
type=attributes.Schema.BOOLEAN
),
QOS_POLICY_ATTR: attributes.Schema(
_("The QoS policy ID attached to this port."),
type=attributes.Schema.STRING,
support_status=support.SupportStatus(version='6.0.0'),
),
}
    def translation_rules(self, props):
        """Return property translation rules for backward compatibility.

        The REPLACE rules fold the deprecated ``network_id`` property and
        the fixed_ips ``subnet_id`` key into their current equivalents
        (``network`` / ``subnet``); the RESOLVE rules then convert
        network/subnet names into Neutron UUIDs through the client
        plugin's ``find_resourceid_by_name_or_id`` finder.
        """
        return [
            translation.TranslationRule(
                props,
                translation.TranslationRule.REPLACE,
                [self.NETWORK],
                value_path=[self.NETWORK_ID]
            ),
            translation.TranslationRule(
                props,
                translation.TranslationRule.REPLACE,
                [self.FIXED_IPS, self.FIXED_IP_SUBNET],
                value_name=self.FIXED_IP_SUBNET_ID
            ),
            translation.TranslationRule(
                props,
                translation.TranslationRule.RESOLVE,
                [self.NETWORK],
                client_plugin=self.client_plugin(),
                finder='find_resourceid_by_name_or_id',
                entity='network'
            ),
            translation.TranslationRule(
                props,
                translation.TranslationRule.RESOLVE,
                [self.FIXED_IPS, self.FIXED_IP_SUBNET],
                client_plugin=self.client_plugin(),
                finder='find_resourceid_by_name_or_id',
                entity='subnet'
            )
        ]
    def add_dependencies(self, deps):
        """Add implicit dependencies on all subnets in this port's network."""
        super(Port, self).add_dependencies(deps)
        # Depend on any Subnet in this template with the same
        # network_id as this network_id.
        # It is not known which subnet a port might be assigned
        # to so all subnets in a network should be created before
        # the ports in that network.
        for res in six.itervalues(self.stack):
            if res.has_interface('OS::Neutron::Subnet'):
                dep_network = res.properties.get(subnet.Subnet.NETWORK)
                network = self.properties[self.NETWORK]
                if dep_network == network:
                    deps += (self, res)
    def handle_create(self):
        """Create the Neutron port and record its ID as the resource ID."""
        props = self.prepare_properties(
            self.properties,
            self.physical_resource_name())
        # The Neutron API expects 'network_id'; NETWORK was already
        # resolved to a UUID by the translation rules.
        props['network_id'] = props.pop(self.NETWORK)
        self._prepare_port_properties(props)
        # QoS policy is attached by ID; resolve the name/ID first.
        qos_policy = props.pop(self.QOS_POLICY, None)
        if qos_policy:
            props['qos_policy_id'] = self.client_plugin().get_qos_policy_id(
                qos_policy)
        port = self.client().create_port({'port': props})['port']
        self.resource_id_set(port['id'])
    def _prepare_port_properties(self, props, prepare_for_update=False):
        """Massage *props* in place into the shape the Neutron API expects.

        Strips None values out of fixed_ips / allowed_address_pairs,
        renames the subnet key to 'subnet_id', resolves security group
        names to UUIDs and drops the Heat-only REPLACEMENT_POLICY key.
        Mutates *props*; returns nothing.
        """
        if self.FIXED_IPS in props:
            fixed_ips = props[self.FIXED_IPS]
            if fixed_ips:
                for fixed_ip in fixed_ips:
                    # iterate over a copy of the items since keys are popped
                    for key, value in list(fixed_ip.items()):
                        if value is None:
                            fixed_ip.pop(key)
                    if self.FIXED_IP_SUBNET in fixed_ip:
                        fixed_ip[
                            'subnet_id'] = fixed_ip.pop(self.FIXED_IP_SUBNET)
            else:
                # Passing empty list would have created a port without
                # fixed_ips during CREATE and released the existing
                # fixed_ips during UPDATE (default neutron behaviour).
                # However, for backward compatibility we will let neutron
                # assign ip for CREATE and leave the assigned ips during
                # UPDATE by not passing it. ref bug #1538473.
                del props[self.FIXED_IPS]
        # delete empty MAC addresses so that Neutron validation code
        # wouldn't fail as it not accepts Nones
        if self.ALLOWED_ADDRESS_PAIRS in props:
            address_pairs = props[self.ALLOWED_ADDRESS_PAIRS]
            if address_pairs:
                for pair in address_pairs:
                    if (self.ALLOWED_ADDRESS_PAIR_MAC_ADDRESS in pair
                        and pair[
                            self.ALLOWED_ADDRESS_PAIR_MAC_ADDRESS] is None):
                        del pair[self.ALLOWED_ADDRESS_PAIR_MAC_ADDRESS]
            else:
                props[self.ALLOWED_ADDRESS_PAIRS] = []
        # if without 'security_groups', don't set the 'security_groups'
        # property when creating, neutron will create the port with the
        # 'default' securityGroup. If has the 'security_groups' and the
        # value is [], which means to create the port without securityGroup.
        if self.SECURITY_GROUPS in props:
            if props.get(self.SECURITY_GROUPS) is not None:
                props[self.SECURITY_GROUPS] = self.client_plugin(
                ).get_secgroup_uuids(props.get(self.SECURITY_GROUPS))
            else:
                # And the update should has the same behavior.
                if prepare_for_update:
                    props[self.SECURITY_GROUPS] = self.client_plugin(
                    ).get_secgroup_uuids(['default'])
        if self.REPLACEMENT_POLICY in props:
            del(props[self.REPLACEMENT_POLICY])
def _show_resource(self):
return self.client().show_port(
self.resource_id)['port']
def check_create_complete(self, *args):
attributes = self._show_resource()
return self.is_built(attributes)
    def handle_delete(self):
        """Delete the port, tolerating an already-deleted port.

        Returns True only when the delete call actually succeeded; a
        NotFound error is swallowed by ignore_not_found() and the method
        then returns None (falsy).
        """
        try:
            self.client().delete_port(self.resource_id)
        except Exception as ex:
            # re-raises anything that is not a NotFound
            self.client_plugin().ignore_not_found(ex)
        else:
            return True
    def _resolve_attribute(self, name):
        """Resolve dynamic attributes; the 'subnets' attribute is computed.

        For SUBNETS_ATTR, the subnet record of each fixed IP is fetched
        from Neutron.  Any API failure is logged as a warning and None is
        returned instead of raising, so attribute resolution never breaks
        the stack operation.
        """
        if name == self.SUBNETS_ATTR:
            subnets = []
            try:
                fixed_ips = self._show_resource().get('fixed_ips', [])
                for fixed_ip in fixed_ips:
                    subnet_id = fixed_ip.get('subnet_id')
                    if subnet_id:
                        subnets.append(self.client().show_subnet(
                            subnet_id)['subnet'])
            except Exception as ex:
                LOG.warning(_LW("Failed to fetch resource attributes: %s"), ex)
                return
            return subnets
        return super(Port, self)._resolve_attribute(name)
def needs_replace(self, after_props):
"""Mandatory replace based on props."""
return after_props.get(self.REPLACEMENT_POLICY) == 'REPLACE_ALWAYS'
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
if prop_diff:
self.prepare_update_properties(prop_diff)
if self.QOS_POLICY in prop_diff:
qos_policy = prop_diff.pop(self.QOS_POLICY)
prop_diff['qos_policy_id'] = self.client_plugin(
).get_qos_policy_id(qos_policy) if qos_policy else None
self._prepare_port_properties(prop_diff, prepare_for_update=True)
LOG.debug('updating port with %s' % prop_diff)
self.client().update_port(self.resource_id, {'port': prop_diff})
def check_update_complete(self, *args):
attributes = self._show_resource()
return self.is_built(attributes)
    def prepare_for_replace(self):
        """Detach fixed IPs from this port before its replacement is made.

        The current fixed_ips are stashed in resource data under
        'port_fip' (JSON-encoded) so restore_prev_rsrc() can restore them
        if the update fails.
        """
        # if the port has not been created yet, return directly
        if self.resource_id is None:
            return
        # store port fixed_ips for restoring after failed update
        fixed_ips = self._show_resource().get('fixed_ips', [])
        self.data_set('port_fip', jsonutils.dumps(fixed_ips))
        # reset fixed_ips for this port by setting fixed_ips to []
        props = {'fixed_ips': []}
        self.client().update_port(self.resource_id, {'port': props})
    def restore_prev_rsrc(self, convergence=False):
        """Undo prepare_for_replace() after a failed update/rollback.

        Clears fixed_ips on the replacement port (if any) and restores the
        JSON-encoded fixed_ips stashed under 'port_fip' onto the previous
        port.
        """
        # In case of convergence, during rollback, the previous rsrc is
        # already selected and is being acted upon.
        bakup_resources = self.stack._backup_stack().resources
        prev_port = self if convergence else bakup_resources.get(self.name)
        # 'port_fip' holds a JSON string written by prepare_for_replace();
        # the [] default only matters for the truthiness check below.
        fixed_ips = prev_port.data().get('port_fip', [])
        props = {'fixed_ips': []}
        if convergence:
            existing_port, rsrc_owning_stack, stack = resource.Resource.load(
                prev_port.context, prev_port.replaced_by, True,
                prev_port.stack.cache_data
            )
            existing_port_id = existing_port.resource_id
        else:
            existing_port_id = self.resource_id
        if existing_port_id:
            # reset fixed_ips to [] for new resource
            self.client().update_port(existing_port_id, {'port': props})
        if fixed_ips and prev_port.resource_id:
            # restore ip for old port
            prev_port_props = {'fixed_ips': jsonutils.loads(fixed_ips)}
            self.client().update_port(prev_port.resource_id,
                                      {'port': prev_port_props})
def resource_mapping():
    """Map the Heat resource type name to its implementing class."""
    return {
        'OS::Neutron::Port': Port,
    }
| 41.361314 | 79 | 0.578179 |
acf8d875075d0c366da8bece8b761167da13c394 | 13,972 | py | Python | tests/test_dynamodb.py | xethorn/schema | 1bbb02bc2a5ed290c87b299fa7c2ff12d9b44ff7 | [
"MIT"
] | 3 | 2015-03-20T08:44:00.000Z | 2015-08-13T21:58:42.000Z | tests/test_dynamodb.py | xethorn/sukimu | 1bbb02bc2a5ed290c87b299fa7c2ff12d9b44ff7 | [
"MIT"
] | 3 | 2015-06-07T02:36:43.000Z | 2016-09-28T14:42:29.000Z | tests/test_dynamodb.py | xethorn/schema | 1bbb02bc2a5ed290c87b299fa7c2ff12d9b44ff7 | [
"MIT"
] | 1 | 2019-08-18T06:51:27.000Z | 2019-08-18T06:51:27.000Z | import time
import pytest
import uuid
from random import random
from random import shuffle
from oto import response
from oto import status
from sukimu import consts
from sukimu import exceptions
from sukimu.dynamodb import IndexDynamo
from sukimu.dynamodb import IndexDynamo
from sukimu.dynamodb import TableDynamo
from sukimu.fields import Field
from sukimu.operations import Between
from sukimu.operations import Equal
from sukimu.operations import In
from sukimu.schema import Schema, Index
from tests.fixtures import dynamodb
@pytest.fixture
def table_name():
    """Short unique-ish name so each test gets a fresh DynamoDB table."""
    return str(uuid.uuid1())[:8]


@pytest.fixture
def user_schema():
    """User schema: primary index on id, global index on username."""
    schema = Schema(
        TableDynamo(str(uuid.uuid1())[:8], dynamodb.connection),
        IndexDynamo(
            Index.PRIMARY, 'id', read_capacity=8, write_capacity=2),
        IndexDynamo(
            Index.GLOBAL, 'username', read_capacity=8, write_capacity=2,
            name='username-index'),
        id=Field(),
        username=Field(),
        password=Field(),
        map_field=Field(basetype=dict),
        random_field=Field(basetype=int))
    schema.table.create_table()
    return schema


@pytest.fixture
def thread_schema():
    """Forum-thread schema with primary, local and global compound indexes."""
    schema = Schema(
        TableDynamo(str(uuid.uuid1())[:8], dynamodb.connection),
        IndexDynamo(Index.PRIMARY, 'forum_name', 'thread_title',
                    read_capacity=8, write_capacity=2),
        IndexDynamo(Index.LOCAL, 'forum_name', 'thread_author',
                    name='local-index'),
        IndexDynamo(Index.GLOBAL, 'thread_title', 'thread_author',
                    name='global-index', read_capacity=8, write_capacity=2),
        forum_name=Field(),
        thread_title=Field(),
        thread_author=Field(),
        thread_content=Field())
    schema.table.create_table()
    return schema


@pytest.fixture
def stats_schema():
    """Per-user/per-day metrics schema keyed on (user_id, day_id)."""
    schema = Schema(
        TableDynamo(str(uuid.uuid1())[:8], dynamodb.connection),
        IndexDynamo(Index.PRIMARY, 'user_id', 'day_id',
                    read_capacity=8, write_capacity=2),
        user_id=Field(basetype=int),
        day_id=Field(basetype=int),
        metrics=Field(basetype=int))
    schema.table.create_table()
    return schema


@pytest.fixture
def stats_reverse_schema():
    """Stats schema with an extra global index keyed the other way around."""
    schema = Schema(
        TableDynamo(str(uuid.uuid1())[:8], dynamodb.connection),
        IndexDynamo(Index.PRIMARY, 'user_id', 'day_id',
                    read_capacity=8, write_capacity=2),
        IndexDynamo(Index.GLOBAL, 'day_id', 'user_id',
                    name='day-id-user-id', read_capacity=8, write_capacity=2),
        user_id=Field(basetype=int),
        day_id=Field(basetype=int))
    schema.table.create_table()
    return schema
def test_can_create_fixtures(user_schema, thread_schema):
    """Smoke test: the schema fixtures create their tables without error."""
    pass


def test_create_an_entry_with_wrong_field(user_schema):
    """A value of the wrong basetype rejects the create and stores nothing."""
    resp = user_schema.create(id='30', username='michael', random_field='test')
    assert not resp
    assert isinstance(resp.errors.get('message').get('random_field'), str)
    resp = user_schema.fetch_one(id=Equal('30'))
    assert not resp
    assert resp.status is status.NOT_FOUND


def test_extension(user_schema):
    """Schema.extends() keeps table/indexes and adds the new field only."""
    new_schema = user_schema.extends(
        new_field=Field())
    assert isinstance(new_schema, Schema)
    assert new_schema.table.name == user_schema.table.name
    assert len(new_schema.indexes) == len(user_schema.indexes)
    assert len(new_schema.fields) - 1 == len(user_schema.fields)
    assert not new_schema.table.schema == user_schema


def test_create_an_entry_for_user(user_schema):
    """A valid create succeeds."""
    resp = user_schema.create(id='30', username='michael')
    assert resp


def test_update_an_entry_for_user(user_schema):
    """Updating an existing row by primary key succeeds."""
    resp = user_schema.create(id='30', username='michael')
    assert resp
    resp = user_schema.update(dict(id='30'), username='joe')
    assert resp


def test_delete_an_entry_for_user(user_schema):
    """A deleted row can no longer be fetched."""
    user_schema.create(id='30', username='michael')
    user_schema.create(id='40', username='michael2')
    resp = user_schema.delete(id=Equal('30'))
    assert resp
    resp = user_schema.fetch_one(id=Equal(30))
    assert not resp


def test_update_an_entry_on_existing_key(user_schema):
    """Updating to a username already taken fails and leaves the row as-is."""
    user_schema.create(id='.40', username='michael')
    user_schema.create(id='30', username='joe')
    resp = user_schema.update(dict(id='30'), username='michael')
    assert not resp
    assert isinstance(
        resp.errors.get('message').get('username'), exceptions.FieldException)
    resp = user_schema.fetch_one(id=Equal('30'))
    assert resp.message.get('username') == 'joe'


def test_create_an_entry_on_existing_user_id(user_schema):
    """Duplicate primary key is rejected with a duplicate-key error on id."""
    resp = user_schema.create(id='30', username='michael')
    assert resp
    resp = user_schema.create(id='30', username='otherusername')
    assert not resp
    assert not resp.errors.get('message').get('username')
    assert resp.errors.get('code') is consts.ERROR_CODE_DUPLICATE_KEY
    assert isinstance(
        resp.errors.get('message').get('id'), exceptions.FieldException)


def test_create_an_entry_with_map_data(user_schema):
    """A dict-typed field round-trips through create/fetch."""
    resp = user_schema.create(
        id='190',
        username='emichael90',
        map_field=dict(
            key1='value',
            key2=dict(
                key1='value')))
    assert resp
    resp = user_schema.fetch_one(id=Equal('190'))
    assert resp
    assert isinstance(resp.message.get('map_field'), dict)


def test_create_an_entry_on_existing_user_username(user_schema):
    """Duplicate global-index key (username) is rejected."""
    resp = user_schema.create(id='30', username='michael')
    assert resp
    resp = user_schema.create(id='20', username='michael')
    assert not resp
    assert not resp.errors.get('message').get('id')
    assert resp.errors.get('code') is consts.ERROR_CODE_DUPLICATE_KEY
    assert isinstance(
        resp.errors.get('message').get('username'), exceptions.FieldException)


def test_create_an_entry_on_existing_user_username_and_id(user_schema):
    """Duplicate username with a new id still reports only the username."""
    resp = user_schema.create(id='30', username='michael')
    assert resp
    resp = user_schema.create(id='20', username='michael')
    assert not resp
    assert not resp.errors.get('message').get('id')
    assert resp.errors.get('code') is consts.ERROR_CODE_DUPLICATE_KEY
    assert isinstance(
        resp.errors.get('message').get('username'), exceptions.FieldException)
def test_thread_creation(thread_schema):
    """A thread is retrievable through each of its three compound indexes."""
    resp = thread_schema.create(
        forum_name='News', thread_title='title', thread_author='user',
        thread_content='content')
    assert resp
    resp = thread_schema.fetch_one(
        forum_name=Equal('News'), thread_title=Equal('title'))
    assert resp
    assert resp.message.get('thread_author') == 'user'
    resp = thread_schema.fetch_one(
        forum_name=Equal('News'), thread_author=Equal('user'))
    assert resp
    assert resp.message.get('thread_title') == 'title'
    resp = thread_schema.fetch_one(
        thread_title=Equal('title'), thread_author=Equal('user'))
    assert resp
    assert resp.message.get('forum_name') == 'News'
    # rows that differ in at least one key of every index are accepted
    resp = thread_schema.create(
        forum_name='Updates', thread_title='Title2', thread_author='user',
        thread_content='content')
    assert resp
    resp = thread_schema.create(
        forum_name='Updates', thread_title='Title3', thread_author='user2',
        thread_content='content')
    assert resp
    resp = thread_schema.create(
        forum_name='Others', thread_title='Title', thread_author='user4',
        thread_content='foobar')
    assert resp


def test_thread_creation_on_duplicate_indexes(thread_schema):
    """Creates colliding on any index report both offending key fields."""
    # Indexes:
    # - Forum Name and Thread Title
    # - Forum Name - Author
    # - Forum Title - Author
    resp = thread_schema.create(
        forum_name='News', thread_title='title', thread_author='user',
        thread_content='content')
    assert resp
    resp = thread_schema.create(
        forum_name='News', thread_title='title', thread_author='user2',
        thread_content='content')
    assert not resp
    assert resp.errors.get('message').get('forum_name')
    assert resp.errors.get('message').get('thread_title')
    resp = thread_schema.create(
        forum_name='News', thread_title='title2', thread_author='user',
        thread_content='content')
    assert not resp
    assert resp.errors.get('message').get('thread_author')
    assert resp.errors.get('message').get('forum_name')
    resp = thread_schema.create(
        forum_name='Other', thread_title='title', thread_author='user',
        thread_content='content')
    assert not resp
    assert resp.errors.get('message').get('thread_title')
    assert resp.errors.get('message').get('thread_author')
def test_create_dynamo_schema(table_name):
    """Schema wires up its table and keeps indexes in declaration order."""
    table = TableDynamo(table_name, dynamodb.connection)
    primary_index = IndexDynamo(Index.PRIMARY, 'id')
    global_index = IndexDynamo(Index.GLOBAL, 'foo', 'bar')
    tb = Schema(table, primary_index, global_index)
    assert tb.table == table
    assert tb.indexes[0] == primary_index
    assert tb.indexes[1] == global_index


def test_fetch_on_index(thread_schema):
    """fetch() (multi-row) works through primary and global indexes."""
    resp = thread_schema.create(
        forum_name='News', thread_title='title', thread_author='user',
        thread_content='content')
    assert resp
    resp = thread_schema.fetch(
        forum_name=Equal('News'), thread_title=Equal('title'))
    assert resp
    assert resp.message[0].get('thread_author') == 'user'
    resp = thread_schema.fetch(
        thread_title=Equal('title'), thread_author=Equal('user'))
    assert resp
    assert resp.message[0].get('forum_name') == 'News'


def test_fetch_many(user_schema):
    """The In() operator fetches several rows in one call."""
    user_schema.create(id='30', username='michael1')
    user_schema.create(id='40', username='michael2')
    resp = user_schema.fetch(username=In('michael1', 'michael2'))
    assert resp
    assert len(resp.message) == 2


def test_between_request(stats_schema):
    """Between() on the range key is inclusive at both ends."""
    stats_schema.create(user_id=301, day_id=35, metrics=937)
    for day in range(50):
        metrics = int(random() * 400)
        resp = stats_schema.create(user_id=300, day_id=day, metrics=metrics)
    resp = stats_schema.fetch(user_id=Equal(300), day_id=Between(30, 40))
    assert len(resp.message) == 11  # 40 is included


def test_sorting(stats_schema):
    """Results honor the requested ascending/descending sort on day_id."""
    days = list(range(50))
    shuffle(days)
    for day in days:
        metrics = int(random() * 400)
        resp = stats_schema.create(user_id=300, day_id=day, metrics=metrics)
    resp = stats_schema.fetch(user_id=Equal(300), sort=consts.SORT_DESCENDING)
    start = 49
    for i in range(50):
        assert resp.message[i].get('day_id') == start
        start = start - 1
    resp = stats_schema.fetch(user_id=Equal(300), sort=consts.SORT_ASCENDING)
    for i in range(50):
        assert resp.message[i].get('day_id') == i


def test_reverse_schema(stats_reverse_schema):
    """The reversed global index (day_id, user_id) is queryable on its own."""
    days = list(range(50))
    shuffle(days)
    total_reverse = 0
    for day in days:
        metrics = int(random() * 400)
        stats_reverse_schema.create(
            user_id=300, day_id=day, metrics=metrics)
        if not day % 2:
            total_reverse += 1
            stats_reverse_schema.create(
                user_id=200, day_id=day, metrics=metrics)
    resp = stats_reverse_schema.fetch(user_id=Equal(300))
    assert len(resp.message) == 50
    resp = stats_reverse_schema.fetch(user_id=Equal(200))
    assert len(resp.message) == total_reverse
    resp = stats_reverse_schema.fetch(day_id=Equal(18))
    assert len(resp.message) == 2
def test_dynamo_table_creation(table_name):
    """create_table() produces an empty DynamoDB table."""
    schema = Schema(
        TableDynamo(table_name, dynamodb.connection),
        IndexDynamo(
            Index.PRIMARY, 'id', read_capacity=8, write_capacity=4),
        id=Field())
    table = schema.table
    table.create_table()
    table.table.meta.client.get_waiter('table_exists').wait(
        TableName=table_name)
    assert dynamodb.connection.Table(table_name).item_count is 0


def test_dynamo_table_creation_collision(table_name):
    """Creating the same table twice raises."""
    tb = Schema(
        TableDynamo(table_name, dynamodb.connection),
        IndexDynamo(
            Index.PRIMARY, 'id', read_capacity=8, write_capacity=2),
        id=Field())
    tb.table.create_table()
    with pytest.raises(Exception):
        tb.table.create_table()


def test_create_empty_table(table_name):
    """Test the creation of an empty table.
    """
    tb = Schema(TableDynamo(table_name, dynamodb.connection))
    with pytest.raises(Exception):
        tb.table.create_table()


def test_create_table_without_index(table_name):
    """A schema with fields but no index cannot create its table."""
    tb = Schema(TableDynamo(table_name, dynamodb.connection), id=Field())
    with pytest.raises(Exception):
        tb.table.create_table()


def test_create_table_without_fields(table_name):
    """A schema with an index but no fields cannot create its table."""
    tb = Schema(
        TableDynamo(table_name, dynamodb.connection),
        IndexDynamo(Index.PRIMARY, 'id', read_capacity=1, write_capacity=1))
    with pytest.raises(Exception):
        tb.table.create_table()
def test_extension_usage(user_schema):
    """Registered extensions run on fetch and receive their sub-field list."""
    @user_schema.extension('stats')
    def stats(item, fields):
        return {'days': 10, 'fields': fields}

    @user_schema.extension('history')
    def history(item, fields):
        return {'length': 20}

    response = user_schema.create(id='testextension', username='michael')
    assert response
    # 'stats.foobar' asks for the 'stats' extension with sub-field 'foobar'
    response = user_schema.fetch_one(
        username=Equal('michael'), fields=['stats.foobar', 'stats.tests.bar'])
    assert response.message.get('stats').get('days') == 10
    assert 'foobar' in response.message.get('stats').get('fields')
    assert 'tests.bar' in response.message.get('stats').get('fields')
    response = user_schema.fetch_one(
        username=Equal('michael'),
        fields=['history', 'stats.foobar', 'stats.tests.bar'])
    assert response.message.get('stats').get('days') == 10
    assert 'foobar' in response.message.get('stats').get('fields')
    assert 'tests.bar' in response.message.get('stats').get('fields')
    assert response.message.get('history').get('length') == 20
| 31.899543 | 79 | 0.681864 |
acf8d8a1d254ddd0ab3c6b30e871fe6f217445ac | 9,063 | py | Python | inverse_rl/algos/irl_batch_polopt.py | keuntaeklee/inverse_rl | 721c1b3c85668a54681e5e7963ebd09be646822d | [
"MIT"
] | null | null | null | inverse_rl/algos/irl_batch_polopt.py | keuntaeklee/inverse_rl | 721c1b3c85668a54681e5e7963ebd09be646822d | [
"MIT"
] | null | null | null | inverse_rl/algos/irl_batch_polopt.py | keuntaeklee/inverse_rl | 721c1b3c85668a54681e5e7963ebd09be646822d | [
"MIT"
] | null | null | null | import time
from rllab.algos.base import RLAlgorithm
import rllab.misc.logger as logger
import rllab.plotter as plotter
from sandbox.rocky.tf.policies.base import Policy
import tensorflow as tf
from sandbox.rocky.tf.samplers.batch_sampler import BatchSampler
from sandbox.rocky.tf.samplers.vectorized_sampler import VectorizedSampler
import numpy as np
from collections import deque
from inverse_rl.utils.hyperparametrized import Hyperparametrized
class IRLBatchPolopt(RLAlgorithm, metaclass=Hyperparametrized):
    """
    Base class for batch sampling-based policy optimization methods.
    This includes various policy gradient methods like vpg, npg, ppo, trpo, etc.

    Subclasses must implement init_opt(), get_itr_snapshot() and
    optimize_policy().  On top of the plain batch-polopt loop, this class
    optionally replaces/augments environment rewards with the output of an
    IRL discriminator model (see compute_irl()).
    """

    def __init__(
            self,
            env,
            policy,
            baseline,
            scope=None,
            n_itr=500,
            start_itr=0,
            batch_size=5000,
            max_path_length=500,
            discount=0.99,
            gae_lambda=1,
            plot=False,
            pause_for_plot=False,
            center_adv=True,
            positive_adv=False,
            store_paths=True,
            whole_paths=True,
            fixed_horizon=False,
            sampler_cls=None,
            sampler_args=None,
            force_batch_sampler=False,
            init_pol_params = None,
            irl_model=None,
            irl_model_wt=1.0,
            discrim_train_itrs=10,
            zero_environment_reward=False,
            init_irl_params=None,
            train_irl=True,
            key='',
            **kwargs
    ):
        """
        :param env: Environment
        :param policy: Policy
        :type policy: Policy
        :param baseline: Baseline
        :param scope: Scope for identifying the algorithm. Must be specified if running multiple algorithms
        simultaneously, each using different environments and policies
        :param n_itr: Number of iterations.
        :param start_itr: Starting iteration.
        :param batch_size: Number of samples per iteration.
        :param max_path_length: Maximum length of a single rollout.
        :param discount: Discount.
        :param gae_lambda: Lambda used for generalized advantage estimation.
        :param plot: Plot evaluation run after each iteration.
        :param pause_for_plot: Whether to pause before contiuing when plotting.
        :param center_adv: Whether to rescale the advantages so that they have mean 0 and standard deviation 1.
        :param positive_adv: Whether to shift the advantages so that they are always positive. When used in
        conjunction with center_adv the advantages will be standardized before shifting.
        :param store_paths: Whether to save all paths data to the snapshot.
        :return:
        """
        self.env = env
        self.policy = policy
        self.baseline = baseline
        self.scope = scope
        self.n_itr = n_itr
        self.start_itr = start_itr
        self.batch_size = batch_size
        self.max_path_length = max_path_length
        self.discount = discount
        self.gae_lambda = gae_lambda
        self.plot = plot
        self.pause_for_plot = pause_for_plot
        self.center_adv = center_adv
        self.positive_adv = positive_adv
        self.store_paths = store_paths
        self.whole_paths = whole_paths
        self.fixed_horizon = fixed_horizon
        self.init_pol_params = init_pol_params
        self.init_irl_params = init_irl_params
        self.irl_model = irl_model
        # weight of the IRL reward added to (zeroed) environment reward
        self.irl_model_wt = irl_model_wt
        self.no_reward = zero_environment_reward
        self.discrim_train_itrs = discrim_train_itrs
        self.train_irl = train_irl
        # last fitted IRL model parameters (None until first compute_irl)
        self.__irl_params = None

        if self.irl_model_wt > 0:
            assert self.irl_model is not None, "Need to specify a IRL model"

        if sampler_cls is None:
            # vectorized sampler when the policy supports it, unless forced
            if self.policy.vectorized and not force_batch_sampler:
                print('using vec sampler')
                sampler_cls = VectorizedSampler
            else:
                print('using batch sampler')
                sampler_cls = BatchSampler
        if sampler_args is None:
            sampler_args = dict()
        self.sampler = sampler_cls(self, **sampler_args)
        self.init_opt()

    def start_worker(self):
        # spin up sampler workers (and the plotter, if enabled)
        self.sampler.start_worker()
        if self.plot:
            plotter.init_plot(self.env, self.policy)

    def shutdown_worker(self):
        self.sampler.shutdown_worker()

    def obtain_samples(self, itr):
        return self.sampler.obtain_samples(itr)

    def process_samples(self, itr, paths):
        #processed = self.sampler.process_samples(itr, paths)
        return self.sampler.process_samples(itr, paths)

    def log_avg_returns(self, paths):
        """Return the mean undiscounted return over *paths*."""
        undiscounted_returns = [sum(path["rewards"]) for path in paths]
        avg_return = np.mean(undiscounted_returns)
        return avg_return

    def get_irl_params(self):
        """Return the IRL model parameters from the latest fit (or None)."""
        return self.__irl_params

    def compute_irl(self, paths, itr=0):
        """Optionally fit the IRL discriminator and rewrite path rewards.

        When zero_environment_reward is set, the environment rewards are
        zeroed (the original average is logged first).  If an IRL model is
        in use, it is fitted (when train_irl) and its scores, scaled by
        irl_model_wt, are added to the rewards -- per trajectory or per
        step depending on irl_model.score_trajectories.  Mutates and
        returns *paths*.
        """
        if self.no_reward:
            tot_rew = 0
            for path in paths:
                tot_rew += np.sum(path['rewards'])
                path['rewards'] *= 0
            logger.record_tabular('OriginalTaskAverageReturn', tot_rew/float(len(paths)))

        if self.irl_model_wt <=0:
            return paths

        if self.train_irl:
            max_itrs = self.discrim_train_itrs
            lr=1e-3
            mean_loss = self.irl_model.fit(paths, policy=self.policy, itr=itr, max_itrs=max_itrs, lr=lr,
                                           logger=logger)

            logger.record_tabular('IRLLoss', mean_loss)
            self.__irl_params = self.irl_model.get_params()

        probs = self.irl_model.eval(paths, gamma=self.discount, itr=itr)

        logger.record_tabular('IRLRewardMean', np.mean(probs))
        logger.record_tabular('IRLRewardMax', np.max(probs))
        logger.record_tabular('IRLRewardMin', np.min(probs))

        if self.irl_model.score_trajectories:
            # TODO: should I add to reward here or after advantage computation?
            for i, path in enumerate(paths):
                path['rewards'][-1] += self.irl_model_wt * probs[i]
        else:
            for i, path in enumerate(paths):
                path['rewards'] += self.irl_model_wt * probs[i]
        return paths

    def train(self):
        """Run the full sample -> IRL -> optimize loop for n_itr iterations."""
        sess = tf.compat.v1.get_default_session()
        sess.run(tf.compat.v1.global_variables_initializer())
        if self.init_pol_params is not None:
            self.policy.set_param_values(self.init_pol_params)
        if self.init_irl_params is not None:
            self.irl_model.set_params(self.init_irl_params)
        self.start_worker()
        start_time = time.time()

        returns = []
        for itr in range(self.start_itr, self.n_itr):
            itr_start_time = time.time()
            with logger.prefix('itr #%d | ' % itr):
                logger.log("Obtaining samples...")
                paths = self.obtain_samples(itr)

                logger.log("Processing samples...")
                # IRL reward substitution happens before advantage estimation
                paths = self.compute_irl(paths, itr=itr)
                returns.append(self.log_avg_returns(paths))
                samples_data = self.process_samples(itr, paths)

                logger.log("Logging diagnostics...")
                self.log_diagnostics(paths)
                logger.log("Optimizing policy...")
                self.optimize_policy(itr, samples_data)
                logger.log("Saving snapshot...")
                params = self.get_itr_snapshot(itr, samples_data)  # , **kwargs)
                if self.store_paths:
                    params["paths"] = samples_data["paths"]
                logger.save_itr_params(itr, params)
                logger.log("Saved")
                logger.record_tabular('Time', time.time() - start_time)
                logger.record_tabular('ItrTime', time.time() - itr_start_time)
                logger.dump_tabular(with_prefix=False)
                if self.plot:
                    self.update_plot()
                    if self.pause_for_plot:
                        input("Plotting evaluation run: Press Enter to "
                              "continue...")
        self.shutdown_worker()
        return

    def log_diagnostics(self, paths):
        self.env.log_diagnostics(paths)
        self.policy.log_diagnostics(paths)
        self.baseline.log_diagnostics(paths)

    def init_opt(self):
        """
        Initialize the optimization procedure. If using tensorflow, this may
        include declaring all the variables and compiling functions
        """
        raise NotImplementedError

    def get_itr_snapshot(self, itr, samples_data):
        """
        Returns all the data that should be saved in the snapshot for this
        iteration.
        """
        raise NotImplementedError

    def optimize_policy(self, itr, samples_data):
        raise NotImplementedError

    def update_plot(self):
        if self.plot:
            plotter.update_plot(self.policy, self.max_path_length)
| 37.296296 | 111 | 0.616352 |
acf8d8bb9df4e51d0f05b8dcca5b3ced46f422a8 | 93,694 | py | Python | commonl/__init__.py | intel/tcf | f194cc913d31ec7fbd86082d1aa7643211936d05 | [
"Apache-2.0"
] | 24 | 2018-08-21T18:04:48.000Z | 2022-02-07T22:50:06.000Z | commonl/__init__.py | intel/tcf | f194cc913d31ec7fbd86082d1aa7643211936d05 | [
"Apache-2.0"
] | 16 | 2018-08-21T18:03:52.000Z | 2022-03-01T17:15:42.000Z | commonl/__init__.py | intel/tcf | f194cc913d31ec7fbd86082d1aa7643211936d05 | [
"Apache-2.0"
] | 29 | 2018-08-22T19:40:59.000Z | 2021-12-21T11:13:23.000Z | #! /usr/bin/python3
#
# Copyright (c) 2017 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
"""
Common timo infrastructure and code
Command line and logging helpers
.. moduleauthor:: FIXME <fixme@domain.com>
.. admonition:: FIXMEs
- This is still leaking temporary files (subpython's stdout and
stderr) when running top level tests.
"""
import argparse
import base64
import bisect
import collections
import contextlib
import errno
import fnmatch
import glob
import hashlib
import imp
import importlib
import io
import inspect
import logging
import numbers
import os
import random
import re
import requests
import signal
import socket
import string
import struct
import subprocess
import sys
import tempfile
import threading
import time
import traceback
import types
# Keyring support is hard-disabled (guarded by "if False") until the import
# problems in the keyring package are resolved; the flag is kept so code can
# still check availability.
if False:
    # disabling all this until we have a proper fix for the import
    # mess they keyring package has
    try:
        import keyring
        keyring_available = True
    except ImportError as e:
        logging.warning("can't import keyring, functionality disabled")
        keyring_available = False
else:
    keyring_available = False

from . import expr_parser

# Single-letter level names keep log lines compact; the levels below
# DEBUG (10) give extra verbosity steps D2..D5 (see logging_verbosity_inc).
logging.addLevelName(50, "C")
logging.addLevelName(40, "E")
logging.addLevelName(30, "W")
logging.addLevelName(20, "I")
logging.addLevelName(10, "D")
logging.addLevelName(9, "D2")
logging.addLevelName(8, "D3")
logging.addLevelName(7, "D4")
logging.addLevelName(6, "D5")

# Ensure compatibility with python versions before 3.7 since older
# versions use re._pattern_type instead of re.Pattern
if sys.version_info.major == 3 and sys.version_info.minor < 7:
    re.Pattern = re._pattern_type
def config_import_file(filename, namespace = "__main__",
                       raise_on_fail = True):
    """Import a Python [configuration] file.

    Any symbol available to the current namespace is available to the
    configuration file.

    :param filename: path and file name to load.

    :param namespace: namespace where to insert the configuration file

    :param bool raise_on_fail: (optional) raise an exception if the
      importing of the config file fails.

    >>> timo.config_import_file("some/path/file.py", "__main__")
    """
    # imp.load_source() was deprecated since Python 3.3 and removed in
    # 3.12; use the documented importlib replacement, which keeps the same
    # observable behaviour: execute the file and register the resulting
    # module under *namespace* in sys.modules.
    import importlib.util

    logging.log(9, "%s: configuration file being loaded", filename)
    try:
        spec = importlib.util.spec_from_file_location(namespace, filename)
        module = importlib.util.module_from_spec(spec)
        sys.modules[namespace] = module
        spec.loader.exec_module(module)
        sys.stdout.flush()
        sys.stderr.flush()
        logging.debug("%s: configuration file imported", filename)
    except Exception as e: 	# pylint: disable = W0703
        # throw a wide net to catch any errors in filename
        logging.exception("%s: can't load config file: %s", filename, e)
        if raise_on_fail:
            raise
def path_expand(path_list):
    """Flatten *path_list* into a list of user-expanded paths.

    Each entry may itself contain several paths joined with os.pathsep;
    an empty path component discards everything accumulated so far
    (used to let a later entry reset the search path).
    """
    expanded = []
    for entry in path_list:
        for component in entry.split(os.pathsep):
            if component == "":
                expanded = []
            else:
                expanded.append(os.path.expanduser(component))
    return expanded
def config_import(path_list, file_regex, namespace = "__main__",
                  raise_on_fail = True, imported_files = None):
    """Import Python [configuration] files that match file_regex in any of
    the list of given paths into the given namespace.

    Any symbol available to the current namespace is available to the
    configuration file.

    :param paths: list of paths where to import from; each item can be
      a list of colon separated paths and thus the list would be further
      expanded. If an element is the empty list, it removes the
      current list.

    :param file_regex: a compiled regular expression to match the file
      name against.

    :param namespace: namespace where to insert the configuration file

    :param bool raise_on_fail: (optional) raise an exception if the
      importing of the config file fails.

    >>> timo.config_import([ ".config:/etc/config" ],
    >>>                    re.compile("conf[_-].*.py"), "__main__")

    """

    # Compose the path list
    _list = path_expand(path_list)
    # tracks absolute paths already scanned so duplicates in the list
    # are only processed once
    paths_done = set()
    # Bring in config files
    # FIXME: expand ~ -> $HOME
    for path in _list:
        abs_path = os.path.abspath(os.path.normpath(path))
        if abs_path in paths_done:
            # Skip what we have done already
            continue
        logging.log(8, "%s: loading configuration files %s",
                    path, file_regex.pattern)
        try:
            if not os.path.isdir(path):
                logging.log(7, "%s: ignoring non-directory", path)
                continue
            # sorted() gives a deterministic load order
            for filename in sorted(os.listdir(path)):
                if not file_regex.match(filename):
                    logging.log(6, "%s/%s: ignored", path, filename)
                    continue
                config_file_path = os.path.join(path, filename)
                config_import_file(config_file_path, namespace)
                if imported_files != None:
                    imported_files.append(config_file_path)
        except Exception:	# pylint: disable = W0703
            # throw a wide net to catch any errors in filename
            logging.error("%s: can't load config files", path)
            if raise_on_fail:
                raise
        else:
            logging.log(9, "%s: loaded configuration files %s",
                        path, file_regex.pattern)
        paths_done.add(abs_path)
def logging_verbosity_inc(level):
    """Return *level* one verbosity step more verbose (i.e. lower).

    Levels above logging.DEBUG move in steps of 10 (ERROR -> WARNING ...);
    at DEBUG and below they move one by one into the custom D2..D5 levels.
    A level of 0 yields None.
    """
    if level == 0:
        return None
    step = 10 if level > logging.DEBUG else 1
    return level - step
def logfile_open(tag, cls = None, delete = True, bufsize = 0,
                 suffix = ".log", who = None, directory = None):
    """
    Open a uniquely-named log file for writing

    :param str tag: short tag to include in the file name (may be empty)
    :param cls: (optional) class whose name to include in the file name
    :param bool delete: (optional) unused; kept for interface
      compatibility -- TODO confirm no caller relies on it
    :param int bufsize: (optional) buffering passed to :func:`open`
      (0 -> unbuffered, valid for the binary mode used here)
    :param str suffix: (optional) file name suffix
    :param str who: (optional) caller identification; defaults to the
      calling function's name and line number
    :param str directory: (optional) directory where to create the
      file; defaults to the system's temporary directory
    :returns: file object open in *w+b* mode
    """
    assert isinstance(tag, str)
    if who == None:
        # identify the caller: function name + line number
        frame = inspect.stack(0)[1][0]
        who = frame.f_code.co_name + "__%d" % frame.f_lineno
    if tag != "":
        tag += "-"
    if cls != None:
        clstag = cls.__name__ + "."
    else:
        clstag = ''
    # BUGFIX: a *directory* of None used to crash os.path.join();
    # default to the system's temporary directory
    if directory == None:
        directory = tempfile.gettempdir()
    # can't use tempfile.NamedTemporaryFile bc then Windows doesn't
    # let us open it again
    return open(
        os.path.join(
            directory,
            os.path.basename(sys.argv[0]) + "__" + clstag + who + "-" + tag
            + f"{random.randrange(0, 100000):05d}"
            + suffix
        ),
        "w+b",
        bufsize
    )
def argparser_add_aka(ap, name, aka):
    """
    Register *aka* as an alias for an existing subcommand *name*

    :param ap: argparse subparsers action (as returned by
      *add_subparsers()*) where *name* is already registered
    :param str name: existing subcommand name
    :param str aka: alias to add for it
    """
    # UGLY, but...: reach into argparse's internal name -> parser map
    # and point the alias at the same parser object
    ap._name_parser_map[aka] = ap._name_parser_map[name]
class _Action_increase_level(argparse.Action):
    # argparse action: each -v occurrence increases verbosity one
    # notch by decreasing the numeric logging level via
    # logging_verbosity_inc()
    def __init__(self, option_strings, dest, default = None, required = False,
                 nargs = None, **kwargs):
        # force nargs = 0: the option takes no argument, it just counts
        super(_Action_increase_level, self).__init__(
            option_strings, dest, nargs = 0, required = required,
            **kwargs)

    #
    # Python levels are 50, 40, 30, 20, 10 ... (debug) 9 8 7 6 5 ... :)
    def __call__(self, parser, namespace, values, option_string = None):
        # first -v seen: start from ERROR and walk down from there
        if namespace.level == None:
            namespace.level = logging.ERROR
        namespace.level = logging_verbosity_inc(namespace.level)
def log_format_compose(log_format, log_pid, log_time = False):
    """
    Compose a logging format string, optionally inserting PID and
    timestamp fields right after the level name

    :param str log_format: base format (shall contain *%(levelname)s*)
    :param bool log_pid: add the process ID after the level name
    :param bool log_time: (optional) add a timestamp after the level name
    :returns str: the composed format string
    """
    level_field = "%(levelname)s"
    if log_pid == True:
        log_format = log_format.replace(
            level_field, level_field + "[%(process)d]", 1)
    if log_time == True:
        log_format = log_format.replace(
            level_field, level_field + "/%(asctime)s", 1)
    return log_format
def cmdline_log_options(parser):
    """Initializes a parser with the standard command line options to
    control verbosity when using the logging module

    :param python:argparse.ArgParser parser: command line argument parser

    -v|--verbose to increase verbosity (defaults to print/log errors only)

    Note that after processing the command line options, you need to
    initialize logging with:

    >>> import logging, argparse, timo.core
    >>> arg_parser = argparse.ArgumentParser()
    >>> timo.core.cmdline_log_options(arg_parser)
    >>> args = arg_parser.parse_args()
    >>> logging.basicConfig(format = args.log_format, level = args.level)
    """
    if not isinstance(parser, argparse.ArgumentParser):
        raise TypeError("parser argument has to be an argparse.ArgumentParser")

    # each -v decreases args.level one verbosity notch (see
    # _Action_increase_level); level stays None if no -v is given
    parser.add_argument("-v", "--verbose",
                        dest = "level",
                        action = _Action_increase_level, nargs = 0,
                        help = "Increase verbosity")

    # these two are meant to be fed to log_format_compose() by the caller
    parser.add_argument("--log-pid-tid", action = "store_true",
                        default = False,
                        help = "Print PID and TID in the logs")
    parser.add_argument("--log-time", action = "store_true",
                        default = False,
                        help = "Print Date and time in the logs")
def kws_expand(s: str, kws: dict, nest_limit: int = 5):
    """
    Expand a template string with a dictionary

    This is a version of *s % kws* that works recursively and supports
    templates in the keys too.

    Eg:

    >>> kws_expand('a simple %(field)s substitution', dict(field = "field"))
    'a simple field substitution'

    >>> kws_expand('a nested %(nested_field)s substitution',
    ...            dict(nested_field = "field", field = 'nested field'))
    'a nested field substitution'

    >>> kws_expand('a key %(nested_%(key)s_field)s substitution',
    ...            dict(nested_key_field = "%(field)s", field = 'nested field', key = "key"))
    'a key nested field substitution'

    :param str s: templated string to expand; if it contains no
      *%(FIELD)* templates, it won't be templated.

      To include a *%(* character sequence that is not expanded, you
      need to double the percentage sign as in *%%(*, understanding
      that for every level of nested templating done you will need to
      double them.

    :param dict kws: Dictionary keyed by strings of values to
      template.

    :param int nest_limit: (optional; default 5) how many iterations
      are done when trying to expand all templates before giving up.

    :raises KeyError: if a template field is not available

      To have missing fields expanded with a default value, pass a
      argument to *kws* a :class:`commonl.dict_missing_c`, a
      dictionary that returns pre-defined strings for missing keys.

    :raises RecursionError: if the nest limit is exceeded

    :return str: string with the template fields expanded
    """
    assert isinstance(s, str), \
        f"s: expected str; got {type(s)}"
    if kws != None:
        assert_dict_key_strings(kws, 'kws')
    # BUGFIX: the message used to read "expected int <= 1", the
    # opposite of what is checked
    assert isinstance(nest_limit, int) and nest_limit >= 1, \
        f"nest_limit: expected int >= 1; got {type(nest_limit)} {nest_limit}"

    if not kws:			# nothing to template
        return s

    # template until there are no %( or we are 86ed
    _s = s
    for _count in range(nest_limit + 1):
        try:
            if '%(' not in _s:
                break
            _s = _s % kws
        except KeyError as e:
            # missing key?
            key = e.args[0]
            if '%(' in key:
                # this is a templated key, eg someone did:
                #
                # >>> "this string %(field1.%(field2)s.whatever)s ..."
                #
                # so first make "this string %(field1.VALUE2.whatever)s"
                # and then "this string VALUE3"
                _s = _s.replace(key, key % kws)
                continue
            raise KeyError(
                f"configuration error? missing field '{key}' in "
                f"template string '{s}'") from e
    else:
        # for/else: only reached when the loop ran out without break
        raise RecursionError(
            f"configuration error? nest limit is {nest_limit} and"
            f" templates not all resolved for template '{s}'")
    return _s
def mkid(something, l = 10):
    """
    Generate an *l*-character, lowercase base32 ID from an object

    :param something: anything from which an id has to be generated
      (a string or any bytes-like object hashlib accepts)
    :param int l: (optional, default 10) number of ID characters
    :returns str: lowercase base32 identifier
    """
    if isinstance(something, str):
        data = something.encode('utf-8')
    else:
        data = something
    digest = hashlib.sha512(data).digest()
    return base64.b32encode(digest)[:l].lower().decode('utf-8', 'ignore')
def trim_trailing(s, trailer):
    """
    Return *s* with one trailing occurrence of *trailer* removed (if
    present).

    :param str s: string to trim from
    :param str trailer: string to trim
    """
    if trailer and s.endswith(trailer):
        return s[:len(s) - len(trailer)]
    return s
def verify_str_safe(s, safe_chars = None, do_raise = True, name = "string"):
    """
    Verify a string contains only characters from a safe set

    :param str s: string to check
    :param safe_chars: (optional) set of valid chars
      (defaults to ASCII letters, digits, - and _)
    :param bool do_raise: (optional, default *True*) raise an
      AssertionError when invalid characters are found; otherwise
      return a boolean verdict
    :param str name: (optional) name to use in error messages
    :returns: *None* when *do_raise* is *True*; otherwise *True* if
      the string is safe, *False* if not
    """
    assert isinstance(s, str), \
        f"{name}: got a {type(s)}; expected a string"
    if safe_chars == None:
        safe_chars = set('-_' + string.ascii_letters + string.digits)
    unsafe = { c for c in s if c not in safe_chars }
    if not do_raise:
        return not unsafe
    assert not unsafe, \
        f"{name}: contains invalid characters: {''.join(unsafe)}" \
        f" (valid are: {''.join(safe_chars)})"
    return None			# keep pylint happy
def name_make_safe(name, safe_chars = None):
    """
    Return *name* with every character not in the safe set replaced
    with an underscore.

    :param str name: name to make *safe*
    :param set safe_chars: (optional) set of characters that are
      considered safe. Defaults to ASCII letters and digits plus - and
      _.
    """
    if safe_chars == None:
        safe_chars = set('-_' + string.ascii_letters + string.digits)
    # We don't use string.translate()'s deletions because it doesn't
    # take them for Unicode strings.
    return "".join(c if c in safe_chars else '_' for c in name)
def file_name_make_safe(file_name, extra_chars = ":/"):
    """
    Return *file_name* with all characters in *extra_chars* and all
    whitespace removed.

    This is useful to kinda make a URL into a file name, but it's not
    bidirectional (as it is destructive) and not very fool proof.
    """
    # We don't use string.translate()'s deletions because it doesn't
    # take them for Unicode strings.
    deleted = set(extra_chars + string.whitespace)
    return "".join(c for c in file_name if c not in deleted)
def file_touch(file_name):
    """
    Set a file's atime/mtime to the current time

    :param str file_name: name of the file whose timestamp is to be modified
    """
    now = time.time()
    os.utime(file_name, ( now, now ))
def hash_file(hash_object, filepath, blk_size = 8192):
    """
    Feed the contents of a file through a hash generator.

    :param hash_object: hash object (from :py:mod:`hashlib`)
    :param str filepath: path to the file to feed
    :param int blk_size: read the file in chunks of that size (in bytes)
    :returns: the same *hash_object*, updated with the file's data
    """
    assert hasattr(hash_object, "update")
    with open(filepath, 'rb') as f:
        while True:
            block = f.read(blk_size)
            if not block:
                break
            hash_object.update(block)
    return hash_object
# module-level hash object; used only to read the algorithm's
# digest_size when sanity-checking cached hash values (see
# _hash_file_cached)
_hash_sha512 = hashlib.sha512()
def _hash_file_cached(filepath, digest, cache_path, cache_entries):
    # Hash *filepath* with *digest*, caching the result as a symlink
    # in *cache_path* (symlink name = hash of path+stat info, symlink
    # target = the hex digest); symlinks give an atomic, lightweight
    # key/value store. May raise FileExistsError if another process
    # races us creating the same entry (see hash_file_cached()).
    #
    # stat info happens to be iterable, ain't that nice; the cache key
    # covers path + full stat, so any file change invalidates it
    filepath_stat_hash = mkid(
        digest + filepath + "".join([ str(i) for i in os.stat(filepath) ]),
        l = 48)
    # if there is no cache location, use our preset in the user's home dir
    if cache_path == None:
        cache_path = os.path.join(
            os.path.expanduser("~"), ".cache", "file-hashes")
    makedirs_p(cache_path)
    # trim the cache to *cache_entries*, oldest first
    symlink_lru_cleanup(cache_path, cache_entries)
    cached_filename = os.path.join(cache_path, filepath_stat_hash)
    try:
        value = os.readlink(cached_filename)
        # we have read the value, so now we remove the entry and
        # if it is "valid", we recreate it, so the mtime is
        # updated and thus an LRU cleanup won't wipe it.
        # FIXME: python3 just update utime
        rm_f(cached_filename)
        # sanity check: a cached value must look like a hex digest
        # NOTE(review): this compares against sha512's digest size
        # regardless of *digest* -- TODO confirm intended for other
        # algorithms
        if value and isinstance(value, str) \
           and len(value) == 2 * _hash_sha512.digest_size:
            os.symlink(value, cached_filename)
            return value
        # fallthrough to re-calculate it
    except OSError as e:
        # ENOENT just means "not cached yet"
        if e.errno != errno.ENOENT:
            raise
    # not cached (or bogus entry): hash for real and store
    hoc = hash_file(hashlib.new(digest), filepath, blk_size = 8192)
    value = hoc.hexdigest()
    os.symlink(hoc.hexdigest(), cached_filename)
    return value
def hash_file_cached(filepath, digest,
                     cache_path = None, cache_entries = 1024):
    """
    Hash file contents and keep them in a cache in
    *~/.cache/file-hashes*.

    Next time the same file is being cached, use the cache entries (as
    long as the filepath is the same and the os.stat() signature
    doesn't change).

    :param str filepath: path to the file to hash
    :param str digest: digest to use; anything :mod:`python.hashlib` supports
    :param str cache_path: (optional; default
      *~/.cache/file-hashes*) path where
      to store the cached hashes.
    :param int cache_entries: (optional; default *1024*) how many
      entries to keep in the cache; old entries are removed to give way
      to frequently used entries or new ones (LRU).
    :returns str: hex digest
    :raises RuntimeError: if the cache entry can't be created after
      multiple retries
    """
    # _hash_file_cached() can raise FileExistsError when another
    # process creates the same cache symlink between our removal and
    # our write; retrying is safe because either entry is equivalent
    # (or ours is fresher, because the file was updated).
    tries_max = 10
    for _try in range(tries_max):
        try:
            return _hash_file_cached(filepath, digest,
                                     cache_path, cache_entries)
        except FileExistsError:
            continue
    # BUGFIX: the original fell off the retry loop and silently
    # returned None; fail loudly instead
    raise RuntimeError(
        f"{filepath}: can't compute/cache {digest} hash"
        f" after {tries_max} tries")
def symlink_lru_cleanup(dirname, max_entries):
    """
    Delete the oldest in a list of symlinks that are used as a cache
    until only *max_entries* are left

    :param str dirname: path where the files are located
    :param int max_entries: maximum number of entries which should be
      left
    """
    assert isinstance(dirname, str)
    assert isinstance(max_entries, int) and max_entries > 0
    # BUGFIX: the original indexed a dictionary by mtime, so entries
    # sharing a timestamp clobbered each other (miscounting and
    # possibly deleting the wrong files); keep (mtime, path) tuples
    # instead.
    entries = []
    for path, _dirs, filenames in os.walk(dirname):
        for filename in filenames:
            filepath = os.path.join(path, filename)
            # lstat: we care about the symlink itself, not its target
            entries.append(( os.lstat(filepath).st_mtime, filepath ))
        break			# only one directory level
    clean_number = len(entries) - max_entries
    if clean_number <= 0:
        return
    entries.sort()		# oldest (smallest mtime) first
    for _mtime, filepath in entries[:clean_number]:
        rm_f(filepath)
def hash_file_maybe_compressed(hash_object, filepath, cache_entries = 128,
                               cache_path = None, tmpdir = None):
    """Run the file's contents through a hash generator, maybe
    uncompressing it first.

    Uncompression only works if the file is compressed using a filter
    like program (eg: gz, bzip2, xz) -- see :func:`maybe_decompress`.

    If caching is enabled, the results of uncompressing and hashing
    the file will be kept in a cache so that next time it can be used
    instead of decompressing the file again.

    :param hash_object: :mod:`hashlib` returned object to do hashing
      on data.

      >>> hashlib.sha512()

    :param str filepath: path to file to hash; if not compressed, it
      will be passed straight to :func:`hash_file`.

    :param int cache_entries: (optional; default *128*) if zero,
      caching is disabled. Otherwise, caching is enabled and we'll
      keep those many entries.

      The results are cached based on the hash of the compressed
      data--if the hexdigest for the compressed data is in the cache,
      it's value will be the hexdigest of the uncompressed data. If
      not, decompress, calculate and store in the cache for future
      use.

    :param str cache_path: (optional; default
      *~/.cache/compressed-hashes*) if caching is enabled, path where
      to store the cached hashes.

    :param str tmpdir: (optional; default
      */tmp/compressed-hashes.XXXXXX*) temporary directory where to
      uncompress to generate the hash.

    :returns: hexdigest of the uncompressed file data in string form

    **Cache database**

    Cache entries use symlinks as an atomic key/value storage system
    in a directory. These are lightweight and setting is POSIX
    atomic.
    """
    assert isinstance(cache_entries, int) and cache_entries >= 0
    _basename, ext = file_is_compressed(filepath)
    if ext == None:		# not compressed, so pass through
        return hash_file(hash_object, filepath).hexdigest()

    # File is compressed
    #
    # Let's get the hash of the compressed data, using the same hash
    # object algorithm, to see if we have it cached.
    hoc = hash_file(hashlib.new(hash_object.name), filepath)
    hexdigest_compressed = hoc.hexdigest()

    if cache_entries:
        # if there is no cache location, use our preset in the user's home dir
        if cache_path == None:
            cache_path = os.path.join(
                os.path.expanduser("~"), ".cache", "compressed-hashes")
        makedirs_p(cache_path)
        symlink_lru_cleanup(cache_path, cache_entries)
        cached_filename = os.path.join(cache_path, hoc.hexdigest())
        try:
            value = os.readlink(cached_filename)
            # we have read the value, so now we remove the entry and
            # if it is "valid", we recreate it, so the mtime is
            # updated and thus an LRU cleanup won't wipe it.
            # FIXME: python3 just update utime
            rm_f(cached_filename)
            # basic verification, it has to look like the hexdigest()
            # BUGFIX: the original set value = None on a bogus entry
            # and then still tried os.symlink(None, ...) (TypeError)
            # and returned None; now a bogus entry falls through to
            # recompute the hash.
            if len(value) == len(hoc.hexdigest()):
                # recreate it, so that the mtime shows we just used it
                try:
                    os.symlink(value, cached_filename)
                except FileExistsError:
                    # this means someone has created the entry between us
                    # deleting it and now, which is fine -- it shall be
                    # the same
                    pass
                return value
        except OSError as e:
            # ENOENT just means "not cached yet"
            if e.errno != errno.ENOENT:
                raise

    # decompress and generate hash
    #
    # We need a tmpdir where to decompress
    if not tmpdir:
        tmpdir = tempfile.mkdtemp(prefix = "compressed-hashes.")
        tmpdir_delete = True
    else:
        tmpdir_delete = False
    # Now, because maybe_decompress() works by decompressing to a file
    # named without the extension (it is how it is), we link the file
    # with the extension to the tmpdir and tell maybe_decompress() to
    # do its thing -- then we hash the raw data
    filename_tmp_compressed = os.path.join(tmpdir, os.path.basename(filepath))
    os.symlink(os.path.abspath(filepath), filename_tmp_compressed)
    try:
        filename_tmp = maybe_decompress(filename_tmp_compressed)
        ho = hash_file(hash_object, filename_tmp)
    finally:
        rm_f(filename_tmp)
        rm_f(filename_tmp_compressed)
        if tmpdir_delete:
            os.rmdir(tmpdir)
    hexdigest = ho.hexdigest()
    if cache_entries:
        # symlink_f ignores an already-existing entry (racing writer)
        symlink_f(hexdigest, os.path.join(cache_path, hexdigest_compressed))

    return hexdigest
def request_response_maybe_raise(response):
    """
    Raise an annotated HTTPError if an HTTP response reports failure

    :param requests.Response response: response to check; a *requests*
      response evaluates falsy on error status codes
    :raises requests.exceptions.HTTPError: on failure; the exception
      message includes the status code and any error text found in the
      JSON body (fields *_message* or *message*), and the exception
      object is annotated with *status_code* and *message* attributes
      for callers to inspect
    """
    if not response:
        try:
            json = response.json()
            if json != None:
                if '_message' in json:
                    message = json['_message']
                elif 'message' in json:	# COMPAT: older daemons
                    message = json['message']
                else:
                    message = "no specific error text available"
            else:
                message = "no specific error text available"
        except ValueError as e:
            # body is not valid JSON; fall back to a generic message
            message = "no specific error text available"
        logging.debug("HTTP Error: %s", response.text)
        e = requests.exceptions.HTTPError(
            "%d: %s" % (response.status_code, message))
        # annotate extra info for the caller's benefit
        e.status_code = response.status_code
        e.message = response.reason
        raise e
def _os_path_split_full(path):
"""
Split an absolute path in all the directory components
"""
t = os.path.split(path)
if t[0] == "/":
l = [ t[1] ]
else:
l = _os_path_split_full(t[0])
l.append(t[1])
return l
def os_path_split_full(path):
    """
    Split a path (made absolute first) in all the directory components
    """
    return _os_path_split_full(os.path.abspath(path))
def progress(msg):
    """
    Print some sort of progress information banner to standard error
    output that will be overriden with real information.

    This only works when stdout or stderr are not redirected to files
    and is intended to give humans a feel of what's going on.
    """
    if not (sys.stderr.isatty() and sys.stdout.isatty()):
        return
    # pad to the terminal width so the previous banner is fully
    # overwritten, then carriage-return without newline
    columns = os.get_terminal_size().columns
    padding = columns - len(msg)
    if padding > 0:
        msg += padding * " "
    sys.stderr.write(msg + "\r")
    sys.stderr.flush()
def digits_in_base(number, base):
    """
    Convert a number to a list of the digits it would have if written
    in base @base.

    For example:

     - (16, 10) -> [1, 6] as 1*10 + 6 = 16

     - (44, 4) -> [2, 3, 0] as 2*4*4 + 3*4 + 0 = 44

    :param int number: non-negative number to convert
    :param int base: base to write the number in
    :returns list: digits, most significant first
    """
    if number == 0:
        return [ 0 ]
    digits = []
    while number != 0:
        # BUGFIX: use integer divmod instead of int(number / base);
        # true division goes through float and silently corrupts
        # results for numbers over 2**53
        number, digit = divmod(number, base)
        digits.append(digit)
    digits.reverse()
    return digits
def rm_f(filename):
    """
    Remove a file (not a directory) unconditionally, ignore errors if
    it does not exist.
    """
    try:
        os.unlink(filename)
    except FileNotFoundError:
        # FileNotFoundError == OSError w/ ENOENT; any other OSError
        # still propagates
        pass
def makedirs_p(dirname, mode = None, reason = None):
    """
    Create a directory tree, ignoring an error if it already exists

    :param str dirname: directory tree to create
    :param int mode: (optional) mode to set the directory to
    :param str reason: (optional) what the directory is for, used only
      in error messages
    :raises RuntimeError: if creation failed and what exists is not a
      writable directory
    """
    try:
        os.makedirs(dirname)
        # yes, this is a race condition--but setting the umask so
        # os.makedirs() gets the right mode would interfere with other
        # threads and processes.
        if mode:
            os.chmod(dirname, mode)
    except OSError as error:
        # most likely it exists already; accept that silently as long
        # as it is a usable (writable) directory
        if not os.path.isdir(dirname):
            raise RuntimeError("%s: path for %s is not a directory: %s"
                               % (dirname, reason, error))
        if not os.access(dirname, os.W_OK):
            raise RuntimeError("%s: path for %s does not allow writes: %s"
                               % (dirname, reason, error))
def symlink_f(source, dest):
    """
    Create a symlink, ignoring an error if a symlink already exists
    at *dest*; anything else in the way still raises.
    """
    try:
        os.symlink(source, dest)
    except FileExistsError:
        # FileExistsError == OSError w/ EEXIST; only swallow it when
        # what's there is actually a symlink
        if not os.path.islink(dest):
            raise
def _pid_grok(pid):
if pid == None:
return None, None
if isinstance(pid, str):
# Is it a PID encoded as string?
try:
return int(pid), None
except ValueError:
pass
# Mite be a pidfile
try:
with open(pid) as f:
pids = f.read()
except IOError:
return None, pid
try:
return int(pids), pid
except ValueError:
return None, pid
elif isinstance(pid, int):
# fugly
return pid, None
else:
assert True, "don't know how to convert %s to a PID" % pid
def verify_timeout(what: str, timeout: float,
                   verify_f: callable, *verify_args,
                   poll_period: float = 0.25, log = logging,
                   **verify_kwargs):
    r"""
    Verify a condition is met before a certain timeout

    :param str what: short description of what is being verified
    :param float timeout: how long to wait for; has to be at least
      twice the poll period.
    :param callable verify_f: function to call to verify; must return
      something that evaluates to boolean *True* when ok, otherwise it
      is considered not ok.
    :param float poll_period: (optional, default 0.25s) period on
      which to call the verification function
    :param log: logger to use to report messages with INFO level.

    Any other arguments (*\*args* and *\*\*kwargs*) are passed to the
    verification function.

    :raises TimeoutError: if the condition was not met within *timeout*
    """
    assert isinstance(what, str), \
        f"what: expected a description string; got {type(what)}"
    assert isinstance(timeout, numbers.Real) and timeout > 0, \
        f"timeout: expected a positive number; got {type(timeout)}"
    assert callable(verify_f), \
        f"verify_f: expected a callable; got {type(verify_f)}"
    assert isinstance(poll_period, numbers.Real) and poll_period > 0, \
        f"poll_period: expected a positive number; got {type(poll_period)}"
    # BUGFIX: message now states the actual constraint and values
    assert poll_period < timeout / 2, \
        f"poll_period: expected a value lower than half the timeout" \
        f" ({timeout / 2}); got {poll_period}"
    assert hasattr(log, "info"), \
        f"log: expected logging object; got {type(log)}"

    # BUGFIX: removed an unreachable trailing 'assert()' after the
    # loop -- if ever reached it would always raise, since an empty
    # tuple is falsy
    t0 = t = time.time()
    while True:
        if t - t0 > timeout:
            log.info(
                f"{what}: verifying with {verify_f} timed out at"
                f" +{t-t0:.1f}/{timeout}s")
            raise TimeoutError(f"{what}: timedout at +{t-t0:.1f}/{timeout}s")
        if verify_f(*verify_args, **verify_kwargs):
            log.info(
                f"{what}: verified with {verify_f} at +{t-t0:.1f}/{timeout}s")
            return
        time.sleep(poll_period)		# wait before polling again
        t = time.time()
def process_alive(pidfile, path = None):
    """
    Return if a process path/PID combination is alive from the
    standpoint of the calling context (in terms of UID permissions,
    etc).

    :param str pidfile: path to pid file (or)
    :param str pidfile: PID of the process to check (in str form) (or)
    :param int pidfile: PID of the process to check
    :param str path: path to the binary that runs the process; when
      given, the PID only counts as alive if */proc/PID/exe* points to
      it (Linux only)
    :returns: PID number if alive, *None* otherwise (might be running as a
      separate user, etc)
    """
    if path:
        paths = path + ": "
    else:
        paths = ""
    pid, _pidfile = _pid_grok(pidfile)
    if pid == None:
        return None
    try:
        # signal 0 doesn't deliver anything, it just probes the PID
        os.kill(pid, 0)
    except OSError as e:
        if e.errno == errno.ESRCH:	# Not running
            return None
        if e.errno == errno.EPERM:	# Running, but not our user?
            return None
        raise RuntimeError("%scan't signal pid %d to test if running: %s"
                           % (paths, pid, e))
    if not path:
        return pid
    # Thing is running, let's see what it is
    try:
        _path = os.readlink("/proc/%d/exe" % pid)
    except OSError as e:
        # Usually this means it has died while we checked
        return None

    if path == _path:
        return pid
    else:
        return None
def process_terminate(pid, pidfile = None, tag = None,
                      use_signal = signal.SIGTERM,
                      path = None, wait_to_kill = 0.25):
    """Terminate a process (*use_signal*, then SIGKILL after *wait_to_kill*)

    :param pid: PID of the process to kill; this can be an
      integer, a string representation of an integer or a path to a
      PIDfile.

    :param str pidfile: (optional) pidfile to remove [deprecated]

    :param str path: (optional) path to the binary; if given, only
      kill if */proc/PID/exe* points to it

    :param str tag: (optional) prefix to error messages

    :param int use_signal: (optional; default SIGTERM) signal to send
      to stop the process (see signal.SIG*).

    :param float wait_to_kill: (optional) seconds to wait between
      *use_signal* and the follow-up SIGKILL
    """
    if tag == None:
        if path:
            # BUGFIX: add the ": " separator, as done when a tag is given
            _tag = path + ": "
        else:
            _tag = ""
    else:
        _tag = tag + ": "
    _pid, _pidfile = _pid_grok(pid)
    if _pid == None:
        # Nothing to kill
        return
    if path:
        # Thing is running, let's see what it is
        try:
            _path = os.readlink("/proc/%d/exe" % _pid)
        except OSError:
            # Usually this means it has died while we checked
            return None
        if os.path.abspath(_path) != os.path.abspath(path):
            return None			# Not our binary
    try:
        signal_name = str(use_signal)
        os.kill(_pid, use_signal)
        time.sleep(wait_to_kill)
        signal_name = "SIGKILL"
        os.kill(_pid, signal.SIGKILL)
    except OSError as e:
        if e.errno == errno.ESRCH:	# killed already
            return
        # BUGFIX: use _tag; *tag* can be None here, which used to
        # render as "None" in the message
        raise RuntimeError("%scan't %s: %s"
                           % (_tag, signal_name, str(e)))
    finally:
        # always clean up pidfiles, even if the kill failed
        if _pidfile:
            rm_f(_pidfile)
        if pidfile:	# Extra pidfile to remove, kinda deprecated
            rm_f(pidfile)
def process_started(pidfile, path,
                    tag = None, log = None,
                    verification_f = None,
                    verification_f_args = None,
                    timeout = 5, poll_period = 0.3):
    """
    Wait for a daemon process to start, optionally verifying it works

    :param pidfile: PID/pidfile spec (anything :func:`process_alive`
      accepts)
    :param str path: path to the binary that runs the process
    :param str tag: (optional) prefix for log messages; defaults to *path*
    :param log: (optional) logger; defaults to the :mod:`logging` module
    :param verification_f: (optional) called once the PID is found;
      shall return a trueish value when the process is properly up
    :param verification_f_args: (optional) arguments for *verification_f*
    :param timeout: (optional) seconds to wait for start/verification
    :param poll_period: (optional) seconds between polls
    :returns: PID of the started process, or *None* on timeout
    """
    if log == None:
        log = logging
    if tag == None:
        tag = path
    # BUGFIX: tolerate a verification function that takes no arguments
    if verification_f_args == None:
        verification_f_args = ()
    t0 = time.time()		# Verify it came up
    while True:
        t = time.time()
        if t - t0 > timeout:
            log.error("%s: timed out (%ss) starting process", tag, timeout)
            return None
        time.sleep(poll_period)		# Give it .1s to come up
        pid = process_alive(pidfile, path)
        if pid == None:
            log.debug("%s: no %s PID yet (+%.2f/%ss), re-checking",
                      tag, path, t - t0, timeout)
            continue
        # PID found, if there is a verification function, let's run it
        break
    if verification_f:
        log.debug("%s: pid %d found at +%.2f/%ss), verifying",
                  tag, pid, t - t0, timeout)
        while True:
            if t - t0 > timeout:
                log.error("%s: timed out (%ss) verifying process pid %d",
                          tag, timeout, pid)
                return None
            if verification_f(*verification_f_args):
                log.debug("%s: started (pid %d) and verified at +%.2f/%ss",
                          tag, pid, t - t0, timeout)
                return pid
            time.sleep(poll_period)	# Give it .1s to come up
            t = time.time()
    else:
        log.debug("%s: started (pid %d) at +%.2f/%ss)",
                  tag, pid, t - t0, timeout)
        return pid
def origin_get(depth = 1):
    """
    Return the *FILENAME:LINE* of the caller at stack depth *depth*
    """
    frame_info = inspect.stack()[depth]
    # index 1 is the file name, index 2 the line number
    return f"{frame_info[1]}:{frame_info[2]}"
def origin_get_object_path(o):
    """
    Return the path of the source file where object *o* was defined
    """
    return inspect.getsourcefile(o)
def origin_get_object(o):
    """
    Return *FILENAME:LINE* where object *o* was defined
    """
    source_file = inspect.getsourcefile(o)
    _lines, line_number = inspect.getsourcelines(o)
    return f"{source_file}:{line_number}"
def origin_fn_get(depth = 1, sep = ":"):
    """
    Return the *FUNCTIONNAME<sep>LINE* from which this was called
    """
    caller = inspect.stack()[depth][0]
    return "%s%s%d" % (caller.f_code.co_name, sep, caller.f_lineno)
def kws_update_type_string(kws, rt, kws_origin = None, origin = None,
prefix = ""):
# FIXME: rename this to _scalar
# FIXME: make this replace subfields as .
# ['bsps']['x86']['zephyr_board'] = 'arduino_101' becomes
# 'bsps.x86.zephyr_board' = 'arduino_101'
"""
Given a dictionary, update the second only using those keys with
string values
:param dict kws: destination dictionary
:param dict d: source dictionary
"""
assert isinstance(kws, dict)
if not isinstance(rt, dict):
# FIXME: this comes from the remote server...
return
for key, value in rt.items():
if value == None:
kws[prefix + key] = ""
if kws_origin and origin:
kws_origin[prefix + key] = origin
elif isinstance(value, str) \
or isinstance(value, numbers.Integral):
kws[prefix + key] = value
if kws_origin and origin:
kws_origin[prefix + key] = origin
elif isinstance(value, bool):
kws[prefix + key] = value
def _kws_update(kws, rt, kws_origin = None, origin = None,
prefix = ""):
"""
Given a dictionary, update the second only using those keys from
the first string values
:param dict kws: destination dictionary
:param dict d: source dictionary
"""
assert isinstance(kws, dict)
if not isinstance(rt, dict):
return
for key, value in rt.items():
if value == None:
kws[prefix + key] = ""
if kws_origin and origin:
kws_origin[prefix + key] = origin
else:
kws[prefix + key] = value
if kws_origin and origin:
kws_origin[prefix + key] = origin
def kws_update_from_rt(kws, rt, kws_origin = None, origin = None,
                       prefix = ""):
    """
    Given a target's tags, update the keywords valid for exporting and
    evaluation

    This means filtering out things that are not strings and maybe
    others, decided in a case by case basis.

    We make sure we fix the type and 'target' as the fullid.

    :param dict kws: destination dictionary
    :param dict rt: target tag dictionary (may contain *url*,
      *fullid*, *id*, *type* and *interconnects* entries)
    :param dict kws_origin: (optional) per-key origin tracking dict
    :param str origin: (optional) origin; defaults to the target's
      *url* tag or, failing that, to the caller's file:line
    :param str prefix: (optional) prefix to prepend to imported keys
    """
    # WARNING!!! This is used by both the client and server code
    assert isinstance(kws, dict)
    assert isinstance(rt, dict)
    if origin == None and 'url' in rt:
        origin = rt['url']
    if origin == None:
        # depth 2: report our caller as the origin
        origin = origin_get(2)
    else:
        assert isinstance(origin, str)
    # bulk-import all tags first, then override the special ones below
    _kws_update(kws, rt, kws_origin = kws_origin,
                origin = origin, prefix = prefix)
    if 'fullid' in rt:
        # Clients have full id in the target tags (as it includes the
        # server AKA')
        kws[prefix + 'target'] = file_name_make_safe(rt['fullid'])
    else:
        # Said concept does not exist in the server...
        kws[prefix + 'target'] = file_name_make_safe(rt['id'])
    kws[prefix + 'type'] = rt.get('type', 'n/a')
    if kws_origin:
        assert isinstance(kws_origin, dict)
        kws_origin[prefix + 'target'] = origin
        kws_origin[prefix + 'type'] = origin
    # Interconnects need to be exported manually
    # NOTE: the 'interconnects' key is deliberately NOT prefixed
    kws['interconnects'] = {}
    if 'interconnects' in rt:
        _kws_update(kws['interconnects'], rt['interconnects'],
                    kws_origin = kws_origin,
                    origin = origin, prefix = prefix)
def if_present(ifname):
    """
    Return if network interface *ifname* is present in the system

    :param str ifname: name of the network interface
    :returns: True if interface exists, False otherwise
    """
    # Linux exposes one directory per interface under /sys/class/net
    return os.path.exists(os.path.join("/sys/class/net", ifname))
def if_index(ifname):
    """
    Return the interface index for network interface *ifname*

    :param str ifname: name of the network interface
    :returns int: index of the interface
    :raises IndexError: if the interface does not exist
    """
    try:
        with open("/sys/class/net/%s/ifindex" % ifname) as f:
            return int(f.read().strip())
    except IOError:
        raise IndexError("%s: network interface does not exist" % ifname)
def if_find_by_mac(mac, physical = True):
    """
    Return the name of the network interface whose MAC
    address matches *mac*.

    Note the comparison is made at the string level, case
    insensitive.

    :param str mac: MAC address of the network interface to find
    :param bool physical: True if only look for physical devices (eg:
      not vlans); this means there a *device* symlink in
      */sys/class/net/DEVICE/*
    :returns: Name of the interface if it exists, None otherwise
    """
    assert isinstance(mac, str)
    mac_lower = mac.lower()
    for path in glob.glob("/sys/class/net/*/address"):
        if_dir = os.path.dirname(path)
        if physical and not os.path.exists(os.path.join(if_dir, "device")):
            continue
        with open(path) as f:
            if f.read().strip().lower() == mac_lower:
                return os.path.basename(if_dir)
    return None
def if_remove(ifname):
    """
    Remove from the system a network interface using
    *ip link del*.

    :param str ifname: name of the network interface to remove
    :returns: nothing
    :raises subprocess.CalledProcessError: if *ip* fails (eg: no such
      interface, insufficient privilege)
    """
    # NOTE(review): ifname is interpolated into a shell command line;
    # if it could ever come from untrusted input this is a shell
    # injection hazard -- consider an argv list with shell = False
    subprocess.check_call("ip link del " + ifname, shell = True)
def if_remove_maybe(ifname):
    """
    Remove from the system a network interface (if it exists) using
    *ip link del*.

    :param str ifname: name of the network interface to remove
    :returns: nothing
    """
    if not if_present(ifname):
        return
    if_remove(ifname)
def ps_children_list(pid):
    """
    List all the PIDs that are children (recursively) of a given process

    :param int pid: PID whose children we are looking for
    :return: set of PIDs descendant of *pid* (if any)
    """
    cl = set()
    try:
        # check every task (thread) of *pid*; each lists its children
        for task_s in os.listdir("/proc/%d/task/" % pid):
            task = int(task_s)
            with open("/proc/%d/task/%d/children" % (pid, task)) as childrenf:
                children = childrenf.read()
            for child in children.split():
                # BUGFIX: the original compared the string *child*
                # against the integer *pid* (always unequal); convert
                # to int first
                child_pid = int(child)
                if child_pid != pid:
                    cl.add(child_pid)
    except OSError as e:
        # ENOENT: process (or task) died while we scanned it
        if e.errno != errno.ENOENT:
            raise
    f = set()
    for child_pid in cl:
        # recurse to pick up grandchildren etc
        f.update(ps_children_list(child_pid))
    f.update(cl)
    return f
def ps_zombies_list(pids):
    """
    Given a list of PIDs, return which are zombies

    :param pids: iterable list of numeric PIDs
    :return: set of PIDs which are zombies
    """
    zombies = set()
    for pid in pids:
        try:
            with open("/proc/%d/stat" % pid) as statf:
                # state field follows the ")" that closes the comm
                # field; "Z" means zombie
                if ") Z " in statf.read():
                    zombies.add(pid)
        except IOError as e:
            # If the PID doesn't exist, it is simply not a zombie
            if e.errno != errno.ENOENT:
                raise
    return zombies
def version_get(module, name):
    """
    Return a version string for *module*

    Tries, in order: a *MODULE.version* submodule (created at install
    time and defining *version_string*); *git describe* on the source
    tree the module was loaded from (development trees); and finally
    the placeholder *vNA*.

    :param module: imported module object to get the version for
    :param str name: human-readable name for error messages
    :returns str: version string
    """
    try:
        # Try version module created during installation by
        # {,ttbd}/setup.py into {ttbd,tcfl}/version.py.
        #
        # We use two different version modules to catch be able to
        # catch mismatched installations
        importlib.import_module(module.__name__ + ".version")
        # importing the submodule also sets it as an attribute of *module*
        return module.version.version_string
    except ImportError as _e:
        pass
    # Nay? Maybe a git tree because we are running from the source
    # tree during development work?
    _src = os.path.abspath(module.__file__)
    _srcdir = os.path.dirname(_src)
    try:
        git_version = subprocess.check_output(
            "git describe --tags --always --abbrev=7 --dirty".split(),
            cwd = _srcdir, stderr = subprocess.STDOUT, encoding = 'utf-8')
        # RPM versions can't have dash (-), so use dots (.)
        return git_version.strip().replace("-", ".")
    except subprocess.CalledProcessError as _e:
        # errors are printed, not logged: logging might not be
        # initialized this early
        print("Unable to determine %s (%s) version: %s"
              % (name, _srcdir, _e.output), file = sys.stderr)
        return "vNA"
    except OSError as e:
        # At this point, logging is still not initialized; don't
        # crash, just report a dummy version
        print("Unable to determine %s (%s) version "
              " (git not installed?): %s" % (name, _srcdir, e),
              file = sys.stderr)
        return "vNA"
def tcp_port_busy(port):
    """
    Return *True* if a local TCP port can't be bound to (is busy)

    :param int port: TCP port number to probe
    :returns bool: *True* if bound/busy, *False* if free
    :raises socket.error: on bind errors other than *EADDRINUSE*
    """
    # BUGFIX: the socket used to leak when bind() raised anything
    # other than EADDRINUSE; always close it
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind(("0.0.0.0", port))
        return False
    except socket.error as e:
        if e.errno == errno.EADDRINUSE:
            return True
        raise
    finally:
        s.close()
# FIXME: this thing sucks, it is obviously racy, but I can't figure
# out a better way -- we can't bind to (0) because we have plenty of
# daemons that need to get assigned more than one port and then it is
# impossible to get from them where did they bind (assuming they can
# do it)
def tcp_port_assigner(ports = 1, port_range = (1025, 65530)):
    """
    Return the base of a range of *ports* consecutive TCP ports that
    are all currently free

    :param int ports: (optional) number of consecutive ports needed
    :param tuple port_range: (optional) (low, high) range to pick from
    :returns int: first port of the range
    :raises RuntimeError: if no free range is found after 1000 tries
    """
    assert isinstance(port_range, tuple) and len(port_range) == 2 \
        and port_range[0] > 0 and port_range[1] < 65536 \
        and port_range[0] + 10 < port_range[1], \
        "port range has to be (A, B) with A > 0 and B < 65536, A << B; " \
        "got " + str(port_range)
    max_tries = 1000
    while max_tries > 0:
        port_base = random.randrange(port_range[0], port_range[1])
        # BUGFIX: the original returned port_base as soon as the
        # *first* port in the range was free, without checking the
        # rest; now all *ports* consecutive ports must be free
        for port_cnt in range(ports):
            if tcp_port_busy(port_base + port_cnt):
                break		# one busy: try another base
        else:
            return port_base	# for/else: all of them were free
        max_tries -= 1
    raise RuntimeError("Cannot assign %d ports" % ports)
def tcp_port_connectable(hostname, port):
    """
    Return true if we can connect to a TCP port
    """
    sk = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    with contextlib.closing(sk):
        sk.settimeout(5)
        try:
            sk.connect((hostname, port))
        except socket.error:
            return False
        return True
def conditional_eval(tag, kw, conditional, origin,
                     kind = "conditional"):
    """
    Evaluate an action's conditional string to determine if it
    should be considered or not.

    :param str tag: name of the action, for error reporting
    :param dict kw: keywords the expression is evaluated against
    :param conditional: expression string, or *None* (always true)
    :param str origin: where the conditional was defined, for error
      reporting
    :returns bool: True if the action must be considered, False
      otherwise.
    """
    if conditional is None:
        return True
    try:
        return expr_parser.parse(conditional, kw)
    except Exception as e:
        raise Exception(
            "error evaluating %s %s "
            "'%s' from '%s': %s"
            % (tag, kind, conditional, origin, e))
def check_dir(path, what):
    """Raise RuntimeError unless *path* is an existing directory."""
    if os.path.isdir(path):
        return
    raise RuntimeError("%s: path for %s is not a directory" % (path, what))
def check_dir_writeable(path, what):
    """Raise RuntimeError unless *path* is an existing, writable directory."""
    check_dir(path, what)
    if os.access(path, os.W_OK):
        return
    raise RuntimeError("%s: path for %s does not allow writes"
                       % (path, what))
def prctl_cap_get_effective():
    """
    Return an integer describing the effective capabilities of this process

    :returns int: the *CapEff* bitmask parsed from */proc/self/status*
    :raises RuntimeError: if the field cannot be found
    """
    # FIXME: linux only
    # CAP_NET_ADMIN is 12 (from /usr/include/linux/prctl.h
    with open("/proc/self/status") as f:
        s = f.read()
    r = re.compile(r"^CapEff:\s(?P<cap_eff>[0-9a-z]+)$", re.MULTILINE)
    m = r.search(s)
    if not m or not 'cap_eff' in m.groupdict():
        # BUGFIX: the message was passed logging-style as
        # RuntimeError("...%s", args), so the %s was never expanded;
        # format it explicitly instead.
        raise RuntimeError(
            "Cannot find effective capabilities in /proc/self/status: %s"
            % (m.groupdict() if m else None))
    # CapEff is reported as a hex bitmask
    return int(m.groupdict()['cap_eff'], 16)
def which(cmd, mode = os.F_OK | os.X_OK, path = None):
    """Given a command, mode, and a PATH string, return the path which
    conforms to the given mode on the PATH, or None if there is no such
    file.

    `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
    of os.environ.get("PATH"), or can be overridden with a custom search
    path.

    .. note: Lifted from Python 3.6
    """
    def _candidate_ok(candidate):
        # a directory can pass os.access() on Windows, so reject
        # directories explicitly
        return (os.path.exists(candidate) and os.access(candidate, mode)
                and not os.path.isdir(candidate))

    # An explicit directory component means: check that exact path
    # (including relative ones like ./script) and skip PATH entirely.
    if os.path.dirname(cmd):
        return cmd if _candidate_ok(cmd) else None
    search_path = path
    if search_path is None:
        search_path = os.environ.get("PATH", os.defpath)
    if not search_path:
        return None
    # No PATHEXT handling here (POSIX style): the command name is
    # looked up as-is in each PATH directory, skipping duplicates.
    checked = set()
    for directory in search_path.split(os.pathsep):
        normalized = os.path.normcase(directory)
        if normalized in checked:
            continue
        checked.add(normalized)
        candidate = os.path.join(directory, cmd)
        if _candidate_ok(candidate):
            return candidate
    return None
def ttbd_locate_helper(filename, share_path, log = logging, relsrcpath = ""):
    """
    Find the path to a TTBD file, depending on we running from source
    or installed system wide.

    :param str filename: name of the TTBD file we are looking for.
    :param str share_path: path where share data will be installed
    :param str relsrcpath: path relative to the running binary in the source
    :raises RuntimeError: if the file cannot be located
    """
    # candidate #1: relative to the running binary (source tree run)
    here = os.path.dirname(os.path.abspath(sys.argv[0]))
    candidate = os.path.join(here, relsrcpath, filename)
    if os.path.exists(candidate):
        return candidate
    # candidate #2: system-wide install in the same prefix -> ../share/tcf
    candidate = os.path.join(share_path, filename)
    log.debug("looking for %s" % candidate)
    if os.path.exists(candidate):
        return candidate
    raise RuntimeError("Can't find util %s" % filename)
def raise_from(what, cause):
    """
    Forward compat to Python 3's *raise X from Y*

    :param Exception what: exception to raise
    :param Exception cause: exception to record as its cause
    """
    what.__cause__ = cause
    raise what
#: Regex to filter out ANSI characters from text, to ease up debug printing
#:
#: Use as:
#:
#: >>> data = commonl.ansi_regex.sub('', source_data)
#:
#: Note this matches only "m"-terminated (SGR/color) sequences with
#: one to three numeric parameters (e.g. ESC[31m, ESC[1;31m); other
#: ANSI escape sequences are left untouched.
ansi_regex = re.compile(r'\x1b\[\d+(;\d+){0,2}m')
def ansi_strip(s):
    """
    Strip ANSI sequences from a string
    :param str s: string to strip ANSI sequences from
    :returns s: ANSI-stripped string
    """
    return ansi_regex.sub('', s)
class dict_missing_c(dict):
    """
    Dictionary that yields a placeholder value for missing keys.

    For a missing *KEY*, returns *missing* if given, otherwise a
    string *KEY_UNDEFINED_TEMPLATE.ORIGIN*. This is useful for things
    like:

    >>> "%(idonthavethis)s" % dict_missing_c({ "ihavethis": True })

    to print a placeholder instead of raising KeyError.

    :param dict d: initial contents
    :param str missing: (optional) fixed value to return for missing keys
    """
    def __init__(self, d, missing = None):
        assert isinstance(d, dict)
        assert missing == None or isinstance(missing, str)
        dict.__init__(self, d)
        # value substituted for missing keys (None -> auto-generated)
        self.missing = missing

    def __getitem__(self, key):
        if key in self:
            return dict.__getitem__(self, key)
        if self.missing:
            return self.missing
        # no fixed placeholder: synthesize one tagged with the caller's origin
        return "%s_UNDEFINED_TEMPLATE.%s" % (key, origin_fn_get(2, "."))
def ipv4_len_to_netmask_ascii(length):
    """Return the dotted-quad netmask for an IPv4 prefix *length* (0-32)."""
    # clear the low (32 - length) host bits of an all-ones mask
    host_bits = (1 << (32 - length)) - 1
    return socket.inet_ntoa(struct.pack('>I', 0xffffffff ^ host_bits))
#: Simple general keyring directory
#:
#: Any configuration file can add entries to this dictionary, that
#: then can be used by password_lookup() to find passwords when not
#: specified and needed.
#:
#: This is mainly used when passwords will be shared in different
#: parts of the infrastructure and it is easier to refer to them from
#: a central location.
#:
#: Keys can be plain strings (exact ACCOUNT@HOSTNAME match) or
#: compiled regular expressions matched against ACCOUNT@HOSTNAME.
passwords = {
    # Simple match username/hostname to password
    #"billy@thismachine.com": "badS3cre7",
    # Match a regular expression for account/hostname to a password
    # located in a file that password_get() will read
    #re.compile("admin@r[0-9]+p[0-9]+..*.deacluster.intel.com"): \
    #    "FILE:/etc/ttbd-production/pwd.pdu.admin",
}
def password_lookup(entry):
    """
    Find a password for *entry* in the :data:`passwords` directory.

    String keys must match *entry* exactly; regex keys are searched
    against it (named groups may be expanded into the value with
    ``%(name)s`` templates).

    :param str entry: *ACCOUNT@HOSTNAME* style entry to look up
    :raises RuntimeError: if no entry matches
    """
    for pattern, value in passwords.items():
        if isinstance(pattern, str):
            if pattern == entry:
                return value
            continue
        if isinstance(pattern, re.Pattern):
            m = pattern.search(entry)
            if m is None:
                continue
            # expand named groups from the match into the value
            if '%(' in value:
                value = value % m.groupdict()
            return value
    raise RuntimeError(f"can't find a password for entry '{entry}'")
def _keyring_password_get(domain, user):
    # Shared helper for the KEYRING* variants of password_get():
    # resolve a password via the keyring service or raise.
    if keyring_available == False:
        raise RuntimeError(
            "keyring: functionality to load passwords not available,"
            " please install keyring support")
    password = keyring.get_password(domain, user)
    if password == None:
        raise RuntimeError("keyring: no password for user %s @ %s"
                           % (user, domain))
    return password

def password_get(domain, user, password):
    """Get the password for a domain and user

    This returns a password obtained from a configuration file, maybe
    accessing secure password storage services to get the real
    password. This is intended to be use as a service to translate
    passwords specified in config files, which in some time might be
    cleartext, in others obtained from services.

    >>> real_password = password_get("somearea", "rtmorris", "KEYRING")

    will query the *keyring* service for the password to use for user
    *rtmorris* on domain *somearea*.

    >>> real_password = password_get("somearea", "rtmorris", "KEYRING:Area51")

    would do the same, but *keyring*'s domain would be *Area51* instead.

    >>> real_password = password_get(None, "rtmorris",
    >>>                              "FILE:/etc/config/some.key")

    would obtain the password from the contents of file
    */etc/config/some.key*.

    >>> real_password = password_get("somearea", "rtmorris", "sikrit")

    would just return *sikrit* as a password.

    :param str domain: a domain to which this password operation
      applies; see below *password* (can be *None*)
    :param str user: the username for maybe obtaining a password from
      a password service; see below *password*.
    :param str password: a password obtained from the user or a
      configuration setting; can be *None*. If the *password* is

      - *KEYRING* will ask the accounts keyring for the password
        for domain *domain* for username *user*
      - *KEYRING=DOMAIN* (or *KEYRING:DOMAIN*) will ask the accounts
        keyring for the password for domain *DOMAIN* for username
        *user*, ignoring the *domain* parameter.
      - *FILE=PATH* (or *FILE:PATH*) will read the password from
        filename *PATH*.

    Note that using the colon notation *FILE:PATH* can make some URL
    parsing not work, hence you can default to using =

    :returns: the actual password to use

    Password management procedures (FIXME):

    - to set a password in the keyring::

        $ echo KEYRINGPASSWORD | gnome-keyring-daemon --unlock
        $ keyring set "USER" DOMAIN
        Password for 'DOMAIN' in 'USER': <ENTER PASSWORD HERE>

    - to be able to run the daemon has to be executed under a dbus session::

        $ dbus-session -- sh
        $ echo KEYRINGPASSWORD | gnome-keyring-daemon --unlock
        $ ttbd...etc
    """
    assert domain == None or isinstance(domain, str)
    assert isinstance(user, str)
    assert password == None or isinstance(password, str)
    # The three KEYRING forms used to duplicate the same lookup block
    # three times; they now share _keyring_password_get().
    if password == "KEYRING":
        password = _keyring_password_get(domain, user)
    elif password and password.startswith(("KEYRING:", "KEYRING=")):
        # KEYRING:DOMAIN / KEYRING=DOMAIN override the *domain* argument
        domain = password[len("KEYRING") + 1:]
        password = _keyring_password_get(domain, user)
    elif password and password.startswith(("FILE:", "FILE=")):
        filename = password[len("FILE") + 1:]
        with open(filename) as f:
            password = f.read().strip()
    # fallthrough, if none of them, it's just a password
    return password
def split_user_pwd_hostname(s):
    """
    Return a tuple decomposing ``[USER[:PASSWORD]@]HOSTNAME``

    :returns: tuple *( USER, PASSWORD, HOSTNAME )*, *None* in missing fields.

    See :func:`password_get` for details on how the password is handled.
    """
    assert isinstance(s, str)
    # split credentials from hostname at the first '@'
    if '@' in s:
        credentials, hostname = s.split('@', 1)
    else:
        credentials, hostname = "", s
    # split user from password at the first ':'
    if ':' in credentials:
        user, password = credentials.split(':', 1)
    else:
        user, password = credentials, None
    # translate KEYRING/FILE password specifications into real passwords
    password = password_get(hostname, user, password)
    return user, password, hostname
def url_remove_user_pwd(url):
    """
    Given a parsed URL, return it as a string with the username and
    password (if any) removed::

      print(url_remove_user_pwd(urllib.parse.urlparse(
          "https://user:password@host:port/path")))
      https://host:port/path

    :param url: object with *scheme*, *hostname*, *port* and *path*
      attributes (e.g. a :func:`urllib.parse.urlparse` result)
    """
    stripped = "%s://%s" % (url.scheme, url.hostname)
    if url.port:
        stripped += ":%d" % url.port
    if url.path:
        stripped += url.path
    return stripped
def field_needed(field, projections):
    """
    Check if the name *field* matches any of the *projections* (ala
    :mod:`fnmatch`).

    :param str field: field name
    :param list(str) projections: list of :mod:`fnmatch` patterns
      against which to check field. Can be *None* and *[ ]* (empty).
    :returns bool: *True* if *field* matches a pattern in *projections*
      or if *projections* is empty or *None*. *False* otherwise.
    """
    if not projections:
        return True		# no filter list -> everything is needed
    for pattern in projections:
        # direct fnmatch, or pattern 'a' implicitly matching
        # subfields 'a.x[.y[...]]'
        if fnmatch.fnmatch(field, pattern):
            return True
        if field.startswith(pattern + "."):
            return True
    return False		# filtered out
def dict_to_flat(d, projections = None, sort = True, empty_dict = False):
    """
    Convert a nested dictionary to a sorted list of tuples *( KEY, VALUE )*

    The KEY is like *KEY[.SUBKEY[.SUBSUBKEY[....]]]*, where *SUBKEY*
    are keys in nested dictionaries.

    :param dict d: dictionary to convert
    :param list(str) projections: (optional) list of :mod:`fnmatch`
      patterns of flat keys to bring in (default: all)
    :param bool sort: (optional, default *True*) sort according to KEY
      name or leave the natural order (needed to keep the order of the
      dictionaries) -- requires the underlying dict to be a
      collections.OrderedDict() in older python versions.
    :param bool empty_dict: (optional, default *False*) also emit
      entries for empty nested dictionaries
    :returns list: sorted list of tuples *KEY, VAL*
    """
    # BUGFIX: the collections.Mapping alias was removed in Python
    # 3.10; the ABC lives in collections.abc since Python 3.3
    import collections.abc
    assert isinstance(d, collections.abc.Mapping)
    fl = []
    def _add(field_flat, val):
        # insert sorted by flat key, or preserve natural order
        if sort:
            bisect.insort(fl, ( field_flat, val ))
        else:
            fl.append(( field_flat, val ))
    # test dictionary emptiness with 'len(d) == 0' vs 'd == {}', since they
    # could be ordereddicts and stuff
    def __update_recursive(val, field, field_flat, projections = None,
                           depth_limit = 10, prefix = " ", sort = True,
                           empty_dict = False):
        # projections is a list of fields to include; if empty, means
        # all of them; a field X.Y.Z means d['X']['Y']['Z'].
        # Note we have to dig deep first, so that a.a3.* goes all the
        # way deep before evaluating if keepers or not.
        if isinstance(val, collections.abc.Mapping):
            if len(val) == 0 and empty_dict == True and field_needed(field_flat, projections):
                # append an empty dictionary; do not append VAL --
                # why? because otherwise it might be modified later by
                # somebody else and modify our SOURCE dictionary, and
                # we do not want that.
                _add(field_flat, dict())
            elif depth_limit > 0:	# dict to dig in
                for key, value in val.items():
                    __update_recursive(value, key, field_flat + "." + str(key),
                                       projections, depth_limit - 1,
                                       prefix = prefix + " ",
                                       sort = sort, empty_dict = empty_dict)
        elif field_needed(field_flat, projections):
            _add(field_flat, val)
    if len(d) == 0 and empty_dict == True:
        # top-level empty dict: insert a fresh dict (not *d* itself,
        # so later modifications can't leak into our source).
        # BUGFIX: this used to reference the undefined name
        # *field_flat* and raised NameError; the top-level flat key is
        # the empty string.
        _add("", dict())
    for key, _val in d.items():
        __update_recursive(d[key], key, key, projections, 10, sort = sort,
                           empty_dict = empty_dict)
    return fl
def _key_rep(r, key, key_flat, val):
# put val in r[key] if key is already fully expanded (it has no
# periods); otherwise expand it recursively
if '.' in key:
# this key has sublevels, iterate over them
lhs, rhs = key.split('.', 1)
if lhs not in r:
r[lhs] = collections.OrderedDict()
elif not isinstance(r[lhs], dict):
r[lhs] = collections.OrderedDict()
_key_rep(r[lhs], rhs, key_flat, val)
else:
r[key] = val
def flat_slist_to_dict(fl):
    """
    Given a sorted list of flat keys and values, convert them to a
    nested dictionary

    :param list((str,object)): list of tuples of key and any value
      alphabetically sorted by tuple; same sorting rules as in
      :func:`flat_keys_to_dict`.
    :return dict: nested dictionary as described by the flat space of
      keys and values
    """
    # OrderedDict: later code relies on insertion order being kept
    result = collections.OrderedDict()
    for key, value in fl:
        _key_rep(result, key, key, value)
    return result
def flat_keys_to_dict(d):
    """
    Given a dictionary of flat keys, convert it to a nested dictionary

    Similar to :func:`flat_slist_to_dict`, differing in the
    keys/values being in a dictionary.

    A key/value:

    >>> d["a.b.c"] = 34

    means:

    >>> d['a']['b']['c'] = 34

    Keys in the input dictionary are processed in alphabetical order
    (thus, key a.a is processed before a.b.c); later keys override
    earlier keys:

    >>> d['a.a'] = 'aa'
    >>> d['a.a.a'] = 'aaa'
    >>> d['a.a.b'] = 'aab'

    will result in:

    >>> d['a']['a'] = { 'a': 'aaa', 'b': 'aab' }

    with the 'a.a' setting overriden by the deeper ones.

    :param dict d: dictionary of keys/values
    :returns dict: (nested) dictionary
    """
    result = {}
    for key in sorted(d.keys()):
        _key_rep(result, key, key, d[key])
    return result
class tls_prefix_c(object):
    """
    Context manager that appends *prefix* to the thread-local prefix
    accumulated in *tls.prefix_c* while the context is active, for use
    with prefixing stream writers such as :class:`io_tls_prefix_lines_c`.

    :param threading.local tls: thread local storage to use
    :param str prefix: prefix to append for the duration of the context
    """
    def __init__(self, tls, prefix):
        assert isinstance(tls, threading.local)
        assert isinstance(prefix, str)
        self.tls = tls
        # repr the prefix as bytes, so when we write there is no
        # conversion needed
        self.prefix = prefix.encode('utf-8')
        # previous value of tls.prefix_c, restored on exit
        self.prefix_old = None

    def __enter__(self):
        self.prefix_old = getattr(self.tls, "prefix_c", b"")
        self.tls.prefix_c = self.prefix_old + self.prefix
        return self

    def __exit__(self, _exct_type, _exce_value, _traceback):
        self.tls.prefix_c = self.prefix_old
        self.prefix_old = None

    def __repr__(self):
        # BUGFIX: __repr__ must return a str; the previous
        # implementation returned the raw tls.prefix_c value (bytes,
        # or None), which made repr() raise TypeError.
        return repr(getattr(self.tls, "prefix_c", None))
def data_dump_recursive(d, prefix = u"", separator = u".", of = sys.stdout,
                        depth_limit = 20):
    """
    Dump a general data tree to stdout in a recursive way
    For example:
    >>> data = [ dict(keya = 1, keyb = 2), [ "one", "two", "three" ], "hello", sys.stdout ]
    produces the stdout::
      [0].keya: 1
      [0].keyb: 2
      [1][0]: one
      [1][1]: two
      [1][2]: three
      [2]: hello
      [3]: <open file '<stdout>', mode 'w' at 0x7f13ba2861e0>
    - in a list/set/tuple, each item is printed prefixing *[INDEX]*
    - in a dictionary, each item is prefixed with it's key
    - strings and cardinals are printed as such
    - others are printed as what their representation as a string produces
    - if an attachment is a generator, it is iterated to gather the data.
    - if an attachment is of :class:`generator_factory_c`, the method
      for creating the generator is called and then the generator
      iterated to gather the data.
    See also :func:`data_dump_recursive_tls`
    :param d: data to print
    :param str prefix: prefix to start with (defaults to nothing)
    :param str separator: used to separate dictionary keys from the
      prefix (defaults to ".")
    :param :python:file of: output stream where to print (defaults to
      *sys.stdout*)
    :param int depth_limit: maximum nesting levels to go deep in the
      data structure (defaults to 20)
    """
    assert isinstance(prefix, str)
    assert isinstance(separator, str)
    assert depth_limit > 0, f"depth_limit: expected >0, got {depth_limit}"
    if isinstance(d, dict) and depth_limit > 0:
        # dictionary: recurse into each value, appending the key to
        # the prefix; sorted by key for deterministic output
        if prefix.strip() != "":
            prefix = prefix + separator
        for key, val in sorted(d.items(), key = lambda i: i[0]):
            data_dump_recursive(val, prefix + str(key),
                                separator = separator, of = of,
                                depth_limit = depth_limit - 1)
    elif isinstance(d, (list, set, tuple)) and depth_limit > 0:
        # could use iter(x), but don't wanna catch strings, etc
        count = 0
        for v in d:
            data_dump_recursive(v, prefix + u"[%d]" % count,
                                separator = separator, of = of,
                                depth_limit = depth_limit - 1)
            count += 1
    # HACK: until we move functions to a helper or something, when
    # someone calls the generatory factory as
    # commonl.generator_factory_c, this can't pick it up, so fallback
    # to use the name
    elif isinstance(d, generator_factory_c) \
         or type(d).__name__ == "generator_factory_c":
        # factory: create a fresh generator and drain it into the stream
        of.write(prefix)
        of.writelines(d.make_generator())
    elif isinstance(d, types.GeneratorType):
        # plain generator: drained in place (cannot be rewound)
        of.write(prefix)
        of.writelines(d)
    elif isinstance(d, io.IOBase):
        # not recommended, prefer generator_factory_c so it reopens the file
        d.seek(0, 0)
        of.write(prefix)
        of.writelines(d)
    else:
        # any other type: print its string representation as a leaf
        of.write(prefix + u": " + mkutf8(d) + u"\n")
# COMPAT: old name kept so existing callers keep working
_dict_print_dotted = data_dump_recursive # COMPAT
def data_dump_recursive_tls(d, tls, separator = u".", of = sys.stdout,
                            depth_limit = 10):
    """
    Dump a general data tree to stdout in a recursive way
    This function works as :func:`data_dump_recursive` (see for more
    information on the usage and arguments). However, it uses TLS for
    storing the prefix as it digs deep into the data structure.
    A variable called *prefix_c* is created in the TLS structure on
    which the current prefix is stored; this is meant to be used in
    conjunction with stream writes such as
    :class:`io_tls_prefix_lines_c`.
    Parameters are as documented in :func:`data_dump_recursive`,
    except for:
    :param threading.local tls: thread local storage to use (as returned
      by *threading.local()*
    """
    assert isinstance(separator, str)
    assert depth_limit > 0
    if isinstance(d, dict):
        # dictionary: push "KEY: " onto the TLS prefix while recursing
        # into each value; sorted by key for deterministic output
        for key, val in sorted(d.items(), key = lambda i: i[0]):
            with tls_prefix_c(tls, str(key) + ": "):
                data_dump_recursive_tls(val, tls,
                                        separator = separator, of = of,
                                        depth_limit = depth_limit - 1)
    elif isinstance(d, (list, set, tuple)):
        # could use iter(x), but don't wanna catch strings, etc
        count = 0
        for v in d:
            with tls_prefix_c(tls, u"[%d]: " % count):
                data_dump_recursive_tls(v, tls,
                                        separator = separator, of = of,
                                        depth_limit = depth_limit - 1)
            count += 1
    # HACK: until we move functions to a helper or something, when
    # someone calls the generatory factory as
    # commonl.generator_factory_c, this can't pick it up, so fallback
    # to use the name
    elif isinstance(d, generator_factory_c) \
         or type(d).__name__ == "generator_factory_c":
        # factory: create a fresh generator and drain it into the stream
        of.writelines(d.make_generator())
    elif isinstance(d, io.IOBase):
        # not recommended, prefer generator_factory_c so it reopens the file
        d.seek(0, 0)
        of.writelines(d)
    elif isinstance(d, types.GeneratorType):
        # plain generator: drained in place (cannot be rewound)
        of.writelines(d)
    else:
        # leaf node: the prefixing writer adds the TLS prefix on write
        of.write(mkutf8(d) + u"\n")
class io_tls_prefix_lines_c(io.BufferedWriter):
    """
    Write lines to a stream with a prefix obtained from a thread local
    storage variable.
    This is a limited hack to transform a string written as::
      line1
      line2
      line3
    into::
      PREFIXline1
      PREFIXline2
      PREFIXline3
    without any intervention by the caller other than setting the
    prefix in thread local storage and writing to the stream; this
    allows other clients to write to the stream without needing to
    know about the prefixing.
    Note the lines yielded are unicode-escaped or UTF-8 escaped, for
    being able to see in reports any special character.
    Usage:
    .. code-block:: python
       import io
       import commonl
       import threading
       tls = threading.local()
       f = io.open("/dev/stdout", "w")
       with commonl.tls_prefix_c(tls, "PREFIX"), \
            commonl.io_tls_prefix_lines_c(tls, f.detach()) as of:
           of.write(u"line1\\nline2\\nline3\\n")
    Limitations:
    - hack, only works ok if full lines are being printed
    """
    def __init__(self, tls, *args, **kwargs):
        assert isinstance(tls, threading.local)
        io.BufferedWriter.__init__(self, *args, **kwargs)
        # thread local storage whose *prefix_c* attribute holds the
        # current prefix (as bytes; see tls_prefix_c)
        self.tls = tls
        # accumulates text seen after the last newline; it is emitted
        # (with a prefix) when the next newline or a flush() arrives
        self.data = u""
    def __write_line(self, s, prefix, offset, pos):
        # Write a whole (\n ended) line to the stream
        #
        # - prefix first
        # - leftover data since last \n
        # - current data from offset to the position where \n was
        #   (we print them escaping non-visible chars)
        # - newline (since the one in s was escaped)
        substr = s[offset:pos]
        io.BufferedWriter.write(self, prefix)
        if self.data:
            io.BufferedWriter.write(
                self, str_invisible_escape(self.data).encode('utf-8'))
            self.data = ""
        io.BufferedWriter.write(self, str_invisible_escape(substr).encode('utf-8'))
        io.BufferedWriter.write(self, "\n".encode('utf-8'))
        # flush after writing one line to avoid corruption from other
        # threads/processes printing to the same FD
        io.BufferedWriter.flush(self)
        # returned value is the offset right past the newline, for the
        # caller to continue scanning from there
        return pos + 1
    def _write(self, s, prefix, acc_offset = 0):
        # write a chunk of data to the stream -- break it by newlines,
        # so when one is found __write_line() can write the prefix
        # first. Accumulate anything left over after the last newline
        # so we can flush it next time we find one.
        offset = 0
        if not isinstance(s, str):
            s = str(s)
        while offset < len(s):
            pos = s.find('\n', offset)
            if pos >= 0:
                offset = self.__write_line(s, prefix, offset, pos)
                continue
            # no more newlines: stash the tail for the next call
            self.data += s[offset:]
            break
        return acc_offset + len(s)
    def flush(self):
        """
        Flush any leftover data in the temporary buffer, write it to the
        stream, prefixing each line with the prefix obtained from
        *self.tls*\'s *prefix_c* attribute.
        """
        prefix = getattr(self.tls, "prefix_c", None)
        if prefix == None:
            # no prefix set in this thread: write the buffer verbatim
            io.BufferedWriter.write(
                self, str_invisible_escape(self.data).encode('utf-8'))
        else:
            # flush whatever is accumulated
            self._write(u"", prefix)
        io.BufferedWriter.flush(self)
    def write(self, s):
        """
        Write string to the stream, prefixing each line with the
        prefix obtained from *self.tls*\'s *prefix_c* attribute.
        """
        prefix = getattr(self.tls, "prefix_c", None)
        if prefix == None:
            # no prefix set in this thread: pass through unmodified
            io.BufferedWriter.write(self, s)
            return
        self._write(s, prefix, 0)
    def writelines(self, itr):
        """
        Write the iterator to the stream, prefixing each line with the
        prefix obtained from *self.tls*\'s *prefix_c* attribute.
        """
        prefix = getattr(self.tls, "prefix_c", None)
        if prefix == None:
            # no prefix set in this thread: pass through unmodified
            io.BufferedWriter.writelines(self, itr)
            return
        offset = 0
        data = None	# itr might be empty...and later we want to check
        for data in itr:
            offset = self._write(data, prefix, offset)
        if data:
            # if there was an iterator (sometimes we are called with
            # an empty one), if the last char was not a \n, the last
            # line won't be flushed, so let's flush it manually.
            # This is quite hackish but heck...otherwise there will be
            # leftovers in self.data and will accumulate to the next
            # line printed, that might have nothing to do with it.
            last_char = data[-1]
            if last_char != '\n':
                self._write("\n", prefix, 0)
def mkutf8(s):
    # Python2 left over FIXME: see all the call sites and fix them
    #
    # Return *s* if already a string, otherwise its string representation.
    return s if isinstance(s, str) else str(s)
#: Index of ASCII/Unicode points to be translated because they are
#: invisible by :func:`str_invisible_escape`.
str_invisible_table = [
    "\0",		# 0
    "<SOH>",		# 1
    "<\\x02|Ctrl-B>",	# 2
    "<ETX>",		# 3
    "<EOT>",		# 4
    "<ENQ>",		# 5
    "<ACK>",		# 6
    "\\a",		# 7
    "\\b",		# 8
    "\\t",		# 9
    "\\n",		# 10
    "\\v",		# 11
    "\\f",		# 12
    "\\r",		# 13
    "<SO>",		# 14
    "<SI>",		# 15
    "<DLE>",		# 16
    "<DC1>",		# 17
    "<DC2>",		# 18
    "<DC3>",		# 19
    "<DC4>",		# 20
    "<NAK>",		# 21
    "<SYN>",		# 22
    "<ETB>",		# 23
    "<CAN>",		# 24
    "<EM>",		# 25
    "<SUB>",		# 26
    "<ESC>",		# 27
    "<FS>",		# 28
    "<GS>",		# 29
    "<RS>",		# 30
    "<US>",		# 31
]
def str_invisible_escape(s):
    """
    Translate invisible characters into visible representations

    For example, a new line character becomes *\\n*, \\x1e becomes
    *<RS>*; the translation table is :data:`str_invisible_table`.

    :param str s: string (or bytes) to work on
    :returns: translated string (bytearray for bytes input)
    """
    if isinstance(s, bytes):
        out = bytearray()
        for byte in s:
            if byte < 0x20:	# control char -> visible representation
                out.extend(str_invisible_table[byte].encode('ascii'))
            else:
                out.append(byte)
        return out
    out = ""
    for char in s:
        code = ord(char)
        out += str_invisible_table[code] if code < 0x20 else char
    return out
class generator_factory_c(object):
    """
    Create generator objects multiple times

    Given a generator function and its arguments, create a fresh
    generator each time :func:`make_generator` (or the factory itself)
    is called:

    >>> factory = generator_factory_c(generator, arg1, arg2..., arg = value...)
    >>> ...
    >>> generator = factory.make_generator()
    >>> for data in generator:
    >>>     do_something(data)
    >>> ...
    >>> another_generator = factory.make_generator()
    >>> for data in another_generator:
    >>>     do_something(data)

    Generators, once created, cannot be reset to the beginning, so
    this can be used to simulate that behavior.

    :param fn: generator function
    :param args: arguments to the generator function
    :param kwargs: keyword arguments to the generator function
    """
    def __init__(self, fn, *args, **kwargs):
        self.fn = fn
        self.args = args
        self.kwargs = kwargs

    def make_generator(self):
        """
        Create and return a generator
        """
        return self.fn(*self.args, **self.kwargs)

    # calling the factory object is the same as make_generator()
    __call__ = make_generator
def file_iterator(filename, chunk_size = 4096):
    """
    Iterate over a file's contents in binary chunks

    Commonly used along with generator_factory_c with the TCF
    client API to report attachments:

    >>> import commonl
    >>>
    >>> class _test(tcfl.tc.tc_c):
    >>>
    >>>     def eval(self):
    >>>         generator_f = commonl.generator_factory_c(commonl.file_iterator, FILENAME)
    >>>         testcase.report_pass("some message", dict(content = generator_f))

    :param str filename: file to read
    :param int chunk_size: (optional) read blocks of this size
    """
    assert chunk_size > 0
    with io.open(filename, "rb") as f:
        # read() returns b'' at EOF, which stops the sentinel iterator
        for data in iter(lambda: f.read(chunk_size), b''):
            yield data
def assert_list_of_strings(l, list_name, item_name):
    """Assert *l* is a list/tuple whose items are all strings."""
    assert isinstance(l, ( tuple, list )), \
        "'%s' needs to be None or a list of strings (%s); got %s" % (
            list_name, item_name, type(l))
    for count, item in enumerate(l):
        assert isinstance(item, str), \
            "items in '%s' needs to be strings (%s); got %s on #%d" % (
                list_name, item_name, type(item), count)
def assert_list_of_types(l, list_name, item_name, item_types):
    """Assert *l* is a list whose items are all instances of *item_types*."""
    assert isinstance(l, list), \
        "'%s' needs to be a list of items (%s) of types '%s'; got %s" % (
            list_name, item_name,
            ",".join(type(i).__name__ for i in item_types), type(l))
    for count, item in enumerate(l):
        assert isinstance(item, item_types), \
            "items in '%s' needs to be %s (%s); got %s on #%d" % (
                list_name, "|".join(i.__name__ for i in item_types),
                item_name, type(item), count)
def assert_none_or_list_of_strings(l, list_name, item_name):
    """Like :func:`assert_list_of_strings`, but *None* is also acceptable."""
    if l is None:
        return
    assert_list_of_strings(l, list_name, item_name)
def assert_dict_key_strings(d, d_name):
    """
    Assert a dictionary is keyed by strings
    """
    for key in d:
        assert isinstance(key, str), \
            "'%s' needs to be a dict keyed by string;" \
            " got a key type '%s'; expected string" % (d_name, type(key))
def assert_dict_of_strings(d, d_name):
    """Assert *d* is a dictionary mapping strings to strings."""
    for key, val in d.items():
        assert isinstance(key, str), \
            "'%s' needs to be a dict of strings keyed by string;" \
            " got a key type '%s'; expected string" % (d_name, type(key))
        assert isinstance(val, str), \
            "'%s' needs to be a dict of strings keyed by string;" \
            " for key '%s' got a value type '%s'" % (d_name, key, type(val))
def assert_dict_of_ints(d, d_name):
    """Assert *d* is a dictionary mapping strings to integers."""
    for key, val in d.items():
        assert isinstance(key, str), \
            "'%s' needs to be a dict of ints keyed by string;" \
            " got a key type '%s'; expected string" % (d_name, type(key))
        assert isinstance(val, int), \
            "'%s' needs to be a dict of ints keyed by string;" \
            " for key '%s' got a value type '%s'" % (d_name, key, type(val))
#: Regex matching a colon-separated Ethernet MAC address
#: (HH:HH:HH:HH:HH:HH); groups n0-n5 capture the six octets.
macaddr_regex = re.compile(
    "(?P<n0>[0-9a-fA-F][0-9a-fA-F])"
    ":(?P<n1>[0-9a-fA-F][0-9a-fA-F])"
    ":(?P<n2>[0-9a-fA-F][0-9a-fA-F])"
    ":(?P<n3>[0-9a-fA-F][0-9a-fA-F])"
    ":(?P<n4>[0-9a-fA-F][0-9a-fA-F])"
    ":(?P<n5>[0-9a-fA-F][0-9a-fA-F])",
    re.IGNORECASE
)
def assert_macaddr(macaddr):
    """Assert *macaddr* looks like a valid HH:HH:HH:HH:HH:HH MAC address."""
    # a match object is truthy, None is falsy
    assert macaddr_regex.match(macaddr), \
        "invalid MAC address, has to match HH:HH:HH:HH:HH:HH," \
        " H being a hex digit"
def assert_none_or_dict_of_strings(d, d_name):
    """Like :func:`assert_dict_of_strings`, but *None* is also acceptable."""
    if d is None:
        return
    assert_dict_of_strings(d, d_name)
#: List of known compressed extensions and ways to decompress them
#: without removing the input file
#:
#: To add more:
#:
#: >>> commonl.decompress_handlers[".gz"] = "gzip -fkd"
decompress_handlers = {
    # keep compressed files (-k), force (-f), decompress (-d)
    # BUGFIX: the ".gz" handler invoked "gz", which is not a real
    # command; the gzip decompressor binary is "gzip"
    ".gz": "gzip -fkd",
    ".bz2": "bzip2 -fkd",
    ".xz": "xz -fkd",
}
def file_is_compressed(filename):
    """
    Return *(basename, ext)* if *filename* has a known compressed
    extension (see :data:`decompress_handlers`), *(filename, None)*
    otherwise.
    """
    assert isinstance(filename, str)
    basename, ext = os.path.splitext(filename)
    if ext in decompress_handlers:	# compressed logfile support
        return basename, ext
    return filename, None
def maybe_decompress(filename, force = False):
    """
    Decompress a file if it has a compressed file extension and return
    the decompressed name

    If the decompressed file already exists, assume it is the
    decompressed version already and do not decompress.

    :param str filename: a filename to maybe decompress
    :params bool force: (optional, default *False*) if *True*,
      decompress even if the decompressed file already exists
    :returns str: the name of the file; if it was compressed. If it
      is *file.ext*, where *ext* is a compressed file extension, then
      it decompresses the file to *file* and returns *file*, without
      removing the original *file.ext*.

    The compressed extensions are registered in
    :data:`decompress_handlers`.
    """
    assert isinstance(filename, str)
    basename, ext = file_is_compressed(filename)
    if not ext:			# not compressed, nothing to do
        return filename
    if force or not os.path.exists(basename):
        # FIXME: we need a lock in case we have multiple
        # processes doing this
        command = decompress_handlers[ext]
        subprocess.check_call(command.split() + [ filename ],
                              stdin = subprocess.PIPE)
    return basename
class dict_lru_c:
    """
    Way simple LRU dictionary with maximum size

    When putting an entry over :attr:`max_size`, the entry with the
    oldest insertion timestamp is evicted. Getting removes the entry,
    so it kinda works like a FIFO.

    :param int max_size: maximum number of entries in the dictionary;
      when putting a new one, older entries will be removed.
    """
    def __init__(self, max_size):
        assert isinstance(max_size, int)
        self.max_size = max_size
        self.cache = dict()

    def set(self, key, value):
        """Store *value* under *key*, evicting the oldest entry if full."""
        self.cache[key] = (value, time.time())
        if len(self.cache) > self.max_size:
            # FIX: the previous scan used a strict "<" against
            # time.time(); with a coarse clock / fast inserts every
            # timestamp could tie, no victim was picked, and
            # ``del self.cache[None]`` raised KeyError. min() always
            # yields a victim; ties resolve to insertion order (oldest).
            # It also avoids shadowing the *key* parameter.
            key_oldest = min(self.cache, key=lambda k: self.cache[k][1])
            del self.cache[key_oldest]

    def get_and_remove(self, key, default = None):
        """
        Get a value for a key

        Note this is a destructive get; we can get it only once and
        then it is deleted.

        :param default: value to return when *key* is missing. FIX: the
          original accepted this parameter but always returned *None*
          for missing keys; it is now honored.
        """
        value, _ts = self.cache.pop(key, (default, None))
        return value
def cmdline_str_to_value(value):
    """
    Given a string describing a value from the command line, convert
    it to an scalar

    :params str value: value as read from the command line in the
      format *[FORMAT:]VALUE*, format being **i** for integer, **f**
      for float, **s** for string, **b** for bool; examples::

        i:33
        f:-3.2
        b:true
        s:somestring
        somestring

    :returns: value as int, float, bool or string
    """
    # split on the first colon; no colon (or unknown prefix) means the
    # value is a plain string passed through untouched
    prefix, sep, rest = value.partition(":")
    if sep == ":":
        if prefix == "i":
            return int(rest)
        if prefix == "f":
            return float(rest)
        if prefix == "b":
            lowered = rest.lower()
            if lowered == "true":
                return True
            if lowered == "false":
                return False
            raise ValueError("value %s: bad boolean '%s' (true or false)"
                             % (value, rest))
        if prefix == "s":
            # explicit string, possibly empty
            return rest
    return value
def str_cast_maybe(s):
    """
    If given a bytes string, convert to a UTF-8 *str*; otherwise pass as is

    :param s: string of any type; if bytes, it will be decoded.
    :returns str: converted string
    """
    if isinstance(s, bytes):
        # FIX: the original called s.encode('utf-8') and discarded the
        # result; bytes objects have no .encode(), so any bytes input
        # raised AttributeError and nothing was ever converted.
        # bytes -> str conversion is decode().
        return s.decode('utf-8')
    return s
def str_bytes_cast(s, like):
    """
    Convert a string (bytes or str) to be the same type as another one
    using UTF-8

    :param s: string or bytes to convert
    :param str|bytes|type like: another string (str or bytes) to serve
      as the destination type; can also be a type
    :returns: *s* converted into the same type as *like* using UTF-8
    """
    assert isinstance(s, (str, bytes))
    # figure the target type: *like* may be the type itself or an instance
    if isinstance(like, type):
        assert like in (str, bytes)
        target = like
    else:
        assert isinstance(like, (str, bytes))
        target = type(like)
    if isinstance(s, str):
        # str input: keep it or encode it down to bytes
        return s if target == str else s.encode('utf-8')
    # bytes input: keep it or decode it up to str
    return s if target == bytes else s.decode('utf-8')
def removeprefix(s, prefix):
    """
    Remove a prefix from a string

    Compatibility shim: delegates to :meth:`str.removeprefix` on
    Python >= 3.9, otherwise slices manually.

    :param s: string
    :param prefix: prefix to remove
    :returns: the string with the prefix removed
    """
    if hasattr(s, "removeprefix"):	# python >= 3.9
        return s.removeprefix(prefix)
    return s[len(prefix):] if s.startswith(prefix) else s
class late_resolve_realpath(str):
    """
    A path that is resolved only when rendered as a string.

    :param str name: file path

    Calling str() on an instance runs :func:`os.path.realpath` at that
    moment (eg: symlinks are followed then, not at construction time),
    so late filesystem changes are picked up.
    """
    def __init__(self, name):
        self.name = name

    def __str__(self):
        # resolve lazily, every time we are asked to render
        return os.path.realpath(self.name)
def _sysfs_read(filename):
try:
with open(filename) as fr:
return fr.read().strip()
except IOError as e:
if e.errno != errno.ENOENT:
raise
class late_resolve_usb_path_by_serial(str):
    """
    Given a USB serial number, resolve it to a USB path only when we
    are trying to use it.

    :param str serial_number: USB Serial Number

    When converting to a string (and only when doing that) it will be
    resolved to a USB path such as */sys/bus/usb/devices/1-3.4.3.4*.
    If no such USB device is present, *None* will be returned.

    NOTE(review): returning *None* from __str__ makes str() raise
    TypeError when the device is absent -- behavior inherited from the
    original implementation; confirm callers handle it.
    """
    def __init__(self, serial_number):
        assert isinstance(serial_number, str)
        self.serial_number = serial_number

    def __str__(self):
        # Scan each device's serial attribute, kinda like:
        #
        ## $ grep -r YK18738 /sys/bus/usb/devices/*/serial
        ## /sys/bus/usb/devices/1-3.4.3.4/serial:YK18738
        for fn_serial in glob.glob("/sys/bus/usb/devices/*/serial"):
            if _sysfs_read(fn_serial) != self.serial_number:
                continue
            devpath = os.path.dirname(fn_serial)
            if os.path.isdir(devpath):
                return devpath
            break	# matched, but the device dir vanished: give up
        return None
def rpyc_connection(hostname = None, port = None,
                    username = None, password = None,
                    spec = None,
                    mode: str = None, tag = None):
    """
    Create an RPyC connection to *hostname*:*port*.

    :param str mode: (optional, default *ssh*) connection mode:

      - zerodeploy: push a temporary RPyC server over SSH and connect
      - ssh: tunnel a classic RPyC connection over SSH (paramiko)
      - direct: plain, passwordless rpyc.classic.connect()

      if *None*, defaults to environment variable RPYC_MODE, otherwise
      *ssh*.

    System Setup
    ^^^^^^^^^^^^

    Python packages needed: rpyc, plumbum, paramiko

    Tips
    ^^^^

    https://rpyc.readthedocs.io/en/latest/docs/howto.html

    Redirecting remote's stdout and stderr locally

    >>> import sys
    >>> c.modules.sys.stdout = sys.stdout
    >>> c.execute("print('Hello World')")

    TODO/FIXME
    ^^^^^^^^^^

    - use ttbd as a tunnel provider and the TCF cookie
    - implement USERNAME:PASSWORD@HOSTNAME:PORT
      how to spec the SSH port vs the RPYC port?
    - honor the *spec* and *tag* arguments (currently ignored;
      *spec* is rebuilt from the other parameters)
    """
    # fake lazy import -- only pay for these heavy deps when called
    try:
        import rpyc
        import rpyc.utils.zerodeploy
        import rpyc.core.stream
        import plumbum.machines.paramiko_machine
        import plumbum
    except ImportError:
        tcfl.tc.tc_global.report_blck(
            "MISSING MODULES: install them with:"
            " pip install --user plumbum rpyc")
        raise
    assert hostname is None or isinstance(hostname, str)
    if mode is None:
        mode = os.environ.get("RPYC_MODE", "ssh")
    assert mode in ( "zerodeploy", "ssh", "direct" )
    # rebuild the connection spec string from the pieces
    spec = ""
    if not hostname:
        hostname = "localhost"
    if not username:
        username = os.environ.get('RPYC_USERNAME', None)
    if username:
        spec += username + "@"
    spec += hostname
    if port:
        spec += ":" + str(port)
    if not password:
        # NOTE(review): raises KeyError when RPYC_SSHPASS is unset,
        # even in passwordless "direct" mode -- confirm intended
        password = os.environ['RPYC_SSHPASS']
    if mode == "zerodeploy":
        machine = plumbum.machines.paramiko_machine.ParamikoMachine(
            hostname, user = username, password = password)
        server = rpyc.utils.zerodeploy.DeployedServer(machine)
        connection = server.classic_connect()
    elif mode == "ssh":
        machine = plumbum.machines.paramiko_machine.ParamikoMachine(
            hostname, user = username, password = password)
        # ParamikoMachine has no tunnel, so use a stream -- copied
        # from rpyc.utils.zerodeploy
        connection = rpyc.utils.classic.connect_stream(
            rpyc.core.stream.SocketStream(machine.connect_sock(port)))
    elif mode == "direct":
        # FIX: this branch used to test mode == "connect", a value the
        # assert above can never let through, so "direct" mode always
        # fell into the dead else and blew up. Match the validated value.
        # passwordless
        connection = rpyc.classic.connect(hostname, port = port)
    else:
        # unreachable after the assert above; fail loudly if it ever
        # runs (the original used a bare "assert()", which asserts an
        # empty tuple -- confusing and flagged by the compiler)
        raise AssertionError("unhandled mode %r" % mode)
    return connection
def rpyc_compress_dnload_file(remote, remote_name, local_name = None):
    """
    Compress *remote_name* on the RPyC *remote* with xz and download it.

    :param remote: live RPyC connection
    :param str remote_name: path of the file on the remote side; it is
      replaced there by *remote_name*.xz
    :param str local_name: (optional) local destination; defaults to
      *remote_name* + ".xz"
    """
    try:
        # fake lazy import
        import rpyc.utils.classic
    except ImportError:
        tcfl.tc.tc_global.report_blck(
            "MISSING MODULES: install them with:"
            " pip install --user plumbum rpyc")
        raise
    if local_name is None:
        local_name = remote_name + ".xz"
    # Compressing first makes the transfer way faster
    remote.modules['subprocess'].run([ "xz", "-9f", remote_name ])
    rpyc.utils.classic.download(remote, remote_name + ".xz", local_name)
def buildah_image_create(image_name, dockerfile_s, maybe = True,
                         timeout = 20, capture_output = True):
    """
    Build a container image using buildah

    :param str image_name: tag/name for the resulting image
    :param str dockerfile_s: Dockerfile contents, as a string
    :param bool maybe: (default *True*) skip the build if an image with
      this name already shows in ``buildah images``
    :param int timeout: seconds the build is allowed to run
    :param bool capture_output: capture the build's stdout/stderr

    :returns bool: *True* if the image was built, *False* if not
      because it already existed

    FIXME: add --annotation cfg_hash so we can always refresh them
    based on the config file -> if it changes
    """
    if maybe:
        # since this can take a while, if we see it already exists, we
        # don't redo it
        p = subprocess.run(
            [ "buildah", "images", "--format", "{{.Name}}" ],
            capture_output = True, check = True, timeout = 5,
            # FIX: subprocess's *text* parameter is a boolean; 'utf-8'
            # only worked because any non-empty string is truthy
            text = True)
        # NOTE(review): substring test -- image "foo" also matches
        # "foobar"; kept to preserve behavior, consider matching
        # against p.stdout.splitlines()
        if image_name in p.stdout:
            return False
    with tempfile.NamedTemporaryFile() as f:
        # see ttbl.power.daemon_c
        f.write(dockerfile_s.encode('utf-8'))
        f.flush()
        subprocess.run(
            [
                "buildah", "bud", "-f", f.name, "-t", image_name,
            ], check = True, capture_output = capture_output, text = True,
            timeout = timeout)
    return True
| 33.248403 | 94 | 0.600092 |
acf8d9621a4a7606a6356d9c661ab49f8118c547 | 440 | py | Python | env/lib/python3.8/site-packages/plotly/validators/scattergl/_textsrc.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 76 | 2020-07-06T14:44:05.000Z | 2022-02-14T15:30:21.000Z | env/lib/python3.8/site-packages/plotly/validators/scattergl/_textsrc.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 11 | 2020-08-09T02:30:14.000Z | 2022-03-12T00:50:14.000Z | env/lib/python3.8/site-packages/plotly/validators/scattergl/_textsrc.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 11 | 2020-07-12T16:18:07.000Z | 2022-02-05T16:48:35.000Z | import _plotly_utils.basevalidators
class TextsrcValidator(_plotly_utils.basevalidators.SrcValidator):
    """Validator for the ``textsrc`` property of ``scattergl`` traces."""

    def __init__(self, plotly_name="textsrc", parent_name="scattergl", **kwargs):
        # fill in defaults unless the caller overrode them, then hand
        # everything to the base SrcValidator
        kwargs.setdefault("edit_type", "none")
        kwargs.setdefault("role", "info")
        super(TextsrcValidator, self).__init__(
            plotly_name=plotly_name, parent_name=parent_name, **kwargs)
| 33.846154 | 81 | 0.652273 |
acf8da93db10aeb62ea5da06696521b4927b3024 | 1,361 | py | Python | aliyun-python-sdk-hbr/aliyunsdkhbr/request/v20170908/AttachNasFileSystemRequest.py | liumihust/aliyun-openapi-python-sdk | c7b5dd4befae4b9c59181654289f9272531207ef | [
"Apache-2.0"
] | null | null | null | aliyun-python-sdk-hbr/aliyunsdkhbr/request/v20170908/AttachNasFileSystemRequest.py | liumihust/aliyun-openapi-python-sdk | c7b5dd4befae4b9c59181654289f9272531207ef | [
"Apache-2.0"
] | 1 | 2020-05-31T14:51:47.000Z | 2020-05-31T14:51:47.000Z | aliyun-python-sdk-hbr/aliyunsdkhbr/request/v20170908/AttachNasFileSystemRequest.py | liumihust/aliyun-openapi-python-sdk | c7b5dd4befae4b9c59181654289f9272531207ef | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class AttachNasFileSystemRequest(RpcRequest):
	"""RPC request wrapper for the HBR ``AttachNasFileSystem`` API (2017-09-08)."""

	def __init__(self):
		RpcRequest.__init__(self, 'hbr', '2017-09-08', 'AttachNasFileSystem','hbr')

	def get_CreateTime(self):
		return self.get_query_params().get('CreateTime')

	def set_CreateTime(self,CreateTime):
		self.add_query_param('CreateTime',CreateTime)

	def get_FileSystemId(self):
		return self.get_query_params().get('FileSystemId')

	def set_FileSystemId(self,FileSystemId):
		# FIX: the original final line had dataset metadata
		# (" | 37.805556 | ...") fused onto it -- a syntax error
		self.add_query_param('FileSystemId',FileSystemId)
acf8da9aa4589026b0b617a076401517772e05d1 | 366 | py | Python | aliyun/api/rest/Push20150318QueryBindListRequest.py | francisar/rds_manager | 458298669bf7d1990a85648b466b88f905256690 | [
"MIT"
] | 14 | 2015-11-30T02:35:18.000Z | 2019-05-14T11:49:24.000Z | aliyun/api/rest/Push20150318QueryBindListRequest.py | francisar/rds_manager | 458298669bf7d1990a85648b466b88f905256690 | [
"MIT"
] | 2 | 2015-11-30T02:51:40.000Z | 2017-03-16T01:51:45.000Z | aliyun/api/rest/Push20150318QueryBindListRequest.py | francisar/rds_manager | 458298669bf7d1990a85648b466b88f905256690 | [
"MIT"
] | 12 | 2016-01-04T06:48:17.000Z | 2020-11-07T14:08:25.000Z | '''
Created by auto_sdk on 2015.06.23
'''
from aliyun.api.base import RestApi
class Push20150318QueryBindListRequest(RestApi):
	"""REST request for ``push.aliyuncs.com queryBindList`` (API of 2015-03-18)."""

	def __init__(self,domain='push.aliyuncs.com',port=80):
		RestApi.__init__(self,domain, port)
		# request parameters, to be filled in by the caller
		self.Account = None
		self.AppId = None
		self.DeviceType = None

	def getapiname(self):
		# fully qualified API name expected by the aliyun REST base class
		return 'push.aliyuncs.com.queryBindList.2015-03-18'
| 26.142857 | 55 | 0.756831 |
acf8dab2e700acd9f7d9ad2ba20dd80edbf0335f | 459 | py | Python | src/constants.py | Azure-Samples/EntityDisambiguation | e8e9a9f608365f7df683a93e7496c334ba2d2d48 | [
"MIT"
] | null | null | null | src/constants.py | Azure-Samples/EntityDisambiguation | e8e9a9f608365f7df683a93e7496c334ba2d2d48 | [
"MIT"
] | null | null | null | src/constants.py | Azure-Samples/EntityDisambiguation | e8e9a9f608365f7df683a93e7496c334ba2d2d48 | [
"MIT"
] | null | null | null | """
This file contains constants used in this module
"""
class Constants:
    """
    This class contains constants used in this module
    """
    # field-name variants consulted when searching by name
    name_search_fields = [
        "phonetic", "edge_n_gram", "keyword", "letter", "ngram",
        "camelcase", "email", "stemming", "url_email", "text_microsoft",
    ]
    # keys used in result dictionaries
    retrieved = "retrieved"
    expected = "expected"
    result = "result"
| 17 | 53 | 0.529412 |
acf8db5902541cd3b8092c4696791779af7d5142 | 321 | py | Python | base/migrations/0063_auto_20200928_0519.py | gade-raghav/project-enhancements | 6303f6d6772f1e1b21693eb4ce6c9dbf6b7f49ca | [
"MIT"
] | null | null | null | base/migrations/0063_auto_20200928_0519.py | gade-raghav/project-enhancements | 6303f6d6772f1e1b21693eb4ce6c9dbf6b7f49ca | [
"MIT"
] | null | null | null | base/migrations/0063_auto_20200928_0519.py | gade-raghav/project-enhancements | 6303f6d6772f1e1b21693eb4ce6c9dbf6b7f49ca | [
"MIT"
] | null | null | null | # Generated by Django 3.1 on 2020-09-28 05:19
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: resets the Meta options of the ``feature`` model."""

    dependencies = [
        ('base', '0062_auto_20200926_0628'),
    ]

    operations = [
        # empty options dict reverts all Meta options to Django defaults
        migrations.AlterModelOptions(name='feature', options={}),
    ]
| 17.833333 | 45 | 0.58567 |
acf8dbd7b1ff804e2020a06c196d17141192fe87 | 10,554 | py | Python | demo_server.py | ankye/Tacotron-2 | e0cd46ece5d96948d684f29a224d9b7154976752 | [
"MIT"
] | 5 | 2019-04-11T02:45:04.000Z | 2019-05-22T11:58:15.000Z | demo_server.py | ankye/Tacotron-2 | e0cd46ece5d96948d684f29a224d9b7154976752 | [
"MIT"
] | 1 | 2019-12-30T08:23:36.000Z | 2020-02-22T02:15:31.000Z | demo_server.py | ankye/Tacotron-2 | e0cd46ece5d96948d684f29a224d9b7154976752 | [
"MIT"
] | 4 | 2019-05-29T14:29:46.000Z | 2020-02-21T13:16:57.000Z | import falcon
import tensorflow as tf
import re
import io
from hparams import hparams
from infolog import log
from tacotron.synthesizer import Synthesizer
from wsgiref import simple_server
import argparse
from pypinyin import pinyin, lazy_pinyin, Style
from scipy.io.wavfile import write
import numpy as np
from datasets import audio
import itertools
import jieba
from pinyin2cn import cn2pinyin,cn_format
import hashlib
from pydub import AudioSegment
import json
import os
import mimetypes
from subprocess import Popen, PIPE
import pyaudio
from web_html import isWebUrl,get_article
import time
html_body = '''<html><title>Tcotron-2 Demo</title><meta charset='utf-8'>
<style>
body {padding: 16px; font-family: sans-serif; font-size: 14px; color: #444}
input {font-size: 14px; padding: 8px 12px; outline: none; border: 1px solid #ddd}
input:focus {box-shadow: 0 1px 2px rgba(0,0,0,.15)}
p {padding: 12px}
button {background: #28d; padding: 9px 14px; margin-left: 8px; border: none; outline: none;
color: #fff; font-size: 14px; border-radius: 4px; cursor: pointer;}
button:hover {box-shadow: 0 1px 2px rgba(0,0,0,.15); opacity: 0.9;}
button:active {background: #29f;}
button[disabled] {opacity: 0.4; cursor: default}
</style>
<body>
<form>
<input id="text" type="text" size="40" placeholder="请输入文字或网址">
<br/>
<div>
<input name="type" checked type="radio" value="g1"/><label>正常女</label>
<input name="type" type="radio" value="g2"/><label>小姐姐</label>
<input name="type" type="radio" value="g3"/><label>汤姆猫</label>
<input name="type" type="radio" value="b1"/><label>正常男</label>
</div>
<br/>
<button id="button" name="synthesize">合成</button>
</form>
</br>
<audio id="audio" controls autoplay hidden></audio>
<p id="message"></p>
<script>
function radio(name){
var radios = document.getElementsByName(name)
var value = ""
for(var i=0;i<radios.length;i++){
if(radios[i].checked == true){
value = radios[i].value
}
}
return value
}
contents = []
playlist = []
playerStatus = false
function q(selector) {return document.querySelector(selector)}
q('#text').focus()
q('#button').addEventListener('click', function(e) {
text = q('#text').value.trim()
if (text) {
q('#message').textContent = '合成中...'
q('#button').disabled = true
q('#audio').hidden = true
t = radio("type")
//synthesize(text,t)
contents = []
playlist = []
playerStatus = false
parse(text,t)
}
e.preventDefault()
return false
})
q('#audio').addEventListener("ended", function() {
if(playlist.length == 0){
q('#message').textContent = '播放完成.'
}else{
q('#message').textContent = '合成中...'
}
playerStatus = false
play()
})
function parse(text,t){
fetch('/read?text=' + encodeURIComponent(text), {cache: 'no-cache'})
.then(function(res) {
if (!res.ok) throw Error(res.statusText)
return res.json()
}).then(function(result) {
q('#message').textContent = ''
q('#button').disabled = false
contents = result["content"]
if( contents.length > 0){
content = contents.shift()
q('#message').textContent = '合成中...'
synthesize(content,t)
}
}).catch(function(err) {
q('#message').textContent = '出错: ' + err.message
q('#button').disabled = false
})
}
function play(){
if(playerStatus) return
if( playlist.length > 0){
playerStatus = true
c = playlist.shift()
q('#message').textContent = c[0]
q('#audio').src = URL.createObjectURL(c[1])
q('#audio').hidden = false
}
if( contents.length > 0){
content = contents.shift()
synthesize(content,t)
}
}
function synthesize(text,t) {
fetch('/synthesize?type=' + t + '&text=' + encodeURIComponent(text), {cache: 'no-cache'})
.then(function(res) {
if (!res.ok) throw Error(res.statusText)
return res.blob()
}).then(function(blob) {
//q('#message').textContent = text
playlist.push([text,blob])
play()
q('#button').disabled = false
//q('#audio').src = URL.createObjectURL(blob)
//q('#audio').hidden = false
}).catch(function(err) {
q('#message').textContent = '出错: ' + err.message
q('#button').disabled = false
})
}
</script></body></html>
'''
# Load custom dictionaries so jieba segments domain terms correctly;
# later loads can add to / override earlier entries.
jieba.load_userdict("user_dict/jieba1.txt")
jieba.load_userdict("user_dict/jieba.txt")
jieba.load_userdict("user_dict/user_dict")

# Command-line options for the HTTP demo server
parser = argparse.ArgumentParser()
parser.add_argument('--checkpoint', default='pretrained/', help='Path to model checkpoint')
parser.add_argument('--hparams', default='',help='Hyperparameter overrides as a comma-separated list of name=value pairs')
parser.add_argument('--port', default=9003,help='Port of Http service')
parser.add_argument('--host', default="localhost",help='Host of Http service')
parser.add_argument('--name', help='Name of logging directory if the two models were trained together.')
args = parser.parse_args()

# Load the Tacotron synthesizer once, at module import time
synth = Synthesizer()
modified_hp = hparams.parse(args.hparams)
synth.load(args.checkpoint, modified_hp)
def gen(content, t):
    """Synthesize Chinese *content* to speech and encode it as MP3.

    :param str content: text to synthesize
    :param str t: voice preset -- "g1" normal female, "g2" young female,
        "g3" Tom-cat style, "b1" normal male
    :returns: tuple *(mp3_path, data)* -- path of the MP3 written under
        ``wavs/`` and the MP3 bytes themselves
    """
    t1 = time.time()
    out = io.BytesIO()
    output = np.array([])
    # file/cache key: md5 of the raw input text
    mhash = hashlib.md5(content.encode(encoding='UTF-8')).hexdigest()
    print(content)
    content = cn2pinyin(content)
    print(len(content))
    # "E" marks utterance boundaries for the synthesizer
    ts = content.split("E")
    t2 = time.time()
    for text in ts:
        text = text.strip()
        if len(text) <= 0:
            continue
        text += " E"
        st1 = time.time()
        data, wav = synth.eval(text)
        st2 = time.time()
        print(">>>>>" + text, "cost=", st2 - st1)
        output = np.append(output, wav, axis=0)
    t3 = time.time()
    audio.save_wav(output, out, hparams.sample_rate)
    t4 = time.time()
    if t == "g1":
        mp3_path = "wavs/" + mhash + ".mp3"
        song = AudioSegment.from_file(out, format='wav')
        # FIX: pydub's set_frame_rate()/set_channels() return NEW
        # AudioSegment objects; the original discarded the results, so
        # the settings never applied. Rebind to make them effective.
        song = song.set_frame_rate(hparams.sample_rate)
        song = song.set_channels(2)
        filter = "atempo=0.95,highpass=f=300,lowpass=f=3000,aecho=0.8:0.88:6:0.4"
        song.export(mp3_path, format="mp3",
                    parameters=["-filter_complex", filter, "-q:a", "4", "-vol", "150"])
        t5 = time.time()
        # export a second time into memory so we can also return the bytes
        out2 = io.BytesIO()
        song.export(out2, format="mp3",
                    parameters=["-filter_complex", filter, "-q:a", "4", "-vol", "150"])
        data = out2.getvalue()
        t6 = time.time()
        print("gen cost", t2 - t1, t3 - t2, t4 - t3, t5 - t4, t6 - t5)
        return mp3_path, data
    else:
        # pitch/rate presets per voice, applied with soundstretch below
        effect = "-rate=-5 -pitch=+4"
        if t == "g3":
            effect = "-rate=+45 -pitch=+3"
        elif t == "b1":
            effect = "-pitch=-4"
        wav_file = "wavs/" + mhash + ".wav"
        audio.save_wav(output, wav_file, hparams.sample_rate)
        mp3_file = "wavs/" + mhash + ".mp3"
        out_file = "wavs/" + mhash + "1.wav"
        popen = Popen("soundstretch " + wav_file + " " + out_file + " " + effect,
                      shell=True, stdout=PIPE, stderr=PIPE)
        popen.wait()
        if popen.returncode != 0:
            print("Error.")
        song = AudioSegment.from_wav(out_file)
        song = song.set_frame_rate(hparams.sample_rate)  # FIX: rebind (see above)
        song = song.set_channels(1)                      # FIX: rebind (see above)
        filter = "atempo=0.95,highpass=f=200,lowpass=f=1000,aecho=0.8:0.88:6:0.4"
        song.export(mp3_file, format="mp3",
                    parameters=["-filter_complex", filter, "-q:a", "4", "-vol", "200"])
        out2 = io.BytesIO()
        song.export(out2, format="mp3",
                    parameters=["-filter_complex", filter, "-q:a", "4", "-vol", "200"])
        data = out2.getvalue()
        return mp3_file, data
class Sound:
    """Serves generated audio files (wavs/<name>) back to the browser."""

    def on_get(self, req, resp, mp3):
        file_path = os.path.join("wavs", mp3)
        if not os.path.exists(file_path):
            msg = 'Resource doesn\'t Exist'
            raise falcon.HTTPNotFound('Not Found', msg)
        resp.status = falcon.HTTP_200
        # FIX: mimetypes.guess_type() returns a (type, encoding) tuple;
        # the original assigned the whole tuple to content_type. Take
        # only the MIME type.
        resp.content_type = mimetypes.guess_type(file_path, strict=False)[0]
        resp.stream = open(file_path, 'rb')
        resp.stream_len = os.path.getsize(file_path)
class Res:
    """Serves the single-page HTML demo UI at the web root."""

    def on_get(self, req, resp):
        resp.status = falcon.HTTP_200
        resp.content_type = "text/html"
        resp.body = html_body
class SynMp3:
    """JSON endpoint: synthesize *text* with voice "g1", return the MP3 URL."""

    def on_get(self, req, resp):
        t1 = time.time()
        if not req.params.get('text'):
            raise falcon.HTTPBadRequest()
        content = req.params.get('text')
        mp3_path, out = gen(content, "g1")
        result = {}
        result["code"] = 0
        # FIX: args.port defaults to the *int* 9003 (argparse default),
        # so the original "..." + args.port concatenation raised
        # TypeError; cast to str so both CLI and default values work
        result["path"] = "http://" + args.host + ":" + str(args.port) + "/" + mp3_path
        result["text"] = content
        resp.status = falcon.HTTP_200
        resp.body = json.dumps(result)
        resp.content_length = len(resp.body)
        t2 = time.time()
        print("cost synMp3 ", t2 - t1)
class Read:
    """Endpoint: split text (or a fetched article URL) into ~120-char chunks."""

    def on_get(self, req, resp):
        if not req.params.get('text'):
            raise falcon.HTTPBadRequest()
        content = req.params.get('text')
        title = ""
        if isWebUrl(content):
            # a URL was posted: fetch and extract the article body
            title, content = get_article(content)
        sentences = cn_format(content)
        print(sentences)
        # pack sentences into chunks of at least 120 characters so each
        # synthesis request carries a reasonable amount of text
        chunks = []
        buf = ""
        for sentence in sentences:
            buf += sentence + "。"
            if len(buf) >= 120:
                chunks.append(buf)
                buf = ""
        if buf:
            chunks.append(buf)
        print(chunks)
        resp.status = falcon.HTTP_200
        payload = {"code": 0, "content": chunks, "title": title}
        resp.body = json.dumps(payload)
        resp.content_length = len(resp.body)
class Syn:
    """Endpoint: synthesize text (or an article URL) and stream MP3 bytes."""

    def on_get(self, req, resp):
        started = time.time()
        if not req.params.get('text'):
            raise falcon.HTTPBadRequest()
        content = req.params.get('text')
        t = req.params.get('type')
        if isWebUrl(content):
            title, content = get_article(content)
        print("type :", t)
        mp3_path, out = gen(content, t)
        resp.status = falcon.HTTP_200
        resp.data = out
        resp.content_type = "audio/mp3"
        print("cost syn ", time.time() - started)
# Wire up the falcon WSGI application and its routes
api = falcon.API()
api.add_route("/",Res())                  # demo HTML page
api.add_route("/synthesize", Syn())       # raw MP3 stream
api.add_route("/create",SynMp3())         # JSON response with file URL
api.add_route("/wavs/{mp3}",Sound())      # serve generated audio files
api.add_route("/read", Read())            # split text/article into chunks
print("host:{},port:{}".format(args.host,int(args.port)))
# Blocking, single-threaded reference WSGI server (demo use only)
simple_server.make_server(args.host,int(args.port),api).serve_forever()
| 30.591304 | 122 | 0.638147 |
acf8dcbe6e0cc6cd840d5c5dd245739416d4b235 | 479 | py | Python | tests/redshift_model.py | saurabhdhupar/pypandas-sql | 4b5d3bd2ffb5e1e566b2cd8aa9ecaba29237a995 | [
"Apache-2.0"
] | null | null | null | tests/redshift_model.py | saurabhdhupar/pypandas-sql | 4b5d3bd2ffb5e1e566b2cd8aa9ecaba29237a995 | [
"Apache-2.0"
] | 23 | 2020-01-23T06:43:39.000Z | 2020-02-08T08:43:50.000Z | tests/redshift_model.py | saurabhdhupar/pypandas-sql | 4b5d3bd2ffb5e1e566b2cd8aa9ecaba29237a995 | [
"Apache-2.0"
] | null | null | null | from sqlalchemy import Column, String, DateTime, Integer
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Person(Base):
    """SQLAlchemy ORM model for the ``test_people`` test table."""
    __tablename__ = 'test_people'

    user_name = Column(String, primary_key=True)   # natural primary key
    first_name = Column(String)
    last_name = Column(String)
    team = Column(String)
    employment_term = Column(String)
    start_date = Column(DateTime)
    age = Column(Integer)
    end_date = Column(DateTime, nullable=True)     # NULL while still employed
| 26.611111 | 56 | 0.734864 |
acf8dda351c67e5641087871da9c129aefd46948 | 2,371 | py | Python | PyPoll/main.py | CAguayoRb/pyton-challenge | 35216ccd29cf6275710cc56c6fb85eb0b90a3ed1 | [
"RSA-MD"
] | null | null | null | PyPoll/main.py | CAguayoRb/pyton-challenge | 35216ccd29cf6275710cc56c6fb85eb0b90a3ed1 | [
"RSA-MD"
] | null | null | null | PyPoll/main.py | CAguayoRb/pyton-challenge | 35216ccd29cf6275710cc56c6fb85eb0b90a3ed1 | [
"RSA-MD"
] | null | null | null | import os
import csv
import sys
from collections import Counter

poll_csv = 'python-challenge/PyPoll/Resources/election_data.csv'

# Tally every ballot. A Counter generalizes the analysis to any set of
# candidates -- the original hard-coded four names and decided the winner
# with chained comparisons ("a > b > c > d"); whenever no strict-ordering
# branch matched (ties, other orderings), `winner` was never assigned and
# the script crashed with NameError. The rounded percentages it computed
# were also never printed. Both are fixed here.
vote_counts = Counter()
total_votes = 0
with open(poll_csv, newline="") as csvfile:
    csvreader = csv.reader(csvfile, delimiter=',')
    next(csvreader)  # skip the header row
    for row in csvreader:
        total_votes += 1
        vote_counts[row[2]] += 1  # column 2 holds the candidate name

# most_common() handles any number of candidates; first entry is the winner
winner = vote_counts.most_common(1)[0][0]

# Build the report once, then both print it and save it to the analysis file
lines = [
    "Election Results",
    "----------------------------------------",
    f"Total Votes: {total_votes}",
    "----------------------------------------",
]
for candidate, votes in vote_counts.items():
    percentage = round(votes / total_votes * 100, 3)
    lines.append(f"{candidate}: {percentage}% ({votes})")
lines.append("-------------------------------------------")
lines.append(f"Winner: {winner}")

report = "\n".join(lines)
print(report)
with open("python-challenge/PyPoll/Analysis/election_data_analysis.txt", "w") as outfile:
    outfile.write(report + "\n")
acf8dece375ce2026c3db474dd5b39f11b1ee357 | 6,070 | py | Python | etl-case-study/code/ibm/3. reduce/__main__.py | iaas-splab/function-orchestration-modeling | 2ba5cc4d25e84602af0e75b0d3e75a37032e2964 | [
"Apache-2.0"
] | null | null | null | etl-case-study/code/ibm/3. reduce/__main__.py | iaas-splab/function-orchestration-modeling | 2ba5cc4d25e84602af0e75b0d3e75a37032e2964 | [
"Apache-2.0"
] | null | null | null | etl-case-study/code/ibm/3. reduce/__main__.py | iaas-splab/function-orchestration-modeling | 2ba5cc4d25e84602af0e75b0d3e75a37032e2964 | [
"Apache-2.0"
] | null | null | null | import ibm_boto3
import ibm_botocore
from ibm_boto3.s3.transfer import TransferConfig
from ibm_botocore.client import Config
from ibm_botocore import UNSIGNED
import os
import logging
import gzip
import json
import pandas as pd
from pandas.io.json import json_normalize
import numpy as np
from datetime import datetime, timedelta
# Keep S3 transfer concurrency low (small serverless memory footprint)
config = TransferConfig(max_concurrency=2)

# Credentials/identifiers injected by IBM Cloud Functions (OpenWhisk) env
IAM_API_KEY = os.environ.get('__OW_IAM_NAMESPACE_API_KEY')
ACTIVATION_ID = os.environ.get('__OW_ACTIVATION_ID')
ENDPOINT = 'https://s3.private.eu-de.cloud-object-storage.appdomain.cloud'
COS_OUTPUT_BUCKET = 'openaq-output'
TEMP_FOLDER_TEMPLATE = 'openaq/temp/{}'      # map-stage intermediate objects
OUTPUT_FOLDER_TEMPLATE = 'openaq/output/{}'  # final daily summary objects

# Client for IBM Cloud Object Storage (S3-compatible API, IAM auth)
ibm_cos = ibm_boto3.client("s3",
                           ibm_api_key_id=IAM_API_KEY,
                           config=Config(signature_version="oauth"),
                           endpoint_url=ENDPOINT
                           )

# The pipeline always processes yesterday's data, keyed as YYYY-MM-DD (UTC)
prev_day = datetime.utcnow() - timedelta(days=1)
prev_day = prev_day.strftime('%Y-%m-%d')

log = logging.getLogger()
def download_intermediate_results(filename):
    """Download a file from IBM Cloud Object Storage bucket

    Parameters
    ----------
    filename: string, required
        Name of the file in IBM COS bucket source bucket (OpenAQ intermediate results)

    Returns
    -------
    processed_file: string
        Local path to downloaded file
    """
    try:
        object_name = TEMP_FOLDER_TEMPLATE.format(filename)
        # /tmp is the only writable path in IBM Cloud Functions
        processed_file = os.path.join('/tmp', os.path.basename(filename))
        ibm_cos.download_file(COS_OUTPUT_BUCKET, object_name, processed_file, Config=config)
    except ibm_botocore.exceptions.ClientError as e:
        # FIX: the original logged the literal "(unknown)" inside an
        # f-string with no placeholder; log the file that actually failed
        log.error(f'Unable to download result file: {filename}')
        log.debug(e)
        raise
    return processed_file
def process_intermediate_results(dataframes):
    """Combine hourly air quality ratings and calculate daily ratings for each location.

    Parameters
    ----------
    dataframes: list of Pandas dataframes, required
        List of dataframes with hourly air quality ratings

    Returns
    -------
    summary_stats: Pandas dataframe
        Daily summary of air quality ratings
    """
    try:
        # combine into single dataframe
        # NOTE(review): assumes every frame carries 'date.utc', 'country',
        # 'city', 'location' plus the pollutant columns renamed below --
        # confirm against the map stage's output schema
        data = pd.concat(dataframes, sort=False)
        data['date.utc'] = pd.to_datetime(data['date.utc'], utc=True)
        # calculate stats: min/max/mean per UTC day per (country, city, location)
        summary_stats = data.set_index('date.utc').groupby([pd.Grouper(freq='D'), 'country', 'city', 'location']).agg([np.nanmin, np.nanmax, np.nanmean])
        # flatten the (pollutant, aggfunc) column MultiIndex into
        # "<pollutant>_<funcname>" strings (funcname is e.g. "nanmin")
        summary_stats.columns = ["_".join(x)
                                 for x in summary_stats.columns.ravel()]
        # format the columns
        summary_stats = summary_stats.reset_index()
        # there is occasionally historic data in the source; keep only the
        # target day (module-level *prev_day* -- yesterday, UTC)
        summary_stats = summary_stats[summary_stats['date.utc'].dt.date.astype(str) == prev_day]
        summary_stats['date.utc'] = summary_stats['date.utc'].dt.date
        summary_stats.drop_duplicates(inplace=True)
        # rename "<pollutant>_nan<func>" columns to "<pollutant>_<func>"
        new_columns = {'date.utc': 'date',
                       'bc_nanmin': 'bc_min',
                       'bc_nanmax': 'bc_max',
                       'bc_nanmean': 'bc_mean',
                       'co_nanmin': 'co_min',
                       'co_nanmax': 'co_max',
                       'co_nanmean': 'co_mean',
                       'no2_nanmin': 'no2_min',
                       'no2_nanmax': 'no2_max',
                       'no2_nanmean': 'no2_mean',
                       'o3_nanmin': 'o3_min',
                       'o3_nanmax': 'o3_max',
                       'o3_nanmean': 'o3_mean',
                       'pm10_nanmin': 'pm10_min',
                       'pm10_nanmax': 'pm10_max',
                       'pm10_nanmean': 'pm10_mean',
                       'pm25_nanmin': 'pm25_min',
                       'pm25_nanmax': 'pm25_max',
                       'pm25_nanmean': 'pm25_mean',
                       'so2_nanmin': 'so2_min',
                       'so2_nanmax': 'so2_max',
                       'so2_nanmean': 'so2_mean'
                       }
        summary_stats.rename(columns=new_columns, inplace=True)
    except Exception as e:
        # broad catch: log for the serverless activation trace, then
        # re-raise so the action fails visibly
        log.error("Error processing data")
        log.debug(e)
        raise
    return summary_stats
def upload_final_results(results):
    """Upload a file to IBM COS bucket

    Parameters
    ----------
    results: string, required
        Name of the local file with final results

    Raises
    ------
    ibm_botocore.exceptions.ClientError
        If the upload to the target bucket fails (logged, then re-raised).
    """
    results_path = os.path.join('/tmp', results)
    # upload to target IBM COS bucket
    try:
        # upload_file returns None, so the previous 'response =' binding was
        # an unused local and has been dropped.
        ibm_cos.upload_file(
            results_path,
            COS_OUTPUT_BUCKET,
            OUTPUT_FOLDER_TEMPLATE.format(results))
        log.info("Uploaded final results to bucket {}, path: ".format(COS_OUTPUT_BUCKET) + OUTPUT_FOLDER_TEMPLATE.format(results))
    except ibm_botocore.exceptions.ClientError as e:
        log.error(f'Unable to upload final results: {results}')
        log.debug(e)
        raise
def main(event):
    """Fetch intermediate results, build the daily summary and upload it."""
    frames = []
    intermediate_objects = []
    # download files locally
    for item in event['value']:
        # Remember each intermediate object key so the caller can clean up.
        intermediate_objects.append(
            {'Key': TEMP_FOLDER_TEMPLATE.format(item['processed_file'])})
        local_path = download_intermediate_results(item['processed_file'])
        # Each intermediate file is gzipped JSON; load it as a dataframe.
        with gzip.GzipFile(local_path, 'r') as data_file:
            frames.append(pd.DataFrame.from_dict(json.loads(data_file.read())))
    summary_stats = process_intermediate_results(frames)
    # Persist the summary as a gzipped CSV named after the processed day.
    output_file_name = '{}.csv.gz'.format(prev_day)
    output_file = '/tmp/{}'.format(output_file_name)
    summary_stats.to_csv(
        output_file,
        compression='gzip',
        index=False,
        header=True)
    upload_final_results(output_file_name)
    return {
        "message": "Successfully processed data for {}".format(prev_day),
        "intermediate_files": intermediate_objects,
        "output_file": "{}/".format(COS_OUTPUT_BUCKET) + OUTPUT_FOLDER_TEMPLATE.format(output_file_name)
    }
acf8def269bef82c995e07146db17b9722718ea9 | 1,846 | py | Python | project-5/RL/agents/critic.py | linuxbender/Deep_Learning | 3df4b26777a71ddbe461ac46dafa36b34be84348 | [
"MIT"
] | null | null | null | project-5/RL/agents/critic.py | linuxbender/Deep_Learning | 3df4b26777a71ddbe461ac46dafa36b34be84348 | [
"MIT"
] | null | null | null | project-5/RL/agents/critic.py | linuxbender/Deep_Learning | 3df4b26777a71ddbe461ac46dafa36b34be84348 | [
"MIT"
] | null | null | null | from keras import layers, models, optimizers
from keras import backend as K
class Critic:
    """Critic (value) model: maps (state, action) pairs to Q-values."""

    def __init__(self, state_size, action_size, LEARNING_RATE):
        """Record the dimensions and learning rate, then build the network."""
        self.state_size = state_size
        self.action_size = action_size
        self.LEARNING_RATE = LEARNING_RATE
        self.build_model()

    def build_model(self):
        """Assemble and compile the model; expose action-gradient function."""
        states = layers.Input(shape=(self.state_size,), name='states')
        actions = layers.Input(shape=(self.action_size,), name='actions')

        # State pathway: two Dense -> BatchNorm -> ReLU stages.
        state_path = states
        for _ in range(2):
            state_path = layers.Dense(units=128, activation=None)(state_path)
            state_path = layers.BatchNormalization()(state_path)
            state_path = layers.Activation('relu')(state_path)

        # Action pathway: mirrors the state pathway.
        action_path = actions
        for _ in range(2):
            action_path = layers.Dense(units=128, activation=None)(action_path)
            action_path = layers.BatchNormalization()(action_path)
            action_path = layers.Activation('relu')(action_path)

        # Merge both pathways, squash, then project to a single Q-value.
        merged = layers.Add()([state_path, action_path])
        merged = layers.Activation('sigmoid')(merged)
        Q_values = layers.Dense(units=1, name='q_values')(merged)

        self.model = models.Model(inputs=[states, actions], outputs=Q_values)
        self.model.compile(optimizer=optimizers.Adam(lr=self.LEARNING_RATE),
                           loss='mse')

        # Gradient of Q w.r.t. actions, consumed by the actor's update step.
        action_gradients = K.gradients(Q_values, actions)
        self.get_action_gradients = K.function(
            inputs=[*self.model.input, K.learning_phase()],
            outputs=action_gradients)
acf8e0c322093c45b52d5ffa274d5ec4d61f6eb4 | 2,306 | py | Python | benchmark/startPyquil1306.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startPyquil1306.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startPyquil1306.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=5
# total number=56
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit() -> Program:
    """Build the benchmark circuit (5 qubits) as a pyQuil Program."""
    # (gate, args) pairs, listed in the exact order of the original circuit.
    ops = [
        (H, (0,)), (H, (1,)), (H, (2,)), (CZ, (4, 2)), (H, (2,)),
        (H, (2,)), (H, (3,)), (H, (4,)), (H, (0,)), (H, (1,)),
        (H, (2,)), (H, (3,)), (H, (0,)), (Z, (3,)), (CZ, (1, 0)),
        (H, (0,)), (H, (0,)), (CZ, (1, 0)), (H, (0,)), (CNOT, (1, 0)),
        (CNOT, (1, 0)), (X, (0,)), (H, (0,)), (CZ, (1, 0)), (H, (0,)),
        (CNOT, (1, 0)), (H, (0,)), (CZ, (1, 0)), (H, (0,)), (CNOT, (1, 0)),
        (X, (1,)), (X, (2,)), (X, (3,)), (X, (0,)), (CNOT, (0, 1)),
        (Y, (2,)), (X, (1,)), (CNOT, (0, 1)), (RX, (1.0398671683382215, 2)),
        (X, (2,)), (X, (3,)), (H, (0,)), (H, (1,)), (H, (2,)), (H, (3,)),
    ]
    prog = Program()
    for gate, args in ops:
        prog += gate(*args)
    return prog
def summrise_results(bitstrings) -> dict:
    """Count how many times each bitstring occurs.

    Parameters
    ----------
    bitstrings : iterable of hashable values (measurement bitstrings)

    Returns
    -------
    dict mapping each distinct bitstring to its occurrence count.
    """
    d = {}
    for l in bitstrings:
        # dict.get with a default replaces the original if/else branching.
        d[l] = d.get(l, 0) + 1
    return d
if __name__ == '__main__':
    # Build the circuit and run it 1024 times on a 5-qubit QVM.
    prog = make_circuit()
    qvm = get_qc('5q-qvm')
    results = qvm.run_and_measure(prog,1024)
    # Stack the per-qubit measurement arrays into shots x qubits, then join
    # each shot's bits into a single string such as '01011'.
    bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
    bitstrings = [''.join(map(str, l)) for l in bitstrings]
    # Write the bitstring frequency summary to the shared data file.
    writefile = open("../data/startPyquil1306.csv","w")
    print(summrise_results(bitstrings),file=writefile)
    writefile.close()
| 25.340659 | 64 | 0.54033 |
acf8e0f0be5c764289fdc64d0edefdd0fd361cf7 | 1,046 | py | Python | gaptrain/__init__.py | t-young31/gap-train | 864574abe20cc6072376e7c36ffb2ee1635e74e3 | [
"MIT"
] | 13 | 2021-02-16T15:25:27.000Z | 2022-01-26T12:09:37.000Z | gaptrain/__init__.py | duartegroup/gap-train | 864574abe20cc6072376e7c36ffb2ee1635e74e3 | [
"MIT"
] | 5 | 2021-04-09T16:07:47.000Z | 2021-08-05T15:40:50.000Z | gaptrain/__init__.py | t-young31/gap-train | 864574abe20cc6072376e7c36ffb2ee1635e74e3 | [
"MIT"
] | 3 | 2021-03-23T16:55:22.000Z | 2022-01-03T23:40:28.000Z | from gaptrain.systems import System
from gaptrain.configurations import Configuration, ConfigurationSet
from gaptrain.molecules import Molecule, UniqueMolecule, Ion
from gaptrain.box import Box
from gaptrain.data import Data
from gaptrain.gtconfig import GTConfig
from gaptrain.trajectories import Trajectory
from gaptrain.loss import RMSE, Tau
from gaptrain import md
from gaptrain import descriptors
from gaptrain import cur
from gaptrain import active
from gaptrain import gap
from gaptrain import solvents
from gaptrain.gap import GAP, IntraGAP, InterGAP, IIGAP
# Public API re-exported from the gaptrain subpackages
# (controls what `from gaptrain import *` exposes).
__all__ = ['System',
           'Configuration',
           'ConfigurationSet',
           'Molecule',
           'UniqueMolecule',
           'Ion',
           'GAP',
           'IntraGAP',
           'InterGAP',
           'IIGAP',
           'Data',
           'GTConfig',
           'Trajectory',
           'RMSE',
           'Tau',
           'Box',
           'gap',
           'md',
           'active',
           'descriptors',
           'cur',
           'solvents']
| 26.820513 | 67 | 0.610899 |
acf8e1b4c106384dec90ea63f013ec4a03f76425 | 5,389 | py | Python | slacm/instance.py | SLAcM/SLAcM | 62943f4a68725674b103c73fcbcd25bb9cb5890e | [
"Apache-2.0"
] | 1 | 2022-01-13T03:19:24.000Z | 2022-01-13T03:19:24.000Z | slacm/instance.py | SLAcM/SLAcM | 62943f4a68725674b103c73fcbcd25bb9cb5890e | [
"Apache-2.0"
] | null | null | null | slacm/instance.py | SLAcM/SLAcM | 62943f4a68725674b103c73fcbcd25bb9cb5890e | [
"Apache-2.0"
] | null | null | null | '''
Created on Sep 19, 2020
@author: esdev
'''
import time
import zmq
import logging
import importlib
from itertools import count
from slacm.exceptions import LoadError
from slacm.timer import TimerPort
from slacm.pub import PublisherPort
from slacm.sub import SubscriberPort
from slacm.req import RequestPort
from slacm.rep import ReplyPort
from slacm.qry import QueryPort
from slacm.ans import AnswerPort
from slacm.component import Component,ComponentThread
class Instance(object):
    '''
    Class to represent a component instance.

    Owns the component object built from its implementation module, one port
    object per port in the model, and the thread that runs the component.
    '''
    # Class-level cache of imported component modules, shared by all
    # instances so each implementation module is imported only once.
    _modules = {}

    @property
    def modules(self):
        '''
        Dictionary to maintain the loaded modules.
        '''
        return self._modules

    @modules.setter
    def modules(self,val):
        # NOTE(review): this assignment creates an *instance* attribute that
        # shadows the class-level cache for this instance only -- confirm
        # whether replacing the shared cache was the intent.
        self._modules = val

    # Maps the model's port type names to the port classes implementing them.
    _portTypes = {
        "PubPort" : PublisherPort,
        "SubPort" : SubscriberPort,
        "ReqPort" : RequestPort,
        "RepPort" : ReplyPort,
        "QryPort" : QueryPort,
        "AnsPort" : AnswerPort,
        "TimPort" : TimerPort
    }

    def __init__(self, parent, model):
        '''
        Constructor for an instance. Loads the module for the component,
        constructs the component, and its ports.
        :param parent: parent actor
        :param model: instance model
        '''
        self.logger = logging.getLogger(__name__)
        self.parent = parent
        self.name = model.name
        self.type = model.type
        self.context = parent.childContext
        self.disco = self.parent.get_disco()
        self.netInfo = self.parent.get_netInfo()
        self.typeName = self.type.name
        # Component parameters come from the parent actor's configuration.
        self.params = self.parent.get_comp_params(self.name)
        self.args = self.params if self.params else {}
        # Fully qualified name: <actor>.<instance>.<componentType>.
        self.qualName = '%s.%s.%s' % (self.parent.name,self.name,self.typeName)
        self.logger.info('Instance.__init__(%s)',self.qualName)
        self.load()
        self.class_ = getattr(self.module_, self.typeName)
        self.class_.OWNER = self # Trick to set the OWNER of the component
        self.component = self.class_(**self.args) # Run the component constructor
        self.class_.OWNER = None
        self.thread = None
        # Build a port object for each modeled port and attach it to the
        # component under the port's own name.
        self.ports = {}
        self.port_index = count(0)
        for port in self.type.ports:
            _class = self._portTypes[port.__class__.__name__]
            self.ports[port.name] = _class(self,port.name,port)
            setattr(self.component,port.name,self.ports[port.name])

    def get_next_index(self):
        '''
        Returns the next port index
        '''
        return next(self.port_index)

    def getActor(self):
        '''
        Returns the parent actor object
        '''
        return self.parent

    def get_netInfo(self):
        '''
        Returns the network information object
        '''
        return self.netInfo

    def is_local(self,message):
        '''
        Returns True if the message is 'host local' for the parent actor.
        '''
        return self.parent.is_local(message)

    def load(self):
        '''
        Load the component implementation code, or retrieve it from the cache.
        '''
        if self.typeName not in self.modules:
            try:
                self.module_ = importlib.import_module(self.typeName)
                self.modules[self.typeName] = self.module_
            except Exception as e:
                # Wrap any import failure in the framework's LoadError.
                raise LoadError ("%s: %s" % (type(e),e))
        else:
            self.module_ = self.modules[self.typeName]

    def sendCommand(self,arg):
        '''
        Send a command to the component thread
        '''
        self.command.send_pyobj(arg)

    def recvResp(self):
        '''
        Receive response from the component thread
        '''
        return self.command.recv_pyobj()

    def setup(self):
        '''
        Execute the 'setup' phase of component initialization. Create command socket,
        launch component thread, and instruct it to execute the 'setup'.
        '''
        self.logger.info('Instance.setup(%s: %s)',self.name,self.type.name)
        self.context = self.parent.childContext
        # Inproc PAIR socket carries commands to (and acks from) the thread.
        self.command = self.context.socket(zmq.PAIR)
        self.command.bind("inproc://part_" + self.name + '_control')
        self.thread = ComponentThread(self)
        self.thread.daemon = True
        self.thread.start()
        # Brief pause so the thread can connect before the first command.
        time.sleep(0.001)
        self.sendCommand(Component.SETUP)
        _ack = self.recvResp()

    def finalize(self):
        '''
        Execute the 'finalize' phase of component initialization by instructing
        the component thread.
        '''
        self.logger.info('Instance.finalize(%s: %s)',self.name,self.type.name)
        self.sendCommand(Component.FINALIZE)
        _ack = self.recvResp()

    def start(self):
        '''
        Instruct the component thread to run user code.
        '''
        self.logger.info('Instance.start(%s: %s)',self.name,self.type.name)
        self.sendCommand(Component.START)
        _ack = self.recvResp()

    def stop(self):
        '''
        Instruct the component thread to stop running user code and terminate.
        '''
        self.logger.info('Instance.stop(%s: %s)',self.name,self.type.name)
        self.sendCommand(Component.STOP)
        _ack = self.recvResp()
| 31.150289 | 86 | 0.595843 |
acf8e1f95a9bbcc74144c7690915afdc26320e6e | 3,609 | py | Python | linkml_runtime/utils/context_utils.py | dalito/linkml-runtime | 192a33962aed06f727ffad1a697003ac6ec85c2c | [
"CC0-1.0"
] | null | null | null | linkml_runtime/utils/context_utils.py | dalito/linkml-runtime | 192a33962aed06f727ffad1a697003ac6ec85c2c | [
"CC0-1.0"
] | null | null | null | linkml_runtime/utils/context_utils.py | dalito/linkml-runtime | 192a33962aed06f727ffad1a697003ac6ec85c2c | [
"CC0-1.0"
] | null | null | null | import json
import os
from io import TextIOWrapper
from typing import Optional, Union, List, Any, Dict, Callable
import yaml
from jsonasobj2 import JsonObj, loads
CONTEXT_TYPE = Union[str, dict, JsonObj]
CONTEXTS_PARAM_TYPE = Optional[Union[CONTEXT_TYPE, List[CONTEXT_TYPE]]]
def merge_contexts(contexts: CONTEXTS_PARAM_TYPE = None, base: Optional[Any] = None) -> JsonObj:
    """ Aggregate JSON-LD contexts into a single object suitable for tacking
    onto the end of any JSON object for conversion into RDF.

    Each entry of ``contexts`` may be a JSON-LD file name, a JSON-LD file URI,
    raw JSON-LD text, a JsonObj or a plain dict. The ``base`` is appended last
    because @base is ignored in imported and nested contexts -- it must be at
    the root of the object itself.

    :param contexts: Ordered list of contexts to add
    :param base: base to add in (optional)
    :return: aggregated context
    """
    def unwrap(node):
        # Strip a wrapping {"@context": ...} layer when present.
        if isinstance(node, JsonObj) and '@context' in node:
            return node['@context']
        return node

    if contexts is None:
        raw_entries = []
    elif isinstance(contexts, (list, tuple, set)):
        raw_entries = list(contexts)
    else:
        raw_entries = [contexts]

    merged = []
    for context in raw_entries:
        if isinstance(context, str):
            # One of filename, URL or json text
            if context.strip().startswith("{"):
                context = loads(context)
            elif '://' not in context:
                context = 'file://' + context
        elif not isinstance(context, (JsonObj, str)):
            context = JsonObj(**context)  # dict
        merged.append(unwrap(context))
    if base:
        merged.append(JsonObj(**{'@base': str(base)}))
    if not merged:
        return None
    return JsonObj(**{"@context": merged[0] if len(merged) == 1 else merged})
def map_import(importmap: Dict[str, str], namespaces: Callable[[None], "Namespaces"], imp: Any) -> str:
    """Resolve *imp* through the import map, expanding CURIE prefixes and
    (when still a CURIE) converting to a URI via *namespaces*."""
    name = str(imp)
    if ':' not in name:
        return importmap.get(name, name)
    prefix, local = name.split(':', 1)
    prefix = prefix + ':'
    # The map may remap the prefix itself, then the resulting CURIE.
    name = importmap.get(prefix, prefix) + local
    name = importmap.get(name, name)
    if ':' in name:
        name = str(namespaces().uri_for(name))
    # It may also use the URI or other expanded forms.
    return importmap.get(name, name)
def parse_import_map(map_: Optional[Union[str, Dict[str, str], TextIOWrapper]],
                     base: Optional[str] = None) -> Dict[str, str]:
    """
    Parse an import map supplied as a dict, JSON text, YAML text, an open
    file or a file name.

    :param map_: A map location, the JSON for a map, YAML for a map or an existing dictionary
    :param base: Base location to turn relative locations into absolute
    :return: Import map
    """
    if map_ is None:
        parsed = dict()
    elif isinstance(map_, TextIOWrapper):
        map_.seek(0)
        return parse_import_map(map_.read(), base)
    elif isinstance(map_, dict):
        parsed = map_
    elif map_.strip().startswith('{'):
        parsed = json.loads(map_)
    elif '\n' in map_ or '\r' in map_ or ' ' in map_:
        parsed = yaml.safe_load(map_)
    else:
        # Anything else is a file name; recurse with its directory as base.
        with open(map_) as ml:
            return parse_import_map(ml.read(), os.path.dirname(map_))
    if not base:
        return parsed
    # Absolutize any relative (non-CURIE/URI) values against the base.
    absolute = dict()
    for key, value in parsed.items():
        if ':' not in value:
            value = os.path.join(os.path.abspath(base), value)
        absolute[key] = value
    return absolute
acf8e280065f61b703eb570bef500fbbeecfa15c | 88 | py | Python | collection_modules/btleCollectionPoint/libs/__init__.py | maxakuru/SimpleSensor | 655d10ebed5eddb892d036012cb12ccd6b460d2d | [
"Apache-2.0"
] | null | null | null | collection_modules/btleCollectionPoint/libs/__init__.py | maxakuru/SimpleSensor | 655d10ebed5eddb892d036012cb12ccd6b460d2d | [
"Apache-2.0"
] | null | null | null | collection_modules/btleCollectionPoint/libs/__init__.py | maxakuru/SimpleSensor | 655d10ebed5eddb892d036012cb12ccd6b460d2d | [
"Apache-2.0"
] | null | null | null | from bglib import BGAPIEvent
from bglib import BGAPIEventHandler
from bglib import BGLib | 29.333333 | 35 | 0.875 |
acf8e2895bedf99370d2a3e11a0dc7b52e4a1191 | 22,177 | py | Python | scripts/diff_rulekeys.py | bdd/buck-test | 9158e77c76997333f05e666bd1cbdf34027c3b16 | [
"Apache-2.0"
] | null | null | null | scripts/diff_rulekeys.py | bdd/buck-test | 9158e77c76997333f05e666bd1cbdf34027c3b16 | [
"Apache-2.0"
] | null | null | null | scripts/diff_rulekeys.py | bdd/buck-test | 9158e77c76997333f05e666bd1cbdf34027c3b16 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import collections
import hashlib
import codecs
import itertools
import os
import re
import sys
RULE_LINE_REGEX = re.compile(r'.*(\[[^\]+]\])*\s+RuleKey\s+(.*)')
INVOCATION_LINE_REGEX = re.compile(r'.*(\[[^\]+]\])*\s+InvocationInfo\s+(.*)')
INVOCATION_VALUE_REGEX = re.compile(r'(\w+)=\[([^]]*)\]')
PATH_VALUE_REGEX = re.compile(r'path\(([^:]+):\w+\)')
LOGGER_NAME = 'com.facebook.buck.rules.keys.RuleKeyBuilder'
TAG_NAME = 'RuleKey'
def parseArgs():
    """Build the CLI parser and return the parsed arguments; with no argv
    entries at all, print the full help text and exit."""
    description = """Analyze RuleKey differences between two builds.
To use this tool you need to enable RuleKey logging in your build first by
adding the appropriate entry to the .bucklogging.properties file:
> echo '""" + LOGGER_NAME + """.level=FINER' > .bucklogging.properties
You would then perform two builds: a 'before' and 'after' build that you wish
to compare. Ideally these would differ by the minimal amount possible to
reproduce the issue you're investigating (so for example compile the same
target from the same source revision across different machines).
Finally you would invoke this tool on the two log files obtained this way along
with a build_target that you'd like to analyze (it's fine if this is simply
the top-level target you've built.
Make sure to do a 'buck kill', as the changes are JVM settings. Once you're done,
you may want to undo the .bucklogging.properties changes, otherwise builds will be
slower due to the extra logging.
"""
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('left_log', help='buck.log file to look at first.')
    parser.add_argument('right_log', help='buck.log file to look at second.')
    parser.add_argument('build_target', nargs='?',
                        help='Name of the RuleKey that you want to analyze')
    parser.add_argument('--verbose', action='store_true', default=False,
                        help='Verbose mode')
    # Print full help message if we're invoked with no arguments (otherwise
    # you get the shortened 1 line 'usage' message.
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    return parser.parse_args()
class KeyValueDiff(object):
    """Accumulates (left, right) value pairs for a single rule key property
    and renders a human-readable diff of them via diff()."""

    ORDER_ONLY = "Only order of entries differs: [{left}] vs [{right}]."
    ORDER_ONLY_REMAINING = ("Only order of remaining entries differs: " +
                            "[{left}] vs [{right}].")
    ORDER_REPS_REMAINING = ("Order and repetition count of remaining " +
                            "entries differs: [{left}] vs [{right}].")
    ORDER_AND_CASE_ONLY = ("Only order and letter casing (Upper Case vs " +
                           "lower case) of entries differs:")

    def __init__(self, left_format=None, right_format=None):
        """left_format/right_format are %-style templates for removed/added
        entries; default to '-[%s]' / '+[%s]'."""
        self._left = []
        self._right = []
        self._left_format = left_format or '-[%s]'
        self._right_format = right_format or '+[%s]'
        self._interesting_paths = set()

    def append(self, left, right):
        """Record one aligned (left, right) value pair."""
        self._left.append(left)
        self._right.append(right)

    def getInterestingPaths(self):
        """Paths referenced by values seen only on one side (see diff())."""
        return self._interesting_paths

    def _filterPathValue(self, value):
        # Extract the file path from a 'path(<path>:<hash>)' rule key value.
        match = PATH_VALUE_REGEX.search(value)
        if match:
            return match.groups()[0]
        else:
            return None

    def diff(self):
        """Return a list of report lines describing how the two value lists
        differ (order-only / case-only / added / removed / reordered)."""
        if self._left == self._right:
            return ['No changes']
        if sorted(self._left) == sorted(self._right):
            return [KeyValueDiff.ORDER_ONLY.format(
                left=', '.join(self._left),
                right=', '.join(self._right))]
        left_lower_index = dict([(v.lower(), v) for v in self._left])
        right_lower_index = dict([(v.lower(), v) for v in self._right])
        if set(left_lower_index.keys()) == set(right_lower_index.keys()):
            differences = []
            for k in sorted(left_lower_index.keys()):
                if left_lower_index[k] != right_lower_index[k]:
                    differences.append(
                        self._left_format % left_lower_index[k])
                    differences.append(
                        self._right_format % right_lower_index[k])
            return ([KeyValueDiff.ORDER_AND_CASE_ONLY] + differences)
        left_set = set(self._left)
        right_set = set(self._right)
        left_only = left_set.difference(right_set)
        right_only = right_set.difference(left_set)
        left_common = filter(lambda i: i not in left_only, self._left)
        right_common = filter(lambda i: i not in right_only, self._right)
        left_not_in_order = []
        right_not_in_order = []
        # BUG FIX: 'map(None, a, b)' is a Python-2-only zip-with-padding
        # idiom and raises TypeError on Python 3; use zip_longest (named
        # izip_longest on Python 2) instead.
        zip_longest = getattr(itertools, 'zip_longest',
                              getattr(itertools, 'izip_longest', None))
        for l, r in zip_longest(left_common, right_common):
            if l == r:
                continue
            if l is not None:
                left_not_in_order.append(l)
            if r is not None:
                right_not_in_order.append(r)
        # Remember file paths from one-sided values so the caller can report
        # on their on-disk state.
        self._interesting_paths.update(
            filter(None,
                   map(self._filterPathValue,
                       left_only.union(right_only))))
        result = [self._left_format % v for v in sorted(left_only)]
        result.extend([self._right_format % v for v in sorted(right_only)])
        if len(left_not_in_order) > 0:
            format_string = KeyValueDiff.ORDER_REPS_REMAINING
            if len(left_not_in_order) == len(right_not_in_order):
                format_string = KeyValueDiff.ORDER_ONLY_REMAINING
            result.append(format_string.format(
                left=', '.join(left_not_in_order),
                right=', '.join(right_not_in_order)))
        return result
class RuleKeyStructureInfo(object):
    """Parses a buck.log (or pre-parsed entries) and indexes rule key
    structures both by rule key hash and by target name."""

    def __init__(self, buck_out, entries_for_test=None):
        """
        buck_out -- path to a buck.log file or an open file object.
        entries_for_test -- pre-parsed (key, structure) entries, bypassing
                            log parsing (used by tests).
        """
        if entries_for_test is not None:
            # BUG FIX: the original assigned self._entries here and then fell
            # through to unpack the never-bound 'parsed_log' local, raising
            # NameError whenever entries_for_test was supplied. There is no
            # invocation info in this mode.
            parsed_log = (entries_for_test, {})
        else:
            # BUG FIX: 'basestring' only exists on Python 2; fall back to
            # 'str' so the class also works on Python 3.
            try:
                string_types = basestring
            except NameError:
                string_types = str
            if isinstance(buck_out, string_types):
                parsed_log = RuleKeyStructureInfo._parseBuckOut(buck_out)
            else:
                parsed_log = RuleKeyStructureInfo._parseLogFile(buck_out)
        self._entries, self._invocation_info = parsed_log
        self._key_to_struct = RuleKeyStructureInfo._makeKeyToStructureMap(
            self._entries)
        self._name_to_key = RuleKeyStructureInfo._makeNameToKeyMap(
            self._entries)

    def getInvocationInfo(self, key):
        """Return a value from the InvocationInfo log line, '<unknown>' if absent."""
        return self._invocation_info.get(key, '<unknown>')

    def getByKey(self, key):
        """Return the structure dict for a rule key hash, or None."""
        return self._key_to_struct.get(key)

    def getKeyForName(self, name):
        """Return the rule key hash for a target name, or None."""
        return self._name_to_key.get(name)

    def getNameToKeyMap(self):
        """Return the full target-name -> rule-key-hash map."""
        return self._name_to_key

    def getNameForKey(self, key):
        """Return the target name recorded in the structure for 'key', or None."""
        struct = self.getByKey(key)
        if struct is None:
            return None
        return RuleKeyStructureInfo._nameFromStruct(struct)

    def getByName(self, name):
        """Return the structure dict for a target name, or None."""
        key = self._name_to_key.get(name)
        if key is None:
            return None
        return self.getByKey(key)

    def getAllNames(self):
        """Return the names of all entries that carry a target name."""
        names = []
        for e in self._entries:
            top_key, structure = e
            name = self.getNameForKey(top_key)
            if name is not None:
                names.append(name)
        return names

    def getRuleKeyRefs(self, values):
        """Pair each value with the rule key hash it references (or None)."""
        return [
            (value, RuleKeyStructureInfo._extractRuleKeyRef(value))
            for value in values
        ]

    def size(self):
        """Number of parsed rule key entries."""
        return len(self._entries)

    @staticmethod
    def _nameFromStruct(structure):
        # A named rule carries both '.name' and '.type'; the name value is
        # encoded as string("<target>").
        name = None
        if '.name' in structure and '.type' in structure:
            name = list(structure['.name'])[0]
            if name.startswith('string("'):
                name = name[8:-2]
        return name

    @staticmethod
    def _makeKeyToStructureMap(entries):
        result = {}
        for e in entries:
            top_key, structure = e
            if top_key in result:
                # The same key must always map to the same structure.
                assert structure == result[top_key]
            result[top_key] = structure
        return result

    @staticmethod
    def _makeNameToKeyMap(entries):
        result = {}
        for e in entries:
            top_key, structure = e
            name = RuleKeyStructureInfo._nameFromStruct(structure)
            if name is None:
                continue
            result[name] = top_key
        return result

    @staticmethod
    def _parseRuleKeyLine(match):
        """Parse one 'RuleKey <hash>=<structure>' log match into
        (hash, OrderedDict of property -> list of values)."""
        rule_key = match.groups()[1]
        if rule_key.endswith('='):
            return (rule_key[:-1], {})
        top_key, structure = rule_key.split('=', 1)
        # Because BuildTargets have ':' in them we can't just split on that
        # character. We know that all values take the form name(..):name(..):
        # so we can cheat and split on ): instead
        structure_entries = structure.split('):')
        structure_entries = [e + ')' for e in structure_entries if len(e) > 0]
        structure_map = collections.OrderedDict()
        last_key = None

        def appendValue(m, key, val):
            if key in m:
                m[key].append(val)
            else:
                m[key] = [val]

        # Entries are serialized in reverse: each key(...) token names the
        # values that precede it, so walk the list backwards.
        for e in reversed(structure_entries):
            if len(e) == 0:
                continue
            elif e.startswith('key('):
                last_key = e[4:-1]
            else:
                appendValue(structure_map, last_key, e)
        return (top_key, structure_map)

    @staticmethod
    def _parseLogFile(buck_out):
        """Scan an open log file for RuleKey and InvocationInfo lines."""
        rule_key_structures = []
        invocation_info_line = None
        for line in buck_out.readlines():
            if invocation_info_line is None:
                invocation_match = INVOCATION_LINE_REGEX.match(line)
                if invocation_match is not None:
                    invocation_info_line = invocation_match.groups()[1]
            match = RULE_LINE_REGEX.match(line)
            if match is None:
                continue
            parsed_line = RuleKeyStructureInfo._parseRuleKeyLine(match)
            rule_key_structures.append(parsed_line)
        invocation_info = {}
        if invocation_info_line is not None:
            invocation_info = dict(
                INVOCATION_VALUE_REGEX.findall(invocation_info_line))
        return (rule_key_structures, invocation_info)

    @staticmethod
    def _parseBuckOut(file_path):
        """Open a log file by path (UTF-8) and parse it."""
        with codecs.open(file_path, 'r', 'utf-8') as buck_out:
            return RuleKeyStructureInfo._parseLogFile(buck_out)

    @staticmethod
    def _extractRuleKeyRef(value):
        """Return the hash inside a 'ruleKey(sha1=<hash>)' value, else None."""
        RULE_KEY_REF_START = 'ruleKey(sha1='
        RULE_KEY_REF_END = ')'

        def isRuleKeyRef(value):
            return (value.endswith(RULE_KEY_REF_END) and
                    value.startswith(RULE_KEY_REF_START))
        if not isRuleKeyRef(value):
            return None
        rk = value[len(RULE_KEY_REF_START):-len(RULE_KEY_REF_END)]
        return rk
def reportOnInterestingPaths(paths):
    """Return one status line per path: whether it exists, whether it is a
    regular file, and its SHA1 digest when it is."""
    lines = []
    for path in paths:
        if not os.path.exists(path):
            lines.append(' %s does not exist' % path)
            continue
        try:
            if not os.path.isfile(path):
                lines.append(' %s is not a file' % (path))
            else:
                digest = hashlib.sha1()
                with open(path, 'rb') as f:
                    # Hash in 128 KiB chunks to avoid loading big files whole.
                    while True:
                        buf = f.read(128 * 1024)
                        if len(buf) == 0:
                            break
                        digest.update(buf)
                lines.append(
                    ' %s exists and hashes to %s' %
                    (path, digest.hexdigest()))
        except Exception as e:
            # Best effort: surface the error rather than abort the report.
            lines.append(' %s error hashing: %s' % (path, e))
    return lines
def diffInternal(
        label,
        left_s,
        left_info,
        right_s,
        right_info,
        verbose,
        format_tuple,
        check_paths):
    """Diff two rule key structures property by property.

    Returns (report_lines, changed_key_pairs_with_labels) where the second
    element is the set of referenced (left_key, right_key) pairs that differ
    and should be diffed recursively by the caller.
    """
    keys = set(left_s.keys()).union(set(right_s.keys()))
    changed_key_pairs_with_labels = set()
    changed_key_pairs_with_values = collections.defaultdict(
        lambda: KeyValueDiff(format_tuple[0], format_tuple[1]))
    changed_key_pairs_with_labels_for_key = set()
    interesting_paths = set()
    report = []
    # BUG FIX: 'map(None, a, b)' is Python-2-only; zip_longest (izip_longest
    # on Python 2) provides the same None-padded pairing on both versions.
    zip_longest = getattr(itertools, 'zip_longest',
                          getattr(itertools, 'izip_longest', None))
    for key in keys:
        if key is None:
            continue
        left_values = left_s.get(key, set([]))
        right_values = right_s.get(key, set([]))
        left_with_keys = left_info.getRuleKeyRefs(left_values)
        right_with_keys = right_info.getRuleKeyRefs(right_values)
        did_align_for_deps = False
        if key.endswith(('Deps', 'deps')):
            # Dep lists are often merely reordered; align right-side entries
            # to the left side by target name to cut down on noise.
            for left_idx, (left_v, left_key) in enumerate(left_with_keys):
                left_name = left_info.getNameForKey(left_key)
                if left_name is None or left_idx >= len(right_with_keys):
                    continue
                right_idx = None
                for j, (right_v, right_key) in enumerate(right_with_keys):
                    if right_info.getNameForKey(right_key) == left_name:
                        right_idx = j
                        break
                if right_idx is None:
                    continue
                if right_idx != left_idx:
                    swap_entries_in_list(right_with_keys, right_idx, left_idx)
                    did_align_for_deps = True
        if did_align_for_deps:
            report.append(' (' + key + '): order of deps was name-aligned.')
        both_with_keys = zip_longest(left_with_keys, right_with_keys)
        for l, r in both_with_keys:
            (left_v, left_key) = l or ('<missing>', None)
            (right_v, right_key) = r or ('<missing>', None)
            if left_v == right_v:
                continue
            left_name = None
            right_name = None
            if left_key is not None:
                left_name = left_info.getNameForKey(left_key)
            if right_key is not None:
                right_name = right_info.getNameForKey(right_key)
            if left_key is not None and right_key is not None:
                # Same named rule on both sides: recurse rather than report.
                if left_name == right_name and left_name is not None:
                    changed_key_pairs_with_labels_for_key.add(
                        (left_name, (left_key, right_key)))
                    continue
                if (left_name is None and right_name is None and
                        (left_info.getByKey(left_key).keys() ==
                         right_info.getByKey(right_key).keys())):
                    # Assume that if the set of keys in the structure of the
                    # referenced RuleKey matches then it's the same "thing".
                    # The names need to empty, otherwise we'll end up
                    # comparing BuildRules for different targets.
                    q_label = label + '->' + key
                    changed_key_pairs_with_labels_for_key.add(
                        (q_label, (left_key, right_key)))
                    continue
            if left_name:
                left_v = '"%s"@%s' % (left_name, left_v)
            if right_name:
                right_v = '"%s"@%s' % (right_name, right_v)
            changed_key_pairs_with_values[key].append(left_v, right_v)
    # Report value-level differences per property, in sorted key order.
    for key in sorted(changed_key_pairs_with_values.keys()):
        value_pairs = changed_key_pairs_with_values[key]
        report.append(' (' + key + '):')
        report.extend([' ' + l for l in value_pairs.diff()])
        interesting_paths.update(value_pairs.getInterestingPaths())
    changed_key_pairs_with_labels.update(
        changed_key_pairs_with_labels_for_key)
    if len(changed_key_pairs_with_labels_for_key) > 0:
        changed_labels = [l for (l, _) in
                          changed_key_pairs_with_labels_for_key]
        if verbose:
            report.append(' (' + key + ') : changed because of ' +
                          ','.join(sorted(changed_labels)))
    if check_paths and len(interesting_paths) > 0:
        report.append('Information on paths the script has seen:')
        report.extend(reportOnInterestingPaths(interesting_paths))
    if len(report) > 0:
        report = ['Change details for [' + label + ']'] + report
    return (report, changed_key_pairs_with_labels)
def diffAndReturnSeen(starting_refs, left_info, right_info, verbose,
                      format_tuple, check_paths, seen_keys):
    """Breadth-first walk over changed rule key pairs starting from
    starting_refs; returns (report_lines, visited_key_pairs)."""
    pending = collections.deque(starting_refs)
    report_lines = []
    visited_pairs = []
    while pending:
        label, key_pair = pending.popleft()
        (left_key, right_key) = key_pair
        visited_pairs.append(key_pair)
        report, changed = diffInternal(
            label,
            left_info.getByKey(left_key),
            left_info,
            right_info.getByKey(right_key),
            right_info,
            verbose,
            format_tuple or (None, None),
            check_paths)
        # Queue each newly-discovered changed pair exactly once.
        for entry in sorted(changed):
            _, child_pair = entry
            if child_pair in seen_keys:
                continue
            seen_keys.add(child_pair)
            pending.append(entry)
        report_lines += report
    return (report_lines, visited_pairs)
def diff(name, left_info, right_info, verbose, format_tuple=None,
         check_paths=False):
    """Diff a single named build target between the two parsed logs and
    return the report lines."""
    left_key = left_info.getKeyForName(name)
    right_key = right_info.getKeyForName(name)
    if left_key is None:
        raise KeyError('Left log does not contain ' + name)
    if right_key is None:
        raise KeyError('Right log does not contain ' + name)
    initial = [(name, (left_key, right_key))]
    report, _ = diffAndReturnSeen(initial, left_info, right_info, verbose,
                                  format_tuple, check_paths, set())
    # Keys differ but nothing reportable was found: say so explicitly.
    if not report and left_key != right_key:
        report.append("I don't know why RuleKeys for {} do not match.".format(
            name))
    return report
def diffAll(left_info, right_info, verbose, format_tuple=None,
            check_paths=False):
    """Diff every named target found in the left log against the right log,
    skipping targets already visited by an earlier walk.

    Returns the accumulated report lines.
    """
    # Ghetto ordered set implementation.
    seen_left_names = collections.OrderedDict(
        [(k, True) for k in left_info.getAllNames()])
    all_seen = set()
    all_results = []
    while seen_left_names:
        name, _ = seen_left_names.popitem()
        if name is None:
            continue
        left_key = left_info.getKeyForName(name)
        if left_key is None:
            # BUG FIX: the original applied .format() to the second string
            # fragment only ('...' + '...'.format(name)), leaving a literal
            # '{}' and no space in the emitted message.
            all_results.append(
                'Skipping {} because it is missing from the left log.'.format(
                    name))
            continue
        right_key = right_info.getKeyForName(name)
        if right_key is None:
            all_results.append(
                'Skipping {} because it is missing from the right log.'.format(
                    name))
            continue
        if left_key == right_key:
            continue
        print('Analyzing', name, 'for changes...')
        single_result, visited_keys = diffAndReturnSeen(
            [(name, (left_key, right_key))], left_info, right_info,
            verbose, format_tuple, check_paths, all_seen)
        if not single_result and left_key != right_key:
            single_result.append(
                "I don't know why RuleKeys for {} do not match.".format(name))
        all_results.extend(single_result)
        # Drop every name the walk already covered so it is not re-analyzed.
        # (Unused 'all_seen_before' and 'deleted_names' locals removed.)
        for visited_left_key, _ in visited_keys:
            seen_left_names.pop(
                left_info.getNameForKey(visited_left_key), False)
    return all_results
def compute_rulekey_mismatches(left_info, right_info, left_name='left',
                               right_name='right'):
    """Return a list describing every rule-key mismatch between two logs.

    A rule mismatches when it is missing from one log or its keys differ.

    Args:
        left_info, right_info: objects exposing getNameToKeyMap().
        left_name, right_name: labels used in the report strings.

    Returns:
        A list of human-readable mismatch strings, sorted by rule name.
    """
    left_name_to_key = left_info.getNameToKeyMap()
    right_name_to_key = right_info.getNameToKeyMap()
    left_names = set(left_name_to_key.keys())
    right_names = set(right_name_to_key.keys())
    mismatch = []
    # FIX: iterate in sorted order -- plain set iteration made the report
    # order nondeterministic from run to run.
    for name in sorted(left_names.union(right_names)):
        left_key = left_name_to_key.get(name)
        right_key = right_name_to_key.get(name)
        if left_key is None:
            mismatch.append('{} missing from {}'.format(name, left_name))
        elif right_key is None:
            mismatch.append('{} missing from {}'.format(name, right_name))
        elif left_key != right_key:
            mismatch.append(
                '{} {}:{} != {}:{}'.format(name, left_name, left_key,
                                           right_name, right_key))
    return mismatch
def swap_entries_in_list(l, i, j):
    """Exchange the elements of list *l* at indices *i* and *j* in place."""
    tmp = l[i]
    l[i] = l[j]
    l[j] = tmp
def main():
    """Entry point: load both rule-key logs and print their differences.

    When a build target is given only that rule is diffed; otherwise every
    mismatching rule from the left log is analyzed via diffAll.
    """
    # parseArgs is defined elsewhere in this script -- presumably argparse
    # based, yielding left_log/right_log/build_target/verbose attributes.
    args = parseArgs()
    if not os.path.exists(args.left_log):
        raise Exception(args.left_log + ' does not exist')
    print('Loading', args.left_log)
    left = RuleKeyStructureInfo(args.left_log)
    print('Loaded', left.size(), 'rules')
    if not os.path.exists(args.right_log):
        raise Exception(args.right_log + ' does not exist')
    print('Loading', args.right_log)
    right = RuleKeyStructureInfo(args.right_log)
    print('Loaded', right.size(), 'rules')
    print('Comparing rules...')
    name = args.build_target
    if name:
        # Single-target mode: fail fast with a helpful message if either
        # log lacks the target.
        left_key = left.getKeyForName(name)
        right_key = right.getKeyForName(name)
        if left_key is None:
            raise KeyError(('Left log does not contain {}. Did you forget ' +
                            'to enable logging? (see help).').format(name))
        if right_key is None:
            raise KeyError(('Right log does not contain {}. Did you forget ' +
                            'to enable logging? (see help).').format(name))
        print('\n'.join(diff(name, left, right, args.verbose)))
    else:
        # Whole-log mode: warn when the two builds were invoked with
        # different command lines, which can create spurious diffs.
        left_args = left.getInvocationInfo('Args')
        right_args = right.getInvocationInfo('Args')
        if left_args != right_args:
            print('Commands used to generate the logs are not identical: [',
                  left_args, '] vs [', right_args, ']. This might cause ' +
                  'spurious differences to be listed.')
        print('\n'.join(diffAll(left, right, args.verbose)))
if __name__ == '__main__':
main()
| 36.961667 | 82 | 0.590116 |
acf8e34748f381fa4f017c03f39abdea5313a6e9 | 1,618 | py | Python | lightbike/tests/test_trail.py | ethancharles02/cse210-project | 280b67ae69e84a334b807232c208a4ca4d27c37b | [
"MIT"
] | null | null | null | lightbike/tests/test_trail.py | ethancharles02/cse210-project | 280b67ae69e84a334b807232c208a4ca4d27c37b | [
"MIT"
] | null | null | null | lightbike/tests/test_trail.py | ethancharles02/cse210-project | 280b67ae69e84a334b807232c208a4ca4d27c37b | [
"MIT"
] | null | null | null | import pytest
from arcade import SpriteList, Sprite
from data.trail import Trail
def test_set_point_list():
    """set_point_list should store and return the points as given."""
    trail = Trail()
    point_list = (0, 0), (0, 1), (0, 2)
    trail.set_point_list(point_list)
    # NOTE(review): expects a tuple here, while the add_* tests below expect
    # a list -- implies set_point_list stores the sequence as passed; confirm.
    assert trail.get_point_list() == ((0, 0), (0, 1), (0, 2))
def test_add_point():
    """Adding points one at a time should accumulate them in order."""
    trail = Trail()
    point = (0, 0)
    trail.add_point(point)
    point = (0, 1)
    trail.add_point(point)
    point = (0, 2)
    trail.add_point(point)
    assert trail.get_point_list() == [(0, 0), (0, 1), (0, 2)]
def test_add_point_list():
    """add_point_list should append every point of the given sequence."""
    trail = Trail()
    point_list = (0, 0), (0, 1), (0, 2)
    trail.add_point_list(point_list)
    assert trail.get_point_list() == [(0, 0), (0, 1), (0, 2)]
def test_set_sprite_list():
    """set_sprite_list should store the SpriteList with all its sprites."""
    trail = Trail()
    sprite_list = SpriteList()
    sprite_list.append(Sprite())
    sprite_list.append(Sprite())
    sprite_list.append(Sprite())
    trail.set_sprite_list(sprite_list)
    assert type(trail.get_sprite_list()) == SpriteList
    assert len(trail.get_sprite_list()) == 3
def test_add_sprite():
    """add_sprite should append one sprite to the stored SpriteList."""
    trail = Trail()
    sprite_list = SpriteList()
    trail.set_sprite_list(sprite_list)
    trail.add_sprite(Sprite())
    assert type(trail.get_sprite_list()) == SpriteList
    assert len(trail.get_sprite_list()) == 1
def test_update_temp_list():
    """update_temp_list should build sprites from the point list."""
    trail = Trail()
    sprite_list = SpriteList()
    point_list = [(0, 0), (0, 1)]
    trail.set_sprite_list(sprite_list)
    trail.update_temp_list(point_list)
    assert type(trail.get_sprite_list()) == SpriteList
    # NOTE(review): two points yield one sprite -- presumably one segment
    # per consecutive pair of points; verify against Trail's implementation.
    assert len(trail.get_sprite_list()) == 1
# pytest.main(["-v", "--tb=no", "test_trail.py"]) | 26.966667 | 61 | 0.644623 |
acf8e4c2545a7734e25e374cf488a03acafeb4f6 | 2,665 | py | Python | showers/pi/tracker1.py | Playaowl/artworks | bfe2abc844851ce054e1233261364a502cd30561 | [
"MIT"
] | 1 | 2020-08-14T01:03:47.000Z | 2020-08-14T01:03:47.000Z | showers/pi/tracker1.py | Playaowl/artworks | bfe2abc844851ce054e1233261364a502cd30561 | [
"MIT"
] | null | null | null | showers/pi/tracker1.py | Playaowl/artworks | bfe2abc844851ce054e1233261364a502cd30561 | [
"MIT"
] | null | null | null | ''' track_yellow_draw_line.py
This program just track a yellow object in front of camera and draws a yellow line according to movement of the object.
Written by Abid.K --mail me at abidrahman2@gmail.com '''
################################################################################################
import cv
posx=0
posy=0
def getthresholdedimg(im):
	'''this function take RGB image.Then convert it into HSV for easy colour detection and threshold it with yellow part as white and all other regions as black.Then return that image'''
	imghsv=cv.CreateImage(cv.GetSize(im),8,3)
	cv.CvtColor(im,imghsv,cv.CV_BGR2HSV)  # Convert image from RGB to HSV
	imgthreshold=cv.CreateImage(cv.GetSize(im),8,1)
	# HSV hue range 20-30 with high saturation/value selects yellow pixels.
	cv.InRangeS(imghsv,cv.Scalar(20,100,100),cv.Scalar(30,255,255),imgthreshold)  # Select a range of yellow color
	return imgthreshold
def getpositions(im):
	''' this function returns leftmost,rightmost,topmost and bottommost values of the white blob in the thresholded image'''
	leftmost=0
	rightmost=0
	topmost=0
	bottommost=0
	# temp is a tiny state machine: 0 = no white column seen yet,
	# 1 = leftmost column recorded, 2 = topmost row recorded.
	temp=0
	# Scan columns left-to-right: the first non-black column is leftmost,
	# the last one keeps overwriting rightmost.
	for i in range(im.width):
		col=cv.GetCol(im,i)
		if cv.Sum(col)[0]!=0.0:
			rightmost=i
			if temp==0:
				leftmost=i
				temp=1
	# Scan rows top-to-bottom the same way for topmost/bottommost.
	for i in range(im.height):
		row=cv.GetRow(im,i)
		if cv.Sum(row)[0]!=0.0:
			bottommost=i
			if temp==1:
				topmost=i
				temp=2
	# All zeros if the image contains no white pixels at all.
	return (leftmost,rightmost,topmost,bottommost)
# Main loop: grab frames from camera index 2, threshold for yellow, and
# draw the tracked object's path onto a persistent canvas ("test").
capture=cv.CaptureFromCAM(2)
frame=cv.QueryFrame(capture)
test=cv.CreateImage(cv.GetSize(frame),8,3)
cv.NamedWindow("output")
while(1):
	frame=cv.QueryFrame(capture)
	cv.Flip(frame,frame,1)  # mirror the frame so movement feels natural
	imdraw=cv.CreateImage(cv.GetSize(frame),8,3)  # we make all drawings on imdraw.
	imgyellowthresh=getthresholdedimg(frame)  # we get coordinates from imgyellowthresh
	cv.Erode(imgyellowthresh,imgyellowthresh,None,1)  # eroding removes small noises
	(leftmost,rightmost,topmost,bottommost)=getpositions(imgyellowthresh)
	# A blob was found only if its bounding box has nonzero extent.
	if (leftmost-rightmost!=0) or (topmost-bottommost!=0):
		lastx=posx
		lasty=posy
		# Blob center = midpoint of the bounding box.
		posx=cv.Round((rightmost+leftmost)/2)
		posy=cv.Round((bottommost+topmost)/2)
		if lastx!=0 and lasty!=0:
			cv.Line(imdraw,(posx,posy),(lastx,lasty),(0,255,255))
			cv.Circle(imdraw,(posx,posy),5,(0,255,255),-1)
	cv.Add(test,imdraw,test)  # accumulate each frame's drawing onto the persistent canvas
	cv.ShowImage("output",test)
	if cv.WaitKey(33)==1048603:  # exit if Esc key is pressed
		break
cv.DestroyWindow("output")  # releasing window
#######################################################################################################
## Please try mouse_callback.py and then read pick_and_track.py
| 37.013889 | 184 | 0.685178 |
acf8e4ca9e75df354979973a8a7b95011f6c19b7 | 150 | py | Python | lewis_emulators/tekafg3XXX/__init__.py | ISISComputingGroup/EPICS-DeviceEmulator | 026c2a14a16bb204ea7527e3765daa182cafa814 | [
"BSD-3-Clause"
] | 2 | 2020-10-20T16:49:13.000Z | 2021-02-19T10:41:44.000Z | lewis_emulators/tekafg3XXX/__init__.py | ISISComputingGroup/EPICS-DeviceEmulator | 026c2a14a16bb204ea7527e3765daa182cafa814 | [
"BSD-3-Clause"
] | 9 | 2019-03-22T15:35:15.000Z | 2021-07-28T11:05:43.000Z | lewis_emulators/tekafg3XXX/__init__.py | ISISComputingGroup/EPICS-DeviceEmulator | 026c2a14a16bb204ea7527e3765daa182cafa814 | [
"BSD-3-Clause"
] | 1 | 2020-10-21T17:02:44.000Z | 2020-10-21T17:02:44.000Z | from .device import SimulatedTekafg3XXX
from ..lewis_versions import LEWIS_LATEST
framework_version = LEWIS_LATEST
__all__ = ['SimulatedTekafg3XXX']
| 25 | 41 | 0.84 |
acf8e52bf64f74688874d4900e2c60327c0539e4 | 7,749 | py | Python | qiskit_finance/circuit/library/probability_distributions/lognormal.py | manoelmarques/qiskit-finance | f8dae0ba4284f6eeb0c7c92132a7279a952ca821 | [
"Apache-2.0"
] | null | null | null | qiskit_finance/circuit/library/probability_distributions/lognormal.py | manoelmarques/qiskit-finance | f8dae0ba4284f6eeb0c7c92132a7279a952ca821 | [
"Apache-2.0"
] | null | null | null | qiskit_finance/circuit/library/probability_distributions/lognormal.py | manoelmarques/qiskit-finance | f8dae0ba4284f6eeb0c7c92132a7279a952ca821 | [
"Apache-2.0"
] | 1 | 2022-02-19T12:42:11.000Z | 2022-02-19T12:42:11.000Z | # This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""The log-normal probability distribution circuit."""
from typing import Tuple, List, Union, Optional
import numpy as np
from qiskit.circuit import QuantumCircuit
from .normal import _check_bounds_valid, _check_dimensions_match
class LogNormalDistribution(QuantumCircuit):
    r"""A circuit to encode a discretized log-normal distribution in qubit amplitudes.
    A random variable :math:`X` is log-normal distributed if
    .. math::
        \log(X) \sim \mathcal{N}(\mu, \sigma^2)
    for a normal distribution :math:`\mathcal{N}(\mu, \sigma^2)`.
    The probability density function of the log-normal distribution is defined as
    .. math::
        \mathbb{P}(X = x) = \frac{1}{x\sqrt{2\pi\sigma^2}} e^{-\frac{(\log(x) - \mu)^2}{2\sigma^2}}
    .. note::
        The parameter ``sigma`` in this class equals the **variance**, :math:`\sigma^2` and not the
        standard deviation. This is for consistency with multivariate distributions, where the
        uppercase sigma, :math:`\Sigma`, is associated with the covariance.
    This circuit considers the discretized version of :math:`X` on ``2 ** num_qubits`` equidistant
    points, :math:`x_i`, truncated to ``bounds``. The action of this circuit can be written as
    .. math::
        \mathcal{P}_X |0\rangle^n = \sum_{i=0}^{2^n - 1} \sqrt{\mathbb{P}(x_i)} |i\rangle
    where :math:`n` is `num_qubits`.
    .. note::
        The circuit loads the **square root** of the probabilities into the qubit amplitudes such
        that the sampling probability, which is the square of the amplitude, equals the
        probability of the distribution.
    This circuit is for example used in amplitude estimation applications, such as finance [1, 2],
    where customer demand or the return of a portfolio could be modeled using a log-normal
    distribution.
    Examples:
        This class can be used for both univariate and multivariate distributions.
        >>> mu = [1, 0.9, 0.2]
        >>> sigma = [[1, -0.2, 0.2], [-0.2, 1, 0.4], [0.2, 0.4, 1]]
        >>> circuit = LogNormalDistribution([2, 2, 2], mu, sigma)
        >>> circuit.num_qubits
        6
    References:
        [1]: Gacon, J., Zoufal, C., & Woerner, S. (2020).
             Quantum-Enhanced Simulation-Based Optimization.
             `arXiv:2005.10780 <http://arxiv.org/abs/2005.10780>`_
        [2]: Woerner, S., & Egger, D. J. (2018).
             Quantum Risk Analysis.
             `arXiv:1806.06893 <http://arxiv.org/abs/1806.06893>`_
    """
    def __init__(
        self,
        num_qubits: Union[int, List[int]],
        mu: Optional[Union[float, List[float]]] = None,
        sigma: Optional[Union[float, List[float]]] = None,
        bounds: Optional[Union[Tuple[float, float], List[Tuple[float, float]]]] = None,
        upto_diag: bool = False,
        name: str = "P(X)",
    ) -> None:
        r"""
        Args:
            num_qubits: The number of qubits used to discretize the random variable. For a 1d
                random variable, ``num_qubits`` is an integer, for multiple dimensions a list
                of integers indicating the number of qubits to use in each dimension.
            mu: The parameter :math:`\mu` of the distribution.
                Can be either a float for a 1d random variable or a list of floats for a higher
                dimensional random variable.
            sigma: The parameter :math:`\sigma^2` or :math:`\Sigma`, which is the variance or
                covariance matrix.
            bounds: The truncation bounds of the distribution as tuples. For multiple dimensions,
                ``bounds`` is a list of tuples ``[(low0, high0), (low1, high1), ...]``.
                If ``None``, the bounds are set to ``(0, 1)`` for each dimension.
            upto_diag: If True, load the square root of the probabilities up to multiplication
                with a diagonal for a more efficient circuit.
            name: The name of the circuit.
        """
        _check_dimensions_match(num_qubits, mu, sigma, bounds)
        _check_bounds_valid(bounds)
        # set default arguments: standard normal parameters, unit bounds
        dim = 1 if isinstance(num_qubits, int) else len(num_qubits)
        if mu is None:
            mu = 0 if dim == 1 else [0] * dim
        if sigma is None:
            sigma = 1 if dim == 1 else np.eye(dim)  # type: ignore[assignment]
        if bounds is None:
            bounds = (0, 1) if dim == 1 else [(0, 1)] * dim
        if isinstance(num_qubits, int):  # univariate case
            inner = QuantumCircuit(num_qubits, name=name)
            x = np.linspace(bounds[0], bounds[1], num=2 ** num_qubits)
        else:  # multivariate case
            inner = QuantumCircuit(sum(num_qubits), name=name)
            # compute the evaluation points using meshgrid of numpy
            # indexing 'ij' yields the "column-based" indexing
            meshgrid = np.meshgrid(
                *[
                    np.linspace(bound[0], bound[1], num=2 ** num_qubits[i])  # type: ignore
                    for i, bound in enumerate(bounds)
                ],
                indexing="ij",
            )
            # flatten into a list of points
            x = list(zip(*[grid.flatten() for grid in meshgrid]))  # type: ignore
        # compute the normalized, truncated probabilities
        probabilities = []
        from scipy.stats import multivariate_normal
        for x_i in x:
            # map probabilities from normal to log-normal reference:
            # https://stats.stackexchange.com/questions/214997/multivariate-log-normal-probabiltiy-density-function-pdf
            # det is the Jacobian determinant |d log(x)/dx| = 1/prod(x).
            if np.min(x_i) > 0:
                det = 1 / np.prod(x_i)
                probability = multivariate_normal.pdf(np.log(x_i), mu, sigma) * det
            else:
                # the log-normal density is zero for non-positive points
                probability = 0
            probabilities += [probability]
        normalized_probabilities = probabilities / np.sum(probabilities)
        # store as properties
        self._values = x
        self._probabilities = normalized_probabilities
        self._bounds = bounds
        super().__init__(*inner.qregs, name=name)
        # use default the isometry (or initialize w/o resets) algorithm to construct the circuit
        # pylint: disable=no-member
        if upto_diag:
            inner.isometry(np.sqrt(normalized_probabilities), inner.qubits, None)
            self.append(inner.to_instruction(), inner.qubits)  # Isometry is not a Gate
        else:
            from qiskit.extensions import Initialize  # pylint: disable=cyclic-import
            initialize = Initialize(np.sqrt(normalized_probabilities))
            circuit = initialize.gates_to_uncompute().inverse()
            inner.compose(circuit, inplace=True)
            self.append(inner.to_gate(), inner.qubits)
    @property
    def values(self) -> np.ndarray:
        """Return the discretized points of the random variable."""
        return self._values
    @property
    def probabilities(self) -> np.ndarray:
        """Return the sampling probabilities for the values."""
        return self._probabilities
    @property
    def bounds(self) -> Union[Tuple[float, float], List[Tuple[float, float]]]:
        """Return the bounds of the probability distribution."""
        return self._bounds
acf8e5f5cdcd676cd82d207b7ec87c221ffc7c9b | 376 | py | Python | .github/actions/cat-facts/src/main.py | sokoliao/write-docker-actions | 6ab1a670e71ee9dfd75d2b76140b24a34e7619e3 | [
"MIT"
] | null | null | null | .github/actions/cat-facts/src/main.py | sokoliao/write-docker-actions | 6ab1a670e71ee9dfd75d2b76140b24a34e7619e3 | [
"MIT"
] | 11 | 2021-07-30T11:51:15.000Z | 2021-08-04T05:33:43.000Z | .github/actions/cat-facts/src/main.py | sokoliao/write-docker-actions | 6ab1a670e71ee9dfd75d2b76140b24a34e7619e3 | [
"MIT"
] | null | null | null | import requests
import random
import sys
# Make an HTTP GET request to the cat-fact API
cat_url = "https://catfact.ninja/fact?max_length=100"
r = requests.get(cat_url)
r_obj = r.json()
fact = r_obj["fact"]
# Print the individual randomly returned cat-fact
print(fact)
# Set the fact-output of the action as the value of random_fact
print(f"::set-output name=fact::{fact}") | 23.5 | 63 | 0.744681 |
acf8e6390932dca4098ab3616795ff7f2fb41c84 | 2,751 | py | Python | colour/models/rgb/datasets/dji_dgamut.py | wenh06/colour | 445fdad2711ae39c95b4375166905568d24a95f4 | [
"BSD-3-Clause"
] | 1 | 2021-09-09T01:53:40.000Z | 2021-09-09T01:53:40.000Z | colour/models/rgb/datasets/dji_dgamut.py | wenh06/colour | 445fdad2711ae39c95b4375166905568d24a95f4 | [
"BSD-3-Clause"
] | null | null | null | colour/models/rgb/datasets/dji_dgamut.py | wenh06/colour | 445fdad2711ae39c95b4375166905568d24a95f4 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
DJI D-Gamut Colourspace
=======================
Defines the *DJI D-Gamut* colourspace:
- :attr:`colour.models.RGB_COLOURSPACE_DJI_D_GAMUT`.
References
----------
- :cite:`DJI2017` : Dji. (2017). White Paper on D-Log and D-Gamut of DJI
Cinema Color System (pp. 1-5).
https://dl.djicdn.com/downloads/zenmuse+x7/20171010/\
D-Log_D-Gamut_Whitepaper.pdf
"""
from __future__ import division, unicode_literals
import numpy as np
from colour.colorimetry import CCS_ILLUMINANTS
from colour.models.rgb import (RGB_Colourspace, log_encoding_DJIDLog,
log_decoding_DJIDLog)
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2020 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = 'colour-developers@colour-science.org'
__status__ = 'Production'
__all__ = [
'PRIMARIES_DJI_D_GAMUT', 'WHITEPOINT_NAME_DJI_D_GAMUT',
'CCS_WHITEPOINT_DJI_D_GAMUT', 'MATRIX_DJI_D_GAMUT_TO_XYZ',
'MATRIX_XYZ_TO_DJI_D_GAMUT', 'RGB_COLOURSPACE_DJI_D_GAMUT'
]
PRIMARIES_DJI_D_GAMUT = np.array([
[0.71, 0.31],
[0.21, 0.88],
[0.09, -0.08],
])
"""
*DJI D-Gamut* colourspace primaries.
PRIMARIES_DJI_D_GAMUT : ndarray, (3, 2)
"""
WHITEPOINT_NAME_DJI_D_GAMUT = 'D65'
"""
*DJI D-Gamut* colourspace whitepoint name.
CCS_WHITEPOINT_DJI_D_GAMUT : unicode
"""
CCS_WHITEPOINT_DJI_D_GAMUT = (CCS_ILLUMINANTS[
'CIE 1931 2 Degree Standard Observer'][WHITEPOINT_NAME_DJI_D_GAMUT])
"""
*DJI D-Gamut* colourspace whitepoint chromaticity coordinates.
CCS_WHITEPOINT_DJI_D_GAMUT : ndarray
"""
MATRIX_DJI_D_GAMUT_TO_XYZ = np.array([[0.6482, 0.1940,
0.1082], [0.2830, 0.8132, -0.0962],
[-0.0183, -0.0832, 1.1903]])
"""
*DJI D-Gamut* colourspace to *CIE XYZ* tristimulus values matrix.
MATRIX_DJI_D_GAMUT_TO_XYZ : array_like, (3, 3)
"""
MATRIX_XYZ_TO_DJI_D_GAMUT = np.array([[1.7257, -0.4314,
-0.1917], [-0.6025, 1.3906, 0.1671],
[-0.0156, 0.0905, 0.8489]])
"""
*CIE XYZ* tristimulus values to *DJI D-Gamut* colourspace matrix.
MATRIX_XYZ_TO_DJI_D_GAMUT : array_like, (3, 3)
"""
RGB_COLOURSPACE_DJI_D_GAMUT = RGB_Colourspace(
'DJI D-Gamut',
PRIMARIES_DJI_D_GAMUT,
CCS_WHITEPOINT_DJI_D_GAMUT,
WHITEPOINT_NAME_DJI_D_GAMUT,
MATRIX_DJI_D_GAMUT_TO_XYZ,
MATRIX_XYZ_TO_DJI_D_GAMUT,
log_encoding_DJIDLog,
log_decoding_DJIDLog,
)
RGB_COLOURSPACE_DJI_D_GAMUT.__doc__ = """
*DJI_D-Gamut* colourspace.
References
----------
:cite:`DJI2017`
RGB_COLOURSPACE_DJI_D_GAMUT : RGB_Colourspace
"""
| 26.970588 | 78 | 0.676481 |
acf8e63f0f1d4de6bcbf36f372fb98bd8b9e95c7 | 2,619 | py | Python | tests/cat/test_Catalogue.py | MonikaSonali/iudx-python-sdk | 11dfa72bbf2c267d2af24a6ae2c6b7854c373cac | [
"MIT"
] | 6 | 2021-06-21T04:45:42.000Z | 2022-02-21T11:00:37.000Z | tests/cat/test_Catalogue.py | MonikaSonali/iudx-python-sdk | 11dfa72bbf2c267d2af24a6ae2c6b7854c373cac | [
"MIT"
] | 6 | 2021-03-03T09:25:00.000Z | 2022-01-27T09:50:27.000Z | tests/cat/test_Catalogue.py | MonikaSonali/iudx-python-sdk | 11dfa72bbf2c267d2af24a6ae2c6b7854c373cac | [
"MIT"
] | 8 | 2021-03-03T09:11:28.000Z | 2022-02-02T07:24:13.000Z | '''
This script creates a test user and display
'''
# import pytest
import unittest
import json
import sys
sys.path.insert(1, './')
from iudx.cat.Catalogue import Catalogue
from iudx.cat.CatalogueQuery import CatalogueQuery
class CatalogueTest(unittest.TestCase):
"""Test different scenarios for the Catalogue class.
"""
def setUp(self):
self.testVector = {}
with open("./tests/cat/testVector_Catalogue.json", "r") as f:
self.testVector = json.load(f)
def tearDown(self):
pass
def __init__(self, *args, **kwargs):
"""CatalogueTest base class constructor
"""
super(CatalogueTest, self).__init__(*args, **kwargs)
self.cat = Catalogue(
cat_url="https://api.catalogue.iudx.org.in/iudx/cat/v1",
headers={"content-type": "application/json"}
)
self.cat_query = CatalogueQuery()
def test_search_entity(self):
"""Function to test the search entity query.
"""
for entity in self.testVector["text_params"]:
query = self.cat_query.text_search(entity)
result = self.cat.search_entity(query)
print(f"DOCUMENTS: {result.documents}")
print(f"STATUS: {result.status}")
print(f"TOTAL HITS: {result.total_hits}")
print("*"*30)
def test_count_entity(self):
"""Function to test the count entity query.
"""
for entity in self.testVector["text_params"]:
query = self.cat_query.text_search(entity)
result = self.cat.count_entity(query)
print(f"DOCUMENTS: {result.documents}")
print(f"STATUS: {result.status}")
print(f"TOTAL HITS: {result.total_hits}")
print("*"*30)
def test_list_entity(self):
"""Function to test the list entity query.
"""
for entity in self.testVector["entity_type"]:
result = self.cat.list_entity(entity)
print(f"DOCUMENTS: {result.documents}")
print(f"STATUS: {result.status}")
print(f"TOTAL HITS: {result.total_hits}")
print("*"*30)
def test_related_entity(self):
"""Function to test the related entity query.
"""
for entity in self.testVector["related_entity"]:
result = self.cat.get_related_entity(entity[0], entity[1])
print(f"DOCUMENTS: {result.documents}")
print(f"STATUS: {result.status}")
print(f"TOTAL HITS: {result.total_hits}")
print("*"*30)
if __name__ == '__main__':
unittest.main()
| 33.151899 | 70 | 0.596411 |
acf8e6ba1aa1aad828f19faac5c5d5253c6ee75c | 3,293 | py | Python | docsrc/conf.py | dzwiedziu-nkg/credo-classify-framework | 45417b505b4f4b20a7248f3487ca57a3fd49ccee | [
"MIT"
] | null | null | null | docsrc/conf.py | dzwiedziu-nkg/credo-classify-framework | 45417b505b4f4b20a7248f3487ca57a3fd49ccee | [
"MIT"
] | null | null | null | docsrc/conf.py | dzwiedziu-nkg/credo-classify-framework | 45417b505b4f4b20a7248f3487ca57a3fd49ccee | [
"MIT"
] | 3 | 2020-06-19T15:41:19.000Z | 2020-06-29T12:47:05.000Z | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../src'))
# -- Project information -----------------------------------------------------
project = 'credo_cf'
copyright = '2020, Michał Niedźwiecki, CREDO Team'
author = 'nkg'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
#"sphinx_autodoc_typehints",
"sphinx.ext.napoleon",
"sphinx.ext.doctest",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
"sphinx.ext.viewcode",
"releases",
"m2r"
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# sphinx_rtd_theme
# html_theme = 'neo_rtd_theme'
# import sphinx_theme
# html_theme_path = [sphinx_theme.get_html_theme_path()]
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}
source_suffix = ['.rst', '.md']
add_module_names = False
autodoc_typehints = 'description'
autodoc_member_order = 'bysource'
# https://github.com/miyakogi/m2r/issues/51 - workaround, TODO: please remove when m2r publish new version with fixed it
import sphinx
def monkeypatch(cls):
""" decorator to monkey-patch methods """
def decorator(f):
method = f.__name__
old_method = getattr(cls, method)
setattr(cls, method, lambda self, *args, **kwargs: f(old_method, self, *args, **kwargs))
return decorator
# workaround until https://github.com/miyakogi/m2r/pull/55 is merged
@monkeypatch(sphinx.registry.SphinxComponentRegistry)
def add_source_parser(_old_add_source_parser, self, *args, **kwargs):
# signature is (parser: Type[Parser], **kwargs), but m2r expects
# the removed (str, parser: Type[Parser], **kwargs).
if isinstance(args[0], str):
args = args[1:]
return _old_add_source_parser(self, *args, **kwargs)
# end of workaround
| 33.948454 | 120 | 0.679016 |
acf8e7059d92564d94c28c07a18a655e733602f8 | 786 | py | Python | things/views.py | kylehoac/DjangoX_Lab | ec4f5c3e595c7d91aeeddfdcb8cad40243d91758 | [
"MIT"
] | null | null | null | things/views.py | kylehoac/DjangoX_Lab | ec4f5c3e595c7d91aeeddfdcb8cad40243d91758 | [
"MIT"
] | null | null | null | things/views.py | kylehoac/DjangoX_Lab | ec4f5c3e595c7d91aeeddfdcb8cad40243d91758 | [
"MIT"
] | null | null | null | from django.views.generic import ListView, DetailView,CreateView,UpdateView,DeleteView
from .models import Thing
from django.urls import reverse_lazy
class ThingListView(ListView):
template_name = "things/thing_list.html"
model = Thing
class ThingDetailView(DetailView):
template_name = "things/thing_detail.html"
model = Thing
class ThingCreateView(CreateView):
template_name = "things/thing_create.html"
model = Thing
fields = ["name","reviewer","rating"]
class ThingUpdateView(UpdateView):
template_name = "things/thing_update.html"
model = Thing
fields = ["name","reviewer","rating"]
class ThingDeleteView(DeleteView):
template_name = "things/thing_delete.html"
model = Thing
success_url = reverse_lazy("thing_list") | 25.354839 | 86 | 0.736641 |
acf8e820e5cb1cee6543dfe41b681a14738c161e | 23,331 | py | Python | heat/tests/openstack/heat/test_deployed_server.py | noironetworks/heat | 7cdadf1155f4d94cf8f967635b98e4012a7acfb7 | [
"Apache-2.0"
] | 1 | 2019-02-03T07:48:11.000Z | 2019-02-03T07:48:11.000Z | heat/tests/openstack/heat/test_deployed_server.py | noironetworks/heat | 7cdadf1155f4d94cf8f967635b98e4012a7acfb7 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | heat/tests/openstack/heat/test_deployed_server.py | noironetworks/heat | 7cdadf1155f4d94cf8f967635b98e4012a7acfb7 | [
"Apache-2.0"
] | 2 | 2020-03-15T01:24:15.000Z | 2020-07-22T20:34:26.000Z | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_serialization import jsonutils
from oslo_utils import uuidutils
from six.moves.urllib import parse as urlparse
from heat.common import exception
from heat.common import template_format
from heat.engine.clients.os import heat_plugin
from heat.engine.clients.os import swift
from heat.engine.clients.os import zaqar
from heat.engine import environment
from heat.engine.resources.openstack.heat import deployed_server
from heat.engine import scheduler
from heat.engine import stack as parser
from heat.engine import template
from heat.tests import common
from heat.tests import utils
ds_tmpl = """
heat_template_version: 2015-10-15
resources:
server:
type: OS::Heat::DeployedServer
properties:
software_config_transport: POLL_TEMP_URL
"""
server_sc_tmpl = """
heat_template_version: 2015-10-15
resources:
server:
type: OS::Heat::DeployedServer
properties:
software_config_transport: POLL_SERVER_CFN
"""
server_heat_tmpl = """
heat_template_version: 2015-10-15
resources:
server:
type: OS::Heat::DeployedServer
properties:
software_config_transport: POLL_SERVER_HEAT
"""
server_zaqar_tmpl = """
heat_template_version: 2015-10-15
resources:
server:
type: OS::Heat::DeployedServer
properties:
software_config_transport: ZAQAR_MESSAGE
"""
ds_deployment_data_tmpl = """
heat_template_version: 2015-10-15
resources:
server:
type: OS::Heat::DeployedServer
properties:
software_config_transport: POLL_TEMP_URL
deployment_swift_data:
container: my-custom-container
object: my-custom-object
"""
ds_deployment_data_bad_container_tmpl = """
heat_template_version: 2015-10-15
resources:
server:
type: OS::Heat::DeployedServer
properties:
software_config_transport: POLL_TEMP_URL
deployment_swift_data:
container: ''
object: 'my-custom-object'
"""
ds_deployment_data_bad_object_tmpl = """
heat_template_version: 2015-10-15
resources:
server:
type: OS::Heat::DeployedServer
properties:
software_config_transport: POLL_TEMP_URL
deployment_swift_data:
container: 'my-custom-container'
object: ''
"""
ds_deployment_data_none_container_tmpl = """
heat_template_version: 2015-10-15
resources:
server:
type: OS::Heat::DeployedServer
properties:
software_config_transport: POLL_TEMP_URL
deployment_swift_data:
container: 0
object: 'my-custom-object'
"""
ds_deployment_data_none_object_tmpl = """
heat_template_version: 2015-10-15
resources:
server:
type: OS::Heat::DeployedServer
properties:
software_config_transport: POLL_TEMP_URL
deployment_swift_data:
container: 'my-custom-container'
object: 0
"""
class DeployedServersTest(common.HeatTestCase):
    def _create_test_server(self, name, override_name=False):
        """Set up a DeployedServer resource and run its create task.

        NOTE(review): delegates to _setup_test_server, which is not visible
        in this chunk -- presumably defined elsewhere on this class.
        """
        server = self._setup_test_server(name, override_name)
        scheduler.TaskRunner(server.create)()
        return server
    def _setup_test_stack(self, stack_name, test_templ=ds_tmpl):
        """Parse *test_templ* and build a dummy Stack around it.

        Returns a (template, stack) pair; the stack uses a dummy context
        and a fixed stack_user_project_id ('8888').
        """
        t = template_format.parse(test_templ)
        tmpl = template.Template(t, env=environment.Environment())
        stack = parser.Stack(utils.dummy_context(region_name="RegionOne"),
                             stack_name, tmpl,
                             stack_id=uuidutils.generate_uuid(),
                             stack_user_project_id='8888')
        return (tmpl, stack)
    def _server_create_software_config_poll_temp_url(self,
                                                     server_name='server'):
        """Create a server with POLL_TEMP_URL transport and verify its
        Swift temp-URL metadata plumbing, including cleanup on delete.

        Returns (metadata_url, server) for reuse by calling tests.
        """
        stack_name = '%s_s' % server_name
        (tmpl, stack) = self._setup_test_stack(stack_name)
        props = tmpl.t['resources']['server']['properties']
        props['software_config_transport'] = 'POLL_TEMP_URL'
        self.server_props = props
        resource_defns = tmpl.resource_definitions(stack)
        server = deployed_server.DeployedServer(
            server_name, resource_defns[server_name], stack)
        # Stub the swift client so no real object store is contacted.
        sc = mock.Mock()
        sc.head_account.return_value = {
            'x-account-meta-temp-url-key': 'secrit'
        }
        sc.url = 'http://192.0.2.2'
        self.patchobject(swift.SwiftClientPlugin, '_create',
                         return_value=sc)
        scheduler.TaskRunner(server.create)()
        # self._create_test_server(server_name)
        metadata_put_url = server.data().get('metadata_put_url')
        md = server.metadata_get()
        metadata_url = md['os-collect-config']['request']['metadata_url']
        # Put and get URLs differ (different temp-URL signatures).
        self.assertNotEqual(metadata_url, metadata_put_url)
        # Default swift data: container named after the resource, object a UUID.
        container_name = server.physical_resource_name()
        object_name = server.data().get('metadata_object_name')
        self.assertTrue(uuidutils.is_uuid_like(object_name))
        test_path = '/v1/AUTH_test_tenant_id/%s/%s' % (
            server.physical_resource_name(), object_name)
        self.assertEqual(test_path, urlparse.urlparse(metadata_put_url).path)
        self.assertEqual(test_path, urlparse.urlparse(metadata_url).path)
        sc.put_object.assert_called_once_with(
            container_name, object_name, jsonutils.dumps(md))
        # Deleting the temp URL removes object and (when empty) the container.
        sc.head_container.return_value = {'x-container-object-count': '0'}
        server._delete_temp_url()
        sc.delete_object.assert_called_once_with(container_name, object_name)
        sc.head_container.assert_called_once_with(container_name)
        sc.delete_container.assert_called_once_with(container_name)
        return metadata_url, server
def test_server_create_deployment_swift_data(self):
    """POLL_TEMP_URL honours user-supplied Swift container/object names.

    Uses ``ds_deployment_data_tmpl``, which sets deployment_swift_data,
    so the temp URLs must point at the custom container/object instead of
    the derived defaults.
    """
    server_name = 'server'
    stack_name = '%s_s' % server_name
    (tmpl, stack) = self._setup_test_stack(
        stack_name,
        ds_deployment_data_tmpl)
    props = tmpl.t['resources']['server']['properties']
    props['software_config_transport'] = 'POLL_TEMP_URL'
    self.server_props = props
    resource_defns = tmpl.resource_definitions(stack)
    server = deployed_server.DeployedServer(
        server_name, resource_defns[server_name], stack)
    # Fake Swift client with a pre-existing temp-url key on the account.
    sc = mock.Mock()
    sc.head_account.return_value = {
        'x-account-meta-temp-url-key': 'secrit'
    }
    sc.url = 'http://192.0.2.2'
    self.patchobject(swift.SwiftClientPlugin, '_create',
                     return_value=sc)
    scheduler.TaskRunner(server.create)()
    # self._create_test_server(server_name)
    metadata_put_url = server.data().get('metadata_put_url')
    md = server.metadata_get()
    metadata_url = md['os-collect-config']['request']['metadata_url']
    self.assertNotEqual(metadata_url, metadata_put_url)
    # Names come straight from the template's deployment_swift_data.
    container_name = 'my-custom-container'
    object_name = 'my-custom-object'
    test_path = '/v1/AUTH_test_tenant_id/%s/%s' % (
        container_name, object_name)
    self.assertEqual(test_path, urlparse.urlparse(metadata_put_url).path)
    self.assertEqual(test_path, urlparse.urlparse(metadata_url).path)
    sc.put_object.assert_called_once_with(
        container_name, object_name, jsonutils.dumps(md))
    # Empty container: cleanup deletes the object and the container.
    sc.head_container.return_value = {'x-container-object-count': '0'}
    server._delete_temp_url()
    sc.delete_object.assert_called_once_with(container_name, object_name)
    sc.head_container.assert_called_once_with(container_name)
    sc.delete_container.assert_called_once_with(container_name)
    return metadata_url, server
def test_server_create_deployment_swift_data_bad_container(self):
    """Validation rejects a deployment-data template with a bad container."""
    name = 'server'
    stack_label = '%s_s' % name
    tmpl, stack = self._setup_test_stack(
        stack_label, ds_deployment_data_bad_container_tmpl)

    properties = tmpl.t['resources']['server']['properties']
    properties['software_config_transport'] = 'POLL_TEMP_URL'
    self.server_props = properties

    defns = tmpl.resource_definitions(stack)
    server = deployed_server.DeployedServer(name, defns[name], stack)
    self.assertRaises(exception.StackValidationFailed, server.validate)
def test_server_create_deployment_swift_data_bad_object(self):
    """Validation rejects a deployment-data template with a bad object name."""
    name = 'server'
    stack_label = '%s_s' % name
    tmpl, stack = self._setup_test_stack(
        stack_label, ds_deployment_data_bad_object_tmpl)

    properties = tmpl.t['resources']['server']['properties']
    properties['software_config_transport'] = 'POLL_TEMP_URL'
    self.server_props = properties

    defns = tmpl.resource_definitions(stack)
    server = deployed_server.DeployedServer(name, defns[name], stack)
    self.assertRaises(exception.StackValidationFailed, server.validate)
def test_server_create_deployment_swift_data_none_container(self):
    """POLL_TEMP_URL with a null container in deployment_swift_data.

    The container resolves to '0' here — presumably the serialized
    fallback from the template's null value; TODO confirm against
    ``ds_deployment_data_none_container_tmpl``.
    """
    server_name = 'server'
    stack_name = '%s_s' % server_name
    (tmpl, stack) = self._setup_test_stack(
        stack_name,
        ds_deployment_data_none_container_tmpl)
    props = tmpl.t['resources']['server']['properties']
    props['software_config_transport'] = 'POLL_TEMP_URL'
    self.server_props = props
    resource_defns = tmpl.resource_definitions(stack)
    server = deployed_server.DeployedServer(
        server_name, resource_defns[server_name], stack)
    # Fake Swift client with a pre-existing temp-url key on the account.
    sc = mock.Mock()
    sc.head_account.return_value = {
        'x-account-meta-temp-url-key': 'secrit'
    }
    sc.url = 'http://192.0.2.2'
    self.patchobject(swift.SwiftClientPlugin, '_create',
                     return_value=sc)
    scheduler.TaskRunner(server.create)()
    # self._create_test_server(server_name)
    metadata_put_url = server.data().get('metadata_put_url')
    md = server.metadata_get()
    metadata_url = md['os-collect-config']['request']['metadata_url']
    self.assertNotEqual(metadata_url, metadata_put_url)
    container_name = '0'
    object_name = 'my-custom-object'
    test_path = '/v1/AUTH_test_tenant_id/%s/%s' % (
        container_name, object_name)
    self.assertEqual(test_path, urlparse.urlparse(metadata_put_url).path)
    self.assertEqual(test_path, urlparse.urlparse(metadata_url).path)
    sc.put_object.assert_called_once_with(
        container_name, object_name, jsonutils.dumps(md))
    # Empty container: cleanup deletes the object and the container.
    sc.head_container.return_value = {'x-container-object-count': '0'}
    server._delete_temp_url()
    sc.delete_object.assert_called_once_with(container_name, object_name)
    sc.head_container.assert_called_once_with(container_name)
    sc.delete_container.assert_called_once_with(container_name)
    return metadata_url, server
def test_server_create_deployment_swift_data_none_object(self):
server_name = 'server'
stack_name = '%s_s' % server_name
(tmpl, stack) = self._setup_test_stack(
stack_name,
ds_deployment_data_none_object_tmpl)
props = tmpl.t['resources']['server']['properties']
props['software_config_transport'] = 'POLL_TEMP_URL'
self.server_props = props
resource_defns = tmpl.resource_definitions(stack)
server = deployed_server.DeployedServer(
server_name, resource_defns[server_name], stack)
sc = mock.Mock()
sc.head_account.return_value = {
'x-account-meta-temp-url-key': 'secrit'
}
sc.url = 'http://192.0.2.2'
self.patchobject(swift.SwiftClientPlugin, '_create',
return_value=sc)
scheduler.TaskRunner(server.create)()
# self._create_test_server(server_name)
metadata_put_url = server.data().get('metadata_put_url')
md = server.metadata_get()
metadata_url = md['os-collect-config']['request']['metadata_url']
self.assertNotEqual(metadata_url, metadata_put_url)
container_name = 'my-custom-container'
object_name = '0'
test_path = '/v1/AUTH_test_tenant_id/%s/%s' % (
container_name, object_name)
self.assertEqual(test_path, urlparse.urlparse(metadata_put_url).path)
self.assertEqual(test_path, urlparse.urlparse(metadata_url).path)
sc.put_object.assert_called_once_with(
container_name, object_name, jsonutils.dumps(md))
sc.head_container.return_value = {'x-container-object-count': '0'}
server._delete_temp_url()
sc.delete_object.assert_called_once_with(container_name, object_name)
sc.head_container.assert_called_once_with(container_name)
sc.delete_container.assert_called_once_with(container_name)
return metadata_url, server
def test_server_create_software_config_poll_temp_url(self):
    """Metadata for POLL_TEMP_URL exposes the 'request' collector."""
    metadata_url, server = (
        self._server_create_software_config_poll_temp_url())
    expected = {
        'os-collect-config': {
            'request': {'metadata_url': metadata_url},
            'collectors': ['request', 'local'],
        },
        'deployments': [],
    }
    self.assertEqual(expected, server.metadata_get())
def _server_create_software_config(self,
                                   server_name='server_sc',
                                   md=None,
                                   ret_tmpl=False):
    """Create a DeployedServer with the default POLL_SERVER_CFN transport.

    Optionally injects resource *md* metadata before creation, then checks
    the provisioned CFN credentials and stack access rules.

    :returns: the server, or (server, tmpl) when ``ret_tmpl`` is True.
    """
    stack_name = '%s_s' % server_name
    (tmpl, stack) = self._setup_test_stack(stack_name, server_sc_tmpl)
    self.stack = stack
    self.server_props = tmpl.t['resources']['server']['properties']
    if md is not None:
        tmpl.t['resources']['server']['metadata'] = md
    stack.stack_user_project_id = '8888'
    resource_defns = tmpl.resource_definitions(stack)
    server = deployed_server.DeployedServer(
        'server', resource_defns['server'], stack)
    # Stub the heat client so no real API is touched during create.
    self.patchobject(server, 'heat')
    scheduler.TaskRunner(server.create)()
    # Credentials created for the CFN transport (values come from fixtures).
    self.assertEqual('4567', server.access_key)
    self.assertEqual('8901', server.secret_key)
    self.assertEqual('1234', server._get_user_id())
    self.assertEqual('POLL_SERVER_CFN',
                     server.properties.get('software_config_transport'))
    # Only the exact key/resource pair grants access.
    self.assertTrue(stack.access_allowed('4567', 'server'))
    self.assertFalse(stack.access_allowed('45678', 'server'))
    self.assertFalse(stack.access_allowed('4567', 'wserver'))
    if ret_tmpl:
        return server, tmpl
    else:
        return server
@mock.patch.object(heat_plugin.HeatClientPlugin, 'url_for')
def test_server_create_software_config(self, fake_url):
    """os-collect-config metadata carries the CFN transport settings."""
    fake_url.return_value = 'the-cfn-url'
    server = self._server_create_software_config()
    expected = {
        'os-collect-config': {
            'cfn': {
                'access_key_id': '4567',
                'metadata_url': 'the-cfn-url/v1/',
                'path': 'server.Metadata',
                'secret_access_key': '8901',
                'stack_name': 'server_sc_s',
            },
            'collectors': ['cfn', 'local'],
        },
        'deployments': [],
    }
    self.assertEqual(expected, server.metadata_get())
@mock.patch.object(heat_plugin.HeatClientPlugin, 'url_for')
def test_server_create_software_config_metadata(self, fake_url):
    """User os-collect-config metadata is merged with the CFN settings."""
    user_md = {'os-collect-config': {'polling_interval': 10}}
    fake_url.return_value = 'the-cfn-url'
    server = self._server_create_software_config(md=user_md)
    expected = {
        'os-collect-config': {
            'cfn': {
                'access_key_id': '4567',
                'metadata_url': 'the-cfn-url/v1/',
                'path': 'server.Metadata',
                'secret_access_key': '8901',
                'stack_name': 'server_sc_s',
            },
            'collectors': ['cfn', 'local'],
            'polling_interval': 10,
        },
        'deployments': [],
    }
    self.assertEqual(expected, server.metadata_get())
def _server_create_software_config_poll_heat(self,
                                             server_name='server_heat',
                                             md=None):
    """Create a DeployedServer using the POLL_SERVER_HEAT transport.

    Optionally injects resource *md* metadata, then checks the created
    stack user and the stack access rules.

    :returns: (stack, server) for reuse by other tests.
    """
    stack_name = '%s_s' % server_name
    (tmpl, stack) = self._setup_test_stack(stack_name, server_heat_tmpl)
    self.stack = stack
    props = tmpl.t['resources']['server']['properties']
    props['software_config_transport'] = 'POLL_SERVER_HEAT'
    if md is not None:
        tmpl.t['resources']['server']['metadata'] = md
    self.server_props = props
    resource_defns = tmpl.resource_definitions(stack)
    server = deployed_server.DeployedServer(
        'server', resource_defns['server'], stack)
    scheduler.TaskRunner(server.create)()
    # A stack user was created for the heat transport (id from fixtures);
    # only that user/resource pair is granted access.
    self.assertEqual('1234', server._get_user_id())
    self.assertTrue(stack.access_allowed('1234', 'server'))
    self.assertFalse(stack.access_allowed('45678', 'server'))
    self.assertFalse(stack.access_allowed('4567', 'wserver'))
    return stack, server
def test_server_software_config_poll_heat(self):
    """Heat-transport metadata is correct and merges on resource update."""
    stack, server = self._server_create_software_config_poll_heat()
    md = {
        'os-collect-config': {
            'heat': {
                'auth_url': 'http://server.test:5000/v2.0',
                'password': server.password,
                'project_id': '8888',
                'region_name': 'RegionOne',
                'resource_name': 'server',
                'stack_id': 'server_heat_s/%s' % stack.id,
                'user_id': '1234'
            },
            'collectors': ['heat', 'local']
        },
        'deployments': []
    }
    self.assertEqual(md, server.metadata_get())
    # update resource.metadata
    md1 = {'os-collect-config': {'polling_interval': 10}}
    server.stack.t.t['resources']['server']['metadata'] = md1
    resource_defns = server.stack.t.resource_definitions(server.stack)
    scheduler.TaskRunner(server.update, resource_defns['server'])()
    # Build the expected merged dict the same way the resource should:
    # the new keys are merged into the existing os-collect-config section.
    occ = md['os-collect-config']
    occ.update(md1['os-collect-config'])
    # os-collect-config merged
    self.assertEqual(md, server.metadata_get())
def test_server_create_software_config_poll_heat_metadata(self):
    """User metadata is merged into the heat transport configuration."""
    user_md = {'os-collect-config': {'polling_interval': 10}}
    stack, server = self._server_create_software_config_poll_heat(md=user_md)
    expected = {
        'os-collect-config': {
            'heat': {
                'auth_url': 'http://server.test:5000/v2.0',
                'password': server.password,
                'project_id': '8888',
                'region_name': 'RegionOne',
                'resource_name': 'server',
                'stack_id': 'server_heat_s/%s' % stack.id,
                'user_id': '1234',
            },
            'collectors': ['heat', 'local'],
            'polling_interval': 10,
        },
        'deployments': [],
    }
    self.assertEqual(expected, server.metadata_get())
def _server_create_software_config_zaqar(self,
                                         server_name='server_zaqar',
                                         md=None):
    """Create a DeployedServer using the ZAQAR_MESSAGE transport.

    Stubs the Zaqar client, creates the server, and checks that the
    metadata is posted to the created queue and that queue cleanup works.

    :returns: (queue_id, server) for reuse by other tests.
    """
    stack_name = '%s_s' % server_name
    (tmpl, stack) = self._setup_test_stack(stack_name, server_zaqar_tmpl)
    self.stack = stack
    props = tmpl.t['resources']['server']['properties']
    props['software_config_transport'] = 'ZAQAR_MESSAGE'
    if md is not None:
        tmpl.t['resources']['server']['metadata'] = md
    self.server_props = props
    resource_defns = tmpl.resource_definitions(stack)
    server = deployed_server.DeployedServer(
        'server', resource_defns['server'], stack)
    # Fake Zaqar client and queue.
    zcc = self.patchobject(zaqar.ZaqarClientPlugin, 'create_for_tenant')
    zc = mock.Mock()
    zcc.return_value = zc
    queue = mock.Mock()
    zc.queue.return_value = queue
    scheduler.TaskRunner(server.create)()
    # The queue id stored in resource data must match the one advertised
    # in the os-collect-config metadata.
    metadata_queue_id = server.data().get('metadata_queue_id')
    md = server.metadata_get()
    queue_id = md['os-collect-config']['zaqar']['queue_id']
    self.assertEqual(queue_id, metadata_queue_id)
    zc.queue.assert_called_once_with(queue_id)
    queue.post.assert_called_once_with(
        {'body': server.metadata_get(), 'ttl': 3600})
    # Reset so the delete path can be asserted independently.
    zc.queue.reset_mock()
    server._delete_queue()
    zc.queue.assert_called_once_with(queue_id)
    zc.queue(queue_id).delete.assert_called_once_with()
    return queue_id, server
def test_server_create_software_config_zaqar(self):
    """Metadata for ZAQAR_MESSAGE exposes the zaqar collector settings."""
    queue_id, server = self._server_create_software_config_zaqar()
    expected = {
        'os-collect-config': {
            'zaqar': {
                'user_id': '1234',
                'password': server.password,
                'auth_url': 'http://server.test:5000/v2.0',
                'project_id': '8888',
                'region_name': 'RegionOne',
                'queue_id': queue_id,
            },
            'collectors': ['zaqar', 'local'],
        },
        'deployments': [],
    }
    self.assertEqual(expected, server.metadata_get())
def test_server_create_software_config_zaqar_metadata(self):
    """User metadata is merged into the zaqar transport configuration."""
    user_md = {'os-collect-config': {'polling_interval': 10}}
    queue_id, server = self._server_create_software_config_zaqar(md=user_md)
    expected = {
        'os-collect-config': {
            'zaqar': {
                'user_id': '1234',
                'password': server.password,
                'auth_url': 'http://server.test:5000/v2.0',
                'project_id': '8888',
                'region_name': 'RegionOne',
                'queue_id': queue_id,
            },
            'collectors': ['zaqar', 'local'],
            'polling_interval': 10,
        },
        'deployments': [],
    }
    self.assertEqual(expected, server.metadata_get())
def test_resolve_attribute_os_collect_config(self):
    """FnGetAtt('os_collect_config') returns the transport configuration."""
    metadata_url, server = (
        self._server_create_software_config_poll_temp_url())
    # FnGetAtt below requires the resource to have a stack set.
    tmpl, stack = self._setup_test_stack('stack_name')
    server.stack = stack
    expected = {
        'request': {'metadata_url': metadata_url},
        'collectors': ['request', 'local'],
    }
    self.assertEqual(expected, server.FnGetAtt('os_collect_config'))
| 38.060359 | 78 | 0.623248 |
acf8e8cd4322aec60aeb93dac7799607c53ac0e3 | 4,924 | py | Python | StockAnalysisSystem/service/provider/provider.py | lifg2000/StockAnalysisSystem | b0bef50f5c1a9565e1a1f953fedbe7821601147c | [
"Apache-2.0"
] | null | null | null | StockAnalysisSystem/service/provider/provider.py | lifg2000/StockAnalysisSystem | b0bef50f5c1a9565e1a1f953fedbe7821601147c | [
"Apache-2.0"
] | null | null | null | StockAnalysisSystem/service/provider/provider.py | lifg2000/StockAnalysisSystem | b0bef50f5c1a9565e1a1f953fedbe7821601147c | [
"Apache-2.0"
] | null | null | null | import os
import base64
import pickle
import traceback
import pandas as pd
import StockAnalysisSystem.core.interface as sasIF
import StockAnalysisSystem.core.Utiltity.time_utility as sasTimeUtil
from .user_manager import UserManager
from .access_control import AccessControl
from .resource_manager import ResourceManager
from ..render.common_render import generate_display_page
class ServiceProvider:
    """Facade that wires up and exposes the enabled backend services."""

    # Known service names; init() only starts the ones flagged true in the
    # service_table passed to the constructor.
    SERVICE_LIST = ['stock_analysis_system', 'offline_analysis_result']

    def __init__(self, service_table: dict):
        """Create a provider; no service is started until init() is called.

        :param service_table: maps service name -> enable flag.
        """
        self.__service_table = service_table
        self.__config = None
        self.__logger = print
        self.__user_manager = UserManager()
        self.__access_control = AccessControl()
        self.__resource_manager = ResourceManager()
        self.__sas = None
        self.__offline_analysis_result = None

    def init(self, config) -> bool:
        """Initialise all enabled services; True only if every one succeeds."""
        final_ret = True
        from StockAnalysisSystem.core.config import Config
        # Fall back to a default Config when none is supplied.
        self.__config = config if config is not None else Config()
        if self.__service_table.get('stock_analysis_system'):
            ret = self.__init_sas()
            final_ret = ret and final_ret
        if self.__service_table.get('offline_analysis_result'):
            ret = self.__init_offline_analysis_result()
            final_ret = ret and final_ret
        return final_ret

    def __init_sas(self) -> bool:
        """Start the StockAnalysisSystem core; returns False on any failure."""
        try:
            self.log('Init StockAnalysisSystem...')
            from StockAnalysisSystem.core.StockAnalysisSystem import StockAnalysisSystem
            if not sasIF.sas_init(project_path=os.getcwd(), config=self.__config):
                raise Exception(sasIF.sas().get_log_errors())
            self.__sas = sasIF.sas()
            self.log('Init StockAnalysisSystem Complete.')
            return True
        except Exception as e:
            # Keep the provider alive but mark sas as unavailable.
            self.__sas = None
            self.log(str(e))
            self.log(str(traceback.format_exc()))
            self.log('Init StockAnalysisSystem Fail')
            return False
        finally:
            pass

    def __init_offline_analysis_result(self) -> bool:
        """Load the pre-computed (offline) analysis result service."""
        self.log('Init OfflineAnalysisResult...')
        from .offline_analysis_result import OfflineAnalysisResult
        self.__offline_analysis_result = OfflineAnalysisResult(self.__logger)
        self.__offline_analysis_result.init(self.__config)
        self.log('Init OfflineAnalysisResult Complete.')
        return True

    # --------------------------------------------- Offline Analysis Result --------------------------------------------

    def get_security_analysis_result_url(self, security: str) -> str:
        """Return the public URL of a security's analysis page, or '' if absent."""
        if self.__offline_analysis_result is None:
            return ''
        if not self.__offline_analysis_result.security_result_exists(security):
            return ''
        # NOTE(review): host is hard-coded; consider moving it to config.
        return 'http://211.149.229.160/analysis?security=%s' % security

    def get_security_analysis_result_page(self, security: str) -> str:
        """Render the analysis result of *security* as a full HTML page."""
        if self.__offline_analysis_result is None:
            return ''
        result_html = self.__offline_analysis_result.get_analysis_result_html(security)
        return generate_display_page('分析结果' + security, result_html)

    # ---------------------------------------------------- Web API -----------------------------------------------------

    @AccessControl.apply('query')
    def query(self, uri: str, identity: str or None = None,
              since: str or None = None, until: str or None = None, **extra) -> str:
        """Query data records through the sas interface.

        NOTE(review): despite the ``-> str`` annotation this returns a
        pandas DataFrame on success (and '' on invalid arguments) —
        confirm the intended contract before relying on the annotation.
        """
        if not isinstance(uri, str):
            return ''
        if isinstance(identity, str):
            # Accept a comma-separated list of identities.
            identity = identity.split(',')
            identity = [s.strip() for s in identity]
        elif identity is None:
            pass
        else:
            return ''
        time_serial = (sasTimeUtil.text_auto_time(since),
                       sasTimeUtil.text_auto_time(until))
        # No usable bound at all -> unconstrained query.
        if time_serial[0] is None and time_serial[1] is None:
            time_serial = None
        df = sasIF.sas_query(uri, identity, time_serial, **extra)
        return df

    def analysis(self):
        """Trigger a data update run of the analysis system."""
        sasIF.sas_update()

    # ------------------------------------------------------------------------------------------------------------------

    def log(self, text: str):
        """Log through the configured logger (defaults to print)."""
        if self.__logger is not None:
            self.__logger(text)

    # https://stackoverflow.com/a/57930738/12929244
    # @staticmethod
    # def serialize_dataframe(df: pd.DataFrame) -> str:
    #     pickle_bytes = pickle.dumps(df)
    #     b64_pickle_bytes = base64.b64encode(pickle_bytes)
    #     b64_pickle_bytes_str = b64_pickle_bytes.decode('utf-8')
    #     return b64_pickle_bytes_str
    #
    # @staticmethod
    # def deserialize_dataframe(b64_pickle_bytes_str: str) -> pd.DataFrame:
    #     pickle_bytes = base64.b64decode(b64_pickle_bytes_str)
    #     df = pickle.loads(pickle_bytes)
    #     return df
| 33.726027 | 120 | 0.611901 |
acf8eb4c1a5616b23d02931cb40f75cad270754d | 6,566 | py | Python | pptx_tools/fill_style.py | natter1/python_pptx_interface | 6c2c98d5423e791527fd251232fad39a3e4b2bb9 | [
"MIT"
] | 17 | 2020-01-15T20:29:03.000Z | 2022-03-19T10:15:08.000Z | pptx_tools/fill_style.py | natter1/python_pptx_interface | 6c2c98d5423e791527fd251232fad39a3e4b2bb9 | [
"MIT"
] | 6 | 2020-01-15T17:27:23.000Z | 2021-04-10T11:06:58.000Z | pptx_tools/fill_style.py | natter1/python_pptx_interface | 6c2c98d5423e791527fd251232fad39a3e4b2bb9 | [
"MIT"
] | 5 | 2020-08-19T21:56:29.000Z | 2021-09-26T23:34:30.000Z | """
This module provides a helper class to deal with fills (for shapes, table cells ...) in python-pptx.
@author: Nathanael Jöhrmann
"""
from enum import Enum, auto
from typing import Union, Optional, Tuple
from pptx.dml.color import RGBColor
from pptx.dml.fill import FillFormat
from pptx.enum.base import EnumValue
from pptx.enum.dml import MSO_PATTERN_TYPE
from pptx_tools.utils import _DO_NOT_CHANGE
class FillType(Enum):
    """Kinds of fill supported by PPTXFillStyle (maps to FillFormat calls)."""
    NOFILL = auto()  # fill.background()
    SOLID = auto()  # fill.solid()
    PATTERNED = auto()  # fill.patterned()
    GRADIENT = auto()  # fill.gradient(); not implemented yet
class PPTXFillStyle:
    """Collects fill attributes and writes them onto a python-pptx FillFormat.

    Attributes left as ``None`` are not written, so a style can be applied
    partially. Fore/back colors may be given either as RGB or as an MSO theme
    color — setting one form clears the other for the same (fore/back) side.
    """

    def __init__(self):
        self.fill_type: Optional[FillType] = None  # FillType.SOLID
        self._fore_color_rgb: Union[RGBColor, Tuple[float, float, float], None] = None
        self._fore_color_mso_theme: Optional[EnumValue] = None
        self.fore_color_brightness: Optional[float] = None

        self._back_color_rgb: Union[RGBColor, Tuple[float, float, float], None] = None
        self._back_color_mso_theme: Optional[EnumValue] = None
        self.back_color_brightness: Optional[float] = None

        self.pattern: Optional[MSO_PATTERN_TYPE] = None  # 0 ... 47

    @property
    def fore_color_rgb(self) -> Optional[RGBColor]:
        return self._fore_color_rgb

    @property
    def fore_color_mso_theme(self) -> Optional[EnumValue]:
        return self._fore_color_mso_theme

    @property
    def back_color_rgb(self) -> Optional[RGBColor]:
        return self._back_color_rgb

    @property
    def back_color_mso_theme(self) -> Optional[EnumValue]:
        return self._back_color_mso_theme

    @fore_color_rgb.setter
    def fore_color_rgb(self, value: Union[RGBColor, Tuple[any, any, any], None]):
        """Set the fore color as RGB; clears any fore MSO theme color."""
        if value is not None:
            assert isinstance(value, (RGBColor, tuple))
        self._fore_color_mso_theme = None  # only one color definition at a time!
        self._fore_color_rgb = RGBColor(*value) if isinstance(value, tuple) else value

    @fore_color_mso_theme.setter
    def fore_color_mso_theme(self, value: Optional[EnumValue]):
        """Set the fore color as an MSO theme color; clears any fore RGB color."""
        if value is not None:
            assert isinstance(value, EnumValue)
        self._fore_color_rgb = None  # only one color definition at a time!
        self._fore_color_mso_theme = value

    @back_color_rgb.setter
    def back_color_rgb(self, value: Union[RGBColor, Tuple[any, any, any], None]):
        """Set the back color as RGB; clears any back MSO theme color."""
        if value is not None:
            assert isinstance(value, (RGBColor, tuple))
        # BUGFIX: this used to clear _fore_color_mso_theme (copy-paste from the
        # fore setter), wiping an unrelated fore-color setting and leaving a
        # stale back MSO theme in place.
        self._back_color_mso_theme = None  # only one color definition at a time!
        self._back_color_rgb = RGBColor(*value) if isinstance(value, tuple) else value

    @back_color_mso_theme.setter
    def back_color_mso_theme(self, value: Optional[EnumValue]):
        """Set the back color as an MSO theme color; clears any back RGB color."""
        if value is not None:
            assert isinstance(value, EnumValue)
        self._back_color_rgb = None  # only one color definition at a time!
        self._back_color_mso_theme = value

    def set(self, fill_type: FillType = _DO_NOT_CHANGE,
            fore_color_rgb: Union[RGBColor, Tuple[any, any, any], None] = _DO_NOT_CHANGE,
            fore_color_mso_theme: Optional[EnumValue] = _DO_NOT_CHANGE,
            fore_color_brightness: Optional[float] = _DO_NOT_CHANGE,
            back_color_rgb: Union[RGBColor, Tuple[any, any, any], None] = _DO_NOT_CHANGE,
            back_color_mso_theme: Optional[EnumValue] = _DO_NOT_CHANGE,
            back_color_brightness: Optional[float] = _DO_NOT_CHANGE,
            pattern: Optional[MSO_PATTERN_TYPE] = _DO_NOT_CHANGE
            ):
        """Convenience method to set several fill attributes together."""
        if fill_type is not _DO_NOT_CHANGE:
            self.fill_type = fill_type
        if fore_color_rgb is not _DO_NOT_CHANGE:
            self.fore_color_rgb = fore_color_rgb
        if fore_color_mso_theme is not _DO_NOT_CHANGE:
            self.fore_color_mso_theme = fore_color_mso_theme
        if fore_color_brightness is not _DO_NOT_CHANGE:
            self.fore_color_brightness = fore_color_brightness

        if back_color_rgb is not _DO_NOT_CHANGE:
            self.back_color_rgb = back_color_rgb
        if back_color_mso_theme is not _DO_NOT_CHANGE:
            self.back_color_mso_theme = back_color_mso_theme
        if back_color_brightness is not _DO_NOT_CHANGE:
            self.back_color_brightness = back_color_brightness

        if pattern is not _DO_NOT_CHANGE:
            self.pattern = pattern

    def write_fill(self, fill: FillFormat):
        """Write attributes to a FillFormat object."""
        if self.fill_type is not None:
            self._write_fill_type(fill)

    def _write_fore_color(self, fill: FillFormat):
        """Apply the configured fore color (RGB wins over MSO theme)."""
        if self.fore_color_rgb is not None:
            fill.fore_color.rgb = self.fore_color_rgb
        elif self.fore_color_mso_theme is not None:
            fill.fore_color.theme_color = self.fore_color_mso_theme
        else:
            raise ValueError("No valid rgb_color set")
        if self.fore_color_brightness:
            fill.fore_color.brightness = self.fore_color_brightness

    def _write_back_color(self, fill: FillFormat):
        """Apply the configured back color (RGB wins over MSO theme)."""
        if self.back_color_rgb is not None:
            fill.back_color.rgb = self.back_color_rgb
        elif self.back_color_mso_theme is not None:
            fill.back_color.theme_color = self.back_color_mso_theme
        else:
            raise ValueError("No valid rgb_color set")
        if self.back_color_brightness:
            fill.back_color.brightness = self.back_color_brightness

    def _write_fill_type(self, fill: FillFormat):
        """Dispatch to the FillFormat call matching self.fill_type."""
        if self.fill_type == FillType.NOFILL:
            fill.background()

        elif self.fill_type == FillType.SOLID:
            if (self.fore_color_rgb is not None) or (self.fore_color_mso_theme is not None):
                fill.solid()
                self._write_fore_color(fill)
            else:
                print("Warning: Cannot set FillType.SOLID without a valid fore_color_*.")

        elif self.fill_type == FillType.PATTERNED:
            fill.patterned()
            if self.pattern is not None:
                fill.pattern = self.pattern
            if (self.fore_color_rgb is not None) or (self.fore_color_mso_theme is not None):
                self._write_fore_color(fill)
            if (self.back_color_rgb is not None) or (self.back_color_mso_theme is not None):
                self._write_back_color(fill)

        elif self.fill_type == FillType.GRADIENT:
            print("FillType.GRADIENT not implemented jet.")
| 41.821656 | 100 | 0.674992 |
acf8ec50f9418c865443bb095ac65bec70d2fe6b | 1,074 | py | Python | Day_23/main.py | SidhuK/100_days_of_Code | af2701613a64f79d386a2061b93f7db79ed077ab | [
"MIT"
] | 2 | 2022-03-10T11:19:58.000Z | 2022-03-22T17:08:35.000Z | Day_23/main.py | SidhuK/100_days_of_Code | af2701613a64f79d386a2061b93f7db79ed077ab | [
"MIT"
] | null | null | null | Day_23/main.py | SidhuK/100_days_of_Code | af2701613a64f79d386a2061b93f7db79ed077ab | [
"MIT"
] | null | null | null | from flask import Flask, render_template
import random
import datetime
import requests
app = Flask(__name__)  # Flask application instance; routes are registered below.
@app.route('/')
def home():
    """Render the landing page with a random number and the current year."""
    return render_template(
        "index.html",
        num=random.randint(1, 10),
        year=datetime.datetime.now().year,
    )
@app.route("/guess/<name>")
def guess(name):
    """Guess gender and age for *name* via the genderize.io / agify.io APIs.

    Renders guess.html with the predictions. Network calls are bounded with
    a timeout so a slow external API cannot hang the request handler.
    """
    gender_url = f"https://api.genderize.io?name={name}"
    gender_response = requests.get(gender_url, timeout=10)
    gender_data = gender_response.json()
    # NOTE(review): "gender"/"age" may be null for unknown names — the
    # template then receives None; confirm that is acceptable.
    gender = gender_data["gender"]

    age_url = f"https://api.agify.io?name={name}"
    age_response = requests.get(age_url, timeout=10)
    age_data = age_response.json()
    age = age_data["age"]

    return render_template("guess.html", person_name=name, gender=gender, age=age)
@app.route("/blog/<num>")
def get_blog(num):
    """Render the blog page with all posts fetched from the JSON API.

    NOTE(review): the route captures ``<num>`` but it is not used to select
    a single post — all posts are always rendered; confirm this is intended.
    (A leftover debug ``print(num)`` was removed.)
    """
    blog_url = "https://api.npoint.io/5abcca6f4e39b4955965"
    response = requests.get(blog_url, timeout=10)
    all_posts = response.json()
    return render_template("blog.html", posts=all_posts)
if __name__ == "__main__":
    # Run the development server with the interactive debugger enabled.
    app.run(debug=True)
| 26.195122 | 82 | 0.70298 |
acf8ed116a6132731699cd6911a1f69ac3688c69 | 3,127 | py | Python | src/training/run_experiment_from_file.py | lschlessinger1/MS-project | e1c02d1d1a7a2480ff6f14f30625dc42ee3417e3 | [
"MIT"
] | 2 | 2019-04-29T15:18:11.000Z | 2019-12-13T18:58:40.000Z | src/training/run_experiment_from_file.py | lschlessinger1/MS-project | e1c02d1d1a7a2480ff6f14f30625dc42ee3417e3 | [
"MIT"
] | 275 | 2019-02-19T22:59:39.000Z | 2020-10-03T08:56:08.000Z | src/training/run_experiment_from_file.py | lschlessinger1/MS-project | e1c02d1d1a7a2480ff6f14f30625dc42ee3417e3 | [
"MIT"
] | null | null | null | import argparse
import json
import warnings
from multiprocessing.pool import Pool
from src.training.run_experiment import run_experiment
from src.autoks.postprocessing.summary import summarize
def run_experiments(experiments_filename, save: bool, use_comet: bool):
    """Run all experiments described in a JSON config file.

    :param experiments_filename: path to a JSON file with an
        'experiment_group' key and an 'experiments' list.
    :param save: whether final selected models are saved.
    :param use_comet: whether to log runs to Comet.
    :return: list of experiment output directory names, one per experiment.
    """
    with open(experiments_filename) as f:
        experiments_config = json.load(f)

    exp_dir_names = []
    # Iterate the configs directly instead of indexing with range(len(...)).
    for experiment_config in experiments_config['experiments']:
        # Tag every experiment with the group it belongs to.
        experiment_config['experiment_group'] = experiments_config['experiment_group']
        exp_dirname = run_experiment(experiment_config, save_models=save, use_gcp=False, use_comet=use_comet)
        exp_dir_names.append(exp_dirname)

    return exp_dir_names
def main():
    """Parse CLI arguments and run the experiment file accordingly.

    Supports repetition (--n_repeats), multiprocessing (--parallel with an
    optional --num_processes), Comet opt-out (--nocomet) and post-run
    summarization (--summarize).
    """
    parser = argparse.ArgumentParser(description='Run model search experiment from a file.')
    parser.add_argument(
        "--save",
        default=False,
        dest='save',
        action='store_true',
        help="If true, then final selected models will be saved to canonical, version-controlled location"
    )
    parser.add_argument(
        "--summarize",
        default=False,
        dest='summarize',
        action='store_true',
        help="If true, then the experiment will be summarized"
    )
    parser.add_argument(
        "--n_repeats",
        default=1,
        type=int,
        dest='n_repeats',
        help="The experiment will be repeated `n_repeats` times"
    )
    parser.add_argument(
        "--parallel",
        default=False,
        dest='parallel',
        action='store_true',
        help="If true, then the experiment will use multiprocessing"
    )
    parser.add_argument(
        "--num_processes",
        default=None,
        dest='num_processes',
        type=int,
        help="If using multiprocessing, then the experiment will use `num_processes` processes"
    )
    parser.add_argument(
        "--nocomet",
        default=False,
        action='store_true',
        help='If true, do not use Comet for this run.'
    )
    parser.add_argument("experiments_filename", type=str, help="Filename of JSON file of experiments to run.")
    args = parser.parse_args()
    if args.parallel:
        # Each repeat runs the whole experiment file in its own worker.
        with Pool(processes=args.num_processes) as p:
            results = p.starmap(run_experiments,
                                [(args.experiments_filename, args.save, not args.nocomet)] * args.n_repeats)
    else:
        if args.num_processes:
            warnings.warn("--num_processes was set, but --parallel was not. Experiments will be run sequentially.")
        results = [run_experiments(args.experiments_filename, args.save, use_comet=not args.nocomet)
                   for _ in range(args.n_repeats)]
    if args.summarize:
        # results is a list (per repeat) of lists of experiment directories.
        for results_dir_names in results:
            # Convenience option to summarize experiment after running it.
            for exp_dirname in results_dir_names:
                summarize(str(exp_dirname))
if __name__ == '__main__':
    # Script entry point.
    main()
| 31.27 | 115 | 0.653342 |
acf8ee06205d8a9c58007e476c3470289abb8981 | 11,187 | py | Python | simpleflow/task.py | ybastide/simpleflow | a69d2bfeaa855b5a599c572f64b4715fcd750801 | [
"MIT"
] | null | null | null | simpleflow/task.py | ybastide/simpleflow | a69d2bfeaa855b5a599c572f64b4715fcd750801 | [
"MIT"
] | 1 | 2021-02-10T19:43:02.000Z | 2021-02-10T19:43:02.000Z | simpleflow/task.py | ybastide/simpleflow | a69d2bfeaa855b5a599c572f64b4715fcd750801 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
import abc
import time
from copy import deepcopy
from enum import Enum
from typing import TYPE_CHECKING
import attr
import six
from simpleflow.base import Submittable
from simpleflow.history import History
from . import futures
from .activity import Activity
if TYPE_CHECKING:
from typing import Any, Dict, Optional, Type, Union # NOQA
def get_actual_value(value):
    """Unwrap a Future's result, or pass a plain value straight through."""
    return value.result if isinstance(value, futures.Future) else value
@six.add_metaclass(abc.ABCMeta)
class Task(Submittable):
    """A Task represents a work that can be scheduled for execution.
    """

    @property
    @abc.abstractmethod
    def name(self):
        raise NotImplementedError()

    @staticmethod
    def resolve_args(*args):
        # Replace any Future in the positional arguments with its result.
        return list(map(get_actual_value, args))

    @staticmethod
    def resolve_kwargs(**kwargs):
        # Replace any Future in the keyword arguments with its result.
        return {k: get_actual_value(v) for k, v in kwargs.items()}
class ActivityTask(Task):
    """
    Activity task.

    :type activity: Activity
    :type idempotent: Optional[bool]
    :type id: Optional[str]
    """
    def __init__(self, activity, *args, **kwargs):
        if not isinstance(activity, Activity):
            raise TypeError(
                "Wrong value for `activity`, got {} instead".format(type(activity))
            )
        # Keep original arguments for use in subclasses
        # For instance this helps casting a generic class to a simpleflow.swf.task,
        # see simpleflow.swf.task.ActivityTask.from_generic_task() factory
        self._args = deepcopy(args)
        self._kwargs = deepcopy(kwargs)

        self.activity = activity
        self.idempotent = activity.idempotent
        # NB: popping "context" here removes it from kwargs BEFORE they are
        # resolved, so it is never forwarded as a regular keyword argument.
        self.context = kwargs.pop("context", None)
        self.args = self.resolve_args(*args)
        self.kwargs = self.resolve_kwargs(**kwargs)
        self.id = None

    @property
    def name(self):
        # Prefixed activity name used for scheduling/identification.
        return "activity-{}".format(self.activity.name)

    def __repr__(self):
        return "{}(activity={}, args={}, kwargs={}, id={})".format(
            self.__class__.__name__, self.activity, self.args, self.kwargs, self.id
        )

    def execute(self):
        """Run the wrapped activity's callable with the resolved arguments."""
        method = self.activity.callable

        # Opt-in flag: pass the task context as a "context" keyword argument.
        if getattr(method, "add_context_in_kwargs", False):
            self.kwargs["context"] = self.context

        if hasattr(method, "execute"):
            # Callable is a Submittable-like class: instantiate, execute,
            # then run the optional post_execute hook.
            task = method(*self.args, **self.kwargs)
            task.context = self.context
            result = task.execute()
            if hasattr(task, "post_execute"):
                task.post_execute()
            return result
        else:
            # NB: the following line attaches some *state* to the callable, so it
            # can be used directly for advanced usage. This works well because we
            # don't do multithreading, but if we ever do, DANGER!
            method.context = self.context
            return method(*self.args, **self.kwargs)

    def propagate_attribute(self, attr, val):
        """
        Propagate to the activity.
        """
        setattr(self.activity, attr, val)
class WorkflowTask(Task):
    """
    Child workflow.

    :type executor: type(simpleflow.executor.Executor)
    :type workflow: type(simpleflow.workflow.Workflow)
    :type id: Optional[str]
    """
    def __init__(self, executor, workflow, *args, **kwargs):
        # Keep original arguments for use in subclasses
        # For instance this helps casting a generic class to a simpleflow.swf.task,
        # see simpleflow.swf.task.WorkflowTask.from_generic_task() factory
        self._args = deepcopy(args)
        self._kwargs = deepcopy(kwargs)

        self.executor = executor
        self.workflow = workflow
        self.idempotent = getattr(workflow, "idempotent", False)
        # Optional hook on the workflow class to derive a deterministic id
        # from the resolved arguments.
        get_workflow_id = getattr(workflow, "get_workflow_id", None)
        self.args = self.resolve_args(*args)
        self.kwargs = self.resolve_kwargs(**kwargs)
        if get_workflow_id:
            self.id = get_workflow_id(workflow, *self.args, **self.kwargs)
        else:
            self.id = None

    @property
    def name(self):
        # Prefixed workflow name used for scheduling/identification.
        return "workflow-{}".format(self.workflow.name)

    def __repr__(self):
        return "{}(workflow={}, args={}, kwargs={}, id={})".format(
            self.__class__.__name__,
            self.workflow.__module__ + "." + self.workflow.__name__,
            self.args,
            self.kwargs,
            self.id,
        )

    def execute(self):
        """Instantiate the workflow with our executor and run it."""
        workflow = self.workflow(self.executor)
        return workflow.run(*self.args, **self.kwargs)

    def propagate_attribute(self, attr, val):
        """
        Propagate to the workflow.
        """
        setattr(self.workflow, attr, val)
class ChildWorkflowTask(WorkflowTask):
    """
    WorkflowTask specialization for cases where no executor is available
    (yet): it simply forwards a null executor to the parent class.
    """
    def __init__(self, workflow, *wf_args, **wf_kwargs):
        super(ChildWorkflowTask, self).__init__(None, workflow, *wf_args, **wf_kwargs)
class SignalTask(Task):
    """
    Task representing a signal; running it locally is a no-op.
    """
    def __init__(self, name, *args, **kwargs):
        self._name = name
        self.args = self.resolve_args(*args)
        self.kwargs = self.resolve_kwargs(**kwargs)

    @property
    def name(self):
        """
        :return: the signal name
        :rtype: str
        """
        return self._name

    def execute(self):
        # Nothing to do outside of a real workflow engine.
        return None
class MarkerTask(Task):
    """Task recording a marker: a name plus serializable details."""
    def __init__(self, name, details):
        """
        :param name: Marker name
        :param details: Serializable marker details
        """
        self._name = name
        # The resolved details are kept as the single positional argument.
        self.args = self.resolve_args(details)
        self.kwargs = {}

    @property
    def name(self):
        """
        :return: the marker name
        :rtype: str
        """
        return self._name

    @property
    def details(self):
        return self.args[0]

    def execute(self):
        # Markers only carry data; nothing runs locally.
        return None
class TimerTask(Task):
    """
    Task representing a durable timer.
    """
    def __init__(self, timer_id, timeout, control=None):
        self.timer_id = timer_id
        self.timeout = timeout
        self.control = control
        self.args = ()
        self.kwargs = {}

    @property
    def name(self):
        return self.timer_id

    @property
    def id(self):
        return self.timer_id

    def __repr__(self):
        template = '<{} timer_id="{}" timeout={}>'
        return template.format(type(self).__name__, self.timer_id, self.timeout)

    def execute(self):
        # Local execution: simply block for the requested duration.
        time.sleep(self.timeout)
class CancelTimerTask(Task):
    """
    Task cancelling a previously started timer.
    """
    def __init__(self, timer_id):
        self.timer_id = timer_id
        self.args = ()
        self.kwargs = {}

    @property
    def name(self):
        return self.timer_id

    @property
    def id(self):
        return self.timer_id

    def __repr__(self):
        template = '<{} timer_id="{}">'
        return template.format(type(self).__name__, self.timer_id)

    def execute(self):
        # Local execution: there is no live timer to cancel, so do nothing.
        return None
@attr.s
class TaskFailureContext(object):
    """
    Some context for a task/workflow failure, passed to failure handlers
    which record their verdict through the decide_*() methods.
    """
    class Decision(Enum):
        # What the failure handler decided to do with the failed task.
        none = 0
        abort = 1
        ignore = 2
        retry_now = 3
        retry_later = 4
        cancel = 5
        handled = 6

    # NOTE: attr.ib() declaration order defines the generated __init__
    # signature; do not reorder these fields.
    a_task = attr.ib()  # type: Union[ActivityTask, WorkflowTask]
    event = attr.ib()  # type: Dict[str, Any]
    future = attr.ib()  # type: Optional[futures.Future]
    exception_class = attr.ib()  # type: Type[Exception]
    history = attr.ib(default=None)  # type: Optional[History]
    decision = attr.ib(default=Decision.none)  # type: Optional[Decision]
    retry_wait_timeout = attr.ib(default=None)  # type: Optional[int]
    # Lazily-filled caches; see _cache_error().
    _task_error = attr.ib(default=None)  # type: Optional[str]
    _task_error_type = attr.ib(default=None)  # type: Optional[Type[Exception]]

    @property
    def retry_count(self):
        # type: () -> Optional[int]
        # Number of retries recorded on the event, or None if never retried.
        return self.event.get("retry")

    @property
    def attempt_number(self):
        # type: () -> int
        # 1-based attempt counter (first attempt has no "retry" key).
        return self.event.get("retry", 0) + 1

    @property
    def task_name(self):
        # type: () -> Optional[str]
        # Prefer the payload name (scheduled tasks), else the task's own name.
        if hasattr(self.a_task, "payload"):
            return self.a_task.payload.name
        if hasattr(self.a_task, "name"):
            return self.a_task.name
        return None

    @property
    def exception(self):
        # type: () -> Optional[Exception]
        # NOTE(review): `future.exception` is accessed as an attribute, not
        # called; assumes simpleflow futures expose it as a property.
        return self.future.exception

    @property
    def current_started_decision_id(self):
        # type: () -> Optional[int]
        return self.history.started_decision_id if self.history else None

    @property
    def last_completed_decision_id(self):
        # type: () -> Optional[int]
        return self.history.completed_decision_id if self.history else None

    @property
    def task_error(self):
        # type: () -> str
        # Parsed-on-first-use error message extracted from TaskFailed details.
        if self._task_error is None:
            self._cache_error()
        return self._task_error

    @property
    def task_error_type(self):
        # type: () -> Optional[Type[Exception]]
        if self._task_error is None:
            self._cache_error()
        return self._task_error_type

    def _cache_error(self):
        # Parse the TaskFailed "details" payload once (JSON when possible)
        # and cache the extracted error message and exception type.
        from simpleflow.exceptions import TaskFailed
        from simpleflow.utils import import_from_module, json_loads_or_raw
        self._task_error = ""  # falsy value different from None
        if isinstance(self.exception, TaskFailed) and self.exception.details:
            details = json_loads_or_raw(self.exception.details)
            if isinstance(details, dict):
                if "error" in details:
                    self._task_error = details["error"]
                if "error_type" in details:
                    try:
                        self._task_error_type = import_from_module(
                            details["error_type"]
                        )
                    except Exception:
                        # Unimportable error_type: leave the cached type None.
                        pass

    @property
    def id(self):
        # type: () -> Optional[int]
        event = self.event
        return History.get_event_id(event)

    def decide_abort(self):
        # type: () -> TaskFailureContext
        self.decision = self.Decision.abort
        return self

    def decide_ignore(self):
        # type: () -> TaskFailureContext
        self.decision = self.Decision.ignore
        return self

    def decide_cancel(self):
        # type: () -> TaskFailureContext
        self.decision = self.Decision.cancel
        return self

    def decide_retry(self, retry_wait_timeout=0):
        # type: (Optional[int]) -> TaskFailureContext
        # A falsy timeout means "retry immediately".
        self.decision = (
            self.Decision.retry_now
            if not retry_wait_timeout
            else self.Decision.retry_later
        )
        self.retry_wait_timeout = retry_wait_timeout
        return self

    def decide_handled(self, a_task, future=None):
        # type: (Union[ActivityTask, WorkflowTask], Optional[futures.Future]) -> TaskFailureContext
        # The handler replaced the task/future itself; just record them.
        self.a_task = a_task
        self.future = future
        self.decision = self.Decision.handled
        return self
| 27.285366 | 99 | 0.600876 |
acf8ee234e58b6e21ad2053f6bdfe9d0357ebee0 | 11,917 | py | Python | variableProcessing/LSFSVM_class/LS_FSVM.py | ishidaira233/TX-Credit-Assessement | 289f230a609554db32552670c300f992a3fe068f | [
"MIT"
] | null | null | null | variableProcessing/LSFSVM_class/LS_FSVM.py | ishidaira233/TX-Credit-Assessement | 289f230a609554db32552670c300f992a3fe068f | [
"MIT"
] | null | null | null | variableProcessing/LSFSVM_class/LS_FSVM.py | ishidaira233/TX-Credit-Assessement | 289f230a609554db32552670c300f992a3fe068f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 1 22:46:20 2020
@author: zinan
"""
import numpy as np
from numpy import linalg as LA
from LSFSVM_class import Kernel
from LSFSVM_class import Precision
from imblearn.over_sampling import SVMSMOTE
import math
from sklearn.model_selection import train_test_split
"""
Least Square Fuzzy SVM
linear equation problem Package: NUMPY.LINALG
Parameters
C: penalty
kernel_dict :
'type': 'LINEAR' / 'RBF' 'sigma' / 'POLY' 'd'
fuzzyvalue:
membershape value based on the class of center
'type': 'Cen'
'function' : 'Lin' / 'Exp'
membershape value based on the actuale hyper-plane
'type': 'Hyp'
'function' : 'Lin' / 'Exp'
r_max : radio between 0 and 1
r_min : radio between 0 and 1 for balancing data
usually for the majority class r = len(y_minority)/len(y_majority)
and for the minority class r = 1
Methods
_mvalue(self, X, y)
Calculate fuzzy membership value
fit(self, X, Y)
Fit the model according to the given training data.
predict(self, X)
Predict class labels for samples in X.
Platt_Probabilistic(self,deci,label,prior1,prior0)
For posterior class probability Pr(y = 1|x) = 1/(1+exp(Af+B)) calculate
Position parameter (B) and scale parameter (A)
predict_prob(self,X)
Posterior class probability Pr(y = 1|x)
decision_function(self, X)
Predict confidence scores for samples.
The confidence score for a sample is the signed distance of that sample to the hyperplane.
"""
class LSFSVM:
    """
    Least-Squares Fuzzy SVM classifier.

    Solves the LS-SVM linear (KKT) system with per-sample fuzzy membership
    weights; see the module docstring for the parameter semantics.
    """

    def __init__(
        self,
        C=3,
        kernel_dict=None,
        fuzzyvalue=None,
        databalance="origine",
        r_max=1,
        r_min=1,
    ):
        """
        :param C: penalty (regularization) parameter.
        :param kernel_dict: kernel spec, e.g. {'type': 'LINEAR'},
            {'type': 'RBF', 'sigma': ...} or {'type': 'POLY', 'd': ...}.
            Defaults to {'type': 'LINEAR'}.
        :param fuzzyvalue: membership spec, e.g. {'type': 'Cen',
            'function': 'Lin'} (the default) or {'type': 'Hyp', ...}.
        :param databalance: 'origine' (unchanged), 'LowSampling' or
            'UpSampling'.
        :param r_max: membership scaling ratio for the positive class.
        :param r_min: membership scaling ratio for the negative class.
        """
        # BUG FIX: the defaults were shared mutable dicts (classic Python
        # pitfall) -- any mutation leaked into every later instance.  Use
        # None sentinels and build a fresh dict per instance instead.
        if kernel_dict is None:
            kernel_dict = {"type": "LINEAR"}
        if fuzzyvalue is None:
            fuzzyvalue = {"type": "Cen", "function": "Lin"}
        self.C = C
        self.kernel_dict = kernel_dict
        self.fuzzyvalue = fuzzyvalue
        self.r_max = r_max
        self.r_min = r_min
        self.databalance = databalance
def _mvalue(self, X, y):
# print('fuzzy value:', self.fuzzyvalue )
train_data = np.append(X, y.reshape(len(y), 1), axis=1)
if self.databalance == "LowSampling":
data_maj = train_data[y == 1] # 将多数
data_min = train_data[y != 1]
index = np.random.randint(len(data_maj), size=len(data_min))
lower_data_maj = data_maj[list(index)]
train_data = np.append(lower_data_maj, data_min, axis=0)
X = train_data[:, :-1]
y = train_data[:, -1]
elif self.databalance == "UpSampling":
X, y = SVMSMOTE(random_state=42).fit_sample(
train_data[:, :-1], np.asarray(train_data[:, -1])
)
else:
X = X
y = y
if self.fuzzyvalue["type"] == "Cen":
x_1 = X[y == 1]
x_0 = X[y == -1]
x_centre_1 = np.mean(x_1, axis=0)
x_centre_0 = np.mean(x_0, axis=0)
max_distance_1 = 0
max_distance_0 = 0
for i in range(len(x_1)):
distance = LA.norm(x_centre_1 - x_1[i, :])
if max_distance_1 < distance:
max_distance_1 = distance
for i in range(len(x_0)):
distance = LA.norm(x_centre_0 - x_0[i, :])
if max_distance_0 < distance:
max_distance_0 = distance
memership = []
if self.fuzzyvalue["function"] == "Lin":
for i in range(len(y)):
if y[i] == 1:
memership.append(
(1 - LA.norm(X[i] - x_centre_1) / (max_distance_1 + 0.0001))
* self.r_max
)
if y[i] == -1:
memership.append(
(1 - LA.norm(X[i] - x_centre_0) / (max_distance_0 + 0.0001))
* self.r_min
)
elif self.fuzzyvalue["function"] == "Exp":
for i in range(len(y)):
if y[i] == 1:
memership.append(
(2 / (1 + np.exp(LA.norm(X[i] - x_centre_1)))) * self.r_max
)
if y[i] == -1:
memership.append(
(2 / (1 + np.exp(LA.norm(X[i] - x_centre_0)))) * self.r_min
)
elif self.fuzzyvalue["type"] == "Hyp":
m = y.shape[0]
C = 3
gamma = 1
# Kernel
K = Kernel.RBF(m, gamma)
K.calculate(X)
H = np.multiply(np.dot(np.matrix(y).T, np.matrix(y)), K.kernelMat)
M_BR = H + np.eye(m) / C
# Concatenate
L_L = np.concatenate((np.matrix(0), np.matrix(y).T), axis=0)
L_R = np.concatenate((np.matrix(y), M_BR), axis=0)
L = np.concatenate((L_L, L_R), axis=1)
R = np.ones(m + 1)
R[0] = 0
# solve
b_a = LA.solve(L, R)
b = b_a[0]
alpha = b_a[1:]
K.expand(X)
A = np.multiply(alpha, y)
f = b + np.dot(K.testMat, A)
d_hyp = abs(f * y)
memership = []
if self.fuzzyvalue["function"] == "Lin":
for i in range(len(y)):
if y[i] == 1:
memership.append(
(1 - d_hyp[i] / (max(d_hyp) + 0.0001)) * self.r_max
)
if y[i] == -1:
memership.append(
(1 - d_hyp[i] / (max(d_hyp) + 0.0001)) * self.r_min
)
elif self.fuzzyvalue["function"] == "Exp":
for i in range(len(y)):
if y[i] == 1:
memership.append((2 / (1 + np.exp(d_hyp[i]))) * self.r_max)
if y[i] == -1:
memership.append((2 / (1 + np.exp(d_hyp[i]))) * self.r_min)
self.m_value = np.array(memership)
return self.m_value
    def fit(self, X, Y):
        """
        Fit the LS-FSVM: build the kernel matrix, assemble the KKT linear
        system weighted by the fuzzy memberships, and solve for (b, alpha).

        NOTE(review): assumes _mvalue(X, Y) was called first so that
        self.m_value exists and matches the (possibly re-balanced) data.
        """
        # print('Kernel:', self.kernel_dict)
        train_data = np.append(X, Y.reshape(len(Y), 1), axis=1)
        if self.databalance == "LowSampling":
            # Randomly down-sample the majority (+1) class to minority size.
            data_maj = train_data[Y == 1]
            data_min = train_data[Y != 1]
            index = np.random.randint(len(data_maj), size=len(data_min))
            lower_data_maj = data_maj[list(index)]
            train_data = np.append(lower_data_maj, data_min, axis=0)
            X = train_data[:, :-1]
            Y = train_data[:, -1]
            self.Y = Y
        elif self.databalance == "UpSampling":
            # SMOTE oversampling of the minority class (deterministic seed).
            X, Y = SVMSMOTE(random_state=42).fit_sample(
                train_data[:, :-1], np.asarray(train_data[:, -1])
            )
            self.Y = Y
        else:
            X = X
            Y = Y
            self.Y = Y
        m = len(Y)
        # Kernel selection from the spec dict.
        if self.kernel_dict["type"] == "RBF":
            K = Kernel.RBF(m, self.kernel_dict["sigma"])
            K.calculate(X)
        elif self.kernel_dict["type"] == "LINEAR":
            K = Kernel.LINEAR(m)
            K.calculate(X)
        elif self.kernel_dict["type"] == "POLY":
            K = Kernel.POLY(m, self.kernel_dict["d"])
            K.calculate(X)
        # Omega = (Y Y^T) .* K; fuzzy memberships scale the 1/C regularizer
        # per sample (broadcast of the membership column vector).
        H = np.multiply(np.dot(np.matrix(Y).T, np.matrix(Y)), K.kernelMat)
        M_BR = H + np.eye(m) / (self.C * (self.m_value[:, None]))
        # Assemble the full (m+1)x(m+1) system: [[0, Y^T], [Y, M_BR]].
        L_L = np.concatenate((np.matrix(0), np.matrix(Y).T), axis=0)
        L_R = np.concatenate((np.matrix(Y), M_BR), axis=0)
        L = np.concatenate((L_L, L_R), axis=1)
        # Right-hand side: [0, 1, 1, ..., 1].
        R = np.ones(m + 1)
        R[0] = 0
        # solve
        b_a = LA.solve(L, R)
        b = b_a[0]
        alpha = b_a[1:]
        self.alpha = alpha
        self.b = b
        self.K = K
        self.kernelMat = K.kernelMat
def predict(self, X):
self.K.expand(X)
A = np.multiply(self.alpha, self.Y)
y_predict = self.b + np.dot(self.K.testMat, A)
self.y_predict = y_predict
y_pred = y_predict.copy()
y_pred[y_pred >= 0] = 1
y_pred[y_pred < 0] = -1
self.y_pred = y_pred
return y_pred
def Platt_Probabilistic(self, deci, label, prior1, prior0):
maxiter = 100
minstep = 1e-10
sigma = 1e-12
hiTarget = (prior1 + 1.0) / (prior1 + 2.0)
loTarget = 1 / (prior0 + 2.0)
leng = prior1 + prior0
t = np.zeros(leng)
for i in range(leng):
if label[i] > 0:
t[i] = hiTarget
else:
t[i] = loTarget
A = 0.0
B = math.log((prior0 + 1.0) / (prior1 + 1.0))
fval = 0.0
for i in range(leng):
fApB = deci[i] * A + B
if fApB >= 0:
fval += t[i] * fApB + math.log(1 + np.exp(-fApB))
else:
fval += (t[i] - 1) * fApB + math.log(1 + np.exp(fApB))
for it in range(maxiter):
# Update Gradient and Hessian (use H’ = H + sigma I)
h11 = h22 = sigma
h21 = g1 = g2 = 0.0
for i in range(leng):
fApB = deci[i] * A + B
if fApB >= 0:
p = np.exp(-fApB) / (1.0 + np.exp(-fApB))
q = 1.0 / (1.0 + np.exp(-fApB))
else:
p = 1.0 / (1.0 + np.exp(fApB))
q = np.exp(fApB) / (1.0 + np.exp(fApB))
d2 = p * q
h11 += deci[i] * deci[i] * d2
h22 += d2
h21 += deci[i] * d2
d1 = t[i] - p
g1 += deci[i] * d1
g2 += d1
if abs(g1) < 1e-5 and abs(g2) < 1e-5: # Stopping criteria
break
# Compute modified Newton directions
det = h11 * h22 - h21 * h21
dA = -(h22 * g1 - h21 * g2) / det
dB = -(-h21 * g1 + h11 * g2) / det
gd = g1 * dA + g2 * dB
stepsize = 1
while stepsize >= minstep:
# Line search
newA = A + stepsize * dA
newB = B + stepsize * dB
newf = 0.0
for i in range(leng):
fApB = deci[i] * newA + newB
if fApB >= 0:
newf += t[i] * fApB + math.log(1 + np.exp(-fApB))
else:
newf += (t[i] - 1) * fApB + math.log(1 + np.exp(fApB))
if newf < fval + 0.0001 * stepsize * gd:
A = newA
B = newB
fval = newf
break # Sufficient decrease satisfied
else:
stepsize /= 2.0
if stepsize < minstep:
print("Line search fails")
break
if it >= maxiter:
print("Reaching maximum iterations")
return A, B
def predict_prob(self, X):
A = np.multiply(self.alpha, self.Y)
y_hat = self.b + np.dot(self.kernelMat, A)
deci = y_hat
label = self.Y
prior1 = len(self.Y[self.Y == 1])
prior0 = len(self.Y[self.Y == -1])
A, B = self.Platt_Probabilistic(deci, label, prior1, prior0)
y_prob = 1 / (1 + np.exp(A * self.y_predict + B))
for i in range(len(y_prob)):
y_prob[i] = round(y_prob[i], 3)
return y_prob
def decision_function(self, X):
return self.y_predict
| 31.863636 | 98 | 0.45691 |
acf8ee3212ca695557768524115729e410159ad2 | 133,163 | py | Python | gui/generateds_gui.py | akretion/generateds | bc106f036fefce9d1659ce8675fe9aa3a07366ab | [
"MIT"
] | null | null | null | gui/generateds_gui.py | akretion/generateds | bc106f036fefce9d1659ce8675fe9aa3a07366ab | [
"MIT"
] | null | null | null | gui/generateds_gui.py | akretion/generateds | bc106f036fefce9d1659ce8675fe9aa3a07366ab | [
"MIT"
] | 1 | 2020-10-18T07:58:16.000Z | 2020-10-18T07:58:16.000Z | #!/usr/bin/env python
import sys
import os
from optparse import OptionParser
from configparser import ConfigParser
from xml.parsers import expat
import subprocess
import re
import locale
import gettext
if sys.version_info.major == 2:
import gtk
else:
# https://sourceforge.net/projects/pygobjectwin32/files/
# https://blogs.gnome.org/kittykat/2014/01/29/developing-gtk-3-apps-with-python-on-windows/
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk as gtk
# import pango
from libgenerateDS.gui import generateds_gui_session
#import generateds_gui_session
## import warnings
## warnings.warn('importing IPShellEmbed', UserWarning)
## from IPython.Shell import IPShellEmbed
## args = ''
## ipshell = IPShellEmbed(args,
## banner = 'Dropping into IPython',
## exit_msg = 'Leaving Interpreter, back to program.')
##
# Then use the following line where and when you want to drop into the
# IPython shell:
# ipshell('<some message> -- Entering ipshell.\\nHit Ctrl-D to exit')
# Globals and constants:
#
# Do not modify the following VERSION comments.
# Used by updateversion.py.
##VERSION##
VERSION = '2.29.3'
##VERSION##
# GtkBuilder instance; (re)assigned in GeneratedsGui.__init__.
Builder = None
# UIItemSpec descriptors for the option widgets; presumably populated
# elsewhere (main/startup code, not visible in this chunk).
ParamNameList = []
# %-format template expanded with the fragments built by
# trans_params_2_dict() to produce the generateDS.py command line.
CmdTemplate = (
    '%(exec_path)s --no-questions' +
    '%(force)s' +
    '%(output_superclass)s' +
    '%(output_subclass)s' +
    '%(prefix)s' +
    '%(namespace_prefix)s' +
    '%(behavior_filename)s' +
    '%(properties)s' +
    '%(old_getters_setters)s' +
    '%(subclass_suffix)s' +
    '%(root_element)s' +
    '%(superclass_module)s' +
    '%(validator_bodies)s' +
    '%(user_methods)s' +
    '%(no_dates)s' +
    '%(no_versions)s' +
    '%(no_process_includes)s' +
    '%(silence)s' +
    '%(namespace_defs)s' +
    '%(external_encoding)s' +
    '%(member_specs)s' +
    '%(export_spec)s' +
    '%(one_file_per_xsd)s' +
    '%(output_directory)s' +
    '%(module_suffix)s' +
    '%(preserve_cdata_tags)s' +
    '%(cleanup_name_list)s' +
    ' %(input_schema)s' +
    ''
    )
# Variant used for "capture command line": the schema is put on its own
# continuation line for readability.
CaptureCmdTemplate = (
    '%(exec_path)s --no-questions' +
    '%(force)s' +
    '%(properties)s' +
    '%(namespace_prefix)s' +
    '%(output_superclass)s' +
    '%(output_subclass)s' +
    '%(prefix)s' +
    '%(behavior_filename)s' +
    '%(old_getters_setters)s' +
    '%(subclass_suffix)s' +
    '%(root_element)s' +
    '%(superclass_module)s' +
    '%(validator_bodies)s' +
    '%(user_methods)s' +
    '%(no_dates)s' +
    '%(no_versions)s' +
    '%(no_process_includes)s' +
    '%(silence)s' +
    '%(namespace_defs)s' +
    '%(external_encoding)s' +
    '%(member_specs)s' +
    '%(export_spec)s' +
    '%(one_file_per_xsd)s' +
    '%(output_directory)s' +
    '%(module_suffix)s' +
    '%(preserve_cdata_tags)s' +
    '%(cleanup_name_list)s' +
    ' \\\n %(input_schema)s' +
    ''
    )
# Indexed by the error codes returned from validate_params(); index 0 is
# the "no error" placeholder.
ErrorMessages = [
    '',
    'Must enter input schema name.',
    'Must enter either output superclass name or output subclass file name.',
]
# Tooltip attached to the member-specs combobox created in __init__.
Memberspecs_tooltip_text = '''\
Generate member (type) specifications in each
class: a dictionary of instances of class
MemberSpec_ containing member name, type,
and array or not. Allowed values are
"list" or "dict". Default: None.
'''
#
# Classes
#
class UIItemSpec(object):
    """
    Descriptor for one generator-option widget: the option's base name,
    the widget kind used to edit it, and the accessor verb suffix
    (e.g. 'text' maps to get_text()/set_text() on the widget).
    """
    def __init__(self, name='', ui_type='', access_action=''):
        self.name = name
        self.ui_type = ui_type
        self.access_action = access_action

    # -- name ---------------------------------------------------------
    def set_name(self, name):
        self.name = name

    def get_name(self):
        return self.name

    # -- ui_type ------------------------------------------------------
    def set_ui_type(self, ui_type):
        self.ui_type = ui_type

    def get_ui_type(self):
        return self.ui_type

    # -- access_action ------------------------------------------------
    def set_access_action(self, access_action):
        self.access_action = access_action

    def get_access_action(self):
        return self.access_action
class GeneratedsGui(object):
    """Main application window: collects generateDS.py options and runs it."""

    def __init__(self, options):
        global Builder
        # Default values
        Builder = gtk.Builder()
        Builder.set_translation_domain('generateds_gui')
        self.options = options
        # self.ui_spec_filename = ui_spec_filename
        self.filename = None
        self.about_dialog = None
        self.params = generateds_gui_session.sessionType()
        self.ui_obj_dict = {}
        self.session_filename = None
        self.current_folder = None
        # use GtkBuilder to build our interface from the XML file
        ui_spec_filename = options.impl_gui
        try:
            if ui_spec_filename is None:
                # No explicit UI file: load the embedded spec (GTK2 takes a
                # (string, length) pair, GTK3 just the string).
                Builder.add_from_string(branch_version(
                    'Ui_spec, len(Ui_spec)', 'Ui_spec'))
            else:
                Builder.add_from_file(ui_spec_filename)
        except:
            msg = "Failed to load UI XML file: %s" % ui_spec_filename
            self.error_message(msg)
            sys.exit(1)
        # get the widgets which will be referenced in callbacks
        bgo = Builder.get_object
        self.window = bgo("window1")
        self.statusbar = bgo("statusbar1")
        # Cache one widget per option; widget ids follow the
        # "<option-name>_<ui-type>" naming convention.
        for item in ParamNameList:
            if item.get_ui_type() != 'combobox':
                s1 = '%s_%s' % (item.get_name(), item.get_ui_type(), )
                setattr(self, s1, bgo(s1))
                self.ui_obj_dict[s1] = bgo(s1)
        # Create the member-specs combobox.
        member_specs_combobox = branch_version(
            'gtk.combo_box_new_text()', 'gtk.ComboBoxText()')
        member_specs_combobox.set_name('member_specs_combobox')
        member_specs_combobox.set_tooltip_text(Memberspecs_tooltip_text)
        self.ui_obj_dict['member_specs_combobox'] = member_specs_combobox
        member_specs_combobox.append_text("none")
        member_specs_combobox.append_text("list")
        member_specs_combobox.append_text("dict")
        member_specs_combobox_container = bgo(
            'member_specs_combobox_container')
        member_specs_combobox_container.add(member_specs_combobox)
        member_specs_combobox.set_active(0)
        member_specs_combobox.show()
        self.content_dialog = ContentDialog()
        # connect signals
        Builder.connect_signals(self)
        Builder.connect_signals(self.content_dialog)
        # set the default icon to the GTK "edit" icon
        branch_version(
            'gtk.window_set_default_icon_name(gtk.STOCK_EDIT)',
            'gtk.Window.set_default_icon_name(gtk.STOCK_EDIT)')
        # setup and initialize our statusbar
        self.statusbar_cid = self.statusbar.get_context_id(
            "Tutorial GTK+ Text Editor")
        self.reset_default_status()
        self.params = generateds_gui_session.sessionType()
        # Load a session if specified.
        session = self.options.session
        if session:
            session = os.path.abspath(session)
            self.session_filename = session
            self.load_session(session)
            msg = 'Session file: %s' % (self.session_filename, )
            self.statusbar.pop(self.statusbar_cid)
            self.statusbar.push(self.statusbar_cid, msg)
        else:
            # No session file: seed params from the (default) widget values.
            self.trans_gui_2_obj()
        # Snapshot for the "session data has changed" dirty check.
        self.saved_params = self.params.copy()
# We do this by calling gtk_main_quit(). We could have also just specified
# gtk_main_quit as the handler in Glade!
def on_window_destroy(self, widget, data=None):
self.trans_gui_2_obj()
## self.dump_params('saved_params:', self.saved_params)
## self.dump_params('params:', self.params)
if self.params != self.saved_params:
message = 'Session data has changed.\n\nSave?'
if sys.version_info.major == 2:
dialog = gtk.MessageDialog(
None,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_ERROR,
gtk.BUTTONS_NONE,
message)
dialog.add_buttons(
gtk.STOCK_YES, gtk.RESPONSE_YES,
'_Discard', 1,
gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
)
else:
dialog = gtk.MessageDialog(
None,
(gtk.DialogFlags.MODAL |
gtk.DialogFlags.DESTROY_WITH_PARENT),
gtk.MessageType.ERROR,
gtk.ButtonsType.NONE,
message)
dialog.add_buttons(
gtk.STOCK_YES, gtk.ResponseType.YES,
'_Discard', 1,
gtk.STOCK_CANCEL, gtk.ResponseType.CANCEL,
)
response = dialog.run()
dialog.destroy()
if response == branch_version(
'gtk.RESPONSE_YES', 'gtk.ResponseType.YES'):
self.save_session_action()
elif response == 1:
pass
elif response == branch_version(
'gtk.RESPONSE_CANCEL', 'gtk.ResponseType.CANCEL'):
return
gtk.main_quit()
def on_window_delete_event(self, widget, event, data=None):
self.on_window_destroy(widget, data)
def on_quit_menu_item_activate(self, widget, data=None):
self.on_window_destroy(widget, data)
def on_quit_button_clicked(self, widget, data=None):
self.on_window_destroy(widget, data)
# Get the values from the widgets in the UI.
# Format the command line.
# Generate the output files.
def on_generate_menuitem_activate(self, menuitem, data=None):
self.trans_gui_2_obj()
params_dict = self.trans_params_2_dict()
result, msg = self.validate_params(params_dict)
if result:
self.statusbar.pop(self.statusbar_cid)
self.statusbar.push(self.statusbar_cid, 'Error: %s' % (msg, ))
self.error_message(msg)
else:
cmd = self.create_command_line(params_dict, CmdTemplate)
#print 'cmd: %s' % (cmd, )
self.run_command(cmd)
return True
on_generate_button_clicked = on_generate_menuitem_activate
def on_capture_cl_menuitem_activate(self, menuitem, data=None):
self.trans_gui_2_obj()
params_dict = self.trans_params_2_dict()
result, msg = self.validate_params(params_dict)
if result:
self.statusbar.pop(self.statusbar_cid)
self.statusbar.push(self.statusbar_cid, 'Error: %s' % (msg, ))
self.error_message(msg)
else:
cmd = self.create_command_line(params_dict, CaptureCmdTemplate)
cmd = cmd.replace(' --', ' \\\n --')
cmd = cmd.replace(' -o', ' \\\n -o')
cmd = cmd.replace(' -s', ' \\\n -s')
cmd = cmd.replace(' -f', ' \\\n -f')
cmd = cmd.replace(' -m', ' \\\n -m')
self.display_content('Command line', cmd)
return True
    def trans_gui_2_obj(self):
        """Copy every widget's current value into self.params (GUI -> session)."""
        for item in ParamNameList:
            ui_name = '%s_%s' % (item.get_name(), item.get_ui_type(), )
            ui_obj = self.ui_obj_dict[ui_name]
            if ui_obj is not None:
                if item.get_name() == 'member_specs':
                    # The combobox index maps to the symbolic value.
                    value = ui_obj.get_active()
                    if value == 1:
                        self.params.set_member_specs('list')
                    elif value == 2:
                        self.params.set_member_specs('dict')
                    else:
                        self.params.set_member_specs('none')
                else:
                    #s2 = '%s_%s' % (item.get_name(), item.get_ui_type(), )
                    # Dispatch to get_text()/get_active()/... per item spec.
                    method = getattr(
                        ui_obj, 'get_%s' % item.get_access_action())
                    value = method()
                    setattr(self.params, item.get_name(), value)

    def trans_obj_2_gui(self):
        """Push self.params values back into the widgets (session -> GUI)."""
        for item in ParamNameList:
            ui_name = '%s_%s' % (item.get_name(), item.get_ui_type(), )
            ui_obj = self.ui_obj_dict[ui_name]
            if ui_obj is not None:
                if item.get_name() == 'member_specs':
                    if self.params.get_member_specs() == 'list':
                        ui_obj.set_active(1)
                    elif self.params.get_member_specs() == 'dict':
                        ui_obj.set_active(2)
                    else:
                        ui_obj.set_active(0)
                else:
                    value = getattr(self.params, item.get_name())
                    if value is None:
                        # Normalize missing values per widget type.
                        if item.get_ui_type() == 'entry':
                            value = ''
                        elif item.get_ui_type() == 'checkbutton':
                            value = False
                        elif item.get_ui_type() == 'combobox':
                            value = 0
                    method = getattr(
                        ui_obj,
                        'set_%s' % item.get_access_action())
                    method(value)
def dump_params(self, msg, params):
print(msg)
params.export(sys.stdout, 0, name_='session')
    def trans_params_2_dict(self):
        """
        Convert self.params into a dict of pre-formatted command-line
        fragments keyed by option name; unset options map to ''.
        """
        params = self.params
        params_dict = {}
        pd = params_dict
        pd['input_schema'] = getattr(params, 'input_schema')
        self.transform_1_param(params, pd, 'output_superclass', 'o')
        self.transform_1_param(params, pd, 'output_subclass', 's')
        # Boolean flags map to their bare option or the empty string.
        pd['force'] = (' -f' if params.get_force() else '')
        self.transform_1_param(params, pd, 'prefix', 'p')
        if params.get_empty_namespace_prefix():
            # -a "" (explicit empty prefix) wins over any prefix text.
            pd['namespace_prefix'] = ' -a ""'
        else:
            self.transform_1_param(params, pd, 'namespace_prefix', 'a')
        self.transform_1_param(params, pd, 'behavior_filename', 'b')
        pd['properties'] = (' -m' if params.get_properties() else '')
        self.transform_1_param(
            params, pd, 'subclass_suffix', 'subclass-suffix', True)
        self.transform_1_param(
            params, pd, 'root_element', 'root-element', True)
        self.transform_1_param(
            params, pd, 'superclass_module', 'super', True)
        pd['old_getters_setters'] = (
            ' --use-old-getter-setter'
            if params.get_old_getters_setters()
            else '')
        self.transform_1_param(
            params, pd, 'user_methods', 'user-methods', True)
        self.transform_1_param(
            params, pd, 'validator_bodies', 'validator-bodies', True)
        pd['no_dates'] = (' --no-dates' if params.get_no_dates() else '')
        pd['no_versions'] = (
            ' --no-versions' if params.get_no_versions() else '')
        pd['no_process_includes'] = (
            ' --no-process-includes'
            if params.get_no_process_includes()
            else '')
        pd['silence'] = (' --silence' if params.get_silence() else '')
        # Special case for namespacedefs because of quoting.
        name = 'namespace_defs'
        flag = 'namespacedef'
        value = getattr(params, name)
        params_dict[name] = (
            " --%s='%s'" % (flag, value, )
            if value.strip()
            else '')
        self.transform_1_param(
            params, pd, 'external_encoding', 'external-encoding', True)
        if params.get_member_specs() == 'list':
            pd['member_specs'] = ' --member-specs=list'
        elif params.get_member_specs() == 'dict':
            pd['member_specs'] = ' --member-specs=dict'
        else:
            pd['member_specs'] = ''
        self.transform_1_param(
            params, pd, 'export_spec', 'export', True)
        pd['one_file_per_xsd'] = (
            ' --one-file-per-xsd' if params.get_one_file_per_xsd() else '')
        self.transform_1_param(
            params, pd, 'output_directory', 'output-directory', True)
        self.transform_1_param(
            params, pd, 'module_suffix', 'module-suffix', True)
        pd['preserve_cdata_tags'] = (
            ' --preserve-cdata-tags'
            if params.get_preserve_cdata_tags()
            else '')
        self.transform_1_param(
            params, pd, 'cleanup_name_list', 'cleanup-name-list', True)
        return pd
def transform_1_param(
self, params, params_dict, name, flag, longopt=False):
value = getattr(params, name)
if longopt:
params_dict[name] = (
' --%s="%s"' % (flag, value, )
if value.strip()
else '')
else:
params_dict[name] = (
' -%s "%s"' % (flag, value, )
if value.strip()
else '')
def create_command_line(self, params_dict, template):
params_dict['exec_path'] = self.options.exec_path
cmd = template % params_dict
return cmd
def validate_params(self, params_dict):
p = params_dict
#print sorted(p.keys())
result = 0
msg = ''
if not p['input_schema']:
result = 1
elif not (p['output_superclass'] or p['output_subclass']):
result = 2
if result:
msg = ErrorMessages[result]
return result, msg
    # Clear all the fields/widgets to default values.
    def on_clear_menuitem_activate(self, menuitem, data=None):
        """Ask for confirmation, then reset the session and all widgets."""
        message = 'Clear all entries?\nAre you sure?'
        # GTK2 and GTK3 spell the dialog flag/enum names differently.
        if sys.version_info.major == 2:
            dialog = gtk.MessageDialog(
                None,
                gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
                gtk.MESSAGE_WARNING,
                gtk.BUTTONS_OK_CANCEL,
                message
                )
        else:
            dialog = gtk.MessageDialog(
                None,
                gtk.DialogFlags.MODAL | gtk.DialogFlags.DESTROY_WITH_PARENT,
                gtk.MessageType.WARNING,
                gtk.ButtonsType.OK_CANCEL,
                message
                )
        response = dialog.run()
        dialog.destroy()
        if response == branch_version(
                'gtk.RESPONSE_OK', 'gtk.ResponseType.OK'):
            # Forget the current session file and rebuild a blank session.
            self.session_filename = None
            self.params = generateds_gui_session.sessionType(
                input_schema='',
                output_superclass='',
                output_subclass='',
                force=False,
                prefix='',
                namespace_prefix='',
                empty_namespace_prefix=False,
                behavior_filename='',
                properties=False,
                subclass_suffix='',
                root_element='',
                superclass_module='',
                auto_super=False,
                old_getters_setters=False,
                validator_bodies='',
                user_methods='',
                no_dates=False,
                no_versions=False,
                no_process_includes=False,
                silence=False,
                namespace_defs='',
                external_encoding='',
                member_specs='',
                export_spec='',
                one_file_per_xsd=False,
                output_directory='',
                module_suffix='',
                preserve_cdata_tags=False,
                cleanup_name_list='',
                )
            # Push the blank values back into the widgets.
            self.trans_obj_2_gui()
def run_command(self, cmd):
spobj = subprocess.Popen(
cmd,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=False)
outcontent = spobj.stdout.read()
errcontent = spobj.stderr.read()
error = False
if outcontent.strip():
self.display_content('Messages', outcontent)
error = True
if errcontent.strip():
self.display_content('Errors', errcontent)
error = True
if not error:
msg = 'Successfully generated.'
self.error_message(
msg,
branch_version('gtk.MESSAGE_INFO', 'gtk.MessageType.INFO'))
def display_content(self, title, content):
#content_dialog = ContentDialog()
self.content_dialog.show(content)
def on_open_session_menuitem_activate(self, menuitem, data=None):
self.trans_gui_2_obj()
## self.dump_params('saved_params:', self.saved_params)
## self.dump_params('params:', self.params)
if self.params != self.saved_params:
message = 'Session data has changed.\n\nSave?'
if sys.version_info.major == 2:
dialog = gtk.MessageDialog(
None,
gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_ERROR,
gtk.BUTTONS_NONE,
message)
dialog.add_buttons(
gtk.STOCK_YES, gtk.RESPONSE_YES,
'_Discard', 1,
gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
)
else:
dialog = gtk.MessageDialog(
None,
(gtk.DialogFlags.MODAL |
gtk.DialogFlags.DESTROY_WITH_PARENT),
gtk.MessageType.ERROR,
gtk.ButtonsType.NONE,
message)
dialog.add_buttons(
gtk.STOCK_YES, gtk.ResponseType.YES,
'_Discard', 1,
gtk.STOCK_CANCEL, gtk.ResponseType.CANCEL,
)
response = dialog.run()
dialog.destroy()
if response == branch_version(
'gtk.RESPONSE_YES', 'gtk.ResponseType.YES'):
self.save_session_action()
elif response == 1:
pass
elif response == branch_version(
'gtk.RESPONSE_CANCEL', 'gtk.ResponseType.CANCEL'):
return
session_filename = self.choose_filename(
branch_version(
'gtk.FILE_CHOOSER_ACTION_OPEN',
'gtk.FileChooserAction.OPEN'),
(('Session *.session', '*.session'),)
)
if session_filename:
self.session_filename = session_filename
self.load_session(self.session_filename)
msg = 'Session file: %s' % (self.session_filename, )
self.statusbar.pop(self.statusbar_cid)
self.statusbar.push(self.statusbar_cid, msg)
def on_save_session_menuitem_activate(self, menuitem, data=None):
self.save_session_action()
    def save_session_action(self):
        """Save the current session, prompting for a file name if needed.

        Appends a ``.session`` extension when the chosen name has none,
        then writes the session and updates the status bar.
        """
        # No file associated with this session yet: ask the user for one.
        if not self.session_filename:
            filename = self.choose_filename(
                branch_version(
                    'gtk.FILE_CHOOSER_ACTION_SAVE',
                    'gtk.FileChooserAction.SAVE'),
                (('Session *.session', '*.session'),),
                confirm_overwrite=True,
                initfilename=self.session_filename,
                buttons=(
                    gtk.STOCK_CANCEL,
                    branch_version(
                        'gtk.RESPONSE_CANCEL',
                        'gtk.ResponseType.CANCEL'),
                    gtk.STOCK_SAVE, branch_version(
                        'gtk.RESPONSE_OK',
                        'gtk.ResponseType.OK'),
                )
            )
            if filename:
                self.session_filename = filename
        # Still falsy here means the user cancelled the chooser: do nothing.
        if self.session_filename:
            stem, ext = os.path.splitext(self.session_filename)
            if not ext:
                # Default the extension when the user did not supply one.
                self.session_filename += '.session'
            self.save_session(self.session_filename)
            msg = 'Session file: %s' % (self.session_filename, )
            self.statusbar.pop(self.statusbar_cid)
            self.statusbar.push(self.statusbar_cid, msg)
def on_save_session_as_menuitem_activate(self, menuitem, data=None):
filename = self.choose_filename(
branch_version(
'gtk.FILE_CHOOSER_ACTION_SAVE',
'gtk.FileChooserAction.SAVE'),
(('Session *.session', '*.session'),),
confirm_overwrite=True,
initfilename=self.session_filename,
buttons=(
gtk.STOCK_CANCEL,
branch_version(
'gtk.RESPONSE_CANCEL',
'gtk.ResponseType.CANCEL'),
gtk.STOCK_SAVE, branch_version(
'gtk.RESPONSE_OK',
'gtk.ResponseType.OK'),
)
)
if filename:
self.session_filename = filename
stem, ext = os.path.splitext(self.session_filename)
if not ext:
self.session_filename += '.session'
self.save_session(self.session_filename)
msg = 'Session file: %s' % (self.session_filename, )
self.statusbar.pop(self.statusbar_cid)
self.statusbar.push(self.statusbar_cid, msg)
def save_session(self, filename):
self.trans_gui_2_obj()
sessionObj = self.params
outfile = open(filename, 'w')
outfile.write('<?xml version="1.0" ?>\n')
sessionObj.export(
outfile, 0, name_="session",
namespacedef_='')
outfile.close()
msg = 'Session saved to file:\n%s' % (filename, )
msgTy = branch_version('gtk.MESSAGE_INFO', 'gtk.MessageType.INFO')
self.error_message(msg, msgTy)
self.saved_params = self.params.copy()
    def load_session(self, filename):
        """Parse the session XML in *filename* and populate the GUI.

        On success, ``self.params`` and ``self.saved_params`` are
        replaced.  I/O and XML-parse errors are reported via a modal
        error dialog instead of propagating.
        """
        try:
            doc = generateds_gui_session.parsexml_(filename)
            rootNode = doc.getroot()
            rootTag, rootClass = generateds_gui_session.get_root_tag(rootNode)
            if rootClass is None:
                # Unknown root tag: fall back to the generic session type.
                #rootTag = 'session'
                rootClass = generateds_gui_session.sessionType
            sessionObj = rootClass.factory()
            sessionObj.build(rootNode)
            self.params = sessionObj
            # Push the loaded values into the widgets, then read them back
            # so params reflects exactly what the widgets now hold.
            self.trans_obj_2_gui()
            self.trans_gui_2_obj()
            # Baseline for detecting unsaved changes later.
            self.saved_params = self.params.copy()
        except IOError as exp:
            msg = str(exp)
            self.error_message(msg)
        except expat.ExpatError as exp:
            msg = '%s file: %s' % (str(exp), filename, )
            self.error_message(msg)
    def on_about_menu_item_activate(self, menuitem, data=None):
        """Show the About dialog, creating it on first use.

        A single instance is cached on ``self.about_dialog``; activating
        the menu item again just re-presents it.
        """
        if self.about_dialog:
            self.about_dialog.present()
            return
        authors = [
            'Dave Kuhlman <dkuhlman@rexx.com>',
        ]
        about_dialog = gtk.AboutDialog()
        about_dialog.set_transient_for(self.window)
        about_dialog.set_destroy_with_parent(True)
        about_dialog.set_name("generateDS.py Python bindings generator")
        about_dialog.set_version(VERSION)
        # NOTE(review): "\xc2\xa9" is the UTF-8 byte pair for the copyright
        # sign as written for Python 2; under Python 3 it renders as two
        # characters ("Â©") — confirm whether this should be "\xa9".
        about_dialog.set_copyright("Copyright \xc2\xa9 2009 Dave Kuhlman")
        about_dialog.set_website("http://www.rexx.com/~dkuhlman")
        about_dialog.set_comments("GTK+ and Glade3 GUI front end")
        about_dialog.set_authors(authors)
        about_dialog.set_logo_icon_name(gtk.STOCK_EDIT)
        # Callbacks that drop the cached reference when the dialog goes away
        # so a fresh dialog can be created next time.
        def close(dialog, response, editor):
            editor.about_dialog = None
            dialog.destroy()
        def delete_event(dialog, event, editor):
            editor.about_dialog = None
            return True
        about_dialog.connect("response", close, self)
        about_dialog.connect("delete-event", delete_event, self)
        self.about_dialog = about_dialog
        about_dialog.show()
def error_message(self, message, message_type=None):
# log to terminal window
#print message
# create an error message dialog and display modally to the user
if message_type is None:
message_type = branch_version(
'gtk.MESSAGE_ERROR',
'gtk.MessageType.ERROR')
dialog = gtk.MessageDialog(
None,
branch_version(
'gtk.DIALOG_MODAL',
'gtk.DialogFlags.MODAL') |
branch_version(
'gtk.DIALOG_DESTROY_WITH_PARENT',
'gtk.DialogFlags.DESTROY_WITH_PARENT'),
message_type, branch_version(
'gtk.BUTTONS_OK',
'gtk.ButtonsType.OK'),
message)
dialog.run()
dialog.destroy()
def reset_default_status(self):
msg = "Session file: (UNTITLED)"
self.statusbar.pop(self.statusbar_cid)
self.statusbar.push(self.statusbar_cid, msg)
def on_input_schema_chooser_button_clicked(self, button, data=None):
filename = self.choose_filename(
branch_version(
'gtk.FILE_CHOOSER_ACTION_OPEN',
'gtk.FileChooserAction.OPEN'),
(('Schemas *.xsd', '*.xsd'),))
if filename:
self.input_schema_entry.set_text(filename)
def on_output_superclass_chooser_button_clicked(self, widget, data=None):
filename = self.choose_filename(patterns=(('Python *.py', '*.py'), ))
if filename:
self.output_superclass_entry.set_text(filename)
#self.on_output_superclass_entry_changed(
# self.output_superclass_entry, data)
def on_output_subclass_chooser_button_clicked(self, button, data=None):
filename = self.choose_filename(patterns=(('Python *.py', '*.py'), ))
if filename:
self.output_subclass_entry.set_text(filename)
def on_behavior_filename_chooser_button_clicked(self, button, data=None):
filename = self.choose_filename(
branch_version(
'gtk.FILE_CHOOSER_ACTION_OPEN',
'gtk.FileChooserAction.OPEN'),
(('Python *.py', '*.py'),))
if filename:
self.behavior_filename_entry.set_text(filename)
def on_validator_bodies_chooser_button_clicked(self, button, data=None):
filename = self.choose_filename(
branch_version(
'gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER',
'gtk.FileChooserAction.SELECT_FOLDER'),
)
if filename:
self.validator_bodies_entry.set_text(filename)
def on_user_methods_chooser_button_clicked(self, button, data=None):
filename = self.choose_filename(
branch_version(
'gtk.FILE_CHOOSER_ACTION_OPEN',
'gtk.FileChooserAction.OPEN'),
(('Python *.py', '*.py'),))
if filename:
self.user_methods_entry.set_text(filename)
def on_output_directory_chooser_button_clicked(self, button, data=None):
filename = self.choose_filename(
branch_version(
'gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER',
'gtk.FileChooserAction.SELECT_FOLDER'),
)
if filename:
self.output_directory_entry.set_text(filename)
def choose_filename(
self,
action=None,
patterns=(),
confirm_overwrite=False,
initfilename=None,
buttons=None):
if action is None:
action = branch_version(
'gtk.FILE_CHOOSER_ACTION_SAVE', 'gtk.FileChooserAction.SAVE')
filename = None
ty_CANCEL = branch_version(
'gtk.RESPONSE_CANCEL',
'gtk.ResponseType.CANCEL')
ty_OK = branch_version('gtk.RESPONSE_OK', 'gtk.ResponseType.OK')
if buttons is None:
buttons = (
gtk.STOCK_CANCEL, ty_CANCEL,
gtk.STOCK_OPEN, ty_OK,
)
dialog = gtk.FileChooserDialog(
title=None,
action=action,
buttons=buttons,
)
if self.current_folder is not None:
dialog.set_current_folder(self.current_folder)
if initfilename is not None:
dialog.set_filename(initfilename)
if patterns:
filter = gtk.FileFilter()
for name, pattern in patterns:
filter.set_name(name)
filter.add_pattern(pattern)
dialog.add_filter(filter)
filter = gtk.FileFilter()
filter.set_name("All files *.*")
filter.add_pattern("*")
dialog.add_filter(filter)
dialog.set_do_overwrite_confirmation(confirm_overwrite)
response = dialog.run()
if response == branch_version(
'gtk.RESPONSE_OK', 'gtk.ResponseType.OK'):
filename = dialog.get_filename()
self.current_folder = dialog.get_current_folder()
elif response == branch_version(
'gtk.RESPONSE_CANCEL', 'gtk.ResponseType.CANCEL'):
pass
dialog.destroy()
return filename
def on_namespace_prefix_entry_changed(self, widget, data=None):
#entry = self.ui_obj_dict['namespace_prefix_entry']
checkbutton = self.ui_obj_dict['empty_namespace_prefix_checkbutton']
checkbutton.set_active(False)
return True
def on_empty_namespace_prefix_checkbutton_toggled(self, widget, data=None):
entry = self.ui_obj_dict['namespace_prefix_entry']
#checkbutton = self.ui_obj_dict['empty_namespace_prefix_checkbutton']
if widget.get_active():
entry.set_text('')
return True
def on_output_superclass_entry_changed(self, widget, data=None):
entry = self.ui_obj_dict['superclass_module_entry']
checkbutton = self.auto_super_checkbutton
if checkbutton.get_active():
path = widget.get_text()
if path:
stem = os.path.splitext(os.path.split(path)[1])[0]
if stem:
entry.set_text(stem)
return True
def on_auto_super_checkbutton_toggled(self, widget, data=None):
entry = self.ui_obj_dict['superclass_module_entry']
superclass_entry = self.ui_obj_dict['output_superclass_entry']
#checkbutton = self.auto_super_checkbutton
#checkbutton = widget
if widget.get_active():
path = superclass_entry.get_text()
if path:
stem = os.path.splitext(os.path.split(path)[1])[0]
if stem:
entry.set_text(stem)
return True
def on_ok_button_activate(self, widget, data=None):
#print( '(GeneratedsGui) widget:', widget)
response = self.content_dialog.on_ok_button_activate(
self.content_dialog, data)
return response
    # Compiled once at class-definition time; captures the "xxx_yyy" stem
    # from a clear button's widget id.
    name_pat1 = re.compile(r'^(.*)_clear_button')
    # This method keys off the correspondence between the
    # name of the button and the name of the related entry,
    # for example, xxx_yyy_entry : xxx_yyy_clear_button.
    def on_clear_button_clicked(self, widget, data=None):
        """Clear the entry widget paired with the clicked clear button."""
        # PyGTK exposes widget.get_name(); PyGObject requires going
        # through gtk.Buildable to obtain the Builder id.  See:
        # http://python.6.x6.nabble.com/Confused-about-a-widget-s-name-td5015372.html
        name = (
            widget.get_name()
            if sys.version_info.major == 2
            else gtk.Buildable.get_name(widget))
        mo = GeneratedsGui.name_pat1.search(name)
        if mo is not None:
            stem = mo.group(1)
            name1 = '%s_entry' % (stem, )
            ui_obj = self.ui_obj_dict[name1]
            ui_obj.set_text('')
    # Run main application window
    def main(self):
        """Show the top-level window and enter the GTK main loop (blocks)."""
        self.window.show()
        gtk.main()
class ContentDialog(gtk.Dialog):
    """Modal dialog that displays generated output or messages.

    Wraps the Glade-defined 'content_dialog'/'content_textview' widgets
    fetched from the module-level Builder.
    """
    def __init__(self):
        # NOTE(review): gtk.Dialog.__init__ is never called; the instance
        # only proxies the Builder-created widgets — confirm this is
        # intentional before restructuring.
        global Builder
        self.content_dialog = Builder.get_object('content_dialog')
        self.content_textview = Builder.get_object('content_textview')
        self.content_textview.get_buffer().set_text('')
    def show(self, content):
        """Display *content* (str or UTF-8 bytes) modally, then hide."""
        #Builder.connect_signals(self)
        if isinstance(content, bytes):
            content = content.decode('utf-8')
        self.content_textview.get_buffer().set_text(content)
        self.content_dialog.run()
        self.content_dialog.hide()
    def on_ok_button_activate(self, widget, data=None):
        # Returning False lets default dialog response handling proceed.
        return False
#
# Functions for internal use
#
def branch_version(for_2, for_3):
    """Evaluate and return the expression matching the running Python.

    for_2 -- expression string used under Python 2 (e.g. PyGTK constants).
    for_3 -- expression string used under Python 3+ (PyGObject constants).

    The original had an ``elif major == 3`` branch and an ``else`` branch
    that both returned ``eval(for_3)``; collapsed here.  NOTE: ``eval`` is
    only safe because every call site passes literal constant strings.
    """
    if sys.version_info.major == 2:
        return eval(for_2)
    # Python 3 and anything newer use the py3 spelling.
    return eval(for_3)
def capture_options(options):
    """Fill unset attributes of *options* from the config files.

    Reads ~/.generateds_gui.ini and ./generateds_gui.ini (later files
    win, per ConfigParser.read), copies each [general] value onto the
    matching options attribute when that attribute is still None, and
    finally defaults exec_path to 'generateDS.py'.
    """
    config_parser = ConfigParser()
    config_parser.read([
        os.path.expanduser('~/.generateds_gui.ini'),
        './generateds_gui.ini',
    ])
    section = 'general'
    # (config-file key, options attribute) pairs to merge.
    for names in (
            ('exec-path', 'exec_path'),
            ('impl-gui', 'impl_gui'),
            ('session', 'session'),
    ):
        capture_1_option(options, config_parser, section, names)
    # Fallback when neither CLI nor config supplied an executable path.
    if options.exec_path is None:
        options.exec_path = 'generateDS.py'
def capture_1_option(options, config_parser, section, names):
    """Copy one config value onto *options* unless it is already set.

    names -- (config-file key, options attribute name) pair.  The value
    is taken from the config file only when the attribute is None.
    """
    config_name, attr_name = names
    if getattr(options, attr_name) is not None:
        return
    if config_parser.has_option(section, config_name):
        setattr(options, attr_name, config_parser.get(section, config_name))
def capture_ui_names():
    """Populate ParamNameList with one UIItemSpec per session member.

    The widget type and access action are chosen from each member's
    name/data type: 'member_specs' -> combobox, xs:string -> entry,
    xs:boolean -> checkbutton; anything else keeps the UIItemSpec
    defaults.
    """
    for item in generateds_gui_session.sessionType.member_data_items_:
        ui_item = UIItemSpec(item.get_name())
        if item.get_name() == 'member_specs':
            widget_spec = ('combobox', 'active')
        elif item.get_data_type() == 'xs:string':
            widget_spec = ('entry', 'text')
        elif item.get_data_type() == 'xs:boolean':
            widget_spec = ('checkbutton', 'active')
        else:
            widget_spec = None
        if widget_spec is not None:
            ui_item.set_ui_type(widget_spec[0])
            ui_item.set_access_action(widget_spec[1])
        ParamNameList.append(ui_item)
USAGE_TEXT = """
python %prog [options] --session=<some_session_file.session>
example:
python %prog --session=generator01.session"""
def usage(parser):
    """Print the option help, then terminate with a non-zero exit status."""
    parser.print_help()
    # Equivalent to sys.exit(1): raises SystemExit(1).
    raise SystemExit(1)
def main():
    """Parse the command line, merge config-file options, and start the GUI."""
    parser = OptionParser(USAGE_TEXT)
    parser.add_option(
        "--exec-path",
        type="string", action="store",
        dest="exec_path",
        #default="generateDS.py",
        help=(
            'path to executable generated in command line.'
            ' Example: "python /path/to/generateDS.py".'
            ' Default: "./generateDS.py".'
            ' Use Tools/Generate CL (Ctrl-T) to see it.')
    )
    parser.add_option(
        "--impl-gui",
        type="string", action="store",
        dest="impl_gui",
        help="name of glade file that defines the GUI if not embedded."
    )
    parser.add_option(
        "-s", "--session",
        type="string", action="store",
        dest="session",
        help="name of a session file to be loaded."
    )
    (options, args) = parser.parse_args()
    # Fill any options the CLI left unset from the .ini config files.
    capture_options(options)
    # Build the UI item specs that map session members to widgets.
    capture_ui_names()
    # This program takes options only; positional arguments are an error.
    if len(args) > 0:
        usage(parser)
    # Set up for internationalization.
    app_name = 'generateds_gui'
    dir_name = 'locale'
    locale.setlocale(locale.LC_ALL, '')
    gettext.bindtextdomain(app_name, dir_name)
    gettext.textdomain(app_name)
    # Start the app.
    editor = GeneratedsGui(options)
    editor.main()
# Do not change the next 3 lines.
## UI_SPECIFICATION ##
Ui_spec = """
<?xml version="1.0" encoding="UTF-8"?>
<!-- Generated with glade 3.18.3 -->
<interface>
<requires lib="gtk+" version="3.0"/>
<object class="GtkAccelGroup" id="accelgroup1"/>
<object class="GtkDialog" id="content_dialog">
<property name="can_focus">False</property>
<property name="border_width">5</property>
<property name="title" translatable="yes">Messages and Content</property>
<property name="default_width">800</property>
<property name="default_height">600</property>
<property name="type_hint">normal</property>
<child internal-child="vbox">
<object class="GtkBox" id="dialog-vbox3">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="spacing">2</property>
<child internal-child="action_area">
<object class="GtkButtonBox" id="dialog-action_area3">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="layout_style">end</property>
<child>
<object class="GtkButton" id="content_dialog_ok_button">
<property name="label" translatable="yes">OK</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<signal name="activate" handler="on_ok_button_activate" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">False</property>
<property name="position">0</property>
</packing>
</child>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">False</property>
<property name="pack_type">end</property>
<property name="position">0</property>
</packing>
</child>
<child>
<object class="GtkScrolledWindow" id="scrolledwindow1">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="min_content_width">250</property>
<property name="min_content_height">500</property>
<child>
<object class="GtkTextView" id="content_textview">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="editable">False</property>
</object>
</child>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">1</property>
</packing>
</child>
</object>
</child>
<action-widgets>
<action-widget response="0">content_dialog_ok_button</action-widget>
</action-widgets>
</object>
<object class="GtkImage" id="image1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="stock">gtk-save</property>
</object>
<object class="GtkImage" id="image2">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="stock">gtk-save-as</property>
</object>
<object class="GtkImage" id="image3">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="stock">gtk-open</property>
</object>
<object class="GtkImage" id="image4">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="stock">gtk-clear</property>
</object>
<object class="GtkWindow" id="window1">
<property name="can_focus">False</property>
<accel-groups>
<group name="accelgroup1"/>
</accel-groups>
<signal name="delete-event" handler="on_window_delete_event" swapped="no"/>
<child>
<object class="GtkVBox" id="vbox1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkMenuBar" id="menubar1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkMenuItem" id="menuitem1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">_File</property>
<property name="use_underline">True</property>
<child type="submenu">
<object class="GtkMenu" id="menu1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkImageMenuItem" id="clear_menuitem">
<property name="label">Clear</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="image">image4</property>
<property name="use_stock">False</property>
<property name="accel_group">accelgroup1</property>
<signal name="activate" handler="on_clear_menuitem_activate" swapped="no"/>
<accelerator key="n" signal="activate" modifiers="GDK_CONTROL_MASK"/>
</object>
</child>
<child>
<object class="GtkImageMenuItem" id="open_session_menuitem">
<property name="label">_Load session</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="tooltip_text" translatable="yes">Load a previous saved session.</property>
<property name="use_underline">True</property>
<property name="image">image3</property>
<property name="use_stock">False</property>
<property name="accel_group">accelgroup1</property>
<signal name="activate" handler="on_open_session_menuitem_activate" swapped="no"/>
<accelerator key="o" signal="activate" modifiers="GDK_CONTROL_MASK"/>
</object>
</child>
<child>
<object class="GtkImageMenuItem" id="save_session_menuitem">
<property name="label">_Save session</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="tooltip_text" translatable="yes">Save the current session.</property>
<property name="use_underline">True</property>
<property name="image">image1</property>
<property name="use_stock">False</property>
<property name="accel_group">accelgroup1</property>
<signal name="activate" handler="on_save_session_menuitem_activate" swapped="no"/>
<accelerator key="s" signal="activate" modifiers="GDK_CONTROL_MASK"/>
</object>
</child>
<child>
<object class="GtkImageMenuItem" id="save_session_as_menuitem">
<property name="label">Save session as ...</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="tooltip_text" translatable="yes">Save the current session in
file chosen by the user.</property>
<property name="image">image2</property>
<property name="use_stock">False</property>
<property name="accel_group">accelgroup1</property>
<signal name="activate" handler="on_save_session_as_menuitem_activate" swapped="no"/>
</object>
</child>
<child>
<object class="GtkSeparatorMenuItem" id="menuitem5">
<property name="visible">True</property>
<property name="can_focus">False</property>
</object>
</child>
<child>
<object class="GtkImageMenuItem" id="imagemenuitem5">
<property name="label">gtk-quit</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="tooltip_text" translatable="yes">Exit from the application.</property>
<property name="use_underline">True</property>
<property name="use_stock">True</property>
<property name="accel_group">accelgroup1</property>
<signal name="activate" handler="on_quit_menu_item_activate" swapped="no"/>
</object>
</child>
</object>
</child>
</object>
</child>
<child>
<object class="GtkMenuItem" id="menuitem2">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">_Tools</property>
<property name="use_underline">True</property>
<child type="submenu">
<object class="GtkMenu" id="menu2">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkMenuItem" id="capture_cl_menuitem">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="tooltip_text" translatable="yes">Capture the command line that would be used
to generate the bindings modules.</property>
<property name="label" translatable="yes">_Capture CL</property>
<property name="use_underline">True</property>
<signal name="activate" handler="on_capture_cl_menuitem_activate" swapped="no"/>
<accelerator key="t" signal="activate" modifiers="GDK_CONTROL_MASK"/>
</object>
</child>
<child>
<object class="GtkMenuItem" id="generate_menuitem">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="tooltip_text" translatable="yes">Generate the bindings modules.</property>
<property name="label" translatable="yes">_Generate</property>
<property name="use_underline">True</property>
<signal name="activate" handler="on_generate_menuitem_activate" swapped="no"/>
<accelerator key="g" signal="activate" modifiers="GDK_CONTROL_MASK"/>
</object>
</child>
</object>
</child>
</object>
</child>
<child>
<object class="GtkMenuItem" id="menuitem4">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">_Help</property>
<property name="use_underline">True</property>
<child type="submenu">
<object class="GtkMenu" id="menu3">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkImageMenuItem" id="imagemenuitem10">
<property name="label">gtk-about</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="use_underline">True</property>
<property name="use_stock">True</property>
<property name="accel_group">accelgroup1</property>
<signal name="activate" handler="on_about_menu_item_activate" swapped="no"/>
</object>
</child>
</object>
</child>
</object>
</child>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
<child>
<object class="GtkScrolledWindow" id="scrolledwindow2">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="shadow_type">in</property>
<property name="min_content_width">1000</property>
<property name="min_content_height">600</property>
<child>
<object class="GtkViewport" id="viewport1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkTable" id="table1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="n_rows">29</property>
<property name="n_columns">4</property>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<object class="GtkLabel" id="label1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Input schema file:</property>
<property name="xalign">0</property>
</object>
</child>
<child>
<object class="GtkLabel" id="label2">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Output superclass file:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">1</property>
<property name="bottom_attach">2</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label3">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Output subclass file:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">2</property>
<property name="bottom_attach">3</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label4">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Overwrite without asking:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">3</property>
<property name="bottom_attach">4</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="force_checkbutton">
<property name="label" translatable="yes">Force</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
<property name="tooltip_text" translatable="yes">Always overwrite output files.
Do not ask for confirmation.</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">3</property>
<property name="bottom_attach">4</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="input_schema_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">The path and name of the
input XML schema defining the
bindings to be generated.</property>
<property name="invisible_char">●</property>
<property name="width_chars">80</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="output_superclass_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">The path and name of the output file
to be generated and to contain the
superclasses.</property>
<property name="invisible_char">●</property>
<signal name="changed" handler="on_output_superclass_entry_changed" swapped="no"/>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">1</property>
<property name="bottom_attach">2</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="output_subclass_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">The path and name of the output file
to be generated and to contain the
subclasses.</property>
<property name="invisible_char">●</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">2</property>
<property name="bottom_attach">3</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label5">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Prefix (for class names):</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">4</property>
<property name="bottom_attach">5</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="prefix_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">Prefix for class names.</property>
<property name="invisible_char">●</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">4</property>
<property name="bottom_attach">5</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label6">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Namespace prefix:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">5</property>
<property name="bottom_attach">6</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="namespace_prefix_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="events">GDK_KEY_RELEASE_MASK | GDK_STRUCTURE_MASK</property>
<property name="tooltip_text" translatable="yes">Override default namespace
prefix in schema file.
Example: -a "xsd:"
Default: "xs:".</property>
<property name="invisible_char">●</property>
<signal name="changed" handler="on_namespace_prefix_entry_changed" swapped="no"/>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">5</property>
<property name="bottom_attach">6</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label7">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Behavior file name:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">6</property>
<property name="bottom_attach">7</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="behavior_filename_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">Input file name for behaviors
added to subclasses.</property>
<property name="invisible_char">●</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">6</property>
<property name="bottom_attach">7</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label8">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Generate Python properties:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">7</property>
<property name="bottom_attach">8</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="properties_checkbutton">
<property name="label" translatable="yes">Properties</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
<property name="tooltip_text" translatable="yes">Generate Python properties for member variables
so that the value can be retrieved and modified
without calling getter and setter functions.
</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">7</property>
<property name="bottom_attach">8</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label10">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Subclass suffix:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">9</property>
<property name="bottom_attach">10</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="subclass_suffix_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">Append this text to the generated subclass names.
Default="Sub".</property>
<property name="invisible_char">●</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">9</property>
<property name="bottom_attach">10</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label11">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Root element:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">10</property>
<property name="bottom_attach">11</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="root_element_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">Assume that this value is the name
of the root element of instance docs.
Default is first element defined in schema.</property>
<property name="invisible_char">●</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">10</property>
<property name="bottom_attach">11</property>
</packing>
</child>
<child>
<object class="GtkButton" id="input_schema_chooser_button">
<property name="label" translatable="yes">Choose</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Choose the input schema file.</property>
<signal name="clicked" handler="on_input_schema_chooser_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">2</property>
<property name="right_attach">3</property>
</packing>
</child>
<child>
<object class="GtkButton" id="output_superclass_chooser_button">
<property name="label" translatable="yes">Choose</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Choose the output superclass bindings file.</property>
<signal name="clicked" handler="on_output_superclass_chooser_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">2</property>
<property name="right_attach">3</property>
<property name="top_attach">1</property>
<property name="bottom_attach">2</property>
</packing>
</child>
<child>
<object class="GtkButton" id="output_subclass_chooser_button">
<property name="label" translatable="yes">Choose</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Choose the output subclass bindings file.</property>
<signal name="clicked" handler="on_output_subclass_chooser_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">2</property>
<property name="right_attach">3</property>
<property name="top_attach">2</property>
<property name="bottom_attach">3</property>
</packing>
</child>
<child>
<object class="GtkButton" id="behavior_filename_chooser_button">
<property name="label" translatable="yes">Choose</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
                    <property name="tooltip_text" translatable="yes">Choose the input file name for
behaviors added to subclasses.</property>
<signal name="clicked" handler="on_behavior_filename_chooser_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">2</property>
<property name="right_attach">3</property>
<property name="top_attach">6</property>
<property name="bottom_attach">7</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label12">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Superclass module:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">11</property>
<property name="bottom_attach">12</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="superclass_module_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">Superclass module name in subclass module.
Default="???".</property>
<property name="invisible_char">●</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">11</property>
<property name="bottom_attach">12</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label13">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Use old getters and setters:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">12</property>
<property name="bottom_attach">13</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="old_getters_setters_checkbutton">
<property name="label" translatable="yes">Old getters and setters</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
<property name="tooltip_text" translatable="yes">Name getters and setters getVar() and setVar(),
instead of get_var() and set_var().</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">12</property>
<property name="bottom_attach">13</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label14">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Validator bodies path:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">13</property>
<property name="bottom_attach">14</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label15">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">User methods module:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">14</property>
<property name="bottom_attach">15</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label16">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">No dates:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">15</property>
<property name="bottom_attach">16</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label17">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">No versions:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">16</property>
<property name="bottom_attach">17</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="no_dates_checkbutton">
<property name="label" translatable="yes">No dates in generated output</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
<property name="tooltip_text" translatable="yes">Do not include the current date in the generated
files. This is useful if you want to minimize
the amount of (no-operation) changes to the
generated python code.</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">15</property>
<property name="bottom_attach">16</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="validator_bodies_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">Path to a directory containing files that provide
bodies (implementations) of validator methods.</property>
<property name="invisible_char">●</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">13</property>
<property name="bottom_attach">14</property>
</packing>
</child>
<child>
<object class="GtkButton" id="validator_bodies_chooser_button">
<property name="label" translatable="yes">Choose</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Choose the path to a directory containing files that provide
bodies (implementations) of validator methods.</property>
<signal name="clicked" handler="on_validator_bodies_chooser_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">2</property>
<property name="right_attach">3</property>
<property name="top_attach">13</property>
<property name="bottom_attach">14</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="no_versions_checkbutton">
<property name="label" translatable="yes">No version info in generated output</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
<property name="tooltip_text" translatable="yes">Do not include the current version in the generated
files. This is useful if you want to minimize
the amount of (no-operation) changes to the
generated python code.</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">16</property>
<property name="bottom_attach">17</property>
</packing>
</child>
<child>
<object class="GtkButton" id="user_methods_button">
<property name="label" translatable="yes">Choose</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Choose the optional module containing user methods. See
section "User Methods" in the documentation.</property>
<signal name="clicked" handler="on_user_methods_chooser_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">2</property>
<property name="right_attach">3</property>
<property name="top_attach">14</property>
<property name="bottom_attach">15</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="user_methods_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">Optional module containing user methods. See
section "User Methods" in the documentation.</property>
<property name="invisible_char">●</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">14</property>
<property name="bottom_attach">15</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label18">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">No process includes:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">17</property>
<property name="bottom_attach">18</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label19">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Silence:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">18</property>
<property name="bottom_attach">19</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="no_process_includes_checkbutton">
<property name="label" translatable="yes">Do not process includes in schema</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
<property name="tooltip_text" translatable="yes">Do not process included XML Schema files. By
default, generateDS.py will insert content
from files referenced by &lt;include ... /&gt;
elements into the XML Schema to be processed.</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">17</property>
<property name="bottom_attach">18</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="silence_checkbutton">
<property name="label" translatable="yes">Generate code that does not echo the parsed XML</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
<property name="tooltip_text" translatable="yes">Normally, the code generated with generateDS
echoes the information being parsed. Use
this option to turn off that behavior.
</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">18</property>
<property name="bottom_attach">19</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label20">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Namespace definitions:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">19</property>
<property name="bottom_attach">20</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label21">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">External encoding:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">20</property>
<property name="bottom_attach">21</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label22">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Member specs:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">22</property>
<property name="bottom_attach">23</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="namespace_defs_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">Namespace definition to be passed in as the
value for the namespacedef_ parameter of
the export() method by the generated
parse() and parseString() functions.
Default=''. Example:
xmlns:abc="http://www.abc.com"</property>
<property name="invisible_char">●</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">19</property>
<property name="bottom_attach">20</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="external_encoding_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">Encode output written by the generated export
methods using this encoding. Default, if omitted,
is the value returned by sys.getdefaultencoding().
Example: utf-8.</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">20</property>
<property name="bottom_attach">21</property>
</packing>
</child>
<child>
<object class="GtkHBox" id="member_specs_combobox_container">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<placeholder/>
</child>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">22</property>
<property name="bottom_attach">23</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="empty_namespace_prefix_checkbutton">
<property name="label" translatable="yes">Empty</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
<property name="tooltip_text" translatable="yes">Assume an empty namespace
prefix in the XML schema, not
the default ("xs:").</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
<signal name="toggled" handler="on_empty_namespace_prefix_checkbutton_toggled" swapped="no"/>
</object>
<packing>
<property name="left_attach">2</property>
<property name="right_attach">3</property>
<property name="top_attach">5</property>
<property name="bottom_attach">6</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="auto_super_checkbutton">
<property name="label" translatable="yes">Auto</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
<property name="tooltip_text" translatable="yes">Use the superclass file name
stem as the super-class module
name.</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
<signal name="toggled" handler="on_auto_super_checkbutton_toggled" swapped="no"/>
</object>
<packing>
<property name="left_attach">2</property>
<property name="right_attach">3</property>
<property name="top_attach">11</property>
<property name="bottom_attach">12</property>
</packing>
</child>
<child>
<object class="GtkButton" id="input_schema_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the input schema file entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
</packing>
</child>
<child>
<object class="GtkButton" id="output_superclass_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the output superclass file entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">1</property>
<property name="bottom_attach">2</property>
</packing>
</child>
<child>
<object class="GtkButton" id="output_subclass_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the output subclass file entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">2</property>
<property name="bottom_attach">3</property>
</packing>
</child>
<child>
<object class="GtkButton" id="prefix_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the prefix entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">4</property>
<property name="bottom_attach">5</property>
</packing>
</child>
<child>
<object class="GtkButton" id="namespace_prefix_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the XML namespace prefix entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">5</property>
<property name="bottom_attach">6</property>
</packing>
</child>
<child>
<object class="GtkButton" id="behavior_filename_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the behavior file name entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">6</property>
<property name="bottom_attach">7</property>
</packing>
</child>
<child>
<object class="GtkButton" id="subclass_suffix_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the subclass suffix.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">9</property>
<property name="bottom_attach">10</property>
</packing>
</child>
<child>
<object class="GtkButton" id="root_element_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the root element entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">10</property>
<property name="bottom_attach">11</property>
</packing>
</child>
<child>
<object class="GtkButton" id="superclass_module_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the superclass module entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">11</property>
<property name="bottom_attach">12</property>
</packing>
</child>
<child>
<object class="GtkButton" id="validator_bodies_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the validator bodies path entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">13</property>
<property name="bottom_attach">14</property>
</packing>
</child>
<child>
<object class="GtkButton" id="user_methods_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the user methods module entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">14</property>
<property name="bottom_attach">15</property>
</packing>
</child>
<child>
<object class="GtkButton" id="namespace_defs_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the namespace definitions entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">19</property>
<property name="bottom_attach">20</property>
</packing>
</child>
<child>
<object class="GtkButton" id="external_encoding_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the external encoding entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">20</property>
<property name="bottom_attach">21</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label23">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Get encoded:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">21</property>
<property name="bottom_attach">22</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="get_encoded_checkbutton">
<property name="label" translatable="yes">Getters return encoded values by default</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
                    <property name="tooltip_text" translatable="yes">Getters return encoded values by default if true.
Can be changed at run-time by either
(1) changing global variable GetEncodedValue or
(2) using optional parameter to getter.</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">21</property>
<property name="bottom_attach">22</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label24">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Exports:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">23</property>
<property name="bottom_attach">24</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="export_spec_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">Specifies export functions to be generated. Value is a whitespace separated list of any of the following: "write" (write XML to file), "literal" (write out python code), "etree" (build element tree (can serialize to XML)). Example: "write etree". Default: "write".</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">23</property>
<property name="bottom_attach">24</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label25">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">One file per XSD:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">24</property>
<property name="bottom_attach">25</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="one_file_per_xsd_checkbutton">
<property name="label" translatable="yes">Create a python module for each XSD processed.</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
<property name="tooltip_text" translatable="yes">Create a python module for each XSD processed.</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">24</property>
<property name="bottom_attach">25</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label26">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Output directory:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">25</property>
<property name="bottom_attach">26</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="output_directory_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">Used in conjunction with --one-file-per-xsd. The directory where the modules will be created.</property>
<property name="invisible_char">●</property>
<property name="width_chars">80</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">25</property>
<property name="bottom_attach">26</property>
</packing>
</child>
<child>
<object class="GtkButton" id="output_directory_chooser_button">
<property name="label" translatable="yes">Choose</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Choose the output directory for one-file-per-xsd.</property>
<signal name="clicked" handler="on_output_directory_chooser_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">2</property>
<property name="right_attach">3</property>
<property name="top_attach">25</property>
<property name="bottom_attach">26</property>
</packing>
</child>
<child>
<object class="GtkButton" id="output_directory_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the output directory entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">25</property>
<property name="bottom_attach">26</property>
</packing>
</child>
<child>
<object class="GtkButton" id="export_spec_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the exports entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">23</property>
<property name="bottom_attach">24</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label27">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Module suffix:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">26</property>
<property name="bottom_attach">27</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label28">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Preserve CData tags:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">27</property>
<property name="bottom_attach">28</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label29">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Cleanup name list:</property>
<property name="xalign">0</property>
</object>
<packing>
<property name="top_attach">28</property>
<property name="bottom_attach">29</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="module_suffix_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">To be used in conjunction with --one-file-per-xsd. Append XXX to the end of each file created.</property>
<property name="invisible_char">●</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">26</property>
<property name="bottom_attach">27</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="cleanup_name_list_entry">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="tooltip_text" translatable="yes">Specifies list of 2-tuples used for cleaning names. First element is a regular expression search pattern and second is a replacement. Example: "[('[-:.]', '_'), ('^__', 'Special')]". Default: "[('[-:.]', '_')]".</property>
<property name="invisible_char">●</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">28</property>
<property name="bottom_attach">29</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="preserve_cdata_tags_checkbutton">
<property name="label" translatable="yes">Preserve CData tags</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
<property name="tooltip_text" translatable="yes">Preserve CDATA tags. Default: False.</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
</object>
<packing>
<property name="left_attach">1</property>
<property name="right_attach">2</property>
<property name="top_attach">27</property>
<property name="bottom_attach">28</property>
</packing>
</child>
<child>
<object class="GtkButton" id="module_suffix_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the module suffix entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">26</property>
<property name="bottom_attach">27</property>
</packing>
</child>
<child>
<object class="GtkButton" id="cleanup_name_list_clear_button">
<property name="label">gtk-clear</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Clear the cleanup name list entry.</property>
<property name="use_stock">True</property>
<signal name="clicked" handler="on_clear_button_clicked" swapped="no"/>
</object>
<packing>
<property name="left_attach">3</property>
<property name="right_attach">4</property>
<property name="top_attach">28</property>
<property name="bottom_attach">29</property>
</packing>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
<child>
<placeholder/>
</child>
</object>
</child>
</object>
</child>
</object>
<packing>
<property name="expand">True</property>
<property name="fill">True</property>
<property name="position">1</property>
</packing>
</child>
<child>
<object class="GtkHBox" id="hbox1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="homogeneous">True</property>
<child>
<object class="GtkButton" id="generate_button">
<property name="label" translatable="yes">Generate</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Generate the bindings modules.</property>
<signal name="clicked" handler="on_generate_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">True</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
<child>
<object class="GtkButton" id="quit_button">
<property name="label" translatable="yes">Quit</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="tooltip_text" translatable="yes">Exit from the application.</property>
<signal name="clicked" handler="on_quit_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">True</property>
<property name="fill">True</property>
<property name="position">1</property>
</packing>
</child>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">False</property>
<property name="position">2</property>
</packing>
</child>
<child>
<object class="GtkStatusbar" id="statusbar1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="spacing">2</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">3</property>
</packing>
</child>
<child>
<placeholder/>
</child>
</object>
</child>
</object>
<object class="GtkImage" id="image5">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="stock">gtk-missing-image</property>
</object>
</interface>
"""
## UI_SPECIFICATION ##
# Do not change the above 3 lines.
if __name__ == "__main__":
main()
| 47.865924 | 361 | 0.507679 |
acf8ef3c77aa8c95ab374ed9efc741d355c0106c | 1,209 | py | Python | third_party/skia_m63/tools/skp/page_sets/skia_chalkboard_desktop.py | kniefliu/WindowsSamples | c841268ef4a0f1c6f89b8e95bf68058ea2548394 | [
"MIT"
] | 4 | 2019-10-18T05:53:30.000Z | 2021-08-21T07:36:37.000Z | third_party/skia_m63/tools/skp/page_sets/skia_chalkboard_desktop.py | kniefliu/WindowsSamples | c841268ef4a0f1c6f89b8e95bf68058ea2548394 | [
"MIT"
] | 2 | 2019-03-14T10:26:45.000Z | 2021-08-06T01:24:06.000Z | third_party/skia_m63/tools/skp/page_sets/skia_chalkboard_desktop.py | kniefliu/WindowsSamples | c841268ef4a0f1c6f89b8e95bf68058ea2548394 | [
"MIT"
] | 4 | 2018-10-14T00:17:11.000Z | 2020-07-01T04:01:25.000Z | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
credentials_path='data/credentials.json',
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_chalkboard_desktop.json'
class SkiaChalkboardDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaChalkboardDesktopPageSet, self).__init__(
archive_data_file='data/skia_chalkboard_desktop.json')
urls_list = [
# Why: from fmalita
('http://ie.microsoft.com/testdrive/Performance/Chalkboard/Images/'
'Chalkboard.svg'),
]
for url in urls_list:
self.AddStory(SkiaBuildbotDesktopPage(url, self))
| 30.225 | 74 | 0.741108 |
acf8ef9a3f6b069daf43f61cb8d6f534d982f732 | 1,762 | py | Python | mergeToFTformat.py | gwallison/OH_injection_wells_compile | 70ffd6bfc99932d1364574335cc289ca23390655 | [
"CC0-1.0"
] | null | null | null | mergeToFTformat.py | gwallison/OH_injection_wells_compile | 70ffd6bfc99932d1364574335cc289ca23390655 | [
"CC0-1.0"
] | null | null | null | mergeToFTformat.py | gwallison/OH_injection_wells_compile | 70ffd6bfc99932d1364574335cc289ca23390655 | [
"CC0-1.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Dec 4 11:20:25 2018
@author: gary.allison
create the injection well dataset
"""
import pandas as pd
import numpy as np
import pandas.api.types as ptypes
import mergeWithMeta
datadir = './sources/'
outdir = './out/'
indir = datadir+'OH_injection/'
pre_proc_out = outdir+'injection_tall_pre.csv'
wide_out = outdir+'injection_wide_format.csv'
tempf = outdir+'temp.csv'
df_pre = pd.read_csv(pre_proc_out)
#### ------ get list of YrQ ---------
yqs = df_pre.YrQ.unique()
yqs.sort()
#### ------ get the metafile and make sure the API lists match
df_wide = mergeWithMeta.makeWholeSet()
assert len(df_wide) == len(df_pre.API10.unique())
df_wide = df_wide.sort_values(by='API10')
assert df_wide.API10.values.sort() == df_pre.API10.unique().sort()
print(f'Len df_wide: {len(df_wide.API10)}, Unique API in df_pre:{len(df_pre.API10.unique())}')
#### -------------- step through each YrQ to get subset to merge ------------
tmp = df_wide.copy()
for yq in yqs:
d = df_pre[df_pre.YrQ==yq].copy()
d['chkAPI'] = d.API10.copy()
# use this drop line for most of the columns of interest
d = d.drop(['YrQ','Year','Quarter','APIstr','Unnamed: 0'],axis=1)
# use this drop line to get just companies...
#d = d.drop(['YrQ','Year','Quarter','APIstr','Unnamed: 0','AltName','Vol_InDist','Vol_OutDist'],axis=1)
newcol = []
for c in d.columns:
if c not in ['API10','chkAPI']:
newcol.append(c+'_'+yq)
else:
newcol.append(c)
d.columns = newcol
tmp = pd.merge(tmp,d,how='left',left_on='API10',right_on='API10',validate='1:1')
#assert tmp.API10 == temp.chkAPI
tmp = tmp.drop(['chkAPI',],axis=1)
#dlst.append(tmp)
tmp.to_csv(wide_out,index=False)
| 28.885246 | 107 | 0.643587 |
acf8efa32fe13a7682282ea9880ff488867eb4c6 | 911 | py | Python | biased_coin_random.py | raysinensis/homeworks | 21b165ee986eff8acb83194bc0a6902a38d98b5b | [
"MIT"
] | null | null | null | biased_coin_random.py | raysinensis/homeworks | 21b165ee986eff8acb83194bc0a6902a38d98b5b | [
"MIT"
] | null | null | null | biased_coin_random.py | raysinensis/homeworks | 21b165ee986eff8acb83194bc0a6902a38d98b5b | [
"MIT"
] | null | null | null | import random
import timeit
import csv
##start=timeit.default_timer()
groupsum=0
groupoversum=0
groupover240=0
filename="f:/random1.csv"
for z in range(1,501):
groupsum=0
groupoversum=0
groupover240=0
for i in range(1,100001):
seq=[0]*500
for j in range(0,500):
if random.random()<=0.6:
seq[j]=1
groupn=1
for x in range(0,len(seq)-1):
if seq[x]+seq[x+1]==1:
groupn+=1
if groupn>250:
groupoversum+=1
if groupn>240:
groupover240+=1
groupsum+=groupn
with open(filename, 'a') as csvfile:
output = csv.writer(csvfile)
output.writerow([groupsum,groupoversum,groupover240])
##print(groupsum)
##print(groupoversum)
##print(groupover240)
##stop=timeit.default_timer()
##print(stop - start)
| 26.028571 | 62 | 0.556531 |
acf8f149a10da1b9a2711c72b4da5d4ebec149c7 | 15,993 | py | Python | holoviews/plotting/mpl/util.py | zjzh/holoviews | cc6b27f01710402fdfee2aeef1507425ca78c91f | [
"BSD-3-Clause"
] | 864 | 2019-11-13T08:18:27.000Z | 2022-03-31T13:36:13.000Z | holoviews/plotting/mpl/util.py | zjzh/holoviews | cc6b27f01710402fdfee2aeef1507425ca78c91f | [
"BSD-3-Clause"
] | 1,117 | 2019-11-12T16:15:59.000Z | 2022-03-30T22:57:59.000Z | holoviews/plotting/mpl/util.py | zjzh/holoviews | cc6b27f01710402fdfee2aeef1507425ca78c91f | [
"BSD-3-Clause"
] | 180 | 2019-11-19T16:44:44.000Z | 2022-03-28T22:49:18.000Z | import inspect
import re
import warnings
import numpy as np
import matplotlib
from matplotlib import units as munits
from matplotlib import ticker
from matplotlib.colors import Normalize, cnames
from matplotlib.lines import Line2D
from matplotlib.markers import MarkerStyle
from matplotlib.patches import Path, PathPatch
from matplotlib.transforms import Bbox, TransformedBbox, Affine2D
from matplotlib.rcsetup import (
validate_fontsize, validate_fonttype, validate_hatch)
try: # starting Matplotlib 3.4.0
from matplotlib._enums import CapStyle as validate_capstyle
from matplotlib._enums import JoinStyle as validate_joinstyle
except: # before Matplotlib 3.4.0
from matplotlib.rcsetup import (
validate_capstyle, validate_joinstyle)
try:
from nc_time_axis import NetCDFTimeConverter, CalendarDateTime
nc_axis_available = True
except:
from matplotlib.dates import DateConverter
NetCDFTimeConverter = DateConverter
nc_axis_available = False
from ...core.util import (
LooseVersion, arraylike_types, cftime_types, is_number
)
from ...element import Raster, RGB, Polygons
from ..util import COLOR_ALIASES, RGB_HEX_REGEX
mpl_version = LooseVersion(matplotlib.__version__)
def is_color(color):
"""
Checks if supplied object is a valid color spec.
"""
if not isinstance(color, str):
return False
elif RGB_HEX_REGEX.match(color):
return True
elif color in COLOR_ALIASES:
return True
elif color in cnames:
return True
return False
validators = {
'alpha': lambda x: is_number(x) and (0 <= x <= 1),
'capstyle': validate_capstyle,
'color': is_color,
'fontsize': validate_fontsize,
'fonttype': validate_fonttype,
'hatch': validate_hatch,
'joinstyle': validate_joinstyle,
'marker': lambda x: (x in Line2D.markers or isinstance(x, MarkerStyle)
or isinstance(x, Path) or
(isinstance(x, str) and x.startswith('$')
and x.endswith('$'))),
's': lambda x: is_number(x) and (x >= 0)
}
def get_old_rcparams():
deprecated_rcparams = [
'text.latex.unicode',
'examples.directory',
'savefig.frameon', # deprecated in MPL 3.1, to be removed in 3.3
'verbose.level', # deprecated in MPL 3.1, to be removed in 3.3
'verbose.fileo', # deprecated in MPL 3.1, to be removed in 3.3
'datapath', # deprecated in MPL 3.2.1, to be removed in 3.3
'text.latex.preview', # deprecated in MPL 3.3.1
'animation.avconv_args', # deprecated in MPL 3.3.1
'animation.avconv_path', # deprecated in MPL 3.3.1
'animation.html_args', # deprecated in MPL 3.3.1
'keymap.all_axes', # deprecated in MPL 3.3.1
'savefig.jpeg_quality' # deprecated in MPL 3.3.1
]
old_rcparams = {
k: v for k, v in matplotlib.rcParams.items()
if mpl_version < '3.0' or k not in deprecated_rcparams
}
return old_rcparams
def get_validator(style):
for k, v in validators.items():
if style.endswith(k) and (len(style) != 1 or style == k):
return v
def validate(style, value, vectorized=True):
"""
Validates a style and associated value.
Arguments
---------
style: str
The style to validate (e.g. 'color', 'size' or 'marker')
value:
The style value to validate
vectorized: bool
Whether validator should allow vectorized setting
Returns
-------
valid: boolean or None
If validation is supported returns boolean, otherwise None
"""
validator = get_validator(style)
if validator is None:
return None
if isinstance(value, arraylike_types+(list,)) and vectorized:
return all(validator(v) for v in value)
try:
valid = validator(value)
return False if valid == False else True
except:
return False
def filter_styles(style, group, other_groups, blacklist=[]):
"""
Filters styles which are specific to a particular artist, e.g.
for a GraphPlot this will filter options specific to the nodes and
edges.
Arguments
---------
style: dict
Dictionary of styles and values
group: str
Group within the styles to filter for
other_groups: list
Other groups to filter out
blacklist: list (optional)
List of options to filter out
Returns
-------
filtered: dict
Filtered dictionary of styles
"""
group = group+'_'
filtered = {}
for k, v in style.items():
if (any(k.startswith(p) for p in other_groups)
or k.startswith(group) or k in blacklist):
continue
filtered[k] = v
for k, v in style.items():
if not k.startswith(group) or k in blacklist:
continue
filtered[k[len(group):]] = v
return filtered
def wrap_formatter(formatter):
"""
Wraps formatting function or string in
appropriate matplotlib formatter type.
"""
if isinstance(formatter, ticker.Formatter):
return formatter
elif callable(formatter):
args = [arg for arg in inspect.getfullargspec(formatter).args
if arg != 'self']
wrapped = formatter
if len(args) == 1:
def wrapped(val, pos=None):
return formatter(val)
return ticker.FuncFormatter(wrapped)
elif isinstance(formatter, str):
if re.findall(r"\{(\w+)\}", formatter):
return ticker.StrMethodFormatter(formatter)
else:
return ticker.FormatStrFormatter(formatter)
def unpack_adjoints(ratios):
new_ratios = {}
offset = 0
for k, (num, ratios) in sorted(ratios.items()):
unpacked = [[] for _ in range(num)]
for r in ratios:
nr = len(r)
for i in range(num):
unpacked[i].append(r[i] if i < nr else np.nan)
for i, r in enumerate(unpacked):
new_ratios[k+i+offset] = r
offset += num-1
return new_ratios
def normalize_ratios(ratios):
normalized = {}
for i, v in enumerate(zip(*ratios.values())):
arr = np.array(v)
normalized[i] = arr/float(np.nanmax(arr))
return normalized
def compute_ratios(ratios, normalized=True):
unpacked = unpack_adjoints(ratios)
with warnings.catch_warnings():
warnings.filterwarnings('ignore', r'All-NaN (slice|axis) encountered')
if normalized:
unpacked = normalize_ratios(unpacked)
sorted_ratios = sorted(unpacked.items())
return np.nanmax(np.vstack([v for _, v in sorted_ratios]), axis=0)
def axis_overlap(ax1, ax2):
"""
Tests whether two axes overlap vertically
"""
b1, t1 = ax1.get_position().intervaly
b2, t2 = ax2.get_position().intervaly
return t1 > b2 and b1 < t2
def resolve_rows(rows):
"""
Recursively iterate over lists of axes merging
them by their vertical overlap leaving a list
of rows.
"""
merged_rows = []
for row in rows:
overlap = False
for mrow in merged_rows:
if any(axis_overlap(ax1, ax2) for ax1 in row
for ax2 in mrow):
mrow += row
overlap = True
break
if not overlap:
merged_rows.append(row)
if rows == merged_rows:
return rows
else:
return resolve_rows(merged_rows)
def fix_aspect(fig, nrows, ncols, title=None, extra_artists=[],
vspace=0.2, hspace=0.2):
"""
Calculate heights and widths of axes and adjust
the size of the figure to match the aspect.
"""
fig.canvas.draw()
w, h = fig.get_size_inches()
# Compute maximum height and width of each row and columns
rows = resolve_rows([[ax] for ax in fig.axes])
rs, cs = len(rows), max([len(r) for r in rows])
heights = [[] for i in range(cs)]
widths = [[] for i in range(rs)]
for r, row in enumerate(rows):
for c, ax in enumerate(row):
bbox = ax.get_tightbbox(fig.canvas.get_renderer())
heights[c].append(bbox.height)
widths[r].append(bbox.width)
height = (max([sum(c) for c in heights])) + nrows*vspace*fig.dpi
width = (max([sum(r) for r in widths])) + ncols*hspace*fig.dpi
# Compute aspect and set new size (in inches)
aspect = height/width
offset = 0
if title and title.get_text():
offset = title.get_window_extent().height/fig.dpi
fig.set_size_inches(w, (w*aspect)+offset)
# Redraw and adjust title position if defined
fig.canvas.draw()
if title and title.get_text():
extra_artists = [a for a in extra_artists
if a is not title]
bbox = get_tight_bbox(fig, extra_artists)
top = bbox.intervaly[1]
if title and title.get_text():
title.set_y((top/(w*aspect)))
def get_tight_bbox(fig, bbox_extra_artists=[], pad=None):
"""
Compute a tight bounding box around all the artists in the figure.
"""
renderer = fig.canvas.get_renderer()
bbox_inches = fig.get_tightbbox(renderer)
bbox_artists = bbox_extra_artists[:]
bbox_artists += fig.get_default_bbox_extra_artists()
bbox_filtered = []
for a in bbox_artists:
bbox = a.get_window_extent(renderer)
if isinstance(bbox, tuple):
continue
if a.get_clip_on():
clip_box = a.get_clip_box()
if clip_box is not None:
bbox = Bbox.intersection(bbox, clip_box)
clip_path = a.get_clip_path()
if clip_path is not None and bbox is not None:
clip_path = clip_path.get_fully_transformed_path()
bbox = Bbox.intersection(bbox,
clip_path.get_extents())
if bbox is not None and (bbox.width != 0 or
bbox.height != 0):
bbox_filtered.append(bbox)
if bbox_filtered:
_bbox = Bbox.union(bbox_filtered)
trans = Affine2D().scale(1.0 / fig.dpi)
bbox_extra = TransformedBbox(_bbox, trans)
bbox_inches = Bbox.union([bbox_inches, bbox_extra])
return bbox_inches.padded(pad) if pad else bbox_inches
def get_raster_array(image):
"""
Return the array data from any Raster or Image type
"""
if isinstance(image, RGB):
rgb = image.rgb
data = np.dstack([np.flipud(rgb.dimension_values(d, flat=False))
for d in rgb.vdims])
else:
data = image.dimension_values(2, flat=False)
if type(image) is Raster:
data = data.T
else:
data = np.flipud(data)
return data
def ring_coding(array):
"""
Produces matplotlib Path codes for exterior and interior rings
of a polygon geometry.
"""
# The codes will be all "LINETO" commands, except for "MOVETO"s at the
# beginning of each subpath
n = len(array)
codes = np.ones(n, dtype=Path.code_type) * Path.LINETO
codes[0] = Path.MOVETO
codes[-1] = Path.CLOSEPOLY
return codes
def polygons_to_path_patches(element):
"""
Converts Polygons into list of lists of matplotlib.patches.PathPatch
objects including any specified holes. Each list represents one
(multi-)polygon.
"""
paths = element.split(datatype='array', dimensions=element.kdims)
has_holes = isinstance(element, Polygons) and element.interface.has_holes(element)
holes = element.interface.holes(element) if has_holes else None
mpl_paths = []
for i, path in enumerate(paths):
splits = np.where(np.isnan(path[:, :2].astype('float')).sum(axis=1))[0]
arrays = np.split(path, splits+1) if len(splits) else [path]
subpath = []
for j, array in enumerate(arrays):
if j != (len(arrays)-1):
array = array[:-1]
if (array[0] != array[-1]).any():
array = np.append(array, array[:1], axis=0)
interiors = []
for interior in (holes[i][j] if has_holes else []):
if (interior[0] != interior[-1]).any():
interior = np.append(interior, interior[:1], axis=0)
interiors.append(interior)
vertices = np.concatenate([array]+interiors)
codes = np.concatenate([ring_coding(array)]+
[ring_coding(h) for h in interiors])
subpath.append(PathPatch(Path(vertices, codes)))
mpl_paths.append(subpath)
return mpl_paths
class CFTimeConverter(NetCDFTimeConverter):
"""
Defines conversions for cftime types by extending nc_time_axis.
"""
@classmethod
def convert(cls, value, unit, axis):
if not nc_axis_available:
raise ValueError('In order to display cftime types with '
'matplotlib install the nc_time_axis '
'library using pip or from conda-forge '
'using:\n\tconda install -c conda-forge '
'nc_time_axis')
if isinstance(value, cftime_types):
value = CalendarDateTime(value.datetime, value.calendar)
elif isinstance(value, np.ndarray):
value = np.array([CalendarDateTime(v.datetime, v.calendar) for v in value])
return super().convert(value, unit, axis)
class EqHistNormalize(Normalize):
def __init__(self, vmin=None, vmax=None, clip=False, nbins=256**2, ncolors=256):
super().__init__(vmin, vmax, clip)
self._nbins = nbins
self._bin_edges = None
self._ncolors = ncolors
self._color_bins = np.linspace(0, 1, ncolors)
def binning(self, data, n=256):
low = data.min() if self.vmin is None else self.vmin
high = data.max() if self.vmax is None else self.vmax
nbins = self._nbins
eq_bin_edges = np.linspace(low, high, nbins+1)
hist, _ = np.histogram(data, eq_bin_edges)
eq_bin_centers = np.convolve(eq_bin_edges, [0.5, 0.5], mode='valid')
cdf = np.cumsum(hist)
cdf_max = cdf[-1]
norm_cdf = cdf/cdf_max
# Iteratively find as many finite bins as there are colors
finite_bins = n-1
binning = []
iterations = 0
guess = n*2
while ((finite_bins != n) and (iterations < 4) and (finite_bins != 0)):
ratio = guess/finite_bins
if (ratio > 1000):
#Abort if distribution is extremely skewed
break
guess = np.round(max(n*ratio, n))
# Interpolate
palette_edges = np.arange(0, guess)
palette_cdf = norm_cdf*(guess-1)
binning = np.interp(palette_edges, palette_cdf, eq_bin_centers)
# Evaluate binning
uniq_bins = np.unique(binning)
finite_bins = len(uniq_bins)-1
iterations += 1
if (finite_bins == 0):
binning = [low]+[high]*(n-1)
else:
binning = binning[-n:]
if (finite_bins != n):
warnings.warn("EqHistColorMapper warning: Histogram equalization did not converge.")
return binning
def __call__(self, data, clip=None):
return self.process_value(data)[0]
def process_value(self, data):
if isinstance(data, np.ndarray):
self._bin_edges = self.binning(data, self._ncolors)
isscalar = np.isscalar(data)
data = np.array([data]) if isscalar else data
interped = np.interp(data, self._bin_edges, self._color_bins)
return np.ma.array(interped), isscalar
def inverse(self, value):
if self._bin_edges is None:
raise ValueError("Not invertible until eq_hist has been computed")
return np.interp([value], self._color_bins, self._bin_edges)[0]
for cft in cftime_types:
munits.registry[cft] = CFTimeConverter()
| 33.740506 | 100 | 0.614269 |
acf8f252ec040429be08f97b63f49177260f8c5b | 1,251 | py | Python | yogiyo/yogiyo/urls.py | YogiyoCloneTeamProject/YogiYoCloneAPI | 477cfe0508aa685e513b24421466fb0d6707f5ef | [
"MIT"
] | 11 | 2020-10-06T16:59:01.000Z | 2022-03-15T14:47:57.000Z | yogiyo/yogiyo/urls.py | YogiyoCloneTeamProject/YogiYoCloneAPI | 477cfe0508aa685e513b24421466fb0d6707f5ef | [
"MIT"
] | 38 | 2020-08-31T03:33:04.000Z | 2020-10-08T00:46:32.000Z | yogiyo/yogiyo/urls.py | YogiyoCloneTeamProject/YogiYoCloneAPI | 477cfe0508aa685e513b24421466fb0d6707f5ef | [
"MIT"
] | 9 | 2020-08-16T09:07:10.000Z | 2021-09-25T10:30:42.000Z | """yogiyo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
import debug_toolbar
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.auth.models import Group
from django.urls import path, include
from .yasg import *
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('core.urls')),
path('__debug__/', include(debug_toolbar.urls)),
]
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += urlpatterns_yasg
# Admin 설정
admin.site.site_header = "Yogiyo API Admin"
admin.site.site_title = "Joystagram Admin Portal"
admin.site.unregister(Group)
| 35.742857 | 77 | 0.739408 |
acf8f2c0905ace5e190064aba1310fcc39eca6cb | 1,384 | py | Python | src/bll/player/videoplayeradapter.py | pgecsenyi/piepy | 37bf6cb5bc8c4f9da3f695216beda7353d79fb29 | [
"MIT"
] | 1 | 2018-03-26T22:39:36.000Z | 2018-03-26T22:39:36.000Z | src/bll/player/videoplayeradapter.py | pgecsenyi/piepy | 37bf6cb5bc8c4f9da3f695216beda7353d79fb29 | [
"MIT"
] | null | null | null | src/bll/player/videoplayeradapter.py | pgecsenyi/piepy | 37bf6cb5bc8c4f9da3f695216beda7353d79fb29 | [
"MIT"
] | null | null | null | from bll.player.playeradapter import PlayerAdapter
class VideoPlayerAdapter(PlayerAdapter):
####################################################################################################################
# Constructor.
####################################################################################################################
def __init__(self, video_dal_retriever, player):
"""
Calls the base class constructor.
Parameters
----------
video_dal_retriever : VideoDataHandler
A reference to the video DAL.
player : IPlayerHandler
A reference to the audio and video player.
"""
### Call base class constructor.
super(VideoPlayerAdapter, self).__init__(player)
### Attributes from outside.
self._video_dal_retriever = video_dal_retriever
####################################################################################################################
# Protected overrides.
####################################################################################################################
def _get_file_by_id(self, file_id):
return self._video_dal_retriever.retrieve_video_path(file_id)
def _get_subtitle_by_id(self, file_id):
return self._video_dal_retriever.retrieve_subtitle_path(file_id)
| 36.421053 | 120 | 0.437861 |
acf8f368a8fcbec140acf0a10ddda6ced141b9c7 | 1,573 | py | Python | cluster/json2ini.py | leoloe326/aws-training | 7aa15731ec24192b373aa646f9d13668a88a44fd | [
"BSD-3-Clause"
] | null | null | null | cluster/json2ini.py | leoloe326/aws-training | 7aa15731ec24192b373aa646f9d13668a88a44fd | [
"BSD-3-Clause"
] | null | null | null | cluster/json2ini.py | leoloe326/aws-training | 7aa15731ec24192b373aa646f9d13668a88a44fd | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# JSON to INI
import argparse
import sys
import json
# Accumulates {keyword: value} pairs discovered while walking the JSON tree.
addrs = {}


def find_key(data, keywords):
    """Recursively scan a nested dict, recording values whose key is in *keywords*.

    Matches are stored in the module-level ``addrs`` dict (later matches
    overwrite earlier ones).  For list values only the first element is
    descended into.
    """
    if not isinstance(data, dict):
        return
    for name, child in data.items():
        if name in keywords:
            addrs[name] = child
        if isinstance(child, dict):
            find_key(child, keywords)
        elif isinstance(child, list) and child:
            find_key(child[0], keywords)
parser = argparse.ArgumentParser(
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-i", "--input", dest='input', type=str,
                    default='-', help="input file")
parser.add_argument("-o", "--output", dest='output', type=str,
                    default='-', help="output file")
parser.add_argument("-k", "--keywords", dest='keywords', type=str,
                    default='', help="comma separated keywords for section")
args = parser.parse_args()

keywords = args.keywords.split(',')

# Load the JSON document from stdin or from the named input file.
data = None
if args.input == '-':
    data = json.load(sys.stdin)
else:
    with open(args.input, 'r') as f:
        data = json.load(f)

find_key(data, keywords)

# Emit one INI section per matched keyword; a section body is the matched
# node's 'value' entry (either a single string or a list of strings).
out = sys.stdout if args.output == '-' else open(args.output, 'w')
newline = ''
for key, value in addrs.items():
    out.write("%s[%s]\n" % (newline, key))
    newline = '\n'
    if isinstance(value, dict):
        if 'value' in value:
            if isinstance(value['value'], list):
                for v in value['value']:
                    out.write('%s\n' % v)
            elif isinstance(value['value'], str):
                out.write('%s\n' % value['value'])
out.flush()
# Bug fix: the original unconditionally called out.close(), which closed
# sys.stdout when no output file was given; only close handles we opened.
if out is not sys.stdout:
    out.close()
acf8f493e50fd65df4c740bb073d8f563f109913 | 2,723 | py | Python | scripts/typescript_checks.py | nishcthulhu/oppia | 5e27ae12448f6295899d7d7ea99b08061e2dd784 | [
"Apache-2.0"
] | 1 | 2021-06-26T00:31:08.000Z | 2021-06-26T00:31:08.000Z | scripts/typescript_checks.py | nishcthulhu/oppia | 5e27ae12448f6295899d7d7ea99b08061e2dd784 | [
"Apache-2.0"
] | null | null | null | scripts/typescript_checks.py | nishcthulhu/oppia | 5e27ae12448f6295899d7d7ea99b08061e2dd784 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""File for compiling and checking typescript."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import json
import os
import shutil
import subprocess
import sys
import python_utils
from . import common
COMPILED_JS_DIR = os.path.join('local_compiled_js_for_test', '')
TSCONFIG_FILEPATH = 'tsconfig.json'


def validate_compiled_js_dir():
    """Raises if COMPILED_JS_DIR disagrees with the outDir set in tsconfig."""
    with python_utils.open_file(TSCONFIG_FILEPATH, 'r') as f:
        tsconfig = json.load(f)
    # Normalize with a trailing separator so it is comparable to
    # COMPILED_JS_DIR, which is built the same way.
    out_dir = os.path.join(tsconfig['compilerOptions']['outDir'], '')
    if out_dir == COMPILED_JS_DIR:
        return
    raise Exception(
        'COMPILED_JS_DIR: %s does not match the output directory '
        'in %s: %s' % (COMPILED_JS_DIR, TSCONFIG_FILEPATH, out_dir))
def compile_and_check_typescript():
    """Compiles typescript files and checks the compilation errors.

    Runs tsc on TSCONFIG_FILEPATH, treats any compiler stdout output as an
    error, and exits the process with status 1 when errors are reported.
    The throwaway compilation output directory is removed both before and
    after the run.
    """
    node_path = common.NODE_PATH
    os.environ['PATH'] = '%s/bin:' % node_path + os.environ['PATH']

    validate_compiled_js_dir()

    if os.path.exists(COMPILED_JS_DIR):
        shutil.rmtree(COMPILED_JS_DIR)

    python_utils.PRINT('Compiling and testing typescript...')
    cmd = [
        './node_modules/typescript/bin/tsc', '--project',
        TSCONFIG_FILEPATH]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    error_messages = []
    # Bug fix: the pipe yields bytes (no universal_newlines), so the readline
    # sentinel must be b''. A '' sentinel never matches under Python 3 and the
    # loop would spin forever at EOF. Under Python 2, b'' == '' so behavior
    # there is unchanged.
    for line in iter(process.stdout.readline, b''):
        error_messages.append(line)
    if os.path.exists(COMPILED_JS_DIR):
        shutil.rmtree(COMPILED_JS_DIR)
    if error_messages:
        python_utils.PRINT('Errors found during compilation\n')
        for message in error_messages:
            python_utils.PRINT(message)
        sys.exit(1)
    else:
        python_utils.PRINT('Compilation successful!')
# Script entry point. The 'no cover' pragma excludes this guard from coverage
# measurement because it only runs when typescript_checks.py is executed
# directly as a script, never when it is imported by the tests.
if __name__ == '__main__': # pragma: no cover
    compile_and_check_typescript()
| 35.363636 | 79 | 0.717224 |
acf8f49f8b468acd6929410b4a1b264c9c6d42df | 977 | py | Python | Crafting_Quality_Code_UniToronto/week1_approaches/palindrome2.py | bounty030/Coursera | ff265343635a0109b6deab31f2a112d304d020cb | [
"MIT"
] | 1 | 2021-01-17T15:13:49.000Z | 2021-01-17T15:13:49.000Z | Crafting_Quality_Code_UniToronto/week1_approaches/palindrome2.py | bounty030/Coursera | ff265343635a0109b6deab31f2a112d304d020cb | [
"MIT"
] | null | null | null | Crafting_Quality_Code_UniToronto/week1_approaches/palindrome2.py | bounty030/Coursera | ff265343635a0109b6deab31f2a112d304d020cb | [
"MIT"
] | 1 | 2021-01-17T15:13:16.000Z | 2021-01-17T15:13:16.000Z | # Learn to Program: Crafting Quality Code
# Palindrome: Algorithm 2
# Split string into two halves, reverse the second half and compare
# If string has an odd number of chars then leave the middle char out
def is_palindrome_v2(s):
    """ (str) -> bool

    Return True if and only if s is a palindrome.

    >>> is_palindrome_v2('noon')
    True
    >>> is_palindrome_v2('racecar')
    True
    >>> is_palindrome_v2('dented')
    False
    """
    # The number of chars in s.
    n = len(s)

    # Compare the first half of s to the reverse of the second half.
    # Omit the middle character of an odd-length string.
    # (The doctests above previously called is_palindrome_v1, which made
    # them fail with a NameError when run.)
    return s[:n // 2] == reverse(s[n - n // 2:])


def reverse(s):
    """ (str) -> str

    Return a reversed version of s.

    >>> reverse('hello')
    'olleh'
    >>> reverse('a')
    'a'
    """
    rev = ''
    # For each character in s, add that char to the beginning of rev.
    for ch in s:
        rev = ch + rev
    return rev
| 20.354167 | 73 | 0.594678 |
acf8f4ed7d56da9a1620ffeac058b379c07f5fc3 | 3,002 | py | Python | pelee/onnx1.py | DragonGongY/mmdet-ui | 41582b0ee2b3d9c631ee48540ca8a6d87be362e0 | [
"Apache-2.0"
] | 1 | 2021-12-24T05:28:20.000Z | 2021-12-24T05:28:20.000Z | pelee/onnx1.py | DragonGongY/mmdet-ui | 41582b0ee2b3d9c631ee48540ca8a6d87be362e0 | [
"Apache-2.0"
] | null | null | null | pelee/onnx1.py | DragonGongY/mmdet-ui | 41582b0ee2b3d9c631ee48540ca8a6d87be362e0 | [
"Apache-2.0"
] | null | null | null | #-*- coding:utf-8 -*-
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import cv2
import torch
from layers.functions import Detect
from peleenet import build_net
from data import BaseTransform, VOC_CLASSES
from utils.core import *
# Command-line options for the Pelee demo (defaults target the COCO model).
# NOTE(review): argparse is not imported explicitly above; presumably it is
# provided via 'from utils.core import *' -- confirm.
parser = argparse.ArgumentParser(description='Pelee Testing')
parser.add_argument('-c', '--config', default='configs/Pelee_COCO.py')
parser.add_argument('-d', '--dataset', default='COCO',
                    help='VOC or COCO dataset')
parser.add_argument('-m', '--trained_model', default="./weights/Pelee_COCO_size304_epoch40.pth",
                    type=str, help='Trained state_dict file path to open')
parser.add_argument('-t', '--thresh', default=0.25, type=float,
                    help='visidutation threshold')
parser.add_argument('--show', action='store_true',
                    help='Whether to display the images')
args = parser.parse_args()
print_info(' ----------------------------------------------------------------------\n'
           '| Pelee Demo Program |\n'
           ' ----------------------------------------------------------------------', ['yellow', 'bold'])
# NOTE(review): a 'global' statement at module level is a no-op; cfg is a
# module-level name anyway.
global cfg
cfg = Config.fromfile(args.config)
anchor_config = anchors(cfg.model)
print_info('The Anchor info: \n{}'.format(anchor_config))
priorbox = PriorBox(anchor_config)
# Build the network in inference mode and load the trained weights.
net = build_net('test', cfg.model.input_size, cfg.model)
init_net(net, cfg, args.trained_model)
print_info('===> Finished constructing and loading model', ['yellow', 'bold'])
net.eval()
num_classes = cfg.model.num_classes
with torch.no_grad():
    priors = priorbox.forward()
    if cfg.test_cfg.cuda:
        net = net.cuda()
        priors = priors.cuda()
        cudnn.benchmark = True
    else:
        net = net.cpu()
_preprocess = BaseTransform(
    cfg.model.input_size, cfg.model.rgb_means, (2, 0, 1))
detector = Detect(num_classes,
                  cfg.loss.bkg_label, anchor_config)
# NOTE(review): 'base' is unused in the visible code -- confirm before removing.
base = int(np.ceil(pow(num_classes, 1. / 3)))
# Class names: last comma-separated field of each line in coco_labels.txt.
cats = [_.strip().split(',')[-1]
        for _ in open('data/coco_labels.txt', 'r').readlines()]
label_config = {'VOC': VOC_CLASSES, 'COCO': tuple(['__background__'] + cats)}
labels = label_config[args.dataset]
def draw_detection(im, bboxes, scores, cls_inds, fps, thr=0.2):
    """Return a copy of *im* with one rectangle and class/score caption drawn
    per detection whose score reaches *thr*.

    *fps* is accepted for interface compatibility but is not rendered.
    """
    canvas = np.copy(im)
    h, w, _ = canvas.shape  # unused, but implicitly validates a 3-channel image
    for i, box in enumerate(bboxes):
        if scores[i] < thr:
            continue
        label_idx = int(cls_inds[i])
        top_left = (int(box[0]), int(box[1]))
        bottom_right = (int(box[2]), int(box[3]))
        cv2.rectangle(canvas, top_left, bottom_right, (255, 0, 0), 3)
        caption = '%s: %.3f' % (labels[label_idx], scores[i])
        cv2.putText(canvas, caption, (int(box[0]), int(box[1] - 7)),
                    cv2.FONT_HERSHEY_SIMPLEX, 2, (0, 0, 255), 3)
    return canvas
# Smoke-test the network with a random input tensor of the model's expected
# 1x3x304x304 shape, then export the traced graph to ONNX.
img = torch.randn(1,3,304,304)
img = img.cuda()
print(img.shape)
out = net(img)
# verbose=1 makes torch.onnx.export print the exported graph.
torch.onnx.export(net, img, "./pelee.onnx", verbose=1)
acf8f4ff0bfb694f5e9428afc7082d580d899029 | 4,782 | py | Python | features/eolearn/tests/test_blob.py | dreampedia20/eo-learn | f4994a1d6e910ba5d6ad877726ac2367048a44a1 | [
"MIT"
] | 1 | 2019-04-08T02:26:40.000Z | 2019-04-08T02:26:40.000Z | features/eolearn/tests/test_blob.py | dreampedia20/eo-learn | f4994a1d6e910ba5d6ad877726ac2367048a44a1 | [
"MIT"
] | 1 | 2019-11-27T09:44:40.000Z | 2019-11-27T09:44:40.000Z | features/eolearn/tests/test_blob.py | dreampedia20/eo-learn | f4994a1d6e910ba5d6ad877726ac2367048a44a1 | [
"MIT"
] | 1 | 2019-01-30T09:08:23.000Z | 2019-01-30T09:08:23.000Z | """
Module for computing blobs in EOPatch
Credits:
Copyright (c) 2018-2019 Hugo Fournier (Magellium)
Copyright (c) 2017-2019 Matej Aleksandrov, Devis Peresutti (Sinergise)
This source code is licensed under the MIT license found in the LICENSE
file in the root directory of this source tree.
"""
import unittest
import os.path
import numpy as np
from skimage.feature import blob_dog
from eolearn.core import EOPatch, FeatureType
from eolearn.features import BlobTask, DoGBlobTask, LoGBlobTask, DoHBlobTask
class TestBlob(unittest.TestCase):
    """Tests for the blob-detection EOTasks (generic, DoG, LoG, DoH)."""

    TEST_PATCH_FILENAME = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'TestInputs', 'TestPatch')

    @classmethod
    def setUpClass(cls):
        # Run every blob task once on a shared patch; keep a pristine copy
        # (initial_patch) to verify existing features were not modified.
        cls.patch = EOPatch.load(cls.TEST_PATCH_FILENAME)
        cls._prepare_patch(cls.patch)

        BlobTask((FeatureType.DATA, 'ndvi', 'blob'), blob_dog, sigma_ratio=1.6, min_sigma=1, max_sigma=30,
                 overlap=0.5, threshold=0).execute(cls.patch)
        DoGBlobTask((FeatureType.DATA, 'ndvi', 'blob_dog'), threshold=0).execute(cls.patch)
        LoGBlobTask((FeatureType.DATA, 'ndvi', 'blob_log'), log_scale=True, threshold=0).execute(cls.patch)
        DoHBlobTask((FeatureType.DATA, 'ndvi', 'blob_doh'), num_sigma=5, threshold=0).execute(cls.patch)

        cls.initial_patch = EOPatch.load(cls.TEST_PATCH_FILENAME)
        cls._prepare_patch(cls.initial_patch)

    @staticmethod
    def _prepare_patch(patch):
        # Truncate to 10 frames and replace NaNs so the detectors get
        # finite input.
        ndvi = patch.data['ndvi'][:10]
        ndvi[np.isnan(ndvi)] = 0
        patch.data['ndvi'] = ndvi

    def _assert_feature_stats(self, feature_name, exp_min, exp_max, exp_mean, exp_median, delta=1e-4):
        """Asserts min/max/mean/median of patch.data[feature_name].

        Factored out of the per-detector tests, which previously repeated
        the same four assertAlmostEqual blocks verbatim.
        """
        blob = self.patch.data[feature_name]
        for stat_name, stat_func, expected in (
                ('min', np.min, exp_min),
                ('max', np.max, exp_max),
                ('mean', np.mean, exp_mean),
                ('median', np.median, exp_median)):
            value = stat_func(blob)
            self.assertAlmostEqual(value, expected, delta=delta,
                                   msg="Expected {} {}, got {}".format(stat_name, expected, value))

    def test_blob_feature(self):
        self.assertTrue(np.allclose(self.patch.data['blob'], self.patch.data['blob_dog']),
                        msg='DoG derived class result not equal to base class result')

    def test_dog_feature(self):
        self._assert_feature_stats('blob_dog', 0.0, 37.9625, 0.0545, 0.0)

    def test_log_feature(self):
        self._assert_feature_stats('blob_log', 0.0, 13.65408, 0.05728, 0.0)

    def test_doh_feature(self):
        self._assert_feature_stats('blob_doh', 0.0, 1.4142, 0.0007, 0.0)

    def test_unchanged_features(self):
        for feature, value in self.initial_patch.data.items():
            self.assertTrue(np.array_equal(value, self.patch.data[feature]),
                            msg="EOPatch data feature '{}' was changed in the process".format(feature))


if __name__ == '__main__':
    unittest.main()
acf8f60eb866c9004e6ddcc7d87e04095f20928f | 1,119 | py | Python | tests/Pyro4-4.17/examples/attributes/server.py | nelmiux/CS347-Data_Management | 1e9d87097b5a373f9312b0d6b413198e495fd6c0 | [
"CNRI-Jython"
] | null | null | null | tests/Pyro4-4.17/examples/attributes/server.py | nelmiux/CS347-Data_Management | 1e9d87097b5a373f9312b0d6b413198e495fd6c0 | [
"CNRI-Jython"
] | null | null | null | tests/Pyro4-4.17/examples/attributes/server.py | nelmiux/CS347-Data_Management | 1e9d87097b5a373f9312b0d6b413198e495fd6c0 | [
"CNRI-Jython"
] | null | null | null | from __future__ import print_function
import sys
import Pyro4
if sys.version_info<(3,0):
input=raw_input
dotted=input("enter value for DOTTEDNAMES config item: ").strip()
Pyro4.config.DOTTEDNAMES = dotted in ("1","true","on","yes")
something="Something"
class SubThingy(object):
def __init__(self):
self.value=42
self._value=123
self.__value=999
def getValue(self):
return self.value
def setValue(self,value):
self.value=value
class Thingy(object):
def __init__(self):
self.sub=SubThingy()
self.value=42
self._value=123
self.__value=999
def getSubValue(self):
return self.sub.getValue()
def setSubValue(self, value):
self.sub.setValue(value)
def dottedNames(self):
return Pyro4.config.DOTTEDNAMES
def printSomething(self):
print("something:",something)
return something
d=Pyro4.Daemon()
uri=d.register(Thingy(), "example.attributes")
print("server object uri:",uri)
print("DOTTEDNAMES=",Pyro4.config.DOTTEDNAMES)
print("attributes server running.")
d.requestLoop()
| 24.866667 | 65 | 0.672922 |
acf8f65c9dc739e8e8d63b6a3198777520ff0d53 | 4,311 | py | Python | src/dfvfs/file_io/fake_file_io.py | nesfit/pyspark-plaso | 4e0680a1a79a5aebfbc7ae983da30841bf984d95 | [
"Apache-2.0"
] | 2 | 2020-02-09T01:11:08.000Z | 2021-09-17T04:16:31.000Z | dfvfs/file_io/fake_file_io.py | Acidburn0zzz/dfvfs | 3db8c4e520e3e7527faffeea8f52187c861fa3b6 | [
"Apache-2.0"
] | null | null | null | dfvfs/file_io/fake_file_io.py | Acidburn0zzz/dfvfs | 3db8c4e520e3e7527faffeea8f52187c861fa3b6 | [
"Apache-2.0"
] | 1 | 2021-03-17T09:47:01.000Z | 2021-03-17T09:47:01.000Z | # -*- coding: utf-8 -*-
"""The fake file-like object implementation."""
from __future__ import unicode_literals
import os
from dfvfs.file_io import file_io
from dfvfs.lib import errors
class FakeFile(file_io.FileIO):
  """Fake file-like object serving an in-memory byte string."""

  def __init__(self, resolver_context, file_data):
    """Initializes a file-like object.

    Args:
      resolver_context (Context): resolver context.
      file_data (bytes): fake file data served by read().
    """
    super(FakeFile, self).__init__(resolver_context)
    self._current_offset = 0
    self._file_data = file_data
    self._size = 0

  def _Close(self):
    """Closes the file-like object."""
    return

  def _Open(self, path_spec=None, mode='rb'):
    """Opens the file-like object defined by path specification.

    Args:
      path_spec (PathSpec): path specification.
      mode (Optional[str]): file access mode.

    Raises:
      AccessError: if the access to open the file was denied.
      IOError: if the file-like object could not be opened.
      OSError: if the file-like object could not be opened.
      PathSpecError: if the path specification is incorrect.
      ValueError: if the path specification is invalid.
    """
    if not path_spec:
      raise ValueError('Missing path specification.')

    if path_spec.HasParent():
      raise errors.PathSpecError('Unsupported path specification with parent.')

    location = getattr(path_spec, 'location', None)
    if location is None:
      raise errors.PathSpecError('Path specification missing location.')

    # Opening merely rewinds the offset and (re)derives the data size.
    self._current_offset = 0
    self._size = len(self._file_data)

  # Note: that the following functions do not follow the style guide
  # because they are part of the file-like object interface.
  # pylint: disable=invalid-name

  def read(self, size=None):
    """Reads a byte string from the file-like object at the current offset.

    The function will read a byte string of the specified size or
    all of the remaining data if no size was specified.

    Args:
      size (Optional[int]): number of bytes to read, where None is all
          remaining data.

    Returns:
      bytes: data read.

    Raises:
      IOError: if the read failed.
      OSError: if the read failed.
    """
    if not self._is_open:
      raise IOError('Not opened.')

    if self._current_offset < 0:
      raise IOError(
          'Invalid current offset: {0:d} value less than zero.'.format(
              self._current_offset))

    # Nothing left to serve once the offset reaches (or passes) the end.
    if self._file_data is None or self._current_offset >= self._size:
      return b''

    if size is None:
      size = self._size
    # Clamp the request to the number of remaining bytes.
    size = min(size, self._size - self._current_offset)

    data = self._file_data[self._current_offset:self._current_offset + size]
    self._current_offset += size
    return data

  def seek(self, offset, whence=os.SEEK_SET):
    """Seeks to an offset within the file-like object.

    Args:
      offset (int): offset to seek to.
      whence (Optional(int)): value that indicates whether offset is an absolute
          or relative position within the file.

    Raises:
      IOError: if the seek failed.
      OSError: if the seek failed.
    """
    if not self._is_open:
      raise IOError('Not opened.')

    if whence == os.SEEK_SET:
      new_offset = offset
    elif whence == os.SEEK_CUR:
      new_offset = self._current_offset + offset
    elif whence == os.SEEK_END:
      new_offset = self._size + offset
    else:
      raise IOError('Unsupported whence.')

    if new_offset < 0:
      raise IOError('Invalid offset value less than zero.')

    self._current_offset = new_offset

  def get_offset(self):
    """Retrieves the current offset into the file-like object.

    Returns:
      int: current offset into the file-like object.

    Raises:
      IOError: if the file-like object has not been opened.
      OSError: if the file-like object has not been opened.
    """
    if not self._is_open:
      raise IOError('Not opened.')

    return self._current_offset

  def get_size(self):
    """Retrieves the size of the file-like object.

    Returns:
      int: size of the file data.

    Raises:
      IOError: if the file-like object has not been opened.
      OSError: if the file-like object has not been opened.
    """
    if not self._is_open:
      raise IOError('Not opened.')

    return self._size
| 27.812903 | 80 | 0.66945 |
acf8f6a43f2fa0410b35639b243a09b27d2cb86e | 887 | py | Python | Student Attendance Record I (551)/551.py | WeaverDyl/LeetCode-Solutions | 5b37e26390bcda69ffcb1142cc03e193f74735e6 | [
"MIT"
] | null | null | null | Student Attendance Record I (551)/551.py | WeaverDyl/LeetCode-Solutions | 5b37e26390bcda69ffcb1142cc03e193f74735e6 | [
"MIT"
] | null | null | null | Student Attendance Record I (551)/551.py | WeaverDyl/LeetCode-Solutions | 5b37e26390bcda69ffcb1142cc03e193f74735e6 | [
"MIT"
] | null | null | null | # Runtime: 20 ms
# Beats 100% of Python submissions
class Solution(object):
    def checkRecord(self, s):
        """Return True if the attendance record s qualifies for a reward.

        A record is rewardable when it contains strictly fewer than 2
        absences ('A') in total and never 3 or more consecutive lates ('L').

        :type s: str
        :rtype: bool
        """
        # 'LLL' in s catches any run of 3 or more consecutive lates.
        if 'LLL' in s:
            return False
        # Strictly fewer than 2 absences are allowed; the previous check
        # (s.count("A") > 2) wrongly accepted records with two absences.
        if s.count('A') > 1:
            return False
        return True
# A more worked out solution is:
# class Solution(object):
# def checkRecord(self, s):
# """
# :type s: str
# :rtype: bool
# """
# a_count = 0
# cons_p_count = 0
# for day in s:
# if day == 'A':
# a_count +=1
# if day == 'L':
# cons_p_count += 1
# else:
# cons_p_count = 0
# if a_count > 1:
# return False
# if cons_p_count > 2:
# return False
# return True | 23.342105 | 35 | 0.409245 |
acf8f6a79e27cf5395be78b2996553ab4405479e | 4,057 | py | Python | core/run.py | otto-de/crystal-gazer | 4ce83e02d72e7dda184b03378032c188ffa4f310 | [
"Apache-2.0"
] | 1 | 2018-06-18T10:16:26.000Z | 2018-06-18T10:16:26.000Z | core/run.py | otto-de/crystal-gazer | 4ce83e02d72e7dda184b03378032c188ffa4f310 | [
"Apache-2.0"
] | null | null | null | core/run.py | otto-de/crystal-gazer | 4ce83e02d72e7dda184b03378032c188ffa4f310 | [
"Apache-2.0"
] | null | null | null | import time
from pathlib import Path
import numpy as np
import pandas as pd
import tensorflow as tf
import core.loader as ld
from core.interaction_index import InteractionIndex
from core.interaction_mapper import InteractionMapper
from core.metric_profiler import MetricProfiler
from core.network import Network
from core.tensorboard_writer import TensorboardWriter
from core.trainer import Trainer
def run(config):
    """Trains the interaction-embedding network described by ``config``.

    Builds (or warm-starts) the network, runs the training loop for
    ``config.epochs`` epochs while logging metrics to TensorBoard after
    every batch, evaluates at each epoch boundary, and finally saves the
    interaction index plus a ``_SUCCESS`` marker file.
    """
    tf.reset_default_graph()
    cf = config
    um = InteractionMapper(cf.path_interaction_map)
    # ii/mp keep these placeholder values until the first epoch completes.
    # NOTE(review): if cf.epochs == 0 the loop never runs and the
    # mp.log_results() / ii.safe() calls below would fail -- confirm that
    # epochs >= 1 is guaranteed by the config.
    ii = None
    mp = False
    # NOTE(review): 'continnue' is the attribute's actual spelling on the
    # config object; it cannot be fixed here alone.
    if cf.continnue_previous_run:
        # Warm-start the embedding matrix from the previous run's index dump.
        pd_df = pd.read_csv(cf.previous_successful_output_run_dir + "/interaction_indexing/interaction_index.txt",
                            header=None)
        for col in pd_df.columns:
            pd_df[col] = pd_df[col].astype(np.float32)
        network = Network(cf, um, preheated_embeddings=pd_df.values)
    else:
        network = Network(cf, um)
    train_loader = ld.Loader(cf, um, cf.path_train_data)
    test_loader = ld.Loader(cf, um, cf.path_test_data)
    trainer = Trainer(cf, network)
    cf.make_dirs()
    tbw = TensorboardWriter(cf)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        log_txt = "Config: " + cf.to_string() + "\n\n" + \
                  "Interaction mapper: " + um.to_string() + "\n\n" + \
                  "Train Loader @start: " + train_loader.to_string() + "\n\n" + \
                  "Test Loader @start: " + test_loader.to_string()
        tbw.log_info(sess, log_txt)
        while train_loader.epoch_cnt < cf.epochs:
            # Time batch preparation and the tensorflow step separately so
            # their ratio can be logged as a performance metric below.
            tb = time.time()
            batch_x, batch_y, target_distance = train_loader.get_next_batch(cf.batch_size)
            # Fractional epoch position used as the x axis of all plots.
            x_label = 1000 * train_loader.event_cnt / train_loader.tot_event_cnt + train_loader.epoch_cnt
            dt_batching = time.time() - tb
            tt = time.time()
            tensorboard_log_entry = trainer.train(sess, batch_x, batch_y, target_distance)
            dt_tensorflow = time.time() - tt
            dt_all = time.time() - tb
            events_per_sec_in_thousand = cf.batch_size / dt_all / 1000
            tbw.add_train_summary(tensorboard_log_entry, x_label)
            tbw.log_scalar(events_per_sec_in_thousand, x_label, tag="performance_metric: 1000 events per second")
            tbw.log_scalar(dt_tensorflow / dt_batching, x_label,
                           tag="performance_metric: delta time tensorflow / delta time batch processing")
            # Epoch boundary: evaluate on test data, rebuild the interaction
            # index from the current embeddings and refresh metric plots.
            if train_loader.new_epoch:
                batch_x, batch_y, target_distance = test_loader.get_next_batch(cf.batch_size * 100, fake_factor=0)
                print("epochs: " + str(train_loader.epoch_cnt))
                print("trainer testing...")
                tensorboard_log_entry = trainer.test(sess, batch_x, batch_y, target_distance)
                tbw.add_test_summary(tensorboard_log_entry, x_label)
                tbw.flush()
                print("calculating embedding...")
                embedding_vectors = trainer.get_interaction_embeddings(sess)
                print("calculating average normalization...")
                tbw.log_scalar(np.average(np.linalg.norm(embedding_vectors, axis=1)), x_label,
                               tag="evaluation_metric: average norm of embedding vectors (normalization condition will force it towards 1)")
                print("building index...")
                ii = InteractionIndex(um, embedding_vectors)
                print("metric profiling...")
                mp = MetricProfiler(cf, sess, tbw, train_loader, um, ii)
                mp.log_plots(x_label)
                print("epoch done")
        print("final logging...")
        mp.log_results()
        print("write timeline profile...")
        with open(cf.timeline_profile_path, 'w') as f:
            f.write(trainer.chrome_trace())
        tbw.flush()
        sess.close()
    print("saving index...")
    ii.safe(cf.index_safe_path)
    Path(cf.output_run_dir + '/_SUCCESS').touch()
    print("success: _SUCCESS generated")
acf8f6d3aaba2e6e5c98126b11f007cd3a38301f | 7,566 | py | Python | src/www/start.py | code-critic/codecritic2 | 1c0f941731411f2d5b4f9f9ba87aabc8dcf3c564 | [
"MIT"
] | null | null | null | src/www/start.py | code-critic/codecritic2 | 1c0f941731411f2d5b4f9f9ba87aabc8dcf3c564 | [
"MIT"
] | null | null | null | src/www/start.py | code-critic/codecritic2 | 1c0f941731411f2d5b4f9f9ba87aabc8dcf3c564 | [
"MIT"
] | null | null | null | #!/bin/python3
# author: Jan Hybs
import argparse
import sys
from loguru import logger
from env import Env
# Log both to stdout and to the configured log file (the file sink keeps
# colorized output).
logger.configure(handlers=[
    dict(sink=sys.stdout),
    dict(sink=Env.log_file, colorize=True)
])
from entities.crates import CaseResult
# NOTE(review): this CaseResult instance is built from placeholder values and
# is never used later in this file -- looks like leftover smoke-test code;
# confirm before removing.
c = CaseResult(
    id='self.id',
    status='self.status.str',
    cmd='self.cmd',
    duration=1.3,
    returncode=0,
    console=None,
    message='self.message',
    message_details=None,
    attachments=list(),
    score=151651,
    scores=[13, 65, 8],
)
# app.run(debug=args.debug, host=args.host, port=args.port)
# from geventwebsocket import WebSocketServer
# # from gevent.pywsgi import WSGIServer
# http_server = WebSocketServer(('', 5000), app)
# http_server.serve_forever()
def parse_args(cargs=None):
    """Parses command-line options for the flask server.

    ``-h`` is repurposed for ``--host`` (hence ``add_help=False``); a custom
    ``--help`` flag reuses argparse's gettext shim so the help wording stays
    standard. When ``--verbose`` is absent, werkzeug's logger is raised to
    ERROR level to suppress routine request logging.
    """
    arg_parser = argparse.ArgumentParser(add_help=False)
    arg_parser.add_argument('--help', action='help', default=argparse.SUPPRESS,
                            help=argparse._('show this help message and exit'))

    group = arg_parser.add_argument_group('flask server')
    group.add_argument('-p', '--port', type=int, default=5000)
    group.add_argument('-h', '--host', type=str, default='127.0.0.1')
    for flag in (('-d', '--debug'), ('-v', '--verbose'), ('--backdoor',)):
        group.add_argument(*flag, action='store_true')
    args = arg_parser.parse_args(cargs)

    if not args.verbose:
        import logging
        logger.info('supressing flask warnings')
        logging.getLogger('werkzeug').setLevel(logging.ERROR)
    return args
def register_routes(app, socketio):
    """Attaches every route module's handlers to the app and socket server."""
    from www import auth
    from www import index
    from www import course
    from www import sockets
    from www import stats
    from www import utils_www

    for module in (auth, index, course, sockets, stats, utils_www):
        module.register_routes(app, socketio)
# NOTE(review): sample submission/result document (appears to be leftover
# debug or fixture data); it is not referenced anywhere in the visible code
# below -- confirm before removing.
o = {
    "_id" : "5cc6be848bfa8a5da73d17e9",
    "lang" : "CPP",
    "solution" : "#include <iostream>\r\nusing namespace std;\r\n\r\nint main() \r\n{\r\n cout << 10 << endl;\r\n cout << 15 << endl;\r\n cout << 46 << endl;\r\n return 0;\r\n}",
    "docker" : False,
    "action" : "solve",
    "user" : "root",
    "course" : "TST-2019",
    "problem" : "problem-1",
    "result" : {
        "id" : "Result",
        "status" : "answer-wrong",
        "duration" : 9.69145011901855,
        "returncode" : None,
        "message" : "Submitted solution is wrong",
        "score" : 10006,
        "scores" : [
            1,
            0,
            6
        ]
    },
    "results" : [
        {
            "id" : "Compilation",
            "status" : "ok",
            "cmd" : "g++ main.cpp -o main",
            "duration" : 7.35814046859741,
            "returncode" : 0,
            "score" : 0,
            "scores" : []
        },
        {
            "id" : "case-1.s",
            "status" : "answer-correct",
            "cmd" : "./main",
            "duration" : 0.0785830020904541,
            "returncode" : 0,
            "message" : "Submitted solution is correct",
            "score" : 0,
            "scores" : []
        },
        {
            "id" : "case-2",
            "status" : "answer-wrong",
            "cmd" : "./main",
            "duration" : 0.0173075199127197,
            "returncode" : 0,
            "message" : "Submitted solution is incorrect",
            "score" : 0,
            "scores" : []
        },
        {
            "id" : "case-3.0",
            "status" : "answer-wrong",
            "cmd" : "./main",
            "duration" : 0.0140361785888672,
            "returncode" : 0,
            "message" : "Submitted solution is incorrect",
            "score" : 0,
            "scores" : []
        },
        {
            "id" : "case-3.1",
            "status" : "answer-wrong",
            "cmd" : "./main",
            "duration" : 0.0361495018005371,
            "returncode" : 0,
            "message" : "Submitted solution is incorrect",
            "score" : 0,
            "scores" : []
        },
        {
            "id" : "case-3.2",
            "status" : "answer-wrong",
            "cmd" : "./main",
            "duration" : 0.0138497352600098,
            "returncode" : 0,
            "message" : "Submitted solution is incorrect",
            "score" : 0,
            "scores" : []
        },
        {
            "id" : "case-3.3",
            "status" : "answer-wrong",
            "cmd" : "./main",
            "duration" : 0.0343863964080811,
            "returncode" : 0,
            "message" : "Submitted solution is incorrect",
            "score" : 0,
            "scores" : []
        },
        {
            "id" : "case-3.4",
            "status" : "answer-wrong",
            "cmd" : "./main",
            "duration" : 0.0512194633483887,
            "returncode" : 0,
            "message" : "Submitted solution is incorrect",
            "score" : 0,
            "scores" : []
        }
    ],
    "output_dir" : "courses/course-template/2019/results/root/problem-1/348-200-W-answer-wrong",
    "attempt" : 348
}
def main():
    """Starts the web server: parse args, patch for gevent, register routes, serve."""
    args = parse_args()
    async_mode = 'gevent' # eventlet, gevent_uwsgi, gevent, threading
    logger.info('Running automate version {}', Env.version)
    from utils.io import delete_old_files
    delete_old_files(Env.tmp)
    # -------------------------------------------------------------------------
    if args.debug:
        async_mode = 'threading'
        Env.debug_mode = True
    else:
        # gevent's monkey patching is applied before flask/socketio are
        # imported below; do not reorder these imports.
        from gevent import monkey
        monkey.patch_all()
    # -------------------------------------------------------------------------
    from flask_socketio import SocketIO
    import flask
    from www import app
    # -------------------------------------------------------------------------
    socketio = SocketIO(app, json=flask.json, async_mode=async_mode, ping_interval=100 * 1000)
    register_routes(app, socketio)
    # -------------------------------------------------------------------------
    # Backdoor routes are only registered when explicitly requested.
    if args.backdoor:
        from www import backdoor
        backdoor.register_routes(app, socketio)
    # -------------------------------------------------------------------------
    logger.info('Listening on {host}:{port} (debug={debug})', **vars(args))
    Env.dump_info('Configuration in env.py')
    logger.info('removing old files from {}', Env.tmp)
    # -------------------------------------------------------------------------
    # Debug mode serves via Flask's builtin server; otherwise use the
    # gevent websocket server.
    if args.debug:
        app.run(debug=args.debug, host=args.host, port=args.port)
    else:
        from geventwebsocket import WebSocketServer
        http_server = WebSocketServer((args.host, args.port), app)
        http_server.serve_forever()
    return app, args
if __name__ == '__main__':
    main()
# import difflib
# from pathlib import Path
#
# p = '4.1'
# # p = '5.0'
# a = Path('/home/jan-hybs/projects/cc/codecritic/.tmp/a.%s' % p).read_text().splitlines()
# b = Path('/home/jan-hybs/projects/cc/codecritic/.tmp/b.%s' % p).read_text().splitlines()
#
# def j(x):
# print('!', x)
# return x in "\nx"
#
# diff = difflib.SequenceMatcher(j, a, b).ratio()
# diff = difflib.HtmlDiff(charjunk=j)
# f = diff.make_file(a, b)
#
# Path('foo.html').write_text(f)
| 28.768061 | 190 | 0.507005 |
acf8f6f043badd997e9cfa21f5c3b0f75871894b | 7,348 | py | Python | vb_suite/reindex.py | lodagro/pandas | dfcf74679a273395cc9d7b3db78a1fbbc17c4f57 | [
"PSF-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | vb_suite/reindex.py | lodagro/pandas | dfcf74679a273395cc9d7b3db78a1fbbc17c4f57 | [
"PSF-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | vb_suite/reindex.py | lodagro/pandas | dfcf74679a273395cc9d7b3db78a1fbbc17c4f57 | [
"PSF-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | 1 | 2019-07-30T02:42:51.000Z | 2019-07-30T02:42:51.000Z | from vbench.benchmark import Benchmark
from datetime import datetime
common_setup = """from pandas_vb_common import *
"""
#----------------------------------------------------------------------
# DataFrame reindex columns
setup = common_setup + """
df = DataFrame(index=range(10000), data=np.random.rand(10000,30),
columns=range(30))
"""
statement = "df.reindex(columns=df.columns[1:5])"
frame_reindex_columns = Benchmark(statement, setup)
#----------------------------------------------------------------------
setup = common_setup + """
rng = DateRange('1/1/1970', periods=10000, offset=datetools.Minute())
df = DataFrame(np.random.rand(10000, 10), index=rng,
columns=range(10))
df['foo'] = 'bar'
rng2 = Index(rng[::2])
"""
statement = "df.reindex(rng2)"
dataframe_reindex = Benchmark(statement, setup)
#----------------------------------------------------------------------
# multiindex reindexing
setup = common_setup + """
N = 1000
K = 20
level1 = np.array([tm.rands(10) for _ in xrange(N)], dtype='O').repeat(K)
level2 = np.tile(np.array([tm.rands(10) for _ in xrange(K)], dtype='O'),
N)
index = MultiIndex.from_arrays([level1, level2])
s1 = Series(np.random.randn(N * K), index=index)
s2 = s1[::2]
"""
statement = "s1.reindex(s2.index)"
reindex_multi = Benchmark(statement, setup,
name='reindex_multiindex',
start_date=datetime(2011, 9, 1))
#----------------------------------------------------------------------
# Pad / backfill
setup = common_setup + """
rng = DateRange('1/1/2000', periods=10000, offset=datetools.Minute())
ts = Series(np.random.randn(len(rng)), index=rng)
ts2 = ts[::2]
ts3 = ts2.reindex(ts.index)
ts4 = ts3.astype('float32')
def pad():
try:
ts2.reindex(ts.index, method='pad')
except:
ts2.reindex(ts.index, fillMethod='pad')
def backfill():
try:
ts2.reindex(ts.index, method='backfill')
except:
ts2.reindex(ts.index, fillMethod='backfill')
"""
statement = "pad()"
reindex_daterange_pad = Benchmark(statement, setup,
name="reindex_daterange_pad")
statement = "backfill()"
reindex_daterange_backfill = Benchmark(statement, setup,
name="reindex_daterange_backfill")
reindex_fillna_pad = Benchmark("ts3.fillna(method='pad')", setup,
name="reindex_fillna_pad",
start_date=datetime(2011, 3, 1))
reindex_fillna_pad_float32 = Benchmark("ts4.fillna(method='pad')", setup,
name="reindex_fillna_pad_float32",
start_date=datetime(2013, 1, 1))
reindex_fillna_backfill = Benchmark("ts3.fillna(method='backfill')", setup,
name="reindex_fillna_backfill",
start_date=datetime(2011, 3, 1))
reindex_fillna_backfill_float32 = Benchmark("ts4.fillna(method='backfill')", setup,
name="reindex_fillna_backfill_float32",
start_date=datetime(2013, 1, 1))
#----------------------------------------------------------------------
# align on level
setup = common_setup + """
index = MultiIndex(levels=[np.arange(10), np.arange(100), np.arange(100)],
labels=[np.arange(10).repeat(10000),
np.tile(np.arange(100).repeat(100), 10),
np.tile(np.tile(np.arange(100), 100), 10)])
random.shuffle(index.values)
df = DataFrame(np.random.randn(len(index), 4), index=index)
df_level = DataFrame(np.random.randn(100, 4), index=index.levels[1])
"""
reindex_frame_level_align = \
Benchmark("df.align(df_level, level=1, copy=False)", setup,
name='reindex_frame_level_align',
start_date=datetime(2011, 12, 27))
reindex_frame_level_reindex = \
Benchmark("df_level.reindex(df.index, level=1)", setup,
name='reindex_frame_level_reindex',
start_date=datetime(2011, 12, 27))
#----------------------------------------------------------------------
# sort_index, drop_duplicates
# pathological, but realistic
setup = common_setup + """
N = 10000
K = 10
key1 = np.array([rands(10) for _ in xrange(N)], dtype='O').repeat(K)
key2 = np.array([rands(10) for _ in xrange(N)], dtype='O').repeat(K)
df = DataFrame({'key1' : key1, 'key2' : key2,
'value' : np.random.randn(N * K)})
col_array_list = list(df.values.T)
"""
statement = "df.sort_index(by=['key1', 'key2'])"
frame_sort_index_by_columns = Benchmark(statement, setup,
start_date=datetime(2011, 11, 1))
# drop_duplicates
statement = "df.drop_duplicates(['key1', 'key2'])"
frame_drop_duplicates = Benchmark(statement, setup,
start_date=datetime(2011, 11, 15))
statement = "df.drop_duplicates(['key1', 'key2'], inplace=True)"
frame_drop_dup_inplace = Benchmark(statement, setup,
start_date=datetime(2012, 5, 16))
lib_fast_zip = Benchmark('lib.fast_zip(col_array_list)', setup,
name='lib_fast_zip',
start_date=datetime(2012, 1, 1))
setup = setup + """
df.ix[:10000, :] = np.nan
"""
statement2 = "df.drop_duplicates(['key1', 'key2'])"
frame_drop_duplicates_na = Benchmark(statement2, setup,
start_date=datetime(2012, 5, 15))
lib_fast_zip_fillna = Benchmark('lib.fast_zip_fillna(col_array_list)', setup,
start_date=datetime(2012, 5, 15))
statement2 = "df.drop_duplicates(['key1', 'key2'], inplace=True)"
frame_drop_dup_na_inplace = Benchmark(statement2, setup,
start_date=datetime(2012, 5, 16))
setup = common_setup + """
s = Series(np.random.randint(0, 1000, size=10000))
s2 = Series(np.tile([rands(10) for i in xrange(1000)], 10))
"""
series_drop_duplicates_int = Benchmark('s.drop_duplicates()', setup,
start_date=datetime(2012, 11, 27))
series_drop_duplicates_string = \
Benchmark('s2.drop_duplicates()', setup,
start_date=datetime(2012, 11, 27))
#----------------------------------------------------------------------
# fillna, many columns
setup = common_setup + """
values = np.random.randn(1000, 1000)
values[::2] = np.nan
df = DataFrame(values)
"""
frame_fillna_many_columns_pad = Benchmark("df.fillna(method='pad')",
setup,
start_date=datetime(2011, 3, 1))
#----------------------------------------------------------------------
# blog "pandas escaped the zoo"
setup = common_setup + """
n = 50000
indices = Index([rands(10) for _ in xrange(n)])
def sample(values, k):
from random import shuffle
sampler = np.arange(len(values))
shuffle(sampler)
return values.take(sampler[:k])
subsample_size = 40000
x = Series(np.random.randn(50000), indices)
y = Series(np.random.randn(subsample_size),
index=sample(indices, subsample_size))
"""
series_align_irregular_string = Benchmark("x + y", setup,
start_date=datetime(2010, 6, 1))
| 34.176744 | 83 | 0.556478 |
acf8f6f04a275daca2500965100ae34906816522 | 6,026 | py | Python | app/__init__.py | newacropolis-uk-website/api | cb73bdf1b5194167d832efe94a70e9e36a097638 | [
"MIT"
] | null | null | null | app/__init__.py | newacropolis-uk-website/api | cb73bdf1b5194167d832efe94a70e9e36a097638 | [
"MIT"
] | null | null | null | app/__init__.py | newacropolis-uk-website/api | cb73bdf1b5194167d832efe94a70e9e36a097638 | [
"MIT"
] | null | null | null | import os
import logging
from logging.handlers import RotatingFileHandler
import jinja2
from flask import Flask
from flask_jwt_extended import JWTManager
from flask_sqlalchemy import SQLAlchemy
from app.na_celery import NewAcropolisCelery

# Module-level singletons: the Flask app and its extensions are created at
# import time and wired to configuration later by create_app().
db = SQLAlchemy()
application = Flask(__name__)
jwt = JWTManager(application)
celery = NewAcropolisCelery()
def create_app(**kwargs):
    """Build and return the fully configured Flask application.

    Any keyword arguments override values loaded from the
    environment-specific config object.
    """
    from app.config import configs

    env_name = get_env()
    application.config.from_object(configs[env_name])
    application.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
    if kwargs:
        application.config.update(kwargs)

    configure_logging()
    db.init_app(application)
    celery.init_app(application)
    register_blueprint()
    init_app(application)
    return application
def init_app(app):
    """Configure Jinja template loading, template globals and request hooks."""
    app.jinja_loader = jinja2.FileSystemLoader([os.getcwd() + '/app/templates'])

    template_globals = app.jinja_env.globals
    template_globals['API_BASE_URL'] = app.config['API_BASE_URL']
    template_globals['FRONTEND_URL'] = app.config['FRONTEND_URL']
    template_globals['IMAGES_URL'] = os.environ.get(
        'IMAGES_URL', app.config['API_BASE_URL'] + '/images/')

    @app.before_request
    def check_for_apikey():
        # Placeholder hook: request authentication is not implemented yet.
        pass
def register_blueprint():
    """Import every route blueprint and attach it to the Flask application."""
    from app.rest import base_blueprint
    from app.routes.articles.rest import article_blueprint, articles_blueprint
    from app.routes.authentication.rest import auth_blueprint
    from app.routes.emails.rest import emails_blueprint
    from app.routes.events.rest import events_blueprint
    from app.routes.fees.rest import fees_blueprint, fee_blueprint
    from app.routes.event_dates.rest import event_dates_blueprint, event_date_blueprint
    from app.routes.event_types.rest import event_types_blueprint, event_type_blueprint
    from app.routes.marketings.rest import marketings_blueprint
    from app.routes.members.rest import members_blueprint
    from app.routes.speakers.rest import speakers_blueprint, speaker_blueprint
    from app.routes.users.rest import users_blueprint, user_blueprint
    from app.routes.venues.rest import venues_blueprint, venue_blueprint

    # Registration order matches the original explicit call sequence.
    blueprints = (
        base_blueprint,
        auth_blueprint,
        article_blueprint,
        articles_blueprint,
        emails_blueprint,
        events_blueprint,
        event_date_blueprint,
        event_dates_blueprint,
        event_types_blueprint,
        event_type_blueprint,
        fees_blueprint,
        fee_blueprint,
        marketings_blueprint,
        members_blueprint,
        speakers_blueprint,
        speaker_blueprint,
        users_blueprint,
        user_blueprint,
        venues_blueprint,
        venue_blueprint,
    )
    for blueprint in blueprints:
        application.register_blueprint(blueprint)
def get_env():
    """Return the runtime environment name.

    Deployment paths containing 'www-preview' / 'www-live' take precedence
    over the ENVIRONMENT variable; the fallback is 'development'.
    """
    root = get_root_path()
    if 'www-preview' in root:
        return 'preview'
    if 'www-live' in root:
        return 'live'
    return os.environ.get('ENVIRONMENT', 'development')
def get_root_path():
    # Filesystem root of the Flask application package; used by get_env()
    # to detect preview/live deployments from the install location.
    return application.root_path
def configure_logging():
    """Route application (and gunicorn/werkzeug) logs to stderr and a
    rotating file, formatted by LogTruncatingFormatter.

    No-op unless APP_SERVER is configured.  Safe to call more than once:
    a flag on the logger prevents duplicate handler registration.
    """
    if not application.config.get('APP_SERVER'):
        return

    # BUG FIX: the original guard was `if ch in application.logger.handlers`
    # with `ch` freshly constructed on the line above, so the membership test
    # was always False and repeated calls kept re-configuring logging and
    # appending duplicate handlers to the gunicorn loggers.  Use an explicit
    # flag for idempotence instead.
    if getattr(application.logger, '_na_logging_configured', False):
        return
    application.logger._na_logging_configured = True

    ch = logging.StreamHandler()
    del application.logger.handlers[:]
    f = LogTruncatingFormatter(
        "{} %(asctime)s;[%(process)d];%(levelname)s;%(message)s".format(get_env()), "%Y-%m-%d %H:%M:%S")
    ch.setFormatter(f)
    application.logger.addHandler(ch)

    rfh = RotatingFileHandler('logs/app.log', maxBytes=10000, backupCount=3)
    rfh.setLevel(logging.DEBUG)
    rfh.setFormatter(f)
    application.logger.addHandler(rfh)

    if application.config.get('APP_SERVER') == 'gunicorn':
        # Adopt gunicorn's handlers and mirror ours onto its loggers so both
        # access and error logs share the same formatting and destinations.
        gunicorn_access_logger = logging.getLogger('gunicorn.access')
        application.logger.handlers.extend(gunicorn_access_logger.handlers)
        gunicorn_error_logger = logging.getLogger('gunicorn.error')
        application.logger.handlers.extend(gunicorn_error_logger.handlers)
        gunicorn_access_logger.addHandler(rfh)
        gunicorn_error_logger.addHandler(rfh)
        gunicorn_access_logger.addHandler(ch)
        gunicorn_error_logger.addHandler(ch)
        setup_gce_logging(gunicorn_access_logger, gunicorn_error_logger)
        application.logger.info('Gunicorn logging configured')
    else:
        werkzeug_log = logging.getLogger('werkzeug')
        werkzeug_log.setLevel(logging.DEBUG)
        werkzeug_log.addHandler(ch)
        werkzeug_log.addHandler(rfh)
        application.logger.info('Flask logging configured')

    db_name = application.config.get('SQLALCHEMY_DATABASE_URI').split('/')[-1]
    application.logger.debug("connected to db: {}".format(db_name))
def setup_gce_logging(gunicorn_access_logger, gunicorn_error_logger):  # pragma: no cover
    """Attach a Google Cloud Logging handler to the gunicorn loggers.

    Skipped for local/test databases, where cloud logging is unavailable.
    """
    local_prefixes = ['postgresql://localhost', 'db://localhost/test_db']
    if application.config['SQLALCHEMY_DATABASE_URI'][:22] in local_prefixes:
        return

    import google.cloud.logging
    from google.cloud.logging.handlers import CloudLoggingHandler, setup_logging

    client = google.cloud.logging.Client()
    handler = CloudLoggingHandler(client, name=get_env())
    setup_logging(handler)
    for gunicorn_logger in (gunicorn_access_logger, gunicorn_error_logger):
        gunicorn_logger.addHandler(handler)
class LogTruncatingFormatter(logging.Formatter):
    """logging.Formatter that strips the leading local access-log prefix
    (client address plus bracketed timestamp, 42 characters in total) from
    werkzeug/gunicorn records before normal formatting."""

    # First 15 characters of a local access-log line; used for detection.
    START_LOG = '127.0.0.1 - - ['
    # Length of the full prefix to drop: address + " - - [" + timestamp + "] ".
    PREFIX_LEN = 42

    def format(self, record):
        # BUG FIX: the original used `'msg' in dir(record)` followed by an
        # unconditional slice, which raised TypeError for non-string
        # messages (e.g. a dict or an exception object).  Guard with
        # isinstance before slicing.
        msg = getattr(record, 'msg', None)
        if isinstance(msg, str) and msg.startswith(self.START_LOG):
            record.msg = msg[self.PREFIX_LEN:]
        return super(LogTruncatingFormatter, self).format(record)
| 34.434286 | 114 | 0.754232 |
acf8f8fd1cfec24bdfbea6fd2771ad9708902c32 | 22 | py | Python | fos/actor/dynamic/__init__.py | fos/fos | 8d33bf0cd60292ad5164973b5285122acbc03b86 | [
"BSD-3-Clause"
] | 5 | 2015-08-08T22:04:49.000Z | 2020-05-29T10:30:09.000Z | fos/actor/dynamic/__init__.py | fos/fos | 8d33bf0cd60292ad5164973b5285122acbc03b86 | [
"BSD-3-Clause"
] | 1 | 2018-04-25T12:59:56.000Z | 2018-04-25T13:26:47.000Z | fos/actor/dynamic/__init__.py | fos/fos | 8d33bf0cd60292ad5164973b5285122acbc03b86 | [
"BSD-3-Clause"
] | null | null | null | from skeleton import * | 22 | 22 | 0.818182 |
acf8f905ca59cfcb4530138ae2d8ea2d0b90ed65 | 254 | py | Python | Desafio019 - Sorteando um Item na Lista.py | kleberfsobrinho/python | 34739d127c1a3908f5a2fd5a7ef07d4c78658802 | [
"MIT"
] | null | null | null | Desafio019 - Sorteando um Item na Lista.py | kleberfsobrinho/python | 34739d127c1a3908f5a2fd5a7ef07d4c78658802 | [
"MIT"
] | null | null | null | Desafio019 - Sorteando um Item na Lista.py | kleberfsobrinho/python | 34739d127c1a3908f5a2fd5a7ef07d4c78658802 | [
"MIT"
] | null | null | null | q = 'Desafio 019'
# Banner: the title centred in a 20-character field, padded with '='.
print('{:=^20}'.format(q))

import random

# Read the four students' names (prompts identical to the original),
# then draw one of them at random.
alunos = [input('Aluno {}: '.format(n)) for n in range(1, 5)]
escolhido = random.choice(alunos)
print('Escolhido: {}'.format(escolhido))
| 23.090909 | 40 | 0.61811 |
acf8f941ecbab9ba35dba23171deedd332d42900 | 720 | py | Python | commands/varyoutput.py | 1757WestwoodRobotics/mentorbot | 3db344f3b35c820ada4e1aef3eca9b1fc4c5b85a | [
"MIT"
] | 2 | 2021-11-13T20:18:44.000Z | 2021-11-13T20:27:04.000Z | commands/varyoutput.py | 1757WestwoodRobotics/mentorbot | 3db344f3b35c820ada4e1aef3eca9b1fc4c5b85a | [
"MIT"
] | null | null | null | commands/varyoutput.py | 1757WestwoodRobotics/mentorbot | 3db344f3b35c820ada4e1aef3eca9b1fc4c5b85a | [
"MIT"
] | 1 | 2021-11-14T01:38:53.000Z | 2021-11-14T01:38:53.000Z | from subsystems.lightsubsystem import LightSubsystem
import typing
from commands2 import CommandBase
class RelayControl(CommandBase):
    """Command that continuously drives the light subsystem's relay output
    from a caller-supplied percentage source."""

    def __init__(self, controller: LightSubsystem,
                 controlPercent: typing.Callable[[], float]) -> None:
        CommandBase.__init__(self)
        self.setName(__class__.__name__)

        self.control = controller
        self.controlPercentCommand = controlPercent
        # Late-bound setter: `self.control.light` is resolved on every call.
        self.setOutputPercent = lambda level: self.control.light.set(level)
        self.addRequirements([self.control])

    def execute(self) -> None:
        # Each scheduler tick, poll the supplier and forward the value.
        self.setOutputPercent(self.controlPercentCommand())

    def end(self, interrupted: bool) -> None:
        # De-energise the output when the command ends or is interrupted.
        self.setOutputPercent(0.0)
| 31.304348 | 79 | 0.705556 |
acf8fa9b2855a842728649232be0fec21dc9c9ef | 12,304 | py | Python | detectron2/structures/boxes.py | chongkewu/detectron2 | a881c614e05665015a3f73548126e2847a350226 | [
"Apache-2.0"
] | 4 | 2020-09-11T05:39:22.000Z | 2022-02-03T20:50:51.000Z | detectron2/structures/boxes.py | chongkewu/detectron2 | a881c614e05665015a3f73548126e2847a350226 | [
"Apache-2.0"
] | 10 | 2020-03-25T05:56:53.000Z | 2020-05-31T14:53:14.000Z | detectron2/structures/boxes.py | chongkewu/detectron2 | a881c614e05665015a3f73548126e2847a350226 | [
"Apache-2.0"
] | 3 | 2020-09-05T11:31:59.000Z | 2020-10-03T00:11:48.000Z | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import math
import numpy as np
from enum import IntEnum, unique
from typing import Iterator, List, Tuple, Union
import torch
from detectron2.layers import cat
_RawBoxType = Union[List[float], Tuple[float, ...], torch.Tensor, np.ndarray]
@unique
class BoxMode(IntEnum):
    """
    Enum of different ways to represent a box.

    Attributes:

        XYXY_ABS: (x0, y0, x1, y1) in absolute floating points coordinates.
            The coordinates in range [0, width or height].
        XYWH_ABS: (x0, y0, w, h) in absolute floating points coordinates.
        XYXY_REL: (x0, y0, x1, y1) in range [0, 1]. They are relative to the size of the image.
        XYWH_REL: (x0, y0, w, h) in range [0, 1]. They are relative to the size of the image.
        XYWHA_ABS: (xc, yc, w, h, a) in absolute floating points coordinates.
            (xc, yc) is the center of the rotated box, and the angle a is in degrees ccw.
    """

    XYXY_ABS = 0
    XYWH_ABS = 1
    XYXY_REL = 2
    XYWH_REL = 3
    XYWHA_ABS = 4

    @staticmethod
    def convert(box: _RawBoxType, from_mode: "BoxMode", to_mode: "BoxMode") -> _RawBoxType:
        """
        Args:
            box: can be a k-tuple, k-list or an Nxk array/tensor, where k = 4 or 5
            from_mode, to_mode (BoxMode)

        Returns:
            The converted box of the same type.
        """
        if from_mode == to_mode:
            return box

        original_type = type(box)
        is_numpy = isinstance(box, np.ndarray)
        single_box = isinstance(box, (list, tuple))
        if single_box:
            assert len(box) == 4 or len(box) == 5, (
                "BoxMode.convert takes either a k-tuple/list or an Nxk array/tensor,"
                " where k == 4 or 5"
            )
            # Promote a single box to a 1xk tensor so all branches below can
            # operate on batched rows.
            arr = torch.tensor(box)[None, :]
        else:
            # avoid modifying the input box
            if is_numpy:
                arr = torch.from_numpy(np.asarray(box)).clone()
            else:
                arr = box.clone()

        assert to_mode.value not in [
            BoxMode.XYXY_REL,
            BoxMode.XYWH_REL,
        ] and from_mode.value not in [
            BoxMode.XYXY_REL,
            BoxMode.XYWH_REL,
        ], "Relative mode not yet supported!"

        if from_mode == BoxMode.XYWHA_ABS and to_mode == BoxMode.XYXY_ABS:
            assert (
                arr.shape[-1] == 5
            ), "The last dimension of input shape must be 5 for XYWHA format"
            # Work in double precision for the trigonometry, restore dtype after.
            original_dtype = arr.dtype
            arr = arr.double()

            w = arr[:, 2]
            h = arr[:, 3]
            a = arr[:, 4]
            c = torch.abs(torch.cos(a * math.pi / 180.0))
            s = torch.abs(torch.sin(a * math.pi / 180.0))
            # This basically computes the horizontal bounding rectangle of the rotated box
            new_w = c * w + s * h
            new_h = c * h + s * w

            # convert center to top-left corner
            arr[:, 0] -= new_w / 2.0
            arr[:, 1] -= new_h / 2.0
            # bottom-right corner
            arr[:, 2] = arr[:, 0] + new_w
            arr[:, 3] = arr[:, 1] + new_h

            arr = arr[:, :4].to(dtype=original_dtype)
        elif from_mode == BoxMode.XYWH_ABS and to_mode == BoxMode.XYWHA_ABS:
            original_dtype = arr.dtype
            arr = arr.double()
            # Shift the top-left corner to the box center and append zero angles.
            arr[:, 0] += arr[:, 2] / 2.0
            arr[:, 1] += arr[:, 3] / 2.0
            angles = torch.zeros((arr.shape[0], 1), dtype=arr.dtype)
            arr = torch.cat((arr, angles), axis=1).to(dtype=original_dtype)
        else:
            if to_mode == BoxMode.XYXY_ABS and from_mode == BoxMode.XYWH_ABS:
                arr[:, 2] += arr[:, 0]
                arr[:, 3] += arr[:, 1]
            elif from_mode == BoxMode.XYXY_ABS and to_mode == BoxMode.XYWH_ABS:
                arr[:, 2] -= arr[:, 0]
                arr[:, 3] -= arr[:, 1]
            else:
                raise NotImplementedError(
                    "Conversion from BoxMode {} to {} is not supported yet".format(
                        from_mode, to_mode
                    )
                )

        # Return the result in the same container type the caller passed in.
        if single_box:
            return original_type(arr.flatten())
        if is_numpy:
            return arr.numpy()
        else:
            return arr
class Boxes:
    """
    This structure stores a list of boxes as a Nx4 torch.Tensor.
    It supports some common methods about boxes
    (`area`, `clip`, `nonempty`, etc),
    and also behaves like a Tensor
    (support indexing, `to(device)`, `.device`, and iteration over all boxes)

    Attributes:
        tensor (torch.Tensor): float matrix of Nx4. Each row is (x1, y1, x2, y2).
    """

    # Accepted shapes for the (height, width) argument of clip()/inside_box().
    BoxSizeType = Union[List[int], Tuple[int, int]]

    def __init__(self, tensor: torch.Tensor):
        """
        Args:
            tensor (Tensor[float]): a Nx4 matrix. Each row is (x1, y1, x2, y2).
        """
        device = tensor.device if isinstance(tensor, torch.Tensor) else torch.device("cpu")
        tensor = torch.as_tensor(tensor, dtype=torch.float32, device=device)
        if tensor.numel() == 0:
            # Use reshape, so we don't end up creating a new tensor that does not depend on
            # the inputs (and consequently confuses jit)
            tensor = tensor.reshape((0, 4)).to(dtype=torch.float32, device=device)
        assert tensor.dim() == 2 and tensor.size(-1) == 4, tensor.size()

        self.tensor = tensor

    def clone(self) -> "Boxes":
        """
        Clone the Boxes.

        Returns:
            Boxes
        """
        return Boxes(self.tensor.clone())

    def to(self, device: str) -> "Boxes":
        # Returns a new Boxes on the given device; `device` may also be a
        # torch.device object despite the annotation.
        return Boxes(self.tensor.to(device))

    def area(self) -> torch.Tensor:
        """
        Computes the area of all the boxes.

        Returns:
            torch.Tensor: a vector with areas of each box.
        """
        box = self.tensor
        area = (box[:, 2] - box[:, 0]) * (box[:, 3] - box[:, 1])
        return area

    def clip(self, box_size: BoxSizeType) -> None:
        """
        Clip (in place) the boxes by limiting x coordinates to the range [0, width]
        and y coordinates to the range [0, height].

        Args:
            box_size (height, width): The clipping box's size.
        """
        assert torch.isfinite(self.tensor).all(), "Box tensor contains infinite or NaN!"
        h, w = box_size
        self.tensor[:, 0].clamp_(min=0, max=w)
        self.tensor[:, 1].clamp_(min=0, max=h)
        self.tensor[:, 2].clamp_(min=0, max=w)
        self.tensor[:, 3].clamp_(min=0, max=h)

    def nonempty(self, threshold: int = 0) -> torch.Tensor:
        """
        Find boxes that are non-empty.
        A box is considered empty, if either of its side is no larger than threshold.

        Returns:
            Tensor:
                a binary vector which represents whether each box is empty
                (False) or non-empty (True).
        """
        box = self.tensor
        widths = box[:, 2] - box[:, 0]
        heights = box[:, 3] - box[:, 1]
        keep = (widths > threshold) & (heights > threshold)
        return keep

    def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> "Boxes":
        """
        Returns:
            Boxes: Create a new :class:`Boxes` by indexing.

        The following usage are allowed:

        1. `new_boxes = boxes[3]`: return a `Boxes` which contains only one box.
        2. `new_boxes = boxes[2:10]`: return a slice of boxes.
        3. `new_boxes = boxes[vector]`, where vector is a torch.BoolTensor
           with `length = len(boxes)`. Nonzero elements in the vector will be selected.

        Note that the returned Boxes might share storage with this Boxes,
        subject to Pytorch's indexing semantics.
        """
        if isinstance(item, int):
            # view(1, -1) keeps a single selected box 2-dimensional.
            return Boxes(self.tensor[item].view(1, -1))
        b = self.tensor[item]
        assert b.dim() == 2, "Indexing on Boxes with {} failed to return a matrix!".format(item)
        return Boxes(b)

    def __len__(self) -> int:
        return self.tensor.shape[0]

    def __repr__(self) -> str:
        return "Boxes(" + str(self.tensor) + ")"

    def inside_box(self, box_size: BoxSizeType, boundary_threshold: int = 0) -> torch.Tensor:
        """
        Args:
            box_size (height, width): Size of the reference box.
            boundary_threshold (int): Boxes that extend beyond the reference box
                boundary by more than boundary_threshold are considered "outside".

        Returns:
            a binary vector, indicating whether each box is inside the reference box.
        """
        height, width = box_size
        inds_inside = (
            (self.tensor[..., 0] >= -boundary_threshold)
            & (self.tensor[..., 1] >= -boundary_threshold)
            & (self.tensor[..., 2] < width + boundary_threshold)
            & (self.tensor[..., 3] < height + boundary_threshold)
        )
        return inds_inside

    def get_centers(self) -> torch.Tensor:
        """
        Returns:
            The box centers in a Nx2 array of (x, y).
        """
        return (self.tensor[:, :2] + self.tensor[:, 2:]) / 2

    def scale(self, scale_x: float, scale_y: float) -> None:
        """
        Scale the box with horizontal and vertical scaling factors
        """
        # Even columns hold x coordinates; odd columns hold y coordinates.
        self.tensor[:, 0::2] *= scale_x
        self.tensor[:, 1::2] *= scale_y

    @staticmethod
    def cat(boxes_list: List["Boxes"]) -> "Boxes":
        """
        Concatenates a list of Boxes into a single Boxes

        Arguments:
            boxes_list (list[Boxes])

        Returns:
            Boxes: the concatenated Boxes
        """
        assert isinstance(boxes_list, (list, tuple))
        assert len(boxes_list) > 0
        assert all(isinstance(box, Boxes) for box in boxes_list)

        # type(...) so that subclasses of Boxes stay closed under cat.
        cat_boxes = type(boxes_list[0])(cat([b.tensor for b in boxes_list], dim=0))
        return cat_boxes

    @property
    def device(self) -> torch.device:
        return self.tensor.device

    def __iter__(self) -> Iterator[torch.Tensor]:
        """
        Yield a box as a Tensor of shape (4,) at a time.
        """
        yield from self.tensor
# implementation from https://github.com/kuangliu/torchcv/blob/master/torchcv/utils/box.py
# with slight modifications
def pairwise_iou(boxes1: Boxes, boxes2: Boxes) -> torch.Tensor:
    """
    Given two lists of boxes of size N and M,
    compute the IoU (intersection over union)
    between __all__ N x M pairs of boxes.
    The box order must be (xmin, ymin, xmax, ymax).

    Args:
        boxes1,boxes2 (Boxes): two `Boxes`. Contains N & M boxes, respectively.

    Returns:
        Tensor: IoU, sized [N,M].
    """
    area1 = boxes1.area()  # [N]
    area2 = boxes2.area()  # [M]
    b1, b2 = boxes1.tensor, boxes2.tensor

    # Broadcast all N boxes against all M boxes: intersection corners first.
    lt = torch.max(b1[:, None, :2], b2[:, :2])  # [N,M,2]
    rb = torch.min(b1[:, None, 2:], b2[:, 2:])  # [N,M,2]
    wh = (rb - lt).clamp(min=0)  # [N,M,2]
    inter = wh[:, :, 0] * wh[:, :, 1]  # [N,M]

    # torch.where skips the division for empty intersections, avoiding 0/0
    # on degenerate (zero-area) boxes.
    iou = torch.where(
        inter > 0,
        inter / (area1[:, None] + area2 - inter),
        torch.zeros(1, dtype=inter.dtype, device=inter.device),
    )
    return iou
def matched_boxlist_iou(boxes1: Boxes, boxes2: Boxes) -> torch.Tensor:
    """
    Compute pairwise intersection over union (IOU) of two sets of matched
    boxes. The box order must be (xmin, ymin, xmax, ymax).
    Similar to boxlist_iou, but computes only diagonal elements of the matrix

    Arguments:
        boxes1: (Boxes) bounding boxes, sized [N,4].
        boxes2: (Boxes) bounding boxes, sized [N,4].
    Returns:
        (tensor) iou, sized [N].
    """
    # BUG FIX: the original assert message concatenated two string literals
    # without a separating space ("...the samenumber of entries...").
    assert len(boxes1) == len(boxes2), (
        "boxlists should have the same "
        "number of entries, got {}, {}".format(len(boxes1), len(boxes2))
    )
    area1 = boxes1.area()  # [N]
    area2 = boxes2.area()  # [N]
    box1, box2 = boxes1.tensor, boxes2.tensor
    # Element-wise corners are correct here because boxes are matched 1:1.
    lt = torch.max(box1[:, :2], box2[:, :2])  # [N,2]
    rb = torch.min(box1[:, 2:], box2[:, 2:])  # [N,2]
    wh = (rb - lt).clamp(min=0)  # [N,2]
    inter = wh[:, 0] * wh[:, 1]  # [N]
    iou = inter / (area1 + area2 - inter)  # [N]
    return iou
| 34.368715 | 96 | 0.557461 |
acf8fb8cdf139054933fd2283d83d2fd3d5e3f2c | 12,426 | py | Python | barcode_shrdc/barcode_shrdc/doctype/barcode_printing/barcode_printing.py | leexy0/Barcode-Scanning-System | e88597092387f4c4a0b62d6bde006517a75fdef9 | [
"MIT"
] | 3 | 2022-02-19T05:15:41.000Z | 2022-02-21T01:50:07.000Z | barcode_shrdc/barcode_shrdc/doctype/barcode_printing/barcode_printing.py | leexy0/Barcode-Scanning-System | e88597092387f4c4a0b62d6bde006517a75fdef9 | [
"MIT"
] | null | null | null | barcode_shrdc/barcode_shrdc/doctype/barcode_printing/barcode_printing.py | leexy0/Barcode-Scanning-System | e88597092387f4c4a0b62d6bde006517a75fdef9 | [
"MIT"
] | 2 | 2022-02-21T15:34:48.000Z | 2022-02-25T21:32:41.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2021, lxy and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json, urllib
from frappe.model.document import Document
from frappe.model.mapper import get_mapped_doc
from frappe import msgprint, _
from six import string_types, iteritems
import qrcode, io, os
from io import BytesIO
import base64
from frappe.integrations.utils import make_get_request, make_post_request, create_request_log
from frappe.utils import cstr, flt, cint, nowdate, add_days, comma_and, now_datetime, ceil, get_url
from erpnext.manufacturing.doctype.work_order.work_order import get_item_details
import requests
from PIL import Image
class BarcodePrinting(Document):
    """Frappe document controller for the Barcode Printing doctype."""
    pass
# def get_open_purchase_receipt(self):
# """ Pull Purchase Receipt based on criteria selected"""
# open_pr = get_purchase_receipts(self)
# if open_pr:
# self.add_pr_in_table(open_pr)
# else:
# frappe.msgprint(_("Purchase Order not available"))
# def add_pr_in_table(self, open_pr):
# """ Add sales orders in the table"""
# self.set('purchase_receipt', [])
# for data in open_pr:
# self.append('purchase_receipts', {
# 'purchase_receipt': data.name,
# 'pr_date': data.posting_date,
# 'supplier': data.supplier,
# 'grand_total': data.total
# })
# def get_items(self):
# if self.get_items_from == "Purchase Receipt":
# self.get_pr_items()
# elif self.get_items_from == "Stock Entry":
# self.get_se_items()
# def get_pr_se_list(self, field, table):
# """Returns a list of Purchase Orders or Stock Entries from the respective tables"""
# pr_se_list = [d.get(field) for d in self.get(table) if d.get(field)]
# return pr_se_list
# def get_pr_items(self):
# # Check for empty table or empty rows
# if not self.get("purchase_receipts") or not self.get_pr_se_list("purchase_receipt", "purchase_receipts"):
# frappe.throw(_("Please fill the Purchase Receipt table"), title=_("Purchase Receipt Required"))
# pr_list = self.get_pr_se_list("purchase_receipt", "purchase_receipts")
# item_condition = ""
# if self.item_code:
# item_condition = ' and pr_item.item_code = {0}'.format(frappe.db.escape(self.item_code))
# items = frappe.db.sql("""select distinct pr_item.parent, pr_item.item_code, pr_item.warehouse,
# pr_item.qty, pr_item.description, pr_item.name, pr_item.uom, pr.supplier, pr_item.barcode,
# pr_item.serial_no, pr_item.batch_no,
# item.barcode
# from `tabPurchase Receipt Item` pr_item , `tabPurchase Receipt` pr, `tabItem Barcode` item
# where pr_item.parent in (%s) and pr_item.docstatus = 1 and item.parent = %s""" % \
# (", ".join(["%s"] * len(pr_list))),self.item_code, tuple(pr_list), as_dict=1)
# if self.item_code:
# item_condition = ' and so_item.item_code = {0}'.format(frappe.db.escape(self.item_code))
# self.add_items(items)
# def add_items(self, items):
# self.set('items', [])
# for data in items:
# pi = self.append('items', {
# 'warehouse': data.warehouse,
# 'item_code': data.item_code,
# 'description': data.description,
# 'qty': data.qty,
# 'supplier': data.supplier,
# 'uom': data.uom,
# 'barcode': data.barcode,
# 'serial_no': data.serial_no,
# 'batch_no': data.batch_no
# })
# if self.get_items_from == "Purchase Receipt":
# pi.ref_pr = data.parent
# pi.description = data.description
# elif self.get_items_from == "Stock Entry":
# pi.ref_se = data.parent
# pi.description = data.description
# def get_item_barcode(self):
# print(self.items)
# item = frappe.db.sql("""select barcode, barcode_type
# from `tabItem Barcode`
# where parent=%s""",
# "ITM-001", as_dict = 1)
# if not item:
# frappe.throw(_("Item {0} is not active or end of life has been reached"))
# item = item[0]
# return item
    def get_item_details(self, args=None, for_update=False):
        """Fetch item master data and return defaults for a barcode row.

        Args:
            args: dict-like with at least ``item_code``; ``qty`` is copied
                into the result if present.
            for_update: unused in this body; presumably kept for API
                symmetry with similar frappe controllers -- TODO confirm.

        Returns:
            frappe._dict with uom/description/name plus zeroed stock fields.

        Raises:
            frappe.ValidationError: if the item is disabled or past its
                end-of-life date.
        """
        # Joins Item Default rows for this document's company to select the
        # expense account / cost center columns (fetched but not returned).
        item = frappe.db.sql("""select i.name, i.stock_uom, i.description, i.image, i.item_name, i.item_group,
            i.has_batch_no, i.sample_quantity, i.has_serial_no, i.allow_alternative_item,
            id.expense_account, id.buying_cost_center
            from `tabItem` i LEFT JOIN `tabItem Default` id ON i.name=id.parent and id.company=%s
            where i.name=%s
                and i.disabled=0
                and (i.end_of_life is null or i.end_of_life='0000-00-00' or i.end_of_life > %s)""",
            (self.company, args.get('item_code'), nowdate()), as_dict = 1)
        if not item:
            frappe.throw(_("Item {0} is not active or end of life has been reached").format(args.get("item_code")))
        item = item[0]

        # Stock/batch/serial fields default to empty; callers fill them in
        # later from scanned data.
        ret = frappe._dict({
            'uom': item.stock_uom,
            'stock_uom': item.stock_uom,
            'description': item.description,
            'image': item.image,
            'item_name': item.item_name,
            'qty': args.get("qty"),
            'conversion_factor': 1,
            'batch_no': '',
            'actual_qty': 0,
            'basic_rate': 0,
            'serial_no': '',
            'has_serial_no': item.has_serial_no,
            'has_batch_no': item.has_batch_no,
            'sample_quantity': item.sample_quantity
        })
        return ret
# def get_purchase_receipts(self):
# pr_filter = item_filter = ""
# if self.from_date:
# pr_filter += " and pr.posting_date >= %(from_date)s"
# if self.to_date:
# pr_filter += " and pr.posting_date <= %(to_date)s"
# if self.warehouse:
# pr_filter += " and pr.set_warehouse = %(warehouse)s"
# if self.supplier:
# pr_filter += " and pr.supplier = %(supplier)s"
# if self.item_code:
# item_filter += " and pr_item.item_code = %(item)s"
# open_pr = frappe.db.sql("""
# select distinct pr.name, pr.posting_date, pr.supplier, pr.base_grand_total
# from `tabPurchase Receipt` pr, `tabPurchase Receipt Item` pr_item
# where pr_item.parent = pr.name
# and pr.docstatus = 1 and pr.status not in ("Stopped", "Closed")
# and pr.company = %(company)s
# """.format(pr_filter, item_filter), {
# "from_date": self.from_date,
# "to_date": self.to_date,
# "supplier": self.supplier,
# "set_warehouse": self.warehouse,
# "item": self.item_code,
# "company": self.company
# }, as_dict=1)
# return open_pr
@frappe.whitelist()
def pr_make_barcode(source_name, target_doc=None):
    """Map a submitted Purchase Receipt onto a new Barcode Printing document.

    Args:
        source_name: name of the source Purchase Receipt.
        target_doc: optional existing target document to merge into.

    Returns:
        The mapped (unsaved) Barcode Printing document.
    """
    # BUG FIX: the original field_map listed "batch_no": "batch_no" twice;
    # the duplicate dict key was dead code and has been removed.
    doc = get_mapped_doc("Purchase Receipt", source_name, {
        "Purchase Receipt": {
            "doctype": "Barcode Printing",
            "validation": {
                "docstatus": ["=", 1]
            }
        },
        "Purchase Receipt Item": {
            "doctype": "Barcode Generator Items",
            "field_map": {
                "stock_qty": "qty",
                "parent": "ref_pr",
                "price_list_rate": "basic_rate",
                "serial_no": "serial_no",
                "batch_no": "batch_no",
                "set_warehouse": "warehouse"
            },
        }
    }, target_doc)
    return doc
@frappe.whitelist()
def se_make_barcode(source_name, target_doc=None):
    """Map a submitted Stock Entry onto a new Barcode Printing document.

    For "Manufacture" entries only rows with a target warehouse (finished
    goods) are copied; every other entry type copies all rows.
    """

    def _row_wanted(row):
        # NOTE(review): fetches the parent Stock Entry once per row --
        # presumably served from frappe's document cache; verify if this
        # ever becomes a hot path.
        if frappe.get_doc("Stock Entry", row.parent).stock_entry_type == "Manufacture":
            return (row.t_warehouse != None)
        return 1

    mapping = {
        "Stock Entry": {
            "doctype": "Barcode Printing",
            "validation": {
                "docstatus": ["=", 1],
            },
            "field_map": {
                "get_items_from": "doctype"
            }
        },
        "Stock Entry Detail": {
            "doctype": "Barcode Generator Items",
            "field_map": {
                "valuation_rate": "rate",
                "qty": "qty",
                "uom": "uom",
                "parent": "ref_se",
                "serial_no": "serial_no",
                "batch_no": "batch_no",
                "additional_cost": "additional_cost",
                "t_warehouse": "warehouse"
            },
            "condition": _row_wanted
        }
    }
    return get_mapped_doc("Stock Entry", source_name, mapping, target_doc)
@frappe.whitelist()
def search_item_serial_or_batch_or_barcode_number(search_value, item):
	"""Resolve *search_value* against barcodes, serials and batches.

	Lookup order: a barcode attached to the given item, then a Serial No
	named *search_value*, then a Batch named *search_value*. Returns the
	first match as a dict, or {} when nothing matches.
	"""
	item_dict = json.loads(item)

	# 1) Barcode attached to the item. Normalise barcode-type names to the
	#    identifiers understood by the barcode rendering library.
	barcode = frappe.db.get_value(
		'Item Barcode', {'parent': item_dict["item_code"]},
		['barcode', 'barcode_type', 'parent as item_code'], as_dict=True)
	if barcode:
		type_map = {"EAN": "EAN13", "UPC-A": "UPC"}
		barcode.barcode_type = type_map.get(barcode.barcode_type, barcode.barcode_type)
		return barcode

	# 2) Serial number.
	serial = frappe.db.get_value(
		'Serial No', search_value, ['name as serial_no', 'item_code'], as_dict=True)
	if serial:
		return serial

	# 3) Batch.
	batch = frappe.db.get_value(
		'Batch', search_value, ['name as batch_no', 'item as item_code'], as_dict=True)
	return batch or {}
@frappe.whitelist()
def get_item_details(frm):
	"""Return the first barcode row recorded for ``frm.item_code``.

	Args:
		frm: Object exposing ``item_code`` (the calling form).

	Returns:
		dict with ``barcode`` and ``barcode_type`` keys.

	Raises:
		frappe.ValidationError: if the item has no barcode row.
	"""
	# Fixes vs. the original:
	# - the query referenced an undefined table alias ("i.parent");
	# - the value was passed through frappe.db.escape() AND a %s
	#   placeholder, which double-quotes it — use plain parameterisation;
	# - the unused `items`/`item_code_list` locals were dropped.
	item = frappe.db.sql(
		"""select barcode, barcode_type
		from `tabItem Barcode`
		where parent = %s""",
		frm.item_code, as_dict=1)
	if not item:
		# Fill the {0} placeholder (the original threw with it unformatted).
		frappe.throw(_("Item {0} is not active or end of life has been reached").format(frm.item_code))
	return item[0]
@frappe.whitelist()
def create_barcode_printing(throw_if_missing, se_id, pr_id):
	"""Create and insert a Barcode Printing document populated from a
	Stock Entry and/or a Purchase Receipt.

	Args:
		throw_if_missing: When truthy, raise if the inserted document
			cannot be found afterwards.
		se_id: Optional Stock Entry name to copy item rows from.
		pr_id: Optional Purchase Receipt name to copy item rows from.

	Returns:
		The freshly inserted Barcode Printing document.
	"""
	bp = frappe.new_doc('Barcode Printing')
	if se_id:
		se = frappe.get_doc("Stock Entry", se_id)
		for item in se.items:
			# Only rows with a target warehouse (produced/received goods)
			# get labels; consumed raw-material rows are skipped.
			if item.t_warehouse is not None:
				row = bp.append('items', {})
				row.item_code = item.item_code
				row.qty = item.qty
				row.basic_rate = item.basic_rate
				row.rate = item.valuation_rate
				row.uom = item.uom
				row.additional_cost = item.additional_cost
				row.conversion_factor = item.conversion_factor
				row.serial_no = item.serial_no
				row.batch_no = item.batch_no
				row.ref_se = se_id
	if pr_id:
		pr = frappe.get_doc("Purchase Receipt", pr_id)
		for item in pr.items:
			row = bp.append('items', {})
			row.item_code = item.item_code
			row.qty = item.qty
			row.basic_rate = item.price_list_rate
			row.rate = item.rate
			row.uom = item.uom
			row.serial_no = item.serial_no
			row.batch_no = item.batch_no
			row.ref_pr = pr_id
			row.warehouse = pr.set_warehouse
	bp.insert(
		ignore_mandatory=True
	)
	if not frappe.db.exists(bp.doctype, bp.name):
		if throw_if_missing:
			frappe.throw('Linked document (Stock Entry / Purchase Receipt) not found')
	return frappe.get_doc(bp.doctype, bp.name)
@frappe.whitelist()
def make_qrcode(doc, route):
	"""Return HTML <img> tags with one QR code per printable item row of a
	Barcode Printing document.

	When a row carries serial numbers one image is produced per serial,
	otherwise one image per row. Rows with qty == 0 are skipped.

	Args:
		doc: JSON string of the Barcode Printing doc (must contain "name").
		route: Base URL the QR payload points at.
	"""
	qr_html = ''
	barcode_doc = frappe.get_doc("Barcode Printing", json.loads(doc)["name"])
	for item in barcode_doc.items:
		if item.get("qty") != 0:
			if item.get("serial_no"):
				serials = item.get("serial_no").split("\n")
				# Drop the trailing empty entry left by a final newline.
				if serials[-1] == '':
					serials.pop()
				for serial in serials:
					qr_html += _qr_img_tag(_item_qr_uri(item, serial), route)
			else:
				qr_html += _qr_img_tag(_item_qr_uri(item), route)
	return qr_html


def _item_qr_uri(item, serial=None):
	"""Build the item_qr query string for one item row (optional serial)."""
	uri = "item_qr?"
	if item.get("item_code"):
		uri += "item_code=" + urllib.parse.quote(item.get_formatted("item_code"))
	if item.get("barcode"):
		uri += "&barcode=" + urllib.parse.quote(item.get_formatted("barcode"))
	if serial:
		uri += "&serial_no=" + urllib.parse.quote(serial)
	if item.get("batch_no"):
		uri += "&batch_no=" + urllib.parse.quote(item.get_formatted("batch_no"))
	return uri


def _qr_img_tag(uri, route):
	"""Render one QR code as an inline base64 <img> tag."""
	img_str = qr_code_img(uri, route)
	return '<img src="data:image/png;base64,{0}" width="240px"/><br>'.format(img_str.decode("utf-8"))
def qr_code_img(uri, route):
	"""Render ``route + "/" + uri`` as a QR code PNG and return it as
	base64-encoded bytes.

	Styling (border, colours, optional centre logo) comes from the
	"QR Code Configuration" doctype.
	"""
	config = frappe.get_doc("QR Code Configuration")

	code = qrcode.QRCode(
		border=config.border,
		# High error correction so the code stays readable with a logo
		# pasted over its centre.
		error_correction=qrcode.constants.ERROR_CORRECT_H,
	)
	code.add_data(route + "/" + uri)
	code.make(fit=True)

	image = code.make_image(fill_color=config.fill, back_color=config.background)
	width, height = image.size

	if config.logo:
		# Overlay the configured logo, scaled to a quarter of the QR size
		# and centred on the image.
		logo_img = Image.open(
			requests.get(get_url(config.logo, None), stream=True).raw
		).resize((width // 4, height // 4))
		offset = ((width - logo_img.size[0]) // 2, (height - logo_img.size[1]) // 2)
		image.paste(logo_img, offset)

	buffer = BytesIO()
	image.save(buffer, format="PNG")
	buffer.seek(0)
	return base64.b64encode(buffer.read())
| 32.108527 | 148 | 0.674553 |
acf8fc0bcf66ddc5c761772bd0a7caa5b5e78b1b | 13,132 | py | Python | django/utils/html.py | cesar57927/django | 4ea5bd795cfde7ae30517a873d4fa2b852bb084a | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | django/utils/html.py | cesar57927/django | 4ea5bd795cfde7ae30517a873d4fa2b852bb084a | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | django/utils/html.py | cesar57927/django | 4ea5bd795cfde7ae30517a873d4fa2b852bb084a | [
"PSF-2.0",
"BSD-3-Clause"
] | 1 | 2022-01-18T23:19:29.000Z | 2022-01-18T23:19:29.000Z | """HTML utilities suitable for global use."""
import html
import json
import re
from html.parser import HTMLParser
from urllib.parse import (
parse_qsl, quote, unquote, urlencode, urlsplit, urlunsplit,
)
from django.utils.functional import Promise, keep_lazy, keep_lazy_text
from django.utils.http import RFC3986_GENDELIMS, RFC3986_SUBDELIMS
from django.utils.safestring import SafeData, SafeString, mark_safe
from django.utils.text import normalize_newlines
# Configuration for urlize() function.
TRAILING_PUNCTUATION_CHARS = '.,:;!'
WRAPPING_PUNCTUATION = [('(', ')'), ('[', ']')]
# List of possible strings used for bullets in bulleted lists.
DOTS = ['·', '*', '\u2022', '•', '•', '•']
# An '&' that is not already the start of an HTML entity reference.
unencoded_ampersands_re = re.compile(r'&(?!(\w+|#\d+);)')
# Splits text into words, capturing the whitespace/quote/angle delimiters
# so urlize() can re-join them unchanged.
word_split_re = re.compile(r'''([\s<>"']+)''')
# URLs with an explicit http:// or https:// scheme.
simple_url_re = re.compile(r'^https?://\[?\w', re.IGNORECASE)
# Scheme-less URLs: "www."-prefixed or a bare domain ending in one of the
# original seven gTLDs.
simple_url_2_re = re.compile(r'^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net|org)($|/.*)$', re.IGNORECASE)
@keep_lazy(str, SafeString)
def escape(text):
    """
    Return the given text with ampersands, quotes and angle brackets encoded
    for use in HTML.

    Always escape input, even if it's already escaped and marked as such.
    This may result in double-escaping. If this is a concern, use
    conditional_escape() instead.
    """
    # Restored the function-body indentation that was stripped from this
    # chunk (the flattened form is a SyntaxError).
    return mark_safe(html.escape(str(text)))
_js_escapes = {
ord('\\'): '\\u005C',
ord('\''): '\\u0027',
ord('"'): '\\u0022',
ord('>'): '\\u003E',
ord('<'): '\\u003C',
ord('&'): '\\u0026',
ord('='): '\\u003D',
ord('-'): '\\u002D',
ord(';'): '\\u003B',
ord('`'): '\\u0060',
ord('\u2028'): '\\u2028',
ord('\u2029'): '\\u2029'
}
# Escape every ASCII character with a value less than 32.
_js_escapes.update((ord('%c' % z), '\\u%04X' % z) for z in range(32))
@keep_lazy(str, SafeString)
def escapejs(value):
    """Hex encode characters for use in JavaScript strings."""
    # Restored the stripped function-body indentation.
    return mark_safe(str(value).translate(_js_escapes))
_json_script_escapes = {
ord('>'): '\\u003E',
ord('<'): '\\u003C',
ord('&'): '\\u0026',
}
def json_script(value, element_id):
    """
    Escape all the HTML/XML special characters with their unicode escapes, so
    value is safe to be output anywhere except for inside a tag attribute. Wrap
    the escaped JSON in a script tag.
    """
    # Restored the stripped function-body indentation. Local import keeps
    # module import time free of the serializer dependency.
    from django.core.serializers.json import DjangoJSONEncoder
    json_str = json.dumps(value, cls=DjangoJSONEncoder).translate(_json_script_escapes)
    return format_html(
        '<script id="{}" type="application/json">{}</script>',
        element_id, mark_safe(json_str)
    )
def conditional_escape(text):
    """
    Similar to escape(), except that it doesn't operate on pre-escaped strings.

    This function relies on the __html__ convention used both by Django's
    SafeData class and by third-party libraries like markupsafe.
    """
    # BUG FIX: a stray `return str(text)` preceded this logic, making the
    # escaping below unreachable and returning unescaped text — an XSS
    # hazard for every caller relying on autoescaping. It has been removed.
    if isinstance(text, Promise):
        text = str(text)
    if hasattr(text, '__html__'):
        return text.__html__()
    else:
        return escape(text)
def format_html(format_string, *args, **kwargs):
    """
    Similar to str.format, but pass all arguments through conditional_escape(),
    and call mark_safe() on the result. This function should be used instead
    of str.format or % interpolation to build up small HTML fragments.
    """
    # Restored the stripped function-body indentation.
    args_safe = map(conditional_escape, args)
    kwargs_safe = {k: conditional_escape(v) for (k, v) in kwargs.items()}
    return mark_safe(format_string.format(*args_safe, **kwargs_safe))
def format_html_join(sep, format_string, args_generator):
    """
    A wrapper of format_html, for the common case of a group of arguments that
    need to be formatted using the same format string, and then joined using
    'sep'. 'sep' is also passed through conditional_escape.

    'args_generator' should be an iterator that returns the sequence of 'args'
    that will be passed to format_html.

    Example:

      format_html_join('\n', "<li>{} {}</li>", ((u.first_name, u.last_name)
                                                  for u in users))
    """
    # Restored the stripped function-body indentation.
    return mark_safe(conditional_escape(sep).join(
        format_html(format_string, *args)
        for args in args_generator
    ))
@keep_lazy_text
def linebreaks(value, autoescape=False):
    """Convert newlines into <p> and <br>s."""
    # Restored the stripped function-body indentation.
    value = normalize_newlines(value)
    paras = re.split('\n{2,}', str(value))
    if autoescape:
        paras = ['<p>%s</p>' % escape(p).replace('\n', '<br>') for p in paras]
    else:
        paras = ['<p>%s</p>' % p.replace('\n', '<br>') for p in paras]
    return '\n\n'.join(paras)
class MLStripper(HTMLParser):
    """HTMLParser subclass that collects all non-tag content verbatim.

    Used by _strip_once()/strip_tags() to drop markup while keeping text,
    entity references and character references untouched
    (convert_charrefs is disabled so they pass through unexpanded).
    """
    def __init__(self):
        super().__init__(convert_charrefs=False)
        self.reset()
        self.fed = []

    def handle_data(self, d):
        self.fed.append(d)

    def handle_entityref(self, name):
        # Re-emit entity references (e.g. &amp;) unchanged.
        self.fed.append('&%s;' % name)

    def handle_charref(self, name):
        # Re-emit numeric character references (e.g. &#33;) unchanged.
        self.fed.append('&#%s;' % name)

    def get_data(self):
        return ''.join(self.fed)
def _strip_once(value):
    """
    Internal tag stripping utility used by strip_tags.
    """
    # Restored the stripped function-body indentation.
    s = MLStripper()
    s.feed(value)
    s.close()
    return s.get_data()
@keep_lazy_text
def strip_tags(value):
    """Return the given HTML with all tags stripped."""
    # Note: in typical case this loop executes _strip_once once. Loop condition
    # is redundant, but helps to reduce number of executions of _strip_once.
    value = str(value)
    while '<' in value and '>' in value:
        new_value = _strip_once(value)
        if len(new_value) >= len(value):
            # _strip_once was not able to detect more tags
            break
        value = new_value
    return value
@keep_lazy_text
def strip_spaces_between_tags(value):
    """Return the given HTML with spaces between tags removed."""
    # Restored the stripped function-body indentation.
    return re.sub(r'>\s+<', '><', str(value))
def smart_urlquote(url):
    """Quote a URL if it isn't already quoted."""
    def unquote_quote(segment):
        segment = unquote(segment)
        # Tilde is part of RFC3986 Unreserved Characters
        # https://tools.ietf.org/html/rfc3986#section-2.3
        # See also https://bugs.python.org/issue16285
        return quote(segment, safe=RFC3986_SUBDELIMS + RFC3986_GENDELIMS + '~')

    # Handle IDN before quoting.
    try:
        scheme, netloc, path, query, fragment = urlsplit(url)
    except ValueError:
        # invalid IPv6 URL (normally square brackets in hostname part).
        return unquote_quote(url)

    try:
        netloc = netloc.encode('idna').decode('ascii')  # IDN -> ACE
    except UnicodeError:  # invalid domain part
        return unquote_quote(url)

    if query:
        # Separately unquoting key/value, so as to not mix querystring separators
        # included in query values. See #22267.
        query_parts = [(unquote(q[0]), unquote(q[1]))
                       for q in parse_qsl(query, keep_blank_values=True)]
        # urlencode will take care of quoting
        query = urlencode(query_parts)

    path = unquote_quote(path)
    fragment = unquote_quote(fragment)

    return urlunsplit((scheme, netloc, path, query, fragment))
@keep_lazy_text
def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
    """
    Convert any URLs in text into clickable links.

    Works on http://, https://, www. links, and also on links ending in one of
    the original seven gTLDs (.com, .edu, .gov, .int, .mil, .net, and .org).
    Links can have trailing punctuation (periods, commas, close-parens) and
    leading punctuation (opening parens) and it'll still do the right thing.

    If trim_url_limit is not None, truncate the URLs in the link text longer
    than this limit to trim_url_limit - 1 characters and append an ellipsis.

    If nofollow is True, give the links a rel="nofollow" attribute.

    If autoescape is True, autoescape the link text and URLs.
    """
    safe_input = isinstance(text, SafeData)

    def trim_url(x, limit=trim_url_limit):
        if limit is None or len(x) <= limit:
            return x
        return '%s…' % x[:max(0, limit - 1)]

    def trim_punctuation(lead, middle, trail):
        """
        Trim trailing and wrapping punctuation from `middle`. Return the items
        of the new state.
        """
        # Continue trimming until middle remains unchanged.
        trimmed_something = True
        while trimmed_something:
            trimmed_something = False
            # Trim wrapping punctuation.
            for opening, closing in WRAPPING_PUNCTUATION:
                if middle.startswith(opening):
                    middle = middle[len(opening):]
                    lead += opening
                    trimmed_something = True
                # Keep parentheses at the end only if they're balanced.
                if (middle.endswith(closing) and
                        middle.count(closing) == middle.count(opening) + 1):
                    middle = middle[:-len(closing)]
                    trail = closing + trail
                    trimmed_something = True
            # Trim trailing punctuation (after trimming wrapping punctuation,
            # as encoded entities contain ';'). Unescape entites to avoid
            # breaking them by removing ';'.
            middle_unescaped = html.unescape(middle)
            stripped = middle_unescaped.rstrip(TRAILING_PUNCTUATION_CHARS)
            if middle_unescaped != stripped:
                trail = middle[len(stripped):] + trail
                middle = middle[:len(stripped) - len(middle_unescaped)]
                trimmed_something = True
        return lead, middle, trail

    def is_email_simple(value):
        """Return True if value looks like an email address."""
        # An @ must be in the middle of the value.
        if '@' not in value or value.startswith('@') or value.endswith('@'):
            return False
        try:
            p1, p2 = value.split('@')
        except ValueError:
            # value contains more than one @.
            return False
        # Dot must be in p2 (e.g. example.com)
        if '.' not in p2 or p2.startswith('.'):
            return False
        return True

    words = word_split_re.split(str(text))
    for i, word in enumerate(words):
        if '.' in word or '@' in word or ':' in word:
            # lead: Current punctuation trimmed from the beginning of the word.
            # middle: Current state of the word.
            # trail: Current punctuation trimmed from the end of the word.
            lead, middle, trail = '', word, ''
            # Deal with punctuation.
            lead, middle, trail = trim_punctuation(lead, middle, trail)
            # Make URL we want to point to.
            url = None
            nofollow_attr = ' rel="nofollow"' if nofollow else ''
            if simple_url_re.match(middle):
                url = smart_urlquote(html.unescape(middle))
            elif simple_url_2_re.match(middle):
                url = smart_urlquote('http://%s' % html.unescape(middle))
            elif ':' not in middle and is_email_simple(middle):
                local, domain = middle.rsplit('@', 1)
                try:
                    domain = domain.encode('idna').decode('ascii')
                except UnicodeError:
                    continue
                url = 'mailto:%s@%s' % (local, domain)
                nofollow_attr = ''
            # Make link.
            if url:
                trimmed = trim_url(middle)
                if autoescape and not safe_input:
                    lead, trail = escape(lead), escape(trail)
                    trimmed = escape(trimmed)
                middle = '<a href="%s"%s>%s</a>' % (escape(url), nofollow_attr, trimmed)
                words[i] = mark_safe('%s%s%s' % (lead, middle, trail))
            else:
                if safe_input:
                    words[i] = mark_safe(word)
                elif autoescape:
                    words[i] = escape(word)
        # Words without URL-ish characters are passed through (escaped or
        # marked safe as appropriate). The flattened source had these two
        # branches nested inside the inner `else`, making them dead code;
        # they belong at this outer level.
        elif safe_input:
            words[i] = mark_safe(word)
        elif autoescape:
            words[i] = escape(word)
    return ''.join(words)
def avoid_wrapping(value):
    """
    Avoid text wrapping in the middle of a phrase by adding non-breaking
    spaces where there previously were normal spaces.
    """
    # Restored the stripped function-body indentation.
    return value.replace(" ", "\xa0")
def html_safe(klass):
    """
    A decorator that defines the __html__ method. This helps non-Django
    templates to detect classes whose __str__ methods return SafeString.
    """
    if '__html__' in klass.__dict__:
        raise ValueError(
            "can't apply @html_safe to %s because it defines "
            "__html__()." % klass.__name__
        )
    if '__str__' not in klass.__dict__:
        raise ValueError(
            "can't apply @html_safe to %s because it doesn't "
            "define __str__()." % klass.__name__
        )
    klass_str = klass.__str__
    klass.__str__ = lambda self: mark_safe(klass_str(self))
    klass.__html__ = lambda self: str(self)
    return klass
| 34.832891 | 110 | 0.611255 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.