hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dd2613e06414a18ce93d700b09413476e05e1287 | 408 | py | Python | Ch_14_Databases/hidden.py | romitpatel/learn_python | 42230d04be5af5576ac2cfc4b1d2a9413a1e777a | [
"MIT"
] | 1 | 2021-02-24T11:40:05.000Z | 2021-02-24T11:40:05.000Z | Ch_14_Databases/hidden.py | Chatak1/learn_python | 198333e56557301aeff95af321f4daa29834c61e | [
"MIT"
] | null | null | null | Ch_14_Databases/hidden.py | Chatak1/learn_python | 198333e56557301aeff95af321f4daa29834c61e | [
"MIT"
] | 2 | 2020-10-02T17:08:42.000Z | 2021-02-24T11:40:12.000Z | # Keep this file separate
# https://apps.twitter.com/
# Create new App and get the four strings
def oauth():
    """Return the four Twitter OAuth credential strings as a dict.

    Keys: consumer_key, consumer_secret, token_key, token_secret.
    Obtain your own values from https://apps.twitter.com/.
    """
    credentials = {}
    credentials["consumer_key"] = "LUDJ5tVqzOcGUsnCXYhNQJvYD"
    credentials["consumer_secret"] = "CEWGWvKk61UHG80cT6M75aOl1mztxsSFLUWg1ED5WXYdQHdCZh"
    credentials["token_key"] = "548360554-4FMLvdbJYBrQKj0lYHDR7qyUDLD4iYBcYGWEns16"
    credentials["token_secret"] = "LJXUBW27RBcQh3PVRm5nElA807UckggcFU8xbzlaZdqo0"
    return credentials
db170c9dc1e074c9e51ae15e324e9d4c8bb686db | 11,146 | py | Python | sdk/python/pulumi_azure_nextgen/network/v20181101/get_load_balancer.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 31 | 2020-09-21T09:41:01.000Z | 2021-02-26T13:21:59.000Z | sdk/python/pulumi_azure_nextgen/network/v20181101/get_load_balancer.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 231 | 2020-09-21T09:38:45.000Z | 2021-03-01T11:16:03.000Z | sdk/python/pulumi_azure_nextgen/network/v20181101/get_load_balancer.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 4 | 2020-09-29T14:14:59.000Z | 2021-02-10T20:38:16.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetLoadBalancerResult',
'AwaitableGetLoadBalancerResult',
'get_load_balancer',
]
@pulumi.output_type
class GetLoadBalancerResult:
    """
    LoadBalancer resource
    """
    def __init__(__self__, backend_address_pools=None, etag=None, frontend_ip_configurations=None, id=None, inbound_nat_pools=None, inbound_nat_rules=None, load_balancing_rules=None, location=None, name=None, outbound_rules=None, probes=None, provisioning_state=None, resource_guid=None, sku=None, tags=None, type=None):
        # Validate each supplied argument against its expected container type
        # and store it on the instance via pulumi.set.  The error message is
        # identical to the generated SDK's per-argument checks.
        checked_args = (
            ("backend_address_pools", backend_address_pools, list),
            ("etag", etag, str),
            ("frontend_ip_configurations", frontend_ip_configurations, list),
            ("id", id, str),
            ("inbound_nat_pools", inbound_nat_pools, list),
            ("inbound_nat_rules", inbound_nat_rules, list),
            ("load_balancing_rules", load_balancing_rules, list),
            ("location", location, str),
            ("name", name, str),
            ("outbound_rules", outbound_rules, list),
            ("probes", probes, list),
            ("provisioning_state", provisioning_state, str),
            ("resource_guid", resource_guid, str),
            ("sku", sku, dict),
            ("tags", tags, dict),
            ("type", type, str),
        )
        for arg_name, arg_value, expected_type in checked_args:
            if arg_value and not isinstance(arg_value, expected_type):
                raise TypeError("Expected argument '%s' to be a %s" % (arg_name, expected_type.__name__))
            pulumi.set(__self__, arg_name, arg_value)
    @property
    @pulumi.getter(name="backendAddressPools")
    def backend_address_pools(self) -> Optional[Sequence['outputs.BackendAddressPoolResponse']]:
        """
        Collection of backend address pools used by a load balancer
        """
        return pulumi.get(self, "backend_address_pools")
    @property
    @pulumi.getter
    def etag(self) -> Optional[str]:
        """
        A unique read-only string that changes whenever the resource is updated.
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter(name="frontendIPConfigurations")
    def frontend_ip_configurations(self) -> Optional[Sequence['outputs.FrontendIPConfigurationResponse']]:
        """
        Object representing the frontend IPs to be used for the load balancer
        """
        return pulumi.get(self, "frontend_ip_configurations")
    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        Resource ID.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter(name="inboundNatPools")
    def inbound_nat_pools(self) -> Optional[Sequence['outputs.InboundNatPoolResponse']]:
        """
        Defines an external port range for inbound NAT to a single backend port on NICs associated with a load balancer. Inbound NAT rules are created automatically for each NIC associated with the Load Balancer using an external port from this range. Defining an Inbound NAT pool on your Load Balancer is mutually exclusive with defining inbound Nat rules. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with individual virtual machines cannot reference an inbound NAT pool. They have to reference individual inbound NAT rules.
        """
        return pulumi.get(self, "inbound_nat_pools")
    @property
    @pulumi.getter(name="inboundNatRules")
    def inbound_nat_rules(self) -> Optional[Sequence['outputs.InboundNatRuleResponse']]:
        """
        Collection of inbound NAT Rules used by a load balancer. Defining inbound NAT rules on your load balancer is mutually exclusive with defining an inbound NAT pool. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with individual virtual machines cannot reference an Inbound NAT pool. They have to reference individual inbound NAT rules.
        """
        return pulumi.get(self, "inbound_nat_rules")
    @property
    @pulumi.getter(name="loadBalancingRules")
    def load_balancing_rules(self) -> Optional[Sequence['outputs.LoadBalancingRuleResponse']]:
        """
        Object collection representing the load balancing rules Gets the provisioning
        """
        return pulumi.get(self, "load_balancing_rules")
    @property
    @pulumi.getter
    def location(self) -> Optional[str]:
        """
        Resource location.
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="outboundRules")
    def outbound_rules(self) -> Optional[Sequence['outputs.OutboundRuleResponse']]:
        """
        The outbound rules.
        """
        return pulumi.get(self, "outbound_rules")
    @property
    @pulumi.getter
    def probes(self) -> Optional[Sequence['outputs.ProbeResponse']]:
        """
        Collection of probe objects used in the load balancer
        """
        return pulumi.get(self, "probes")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> Optional[str]:
        """
        Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
        """
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter(name="resourceGuid")
    def resource_guid(self) -> Optional[str]:
        """
        The resource GUID property of the load balancer resource.
        """
        return pulumi.get(self, "resource_guid")
    @property
    @pulumi.getter
    def sku(self) -> Optional['outputs.LoadBalancerSkuResponse']:
        """
        The load balancer SKU.
        """
        return pulumi.get(self, "sku")
    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        Resource tags.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type.
        """
        return pulumi.get(self, "type")
class AwaitableGetLoadBalancerResult(GetLoadBalancerResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        # A generator function needs a yield statement somewhere in its body;
        # this branch is never taken.
        if False:
            yield self
        # Re-wrap the already-resolved values in a plain (non-awaitable) result.
        field_names = (
            'backend_address_pools',
            'etag',
            'frontend_ip_configurations',
            'id',
            'inbound_nat_pools',
            'inbound_nat_rules',
            'load_balancing_rules',
            'location',
            'name',
            'outbound_rules',
            'probes',
            'provisioning_state',
            'resource_guid',
            'sku',
            'tags',
            'type',
        )
        return GetLoadBalancerResult(**{field: getattr(self, field) for field in field_names})
def get_load_balancer(expand: Optional[str] = None,
                      load_balancer_name: Optional[str] = None,
                      resource_group_name: Optional[str] = None,
                      opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetLoadBalancerResult:
    """
    LoadBalancer resource
    :param str expand: Expands referenced resources.
    :param str load_balancer_name: The name of the load balancer.
    :param str resource_group_name: The name of the resource group.
    """
    __args__ = {
        'expand': expand,
        'loadBalancerName': load_balancer_name,
        'resourceGroupName': resource_group_name,
    }
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-nextgen:network/v20181101:getLoadBalancer', __args__, opts=opts, typ=GetLoadBalancerResult).value
    # Copy every attribute of the invoke result onto the awaitable wrapper.
    field_names = (
        'backend_address_pools', 'etag', 'frontend_ip_configurations', 'id',
        'inbound_nat_pools', 'inbound_nat_rules', 'load_balancing_rules',
        'location', 'name', 'outbound_rules', 'probes', 'provisioning_state',
        'resource_guid', 'sku', 'tags', 'type')
    return AwaitableGetLoadBalancerResult(**{field: getattr(__ret__, field) for field in field_names})
| 42.060377 | 572 | 0.67504 |
6541a1f5dd06abb049ba90813c456f7f889f142c | 2,123 | py | Python | Mars-Craters/code.py | arshee2403/ga-learner-dsmp-repo | f79d26ceaf38952e975404984f7d14d585727c15 | [
"MIT"
] | 2 | 2019-10-12T16:05:04.000Z | 2020-01-22T16:41:58.000Z | Mars-Craters/code.py | arshee2403/ga-learner-dsmp-repo | f79d26ceaf38952e975404984f7d14d585727c15 | [
"MIT"
] | null | null | null | Mars-Craters/code.py | arshee2403/ga-learner-dsmp-repo | f79d26ceaf38952e975404984f7d14d585727c15 | [
"MIT"
] | null | null | null | # --------------
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
import pandas as pd
# Code starts here
# Load the crater dataset; `path` is injected by the hosting environment.
df = pd.read_csv(path)
print(df.head())
# 'attr1089' is the target column; every other column is a feature.
X = df.drop(['attr1089'], axis=1)
y = df['attr1089']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=4)
# Fit the scaler on the training split only (avoids test-set leakage),
# then apply the same transform to both splits.
scaler = MinMaxScaler()
scaler.fit(X_train)
X_train = scaler.transform(X_train)
X_test = scaler.transform(X_test)
# Code ends here
# --------------
from sklearn.metrics import classification_report
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_auc_score
# Baseline model: logistic regression.
lr = LogisticRegression()
lr.fit(X_train, y_train)
y_pred = lr.predict(X_test)
# BUGFIX: roc_auc_score expects (y_true, y_score) — the true labels must come
# first, then the predictions (original code had the arguments reversed).
roc_score = roc_auc_score(y_test, y_pred)
print(roc_score)
# --------------
from sklearn.tree import DecisionTreeClassifier
# Single decision tree.
dt = DecisionTreeClassifier(random_state=4)
dt.fit(X_train, y_train)
y_pred = dt.predict(X_test)
roc_score = roc_auc_score(y_test, y_pred)
print(roc_score)
# --------------
from sklearn.ensemble import RandomForestClassifier
# Code starts here
# Random forest ensemble.
rfc = RandomForestClassifier(random_state=4)
rfc.fit(X_train, y_train)
y_pred = rfc.predict(X_test)
roc_score = roc_auc_score(y_test, y_pred)
print(roc_score)
# Code ends here
# --------------
# Import Bagging Classifier
from sklearn.ensemble import BaggingClassifier
# Code starts here
# Bagging over decision trees: 100 trees, each fit on 100 bootstrap samples.
bagging_clf = BaggingClassifier(base_estimator=DecisionTreeClassifier(), n_estimators=100, max_samples=100, random_state=0)
bagging_clf.fit(X_train, y_train)
score_bagging = bagging_clf.score(X_test, y_test)
print(score_bagging)
# Code ends here
# --------------
# Import libraries
from sklearn.ensemble import VotingClassifier
# Various models
clf_1 = LogisticRegression()
clf_2 = DecisionTreeClassifier(random_state=4)
clf_3 = RandomForestClassifier(random_state=4)
model_list = [('lr', clf_1), ('DT', clf_2), ('RF', clf_3)]
# Code starts here
# Hard voting: majority vote over the three classifiers' predicted labels.
voting_clf_hard = VotingClassifier(estimators=model_list, voting='hard')
voting_clf_hard.fit(X_train, y_train)
hard_voting_score = voting_clf_hard.score(X_test, y_test)
print(hard_voting_score)
# Code ends here
| 23.588889 | 122 | 0.766368 |
2733e87e2c8d947dc61ba3b35d5ca560692123d6 | 141 | py | Python | archive_for_wyko/Pyro-3.6/examples/agent2/serv/objects/tv.py | ArcetriAdaptiveOptics/plico_interferometer_server | e14e240229c802333b2aa5bf6458a079dc950bd2 | [
"MIT"
] | null | null | null | archive_for_wyko/Pyro-3.6/examples/agent2/serv/objects/tv.py | ArcetriAdaptiveOptics/plico_interferometer_server | e14e240229c802333b2aa5bf6458a079dc950bd2 | [
"MIT"
] | 3 | 2022-01-16T01:05:01.000Z | 2022-02-23T14:05:28.000Z | archive_for_wyko/Pyro-3.6/examples/agent2/serv/objects/tv.py | ArcetriAdaptiveOptics/plico_interferometer_server | e14e240229c802333b2aa5bf6458a079dc950bd2 | [
"MIT"
] | 1 | 2022-01-14T14:04:07.000Z | 2022-01-14T14:04:07.000Z |
class tv:
    """A trivially simple product object representing a television."""

    def __init__(self):
        # No state to initialize.
        pass

    def getName(self):
        """Return the short display name of this product."""
        return "TV"

    def getDescription(self):
        """Return a longer human-readable description of this product."""
        return "Wide-screen stereo television"
| 15.666667 | 40 | 0.716312 |
de6f09e7635e4436962c270ef022c99462ca8ac8 | 25,310 | py | Python | test/functional/test_runner.py | crosswand52/swand | 8e412a4db513c332c73108457d1317e5f1c4a7a6 | [
"MIT"
] | null | null | null | test/functional/test_runner.py | crosswand52/swand | 8e412a4db513c332c73108457d1317e5f1c4a7a6 | [
"MIT"
] | null | null | null | test/functional/test_runner.py | crosswand52/swand | 8e412a4db513c332c73108457d1317e5f1c4a7a6 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Run regression test suite.
This module calls down into individual test cases via subprocess. It will
forward all unrecognized arguments onto the individual test scripts.
For a description of arguments recognized by test scripts, see
`test/functional/test_framework/test_framework.py:BitcoinTestFramework.main`.
"""
import argparse
from collections import deque
import configparser
import datetime
import os
import time
import shutil
import signal
import sys
import subprocess
import tempfile
import re
import logging
# Formatting. Default colors to empty strings.
BOLD, GREEN, RED, GREY = ("", ""), ("", ""), ("", ""), ("", "")
try:
# Make sure python thinks it can write unicode to its stdout
"\u2713".encode("utf_8").decode(sys.stdout.encoding)
TICK = "✓ "
CROSS = "✖ "
CIRCLE = "○ "
except UnicodeDecodeError:
TICK = "P "
CROSS = "x "
CIRCLE = "o "
if os.name != 'nt' or sys.getwindowsversion() >= (10, 0, 14393):
if os.name == 'nt':
import ctypes
kernel32 = ctypes.windll.kernel32
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
# Enable ascii color control to stdout
stdout = kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
stdout_mode = ctypes.c_int32()
kernel32.GetConsoleMode(stdout, ctypes.byref(stdout_mode))
kernel32.SetConsoleMode(stdout, stdout_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
# Enable ascii color control to stderr
stderr = kernel32.GetStdHandle(STD_ERROR_HANDLE)
stderr_mode = ctypes.c_int32()
kernel32.GetConsoleMode(stderr, ctypes.byref(stderr_mode))
kernel32.SetConsoleMode(stderr, stderr_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
# primitive formatting on supported
# terminal via ANSI escape sequences:
BOLD = ('\033[0m', '\033[1m')
GREEN = ('\033[0m', '\033[0;32m')
RED = ('\033[0m', '\033[0;31m')
GREY = ('\033[0m', '\033[1;30m')
TEST_EXIT_PASSED = 0
TEST_EXIT_SKIPPED = 77
BASE_SCRIPTS = [
# Scripts that are run by the travis build process.
# Longest test should go first, to favor running tests in parallel
'feature_fee_estimation.py',
'wallet_hd.py',
'wallet_backup.py',
# vv Tests less than 5m vv
'mining_getblocktemplate_longpoll.py',
'feature_maxuploadtarget.py',
'feature_block.py',
'rpc_fundrawtransaction.py',
'p2p_compactblocks.py',
'feature_segwit.py',
# vv Tests less than 2m vv
'wallet_basic.py',
'wallet_labels.py',
'p2p_segwit.py',
'p2p_timeouts.py',
'wallet_dump.py',
'wallet_listtransactions.py',
# vv Tests less than 60s vv
'p2p_sendheaders.py',
'wallet_zapwallettxes.py',
'wallet_importmulti.py',
'mempool_limit.py',
'rpc_txoutproof.py',
'wallet_listreceivedby.py',
'wallet_abandonconflict.py',
'feature_csv_activation.py',
'rpc_rawtransaction.py',
'wallet_address_types.py',
'feature_bip68_sequence.py',
'p2p_feefilter.py',
'feature_reindex.py',
# vv Tests less than 30s vv
'wallet_keypool_topup.py',
'interface_zmq.py',
'interface_bitcoin_cli.py',
'mempool_resurrect.py',
'wallet_txn_doublespend.py --mineblock',
'tool_wallet.py',
'wallet_txn_clone.py',
'wallet_txn_clone.py --segwit',
'rpc_getchaintips.py',
'rpc_misc.py',
'interface_rest.py',
'mempool_spend_coinbase.py',
'mempool_reorg.py',
'mempool_persist.py',
'wallet_multiwallet.py',
'wallet_multiwallet.py --usecli',
'wallet_createwallet.py',
'wallet_createwallet.py --usecli',
'interface_http.py',
'interface_rpc.py',
'rpc_psbt.py',
'rpc_users.py',
'feature_proxy.py',
'rpc_signrawtransaction.py',
'wallet_groups.py',
'p2p_disconnect_ban.py',
'rpc_decodescript.py',
'rpc_blockchain.py',
'rpc_deprecated.py',
'wallet_disable.py',
'rpc_net.py',
'wallet_keypool.py',
'p2p_mempool.py',
'p2p_blocksonly.py',
'mining_prioritisetransaction.py',
'p2p_invalid_locator.py',
'p2p_invalid_block.py',
'p2p_invalid_messages.py',
'p2p_invalid_tx.py',
'feature_assumevalid.py',
'example_test.py',
'wallet_txn_doublespend.py',
'wallet_txn_clone.py --mineblock',
'feature_notifications.py',
'rpc_invalidateblock.py',
'feature_rbf.py',
'mempool_packages.py',
'rpc_createmultisig.py',
'feature_versionbits_warning.py',
'rpc_preciousblock.py',
'wallet_importprunedfunds.py',
'p2p_leak_tx.py',
'rpc_signmessage.py',
'wallet_balance.py',
'feature_nulldummy.py',
'mempool_accept.py',
'wallet_import_rescan.py',
'wallet_import_with_label.py',
'rpc_bind.py --ipv4',
'rpc_bind.py --ipv6',
'rpc_bind.py --nonloopback',
'mining_basic.py',
'wallet_bumpfee.py',
'rpc_named_arguments.py',
'wallet_listsinceblock.py',
'p2p_leak.py',
'wallet_encryption.py',
'wallet_scriptaddress2.py',
'feature_dersig.py',
'feature_cltv.py',
'rpc_uptime.py',
'wallet_resendwallettransactions.py',
'wallet_fallbackfee.py',
'feature_minchainwork.py',
'rpc_getblockstats.py',
'wallet_create_tx.py',
'p2p_fingerprint.py',
'feature_uacomment.py',
'wallet_coinbase_category.py',
'feature_filelock.py',
'p2p_unrequested_blocks.py',
'feature_includeconf.py',
'rpc_deriveaddresses.py',
'rpc_deriveaddresses.py --usecli',
'rpc_scantxoutset.py',
'feature_logging.py',
'p2p_node_network_limited.py',
'feature_blocksdir.py',
'feature_config_args.py',
'rpc_help.py',
'feature_help.py',
'feature_shutdown.py',
# Don't append tests at the end to avoid merge conflicts
# Put them in a random line within the section that fits their approximate run-time
]
EXTENDED_SCRIPTS = [
# These tests are not run by the travis build process.
# Longest test should go first, to favor running tests in parallel
'feature_pruning.py',
'feature_dbcrash.py',
]
# Place EXTENDED_SCRIPTS first since it has the 3 longest running tests
ALL_SCRIPTS = EXTENDED_SCRIPTS + BASE_SCRIPTS
NON_SCRIPTS = [
# These are python files that live in the functional tests directory, but are not test scripts.
"combine_logs.py",
"create_cache.py",
"test_runner.py",
]
def main():
    """Entry point: parse options, assemble the test list, then run it.

    Unrecognised ``--`` options are forwarded to every individual test
    script; remaining bare arguments are treated as test names.
    """
    # Parse arguments and pass through unrecognised args
    parser = argparse.ArgumentParser(add_help=False,
                                     usage='%(prog)s [test_runner.py options] [script options] [scripts]',
                                     description=__doc__,
                                     epilog='''
Help text and arguments for individual test script:''',
                                     formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('--combinedlogslen', '-c', type=int, default=0, metavar='n', help='On failure, print a log (of length n lines) to the console, combined from the test framework and all test nodes.')
    parser.add_argument('--coverage', action='store_true', help='generate a basic coverage report for the RPC interface')
    parser.add_argument('--ci', action='store_true', help='Run checks and code that are usually only enabled in a continuous integration environment')
    parser.add_argument('--exclude', '-x', help='specify a comma-separated-list of scripts to exclude.')
    parser.add_argument('--extended', action='store_true', help='run the extended test suite in addition to the basic tests')
    parser.add_argument('--help', '-h', '-?', action='store_true', help='print help text and exit')
    parser.add_argument('--jobs', '-j', type=int, default=4, help='how many test scripts to run in parallel. Default=4.')
    parser.add_argument('--keepcache', '-k', action='store_true', help='the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.')
    parser.add_argument('--quiet', '-q', action='store_true', help='only print dots, results summary and failure logs')
    parser.add_argument('--tmpdirprefix', '-t', default=tempfile.gettempdir(), help="Root directory for datadirs")
    parser.add_argument('--failfast', action='store_true', help='stop execution after the first test failure')
    args, unknown_args = parser.parse_known_args()
    # args to be passed on always start with two dashes; tests are the remaining unknown args
    tests = [arg for arg in unknown_args if arg[:2] != "--"]
    passon_args = [arg for arg in unknown_args if arg[:2] == "--"]
    # Read config generated by configure.
    config = configparser.ConfigParser()
    configfile = os.path.abspath(os.path.dirname(__file__)) + "/../config.ini"
    config.read_file(open(configfile, encoding="utf8"))
    passon_args.append("--configfile=%s" % configfile)
    # Set up logging
    logging_level = logging.INFO if args.quiet else logging.DEBUG
    logging.basicConfig(format='%(message)s', level=logging_level)
    # Create base test directory
    # NOTE(review): the non-ASCII characters in the directory name appear
    # deliberate (presumably to exercise unicode path handling) — keep them.
    tmpdir = "%s/test_runner_Ł_🏃_%s" % (args.tmpdirprefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))
    os.makedirs(tmpdir)
    logging.debug("Temporary test directory at %s" % tmpdir)
    enable_bitcoind = config["components"].getboolean("ENABLE_BITCOIND")
    if not enable_bitcoind:
        print("No functional tests to run.")
        print("Rerun ./configure with --with-daemon and then make")
        sys.exit(0)
    # Build list of tests
    test_list = []
    if tests:
        # Individual tests have been specified. Run specified tests that exist
        # in the ALL_SCRIPTS list. Accept the name with or without .py extension.
        tests = [test + ".py" if ".py" not in test else test for test in tests]
        for test in tests:
            if test in ALL_SCRIPTS:
                test_list.append(test)
            else:
                print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], test))
    elif args.extended:
        # Include extended tests
        test_list += ALL_SCRIPTS
    else:
        # Run base tests only
        test_list += BASE_SCRIPTS
    # Remove the test cases that the user has explicitly asked to exclude.
    if args.exclude:
        exclude_tests = [test.split('.py')[0] for test in args.exclude.split(',')]
        for exclude_test in exclude_tests:
            # Remove <test_name>.py and <test_name>.py --arg from the test list
            exclude_list = [test for test in test_list if test.split('.py')[0] == exclude_test]
            for exclude_item in exclude_list:
                test_list.remove(exclude_item)
            if not exclude_list:
                print("{}WARNING!{} Test '{}' not found in current test list.".format(BOLD[1], BOLD[0], exclude_test))
    if not test_list:
        print("No valid test scripts specified. Check that your test is in one "
              "of the test lists in test_runner.py, or run test_runner.py with no arguments to run all tests")
        sys.exit(0)
    if args.help:
        # Print help for test_runner.py, then print help of the first script (with args removed) and exit.
        parser.print_help()
        subprocess.check_call([sys.executable, os.path.join(config["environment"]["SRCDIR"], 'test', 'functional', test_list[0].split()[0]), '-h'])
        sys.exit(0)
    check_script_list(src_dir=config["environment"]["SRCDIR"], fail_on_warn=args.ci)
    check_script_prefixes()
    # Flush the per-run cache unless the user asked to keep it.
    if not args.keepcache:
        shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"], ignore_errors=True)
    run_tests(
        test_list=test_list,
        src_dir=config["environment"]["SRCDIR"],
        build_dir=config["environment"]["BUILDDIR"],
        tmpdir=tmpdir,
        jobs=args.jobs,
        enable_coverage=args.coverage,
        args=passon_args,
        combined_logs_len=args.combinedlogslen,
        failfast=args.failfast,
        runs_ci=args.ci,
    )
def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=False, args=None, combined_logs_len=0, failfast=False, runs_ci):
    """Run every script in test_list, up to `jobs` in parallel, and report.

    Exits the interpreter with status 0 if all tests passed, 1 otherwise.
    `args` is a list of extra flags forwarded verbatim to each test script.
    """
    args = args or []
    # Warn if bitcoind is already running (unix only)
    try:
        if subprocess.check_output(["pidof", "swandd"]) is not None:
            print("%sWARNING!%s There is already a swandd process running on this system. Tests may fail unexpectedly due to resource contention!" % (BOLD[1], BOLD[0]))
    except (OSError, subprocess.SubprocessError):
        # pidof not available (non-unix) or no such process — either is fine.
        pass
    # Warn if there is a cache directory
    cache_dir = "%s/test/cache" % build_dir
    if os.path.isdir(cache_dir):
        print("%sWARNING!%s There is a cache directory here: %s. If tests fail unexpectedly, try deleting the cache directory." % (BOLD[1], BOLD[0], cache_dir))
    tests_dir = src_dir + '/test/functional/'
    flags = ['--cachedir={}'.format(cache_dir)] + args
    if enable_coverage:
        coverage = RPCCoverage()
        flags.append(coverage.flag)
        logging.debug("Initializing coverage directory at %s" % coverage.dir)
    else:
        coverage = None
    if len(test_list) > 1 and jobs > 1:
        # Populate cache
        try:
            subprocess.check_output([sys.executable, tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir])
        except subprocess.CalledProcessError as e:
            sys.stdout.buffer.write(e.output)
            raise
    #Run Tests
    job_queue = TestHandler(
        num_tests_parallel=jobs,
        tests_dir=tests_dir,
        tmpdir=tmpdir,
        test_list=test_list,
        flags=flags,
        timeout_duration=40 * 60 if runs_ci else float('inf'), # in seconds
    )
    start_time = time.time()
    test_results = []
    max_len_name = len(max(test_list, key=len))
    test_count = len(test_list)
    for i in range(test_count):
        test_result, testdir, stdout, stderr = job_queue.get_next()
        test_results.append(test_result)
        done_str = "{}/{} - {}{}{}".format(i + 1, test_count, BOLD[1], test_result.name, BOLD[0])
        if test_result.status == "Passed":
            logging.debug("%s passed, Duration: %s s" % (done_str, test_result.time))
        elif test_result.status == "Skipped":
            logging.debug("%s skipped" % (done_str))
        else:
            print("%s failed, Duration: %s s\n" % (done_str, test_result.time))
            print(BOLD[1] + 'stdout:\n' + BOLD[0] + stdout + '\n')
            print(BOLD[1] + 'stderr:\n' + BOLD[0] + stderr + '\n')
            if combined_logs_len and os.path.isdir(testdir):
                # Print the final `combinedlogslen` lines of the combined logs
                print('{}Combine the logs and print the last {} lines ...{}'.format(BOLD[1], combined_logs_len, BOLD[0]))
                print('\n============')
                print('{}Combined log for {}:{}'.format(BOLD[1], testdir, BOLD[0]))
                print('============\n')
                combined_logs_args = [sys.executable, os.path.join(tests_dir, 'combine_logs.py'), testdir]
                if BOLD[0]:
                    combined_logs_args += ['--color']
                combined_logs, _ = subprocess.Popen(combined_logs_args, universal_newlines=True, stdout=subprocess.PIPE).communicate()
                print("\n".join(deque(combined_logs.splitlines(), combined_logs_len)))
            if failfast:
                logging.debug("Early exiting after test failure")
                break
    print_results(test_results, max_len_name, (int(time.time() - start_time)))
    if coverage:
        coverage.report_rpc_coverage()
        logging.debug("Cleaning up coverage data")
        coverage.cleanup()
    # Clear up the temp directory if all subdirectories are gone
    if not os.listdir(tmpdir):
        os.rmdir(tmpdir)
    all_passed = all(map(lambda test_result: test_result.was_successful, test_results))
    # This will be a no-op unless failfast is True in which case there may be dangling
    # processes which need to be killed.
    job_queue.kill_and_join()
    sys.exit(not all_passed)
def print_results(test_results, max_len_name, runtime):
    """Print a formatted summary table of all test results plus totals."""
    header = "%s | %s | %s\n\n" % ("TEST".ljust(max_len_name), "STATUS ", "DURATION")
    parts = ["\n" + BOLD[1] + header + BOLD[0]]
    test_results.sort(key=TestResult.sort_key)
    all_passed = True
    time_sum = 0
    for result in test_results:
        all_passed = all_passed and result.was_successful
        time_sum += result.time
        result.padding = max_len_name
        parts.append(str(result))
    if all_passed:
        status = TICK + "Passed"
    else:
        status = CROSS + "Failed"
        parts.append(RED[1])
    footer = "\n%s | %s | %s s (accumulated) \n" % ("ALL".ljust(max_len_name), status.ljust(9), time_sum)
    parts.append(BOLD[1] + footer + BOLD[0])
    if not all_passed:
        parts.append(RED[0])
    parts.append("Runtime: %s s\n" % (runtime))
    print("".join(parts))
class TestHandler:
    """
    Trigger the test scripts passed in via the list.
    """
    def __init__(self, *, num_tests_parallel, tests_dir, tmpdir, test_list, flags, timeout_duration):
        # num_tests_parallel: maximum number of scripts run concurrently.
        # flags: extra command-line flags forwarded to every test script.
        # timeout_duration: per-test wall-clock limit in seconds (may be inf).
        assert num_tests_parallel >= 1
        self.num_jobs = num_tests_parallel
        self.tests_dir = tests_dir
        self.tmpdir = tmpdir
        self.timeout_duration = timeout_duration
        self.test_list = test_list
        self.flags = flags
        self.num_running = 0
        # Each entry: (name, start_time, Popen, testdir, stdout_file, stderr_file).
        self.jobs = []
    def get_next(self):
        """Top up the job pool, then block until one test finishes.

        Returns (TestResult, testdir, stdout, stderr) for the first test
        that completes. Raises IndexError if no jobs are left to wait on.
        """
        while self.num_running < self.num_jobs and self.test_list:
            # Add tests
            self.num_running += 1
            test = self.test_list.pop(0)
            # portseed derives from the remaining list length so each test
            # gets a distinct value within this run.
            portseed = len(self.test_list)
            portseed_arg = ["--portseed={}".format(portseed)]
            log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
            log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
            test_argv = test.split()
            testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)
            tmpdir_arg = ["--tmpdir={}".format(testdir)]
            self.jobs.append((test,
                              time.time(),
                              subprocess.Popen([sys.executable, self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg,
                                               universal_newlines=True,
                                               stdout=log_stdout,
                                               stderr=log_stderr),
                              testdir,
                              log_stdout,
                              log_stderr))
        if not self.jobs:
            raise IndexError('pop from empty list')
        dot_count = 0
        while True:
            # Return first proc that finishes
            time.sleep(.5)
            for job in self.jobs:
                (name, start_time, proc, testdir, log_out, log_err) = job
                if int(time.time() - start_time) > self.timeout_duration:
                    # In travis, timeout individual tests (to stop tests hanging and not providing useful output).
                    proc.send_signal(signal.SIGINT)
                if proc.poll() is not None:
                    log_out.seek(0), log_err.seek(0)
                    [stdout, stderr] = [log_file.read().decode('utf-8') for log_file in (log_out, log_err)]
                    log_out.close(), log_err.close()
                    # Any stderr output counts as a failure even on exit code 0.
                    if proc.returncode == TEST_EXIT_PASSED and stderr == "":
                        status = "Passed"
                    elif proc.returncode == TEST_EXIT_SKIPPED:
                        status = "Skipped"
                    else:
                        status = "Failed"
                    self.num_running -= 1
                    self.jobs.remove(job)
                    # Erase the progress dots before returning.
                    clearline = '\r' + (' ' * dot_count) + '\r'
                    print(clearline, end='', flush=True)
                    dot_count = 0
                    return TestResult(name, status, int(time.time() - start_time)), testdir, stdout, stderr
            print('.', end='', flush=True)
            dot_count += 1
    def kill_and_join(self):
        """Send SIGKILL to all jobs and block until all have ended."""
        procs = [i[2] for i in self.jobs]
        for proc in procs:
            proc.kill()
        for proc in procs:
            proc.wait()
class TestResult():
    """Outcome of a single test: name, status string and duration.

    ``padding`` is the name-column width used when formatting a row; it is
    assigned by the caller before the result is rendered.
    """
    def __init__(self, name, status, time):
        self.name = name
        self.status = status
        self.time = time
        self.padding = 0
    def sort_key(self):
        # Passed rows sort first, then Skipped, then Failed; ties are
        # broken by the lower-cased test name.
        lowered = self.name.lower()
        if self.status == "Passed":
            return 0, lowered
        if self.status == "Skipped":
            return 1, lowered
        if self.status == "Failed":
            return 2, lowered
    def __repr__(self):
        if self.status == "Passed":
            color, glyph = GREEN, TICK
        elif self.status == "Failed":
            color, glyph = RED, CROSS
        elif self.status == "Skipped":
            color, glyph = GREY, CIRCLE
        row = "%s | %s%s | %s s\n" % (self.name.ljust(self.padding), glyph, self.status.ljust(7), self.time)
        return color[1] + row + color[0]
    @property
    def was_successful(self):
        # Skipped tests count as successful; only "Failed" is a failure.
        return self.status != "Failed"
def check_script_prefixes():
    """Check that test scripts start with one of the allowed name prefixes."""
    allowed_prefixes = re.compile("(example|feature|interface|mempool|mining|p2p|rpc|wallet|tool)_")
    bad_script_names = [name for name in ALL_SCRIPTS if not allowed_prefixes.match(name)]
    if bad_script_names:
        # Report every offender, then abort the run.
        print("%sERROR:%s %d tests not meeting naming conventions:" % (BOLD[1], BOLD[0], len(bad_script_names)))
        print(" %s" % ("\n ".join(sorted(bad_script_names))))
        raise AssertionError("Some tests are not following naming convention!")
def check_script_list(*, src_dir, fail_on_warn):
    """Check scripts directory.
    Check that there are no scripts in the functional tests directory which are
    not being run by pull-tester.py."""
    script_dir = src_dir + '/test/functional/'
    python_files = {entry for entry in os.listdir(script_dir) if entry.endswith(".py")}
    # First whitespace-separated token of each list entry is the script name.
    known_scripts = {entry.split()[0] for entry in ALL_SCRIPTS + NON_SCRIPTS}
    missed_tests = list(python_files - known_scripts)
    if missed_tests:
        print("%sWARNING!%s The following scripts are not being run: %s. Check the test lists in test_runner.py." % (BOLD[1], BOLD[0], str(missed_tests)))
        if fail_on_warn:
            # On travis this warning is an error to prevent merging incomplete commits into master
            sys.exit(1)
class RPCCoverage():
    """
    Coverage reporting utilities for test_runner.

    Each test subprocess writes ``coverage.*`` files into ``self.dir``
    listing the RPC commands it invoked; ``rpc_interface.txt`` in the same
    directory holds the complete command list (per `swand-cli help`).
    After the run the two are diffed to find RPC commands that no test
    exercised.

    See also: test/functional/test_framework/coverage.py
    """
    def __init__(self):
        self.dir = tempfile.mkdtemp(prefix="coverage")
        # Flag forwarded to every test subprocess so it knows where to
        # write its coverage files.
        self.flag = '--coveragedir=%s' % self.dir
    def report_rpc_coverage(self):
        """Print out RPC commands that were unexercised by tests."""
        uncovered = self._get_uncovered_rpc_commands()
        if not uncovered:
            print("All RPC commands covered.")
        else:
            print("Uncovered RPC commands:")
            print("".join((" - %s\n" % command) for command in sorted(uncovered)))
    def cleanup(self):
        """Remove the temporary coverage directory and everything in it."""
        return shutil.rmtree(self.dir)
    def _get_uncovered_rpc_commands(self):
        """Return the set of RPC commands not touched by any test."""
        # Filenames shared with `test/functional/test-framework/coverage.py`.
        reference_path = os.path.join(self.dir, 'rpc_interface.txt')
        if not os.path.isfile(reference_path):
            raise RuntimeError("No coverage reference found")
        with open(reference_path, 'r', encoding="utf8") as reference_file:
            all_cmds = {line.strip() for line in reference_file}
        covered_cmds = set()
        for root, _, files in os.walk(self.dir):
            for filename in files:
                if filename.startswith('coverage.'):
                    with open(os.path.join(root, filename), 'r', encoding="utf8") as coverage_file:
                        covered_cmds.update(line.strip() for line in coverage_file)
        return all_cmds - covered_cmds
# Standard script entry point.
if __name__ == '__main__':
    main()
| 38.878648 | 205 | 0.632201 |
27dffaf07c5232f21c6fa14cca4503a7f038a054 | 5,239 | py | Python | setup.py | bear8421/thumbor | 00a0c44d44b8fa5f06c38deee7123793addda404 | [
"MIT"
] | null | null | null | setup.py | bear8421/thumbor | 00a0c44d44b8fa5f06c38deee7123793addda404 | [
"MIT"
] | null | null | null | setup.py | bear8421/thumbor | 00a0c44d44b8fa5f06c38deee7123793addda404 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com thumbor@googlegroups.com
import glob
import logging
import os
from setuptools import Extension, setup
# Use the real package version when thumbor is importable; fall back to a
# placeholder when it is not (e.g. while the package is being built).
try:
    from thumbor import __version__
except ImportError:
    __version__ = "0.0.0"
# Packages needed only for running the test suite and linters.
TESTS_REQUIREMENTS = [
    "colorama==0.*,>=0.4.3",
    "coverage==6.*,>=6.3.2",
    "flake8==3.*,>=3.7.9",
    "isort==4.*,>=4.3.21",
    "preggy==1.*,>=1.4.4",
    "pylint==2.*,>=2.4.4",
    "pyssim==0.*,>=0.4.0",
    "pytest>=6.2.5",
    "pytest-asyncio==0.*,>=0.10.0",
    "pytest-cov==3.*,>=3.0.0",
    "pytest-tldr==0.*,>=0.2.1",
    "pytest-xdist==2.*,>=2.4.0",
    "redis==3.*,>=3.4.0",
    "remotecv>=2.3.0",
    "sentry-sdk==0.*,>=0.14.1",
    "snapshottest>=0.6.0,<1.0.0",
    "yanc==0.*,>=0.3.3",
]
# Optional OpenCV support (installed via the "opencv" extra).
OPENCV_REQUIREMENTS = [
    "opencv-python-headless==4.*,>=4.2.0",
    "numpy==1.*,>=1.18.1",
]
# Optional native-library bindings (SVG, curl, EXIF).
EXTRA_LIBS_REQUIREMENTS = [
    # Going to update in a proper commit
    "cairosvg>=2.5.2",
    "pycurl==7.*,>=7.43.0",
    "py3exiv2==0.*,>=0.7.1,!=0.7.2,!=0.8.0,!=0.9.3",
]
# Everything the "all" extra installs.
ALL_REQUIREMENTS = OPENCV_REQUIREMENTS + EXTRA_LIBS_REQUIREMENTS
def filter_extension_module(name, lib_objs, lib_headers):
    """Build the setuptools Extension for one native filter.

    name: filter module name (maps to thumbor/ext/filters/<name>.c).
    lib_objs: shared C sources compiled into every filter.
    lib_headers: shared headers, listed as dependencies so edits trigger
        a rebuild.
    """
    sources = [f"thumbor/ext/filters/{name}.c"] + lib_objs
    # -Werror keeps the native filters warning-clean; unused-parameter is
    # explicitly tolerated.
    compile_args = [
        "-Wall",
        "-Wextra",
        "-Werror",
        "-Wno-unused-parameter",
    ]
    return Extension(
        f"thumbor.ext.filters.{name}",
        sources,
        libraries=["m"],
        include_dirs=["thumbor/ext/filters/lib"],
        depends=["setup.py"] + lib_objs + lib_headers,
        extra_compile_args=compile_args,
    )
def gather_filter_extensions():
    """Discover the native filter sources and return their Extensions."""
    lib_objs = glob.glob("thumbor/ext/filters/lib/*.c")
    lib_headers = glob.glob("thumbor/ext/filters/lib/*.h")
    extensions = []
    for path in glob.glob("thumbor/ext/filters/_*.c"):
        # Strip the ".c" suffix and the directory part to get the module name.
        module_name = path[0:-2].split("/")[-1]
        extensions.append(filter_extension_module(module_name, lib_objs, lib_headers))
    return extensions
def run_setup(extension_modules=None):
    """Invoke setuptools.setup() for thumbor.

    extension_modules: optional list of C Extension objects for the native
        filters; defaults to an empty list (pure-Python install).
    """
    if extension_modules is None:
        extension_modules = []
    # Ensure CFLAGS is always defined in the environment before building.
    # NOTE(review): presumably guards the native-extension build against an
    # unset CFLAGS — confirm against the build toolchain.
    if "CFLAGS" not in os.environ:
        os.environ["CFLAGS"] = ""
    setup(
        name="thumbor",
        version=__version__,
        description="thumbor is an open-source photo thumbnail service by globo.com",
        long_description="""
Thumbor is a smart imaging service. It enables on-demand crop, resizing and flipping of images.
It also features a VERY smart detection of important points in the image for better cropping and
resizing, using state-of-the-art face and feature detection algorithms (more on that in Detection Algorithms).
Using thumbor is very easy (after it is running). All you have to do is access it using an url for an image, like this:
http://<thumbor-server>/300x200/smart/thumbor.readthedocs.io/en/latest/_images/logo-thumbor.png
""",
        keywords="imaging face detection feature thumbnail imagemagick pil opencv",
        author="globo.com",
        author_email="thumbor@googlegroups.com",
        url="https://github.com/thumbor/thumbor/wiki",
        license="MIT",
        python_requires=">=3.6",
        classifiers=[
            "Development Status :: 4 - Beta",
            "Intended Audience :: Developers",
            "License :: OSI Approved :: MIT License",
            "Natural Language :: English",
            "Operating System :: MacOS",
            "Operating System :: POSIX :: Linux",
            "Programming Language :: Python :: 3",
            "Programming Language :: Python :: 3.6",
            "Programming Language :: Python :: 3.7",
            "Programming Language :: Python :: 3.8",
            "Programming Language :: Python :: 3 :: Only",
            "Topic :: Internet :: WWW/HTTP :: Dynamic Content",
            "Topic :: Multimedia :: Graphics :: Presentation",
        ],
        packages=["thumbor"],
        package_dir={"thumbor": "thumbor"},
        include_package_data=True,
        package_data={"": ["*.xml"]},
        # Mandatory runtime dependencies.
        install_requires=[
            "derpconf==0.*,>=0.8.3",
            "libthumbor==2.*,>=2.0.0",
            "Pillow>=9.0.0",
            "pytz>=2019.3.0",
            "statsd==3.*,>=3.3.0",
            "socketfromfd>=0.2.0",
            "tornado==6.*,>=6.0.3",
            "webcolors==1.*,>=1.10.0",
            "colorful==0.*,>=0.5.4",
        ],
        # Optional extras: opencv support, everything, or the test toolchain.
        extras_require={
            "all": ALL_REQUIREMENTS,
            "opencv": OPENCV_REQUIREMENTS,
            "tests": ALL_REQUIREMENTS + TESTS_REQUIREMENTS,
        },
        # Command-line tools installed with the package.
        entry_points={
            "console_scripts": [
                "thumbor=thumbor.server:main",
                "thumbor-url=thumbor.url_composer:main",
                "thumbor-config=thumbor.config:generate_config",
                "thumbor-doctor=thumbor.doctor:main",
            ],
        },
        ext_modules=extension_modules,
    )
# Try a build that includes the native filter extensions first; if setup()
# bails out with SystemExit (e.g. the C compile fails), log the error and
# retry as a pure-Python install.
try:
    run_setup(gather_filter_extensions())
except SystemExit as exit_error:
    print(f"\n\n{'*' * 66}")
    logging.exception(exit_error)
    print(f"\n\n{'*' * 66}")
    print(
        "Couldn't build one or more native extensions"
        ", skipping compilation.\n\n"
    )
    run_setup()
| 31 | 119 | 0.576828 |
1b02d8385911c28a85c9f9a8028935b34ed0583b | 656 | py | Python | alano/bullet/visualize.py | zren96/alano | afd412ea37d8d4844cf1c8c9d53ff419b206a878 | [
"MIT"
] | null | null | null | alano/bullet/visualize.py | zren96/alano | afd412ea37d8d4844cf1c8c9d53ff419b206a878 | [
"MIT"
] | null | null | null | alano/bullet/visualize.py | zren96/alano | afd412ea37d8d4844cf1c8c9d53ff419b206a878 | [
"MIT"
] | null | null | null | import numpy as np
import pybullet as p
from ..geometry.transform import quat2rot
def plot_frame_pb(pos, orn=None, w_first=False):
    """Draw a coordinate-frame triad at ``pos`` in the PyBullet debug view.

    pos: 3-vector world position of the frame origin.
    orn: orientation quaternion; defaults to the identity (0, 0, 0, 1).
         Component-order interpretation is delegated to quat2rot.
    w_first: forwarded to quat2rot to select the quaternion convention.

    Each axis is a 0.1-long debug line: x red, y green, z blue.
    """
    # Fix: the original used a numpy array as a default argument, which is
    # created once at definition time and shared across calls (mutable
    # default anti-pattern).  Build the identity quaternion per call.
    if orn is None:
        orn = np.array([0., 0., 0., 1.])
    rot = quat2rot(orn, w_first)
    axis_colors = ([1, 0, 0], [0, 1, 0], [0, 0, 1])
    for axis_index, color in enumerate(axis_colors):
        # Endpoint 0.1 along the rotated basis vector for this axis.
        end_pos = pos + 0.1 * rot[:, axis_index]
        p.addUserDebugLine(pos, end_pos, lineColorRGB=color, lineWidth=5)
def plot_line_pb(p1, p2, lineColorRGB=(1, 0, 0), lineWidth=5):
    """Draw a single debug line from ``p1`` to ``p2`` in the PyBullet view.

    lineColorRGB: RGB triple, default red.  Fix: the default is now an
        immutable tuple rather than a shared mutable list (mutable default
        anti-pattern); PyBullet accepts any sequence here.
    lineWidth: forwarded unchanged to addUserDebugLine.
    """
    p.addUserDebugLine(p1, p2, lineColorRGB=lineColorRGB, lineWidth=lineWidth)
| 36.444444 | 78 | 0.667683 |
df587f5fe975c1ae1466b0251d8dd5c71f1dc18a | 120,615 | py | Python | webresume/frontend/views.py | cmput401-fall2018/web-app-ci-cd-with-travis-ci-manweile | d932acffac20464dba773950a1abbb33ecbf7e6b | [
"MIT"
] | null | null | null | webresume/frontend/views.py | cmput401-fall2018/web-app-ci-cd-with-travis-ci-manweile | d932acffac20464dba773950a1abbb33ecbf7e6b | [
"MIT"
] | 4 | 2018-10-08T23:35:44.000Z | 2021-06-10T20:50:42.000Z | webresume/frontend/views.py | cmput401-fall2018/web-app-ci-cd-with-travis-ci-manweile | d932acffac20464dba773950a1abbb33ecbf7e6b | [
"MIT"
] | 1 | 2018-11-02T19:34:16.000Z | 2018-11-02T19:34:16.000Z | from django.shortcuts import render
from django.http import HttpResponse
import base64
def index(request):
    """Render the index.html template with an empty context."""
    return render(request, 'index.html', {})
# Example for returning a file from an endpoint
def text_file(request):
    """Example endpoint returning a plain-text response ("Hello World")."""
    return HttpResponse("Hello World", content_type="text/plain")
def image_file(request):
return HttpResponse(base64.b64decode('/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAAICAgICAQICAgICAgIDAwYEAwMDAwcFBQQGCAcICAgHCAgJCg0LCQkMCggICw8LDA0ODg4OCQsQEQ8OEQ0ODg7/2wBDAQICAgMDAwYEBAYOCQgJDg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg7/wgARCAHGAkYDASIAAhEBAxEB/8QAHQABAAICAwEBAAAAAAAAAAAAAAYHAwUBBAgCCf/EABsBAQEBAQEBAQEAAAAAAAAAAAABAgMEBQYH/9oADAMBAAIQAxAAAAH38D4UJEpr1RzTUC4e71I8gbT0fP8AVbDmAAAAAAAAAAAAAAAAAAAAAAAAB8H2wEzsHB2GAZ2AZ2AZ3X4Oy+S/QPxI7W2ujr65nn7sw+J7/wA54n7BpTp+U/WeUxOlPo/R9Ibry1Dbj2w8zwOX2pz5Wl9lyYfN+ZfUev8AOcCl9tvN0Hj1rs/JsM3n3Q45mwAAAAAAAAAAAAAHHOKTrPPXz0x6VwfH1jWV8LeXnS2/o/KmKuar+Z9T00paF6enVD9wuxEJbHDhXYE1+P8Aqej1fu97rksU28/HfNe2LQn8r+9+teHNNv2/rrGvvQXyQTRWznsrrYzX4IXrpsSMQW8+/LWHjb9GEtOau9259CaAAAAAAAAAAAAAA4xZkebXpDjfPB95uvnbU7JvFTTWScezwxfbbN4foU983G1KO2NwMtFIcfGpmGddgTX4paX29VX0nnT0RlkHyv1sn8z+j9T8L8x7U3ux7n6DjCcEv2msV99z1FcZbD4srbNYhY5IuWdAoAAAAAAAAAAAAAAAAAD5+h1nZM9Z2R1nZHWdkdZ2R1uO0AaAAx8x/f8Am3kHpwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB89bs1fwx579ReHLw/MX1OP12gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPjxt7Hr/wTx/tMXqz8/wCef9rWbP8AW+jkaoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGq2fnD0Zhr9n5w22pdOem+0XKpa6K4Uvp49Ac0lsktrV1VHD0XrIJoFu7nzxZVT3nz9OEsjnTUzNX6hshZ2OspLRS+nVBddb87PnjsbzeOSitDmenldwO6v/isokl98wKDRe7HkugAAAAAAAAAAAAAAAAPJlizWa4niOzLag/SdzqWV0sqS9PVDb+p515tQtTdmzNFzUVgu6X7kRqu+urm67z167hUbusK7t7rznVRX9U3Ltp5HLJhqQ/zvaUlSsonakqWtenYWpsp/wBHayPXNTS674FjVbbG3opvNaS+34DLaG+x5QGwAAAAAAAAAAAAAAAPnmgL7y++aIkGpaypdrlY3Nd2JpxzB+1Eu4gO6JGq2pa9WK5xRZvEU06buUQyZ28KS1R6CRugsvUSOc1IVTy0lnMK6RYPMdkNchQAAAAAAAAAAAAAAAAAAAPJdj7C2cPFUznWm3nY8zvQy1h6j8/egbnzj2N4iM8bncLCc860tum11h82QP6nNb5nz6g8p+rDzBJ9309W2POHpqmudpjr2zk6Y1GacdHF2nn66NTbrvSFX2hp9CaAAAAAAAAAAAAAAAAAAAA+eahjmZ6AVLp9S8kEj5bXPHmiX0yqHdFi8UZvbLVeYbKltZx52PRFOWfBjezKHTHUCaAAAA+XNXZlpOOdUD5VhZ0n040ku8V3XGs+iuar6MXDz80+tw8+ZbF1LUUHmxb3VTBbPSKsLPt+VU2rl9nGr881Ra6fRwvLjBHYR2Q2chQAAKMh+9xZdPdaGRdOdiVJZtDzXrjxn6GoouaATDFldWsp3Y1FvRlB37CnbjTWLy96lomSBSPdSzUgMJm2kk0/Ytrr7uy1PbyJCcP39RofrsyFYNxYfQyy0Db8b6yc7PH1cW
v9fYWyzZDb/nP0brPmOGb30jjVIaTPJuvPawv05575dK7v6kbf786o7smnnHrDa+l8c3yuWxartSb8ibvr7qK02M0m1U/IpH9VCdLu/s33T61xc75tseLSnpiEWzAvQhIROgAAHDiC5TwacI5I05C/PMEnWX0NAAAAAAAAAAAAAAAHHIAAAAAAAAAAAAAAq7WbPVs3HAp6zfLc01LUy9CHWCZetXPdLYuOEzi5+hOgAAAAAAAAAAAAAAAAAAAAAAAAAAAAADjp6mSRkct7zP14+tfJFfMXlMUklfPxr7dmAAAA1e0AAAADq4zvAAOr2D6AdXsH0AAAAAAAAAAAAAAADyjLJBAcpLuag9I750PtYzJD0pzxzN+c59WfpTKra3tKh4sWV1zDd59SwTuU2k8sOldKtkXj56ukhmKJ4pZzaXn30FVM7uA9rFyzXoVNvMi7+w0tks0laaubtKxtZQ2sWBdMAkstM2bCutLsbziWhsrezoT1pdjL9x5yi6N35ymmpJd7DvpfRQmgAAAAAAAAAAAIFDrt5k6VQTGb+Lv5/mlnvdwpe4s40dTXrwR2LWUypKW2FxpR0vsIlF2PLuJaPuvKshWrshLXNj8rKpkM0TVJT+XEivVmhK0wWiXQVNfGGPEnrv6ke8xfVT9nVG3X2FRTVWBxFG23txAvifkhSal+haAAAAAAAAAAABxqNvSvm5eZvWPj63/y+PWo/Y+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADH4h9v/AB4r+ffbtD1B8Xl0t38/f6TYboAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABgGdgM52AZ2AZ2AZ2AZ2AZ2AZ2AZ2AZ3TymdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGd8mvoEEzSvp6xofiX44g+872eyNb/tZprsia1GikeS4iOSRcWd7OZ1yGgMGfBnZ+ePiM+bpKfit9tprIr5Wxzt7i33kD0FecD+q+xef6HrmR09MvV8/52fnaf8Al9NrwXdfnpu+3pX4uuXj6/StYyLwt7PB7mlv5ze3Zxn+qrGM+32+iopDa55+Sa7Lwlufl9/eff8AJGq68vUe98AWR4/T7s70OmP0/n/Q7QAAAAAAACGyLU5fX4I3l7uf1eXD3ejsePaRDwfTA0mDBl9Xh6Lva3txZMvzZL3HPzvqgYM+DOzE6n9BfHh9NIXL2+knhp3ZV6e/F+xaxJ5qP6UfyePlfVV3h5J9+/PMrmHQt9iePfcdJ+L3UhfWgunyfTj9NzmCe/59e+mvL/rTXm1mmsPR+70divbMnk8f5HaT1Np/kejrc+gtT7vN5gnu9+fD9H1pZMGnX0Pk8jcAAAAAAAAMAzsBnOwDOwDOwDPxhGdgGdgGdgGdgDPgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGd8mvoHHMK2NzJECkpt1d4ksjmP7Wa7fNdyKyRK/7pM0f+VkKuN2zLEMmcvIaAAAAAAAAAAAAAAAAAAAAAAAAAAA0v3tzMX2m0Gt18hHR7zldNk2pIz292rRt4iObPYDS7oUFAAAAAAAAAAAAAAAAAAAAAAAAAAAcRbUXnYLFlnQAADhBudYnHMHm8cibAA4Rmut8rsRmTY2DQABHvlmRo7ITkNAAAAAAAAAAAAAAAAAAAaPRThcaHf8cyg0ABFOlN1xW9kclCaAA6FVXI1jod5znQKABqW2M6nbDQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//xAA3EAABBAIBAgIHBwMFAQEAAAAFAgMEBgABBxITFRYIEBEUFyBQMDI3OEBBYCEzNBgxNTZwI0j/2gAIAQEAAQUC/hdh5GrlcLwuXK5PJ+dhmT+VwI2d8aqtkLl+qSyKVJWj6lvetJ6t51
bzq3nVvOredW86t51bzq3nVvOrede9a/b1W5SlcqBoLjdlZYekLI8fGzkSaOeikcqylL4ydlxWZbs+DHIfK7LiMy3Z8GOQ37NajSos2N6ve4nif0FbzLbyXmVSMX9z7Rv/AB/UUj+9c7BJ9dAW57kefJZBPnDwa2MB/MNtHsRLFVPwtvs5Y28UdDc2wGbcbjS5BKxkuTylzIIPS7Mbn7rhIzq+36cobd6MhubYK5MspqRHPGInETJY8H5GdN3FXGK522OXIt+Iaf8A1+/b0abJ2SzvMka1Z2uvcdz+36p8khq1vFOyVtc6SN4/BWDS3x9plSCgexFZGo9zded84a6w5TZYdjf+P6nHXGfSWuLrS+WRpaHGzZ/UaVGKjz1y5CfT5sqn4WkgPiNuk1rq5Ee4+fcFQ65uLeJdTl+Oya13zkQF7ryCTA+I22VWtq5EA1/wQaii9FPmAEyL3Kdh7qDNX6rJAqE6C9+vVr2ocjEq5ZuwTsdnbTtLDn9vJiZa4GxZza4DE1CDgzxmqy4jU0MOq0qOUgVHUJMemusuyaih8wLgIF13G/8AH9Rt/cXm6SOkSCoAlHiJnyWpsCucek9WC3SWZd/qn4WuOIab956Vbls6XuYw2yqTHQtMhhchEqM68yQZeFNEGXZLLyXmvpDv9r5Pb9gj+x6rE3p7l90pvUpBR7vhCgqO7WLLMfDHAzI4ZVPwtm9fdcQ31qhvukHnXn3YDLvthMqyMwtEGMxIS1GivbQM0vwz6R/vrtJztIztIztIztIztIztIztIztIztIztIztI+S3cTTC1u+C1pz4L2n2x+K7rG35A5G03G4jszpWLGahjFoQ43qK11fwL9/4Q8Qjszvb/AAd1xDUeaZNOygRBROt/wa1vqbrJBp2JJ4/n+2T/AAa5E0vWUX2Hpok03DkNOoej/wAE/wB8TVwaV2Gr2Xd4TWRC4kWKxCg/wT3yJ4nkaXFmx8kSI8SG2429H+fUuIolkaXFlp+xjS4s2P6m3G3mcffYjRW3EPR/pKvzN7+7x481G4tRbisyOSeauXEsktqs1mLapabHha3eF3bzu9ELxbXK1aydmXHscC1vuW2OtDXpEhrCs6SDHYq6lHupOeDG2AeTpzd0nPQ6yf1ZK9NmRx4rVsOPDQZmKersqSzCHJuc5yDQ50Yfw95sOrGGzvi/CtVMOweKVWw74WeLRjXBQq0km6V5mFeRXbYeaGPWka1QHLgRgINHogQdu3T4L2t62n6EWFQDPpD74+p/sGod36MQUXZJNSqQtoam1wRZIo8TtlVn49rW/SVN61vnq4/iNJInjPIL0N6DzseCyj3M1MKon1OvfhVSda1xUPQ6v0cBD8T4ecZdO+P7CV2Ep2oFkm1jjH8M76h5fFEB+Hvj+rl/A+AnB9jk1YBr2+jFGMrA+j9KG2J2pDvyq156L8OAhl0Bw2WFWHVLHthn/R1l6udUEWF6SQvRavnX6+Na0xX/AKFsFE3ecDgYYQBuiDELFioIcSZBwDo+JTh0cvmwcXd4lBIsu2kgUQoZJVMeSOs0gNHKNg4rV1RX4bV4h1qDCACxzIoA2NYq1Eioo661x7Ceh8czIkeeMYoglvQMFDADHEIcZTQw6HoFXFwKexRBTehtaHDKvBqA6GBYoglGMVIfHovkAR7vICDJNXZoorSYlZGRaUmhjOkmEGmAyKMN7+taSn6F+/6HYUPud9WVMl/6h9/d4+lyplCkXarxSNskur4qYKRhVDF2kAZl4VsIYJgs2LNMFLMDCvDSw4vAk3OtRHJF3ho5f7sD4lyLdXYmxZoYaiE7UADzYR8SSKY2WEjgdCsaTFfKkI4sBUbpCXT5BODEAJJwV16YRES5JMyLDRRVgDm0lLQBDShxSAVgfSi0efK9IbYS3+wRKfhejSFeORKXSR5YaLtwYpPl+PQJFtwwNPDuSK+XDTb5Q2m5TqWkDfSIpkVhd0V+Zv8A/StRiRl8ljW0RfSG49ablBgEKM
P5/wAq3+Fxv+FUv/i6TBSS4JblqLcX7lvt8SWeO3EtEJtsj6QMsaIavVPnHNQAME43yh9K2Cl75h3/ALV+rrhcdxBF1EQAg2SME2IA+UfdA2M2QwkAKt3IcBLOXRdeOCrEEr0uLYa+ElCTBgEZVyB4JL+LgAJKF2NgJKa5ZGxp+itTiSGOaMAAJIxVTCmwEdSetqphDdfQPqD8Pljyg78YzwOUUshavz92oeAKv29NdPhDIYWZaLfW5VLHPGA4MeDgfVjNgmMWSWatwKLZbC4M47ac5BeiCfMndq52WadwUduZxyP578QsFgjgYfvPIO2BR90/WDh28ABsHzr4tku/Q2LREkpmC7UddAVoR477n9oZPODbV8kY85J5P9RBySyCrBhZ2lNXGWvli2HJAGsSDs6PyhlcPTC4QGavJ8BB86+LFTth1ya/I5CixB9kQU43EFb2arw7zh4rkI3Lk8pesUblzuQfkX1dob4n4T9gfrpR20yLNYh0S7uw3+DYZq2JFCphWZH4/wD8r9qgQOQ8hlrM+Unf/b0lcaixWH+S/wDp3qn1BkgX1/RPJIuC7Bt4iKNgG4sSk0L4fjthJheSY9GmfVYiOKdVuKV4tp018hxxdycsdUpNAitCDrpMhZrAAjVEPbUrJ3ewhIlTk3EONkcr2WEJjQpUqvwbEe2+b5NaqKg9pG10Mrnwm4J+JlTixo0L9q5KRX1PjHo3BRZ1B3kuyvTI3O3jdwyh7WqlUcnYYtBGk7DJKlpM+N6QxK4WJqfCBeXuHagUskfj0URPSyOLDJNc+e6Jp/JU0yBL30IYDwOTIoVk1zSHjar3NJ1urotdPkR2uV6jvwizVnXic4YWkBvRs+H47YOWXkmPRqrdaHwmflJwS4XkIifKmAtiByYfA0a3bZGij3isyvEZdeIjLD4oTrRGXXlIuHW9aAk6VP8AN5D3ariCUaVyFDlTap8l9iSpdduMSVJK2oMs7SvM5/wSZXZg30eyceQ5w3CjSU8I0iO/F4xt4aQaq0ywWAkBKhSw+UZkFrhBOwJO+R77ElTK7cI0xNlMIm+e7O8fPYegkYN8VLN2SyuamiOcpRA0NsFTFPouuXMQTeusoezIrPH4wk0RIw5a+fP2p0OXGrNRMSgFLi2v3omqHL/1DWQExYKuGkmJnF9YPSAtIg2j34tjizIvmtlolZ+QktlKndRxg6VsYKJLZ5cciS9+kEOVOq9tFMnV843oeVbLjRmhVAGVyaR9H/zOf8DmV2YN9HsYlSK5/EUn4a736oBD3/5DR+GD39eI+cvF2vP/AL1mgbSeRBU+2njVZLlXLKCscrYKH54MAn7XOi8cyGL1AFKjxbpVvrxCteIF2qh2pPqon/JCfzBh4LxKggbcDZ4/P+Kn+MFwuKkgq+3CZqP8CYiRYq0Q4qCMeJFi46IFSJ2JChkTfr7z7MaPGLCpzuSSwqE8y+zIjuTIjMz1qOBUSvb/AEjS4s2N9g9LixnfsXHEMx2X2JMX5HHENMetxxDTH0CzPQfjCRrleOALQVJhuOhdMBwRpaKik2qy/jP6rZIkkbimn1lI2uByINqjT4wvh5VtO+FqtEZXGcch36jHt8VXGTlwJwY5k9CDCVW4hBUlWlIsJKJBMk7K7Gsoax7JGsl2p9VhCWSOWbTcJ85I+xQydRiXciSAsXR8oPiWSIX45VZ2BvEsm5z48AhYB42pyrqTGiBkzxGvcoPSk0vZ9Q6lOXAnEisPNSYfKD0pNL2fUOpTlwJxIsuyNxbCbtbIW2ljfhp565PauAWzKI2D9S6Ugkr3YKQCggjyp5PhrxiBuqW4tAtDVl/pzJ6rHtInmfW9bSLsLZiyVwwoDwLKG2J2ojvyqxHmmuGhLYd30cZLdyqoexyZBKzla+efr4pnUatXX/tZUVHJcjCjVhh3zOPt6TCNTR0nKY4w5xgC2lyVQUp1xHxp/Tjmu738NSn5XbRrXwqIb6aBeXGUcV1n8POTdb+Gl79iIl
idjt0CkIdRxZybrfw0vfsREsTsdugvw5H+nWe1q0N1+Xqy8n138anfzK/qTNdFnWtUYc472mvdVUYYiSigxoEs3V4pyf5MxCehqdAhkxnkKAmOOGQRIqHVhcOnMUQSjBddHi6szQhLSIdcGxaQmhC+2RCjCoRFFGbWlOkoJg4hUgXrA8wQE1geJIYRqQ4gZDgh4SA7RRW5MYKPhVoQKjha4DCRQASFWIEEHIqg+RQp8BoiBsXuADjU3Eq7VQDRVwqmYERDYJivQ0VVFCE+1CEobMCIhsExXoaKqihCfbIhsSRAWvwANdAV2BXBkMHFg2fYOKq7/q5ZPccx+uUhK244gTDk/R5slEIXKc3tNTn++1T+DXKYy2MntNon0V96OT/gv7WKa4Wso+S2KJRZRaA9AmNzxX8F6de05x06WubTWm4v/h3RrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWKTrq6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NfI4/K8UjLlOyde9J0iW+uQ29N8FYRJW2hUvYSI4pyJ6lOL0ZdlyG5zj0pTrzktG0K6mvlV9/wBXt9SlaQ0/aBsaO1yDX3n49igyZk4lDHIXyrXm3W+VK84oYUiFxvtyYaiwkxJ7MzcqS1EgeahicTdBTkiCbiT5jryWkz7mCHSYthGzI7D7chhx1tptZBpGNuIdaOnYFdr4nkEGXkEb0BFnwloEWCROOwoEzzuC958yQfZHfbkw/wBFttvaNQoepHSnW9NNpxMCCjGosWOpTDC46EIba9T0aNIzTLScchxHt7hw1a+ZX38Vv2afItsqJ2KPCEjUTt0y8uKk3SNHa1XqlP2Qg3GShurBYreqSfS1OptNW45WHn3ES7LMa8YqEtmSHPoS7TGVqJHFCxraKMpb0IyveoRWDGdyvvPjS1Xe74IqXbW7ILMIbDkF5zBrSuNKuLWQg3WLEMzOJIvZJckS39XGCHU6NFKkDSwDfVU/0U7Xbxja9w9ve1DsrTrqlr3Bj9bBH5JDLzzkFS9okr6yrSnpkHTrkXNIdiufIr7+TJbjTsiMrREDHmonPf4FrebRfQjrrKK+yhvZ5CJkbStQmtdlyOASwmOevMIROlm5M2yViz7FPEpPvnEk0eyxJiLjqcprqH4ljnpjGSbiJKSEuDFn8eykS6bYorTUnTwuSErkaI9G5cQtfGYw9MHNEHpnf4bWr4icjK6uTGzM3o70/s1J1L/H/wCicVORIZ0qKw+1Kcd2y9FlaiSERI6H1zfkn6lL2h51qBL09t1TM1sa2065BSiY898ivv8AySWlPD18b2qScGceEoc0fXJbMwtBURBweOSDap3HM11Q2E7Fj2bjAuUt0LiewxpmuILF3/B29UcnSCT8FNHN+5AwWxUO8VA1YCUSk3NgVP4puM+fR6y9VKSaBLLP7Bmdg6+Akh3SEJueLm8SF/G18RWRW6JQ5VXMW/j8kctrPGFnjsP8a3OUmuB/AaV9GV9/6p0Iztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztozto/hq170alqcU5rftGxtuNiIE7TkyK+/wCXYTutV5LiXY63ZLUKDtPs0hWwyi7aZrMl596dpHaXuX7q8qR4oyvfiv1hcTTkl2P3mvDU45H07AfjofSgcwh9lrTEPFQ2lJYj6j54cno3E174iHpuQ/D78p2Al7a2Uu
O6ia96/wDSp5JuHqCWRtjW9KT85UkuMvTs+dGGyZTk/wCcpIcjDPF5nuouQ7IGfPIJR40vZKPooySjvkPpEyCzMajiUoGj47sUd85aO7JGteJO6hCFtFfnfZRIi+DSvf2GUR4vzuw4z8j3ON76iHGblf8AnH//xAA6EQABAwEFAwkGBQUBAAAAAAABAAIRAwQSITFBEFHwBSAyQEJQYYHBEyIwcaHRBhQzkbEHFVJg4fH/2gAIAQMBAT8BVOm57g1uatPJFts7b9WmQOuSpUqVKnbZH+xqNrOyBX4q/qSatnNFlKZ/dcnWs2ineIhRgUDMlZq8E83TC/79NhIAnjKeol+5TgpVHlRj3vaQRdKOizV4Kdok0i0K18l2t1o92mSrLyeLK27twWCdBWiKu4R1CVczUYJwkEKlyfTpvvAn91KGWGwRG0EgyFyL+JDZadRlTG8FXq3p13bLquoMCuhAdVgKFChQFA5j6L2gFwie5HuutJVnu1KBqmtkYDdf/O5AQJBZeB4lVuTnU6bIcChlj3H2o2zijgpROeyZU7Ab2SlNxKYZEqclOCwiVPUz05Ueqb4qMVUF69xuR+yIxPjCaIgbiqTYuhaNHGSnJUvdTQBdG77JpxaSmYAJrYjzTd/GaY2GgLHNAQOpdqNm5Ao7DgJWqBWaOCOAQQQQPVj+oeNFHqm5FYyn5p3a41RGDuNU/GfL+E7Pz9F2ncblUz8yieiiIm7plx5/REaDwTsUM+rFSpR2TkidmYTfhDFXhdvJxhBXkRjCDggVrs1jaDzT0sV/j8vVHFvmnH3fNZghSndIca7D0Sh0/L7pmbVTHQ43Kj0FphmsMY3Jwzjd6p/RPyTtUc3JqI9+7p9+Cuy6eMlq35puSONQ8aKMG+aGaqZu43KpjKcOnxuVXVD9QDxnjzVPSeMVHR8/55p7kdsfm75+qd0vMrdCZlj3Fn3Hm4oY3U/tRu9dhPvoAYDx9EMS3xTHYMnjBA6lYyjmfJDNDF3mmmYPimuloK1j5/RAm6HeBR7Kd0gY0K3J3SBjQozh5px3J/a8P+fGKq2d9NrSdVGajYMFChBRsA2aygNjcFGKlQpWWKgfHqvutJVkfZfYlznkVNBp857ka9zCY/iVaLDTFJgZU+iH+kEGVCDUBsIKuocxrC7JOY5oxVns7qjZTGS+6vyDbsyqjIcQFUswbRD1ZKDKoIOatHJopU7xVjsjq7iAqtF1N5BVGxS2XKvQNN0FVrG9tMPATaTSU6xwE4QfhNMVSiypJj14/ZUw68MNpI9tqnhwqE5hXThnzLNaXUHXmq0V3VnXnKzWoU2QUHw+8v75avy/6WKe6TITqzy2CcFyfaGUjiuUeUKdRl1pwXJFrbQqEuKtto9rVJ0VC1gNAJVWpefMp1qJp3CVSfddKNuZHinOlxPd8oFXggVexRcFKvIHrsKFCAUBQoChR10vAMc0uEppnmOqAFA8wuU9VLQSgIHMLcU1oHMcwHmwO+v/xAA4EQABAwICBQoFBAIDAAAAAAABAAIRAyESMQRBUcHwBRATIEBQYXGBoSIyQrHRMGKR4QYUM2Di/9oACAECAQE/AU4wLpmk0nmGm/cg0Y1aTgM8k3kd2iOxuzWjcoNfVFM5lfUAi24ChYd/smCRKaJ9vdap42JrCYHGcdgKwDWVChaRyW+m1hBmRKGtYSsJlYefkvRK9WmX02zhIlf5CxtOn0mQ8VyVU0Wtyw46PUxBrb7AScpQznmvK4/lAEZK49vZC3HqsV5429gIQftUphgyq2nVKjMJA/hQpPNnz6HypX0VhbSMSuWqukcoaMaL3TK5J5KdolR7sIAdqCCxLEsWaLzKJ7LKlSpKlSepS0qlULmsMkZ9yV6nR0y5cjPwVnEn5u5NPoVKzMLVR5HrCqXApsxfuNximHI5rz5hdQoujZEQoUp3w5rbKdICeMJhRnOpQsJmEOxkHooHjvU3PonXhE2TDGDw/tM/K1DwlP8AiBO0b1Wu5x
4zX1OKjNVviT74jt/Kdk4DWn/EXKocWLxhOuDxqTjLyQhGSNz2J1mYkc4W3mF1qTRiMcxUICU28rLnjszp6GB471rPon5hGMKp5JurwG5MMFnluTdXkfumbt6+hqbl6BYZxDaEIJE68+PT3TCcz4+6bY34upsOzBRzC5hFQhda4UQ5HrbOcHWiYuo+LChcxxtUotX0ytZCNkMgebx5pRHVHy2QgYvPch8x8t4TbOPlvWTmmEBdM+U+e4IWK+tvmET8Hr+FVmHbLfdVCYeRxmqsY1rvkhm3Ft3JhuJ27lS+cE7d6ZkOOLJmTU9SQzFrt7IgB7Y8d/8ASd8jvJPuVbo2zlf7lfU70TslS+VnG1UrAJh+Ty3FUosPDcjPRk64jj0hVs3Rn/X5U3dxq6ov1Bt7gZzDUP27gtXoPsjrTomB3AO5QcLURGLzVPNs7T9uYf8AHKk3PgPunCGu8E9t3AcXRExG1WhN/O5HKUXQ30VS2IbAnRjc3Yv/AD7qAZHiPdCIPgmZR+5bUzKP3K0/wg3UUwTh8R+f1hZaNp9KvUe1mbc1OSmyFkbqbIlG4UnmJlDYtUI5yhx6c2K0KFNlCNxCk/r6VV6Ok5y5Iq9HWuZnuTTNGdXbGKFT5BioXSm5X/6RNlKJCJ5gQp6tbSG0xJKp1mv+Uyi5ya4wZX+0ZhGpDQSqWkOdVjUq9SoxwOpN0wuqADJV6opiVTrYjCFMCJ1qq0sKo6aDULHHJPqVI+FP0uoAcLpVFxcwH9J4mmFip8Rx/KqFuEwefCeii10yCwbUSPDgdTT9ApaXT6N60HQmaLS6NiNG+aDbFf65xSgxpAxLo6c5LSaT3OtkqGi1A+SFpVJzm2VGjhjaqoa9rIN1pUEnCZTNFDamMDNFtk7QJNrBNbhbHd8IhYU6xWHWsJ5sN0R22VKlFSpUqe3BpInqhtkR1G0yQiI6mEqOygmESSeoDZOcTn1GvI6s99f/xABcEAABAwIDAwcIBAcLCAcJAAABAgMEABEFEiETMUEGFCIyUWGREBUjQnGBkqEzUrHBFiAwUHLR8CQ0QGBidHWCsrPhQ1Nwc5OiwtIHNmOlw9PxJjVUg4WVo6Ti/9oACAEBAAY/Av4lmBKMuXLT9I3FbCtn7bkCmorEHGw44dMzTdv7dfvef8Kf10WJOHY6FcFJZbsr2dOv3hj/APsWv/MpuOtGKwQs22shlOQfCon5UFJIKSLgj853NfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VaoV+JykKiSfOb4//ACKrDgVqbeWc5SnelFr6+0cKysNOPK7G0k01ZTMAtm6dvx8KdirSgvIWpN2lZkKtvt5OTq1G6lYYwSf/AJaaZjvSo7T730Ta3AFOewcaaivTYjMp36JlbwC1+wcfxmY70qO0+99E2twBTnsHGmoj02IzKd+iZW6AtfsHGrnQVtochiUzf6RlwKT4jy8y5zHEwpzbDaDPl7bb7fmJttx1tC19RKlWKvZS2kutqdT1kBWqfJ/WH2/lUfo+XGWSjaJOMPko+tZxRt8qkq5SxZEzE3GULaypCkt5+koWvvNwb04zyawmHHCEqOZ/UgJtfQWHEVtcaL6EKJsWXMoPb1TUTDcGZbalNLuvZA3Wd+nfvtSX4duaSkbRIG5Cty0+P21ya/oqP/dJrktNbYVKdaU7kZTvWrogDxrGMTxYrXypbd2bzbotsE7hkH3/ALGbMhY2zJRHl5TDi4dtGG03sAt5QBurXdfuIrzRhuKowuMrDUSCTFS7k3br7+ysUjI5Qs4V5v8ARMtrgbUzVp6xJA6Fz2dtcjDhchmArFEr24UyFpuLAnXXTUjWsawDF5zeJmKhDjcgMBs6gaWGnGuSs1thUpxou5GU71q6IArGMTxcuL5Utu7N5DotsE7ugOzv/Y4jKfxpDcCHNcb2AhtkvAeqTwA079TWDycOcix5b+KlkhEVtCFDXQgCw9o1pzC8Wx
FrGIq8PVKSUxktFGXNoLezjepHKpGLRG2nFdCGIafQpz5bhR3nuN9D21ExN+7pRyb27lvWsCo1BnO43Gk7aTlfwhMEp2KCd4ctqff+YDa2bhXN5paw9WH9ZcfrXO6xv3UlUNQxNeIdBJk9fNcbzekF0BLlukBwNf1h9vlWAt5Locs0gK8KYjKjvnMOkQgnhw7axKbCd2EppAKF5QbdIDcdKmSnuWZxyNEiqeejjCNgQBxzfdUFrEMDkYXHng8wfU+lzaaZrKA6pIrkcqTMzCauXzv0aRnyXy7hpbuqNKXgcpnAZMjYsYgX0kkk2F294F+NQG3IGR56euK+kPX2GVYRm3a6qT409KDGwaElxto577RKVZc27S5B08iP0fLKWyQHPwhcAv3vEViymXEuJD+W43aCxFCQ0woJfsl9JuUtaGyge821ppUZb0Tmuo2PVdWb3SeBAFt9Yfifo2cYTICTHB0WO0GlwkPJeDMh1WibZSog5fdauTX9FR/7pNYJivOtj5vUo7LZX2mbvvpUblDBm8wfCcktvY5hIT36i3+A7Km4Y3yiktYS68Xm43NQcir8VXuR3aV56VMDivN6YpaDNt1ule/du+dT5mDY/JwVM/8AfbaGAvMe1JvdJrk9ME98+a830w2i3rgb1X36VimO86z88aSjYbO2TKAN99d3ZWCYrzrY+b1KOy2V9pm776VG5RQZvMHwnJKb2OYSU9+ot/gOysRj8751zqUt/NssuXNbTeawzCfOl+aT+dbXm3X/AJNs2ntprHlvFxDUFUdUQNX2l83G/fUzAoOMY2pXOMsTA3oOV1K84vmUL349HTXhUPE5L6VBGFCE7F2eitNelf7qixkcp8R8yx3c7cNCAhXblLgNyO635gIvl031GVEdXiz864cQ4m2Yjvp5uW4rCHIIu2htObKTxvfupCVKKyBqo8a94+3yKTDW22/wUvcKzmVDU5wcPWHsOXSr4itiQ6n6NaRqPlUvDNvzbbpttMmbLqDuuKfgyAVMvNFtdtDYi1QXcQxx/FI8AHmLBYS3s9Mt1EdYgVyeCp+2GGKkXGwtttrf+Vpb31GirxyU/gMaRtmMP2Kbgg3F3N5F+FY1LTN2Rmt+iTsf3uvoEr363LaT7qh4ehWdLDQRmtbMeJ9+/wAiP0fLjEkaFrHHVg+x4mpD7dnHVOFS0qPSB++lQ8cwl59l1GQulokAfyrd9jpUbC8BwKbIW2iypYYIClHeqxG/vqNiOJOIh7JwLS0F5nD3m26sTkRl7RpchR999a5Nf0VH/uk1dXbYWG+htWXmAfWVYj5E1JzXQGCM5PeLi1NrkLREKtyXlgH7aSFvsoKuqCsa0plD7S3U9ZAULitm3IYcctfKlYJpcxIcDad6SNaZaAcSp1kOpuOB++itIIGYp17jb80+8fb+VR+j5eUDZJSk4s/f/aq7a9D6RhIsjajpW93GrOLO/eFaCkuuuuyiBqhKlC/2Vi6MTVGbQtKubqa0Wgm4y249u+n2IbbWI7FYU9MCiFtJV1UlO7XuvXJr+io/90mk5L5tmoIt26fO16cDTC2kFjKFKQUp14m/ZbjrU19pTgWhxC2L9RZCR49lXDMmMlbPqxbuKOt0EkWA9vjTBLa2lDD0t5lJ3GoTThxLas70lpIQnSx6VtQe41gnoVoKFHadG2Xoq3++ojGxcS242lbhI6pQNx/3fA010FNOJhNZFKT1VjN+xq7jamll1ZKFDUXUfzTauPxGvW+I163xGvW+I163xGvW+I163xGvW+I163xGvW+I163xGuJ9/wCJKxXBp0NrnSy481KzJyqO8ggGv3/gH+3d/wDLq/P8A/2zv/l16HE+Tyf66z/4dZUY9gjaf5C1D/wqUrFMcgJYe/fK2XFuLWOzVIqPEYFmWGkto9gFhRStIWnsNXO0X2Z3FKHzP+gxLCic3HTd/Ehbi1JQlIuSo6ChOUjZNuE7K1vl7KYkuZNvudCTuP8AEcRkKbSqU4GQpS
t1+yuaPOPyA0LpVnJCL9gqXBu2bp2m/pdnv/iOWmXVSWmU2twCuNqdXiinWWcnQy8T30y0uO4wdvcyEDpgXpDrSwttQulQ4/xFtRUIWv8ArFW+2pIwyG/zFxfoC0sBAFuOulNh+Eja5RmKFqGvjSI0dGzZR1U/xF5lzqPzzLm2G0Ge3bbfbybaHJjymr2zsuBY+XkXIlPtRmE9Zx1YSke80h1taHGli6VJNwR+QVDEmOZaU5lMBwZwO22/yOGJKjyUoVkWWnArKocDbj+S20OTHlNXtnZcCxf3eVLjS0OtncpJuD5FvyHW2GU6rccVlSn2mkOtLQ42sXSpJuFDt/NTf9E/eaNPSH1hpluS6palbgBSpmFck587CvVkKkJbUsdqUEXNT/NAW448iyWl2SoLSQcpvuOlYJh3NXsQxN1pLDEZoi61JSL68BUXDMdwN/BHpekVfOEvIWey43HyNYKnDJE512PtGtivpKVc9G3uve9Kw/GsAnYdPUjNFZadD/ONbAAjjUXCsawJ7BnZQPNVmQl1Kz2abq8z4VhcjGcUS3tHG0OBtDY71HdTWC4zgr+CznklTF3w6hy3eKxlxxaW204UkqUToOprUow8PUMHaOVE5xy22V/JRbd33rlBiMTCGIAhPvZ2miBtlITfMbJGppOIYZyUmTYyUXfXzkICTxCbi67dwrz0hZZiBJU7tN7dt96ViTXJfEXMBBvzvapz5frBveR76XPEbmqQ+psJ2ma9ra7h20/NlL2cdlBWs15xhcjpj+GlOdDq5iELUntyWJprEYoWlCyQUL3pI3inpUlYbYaQVrWeAFec08mMQ8w7+ebZOfL9bZ77e+pE+UvZxmpDi1GvOMbkdMdwzJnS6qYhKyntyWJqXiWFRHJDT7SkPBTgQpgWOZXfbsG+9NS8Vg8ww6JGTsnw+Fl8fojdw39tecWeR0xeGZNoHVTUBZR27OxNYjiUTMGXWOqvek5rEGoLsLkxOnYZGjIQ5KD4STlABKUb1Cvwh2yjAyX3dO+7Lb619KOJK5GyxhmXPtTNRnCO3Ja9N8ovTKhuJGzQE9MqvbL7b0zJxvkxLwzDHFBPORKQ6UX3ZkjdTLriXZTz6skaOyMy3Vd1Mq5QcnJODwXFBIlCSl5KSfrW6tAg3H5jRCxJjnEbzZmy5ynW57DX/uj/APad/wCasYSxe4eVu+qFJv8AKsOeg8tUtxFR07NCcJaVkFt2/hurFsmORsadflZ31MoSnZr4ggKNjWDxH8UfwnGs6jhzzaDv0uOzs4ioHnmRBxvDJEkMB1Ccjyb93/r7fJEJ4YSbeKq5JXH+Re/sqrkN/PVf+HWIYJg8+Pg0aClJekFgOuLKhwB0tXJtiVjMrGJmVSnC7YBFwqwCRurGYEaYYl8NClf9pa1knuvahEWyiJPgHm8mOBbKRxt3/rrlx/OZf93WD6W9D95rlGlgHMJazp9UFBPyvWHPhTYhCEm54ABOtPlGiOfOZflU7Ew2Hlso6KTuJJAHu1rzni/KxUKIpjbLYhR0oyJtf6TfQ/nK/urFQzckJSVW+qFC9RX8zYg8ySSb6BOWpmJtth1TchWzQd1yQBfu1pzEsZ5WrixTHLq2IUdKMote2ffWKAf5p+oGJNtpecbioDaVbrk2F6kYhjXK1xmPzcuLjw2EtgaXtn3mpv6K/wC8rCnW1IEZMJNzwFk61ieJxmQ4HcTWISV9XWwv7N9TpuN8rXQ0mMpSo8WOlpJNurm3kHdurDWMckLiRHFkIeQkkoXtFW3A0ucnF4WP4VHHSRJRlcy9x4/F7q5FT48w4OiXGJYfcZDuyWobrHS+oFSGcY5csDDlD0xdwtpCd/bm01qEwHxKS2whIeG5yw3+/wDMY5QbSRzwR9hkzDJbwvf3+RWGx1PPx1LUo7chR138N1OiHiOPYXGcN1xYkzK0fdakQsPYDDCTftKj2nvpMee2s5FZmnEKyrbPaDTEyTNxjGH2DdjzhK2gaPaBbyIx8u
P88TH2ARcZMt+y17++sOxlxyQJUNKktJSRkOYWN9KwqdIckJdw93aMhtQAJ036d1ec0SsTwuflyrfgSNkpY79KgTmTNTNjO7Qvl7Mt4n65I191qkY6HJHPHmAypNxksLd176dtO48w7JZlOt5Hm0EbN3vItv3caxXDWnZamJ63FvFShmTnFjbSo2HR1OrZYRlQXD0vfWJJgR5GKIGd8sPLF3L7x1furn6sZmw4ahtHcE84+jzcU7O1zrTW3aLCn3lPJQRuSd3yFPw5TYejupyuIPGkNPTcamwUG6IT8y7Cf6op6JCXIUwt4u2dIOUngLDdS23EhbahZSSND3UUJkYuMOK85w7nZ5sf6v8AjT2B2elQHVKK0vqBOvsAptt+ZjWIQmzduFJllTCezoi1SsHbMh+C+VZ0PKGgVoQLAaVMwxcjEcQgSEhOwlPZktgX6lgMtNtyJmM4jDb+jiS5ZUyj+qLVJ5PIfm8xeUSSVJzpuQdNPuoR25uNMQrAOxWpeVp63FSbbz3V5mciI83ZMoaTpl7Ld/fSG5czGcVjNj0UeZLzNN+waUeT6w9Mw7XR8i+pvvAHGm2pGI49OgoN0w5E27I7NAK5jOjJXHHUA6JbPAjspnnuIY5izDRuiPNmZ2h7rVYaD8685OFYaZOa+1MVOa/be353RC51I5n5sz7DaHJe51tuv5C9MkvynucrGd5wrVbsuaMR7F2A8DY5UKUB7wLViMzC5akq2QcakR3eAUCbEd1YbKxmamP+5m9o48q6lKyi/eTRYw7EWn3/APNlKkKPsCgL+RAxSe3FUrqosVKPfYa0tzDJrUpKetbRSfaDrSW8SxFqO6f8mAVq8Eg1zjDZbUtq9iU8PaOFPIk4ohtbTxZWnZLJCxv4fPdSW14qtGCNRihSdiq22vxFrn7K2XnXEPOHM78wznYZL9e1rZvfUrnOJtNKjPbJ1JQq+bsAtr7qU9hkxuUhJsq1wU+0HWhGxHEWmHz/AJMJKyPblBt76XDgTW5T6Wg6QgG2U8b7vJyhmtY1iEsMyFB0ysyxHcOgQno9W/urm0qcuVjKczj4Uk6Jzaa2t2aCpU6U7sGWk6ryk2O4bu+n3MaxNxye0VOyCWVnKnNYbha2o0FHFH3wiAGwva5SdDuOledkyEnD9jttrY9S171ycleeMRipkPXhoYzoTK3aLGXd7bb6S9icxqIhXVzalXsA1NLOFzm5WTrJsUqHuOtBjEcSaYeP+TCStXvCQbVznDpbUpntSd3cez81ttYbiXmqT5svt+bh3S50saP/ALcf9ztfrrF3mFFLweWnMN+qkpP21FhReQbD8NbAzKOKM2fuOsRbjWIxsQgnDo5lFyGxt0u5En1bjsrB8SwpuPKkYe6V81fPRcvb56Vhf4U8m5mDYo27liSio7PN+kLXHiPIvlLhMCPjLbkcNLYUvK43b6t/27qfLmDTMC5SvM+lbfuNqka6cDu32FY9jMhAcxF3EVoLihdSEi1k92/7KQzBSGWZ2HlyS2jQZgT0re77a5avraQt3zipu6h6uZWlNf0T95r/AOj/APFXLKYtpCpLc7I2sjqglW6sdbYQG23MPS4pCR63R1rEsXfQlzEJU1zbOKHSA+r7K5RR4iEts80SvIkaJvkJ8nLv+fvfYah/6xz+2ak/6s/ZT0BdrP7ZGvfXJrk84Tt3sQ5pJTxCGjc/LLTvJdKjz/zt5tSOOUqzX9ltK/6P4rWjbMzIgdwyCsacmJDpgRm0xUr1yXAJI+fjULGFShExbZqQ20l1KOddxFrq93dUzEonJZOMyJclRdmnEG21H+TYi4AqViT2AjAsPkxrPtJltuJLgOiuj+rt/NaeUG0j8zELYZMxz38LW9/kk4FixYfS+tefYqNrK7yBrXm7DcTwSXARow7NbWHW09mmht30W5uJSsVluLzuvOqNgexI4JqBPw6UmFi8FZUw4tN0m+9J7qg/hHLwdECK8HgzAQv0qh2lW7yKx/k9LhsS3W
w3KYmJOydtuOmt6Rj/ACglwnZjLRbjsQkENIvvN1a9tTZ3JiZhyWJi9o/DnJVswv6ySnWpOOYzNRPxh9Gz9EmzbKPqprlDIkLYWidNL7IbUSQNd+m/Wo3KDBH8ND6Y2wcam5strnXo15/K4/NOYbDJmOfNe/Za3vrlFLkLYU3PlbVoNkkga79O+p2PKXHMR6GGUoCjnv0e61tO2sUk8iMUwp7D3pJ5zGnNOAMO8cul65Rc4l+cH0xk84fCbDOqxtbgBu93k5Qc6cjqTPmLdb2ZJsk9txvpWHS38MfwpGYsKazbW5VfW+lt9KSdxFqcw6S/hr+EJKlMqbzB65I330tvqXjSnmFYcSt2OyCcyXFgZja1u3jX4QbVnzd9JsMxzbXLlva1u/fXJ2ZHWwluBK2rwcJuRpu07qRj+ATI8TE9ns3m5CSWn099tRTeOco5UJ+WwjJFjw0nZNX3nXUmpbvJibhvMZKy4uHiCVZW1H6pTT+JY5ixlyHE5URY5UmO0O4Hee8/nx+dEmYxg775u/5vlbMOHv0pUeA0pOdWZ1xasy3D2k/ndjA8EhNzsWca2qy8vK2yjtV2+yufY1h2DTcNSfTKw1awtsX32Vvo43hojvlQbU1tQSkpUR2Hvpt5P4GhK0hQvzinvP3mTJYbHmG0v33zVjaZTcdHM5qmG9kk6gdtzv8AJiKsOb5MNsRpSmf3Sl4KNvYTTHO/wS5tnG22W3z5b62vxpkqZdmTZC8kWK31nVfqrnIw3k4lO/mheXtvZm6t6mOYa01Exlg7NyNLvlac/lW1tTMqY1yVcbcfDKQyHibkHtI00pnzn+C/Mb+l5tttp7r6eSHGyusRvSc7beiLD4I6mX2/tamJSW3mkuoCgh1OVQ9opMqOwiTKdeSyyhZsnMb7/CnPPvmsP5uhzHPlt35uP5XAsMbjoc84PZVOKPUAt89fxcRwHYJQ1EjBwu5rlZOXwGvlmOw0trlIZUppKxcFQGl6iYk6G0PLuHUtjQKBtXmgx4wwcyFRkSLHOXUpBI3236bqRIhtsvTXX0ssIdHRJPvFYPgRRFLEqIXHl5TmzAK3a7tPJi8mSiOhcWY4y3skkAhIFr676TiMNHJNthSikB4PhWnsJpnzn+C/Mb+l5tttp7r6Unk/greDaxNvnmpc7/qn7qXIVG5Ky0oFy0wXgtXsvpT+ORWg262ysqaXrlWkbj3VHxKKjkkhh6+VLofCtCRwPdSPO/4Ncxsc/M9ttO62bTyYzgi0RxFiMoW2oJOclQG/W3Hs/E5QYS6hgR4OTZKSDmOYcdfxVZLZ7dG+6ked+Y8+uc/M82z7rZtfyLOP8nprMXE0tbJ1t4dB1P7fsKWjlXyUQ/hytHnopzoy96dfmRRew4NCCrZFgNpypCcwtYcKjJRyL2iA0nKrzu0L6eynlYrg3mdaSA2nnaXs/wAO6uVX9LL8mOowvk953aOJOFbnPkM5T2WNMtTOSXMYyldN/wA6NuZPcBrWEIeHo2sPUtkHt6Wv7dnkedYjMMuvG7q0IAKz2ntrDv6Tb+xXll4g5iEtM5bjaor460UJ9VPt1oXufbWF4gtnNLVNbjlec/RnMbVyVwzCQvD2lYpZBQ4olBVoSCb1iMnAWlR5cpSGs6nCux16WvHfW05xO8/ZM3nHnKs+07d9rX9/fU9+Z0pbaksuq+sQ4jX7KONOPzXMcahpfTLL6rosL5QNwFHHsUfmTMZXCMhEkvkbKybgJA0t7qwqVJcU6+W7KWreqxIv8qabgOFmZMkpjIcG9GbjW2wiTKjcoEdJE9yQq61fyu49wrk7yVelmJzhgu4i5FVbPYapSew2NJx/k4X4TsZxPOGtspSH0k21BPfXItMeQ7E5ztPStmy0pITex7bXrBsYwMyIrxnIZfG3UoPA3vmue6uTW2j5+fulMrpq6YTltx091YWziWNqwnAI6MnMG82aRbQC4OYgacKwWXyOTicW8xLb7npNg4Cer0jqe6oPJjnL8b
DRF5zLDSspe1sE37KgzOTTohQ81p8R19ZQ6ntF79KsYimHePFaQ+wnaq6K+gb79dTxrFUcum55jFY82m69glHdl1vu+dS/N2O+eMLU5eO3fMY38m9/Zpp5OWeHO9FvD3lSmkn6ihcfYKgYxYnEWpgxNauJKlfqI8K5IQ2ulGQ2cQX7LdD5j51gLsCD5ylJhKyR9sG83X9Y7q/6j/8AfLX6q5RKcb2ThxF4qRe+U5U6Uy1h3JjzpG2q7P8AnFtq+u6xF6S1iPJjzXGIN3/OLbtj2WFNu4bhvnWT5stsOcBrS51uaawhfJ1nCJ8oWYekzwpvxAsT76xGCp0PvmM648sbiopO7urD2YPJXzjFSFZJHnNtvP0j6pFxSm8T5OeaWMlw9z9D1zppYe/yY+xIeeRh6WGlSGm1lO26CbJNuHGsFYwxx9GD4mpTTkRbhUlC9LKF/aPnWLN8qsSkNYbEd2MSCkOZF2uCs5BUDD+TWIvy8FmhSXYqwuzC7aFOccf24VyqZmOv8xQWy7HbcKA8culyNbDWnsDgOPDCpEHbpjqcKg2q/C/v8alr5WY4vEVK/e+HthwJjjs6B3+21TMOwkYixgbsPaIjS777jpJvwrlFyacNm2Hucxb/AObV+rSuVHKdeokrUxFJ/wA0kffp4U3IhnJKW+pppf1SVnXwvW05xP8AP2TN5y5yrPtO3fa1/f31PfmdKW2pLLqvrEOJ1qDjFnnsWdjAvSHHSc+YAnTd7PxnuUeEwlYrFlNBE2KhVnBa1lJ7f/WpGGYXyWxxqRIbLSncQYDLbYIsTcnWm8FitPz5LWzFmWyoqOe5IA4b6YaVyV5ZZkNhJthnd+lTjPmbH8NyozZ50TZJPcNd9coESuTvKaTzjEFuNrjQCpJF+02rm3mPlDh3QzbWbD2aPZe++sZZl8nOU8gvz1uoVGgFSbe8ikI/BflijMq2ZWG2A9vSrDsbwXJ54gK6LazYPIO9P7dtZDyO5R8/3ZNh6G/+s7O+1YljGNKSMTxBYKmEKullI3J9v6qgNw4siWtOIIUUstlZAsrXT8XDG4kaRKWnE21qS02VWHS104VyVVGjSJCWsUQt0ttlWROmptuFSYLKgiTotkn6wrmP4K4z58ybPPs/3Nm+ttN1uP31KwlDbkzEFZVuIYQVkqLiSQLdn3VJioZeXJOGZA0lBKycm63bTcVTDyZXmoo2JQc+bJut21hkeUw9GfSF5m3UFKh01cDQRCKROjPJfj5jopSeFKwyFycxqBjDydmuQ4jIwz2qDn2VyfxrD9rjM7DmdlKQtz0khJGpBPHfTWCw8DxXCorjiVTJOINbLKkG/RHGuRqo0WQ7GjKcDjiEEpbFha54VhiIkaRKWnE21qS02VWHS104VyZxePh8zEGYUhRfRFRncsctrD3GsE5UHA5+JYfzPKqIGbvx1G5vk7dawqXH5PYnHw2JMSvI636dw9uzF7AW+dQuVOGQ14iEMbCXGb65R2p7T3VhqIsHGsAwmM7tZTkm7C3uxFgdRU7EVYVicyHOjNtodis5wg9EdLsGlYjHxfCJ3KPBX1ZoiokVLpbH1FJ+81i2NIwpzAcNkNhDEJYyqPaop9Xdu7/I0cNjSHI+Kx0xZjjbZIbs4k5ldmnb30/hmUJYXHLNuwWtU6bi0WRGeZYbhxw62U3QneRfhurAZiIshcRuGsOPhslCTZehPk5QtyYsmOtzEXlNpcbKSoECxF94pvDpnJjlY48lxSiWcOJTqe8imI34N8q421Xl2r+H5W095N9BTczm0jmfmzJt9mdne50vuvT8FyyXesw6f8mvgaxXDsVgT28TjRnGgpxlX7o6JsU/WPs++oOGyuS/K1x9kKzKaw7o9YniaZi/g9yoh7Q/TSYGRtPtN9PJjmKsYNPn4atltLmyb1V0U6o4KI7Kw7FJOGTMJwnDbqZRLRkddcP8ngN3hWKymcKmYvgmIu7b9xpzOsr49Hj/AOlJ2eDO4VgqU+lXiDZS+4
eASm+nvrlZJdjSGo72y2TqmyEuWHA8ajzObSOZjCygv7M7PNmOl916x0zOTuK4oqXKLrE2ExtbpPqnsrztieFPw2JUIpRl6aWhwClDQK0+dRMYwSNIkynI7kOQllsqOVQNjp79fZUfDUC6mo2U24q4/Ok4S607CxAOKcaQ+goIUFki9+3765j+CuM+fMmzz7L9zZvrbTdbj99SsJQ25MxBWVbiGEFZKi4kkC3Z91QELSULTHQFJIsRoP4pOcng3J56hjaleUbO2nfe+vZ5ZP7ixCFsXi3+6mcm0t6ye1Pf+JA523Jc52/sW9kkGx77kfn9zzV+DPMdNnzvbbTdrfLpvvTe2/BDY5htMm3zW427/IvlBtfSKhc32WTvvmv8qxyJHxViBGiTloEpUVLiwL6IA0HDUnWsXwDGnGpMyDlUmQ2jLtEntHhXKifir5fbgTHUtgJCbITuGgpOMs4vCw3ap2kaBzRKwpPDMs6i9Rp8nC3Y+Nvvc2biPIKAp3t11y8aXiZxuDOcbTtHMP5kEotvISsa1gWIpUY6ESUSspTm6twUf4/n9yX5/wCU0LPb0MSdkbTYW0Fqbd/Cfle5kUDlXiN0q7j0d3l5Xf0uuuVH80a/sprl5Cji77mIvZB2kWNqiCZOYiSYbAaejuqs6FJFtE7zu4Vh+N4lhyS0zO2yorKVAqjbrnW/hbSuf2irTluGUzXdr7Mme96hjDoUnDoRTmbjv9dNzfXU/b/EN0xo0eMXV53S22E51dptvNOy0Ro6JTgs48GwFrA3AnjTnNY0eNtF53Nk2E51dptvNc6fwzD3pP8AnXI6Svxt5OcpwnDESL32oioCr+238QC7IdaYaT1luKypFbOHieHy3N+VmQlZ+R8mzm4nh8Rz6r0hKD8zQeYdbfaV1VtquDTMZ2VHakPfRNLcAUv2Dj+JzdWL4Yl+9tmZSM3hfybaHJjymb2zsuBYv7vyLLcmVHYW8rKylxwJLh7B2n8kt11aG2kJupajYJHbSH47zUhhQuhbasyVewj8VTjq0NNpF1KWbAfiKcdWhptIupSzYD8wwG+Ut/MIi3jBd9kXb6lX7dlLVyd8zx8Rb6UaVBKU5Fd5RUNrO2cblFEbaNnQLI1UNP2vQRJgxsTmKF35EpsOKWrielurDcVwrNHwmZI2E6Hf0eu5SRw4+Fci/wBJz7vLhHJOM+7GalguzXGz0i2L9H5Gua+ZYBbtbMW7ufHv+dToT0tEnC9peAColxpP1TUmfLXkjsyXFLPhXnFnkdMXhmTaB1U1AWUduzsTTnKWIyt9lLebYrVkN72I40zimyy54of2eb+Te16RymmR1xWjf0CF51E5ikAbqamYxyXl4fha1Ac5EpLhRfcVIG6mpLu1kLeUERmGRmW8o7gKadx7k1LwnD3FZedJkJeCL/WAGlBSSFJIuCK5PMyMLYxBcmZs2XHLXjnTpC4OvhXmbCcJexrE0t7V1AeDSG096jxqThU7Dn8JxZhOdcdxYWCntChv8kjDMBwaRjkmNpJUHktNtnszHjUxD0d3DJ8M/uuM+fo++/Ed9PSMD5MTMVw5s25yZCWs9vqpIuqpGKw0rOwSrasOdFaFJHVNc/wvkrLmIQDzn91BKUW4JJF1n2Ckv8n+T83FcqbyMzoZDR+rc9Y+zurEMWEMqSw04JEN7tSm5Qe4+yoOOsYU21HXlCYbbmVLYJtoQn7qGKK5Lz/Mdx+61PpC8p9bZ77UjGXVqciOJSWcg6TubqgUZ2KclZsKOpPoF85Csx4Bel0e8VBn7PZc4YS7kzXy3F7XplhETPEU8kuSNqBkI3Jy7zf7qViuPwVYUUnKI6X0vKX2WI4mkT8T5KT4OEq3yOcJWpAPFTe8e+m5DC0usuJCkKTuIplhETPEU8kuSNqBkI3Jy7zf7qViuPwVYUUnKI6X0vKX2WI4mkT8T5KT4OEKOsjnCVqQDxU3vHvrAYyWEvwsUvspaXdAbaaW43HGsJwtyPtTMV03Nr
bYgmwNra8awbDm43On8QeyW2mXZpG9W7WsSwSHgcmfOjqTsg28AFi1yVEiyANO3fUnCMQw17B8VZRnLC3A4FJ7Qob/AOFTOS2K4ZHKUtB1hT5Cg/pwBHD7jUvF8NXIwSZFbLrbjT5y3HDXw0rk7jzzZckxHUPyBbrJ3ZvsNeeUulyDstpmQMxt+uuT+EYNIROckzA4cm9tIuDmG8b769lci/0nPu8vJ3HZHQguNmM44dyD0rX+L5Grg6Vi0KKwVRYRCedhd0uK7BUvEm2g863LUEJVuuVAC/dUjEMa5WuMx+blxceHHS2Bpe2feamfor/vKjvrWkNJwkEqv/2dQ2ccfXFgrWobVCSShW1VlOgNLnMYxCx7CmEZi3KRlXl7jx+L3VyGxBiWcIRKaJaeW0HQytQHA6dgqS1i3LllOHqTZ7a4W0hNr9t9Kw+OiSJiG46EpfG5yw63vrkZ/Sg/4akO4Hjz2EcpWWBzhGyJQtGlr30PDt9lR+TvKJMGW4+yXGJcbS9r9Ye7s8mPsO6TUYo5t0nrcNfka5V4bh8I+eG8NUZEpDKekMuiSrffurB9gU5RHCTb6w3/ADr/AKRZDBvDUtQQRuKglea1YZpvC7/GqlfztyuXg/7eT/YrC/a3/aNYwmwtzJVh7q/6On3v3i3JZ299w0Fr/OsW2xTlU2Ai/FVxasE/mLX9gV7JTd65MPv/ALybxRsyL7gO/wCdYsuQU7Dmi9/G40rBg71tjfXsJJHytWnCU3euTD7/AO8m8UbMi+4Dv+dYuuQU7Hmi9/G40rBJ1lc7w5SZTfblz/8AKb1yvxpjVMaM0iErsyWdVXngdKPAw1ttH+tcF1feK5ZHj6H7Kjf0R/xH+FN8+ZVtm/on2lZXEew02cQxLH8YaQbpYnTc7fgAK2OzRscuXJbS3ZanHMOnY3giXDdbWHzC2gn2a0iZgmLYlh2Ii+eQsh7a335gd9QZT07E4cmIDsnYjoQrXvtX/Wvln/8Ac/8A+aCbk2G876chzmESYy+shVc3RjHKRuD/APCJn+it2WtupMLD46I0dPqp+3vNP4FZ+XAdUVLD6rnXvAFNtyJmM4lDb+jiS5ZUyj2JFqewdvbSYLhVmQ+QdFbxoBpRZMzGX4O9EJ2XdhHeE2+2hyfKXJeH2UCHz0jdWbhakMv4jj8uAk9GC9N9B7LWrzdNioXFFsiU6bO27L2U0JmIY7ikZo3RFmTM7Q91qCUgAAaAVhcmQ5ISuA/tmtmoWJ036btKZmLcmwZ7QsiVDd2bluy9OzUuzp+ILTlVLmvbRy3ZfyKxBuRiWEz1Czj8CRslOe2nGIaHCXVZnnXVZnHT2k0+qJNxnCmHzd6NClZGl+0WpeEw2ebQ1IUkhG/Uam541HwyMp5xhq+UukZtSTw9tcwhuPuM7Qru8QVXPsFYtAZdmKaxBS1PFahmTmFjl0qPydW9NEJq2VaVJ2mhvvtb5VJw55TiWH2i2oo61jUSBIw53FsISUR3szllNo+voN/stS0QccncoJridnh0Nybtg0o6aIHV07awyI59IzFQ2r2hIFO4fNCywvihVik8CKdwiW7NxaI4ekZzudfsv3U0iRNxudCaN24UiXmYT7rUlCEhKUiwAGgp3D5oWWF8UKsUngRTuES3ZuLRHD0jOdzr9l+6mUSJuNzoTRu3CkS8zCfdanYK0AR3Gi0Up0ski1LwyLtnY61FSy8QVKv7BTsWAX1occ2ilPEFXZbQDSsTxZpyQqROy7UKUMoy9mlIx8uP88TH2ARcZMvhe/v/AIYljKjKOtdX7W/h5QtIUk7wRpW2iYXh8V767UdKVfIfmh+U5myNouco1pqWvE0PvPKN0jen2+2mkqcW66wci1K49n8R48JbqxtnRtUI16HaaWiApBjW0v63bTyHkusxnRYKNsil/wAR3HYTJQgDKntV3mnlTUsvv5MuR1QBR400lxtC4odzlpYsDru7bUzLaCkocF7KFiP4jXtUjEW8SbaYfX
mWlTd1J9nbTbXWCEgAn/Qf63xGvX+M16/xmvX+M16/xmvX+M16/wAZr1/jNev8Zr1/jNev8Zr1/jNev8Zr1/jNev8AGa9f4zXr/Ga9f4zXr/Ga9f4zSNVan6xr1/jNev8AGa9f4zXr/Ga9f4zXr/Ga9f4zXr/Ga9f4zXr/ABmvX+M16/xmvX+M16/xmvX+M1vX8Zr1/jNev8Zr1/jNev8AGa9f4zXr/Ga9f4zXr/Ga9f4zXr/Ga9f4zXr/ABmvX+M16/xmt6/jNev8Zrev4zXr/Ga9f4zXr/Ga9f4zXr/Ga9f4zXr/ABmvX+M16/xmvX+M16/xmvX+M16/xmvX+M16/wAZr1/jP4iwhcnLzhKE/R7LcDY+t20Stc7Z7VYv6LZ2BI/Sqeedvulk9BKko16KVcE1lDnQdXmZNvVF7/YPipySpc7NzbOCsM5L24W18acC3sRZOllObH5ZQfnURYkuuOu2KvowvduTcW8eFHOVlaVFJzgBXvtp4eVpq/QLKjb2FP66WlIU9+6ciWwBr6LNbxqOlt2UvOhSlhhLYIsQLdPs176FnZOVDWZWTZFSTr1weH6PfSVDUEfjN/pfcfxVLUbJAuTSnXtuhpO9ZAt9tJbQZeZXVugD76QyEyG1LNklaRb7aSqW5s8wUU6E3yi5+VKTzXF1expGv+/QAiYwL7iWkf8APXOoS1qavY5kFJB37j5ElxLyrqKRlA4GxO+lhoLGVKVdK3GnJL6srSBcmlF/bxkBJOZxPy0O+mW2Gpz+f1koFk+25rYtB1K9/SAq6r0GpMiy7266NP8AeoOxnNsg8UlJ++s7ZuL1mWbdnfQzJcte2goKQQpNKxLEVLDAUEgNpupajwFJbbbnRSpWVJkoSkE9mijSsNmOSW5KWws+i0sd1SWsOecW4x9IFNkUWHQ8t0DXIBpQa2ygsm1syP8Amq4alEdoSP10h9o3QoafwNSShBCtVAjfW2EWMHr3z7IXvSiALq399Is2gZBZNhuFHJDipJFjZoa0SxGYZJ35GwL1sVstLZ+opAKaCG0JbQNyUiwHlG3YZftu2iAbUMrTabHSyd2lvs0q7saO7rfptg0gGLHIR1LtDo+z8dv9L7j5CaAWtDRUOilVyrwFbRhLkyWuyWmUIPSUd3upKcVP7sWhZe13XKjbwpnCGG1liK3tHMqutm/a1c3Rg2Zb6tntNpdSVDUHdUVt8XlxJ7SFKy8Mw091PROcOxZUlpSWFIazX3XT3XvasIw6RhYmsTGlOuLUjQXVre44C1YkwiDsGoTYcZfy6aKAATpxG+mcz6l7JhDCmii2Rab5vbw8KITmAz6mo7RmsoZ2auhl1zX1oHnDb8gN9Ow1tc2v86xBKhcbEnw1pTWIOrbaYSCBfIDmvvp7YrzvKb6Kdr4EVEkOostalXPuNOpQensjb5085LjS3Xs56RQD27r1zZC3OarbzhKzuuQKfVe9pJHyTToDiGUIRqpY1T3ns/wqA+7MShuStLbSxuUq2iknwouKDrufV4IF+kNCbVCvu85o/u3KdlzGlrgsqsltBspwjXfUjGWMSaadajtgxl6lxO66SN5voR3b6mKTJjvehUClKukm5Rw7KdhNFSEOrTnKTv6CdK2UhLUZxzUKyZ127/2FP4U84X4wa2jDh8CKin9L+0f4Gp5ya+1/mkt6U0pZBWUjNan33Jy2pCFGzN7Ad1uNDbSlxWQhJ9GdVE1GZbkqc2q7bUaHL+ulxlOuPJyZ0FZuR2/ioSHVMs+tkNlGnklwvtJXZC1caLLstcRoIunKrLm99ZEP2CXMqnR1lDuqaEvLfbbRcFetldlRnTIed2isriVnTXs/Fb/S+4+QtbIpRl+nUDlHgP1U48h2JMC0W+lCD7LGoxcgrSyhCk5y4ClN91uJ7Ke49A0l0MqYc5mgFXBWqt1OvlxSencnaHL4bqacQFJTIxEPDMLHpOi1M4TtZMd2XfI6y3my5bE+ylYYl5Jfj9Bg7RF3Up10Gu
o3WpvB0PtocfOWQS8i7SVG5vfeTuCaeEVyZzVuzIbkIIIUm+Y6773qbGQlEqUhyxSvRI4VLnxkhS3yoraTwta5+ypiHWmnucqzvFR104A+NGUkBvbw21gDW2bL+ulvyW0yku5rgb7jX76jrMPJZmwX9W1QHWwUNrWopSeHQptpSrXYvv71VHUgIyZxn76SczSFhu28DjUtxCgsCcoXH6CKW9dkEtdBkJ32t/iLVEYQ1GEQAc1SU/RWFtOPaNwoTY7yi9tjnBHtFvZ2GoxS2VpRiCFLsOqMjgv86LLeV2ORohZ6tRnFrQprZZWkJ3BJOo8axBNzk83K04fSN1LTmyltxs37PRoptMJVlH1yL0yuQpKsvXsKgOoIUDnFx3OKH8DcCGkSWldW6gMtRY1gsm4Ub7uNOIEWPdWgkdg+2trHYTICkBJGaxFqQ4kI5wHS5s+GvClyZDYZOXKlF7/ioQ0ypxn/AClnMubupxSoYZyDooDg1pNojMtFtyjqk++ghpOd1xZU6UKAt7KdiqiCI2UaHaBWtR0PsoabaOYqzXzH8Vv9L7j+K+yhwsqW2UhwDVNxvpT8/F8PmtkZc63l5wOFujYeylmRMgus5DkAKtTbS4tuqO5KcihLakrKGlFWoN9LgU/EQ84wtaeitDmUg8KcckzWGntNkqK6f617p7Kzx5rTrpUc6pTpPR4DRNIVIcUuSY7bbnpCpN0jeL9t6mT8Om4cGn3C5aQtaVJubkaA0XHJeBr6Nh6ZzT/coA4hgyWr7w64Tb4KZwQOHZtRUMJXb6oAB+VBqFMjZ9rnUp5R+Vk0W1PYVcjXKtdj/u1GStbJU22BlaRZIIFtKiy8GxCLFcQ1s3EPqWkHUkEFN+3srm7kvk86q9wouun/AIaL68RwFPcH3f8AkrzfIkolSXJCnnVIvlBIAsL9yRQcbeRHcAyhVr6d9GEX8PWrqiQb5svst99LW/LTJK27Kt23pcV0JUhW8K3Gnl4bMwsQiboS84sKT4JNfvvAfc85/wAlS8RnSo70l1nZBDJJAFwSbkDs7KexLDpcJAeSnaIkFQsQMulgeAFFKZWBKVe4Jec0/wBygh/FsG2P1EOuAeGSoGFbXbKYScy7WuSoqPzP5nb/AEvuP516ifCuonwrqJ8K6ifCuojwrqI8K6iPCuonwrqI8K6ifCuonwrqJ8K6ifCuonwrqI8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifD+Jrjj6JiWUrShCw4Uo94vrqbbqjMEltLr+UltZuUhJPu3VmkypAaZcW3ZCyFuEKsnUak93GmlSzZxLfpCaIMpDpkILiUBd9n3d2lvA0I6nnVSHMuRZVdVli9793S8KivPOW9CkrWs92+toyttwEdFQNwanhb7jjvOkozITbKCEdUcN9PIHOkqSvpNvrzFOnbrp76lSw/JEhC3Skl5RT0VGwy3tbTspLR2QGZKVAu2Xc23J4jUVdMdPNs5SHNp0tONuy/fQdefeaYT6jRKVLUd2o191ObZ51D0aGHOiu3S6W+2/q+ysPVtcrCl22aRv6BOp+6lrkImoK3lJaUpwhGm4Zb919355zKefLeYKLRIKLj3X+dAKddCgvOhabXQe7/Go+SRJbU1ex6JuTvJuDrXN3XHFg6KVuJ8KbzFSSheZJTvFRnAXMzLWzTc7x399NsoJKUJyi/kkAlfpl5yfqmwsR4CnPSOOrWbqWu1zw4UttUmSqOpZUpno5Tc3I3Xt76LqXnmwSFLbSbJUR28eysyHpAbzFWyuMtz8/de1NPc4fZU2Dl
yZTv46g60krekXyZHCLelTfcdPs7aYUbgtKzJt7CPvoOKefdSlRUhtZBCT9vE8f9JeUDaP20TSuevgO59OjwoFJBB4j8glljR3eSRupt6E80ym1nEqHreFSmJLgcLelwPyGdnrFVr9lbPPrfr8azu9YKtft/IJZcz5j2DQVzTp7S9r20pUZGfOO0aH809MWX6qxvFPNPpaW4o9Fdt1Bp1SVEHS35BKGU51hd7X9tCPKjMojFOVSkq1+2i499G2fR/yvyC2l7lVs7DZ3+k4UhpG5I/IJddaCnE7jXONkNt9alPoaSl071f6Of/EACwQAQEAAgICAAUCBwEBAQAAAAERACExQVFhEHGB8PGRoSAwUGCxwdFAcOH/2gAIAQEAAT8h+Doy/wBkGl4UjsDuehXzMilIzCF3G9ePhV5EwYn5SafA5HAES6wqIe9DvAPqSonNH+potAZ+c/6z89/1n57/AKz89/1n57/rPz3/AFn57/rPz3/Wfnv+s/Pf9Z+e/wCs2AT6f6cI1yPxg2UXwAfsGFy5ptmvpbXKIspQv32U4wmREolkjCnB69axhV0iWlDvXH7GJNJHFVO9yqzi+mSY3IzY9XJnj2WM3t28H8Hed4qhkmNyM2PWTtDs8YXbtrRipADa4IbkCEnJRL8UhpLVPzf1JP6EpH4SjwHl+WCkETUninJ8OP7NMuXLneX4XLly/H9i/wAfHh0mlhuDyI+uIZIVMKUDmCgMJN21XR2ACpENvOOiwRdo0BA4NcC1XGLBIAMheGyrXVjgJaM4FfQhWdA+EB3uFlQHBdo4F8Y2LPEXCjpifJOFcHqI6HAtIDiUYEgl9HptKAKVNmArLMi0PJXCaAIaADWlwyQxRYA4FIC6FceJIZjKEiDyjdpInXqioDgu1OBfGJ4TxVwUdonoThXHPXKwjRJNEFbHiMdaQRkuChQPbNrwfrj3LZtpHW7sDrBp7Y0JEUbpKwnijoDoseDvE/5DBw5YgEHtuxM6/wDfNSo0eHK96tKGIojyt1OLm47TKhogvJul+mJ8wr6Q2H1zg+zT4XCobQSLoHCJPnd5tJfnwDwNC7TjOWKBmqYK0vJhH2GSnUVHQObj5HhRkgh3INrrpcDWdbN8d4DpZu4YNjga5FYTZ65WYVdIobUZh04Tvq5Q6TXS6HsCTly5+xf4+NJibhHz6in1zr0sgQs9Ii5xz2MDQEVQpFAJzgv1F0GCNNGCNvDXLERO1JHGwNx2TxMQwd6Ddw2wPHw4P9IvME0aTw45P4GuItyAWO1nbcZGaw2QRdIseTZTVTDd4rL4O+WPj2ZjiWjaWm6sTQCQxEaMVxORY29Z++oaBu26RLn6GTkCaNJ4cEVBfDCLchLHazt+lZxhrsk5pfBnQh8Tkv1Ds/LL0w6BsbIKTJ9d4qpEe1BrpXtwrXJbHIGkLwbSfqy2T37KgdW7gJwGk/8Ad1iENOg5MEvi0kDW4MeV1G6yETlCUOgC6NHxO8DL85DDn659t6ZcYGkioXfTuesX+0d69geh8+XHbL5g0iWOZ/3Omg8Ys5HE5OcHYUulCPTvWC/4PLkgupFm69pgxHiaGw5c3zesFkR5LTJjS6BxxLjYiwQlQWbgob5YUZI40bSsVUV5y5+xf4+Lr6bOHuvlk602j1VBQpdIuN9K+RsQFUAOwg5PmVa5qFfakkNBkvqtJGlCAnm+jGsjbOKAHhNUTp+HC40EAirwBu4mnMAqFAFk55dYKsg4ChobXYSbdbwgar/l8P0ckjOG3snmx+cy/KraetnJnjmEx5g2bP1weKKBx3IMrqb3TJ/TGBpC3oLPAxw0UeTyz49jnf8AB3/Qes6+Hb90ZcuXL8Fy5cuXLi6rM1+U/wAfEgQ7EobZoWeUxLToPECCq18jjYE4fUHCubYfRfQ1/lnXaY2VI0NBF9eSujVBJSmhavafChDSNTkPp+g+uXHNYjlqBArC6usK2W8YlfBLV1ybw0AAXaSjDZwBtAbwVTaJNa
b74Z8sEWzVQEmvYeVbXuHoyxaqzpqo55UxyQbnpB4V0PHgwU2LcEqNOdwcxfOcG9ZUZ+zz/SUFQInGfb/6Z9j/AKZ9j/pn2P8Apn2P+mfY/wCmfY/6Z9j/AKZ9j/pn2P8Apn2P+maOPUt/3/ATMUQKtLi7iE8ub8wGRbJTfMv87IQWOif9TD7KAKqOhXjSuvDxjHCGtQwX5BjtpyFMCAjsCPcQvv8A+GKojnU1p+uFb/sdE54QPbj2rJAJqPk0osu3I22IZgvM4UjPf9jnA/ENOaGuueufGScV3BfQvZmibKsiSAmm/Ovr/Y11hck7b10aeq+dcBiOIaKt/RXXHlyfcB/lpNs0lkXTgYNI0D2f2KgKOecAqrp/YYSpLaGCVwtt+eC8kiFBtJ5YQszBWVV2t5V/sXT3/wDvW/UkzrN1Tb0nJUlM7yOL9wrCoBtDfnBBpPrbETSJunxn8NRb8H9rg2bk38F7LAMFZMHY73/H38F7zdW2tLkqSnw4MGmNL+STn4C3+BB5TQYEFp9TYE0ibp8O/wCk8ftscDZ0iKvyDAiJoLnaQdajvBOiL29dKDlmzeT2JaAGrQed8+KiPSYL+WYcAG+Sy4cZJcsdWBpgRJwA61iRp67MEgJ+YRqMGUbxVbqSvqop5uRcQvnJviaMnZ5zWV2TCoGFg8Xemas5x1gFKXQB3jho6bNJsobsOtWhCLXsQ0g0iopOXNC5hwKHwBQ2SYSkXEW5+SfUmMGEjaeYYOaRB3nXHtVYrRek15wxMA7A8e3g8ueVr4QRYpslpxhLuMj0Tr6nImcM1xCVcZ8n2B766PNETdyKQrKykA8tAPLi0/6bPdPA2S0x4s3dag3cHYAmNSXubpQC7ALaMUuKhGXVwN/LODyQahAKURx4jCPUkSI7PD4zpx4e52vkLL3N4JCVIktlDW4/ri5uAl3Bs0I7mneX2vGHIIT5rTiLrDj3q24B42V9nKg8IyRSkkR89+sBsxptv9DoIj19FpPb3lXvnN65TlS+mX6ZLaDtAbtLyXdHFVEEXbIiO5qeMYgGvvZAfIUsnMQjw8hRgBYL5SIo4YfgrH4Zf4X9ckQ/6GP3xIotj+9YP4ZYxnBLOnu7A1e0su0QKCpd6cTRjwpLniJtfXGTdqWdYA4IfqDrPt3hgGAWwJ5suWCP+CZw1kunHdvlG/LGE0QepOATE3FKpvYWdXNHM0yaz0eR+bkMFfPgFgC5Kf6QV9DhTBlEu960axB0+wWnm4QvrBqQU+OarWtj83EIV4f1yxkitYkTRah465wvlPNWDqHBs35fgnY/W4Tt3qI35ZfBNsjBbNE1Jsmss+PcDJxSQ02d5cDxRC+BxFSSlKYF6q3UhQ2tm1elxhoYcQLU0ip7ROMN+oCxCWXARvOD76zQIGLqLy88/wBDU+uOxrZ5Hp6xBy/pjh1MB8kyxoc3vOyd/PE6YQKhyztXl9HAGfRihZ1x8mnFM3plZ1ggU1zeB5DOssosFqTfYenrBiPhgkKlYsiY5Xiw1paU14TvLkXVBaBV1DU1LwYtz2qWG1GvRWYlYt5bBDwHMc6wkHzsggquGwOD3X5DHoNoQANUd83I/wAhFFXYAu+gxW+1nA3kSDql93NSkWnt0h9E088YkQlspmvhBPSYxH/Aj/I+E2OVUPdE0gGh8r7uEnNapRQOhLX3itd2wulD05CTsULdxUuBGzIFnuCII8jvA7kgxbAaHhU83GRDokFKCOOX3idOFdEHYG67BzQ/q6xxA4eFTyOJSuPWAWsp2uXIKJcqAJI0V0PAZwwTrHaXIHfa+cj7ArqQRBo4tMcAm3j6KRUTZ5ydklkXCRh1vDsuuqBEnI9ammmsWt8sacajR44mkTCJgEANH9Fazr+f2f1n9ZX3f4e/jP6eAwum58z+pLj2+WcCDeAkoWHjIez7dd1xne9d4atzE2R/CEo8OM+JIpp5KbYLzkQuoADaoI5Zx8HwJstcabF7kyfZQ3RxECzVN4Wa6B
o8KgHtMgvaFF+EBXpDNDBPyZhaFNPqwXmK3iE2YhFI67waFP8A+tlNXb1gSiJbPOwnyUNecvu8lZ4gBY8m83I+aL4jY8mlxvpzhlBNbQQaPIR+FfoWZQUSgAUecGKU3UmITHwPGbLo7CJoC8g4yrePLly4oOi8GPXwwNUAK2mg7z0AE0cBLx1L6zYkJE39uddje8qpg2RcxML0Y3m/dVTI9yYOrrBB4oK6suB2NnOXgdr0g/0vSQ/AxWsN636zZK1mAvB4wwJwxfLEFAgNoqVbsfM6x1U85nQrpTmVVm8mWz44xKhD2TmjTY0pxlqE5AwiFWoV+AWGn6Kq8ocV2+2QFFAUCiiG1Mgxd5v/AFOAgeR4eDwMPxM1YKGqxvmq7cSJIq7I/I9+YeMMKcYO+O9QoSgduFhZ4yMZFA1enbV+a+cG4dQBJXJss9/LHI4gBGIDgqodXOn4/DH2PyzsAb0Ug/Rj9MlHWrgX9MwKEvyBB+TAJieAAfsYZHMomE8NdsPysOSbTQU5XXgZuf1/Y5rwFn7ZNraahgiKU+bbf9L5rNdnWzyPb1gqyzFE3pRAHy1gY2eIWAXwKfoaHt0sw76y8H/4QSoM4gea87703DlzzW+dVt48uuE7xrZ0IRtMAA1+vNNUFya7QoiPnngFcqq650JW60b7hDNUpTEYO3g2h+6rX5hBUFiaNFPeJqEIVRCKu+GSd2YefbHlqfqNvWHt70m+xHThe95CyWhKlTyHlxrNC1N8FwFfcNBuZoZaYzOR0EHjpx8Bm6nnVknoU95ruKa+IiI4CteXC5M/1YBjN2tEiI4CtTbkut42qKBrcrjWVs/D+xKPLa9Y3ngH18oXblOsh4LWrw5whsOjibYLsR3iu4FN/wCieYcaunZnrWoNg5t8FkblXTwU3zf6b3/AmH8Pf8KfRbWm01741tec3cgN5NcvrgrAv/onw1/BM7/h7zv/AMSRPIK0GN09EeOaZN5VmwL4W+D6pzmjggZooDxXOTluTQIO/ef5FSb+T4k94HLiRVpounJD1jkeg2gOdR1N63dZ/rHBeNpZdXPQQIs1dwUrHnQ5xouyx6H61mLAmmwPEROOwHSQjgfJQ1wWDk7vGs9ynh4/PWc+86xdxCFOQRiO9OjeGDJTrLE4fWJpyxsQxyAtU+ZiTFmnQn5m141/NVYqhm8h2uDdTh/hJLpCq5DQAzupycZd5ceMQtYQBGKTThibdBwgqs0PPeLhb7bAOCtPI3jBJwKKtBOB75w5MMiY30NOkXfPwF3SIEKl2bE+Rg6UypUaA/fPcp4ePz1nPvKEIeyBL6iHtvJCNfI6bK+f74ipe0vqhF0HpR6w5nIkVLQ5Xbn+doMfQWW9fAy5fjG20NpBnfxcj0m3jah9QP4f+oy6vrP8QwK+wnN7zr+RsyPGdoOmPXHhFFTpxIdZUSj4yQgDGhMB8kxJZjTsRnC+M4LEj6baDR1HPuvlxwp43xkaqugaa3mq52tY73OhDzhacoGr3B5/48fD2vA5cht7fhTOM6wsVCgrdMImqdlqVMSWbeTjW+U1pSM53ZfeDGFuhIKHfnXWVgZlKjlZCvFmus53/IWun7Bp5bxFNUc0Ze0a93DxPZIAEwmgCnnKLuXRHIIheS1pcRkvfbNPbBXtzyxVWVL0wl5LTZgaC+A1Z2cnZ891oEzjDUAROjspqNjnKHrGt0eturEmYcVBvAUB6W499pDdQarQ9b4oJ41w6QHQ0HmL3nX9LABDQtivKiiUb5oB1Syn0SvIYzEJeKkjfAvzewT6CJJJ2rcqHHG73Pxx63tsMSb4zhI4EA7DKGyUu0cLjOXBjd9k5CJxvPDOgRUVoH0D5uFRGjyD964fI7lmjb9DHya6y7oSBXfMmasrvaUcM0aZxTmZAkpvrnW0dXvLIBa4Gox3xes0Nr2NWNNa17xZfjnXVAgpoqzTQd35a7qK3ABWWWFmedmOJ3Ygqb5l7xZ5vnbBOJRV417+DLtROi2Heq
PR8xLYSvuRKK8iuhwkWAcHR0oreKj9DJKBLcmYZEYvd8L8szgIaLYgm08ZGA+x5uxOO1eXBiK/PEJQ24m9nM2RcEySAGDWqgu0W8GPRL4Hez1X1Li3PSAip6j6lndMbIZ27NHuZvP0ia6fsGnlvGa1RzR/mRL7uJVdN1sBgOINGv4gXLeaRV4BoF+qjpDwkqASLD/PCMIUiYMF2emjG3FTUoBmPqVImFKu1ngcIqV7yBpV5JSd5c0S2fJ2dronTjpM2h6G8idUk3nHoOabyuAd5b+h7BiqA7ZUItnOGns6a/wsE2hU2QooxBS8N4EqvpzWBgU3hx/Auo0eZTAwUq6w8o4omnDDy6y/nr4XoPi7L1c/RuPDObk4cdeWN98UGpFQaoeWayaCBEgu2pLhP8gIVN5dtSW5v0WaqFASiP1wYrHChuuiiy6sut5ywkAmrW5tiPCLN3+M2Um4LTrt2QZMHmQQERZXQ48BIqRPWJRxEqdu8WYaPMpAYKVdZ4/wqQHb+h5S5rUy7tQ+sfJHZBbgMuXN7o6K24aV56OVCeBeHg1tQPXB4cjYu7tPMgMRJlNos4ud3Ng5yRyblohZQr1y1h9G4JEQdbKdKmtveHRxV1gBBwukMBICI0k/0MSNFIoYBUmxrnGaUTtAJBaaXs848sHwzObACiMTTjKiTgobR+2cUI5Q+Vh24ormmo8T+hbgtlImw+pHh8i4MVbIEGJNEdrpwRShNdFEHhOQzqPHCYttjU45T4WxhvIOrFBEt2+GEYKQMNtUIvHk3VsGBeqjxKvgnKiYiqZiJ0HqqEl3dY4sOJEtiR3HWAFgDBRU9nFuGA9imqLRJd15upHJbBtHSOotinDvWb14E32BgLs0OKDbEW5X7qcnhXEAICA1Z5Z+gcOGc3Jw468sn59QakVBqh5ZTJQIFhHh/kd/yOv7CiZLcDFB8B4e/j97porTrt/AKRnZTw8B8r8v6/8A/g1L9gJ1M/2Ok9mtLLrHnOIwewdx4kT69YE7lUET0ATRsbyM7nSOiEBitBym5VjjwYFGFV0LXfOQyfOZTsAmy83XAIkKmCyIqHb1e8cy5SoNENAZZfOb7M6FSeybp8nGTX9emXrgxycsrvauenMvDYclwnx+x+X4JZEKeaB9ST64SNNliicnCHmOxxU1LDDkOk2q2XXOLuY5H+g9NgeWbxRqLs05H2q8tP0zx/YS7zIxuZvqO8DMfXyINBCV6zl+W1zmQp5d4QF0TV3G1fvkJNTBAjMR8oq+/wCwCmtRfmVQMeiRR6eYj8DTCUfTzAcBVWl+XTTl0fal+bNj0fwMdvhxfHJcA0InnN0aa0uSpKeP5Pr8NkFX6Bvf8ptaDYhVLwBuuSBCODyhE+X8K7QBRnaugwRKRHv4rtAFGdq6DBEpEe/6BrOWZViDnWm6OelwQYpFNEl4eOFOTZkZSinmED0zREeMvxZ5ew2i+PrWuICVE0OynIBa40NKY79jrHXwnNeFwQ9Dv866oy/cB0+rn7wZ1WxNqoENSL26sGvFQV5AB2qgHlxC4qMZdXA38scGAliushG9OL7Gzl6b6WfTG4GYCGhKVOwm/Fx0/AXcQCfNde3WAr0sMh/Tfy5UMF8gJFQSk7736ct9EFEwmV2qL82nStc5GqK6XEKC1qdm8MxtD9DjBsuu9XcxTKfJaKjz1rhljI3lEUxZ9F4H6RWzVW40VgNdb6QdZV5PkCPLHje+fph1BSvE9BEUGUN5Pz3L1pNjLBw+UxocaG2IIiDK8HZ1k0+IMPAIc6Nw3NpsQhohaioNNlwa8FdBQM2nngG8ZcSyTXpbPBTep3i/SGr+AWWWY8MMqFzPIaaOWbML0J40dmuo7mLqIimIAAeLRzNuLZG6qKJ88eGGVC5nkNNHLNmF6E8SOzXUdzBtAimIAAeLRzNuGUcRIiO7gMcutZObRwItpCvY4zaXo4TF9tB41d7wIzipoIUBVpHGbVtplNIHI4JvSxn/AKaMP2JTxr
TsV8EwtYZqKiGpeFm054Tv+2UFUHmNNRXgwkd0d5BDpwnIiYkf/uOhIsUAYnxgh+T+mOvhtQP0SUXAaPyTrABFHNxZekChsp0jsWhe8rMh62aOlqHia5wskPFWDqHBs35cpA29OZuBQkD3m5TNqQCTZ2Tcec0WTpddg2Dir0uMTYTwJBmloL8zjBr2Q9DmfKRvMwaU0kRARdcuX5ufZfOIE5+VVgBssRotGW+Bihu0BuuBHycY0lV5AAvSw+Tl2QyAgitxNiafGM6BHrQv3tnI6+dwLh5H6mSYRenNthEAFZn0P9ZuSlQvF+HEOCMw0TTC52r94egGNHYrsavux+nw+IcKK+TZ/vO5Gn1T0mNQw3pSA9qge3KNjQOX+aWEOKK+TZ/vO5Gn1T0mNRVnpSA9qge8lNj0kFPpD5DGDT82oQ+7r64yQZhwfqYF/kYS8OAfTx/YwA2hVff/AKnf/wCot509dxpesGiRCCcPQeLM6v6fxzwSamWEwQbuiPkaOgxRf6CFQYK/S7RcClSFmK00ddTn4E4C4Wq9vvDfzo99ImxPJswUbqLi3PM/VlBhJRq9l37FXHgB0KoekiCPI94Lv5dI4g4eFTyOK0/pnBX/AGG+cp2qlQtACo7Kd83IozGRZVGwukCQ73kzTKKI0iGHzyZH8pQiRzDROtcZWbnCjjUaPFmBxYAgB6wkTTANUtXRxH3mueW7t0Im3q5RoYPr4APpfhUPLYPpR+cL3c/yCwflc8cbdbcuoCV7zVaeIITgMs2NldhaL7t4PlhAzDh0agOV1ijoZ0F2BNeMVSc2jWBADij9c5cD5NCp8sLpwYAEYol+jlgWnxw3aEOGybMKDC1iBKKEhOmQswKoK+0fuZD/AK6CNREo+RPWNnS6GZAQQgk2eciy9fBwBDDxfncBhwYAaAPGQ/y6CNREo+RPWNnS6GZAQQgk2ec2qvXBwBDDxfncD04hwAdEOPGOCpjAg1AkA4xBY1oUBQImid5upE5NCQJ9VyzCwWpWp5Hp6/8AZXFzB7kr4Wt6zq/xd/y5/L38C0fLyHpzSdZz97B/k9fHv/2bYa3EGWdj67tu7/h9cphwbW1h7Ahvf9j6KxYLfYcyyTvIOOnVU0HVDpwp4dICQV2MUJpWO5/YyxLh+UsnFqHT0HgO8c5HcBjxwpLo1gNGTb22kcP0OchoioFRv1Hff9i95CQHtw8ilWgCKwa1ZOMpV3qVgG8AD/4dXv6f+me/7HvPf9j3nv8Ase89/wBj3nv+x7z3/Y957/se89/2Pee/7HvPf9j3nv8Ase89/wBj3nv+x7z3/Y957/se89/2Pee/7HvPf9j3nv8Ase8AIJF+Uvn1nv8Ase89/wBj3nv+x7z3/Y957/se89/2Pee/7HvPf9j3nv8Ase89/wBj3nv+x7z3/Y957/se89/2Pee/7HvPvD/ee/7HvPf9j3nv+x7z3/Y957/se89/2Pee/wCx7z3/AGPee/7HvPf9j3nv+x7z3/Y957/se89/2PefeH+89/2PefeH+89/2Pee/wCx7z3/AGPee/7HvPf9j3nv+x7z3/Y957/se89/2Pee/wCx7z3/AGPee/7HvPf9j3nv+x7z3/Y957/se8CAF/X4lhPRkCpHkjw7QN6zXUgCQTrwDzfWW2dwEbkGtTnvjBYuGfNr1sRq7/RnOjO3DQfmOHnODElSdug9MfLNYXdQLUQq7lMO7mpnZQjxVTJvR9cfEBS702gH92SJ2wgjY8bWrr5ayknsJ4IgJCVrn04IKPKhZB01RZ6OAyhIxLfTxnX8dD8FMOE0joCrnTuh/wBT+znBCuhT+uv1wr+SJqwKLvWKxhwwLxnQuNCDh9lqh+iDgAeocYXWBrFwIBCA2JiKnjJ9vjHzAav+OMX0MRsWki+Eb2Z5P1L0B7VmPGfWxnSironWU4tEWBTTd9QfcxMaiAwzk0u/nngy6wVL0AOSVohpxE4iHNg8i8mAHaT5EyaZWDteDA1WFC
7eNDf2xQo7P8fPN0jLmIVCwXaEHLilMnxUC9WX55DgB0Rot2vgyB7dM3wumas8mE0QlwUu6nWCaYhcnj2xAIRaJnYt5uvTfd/8aqygUWtvngzoJHH3WW+8DZpWG9A350B9M8E50tNHg0aPBjCXCHQ8jDjNYykz+gw+QBCCcadYfqYT8gHxQ+vsXMprGkhSArlPf7GsoCNFGwLvuAfQwxe6oO801vesP5FAbRAqBXLeYiT1UlD3lEUGr2Fkr3XgyjDRx0z0iCdSYZhvJsDdGkJ2duAKoVJUS2cmmWJc48poVzT2oTdZMGbBImAoHAKpzzrD2tUu6O5CIiwmSGzVHRATYC6DfGTemq05VOQla7U1MM9BFKQr38gwtqcLQXtyjV+h7wZDgpA915YzgnI96P3DN5GTYq3XcDXrFpExbjyLkbN5usTiK7p39feRwLR5swQwbHA22FlsxDs6myABAgZ5fCt7v95SSSVFT0KjOU9sXlmKuSQJHbfr3liQXTTABxqU7I8mVNAjiMTD1iUDs4vPvWAp9dBUWopYAKdJkzcZ9U2g2jXyezGYAxGZP1W+4Y9LQop3jcvpXz3hPwGvFhvh3Lx88EYAeP8A4+2t60lljza+ZiIgkhFh41+mMCokYnSfbz79ZfxaNLvQsDNKMBaQrv5JdP1wJUnkSgv7/wAJI6K7jrfjEGT54Q0vcaXCEhbinl+TiZatXQB4dNLrf+ckbpKoNrvr5ZH29jQ0jqP8dBuX2XtRCzT28ucZLYa4QFJXZzXlYK3ZtcwRSWsFIHOmazXUdPzp1kviyKY1rmaH5Yr2S9V5RXhzZOTBZkwmLqc0jvpMab0eVQf2PnKx9NqygQUpg6PLEEDHVcBALUFl8Cse3vc3dkxV7PECMQJEEQvLETjpzgoUAFCLQ5c/tcAvhBc0BwRSfpgGBrAkp7mGAAapdFncTDmOsCyga6I59CdO/XOJXEyw6n6GBylljkP9YuMxgPK2c6e8lgUFQC9+sfORQd16XzlaQu0BXB0zyLrhziQxukoKDSCw3V7ACFmwfJ6R9HzMW1g05Y9UH1yh+qBSciZEkpEsLheUu+8TKp3zAX57cDUY54NnDH1yBJ6vGU4umC14acdj5piwWsa2X7md/wDi+VBGPh1vH1IjrI6E2dfpmyzxpX1w8pTTl4i+H6UXUTr0ZoL2NCIl+XfFxrkJiwtVTy/wo+QwT8K5DzOchC64PPYazofoh5l0n75dGZDXSePF9YwUx/jD9bksQUIjUDjzvOv4qE3cRowcm73gZ/dKoBHlOcsm3LKrC4PRm3DOvY66EIVLF11h1uAQWdAUN+Os2MIN3ShZZfWEpYAEr7G0B524aM1QN5doaeuwMQ/6jKxgJSXW9OIVPMBtDoKxp8saYdt2SvC6uuG7ziwVIa8QLOqfPNiIDtxB9DMAVQGnV7E2/prAGLTZCRUf2YgSvta0dDtnXHGTbp+QQG2wj4byaKo9wHL642siA5Kvn5xwcD+kMKGwhtdYY1dC6ajtKyJ8964C+Q+lpXGqlToAonkmt8dYCk4K6Ej+uXy/XH8lp43s5xYhO+Hz9eOg0648QtOv1YJ0RdLpXDyJu5ZlomVzo3yKSeKfcGvzct4luTwDopPX9uUWVVXlRvPw/Pw/Pw/Pw3Pw3Pw3Pw/Pw3Pw/Pw/Pw/Pw/Pw/Pw3Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/AAACHX8nrOv5Xf9YCRS8rNw7ByGt95AChDmGApYY6O95yPNADZSAnka3BgiLDUN3r64FVP7YvEXl01vyORAa32SVugjxGCDHyrql/zlKzfuyOS4p7FuUhR0HNu9rd4vv5E0IFUjfLn6YM96s2RcIE6e94
WABXFRppbGncGZd4/KCjplBNJ4Z4ADhGEAlBPIDlTxjJ3YSrV1HEI3Z1vFHLaaLaqbCag8t1CIUoECB5RUy3nj+ssF1iCCcwKDAFONtMm/AE4NIkU0dLcQUazbTedu9k5QhiMNlYO7GA3wwNLgAAJVDXY6RR9OKidKYQdNje/bh/JrzQIZ1mg0aCiGhqU33hy7f1wHAAgGjLNBFFGwO546a4yH2yoQHToCCCGzm6rXSRpXRRVa2PGiSYWSAdGOM1fC+cfm2YeRHSc9NLzjlsegrK+osIriiYSjOgAoLxon/wAL6zr+TP6rt0K4A9uX5SmwIJsJzecPq1Ep/IRSxOIPG/Od7xS+Ua1xjY5gJsUZAp8zH+PigHXxi39p9c+sLT9N8fv/AIzmSnVOA3959P5Cfxita+f/AMwetogNnrn/AFnLQgQIczf+T+kx880fyHrGjZS3EJuXkuKOe1oD1sPf8iOcdQ1B2nnCxRCJJBIt/THwNlCbdMOJ/n+QZdKXw9OfOQ9Hn5+vzhlyS+Xt/kcgnBetmuH64sW42U/44vvOJmYvfOrD6f8Azn//2gAMAwEAAgADAAAAEAAhjAAAAAAAAAAAAAAAAAAAAAAAAABChAggjgAGkEabZ6/z1uSAAAAAAAAAAAAAAFVHwB78sMjgM6ICbjrb126AAAAAAAAAAAAAAAIZLkpMAoofgHRS6CptU0gAAAAAAAAAAAAAAAAAAAAwwwwwwAANwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOJgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD39AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGu5APZ/uIJbP1JctyP6wAAAAAAAAAAAAAAAAAO8UWyf5pZy2EqPuIi3oqwAAAAAAAAAAAAAAAALDu6L3+Tk9EPyAa7AAAAAAAAAAAAAAAAAAAAAMiqgt7hMe/Vszw+4AAAAAAAAAAAAAAAAAAAAAI/JrcWfA6KAAAAAJiAJKWDNfSQiBhHBDDAAAANgZoH0sUi1yZKhzFrFgkjxDqVvwNosbOXMgAAACgNQJgAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAQ6cc+wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACwKgB6AAAAAAAAABAACACAAAAAAAAAAAAAAAAAdjaX1a8kuqMg2VFIAsjYgiJQAAAAAAAAAAAAOzgAkT9Pd7wETiNzS3jYheUXAAAAAAAAAAAAAIVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC30gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANG9HgO19wAAI5wGA7AaNMVAa8WcVwAAAAAAAAJYYQAK14CAAIYxrfERzQqTs7O8RdgAAAAAAAADDABBHDBDDDDDDDDDDDDDDDDDDDDDDDDDDDAAKCCToiiNnQAAAAAAAAAAAAAAAAAAAAAAAAAAAEwz+EwY0wgAAAAAAAAAAAAAAAAAAAAAAAAAAAAyAAAAKQAAHwwAABjQAAAAAAAAAAAAAAAAAAAASQAAM0gAAMoQAAM4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/xAArEQACAQIDBwQDAQEAAAAAAAABEQAhMUFRoWFxgZGxwfAQINHhQFDxMGD/2gAIAQMBAT8QgeWVANs2jLI/SAAsbtbKxLxSnnvgNDBmQGLkH0iYLArb9bIDg2agEaHdCAA7tWukUZlq78qZmGj2EOQk9I2Vj9Pp8Q4wIHQXJ8YP9xWXEtfv2lTeikqZnFlebKxhvfQ7wFGPKPXCEDOX38HlPPT12mO0EG2wGgdyVFI+OFytztEwRnCWXu6LpTdAops0CGmMybhyt1hBLxpyNORhLIZvVg94WOP0ukAVtmgA6D8AJDUmPh50gRJttjle8iOOcYEHKEk8YRDzie
pPOIT1cyEFIKsz1C1gzcPRiwRm1d+ld5cC/KfEAC9JH4wkSJ7gRtWbf0g8hhVZCMZJhkjAAf0iQgCKkhEhCjEGoBpnH/VTRvC+7C2Mb9GEV5VqAe8d95HIkdoSwCLVexL75TAlVHxxK7O9ueEQEuFqVGyAM1x84XgEgBjEptittlJBZLErRxQh3ifN5xhdxVvRfcDARjGnihQPjwz3R/hKTK1OgHURKLA8hLHnxKQcz23zz26QYNu8NRxAA4D4GAWSMtDfUVhmj+Brp9QBgsA8EdQ1tvCBjA9iO8SGwJ0S2G3bKHGF9tlXn1lauZ6n5gglQXw+HCZQa8lQRxcr7a0roou+HUXCIILMkjaWvkLf2MSqfUmh8rKURZQGVQFFIy/CIRZVqAe8GJyJHIkdoqpVvRfMAbSi8NCjKwsPlTF5cPznGBiMVQ6XmRy8W+koDmV503ypLG22A7b+vxKrQBt+MTJhTogsWB1Eo8uMo2716doDyR30jsRitxtfJdhnAsuyxq0FPqNHmXoPQSsWZ0dhgsjsHfQOYNecGt2gGwzuBsYYAhgXzC/sMO48T6JAUcjQSx0mRmONFph/IDM5/A/GIArGAEDmGN1pQHtWj7RAHw7wVM1IjzhHRwHdX5ViKnusCcqx+mJGX0e8EkAMZuCAFnJ8GupgFjaQOJgAhiEGf8h94QAc3peMmIZfR7+hoWSnnP0AZUZ7UyrV4dRbgIGXIxRQrDsMYKydDLeakdCDqlCC2+Vmg7oahSleR6HuoAUbO5v6Rw+9Xyx42+IAh8W/lzPgjpmit9VqoHuJ33F9qayzqIQG+PewuK7SgGLUOpcYoGN3kbDBHjjlK6rILkLZF49o++I5Icw2+cpe+COdZPAtHBBJLgAORbVmY7w9DKa6f3znMRVkeCNasW3odS5crMXsW9WavvKm9TRLvxccT2hc7NZZnBC5ioyCy3ZwVhCGoU1HdCKwqve1+FvmMQBt8CfBe0kH6bPQ0K/QVtfENQvM5eZJwYKMwAp0TQbKPwQKioLnUi+wJc8YBIkmUuDP6C3pY/Q2raD9AQQhegRtboW+cFhFlTey95AXhgBZznenUXgtLTIBcq69BGgZuonzfDJWDdHTGPPib4N1hgDkE7ClbZWMA887Q1utRPgwcuA+UgOFgE1XM4b4VvEAthfRQ2ZEjQnnT6gozAj2QovEi3AHvpHBru9F8w1hBN6NuMLBN3eitzhrCCb0bcYBABchN4AIW+2N4FE2EIb6Pq+EopxjqHf/AGqvLLosW3cInFEblDAoWECEEYSggjCUFiIJXtwgBDUsxbyAAAGETL3635xEGTWx/wAHKEUOXf8AlpcDlEbhqBygqS8QAMqw1bxvp8D/AHtNbAAlmFUiNC8PB+jPC1BFQuCYdiMDcGsYITKSjWOMZV/4iwTx5z0hAoQEesaWeymw8ewjywonHZL8W2UJyMwqfsA2CVEsYIi9AeMGIkRiP8nFEDG93vILwSQvEBljSqp21wwBSpydM0qvHdetsvWpULGqNLZLHN2lYVRpuwQyvlW7gSg2L0LGeCxNLweuNaYlvSECCzfZeHzB7o9JRvR0ggFdzwlBtCiYscpUzehy1KVU1KbaP1+LZGFQGjgJgQC/ooDXlPmMJ/NSJFiB6Cegn5xie0ZMYn7EoxofsALwAfxRAmAQPYxnLD7LrLD2En7r/8QAKxEAAgECBAUEAwEBAQAAAAAAAREAITFBUWGRcYGhwfAQILHRQFDx4TBg/9oACAECAQE/EIF1kV+fzC9FFFF63fVHEyvFPNZRTATse/YwGjxK+fqABmIelfm9Ta8KEA3r0JB6jiYzIPhH8zhRvkgFvXISg1HUAHzARfp/vcDCGJUdQbrlj+ASgEAKht23rKCvQEBhmCrEPB9OVkaeH4RcBAGfncQJ5nXvB6VpWJdVqo6uLRx4RjTszAA0VPNSyFx53gBAAHlQfmoyNpaJvXqSSNzGRXj9t19QuBgZ3vuICQEYdg
jZCAKMP97tsI+51J+RJH4DApZuIt22rvKi4iyZxaXgAO8IEEGx/kzPL/ZcNQj419DaEkmfWyf2caWh7d6py3+JTGyqcKDAC19fRRaCgheV+4TPF/v3CGjD+K0aN6De0FxJSYHX9JpGIcJM66nA8cP0lEaxb7TL23aplKHH+j1tBOxI7QAF0B3AgYYuYA1YcIQcFARj2mDXtfbHKCg7EHoH/ZRJOT6gd+MIJJwvGrpBdpLmWBgnQAAdyq8qwCCrd88oMr28rlGY0UdQYUJBwvKkBW3PKAyhw55cfw1F9biSHyIa3PqAXn+xyTBaWyy00oYI1X7eOBADnfMr8iCkDmW6XwaRghP9RTrnlWNiCOaHYp6NQhgZHwB7QhnYgfLvjywlonG2lC6dYNa06AfUZehTuPdRDhgLkXUwwZnbSlQs3jbu14EEXvAAI0AJyADckPnpUZImRwsLjPtLqoMnOhL3HWkNhz/CIjzgnYkdoKzv8B7wF1URA3D7Qgi8oMQVYQwAx+n1ouIjoNftebSgowkApWLAA7+N5QCaYDv4eEJqeF9IShXNefUNCjCQofxkBcXyTtCQX4jgoERvWGzjBVGgTrfveAjNm3KvEwUANg+IvbNlbmXG1QPQuBNX3hmMAB0rHyaxANIuZfqIwAHBwqOxCGwh6H3DRA5fMCogAjYA2L/kdNbiCK5sIGIzPiR+d4HIEaC7qL4OkNBkO586YfjASHh4PnlAVdCjxvACShk+oHeFQZPqu8oI1lh0gCocO8FYG6vbeijuvuCJDOnm3qwDN52gwJYQmM/zwQwhmua7IQAJyD5QoKNIxVoPortCCMoutoEV5yvoQgMwccKBxB9rqj0PH4N3rGBBdH+oadRl48oNVj2IajQCeoIfJyodl4PMp1SFAcR52jBIfAh9OkqCTc3mr+gMIuQcUtou0y2EL6bC3eUrsw+DD6OXDM5WNtGngeRjkWCngz5PvnCNwHsfg+UYcUdBZxY5VWcpTdl7m+YoQ2rNmCOfYpI5UcFgPLx+cYAA2pvfWnGAMqMtwnRIVFYvmYGtvgQIi7g08DrzhIc7hNRX4CgUUhEJsq8M3RdFE4GuFUW+duSUQQwJPucwFKxLE3ldQ5l58cowywubPYHEmMdGWyx6utIDBxmPutG/bUQ8x9lQ2eb/AKCkB/f9lix4woSOc3NjhArxHVqKxqs+sAgFYviqA21JLvlRQQICDa2+v0BP0xXL0F6X/QgCTapYvf5FuQpEDN6DwQI4AknwQiE7CHD6F+glMxL5Gg2S4wmOcjoHM/RgMwnyq4RWGELmQ3VuUEQBgGoYJxxpQ9IARZvtjKgOPQPtXFQ0AY/aiPElugOwdYIXxi+BHJFxAGAHcrbxwAAkjFu6AgFEJ6NydKcYyncOoJlWuA3A/kCIFWXV0OtJVrgNwP5AXOwLcCwXwoYQK4CzwRI6gDnBEJxDzRdv+2TCzVVQb35xjwW0jUegVRON/mFiBxjAQcZYGMCjHeEgkEtGywQiSK5lHT6bYQgkHVPVeGCpmL83qcZYRnGolhGcvFo4k4mnKAgJYW6juf8Ato5AGL1O/wBfpBgUcHDUuac1o4wEXf8AiAFBgo88y6w5qAL9QsN/Z4ZfHzA7gFxg8ODQcgTyGhZ8/S4BEAkzphIMqfIPRQWnoMYoFNBCvxH/ACU2CeVlwBC1bMFIlPCtFBikMVUbxFARXJthYfFL5+oINTJSld2dlDGggUJpC7NnvilKtnVaosti8sV7KrVoVMCNTXWXEElhPL00ARRp+64TOeEUpRIwe6A1uF9/MOOOPSgyClPXqQYhw/X4NYgOExfz+QGDKOvGX3AS0UwH5X6iAPzWjTLCZZjR40aE/mpD2kLRB9jkIwvYCWhIfijJGA+xCe0Vt9rD91//xAAsEAACAgECBQIGAwEBAAAAAAAAAREhMUFREGFxgaEgkUBQscHR8DBg4fFw/9oACAEBAAE/EODJrikWalaWyWn+oXZf0adlU7
FAShcA0BjaE34ZskUBS1myOCH/APmGxElo1J9xMARAJiDCCpqFw1+YuA1RJj+MGvXn169evXrxuAOVtzgGWR5SfFKB7MZdkOxn4TATgABgBhAiJXozaECAqQ5JcN4YDGkIMw9cGB9xpRlcJkxkphCUNj5g2iAmRqyxtXpGaJWTT6GIbjhhpJFcajGSmDiUND9Z/IAmR2LGqTJiEkPvsQyhDAmphyQobGqzD3IhrCBEPLfyBjVN1yWWEqGaf8iAt2aT6IjoJqklnc0SnTqzImyMur1ID4E2ib4haCT48igALiQ5RBAAAQZf4Ohf4AyAb/8AIRwxHwAApI2DAAAEqFMAr7EjQQzRohcrADACAt1+rBARcUtLgBS7EiIVAQADkhjt9goCYAMG1hH4BDMAAT6KakAAAPrSoObogQACazeAAAYCeY7sEhFhS0ALPLdBZeygAAcDJ5TKBwAA3HhJlQCAIRZ/UFQYAAApVOBxAgrADIWq6kKAAKE5R8vj3QJPJhmkk1BSMKAAAW3L6YACIBKU/TtpqlLhUnn5HTPnh3fmEgAOcw7DxCQDIiijiCAMUTtox6NACfZpAAEAALxIZgIA2UlmCEQAK8BW1uUnILB2ERAAdXoSdTmSDBBFn60/wYBDjABBIcGAEPEC5lyKABNODwJOBAfevWEEIAJJK4KMx1IObQYqwTWwEQQFJVzNCBIBML9BzXMalm6RsDI7j3YRSsMggDKY4pZ69gIEuPF/AlxAPeIznU0Gj2CQOyCIEAxsjY0RzmV1AHdq1gHAWA4xJyHAHhhhwNXSgRyAAFbYLgLQDVG2axOB2AgI0+OjEaesvcqEZwZqSRkAALqdlgeiAAtgLct8lAG8a5cNMscI34QAIyVpsZgDdB18ReCAAiBELalsBD+IAV36gCEAAyEEJAR+cR5BCIABMdhIQFEaqmICCAJOOqARwJsD8eBSEAIADx5ggBAAANDECIHgAAnhjx2AYBnGk13oCIFkCyDwEbiwBU5KsWADsOYW3ViUgI0jWDICmlc/01TGmTI4YyunCo3sEag5RNnRKF5NxoTWjTT7gHIKjpuK0hIQG3kRdmb8BCbyQCKRi/BHNtnVIjVoa0QvY6dipa/IWupEKU0LBk2NDr8nX5OvyT3nudXk6/J1+Tr8nX5OvydfkhIqXpxSwQZqLHkoHCsDqRhSwkkKNjqK0AEwEEcuACAjLHjdwgAAGfiLyg/gBQZHIXISRxCAWHX02AAAoZobSQcAAAnHCwEYAAI3VPdMkQC81bsX4CgTwBiGiOAAHsoU5JiCIDkiaApACQPvOIFVHMPHoj5JGCA21OWvQHKE5QnKE5QnKE5QnKE5QnKE5Qmo5RXs/Qy0MrxEysicAJVBphlTmUZaDy2O2NQX+DLWB17gAkUUOCADVD2QMtjzZNPZg4O3RpC4VHGfnblUiN2S4fj+j6E4puHAJKu2hASaqUaf0WIIGVGIWdBEpNxIKhAAPcjEhqn9FAmg3MFwZSCkH4AF8AQBAACGF45MTGOYf0UaDzKJ2FdA78TEAACEAQARurCQADQqbTcw5/0PQjIaSLDAWy36MHeyZMgAAESBRxe8An8BrqAGnuBpXxE/LGthUW884DcO3gbvuXFZgLQQeRlk5oswWDKClQLFJOoCUccmRaH2L+opgTvREbI58RgDbnZtUzUqBrwAAsBAqUVdjyuGvDQjVoQk2obfuHFRgIhobUjpER0xkxoAoui0H8eIkVIeWxchliEjUBKGVUlSIGpl8i1roQ/kj+4J/eH6+oCCEMDgp2SJlgHgHQ4bDyBfRACljDPWVCGkIkgfVe9VwMAADB71cpjQ8wAA3GGSYAxhAAB8zLIwlCDSADSBmfQThggEOoCGIF8dhECFgEiIFFoABn1TnBFCAAnfjqxQAIJUUpvyOdYAGxXjYLZdlZG1FdiC+QaByGwBgvHLj7CgB2njxE1ZCNREtGNqdQgAJHgIogrbqhh0dPwSJIPbA/aSRV
h3YGgAPf8A72lBmoELh5xFwYgF3RXbwCASABmPLmkwBBAefGxXCHYvmFNubQpjkaAM0zxulqYykAAOatK0frQGQHfcXGBqrAGGSAUpdEYASSV8DvCgJiABCMqsUIAA6L9TT3qAFoDNcCb/ACLQnt7LCONhJ+BMeorb6TO49QypApFpMBs1AQIYs7ymwRqDcCHdKkaQBAJIP4JyggCAGTU7jVac5kAl0061dg7WE9mMejJjYjKgBOwAAxcl0KAAAAy+CIDQIJckmm0QuAIdGEqxkG/yNqhQ4JAevDxNAI3QdQ5OlQ+3QPGwtIYdRNgGNuV4SpoaADQZfWD6bItQoADtswQrNWwSG20H2176hKABjXKmGR1RLgAQGWTW4kloNvAdJwNSJZEgkhO6IsEsJj/DMVpugUhkaAIPhI9RABpkIEUXwIAHXBAx7MFgABLguS4QIAgFliVmgIAdsw/NgqIwEvoDUUQggo3MkOX8i/M4cOELApadTQqFMOmNqqdwZrnqnBEtnQoOQqAIUZF0EUCuLO/Sc8xQyJEISdC1atgI2DyAJJLAjBsI4KxLLgClgcQAREz9zCxB3gjuFhY0VVIpaSSNEZ0Jpj7VMCph1AJD5B4IPqIBO10aQCxfUCBend0YqAG4pDUaOtoEAKGNbAnpnaABPjFwMD9hgAgezgg5AADKzqordZENhIJq0Ku+dsgJXoJAMdLX0KiKoAjmipSwA0OIeZIoKHvcgiGG+eFAM4pBBQLSPHkJD/AUABbuuithkDKCQr7fJAI/IICJIO0EAaa3AArfrWe4Pkag2JiskQIM1KBAvWfER4JQMksrMkkCVnQPB0Bdi1NuliCBMD/ihxEkgFJM0qEPArUoMYVWN+fgZEtABIn0ORshuvkUqk3Jo/Am02ROQ8RMIT2tCjPCFxqBtbCdUJDTmtuDSeR4dWqLkEQkitO5Sb8jcldUG2Gb/kk9dBJv2ERZ59ELbjr8kfowlXQQIGqaYXt0ZwEAMSGQuk/xQQafik7qRQIA/tUAwjcRqAKDt8TUBBgPcTRKKtJL2PTQHVeYrxRjJKCDjAmm/OMBG0QTGT7xnjkQBjPsZQm6gFgjVwbCAZgABLBU/VCjCH1x6CeD0iSdJVVZMQNjEQUFWSSjCS1QNgHOt+pgDkCgwDhIemcZgIeDpAgcV7/MWYCSZ0CMMjGaRhPVl0fmCjC/hgwAoyre7YRICalPSNIbJPsLCSu5/atWb0IYDyzosyhoAEyRNkJm40pl4FwizWluRAbBLm76ZkiIb7/KdCaMLL+OpyAlMJKjV3OAFyGKw9X5O5EBjYEQog1cBgwABWCpYH0ARAQBZaDhh3HAEAD2gRktMGwTsphsU5Q4ABBABKx/SEwugIoBjiAkUAQIQY4WsKQmdSIYAQggmE1KWC2+EWfK3CZBSD7EeQUyABcsCTldfQA2EJph76QCA+BRVPeFVexgj9mIwGWBaj9pkoBI2Jn6IDVwCu4wERosCBAi6svXsjL9AWSDDkfQs78ASIpg8LBcd0ACvDj5ToIF+iBRBgSstVA1RHVrwAE7iRoxPYj7JIGwDZZc2kgaTmOqB3vTEOG0CwgBpc2qGNg3NIA2VEUZv1xOgqRlxhQWp68TD8HIAHANDNwAAACYkzAUCH5AAj1ZomxuoAHIooRkqBAEloogBAIh6rA6qmhIhP1LtMH0EwLM7bJMmAIGg8RA5EEAdtoizNXq0kZIAibLNWtURQAC62niNkg9lL3yMIgAA/8ABsyRgGQClI8wCdADg/gUN4dIJHcRm3dQCACP0GVpwKQGBZ7oRMOIAFsLQlp/pIAQs3lafKq6E9xUnMjUdUJZi2TPsxIbfI0yTrwjJAi+MVZIbxicDtrbaISgSkaoquNjrxSMgHrhbelpy+hEz0NPRFyZYsELRnM2HJiMGyUkSpwMTJzoPVtZTRz4xkO5GnBq1yQ8rOP5kvIIu3EB7AAfUr8QsBR9gAzsAIkBCheqGAQhdwdfOA
mx0/4BcwSLxBIA8JsmTYSkiyQwB+oVAvFBJHeH/ZCUsCkihry1sQDJNh+IKAGQAQlgt5pAbgtQGk+JBP8AEAkP1UpxEgSClBiX1nImxEtGCHIDAyAW2q8eMp38AcJJ4zwlbk8yb4Nw2J/G/NaHmBABNNK1wlbk2LuZwCQ5gESAKD9BSSY8pYawJGAGRfVhjuKiFhhKrAGGGmX1AJ8qJdgI26YCbfKbwLNGsAG0pEjd2xsvuSoAMI8aQIAH4A0T/EAbVU4SUAO4ZsQToQCwCORUoKQoBkUFlxEIi2AP+mAY+SA8Ctw5pHaIPSCLE3wN07L0etwHACbJW5K3RMoaXWeU8Ohds6jfrieOABvLhPP1ZDu50x+ACARBvBjsGYGi24CAIoVQowU5wLO5ygsawrRSQh1Ang5s/TgVqToxALiUZSUrLgGlI8lKAE4rKOWapMZHY8nCti6D5uHDohOIgABi1CKF1HFDA7HT1Q30UQh3pJ5KJQE8rcQIX+AMAa9nspwAAY1t3o8ggjhP3q7snUBDeV3AsNAQxoBty3ZCzVHMAyptvSG6KV0ACqVFMuk3hqORwKA78oIJoDD+3VJ0wBAA0f8AUCUYQA5WzxyYAECA+E0Ly+5IAI2ykPFwRBuPAADTgIgJAB0z+u2ACCAvacZYoYcjWBQYJz0WuFECMAiMsfeIEAABfYVb4xmGaGSMg1LMqjA9VMiCwM16BQS2CE+TncjAIXU8jBSADkKSo1Opc70iDAIKu8lLVQsAv4sRGQmrIEHBWWX96cgWm9JxNyYIACAQceUYAnQABtIAYgAD0H5gv+RwBRpMWWl6mwFAAhXk7JUjzgABAD4SzhB4gbgCQXE6ZJAADN9IBIigAAGS7lSCt1E1CQz9ok1Y+wnCAEjQhAgAADyS81Uq6BArbL2KgCq67BT52ABznOBAAB4W3Kn6go3rCCM4gRG69OgopDgIEEANq1iBoYAEgpPgWypEx9gTbeschAlDl9n8OYYAL60EoAAOgFlgiCAD7sTe1zEABBGZREHcAIut1fYCgACIllIYw8wLNcENA2AAWLHVe1QSA1sJaMXQ05GIHGpIxXUXEOYAH2DltVABoDXrTwoNCd0aECpNkMIAELoySIETzAFa4AggkC18VYCAgEYEtyJCACC2eUesgBOQAcv3IByWW4AA2T9GCCgTAAkPcnBEYvYAAOZQV1IZgKKEmtVRcw5gAdmUopTdyDkAUq4JAGQggAGjr6D9iBABNCT29BUDUIDeWnQ8zgAAHUTLPGugAAnV7DGTjFkBkA6K8KSSQAEHVXQrigGXJgqgBkSX8xHcXtwyuq8QgEFxA5DIJkBZCy0MO2bWTAABTEe5iJAOdIdVHZwBtfqF5BAbC5KUFDXIPgCyUhAfkAgGpA04aGQQlQaVAIAOZnQn4a5hhgDAAhGGquWG4AAh0z1pIDAgBoYDyBGLOwIQLo8l4w5I1A6L96NCiXARgHyAUQAFxOBGPLCYmIgu3XygAGVgAvbiWdoJan7i/MmVYlCAChq4EEABiY4aRAieYA3fVJGrAhpqGaemOEUIsjg1VCULi1Shq/54WxHrj1R6YWy9MfEt2LL2HEAYCeDli/8ArIiUBvYnmaDEZg87BV9wTK3+fZQ/XDoL+QDQvpivyAESJ7BAZsOl7HvASvsBQHkACQ6OGVqIBAAUn+jA6FAEB188swMg9IAAVR1JAJ/0ALgXN88xDAB0fwiziUH7wyFFL0NPnr4tf/fgYABZXtTFbCG4RNk0+h3HEK4c00AiqOXVf2CQAgvDejDAgBDoFIYyicc4H7AAeH9JpfQCCp0OAlVvn7TmuObXALAIk5gRpZFIkOEQKICO/g8wCABUlCGeSZNBuqoiD99aJAwMhbfP4X5BxuMO2e+oDBK3F3MjEMTK4IM3bYOtQjidDOoDTg2ktBVf9cEvYQIrhUpHUelgUVGAn04YKi4rTsWT1E+jDFF0GGQNqEabYRMekAM3d7iXcgxLeg
lJrxzd3uJdyDEt6CUmvyBRu8q4AOlzAAO3LOqIE6MsAAuF7bBP3AABKI+HIAyEKHNIJeMgXkAQACa6OPoaw6cO1KZBAB7hAQ1nGQb/AHIHvpcsRAQAATh5JvkIAEHIGgLhDsXzEeXYAqOjIFJFZHBZIHh+9Z/4Al7xRAilFeoA39zeaAUWRgCN64gE7A57iWBUnEPAWEBDAEsK001oJJMm5BxgAFVLIWGrAfgQWs+oAIaVjFWQNFeTZ2qK3CAgAMJ4REFFQcgYkCet/IMFgADMtipcdoOQQbDRevjCgADoQlaHACWLmgg0N6pzEB7go5MrC8cIcLAppiGpZpBkQARwEihZuSYhIZCdGcL9KG+xjsERcU/BSAHQn4bVY5gvcgKVDxAUOgQWGljWq19UchgGwJ2aKCHFsjkn4bVY5gvcgKVDxAUOkQWGljWQSnUchgGwTFzAANjYgAI9ufCLgANPQ9IIMpm04G9UsvfEAAV5c0CR8VQQAL6IbdDUlZyQBn7ccSUAxwCB4gqACAEJUn95CUncAIZPrwDAAwELh6SBLHoaCuFDesGAICVoxLJNbntzgzooCGwDNohQYBOhATIDwDQBoJITuiAVKaD0DVp7IB7q6mxXdzCAAALIs1HjgQAByY/jgiQB8y7xIOzgBR1tsKgwJJGQH1SmCiBoAAAgIpp0gM0IpBA5Ylp7vkEAh7sAvoRDQEc3YROYj7hzYlNgxiw1AChVTayyDKYJxm5g42yMMM38cPSQahgWi5QhjSOoAFtptY8iPqAeRlz9IRL6yEOJRRqX2BO5K/mbKX7ICAQCY7T9MFDn6AiX1kIcSijUvsCdyX5vWUv3QECIZjnCGf7Mk8eKEhjAaUmDKkCDblWhtMgQhSOXxQSWQeK7gwMixqoiKZEf8AoIh0F2l0REDJbsxQ8KADWGtHIEBSAPyVAs40xhBopMm8aiok1EUAznsBozEH/KFQAjBYJRBnSzGtARV/JvWKAQUCQwVS0KpUACEqQrMDdEVAFVFtgeAAB0AnJ2Z4ROUmwSAhNOggDBW1YKUEAv7gBAg2D9HIkFMw7nOESSYVFwHB2gZuAEjfg9ZOwATTNOAoT7AGnUNORVOCJKSWxMGagbsayM5t5WwXUBiGDiBsSYAoBDHMEqQEn9AJBA+fqdEDBBu3M4eQEEczwQwwA5CHg1ZKktQRsJDSA5goIbgqX9zTKMRAcACwTgqAAAACA07UwqZ6gzkOmNgDyAYpqwUhPYCFAR/LAoqEABGDjyUAwi0HYl0xsAeQDFNWCkJ7AQoCKmH81UIACyGZxjBiw0BMsZmBOIAo2RDywA2CVjfCNLQ44DuwLl9SjiklkDT4lOR4EWaqlMZND1AnBncHnA9a9DfBV8FEwJalS+EqTKMGWdCL7GkG5SRTQ8C1Q4IrcuP8MoocK1SHM8qGaIeUOU3KfZL6wFamTYpLjqUkNSRgOL2SJ5aFQ5FS+fxOgsC7mU9APC/NDvtM9AQbuMHAjUHkH9GewkwU8rZCDcJy4jCIEAHC4gRAAAGn9FW5QkpdESSuHFuABhICAABGUMIAFROJgQJ0GijWTK/ojbhLcdBN2Pgpsejgf8CAAE7oZZmOahbSURhf8Ahzcbczc0vhHr06dOnXr169evXr169evXprCGwfHw9evXr169enXr069enSp069evXr06devTr06t+DU6VOnXr169evXr169evXr1igSi5cVaFWSBQgEKnt3qFEBoGjvSw8AMBDZHdMYAIJdWLPciQBAz8F9A0A5gEKEWkhSXIAJPFT/89QIBgWFwYQhna1DOPcKZRGwwJAGJqmYYZgAAJv0rEwCDYiDFdlaSZydAvS87wmgaRliDD0OwQLtVzEPfc6BMQPybyAhZJoJQRtkJDerphZzAdyNP69BGAsJQGaDZ0RiEVbpACty9AmHypzqAC3CIjlBLsaA6xHcKEBpSzAbB3CPP1PxoshqJGgCSHQc3AGn+aEIfPIGPi0KJ/u
3JAQB+kDYEBYCTM01ZBMWrYJOwNWIRUjaI1AYq4s11Zach9W3K2CgAAaDOOIH6EA2cEtSgAgNRX/3TBCuYQdtPAEuecN21ER5MIUDx0NEswJsRrsPPwW1q2AQi30EM4ogsIiM8ptj3A0QXpImsmRoAFDDsZRqBqhQBYdmzaJQk/ooDQVgQpQCDaghYXBqvlcuuhaBMgT44gor/AIAdXqOMdijcn0CEXJBEgBqE6jX0+d4TWr6CEGVr2XdxgDVQQSFQABORAdgg2Wch6JAABAABoAwgAAAdh4SgCA/1wmyUDNAOOCggKEAAGwDAIAAAfCO0q6qZWwWnoARDAAJFCAAAAqoxCgCRTLqMHECAhgAAAIAgIAE+BADAmNOk2DjkpQQAAQEAAABMo6BHogxVAADAMgIAAOAB3ZAAQAB9yFuIkJIJAAECAgAFAABAuOuoikowR6EnOgggAWAoEgASKWAIAxliMIsNV8C8MQ0jZTgTlRnkDVF1AyNOxew38eyQoAIrOEHH9hIE6x8eUF0PABrLW6DHypj0mqxYaPUKyGRzOleT6iCCDdK7I0x8/UcmKj3/AOZ7gOxuLokV/wCRsbupdXXX6IoK1Po87wlC5MrAIAM0sQAC0AAQMwoADyQEHkNBOb44pIgwAgAWAkAEAIBwQwgAAdiXvgjeH0HLirVEgEBAACUAIAAAAAgTjaXM4S7RAY4HxggAANhBAAgAEJQAIQMHLRBiHoCgAgQAAIEDAAAbSOBArs5DuTg2QMAAkAEgAEio4kP9gDgcDoDAADxKgQIgABqh5wMVk35ABqabFycASxVAAHAwoAEACIwCSKBAQzzgALAUBCBAAAQBTE9IZwDuIhyv4F4Ll7u9xNeGfGpVawZmoDsA4xcl6Kj7RAcEoiYw0IJ3AvDW2wnYIgFUQPjwC5Ho0G9GLuMvQdRG4kKxICcIH3E8LuGc5CEYDECT0HvNohIOAiHFkG2JVp3Ni/NM6S0zL/SBYejzvCS4MkMMLVsjpCj2cxYAYdHmCfCiEADABJpBDwJQAA+RA5gQA0QCCypk2gODhYkh4AdKSiCEQAFpWMJh9AEGXJEl/wBxgEABKkSDQBbSAcuGAJGANZLJCfMu8Cueip+hCgdDtgAAmMKW3CrJobAZBr2tkFzAFfAlJkB6EoqNEJCkoMONAAAPiQm5gIIEOoQKAQzJbKtJLUfcGP18hIAF4vdXEwIAirJpsOQpacWkwNOKCh7BngQgweBAEviUAggUvU+RPECBOfjYWyIUzF/wed+azfkEsX9GaaaYYYaYaaaaaYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaKYFQkWOPc14zmpHnmZRkes8G8ZHhGr5xgJ2qOHNYIPE5gIASgEAEpIzGQoAAMFkfTkZlkteQh0+BHvMFFkIGTxAwASUw+8GQ3kLTAClzH2Af6Vw8DDAAAaUpr6lQDAWOAO4ZBAAAQdgEngQIAJC0NVyYGEJ3AB608OCHUgEAaCR5D2NECATX2lAAiDgAETOWQhugNZ3+cMqq1GZsAkAGkws7QCAkWBdugtSErOoAFSz5JHQROwARHBUYByADIQQdqMAWAcad4fUdhNSkai6MCjIKQ7ByMpanYuOO0BgDECQNuEpTEF0H4AR0WjiYKrBEMgQWiV5zeQQAC0RAoo/QgCw3G10BiszcwAqLtQTAUewaGXrJIMTjAAHz0ISFBKmP6JDj+GUxQlT/BkYW/zR52TE6N+30FoQlCEFpTiRbp+vQiTto0KYSxKLyZIjyMJwI3iAaKADQaerNDhAAPwsAApyrl5FQAn102uGGGX3GlODXFK0i4yuUdZb2DN/KME1Cgn+0vjGkaBD0wK63dQ8hWvWvozr2QtAJ3nRdjBA7dsTawAivXY0Vq0Do+GIIQF0Rs6h1Y05letsblCMrEYHNK0I97ewknLDIOh8pA6hDuUR/BD0If8AGm5h/wAGX/cv/9k=')
, content_type="image/jpeg")
# FIX THIS to return the content as:
#"""
#base64.b64decode('/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAAICAgICAQICAgICAgIDAwYEAwMDAwcFBQQGCAcICAgHCAgJCg0LCQkMCggICw8LDA0ODg4OCQsQEQ8OEQ0ODg7/2wBDAQICAgMDAwYEBAYOCQgJDg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg4ODg7/wgARCAHGAkYDASIAAhEBAxEB/8QAHQABAAICAwEBAAAAAAAAAAAAAAYHAwUBBAgCCf/EABsBAQEBAQEBAQEAAAAAAAAAAAABAgMEBQYH/9oADAMBAAIQAxAAAAH38D4UJEpr1RzTUC4e71I8gbT0fP8AVbDmAAAAAAAAAAAAAAAAAAAAAAAAB8H2wEzsHB2GAZ2AZ2AZ3X4Oy+S/QPxI7W2ujr65nn7sw+J7/wA54n7BpTp+U/WeUxOlPo/R9Ibry1Dbj2w8zwOX2pz5Wl9lyYfN+ZfUev8AOcCl9tvN0Hj1rs/JsM3n3Q45mwAAAAAAAAAAAAAHHOKTrPPXz0x6VwfH1jWV8LeXnS2/o/KmKuar+Z9T00paF6enVD9wuxEJbHDhXYE1+P8Aqej1fu97rksU28/HfNe2LQn8r+9+teHNNv2/rrGvvQXyQTRWznsrrYzX4IXrpsSMQW8+/LWHjb9GEtOau9259CaAAAAAAAAAAAAAA4xZkebXpDjfPB95uvnbU7JvFTTWScezwxfbbN4foU983G1KO2NwMtFIcfGpmGddgTX4paX29VX0nnT0RlkHyv1sn8z+j9T8L8x7U3ux7n6DjCcEv2msV99z1FcZbD4srbNYhY5IuWdAoAAAAAAAAAAAAAAAAAD5+h1nZM9Z2R1nZHWdkdZ2R1uO0AaAAx8x/f8Am3kHpwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB89bs1fwx579ReHLw/MX1OP12gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPjxt7Hr/wTx/tMXqz8/wCef9rWbP8AW+jkaoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGq2fnD0Zhr9n5w22pdOem+0XKpa6K4Uvp49Ac0lsktrV1VHD0XrIJoFu7nzxZVT3nz9OEsjnTUzNX6hshZ2OspLRS+nVBddb87PnjsbzeOSitDmenldwO6v/isokl98wKDRe7HkugAAAAAAAAAAAAAAAAPJlizWa4niOzLag/SdzqWV0sqS9PVDb+p515tQtTdmzNFzUVgu6X7kRqu+urm67z167hUbusK7t7rznVRX9U3Ltp5HLJhqQ/zvaUlSsonakqWtenYWpsp/wBHayPXNTS674FjVbbG3opvNaS+34DLaG+x5QGwAAAAAAAAAAAAAAAPnmgL7y++aIkGpaypdrlY3Nd2JpxzB+1Eu4gO6JGq2pa9WK5xRZvEU06buUQyZ28KS1R6CRugsvUSOc1IVTy0lnMK6RYPMdkNchQAAAAAAAAAAAAAAAAAAAPJdj7C2cPFUznWm3nY8zvQy1h6j8/egbnzj2N4iM8bncLCc860tum11h82QP6nNb5nz6g8p+rDzBJ9309W2POHpqmudpjr2zk6Y1GacdHF2nn66NTbrvSFX2hp9CaAAAAAAAAAAAAAAAAAAAA+eahjmZ6AVLp9S8kEj5bXPHmiX0yqHdFi8UZvbLVeYbKltZx52PRFOWfBjezKHTHUCaAAAA+XNXZlpOOdUD5VhZ0n040ku8V3XGs+iuar6MXDz80+tw8+ZbF1LUUHmxb3VTBbPSKsLPt+VU2rl9nGr881Ra6fRwvLjBHYR2Q2chQAAKMh+9xZdPdaGRdOdiVJZtDzXrjxn6GoouaATDFldWsp3Y1FvRlB37CnbjTWLy96lomSBSPdSzUgMJm2kk0/Ytrr7uy1PbyJCcP39RofrsyFYNxYfQyy0Db8b6yc7PH1cWv9fYWyzZDb/nP0brPmO
Gb30jjVIaTPJuvPawv05575dK7v6kbf786o7smnnHrDa+l8c3yuWxartSb8ibvr7qK02M0m1U/IpH9VCdLu/s33T61xc75tseLSnpiEWzAvQhIROgAAHDiC5TwacI5I05C/PMEnWX0NAAAAAAAAAAAAAAAHHIAAAAAAAAAAAAAAq7WbPVs3HAp6zfLc01LUy9CHWCZetXPdLYuOEzi5+hOgAAAAAAAAAAAAAAAAAAAAAAAAAAAAADjp6mSRkct7zP14+tfJFfMXlMUklfPxr7dmAAAA1e0AAAADq4zvAAOr2D6AdXsH0AAAAAAAAAAAAAAADyjLJBAcpLuag9I750PtYzJD0pzxzN+c59WfpTKra3tKh4sWV1zDd59SwTuU2k8sOldKtkXj56ukhmKJ4pZzaXn30FVM7uA9rFyzXoVNvMi7+w0tks0laaubtKxtZQ2sWBdMAkstM2bCutLsbziWhsrezoT1pdjL9x5yi6N35ymmpJd7DvpfRQmgAAAAAAAAAAAIFDrt5k6VQTGb+Lv5/mlnvdwpe4s40dTXrwR2LWUypKW2FxpR0vsIlF2PLuJaPuvKshWrshLXNj8rKpkM0TVJT+XEivVmhK0wWiXQVNfGGPEnrv6ke8xfVT9nVG3X2FRTVWBxFG23txAvifkhSal+haAAAAAAAAAAABxqNvSvm5eZvWPj63/y+PWo/Y+gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADH4h9v/AB4r+ffbtD1B8Xl0t38/f6TYboAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABgGdgM52AZ2AZ2AZ2AZ2AZ2AZ2AZ2AZ3TymdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGd8mvoEEzSvp6xofiX44g+872eyNb/tZprsia1GikeS4iOSRcWd7OZ1yGgMGfBnZ+ePiM+bpKfit9tprIr5Wxzt7i33kD0FecD+q+xef6HrmR09MvV8/52fnaf8Al9NrwXdfnpu+3pX4uuXj6/StYyLwt7PB7mlv5ze3Zxn+qrGM+32+iopDa55+Sa7Lwlufl9/eff8AJGq68vUe98AWR4/T7s70OmP0/n/Q7QAAAAAAACGyLU5fX4I3l7uf1eXD3ejsePaRDwfTA0mDBl9Xh6Lva3txZMvzZL3HPzvqgYM+DOzE6n9BfHh9NIXL2+knhp3ZV6e/F+xaxJ5qP6UfyePlfVV3h5J9+/PMrmHQt9iePfcdJ+L3UhfWgunyfTj9NzmCe/59e+mvL/rTXm1mmsPR+70divbMnk8f5HaT1Np/kejrc+gtT7vN5gnu9+fD9H1pZMGnX0Pk8jcAAAAAAAAMAzsBnOwDOwDOwDPxhGdgGdgGdgGdgDPgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGdgGd8mvoHHMK2NzJECkpt1d4ksjmP7Wa7fNdyKyRK/7pM0f+VkKuN2zLEMmcvIaAAAAAAAAAAAAAAAAAAAAAAAAAAA0v3tzMX2m0Gt18hHR7zldNk2pIz292rRt4iObPYDS7oUFAAAAAAAAAAAAAAAAAAAAAAAAAAAcRbUXnYLFlnQAADhBudYnHMHm8cibAA4Rmut8rsRmTY2DQABHvlmRo7ITkNAAAAAAAAAAAAAAAAAAAaPRThcaHf8cyg0ABFOlN1xW9kclCaAA6FVXI1jod5znQKABqW2M6nbDQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAH//xAA3EAABBAIBAgIHBwMFAQEAAAAFAgMEBgABBxITFRYIEBEUFyBQMDI3OEBBYCEzNBgxNTZwI0j/2gAIAQEAAQUC/hdh5GrlcLwuXK5PJ+dhmT+VwI2d8aqtkLl+qSyKVJWj6lvetJ6t51bzq3nVvOredW86t51bz
q3nVvOrede9a/b1W5SlcqBoLjdlZYekLI8fGzkSaOeikcqylL4ydlxWZbs+DHIfK7LiMy3Z8GOQ37NajSos2N6ve4nif0FbzLbyXmVSMX9z7Rv/AB/UUj+9c7BJ9dAW57kefJZBPnDwa2MB/MNtHsRLFVPwtvs5Y28UdDc2wGbcbjS5BKxkuTylzIIPS7Mbn7rhIzq+36cobd6MhubYK5MspqRHPGInETJY8H5GdN3FXGK522OXIt+Iaf8A1+/b0abJ2SzvMka1Z2uvcdz+36p8khq1vFOyVtc6SN4/BWDS3x9plSCgexFZGo9zded84a6w5TZYdjf+P6nHXGfSWuLrS+WRpaHGzZ/UaVGKjz1y5CfT5sqn4WkgPiNuk1rq5Ee4+fcFQ65uLeJdTl+Oya13zkQF7ryCTA+I22VWtq5EA1/wQaii9FPmAEyL3Kdh7qDNX6rJAqE6C9+vVr2ocjEq5ZuwTsdnbTtLDn9vJiZa4GxZza4DE1CDgzxmqy4jU0MOq0qOUgVHUJMemusuyaih8wLgIF13G/8AH9Rt/cXm6SOkSCoAlHiJnyWpsCucek9WC3SWZd/qn4WuOIab956Vbls6XuYw2yqTHQtMhhchEqM68yQZeFNEGXZLLyXmvpDv9r5Pb9gj+x6rE3p7l90pvUpBR7vhCgqO7WLLMfDHAzI4ZVPwtm9fdcQ31qhvukHnXn3YDLvthMqyMwtEGMxIS1GivbQM0vwz6R/vrtJztIztIztIztIztIztIztIztIztIztIztI+S3cTTC1u+C1pz4L2n2x+K7rG35A5G03G4jszpWLGahjFoQ43qK11fwL9/4Q8Qjszvb/AAd1xDUeaZNOygRBROt/wa1vqbrJBp2JJ4/n+2T/AAa5E0vWUX2Hpok03DkNOoej/wAE/wB8TVwaV2Gr2Xd4TWRC4kWKxCg/wT3yJ4nkaXFmx8kSI8SG2429H+fUuIolkaXFlp+xjS4s2P6m3G3mcffYjRW3EPR/pKvzN7+7x481G4tRbisyOSeauXEsktqs1mLapabHha3eF3bzu9ELxbXK1aydmXHscC1vuW2OtDXpEhrCs6SDHYq6lHupOeDG2AeTpzd0nPQ6yf1ZK9NmRx4rVsOPDQZmKersqSzCHJuc5yDQ50Yfw95sOrGGzvi/CtVMOweKVWw74WeLRjXBQq0km6V5mFeRXbYeaGPWka1QHLgRgINHogQdu3T4L2t62n6EWFQDPpD74+p/sGod36MQUXZJNSqQtoam1wRZIo8TtlVn49rW/SVN61vnq4/iNJInjPIL0N6DzseCyj3M1MKon1OvfhVSda1xUPQ6v0cBD8T4ecZdO+P7CV2Ep2oFkm1jjH8M76h5fFEB+Hvj+rl/A+AnB9jk1YBr2+jFGMrA+j9KG2J2pDvyq156L8OAhl0Bw2WFWHVLHthn/R1l6udUEWF6SQvRavnX6+Na0xX/AKFsFE3ecDgYYQBuiDELFioIcSZBwDo+JTh0cvmwcXd4lBIsu2kgUQoZJVMeSOs0gNHKNg4rV1RX4bV4h1qDCACxzIoA2NYq1Eioo661x7Ceh8czIkeeMYoglvQMFDADHEIcZTQw6HoFXFwKexRBTehtaHDKvBqA6GBYoglGMVIfHovkAR7vICDJNXZoorSYlZGRaUmhjOkmEGmAyKMN7+taSn6F+/6HYUPud9WVMl/6h9/d4+lyplCkXarxSNskur4qYKRhVDF2kAZl4VsIYJgs2LNMFLMDCvDSw4vAk3OtRHJF3ho5f7sD4lyLdXYmxZoYaiE7UADzYR8SSKY2WEjgdCsaTFfKkI4sBUbpCXT5BODEAJJwV16YRES5JMyLDRRVgDm0lLQBDShxSAVgfSi0efK9IbYS3+wRKfhejSFeORKXSR5YaLtwYpPl+PQJFtwwNPDuSK+XDTb5Q2m5TqWkDfSIpkVhd0V+Zv8A/StRiRl8ljW0RfSG49ablBgEKMP5/wAq3+Fxv+FUv/i6T
BSS4JblqLcX7lvt8SWeO3EtEJtsj6QMsaIavVPnHNQAME43yh9K2Cl75h3/ALV+rrhcdxBF1EQAg2SME2IA+UfdA2M2QwkAKt3IcBLOXRdeOCrEEr0uLYa+ElCTBgEZVyB4JL+LgAJKF2NgJKa5ZGxp+itTiSGOaMAAJIxVTCmwEdSetqphDdfQPqD8Pljyg78YzwOUUshavz92oeAKv29NdPhDIYWZaLfW5VLHPGA4MeDgfVjNgmMWSWatwKLZbC4M47ac5BeiCfMndq52WadwUduZxyP578QsFgjgYfvPIO2BR90/WDh28ABsHzr4tku/Q2LREkpmC7UddAVoR477n9oZPODbV8kY85J5P9RBySyCrBhZ2lNXGWvli2HJAGsSDs6PyhlcPTC4QGavJ8BB86+LFTth1ya/I5CixB9kQU43EFb2arw7zh4rkI3Lk8pesUblzuQfkX1dob4n4T9gfrpR20yLNYh0S7uw3+DYZq2JFCphWZH4/wD8r9qgQOQ8hlrM+Unf/b0lcaixWH+S/wDp3qn1BkgX1/RPJIuC7Bt4iKNgG4sSk0L4fjthJheSY9GmfVYiOKdVuKV4tp018hxxdycsdUpNAitCDrpMhZrAAjVEPbUrJ3ewhIlTk3EONkcr2WEJjQpUqvwbEe2+b5NaqKg9pG10Mrnwm4J+JlTixo0L9q5KRX1PjHo3BRZ1B3kuyvTI3O3jdwyh7WqlUcnYYtBGk7DJKlpM+N6QxK4WJqfCBeXuHagUskfj0URPSyOLDJNc+e6Jp/JU0yBL30IYDwOTIoVk1zSHjar3NJ1urotdPkR2uV6jvwizVnXic4YWkBvRs+H47YOWXkmPRqrdaHwmflJwS4XkIifKmAtiByYfA0a3bZGij3isyvEZdeIjLD4oTrRGXXlIuHW9aAk6VP8AN5D3ariCUaVyFDlTap8l9iSpdduMSVJK2oMs7SvM5/wSZXZg30eyceQ5w3CjSU8I0iO/F4xt4aQaq0ywWAkBKhSw+UZkFrhBOwJO+R77ElTK7cI0xNlMIm+e7O8fPYegkYN8VLN2SyuamiOcpRA0NsFTFPouuXMQTeusoezIrPH4wk0RIw5a+fP2p0OXGrNRMSgFLi2v3omqHL/1DWQExYKuGkmJnF9YPSAtIg2j34tjizIvmtlolZ+QktlKndRxg6VsYKJLZ5cciS9+kEOVOq9tFMnV843oeVbLjRmhVAGVyaR9H/zOf8DmV2YN9HsYlSK5/EUn4a736oBD3/5DR+GD39eI+cvF2vP/AL1mgbSeRBU+2njVZLlXLKCscrYKH54MAn7XOi8cyGL1AFKjxbpVvrxCteIF2qh2pPqon/JCfzBh4LxKggbcDZ4/P+Kn+MFwuKkgq+3CZqP8CYiRYq0Q4qCMeJFi46IFSJ2JChkTfr7z7MaPGLCpzuSSwqE8y+zIjuTIjMz1qOBUSvb/AEjS4s2N9g9LixnfsXHEMx2X2JMX5HHENMetxxDTH0CzPQfjCRrleOALQVJhuOhdMBwRpaKik2qy/jP6rZIkkbimn1lI2uByINqjT4wvh5VtO+FqtEZXGcch36jHt8VXGTlwJwY5k9CDCVW4hBUlWlIsJKJBMk7K7Gsoax7JGsl2p9VhCWSOWbTcJ85I+xQydRiXciSAsXR8oPiWSIX45VZ2BvEsm5z48AhYB42pyrqTGiBkzxGvcoPSk0vZ9Q6lOXAnEisPNSYfKD0pNL2fUOpTlwJxIsuyNxbCbtbIW2ljfhp565PauAWzKI2D9S6Ugkr3YKQCggjyp5PhrxiBuqW4tAtDVl/pzJ6rHtInmfW9bSLsLZiyVwwoDwLKG2J2ojvyqxHmmuGhLYd30cZLdyqoexyZBKzla+efr4pnUatXX/tZUVHJcjCjVhh3zOPt6TCNTR0nKY4w5xgC2lyVQUp1xHxp/Tjmu738NSn5XbRrXwqIb6aBeXGUcV1n8POTdb+Gl79iIlidjt0CkIdRxZybrfw0v
fsREsTsdugvw5H+nWe1q0N1+Xqy8n138anfzK/qTNdFnWtUYc472mvdVUYYiSigxoEs3V4pyf5MxCehqdAhkxnkKAmOOGQRIqHVhcOnMUQSjBddHi6szQhLSIdcGxaQmhC+2RCjCoRFFGbWlOkoJg4hUgXrA8wQE1geJIYRqQ4gZDgh4SA7RRW5MYKPhVoQKjha4DCRQASFWIEEHIqg+RQp8BoiBsXuADjU3Eq7VQDRVwqmYERDYJivQ0VVFCE+1CEobMCIhsExXoaKqihCfbIhsSRAWvwANdAV2BXBkMHFg2fYOKq7/q5ZPccx+uUhK244gTDk/R5slEIXKc3tNTn++1T+DXKYy2MntNon0V96OT/gv7WKa4Wso+S2KJRZRaA9AmNzxX8F6de05x06WubTWm4v/h3RrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWKTrq6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NZ0azo1nRrOjWdGs6NfI4/K8UjLlOyde9J0iW+uQ29N8FYRJW2hUvYSI4pyJ6lOL0ZdlyG5zj0pTrzktG0K6mvlV9/wBXt9SlaQ0/aBsaO1yDX3n49igyZk4lDHIXyrXm3W+VK84oYUiFxvtyYaiwkxJ7MzcqS1EgeahicTdBTkiCbiT5jryWkz7mCHSYthGzI7D7chhx1tptZBpGNuIdaOnYFdr4nkEGXkEb0BFnwloEWCROOwoEzzuC958yQfZHfbkw/wBFttvaNQoepHSnW9NNpxMCCjGosWOpTDC46EIba9T0aNIzTLScchxHt7hw1a+ZX38Vv2afItsqJ2KPCEjUTt0y8uKk3SNHa1XqlP2Qg3GShurBYreqSfS1OptNW45WHn3ES7LMa8YqEtmSHPoS7TGVqJHFCxraKMpb0IyveoRWDGdyvvPjS1Xe74IqXbW7ILMIbDkF5zBrSuNKuLWQg3WLEMzOJIvZJckS39XGCHU6NFKkDSwDfVU/0U7Xbxja9w9ve1DsrTrqlr3Bj9bBH5JDLzzkFS9okr6yrSnpkHTrkXNIdiufIr7+TJbjTsiMrREDHmonPf4FrebRfQjrrKK+yhvZ5CJkbStQmtdlyOASwmOevMIROlm5M2yViz7FPEpPvnEk0eyxJiLjqcprqH4ljnpjGSbiJKSEuDFn8eykS6bYorTUnTwuSErkaI9G5cQtfGYw9MHNEHpnf4bWr4icjK6uTGzM3o70/s1J1L/H/wCicVORIZ0qKw+1Kcd2y9FlaiSERI6H1zfkn6lL2h51qBL09t1TM1sa2065BSiY898ivv8AySWlPD18b2qScGceEoc0fXJbMwtBURBweOSDap3HM11Q2E7Fj2bjAuUt0LiewxpmuILF3/B29UcnSCT8FNHN+5AwWxUO8VA1YCUSk3NgVP4puM+fR6y9VKSaBLLP7Bmdg6+Akh3SEJueLm8SF/G18RWRW6JQ5VXMW/j8kctrPGFnjsP8a3OUmuB/AaV9GV9/6p0Iztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztoztozto/hq170alqcU5rftGxtuNiIE7TkyK+/wCXYTutV5LiXY63ZLUKDtPs0hWwyi7aZrMl596dpHaXuX7q8qR4oyvfiv1hcTTkl2P3mvDU45H07AfjofSgcwh9lrTEPFQ2lJYj6j54cno3E174iHpuQ/D78p2Al7a2UuO6ia96/wDSp5JuHqCWR
tjW9KT85UkuMvTs+dGGyZTk/wCcpIcjDPF5nuouQ7IGfPIJR40vZKPooySjvkPpEyCzMajiUoGj47sUd85aO7JGteJO6hCFtFfnfZRIi+DSvf2GUR4vzuw4z8j3ON76iHGblf8AnH//xAA6EQABAwEFAwkGBQUBAAAAAAABAAIRAwQSITFBEFHwBSAyQEJQYYHBEyIwcaHRBhQzkbEHFVJg4fH/2gAIAQMBAT8BVOm57g1uatPJFts7b9WmQOuSpUqVKnbZH+xqNrOyBX4q/qSatnNFlKZ/dcnWs2ineIhRgUDMlZq8E83TC/79NhIAnjKeol+5TgpVHlRj3vaQRdKOizV4Kdok0i0K18l2t1o92mSrLyeLK27twWCdBWiKu4R1CVczUYJwkEKlyfTpvvAn91KGWGwRG0EgyFyL+JDZadRlTG8FXq3p13bLquoMCuhAdVgKFChQFA5j6L2gFwie5HuutJVnu1KBqmtkYDdf/O5AQJBZeB4lVuTnU6bIcChlj3H2o2zijgpROeyZU7Ab2SlNxKYZEqclOCwiVPUz05Ueqb4qMVUF69xuR+yIxPjCaIgbiqTYuhaNHGSnJUvdTQBdG77JpxaSmYAJrYjzTd/GaY2GgLHNAQOpdqNm5Ao7DgJWqBWaOCOAQQQQPVj+oeNFHqm5FYyn5p3a41RGDuNU/GfL+E7Pz9F2ncblUz8yieiiIm7plx5/REaDwTsUM+rFSpR2TkidmYTfhDFXhdvJxhBXkRjCDggVrs1jaDzT0sV/j8vVHFvmnH3fNZghSndIca7D0Sh0/L7pmbVTHQ43Kj0FphmsMY3Jwzjd6p/RPyTtUc3JqI9+7p9+Cuy6eMlq35puSONQ8aKMG+aGaqZu43KpjKcOnxuVXVD9QDxnjzVPSeMVHR8/55p7kdsfm75+qd0vMrdCZlj3Fn3Hm4oY3U/tRu9dhPvoAYDx9EMS3xTHYMnjBA6lYyjmfJDNDF3mmmYPimuloK1j5/RAm6HeBR7Kd0gY0K3J3SBjQozh5px3J/a8P+fGKq2d9NrSdVGajYMFChBRsA2aygNjcFGKlQpWWKgfHqvutJVkfZfYlznkVNBp857ka9zCY/iVaLDTFJgZU+iH+kEGVCDUBsIKuocxrC7JOY5oxVns7qjZTGS+6vyDbsyqjIcQFUswbRD1ZKDKoIOatHJopU7xVjsjq7iAqtF1N5BVGxS2XKvQNN0FVrG9tMPATaTSU6xwE4QfhNMVSiypJj14/ZUw68MNpI9tqnhwqE5hXThnzLNaXUHXmq0V3VnXnKzWoU2QUHw+8v75avy/6WKe6TITqzy2CcFyfaGUjiuUeUKdRl1pwXJFrbQqEuKtto9rVJ0VC1gNAJVWpefMp1qJp3CVSfddKNuZHinOlxPd8oFXggVexRcFKvIHrsKFCAUBQoChR10vAMc0uEppnmOqAFA8wuU9VLQSgIHMLcU1oHMcwHmwO+v/xAA4EQABAwICBQoFBAIDAAAAAAABAAIRAyESMQRBUcHwBRATIEBQYXGBoSIyQrHRMGKR4QYUM2Di/9oACAECAQE/AU4wLpmk0nmGm/cg0Y1aTgM8k3kd2iOxuzWjcoNfVFM5lfUAi24ChYd/smCRKaJ9vdap42JrCYHGcdgKwDWVChaRyW+m1hBmRKGtYSsJlYefkvRK9WmX02zhIlf5CxtOn0mQ8VyVU0Wtyw46PUxBrb7AScpQznmvK4/lAEZK49vZC3HqsV5429gIQftUphgyq2nVKjMJA/hQpPNnz6HypX0VhbSMSuWqukcoaMaL3TK5J5KdolR7sIAdqCCxLEsWaLzKJ7LKlSpKlSepS0qlULmsMkZ9yV6nR0y5cjPwVnEn5u5NPoVKzMLVR5HrCqXApsxfuNximHI5rz5hdQoujZEQoUp3w5rbKdICeMJhRnOpQsJmEOxkHooHjvU3PonXhE2TDGDw/tM/K1DwlP8AiBO0b1Wu5x4zX1OKjNVviT74jt/Kd
k4DWn/EXKocWLxhOuDxqTjLyQhGSNz2J1mYkc4W3mF1qTRiMcxUICU28rLnjszp6GB471rPon5hGMKp5JurwG5MMFnluTdXkfumbt6+hqbl6BYZxDaEIJE68+PT3TCcz4+6bY34upsOzBRzC5hFQhda4UQ5HrbOcHWiYuo+LChcxxtUotX0ytZCNkMgebx5pRHVHy2QgYvPch8x8t4TbOPlvWTmmEBdM+U+e4IWK+tvmET8Hr+FVmHbLfdVCYeRxmqsY1rvkhm3Ft3JhuJ27lS+cE7d6ZkOOLJmTU9SQzFrt7IgB7Y8d/8ASd8jvJPuVbo2zlf7lfU70TslS+VnG1UrAJh+Ty3FUosPDcjPRk64jj0hVs3Rn/X5U3dxq6ov1Bt7gZzDUP27gtXoPsjrTomB3AO5QcLURGLzVPNs7T9uYf8AHKk3PgPunCGu8E9t3AcXRExG1WhN/O5HKUXQ30VS2IbAnRjc3Yv/AD7qAZHiPdCIPgmZR+5bUzKP3K0/wg3UUwTh8R+f1hZaNp9KvUe1mbc1OSmyFkbqbIlG4UnmJlDYtUI5yhx6c2K0KFNlCNxCk/r6VV6Ok5y5Iq9HWuZnuTTNGdXbGKFT5BioXSm5X/6RNlKJCJ5gQp6tbSG0xJKp1mv+Uyi5ya4wZX+0ZhGpDQSqWkOdVjUq9SoxwOpN0wuqADJV6opiVTrYjCFMCJ1qq0sKo6aDULHHJPqVI+FP0uoAcLpVFxcwH9J4mmFip8Rx/KqFuEwefCeii10yCwbUSPDgdTT9ApaXT6N60HQmaLS6NiNG+aDbFf65xSgxpAxLo6c5LSaT3OtkqGi1A+SFpVJzm2VGjhjaqoa9rIN1pUEnCZTNFDamMDNFtk7QJNrBNbhbHd8IhYU6xWHWsJ5sN0R22VKlFSpUqe3BpInqhtkR1G0yQiI6mEqOygmESSeoDZOcTn1GvI6s99f/xABcEAABAwIDAwcIBAcLCAcJAAABAgMEABEFEiETMUEGFCIyUWGREBUjQnGBkqEzUrHBFiAwUHLR8CQ0QGBidHWCsrPhQ1Nwc5OiwtIHNmOlw9PxJjVUg4WVo6Ti/9oACAEBAAY/Av4lmBKMuXLT9I3FbCtn7bkCmorEHGw44dMzTdv7dfvef8Kf10WJOHY6FcFJZbsr2dOv3hj/APsWv/MpuOtGKwQs22shlOQfCon5UFJIKSLgj853NfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VfRr+VaoV+JykKiSfOb4//ACKrDgVqbeWc5SnelFr6+0cKysNOPK7G0k01ZTMAtm6dvx8KdirSgvIWpN2lZkKtvt5OTq1G6lYYwSf/AJaaZjvSo7T730Ta3AFOewcaaivTYjMp36JlbwC1+wcfxmY70qO0+99E2twBTnsHGmoj02IzKd+iZW6AtfsHGrnQVtochiUzf6RlwKT4jy8y5zHEwpzbDaDPl7bb7fmJttx1tC19RKlWKvZS2kutqdT1kBWqfJ/WH2/lUfo+XGWSjaJOMPko+tZxRt8qkq5SxZEzE3GULaypCkt5+koWvvNwb04zyawmHHCEqOZ/UgJtfQWHEVtcaL6EKJsWXMoPb1TUTDcGZbalNLuvZA3Wd+nfvtSX4duaSkbRIG5Cty0+P21ya/oqP/dJrktNbYVKdaU7kZTvWrogDxrGMTxYrXypbd2bzbotsE7hkH3/ALGbMhY2zJRHl5TDi4dtGG03sAt5QBurXdfuIrzRhuKowuMrDUSCTFS7k3br7+ysUjI5Qs4V5v8ARMtrgbUzVp6xJA6Fz2dtcjDhchmArFEr24UyFpuLAnXXTUjWsawDF5zeJmKhDjcgMBs6gaWGnGuSs1thUpxou5GU71q6IArGMTxcuL5Utu7N5DotsE7ugOzv/Y4jKfxpDcCHNcb2AhtkvAeqTwA079TWDycOcix5b+KlkhEVtCFDXQgCw9o1pzC8WxFrGIq8PVKSUxktFGXNo
LezjepHKpGLRG2nFdCGIafQpz5bhR3nuN9D21ExN+7pRyb27lvWsCo1BnO43Gk7aTlfwhMEp2KCd4ctqff+YDa2bhXN5paw9WH9ZcfrXO6xv3UlUNQxNeIdBJk9fNcbzekF0BLlukBwNf1h9vlWAt5Locs0gK8KYjKjvnMOkQgnhw7axKbCd2EppAKF5QbdIDcdKmSnuWZxyNEiqeejjCNgQBxzfdUFrEMDkYXHng8wfU+lzaaZrKA6pIrkcqTMzCauXzv0aRnyXy7hpbuqNKXgcpnAZMjYsYgX0kkk2F294F+NQG3IGR56euK+kPX2GVYRm3a6qT409KDGwaElxto577RKVZc27S5B08iP0fLKWyQHPwhcAv3vEViymXEuJD+W43aCxFCQ0woJfsl9JuUtaGyge821ppUZb0Tmuo2PVdWb3SeBAFt9Yfifo2cYTICTHB0WO0GlwkPJeDMh1WibZSog5fdauTX9FR/7pNYJivOtj5vUo7LZX2mbvvpUblDBm8wfCcktvY5hIT36i3+A7Km4Y3yiktYS68Xm43NQcir8VXuR3aV56VMDivN6YpaDNt1ule/du+dT5mDY/JwVM/8AfbaGAvMe1JvdJrk9ME98+a830w2i3rgb1X36VimO86z88aSjYbO2TKAN99d3ZWCYrzrY+b1KOy2V9pm776VG5RQZvMHwnJKb2OYSU9+ot/gOysRj8751zqUt/NssuXNbTeawzCfOl+aT+dbXm3X/AJNs2ntprHlvFxDUFUdUQNX2l83G/fUzAoOMY2pXOMsTA3oOV1K84vmUL349HTXhUPE5L6VBGFCE7F2eitNelf7qixkcp8R8yx3c7cNCAhXblLgNyO635gIvl031GVEdXiz864cQ4m2Yjvp5uW4rCHIIu2htObKTxvfupCVKKyBqo8a94+3yKTDW22/wUvcKzmVDU5wcPWHsOXSr4itiQ6n6NaRqPlUvDNvzbbpttMmbLqDuuKfgyAVMvNFtdtDYi1QXcQxx/FI8AHmLBYS3s9Mt1EdYgVyeCp+2GGKkXGwtttrf+Vpb31GirxyU/gMaRtmMP2Kbgg3F3N5F+FY1LTN2Rmt+iTsf3uvoEr363LaT7qh4ehWdLDQRmtbMeJ9+/wAiP0fLjEkaFrHHVg+x4mpD7dnHVOFS0qPSB++lQ8cwl59l1GQulokAfyrd9jpUbC8BwKbIW2iypYYIClHeqxG/vqNiOJOIh7JwLS0F5nD3m26sTkRl7RpchR999a5Nf0VH/uk1dXbYWG+htWXmAfWVYj5E1JzXQGCM5PeLi1NrkLREKtyXlgH7aSFvsoKuqCsa0plD7S3U9ZAULitm3IYcctfKlYJpcxIcDad6SNaZaAcSp1kOpuOB++itIIGYp17jb80+8fb+VR+j5eUDZJSk4s/f/aq7a9D6RhIsjajpW93GrOLO/eFaCkuuuuyiBqhKlC/2Vi6MTVGbQtKubqa0Wgm4y249u+n2IbbWI7FYU9MCiFtJV1UlO7XuvXJr+io/90mk5L5tmoIt26fO16cDTC2kFjKFKQUp14m/ZbjrU19pTgWhxC2L9RZCR49lXDMmMlbPqxbuKOt0EkWA9vjTBLa2lDD0t5lJ3GoTThxLas70lpIQnSx6VtQe41gnoVoKFHadG2Xoq3++ojGxcS242lbhI6pQNx/3fA010FNOJhNZFKT1VjN+xq7jamll1ZKFDUXUfzTauPxGvW+I163xGvW+I163xGvW+I163xGvW+I163xGvW+I163xGuJ9/wCJKxXBp0NrnSy481KzJyqO8ggGv3/gH+3d/wDLq/P8A/2zv/l16HE+Tyf66z/4dZUY9gjaf5C1D/wqUrFMcgJYe/fK2XFuLWOzVIqPEYFmWGkto9gFhRStIWnsNXO0X2Z3FKHzP+gxLCic3HTd/Ehbi1JQlIuSo6ChOUjZNuE7K1vl7KYkuZNvudCTuP8AEcRkKbSqU4GQpSt1+yuaPOPyA0LpVnJCL
9gqXBu2bp2m/pdnv/iOWmXVSWmU2twCuNqdXiinWWcnQy8T30y0uO4wdvcyEDpgXpDrSwttQulQ4/xFtRUIWv8ArFW+2pIwyG/zFxfoC0sBAFuOulNh+Eja5RmKFqGvjSI0dGzZR1U/xF5lzqPzzLm2G0Ge3bbfbybaHJjymr2zsuBY+XkXIlPtRmE9Zx1YSke80h1taHGli6VJNwR+QVDEmOZaU5lMBwZwO22/yOGJKjyUoVkWWnArKocDbj+S20OTHlNXtnZcCxf3eVLjS0OtncpJuD5FvyHW2GU6rccVlSn2mkOtLQ42sXSpJuFDt/NTf9E/eaNPSH1hpluS6palbgBSpmFck587CvVkKkJbUsdqUEXNT/NAW448iyWl2SoLSQcpvuOlYJh3NXsQxN1pLDEZoi61JSL68BUXDMdwN/BHpekVfOEvIWey43HyNYKnDJE512PtGtivpKVc9G3uve9Kw/GsAnYdPUjNFZadD/ONbAAjjUXCsawJ7BnZQPNVmQl1Kz2abq8z4VhcjGcUS3tHG0OBtDY71HdTWC4zgr+CznklTF3w6hy3eKxlxxaW204UkqUToOprUow8PUMHaOVE5xy22V/JRbd33rlBiMTCGIAhPvZ2miBtlITfMbJGppOIYZyUmTYyUXfXzkICTxCbi67dwrz0hZZiBJU7tN7dt96ViTXJfEXMBBvzvapz5frBveR76XPEbmqQ+psJ2ma9ra7h20/NlL2cdlBWs15xhcjpj+GlOdDq5iELUntyWJprEYoWlCyQUL3pI3inpUlYbYaQVrWeAFec08mMQ8w7+ebZOfL9bZ77e+pE+UvZxmpDi1GvOMbkdMdwzJnS6qYhKyntyWJqXiWFRHJDT7SkPBTgQpgWOZXfbsG+9NS8Vg8ww6JGTsnw+Fl8fojdw39tecWeR0xeGZNoHVTUBZR27OxNYjiUTMGXWOqvek5rEGoLsLkxOnYZGjIQ5KD4STlABKUb1Cvwh2yjAyX3dO+7Lb619KOJK5GyxhmXPtTNRnCO3Ja9N8ovTKhuJGzQE9MqvbL7b0zJxvkxLwzDHFBPORKQ6UX3ZkjdTLriXZTz6skaOyMy3Vd1Mq5QcnJODwXFBIlCSl5KSfrW6tAg3H5jRCxJjnEbzZmy5ynW57DX/uj/APad/wCasYSxe4eVu+qFJv8AKsOeg8tUtxFR07NCcJaVkFt2/hurFsmORsadflZ31MoSnZr4ggKNjWDxH8UfwnGs6jhzzaDv0uOzs4ioHnmRBxvDJEkMB1Ccjyb93/r7fJEJ4YSbeKq5JXH+Re/sqrkN/PVf+HWIYJg8+Pg0aClJekFgOuLKhwB0tXJtiVjMrGJmVSnC7YBFwqwCRurGYEaYYl8NClf9pa1knuvahEWyiJPgHm8mOBbKRxt3/rrlx/OZf93WD6W9D95rlGlgHMJazp9UFBPyvWHPhTYhCEm54ABOtPlGiOfOZflU7Ew2Hlso6KTuJJAHu1rzni/KxUKIpjbLYhR0oyJtf6TfQ/nK/urFQzckJSVW+qFC9RX8zYg8ySSb6BOWpmJtth1TchWzQd1yQBfu1pzEsZ5WrixTHLq2IUdKMote2ffWKAf5p+oGJNtpecbioDaVbrk2F6kYhjXK1xmPzcuLjw2EtgaXtn3mpv6K/wC8rCnW1IEZMJNzwFk61ieJxmQ4HcTWISV9XWwv7N9TpuN8rXQ0mMpSo8WOlpJNurm3kHdurDWMckLiRHFkIeQkkoXtFW3A0ucnF4WP4VHHSRJRlcy9x4/F7q5FT48w4OiXGJYfcZDuyWobrHS+oFSGcY5csDDlD0xdwtpCd/bm01qEwHxKS2whIeG5yw3+/wDMY5QbSRzwR9hkzDJbwvf3+RWGx1PPx1LUo7chR138N1OiHiOPYXGcN1xYkzK0fdakQsPYDDCTftKj2nvpMee2s5FZmnEKyrbPaDTEyTNxjGH2DdjzhK2gaPaBbyIx8uP88TH2ARcZMt+y17++s
OxlxyQJUNKktJSRkOYWN9KwqdIckJdw93aMhtQAJ036d1ec0SsTwuflyrfgSNkpY79KgTmTNTNjO7Qvl7Mt4n65I191qkY6HJHPHmAypNxksLd176dtO48w7JZlOt5Hm0EbN3vItv3caxXDWnZamJ63FvFShmTnFjbSo2HR1OrZYRlQXD0vfWJJgR5GKIGd8sPLF3L7x1furn6sZmw4ahtHcE84+jzcU7O1zrTW3aLCn3lPJQRuSd3yFPw5TYejupyuIPGkNPTcamwUG6IT8y7Cf6op6JCXIUwt4u2dIOUngLDdS23EhbahZSSND3UUJkYuMOK85w7nZ5sf6v8AjT2B2elQHVKK0vqBOvsAptt+ZjWIQmzduFJllTCezoi1SsHbMh+C+VZ0PKGgVoQLAaVMwxcjEcQgSEhOwlPZktgX6lgMtNtyJmM4jDb+jiS5ZUyj+qLVJ5PIfm8xeUSSVJzpuQdNPuoR25uNMQrAOxWpeVp63FSbbz3V5mciI83ZMoaTpl7Ld/fSG5czGcVjNj0UeZLzNN+waUeT6w9Mw7XR8i+pvvAHGm2pGI49OgoN0w5E27I7NAK5jOjJXHHUA6JbPAjspnnuIY5izDRuiPNmZ2h7rVYaD8685OFYaZOa+1MVOa/be353RC51I5n5sz7DaHJe51tuv5C9MkvynucrGd5wrVbsuaMR7F2A8DY5UKUB7wLViMzC5akq2QcakR3eAUCbEd1YbKxmamP+5m9o48q6lKyi/eTRYw7EWn3/APNlKkKPsCgL+RAxSe3FUrqosVKPfYa0tzDJrUpKetbRSfaDrSW8SxFqO6f8mAVq8Eg1zjDZbUtq9iU8PaOFPIk4ohtbTxZWnZLJCxv4fPdSW14qtGCNRihSdiq22vxFrn7K2XnXEPOHM78wznYZL9e1rZvfUrnOJtNKjPbJ1JQq+bsAtr7qU9hkxuUhJsq1wU+0HWhGxHEWmHz/AJMJKyPblBt76XDgTW5T6Wg6QgG2U8b7vJyhmtY1iEsMyFB0ysyxHcOgQno9W/urm0qcuVjKczj4Uk6Jzaa2t2aCpU6U7sGWk6ryk2O4bu+n3MaxNxye0VOyCWVnKnNYbha2o0FHFH3wiAGwva5SdDuOledkyEnD9jttrY9S171ycleeMRipkPXhoYzoTK3aLGXd7bb6S9icxqIhXVzalXsA1NLOFzm5WTrJsUqHuOtBjEcSaYeP+TCStXvCQbVznDpbUpntSd3cez81ttYbiXmqT5svt+bh3S50saP/ALcf9ztfrrF3mFFLweWnMN+qkpP21FhReQbD8NbAzKOKM2fuOsRbjWIxsQgnDo5lFyGxt0u5En1bjsrB8SwpuPKkYe6V81fPRcvb56Vhf4U8m5mDYo27liSio7PN+kLXHiPIvlLhMCPjLbkcNLYUvK43b6t/27qfLmDTMC5SvM+lbfuNqka6cDu32FY9jMhAcxF3EVoLihdSEi1k92/7KQzBSGWZ2HlyS2jQZgT0re77a5avraQt3zipu6h6uZWlNf0T95r/AOj/APFXLKYtpCpLc7I2sjqglW6sdbYQG23MPS4pCR63R1rEsXfQlzEJU1zbOKHSA+r7K5RR4iEts80SvIkaJvkJ8nLv+fvfYah/6xz+2ak/6s/ZT0BdrP7ZGvfXJrk84Tt3sQ5pJTxCGjc/LLTvJdKjz/zt5tSOOUqzX9ltK/6P4rWjbMzIgdwyCsacmJDpgRm0xUr1yXAJI+fjULGFShExbZqQ20l1KOddxFrq93dUzEonJZOMyJclRdmnEG21H+TYi4AqViT2AjAsPkxrPtJltuJLgOiuj+rt/NaeUG0j8zELYZMxz38LW9/kk4FixYfS+tefYqNrK7yBrXm7DcTwSXARow7NbWHW09mmht30W5uJSsVluLzuvOqNgexI4JqBPw6UmFi8FZUw4tN0m+9J7qg/hHLwdECK8HgzAQv0qh2lW7yKx/k9LhsS3Ww3KYmJOydtuOmt6Rj/A
CglwnZjLRbjsQkENIvvN1a9tTZ3JiZhyWJi9o/DnJVswv6ySnWpOOYzNRPxh9Gz9EmzbKPqprlDIkLYWidNL7IbUSQNd+m/Wo3KDBH8ND6Y2wcam5strnXo15/K4/NOYbDJmOfNe/Za3vrlFLkLYU3PlbVoNkkga79O+p2PKXHMR6GGUoCjnv0e61tO2sUk8iMUwp7D3pJ5zGnNOAMO8cul65Rc4l+cH0xk84fCbDOqxtbgBu93k5Qc6cjqTPmLdb2ZJsk9txvpWHS38MfwpGYsKazbW5VfW+lt9KSdxFqcw6S/hr+EJKlMqbzB65I330tvqXjSnmFYcSt2OyCcyXFgZja1u3jX4QbVnzd9JsMxzbXLlva1u/fXJ2ZHWwluBK2rwcJuRpu07qRj+ATI8TE9ns3m5CSWn099tRTeOco5UJ+WwjJFjw0nZNX3nXUmpbvJibhvMZKy4uHiCVZW1H6pTT+JY5ixlyHE5URY5UmO0O4Hee8/nx+dEmYxg775u/5vlbMOHv0pUeA0pOdWZ1xasy3D2k/ndjA8EhNzsWca2qy8vK2yjtV2+yufY1h2DTcNSfTKw1awtsX32Vvo43hojvlQbU1tQSkpUR2Hvpt5P4GhK0hQvzinvP3mTJYbHmG0v33zVjaZTcdHM5qmG9kk6gdtzv8AJiKsOb5MNsRpSmf3Sl4KNvYTTHO/wS5tnG22W3z5b62vxpkqZdmTZC8kWK31nVfqrnIw3k4lO/mheXtvZm6t6mOYa01Exlg7NyNLvlac/lW1tTMqY1yVcbcfDKQyHibkHtI00pnzn+C/Mb+l5tttp7r6eSHGyusRvSc7beiLD4I6mX2/tamJSW3mkuoCgh1OVQ9opMqOwiTKdeSyyhZsnMb7/CnPPvmsP5uhzHPlt35uP5XAsMbjoc84PZVOKPUAt89fxcRwHYJQ1EjBwu5rlZOXwGvlmOw0trlIZUppKxcFQGl6iYk6G0PLuHUtjQKBtXmgx4wwcyFRkSLHOXUpBI3236bqRIhtsvTXX0ssIdHRJPvFYPgRRFLEqIXHl5TmzAK3a7tPJi8mSiOhcWY4y3skkAhIFr676TiMNHJNthSikB4PhWnsJpnzn+C/Mb+l5tttp7r6Unk/greDaxNvnmpc7/qn7qXIVG5Ky0oFy0wXgtXsvpT+ORWg262ysqaXrlWkbj3VHxKKjkkhh6+VLofCtCRwPdSPO/4Ncxsc/M9ttO62bTyYzgi0RxFiMoW2oJOclQG/W3Hs/E5QYS6hgR4OTZKSDmOYcdfxVZLZ7dG+6ked+Y8+uc/M82z7rZtfyLOP8nprMXE0tbJ1t4dB1P7fsKWjlXyUQ/hytHnopzoy96dfmRRew4NCCrZFgNpypCcwtYcKjJRyL2iA0nKrzu0L6eynlYrg3mdaSA2nnaXs/wAO6uVX9LL8mOowvk953aOJOFbnPkM5T2WNMtTOSXMYyldN/wA6NuZPcBrWEIeHo2sPUtkHt6Wv7dnkedYjMMuvG7q0IAKz2ntrDv6Tb+xXll4g5iEtM5bjaor460UJ9VPt1oXufbWF4gtnNLVNbjlec/RnMbVyVwzCQvD2lYpZBQ4olBVoSCb1iMnAWlR5cpSGs6nCux16WvHfW05xO8/ZM3nHnKs+07d9rX9/fU9+Z0pbaksuq+sQ4jX7KONOPzXMcahpfTLL6rosL5QNwFHHsUfmTMZXCMhEkvkbKybgJA0t7qwqVJcU6+W7KWreqxIv8qabgOFmZMkpjIcG9GbjW2wiTKjcoEdJE9yQq61fyu49wrk7yVelmJzhgu4i5FVbPYapSew2NJx/k4X4TsZxPOGtspSH0k21BPfXItMeQ7E5ztPStmy0pITex7bXrBsYwMyIrxnIZfG3UoPA3vmue6uTW2j5+fulMrpq6YTltx091YWziWNqwnAI6MnMG82aRbQC4OYgacKwWXyOTicW8xLb7npNg4Cer0jqe6oPJjnL8bDRF5zLDSspe1sE37Kgz
OTTohQ81p8R19ZQ6ntF79KsYimHePFaQ+wnaq6K+gb79dTxrFUcum55jFY82m69glHdl1vu+dS/N2O+eMLU5eO3fMY38m9/Zpp5OWeHO9FvD3lSmkn6ihcfYKgYxYnEWpgxNauJKlfqI8K5IQ2ulGQ2cQX7LdD5j51gLsCD5ylJhKyR9sG83X9Y7q/6j/8AfLX6q5RKcb2ThxF4qRe+U5U6Uy1h3JjzpG2q7P8AnFtq+u6xF6S1iPJjzXGIN3/OLbtj2WFNu4bhvnWT5stsOcBrS51uaawhfJ1nCJ8oWYekzwpvxAsT76xGCp0PvmM648sbiopO7urD2YPJXzjFSFZJHnNtvP0j6pFxSm8T5OeaWMlw9z9D1zppYe/yY+xIeeRh6WGlSGm1lO26CbJNuHGsFYwxx9GD4mpTTkRbhUlC9LKF/aPnWLN8qsSkNYbEd2MSCkOZF2uCs5BUDD+TWIvy8FmhSXYqwuzC7aFOccf24VyqZmOv8xQWy7HbcKA8culyNbDWnsDgOPDCpEHbpjqcKg2q/C/v8alr5WY4vEVK/e+HthwJjjs6B3+21TMOwkYixgbsPaIjS777jpJvwrlFyacNm2Hucxb/AObV+rSuVHKdeokrUxFJ/wA0kffp4U3IhnJKW+pppf1SVnXwvW05xP8AP2TN5y5yrPtO3fa1/f31PfmdKW2pLLqvrEOJ1qDjFnnsWdjAvSHHSc+YAnTd7PxnuUeEwlYrFlNBE2KhVnBa1lJ7f/WpGGYXyWxxqRIbLSncQYDLbYIsTcnWm8FitPz5LWzFmWyoqOe5IA4b6YaVyV5ZZkNhJthnd+lTjPmbH8NyozZ50TZJPcNd9coESuTvKaTzjEFuNrjQCpJF+02rm3mPlDh3QzbWbD2aPZe++sZZl8nOU8gvz1uoVGgFSbe8ikI/BflijMq2ZWG2A9vSrDsbwXJ54gK6LazYPIO9P7dtZDyO5R8/3ZNh6G/+s7O+1YljGNKSMTxBYKmEKullI3J9v6qgNw4siWtOIIUUstlZAsrXT8XDG4kaRKWnE21qS02VWHS104VyVVGjSJCWsUQt0ttlWROmptuFSYLKgiTotkn6wrmP4K4z58ybPPs/3Nm+ttN1uP31KwlDbkzEFZVuIYQVkqLiSQLdn3VJioZeXJOGZA0lBKycm63bTcVTDyZXmoo2JQc+bJut21hkeUw9GfSF5m3UFKh01cDQRCKROjPJfj5jopSeFKwyFycxqBjDydmuQ4jIwz2qDn2VyfxrD9rjM7DmdlKQtz0khJGpBPHfTWCw8DxXCorjiVTJOINbLKkG/RHGuRqo0WQ7GjKcDjiEEpbFha54VhiIkaRKWnE21qS02VWHS104VyZxePh8zEGYUhRfRFRncsctrD3GsE5UHA5+JYfzPKqIGbvx1G5vk7dawqXH5PYnHw2JMSvI636dw9uzF7AW+dQuVOGQ14iEMbCXGb65R2p7T3VhqIsHGsAwmM7tZTkm7C3uxFgdRU7EVYVicyHOjNtodis5wg9EdLsGlYjHxfCJ3KPBX1ZoiokVLpbH1FJ+81i2NIwpzAcNkNhDEJYyqPaop9Xdu7/I0cNjSHI+Kx0xZjjbZIbs4k5ldmnb30/hmUJYXHLNuwWtU6bi0WRGeZYbhxw62U3QneRfhurAZiIshcRuGsOPhslCTZehPk5QtyYsmOtzEXlNpcbKSoECxF94pvDpnJjlY48lxSiWcOJTqe8imI34N8q421Xl2r+H5W095N9BTczm0jmfmzJt9mdne50vuvT8FyyXesw6f8mvgaxXDsVgT28TjRnGgpxlX7o6JsU/WPs++oOGyuS/K1x9kKzKaw7o9YniaZi/g9yoh7Q/TSYGRtPtN9PJjmKsYNPn4atltLmyb1V0U6o4KI7Kw7FJOGTMJwnDbqZRLRkddcP8ngN3hWKymcKmYvgmIu7b9xpzOsr49Hj/AOlJ2eDO4VgqU+lXiDZS+4eASm+nvrlZJdjSGo72y
2TqmyEuWHA8ajzObSOZjCygv7M7PNmOl916x0zOTuK4oqXKLrE2ExtbpPqnsrztieFPw2JUIpRl6aWhwClDQK0+dRMYwSNIkynI7kOQllsqOVQNjp79fZUfDUC6mo2U24q4/Ok4S607CxAOKcaQ+goIUFki9+3765j+CuM+fMmzz7L9zZvrbTdbj99SsJQ25MxBWVbiGEFZKi4kkC3Z91QELSULTHQFJIsRoP4pOcng3J56hjaleUbO2nfe+vZ5ZP7ixCFsXi3+6mcm0t6ye1Pf+JA523Jc52/sW9kkGx77kfn9zzV+DPMdNnzvbbTdrfLpvvTe2/BDY5htMm3zW427/IvlBtfSKhc32WTvvmv8qxyJHxViBGiTloEpUVLiwL6IA0HDUnWsXwDGnGpMyDlUmQ2jLtEntHhXKifir5fbgTHUtgJCbITuGgpOMs4vCw3ap2kaBzRKwpPDMs6i9Rp8nC3Y+Nvvc2biPIKAp3t11y8aXiZxuDOcbTtHMP5kEotvISsa1gWIpUY6ESUSspTm6twUf4/n9yX5/wCU0LPb0MSdkbTYW0Fqbd/Cfle5kUDlXiN0q7j0d3l5Xf0uuuVH80a/sprl5Cji77mIvZB2kWNqiCZOYiSYbAaejuqs6FJFtE7zu4Vh+N4lhyS0zO2yorKVAqjbrnW/hbSuf2irTluGUzXdr7Mme96hjDoUnDoRTmbjv9dNzfXU/b/EN0xo0eMXV53S22E51dptvNOy0Ro6JTgs48GwFrA3AnjTnNY0eNtF53Nk2E51dptvNc6fwzD3pP8AnXI6Svxt5OcpwnDESL32oioCr+238QC7IdaYaT1luKypFbOHieHy3N+VmQlZ+R8mzm4nh8Rz6r0hKD8zQeYdbfaV1VtquDTMZ2VHakPfRNLcAUv2Dj+JzdWL4Yl+9tmZSM3hfybaHJjymb2zsuBYv7vyLLcmVHYW8rKylxwJLh7B2n8kt11aG2kJupajYJHbSH47zUhhQuhbasyVewj8VTjq0NNpF1KWbAfiKcdWhptIupSzYD8wwG+Ut/MIi3jBd9kXb6lX7dlLVyd8zx8Rb6UaVBKU5Fd5RUNrO2cblFEbaNnQLI1UNP2vQRJgxsTmKF35EpsOKWrielurDcVwrNHwmZI2E6Hf0eu5SRw4+Fci/wBJz7vLhHJOM+7GalguzXGz0i2L9H5Gua+ZYBbtbMW7ufHv+dToT0tEnC9peAColxpP1TUmfLXkjsyXFLPhXnFnkdMXhmTaB1U1AWUduzsTTnKWIyt9lLebYrVkN72I40zimyy54of2eb+Te16RymmR1xWjf0CF51E5ikAbqamYxyXl4fha1Ac5EpLhRfcVIG6mpLu1kLeUERmGRmW8o7gKadx7k1LwnD3FZedJkJeCL/WAGlBSSFJIuCK5PMyMLYxBcmZs2XHLXjnTpC4OvhXmbCcJexrE0t7V1AeDSG096jxqThU7Dn8JxZhOdcdxYWCntChv8kjDMBwaRjkmNpJUHktNtnszHjUxD0d3DJ8M/uuM+fo++/Ed9PSMD5MTMVw5s25yZCWs9vqpIuqpGKw0rOwSrasOdFaFJHVNc/wvkrLmIQDzn91BKUW4JJF1n2Ckv8n+T83FcqbyMzoZDR+rc9Y+zurEMWEMqSw04JEN7tSm5Qe4+yoOOsYU21HXlCYbbmVLYJtoQn7qGKK5Lz/Mdx+61PpC8p9bZ77UjGXVqciOJSWcg6TubqgUZ2KclZsKOpPoF85Csx4Bel0e8VBn7PZc4YS7kzXy3F7XplhETPEU8kuSNqBkI3Jy7zf7qViuPwVYUUnKI6X0vKX2WI4mkT8T5KT4OEq3yOcJWpAPFTe8e+m5DC0usuJCkKTuIplhETPEU8kuSNqBkI3Jy7zf7qViuPwVYUUnKI6X0vKX2WI4mkT8T5KT4OEKOsjnCVqQDxU3vHvrAYyWEvwsUvspaXdAbaaW43HGsJwtyPtTMV03NrbYgmwNra8awbDm43On8
QeyW2mXZpG9W7WsSwSHgcmfOjqTsg28AFi1yVEiyANO3fUnCMQw17B8VZRnLC3A4FJ7Qob/AOFTOS2K4ZHKUtB1hT5Cg/pwBHD7jUvF8NXIwSZFbLrbjT5y3HDXw0rk7jzzZckxHUPyBbrJ3ZvsNeeUulyDstpmQMxt+uuT+EYNIROckzA4cm9tIuDmG8b769lci/0nPu8vJ3HZHQguNmM44dyD0rX+L5Grg6Vi0KKwVRYRCedhd0uK7BUvEm2g863LUEJVuuVAC/dUjEMa5WuMx+blxceHHS2Bpe2feamfor/vKjvrWkNJwkEqv/2dQ2ccfXFgrWobVCSShW1VlOgNLnMYxCx7CmEZi3KRlXl7jx+L3VyGxBiWcIRKaJaeW0HQytQHA6dgqS1i3LllOHqTZ7a4W0hNr9t9Kw+OiSJiG46EpfG5yw63vrkZ/Sg/4akO4Hjz2EcpWWBzhGyJQtGlr30PDt9lR+TvKJMGW4+yXGJcbS9r9Ye7s8mPsO6TUYo5t0nrcNfka5V4bh8I+eG8NUZEpDKekMuiSrffurB9gU5RHCTb6w3/ADr/AKRZDBvDUtQQRuKglea1YZpvC7/GqlfztyuXg/7eT/YrC/a3/aNYwmwtzJVh7q/6On3v3i3JZ299w0Fr/OsW2xTlU2Ai/FVxasE/mLX9gV7JTd65MPv/ALybxRsyL7gO/wCdYsuQU7Dmi9/G40rBg71tjfXsJJHytWnCU3euTD7/AO8m8UbMi+4Dv+dYuuQU7Hmi9/G40rBJ1lc7w5SZTfblz/8AKb1yvxpjVMaM0iErsyWdVXngdKPAw1ttH+tcF1feK5ZHj6H7Kjf0R/xH+FN8+ZVtm/on2lZXEew02cQxLH8YaQbpYnTc7fgAK2OzRscuXJbS3ZanHMOnY3giXDdbWHzC2gn2a0iZgmLYlh2Ii+eQsh7a335gd9QZT07E4cmIDsnYjoQrXvtX/Wvln/8Ac/8A+aCbk2G876chzmESYy+shVc3RjHKRuD/APCJn+it2WtupMLD46I0dPqp+3vNP4FZ+XAdUVLD6rnXvAFNtyJmM4lDb+jiS5ZUyj2JFqewdvbSYLhVmQ+QdFbxoBpRZMzGX4O9EJ2XdhHeE2+2hyfKXJeH2UCHz0jdWbhakMv4jj8uAk9GC9N9B7LWrzdNioXFFsiU6bO27L2U0JmIY7ikZo3RFmTM7Q91qCUgAAaAVhcmQ5ISuA/tmtmoWJ036btKZmLcmwZ7QsiVDd2bluy9OzUuzp+ILTlVLmvbRy3ZfyKxBuRiWEz1Czj8CRslOe2nGIaHCXVZnnXVZnHT2k0+qJNxnCmHzd6NClZGl+0WpeEw2ebQ1IUkhG/Uam541HwyMp5xhq+UukZtSTw9tcwhuPuM7Qru8QVXPsFYtAZdmKaxBS1PFahmTmFjl0qPydW9NEJq2VaVJ2mhvvtb5VJw55TiWH2i2oo61jUSBIw53FsISUR3szllNo+voN/stS0QccncoJridnh0Nybtg0o6aIHV07awyI59IzFQ2r2hIFO4fNCywvihVik8CKdwiW7NxaI4ekZzudfsv3U0iRNxudCaN24UiXmYT7rUlCEhKUiwAGgp3D5oWWF8UKsUngRTuES3ZuLRHD0jOdzr9l+6mUSJuNzoTRu3CkS8zCfdanYK0AR3Gi0Up0ski1LwyLtnY61FSy8QVKv7BTsWAX1occ2ilPEFXZbQDSsTxZpyQqROy7UKUMoy9mlIx8uP88TH2ARcZMvhe/v/AIYljKjKOtdX7W/h5QtIUk7wRpW2iYXh8V767UdKVfIfmh+U5myNouco1pqWvE0PvPKN0jen2+2mkqcW66wci1K49n8R48JbqxtnRtUI16HaaWiApBjW0v63bTyHkusxnRYKNsil/wAR3HYTJQgDKntV3mnlTUsvv5MuR1QBR400lxtC4odzlpYsDru7bUzLaCkocF7KFiP4jXtUjEW8SbaYfXmWlTd1J9nbTbXWCEgAn
/Qf63xGvX+M16/xmvX+M16/xmvX+M16/wAZr1/jNev8Zr1/jNev8Zr1/jNev8Zr1/jNev8AGa9f4zXr/Ga9f4zXr/Ga9f4zSNVan6xr1/jNev8AGa9f4zXr/Ga9f4zXr/Ga9f4zXr/Ga9f4zXr/ABmvX+M16/xmvX+M16/xmvX+M1vX8Zr1/jNev8Zr1/jNev8AGa9f4zXr/Ga9f4zXr/Ga9f4zXr/Ga9f4zXr/ABmvX+M16/xmt6/jNev8Zrev4zXr/Ga9f4zXr/Ga9f4zXr/Ga9f4zXr/ABmvX+M16/xmvX+M16/xmvX+M16/xmvX+M16/wAZr1/jP4iwhcnLzhKE/R7LcDY+t20Stc7Z7VYv6LZ2BI/Sqeedvulk9BKko16KVcE1lDnQdXmZNvVF7/YPipySpc7NzbOCsM5L24W18acC3sRZOllObH5ZQfnURYkuuOu2KvowvduTcW8eFHOVlaVFJzgBXvtp4eVpq/QLKjb2FP66WlIU9+6ciWwBr6LNbxqOlt2UvOhSlhhLYIsQLdPs176FnZOVDWZWTZFSTr1weH6PfSVDUEfjN/pfcfxVLUbJAuTSnXtuhpO9ZAt9tJbQZeZXVugD76QyEyG1LNklaRb7aSqW5s8wUU6E3yi5+VKTzXF1expGv+/QAiYwL7iWkf8APXOoS1qavY5kFJB37j5ElxLyrqKRlA4GxO+lhoLGVKVdK3GnJL6srSBcmlF/bxkBJOZxPy0O+mW2Gpz+f1koFk+25rYtB1K9/SAq6r0GpMiy7266NP8AeoOxnNsg8UlJ++s7ZuL1mWbdnfQzJcte2goKQQpNKxLEVLDAUEgNpupajwFJbbbnRSpWVJkoSkE9mijSsNmOSW5KWws+i0sd1SWsOecW4x9IFNkUWHQ8t0DXIBpQa2ygsm1syP8Amq4alEdoSP10h9o3QoafwNSShBCtVAjfW2EWMHr3z7IXvSiALq399Is2gZBZNhuFHJDipJFjZoa0SxGYZJ35GwL1sVstLZ+opAKaCG0JbQNyUiwHlG3YZftu2iAbUMrTabHSyd2lvs0q7saO7rfptg0gGLHIR1LtDo+z8dv9L7j5CaAWtDRUOilVyrwFbRhLkyWuyWmUIPSUd3upKcVP7sWhZe13XKjbwpnCGG1liK3tHMqutm/a1c3Rg2Zb6tntNpdSVDUHdUVt8XlxJ7SFKy8Mw091PROcOxZUlpSWFIazX3XT3XvasIw6RhYmsTGlOuLUjQXVre44C1YkwiDsGoTYcZfy6aKAATpxG+mcz6l7JhDCmii2Rab5vbw8KITmAz6mo7RmsoZ2auhl1zX1oHnDb8gN9Ow1tc2v86xBKhcbEnw1pTWIOrbaYSCBfIDmvvp7YrzvKb6Kdr4EVEkOostalXPuNOpQensjb5085LjS3Xs56RQD27r1zZC3OarbzhKzuuQKfVe9pJHyTToDiGUIRqpY1T3ns/wqA+7MShuStLbSxuUq2iknwouKDrufV4IF+kNCbVCvu85o/u3KdlzGlrgsqsltBspwjXfUjGWMSaadajtgxl6lxO66SN5voR3b6mKTJjvehUClKukm5Rw7KdhNFSEOrTnKTv6CdK2UhLUZxzUKyZ127/2FP4U84X4wa2jDh8CKin9L+0f4Gp5ya+1/mkt6U0pZBWUjNan33Jy2pCFGzN7Ad1uNDbSlxWQhJ9GdVE1GZbkqc2q7bUaHL+ulxlOuPJyZ0FZuR2/ioSHVMs+tkNlGnklwvtJXZC1caLLstcRoIunKrLm99ZEP2CXMqnR1lDuqaEvLfbbRcFetldlRnTIed2isriVnTXs/Fb/S+4+QtbIpRl+nUDlHgP1U48h2JMC0W+lCD7LGoxcgrSyhCk5y4ClN91uJ7Ke49A0l0MqYc5mgFXBWqt1OvlxSencnaHL4bqacQFJTIxEPDMLHpOi1M4TtZMd2XfI6y3my5bE+ylYYl5Jfj9Bg7RF3Up10Guo3WpvB0PtocfOWQS8i7
SVG5vfeTuCaeEVyZzVuzIbkIIIUm+Y6773qbGQlEqUhyxSvRI4VLnxkhS3yoraTwta5+ypiHWmnucqzvFR104A+NGUkBvbw21gDW2bL+ulvyW0yku5rgb7jX76jrMPJZmwX9W1QHWwUNrWopSeHQptpSrXYvv71VHUgIyZxn76SczSFhu28DjUtxCgsCcoXH6CKW9dkEtdBkJ32t/iLVEYQ1GEQAc1SU/RWFtOPaNwoTY7yi9tjnBHtFvZ2GoxS2VpRiCFLsOqMjgv86LLeV2ORohZ6tRnFrQprZZWkJ3BJOo8axBNzk83K04fSN1LTmyltxs37PRoptMJVlH1yL0yuQpKsvXsKgOoIUDnFx3OKH8DcCGkSWldW6gMtRY1gsm4Ub7uNOIEWPdWgkdg+2trHYTICkBJGaxFqQ4kI5wHS5s+GvClyZDYZOXKlF7/ioQ0ypxn/AClnMubupxSoYZyDooDg1pNojMtFtyjqk++ghpOd1xZU6UKAt7KdiqiCI2UaHaBWtR0PsoabaOYqzXzH8Vv9L7j+K+yhwsqW2UhwDVNxvpT8/F8PmtkZc63l5wOFujYeylmRMgus5DkAKtTbS4tuqO5KcihLakrKGlFWoN9LgU/EQ84wtaeitDmUg8KcckzWGntNkqK6f617p7Kzx5rTrpUc6pTpPR4DRNIVIcUuSY7bbnpCpN0jeL9t6mT8Om4cGn3C5aQtaVJubkaA0XHJeBr6Nh6ZzT/coA4hgyWr7w64Tb4KZwQOHZtRUMJXb6oAB+VBqFMjZ9rnUp5R+Vk0W1PYVcjXKtdj/u1GStbJU22BlaRZIIFtKiy8GxCLFcQ1s3EPqWkHUkEFN+3srm7kvk86q9wouun/AIaL68RwFPcH3f8AkrzfIkolSXJCnnVIvlBIAsL9yRQcbeRHcAyhVr6d9GEX8PWrqiQb5svst99LW/LTJK27Kt23pcV0JUhW8K3Gnl4bMwsQiboS84sKT4JNfvvAfc85/wAlS8RnSo70l1nZBDJJAFwSbkDs7KexLDpcJAeSnaIkFQsQMulgeAFFKZWBKVe4Jec0/wBygh/FsG2P1EOuAeGSoGFbXbKYScy7WuSoqPzP5nb/AEvuP516ifCuonwrqJ8K6ifCuojwrqI8K6iPCuonwrqI8K6ifCuonwrqJ8K6ifCuonwrqI8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifCuonwrqJ8K6ifD+Jrjj6JiWUrShCw4Uo94vrqbbqjMEltLr+UltZuUhJPu3VmkypAaZcW3ZCyFuEKsnUak93GmlSzZxLfpCaIMpDpkILiUBd9n3d2lvA0I6nnVSHMuRZVdVli9793S8KivPOW9CkrWs92+toyttwEdFQNwanhb7jjvOkozITbKCEdUcN9PIHOkqSvpNvrzFOnbrp76lSw/JEhC3Skl5RT0VGwy3tbTspLR2QGZKVAu2Xc23J4jUVdMdPNs5SHNp0tONuy/fQdefeaYT6jRKVLUd2o191ObZ51D0aGHOiu3S6W+2/q+ysPVtcrCl22aRv6BOp+6lrkImoK3lJaUpwhGm4Zb919355zKefLeYKLRIKLj3X+dAKddCgvOhabXQe7/Go+SRJbU1ex6JuTvJuDrXN3XHFg6KVuJ8KbzFSSheZJTvFRnAXMzLWzTc7x399NsoJKUJyi/kkAlfpl5yfqmwsR4CnPSOOrWbqWu1zw4UttUmSqOpZUpno5Tc3I3Xt76LqXnmwSFLbSbJUR28eysyHpAbzFWyuMtz8/de1NPc4fZU2DlyZTv46g60krekXyZHCL
elTfcdPs7aYUbgtKzJt7CPvoOKefdSlRUhtZBCT9vE8f9JeUDaP20TSuevgO59OjwoFJBB4j8glljR3eSRupt6E80ym1nEqHreFSmJLgcLelwPyGdnrFVr9lbPPrfr8azu9YKtft/IJZcz5j2DQVzTp7S9r20pUZGfOO0aH809MWX6qxvFPNPpaW4o9Fdt1Bp1SVEHS35BKGU51hd7X9tCPKjMojFOVSkq1+2i499G2fR/yvyC2l7lVs7DZ3+k4UhpG5I/IJddaCnE7jXONkNt9alPoaSl071f6Of/EACwQAQEAAgICAAUCBwEBAQAAAAERACExQVFhEHGB8PGRoSAwUGCxwdFAcOH/2gAIAQEAAT8h+Doy/wBkGl4UjsDuehXzMilIzCF3G9ePhV5EwYn5SafA5HAES6wqIe9DvAPqSonNH+potAZ+c/6z89/1n57/AKz89/1n57/rPz3/AFn57/rPz3/Wfnv+s/Pf9Z+e/wCs2AT6f6cI1yPxg2UXwAfsGFy5ptmvpbXKIspQv32U4wmREolkjCnB69axhV0iWlDvXH7GJNJHFVO9yqzi+mSY3IzY9XJnj2WM3t28H8Hed4qhkmNyM2PWTtDs8YXbtrRipADa4IbkCEnJRL8UhpLVPzf1JP6EpH4SjwHl+WCkETUninJ8OP7NMuXLneX4XLly/H9i/wAfHh0mlhuDyI+uIZIVMKUDmCgMJN21XR2ACpENvOOiwRdo0BA4NcC1XGLBIAMheGyrXVjgJaM4FfQhWdA+EB3uFlQHBdo4F8Y2LPEXCjpifJOFcHqI6HAtIDiUYEgl9HptKAKVNmArLMi0PJXCaAIaADWlwyQxRYA4FIC6FceJIZjKEiDyjdpInXqioDgu1OBfGJ4TxVwUdonoThXHPXKwjRJNEFbHiMdaQRkuChQPbNrwfrj3LZtpHW7sDrBp7Y0JEUbpKwnijoDoseDvE/5DBw5YgEHtuxM6/wDfNSo0eHK96tKGIojyt1OLm47TKhogvJul+mJ8wr6Q2H1zg+zT4XCobQSLoHCJPnd5tJfnwDwNC7TjOWKBmqYK0vJhH2GSnUVHQObj5HhRkgh3INrrpcDWdbN8d4DpZu4YNjga5FYTZ65WYVdIobUZh04Tvq5Q6TXS6HsCTly5+xf4+NJibhHz6in1zr0sgQs9Ii5xz2MDQEVQpFAJzgv1F0GCNNGCNvDXLERO1JHGwNx2TxMQwd6Ddw2wPHw4P9IvME0aTw45P4GuItyAWO1nbcZGaw2QRdIseTZTVTDd4rL4O+WPj2ZjiWjaWm6sTQCQxEaMVxORY29Z++oaBu26RLn6GTkCaNJ4cEVBfDCLchLHazt+lZxhrsk5pfBnQh8Tkv1Ds/LL0w6BsbIKTJ9d4qpEe1BrpXtwrXJbHIGkLwbSfqy2T37KgdW7gJwGk/8Ad1iENOg5MEvi0kDW4MeV1G6yETlCUOgC6NHxO8DL85DDn659t6ZcYGkioXfTuesX+0d69geh8+XHbL5g0iWOZ/3Omg8Ys5HE5OcHYUulCPTvWC/4PLkgupFm69pgxHiaGw5c3zesFkR5LTJjS6BxxLjYiwQlQWbgob5YUZI40bSsVUV5y5+xf4+Lr6bOHuvlk602j1VBQpdIuN9K+RsQFUAOwg5PmVa5qFfakkNBkvqtJGlCAnm+jGsjbOKAHhNUTp+HC40EAirwBu4mnMAqFAFk55dYKsg4ChobXYSbdbwgar/l8P0ckjOG3snmx+cy/KraetnJnjmEx5g2bP1weKKBx3IMrqb3TJ/TGBpC3oLPAxw0UeTyz49jnf8AB3/Qes6+Hb90ZcuXL8Fy5cuXLi6rM1+U/wAfEgQ7EobZoWeUxLToPECCq18jjYE4fUHCubYfRfQ1/lnXaY2VI0NBF9eSujVBJSmhavafChDSNTkPp+g+uXHNYjlqBArC6usK2W8YlfBLV1ybw0AAXaSjDZwBtAbwVTaJNab74Z8sEWzVQEmvYeVbX
uHoyxaqzpqo55UxyQbnpB4V0PHgwU2LcEqNOdwcxfOcG9ZUZ+zz/SUFQInGfb/6Z9j/AKZ9j/pn2P8Apn2P+mfY/wCmfY/6Z9j/AKZ9j/pn2P8Apn2P+maOPUt/3/ATMUQKtLi7iE8ub8wGRbJTfMv87IQWOif9TD7KAKqOhXjSuvDxjHCGtQwX5BjtpyFMCAjsCPcQvv8A+GKojnU1p+uFb/sdE54QPbj2rJAJqPk0osu3I22IZgvM4UjPf9jnA/ENOaGuueufGScV3BfQvZmibKsiSAmm/Ovr/Y11hck7b10aeq+dcBiOIaKt/RXXHlyfcB/lpNs0lkXTgYNI0D2f2KgKOecAqrp/YYSpLaGCVwtt+eC8kiFBtJ5YQszBWVV2t5V/sXT3/wDvW/UkzrN1Tb0nJUlM7yOL9wrCoBtDfnBBpPrbETSJunxn8NRb8H9rg2bk38F7LAMFZMHY73/H38F7zdW2tLkqSnw4MGmNL+STn4C3+BB5TQYEFp9TYE0ibp8O/wCk8ftscDZ0iKvyDAiJoLnaQdajvBOiL29dKDlmzeT2JaAGrQed8+KiPSYL+WYcAG+Sy4cZJcsdWBpgRJwA61iRp67MEgJ+YRqMGUbxVbqSvqop5uRcQvnJviaMnZ5zWV2TCoGFg8Xemas5x1gFKXQB3jho6bNJsobsOtWhCLXsQ0g0iopOXNC5hwKHwBQ2SYSkXEW5+SfUmMGEjaeYYOaRB3nXHtVYrRek15wxMA7A8e3g8ueVr4QRYpslpxhLuMj0Tr6nImcM1xCVcZ8n2B766PNETdyKQrKykA8tAPLi0/6bPdPA2S0x4s3dag3cHYAmNSXubpQC7ALaMUuKhGXVwN/LODyQahAKURx4jCPUkSI7PD4zpx4e52vkLL3N4JCVIktlDW4/ri5uAl3Bs0I7mneX2vGHIIT5rTiLrDj3q24B42V9nKg8IyRSkkR89+sBsxptv9DoIj19FpPb3lXvnN65TlS+mX6ZLaDtAbtLyXdHFVEEXbIiO5qeMYgGvvZAfIUsnMQjw8hRgBYL5SIo4YfgrH4Zf4X9ckQ/6GP3xIotj+9YP4ZYxnBLOnu7A1e0su0QKCpd6cTRjwpLniJtfXGTdqWdYA4IfqDrPt3hgGAWwJ5suWCP+CZw1kunHdvlG/LGE0QepOATE3FKpvYWdXNHM0yaz0eR+bkMFfPgFgC5Kf6QV9DhTBlEu960axB0+wWnm4QvrBqQU+OarWtj83EIV4f1yxkitYkTRah465wvlPNWDqHBs35fgnY/W4Tt3qI35ZfBNsjBbNE1Jsmss+PcDJxSQ02d5cDxRC+BxFSSlKYF6q3UhQ2tm1elxhoYcQLU0ip7ROMN+oCxCWXARvOD76zQIGLqLy88/wBDU+uOxrZ5Hp6xBy/pjh1MB8kyxoc3vOyd/PE6YQKhyztXl9HAGfRihZ1x8mnFM3plZ1ggU1zeB5DOssosFqTfYenrBiPhgkKlYsiY5Xiw1paU14TvLkXVBaBV1DU1LwYtz2qWG1GvRWYlYt5bBDwHMc6wkHzsggquGwOD3X5DHoNoQANUd83I/wAhFFXYAu+gxW+1nA3kSDql93NSkWnt0h9E088YkQlspmvhBPSYxH/Aj/I+E2OVUPdE0gGh8r7uEnNapRQOhLX3itd2wulD05CTsULdxUuBGzIFnuCII8jvA7kgxbAaHhU83GRDokFKCOOX3idOFdEHYG67BzQ/q6xxA4eFTyOJSuPWAWsp2uXIKJcqAJI0V0PAZwwTrHaXIHfa+cj7ArqQRBo4tMcAm3j6KRUTZ5ydklkXCRh1vDsuuqBEnI9ammmsWt8sacajR44mkTCJgEANH9Fazr+f2f1n9ZX3f4e/jP6eAwum58z+pLj2+WcCDeAkoWHjIez7dd1xne9d4atzE2R/CEo8OM+JIpp5KbYLzkQuoADaoI5Zx8HwJstcabF7kyfZQ3RxECzVN4Wa6Bo8KgHtMgvaFF+EBXpDN
DBPyZhaFNPqwXmK3iE2YhFI67waFP8A+tlNXb1gSiJbPOwnyUNecvu8lZ4gBY8m83I+aL4jY8mlxvpzhlBNbQQaPIR+FfoWZQUSgAUecGKU3UmITHwPGbLo7CJoC8g4yrePLly4oOi8GPXwwNUAK2mg7z0AE0cBLx1L6zYkJE39uddje8qpg2RcxML0Y3m/dVTI9yYOrrBB4oK6suB2NnOXgdr0g/0vSQ/AxWsN636zZK1mAvB4wwJwxfLEFAgNoqVbsfM6x1U85nQrpTmVVm8mWz44xKhD2TmjTY0pxlqE5AwiFWoV+AWGn6Kq8ocV2+2QFFAUCiiG1Mgxd5v/AFOAgeR4eDwMPxM1YKGqxvmq7cSJIq7I/I9+YeMMKcYO+O9QoSgduFhZ4yMZFA1enbV+a+cG4dQBJXJss9/LHI4gBGIDgqodXOn4/DH2PyzsAb0Ug/Rj9MlHWrgX9MwKEvyBB+TAJieAAfsYZHMomE8NdsPysOSbTQU5XXgZuf1/Y5rwFn7ZNraahgiKU+bbf9L5rNdnWzyPb1gqyzFE3pRAHy1gY2eIWAXwKfoaHt0sw76y8H/4QSoM4gea87703DlzzW+dVt48uuE7xrZ0IRtMAA1+vNNUFya7QoiPnngFcqq650JW60b7hDNUpTEYO3g2h+6rX5hBUFiaNFPeJqEIVRCKu+GSd2YefbHlqfqNvWHt70m+xHThe95CyWhKlTyHlxrNC1N8FwFfcNBuZoZaYzOR0EHjpx8Bm6nnVknoU95ruKa+IiI4CteXC5M/1YBjN2tEiI4CtTbkut42qKBrcrjWVs/D+xKPLa9Y3ngH18oXblOsh4LWrw5whsOjibYLsR3iu4FN/wCieYcaunZnrWoNg5t8FkblXTwU3zf6b3/AmH8Pf8KfRbWm01741tec3cgN5NcvrgrAv/onw1/BM7/h7zv/AMSRPIK0GN09EeOaZN5VmwL4W+D6pzmjggZooDxXOTluTQIO/ef5FSb+T4k94HLiRVpounJD1jkeg2gOdR1N63dZ/rHBeNpZdXPQQIs1dwUrHnQ5xouyx6H61mLAmmwPEROOwHSQjgfJQ1wWDk7vGs9ynh4/PWc+86xdxCFOQRiO9OjeGDJTrLE4fWJpyxsQxyAtU+ZiTFmnQn5m141/NVYqhm8h2uDdTh/hJLpCq5DQAzupycZd5ceMQtYQBGKTThibdBwgqs0PPeLhb7bAOCtPI3jBJwKKtBOB75w5MMiY30NOkXfPwF3SIEKl2bE+Rg6UypUaA/fPcp4ePz1nPvKEIeyBL6iHtvJCNfI6bK+f74ipe0vqhF0HpR6w5nIkVLQ5Xbn+doMfQWW9fAy5fjG20NpBnfxcj0m3jah9QP4f+oy6vrP8QwK+wnN7zr+RsyPGdoOmPXHhFFTpxIdZUSj4yQgDGhMB8kxJZjTsRnC+M4LEj6baDR1HPuvlxwp43xkaqugaa3mq52tY73OhDzhacoGr3B5/48fD2vA5cht7fhTOM6wsVCgrdMImqdlqVMSWbeTjW+U1pSM53ZfeDGFuhIKHfnXWVgZlKjlZCvFmus53/IWun7Bp5bxFNUc0Ze0a93DxPZIAEwmgCnnKLuXRHIIheS1pcRkvfbNPbBXtzyxVWVL0wl5LTZgaC+A1Z2cnZ891oEzjDUAROjspqNjnKHrGt0eturEmYcVBvAUB6W499pDdQarQ9b4oJ41w6QHQ0HmL3nX9LABDQtivKiiUb5oB1Syn0SvIYzEJeKkjfAvzewT6CJJJ2rcqHHG73Pxx63tsMSb4zhI4EA7DKGyUu0cLjOXBjd9k5CJxvPDOgRUVoH0D5uFRGjyD964fI7lmjb9DHya6y7oSBXfMmasrvaUcM0aZxTmZAkpvrnW0dXvLIBa4Gox3xes0Nr2NWNNa17xZfjnXVAgpoqzTQd35a7qK3ABWWWFmedmOJ3Ygqb5l7xZ5vnbBOJRV417+DLtROi2HeqPR8xLYSvuRKK8iuhwkW
AcHR0oreKj9DJKBLcmYZEYvd8L8szgIaLYgm08ZGA+x5uxOO1eXBiK/PEJQ24m9nM2RcEySAGDWqgu0W8GPRL4Hez1X1Li3PSAip6j6lndMbIZ27NHuZvP0ia6fsGnlvGa1RzR/mRL7uJVdN1sBgOINGv4gXLeaRV4BoF+qjpDwkqASLD/PCMIUiYMF2emjG3FTUoBmPqVImFKu1ngcIqV7yBpV5JSd5c0S2fJ2dronTjpM2h6G8idUk3nHoOabyuAd5b+h7BiqA7ZUItnOGns6a/wsE2hU2QooxBS8N4EqvpzWBgU3hx/Auo0eZTAwUq6w8o4omnDDy6y/nr4XoPi7L1c/RuPDObk4cdeWN98UGpFQaoeWayaCBEgu2pLhP8gIVN5dtSW5v0WaqFASiP1wYrHChuuiiy6sut5ywkAmrW5tiPCLN3+M2Um4LTrt2QZMHmQQERZXQ48BIqRPWJRxEqdu8WYaPMpAYKVdZ4/wqQHb+h5S5rUy7tQ+sfJHZBbgMuXN7o6K24aV56OVCeBeHg1tQPXB4cjYu7tPMgMRJlNos4ud3Ng5yRyblohZQr1y1h9G4JEQdbKdKmtveHRxV1gBBwukMBICI0k/0MSNFIoYBUmxrnGaUTtAJBaaXs848sHwzObACiMTTjKiTgobR+2cUI5Q+Vh24ormmo8T+hbgtlImw+pHh8i4MVbIEGJNEdrpwRShNdFEHhOQzqPHCYttjU45T4WxhvIOrFBEt2+GEYKQMNtUIvHk3VsGBeqjxKvgnKiYiqZiJ0HqqEl3dY4sOJEtiR3HWAFgDBRU9nFuGA9imqLRJd15upHJbBtHSOotinDvWb14E32BgLs0OKDbEW5X7qcnhXEAICA1Z5Z+gcOGc3Jw468sn59QakVBqh5ZTJQIFhHh/kd/yOv7CiZLcDFB8B4e/j97porTrt/AKRnZTw8B8r8v6/8A/g1L9gJ1M/2Ok9mtLLrHnOIwewdx4kT69YE7lUET0ATRsbyM7nSOiEBitBym5VjjwYFGFV0LXfOQyfOZTsAmy83XAIkKmCyIqHb1e8cy5SoNENAZZfOb7M6FSeybp8nGTX9emXrgxycsrvauenMvDYclwnx+x+X4JZEKeaB9ST64SNNliicnCHmOxxU1LDDkOk2q2XXOLuY5H+g9NgeWbxRqLs05H2q8tP0zx/YS7zIxuZvqO8DMfXyINBCV6zl+W1zmQp5d4QF0TV3G1fvkJNTBAjMR8oq+/wCwCmtRfmVQMeiRR6eYj8DTCUfTzAcBVWl+XTTl0fal+bNj0fwMdvhxfHJcA0InnN0aa0uSpKeP5Pr8NkFX6Bvf8ptaDYhVLwBuuSBCODyhE+X8K7QBRnaugwRKRHv4rtAFGdq6DBEpEe/6BrOWZViDnWm6OelwQYpFNEl4eOFOTZkZSinmED0zREeMvxZ5ew2i+PrWuICVE0OynIBa40NKY79jrHXwnNeFwQ9Dv866oy/cB0+rn7wZ1WxNqoENSL26sGvFQV5AB2qgHlxC4qMZdXA38scGAliushG9OL7Gzl6b6WfTG4GYCGhKVOwm/Fx0/AXcQCfNde3WAr0sMh/Tfy5UMF8gJFQSk7736ct9EFEwmV2qL82nStc5GqK6XEKC1qdm8MxtD9DjBsuu9XcxTKfJaKjz1rhljI3lEUxZ9F4H6RWzVW40VgNdb6QdZV5PkCPLHje+fph1BSvE9BEUGUN5Pz3L1pNjLBw+UxocaG2IIiDK8HZ1k0+IMPAIc6Nw3NpsQhohaioNNlwa8FdBQM2nngG8ZcSyTXpbPBTep3i/SGr+AWWWY8MMqFzPIaaOWbML0J40dmuo7mLqIimIAAeLRzNuLZG6qKJ88eGGVC5nkNNHLNmF6E8SOzXUdzBtAimIAAeLRzNuGUcRIiO7gMcutZObRwItpCvY4zaXo4TF9tB41d7wIzipoIUBVpHGbVtplNIHI4JvSxn/AKaMP2JTxrTsV8EwtYZqKiGpeFm05
4Tv+2UFUHmNNRXgwkd0d5BDpwnIiYkf/uOhIsUAYnxgh+T+mOvhtQP0SUXAaPyTrABFHNxZekChsp0jsWhe8rMh62aOlqHia5wskPFWDqHBs35cpA29OZuBQkD3m5TNqQCTZ2Tcec0WTpddg2Dir0uMTYTwJBmloL8zjBr2Q9DmfKRvMwaU0kRARdcuX5ufZfOIE5+VVgBssRotGW+Bihu0BuuBHycY0lV5AAvSw+Tl2QyAgitxNiafGM6BHrQv3tnI6+dwLh5H6mSYRenNthEAFZn0P9ZuSlQvF+HEOCMw0TTC52r94egGNHYrsavux+nw+IcKK+TZ/vO5Gn1T0mNQw3pSA9qge3KNjQOX+aWEOKK+TZ/vO5Gn1T0mNRVnpSA9qge8lNj0kFPpD5DGDT82oQ+7r64yQZhwfqYF/kYS8OAfTx/YwA2hVff/AKnf/wCot509dxpesGiRCCcPQeLM6v6fxzwSamWEwQbuiPkaOgxRf6CFQYK/S7RcClSFmK00ddTn4E4C4Wq9vvDfzo99ImxPJswUbqLi3PM/VlBhJRq9l37FXHgB0KoekiCPI94Lv5dI4g4eFTyOK0/pnBX/AGG+cp2qlQtACo7Kd83IozGRZVGwukCQ73kzTKKI0iGHzyZH8pQiRzDROtcZWbnCjjUaPFmBxYAgB6wkTTANUtXRxH3mueW7t0Im3q5RoYPr4APpfhUPLYPpR+cL3c/yCwflc8cbdbcuoCV7zVaeIITgMs2NldhaL7t4PlhAzDh0agOV1ijoZ0F2BNeMVSc2jWBADij9c5cD5NCp8sLpwYAEYol+jlgWnxw3aEOGybMKDC1iBKKEhOmQswKoK+0fuZD/AK6CNREo+RPWNnS6GZAQQgk2eciy9fBwBDDxfncBhwYAaAPGQ/y6CNREo+RPWNnS6GZAQQgk2ec2qvXBwBDDxfncD04hwAdEOPGOCpjAg1AkA4xBY1oUBQImid5upE5NCQJ9VyzCwWpWp5Hp6/8AZXFzB7kr4Wt6zq/xd/y5/L38C0fLyHpzSdZz97B/k9fHv/2bYa3EGWdj67tu7/h9cphwbW1h7Ahvf9j6KxYLfYcyyTvIOOnVU0HVDpwp4dICQV2MUJpWO5/YyxLh+UsnFqHT0HgO8c5HcBjxwpLo1gNGTb22kcP0OchoioFRv1Hff9i95CQHtw8ilWgCKwa1ZOMpV3qVgG8AD/4dXv6f+me/7HvPf9j3nv8Ase89/wBj3nv+x7z3/Y957/se89/2Pee/7HvPf9j3nv8Ase89/wBj3nv+x7z3/Y957/se89/2Pee/7HvPf9j3nv8Ase8AIJF+Uvn1nv8Ase89/wBj3nv+x7z3/Y957/se89/2Pee/7HvPf9j3nv8Ase89/wBj3nv+x7z3/Y957/se89/2Pee/7HvPvD/ee/7HvPf9j3nv+x7z3/Y957/se89/2Pee/wCx7z3/AGPee/7HvPf9j3nv+x7z3/Y957/se89/2PefeH+89/2PefeH+89/2Pee/wCx7z3/AGPee/7HvPf9j3nv+x7z3/Y957/se89/2Pee/wCx7z3/AGPee/7HvPf9j3nv+x7z3/Y957/se8CAF/X4lhPRkCpHkjw7QN6zXUgCQTrwDzfWW2dwEbkGtTnvjBYuGfNr1sRq7/RnOjO3DQfmOHnODElSdug9MfLNYXdQLUQq7lMO7mpnZQjxVTJvR9cfEBS702gH92SJ2wgjY8bWrr5ayknsJ4IgJCVrn04IKPKhZB01RZ6OAyhIxLfTxnX8dD8FMOE0joCrnTuh/wBT+znBCuhT+uv1wr+SJqwKLvWKxhwwLxnQuNCDh9lqh+iDgAeocYXWBrFwIBCA2JiKnjJ9vjHzAav+OMX0MRsWki+Eb2Z5P1L0B7VmPGfWxnSironWU4tEWBTTd9QfcxMaiAwzk0u/nngy6wVL0AOSVohpxE4iHNg8i8mAHaT5EyaZWDteDA1WFC7eNDf2xQo7P8fPN0jLm
IVCwXaEHLilMnxUC9WX55DgB0Rot2vgyB7dM3wumas8mE0QlwUu6nWCaYhcnj2xAIRaJnYt5uvTfd/8aqygUWtvngzoJHH3WW+8DZpWG9A350B9M8E50tNHg0aPBjCXCHQ8jDjNYykz+gw+QBCCcadYfqYT8gHxQ+vsXMprGkhSArlPf7GsoCNFGwLvuAfQwxe6oO801vesP5FAbRAqBXLeYiT1UlD3lEUGr2Fkr3XgyjDRx0z0iCdSYZhvJsDdGkJ2duAKoVJUS2cmmWJc48poVzT2oTdZMGbBImAoHAKpzzrD2tUu6O5CIiwmSGzVHRATYC6DfGTemq05VOQla7U1MM9BFKQr38gwtqcLQXtyjV+h7wZDgpA915YzgnI96P3DN5GTYq3XcDXrFpExbjyLkbN5usTiK7p39feRwLR5swQwbHA22FlsxDs6myABAgZ5fCt7v95SSSVFT0KjOU9sXlmKuSQJHbfr3liQXTTABxqU7I8mVNAjiMTD1iUDs4vPvWAp9dBUWopYAKdJkzcZ9U2g2jXyezGYAxGZP1W+4Y9LQop3jcvpXz3hPwGvFhvh3Lx88EYAeP8A4+2t60lljza+ZiIgkhFh41+mMCokYnSfbz79ZfxaNLvQsDNKMBaQrv5JdP1wJUnkSgv7/wAJI6K7jrfjEGT54Q0vcaXCEhbinl+TiZatXQB4dNLrf+ckbpKoNrvr5ZH29jQ0jqP8dBuX2XtRCzT28ucZLYa4QFJXZzXlYK3ZtcwRSWsFIHOmazXUdPzp1kviyKY1rmaH5Yr2S9V5RXhzZOTBZkwmLqc0jvpMab0eVQf2PnKx9NqygQUpg6PLEEDHVcBALUFl8Cse3vc3dkxV7PECMQJEEQvLETjpzgoUAFCLQ5c/tcAvhBc0BwRSfpgGBrAkp7mGAAapdFncTDmOsCyga6I59CdO/XOJXEyw6n6GBylljkP9YuMxgPK2c6e8lgUFQC9+sfORQd16XzlaQu0BXB0zyLrhziQxukoKDSCw3V7ACFmwfJ6R9HzMW1g05Y9UH1yh+qBSciZEkpEsLheUu+8TKp3zAX57cDUY54NnDH1yBJ6vGU4umC14acdj5piwWsa2X7md/wDi+VBGPh1vH1IjrI6E2dfpmyzxpX1w8pTTl4i+H6UXUTr0ZoL2NCIl+XfFxrkJiwtVTy/wo+QwT8K5DzOchC64PPYazofoh5l0n75dGZDXSePF9YwUx/jD9bksQUIjUDjzvOv4qE3cRowcm73gZ/dKoBHlOcsm3LKrC4PRm3DOvY66EIVLF11h1uAQWdAUN+Os2MIN3ShZZfWEpYAEr7G0B524aM1QN5doaeuwMQ/6jKxgJSXW9OIVPMBtDoKxp8saYdt2SvC6uuG7ziwVIa8QLOqfPNiIDtxB9DMAVQGnV7E2/prAGLTZCRUf2YgSvta0dDtnXHGTbp+QQG2wj4byaKo9wHL642siA5Kvn5xwcD+kMKGwhtdYY1dC6ajtKyJ8964C+Q+lpXGqlToAonkmt8dYCk4K6Ej+uXy/XH8lp43s5xYhO+Hz9eOg0648QtOv1YJ0RdLpXDyJu5ZlomVzo3yKSeKfcGvzct4luTwDopPX9uUWVVXlRvPw/Pw/Pw/Pw3Pw3Pw3Pw/Pw3Pw/Pw/Pw/Pw/Pw/Pw3Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/Pw/AAACHX8nrOv5Xf9YCRS8rNw7ByGt95AChDmGApYY6O95yPNADZSAnka3BgiLDUN3r64FVP7YvEXl01vyORAa32SVugjxGCDHyrql/zlKzfuyOS4p7FuUhR0HNu9rd4vv5E0IFUjfLn6YM96s2RcIE6e94WABXFRppbGncGZd4/KC
jplBNJ4Z4ADhGEAlBPIDlTxjJ3YSrV1HEI3Z1vFHLaaLaqbCag8t1CIUoECB5RUy3nj+ssF1iCCcwKDAFONtMm/AE4NIkU0dLcQUazbTedu9k5QhiMNlYO7GA3wwNLgAAJVDXY6RR9OKidKYQdNje/bh/JrzQIZ1mg0aCiGhqU33hy7f1wHAAgGjLNBFFGwO546a4yH2yoQHToCCCGzm6rXSRpXRRVa2PGiSYWSAdGOM1fC+cfm2YeRHSc9NLzjlsegrK+osIriiYSjOgAoLxon/wAL6zr+TP6rt0K4A9uX5SmwIJsJzecPq1Ep/IRSxOIPG/Od7xS+Ua1xjY5gJsUZAp8zH+PigHXxi39p9c+sLT9N8fv/AIzmSnVOA3959P5Cfxita+f/AMwetogNnrn/AFnLQgQIczf+T+kx880fyHrGjZS3EJuXkuKOe1oD1sPf8iOcdQ1B2nnCxRCJJBIt/THwNlCbdMOJ/n+QZdKXw9OfOQ9Hn5+vzhlyS+Xt/kcgnBetmuH64sW42U/44vvOJmYvfOrD6f8Azn//2gAMAwEAAgADAAAAEAAhjAAAAAAAAAAAAAAAAAAAAAAAAABChAggjgAGkEabZ6/z1uSAAAAAAAAAAAAAAFVHwB78sMjgM6ICbjrb126AAAAAAAAAAAAAAAIZLkpMAoofgHRS6CptU0gAAAAAAAAAAAAAAAAAAAAwwwwwwAANwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOJgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD39AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGu5APZ/uIJbP1JctyP6wAAAAAAAAAAAAAAAAAO8UWyf5pZy2EqPuIi3oqwAAAAAAAAAAAAAAAALDu6L3+Tk9EPyAa7AAAAAAAAAAAAAAAAAAAAAMiqgt7hMe/Vszw+4AAAAAAAAAAAAAAAAAAAAAI/JrcWfA6KAAAAAJiAJKWDNfSQiBhHBDDAAAANgZoH0sUi1yZKhzFrFgkjxDqVvwNosbOXMgAAACgNQJgAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAQ6cc+wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACwKgB6AAAAAAAAABAACACAAAAAAAAAAAAAAAAAdjaX1a8kuqMg2VFIAsjYgiJQAAAAAAAAAAAAOzgAkT9Pd7wETiNzS3jYheUXAAAAAAAAAAAAAIVAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC30gAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAANG9HgO19wAAI5wGA7AaNMVAa8WcVwAAAAAAAAJYYQAK14CAAIYxrfERzQqTs7O8RdgAAAAAAAADDABBHDBDDDDDDDDDDDDDDDDDDDDDDDDDDDAAKCCToiiNnQAAAAAAAAAAAAAAAAAAAAAAAAAAAEwz+EwY0wgAAAAAAAAAAAAAAAAAAAAAAAAAAAAyAAAAKQAAHwwAABjQAAAAAAAAAAAAAAAAAAAASQAAM0gAAMoQAAM4AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP/xAArEQACAQIDBwQDAQEAAAAAAAABEQAhMUFRoWFxgZGxwfAQINHhQFDxMGD/2gAIAQMBAT8QgeWVANs2jLI/SAAsbtbKxLxSnnvgNDBmQGLkH0iYLArb9bIDg2agEaHdCAA7tWukUZlq78qZmGj2EOQk9I2Vj9Pp8Q4wIHQXJ8YP9xWXEtfv2lTeikqZnFlebKxhvfQ7wFGPKPXCEDOX38HlPPT12mO0EG2wGgdyVFI+OFytztEwRnCWXu6LpTdAops0CGmMybhyt1hBLxpyNORhLIZvVg94WOP0ukAVtmgA6D8AJDUmPh50gRJttjle8iOOcYEHKEk8YRDziepPOIT1cyEFIKsz1C1gz
cPRiwRm1d+ld5cC/KfEAC9JH4wkSJ7gRtWbf0g8hhVZCMZJhkjAAf0iQgCKkhEhCjEGoBpnH/VTRvC+7C2Mb9GEV5VqAe8d95HIkdoSwCLVexL75TAlVHxxK7O9ueEQEuFqVGyAM1x84XgEgBjEptittlJBZLErRxQh3ifN5xhdxVvRfcDARjGnihQPjwz3R/hKTK1OgHURKLA8hLHnxKQcz23zz26QYNu8NRxAA4D4GAWSMtDfUVhmj+Brp9QBgsA8EdQ1tvCBjA9iO8SGwJ0S2G3bKHGF9tlXn1lauZ6n5gglQXw+HCZQa8lQRxcr7a0roou+HUXCIILMkjaWvkLf2MSqfUmh8rKURZQGVQFFIy/CIRZVqAe8GJyJHIkdoqpVvRfMAbSi8NCjKwsPlTF5cPznGBiMVQ6XmRy8W+koDmV503ypLG22A7b+vxKrQBt+MTJhTogsWB1Eo8uMo2716doDyR30jsRitxtfJdhnAsuyxq0FPqNHmXoPQSsWZ0dhgsjsHfQOYNecGt2gGwzuBsYYAhgXzC/sMO48T6JAUcjQSx0mRmONFph/IDM5/A/GIArGAEDmGN1pQHtWj7RAHw7wVM1IjzhHRwHdX5ViKnusCcqx+mJGX0e8EkAMZuCAFnJ8GupgFjaQOJgAhiEGf8h94QAc3peMmIZfR7+hoWSnnP0AZUZ7UyrV4dRbgIGXIxRQrDsMYKydDLeakdCDqlCC2+Vmg7oahSleR6HuoAUbO5v6Rw+9Xyx42+IAh8W/lzPgjpmit9VqoHuJ33F9qayzqIQG+PewuK7SgGLUOpcYoGN3kbDBHjjlK6rILkLZF49o++I5Icw2+cpe+COdZPAtHBBJLgAORbVmY7w9DKa6f3znMRVkeCNasW3odS5crMXsW9WavvKm9TRLvxccT2hc7NZZnBC5ioyCy3ZwVhCGoU1HdCKwqve1+FvmMQBt8CfBe0kH6bPQ0K/QVtfENQvM5eZJwYKMwAp0TQbKPwQKioLnUi+wJc8YBIkmUuDP6C3pY/Q2raD9AQQhegRtboW+cFhFlTey95AXhgBZznenUXgtLTIBcq69BGgZuonzfDJWDdHTGPPib4N1hgDkE7ClbZWMA887Q1utRPgwcuA+UgOFgE1XM4b4VvEAthfRQ2ZEjQnnT6gozAj2QovEi3AHvpHBru9F8w1hBN6NuMLBN3eitzhrCCb0bcYBABchN4AIW+2N4FE2EIb6Pq+EopxjqHf/AGqvLLosW3cInFEblDAoWECEEYSggjCUFiIJXtwgBDUsxbyAAAGETL3635xEGTWx/wAHKEUOXf8AlpcDlEbhqBygqS8QAMqw1bxvp8D/AHtNbAAlmFUiNC8PB+jPC1BFQuCYdiMDcGsYITKSjWOMZV/4iwTx5z0hAoQEesaWeymw8ewjywonHZL8W2UJyMwqfsA2CVEsYIi9AeMGIkRiP8nFEDG93vILwSQvEBljSqp21wwBSpydM0qvHdetsvWpULGqNLZLHN2lYVRpuwQyvlW7gSg2L0LGeCxNLweuNaYlvSECCzfZeHzB7o9JRvR0ggFdzwlBtCiYscpUzehy1KVU1KbaP1+LZGFQGjgJgQC/ooDXlPmMJ/NSJFiB6Cegn5xie0ZMYn7EoxofsALwAfxRAmAQPYxnLD7LrLD2En7r/8QAKxEAAgECBAUEAwEBAQAAAAAAAREAITFBUWGRcYGhwfAQILHRQFDx4TBg/9oACAECAQE/EIF1kV+fzC9FFFF63fVHEyvFPNZRTATse/YwGjxK+fqABmIelfm9Ta8KEA3r0JB6jiYzIPhH8zhRvkgFvXISg1HUAHzARfp/vcDCGJUdQbrlj+ASgEAKht23rKCvQEBhmCrEPB9OVkaeH4RcBAGfncQJ5nXvB6VpWJdVqo6uLRx4RjTszAA0VPNSyFx53gBAAHlQfmoyNpaJvXqSSNzGRXj9t19QuBgZ3vuICQEYdgjZCAKMP97tsI+51J+RJ
H4DApZuIt22rvKi4iyZxaXgAO8IEEGx/kzPL/ZcNQj419DaEkmfWyf2caWh7d6py3+JTGyqcKDAC19fRRaCgheV+4TPF/v3CGjD+K0aN6De0FxJSYHX9JpGIcJM66nA8cP0lEaxb7TL23aplKHH+j1tBOxI7QAF0B3AgYYuYA1YcIQcFARj2mDXtfbHKCg7EHoH/ZRJOT6gd+MIJJwvGrpBdpLmWBgnQAAdyq8qwCCrd88oMr28rlGY0UdQYUJBwvKkBW3PKAyhw55cfw1F9biSHyIa3PqAXn+xyTBaWyy00oYI1X7eOBADnfMr8iCkDmW6XwaRghP9RTrnlWNiCOaHYp6NQhgZHwB7QhnYgfLvjywlonG2lC6dYNa06AfUZehTuPdRDhgLkXUwwZnbSlQs3jbu14EEXvAAI0AJyADckPnpUZImRwsLjPtLqoMnOhL3HWkNhz/CIjzgnYkdoKzv8B7wF1URA3D7Qgi8oMQVYQwAx+n1ouIjoNftebSgowkApWLAA7+N5QCaYDv4eEJqeF9IShXNefUNCjCQofxkBcXyTtCQX4jgoERvWGzjBVGgTrfveAjNm3KvEwUANg+IvbNlbmXG1QPQuBNX3hmMAB0rHyaxANIuZfqIwAHBwqOxCGwh6H3DRA5fMCogAjYA2L/kdNbiCK5sIGIzPiR+d4HIEaC7qL4OkNBkO586YfjASHh4PnlAVdCjxvACShk+oHeFQZPqu8oI1lh0gCocO8FYG6vbeijuvuCJDOnm3qwDN52gwJYQmM/zwQwhmua7IQAJyD5QoKNIxVoPortCCMoutoEV5yvoQgMwccKBxB9rqj0PH4N3rGBBdH+oadRl48oNVj2IajQCeoIfJyodl4PMp1SFAcR52jBIfAh9OkqCTc3mr+gMIuQcUtou0y2EL6bC3eUrsw+DD6OXDM5WNtGngeRjkWCngz5PvnCNwHsfg+UYcUdBZxY5VWcpTdl7m+YoQ2rNmCOfYpI5UcFgPLx+cYAA2pvfWnGAMqMtwnRIVFYvmYGtvgQIi7g08DrzhIc7hNRX4CgUUhEJsq8M3RdFE4GuFUW+duSUQQwJPucwFKxLE3ldQ5l58cowywubPYHEmMdGWyx6utIDBxmPutG/bUQ8x9lQ2eb/AKCkB/f9lix4woSOc3NjhArxHVqKxqs+sAgFYviqA21JLvlRQQICDa2+v0BP0xXL0F6X/QgCTapYvf5FuQpEDN6DwQI4AknwQiE7CHD6F+glMxL5Gg2S4wmOcjoHM/RgMwnyq4RWGELmQ3VuUEQBgGoYJxxpQ9IARZvtjKgOPQPtXFQ0AY/aiPElugOwdYIXxi+BHJFxAGAHcrbxwAAkjFu6AgFEJ6NydKcYyncOoJlWuA3A/kCIFWXV0OtJVrgNwP5AXOwLcCwXwoYQK4CzwRI6gDnBEJxDzRdv+2TCzVVQb35xjwW0jUegVRON/mFiBxjAQcZYGMCjHeEgkEtGywQiSK5lHT6bYQgkHVPVeGCpmL83qcZYRnGolhGcvFo4k4mnKAgJYW6juf8Ato5AGL1O/wBfpBgUcHDUuac1o4wEXf8AiAFBgo88y6w5qAL9QsN/Z4ZfHzA7gFxg8ODQcgTyGhZ8/S4BEAkzphIMqfIPRQWnoMYoFNBCvxH/ACU2CeVlwBC1bMFIlPCtFBikMVUbxFARXJthYfFL5+oINTJSld2dlDGggUJpC7NnvilKtnVaosti8sV7KrVoVMCNTXWXEElhPL00ARRp+64TOeEUpRIwe6A1uF9/MOOOPSgyClPXqQYhw/X4NYgOExfz+QGDKOvGX3AS0UwH5X6iAPzWjTLCZZjR40aE/mpD2kLRB9jkIwvYCWhIfijJGA+xCe0Vt9rD91//xAAsEAACAgECBQIGAwEBAAAAAAAAAREhMUFREGFxgaEgkUBQscHR8DBg4fFw/9oACAEBAAE/EODJrikWalaWyWn+oXZf0adlU7FAShcA0BjaE34ZskUBS
1myOCH/APmGxElo1J9xMARAJiDCCpqFw1+YuA1RJj+MGvXn169evXrxuAOVtzgGWR5SfFKB7MZdkOxn4TATgABgBhAiJXozaECAqQ5JcN4YDGkIMw9cGB9xpRlcJkxkphCUNj5g2iAmRqyxtXpGaJWTT6GIbjhhpJFcajGSmDiUND9Z/IAmR2LGqTJiEkPvsQyhDAmphyQobGqzD3IhrCBEPLfyBjVN1yWWEqGaf8iAt2aT6IjoJqklnc0SnTqzImyMur1ID4E2ib4haCT48igALiQ5RBAAAQZf4Ohf4AyAb/8AIRwxHwAApI2DAAAEqFMAr7EjQQzRohcrADACAt1+rBARcUtLgBS7EiIVAQADkhjt9goCYAMG1hH4BDMAAT6KakAAAPrSoObogQACazeAAAYCeY7sEhFhS0ALPLdBZeygAAcDJ5TKBwAA3HhJlQCAIRZ/UFQYAAApVOBxAgrADIWq6kKAAKE5R8vj3QJPJhmkk1BSMKAAAW3L6YACIBKU/TtpqlLhUnn5HTPnh3fmEgAOcw7DxCQDIiijiCAMUTtox6NACfZpAAEAALxIZgIA2UlmCEQAK8BW1uUnILB2ERAAdXoSdTmSDBBFn60/wYBDjABBIcGAEPEC5lyKABNODwJOBAfevWEEIAJJK4KMx1IObQYqwTWwEQQFJVzNCBIBML9BzXMalm6RsDI7j3YRSsMggDKY4pZ69gIEuPF/AlxAPeIznU0Gj2CQOyCIEAxsjY0RzmV1AHdq1gHAWA4xJyHAHhhhwNXSgRyAAFbYLgLQDVG2axOB2AgI0+OjEaesvcqEZwZqSRkAALqdlgeiAAtgLct8lAG8a5cNMscI34QAIyVpsZgDdB18ReCAAiBELalsBD+IAV36gCEAAyEEJAR+cR5BCIABMdhIQFEaqmICCAJOOqARwJsD8eBSEAIADx5ggBAAANDECIHgAAnhjx2AYBnGk13oCIFkCyDwEbiwBU5KsWADsOYW3ViUgI0jWDICmlc/01TGmTI4YyunCo3sEag5RNnRKF5NxoTWjTT7gHIKjpuK0hIQG3kRdmb8BCbyQCKRi/BHNtnVIjVoa0QvY6dipa/IWupEKU0LBk2NDr8nX5OvyT3nudXk6/J1+Tr8nX5OvydfkhIqXpxSwQZqLHkoHCsDqRhSwkkKNjqK0AEwEEcuACAjLHjdwgAAGfiLyg/gBQZHIXISRxCAWHX02AAAoZobSQcAAAnHCwEYAAI3VPdMkQC81bsX4CgTwBiGiOAAHsoU5JiCIDkiaApACQPvOIFVHMPHoj5JGCA21OWvQHKE5QnKE5QnKE5QnKE5QnKE5Qmo5RXs/Qy0MrxEysicAJVBphlTmUZaDy2O2NQX+DLWB17gAkUUOCADVD2QMtjzZNPZg4O3RpC4VHGfnblUiN2S4fj+j6E4puHAJKu2hASaqUaf0WIIGVGIWdBEpNxIKhAAPcjEhqn9FAmg3MFwZSCkH4AF8AQBAACGF45MTGOYf0UaDzKJ2FdA78TEAACEAQARurCQADQqbTcw5/0PQjIaSLDAWy36MHeyZMgAAESBRxe8An8BrqAGnuBpXxE/LGthUW884DcO3gbvuXFZgLQQeRlk5oswWDKClQLFJOoCUccmRaH2L+opgTvREbI58RgDbnZtUzUqBrwAAsBAqUVdjyuGvDQjVoQk2obfuHFRgIhobUjpER0xkxoAoui0H8eIkVIeWxchliEjUBKGVUlSIGpl8i1roQ/kj+4J/eH6+oCCEMDgp2SJlgHgHQ4bDyBfRACljDPWVCGkIkgfVe9VwMAADB71cpjQ8wAA3GGSYAxhAAB8zLIwlCDSADSBmfQThggEOoCGIF8dhECFgEiIFFoABn1TnBFCAAnfjqxQAIJUUpvyOdYAGxXjYLZdlZG1FdiC+QaByGwBgvHLj7CgB2njxE1ZCNREtGNqdQgAJHgIogrbqhh0dPwSJIPbA/aSRVh3YGgAPf8A72lBmoELh
5xFwYgF3RXbwCASABmPLmkwBBAefGxXCHYvmFNubQpjkaAM0zxulqYykAAOatK0frQGQHfcXGBqrAGGSAUpdEYASSV8DvCgJiABCMqsUIAA6L9TT3qAFoDNcCb/ACLQnt7LCONhJ+BMeorb6TO49QypApFpMBs1AQIYs7ymwRqDcCHdKkaQBAJIP4JyggCAGTU7jVac5kAl0061dg7WE9mMejJjYjKgBOwAAxcl0KAAAAy+CIDQIJckmm0QuAIdGEqxkG/yNqhQ4JAevDxNAI3QdQ5OlQ+3QPGwtIYdRNgGNuV4SpoaADQZfWD6bItQoADtswQrNWwSG20H2176hKABjXKmGR1RLgAQGWTW4kloNvAdJwNSJZEgkhO6IsEsJj/DMVpugUhkaAIPhI9RABpkIEUXwIAHXBAx7MFgABLguS4QIAgFliVmgIAdsw/NgqIwEvoDUUQggo3MkOX8i/M4cOELApadTQqFMOmNqqdwZrnqnBEtnQoOQqAIUZF0EUCuLO/Sc8xQyJEISdC1atgI2DyAJJLAjBsI4KxLLgClgcQAREz9zCxB3gjuFhY0VVIpaSSNEZ0Jpj7VMCph1AJD5B4IPqIBO10aQCxfUCBend0YqAG4pDUaOtoEAKGNbAnpnaABPjFwMD9hgAgezgg5AADKzqordZENhIJq0Ku+dsgJXoJAMdLX0KiKoAjmipSwA0OIeZIoKHvcgiGG+eFAM4pBBQLSPHkJD/AUABbuuithkDKCQr7fJAI/IICJIO0EAaa3AArfrWe4Pkag2JiskQIM1KBAvWfER4JQMksrMkkCVnQPB0Bdi1NuliCBMD/ihxEkgFJM0qEPArUoMYVWN+fgZEtABIn0ORshuvkUqk3Jo/Am02ROQ8RMIT2tCjPCFxqBtbCdUJDTmtuDSeR4dWqLkEQkitO5Sb8jcldUG2Gb/kk9dBJv2ERZ59ELbjr8kfowlXQQIGqaYXt0ZwEAMSGQuk/xQQafik7qRQIA/tUAwjcRqAKDt8TUBBgPcTRKKtJL2PTQHVeYrxRjJKCDjAmm/OMBG0QTGT7xnjkQBjPsZQm6gFgjVwbCAZgABLBU/VCjCH1x6CeD0iSdJVVZMQNjEQUFWSSjCS1QNgHOt+pgDkCgwDhIemcZgIeDpAgcV7/MWYCSZ0CMMjGaRhPVl0fmCjC/hgwAoyre7YRICalPSNIbJPsLCSu5/atWb0IYDyzosyhoAEyRNkJm40pl4FwizWluRAbBLm76ZkiIb7/KdCaMLL+OpyAlMJKjV3OAFyGKw9X5O5EBjYEQog1cBgwABWCpYH0ARAQBZaDhh3HAEAD2gRktMGwTsphsU5Q4ABBABKx/SEwugIoBjiAkUAQIQY4WsKQmdSIYAQggmE1KWC2+EWfK3CZBSD7EeQUyABcsCTldfQA2EJph76QCA+BRVPeFVexgj9mIwGWBaj9pkoBI2Jn6IDVwCu4wERosCBAi6svXsjL9AWSDDkfQs78ASIpg8LBcd0ACvDj5ToIF+iBRBgSstVA1RHVrwAE7iRoxPYj7JIGwDZZc2kgaTmOqB3vTEOG0CwgBpc2qGNg3NIA2VEUZv1xOgqRlxhQWp68TD8HIAHANDNwAAACYkzAUCH5AAj1ZomxuoAHIooRkqBAEloogBAIh6rA6qmhIhP1LtMH0EwLM7bJMmAIGg8RA5EEAdtoizNXq0kZIAibLNWtURQAC62niNkg9lL3yMIgAA/8ABsyRgGQClI8wCdADg/gUN4dIJHcRm3dQCACP0GVpwKQGBZ7oRMOIAFsLQlp/pIAQs3lafKq6E9xUnMjUdUJZi2TPsxIbfI0yTrwjJAi+MVZIbxicDtrbaISgSkaoquNjrxSMgHrhbelpy+hEz0NPRFyZYsELRnM2HJiMGyUkSpwMTJzoPVtZTRz4xkO5GnBq1yQ8rOP5kvIIu3EB7AAfUr8QsBR9gAzsAIkBCheqGAQhdwdfOAmx0/4BcwSLxBIA8JsmT
YSkiyQwB+oVAvFBJHeH/ZCUsCkihry1sQDJNh+IKAGQAQlgt5pAbgtQGk+JBP8AEAkP1UpxEgSClBiX1nImxEtGCHIDAyAW2q8eMp38AcJJ4zwlbk8yb4Nw2J/G/NaHmBABNNK1wlbk2LuZwCQ5gESAKD9BSSY8pYawJGAGRfVhjuKiFhhKrAGGGmX1AJ8qJdgI26YCbfKbwLNGsAG0pEjd2xsvuSoAMI8aQIAH4A0T/EAbVU4SUAO4ZsQToQCwCORUoKQoBkUFlxEIi2AP+mAY+SA8Ctw5pHaIPSCLE3wN07L0etwHACbJW5K3RMoaXWeU8Ohds6jfrieOABvLhPP1ZDu50x+ACARBvBjsGYGi24CAIoVQowU5wLO5ygsawrRSQh1Ang5s/TgVqToxALiUZSUrLgGlI8lKAE4rKOWapMZHY8nCti6D5uHDohOIgABi1CKF1HFDA7HT1Q30UQh3pJ5KJQE8rcQIX+AMAa9nspwAAY1t3o8ggjhP3q7snUBDeV3AsNAQxoBty3ZCzVHMAyptvSG6KV0ACqVFMuk3hqORwKA78oIJoDD+3VJ0wBAA0f8AUCUYQA5WzxyYAECA+E0Ly+5IAI2ykPFwRBuPAADTgIgJAB0z+u2ACCAvacZYoYcjWBQYJz0WuFECMAiMsfeIEAABfYVb4xmGaGSMg1LMqjA9VMiCwM16BQS2CE+TncjAIXU8jBSADkKSo1Opc70iDAIKu8lLVQsAv4sRGQmrIEHBWWX96cgWm9JxNyYIACAQceUYAnQABtIAYgAD0H5gv+RwBRpMWWl6mwFAAhXk7JUjzgABAD4SzhB4gbgCQXE6ZJAADN9IBIigAAGS7lSCt1E1CQz9ok1Y+wnCAEjQhAgAADyS81Uq6BArbL2KgCq67BT52ABznOBAAB4W3Kn6go3rCCM4gRG69OgopDgIEEANq1iBoYAEgpPgWypEx9gTbeschAlDl9n8OYYAL60EoAAOgFlgiCAD7sTe1zEABBGZREHcAIut1fYCgACIllIYw8wLNcENA2AAWLHVe1QSA1sJaMXQ05GIHGpIxXUXEOYAH2DltVABoDXrTwoNCd0aECpNkMIAELoySIETzAFa4AggkC18VYCAgEYEtyJCACC2eUesgBOQAcv3IByWW4AA2T9GCCgTAAkPcnBEYvYAAOZQV1IZgKKEmtVRcw5gAdmUopTdyDkAUq4JAGQggAGjr6D9iBABNCT29BUDUIDeWnQ8zgAAHUTLPGugAAnV7DGTjFkBkA6K8KSSQAEHVXQrigGXJgqgBkSX8xHcXtwyuq8QgEFxA5DIJkBZCy0MO2bWTAABTEe5iJAOdIdVHZwBtfqF5BAbC5KUFDXIPgCyUhAfkAgGpA04aGQQlQaVAIAOZnQn4a5hhgDAAhGGquWG4AAh0z1pIDAgBoYDyBGLOwIQLo8l4w5I1A6L96NCiXARgHyAUQAFxOBGPLCYmIgu3XygAGVgAvbiWdoJan7i/MmVYlCAChq4EEABiY4aRAieYA3fVJGrAhpqGaemOEUIsjg1VCULi1Shq/54WxHrj1R6YWy9MfEt2LL2HEAYCeDli/8ArIiUBvYnmaDEZg87BV9wTK3+fZQ/XDoL+QDQvpivyAESJ7BAZsOl7HvASvsBQHkACQ6OGVqIBAAUn+jA6FAEB188swMg9IAAVR1JAJ/0ALgXN88xDAB0fwiziUH7wyFFL0NPnr4tf/fgYABZXtTFbCG4RNk0+h3HEK4c00AiqOXVf2CQAgvDejDAgBDoFIYyicc4H7AAeH9JpfQCCp0OAlVvn7TmuObXALAIk5gRpZFIkOEQKICO/g8wCABUlCGeSZNBuqoiD99aJAwMhbfP4X5BxuMO2e+oDBK3F3MjEMTK4IM3bYOtQjidDOoDTg2ktBVf9cEvYQIrhUpHUelgUVGAn04YKi4rTsWT1E+jDFF0GGQNqEabYRMekAM3d7iXcgxLeglJrxzd3uJdyDEt6CUmv
yBRu8q4AOlzAAO3LOqIE6MsAAuF7bBP3AABKI+HIAyEKHNIJeMgXkAQACa6OPoaw6cO1KZBAB7hAQ1nGQb/AHIHvpcsRAQAATh5JvkIAEHIGgLhDsXzEeXYAqOjIFJFZHBZIHh+9Z/4Al7xRAilFeoA39zeaAUWRgCN64gE7A57iWBUnEPAWEBDAEsK001oJJMm5BxgAFVLIWGrAfgQWs+oAIaVjFWQNFeTZ2qK3CAgAMJ4REFFQcgYkCet/IMFgADMtipcdoOQQbDRevjCgADoQlaHACWLmgg0N6pzEB7go5MrC8cIcLAppiGpZpBkQARwEihZuSYhIZCdGcL9KG+xjsERcU/BSAHQn4bVY5gvcgKVDxAUOgQWGljWq19UchgGwJ2aKCHFsjkn4bVY5gvcgKVDxAUOkQWGljWQSnUchgGwTFzAANjYgAI9ufCLgANPQ9IIMpm04G9UsvfEAAV5c0CR8VQQAL6IbdDUlZyQBn7ccSUAxwCB4gqACAEJUn95CUncAIZPrwDAAwELh6SBLHoaCuFDesGAICVoxLJNbntzgzooCGwDNohQYBOhATIDwDQBoJITuiAVKaD0DVp7IB7q6mxXdzCAAALIs1HjgQAByY/jgiQB8y7xIOzgBR1tsKgwJJGQH1SmCiBoAAAgIpp0gM0IpBA5Ylp7vkEAh7sAvoRDQEc3YROYj7hzYlNgxiw1AChVTayyDKYJxm5g42yMMM38cPSQahgWi5QhjSOoAFtptY8iPqAeRlz9IRL6yEOJRRqX2BO5K/mbKX7ICAQCY7T9MFDn6AiX1kIcSijUvsCdyX5vWUv3QECIZjnCGf7Mk8eKEhjAaUmDKkCDblWhtMgQhSOXxQSWQeK7gwMixqoiKZEf8AoIh0F2l0REDJbsxQ8KADWGtHIEBSAPyVAs40xhBopMm8aiok1EUAznsBozEH/KFQAjBYJRBnSzGtARV/JvWKAQUCQwVS0KpUACEqQrMDdEVAFVFtgeAAB0AnJ2Z4ROUmwSAhNOggDBW1YKUEAv7gBAg2D9HIkFMw7nOESSYVFwHB2gZuAEjfg9ZOwATTNOAoT7AGnUNORVOCJKSWxMGagbsayM5t5WwXUBiGDiBsSYAoBDHMEqQEn9AJBA+fqdEDBBu3M4eQEEczwQwwA5CHg1ZKktQRsJDSA5goIbgqX9zTKMRAcACwTgqAAAACA07UwqZ6gzkOmNgDyAYpqwUhPYCFAR/LAoqEABGDjyUAwi0HYl0xsAeQDFNWCkJ7AQoCKmH81UIACyGZxjBiw0BMsZmBOIAo2RDywA2CVjfCNLQ44DuwLl9SjiklkDT4lOR4EWaqlMZND1AnBncHnA9a9DfBV8FEwJalS+EqTKMGWdCL7GkG5SRTQ8C1Q4IrcuP8MoocK1SHM8qGaIeUOU3KfZL6wFamTYpLjqUkNSRgOL2SJ5aFQ5FS+fxOgsC7mU9APC/NDvtM9AQbuMHAjUHkH9GewkwU8rZCDcJy4jCIEAHC4gRAAAGn9FW5QkpdESSuHFuABhICAABGUMIAFROJgQJ0GijWTK/ojbhLcdBN2Pgpsejgf8CAAE7oZZmOahbSURhf8Ahzcbczc0vhHr06dOnXr169evXr169evXprCGwfHw9evXr169enXr069enSp069evXr06devTr06t+DU6VOnXr169evXr169evXr1igSi5cVaFWSBQgEKnt3qFEBoGjvSw8AMBDZHdMYAIJdWLPciQBAz8F9A0A5gEKEWkhSXIAJPFT/89QIBgWFwYQhna1DOPcKZRGwwJAGJqmYYZgAAJv0rEwCDYiDFdlaSZydAvS87wmgaRliDD0OwQLtVzEPfc6BMQPybyAhZJoJQRtkJDerphZzAdyNP69BGAsJQGaDZ0RiEVbpACty9AmHypzqAC3CIjlBLsaA6xHcKEBpSzAbB3CPP1PxoshqJGgCSHQc3AGn+aEIfPIGPi0KJ/u3JAQB+kDYEBYCTM01ZB
MWrYJOwNWIRUjaI1AYq4s11Zach9W3K2CgAAaDOOIH6EA2cEtSgAgNRX/3TBCuYQdtPAEuecN21ER5MIUDx0NEswJsRrsPPwW1q2AQi30EM4ogsIiM8ptj3A0QXpImsmRoAFDDsZRqBqhQBYdmzaJQk/ooDQVgQpQCDaghYXBqvlcuuhaBMgT44gor/AIAdXqOMdijcn0CEXJBEgBqE6jX0+d4TWr6CEGVr2XdxgDVQQSFQABORAdgg2Wch6JAABAABoAwgAAAdh4SgCA/1wmyUDNAOOCggKEAAGwDAIAAAfCO0q6qZWwWnoARDAAJFCAAAAqoxCgCRTLqMHECAhgAAAIAgIAE+BADAmNOk2DjkpQQAAQEAAABMo6BHogxVAADAMgIAAOAB3ZAAQAB9yFuIkJIJAAECAgAFAABAuOuoikowR6EnOgggAWAoEgASKWAIAxliMIsNV8C8MQ0jZTgTlRnkDVF1AyNOxew38eyQoAIrOEHH9hIE6x8eUF0PABrLW6DHypj0mqxYaPUKyGRzOleT6iCCDdK7I0x8/UcmKj3/AOZ7gOxuLokV/wCRsbupdXXX6IoK1Po87wlC5MrAIAM0sQAC0AAQMwoADyQEHkNBOb44pIgwAgAWAkAEAIBwQwgAAdiXvgjeH0HLirVEgEBAACUAIAAAAAgTjaXM4S7RAY4HxggAANhBAAgAEJQAIQMHLRBiHoCgAgQAAIEDAAAbSOBArs5DuTg2QMAAkAEgAEio4kP9gDgcDoDAADxKgQIgABqh5wMVk35ABqabFycASxVAAHAwoAEACIwCSKBAQzzgALAUBCBAAAQBTE9IZwDuIhyv4F4Ll7u9xNeGfGpVawZmoDsA4xcl6Kj7RAcEoiYw0IJ3AvDW2wnYIgFUQPjwC5Ho0G9GLuMvQdRG4kKxICcIH3E8LuGc5CEYDECT0HvNohIOAiHFkG2JVp3Ni/NM6S0zL/SBYejzvCS4MkMMLVsjpCj2cxYAYdHmCfCiEADABJpBDwJQAA+RA5gQA0QCCypk2gODhYkh4AdKSiCEQAFpWMJh9AEGXJEl/wBxgEABKkSDQBbSAcuGAJGANZLJCfMu8Cueip+hCgdDtgAAmMKW3CrJobAZBr2tkFzAFfAlJkB6EoqNEJCkoMONAAAPiQm5gIIEOoQKAQzJbKtJLUfcGP18hIAF4vdXEwIAirJpsOQpacWkwNOKCh7BngQgweBAEviUAggUvU+RPECBOfjYWyIUzF/wed+azfkEsX9GaaaYYYaYaaaaaYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaKYFQkWOPc14zmpHnmZRkes8G8ZHhGr5xgJ2qOHNYIPE5gIASgEAEpIzGQoAAMFkfTkZlkteQh0+BHvMFFkIGTxAwASUw+8GQ3kLTAClzH2Af6Vw8DDAAAaUpr6lQDAWOAO4ZBAAAQdgEngQIAJC0NVyYGEJ3AB608OCHUgEAaCR5D2NECATX2lAAiDgAETOWQhugNZ3+cMqq1GZsAkAGkws7QCAkWBdugtSErOoAFSz5JHQROwARHBUYByADIQQdqMAWAcad4fUdhNSkai6MCjIKQ7ByMpanYuOO0BgDECQNuEpTEF0H4AR0WjiYKrBEMgQWiV5zeQQAC0RAoo/QgCw3G10BiszcwAqLtQTAUewaGXrJIMTjAAHz0ISFBKmP6JDj+GUxQlT/BkYW/zR52TE6N+30FoQlCEFpTiRbp+vQiTto0KYSxKLyZIjyMJwI3iAaKADQaerNDhAAPwsAApyrl5FQAn102uGGGX3GlODXFK0i4yuUdZb2DN/KME1Cgn+0vjGkaBD0wK63dQ8hWvWvozr2QtAJ3nRdjBA7dsTawAivXY0Vq0Do+GIIQF0Rs6h1Y05letsblCMrEYHNK0I97ewknLDIOh8pA6hDuUR/BD0If8AGm5h/wAGX/cv/9k=')
#"""
# Hint: content_type is a MIME type and it should be "image/jpeg"
# The content is a base64 encoded image, one way of storing images in a database
#def image_file(request):
# return HttpResponse("Hello World", content_type="text/plain")
| 5,244.130435 | 60,032 | 0.970435 |
c9c53678ef7e8d410bb7a429f092f1e537a6db9e | 10,853 | py | Python | flagsmith/flagsmith.py | SolidStateGroup/bullet-train-python-client | 31a27d0887e1cde8182bd7d0963ee74ccc104b98 | [
"BSD-3-Clause"
] | 2 | 2018-07-10T00:19:14.000Z | 2018-12-18T16:59:50.000Z | flagsmith/flagsmith.py | SolidStateGroup/bullet-train-python-client | 31a27d0887e1cde8182bd7d0963ee74ccc104b98 | [
"BSD-3-Clause"
] | 2 | 2020-04-21T11:04:45.000Z | 2020-05-15T08:39:37.000Z | flagsmith/flagsmith.py | BulletTrainHQ/bullet-train-python-client | ae5c50266d25925aa9551901480623d57a8bfbf9 | [
"BSD-3-Clause"
] | 3 | 2020-05-15T07:14:23.000Z | 2020-09-18T17:07:21.000Z | import logging
import typing
from json import JSONDecodeError
import requests
from flag_engine import engine
from flag_engine.environments.builders import build_environment_model
from flag_engine.environments.models import EnvironmentModel
from flag_engine.identities.models import IdentityModel, TraitModel
from flag_engine.segments.evaluator import get_identity_segments
from requests.adapters import HTTPAdapter
from urllib3 import Retry
from flagsmith.analytics import AnalyticsProcessor
from flagsmith.exceptions import FlagsmithAPIError, FlagsmithClientError
from flagsmith.models import DefaultFlag, Flags, Segment
from flagsmith.polling_manager import EnvironmentDataPollingManager
from flagsmith.utils.identities import generate_identities_data
# Module-level logger; configuration (handlers, level) is left to the host application.
logger = logging.getLogger(__name__)

# Default to the globally distributed Edge API; callers can override via the
# ``api_url`` constructor argument (e.g. for self-hosted installations).
DEFAULT_API_URL = "https://edge.api.flagsmith.com/api/v1/"
class Flagsmith:
"""A Flagsmith client.
Provides an interface for interacting with the Flagsmith http API.
Basic Usage::
>>> from flagsmith import Flagsmith
>>> flagsmith = Flagsmith(environment_key="<your API key>")
>>> environment_flags = flagsmith.get_environment_flags()
>>> feature_enabled = environment_flags.is_feature_enabled("foo")
>>> identity_flags = flagsmith.get_identity_flags("identifier", {"foo": "bar"})
>>> feature_enabled_for_identity = identity_flags.is_feature_enabled("foo")
"""
def __init__(
self,
environment_key: str,
api_url: str = DEFAULT_API_URL,
custom_headers: typing.Dict[str, typing.Any] = None,
request_timeout_seconds: int = None,
enable_local_evaluation: bool = False,
environment_refresh_interval_seconds: typing.Union[int, float] = 60,
retries: Retry = None,
enable_analytics: bool = False,
default_flag_handler: typing.Callable[[str], DefaultFlag] = None,
):
"""
:param environment_key: The environment key obtained from Flagsmith interface
:param api_url: Override the URL of the Flagsmith API to communicate with
:param custom_headers: Additional headers to add to requests made to the
Flagsmith API
:param request_timeout_seconds: Number of seconds to wait for a request to
complete before terminating the request
:param enable_local_evaluation: Enables local evaluation of flags
:param environment_refresh_interval_seconds: If using local evaluation,
specify the interval period between refreshes of local environment data
:param retries: a urllib3.Retry object to use on all http requests to the
Flagsmith API
:param enable_analytics: if enabled, sends additional requests to the Flagsmith
API to power flag analytics charts
:param default_flag_handler: callable which will be used in the case where
flags cannot be retrieved from the API or a non existent feature is
requested
"""
self.session = requests.Session()
self.session.headers.update(
**{"X-Environment-Key": environment_key}, **(custom_headers or {})
)
retries = retries or Retry(total=3, backoff_factor=0.1)
self.api_url = api_url if api_url.endswith("/") else f"{api_url}/"
self.request_timeout_seconds = request_timeout_seconds
self.session.mount(self.api_url, HTTPAdapter(max_retries=retries))
self.environment_flags_url = f"{self.api_url}flags/"
self.identities_url = f"{self.api_url}identities/"
self.environment_url = f"{self.api_url}environment-document/"
self._environment = None
if enable_local_evaluation:
if not environment_key.startswith("ser."):
raise ValueError(
"In order to use local evaluation, please generate a server key "
"in the environment settings page."
)
self.environment_data_polling_manager_thread = (
EnvironmentDataPollingManager(
main=self,
refresh_interval_seconds=environment_refresh_interval_seconds,
)
)
self.environment_data_polling_manager_thread.start()
self._analytics_processor = (
AnalyticsProcessor(
environment_key, self.api_url, timeout=self.request_timeout_seconds
)
if enable_analytics
else None
)
self.default_flag_handler = default_flag_handler
def get_environment_flags(self) -> Flags:
    """Retrieve every default flag for the current environment.

    Uses the locally cached environment document when one is available,
    otherwise falls back to querying the Flagsmith API.

    :return: Flags object holding all the flags for the current environment.
    """
    use_local_document = bool(self._environment)
    if not use_local_document:
        return self._get_environment_flags_from_api()
    return self._get_environment_flags_from_document()
def get_identity_flags(
    self, identifier: str, traits: typing.Dict[str, typing.Any] = None
) -> Flags:
    """Fetch all flags for a single identity, upserting its traits.

    Traits provided here are stored on the identity for future evaluations;
    providing a trait with a value of None removes the trait from the
    identity if it exists.

    :param identifier: a unique identifier for the identity in the current
        environment, e.g. email address, username, uuid
    :param traits: a dictionary of traits to add / update on the identity in
        Flagsmith, e.g. {"num_orders": 10}
    :return: Flags object holding all the flags for the given identity.
    """
    if not traits:
        traits = {}
    # Pick the evaluation backend once, then call it.
    handler = (
        self._get_identity_flags_from_document
        if self._environment
        else self._get_identity_flags_from_api
    )
    return handler(identifier, traits)
def get_identity_segments(
    self, identifier: str, traits: typing.Dict[str, typing.Any] = None
) -> typing.List[Segment]:
    """Return the segments that the given identity belongs to.

    Only available when local evaluation is enabled, since segment rules
    are evaluated against the cached environment document.

    :param identifier: a unique identifier for the identity in the current
        environment, e.g. email address, username, uuid
    :param traits: a dictionary of traits to add / update on the identity in
        Flagsmith, e.g. {"num_orders": 10}
    :return: list of Segment objects that the identity is part of.
    :raises FlagsmithClientError: if no local environment document is cached.
    """
    if not self._environment:
        raise FlagsmithClientError(
            "Local evaluation required to obtain identity segments."
        )
    trait_map = traits or {}
    identity = self._build_identity_model(identifier, **trait_map)
    matched_models = get_identity_segments(self._environment, identity)
    segments = []
    for model in matched_models:
        segments.append(Segment(id=model.id, name=model.name))
    return segments
def update_environment(self):
    """Refresh the locally cached environment document from the Flagsmith API."""
    self._environment = self._get_environment_from_api()
def _get_environment_from_api(self) -> EnvironmentModel:
    """Fetch the full environment document over HTTP and build a local model."""
    environment_data = self._get_json_response(self.environment_url, method="GET")
    return build_environment_model(environment_data)
def _get_environment_flags_from_document(self) -> Flags:
    """Evaluate environment-level flags against the cached local document."""
    feature_states = engine.get_environment_feature_states(self._environment)
    return Flags.from_feature_state_models(
        feature_states=feature_states,
        analytics_processor=self._analytics_processor,
        default_flag_handler=self.default_flag_handler,
    )
def _get_identity_flags_from_document(
    self, identifier: str, traits: typing.Dict[str, typing.Any]
) -> Flags:
    """Evaluate identity-level flags against the cached local document."""
    identity = self._build_identity_model(identifier, **traits)
    states = engine.get_identity_feature_states(self._environment, identity)
    return Flags.from_feature_state_models(
        feature_states=states,
        analytics_processor=self._analytics_processor,
        identity_id=identity.composite_key,
        default_flag_handler=self.default_flag_handler,
    )
def _get_environment_flags_from_api(self) -> Flags:
    """Fetch environment flags over HTTP, falling back to defaults on error."""
    try:
        raw_flags = self._get_json_response(
            url=self.environment_flags_url, method="GET"
        )
        return Flags.from_api_flags(
            api_flags=raw_flags,
            analytics_processor=self._analytics_processor,
            default_flag_handler=self.default_flag_handler,
        )
    except FlagsmithAPIError:
        # Without a default handler there is nothing sensible to return.
        if not self.default_flag_handler:
            raise
        return Flags(default_flag_handler=self.default_flag_handler)
def _get_identity_flags_from_api(
    self, identifier: str, traits: typing.Dict[str, typing.Any]
) -> Flags:
    """Fetch identity flags over HTTP, falling back to defaults on error."""
    try:
        payload = generate_identities_data(identifier, traits)
        response_json = self._get_json_response(
            url=self.identities_url, method="POST", body=payload
        )
        return Flags.from_api_flags(
            api_flags=response_json["flags"],
            analytics_processor=self._analytics_processor,
            default_flag_handler=self.default_flag_handler,
        )
    except FlagsmithAPIError:
        # Without a default handler there is nothing sensible to return.
        if not self.default_flag_handler:
            raise
        return Flags(default_flag_handler=self.default_flag_handler)
def _get_json_response(self, url: str, method: str, body: dict = None):
    """Perform an HTTP request against the Flagsmith API and return parsed JSON.

    :param url: fully qualified URL to request
    :param method: HTTP method name, e.g. "GET" or "POST"
    :param body: optional JSON-serialisable request body
    :return: decoded JSON response payload
    :raises FlagsmithAPIError: on a non-200 response, connection failure or
        an unparseable response body
    """
    try:
        request_method = getattr(self.session, method.lower())
        response = request_method(
            url, json=body, timeout=self.request_timeout_seconds
        )
        if response.status_code != 200:
            # Bug fix: the message was previously passed printf-style
            # ("...%d", status_code) to the exception constructor, which
            # never interpolates - the exception carried a raw tuple.
            # Format the status code into the message explicitly.
            raise FlagsmithAPIError(
                "Invalid request made to Flagsmith API. Response status code: %d"
                % response.status_code
            )
        return response.json()
    except (requests.ConnectionError, JSONDecodeError) as e:
        raise FlagsmithAPIError(
            "Unable to get valid response from Flagsmith API."
        ) from e
def _build_identity_model(self, identifier: str, **traits):
    """Construct an IdentityModel bound to the cached local environment.

    :param identifier: unique identifier of the identity
    :param traits: trait key/value pairs attached to the identity
    :raises FlagsmithClientError: if no local environment document is cached.
    """
    if not self._environment:
        raise FlagsmithClientError(
            "Unable to build identity model when no local environment present."
        )
    trait_models = []
    for trait_key, trait_value in traits.items():
        trait_models.append(
            TraitModel(trait_key=trait_key, trait_value=trait_value)
        )
    return IdentityModel(
        identifier=identifier,
        environment_api_key=self._environment.api_key,
        identity_traits=trait_models,
    )
def __del__(self):
    # Stop the background polling thread (only created when local
    # evaluation is enabled) so the interpreter can shut down cleanly.
    if hasattr(self, "environment_data_polling_manager_thread"):
        self.environment_data_polling_manager_thread.stop()
| 41.903475 | 88 | 0.664885 |
2e30a268969753b5763b9c230b821aa7d8f72b17 | 1,065 | py | Python | cms_articles/utils/placeholder.py | snegovick/django-cms-articles | f7397bd5e303be6ed50bc53b71e27e40f4087f94 | [
"BSD-3-Clause"
] | 9 | 2016-04-18T15:59:50.000Z | 2019-09-12T07:11:15.000Z | cms_articles/utils/placeholder.py | snegovick/django-cms-articles | f7397bd5e303be6ed50bc53b71e27e40f4087f94 | [
"BSD-3-Clause"
] | 6 | 2019-01-22T17:53:48.000Z | 2020-07-19T17:35:31.000Z | cms_articles/utils/placeholder.py | snegovick/django-cms-articles | f7397bd5e303be6ed50bc53b71e27e40f4087f94 | [
"BSD-3-Clause"
] | 4 | 2017-02-10T17:19:30.000Z | 2020-02-02T16:58:20.000Z | # -*- coding: utf-8 -*-
import warnings
from cms.exceptions import DuplicatePlaceholderWarning
from cms.utils.placeholder import (
_get_nodelist, _scan_placeholders, validate_placeholder_name,
)
from django.template.loader import get_template
def get_placeholders(template):
from ..templatetags.cms_articles import ArticlePlaceholder
compiled_template = get_template(template)
placeholders = []
nodes = _scan_placeholders(_get_nodelist(compiled_template), ArticlePlaceholder)
clean_placeholders = []
for node in nodes:
placeholder = node.get_declaration()
slot = placeholder.slot
if slot in clean_placeholders:
warnings.warn("Duplicate {{% placeholder \"{0}\" %}} "
"in template {1}."
.format(slot, template, slot),
DuplicatePlaceholderWarning)
else:
validate_placeholder_name(slot)
placeholders.append(placeholder)
clean_placeholders.append(slot)
return placeholders
| 31.323529 | 84 | 0.664789 |
23c51474b8f2f6d632946e259a72cf2c49c7f875 | 8,884 | py | Python | src/sas/sasgui/plottools/plottable_interactor.py | opendatafit/sasview | c470220eecfc9f6d8a0e27e2ea8919dcb1b38e39 | [
"BSD-3-Clause"
] | null | null | null | src/sas/sasgui/plottools/plottable_interactor.py | opendatafit/sasview | c470220eecfc9f6d8a0e27e2ea8919dcb1b38e39 | [
"BSD-3-Clause"
] | 1 | 2021-09-20T13:20:35.000Z | 2021-09-20T13:20:35.000Z | src/sas/sasgui/plottools/plottable_interactor.py | opendatafit/sasview | c470220eecfc9f6d8a0e27e2ea8919dcb1b38e39 | [
"BSD-3-Clause"
] | null | null | null | """
This module allows more interaction with the plot
"""
from __future__ import print_function
from .BaseInteractor import _BaseInteractor
class PointInteractor(_BaseInteractor):
    """
    Interactor that renders a plottable (markers, curves, vertical lines or
    steps) on a matplotlib axes and wires up mouse callbacks so the artist
    highlights on hover and reports selection back to the base window.
    """
    def __init__(self, base, axes, color='black', zorder=3, id=''):
        """
        :param base: parent panel providing the `connect` event hub and the
            plottable_selected() callback
        :param axes: matplotlib axes to draw on
        :param color: default colour for the artist
        :param zorder: default draw order for the artist
        :param id: identifier reported via base.plottable_selected()
        """
        _BaseInteractor.__init__(self, base, axes, color=color)
        self.zorder = zorder
        self.id = id
        self.color = color
        # Cycles used when colour / symbol are supplied as integer indices.
        self.colorlist = ['b', 'g', 'r', 'c', 'm', 'y', 'k']
        self.symbollist = ['o', 'x', '^', 'v', '<', '>',
                           '+', 's', 'd', 'D', 'h', 'H', 'p', '-', '--',
                           'vline', 'step']
        # Optional override applied to every points() call when set.
        self.markersize = None
        self.marker = None
        self.marker2 = None
        # Mouse interaction state flags.
        self._button_down = False
        self._context_menu = False
        self._dragged = False
        self.connect_markers([self.axes])

    def _color(self, c):
        """Return a particular colour"""
        return self.colorlist[c % len(self.colorlist)]

    def _symbol(self, s):
        """Return a particular symbol"""
        return self.symbollist[s % len(self.symbollist)]

    def points(self, x, y, dx=None, dy=None, color=0, symbol=0, zorder=1,
               markersize=5, label=None, hide_error=False):
        """
        Draw the data as markers (with optional error bars). Symbol indices
        that select a line style ('-', '--', 'vline', 'step') are delegated
        to curve()/vline()/step() instead.
        """
        # Draw curve
        if self._symbol(symbol) == '-' or self._symbol(symbol) == '--':
            l_width = markersize * 0.4
            return self.curve(x=x, y=y, color=color, symbol=symbol,
                              label=label, width=l_width)
        # return
        if self._symbol(symbol) == 'vline':
            l_width = markersize * 0.4
            return self.vline(x=x, y=y, color=color, label=label, width=l_width)
        if self._symbol(symbol) == 'step':
            l_width = markersize * 0.4
            return self.step(x=x, y=y, color=color, label=label, width=l_width)
        # Drop callbacks bound to the previous artist before re-plotting.
        if self.marker is not None:
            self.base.connect.clear([self.marker])
        self.color = self._color(color)
        if self.markersize is not None:
            markersize = self.markersize
        # Convert tuple (lo,hi) to array [(x-lo),(hi-x)]
        # NOTE(review): `nx` (presumably a numpy alias) is not imported in
        # this module as shown - confirm the file-level import exists.
        if dx is not None and type(dx) == type(()):
            dx = nx.vstack((x - dx[0], dx[1] - x)).transpose()
        if dy is not None and type(dy) == type(()):
            dy = nx.vstack((y - dy[0], dy[1] - y)).transpose()
        if dx is None and dy is None:
            # zorder = 1
            self.marker = self.axes.plot(x, y, color=self.color,
                                         marker=self._symbol(symbol),
                                         markersize=markersize,
                                         linestyle='', label=label,
                                         zorder=zorder)[0]
        else:
            if hide_error:
                # zorder = 1
                self.marker = self.axes.plot(x, y, color=self.color,
                                             marker=self._symbol(symbol),
                                             markersize=markersize,
                                             linestyle='', label=label,
                                             zorder=1)[0]
            else:
                # zorder = 2
                self.marker = self.axes.errorbar(x, y, yerr=dy,
                                                 xerr=None,
                                                 ecolor=self.color,
                                                 color=self.color,
                                                 capsize=2,
                                                 linestyle='',
                                                 barsabove=False,
                                                 marker=self._symbol(symbol),
                                                 markersize=markersize,
                                                 lolims=False, uplims=False,
                                                 xlolims=False, xuplims=False,
                                                 label=label,
                                                 zorder=1)[0]
        self.connect_markers([self.marker])
        self.update()

    def curve(self, x, y, dy=None, color=0, symbol=0, zorder=10,
              label=None, width=2.0):
        """
        Draw the data as a continuous line (no point markers).
        """
        if self.marker is not None:
            self.base.connect.clear([self.marker])
        self.color = self._color(color)
        self.marker = self.axes.plot(x, y, color=self.color, lw=width,
                                     marker='', linestyle=self._symbol(symbol),
                                     label=label, zorder=zorder)[0]
        self.connect_markers([self.marker])
        self.update()

    def vline(self, x, y, dy=None, color=0, symbol=0, zorder=1,
              label=None, width=2.0):
        """
        Draw the data as vertical lines from just below the minimum y up to
        each y value.
        """
        if self.marker is not None:
            self.base.connect.clear([self.marker])
        self.color = self._color(color)
        # Clamp the baseline at zero for non-negative data.
        if min(y) < 0:
            y_min = 0.0
        else:
            y_min = min(y) * 9 / 10
        self.marker = self.axes.vlines(x=x, ymin=y_min, ymax=y,
                                       color=self.color,
                                       linestyle='-', label=label,
                                       lw=width, zorder=zorder)
        self.connect_markers([self.marker])
        self.update()

    def step(self, x, y, dy=None, color=0, symbol=0, zorder=1,
             label=None, width=2.0):
        """
        Draw the data as a step plot.
        """
        if self.marker is not None:
            self.base.connect.clear([self.marker])
        self.color = self._color(color)
        self.marker = self.axes.step(x, y, color=self.color,
                                     marker='',
                                     linestyle='-', label=label,
                                     lw=width, zorder=zorder)[0]
        self.connect_markers([self.marker])
        self.update()

    def connect_markers(self, markers):
        """
        Connect markers to callbacks
        """
        for h in markers:
            connect = self.base.connect
            connect('enter', h, self._on_enter)
            connect('leave', h, self._on_leave)
            connect('click', h, self._on_click)
            connect('release', h, self._on_release)
            connect('key', h, self.onKey)

    def clear(self):
        # NOTE(review): placeholder implementation - only logs the call.
        print("plottable_interactor.clear()")

    def _on_click(self, evt):
        """
        Called when a mouse button is clicked
        from within the boundaries of an artist.
        """
        # A click that follows a right-button release dismisses the pending
        # context menu and restores the artist's normal colour.
        if self._context_menu:
            self._context_menu = False
            evt.artist = self.marker
            self._on_leave(evt)

    def _on_release(self, evt):
        """
        Called when a mouse button is released
        within the boundaries of an artist
        """
        # Check to see whether we are about to pop
        # the context menu up
        if evt.button == 3:
            self._context_menu = True

    def _on_enter(self, evt):
        """
        Called when we are entering the boundaries
        of an artist.
        """
        if not evt.artist.__class__.__name__ == "AxesSubplot":
            self.base.plottable_selected(self.id)
            # Toggle between the highlight colour ('y') and blue; some artist
            # types only support one of set_color / set_color_cycle.
            if evt.artist.get_color() == 'y':
                try:
                    evt.artist.set_color('b')
                except:
                    evt.artist.set_color_cycle('b')
                if hasattr(evt.artist, "set_facecolor"):
                    evt.artist.set_facecolor('b')
                if hasattr(evt.artist, "set_edgecolor"):
                    evt.artist.set_edgecolor('b')
            else:
                try:
                    evt.artist.set_color('y')
                except:
                    evt.artist.set_color_cycle('y')
                if hasattr(evt.artist, "set_facecolor"):
                    evt.artist.set_facecolor('y')
                if hasattr(evt.artist, "set_edgecolor"):
                    evt.artist.set_edgecolor('y')
            self.axes.figure.canvas.draw_idle()

    def _on_leave(self, evt):
        """
        Called when we are leaving the boundaries
        of an artist.
        """
        if not evt.artist.__class__.__name__ == "AxesSubplot":
            if not self._context_menu:
                self.base.plottable_selected(None)
                # Restore the artist's configured colour.
                try:
                    evt.artist.set_color(self.color)
                except:
                    evt.artist.set_color_cycle(self.color)
                if hasattr(evt.artist, "set_facecolor"):
                    evt.artist.set_facecolor(self.color)
                if hasattr(evt.artist, "set_edgecolor"):
                    evt.artist.set_edgecolor(self.color)
                self.axes.figure.canvas.draw_idle()

    def update(self):
        """
        Update
        """
        pass
| 37.804255 | 80 | 0.463417 |
3789005f2802aa738a444f140cd336513d5afddb | 436 | py | Python | 04.100 Python Exercises Evaluate and Improve Your Skills - AS/Exercise 76-100/exercise92.py | ptyadana/python-dojo | 98c7234b84f0afea99a091c7198342d66bbdff5b | [
"MIT"
] | 3 | 2020-06-01T04:17:18.000Z | 2020-12-18T03:05:55.000Z | 04.100 Python Exercises Evaluate and Improve Your Skills - AS/Exercise 76-100/exercise92.py | ptyadana/python-dojo | 98c7234b84f0afea99a091c7198342d66bbdff5b | [
"MIT"
] | 1 | 2020-04-25T08:01:59.000Z | 2020-04-25T08:01:59.000Z | 04.100 Python Exercises Evaluate and Improve Your Skills - AS/Exercise 76-100/exercise92.py | ptyadana/python-dojo | 98c7234b84f0afea99a091c7198342d66bbdff5b | [
"MIT"
] | 7 | 2020-04-26T10:02:36.000Z | 2021-06-08T05:12:46.000Z | # download the attached ZIP file and extract its files in a folder.
# Then, write a script that counts and prints out the number of .py files in that folder.
import pathlib
count = 0;
for path in pathlib.Path('./92_files').iterdir():
if path.is_file() and path.suffix == '.py':
count += 1;
print(f'Number of files {count}');
#alternative way
# import glob
# file_list=glob.glob1("files","*.py")
# print(len(file_list))
| 25.647059 | 89 | 0.68578 |
b034e8a07cb3f8c844975bb53c6d5bb190c885c8 | 18,603 | py | Python | gamestonk_terminal/cryptocurrency/discovery/discovery_controller.py | minhhoang1023/GamestonkTerminal | 195dc19b491052df080178c0cc6a9d535a91a704 | [
"MIT"
] | null | null | null | gamestonk_terminal/cryptocurrency/discovery/discovery_controller.py | minhhoang1023/GamestonkTerminal | 195dc19b491052df080178c0cc6a9d535a91a704 | [
"MIT"
] | null | null | null | gamestonk_terminal/cryptocurrency/discovery/discovery_controller.py | minhhoang1023/GamestonkTerminal | 195dc19b491052df080178c0cc6a9d535a91a704 | [
"MIT"
] | null | null | null | """Cryptocurrency Discovery Controller"""
__docformat__ = "numpy"
# pylint: disable=R0904, C0302, W0622, C0201
import argparse
from typing import List
from prompt_toolkit.completion import NestedCompleter
from gamestonk_terminal.rich_config import console
from gamestonk_terminal.parent_classes import BaseController
from gamestonk_terminal import feature_flags as gtff
from gamestonk_terminal.helper_funcs import (
EXPORT_ONLY_RAW_DATA_ALLOWED,
parse_known_args_and_warn,
check_positive,
)
from gamestonk_terminal.menu import session
from gamestonk_terminal.cryptocurrency.discovery import (
coinmarketcap_model,
coinpaprika_model,
dappradar_model,
dappradar_view,
pycoingecko_model,
pycoingecko_view,
coinpaprika_view,
coinmarketcap_view,
)
class DiscoveryController(BaseController):
    """Discovery Controller class.

    Terminal menu for discovering cryptocurrencies across several data
    providers (CoinGecko, CoinPaprika, CoinMarketCap, DappRadar). Each
    ``call_<command>`` method parses its own argparse arguments and
    delegates rendering to the matching ``*_view`` module.
    """
    # Commands exposed by this menu; each maps to a call_<name> method.
    CHOICES_COMMANDS = [
        "cpsearch",
        "cmctop",
        "cgtrending",
        "cggainers",
        "cglosers",
        "cgtop",
        "drnft",
        "drgames",
        "drdapps",
        "drdex",
    ]
    PATH = "/crypto/disc/"

    def __init__(self, queue: List[str] = None):
        """Constructor

        :param queue: pending command queue handed down from the parent menu
        """
        super().__init__(queue)
        # Build tab-completion choices only in interactive prompt-toolkit mode.
        if session and gtff.USE_PROMPT_TOOLKIT:
            choices: dict = {c: {} for c in self.controller_choices}
            choices["cggainers"]["-p"] = {c: {} for c in pycoingecko_model.API_PERIODS}
            choices["cggainers"]["--sort"] = {
                c: {} for c in pycoingecko_model.GAINERS_LOSERS_COLUMNS
            }
            choices["cglosers"]["--sort"] = {
                c: {} for c in pycoingecko_model.GAINERS_LOSERS_COLUMNS
            }
            choices["cglosers"]["-p"] = {c: {} for c in pycoingecko_model.API_PERIODS}
            choices["cgtop"] = {
                c: None for c in pycoingecko_model.get_categories_keys()
            }
            choices["cgtop"]["--category"] = {
                c: None for c in pycoingecko_model.get_categories_keys()
            }
            choices["cgtop"]["--sort"] = {
                c: None for c in pycoingecko_view.COINS_COLUMNS
            }
            choices["cmctop"]["-s"] = {c: {} for c in coinmarketcap_model.FILTERS}
            choices["cpsearch"]["-s"] = {c: {} for c in coinpaprika_model.FILTERS}
            choices["cpsearch"]["-c"] = {c: {} for c in coinpaprika_model.CATEGORIES}
            choices["drnft"]["--sort"] = {c: {} for c in dappradar_model.NFT_COLUMNS}
            # NOTE(review): drgames reuses DEX_COLUMNS - confirm there is no
            # dedicated games column constant in dappradar_model.
            choices["drgames"]["--sort"] = {c: {} for c in dappradar_model.DEX_COLUMNS}
            choices["drdex"]["--sort"] = {c: {} for c in dappradar_model.DEX_COLUMNS}
            choices["drdapps"]["--sort"] = {
                c: {} for c in dappradar_model.DAPPS_COLUMNS
            }
            self.completer = NestedCompleter.from_nested_dict(choices)

    def print_help(self):
        """Print help"""
        help_text = """[cmds]
[src][CoinGecko][/src]
cgtop top coins (with or without category)
cgtrending trending coins
cggainers top gainers - coins which price gained the most in given period
cglosers top losers - coins which price dropped the most in given period
[src][CoinPaprika][/src]
cpsearch search for coins
[src][CoinMarketCap][/src]
cmctop top coins
[src][DappRadar][/src]
drnft top non fungible tokens
drgames top blockchain games
drdapps top decentralized apps
drdex top decentralized exchanges
[/cmds]
"""
        console.print(text=help_text, menu="Cryptocurrency - Discovery")

    def call_cgtop(self, other_args):
        """Process cgtop command: top coins from CoinGecko."""
        parser = argparse.ArgumentParser(
            prog="cgtop",
            add_help=False,
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            description="""Display N coins from CoinGecko [Source: CoinGecko]
can receive a category as argument (-c decentralized-finance-defi or -c stablecoins)
and will show only the top coins in that category.
can also receive sort arguments, e.g., --sort Volume [$]
You can sort by {Symbol,Name,Price [$],Market Cap [$],Market Cap Rank,Volume [$]}
Number of coins to show: -l 10
""",
        )
        parser.add_argument(
            "-c",
            "--category",
            default="",
            dest="category",
            help="Category (e.g., stablecoins). Empty for no category",
        )
        parser.add_argument(
            "-l",
            "--limit",
            default=10,
            dest="limit",
            help="Limit of records",
            type=check_positive,
        )
        parser.add_argument(
            "-s",
            "--sort",
            dest="sortby",
            nargs="+",
            help="Sort by given column. Default: Market Cap Rank",
            default="Market Cap Rank",
        )
        # Allow "cgtop <category>" shorthand without an explicit -c flag.
        if other_args and not other_args[0][0] == "-":
            other_args.insert(0, "-c")
        ns_parser = parse_known_args_and_warn(
            parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
        )
        if ns_parser:
            pycoingecko_view.display_coins(
                sortby=" ".join(ns_parser.sortby),
                category=ns_parser.category,
                top=ns_parser.limit,
                export=ns_parser.export,
            )

    def call_drdapps(self, other_args):
        """Process drdapps command: top decentralized apps from DappRadar."""
        parser = argparse.ArgumentParser(
            prog="drdapps",
            add_help=False,
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            description="""
Shows top decentralized applications [Source: https://dappradar.com/]
Accepts --sort {Name,Category,Protocols,Daily Users,Daily Volume [$]}
to sort by column
""",
        )
        parser.add_argument(
            "-l",
            "--limit",
            dest="limit",
            type=check_positive,
            help="Number of records to display",
            default=15,
        )
        parser.add_argument(
            "-s",
            "--sort",
            dest="sortby",
            nargs="+",
            help="Sort by given column. Default: Daily Volume [$]",
            default="Daily Volume [$]",
        )
        ns_parser = parse_known_args_and_warn(
            parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
        )
        if ns_parser:
            dappradar_view.display_top_dapps(
                sortby=" ".join(ns_parser.sortby),
                top=ns_parser.limit,
                export=ns_parser.export,
            )

    def call_drgames(self, other_args):
        """Process drgames command: top blockchain games from DappRadar."""
        parser = argparse.ArgumentParser(
            prog="drgames",
            add_help=False,
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            description="""
Shows top blockchain games [Source: https://dappradar.com/]
Accepts --sort {Name,Daily Users,Daily Volume [$]}
to sort by column
""",
        )
        parser.add_argument(
            "-l",
            "--limit",
            dest="limit",
            type=check_positive,
            help="Number of records to display",
            default=15,
        )
        parser.add_argument(
            "-s",
            "--sort",
            dest="sortby",
            nargs="+",
            help="Sort by given column. Default: Daily Volume [$]",
            default="Daily Volume [$]",
        )
        ns_parser = parse_known_args_and_warn(
            parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
        )
        if ns_parser:
            dappradar_view.display_top_games(
                sortby=" ".join(ns_parser.sortby),
                top=ns_parser.limit,
                export=ns_parser.export,
            )

    def call_drdex(self, other_args):
        """Process drdex command: top decentralized exchanges from DappRadar."""
        parser = argparse.ArgumentParser(
            prog="drdex",
            add_help=False,
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            description="""
Shows top decentralized exchanges [Source: https://dappradar.com/]
Accepts --sort {Name,Daily Users,Daily Volume [$]}
to sort by column
""",
        )
        parser.add_argument(
            "-l",
            "--limit",
            dest="limit",
            type=check_positive,
            help="Number of records to display",
            default=15,
        )
        parser.add_argument(
            "-s",
            "--sort",
            dest="sortby",
            nargs="+",
            help="Sort by given column. Default: Daily Volume [$]",
            default="Daily Volume [$]",
        )
        ns_parser = parse_known_args_and_warn(
            parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
        )
        if ns_parser:
            dappradar_view.display_top_dexes(
                sortby=" ".join(ns_parser.sortby),
                top=ns_parser.limit,
                export=ns_parser.export,
            )

    def call_drnft(self, other_args):
        """Process drnft command: top NFT collections from DappRadar."""
        parser = argparse.ArgumentParser(
            prog="drnft",
            add_help=False,
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            description="""
Shows top NFT collections [Source: https://dappradar.com/]
Accepts --sort {Name,Protocols,Floor Price [$],Avg Price [$],Market Cap [$],Volume [$]}
to sort by column
""",
        )
        parser.add_argument(
            "-l",
            "--limit",
            dest="limit",
            type=check_positive,
            help="Number of records to display",
            default=15,
        )
        parser.add_argument(
            "-s",
            "--sort",
            dest="sortby",
            nargs="+",
            help="Sort by given column. Default: Market Cap [$]",
            default="Market Cap [$]",
        )
        ns_parser = parse_known_args_and_warn(
            parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
        )
        if ns_parser:
            dappradar_view.display_top_nfts(
                sortby=" ".join(ns_parser.sortby),
                top=ns_parser.limit,
                export=ns_parser.export,
            )

    def call_cggainers(self, other_args):
        """Process gainers command: top gainers from CoinGecko."""
        parser = argparse.ArgumentParser(
            prog="cggainers",
            add_help=False,
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            description="""
Shows Largest Gainers - coins which gain the most in given period.
You can use parameter --period to set which timeframe are you interested in: {14d,1h,1y,200d,24h,30d,7d}
You can look on only N number of records with --limit,
You can sort by {Symbol,Name,Price [$],Market Cap [$],Market Cap Rank,Volume [$]} with --sort.
""",
        )
        parser.add_argument(
            "-p",
            "--period",
            dest="period",
            type=str,
            help="time period, one from {14d,1h,1y,200d,24h,30d,7d}",
            default="1h",
            choices=pycoingecko_model.API_PERIODS,
        )
        parser.add_argument(
            "-l",
            "--limit",
            dest="limit",
            type=check_positive,
            help="Number of records to display",
            default=15,
        )
        parser.add_argument(
            "-s",
            "--sort",
            dest="sortby",
            nargs="+",
            help="Sort by given column. Default: Market Cap Rank",
            default="Market Cap Rank",
        )
        ns_parser = parse_known_args_and_warn(
            parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
        )
        if ns_parser:
            pycoingecko_view.display_gainers(
                period=ns_parser.period,
                top=ns_parser.limit,
                export=ns_parser.export,
                sortby=" ".join(ns_parser.sortby),
            )

    def call_cglosers(self, other_args):
        """Process losers command: top losers from CoinGecko."""
        parser = argparse.ArgumentParser(
            prog="cglosers",
            add_help=False,
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            description="""
Shows Largest Losers - coins which price dropped the most in given period
You can use parameter --period to set which timeframe are you interested in: {14d,1h,1y,200d,24h,30d,7d}
You can look on only N number of records with --limit,
You can sort by {Symbol,Name,Price [$],Market Cap [$],Market Cap Rank,Volume [$]} with --sort.
""",
        )
        parser.add_argument(
            "-p",
            "--period",
            dest="period",
            type=str,
            help="time period, one from {14d,1h,1y,200d,24h,30d,7d}",
            default="1h",
            choices=pycoingecko_model.API_PERIODS,
        )
        parser.add_argument(
            "-l",
            "--limit",
            dest="limit",
            type=check_positive,
            help="Number of records to display",
            default=15,
        )
        parser.add_argument(
            "-s",
            "--sort",
            dest="sortby",
            nargs="+",
            help="Sort by given column. Default: Market Cap Rank",
            default="Market Cap Rank",
        )
        ns_parser = parse_known_args_and_warn(
            parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
        )
        if ns_parser:
            pycoingecko_view.display_losers(
                period=ns_parser.period,
                top=ns_parser.limit,
                export=ns_parser.export,
                sortby=" ".join(ns_parser.sortby),
            )

    def call_cgtrending(self, other_args):
        """Process trending command: trending coins from CoinGecko."""
        parser = argparse.ArgumentParser(
            prog="cgtrending",
            add_help=False,
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            description="""Discover trending coins.
Use --limit parameter to display only N number of records,
""",
        )
        ns_parser = parse_known_args_and_warn(
            parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
        )
        if ns_parser:
            pycoingecko_view.display_trending(
                export=ns_parser.export,
            )

    def call_cmctop(self, other_args):
        """Process cmctop command: top ranked coins from CoinMarketCap."""
        parser = argparse.ArgumentParser(
            prog="cmctop",
            add_help=False,
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            description="This gets the top ranked coins from coinmarketcap.com",
        )
        parser.add_argument(
            "-l",
            "--limit",
            default=15,
            dest="limit",
            help="Limit of records",
            type=check_positive,
        )
        parser.add_argument(
            "-s",
            "--sort",
            dest="sortby",
            type=str,
            help="column to sort data by.",
            default="CMC_Rank",
            choices=coinmarketcap_model.FILTERS,
        )
        parser.add_argument(
            "--descend",
            action="store_false",
            help="Flag to sort in descending order (lowest first)",
            dest="descend",
            default=True,
        )
        ns_parser = parse_known_args_and_warn(
            parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
        )
        if ns_parser:
            coinmarketcap_view.display_cmc_top_coins(
                top=ns_parser.limit,
                sortby=ns_parser.sortby,
                descend=ns_parser.descend,
                export=ns_parser.export,
            )

    def call_cpsearch(self, other_args):
        """Process search command: search coins via CoinPaprika."""
        parser = argparse.ArgumentParser(
            add_help=False,
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            prog="cpsearch",
            description="""Search over CoinPaprika API
You can display only N number of results with --limit parameter.
You can sort data by id, name , category --sort parameter and also with --descend flag to sort descending.
To choose category in which you are searching for use --cat/-c parameter. Available categories:
currencies|exchanges|icos|people|tags|all
Displays:
id, name, category""",
        )
        parser.add_argument(
            "-q",
            "--query",
            help="phrase for search",
            dest="query",
            nargs="+",
            type=str,
            required="-h" not in other_args,
        )
        parser.add_argument(
            "-c",
            "--cat",
            help="Categories to search: currencies|exchanges|icos|people|tags|all. Default: all",
            dest="category",
            default="all",
            type=str,
            choices=coinpaprika_model.CATEGORIES,
        )
        parser.add_argument(
            "-l",
            "--limit",
            default=10,
            dest="limit",
            help="Limit of records",
            type=check_positive,
        )
        parser.add_argument(
            "-s",
            "--sort",
            dest="sortby",
            type=str,
            help="Sort by given column. Default: id",
            default="id",
            choices=coinpaprika_model.FILTERS,
        )
        parser.add_argument(
            "--descend",
            action="store_false",
            help="Flag to sort in descending order (lowest first)",
            dest="descend",
            default=True,
        )
        # Allow "cpsearch <phrase>" shorthand without an explicit -q flag.
        if other_args:
            if not other_args[0][0] == "-":
                other_args.insert(0, "-q")
        ns_parser = parse_known_args_and_warn(
            parser, other_args, EXPORT_ONLY_RAW_DATA_ALLOWED
        )
        if ns_parser:
            coinpaprika_view.display_search_results(
                top=ns_parser.limit,
                sortby=ns_parser.sortby,
                descend=ns_parser.descend,
                export=ns_parser.export,
                query=" ".join(ns_parser.query),
                category=ns_parser.category,
            )
| 32.984043 | 118 | 0.535344 |
65025a5ac1793e09f299bc562cd35aaaf3eb7382 | 884 | py | Python | IVTp/2014/Lyach_A_A/Task_4_12.py | YukkaSarasti/pythonintask | eadf4245abb65f4400a3bae30a4256b4658e009c | [
"Apache-2.0"
] | null | null | null | IVTp/2014/Lyach_A_A/Task_4_12.py | YukkaSarasti/pythonintask | eadf4245abb65f4400a3bae30a4256b4658e009c | [
"Apache-2.0"
] | null | null | null | IVTp/2014/Lyach_A_A/Task_4_12.py | YukkaSarasti/pythonintask | eadf4245abb65f4400a3bae30a4256b4658e009c | [
"Apache-2.0"
] | null | null | null | # Задача 4. Вариант 12.
# Напишите программу, которая выводит имя, под которым скрывается Лариса Петровна Косач-Квитка
# Лях.А.А.
# 22.05.2016
real_name = 'Лариса Петровна Косач-Квитка'
p_name = 'Леся Украинка '
interests = ('поэзия, лирика, эпос, драма')
born_place = 'Новоград-Волынский '
born_year = 1871
death_year = 1913
death_oldness = death_year - born_year
print(real_name +', ' + ' \nболее известна под псевдонимом '
+ p_name + ',')
print('родилась в ' + born_place +
' в ' + str(born_year) + ' году.')
print(str (interests) + ' сопровождали её всю сознательную жизнь.')
print('В честь писательницы именем Леси Украинки названы : Бульвар , улицы , театр , швейная фабрика и много всего другого. Умерла в ' + str (death_year) + ' году,')
print('прожив ' + str (death_oldness ) + ' лет.')
input('\nНажмите Enter для выхода.')
| 35.36 | 167 | 0.671946 |
5465be2d11b813626b0ea8a8c1e573e5aa2beac5 | 4,300 | py | Python | sk_classification_NaiveBayes/naive_bayes.py | AccessibleAI/ailibrary | f283f7c1608f5998694efc3cdbd0a29ebf3239c2 | [
"Apache-2.0"
] | 5 | 2020-11-09T21:50:10.000Z | 2021-07-09T00:22:16.000Z | sk_classification_NaiveBayes/naive_bayes.py | AccessibleAI/ailibrary | f283f7c1608f5998694efc3cdbd0a29ebf3239c2 | [
"Apache-2.0"
] | 14 | 2019-12-03T06:08:07.000Z | 2021-11-21T11:56:13.000Z | sk_classification_NaiveBayes/naive_bayes.py | AccessibleAI/ailibrary | f283f7c1608f5998694efc3cdbd0a29ebf3239c2 | [
"Apache-2.0"
] | 2 | 2020-01-07T15:35:43.000Z | 2021-08-19T15:41:16.000Z | """
All rights reserved to cnvrg.io
http://www.cnvrg.io
cnvrg.io - AI library
Written by: Michal Ettudgi
Last update: Oct 06, 2019
Updated by: Omer Liberman
logistic_regression.py
==============================================================================
"""
import argparse
import pandas as pd
from SKTrainer import *
from sklearn.naive_bayes import MultinomialNB
from sklearn.model_selection import train_test_split
def _cast_types(args):
"""
This method performs casting to all types of inputs passed via cmd.
:param args: argparse.ArgumentParser object.
:return: argparse.ArgumentParser object.
"""
args.x_val = None if args.x_val == 'None' else int(args.x_val)
args.test_size = float(args.test_size)
args.alpha = float(args.alpha)
args.fit_prior = (args.fit_prior in ['True', "True", 'true', "true"])
# class_prior - array like type (problem to convert)
if args.class_prior == "None" or args.class_prior == 'None':
args.class_prior = None
# --------- #
return args
def main(args):
    """Train a MultinomialNB classifier on the csv passed on the cmd line.

    Assumes the last csv column holds the labels; delegates training,
    validation and model export to SKTrainer.
    """
    args = _cast_types(args)

    # The csv must hold at least 10 examples and 2 columns (features + label).
    min_rows, min_cols = 10, 2

    # Loading data, and splitting it to train and test based on user input
    data = pd.read_csv(args.data, index_col=0)
    n_rows, n_cols = data.shape
    if n_rows < min_rows:
        raise ValueError("LibraryError: The given csv doesn't have enough rows (at least 10 examples must be given).")
    if n_cols < min_cols:
        raise ValueError("DatasetError: Not enough columns in the csv (at least 2 columns must be given).")

    X, y = data.iloc[:, :-1], data.iloc[:, -1]
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=args.test_size)

    # Initializing classifier with user input
    model = MultinomialNB(alpha=args.alpha,
                          fit_prior=args.fit_prior,
                          class_prior=args.class_prior)

    # x_val is already either None or an int after _cast_types().
    trainer = SKTrainer(model=model,
                        train_set=(X_train, y_train),
                        test_set=(X_test, y_test),
                        output_model_name=args.output_model,
                        testing_mode=args.test_mode,
                        folds=args.x_val)
    trainer.run()
if __name__ == '__main__':
    # Command-line entry point: every option arrives as a string and is cast
    # later by _cast_types().
    parser = argparse.ArgumentParser(description="""MultinomialNB""")
    # ----- cnvrg.io params.
    parser.add_argument('--data', action='store', dest='data', required=True,
                        help="""String. path to csv file: The data set for the classifier. Assumes the last column includes the labels. """)

    parser.add_argument('--project_dir', action='store', dest='project_dir',
                        help="""--- For inner use of cnvrg.io ---""")

    parser.add_argument('--output_dir', action='store', dest='output_dir',
                        help="""--- For inner use of cnvrg.io ---""")

    parser.add_argument('--x_val', action='store', default="None", dest='x_val',
                        help="""Integer. Number of folds for the cross-validation. Default is None.""")

    parser.add_argument('--test_size', action='store', default="0.2", dest='test_size',
                        help="""Float. The portion of the data of testing. Default is 0.2""")

    parser.add_argument('--output_model', action='store', default="model.sav", dest='output_model',
                        help="""String. The name of the output file which is a trained random forests model. Default is logistic_regression_model.sav""")

    parser.add_argument('--test_mode', action='store', default=False, dest='test_mode',
                        help="""--- For inner use of cnvrg.io ---""")

    # ----- model's params.
    parser.add_argument('--alpha', action='store', default="0.1", dest='alpha',
                        help="""float: Additive (Laplace/Lidstone) smoothing parameter (0 for no smoothing)""")

    parser.add_argument('--fit_prior', action='store', default="True", dest='fit_prior',
                        help="""boolean Whether to learn class prior probabilities or not. If false, a uniform prior will be used.""")

    parser.add_argument('--class_prior', action='store', default=None, dest='class_prior',
                        help="""Prior probabilities of the classes. If specified the priors are not adjusted according to the data.""")

    args = parser.parse_args()
    main(args)
| 36.752137 | 150 | 0.667209 |
af73f760a08d0142bb00fe4fef708b1b8c0674d3 | 335 | py | Python | unileaks/task12.py | zahessi/unileaks | 3ed2462e11f8e3decc64ed8faceee42438ec06ff | [
"MIT"
] | null | null | null | unileaks/task12.py | zahessi/unileaks | 3ed2462e11f8e3decc64ed8faceee42438ec06ff | [
"MIT"
] | null | null | null | unileaks/task12.py | zahessi/unileaks | 3ed2462e11f8e3decc64ed8faceee42438ec06ff | [
"MIT"
] | null | null | null | # Задача: Дан числовой массив. Проверить, есть ли такие два числа в массиве,
# перемножив которые мы получим заданное число X.
def find(arr, x):
u = [e for e in arr for m in arr if e*m == x]
return len([e for e in arr for m in arr if e*m == x]) >= 2
assert find([1, 2, 3], 17) == False
assert find([23, 4, 1, 3], 12) == True | 37.222222 | 77 | 0.626866 |
6eb742f1304eff0228b46bf60a876899bba407f5 | 4,334 | py | Python | sdmetrics/timeseries/efficacy/base.py | ppeddada97/SDMetrics | 3b83f25577292b24099668d0273f3282b61d7542 | [
"MIT"
] | 66 | 2020-04-09T02:51:00.000Z | 2022-03-07T20:18:09.000Z | sdmetrics/timeseries/efficacy/base.py | ppeddada97/SDMetrics | 3b83f25577292b24099668d0273f3282b61d7542 | [
"MIT"
] | 68 | 2020-03-23T00:48:14.000Z | 2022-03-10T10:47:34.000Z | sdmetrics/timeseries/efficacy/base.py | ppeddada97/SDMetrics | 3b83f25577292b24099668d0273f3282b61d7542 | [
"MIT"
] | 21 | 2020-04-23T14:22:42.000Z | 2022-03-10T09:29:50.000Z | """Base class for Machine Learning Efficacy based metrics for Time Series."""
import numpy as np
import pandas as pd
import rdt
from sklearn.model_selection import train_test_split
from sdmetrics.goal import Goal
from sdmetrics.timeseries.base import TimeSeriesMetric
class TimeSeriesEfficacyMetric(TimeSeriesMetric):
    """Base class for Machine Learning Efficacy based metrics on time series.

    These metrics fit the same Machine Learning model once on the real data
    and once on the synthetic data, evaluate both models on a held-out split
    of the real data, and report the ratio ``synthetic_score / real_score``.

    Attributes:
        name (str):
            Name to use when reports about this metric are printed.
        goal (sdmetrics.goal.Goal):
            The goal of this metric.
        min_value (Union[float, tuple[float]]):
            Minimum value or values that this metric can take.
        max_value (Union[float, tuple[float]]):
            Maximum value or values that this metric can take.
    """

    name = 'TimeSeries Efficacy'
    goal = Goal.MAXIMIZE
    min_value = 0.0
    max_value = np.inf

    @classmethod
    def _validate_inputs(cls, real_data, synthetic_data, metadata, entity_columns, target):
        """Validate the inputs and resolve the target column name."""
        metadata, entity_columns = super()._validate_inputs(
            real_data, synthetic_data, metadata, entity_columns)
        if 'target' in metadata:
            target = metadata['target']
        elif target is None:
            raise TypeError('`target` must be passed either directly or inside `metadata`')

        return entity_columns, target

    @staticmethod
    def _build_xy(transformer, data, entity_columns, target_column):
        """Build one (features, target) row per entity sequence.

        Each row of ``X`` stores, per column, the whole transformed sequence
        as a numpy array; ``y`` holds the first target value of each group.
        """
        x_rows = []
        y_parts = []
        for entity_id, group in data.groupby(entity_columns):
            y_parts.append(pd.Series({entity_id: group.pop(target_column).iloc[0]}))
            entity_data = group.drop(entity_columns, axis=1)
            entity_data = transformer.transform(entity_data)
            x_rows.append(pd.Series({
                column: entity_data[column].to_numpy()
                for column in entity_data.columns
            }, name=entity_id))

        # BUG FIX: ``DataFrame.append``/``Series.append`` were removed in
        # pandas 2.0; assemble the frames in one shot instead of appending
        # row by row (which was also quadratic).
        X = pd.DataFrame(x_rows) if x_rows else pd.DataFrame()
        y = pd.concat(y_parts) if y_parts else pd.Series(dtype=object)
        return X, y

    @classmethod
    def _compute_score(cls, real_data, synthetic_data, entity_columns, target):
        """Fit on real and synthetic data, score both on a real hold-out."""
        transformer = rdt.HyperTransformer(dtype_transformers={
            'O': 'one_hot_encoding',
            'M': rdt.transformers.DatetimeTransformer(strip_constant=True),
        })
        transformer.fit(real_data.drop(entity_columns + [target], axis=1))

        real_x, real_y = cls._build_xy(transformer, real_data, entity_columns, target)
        synt_x, synt_y = cls._build_xy(transformer, synthetic_data, entity_columns, target)

        train, test = train_test_split(real_x.index, shuffle=True)
        real_x_train, real_x_test = real_x.loc[train], real_x.loc[test]
        real_y_train, real_y_test = real_y.loc[train], real_y.loc[test]

        # _scorer is provided by the concrete subclass.
        real_acc = cls._scorer(real_x_train, real_x_test, real_y_train, real_y_test)
        synt_acc = cls._scorer(synt_x, real_x_test, synt_y, real_y_test)

        return synt_acc / real_acc

    @classmethod
    def compute(cls, real_data, synthetic_data, metadata=None, entity_columns=None, target=None):
        """Compute this metric.

        Args:
            real_data (pandas.DataFrame):
                The values from the real dataset, passed as a pandas.DataFrame.
            synthetic_data (pandas.DataFrame):
                The values from the synthetic dataset, passed as a pandas.DataFrame.
            metadata (dict):
                TimeSeries metadata dict. If not passed, it is build based on the
                real_data fields and dtypes.
            entity_columns (list[str]):
                Names of the columns which identify different time series
                sequences.
            target (str):
                Name of the column to use as the target.

        Returns:
            Union[float, tuple[float]]:
                Metric output.
        """
        entity_columns, target = cls._validate_inputs(
            real_data, synthetic_data, metadata, entity_columns, target)

        return cls._compute_score(real_data, synthetic_data, entity_columns, target)
| 39.4 | 97 | 0.659437 |
e2ad93b2ecda24d3faf110e03deafef3e0dfec71 | 1,312 | py | Python | seq_predictor/util/infolog.py | zlijingtao/Neurobfuscator | 39fc8eaa1819bdaba4a64ca86cd5a340343ac94a | [
"Apache-2.0"
] | 3 | 2021-07-20T21:01:43.000Z | 2022-01-02T03:33:05.000Z | seq_predictor/util/infolog.py | zlijingtao/Neurobfuscator | 39fc8eaa1819bdaba4a64ca86cd5a340343ac94a | [
"Apache-2.0"
] | null | null | null | seq_predictor/util/infolog.py | zlijingtao/Neurobfuscator | 39fc8eaa1819bdaba4a64ca86cd5a340343ac94a | [
"Apache-2.0"
] | 1 | 2021-12-10T03:15:52.000Z | 2021-12-10T03:15:52.000Z | import atexit
from datetime import datetime
import json
from threading import Thread
from urllib.request import Request, urlopen
# Timestamp format used to prefix every log line.
_format = '%Y-%m-%d %H:%M:%S.%f'
# Module-level state; configured by init() and torn down by _close_logfile().
_file = None
_run_name = None
_slack_url = None
def init(filename, run_name, slack_url=None):
    """Open *filename* for appending and remember the run name / Slack URL.

    Any previously opened log file is closed first; a banner is written so
    consecutive runs are visually separated in the log.
    """
    global _file, _run_name, _slack_url
    _close_logfile()
    # Explicit encoding: the platform default is locale dependent and can
    # mangle non-ASCII log messages on some systems.
    _file = open(filename, 'a', encoding='utf-8')
    _file.write('\n-----------------------------------------------------------------\n')
    _file.write('Starting new training run\n')
    _file.write('-----------------------------------------------------------------\n')
    _run_name = run_name
    _slack_url = slack_url
def log(msg, slack=False):
    """Print *msg*, mirror it into the log file, and optionally post it to
    Slack on a background thread."""
    print(msg)
    if _file is not None:
        stamp = datetime.now().strftime(_format)[:-3]
        _file.write('[%s] %s\n' % (stamp, msg))
        _file.flush()
    if slack and _slack_url is not None:
        Thread(target=_send_slack, args=(msg,)).start()
def _close_logfile():
    """Close and forget the current log file handle, if one is open."""
    global _file
    if _file is None:
        return
    _file.close()
    _file = None
def _send_slack(msg):
    """POST *msg* to the configured Slack webhook as the 'tacotron' bot."""
    payload = json.dumps({
        'username': 'tacotron',
        'icon_emoji': ':taco:',
        'text': '*%s*: %s' % (_run_name, msg)
    }).encode()
    request = Request(_slack_url)
    request.add_header('Content-Type', 'application/json')
    urlopen(request, payload)
atexit.register(_close_logfile)
| 25.230769 | 88 | 0.571646 |
5745a1d7fb1ef15914df12085010ef696cb9f4ba | 4,522 | py | Python | benchmark/startQiskit_QC3182.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startQiskit_QC3182.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startQiskit_QC3182.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=4
# total number=43
import cirq
import qiskit
from qiskit import IBMQ
from qiskit.providers.ibmq import least_busy
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
    """XOR two bit strings character-wise and return the result with the
    digit order reversed (the endianness convention used by this script)."""
    bits = [str(int(s[i]) ^ int(t[i])) for i in range(len(s))]
    return ''.join(reversed(bits))
def bitwise_dot(s: str, t: str) -> str:
    """Dot product of two bit strings modulo 2, returned as '0' or '1'."""
    total = sum(int(s[i]) * int(t[i]) for i in range(len(s)))
    return str(total % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
    """Build the oracle O_f on n control qubits plus one target qubit.

    For every n-bit input where f evaluates to "1", a multi-controlled
    Toffoli flips the target; X gates temporarily map the '0' bits of the
    pattern so the controls fire on exactly that input.
    """
    # NOTE: use multi_control_toffoli_gate ('noancilla' mode)
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")
    for value in range(2 ** n):
        bits = np.binary_repr(value, n)
        if f(bits) != "1":
            continue
        # Qubits whose pattern bit is '0' must be inverted around the MCT.
        flipped = [controls[j] for j in range(n) if bits[j] == "0"]
        for qubit in flipped:
            oracle.x(qubit)
        oracle.mct(controls, target[0], None, mode='noancilla')
        for qubit in flipped:
            oracle.x(qubit)
    return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
    """Build the (auto-generated) benchmark circuit on n qubits.

    The '# number=NN' comments are generation-order tags from the benchmark
    generator; the exact gate sequence is the benchmark itself, so it must
    not be reordered.
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    classical = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classical)
    prog.h(input_qubit[3]) # number=35
    prog.cz(input_qubit[0],input_qubit[3]) # number=36
    prog.h(input_qubit[3]) # number=37
    prog.h(input_qubit[3]) # number=22
    prog.cx(input_qubit[0],input_qubit[3]) # number=32
    prog.x(input_qubit[3]) # number=33
    prog.cx(input_qubit[0],input_qubit[3]) # number=34
    prog.h(input_qubit[3]) # number=19
    prog.cz(input_qubit[0],input_qubit[3]) # number=20
    prog.h(input_qubit[3]) # number=21
    prog.z(input_qubit[3]) # number=10
    prog.h(input_qubit[1]) # number=2
    prog.h(input_qubit[2]) # number=3
    prog.h(input_qubit[3]) # number=4
    prog.h(input_qubit[0]) # number=5

    # Append the oracle over the first n-1 qubits with the last as target.
    oracle = build_oracle(n-1, f)
    prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
    prog.h(input_qubit[1]) # number=6
    prog.h(input_qubit[2]) # number=7
    prog.h(input_qubit[3]) # number=8
    prog.h(input_qubit[0]) # number=9

    prog.h(input_qubit[0]) # number=26
    prog.cz(input_qubit[1],input_qubit[0]) # number=27
    prog.h(input_qubit[0]) # number=28
    prog.cx(input_qubit[1],input_qubit[0]) # number=40
    prog.z(input_qubit[1]) # number=41
    prog.cx(input_qubit[1],input_qubit[0]) # number=42
    prog.cx(input_qubit[3],input_qubit[2]) # number=38
    prog.h(input_qubit[0]) # number=29
    prog.cz(input_qubit[1],input_qubit[0]) # number=30
    prog.h(input_qubit[0]) # number=31
    prog.h(input_qubit[3]) # number=39
    prog.h(input_qubit[1]) # number=18
    prog.rx(2.8902652413026093,input_qubit[2]) # number=13
    prog.y(input_qubit[1]) # number=11
    prog.y(input_qubit[1]) # number=12
    # circuit end

    # Measure every qubit into its matching classical bit.
    for i in range(n):
        prog.measure(input_qubit[i], classical[i])

    return prog
if __name__ == '__main__':
    # Bernstein-Vazirani-style hidden-string oracle parameters.
    a = "111"
    b = "0"
    f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
    prog = make_circuit(4,f)

    # Run on the least busy real IBMQ backend with at least 2 qubits.
    IBMQ.load_account()
    provider = IBMQ.get_provider(hub='ibm-q')
    provider.backends()
    backend = least_busy(provider.backends(filters=lambda x: x.configuration().n_qubits >= 2 and not x.configuration().simulator and x.status().operational == True))

    sample_shot =8000

    info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
    # Transpile against the FakeVigo mock backend only to report circuit size.
    backend = FakeVigo()
    circuit1 = transpile(prog,backend,optimization_level=2)

    # Dump the measurement counts and the transpiled circuit to csv.
    writefile = open("../data/startQiskit_QC3182.csv","w")
    print(info,file=writefile)
    print("results end", file=writefile)

    print(circuit1.__len__(),file=writefile)
    print(circuit1,file=writefile)
    writefile.close()
| 35.606299 | 165 | 0.656568 |
32ac321cb26bede0238f2c59c142591c360f2dbf | 299 | py | Python | module/{{cookiecutter.project}}/{{cookiecutter.package_dir}}/{{cookiecutter.module}}.py | chhsiao1981/cc_flask_template | cbd7d5f3775176f88002490877f7607973dca958 | [
"MIT"
] | null | null | null | module/{{cookiecutter.project}}/{{cookiecutter.package_dir}}/{{cookiecutter.module}}.py | chhsiao1981/cc_flask_template | cbd7d5f3775176f88002490877f7607973dca958 | [
"MIT"
] | null | null | null | module/{{cookiecutter.project}}/{{cookiecutter.package_dir}}/{{cookiecutter.module}}.py | chhsiao1981/cc_flask_template | cbd7d5f3775176f88002490877f7607973dca958 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import ujson as json
import re
from {{cookiecutter.project_name}}.constants import *
from {{cookiecutter.project_name}} import cfg
from {{cookiecutter.project_name}} import util
from {{cookiecutter.project_name}}.util import Error
# Cookiecutter template stub: the placeholder is substituted with the real
# module name at project-generation time; fill in the implementation there.
def {{cookiecutter.module}}():
    pass
| 21.357143 | 53 | 0.73913 |
33d34fc9ea8ddc7bccce47dec956c90c132f00df | 1,491 | py | Python | lib-src/lv2/suil/waflib/Tools/suncc.py | joshrose/audacity | e2b1a2be6b92661628bbb054f915bc50b211c020 | [
"CC-BY-3.0"
] | 7,892 | 2015-03-31T09:24:05.000Z | 2022-03-31T12:30:32.000Z | lib-src/lv2/suil/waflib/Tools/suncc.py | joshrose/audacity | e2b1a2be6b92661628bbb054f915bc50b211c020 | [
"CC-BY-3.0"
] | 2,050 | 2015-04-03T13:27:52.000Z | 2022-03-31T19:14:10.000Z | lib-src/lv2/suil/waflib/Tools/suncc.py | joshrose/audacity | e2b1a2be6b92661628bbb054f915bc50b211c020 | [
"CC-BY-3.0"
] | 2,613 | 2015-03-26T11:28:10.000Z | 2022-03-30T13:17:03.000Z | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
from waflib import Errors
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
@conf
def find_scc(conf):
	"""
	Detects the Sun C compiler.

	Locates ``cc`` on PATH, stores it in ``conf.env.CC`` and verifies that it
	is the Sun/Oracle compiler by probing the Sun-specific ``-flags`` option.
	"""
	v = conf.env
	cc = conf.find_program('cc', var='CC')
	try:
		conf.cmd_and_log(cc + ['-flags'])
	except Errors.WafError:
		# A non-Sun cc does not understand -flags; abort configuration.
		conf.fatal('%r is not a Sun compiler' % cc)
	v.CC_NAME = 'sun'
	# Record the compiler version for later feature checks.
	conf.get_suncc_version(cc)
@conf
def scc_common_flags(conf):
	"""
	Flags required for executing the sun C compiler.

	Populates ``conf.env`` with the command-line templates (include paths,
	defines, library flags, shared/static patterns) used by the cc task
	generators.
	"""
	v = conf.env

	v.CC_SRC_F            = []
	v.CC_TGT_F            = ['-c', '-o', '']

	if not v.LINK_CC:
		v.LINK_CC = v.CC

	v.CCLNK_SRC_F         = ''
	v.CCLNK_TGT_F         = ['-o', '']
	v.CPPPATH_ST          = '-I%s'
	v.DEFINES_ST          = '-D%s'

	v.LIB_ST              = '-l%s' # template for adding libs
	v.LIBPATH_ST          = '-L%s' # template for adding libpaths
	v.STLIB_ST            = '-l%s'
	v.STLIBPATH_ST        = '-L%s'

	v.SONAME_ST           = '-Wl,-h,%s'
	v.SHLIB_MARKER        = '-Bdynamic'
	v.STLIB_MARKER        = '-Bstatic'

	v.cprogram_PATTERN    = '%s'

	# Position-independent code is required for shared objects.
	v.CFLAGS_cshlib       = ['-xcode=pic32', '-DPIC']
	v.LINKFLAGS_cshlib    = ['-G']
	v.cshlib_PATTERN      = 'lib%s.so'

	v.LINKFLAGS_cstlib    = ['-Bstatic']
	v.cstlib_PATTERN      = 'lib%s.a'
def configure(conf):
	"""Waf tool entry point: detect suncc and set up the C tool chain."""
	conf.find_scc()
	conf.find_ar()
	conf.scc_common_flags()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
| 21.926471 | 62 | 0.594232 |
cd1f8653fd781dff837b1a1594582eed5be6a53c | 6,064 | py | Python | sdk/python/pulumi_azure_native/hybridcompute/private_endpoint_connection.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/hybridcompute/private_endpoint_connection.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/hybridcompute/private_endpoint_connection.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['PrivateEndpointConnection']
# NOTE: auto-generated by the Pulumi SDK Generator (see file header); keep the
# structure regenerable-identical and do not hand-edit the logic.
class PrivateEndpointConnection(pulumi.CustomResource):
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 private_endpoint_connection_name: Optional[pulumi.Input[str]] = None,
                 properties: Optional[pulumi.Input[pulumi.InputType['PrivateEndpointConnectionPropertiesArgs']]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 scope_name: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        A private endpoint connection
        API Version: 2021-01-28-preview.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] private_endpoint_connection_name: The name of the private endpoint connection.
        :param pulumi.Input[pulumi.InputType['PrivateEndpointConnectionPropertiesArgs']] properties: Resource properties.
        :param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
        :param pulumi.Input[str] scope_name: The name of the Azure Arc PrivateLinkScope resource.
        """
        # Legacy positional-style arguments kept for backwards compatibility.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt an existing resource": no inputs allowed.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            __props__['private_endpoint_connection_name'] = private_endpoint_connection_name
            __props__['properties'] = properties
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            if scope_name is None and not opts.urn:
                raise TypeError("Missing required property 'scope_name'")
            __props__['scope_name'] = scope_name
            # Output-only properties start as None and are filled by the engine.
            __props__['name'] = None
            __props__['system_data'] = None
            __props__['type'] = None
        # Aliases keep state continuity across provider/API-version renames.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:hybridcompute:PrivateEndpointConnection"), pulumi.Alias(type_="azure-native:hybridcompute/v20200815preview:PrivateEndpointConnection"), pulumi.Alias(type_="azure-nextgen:hybridcompute/v20200815preview:PrivateEndpointConnection"), pulumi.Alias(type_="azure-native:hybridcompute/v20210128preview:PrivateEndpointConnection"), pulumi.Alias(type_="azure-nextgen:hybridcompute/v20210128preview:PrivateEndpointConnection")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(PrivateEndpointConnection, __self__).__init__(
            'azure-native:hybridcompute:PrivateEndpointConnection',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'PrivateEndpointConnection':
        """
        Get an existing PrivateEndpointConnection resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = dict()

        __props__["name"] = None
        __props__["properties"] = None
        __props__["system_data"] = None
        __props__["type"] = None
        return PrivateEndpointConnection(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def properties(self) -> pulumi.Output['outputs.PrivateEndpointConnectionPropertiesResponse']:
        """
        Resource properties.
        """
        return pulumi.get(self, "properties")

    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
        """
        The system meta data relating to this resource.
        """
        return pulumi.get(self, "system_data")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")

    def translate_output_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 45.253731 | 504 | 0.670844 |
8f02dc5d991629be695eb3dd0042f067e4e1795f | 6,874 | py | Python | tests/unit/states/test_boto_kinesis.py | velom/salt | f5d4334178c50d0dfcd205d5a7fb9cfb27fd369e | [
"Apache-2.0"
] | 1 | 2021-04-05T19:46:35.000Z | 2021-04-05T19:46:35.000Z | tests/unit/states/test_boto_kinesis.py | dv-trading/salt | f5d4334178c50d0dfcd205d5a7fb9cfb27fd369e | [
"Apache-2.0"
] | null | null | null | tests/unit/states/test_boto_kinesis.py | dv-trading/salt | f5d4334178c50d0dfcd205d5a7fb9cfb27fd369e | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.unit import skipIf, TestCase
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch)
# Import Salt Libs
from salt.states import boto_kinesis
# Give the module empty loader dunders so patch.dict() can populate them
# inside the tests below.
boto_kinesis.__salt__ = {}
boto_kinesis.__opts__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class BotoKinesisTestCase(TestCase):
    '''
    Test cases for salt.states.boto_kinesis
    '''
    # 'present' function tests: 1
    maxDiff = None

    def test_stream_present(self):
        '''
        Test to ensure the kinesis stream exists.
        '''
        name = 'new_stream'
        retention_hours = 24
        enhanced_monitoring = ['IteratorAgeMilliseconds']
        different_enhanced_monitoring = ['IncomingBytes']
        num_shards = 1

        ret = {'name': name,
               'result': True,
               'changes': {},
               'comment': ''}

        # Minimal stream description as boto_kinesis.get_stream_when_active
        # would return it for an ACTIVE single-shard stream.
        shards = [{'ShardId': 'shardId-000000000000',
                   'HashKeyRange': {'EndingHashKey': 'big number', 'StartingHashKey': '0'},
                   'SequenceNumberRange': {'StartingSequenceNumber': 'bigger number'}}]
        stream_description = {'HasMoreShards': False,
                              'RetentionPeriodHours': retention_hours,
                              'StreamName': name,
                              'Shards': shards,
                              'StreamARN': "",
                              'EnhancedMonitoring': [{'ShardLevelMetrics': enhanced_monitoring}],
                              'StreamStatus': 'ACTIVE'}

        # side_effect order matters: one value is consumed per present() call
        # below (exists, missing, exists, missing).
        exists_mock = MagicMock(side_effect=[{'result': True}, {'result': False}, {'result': True}, {'result': False}])
        get_stream_mock = MagicMock(return_value={'result': {'StreamDescription': stream_description}})
        shard_mock = MagicMock(return_value=[0, 0, {'OpenShards': shards}])
        dict_mock = MagicMock(return_value={'result': True})
        mock_bool = MagicMock(return_value=True)
        with patch.dict(boto_kinesis.__salt__,
                        {'boto_kinesis.exists': exists_mock,
                         'boto_kinesis.create_stream': dict_mock,
                         'boto_kinesis.get_stream_when_active': get_stream_mock,
                         'boto_kinesis.get_info_for_reshard': shard_mock,
                         'boto_kinesis.num_shards_matches': mock_bool}):
            # already present, no change required
            comt = ('Kinesis stream {0} already exists,\n'
                    'Kinesis stream {0}: retention hours did not require change, already set at {1},\n'
                    'Kinesis stream {0}: enhanced monitoring did not require change, already set at {2},\n'
                    'Kinesis stream {0}: did not require resharding, remains at {3} shards'
                    .format(name, retention_hours, enhanced_monitoring, num_shards))
            ret.update({'comment': comt})
            self.assertDictEqual(boto_kinesis.present(name, retention_hours, enhanced_monitoring, num_shards), ret)

            with patch.dict(boto_kinesis.__opts__, {'test': True}):
                # not present, test environment (dry run)
                comt = ('Kinesis stream {0} would be created'
                        .format(name))
                ret.update({'comment': comt, 'result': None})
                self.assertDictEqual(boto_kinesis.present(name, retention_hours, enhanced_monitoring, num_shards), ret)

                # already present, changes required, test environment (dry run)
                comt = ('Kinesis stream {0} already exists,\n'
                        'Kinesis stream {0}: retention hours would be updated to {1},\n'
                        'Kinesis stream {0}: would enable enhanced monitoring for {2},\n'
                        'Kinesis stream {0}: would disable enhanced monitoring for {3},\n'
                        'Kinesis stream {0}: would be resharded from {4} to {5} shards'
                        .format(name, retention_hours+1, different_enhanced_monitoring,
                                enhanced_monitoring, num_shards, num_shards+1))
                ret.update({'comment': comt, 'result': None})
                self.assertDictEqual(boto_kinesis.present(name, retention_hours+1, different_enhanced_monitoring,
                                                          num_shards+1), ret)

            # not present, create and configure
            changes = {'new': {'name': name,
                               'num_shards': num_shards}}

            with patch.dict(boto_kinesis.__opts__, {'test': False}):
                comt = ('Kinesis stream {0} successfully created,\n'
                        'Kinesis stream {0}: retention hours did not require change, already set at {1},\n'
                        'Kinesis stream {0}: enhanced monitoring did not require change, already set at {2},\n'
                        'Kinesis stream {0}: did not require resharding, remains at {3} shards'
                        .format(name, retention_hours, enhanced_monitoring, num_shards))
                ret.update({'comment': comt, 'result': True,
                            'changes': changes})
                self.assertDictEqual(ret, boto_kinesis.present(name, retention_hours, enhanced_monitoring, num_shards))

    # 'absent' function tests: 1

    def test_absent(self):
        '''
        Test to ensure the Kinesis stream does not exist.
        '''
        name = 'new_stream'

        ret = {'name': name,
               'result': True,
               'changes': {},
               'comment': ''}

        # side_effect order: absent() is called three times below
        # (missing, exists/dry-run, exists/real delete).
        mock = MagicMock(side_effect=[{'result': False}, {'result': True}, {'result': True}])
        mock_bool = MagicMock(return_value={'result': True})
        with patch.dict(boto_kinesis.__salt__,
                        {'boto_kinesis.exists': mock,
                         'boto_kinesis.delete_stream': mock_bool}):
            comt = ('Kinesis stream {0} does not exist'.format(name))
            ret.update({'comment': comt})
            self.assertDictEqual(boto_kinesis.absent(name), ret)

            with patch.dict(boto_kinesis.__opts__, {'test': True}):
                comt = ('Kinesis stream {0} would be deleted'.format(name))
                ret.update({'comment': comt, 'result': None})
                self.assertDictEqual(boto_kinesis.absent(name), ret)

            changes = {'new': 'Stream {0} deleted'.format(name),
                       'old': 'Stream {0} exists'.format(name)}

            with patch.dict(boto_kinesis.__opts__, {'test': False}):
                comt = ('Deleted stream {0}'.format(name))
                ret.update({'comment': comt, 'result': True,
                            'changes': changes})
                self.assertDictEqual(boto_kinesis.absent(name), ret)
| 48.06993 | 119 | 0.561682 |
3f82312af42ed7edfa2d479bff37b33799bd1046 | 241 | py | Python | setup.py | XiaolinLiu1998/pbpe | 5cc29805cc26dfc7e1aa49399c2425964b6e6614 | [
"BSD-2-Clause"
] | null | null | null | setup.py | XiaolinLiu1998/pbpe | 5cc29805cc26dfc7e1aa49399c2425964b6e6614 | [
"BSD-2-Clause"
] | null | null | null | setup.py | XiaolinLiu1998/pbpe | 5cc29805cc26dfc7e1aa49399c2425964b6e6614 | [
"BSD-2-Clause"
] | null | null | null | from distutils.core import setup
from Cython.Build import cythonize
import os
import numpy
os.environ['CFLAGS'] = '-O3 -Wall -ffast-math -fopenmp'
setup(
ext_modules = cythonize("src/*.pyx"),
include_dirs = [numpy.get_include()]
)
| 20.083333 | 55 | 0.717842 |
6f33b3510dadbfe0040ef34cf5d8e049ea25b86b | 2,391 | py | Python | psydac/utilities/quadratures.py | mayuri-dhote/psydac | 01ddbe2d049a599684c45060912d01c2658160a3 | [
"MIT"
] | 20 | 2019-07-30T12:37:57.000Z | 2022-03-09T11:35:04.000Z | psydac/utilities/quadratures.py | mayuri-dhote/psydac | 01ddbe2d049a599684c45060912d01c2658160a3 | [
"MIT"
] | 98 | 2019-04-01T16:32:27.000Z | 2022-03-21T19:30:35.000Z | psydac/utilities/quadratures.py | mayuri-dhote/psydac | 01ddbe2d049a599684c45060912d01c2658160a3 | [
"MIT"
] | 7 | 2019-10-03T03:49:47.000Z | 2022-03-01T09:11:49.000Z | # -*- coding: UTF-8 -*-
#! /usr/bin/python
"""
This module contains some routines to generate quadrature points in 1D
it has also a routine uniform, which generates uniform points
with weights equal to 1
"""
import numpy as np
# ....
def gauss_legendre(ordergl, tol=10e-14):
    """
    Return nodal abscissas {x} and weights {A} of the Gauss-Legendre
    quadrature with ``ordergl + 1`` points on [-1, 1].
    """
    from math import cos, pi
    from numpy import zeros

    m = ordergl + 1

    def legendre(t, m):
        # Evaluate P_m(t) and its derivative via the three-term recurrence.
        p0, p1 = 1.0, t
        for k in range(1, m):
            p0, p1 = p1, ((2.0 * k + 1.0) * t * p1 - k * p0) / (1.0 + k)
        dp = m * (p0 - t * p1) / (1.0 - t ** 2)
        return p1, dp

    x = zeros(m)
    A = zeros(m)
    nRoots = (m + 1) // 2                    # Number of non-negative roots
    for i in range(nRoots):
        # Chebyshev-style initial guess, polished by Newton-Raphson.
        t = cos(pi * (i + 0.75) / (m + 0.5))
        for _ in range(30):
            p, dp = legendre(t, m)
            dt = -p / dp
            t += dt
            if abs(dt) < tol:
                # Roots come in +/- pairs; weights are symmetric too.
                x[i], x[m - i - 1] = t, -t
                A[i] = 2.0 / (1.0 - t ** 2) / (dp ** 2)
                A[m - i - 1] = A[i]
                break
    return x, A
# ...
# ...
def gauss_lobatto(k):
    """
    Return nodal abscissas {x} and weights {w} of the (k+1)-point
    Gauss-Lobatto quadrature on [-1, 1] (the endpoints are nodes).
    """
    # Three-term recurrence coefficients of the associated Jacobi matrix.
    beta = .5 / np.sqrt(1 - (2 * np.arange(1., k + 1)) ** (-2))
    beta[-1] = np.sqrt(k / (2 * k - 1.))
    jacobi = np.diag(beta, 1) + np.diag(beta, -1)
    eigvals, eigvecs = np.linalg.eig(jacobi)
    nodes = np.real(eigvals)
    order = nodes.argsort()                  # permutation that sorts nodes
    nodes.sort()
    weights = 2 * eigvecs[0, :] ** 2         # weights from first components
    return nodes, weights[order]
# ....
# ....
def quadrature(a, k, method="legendre"):
    """
    Generate quadrature points and weights on every cell of the grid *a*.

    :param a: 1d array of N grid points (cell boundaries).
    :param k: quadrature order; each cell receives k + 1 points.
    :param method: 'legendre' (Gauss-Legendre) or 'lobatto' (Gauss-Lobatto).
    :return: (xgl, wgl) arrays of shape (N - 1, k + 1) holding the mapped
             points and the correspondingly scaled weights per cell.
    :raises NotImplementedError: if *method* is not recognized.
    """
    if method == "legendre":
        x, w = gauss_legendre(k)
    elif method == "lobatto":
        x, w = gauss_lobatto(k)
    else:
        # BUG FIX: the code raised ``NotImplemented`` (a singleton, not an
        # exception class), which produced a TypeError at runtime instead of
        # the intended error.
        raise NotImplementedError("> Only Gauss-Legendre and Gauss-Lobatto are implemented.")

    grid = a
    N = len(a)
    xgl = np.zeros((N - 1, k + 1))
    wgl = np.zeros((N - 1, k + 1))
    for i in range(0, N - 1):
        xmin = grid[i]
        xmax = grid[i + 1]
        dx = 0.5 * (xmax - xmin)
        # Affine map of the reference points from [-1, 1] onto [xmin, xmax];
        # the row is reversed, matching the descending order returned by
        # gauss_legendre (NOTE(review): for 'lobatto' the reference points
        # are ascending, so rows come out descending — confirm intended).
        tab = dx * x + dx + xmin
        xgl[i, :] = tab[::-1]
        wgl[i, :] = 0.5 * (xmax - xmin) * w
    return xgl, wgl
| 27.170455 | 87 | 0.49519 |
a06c32ce09a5fd0c2b04b4602f3e77724574d587 | 938 | py | Python | spamclib/spamc_body.py | wevsty/spamclib | 4e844f9644ecdf4f9dfadbe1e3a784dc7d273a31 | [
"MIT"
] | 1 | 2019-12-06T09:00:43.000Z | 2019-12-06T09:00:43.000Z | spamclib/spamc_body.py | wevsty/spamclib | 4e844f9644ecdf4f9dfadbe1e3a784dc7d273a31 | [
"MIT"
] | null | null | null | spamclib/spamc_body.py | wevsty/spamclib | 4e844f9644ecdf4f9dfadbe1e3a784dc7d273a31 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from .type_convert import anything_to_string
from .type_convert import anything_to_bytes
import zlib
class SpamcBody(object):
    """Holds the message body of a SPAMC request.

    The body is always stored as bytes and is normalized to end with a
    trailing newline when assigned through the ``body`` property.
    """

    # Class-level default; every instance shadows it in __init__.
    _body = b''
    # _compressed_body = b''

    def __init__(self):
        self._body = b''
        # self._compressed_body = b''

    @staticmethod
    def format_body(input_body):
        """Coerce *input_body* to bytes and guarantee a trailing b'\\n'."""
        body_bytes = anything_to_bytes(input_body)
        # Idiom fix: test the boolean directly instead of "== True".
        if body_bytes.endswith(b'\n'):
            return body_bytes
        return body_bytes + b'\n'

    # "body" is exposed as a property so that assignment normalizes the value.
    @property
    def body(self):
        return self._body

    @body.setter
    def body(self, value):
        self._body = self.format_body(value)

    @staticmethod
    def zlib_compress_data(value):
        """Compress raw bytes with zlib."""
        return zlib.compress(value)

    @staticmethod
    def zlib_decompress_data(value):
        """Inverse of :meth:`zlib_compress_data`."""
        return zlib.decompress(value)
if __name__ == '__main__':
    # Library module: nothing to do when executed directly.
    pass
| 20.844444 | 51 | 0.634328 |
c89761a02d3f18321cfb4b9046f3b674f5a2bc64 | 5,913 | py | Python | dqn/exercise/dqn_agent.py | aopina1/DRLND-Course | 1140e101221cfcee16a24d146b4f86cd79d53832 | [
"MIT"
] | null | null | null | dqn/exercise/dqn_agent.py | aopina1/DRLND-Course | 1140e101221cfcee16a24d146b4f86cd79d53832 | [
"MIT"
] | 7 | 2019-12-16T22:13:37.000Z | 2022-02-10T01:05:42.000Z | dqn/exercise/dqn_agent.py | aopina1/DRLND-Course | 1140e101221cfcee16a24d146b4f86cd79d53832 | [
"MIT"
] | null | null | null | import numpy as np
import random
from collections import namedtuple, deque
from model import QNetwork
import torch
import torch.nn.functional as F
import torch.optim as optim
BUFFER_SIZE = int(1e5) # replay buffer size
BATCH_SIZE = 64 # minibatch size
GAMMA = 0.99 # discount factor
TAU = 1e-3 # for soft update of target parameters
LR = 5e-4 # learning rate
UPDATE_EVERY = 4 # how often to update the network
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class Agent():
    """Interacts with and learns from the environment."""

    def __init__(self, state_size, action_size, seed):
        """Initialize an Agent object.

        Params
        ======
            state_size (int): dimension of each state
            action_size (int): dimension of each action
            seed (int): random seed
        """
        self.state_size = state_size
        self.action_size = action_size
        self.seed = random.seed(seed)

        # Q-Network
        self.qnetwork_local = QNetwork(state_size, action_size, seed).to(device)
        self.qnetwork_target = QNetwork(state_size, action_size, seed).to(device)
        self.optimizer = optim.Adam(self.qnetwork_local.parameters(), lr=LR)

        # Replay memory
        self.memory = ReplayBuffer(action_size, BUFFER_SIZE, BATCH_SIZE, seed)
        # Initialize time step (for updating every UPDATE_EVERY steps)
        self.t_step = 0

    def step(self, state, action, reward, next_state, done):
        """Record one transition and learn every UPDATE_EVERY steps."""
        # Save experience in replay memory
        self.memory.add(state, action, reward, next_state, done)

        # Learn every UPDATE_EVERY time steps.
        self.t_step = (self.t_step + 1) % UPDATE_EVERY
        if self.t_step == 0:
            # If enough samples are available in memory, get random subset and learn
            if len(self.memory) > BATCH_SIZE:
                experiences = self.memory.sample()
                self.learn(experiences, GAMMA)

    def act(self, state, eps=0.):
        """Returns actions for given state as per current policy.

        Params
        ======
            state (array_like): current state
            eps (float): epsilon, for epsilon-greedy action selection
        """
        state = torch.from_numpy(state).float().unsqueeze(0).to(device)
        self.qnetwork_local.eval()
        with torch.no_grad():
            action_values = self.qnetwork_local(state)
        self.qnetwork_local.train()

        # Epsilon-greedy action selection
        if random.random() > eps:
            return np.argmax(action_values.cpu().data.numpy())
        else:
            return random.choice(np.arange(self.action_size))

    def learn(self, experiences, gamma):
        """Update value parameters using given batch of experience tuples.

        Fix: the original left this method as a TODO stub; the standard DQN
        temporal-difference update is implemented below.

        Params
        ======
            experiences (Tuple[torch.Tensor]): tuple of (s, a, r, s', done) tuples
            gamma (float): discount factor
        """
        states, actions, rewards, next_states, dones = experiences

        # Max predicted Q values for the next states, from the target network.
        Q_targets_next = self.qnetwork_target(next_states).detach().max(1)[0].unsqueeze(1)
        # TD targets; (1 - dones) zeroes the bootstrap term on terminal steps.
        Q_targets = rewards + (gamma * Q_targets_next * (1 - dones))
        # Q values predicted by the local network for the actions actually taken.
        Q_expected = self.qnetwork_local(states).gather(1, actions)
        # Minimize the TD error.
        loss = F.mse_loss(Q_expected, Q_targets)
        self.optimizer.zero_grad()
        loss.backward()
        self.optimizer.step()

        # ------------------- update target network ------------------- #
        self.soft_update(self.qnetwork_local, self.qnetwork_target, TAU)

    def soft_update(self, local_model, target_model, tau):
        """Soft update model parameters.
        θ_target = τ*θ_local + (1 - τ)*θ_target

        Params
        ======
            local_model (PyTorch model): weights will be copied from
            target_model (PyTorch model): weights will be copied to
            tau (float): interpolation parameter
        """
        for target_param, local_param in zip(target_model.parameters(), local_model.parameters()):
            target_param.data.copy_(tau*local_param.data + (1.0-tau)*target_param.data)
class ReplayBuffer:
    """Fixed-size buffer to store experience tuples."""

    def __init__(self, action_size, buffer_size, batch_size, seed):
        """Initialize a ReplayBuffer object.

        Params
        ======
            action_size (int): dimension of each action
            buffer_size (int): maximum size of buffer
            batch_size (int): size of each training batch
            seed (int): random seed
        """
        self.action_size = action_size
        self.memory = deque(maxlen=buffer_size)
        self.batch_size = batch_size
        self.experience = namedtuple(
            "Experience",
            field_names=["state", "action", "reward", "next_state", "done"])
        self.seed = random.seed(seed)

    def add(self, state, action, reward, next_state, done):
        """Add a new experience to memory."""
        self.memory.append(
            self.experience(state, action, reward, next_state, done))

    def sample(self):
        """Randomly sample a batch of experiences from memory."""
        batch = random.sample(self.memory, k=self.batch_size)

        # Stack each field into a column tensor on the module-level device.
        # The "is not None" guard mirrors the original defensive filtering.
        states = torch.from_numpy(
            np.vstack([e.state for e in batch if e is not None])).float().to(device)
        actions = torch.from_numpy(
            np.vstack([e.action for e in batch if e is not None])).long().to(device)
        rewards = torch.from_numpy(
            np.vstack([e.reward for e in batch if e is not None])).float().to(device)
        next_states = torch.from_numpy(
            np.vstack([e.next_state for e in batch if e is not None])).float().to(device)
        dones = torch.from_numpy(
            np.vstack([e.done for e in batch if e is not None]).astype(np.uint8)).float().to(device)

        return (states, actions, rewards, next_states, dones)

    def __len__(self):
        """Return the current size of internal memory."""
        return len(self.memory)
36ef4dc32466dc2a08770a6884f4b81c18d4fa72 | 1,097 | py | Python | testdb.py | bradneuman/BlameOverTime | a41ea352d6d0a5ab91af45160da7fe937b7a6e2d | [
"MIT"
] | null | null | null | testdb.py | bradneuman/BlameOverTime | a41ea352d6d0a5ab91af45160da7fe937b7a6e2d | [
"MIT"
] | null | null | null | testdb.py | bradneuman/BlameOverTime | a41ea352d6d0a5ab91af45160da7fe937b7a6e2d | [
"MIT"
] | null | null | null | import blameDBQuery as query
import sqlite3
db_filename = 'blame.db'
def showCurrBlame(conn):
blame = query.GetCurrentBlame(conn.cursor())
filenameLength = max(max([len(row[0]) for row in blame]), len('filename'))
authorLength = max(max([len(row[1]) for row in blame]), len('author'))
linesLength = 6
format_str = "%%%ds | %%%ds | %%%ds" % (filenameLength, authorLength, linesLength)
break_str = format_str % (filenameLength * '-', authorLength * '-', linesLength * '-')
print format_str % ('filename', 'author', 'lines')
print break_str
lastFilename = None
for line in blame:
filename = line[0]
if filename == lastFilename:
print format_str % ('', line[1], line[2])
else:
print format_str % line
lastFilename = filename
def blameOverTime(conn):
    """Dump the full blame-over-time query result for manual inspection."""
    # blame = query.GetBlameOverTime(conn.cursor())
    blame = query.GetFullBlameOverTime(conn.cursor())
    import pprint
    pprint.pprint(blame)
# Entry point: open the blame database and dump the blame-over-time report.
with sqlite3.connect(db_filename) as conn:
    # showCurrBlame(conn)
    blameOverTime(conn)
| 26.119048 | 90 | 0.642662 |
2c938af8ad78e733327b307f8098deff1fb5631a | 8,919 | py | Python | maya/plug-ins/heimer.py | cedricB/circeCharacterWorksTools | cf7d793239c291a8a8aec5c60ede1250415581d9 | [
"MIT"
] | 34 | 2015-03-13T08:40:02.000Z | 2022-03-31T12:30:48.000Z | maya/plug-ins/heimer.py | cedricB/circeCharacterWorksTools | cf7d793239c291a8a8aec5c60ede1250415581d9 | [
"MIT"
] | null | null | null | maya/plug-ins/heimer.py | cedricB/circeCharacterWorksTools | cf7d793239c291a8a8aec5c60ede1250415581d9 | [
"MIT"
] | 9 | 2015-03-13T08:40:04.000Z | 2020-11-06T09:15:45.000Z | '''
########################################################################
# #
# heimer.py #
# #
# Email: cedricbazillou@gmail.com #
# blog: http://circecharacterworks.wordpress.com/ #
########################################################################
L I C E N S E:
1. The MIT License (MIT)
Copyright (c) 2009-2015 Cedric BAZILLOU cedricbazillou@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software
and associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sub-license, and/or sell copies of the Software, and to permit persons
to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
P U R P O S E:
- compute an aim constraint without using any up vector
- use quaternion to build a reliable orientation
I N S T A L L A T I O N:
Copy the "heimer.py" to your Maya plugins directory
Windows: Program Files\Autodesk\MayaXXXX\bin\plug-ins\
or better in your maya user directory:
%MAYA_APP_DIR%\%mayaNumber%\scripts\plug-ins\( create one if it does not exists )
@author Cedric Bazillou <cedric.bazillou@digital-district.ca>
@blog http://circecharacterworks.wordpress.com/
@note version 0.1.0
@see see statement goes here.
'''
import math, sys
import maya.OpenMaya as OpenMaya
import maya.OpenMayaMPx as OpenMayaMPx
kPluginNodeName = "heimer"
kPluginNodeId = OpenMaya.MTypeId(0xAAC0F775)
kPluginNodeAuthor = "Bazillou cedric2009"
kPluginNodeVersion = "1.0.1"
class heimer(OpenMayaMPx.MPxNode):
    """Aim node that builds an orientation with a quaternion, so no explicit
    up-vector is required."""

    # Reference aim axis (+X) that is rotated onto the target direction.
    referenceVector = OpenMaya.MVector(1, 0, 0)

    def __init__(self):
        OpenMayaMPx.MPxNode.__init__(self)

    def computeLocalOrient(self, Data):
        """Fill the ``local.outRotate`` output from ``targetPosition``."""
        worldToLocal = Data.inputValue(self.worldToLocal).asMatrix()
        targetHandle = Data.inputValue(self.targetPosition)
        parentHandle = Data.outputValue(self.local)
        rotateHandle = parentHandle.child(self.outRotate)

        # Express the target position in the local frame and normalize it.
        localPoint = OpenMaya.MPoint(targetHandle.asVector()) * worldToLocal.inverse()
        aimDirection = OpenMaya.MVector(localPoint).normal()

        # Quaternion rotating the +X reference axis onto the aim direction.
        euler = self.referenceVector.rotateTo(aimDirection).asEulerRotation()
        rotateHandle.set3Double(euler.x, euler.y, euler.z)
        parentHandle.setClean()

    def computeWorldData(self, Data):
        """Fill the ``world`` compound (rotate / translate / outMatrix)."""
        worldToLocal = Data.inputValue(self.worldToLocal).asMatrix()
        targetMatrix = Data.inputValue(self.targetMatrix).asMatrix()
        parentHandle = Data.outputValue(self.world)
        rotateHandle = parentHandle.child(self.rotate)
        translateHandle = parentHandle.child(self.translate)
        matrixHandle = parentHandle.child(self.outMatrix)
        keepLocal = Data.inputValue(self.convertWorldToLocal).asBool()

        # Target origin expressed in the local frame, then normalized.
        localPoint = OpenMaya.MPoint() * targetMatrix * worldToLocal.inverse()
        aimDirection = OpenMaya.MVector(localPoint).normal()
        aimMatrix = self.referenceVector.rotateTo(aimDirection).asMatrix()

        finalMat = aimMatrix * worldToLocal
        if keepLocal:
            # Skip re-application of the parent transform when requested.
            finalMat = aimMatrix

        rotation = OpenMaya.MTransformationMatrix(finalMat).eulerRotation()
        rotateHandle.set3Double(rotation.x, rotation.y, rotation.z)
        position = OpenMaya.MPoint() * finalMat
        translateHandle.set3Double(position.x, position.y, position.z)
        matrixHandle.setMMatrix(finalMat)
        parentHandle.setClean()

    def compute(self, Plug, Data):
        # Both output compounds are refreshed regardless of the dirty plug.
        self.computeLocalOrient(Data)
        self.computeWorldData(Data)
def nodeCreator():
    """Factory used by Maya to instantiate the node as an MPx pointer."""
    return OpenMayaMPx.asMPxPtr(heimer())
def nodeInitializer():
    """Declare all static attributes of the heimer node and their
    dependency-graph affects relationships."""
    nAttr = OpenMaya.MFnNumericAttribute()
    matAttr = OpenMaya.MFnMatrixAttribute()
    unitAttr = OpenMaya.MFnUnitAttribute()
    cAttr = OpenMaya.MFnCompoundAttribute()

    # --- input matrices -------------------------------------------------
    heimer.worldToLocal = matAttr.create("worldToLocal", "wtlMat", OpenMaya.MFnMatrixAttribute.kDouble)
    matAttr.setHidden(True)
    heimer.addAttribute(heimer.worldToLocal)

    heimer.targetMatrix = matAttr.create("targetMatrix", "trgMat", OpenMaya.MFnMatrixAttribute.kDouble)
    matAttr.setHidden(True)
    heimer.addAttribute(heimer.targetMatrix)

    # --- input target position ------------------------------------------
    heimer.targetPosition = nAttr.create("targetPosition", "trgPos", OpenMaya.MFnNumericData.k3Double)
    nAttr.setStorable(0)
    nAttr.setKeyable(1)
    nAttr.setHidden(0)
    heimer.addAttribute(heimer.targetPosition)

    defaultAngle = OpenMaya.MAngle(0.0, OpenMaya.MAngle.kDegrees)
    defaultDist = OpenMaya.MDistance(0.0, OpenMaya.MDistance.kCentimeters)

    # --- "local" output compound ----------------------------------------
    heimer.outRotateX = unitAttr.create("outRotateX", "orx", defaultAngle)
    heimer.outRotateY = unitAttr.create("outRotateY", "ory", defaultAngle)
    heimer.outRotateZ = unitAttr.create("outRotateZ", "orz", defaultAngle)
    heimer.outRotate = nAttr.create("outRotate", "or", heimer.outRotateX, heimer.outRotateY, heimer.outRotateZ)

    heimer.local = cAttr.create("local", "lcl")
    cAttr.addChild(heimer.outRotate)
    cAttr.setStorable(0)
    cAttr.setKeyable(0)
    cAttr.setHidden(True)
    heimer.addAttribute(heimer.local)

    # --- "world" output compound ----------------------------------------
    heimer.translateX = unitAttr.create("translateX", "tx", defaultDist)
    heimer.translateY = unitAttr.create("translateY", "ty", defaultDist)
    heimer.translateZ = unitAttr.create("translateZ", "tz", defaultDist)
    heimer.translate = nAttr.create("translate", "t", heimer.translateX, heimer.translateY, heimer.translateZ)

    heimer.rotateX = unitAttr.create("rotateX", "rx", defaultAngle)
    heimer.rotateY = unitAttr.create("rotateY", "ry", defaultAngle)
    heimer.rotateZ = unitAttr.create("rotateZ", "rz", defaultAngle)
    heimer.rotate = nAttr.create("rotate", "r", heimer.rotateX, heimer.rotateY, heimer.rotateZ)

    heimer.outMatrix = matAttr.create("outMatrix", "oMat", OpenMaya.MFnMatrixAttribute.kDouble)
    heimer.outScale = nAttr.create("outScale", "outS", OpenMaya.MFnNumericData.k3Double, 1.0)

    heimer.convertWorldToLocal = nAttr.create("convertWorldToLocal", "cnv", OpenMaya.MFnNumericData.kBoolean, False)
    heimer.addAttribute(heimer.convertWorldToLocal)

    heimer.world = cAttr.create("world", "wrl")
    cAttr.addChild(heimer.rotate)
    cAttr.addChild(heimer.translate)
    cAttr.addChild(heimer.outScale)
    cAttr.addChild(heimer.outMatrix)
    cAttr.setStorable(0)
    cAttr.setKeyable(0)
    cAttr.setHidden(True)
    heimer.addAttribute(heimer.world)

    # --- dirty propagation ----------------------------------------------
    heimer.attributeAffects(heimer.convertWorldToLocal, heimer.local)
    heimer.attributeAffects(heimer.targetPosition, heimer.local)
    heimer.attributeAffects(heimer.worldToLocal, heimer.local)
    heimer.attributeAffects(heimer.targetMatrix, heimer.local)

    heimer.attributeAffects(heimer.worldToLocal, heimer.world)
    heimer.attributeAffects(heimer.targetMatrix, heimer.world)
    heimer.attributeAffects(heimer.convertWorldToLocal, heimer.world)
    return
def initializePlugin(mobject):
    """Register the heimer node when Maya loads the plug-in."""
    mplugin = OpenMayaMPx.MFnPlugin(mobject, kPluginNodeAuthor, kPluginNodeVersion, "Any")
    try:
        mplugin.registerNode(kPluginNodeName, kPluginNodeId, nodeCreator,
                             nodeInitializer, OpenMayaMPx.MPxNode.kDependNode)
    except:
        sys.stderr.write("Failed to register node: %s" % kPluginNodeName)
        raise
def uninitializePlugin(mobject):
    """Deregister the heimer node when the plug-in is unloaded."""
    mplugin = OpenMayaMPx.MFnPlugin(mobject)
    try:
        mplugin.deregisterNode(kPluginNodeId)
    except:
        sys.stderr.write("Failed to deregister node: %s" % kPluginNodeName)
        raise
| 43.935961 | 124 | 0.679448 |
58922f9eedd9439c635600f11327b3c334723a89 | 6,969 | py | Python | samples/feature-tools/ftools.py | Matheus158257/projects | 26a6148046533476e625a872a2950c383aa975a8 | [
"Apache-2.0"
] | null | null | null | samples/feature-tools/ftools.py | Matheus158257/projects | 26a6148046533476e625a872a2950c383aa975a8 | [
"Apache-2.0"
] | null | null | null | samples/feature-tools/ftools.py | Matheus158257/projects | 26a6148046533476e625a872a2950c383aa975a8 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Wed Oct 11 08:05:00 2019
@author: ascalet / lladeira
Feature Tools implementation utilizing feature tools Data Science Machine
"""
import io
import os
import numpy as np
import pandas as pd
import warnings
import featuretools as ft
import featuretools.variable_types as vtypes
class FeatureTools():
def __init__(self,
data: pd.DataFrame,
feature_types: pd.DataFrame,
target_var: str,
group_var=None,
date_var=None,
names='id'):
self.data = data.dropna()
self.feature_types = feature_types
self.group_var = group_var
self.date_var = date_var
self.names = names
if date_var is not None:
self.data[date_var] = pd.to_datetime(self.data[date_var])
warnings.filterwarnings("ignore")
def __name__(self):
return 'FeatureTools'
def parse_data(self):
"""
Parse data from dataframe to EntitySet (FeatureTools formatting)
Parameters
----------
Nothing
Returns
----------
es: EntitySet
The entity set of grouped data.
"""
es = ft.EntitySet(id="Dados")
columns = list(self.data.columns)
variable_types = {}
for indx, ftype in enumerate(self.feature_types[0].values):
if ftype == 'Categorical':
variable_types[columns[indx]] = vtypes.Categorical
# Create EntitySet and load data
es = es.entity_from_dataframe(entity_id="entity", dataframe=self.data,
make_index=True, index=self.names,
time_index=self.date_var, variable_types=variable_types)
# Groups data if required
# Commented because variable grouped doesn't exist on dataset
es.normalize_entity(new_entity_id="normal", base_entity_id="entity", index=self.group_var[0])
return es
def add_feature_types(self, result):
"""
Add new feature types according to created columns
Parameters
----------
result: pd.DataFrame
The resulting data with old and new columns
Returns
----------
Nothing
"""
df_indx = self.feature_types.shape[0]
qtd = len(list(result.columns)) - df_indx
for indx in range(qtd):
new_line = pd.DataFrame({0: 'Numerical'}, index=[df_indx+indx])
self.feature_types = pd.concat([self.feature_types, new_line])
def create_feat_template(self, data, result):
"""
Create feature template so that it may be recreated in the implementation
Parameters
----------
data: pd.DataFrame
Data input
result: pd.DataFrame
The resulting data with old and new columns
Returns
----------
trans_applied: dict
Transformations applied by the feature tools algorithm
"""
trans_applied = {}
gv = self.group_var[0]
for column in list(result.columns):
if column not in list(self.data.columns):
if 'first_entity_time' in column or 'normal' not in column:
trans = column.split('(')[0].lower()
col = column.split('(')[1].replace(')', '')
if '.' in trans:
trans = trans.split('.')[1]
trans_applied['%s---%s' % (trans, col)] = ""
else:
# Remove unsuned names
col_aux = column.split('.')[1:]
trans = col_aux[0].split('(')[0].lower()
col = col_aux[1].replace(')', '')
labels = list(result[gv].unique())
trans_grouped = {}
for x in range(len(labels)):
label_index = result.query("%s == '%s'" % (gv, labels[x])).iloc[0].name
value = result.query("%s == '%s'" % (gv, labels[x])).iloc[0][column]
trans_grouped[str(data.iloc[label_index][gv])] = float(value)
trans_applied['%s---%s---%s' % (trans, col, gv)] = trans_grouped
return trans_applied
def auto_feat(self):
"""
Executes auto featuring.
Parameters
----------
Nothing
Returns
----------
data: pandas.DataFrame
Data result
feature_types: pd.DataFrame
All feature types
trans_applied: dict
Transformations applied in the data
"""
if self.group_var == None or len(self.group_var) == 0:
return self.data, self.feature_types, {}
# Parse data dataframe to EntitySet
es = self.parse_data()
# Get the amplitude of data
def amplitude(values):
amp=values.max()-values.min()
return amp
amplitude = ft.primitives.make_agg_primitive(amplitude, input_types=[ft.variable_types.Numeric],
return_type=ft.variable_types.Numeric, name="amplitude",
description="Calcula a amplitude geral de cada variável numérica",
cls_attributes=None, uses_calc_time=False, commutative=False, number_output_features=1)
# Generate aggregated primitives
ft_matrix1, relationships1 = ft.dfs(entityset=es, target_entity="entity",
agg_primitives=[amplitude, "avg_time_between",
"mean","median", "std", "sum"],
verbose=True)
# Generate transformation primitives
ft_matrix2, relationships2 = ft.dfs(entityset=es, target_entity="entity",
trans_primitives=["second", "minute", "hour",
"day", "month", "year",
"weekday"],
verbose=True, max_depth=1)
# Concatenate the results and remove duplicated columns
features = pd.concat([self.data, ft_matrix1, ft_matrix2], axis=1, copy=False)
result = features.loc[:, ~features.columns.duplicated()].dropna()
# Remove unused columns
cols = [c for c in result.columns if 'first_entity_time' not in c]
result = result[cols]
if 'id' in list(result.columns):
result.drop("id", axis=1, inplace=True)
self.add_feature_types(result)
trans_applied = self.create_feat_template(self.data, result)
return result, self.feature_types, trans_applied
| 31.96789 | 142 | 0.52834 |
2fe5b6bac0bab9fa0485fd2074bd54722cef47b1 | 1,686 | py | Python | monitoring/prober/test_isa_expiry.py | jgrossmac/dss | 48360fd43fe09c2df511ef16a506e22085ac07e1 | [
"Apache-2.0"
] | 1 | 2019-08-21T20:18:09.000Z | 2019-08-21T20:18:09.000Z | monitoring/prober/test_isa_expiry.py | BenjaminPelletier/dss | 042e42c318c9d9ee2a50c7c9952581a185f1e660 | [
"Apache-2.0"
] | null | null | null | monitoring/prober/test_isa_expiry.py | BenjaminPelletier/dss | 042e42c318c9d9ee2a50c7c9952581a185f1e660 | [
"Apache-2.0"
] | 1 | 2019-08-21T20:25:36.000Z | 2019-08-21T20:25:36.000Z | """Test ISAs aren't returned after they expire."""
import datetime
import time
import common
def test_create(session, isa1_uuid):
time_start = datetime.datetime.utcnow()
time_end = time_start + datetime.timedelta(seconds=5)
resp = session.put(
'/identification_service_areas/{}'.format(isa1_uuid),
json={
'extents': {
'spatial_volume': {
'footprint': {
'vertices': common.VERTICES,
},
'altitude_lo': 20,
'altitude_hi': 400,
},
'time_start': time_start.strftime(common.DATE_FORMAT),
'time_end': time_end.strftime(common.DATE_FORMAT),
},
'flights_url': 'https://example.com/dss',
})
assert resp.status_code == 200
def test_valid_immediately(session, isa1_uuid):
# The ISA is still valid immediately after we create it.
resp = session.get('/identification_service_areas/{}'.format(isa1_uuid))
assert resp.status_code == 200
def test_sleep_5_seconds():
# But if we wait 5 seconds it will expire...
time.sleep(5)
def test_not_returned_by_id(session, isa1_uuid):
# And we can't get it by ID...
resp = session.get('/identification_service_areas/{}'.format(isa1_uuid))
assert resp.status_code == 404
assert resp.json()['message'] == 'resource not found: {}'.format(isa1_uuid)
def test_not_returned_by_search(session, isa1_uuid):
# Or by search.
resp = session.get('/identification_service_areas?area={}'.format(
common.GEO_POLYGON_STRING))
assert resp.status_code == 200
assert isa1_uuid not in [x['id'] for x in resp.json()['service_areas']]
| 30.107143 | 77 | 0.643535 |
a24648df6ba7ab9bc4ab1f3a4349beb14fc545b6 | 16,859 | py | Python | joblib/test/test_pool.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 3 | 2019-04-01T11:03:04.000Z | 2019-12-31T02:17:15.000Z | joblib/test/test_pool.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 1 | 2021-04-15T18:46:45.000Z | 2021-04-15T18:46:45.000Z | joblib/test/test_pool.py | EnjoyLifeFund/macHighSierra-py36-pkgs | 5668b5785296b314ea1321057420bcd077dba9ea | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | 1 | 2015-11-25T15:38:53.000Z | 2015-11-25T15:38:53.000Z | import os
import mmap
from joblib.test.common import with_numpy, np
from joblib.test.common import setup_autokill
from joblib.test.common import teardown_autokill
from joblib.test.common import with_multiprocessing
from joblib.test.common import with_dev_shm
from joblib.testing import raises
from joblib.backports import make_memmap
from joblib.pool import MemmapingPool
from joblib.pool import has_shareable_memory
from joblib.pool import ArrayMemmapReducer
from joblib.pool import reduce_memmap
from joblib.pool import _strided_from_memmap
from joblib.pool import _get_backing_memmap
def setup_module():
    """Arm the autokill watchdog so a hung test cannot block CI forever."""
    setup_autokill(__name__, timeout=300)
def teardown_module():
    """Disarm the autokill watchdog installed by :func:`setup_module`."""
    teardown_autokill(__name__)
def check_array(args):
    """Dummy helper function to be executed in subprocesses.

    Unpack ``(data, position, expected)`` and verify that ``data[position]``
    holds the expected values (raises AssertionError otherwise).
    """
    data, position, expected = args
    np.testing.assert_array_equal(data[position], expected)
def inplace_double(args):
    """Dummy helper function to be executed in subprocesses.

    Check the value at ``position`` matches ``expected``, double it in
    place, then verify the update is visible through the same array.
    """
    data, position, expected = args
    assert data[position] == expected
    data[position] *= 2
    np.testing.assert_array_equal(data[position], 2 * expected)
@with_numpy
@with_multiprocessing
def test_memmap_based_array_reducing(tmpdir):
    """Check that it is possible to reduce a memmap backed array"""
    assert_array_equal = np.testing.assert_array_equal
    filename = tmpdir.join('test.mmap').strpath

    # Create a backing file larger than the arrays mapped below.
    buffer = np.memmap(filename, dtype=np.float64, shape=500, mode='w+')

    # Fill the original buffer with negative markers so buffer over/underflow
    # is detectable on test failure.
    buffer[:] = -1.0 * np.arange(buffer.shape[0], dtype=buffer.dtype)
    buffer.flush()

    # Memmap a fortran-ordered array on an offsetted subsection of the buffer.
    a = np.memmap(filename, dtype=np.float64, shape=(3, 5, 4),
                  mode='r+', order='F', offset=4)
    a[:] = np.arange(60).reshape(a.shape)

    # Build various views that share the buffer with the original memmap:
    # b is a memmap sliced view on a memmap instance...
    b = a[1:-1, 2:-1, 2:4]
    # ...while c and d are plain ndarray views.
    c = np.asarray(b)
    d = c.T

    # Array reducer with auto dumping disabled.
    reducer = ArrayMemmapReducer(None, tmpdir.strpath, 'c')

    def reconstruct_array(x):
        cons, args = reducer(x)
        return cons(*args)

    def reconstruct_memmap(x):
        cons, args = reduce_memmap(x)
        return cons(*args)

    # Reconstruct the original memmap.
    a_reconstructed = reconstruct_memmap(a)
    assert has_shareable_memory(a_reconstructed)
    assert isinstance(a_reconstructed, np.memmap)
    assert_array_equal(a_reconstructed, a)

    # Reconstruct the strided memmap view.
    b_reconstructed = reconstruct_memmap(b)
    assert has_shareable_memory(b_reconstructed)
    assert_array_equal(b_reconstructed, b)

    # Reconstruct the ndarray views on the memmap base.
    c_reconstructed = reconstruct_array(c)
    assert not isinstance(c_reconstructed, np.memmap)
    assert has_shareable_memory(c_reconstructed)
    assert_array_equal(c_reconstructed, c)

    d_reconstructed = reconstruct_array(d)
    assert not isinstance(d_reconstructed, np.memmap)
    assert has_shareable_memory(d_reconstructed)
    assert_array_equal(d_reconstructed, d)

    # Graceful degradation on fake memmap instances with in-memory buffers.
    a3 = a * 3
    assert not has_shareable_memory(a3)
    a3_reconstructed = reconstruct_memmap(a3)
    assert not has_shareable_memory(a3_reconstructed)
    assert not isinstance(a3_reconstructed, np.memmap)
    assert_array_equal(a3_reconstructed, a * 3)

    # Graceful degradation on arrays derived from fake memmap instances.
    b3 = np.asarray(a3)
    assert not has_shareable_memory(b3)
    b3_reconstructed = reconstruct_array(b3)
    assert isinstance(b3_reconstructed, np.ndarray)
    assert not has_shareable_memory(b3_reconstructed)
    assert_array_equal(b3_reconstructed, b3)
@with_numpy
@with_multiprocessing
def test_high_dimension_memmap_array_reducing(tmpdir):
    """Reduce/reconstruct slices of a high-dimensional memmap."""
    assert_array_equal = np.testing.assert_array_equal
    filename = tmpdir.join('test.mmap').strpath

    # Create a high dimensional memmap.
    a = np.memmap(filename, dtype=np.float64, shape=(100, 15, 15, 3),
                  mode='w+')
    a[:] = np.arange(100 * 15 * 15 * 3).reshape(a.shape)

    # Create some slices/indices at various dimensions.
    b = a[0:10]
    c = a[:, 5:10]
    d = a[:, :, :, 0]
    e = a[1:3:4]

    def reconstruct_memmap(x):
        cons, args = reduce_memmap(x)
        return cons(*args)

    # The full memmap round-trips to a shareable memmap instance.
    a_reconstructed = reconstruct_memmap(a)
    assert has_shareable_memory(a_reconstructed)
    assert isinstance(a_reconstructed, np.memmap)
    assert_array_equal(a_reconstructed, a)

    # Every sliced view round-trips to shareable memory with equal contents.
    for view in (b, c, d, e):
        view_reconstructed = reconstruct_memmap(view)
        assert has_shareable_memory(view_reconstructed)
        assert_array_equal(view_reconstructed, view)
@with_numpy
@with_multiprocessing
def test_pool_with_memmap(tmpdir):
    """Check that subprocess can access and update shared memory memmap"""
    assert_array_equal = np.testing.assert_array_equal

    # Fork the subprocesses before allocating the objects to be passed.
    pool_temp_folder = tmpdir.mkdir('pool').strpath
    p = MemmapingPool(10, max_nbytes=2, temp_folder=pool_temp_folder)
    try:
        filename = tmpdir.join('test.mmap').strpath
        a = np.memmap(filename, dtype=np.float32, shape=(3, 5), mode='w+')
        a.fill(1.0)

        p.map(inplace_double, [(a, (i, j), 1.0)
                               for i in range(a.shape[0])
                               for j in range(a.shape[1])])
        assert_array_equal(a, 2 * np.ones(a.shape))

        # Open a copy-on-write view on the previous data.
        b = np.memmap(filename, dtype=np.float32, shape=(5, 3), mode='c')
        p.map(inplace_double, [(b, (i, j), 2.0)
                               for i in range(b.shape[0])
                               for j in range(b.shape[1])])

        # Passing memmap instances to the pool should not trigger the
        # creation of new files on the FS.
        assert os.listdir(pool_temp_folder) == []

        # The original data is untouched (copy-on-write semantics for b).
        assert_array_equal(a, 2 * np.ones(a.shape))
        assert_array_equal(b, 2 * np.ones(b.shape))

        # Readonly maps can be read but not updated.
        c = np.memmap(filename, dtype=np.float32, shape=(10,), mode='r',
                      offset=5 * 4)
        with raises(AssertionError):
            p.map(check_array, [(c, i, 3.0) for i in range(c.shape[0])])

        # Depending on the numpy version, writing raises either a
        # RuntimeError or a ValueError.
        with raises((RuntimeError, ValueError)):
            p.map(inplace_double, [(c, i, 2.0) for i in range(c.shape[0])])
    finally:
        # Release all file handles held by the pool.
        p.terminate()
        del p
@with_numpy
@with_multiprocessing
def test_pool_with_memmap_array_view(tmpdir):
    """Check that subprocess can access and update shared memory array"""
    assert_array_equal = np.testing.assert_array_equal

    # Fork the subprocesses before allocating the objects to be passed.
    pool_temp_folder = tmpdir.mkdir('pool').strpath
    p = MemmapingPool(10, max_nbytes=2, temp_folder=pool_temp_folder)
    try:
        filename = tmpdir.join('test.mmap').strpath
        a = np.memmap(filename, dtype=np.float32, shape=(3, 5), mode='w+')
        a.fill(1.0)

        # Create a plain ndarray view on the memmap instance.
        a_view = np.asarray(a)
        assert not isinstance(a_view, np.memmap)
        assert has_shareable_memory(a_view)

        p.map(inplace_double, [(a_view, (i, j), 1.0)
                               for i in range(a.shape[0])
                               for j in range(a.shape[1])])

        # Both a and the view observe the updates.
        assert_array_equal(a, 2 * np.ones(a.shape))
        assert_array_equal(a_view, 2 * np.ones(a.shape))

        # Passing a memmap array view to the pool should not trigger the
        # creation of new files on the FS.
        assert os.listdir(pool_temp_folder) == []
    finally:
        p.terminate()
        del p
@with_numpy
@with_multiprocessing
def test_memmaping_pool_for_large_arrays(tmpdir):
    """Check that large arrays are not copied in memory"""

    # The temp folder starts out empty.
    assert os.listdir(tmpdir.strpath) == []

    # Build array reducers that automatically dump large array content to
    # filesystem-backed memmap instances to avoid memory explosion.
    p = MemmapingPool(3, max_nbytes=40, temp_folder=tmpdir.strpath)
    try:
        # The temporary folder for the pool is not provisioned in advance.
        assert os.listdir(tmpdir.strpath) == []
        assert not os.path.exists(p._temp_folder)

        small = np.ones(5, dtype=np.float32)
        assert small.nbytes == 20
        p.map(check_array, [(small, i, 1.0) for i in range(small.shape[0])])

        # Small arrays are copied: the pool filesystem folder stays unused.
        assert os.listdir(tmpdir.strpath) == []

        # Now an array larger than the 40-byte memmap threshold.
        large = np.ones(100, dtype=np.float64)
        assert large.nbytes == 800
        p.map(check_array, [(large, i, 1.0) for i in range(large.shape[0])])

        # The data has been dumped into a temp folder so subprocesses share
        # it without per-child memory copies.
        assert os.path.isdir(p._temp_folder)
        dumped_filenames = os.listdir(p._temp_folder)
        assert len(dumped_filenames) == 1

        # Memory mapping is never triggered for dtype='object' arrays.
        objects = np.array(['abc'] * 100, dtype='object')
        results = p.map(has_shareable_memory, [objects])
        assert not results[0]
    finally:
        # Check FS garbage collection upon pool termination.
        p.terminate()
        assert not os.path.exists(p._temp_folder)
        del p
@with_numpy
@with_multiprocessing
def test_memmaping_pool_for_large_arrays_disabled(tmpdir):
    """Check that large arrays memmaping can be disabled"""
    # Set max_nbytes to None to disable the auto memmaping feature
    p = MemmapingPool(3, max_nbytes=None, temp_folder=tmpdir.strpath)
    try:
        # Check that the tempfolder is empty
        assert os.listdir(tmpdir.strpath) == []
        # Try with an array larger than the usual memmap threshold of 40 bytes
        large = np.ones(100, dtype=np.float64)
        assert large.nbytes == 800
        p.map(check_array, [(large, i, 1.0) for i in range(large.shape[0])])
        # Check that the tempfolder is still empty: memmaping never kicked in
        assert os.listdir(tmpdir.strpath) == []
    finally:
        # Cleanup open file descriptors
        p.terminate()
        del p
@with_numpy
@with_multiprocessing
@with_dev_shm
def test_memmaping_on_dev_shm():
    """Check that MemmapingPool uses /dev/shm when possible
    (no explicit temp_folder is passed, so the pool picks one itself)."""
    p = MemmapingPool(3, max_nbytes=10)
    try:
        # Check that the pool has correctly detected the presence of the
        # shared memory filesystem.
        pool_temp_folder = p._temp_folder
        folder_prefix = '/dev/shm/joblib_memmaping_pool_'
        assert pool_temp_folder.startswith(folder_prefix)
        assert os.path.exists(pool_temp_folder)
        # Try with a file larger than the memmap threshold of 10 bytes
        a = np.ones(100, dtype=np.float64)
        assert a.nbytes == 800
        p.map(id, [a] * 10)
        # a should have been memmaped to the pool temp folder: the joblib
        # pickling procedure generate one .pkl file:
        assert len(os.listdir(pool_temp_folder)) == 1
        # create a new array with content that is different from 'a' so that
        # it is mapped to a different file in the temporary folder of the
        # pool.
        b = np.ones(100, dtype=np.float64) * 2
        assert b.nbytes == 800
        p.map(id, [b] * 10)
        # A copy of both a and b are now stored in the shared memory folder
        assert len(os.listdir(pool_temp_folder)) == 2
    finally:
        # Cleanup open file descriptors
        p.terminate()
        del p
    # The temp folder is cleaned up upon pool termination
    assert not os.path.exists(pool_temp_folder)
@with_numpy
@with_multiprocessing
def test_memmaping_pool_for_large_arrays_in_return(tmpdir):
    """Check that large arrays are not copied in memory in return"""
    assert_array_equal = np.testing.assert_array_equal
    # Build an array reducer that automatically dumps large array content
    # but check that the returned data structures are regular arrays to avoid
    # passing a memmap array pointing to a pool controlled temp folder that
    # might be confusing to the user
    # The MemmapingPool user can always return numpy.memmap object explicitly
    # to avoid memory copy
    p = MemmapingPool(3, max_nbytes=10, temp_folder=tmpdir.strpath)
    try:
        # np.ones(1000) is 8000 bytes, well above the 10-byte threshold.
        res = p.apply_async(np.ones, args=(1000,))
        large = res.get()
        assert not has_shareable_memory(large)
        assert_array_equal(large, np.ones(1000))
    finally:
        p.terminate()
        del p
def _worker_multiply(a, n_times):
    """Multiplication function to be executed by subprocess"""
    # The pool reducers are expected to have turned `a` into shared memory
    # on its way to the worker; fail loudly if not.
    assert has_shareable_memory(a)
    return a * n_times
@with_numpy
@with_multiprocessing
def test_workaround_against_bad_memmap_with_copied_buffers(tmpdir):
    """Check that memmaps with a bad buffer are returned as regular arrays
    Unary operations and ufuncs on memmap instances return a new memmap
    instance with an in-memory buffer (probably a numpy bug).
    """
    assert_array_equal = np.testing.assert_array_equal
    p = MemmapingPool(3, max_nbytes=10, temp_folder=tmpdir.strpath)
    try:
        # Send a complex, large-ish view on an array that will be converted to
        # a memmap in the worker process (Fortran order + non-contiguous slice)
        a = np.asarray(np.arange(6000).reshape((1000, 2, 3)),
                       order='F')[:, :1, :]
        # Call a non-inplace multiply operation on the worker and memmap and
        # send it back to the parent.
        b = p.apply_async(_worker_multiply, args=(a, 3)).get()
        assert not has_shareable_memory(b)
        assert_array_equal(b, 3 * a)
    finally:
        p.terminate()
        del p
@with_numpy
def test__strided_from_memmap(tmpdir):
    """Reconstruct both a plain memmap and a strided memmap-backed view
    from serialized metadata, checking the offset is preserved."""
    fname = tmpdir.join('test.mmap').strpath
    size = 5 * mmap.ALLOCATIONGRANULARITY
    offset = mmap.ALLOCATIONGRANULARITY + 1
    # This line creates the mmap file that is reused later
    memmap_obj = np.memmap(fname, mode='w+', shape=size + offset)
    # filename, dtype, mode, offset, order, shape, strides, total_buffer_len
    memmap_obj = _strided_from_memmap(fname, dtype='uint8', mode='r',
                                      offset=offset, order='C', shape=size,
                                      strides=None, total_buffer_len=None)
    assert isinstance(memmap_obj, np.memmap)
    assert memmap_obj.offset == offset
    # With explicit strides a wrapping ndarray is returned; check the
    # underlying memmap still carries the right offset.
    memmap_backed_obj = _strided_from_memmap(fname, dtype='uint8', mode='r',
                                             offset=offset, order='C',
                                             shape=(size // 2,), strides=(2,),
                                             total_buffer_len=size)
    assert _get_backing_memmap(memmap_backed_obj).offset == offset
def identity(arg):
    """Return *arg* unchanged (round-trips objects through the pool)."""
    result = arg
    return result
@with_numpy
@with_multiprocessing
def test_pool_memmap_with_big_offset(tmpdir):
    """Check that a numpy memmap offset is set correctly if greater than
    mmap.ALLOCATIONGRANULARITY when round-tripped through the pool.

    See https://github.com/joblib/joblib/issues/451 and
    https://github.com/numpy/numpy/pull/8443 for more details.
    """
    fname = tmpdir.join('test.mmap').strpath
    size = 5 * mmap.ALLOCATIONGRANULARITY
    offset = mmap.ALLOCATIONGRANULARITY + 1
    obj = make_memmap(fname, mode='w+', shape=size, dtype='uint8',
                      offset=offset)
    p = MemmapingPool(2, temp_folder=tmpdir.strpath)
    try:
        result = p.apply_async(identity, args=(obj,)).get()
        assert isinstance(result, np.memmap)
        assert result.offset == offset
        np.testing.assert_array_equal(obj, result)
    finally:
        # Bug fix: the pool was previously never terminated here, leaking
        # worker processes and open file descriptors; clean up like every
        # other test in this module.
        p.terminate()
        del p
| 35.049896 | 78 | 0.67329 |
1b5a788e2ef607aaf7284cc651fe58993a2d60db | 1,305 | py | Python | app/__init__.py | MutuaFranklin/BlogPool | d5781a62a1ad6e5242392dad28e41f536377a4a6 | [
"MIT"
] | null | null | null | app/__init__.py | MutuaFranklin/BlogPool | d5781a62a1ad6e5242392dad28e41f536377a4a6 | [
"MIT"
] | null | null | null | app/__init__.py | MutuaFranklin/BlogPool | d5781a62a1ad6e5242392dad28e41f536377a4a6 | [
"MIT"
] | null | null | null | from flask import Flask
from config import config_options
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_mail import Mail
from flask_uploads import UploadSet,configure_uploads,IMAGES
from flask_simplemde import SimpleMDE
from flask_bootstrap import Bootstrap
# Flask extension singletons created without an app so they can be bound to
# the application instance inside the create_app() factory below.
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
simple = SimpleMDE()
db = SQLAlchemy()
mail = Mail()
photos = UploadSet('photos',IMAGES)
bootstrap = Bootstrap()
def create_app(config_name):
    """Application factory: build and configure a Flask app.

    config_name: key into config.config_options selecting the
    configuration class to load.
    Returns the fully configured Flask application instance.
    """
    app = Flask(__name__)
    # Creating the app configurations
    app.config.from_object(config_options[config_name])
    config_options[config_name].init_app(app)
    # Initializing flask extensions
    bootstrap.init_app(app)
    mail.init_app(app)
    db.init_app(app)
    login_manager.init_app(app)
    simple.init_app(app)
    # Registering the blueprint (imported here to avoid circular imports)
    from .main import main as main_blueprint
    app.register_blueprint(main_blueprint)
    from .auth import auth as auth_blueprint
    app.register_blueprint(auth_blueprint,url_prefix = '/auth')
    # Setting config
    from .requests import configure_request
    configure_request(app)
    # configure UploadSet
    configure_uploads(app,photos)
    return app
| 23.303571 | 63 | 0.76705 |
4e6bf252971bf6e34f349e543347de8755d0c98c | 2,303 | py | Python | tests/test_observable/test_startwith.py | Affirm/RxPY | 7c23939ea497761c85b382257f9f0954998ab91e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | tests/test_observable/test_startwith.py | Affirm/RxPY | 7c23939ea497761c85b382257f9f0954998ab91e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | tests/test_observable/test_startwith.py | Affirm/RxPY | 7c23939ea497761c85b382257f9f0954998ab91e | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | import unittest
from rx.core import Scheduler
from rx.testing import TestScheduler, ReactiveTest
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestStartWith(unittest.TestCase):
def test_start_with(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(150, 1), on_next(220, 2), on_completed(250))
def create():
return xs.start_with(1)
results = scheduler.start(create)
results.messages.assert_equal(on_next(200, 1), on_next(220, 2), on_completed(250))
def test_start_with_scheduler(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(150, 1), on_next(220, 2), on_completed(250))
def create():
return xs.start_with(scheduler)
results = scheduler.start(create)
results.messages.assert_equal(on_next(220, 2), on_completed(250))
def test_start_with_scheduler_and_arg(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(150, 1), on_next(220, 2), on_completed(250))
def create():
return xs.start_with(scheduler, 42)
results = scheduler.start(create)
results.messages.assert_equal(on_next(201, 42), on_next(220, 2), on_completed(250))
def test_start_with_immediate_scheduler_and_arg(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(150, 1), on_next(220, 2), on_completed(250))
def create():
return xs.start_with(Scheduler.immediate, 42)
results = scheduler.start(create)
results.messages.assert_equal(on_next(200, 42), on_next(220, 2), on_completed(250))
def test_start_with_scheduler_keyword_and_arg(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(150, 1), on_next(220, 2), on_completed(250))
def create():
return xs.start_with(42, scheduler=scheduler)
results = scheduler.start(create)
results.messages.assert_equal(on_next(201, 42), on_next(220, 2), on_completed(250))
| 38.383333 | 97 | 0.700391 |
d9f086bae6565c46edc4268cef8b9114e809ec70 | 10,899 | py | Python | tests/test_assembler.py | andypalmer9669/74_series_computer | 0b8a4776b49a2380a51863634b48bcc441bf74ec | [
"MIT"
] | null | null | null | tests/test_assembler.py | andypalmer9669/74_series_computer | 0b8a4776b49a2380a51863634b48bcc441bf74ec | [
"MIT"
] | 46 | 2019-02-22T16:46:02.000Z | 2020-03-08T20:26:37.000Z | tests/test_assembler.py | andypalmer9669/74_series_computer | 0b8a4776b49a2380a51863634b48bcc441bf74ec | [
"MIT"
] | null | null | null | import pytest
from copy import deepcopy
from eight_bit_computer import assembler
from eight_bit_computer.data_structures import (
get_assembly_line_template, get_machine_code_byte_template
)
from eight_bit_computer.exceptions import LineProcessingError, AssemblyError
@pytest.mark.parametrize("test_input,variable_start_offset", [
    (
        [
            "fegwkefjghwfjkhgwekjfgh",
        ],
        0,
    )
])
def test_process_assembly_lines_raises(test_input, variable_start_offset):
    """Unparseable assembly lines cause an AssemblyError."""
    with pytest.raises(AssemblyError):
        assembler.process_assembly_lines(
            test_input, variable_start_offset=variable_start_offset
        )
def get_test_process_line_data():
    """
    Test data for the process line test.

    Returns a list of (raw_line, expected_assembly_line_dict) pairs.
    """
    tests = []
    # Empty line: template untouched
    test_input = ""
    test_output = get_assembly_line_template()
    tests.append((test_input, test_output))
    # Comment-only line: raw recorded, nothing else set
    test_input = "// comment"
    test_output = get_assembly_line_template()
    test_output["raw"] = "// comment"
    tests.append((test_input, test_output))
    # Label definition
    test_input = "@label"
    test_output = get_assembly_line_template()
    test_output["raw"] = "@label"
    test_output["clean"] = "@label"
    test_output["defined_label"] = "@label"
    test_output["defines_label"] = True
    tests.append((test_input, test_output))
    # Variable definition
    test_input = "$variable"
    test_output = get_assembly_line_template()
    test_output["raw"] = "$variable"
    test_output["clean"] = "$variable"
    test_output["defined_variable"] = "$variable"
    test_output["defines_variable"] = True
    tests.append((test_input, test_output))
    # Label with surrounding whitespace and trailing comment
    test_input = " @label // comment"
    test_output = get_assembly_line_template()
    test_output["raw"] = " @label // comment"
    test_output["clean"] = "@label"
    test_output["defined_label"] = "@label"
    test_output["defines_label"] = True
    tests.append((test_input, test_output))
    # Variable with surrounding whitespace and trailing comment
    test_input = " $variable // comment"
    test_output = get_assembly_line_template()
    test_output["raw"] = " $variable // comment"
    test_output["clean"] = "$variable"
    test_output["defined_variable"] = "$variable"
    test_output["defines_variable"] = True
    tests.append((test_input, test_output))
    return tests
@pytest.mark.parametrize("test_input,expected", get_test_process_line_data())
def test_process_line(test_input, expected):
    """process_line fills the assembly-line template as specified."""
    assert assembler.process_line(test_input) == expected
@pytest.mark.parametrize("test_input", [
    "fwgfkwghfkjhwgekjhgwkejg",
])
def test_process_line_raises(test_input):
    """A garbage line raises LineProcessingError."""
    with pytest.raises(LineProcessingError):
        assembler.process_line(test_input)
@pytest.mark.parametrize("test_input,expected", [
    (
        "",
        "",
    ),
    (
        "//",
        "",
    ),
    (
        "/hello /world!",
        "/hello /world!",
    ),
    (
        "blah blah//",
        "blah blah",
    ),
    (
        "before//after",
        "before",
    ),
    (
        " before //after ",
        " before ",
    ),
    (
        "LOAD [A] B",
        "LOAD [A] B",
    ),
])
def test_remove_comments(test_input, expected):
    """Everything from '//' onward is stripped; lone '/' is kept."""
    assert assembler.remove_comments(test_input) == expected
@pytest.mark.parametrize("test_input,expected", [
    (
        "",
        "",
    ),
    (
        " ",
        "",
    ),
    (
        "LOAD [$foo] A",
        "LOAD [$foo] A",
    ),
    (
        " LOAD [$foo] A ",
        "LOAD [$foo] A",
    ),
    (
        "\tLOAD\t\t[$foo] A \t ",
        "LOAD [$foo] A",
    ),
    (
        "LOAD [ $foo ] A",
        "LOAD [ $foo ] A",
    ),
    (
        " LOAD [$foo] A",
        "LOAD [$foo] A",
    ),
    (
        "LOAD [$foo] A ",
        "LOAD [$foo] A",
    ),
    (
        "SET A #14 ",
        "SET A #14",
    ),
])
def test_remove_excess_whitespace(test_input, expected):
    """Leading/trailing/repeated whitespace collapses; single spaces stay."""
    assert assembler.remove_excess_whitespace(test_input) == expected
@pytest.mark.parametrize("test_input", [
    "fwgfkwghfkjhwgekjhgwkejg",
])
def test_machine_code_templates_from_line_raises(test_input):
    """An unrecognized instruction raises LineProcessingError."""
    with pytest.raises(LineProcessingError):
        assembler.machine_code_bytes_from_line(test_input)
def gen_test_validate_and_identify_constants_data():
    """Build (input_bytes, expected_bytes) pairs for constant identification."""
    ret = []
    # Instruction byte: passes through untouched
    test_input = get_machine_code_byte_template()
    test_input["bitstring"] = "01101100"
    test_input["byte_type"] = "instruction"
    expected_output = deepcopy(test_input)
    ret.append(([test_input], [expected_output]))
    # Label constant
    test_input = get_machine_code_byte_template()
    test_input["byte_type"] = "constant"
    test_input["constant"] = "@label"
    expected_output = deepcopy(test_input)
    expected_output["constant_type"] = "label"
    ret.append(([test_input], [expected_output]))
    # Variable constant
    test_input = get_machine_code_byte_template()
    test_input["byte_type"] = "constant"
    test_input["constant"] = "$variable"
    expected_output = deepcopy(test_input)
    expected_output["constant_type"] = "variable"
    ret.append(([test_input], [expected_output]))
    # Number constant: also resolved to its integer value
    test_input = get_machine_code_byte_template()
    test_input["byte_type"] = "constant"
    test_input["constant"] = "#123"
    expected_output = deepcopy(test_input)
    expected_output["constant_type"] = "number"
    expected_output["number_value"] = 123
    ret.append(([test_input], [expected_output]))
    # Mixed list: instruction followed by a label constant
    test_input_0 = get_machine_code_byte_template()
    test_input_0["bitstring"] = "01101100"
    test_input_0["byte_type"] = "instruction"
    expected_output_0 = deepcopy(test_input_0)
    test_input_1 = get_machine_code_byte_template()
    test_input_1["byte_type"] = "constant"
    test_input_1["constant"] = "@label"
    expected_output_1 = deepcopy(test_input_1)
    expected_output_1["constant_type"] = "label"
    ret.append(
        (
            [test_input_0, test_input_1],
            [expected_output_0, expected_output_1]
        )
    )
    return ret
@pytest.mark.parametrize(
    "test_input,expected", gen_test_validate_and_identify_constants_data()
)
def test_validate_and_identify_constants(test_input, expected):
    """validate_and_identify_constants mutates the byte dicts in place."""
    assembler.validate_and_identify_constants(test_input)
    assert test_input == expected
def gen_validate_and_identify_constants_raises_data():
    """Build machine-code byte lists whose constants are invalid."""
    ret = []
    # No recognizable sigil (@/$/#)
    test_input = get_machine_code_byte_template()
    test_input["byte_type"] = "constant"
    test_input["constant"] = "fwgjfgwjfgkjh"
    ret.append([test_input])
    # Multiple sigils mixed into one token
    test_input = get_machine_code_byte_template()
    test_input["byte_type"] = "constant"
    test_input["constant"] = "@number$variable#123"
    ret.append([test_input])
    # Number too large — presumably exceeds the representable byte range
    # (TODO confirm against the assembler's number limits)
    test_input = get_machine_code_byte_template()
    test_input["byte_type"] = "constant"
    test_input["constant"] = "#9999"
    ret.append([test_input])
    return ret
@pytest.mark.parametrize(
    "test_input", gen_validate_and_identify_constants_raises_data()
)
def test_validate_and_identify_constants_raises(test_input):
    """Invalid constants raise LineProcessingError."""
    with pytest.raises(LineProcessingError):
        assembler.validate_and_identify_constants(test_input)
def test_assign_labels(processed_assembly_lines):
    """Labels defined on their own line are attached to the following
    machine-code line (in-place mutation)."""
    expected_lines = deepcopy(processed_assembly_lines)
    expected_lines[2]["assigned_label"] = "@label1"
    expected_lines[2]["has_label_assigned"] = True
    expected_lines[5]["assigned_label"] = "@label2"
    expected_lines[5]["has_label_assigned"] = True
    expected_lines[10]["assigned_label"] = "@label3"
    expected_lines[10]["has_label_assigned"] = True
    assembler.assign_labels(processed_assembly_lines)
    assert processed_assembly_lines == expected_lines
def test_resolve_labels(processed_assembly_lines):
    """Label constants are replaced with the bitstring of the labelled
    line's machine-code address."""
    processed_assembly_lines[2]["assigned_label"] = "@label1"
    processed_assembly_lines[2]["has_label_assigned"] = True
    processed_assembly_lines[5]["assigned_label"] = "@label2"
    processed_assembly_lines[5]["has_label_assigned"] = True
    processed_assembly_lines[10]["assigned_label"] = "@label3"
    processed_assembly_lines[10]["has_label_assigned"] = True
    expected_lines = deepcopy(processed_assembly_lines)
    expected_lines[6]["mc_bytes"][1]["bitstring"] = "00000000"
    assembler.resolve_labels(processed_assembly_lines)
    assert processed_assembly_lines == expected_lines
def test_label_map(processed_assembly_lines):
    """create_label_map maps each label to its 8-bit address bitstring."""
    processed_assembly_lines[2]["assigned_label"] = "@label1"
    processed_assembly_lines[2]["has_label_assigned"] = True
    processed_assembly_lines[5]["assigned_label"] = "@label2"
    processed_assembly_lines[5]["has_label_assigned"] = True
    processed_assembly_lines[10]["assigned_label"] = "@label3"
    processed_assembly_lines[10]["has_label_assigned"] = True
    expected_label_map = {
        "@label1": "00000000",
        "@label2": "00000010",
        "@label3": "00001000",
    }
    label_map = assembler.create_label_map(processed_assembly_lines)
    assert label_map == expected_label_map
def test_resolve_numbers(processed_assembly_lines):
    """Number constants become their binary bitstring (123 -> 01111011)."""
    expected_lines = deepcopy(processed_assembly_lines)
    expected_lines[8]["mc_bytes"][1]["bitstring"] = "01111011"
    assembler.resolve_numbers(processed_assembly_lines)
    assert processed_assembly_lines == expected_lines
def test_resolve_variables_no_offset(processed_assembly_lines):
    """Variables are assigned sequential addresses starting at 0."""
    expected_lines = deepcopy(processed_assembly_lines)
    expected_lines[2]["mc_bytes"][1]["bitstring"] = "00000001"
    expected_lines[5]["mc_bytes"][1]["bitstring"] = "00000010"
    expected_lines[10]["mc_bytes"][1]["bitstring"] = "00000011"
    expected_lines[11]["mc_bytes"][1]["bitstring"] = "00000000"
    assembler.resolve_variables(processed_assembly_lines, 0)
    assert processed_assembly_lines == expected_lines
def test_resolve_variables_with_offset(processed_assembly_lines):
    """The variable_start_offset (8) shifts every variable address."""
    expected_lines = deepcopy(processed_assembly_lines)
    expected_lines[2]["mc_bytes"][1]["bitstring"] = "00001001"
    expected_lines[5]["mc_bytes"][1]["bitstring"] = "00001010"
    expected_lines[10]["mc_bytes"][1]["bitstring"] = "00001011"
    expected_lines[11]["mc_bytes"][1]["bitstring"] = "00001000"
    assembler.resolve_variables(processed_assembly_lines, 8)
    assert processed_assembly_lines == expected_lines
def test_create_variable_map_no_offset(processed_assembly_lines):
    """create_variable_map maps variables to addresses starting at 0."""
    exected_variable_map = {
        "$variable0": "00000000",
        "$variable1": "00000001",
        "$variable2": "00000010",
        "$variable3": "00000011",
        "$variable4": "00000100",
    }
    variable_map = assembler.create_variable_map(processed_assembly_lines, 0)
    assert variable_map == exected_variable_map
def test_create_variable_map_with_offset(processed_assembly_lines):
    """create_variable_map shifts every address by the offset (8)."""
    exected_variable_map = {
        "$variable0": "00001000",
        "$variable1": "00001001",
        "$variable2": "00001010",
        "$variable3": "00001011",
        "$variable4": "00001100",
    }
    variable_map = assembler.create_variable_map(processed_assembly_lines, 8)
    assert variable_map == exected_variable_map
| 31.229226 | 77 | 0.681622 |
e3834bff75166bfbfdcb80ce2ff1da812d2be01f | 814 | py | Python | examples/vhdl/vivado/run.py | olafvandenberg/vunit | 4f78fc4fa0f10b4cf50a6d377793d374ae19df64 | [
"Artistic-2.0",
"Apache-2.0"
] | null | null | null | examples/vhdl/vivado/run.py | olafvandenberg/vunit | 4f78fc4fa0f10b4cf50a6d377793d374ae19df64 | [
"Artistic-2.0",
"Apache-2.0"
] | 1 | 2021-09-05T19:34:01.000Z | 2021-09-05T19:34:01.000Z | examples/vhdl/vivado/run.py | olafvandenberg/vunit | 4f78fc4fa0f10b4cf50a6d377793d374ae19df64 | [
"Artistic-2.0",
"Apache-2.0"
] | null | null | null | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2014-2022, Lars Asplund lars.anders.asplund@gmail.com
"""
Vivado IP
---------
Demonstrates compiling and performing behavioral simulation of
Vivado IPs with VUnit.
"""
from pathlib import Path
from vunit import VUnit
from vivado_util import add_vivado_ip
ROOT = Path(__file__).parent
SRC_PATH = ROOT / "src"
VU = VUnit.from_argv()
VU.add_library("lib").add_source_files(SRC_PATH / "*.vhd")
VU.add_library("tb_lib").add_source_files(SRC_PATH / "test" / "*.vhd")
add_vivado_ip(
VU,
output_path=ROOT / "vivado_libs",
project_file=ROOT / "myproject" / "myproject.xpr",
)
VU.main()
| 23.941176 | 75 | 0.719902 |
8441276cf28ea59ef762dc64c81fbe452672041b | 6,263 | py | Python | adafruit_sgp30.py | kattni/Adafruit_CircuitPython_SGP30 | c204720d889b5c0cb9d034d5022d751b72dac134 | [
"MIT"
] | null | null | null | adafruit_sgp30.py | kattni/Adafruit_CircuitPython_SGP30 | c204720d889b5c0cb9d034d5022d751b72dac134 | [
"MIT"
] | null | null | null | adafruit_sgp30.py | kattni/Adafruit_CircuitPython_SGP30 | c204720d889b5c0cb9d034d5022d751b72dac134 | [
"MIT"
] | null | null | null | # The MIT License (MIT)
#
# Copyright (c) 2017 ladyada for Adafruit Industries
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
`adafruit_sgp30`
====================================================
I2C driver for SGP30 Sensirion VoC sensor
* Author(s): ladyada
Implementation Notes
--------------------
**Hardware:**
* Adafruit `SGP30 Air Quality Sensor Breakout - VOC and eCO2
<https://www.adafruit.com/product/3709>`_ (Product ID: 3709)
**Software and Dependencies:**
* Adafruit CircuitPython firmware for the ESP8622 and M0-based boards:
https://github.com/adafruit/circuitpython/releases
* Adafruit's Bus Device library: https://github.com/adafruit/Adafruit_CircuitPython_BusDevice
"""
import time
from adafruit_bus_device.i2c_device import I2CDevice
from micropython import const
__version__ = "0.0.0-auto.0"
__repo__ = "https://github.com/adafruit/Adafruit_CircuitPython_SGP30.git"
# pylint: disable=bad-whitespace
_SGP30_DEFAULT_I2C_ADDR = const(0x58)  # fixed I2C address of the SGP30
_SGP30_FEATURESET = const(0x0020)  # expected Get Feature Set reply word
_SGP30_CRC8_POLYNOMIAL = const(0x31)  # CRC-8 polynomial used on the wire
_SGP30_CRC8_INIT = const(0xFF)  # CRC-8 initial value
_SGP30_WORD_LEN = const(2)  # each data word is 2 bytes (plus 1 CRC byte)
# pylint: enable=bad-whitespace
class Adafruit_SGP30:
    """
    A driver for the SGP30 gas sensor.

    :param i2c: the I2C bus the sensor is wired to
    :param int address: the sensor's I2C address (default 0x58)
    """
    def __init__(self, i2c, address=_SGP30_DEFAULT_I2C_ADDR):
        """Initialize the sensor, get the serial # and verify that we found a proper SGP30"""
        self._device = I2CDevice(i2c, address)
        # get unique serial, it's 48 bits so we store it as three 16-bit words
        self.serial = self._i2c_read_words_from_cmd([0x36, 0x82], 0.01, 3)
        # get feature set and verify the chip identifies as an SGP30
        featureset = self._i2c_read_words_from_cmd([0x20, 0x2f], 0.01, 1)
        if featureset[0] != _SGP30_FEATURESET:
            raise RuntimeError('SGP30 Not detected')
        self.iaq_init()
    @property
    def tvoc(self):
        """Total Volatile Organic Compound in parts per billion."""
        return self.iaq_measure()[1]
    @property
    def baseline_tvoc(self):
        """Total Volatile Organic Compound baseline value"""
        return self.get_iaq_baseline()[1]
    @property
    def co2eq(self):
        """Carbon Dioxide Equivalent in parts per million"""
        return self.iaq_measure()[0]
    @property
    def baseline_co2eq(self):
        """Carbon Dioxide Equivalent baseline value"""
        return self.get_iaq_baseline()[0]
    def iaq_init(self):
        """Initialize the IAQ algorithm"""
        # name, command, signals, delay
        self._run_profile(["iaq_init", [0x20, 0x03], 0, 0.01])
    def iaq_measure(self):
        """Measure the CO2eq and TVOC; returns [co2eq, tvoc]."""
        # name, command, signals, delay
        return self._run_profile(["iaq_measure", [0x20, 0x08], 2, 0.05])
    def get_iaq_baseline(self):
        """Retrieve the IAQ algorithm baseline for CO2eq and TVOC"""
        # name, command, signals, delay
        return self._run_profile(["iaq_get_baseline", [0x20, 0x15], 2, 0.01])
    def set_iaq_baseline(self, co2eq, tvoc):
        """Set the previously recorded IAQ algorithm baseline for CO2eq and TVOC"""
        if co2eq == 0 and tvoc == 0:
            raise RuntimeError('Invalid baseline')
        buffer = []
        # NOTE: the TVOC word is sent before the CO2eq word — the reverse of
        # the order the getter returns them in; presumably required by the
        # sensor's set-baseline command (confirm against the datasheet).
        for value in [tvoc, co2eq]:
            # Each 16-bit value is sent MSB first, followed by its CRC-8.
            arr = [value >> 8, value & 0xFF]
            arr.append(self._generate_crc(arr))
            buffer += arr
        self._run_profile(["iaq_set_baseline", [0x20, 0x1e] + buffer, 0, 0.01])
    # Low level command functions
    def _run_profile(self, profile):
        """Run an SGP 'profile' which is a named command set"""
        # pylint: disable=unused-variable
        name, command, signals, delay = profile
        # pylint: enable=unused-variable
        #print("\trunning profile: %s, command %s, %d, delay %0.02f" %
        #      (name, ["0x%02x" % i for i in command], signals, delay))
        return self._i2c_read_words_from_cmd(command, delay, signals)
    def _i2c_read_words_from_cmd(self, command, delay, reply_size):
        """Run an SGP command query, get a reply and CRC results if necessary

        :param command: list of command bytes to write
        :param delay: seconds to wait before reading the reply
        :param reply_size: number of 16-bit words expected (0 -> None)
        """
        with self._device:
            self._device.write(bytes(command))
            time.sleep(delay)
            if not reply_size:
                return None
            # Each word on the wire is 2 data bytes followed by 1 CRC byte.
            crc_result = bytearray(reply_size * (_SGP30_WORD_LEN +1))
            self._device.readinto(crc_result)
            #print("\tRaw Read: ", crc_result)
            result = []
            for i in range(reply_size):
                word = [crc_result[3*i], crc_result[3*i+1]]
                crc = crc_result[3*i+2]
                if self._generate_crc(word) != crc:
                    raise RuntimeError('CRC Error')
                result.append(word[0] << 8 | word[1])
            #print("\tOK Data: ", [hex(i) for i in result])
            return result
    # pylint: disable=no-self-use
    def _generate_crc(self, data):
        """8-bit CRC algorithm for checking data"""
        crc = _SGP30_CRC8_INIT
        # calculates 8-Bit checksum with given polynomial
        for byte in data:
            crc ^= byte
            for _ in range(8):
                if crc & 0x80:
                    crc = (crc << 1) ^ _SGP30_CRC8_POLYNOMIAL
                else:
                    crc <<= 1
        return crc & 0xFF
| 35.384181 | 93 | 0.640109 |
48ad652ea0741c283d5c36bfb55544f66b509641 | 78,186 | py | Python | mesonbuild/mesonlib/universal.py | mscofield0/meson | 007c4659c2154755fc1f57d415afc8a736f81af2 | [
"Apache-2.0"
] | null | null | null | mesonbuild/mesonlib/universal.py | mscofield0/meson | 007c4659c2154755fc1f57d415afc8a736f81af2 | [
"Apache-2.0"
] | null | null | null | mesonbuild/mesonlib/universal.py | mscofield0/meson | 007c4659c2154755fc1f57d415afc8a736f81af2 | [
"Apache-2.0"
] | null | null | null | # Copyright 2012-2020 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A library of random helper functionality."""
from pathlib import Path
import argparse
import enum
import sys
import stat
import time
import abc
import platform, subprocess, operator, os, shlex, shutil, re
import collections
from functools import lru_cache, wraps, total_ordering
from itertools import tee, filterfalse
from tempfile import TemporaryDirectory
import typing as T
import uuid
import textwrap
import copy
from mesonbuild import mlog
if T.TYPE_CHECKING:
from .._typing import ImmutableListProtocol
from ..build import ConfigurationData
from ..coredata import KeyedOptionDictType, UserOption
from ..compilers.compilers import Compiler
FileOrString = T.Union['File', str]
_T = T.TypeVar('_T')
_U = T.TypeVar('_U')
__all__ = [
'GIT',
'python_command',
'project_meson_versions',
'HoldableObject',
'SecondLevelHolder',
'File',
'FileMode',
'GitException',
'LibType',
'MachineChoice',
'MesonException',
'MesonBugException',
'EnvironmentException',
'FileOrString',
'GitException',
'OptionKey',
'dump_conf_header',
'OptionOverrideProxy',
'OptionType',
'OrderedSet',
'PerMachine',
'PerMachineDefaultable',
'PerThreeMachine',
'PerThreeMachineDefaultable',
'ProgressBar',
'RealPathAction',
'TemporaryDirectoryWinProof',
'Version',
'check_direntry_issues',
'classify_unity_sources',
'current_vs_supports_modules',
'darwin_get_object_archs',
'default_libdir',
'default_libexecdir',
'default_prefix',
'detect_subprojects',
'detect_vcs',
'do_conf_file',
'do_conf_str',
'do_replacement',
'exe_exists',
'expand_arguments',
'extract_as_list',
'generate_list',
'get_compiler_for_source',
'get_filenames_templates_dict',
'get_library_dirs',
'get_variable_regex',
'get_wine_shortpath',
'git',
'has_path_sep',
'is_aix',
'is_android',
'is_ascii_string',
'is_cygwin',
'is_debianlike',
'is_dragonflybsd',
'is_freebsd',
'is_haiku',
'is_hurd',
'is_irix',
'is_linux',
'is_netbsd',
'is_openbsd',
'is_osx',
'is_qnx',
'is_sunos',
'is_windows',
'is_wsl',
'iter_regexin_iter',
'join_args',
'listify',
'partition',
'path_is_in_root',
'Popen_safe',
'quiet_git',
'quote_arg',
'relative_to_if_possible',
'relpath',
'replace_if_different',
'run_once',
'get_meson_command',
'set_meson_command',
'split_args',
'stringlistify',
'substitute_values',
'substring_is_in_list',
'typeslistify',
'verbose_git',
'version_compare',
'version_compare_condition_with_min',
'version_compare_many',
'search_version',
'windows_proof_rm',
'windows_proof_rmtree',
]
# TODO: this is such a hack, this really should be either in coredata or in the
# interpreter
# {subproject: project_meson_version}
project_meson_versions = collections.defaultdict(str) # type: T.DefaultDict[str, str]
from glob import glob
# Command used to spawn a Python interpreter matching the running Meson.
if os.path.basename(sys.executable) == 'meson.exe':
    # In Windows and using the MSI installed executable.
    python_command = [sys.executable, 'runpython']
else:
    python_command = [sys.executable]
# Presumably set later via set_meson_command() (exported in __all__);
# holds the command used to re-invoke Meson itself.
_meson_command = None
class MesonException(Exception):
    '''Exceptions thrown by Meson'''
    def __init__(self, *args: object, file: T.Optional[str] = None,
                 lineno: T.Optional[int] = None, colno: T.Optional[int] = None):
        super().__init__(*args)
        # Optional location of the offending construct (meson.build file,
        # line and column) so errors can be reported with context.
        self.file = file
        self.lineno = lineno
        self.colno = colno
class MesonBugException(MesonException):
    '''Exceptions thrown when there is a clear Meson bug that should be reported'''
    def __init__(self, msg: str, file: T.Optional[str] = None,
                 lineno: T.Optional[int] = None, colno: T.Optional[int] = None):
        # Append a standard "please report" note so users file bug reports.
        super().__init__(msg + '\n\n    This is a Meson bug and should be reported!',
                         file=file, lineno=lineno, colno=colno)
# Carries no extra state beyond MesonException; exists for targeted catching.
class EnvironmentException(MesonException):
    '''Exceptions thrown while processing and creating the build environment'''
class GitException(MesonException):
    """Raised when a git command fails; carries the command's stderr output."""

    def __init__(self, msg: str, output: T.Optional[str] = None):
        super().__init__(msg)
        # Normalize: always a string, stripped of surrounding whitespace.
        if output:
            self.output = output.strip()
        else:
            self.output = ''
GIT = shutil.which('git')
def git(cmd: T.List[str], workingdir: str, check: bool = False, **kwargs: T.Any) -> T.Tuple[subprocess.Popen, str, str]:
    """Run git with *cmd* in *workingdir*; return (process, stdout, stderr).

    If *check* is true, raise GitException when git exits non-zero.
    Extra keyword args are forwarded to Popen_safe.
    """
    argv = [GIT] + cmd
    proc, out, err = Popen_safe(argv, cwd=workingdir, **kwargs)
    if check and proc.returncode != 0:
        raise GitException('Git command failed: ' + str(argv), err)
    return proc, out, err
def quiet_git(cmd: T.List[str], workingdir: str, check: bool = False) -> T.Tuple[bool, str]:
    """Run git capturing output; return (success, stdout-or-error-message).

    If git is missing or the command fails: returns (False, message), or
    raises GitException when *check* is true.
    """
    if not GIT:
        msg = 'Git program not found.'
        if check:
            raise GitException(msg)
        return False, msg
    proc, out, err = git(cmd, workingdir, check)
    return (True, out) if proc.returncode == 0 else (False, err)
def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool:
    """Run git with output going straight to the console; True on success.

    Raises GitException when git is missing and *check* is true.
    """
    if not GIT:
        if check:
            raise GitException('Git program not found.')
        return False
    # stdout/stderr of None means "inherit": the user sees git's own output.
    proc, _, _ = git(cmd, workingdir, check, stdout=None, stderr=None)
    return proc.returncode == 0
def set_meson_command(mainfile: str) -> None:
    """Record the command needed to re-invoke this Meson instance.

    On UNIX-like systems `meson` is a Python script; on Windows `meson` and
    `meson.exe` are wrapper executables.
    """
    global python_command
    global _meson_command
    if not mainfile.endswith('.py'):
        # A native wrapper executable: run it directly.
        command = [mainfile]
    elif os.path.isabs(mainfile) and mainfile.endswith('mesonmain.py'):
        # mesonmain.py cannot be run by absolute path; it must be run as
        # -m mesonbuild.mesonmain
        command = python_command + ['-m', 'mesonbuild.mesonmain']
    else:
        # Either running uninstalled, or a full path to meson-script.py.
        command = python_command + [mainfile]
    _meson_command = command
    # We print this value for unit tests.
    if 'MESON_COMMAND_TESTS' in os.environ:
        mlog.log(f'meson_command is {_meson_command!r}')
def get_meson_command() -> T.Optional[T.List[str]]:
    # Returns the command recorded by set_meson_command(), or None if it
    # has not been called yet.
    return _meson_command
def is_ascii_string(astring: T.Union[str, bytes]) -> bool:
    """Return True if *astring* (str or bytes) contains only ASCII characters."""
    try:
        if isinstance(astring, str):
            astring.encode('ascii')
        elif isinstance(astring, bytes):
            astring.decode('ascii')
    except UnicodeError:
        # str.encode raises UnicodeEncodeError and bytes.decode raises
        # UnicodeDecodeError; catching only UnicodeDecodeError (as before)
        # let non-ASCII *str* inputs escape as an uncaught exception.
        return False
    return True
def check_direntry_issues(direntry_array: T.Union[T.List[T.Union[str, bytes]], str, bytes]) -> None:
    """Warn when a non-UTF-8 locale meets non-ASCII file system entries.

    Accepts a single entry or a list of entries (str or bytes); emits one
    warning per problematic entry.
    """
    import locale
    # Warn if the locale is not UTF-8. This can cause various unfixable issues
    # such as os.stat not being able to decode filenames with unicode in them.
    # There is no way to reset both the preferred encoding and the filesystem
    # encoding, so we can just warn about it.
    e = locale.getpreferredencoding()
    if e.upper() != 'UTF-8' and not is_windows():
        if not isinstance(direntry_array, list):
            direntry_array = [direntry_array]
        for de in direntry_array:
            if is_ascii_string(de):
                continue
            mlog.warning(textwrap.dedent(f'''
                You are using {e!r} which is not a Unicode-compatible
                locale but you are trying to access a file system entry called {de!r} which is
                not pure ASCII. This may cause problems.
                '''), file=sys.stderr)
# Marker base class only: carries no behavior of its own.
class HoldableObject(metaclass=abc.ABCMeta):
    ''' Dummy base class for all objects that can be
        held by an interpreter.baseobjects.ObjectHolder '''
class SecondLevelHolder(HoldableObject, metaclass=abc.ABCMeta):
    ''' A second level object holder. The primary purpose
        of such objects is to hold multiple objects with one
        default option. '''

    @abc.abstractmethod
    def get_default_object(self) -> HoldableObject: ...
class FileMode:
    """Symbolic install permissions ('rwxr-xr-x' style) plus optional owner/group.

    The permission string has three triads — owner, group, others.  Within a
    triad:
      1st char: 'r' readable, '-' not readable
      2nd char: 'w' writable, '-' not writable
      3rd char: 'x' executable; 's'/'S' setuid or setgid with/without execute
                (owner and group triads only); 't'/'T' sticky bit with/without
                execute (others triad only); '-' none of these.

    The meaning of 'rwx' perms is not obvious for directories; see:
    https://www.hackinglinuxexposed.com/articles/20030424.html
    For the setuid/setgid/sticky notation, see:
    https://en.wikipedia.org/wiki/File_system_permissions#Symbolic_notation
    """

    symbolic_perms_regex = re.compile('[r-][w-][xsS-]' # Owner perms
                                      '[r-][w-][xsS-]' # Group perms
                                      '[r-][w-][xtT-]') # Others perms

    def __init__(self, perms: T.Optional[str] = None, owner: T.Union[str, int, None] = None,
                 group: T.Union[str, int, None] = None):
        # Keep both the symbolic form and the numeric st_mode bits (-1 = unset).
        self.perms_s = perms
        self.perms = self.perms_s_to_bits(perms)
        self.owner = owner
        self.group = group

    def __repr__(self) -> str:
        # NOTE(review): historical format string lacks a closing '>'.
        return '<FileMode: {!r} owner={} group={}'.format(self.perms_s, self.owner, self.group)

    @classmethod
    def perms_s_to_bits(cls, perms_s: T.Optional[str]) -> int:
        '''
        Does the opposite of stat.filemode(), converts strings of the form
        'rwxr-xr-x' to st_mode enums which can be passed to os.chmod()
        '''
        if perms_s is None:
            # No perms specified, we will not touch the permissions
            return -1
        eg = 'rwxr-xr-x'
        if not isinstance(perms_s, str):
            raise MesonException(f'Install perms must be a string. For example, {eg!r}')
        if len(perms_s) != 9 or not cls.symbolic_perms_regex.match(perms_s):
            raise MesonException(f'File perms {perms_s!r} must be exactly 9 chars. For example, {eg!r}')
        # Bit tables indexed by triad: 0 = owner, 1 = group, 2 = others.
        read_bits = (stat.S_IRUSR, stat.S_IRGRP, stat.S_IROTH)
        write_bits = (stat.S_IWUSR, stat.S_IWGRP, stat.S_IWOTH)
        exec_bits = (stat.S_IXUSR, stat.S_IXGRP, stat.S_IXOTH)
        special_bits = (stat.S_ISUID, stat.S_ISGID, stat.S_ISVTX)
        perms = 0
        for triad in range(3):
            r, w, x = perms_s[triad * 3:triad * 3 + 3]
            if r == 'r':
                perms |= read_bits[triad]
            if w == 'w':
                perms |= write_bits[triad]
            if x in 'sStT':
                # setuid / setgid / sticky; the regex restricts which of these
                # may appear in each triad.
                perms |= special_bits[triad]
            if x in 'xst':
                # Lowercase special chars also imply execute permission.
                perms |= exec_bits[triad]
        return perms
dot_C_dot_H_warning = """You are using .C or .H files in your project. This is deprecated.
Currently, Meson treats this as C++ code, but they
used to be treated as C code.
Note that the situation is a bit more complex if you are using the
Visual Studio compiler, as it treats .C files as C code, unless you add
the /TP compiler flag, but this is unreliable.
See https://github.com/mesonbuild/meson/pull/8747 for the discussions."""
class File(HoldableObject):
    """A file identified by a path relative to a subdir of the source or
    build tree; ``is_built`` distinguishes generated (build-dir) files from
    source-tree files.  Instances are hashable and heavily used as dict keys.
    """

    def __init__(self, is_built: bool, subdir: str, fname: str):
        if fname.endswith(".C") or fname.endswith(".H"):
            mlog.warning(dot_C_dot_H_warning, once=True)
        self.is_built = is_built
        self.subdir = subdir
        self.fname = fname
        # Precompute the hash; File objects are treated as immutable.
        self.hash = hash((is_built, subdir, fname))

    def __str__(self) -> str:
        return self.relative_name()

    def __repr__(self) -> str:
        if self.is_built:
            template = '<File: {0}>'
        else:
            template = '<File: {0} (not built)>'
        return template.format(self.relative_name())

    @staticmethod
    @lru_cache(maxsize=None)
    def from_source_file(source_root: str, subdir: str, fname: str) -> 'File':
        """Create a File for a source-tree file, verifying it exists."""
        if not os.path.isfile(os.path.join(source_root, subdir, fname)):
            raise MesonException(f'File {fname} does not exist.')
        return File(False, subdir, fname)

    @staticmethod
    def from_built_file(subdir: str, fname: str) -> 'File':
        """Create a File for a generated file in the build directory."""
        return File(True, subdir, fname)

    @staticmethod
    def from_absolute_file(fname: str) -> 'File':
        """Create a File for an absolute path (no subdir component)."""
        return File(False, '', fname)

    @lru_cache(maxsize=None)
    def rel_to_builddir(self, build_to_src: str) -> str:
        """Path relative to the build dir (built files already are)."""
        if self.is_built:
            return self.relative_name()
        return os.path.join(build_to_src, self.subdir, self.fname)

    @lru_cache(maxsize=None)
    def absolute_path(self, srcdir: str, builddir: str) -> str:
        """Absolute path, rooted at builddir for built files, else srcdir."""
        root = builddir if self.is_built else srcdir
        return os.path.join(root, self.relative_name())

    @property
    def suffix(self) -> str:
        # Lower-cased extension without the leading dot.
        return os.path.splitext(self.fname)[1][1:].lower()

    def endswith(self, ending: str) -> bool:
        return self.fname.endswith(ending)

    def split(self, s: str, maxsplit: int = -1) -> T.List[str]:
        return self.fname.split(s, maxsplit=maxsplit)

    def rsplit(self, s: str, maxsplit: int = -1) -> T.List[str]:
        return self.fname.rsplit(s, maxsplit=maxsplit)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, File):
            return NotImplemented
        # Cheap precomputed-hash comparison first; full field comparison only
        # on hash equality (collisions).
        if self.hash != other.hash:
            return False
        return (self.fname, self.subdir, self.is_built) == (other.fname, other.subdir, other.is_built)

    def __hash__(self) -> int:
        return self.hash

    @lru_cache(maxsize=None)
    def relative_name(self) -> str:
        """Subdir-relative path, i.e. os.path.join(subdir, fname)."""
        return os.path.join(self.subdir, self.fname)
def get_compiler_for_source(compilers: T.Iterable['Compiler'], src: 'FileOrString') -> 'Compiler':
    """Given a set of compilers and a source, find the compiler for that source type.

    Raises MesonException when no compiler in the set can handle *src*.
    """
    for candidate in compilers:
        if candidate.can_compile(src):
            return candidate
    raise MesonException(f'No specified compiler can handle file {src!s}')
def classify_unity_sources(compilers: T.Iterable['Compiler'], sources: T.Sequence['FileOrString']) -> T.Dict['Compiler', T.List['FileOrString']]:
    """Group *sources* by the compiler that can compile each of them."""
    by_compiler: T.Dict['Compiler', T.List['FileOrString']] = {}
    for src in sources:
        handler = get_compiler_for_source(compilers, src)
        by_compiler.setdefault(handler, []).append(src)
    return by_compiler
class MachineChoice(enum.IntEnum):
    """Enum class representing one of the two abstract machine names used in
    most places: the build, and host, machines.
    """

    BUILD = 0
    HOST = 1

    def get_lower_case_name(self) -> str:
        # Indexable because BUILD == 0 and HOST == 1.
        return ('build', 'host')[self.value]

    def get_prefix(self) -> str:
        # Option-name prefix: build-machine options are namespaced, host not.
        return ('build.', '')[self.value]
class PerMachine(T.Generic[_T]):
    """Container holding one value for the build machine and one for the host."""

    def __init__(self, build: _T, host: _T) -> None:
        self.build = build
        self.host = host

    def __getitem__(self, machine: MachineChoice) -> _T:
        return {
            MachineChoice.BUILD: self.build,
            MachineChoice.HOST: self.host,
        }[machine]

    def __setitem__(self, machine: MachineChoice, val: _T) -> None:
        setattr(self, machine.get_lower_case_name(), val)

    def miss_defaulting(self) -> "PerMachineDefaultable[T.Optional[_T]]":
        """Unset definitions duplicated from their predecessor to None.

        This is the inverse of ``default_missing``: by removing defaulted
        machines we can elaborate the original values and then redefault
        them, avoiding a repeat of the elaboration.
        """
        thawed = PerMachineDefaultable()  # type: PerMachineDefaultable[T.Optional[_T]]
        thawed.build = self.build
        thawed.host = self.host
        # A host equal to build was (or may as well have been) defaulted.
        if thawed.host == thawed.build:
            thawed.host = None
        return thawed

    def __repr__(self) -> str:
        return f'PerMachine({self.build!r}, {self.host!r})'
class PerThreeMachine(PerMachine[_T]):
    """Like `PerMachine` but includes `target` too.

    Only one thing needs to track the target machine, so `__getitem__` /
    `__setitem__` are deliberately not overridden to cover `target`.
    """

    def __init__(self, build: _T, host: _T, target: _T) -> None:
        super().__init__(build, host)
        self.target = target

    def miss_defaulting(self) -> "PerThreeMachineDefaultable[T.Optional[_T]]":
        """Unset definitions duplicated from their predecessor to None.

        Inverse of ``default_missing``; see PerMachine.miss_defaulting.
        """
        thawed = PerThreeMachineDefaultable()  # type: PerThreeMachineDefaultable[T.Optional[_T]]
        thawed.build = self.build
        thawed.host = self.host
        thawed.target = self.target
        # Drop values that merely duplicate their predecessor in the
        # build -> host -> target chain.
        if thawed.target == thawed.host:
            thawed.target = None
        if thawed.host == thawed.build:
            thawed.host = None
        return thawed

    def matches_build_machine(self, machine: MachineChoice) -> bool:
        """True if the given machine's value equals the build machine's."""
        return self.build == self[machine]

    def __repr__(self) -> str:
        return f'PerThreeMachine({self.build!r}, {self.host!r}, {self.target!r})'
class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
    """Extends `PerMachine` with the ability to default from `None`s."""

    def __init__(self, build: T.Optional[_T] = None, host: T.Optional[_T] = None) -> None:
        super().__init__(build, host)

    def default_missing(self) -> "PerMachine[_T]":
        """Default host to build.

        Allows specifying nothing in the native case, and just the host in
        the cross (non-compiler) case.
        """
        frozen = PerMachine(self.build, self.host)
        if frozen.host is None:
            frozen.host = frozen.build
        return frozen

    def __repr__(self) -> str:
        return f'PerMachineDefaultable({self.build!r}, {self.host!r})'

    @classmethod
    def default(cls, is_cross: bool, build: _T, host: _T) -> PerMachine[_T]:
        """Shorthand for building a defaulted PerMachine.

        The *host* value is honoured only when *is_cross* is true; otherwise
        host defaults to the build value.
        """
        machines = cls(build, host if is_cross else None)
        return machines.default_missing()
class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Optional[_T]]):
    """Extends `PerThreeMachine` with the ability to default from `None`s."""

    def __init__(self) -> None:
        PerThreeMachine.__init__(self, None, None, None)

    def default_missing(self) -> "PerThreeMachine[T.Optional[_T]]":
        """Default host to build and target to host.

        Allows specifying nothing (native), just host (cross non-compiler),
        or just target (native-built cross-compiler).
        """
        frozen = PerThreeMachine(self.build, self.host, self.target)
        if frozen.host is None:
            frozen.host = frozen.build
        if frozen.target is None:
            frozen.target = frozen.host
        return frozen

    def __repr__(self) -> str:
        return f'PerThreeMachineDefaultable({self.build!r}, {self.host!r}, {self.target!r})'
# Platform predicates, almost all keyed on platform.system().
def is_sunos() -> bool:
    # Solaris and illumos distributions both report 'SunOS'.
    return platform.system().lower() == 'sunos'
def is_osx() -> bool:
    return platform.system().lower() == 'darwin'
def is_linux() -> bool:
    return platform.system().lower() == 'linux'
def is_android() -> bool:
    return platform.system().lower() == 'android'
def is_haiku() -> bool:
    return platform.system().lower() == 'haiku'
def is_openbsd() -> bool:
    return platform.system().lower() == 'openbsd'
def is_windows() -> bool:
    platname = platform.system().lower()
    return platname == 'windows'
def is_wsl() -> bool:
    # WSL presents a Linux kernel whose release string contains 'microsoft'.
    return is_linux() and 'microsoft' in platform.release().lower()
def is_cygwin() -> bool:
    # Cygwin is detected via sys.platform, not platform.system().
    return sys.platform == 'cygwin'
def is_debianlike() -> bool:
    # Debian and derivatives (Ubuntu, etc.) all ship this file.
    return os.path.isfile('/etc/debian_version')
def is_dragonflybsd() -> bool:
    return platform.system().lower() == 'dragonfly'
def is_netbsd() -> bool:
    return platform.system().lower() == 'netbsd'
def is_freebsd() -> bool:
    return platform.system().lower() == 'freebsd'
def is_irix() -> bool:
    # NOTE(review): IRIX's uname reports 'IRIX'/'IRIX64' (upper case), so this
    # lower-case prefix check appears to never match — confirm intent.
    return platform.system().startswith('irix')
def is_hurd() -> bool:
    # GNU/Hurd reports 'GNU' as the system name.
    return platform.system().lower() == 'gnu'
def is_qnx() -> bool:
    return platform.system().lower() == 'qnx'
def is_aix() -> bool:
    return platform.system().lower() == 'aix'
def exe_exists(arglist: T.List[str]) -> bool:
    """Return True if running *arglist* succeeds (exits with code 0).

    A missing executable or a run exceeding the 10-second timeout counts
    as "does not exist".
    """
    try:
        return subprocess.run(arglist, timeout=10).returncode == 0
    except (FileNotFoundError, subprocess.TimeoutExpired):
        return False
@lru_cache(maxsize=None)
def darwin_get_object_archs(objpath: str) -> 'ImmutableListProtocol[str]':
    '''
    For a specific object (executable, static library, dylib, etc), run `lipo`
    to fetch the list of archs supported by it. Supports both thin objects and
    'fat' objects.
    '''
    _, stdo, stderr = Popen_safe(['lipo', '-info', objpath])
    if not stdo:
        mlog.debug(f'lipo {objpath}: {stderr}')
        return None
    archs = stdo.rsplit(': ', 1)[1]
    # Map lipo's arch names onto Meson CPU-family names.
    archs = archs.replace('i386', 'x86').replace('arm64', 'aarch64')
    if 'armv7' in archs:
        # Add the generic name alongside armv7 / armv7s.
        archs += ' arm'
    return archs.split()
def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[T.Dict[str, str]]:
    """Walk from *source_dir* up to the filesystem root looking for a
    version-control checkout whose tool is also installed.

    Returns the matching descriptor dict augmented with 'wc_dir' (the
    working-copy root), or None when nothing is found.
    """
    vcs_systems = [
        dict(name = 'git', cmd = 'git', repo_dir = '.git', get_rev = 'git describe --dirty=+', rev_regex = '(.*)', dep = '.git/logs/HEAD'),
        dict(name = 'mercurial', cmd = 'hg', repo_dir = '.hg', get_rev = 'hg id -i', rev_regex = '(.*)', dep = '.hg/dirstate'),
        dict(name = 'subversion', cmd = 'svn', repo_dir = '.svn', get_rev = 'svn info', rev_regex = 'Revision: (.*)', dep = '.svn/wc.db'),
        dict(name = 'bazaar', cmd = 'bzr', repo_dir = '.bzr', get_rev = 'bzr revno', rev_regex = '(.*)', dep = '.bzr'),
    ]
    if isinstance(source_dir, str):
        source_dir = Path(source_dir)
    # source_dir.parents does not include source_dir itself, so prepend it.
    candidates = collections.deque(source_dir.parents)
    candidates.appendleft(source_dir)
    for directory in candidates:
        for vcs in vcs_systems:
            if (directory / vcs['repo_dir']).is_dir() and shutil.which(vcs['cmd']):
                vcs['wc_dir'] = str(directory)
                return vcs
    return None
def current_vs_supports_modules() -> bool:
    """Return True if the active Visual Studio environment supports C++ modules.

    Reads VSCMD_VER (set by the VS developer prompt).  Modules are supported
    by VS 2022 (17.x) and later, VS 2019 from 16.10, and the 16.9.0
    prerelease builds.
    """
    vsver = os.environ.get('VSCMD_VER', '')
    nums = vsver.split('.', 2)
    try:
        major = int(nums[0])
    except ValueError:
        # VSCMD_VER unset or malformed: previously int('') raised an
        # uncaught ValueError here; treat it as "no module support".
        return False
    if major >= 17:
        return True
    try:
        minor = int(nums[1])
    except (IndexError, ValueError):
        # A bare '16' (no minor) used to raise IndexError below.
        minor = 0
    if major == 16 and minor >= 10:
        return True
    return vsver.startswith('16.9.0') and '-pre.' in vsver
class Version:
    """A version string with the same ordering semantics as RPM.

    The string is split into numeric and alphabetic runs (separators are
    discarded); numeric runs compare as integers, alphabetic runs sort
    before numeric ones, and a longer version compares greater when all
    shared components are equal.
    """

    def __init__(self, s: str) -> None:
        self._s = s
        # Split into numeric, alphabetic and non-alphanumeric sequences;
        # drop the separators and convert digit runs to ints.
        parts: T.List[T.Union[str, int]] = []
        for m in re.finditer(r'(\d+|[a-zA-Z]+|[^a-zA-Z\d]+)', s):
            token = m.group(1)
            if re.match(r'[^a-zA-Z\d]+', token):
                continue
            parts.append(int(token) if token.isdigit() else token)
        self._v = parts

    def __str__(self) -> str:
        return '{} (V={})'.format(self._s, str(self._v))

    def __repr__(self) -> str:
        return f'<Version: {self._s}>'

    def __lt__(self, other: object) -> bool:
        if not isinstance(other, Version):
            return NotImplemented
        return self.__cmp(other, operator.lt)

    def __gt__(self, other: object) -> bool:
        if not isinstance(other, Version):
            return NotImplemented
        return self.__cmp(other, operator.gt)

    def __le__(self, other: object) -> bool:
        if not isinstance(other, Version):
            return NotImplemented
        return self.__cmp(other, operator.le)

    def __ge__(self, other: object) -> bool:
        if not isinstance(other, Version):
            return NotImplemented
        return self.__cmp(other, operator.ge)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Version):
            return NotImplemented
        return self._v == other._v

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, Version):
            return NotImplemented
        return self._v != other._v

    def __cmp(self, other: 'Version', comparator: T.Callable[[T.Any, T.Any], bool]) -> bool:
        # Compare component-wise in order.
        for mine, theirs in zip(self._v, other._v):
            mine_is_int = isinstance(mine, int)
            theirs_is_int = isinstance(theirs, int)
            if mine_is_int != theirs_is_int:
                # An alphabetic run sorts before a numeric run.
                return comparator(mine_is_int, theirs_is_int)
            if mine != theirs:
                return comparator(mine, theirs)
        # All shared components matched: the version with components left
        # over is the greater one; equal lengths mean equal versions.
        return comparator(len(self._v), len(other._v))
def _version_extract_cmpop(vstr2: str) -> T.Tuple[T.Callable[[T.Any, T.Any], bool], str]:
if vstr2.startswith('>='):
cmpop = operator.ge
vstr2 = vstr2[2:]
elif vstr2.startswith('<='):
cmpop = operator.le
vstr2 = vstr2[2:]
elif vstr2.startswith('!='):
cmpop = operator.ne
vstr2 = vstr2[2:]
elif vstr2.startswith('=='):
cmpop = operator.eq
vstr2 = vstr2[2:]
elif vstr2.startswith('='):
cmpop = operator.eq
vstr2 = vstr2[1:]
elif vstr2.startswith('>'):
cmpop = operator.gt
vstr2 = vstr2[1:]
elif vstr2.startswith('<'):
cmpop = operator.lt
vstr2 = vstr2[1:]
else:
cmpop = operator.eq
return (cmpop, vstr2)
def version_compare(vstr1: str, vstr2: str) -> bool:
    """Check version *vstr1* against constraint *vstr2* (e.g. '>=1.2')."""
    comparator, stripped = _version_extract_cmpop(vstr2)
    return comparator(Version(vstr1), Version(stripped))
def version_compare_many(vstr1: str, conditions: T.Union[str, T.Iterable[str]]) -> T.Tuple[bool, T.List[str], T.List[str]]:
    """Check *vstr1* against one or more constraints.

    Returns (all_satisfied, unsatisfied_conditions, satisfied_conditions).
    """
    if isinstance(conditions, str):
        conditions = [conditions]
    found: T.List[str] = []
    not_found: T.List[str] = []
    for req in conditions:
        # Preserve input order within each bucket.
        (found if version_compare(vstr1, req) else not_found).append(req)
    return not not_found, not_found, found
def version_compare_condition_with_min(condition: str, minimum: str) -> bool:
    """Determine if the minimum version satisfying *condition* exceeds the
    minimum version required for a feature, *minimum*.

    Constraints that impose no lower bound ('<=', '!=', '<') always return
    False.
    """
    if condition.startswith('>='):
        cmpop = operator.le
        condition = condition[2:]
    elif condition.startswith('<='):
        return False
    elif condition.startswith('!='):
        return False
    elif condition.startswith('=='):
        cmpop = operator.le
        condition = condition[2:]
    elif condition.startswith('='):
        cmpop = operator.le
        condition = condition[1:]
    elif condition.startswith('>'):
        cmpop = operator.lt
        condition = condition[1:]
    elif condition.startswith('<'):
        return False
    else:
        cmpop = operator.le
    # Declaring a project(meson_version: '>=0.46') and then using features in
    # 0.46.0 is valid, because (knowing the meson versioning scheme) '0.46.0' is
    # the lowest version which satisfies the constraint '>=0.46'.
    #
    # But this will fail here, because the minimum version required by the
    # version constraint ('0.46') is strictly less (in our version comparison)
    # than the minimum version needed for the feature ('0.46.0').
    #
    # Map versions in the constraint of the form '0.46' to '0.46.0', to embed
    # this knowledge of the meson versioning scheme.
    condition = condition.strip()
    # The dot must be escaped: the previous r'^\d+.\d+$' treated '.' as "any
    # character", so e.g. '460' matched ('4' + '6' + '0') and was wrongly
    # rewritten to '460.0'.
    if re.match(r'^\d+\.\d+$', condition):
        condition += '.0'
    return T.cast(bool, cmpop(Version(minimum), Version(condition)))
def search_version(text: str) -> str:
    """Extract a version number such as '4.1.4' from free-form tool output.

    Compiler output can contain noise such as
    '(Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)';
    limiting the major version to two digits has worked thus far (it breaks
    at GCC 100 — an achievement in itself if we are still relevant then).
    Falls back to a looser pattern for strings like 'blah 2020.01 foo',
    and returns 'unknown version' when nothing matches.
    """
    # The primary regex is reaching magic levels; if it ever needs updating,
    # convert it to something saner rather than complexifying it further.
    primary = re.compile(r"""
    (?<!                # Zero-width negative lookbehind assertion
        (
            \d          # One digit
            | \.        # Or one period
        )               # One occurrence
    )
    # Following pattern must not follow a digit or period
    (
        \d{1,2}         # One or two digits
        (
            \.\d+       # Period and one or more digits
        )+              # One or more occurrences
        (
            -[a-zA-Z0-9]+ # Hyphen and one or more alphanumeric
        )?              # Zero or one occurrence
    )                   # One occurrence
    """, re.VERBOSE)
    fallback = re.compile(r"(\d{1,4}\.\d{1,4}\.?\d{0,4})")
    for pattern in (primary, fallback):
        found = pattern.search(text)
        if found is not None:
            return found.group(0)
    return 'unknown version'
def default_libdir() -> str:
    """Pick the default libdir option value for the current platform."""
    if is_debianlike():
        # Debian-style multiarch: e.g. lib/x86_64-linux-gnu.
        try:
            pc = subprocess.run(['dpkg-architecture', '-qDEB_HOST_MULTIARCH'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.DEVNULL)
            if pc.returncode == 0:
                return 'lib/' + pc.stdout.decode().strip()
        except Exception:
            # Best effort: fall through to the generic heuristics.
            pass
    if is_freebsd() or is_irix():
        return 'lib'
    # A real (non-symlink) /usr/lib64 directory indicates a distro that
    # splits 64-bit libraries into lib64.
    if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
        return 'lib64'
    return 'lib'
def default_libexecdir() -> str:
    # There is no way to auto-detect this, so it must be set at build time
    return 'libexec'
def default_prefix() -> str:
    # Platform-conventional install prefix.
    return 'c:/' if is_windows() else '/usr/local'
def get_library_dirs() -> T.List[str]:
    """Return the system library search directories for this platform."""
    if is_windows():
        return ['C:/mingw/lib'] # TODO: get programmatically
    if is_osx():
        return ['/usr/lib'] # TODO: get programmatically
    # The following is probably Debian/Ubuntu specific.
    # /usr/local/lib is first because it contains stuff
    # installed by the sysadmin and is probably more up-to-date
    # than /usr/lib. If you feel that this search order is
    # problematic, please raise the issue on the mailing list.
    unixdirs = ['/usr/local/lib', '/usr/lib', '/lib']
    if is_freebsd():
        return unixdirs
    # FIXME: this needs to be further genericized for aarch64 etc.
    machine = platform.machine()
    if machine in ('i386', 'i486', 'i586', 'i686'):
        plat = 'i386'
    elif machine.startswith('arm'):
        plat = 'arm'
    else:
        plat = ''
    # Solaris puts 32-bit libraries in the main /lib & /usr/lib directories
    # and 64-bit libraries in platform specific subdirectories.
    if is_sunos():
        if machine == 'i86pc':
            plat = 'amd64'
        elif machine.startswith('sun4'):
            plat = 'sparcv9'
    # For each library root: add any platform subdirectories, plus the
    # corresponding lib64 directory when it exists.
    for libroot, dir64 in (('/usr/lib/', '/usr/lib64'), ('/lib/', '/lib64')):
        platdir = Path(libroot) / plat
        if platdir.is_dir():
            unixdirs += [str(x) for x in platdir.iterdir() if x.is_dir()]
        if os.path.exists(dir64):
            unixdirs.append(dir64)
    return unixdirs
def has_path_sep(name: str, sep: str = '/\\') -> bool:
    'Checks if any of the specified @sep path separators are in @name'
    return any(ch in name for ch in sep)
if is_windows():
    # shlex.split is not suitable for splitting command line on Window (https://bugs.python.org/issue1724822);
    # shlex.quote is similarly problematic. Below are "proper" implementations of these functions according to
    # https://docs.microsoft.com/en-us/cpp/c-language/parsing-c-command-line-arguments and
    # https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
    _whitespace = ' \t\n\r'
    _find_unsafe_char = re.compile(fr'[{_whitespace}"]').search
    def quote_arg(arg: str) -> str:
        """Quote *arg* for the MSVC command-line parsing rules."""
        # Fast path: no whitespace or quotes means no quoting needed.
        if arg and not _find_unsafe_char(arg):
            return arg
        result = '"'
        num_backslashes = 0
        for c in arg:
            if c == '\\':
                # Defer emitting backslashes until we know what follows them.
                num_backslashes += 1
            else:
                if c == '"':
                    # Escape all backslashes and the following double quotation mark
                    num_backslashes = num_backslashes * 2 + 1
                result += num_backslashes * '\\' + c
                num_backslashes = 0
        # Escape all backslashes, but let the terminating double quotation
        # mark we add below be interpreted as a metacharacter
        result += (num_backslashes * 2) * '\\' + '"'
        return result
    def split_args(cmd: str) -> T.List[str]:
        """Split a Windows command line into arguments (inverse of quote_arg)."""
        result = []
        arg = ''
        num_backslashes = 0
        num_quotes = 0      # quotes seen in the current argument ("" yields an empty arg)
        in_quotes = False
        for c in cmd:
            if c == '\\':
                num_backslashes += 1
            else:
                if c == '"' and not num_backslashes % 2:
                    # unescaped quote, eat it
                    arg += (num_backslashes // 2) * '\\'
                    num_quotes += 1
                    in_quotes = not in_quotes
                elif c in _whitespace and not in_quotes:
                    if arg or num_quotes:
                        # reached the end of the argument
                        result.append(arg)
                        arg = ''
                        num_quotes = 0
                else:
                    if c == '"':
                        # escaped quote
                        num_backslashes = (num_backslashes - 1) // 2
                    arg += num_backslashes * '\\' + c
                    num_backslashes = 0
        if arg or num_quotes:
            result.append(arg)
        return result
else:
    # POSIX: the shlex implementations are correct here.
    def quote_arg(arg: str) -> str:
        """Quote *arg* for a POSIX shell."""
        return shlex.quote(arg)
    def split_args(cmd: str) -> T.List[str]:
        """Split a POSIX shell command line into arguments."""
        return shlex.split(cmd)
def join_args(args: T.Iterable[str]) -> str:
    """Join arguments into one command-line string, quoting each as needed."""
    return ' '.join(quote_arg(a) for a in args)
def do_replacement(regex: T.Pattern[str], line: str, variable_format: str,
                   confdata: T.Union[T.Dict[str, T.Tuple[str, T.Optional[str]]], 'ConfigurationData']) -> T.Tuple[str, T.Set[str]]:
    """Substitute @var@ (meson/cmake@) or ${var} (cmake) tokens in *line*.

    Returns the substituted line and the set of variable names that were
    referenced but not present in *confdata*.
    """
    missing_variables = set()  # type: T.Set[str]
    if variable_format == 'cmake':
        start_tag = '${'
        backslash_tag = '\\${'
    else:
        assert variable_format in ['meson', 'cmake@']
        start_tag = '@'
        backslash_tag = '\\@'

    def variable_replace(match: T.Match[str]) -> str:
        # Pairs of escape characters before '@' or '\@'
        if match.group(0).endswith('\\'):
            num_escapes = match.end(0) - match.start(0)
            return '\\' * (num_escapes // 2)
        # Single escape character and '@'
        elif match.group(0) == backslash_tag:
            return start_tag
        # Template variable to be replaced
        else:
            varname = match.group(1)
            var_str = ''
            if varname in confdata:
                # confdata stores (value, description) pairs.
                var, _ = confdata.get(varname)
                if isinstance(var, str):
                    var_str = var
                elif isinstance(var, int):
                    var_str = str(var)
                else:
                    msg = f'Tried to replace variable {varname!r} value with ' \
                          f'something other than a string or int: {var!r}'
                    raise MesonException(msg)
            else:
                # Unknown variable: substitute empty string, record the miss.
                missing_variables.add(varname)
            return var_str
    return re.sub(regex, variable_replace, line), missing_variables
def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', variable_format: str) -> str:
    """Expand a '#mesondefine'/'#cmakedefine' line into '#define'/'#undef'.

    Booleans map to #define/#undef, ints and strings to '#define NAME value';
    an unknown variable yields a commented-out '#undef'.
    """
    def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str:
        # cmake allows extra tokens after the variable name; resolve each
        # token against confdata, leaving unknown tokens as-is.
        arr = line.split()
        define_value = []
        for token in arr[2:]:
            try:
                (v, desc) = confdata.get(token)
                define_value += [str(v)]
            except KeyError:
                define_value += [token]
        return ' '.join(define_value)

    arr = line.split()
    # meson format is strictly '#mesondefine NAME'; cmake permits more tokens.
    if variable_format == 'meson' and len(arr) != 2:
        raise MesonException('#mesondefine does not contain exactly two tokens: %s' % line.strip())
    varname = arr[1]
    try:
        (v, desc) = confdata.get(varname)
    except KeyError:
        return '/* #undef %s */\n' % varname
    if isinstance(v, bool):
        if v:
            return '#define %s\n' % varname
        else:
            return '#undef %s\n' % varname
    elif isinstance(v, int):
        return '#define %s %d\n' % (varname, v)
    elif isinstance(v, str):
        if variable_format == 'meson':
            result = v
        else:
            result = get_cmake_define(line, confdata)
        result = f'#define {varname} {result}\n'
        # The value itself may contain @var@/${var} references; expand them.
        (result, missing_variable) = do_replacement(regex, result, variable_format, confdata)
        return result
    else:
        raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
def get_variable_regex(variable_format: str = 'meson') -> T.Pattern[str]:
    """Return the substitution regex for a configuration format.

    Matches @var@ (meson and cmake@) or ${var} (cmake), plus the escaped
    forms.  Only (a-z, A-Z, 0-9, _, -) are valid in a variable name, and
    '@' can be escaped as '\\@'.
    """
    if variable_format in ('meson', 'cmake@'):
        return re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
    if variable_format == 'cmake':
        return re.compile(r'(?:\\\\)+(?=\\?\$)|\\\${|\${([-a-zA-Z0-9_]+)}')
    raise MesonException(f'Format "{variable_format}" not handled')
def do_conf_str(src: str, data: list, confdata: 'ConfigurationData', variable_format: str,
                encoding: str = 'utf-8') -> T.Tuple[T.List[str], T.Set[str], bool]:
    """Substitute configuration values into the lines of *data*.

    Returns (result_lines, missing_variable_names, confdata_useless) where
    confdata_useless is True when confdata was empty and no substitution
    token was seen — callers use it to suggest the `copy:` kwarg.

    NOTE(review): *encoding* is accepted but unused here; it appears to be
    kept for call compatibility with do_conf_file — confirm.
    """
    def line_is_valid(line: str, fmt: str) -> bool:
        # A meson-format file must not contain cmake defines, and vice versa.
        if fmt == 'meson':
            return '#cmakedefine' not in line
        return '#mesondefine' not in line

    regex = get_variable_regex(variable_format)
    search_token = '#mesondefine' if variable_format == 'meson' else '#cmakedefine'

    result = []
    missing_variables: T.Set[str] = set()
    confdata_useless = not confdata.keys()
    for line in data:
        if line.startswith(search_token):
            confdata_useless = False
            line = do_define(regex, line, confdata, variable_format)
        else:
            if not line_is_valid(line, variable_format):
                raise MesonException(f'Format error in {src}: saw "{line.strip()}" when format set to "{variable_format}"')
            line, missing = do_replacement(regex, line, variable_format, confdata)
            missing_variables.update(missing)
            if missing:
                confdata_useless = False
        result.append(line)
    return result, missing_variables, confdata_useless
def do_conf_file(src: str, dst: str, confdata: 'ConfigurationData', variable_format: str,
                 encoding: str = 'utf-8') -> T.Tuple[T.Set[str], bool]:
    """Configure file *src* into *dst* using values from *confdata*.

    Returns the missing-variable set and 'confdata useless' flag from
    do_conf_str.  Raises MesonException on read or write failure.
    """
    try:
        # newline='' preserves the input file's original line endings.
        with open(src, encoding=encoding, newline='') as f:
            data = f.readlines()
    except Exception as e:
        raise MesonException(f'Could not read input file {src}: {e!s}')
    (result, missing_variables, confdata_useless) = do_conf_str(src, data, confdata, variable_format, encoding)
    # Write to a temporary sibling and replace dst only if content changed,
    # so unchanged outputs do not trigger rebuilds.
    dst_tmp = dst + '~'
    try:
        with open(dst_tmp, 'w', encoding=encoding, newline='') as f:
            f.writelines(result)
    except Exception as e:
        raise MesonException(f'Could not write output file {dst}: {e!s}')
    # Carry over the source file's permission bits.
    shutil.copymode(src, dst_tmp)
    replace_if_different(dst, dst_tmp)
    return missing_variables, confdata_useless
# Header block prepended to generated C/C++ configuration headers.
CONF_C_PRELUDE = '''/*
* Autogenerated by the Meson build system.
* Do not edit, your changes will be lost.
*/
#pragma once
'''
# Header block prepended to generated NASM configuration files.
CONF_NASM_PRELUDE = '''; Autogenerated by the Meson build system.
; Do not edit, your changes will be lost.
'''
def dump_conf_header(ofilename: str, cdata: 'ConfigurationData', output_format: str) -> None:
    """Write every entry of *cdata* to *ofilename* as a C or NASM header.

    Booleans become (#|%)define / (#|%)undef, ints and strings become plain
    defines.  The target file is only replaced when its content changed.
    """
    if output_format == 'c':
        prelude = CONF_C_PRELUDE
        prefix = '#'
    elif output_format == 'nasm':
        prelude = CONF_NASM_PRELUDE
        prefix = '%'
    else:
        raise MesonBugException(f'Undefined output_format: "{output_format}"')
    ofilename_tmp = ofilename + '~'
    with open(ofilename_tmp, 'w', encoding='utf-8') as ofile:
        ofile.write(prelude)
        # Sorted for deterministic output across runs.
        for k in sorted(cdata.keys()):
            (v, desc) = cdata.get(k)
            if desc:
                # Emit the description in the format's comment syntax.
                if output_format == 'c':
                    ofile.write('/* %s */\n' % desc)
                elif output_format == 'nasm':
                    for line in desc.split('\n'):
                        ofile.write('; %s\n' % line)
            if isinstance(v, bool):
                if v:
                    ofile.write(f'{prefix}define {k}\n\n')
                else:
                    ofile.write(f'{prefix}undef {k}\n\n')
            elif isinstance(v, (int, str)):
                ofile.write(f'{prefix}define {k} {v}\n\n')
            else:
                raise MesonException('Unknown data type in configuration file entry: ' + k)
    replace_if_different(ofilename, ofilename_tmp)
def replace_if_different(dst: str, dst_tmp: str) -> None:
    """Move *dst_tmp* over *dst* only when their contents differ.

    Leaving an identical destination untouched preserves its mtime and
    thereby prevents unnecessary rebuilds.
    """
    try:
        with open(dst, 'rb') as fdst, open(dst_tmp, 'rb') as ftmp:
            same = fdst.read() == ftmp.read()
    except FileNotFoundError:
        # Destination does not exist yet: definitely different.
        same = False
    if same:
        os.unlink(dst_tmp)
    else:
        os.replace(dst_tmp, dst)
def listify(item: T.Any, flatten: bool = True) -> T.List[T.Any]:
    '''
    Wrap anything that is not already a list in a list, preserving order.
    @flatten: Convert lists of lists to a flat list
    '''
    if not isinstance(item, list):
        return [item]
    out = []  # type: T.List[T.Any]
    for element in item:
        if flatten and isinstance(element, list):
            out.extend(listify(element, flatten=True))
        else:
            out.append(element)
    return out
def extract_as_list(dict_object: T.Dict[_T, _U], key: _T, pop: bool = False) -> T.List[_U]:
    '''
    Extracts all values from given dict_object and listifies them.
    With pop=True the key is removed from the dict as well.
    '''
    getter = dict_object.pop if pop else dict_object.get
    return listify(getter(key, []), flatten=True)
def typeslistify(item: 'T.Union[_T, T.Sequence[_T]]',
                 types: 'T.Union[T.Type[_T], T.Tuple[T.Type[_T]]]') -> T.List[_T]:
    '''
    Ensure that type(@item) is one of @types or a
    list of items all of which are of type @types
    '''
    if isinstance(item, types):
        # A single value is simply wrapped in a list.
        return T.cast(T.List[_T], [item])
    if not isinstance(item, list):
        raise MesonException('Item must be a list or one of {!r}, not {!r}'.format(types, type(item)))
    for element in item:
        # None is tolerated inside the list.
        if element is not None and not isinstance(element, types):
            raise MesonException('List item must be one of {!r}, not {!r}'.format(types, type(element)))
    return item
def stringlistify(item: T.Union[T.Any, T.Sequence[T.Any]]) -> T.List[str]:
    """Wrap a single string in a list, or validate a list of strings."""
    return typeslistify(item, str)
def expand_arguments(args: T.Iterable[str]) -> T.Optional[T.List[str]]:
    """Expand '@file' response-file arguments into their contents.

    Returns the expanded argument list, or None when a response file
    cannot be read.
    """
    expanded = []  # type: T.List[str]
    for arg in args:
        if not arg.startswith('@'):
            expanded.append(arg)
            continue
        args_file = arg[1:]
        try:
            with open(args_file, encoding='utf-8') as f:
                expanded.extend(f.read().split())
        except Exception as e:
            mlog.error('Expanding command line arguments:', args_file, 'not found')
            mlog.exception(e)
            return None
    return expanded
def partition(pred: T.Callable[[_T], object], iterable: T.Iterable[_T]) -> T.Tuple[T.Iterator[_T], T.Iterator[_T]]:
    """Split *iterable* into (false entries, true entries) by *pred*.
    >>> x, y = partition(is_odd, range(10))
    >>> (list(x), list(y))
    ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
    """
    left, right = tee(iterable)
    return filterfalse(pred, left), filter(pred, right)
def Popen_safe(args: T.List[str], write: T.Optional[str] = None,
               stdout: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
               stderr: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
               **kwargs: T.Any) -> T.Tuple['subprocess.Popen[str]', str, str]:
    """Run a subprocess and return (process, stdout text, stderr text).

    Falls back to manual byte decoding when the console is not UTF-8.
    """
    import locale
    encoding = locale.getpreferredencoding()
    # Redirect stdin to DEVNULL otherwise the command run by us here might mess
    # up the console and ANSI colors will stop working on Windows.
    if 'stdin' not in kwargs:
        kwargs['stdin'] = subprocess.DEVNULL
    if not sys.stdout.encoding or encoding.upper() != 'UTF-8':
        # Console encoding cannot be trusted: decode manually with 'replace'.
        p, o, e = Popen_safe_legacy(args, write=write, stdout=stdout, stderr=stderr, **kwargs)
    else:
        p = subprocess.Popen(args, universal_newlines=True, close_fds=False,
                             stdout=stdout, stderr=stderr, **kwargs)
        o, e = p.communicate(write)
    # Sometimes the command that we run will call another command which will be
    # without the above stdin workaround, so set the console mode again just in
    # case.
    mlog.setup_console()
    return p, o, e
def Popen_safe_legacy(args: T.List[str], write: T.Optional[str] = None,
                      stdout: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
                      stderr: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
                      **kwargs: T.Any) -> T.Tuple['subprocess.Popen[str]', str, str]:
    """Run *args* in binary mode and decode stdout/stderr by hand.

    Decoding errors are replaced and Windows line endings normalised,
    which makes this safe on consoles with untrustworthy encodings.
    """
    proc = subprocess.Popen(args, universal_newlines=False, close_fds=False,
                            stdout=stdout, stderr=stderr, **kwargs)
    stdin_data = None  # type: T.Optional[bytes]
    if write is not None:
        stdin_data = write.encode('utf-8')
    raw_out, raw_err = proc.communicate(stdin_data)

    def _decode(raw: T.Optional[bytes], stream_encoding: T.Optional[str]) -> T.Optional[str]:
        # Decode with the stream's encoding when known, normalise CRLF.
        if raw is None:
            return None
        if stream_encoding:
            text = raw.decode(encoding=stream_encoding, errors='replace')
        else:
            text = raw.decode(errors='replace')
        return text.replace('\r\n', '\n')

    o = _decode(raw_out, sys.stdout.encoding)
    e = _decode(raw_err, sys.stderr.encoding)
    return proc, o, e
def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.Optional[str]:
    '''
    Search every string of @initer for each regular expression in
    @regexiter and return the first matching substring, or None when
    nothing matches.  Non-string entries in @initer are skipped.
    '''
    for pattern in regexiter:
        for candidate in initer:
            if not isinstance(candidate, str):
                continue
            found = re.search(pattern, candidate)
            if found:
                return found.group()
    return None
def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> None:
    """Validate the templates used in *command* against *values*.

    Raises MesonException when a template refers to inputs/outputs that
    were not provided, or to a higher index than exists.
    """
    # Error checking
    inregex = ['@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@'] # type: T.List[str]
    outregex = ['@OUTPUT([0-9]+)?@', '@OUTDIR@'] # type: T.List[str]
    if '@INPUT@' not in values:
        # Error out if any input-derived templates are present in the command
        match = iter_regexin_iter(inregex, command)
        if match:
            raise MesonException(f'Command cannot have {match!r}, since no input files were specified')
    else:
        if len(values['@INPUT@']) > 1:
            # Error out if @PLAINNAME@ or @BASENAME@ is present in the command
            match = iter_regexin_iter(inregex[1:], command)
            if match:
                raise MesonException(f'Command cannot have {match!r} when there is '
                                     'more than one input file')
        # Error out if an invalid @INPUTnn@ template was specified
        for each in command:
            if not isinstance(each, str):
                continue
            match2 = re.search(inregex[0], each)
            if match2 and match2.group() not in values:
                m = 'Command cannot have {!r} since there are only {!r} inputs'
                raise MesonException(m.format(match2.group(), len(values['@INPUT@'])))
    if '@OUTPUT@' not in values:
        # Error out if any output-derived templates are present in the command
        match = iter_regexin_iter(outregex, command)
        if match:
            raise MesonException(f'Command cannot have {match!r} since there are no outputs')
    else:
        # Error out if an invalid @OUTPUTnn@ template was specified
        for each in command:
            if not isinstance(each, str):
                continue
            match2 = re.search(outregex[0], each)
            if match2 and match2.group() not in values:
                m = 'Command cannot have {!r} since there are only {!r} outputs'
                raise MesonException(m.format(match2.group(), len(values['@OUTPUT@'])))
def substitute_values(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> T.List[str]:
    '''
    Substitute the template strings in the @values dict into the list of
    strings @command and return a new list. For a full list of the templates,
    see get_filenames_templates_dict()
    If multiple inputs/outputs are given in the @values dictionary, we
    substitute @INPUT@ and @OUTPUT@ only if they are the entire string, not
    just a part of it, and in that case we substitute *all* of them.
    The typing of this function is difficult, as only @OUTPUT@ and @INPUT@ can
    be lists, everything else is a string. However, TypeDict cannot represent
    this, as you can have optional keys, but not extra keys. We end up just
    having to use asserts to convince type checkers that this is okay.
    https://github.com/python/mypy/issues/4617
    '''
    def replace(m: T.Match[str]) -> str:
        # All templates other than @INPUT@/@OUTPUT@ map to plain strings.
        v = values[m.group(0)]
        assert isinstance(v, str), 'for mypy'
        return v
    # Error checking
    _substitute_values_check_errors(command, values)
    # Substitution
    outcmd = [] # type: T.List[str]
    # One combined regex for every simple (string-valued) template.
    rx_keys = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')]
    value_rx = re.compile('|'.join(rx_keys)) if rx_keys else None
    for vv in command:
        more: T.Optional[str] = None
        if not isinstance(vv, str):
            # Pass non-string entries through untouched.
            outcmd.append(vv)
        elif '@INPUT@' in vv:
            inputs = values['@INPUT@']
            if vv == '@INPUT@':
                # Bare @INPUT@ expands to the entire input list.
                outcmd += inputs
            elif len(inputs) == 1:
                outcmd.append(vv.replace('@INPUT@', inputs[0]))
            else:
                raise MesonException("Command has '@INPUT@' as part of a "
                                     "string and more than one input file")
        elif '@OUTPUT@' in vv:
            outputs = values['@OUTPUT@']
            if vv == '@OUTPUT@':
                outcmd += outputs
            elif len(outputs) == 1:
                outcmd.append(vv.replace('@OUTPUT@', outputs[0]))
            else:
                raise MesonException("Command has '@OUTPUT@' as part of a "
                                     "string and more than one output file")
        # Append values that are exactly a template string.
        # This is faster than a string replace.
        elif vv in values:
            o = values[vv]
            assert isinstance(o, str), 'for mypy'
            more = o
        # Substitute everything else with replacement
        elif value_rx:
            more = value_rx.sub(replace, vv)
        else:
            more = vv
        if more is not None:
            outcmd.append(more)
    return outcmd
def get_filenames_templates_dict(inputs: T.List[str], outputs: T.List[str]) -> T.Dict[str, T.Union[str, T.List[str]]]:
    '''
    Build a dictionary mapping template strings to their values:
    @INPUT@ - the full path to one or more input files, from @inputs
    @OUTPUT@ - the full path to one or more output files, from @outputs
    @OUTDIR@ - the full path to the directory containing the output files
    With exactly one input, @PLAINNAME@ (input filename) and @BASENAME@
    (filename without extension) are added too.
    @INPUT0@, @INPUT1@, ... and @OUTPUT0@, @OUTPUT1@, ... are created for
    every individual input/output file.
    '''
    values = {}  # type: T.Dict[str, T.Union[str, T.List[str]]]
    if inputs:
        values['@INPUT@'] = inputs
        for idx, path in enumerate(inputs):
            values[f'@INPUT{idx}@'] = path
        if len(inputs) == 1:
            plain = os.path.basename(inputs[0])
            values['@PLAINNAME@'] = plain
            values['@BASENAME@'] = os.path.splitext(plain)[0]
    if outputs:
        values['@OUTPUT@'] = outputs
        for idx, path in enumerate(outputs):
            values[f'@OUTPUT{idx}@'] = path
        # All outputs share one directory; many external programs fail on
        # empty arguments, so an empty dirname becomes '.'.
        values['@OUTDIR@'] = os.path.dirname(outputs[0]) or '.'
    return values
def _make_tree_writable(topdir: str) -> None:
    # Ensure all files and directories under topdir are writable
    # (and readable) by owner.
    for d, _, files in os.walk(topdir):
        os.chmod(d, os.stat(d).st_mode | stat.S_IWRITE | stat.S_IREAD)
        for fname in files:
            fpath = os.path.join(d, fname)
            # Only chmod regular files (isfile follows symlinks).
            if os.path.isfile(fpath):
                os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
def windows_proof_rmtree(f: str) -> None:
    """Recursively delete *f*, retrying with increasing delays.

    On Windows a file held open by any process (e.g. an anti-virus
    scanner) cannot be deleted; the only fix is to try again and again.
    """
    # Sleep times between attempts, in seconds.
    delays = [0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2]
    writable = False
    for d in delays:
        try:
            # Start by making the tree writable.
            if not writable:
                _make_tree_writable(f)
                writable = True
        except PermissionError:
            time.sleep(d)
            continue
        try:
            shutil.rmtree(f)
            return
        except FileNotFoundError:
            # Already gone: nothing to do.
            return
        except OSError:
            time.sleep(d)
    # Try one last time and throw if it fails.
    shutil.rmtree(f)
def windows_proof_rm(fpath: str) -> None:
    """Like windows_proof_rmtree, but for a single file."""
    if os.path.isfile(fpath):
        # Make sure the file is deletable before trying.
        os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
    # Sleep times between attempts, in seconds.
    delays = [0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2]
    for d in delays:
        try:
            os.unlink(fpath)
            return
        except FileNotFoundError:
            # Already gone: nothing to do.
            return
        except OSError:
            time.sleep(d)
    # Final attempt; raises on failure.
    os.unlink(fpath)
class TemporaryDirectoryWinProof(TemporaryDirectory):
    """
    Like TemporaryDirectory, but cleans things up using
    windows_proof_rmtree()
    """
    def __exit__(self, exc: T.Any, value: T.Any, tb: T.Any) -> None:
        try:
            super().__exit__(exc, value, tb)
        except OSError:
            # Fall back to the retrying Windows-safe delete.
            windows_proof_rmtree(self.name)
    def cleanup(self) -> None:
        try:
            super().cleanup()
        except OSError:
            windows_proof_rmtree(self.name)
def detect_subprojects(spdir_name: str, current_dir: str = '',
                       result: T.Optional[T.Dict[str, T.List[str]]] = None) -> T.Optional[T.Dict[str, T.List[str]]]:
    """Recursively collect subprojects below *current_dir*/*spdir_name*.

    Returns a mapping from subproject name to the list of paths
    (directories and/or .wrap files) that provide it.
    """
    if result is None:
        result = {}
    spdir = os.path.join(current_dir, spdir_name)
    if not os.path.exists(spdir):
        return result
    for trial in glob(os.path.join(spdir, '*')):
        basename = os.path.basename(trial)
        # BUG FIX: 'trial' is a full path, so the comparison must use the
        # basename; otherwise the packagecache dir is never skipped and
        # gets reported as a subproject.
        if basename == 'packagecache':
            continue
        append_this = True
        if os.path.isdir(trial):
            # Recurse so nested subproject dirs are picked up too.
            detect_subprojects(spdir_name, trial, result)
        elif trial.endswith('.wrap') and os.path.isfile(trial):
            # A wrap file provides the subproject named after the file.
            basename = os.path.splitext(basename)[0]
        else:
            append_this = False
        if append_this:
            result.setdefault(basename, []).append(trial)
    return result
def substring_is_in_list(substr: str, strlist: T.List[str]) -> bool:
    """Return True when any element of *strlist* contains *substr*."""
    return any(substr in entry for entry in strlist)
class OrderedSet(T.MutableSet[_T]):
    """A set that remembers first-insertion order.

    Membership tests, iteration order and removals all follow the order
    in which elements were first added.
    """
    def __init__(self, iterable: T.Optional[T.Iterable[_T]] = None):
        # An OrderedDict whose values are all None doubles as an ordered
        # set.  typing.OrderedDict requires 3.7.2, so the annotation uses
        # MutableMapping instead, which is fine in this case.
        self.__items = collections.OrderedDict()  # type: T.MutableMapping[_T, None]
        if iterable:
            self.update(iterable)

    def __contains__(self, value: object) -> bool:
        return value in self.__items

    def __iter__(self) -> T.Iterator[_T]:
        return iter(self.__items.keys())

    def __len__(self) -> int:
        return len(self.__items)

    def __repr__(self) -> str:
        # An empty set prints as 'OrderedSet()', never 'OrderedSet("")'.
        if not self.__items:
            return 'OrderedSet()'
        return 'OrderedSet("{}")'.format(
            '", "'.join(repr(e) for e in self.__items.keys()))

    def __reversed__(self) -> T.Iterator[_T]:
        # Reversal is well defined because this set is ordered; mypy only
        # knows about unordered sets, hence the ignore.
        return reversed(self.__items.keys())  # type: ignore

    def add(self, value: _T) -> None:
        self.__items[value] = None

    def discard(self, value: _T) -> None:
        # pop with default: no error when the value is absent.
        self.__items.pop(value, None)

    def move_to_end(self, value: _T, last: bool = True) -> None:
        # move_to_end is OrderedDict-specific, not part of MutableMapping.
        self.__items.move_to_end(value, last)  # type: ignore

    def pop(self, last: bool = True) -> _T:
        # popitem's 'last' argument is OrderedDict-specific as well.
        item, _ = self.__items.popitem(last)  # type: ignore
        return item

    def update(self, iterable: T.Iterable[_T]) -> None:
        for element in iterable:
            self.__items[element] = None

    def difference(self, set_: T.Union[T.Set[_T], 'OrderedSet[_T]']) -> 'OrderedSet[_T]':
        return type(self)(e for e in self if e not in set_)
def relpath(path: str, start: str) -> str:
    """os.path.relpath that returns *path* unchanged when no relative form exists."""
    # On Windows a relative path can't be evaluated for paths on two different
    # drives (i.e. c:\foo and f:\bar). The only thing left to do is to use the
    # original absolute path.
    try:
        return os.path.relpath(path, start)
    except (TypeError, ValueError):
        return path
def path_is_in_root(path: Path, root: Path, resolve: bool = False) -> bool:
    """Return True when *path* lies inside the directory *root*.

    With resolve=True, symlinks and '..' are resolved on both paths first.
    """
    candidate, base = (path.resolve(), root.resolve()) if resolve else (path, root)
    try:
        candidate.relative_to(base)
    except ValueError:
        return False
    return True
def relative_to_if_possible(path: Path, root: Path, resolve: bool = False) -> Path:
    """Return *path* relative to *root*, or *path* unchanged when impossible."""
    candidate, base = (path.resolve(), root.resolve()) if resolve else (path, root)
    try:
        return candidate.relative_to(base)
    except ValueError:
        # Not below root (or different drive): keep the original path.
        return path
class LibType(enum.IntEnum):
    """Enumeration for library types."""
    SHARED = 0
    STATIC = 1
    # NOTE(review): PREFER_* presumably allow falling back to the other
    # kind when the preferred one is unavailable — confirm at call sites.
    PREFER_SHARED = 2
    PREFER_STATIC = 3
class ProgressBarFallback: # lgtm [py/iter-returns-non-self]
    '''
    Fallback progress bar implementation when tqdm is not found
    Since this class is not an actual iterator, but only provides a minimal
    fallback, it is safe to ignore the 'Iterator does not return self from
    __iter__ method' warning.
    '''
    def __init__(self, iterable: T.Optional[T.Iterable[str]] = None, total: T.Optional[int] = None,
                 bar_type: T.Optional[str] = None, desc: T.Optional[str] = None):
        if iterable is not None:
            # Iterator mode: wrap the iterable, print no progress at all.
            self.iterable = iter(iterable)
            return
        self.total = total
        self.done = 0
        self.printed_dots = 0
        if self.total and bar_type == 'download':
            print('Download size:', self.total)
        if desc:
            print(f'{desc}: ', end='')
    # Pretend to be an iterator when called as one and don't print any
    # progress
    def __iter__(self) -> T.Iterator[str]:
        return self.iterable
    def __next__(self) -> str:
        return next(self.iterable)
    def print_dot(self) -> None:
        # One dot per progress step, flushed immediately so it shows up.
        print('.', end='')
        sys.stdout.flush()
        self.printed_dots += 1
    def update(self, progress: int) -> None:
        self.done += progress
        if not self.total:
            # Just print one dot per call if we don't have a total length
            self.print_dot()
            return
        # Print one dot per 10% of completed progress.
        ratio = int(self.done / self.total * 10)
        while self.printed_dots < ratio:
            self.print_dot()
    def close(self) -> None:
        print('')
try:
    from tqdm import tqdm
except ImportError:
    # tqdm is optional; fall back to the dot-printing implementation above.
    # ideally we would use a typing.Protocol here, but it's part of typing_extensions until 3.8
    ProgressBar = ProgressBarFallback # type: T.Union[T.Type[ProgressBarFallback], T.Type[ProgressBarTqdm]]
else:
    class ProgressBarTqdm(tqdm):
        # Thin tqdm wrapper that picks defaults depending on the bar type.
        def __init__(self, *args: T.Any, bar_type: T.Optional[str] = None, **kwargs: T.Any) -> None:
            if bar_type == 'download':
                # Download bars count bytes and stay visible when finished.
                kwargs.update({'unit': 'bytes', 'leave': True})
            else:
                kwargs.update({'leave': False})
            kwargs['ncols'] = 100
            super().__init__(*args, **kwargs)
    ProgressBar = ProgressBarTqdm
class RealPathAction(argparse.Action):
    """argparse action that canonicalises its value to an absolute real path."""
    def __init__(self, option_strings: T.List[str], dest: str, default: str = '.', **kwargs: T.Any):
        # Canonicalise the default as well, so namespace values are uniform.
        super().__init__(option_strings, dest, nargs=None,
                         default=os.path.abspath(os.path.realpath(default)), **kwargs)
    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
                 values: T.Union[str, T.Sequence[T.Any], None], option_string: str = None) -> None:
        assert isinstance(values, str)
        setattr(namespace, self.dest, os.path.abspath(os.path.realpath(values)))
def get_wine_shortpath(winecmd: T.List[str], wine_paths: T.Sequence[str]) -> str:
    """Return a shortened version of @wine_paths to avoid reaching the
    WINEPATH character limit.
    """
    # De-duplicate while preserving order.
    wine_paths = list(OrderedSet(wine_paths))
    # Temporary batch script that echoes the 8.3 short form of each path.
    getShortPathScript = '%s.bat' % str(uuid.uuid4()).lower()[:5]
    with open(getShortPathScript, mode='w', encoding='utf-8') as f:
        f.write("@ECHO OFF\nfor %%x in (%*) do (\n echo|set /p=;%~sx\n)\n")
        f.flush()
    try:
        with open(os.devnull, 'w', encoding='utf-8') as stderr:
            wine_path = subprocess.check_output(
                winecmd +
                ['cmd', '/C', getShortPathScript] + wine_paths,
                stderr=stderr).decode('utf-8')
    except subprocess.CalledProcessError as e:
        # Fall back to the unshortened paths on failure.
        print("Could not get short paths: %s" % e)
        wine_path = ';'.join(wine_paths)
    finally:
        os.remove(getShortPathScript)
    if len(wine_path) > 2048:
        raise MesonException(
            'WINEPATH size {} > 2048'
            ' this will cause random failure.'.format(
                len(wine_path)))
    return wine_path.strip(';')
def run_once(func: T.Callable[..., _T]) -> T.Callable[..., _T]:
    """Decorator caching the first call's result for all later calls.

    Note: arguments of subsequent calls are ignored.
    """
    cached = []  # type: T.List[_T]
    @wraps(func)
    def wrapper(*args: T.Any, **kwargs: T.Any) -> _T:
        if not cached:
            cached.append(func(*args, **kwargs))
        return cached[0]
    return wrapper
def generate_list(func: T.Callable[..., T.Generator[_T, None, None]]) -> T.Callable[..., T.List[_T]]:
    """Decorator that materialises a generator function's output as a list."""
    @wraps(func)
    def as_list(*args: T.Any, **kwargs: T.Any) -> T.List[_T]:
        return list(func(*args, **kwargs))
    return as_list
class OptionOverrideProxy(collections.abc.MutableMapping):
    '''Mimic an option list but transparently override selected option
    values.
    '''
    # TODO: the typing here could be made more explicit using a TypeDict from
    # python 3.8 or typing_extensions
    def __init__(self, overrides: T.Dict['OptionKey', T.Any], *options: 'KeyedOptionDictType'):
        self.overrides = overrides.copy()
        self.options: T.Dict['OptionKey', UserOption] = {}
        for o in options:
            # Later dicts win on key collisions.
            self.options.update(o)
    def __getitem__(self, key: 'OptionKey') -> T.Union['UserOption']:
        if key in self.options:
            opt = self.options[key]
            if key in self.overrides:
                # Copy so setting the override does not mutate the shared option.
                opt = copy.copy(opt)
                opt.set_value(self.overrides[key])
            return opt
        raise KeyError('Option not found', key)
    def __setitem__(self, key: 'OptionKey', value: T.Union['UserOption']) -> None:
        # Only the raw value is stored as an override; the option object
        # itself stays in self.options.
        self.overrides[key] = value.value
    def __delitem__(self, key: 'OptionKey') -> None:
        del self.overrides[key]
    def __iter__(self) -> T.Iterator['OptionKey']:
        return iter(self.options)
    def __len__(self) -> int:
        return len(self.options)
    def copy(self) -> 'OptionOverrideProxy':
        return OptionOverrideProxy(self.overrides.copy(), self.options.copy())
class OptionType(enum.IntEnum):
    """Enum used to specify what kind of argument a thing is."""
    # Classification of a given key is done by _classify_argument() below.
    BUILTIN = 0
    BACKEND = 1
    BASE = 2
    COMPILER = 3
    PROJECT = 4
# This is copied from coredata. There is no way to share this, because this
# is used in the OptionKey constructor, and the coredata lists are
# OptionKeys...
# Keep this set in sync with the builtin option list in coredata.
_BUILTIN_NAMES = {
    'prefix',
    'bindir',
    'datadir',
    'includedir',
    'infodir',
    'libdir',
    'libexecdir',
    'localedir',
    'localstatedir',
    'mandir',
    'sbindir',
    'sharedstatedir',
    'sysconfdir',
    'auto_features',
    'backend',
    'buildtype',
    'debug',
    'default_library',
    'errorlogs',
    'install_umask',
    'layout',
    'optimization',
    'stdsplit',
    'strip',
    'unity',
    'unity_size',
    'warning_level',
    'werror',
    'wrap_mode',
    'force_fallback_for',
    'pkg_config_path',
    'cmake_prefix_path',
}
def _classify_argument(key: 'OptionKey') -> OptionType:
    """Classify arguments into groups so we know which dict to assign them to."""
    if key.name.startswith('b_'):
        return OptionType.BASE
    elif key.lang is not None:
        # Any key carrying a language is a per-compiler option.
        return OptionType.COMPILER
    elif key.name in _BUILTIN_NAMES or key.module:
        return OptionType.BUILTIN
    elif key.name.startswith('backend_'):
        # Backend and project options only exist for the host machine.
        assert key.machine is MachineChoice.HOST, str(key)
        return OptionType.BACKEND
    else:
        assert key.machine is MachineChoice.HOST, str(key)
        return OptionType.PROJECT
@total_ordering
class OptionKey:
    """Represents an option key in the various option dictionaries.
    This provides a flexible, powerful way to map option names from their
    external form (things like subproject:build.option) to something that
    is internally easier to reason about and produce.
    """
    # Instances are immutable: __setattr__ is blocked, all state is set
    # through object.__setattr__.
    __slots__ = ['name', 'subproject', 'machine', 'lang', '_hash', 'type', 'module']
    name: str
    subproject: str
    machine: MachineChoice
    lang: T.Optional[str]
    _hash: int
    type: OptionType
    module: T.Optional[str]
    def __init__(self, name: str, subproject: str = '',
                 machine: MachineChoice = MachineChoice.HOST,
                 lang: T.Optional[str] = None,
                 module: T.Optional[str] = None,
                 _type: T.Optional[OptionType] = None):
        # the _type option to the constructor is kinda private. We want to be
        # able to save the state and avoid the lookup function when
        # pickling/unpickling, but we need to be able to calculate it when
        # constructing a new OptionKey
        object.__setattr__(self, 'name', name)
        object.__setattr__(self, 'subproject', subproject)
        object.__setattr__(self, 'machine', machine)
        object.__setattr__(self, 'lang', lang)
        object.__setattr__(self, 'module', module)
        # Hash is precomputed once, which is safe because of immutability.
        object.__setattr__(self, '_hash', hash((name, subproject, machine, lang, module)))
        if _type is None:
            _type = _classify_argument(self)
        object.__setattr__(self, 'type', _type)
    def __setattr__(self, key: str, value: T.Any) -> None:
        raise AttributeError('OptionKey instances do not support mutation.')
    def __getstate__(self) -> T.Dict[str, T.Any]:
        # Includes '_type' so unpickling skips _classify_argument().
        return {
            'name': self.name,
            'subproject': self.subproject,
            'machine': self.machine,
            'lang': self.lang,
            '_type': self.type,
            'module': self.module,
        }
    def __setstate__(self, state: T.Dict[str, T.Any]) -> None:
        """De-serialize the state of a pickle.
        This is very clever. __init__ is not a constructor, it's an
        initializer, therefore it's safe to call more than once. We create a
        state in the custom __getstate__ method, which is valid to pass
        splatted to the initializer.
        """
        # Mypy doesn't like this, because it's so clever.
        self.__init__(**state) # type: ignore
    def __hash__(self) -> int:
        return self._hash
    def _to_tuple(self) -> T.Tuple[str, OptionType, str, str, MachineChoice, str]:
        # Canonical comparison key; None lang/module compare as ''.
        return (self.subproject, self.type, self.lang or '', self.module or '', self.machine, self.name)
    def __eq__(self, other: object) -> bool:
        if isinstance(other, OptionKey):
            return self._to_tuple() == other._to_tuple()
        return NotImplemented
    def __lt__(self, other: object) -> bool:
        if isinstance(other, OptionKey):
            return self._to_tuple() < other._to_tuple()
        return NotImplemented
    def __str__(self) -> str:
        # Rebuild the external form: [subproject:][build.|module.][lang_]name
        out = self.name
        if self.lang:
            out = f'{self.lang}_{out}'
        if self.machine is MachineChoice.BUILD:
            out = f'build.{out}'
        if self.module:
            out = f'{self.module}.{out}'
        if self.subproject:
            out = f'{self.subproject}:{out}'
        return out
    def __repr__(self) -> str:
        return f'OptionKey({self.name!r}, {self.subproject!r}, {self.machine!r}, {self.lang!r}, {self.module!r}, {self.type!r})'
    @classmethod
    def from_string(cls, raw: str) -> 'OptionKey':
        """Parse the raw command line format into a three part tuple.
        This takes strings like `mysubproject:build.myoption` and creates an
        OptionKey out of them.
        """
        try:
            subproject, raw2 = raw.split(':')
        except ValueError:
            # No (or more than one) colon: treat the whole thing as the name.
            subproject, raw2 = '', raw
        module = None
        for_machine = MachineChoice.HOST
        try:
            prefix, raw3 = raw2.split('.')
            if prefix == 'build':
                for_machine = MachineChoice.BUILD
            else:
                module = prefix
        except ValueError:
            raw3 = raw2
        from ..compilers import all_languages
        if any(raw3.startswith(f'{l}_') for l in all_languages):
            lang, opt = raw3.split('_', 1)
        else:
            lang, opt = None, raw3
        assert ':' not in opt
        assert '.' not in opt
        return cls(opt, subproject, for_machine, lang, module)
    def evolve(self, name: T.Optional[str] = None, subproject: T.Optional[str] = None,
               machine: T.Optional[MachineChoice] = None, lang: T.Optional[str] = '',
               module: T.Optional[str] = '') -> 'OptionKey':
        """Create a new copy of this key, but with altered members.
        For example:
        >>> a = OptionKey('foo', '', MachineChoice.Host)
        >>> b = OptionKey('foo', 'bar', MachineChoice.Host)
        >>> b == a.evolve(subproject='bar')
        True
        """
        # We have to be a little clever with lang here, because lang is valid
        # as None, for non-compiler options — '' is the "not given" sentinel.
        return OptionKey(
            name if name is not None else self.name,
            subproject if subproject is not None else self.subproject,
            machine if machine is not None else self.machine,
            lang if lang != '' else self.lang,
            module if module != '' else self.module
        )
    def as_root(self) -> 'OptionKey':
        """Convenience method for key.evolve(subproject='')."""
        return self.evolve(subproject='')
    def as_build(self) -> 'OptionKey':
        """Convenience method for key.evolve(machine=MachineChoice.BUILD)."""
        return self.evolve(machine=MachineChoice.BUILD)
    def as_host(self) -> 'OptionKey':
        """Convenience method for key.evolve(machine=MachineChoice.HOST)."""
        return self.evolve(machine=MachineChoice.HOST)
    def is_backend(self) -> bool:
        """Convenience method to check if this is a backend option."""
        return self.type is OptionType.BACKEND
    def is_builtin(self) -> bool:
        """Convenience method to check if this is a builtin option."""
        return self.type is OptionType.BUILTIN
    def is_compiler(self) -> bool:
        """Convenience method to check if this is a compiler option."""
        return self.type is OptionType.COMPILER
    def is_project(self) -> bool:
        """Convenience method to check if this is a project option."""
        return self.type is OptionType.PROJECT
    def is_base(self) -> bool:
        """Convenience method to check if this is a base option."""
        return self.type is OptionType.BASE
| 35.29842 | 152 | 0.605006 |
7755e6ba3b1155e42300f6f4eaf873559545f7ef | 229 | py | Python | motif/minibatch.py | NThande/matched-motif | 3d5338a5db8c8ae69d42c3141d81a8812cd82bd7 | [
"MIT"
] | 1 | 2019-11-06T00:53:58.000Z | 2019-11-06T00:53:58.000Z | motif/minibatch.py | NThande/matched-motif | 3d5338a5db8c8ae69d42c3141d81a8812cd82bd7 | [
"MIT"
] | null | null | null | motif/minibatch.py | NThande/matched-motif | 3d5338a5db8c8ae69d42c3141d81a8812cd82bd7 | [
"MIT"
] | null | null | null | ## FUTURE WORK
# Split the audio into disjoint 30-second super-segments
# Analyze each segment separately
# Merge the final labels
def run_minibatch():
    # Placeholder: mini-batch segmentation (see FUTURE WORK notes above)
    # is not implemented yet.
    return
def main():
    # Entry point stub.
    return
if __name__ == '__main__':
    main()
| 15.266667 | 56 | 0.69869 |
1d073f994ce8378a7818af00ba4f5af039ded11a | 12,855 | py | Python | test/test_make_bdv.py | martinschorb/pybdv | 4c28b13ebc306f8dba77e7ec964cee485c9c4b6b | [
"MIT"
] | null | null | null | test/test_make_bdv.py | martinschorb/pybdv | 4c28b13ebc306f8dba77e7ec964cee485c9c4b6b | [
"MIT"
] | null | null | null | test/test_make_bdv.py | martinschorb/pybdv | 4c28b13ebc306f8dba77e7ec964cee485c9c4b6b | [
"MIT"
] | null | null | null | import os
import unittest
from abc import ABC
from shutil import rmtree
import numpy as np
from pybdv import has_dask, make_bdv, make_bdv_from_dask_array
from pybdv.util import get_key, open_file, n5_file
if has_dask:
import dask
class MakeBdvTestMixin(ABC):
tmp_folder = './tmp'
xml_path = './tmp/test.xml'
    def _make_bdv(self, data, *args, **kwargs):
        # Indirection point so subclasses can substitute another converter.
        make_bdv(data, *args, **kwargs)
    def setUp(self):
        # Fresh scratch directory for every test.
        os.makedirs(self.tmp_folder, exist_ok=True)
    def tearDown(self):
        # Best-effort cleanup; the folder may already be gone.
        try:
            rmtree(self.tmp_folder)
        except OSError:
            pass
    def _test_simple(self, shape, affine=None):
        """Convert random float32 data of *shape* and check it round-trips."""
        data = np.random.rand(*shape).astype('float32')
        self._make_bdv(data, self.out_path, affine=affine)
        # Scale-0 dataset of setup 0 / timepoint 0 must hold the input data.
        key = get_key(self.is_h5, timepoint=0, setup_id=0, scale=0)
        self.assertTrue(os.path.exists(self.out_path))
        with open_file(self.out_path, 'r') as f:
            self.assertTrue(key in f)
            ds = f[key]
            self.assertEqual(ds.shape, shape)
            out_data = ds[:]
        self.assertTrue(np.allclose(data, out_data))
    def test_simple(self):
        # Smoke test: cubic volume, no affine transformation.
        shape = (100, 100, 100)
        self._test_simple(shape)
    def test_affine(self):
        """Check that a passed affine transformation ends up in the XML."""
        from pybdv.metadata import get_affine
        shape = (100, 100, 100)
        affine = np.random.rand(12).tolist()
        # Round to match the precision written to the XML.
        affine = [round(aff, 4) for aff in affine]
        self._test_simple(shape, affine)
        affine_out = get_affine(self.xml_path, 0)['affine0']
        self.assertEqual(affine, affine_out)
    def test_multi_setup(self):
        """Write several setups (explicit and implicit ids) and verify data
        and affine transformations for each of them."""
        from pybdv.metadata import get_affine
        shape = (64,) * 3
        n_views = 2
        data_dict = {}
        affine_dict = {}
        for vid in range(n_views):
            data = np.random.rand(*shape).astype('float32')
            affine = {'trafo1': [round(aff, 4) for aff in np.random.rand(12)],
                      'trafo2': [round(aff, 4) for aff in np.random.rand(12)]}
            self._make_bdv(data, self.out_path, setup_id=vid, affine=affine)
            data_dict[vid] = data
            affine_dict[vid] = affine
        # check implicit setup id
        data = np.random.rand(*shape).astype('float32')
        self._make_bdv(data, self.out_path)
        data_dict[n_views] = data
        for vid in range(n_views + 1):
            expected_key = get_key(self.is_h5, timepoint=0, setup_id=vid, scale=0)
            with open_file(self.out_path, 'r') as f:
                self.assertTrue(expected_key in f)
                data = f[expected_key][:]
            exp_data = data_dict[vid]
            self.assertTrue(np.allclose(data, exp_data))
        # check affine trafos (only for explicit setup-ids)
        for vid in range(n_views):
            affine = affine_dict[vid]
            affine_out = get_affine(self.xml_path, vid)
            self.assertEqual(affine, affine_out)
def test_multi_timepoint(self):
from pybdv.metadata import get_time_range
n_timepoints = 6
shape = (64,) * 3
tp_data = []
tp_setups = []
for tp in range(n_timepoints):
data = np.random.rand(*shape)
# make sure that we at least have 2 setup ids that agree
setup_id = np.random.randint(0, 20) if tp > 1 else 0
self._make_bdv(data, self.out_path, setup_id=setup_id, timepoint=tp)
tp_data.append(data)
tp_setups.append(setup_id)
tstart, tstop = get_time_range(self.xml_path)
self.assertEqual(tstart, 0)
self.assertEqual(tstop, n_timepoints - 1)
for tp in range(n_timepoints):
setup_id = tp_setups[tp]
tp_key = get_key(self.is_h5, timepoint=tp, setup_id=setup_id, scale=0)
with open_file(self.out_path, 'r') as f:
data = f[tp_key][:]
data_exp = tp_data[tp]
self.assertTrue(np.allclose(data, data_exp))
def _test_ds(self, shape, mode):
data = np.random.rand(*shape).astype('float32')
if mode in ("nearest", "interpolate") and not getattr(self, "supports_interpolation", True):
return
n_scales = 4
ndim = len(shape)
downscale_factors = n_scales * [[2] * ndim]
self._make_bdv(data, self.out_path, downscale_factors,
downscale_mode=mode)
exp_shape = shape
self.assertTrue(os.path.exists(self.out_path))
with open_file(self.out_path, 'r') as f:
for scale in range(n_scales):
key = get_key(self.is_h5, timepoint=0, setup_id=0, scale=scale)
self.assertTrue(key in f)
ds = f[key]
self.assertEqual(ds.shape, exp_shape)
exp_shape = tuple(sh // sf
for sh, sf in zip(exp_shape, downscale_factors[scale]))
def test_ds_nearest(self):
shape = (256,) * 3
self._test_ds(shape, 'nearest')
def test_ds_mean(self):
shape = (256,) * 3
self._test_ds(shape, 'mean')
def test_dtype(self):
if not self.is_h5:
return
shape = (128,) * 3
val = np.iinfo('uint16').max + 1
data = np.full(shape, val, dtype='uint32')
self._make_bdv(data, self.out_path, convert_dtype=False)
with open_file(self.out_path, 'r') as f:
key = get_key(self.is_h5, timepoint=0, setup_id=0, scale=0)
d = f[key][:]
self.assertTrue(np.array_equal(d, data))
with self.assertRaises(RuntimeError):
self._make_bdv(d, './tmp.test2.h5', convert_dtype=True)
def test_custom_chunks(self):
shape = (128,) * 3
chunks = (64, 42, 59)
data = np.random.rand(*shape)
self._make_bdv(data, self.out_path, chunks=chunks)
key = get_key(self.is_h5, timepoint=0, setup_id=0, scale=0)
with open_file(self.out_path, 'r') as f:
ds = f[key]
chunks_out = ds.chunks
d = ds[:]
self.assertEqual(chunks, chunks_out)
self.assertTrue(np.allclose(d, data))
def test_multi_threaded(self):
shape = (128,) * 3
chunks = (64,) * 3
data = np.random.rand(*shape)
scale_factors = 2 * [[2, 2, 2]]
self._make_bdv(data, self.out_path, chunks=chunks,
n_threads=4, downscale_factors=scale_factors)
key = get_key(self.is_h5, timepoint=0, setup_id=0, scale=0)
with open_file(self.out_path, 'r') as f:
d = f[key][:]
self.assertTrue(np.allclose(d, data))
def test_custom_attributes(self):
from pybdv.metadata import get_attributes
shape = (64,) * 3
data = np.random.rand(*shape)
chan_name = 'DAPI'
tile_name = 'some-tile'
angle_name = 'some-angle'
# write setup 0
self._make_bdv(data, self.out_path, setup_id=0,
attributes={'channel': {'id': None, 'name': chan_name},
'tile': {'id': 2, 'name': tile_name},
'angle': {'id': 0, 'name': angle_name}})
attrs_out = get_attributes(self.xml_path, 0)
attrs_exp = {'channel': {'id': 0, 'name': chan_name},
'tile': {'id': 2, 'name': tile_name},
'angle': {'id': 0, 'name': angle_name}}
self.assertEqual(attrs_out, attrs_exp)
# write setup 1
self._make_bdv(data, self.out_path, setup_id=None,
attributes={'channel': {'id': None},
'tile': {'id': 2},
'angle': {'id': 0}})
attrs_out = get_attributes(self.xml_path, 1)
attrs_exp = {'channel': {'id': 1},
'tile': {'id': 2, 'name': tile_name},
'angle': {'id': 0, 'name': angle_name}}
self.assertEqual(attrs_out, attrs_exp)
# write to setup 0 again with different timepoint
self._make_bdv(data, self.out_path, setup_id=0, timepoint=1,
attributes={'channel': {'id': None},
'tile': {'id': 2},
'angle': {'id': 0}})
attrs_out = get_attributes(self.xml_path, 0)
attrs_exp = {'channel': {'id': 0, 'name': chan_name},
'tile': {'id': 2, 'name': tile_name},
'angle': {'id': 0, 'name': angle_name}}
self.assertEqual(attrs_out, attrs_exp)
# write next setup id without specifying all attribute names
# -> should fail
with self.assertRaises(ValueError):
self._make_bdv(data, self.out_path, setup_id=None,
attributes={'channel': {'id': 5}, 'tile': {'id': 2}})
# write next setup id with a new attribute name
# -> should fail
with self.assertRaises(ValueError):
self._make_bdv(data, self.out_path, setup_id=None,
attributes={'channel': {'id': 5}, 'settings': {'id': 2}})
# write exisiting setup id with different attribute setup
# -> should fail
with self.assertRaises(ValueError):
self._make_bdv(data, self.out_path, setup_id=0, timepoint=2,
attributes={'channel': {'id': 5}, 'tile': {'id': 2}, 'angle': {'id': 0}})
def _test_overwrite(self, mode):
from pybdv.util import get_scale_factors, absolute_to_relative_scale_factors
from pybdv.metadata import get_attributes, get_affine
def _check(exp_data, exp_sf, exp_attrs, exp_affine):
key = get_key(self.is_h5, timepoint=0, setup_id=0, scale=0)
with open_file(self.out_path, 'r') as f:
data = f[key][:]
self.assertTrue(np.allclose(data, exp_data))
sf = get_scale_factors(self.out_path, setup_id=0)
sf = absolute_to_relative_scale_factors(sf)
self.assertEqual(sf, [[1, 1, 1]] + exp_sf)
attrs = get_attributes(self.xml_path, setup_id=0)
self.assertEqual(attrs, exp_attrs)
affine = get_affine(self.xml_path, setup_id=0, timepoint=0)['affine0']
self.assertTrue(np.allclose(np.array(affine), np.array(exp_affine), atol=1e-4))
shape1 = (64,) * 3
data1 = np.random.rand(*shape1)
sf1 = [[2, 2, 2]]
attrs1 = {'channel': {'id': 1}, 'angle': {'id': 2}}
affine1 = np.random.rand(12).tolist()
shape2 = (72,) * 3
data2 = np.random.rand(*shape2)
sf2 = [[1, 2, 2], [2, 2, 2]] if mode != 'metadata' else sf1
attrs2 = {'channel': {'id': 3}, 'angle': {'id': 6}}
affine2 = np.random.rand(12).tolist()
self._make_bdv(data1, self.out_path, setup_id=0, timepoint=0,
downscale_factors=sf1, attributes=attrs1,
affine=affine1)
_check(data1, sf1, attrs1, affine1)
self._make_bdv(data2, self.out_path, setup_id=0, timepoint=0,
downscale_factors=sf2, attributes=attrs2, affine=affine2,
overwrite=mode)
if mode == 'skip':
_check(data1, sf1, attrs1, affine1)
elif mode == 'all':
_check(data2, sf2, attrs2, affine2)
elif mode == 'data':
_check(data2, sf2, attrs1, affine1)
elif mode == 'metadata':
_check(data1, sf1, attrs2, affine2)
else:
raise ValueError("Invalid over-write mode")
def test_overwrite_skip(self):
self._test_overwrite('skip')
def test_overwrite_all(self):
self._test_overwrite('all')
def test_overwrite_metadata(self):
self._test_overwrite('metadata')
def test_overwrite_data(self):
self._test_overwrite('data')
class TestMakeBdvH5(MakeBdvTestMixin, unittest.TestCase):
out_path = './tmp/test.h5'
is_h5 = True
@unittest.skipIf(n5_file is None, "Need zarr or z5py for n5 support")
class TestMakeBdvN5(MakeBdvTestMixin, unittest.TestCase):
out_path = './tmp/test.n5'
is_h5 = False
@unittest.skipUnless(has_dask, "Need dask")
class TestMakeBdvDaskN5(MakeBdvTestMixin, unittest.TestCase):
out_path = './tmp/test.n5'
is_h5 = False
supports_interpolation = False
def _make_bdv(self, data, *args, **kwargs):
make_bdv_from_dask_array(dask.array.from_array(data), *args, **kwargs)
@unittest.skipUnless(has_dask, "Need dask")
class TestMakeBdvDaskZarr(MakeBdvTestMixin, unittest.TestCase):
out_path = './tmp/test.zarr'
is_h5 = False
supports_interpolation = False
def _make_bdv(self, data, *args, **kwargs):
make_bdv_from_dask_array(dask.array.from_array(data), *args, **kwargs)
if __name__ == '__main__':
unittest.main()
| 36.211268 | 100 | 0.572229 |
dd1ee76999e1b94eb8b854a6330b861170ccf515 | 4,686 | py | Python | dev_tools/notebooks/utils.py | a3ahmad/Cirq-cupy | 2d7641b8baf3ff7e506f02202b46ee0203dca190 | [
"Apache-2.0"
] | null | null | null | dev_tools/notebooks/utils.py | a3ahmad/Cirq-cupy | 2d7641b8baf3ff7e506f02202b46ee0203dca190 | [
"Apache-2.0"
] | null | null | null | dev_tools/notebooks/utils.py | a3ahmad/Cirq-cupy | 2d7641b8baf3ff7e506f02202b46ee0203dca190 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import glob
import re
import os
import subprocess
import tempfile
from logging import warning
from typing import Set, List
def list_all_notebooks() -> Set[str]:
"""Returns the relative paths to all notebooks in the git repo.
In case the folder is not a git repo, it returns an empty set.
"""
try:
output = subprocess.check_output(['git', 'ls-files', '*.ipynb'])
return set(output.decode('utf-8').splitlines())
except subprocess.CalledProcessError as ex:
warning("It seems that tests are not running in a git repo, skipping notebook tests", ex)
return set()
def filter_notebooks(all_notebooks: Set[str], skip_list: List[str]):
"""Returns the absolute path for notebooks except those that are skipped.
Args:
all_notebooks: set of interesting relative notebook paths.
skip_list: list of glob patterns. Notebooks matching any of these glob
in `all_notebooks` will not be returned.
Returns:
a sorted list of absolute paths to the notebooks that don't match any of
the `skip_list` glob patterns.
"""
skipped_notebooks = functools.reduce(
lambda a, b: a.union(b), list(set(glob.glob(g, recursive=True)) for g in skip_list)
)
# sorted is important otherwise pytest-xdist will complain that
# the workers have different parametrization:
# https://github.com/pytest-dev/pytest-xdist/issues/432
return sorted(os.path.abspath(n) for n in all_notebooks.difference(skipped_notebooks))
def rewrite_notebook(notebook_path):
"""Rewrtes a notebook given an extra file describing the replacements.
This rewrites a notebook of a given path, by looking for a file corresponding to the given
one, but with the suffix replaced with `.tst`.
The contents of this `.tst` file are then used as replacements
* Lines in this file without `->` are ignored.
* Lines in this file with `->` are split into two (if there are mulitple `->` it is an
error). The first of these is compiled into a pattern match, via `re.compile`, and
the second is the replacement for that match.
These replacements are then applied to the notebook_path and written to a new temporary
file.
All replacements must be used (this is enforced as it is easy to write a replacement rule
which does not match).
It is the responsibility of the caller of this method to delete the new file.
Returns:
Tuple of a file descriptor and the file path for the rewritten file. If no `.tst` file
was found, then the file descriptor is None and the path is `notebook_path`.
Raises:
AssertionError: If there are multiple `->` per line, or not all of the replacements
are used.
"""
notebook_test_path = os.path.splitext(notebook_path)[0] + '.tst'
if not os.path.exists(notebook_test_path):
return None, notebook_path
# Get the rewrite rules.
patterns = []
with open(notebook_test_path, 'r') as f:
for line in f:
if '->' in line:
parts = line.rstrip().split('->')
assert len(parts) == 2, f'Replacement lines may only contain one -> but was {line}'
patterns.append((re.compile(parts[0]), parts[1]))
used_patterns = set()
with open(notebook_path, 'r') as original_file:
new_file_descriptor, new_file_path = tempfile.mkstemp(suffix='.ipynb')
with open(new_file_path, 'w') as new_file:
for line in original_file:
new_line = line
for pattern, replacement in patterns:
new_line = pattern.sub(replacement, new_line)
if new_line != line:
used_patterns.add(pattern)
break
new_file.write(new_line)
assert len(patterns) == len(used_patterns), (
'Not all patterns where used. Patterns not used: '
f'{set(x for x, _ in patterns) - used_patterns}'
)
return new_file_descriptor, new_file_path
| 37.790323 | 99 | 0.671362 |
56400b69480cdfca91868932b5be74d18b69ff30 | 20,831 | py | Python | nova/network/model.py | bopopescu/nova-rocky-system-reader-role | 50a9d96f117b3c90aec214d1732f63fc6a1b98ea | [
"Apache-2.0"
] | null | null | null | nova/network/model.py | bopopescu/nova-rocky-system-reader-role | 50a9d96f117b3c90aec214d1732f63fc6a1b98ea | [
"Apache-2.0"
] | 2 | 2021-03-31T19:25:14.000Z | 2021-12-13T20:15:06.000Z | nova/network/model.py | bopopescu/nova-rocky-system-reader-role | 50a9d96f117b3c90aec214d1732f63fc6a1b98ea | [
"Apache-2.0"
] | 1 | 2020-07-22T22:15:29.000Z | 2020-07-22T22:15:29.000Z | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import netaddr
from oslo_serialization import jsonutils
import six
from nova import exception
from nova.i18n import _
from nova import utils
# Constants for the 'vif_type' field in VIF class
VIF_TYPE_OVS = 'ovs'
VIF_TYPE_IVS = 'ivs'
VIF_TYPE_DVS = 'dvs'
VIF_TYPE_IOVISOR = 'iovisor'
VIF_TYPE_BRIDGE = 'bridge'
VIF_TYPE_802_QBG = '802.1qbg'
VIF_TYPE_802_QBH = '802.1qbh'
VIF_TYPE_HW_VEB = 'hw_veb'
VIF_TYPE_HYPERV = 'hyperv'
VIF_TYPE_HOSTDEV = 'hostdev_physical'
VIF_TYPE_IB_HOSTDEV = 'ib_hostdev'
VIF_TYPE_MIDONET = 'midonet'
VIF_TYPE_VHOSTUSER = 'vhostuser'
VIF_TYPE_VROUTER = 'vrouter'
VIF_TYPE_OTHER = 'other'
VIF_TYPE_TAP = 'tap'
VIF_TYPE_MACVTAP = 'macvtap'
VIF_TYPE_AGILIO_OVS = 'agilio_ovs'
VIF_TYPE_BINDING_FAILED = 'binding_failed'
VIF_TYPE_VIF = 'vif'
# Constants for dictionary keys in the 'vif_details' field in the VIF
# class
VIF_DETAILS_PORT_FILTER = 'port_filter'
VIF_DETAILS_OVS_HYBRID_PLUG = 'ovs_hybrid_plug'
VIF_DETAILS_PHYSICAL_NETWORK = 'physical_network'
VIF_DETAILS_BRIDGE_NAME = 'bridge_name'
VIF_DETAILS_OVS_DATAPATH_TYPE = 'datapath_type'
# The following constant defines an SR-IOV related parameter in the
# 'vif_details'. 'profileid' should be used for VIF_TYPE_802_QBH
VIF_DETAILS_PROFILEID = 'profileid'
# The following constant defines an SR-IOV and macvtap related parameter in
# the 'vif_details'. 'vlan' should be used for VIF_TYPE_HW_VEB or
# VIF_TYPE_MACVTAP
VIF_DETAILS_VLAN = 'vlan'
# The following three constants define the macvtap related fields in
# the 'vif_details'.
VIF_DETAILS_MACVTAP_SOURCE = 'macvtap_source'
VIF_DETAILS_MACVTAP_MODE = 'macvtap_mode'
VIF_DETAILS_PHYS_INTERFACE = 'physical_interface'
# Constants for vhost-user related fields in 'vif_details'.
# Sets mode on vhost-user socket, valid values are 'client'
# and 'server'
VIF_DETAILS_VHOSTUSER_MODE = 'vhostuser_mode'
# vhost-user socket path
VIF_DETAILS_VHOSTUSER_SOCKET = 'vhostuser_socket'
# Specifies whether vhost-user socket should be plugged
# into ovs bridge. Valid values are True and False
VIF_DETAILS_VHOSTUSER_OVS_PLUG = 'vhostuser_ovs_plug'
# Specifies whether vhost-user socket should be used to
# create a fp netdevice interface.
VIF_DETAILS_VHOSTUSER_FP_PLUG = 'vhostuser_fp_plug'
# Specifies whether vhost-user socket should be used to
# create a vrouter netdevice interface
# TODO(mhenkel): Consider renaming this to be contrail-specific.
VIF_DETAILS_VHOSTUSER_VROUTER_PLUG = 'vhostuser_vrouter_plug'
# Constants for dictionary keys in the 'vif_details' field that are
# valid for VIF_TYPE_TAP.
VIF_DETAILS_TAP_MAC_ADDRESS = 'mac_address'
# Open vSwitch datapath types.
VIF_DETAILS_OVS_DATAPATH_SYSTEM = 'system'
VIF_DETAILS_OVS_DATAPATH_NETDEV = 'netdev'
# Define supported virtual NIC types. VNIC_TYPE_DIRECT and VNIC_TYPE_MACVTAP
# are used for SR-IOV ports
VNIC_TYPE_NORMAL = 'normal'
VNIC_TYPE_DIRECT = 'direct'
VNIC_TYPE_MACVTAP = 'macvtap'
VNIC_TYPE_DIRECT_PHYSICAL = 'direct-physical'
VNIC_TYPE_BAREMETAL = 'baremetal'
VNIC_TYPE_VIRTIO_FORWARDER = 'virtio-forwarder'
# Define list of ports which needs pci request.
# Note: The macvtap port needs a PCI request as it is a tap interface
# with VF as the lower physical interface.
# Note: Currently, VNIC_TYPE_VIRTIO_FORWARDER assumes a 1:1
# relationship with a VF. This is expected to change in the future.
VNIC_TYPES_SRIOV = (VNIC_TYPE_DIRECT, VNIC_TYPE_MACVTAP,
VNIC_TYPE_DIRECT_PHYSICAL, VNIC_TYPE_VIRTIO_FORWARDER)
# Define list of ports which are passthrough to the guest
# and need a special treatment on snapshot and suspend/resume
VNIC_TYPES_DIRECT_PASSTHROUGH = (VNIC_TYPE_DIRECT,
VNIC_TYPE_DIRECT_PHYSICAL)
# Constants for the 'vif_model' values
VIF_MODEL_VIRTIO = 'virtio'
VIF_MODEL_NE2K_PCI = 'ne2k_pci'
VIF_MODEL_PCNET = 'pcnet'
VIF_MODEL_RTL8139 = 'rtl8139'
VIF_MODEL_E1000 = 'e1000'
VIF_MODEL_E1000E = 'e1000e'
VIF_MODEL_NETFRONT = 'netfront'
VIF_MODEL_SPAPR_VLAN = 'spapr-vlan'
VIF_MODEL_LAN9118 = 'lan9118'
VIF_MODEL_SRIOV = 'sriov'
VIF_MODEL_VMXNET = 'vmxnet'
VIF_MODEL_VMXNET3 = 'vmxnet3'
VIF_MODEL_ALL = (
VIF_MODEL_VIRTIO,
VIF_MODEL_NE2K_PCI,
VIF_MODEL_PCNET,
VIF_MODEL_RTL8139,
VIF_MODEL_E1000,
VIF_MODEL_E1000E,
VIF_MODEL_NETFRONT,
VIF_MODEL_SPAPR_VLAN,
VIF_MODEL_LAN9118,
VIF_MODEL_SRIOV,
VIF_MODEL_VMXNET,
VIF_MODEL_VMXNET3,
)
# these types have been leaked to guests in network_data.json
LEGACY_EXPOSED_VIF_TYPES = (
VIF_TYPE_BRIDGE,
VIF_TYPE_DVS,
VIF_TYPE_HW_VEB,
VIF_TYPE_HYPERV,
VIF_TYPE_OVS,
VIF_TYPE_TAP,
VIF_TYPE_VHOSTUSER,
VIF_TYPE_VIF,
)
# Constant for max length of network interface names
# eg 'bridge' in the Network class or 'devname' in
# the VIF class
NIC_NAME_LEN = 14
class Model(dict):
"""Defines some necessary structures for most of the network models."""
def __repr__(self):
return jsonutils.dumps(self)
def _set_meta(self, kwargs):
# pull meta out of kwargs if it's there
self['meta'] = kwargs.pop('meta', {})
# update meta with any additional kwargs that may exist
self['meta'].update(kwargs)
def get_meta(self, key, default=None):
"""calls get(key, default) on self['meta']."""
return self['meta'].get(key, default)
class IP(Model):
"""Represents an IP address in Nova."""
def __init__(self, address=None, type=None, **kwargs):
super(IP, self).__init__()
self['address'] = address
self['type'] = type
self['version'] = kwargs.pop('version', None)
self._set_meta(kwargs)
# determine version from address if not passed in
if self['address'] and not self['version']:
try:
self['version'] = netaddr.IPAddress(self['address']).version
except netaddr.AddrFormatError:
msg = _("Invalid IP format %s") % self['address']
raise exception.InvalidIpAddressError(msg)
def __eq__(self, other):
keys = ['address', 'type', 'version']
return all(self[k] == other[k] for k in keys)
def __ne__(self, other):
return not self.__eq__(other)
def is_in_subnet(self, subnet):
if self['address'] and subnet['cidr']:
return (netaddr.IPAddress(self['address']) in
netaddr.IPNetwork(subnet['cidr']))
else:
return False
@classmethod
def hydrate(cls, ip):
if ip:
return cls(**ip)
return None
class FixedIP(IP):
"""Represents a Fixed IP address in Nova."""
def __init__(self, floating_ips=None, **kwargs):
super(FixedIP, self).__init__(**kwargs)
self['floating_ips'] = floating_ips or []
if not self['type']:
self['type'] = 'fixed'
def add_floating_ip(self, floating_ip):
if floating_ip not in self['floating_ips']:
self['floating_ips'].append(floating_ip)
def floating_ip_addresses(self):
return [ip['address'] for ip in self['floating_ips']]
@staticmethod
def hydrate(fixed_ip):
fixed_ip = FixedIP(**fixed_ip)
fixed_ip['floating_ips'] = [IP.hydrate(floating_ip)
for floating_ip in fixed_ip['floating_ips']]
return fixed_ip
def __eq__(self, other):
keys = ['address', 'type', 'version', 'floating_ips']
return all(self[k] == other[k] for k in keys)
def __ne__(self, other):
return not self.__eq__(other)
class Route(Model):
"""Represents an IP Route in Nova."""
def __init__(self, cidr=None, gateway=None, interface=None, **kwargs):
super(Route, self).__init__()
self['cidr'] = cidr
self['gateway'] = gateway
# FIXME(mriedem): Is this actually used? It's never set.
self['interface'] = interface
self._set_meta(kwargs)
@classmethod
def hydrate(cls, route):
route = cls(**route)
route['gateway'] = IP.hydrate(route['gateway'])
return route
class Subnet(Model):
"""Represents a Subnet in Nova."""
def __init__(self, cidr=None, dns=None, gateway=None, ips=None,
routes=None, **kwargs):
super(Subnet, self).__init__()
self['cidr'] = cidr
self['dns'] = dns or []
self['gateway'] = gateway
self['ips'] = ips or []
self['routes'] = routes or []
self['version'] = kwargs.pop('version', None)
self._set_meta(kwargs)
if self['cidr'] and not self['version']:
self['version'] = netaddr.IPNetwork(self['cidr']).version
def __eq__(self, other):
keys = ['cidr', 'dns', 'gateway', 'ips', 'routes', 'version']
return all(self[k] == other[k] for k in keys)
def __ne__(self, other):
return not self.__eq__(other)
def add_route(self, new_route):
if new_route not in self['routes']:
self['routes'].append(new_route)
def add_dns(self, dns):
if dns not in self['dns']:
self['dns'].append(dns)
def add_ip(self, ip):
if ip not in self['ips']:
self['ips'].append(ip)
def as_netaddr(self):
"""Convenient function to get cidr as a netaddr object."""
return netaddr.IPNetwork(self['cidr'])
@classmethod
def hydrate(cls, subnet):
subnet = cls(**subnet)
subnet['dns'] = [IP.hydrate(dns) for dns in subnet['dns']]
subnet['ips'] = [FixedIP.hydrate(ip) for ip in subnet['ips']]
subnet['routes'] = [Route.hydrate(route) for route in subnet['routes']]
subnet['gateway'] = IP.hydrate(subnet['gateway'])
return subnet
class Network(Model):
"""Represents a Network in Nova."""
def __init__(self, id=None, bridge=None, label=None,
subnets=None, **kwargs):
super(Network, self).__init__()
self['id'] = id
self['bridge'] = bridge
self['label'] = label
self['subnets'] = subnets or []
self._set_meta(kwargs)
def add_subnet(self, subnet):
if subnet not in self['subnets']:
self['subnets'].append(subnet)
@classmethod
def hydrate(cls, network):
if network:
network = cls(**network)
network['subnets'] = [Subnet.hydrate(subnet)
for subnet in network['subnets']]
return network
def __eq__(self, other):
keys = ['id', 'bridge', 'label', 'subnets']
return all(self[k] == other[k] for k in keys)
def __ne__(self, other):
return not self.__eq__(other)
class VIF8021QbgParams(Model):
"""Represents the parameters for a 802.1qbg VIF."""
def __init__(self, managerid, typeid, typeidversion, instanceid):
super(VIF8021QbgParams, self).__init__()
self['managerid'] = managerid
self['typeid'] = typeid
self['typeidversion'] = typeidversion
self['instanceid'] = instanceid
class VIF8021QbhParams(Model):
"""Represents the parameters for a 802.1qbh VIF."""
def __init__(self, profileid):
super(VIF8021QbhParams, self).__init__()
self['profileid'] = profileid
class VIF(Model):
"""Represents a Virtual Interface in Nova."""
def __init__(self, id=None, address=None, network=None, type=None,
details=None, devname=None, ovs_interfaceid=None,
qbh_params=None, qbg_params=None, active=False,
vnic_type=VNIC_TYPE_NORMAL, profile=None,
preserve_on_delete=False, **kwargs):
super(VIF, self).__init__()
self['id'] = id
self['address'] = address
self['network'] = network or None
self['type'] = type
self['details'] = details or {}
self['devname'] = devname
self['ovs_interfaceid'] = ovs_interfaceid
self['qbh_params'] = qbh_params
self['qbg_params'] = qbg_params
self['active'] = active
self['vnic_type'] = vnic_type
self['profile'] = profile
self['preserve_on_delete'] = preserve_on_delete
self._set_meta(kwargs)
def __eq__(self, other):
keys = ['id', 'address', 'network', 'vnic_type',
'type', 'profile', 'details', 'devname',
'ovs_interfaceid', 'qbh_params', 'qbg_params',
'active', 'preserve_on_delete']
return all(self[k] == other[k] for k in keys)
def __ne__(self, other):
return not self.__eq__(other)
def fixed_ips(self):
if self['network']:
return [fixed_ip for subnet in self['network']['subnets']
for fixed_ip in subnet['ips']]
else:
return []
def floating_ips(self):
return [floating_ip for fixed_ip in self.fixed_ips()
for floating_ip in fixed_ip['floating_ips']]
def labeled_ips(self):
"""Returns the list of all IPs
The return value looks like this flat structure::
{'network_label': 'my_network',
'network_id': 'n8v29837fn234782f08fjxk3ofhb84',
'ips': [{'address': '123.123.123.123',
'version': 4,
'type: 'fixed',
'meta': {...}},
{'address': '124.124.124.124',
'version': 4,
'type': 'floating',
'meta': {...}},
{'address': 'fe80::4',
'version': 6,
'type': 'fixed',
'meta': {...}}]
"""
if self['network']:
# remove unnecessary fields on fixed_ips
ips = [IP(**ip) for ip in self.fixed_ips()]
for ip in ips:
# remove floating ips from IP, since this is a flat structure
# of all IPs
del ip['meta']['floating_ips']
# add floating ips to list (if any)
ips.extend(self.floating_ips())
return {'network_label': self['network']['label'],
'network_id': self['network']['id'],
'ips': ips}
return []
def has_bind_time_event(self, migration):
"""Returns whether this VIF's network-vif-plugged external event will
be sent by Neutron at "bind-time" - in other words, as soon as the port
binding is updated. This is in the context of updating the port binding
to a host that already has the instance in a shutoff state - in
practice, this means reverting either a cold migration or a
non-same-host resize.
"""
return (self.is_hybrid_plug_enabled() and not
migration.is_same_host())
def is_hybrid_plug_enabled(self):
return self['details'].get(VIF_DETAILS_OVS_HYBRID_PLUG, False)
def is_neutron_filtering_enabled(self):
return self['details'].get(VIF_DETAILS_PORT_FILTER, False)
def get_physical_network(self):
phy_network = self['network']['meta'].get('physical_network')
if not phy_network:
phy_network = self['details'].get(VIF_DETAILS_PHYSICAL_NETWORK)
return phy_network
@classmethod
def hydrate(cls, vif):
vif = cls(**vif)
vif['network'] = Network.hydrate(vif['network'])
return vif
def get_netmask(ip, subnet):
"""Returns the netmask appropriate for injection into a guest."""
if ip['version'] == 4:
return str(subnet.as_netaddr().netmask)
return subnet.as_netaddr()._prefixlen
class NetworkInfo(list):
"""Stores and manipulates network information for a Nova instance."""
# NetworkInfo is a list of VIFs
def fixed_ips(self):
"""Returns all fixed_ips without floating_ips attached."""
return [ip for vif in self for ip in vif.fixed_ips()]
def floating_ips(self):
"""Returns all floating_ips."""
return [ip for vif in self for ip in vif.floating_ips()]
@classmethod
def hydrate(cls, network_info):
if isinstance(network_info, six.string_types):
network_info = jsonutils.loads(network_info)
return cls([VIF.hydrate(vif) for vif in network_info])
def wait(self, do_raise=True):
"""Wait for asynchronous call to finish."""
# There is no asynchronous call for this class, so this is a no-op
# here, but subclasses may override to provide asynchronous
# capabilities. Must be defined here in the parent class so that code
# which works with both parent and subclass types can reference this
# method.
pass
def json(self):
return jsonutils.dumps(self)
def get_bind_time_events(self, migration):
"""Returns whether any of our VIFs have "bind-time" events. See
has_bind_time_event() docstring for more details.
"""
return [('network-vif-plugged', vif['id'])
for vif in self if vif.has_bind_time_event(migration)]
def get_plug_time_events(self, migration):
"""Complementary to get_bind_time_events(), any event that does not
fall in that category is a plug-time event.
"""
return [('network-vif-plugged', vif['id'])
for vif in self if not vif.has_bind_time_event(migration)]
class NetworkInfoAsyncWrapper(NetworkInfo):
"""Wrapper around NetworkInfo that allows retrieving NetworkInfo
in an async manner.
This allows one to start querying for network information before
you know you will need it. If you have a long-running
operation, this allows the network model retrieval to occur in the
background. When you need the data, it will ensure the async
operation has completed.
As an example:
def allocate_net_info(arg1, arg2)
return call_neutron_to_allocate(arg1, arg2)
network_info = NetworkInfoAsyncWrapper(allocate_net_info, arg1, arg2)
[do a long running operation -- real network_info will be retrieved
in the background]
[do something with network_info]
"""
def __init__(self, async_method, *args, **kwargs):
super(NetworkInfoAsyncWrapper, self).__init__()
self._gt = utils.spawn(async_method, *args, **kwargs)
methods = ['json', 'fixed_ips', 'floating_ips']
for method in methods:
fn = getattr(self, method)
wrapper = functools.partial(self._sync_wrapper, fn)
functools.update_wrapper(wrapper, fn)
setattr(self, method, wrapper)
def _sync_wrapper(self, wrapped, *args, **kwargs):
"""Synchronize the model before running a method."""
self.wait()
return wrapped(*args, **kwargs)
def __getitem__(self, *args, **kwargs):
fn = super(NetworkInfoAsyncWrapper, self).__getitem__
return self._sync_wrapper(fn, *args, **kwargs)
def __iter__(self, *args, **kwargs):
fn = super(NetworkInfoAsyncWrapper, self).__iter__
return self._sync_wrapper(fn, *args, **kwargs)
def __len__(self, *args, **kwargs):
fn = super(NetworkInfoAsyncWrapper, self).__len__
return self._sync_wrapper(fn, *args, **kwargs)
def __str__(self, *args, **kwargs):
fn = super(NetworkInfoAsyncWrapper, self).__str__
return self._sync_wrapper(fn, *args, **kwargs)
def __repr__(self, *args, **kwargs):
fn = super(NetworkInfoAsyncWrapper, self).__repr__
return self._sync_wrapper(fn, *args, **kwargs)
def wait(self, do_raise=True):
"""Wait for asynchronous call to finish."""
if self._gt is not None:
try:
# NOTE(comstud): This looks funky, but this object is
# subclassed from list. In other words, 'self' is really
# just a list with a bunch of extra methods. So this
# line just replaces the current list (which should be
# empty) with the result.
self[:] = self._gt.wait()
except Exception:
if do_raise:
raise
finally:
self._gt = None
| 33.92671 | 79 | 0.640104 |
90e6c536bb9cc834777969a6050e7db1ba03dd97 | 24,104 | py | Python | calvin/runtime/south/transports/tests/test_calvin_transport.py | gabrielcercel/calvin-base | c0315f100643230d65aed1745e1c22df3e7a7c2c | [
"Apache-2.0"
] | 334 | 2015-06-04T15:14:28.000Z | 2022-02-09T11:14:17.000Z | calvin/runtime/south/transports/tests/test_calvin_transport.py | gabrielcercel/calvin-base | c0315f100643230d65aed1745e1c22df3e7a7c2c | [
"Apache-2.0"
] | 89 | 2015-06-13T19:15:35.000Z | 2019-12-03T19:23:20.000Z | calvin/runtime/south/transports/tests/test_calvin_transport.py | gabrielcercel/calvin-base | c0315f100643230d65aed1745e1c22df3e7a7c2c | [
"Apache-2.0"
] | 112 | 2015-06-06T19:16:54.000Z | 2020-10-19T01:27:55.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import sys
import os
import random
import time
import json
import uuid
import Queue
import multiprocessing
import traceback
from mock import Mock
from twisted.internet import reactor
from calvin.utilities import calvinlogger
from calvin.utilities.calvin_callback import CalvinCB, CalvinCBClass
from calvin.runtime.south.transports.calvinip import calvinip_transport
_log = calvinlogger.get_logger(__name__)
"""
@pytest.fixture(scope="session", autouse=True)
def cleanup(request):
def fin():
reactor.callFromThread(reactor.stop)
request.addfinalizer(fin)
print "hejsan"
"""
def slay(plist):
    """Forcefully terminate the given multiprocessing processes.

    Each process gets a polite terminate() first; if it is still alive
    after a short join it is SIGKILLed.  Afterwards any remaining child
    processes of this test runner are terminated as a last resort.
    """
    import signal
    for p in plist:
        if p.is_alive():
            p.terminate()
            # Give the process a short grace period to exit.
            p.join(timeout=.2)
            if p.is_alive():
                print "Warning: process %s still alive slay!!" % p._name
                os.kill(p.pid, signal.SIGKILL)
    time.sleep(.1)
    # The Manager process counts as one child, hence "> 1".
    if len(multiprocessing.active_children()) > 1:
        print "Error: children is still alive", multiprocessing.active_children()
        for a in multiprocessing.active_children():
            a.terminate()
class BaseTHandler(multiprocessing.Process):
def __init__(self, uri, outqueue, inqueue, timeout=5):
multiprocessing.Process.__init__(self)
self._timeout = timeout
self._item = None
self._uri = uri
self._outqueue = outqueue
self._inqueue = inqueue
self._running = False
def set_ttf(self, ttf):
self._ttf = ttf
def _return(self, test=False, variables={}, stack=None):
if stack is None:
stack = traceback.format_stack(limit=15)[:-1]
else:
stack = []
self._outqueue.put([test, stack, variables])
def _stop_reactor(self, timeout=False):
if timeout:
self.__timeout()
if self._item:
# Server not stopped fail
self._return(False, {'self._item': repr(self._item)})
self._running = False
print(reactor, reactor.running)
if reactor.running:
reactor.callLater(.1, reactor.stop)
def _read_thread(self):
print("%s - Read thread started" % self._name)
while self._running:
try:
cmd = self._inqueue.get(timeout=.1)
except:
continue
func = getattr(self, cmd[0])
print("Running: %s(%s, %s)" % (func.__name__, cmd[1], cmd[2]))
reactor.callFromThread(func, *cmd[1], **cmd[2])
print("%s - Read thread died" % self._name)
def start(self):
self._running = True
self.daemon = True
multiprocessing.Process.start(self)
def __timeout(self, command=None, *args):
print("Timeout in", self)
self._return("timeout", {command: args})
def _base_run(self):
# make it work with twisted py.test plugin also
reactor._started = False
print "timeout %s", self._timeout
reactor.callLater(self._timeout, self._stop_reactor, timeout=True)
reactor.callInThread(self._read_thread)
reactor.run()
def run(self):
self._base_run()
class TransportTestServerHandler(BaseTHandler):
    """Server-side transport test handler.

    Listens on the configured uri and reports lifecycle events
    (started/stopped/join results) back to the parent process.
    """

    def __init__(self, *args, **kwargs):
        self._name = "TestServerHandler"
        BaseTHandler.__init__(self, *args, **kwargs)
        self._tp = None

    def get_callbacks(self):
        """Callbacks handed to the transport factory on creation."""
        return {'server_started': [CalvinCB(self._server_started)],
                'server_stopped': [CalvinCB(self._server_stopped)],
                'join_failed': [CalvinCB(self._join_failed)],
                'peer_disconnected': [CalvinCB(self._peer_disconnected)],
                'peer_connected': [CalvinCB(self._peer_connected)]}

    def _data_received(self, *args):
        print("server_data_received", args)

    def _peer_connected(self, transport, uri):
        # Register per-transport callbacks once a peer connects.
        print("server_peer_connected", transport)
        transport.callback_register('join_finished', CalvinCB(self._join_finished))
        transport.callback_register('data_received', CalvinCB(self._data_received))

    def _join_failed(self, transport, _id, uri, is_orginator, reason):
        _log.debug("Server join failed on uri %s, reason %s", uri, reason)
        self._return('server_join_failed', {'transport': repr(transport), 'uri': uri, 'reason': reason})

    def _join_finished(self, transport, _id, uri, is_orginator):
        print("server_join_finished", transport, _id, uri)
        # First message asserts the join produced a coder/id/uri, the
        # second is the event the parent loop reacts on.
        self._return(transport._coder is not None and _id and uri, {'transport._coder': transport._coder , 'id': _id, 'uri': uri})
        self._return('server_join_finished', {'transport': repr(transport), '_id': _id, 'uri': uri})

    def _peer_disconnected(self, *args):
        print("server peer disconnected", args)

    def _server_stopped(self, *args):
        print("Server stopped", args)
        self._item = None
        self._outqueue.put(["server_stopped", repr(args)])
        # Die here ?
        self._stop_reactor()

    def _stop_server(self):
        print("_stop_server")
        self._item.stop()
        self._return(not self._item.is_listening())

    def stop(self):
        print("server_stop", self._item)
        if self._item:
            self._stop_server()
        # Timeout
        reactor.callLater(1, self._stop_reactor)

    def _server_started(self, server, port):
        print("Server started", server, port)
        self._item = server
        # put in queue: sanity-check the port, then report the event.
        self._return(port > 0 and port < 65536, {'port': port})
        self._return('server_started', port)

    def _start_server(self):
        self._tp = self._ttf.listen(self._uri)

    def run(self):
        print("start server")
        reactor.callLater(0, self._start_server)
        self._base_run()
        print("server finished")

    def _run_command(self, command, *args):
        # Fixed: previously called the undefined name ``comand`` and
        # scheduled the nonexistent attribute ``self.start_server``.
        command(*args)
        reactor.callLater(0, self._start_server)
class TransportTestClientHandler(BaseTHandler):
    """Client-side transport test handler.

    Joins a server at the configured uri/port and reports lifecycle
    events (connected/disconnected/join results/data) to the parent.
    """
    def __init__(self, *args, **kwargs):
        self._name = "TestClientHandler"
        self._port = None
        self._stop = False
        self._tp = None
        BaseTHandler.__init__(self, *args, **kwargs)
    def set_ttf(self, ttf):
        # Transport factory used to join the server.
        self._ttf = ttf
    def set_port(self, port):
        # Port is only known once the server reports it has started.
        print("set_port", port)
        self._port = port
    def get_callbacks(self):
        """Callbacks handed to the transport factory on creation."""
        return {'peer_disconnected': [CalvinCB(self._peer_disconnected)],
                'peer_connection_failed': [CalvinCB(self._connection_failed)],
                'join_failed': [CalvinCB(self._join_failed)],
                'peer_connected': [CalvinCB(self._peer_connected)]}
    def _data_received(self, data):
        print("client_data_received", data)
        self._return('client_data_received', {'data': data})
    def _peer_connected(self, transport, uri):
        # Register per-transport callbacks and remember the transport.
        print("client_peer_connected", transport)
        transport.callback_register('join_finished', CalvinCB(self._join_finished))
        transport.callback_register('data_received', CalvinCB(self._data_received))
        self._return('client_connected', {'transport': repr(transport), 'uri': uri})
        self._item = transport
    def _connection_failed(self, tp_link, uri, reason):
        _log.debug("Client connection failed on uri %s, reason %s", uri, reason)
        self._return('client_connection_failed', {'link': repr(tp_link), 'uri': uri, 'reason': reason})
    def _join_failed(self, transport, _id, uri, is_orginator, reason):
        _log.debug("Client join failed on uri %s, reason %s", uri, reason)
        self._return('client_join_failed', {'transport': repr(transport), 'uri': uri, 'reason': reason})
    def _join_finished(self, transport, _id, uri, is_orginator):
        print("client_join_finished", transport, _id, uri)
        # First message asserts the join produced a coder/id/uri, the
        # second is the event the parent loop reacts on.
        self._return(transport._coder is not None and _id and uri, {'transport._coder': transport._coder , 'id': _id, 'uri': uri})
        self._return('client_join_finished', {'transport': repr(transport), '_id': _id, 'uri': uri})
    def _peer_disconnected(self, transport, uri, reason):
        print("client_peer_disconnected", transport, uri, reason)
        #self._return(not self._item.is_connected(), variables={'is_connected': self._item.is_connected()})
        self._return('client_disconnected', {'transport': repr(transport), 'reason': reason, 'uri': uri})
        # If we have stop stop everything
        if self._stop:
            self._item = None
            self._stop_reactor()
    def _stop_client(self):
        # Disconnect; reactor teardown happens in _peer_disconnected.
        print("_stop_client(disconnect)")
        self._stop = True
        self._item.disconnect()
    def stop(self):
        print("client_stop", self._item)
        if self._item:
            self._stop_client()
        # Timeout
        reactor.callLater(1, self._stop_reactor)
    def _join(self):
        self._tp = self._ttf.join(self._uri)
    def run(self):
        print("start client")
        # Combine the base uri with the port learned from the server.
        self._uri = "%s:%s" % (self._uri, self._port)
        reactor.callLater(0, self._join)
        self._base_run()
        print("client finished")
class ConnectionFailed(Exception):
    """Raised when the client cannot establish a transport connection."""
class ServerJoinFailed(Exception):
    """Raised when the server side reports a failed join handshake."""
class ClientJoinFailed(Exception):
    """Raised when the client side reports a failed join handshake."""
# @pytest.mark.interactive
class TestTransportServer(object):
    """Tests for the server side of the calvinip transport."""
    @pytest.mark.essential
    def test_start_stop(self, monkeypatch):
        """Start a server handler process, then stop it and verify the
        expected start/stop events arrive on the result queue."""
        _mmanager = multiprocessing.Manager()
        # shqs[0]: results from the handler, shqs[1]: commands to it.
        shqs = [_mmanager.Queue(), _mmanager.Queue()]
        sh = TransportTestServerHandler("calvinip://localhost", shqs[0], shqs[1], timeout=2)
        ttf_uuid = str(uuid.uuid4())
        ttf = calvinip_transport.CalvinTransportFactory(ttf_uuid, ttf_uuid, sh.get_callbacks())
        sh.set_ttf(ttf)
        sh.start()
        error = None
        try:
            # Drain handler events until the server reports it stopped.
            while sh.is_alive():
                try:
                    mess = shqs[0].get(timeout=.3)
                    # print(mess)
                except:
                    continue
                if mess[0] == 'timeout':
                    print(mess[1])
                    raise Exception("Timeout: %s" % "\n".join(mess[1][11:]))
                elif mess[0] == 'server_started':
                    pass
                    shqs[1].put(['stop', [], {}])
                elif mess[0] == 'server_stopped':
                    break
                else:
                    # print mess
                    # Any message whose first element is falsy is a failed
                    # assertion reported by the handler process.
                    if not mess[0]:
                        for a in mess[1]:
                            print a,
                        for k,v in mess[2].items():
                            print "%s = %s" % (k, repr(v))
                        raise Exception("\n".join(mess[1][11:]))
        except Exception as e:
            import traceback
            traceback.print_exc()
            error = e
        shqs[1].put(['stop', [], {}])
        sh.join(timeout=.2)
        slay([sh])
        if error:
            pytest.fail(error)
    def test_callbacks(self, monkeypatch):
        # Placeholder; see test_start_stop.
        #self.test_start_stop(monkeypatch)
        pass
    def test_peer_connected(self, monkeypatch):
        # Placeholder: not yet implemented.
        pass
class TestTransportClient(object):
    """Tests for the client side of the calvinip transport.

    Each test spawns a server handler process and a client handler
    process, then drives an event loop over their result queues until the
    expected event (or failure) arrives.

    NOTE(review): ``pytest.fail(error)`` is passed an exception rather
    than a message, and ``traceback.print_exc(error)`` passes the
    exception where a ``limit`` int is expected -- both look like
    long-standing slips worth confirming.
    """
    test_nodes = 2
    @pytest.mark.essential
    def test_connect(self, monkeypatch):
        """Happy path: client connects and the join handshake finishes."""
        queues = []
        _mmanager = multiprocessing.Manager()
        # *qs[0]: results from the handler, *qs[1]: commands to it.
        shqs = [_mmanager.Queue(), _mmanager.Queue()]
        chqs = [_mmanager.Queue(), _mmanager.Queue()]
        sh = TransportTestServerHandler("calvinip://127.0.0.1", shqs[0], shqs[1], timeout=2)
        ch = TransportTestClientHandler("calvinip://127.0.0.1", chqs[0], chqs[1], timeout=2)
        ttfs_uuid = str(uuid.uuid4())
        ttfs = calvinip_transport.CalvinTransportFactory(ttfs_uuid, ttfs_uuid, sh.get_callbacks())
        ttfc_uuid = str(uuid.uuid4())
        ttfc = calvinip_transport.CalvinTransportFactory(ttfc_uuid, ttfc_uuid, ch.get_callbacks())
        sh.set_ttf(ttfs)
        ch.set_ttf(ttfc)
        sh.start()
        #ch.start()
        queues = [shqs, chqs]
        cstop = sstop = False
        stop = False
        error = None
        try:
            # Event loop: the client is only started once the server has
            # reported its listening port.
            while not stop:
                for q in queues:
                    try:
                        mess = q[0].get(timeout=.1)
                        #print(mess[0])
                    except:
                        continue
                    if mess[0] == 'timeout':
                        print(mess[1])
                        # TODO: terminate
                        raise Exception("Timeout: %s" % "\n".join(mess[1][11:]))
                    elif mess[0] == 'server_stopped':
                        print "Hej hej"
                        sstop = True
                        stop = (sstop and cstop)
                    elif mess[0] == 'server_started':
                        ch.set_port(mess[2])
                        ch.start()
                    elif mess[0] == 'client_disconnected':
                        if mess[2]['reason'] != "OK":
                            raise Exception("Did not disconnect cleanly")
                        cstop = True
                        stop = (sstop and cstop)
                    elif mess[0] == 'client_join_finished':
                        stop = True
                    elif mess[0] == 'client_join_failed':
                        raise ClientJoinFailed(str(mess[2]))
                    elif mess[0] == 'server_join_failed':
                        raise ServerJoinFailed(str(mess[2]))
                    elif mess[0] == 'client_connection_failed':
                        raise ConnectionFailed(str(mess[1:]))
                    else:
                        # print mess
                        # Falsy first element == failed assertion from the
                        # handler process; dump its stack and variables.
                        if not mess[0]:
                            for a in mess[1][11:-1]:
                                print a,
                            for k,v in mess[2].items():
                                print "%s = %s" % (k, repr(v))
                            raise Exception("\n".join(mess[1][11:]))
        except Exception as e:
            error = e
        # Always ask both handlers to stop before reaping them.
        for tq in queues:
            print(repr(tq))
            tq[1].put(['stop', [], {}])
        time.sleep(.2)
        slay([sh, ch])
        if error:
            pytest.fail(error)
    def test_connect_client_join_fail(self, monkeypatch):
        """Force the client-side join validator to fail and expect
        ClientJoinFailed to be raised by the event loop."""
        _mmanager = multiprocessing.Manager()
        queues = []
        shqs = [_mmanager.Queue(), _mmanager.Queue()]
        chqs = [_mmanager.Queue(), _mmanager.Queue()]
        sh = TransportTestServerHandler("calvinip://127.0.0.1", shqs[0], shqs[1])
        ch = TransportTestClientHandler("calvinip://127.0.0.1", chqs[0], chqs[1])
        ttfs_uuid = str(uuid.uuid4())
        ttfs = calvinip_transport.CalvinTransportFactory(ttfs_uuid, ttfs_uuid, sh.get_callbacks())
        ttfc_uuid = str(uuid.uuid4())
        ttfc = calvinip_transport.CalvinTransportFactory(ttfc_uuid, ttfc_uuid, ch.get_callbacks())
        sh.set_ttf(ttfs)
        ch.set_ttf(ttfc)
        # Make the client reject every join attempt.
        monkeypatch.setattr(ttfc, "_client_validator", lambda x: False)
        sh.start()
        queues = [shqs, chqs]
        cstop = sstop = False
        stop = False
        error = None
        try:
            while not stop:
                for q in queues:
                    try:
                        mess = q[0].get(timeout=.1)
                        #print(mess[0])
                    except:
                        continue
                    if mess[0] == 'timeout':
                        print(mess[1])
                        # TODO: terminate
                        raise Exception("Timeout: %s" % "\n".join(mess[1][11:]))
                    elif mess[0] == 'server_stopped':
                        print "Hej hej"
                        sstop = True
                        stop = (sstop and cstop)
                    elif mess[0] == 'server_started':
                        ch.set_port(mess[2])
                        ch.start()
                    elif mess[0] == 'client_disconnected':
                        cstop = True
                        stop = (sstop and cstop)
                    elif mess[0] == 'client_join_finished':
                        stop = True
                    elif mess[0] == 'client_join_failed':
                        raise ClientJoinFailed(str(mess[2]))
                    elif mess[0] == 'server_join_failed':
                        raise ServerJoinFailed(str(mess[2]))
                    elif mess[0] == 'client_connection_failed':
                        raise ConnectionFailed(str(mess[2]))
                    else:
                        # print mess
                        if not mess[0]:
                            for a in mess[1][11:-1]:
                                print a,
                            for k,v in mess[2].items():
                                print "%s = %s" % (k, repr(v))
                            raise Exception("\n".join(mess[1][11:]))
        except Exception as e:
            error = e
        for tq in queues:
            print(repr(tq))
            tq[1].put(['stop', [], {}])
        slay([sh, ch])
        # The test passes only if the loop raised ClientJoinFailed.
        if error:
            with pytest.raises(ClientJoinFailed):
                import traceback
                traceback.print_exc(error)
                raise error
        else:
            pytest.fail("No exception")
    def test_connect_server_join_fail(self, monkeypatch):
        """Force the server-side join validator to fail and expect
        ServerJoinFailed to be raised by the event loop."""
        _mmanager = multiprocessing.Manager()
        queues = []
        shqs = [_mmanager.Queue(), _mmanager.Queue()]
        chqs = [_mmanager.Queue(), _mmanager.Queue()]
        sh = TransportTestServerHandler("calvinip://127.0.0.1", shqs[0], shqs[1])
        ch = TransportTestClientHandler("calvinip://127.0.0.1", chqs[0], chqs[1])
        ttfs_uuid = str(uuid.uuid4())
        ttfs = calvinip_transport.CalvinTransportFactory(ttfs_uuid, ttfs_uuid, sh.get_callbacks())
        ttfc_uuid = str(uuid.uuid4())
        ttfc = calvinip_transport.CalvinTransportFactory(ttfc_uuid, ttfc_uuid, ch.get_callbacks())
        sh.set_ttf(ttfs)
        ch.set_ttf(ttfc)
        # Make the server reject every join attempt.
        monkeypatch.setattr(ttfs, "_client_validator", lambda x: False)
        sh.start()
        queues = [shqs, chqs]
        cstop = sstop = False
        stop = False
        error = None
        try:
            while not stop:
                for q in queues:
                    try:
                        mess = q[0].get(timeout=.1)
                        #print(mess[0])
                    except:
                        continue
                    if mess[0] == 'timeout':
                        print(mess[1])
                        # TODO: terminate
                        raise Exception("Timeout: %s" % "\n".join(mess[1][11:]))
                    elif mess[0] == 'server_stopped':
                        print "Hej hej"
                        sstop = True
                        stop = (sstop and cstop)
                    elif mess[0] == 'server_started':
                        ch.set_port(mess[2])
                        ch.start()
                    elif mess[0] == 'client_disconnected':
                        cstop = True
                        stop = (sstop and cstop)
                    elif mess[0] == 'client_join_finished':
                        stop = True
                    elif mess[0] == 'client_join_failed':
                        raise ClientJoinFailed(str(mess[2]))
                    elif mess[0] == 'server_join_failed':
                        raise ServerJoinFailed(str(mess[2]))
                    elif mess[0] == 'client_connection_failed':
                        raise ConnectionFailed(str(mess[2]))
                    else:
                        # print mess
                        if not mess[0]:
                            for a in mess[1][11:-1]:
                                print a,
                            for k,v in mess[2].items():
                                print "%s = %s" % (k, repr(v))
                            raise Exception("\n".join(mess[1][11:]))
        except Exception as e:
            error = e
        for tq in queues:
            print(repr(tq))
            tq[1].put(['stop', [], {}])
        slay([sh, ch])
        # The test passes only if the loop raised ServerJoinFailed.
        if error:
            with pytest.raises(ServerJoinFailed):
                import traceback
                traceback.print_exc(error)
                raise error
        else:
            pytest.fail("No exception")
    def test_connect_fail(self, monkeypatch):
        """Point the client at the wrong port (server port + 1) and expect
        ConnectionFailed to be raised by the event loop."""
        _mmanager = multiprocessing.Manager()
        queues = []
        shqs = [_mmanager.Queue(), _mmanager.Queue()]
        chqs = [_mmanager.Queue(), _mmanager.Queue()]
        sh = TransportTestServerHandler("calvinip://127.0.0.1", shqs[0], shqs[1])
        ch = TransportTestClientHandler("calvinip://127.0.0.1", chqs[0], chqs[1])
        ttfs_uuid = str(uuid.uuid4())
        ttfs = calvinip_transport.CalvinTransportFactory(ttfs_uuid, ttfs_uuid, sh.get_callbacks())
        ttfc_uuid = str(uuid.uuid4())
        ttfc = calvinip_transport.CalvinTransportFactory(ttfc_uuid, ttfc_uuid, ch.get_callbacks())
        sh.set_ttf(ttfs)
        ch.set_ttf(ttfc)
        sh.start()
        #ch.start()
        queues = [shqs, chqs]
        cstop = sstop = False
        stop = False
        error = None
        try:
            while not stop:
                for q in queues:
                    try:
                        mess = q[0].get(timeout=.1)
                        #print(mess[0])
                    except:
                        continue
                    if mess[0] == 'timeout':
                        print(mess[1])
                        # TODO: terminate
                        raise Exception("Timeout: %s" % "\n".join(mess[1][11:]))
                    elif mess[0] == 'server_stopped':
                        print "Hej hej"
                        sstop = True
                        stop = (sstop and cstop)
                    elif mess[0] == 'server_started':
                        # Deliberately connect to a port nothing listens on.
                        ch.set_port(str(int(mess[2])+1))
                        ch.start()
                    elif mess[0] == 'client_disconnected':
                        cstop = True
                        stop = (sstop and cstop)
                    elif mess[0] == 'client_join_finished':
                        stop = True
                    elif mess[0] == 'client_join_failed':
                        raise ClientJoinFailed(str(mess[2]))
                    elif mess[0] == 'server_join_failed':
                        raise ServerJoinFailed(str(mess[2]))
                    elif mess[0] == 'client_connection_failed':
                        raise ConnectionFailed(str(mess[2]))
                    else:
                        # print mess
                        if not mess[0]:
                            for a in mess[1][11:-1]:
                                print a,
                            for k,v in mess[2].items():
                                print "%s = %s" % (k, repr(v))
                            raise Exception("\n".join(mess[1][11:]))
        except Exception as e:
            error = e
        for tq in queues:
            print "hej", repr(tq)
            tq[1].put(['stop', [], {}])
        print sh, ch
        slay([sh, ch])
        # The test passes only if the loop raised ConnectionFailed.
        if error:
            with pytest.raises(ConnectionFailed):
                import traceback
                traceback.print_exc(error)
                raise error
        else:
            pytest.fail("No exception")
    def test_data(self, monkeypatch):
        # Placeholder: not yet implemented.
        pass
    def test_callback(self, monkeypatch):
        # Placeholder: not yet implemented.
        pass
| 35.188321 | 130 | 0.524685 |
e0649066f1167ce5a9748348c77ed0edcb1c7b53 | 2,787 | py | Python | mongodb/mongodb_consistent_backup/official/mongodb_consistent_backup/Notify/Nsca/Nsca.py | smthkissinger/docker-images | 35e868295d04fa780325ada4168381f1e80e8fe4 | [
"BSD-3-Clause"
] | 282 | 2016-06-16T14:41:44.000Z | 2022-03-02T03:43:02.000Z | mongodb/mongodb_consistent_backup/official/mongodb_consistent_backup/Notify/Nsca/Nsca.py | smthkissinger/docker-images | 35e868295d04fa780325ada4168381f1e80e8fe4 | [
"BSD-3-Clause"
] | 146 | 2016-06-16T08:55:45.000Z | 2020-09-08T10:37:32.000Z | mongodb/mongodb_consistent_backup/official/mongodb_consistent_backup/Notify/Nsca/Nsca.py | smthkissinger/docker-images | 35e868295d04fa780325ada4168381f1e80e8fe4 | [
"BSD-3-Clause"
] | 94 | 2016-06-16T10:49:07.000Z | 2022-03-28T09:14:03.000Z | import logging
import sys
from pynsca import NSCANotifier
from mongodb_consistent_backup.Errors import NotifyError, OperationError
from mongodb_consistent_backup.Pipeline import Task
class Nsca(Task):
def __init__(self, manager, config, timer, base_dir, backup_dir, **kwargs):
super(Nsca, self).__init__(self.__class__.__name__, manager, config, timer, base_dir, backup_dir, **kwargs)
self.server = self.config.notify.nsca.server
self.check_name = self.config.notify.nsca.check_name
self.check_host = self.config.notify.nsca.check_host
self.password = self.config.notify.nsca.password
self.success = 0
self.warning = 1
self.critical = 2
self.failed = self.critical
self.notifier = None
self.mode_type = ''
self.encryption = 1
if self.password:
self.mode_type = 'Secure '
self.encryption = 16
req_attrs = ['server', 'check_name', 'check_host']
for attr in req_attrs:
if not getattr(self, attr):
raise OperationError('NSCA notifier module requires attribute: %s!' % attr)
self.server_name = self.server
self.server_port = 5667
if ':' in self.server:
self.server_name, port = self.server.split(":")
self.server_port = int(port)
self.server = "%s:%i" % (self.server_name, self.server_port)
try:
self.notifier = NSCANotifier(
monitoring_server=self.server_name,
monitoring_port=self.server_port,
encryption_mode=self.encryption,
password=self.password
)
except Exception, e:
logging.error('Error initiating NSCANotifier! Error: %s' % e)
raise OperationError(e)
def close(self):
pass
def run(self, ret_code, output):
if self.notifier:
logging.info("Sending %sNSCA report to check host/name '%s/%s' at NSCA host %s" % (
self.mode_type,
self.check_host,
self.check_name,
self.server
))
logging.debug('NSCA report message: "%s", return code: %i, check host/name: "%s/%s"' % (output, ret_code, self.check_host, self.check_name))
# noinspection PyBroadException
try:
self.notifier.svc_result(self.check_host, self.check_name, ret_code, str(output))
logging.debug('Sent %sNSCA report to host %s' % (self.mode_type, self.server))
except Exception, e:
logging.error('Failed to send %sNSCA report to host %s: %s' % (self.mode_type, self.server, sys.exc_info()[1]))
raise NotifyError(e)
| 39.253521 | 152 | 0.598134 |
f37b8be56905dfbc3c7c439c95b415df337a1aae | 1,027 | py | Python | code/preprocessing.py | smmspiers/Feed-ForwardNN-CFAR-10 | de8d75b13e1a7a1955532064c49ced16a5b8f4a2 | [
"MIT"
] | null | null | null | code/preprocessing.py | smmspiers/Feed-ForwardNN-CFAR-10 | de8d75b13e1a7a1955532064c49ced16a5b8f4a2 | [
"MIT"
] | null | null | null | code/preprocessing.py | smmspiers/Feed-ForwardNN-CFAR-10 | de8d75b13e1a7a1955532064c49ced16a5b8f4a2 | [
"MIT"
] | null | null | null | import numpy as np
def minmax_initialisation(data):
return (data - np.min(data)) / (np.max(data) - np.min(data))
def get_pca_matrix(data, pov=0.9):
# Compute covariance matrix
covariance = np.cov(data, rowvar=False)
# Calculate eigen values and vectors
eigen_vals, eigen_vecs = np.linalg.eig(covariance)
# Get proportion of variance (pov) for each component
percens_of_var = eigen_vals / np.sum(eigen_vals)
# Calculate cumulative percentages
cum_percens_of_var = np.cumsum(percens_of_var)
# Calculate num of principal components which give desired pov
n_princ = np.count_nonzero(cum_percens_of_var < pov)
# Sort eigenvalues according to sorted eigenvalues
idx = np.argsort(eigen_vals)[::-1]
eigen_vecs = eigen_vecs[:, idx]
princ_eigh = eigen_vecs[:, :n_princ]
return princ_eigh
def reduce_dimensions(data, pca_matrix):
    """Project *data* onto the PCA axes without mutating the input.

    The original centered with ``data -= np.mean(data)``, silently
    modifying the caller's array in place (and failing on integer
    arrays); center on a copy instead.

    NOTE(review): the mean used here is the global scalar mean, not the
    per-feature (axis=0) mean that np.cov uses in get_pca_matrix --
    confirm whether axis=0 centering was intended.
    """
    # subtract mean from all data points (on a copy, not in place)
    centered = data - np.mean(data)
    # project points onto PCA axes
    return np.dot(centered, pca_matrix)
3c35ce6641cb567cbed262c84fb617531f61b8ce | 6,704 | py | Python | networkapi/api_environment/facade.py | juanaugusto/Serializer-Training-GloboNetworkAPI | 7ca9e176262551c4fda7275623af781276cc3f4a | [
"Apache-2.0"
] | null | null | null | networkapi/api_environment/facade.py | juanaugusto/Serializer-Training-GloboNetworkAPI | 7ca9e176262551c4fda7275623af781276cc3f4a | [
"Apache-2.0"
] | 3 | 2020-06-05T18:41:53.000Z | 2021-06-10T20:37:56.000Z | networkapi/api_environment/facade.py | juanaugusto/Serializer-Training-GloboNetworkAPI | 7ca9e176262551c4fda7275623af781276cc3f4a | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import logging
from django.core.exceptions import FieldError
from django.db.models import Q
from networkapi.equipamento.models import Equipamento
from networkapi.ambiente.models import Ambiente
from networkapi.ambiente.models import AmbienteError
from networkapi.ambiente.models import AmbienteNotFoundError
from networkapi.ambiente.models import AmbienteUsedByEquipmentVlanError
from networkapi.ambiente.models import EnvironmentErrorV3
from networkapi.api_environment_vip.facade import get_environmentvip_by_id
from networkapi.api_pools import exceptions
from networkapi.api_rest.exceptions import NetworkAPIException
from networkapi.api_rest.exceptions import ObjectDoesNotExistException
from networkapi.api_rest.exceptions import ValidationAPIException
from networkapi.infrastructure.datatable import build_query_to_datatable_v3
from networkapi.plugins.factory import PluginFactory
from networkapi.api_environment.tasks.flows import async_add_flow
log = logging.getLogger(__name__)
def get_environment_by_search(search=None):
    """Return a datatable dict of environments filtered by a search dict.

    :param search: datatable-style search/pagination dict (optional).
    :raises ValidationAPIException: on an invalid search field.
    :raises NetworkAPIException: on any other failure.
    """
    # Use a None sentinel instead of the original mutable default
    # argument (search=dict()).
    if search is None:
        search = dict()
    try:
        environments = Ambiente.objects.filter()
        env_map = build_query_to_datatable_v3(environments, search)
    except FieldError as e:
        raise ValidationAPIException(str(e))
    except Exception as e:
        raise NetworkAPIException(str(e))
    else:
        return env_map
def get_environment_by_id(environment_id):
    """Return one environment by primary key.

    Args:
        environment_id: Id of environment

    :raises exceptions.EnvironmentDoesNotExistException: unknown id.
    """
    try:
        environment = Ambiente.get_by_pk(id=environment_id)
    # Legacy `except E, e` replaced with the `as` form used elsewhere
    # in this module (required for Python 3 compatibility).
    except AmbienteNotFoundError as e:
        raise exceptions.EnvironmentDoesNotExistException(str(e))
    return environment
def get_environment_by_ids(environment_ids):
    """Return environment queryset for the given ids.

    Args:
        environment_ids: List of Ids of environments.

    :raises ObjectDoesNotExistException: when any id does not exist.
    :raises NetworkAPIException: on any other failure.
    """
    env_ids = list()
    for environment_id in environment_ids:
        try:
            # Validate each id exists before building the queryset.
            env_ids.append(get_environment_by_id(environment_id).id)
        # Legacy `except E, e` replaced with the `as` form used elsewhere
        # in this module (required for Python 3 compatibility).
        except exceptions.EnvironmentDoesNotExistException as e:
            raise ObjectDoesNotExistException(str(e))
        except Exception as e:
            raise NetworkAPIException(str(e))
    environments = Ambiente.objects.filter(id__in=env_ids)
    return environments
def list_environment_environment_vip_related(env_id=None):
    """List environments related to environment vips.

    Args:
        env_id: Id of an environment vip (optional); when omitted, all
            environments related to any environment vip are listed.

    :raises ObjectDoesNotExistException: when env_id does not exist.
    :raises NetworkAPIException: on any other failure.
    """
    try:
        if env_id is None:
            # Any environment whose networks reference some env vip.
            env_list_net_related = Ambiente.objects.filter(
                Q(vlan__networkipv4__ambient_vip__id__isnull=False) |
                Q(vlan__networkipv6__ambient_vip__id__isnull=False)
            )
        else:
            env_vip = get_environmentvip_by_id(env_id)
            env_list_net_related = Ambiente.objects.filter(
                Q(vlan__networkipv4__ambient_vip=env_vip) |
                Q(vlan__networkipv6__ambient_vip=env_vip)
            )
        env_list_net_related = env_list_net_related.order_by(
            'divisao_dc__nome', 'ambiente_logico__nome', 'grupo_l3__nome'
        ).select_related(
            'grupo_l3', 'ambiente_logico', 'divisao_dc', 'filter'
        ).distinct()
    # Legacy `except E, e` replaced with the `as` form used elsewhere
    # in this module (required for Python 3 compatibility).
    except ObjectDoesNotExistException as e:
        raise ObjectDoesNotExistException(str(e))
    except Exception as e:
        raise NetworkAPIException(str(e))
    return env_list_net_related
def update_environment(env):
    """Update an environment from a dict (must carry its 'id').

    :raises ValidationAPIException: on validation failure.
    :raises ObjectDoesNotExistException: when the id does not exist.
    :raises NetworkAPIException: on any other failure.
    """
    try:
        env_obj = get_environment_by_id(env.get('id'))
        env_obj.update_v3(env)
    # Legacy `except E, e` replaced with the `as` form used elsewhere
    # in this module (required for Python 3 compatibility).
    except EnvironmentErrorV3 as e:
        raise ValidationAPIException(str(e))
    except ValidationAPIException as e:
        raise ValidationAPIException(str(e))
    except exceptions.EnvironmentDoesNotExistException as e:
        raise ObjectDoesNotExistException(str(e))
    except Exception as e:
        raise NetworkAPIException(str(e))
    return env_obj
def create_environment(env):
    """Create an environment from a dict.

    :raises ValidationAPIException: on validation failure.
    :raises NetworkAPIException: on any other failure.
    """
    try:
        env_obj = Ambiente()
        env_obj.create_v3(env)
    # Legacy `except E, e` replaced with the `as` form used elsewhere
    # in this module (required for Python 3 compatibility).
    except EnvironmentErrorV3 as e:
        raise ValidationAPIException(str(e))
    except ValidationAPIException as e:
        raise ValidationAPIException(str(e))
    except Exception as e:
        raise NetworkAPIException(str(e))
    return env_obj
def delete_environment(env_ids):
    """Delete the environments with the given ids.

    :raises ValidationAPIException: when an environment is still in use.
    :raises ObjectDoesNotExistException: when an id does not exist.
    :raises NetworkAPIException: on any other failure.
    """
    for env_id in env_ids:
        try:
            env_obj = get_environment_by_id(env_id)
            env_obj.delete_v3()
        # Legacy `except E, e` replaced with the `as` form used elsewhere
        # in this module (required for Python 3 compatibility).
        except AmbienteUsedByEquipmentVlanError as e:
            raise ValidationAPIException(str(e))
        except exceptions.EnvironmentDoesNotExistException as e:
            raise ObjectDoesNotExistException(str(e))
        except AmbienteError as e:
            raise NetworkAPIException(str(e))
        except Exception as e:
            raise NetworkAPIException(str(e))
def get_controller_by_envid(env_id):
    """Return the (single) controller equipment of an environment.

    NOTE(review): ``equipamentoambiente__is_controller`` is filtered with
    ``env_id`` rather than ``True`` -- this looks like a copy/paste slip;
    confirm the intended filter value.
    """
    q_filter_environment = {
        'equipamentoambiente__ambiente': env_id,
        'equipamentoambiente__is_controller': env_id
    }
    return Equipamento.objects.filter(Q(**q_filter_environment)).uniqueResult()
def list_flows_by_envid(env_id, flow_id=0):
    """List flows from the environment's controller plugin.

    :param env_id: environment id whose controller is queried.
    :param flow_id: when > 0, fetch only that flow; otherwise all flows.
    :raises NetworkAPIException: when the plugin call fails.
    """
    eqpt = get_controller_by_envid(env_id)
    plugin = PluginFactory.factory(eqpt)
    try:
        if flow_id > 0:
            return plugin.get_flow(flow_id=flow_id)
        else:
            return plugin.get_flows()
    except Exception as e:
        log.error(e)
        # Fixed: the adjacent literals previously concatenated to
        # "...flow(s)plugin..." (missing space vs. the sibling messages).
        raise NetworkAPIException("Failed to list flow(s) "
                                  "plugin. %s" % e)
def insert_flow(env_id, data):
    """Queue an async task adding a flow via the environment's controller plugin."""
    plugin = PluginFactory.factory(get_controller_by_envid(env_id))
    try:
        return async_add_flow.apply_async(
            args=[plugin, data], queue="napi.odl_flow"
        )
    except Exception as e:
        log.error(e)
        raise NetworkAPIException("Failed to insert flow(s) plugin. %s" % e)
def delete_flow(env_id, flow_id):
    """Delete one flow via the environment's controller plugin."""
    plugin = PluginFactory.factory(get_controller_by_envid(env_id))
    try:
        return plugin.del_flow(flow_id=flow_id)
    except Exception as e:
        log.error(e)
        raise NetworkAPIException("Failed to delete flow plugin. %s" % e)
def flush_flows(env_id):
    """Remove all flows via the environment's controller plugin."""
    plugin = PluginFactory.factory(get_controller_by_envid(env_id))
    try:
        return plugin.flush_flows()
    except Exception as e:
        log.error(e)
        raise NetworkAPIException("Failed to flush Controller plugin. %s" % e)
88dae23b9f17a6f4f6ee7197c84672fe5772f719 | 1,462 | py | Python | questionary/constants.py | heeplr/questionary | 90af7c4cd345315c6ce424a5941f50054a6b68c3 | [
"MIT"
] | null | null | null | questionary/constants.py | heeplr/questionary | 90af7c4cd345315c6ce424a5941f50054a6b68c3 | [
"MIT"
] | null | null | null | questionary/constants.py | heeplr/questionary | 90af7c4cd345315c6ce424a5941f50054a6b68c3 | [
"MIT"
] | null | null | null | from prompt_toolkit.styles import Style
# Value to display as an answer when "affirming" a confirmation question
YES = "Yes"
# Value to display as an answer when "denying" a confirmation question
NO = "No"
# Instruction text for a confirmation question (yes is default)
YES_OR_NO = "(Y/n)"
# Instruction text for a confirmation question (no is default)
NO_OR_YES = "(y/N)"
# Instruction for multiline input
INSTRUCTION_MULTILINE = "(Finish with 'Alt+Enter' or 'Esc, Enter')\n>"
# Selection token used to indicate the selection cursor in a list
SELECTED_POINTER = "»"
# Item prefix to identify selected items in a checkbox list
INDICATOR_SELECTED = "●"
# Item prefix to identify unselected items in a checkbox list
INDICATOR_UNSELECTED = "○"
# Prefix displayed in front of questions
DEFAULT_QUESTION_PREFIX = "?"
# Message shown when a user aborts a question prompt using CTRL-C
DEFAULT_KBI_MESSAGE = "Cancelled by user"
# Default message style
DEFAULT_STYLE = Style(
[
("qmark", "fg:#5f819d"), # token in front of the question
("question", "bold"), # question text
("answer", "fg:#FF9D00 bold"), # submitted answer text behind the question
("pointer", ""), # pointer used in select and checkbox prompts
("selected", ""), # style for a selected item of a checkbox
("separator", ""), # separator in lists
("instruction", ""), # user instructions for select, rawselect, checkbox
]
)
| 32.488889 | 83 | 0.699042 |
f7bf8446d8ee34ab9565c17809b772320ad73a96 | 1,952 | py | Python | testbench/controller_test.py | DEFAULTTEXHAPb/SampleRateConverter | 880555ac486b5cf85a46118f3dc6c3c02ffc80c9 | [
"MIT"
] | null | null | null | testbench/controller_test.py | DEFAULTTEXHAPb/SampleRateConverter | 880555ac486b5cf85a46118f3dc6c3c02ffc80c9 | [
"MIT"
] | null | null | null | testbench/controller_test.py | DEFAULTTEXHAPb/SampleRateConverter | 880555ac486b5cf85a46118f3dc6c3c02ffc80c9 | [
"MIT"
] | null | null | null | import os
import os.path as path
import pytest
import cocotb as ctb
import cocotb_test.simulator as tester
class TB(object):
    """Empty testbench container (body not implemented yet)."""
class MicroBlaze():
    """Empty MicroBlaze model placeholder (body not implemented yet)."""
@ctb.test()
def controller_test(dut):
    """Cocotb test entry point for the controller DUT (not yet implemented)."""
    pass
# Directory containing this test module.
tests_dir = path.dirname(__file__)
# Absolute path to the project's HDL sources (../hdl relative to the tests).
hdl_dir = path.abspath(path.join(tests_dir, '..', 'hdl'))
@pytest.mark.skipif(os.getenv("SIM") == "ghdl", reason="Verilog not supported")
def test_controller():
    """Run the cocotb simulation of ctrl_top via cocotb-test.

    Fixes vs. the original:
    - removed the stray expression ``parameters['']`` (raised KeyError);
    - ``path.join(hdl_dir, f"/controller/...")`` discarded ``hdl_dir``
      because the second component was absolute;
    - ``parameters`` and ``includes`` were built but never passed to
      ``tester.run``.
    """
    dut = "ctrl_top"
    # The cocotb test module is this file (without the .py extension).
    module = path.splitext(path.basename(__file__))[0]
    parameters = {}
    parameters['DATA_ADDR_WIDTH'] = 5
    verilog_sources = [
        path.join(hdl_dir, "controller", f"{dut}.v")
    ]
    compile_args = [
        "-g2005",
        "-Wall"
    ]
    includes = [
        f"{hdl_dir}/src/",
        f"{hdl_dir}/include/",
    ]
    tester.run(
        verilog_sources = verilog_sources,
        toplevel = dut,
        module = module,
        compile_args = compile_args,
        parameters = parameters,
        includes = includes
    )
# -*- coding: utf-8 -*-
import cocotb
from cocotb.clock import Clock
from cocotb.triggers import Timer
from cocotb.regression import TestFactory
@cocotb.test()
async def run_test(dut):
    """Drive the DUT through a reset phase, then assert all control inputs.

    NOTE(review): several of the driven names (ptr_req, en_calc,
    en_ram_*, wr_ram_*, regf_*, new_in/new_out, data/coef addresses)
    look like DUT outputs rather than inputs -- confirm against the RTL
    port list.
    """
    # Clock period in ns.
    PERIOD = 10
    cocotb.fork(Clock(dut.clk, PERIOD, 'ns').start(start_high=False))
    # Phase 1: hold everything low (reset asserted, active-low style?
    # rst=0 here -- confirm reset polarity against the RTL).
    dut.rst = 0
    dut.en = 0
    dut.prog = 0
    dut.iw_valid = 0
    dut.load_coef_addr = 0
    dut.instr_word = 0
    await Timer(20*PERIOD, units='ns')
    # Phase 2: drive every signal high for 20 clock periods.
    dut.rst = 1
    dut.en = 1
    dut.prog = 1
    dut.iw_valid = 1
    dut.load_coef_addr = 1
    dut.instr_word = 1
    dut.ptr_req = 1
    dut.en_calc = 1
    dut.mac_init = 1
    dut.en_ram_pa = 1
    dut.en_ram_pb = 1
    dut.wr_ram_pa = 1
    dut.wr_ram_pb = 1
    dut.regf_rd = 1
    dut.regf_wr = 1
    dut.regf_en = 1
    dut.new_in = 1
    dut.new_out = 1
    dut.data_addr = 1
    dut.coef_addr = 1
    dut.ars1 = 1
    dut.ars2 = 1
    dut.ard1 = 1
    dut.ard2 = 1
    await Timer(20*PERIOD, units='ns')
# Register run_test with cocotb's regression TestFactory so it is
# discovered and executed by the simulator run.
factory = TestFactory(run_test)
factory.generate_tests()
| 19.326733 | 79 | 0.623975 |
5a7c2367766c61aa47db7a2eff21c936f2299ab1 | 981 | py | Python | gen3_replicator/config_helper.py | ohsu-comp-bio/gen3-etl-lite | c91093e6c7b269db5baca41619a93356a68c4e97 | [
"MIT"
] | null | null | null | gen3_replicator/config_helper.py | ohsu-comp-bio/gen3-etl-lite | c91093e6c7b269db5baca41619a93356a68c4e97 | [
"MIT"
] | 1 | 2021-03-25T22:26:08.000Z | 2021-03-25T22:26:08.000Z | gen3_replicator/config_helper.py | ohsu-comp-bio/gen3-etl-lite | c91093e6c7b269db5baca41619a93356a68c4e97 | [
"MIT"
] | null | null | null | """Useful helpers."""
import json
import os
from datetime import date, datetime
def json_serial(obj):
    """``json.dumps`` *default* hook: encode date/datetime as ISO-8601 text."""
    if not isinstance(obj, (datetime, date)):
        raise TypeError("Type %s not serializable" % type(obj))
    return obj.isoformat()
def load_json(file_name):
    """Return the parsed JSON in *file_name*, searched for under each
    configured directory; an empty dict if no directory contains it."""
    for base in config_paths():
        candidate = os.path.join(base, file_name)
        if not os.path.exists(candidate):
            continue
        with open(candidate, 'r') as fh:
            return json.load(fh)
    return {}
def save_json(object, file_name):
    """Serialize *object* into *file_name* under the primary config dir.

    Dates/datetimes are handled by the ``json_serial`` hook.
    """
    target = os.path.join(config_paths()[0], file_name)
    serialized = json.dumps(object, separators=(',', ': '), default=json_serial)
    with open(target, 'w') as fh:
        fh.write(serialized)
def config_paths():
    """Return the config search path: this module's directory, then /config."""
    here = os.path.dirname(os.path.abspath(__file__))
    return [here, '/config']
| 28.028571 | 85 | 0.641182 |
c3afd4e434bb58b14b5ca290b358c1941385a141 | 1,054 | py | Python | setup.py | yoratoni/python-logger | d23e871ec8a44c4220d21f15fecd10b77de8b3db | [
"BSD-3-Clause"
] | 1 | 2022-03-13T23:51:25.000Z | 2022-03-13T23:51:25.000Z | setup.py | yoratoni/pyostra | d23e871ec8a44c4220d21f15fecd10b77de8b3db | [
"BSD-3-Clause"
] | null | null | null | setup.py | yoratoni/pyostra | d23e871ec8a44c4220d21f15fecd10b77de8b3db | [
"BSD-3-Clause"
] | null | null | null | try:
from setuptools import setup
except ImportError:
from distutils.core import setup
NAME = 'pyostra'
VERSION = '0.1.4'
DESCRIPTION = 'THe internal Python package used for the Ostra projects such as the advanced NFTs generator.'
LONG_DESCRIPTION = open('README.md').read() + '\n\n' + open('CHANGELOG.md').read()
setup(
name=NAME,
version=VERSION,
author='Yoratoni',
author_email='twingearas@gmail.com',
url='https://github.com/yoratoni/pyostra',
license='BSD',
description=DESCRIPTION,
long_description_content_type = 'text/markdown',
long_description=LONG_DESCRIPTION,
keywords='ostra internal color colour terminal windows crossplatform',
packages=[NAME],
install_requires=['colorama'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: BSD License',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Terminals'
]
)
| 27.736842 | 108 | 0.669829 |
c1f393c8b94cc7e5842409be6d0e7043f9d4b4e5 | 6,468 | py | Python | test/functional/nulldummy.py | RossClelland/uscbuild | db77df86e94ba4362040d5bedf1c71e5b4f01654 | [
"MIT"
] | null | null | null | test/functional/nulldummy.py | RossClelland/uscbuild | db77df86e94ba4362040d5bedf1c71e5b4f01654 | [
"MIT"
] | null | null | null | test/functional/nulldummy.py | RossClelland/uscbuild | db77df86e94ba4362040d5bedf1c71e5b4f01654 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2016 The Uscoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test NULLDUMMY softfork.
Connect to a single node.
Generate 2 blocks (save the coinbases for later).
Generate 427 more blocks.
[Policy/Consensus] Check that NULLDUMMY compliant transactions are accepted in the 430th block.
[Policy] Check that non-NULLDUMMY transactions are rejected before activation.
[Consensus] Check that the new NULLDUMMY rules are not enforced on the 431st block.
[Policy/Consensus] Check that the new NULLDUMMY rules are enforced on the 432nd block.
"""
from test_framework.test_framework import UscoinTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block, add_witness_commitment
from test_framework.script import CScript
from io import BytesIO
import time
NULLDUMMY_ERROR = "64: non-mandatory-script-verify-flag (Dummy CHECKMULTISIG argument must be zero)"
def trueDummy(tx):
    """Make *tx* violate NULLDUMMY: the first scriptSig stack element must be
    empty, and is replaced with OP_TRUE (0x51); the rest is kept as-is.
    The transaction hash is refreshed afterwards."""
    old_script = CScript(tx.vin[0].scriptSig)
    rebuilt = []
    for element in old_script:
        if rebuilt:
            rebuilt.append(element)
        else:
            # Sanity-check the dummy really is the empty push before swapping.
            assert(len(element) == 0)
            rebuilt.append(b'\x51')
    tx.vin[0].scriptSig = CScript(rebuilt)
    tx.rehash()
class NULLDUMMYTest(UscoinTestFramework):
    # Functional test of the NULLDUMMY softfork (see module docstring):
    # activation is height-based here -- block 431 is the last pre-activation
    # block, 432 the first block under the new rules.

    def set_test_params(self):
        # Single node, fresh chain; whitelist localhost and allow witness
        # transactions before activation.
        self.num_nodes = 1
        self.setup_clean_chain = True
        self.extra_args = [['-whitelist=127.0.0.1', '-walletprematurewitness']]

    def run_test(self):
        # Addresses used throughout: plain, 1-of-1 multisig, and their
        # witness (P2WPKH / P2WSH) counterparts.
        self.address = self.nodes[0].getnewaddress()
        self.ms_address = self.nodes[0].addmultisigaddress(1,[self.address])
        self.wit_address = self.nodes[0].addwitnessaddress(self.address)
        self.wit_ms_address = self.nodes[0].addwitnessaddress(self.ms_address)
        NetworkThread().start() # Start up network handling in another thread
        self.coinbase_blocks = self.nodes[0].generate(2) # Block 2
        coinbase_txid = []
        for i in self.coinbase_blocks:
            coinbase_txid.append(self.nodes[0].getblock(i)['tx'][0])
        self.nodes[0].generate(427) # Block 429
        # Track tip state locally so block_submit() can hand-build blocks.
        self.lastblockhash = self.nodes[0].getbestblockhash()
        self.tip = int("0x" + self.lastblockhash, 0)
        self.lastblockheight = 429
        self.lastblocktime = int(time.time()) + 429
        self.log.info("Test 1: NULLDUMMY compliant base transactions should be accepted to mempool and mined before activation [430]")
        test1txs = [self.create_transaction(self.nodes[0], coinbase_txid[0], self.ms_address, 49)]
        txid1 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[0].serialize_with_witness()), True)
        test1txs.append(self.create_transaction(self.nodes[0], txid1, self.ms_address, 48))
        txid2 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[1].serialize_with_witness()), True)
        test1txs.append(self.create_transaction(self.nodes[0], coinbase_txid[1], self.wit_ms_address, 49))
        txid3 = self.nodes[0].sendrawtransaction(bytes_to_hex_str(test1txs[2].serialize_with_witness()), True)
        self.block_submit(self.nodes[0], test1txs, False, True)
        self.log.info("Test 2: Non-NULLDUMMY base multisig transaction should not be accepted to mempool before activation")
        test2tx = self.create_transaction(self.nodes[0], txid2, self.ms_address, 47)
        trueDummy(test2tx)  # corrupt the dummy element (policy violation)
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test2tx.serialize_with_witness()), True)
        self.log.info("Test 3: Non-NULLDUMMY base transactions should be accepted in a block before activation [431]")
        self.block_submit(self.nodes[0], [test2tx], False, True)
        self.log.info("Test 4: Non-NULLDUMMY base multisig transaction is invalid after activation")
        test4tx = self.create_transaction(self.nodes[0], test2tx.hash, self.address, 46)
        # Keep compliant copies for Test 6 before corrupting them.
        test6txs=[CTransaction(test4tx)]
        trueDummy(test4tx)
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test4tx.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], [test4tx])
        self.log.info("Test 5: Non-NULLDUMMY P2WSH multisig transaction invalid after activation")
        test5tx = self.create_transaction(self.nodes[0], txid3, self.wit_address, 48)
        test6txs.append(CTransaction(test5tx))
        # Corrupt the witness dummy (first witness stack element) instead.
        test5tx.wit.vtxinwit[0].scriptWitness.stack[0] = b'\x01'
        assert_raises_rpc_error(-26, NULLDUMMY_ERROR, self.nodes[0].sendrawtransaction, bytes_to_hex_str(test5tx.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], [test5tx], True)
        self.log.info("Test 6: NULLDUMMY compliant base/witness transactions should be accepted to mempool and in block after activation [432]")
        for i in test6txs:
            self.nodes[0].sendrawtransaction(bytes_to_hex_str(i.serialize_with_witness()), True)
        self.block_submit(self.nodes[0], test6txs, True, True)

    def create_transaction(self, node, txid, to_address, amount):
        # Build, sign (via RPC) and deserialize a 1-in/1-out spend of vout 0.
        inputs = [{ "txid" : txid, "vout" : 0}]
        outputs = { to_address : amount }
        rawtx = node.createrawtransaction(inputs, outputs)
        signresult = node.signrawtransaction(rawtx)
        tx = CTransaction()
        f = BytesIO(hex_str_to_bytes(signresult['hex']))
        tx.deserialize(f)
        return tx

    def block_submit(self, node, txs, witness = False, accept = False):
        # Hand-assemble a block on top of the tracked tip and submit it;
        # assert whether the node accepted (tip advances) or rejected it.
        block = create_block(self.tip, create_coinbase(self.lastblockheight + 1), self.lastblocktime + 1)
        block.nVersion = 4
        for tx in txs:
            tx.rehash()
            block.vtx.append(tx)
        block.hashMerkleRoot = block.calc_merkle_root()
        # Short-circuit: only add the witness commitment when requested.
        witness and add_witness_commitment(block)
        block.rehash()
        block.solve()
        node.submitblock(bytes_to_hex_str(block.serialize(True)))
        if (accept):
            assert_equal(node.getbestblockhash(), block.hash)
            self.tip = block.sha256
            self.lastblockhash = block.hash
            self.lastblocktime += 1
            self.lastblockheight += 1
        else:
            assert_equal(node.getbestblockhash(), self.lastblockhash)

if __name__ == '__main__':
    NULLDUMMYTest().main()
| 49 | 145 | 0.700989 |
e21658e6d5c89d18bb9c7e6d4017d96d9cc23431 | 6,792 | py | Python | mnist_qkeras3.py | filipemlins/nas-hls4ml | b35afc4f684d803d352776c40f3a6cbbf47c4b1c | [
"MIT"
] | null | null | null | mnist_qkeras3.py | filipemlins/nas-hls4ml | b35afc4f684d803d352776c40f3a6cbbf47c4b1c | [
"MIT"
] | null | null | null | mnist_qkeras3.py | filipemlins/nas-hls4ml | b35afc4f684d803d352776c40f3a6cbbf47c4b1c | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 2 19:57:08 2020
@author: filipe
"""
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 1 16:44:49 2020
@author: filipe
"""
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 1 15:05:57 2020
@author: filipe
"""
from tensorflow.keras.utils import to_categorical
from sklearn.datasets import fetch_openml
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder, StandardScaler
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
##pre processing
# ---- Data loading & preprocessing (Kaggle MNIST CSV format) ----
train = pd.read_csv('train.csv')
test = pd.read_csv('test.csv')
Y_train1 = train[['label']]
X_train1 = train.drop(train.columns[[0]], axis=1)  # drop the label column
X_test1 = test
X_train1 = np.array(X_train1)
X_test1 = np.array(X_test1)
#Reshape the training and test set
X_train1 = X_train1.reshape(X_train1.shape[0], 28, 28, 1)/255
X_test1 = X_test1.reshape(X_test1.shape[0], 28, 28, 1)/255
#Padding the images by 2 pixels since in the paper input images were 32x32
X_train1 = np.pad(X_train1, ((0,0),(2,2),(2,2),(0,0)), 'constant')
X_test1 = np.pad(X_test1, ((0,0),(2,2),(2,2),(0,0)), 'constant')
# 80/20 split of the labelled data; the Kaggle "test.csv" set is unused below.
X_train, X_test, Y_train, Y_test = train_test_split(X_train1, Y_train1, test_size=0.2, random_state=42)
#Standardization
# NOTE(review): only X_train is standardized -- X_test is evaluated on the
# raw 0..1 scale, which skews the accuracy numbers printed at the bottom.
mean_px = X_train.mean().astype(np.float32)
std_px = X_train.std().astype(np.float32)
X_train = (X_train - mean_px)/(std_px)
#One-hot encoding the labels
Y_train = to_categorical(Y_train)
print(X_train.shape[0], "train samples")
print(X_test.shape[0], "test samples")
#scaler = StandardScaler().fit(X_train)
#X_train = scaler.transform(X_train)
#X_test = scaler.transform(X_test)
# ---- Quantized LeNet-style model (QKeras, 8-bit weights/activations) ----
from tensorflow.keras.models import Sequential
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.regularizers import l1
from callbacks import all_callbacks
from tensorflow.keras.layers import Activation, MaxPooling2D, Flatten
from qkeras.qlayers import QDense, QActivation
from qkeras.qconvolutional import QConv2D
from qkeras.quantizers import quantized_bits, quantized_relu
model = Sequential()
model.add(QConv2D(8, (4, 4), strides=(1,1), input_shape=(32,32, 1),
                  kernel_quantizer=quantized_bits(8,1),bias_quantizer=quantized_bits(8,1), name="conv2d_0_m"))
model.add(QActivation(activation=quantized_relu(8,1), name='relu1'))
model.add(MaxPooling2D(pool_size = (2,2), strides = (2,2), name='max1'))
model.add(QConv2D(
    16, (2, 2), strides=(1,1),
    kernel_quantizer=quantized_bits(8,1),
    bias_quantizer=quantized_bits(8,1),
    name="conv2d_1_m"))
model.add(QActivation(activation=quantized_relu(8,1), name='relu2'))
model.add(MaxPooling2D(pool_size = (2,2), strides = (2,2), name='max2'))
model.add(Flatten())
model.add(QDense(120, name='fc1',
                 kernel_quantizer=quantized_bits(8,1), bias_quantizer=quantized_bits(7,1),
                 kernel_initializer='lecun_uniform', kernel_regularizer=l1(0.0001)))
model.add(QActivation(activation=quantized_relu(8,1), name='relu3'))
model.add(QDense(84, name='fc2',
                 kernel_quantizer=quantized_bits(8,1), bias_quantizer=quantized_bits(8,1),
                 kernel_initializer='lecun_uniform', kernel_regularizer=l1(0.0001)))
model.add(QActivation(activation=quantized_relu(8,1), name='relu4'))
model.add(QDense(10, name='output',
                 kernel_quantizer=quantized_bits(8,1), bias_quantizer=quantized_bits(8,1),
                 kernel_initializer='lecun_uniform', kernel_regularizer=l1(0.0001)))
model.add(Activation(activation='softmax', name='softmax'))
#from tensorflow_model_optimization.python.core.sparsity.keras import prune, pruning_callbacks, pruning_schedule
#from tensorflow_model_optimization.sparsity.keras import strip_pruning
#pruning_params = {"pruning_schedule" : pruning_schedule.ConstantSparsity(0.75, begin_step=2000, frequency=100)}
#model = prune.prune_low_magnitude(model, **pruning_params)
# ---- Train (or reload a previously trained model) ----
# NOTE(review): this rebinds `train` (previously the training DataFrame)
# to a boolean flag; rename one of them if the DataFrame is ever needed later.
train = True
import keras
if train:
    adam = Adam(lr=0.0001)
    model.compile(optimizer=adam, loss=keras.losses.categorical_crossentropy, metrics=['accuracy'])
    # callbacks = all_callbacks(stop_patience = 1000,
    # lr_factor = 0.5,
    # lr_patience = 10,
    # lr_epsilon = 0.000001,
    # lr_cooldown = 2,
    # lr_minimum = 0.0000001,
    # outputDir = 'model_3')
    # callbacks.callbacks.append(pruning_callbacks.UpdatePruningStep())
    model.fit(X_train, Y_train, batch_size=1024,
              epochs=10, validation_split=0.25, shuffle=True)#, callbacks = callbacks.callbacks)
    # Save the model again but with the pruning 'stripped' to use the regular layer types
    # model = strip_pruning(model)
    model.save('model_5/KERAS_check_best_model.h5')
else:
    from tensorflow.keras.models import load_model
    from qkeras.utils import _add_supported_quantized_objects
    co = {}
    _add_supported_quantized_objects(co)
    model = load_model('model_5/KERAS_check_best_model.h5', custom_objects=co)
# ---- hls4ml conversion: fixed-point HLS project from the trained model ----
import hls4ml
hls4ml.model.optimizer.OutputRoundingSaturationMode.layers = ['Activation']
hls4ml.model.optimizer.OutputRoundingSaturationMode.rounding_mode = 'AP_RND'
hls4ml.model.optimizer.OutputRoundingSaturationMode.saturation_mode = 'AP_SAT'
config = hls4ml.utils.config_from_keras_model(model, granularity='name',
                                              default_precision='ap_fixed<9,2,AP_RND,AP_SAT>', default_reuse_factor=30000)
# Wider tables for the softmax exp/inv lookup to preserve accuracy.
config['LayerName']['softmax']['exp_table_t'] = 'ap_fixed<18,8>'
config['LayerName']['softmax']['inv_table_t'] = 'ap_fixed<18,4>'
print(config)
hls_model = hls4ml.converters.convert_from_keras_model(model, hls_config=config, output_dir='model_5/hls4ml_prj')
hls_model.compile()
# ---- Compare Keras (quantized) vs hls4ml emulation accuracy ----
import plotting
import matplotlib.pyplot as plt
from sklearn.metrics import accuracy_score
from tensorflow.keras.models import load_model
#model_ref = load_model('model_1/KERAS_check_best_model.h5')
print("Accuracy quantized: {}".format(accuracy_score(Y_test, np.argmax(model.predict(X_test), axis=1))))
z = np.argmax(hls_model.predict(X_test), axis=1)
print("Accuracy hls4ml: {}".format(accuracy_score(Y_test, z)))
#print("Accuracy unpruned: {}".format(accuracy_score(np.argmax(y_test, axis=1), np.argmax(model_ref.predict(X_test), axis=1))))
#plt.figure(figsize=(9, 9))
#_ = plotting.makeRoc(X_train, Y_train, le.classes_, model)
##plt.gca().set_prop_cycle(None) # reset the colors
##_ = plotting.makeRoc(X_test, y_test, le.classes_, model_ref, linestyle='--')
#plt.gca().set_prop_cycle(None) # reset the colors
#_ = plotting.makeRoc(X_train, Y_train, le.classes_, hls_model, linestyle=':')
#
#hls_model.build(synth=True)
#
#hls4ml.report.read_vivado_report('model_3/hls4ml_prj')
95afc7c4f2518cad2f62a0ffd2f953374558d713 | 279 | py | Python | src/napari_live_recording/common/common.py | jethro33/napari-live-recording | 6c3fcc33bd18cd090e3f89971b630d1800e29e4d | [
"MIT"
] | 7 | 2021-10-11T17:45:33.000Z | 2022-02-07T16:10:42.000Z | src/napari_live_recording/common/common.py | jethro33/napari-live-recording | 6c3fcc33bd18cd090e3f89971b630d1800e29e4d | [
"MIT"
] | 2 | 2021-11-01T09:00:11.000Z | 2022-01-24T16:21:05.000Z | src/napari_live_recording/common/common.py | jethro33/napari-live-recording | 6c3fcc33bd18cd090e3f89971b630d1800e29e4d | [
"MIT"
] | null | null | null | from dataclasses import dataclass
@dataclass
class ROI:
    """Dataclass for ROI settings.
    """
    # Top-left corner of the region of interest, in pixels.
    offset_x: int = 0
    offset_y: int = 0
    # Region size, in pixels (0 = unset).
    height: int = 0
    width: int = 0
    # Adjustment step for each setting; presumably the granularity imposed
    # by the camera backend (1 = freely adjustable) -- TODO confirm.
    ofs_x_step: int = 1
    ofs_y_step: int = 1
    width_step: int = 1
    height_step: int = 1
fdd8ec53840cd3d076b869005edbbfedc70e6a57 | 185 | py | Python | tests/parser/aggregates.count.grounding.5.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/aggregates.count.grounding.5.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | tests/parser/aggregates.count.grounding.5.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | [
"Apache-2.0"
] | null | null | null | input = """
b(1).
c(a).
a(A,B) | na(A,B) :- c(A), b(B).
:- 1 < #count{A:a(A,B)}, b(B).
"""
output = """
b(1).
c(a).
a(A,B) | na(A,B) :- c(A), b(B).
:- 1 < #count{A:a(A,B)}, b(B).
"""
| 12.333333 | 31 | 0.351351 |
849127de739f0a24c7da77f54f2f52a4c287b9c7 | 164 | py | Python | kattis/mixedfractions.py | terror/Solutions | 1ad33daec95b565a38ac4730261593bcf249ac86 | [
"CC0-1.0"
] | 2 | 2021-04-05T14:26:37.000Z | 2021-06-10T04:22:01.000Z | kattis/mixedfractions.py | terror/Solutions | 1ad33daec95b565a38ac4730261593bcf249ac86 | [
"CC0-1.0"
] | null | null | null | kattis/mixedfractions.py | terror/Solutions | 1ad33daec95b565a38ac4730261593bcf249ac86 | [
"CC0-1.0"
] | null | null | null | import sys
# Kattis "mixedfractions": each stdin line holds "numerator denominator";
# a zero numerator terminates input. Print each as a mixed fraction
# "whole remainder / denominator".
for line in sys.stdin:
    numerator, denominator = map(int, line.split())
    if numerator == 0:
        break
    whole, remainder = divmod(numerator, denominator)
    print("{0} {1} / {2}".format(whole, remainder, denominator))
f0eedc0fde6a63c53a8dde9ca5f2c6c80e9cf44d | 4,099 | py | Python | pyglet/media/codecs/__init__.py | AnantTiwari-Naman/pyglet | 4774f2889057da95a78785a69372112931e6a620 | [
"BSD-3-Clause"
] | 1 | 2021-07-30T17:36:39.000Z | 2021-07-30T17:36:39.000Z | pyglet/media/codecs/__init__.py | AnantTiwari-Naman/pyglet | 4774f2889057da95a78785a69372112931e6a620 | [
"BSD-3-Clause"
] | null | null | null | pyglet/media/codecs/__init__.py | AnantTiwari-Naman/pyglet | 4774f2889057da95a78785a69372112931e6a620 | [
"BSD-3-Clause"
] | 1 | 2021-09-16T20:47:07.000Z | 2021-09-16T20:47:07.000Z | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# Copyright (c) 2008-2021 pyglet contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
from pyglet.util import Codecs, Decoder, Encoder
from .base import *
import pyglet
# Module-level debug flag mirrored from pyglet's global options.
_debug = pyglet.options['debug_media']
# Single shared registry of media decoders/encoders; the four module-level
# names below are just its bound methods, re-exported for convenience.
_codecs = Codecs()
add_decoders = _codecs.add_decoders
get_decoders = _codecs.get_decoders
add_encoders = _codecs.add_encoders
get_encoders = _codecs.get_encoders
class MediaDecoder(Decoder):

    def decode(self, file, filename, streaming):
        """Produce a `Source` (or `StreamingSource`) from *file*.

        *filename* serves only as a file-type hint. Implementations raise
        MediaDecodeException when the data cannot be decoded.
        """
        raise NotImplementedError()
class MediaEncoder(Encoder):

    def encode(self, source, file, filename):
        """Write *source* into *file*.

        *filename* hints at the desired container format. Encoder-specific
        options that are not understood should be ignored or produce a
        warning rather than fail.
        """
        raise NotImplementedError()
def add_default_media_codecs():
    # Add all bundled codecs. These should be listed in order of
    # preference. This is called automatically by pyglet.media.
    # WAVE is always attempted first and is the only bundled encoder here.
    try:
        from . import wave
        add_decoders(wave)
        add_encoders(wave)
    except ImportError:
        pass

    # GStreamer backend is only wired up on Linux platforms.
    if pyglet.compat_platform.startswith('linux'):
        try:
            from . import gstreamer
            add_decoders(gstreamer)
        except ImportError:
            pass

    # Windows Media Foundation decoder, Vista and newer only.
    try:
        if pyglet.compat_platform in ('win32', 'cygwin'):
            from pyglet.libs.win32.constants import WINDOWS_VISTA_OR_GREATER
            if WINDOWS_VISTA_OR_GREATER:  # Supports Vista and above.
                from . import wmf
                add_decoders(wmf)
    except ImportError:
        pass

    # FFmpeg, only when its native libraries load (see have_ffmpeg()).
    try:
        if have_ffmpeg():
            from . import ffmpeg
            add_decoders(ffmpeg)
    except ImportError:
        pass
def have_ffmpeg():
    """Check if FFmpeg library is available.

    Returns:
        bool: True if FFmpeg is found.

    .. versionadded:: 1.4
    """
    try:
        from . import ffmpeg_lib
    except (ImportError, FileNotFoundError):
        if _debug:
            print('FFmpeg not available.')
        return False
    if _debug:
        print('FFmpeg available, using to load media files.')
    return True
4b605742a04ef96398c1b781fdbf7144f3aa1a93 | 2,670 | py | Python | yt_dlp/extractor/gab.py | tameiki000/ytdl-patched | feba3d336892845cef553436818170554cfa5228 | [
"Unlicense"
] | 80 | 2021-05-25T11:33:49.000Z | 2022-03-29T20:36:53.000Z | yt_dlp/extractor/gab.py | tameiki000/ytdl-patched | feba3d336892845cef553436818170554cfa5228 | [
"Unlicense"
] | 22 | 2021-05-08T13:44:12.000Z | 2022-03-30T01:27:23.000Z | yt_dlp/extractor/gab.py | tameiki000/ytdl-patched | feba3d336892845cef553436818170554cfa5228 | [
"Unlicense"
] | 22 | 2021-05-07T05:01:27.000Z | 2022-03-26T19:10:54.000Z | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
clean_html,
str_to_int,
)
class GabTVIE(InfoExtractor):
    # Extractor for tv.gab.com episode pages. Metadata is scraped from
    # data-* attributes embedded in the page HTML rather than an API.
    _VALID_URL = r'https?://tv\.gab\.com/channel/[^/]+/view/(?P<id>[a-z0-9-]+)'
    _TESTS = [{
        'url': 'https://tv.gab.com/channel/wurzelroot/view/why-was-america-in-afghanistan-61217eacea5665de450d0488',
        'info_dict': {
            'id': '61217eacea5665de450d0488',
            'ext': 'mp4',
            'title': 'WHY WAS AMERICA IN AFGHANISTAN - AMERICA FIRST AGAINST AMERICAN OLIGARCHY',
            'description': None,
            'uploader': 'Wurzelroot',
            'uploader_id': '608fb0a85738fd1974984f7d',
            'thumbnail': 'https://tv.gab.com/image/61217eacea5665de450d0488',
        }
    }]

    def _real_extract(self, url):
        # The URL slug ends with the hex episode id; `id` shadows the builtin.
        id = self._match_id(url).split('-')[-1]
        webpage = self._download_webpage(url, id)
        channel_id = self._search_regex(r'data-channel-id=\"(?P<channel_id>[^\"]+)', webpage, 'channel_id')
        channel_name = self._search_regex(r'data-channel-name=\"(?P<channel_id>[^\"]+)', webpage, 'channel_name')
        title = self._search_regex(r'data-episode-title=\"(?P<channel_id>[^\"]+)', webpage, 'title')
        view_key = self._search_regex(r'data-view-key=\"(?P<channel_id>[^\"]+)', webpage, 'view_key')
        description = clean_html(
            self._html_search_regex(self._meta_regex('description'), webpage, 'description', group='content')) or None
        # One <a> tag per offered resolution (e.g. "720p", "audio-128").
        available_resolutions = re.findall(r'<a\ data-episode-id=\"%s\"\ data-resolution=\"(?P<resolution>[^\"]+)' % id,
                                           webpage)

        formats = []
        for resolution in available_resolutions:
            frmt = {
                'url': f'https://tv.gab.com/media/{id}?viewKey={view_key}&r={resolution}',
                'format_id': resolution,
                'vcodec': 'h264',
                'acodec': 'aac',
                'ext': 'mp4'
            }
            if 'audio-' in resolution:
                # Audio-only variant: suffix is the bitrate in kbit/s;
                # height/quality are set so it sorts below video formats.
                frmt['abr'] = str_to_int(resolution.replace('audio-', ''))
                frmt['height'] = 144
                frmt['quality'] = -10
            else:
                frmt['height'] = str_to_int(resolution.replace('p', ''))
            formats.append(frmt)
        self._sort_formats(formats)

        return {
            'id': id,
            'title': title,
            'formats': formats,
            'description': description,
            'uploader': channel_name,
            'uploader_id': channel_id,
            'thumbnail': f'https://tv.gab.com/image/{id}',
        }
| 39.850746 | 120 | 0.55206 |
3d28b49bdc9507aa2f5ccac1f0a213d1b9e6d522 | 3,552 | py | Python | SentyectorAPI/SentyectorAPI/settings.py | vaibhavarora102/Sentyector | c9023fe38e3517bd39b932a3282f5aebe5e84fbf | [
"MIT"
] | null | null | null | SentyectorAPI/SentyectorAPI/settings.py | vaibhavarora102/Sentyector | c9023fe38e3517bd39b932a3282f5aebe5e84fbf | [
"MIT"
] | null | null | null | SentyectorAPI/SentyectorAPI/settings.py | vaibhavarora102/Sentyector | c9023fe38e3517bd39b932a3282f5aebe5e84fbf | [
"MIT"
] | 3 | 2021-04-10T06:33:07.000Z | 2021-04-10T12:04:19.000Z | """
Django settings for SentyectorAPI project.
Generated by 'django-admin startproject' using Django 3.0.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): the key is committed to the repository; load it from an
# environment variable before any real deployment.
SECRET_KEY = 'h@3)hy+_nbn4^io1#8#h#ls_7m54x%lpzya-f32q*qa_vo0=pm'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# NOTE(review): '*' accepts any Host header -- restrict to real hostnames.
ALLOWED_HOSTS = ['*']
# NOTE(review): with CORS_ORIGIN_ALLOW_ALL = True, django-cors-headers
# ignores the whitelist below; keep only one of the two settings.
CORS_ORIGIN_ALLOW_ALL = True
CORS_ORIGIN_WHITELIST = [
    'http://google.com',
    'http://hostname.example.com',
    'http://localhost:8000',
    'http://127.0.0.1:8000'
]
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'api',
    'corsheaders',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    # NOTE(review): django-cors-headers documents CorsMiddleware as early as
    # possible, before CommonMiddleware -- here it sits after; verify order.
    'corsheaders.middleware.CorsMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'SentyectorAPI.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')]
        ,
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'SentyectorAPI.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT=os.path.join(BASE_DIR,'static')
# WhiteNoise serves hashed/compressed static files directly from the app.
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
| 26.311111 | 91 | 0.69848 |
61e6c850d206ef7715fe38c40388c7e2451d3689 | 2,291 | py | Python | sources/main.py | guillaumelamirand/manga-downloader | 7d93b0dd4b44f865b8e0511f17f5a4e5ac470a8b | [
"MIT"
] | null | null | null | sources/main.py | guillaumelamirand/manga-downloader | 7d93b0dd4b44f865b8e0511f17f5a4e5ac470a8b | [
"MIT"
] | null | null | null | sources/main.py | guillaumelamirand/manga-downloader | 7d93b0dd4b44f865b8e0511f17f5a4e5ac470a8b | [
"MIT"
] | null | null | null |
import logging
import os

from setup import config
from models import Sources, Mangas, CalibreApi, Notification, HomeAssistant
# Load datas
Sources.load_items(config)
Mangas.load_items(config)
# Init calibre client
calibre_api = CalibreApi(config)
# Init home assistant client
notifications = []
home_assistant = HomeAssistant(config)
# Run
logger = logging.getLogger(__name__)
logger.info("Browse all mangas")
# Start processing mangas
# Process every configured manga: fetch the list of chapiters from its
# source, build a CBZ for each new one, add it to Calibre, and queue a
# success/failure notification for the batched Home Assistant message.
for manga in Mangas.get_all():
    try:
        logger.info(" -> %s on %s" % (manga.serie, manga.source))
        calibre_serie = calibre_api.get_serie(manga.serie)
        source = Sources.get(manga.source)
        logger.info(" - Last chapiter in Calibre %s" % calibre_serie.last_chapiter)
        available_chapiters = source.get_available_chapiters(manga.id)
        # New = published on the source, not explicitly ignored for this
        # manga, and strictly newer than what Calibre already holds.
        new_chapiters = [
            chapiter for chapiter in available_chapiters
            if chapiter not in getattr(manga, 'chapiter_ignored', [])
            and chapiter > calibre_serie.last_chapiter
        ]
        if not new_chapiters:
            logger.info(" - No new chapiters available")
        else:
            logger.info(" - New chapiters available %s" % new_chapiters)
            for chapiter in new_chapiters:
                cbz_file = None
                try:
                    logger.info(" ---- Chapiter %s" % chapiter)
                    logger.info(" - Build")
                    cbz_file = source.build_chapiter(manga.id, chapiter)
                    logger.info(" - Add to Calibre")
                    next_index = calibre_serie.get_next_index(manga.serie_index_increment, manga.serie_sub_index_max)
                    calibre_chapiter_name = calibre_serie.get_chapiter_name(chapiter)
                    calibre_api.add_chapiter_to_serie(calibre_serie, next_index, calibre_chapiter_name, cbz_file)
                    calibre_serie.last_index = next_index
                    notifications.append(Notification(manga.serie, chapiter, True))
                except Exception as error:
                    logger.error(" - Error: %s" % error)
                    notifications.append(Notification(manga.serie, chapiter, False))
                    # Abort the remaining chapiters of this manga so a
                    # failed chapiter doesn't leave a gap in the series.
                    break
                finally:
                    # Best-effort cleanup of the temporary CBZ.
                    # BUG FIX: the original evaluated os.path.exists(cbz_file)
                    # without `import os` (NameError) and with cbz_file
                    # possibly None (TypeError); both were swallowed by the
                    # bare except, so temp files were never removed.
                    logger.debug("Remove cbz file '%s'" % cbz_file)
                    try:
                        if cbz_file is not None and os.path.exists(cbz_file):
                            os.remove(cbz_file)
                    except Exception:
                        pass
    except Exception as error:
        logger.error(" - Error: %s" % error)
        notifications.append(Notification(manga.serie, "unknown", False))

# Send one batched notification covering everything that happened.
if len(notifications) > 0:
    home_assistant.notify(notifications)
| 33.691176 | 165 | 0.721955 |
9e54ae6e84db3e95e7d925ba08e55467da2a62ee | 16,541 | py | Python | ravendb/documents/indexes/index_creation.py | ravendb/RavenDB-Python-Client | 6286b459b501e755fe8e8591a48acf8616605ccd | [
"MIT"
] | 8 | 2016-10-08T17:45:44.000Z | 2018-05-29T12:16:43.000Z | ravendb/documents/indexes/index_creation.py | ravendb/RavenDB-Python-Client | 6286b459b501e755fe8e8591a48acf8616605ccd | [
"MIT"
] | 5 | 2017-02-12T15:50:53.000Z | 2017-09-18T12:25:01.000Z | ravendb/documents/indexes/index_creation.py | ravendb/RavenDB-Python-Client | 6286b459b501e755fe8e8591a48acf8616605ccd | [
"MIT"
] | 8 | 2016-07-03T07:59:12.000Z | 2017-09-18T11:22:23.000Z | import logging
from abc import abstractmethod, ABC
from typing import Generic, TypeVar, Union, Dict, Set, Callable, Optional, List, Collection
from ravendb import constants
from ravendb.documents.conventions.document_conventions import DocumentConventions
from ravendb.documents.store.definition import DocumentStore, DocumentStoreBase
from ravendb.documents.indexes.definitions import (
IndexDefinition,
AbstractCommonApiForIndexes,
IndexPriority,
IndexLockMode,
IndexDeploymentMode,
IndexState,
FieldIndexing,
FieldStorage,
FieldTermVector,
AdditionalAssembly,
SpatialOptions,
IndexFieldOptions,
)
from ravendb.documents.operations.indexes import PutIndexesOperation
# Type variable bound to IndexDefinition; parameterizes the builder/task generics below.
_T_IndexDefinition = TypeVar("_T_IndexDefinition", bound=IndexDefinition)
class AbstractIndexCreationTaskBase(AbstractCommonApiForIndexes, Generic[_T_IndexDefinition]):
    """Base class for tasks that build an index definition and deploy it to a store."""

    @abstractmethod
    def create_index_definition(self) -> Union[IndexDefinition, _T_IndexDefinition]:
        """Produce the index definition this task deploys."""
        pass

    def __init__(
        self,
        conventions: DocumentConventions = None,
        priority: IndexPriority = None,
        lock_mode: IndexLockMode = None,
        deployment_mode: IndexDeploymentMode = None,
        state: IndexState = None,
    ):
        super().__init__()
        # Every setting is optional; None means "leave the server-side default".
        self.conventions = conventions
        self.priority = priority
        self.lock_mode = lock_mode
        self.deployment_mode = deployment_mode
        self.state = state

    def execute(self, store: DocumentStore, conventions: DocumentConventions = None, database: str = None):
        """Build the definition and send it to the effective database of *store*."""
        saved_conventions = self.conventions
        database = DocumentStoreBase.get_effective_database(store, database)
        try:
            # Precedence: explicit argument, then task-level, then the store's executor.
            self.conventions = conventions or self.conventions or store.get_request_executor(database).conventions
            index_definition = self.create_index_definition()
            index_definition.name = self.index_name
            # Copy over only the options this task explicitly configured.
            for option in ("lock_mode", "priority", "state", "deployment_mode"):
                configured = getattr(self, option)
                if configured is not None:
                    setattr(index_definition, option, configured)
            store.maintenance.for_database(database).send(PutIndexesOperation(index_definition))
        finally:
            # Restore whatever conventions the task had before execute().
            self.conventions = saved_conventions
class AbstractGenericIndexCreationTask(
    Generic[_T_IndexDefinition], AbstractIndexCreationTaskBase[_T_IndexDefinition], ABC
):
    """Index creation task that collects per-field options (storage, analyzers,
    term vectors, spatial settings, suggestions) before the definition is built."""

    def __init__(self):
        super().__init__()
        self._reduce: Optional[str] = None
        # Per-field configuration registries, keyed by field name.
        self._stores_strings: Dict[str, FieldStorage] = {}
        self._indexes_strings: Dict[str, FieldIndexing] = {}
        self._analyzers_strings: Dict[str, str] = {}
        self._index_suggestions: Set[str] = set()
        self._term_vectors_strings: Dict[str, FieldTermVector] = {}
        self._spatial_options_strings: Dict[str, SpatialOptions] = {}
        # Map-reduce output routing options.
        self._output_reduce_to_collection: Optional[str] = None
        self._pattern_for_output_reduce_to_collection_references: Optional[str] = None
        self._pattern_references_collection_name: Optional[str] = None

    @property
    def reduce(self) -> str:
        """Source of the reduce function, or None for a map-only index."""
        return self._reduce

    @reduce.setter
    def reduce(self, value: str):
        self._reduce = value

    @property
    def is_map_reduce(self) -> bool:
        """True when a reduce function has been configured."""
        return self._reduce is not None

    def _index(self, field: str, indexing: FieldIndexing) -> None:
        """
        Register how a field should be indexed
        @param field: Field name
        @param indexing: Indexing mode to use
        """
        self._indexes_strings[field] = indexing

    # todo: def _spatial(self, field:str, indexing:Callable[[SpatialOptionsFactory],SpatialOptions]) -> None
    def _spatial(self, field: str, indexing: Callable) -> None:
        raise NotImplementedError()

    def _store_all_fields(self, storage: FieldStorage) -> None:
        """Apply one storage setting to every field of the index."""
        self._stores_strings[constants.Documents.Indexing.Fields.ALL_FIELDS] = storage

    def _store(self, field: str, storage: FieldStorage) -> None:
        """
        Register a field to be stored
        @param field: Field name
        @param storage: Field storage value to use
        """
        self._stores_strings[field] = storage

    def _analyze(self, field: str, analyzer: str) -> None:
        """
        Register a field to be analyzed
        @param field: Field name
        @param analyzer: analyzer to use
        """
        self._analyzers_strings[field] = analyzer

    def _term_vector(self, field: str, term_vector: FieldTermVector) -> None:
        """
        Register a field to have term vectors
        @param field: Field name
        @param term_vector: TermVector type
        """
        self._term_vectors_strings[field] = term_vector

    def _suggestion(self, field: str) -> None:
        """Enable suggestions for a field."""
        self._index_suggestions.add(field)

    def _add_assembly(self, assembly: AdditionalAssembly) -> None:
        """Attach an additional assembly to the index; rejects None."""
        if assembly is None:
            raise ValueError("Assembly cannot be None")
        registered = self.additional_assemblies
        if registered is None:
            registered = set()
            self.additional_assemblies = registered
        registered.add(assembly)
class AbstractIndexDefinitionBuilder(Generic[_T_IndexDefinition]):
    """Base builder that assembles an IndexDefinition from per-field settings.

    Subclasses supply the concrete definition type via _new_index_definition
    and finish populating it via _to_index_definition.
    """

    def __init__(self, index_name: str):
        # Fall back to the class name when no explicit index name is given.
        self._index_name = index_name or self.__class__.__name__
        if len(self._index_name) > 256:
            raise ValueError(f"The index name is limited to 256 characters, but was: {self._index_name}")
        self.reduce: Union[None, str] = None
        # Per-field settings, keyed by field name.
        self.stores_strings: Dict[str, FieldStorage] = {}
        self.indexes_strings: Dict[str, FieldIndexing] = {}
        self.analyzers_strings: Dict[str, str] = {}
        self.suggestions_options: Set[str] = set()
        self.term_vectors_strings: Dict[str, FieldTermVector] = {}
        self.spatial_indexes_strings: Dict[str, SpatialOptions] = {}
        self.lock_mode: Union[None, IndexLockMode] = None
        # Fixed: was annotated as IndexLockMode; this attribute holds an
        # IndexPriority (see AbstractIndexCreationTaskBase.priority).
        self.priority: Union[None, IndexPriority] = None
        self.state: Union[None, IndexState] = None
        self.deployment_mode: Union[None, IndexDeploymentMode] = None
        self.output_reduce_to_collection: Union[None, str] = None
        self.pattern_for_output_reduce_to_collection_references: Union[None, str] = None
        self.pattern_references_collection_name: Union[None, str] = None
        self.additional_sources: Union[None, Dict[str, str]] = None
        self.additional_assemblies: Union[None, Set[AdditionalAssembly]] = None
        self.configuration: Dict[str, str] = {}

    @abstractmethod
    def _new_index_definition(self) -> Union[IndexDefinition, _T_IndexDefinition]:
        """Return a fresh, empty definition of the concrete type."""
        pass

    @abstractmethod
    def _to_index_definition(self, index_definition: _T_IndexDefinition, conventions: DocumentConventions) -> None:
        """Finish populating *index_definition* (e.g. install the map functions)."""
        pass

    def __apply_values(
        self,
        index_definition: IndexDefinition,
        values: Dict[str, object],
        action: Callable[[IndexFieldOptions, object], None],
    ) -> None:
        # Merge one group of per-field values into the definition's field options.
        for key, value in values.items():
            field = index_definition.fields.get(key, IndexFieldOptions())
            action(field, value)
            index_definition.fields[key] = field  # if the field wasn't indexed yet, we need to set it.

    def to_index_definition(self, conventions: DocumentConventions, validate_map: bool = True) -> _T_IndexDefinition:
        """Build and return the index definition from everything configured so far.

        Raises RuntimeError (chaining the original error) when assembly fails.
        """
        try:
            index_definition = self._new_index_definition()
            index_definition.name = self._index_name
            index_definition.reduce = self.reduce
            index_definition.lock_mode = self.lock_mode
            index_definition.priority = self.priority
            index_definition.state = self.state
            index_definition.output_reduce_to_collection = self.output_reduce_to_collection
            index_definition.pattern_for_output_reduce_to_collection_references = (
                self.pattern_for_output_reduce_to_collection_references
            )
            index_definition.pattern_references_collection_name = self.pattern_references_collection_name
            # Suggestions are stored as a field-name -> True mapping on the definition.
            suggestions = {option: True for option in self.suggestions_options}
            # Apply each group of per-field settings onto the matching
            # IndexFieldOptions attribute (data-driven instead of six closures).
            per_field_settings = (
                (self.indexes_strings, "indexing"),
                (self.stores_strings, "storage"),
                (self.analyzers_strings, "analyzer"),
                (self.term_vectors_strings, "term_vector"),
                (self.spatial_indexes_strings, "spatial"),
                (suggestions, "suggestions"),
            )
            for values, attribute in per_field_settings:
                self.__apply_values(
                    index_definition,
                    values,
                    lambda options, value, attribute=attribute: setattr(options, attribute, value),
                )
            index_definition.additional_sources = self.additional_sources
            index_definition.additional_assemblies = self.additional_assemblies
            index_definition.configuration = self.configuration
            self._to_index_definition(index_definition, conventions)
            return index_definition
        except Exception as e:
            # Chain the cause instead of stuffing it into args so the original
            # traceback is preserved for callers.
            raise RuntimeError(f"Failed to create index {self._index_name}") from e  # todo: IndexCompilationException
class IndexDefinitionBuilder(AbstractIndexDefinitionBuilder[IndexDefinition]):
    """Builder for plain (single-map) index definitions."""

    def __init__(self, index_name: Optional[str] = None):
        super().__init__(index_name)
        self.map: Optional[str] = None  # source of the single map function

    def _new_index_definition(self) -> IndexDefinition:
        return IndexDefinition()

    def to_index_definition(self, conventions: DocumentConventions, validate_map: bool = True) -> IndexDefinition:
        """Build the definition; a map is mandatory unless *validate_map* is False."""
        if validate_map and self.map is None:
            raise ValueError(
                f"Map is required to generate an index, "
                f"you cannot create an index without a valid map property (in index {self._index_name})."
            )
        return super().to_index_definition(conventions, validate_map)

    def _to_index_definition(self, index_definition: IndexDefinition, conventions: DocumentConventions) -> None:
        # Nothing to install when no map was configured (validate_map=False path).
        if self.map is not None:
            index_definition.maps.add(self.map)
class AbstractIndexCreationTask(AbstractGenericIndexCreationTask[IndexDefinition], ABC):
    """Creation task for a single-map (optionally map-reduce) index."""

    def __init__(self):
        super().__init__()
        self._map: Optional[str] = None

    @property
    def map(self) -> str:
        """Source of the map function for this index."""
        return self._map

    @map.setter
    def map(self, value: str):
        self._map = value

    def create_index_definition(self) -> IndexDefinition:
        """Assemble an IndexDefinition from everything configured on this task."""
        if self.conventions is None:
            self.conventions = DocumentConventions()
        builder = IndexDefinitionBuilder(self.index_name)
        # Transfer the map/reduce sources and all per-field registries.
        builder.map = self._map
        builder.reduce = self._reduce
        builder.indexes_strings = self._indexes_strings
        builder.analyzers_strings = self._analyzers_strings
        builder.stores_strings = self._stores_strings
        builder.suggestions_options = self._index_suggestions
        builder.term_vectors_strings = self._term_vectors_strings
        builder.spatial_indexes_strings = self._spatial_options_strings
        # Map-reduce output routing.
        builder.output_reduce_to_collection = self._output_reduce_to_collection
        builder.pattern_for_output_reduce_to_collection_references = (
            self._pattern_for_output_reduce_to_collection_references
        )
        builder.pattern_references_collection_name = self._pattern_references_collection_name
        # Deployment-level settings.
        builder.additional_sources = self.additional_sources
        builder.additional_assemblies = self.additional_assemblies
        builder.configuration = self.configuration
        builder.lock_mode = self.lock_mode
        builder.priority = self.priority
        builder.state = self.state
        builder.deployment_mode = self.deployment_mode
        return builder.to_index_definition(self.conventions)
# Type variable constrained to AbstractIndexCreationTask; used by IndexCreation helpers.
_T_AbstractIndexCreationTask = TypeVar("_T_AbstractIndexCreationTask", bound=AbstractIndexCreationTask)
class AbstractMultiMapIndexCreationTask(AbstractGenericIndexCreationTask[IndexDefinition]):
    """Creation task for an index composed of several map functions (multi-map)."""

    def __init__(self):
        super().__init__()
        self._maps: List[str] = []

    def _add_map(self, map: str) -> None:
        """
        Register one map function for this index
        @param map: Map function source; must not be None
        """
        if map is None:
            raise ValueError("Map cannot be None")
        self._maps.append(map)

    def create_index_definition(self):
        """Assemble an IndexDefinition carrying every registered map function."""
        if self.conventions is None:
            self.conventions = DocumentConventions()
        builder = IndexDefinitionBuilder(self.index_name)
        builder.reduce = self._reduce
        builder.indexes_strings = self._indexes_strings
        builder.analyzers_strings = self._analyzers_strings
        builder.stores_strings = self._stores_strings
        builder.suggestions_options = self._index_suggestions
        builder.term_vectors_strings = self._term_vectors_strings
        builder.spatial_indexes_strings = self._spatial_options_strings
        builder.output_reduce_to_collection = self._output_reduce_to_collection
        builder.pattern_for_output_reduce_to_collection_references = (
            self._pattern_for_output_reduce_to_collection_references
        )
        builder.pattern_references_collection_name = self._pattern_references_collection_name
        builder.additional_sources = self.additional_sources
        builder.additional_assemblies = self.additional_assemblies
        builder.configuration = self.configuration
        builder.lock_mode = self.lock_mode
        builder.priority = self.priority
        builder.state = self.state
        builder.deployment_mode = self.deployment_mode
        # validate_map=False: a multi-map index installs its maps below rather
        # than through the builder's single `map` property.
        index_definition = builder.to_index_definition(self.conventions, False)
        index_definition.maps = set(self._maps)
        return index_definition
class IndexCreation:
    """Helpers for deploying many index creation tasks in one server round trip."""

    @staticmethod
    def create_indexes(
        indexes: Collection[_T_AbstractIndexCreationTask],
        store: DocumentStore,
        conventions: Optional[DocumentConventions] = None,
    ) -> None:
        """Deploy all *indexes* at once; fall back to one-by-one on failure."""
        conventions = conventions if conventions is not None else store.conventions
        try:
            indexes_to_add = IndexCreation.create_indexes_to_add(indexes, conventions)
            store.maintenance.send(PutIndexesOperation(indexes_to_add))
        except Exception as e:
            # The batched operation may not be supported; retry sequentially.
            logging.info("Could not create indexes in one shot (maybe using older version of RavenDB ?)", exc_info=e)
            for index in indexes:
                index.execute(store, conventions)

    @staticmethod
    def create_indexes_to_add(
        index_creation_tasks: Collection[_T_AbstractIndexCreationTask], conventions: DocumentConventions
    ) -> List[IndexDefinition]:
        """Materialize one IndexDefinition per task, building under *conventions*."""
        definitions: List[IndexDefinition] = []
        for task in index_creation_tasks:
            saved_conventions = task.conventions
            try:
                # Temporarily swap in the requested conventions while building.
                task.conventions = conventions
                definition = task.create_index_definition()
                definition.name = task.index_name
                # NORMAL is the effective default when the task left these unset.
                definition.priority = task.priority or IndexPriority.NORMAL
                definition.state = task.state or IndexState.NORMAL
                definitions.append(definition)
            finally:
                task.conventions = saved_conventions
        return definitions
| 41.560302 | 117 | 0.703162 |
d9bf3e6653b7deccba603eafbb06b05e748d56b1 | 2,542 | py | Python | PyBank/main.py | palolo02/Python-Challenge | ab7b2a7be34bcd2de1b569904113e746730f22e2 | [
"ADSL"
] | null | null | null | PyBank/main.py | palolo02/Python-Challenge | ab7b2a7be34bcd2de1b569904113e746730f22e2 | [
"ADSL"
] | null | null | null | PyBank/main.py | palolo02/Python-Challenge | ab7b2a7be34bcd2de1b569904113e746730f22e2 | [
"ADSL"
] | null | null | null | # ===============================================
# Paolo Vega
# Bootcamp Data Analytics
# Version 1.0.0 03/02/2020
# 1.0.1 03/04/2020
# 1.0.2 03/05/2020
# 1.0.3 03/06/2020
# File to run the PyBank analysis of the financial records of our company
# ===============================================
# Add required modules to facilitate the implementation
import os
import csv
import datetime
import statistics
# Define the folder and file name location
folder = 'PyBank'
csvName = "budget_data.csv"
csvpath = os.path.join(folder,csvName)
# Lists that will store the months, profits and the highest increase
months = []
profits = []
incprofits = []
# initialize variables
totalValue = 0
noMonths = 0
avgChange = 0
# Descriptions for the greaates increase and greatest decrease
greatestIncrease = 0
greatestIncMonth = ""
greatestDecrease = 0
greatestDecMonth = ""
# Open and read the csv file
with open(csvpath) as csvfile:
csvreader = csv.reader(csvfile, delimiter=",")
# Store the header to avoid column names
header = next(csvreader)
# Go through each line to create two lists for holding months and their profit
for row in csvreader:
months.append(row[0])
profits.append(float((row[1])))
# Calculate the differences each month with respect the previous one to
# identify the profit increases
# iterate from the second row to the last one
for i in range(1,(len(profits))):
# Calculate the increase
incTemp = profits[i]-profits[i-1]
# Add the difference to the increase values
incprofits.append(incTemp)
# Assess whether this increase is the highest increase among the dataset
if(incTemp > greatestIncrease):
greatestIncrease = incTemp
greatestIncMonth = months[i]
# Assess whether this increase is the highest decrease among the dataset
if(incTemp < greatestDecrease):
greatestDecrease = incTemp
greatestDecMonth = months[i]
# Calculate the totals of the months and their profit of the differences
avgChange = statistics.mean(incprofits)
totalValue = sum(profits)
noMonths = len(months)
# Print results with appropiate format
print("Financial Analysis")
print("----------------------------------------------")
print(f"Total months: {noMonths}")
print(f"Total value: {totalValue:,.3f} ")
print(f"Average: $ {avgChange:,.3f}")
print(f"Greatest increase in Profit: {greatestIncMonth} | $ {greatestIncrease:,.3f}")
print(f"Greatest decrease in Profit: {greatestDecMonth} | $ {greatestDecrease:,.3f}")
| 31.382716 | 85 | 0.678206 |
48bfae94a1fb650370c10c7ce22111b37b29f4c4 | 382 | py | Python | park_management/park_management/report/item_wise_purchase_register_simple/item_wise_purchase_register_simple.py | iRaySpace/park_management | e4b95043fd2410c057f6353e5e8a65f51550033c | [
"MIT"
] | null | null | null | park_management/park_management/report/item_wise_purchase_register_simple/item_wise_purchase_register_simple.py | iRaySpace/park_management | e4b95043fd2410c057f6353e5e8a65f51550033c | [
"MIT"
] | null | null | null | park_management/park_management/report/item_wise_purchase_register_simple/item_wise_purchase_register_simple.py | iRaySpace/park_management | e4b95043fd2410c057f6353e5e8a65f51550033c | [
"MIT"
] | null | null | null | # Copyright (c) 2013, 9t9it and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
def execute(filters=None):
    """Run the item-wise purchase register by delegating to the item-wise
    sales register report with the transaction type forced to "Purchase"."""
    from park_management.park_management.report.item_wise_sales_register_simple import (
        item_wise_sales_register_simple as sales_report,
    )
    return sales_report.execute(filters, transaction_type="Purchase")
| 27.285714 | 120 | 0.790576 |
f75ba048348160d9a556d711adf79144a574bf0a | 6,099 | py | Python | tools/lib/uobjnew.py | mrks/bcc | 321c9c979889abce48d0844b3d539ec9a01e6f3c | [
"Apache-2.0"
] | 2 | 2020-08-14T16:41:10.000Z | 2021-07-07T15:10:33.000Z | tools/lib/uobjnew.py | mrks/bcc | 321c9c979889abce48d0844b3d539ec9a01e6f3c | [
"Apache-2.0"
] | 9 | 2021-07-29T21:15:28.000Z | 2022-02-16T18:17:49.000Z | tools/lib/uobjnew.py | mrks/bcc | 321c9c979889abce48d0844b3d539ec9a01e6f3c | [
"Apache-2.0"
] | 3 | 2016-09-16T12:09:38.000Z | 2022-03-26T22:45:56.000Z | #!/usr/bin/python
# @lint-avoid-python-3-compatibility-imports
#
# uobjnew Summarize object allocations in high-level languages.
# For Linux, uses BCC, eBPF.
#
# USAGE: uobjnew [-h] [-T TOP] [-v] {c,java,ruby,tcl} pid [interval]
#
# Copyright 2016 Sasha Goldshtein
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 25-Oct-2016 Sasha Goldshtein Created this.
from __future__ import print_function
import argparse
from bcc import BPF, USDT, utils
from time import sleep
import os
# C needs to be the last language.
languages = ["c", "java", "ruby", "tcl"]
examples = """examples:
./uobjnew -l java 145 # summarize Java allocations in process 145
./uobjnew -l c 2020 1 # grab malloc() sizes and print every second
./uobjnew -l ruby 6712 -C 10 # top 10 Ruby types by number of allocations
./uobjnew -l ruby 6712 -S 10 # top 10 Ruby types by total size
"""
parser = argparse.ArgumentParser(
description="Summarize object allocations in high-level languages.",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("-l", "--language", choices=languages,
help="language to trace")
parser.add_argument("pid", type=int, help="process id to attach to")
parser.add_argument("interval", type=int, nargs='?',
help="print every specified number of seconds")
parser.add_argument("-C", "--top-count", type=int,
help="number of most frequently allocated types to print")
parser.add_argument("-S", "--top-size", type=int,
help="number of largest types by allocated bytes to print")
parser.add_argument("-v", "--verbose", action="store_true",
help="verbose mode: print the BPF program (for debugging purposes)")
parser.add_argument("--ebpf", action="store_true",
help=argparse.SUPPRESS)
args = parser.parse_args()
language = args.language
if not language:
language = utils.detect_language(languages, args.pid)
program = """
#include <linux/ptrace.h>
struct key_t {
#if MALLOC_TRACING
u64 size;
#else
char name[50];
#endif
};
struct val_t {
u64 total_size;
u64 num_allocs;
};
BPF_HASH(allocs, struct key_t, struct val_t);
""".replace("MALLOC_TRACING", "1" if language == "c" else "0")
usdt = USDT(pid=args.pid)
#
# C
#
if language == "c":
program += """
int alloc_entry(struct pt_regs *ctx, size_t size) {
struct key_t key = {};
struct val_t *valp, zero = {};
key.size = size;
valp = allocs.lookup_or_try_init(&key, &zero);
if (valp) {
valp->total_size += size;
valp->num_allocs += 1;
}
return 0;
}
"""
#
# Java
#
elif language == "java":
program += """
int alloc_entry(struct pt_regs *ctx) {
struct key_t key = {};
struct val_t *valp, zero = {};
u64 classptr = 0, size = 0;
bpf_usdt_readarg(2, ctx, &classptr);
bpf_usdt_readarg(4, ctx, &size);
bpf_probe_read_user(&key.name, sizeof(key.name), (void *)classptr);
valp = allocs.lookup_or_try_init(&key, &zero);
if (valp) {
valp->total_size += size;
valp->num_allocs += 1;
}
return 0;
}
"""
usdt.enable_probe_or_bail("object__alloc", "alloc_entry")
#
# Ruby
#
elif language == "ruby":
create_template = """
int THETHING_alloc_entry(struct pt_regs *ctx) {
struct key_t key = { .name = "THETHING" };
struct val_t *valp, zero = {};
u64 size = 0;
bpf_usdt_readarg(1, ctx, &size);
valp = allocs.lookup_or_try_init(&key, &zero);
if (valp) {
valp->total_size += size;
valp->num_allocs += 1;
}
return 0;
}
"""
program += """
int object_alloc_entry(struct pt_regs *ctx) {
struct key_t key = {};
struct val_t *valp, zero = {};
u64 classptr = 0;
bpf_usdt_readarg(1, ctx, &classptr);
bpf_probe_read_user(&key.name, sizeof(key.name), (void *)classptr);
valp = allocs.lookup_or_try_init(&key, &zero);
if (valp) {
valp->num_allocs += 1; // We don't know the size, unfortunately
}
return 0;
}
"""
usdt.enable_probe_or_bail("object__create", "object_alloc_entry")
for thing in ["string", "hash", "array"]:
program += create_template.replace("THETHING", thing)
usdt.enable_probe_or_bail("%s__create" % thing,
"%s_alloc_entry" % thing)
#
# Tcl
#
elif language == "tcl":
program += """
int alloc_entry(struct pt_regs *ctx) {
struct key_t key = { .name = "<ALL>" };
struct val_t *valp, zero = {};
valp = allocs.lookup_or_try_init(&key, &zero);
if (valp) {
valp->num_allocs += 1;
}
return 0;
}
"""
usdt.enable_probe_or_bail("obj__create", "alloc_entry")
else:
print("No language detected; use -l to trace a language.")
exit(1)
# Dump the generated program when requested; --ebpf exits after printing.
if args.ebpf or args.verbose:
    if args.verbose:
        print(usdt.get_text())
    print(program)
    if args.ebpf:
        exit()
bpf = BPF(text=program, usdt_contexts=[usdt])
# C has no USDT probes; attach directly to malloc() in the C library instead.
if language == "c":
    bpf.attach_uprobe(name="c", sym="malloc", fn_name="alloc_entry",
        pid=args.pid)
exit_signaled = False
print("Tracing allocations in process %d (language: %s)... Ctrl-C to quit." %
    (args.pid, language or "none"))
# Main loop: sleep for the reporting interval (or until Ctrl-C), then print
# the allocation table, optionally limited to the top-N entries.
while True:
    try:
        sleep(args.interval or 99999999)
    except KeyboardInterrupt:
        exit_signaled = True
    print()
    data = bpf["allocs"]
    # Sort ascending and keep the tail so the largest entries print last.
    if args.top_count:
        data = sorted(data.items(), key=lambda kv: kv[1].num_allocs)
        data = data[-args.top_count:]
    elif args.top_size:
        data = sorted(data.items(), key=lambda kv: kv[1].total_size)
        data = data[-args.top_size:]
    else:
        data = sorted(data.items(), key=lambda kv: kv[1].total_size)
    print("%-30s %8s %12s" % ("NAME/TYPE", "# ALLOCS", "# BYTES"))
    for key, value in data:
        if language == "c":
            obj_type = "block size %d" % key.size
        else:
            obj_type = key.name
        print("%-30s %8d %12d" %
            (obj_type, value.num_allocs, value.total_size))
    # In interval mode, reset the counters between reports; otherwise exit
    # after the single report triggered by Ctrl-C.
    if args.interval and not exit_signaled:
        bpf["allocs"].clear()
    else:
        exit()
| 28.905213 | 78 | 0.627152 |
7417a99b41340623450e661369cbd40a37294af9 | 5,248 | py | Python | pepdb/core/templatetags/jinja_filters.py | dchaplinsky/pep.org.ua | 8633a65fb657d7f04dbdb12eb8ae705fa6be67e3 | [
"MIT"
] | 7 | 2015-12-21T03:52:46.000Z | 2020-07-24T19:17:23.000Z | pepdb/core/templatetags/jinja_filters.py | dchaplinsky/pep.org.ua | 8633a65fb657d7f04dbdb12eb8ae705fa6be67e3 | [
"MIT"
] | 12 | 2016-03-05T18:11:05.000Z | 2021-06-17T20:20:03.000Z | pepdb/core/templatetags/jinja_filters.py | dchaplinsky/pep.org.ua | 8633a65fb657d7f04dbdb12eb8ae705fa6be67e3 | [
"MIT"
] | 4 | 2016-07-17T20:19:38.000Z | 2021-03-23T12:47:20.000Z | # coding: utf-8
from __future__ import unicode_literals
from itertools import groupby
from urlparse import urlsplit, urlunsplit
from urllib import unquote_plus
from decimal import Decimal, InvalidOperation
from django.utils.safestring import mark_safe
from django_markdown.utils import markdown as _markdown
from django.core.urlresolvers import reverse, resolve, Resolver404, NoReverseMatch
from django.utils.translation import override, get_language
from django.conf import settings
from core.utils import get_localized_field, get_exchange_rate
from django_jinja import library
from jinja2.filters import _GroupTuple
@library.filter
def markdown(*args, **kwargs):
    """Render markdown and wrap the result in a safe .richtext div."""
    rendered = _markdown(*args, **kwargs)
    return mark_safe('<div class="richtext">%s</div>' % rendered)
@library.filter
def rule_val(value):
    """Emphasize a rule value by wrapping it in <strong> tags (marked safe)."""
    emphasized = '<strong>%s</strong>' % value
    return mark_safe(emphasized)
@library.filter
def rule_money(value):
    """Emphasize a monetary rule value: currency-format it, then wrap in <strong>."""
    formatted = curformat(value)
    return mark_safe('<strong>%s</strong>' % formatted)
@library.global_function
def updated_querystring(request, params):
    """Updates current querystring with a given dict of params, removing
    existing occurrences of such params. Returns a urlencoded querystring."""
    querystring = request.GET.copy()
    # Drop any existing occurrences before applying the overrides.
    for key in params:
        querystring.pop(key, None)
    querystring.update(params)
    return querystring.urlencode()
@library.filter
def curformat(value):
    """Format a money amount with space thousands separators and a comma
    decimal mark, prefixing USD/GBP/EUR when a currency symbol is present.
    Empty or zero values render as an empty-value placeholder."""
    if not isinstance(value, basestring):
        value = unicode(value)
    if not value or value in ("0", "0.0"):
        return mark_safe('<i class="i-value-empty">—</i>')
    currency = ""
    # Strip known currency symbols; the last matching group wins, mirroring
    # the sequential checks this replaces.
    for symbols, code in ((("$",), "USD "), (("£",), "GBP "), (("€", "Є"), "EUR ")):
        if any(symbol in value for symbol in symbols):
            for symbol in symbols:
                value = value.replace(symbol, "")
            currency = code
    try:
        amount = float(value.replace(',', '.'))
    except ValueError:
        # Not numeric after cleanup: return the (symbol-stripped) input as-is.
        return value
    # Build "1,234.56" then swap separators to "1 234,56" (European style).
    return '{}{:,.2f}'.format(currency, amount).replace(',', ' ').replace('.', ',')
@library.filter
def spaceformat(value):
    """Format a numeric string with comma thousands separators, trimming
    trailing zeros; non-numeric input is returned unchanged (with a leading
    zero added when it starts with a dot)."""
    try:
        amount = float(value.replace(',', '.'))
    except ValueError:
        return "0" + value if value.startswith(".") else value
    return '{:,.2f}'.format(amount).rstrip("0").rstrip(".")
@library.filter
def groupbyandsort(value, attribute, sort_attribute, reverse):
    """Group items by *attribute*, then order the groups by *sort_attribute*
    of each group's first item."""
    def group_key(item):
        return getattr(item, attribute)
    grouped = []
    for key, members in groupby(sorted(value, key=group_key), group_key):
        grouped.append(_GroupTuple(key, list(members)))
    grouped.sort(key=lambda group: getattr(group.list[0], sort_attribute), reverse=reverse)
    return grouped
@library.filter
def is_list(value):
    """Template test: True when the value is a list (or list subclass)."""
    return isinstance(value, list)
@library.filter
def xmlize(value):
    """Render booleans as 0/1 for XML output; pass everything else through."""
    return int(value) if isinstance(value, bool) else value
def smart_unquote_plus(v):
    """Unquote a string, or each element of a tuple/list; other values pass through."""
    if isinstance(v, basestring):
        return unquote_plus(v)
    if isinstance(v, (tuple, list)):
        unquoted = [unquote_plus(m) for m in v]
        return tuple(unquoted) if isinstance(v, tuple) else unquoted
    return v
def orig_translate_url(url, lang_code, orig_lang_code=None):
    """
    Given a URL (absolute or relative), try to get its translated version in
    the `lang_code` language (either by i18n_patterns or by translated regex).
    Return the original URL if no translated version is found.
    """
    parsed = urlsplit(url)
    try:
        # Resolve the path under the source language so translated regexes match.
        if orig_lang_code is None:
            match = resolve(parsed.path)
        else:
            with override(orig_lang_code):
                match = resolve(parsed.path)
    except Resolver404:
        # Unresolvable path: fall through and return the input URL unchanged.
        pass
    else:
        to_be_reversed = "%s:%s" % (match.namespace, match.url_name) if match.namespace else match.url_name
        with override(lang_code):
            try:
                # Unquote captured args so reverse() re-quotes them exactly once.
                match.kwargs = {k: smart_unquote_plus(v) for k, v in match.kwargs.items()}
                match.args = [smart_unquote_plus(v) for v in match.args]
                url = reverse(to_be_reversed, args=match.args, kwargs=match.kwargs)
            except NoReverseMatch:
                pass
            else:
                # NOTE: the scheme is forced to "https" regardless of the input URL.
                url = urlunsplit(("https", parsed.netloc, url, parsed.query, parsed.fragment))
    return url
@library.global_function
def translate_url(request, language):
    """Translate the current request URL (or a plain URL string) into *language*."""
    url = request if isinstance(request, str) else request.build_absolute_uri()
    return orig_translate_url(url, language)
@library.filter
def translated(value, field, fallback=True):
    """Return *field* localized for the active language; optionally fall back
    to the default-language value when the localized one is empty."""
    localized = get_localized_field(value, field, get_language())
    if localized or not fallback:
        return localized
    return get_localized_field(value, field, settings.LANGUAGE_CODE)
@library.filter
def convert_curr(value, year):
    """For the English locale, convert a UAH amount to USD using the stored
    exchange rate for *year*; any conversion failure returns the input unchanged."""
    if get_language() != "en" or not value:
        return value
    try:
        amount = Decimal(str(value).replace(" ", "").replace(",", "."))
        return amount / get_exchange_rate("USD", year)
    except (TypeError, ZeroDivisionError, InvalidOperation):
        return value
| 28.835165 | 107 | 0.635671 |
c0f8c117a0475d7b6f508713cf62dbbd9a62b092 | 2,289 | py | Python | setup.py | Dresdn/python-semantic-release | 772573f6636f0a93c617cff29297e98edcb240df | [
"MIT"
] | null | null | null | setup.py | Dresdn/python-semantic-release | 772573f6636f0a93c617cff29297e98edcb240df | [
"MIT"
] | null | null | null | setup.py | Dresdn/python-semantic-release | 772573f6636f0a93c617cff29297e98edcb240df | [
"MIT"
] | null | null | null | import re
import sys
from setuptools import find_packages, setup
def _read_long_description():
try:
with open("readme.rst") as fd:
return fd.read()
except Exception:
return None
with open("semantic_release/__init__.py", "r") as fd:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', fd.read(), re.MULTILINE
).group(1)
try:
from semantic_release import setup_hook
setup_hook(sys.argv)
except ImportError:
pass
setup(
name="python-semantic-release",
version=version,
url="http://github.com/relekang/python-semantic-release",
author="Rolf Erik Lekang",
author_email="me@rolflekang.com",
description="Automatic Semantic Versioning for Python projects",
long_description=_read_long_description(),
packages=find_packages(exclude=("tests",)),
license="MIT",
install_requires=[
"click>=7,<9",
"click_log>=0.3,<1",
"gitpython>=3.0.8,<4",
"invoke>=1.4.1,<2",
"semver>=2.10,<3",
"twine>=3,<4",
"requests>=2.25,<3",
"wheel",
"python-gitlab>=2,<4",
# tomlkit used to be pinned to 0.7.0
# See https://github.com/relekang/python-semantic-release/issues/336
# and https://github.com/relekang/python-semantic-release/pull/337
"tomlkit>=0.10.0,<0.11.0",
"dotty-dict>=1.3.0,<2",
"dataclasses==0.8; python_version < '3.7.0'",
],
extras_require={
"test": [
"coverage>=5,<6",
"pytest>=5,<6",
"pytest-xdist>=1,<2",
"pytest-mock>=2,<3",
"responses==0.13.3",
"mock==1.3.0",
],
"docs": ["Sphinx==1.3.6", "Jinja2==3.0.3"],
"dev": ["tox", "isort", "black"],
"mypy": ["mypy", "types-requests"],
},
entry_points="""
[console_scripts]
semantic-release=semantic_release.cli:entry
""",
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
)
| 28.259259 | 76 | 0.558759 |
e2927691a186ca1d1e943dfbfcce71404b43afe5 | 963 | py | Python | dags/exercise2.py | cg-mihabatic/airflow-training-skeleton | dd57da610815f494269380f935c4ca26a5e8f5cc | [
"Apache-2.0"
] | null | null | null | dags/exercise2.py | cg-mihabatic/airflow-training-skeleton | dd57da610815f494269380f935c4ca26a5e8f5cc | [
"Apache-2.0"
] | null | null | null | dags/exercise2.py | cg-mihabatic/airflow-training-skeleton | dd57da610815f494269380f935c4ca26a5e8f5cc | [
"Apache-2.0"
] | null | null | null | import airflow
from airflow.models import DAG
from airflow.operators.python_operator import PythonOperator
from airflow.operators.bash_operator import BashOperator
from airflow.operators.dummy_operator import DummyOperator
from datetime import datetime
args = {
'owner': 'Miha',
'start_date': datetime(2019,11,17),
}
dag = DAG(
dag_id='exercise2',
default_args=args,
schedule_interval="@daily",
)
def print_date (**context):
print("Date = " + str(datetime.today()))
print_execution_date = PythonOperator(
task_id="print_execution_date",
python_callable=print_date,
provide_context=True,
dag=dag,
)
the_end = DummyOperator(
task_id='the_end',
dag=dag,
)
w1 = BashOperator(task_id="wait_5", bash_command="sleep 5", dag=dag,)
w2 = BashOperator(task_id="wait_1", bash_command="sleep 1", dag=dag,)
w3 = BashOperator(task_id="wait_10", bash_command="sleep 10", dag=dag,)
print_execution_date >> [w1, w2, w3]
[w1, w2, w3] >> the_end
| 24.075 | 71 | 0.739356 |
92d7db132e9ab90f5e6d4fceb6453a89ab784dc4 | 674 | py | Python | vendor-local/lib/python/djcelery/__init__.py | Mozilla-GitHub-Standards/54c69db06ef83bda60e995a6c34ecfd168ca028994e40ce817295415bb409f0c | f80e7c0cff97a1e9b301aa04015db983c7645778 | [
"BSD-3-Clause"
] | 4 | 2015-05-08T16:58:53.000Z | 2019-09-06T05:30:59.000Z | vendor-local/lib/python/djcelery/__init__.py | Mozilla-GitHub-Standards/54c69db06ef83bda60e995a6c34ecfd168ca028994e40ce817295415bb409f0c | f80e7c0cff97a1e9b301aa04015db983c7645778 | [
"BSD-3-Clause"
] | 2 | 2019-02-17T17:44:53.000Z | 2019-03-28T03:54:39.000Z | vendor-local/lib/python/djcelery/__init__.py | Mozilla-GitHub-Standards/54c69db06ef83bda60e995a6c34ecfd168ca028994e40ce817295415bb409f0c | f80e7c0cff97a1e9b301aa04015db983c7645778 | [
"BSD-3-Clause"
] | 7 | 2015-05-21T15:38:29.000Z | 2019-10-28T23:39:06.000Z | """Django Celery Integration."""
# :copyright: (c) 2009 - 2012 by Ask Solem.
# :license: BSD, see LICENSE for more details.
from __future__ import absolute_import
import os
# Package metadata.  Tuple positions past 3 (if any) are appended verbatim
# to the dotted version string -- presumably a textual suffix like "rc1".
VERSION = (2, 5, 3)
__version__ = ".".join(str(part) for part in VERSION[:3]) + "".join(VERSION[3:])
__author__ = "Ask Solem"
__contact__ = "ask@celeryproject.org"
__homepage__ = "http://celeryproject.org"
__docformat__ = "restructuredtext"
__license__ = "BSD (3 clause)"
# -eof meta-
def setup_loader():
    """Point Celery at the Django loader unless one is already configured."""
    loader_path = "djcelery.loaders.DjangoLoader"
    os.environ.setdefault("CELERY_LOADER", loader_path)


# Importing this module enables the Celery Django loader.
setup_loader()
from celery import current_app as celery # noqa
| 25.923077 | 75 | 0.725519 |
e75200729f106399ca87e01a27cc41b6b3d13ac5 | 4,249 | py | Python | Lib/site-packages/pygments/lexers/_vbscript_builtins.py | nemarugommula/ecommerce | 60185e79655fbaf0fcad9e877a886fe9eb3c4451 | [
"bzip2-1.0.6"
] | 6,989 | 2017-07-18T06:23:18.000Z | 2022-03-31T15:58:36.000Z | Lib/site-packages/pygments/lexers/_vbscript_builtins.py | nemarugommula/ecommerce | 60185e79655fbaf0fcad9e877a886fe9eb3c4451 | [
"bzip2-1.0.6"
] | 1,978 | 2017-07-18T09:17:58.000Z | 2022-03-31T14:28:43.000Z | Lib/site-packages/pygments/lexers/_vbscript_builtins.py | nemarugommula/ecommerce | 60185e79655fbaf0fcad9e877a886fe9eb3c4451 | [
"bzip2-1.0.6"
] | 1,228 | 2017-07-18T09:03:13.000Z | 2022-03-29T05:57:40.000Z | # -*- coding: utf-8 -*-
"""
pygments.lexers._vbscript_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
These are manually translated lists from
http://www.indusoft.com/pdf/VBScript%20Reference.pdf.
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# VBScript keywords recognised by the lexer.  "dim", "const" and "option"
# are handled by dedicated lexer rules rather than this word list.
# Fix: the original list contained 'function' twice; the duplicate entry
# has been removed.
KEYWORDS = [
    'ByRef',
    'ByVal',
    # dim: special rule
    'call',
    'case',
    'class',
    # const: special rule
    'do',
    'each',
    'else',
    'elseif',
    'end',
    'erase',
    'execute',
    'exit',
    'for',
    'function',
    'GetRef',
    'global',
    'if',
    'let',
    'loop',
    'next',
    'new',
    # option: special rule
    'private',
    'public',
    'redim',
    'select',
    'set',
    'sub',
    'then',
    'wend',
    'while',
    'with',
]
# Built-in VBScript functions, in the original reference order.
BUILTIN_FUNCTIONS = [
    'Abs', 'Array', 'Asc', 'Atn', 'CBool', 'CByte', 'CCur', 'CDate', 'CDbl',
    'Chr', 'CInt', 'CLng', 'Cos', 'CreateObject', 'CSng', 'CStr', 'Date',
    'DateAdd', 'DateDiff', 'DatePart', 'DateSerial', 'DateValue', 'Day',
    'Eval', 'Exp', 'Filter', 'Fix', 'FormatCurrency', 'FormatDateTime',
    'FormatNumber', 'FormatPercent', 'GetObject', 'GetLocale', 'Hex', 'Hour',
    'InStr', 'inStrRev', 'Int', 'IsArray', 'IsDate', 'IsEmpty', 'IsNull',
    'IsNumeric', 'IsObject', 'Join', 'LBound', 'LCase', 'Left', 'Len',
    'LoadPicture', 'Log', 'LTrim', 'Mid', 'Minute', 'Month', 'MonthName',
    'MsgBox', 'Now', 'Oct', 'Randomize', 'RegExp', 'Replace', 'RGB', 'Right',
    'Rnd', 'Round', 'RTrim', 'ScriptEngine', 'ScriptEngineBuildVersion',
    'ScriptEngineMajorVersion', 'ScriptEngineMinorVersion', 'Second',
    'SetLocale', 'Sgn', 'Space', 'Split', 'Sqr', 'StrComp', 'String',
    'StrReverse', 'Tan', 'Time', 'Timer', 'TimeSerial', 'TimeValue', 'Trim',
    'TypeName', 'UBound', 'UCase', 'VarType', 'Weekday', 'WeekdayName',
    'Year',
]
# Built-in objects exposed to VBScript programs.
BUILTIN_VARIABLES = [
    'Debug', 'Dictionary', 'Drive', 'Drives', 'Err', 'File', 'Files',
    'FileSystemObject', 'Folder', 'Folders', 'Match', 'Matches', 'RegExp',
    'Submatches', 'TextStream',
]
# Symbolic operator tokens recognised by the lexer.
OPERATORS = [
    '+', '-', '*', '/', '\\', '^', '|', '<', '<=', '>', '>=', '=', '<>',
    '&', '$',
]
# Word-form operators.
OPERATOR_WORDS = [
    'mod', 'and', 'or', 'xor', 'eqv', 'imp', 'is', 'not',
]
# Named constants predefined by VBScript.  Two fixes relative to the
# original hand-translated list: the duplicate 'vbUseSystem' entry was
# dropped, and the typo 'vbBoole' was corrected to 'vbBoolean' (the
# actual VBScript VarType constant name).
BUILTIN_CONSTANTS = [
    'False', 'True',
    'vbAbort', 'vbAbortRetryIgnore', 'vbApplicationModal', 'vbArray',
    'vbBinaryCompare', 'vbBlack', 'vbBlue', 'vbBoolean', 'vbByte',
    'vbCancel', 'vbCr', 'vbCritical', 'vbCrLf', 'vbCurrency', 'vbCyan',
    'vbDataObject', 'vbDate', 'vbDefaultButton1', 'vbDefaultButton2',
    'vbDefaultButton3', 'vbDefaultButton4', 'vbDouble', 'vbEmpty',
    'vbError', 'vbExclamation', 'vbFalse', 'vbFirstFullWeek', 'vbFirstJan1',
    'vbFormFeed', 'vbFriday', 'vbGeneralDate', 'vbGreen', 'vbIgnore',
    'vbInformation', 'vbInteger', 'vbLf', 'vbLong', 'vbLongDate',
    'vbLongTime', 'vbMagenta', 'vbMonday', 'vbMsgBoxHelpButton',
    'vbMsgBoxRight', 'vbMsgBoxRtlReading', 'vbMsgBoxSetForeground',
    'vbNewLine', 'vbNo', 'vbNull', 'vbNullChar', 'vbNullString', 'vbObject',
    'vbObjectError', 'vbOK', 'vbOKCancel', 'vbOKOnly', 'vbQuestion',
    'vbRed', 'vbRetry', 'vbRetryCancel', 'vbSaturday', 'vbShortDate',
    'vbShortTime', 'vbSingle', 'vbString', 'vbSunday', 'vbSystemModal',
    'vbTab', 'vbTextCompare', 'vbThursday', 'vbTrue', 'vbTuesday',
    'vbUseDefault', 'vbUseSystem', 'vbVariant', 'vbVerticalTab',
    'vbWednesday', 'vbWhite', 'vbYellow', 'vbYes', 'vbYesNo',
    'vbYesNoCancel',
]
| 15.120996 | 70 | 0.480348 |
1e81afe73e3a2ef5f01de51e47bc7e230019d41d | 1,586 | py | Python | azure-mgmt-network/azure/mgmt/network/v2018_10_01/models/topology_association.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | azure-mgmt-network/azure/mgmt/network/v2018_10_01/models/topology_association.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | azure-mgmt-network/azure/mgmt/network/v2018_10_01/models/topology_association.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class TopologyAssociation(Model):
    """A resource associated with a parent resource in a network topology.

    :param name: Name of the resource that is associated with the parent
     resource.
    :type name: str
    :param resource_id: ID of the resource that is associated with the
     parent resource.
    :type resource_id: str
    :param association_type: How the child resource relates to the parent
     resource. Possible values include: 'Associated', 'Contains'
    :type association_type: str or
     ~azure.mgmt.network.v2018_10_01.models.AssociationType
    """

    # Serialization mapping consumed by msrest; keys/values must match the
    # wire format exactly.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'association_type': {'key': 'associationType', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(TopologyAssociation, self).__init__(**kwargs)
        # dict.get defaults to None, matching the optional parameters.
        self.name = kwargs.get('name')
        self.resource_id = kwargs.get('resource_id')
        self.association_type = kwargs.get('association_type')
| 38.682927 | 78 | 0.626734 |
cf00777d2380cb59d5bb2ebe1d771c0e79f85eaa | 106,368 | py | Python | neutron/tests/unit/agent/linux/test_dhcp.py | lhyzxj314/neutron | 5600261f782f10c2dd04cdc2ea6a64287499c8ad | [
"Apache-2.0"
] | null | null | null | neutron/tests/unit/agent/linux/test_dhcp.py | lhyzxj314/neutron | 5600261f782f10c2dd04cdc2ea6a64287499c8ad | [
"Apache-2.0"
] | 1 | 2019-08-16T14:02:19.000Z | 2019-08-16T14:02:19.000Z | neutron/tests/unit/agent/linux/test_dhcp.py | MrCocoaCat/Neutron_Ocata | d7d8e0278cfaa91801083f476d5dd3520ad5d0bd | [
"Apache-2.0"
] | null | null | null | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
import netaddr
from neutron_lib import constants
from oslo_config import cfg
import oslo_messaging
from oslo_utils import fileutils
import testtools
from neutron.agent.common import config
from neutron.agent.linux import dhcp
from neutron.agent.linux import external_process
from neutron.common import constants as n_const
from neutron.conf.agent import dhcp as dhcp_config
from neutron.conf import common as base_config
from neutron.extensions import extra_dhcp_opt as edo_ext
from neutron.tests import base
from neutron.tests import tools
class FakeIPAllocation(object):
    """Minimal stand-in for one fixed-IP allocation entry of a port."""

    def __init__(self, address, subnet_id=None):
        self.ip_address, self.subnet_id = address, subnet_id
class FakeDNSAssignment(object):
    """Fake DNS assignment deriving hostname/FQDN from the IP address.

    Without an explicit dns_name, the hostname becomes
    ``host-<address>`` with dots/colons replaced by dashes; the FQDN
    appends ``.<domain>.`` unless the domain is empty.
    """

    def __init__(self, ip_address, dns_name='', domain='openstacklocal'):
        if not dns_name:
            dns_name = 'host-' + ip_address.replace('.', '-').replace(':', '-')
        self.hostname = dns_name
        self.ip_address = ip_address
        self.fqdn = '%s.%s.' % (dns_name, domain) if domain else dns_name
class DhcpOpt(object):
    """Fake extra-DHCP-option record.

    Accepts arbitrary keyword attributes; ``ip_version`` defaults to 4
    unless overridden by the caller.
    """

    def __init__(self, **kwargs):
        attrs = dict(ip_version=4)
        attrs.update(kwargs)
        self.__dict__.update(attrs)

    def __str__(self):
        return str(vars(self))
# Mixin that lets tests read class attributes with dict-style access,
# e.g. ``subnet['cidr']``; missing keys yield None instead of raising.
class Dictable(object):
    def __getitem__(self, key):
        return type(self).__dict__.get(key)
# Fake port owned by the DHCP agent itself, with one IPv4 fixed IP on
# the primary fake v4 subnet.
class FakeDhcpPort(object):
    def __init__(self):
        self.id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaa'
        self.admin_state_up = True
        self.device_owner = constants.DEVICE_OWNER_DHCP
        self.fixed_ips = [
            FakeIPAllocation('192.168.0.1',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd')]
        self.mac_address = '00:00:80:aa:bb:ee'
        self.device_id = 'fake_dhcp_port'
        self.extra_dhcp_opts = []
# Dual-stack DHCP port whose device_id marks it as a reserved DHCP port.
class FakeReservedPort(object):
    def __init__(self, id='reserved-aaaa-aaaa-aaaa-aaaaaaaaaaa'):
        self.admin_state_up = True
        self.device_owner = constants.DEVICE_OWNER_DHCP
        self.fixed_ips = [
            FakeIPAllocation('192.168.0.6',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd'),
            FakeIPAllocation('fdca:3ba5:a17a:4ba3::2',
                             'ffffffff-ffff-ffff-ffff-ffffffffffff')]
        self.mac_address = '00:00:80:aa:bb:ee'
        self.device_id = n_const.DEVICE_ID_RESERVED_DHCP_PORT
        self.extra_dhcp_opts = []
        self.id = id
# Active port with a single IPv4 fixed IP; the DNS domain is
# parameterizable so tests can exercise non-default domains.
class FakePort1(object):
    def __init__(self, domain='openstacklocal'):
        self.id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        self.admin_state_up = True
        self.device_owner = 'foo1'
        self.fixed_ips = [
            FakeIPAllocation('192.168.0.2',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd')]
        self.mac_address = '00:00:80:aa:bb:cc'
        self.device_id = 'fake_port1'
        self.extra_dhcp_opts = []
        self.dns_assignment = [FakeDNSAssignment('192.168.0.2', domain=domain)]
# Administratively down port with a single IPv4 fixed IP.
class FakePort2(object):
    def __init__(self):
        self.id = 'ffffffff-ffff-ffff-ffff-ffffffffffff'
        self.admin_state_up = False
        self.device_owner = 'foo2'
        self.fixed_ips = [
            FakeIPAllocation('192.168.0.3',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd')]
        self.mac_address = '00:00:f3:aa:bb:cc'
        self.device_id = 'fake_port2'
        self.dns_assignment = [FakeDNSAssignment('192.168.0.3')]
        self.extra_dhcp_opts = []
# Active port with fixed IPs on two different IPv4 subnets.
class FakePort3(object):
    def __init__(self):
        self.id = '44444444-4444-4444-4444-444444444444'
        self.admin_state_up = True
        self.device_owner = 'foo3'
        self.fixed_ips = [
            FakeIPAllocation('192.168.0.4',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd'),
            FakeIPAllocation('192.168.1.2',
                             'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee')]
        self.dns_assignment = [FakeDNSAssignment('192.168.0.4'),
                               FakeDNSAssignment('192.168.1.2')]
        self.mac_address = '00:00:0f:aa:bb:cc'
        self.device_id = 'fake_port3'
        self.extra_dhcp_opts = []
# Administratively down dual-stack port: IPv4 address plus an IPv6
# address whose interface id (0216:3eff:fec2:771d) is EUI-64-derived
# from the port MAC.
class FakePort4(object):
    def __init__(self):
        self.id = 'gggggggg-gggg-gggg-gggg-gggggggggggg'
        self.admin_state_up = False
        self.device_owner = 'foo3'
        self.fixed_ips = [
            FakeIPAllocation('192.168.0.4',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd'),
            FakeIPAllocation('ffda:3ba5:a17a:4ba3:0216:3eff:fec2:771d',
                             'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee')]
        self.dns_assignment = [
            FakeDNSAssignment('192.168.0.4'),
            FakeDNSAssignment('ffda:3ba5:a17a:4ba3:0216:3eff:fec2:771d')]
        self.mac_address = '00:16:3E:C2:77:1D'
        self.device_id = 'fake_port4'
        self.extra_dhcp_opts = []
# Port carrying a DHCP client-id extra option.
# NOTE(review): the last group of this id has 11 chars (not a valid
# UUID) -- looks like deliberate fixture junk; confirm before "fixing".
class FakePort5(object):
    def __init__(self):
        self.id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeee'
        self.admin_state_up = True
        self.device_owner = 'foo5'
        self.fixed_ips = [
            FakeIPAllocation('192.168.0.5',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd')]
        self.dns_assignment = [FakeDNSAssignment('192.168.0.5')]
        self.mac_address = '00:00:0f:aa:bb:55'
        self.device_id = 'fake_port5'
        self.extra_dhcp_opts = [
            DhcpOpt(opt_name=edo_ext.CLIENT_ID,
                    opt_value='test5')]
# Port carrying both a client-id and an IPv4 dns-server extra option.
class FakePort6(object):
    def __init__(self):
        self.id = 'ccccccccc-cccc-cccc-cccc-ccccccccc'
        self.admin_state_up = True
        self.device_owner = 'foo6'
        self.fixed_ips = [
            FakeIPAllocation('192.168.0.6',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd')]
        self.dns_assignment = [FakeDNSAssignment('192.168.0.6')]
        self.mac_address = '00:00:0f:aa:bb:66'
        self.device_id = 'fake_port6'
        self.extra_dhcp_opts = [
            DhcpOpt(opt_name=edo_ext.CLIENT_ID,
                    opt_value='test6',
                    ip_version=4),
            DhcpOpt(opt_name='dns-server',
                    opt_value='123.123.123.45',
                    ip_version=4)]
# Active IPv6-only port on the stateful-DHCPv6 fake subnet.
class FakeV6Port(object):
    def __init__(self, domain='openstacklocal'):
        self.id = 'hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh'
        self.admin_state_up = True
        self.device_owner = 'foo3'
        self.fixed_ips = [
            FakeIPAllocation('fdca:3ba5:a17a:4ba3::2',
                             'ffffffff-ffff-ffff-ffff-ffffffffffff')]
        self.mac_address = '00:00:f3:aa:bb:cc'
        self.device_id = 'fake_port6'
        self.extra_dhcp_opts = []
        self.dns_assignment = [FakeDNSAssignment('fdca:3ba5:a17a:4ba3::2',
                                                 domain=domain)]
# IPv6 port with an IPv6-scoped dns-server extra DHCP option.
class FakeV6PortExtraOpt(object):
    def __init__(self):
        self.id = 'hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh'
        self.admin_state_up = True
        self.device_owner = 'foo3'
        self.fixed_ips = [
            FakeIPAllocation('ffea:3ba5:a17a:4ba3:0216:3eff:fec2:771d',
                             'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee')]
        self.dns_assignment = [
            FakeDNSAssignment('ffea:3ba5:a17a:4ba3:0216:3eff:fec2:771d')]
        self.mac_address = '00:16:3e:c2:77:1d'
        self.device_id = 'fake_port6'
        self.extra_dhcp_opts = [
            DhcpOpt(opt_name='dns-server',
                    opt_value='ffea:3ba5:a17a:4ba3::100',
                    ip_version=6)]
# Dual-stack port whose only extra DHCP option is IPv6-scoped.
class FakeDualPortWithV6ExtraOpt(object):
    def __init__(self):
        self.id = 'hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh'
        self.admin_state_up = True
        self.device_owner = 'foo3'
        self.fixed_ips = [
            FakeIPAllocation('192.168.0.3',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd'),
            FakeIPAllocation('ffea:3ba5:a17a:4ba3:0216:3eff:fec2:771d',
                             'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee')]
        self.dns_assignment = [
            FakeDNSAssignment('192.168.0.3'),
            FakeDNSAssignment('ffea:3ba5:a17a:4ba3:0216:3eff:fec2:771d')]
        self.mac_address = '00:16:3e:c2:77:1d'
        self.device_id = 'fake_port6'
        self.extra_dhcp_opts = [
            DhcpOpt(opt_name='dns-server',
                    opt_value='ffea:3ba5:a17a:4ba3::100',
                    ip_version=6)]
# Active dual-stack port with one IPv4 and one IPv6 fixed IP.
class FakeDualPort(object):
    def __init__(self, domain='openstacklocal'):
        self.id = 'hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh'
        self.admin_state_up = True
        self.device_owner = 'foo3'
        self.fixed_ips = [
            FakeIPAllocation('192.168.0.3',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd'),
            FakeIPAllocation('fdca:3ba5:a17a:4ba3::3',
                             'ffffffff-ffff-ffff-ffff-ffffffffffff')]
        self.mac_address = '00:00:0f:aa:bb:cc'
        self.device_id = 'fake_dual_port'
        self.extra_dhcp_opts = []
        self.dns_assignment = [FakeDNSAssignment('192.168.0.3', domain=domain),
                               FakeDNSAssignment('fdca:3ba5:a17a:4ba3::3',
                                                 domain=domain)]
class FakeRouterPort(object):
    """Fake router-interface port with one fixed IP and matching DNS entry.

    The device owner defaults to a router interface but is overridable
    via *dev_owner* (e.g. for DVR interfaces).
    """

    def __init__(self, dev_owner=constants.DEVICE_OWNER_ROUTER_INTF,
                 ip_address='192.168.0.1', domain='openstacklocal'):
        self.id = 'rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr'
        self.admin_state_up = True
        self.mac_address = '00:00:0f:rr:rr:rr'
        self.device_id = 'fake_router_port'
        self.extra_dhcp_opts = []
        # The parameter value wins; the original also hard-coded
        # device_owner (and dns_assignment = []) just above, but both were
        # immediately overwritten -- that dead code has been removed.
        self.device_owner = dev_owner
        self.fixed_ips = [FakeIPAllocation(
            ip_address, 'dddddddd-dddd-dddd-dddd-dddddddddddd')]
        self.dns_assignment = [FakeDNSAssignment(ip.ip_address, domain=domain)
                               for ip in self.fixed_ips]
class FakeRouterPortNoDHCP(object):
    """Fake router-interface port whose fixed IP sits on the no-DHCP subnet."""

    def __init__(self, dev_owner=constants.DEVICE_OWNER_ROUTER_INTF,
                 ip_address='192.168.0.1', domain='openstacklocal'):
        self.id = 'ssssssss-ssss-ssss-ssss-ssssssssssss'
        self.admin_state_up = True
        self.mac_address = '00:00:0f:rr:rr:rr'
        self.device_id = 'fake_router_port_no_dhcp'
        self.extra_dhcp_opts = []
        # The parameter value wins; redundant hard-coded device_owner and
        # dns_assignment assignments (immediately overwritten) were removed.
        self.device_owner = dev_owner
        self.fixed_ips = [FakeIPAllocation(
            ip_address, 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee')]
        self.dns_assignment = [FakeDNSAssignment(ip.ip_address, domain=domain)
                               for ip in self.fixed_ips]
# Router interface on the second IPv4 subnet (192.168.1.0/24).
class FakeRouterPort2(object):
    def __init__(self):
        self.id = 'rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr'
        self.admin_state_up = True
        self.device_owner = constants.DEVICE_OWNER_ROUTER_INTF
        self.fixed_ips = [
            FakeIPAllocation('192.168.1.1',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd')]
        self.dns_assignment = [FakeDNSAssignment('192.168.1.1')]
        self.mac_address = '00:00:0f:rr:rr:r2'
        self.device_id = 'fake_router_port2'
        self.extra_dhcp_opts = []
# DHCP port fixture used by the multiple-agents scenarios.
class FakePortMultipleAgents1(object):
    def __init__(self):
        self.id = 'rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr'
        self.admin_state_up = True
        self.device_owner = constants.DEVICE_OWNER_DHCP
        self.fixed_ips = [
            FakeIPAllocation('192.168.0.5',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd')]
        self.dns_assignment = [FakeDNSAssignment('192.168.0.5')]
        self.mac_address = '00:00:0f:dd:dd:dd'
        self.device_id = 'fake_multiple_agents_port'
        self.extra_dhcp_opts = []
# Second DHCP port for the multiple-agents scenarios.
class FakePortMultipleAgents2(object):
    def __init__(self):
        self.id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        self.admin_state_up = True
        self.device_owner = constants.DEVICE_OWNER_DHCP
        self.fixed_ips = [
            FakeIPAllocation('192.168.0.6',
                             'dddddddd-dddd-dddd-dddd-dddddddddddd')]
        self.dns_assignment = [FakeDNSAssignment('192.168.0.6')]
        self.mac_address = '00:00:0f:ee:ee:ee'
        self.device_id = 'fake_multiple_agents_port2'
        self.extra_dhcp_opts = []
# Static IPv4 host route used by FakeV4Subnet.
class FakeV4HostRoute(object):
    def __init__(self):
        self.destination = '20.0.0.1/24'
        self.nexthop = '20.0.0.1'
# Host route whose destination is the IPv4 "any" prefix, i.e. a
# default route via 10.0.0.1.
class FakeV4HostRouteGateway(object):
    def __init__(self):
        self.destination = constants.IPv4_ANY
        self.nexthop = '10.0.0.1'
# Static IPv6 host route.
class FakeV6HostRoute(object):
    def __init__(self):
        self.destination = '2001:0200:feed:7ac0::/64'
        self.nexthop = '2001:0200:feed:7ac0::1'
# Primary IPv4 subnet: 192.168.0.0/24, DHCP enabled, one host route and
# one DNS server.  Inherits Dictable so tests can use subnet['attr'].
class FakeV4Subnet(Dictable):
    def __init__(self):
        self.id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
        self.ip_version = 4
        self.cidr = '192.168.0.0/24'
        self.gateway_ip = '192.168.0.1'
        self.enable_dhcp = True
        self.host_routes = [FakeV4HostRoute()]
        self.dns_nameservers = ['8.8.8.8']
# Second IPv4 subnet (192.168.1.0/24) without host routes.
class FakeV4Subnet2(FakeV4Subnet):
    def __init__(self):
        super(FakeV4Subnet2, self).__init__()
        self.cidr = '192.168.1.0/24'
        self.gateway_ip = '192.168.1.1'
        self.host_routes = []
# Metadata-service subnet (169.254.169.254/30), no routes or DNS.
class FakeV4MetadataSubnet(FakeV4Subnet):
    def __init__(self):
        super(FakeV4MetadataSubnet, self).__init__()
        self.cidr = '169.254.169.254/30'
        self.gateway_ip = '169.254.169.253'
        self.host_routes = []
        self.dns_nameservers = []
# Variant whose single host route is a default route.
class FakeV4SubnetGatewayRoute(FakeV4Subnet):
    def __init__(self):
        super(FakeV4SubnetGatewayRoute, self).__init__()
        self.host_routes = [FakeV4HostRouteGateway()]
# Variant with neither DNS servers nor host routes configured.
class FakeV4SubnetMultipleAgentsWithoutDnsProvided(FakeV4Subnet):
    def __init__(self):
        super(FakeV4SubnetMultipleAgentsWithoutDnsProvided, self).__init__()
        self.dns_nameservers = []
        self.host_routes = []
# Variant with four DNS servers listed in non-sorted order.
class FakeV4SubnetAgentWithManyDnsProvided(FakeV4Subnet):
    def __init__(self):
        super(FakeV4SubnetAgentWithManyDnsProvided, self).__init__()
        self.dns_nameservers = ['2.2.2.2', '9.9.9.9', '1.1.1.1', '3.3.3.3']
        self.host_routes = []
# Network with two DHCP-agent ports and no subnet DNS servers.
class FakeV4MultipleAgentsWithoutDnsProvided(object):
    def __init__(self):
        self.id = 'ffffffff-ffff-ffff-ffff-ffffffffffff'
        self.subnets = [FakeV4SubnetMultipleAgentsWithoutDnsProvided()]
        self.ports = [FakePort1(), FakePort2(), FakePort3(), FakeRouterPort(),
                      FakePortMultipleAgents1(), FakePortMultipleAgents2()]
        self.namespace = 'qdhcp-ns'
# Same subnet setup but with a single DHCP-agent port.
class FakeV4AgentWithoutDnsProvided(object):
    def __init__(self):
        self.id = 'ffffffff-ffff-ffff-ffff-ffffffffffff'
        self.subnets = [FakeV4SubnetMultipleAgentsWithoutDnsProvided()]
        self.ports = [FakePort1(), FakePort2(), FakePort3(), FakeRouterPort(),
                      FakePortMultipleAgents1()]
        self.namespace = 'qdhcp-ns'
# Single agent port; the subnet provides many DNS servers.
class FakeV4AgentWithManyDnsProvided(object):
    def __init__(self):
        self.id = 'ffffffff-ffff-ffff-ffff-ffffffffffff'
        self.subnets = [FakeV4SubnetAgentWithManyDnsProvided()]
        self.ports = [FakePort1(), FakePort2(), FakePort3(), FakeRouterPort(),
                      FakePortMultipleAgents1()]
        self.namespace = 'qdhcp-ns'
# Subnet variant keeping the default DNS server but with no host routes.
class FakeV4SubnetMultipleAgentsWithDnsProvided(FakeV4Subnet):
    def __init__(self):
        super(FakeV4SubnetMultipleAgentsWithDnsProvided, self).__init__()
        self.host_routes = []
# Network with two DHCP-agent ports and subnet-provided DNS.
class FakeV4MultipleAgentsWithDnsProvided(object):
    def __init__(self):
        self.id = 'ffffffff-ffff-ffff-ffff-ffffffffffff'
        self.subnets = [FakeV4SubnetMultipleAgentsWithDnsProvided()]
        self.ports = [FakePort1(), FakePort2(), FakePort3(), FakeRouterPort(),
                      FakePortMultipleAgents1(), FakePortMultipleAgents2()]
        self.namespace = 'qdhcp-ns'
# IPv6 subnet with neither ra_mode nor address_mode set.
class FakeV6Subnet(object):
    def __init__(self):
        self.id = 'ffffffff-ffff-ffff-ffff-ffffffffffff'
        self.ip_version = 6
        self.cidr = 'fdca:3ba5:a17a:4ba3::/64'
        self.gateway_ip = 'fdca:3ba5:a17a:4ba3::1'
        self.enable_dhcp = True
        self.host_routes = [FakeV6HostRoute()]
        self.dns_nameservers = ['2001:0200:feed:7ac0::1']
        self.ipv6_ra_mode = None
        self.ipv6_address_mode = None
# IPv4 subnet with DHCP disabled.
class FakeV4SubnetNoDHCP(object):
    def __init__(self):
        self.id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        self.ip_version = 4
        self.cidr = '192.168.1.0/24'
        self.gateway_ip = '192.168.1.1'
        self.enable_dhcp = False
        self.host_routes = []
        self.dns_nameservers = []
# IPv6 subnet in DHCPv6-stateful address mode.
class FakeV6SubnetDHCPStateful(Dictable):
    def __init__(self):
        self.id = 'ffffffff-ffff-ffff-ffff-ffffffffffff'
        self.ip_version = 6
        self.cidr = 'fdca:3ba5:a17a:4ba3::/64'
        self.gateway_ip = 'fdca:3ba5:a17a:4ba3::1'
        self.enable_dhcp = True
        self.host_routes = [FakeV6HostRoute()]
        self.dns_nameservers = ['2001:0200:feed:7ac0::1']
        self.ipv6_ra_mode = None
        self.ipv6_address_mode = constants.DHCPV6_STATEFUL
# IPv6 subnet in SLAAC address mode.
class FakeV6SubnetSlaac(object):
    def __init__(self):
        self.id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        self.ip_version = 6
        self.cidr = 'ffda:3ba5:a17a:4ba3::/64'
        self.gateway_ip = 'ffda:3ba5:a17a:4ba3::1'
        self.enable_dhcp = True
        self.host_routes = [FakeV6HostRoute()]
        self.ipv6_address_mode = constants.IPV6_SLAAC
        self.ipv6_ra_mode = None
# IPv6 subnet in DHCPv6-stateless address mode (/64 prefix).
class FakeV6SubnetStateless(object):
    def __init__(self):
        self.id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        self.ip_version = 6
        self.cidr = 'ffea:3ba5:a17a:4ba3::/64'
        self.gateway_ip = 'ffea:3ba5:a17a:4ba3::1'
        self.enable_dhcp = True
        self.dns_nameservers = []
        self.host_routes = []
        self.ipv6_address_mode = constants.DHCPV6_STATELESS
        self.ipv6_ra_mode = None
# Stateless-DHCPv6 subnet with a /56 prefix ("bad" per the class name --
# presumably used to exercise prefix-length handling; confirm in callers).
class FakeV6SubnetStatelessBadPrefixLength(object):
    def __init__(self):
        self.id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        self.ip_version = 6
        self.cidr = 'ffeb:3ba5:a17a:4ba3::/56'
        self.gateway_ip = 'ffeb:3ba5:a17a:4ba3::1'
        self.enable_dhcp = True
        self.dns_nameservers = []
        self.host_routes = []
        self.ipv6_address_mode = constants.DHCPV6_STATELESS
        self.ipv6_ra_mode = None
# IPv4 subnet with no gateway configured.
class FakeV4SubnetNoGateway(FakeV4Subnet):
    def __init__(self):
        super(FakeV4SubnetNoGateway, self).__init__()
        self.id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        self.cidr = '192.168.1.0/24'
        self.gateway_ip = None
        self.enable_dhcp = True
        self.host_routes = []
        self.dns_nameservers = []
# IPv4 subnet without host routes or DNS servers.
class FakeV4SubnetNoRouter(FakeV4Subnet):
    def __init__(self):
        super(FakeV4SubnetNoRouter, self).__init__()
        self.id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        self.cidr = '192.168.1.0/24'
        self.gateway_ip = '192.168.1.1'
        self.host_routes = []
        self.dns_nameservers = []
# Simplest IPv4 network: one subnet, one port.
class FakeV4Network(object):
    def __init__(self):
        self.id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        self.subnets = [FakeV4Subnet()]
        self.ports = [FakePort1()]
        self.namespace = 'qdhcp-ns'
# IPv4 network whose ports carry DHCP client-id options.
class FakeV4NetworkClientId(object):
    def __init__(self):
        self.id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        self.subnets = [FakeV4Subnet()]
        self.ports = [FakePort1(), FakePort5(), FakePort6()]
        self.namespace = 'qdhcp-ns'
# IPv6-only network.
class FakeV6Network(object):
    def __init__(self):
        self.id = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'
        self.subnets = [FakeV6Subnet()]
        self.ports = [FakePort2()]
        self.namespace = 'qdhcp-ns'
# Dual-stack network (v4 + stateful v6) with a parameterizable DNS domain.
class FakeDualNetwork(object):
    def __init__(self, domain='openstacklocal'):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        self.subnets = [FakeV4Subnet(), FakeV6SubnetDHCPStateful()]
        self.namespace = 'qdhcp-ns'
        self.ports = [FakePort1(domain=domain), FakeV6Port(domain=domain),
                      FakeDualPort(domain=domain),
                      FakeRouterPort(domain=domain)]
# Dual-stack network used by the device-manager tests.
class FakeDeviceManagerNetwork(object):
    def __init__(self):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        self.subnets = [FakeV4Subnet(), FakeV6SubnetDHCPStateful()]
        self.ports = [FakePort1(),
                      FakeV6Port(),
                      FakeDualPort(),
                      FakeRouterPort()]
        self.namespace = 'qdhcp-ns'
# Dual-stack network that also contains one reserved DHCP port.
class FakeDualNetworkReserved(object):
    def __init__(self):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        self.subnets = [FakeV4Subnet(), FakeV6SubnetDHCPStateful()]
        self.ports = [FakePort1(), FakeV6Port(), FakeDualPort(),
                      FakeRouterPort(), FakeReservedPort()]
        self.namespace = 'qdhcp-ns'
# Dual-stack network containing two reserved DHCP ports.
class FakeDualNetworkReserved2(object):
    def __init__(self):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        self.subnets = [FakeV4Subnet(), FakeV6SubnetDHCPStateful()]
        self.ports = [FakePort1(), FakeV6Port(), FakeDualPort(),
                      FakeRouterPort(), FakeReservedPort(),
                      FakeReservedPort(id='reserved-2')]
        self.namespace = 'qdhcp-ns'
# Network that already has a DHCP port.
class FakeNetworkDhcpPort(object):
    def __init__(self):
        self.id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        self.subnets = [FakeV4Subnet()]
        self.ports = [FakePort1(), FakeDhcpPort()]
        self.namespace = 'qdhcp-ns'
# Dual-stack network whose v4 subnet's host route is a default route.
class FakeDualNetworkGatewayRoute(object):
    def __init__(self):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        self.subnets = [FakeV4SubnetGatewayRoute(), FakeV6SubnetDHCPStateful()]
        self.ports = [FakePort1(), FakePort2(), FakePort3(), FakeRouterPort()]
        self.namespace = 'qdhcp-ns'
# Two v4 subnets, only one of them DHCP-enabled.
class FakeDualNetworkSingleDHCP(object):
    def __init__(self):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        self.subnets = [FakeV4Subnet(), FakeV4SubnetNoDHCP()]
        self.ports = [FakePort1(), FakePort2(), FakePort3(), FakeRouterPort()]
        self.namespace = 'qdhcp-ns'
# One DHCP subnet with router ports attached to both subnets.
# (Class name typo "Attaced" is preserved; renaming would break callers.)
class FakeDualNetworkSingleDHCPBothAttaced(object):
    def __init__(self):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        # dhcp-agent actually can't get the subnet with dhcp disabled
        self.subnets = [FakeV4Subnet()]
        self.ports = [FakePort1(), FakeRouterPortNoDHCP(), FakeRouterPort()]
        self.namespace = 'qdhcp-ns'
# Two DHCP-enabled v4 subnets, each with its own router port.
class FakeDualNetworkDualDHCP(object):
    def __init__(self):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        self.subnets = [FakeV4Subnet(), FakeV4Subnet2()]
        self.ports = [FakePort1(), FakeRouterPort(), FakeRouterPort2()]
        self.namespace = 'qdhcp-ns'
# v4 network whose subnet has no gateway.  NOTE(review): unlike most
# fixtures here, no namespace attribute is set -- confirm intentional.
class FakeV4NoGatewayNetwork(object):
    def __init__(self):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        self.subnets = [FakeV4SubnetNoGateway()]
        self.ports = [FakePort1()]
# v4 network without a router port; also has no namespace attribute.
class FakeV4NetworkNoRouter(object):
    def __init__(self):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        self.subnets = [FakeV4SubnetNoRouter()]
        self.ports = [FakePort1()]
# Metadata network with a router port on 169.254.169.253.
class FakeV4MetadataNetwork(object):
    def __init__(self):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        self.subnets = [FakeV4MetadataSubnet()]
        self.ports = [FakeRouterPort(ip_address='169.254.169.253')]
# v4 network whose router port is a DVR interface.
class FakeV4NetworkDistRouter(object):
    def __init__(self):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        self.subnets = [FakeV4Subnet()]
        self.ports = [FakePort1(),
                      FakeRouterPort(
                          dev_owner=constants.DEVICE_OWNER_DVR_INTERFACE)]
# Two v4 subnets (one without DHCP) and three ports carrying PXE boot
# options.  port_detail selects whether the three ports share identical
# PXE option values ("portsSame") or each get distinct ones.
class FakeDualV4Pxe3Ports(object):
    def __init__(self, port_detail="portsSame"):
        self.id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        self.subnets = [FakeV4Subnet(), FakeV4SubnetNoDHCP()]
        self.ports = [FakePort1(), FakePort2(), FakePort3(), FakeRouterPort()]
        self.namespace = 'qdhcp-ns'
        if port_detail == "portsSame":
            self.ports[0].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.3'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.0.2'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux.0')]
            self.ports[1].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.1.3'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.1.2'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux2.0')]
            self.ports[2].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.1.3'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.1.2'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux3.0')]
        else:
            self.ports[0].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.2'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.0.2'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux.0')]
            self.ports[1].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.5'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.0.5'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux2.0')]
            self.ports[2].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.7'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.0.7'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux3.0')]
# Single v4 subnet with two PXE-booting ports; port_detail selects
# identical or distinct option values.
class FakeV4NetworkPxe2Ports(object):
    def __init__(self, port_detail="portsSame"):
        self.id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
        self.subnets = [FakeV4Subnet()]
        self.ports = [FakePort1(), FakePort2(), FakeRouterPort()]
        self.namespace = 'qdhcp-ns'
        if port_detail == "portsSame":
            self.ports[0].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.3'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.0.2'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux.0')]
            self.ports[1].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.3'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.0.2'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux.0')]
        else:
            self.ports[0].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.3'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.0.2'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux.0')]
            self.ports[1].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.5'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.0.5'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux.0')]
# Single v4 subnet with three PXE-booting ports.
class FakeV4NetworkPxe3Ports(object):
    def __init__(self, port_detail="portsSame"):
        self.id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
        self.subnets = [FakeV4Subnet()]
        self.ports = [FakePort1(), FakePort2(), FakePort3(), FakeRouterPort()]
        self.namespace = 'qdhcp-ns'
        if port_detail == "portsSame":
            self.ports[0].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.3'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.0.2'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux.0')]
            self.ports[1].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.1.3'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.1.2'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux.0')]
            self.ports[2].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.1.3'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.1.2'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux.0')]
        else:
            self.ports[0].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.3'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.0.2'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux.0')]
            self.ports[1].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.5'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.0.5'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux2.0')]
            self.ports[2].extra_dhcp_opts = [
                DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.7'),
                DhcpOpt(opt_name='server-ip-address', opt_value='192.168.0.7'),
                DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux3.0')]
class FakeV6NetworkPxePort(object):
def __init__(self):
self.id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
self.subnets = [FakeV6SubnetDHCPStateful()]
self.ports = [FakeV6Port()]
self.namespace = 'qdhcp-ns'
self.ports[0].extra_dhcp_opts = [
DhcpOpt(opt_name='tftp-server', opt_value='2001:192:168::1',
ip_version=6),
DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux.0',
ip_version=6)]
class FakeV6NetworkPxePortWrongOptVersion(object):
    """IPv6 network whose port mixes an IPv4 option with an IPv6 one.

    Used to verify that options with the wrong ip_version are dropped
    when rendering the dnsmasq opts file for a v6 network.
    """

    def __init__(self):
        self.id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
        self.subnets = [FakeV6SubnetDHCPStateful()]
        self.ports = [FakeV6Port()]
        self.namespace = 'qdhcp-ns'
        self.ports[0].extra_dhcp_opts = [
            DhcpOpt(opt_name='tftp-server', opt_value='192.168.0.7',
                    ip_version=4),
            DhcpOpt(opt_name='bootfile-name', opt_value='pxelinux.0',
                    ip_version=6)]
class FakeDualStackNetworkSingleDHCP(object):
    """Dual-stack fixture: one DHCP-enabled v4 subnet plus a SLAAC v6 one."""

    def __init__(self):
        """Populate ports, subnets and the fixed network UUID."""
        self.ports = [FakePort1(), FakePort4(), FakeRouterPort()]
        self.subnets = [FakeV4Subnet(), FakeV6SubnetSlaac()]
        self.id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
class FakeDualStackNetworkingSingleDHCPTags(object):
    """Dual-stack network where every port carries a tagged iPXE option."""

    def __init__(self):
        self.id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        self.subnets = [FakeV4Subnet(), FakeV6SubnetSlaac()]
        self.ports = [FakePort1(), FakePort4(), FakeRouterPort()]
        # Each port gets its own DhcpOpt instance (not shared).
        for port in self.ports:
            port.extra_dhcp_opts = [
                DhcpOpt(opt_name='tag:ipxe,bootfile-name',
                        opt_value='pxelinux.0')]
class FakeV4NetworkMultipleTags(object):
    """IPv4 network whose port option name embeds a dnsmasq tag prefix."""

    def __init__(self):
        self.id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
        self.subnets = [FakeV4Subnet()]
        self.ports = [FakePort1(), FakeRouterPort()]
        self.namespace = 'qdhcp-ns'
        self.ports[0].extra_dhcp_opts = [
            DhcpOpt(opt_name='tag:ipxe,bootfile-name', opt_value='pxelinux.0')]
class FakeV6NetworkStatelessDHCP(object):
    """IPv6 network fixture using a stateless-DHCP subnet."""

    def __init__(self):
        """Set the fixed UUID, stateless v6 subnet, single port and netns."""
        self.namespace = 'qdhcp-ns'
        self.ports = [FakeV6PortExtraOpt()]
        self.subnets = [FakeV6SubnetStateless()]
        self.id = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'
class FakeV6NetworkStatelessDHCPBadPrefixLength(object):
    """Stateless v6 network whose subnet prefix length is invalid for DHCP."""

    def __init__(self):
        self.id = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'
        self.subnets = [FakeV6SubnetStatelessBadPrefixLength()]
        self.ports = [FakeV6PortExtraOpt()]
        self.namespace = 'qdhcp-ns'
class FakeNetworkWithV6SatelessAndV4DHCPSubnets(object):
    """Network mixing a stateless v6 subnet with a DHCP-enabled v4 subnet."""

    def __init__(self):
        self.id = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'
        self.subnets = [FakeV6SubnetStateless(), FakeV4Subnet()]
        self.ports = [FakeDualPortWithV6ExtraOpt(), FakeRouterPort()]
        self.namespace = 'qdhcp-ns'
class LocalChild(dhcp.DhcpLocalProcess):
    """Concrete DhcpLocalProcess that records method calls instead of acting.

    Tests inspect ``self.called`` to assert which lifecycle hooks ran.
    """

    PORTS = {4: [4], 6: [6]}

    def __init__(self, *args, **kwargs):
        # Inject a mock process monitor before delegating to the parent,
        # which expects it as a keyword argument.
        self.process_monitor = mock.Mock()
        kwargs['process_monitor'] = self.process_monitor
        super(LocalChild, self).__init__(*args, **kwargs)
        self.called = []

    def reload_allocations(self):
        self.called.append('reload')

    def restart(self):
        self.called.append('restart')

    def spawn_process(self):
        self.called.append('spawn')
class TestConfBase(base.BaseTestCase):
    """Base test case that registers all DHCP-related config options."""

    def setUp(self):
        super(TestConfBase, self).setUp()
        self.conf = config.setup_conf()
        # Register every option group the dhcp driver reads.
        self.conf.register_opts(base_config.core_opts)
        self.conf.register_opts(dhcp_config.DHCP_OPTS)
        self.conf.register_opts(dhcp_config.DNSMASQ_OPTS)
        self.conf.register_opts(external_process.OPTS)
        config.register_interface_driver_opts_helper(self.conf)
class TestBase(TestConfBase):
    """Adds mocks for filesystem, process and device-manager side effects."""

    def setUp(self):
        super(TestBase, self).setUp()
        instance = mock.patch("neutron.agent.linux.dhcp.DeviceManager")
        self.mock_mgr = instance.start()
        self.conf.register_opt(cfg.BoolOpt('enable_isolated_metadata',
                                           default=True))
        self.conf.register_opt(cfg.BoolOpt("force_metadata",
                                           default=False))
        self.conf.register_opt(cfg.BoolOpt('enable_metadata_network',
                                           default=False))
        self.config_parse(self.conf)
        self.conf.set_override('state_path', '')

        # Stub out file writes, command execution and directory handling so
        # tests can assert on calls without touching the real system.
        self.replace_p = mock.patch('neutron_lib.utils.file.replace_file')
        self.execute_p = mock.patch('neutron.agent.common.utils.execute')
        mock.patch('neutron.agent.linux.utils.execute').start()
        self.safe = self.replace_p.start()
        self.execute = self.execute_p.start()

        self.makedirs = mock.patch('os.makedirs').start()
        self.rmtree = mock.patch('shutil.rmtree').start()

        self.external_process = mock.patch(
            'neutron.agent.linux.external_process.ProcessManager').start()

        self.mock_mgr.return_value.driver.bridged = True
class TestDhcpBase(TestBase):
    """Tests for the abstract dhcp.DhcpBase contract."""

    def test_existing_dhcp_networks_abstract_error(self):
        """existing_dhcp_networks must raise until a subclass implements it."""
        self.assertRaises(NotImplementedError,
                          dhcp.DhcpBase.existing_dhcp_networks,
                          None)

    def test_check_version_abstract_error(self):
        """check_version must raise until a subclass implements it."""
        self.assertRaises(NotImplementedError,
                          dhcp.DhcpBase.check_version)

    def test_base_abc_error(self):
        """DhcpBase is abstract and cannot be instantiated directly."""
        self.assertRaises(TypeError, dhcp.DhcpBase, None)

    def test_restart(self):
        """restart() should call disable(retain_port, block) then enable()."""
        class SubClass(dhcp.DhcpBase):
            def __init__(self):
                dhcp.DhcpBase.__init__(self, cfg.CONF, FakeV4Network(),
                                       mock.Mock(), None)
                self.called = []

            def enable(self):
                self.called.append('enable')

            def disable(self, retain_port=False, block=False):
                self.called.append('disable %s %s' % (retain_port, block))

            def reload_allocations(self):
                pass

            @property
            def active(self):
                return True

        c = SubClass()
        c.restart()
        self.assertEqual(c.called, ['disable True True', 'enable'])
class TestDhcpLocalProcess(TestBase):
    """Tests for DhcpLocalProcess lifecycle via the LocalChild stub."""

    def test_get_conf_file_name(self):
        """Config file names live under /dhcp/<network-id>/<kind>."""
        tpl = '/dhcp/aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/dev'
        lp = LocalChild(self.conf, FakeV4Network())
        self.assertEqual(lp.get_conf_file_name('dev'), tpl)

    @mock.patch.object(fileutils, 'ensure_tree')
    def test_ensure_dir_called(self, ensure_dir):
        """Constructing the process ensures the network conf dir exists."""
        LocalChild(self.conf, FakeV4Network())
        ensure_dir.assert_called_once_with(
            '/dhcp/aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa', mode=0o755)

    def test_enable_already_active(self):
        """enable() on an active process restarts it without re-setup."""
        with mock.patch.object(LocalChild, 'active') as patched:
            patched.__get__ = mock.Mock(return_value=True)
            lp = LocalChild(self.conf, FakeV4Network())
            lp.enable()

            self.assertEqual(lp.called, ['restart'])
            self.assertFalse(self.mock_mgr.return_value.setup.called)

    @mock.patch.object(fileutils, 'ensure_tree')
    def test_enable(self, ensure_dir):
        """enable() on an inactive process sets up the device and spawns."""
        attrs_to_mock = dict(
            [(a, mock.DEFAULT) for a in
                ['active', 'interface_name']]
        )

        with mock.patch.multiple(LocalChild, **attrs_to_mock) as mocks:
            mocks['active'].__get__ = mock.Mock(return_value=False)
            mocks['interface_name'].__set__ = mock.Mock()
            lp = LocalChild(self.conf,
                            FakeDualNetwork())
            lp.enable()

            self.mock_mgr.assert_has_calls(
                [mock.call(self.conf, None),
                 mock.call().setup(mock.ANY)])
            self.assertEqual(lp.called, ['spawn'])
            self.assertTrue(mocks['interface_name'].__set__.called)
            ensure_dir.assert_called_with(
                '/dhcp/cccccccc-cccc-cccc-cccc-cccccccccccc', mode=0o755)

    def _assert_disabled(self, lp):
        # Both the monitor registration and the external process must stop.
        self.assertTrue(lp.process_monitor.unregister.called)
        self.assertTrue(self.external_process().disable.called)

    def test_disable_not_active(self):
        """disable() on an inactive process still destroys the device."""
        attrs_to_mock = dict([(a, mock.DEFAULT) for a in
                              ['active', 'interface_name']])
        with mock.patch.multiple(LocalChild, **attrs_to_mock) as mocks:
            mocks['active'].__get__ = mock.Mock(return_value=False)
            mocks['interface_name'].__get__ = mock.Mock(return_value='tap0')
            network = FakeDualNetwork()
            lp = LocalChild(self.conf, network)
            lp.device_manager = mock.Mock()
            lp.disable()
            lp.device_manager.destroy.assert_called_once_with(
                network, 'tap0')
            self._assert_disabled(lp)

    def test_disable_retain_port(self):
        """disable(retain_port=True) must not destroy the interface."""
        attrs_to_mock = dict([(a, mock.DEFAULT) for a in
                              ['active', 'interface_name']])
        network = FakeDualNetwork()
        with mock.patch.multiple(LocalChild, **attrs_to_mock) as mocks:
            mocks['active'].__get__ = mock.Mock(return_value=True)
            mocks['interface_name'].__get__ = mock.Mock(
                return_value='tap0')
            lp = LocalChild(self.conf, network)
            lp.disable(retain_port=True)
            self._assert_disabled(lp)

    def test_disable(self):
        """Plain disable() deletes the qdhcp namespace."""
        attrs_to_mock = {'active': mock.DEFAULT}

        with mock.patch.multiple(LocalChild, **attrs_to_mock) as mocks:
            mocks['active'].__get__ = mock.Mock(return_value=False)
            lp = LocalChild(self.conf, FakeDualNetwork())
            with mock.patch('neutron.agent.linux.ip_lib.IPWrapper') as ip:
                lp.disable()

            self._assert_disabled(lp)

        ip.return_value.netns.delete.assert_called_with('qdhcp-ns')

    def test_disable_config_dir_removed_after_destroy(self):
        """The config dir is removed only after the device is destroyed."""
        parent = mock.MagicMock()
        parent.attach_mock(self.rmtree, 'rmtree')
        parent.attach_mock(self.mock_mgr, 'DeviceManager')

        lp = LocalChild(self.conf, FakeDualNetwork())
        lp.disable(retain_port=False)

        expected = [mock.call.DeviceManager().destroy(mock.ANY, mock.ANY),
                    mock.call.rmtree(mock.ANY, ignore_errors=True)]
        parent.assert_has_calls(expected)

    def test_get_interface_name(self):
        """interface_name is read back from the interface file."""
        net = FakeDualNetwork()
        path = '/dhcp/%s/interface' % net.id
        self.useFixture(tools.OpenFixture(path, 'tap0'))
        lp = LocalChild(self.conf, net)
        self.assertEqual(lp.interface_name, 'tap0')

    def test_set_interface_name(self):
        """Assigning interface_name writes it via replace_file."""
        with mock.patch('neutron_lib.utils.file.replace_file') as replace:
            lp = LocalChild(self.conf, FakeDualNetwork())
            with mock.patch.object(lp, 'get_conf_file_name') as conf_file:
                conf_file.return_value = '/interface'
                lp.interface_name = 'tap0'
                conf_file.assert_called_once_with('interface')
                replace.assert_called_once_with(mock.ANY, 'tap0')
class TestDnsmasq(TestBase):
def _get_dnsmasq(self, network, process_monitor=None):
process_monitor = process_monitor or mock.Mock()
return dhcp.Dnsmasq(self.conf, network,
process_monitor=process_monitor)
    def _test_spawn(self, extra_options, network=FakeDualNetwork(),
                    max_leases=16777216, lease_duration=86400,
                    has_static=True, no_resolv='--no-resolv',
                    has_stateless=True):
        """Spawn dnsmasq for *network* and assert the exact command line.

        *extra_options* are appended to the expected argv; the other
        keywords shape the expected --dhcp-range / lease arguments.
        """
        def mock_get_conf_file_name(kind):
            return '/dhcp/%s/%s' % (network.id, kind)

        # if you need to change this path here, think twice,
        # that means pid files will move around, breaking upgrades
        # or backwards-compatibility
        expected_pid_file = '/dhcp/%s/pid' % network.id

        expected = [
            'dnsmasq',
            '--no-hosts',
            no_resolv,
            '--except-interface=lo',
            '--pid-file=%s' % expected_pid_file,
            '--dhcp-hostsfile=/dhcp/%s/host' % network.id,
            '--addn-hosts=/dhcp/%s/addn_hosts' % network.id,
            '--dhcp-optsfile=/dhcp/%s/opts' % network.id,
            '--dhcp-leasefile=/dhcp/%s/leases' % network.id,
            '--dhcp-match=set:ipxe,175',
            '--bind-interfaces',
            '--interface=tap0',
        ]

        seconds = ''
        if lease_duration == -1:
            lease_duration = 'infinite'
        else:
            seconds = 's'
        if has_static:
            prefix = '--dhcp-range=set:tag%d,%s,static,%s%s'
            prefix6 = '--dhcp-range=set:tag%d,%s,static,%s,%s%s'
        elif has_stateless:
            prefix = '--dhcp-range=set:tag%d,%s,%s%s'
            prefix6 = '--dhcp-range=set:tag%d,%s,%s,%s%s'
        possible_leases = 0
        for i, s in enumerate(network.subnets):
            if (s.ip_version != 6
                    or s.ipv6_address_mode == constants.DHCPV6_STATEFUL):
                if s.ip_version == 4:
                    expected.extend([prefix % (
                        i, s.cidr.split('/')[0], lease_duration, seconds)])
                else:
                    # v6 ranges additionally carry the prefix length.
                    expected.extend([prefix6 % (
                        i, s.cidr.split('/')[0], s.cidr.split('/')[1],
                        lease_duration, seconds)])
                possible_leases += netaddr.IPNetwork(s.cidr).size

        if hasattr(network, 'mtu'):
            expected.append(
                '--dhcp-option-force=option:mtu,%s' % network.mtu)

        # Lease count is capped at max_leases.
        expected.append('--dhcp-lease-max=%d' % min(
            possible_leases, max_leases))
        expected.extend(extra_options)

        self.execute.return_value = ('', '')

        attrs_to_mock = dict(
            [(a, mock.DEFAULT) for a in
                ['_output_opts_file', 'get_conf_file_name', 'interface_name']]
        )

        test_pm = mock.Mock()

        with mock.patch.multiple(dhcp.Dnsmasq, **attrs_to_mock) as mocks:
            mocks['get_conf_file_name'].side_effect = mock_get_conf_file_name
            mocks['_output_opts_file'].return_value = (
                '/dhcp/%s/opts' % network.id
            )
            mocks['interface_name'].__get__ = mock.Mock(return_value='tap0')

            dm = self._get_dnsmasq(network, test_pm)
            dm.spawn_process()
            self.assertTrue(mocks['_output_opts_file'].called)

            self.assertTrue(test_pm.register.called)
            self.external_process().enable.assert_called_once_with(
                reload_cfg=False)
            call_kwargs = self.external_process.mock_calls[0][2]
            cmd_callback = call_kwargs['default_cmd_callback']

            result_cmd = cmd_callback(expected_pid_file)

            self.assertEqual(expected, result_cmd)
    def test_spawn(self):
        """Default spawn passes an empty conf file and the default domain."""
        self._test_spawn(['--conf-file=', '--domain=openstacklocal'])
    def test_spawn_infinite_lease_duration(self):
        """dhcp_lease_duration=-1 renders 'infinite' dhcp-ranges."""
        self.conf.set_override('dhcp_lease_duration', -1)
        self._test_spawn(['--conf-file=', '--domain=openstacklocal'],
                         FakeDualNetwork(), 16777216, -1)
    def test_spawn_cfg_config_file(self):
        """A configured dnsmasq_config_file shows up in --conf-file."""
        self.conf.set_override('dnsmasq_config_file', '/foo')
        self._test_spawn(['--conf-file=/foo', '--domain=openstacklocal'])
    def test_spawn_no_dns_domain(self):
        """With an empty dns_domain no --domain flag is passed and host
        files contain bare hostnames."""
        (exp_host_name, exp_host_data,
         exp_addn_name, exp_addn_data) = self._test_no_dns_domain_alloc_data
        self.conf.set_override('dns_domain', '')
        network = FakeDualNetwork(domain=self.conf.dns_domain)
        self._test_spawn(['--conf-file='], network=network)
        self.safe.assert_has_calls([mock.call(exp_host_name, exp_host_data),
                                    mock.call(exp_addn_name, exp_addn_data)])
    def test_spawn_no_dhcp_range(self):
        """A SLAAC-only v6 subnet produces no static dhcp-range."""
        network = FakeV6Network()
        subnet = FakeV6SubnetSlaac()
        network.subnets = [subnet]
        self._test_spawn(['--conf-file=', '--domain=openstacklocal'],
                         network, has_static=False)
    def test_spawn_no_dhcp_range_bad_prefix_length(self):
        """A stateless v6 subnet with a bad prefix length gets no range."""
        network = FakeV6NetworkStatelessDHCPBadPrefixLength()
        subnet = FakeV6SubnetStatelessBadPrefixLength()
        network.subnets = [subnet]
        self._test_spawn(['--conf-file=', '--domain=openstacklocal'],
                         network, has_static=False, has_stateless=False)
    def test_spawn_cfg_dns_server(self):
        """A configured upstream DNS server adds a --server flag."""
        self.conf.set_override('dnsmasq_dns_servers', ['8.8.8.8'])
        self._test_spawn(['--conf-file=',
                          '--server=8.8.8.8',
                          '--domain=openstacklocal'])
    def test_spawn_cfg_multiple_dns_server(self):
        """Multiple upstream DNS servers yield one --server flag each."""
        self.conf.set_override('dnsmasq_dns_servers', ['8.8.8.8',
                                                       '9.9.9.9'])
        self._test_spawn(['--conf-file=',
                          '--server=8.8.8.8',
                          '--server=9.9.9.9',
                          '--domain=openstacklocal'])
    def test_spawn_cfg_enable_dnsmasq_log(self):
        """Setting a log dir enables query/dhcp logging to a per-network
        file and creates the network's log directory."""
        self.conf.set_override('dnsmasq_base_log_dir', '/tmp')
        network = FakeV4Network()
        dhcp_dns_log = \
            '/tmp/aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/dhcp_dns_log'

        self._test_spawn(['--conf-file=',
                          '--domain=openstacklocal',
                          '--log-queries',
                          '--log-dhcp',
                          ('--log-facility=%s' % dhcp_dns_log)],
                         network)
        self.makedirs.assert_called_with(os.path.join('/tmp', network.id))
    def test_spawn_cfg_with_local_resolv(self):
        """dnsmasq_local_resolv drops the --no-resolv flag."""
        self.conf.set_override('dnsmasq_local_resolv', True)

        self._test_spawn(['--conf-file=', '--domain=openstacklocal'],
                         no_resolv='')
    def test_spawn_cfg_with_local_resolv_overridden(self):
        """Explicit dnsmasq_dns_servers wins over dnsmasq_local_resolv."""
        self.conf.set_override('dnsmasq_local_resolv', True)
        self.conf.set_override('dnsmasq_dns_servers', ['8.8.8.8'])

        self._test_spawn(['--conf-file=',
                          '--server=8.8.8.8',
                          '--domain=openstacklocal'])
    def test_spawn_max_leases_is_smaller_than_cap(self):
        """--dhcp-lease-max uses the smaller of subnet size and the cap."""
        self._test_spawn(
            ['--conf-file=', '--domain=openstacklocal'],
            network=FakeV4Network(),
            max_leases=256)
    def test_spawn_cfg_broadcast(self):
        """dhcp_broadcast_reply adds --dhcp-broadcast."""
        self.conf.set_override('dhcp_broadcast_reply', True)
        self._test_spawn(['--conf-file=', '--domain=openstacklocal',
                          '--dhcp-broadcast'])
    def test_spawn_cfg_advertise_mtu(self):
        """A network mtu attribute adds the option:mtu force option."""
        network = FakeV4Network()
        network.mtu = 1500
        self._test_spawn(['--conf-file=', '--domain=openstacklocal'],
                         network)
    def test_spawn_cfg_advertise_mtu_plugin_doesnt_pass_mtu_value(self):
        """Without an mtu attribute no mtu option is advertised."""
        network = FakeV4Network()
        self._test_spawn(['--conf-file=', '--domain=openstacklocal'],
                         network)
    def _test_output_init_lease_file(self, timestamp):
        """Render the initial lease file and assert every line carries
        *timestamp* as its expiry prefix."""
        expected = [
            '00:00:80:aa:bb:cc 192.168.0.2 * *',
            '00:00:f3:aa:bb:cc [fdca:3ba5:a17a:4ba3::2] * *',
            '00:00:0f:aa:bb:cc 192.168.0.3 * *',
            '00:00:0f:aa:bb:cc [fdca:3ba5:a17a:4ba3::3] * *',
            '00:00:0f:rr:rr:rr 192.168.0.1 * *\n']
        expected = "\n".join(['%s %s' % (timestamp, l) for l in expected])
        with mock.patch.object(dhcp.Dnsmasq, 'get_conf_file_name') as conf_fn:
            conf_fn.return_value = '/foo/leases'
            dm = self._get_dnsmasq(FakeDualNetwork())
            dm._output_init_lease_file()
        self.safe.assert_called_once_with('/foo/leases', expected)
    @mock.patch('time.time')
    def test_output_init_lease_file(self, tmock):
        """Finite lease duration: expiry is now + dhcp_lease_duration."""
        self.conf.set_override('dhcp_lease_duration', 500)
        tmock.return_value = 1000000
        # lease duration should be added to current time
        timestamp = 1000000 + 500
        self._test_output_init_lease_file(timestamp)
    def test_output_init_lease_file_infinite_duration(self):
        """Infinite lease duration writes a 0 expiry timestamp."""
        self.conf.set_override('dhcp_lease_duration', -1)
        # when duration is infinite, lease db timestamp should be 0
        timestamp = 0
        self._test_output_init_lease_file(timestamp)
    def _test_output_opts_file(self, expected, network, ipm_retval=None):
        """Render the dnsmasq opts file for *network* and compare it to
        *expected*; optionally stub the subnet->interface-IP map."""
        with mock.patch.object(dhcp.Dnsmasq, 'get_conf_file_name') as conf_fn:
            conf_fn.return_value = '/foo/opts'
            dm = self._get_dnsmasq(network)
            if ipm_retval:
                with mock.patch.object(
                        dm, '_make_subnet_interface_ip_map') as ipm:
                    ipm.return_value = ipm_retval
                    dm._output_opts_file()
                    self.assertTrue(ipm.called)
            else:
                dm._output_opts_file()
        self.safe.assert_called_once_with('/foo/opts', expected)
    def test_output_opts_file(self):
        """Dual-stack network: v4 routes/router plus v6 dns/domain opts."""
        fake_v6 = '2001:0200:feed:7ac0::1'
        expected = (
            'tag:tag0,option:dns-server,8.8.8.8\n'
            'tag:tag0,option:classless-static-route,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,249,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,option:router,192.168.0.1\n'
            'tag:tag1,option6:dns-server,%s\n'
            'tag:tag1,option6:domain-search,openstacklocal').lstrip() % (
            '[' + fake_v6 + ']')

        self._test_output_opts_file(expected, FakeDualNetwork())
    def test_output_opts_file_gateway_route(self):
        """Gateway-as-route network: default route points at the gateway."""
        fake_v6 = '2001:0200:feed:7ac0::1'
        expected = ('tag:tag0,option:dns-server,8.8.8.8\n'
                    'tag:tag0,option:classless-static-route,'
                    '169.254.169.254/32,192.168.0.1,0.0.0.0/0,'
                    '192.168.0.1\ntag:tag0,249,169.254.169.254/32,'
                    '192.168.0.1,0.0.0.0/0,192.168.0.1\n'
                    'tag:tag0,option:router,192.168.0.1\n'
                    'tag:tag1,option6:dns-server,%s\n'
                    'tag:tag1,option6:domain-search,'
                    'openstacklocal').lstrip() % ('[' + fake_v6 + ']')

        self._test_output_opts_file(expected, FakeDualNetworkGatewayRoute())
    def test_output_opts_file_multiple_agents_without_dns_provided(self):
        """No subnet DNS: the DHCP agents' own IPs become dns-servers."""
        expected = ('tag:tag0,option:classless-static-route,'
                    '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
                    'tag:tag0,249,169.254.169.254/32,192.168.0.1,0.0.0.0/0,'
                    '192.168.0.1\ntag:tag0,option:router,192.168.0.1\n'
                    'tag:tag0,option:dns-server,192.168.0.5,'
                    '192.168.0.6').lstrip()

        self._test_output_opts_file(expected,
                                    FakeV4MultipleAgentsWithoutDnsProvided())
    def test_output_opts_file_agent_dns_provided(self):
        """Single agent without subnet DNS: no dns-server option emitted."""
        expected = ('tag:tag0,option:classless-static-route,'
                    '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
                    'tag:tag0,249,169.254.169.254/32,192.168.0.1,0.0.0.0/0,'
                    '192.168.0.1\ntag:tag0,option:router,192.168.0.1'
                    ).lstrip()

        self._test_output_opts_file(expected,
                                    FakeV4AgentWithoutDnsProvided())
    def test_output_opts_file_agent_with_many_dns_provided(self):
        """Subnet-provided DNS server list is preserved in order."""
        expected = ('tag:tag0,'
                    'option:dns-server,2.2.2.2,9.9.9.9,1.1.1.1,3.3.3.3\n'
                    'tag:tag0,option:classless-static-route,'
                    '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
                    'tag:tag0,249,169.254.169.254/32,192.168.0.1,0.0.0.0/0,'
                    '192.168.0.1\n'
                    'tag:tag0,option:router,192.168.0.1').lstrip()

        self._test_output_opts_file(expected,
                                    FakeV4AgentWithManyDnsProvided())
    def test_output_opts_file_multiple_agents_with_dns_provided(self):
        """Subnet DNS wins over agent IPs even with multiple agents."""
        expected = ('tag:tag0,option:dns-server,8.8.8.8\n'
                    'tag:tag0,option:classless-static-route,'
                    '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
                    'tag:tag0,249,169.254.169.254/32,192.168.0.1,0.0.0.0/0,'
                    '192.168.0.1\n'
                    'tag:tag0,option:router,192.168.0.1').lstrip()

        self._test_output_opts_file(expected,
                                    FakeV4MultipleAgentsWithDnsProvided())
    def test_output_opts_file_single_dhcp(self):
        """Isolated second subnet gets a 0.0.0.0 on-link route entry."""
        expected = (
            'tag:tag0,option:dns-server,8.8.8.8\n'
            'tag:tag0,option:classless-static-route,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,'
            '192.168.1.0/24,0.0.0.0,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,249,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,192.168.1.0/24,0.0.0.0,'
            '0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,option:router,192.168.0.1').lstrip()

        self._test_output_opts_file(expected, FakeDualNetworkSingleDHCP())
    def test_output_opts_file_single_dhcp_both_not_isolated(self):
        """Both subnets router-attached: no extra on-link route emitted."""
        expected = (
            'tag:tag0,option:dns-server,8.8.8.8\n'
            'tag:tag0,option:classless-static-route,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,249,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,option:router,192.168.0.1').lstrip()

        self._test_output_opts_file(expected,
                                    FakeDualNetworkSingleDHCPBothAttaced())
    def test_output_opts_file_dual_dhcp_rfc3442(self):
        """Two DHCP subnets: each tag advertises the other as on-link
        (RFC 3442 classless static routes)."""
        expected = (
            'tag:tag0,option:dns-server,8.8.8.8\n'
            'tag:tag0,option:classless-static-route,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,'
            '192.168.1.0/24,0.0.0.0,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,249,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,192.168.1.0/24,0.0.0.0,'
            '0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,option:router,192.168.0.1\n'
            'tag:tag1,option:dns-server,8.8.8.8\n'
            'tag:tag1,option:classless-static-route,'
            '169.254.169.254/32,192.168.1.1,'
            '192.168.0.0/24,0.0.0.0,0.0.0.0/0,192.168.1.1\n'
            'tag:tag1,249,169.254.169.254/32,192.168.1.1,'
            '192.168.0.0/24,0.0.0.0,0.0.0.0/0,192.168.1.1\n'
            'tag:tag1,option:router,192.168.1.1').lstrip()

        self._test_output_opts_file(expected, FakeDualNetworkDualDHCP())
    def test_output_opts_file_no_gateway(self):
        """No gateway: metadata route via the agent IP, empty router opt."""
        expected = (
            'tag:tag0,option:classless-static-route,'
            '169.254.169.254/32,192.168.1.1\n'
            'tag:tag0,249,169.254.169.254/32,192.168.1.1\n'
            'tag:tag0,option:router').lstrip()

        ipm_retval = {FakeV4SubnetNoGateway().id: '192.168.1.1'}
        self._test_output_opts_file(expected, FakeV4NoGatewayNetwork(),
                                    ipm_retval=ipm_retval)
    def test_output_opts_file_no_neutron_router_on_subnet(self):
        """No neutron router: metadata served via the agent interface IP."""
        expected = (
            'tag:tag0,option:classless-static-route,'
            '169.254.169.254/32,192.168.1.2,0.0.0.0/0,192.168.1.1\n'
            'tag:tag0,249,169.254.169.254/32,192.168.1.2,'
            '0.0.0.0/0,192.168.1.1\n'
            'tag:tag0,option:router,192.168.1.1').lstrip()

        ipm_retval = {FakeV4SubnetNoRouter().id: '192.168.1.2'}
        self._test_output_opts_file(expected, FakeV4NetworkNoRouter(),
                                    ipm_retval=ipm_retval)
    def test_output_opts_file_dist_neutron_router_on_subnet(self):
        """Distributed router: metadata route goes via the router IP."""
        expected = (
            'tag:tag0,option:dns-server,8.8.8.8\n'
            'tag:tag0,option:classless-static-route,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,249,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,option:router,192.168.0.1').lstrip()

        ipm_retval = {FakeV4Subnet().id: '192.168.0.1'}
        self._test_output_opts_file(expected, FakeV4NetworkDistRouter(),
                                    ipm_retval=ipm_retval)
    def test_output_opts_file_pxe_2port_1net(self):
        """Two ports, one net, identical PXE opts: per-port tagged opts."""
        expected = (
            'tag:tag0,option:dns-server,8.8.8.8\n'
            'tag:tag0,option:classless-static-route,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,249,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,option:router,192.168.0.1\n'
            'tag:eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee,'
            'option:tftp-server,192.168.0.3\n'
            'tag:eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee,'
            'option:server-ip-address,192.168.0.2\n'
            'tag:eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee,'
            'option:bootfile-name,pxelinux.0\n'
            'tag:ffffffff-ffff-ffff-ffff-ffffffffffff,'
            'option:tftp-server,192.168.0.3\n'
            'tag:ffffffff-ffff-ffff-ffff-ffffffffffff,'
            'option:server-ip-address,192.168.0.2\n'
            'tag:ffffffff-ffff-ffff-ffff-ffffffffffff,'
            'option:bootfile-name,pxelinux.0').lstrip()

        self._test_output_opts_file(expected, FakeV4NetworkPxe2Ports())
    def test_output_opts_file_pxe_2port_1net_diff_details(self):
        """Two ports with differing PXE opts keep their own values."""
        expected = (
            'tag:tag0,option:dns-server,8.8.8.8\n'
            'tag:tag0,option:classless-static-route,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,249,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,option:router,192.168.0.1\n'
            'tag:eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee,'
            'option:tftp-server,192.168.0.3\n'
            'tag:eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee,'
            'option:server-ip-address,192.168.0.2\n'
            'tag:eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee,'
            'option:bootfile-name,pxelinux.0\n'
            'tag:ffffffff-ffff-ffff-ffff-ffffffffffff,'
            'option:tftp-server,192.168.0.5\n'
            'tag:ffffffff-ffff-ffff-ffff-ffffffffffff,'
            'option:server-ip-address,192.168.0.5\n'
            'tag:ffffffff-ffff-ffff-ffff-ffffffffffff,'
            'option:bootfile-name,pxelinux.0').lstrip()

        self._test_output_opts_file(expected,
                                    FakeV4NetworkPxe2Ports("portsDiff"))
    def test_output_opts_file_pxe_3port_2net(self):
        """Three PXE ports over two subnets: each port tag gets its own
        option set alongside the per-subnet routes."""
        expected = (
            'tag:tag0,option:dns-server,8.8.8.8\n'
            'tag:tag0,option:classless-static-route,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,'
            '192.168.1.0/24,0.0.0.0,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,249,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,192.168.1.0/24,0.0.0.0,'
            '0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,option:router,192.168.0.1\n'
            'tag:eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee,'
            'option:tftp-server,192.168.0.3\n'
            'tag:eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee,'
            'option:server-ip-address,192.168.0.2\n'
            'tag:eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee,'
            'option:bootfile-name,pxelinux.0\n'
            'tag:ffffffff-ffff-ffff-ffff-ffffffffffff,'
            'option:tftp-server,192.168.1.3\n'
            'tag:ffffffff-ffff-ffff-ffff-ffffffffffff,'
            'option:server-ip-address,192.168.1.2\n'
            'tag:ffffffff-ffff-ffff-ffff-ffffffffffff,'
            'option:bootfile-name,pxelinux2.0\n'
            'tag:44444444-4444-4444-4444-444444444444,'
            'option:tftp-server,192.168.1.3\n'
            'tag:44444444-4444-4444-4444-444444444444,'
            'option:server-ip-address,192.168.1.2\n'
            'tag:44444444-4444-4444-4444-444444444444,'
            'option:bootfile-name,pxelinux3.0').lstrip()

        self._test_output_opts_file(expected, FakeDualV4Pxe3Ports())
    def test_output_opts_file_multiple_tags(self):
        """A 'tag:...'-prefixed option name is passed through verbatim."""
        expected = (
            'tag:tag0,option:dns-server,8.8.8.8\n'
            'tag:tag0,option:classless-static-route,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,249,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,option:router,192.168.0.1\n'
            'tag:eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee,'
            'tag:ipxe,option:bootfile-name,pxelinux.0')
        expected = expected.lstrip()

        with mock.patch.object(dhcp.Dnsmasq, 'get_conf_file_name') as conf_fn:
            conf_fn.return_value = '/foo/opts'
            dm = self._get_dnsmasq(FakeV4NetworkMultipleTags())
            dm._output_opts_file()

        self.safe.assert_called_once_with('/foo/opts', expected)
    @mock.patch('neutron.agent.linux.dhcp.Dnsmasq.get_conf_file_name',
                return_value='/foo/opts')
    def test_output_opts_file_pxe_ipv6_port_with_ipv6_opt(self,
                                                          mock_get_conf_fn):
        """v6 port options render with the option6: prefix."""
        expected = (
            'tag:tag0,option6:dns-server,[2001:0200:feed:7ac0::1]\n'
            'tag:tag0,option6:domain-search,openstacklocal\n'
            'tag:hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh,'
            'option6:tftp-server,2001:192:168::1\n'
            'tag:hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh,'
            'option6:bootfile-name,pxelinux.0')
        expected = expected.lstrip()

        dm = self._get_dnsmasq(FakeV6NetworkPxePort())
        dm._output_opts_file()

        self.safe.assert_called_once_with('/foo/opts', expected)
    @mock.patch('neutron.agent.linux.dhcp.Dnsmasq.get_conf_file_name',
                return_value='/foo/opts')
    def test_output_opts_file_pxe_ipv6_port_with_ipv4_opt(self,
                                                          mock_get_conf_fn):
        """An ip_version=4 option on a v6 network is silently dropped."""
        expected = (
            'tag:tag0,option6:dns-server,[2001:0200:feed:7ac0::1]\n'
            'tag:tag0,option6:domain-search,openstacklocal\n'
            'tag:hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh,'
            'option6:bootfile-name,pxelinux.0')
        expected = expected.lstrip()

        dm = self._get_dnsmasq(FakeV6NetworkPxePortWrongOptVersion())
        dm._output_opts_file()

        self.safe.assert_called_once_with('/foo/opts', expected)
    def test_output_opts_file_ipv6_address_mode_unset(self):
        """Plain v6 network still renders dns-server and domain-search."""
        fake_v6 = '2001:0200:feed:7ac0::1'
        expected = (
            'tag:tag0,option6:dns-server,%s\n'
            'tag:tag0,option6:domain-search,openstacklocal').lstrip() % (
            '[' + fake_v6 + ']')

        self._test_output_opts_file(expected, FakeV6Network())
    def test_output_opts_file_ipv6_address_force_metadata(self):
        """force_metadata does not change the v6 option output."""
        fake_v6 = '2001:0200:feed:7ac0::1'
        expected = (
            'tag:tag0,option6:dns-server,%s\n'
            'tag:tag0,option6:domain-search,openstacklocal').lstrip() % (
            '[' + fake_v6 + ']')
        self.conf.force_metadata = True
        self._test_output_opts_file(expected, FakeV6Network())
    @property
    def _test_no_dns_domain_alloc_data(self):
        """Expected host/addn_hosts file paths and contents when the
        dns_domain is empty (bare hostnames, no domain suffix)."""
        exp_host_name = '/dhcp/cccccccc-cccc-cccc-cccc-cccccccccccc/host'
        exp_host_data = ('00:00:80:aa:bb:cc,host-192-168-0-2,'
                         '192.168.0.2\n'
                         '00:00:f3:aa:bb:cc,host-fdca-3ba5-a17a-4ba3--2,'
                         '[fdca:3ba5:a17a:4ba3::2]\n'
                         '00:00:0f:aa:bb:cc,host-192-168-0-3,'
                         '192.168.0.3\n'
                         '00:00:0f:aa:bb:cc,host-fdca-3ba5-a17a-4ba3--3,'
                         '[fdca:3ba5:a17a:4ba3::3]\n'
                         '00:00:0f:rr:rr:rr,host-192-168-0-1,'
                         '192.168.0.1\n').lstrip()
        exp_addn_name = '/dhcp/cccccccc-cccc-cccc-cccc-cccccccccccc/addn_hosts'
        exp_addn_data = (
            '192.168.0.2\t'
            'host-192-168-0-2 host-192-168-0-2\n'
            'fdca:3ba5:a17a:4ba3::2\t'
            'host-fdca-3ba5-a17a-4ba3--2 '
            'host-fdca-3ba5-a17a-4ba3--2\n'
            '192.168.0.3\thost-192-168-0-3 '
            'host-192-168-0-3\n'
            'fdca:3ba5:a17a:4ba3::3\t'
            'host-fdca-3ba5-a17a-4ba3--3 '
            'host-fdca-3ba5-a17a-4ba3--3\n'
            '192.168.0.1\t'
            'host-192-168-0-1 '
            'host-192-168-0-1\n'
        ).lstrip()
        return (exp_host_name, exp_host_data,
                exp_addn_name, exp_addn_data)
    @property
    def _test_reload_allocation_data(self):
        """Expected host/addn_hosts/opts file paths and contents for
        reload_allocations with the default openstacklocal domain."""
        exp_host_name = '/dhcp/cccccccc-cccc-cccc-cccc-cccccccccccc/host'
        exp_host_data = ('00:00:80:aa:bb:cc,host-192-168-0-2.openstacklocal.,'
                         '192.168.0.2\n'
                         '00:00:f3:aa:bb:cc,host-fdca-3ba5-a17a-4ba3--2.'
                         'openstacklocal.,[fdca:3ba5:a17a:4ba3::2]\n'
                         '00:00:0f:aa:bb:cc,host-192-168-0-3.openstacklocal.,'
                         '192.168.0.3\n'
                         '00:00:0f:aa:bb:cc,host-fdca-3ba5-a17a-4ba3--3.'
                         'openstacklocal.,[fdca:3ba5:a17a:4ba3::3]\n'
                         '00:00:0f:rr:rr:rr,host-192-168-0-1.openstacklocal.,'
                         '192.168.0.1\n').lstrip()
        exp_addn_name = '/dhcp/cccccccc-cccc-cccc-cccc-cccccccccccc/addn_hosts'
        exp_addn_data = (
            '192.168.0.2\t'
            'host-192-168-0-2.openstacklocal. host-192-168-0-2\n'
            'fdca:3ba5:a17a:4ba3::2\t'
            'host-fdca-3ba5-a17a-4ba3--2.openstacklocal. '
            'host-fdca-3ba5-a17a-4ba3--2\n'
            '192.168.0.3\thost-192-168-0-3.openstacklocal. '
            'host-192-168-0-3\n'
            'fdca:3ba5:a17a:4ba3::3\t'
            'host-fdca-3ba5-a17a-4ba3--3.openstacklocal. '
            'host-fdca-3ba5-a17a-4ba3--3\n'
            '192.168.0.1\t'
            'host-192-168-0-1.openstacklocal. '
            'host-192-168-0-1\n'
        ).lstrip()
        exp_opt_name = '/dhcp/cccccccc-cccc-cccc-cccc-cccccccccccc/opts'
        fake_v6 = '2001:0200:feed:7ac0::1'
        exp_opt_data = (
            'tag:tag0,option:dns-server,8.8.8.8\n'
            'tag:tag0,option:classless-static-route,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,249,20.0.0.1/24,20.0.0.1,'
            '169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
            'tag:tag0,option:router,192.168.0.1\n'
            'tag:tag1,option6:dns-server,%s\n'
            'tag:tag1,option6:domain-search,openstacklocal').lstrip() % (
            '[' + fake_v6 + ']')
        return (exp_host_name, exp_host_data,
                exp_addn_name, exp_addn_data,
                exp_opt_name, exp_opt_data,)
    def test_reload_allocations_no_interface(self):
        """Without an interface file, reload does not (re)register dnsmasq."""
        net = FakeDualNetwork()
        ipath = '/dhcp/%s/interface' % net.id
        self.useFixture(tools.OpenFixture(ipath))
        test_pm = mock.Mock()
        dm = self._get_dnsmasq(net, test_pm)
        dm.reload_allocations()
        self.assertFalse(test_pm.register.called)
    def test_reload_allocations(self):
        """reload_allocations rewrites all config files and reloads the
        registered dnsmasq process."""
        (exp_host_name, exp_host_data,
         exp_addn_name, exp_addn_data,
         exp_opt_name, exp_opt_data,) = self._test_reload_allocation_data

        net = FakeDualNetwork()
        hpath = '/dhcp/%s/host' % net.id
        ipath = '/dhcp/%s/interface' % net.id
        self.useFixture(tools.OpenFixture(hpath))
        self.useFixture(tools.OpenFixture(ipath, 'tapdancingmice'))
        test_pm = mock.Mock()
        dm = self._get_dnsmasq(net, test_pm)
        dm.reload_allocations()
        self.assertTrue(test_pm.register.called)
        self.external_process().enable.assert_called_once_with(
            reload_cfg=True)

        self.safe.assert_has_calls([
            mock.call(exp_host_name, exp_host_data),
            mock.call(exp_addn_name, exp_addn_data),
            mock.call(exp_opt_name, exp_opt_data),
        ])
    def test_release_unused_leases(self):
        """Leases with no matching port are released; v6 leases carry the
        client/server id and iaid from the v6 lease file."""
        dnsmasq = self._get_dnsmasq(FakeDualNetwork())

        ip1 = '192.168.1.2'
        mac1 = '00:00:80:aa:bb:cc'
        ip2 = '192.168.1.3'
        mac2 = '00:00:80:cc:bb:aa'
        ip3 = '0001:0002:0003:004:0005:0006:0007:0008'
        mac3 = '00:00:80:bb:aa:cc'

        old_leases = {(ip1, mac1, None), (ip2, mac2, None), (ip3, mac3, None)}
        dnsmasq._read_hosts_file_leases = mock.Mock(return_value=old_leases)
        dnsmasq._read_v6_leases_file_leases = mock.Mock(
            return_value={
                '0001:0002:0003:004:0005:0006:0007:0008':
                    {'iaid': 0xff,
                     'client_id': 'client_id',
                     'server_id': 'server_id'}}
        )
        dnsmasq._output_hosts_file = mock.Mock()
        dnsmasq._release_lease = mock.Mock()
        dnsmasq.network.ports = []
        dnsmasq.device_manager.unplug = mock.Mock()

        dnsmasq._release_unused_leases()

        dnsmasq._release_lease.assert_has_calls([mock.call(mac1, ip1, None),
                                                 mock.call(mac2, ip2, None),
                                                 mock.call(mac3, ip3,
                                                           'client_id',
                                                           'server_id',
                                                           0xff),
                                                 ],
                                                any_order=True)
def test_release_for_ipv6_lease(self):
dnsmasq = self._get_dnsmasq(FakeDualNetwork())
ip1 = 'fdca:3ba5:a17a::1'
mac1 = '00:00:80:aa:bb:cc'
ip2 = '192.168.1.3'
mac2 = '00:00:80:cc:bb:aa'
old_leases = set([(ip1, mac1, None), (ip2, mac2, None)])
dnsmasq._read_hosts_file_leases = mock.Mock(return_value=old_leases)
dnsmasq._read_v6_leases_file_leases = mock.Mock(
return_value={'fdca:3ba5:a17a::1': {'iaid': 0xff,
'client_id': 'client_id',
'server_id': 'server_id'}
})
ipw = mock.patch(
'neutron.agent.linux.ip_lib.IpNetnsCommand.execute').start()
dnsmasq._release_unused_leases()
# Verify that dhcp_release is called both for ipv4 and ipv6 addresses.
self.assertEqual(2, ipw.call_count)
ipw.assert_has_calls([mock.call(['dhcp_release6',
'--iface', None, '--ip', ip1,
'--client-id', 'client_id',
'--server-id', 'server_id',
'--iaid', 0xff],
run_as_root=True)])
ipw.assert_has_calls([mock.call(['dhcp_release', None, ip2, mac2],
run_as_root=True), ])
def test_release_for_ipv6_lease_no_dhcp_release6(self):
dnsmasq = self._get_dnsmasq(FakeDualNetwork())
ip1 = 'fdca:3ba5:a17a::1'
mac1 = '00:00:80:aa:bb:cc'
old_leases = set([(ip1, mac1, None)])
dnsmasq._read_hosts_file_leases = mock.Mock(return_value=old_leases)
dnsmasq._read_v6_leases_file_leases = mock.Mock(
return_value={'fdca:3ba5:a17a::1': {'iaid': 0xff,
'client_id': 'client_id',
'server_id': 'server_id'}
})
ipw = mock.patch(
'neutron.agent.linux.ip_lib.IpNetnsCommand.execute').start()
dnsmasq._IS_DHCP_RELEASE6_SUPPORTED = False
dnsmasq._release_unused_leases()
# Verify that dhcp_release6 is not called when it is not present
ipw.assert_not_called()
def test_release_unused_leases_with_dhcp_port(self):
dnsmasq = self._get_dnsmasq(FakeNetworkDhcpPort())
ip1 = '192.168.1.2'
mac1 = '00:00:80:aa:bb:cc'
ip2 = '192.168.1.3'
mac2 = '00:00:80:cc:bb:aa'
ip6 = '2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d'
old_leases = set([(ip1, mac1, None), (ip2, mac2, None)])
dnsmasq._read_hosts_file_leases = mock.Mock(return_value=old_leases)
dnsmasq._read_v6_leases_file_leases = mock.Mock(
return_value={ip6: {'iaid': 0xff,
'client_id': 'client_id',
'server_id': 'server_id'}
})
dnsmasq._output_hosts_file = mock.Mock()
dnsmasq._release_lease = mock.Mock()
dnsmasq.device_manager.get_device_id = mock.Mock(
return_value='fake_dhcp_port')
dnsmasq._release_unused_leases()
self.assertFalse(
dnsmasq.device_manager.unplug.called)
self.assertFalse(
dnsmasq.device_manager.driver.unplug.called)
def test_release_unused_leases_with_client_id(self):
dnsmasq = self._get_dnsmasq(FakeDualNetwork())
ip1 = '192.168.1.2'
mac1 = '00:00:80:aa:bb:cc'
client_id1 = 'client1'
ip2 = '192.168.1.3'
mac2 = '00:00:80:cc:bb:aa'
client_id2 = 'client2'
ip6 = '2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d'
old_leases = set([(ip1, mac1, client_id1), (ip2, mac2, client_id2)])
dnsmasq._read_hosts_file_leases = mock.Mock(return_value=old_leases)
dnsmasq._read_v6_leases_file_leases = mock.Mock(
return_value={ip6: {'iaid': 0xff,
'client_id': 'client_id',
'server_id': 'server_id'}
})
dnsmasq._output_hosts_file = mock.Mock()
dnsmasq._release_lease = mock.Mock()
dnsmasq.network.ports = []
dnsmasq._release_unused_leases()
dnsmasq._release_lease.assert_has_calls(
[mock.call(mac1, ip1, client_id1),
mock.call(mac2, ip2, client_id2)],
any_order=True)
def test_release_unused_leases_one_lease(self):
dnsmasq = self._get_dnsmasq(FakeDualNetwork())
ip1 = '192.168.0.2'
mac1 = '00:00:80:aa:bb:cc'
ip2 = '192.168.0.3'
mac2 = '00:00:80:cc:bb:aa'
ip6 = '2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d'
old_leases = set([(ip1, mac1, None), (ip2, mac2, None)])
dnsmasq._read_hosts_file_leases = mock.Mock(return_value=old_leases)
dnsmasq._read_v6_leases_file_leases = mock.Mock(
return_value={ip6: {'iaid': 0xff,
'client_id': 'client_id',
'server_id': 'server_id'}
})
dnsmasq._output_hosts_file = mock.Mock()
dnsmasq._release_lease = mock.Mock()
dnsmasq.network.ports = [FakePort1()]
dnsmasq._release_unused_leases()
dnsmasq._release_lease.assert_called_once_with(
mac2, ip2, None)
def test_release_unused_leases_one_lease_with_client_id(self):
dnsmasq = self._get_dnsmasq(FakeDualNetwork())
ip1 = '192.168.0.2'
mac1 = '00:00:80:aa:bb:cc'
client_id1 = 'client1'
ip2 = '192.168.0.5'
mac2 = '00:00:0f:aa:bb:55'
client_id2 = 'test5'
ip6 = '2001:0db8:11a3:09d7:1f34:8a2e:07a0:765d'
old_leases = set([(ip1, mac1, client_id1), (ip2, mac2, client_id2)])
dnsmasq._read_hosts_file_leases = mock.Mock(return_value=old_leases)
dnsmasq._output_hosts_file = mock.Mock()
dnsmasq._read_v6_leases_file_leases = mock.Mock(
return_value={ip6: {'iaid': 0xff,
'client_id': 'client_id',
'server_id': 'server_id'}
})
dnsmasq._release_lease = mock.Mock()
dnsmasq.network.ports = [FakePort5()]
dnsmasq._release_unused_leases()
dnsmasq._release_lease.assert_called_once_with(
mac1, ip1, client_id1)
def test_read_hosts_file_leases(self):
    """Host-file rows without a client id yield (ip, mac, None) tuples."""
    path = '/path/to/file'
    content = ("00:00:80:aa:bb:cc,inst-name,192.168.0.1\n"
               "00:00:80:aa:bb:cc,inst-name,[fdca:3ba5:a17a::1]")
    mock_open = self.useFixture(
        tools.OpenFixture(path, content)).mock_open

    dnsmasq = self._get_dnsmasq(FakeDualNetwork())
    leases = dnsmasq._read_hosts_file_leases(path)

    expected = {("192.168.0.1", "00:00:80:aa:bb:cc", None),
                ("fdca:3ba5:a17a::1", "00:00:80:aa:bb:cc", None)}
    self.assertEqual(expected, leases)
    mock_open.assert_called_once_with(path)
def test_read_hosts_file_leases_with_client_id(self):
    # Rows carrying an "id:<client-id>" field must surface the client id
    # as the third tuple element.
    filename = '/path/to/file'
    lines = ["00:00:80:aa:bb:cc,id:client1,inst-name,192.168.0.1",
             "00:00:80:aa:bb:cc,id:client2,inst-name,"
             "[fdca:3ba5:a17a::1]"]
    mock_open = self.useFixture(
        tools.OpenFixture(filename, '\n'.join(lines))).mock_open
    dnsmasq = self._get_dnsmasq(FakeDualNetwork())

    leases = dnsmasq._read_hosts_file_leases(filename)

    self.assertEqual(set([("192.168.0.1", "00:00:80:aa:bb:cc", 'client1'),
                          ("fdca:3ba5:a17a::1", "00:00:80:aa:bb:cc",
                           'client2')]), leases)
    mock_open.assert_called_once_with(filename)
def test_read_hosts_file_leases_with_stateless_IPv6_tag(self):
    """'set:' tag rows (stateless IPv6) are ignored while parsing."""
    filename = self.get_temp_file_path('leases')
    with open(filename, "w") as leasesfile:
        leasesfile.writelines([
            "00:00:80:aa:bb:cc,id:client1,inst-name,192.168.0.1\n",
            "00:00:80:aa:bb:cc,set:ccccccccc-cccc-cccc-cccc-cccccccc\n",
            "00:00:80:aa:bb:cc,id:client2,inst-name,[fdca:3ba5:a17a::1]\n"])

    dnsmasq = self._get_dnsmasq(FakeDualNetwork())
    leases = dnsmasq._read_hosts_file_leases(filename)

    self.assertEqual({("192.168.0.1", "00:00:80:aa:bb:cc", 'client1'),
                      ("fdca:3ba5:a17a::1", "00:00:80:aa:bb:cc",
                       'client2')}, leases)
def _test_read_leases_file_leases(self, add_bad_line=False):
filename = '/path/to/file'
lines = [
"1472673289 aa:bb:cc:00:00:01 192.168.1.2 host-192-168-1-2 *",
"1472673289 aa:bb:cc:00:00:01 192.168.1.3 host-192-168-1-3 *",
"1472673289 aa:bb:cc:00:00:01 192.168.1.4 host-192-168-1-4 *",
"duid 00:01:00:01:02:03:04:05:06:07:08:09:0a:0b",
"1472597740 1044800001 [2001:DB8::a] host-2001-db8--a "
"00:04:4a:d0:d2:34:19:2b:49:08:84:e8:34:bd:0c:dc:b9:3b",
"1472597823 1044800002 [2001:DB8::b] host-2001-db8--b "
"00:04:ce:96:53:3d:f2:c2:4c:4c:81:7d:db:c9:8d:d2:74:22:3b:0a",
"1472599048 1044800003 [2001:DB8::c] host-2001-db8--c "
"00:04:4f:f0:cd:ca:5e:77:41:bc:9d:7f:5c:33:31:37:5d:80:77:b4"
]
bad_line = '1472673289 aa:bb:cc:00:00:05 192.168.1.5 host-192.168-1-5'
if add_bad_line:
lines.append(bad_line)
mock_open = self.useFixture(
tools.OpenFixture(filename, '\n'.join(lines))).mock_open
dnsmasq = self._get_dnsmasq(FakeDualNetwork())
with mock.patch('os.path.exists', return_value=True),\
mock.patch.object(dhcp.LOG, 'warning') as mock_log_warn:
leases = dnsmasq._read_v6_leases_file_leases(filename)
server_id = '00:01:00:01:02:03:04:05:06:07:08:09:0a:0b'
entry1 = {'iaid': '1044800001',
'client_id': '00:04:4a:d0:d2:34:19:2b:49:08:84:'
'e8:34:bd:0c:dc:b9:3b',
'server_id': server_id
}
entry2 = {'iaid': '1044800002',
'client_id': '00:04:ce:96:53:3d:f2:c2:4c:4c:81:'
'7d:db:c9:8d:d2:74:22:3b:0a',
'server_id': server_id
}
entry3 = {'iaid': '1044800003',
'client_id': '00:04:4f:f0:cd:ca:5e:77:41:bc:9d:'
'7f:5c:33:31:37:5d:80:77:b4',
'server_id': server_id
}
expected = {'2001:DB8::a': entry1,
'2001:DB8::b': entry2,
'2001:DB8::c': entry3
}
self.assertEqual(expected, leases)
mock_open.assert_called_once_with(filename)
if add_bad_line:
self.assertTrue(mock_log_warn.called)
def test_read_v6_leases_file_leases(self):
    """All well-formed lines parse without warnings."""
    self._test_read_leases_file_leases(add_bad_line=False)
def test_read_all_leases_file_leases_with_bad_line(self):
    """A malformed line is skipped and logged as a warning."""
    self._test_read_leases_file_leases(add_bad_line=True)
def test_make_subnet_interface_ip_map(self):
    # The DHCP device carries 192.168.0.1/24; the map should associate
    # the matching v4 subnet id with that interface address.
    with mock.patch('neutron.agent.linux.ip_lib.IPDevice') as ip_dev:
        ip_dev.return_value.addr.list.return_value = [
            {'cidr': '192.168.0.1/24'}
        ]

        dm = self._get_dnsmasq(FakeDualNetwork())

        self.assertEqual(
            dm._make_subnet_interface_ip_map(),
            {FakeV4Subnet().id: '192.168.0.1'}
        )
def test_remove_config_files(self):
    """_remove_config_files() removes the per-network conf directory."""
    conf_dir = '/opt/data/neutron/dhcp'
    self.conf.dhcp_confs = conf_dir
    network = FakeV4Network()

    worker = LocalChild(self.conf, network)
    worker._remove_config_files()

    expected = os.path.join(conf_dir, network.id)
    self.rmtree.assert_called_once_with(expected, ignore_errors=True)
def test_existing_dhcp_networks(self):
    path = '/opt/data/neutron/dhcp'
    self.conf.dhcp_confs = path

    cases = {
        # network_uuid --> is_dhcp_alive?
        'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa': True,
        'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb': False,
        'not_uuid_like_name': True
    }

    # Replace the Dnsmasq.active property so liveness comes from `cases`.
    def active_fake(self, instance, cls):
        return cases[instance.network.id]

    with mock.patch('os.listdir') as mock_listdir:
        with mock.patch.object(dhcp.Dnsmasq, 'active') as mock_active:
            mock_active.__get__ = active_fake
            mock_listdir.return_value = list(cases)

            result = dhcp.Dnsmasq.existing_dhcp_networks(self.conf)

            mock_listdir.assert_called_once_with(path)
            # Non-UUID directory names are skipped regardless of state.
            # NOTE(review): assertItemsEqual is the Python 2 spelling of
            # assertCountEqual -- presumably aliased by the test base
            # class; confirm if this suite runs on Python 3 only.
            self.assertItemsEqual(['aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                                   'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'],
                                  result)
def test__output_hosts_file_log_only_twice(self):
    dm = self._get_dnsmasq(FakeDualStackNetworkSingleDHCP())
    with mock.patch.object(dhcp, 'LOG') as logger:
        logger.process.return_value = ('fake_message', {})
        dm._output_hosts_file()
        # The method logs twice, at the start of and the end. There should be
        # no other logs, no matter how many hosts there are to dump in the
        # file.
        self.assertEqual(2, len(logger.method_calls))
def test_only_populates_dhcp_enabled_subnets(self):
    # Only hosts on DHCP-enabled subnets should land in the host file.
    exp_host_name = '/dhcp/eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee/host'
    exp_host_data = ('00:00:80:aa:bb:cc,host-192-168-0-2.openstacklocal.,'
                     '192.168.0.2\n'
                     '00:16:3E:C2:77:1D,host-192-168-0-4.openstacklocal.,'
                     '192.168.0.4\n'
                     '00:00:0f:rr:rr:rr,host-192-168-0-1.openstacklocal.,'
                     '192.168.0.1\n').lstrip()
    dm = self._get_dnsmasq(FakeDualStackNetworkSingleDHCP())
    dm._output_hosts_file()
    self.safe.assert_has_calls([mock.call(exp_host_name,
                                          exp_host_data)])
def test_only_populates_dhcp_client_id(self):
exp_host_name = '/dhcp/aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa/host'
exp_host_data = ('00:00:80:aa:bb:cc,host-192-168-0-2.openstacklocal.,'
'192.168.0.2\n'
'00:00:0f:aa:bb:55,id:test5,'
'host-192-168-0-5.openstacklocal.,'
'192.168.0.5\n'
'00:00:0f:aa:bb:66,id:test6,'
'host-192-168-0-6.openstacklocal.,192.168.0.6,'
'set:ccccccccc-cccc-cccc-cccc-ccccccccc\n').lstrip()
dm = self._get_dnsmasq(FakeV4NetworkClientId())
dm._output_hosts_file()
self.safe.assert_has_calls([mock.call(exp_host_name,
exp_host_data)])
def test_only_populates_dhcp_enabled_subnet_on_a_network(self):
exp_host_name = '/dhcp/cccccccc-cccc-cccc-cccc-cccccccccccc/host'
exp_host_data = ('00:00:80:aa:bb:cc,host-192-168-0-2.openstacklocal.,'
'192.168.0.2\n'
'00:00:f3:aa:bb:cc,host-192-168-0-3.openstacklocal.,'
'192.168.0.3\n'
'00:00:0f:aa:bb:cc,host-192-168-0-4.openstacklocal.,'
'192.168.0.4\n'
'00:00:0f:rr:rr:rr,host-192-168-0-1.openstacklocal.,'
'192.168.0.1\n').lstrip()
dm = self._get_dnsmasq(FakeDualNetworkSingleDHCP())
dm._output_hosts_file()
self.safe.assert_has_calls([mock.call(exp_host_name,
exp_host_data)])
def test_host_and_opts_file_on_stateless_dhcpv6_network(self):
exp_host_name = '/dhcp/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb/host'
exp_host_data = ('00:16:3e:c2:77:1d,'
'set:hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh\n').lstrip()
exp_opt_name = '/dhcp/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb/opts'
exp_opt_data = ('tag:tag0,option6:domain-search,openstacklocal\n'
'tag:hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh,'
'option6:dns-server,ffea:3ba5:a17a:4ba3::100').lstrip()
dm = self._get_dnsmasq(FakeV6NetworkStatelessDHCP())
dm._output_hosts_file()
dm._output_opts_file()
self.safe.assert_has_calls([mock.call(exp_host_name, exp_host_data),
mock.call(exp_opt_name, exp_opt_data)])
def test_host_file_on_net_with_v6_slaac_and_v4(self):
exp_host_name = '/dhcp/eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee/host'
exp_host_data = (
'00:00:80:aa:bb:cc,host-192-168-0-2.openstacklocal.,192.168.0.2,'
'set:eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee\n'
'00:16:3E:C2:77:1D,host-192-168-0-4.openstacklocal.,192.168.0.4,'
'set:gggggggg-gggg-gggg-gggg-gggggggggggg\n00:00:0f:rr:rr:rr,'
'host-192-168-0-1.openstacklocal.,192.168.0.1,'
'set:rrrrrrrr-rrrr-rrrr-rrrr-rrrrrrrrrrrr\n').lstrip()
dm = self._get_dnsmasq(FakeDualStackNetworkingSingleDHCPTags())
dm._output_hosts_file()
self.safe.assert_has_calls([mock.call(exp_host_name, exp_host_data)])
def test_host_and_opts_file_on_net_with_V6_stateless_and_V4_subnets(
self):
exp_host_name = '/dhcp/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb/host'
exp_host_data = (
'00:16:3e:c2:77:1d,set:hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh\n'
'00:16:3e:c2:77:1d,host-192-168-0-3.openstacklocal.,'
'192.168.0.3,set:hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh\n'
'00:00:0f:rr:rr:rr,'
'host-192-168-0-1.openstacklocal.,192.168.0.1\n').lstrip()
exp_opt_name = '/dhcp/bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb/opts'
exp_opt_data = (
'tag:tag0,option6:domain-search,openstacklocal\n'
'tag:tag1,option:dns-server,8.8.8.8\n'
'tag:tag1,option:classless-static-route,20.0.0.1/24,20.0.0.1,'
'169.254.169.254/32,192.168.0.1,0.0.0.0/0,192.168.0.1\n'
'tag:tag1,249,20.0.0.1/24,20.0.0.1,169.254.169.254/32,'
'192.168.0.1,0.0.0.0/0,192.168.0.1\n'
'tag:tag1,option:router,192.168.0.1\n'
'tag:hhhhhhhh-hhhh-hhhh-hhhh-hhhhhhhhhhhh,'
'option6:dns-server,ffea:3ba5:a17a:4ba3::100').lstrip()
dm = self._get_dnsmasq(FakeNetworkWithV6SatelessAndV4DHCPSubnets())
dm._output_hosts_file()
dm._output_opts_file()
self.safe.assert_has_calls([mock.call(exp_host_name, exp_host_data),
mock.call(exp_opt_name, exp_opt_data)])
def test_should_enable_metadata_isolated_network_returns_true(self):
    """A network with no router is isolated -> metadata proxy enabled."""
    network = FakeV4NetworkNoRouter()
    self.assertTrue(
        dhcp.Dnsmasq.should_enable_metadata(self.conf, network))
def test_should_enable_metadata_non_isolated_network_returns_false(self):
    """A routed (non-isolated) network does not need the proxy."""
    network = FakeV4NetworkDistRouter()
    self.assertFalse(
        dhcp.Dnsmasq.should_enable_metadata(self.conf, network))
def test_should_enable_metadata_isolated_meta_disabled_returns_false(self):
    """enable_isolated_metadata=False disables the proxy even when the
    network would otherwise qualify."""
    self.conf.set_override('enable_isolated_metadata', False)
    network = FakeV4MetadataNetwork()
    self.assertFalse(
        dhcp.Dnsmasq.should_enable_metadata(self.conf, network))
def test_should_enable_metadata_with_metadata_network_returns_true(self):
    """enable_metadata_network=True enables the proxy on a metadata net."""
    self.conf.set_override('enable_metadata_network', True)
    network = FakeV4MetadataNetwork()
    self.assertTrue(
        dhcp.Dnsmasq.should_enable_metadata(self.conf, network))
def test_should_force_metadata_returns_true(self):
    """force_metadata=True wins regardless of the network topology."""
    self.conf.set_override("force_metadata", True)
    network = FakeDualNetworkDualDHCP()
    self.assertTrue(
        dhcp.Dnsmasq.should_enable_metadata(self.conf, network))
def _test__generate_opts_per_subnet_helper(self, config_opts,
                                           expected_mdt_ip):
    """Run _generate_opts_per_subnet under `config_opts` and assert
    whether the metadata host route (METADATA_DEFAULT_IP/32) shows up.

    :param config_opts: dict of conf overrides to apply first.
    :param expected_mdt_ip: True iff the metadata route must be present.
    """
    for key, value in config_opts.items():
        self.conf.set_override(key, value)
    dm = self._get_dnsmasq(FakeNetworkDhcpPort())
    with mock.patch('neutron.agent.linux.ip_lib.IPDevice') as ipdev_mock:
        # Pretend each fixed IP of the DHCP port is plumbed on the device.
        list_addr = ipdev_mock.return_value.addr.list
        list_addr.return_value = [{'cidr': alloc.ip_address + '/24'}
                                  for alloc in FakeDhcpPort().fixed_ips]
        options, idx_map = dm._generate_opts_per_subnet()

    # Generator instead of a materialized list: any() short-circuits on
    # the first matching option line.
    contains_metadata_ip = any('%s/32' % dhcp.METADATA_DEFAULT_IP in line
                               for line in options)
    self.assertEqual(expected_mdt_ip, contains_metadata_ip)
def test__generate_opts_per_subnet_no_metadata(self):
    """Neither isolated nor forced metadata -> no metadata route."""
    overrides = {'enable_isolated_metadata': False,
                 'force_metadata': False}
    self._test__generate_opts_per_subnet_helper(overrides, False)
def test__generate_opts_per_subnet_isolated_metadata_with_router(self):
    """Isolated metadata enabled -> metadata route is advertised."""
    overrides = {'enable_isolated_metadata': True,
                 'force_metadata': False}
    self._test__generate_opts_per_subnet_helper(overrides, True)
def test__generate_opts_per_subnet_forced_metadata(self):
    """force_metadata alone is enough to advertise the metadata route."""
    overrides = {'enable_isolated_metadata': False,
                 'force_metadata': True}
    self._test__generate_opts_per_subnet_helper(overrides, True)
class TestDeviceManager(TestConfBase):
def setUp(self):
super(TestDeviceManager, self).setUp()
ip_lib_patcher = mock.patch('neutron.agent.linux.dhcp.ip_lib')
load_interface_driver_patcher = mock.patch(
'neutron.agent.linux.dhcp.agent_common_utils.'
'load_interface_driver')
self.mock_ip_lib = ip_lib_patcher.start()
self.mock_load_interface_driver = load_interface_driver_patcher.start()
def _test_setup(self, load_interface_driver, ip_lib, use_gateway_ips):
with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
# Create DeviceManager.
self.conf.register_opt(cfg.BoolOpt('enable_isolated_metadata',
default=False))
self.conf.register_opt(cfg.BoolOpt('force_metadata',
default=False))
plugin = mock.Mock()
device = mock.Mock()
mock_IPDevice.return_value = device
device.route.get_gateway.return_value = None
mgr = dhcp.DeviceManager(self.conf, plugin)
load_interface_driver.assert_called_with(self.conf)
# Setup with no existing DHCP port - expect a new DHCP port to
# be created.
network = FakeDeviceManagerNetwork()
network.tenant_id = 'Tenant A'
def mock_create(dict):
port = dhcp.DictModel(dict['port'])
port.id = 'abcd-123456789'
port.mac_address = '00-12-34-56-78-90'
port.fixed_ips = [
dhcp.DictModel({'subnet_id': ip['subnet_id'],
'ip_address': 'unique-IP-address'})
for ip in port.fixed_ips
]
# server rudely gave us an extra address we didn't ask for
port.fixed_ips.append(dhcp.DictModel(
{'subnet_id': 'ffffffff-6666-6666-6666-ffffffffffff',
'ip_address': '2003::f816:3eff:fe45:e893'}))
return port
plugin.create_dhcp_port.side_effect = mock_create
mgr.driver.get_device_name.return_value = 'ns-XXX'
mgr.driver.use_gateway_ips = use_gateway_ips
ip_lib.ensure_device_is_ready.return_value = True
mgr.setup(network)
plugin.create_dhcp_port.assert_called_with(mock.ANY)
mgr.driver.init_l3.assert_called_with('ns-XXX',
mock.ANY,
namespace='qdhcp-ns')
cidrs = set(mgr.driver.init_l3.call_args[0][1])
if use_gateway_ips:
self.assertEqual(cidrs, set(['%s/%s' % (s.gateway_ip,
s.cidr.split('/')[1])
for s in network.subnets]))
else:
self.assertEqual(cidrs, set(['unique-IP-address/24',
'unique-IP-address/64']))
# Now call setup again. This time we go through the existing
# port code path, and the driver's init_l3 method is called
# again.
plugin.create_dhcp_port.reset_mock()
mgr.driver.init_l3.reset_mock()
mgr.setup(network)
mgr.driver.init_l3.assert_called_with('ns-XXX',
mock.ANY,
namespace='qdhcp-ns')
cidrs = set(mgr.driver.init_l3.call_args[0][1])
if use_gateway_ips:
self.assertEqual(cidrs, set(['%s/%s' % (s.gateway_ip,
s.cidr.split('/')[1])
for s in network.subnets]))
else:
self.assertEqual(cidrs, set(['unique-IP-address/24',
'unique-IP-address/64']))
self.assertFalse(plugin.create_dhcp_port.called)
def test_setup_device_manager_dhcp_port_without_gateway_ips(self):
self._test_setup(self.mock_load_interface_driver,
self.mock_ip_lib, use_gateway_ips=False)
def test_setup_device_manager_dhcp_port_with_gateway_ips(self):
self._test_setup(self.mock_load_interface_driver,
self.mock_ip_lib, use_gateway_ips=True)
def _test_setup_reserved(self, enable_isolated_metadata=False,
force_metadata=False):
with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
# Create DeviceManager.
self.conf.register_opt(
cfg.BoolOpt('enable_isolated_metadata',
default=enable_isolated_metadata))
self.conf.register_opt(
cfg.BoolOpt('force_metadata',
default=force_metadata))
plugin = mock.Mock()
device = mock.Mock()
mock_IPDevice.return_value = device
device.route.get_gateway.return_value = None
mgr = dhcp.DeviceManager(self.conf, plugin)
self.mock_load_interface_driver.assert_called_with(self.conf)
# Setup with a reserved DHCP port.
network = FakeDualNetworkReserved()
network.tenant_id = 'Tenant A'
reserved_port = network.ports[-1]
def mock_update(port_id, dict):
port = reserved_port
port.network_id = dict['port']['network_id']
port.device_id = dict['port']['device_id']
return port
plugin.update_dhcp_port.side_effect = mock_update
mgr.driver.get_device_name.return_value = 'ns-XXX'
mgr.driver.use_gateway_ips = False
self.mock_ip_lib.ensure_device_is_ready.return_value = True
mgr.setup(network)
plugin.update_dhcp_port.assert_called_with(reserved_port.id,
mock.ANY)
expect_ips = ['192.168.0.6/24', 'fdca:3ba5:a17a:4ba3::2/64']
if enable_isolated_metadata or force_metadata:
expect_ips.append(dhcp.METADATA_DEFAULT_CIDR)
mgr.driver.init_l3.assert_called_with('ns-XXX',
expect_ips,
namespace='qdhcp-ns')
def test_setup_reserved_and_disable_metadata(self):
"""Test reserved port case of DeviceManager's DHCP port setup
logic which metadata disabled.
"""
self._test_setup_reserved()
def test_setup_reserved_with_isolated_metadata_enable(self):
"""Test reserved port case of DeviceManager's DHCP port setup
logic which isolated_ metadata enabled.
"""
self._test_setup_reserved(enable_isolated_metadata=True)
def test_setup_reserved_with_force_metadata_enable(self):
"""Test reserved port case of DeviceManager's DHCP port setup
logic which force_metadata enabled.
"""
self._test_setup_reserved(force_metadata=True)
def test_setup_reserved_and_enable_metadata(self):
"""Test reserved port case of DeviceManager's DHCP port setup
logic which both isolated_metadata and force_metadata enabled.
"""
self._test_setup_reserved(enable_isolated_metadata=True,
force_metadata=True)
def test_setup_reserved_2(self):
"""Test scenario where a network has two reserved ports, and
update_dhcp_port fails for the first of those.
"""
with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
# Create DeviceManager.
self.conf.register_opt(
cfg.BoolOpt('enable_isolated_metadata', default=False))
self.conf.register_opt(
cfg.BoolOpt('force_metadata', default=False))
plugin = mock.Mock()
device = mock.Mock()
mock_IPDevice.return_value = device
device.route.get_gateway.return_value = None
mgr = dhcp.DeviceManager(self.conf, plugin)
self.mock_load_interface_driver.assert_called_with(self.conf)
# Setup with a reserved DHCP port.
network = FakeDualNetworkReserved2()
network.tenant_id = 'Tenant A'
reserved_port_1 = network.ports[-2]
reserved_port_2 = network.ports[-1]
def mock_update(port_id, dict):
if port_id == reserved_port_1.id:
return None
port = reserved_port_2
port.network_id = dict['port']['network_id']
port.device_id = dict['port']['device_id']
return port
plugin.update_dhcp_port.side_effect = mock_update
mgr.driver.get_device_name.return_value = 'ns-XXX'
mgr.driver.use_gateway_ips = False
self.mock_ip_lib.ensure_device_is_ready.return_value = True
mgr.setup(network)
plugin.update_dhcp_port.assert_called_with(reserved_port_2.id,
mock.ANY)
mgr.driver.init_l3.assert_called_with(
'ns-XXX', ['192.168.0.6/24', 'fdca:3ba5:a17a:4ba3::2/64'],
namespace='qdhcp-ns')
def test__setup_reserved_dhcp_port_with_fake_remote_error(self):
"""Test scenario where a fake_network has two reserved ports, and
update_dhcp_port fails for the first of those with a RemoteError
different than DhcpPortInUse.
"""
# Setup with a reserved DHCP port.
fake_network = FakeDualNetworkReserved2()
fake_network.tenant_id = 'Tenant A'
reserved_port_2 = fake_network.ports[-1]
mock_plugin = mock.Mock()
dh = dhcp.DeviceManager(cfg.CONF, mock_plugin)
messaging_error = oslo_messaging.RemoteError(
exc_type='FakeRemoteError')
mock_plugin.update_dhcp_port.side_effect = [messaging_error,
reserved_port_2]
with testtools.ExpectedException(oslo_messaging.RemoteError):
dh.setup_dhcp_port(fake_network)
def test__setup_reserved_dhcp_port_with_known_remote_error(self):
"""Test scenario where a fake_network has two reserved ports, and
update_dhcp_port fails for the first of those with a DhcpPortInUse
RemoteError.
"""
# Setup with a reserved DHCP port.
fake_network = FakeDualNetworkReserved2()
fake_network.tenant_id = 'Tenant A'
reserved_port_1 = fake_network.ports[-2]
reserved_port_2 = fake_network.ports[-1]
mock_plugin = mock.Mock()
dh = dhcp.DeviceManager(cfg.CONF, mock_plugin)
messaging_error = oslo_messaging.RemoteError(exc_type='DhcpPortInUse')
mock_plugin.update_dhcp_port.side_effect = [messaging_error,
reserved_port_2]
with mock.patch.object(dhcp.LOG, 'info') as log:
dh.setup_dhcp_port(fake_network)
self.assertEqual(1, log.call_count)
expected_calls = [mock.call(reserved_port_1.id, mock.ANY),
mock.call(reserved_port_2.id, mock.ANY)]
self.assertEqual(expected_calls,
mock_plugin.update_dhcp_port.call_args_list)
class TestDictModel(base.BaseTestCase):
    """str(DictModel) renders its attributes as comma-joined key=value
    pairs, in insertion order."""

    def _assert_str(self, expected, attrs):
        # Helper: build a DictModel from `attrs` and compare its str().
        self.assertEqual(expected, str(dhcp.DictModel(attrs)))

    def test_string_representation_port(self):
        self._assert_str('id=id, network_id=net_id',
                         {'id': 'id', 'network_id': 'net_id'})

    def test_string_representation_network(self):
        self._assert_str('id=id, name=myname',
                         {'id': 'id', 'name': 'myname'})
| 42.855761 | 79 | 0.590967 |
d5f6c4f994788655aa9059e8ae3b7784f29200ca | 694 | py | Python | tests/conftest.py | epkanol/pytest-bdd | 2026dec01e3ab68114fb08f466ae72a992adda03 | [
"MIT"
] | 944 | 2015-02-28T20:13:01.000Z | 2022-03-29T16:04:58.000Z | tests/conftest.py | epkanol/pytest-bdd | 2026dec01e3ab68114fb08f466ae72a992adda03 | [
"MIT"
] | 393 | 2015-03-02T21:01:12.000Z | 2022-03-30T13:04:04.000Z | tests/conftest.py | epkanol/pytest-bdd | 2026dec01e3ab68114fb08f466ae72a992adda03 | [
"MIT"
] | 178 | 2015-03-12T21:46:51.000Z | 2022-03-24T11:46:15.000Z | import pytest
from tests.utils import PYTEST_6
pytest_plugins = "pytester"
def pytest_generate_tests(metafunc):
    """Parametrize tests requesting the `pytest_params` fixture.

    On pytest >= 6 each test runs once per --import-mode CLI variant
    (plus once with no flag); on older pytest a single empty parameter
    set is used.
    """
    if "pytest_params" not in metafunc.fixturenames:
        return
    if PYTEST_6:
        parametrizations = [pytest.param([], id="no-import-mode")]
        for mode in ("prepend", "append", "importlib"):
            flag = "--import-mode=%s" % mode
            parametrizations.append(pytest.param([flag], id=flag))
    else:
        parametrizations = [[]]
    metafunc.parametrize("pytest_params", parametrizations)
| 30.173913 | 88 | 0.566282 |
c4d3c61571f5a9b854b663a6503b576f192eab87 | 889 | py | Python | sync.py | fieldmaps/edge-matcher | d5aac4b1fbd5da3518b5a33fb6ddbe4e936af3bd | [
"MIT"
] | 1 | 2021-11-10T12:59:01.000Z | 2021-11-10T12:59:01.000Z | sync.py | fieldmaps/admin-boundaries | 6ec585174656cdc1d209c5ccd31087b7aa6d9929 | [
"MIT"
] | null | null | null | sync.py | fieldmaps/admin-boundaries | 6ec585174656cdc1d209c5ccd31087b7aa6d9929 | [
"MIT"
] | null | null | null | import subprocess
from pathlib import Path
# Directory containing this script; all paths below are relative to it.
cwd = Path(__file__).parent

# Dataset name -> variant subdirectories under outputs/<dataset>/ that are
# mirrored to s3://data.fieldmaps.io/<dataset>/.
srcs = {
    'edge-matched': ['humanitarian', 'open'],
    'cod': ['extended', 'originals'],
    'geoboundaries': ['extended', 'originals'],
}
# Per-dataset index files (outputs/<dataset>.<ext>) uploaded alongside.
exts = ['json', 'csv', 'xlsx']
if __name__ == '__main__':
    # Mirror every dataset variant directory to the public S3 bucket.
    for src in srcs:
        for grp in srcs[src]:
            subprocess.run([
                's3cmd', 'sync',
                '--acl-public',
                '--delete-removed',
                # Skip dot-files/dirs. Raw string: '\/' is an invalid
                # escape sequence and warns on modern Python.
                '--rexclude', r'\/\.',
                '--multipart-chunk-size-mb=5120',
                cwd / f'outputs/{src}/{grp}',
                f's3://data.fieldmaps.io/{src}/',
            ])
        # Upload the per-dataset index files (json/csv/xlsx).
        for ext in exts:
            subprocess.run([
                's3cmd', 'sync',
                '--acl-public',
                cwd / f'outputs/{src}.{ext}',
                f's3://data.fieldmaps.io/{src}.{ext}',
            ])
| 28.677419 | 54 | 0.446569 |
d8f13aecfa0741ff375ed956c3760694922630f8 | 13,248 | py | Python | tests/ignite/contrib/metrics/test_cohen_kappa.py | aniezurawski/ignite | f584e8a9660dede63c3f9717ad4cb861e9cebd7b | [
"BSD-3-Clause"
] | null | null | null | tests/ignite/contrib/metrics/test_cohen_kappa.py | aniezurawski/ignite | f584e8a9660dede63c3f9717ad4cb861e9cebd7b | [
"BSD-3-Clause"
] | null | null | null | tests/ignite/contrib/metrics/test_cohen_kappa.py | aniezurawski/ignite | f584e8a9660dede63c3f9717ad4cb861e9cebd7b | [
"BSD-3-Clause"
] | null | null | null | import os
from unittest.mock import patch
import pytest
import sklearn
import torch
from sklearn.metrics import cohen_kappa_score
import ignite.distributed as idist
from ignite.contrib.metrics import CohenKappa
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
torch.manual_seed(12)
@pytest.fixture()
def mock_no_sklearn():
    """Temporarily hide sklearn.metrics so the import guard triggers."""
    hidden = {"sklearn.metrics": None}
    with patch.dict("sys.modules", hidden):
        yield sklearn
def test_no_sklearn(mock_no_sklearn):
    """Constructing CohenKappa without scikit-learn must fail loudly."""
    expected = r"This contrib module requires sklearn to be installed."
    with pytest.raises(RuntimeError, match=expected):
        CohenKappa()
def test_no_update():
    """compute() before any update() raises NotComputableError."""
    metric = CohenKappa()
    expected = r"EpochMetric must have at least one example before it can be computed"
    with pytest.raises(NotComputableError, match=expected):
        metric.compute()
def test_input_types():
    """Later updates must match the dtype/shape family of the history."""
    metric = CohenKappa()
    metric.reset()
    # Seed the history with float predictions and long targets.
    metric.update((torch.rand(4, 3), torch.randint(0, 2, size=(4, 3), dtype=torch.long)))

    with pytest.raises(ValueError, match=r"Incoherent types between input y_pred and stored predictions"):
        # Integer predictions after float ones.
        metric.update((torch.randint(0, 5, size=(4, 3)), torch.randint(0, 2, size=(4, 3))))

    with pytest.raises(ValueError, match=r"Incoherent types between input y and stored targets"):
        # int32 targets after long ones.
        metric.update((torch.rand(4, 3), torch.randint(0, 2, size=(4, 3)).to(torch.int32)))

    with pytest.raises(ValueError, match=r"Incoherent types between input y_pred and stored predictions"):
        # 1-d long predictions after 2-d float ones.
        metric.update((torch.randint(0, 2, size=(10,)).long(), torch.randint(0, 2, size=(10, 5)).long()))
def test_check_shape():
    """_check_shape rejects 0-dim and >2-dim tensors."""
    metric = CohenKappa()

    bad_prediction_pairs = [
        (torch.tensor(0), torch.tensor(0)),       # scalar prediction
        (torch.rand(4, 3, 1), torch.rand(4, 3)),  # 3-dim prediction
    ]
    for y_pred, y in bad_prediction_pairs:
        with pytest.raises(ValueError, match=r"Predictions should be of shape"):
            metric._check_shape((y_pred, y))

    with pytest.raises(ValueError, match=r"Targets should be of shape"):
        metric._check_shape((torch.rand(4, 3), torch.rand(4, 3, 1)))
def test_cohen_kappa_wrong_weights_type():
    """Only None / 'linear' / 'quadratic' weightings are accepted.

    Both a non-string value and an unknown string must be rejected at
    construction time. (Dead `ck = ...` assignments removed: the
    constructor raises, so the name was never bound or used.)
    """
    with pytest.raises(ValueError, match=r"Kappa Weighting type must be"):
        CohenKappa(weights=7)

    with pytest.raises(ValueError, match=r"Kappa Weighting type must be"):
        CohenKappa(weights="dd")
@pytest.mark.parametrize("weights", [None, "linear", "quadratic"])
def test_binary_input_N(weights):
    ck = CohenKappa(weights)

    def _test(y_pred, y, n_iters):
        ck.reset()
        ck.update((y_pred, y))

        np_y = y.numpy()
        np_y_pred = y_pred.numpy()

        if n_iters > 1:
            # NOTE(review): the data fed once above is fed again here in
            # batches, doubling every sample; Cohen's kappa is invariant
            # to uniformly scaling the confusion counts, so the expected
            # score is unchanged.
            batch_size = y.shape[0] // n_iters + 1
            for i in range(n_iters):
                idx = i * batch_size
                ck.update((y_pred[idx : idx + batch_size], y[idx : idx + batch_size]))

        res = ck.compute()
        assert isinstance(res, float)
        # Reference score computed with scikit-learn on the full arrays.
        assert cohen_kappa_score(np_y, np_y_pred, weights=weights) == pytest.approx(res)

    def get_test_cases():
        # (y_pred, y, n_iters): flat and (N, 1) shapes, single-shot and
        # batched updates.
        test_cases = [
            (torch.randint(0, 2, size=(10,)).long(), torch.randint(0, 2, size=(10,)).long(), 1),
            (torch.randint(0, 2, size=(100,)).long(), torch.randint(0, 2, size=(100,)).long(), 1),
            (torch.randint(0, 2, size=(10, 1)).long(), torch.randint(0, 2, size=(10, 1)).long(), 1),
            (torch.randint(0, 2, size=(100, 1)).long(), torch.randint(0, 2, size=(100, 1)).long(), 1),
            # updated batches
            (torch.randint(0, 2, size=(10,)).long(), torch.randint(0, 2, size=(10,)).long(), 16),
            (torch.randint(0, 2, size=(100,)).long(), torch.randint(0, 2, size=(100,)).long(), 16),
            (torch.randint(0, 2, size=(10, 1)).long(), torch.randint(0, 2, size=(10, 1)).long(), 16),
            (torch.randint(0, 2, size=(100, 1)).long(), torch.randint(0, 2, size=(100, 1)).long(), 16),
        ]
        return test_cases

    for _ in range(10):
        # check multiple random inputs as random exact occurrences are rare
        test_cases = get_test_cases()
        for y_pred, y, n_iters in test_cases:
            _test(y_pred, y, n_iters)
def test_multilabel_inputs():
    """Multilabel (N, C) integer inputs are rejected."""
    metric = CohenKappa()

    for n_labels in (4, 6, 8):
        metric.reset()
        with pytest.raises(ValueError, match=r"multilabel-indicator is not supported"):
            metric.update((torch.randint(0, 2, size=(10, n_labels)).long(),
                           torch.randint(0, 2, size=(10, n_labels)).long()))
            metric.compute()
@pytest.mark.parametrize("weights", [None, "linear", "quadratic"])
def test_integration_binary_input_with_output_transform(weights):
    """CohenKappa attached to an Engine via output_transform matches scikit-learn."""
    def _test(y_pred, y, batch_size):
        # Engine update: slice the numpy arrays by iteration index and return
        # (idx, prediction, target); output_transform below picks items 1 and 2.
        def update_fn(engine, batch):
            idx = (engine.state.iteration - 1) * batch_size
            y_true_batch = np_y[idx : idx + batch_size]
            y_pred_batch = np_y_pred[idx : idx + batch_size]
            return idx, torch.from_numpy(y_pred_batch), torch.from_numpy(y_true_batch)
        engine = Engine(update_fn)
        ck_metric = CohenKappa(output_transform=lambda x: (x[1], x[2]), weights=weights)
        ck_metric.attach(engine, "ck")
        # np_y / np_y_pred are closed over by update_fn above; they are bound
        # here, before the engine runs.
        np_y = y.numpy()
        np_y_pred = y_pred.numpy()
        # Reference value computed directly by scikit-learn on the full arrays.
        np_ck = cohen_kappa_score(np_y, np_y_pred, weights=weights)
        data = list(range(y_pred.shape[0] // batch_size))
        ck = engine.run(data, max_epochs=1).metrics["ck"]
        assert isinstance(ck, float)
        assert np_ck == pytest.approx(ck)
    def get_test_cases():
        # (y_pred, y, batch_size) triples; lengths divide evenly by batch_size.
        test_cases = [
            (torch.randint(0, 2, size=(100,)).long(), torch.randint(0, 2, size=(100,)).long(), 10),
            (torch.randint(0, 2, size=(100, 1)).long(), torch.randint(0, 2, size=(100, 1)).long(), 10),
            (torch.randint(0, 2, size=(200,)).long(), torch.randint(0, 2, size=(200,)).long(), 10),
            (torch.randint(0, 2, size=(200, 1)).long(), torch.randint(0, 2, size=(200, 1)).long(), 10),
        ]
        return test_cases
    for _ in range(10):
        # check multiple random inputs as random exact occurencies are rare
        test_cases = get_test_cases()
        for y_pred, y, batch_size in test_cases:
            _test(y_pred, y, batch_size)
def _test_distrib_binary_input_N(device):
    # Distributed variant: every rank updates with its own random sample, the
    # samples are all-gathered, and the computed kappa must equal scikit-learn
    # evaluated on the gathered arrays.
    rank = idist.get_rank()
    torch.manual_seed(12)
    def _test(y_pred, y, n_iters, metric_device):
        metric_device = torch.device(metric_device)
        ck = CohenKappa(device=metric_device)
        # Per-rank seed so each process contributes different data.
        torch.manual_seed(10 + rank)
        ck.reset()
        ck.update((y_pred, y))
        # Optionally re-feed the same data in n_iters slices (batched schedule).
        if n_iters > 1:
            batch_size = y.shape[0] // n_iters + 1
            for i in range(n_iters):
                idx = i * batch_size
                ck.update((y_pred[idx : idx + batch_size], y[idx : idx + batch_size]))
        # gather y_pred, y
        y_pred = idist.all_gather(y_pred)
        y = idist.all_gather(y)
        np_y = y.cpu().numpy()
        np_y_pred = y_pred.cpu().numpy()
        res = ck.compute()
        assert isinstance(res, float)
        assert cohen_kappa_score(np_y, np_y_pred) == pytest.approx(res)
    def get_test_cases():
        # (y_pred, y, n_iters) triples: 1-D and column-vector shapes,
        # single-shot (1) and batched (16) update schedules.
        test_cases = [
            (torch.randint(0, 2, size=(10,)).long(), torch.randint(0, 2, size=(10,)).long(), 1),
            (torch.randint(0, 2, size=(100,)).long(), torch.randint(0, 2, size=(100,)).long(), 1),
            (torch.randint(0, 2, size=(10, 1)).long(), torch.randint(0, 2, size=(10, 1)).long(), 1),
            (torch.randint(0, 2, size=(100, 1)).long(), torch.randint(0, 2, size=(100, 1)).long(), 1),
            # updated batches
            (torch.randint(0, 2, size=(10,)).long(), torch.randint(0, 2, size=(10,)).long(), 16),
            (torch.randint(0, 2, size=(100,)).long(), torch.randint(0, 2, size=(100,)).long(), 16),
            (torch.randint(0, 2, size=(10, 1)).long(), torch.randint(0, 2, size=(10, 1)).long(), 16),
            (torch.randint(0, 2, size=(100, 1)).long(), torch.randint(0, 2, size=(100, 1)).long(), 16),
        ]
        return test_cases
    for _ in range(3):
        test_cases = get_test_cases()
        for y_pred, y, batch_size in test_cases:
            # Always check with the metric kept on CPU; additionally check on
            # the distributed device, except under XLA.
            _test(y_pred, y, batch_size, "cpu")
            if device.type != "xla":
                _test(y_pred, y, batch_size, idist.device())
def _test_distrib_integration_binary(device):
    # Engine-integration variant: shared global tensors are sliced per rank by
    # ``rank * offset`` so the union of all rank slices covers the whole data,
    # and the attached metric must reproduce scikit-learn on the full tensors.
    rank = idist.get_rank()
    torch.manual_seed(12)
    def _test(n_epochs, metric_device):
        metric_device = torch.device(metric_device)
        n_iters = 80
        s = 16  # per-iteration batch size
        n_classes = 2
        offset = n_iters * s  # number of samples owned by one rank
        y_true = torch.randint(0, n_classes, size=(offset * idist.get_world_size(),)).to(device)
        y_preds = torch.randint(0, n_classes, size=(offset * idist.get_world_size(),)).to(device)
        # Each iteration i yields this rank's i-th (prediction, target) batch.
        def update(engine, i):
            return (
                y_preds[i * s + rank * offset : (i + 1) * s + rank * offset],
                y_true[i * s + rank * offset : (i + 1) * s + rank * offset],
            )
        engine = Engine(update)
        ck = CohenKappa(device=metric_device)
        ck.attach(engine, "ck")
        data = list(range(n_iters))
        engine.run(data=data, max_epochs=n_epochs)
        assert "ck" in engine.state.metrics
        res = engine.state.metrics["ck"]
        if isinstance(res, torch.Tensor):
            res = res.cpu().numpy()
        true_res = cohen_kappa_score(y_true.cpu().numpy(), y_preds.cpu().numpy())
        assert pytest.approx(res) == true_res
    # Run with the metric on CPU and (unless XLA) on the distributed device,
    # twice each, for 1 and 2 epochs.
    metric_devices = ["cpu"]
    if device.type != "xla":
        metric_devices.append(idist.device())
    for metric_device in metric_devices:
        for _ in range(2):
            _test(n_epochs=1, metric_device=metric_device)
            _test(n_epochs=2, metric_device=metric_device)
@pytest.mark.distributed
@pytest.mark.skipif(not idist.has_native_dist_support, reason="Skip if no native dist support")
@pytest.mark.skipif(torch.cuda.device_count() < 1, reason="Skip if no GPU")
def test_distrib_gpu(distributed_context_single_node_nccl):
    """Run both distributed CohenKappa checks on this local rank's GPU."""
    local_rank = distributed_context_single_node_nccl["local_rank"]
    dev = torch.device(f"cuda:{local_rank}")
    for check in (_test_distrib_binary_input_N, _test_distrib_integration_binary):
        check(dev)
@pytest.mark.distributed
@pytest.mark.skipif(not idist.has_native_dist_support, reason="Skip if no native dist support")
def test_distrib_cpu(distributed_context_single_node_gloo):
    """Run both distributed CohenKappa checks on CPU (single-node gloo)."""
    cpu = torch.device("cpu")
    for check in (_test_distrib_binary_input_N, _test_distrib_integration_binary):
        check(cpu)
@pytest.mark.distributed
@pytest.mark.skipif(not idist.has_hvd_support, reason="Skip if no Horovod dist support")
@pytest.mark.skipif("WORLD_SIZE" in os.environ, reason="Skip if launched as multiproc")
def test_distrib_hvd(gloo_hvd_executor):
    """Fan both checks out over Horovod workers (one per GPU, or 4 on CPU)."""
    use_cuda = torch.cuda.is_available()
    device = torch.device("cuda" if use_cuda else "cpu")
    nproc = torch.cuda.device_count() if use_cuda else 4
    for check in (_test_distrib_binary_input_N, _test_distrib_integration_binary):
        gloo_hvd_executor(
            check, (device,), np=nproc, do_init=True,
        )
@pytest.mark.multinode_distributed
@pytest.mark.skipif(not idist.has_native_dist_support, reason="Skip if no native dist support")
@pytest.mark.skipif("MULTINODE_DISTRIB" not in os.environ, reason="Skip if not multi-node distributed")
def test_multinode_distrib_cpu(distributed_context_multi_node_gloo):
    """Run both distributed CohenKappa checks on CPU in a multi-node gloo setup."""
    cpu = torch.device("cpu")
    for check in (_test_distrib_binary_input_N, _test_distrib_integration_binary):
        check(cpu)
@pytest.mark.multinode_distributed
@pytest.mark.skipif(not idist.has_native_dist_support, reason="Skip if no native dist support")
@pytest.mark.skipif("GPU_MULTINODE_DISTRIB" not in os.environ, reason="Skip if not multi-node distributed")
def test_multinode_distrib_gpu(distributed_context_multi_node_nccl):
    """Run both distributed CohenKappa checks on this rank's GPU (multi-node nccl)."""
    local_rank = distributed_context_multi_node_nccl["local_rank"]
    dev = torch.device(f"cuda:{local_rank}")
    for check in (_test_distrib_binary_input_N, _test_distrib_integration_binary):
        check(dev)
@pytest.mark.tpu
@pytest.mark.skipif("NUM_TPU_WORKERS" in os.environ, reason="Skip if NUM_TPU_WORKERS is in env vars")
@pytest.mark.skipif(not idist.has_xla_support, reason="Skip if no PyTorch XLA package")
def test_distrib_single_device_xla():
    """Run both distributed CohenKappa checks on the single XLA device."""
    dev = idist.device()
    for check in (_test_distrib_binary_input_N, _test_distrib_integration_binary):
        check(dev)
def _test_distrib_xla_nprocs(index):
    """Per-process XLA entry point; ``index`` is the xmp process index.

    The argument is unused because ``idist.device()`` already resolves the
    device for the current process.
    """
    dev = idist.device()
    for check in (_test_distrib_binary_input_N, _test_distrib_integration_binary):
        check(dev)
@pytest.mark.tpu
@pytest.mark.skipif("NUM_TPU_WORKERS" not in os.environ, reason="Skip if no NUM_TPU_WORKERS in env vars")
@pytest.mark.skipif(not idist.has_xla_support, reason="Skip if no PyTorch XLA package")
def test_distrib_xla_nprocs(xmp_executor):
    """Fan the XLA checks out over NUM_TPU_WORKERS processes."""
    nprocs = int(os.environ["NUM_TPU_WORKERS"])
    xmp_executor(_test_distrib_xla_nprocs, args=(), nprocs=nprocs)
| 37.109244 | 107 | 0.646815 |
33b11e0369b27892deb45008b3be2b486ca61156 | 1,392 | py | Python | tests/pdk/finfet/test_transistor.py | mabrains/ALIGN-public | 9a6c14310de13df369a8340f465911b629f15a3f | [
"BSD-3-Clause"
] | null | null | null | tests/pdk/finfet/test_transistor.py | mabrains/ALIGN-public | 9a6c14310de13df369a8340f465911b629f15a3f | [
"BSD-3-Clause"
] | null | null | null | tests/pdk/finfet/test_transistor.py | mabrains/ALIGN-public | 9a6c14310de13df369a8340f465911b629f15a3f | [
"BSD-3-Clause"
] | null | null | null | from align.pdk.finfet import CanvasPDK, mos, tap
from align.schema.transistor import Transistor
from .helper import *
def test_one():
tx = Transistor(model_name='n', nf=2, nfin=4, device_type='stack')
data = mos(CanvasPDK, tx, track_pattern={'G':[6], 'D':[4], 'S':[2]})
fn = "test_transistor_1"
compare_with_golden(fn, data)
def test_two():
tx = Transistor(model_name='n', nf=4, nfin=4, device_type='stack')
data = mos(CanvasPDK, tx, track_pattern={'G':[6], 'D':[4], 'S':[2]})
fn = "test_transistor_2"
compare_with_golden(fn, data)
def test_three():
tx = Transistor(model_name='n', nf=2, nfin=4, device_type='parallel')
data = mos(CanvasPDK, tx, track_pattern={'G':[6], 'D':[4], 'S':[2]})
fn = "test_transistor_3"
compare_with_golden(fn, data)
def test_four():
tx = Transistor(model_name='n', nf=4, nfin=4, device_type='parallel')
data = mos(CanvasPDK, tx, track_pattern={'G':[6], 'D':[4], 'S':[2]})
fn = "test_transistor_4"
compare_with_golden(fn, data)
def test_five():
tx = Transistor(model_name='n', nf=2, nfin=4, device_type='stack')
data = tap(CanvasPDK, tx)
fn = "test_transistor_5"
compare_with_golden(fn, data)
def test_six():
tx = Transistor(model_name='n', nf=4, nfin=4, device_type='stack')
data = tap(CanvasPDK, tx)
fn = "test_transistor_6"
compare_with_golden(fn, data)
| 30.26087 | 73 | 0.647989 |
6a547bbe83283bedc605fd94efcf5b99a6167fbd | 1,152 | py | Python | peewee_aio/databases.py | klen/peewee-aio | 723c5f5333f5a814fefe682b175faadb9e86f747 | [
"MIT"
] | 5 | 2021-09-07T13:39:47.000Z | 2022-01-14T16:01:26.000Z | peewee_aio/databases.py | klen/peewee-aio | 723c5f5333f5a814fefe682b175faadb9e86f747 | [
"MIT"
] | 1 | 2021-11-01T06:28:12.000Z | 2021-11-01T12:05:50.000Z | peewee_aio/databases.py | klen/peewee-aio | 723c5f5333f5a814fefe682b175faadb9e86f747 | [
"MIT"
] | null | null | null | from typing import Dict, Type
import aio_databases as aiodb
import peewee as pw
from playhouse import db_url
class Database(pw.Database):
    """Peewee database whose synchronous execution is gated by a flag."""

    # Sync execution stays disabled until the manager explicitly enables it.
    enabled: bool = False

    def execute(self, *args, **kwargs):
        """Execute a query synchronously, if sync operations are enabled."""
        if self.enabled:
            return super().execute(*args, **kwargs)  # type: ignore
        raise RuntimeError(
            "Sync operations are not available. Use `manager.allow_sync` to enable."
        )
class SqliteDatabase(Database, pw.SqliteDatabase):
    """SQLite backend with the sync-execution guard from :class:`Database`."""
    pass
class MySQLDatabase(Database, pw.MySQLDatabase):
    """MySQL backend with the sync-execution guard from :class:`Database`."""
    pass
class PostgresqlDatabase(Database, pw.PostgresqlDatabase):
    """PostgreSQL backend with the sync-execution guard from :class:`Database`."""
    pass
# Map aio-databases backend identifiers to the guarded peewee classes.
_backend_to_db: Dict[str, Type[Database]] = {
    "sqlite": SqliteDatabase,
    "postgres": PostgresqlDatabase,
    "mysql": MySQLDatabase,
}
# "postgresql" is accepted as an alias for the "postgres" backend.
_backend_to_db["postgresql"] = _backend_to_db["postgres"]
def get_db(db: aiodb.Database) -> Database:
    """Build a synchronous peewee database mirroring an aio-databases URL.

    Falls back to the SQLite class for unknown backend types.
    """
    url = db.backend.url
    # Ensure the path is absolute-looking -- presumably required by the
    # playhouse.db_url parser; confirm against its documentation.
    if url.path and not url.path.startswith("/"):
        url = url._replace(path=f"/{url.path}")
    params = db_url.parseresult_to_dict(url)
    db_cls = _backend_to_db.get(db.backend.db_type, _backend_to_db["sqlite"])
    return db_cls(**params)
| 24 | 88 | 0.684028 |
2d1e2adcd467090621ea506e8509d9887ad3876f | 346 | py | Python | test_call.py | chris-hld/SAFpy | 4ccae1a92dddd1f5726679792eee58cafa806385 | [
"MIT"
] | 2 | 2020-08-05T15:54:28.000Z | 2021-07-12T08:43:45.000Z | test_call.py | chris-hld/SAFpy | 4ccae1a92dddd1f5726679792eee58cafa806385 | [
"MIT"
] | null | null | null | test_call.py | chris-hld/SAFpy | 4ccae1a92dddd1f5726679792eee58cafa806385 | [
"MIT"
] | null | null | null | import numpy as np
import safpy
safpy.wrapit.whoami()
print(safpy.wrapit.factorial(3))
print(safpy.sh.getSHreal(1, [0, 0]))
print(safpy.sh.getSHcomplex(1, [0, 0]))
print(safpy.vbap.generateVBAPgainTable3D(np.reshape(np.arange(10), (5, 2)),
1, 1, 0, 0, 0))
h = safpy.afstft.AfSTFT(2, 2, 128, fs=48000)
| 24.714286 | 75 | 0.615607 |
81c5f5684cf9b3da0fcab6b865a28e4a394e93ab | 86,043 | py | Python | tests/functional/transactions/test_read_consist_sttm_restart_max_limit.py | reevespaul/firebird-qa | 98f16f425aa9ab8ee63b86172f959d63a2d76f21 | [
"MIT"
] | null | null | null | tests/functional/transactions/test_read_consist_sttm_restart_max_limit.py | reevespaul/firebird-qa | 98f16f425aa9ab8ee63b86172f959d63a2d76f21 | [
"MIT"
] | null | null | null | tests/functional/transactions/test_read_consist_sttm_restart_max_limit.py | reevespaul/firebird-qa | 98f16f425aa9ab8ee63b86172f959d63a2d76f21 | [
"MIT"
] | null | null | null | #coding:utf-8
#
# id: functional.transactions.read_consist_sttm_restart_max_limit
# title: READ CONSISTENCY. Maximal number of statement-level restarts must be 10.
# decription:
# Initial article for reading:
# https://asktom.oracle.com/pls/asktom/f?p=100:11:::::P11_QUESTION_ID:11504247549852
# Note on terms which are used there: "BLOCKER", "LONG" and "FIRSTLAST" - their names are slightly changed here
# to: LOCKER-1, WORKER and LOCKER-2 respectively.
#
# See also: doc\\README.read_consistency.md
# Letter from Vlad: 15.09.2020 20:04 // subj "read consistency // addi test(s)"
#
# ::: NB :::
# This test uses script %FBT_REPO%
# iles
# ead-consist-sttm-restart-DDL.sql which contains common DDL for all other such tests.
# Particularly, it contains two TRIGGERS (TLOG_WANT and TLOG_DONE) which are used for logging of planned actions and actual
# results against table TEST. These triggers use AUTONOMOUS transactions in order to have ability to see results in any
# outcome of test.
#
# Detailed description can be found in "read-consist-sttm-restart-on-update-04.fbt", this test is based on the same ideas:
# * initial script add records with ID = 1...12 and does commit;
# * start locker-1 which catch record with ID = 1 that is to be involved futher in cursor of worker;
# * start worker DML which must change records in descending order of ID, starting with ID=2; worker must write ID = ID * 100 for each row;
# * start locker-2 which changes record with ID=12 by assigning this ID to -12, makes COMMIT and locks this record again (makes UPDATE w/o commit);
# * locker-1 releases record with ID=1, then changes record with ID=11 by assigning this ID to -11, makes COMMIT and locks this record again;
# * locker-2 releases record with ID=-12, then changes record with ID=10 by assigning this ID to -10, makes COMMIT and locks this record again;
# * ... and so on, until number of such actions iterations less 10 or 11 (see below) ...
#
# Each UPDATE that is performed by lockers (starting from ID=11) produces new ID (-11, -10, -9, ...) that was not present in the scope which worker
# could see before this action. This forces worker to make statement-level restart.
#
# When number of such new IDs is less than 10 then worker must finish its job successfully.
# But if this number if 11 then worker must raise exception (SQLSTATE = 40001 / deadlock / update conflicts) and rollback all changes.
#
# Test verifies both cases, using loop with TWO iterations (see 'main_iter' below): first for 10 and second to 11 records that are to be updated.
# After each iteration we do queries to the table TEST and to the view V_WORKER_LOG which contains data generated by trigger TLOG_DONE for logging.
#
# Test verifies restart number for three modes of WORKER job: UPDATE, MERGE, DELETE and SELECT WITH LOCK (see loop for checked_DML: 'upd', 'mer', 'del', 'lok').
# NOTE-1.
# For 'SELECT WITH LOCK' we must provide that no rows will be returned to client while worker is waiting for records.
# EXECUTE BLOCK with for-select which does nothing is used for this.
#
# NOTE-2.
# SELECT WITH LOCK does not allow to use VIEW as subject of query (raises "-WITH LOCK can be used only with a single physical table").
# This error is expected in current FB versions and its text presents in expected_std* section.
#
# Checked on 4.0.0.2195 SS/CS.
# 29.09.2020: added for-loop in order to check different target objects: TABLE ('test') and VIEW ('v_test'), see 'target_object_type'.
#
#
# tracker_id:
# min_versions: ['4.0']
# versions: 4.0
# qmid:
import pytest
from firebird.qa import db_factory, isql_act, Action
# version: 4.0
# resources: None
# Output substitutions applied before comparing actual vs expected text:
# strip '=' padding, collapse whitespace, and mask volatile details (line
# numbers, concurrent transaction ids, block positions) that vary per run.
substitutions_1 = [('=', ''), ('[ \t]+', ' '), ('.*After line \\d+.*', ''), ('.*[\\-]?concurrent transaction number is \\d+', 'concurrent transaction number is'), ('.*At\\s+block\\s+line(:)?\\s+\\d+(,)?\\s+col(:)?\\s+\\d+', ''), ('.After\\s+line\\s+\\d+\\s+.*', '')]
# No init DDL/DML here; the test script performs all setup itself.
init_script_1 = """"""
db_1 = db_factory(sql_dialect=3, init=init_script_1)
# test_script_1
#---
#
# import os
# import sys
# import subprocess
# from subprocess import Popen
# import shutil
# from fdb import services
# import time
#
# os.environ["ISC_USER"] = user_name
# os.environ["ISC_PASSWORD"] = user_password
#
# # How long LOCKER must wait before raise update-conflict error
# # (useful for debug in case os some error in this test algorithm):
# LOCKER_LOCK_TIMEOUT = 5
#
# ##############################
# # Temply, for debug obly:
# this_fdb=db_conn.database_name
# this_dbg=os.path.splitext(this_fdb)[0] + '.4debug.fdb'
# ##############################
#
# db_conn.close()
# fb_home = services.connect(host='localhost').get_home_directory()
#
# #--------------------------------------------
#
# def flush_and_close( file_handle ):
# # https://docs.python.org/2/library/os.html#os.fsync
# # If you're starting with a Python file object f,
# # first do f.flush(), and
# # then do os.fsync(f.fileno()), to ensure that all internal buffers associated with f are written to disk.
# global os
#
# file_handle.flush()
# if file_handle.mode not in ('r', 'rb') and file_handle.name != os.devnull:
# # otherwise: "OSError: [Errno 9] Bad file descriptor"!
# os.fsync(file_handle.fileno())
# file_handle.close()
#
# #--------------------------------------------
#
# def cleanup( f_names_list ):
# global os
# for f in f_names_list:
# if type(f) == file:
# del_name = f.name
# elif type(f) == str:
# del_name = f
# else:
# print('Unrecognized type of element:', f, ' - can not be treated as file.')
# del_name = None
#
# if del_name and os.path.isfile( del_name ):
# os.remove( del_name )
#
# #--------------------------------------------
#
# sql_init_ddl = os.path.join(context['files_location'],'read-consist-sttm-restart-DDL.sql')
#
# for target_object_type in('table', 'view'):
#
# target_obj = 'test' if target_object_type == 'table' else 'v_test'
#
# for checked_DML in('upd', 'mer', 'del', 'lok'):
# #for checked_DML in('lok',):
# worker_dml = "select 'UNKNOWN MODE' as msg from rdb$database"
# if checked_DML == 'upd':
# worker_dml = 'update %(target_obj)s set id = id * 100 where id <= 2 order by id DESC;' % locals()
# elif checked_DML == 'mer':
# worker_dml = 'merge into %(target_obj)s t using (select x.id from %(target_obj)s x where x.id <= 2 order by id DESC) s on t.id = s.id when matched then update set t.id = s.id * 100;' % locals()
# elif checked_DML == 'del':
# worker_dml = 'delete from %(target_obj)s where id <= 2 order by id DESC;' % locals()
# elif checked_DML == 'lok':
# # ::: NB :::
# # We must SUPRESS sending record to client for SELECT WITH LOCK, otherwise error
# # deadlock/update conflist will raise immediately! Because of this, we enclose
# # such select into execute block which returns nothing:
# worker_dml = 'set term ^; execute block as declare c int; begin for select id from %(target_obj)s where id<=2 order by id desc with lock into c do begin end end^ set term ;^' % locals()
#
# for main_iter in (0,1):
# #for main_iter in (1,):
#
# ###################################################################################
# ### H O W M A N Y R E S T A R T S W E W A N T T O C H E C K ###
# ###################################################################################
# ROWS_TO_ADD = 10 + 2 * main_iter
#
#
# f_init_log=open( os.path.join(context['temp_directory'],'read-consist-sttm-restart-DDL.log'), 'w')
# f_init_err=open( ''.join( ( os.path.splitext(f_init_log.name)[0], '.err') ), 'w')
#
# # RECREATION OF ALL DB OBJECTS:
# # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# subprocess.call( [context['isql_path'], dsn, '-q', '-i', sql_init_ddl], stdout=f_init_log, stderr=f_init_err )
#
# flush_and_close(f_init_log)
# flush_and_close(f_init_err)
#
# sql_addi='''
# set term ^;
# execute block as
# begin
# rdb$set_context('USER_SESSION', 'WHO', 'INIT_DATA');
# end
# ^
# set term ;^
# insert into %(target_obj)s(id, x) select row_number()over(),row_number()over() from rdb$types rows (2 + %(ROWS_TO_ADD)s); -- <<< INITIAL DATA
# commit;
# ''' % locals()
#
# runProgram('isql', [ dsn, '-q' ], sql_addi)
#
# locker_tpb = fdb.TPB()
# locker_tpb.lock_timeout = LOCKER_LOCK_TIMEOUT
# locker_tpb.lock_resolution = fdb.isc_tpb_wait
#
# con_lock_1 = fdb.connect( dsn = dsn, isolation_level=locker_tpb )
# con_lock_2 = fdb.connect( dsn = dsn, isolation_level=locker_tpb )
#
# con_lock_1.execute_immediate( "execute block as begin rdb$set_context('USER_SESSION', 'WHO', 'LOCKER #1'); end" )
# con_lock_2.execute_immediate( "execute block as begin rdb$set_context('USER_SESSION', 'WHO', 'LOCKER #2'); end" )
#
# #########################
# ### L O C K E R - 1 ###
# #########################
#
# con_lock_1.execute_immediate( 'update %(target_obj)s set id=id where id = 1' % locals() )
#
# sql_text='''
# connect '%(dsn)s';
# set list on;
# set autoddl off;
# set term ^;
# execute block as
# begin
# rdb$set_context('USER_SESSION','WHO', 'WORKER');
# end
# ^
# set term ;^
# commit;
# SET KEEP_TRAN_PARAMS ON;
# set transaction read committed read consistency;
# set list off;
# set wng off;
#
# set count on;
# %(worker_dml)s -- UPDATE or DELETE or SELECT WITH LOCK; all ORDER BY ID DESC; MUST HANG BECAUSE OF LOCKERs
#
# -- check results:
# -- ###############
#
# select id from %(target_obj)s order by id;
#
# select v.old_id, v.op, v.snap_no_rank
# from v_worker_log v
# where v.op = iif( '%(checked_DML)s' = 'mer', 'upd', '%(checked_DML)s'); -- 'UPD' or 'DEL'; for 'SELECT WITH LOCK' no records will be in v_worker_log.
#
#
# --set width who 10;
# -- DO NOT check this! Values can differ here from one run to another!
# -- select id, trn, who, old_id, new_id, op, rec_vers, global_cn, snap_no from tlog_done order by id;
# rollback;
#
# ''' % dict(globals(), **locals())
#
# f_worker_sql=open( os.path.join(context['temp_directory'],'tmp_sttm_restart_max_limit.sql'), 'w')
# f_worker_sql.write(sql_text)
# flush_and_close(f_worker_sql)
#
#
# f_worker_log=open( ''.join( ( os.path.splitext(f_worker_sql.name)[0], '.log') ), 'w')
# f_worker_err=open( ''.join( ( os.path.splitext(f_worker_log.name)[0], '.err') ), 'w')
#
# ############################################################################
# ### L A U N C H W O R K E R U S I N G I S Q L, A S Y N C. ###
# ############################################################################
#
# p_worker = Popen( [ context['isql_path'], '-pag', '9999999', '-q', '-i', f_worker_sql.name ],stdout=f_worker_log, stderr=f_worker_err)
# time.sleep(1)
#
# cur_lock_1 = con_lock_1.cursor()
# cur_lock_2 = con_lock_2.cursor()
# sttm = 'update %(target_obj)s set id = ? where abs( id ) = ?' % locals()
#
#
# for i in range(0,ROWS_TO_ADD):
# v_id = 2 + ROWS_TO_ADD-i
# if i % 2 == 0:
# cur_lock_2.execute( sttm, ( -abs( v_id ), v_id, ) )
# con_lock_2.commit()
# cur_lock_2.execute( sttm, ( -abs( v_id ), v_id, ) )
# con_lock_1.commit()
# else:
# cur_lock_1.execute( sttm, ( -abs( v_id ), v_id, ) )
# con_lock_1.commit()
# cur_lock_1.execute( sttm, ( -abs( v_id ), v_id, ) )
# con_lock_2.commit()
#
# cur_lock_1.close()
# cur_lock_2.close()
#
# if ROWS_TO_ADD % 2 == 0:
# con_lock_2.commit()
# con_lock_1.commit()
# else:
# con_lock_1.commit()
# con_lock_2.commit()
#
# # Close lockers:
# ################
# for c in (con_lock_1, con_lock_2):
# c.close()
#
# # Here we wait for ISQL complete its mission:
# p_worker.wait()
#
# flush_and_close(f_worker_log)
# flush_and_close(f_worker_err)
#
# # CHECK RESULTS
# ###############
#
# print( 'target_object_type: %(target_object_type)s, checked_DML = %(checked_DML)s, iter = %(main_iter)s, restarts number to be tested: %(ROWS_TO_ADD)s' % locals() )
#
# with open(f_init_err.name,'r') as f:
# for line in f:
# if line:
# print( 'target_object_type: %(target_object_type)s, checked_DML = %(checked_DML)s, iter = %(main_iter)s, UNEXPECTED STDERR for initial SQL: %(line)s' % locals() )
#
# for f in (f_worker_log, f_worker_err):
# with open(f.name,'r') as g:
# for line in g:
# if line:
# logname = 'STDLOG' if f.name == f_worker_log.name else 'STDERR'
# print( 'target_object_type: %(target_object_type)s, checked_DML = %(checked_DML)s, iter = %(main_iter)s, worker %(logname)s: %(line)s' % locals() )
#
#
# #< for main_iter in (0,1)
# # < for checked_DML in ('upd', 'mer', 'del', 'lok')
# # < for target_object_type in ('table', 'view')
# # Cleanup.
# ##########
# time.sleep(1)
# cleanup( (f_init_log, f_init_err, f_worker_sql, f_worker_log, f_worker_err) )
#
# '''
# 'substitutions':[
# ('=','')
# ,('[ ]+',' ')
# ,('.*After line \\d+.*', '')
# ,('.*[\\-]?concurrent transaction number is \\d+', 'concurrent transaction number is')
# ,('.*At\\s+block\\s+line(:)?\\s+\\d+(,)?\\s+col(:)?\\s+\\d+', '')
# ]
#
# '''
#
#
#---
#act_1 = python_act('db_1', test_script_1, substitutions=substitutions_1)
expected_stdout_1 = """
target_object_type: table, checked_DML = upd, iter = 0, restarts number to be tested: 10
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: Records affected: 12
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG:
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: ID
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: =======
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -1200
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -1100
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -1000
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -900
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -800
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -700
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -600
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -500
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -400
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -300
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 100
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 200
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG:
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: Records affected: 12
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG:
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: OLD_ID OP SNAP_NO_RANK
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: ======= ====== =====================
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 1
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 2
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 2
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 3
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 3
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 4
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 4
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 5
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 5
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 6
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 6
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 7
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 7
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 8
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 8
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 9
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 9
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 10
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 10
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 11
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 11
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -3 UPD 11
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -4 UPD 11
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -5 UPD 11
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -6 UPD 11
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -7 UPD 11
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -8 UPD 11
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -9 UPD 11
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -10 UPD 11
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -11 UPD 11
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: -12 UPD 11
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG:
target_object_type: table, checked_DML = upd, iter = 0, worker STDLOG: Records affected: 31
target_object_type: table, checked_DML = upd, iter = 1, restarts number to be tested: 12
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: Records affected: 2
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: ID
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: =======
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: -14
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: -13
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: -12
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: -11
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: -10
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: -9
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: -8
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: -7
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: -6
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: -5
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: -4
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: -3
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 1
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 2
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: Records affected: 14
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: OLD_ID OP SNAP_NO_RANK
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: ======= ====== =====================
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 1
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 2
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 2
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 3
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 3
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 4
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 4
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 5
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 5
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 6
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 6
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 7
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 7
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 8
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 8
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 9
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 9
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 10
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 10
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 11
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 11
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = upd, iter = 1, worker STDLOG: Records affected: 21
target_object_type: table, checked_DML = upd, iter = 1, worker STDERR: Statement failed, SQLSTATE = 40001
target_object_type: table, checked_DML = upd, iter = 1, worker STDERR: deadlock
target_object_type: table, checked_DML = upd, iter = 1, worker STDERR: -update conflicts with concurrent update
target_object_type: table, checked_DML = upd, iter = 1, worker STDERR: -concurrent transaction number is 343
target_object_type: table, checked_DML = upd, iter = 1, worker STDERR: After line 18 in file C:\\FBTESTING\\qabt-repo mp mp_sttm_restart_max_limit.sql
target_object_type: table, checked_DML = mer, iter = 0, restarts number to be tested: 10
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: Records affected: 12
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG:
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: ID
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: =======
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -1200
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -1100
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -1000
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -900
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -800
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -700
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -600
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -500
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -400
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -300
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 100
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 200
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG:
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: Records affected: 12
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG:
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: OLD_ID OP SNAP_NO_RANK
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: ======= ====== =====================
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 1
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 2
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 2
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 3
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 3
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 4
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 4
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 5
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 5
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 6
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 6
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 7
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 7
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 8
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 8
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 9
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 9
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 10
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 10
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 11
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 11
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -3 UPD 11
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -4 UPD 11
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -5 UPD 11
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -6 UPD 11
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -7 UPD 11
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -8 UPD 11
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -9 UPD 11
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -10 UPD 11
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -11 UPD 11
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: -12 UPD 11
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG:
target_object_type: table, checked_DML = mer, iter = 0, worker STDLOG: Records affected: 31
target_object_type: table, checked_DML = mer, iter = 1, restarts number to be tested: 12
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: Records affected: 2
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: ID
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: =======
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: -14
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: -13
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: -12
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: -11
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: -10
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: -9
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: -8
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: -7
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: -6
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: -5
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: -4
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: -3
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 1
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 2
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: Records affected: 14
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: OLD_ID OP SNAP_NO_RANK
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: ======= ====== =====================
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 1
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 2
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 2
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 3
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 3
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 4
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 4
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 5
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 5
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 6
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 6
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 7
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 7
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 8
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 8
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 9
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 9
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 10
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 10
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 11
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 11
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = mer, iter = 1, worker STDLOG: Records affected: 21
target_object_type: table, checked_DML = mer, iter = 1, worker STDERR: Statement failed, SQLSTATE = 40001
target_object_type: table, checked_DML = mer, iter = 1, worker STDERR: deadlock
target_object_type: table, checked_DML = mer, iter = 1, worker STDERR: -update conflicts with concurrent update
target_object_type: table, checked_DML = mer, iter = 1, worker STDERR: -concurrent transaction number is 696
target_object_type: table, checked_DML = mer, iter = 1, worker STDERR: After line 18 in file C:\\FBTESTING\\qabt-repo mp mp_sttm_restart_max_limit.sql
target_object_type: table, checked_DML = del, iter = 0, restarts number to be tested: 10
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: Records affected: 12
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: Records affected: 0
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG:
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: OLD_ID OP SNAP_NO_RANK
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: ======= ====== =====================
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 1
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 2
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 2
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 3
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 3
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 4
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 4
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 5
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 5
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 6
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 6
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 7
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 7
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 8
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 8
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 9
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 9
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 10
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 10
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 11
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 11
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: -3 DEL 11
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: -4 DEL 11
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: -5 DEL 11
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: -6 DEL 11
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: -7 DEL 11
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: -8 DEL 11
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: -9 DEL 11
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: -10 DEL 11
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: -11 DEL 11
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: -12 DEL 11
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG:
target_object_type: table, checked_DML = del, iter = 0, worker STDLOG: Records affected: 31
target_object_type: table, checked_DML = del, iter = 1, restarts number to be tested: 12
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: Records affected: 2
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: ID
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: =======
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: -14
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: -13
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: -12
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: -11
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: -10
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: -9
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: -8
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: -7
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: -6
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: -5
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: -4
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: -3
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 1
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 2
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: Records affected: 14
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: OLD_ID OP SNAP_NO_RANK
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: ======= ====== =====================
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 1
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 2
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 2
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 3
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 3
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 4
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 4
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 5
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 5
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 6
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 6
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 7
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 7
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 8
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 8
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 9
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 9
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 10
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 10
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 11
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 11
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = del, iter = 1, worker STDLOG: Records affected: 21
target_object_type: table, checked_DML = del, iter = 1, worker STDERR: Statement failed, SQLSTATE = 40001
target_object_type: table, checked_DML = del, iter = 1, worker STDERR: deadlock
target_object_type: table, checked_DML = del, iter = 1, worker STDERR: -update conflicts with concurrent update
target_object_type: table, checked_DML = del, iter = 1, worker STDERR: -concurrent transaction number is 1049
target_object_type: table, checked_DML = del, iter = 1, worker STDERR: After line 18 in file C:\\FBTESTING\\qabt-repo mp mp_sttm_restart_max_limit.sql
target_object_type: table, checked_DML = lok, iter = 0, restarts number to be tested: 10
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG:
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: ID
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: =======
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: -12
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: -11
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: -10
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: -9
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: -8
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: -7
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: -6
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: -5
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: -4
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: -3
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: 1
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: 2
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG:
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: Records affected: 12
target_object_type: table, checked_DML = lok, iter = 0, worker STDLOG: Records affected: 0
target_object_type: table, checked_DML = lok, iter = 1, restarts number to be tested: 12
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: ID
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: =======
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: -14
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: -13
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: -12
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: -11
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: -10
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: -9
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: -8
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: -7
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: -6
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: -5
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: -4
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: -3
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: 1
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: 2
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG:
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: Records affected: 14
target_object_type: table, checked_DML = lok, iter = 1, worker STDLOG: Records affected: 0
target_object_type: table, checked_DML = lok, iter = 1, worker STDERR: Statement failed, SQLSTATE = 40001
target_object_type: table, checked_DML = lok, iter = 1, worker STDERR: deadlock
target_object_type: table, checked_DML = lok, iter = 1, worker STDERR: -update conflicts with concurrent update
target_object_type: table, checked_DML = lok, iter = 1, worker STDERR: -concurrent transaction number is 1282
target_object_type: table, checked_DML = lok, iter = 1, worker STDERR: -At block line: 1, col: 39
target_object_type: table, checked_DML = lok, iter = 1, worker STDERR: After line 19 in file C:\\FBTESTING\\qabt-repo mp mp_sttm_restart_max_limit.sql
target_object_type: view, checked_DML = upd, iter = 0, restarts number to be tested: 10
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: Records affected: 12
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG:
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: ID
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: =======
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -1200
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -1100
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -1000
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -900
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -800
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -700
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -600
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -500
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -400
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -300
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 100
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 200
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG:
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: Records affected: 12
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG:
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: OLD_ID OP SNAP_NO_RANK
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: ======= ====== =====================
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 1
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 2
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 2
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 3
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 3
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 4
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 4
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 5
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 5
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 6
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 6
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 7
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 7
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 8
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 8
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 9
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 9
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 10
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 10
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 2 UPD 11
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: 1 UPD 11
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -3 UPD 11
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -4 UPD 11
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -5 UPD 11
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -6 UPD 11
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -7 UPD 11
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -8 UPD 11
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -9 UPD 11
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -10 UPD 11
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -11 UPD 11
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: -12 UPD 11
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG:
target_object_type: view, checked_DML = upd, iter = 0, worker STDLOG: Records affected: 31
target_object_type: view, checked_DML = upd, iter = 1, restarts number to be tested: 12
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: Records affected: 2
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: ID
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: =======
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: -14
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: -13
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: -12
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: -11
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: -10
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: -9
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: -8
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: -7
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: -6
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: -5
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: -4
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: -3
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 1
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 2
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: Records affected: 14
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: OLD_ID OP SNAP_NO_RANK
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: ======= ====== =====================
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 1
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 2
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 2
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 3
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 3
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 4
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 4
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 5
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 5
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 6
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 6
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 7
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 7
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 8
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 8
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 9
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 9
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 10
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 10
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 2 UPD 11
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: 1 UPD 11
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = upd, iter = 1, worker STDLOG: Records affected: 21
target_object_type: view, checked_DML = upd, iter = 1, worker STDERR: Statement failed, SQLSTATE = 40001
target_object_type: view, checked_DML = upd, iter = 1, worker STDERR: deadlock
target_object_type: view, checked_DML = upd, iter = 1, worker STDERR: -update conflicts with concurrent update
target_object_type: view, checked_DML = upd, iter = 1, worker STDERR: -concurrent transaction number is 1630
target_object_type: view, checked_DML = upd, iter = 1, worker STDERR: After line 18 in file C:\\FBTESTING\\qabt-repo mp mp_sttm_restart_max_limit.sql
target_object_type: view, checked_DML = mer, iter = 0, restarts number to be tested: 10
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: Records affected: 12
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG:
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: ID
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: =======
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -1200
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -1100
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -1000
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -900
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -800
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -700
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -600
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -500
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -400
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -300
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 100
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 200
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG:
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: Records affected: 12
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG:
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: OLD_ID OP SNAP_NO_RANK
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: ======= ====== =====================
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 1
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 2
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 2
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 3
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 3
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 4
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 4
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 5
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 5
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 6
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 6
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 7
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 7
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 8
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 8
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 9
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 9
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 10
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 10
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 2 UPD 11
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: 1 UPD 11
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -3 UPD 11
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -4 UPD 11
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -5 UPD 11
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -6 UPD 11
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -7 UPD 11
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -8 UPD 11
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -9 UPD 11
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -10 UPD 11
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -11 UPD 11
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: -12 UPD 11
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG:
target_object_type: view, checked_DML = mer, iter = 0, worker STDLOG: Records affected: 31
target_object_type: view, checked_DML = mer, iter = 1, restarts number to be tested: 12
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: Records affected: 2
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: ID
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: =======
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: -14
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: -13
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: -12
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: -11
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: -10
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: -9
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: -8
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: -7
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: -6
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: -5
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: -4
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: -3
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 1
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 2
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: Records affected: 14
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: OLD_ID OP SNAP_NO_RANK
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: ======= ====== =====================
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 1
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 2
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 2
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 3
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 3
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 4
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 4
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 5
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 5
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 6
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 6
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 7
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 7
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 8
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 8
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 9
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 9
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 10
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 10
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 2 UPD 11
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: 1 UPD 11
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = mer, iter = 1, worker STDLOG: Records affected: 21
target_object_type: view, checked_DML = mer, iter = 1, worker STDERR: Statement failed, SQLSTATE = 40001
target_object_type: view, checked_DML = mer, iter = 1, worker STDERR: deadlock
target_object_type: view, checked_DML = mer, iter = 1, worker STDERR: -update conflicts with concurrent update
target_object_type: view, checked_DML = mer, iter = 1, worker STDERR: -concurrent transaction number is 1983
target_object_type: view, checked_DML = mer, iter = 1, worker STDERR: After line 18 in file C:\\FBTESTING\\qabt-repo mp mp_sttm_restart_max_limit.sql
target_object_type: view, checked_DML = del, iter = 0, restarts number to be tested: 10
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: Records affected: 12
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: Records affected: 0
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG:
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: OLD_ID OP SNAP_NO_RANK
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: ======= ====== =====================
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 1
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 2
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 2
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 3
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 3
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 4
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 4
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 5
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 5
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 6
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 6
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 7
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 7
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 8
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 8
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 9
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 9
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 10
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 10
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 2 DEL 11
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: 1 DEL 11
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: -3 DEL 11
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: -4 DEL 11
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: -5 DEL 11
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: -6 DEL 11
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: -7 DEL 11
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: -8 DEL 11
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: -9 DEL 11
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: -10 DEL 11
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: -11 DEL 11
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: -12 DEL 11
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG:
target_object_type: view, checked_DML = del, iter = 0, worker STDLOG: Records affected: 31
target_object_type: view, checked_DML = del, iter = 1, restarts number to be tested: 12
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: Records affected: 2
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: ID
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: =======
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: -14
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: -13
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: -12
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: -11
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: -10
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: -9
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: -8
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: -7
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: -6
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: -5
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: -4
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: -3
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 1
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 2
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: Records affected: 14
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: OLD_ID OP SNAP_NO_RANK
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: ======= ====== =====================
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 1
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 2
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 2
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 3
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 3
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 4
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 4
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 5
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 5
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 6
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 6
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 7
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 7
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 8
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 8
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 9
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 9
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 10
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 10
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 2 DEL 11
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: 1 DEL 11
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = del, iter = 1, worker STDLOG: Records affected: 21
target_object_type: view, checked_DML = del, iter = 1, worker STDERR: Statement failed, SQLSTATE = 40001
target_object_type: view, checked_DML = del, iter = 1, worker STDERR: deadlock
target_object_type: view, checked_DML = del, iter = 1, worker STDERR: -update conflicts with concurrent update
target_object_type: view, checked_DML = del, iter = 1, worker STDERR: -concurrent transaction number is 2336
target_object_type: view, checked_DML = del, iter = 1, worker STDERR: After line 18 in file C:\\FBTESTING\\qabt-repo mp mp_sttm_restart_max_limit.sql
target_object_type: view, checked_DML = lok, iter = 0, restarts number to be tested: 10
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG:
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: ID
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: =======
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: 1
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: 2
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: 3
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: 4
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: 5
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: 6
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: 7
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: 8
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: 9
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: 10
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: 11
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: 12
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG:
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: Records affected: 12
target_object_type: view, checked_DML = lok, iter = 0, worker STDLOG: Records affected: 0
target_object_type: view, checked_DML = lok, iter = 0, worker STDERR: Statement failed, SQLSTATE = 42000
target_object_type: view, checked_DML = lok, iter = 0, worker STDERR: Dynamic SQL Error
target_object_type: view, checked_DML = lok, iter = 0, worker STDERR: -SQL error code = -104
target_object_type: view, checked_DML = lok, iter = 0, worker STDERR: -WITH LOCK can be used only with a single physical table
target_object_type: view, checked_DML = lok, iter = 0, worker STDERR: After line 19 in file C:\\FBTESTING\\qabt-repo mp mp_sttm_restart_max_limit.sql
target_object_type: view, checked_DML = lok, iter = 1, restarts number to be tested: 12
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: ID
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: =======
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 1
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 2
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 3
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 4
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 5
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 6
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 7
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 8
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 9
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 10
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 11
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 12
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 13
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: 14
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG:
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: Records affected: 14
target_object_type: view, checked_DML = lok, iter = 1, worker STDLOG: Records affected: 0
target_object_type: view, checked_DML = lok, iter = 1, worker STDERR: Statement failed, SQLSTATE = 42000
target_object_type: view, checked_DML = lok, iter = 1, worker STDERR: Dynamic SQL Error
target_object_type: view, checked_DML = lok, iter = 1, worker STDERR: -SQL error code = -104
target_object_type: view, checked_DML = lok, iter = 1, worker STDERR: -WITH LOCK can be used only with a single physical table
target_object_type: view, checked_DML = lok, iter = 1, worker STDERR: After line 19 in file C:\\FBTESTING\\qabt-repo mp mp_sttm_restart_max_limit.sql
"""
@pytest.mark.version('>=4.0')
@pytest.mark.xfail
def test_1(db_1):
    """Placeholder for the statement-restart limit test; the expected server output lives in the module-level docstring above. Marked xfail until implemented."""
    pytest.fail("Test not IMPLEMENTED")
| 80.943556 | 266 | 0.591425 |
4a426ab84ee0d0305cb15f73517664d18921a819 | 2,944 | py | Python | src/scripts/sfh_vs_fb.py | rodluger/hush | f77f4783864e9f0dd86eedf01ae300d041747ba4 | [
"MIT"
] | null | null | null | src/scripts/sfh_vs_fb.py | rodluger/hush | f77f4783864e9f0dd86eedf01ae300d041747ba4 | [
"MIT"
] | null | null | null | src/scripts/sfh_vs_fb.py | rodluger/hush | f77f4783864e9f0dd86eedf01ae300d041747ba4 | [
"MIT"
] | null | null | null | """
Generate figure 1 of the paper based on galaxy m12i and the Moe+19 metallicity-dependent binary fraction
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rcParams
import paths
def get_FeH_from_Z(Z, Z_sun=0.02):
"""
Converts from Z to FeH under the assumption that
all stars have the same abundance as the sun
INPUTS
----------------------
Z [array]: array of metallicities to convert
Z_sun [float]: solar metallicity
RETURNS
----------------------
Z [array]: array of FeH values
"""
FeH = np.log10(Z) - np.log10(Z_sun)
return FeH
def get_binfrac_of_Z(Z):
"""
Calculates the theoretical binary fraction as a
function of metallicity.
INPUTS
----------------------
Z [array]: metallicity Z values
RETURNS
----------------------
binfrac [array]: binary fraction values
"""
FeH = get_FeH_from_Z(Z)
FeH_low = FeH[np.where(FeH <= -1.0)]
FeH_high = FeH[np.where(FeH > -1.0)]
binfrac_low = -0.0648 * FeH_low + 0.3356
binfrac_high = -0.1977 * FeH_high + 0.2025
binfrac = np.append(binfrac_low, binfrac_high)
return binfrac
rcParams["font.family"] = "serif"
rcParams["font.size"] = 14
rcParams["mathtext.default"] = "regular"
met_arr = np.logspace(np.log10(1e-4), np.log10(0.03), 15)
met_arr = np.round(met_arr, 8)
met_arr = np.append(0.0, met_arr)
Z_sun = 0.02
FIRE = pd.read_hdf(paths.data / "FIRE.h5")
fig, ax = plt.subplots()
plt.grid(lw=0.25, which="both")
bins = np.append(met_arr[1:-1] / Z_sun, FIRE.met.max())
bins = np.append(FIRE.met.min(), bins)
bins = np.log10(bins)
ax2 = ax.twinx()
h, bins, _ = ax2.hist(
np.log10(FIRE.met),
bins=bins,
histtype="step",
lw=2,
color="xkcd:tomato red",
label="Latte m12i",
)
ax2.set_yscale("log")
ax2.legend(
loc="lower left",
bbox_to_anchor=(0.7, 1.01),
ncol=4,
borderaxespad=0,
frameon=False,
)
ax.scatter(
np.log10(met_arr[1:] / Z_sun),
get_binfrac_of_Z(met_arr[1:]),
color="k",
s=15,
zorder=2,
label="COSMIC Z grid",
)
met_plot = np.linspace(FIRE.met.min() * Z_sun, FIRE.met.max() * Z_sun, 10000)
ax.plot(np.log10(met_plot / Z_sun), get_binfrac_of_Z(met_plot), color="k", label="FZ")
ax.set_xlim(bins[1] - 0.17693008, bins[-2] + 2 * 0.17693008)
ax.legend(
loc="lower left",
bbox_to_anchor=(-0.07, 1.01),
ncol=5,
borderaxespad=0,
frameon=False,
)
ax.set_zorder(ax2.get_zorder() + 1)
ax.patch.set_visible(False)
ax.set_xlabel("Log$_{10}$(Z/Z$_\odot$)", size=18)
ax.set_ylabel("Binary Fraction", size=18)
ax2.set_ylabel(r"M$_{\rm{stars}}$ per Z bin (M$_\odot$)", size=18)
ax2.set_yticks([1e4, 1e5, 1e6, 1e7])
ax2.set_yticklabels(["7e7", "7e8", "7e9", "7e10"])
ax2.tick_params(labelsize=15)
ax.tick_params(labelsize=15)
ax2.set_ylim(1e4 - 0.1e4, 1e7 + 0.1e7)
plt.savefig(paths.figures / "sfh_vs_fb.pdf", bbox_inches="tight", dpi=100)
| 25.6 | 104 | 0.636549 |
bb8db686bcdac461c7ef2053153583571c9d2b04 | 19,108 | py | Python | src/document/icr_processor.py | gregbugaj/marie-ai | f51a74f19ab5d7231c9f8a426284feff1671b974 | [
"MIT"
] | 4 | 2021-09-23T22:38:48.000Z | 2022-01-19T12:03:02.000Z | src/document/icr_processor.py | gregbugaj/marie-icr | f51a74f19ab5d7231c9f8a426284feff1671b974 | [
"MIT"
] | 17 | 2021-12-22T16:37:21.000Z | 2022-03-16T16:07:34.000Z | src/document/icr_processor.py | gregbugaj/marie-ai | f51a74f19ab5d7231c9f8a426284feff1671b974 | [
"MIT"
] | null | null | null | # Add parent to the search path, so we can reference the modules(craft, pix2pix) here without throwing and exception
import os
import sys
from models.icr.dataset import AlignCollate, RawDataset
from models.icr.memory_dataset import MemoryDataset
from models.icr.model import Model
from models.icr.utils import CTCLabelConverter, AttnLabelConverter
from utils.image_utils import imwrite
from utils.utils import ensure_exists
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
import base64
import json
import typing
import numpy as np
import torch
import torch.backends.cudnn as cudnn
import torch.nn.functional as F
import torch.utils.data
import cv2
from draw_truetype import drawTrueTypeTextOnImage
from numpyencoder import NumpyEncoder
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# device = torch.device('cpu')
class Object(object):
pass
def encodeimg2b64(img: np.ndarray) -> str:
"""encode image to base64"""
retval, buffer = cv2.imencode('.png', img)
png_as_text = base64.b64encode(buffer).decode()
return png_as_text
def icr_debug(opt):
"""
ICR debug utility
"""
if 'CTC' in opt.Prediction:
converter = CTCLabelConverter(opt.character)
else:
converter = AttnLabelConverter(opt.character)
opt.num_class = len(converter.character)
print('Evaluating on device : %s' % (device))
if opt.rgb:
opt.input_channel = 3
model = Model(opt)
print('model input parameters', opt.imgH, opt.imgW, opt.num_fiducial, opt.input_channel, opt.output_channel,
opt.hidden_size, opt.num_class, opt.batch_max_length, opt.Transformation, opt.FeatureExtraction,
opt.SequenceModeling, opt.Prediction)
model = torch.nn.DataParallel(model).to(device)
# load model
print('loading pretrained model from %s' % opt.saved_model)
model.load_state_dict(torch.load(opt.saved_model, map_location=device))
# setup data
AlignCollate_data = AlignCollate(imgH=opt.imgH, imgW=opt.imgW, keep_ratio_with_pad=opt.PAD)
eval_data = RawDataset(root=opt.image_folder, opt=opt) # use RawDataset
eval_loader = torch.utils.data.DataLoader(
eval_data, batch_size=opt.batch_size,
shuffle=False,
num_workers=int(opt.workers),
collate_fn=AlignCollate_data, pin_memory=True)
# predict
model.eval()
with torch.no_grad():
for image_tensors, image_path_list in eval_loader:
batch_size = image_tensors.size(0)
image = image_tensors.to(device)
# For max length prediction
length_for_pred = torch.IntTensor([opt.batch_max_length] * batch_size).to(device)
text_for_pred = torch.LongTensor(batch_size, opt.batch_max_length + 1).fill_(0).to(device)
if 'CTC' in opt.Prediction:
preds = model(image, text_for_pred)
# Select max probabilty (greedy decoding) then decode index to character
preds_size = torch.IntTensor([preds.size(1)] * batch_size)
_, preds_index = preds.max(2)
# preds_index = preds_index.view(-1)
preds_str = converter.decode(preds_index, preds_size)
else:
# preds = model(image, text_for_pred, is_train=False)
preds = model(image, text_for_pred)
# select max probabilty (greedy decoding) then decode index to character
_, preds_index = preds.max(2)
preds_str = converter.decode(preds_index, length_for_pred)
log = open(f'./log_eval_result.txt', 'a')
dashed_line = '-' * 120
head = f'{"image_path":25s}\t{"predicted_labels":32s}\tconfidence score'
print(f'{dashed_line}\n{head}\n{dashed_line}')
log.write(f'{dashed_line}\n{head}\n{dashed_line}\n')
preds_prob = F.softmax(preds, dim=2)
preds_max_prob, _ = preds_prob.max(dim=2)
for img_name, pred, pred_max_prob in zip(image_path_list, preds_str, preds_max_prob):
if 'Attn' in opt.Prediction:
pred_EOS = pred.find('[s]')
pred = pred[:pred_EOS] # prune after "end of sentence" token ([s])
pred_max_prob = pred_max_prob[:pred_EOS]
# calculate confidence score (= multiply of pred_max_prob)
confidence_score = pred_max_prob.cumprod(dim=0)[-1]
print(f'{img_name:25s}\t{pred:32s}\t{confidence_score:0.4f}')
log.write(f'{img_name:25s}\t{pred:32s}\t{confidence_score:0.4f}\n')
log.close()
def compute_input(image):
    """Normalize an image for the recognition network.

    The input (expected in RGB channel order) is copied to float32, then each
    channel is mean-shifted and scaled using the ImageNet channel statistics
    expressed in 0-255 pixel units. The in-place operations keep the result
    in float32.
    """
    normalized = image.astype('float32')
    normalized -= np.array([0.485, 0.456, 0.406]) * 255
    normalized /= np.array([0.229, 0.224, 0.225]) * 255
    return normalized
class IcrProcessor:
    """Intelligent character recognition (ICR) processor.

    Wraps a four-stage scene-text recognition network (Transformation,
    FeatureExtraction, SequenceModeling, Prediction) and exposes helpers that
    recognize text from pre-cropped image fragments and assemble the
    word-level results into lines.
    """

    def __init__(self, work_dir: str = '/tmp/icr', cuda: bool = False) -> None:
        # NOTE(review): `cuda` is only stored and printed here; the device
        # actually used comes from the module-level `device` — confirm the
        # two agree in the caller's configuration.
        print("ICR processor [cuda={}]".format(cuda))
        self.cuda = cuda
        self.work_dir = work_dir
        # Active configuration: case-sensitive TPS-ResNet-BiLSTM-Attn model.
        if True:
            opt = Object()
            opt.Transformation = 'TPS'
            opt.FeatureExtraction = 'ResNet'
            opt.SequenceModeling = 'BiLSTM'
            opt.Prediction = 'Attn'
            opt.saved_model = './models/icr/TPS-ResNet-BiLSTM-Attn-case-sensitive-ft/best_accuracy.pth'
            opt.sensitive = True
            opt.imgH = 32
            opt.imgW = 100
            opt.character = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!"#$%&\'()*+,-./:;<=>?@[\]^_`{|}~'
            opt.rgb = False
            opt.num_fiducial = 20
            opt.input_channel = 1
            opt.output_channel = 512
            opt.hidden_size = 256
            opt.batch_max_length = 48
            opt.batch_size = 2  # FIXME: setting batch size to 1 will cause "TypeError: forward() missing 2 required positional arguments: 'input' and 'text'"
            opt.PAD = True
            opt.workers = 4
            opt.num_gpu = -1
            opt.image_folder = './'
        # Disabled alternative configuration: case-insensitive model with a
        # digits+uppercase charset and shorter max label length.
        if False:
            opt = Object()
            opt.Transformation = 'TPS'
            opt.FeatureExtraction = 'ResNet'
            opt.SequenceModeling = 'BiLSTM'
            opt.Prediction = 'Attn'
            opt.saved_model = './models/icr/TPS-ResNet-BiLSTM-Attn/best_accuracy.pth'
            # opt.saved_model = './models/icr/TPS-ResNet-BiLSTM-Attn-case-sensitive/TPS-ResNet-BiLSTM-Attn-case-sensitive.pth'
            opt.saved_model = './models/icr/TPS-ResNet-BiLSTM-Attn/TPS-ResNet-BiLSTM-Attn.pth'
            opt.sensitive = False
            opt.imgH = 32
            opt.imgW = 100
            opt.character = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
            # opt.character = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!"#$%&\'()*+,-./:;<=>?@[\]^_`{|}~'
            opt.num_fiducial = 20
            opt.input_channel = 1
            opt.output_channel = 512
            opt.hidden_size = 256
            opt.batch_max_length = 25
            opt.batch_size = 2  # Fixme: setting batch size to 1 will cause "TypeError: forward() missing 2 required positional arguments: 'input' and 'text'"
            opt.PAD = True
            opt.rgb = False
            opt.workers = 4
            opt.num_gpu = -1
            opt.image_folder = './'
        self.opt = opt
        self.converter, self.model = self.__load()
        # cuDNN autotuning plus deterministic kernel selection.
        cudnn.benchmark = True
        cudnn.deterministic = True

    def __load(self):
        """ model configuration

        Build the label converter and the recognition model from self.opt and
        load the pretrained weights. Returns (converter, model).
        """
        opt = self.opt
        # CTC and attention heads decode differently, so each needs its own
        # label converter.
        if 'CTC' in opt.Prediction:
            converter = CTCLabelConverter(opt.character)
        else:
            converter = AttnLabelConverter(opt.character)
        opt.num_class = len(converter.character)
        print('Evaluating on device : %s' % (device))
        if opt.rgb:
            opt.input_channel = 3
        model = Model(opt)
        print('model input parameters', opt.imgH, opt.imgW, opt.num_fiducial, opt.input_channel, opt.output_channel,
              opt.hidden_size, opt.num_class, opt.batch_max_length, opt.Transformation, opt.FeatureExtraction,
              opt.SequenceModeling, opt.Prediction)
        # Somehow the model in being still loaded on GPU
        # https://pytorch.org/tutorials/recipes/recipes/save_load_across_devices.html
        # GPU only
        # The checkpoint was saved from a DataParallel wrapper, so the model is
        # wrapped before the state dict is loaded.
        model = torch.nn.DataParallel(model, device_ids=None).to(device)
        # load model
        print('loading pretrained model from %s' % opt.saved_model)
        model.load_state_dict(torch.load(opt.saved_model, map_location=device))
        # Disabled CPU loading path: re-wraps a plain model so the
        # 'module.'-prefixed keys of a DataParallel checkpoint resolve.
        if False:
            class WrappedModel(torch.nn.Module):
                def __init__(self, module):
                    super(WrappedModel, self).__init__()
                    self.module = module  # that I actually define.

                def forward(self, x):
                    return self.module(x)
            # CPU
            model = WrappedModel(model)
            model = model.to(device)
            state_dict = torch.load(opt.saved_model, map_location=device)
            model.load_state_dict(state_dict)
        return converter, model

    def extract_text(self, _id, key, image):
        """Recognize text from a single image.

        Process image via ICR; this is a low-level API, to get more usable
        results call extract_icr.

        Args:
            _id: Unique Image ID
            key: Unique image key
            image: A pre-cropped image containing characters

        Returns:
            (text, confidence) on success, (None, 0) otherwise.
        """
        print('ICR processing : {}, {}'.format(_id, key))
        # The whole fragment is treated as a single box covering the image.
        # NOTE(review): recognize_from_boxes currently raises, so this path is
        # effectively dead until that method is implemented.
        results = self.recognize_from_boxes([image], [0, 0, image.shape[1], image.shape[0]])
        if len(results) == 1:
            r = results[0]
            return r['text'], r['confidence']
        return None, 0

    def recognize_from_boxes(self, image, boxes, **kwargs) -> typing.List[typing.Dict[str, any]]:
        """Recognize text from image using lists of bounding boxes.

        Args:
            image: input images, supplied as numpy arrays with shape
                (H, W, 3).
            boxes: A list of boxes to extract
        """
        raise Exception("Not yet implemented")

    def recognize_from_fragments(self, images, **kwargs) -> typing.List[typing.Dict[str, any]]:
        """Recognize text from image fragments.

        Args:
            images: A list of input images, supplied as numpy arrays with shape
                (H, W, 3).

        Returns:
            A list of dicts with keys "confidence", "text" and "id", one per
            input fragment.
        """
        print('ICR processing : recognize_from_boxes via boxes')
        try:
            # debug_dir = ensure_exists(os.path.join(self.work_dir,id,'icr',key,'debug'))
            # output_dir = ensure_exists(os.path.join(self.work_dir,id,'icr',key,'output'))
            opt = self.opt
            model = self.model
            converter = self.converter
            # Large batches are fine at inference time.
            opt.batch_size = 192  #
            # setup data
            AlignCollate_data = AlignCollate(imgH=opt.imgH, imgW=opt.imgW, keep_ratio_with_pad=opt.PAD)
            eval_data = MemoryDataset(images=images, opt=opt)
            eval_loader = torch.utils.data.DataLoader(
                eval_data, batch_size=opt.batch_size,
                shuffle=False,
                num_workers=int(opt.workers),
                collate_fn=AlignCollate_data, pin_memory=True)
            results = []
            # predict
            model.eval()
            with torch.no_grad():
                for image_tensors, image_labels in eval_loader:
                    print(f'OCR : {image_labels}')
                    batch_size = image_tensors.size(0)
                    image = image_tensors.to(device)
                    # For max length prediction
                    length_for_pred = torch.IntTensor([opt.batch_max_length] * batch_size).to(device)
                    text_for_pred = torch.LongTensor(batch_size, opt.batch_max_length + 1).fill_(0).to(device)
                    if 'CTC' in opt.Prediction:
                        preds = model(image, text_for_pred)
                        # Select max probabilty (greedy decoding) then decode index to character
                        preds_size = torch.IntTensor([preds.size(1)] * batch_size)
                        _, preds_index = preds.max(2)
                        # preds_index = preds_index.view(-1)
                        preds_str = converter.decode(preds_index, preds_size)
                    else:
                        preds = model(image, text_for_pred, is_train=False)
                        # select max probabilty (greedy decoding) then decode index to character
                        _, preds_index = preds.max(2)
                        preds_str = converter.decode(preds_index, length_for_pred)
                    # Per-batch log of predictions, appended to a fixed file.
                    log = open(f'./log_eval_result.txt', 'a')
                    dashed_line = '-' * 120
                    head = f'{"key":25s}\t{"predicted_labels":32s}\tconfidence score'
                    print(f'{dashed_line}\n{head}\n{dashed_line}')
                    log.write(f'{dashed_line}\n{head}\n{dashed_line}\n')
                    preds_prob = F.softmax(preds, dim=2)
                    preds_max_prob, _ = preds_prob.max(dim=2)
                    for img_name, pred, pred_max_prob in zip(image_labels, preds_str, preds_max_prob):
                        if 'Attn' in opt.Prediction:
                            pred_EOS = pred.find('[s]')
                            pred = pred[:pred_EOS]  # prune after "end of sentence" token ([s])
                            pred_max_prob = pred_max_prob[:pred_EOS]
                        # calculate confidence score (= multiply of pred_max_prob)
                        confidence_score = pred_max_prob.cumprod(dim=0)[-1]
                        # get value from the TensorFloat
                        confidence = confidence_score.item()
                        txt = pred
                        results.append({
                            "confidence": confidence,
                            "text": txt,
                            "id": img_name
                        })
                        print(f'{img_name:25s}\t{pred:32s}\t{confidence_score:0.4f}')
                        log.write(f'{img_name:25s}\t{pred:32s}\t{confidence_score:0.4f}\n')
                    log.close()
        except Exception as ex:
            print(ex)
            raise ex
        return results

    def recognize(self, _id, key, img, boxes, image_fragments, lines):
        """Recognize text from multiple images.

        Args:
            id: Unique Image ID
            key: Unique image key/region for the extraction
            img: A pre-cropped image containing characters
            boxes: per-fragment [x, y, w, h] boxes in `img` coordinates
            image_fragments: cropped images matching `boxes`
            lines: per-fragment 1-based line number

        Returns:
            (result dict with 'meta'/'words'/'lines', debug overlay image)
        """
        print(f'ICR recognize : {_id}, {key}')
        assert len(boxes) == len(image_fragments), "You must provide the same number of box groups as images."
        try:
            shape = img.shape
            # White canvas used to render the recognized text for debugging.
            overlay_image = np.ones((shape[0], shape[1], 3), dtype=np.uint8) * 255
            debug_dir = ensure_exists(os.path.join('/tmp/icr', _id))
            debug_all_dir = ensure_exists(os.path.join('/tmp/icr', 'fields', key))
            meta = {
                'imageSize': {'width': img.shape[1], 'height': img.shape[0]},
                'lang': 'en'
            }
            words = []
            max_line_number = 0
            results = self.recognize_from_fragments(image_fragments)
            for i in range(len(boxes)):
                box, fragment, line = boxes[i], image_fragments[i], lines[i]
                # txt, confidence = self.extract_text(id, str(i), fragment)
                extraction = results[i]
                txt = extraction['text']
                confidence = extraction['confidence']
                print('Processing [box, line, txt, conf] : {}, {}, {}, {}'.format(box, line, txt, confidence))
                conf_label = f'{confidence:0.4f}'
                txt_label = txt
                # One word-level payload per fragment.
                payload = dict()
                payload['id'] = i
                payload['text'] = txt
                payload['confidence'] = round(confidence, 4)
                payload['box'] = box
                payload['line'] = line
                payload['fragment_b64'] = encodeimg2b64(fragment)
                words.append(payload)
                if line > max_line_number:
                    max_line_number = line
                # Render the text at mid-box height and the confidence at the
                # bottom of the box.
                overlay_image = drawTrueTypeTextOnImage(overlay_image, txt_label, (box[0], box[1] + box[3] // 2), 18,
                                                        (139, 0, 0))
                overlay_image = drawTrueTypeTextOnImage(overlay_image, conf_label, (box[0], box[1] + box[3]), 10,
                                                        (0, 0, 255))
            savepath = os.path.join(debug_dir, f'{key}-icr-result.png')
            imwrite(savepath, overlay_image)
            savepath = os.path.join(debug_all_dir, f'{_id}.png')
            imwrite(savepath, overlay_image)
            # Group the words by line number (1..max_line_number) and order
            # each line's words left-to-right by their box x-coordinate.
            line_ids = np.empty((max_line_number), dtype=object)
            words = np.array(words)
            for i in range(0, max_line_number):
                current_lid = i + 1
                word_ids = []
                box_picks = []
                word_picks = []
                for word in words:
                    lid = word['line']
                    if lid == current_lid:
                        word_ids.append(word['id'])
                        box_picks.append(word['box'])
                        word_picks.append(word)
                box_picks = np.array(box_picks)
                word_picks = np.array(word_picks)
                # Sort words within the line by their left edge.
                x1 = box_picks[:, 0]
                idxs = np.argsort(x1)
                aligned_words = word_picks[idxs]
                _w = []
                _conf = []
                for wd in aligned_words:
                    _w.append(wd['text'])
                    _conf.append(wd['confidence'])
                text = ' '.join(_w)
                # NOTE(review): bbox mixes min x/y with max w/h rather than
                # computing a true union of the word boxes — confirm intended.
                min_x = box_picks[:, 0].min()
                min_y = box_picks[:, 1].min()
                max_w = box_picks[:, 2].max()
                max_h = box_picks[:, 3].max()
                bbox = [min_x, min_y, max_w, max_h]
                line_ids[i] = {
                    'line': i + 1,
                    'wordids': word_ids,
                    'text': text,
                    'bbox': bbox,
                    'confidence': round(np.average(_conf), 4)
                }
            result = {
                'meta': meta,
                'words': words,
                'lines': line_ids,
            }
            # Persist the full result; NumpyEncoder handles the ndarray fields.
            with open('/tmp/icr/data.json', 'w') as f:
                json.dump(result, f, sort_keys=True, separators=(',', ': '), ensure_ascii=False, indent=4,
                          cls=NumpyEncoder)
            print('------ Extraction ------------')
            for line in line_ids:
                txt = line['text']
                print(f' >> {txt}')
        except Exception as ex:
            raise ex
        return result, overlay_image
| 39.479339 | 158 | 0.554689 |
ef70bf0c0028a9c50ca05e16fcdc750d4b6c0d58 | 2,200 | py | Python | typhos/tests/test_plot.py | ZLLentz/typhos | 1fb321c1f1c222ac02552ecb9c86d37d5ddf3274 | [
"BSD-3-Clause-LBNL"
] | 9 | 2017-11-05T17:22:23.000Z | 2019-10-18T14:21:24.000Z | typhos/tests/test_plot.py | ZLLentz/typhos | 1fb321c1f1c222ac02552ecb9c86d37d5ddf3274 | [
"BSD-3-Clause-LBNL"
] | 244 | 2020-01-09T22:13:29.000Z | 2022-02-24T21:40:16.000Z | typhos/tests/test_plot.py | ZLLentz/typhos | 1fb321c1f1c222ac02552ecb9c86d37d5ddf3274 | [
"BSD-3-Clause-LBNL"
] | 11 | 2020-05-20T19:08:37.000Z | 2022-02-08T19:20:35.000Z | """
Tests for the plot tool.
"""
import pytest
from ophyd import EpicsSignal, Signal
from typhos import register_signal
from typhos.tools.plot import TyphosTimePlot
from typhos.utils import channel_from_signal
@pytest.fixture(scope='session')
def sim_signal():
    """Session-wide simulated ophyd Signal, registered for sig:// lookup."""
    signal = Signal(name='tst_this_2')
    signal.put(3.14)
    register_signal(signal)
    return signal
def test_add_signal(qtbot, sim_signal):
    """Available signals appear in the combo box with their channel address."""
    pv_signal = EpicsSignal('Tst:This')
    plot = TyphosTimePlot()
    qtbot.addWidget(plot)
    # An EPICS-backed signal is advertised with a ca:// channel.
    plot.add_available_signal(pv_signal, 'Epics Signal')
    assert plot.signal_combo.itemText(0) == 'Epics Signal'
    assert plot.signal_combo.itemData(0) == 'ca://Tst:This'
    # A registered soft signal is advertised with a sig:// channel.
    plot.add_available_signal(sim_signal, 'Simulated Signal')
    assert plot.signal_combo.itemText(1) == 'Simulated Signal'
    assert plot.signal_combo.itemData(1) == 'sig://tst_this_2'
def test_curve_methods(qtbot, sim_signal):
    """Adding a curve is idempotent and removing it empties the chart."""
    plot = TyphosTimePlot()
    qtbot.addWidget(plot)
    channel = 'sig://' + sim_signal.name
    plot.add_curve(channel, name=sim_signal.name)
    # The channel is tracked and exactly one live curve exists.
    assert channel in plot.channel_to_curve
    assert len(plot.timechart.chart.curves) == 1
    # Adding the same channel again must not create a duplicate curve.
    plot.add_curve(channel, name=sim_signal.name)
    assert len(plot.timechart.chart.curves) == 1
    plot.remove_curve(channel_from_signal(sim_signal))
    assert len(plot.timechart.chart.curves) == 0
def test_curve_creation_button(qtbot, sim_signal):
    """Requesting creation adds a curve for the selected available signal."""
    plot = TyphosTimePlot()
    qtbot.addWidget(plot)
    plot.add_available_signal(sim_signal, 'Sim Signal')
    plot.creation_requested()
    # The signal's channel is now tracked with a single live curve.
    assert channel_from_signal(sim_signal) in plot.channel_to_curve
    assert len(plot.timechart.chart.curves) == 1
def test_device_plot(motor, qapp, qtbot):
    """Building a plot from a device eventually lists every component signal."""
    device_plot = TyphosTimePlot.from_device(motor)
    qtbot.addWidget(device_plot)

    def all_signals_listed():
        assert device_plot.signal_combo.count() == len(motor.component_names)

    # Signal discovery is asynchronous; wait for the combo box to fill.
    qtbot.wait_until(all_signals_listed)
| 31.884058 | 69 | 0.725 |
a5d301867fc0909eb8bc01dab2f140df8d9fed54 | 483 | py | Python | accounts/admin.py | YaroslavChyhryn/Djangogram | 2de402eedbc9a77d6bd2ba06cc8b7ad966adff2c | [
"MIT"
] | null | null | null | accounts/admin.py | YaroslavChyhryn/Djangogram | 2de402eedbc9a77d6bd2ba06cc8b7ad966adff2c | [
"MIT"
] | null | null | null | accounts/admin.py | YaroslavChyhryn/Djangogram | 2de402eedbc9a77d6bd2ba06cc8b7ad966adff2c | [
"MIT"
] | null | null | null | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.models import User
from .models import UserProfile
class UserProfileInline(admin.StackedInline):
"""
Display user profile fields inline with django-user model
"""
model = UserProfile
can_delete = False
class UserAdmin(BaseUserAdmin):
inlines = (UserProfileInline,)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
| 23 | 64 | 0.770186 |
f47d09e7703ecb2829bf34fa754841476cb20785 | 1,463 | py | Python | scons/preprocessor_filter.py | uw-midsun/fwxv | e1ad75607304d6e95420417d8996897e38501503 | [
"MIT"
] | 4 | 2022-01-15T17:49:00.000Z | 2022-02-14T05:23:59.000Z | scons/preprocessor_filter.py | uw-midsun/fwxv | e1ad75607304d6e95420417d8996897e38501503 | [
"MIT"
] | 11 | 2022-01-22T23:15:15.000Z | 2022-03-12T21:30:20.000Z | scons/preprocessor_filter.py | uw-midsun/fwxv | e1ad75607304d6e95420417d8996897e38501503 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
def preprocessor_filter(filename, source_file):
"""
A SCons builder that filters preprocessor output to strip out #include'd file content.
We also strip out the linemarkers output by the C preprocessor.
`filename` is the name of the base file whose content we want to preserve.
`source_file` is the file to read from (the SCons `source` parameter will be ignored).
Note that `source_file` will be deleted at the end (it should be a temp file).
"""
def action(target, source, env):
output_lines = []
with open(source_file, 'r') as file:
# The C preprocessor will output 'linemarkers' of the following form:
# # <lineno> "<source filename>" <id>...
# We want to keep only lines after linemarkers from the given filename.
# See https://gcc.gnu.org/onlinedocs/cpp/Preprocessor-Output.html.
in_file = False
for line in file:
if line.startswith('# '):
# We're in a linemarker - naively test if it's for the current file
in_file = '"' + filename + '"' in line
elif in_file:
output_lines.append(line)
with open(target[0].path, 'w') as output_file:
output_file.writelines(output_lines)
# delete the temp file we were given
os.remove(source_file)
return 0
return action
| 38.5 | 90 | 0.611757 |
9a07833d6aaa5314b379d86b7f265b80cca755f1 | 964 | py | Python | pystratum_common/wrapper/Singleton0Wrapper.py | DatabaseStratum/py-stratum-common | 27bbbc7af8eeefed966c1e955b69cd345fefb7af | [
"MIT"
] | null | null | null | pystratum_common/wrapper/Singleton0Wrapper.py | DatabaseStratum/py-stratum-common | 27bbbc7af8eeefed966c1e955b69cd345fefb7af | [
"MIT"
] | null | null | null | pystratum_common/wrapper/Singleton0Wrapper.py | DatabaseStratum/py-stratum-common | 27bbbc7af8eeefed966c1e955b69cd345fefb7af | [
"MIT"
] | null | null | null | from abc import ABC
from pystratum_common.wrapper.Wrapper import Wrapper
class Singleton0Wrapper(Wrapper, ABC):
"""
Wrapper method generator for stored procedures that are selecting 0 or 1 row with one column only.
"""
# ------------------------------------------------------------------------------------------------------------------
def _return_type_hint(self) -> str:
"""
Returns the return type hint of the wrapper method.
:rtype: str
"""
return 'Any'
# ------------------------------------------------------------------------------------------------------------------
def _get_docstring_return_type(self) -> str:
"""
Returns the return type of the wrapper methods the be used in the docstring.
:rtype: str
"""
return '*'
# ----------------------------------------------------------------------------------------------------------------------
| 32.133333 | 120 | 0.387967 |
28e2575f8d09fa88c27c0859235b7f0c1468e571 | 532 | py | Python | src/middleware.py | FloatingComet62/EzServer | 7ae40a28e1f69344faa21cb1887c23b70e4837a2 | [
"MIT"
] | 2 | 2022-02-02T07:32:10.000Z | 2022-02-02T07:39:31.000Z | src/middleware.py | FloatingComet62/EzServer | 7ae40a28e1f69344faa21cb1887c23b70e4837a2 | [
"MIT"
] | null | null | null | src/middleware.py | FloatingComet62/EzServer | 7ae40a28e1f69344faa21cb1887c23b70e4837a2 | [
"MIT"
] | 1 | 2022-02-02T18:01:20.000Z | 2022-02-02T18:01:20.000Z | # def NAME(self, query):
# do stuff
# return htmlFileIndex
def homePage(self, query):
    """Authorize access to the home page.

    Returns 0 (allowed) when the query carries auth == "69", and 1 (denied)
    otherwise.

    Uses dict.get so a query dict without an "auth" key is denied instead of
    raising KeyError (the original nested `query["auth"]` lookup crashed on
    such requests). A missing/empty query is also denied.
    """
    if query and query.get("auth") == "69":
        return 0
    return 1
def exitPage(self, query):
    """Authorize access to the exit page.

    Returns 0 (allowed) when the query carries auth == "420", and 1 (denied)
    otherwise.

    Uses dict.get so a query dict without an "auth" key is denied instead of
    raising KeyError (the original nested `query["auth"]` lookup crashed on
    such requests). A missing/empty query is also denied.
    """
    if query and query.get("auth") == "420":
        return 0
    return 1
66eac7a614ebf5d4ad7514ce38a805afa7e8c6c9 | 4,439 | py | Python | dynamic_db_router/router.py | yetercatikkas/django-dynamic-db-router | 6496f7d27c5a6d5c67eea6c29581f969b35df123 | [
"MIT"
] | 1 | 2020-01-24T08:51:36.000Z | 2020-01-24T08:51:36.000Z | dynamic_db_router/router.py | voltlines/django-dynamic-db-router | 6496f7d27c5a6d5c67eea6c29581f969b35df123 | [
"MIT"
] | null | null | null | dynamic_db_router/router.py | voltlines/django-dynamic-db-router | 6496f7d27c5a6d5c67eea6c29581f969b35df123 | [
"MIT"
] | null | null | null | import threading
from functools import wraps
from uuid import uuid4
from django.db import connections
THREAD_LOCAL = threading.local()
NON_DECLARED_DB = [None]
class DynamicDbRouter(object):
"""A router that decides what db to read from based on a variable
local to the current thread.
"""
def db_for_read(self, model, **hints):
db_for_read = getattr(THREAD_LOCAL, 'DB_FOR_READ_OVERRIDE', NON_DECLARED_DB)
return db_for_read[-1] if db_for_read else NON_DECLARED_DB[-1]
def db_for_write(self, model, **hints):
db_for_write = getattr(THREAD_LOCAL, 'DB_FOR_WRITE_OVERRIDE', NON_DECLARED_DB)
return db_for_write[-1] if db_for_write else NON_DECLARED_DB[-1]
def allow_relation(self, *args, **kwargs):
return True
def allow_syncdb(self, *args, **kwargs):
return None
def allow_migrate(self, *args, **kwargs):
return None
class in_database(object):
"""A decorator and context manager to do queries on a given database.
:type database: str or dict
:param database: The database to run queries on. A string
will route through the matching database in
``django.conf.settings.DATABASES``. A dictionary will set up a
connection with the given configuration and route queries to it.
:type read: bool, optional
:param read: Controls whether database reads will route through
the provided database. If ``False``, reads will route through
the ``'default'`` database. Defaults to ``True``.
:type write: bool, optional
:param write: Controls whether database writes will route to
the provided database. If ``False``, writes will route to
the ``'default'`` database. Defaults to ``False``.
When used as eithe a decorator or a context manager, `in_database`
requires a single argument, which is the name of the database to
route queries to, or a configuration dictionary for a database to
route to.
Usage as a context manager:
.. code-block:: python
from my_django_app.utils import tricky_query
with in_database('Database_A'):
results = tricky_query()
Usage as a decorator:
.. code-block:: python
from my_django_app.models import Account
@in_database('Database_B')
def lowest_id_account():
Account.objects.order_by('-id')[0]
Used with a configuration dictionary:
.. code-block:: python
db_config = {'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'path/to/mydatabase.db'}
with in_database(db_config):
# Run queries
"""
def __init__(self, database, read=True, write=False):
self.read = read
self.write = write
self.created_db_config = False
if isinstance(database, str):
self.database = database
elif isinstance(database, dict):
# Note: this invalidates the docs above. Update them
# eventually.
self.created_db_config = True
self.unique_db_id = str(uuid4())
connections.databases[self.unique_db_id] = database
self.database = self.unique_db_id
else:
msg = ("database must be an identifier for an existing db, "
"or a complete configuration.")
raise ValueError(msg)
def __enter__(self):
if not hasattr(THREAD_LOCAL, 'DB_FOR_READ_OVERRIDE'):
THREAD_LOCAL.DB_FOR_READ_OVERRIDE = []
if not hasattr(THREAD_LOCAL, 'DB_FOR_WRITE_OVERRIDE'):
THREAD_LOCAL.DB_FOR_WRITE_OVERRIDE = []
read_db = self.database if self.read else None
write_db = self.database if self.write else None
THREAD_LOCAL.DB_FOR_READ_OVERRIDE.append(read_db)
THREAD_LOCAL.DB_FOR_WRITE_OVERRIDE.append(write_db)
return self
def __exit__(self, exc_type, exc_value, traceback):
THREAD_LOCAL.DB_FOR_READ_OVERRIDE.pop()
THREAD_LOCAL.DB_FOR_WRITE_OVERRIDE.pop()
if self.created_db_config:
connections[self.unique_db_id].close()
del connections.databases[self.unique_db_id]
def __call__(self, querying_func):
@wraps(querying_func)
def inner(*args, **kwargs):
# Call the function in our context manager
with self:
return querying_func(*args, **kwargs)
return inner
| 34.410853 | 86 | 0.655553 |
7c22163640c3f4b4446079bf506eaa8423a88d9f | 12,002 | py | Python | 4. stacking_test.py | galgodon/auroral_lines | 9c322a93c6d95c672a89a420cc72c52e71663e09 | [
"MIT"
] | null | null | null | 4. stacking_test.py | galgodon/auroral_lines | 9c322a93c6d95c672a89a420cc72c52e71663e09 | [
"MIT"
] | null | null | null | 4. stacking_test.py | galgodon/auroral_lines | 9c322a93c6d95c672a89a420cc72c52e71663e09 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Fri Mar 8 12:46:05 2019
@author: Gerome Algodon
"""
#%%
# =============================================================================
# Import data
# =============================================================================
from astropy.io import fits
import numpy as np
import matplotlib.pyplot as plt
import astropy.constants # this is just used to get the speed of light c
# Speed of light in km/s, used for all velocity <-> redshift conversions.
c_vel = astropy.constants.c.to('km/s').value

# Master table: one row per spaxel, including the per-quantity mask columns.
spaxel_data_table = fits.open('data/spaxel_data_table.fits')

# Spaxel-index tables for the three strong-line bins and their control bins.
bin1,bin1_control = fits.open('data/Bin_1.fits'),fits.open('data/Bin_1_control.fits')
bin2,bin2_control = fits.open('data/Bin_2.fits'),fits.open('data/Bin_2_control.fits')
bin3,bin3_control = fits.open('data/Bin_3.fits'),fits.open('data/Bin_3_control.fits')

# Matching flux/ivar/mask spectra for each bin and control bin.
flux_bin1, flux_bin1_control = fits.open('data/flux_bin_1.fits'), fits.open('data/flux_bin_1_control.fits')
flux_bin2, flux_bin2_control = fits.open('data/flux_bin_2.fits'), fits.open('data/flux_bin_2_control.fits')
flux_bin3, flux_bin3_control = fits.open('data/flux_bin_3.fits'), fits.open('data/flux_bin_3_control.fits')
def get_data(name, mask_name):
    """Return column `name` of the spaxel table, masked where `mask_name` > 0."""
    values = spaxel_data_table[1].data[name]
    bad = spaxel_data_table[1].data[mask_name] > 0
    return np.ma.MaskedArray(values, mask=bad)
#spaxel_data_table[1].data.columns # Use this to see column names
#%%
# =============================================================================
# Francesco's Drizzle Code
# =============================================================================
def _specbin( wl):
""" given wavelength vector wl, return wl_lo and wl_hi limits of wavelength bins"""
nwl = len(wl)
dwl_lo = wl-np.roll(wl, 1)
dwl_lo[0] = dwl_lo[1]
dwl_hi = np.roll(wl, -1)-wl
dwl_hi[nwl-1] = dwl_hi[nwl-2]
# find limits of spectral bins
wl_lo = wl-dwl_lo/2.0
wl_hi = wl+dwl_hi/2.0
return wl_lo, wl_hi
def specdrizzle_fast(wl, spec, z, wldrz, mask=None, flux=True):
    """
    Drizzles the spectrum into a new rest frame wavelength vector.
    Fully supports masks.
    THIS IS THE FASTER VERSION INSPIRED BY Carnall 2017

    wl    - input wavelength
    spec  - input spectrum
    z     - redshift
    wldrz - output rest frame wavelength
    spdrz - output drizzled spectrum
    spwgt - output weight (fraction of each new bin covered by good pixels)
    mask  - mask of good pixels (value=0 is good, nonzero is masked)
    flux  - if True, total flux of the spectrum is conserved in the drizzling
            procedure (spectrum is multiplied by 1+z)
    """
    # length of input and output wavelength vectors
    # nwl = len(wl)
    nwldrz = len(wldrz)
    if nwldrz < 2:
        raise ValueError('output wavelength grid must have at least 2 elements')
    # initialize output spectrum
    spdrz = np.zeros(len(wldrz))
    spwgt = np.zeros(len(wldrz))
    # all pixels good if mask not defined
    if mask is None:
        mask = spec*0.
        print('not masking')
    if flux==True:
        # conserve flux after redshift correction
        specz = spec*(1.0+z)
        # print 'conserving flux'
    else:
        specz = spec
    # Good-pixel indicator: 1 where the input mask is 0, else 0.
    mask2=np.where(mask==0, 1, 0)
    wlz1 = np.zeros(wl.shape[0])
    dwlz = np.zeros(wl.shape[0])
    # Shift the input grid to the rest frame (rebinds the local name only;
    # the caller's array is not modified).
    wl=wl/(1.0+z)
    # Edges and widths of the (rest-frame) input bins ...
    wlz1, wl_hi= _specbin(wl)
    dwlz = wl_hi - wlz1
    # ... and of the output bins.
    wldrz1, wldrz_hi= _specbin(wldrz)
    dwldrz= wldrz_hi - wldrz1
    # Calculate the new spectral flux and uncertainty values, loop over the new bins
    for j in range(wldrz.shape[0]):
        # Find the first old bin which is partially covered by the new bin
        # while wlz1[start+1] <= wldrz1[j]:
        #     start += 1
        start_v = np.where(wl_hi > wldrz1[j])[0]
        if len(start_v) == 0:
            # New bin lies entirely redward of the input grid: no coverage.
            spdrz[j]=0.
            spwgt[j] =0
            continue
        else:
            start=start_v[0]
        # Find the last old bin which is partially covered by the new bin
        # while (stop <= len(wlz1)) and (wlz1[stop+1] < wldrz1[j+1]):
        #     stop += 1
        stop = np.where(wlz1 <= wldrz_hi[j])[0][-1]
        # print start, stop
        #
        # If the new bin falls entirely within one old bin they are the same
        # the new flux and new error are the same as for that bin
        if stop == start:
            spdrz[j]= specz[start]*mask2[start]
            spwgt[j] = mask2[start]
        # Otherwise multiply the first and last old bin widths by P_ij,
        # all the ones in between have P_ij = 1
        else:
            # Fractional overlap of the first and last input bins with the
            # new bin; interior bins are fully covered.
            start_factor = (wl_hi[start] - wldrz1[j])/(dwlz[start])
            end_factor = (wldrz_hi[j] - wlz1[stop])/(dwlz[stop])
            # Temporarily scale the edge-bin widths by their overlap fraction
            # so the sums below weight them correctly.
            dwlz[start] *= start_factor
            dwlz[stop] *= end_factor
            # Populate the resampled_fluxes spectrum and uncertainty arrays
            spwgt[j] = np.sum(dwlz[start:stop+1]*mask2[start:stop+1])/dwldrz[j]
            if spwgt[j]>0:
                spdrz[j] = np.sum(dwlz[start:stop+1]*specz[start:stop+1]*mask2[start:stop+1])/ \
                    np.sum(dwlz[start:stop+1]*mask2[start:stop+1])
            # if spec_errs is not None:
            #     resampled_fluxes_errs[...,j] = np.sqrt(np.sum((spec_widths[start:stop+1]*spec_errs[...,start:stop+1])**2, axis=-1))/np.sum(spec_widths[start:stop+1])
            #
            # Put back the old bin widths to their initial values for later use
            dwlz[start] /= start_factor
            dwlz[stop] /= end_factor
    return spdrz, spwgt
#%%
# =============================================================================
# Stack
# =============================================================================
def drizzle(bin_num,flux_num,subbin_num):  # subbin_num: [x,y] x = bin number, y = subbin number
    """Drizzle every spaxel spectrum of one sub-bin onto a common rest-frame
    wavelength grid.

    Returns (rest-frame wavelength, masked drizzled flux array, masked
    drizzled variance array), each flux/variance row corresponding to one
    spaxel of the sub-bin.
    """
    ### Get wavelength array
    wave = flux_num[0].data
    # NOTE(review): the < 8950 cutoff presumably trims the red end where
    # rest-frame shifting would run past the observed coverage — confirm.
    wave_n = wave[wave < 8950]  # new wavelength array for the shifted data
    ### Pull data we need from the fits files
    # Galaxy redshift of each spaxel in the sub-bin (velocity / c).
    z = (spaxel_data_table[1].data['z_vel'][bin_num['{}_{}'.format(subbin_num[0],subbin_num[1])].data.astype(int)])/c_vel
    # Stellar velocity per spaxel, and the sub-bin's average velocity offset.
    stell_vel = get_data('stell_vel','stell_vel_mask')[bin_num['{}_{}'.format(subbin_num[0],subbin_num[1])].data.astype(int)]
    vel_off = bin_num['AVG_OFFSET_SUBBIN'].data[subbin_num[1]-1]
    # ha_vel = get_data('ha_vel','ha_vel_mask')[bin_num['{}_{}'.format(subbin_num[0],subbin_num[1])].data.astype(int)]
    flux = np.ma.MaskedArray(flux_num['FLUX_SUBBIN_{}'.format(subbin_num[1])].data,
                             mask=flux_num['MASK_SUBBIN_{}'.format(subbin_num[1])].data>0)
    # Variance = 1 / inverse variance (IVAR), with the same mask applied.
    var = np.ma.power(np.ma.MaskedArray(flux_num['IVAR_SUBBIN_{}'.format(subbin_num[1])].data,
                                        mask=flux_num['MASK_SUBBIN_{}'.format(subbin_num[1])].data>0),-1)
    # Output buffers: one drizzled spectrum (and weight map) per spaxel.
    flux_n = np.zeros(( len(flux) , len(wave_n) ))  # len(flux) is the nummber of spaxels in the bin
    f_weight = np.zeros(( len(flux) , len(wave_n) ))  # and len(wave_n) is the length of the new wave array
    var_n = np.zeros(( len(flux) , len(wave_n) ))
    v_weight = np.zeros(( len(flux) , len(wave_n) ))
    for p in range(len(flux)):
        # Combined redshift at pixel 'p': galaxy redshift plus the spaxel's
        # stellar velocity relative to the sub-bin average offset.
        zp = (1+(stell_vel[p] - vel_off)/c_vel)*(1+z[p])-1  # redshift at pixel 'p' note: ha_not_masked[p] = Velocity at pixel 'p'
        flux_n[p,:], f_weight[p,:] = specdrizzle_fast(wave, flux.data[p,:], zp, wave_n,mask=flux.mask[p,:].astype(int))
        var_n[p,:], v_weight[p,:] = specdrizzle_fast(wave, var.data[p,:], zp, wave_n,mask=var.mask[p,:].astype(int))
    # Mask output pixels that received no contribution (zero weight).
    flux_driz = np.ma.MaskedArray(flux_n, mask=f_weight==0)
    var_driz = np.ma.MaskedArray(var_n, mask=v_weight==0)
    return wave_n, flux_driz, var_driz
def stack(bin_num, flux_num, subbin_num):
    """Drizzle the sub-bin spectra, normalize each by its 6000-6100 A median
    continuum, and average them into one stacked spectrum.

    Returns (wavelength, stacked normalized flux, variance of the stack).
    """
    wave, flux_driz, var_driz = drizzle(bin_num, flux_num, subbin_num)
    continuum = (wave > 6000) & (wave < 6100)
    norm = np.median(flux_driz[:, continuum].data, axis=1)[:, None]
    flux_norm = flux_driz / norm
    var_norm = var_driz / norm ** 2
    stacked_flux = np.ma.average(flux_norm, axis=0)
    n_good = np.sum(np.invert(var_norm.mask))
    stacked_var = np.ma.sum(var_norm, axis=0) / n_good ** 2
    return wave, stacked_flux, stacked_var
#%%
#"""
# Stack bin 1 / sub-bin 6 and its control sample.
wave, stack_flux_1_6, var = stack(bin1,flux_bin1,[1,6])
wave, stack_flux_c_1_6, var = stack(bin1_control,flux_bin1_control,[1,6])
#%% Strong - control
plt.figure()
plt.plot(wave,stack_flux_1_6,label='Bin 1')
plt.plot(wave,stack_flux_c_1_6,label='Control Bin 1')
plt.xlim((wave[0],wave[-1]))
plt.title('Normalized Stacked spectra: Bin 1 / Sub-bin 6')
plt.legend()
plt.show()

# Difference spectrum (strong-line stack minus control stack).
plt.figure()
plt.plot(wave,stack_flux_1_6-stack_flux_c_1_6)
plt.xlim((wave[0],wave[-1]))
plt.title('Bin 1.6 - Control Bin 1.6')
plt.show()
#%% Looking for auroral lines
# [OIII] 4363
plt.figure()
plt.plot(wave,stack_flux_1_6-stack_flux_c_1_6)
plt.axvline(4363,label='OIII',ls='dashed',c='k')
plt.xlim((4100,4600))  # lim from renbin's paper fig 10
plt.ylim((-0.10,0.05))
plt.title('Bin 1.6 - Control Bin 1.6')
plt.legend()
plt.show()

# [NII] 5755
plt.figure()
plt.plot(wave,stack_flux_1_6-stack_flux_c_1_6)
plt.axvline(5755,label='NII',ls='dashed',c='k')
plt.xlim((5500,6000))  # lim from renbin's paper fig 10
plt.ylim((-0.1,0.1))
plt.title('Bin 1.6 - Control Bin 1.6')
plt.legend()
plt.show()

# [OII] 7320,7330 doublet
plt.figure()
plt.plot(wave,stack_flux_1_6-stack_flux_c_1_6)
plt.axvline(7320,label='OII',ls='dashed',c='k')
plt.axvline(7330,ls='dashed',c='k')
plt.xlim((7100,7600))  # lim from renbin's paper fig 10
plt.ylim((-0.05,0.3))
plt.title('Bin 1.6 - Control Bin 1.6')
plt.legend()
plt.show()

# [SII] 4068,4076 doublet
plt.figure()
plt.plot(wave,stack_flux_1_6-stack_flux_c_1_6)
plt.axvline(4068,label='SII',ls='dashed',c='k')
plt.axvline(4076,ls='dashed',c='k')
plt.xlim((3900,4300))  # lim from renbin's paper fig 10
plt.ylim((-0.10,0.05))
plt.title('Bin 1.6 - Control Bin 1.6')
plt.legend()
plt.show()
#%% Checking specific wavelengths for strong line
# 3969 3934 stellar Ca H K lines after change these shouldn't be centered but gas should
# Ha = 6564, OIII = 5008
plt.figure()
check = 3969  # check this wavelength
p_m = 200  # window will be check+/-p_m
# Bug fix: this cell previously plotted `stack_flux_norm_1_6`, a name that is
# never defined anywhere in this script (the stacked spectrum computed above
# is `stack_flux_1_6`), so running the cell raised a NameError.
plt.plot(wave, stack_flux_1_6)
plt.axvline(check, label=r'{} $\AA$'.format(check), ls='dashed', c='k')
plt.xlim((check - p_m, check + p_m))
plt.title('Bin 1.6')
plt.legend()
plt.show()
#%% Checking specific wavelengths for difference
plt.figure()
check = 3969  # check this wavelength
p_m = 200  # window will be check+/-p_m
plt.plot(wave,stack_flux_1_6-stack_flux_c_1_6)
plt.axvline(check,label=r'{} $\AA$'.format(check),ls='dashed',c='k')
plt.xlim((check-p_m,check+p_m))
plt.title('Bin 1.6 - Control Bin 1.6')
plt.legend()
plt.show()
#%%
#def zshift(z,r):  # Recall z = (obs-rest)/rest so therefore z*rest+rest = obs
#    return z*r+r
#
#zshift(-(-275.01967108)/c_vel,5008)  # the negative is bec i do stel-gas when it should've been
#                                     # I got -275.01967108 from table_analysis.py
#%%
# Same checks for bin 1 / sub-bin 21 and its control sample.
wave, stack_flux_1_21, var = stack(bin1,flux_bin1,[1,21])
wave, stack_flux_c_1_21, var = stack(bin1_control,flux_bin1_control,[1,21])
#%% Checking specific wavelengths for strong line
# 3969 3934 stellar Ca H K lines after change these shouldn't be centered but gas should
# Ha = 6564, OIII = 5008
plt.figure()
check = 3969  # check this wavelength
p_m = 200  # window will be check+/-p_m
plt.plot(wave,stack_flux_1_21)
plt.axvline(check,label=r'{} $\AA$'.format(check),ls='dashed',c='k')
plt.xlim((check-p_m,check+p_m))
plt.title('Bin 1.21')
plt.legend()
plt.show()
#%% Checking specific wavelengths for difference
plt.figure()
check = 6564  # check this wavelength
p_m = 200  # window will be check+/-p_m
plt.plot(wave,stack_flux_1_21-stack_flux_c_1_21)
plt.axvline(check,label=r'{} $\AA$'.format(check),ls='dashed',c='k')
plt.xlim((check-p_m,check+p_m))
plt.title('Bin 1.21 - Control Bin 1.21')
plt.legend()
plt.show()
#%%
"""
| 36.591463 | 167 | 0.605316 |
e698d04e2b0e670d43ec5ea1904a6fd8af54d667 | 1,504 | py | Python | src/util/paths.py | cfloressuazo/spike-challenge | b060747431fad60e502af7e8604513597b45fb18 | [
"MIT"
] | null | null | null | src/util/paths.py | cfloressuazo/spike-challenge | b060747431fad60e502af7e8604513597b45fb18 | [
"MIT"
] | null | null | null | src/util/paths.py | cfloressuazo/spike-challenge | b060747431fad60e502af7e8604513597b45fb18 | [
"MIT"
] | null | null | null | import os
def get_root_path(root_dir_name: str, max_parent_directories: int = 10) -> str:
    """Return the absolute path of the nearest enclosing directory named *root_dir_name*.

    Walks upward from this file's directory, inspecting at most
    ``max_parent_directories`` levels (including the starting directory).

    :param root_dir_name: basename of the repository root directory to locate
    :param max_parent_directories: maximum number of directory levels to check
        before giving up (guards against walking forever past the filesystem root)
    :return: absolute path whose basename equals ``root_dir_name``
    :raises LookupError: if no matching ancestor is found within the limit
    """
    current_path = os.path.abspath(os.path.dirname(__file__))
    for _ in range(max_parent_directories):
        if os.path.basename(current_path) == root_dir_name:
            return current_path
        current_path = os.path.dirname(current_path)
    # Include enough context in the error for the caller to diagnose the miss
    # (the original bare `raise LookupError` carried no information).
    raise LookupError(
        "No parent directory named {!r} found within {} levels of {}".format(
            root_dir_name, max_parent_directories,
            os.path.abspath(os.path.dirname(__file__))))
# Name of the repository root directory that get_root_path() searches for.
ROOT_NAME = 'spike-challenge'
# NOTE(review): "RESPOSITORY" is a typo for "REPOSITORY"; kept unchanged because
# other modules may import this name — renaming would break them.
PATH_RESPOSITORY_ROOT = get_root_path(ROOT_NAME)
# Directory-name building blocks for the data tree layout:
#   <repo>/data/raw, <repo>/data/formatted, <repo>/data/formatted/static
DIRNAME_DATA = 'data'
DIRNAME_DATA_RAW = 'raw'
DIRNAME_DATA_FORMATTED = 'formatted'
DIRNAME_STATIC_FILES = 'static'
# Fully-resolved data paths derived from the repository root.
PATH_DATA = os.path.realpath(os.path.join(PATH_RESPOSITORY_ROOT, DIRNAME_DATA))
PATH_DATA_RAW = os.path.realpath(os.path.join(PATH_DATA, DIRNAME_DATA_RAW))
PATH_DATA_FORMATTED = os.path.realpath(os.path.join(PATH_DATA, DIRNAME_DATA_FORMATTED))
PATH_DATA_STATIC = os.path.realpath(os.path.join(PATH_DATA, DIRNAME_DATA_FORMATTED, DIRNAME_STATIC_FILES))
# Parent of this module's package directory (two levels up from this file);
# used as the base for configs and outputs below.
PATH_CURRENT_REPOSITORY = os.path.dirname(os.path.dirname(__file__))
# Path to configs file folder
DIRNAME_CONFIG_FILES = 'configs'
PATH_CONFIG = os.path.realpath(os.path.join(PATH_CURRENT_REPOSITORY, DIRNAME_CONFIG_FILES))
# Path to models folder
DIRNAME_MODELS = 'models'
# PATH_MODELS = os.path.realpath(os.path.join(PATH_CURRENT_REPOSITORY, DIRNAME_MODELS))
# NOTE: models live under the repository root, not under the package directory
# (the commented-out line above is the superseded variant).
PATH_MODELS = os.path.realpath(os.path.join(PATH_RESPOSITORY_ROOT, DIRNAME_MODELS))
# Path to outputs folder
DIRNAME_OUTPUTS = 'outputs'
PATH_OUTPUTS = os.path.realpath(os.path.join(PATH_CURRENT_REPOSITORY, DIRNAME_OUTPUTS))
8c844a5a6314208efcfe34de767528bf93b992b2 | 8,809 | py | Python | docs/discord/update.py | nishantkr18/webots | 6ab631adb2e69906fc251cc77d690e67195de222 | [
"Apache-2.0"
] | null | null | null | docs/discord/update.py | nishantkr18/webots | 6ab631adb2e69906fc251cc77d690e67195de222 | [
"Apache-2.0"
] | null | null | null | docs/discord/update.py | nishantkr18/webots | 6ab631adb2e69906fc251cc77d690e67195de222 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 1996-2020 Cyberbotics Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import discord
import os
import re
# Default Discord channels to archive; may be overridden at runtime by the
# --channels command-line flag (see the argparse block at the bottom).
channels = [
    'news',
    'technical-questions',
    'development',
    'documentation'
]
# Per-author message counts, filled in while exporting channel histories and
# reported as a "top contributors" list after the client finishes.
contributors = {}
class MyClient(discord.Client):
    """Discord client that archives the selected channels to Markdown files.

    On connect (`on_ready`) it writes an `index.md` / `menu.md` pair next to
    this script and one `<channel>.md` archive per channel listed in the
    module-level `channels` list, then disconnects.
    """

    async def export_channel(self, channel):
        """Write the full message history of `channel` to `<channel.name>.md`.

        Messages are converted from Discord markdown to the documentation's
        Markdown dialect (code fences normalized, underscores escaped, URLs
        linkified, mentions resolved, attachments embedded or linked).

        NOTE(review): `channel.history(limit=None)` with no `oldest_first`
        argument presumably iterates newest-first, so the archive is written in
        reverse chronological order — confirm this is the intended layout.
        """
        year = None
        path = os.path.dirname(os.path.abspath(__file__))
        with open(os.path.join(path, channel.name + '.md'), 'w', encoding='utf-8') as file:
            file.write(u'# %s\n\n' % channel.name.title())
            file.write(u'This is an archive of the `%s` channel of the ' % channel.name +
                       '[Webots Discord server](https://discordapp.com/invite/nTWbN9m).\n\n')
            previousMessageUser = None
            async for message in channel.history(limit=None):
                if message.type == discord.MessageType.default and (message.content or message.attachments):
                    # statistics: count every archived message per author.
                    # Fixed off-by-one: the first message of an author used to be
                    # recorded as 0, undercounting every contributor by one.
                    if message.author.name not in contributors:
                        contributors[message.author.name] = 1
                    else:
                        contributors[message.author.name] += 1
                    # yearly section
                    if year is None or year != message.created_at.year:
                        year = message.created_at.year
                        file.write(u'## %d\n\n' % year)
                    # author + date header (only when the author changes)
                    if previousMessageUser != message.author:
                        previousMessageUser = message.author
                        roles = []
                        if hasattr(message.author, 'roles'):
                            for role in message.author.roles:
                                if role.name != '@everyone':
                                    roles.append(role.name)
                        file.write(u'##### %s %s%s\n' %
                                   (message.author.name.replace('_', '\\_'),
                                    '[%s] ' % '-'.join(roles) if roles else '',
                                    message.created_at.strftime("%m/%d/%Y %H:%M:%S")))
                    else:
                        file.write('\n')
                    if message.content:
                        content = ''
                        # read message line by line
                        inCode = False
                        for line in message.content.splitlines():
                            # remove wrongly used multi-line code
                            for start, code, end in re.findall(r'([^`]*)```([^`]*)```([^`]*)', line):
                                line = '%s`%s`%s' % (start, code, end)
                            # multi-line code
                            if '```' in line:
                                inCode = not inCode
                                # make sure it is on a dedicated line
                                if inCode and not line.startswith('```'):
                                    line = line.replace('```', '\n```')
                                if not inCode and len(line) > 3:
                                    line = line.replace('```', '\n```')
                            # not inside a multi-line code
                            if not inCode:
                                # remove problematic parts
                                line = line.replace('<i>', '`<i>`')
                                if line.startswith('#') or line.startswith('> #'):
                                    line = line.replace('#', '\\#')
                                # protect underscores (outside inline code spans)
                                undescoreProtected = False
                                for start, code, end in re.findall(r'([^`]*)`([^`]*)`([^`]*)', line):
                                    line = line.replace(start, start.replace('_', '\\_'))
                                    line = line.replace(end, end.replace('_', '\\_'))
                                    undescoreProtected = True
                                if not undescoreProtected:
                                    line = line.replace('_', '\\_')
                                # make url links (undo underscore escaping inside the target)
                                regex = r'(?P<url>https?://([\w-]+(?:(?:\.[\w-]+)+))([\w.,\\@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?)'
                                for url in re.findall(regex, line):
                                    urlString = url[0]
                                    line = line.replace(urlString, '[%s](%s)' % (urlString, urlString.replace('\\_', '_')))
                                line += '\n'
                            # add line to the content
                            content += line + '\n'
                        # remove last new line
                        content = content[:-2]
                        # replace mention by actual name
                        for mention in message.mentions:
                            alternativeMention = mention.mention.replace('<@', '<@!')
                            content = content.replace(alternativeMention, '`@' + mention.name + '`')
                            content = content.replace(mention.mention, '`@' + mention.name + '`')
                        file.write(content)
                    # add attachments: embed images, link everything else
                    for attachment in message.attachments:
                        name, extension = os.path.splitext(attachment.filename)
                        if extension.lower() in ['.bmp', '.gif', '.jpg', '.jpeg', '.png']:
                            file.write(u'\n%figure\n')
                            # Fixed: the format string had lost its placeholders
                            # (`u'\n' % (...)` raised TypeError); restored the
                            # Markdown image write the %figure block expects.
                            file.write(u'![%s](%s)\n' % (attachment.filename, attachment.url))
                            file.write(u'%end')
                        else:
                            file.write(u'\n> **Attachment**: [%s](%s)' % (attachment.filename.replace('_', '\\_'),
                                                                          attachment.url))
                    file.write(u'\n\n')
                elif message.type == discord.MessageType.pins_add or message.type == discord.MessageType.new_member:
                    # system messages: nothing to archive
                    pass
                else:
                    print("\033[33mUnsupported message type:" + str(message.type) + '\033[0m')
                    print("\033[33m\tContent:" + str(message.content) + '\033[0m')

    async def on_ready(self):
        """Write index.md/menu.md, export every selected channel, then disconnect."""
        path = os.path.dirname(os.path.abspath(__file__))
        with open(os.path.join(path, 'index.md'), 'w', encoding='utf-8') as file:
            file.write(u'# Webots Discord Archives\n\n')
            file.write(u'Release {{ webots.version.full }}\n\n')
            file.write(u'%figure\n')
            # NOTE(review): this %figure block writes only a blank line — an
            # image line was likely lost here; confirm against the rendered docs.
            file.write(u'\n')
            file.write(u'%end\n\n')
            file.write(u'Copyright © {{ date.year }} Cyberbotics Ltd.\n\n')
            file.write(u'These are archives of the [Webots Discord server](https://discordapp.com/invite/nTWbN9m):\n')
            with open(os.path.join(path, 'menu.md'), 'w', encoding='utf-8') as menuFile:
                for channel in self.get_all_channels():
                    if type(channel) == discord.channel.TextChannel and channel.name in channels:
                        file.write(u'- [%s](%s)\n' % (channel.name.title(), channel.name + '.md'))
                        menuFile.write(u'- [%s](%s)\n' % (channel.name.title(), channel.name + '.md'))
                        await self.export_channel(channel)
        await self.close()

    async def on_message(self, message):
        """Log any message received while the client is connected."""
        print('Message from {0.author}: {0.content}'.format(message))
# Command-line interface: a Discord bot token is mandatory; the channel list
# is optional and overrides the module-level default above.
parser = argparse.ArgumentParser(description='Update the Webots discord doc.')
parser.add_argument('--token', '-t', dest='token', help='Specifies the Discord token', required=True)
parser.add_argument('--channels', '-c', dest='channels', nargs='+', help='list of channel to export')
args = parser.parse_args()
client = MyClient()
if args.channels is not None:
    channels = args.channels
# Blocks until on_ready() finishes exporting and closes the client.
client.run(args.token)
# display statistics
# `contributors` was filled during export; sort ascending by count then
# reverse so the most prolific authors come first.
contributors = sorted(contributors.items(), key=lambda kv: kv[1])
contributors.reverse()
print('Top 20 contributors:')
for contributor in contributors[0:20]:
    print(' - %s (%d)' % contributor)
| 52.748503 | 125 | 0.473266 |
18d130131aa4d3f7a05f6de3198a021a99e43193 | 9,401 | py | Python | qc_tests/odd_cluster.py | rjhd2/HadISD_v3 | 420ebb3c965b61ad394f15b0f71a37b73985a97d | [
"BSD-3-Clause"
] | 3 | 2020-05-27T10:54:15.000Z | 2021-01-13T08:40:27.000Z | qc_tests/odd_cluster.py | rjhd2/HadISD_v3 | 420ebb3c965b61ad394f15b0f71a37b73985a97d | [
"BSD-3-Clause"
] | null | null | null | qc_tests/odd_cluster.py | rjhd2/HadISD_v3 | 420ebb3c965b61ad394f15b0f71a37b73985a97d | [
"BSD-3-Clause"
] | 1 | 2021-01-13T08:40:34.000Z | 2021-01-13T08:40:34.000Z | #!/usr/local/sci/bin/python
#*****************************
#
# QC checks.
#
#
#************************************************************************
# SVN Info
#$Rev:: 219 $: Revision of last commit
#$Author:: rdunn $: Author of last commit
#$Date:: 2019-05-20 16:56:47 +0100 (Mon, 20 May 2019) $: Date of last commit
#************************************************************************
import numpy as np
import scipy as sp
import datetime as dt
# RJHD routines
import qc_utils as utils
#*********************************************
class OddCluster:
    '''
    Container for the state of a potential isolated cluster of observations.

    Attributes mirror the constructor arguments: the cluster's start and end
    timestamps, its observation count and their locations, the missing-data
    indicator value and the timestamp of the last good observation seen.
    '''
    def __init__(self, start, end, length, locations, data_mdi, last_data):
        # Record every piece of cluster state on the instance.
        for attribute, value in (('start', start), ('end', end),
                                 ('length', length), ('locations', locations),
                                 ('data_mdi', data_mdi), ('last_data', last_data)):
            setattr(self, attribute, value)
    def __str__(self):
        # Human-readable one-line summary of the cluster extent.
        return "odd cluster, starting %s, ending %s, length %s" % (
            self.start, self.end, self.length)
    __repr__ = __str__
#*********************************************
def oc_plots(station, cluster, time, start, indata, variable, oc_details):
    '''
    Plot an odd cluster (red) highlighted against +/- 10 days of
    surrounding data (blue).

    :param MetVar station: station object
    :param OddCluster cluster: cluster object (start of the plot window)
    :param int time: timestamp (end of the plot window)
    :param datetime start: start of dataseries
    :param masked array indata: input data
    :param string variable: variable name (selects the y-axis label)
    :param OddCluster oc_details: cluster whose locations are highlighted.
        NOTE(review): callers pass the same object for both `cluster` and
        `oc_details` — the two parameters look redundant; signature kept
        unchanged for compatibility.

    :returns:
    '''
    # Imported locally so matplotlib is only required when plotting is enabled.
    import matplotlib.pyplot as plt
    # Axis labels per observed variable.
    YLABELS = {"temperatures":"Temperature (C)", "dewpoints":"Dewpoints (C)", "slp":"SLP (hPa)", "windspeeds":"Wind Speed (m/s)"}
    # Window spans from 10 days (in hours) before the cluster start to
    # 10 days after the current timestamp; clamp to the series start.
    plot_start, plot_end = cluster.locations[0] - 10*24 , time + 10*24
    if plot_start < 0 : plot_start = 0
    plot_times = utils.times_hours_to_datetime(station.time.data[plot_start: plot_end], start)
    plt.clf()
    # All observations in the window in blue, cluster members overplotted in red.
    plt.plot(plot_times, indata[plot_start: plot_end], 'bo')
    plt.plot(plot_times[np.array(oc_details.locations) - plot_start], indata[oc_details.locations], 'ro')
    plt.ylim(utils.sort_ts_ylim(indata[plot_start: plot_end]))
    plt.ylabel(YLABELS[variable])
    plt.show()
    return # oc_plots
#*********************************************
def occ_normal(cluster, obs_type, time, flags):
    '''
    Handle a regular observation that is not part of any cluster.

    Only the timestamp of the most recent good observation needs updating;
    the remaining arguments are accepted purely so every occ_* handler
    shares the same call signature.

    :param OddCluster cluster: cluster state object
    :param int obs_type: observation type indicator (returned unchanged)
    :param int time: timestamp of the current observation
    :param array flags: flag array (unused here)
    :returns: tuple of (cluster, obs_type)
    '''
    # Remember when data was last seen so later gap lengths can be measured.
    cluster.last_data = time
    return cluster, obs_type # occ_normal
#*********************************************
def occ_in_cluster(cluster, obs_type, time, flags):
    '''
    Process an observation while inside a potential cluster: either the
    cluster has grown beyond the allowed size/span and is abandoned, or the
    observation is appended to it.  (Not all inputs used here, but required
    for consistency with the other occ_* handlers.)

    :param OddCluster cluster: cluster state object
    :param int obs_type: observation type indicator
    :param int time: timestamp of the current observation
    :param array flags: flag array (unused here)
    :returns: tuple of (cluster, obs_type)
    '''
    too_many_obs = cluster.length == 6
    too_long_span = (time - cluster.start) > 24.
    if too_many_obs or too_long_span:
        # More than 6 observations or spanning over 24h: not a cluster,
        # so wipe all state back to the missing-data indicator.
        missing = cluster.data_mdi
        cluster.start = missing
        cluster.end = missing
        cluster.length = 0
        cluster.locations = missing
        cluster.last_data = missing
        obs_type = 0
    else:
        # Still a candidate cluster: extend it with this observation.
        cluster.length += 1
        cluster.locations += [time]
        cluster.end = time
    return cluster, obs_type # occ_in_cluster
#*********************************************
def occ_start_cluster(cluster, obs_type, time, flags):
    '''
    Data has resumed after a gap: decide whether the gap was long enough
    (>= 48h) for this observation to open a new candidate cluster.
    (Not all inputs used here, but required for consistency with the other
    occ_* handlers.)

    :param OddCluster cluster: cluster state object
    :param int obs_type: observation type indicator
    :param int time: timestamp of the current observation
    :param array flags: flag array (unused here)
    :returns: tuple of (cluster, obs_type)
    '''
    preceding_gap = time - cluster.last_data
    if preceding_gap >= 48:
        # At least a 48h gap beforehand: this observation opens a cluster.
        cluster.start = time
        cluster.end = time
        cluster.locations = [time]
        cluster.length += 1
        obs_type = 2
    else:
        # Gap too small: treat as ordinary data and clear any cluster state.
        missing = cluster.data_mdi
        cluster.last_data = time
        cluster.start = missing
        cluster.end = missing
        cluster.length = 0
        cluster.locations = 0
        obs_type = 1
    return cluster, obs_type # occ_start_cluster
#*********************************************
def occ_after_cluster(cluster, obs_type, time, flags):
    '''
    Data has resumed after a gap that followed a candidate cluster.  If the
    trailing gap is >= 48h the cluster was sufficiently isolated and its
    members are flagged; otherwise the cluster either keeps growing (still
    within the 6-observation / 24h thresholds) or is discarded as normal data.

    :param OddCluster cluster: cluster state object
    :param int obs_type: observation type indicator
    :param int time: timestamp of the current observation
    :param array flags: flag array (written to when the cluster is flagged)
    :returns: tuple of (cluster, obs_type)
    '''
    trailing_gap = time - cluster.end
    if trailing_gap >= 48:
        # Isolated cluster with a 48h gap on either side: flag its members.
        # (Plotting of flagged clusters happens in the caller.)
        flags[cluster.locations] = 1
        # This observation starts a fresh cluster, since a 48h gap preceded it.
        cluster.last_data = cluster.end
        cluster.length = 1
        cluster.locations = [time]
        cluster.start = time
        cluster.end = time
        obs_type = 2
    elif (time - cluster.start <= 24) and (cluster.length < 6):
        # Cluster is still small and within its 24h window: keep growing it.
        cluster.end = time
        cluster.locations += [time]
        cluster.length += 1
        obs_type = 2
    else:
        # Data has genuinely resumed: reset to the normal-observation state.
        missing = cluster.data_mdi
        cluster.last_data = time
        cluster.start = missing
        cluster.end = missing
        cluster.length = 0
        cluster.locations = 0
        obs_type = 0
    return cluster, obs_type # occ_after_cluster
#*********************************************
def occ(station, variable_list, flag_col, datastart, logfile, diagnostics = False, plots = False):
    '''
    Check for odd clusters of data surrounded by missing:
    up to 6hr/24hr surrounded by at least 48 on each side.

    Drives a small state machine over each variable's time series; the
    per-observation work is delegated to the occ_* handlers, selected by
    the current `obs_type` state (0=normal, 1=after gap, 2=in cluster,
    3=gap after cluster).

    :param MetVar station: the station object
    :param list variable_list: list of observational variables to process
    :param list flag_col: the columns to set on the QC flag array
    :param datetime datastart: dataset start time
    :param file logfile: logfile to store outputs
    :param bool diagnostics: do extra verbose output
    :param bool plots: do plots
    :returns:
    '''
    # the four options of what to do with each observation
    # the keys give values which are subroutines, and can be called
    # all subroutines have to take the same set of inputs
    options = {0 : occ_normal, 1 : occ_start_cluster, 2 : occ_in_cluster, 3 : occ_after_cluster}
    for v,variable in enumerate(variable_list):
        st_var = getattr(station, variable)
        filtered_data = utils.apply_filter_flags(st_var)
        var_flags = station.qc_flags[:,flag_col[v]]
        # Baseline for the flagged-observation count reported below.
        prev_flag_number = 0
        # using IDL copy as method to ensure reproducibility (initially)
        oc_details = OddCluster(st_var.mdi, st_var.mdi, 0, st_var.mdi, st_var.mdi, -1)
        obs_type = 1
        for time in station.time.data:
            # NOTE(review): assumes `filtered_data.mask` is a per-element array;
            # a scalar mask (fully unmasked array) would break the indexing —
            # presumably apply_filter_flags guarantees a full mask; confirm.
            if filtered_data.mask[time] == False:
                # process observation point using subroutines, called from named tuple
                if plots and (obs_type == 3) and (time - oc_details.end >= 48):
                    # do plotting if matches flagging criteria
                    oc_plots(station, oc_details, time, datastart, filtered_data, variable, oc_details)
                oc_details, obs_type = options[obs_type](oc_details, obs_type, time, var_flags)
            else:
                # have missing data,
                # transition: in-cluster -> gap-after-cluster; normal -> after-gap
                if obs_type == 2:
                    obs_type = 3
                elif obs_type == 0:
                    obs_type = 1
        # Write the per-variable flags back and report how many were set.
        station.qc_flags[:,flag_col[v]] = var_flags
        flag_locs = np.where(station.qc_flags[:, flag_col[v]] != 0)
        utils.print_flagged_obs_number(logfile, "Odd Cluster", variable, len(flag_locs[0]) - prev_flag_number, noWrite = diagnostics)
        # copy flags into attribute
        st_var.flags[flag_locs] = 1
    # matches 032070 temperature 26/8/2014
    station = utils.append_history(station, "Isolated Odd Cluster Check")
    return # occ
#************************************************************************
if __name__ == "__main__":
    # Parenthesised call form runs under both Python 2 and Python 3; the old
    # bare "print ..." statement is a SyntaxError on Python 3.
    print("Checking for Odd Clusters")
| 31.441472 | 133 | 0.588235 |
17ce9c17400aa36654a27c7f1200ef022e6682a0 | 1,095 | py | Python | ml/rl/preprocessing/sparse_to_dense.py | mikepsinn/Horizon | 4ce123062320c0297b80135e0b63759c02bf5699 | [
"BSD-3-Clause"
] | 2 | 2020-07-07T07:12:25.000Z | 2021-06-29T01:47:25.000Z | ml/rl/preprocessing/sparse_to_dense.py | weiddeng/Horizon | 0e7fe9d742c408e1b42803b42e104efbc56bae5b | [
"BSD-3-Clause"
] | 1 | 2021-08-25T16:13:32.000Z | 2021-08-25T16:13:32.000Z | ml/rl/preprocessing/sparse_to_dense.py | weiddeng/Horizon | 0e7fe9d742c408e1b42803b42e104efbc56bae5b | [
"BSD-3-Clause"
] | 1 | 2019-09-20T02:36:30.000Z | 2019-09-20T02:36:30.000Z | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import logging
from typing import List, Tuple
import numpy as np
from caffe2.python import workspace
from ml.rl.caffe_utils import C2
from ml.rl.preprocessing.normalization import MISSING_VALUE
logger = logging.getLogger(__name__)
def sparse_to_dense(
    lengths_blob: str,
    keys_blob: str,
    values_blob: str,
    sorted_features: List[int],
    set_missing_value_to_zero: bool = False,
) -> Tuple[str, List[str]]:
    """Add Caffe2 ops that convert a sparse key/value representation into a
    dense tensor, with absent features filled by a missing-value scalar.

    :param lengths_blob: blob holding the per-example feature counts
    :param keys_blob: blob holding the sparse feature ids
    :param values_blob: blob holding the sparse feature values
    :param sorted_features: feature ids defining the dense column order
    :param set_missing_value_to_zero: fill absent features with 0.0 instead of
        the MISSING_VALUE sentinel
    :return: tuple of (dense output blob name, list of parameter blob names)
    """
    MISSING_SCALAR = C2.NextBlob("MISSING_SCALAR")
    missing_value = 0.0 if set_missing_value_to_zero else MISSING_VALUE
    # Seed the workspace blob immediately and also emit a fill op so the
    # value is recreated whenever the net itself is (re)run.
    workspace.FeedBlob(MISSING_SCALAR, np.array([missing_value], dtype=np.float32))
    C2.net().GivenTensorFill([], [MISSING_SCALAR], shape=[], values=[missing_value])
    # The missing-value scalar is the only parameter blob this op introduces.
    parameters: List[str] = [MISSING_SCALAR]
    assert len(sorted_features) > 0, "Sorted features is empty"
    dense_input = C2.SparseToDenseMask(
        keys_blob, values_blob, MISSING_SCALAR, lengths_blob, mask=sorted_features
    )[0]
    return dense_input, parameters
| 29.594595 | 84 | 0.743379 |
7f8153ef10603484549fb52bb1bdd673f43fd73a | 4,186 | py | Python | rastervision2/core/evaluation/semantic_segmentation_evaluator.py | sinhmd/raster-vision | c617c49c2446b4d97281895ae953a5630e439e94 | [
"Apache-2.0"
] | 1 | 2020-05-27T07:07:58.000Z | 2020-05-27T07:07:58.000Z | rastervision2/core/evaluation/semantic_segmentation_evaluator.py | sinhmd/raster-vision | c617c49c2446b4d97281895ae953a5630e439e94 | [
"Apache-2.0"
] | null | null | null | rastervision2/core/evaluation/semantic_segmentation_evaluator.py | sinhmd/raster-vision | c617c49c2446b4d97281895ae953a5630e439e94 | [
"Apache-2.0"
] | null | null | null | import logging
from shapely.geometry import shape, mapping
from shapely.strtree import STRtree
from rastervision2.core.data import ActivateMixin
from rastervision2.core.data.vector_source import GeoJSONVectorSourceConfig
from rastervision2.core.evaluation import (ClassificationEvaluator,
SemanticSegmentationEvaluation)
log = logging.getLogger(__name__)
def filter_geojson_by_aoi(geojson, aoi_polygons):
    """Clip the features of a GeoJSON dict to a set of AOI polygons.

    Each feature geometry is intersected with every AOI polygon it overlaps;
    the clipped geometries are returned as a new FeatureCollection.

    :param geojson: GeoJSON dict with a 'features' list
    :param aoi_polygons: list of shapely polygons defining the areas of interest
    :return: GeoJSON FeatureCollection dict of the clipped geometries
    """
    # Note that this ignores class_id but that's ok because each prediction GeoJSON file
    # covers a single class_id. But, this may change in the future.
    tree = STRtree([shape(f['geometry']) for f in geojson['features']])
    filtered_shapes = []
    for aoi_poly in aoi_polygons:
        # STRtree.query matches on bounding boxes only, so the exact
        # intersection below can still come out empty.
        shapes_in_aoi = tree.query(aoi_poly)
        for s in shapes_in_aoi:
            s_int = s.intersection(aoi_poly)
            # Skip degenerate (empty) intersections rather than emitting
            # features with empty geometries.
            if not s_int.is_empty:
                filtered_shapes.append(s_int)
    features = [{
        # 'Feature' must be capitalized per the GeoJSON spec (RFC 7946);
        # the previous lowercase 'feature' produced non-conformant output.
        'type': 'Feature',
        'geometry': mapping(s)
    } for s in filtered_shapes]
    return {'type': 'FeatureCollection', 'features': features}
class SemanticSegmentationEvaluator(ClassificationEvaluator):
    """Evaluates predictions for a set of scenes.

    Produces a raster (pixel-wise) evaluation for every scene and, when a
    scene's label source/store expose vector data, a parallel vector
    (polygon-wise) evaluation.
    """
    def __init__(self, class_config, output_uri, vector_output_uri):
        # output_uri: destination of the raster evaluation JSON;
        # vector_output_uri: destination of the vector evaluation JSON.
        super().__init__(class_config, output_uri)
        self.vector_output_uri = vector_output_uri
    def create_evaluation(self):
        """Return a fresh, empty evaluation accumulator."""
        return SemanticSegmentationEvaluation(self.class_config)
    def process(self, scenes, tmp_dir):
        """Compute and save raster (and, when available, vector) evaluations.

        :param scenes: scenes with ground truth label sources and prediction
            label stores
        :param tmp_dir: temporary directory (unused here)
        """
        evaluation = self.create_evaluation()
        vect_evaluation = self.create_evaluation()
        null_class_id = self.class_config.get_null_class_id()
        for scene in scenes:
            log.info('Computing evaluation for scene {}...'.format(scene.id))
            label_source = scene.ground_truth_label_source
            label_store = scene.prediction_label_store
            with ActivateMixin.compose(label_source, label_store):
                ground_truth = label_source.get_labels()
                predictions = label_store.get_labels()
                if scene.aoi_polygons:
                    # Filter labels based on AOI.
                    ground_truth = ground_truth.filter_by_aoi(
                        scene.aoi_polygons, null_class_id)
                    predictions = predictions.filter_by_aoi(
                        scene.aoi_polygons, null_class_id)
                # Per-scene raster evaluation, merged into the overall one.
                scene_evaluation = self.create_evaluation()
                scene_evaluation.compute(ground_truth, predictions)
                evaluation.merge(scene_evaluation, scene_id=scene.id)
                # Vector evaluation is only possible when the ground truth was
                # rasterized from vectors AND predictions were vectorized.
                if hasattr(label_source, 'raster_source') and hasattr(
                        label_source.raster_source, 'vector_source') and hasattr(
                            label_store, 'vector_output'):
                    gt_geojson = label_source.raster_source.vector_source.get_geojson(
                    )
                    for vo in label_store.vector_output:
                        pred_geojson_uri = vo.uri
                        mode = vo.get_mode()
                        class_id = vo.class_id
                        pred_geojson_source = GeoJSONVectorSourceConfig(
                            uri=pred_geojson_uri, default_class_id=None).build(
                                self.class_config,
                                scene.raster_source.get_crs_transformer())
                        pred_geojson = pred_geojson_source.get_geojson()
                        if scene.aoi_polygons:
                            # NOTE(review): gt_geojson is re-filtered (and
                            # rebound) on every vector-output iteration;
                            # presumably harmless since filtering an already
                            # clipped collection is idempotent — confirm.
                            gt_geojson = filter_geojson_by_aoi(
                                gt_geojson, scene.aoi_polygons)
                            pred_geojson = filter_geojson_by_aoi(
                                pred_geojson, scene.aoi_polygons)
                        vect_scene_evaluation = self.create_evaluation()
                        vect_scene_evaluation.compute_vector(
                            gt_geojson, pred_geojson, mode, class_id)
                        vect_evaluation.merge(
                            vect_scene_evaluation, scene_id=scene.id)
        if not evaluation.is_empty():
            evaluation.save(self.output_uri)
        if not vect_evaluation.is_empty():
            vect_evaluation.save(self.vector_output_uri)
| 42.714286 | 88 | 0.625179 |
e20ba461d050825d8906aa12860e0e28d1843345 | 15,312 | py | Python | flask_restful/__init__.py | DisruptiveLabs/flask-restful | 4695d18478154209c3f638bd44a5cfb114a4ad18 | [
"BSD-3-Clause"
] | null | null | null | flask_restful/__init__.py | DisruptiveLabs/flask-restful | 4695d18478154209c3f638bd44a5cfb114a4ad18 | [
"BSD-3-Clause"
] | null | null | null | flask_restful/__init__.py | DisruptiveLabs/flask-restful | 4695d18478154209c3f638bd44a5cfb114a4ad18 | [
"BSD-3-Clause"
] | 1 | 2021-03-29T03:48:25.000Z | 2021-03-29T03:48:25.000Z | import difflib
from functools import wraps, partial
import re
from flask import request, Response, url_for
from flask import abort as original_flask_abort
from flask.views import MethodView
from flask.signals import got_request_exception
from werkzeug.exceptions import HTTPException, MethodNotAllowed, NotFound
from werkzeug.http import HTTP_STATUS_CODES
from flask.ext.restful.utils import unauthorized, error_data, unpack
from flask.ext.restful.representations.json import output_json
import sys
try:
#noinspection PyUnresolvedReferences
from collections import OrderedDict
except ImportError:
from .utils.ordereddict import OrderedDict
__all__ = ('Api', 'Resource', 'marshal', 'marshal_with', 'abort')
def abort(http_status_code, **kwargs):
    """Abort the current request with the given HTTP status code.

    Any keyword arguments are attached to the raised HTTPException as its
    ``data`` attribute so that error handlers can include them in the
    response body later on.
    """
    #noinspection PyUnresolvedReferences
    try:
        original_flask_abort(http_status_code)
    except HTTPException as exception:
        if kwargs:
            exception.data = kwargs
        raise exception
DEFAULT_REPRESENTATIONS = {'application/json': output_json}
class Api(object):
"""
The main entry point for the application.
You need to initialize it with a Flask Application: ::
>>> app = Flask(__name__)
>>> api = restful.Api(app)
Alternatively, you can use :meth:`init_app` to set the Flask application
after it has been constructed.
:param app: the Flask application object
:type app: flask.Flask
:param prefix: Prefix all routes with a value, eg v1 or 2010-04-01
:type prefix: str
:param default_mediatype: The default media type to return
:type default_mediatype: str
:param decorators: Decorators to attach to every resource
:type decorators: list
:param catch_all_404s: Use :meth:`handle_error`
to handle 404 errors throughout your app
:type catch_all_404s: bool
"""
def __init__(self, app=None, prefix='',
default_mediatype='application/json', decorators=None,
catch_all_404s=False):
self.representations = dict(DEFAULT_REPRESENTATIONS)
self.urls = {}
self.prefix = prefix
self.default_mediatype = default_mediatype
self.decorators = decorators if decorators else []
self.catch_all_404s = catch_all_404s
if app is not None:
self.init_app(app)
else:
self.app = None
def init_app(self, app):
"""Initialize this class with the given :class:`flask.Flask`
application object.
:param app: the Flask application object
:type app: flask.Flask
Examples::
api = Api()
api.init_app(app)
api.add_resource(...)
"""
self.app = app
self.endpoints = set()
app.handle_exception = partial(self.error_router, app.handle_exception)
app.handle_user_exception = partial(self.error_router, app.handle_user_exception)
def _should_use_fr_error_handler(self):
""" Determine if error should be handled with FR or default Flask
The goal is to return Flask error handlers for non-FR-related routes,
and FR errors (with the correct media type) for FR endpoints. This
method currently handles 404 and 405 errors.
:return: bool
"""
adapter = self.app.create_url_adapter(request)
try:
adapter.match()
except MethodNotAllowed as e:
# Check if the other HTTP methods at this url would hit the Api
valid_route_method = e.valid_methods[0]
rule, _ = adapter.match(method=valid_route_method, return_rule=True)
return rule.endpoint in self.endpoints
except NotFound:
return self.catch_all_404s
except:
# Werkzeug throws other kinds of exceptions, such as Redirect
pass
def _has_fr_route(self):
"""Encapsulating the rules for whether the request was to a Flask endpoint"""
# 404's, 405's, which might not have a url_rule
if self._should_use_fr_error_handler():
return True
# for all other errors, just check if FR dispatched the route
return request.url_rule and request.url_rule.endpoint in self.endpoints
def error_router(self, original_handler, e):
"""This function decides whether the error occured in a flask-restful
endpoint or not. If it happened in a flask-restful endpoint, our
handler will be dispatched. If it happened in an unrelated view, the
app's original error handler will be dispatched.
:param original_handler: the original Flask error handler for the app
:type original_handler: function
:param e: the exception raised while handling the request
:type e: Exception
"""
if self._has_fr_route():
return self.handle_error(e)
return original_handler(e)
def handle_error(self, e):
"""Error handler for the API transforms a raised exception into a Flask
response, with the appropriate HTTP status code and body.
:param e: the raised Exception object
:type e: Exception
"""
got_request_exception.send(self.app, exception=e)
if not hasattr(e, 'code') and self.app.propagate_exceptions:
exc_type, exc_value, tb = sys.exc_info()
if exc_value is e:
exc = exc_type(exc_value)
exc.__traceback__ = tb
raise exc
else:
raise e
code = getattr(e, 'code', 500)
data = getattr(e, 'data', error_data(code))
if code >= 500:
# There's currently a bug in Python3 that disallows calling
# logging.exception() when an exception hasn't actually be raised
if sys.exc_info() == (None, None, None):
self.app.logger.error("Internal Error")
else:
self.app.logger.exception("Internal Error")
if code == 404 and ('message' not in data or
data['message'] == HTTP_STATUS_CODES[404]):
rules = dict([(re.sub('(<.*>)', '', rule.rule), rule.rule)
for rule in self.app.url_map.iter_rules()])
close_matches = difflib.get_close_matches(request.path, rules.keys())
if close_matches:
# If we already have a message, add punctuation and continue it.
if "message" in data:
data["message"] += ". "
else:
data["message"] = ""
data['message'] += 'You have requested this URI [' + request.path + \
'] but did you mean ' + \
' or '.join((rules[match]
for match in close_matches)) + ' ?'
resp = self.make_response(data, code)
if code == 401:
resp = unauthorized(resp,
self.app.config.get("HTTP_BASIC_AUTH_REALM", "flask-restful"))
return resp
def mediatypes_method(self):
"""Return a method that returns a list of mediatypes
"""
return lambda resource_cls: self.mediatypes() + [self.default_mediatype]
def add_resource(self, resource, *urls, **kwargs):
"""Adds a resource to the api.
:param resource: the class name of your resource
:type resource: :class:`Resource`
:param urls: one or more url routes to match for the resource, standard
flask routing rules apply. Any url variables will be
passed to the resource method as args.
:type urls: str
:param endpoint: endpoint name (defaults to :meth:`Resource.__name__.lower`
Can be used to reference this route in :class:`fields.Url` fields
:type endpoint: str
Additional keyword arguments not specified above will be passed as-is
to :meth:`flask.Flask.add_url_rule`.
Examples::
api.add_resource(HelloWorld, '/', '/hello')
api.add_resource(Foo, '/foo', endpoint="foo")
api.add_resource(FooSpecial, '/special/foo', endpoint="foo")
"""
endpoint = kwargs.pop('endpoint', None) or resource.__name__.lower()
self.endpoints.add(endpoint)
if endpoint in self.app.view_functions.keys():
previous_view_class = self.app.view_functions[endpoint].__dict__['view_class']
# if you override the endpoint with a different class, avoid the collision by raising an exception
if previous_view_class != resource:
raise ValueError('This endpoint (%s) is already set to the class %s.' % (endpoint, previous_view_class.__name__))
resource.mediatypes = self.mediatypes_method() # Hacky
resource.endpoint = endpoint
resource_func = self.output(resource.as_view(endpoint))
for decorator in self.decorators:
resource_func = decorator(resource_func)
for url in urls:
self.app.add_url_rule(self.prefix + url, view_func=resource_func, **kwargs)
def output(self, resource):
"""Wraps a resource (as a flask view function), for cases where the
resource does not directly return a response object
:param resource: The resource as a flask view function
"""
@wraps(resource)
def wrapper(*args, **kwargs):
resp = resource(*args, **kwargs)
if isinstance(resp, Response): # There may be a better way to test
return resp
data, code, headers = unpack(resp)
return self.make_response(data, code, headers=headers)
return wrapper
def url_for(self, resource, **values):
"""Generates a URL to the given resource."""
return url_for(resource.endpoint, **values)
    def make_response(self, data, *args, **kwargs):
        """Looks up the representation transformer for the requested media
        type, invoking the transformer to create a response object. This
        defaults to (application/json) if no transformer is found for the
        requested mediatype.
        :param data: Python object containing response data to be transformed
        """
        # Try accepted mediatypes in preference order, then the default.
        for mediatype in self.mediatypes() + [self.default_mediatype]:
            if mediatype in self.representations:
                resp = self.representations[mediatype](data, *args, **kwargs)
                resp.headers['Content-Type'] = mediatype
                return resp
        # NOTE(review): if neither an accepted mediatype nor the default
        # mediatype has a registered transformer, this falls through and
        # implicitly returns None -- confirm default_mediatype is always
        # present in self.representations.
def mediatypes(self):
"""Returns a list of requested mediatypes sent in the Accept header"""
return [h for h, q in request.accept_mimetypes]
def representation(self, mediatype):
"""Allows additional representation transformers to be declared for the
api. Transformers are functions that must be decorated with this
method, passing the mediatype the transformer represents. Three
arguments are passed to the transformer:
* The data to be represented in the response body
* The http status code
* A dictionary of headers
The transformer should convert the data appropriately for the mediatype
and return a Flask response object.
Ex::
@api.representation('application/xml')
def xml(data, code, headers):
resp = make_response(convert_data_to_xml(data), code)
resp.headers.extend(headers)
return resp
"""
def wrapper(func):
self.representations[mediatype] = func
return func
return wrapper
class Resource(MethodView):
    """
    Abstract base class for a RESTful resource. Concrete resources extend
    this class and expose one method per supported HTTP verb. Invoking a
    resource with an unsupported HTTP method yields a 405 Method Not
    Allowed response; otherwise the matching method is called with all
    arguments from the url rule used when the resource was added to an
    Api instance. See Api.add_resource for details.
    """
    representations = None
    method_decorators = []

    def dispatch_request(self, *args, **kwargs):
        # Adapted from flask's MethodView dispatching.
        #noinspection PyUnresolvedReferences
        handler = getattr(self, request.method.lower(), None)
        # HEAD requests fall back to the GET handler when none is defined.
        if handler is None and request.method == 'HEAD':
            handler = getattr(self, 'get', None)
        assert handler is not None, 'Unimplemented method %r' % request.method
        for decorator in self.method_decorators:
            handler = decorator(handler)
        resp = handler(*args, **kwargs)
        if isinstance(resp, Response):  # There may be a better way to test
            return resp
        available = self.representations or {}
        #noinspection PyUnresolvedReferences
        for mediatype in self.mediatypes():
            if mediatype in available:
                data, code, headers = unpack(resp)
                resp = available[mediatype](data, code, headers)
                resp.headers['Content-Type'] = mediatype
                return resp
        return resp
def marshal(data, fields):
    """Filter raw data (a dict, list, or object) down to the given fields.

    :param fields: a dict whose keys make up the final serialized
                   response output
    :param data: the actual object(s) from which the fields are taken

    >>> from flask.ext.restful import fields, marshal
    >>> data = { 'a': 100, 'b': 'foo' }
    >>> mfields = { 'a': fields.Raw }
    >>> marshal(data, mfields)
    OrderedDict([('a', 100)])
    """
    def instantiate(field):
        # Field entries may be supplied as classes or ready-made instances.
        return field() if isinstance(field, type) else field

    if isinstance(data, (list, tuple)):
        return [marshal(item, fields) for item in data]

    out = OrderedDict()
    for key, field in fields.items():
        if isinstance(field, dict):
            # Nested field dicts marshal the same data recursively.
            out[key] = marshal(data, field)
        else:
            out[key] = instantiate(field).output(key, data)
    return out
class marshal_with(object):
    """A decorator that applies marshalling to the return values of your
    methods.

    >>> from flask.ext.restful import fields, marshal_with
    >>> mfields = { 'a': fields.Raw }
    >>> @marshal_with(mfields)
    ... def get():
    ...     return { 'a': 100, 'b': 'foo' }
    ...
    ...
    >>> get()
    OrderedDict([('a', 100)])

    see :meth:`flask.ext.restful.marshal`
    """
    def __init__(self, fields):
        """:param fields: a dict of whose keys will make up the final
                          serialized response output"""
        self.fields = fields

    def __call__(self, f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            result = f(*args, **kwargs)
            # A bare value is marshalled directly; a (data, code, headers)
            # tuple is unpacked, marshalled, and re-assembled.
            if not isinstance(result, tuple):
                return marshal(result, self.fields)
            data, code, headers = unpack(result)
            return marshal(data, self.fields), code, headers
        return wrapper
| 36.457143 | 129 | 0.622453 |
c3cde15b1a0058271963f467309796825eff460d | 586 | py | Python | Dataset/Leetcode/train/46/421.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | Dataset/Leetcode/train/46/421.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | Dataset/Leetcode/train/46/421.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | class Solution:
def XXX(self, nums: List[int]) -> List[List[int]]:
def rt_help(ls,first):
if first >= n:
return_ls.append(ls) << -- this line
for i in range(n):
if marked[i] != 1:
ls.append(nums[i])
marked[i] = 1
rt_help(ls,first+1)
marked[i] = 0
ls.pop()
return_ls = []
ls = []
n = len(nums)
marked = [0] * n
first = 0
rt_help(ls,first)
return return_ls
| 24.416667 | 54 | 0.389078 |
d0ee58a9f00edac9bdf78e05b69ae0adbd7d92ca | 22,006 | py | Python | venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_system_resource_limits.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 1 | 2020-01-22T13:11:23.000Z | 2020-01-22T13:11:23.000Z | venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_system_resource_limits.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | 12 | 2020-02-21T07:24:52.000Z | 2020-04-14T09:54:32.000Z | venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_system_resource_limits.py | usegalaxy-no/usegalaxy | 75dad095769fe918eb39677f2c887e681a747f3a | [
"MIT"
] | null | null | null | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019-2020 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_resource_limits
short_description: Configure resource limits in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify system feature and resource_limits category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.0
version_added: "2.10"
author:
- Link Zheng (@chillancezen)
- Jie Xue (@JieX19)
- Hongbin Lu (@fgtdev-hblu)
- Frank Shen (@frankshen01)
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Legacy fortiosapi has been deprecated, httpapi is the preferred way to run playbooks
requirements:
- ansible>=2.9.0
options:
access_token:
description:
- Token-based authentication.
Generated from GUI of Fortigate.
type: str
required: false
enable_log:
description:
- Enable/Disable logging for task.
type: bool
required: false
default: false
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
member_path:
type: str
description:
- Member attribute path to operate on.
- Delimited by a slash character if there are more than one attribute.
- Parameter marked with member_path is legitimate for doing member operation.
member_state:
type: str
description:
- Add or delete a member under specified attribute path.
- When member_state is specified, the state option is ignored.
choices:
- present
- absent
system_resource_limits:
description:
- Configure resource limits.
default: null
type: dict
suboptions:
custom_service:
description:
- Maximum number of firewall custom services.
type: int
dialup_tunnel:
description:
- Maximum number of dial-up tunnels.
type: int
firewall_address:
description:
- Maximum number of firewall addresses (IPv4, IPv6, multicast).
type: int
firewall_addrgrp:
description:
- Maximum number of firewall address groups (IPv4, IPv6).
type: int
firewall_policy:
description:
- Maximum number of firewall policies (IPv4, IPv6, policy46, policy64, DoS-policy4, DoS-policy6, multicast).
type: int
ipsec_phase1:
description:
- Maximum number of VPN IPsec phase1 tunnels.
type: int
ipsec_phase1_interface:
description:
- Maximum number of VPN IPsec phase1 interface tunnels.
type: int
ipsec_phase2:
description:
- Maximum number of VPN IPsec phase2 tunnels.
type: int
ipsec_phase2_interface:
description:
- Maximum number of VPN IPsec phase2 interface tunnels.
type: int
log_disk_quota:
description:
- Log disk quota in MB.
type: int
onetime_schedule:
description:
- Maximum number of firewall one-time schedules.
type: int
proxy:
description:
- Maximum number of concurrent proxy users.
type: int
recurring_schedule:
description:
- Maximum number of firewall recurring schedules.
type: int
service_group:
description:
- Maximum number of firewall service groups.
type: int
session:
description:
- Maximum number of sessions.
type: int
sslvpn:
description:
- Maximum number of SSL-VPN.
type: int
user:
description:
- Maximum number of local users.
type: int
user_group:
description:
- Maximum number of user groups.
type: int
'''
EXAMPLES = '''
- collections:
- fortinet.fortios
connection: httpapi
hosts: fortigate01
vars:
ansible_httpapi_port: 443
ansible_httpapi_use_ssl: true
ansible_httpapi_validate_certs: false
vdom: root
tasks:
- name: fortios_system_resource_limits
fortios_system_resource_limits:
vdom: root
system_resource_limits: {}
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import FortiOSHandler
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import check_legacy_fortiosapi
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import schema_to_module_spec
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import check_schema_versioning
from ansible_collections.fortinet.fortios.plugins.module_utils.fortimanager.common import FAIL_SOCKET_MSG
def filter_system_resource_limits_data(json):
    """Keep only the recognized resource-limit options that carry a value.

    Drops any key not in the option list as well as options whose value is
    None, returning the filtered dictionary.
    """
    option_list = ['custom_service', 'dialup_tunnel', 'firewall_address',
                   'firewall_addrgrp', 'firewall_policy', 'ipsec_phase1',
                   'ipsec_phase1_interface', 'ipsec_phase2', 'ipsec_phase2_interface',
                   'log_disk_quota', 'onetime_schedule', 'proxy',
                   'recurring_schedule', 'service_group', 'session',
                   'sslvpn', 'user', 'user_group']
    return {option: json[option] for option in option_list
            if option in json and json[option] is not None}
def underscore_to_hyphen(data):
    """Recursively rewrite dict keys, replacing underscores with hyphens.

    Dicts are rebuilt with hyphenated keys; lists are converted element by
    element in place; scalars pass through unchanged.
    """
    if isinstance(data, dict):
        return {key.replace('_', '-'): underscore_to_hyphen(value)
                for key, value in data.items()}
    if isinstance(data, list):
        for index, element in enumerate(data):
            data[index] = underscore_to_hyphen(element)
    return data
def system_resource_limits(data, fos):
    """Push the filtered resource-limits configuration to the device.

    :param data: module parameter dict (contains vdom and the config body)
    :param fos: FortiOSHandler used to issue the API call
    """
    vdom = data['vdom']
    body = filter_system_resource_limits_data(data['system_resource_limits'])
    filtered_data = underscore_to_hyphen(body)
    return fos.set('system',
                   'resource-limits',
                   data=filtered_data,
                   vdom=vdom)
def is_successful_status(resp):
    """Return True when a FortiOS API response indicates success.

    Success means an explicit 'success' status, an HTTP 200, or a 404 on a
    DELETE (the object was already gone).
    """
    if 'status' in resp and resp['status'] == 'success':
        return True
    if 'http_status' in resp and resp['http_status'] == 200:
        return True
    return ('http_method' in resp and resp['http_method'] == "DELETE"
            and resp['http_status'] == 404)
def fortios_system(data, fos):
    """Dispatch the system_resource_limits task and summarize the outcome.

    Returns an (is_error, changed, response) triple for the Ansible module.
    """
    fos.do_member_operation('system_resource_limits')
    if data['system_resource_limits']:
        resp = system_resource_limits(data, fos)
    else:
        fos._module.fail_json(msg='missing task body: %s' % ('system_resource_limits'))

    succeeded = is_successful_status(resp)
    changed = succeeded and resp.get('revision_changed', True)
    return not succeeded, changed, resp
versioned_schema = {
"type": "dict",
"children": {
"service_group": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"firewall_address": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"firewall_policy": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ipsec_phase1": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"session": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ipsec_phase1_interface": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"custom_service": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ipsec_phase2": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ipsec_phase2_interface": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"dialup_tunnel": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"user": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"user_group": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sslvpn": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"onetime_schedule": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"firewall_addrgrp": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"proxy": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"recurring_schedule": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"log_disk_quota": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
def main():
    """Ansible module entry point for the system_resource_limits task.

    Builds the module argument spec from the versioned schema, connects to
    the FortiOS device over the httpapi socket, runs the configuration
    task, and exits with the appropriate changed/failed status.
    """
    module_spec = schema_to_module_spec(versioned_schema)
    # This endpoint has no master key (table key) attribute.
    mkeyname = None
    fields = {
        "access_token": {"required": False, "type": "str", "no_log": True},
        # NOTE(review): "type" here is the bool builtin rather than the
        # string "bool" -- confirm AnsibleModule accepts this form.
        "enable_log": {"required": False, "type": bool},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "member_path": {"required": False, "type": "str"},
        "member_state": {
            "type": "str",
            "required": False,
            "choices": ["present", "absent"]
        },
        "system_resource_limits": {
            "required": False, "type": "dict", "default": None,
            "options": {
            }
        }
    }
    # Copy every schema-derived option into the task's nested option spec;
    # the mkey attribute (none for this endpoint) would be forced required.
    for attribute_name in module_spec['options']:
        fields["system_resource_limits"]['options'][attribute_name] = module_spec['options'][attribute_name]
        if mkeyname and mkeyname == attribute_name:
            fields["system_resource_limits"]['options'][attribute_name]['required'] = True
    check_legacy_fortiosapi()
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    versions_check_result = None
    if module._socket_path:
        connection = Connection(module._socket_path)
        if 'access_token' in module.params:
            connection.set_option('access_token', module.params['access_token'])
        if 'enable_log' in module.params:
            connection.set_option('enable_log', module.params['enable_log'])
        else:
            connection.set_option('enable_log', False)
        fos = FortiOSHandler(connection, module, mkeyname)
        # Compare the running FortiOS version against the schema; any
        # mismatch is reported below as a warning, not a hard failure.
        versions_check_result = check_schema_versioning(fos, versioned_schema, "system_resource_limits")
        is_error, has_changed, result = fortios_system(module.params, fos)
    else:
        # No httpapi socket: this module only works over the httpapi plugin.
        module.fail_json(**FAIL_SOCKET_MSG)
    if versions_check_result and versions_check_result['matched'] is False:
        module.warn("Ansible has detected version mismatch between FortOS system and your playbook, see more details by specifying option -vvv")
    if not is_error:
        if versions_check_result and versions_check_result['matched'] is False:
            module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result)
        else:
            module.exit_json(changed=has_changed, meta=result)
    else:
        if versions_check_result and versions_check_result['matched'] is False:
            module.fail_json(msg="Error in repo", version_check_warning=versions_check_result, meta=result)
        else:
            module.fail_json(msg="Error in repo", meta=result)
| 31.663309 | 144 | 0.478279 |
afde8423eebd5e128ea1494edbff04690480bd81 | 51,055 | py | Python | Python/firms/tpfuncs.py | lnsongxf/OG-USA | 9e92129e67f4aea5f3a6b8da4110bf67b99ce88a | [
"CC0-1.0"
] | 1 | 2017-05-23T13:57:53.000Z | 2017-05-23T13:57:53.000Z | Python/firms/tpfuncs.py | lnsongxf/OG-USA | 9e92129e67f4aea5f3a6b8da4110bf67b99ce88a | [
"CC0-1.0"
] | null | null | null | Python/firms/tpfuncs.py | lnsongxf/OG-USA | 9e92129e67f4aea5f3a6b8da4110bf67b99ce88a | [
"CC0-1.0"
] | 1 | 2021-06-03T19:06:24.000Z | 2021-06-03T19:06:24.000Z | '''
------------------------------------------------------------------------
This file contains the functions specific to solving for the time path of
the OG model with S-period lived agents, exogenous labor, and M industries
and I goods.
These functions include:
get_p_path
get_p_tilde_path
get_cbepath
get_c_tilde_lf
get_c_lf
LfEulerSys
paths_life
TP
TP_fsolve
This Python script calls the following other file(s) with the associated
functions:
firm_funcs.py
get_p
get_p_tilde
get_c_tilde
get_c
get_C
get_K
get_L
------------------------------------------------------------------------
'''
# Import Packages
import numpy as np
import scipy.optimize as opt
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
from mpl_toolkits.mplot3d import Axes3D
import sys
import firm_funcs as firm
reload(firm)
'''
------------------------------------------------------------------------
Functions
------------------------------------------------------------------------
'''
def get_p_path(params, r_path, w_path, p_path, p_k_path, K_path, X_path):
    '''
    Generates implied time path of output prices given the path of
    guessed output prices, the path of capital prices, r, w, K, and X

    Inputs:
        params   = length 7 tuple, (A, gamma, epsilon, delta, K_ss, M, T)
        A        = [M,T] matrix, total factor productivity for each
                   industry
        gamma    = [M,T] matrix, capital share of income for each industry
        epsilon  = [M,T] matrix, elasticity of substitution between capital
                   and labor for each industry
        delta    = [M,T] matrix, capital depreciation rate for each
                   industry
        K_ss     = [M,] vector, SS capital for each industry
        M        = integer, number of industries
        T        = integer > S, number of periods until steady state
        r_path   = [T,] vector, time path of real interest rates
        w_path   = [T,] vector, time path of real wage rates
        p_path   = [M,T] matrix, time path of industry output prices
        p_k_path = [M,T+1] matrix, time path of industry capital prices
        K_path   = [M,T] matrix, time path of industry capital demand
        X_path   = [M,T] matrix, time path of industry output

    Functions called:
        get_L_over_X
        get_K_over_X

    Objects in function:
        p_path_implied = [M, T] matrix, time path of output prices implied
                         from model solution

    Returns: p_path_implied
    '''
    A, gamma, epsilon, delta, K_ss, M, T = params
    l_ratio_params = (A, gamma, epsilon, delta)
    L_over_X = get_L_over_X(l_ratio_params, p_path, w_path)
    k_ratio_params = (A, gamma, epsilon, delta, M, T)
    K_over_X = get_K_over_X(k_ratio_params, p_path, p_k_path[:,:T], r_path)
    # Next-period capital prices and capital stocks; the last period's K'
    # is taken from the steady state.
    p_k_p1 = p_k_path[:,1:]
    K_p1 = np.append(K_path[:,1:],np.reshape(K_ss,[M,1]),axis=1)
    # Zero-profit price: wage cost + rental cost of capital + capital-gains
    # term on next-period capital, all per unit of output.
    p_path_implied = (w_path*L_over_X) + ((r_path+delta)*p_k_path[:,:T]*K_over_X) + ((p_k_path[:,:T]- p_k_p1)*(K_p1/X_path))
    # NOTE(review): the variant below omits the capital-gains term --
    # retained as a reference for the alternative pricing specification.
    #p_path_implied = (w_path*L_over_X) + ((r_path+delta)*p_k_path[:,:T]*K_over_X)
    return p_path_implied
def get_p_c_path(p_path, pi, I):
    '''
    Generates time path of consumption good prices from industry output
    prices and the fixed coefficient matrix relating output goods to
    consumption goods.

    Inputs:
        p_path = [M, T+S-2] matrix, time path of industry output prices
        pi     = [I, M] matrix, element i,j gives the fraction of
                 consumption good i made from industry j's output
        I      = integer, number of consumption goods (retained for
                 interface compatibility; the row count of pi determines
                 the output dimension)

    Functions called: None

    Objects in function:
        p_c_path = [I, T+S-2] matrix, time path of consumption good prices

    Returns: p_c_path
    '''
    # Each column t is pi.dot(p_path[:, t]); a single matrix product
    # computes every period at once, replacing the per-period Python loop.
    p_c_path = np.dot(pi, p_path)
    return p_c_path
def get_p_k_path(p_path, xi, M):
    '''
    Generates time path of capital good prices from industry output prices
    and the fixed coefficient matrix relating output goods to capital
    goods.

    Inputs:
        p_path = [M, T+S-2] matrix, time path of industry output prices
        xi     = [M, M] matrix, element i,j gives the fraction of capital
                 used by industry i that comes from the output of
                 industry j
        M      = integer, number of industries (retained for interface
                 compatibility; the row count of xi determines the output
                 dimension)

    Functions called: None

    Objects in function:
        p_k_path = [M, T+S-2] matrix, time path of capital good prices

    Returns: p_k_path
    '''
    # Each column t is xi.dot(p_path[:, t]); one matrix product replaces
    # the per-period Python loop.
    p_k_path = np.dot(xi, p_path)
    return p_k_path
def get_cbepath(params, Gamma1, r_path, w_path, p_c_path, p_tilde_path,
    c_bar, I, n):
    '''
    Generates matrices for the time path of the distribution of
    individual savings, individual composite consumption, individual
    consumption of each type of good, and the Euler errors associated
    with the savings decisions.

    Inputs:
        params       = length 6 tuple, (S, T, alpha, beta, sigma, tp_tol)
        S            = integer in [3,80], number of periods an individual
                       lives
        T            = integer > S, number of time periods until steady
                       state
        alpha        = [I,T+S-1] matrix, expenditure shares on each good
                       along time path
        beta         = scalar in [0,1), discount factor per model period
        sigma        = scalar > 0, coefficient of relative risk aversion
        tp_tol       = scalar > 0, tolerance level for fsolve's in TPI
        Gamma1       = [S-1,] vector, initial period savings distribution
        r_path       = [T+S-1,] vector, time path of the interest rate
        w_path       = [T+S-1,] vector, time path of the wage
        p_c_path     = [I, T+S-1] matrix, time path of consumption goods
                       prices
        p_tilde_path = [T+S-1,] vector, time path of composite price
        c_bar        = [I,T+S-1] matrix, minimum consumption values for
                       all goods along time path
        I            = integer, number of unique consumption goods
        n            = [S,] vector, exogenous labor supply n_{s}

    Functions called:
        paths_life
        firm.get_c_tilde
        firm.get_c

    Objects in function:
        b_path       = [S-1, T+S-1] matrix, savings along time path
        c_tilde_path = [S, T+S-1] matrix, composite consumption
        c_path       = [S, T+S-1, I] array, consumption by good
        eulerr_path  = [S-1, T+S-1] matrix, Euler errors along time path

    Returns: b_path, c_tilde_path, c_path, eulerr_path
    '''
    S, T, alpha, beta, sigma, tp_tol = params
    b_path = np.append(Gamma1.reshape((S-1,1)), np.zeros((S-1, T+S-2)),
        axis=1)
    c_tilde_path = np.zeros((S, T+S-1))
    c_path = np.zeros((S, T+S-1, I))
    eulerr_path = np.zeros((S-1, T+S-1))
    # Solve the incomplete remaining lifetime decisions of agents alive
    # in period t=1 but not born in period t=1.  The oldest agent (age S)
    # has no savings decision left, so only consumption is computed here.
    c_tilde_path[S-1, 0], c_tilde_cstr = firm.get_c_tilde(c_bar[:,0],r_path[0],w_path[0],
                             p_c_path[:,0], p_tilde_path[0],n[S-1],Gamma1[S-2])
    c_path[S-1, 0, :], c_cstr = firm.get_c(alpha[:,0],c_bar[:,0],c_tilde_path[S-1,0],
                                p_c_path[:,0],p_tilde_path[0])
    # u is the number of periods remaining in the agent's life.
    # NOTE: xrange -- this file targets Python 2.
    for u in xrange(2, S):
        # b_guess = b_ss[-u+1:]
        # Seed each fsolve with the previous cohort's diagonal choices.
        b_guess = np.diagonal(b_path[S-u:, :u-1])
        pl_params = (S, alpha[:,:u], beta, sigma, tp_tol)
        b_lf, c_tilde_lf, c_lf, b_err_vec_lf = paths_life(pl_params,
            S-u+1, Gamma1[S-u-1], c_bar[:,:u], n[-u:], r_path[:u],
            w_path[:u], p_c_path[:, :u], p_tilde_path[:u], b_guess)
        # Insert the vector lifetime solutions diagonally (twist donut)
        # into the c_tilde_path, b_path, and EulErrPath matrices
        DiagMaskb = np.eye(u-1, dtype=bool)
        DiagMaskc = np.eye(u, dtype=bool)
        b_path[S-u:, 1:u] = DiagMaskb * b_lf + b_path[S-u:, 1:u]
        c_tilde_path[S-u:, :u] = DiagMaskc * c_tilde_lf + c_tilde_path[S-u:, :u]
        # The [u,I] consumption matrix is broadcast along the diagonal of
        # a [u,u,I] boolean mask so each good gets the same twist-donut
        # insertion as the scalar paths above.
        DiagMaskc_tiled = np.tile(np.expand_dims(np.eye(u, dtype=bool),axis=2),(1,1,I))
        c_lf = np.tile(np.expand_dims(c_lf.transpose(),axis=0),((c_lf.shape)[1],1,1))
        c_path[S-u:, :u, :] = (DiagMaskc_tiled * c_lf +
                              c_path[S-u:, :u, :])
        eulerr_path[S-u:, 1:u] = (DiagMaskb * b_err_vec_lf +
                                eulerr_path[S-u:, 1:u])
    # Solve for complete lifetime decisions of agents born in periods
    # 1 to T and insert the vector lifetime solutions diagonally (twist
    # donut) into the c_tilde_path, b_path, and EulErrPath matrices
    DiagMaskb = np.eye(S-1, dtype=bool)
    DiagMaskc = np.eye(S, dtype=bool)
    DiagMaskc_tiled = np.tile(np.expand_dims(np.eye(S, dtype=bool),axis=2),(1,1,I))
    for t in xrange(1, T+1):  # Go from periods 1 to T
        # b_guess = b_ss
        b_guess = np.diagonal(b_path[:, t-1:t+S-2])
        pl_params = (S, alpha[:,t-1:t+S-1], beta, sigma, tp_tol)
        # Agents born in period t start with beg_age=1 and zero wealth.
        b_lf, c_lf, c_i_lf, b_err_vec_lf = paths_life(pl_params, 1,
            0, c_bar[:,t-1:t+S-1], n, r_path[t-1:t+S-1],
            w_path[t-1:t+S-1], p_c_path[:, t-1:t+S-1],
            p_tilde_path[t-1:t+S-1], b_guess)
        c_i_lf = np.tile(np.expand_dims(c_i_lf.transpose(),axis=0),((c_i_lf.shape)[1],1,1))
        # Insert the vector lifetime solutions diagonally (twist donut)
        # into the c_tilde_path, b_path, and EulErrPath matrices
        b_path[:, t:t+S-1] = DiagMaskb * b_lf + b_path[:, t:t+S-1]
        c_tilde_path[:, t-1:t+S-1] = DiagMaskc * c_lf + c_tilde_path[:, t-1:t+S-1]
        c_path[:, t-1:t+S-1, :] = (DiagMaskc_tiled * c_i_lf +
                                c_path[:, t-1:t+S-1, :])
        eulerr_path[:, t:t+S-1] = (DiagMaskb * b_err_vec_lf +
                                eulerr_path[:, t:t+S-1])
    return b_path, c_tilde_path, c_path, eulerr_path
def paths_life(params, beg_age, beg_wealth, c_bar, n, r_path,
               w_path, p_c_path, p_tilde_path, b_init):
    '''
    Solve for the remaining lifetime savings decisions of an individual
    who enters the model at age beg_age, with corresponding initial
    wealth beg_wealth.

    Inputs:
        params       = length 5 tuple, (S, alpha, beta, sigma, tp_tol)
        S            = integer in [3,80], number of periods an individual
                       lives
        alpha        = [I,S-beg_age+1] matrix, expenditure shares on each
                       good for the remaining lifetime
        beta         = scalar in [0,1), discount factor per model period
        sigma        = scalar > 0, coefficient of relative risk aversion
        tp_tol       = scalar > 0, tolerance level for fsolve's in TPI
        beg_age      = integer in [1,S-1], beginning age of remaining life
        beg_wealth   = scalar, beginning wealth at beginning age
        c_bar        = [I,S-beg_age+1] matrix, minimum consumption values
        n            = [S-beg_age+1,] vector, remaining exogenous labor
                       supplies
        r_path       = [S-beg_age+1,] vector, remaining lifetime interest
                       rates
        w_path       = [S-beg_age+1,] vector, remaining lifetime wages
        p_c_path     = [I, S-beg_age+1] matrix, remaining lifetime
                       consumption good prices
        p_tilde_path = [S-beg_age+1,] vector, remaining lifetime composite
                       goods prices
        b_init       = [S-beg_age,] vector, initial guess for remaining
                       lifetime savings

    Functions called:
        LfEulerSys
        firm.get_c_tilde
        firm.get_c
        firm.get_b_errors

    Objects in function:
        u            = integer in [2,S], remaining periods in life
        b_guess      = [u-1,] vector, initial guess for lifetime savings
        b_path       = [u-1,] vector, optimal remaining lifetime savings
        c_tilde_path = [u,] vector, optimal remaining composite consumption
        c_path       = [u,I] matrix, remaining consumption by good
        b_err_vec    = [u-1,] vector, Euler errors of savings decisions

    Returns: b_path, c_tilde_path, c_path, b_err_vec
    '''
    S, alpha, beta, sigma, tp_tol = params
    u = int(S - beg_age + 1)
    # Sanity checks: a newborn has no initial wealth, and every remaining-
    # lifetime path must have exactly u entries.
    if beg_age == 1 and beg_wealth != 0:
        sys.exit("Beginning wealth is nonzero for age s=1.")
    if len(r_path) != u:
        #print len(r_path), S-beg_age+1
        sys.exit("Beginning age and length of r_path do not match.")
    if len(w_path) != u:
        sys.exit("Beginning age and length of w_path do not match.")
    if len(n) != u:
        sys.exit("Beginning age and length of n do not match.")
    # Perturb the guess slightly so fsolve does not start exactly at the
    # previous cohort's solution.
    b_guess = 1.01 * b_init
    eullf_objs = (u, beta, sigma, beg_wealth, c_bar, n, r_path,
                  w_path, p_c_path, p_tilde_path)
    b_path = opt.fsolve(LfEulerSys, b_guess, args=(eullf_objs),
                        xtol=tp_tol)
    # Recover consumption implied by the optimal savings path; the savings
    # vector is prepended with beginning wealth to form the full b series.
    c_tilde_path, c_tilde_cstr = firm.get_c_tilde(c_bar, r_path, w_path, p_c_path, p_tilde_path,
                       n, np.append(beg_wealth, b_path))
    c_path, c_cstr = firm.get_c(alpha[:,:u], c_bar, c_tilde_path, p_c_path, p_tilde_path)
    b_err_params = (beta, sigma)
    b_err_vec = firm.get_b_errors(b_err_params, r_path[1:], c_tilde_path,
                                  c_tilde_cstr, diff=True)
    return b_path, c_tilde_path, c_path, b_err_vec
def LfEulerSys(b, *objs):
    '''
    Computes the vector of lifetime Euler-equation errors implied by a
    guessed path of remaining-lifetime savings decisions.  A zero vector
    characterizes the household's optimal savings plan.
    Inputs:
        b    = [u-1,] vector, guessed remaining lifetime savings
               decisions, where u is the number of remaining periods
        objs = length 10 tuple, (u, beta, sigma, beg_wealth, c_bar, n,
               r_path, w_path, p_c_path, p_tilde_path)
               u            = integer in [2,S], remaining periods in life
               beta         = scalar in [0,1), discount factor
               sigma        = scalar > 0, coefficient of relative risk
                              aversion
               beg_wealth   = scalar, wealth at the beginning of first age
               c_bar        = minimum consumption amounts for each good
               n            = [u,] vector, remaining exogenous labor supply
               r_path       = [u,] vector, remaining lifetime interest rates
               w_path       = [u,] vector, remaining lifetime wages
               p_c_path     = [I,u] matrix, remaining lifetime consumption
                              good prices
               p_tilde_path = [u,] vector, remaining lifetime composite
                              goods prices
    Functions called:
        firm.get_c_tilde
        firm.get_b_errors
    Objects in function:
        full_savings = [u,] vector, savings path including the initial
                       wealth endowment
        c_tilde      = [u,] vector, remaining lifetime composite
                       consumption implied by full_savings
        c_tilde_cstr = [u,] boolean vector, =True where consumption is
                       nonpositive
    Returns: [u-1,] vector of Euler errors
    '''
    (u, beta, sigma, beg_wealth, c_bar, n, r_path, w_path, p_c_path,
        p_tilde_path) = objs
    # Prepend initial wealth so the savings vector spans all u periods.
    full_savings = np.append(beg_wealth, b)
    c_tilde, c_tilde_cstr = firm.get_c_tilde(
        c_bar, r_path, w_path, p_c_path, p_tilde_path, n, full_savings)
    return firm.get_b_errors((beta, sigma), r_path[1:], c_tilde,
                             c_tilde_cstr, diff=True)
def get_K_over_X_pf_path(r_path, w_path, p_k_path, A, gamma, epsilon, delta, M, T):
    '''
    Generates the [M,T] time path of capital-output ratios by production
    industry implied jointly by the FOCs for capital and labor demand
    and the CES production function.
    :: Note: This differs from get_K_over_X, which uses the FOC for
    capital demand only.  Here the capital-labor ratio implied by both
    FOCs is combined with the firms' production functions.
    Inputs:
        r_path   = [T,] vector, real interest rates
        w_path   = [T,] vector, real wage rates
        p_k_path = [M,T] matrix, capital prices for each industry
        A        = [M,T] matrix, total factor productivity for all
                   industries
        gamma    = [M,T] matrix, capital shares of income for all
                   industries
        epsilon  = [M,T] matrix, elasticities of substitution between
                   capital and labor for all industries
        delta    = [M,T] matrix, model period depreciation rates for all
                   industries
        M        = integer, number of production industries
        T        = integer, number of periods on the time path
    Functions called: None
    Objects in function:
        lag_p_k   = [M,T] matrix, capital price one period earlier
        cap_cost  = [M,T] matrix, net rental cost of capital relative to
                    the wage
        inv_eps   = [M,T] matrix, 1 / epsilon
        eps_ratio = [M,T] matrix, (epsilon - 1) / epsilon
        inner     = [M,T] matrix, CES term inside the outer exponent
    Returns: K_over_X_pf_path = [M,T] matrix, capital-output ratios
    '''
    # Capital price one period earlier; the pre-path price is assumed
    # equal to its initial-period value.
    lag_p_k = np.insert(p_k_path[:, :T-1], 0,
                        (p_k_path[:, 0]).reshape(M,), axis=1)
    cap_cost = ((lag_p_k * (1 + r_path)) - (p_k_path * (1 - delta))) / w_path
    inv_eps = 1 / epsilon
    eps_ratio = (epsilon - 1) / epsilon
    inner = ((gamma ** inv_eps) +
             ((1 - gamma) ** inv_eps) * (((1 - gamma) / gamma) ** eps_ratio) *
             (cap_cost ** (epsilon - 1)))
    return (1 / A) * (inner ** (epsilon / (1 - epsilon)))
def get_X_path(params, r_path, w_path, C_path, p_k_path, A, gamma,
               epsilon, delta, xi, pi, I, M):
    '''
    Generates the [M,T] time path of aggregate output X_{m,t} by
    industry given r_t, w_t, and aggregate consumption C_{m,t}.  Output
    is solved backwards from the steady state: in each period, output
    must cover consumption demand plus the output needed to build next
    period's capital stock.
    Inputs:
        params   = length 2 tuple, (T, K_ss)
                   T    = integer > S, number of periods until the
                          steady state
                   K_ss = [M,] vector, steady-state capital stock by
                          industry
        r_path   = [T,] vector, real interest rates
        w_path   = [T,] vector, real wage rates
        C_path   = [I,T] matrix, aggregate consumption of each good
        p_k_path = [M,T] matrix, capital prices from each industry
        A        = [M,T] matrix, total factor productivity for all
                   industries
        gamma    = [M,T] matrix, capital shares of income for all
                   industries
        epsilon  = [M,T] matrix, elasticities of substitution between
                   capital and labor for all industries
        delta    = [M,T] matrix, model period depreciation rates for all
                   industries
        xi       = [M,M] matrix, element i,j gives the fraction of
                   capital used by industry j that comes from industry i
        pi       = [I,M] matrix, element i,j gives the fraction of
                   consumption good i produced by industry j
        I        = integer, number of unique consumption goods
        M        = integer, number of unique production industries
    Functions called:
        get_K_over_X_pf_path
    Objects in function:
        k_over_x  = [M,T] matrix, capital-output ratios by industry
        surviving = [M,T] matrix, undepreciated capital per unit output
        K_next    = [M,] vector, next period's capital demand
        demand_k  = [1,M] matrix, output demand from capital formation
        demand_c  = [1,M] matrix, output demand from consumption
        lhs, rhs  = linear system a X = b solved for this period's output
    Returns: X_path = [M,T] matrix, output from each industry
    '''
    T, K_ss = params
    k_over_x = get_K_over_X_pf_path(r_path, w_path, p_k_path, A, gamma,
                                    epsilon, delta, M, T)
    surviving = (1 - delta) * k_over_x
    X_path = np.zeros((M, T))
    # Seed with the steady-state capital stock and iterate backwards.
    K_next = K_ss
    for t in reversed(range(T)):
        demand_k = np.dot(np.reshape(K_next, (1, M)), xi)
        demand_c = np.dot(np.reshape(C_path[:, t], (1, I)), pi)
        rhs = (demand_c + demand_k).transpose()
        lhs = np.eye(M) + (np.tile(surviving[:, t], (M, 1)) * xi.transpose())
        X_path[:, t] = np.reshape(np.linalg.solve(lhs, rhs), (M,))
        K_next = X_path[:, t] * k_over_x[:, t]
    return X_path
def get_K_path(r_path, w_path, X_path, p_k_path, A, gamma, epsilon, delta, M, T):
    '''
    Generates the [M,T] time path of capital demand by production
    industry for given output X, interest rates r, wages w, and capital
    prices p_k.
    Inputs:
        r_path   = [T,] vector, real interest rates
        w_path   = [T,] vector, real wage rates
        X_path   = [M,T] matrix, output from each industry
        p_k_path = [M,T] matrix, capital prices from each industry
        A        = [M,T] matrix, total factor productivity for all
                   industries
        gamma    = [M,T] matrix, capital shares of income for all
                   industries
        epsilon  = [M,T] matrix, elasticities of substitution between
                   capital and labor for all industries
        delta    = [M,T] matrix, model period depreciation rates for all
                   industries
        M        = integer, number of production industries
        T        = integer, number of periods on the time path
    Functions called: None
    Objects in function:
        lag_p_k  = [M,T] matrix, capital price one period earlier
        cap_cost = [M,T] matrix, net rental cost of capital relative to
                   the wage
        inv_eps  = [M,T] matrix, 1 / epsilon
        inner    = [M,T] matrix, CES term inside the outer exponent
    Returns: K_path = [M,T] matrix, capital demand of all industries
    '''
    # Capital price one period earlier; the pre-path price is assumed
    # equal to its initial-period value.
    lag_p_k = np.insert(p_k_path[:, :T-1], 0,
                        (p_k_path[:, 0]).reshape(M,), axis=1)
    cap_cost = ((lag_p_k * (1 + r_path)) - (p_k_path * (1 - delta))) / w_path
    inv_eps = 1 / epsilon
    inner = ((gamma ** inv_eps) +
             ((1 - gamma) ** inv_eps) *
             (((1 - gamma) / gamma) ** ((epsilon - 1) / epsilon)) *
             (cap_cost ** (epsilon - 1)))
    return (X_path / A) * (inner ** (epsilon / (1 - epsilon)))
def get_L_path(r_path, w_path, K_path, p_k_path, gamma, epsilon, delta, M, T):
    '''
    Generates the [M,T] time path of labor demand by industry implied by
    the capital path K, capital prices p_k, interest rates r, and
    wages w.
    Inputs:
        K_path   = [M,T] matrix, time path of capital demand by industry
        r_path   = [T,] vector, time path of the real interest rate
        w_path   = [T,] vector, time path of the real wage
        p_k_path = [M,T] matrix, capital prices from each industry
        gamma    = [M,T] matrix, capital shares of income for all
                   industries
        epsilon  = [M,T] matrix, elasticities of substitution between
                   capital and labor for all industries
        delta    = [M,T] matrix, rates of physical depreciation for all
                   industries
        M        = integer, number of production industries
        T        = integer, number of periods on the time path
    Functions called: None
    Objects in function:
        lag_p_k  = [M,T] matrix, capital price one period earlier
        cap_cost = [M,T] matrix, net rental cost of capital relative to
                   the wage
    Returns: L_path = [M,T] matrix, labor demand from each industry
    '''
    # Capital price one period earlier; the pre-path price is assumed
    # equal to its initial-period value.
    lag_p_k = np.insert(p_k_path[:, :T-1], 0,
                        (p_k_path[:, 0]).reshape(M,), axis=1)
    cap_cost = ((lag_p_k * (1 + r_path)) - (p_k_path * (1 - delta))) / w_path
    labor_per_capital = ((1 - gamma) / gamma) * (cap_cost ** epsilon)
    return K_path * labor_per_capital
def get_K_over_X(params, p_path, p_k_path, r_path):
    '''
    Generates the time path of capital-output ratios by industry from
    the firms' FOC for capital demand only (contrast with
    get_K_over_X_pf_path, which also uses the labor FOC and production
    function).
    Inputs:
        params   = length 6 tuple, (A, gamma, epsilon, delta, M, T)
                   A       = [M,T] matrix, total factor productivity for
                             each industry
                   gamma   = [M,T] matrix, capital share of income for
                             each industry
                   epsilon = [M,T] matrix, elasticity of substitution
                             between capital and labor for each industry
                   delta   = [M,T] matrix, capital depreciation rate for
                             each industry
                   M       = integer, number of production industries
                   T       = integer, number of periods on the time path
        p_path   = [M,T] matrix, industry output prices
        p_k_path = [M,T] matrix, industry capital prices
        r_path   = [T,] vector, time path of real interest rates
    Functions called: None
    Objects in function:
        lag_p_k     = [M,T] matrix, capital prices one period earlier
        rental_cost = [M,T] matrix, net rental cost of one unit of
                      capital
    Returns: K_over_X = [M,T] matrix, capital-output ratio by industry
    '''
    A, gamma, epsilon, delta, M, T = params
    # Capital price one period earlier; the pre-path price is assumed
    # equal to its initial-period value.
    lag_p_k = np.insert(p_k_path[:, :T-1], 0,
                        (p_k_path[:, 0]).reshape(M,), axis=1)
    rental_cost = (lag_p_k*(1+r_path)) - (p_k_path*(1-delta))
    return gamma*(A**(epsilon-1))*((p_path/rental_cost)**(epsilon))
def get_L_over_X(params, p_path, w_path):
    '''
    Generates the labor-output ratio by industry from the firms' FOC
    for labor demand.
    Inputs:
        params = length 4 tuple, (A, gamma, epsilon, delta)
                 A       = [M,T] matrix, total factor productivity for
                           each industry
                 gamma   = [M,T] matrix, capital share of income for
                           each industry
                 epsilon = [M,T] matrix, elasticity of substitution
                           between capital and labor for each industry
                 delta   = [M,T] matrix, capital depreciation rate for
                           each industry (not used in this formula)
        p_path = [M,T] matrix, industry output prices
        w_path = [T,] vector, wage rates
    Functions called: None
    Objects in function:
        rel_price = [M,T] matrix, output price relative to the wage
    Returns: L_over_X = [M,T] matrix, labor-output ratio by industry
    '''
    A, gamma, epsilon, delta = params
    rel_price = p_path / w_path
    return (1-gamma)*(A**(epsilon-1))*(rel_price**epsilon)
def _tp_line_plot(series, title, ylabel):
    '''
    Plotting helper: draw one or more equal-length time series against
    model periods 1..len(series[0]) with the styling shared by all the
    TP line charts.
    '''
    periods = len(series[0])
    tvec = np.linspace(1, periods, periods)
    minorLocator = MultipleLocator(1)
    fig, ax = plt.subplots()
    for line in series:
        plt.plot(tvec, line)
    # for the minor ticks, use no labels; default NullFormatter
    ax.xaxis.set_minor_locator(minorLocator)
    plt.grid(b=True, which='major', color='0.65', linestyle='-')
    plt.title(title)
    plt.xlabel(r'Period $t$')
    plt.ylabel(ylabel)
    plt.show()


def _tp_surface_plot(path_mat, tlen, sgrid, zlabel, S):
    '''
    Plotting helper: draw a 3D surface of an age-by-period distribution
    (savings or consumption) with the styling shared by the TP surface
    charts.
    '''
    tgrid = np.linspace(1, tlen, tlen)
    tmat, smat = np.meshgrid(tgrid, sgrid)
    cmap = matplotlib.cm.get_cmap('summer')
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    ax.set_xlabel(r'period-$t$')
    ax.set_ylabel(r'age-$s$')
    ax.set_zlabel(zlabel)
    strideval = max(int(1), int(round(S/10)))
    ax.plot_surface(tmat, smat, path_mat, rstride=strideval,
                    cstride=strideval, cmap=cmap)
    plt.show()


def TP(params, p_path_init, r_path_init, w_path_init, K_ss, X_ss, Gamma1, c_bar, A,
  gamma, epsilon, delta, xi, pi, I, M, S, n, graphs):
    '''
    Generates the equilibrium time path for all endogenous objects from
    the initial state (Gamma1) to the steady state, given guessed paths
    for industry output prices (p_path_init), the interest rate
    (r_path_init), and the wage (w_path_init).
    Inputs:
        params      = length 9 tuple, (S, T, alpha, beta, sigma, p_ss,
                      r_ss, w_ss, tp_tol)
                      S      = integer in [3,80], periods a household lives
                      T      = integer > S, periods until the steady state
                      alpha  = [I,T+S-1] matrix, expenditure shares on each
                               good along the time path
                      beta   = scalar in [0,1), per-period discount factor
                      sigma  = scalar > 0, coefficient of relative risk
                               aversion
                      p_ss   = [M,] vector, SS output prices by industry
                      r_ss   = scalar > 0, steady-state interest rate
                      w_ss   = scalar > 0, steady-state wage
                      tp_tol = scalar > 0, fsolve tolerance in TP solution
        p_path_init = [M,T+S-1] matrix, guessed output-price time path
        r_path_init = [T+S-1,] vector, guessed interest-rate time path
        w_path_init = [T+S-1,] vector, guessed wage time path
        K_ss        = [M,] vector, steady-state capital stock by industry
        X_ss        = [M,] vector, steady-state industry output levels
                      (accepted for interface compatibility; not used here)
        Gamma1      = [S-1,] vector, initial period savings distribution
        c_bar       = [I,T+S-1] matrix, minimum consumption of each good
        A, gamma, epsilon, delta = [M,T+S-1] matrices, industry TFP,
                      capital share, elasticity of substitution, and
                      depreciation rate
        xi          = [M,M] matrix, fraction of industry j's capital that
                      comes from industry i's output
        pi          = [I,M] matrix, fraction of consumption good i
                      produced by industry m
        n           = [S,] vector, exogenous labor supply n_{s}
        graphs      = boolean, =True to plot the TP objects
    Functions called:
        get_p_k_path, get_p_c_path, firm.get_p_tilde, get_cbepath,
        firm.get_C, get_X_path, get_K_path, get_L_path, get_p_path
    Returns: (r_path, w_path, p_path, p_k_path, p_tilde_path, b_path,
              c_tilde_path, c_path, eulerr_path, C_path, X_path, K_path,
              L_path, MCKerr_path, MCLerr_path, RCdiff_path, p_err_path)
    '''
    (S, T, alpha, beta, sigma, p_ss, r_ss, w_ss, tp_tol) = params
    # Extend the guessed r and w paths beyond period T with their
    # steady-state values.
    r_path = np.zeros(T+S-1)
    w_path = np.zeros(T+S-1)
    r_path[:T] = r_path_init[:T]
    w_path[:T] = w_path_init[:T]
    r_path[T:] = r_ss
    w_path[T:] = w_ss
    # BUG FIX: p_path was previously referenced without ever being
    # created (NameError).  Allocate it the same way TP_fsolve does, and
    # slice p_path_init to its first T columns to match the r/w handling.
    p_path = np.zeros((M, T+S-1))
    p_path[:, :T] = p_path_init[:, :T]
    p_path[:, T:] = np.ones((M, S-1))*np.tile(np.reshape(p_ss, (M, 1)), (1, S-1))
    p_k_path = get_p_k_path(p_path, xi, M)
    p_c_path = get_p_c_path(p_path, pi, I)
    p_tilde_path = firm.get_p_tilde(alpha, p_c_path)
    # Solve the household problem along the path: savings, composite
    # consumption, good-by-good consumption, and Euler errors.
    cbe_params = (S, T, alpha, beta, sigma, tp_tol)
    b_path, c_tilde_path, c_path, eulerr_path = get_cbepath(cbe_params,
        Gamma1, r_path, w_path, p_c_path, p_tilde_path, c_bar, I,
        n)
    C_path = firm.get_C(c_path[:, :T, :])
    # Solve the firm side: output, capital demand, and labor demand.
    X_params = (T, K_ss)
    X_path = get_X_path(X_params, r_path[1:T+1], w_path[:T], C_path[:, :T],
                        p_k_path[:, :T], A[:, :T], gamma[:, :T],
                        epsilon[:, :T], delta[:, :T], xi, pi, I, M)
    K_path = get_K_path(r_path[1:T+1], w_path[:T], X_path, p_k_path[:, :T],
                        A[:, :T], gamma[:, :T], epsilon[:, :T], delta[:, :T], M, T)
    L_path = get_L_path(r_path[1:T+1], w_path[:T], K_path, p_k_path[:, :T],
                        gamma[:, :T], epsilon[:, :T], delta[:, :T], M, T)
    # Calculate the time path of firm values
    V_path = p_k_path[:, :T]*K_path
    # Checking resource constraint along the path:
    Inv_path = np.zeros((M, T))
    X_inv_path = np.zeros((M, T))
    X_c_path = np.zeros((M, T))
    Inv_path[:, :T-1] = K_path[:, 1:] - (1-delta[:, :T-1])*K_path[:, :T-1]
    # Final-period investment closes the path back to the SS capital stock.
    Inv_path[:, T-1] = K_ss - (1-delta[:, T-1])*K_path[:, T-1]
    for t in range(0, T):
        X_inv_path[:, t] = np.dot(Inv_path[:, t], xi)
        X_c_path[:, t] = np.dot(np.reshape(C_path[:, t], (1, I)), pi)
    RCdiff_path = (X_path - X_c_path - X_inv_path)
    # Checking market clearing conditions
    MCKerr_path = b_path[:, :T].sum(axis=0) - V_path.sum(axis=0)
    MCLerr_path = n.sum() - L_path.sum(axis=0)
    p_params = (A[:, :T], gamma[:, :T], epsilon[:, :T], delta[:, :T], K_ss, M, T)
    p_err_path = p_path[:, :T] - get_p_path(p_params, r_path[1:T+1], w_path[:T],
        p_path[:, :T], p_k_path[:, :T+1], K_path, X_path)
    if graphs:
        # Aggregate capital stock (industry 0's line was disabled in the
        # original code, so only industry 1 is plotted).
        _tp_line_plot([K_path[1, :T]],
                      'Time path for aggregate capital stock',
                      r'Aggregate capital $K_{t}$')
        # Aggregate output (GDP)
        _tp_line_plot([X_path[0, :T], X_path[1, :T]],
                      'Time path for aggregate output (GDP)',
                      r'Aggregate output $X_{t}$')
        # Aggregate consumption
        _tp_line_plot([C_path[0, :T], C_path[1, :T]],
                      'Time path for aggregate consumption',
                      r'Aggregate consumption $C_{t}$')
        # Real wage against its steady-state value
        _tp_line_plot([w_path[:T], np.ones(T)*w_ss],
                      'Time path for real wage',
                      r'Real wage $w_{t}$')
        # Real interest rate against its steady-state value
        _tp_line_plot([r_path[:T], np.ones(T)*r_ss],
                      'Time path for real interest rate',
                      r'Real interest rate $r_{t}$')
        # Resource-constraint differences.  BUG FIX: this plot referred
        # to an undefined name ResmDiff; the computed object is
        # RCdiff_path.
        _tp_line_plot([RCdiff_path[0, :T-1], RCdiff_path[1, :T-1]],
                      'Time path for resource constraint',
                      r'RC Difference')
        # Market clearing condition differences
        _tp_line_plot([MCKerr_path[:T], MCLerr_path[:T]],
                      'Time path for resource constraint',
                      r'RC Difference')
        # Individual savings distribution
        _tp_surface_plot(b_path[:, :T], T, np.linspace(2, S, S - 1),
                         r'individual savings $b_{s,t}$', S)
        # Individual consumption distribution
        _tp_surface_plot(c_tilde_path[:, :T-1], T-1, np.linspace(1, S, S),
                         r'individual consumption $c_{s,t}$', S)
    return (r_path, w_path, p_path, p_k_path, p_tilde_path, b_path, c_tilde_path, c_path,
        eulerr_path, C_path, X_path, K_path, L_path, MCKerr_path,
        MCLerr_path, RCdiff_path, p_err_path)
def TP_fsolve(guesses, params, K_ss, X_ss, Gamma1, c_bar, A,
gamma, epsilon, delta, xi, pi, I, M, S, n, graphs):
'''
Generates equilibrium time path for all endogenous objects from
initial state (Gamma1) to the steady state using initial guesses
r_path_init and w_path_init.
Inputs:
params = length 11 tuple, (S, T, alpha, beta, sigma, r_ss,
w_ss, maxiter, mindist, xi, tp_tol)
S = integer in [3,80], number of periods an individual
lives
T = integer > S, number of time periods until steady
state
I = integer, number unique consumption goods
M = integer, number unique production industires
alpha = [I,T+S-1] matrix, expenditure share on each good
along the time path
beta = scalar in [0,1), discount factor for each model
period
sigma = scalar > 0, coefficient of relative risk aversion
r_ss = scalar > 0, steady-state interest rate
w_ss = scalar > 0, steady-state wage
tp_tol = scalar > 0, tolerance level for fsolve's in TP solution
r_path_init = [T+S-1,] vector, initial guess for the time path of
the interest rate
w_path_init = [T+S-1,] vector, initial guess for the time path of
the wage
X_ss = [M,] vector, steady-state industry output levels
Gamma1 = [S-1,] vector, initial period savings distribution
c_bar = [M,T+S-1] matrix, minimum consumption values for all
goods
A = [M,T+S-1] matrix, total factor productivity values for
all industries
gamma = [M,T+S-1] matrix, capital shares of income for all
industries
epsilon = [M,T+S-1] matrix, elasticities of substitution between
capital and labor for all industries
delta = [M,T+S-1] matrix, model period depreciation rates for
all industries
xi = [M,M] matrix, element i,j gives the fraction of capital used by
industry j that comes from the output of industry i
pi = [I,M] matrix, element i,j gives the fraction of consumption
n = [S,] vector, exogenous labor supply n_{s}
graphs = boolean, =True if want graphs of TPI objects
Functions called:
firm.get_p
firm.get_p_tilde
get_cbepath
Objects in function:
start_time = scalar, current processor time in seconds (float)
r_path_new = [T+S-2,] vector, new time path of the interest
rate implied by household and firm optimization
w_path_new = [T+S-2,] vector, new time path of the wage
implied by household and firm optimization
p_params = length 4 tuple, objects to be passed to
get_p_path function:
(A, gamma, epsilon, delta)
p_path = [M, T+S-1] matrix, time path of industry output prices
p_c_path = [I, T+S-1] matrix, time path of consumption good prices
p_tilde_path = [T+S-1] vector, time path of composite price
r_params = length 3 tuple, parameters passed in to get_r
w_params = length 2 tuple, parameters passed in to get_w
cbe_params = length 5 tuple. parameters passed in to
get_cbepath
r_path = [T+S-2,] vector, equilibrium time path of the
interest rate
w_path = [T+S-2,] vector, equilibrium time path of the
real wage
c_tilde_path = [S, T+S-2] matrix, equilibrium time path values
of individual consumption c_{s,t}
b_path = [S-1, T+S-2] matrix, equilibrium time path values
of individual savings b_{s+1,t+1}
EulErrPath = [S-1, T+S-2] matrix, equilibrium time path values
of Euler errors corresponding to individual
savings b_{s+1,t+1} (first column is zeros)
K_path_constr = [T+S-2,] boolean vector, =True if K_t<=0
K_path = [T+S-2,] vector, equilibrium time path of the
aggregate capital stock
X_params = length 2 tuple, parameters to be passed to get_X
X_path = [M,T+S-2] matrix, equilibrium time path of
industry output
C_path = [I, T+S-2] matrix, equilibrium time path of
aggregate consumption
Returns: b_path, c_tilde_path, w_path, r_path, K_path, X_path, Cpath,
EulErr_path
'''
(S, T, alpha, beta, sigma, p_ss, r_ss, w_ss, tp_tol) = params
r_path = np.zeros(T+S-1)
w_path = np.zeros(T+S-1)
p_path = np.zeros((M,T+S-1))
r_path[:T] = guesses[0:T].reshape(T)
w_path[:T] = guesses[T:2*T].reshape(T)
r_path[T:] = r_ss
w_path[T:] = w_ss
p_path[:,:T] = guesses[2*T:].reshape(M,T)
p_path[:,T:] = np.ones((M,S-1))*np.tile(np.reshape(p_ss,(M,1)),(1,S-1))
p_k_path = get_p_k_path(p_path, xi, M)
p_c_path = get_p_c_path(p_path, pi, I)
p_tilde_path = firm.get_p_tilde(alpha, p_c_path)
cbe_params = (S, T, alpha, beta, sigma, tp_tol)
b_path, c_tilde_path, c_path, eulerr_path = get_cbepath(cbe_params,
Gamma1, r_path, w_path, p_c_path, p_tilde_path, c_bar, I,
n)
C_path = firm.get_C(c_path[:, :T, :])
X_params = (T, K_ss)
# X_path = get_X_path(X_params, r_path[1:T+1], w_path[:T], C_path[:,:T], p_k_path[:,:T], A[:,:T], gamma[:,:T],
# epsilon[:,:T], delta[:,:T], xi, pi, I, M)
# K_path = get_K_path(r_path[1:T+1], w_path[:T], X_path, p_k_path[:,:T], A[:,:T], gamma[:,:T], epsilon[:,:T], delta[:,:T], M, T)
# L_path = get_L_path(r_path[1:T+1], w_path[:T], K_path, p_k_path[:,:T], gamma[:,:T], epsilon[:,:T], delta[:,:T], M, T)
# # Calculate the time path of firm values
# V_path = p_k_path[:,:T]*K_path
# # Check market clearing in each period
# K_market_error = b_path[:, :T].sum(axis=0) - V_path.sum(axis=0)
# L_market_error = n.sum() - L_path[:, :].sum(axis=0)
# # Check errors between guessed and implied prices
# p_params = (A[:,:T], gamma[:,:T], epsilon[:,:T], delta[:,:T], K_ss, M, T)
# p_error = p_path[:,:T] - get_p_path(p_params, r_path[1:T+1], w_path[:T], p_path[:,:T], p_k_path[:,:T+1], K_path, X_path)
X_path = get_X_path(X_params, r_path[:T], w_path[:T], C_path[:,:T], p_k_path[:,:T], A[:,:T], gamma[:,:T],
epsilon[:,:T], delta[:,:T], xi, pi, I, M)
K_path = get_K_path(r_path[:T], w_path[:T], X_path, p_k_path[:,:T], A[:,:T], gamma[:,:T], epsilon[:,:T], delta[:,:T], M, T)
L_path = get_L_path(r_path[:T], w_path[:T], K_path, p_k_path[:,:T], gamma[:,:T], epsilon[:,:T], delta[:,:T], M, T)
# Calculate the time path of firm values
V_path = p_k_path[:,:T]*K_path
# Check market clearing in each period
K_market_error = b_path[:, :T].sum(axis=0) - V_path.sum(axis=0)
L_market_error = n.sum() - L_path[:, :].sum(axis=0)
# Check errors between guessed and implied prices
p_params = (A[:,:T], gamma[:,:T], epsilon[:,:T], delta[:,:T], K_ss, M, T)
implied_p_path = get_p_path(p_params, r_path[:T], w_path[:T], p_path[:,:T], p_k_path[:,:T+1], K_path, X_path)
p_error = p_path[:,:T] - implied_p_path[:,:T]
# Checking resource constraint along the path:
Inv_path = np.zeros((M,T))
X_inv_path = np.zeros((M,T))
X_c_path = np.zeros((M,T))
Inv_path[:,:T-1] = K_path[:,1:] - (1-delta[:,:T-1])*K_path[:,:T-1]
Inv_path[:,T-1] = K_ss - (1-delta[:,T-1])*K_path[:,T-1]
for t in range(0,T):
X_inv_path[:,t] = np.dot(Inv_path[:,t],xi)
X_c_path[:,t] = np.dot(np.reshape(C_path[:,t],(1,I)),pi)
RCdiff_path = (X_path - X_c_path - X_inv_path)
print 'the max RC diff is: ', np.absolute(RCdiff_path).max(axis=1)
# Check and punish constraing violations
mask1 = r_path[:T] <= 0
mask2 = w_path[:T] <= 0
mask3 = np.isnan(r_path[:T])
mask4 = np.isnan(w_path[:T])
K_market_error[mask1] = 1e14
L_market_error[mask2] = 1e14
K_market_error[mask3] = 1e14
L_market_error[mask4] = 1e14
mask5 = p_path[:,:T] <= 0
mask6 = np.isnan(p_path[:,:T])
p_error[mask5] = 1e14
p_error[mask6] = 1e14
print 'max capital market clearing distance: ', np.absolute(K_market_error).max()
print 'max labor market clearing distance: ', np.absolute(L_market_error).max()
print 'min capital market clearing distance: ', np.absolute(K_market_error).min()
print 'min labor market clearing distance: ', np.absolute(L_market_error).min()
print 'the max pricing error is: ', np.absolute(p_error).max()
print 'the min pricing error is: ', np.absolute(p_error).min()
print 'maximum euler error is: ', np.absolute(eulerr_path).max()
if np.isnan(np.absolute(eulerr_path).max()):
print 'euler nan, price guess is: ', p_path[:,:T]
print 'euler nan, r guess is: ', r_path[:T]
print 'euler nan, r guess is: ', w_path[:T]
V_alt_path = (((p_path[:,:T-1]*X_path[:,:T-1] - w_path[:T-1]*L_path[:,:T-1] -
p_k_path[:,:T-1]*(K_path[:,1:T]-(1-delta[:,:T-1])*K_path[:,:T-1])) + (p_k_path[:,1:T]*K_path[:,1:T])) /(1+r_path[1:T]))
V_path = p_k_path[:,:T]*K_path[:,:T]
print 'the max V error is: ', np.absolute(V_alt_path-V_path[:,:T-1]).max()
print 'the min V error is: ', np.absolute(V_alt_path-V_path[:,:T-1]).min()
# get implied r:
r_implied = ((p_path[:,:T]/p_k_path[:,:T])*((A[:,:T]**((epsilon[:,:T]-1)/epsilon[:,:T]))*(((gamma[:,:T]*X_path)/K_path)**(1/epsilon[:,:T])))
+ (1-delta[:,:T])*(p_k_path[:,1:T+1]/p_path[:,1:T+1]) - (p_k_path[:,:T]/p_path[:,1:T+1]))
print 'the max r error is: ', np.absolute(r_implied-r_path[:T]).max()
print 'the min r error is: ', np.absolute(r_implied-r_path[:T]).min()
errors = np.insert(np.reshape(p_error,(T*M)),0,np.append(K_market_error, L_market_error))
return errors
| 43.048061 | 148 | 0.57916 |
c72fc1e801c0745013ef239e9d4b49f2f6d085c9 | 1,978 | py | Python | cogs/images.py | L3NNY0969/DogeBot | fe37cac06b739088aa563f35a1b8af5ba53607c7 | [
"MIT"
] | null | null | null | cogs/images.py | L3NNY0969/DogeBot | fe37cac06b739088aa563f35a1b8af5ba53607c7 | [
"MIT"
] | null | null | null | cogs/images.py | L3NNY0969/DogeBot | fe37cac06b739088aa563f35a1b8af5ba53607c7 | [
"MIT"
] | null | null | null | import discord
from discord.ext import commands
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
class images():
    '''Image-generation and avatar commands for the bot.'''

    def __init__(self, bot):
        self.bot = bot

    @commands.command()
    async def rip(self, ctx, user: discord.Member = None):
        """Rip dat user"""
        # Default to the command invoker when no member is mentioned.
        user = user or ctx.author
        if user.id == 454285151531433984:
            # Presumably the bot's own account ID -- refuse to rip it.
            await ctx.send("Dont rip meh!")
            return
        tombstone = Image.open("rip.png")
        pen = ImageDraw.Draw(tombstone)
        headline = ImageFont.truetype("American Captain.otf", 100)
        pen.text((131, 399), f"{user.name}", (0, 0, 0), font=headline)
        # Saved under the user's ID so concurrent invocations don't clash.
        tombstone.save(f'{user.id}.png')
        await ctx.send(file=discord.File(f'{user.id}.png'))

    @commands.command()
    async def achievement(self, ctx, *, text=None):
        """Write a achievement"""
        if text is None:
            # No text given: reply with a usage hint instead.
            usage = discord.Embed(description="**achievement <text>**", color=0x9b9dff)
            await ctx.send(embed=usage)
            return
        plaque = Image.open("hqdefault.png")
        pen = ImageDraw.Draw(plaque)
        mc_font = ImageFont.truetype("Minecraft.ttf", 23)
        pen.text((90, 182), text, (255, 255, 255), font=mc_font)
        plaque.save(f'{ctx.author.id}.png')
        await ctx.send(file=discord.File(f'{ctx.author.id}.png'))

    @commands.command(name="avatar", aliases=['av'])
    @commands.cooldown(1, 3, commands.BucketType.user)
    async def avatar_command(self, ctx, *, member: discord.Member = None):
        '''Gets someones pfp'''
        member = member or ctx.author
        # NOTE(review): assumes avatar_url behaves like a str (older
        # discord.py versions) -- confirm against the installed version.
        av_url = member.avatar_url
        if ".gif" in av_url:
            av_url += "&f=.gif"
        card = discord.Embed(title="Avatar", url=av_url, color=0x9b9dff)
        card.set_author(name=str(member), icon_url=av_url)
        card.set_image(url=av_url)
        await ctx.send(embed=card)
def setup(bot):
    # Entry point called by discord.py's extension loader
    # (bot.load_extension); registers this cog on the bot.
    bot.add_cog(images(bot))
| 36.62963 | 85 | 0.566229 |
41028f46de3042b3ee5827a7343ddb35a18c18f1 | 40 | py | Python | hello-world.py | tsaipavant/first_project | dac3cc5413f1ac54dac43029e2589fed005c6013 | [
"Apache-2.0"
] | null | null | null | hello-world.py | tsaipavant/first_project | dac3cc5413f1ac54dac43029e2589fed005c6013 | [
"Apache-2.0"
] | null | null | null | hello-world.py | tsaipavant/first_project | dac3cc5413f1ac54dac43029e2589fed005c6013 | [
"Apache-2.0"
] | null | null | null | print("Om Sairam")
print("Hello World")
| 13.333333 | 20 | 0.7 |
4c174e301add0292787e7a151af91af1c40b9b14 | 40,580 | py | Python | salt/modules/junos.py | edusperoni/salt | c9bfb00c2a81a9d4734fa7d1aa80e893d5ef790b | [
"Apache-2.0"
] | 12 | 2015-01-21T00:18:25.000Z | 2021-07-11T07:35:26.000Z | salt/modules/junos.py | edusperoni/salt | c9bfb00c2a81a9d4734fa7d1aa80e893d5ef790b | [
"Apache-2.0"
] | 1 | 2015-10-05T22:03:10.000Z | 2015-10-05T22:03:10.000Z | salt/modules/junos.py | edusperoni/salt | c9bfb00c2a81a9d4734fa7d1aa80e893d5ef790b | [
"Apache-2.0"
] | 12 | 2015-01-05T09:50:42.000Z | 2019-08-19T01:43:40.000Z | # -*- coding: utf-8 -*-
'''
Module to interact with Junos devices.
:maturity: new
:dependencies: junos-eznc, jxmlease
.. note::
Those who wish to use junos-eznc (PyEZ) version >= 2.1.0, must
use the latest salt code from github until the next release.
Refer to :mod:`junos <salt.proxy.junos>` for information on connecting to junos proxy.
'''
# Import Python libraries
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
try:
from lxml import etree
except ImportError:
from salt._compat import ElementTree as etree
# Import Salt libs
import salt.utils.files
import salt.utils.json
import salt.utils.stringutils
from salt.ext import six
# Juniper interface libraries
# https://github.com/Juniper/py-junos-eznc
try:
# pylint: disable=W0611
from jnpr.junos import Device
from jnpr.junos.utils.config import Config
from jnpr.junos.utils.sw import SW
from jnpr.junos.utils.scp import SCP
import jnpr.junos.utils
import jnpr.junos.cfg
import jxmlease
# pylint: enable=W0611
HAS_JUNOS = True
except ImportError:
HAS_JUNOS = False
# Set up logging
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'junos'
__proxyenabled__ = ['junos']
def __virtual__():
    '''
    Only load this module when the Junos adapter libraries imported above
    are available and a proxymodule entry exists in the opts dictionary.
    '''
    # Guard clause: bail out early with the loader error tuple.
    if not (HAS_JUNOS and 'proxy' in __opts__):
        return (False, 'The junos module could not be loaded: '
                'junos-eznc or jxmlease or proxy could not be loaded.')
    return __virtualname__
def facts_refresh():
    '''
    Reload the facts dictionary from the device. Usually only needed if,
    the device configuration is changed by some other actor.
    This function will also refresh the facts stored in the salt grains.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.facts_refresh
    '''
    # PyEZ Device handle held open by the junos proxy minion.
    conn = __proxy__['junos.conn']()
    ret = dict()
    ret['out'] = True
    try:
        conn.facts_refresh()
    except Exception as exception:
        ret['message'] = 'Execution failed due to "{0}"'.format(exception)
        ret['out'] = False
        return ret
    ret['facts'] = __proxy__['junos.get_serialized_facts']()
    try:
        __salt__['saltutil.sync_grains']()
    except Exception as exception:
        # Grain sync failure is deliberately non-fatal: the refreshed facts
        # are still returned to the caller, only the grains may be stale.
        log.error('Grains could not be updated due to "%s"', exception)
    return ret
def facts():
    '''
    Displays the facts gathered during the connection.
    These facts are also stored in Salt grains.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.facts
    '''
    try:
        serialized = __proxy__['junos.get_serialized_facts']()
    except Exception as exception:
        return {'message': 'Could not display facts due to "{0}"'.format(
            exception),
            'out': False}
    return {'facts': serialized, 'out': True}
def rpc(cmd=None, dest=None, format='xml', **kwargs):
    '''
    This function executes the rpc provided as arguments on the junos device.
    The returned data can be stored in a file.

    Usage:

    .. code-block:: bash

        salt 'device' junos.rpc 'get_config' '/var/log/config.txt' 'text' filter='<configuration><system/></configuration>'
        salt 'device' junos.rpc 'get-interface-information' '/home/user/interface.xml' interface_name='lo0' terse=True
        salt 'device' junos.rpc 'get-chassis-inventory'

    Parameters:
      Required
        * cmd:
          The rpc to be executed. (default = None)
      Optional
        * dest:
          Destination file where the rpc output is stored. (default = None)
          Note that the file will be stored on the proxy minion. To push the
          files to the master use the salt's following execution module:
          :py:func:`cp.push <salt.modules.cp.push>`
        * format:
          The format in which the rpc reply is received from the device.
          (default = xml)
        * kwargs: keyworded arguments taken by rpc call like-
            * dev_timeout:
              Set NETCONF RPC timeout. Can be used for commands which
              take a while to execute. (default= 30 seconds)
            * filter:
              Only to be used with 'get-config' rpc to get specific configuration.
            * terse:
              Amount of information you want.
            * interface_name:
              Name of the interface whose information you want.
    '''
    conn = __proxy__['junos.conn']()
    ret = dict()
    ret['out'] = True
    if cmd is None:
        ret['message'] = 'Please provide the rpc to execute.'
        ret['out'] = False
        return ret
    # Collect the user-supplied options.  When invoked from the salt CLI the
    # extra kwargs arrive packed as the last element of __pub_arg; scheduled
    # jobs pass them flat, so the '__pub_*' bookkeeping keys are stripped.
    op = dict()
    if '__pub_arg' in kwargs:
        if kwargs['__pub_arg']:
            if isinstance(kwargs['__pub_arg'][-1], dict):
                op.update(kwargs['__pub_arg'][-1])
    elif '__pub_schedule' in kwargs:
        for key, value in six.iteritems(kwargs):
            if not key.startswith('__pub_'):
                op[key] = value
    else:
        op.update(kwargs)
    # Allow 'timeout' as an alias; default to the connection's own timeout.
    op['dev_timeout'] = six.text_type(op.pop('timeout', conn.timeout))

    if cmd in ['get-config', 'get_config']:
        # get-config takes an optional XML filter plus an options dict.
        filter_reply = None
        if 'filter' in op:
            filter_reply = etree.XML(op['filter'])
            del op['filter']

        op.update({'format': format})
        try:
            # Translate the dashed RPC name into the PyEZ method name.
            reply = getattr(
                conn.rpc,
                cmd.replace('-',
                            '_'))(filter_reply,
                                  options=op)
        except Exception as exception:
            ret['message'] = 'RPC execution failed due to "{0}"'.format(
                exception)
            ret['out'] = False
            return ret
    else:
        # Other RPCs take the options as plain keyword arguments and
        # expect dev_timeout as an integer.
        op['dev_timeout'] = int(op['dev_timeout'])
        if 'filter' in op:
            log.warning(
                'Filter ignored as it is only used with "get-config" rpc')
        try:
            reply = getattr(
                conn.rpc,
                cmd.replace('-',
                            '_'))({'format': format},
                                  **op)
        except Exception as exception:
            ret['message'] = 'RPC execution failed due to "{0}"'.format(
                exception)
            ret['out'] = False
            return ret

    if format == 'text':
        # Earlier it was ret['message']
        ret['rpc_reply'] = reply.text
    elif format == 'json':
        # Earlier it was ret['message']
        ret['rpc_reply'] = reply
    else:
        # Earlier it was ret['message']
        ret['rpc_reply'] = jxmlease.parse(etree.tostring(reply))

    # Optionally persist the reply on the proxy minion's filesystem.
    if dest:
        if format == 'text':
            write_response = reply.text
        elif format == 'json':
            write_response = salt.utils.json.dumps(reply, indent=1)
        else:
            write_response = etree.tostring(reply)
        with salt.utils.files.fopen(dest, 'w') as fp:
            fp.write(salt.utils.stringutils.to_str(write_response))
    return ret
def set_hostname(hostname=None, **kwargs):
    '''
    To set the name of the device.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.set_hostname salt-device

    Parameters:
      Required
        * hostname: The name to be set. (default = None)
      Optional
        * kwargs: Keyworded arguments which can be provided like-
            * dev_timeout:
              Set NETCONF RPC timeout. Can be used for commands
              which take a while to execute. (default = 30 seconds)
            * comment:
              Provide a comment to the commit. (default = None)
            * confirm:
              Provide time in minutes for commit confirmation. \
              If this option is specified, the commit will be rollbacked in \
              the given time unless the commit is confirmed.
    '''
    conn = __proxy__['junos.conn']()
    ret = dict()
    if hostname is None:
        ret['message'] = 'Please provide the hostname.'
        ret['out'] = False
        return ret
    # Unpack CLI-style kwargs (see rpc() for the __pub_arg convention).
    # NOTE(review): op is later forwarded verbatim to cu.commit(); confirm
    # that every documented key (e.g. dev_timeout) is a valid commit kwarg.
    op = dict()
    if '__pub_arg' in kwargs:
        if kwargs['__pub_arg']:
            if isinstance(kwargs['__pub_arg'][-1], dict):
                op.update(kwargs['__pub_arg'][-1])
    else:
        op.update(kwargs)

    # Added to recent versions of JunOs
    # Use text format instead
    set_string = 'set system host-name {0}'.format(hostname)
    try:
        # Load the one-line change into the candidate configuration.
        conn.cu.load(set_string, format='set')
    except Exception as exception:
        ret['message'] = 'Could not load configuration due to error "{0}"'.format(
            exception)
        ret['out'] = False
        return ret

    try:
        commit_ok = conn.cu.commit_check()
    except Exception as exception:
        ret['message'] = 'Could not commit check due to error "{0}"'.format(
            exception)
        ret['out'] = False
        return ret

    if commit_ok:
        try:
            conn.cu.commit(**op)
            ret['message'] = 'Successfully changed hostname.'
            ret['out'] = True
        except Exception as exception:
            ret['out'] = False
            ret['message'] = 'Successfully loaded host-name but commit failed with "{0}"'.format(
                exception)
            return ret
    else:
        # Candidate failed validation: discard the loaded change.
        ret['out'] = False
        ret[
            'message'] = 'Successfully loaded host-name but pre-commit check failed.'
        conn.cu.rollback()
    return ret
def commit(**kwargs):
    '''
    To commit the changes loaded in the candidate configuration.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.commit comment='Commiting via saltstack' detail=True
        salt 'device_name' junos.commit dev_timeout=60 confirm=10
        salt 'device_name' junos.commit sync=True dev_timeout=90


    Parameters:
      Optional
        * kwargs: Keyworded arguments which can be provided like-
            * dev_timeout:
              Set NETCONF RPC timeout. Can be used for commands which take a \
              while to execute. (default = 30 seconds)
            * comment:
              Provide a comment to the commit. (default = None)
            * confirm:
              Provide time in minutes for commit confirmation. If this option \
              is specified, the commit will be rollbacked in the given time \
              unless the commit is confirmed.
            * sync:
              On dual control plane systems, requests that the candidate\
              configuration on one control plane be copied to the other \
              control plane,checked for correct syntax, and committed on \
              both Routing Engines. (default = False)
            * force_sync:
              On dual control plane systems, force the candidate configuration
              on one control plane to be copied to the other control plane.
            * full:
              When set to True requires all the daemons to check and evaluate \
              the new configuration.
            * detail:
              When true return commit detail.
    '''

    conn = __proxy__['junos.conn']()
    ret = {}
    # Unpack CLI-style kwargs (see rpc() for the __pub_arg convention).
    op = dict()
    if '__pub_arg' in kwargs:
        if kwargs['__pub_arg']:
            if isinstance(kwargs['__pub_arg'][-1], dict):
                op.update(kwargs['__pub_arg'][-1])
    else:
        op.update(kwargs)

    op['detail'] = op.get('detail', False)

    try:
        commit_ok = conn.cu.commit_check()
    except Exception as exception:
        ret['message'] = 'Could not perform commit check due to "{0}"'.format(
            exception)
        ret['out'] = False
        return ret

    if commit_ok:
        try:
            # PyEZ returns the commit-results element when detail=True,
            # otherwise a boolean.
            commit = conn.cu.commit(**op)
            ret['out'] = True
            if commit:
                if op['detail']:
                    ret['message'] = jxmlease.parse(etree.tostring(commit))
                else:
                    ret['message'] = 'Commit Successful.'
            else:
                ret['message'] = 'Commit failed.'
                ret['out'] = False
        except Exception as exception:
            ret['out'] = False
            ret['message'] = \
                'Commit check succeeded but actual commit failed with "{0}"' \
                .format(exception)
    else:
        # Discard the candidate changes when the check fails.
        ret['out'] = False
        ret['message'] = 'Pre-commit check failed.'
        conn.cu.rollback()
    return ret
def rollback(id=0, **kwargs):
    '''
    To rollback the last committed configuration changes and commit the same.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.rollback 10


    Parameters:
      Optional
        * id:
          The rollback id value [0-49]. (default = 0)
        * kwargs: Keyworded arguments which can be provided like-
            * dev_timeout:
              Set NETCONF RPC timeout. Can be used for commands which
              take a while to execute. (default = 30 seconds)
            * comment:
              Provide a comment to the commit. (default = None)
            * confirm:
              Provide time in minutes for commit confirmation. If this option \
              is specified, the commit will be rollbacked in the given time \
              unless the commit is confirmed.
            * diffs_file:
              Path to the file where any diffs will be written. (default = None)

    '''
    ret = dict()
    conn = __proxy__['junos.conn']()
    # Unpack CLI-style kwargs (see rpc() for the __pub_arg convention).
    op = dict()
    if '__pub_arg' in kwargs:
        if kwargs['__pub_arg']:
            if isinstance(kwargs['__pub_arg'][-1], dict):
                op.update(kwargs['__pub_arg'][-1])
    else:
        op.update(kwargs)

    # BUGFIX: diffs_file is consumed locally and must be removed from op,
    # otherwise it gets forwarded to Config.commit(**op) below, which has
    # no such keyword argument and would raise a TypeError.
    write_diff = op.pop('diffs_file', None)

    try:
        ret['out'] = conn.cu.rollback(id)
    except Exception as exception:
        ret['message'] = 'Rollback failed due to "{0}"'.format(exception)
        ret['out'] = False
        return ret

    if ret['out']:
        ret['message'] = 'Rollback successful'
    else:
        ret['message'] = 'Rollback failed'
        return ret

    if write_diff is not None:
        diff = conn.cu.diff()
        if diff is not None:
            with salt.utils.files.fopen(write_diff, 'w') as fp:
                fp.write(salt.utils.stringutils.to_str(diff))
        else:
            log.info(
                'No diff between current configuration and \
                rollbacked configuration, so no diff file created')

    try:
        commit_ok = conn.cu.commit_check()
    except Exception as exception:
        ret['message'] = 'Could not commit check due to "{0}"'.format(
            exception)
        ret['out'] = False
        return ret

    if commit_ok:
        try:
            # Commit the rolled-back candidate so it becomes active.
            conn.cu.commit(**op)
            ret['out'] = True
        except Exception as exception:
            ret['out'] = False
            ret['message'] = \
                'Rollback successful but commit failed with error "{0}"'\
                .format(exception)
            return ret
    else:
        ret['message'] = 'Rollback succesfull but pre-commit check failed.'
        ret['out'] = False
    return ret
def diff(id=0):
    '''
    Gives the difference between the candidate and the current configuration.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.diff 3


    Parameters:
      Optional
        * id:
          The rollback id value [0-49]. (default = 0)
    '''
    device = __proxy__['junos.conn']()
    try:
        # Compare the candidate config against rollback revision `id`.
        return {'out': True, 'message': device.cu.diff(rb_id=id)}
    except Exception as exception:
        return {'out': False,
                'message': 'Could not get diff with error "{0}"'.format(
                    exception)}
def ping(dest_ip=None, **kwargs):
    '''
    To send ping RPC to a device.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.ping '8.8.8.8' count=5
        salt 'device_name' junos.ping '8.8.8.8' ttl=1 rapid=True


    Parameters:
      Required
        * dest_ip:
          The IP which is to be pinged. (default = None)
      Optional
        * kwargs: Keyworded arguments which can be provided like-
            * dev_timeout:
              Set NETCONF RPC timeout. Can be used for commands which
              take a while to execute. (default = 30 seconds)
            * rapid:
              Setting this to True executes ping at 100pps instead of 1pps. \
              (default = False)
            * ttl:
              Maximum number of IP routers (IP hops) allowed between source \
              and destination.
            * routing_instance:
              Name of the routing instance to use to send the ping.
            * interface:
              Interface used to send traffic out.
            * count:
              Number of packets to send. (default = 5)
    '''
    conn = __proxy__['junos.conn']()
    ret = dict()
    if dest_ip is None:
        ret['message'] = 'Please specify the destination ip to ping.'
        ret['out'] = False
        return ret

    # Unpack CLI-style kwargs (see rpc() for the __pub_arg convention).
    op = {'host': dest_ip}
    if '__pub_arg' in kwargs:
        if kwargs['__pub_arg']:
            if isinstance(kwargs['__pub_arg'][-1], dict):
                op.update(kwargs['__pub_arg'][-1])
    else:
        op.update(kwargs)

    # The ping RPC expects its numeric arguments as strings.
    op['count'] = six.text_type(op.pop('count', 5))
    if 'ttl' in op:
        op['ttl'] = six.text_type(op['ttl'])

    ret['out'] = True
    try:
        ret['message'] = jxmlease.parse(etree.tostring(conn.rpc.ping(**op)))
    except Exception as exception:
        ret['message'] = 'Execution failed due to "{0}"'.format(exception)
        ret['out'] = False
    return ret
def cli(command=None, format='text', **kwargs):
    '''
    Executes the CLI commands and returns the output in specified format. \
    (default is text) The output can also be stored in a file.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.cli 'show system commit'
        salt 'device_name' junos.cli 'show version' dev_timeout=40
        salt 'device_name' junos.cli 'show system alarms' 'xml' dest=/home/user/cli_output.txt


    Parameters:
      Required
        * command:
          The command that need to be executed on Junos CLI. (default = None)
      Optional
        * format:
          Format in which to get the CLI output. (text or xml, \
            default = 'text')
        * kwargs: Keyworded arguments which can be provided like-
            * dev_timeout:
              Set NETCONF RPC timeout. Can be used for commands which
              take a while to execute. (default = 30 seconds)
            * dest:
              The destination file where the CLI output can be stored.\
               (default = None)
    '''
    conn = __proxy__['junos.conn']()

    # Cases like salt 'device_name' junos.cli 'show system alarms' ''
    # In this case the format becomes '' (empty string). And reply is sent in xml
    # We want the format to default to text.
    if not format:
        format = 'text'

    ret = dict()
    if command is None:
        ret['message'] = 'Please provide the CLI command to be executed.'
        ret['out'] = False
        return ret

    # Unpack CLI-style kwargs (see rpc() for the __pub_arg convention).
    op = dict()
    if '__pub_arg' in kwargs:
        if kwargs['__pub_arg']:
            if isinstance(kwargs['__pub_arg'][-1], dict):
                op.update(kwargs['__pub_arg'][-1])
    else:
        op.update(kwargs)

    try:
        result = conn.cli(command, format, warning=False)
    except Exception as exception:
        ret['message'] = 'Execution failed due to "{0}"'.format(exception)
        ret['out'] = False
        return ret

    if format == 'text':
        ret['message'] = result
    else:
        # Non-text replies come back as an lxml element; serialize, then
        # convert to a plain dict for the returner.
        result = etree.tostring(result)
        ret['message'] = jxmlease.parse(result)

    if 'dest' in op and op['dest'] is not None:
        # Persist the raw output (text or serialized XML) on the proxy minion.
        with salt.utils.files.fopen(op['dest'], 'w') as fp:
            fp.write(salt.utils.stringutils.to_str(result))

    ret['out'] = True
    return ret
def shutdown(**kwargs):
    '''
    Shut down (power off) or reboot a device running Junos OS.
    This includes all Routing Engines in a Virtual Chassis or a dual Routing \
    Engine system.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.shutdown reboot=True
        salt 'device_name' junos.shutdown shutdown=True in_min=10
        salt 'device_name' junos.shutdown shutdown=True


    Parameters:
      Optional
        * kwargs:
            * shutdown:
              Set this to true if you want to shutdown the machine.
              (default=False, this is a safety mechanism so that the user does
              not accidentally shutdown the junos device.)
            * reboot:
              Whether to reboot instead of shutdown. (default=False)
            Note that either one of the above arguments has to be specified
            (shutdown or reboot) for this function to work.
            * at:
              Date and time the reboot should take place. The
              string must match the junos cli reboot syntax
              (To be used only if reboot=True)
            * in_min:
              Specify delay in minutes for shutdown
    '''
    conn = __proxy__['junos.conn']()
    ret = dict()
    # SW utility wraps the software-management RPCs (reboot/poweroff).
    sw = SW(conn)

    # Unpack CLI-style kwargs (see rpc() for the __pub_arg convention).
    op = dict()
    if '__pub_arg' in kwargs:
        if kwargs['__pub_arg']:
            if isinstance(kwargs['__pub_arg'][-1], dict):
                op.update(kwargs['__pub_arg'][-1])
    else:
        op.update(kwargs)

    # Safety gate: the user must explicitly ask for shutdown or reboot.
    if 'shutdown' not in op and 'reboot' not in op:
        ret['message'] = \
            'Provide either one of the arguments: shutdown or reboot.'
        ret['out'] = False
        return ret

    try:
        if 'reboot' in op and op['reboot']:
            shut = sw.reboot
        elif 'shutdown' in op and op['shutdown']:
            shut = sw.poweroff
        else:
            # Both flags present but falsy.
            ret['message'] = 'Nothing to be done.'
            ret['out'] = False
            return ret

        # Optional scheduling: delay in minutes, or an absolute time string.
        if 'in_min' in op:
            shut(in_min=op['in_min'])
        elif 'at' in op:
            shut(at=op['at'])
        else:
            shut()
        ret['message'] = 'Successfully powered off/rebooted.'
        ret['out'] = True
    except Exception as exception:
        ret['message'] = \
            'Could not poweroff/reboot beacause "{0}"'.format(exception)
        ret['out'] = False
    return ret
def install_config(path=None, **kwargs):
    '''
    Installs the given configuration file into the candidate configuration.
    Commits the changes if the commit checks or throws an error.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.install_config 'salt://production/network/routers/config.set'
        salt 'device_name' junos.install_config 'salt://templates/replace_config.conf' replace=True comment='Committed via SaltStack'
        salt 'device_name' junos.install_config 'salt://my_new_configuration.conf' dev_timeout=300 diffs_file='/salt/confs/old_config.conf' overwrite=True
        salt 'device_name' junos.install_config 'salt://syslog_template.conf' template_vars='{"syslog_host": "10.180.222.7"}'

    Parameters:
      Required
        * path:
          Path where the configuration/template file is present. If the file has a \
          '*.conf' extension,
          the content is treated as text format. If the file has a '*.xml' \
          extension,
          the content is treated as XML format. If the file has a '*.set' \
          extension,
          the content is treated as Junos OS 'set' commands.(default = None)
      Optional
        * kwargs: Keyworded arguments which can be provided like-
            * mode: The mode in which the configuration is locked.
              (Options: private, dynamic, batch, exclusive; default= exclusive)
            * dev_timeout:
              Set NETCONF RPC timeout. Can be used for commands which
              take a while to execute. (default = 30 seconds)
            * overwrite:
              Set to True if you want this file is to completely replace the\
              configuration file. (default = False)
            * replace:
              Specify whether the configuration file uses "replace:" statements.
              Those statements under the 'replace' tag will only be changed.\
              (default = False)
            * format:
              Determines the format of the contents.
            * update:
              Compare a complete loaded configuration against
              the candidate configuration. For each hierarchy level or
              configuration object that is different in the two configurations,
              the version in the loaded configuration replaces the version in the
              candidate configuration. When the configuration is later committed,
              only system processes that are affected by the changed configuration
              elements parse the new configuration. This action is supported from
              PyEZ 2.1 (default = False)
            * comment:
              Provide a comment to the commit. (default = None)
            * confirm:
              Provide time in minutes for commit confirmation.
              If this option is specified, the commit will be rollbacked in \
              the given time unless the commit is confirmed.
            * diffs_file:
              Path to the file where the diff (difference in old configuration
              and the committed configuration) will be stored.(default = None)
              Note that the file will be stored on the proxy minion. To push the
              files to the master use the salt's following execution module: \
              :py:func:`cp.push <salt.modules.cp.push>`
            * template_vars:
              Variables to be passed into the template processing engine in addition
              to those present in __pillar__, __opts__, __grains__, etc.
              You may reference these variables in your template like so:
              {{ template_vars["var_name"] }}
    '''
    conn = __proxy__['junos.conn']()
    ret = dict()
    ret['out'] = True

    if path is None:
        ret['message'] = \
            'Please provide the salt path where the configuration is present'
        ret['out'] = False
        return ret

    # Unpack CLI-style kwargs (see rpc() for the __pub_arg convention).
    op = dict()
    if '__pub_arg' in kwargs:
        if kwargs['__pub_arg']:
            if isinstance(kwargs['__pub_arg'][-1], dict):
                op.update(kwargs['__pub_arg'][-1])
    else:
        op.update(kwargs)

    template_vars = dict()
    if "template_vars" in op:
        template_vars = op["template_vars"]

    # Render the (possibly templated) config to a temp file on the minion.
    template_cached_path = salt.utils.files.mkstemp()
    __salt__['cp.get_template'](
        path,
        template_cached_path,
        template_vars=template_vars)

    if not os.path.isfile(template_cached_path):
        ret['message'] = 'Invalid file path.'
        ret['out'] = False
        return ret

    if os.path.getsize(template_cached_path) == 0:
        ret['message'] = 'Template failed to render'
        ret['out'] = False
        return ret

    write_diff = ''
    if 'diffs_file' in op and op['diffs_file'] is not None:
        write_diff = op['diffs_file']
        del op['diffs_file']

    op['path'] = template_cached_path

    # Infer the load format from the file extension unless given explicitly.
    if 'format' not in op:
        if path.endswith('set'):
            op['format'] = 'set'
        elif path.endswith('xml'):
            op['format'] = 'xml'
        else:
            op['format'] = 'text'
    # BUGFIX: bind template_format unconditionally.  Previously it was only
    # assigned when 'format' was absent from op, so the except clause below
    # raised NameError (masking the real load error) whenever the caller
    # supplied format= explicitly.
    template_format = op['format']

    # Translate the user-facing replace/overwrite flags into PyEZ load kwargs.
    if 'replace' in op and op['replace']:
        op['merge'] = False
        del op['replace']
    elif 'overwrite' in op and op['overwrite']:
        op['overwrite'] = True
    elif 'overwrite' in op and not op['overwrite']:
        op['merge'] = True
        del op['overwrite']

    db_mode = op.pop('mode', 'exclusive')
    with Config(conn, mode=db_mode) as cu:
        try:
            cu.load(**op)
        except Exception as exception:
            ret['message'] = 'Could not load configuration due to : "{0}"'.format(
                exception)
            ret['format'] = template_format
            ret['out'] = False
            return ret
        finally:
            # The rendered temp file is no longer needed once loaded.
            salt.utils.files.safe_rm(template_cached_path)

        config_diff = cu.diff()
        if config_diff is None:
            ret['message'] = 'Configuration already applied!'
            ret['out'] = True
            return ret

        commit_params = {}
        if 'confirm' in op:
            commit_params['confirm'] = op['confirm']
        if 'comment' in op:
            commit_params['comment'] = op['comment']

        try:
            check = cu.commit_check()
        except Exception as exception:
            ret['message'] = \
                'Commit check threw the following exception: "{0}"'\
                .format(exception)
            ret['out'] = False
            return ret

        if check:
            try:
                cu.commit(**commit_params)
                ret['message'] = 'Successfully loaded and committed!'
            except Exception as exception:
                ret['message'] = \
                    'Commit check successful but commit failed with "{0}"'\
                    .format(exception)
                ret['out'] = False
                return ret
        else:
            # Validation failed: report it and discard the candidate.
            ret['message'] = 'Loaded configuration but commit check failed.'
            ret['out'] = False
            cu.rollback()

        try:
            if write_diff and config_diff is not None:
                with salt.utils.files.fopen(write_diff, 'w') as fp:
                    fp.write(salt.utils.stringutils.to_str(config_diff))
        except Exception as exception:
            ret['message'] = 'Could not write into diffs_file due to: "{0}"'.format(
                exception)
            ret['out'] = False
    return ret
def zeroize():
    '''
    Resets the device to default factory settings

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.zeroize
    '''
    device = __proxy__['junos.conn']()
    try:
        # 'request system zeroize' wipes the configuration and reboots.
        device.cli('request system zeroize')
    except Exception as exception:
        return {'message': 'Could not zeroize due to : "{0}"'.format(exception),
                'out': False}
    return {'message': 'Completed zeroize and rebooted', 'out': True}
def install_os(path=None, **kwargs):
    '''
    Installs the given image on the device. After the installation is complete\
     the device is rebooted,
    if reboot=True is given as a keyworded argument.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.install_os 'salt://images/junos_image.tgz' reboot=True
        salt 'device_name' junos.install_os 'salt://junos_16_1.tgz' dev_timeout=300


    Parameters:
      Required
        * path:
          Path where the image file is present on the proxy minion.
      Optional
        * kwargs: keyworded arguments to be given such as dev_timeout, reboot etc
            * dev_timeout:
              Set NETCONF RPC timeout. Can be used to RPCs which
              take a while to execute. (default = 30 seconds)
            * reboot:
              Whether to reboot after installation (default = False)
            * no_copy:
              When True the software package will not be SCP’d to the device. \
              (default = False)
    '''
    conn = __proxy__['junos.conn']()
    ret = dict()
    ret['out'] = True

    if path is None:
        ret['message'] = \
            'Please provide the salt path where the junos image is present.'
        ret['out'] = False
        return ret

    # Fetch the image from the salt fileserver onto the proxy minion.
    image_cached_path = salt.utils.files.mkstemp()
    __salt__['cp.get_file'](path, image_cached_path)

    if not os.path.isfile(image_cached_path):
        ret['message'] = 'Invalid image path.'
        ret['out'] = False
        return ret

    if os.path.getsize(image_cached_path) == 0:
        ret['message'] = 'Failed to copy image'
        ret['out'] = False
        return ret
    path = image_cached_path

    # Unpack CLI-style kwargs (see rpc() for the __pub_arg convention).
    # NOTE(review): op is collected but never forwarded to sw.install(), so
    # the documented dev_timeout/no_copy options appear to be silently
    # ignored -- only op['reboot'] is consulted below.  Confirm intent.
    op = dict()
    if '__pub_arg' in kwargs:
        if kwargs['__pub_arg']:
            if isinstance(kwargs['__pub_arg'][-1], dict):
                op.update(kwargs['__pub_arg'][-1])
    else:
        op.update(kwargs)

    try:
        conn.sw.install(path, progress=True)
        ret['message'] = 'Installed the os.'
    except Exception as exception:
        ret['message'] = 'Installation failed due to: "{0}"'.format(exception)
        ret['out'] = False
        return ret
    finally:
        # Remove the cached image whether or not the install succeeded.
        salt.utils.files.safe_rm(image_cached_path)

    if 'reboot' in op and op['reboot'] is True:
        try:
            conn.sw.reboot()
        except Exception as exception:
            ret['message'] = \
                'Installation successful but reboot failed due to : "{0}"' \
                .format(exception)
            ret['out'] = False
            return ret
        ret['message'] = 'Successfully installed and rebooted!'
    return ret
def file_copy(src=None, dest=None):
    '''
    Copies the file from the local device to the junos device.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.file_copy /home/m2/info.txt info_copy.txt

    Parameters:
      Required
        * src:
          The sorce path where the file is kept.
        * dest:
          The destination path where the file will be copied.
    '''
    device = __proxy__['junos.conn']()

    # Validate arguments up front with guard clauses.
    if src is None:
        return {'message':
                'Please provide the absolute path of the file to be copied.',
                'out': False}
    if not os.path.isfile(src):
        return {'message': 'Invalid source file path', 'out': False}
    if dest is None:
        return {'message':
                'Please provide the absolute path of the destination where the file is to be copied.',
                'out': False}

    try:
        # SCP the file over the existing NETCONF session's transport.
        with SCP(device, progress=True) as scp:
            scp.put(src, dest)
    except Exception as exception:
        return {'message': 'Could not copy file : "{0}"'.format(exception),
                'out': False}
    return {'message': 'Successfully copied file from {0} to {1}'.format(
        src, dest),
        'out': True}
def lock():
"""
Attempts an exclusive lock on the candidate configuration. This
is a non-blocking call.
.. note::
Any user who wishes to use lock, must necessarily unlock the
configuration too. Ensure :py:func:`unlock <salt.modules.junos.unlock>`
is called in the same orchestration run in which the lock is called.
Usage:
.. code-block:: bash
salt 'device_name' junos.lock
"""
conn = __proxy__['junos.conn']()
ret = dict()
ret['out'] = True
try:
conn.cu.lock()
ret['message'] = "Successfully locked the configuration."
except jnpr.junos.exception.LockError as exception:
ret['message'] = 'Could not gain lock due to : "{0}"'.format(exception)
ret['out'] = False
return ret
def unlock():
"""
Unlocks the candidate configuration.
Usage:
.. code-block:: bash
salt 'device_name' junos.unlock
"""
conn = __proxy__['junos.conn']()
ret = dict()
ret['out'] = True
try:
conn.cu.unlock()
ret['message'] = "Successfully unlocked the configuration."
except jnpr.junos.exception.UnlockError as exception:
ret['message'] = \
'Could not unlock configuration due to : "{0}"'.format(exception)
ret['out'] = False
return ret
def load(path=None, **kwargs):
"""
Loads the configuration from the file provided onto the device.
Usage:
.. code-block:: bash
salt 'device_name' junos.load 'salt://production/network/routers/config.set'
salt 'device_name' junos.load 'salt://templates/replace_config.conf' replace=True
salt 'device_name' junos.load 'salt://my_new_configuration.conf' overwrite=True
salt 'device_name' junos.load 'salt://syslog_template.conf' template_vars='{"syslog_host": "10.180.222.7"}'
Parameters:
Required
* path:
Path where the configuration/template file is present. If the file has a \
'*.conf' extension,
the content is treated as text format. If the file has a '*.xml' \
extension,
the content is treated as XML format. If the file has a '*.set' \
extension,
the content is treated as Junos OS 'set' commands.(default = None)
Optional
* kwargs: Keyworded arguments which can be provided like-
* overwrite:
Set to True if you want this file is to completely replace the\
configuration file. (default = False)
* replace:
Specify whether the configuration file uses "replace:" statements.
Those statements under the 'replace' tag will only be changed.\
(default = False)
* format:
Determines the format of the contents.
* update:
Compare a complete loaded configuration against
the candidate configuration. For each hierarchy level or
configuration object that is different in the two configurations,
the version in the loaded configuration replaces the version in the
candidate configuration. When the configuration is later committed,
only system processes that are affected by the changed configuration
elements parse the new configuration. This action is supported from
PyEZ 2.1 (default = False)
* template_vars:
Variables to be passed into the template processing engine in addition
to those present in __pillar__, __opts__, __grains__, etc.
You may reference these variables in your template like so:
{{ template_vars["var_name"] }}
"""
conn = __proxy__['junos.conn']()
ret = dict()
ret['out'] = True
if path is None:
ret['message'] = \
'Please provide the salt path where the configuration is present'
ret['out'] = False
return ret
op = dict()
if '__pub_arg' in kwargs:
if kwargs['__pub_arg']:
if isinstance(kwargs['__pub_arg'][-1], dict):
op.update(kwargs['__pub_arg'][-1])
else:
op.update(kwargs)
template_vars = dict()
if "template_vars" in op:
template_vars = op["template_vars"]
template_cached_path = salt.utils.files.mkstemp()
__salt__['cp.get_template'](
path,
template_cached_path,
template_vars=template_vars)
if not os.path.isfile(template_cached_path):
ret['message'] = 'Invalid file path.'
ret['out'] = False
return ret
if os.path.getsize(template_cached_path) == 0:
ret['message'] = 'Template failed to render'
ret['out'] = False
return ret
op['path'] = template_cached_path
if 'format' not in op:
if path.endswith('set'):
template_format = 'set'
elif path.endswith('xml'):
template_format = 'xml'
else:
template_format = 'text'
op['format'] = template_format
if 'replace' in op and op['replace']:
op['merge'] = False
del op['replace']
elif 'overwrite' in op and op['overwrite']:
op['overwrite'] = True
elif 'overwrite' in op and not op['overwrite']:
op['merge'] = True
del op['overwrite']
try:
conn.cu.load(**op)
ret['message'] = "Successfully loaded the configuration."
except Exception as exception:
ret['message'] = 'Could not load configuration due to : "{0}"'.format(
exception)
ret['format'] = template_format
ret['out'] = False
return ret
finally:
salt.utils.files.safe_rm(template_cached_path)
return ret
def commit_check():
    """
    Run a commit check against the candidate configuration on the device.

    Returns a result dict with:
      * ``out``     -- ``True`` if the check succeeded, ``False`` otherwise.
      * ``message`` -- human-readable outcome description.

    Usage:

    .. code-block:: bash

        salt 'device_name' junos.commit_check
    """
    # Obtain the live PyEZ connection held by the junos proxy minion.
    conn = __proxy__['junos.conn']()
    result = dict()
    result['out'] = True
    try:
        conn.cu.commit_check()
    except Exception as exception:
        result['message'] = 'Commit check failed with {0}'.format(exception)
        result['out'] = False
    else:
        result['message'] = 'Commit check succeeded.'
    return result
| 30.906321 | 154 | 0.577329 |
d31a6700cb4eecdbb14a0a072083a241e602970d | 13,043 | py | Python | project/pro1-hog/hog.py | zltshadow/CS61A-2019-summer | 0f5dd0be5f51927364aec1bc974526837328b695 | [
"MIT"
] | 3 | 2021-11-21T06:09:39.000Z | 2022-03-12T08:05:27.000Z | project/pro1-hog/hog.py | zltshadow/CS61A-2019-summer | 0f5dd0be5f51927364aec1bc974526837328b695 | [
"MIT"
] | null | null | null | project/pro1-hog/hog.py | zltshadow/CS61A-2019-summer | 0f5dd0be5f51927364aec1bc974526837328b695 | [
"MIT"
] | null | null | null | """CS 61A Presents The Game of Hog."""
from dice import six_sided, four_sided, make_test_dice
from ucb import main, trace, interact
GOAL_SCORE = 100 # The goal of Hog is to score 100 points.
######################
# Phase 1: Simulator #
######################
def roll_dice(num_rolls, dice=six_sided):
    """Simulate rolling the DICE exactly NUM_ROLLS > 0 times. Return the sum of
    the outcomes unless any of the outcomes is 1. In that case, return 1
    (the "Pig Out" rule).

    num_rolls: The number of dice rolls that will be made.
    dice: A function that simulates a single dice roll outcome.
    """
    # These assert statements ensure that num_rolls is a positive integer.
    assert type(num_rolls) == int, 'num_rolls must be an integer.'
    assert num_rolls > 0, 'Must roll at least once.'
    # Roll ALL num_rolls dice even after seeing a 1: each call to dice()
    # consumes an outcome, so stopping early would desynchronize stateful
    # test dice used by the autograder.
    total = 0
    pig_out = False
    for _ in range(num_rolls):
        outcome = dice()
        if outcome == 1:
            pig_out = True
        else:
            total += outcome
    return 1 if pig_out else total
def free_bacon(score):
    """Return the points scored from rolling 0 dice (Free Bacon).

    score: The opponent's current score (must be below the goal).
    """
    assert score < 100, 'The game should be over.'
    # Free Bacon awards 10 minus the smaller digit of the opponent's score.
    tens, ones = divmod(score, 10)
    return 10 - min(tens, ones)
def take_turn(num_rolls, opponent_score, dice=six_sided):
    """Simulate a turn rolling NUM_ROLLS dice, which may be 0 (Free Bacon).
    Return the points scored for the turn by the current player.

    num_rolls: The number of dice rolls that will be made.
    opponent_score: The total score of the opponent.
    dice: A function that simulates a single dice roll outcome.
    """
    # Leave these assert statements here; they help check for errors.
    assert type(num_rolls) == int, 'num_rolls must be an integer.'
    assert num_rolls >= 0, 'Cannot roll a negative number of dice in take_turn.'
    assert num_rolls <= 10, 'Cannot roll more than 10 dice.'
    assert opponent_score < 100, 'The game should be over.'
    # Zero rolls means the player takes Free Bacon instead of rolling.
    return free_bacon(opponent_score) if num_rolls == 0 else roll_dice(num_rolls, dice)
def is_swap(player_score, opponent_score):
    """
    Return whether the two scores should be swapped (Swine Swap): the
    leftmost digit of the current player's score equals the rightmost
    digit of the opponent's score.
    """
    leading_digit = int(str(player_score)[0])
    return leading_digit == opponent_score % 10
def other(player):
    """Return the opposite player, for a player PLAYER numbered 0 or 1.

    >>> other(0)
    1
    >>> other(1)
    0
    """
    # XOR with 1 flips between 0 and 1.
    return player ^ 1
def silence(score0, score1):
    """A commentary function that says nothing at all and keeps saying
    nothing on every subsequent turn (see Phase 2)."""
    return silence
def play(strategy0, strategy1, score0=0, score1=0, dice=six_sided,
         goal=GOAL_SCORE, say=silence):
    """Simulate a game and return the final scores of both players, with Player
    0's score first, and Player 1's score second.

    A strategy is a function that takes two total scores as arguments (the
    current player's score, and the opponent's score), and returns a number of
    dice that the current player will roll this turn.

    strategy0: The strategy function for Player 0, who plays first.
    strategy1: The strategy function for Player 1, who plays second.
    score0: Starting score for Player 0
    score1: Starting score for Player 1
    dice: A function of zero arguments that simulates a dice roll.
    goal: The game ends and someone wins when this score is reached.
    say: The commentary function to call at the end of the first turn.
    """
    player = 0  # Which player is about to take a turn, 0 (first) or 1 (second)
    while score0 < goal and score1 < goal:
        if player == 0:
            score0 += take_turn(strategy0(score0, score1), score1, dice)
            if is_swap(score0, score1):
                score0, score1 = score1, score0
        else:  # player == 1
            score1 += take_turn(strategy1(score1, score0), score0, dice)
            if is_swap(score1, score0):
                score0, score1 = score1, score0
        player = other(player)
        # Problem 6: each commentary call returns the commentary function
        # for the NEXT turn, so say must be invoked (and rebound) at the
        # end of every turn, not just once after the game ends.
        say = say(score0, score1)
    return score0, score1
#######################
# Phase 2: Commentary #
#######################
def say_scores(score0, score1):
    """A commentary function that announces the score for each player and
    reuses itself as the commentary for the following turn."""
    print(f"Player 0 now has {score0} and Player 1 now has {score1}")
    return say_scores
def announce_lead_changes(previous_leader=None):
    """Return a commentary function that announces lead changes.

    previous_leader: 0, 1, or None -- who led after the previous turn.

    >>> f0 = announce_lead_changes()
    >>> f1 = f0(5, 0)
    Player 0 takes the lead by 5
    >>> f2 = f1(5, 12)
    Player 1 takes the lead by 7
    >>> f3 = f2(8, 12)
    >>> f4 = f3(8, 13)
    >>> f5 = f4(15, 13)
    Player 0 takes the lead by 2
    """
    def say(score0, score1):
        # Determine the current leader; a tie means nobody leads.
        if score0 == score1:
            leader = None
        else:
            leader = 0 if score0 > score1 else 1
        # Announce only when someone leads AND the leader has changed.
        if leader is not None and leader != previous_leader:
            print('Player', leader, 'takes the lead by', abs(score0 - score1))
        return announce_lead_changes(leader)
    return say
def both(f, g):
    """Return a commentary function that says what f says, then what g says.

    NOTE: the following game is not possible under the rules, it's just
    an example for the sake of the doctest

    >>> h0 = both(say_scores, announce_lead_changes())
    >>> h1 = h0(10, 0)
    Player 0 now has 10 and Player 1 now has 0
    Player 0 takes the lead by 10
    >>> h2 = h1(10, 6)
    Player 0 now has 10 and Player 1 now has 6
    >>> h3 = h2(6, 17)
    Player 0 now has 6 and Player 1 now has 17
    Player 1 takes the lead by 11
    """
    def combined(score0, score1):
        # Invoke f first, then g, preserving announcement order, and
        # combine their successor commentary functions for the next turn.
        next_f = f(score0, score1)
        next_g = g(score0, score1)
        return both(next_f, next_g)
    return combined
def announce_highest(who, previous_high=0, previous_score=0):
    """Return a commentary function that announces when WHO's score
    increases by more than ever before in the game.

    who: 0 or 1 -- the player whose gains are being tracked.
    previous_high: the largest single-turn gain seen so far.
    previous_score: WHO's score after the previous turn.

    NOTE: the following game is not possible under the rules, it's just
    an example for the sake of the doctest

    >>> f0 = announce_highest(1) # Only announce Player 1 score gains
    >>> f1 = f0(12, 0)
    >>> f2 = f1(12, 11)
    11 point(s)! That's the biggest gain yet for Player 1
    >>> f3 = f2(20, 11)
    >>> f4 = f3(13, 20)
    >>> f5 = f4(20, 35)
    15 point(s)! That's the biggest gain yet for Player 1
    >>> f6 = f5(20, 47) # Player 1 gets 12 points; not enough for a new high
    >>> f7 = f6(21, 47)
    >>> f8 = f7(21, 77)
    30 point(s)! That's the biggest gain yet for Player 1
    >>> f9 = f8(77, 22) # Swap!
    >>> f10 = f9(33, 77) # Swap!
    55 point(s)! That's the biggest gain yet for Player 1
    """
    assert who == 0 or who == 1, 'The who argument should indicate a player.'
    def say(score0, score1):
        # Pick out the tracked player's current score by position.
        score = (score0, score1)[who]
        gain = score - previous_score
        if gain > previous_high:
            print(f"{gain} point(s)! That's the biggest gain yet for Player {who}")
            return announce_highest(who, gain, score)
        return announce_highest(who, previous_high, score)
    return say
#######################
# Phase 3: Strategies #
#######################
def always_roll(n):
    """Return a strategy that always rolls N dice.

    A strategy is a function that takes two total scores as arguments (the
    current player's score, and the opponent's score), and returns a number of
    dice that the current player will roll this turn.

    >>> strategy = always_roll(5)
    >>> strategy(0, 0)
    5
    >>> strategy(99, 99)
    5
    """
    # The returned strategy ignores both scores and always answers n.
    return lambda score, opponent_score: n
def make_averaged(fn, num_samples=1000):
    """Return a function that returns the average value of FN when called.

    The returned function accepts the same arguments as FN, calls FN
    NUM_SAMPLES times with those arguments, and averages the results.

    >>> dice = make_test_dice(4, 2, 5, 1)
    >>> averaged_dice = make_averaged(dice, 1000)
    >>> averaged_dice()
    3.0
    """
    def averaged(*args):
        total = 0
        for _ in range(num_samples):
            total += fn(*args)
        return total / num_samples
    return averaged
def max_scoring_num_rolls(dice=six_sided, num_samples=1000):
    """Return the number of dice (1 to 10) that gives the highest average turn
    score by calling roll_dice with the provided DICE over NUM_SAMPLES times.
    Assume that the dice always return positive outcomes.

    Ties are broken in favor of the smallest number of rolls.

    >>> dice = make_test_dice(1, 6)
    >>> max_scoring_num_rolls(dice)
    1
    """
    averaged = make_averaged(roll_dice, num_samples)
    # Evaluate candidates in ascending order so the first strict maximum wins.
    best_rolls, best_average = 1, averaged(1, dice)
    for candidate in range(2, 11):
        average = averaged(candidate, dice)
        if average > best_average:
            best_rolls, best_average = candidate, average
    return best_rolls
def winner(strategy0, strategy1):
    """Return 0 if strategy0 wins against strategy1, and 1 otherwise."""
    final0, final1 = play(strategy0, strategy1)
    return 0 if final0 > final1 else 1
def average_win_rate(strategy, baseline=always_roll(4)):
    """Return the average win rate of STRATEGY against BASELINE. Averages the
    winrate when starting the game as player 0 and as player 1.
    """
    averaged_winner = make_averaged(winner)
    # winner returns the winning player's index, so subtract from 1 when
    # STRATEGY plays first to convert "average winner" into a win rate.
    as_player_0 = 1 - averaged_winner(strategy, baseline)
    as_player_1 = averaged_winner(baseline, strategy)
    return (as_player_0 + as_player_1) / 2
def run_experiments():
    """Run a series of strategy experiments and report results."""
    # Toggle these flags to choose which experiments to run.
    find_max_scoring = True    # Change to False when done finding max_scoring_num_rolls
    check_always_roll = False  # Change to True to test always_roll(8)
    check_bacon = False        # Change to True to test bacon_strategy
    check_swap = False         # Change to True to test swap_strategy
    check_final = False        # Change to True to test final_strategy
    if find_max_scoring:
        six_sided_max = max_scoring_num_rolls(six_sided)
        print('Max scoring num rolls for six-sided dice:', six_sided_max)
    if check_always_roll:
        print('always_roll(6) win rate:', average_win_rate(always_roll(6)))
    if check_bacon:
        print('bacon_strategy win rate:', average_win_rate(bacon_strategy))
    if check_swap:
        print('swap_strategy win rate:', average_win_rate(swap_strategy))
    if check_final:
        print('final_strategy win rate:', average_win_rate(final_strategy))
    "*** You may add additional experiments as you wish ***"
def bacon_strategy(score, opponent_score, margin=8, num_rolls=4):
    """This strategy rolls 0 dice if that gives at least MARGIN points, and
    rolls NUM_ROLLS otherwise.
    """
    if free_bacon(opponent_score) >= margin:
        return 0
    return num_rolls
def swap_strategy(score, opponent_score, margin=8, num_rolls=4):
    """This strategy rolls 0 dice when it triggers a beneficial swap. It also
    rolls 0 dice if it gives at least MARGIN points and does not trigger a
    non-beneficial swap. Otherwise, it rolls NUM_ROLLS.
    """
    gain = free_bacon(opponent_score)
    new_score = score + gain
    would_swap = is_swap(new_score, opponent_score)
    # Beneficial swap: the opponent's (larger) score would become ours.
    if would_swap and opponent_score > new_score:
        return 0
    # Big enough Free Bacon, as long as it doesn't swap our lead away.
    if gain >= margin and not (would_swap and opponent_score <= new_score):
        return 0
    return num_rolls
def final_strategy(score, opponent_score):
    """Write a brief description of your final strategy.

    *** YOUR DESCRIPTION HERE ***
    """
    # Delegate to swap_strategy with its default margin and roll count.
    return swap_strategy(score, opponent_score)
##########################
# Command Line Interface #
##########################
# NOTE: Functions in this section do not need to be changed. They use features
# of Python not yet covered in the course.
@main
def run(*args):
    """Read in the command-line argument and calls corresponding functions.

    This function uses Python syntax/techniques not yet covered in this course.
    """
    import argparse
    parser = argparse.ArgumentParser(description="Play Hog")
    parser.add_argument('--run_experiments', '-r', action='store_true',
                        help='Runs strategy experiments')
    args = parser.parse_args()
    if args.run_experiments:
        run_experiments()
311f00ed580668136220f98a4a4c8ae1c71c2f2a | 116 | py | Python | answers/Problem_1/Problem_1.py | luciengaitskell/Project-Euler-Answers | 2ff221171072ece97379579d77a75daacfa7fcbf | [
"Unlicense"
] | null | null | null | answers/Problem_1/Problem_1.py | luciengaitskell/Project-Euler-Answers | 2ff221171072ece97379579d77a75daacfa7fcbf | [
"Unlicense"
] | null | null | null | answers/Problem_1/Problem_1.py | luciengaitskell/Project-Euler-Answers | 2ff221171072ece97379579d77a75daacfa7fcbf | [
"Unlicense"
# Project Euler Problem 1: sum all the multiples of 3 or 5 below 1000.
totalSum = 0
for ii in range(1, 1000):
    # A number counts once even if it is a multiple of both 3 and 5.
    if ii % 3 == 0 or ii % 5 == 0:
        totalSum = totalSum + ii
print(totalSum)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.