import functools
import json
import logging
import os
from collections import defaultdict
from discord.ext import commands
from tle import constants
from tle.util import cache_system2
from tle.util import codeforces_api as cf
from tle.util import db
from tle.util import discord_common
from tle.util import event_system
logger = logging.getLogger(__name__)
# Connection to database
user_db = None
# Cache system
cache2 = None
# Event system
event_sys = event_system.EventSystem()
_contest_id_to_writers_map = None
_initialize_done = False
active_groups = defaultdict(set)
async def initialize(nodb):
global cache2
global user_db
global event_sys
global _contest_id_to_writers_map
global _initialize_done
if _initialize_done:
# This happens if the bot loses connection to Discord and on_ready is triggered again
# when it reconnects.
return
await cf.initialize()
if nodb:
user_db = db.DummyUserDbConn()
else:
user_db_file = os.path.join(constants.FILEDIR, constants.USER_DB_FILENAME)
user_db = db.UserDbConn(user_db_file)
cache_db_file = os.path.join(constants.FILEDIR, constants.CACHE_DB_FILENAME)
cache_db = db.CacheDbConn(cache_db_file)
cache2 = cache_system2.CacheSystem(cache_db)
await cache2.run()
jsonfile = os.path.join(constants.FILEDIR, constants.CONTEST_WRITERS_JSON_FILE)
try:
with open(jsonfile) as f:
data = json.load(f)
_contest_id_to_writers_map = {contest['id']: contest['writers'] for contest in data}
logger.info('Contest writers loaded from JSON file')
except FileNotFoundError:
logger.warning('JSON file containing contest writers not found')
_initialize_done = True
# algmyr's guard idea:
def user_guard(*, group):
active = active_groups[group]
def guard(fun):
@functools.wraps(fun)
async def f(self, ctx, *args, **kwargs):
user = ctx.message.author.id
if user in active:
logger.info(f'{user} repeatedly calls {group} group')
return
active.add(user)
try:
await fun(self, ctx, *args, **kwargs)
finally:
active.remove(user)
return f
return guard
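# Illustrative sketch (not part of the original module): how the guard above
# might be applied inside a cog so the same user cannot start a long-running
# command twice concurrently. The cog and command names are assumptions.
#
# class ExampleCog(commands.Cog):
#     @commands.command(brief='Example long-running command')
#     @user_guard(group='example')
#     async def crunch(self, ctx):
#         # Repeated invocations by the same user while this runs are ignored.
#         await ctx.send('Working...')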
def is_contest_writer(contest_id, handle):
if _contest_id_to_writers_map is None:
return False
writers = _contest_id_to_writers_map.get(contest_id)
return writers and handle in writers
_NONSTANDARD_CONTEST_INDICATORS = [
'wild', 'fools', 'unrated', 'surprise', 'unknown', 'friday', 'q#', 'testing',
'marathon', 'kotlin', 'onsite', 'experimental']
def is_nonstandard_contest(contest):
return any(string in contest.name.lower() for string in _NONSTANDARD_CONTEST_INDICATORS)
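# For example, contests named 'Kotlin Heroes: Episode 1' or 'April Fools Day
# Contest' are flagged as nonstandard because 'kotlin' / 'fools' appear in the
# lowercased contest name.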
class ResolveHandleError(commands.CommandError):
pass
class HandleCountOutOfBoundsError(ResolveHandleError):
def __init__(self, mincnt, maxcnt):
super().__init__(f'Number of handles must be between {mincnt} and {maxcnt}')
class FindMemberFailedError(ResolveHandleError):
def __init__(self, member):
super().__init__(f'Unable to convert `{member}` to a server member')
class HandleNotRegisteredError(ResolveHandleError):
def __init__(self, member):
super().__init__(f'Codeforces handle for {member.mention} not found in database')
class HandleIsVjudgeError(ResolveHandleError):
HANDLES = 'vjudge1 vjudge2 vjudge3 vjudge4 vjudge5'.split()
def __init__(self, handle):
super().__init__(f"`{handle}`? I'm not doing that!\n\n(╯°□°)╯︵ ┻━┻")
def time_format(seconds):
seconds = int(seconds)
days, seconds = divmod(seconds, 86400)
hours, seconds = divmod(seconds, 3600)
minutes, seconds = divmod(seconds, 60)
return days, hours, minutes, seconds
def pretty_time_format(seconds):
days, hours, minutes, seconds = time_format(seconds)
timespec = [
(days, 'day', 'days'),
(hours, 'hour', 'hours'),
(minutes, 'minute', 'minutes'),
]
timeprint = [(count,singular,plural) for count,singular,plural in timespec if count]
if not timeprint:
timeprint.append((seconds, 'second', 'seconds'))
return ' '.join(f'{count} {singular if count == 1 else plural}'
for count, singular, plural in timeprint)
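# Example: pretty_time_format(90061) returns '1 day 1 hour 1 minute'; for a
# value under one minute, e.g. pretty_time_format(42), it returns '42 seconds'.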
async def resolve_handles(ctx, converter, handles, *, mincnt=1, maxcnt=5):
"""Convert an iterable of strings to CF handles. A string beginning with ! indicates Discord username,
otherwise it is a raw CF handle to be left unchanged."""
# If this is called from a Discord command, it is recommended to call the
# cf_handle_error_handler function below from the command's error handler.
if len(handles) < mincnt or maxcnt < len(handles):
raise HandleCountOutOfBoundsError(mincnt, maxcnt)
resolved_handles = []
for handle in handles:
if handle.startswith('!'):
# ! denotes Discord user
member_identifier = handle[1:]
try:
member = await converter.convert(ctx, member_identifier)
except commands.errors.CommandError:
raise FindMemberFailedError(member_identifier)
handle = user_db.gethandle(member.id)
if handle is None:
raise HandleNotRegisteredError(member)
if handle in HandleIsVjudgeError.HANDLES:
raise HandleIsVjudgeError(handle)
resolved_handles.append(handle)
return resolved_handles
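# Illustrative usage sketch (the converter attribute and handle values are
# assumptions): inside a cog command, mixing raw CF handles with Discord
# members prefixed by '!':
#
#     handles = await resolve_handles(ctx, self.converter,
#                                     ('tourist', '!SomeDiscordUser'), maxcnt=2)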
async def resolve_handle_error_handler(ctx, error):
if isinstance(error, ResolveHandleError):
await ctx.send(embed=discord_common.embed_alert(error))
error.handled = True
|
#!/usr/bin/env python3
import boto3
import json
import os
import pprint
from sys import version_info
import sys
AWS_REGION = "us-west-1"
EC2_CLIENT = boto3.client('ec2', region_name=AWS_REGION)
INSTANCE_ID = 'i-06a2ac220369ddb08'
# Stopping the instance using stop_instances.
instances = EC2_CLIENT.stop_instances(
InstanceIds=[
INSTANCE_ID,
],
)
for instance in instances['StoppingInstances']:
print(f'Stopping instance "{instance["InstanceId"]}"')
print(f'Status of instance "{instance["CurrentState"]["Name"]}"')
print(json.dumps(instances, indent=4, sort_keys=True))
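# Optional sketch (an assumption, not in the original script): block until the
# instance has actually reached the 'stopped' state before offering to
# terminate it, using the EC2 client's built-in waiter.
#
# waiter = EC2_CLIENT.get_waiter('instance_stopped')
# waiter.wait(InstanceIds=[INSTANCE_ID])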
def terminating_instances():
# Terminating the instance using terminate_instances.
instances = EC2_CLIENT.terminate_instances(
InstanceIds=[
INSTANCE_ID,
],
)
for instance in instances['TerminatingInstances']:
print(f'Terminating instance "{instance["InstanceId"]}"')
print(f'Status of instance "{instance["CurrentState"]["Name"]}"')
print(json.dumps(instances, indent=4, sort_keys=True))
yes = {'yes','y', 'ye', ''}
no = {'no','n'}
print("Do you want to delete instance? (y/n)")
choice = input().lower()
if choice in yes:
print('The instance is now being terminated')
terminating_instances()
elif choice in no:
print('Successfully stopped')
else:
sys.stdout.write("Please respond with 'yes' or 'no'")
|
from __future__ import absolute_import, division, print_function, unicode_literals
import unittest
import struct
import ipaddress
from socks5.exception import ProtocolError
from socks5.connection import Connection
from socks5.events import (
Socks4Request, Socks4Response,
GreetingRequest, GreetingResponse,
Request, Response)
from socks5.define import (
REQ_COMMAND, AUTH_TYPE,
RESP_STATUS, ADDR_TYPE)
class TestServerConnection(unittest.TestCase):
def test_initiate_connection(self):
conn = Connection(our_role="server")
self.assertEqual(conn._conn.state, "init")
conn.initiate_connection()
self.assertEqual(conn._conn.state, "greeting_request")
def test_incorrect_role(self):
with self.assertRaises(ValueError):
Connection(our_role="yoyo")
def test_auth_end(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("auth_inprogress")
conn.auth_end()
self.assertEqual(conn._conn.state, "request")
def test_auth_end_in_incorrect_state(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_request")
with self.assertRaises(ProtocolError):
conn.auth_end()
def test_send_greeting_response_socks4(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 4
conn._conn._port = 5580
event = Socks4Response(0x5a, "127.0.0.1", 5580)
data = conn.send(event)
expected_data = struct.pack("!BBH4B", 0, 0x5a, 5580, 127, 0, 0, 1)
self.assertEqual(conn._conn.state, "end")
self.assertEqual(data, expected_data)
def test_send_greeting_response_socks4_with_incorrect_event_detail_version(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 4
conn._conn._port = 5580
event = GreetingResponse(AUTH_TYPE["NO_AUTH"])
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_greeting_response_socks4_with_incorrect_event_detail_port(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 4
conn._conn._port = 5580
event = Socks4Response(0x5a, "127.0.0.1", 5581)
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_greeting_response_no_auth(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 5
conn._conn._auth_methods = [AUTH_TYPE["NO_AUTH"]]
event = GreetingResponse(AUTH_TYPE["NO_AUTH"])
data = conn.send(event)
expected_data = struct.pack("!BB", 0x5, 0x0)
self.assertEqual(conn._conn.state, "request")
self.assertEqual(data, expected_data)
def test_send_greeting_response_with_valid_auth(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 5
conn._conn._auth_methods = [AUTH_TYPE["USERNAME_PASSWD"]]
event = GreetingResponse(AUTH_TYPE["USERNAME_PASSWD"])
conn.send(event)
self.assertEqual(conn._conn.state, "auth_inprogress")
def test_send_greeting_response_with_unsupported_auth(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 5
conn._conn._auth_methods = [AUTH_TYPE["NO_SUPPORT_AUTH_METHOD"]]
event = GreetingResponse(AUTH_TYPE["NO_SUPPORT_AUTH_METHOD"])
conn.send(event)
self.assertEqual(conn._conn.state, "end")
def test_send_greeting_response_with_incorrect_event_detail_version(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 5
conn._conn._auth_methods = [AUTH_TYPE["NO_SUPPORT_AUTH_METHOD"]]
event = Socks4Response(0x5a, "127.0.0.1", 5580)
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_greeting_response_with_incorrect_event_detail_auth_type(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 5
conn._conn._auth_methods = [AUTH_TYPE["NO_SUPPORT_AUTH_METHOD"]]
event = GreetingResponse(AUTH_TYPE["NO_AUTH"])
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_greeting_response_incorrect_event(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_response")
event = GreetingRequest((AUTH_TYPE["NO_AUTH"], ))
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_response(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("response")
conn._conn._version = 5
conn._conn._addr_type = ADDR_TYPE["IPV4"]
conn._conn._addr = ipaddress.IPv4Address("127.0.0.1")
conn._conn._port = 8080
event = Response(RESP_STATUS["SUCCESS"], ADDR_TYPE["IPV4"], "127.0.0.1", 8080)
data = conn.send(event)
expected_data = struct.pack("!BBxB4BH", 0x5, 0x0, 0x1, 127, 0, 0, 1, 8080)
self.assertEqual(conn._conn.state, "end")
self.assertEqual(data, expected_data)
def test_send_response_incorrect_event(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("response")
event = GreetingRequest((AUTH_TYPE["NO_AUTH"], ))
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_response_with_incorrect_event_detail_addr_type(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("response")
conn._conn._version = 5
conn._conn._addr_type = ADDR_TYPE["IPV4"]
conn._conn._addr = ipaddress.IPv4Address("127.0.0.1")
conn._conn._port = 8080
event = Response(RESP_STATUS["SUCCESS"], ADDR_TYPE["DOMAINNAME"], "www.google.com", 8080)
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_response_with_incorrect_event_detail_addr(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("response")
conn._conn._version = 5
conn._conn._addr_type = ADDR_TYPE["IPV4"]
conn._conn._addr = ipaddress.IPv4Address("127.0.0.1")
conn._conn._port = 8080
event = Response(RESP_STATUS["SUCCESS"], ADDR_TYPE["IPV4"], "192.168.0.1", 8080)
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_response_with_incorrect_event_detail_port(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("response")
conn._conn._version = 5
conn._conn._addr_type = ADDR_TYPE["IPV4"]
conn._conn._addr = ipaddress.IPv4Address("127.0.0.1")
conn._conn._port = 8080
event = Response(RESP_STATUS["SUCCESS"], ADDR_TYPE["IPV4"], "127.0.0.1", 5580)
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_incorrect_state_greeting_request(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_request")
event = GreetingRequest((AUTH_TYPE["NO_AUTH"], ))
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_incorrect_state_request(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("request")
event = GreetingRequest((AUTH_TYPE["NO_AUTH"], ))
with self.assertRaises(ProtocolError):
conn.send(event)
def test_recv_need_more_data(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_request")
raw_data = b"\x05"
event = conn.recv(raw_data)
self.assertEqual(conn._conn.state, "greeting_request")
self.assertEqual(event, "NeedMoreData")
def test_recv_in_greeting_request(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_request")
raw_data = struct.pack("!BB2B", 0x5, 0x2, 0x00, 0x01)
event = conn.recv(raw_data)
self.assertEqual(conn._conn.state, "greeting_response")
self.assertEqual(event, "GreetingRequest")
self.assertEqual(event.nmethod, 2)
self.assertIn(0, event.methods)
self.assertIn(1, event.methods)
def test_recv_in_greeting_request_socks4(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_request")
raw_data = struct.pack("!BBH4B6sB", 0x4, 0x1, 5580, 127, 0, 0, 1, "Johnny".encode("ascii"), 0)
event = conn.recv(raw_data)
self.assertEqual(event, "Socks4Request")
self.assertEqual(event.cmd, 1)
self.assertEqual(event.port, 5580)
self.assertEqual(event.addr, ipaddress.IPv4Address("127.0.0.1"))
self.assertEqual(event.name, "Johnny")
def test_recv_in_request(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("request")
raw_data = struct.pack("!BBxB4BH", 0x5, 0x1, 0x1, 127, 0, 0, 1, 8080)
event = conn.recv(raw_data)
self.assertEqual(conn._conn.state, "response")
self.assertEqual(event, "Request")
self.assertEqual(event.cmd, 1)
self.assertEqual(event.atyp, 1)
self.assertEqual(event.addr, ipaddress.IPv4Address("127.0.0.1"))
self.assertEqual(event.port, 8080)
def test_recv_incorrect_state_greeting_response(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("greeting_response")
with self.assertRaises(ProtocolError):
conn.recv(b"")
def test_recv_incorrect_state_response(self):
conn = Connection(our_role="server")
conn._conn.machine.set_state("response")
with self.assertRaises(ProtocolError):
conn.recv(b"")
class TestClientConnection(unittest.TestCase):
def test_initiate_connection(self):
conn = Connection(our_role="client")
self.assertEqual(conn._conn.state, "init")
conn.initiate_connection()
self.assertEqual(conn._conn.state, "greeting_request")
def test_auth_end(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("auth_inprogress")
conn.auth_end()
self.assertEqual(conn._conn.state, "request")
def test_auth_end_in_incorrect_state(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_request")
with self.assertRaises(ProtocolError):
conn.auth_end()
def test_send_in_greeting_request(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_request")
event = GreetingRequest((AUTH_TYPE["NO_AUTH"], ))
data = conn.send(event)
expected_data = struct.pack("!BBB", 0x5, 0x1, 0x00)
self.assertEqual(conn._conn.state, "greeting_response")
self.assertEqual(data, expected_data)
def test_send_in_greeting_request_socks4(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_request")
event = Socks4Request(1, "127.0.0.1", 5580, "Johnny")
data = conn.send(event)
expected_data = struct.pack("!BBH4B6sB", 0x4, 0x1, 5580, 127, 0, 0, 1, "Johnny".encode("ascii"), 0)
self.assertEqual(conn._conn.state, "greeting_response")
self.assertEqual(data, expected_data)
def test_send_in_greeting_request_incorrect_event(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_request")
event = GreetingResponse(AUTH_TYPE["NO_AUTH"])
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_in_request_request(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("request")
event = Request(REQ_COMMAND["CONNECT"], ADDR_TYPE["IPV4"], u"127.0.0.1", 8080)
data = conn.send(event)
expected_data = struct.pack("!BBxB4BH", 0x5, 0x1, 0x1, 127, 0, 0, 1, 8080)
self.assertEqual(conn._conn.state, "response")
self.assertEqual(data, expected_data)
def test_send_in_request_request_incorrect_event(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("request")
event = GreetingResponse(AUTH_TYPE["NO_AUTH"])
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_incorrect_state_greeting_response(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_response")
event = GreetingResponse(AUTH_TYPE["NO_AUTH"])
with self.assertRaises(ProtocolError):
conn.send(event)
def test_send_incorrect_state_response(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("response")
event = GreetingResponse(AUTH_TYPE["NO_AUTH"])
with self.assertRaises(ProtocolError):
conn.send(event)
def test_recv_need_more_data(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_response")
raw_data = b"\x05"
event = conn.recv(raw_data)
self.assertEqual(conn._conn.state, "greeting_response")
self.assertEqual(event, "NeedMoreData")
def test_recv_in_greeting_response_no_auth(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 5
conn._conn._auth_methods = [AUTH_TYPE["NO_AUTH"]]
raw_data = struct.pack("!BB", 0x5, 0x0)
event = conn.recv(raw_data)
self.assertEqual(conn._conn.state, "request")
self.assertEqual(event, "GreetingResponse")
self.assertEqual(event.auth_type, 0)
def test_recv_in_greeting_response_socks4(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 4
conn._conn._port = 5580
raw_data = struct.pack("!BBH4B", 0, 0x5a, 5580, 127, 0, 0, 1)
event = conn.recv(raw_data)
self.assertEqual(conn._conn.state, "end")
self.assertEqual(event, "Socks4Response")
self.assertEqual(event.status, 0x5a)
self.assertEqual(event.port, 5580)
self.assertEqual(event.addr, ipaddress.IPv4Address("127.0.0.1"))
def test_recv_in_greeting_response_socks4_incorrect_version(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 4
conn._conn._port = 5580
raw_data = struct.pack("!BB", 0x5, 0xff)
with self.assertRaises(ProtocolError):
conn.recv(raw_data)
def test_recv_in_greeting_response_with_unsupported_auth(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 5
conn._conn._auth_methods = [AUTH_TYPE["NO_SUPPORT_AUTH_METHOD"]]
raw_data = struct.pack("!BB", 0x5, 0xff)
conn.recv(raw_data)
self.assertEqual(conn._conn.state, "end")
def test_recv_in_greeting_response_with_valid_auth(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 5
conn._conn._auth_methods = [AUTH_TYPE["USERNAME_PASSWD"]]
raw_data = struct.pack("!BB", 0x5, 0x2)
conn.recv(raw_data)
self.assertEqual(conn._conn.state, "auth_inprogress")
def test_recv_in_greeting_response_with_incorrect_detail_version(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 5
conn._conn._auth_methods = [AUTH_TYPE["NO_SUPPORT_AUTH_METHOD"]]
raw_data = struct.pack("!BBH4B", 0, 0x5a, 5580, 127, 0, 0, 1)
with self.assertRaises(ProtocolError):
conn.recv(raw_data)
def test_recv_in_greeting_response_with_incorrect_auth_method(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_response")
conn._conn._version = 5
conn._conn._auth_methods = [AUTH_TYPE["NO_SUPPORT_AUTH_METHOD"]]
raw_data = struct.pack("!BB", 0x5, 0x2)
with self.assertRaises(ProtocolError):
conn.recv(raw_data)
def test_recv_in_response(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("response")
conn._conn._version = 5
conn._conn._addr_type = ADDR_TYPE["IPV4"]
conn._conn._addr = ipaddress.IPv4Address("127.0.0.1")
conn._conn._port = 8080
raw_data = struct.pack("!BBxB4BH", 0x5, 0x0, 0x1, 127, 0, 0, 1, 8080)
event = conn.recv(raw_data)
self.assertEqual(conn._conn.state, "end")
self.assertEqual(event, "Response")
self.assertEqual(event.status, 0)
self.assertEqual(event.atyp, 1)
self.assertEqual(event.addr, ipaddress.IPv4Address("127.0.0.1"))
self.assertEqual(event.port, 8080)
def test_recv_incorrect_state_greeting_request(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("greeting_request")
raw_data = b""
with self.assertRaises(ProtocolError):
conn.recv(raw_data)
def test_recv_incorrect_state_request(self):
conn = Connection(our_role="client")
conn._conn.machine.set_state("request")
raw_data = b""
with self.assertRaises(ProtocolError):
conn.recv(raw_data)
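# Illustrative sketch (an assumption, not part of the test suite): the same
# Connection API driven outside a test, on the client side. `server_bytes`
# stands in for data read from the socket.
#
# conn = Connection(our_role="client")
# conn.initiate_connection()
# out = conn.send(GreetingRequest((AUTH_TYPE["NO_AUTH"],)))  # bytes to write to the server
# event = conn.recv(server_bytes)  # e.g. a GreetingResponse, or "NeedMoreData"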
|
num1 = 10
num2 = 20
num3 = 30
num4 = 40
# The following was written on the dev branch
num5 = 50
|
from shop.admin import notification
|
# -*- test-case-name: twisted.logger.test.test_stdlib -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Integration with Python standard library logging.
"""
import logging as stdlibLogging
from typing import Mapping, Tuple
from zope.interface import implementer
from constantly import NamedConstant # type: ignore[import]
from twisted.python.compat import currentframe
from ._format import formatEvent
from ._interfaces import ILogObserver, LogEvent
from ._levels import LogLevel
# Mappings to Python's logging module
toStdlibLogLevelMapping: Mapping[NamedConstant, int] = {
LogLevel.debug: stdlibLogging.DEBUG,
LogLevel.info: stdlibLogging.INFO,
LogLevel.warn: stdlibLogging.WARNING,
LogLevel.error: stdlibLogging.ERROR,
LogLevel.critical: stdlibLogging.CRITICAL,
}
def _reverseLogLevelMapping() -> Mapping[int, NamedConstant]:
"""
Reverse the above mapping, adding both the numerical keys used above and
the corresponding string keys also used by python logging.
@return: the reversed mapping
"""
mapping = {}
for logLevel, pyLogLevel in toStdlibLogLevelMapping.items():
mapping[pyLogLevel] = logLevel
mapping[stdlibLogging.getLevelName(pyLogLevel)] = logLevel
return mapping
fromStdlibLogLevelMapping = _reverseLogLevelMapping()
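# For example, both fromStdlibLogLevelMapping[stdlibLogging.WARNING] and
# fromStdlibLogLevelMapping["WARNING"] map back to LogLevel.warn.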
@implementer(ILogObserver)
class STDLibLogObserver:
"""
Log observer that writes to the python standard library's C{logging}
module.
@note: Warning: specific logging configurations (example: network) can lead
to this observer blocking. Nothing is done here to prevent that, so be
sure not to configure the standard library logging module to block
when used in conjunction with this module: code within Twisted, such as
twisted.web, assumes that logging does not block.
@cvar defaultStackDepth: This is the default number of frames that it takes
to get from L{STDLibLogObserver} through the logging module, plus one;
in other words, the number of frames if you were to call a
L{STDLibLogObserver} directly. This is useful to use as an offset for
the C{stackDepth} parameter to C{__init__}, to add frames for other
publishers.
"""
defaultStackDepth = 4
def __init__(
self, name: str = "twisted", stackDepth: int = defaultStackDepth
) -> None:
"""
@param name: logger identifier.
@param stackDepth: The depth of the stack to investigate for caller
metadata.
"""
self.logger = stdlibLogging.getLogger(name)
self.logger.findCaller = self._findCaller # type: ignore[assignment]
self.stackDepth = stackDepth
def _findCaller(
self, stackInfo: bool = False, stackLevel: int = 1
) -> Tuple[str, int, str, None]:
"""
Based on the stack depth passed to this L{STDLibLogObserver}, identify
the calling function.
@param stackInfo: Whether or not to construct stack information.
(Currently ignored.)
@param stackLevel: The number of stack frames to skip when determining
the caller (currently ignored; use stackDepth on the instance).
@return: Depending on Python version, either a 3-tuple of (filename,
lineno, name) or a 4-tuple of that plus stack information.
"""
f = currentframe(self.stackDepth)
co = f.f_code
extra = (None,)
return (co.co_filename, f.f_lineno, co.co_name) + extra
def __call__(self, event: LogEvent) -> None:
"""
Format an event and bridge it to Python logging.
"""
level = event.get("log_level", LogLevel.info)
failure = event.get("log_failure")
if failure is None:
excInfo = None
else:
excInfo = (failure.type, failure.value, failure.getTracebackObject())
stdlibLevel = toStdlibLogLevelMapping.get(level, stdlibLogging.INFO)
self.logger.log(stdlibLevel, StringifiableFromEvent(event), exc_info=excInfo)
class StringifiableFromEvent:
"""
An object that implements C{__str__()} in order to defer the work of
formatting until it's converted into a C{str}.
"""
def __init__(self, event: LogEvent) -> None:
"""
@param event: An event.
"""
self.event = event
def __str__(self) -> str:
return formatEvent(self.event)
def __bytes__(self) -> bytes:
return str(self).encode("utf-8")
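# Illustrative usage sketch (an assumption, not part of this module): routing
# Twisted's structured logging through the standard library via this observer.
#
# from twisted.logger import globalLogBeginner
# globalLogBeginner.beginLoggingTo([STDLibLogObserver(name="myapp")])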
|
# Define here the models for your spider middleware
#
# See documentation in:
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
# useful for handling different item types with a single interface
from itemadapter import is_item, ItemAdapter
class CivilscraperSpiderMiddleware:
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the spider middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_spider_input(self, response, spider):
# Called for each response that goes through the spider
# middleware and into the spider.
# Should return None or raise an exception.
return None
def process_spider_output(self, response, result, spider):
# Called with the results returned from the Spider, after
# it has processed the response.
# Must return an iterable of Request, or item objects.
for i in result:
yield i
def process_spider_exception(self, response, exception, spider):
# Called when a spider or process_spider_input() method
# (from other spider middleware) raises an exception.
# Should return either None or an iterable of Request or item objects.
pass
def process_start_requests(self, start_requests, spider):
# Called with the start requests of the spider, and works
# similarly to the process_spider_output() method, except
# that it doesn’t have a response associated.
# Must return only requests (not items).
for r in start_requests:
yield r
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
class CivilscraperDownloaderMiddleware:
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the downloader middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_request(self, request, spider):
# Called for each request that goes through the downloader
# middleware.
# Must either:
# - return None: continue processing this request
# - or return a Response object
# - or return a Request object
# - or raise IgnoreRequest: process_exception() methods of
# installed downloader middleware will be called
return None
def process_response(self, request, response, spider):
# Called with the response returned from the downloader.
# Must either:
# - return a Response object
# - return a Request object
# - or raise IgnoreRequest
return response
def process_exception(self, request, exception, spider):
# Called when a download handler or a process_request()
# (from other downloader middleware) raises an exception.
# Must either:
# - return None: continue processing this exception
# - return a Response object: stops process_exception() chain
# - return a Request object: stops process_exception() chain
pass
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
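# Illustrative sketch (an assumption; the module path and priority values are
# guesses): enabling these middlewares in the project's settings.py.
#
# SPIDER_MIDDLEWARES = {
#     'civilscraper.middlewares.CivilscraperSpiderMiddleware': 543,
# }
# DOWNLOADER_MIDDLEWARES = {
#     'civilscraper.middlewares.CivilscraperDownloaderMiddleware': 543,
# }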
|
# coding: utf-8
"""
IoT API
Collection of all public API endpoints. # noqa: E501
The version of the OpenAPI document: 2.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from iot_api_client.api_client import ApiClient
from iot_api_client.exceptions import (
ApiTypeError,
ApiValueError
)
class ThingsV2Api(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def things_v2_create(self, create_things_v2_payload, **kwargs): # noqa: E501
"""create things_v2 # noqa: E501
Creates a new thing associated to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_create(create_things_v2_payload, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param CreateThingsV2Payload create_things_v2_payload: ThingPayload describes a thing (required)
:param bool force: If true, detach device from the other thing, and attach to this thing
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ArduinoThing
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.things_v2_create_with_http_info(create_things_v2_payload, **kwargs) # noqa: E501
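# Illustrative usage sketch (an assumption, not generated code): calling this
# endpoint synchronously and asynchronously. `configuration` and `payload`
# are placeholders the caller would build.
#
#     api = ThingsV2Api(ApiClient(configuration))
#     thing = api.things_v2_create(payload)                   # blocking call
#     thread = api.things_v2_create(payload, async_req=True)  # returns the request thread
#     thing = thread.get()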
def things_v2_create_with_http_info(self, create_things_v2_payload, **kwargs): # noqa: E501
"""create things_v2 # noqa: E501
Creates a new thing associated to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_create_with_http_info(create_things_v2_payload, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param CreateThingsV2Payload create_things_v2_payload: ThingPayload describes a thing (required)
:param bool force: If true, detach device from the other thing, and attach to this thing
:param _return_http_data_only: return the response data only, without the
HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ArduinoThing, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['create_things_v2_payload', 'force'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method things_v2_create" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'create_things_v2_payload' is set
if ('create_things_v2_payload' not in local_var_params or
local_var_params['create_things_v2_payload'] is None):
raise ApiValueError("Missing the required parameter `create_things_v2_payload` when calling `things_v2_create`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'force' in local_var_params:
query_params.append(('force', local_var_params['force'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_things_v2_payload' in local_var_params:
body_params = local_var_params['create_things_v2_payload']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/vnd.arduino.thing+json', 'application/vnd.goa.error+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/v2/things', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArduinoThing', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def things_v2_create_sketch(self, id, thing_sketch, **kwargs): # noqa: E501
"""createSketch things_v2 # noqa: E501
Creates a new sketch thing associated to the thing # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_create_sketch(id, thing_sketch, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: The id of the thing (required)
:param ThingSketch thing_sketch: ThingSketchPayload describes a sketch of a thing (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ArduinoThing
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.things_v2_create_sketch_with_http_info(id, thing_sketch, **kwargs) # noqa: E501
def things_v2_create_sketch_with_http_info(self, id, thing_sketch, **kwargs): # noqa: E501
"""createSketch things_v2 # noqa: E501
Creates a new sketch thing associated to the thing # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_create_sketch_with_http_info(id, thing_sketch, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: The id of the thing (required)
:param ThingSketch thing_sketch: ThingSketchPayload describes a sketch of a thing (required)
:param _return_http_data_only: return the response data only, without the
HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ArduinoThing, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'thing_sketch'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method things_v2_create_sketch" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in local_var_params or
local_var_params['id'] is None):
raise ApiValueError("Missing the required parameter `id` when calling `things_v2_create_sketch`") # noqa: E501
# verify the required parameter 'thing_sketch' is set
if ('thing_sketch' not in local_var_params or
local_var_params['thing_sketch'] is None):
raise ApiValueError("Missing the required parameter `thing_sketch` when calling `things_v2_create_sketch`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'thing_sketch' in local_var_params:
body_params = local_var_params['thing_sketch']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/vnd.arduino.thing+json', 'application/vnd.goa.error+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/v2/things/{id}/sketch', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArduinoThing', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def things_v2_delete(self, id, **kwargs): # noqa: E501
"""delete things_v2 # noqa: E501
Removes a thing associated to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_delete(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: The id of the thing (required)
:param bool force: If true, hard delete the thing
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.things_v2_delete_with_http_info(id, **kwargs) # noqa: E501
def things_v2_delete_with_http_info(self, id, **kwargs): # noqa: E501
"""delete things_v2 # noqa: E501
Removes a thing associated to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_delete_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: The id of the thing (required)
:param bool force: If true, hard delete the thing
:param _return_http_data_only: return the response data only, without the
HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'force'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method things_v2_delete" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in local_var_params or
local_var_params['id'] is None):
raise ApiValueError("Missing the required parameter `id` when calling `things_v2_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'force' in local_var_params:
query_params.append(('force', local_var_params['force'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/v2/things/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def things_v2_delete_sketch(self, id, **kwargs): # noqa: E501
"""deleteSketch things_v2 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_delete_sketch(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: The id of the thing (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ArduinoThing
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.things_v2_delete_sketch_with_http_info(id, **kwargs) # noqa: E501
def things_v2_delete_sketch_with_http_info(self, id, **kwargs): # noqa: E501
"""deleteSketch things_v2 # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_delete_sketch_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: The id of the thing (required)
:param _return_http_data_only: return the response data only, without the
HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ArduinoThing, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method things_v2_delete_sketch" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in local_var_params or
local_var_params['id'] is None):
raise ApiValueError("Missing the required parameter `id` when calling `things_v2_delete_sketch`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/vnd.arduino.thing+json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/v2/things/{id}/sketch', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArduinoThing', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def things_v2_list(self, **kwargs): # noqa: E501
"""list things_v2 # noqa: E501
Returns the list of things associated to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param bool across_user_ids: If true, returns all the things
:param str device_id: The id of the device you want to filter
:param bool show_deleted: If true, shows the soft deleted things
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[ArduinoThing]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.things_v2_list_with_http_info(**kwargs) # noqa: E501
def things_v2_list_with_http_info(self, **kwargs): # noqa: E501
"""list things_v2 # noqa: E501
Returns the list of things associated to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param bool across_user_ids: If true, returns all the things
:param str device_id: The id of the device you want to filter
:param bool show_deleted: If true, shows the soft deleted things
:param _return_http_data_only: return the response data only, without the
HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[ArduinoThing], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['across_user_ids', 'device_id', 'show_deleted'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method things_v2_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'across_user_ids' in local_var_params:
query_params.append(('across_user_ids', local_var_params['across_user_ids'])) # noqa: E501
if 'device_id' in local_var_params:
query_params.append(('device_id', local_var_params['device_id'])) # noqa: E501
if 'show_deleted' in local_var_params:
query_params.append(('show_deleted', local_var_params['show_deleted'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/vnd.arduino.thing+json; type=collection']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/v2/things', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ArduinoThing]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def things_v2_show(self, id, **kwargs): # noqa: E501
"""show things_v2 # noqa: E501
Returns the thing requested by the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_show(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: The id of the thing (required)
:param bool show_deleted: If true, shows the soft deleted thing
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ArduinoThing
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.things_v2_show_with_http_info(id, **kwargs) # noqa: E501
def things_v2_show_with_http_info(self, id, **kwargs): # noqa: E501
"""show things_v2 # noqa: E501
Returns the thing requested by the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_show_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: The id of the thing (required)
:param bool show_deleted: If true, shows the soft deleted thing
:param _return_http_data_only: return the response data only, without the
HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ArduinoThing, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'show_deleted'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method things_v2_show" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in local_var_params or
local_var_params['id'] is None):
raise ApiValueError("Missing the required parameter `id` when calling `things_v2_show`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'show_deleted' in local_var_params:
query_params.append(('show_deleted', local_var_params['show_deleted'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/vnd.arduino.thing+json', 'application/vnd.goa.error+json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/v2/things/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArduinoThing', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def things_v2_update(self, id, thing, **kwargs): # noqa: E501
"""update things_v2 # noqa: E501
Updates a thing associated to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_update(id, thing, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: The id of the thing (required)
:param Thing thing: ThingPayload describes a thing (required)
:param bool force: If true, detach device from the other thing, and attach to this thing
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ArduinoThing
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.things_v2_update_with_http_info(id, thing, **kwargs) # noqa: E501
def things_v2_update_with_http_info(self, id, thing, **kwargs): # noqa: E501
"""update things_v2 # noqa: E501
Updates a thing associated to the user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_update_with_http_info(id, thing, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: The id of the thing (required)
:param Thing thing: ThingPayload describes a thing (required)
:param bool force: If true, detach device from the other thing, and attach to this thing
:param _return_http_data_only: return the response data only, without the
HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ArduinoThing, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'thing', 'force'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method things_v2_update" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in local_var_params or
local_var_params['id'] is None):
raise ApiValueError("Missing the required parameter `id` when calling `things_v2_update`") # noqa: E501
# verify the required parameter 'thing' is set
if ('thing' not in local_var_params or
local_var_params['thing'] is None):
raise ApiValueError("Missing the required parameter `thing` when calling `things_v2_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'force' in local_var_params:
query_params.append(('force', local_var_params['force'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'thing' in local_var_params:
body_params = local_var_params['thing']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/vnd.arduino.thing+json', 'application/vnd.goa.error+json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/v2/things/{id}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArduinoThing', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def things_v2_update_sketch(self, id, sketch_id, **kwargs): # noqa: E501
"""updateSketch things_v2 # noqa: E501
Update an existing thing sketch # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_update_sketch(id, sketch_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: The id of the thing (required)
:param str sketch_id: The id of the sketch (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ArduinoThing
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.things_v2_update_sketch_with_http_info(id, sketch_id, **kwargs) # noqa: E501
def things_v2_update_sketch_with_http_info(self, id, sketch_id, **kwargs): # noqa: E501
"""updateSketch things_v2 # noqa: E501
Update an existing thing sketch # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.things_v2_update_sketch_with_http_info(id, sketch_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: The id of the thing (required)
:param str sketch_id: The id of the sketch (required)
:param _return_http_data_only: return the response data only, without
                               the HTTP status code and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ArduinoThing, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'sketch_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method things_v2_update_sketch" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in local_var_params or
local_var_params['id'] is None):
raise ApiValueError("Missing the required parameter `id` when calling `things_v2_update_sketch`") # noqa: E501
# verify the required parameter 'sketch_id' is set
if ('sketch_id' not in local_var_params or
local_var_params['sketch_id'] is None):
raise ApiValueError("Missing the required parameter `sketch_id` when calling `things_v2_update_sketch`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
if 'sketch_id' in local_var_params:
path_params['sketchId'] = local_var_params['sketch_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/vnd.arduino.thing+json', 'application/vnd.goa.error+json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/v2/things/{id}/sketch/{sketchId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ArduinoThing', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
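# Hypothetical usage sketch (names are assumptions): the *_with_http_info
# variants return the payload together with the HTTP status code and headers,
# which is useful when the caller needs to inspect the raw response, e.g.
#
#   thing, status, headers = api.things_v2_update_sketch_with_http_info(
#       thing_id, sketch_id)
#   assert status == 200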
|
#!/usr/bin/env python
# -*- test-case-name: twisted.names.test.test_examples -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Print the Address records, Mail-Exchanger records and the Nameserver
records for the given domain name, e.g.
    python testdns.py google.com
"""
import sys
from twisted.internet import defer
from twisted.internet.task import react
from twisted.names import client, dns, error
from twisted.python import usage
class Options(usage.Options):
synopsis = "Usage: testdns.py DOMAINNAME"
def parseArgs(self, domainname):
self["domainname"] = domainname
def formatRecords(records, heading):
"""
Extract only the answer records and return them as a neatly
formatted string beneath the given heading.
"""
answers, authority, additional = records
lines = ["# " + heading]
for a in answers:
line = [
a.name,
dns.QUERY_CLASSES.get(a.cls, "UNKNOWN (%d)" % (a.cls,)),
a.payload,
]
lines.append(" ".join(str(word) for word in line))
    return "\n".join(lines)
def printResults(results, domainname):
"""
Print the formatted results for each DNS record type.
"""
sys.stdout.write("# Domain Summary for %r\n" % (domainname,))
sys.stdout.write("\n\n".join(results) + "\n")
def printError(failure, domainname):
"""
Print a friendly error message if the hostname could not be
resolved.
"""
failure.trap(defer.FirstError)
failure = failure.value.subFailure
failure.trap(error.DNSNameError)
sys.stderr.write("ERROR: domain name not found %r\n" % (domainname,))
def main(reactor, *argv):
options = Options()
try:
options.parseOptions(argv)
except usage.UsageError as errortext:
sys.stderr.write(str(options) + "\n")
sys.stderr.write("ERROR: %s\n" % (errortext,))
raise SystemExit(1)
domainname = options["domainname"]
r = client.Resolver("/etc/resolv.conf")
d = defer.gatherResults(
[
r.lookupAddress(domainname).addCallback(formatRecords, "Addresses"),
r.lookupMailExchange(domainname).addCallback(
formatRecords, "Mail Exchangers"
),
r.lookupNameservers(domainname).addCallback(formatRecords, "Nameservers"),
],
consumeErrors=True,
)
d.addCallback(printResults, domainname)
d.addErrback(printError, domainname)
return d
if __name__ == "__main__":
react(main, sys.argv[1:])
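# A minimal related sketch (assumed, not part of the example above): resolving
# a single hostname to an IP address with twisted.names.client.getHostByName.
#
#   from twisted.internet.task import react
#   from twisted.names import client
#
#   def lookup(reactor, hostname="example.com"):
#       d = client.getHostByName(hostname)
#       d.addCallback(lambda ip: print(hostname, "->", ip))
#       return d
#
#   react(lookup)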
|
# first images year
L4_start = 1982
# max year for land cover
land_cover_max_year = 2020
sensor_max_year = 2021
land_cover_first_year = 1992
# name of the sensor, GEE asset
sensors = {
"Landsat 4": ["LANDSAT/LT04/C01/T1_SR", 30, "l4"],
"Landsat 5": ["LANDSAT/LT05/C01/T1_SR", 30, "l5"],
"Landsat 7": ["LANDSAT/LE07/C01/T1_SR", 30, "l6"],
"MODIS MOD13Q1": ["MODIS/006/MOD13Q1", 250, "modis"],
"MODIS NPP": ["MODIS/006/MOD17A3HGF", 250, "modis"],
"Landsat 8": ["LANDSAT/LC08/C01/T1_SR", 30, "l8"],
"Sentinel 2": ["COPERNICUS/S2", 10, "s2"],
}
precipitation = "NOAA/PERSIANN-CDR"
land_cover_ic = "users/amitghosh/sdg_module/esa/cci_landcover"
jrc_water = "JRC/GSW1_3/GlobalSurfaceWater"
soil_taxonomy = "OpenLandMap/SOL/SOL_TEXTURE-CLASS_USDA-TT_M/v02"
soc = "users/geflanddegradation/toolbox_datasets/soc_sgrid_30cm"
soc_isric = "projects/soilgrids-isric/soc_mean"
ipcc_climate_zones = "users/geflanddegradation/toolbox_datasets/ipcc_climate_zones"
wte = "users/amitghosh/sdg_module/wte_2020"
gaes = "users/amitghosh/sdg_module/fao/GAES_L4"
aez = "users/amitghosh/sdg_module/fao/aez_v9v2_CRUTS32_Hist_8110_100_avg"
hru = "users/amitghosh/sdg_module/hru_250"
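# A rough usage sketch (assumes the Earth Engine Python API is initialised;
# variable names are illustrative): each entry in `sensors` maps a display name
# to [GEE asset id, nominal resolution in metres, short code].
#
#   import ee
#   ee.Initialize()
#   asset_id, resolution, code = sensors["Landsat 8"]
#   collection = ee.ImageCollection(asset_id).filterDate("2020-01-01", "2020-12-31")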
|
from fuzion.mixins import RetrieveNotSupportedMixin
from fuzion.subresource import SubResource
class AbstractContact(RetrieveNotSupportedMixin, SubResource):
path = "abstracts/{}/contacts"
object_id_attr_name = "fuzion_contact_id"
|
fruit_evaluation = [
    ['Apple', '5', '4', '5', '3', '4', '5'],
    ['Grapes', '4', '3', '4', '5', '5', '5'],
    ['Melon', '5', '5', '4', '5', '3', '4'],
    ['Watermelon', '4', '4', '3', '5', '5', '4'],
    ['Kiwi', '4', '3', '5', '5', '4', '4'],
    ['Pineapple', '4', '3', '4', '5', '4', '5']
]
def show_assessment(assessment_record):
marks = assessment_record[1:]
marks_as_int = []
for mark in marks:
mark = int(mark)
marks_as_int.append(mark)
marks_avg = sum(marks_as_int) / len(marks_as_int)
marks_as_str = ', '.join(marks)
    print(f'{assessment_record[0]}: {marks_as_str}, average rating {marks_avg}')
for record in fruit_evaluation:
show_assessment(record)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from CTFd.models import Challenges
from tests.helpers import create_ctfd, destroy_ctfd, login_as_user, register_user
def test_missing_challenge_type():
"""Test that missing challenge types don't cause total challenge rendering failure"""
app = create_ctfd(enable_plugins=True)
with app.app_context():
register_user(app)
client = login_as_user(app, name="admin", password="password")
challenge_data = {
"name": "name",
"category": "category",
"description": "description",
"value": 100,
"decay": 20,
"minimum": 1,
"state": "visible",
"type": "dynamic",
}
r = client.post("/api/v1/challenges", json=challenge_data)
assert r.get_json().get("data")["id"] == 1
assert r.get_json().get("data")["type"] == "dynamic"
chal_count = Challenges.query.count()
assert chal_count == 1
# Delete the dynamic challenge type
from CTFd.plugins.challenges import CHALLENGE_CLASSES
del CHALLENGE_CLASSES["dynamic"]
r = client.get("/admin/challenges")
assert r.status_code == 200
assert b"dynamic" in r.data
r = client.get("/admin/challenges/1")
assert r.status_code == 500
assert b"The underlying challenge type (dynamic) is not installed" in r.data
challenge_data = {
"name": "name",
"category": "category",
"description": "description",
"value": 100,
"state": "visible",
"type": "standard",
}
r = client.post("/api/v1/challenges", json=challenge_data)
r = client.get("/challenges")
assert r.status_code == 200
# We should still see the one visible standard challenge
r = client.get("/api/v1/challenges")
assert r.status_code == 200
assert len(r.json["data"]) == 1
assert r.json["data"][0]["type"] == "standard"
# We cannot load the broken challenge
r = client.get("/api/v1/challenges/1")
assert r.status_code == 500
assert (
"The underlying challenge type (dynamic) is not installed"
in r.json["message"]
)
# We can load other challenges
r = client.get("/api/v1/challenges/2")
assert r.status_code == 200
destroy_ctfd(app)
|
from copy import copy
from typing import List
import numpy as np
import torch
import torch.optim as optim
import torchvision.transforms as T
import yaml
from matplotlib.pyplot import fill
import wandb
from attacks.initial_selection import find_initial_w
from utils.wandb import load_model
class AttackConfigParser:
def __init__(self, config_file):
with open(config_file, 'r') as file:
config = yaml.safe_load(file)
self._config = config
def create_target_model(self):
model = load_model(self._config['wandb_target_run'])
model.eval()
self.model = model
return model
def get_target_dataset(self):
api = wandb.Api(timeout=60)
run = api.run(self._config['wandb_target_run'])
return run.config['Dataset'].strip().lower()
def create_evaluation_model(self):
evaluation_model = load_model(self._config['wandb_evaluation_run'])
evaluation_model.eval()
self.evaluation_model = evaluation_model
return evaluation_model
def create_optimizer(self, params, config=None):
if config is None:
config = self._config['attack']['optimizer']
optimizer_config = self._config['attack']['optimizer']
for optimizer_type, args in optimizer_config.items():
if not hasattr(optim, optimizer_type):
raise Exception(
    f'{optimizer_type} is not a valid optimizer. Please spell the type exactly as the PyTorch class name.'
)
optimizer_class = getattr(optim, optimizer_type)
optimizer = optimizer_class(params, **args)
break
return optimizer
def create_lr_scheduler(self, optimizer):
if 'lr_scheduler' not in self._config['attack']:
return None
scheduler_config = self._config['attack']['lr_scheduler']
for scheduler_type, args in scheduler_config.items():
if not hasattr(optim.lr_scheduler, scheduler_type):
raise Exception(
    f'{scheduler_type} is not a valid learning rate scheduler. Please spell the type exactly as the PyTorch class name.'
)
scheduler_class = getattr(optim.lr_scheduler, scheduler_type)
scheduler_instance = scheduler_class(optimizer, **args)
break
return scheduler_instance
def create_candidates(self, generator, target_model, targets):
candidate_config = self._config['candidates']
device = 'cuda' if torch.cuda.is_available() else 'cpu'
if 'candidate_file' in candidate_config:
candidate_file = candidate_config['candidate_file']
w = torch.load(candidate_file)
w = w[:self._config['num_candidates']]
w = w.to(device)
print(f'Loaded {w.shape[0]} candidates from {candidate_file}.')
return w
elif 'candidate_search' in candidate_config:
search_config = candidate_config['candidate_search']
w = find_initial_w(generator=generator,
target_model=target_model,
targets=targets,
seed=self.seed,
**search_config)
print(f'Created {w.shape[0]} candidates randomly in w space.')
else:
raise Exception('No valid candidate initialization specified.')
w = w.to(device)
return w
def create_target_vector(self):
device = 'cuda' if torch.cuda.is_available() else 'cpu'
attack_config = self._config['attack']
targets = None
target_classes = attack_config['targets']
num_candidates = self._config['candidates']['num_candidates']
if type(target_classes) is list:
targets = torch.tensor(target_classes)
targets = torch.repeat_interleave(targets, num_candidates)
elif target_classes == 'all':
targets = torch.tensor([i for i in range(self.model.num_classes)])
targets = torch.repeat_interleave(targets, num_candidates)
elif type(target_classes) == int:
targets = torch.full(size=(num_candidates, ),
fill_value=target_classes)
else:
raise Exception(
    'Please specify a target class, a list of target classes, or "all".')
targets = targets.to(device)
return targets
def create_wandb_config(self):
for _, args in self.optimizer.items():
lr = args['lr']
break
config = {
**self.attack, 'optimizer': self.optimizer,
'lr': lr,
'use_scheduler': 'lr_scheduler' in self._config,
'target_architecture': self.model.architecture,
'target_extended': self.model.wandb_name,
'selection_method': self.final_selection['approach'],
'final_samples': self.final_selection['samples_per_target']
}
if 'lr_scheduler' in self._config:
config['lr_scheduler'] = self.lr_scheduler
return config
def create_attack_transformations(self):
transformation_list = []
if 'transformations' in self._config['attack']:
transformations = self._config['attack']['transformations']
for transform, args in transformations.items():
if not hasattr(T, transform):
raise Exception(
    f'{transform} is not a valid transformation. Please spell the type exactly as the Torchvision class name.'
)
transformation_class = getattr(T, transform)
transformation_list.append(transformation_class(**args))
if len(transformation_list) > 0:
attack_transformations = T.Compose(transformation_list)
return attack_transformations
return None
@property
def candidates(self):
return self._config['candidates']
@property
def wandb_target_run(self):
return self._config['wandb_target_run']
@property
def logging(self):
return self._config['wandb']['enable_logging']
@property
def wandb_init_args(self):
return self._config['wandb']['wandb_init_args']
@property
def attack(self):
return self._config['attack']
@property
def wandb(self):
return self._config['wandb']
@property
def optimizer(self):
return self._config['attack']['optimizer']
@property
def lr_scheduler(self):
return self._config['attack']['lr_scheduler']
@property
def final_selection(self):
if 'final_selection' in self._config:
return self._config['final_selection']
else:
return None
@property
def stylegan_model(self):
return self._config['stylegan_model']
@property
def seed(self):
return self._config['seed']
@property
def cas_evaluation(self):
return self._config['cas_evaluation']
@property
def dataset(self):
return self._config['dataset']
@property
def fid_evaluation(self):
return self._config['fid_evaluation']
@property
def attack_center_crop(self):
if 'transformations' in self._config['attack']:
if 'CenterCrop' in self._config['attack']['transformations']:
return self._config['attack']['transformations']['CenterCrop']['size']
else:
return None
@property
def attack_resize(self):
if 'transformations' in self._config['attack']:
if 'Resize' in self._config['attack']['transformations']:
return self._config['attack']['transformations']['Resize']['size']
else:
return None
@property
def num_classes(self):
targets = self._config['attack']['targets']
if isinstance(targets, int):
return 1
else:
return len(targets)
@property
def log_progress(self):
if 'log_progress' in self._config['attack']:
return self._config['attack']['log_progress']
else:
return True
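# A minimal sketch of the YAML layout this parser assumes, inferred from the
# accessors above (keys and values are illustrative, not a verified config):
#
#   wandb_target_run: user/project/run_id
#   wandb_evaluation_run: user/project/eval_run_id
#   stylegan_model: stylegan2_ffhq.pkl
#   seed: 42
#   candidates:
#     num_candidates: 50
#     candidate_search:
#       search_space_size: 2000
#   attack:
#     targets: [0, 1, 2]
#     optimizer:
#       Adam:
#         lr: 0.005
#     transformations:
#       CenterCrop:
#         size: 800
#       Resize:
#         size: 224
#   wandb:
#     enable_logging: true
#     wandb_init_args:
#       project: model-inversion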
|
# -*- coding: utf-8 -*-
"""The user module."""
from flask import render_template,current_app
import random,string
try:
    from StringIO import StringIO as BytesIO  # Python 2 fallback
except ImportError:
    from io import BytesIO  # Python 3
from datetime import datetime
try:
from PIL import Image,ImageDraw,ImageFont,ImageFilter
except Exception as e:
import Image,ImageDraw,ImageFont,ImageFilter
from flask import Blueprint,session,make_response
blueprint = Blueprint('user', __name__, url_prefix='/users', static_folder='../static')
from main.public.models import Category
from . import views,context_process # noqa
# Random letters (four uppercase characters):
def rndChar():
    chars = ''
    for i in range(4):
        chars += chr(random.randint(65, 90))
    return chars
# Random color 1 (light, for the background):
def rndColor():
return (random.randint(64, 255), random.randint(64, 255), random.randint(64, 255))
# Random color 2 (dark, for the text):
def rndColor2():
return (random.randint(32, 127), random.randint(32, 127), random.randint(32, 127))
@blueprint.route('/genverify')
def generate_verification_code(nowtime=''):
output = BytesIO()
width = 70
height = 30
image = Image.new('RGB',(width,height),(255,255,255))
# Font object
font = ImageFont.truetype(current_app.config['VERIFICATION_CODE_FONT'], 18)
draw = ImageDraw.Draw(image)
for x in range(width):
for y in range(height):
draw.point((x, y), fill=rndColor())
verify_str = rndChar()
draw.text((10, 5),verify_str, font=font, fill=rndColor2())
# Blur
# image = image.filter(ImageFilter.BLUR)
# li = []
# for i in range(10):
# temp = random.randrange(65,90)
# c = chr(temp)
# li.append(c)
image.save(output,"JPEG")
img_data = output.getvalue()
session['verify'] = verify_str
response = make_response(img_data)
response.headers['Content-Type'] = 'image/jpeg'
return response
# Request context processor: provide the verification code to templates
@blueprint.context_processor
def get_verify():
def get():
return generate_verification_code()
return dict(get_verify=get)
# Parent category title for navigation
@blueprint.context_processor
def get_category_parent_title():
def get(id):
return Category.query.get(id).title
return dict(get_category_parent_title=get)
# File too large (413)
@blueprint.errorhandler(413)
def page_max_file(e):
return render_template('users/413.html'), 413
@blueprint.errorhandler(404)
def page_not_found(e):
return render_template('users/404.html'), 404
|
#!/usr/bin/env python
# A script to check for the presence of all the required modules
# Gain access to the spawn.find_executable(), which works a lot like `which`
from distutils import spawn
def check_modules(setup=False, fetch=False, predict=False):
"""Function to try to import required modules, and return a list of modules
that are not installed."""
missing_modules = []
# Argparse
try:
import argparse
except ImportError:
missing_modules.append('argparse')
# If predict
if predict:
# Biopython
try:
import Bio
except ImportError:
missing_modules.append('Biopython')
return missing_modules
def missing_mods(modules):
"""Function to print a nice message about modules that are required."""
msg = """Some of the required modules were not found on your system. Please
install them and try again. The following modules were not found:"""
print(msg)
print('\n'.join(modules))
return
def check_executable(exe):
"""Checks for a path to an executable."""
path = spawn.find_executable(exe)
if path:
return path
else:
return False
def missing_executables(exelist):
"""Checks for the presence and execute permissions for all program names
passed to it."""
missing_programs = []
for e in exelist:
if check_executable(e):
continue
else:
missing_programs.append(e)
return missing_programs
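# A hedged usage sketch (module and program names are illustrative): the checks
# above would typically be combined like this before running a pipeline.
#
#   missing = check_modules(predict=True)
#   if missing:
#       missing_mods(missing)
#   absent = missing_executables(['bowtie2', 'samtools'])
#   if absent:
#       print('Missing executables:', ', '.join(absent))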
|
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from improving_agent.models.base_model_ import Model
from improving_agent.models.log_entry import LogEntry
from improving_agent.models.message import Message
from improving_agent.models.schema2 import Schema2
from improving_agent import util
class Response(Model):
"""NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
Do not edit the class manually.
"""
def __init__(self, message=None, status=None, description=None, logs=None, workflow=None): # noqa: E501
"""Response - a model defined in OpenAPI
:param message: The message of this Response. # noqa: E501
:type message: Message
:param status: The status of this Response. # noqa: E501
:type status: str
:param description: The description of this Response. # noqa: E501
:type description: str
:param logs: The logs of this Response. # noqa: E501
:type logs: List[LogEntry]
:param workflow: The workflow of this Response. # noqa: E501
:type workflow: List[Schema2]
"""
self.openapi_types = {
'message': Message,
'status': str,
'description': str,
'logs': List[LogEntry],
'workflow': List[Schema2]
}
self.attribute_map = {
'message': 'message',
'status': 'status',
'description': 'description',
'logs': 'logs',
'workflow': 'workflow'
}
self._message = message
self._status = status
self._description = description
self._logs = logs
self._workflow = workflow
@classmethod
def from_dict(cls, dikt) -> 'Response':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The Response of this Response. # noqa: E501
:rtype: Response
"""
return util.deserialize_model(dikt, cls)
@property
def message(self):
"""Gets the message of this Response.
:return: The message of this Response.
:rtype: Message
"""
return self._message
@message.setter
def message(self, message):
"""Sets the message of this Response.
:param message: The message of this Response.
:type message: Message
"""
if message is None:
raise ValueError("Invalid value for `message`, must not be `None`") # noqa: E501
self._message = message
@property
def status(self):
"""Gets the status of this Response.
One of a standardized set of short codes, e.g. Success, QueryNotTraversable, KPsNotAvailable # noqa: E501
:return: The status of this Response.
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this Response.
One of a standardized set of short codes, e.g. Success, QueryNotTraversable, KPsNotAvailable # noqa: E501
:param status: The status of this Response.
:type status: str
"""
self._status = status
@property
def description(self):
"""Gets the description of this Response.
A brief human-readable description of the outcome # noqa: E501
:return: The description of this Response.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this Response.
A brief human-readable description of the outcome # noqa: E501
:param description: The description of this Response.
:type description: str
"""
self._description = description
@property
def logs(self):
"""Gets the logs of this Response.
Log entries containing errors, warnings, debugging information, etc # noqa: E501
:return: The logs of this Response.
:rtype: List[LogEntry]
"""
return self._logs
@logs.setter
def logs(self, logs):
"""Sets the logs of this Response.
Log entries containing errors, warnings, debugging information, etc # noqa: E501
:param logs: The logs of this Response.
:type logs: List[LogEntry]
"""
self._logs = logs
@property
def workflow(self):
"""Gets the workflow of this Response.
:return: The workflow of this Response.
:rtype: List[Schema2]
"""
return self._workflow
@workflow.setter
def workflow(self, workflow):
"""Sets the workflow of this Response.
:param workflow: The workflow of this Response.
:type workflow: List[Schema2]
"""
self._workflow = workflow
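# A brief construction sketch (field values are illustrative): the model can be
# built directly from its typed attributes, or round-tripped through a dict via
# from_dict and the base Model's to_dict helper.
#
#   response = Response(message=Message(), status="Success",
#                       description="Query ran to completion",
#                       logs=[], workflow=[])
#   same_response = Response.from_dict(response.to_dict())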
|
def exchange_rate_format(data):
"""Return a dict with the exchange rate data formatted for serialization"""
return {
'provider_1': {
'name': 'dof',
'rate': data.dof_rate,
'date': data.dof_date,
'last_updated': data.dof_last_updated,
},
'provider_2': {
'name': 'fixer',
'rate': data.fixer_rate,
'date': data.fixer_date,
'last_updated': data.fixer_last_updated,
},
'provider_3': {
'name': 'banxico',
'rate': data.banxico_rate,
'date': data.banxico_date,
'last_updated': data.banxico_last_updated,
},
'created': data.created,
}
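# A small illustration (record type and values are assumptions): any object
# exposing dof_/fixer_/banxico_ rate, date and last_updated attributes plus
# `created` can be passed in, e.g. a namedtuple-backed row:
#
#   from collections import namedtuple
#   Rates = namedtuple('Rates', [
#       'dof_rate', 'dof_date', 'dof_last_updated',
#       'fixer_rate', 'fixer_date', 'fixer_last_updated',
#       'banxico_rate', 'banxico_date', 'banxico_last_updated', 'created'])
#   payload = exchange_rate_format(Rates(*sample_values))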
|
import logging
import os
import traceback
from collections import OrderedDict
import great_expectations.exceptions as exceptions
from great_expectations.core.util import nested_update
from great_expectations.data_context.store.html_site_store import (
HtmlSiteStore,
SiteSectionIdentifier,
)
from great_expectations.data_context.types.resource_identifiers import (
ExpectationSuiteIdentifier,
ValidationResultIdentifier,
)
from great_expectations.data_context.util import instantiate_class_from_config
from great_expectations.render.util import resource_key_passes_run_name_filter
logger = logging.getLogger(__name__)
FALSEY_YAML_STRINGS = [
"0",
"None",
"False",
"false",
"FALSE",
"none",
"NONE",
]
class SiteBuilder:
"""SiteBuilder builds data documentation for the project defined by a
DataContext.
A data documentation site consists of HTML pages for expectation suites,
profiling and validation results, and
an index.html page that links to all the pages.
The exact behavior of SiteBuilder is controlled by configuration in the
DataContext's great_expectations.yml file.
Users can specify:
* which datasources to document (by default, all)
* whether to include expectations, validations and profiling results
sections (by default, all)
* where the expectations and validations should be read from
(filesystem or S3)
* where the HTML files should be written (filesystem or S3)
* which renderer and view class should be used to render each section
Here is an example of a minimal configuration for a site::
local_site:
class_name: SiteBuilder
store_backend:
class_name: TupleS3StoreBackend
bucket: data_docs.my_company.com
prefix: /data_docs/
A more verbose configuration can also control individual sections and
override renderers, views, and stores::
local_site:
class_name: SiteBuilder
store_backend:
class_name: TupleS3StoreBackend
bucket: data_docs.my_company.com
prefix: /data_docs/
site_index_builder:
class_name: DefaultSiteIndexBuilder
# Verbose version:
# site_index_builder:
# module_name: great_expectations.render.builder
# class_name: DefaultSiteIndexBuilder
# renderer:
# module_name: great_expectations.render.renderer
# class_name: SiteIndexPageRenderer
# view:
# module_name: great_expectations.render.view
# class_name: DefaultJinjaIndexPageView
site_section_builders:
# Minimal specification
expectations:
class_name: DefaultSiteSectionBuilder
source_store_name: expectation_store
renderer:
module_name: great_expectations.render.renderer
class_name: ExpectationSuitePageRenderer
# More verbose specification with optional arguments
validations:
module_name: great_expectations.data_context.render
class_name: DefaultSiteSectionBuilder
source_store_name: local_validation_store
renderer:
module_name: great_expectations.render.renderer
class_name: SiteIndexPageRenderer
view:
module_name: great_expectations.render.view
class_name: DefaultJinjaIndexPageView
"""
def __init__(
self,
data_context,
store_backend,
site_name=None,
site_index_builder=None,
show_how_to_buttons=True,
site_section_builders=None,
runtime_environment=None,
**kwargs,
):
self.site_name = site_name
self.data_context = data_context
self.store_backend = store_backend
self.show_how_to_buttons = show_how_to_buttons
usage_statistics_config = data_context.anonymous_usage_statistics
data_context_id = None
if (
usage_statistics_config
and usage_statistics_config.enabled
and usage_statistics_config.data_context_id
):
data_context_id = usage_statistics_config.data_context_id
self.data_context_id = data_context_id
# set custom_styles_directory if present
custom_styles_directory = None
plugins_directory = data_context.plugins_directory
if plugins_directory and os.path.isdir(
os.path.join(plugins_directory, "custom_data_docs", "styles")
):
custom_styles_directory = os.path.join(
plugins_directory, "custom_data_docs", "styles"
)
# set custom_views_directory if present
custom_views_directory = None
if plugins_directory and os.path.isdir(
os.path.join(plugins_directory, "custom_data_docs", "views")
):
custom_views_directory = os.path.join(
plugins_directory, "custom_data_docs", "views"
)
if site_index_builder is None:
site_index_builder = {"class_name": "DefaultSiteIndexBuilder"}
# The site builder is essentially a frontend store. We'll open up
# three types of backends using the base
# type of the configuration defined in the store_backend section
self.target_store = HtmlSiteStore(
store_backend=store_backend, runtime_environment=runtime_environment
)
default_site_section_builders_config = {
"expectations": {
"class_name": "DefaultSiteSectionBuilder",
"source_store_name": data_context.expectations_store_name,
"renderer": {"class_name": "ExpectationSuitePageRenderer"},
},
"validations": {
"class_name": "DefaultSiteSectionBuilder",
"source_store_name": data_context.validations_store_name,
"renderer": {"class_name": "ValidationResultsPageRenderer"},
"validation_results_limit": site_index_builder.get(
"validation_results_limit"
),
},
"profiling": {
"class_name": "DefaultSiteSectionBuilder",
"source_store_name": data_context.validations_store_name,
"renderer": {"class_name": "ProfilingResultsPageRenderer"},
},
}
if site_section_builders is None:
site_section_builders = default_site_section_builders_config
else:
site_section_builders = nested_update(
default_site_section_builders_config, site_section_builders
)
# set default run_name_filter
if site_section_builders.get("validations", "None") not in FALSEY_YAML_STRINGS:
if site_section_builders["validations"].get("run_name_filter") is None:
site_section_builders["validations"]["run_name_filter"] = {
"not_includes": "profiling"
}
if site_section_builders.get("profiling", "None") not in FALSEY_YAML_STRINGS:
if site_section_builders["profiling"].get("run_name_filter") is None:
site_section_builders["profiling"]["run_name_filter"] = {
"includes": "profiling"
}
self.site_section_builders = {}
for site_section_name, site_section_config in site_section_builders.items():
if not site_section_config or site_section_config in FALSEY_YAML_STRINGS:
continue
module_name = (
site_section_config.get("module_name")
or "great_expectations.render.renderer.site_builder"
)
self.site_section_builders[
site_section_name
] = instantiate_class_from_config(
config=site_section_config,
runtime_environment={
"data_context": data_context,
"target_store": self.target_store,
"custom_styles_directory": custom_styles_directory,
"custom_views_directory": custom_views_directory,
"data_context_id": self.data_context_id,
"show_how_to_buttons": self.show_how_to_buttons,
},
config_defaults={"name": site_section_name, "module_name": module_name},
)
if not self.site_section_builders[site_section_name]:
raise exceptions.ClassInstantiationError(
module_name=module_name,
package_name=None,
class_name=site_section_config["class_name"],
)
module_name = (
site_index_builder.get("module_name")
or "great_expectations.render.renderer.site_builder"
)
class_name = site_index_builder.get("class_name") or "DefaultSiteIndexBuilder"
self.site_index_builder = instantiate_class_from_config(
config=site_index_builder,
runtime_environment={
"data_context": data_context,
"custom_styles_directory": custom_styles_directory,
"custom_views_directory": custom_views_directory,
"show_how_to_buttons": self.show_how_to_buttons,
"target_store": self.target_store,
"site_name": self.site_name,
"data_context_id": self.data_context_id,
"source_stores": {
section_name: section_config.get("source_store_name")
for (section_name, section_config) in site_section_builders.items()
if section_config not in FALSEY_YAML_STRINGS
},
"site_section_builders_config": site_section_builders,
},
config_defaults={
"name": "site_index_builder",
"module_name": module_name,
"class_name": class_name,
},
)
if not self.site_index_builder:
raise exceptions.ClassInstantiationError(
module_name=module_name,
package_name=None,
class_name=site_index_builder["class_name"],
)
def clean_site(self):
self.target_store.clean_site()
def build(self, resource_identifiers=None, build_index: bool = True):
"""
:param resource_identifiers: a list of resource identifiers
    (ExpectationSuiteIdentifier, ValidationResultIdentifier). If specified,
    rebuild HTML (or other views the data docs site renders) only for the
    resources in this list. This supports incremental builds of data docs
    sites (e.g., when a new validation result is created) and avoids a full
    rebuild.
:param build_index: if False, skip building the index page
:return:
"""
# copy static assets
self.target_store.copy_static_assets()
for site_section, site_section_builder in self.site_section_builders.items():
site_section_builder.build(resource_identifiers=resource_identifiers)
index_page_url, index_links_dict = self.site_index_builder.build(
build_index=build_index
)
return (
self.get_resource_url(only_if_exists=False),
index_links_dict,
)
def get_resource_url(self, resource_identifier=None, only_if_exists=True):
"""
Return the URL of the HTML document that renders a resource
(e.g., an expectation suite or a validation result).
:param resource_identifier: ExpectationSuiteIdentifier,
ValidationResultIdentifier or any other type's identifier. The
argument is optional - when not supplied, the method returns the URL of
the index page.
:return: URL (string)
"""
return self.target_store.get_url_for_resource(
resource_identifier=resource_identifier, only_if_exists=only_if_exists
)
class DefaultSiteSectionBuilder:
def __init__(
self,
name,
data_context,
target_store,
source_store_name,
custom_styles_directory=None,
custom_views_directory=None,
show_how_to_buttons=True,
run_name_filter=None,
validation_results_limit=None,
renderer=None,
view=None,
data_context_id=None,
**kwargs,
):
self.name = name
self.source_store = data_context.stores[source_store_name]
self.target_store = target_store
self.run_name_filter = run_name_filter
self.validation_results_limit = validation_results_limit
self.data_context_id = data_context_id
self.show_how_to_buttons = show_how_to_buttons
if renderer is None:
raise exceptions.InvalidConfigError(
"SiteSectionBuilder requires a renderer configuration "
"with a class_name key."
)
module_name = (
renderer.get("module_name") or "great_expectations.render.renderer"
)
self.renderer_class = instantiate_class_from_config(
config=renderer,
runtime_environment={"data_context": data_context},
config_defaults={"module_name": module_name},
)
if not self.renderer_class:
raise exceptions.ClassInstantiationError(
module_name=module_name,
package_name=None,
class_name=renderer["class_name"],
)
module_name = "great_expectations.render.view"
if view is None:
view = {
"module_name": module_name,
"class_name": "DefaultJinjaPageView",
}
module_name = view.get("module_name") or module_name
self.view_class = instantiate_class_from_config(
config=view,
runtime_environment={
"custom_styles_directory": custom_styles_directory,
"custom_views_directory": custom_views_directory,
},
config_defaults={"module_name": module_name},
)
if not self.view_class:
raise exceptions.ClassInstantiationError(
module_name=view["module_name"],
package_name=None,
class_name=view["class_name"],
)
def build(self, resource_identifiers=None):
source_store_keys = self.source_store.list_keys()
if self.name == "validations" and self.validation_results_limit:
source_store_keys = sorted(
source_store_keys, key=lambda x: x.run_id.run_time, reverse=True
)[: self.validation_results_limit]
for resource_key in source_store_keys:
# If no resource_identifiers are passed, the section builder will build
# a page for every key in its source store.
# If the caller did pass resource_identifiers, the section builder
# will build pages only for the specified resources.
if resource_identifiers and resource_key not in resource_identifiers:
continue
if self.run_name_filter:
if not resource_key_passes_run_name_filter(
resource_key, self.run_name_filter
):
continue
try:
resource = self.source_store.get(resource_key)
except exceptions.InvalidKeyError:
logger.warning(
f"Object with Key: {str(resource_key)} could not be retrieved. Skipping..."
)
continue
if isinstance(resource_key, ExpectationSuiteIdentifier):
expectation_suite_name = resource_key.expectation_suite_name
logger.debug(
" Rendering expectation suite {}".format(
expectation_suite_name
)
)
elif isinstance(resource_key, ValidationResultIdentifier):
run_id = resource_key.run_id
run_name = run_id.run_name
run_time = run_id.run_time
expectation_suite_name = (
resource_key.expectation_suite_identifier.expectation_suite_name
)
if self.name == "profiling":
logger.debug(
" Rendering profiling for batch {}".format(
resource_key.batch_identifier
)
)
else:
logger.debug(
" Rendering validation: run name: {}, run time: {}, suite {} for batch {}".format(
run_name,
run_time,
expectation_suite_name,
resource_key.batch_identifier,
)
)
try:
rendered_content = self.renderer_class.render(resource)
viewable_content = self.view_class.render(
rendered_content,
data_context_id=self.data_context_id,
show_how_to_buttons=self.show_how_to_buttons,
)
self.target_store.set(
SiteSectionIdentifier(
site_section_name=self.name,
resource_identifier=resource_key,
),
viewable_content,
)
except Exception as e:
exception_message = f"""\
An unexpected Exception occurred during data docs rendering. Because of this error, certain parts of data docs will \
not be rendered properly and/or may not appear altogether. Please use the trace, included in this message, to \
diagnose and repair the underlying issue. Detailed information follows:
"""
exception_traceback = traceback.format_exc()
exception_message += (
f'{type(e).__name__}: "{str(e)}". '
f'Traceback: "{exception_traceback}".'
)
logger.error(exception_message)
class DefaultSiteIndexBuilder:
def __init__(
self,
name,
site_name,
data_context,
target_store,
site_section_builders_config,
custom_styles_directory=None,
custom_views_directory=None,
show_how_to_buttons=True,
validation_results_limit=None,
renderer=None,
view=None,
data_context_id=None,
source_stores=None,
**kwargs,
):
# NOTE: This method is almost identical to DefaultSiteSectionBuilder
self.name = name
self.site_name = site_name
self.data_context = data_context
self.target_store = target_store
self.validation_results_limit = validation_results_limit
self.data_context_id = data_context_id
self.show_how_to_buttons = show_how_to_buttons
self.source_stores = source_stores or {}
self.site_section_builders_config = site_section_builders_config or {}
if renderer is None:
renderer = {
"module_name": "great_expectations.render.renderer",
"class_name": "SiteIndexPageRenderer",
}
module_name = (
renderer.get("module_name") or "great_expectations.render.renderer"
)
self.renderer_class = instantiate_class_from_config(
config=renderer,
runtime_environment={"data_context": data_context},
config_defaults={"module_name": module_name},
)
if not self.renderer_class:
raise exceptions.ClassInstantiationError(
module_name=module_name,
package_name=None,
class_name=renderer["class_name"],
)
module_name = "great_expectations.render.view"
if view is None:
view = {
"module_name": module_name,
"class_name": "DefaultJinjaIndexPageView",
}
module_name = view.get("module_name") or module_name
self.view_class = instantiate_class_from_config(
config=view,
runtime_environment={
"custom_styles_directory": custom_styles_directory,
"custom_views_directory": custom_views_directory,
},
config_defaults={"module_name": module_name},
)
if not self.view_class:
raise exceptions.ClassInstantiationError(
module_name=view["module_name"],
package_name=None,
class_name=view["class_name"],
)
def add_resource_info_to_index_links_dict(
self,
index_links_dict,
expectation_suite_name,
section_name,
batch_identifier=None,
run_id=None,
validation_success=None,
run_time=None,
run_name=None,
asset_name=None,
batch_kwargs=None,
batch_spec=None,
):
import os
if section_name + "_links" not in index_links_dict:
index_links_dict[section_name + "_links"] = []
if run_id:
filepath = (
os.path.join(
"validations",
*expectation_suite_name.split("."),
*run_id.to_tuple(),
batch_identifier,
)
+ ".html"
)
else:
filepath = (
os.path.join("expectations", *expectation_suite_name.split("."))
+ ".html"
)
expectation_suite_filepath = os.path.join(
"expectations", *expectation_suite_name.split(".")
)
expectation_suite_filepath += ".html"
index_links_dict[section_name + "_links"].append(
{
"expectation_suite_name": expectation_suite_name,
"filepath": filepath,
"run_id": run_id,
"batch_identifier": batch_identifier,
"validation_success": validation_success,
"run_time": run_time,
"run_name": run_name,
"asset_name": asset_name,
"batch_kwargs": batch_kwargs,
"batch_spec": batch_spec,
"expectation_suite_filepath": expectation_suite_filepath
if run_id
else None,
}
)
return index_links_dict
def get_calls_to_action(self):
usage_statistics = None
# db_driver = None
# datasource_classes_by_name = self.data_context.list_datasources()
#
# if datasource_classes_by_name:
# last_datasource_class_by_name = datasource_classes_by_name[-1]
# last_datasource_class_name = last_datasource_class_by_name["
# class_name"]
# last_datasource_name = last_datasource_class_by_name["name"]
# last_datasource = self.data_context.get_datasource
# (last_datasource_name)
#
# if last_datasource_class_name == "SqlAlchemyDatasource":
# try:
# # NOTE: JPC - 20200327 - I do not believe datasource
# will *ever* have a drivername property
# (it's in credentials). Suspect this isn't working.
# db_driver = last_datasource.drivername
# except AttributeError:
# pass
#
# datasource_type = DATASOURCE_TYPE_BY_DATASOURCE_CLASS[
# last_datasource_class_name].value
# usage_statistics = "?utm_source={}&utm_medium={}
# &utm_campaign={}".format(
# "ge-init-datadocs-v2",
# datasource_type,
# db_driver,
# )
return {
"header": "To continue exploring Great Expectations check out one of these tutorials...",
"buttons": self._get_call_to_action_buttons(usage_statistics),
}
def _get_call_to_action_buttons(self, usage_statistics):
"""
Build project and user specific calls to action buttons.
This can become progressively smarter about project and user specific
calls to action.
"""
create_expectations = CallToActionButton(
"How to Create Expectations",
# TODO update this link to a proper tutorial
"https://docs.greatexpectations.io/en/latest/guides/how_to_guides/creating_and_editing_expectations.html",
)
see_glossary = CallToActionButton(
"See More Kinds of Expectations",
"https://docs.greatexpectations.io/en/latest/reference/glossary_of_expectations.html",
)
validation_playground = CallToActionButton(
"How to Validate Data",
# TODO update this link to a proper tutorial
"https://docs.greatexpectations.io/en/latest/guides/how_to_guides/validation.html",
)
customize_data_docs = CallToActionButton(
"How to Customize Data Docs",
"https://docs.greatexpectations.io/en/latest/reference/core_concepts.html#data-docs",
)
team_site = CallToActionButton(
"How to Set Up a Team Site",
"https://docs.greatexpectations.io/en/latest/guides/how_to_guides/configuring_data_docs.html",
)
# TODO gallery does not yet exist
# gallery = CallToActionButton(
# "Great Expectations Gallery",
# "https://greatexpectations.io/gallery"
# )
results = []
results.append(create_expectations)
# Show these no matter what
results.append(validation_playground)
results.append(team_site)
if usage_statistics:
for button in results:
button.link = button.link + usage_statistics
return results
# TODO: deprecate dual batch api support
def build(self, skip_and_clean_missing=True, build_index: bool = True):
"""
:param skip_and_clean_missing: if True, target html store keys without corresponding source store keys will
be skipped and removed from the target store
:param build_index: if False, skip building the index page
:return: tuple(index_page_url, index_links_dict)
"""
# Loop over sections in the HtmlStore
logger.debug("DefaultSiteIndexBuilder.build")
if not build_index:
logger.debug("Skipping index rendering")
return None, None
index_links_dict = OrderedDict()
index_links_dict["site_name"] = self.site_name
if self.show_how_to_buttons:
index_links_dict["cta_object"] = self.get_calls_to_action()
if (
# TODO why is this duplicated?
self.site_section_builders_config.get("expectations", "None")
and self.site_section_builders_config.get("expectations", "None")
not in FALSEY_YAML_STRINGS
):
expectation_suite_source_keys = self.data_context.stores[
self.site_section_builders_config["expectations"].get(
"source_store_name"
)
].list_keys()
expectation_suite_site_keys = [
ExpectationSuiteIdentifier.from_tuple(expectation_suite_tuple)
for expectation_suite_tuple in self.target_store.store_backends[
ExpectationSuiteIdentifier
].list_keys()
]
if skip_and_clean_missing:
cleaned_keys = []
for expectation_suite_site_key in expectation_suite_site_keys:
if expectation_suite_site_key not in expectation_suite_source_keys:
self.target_store.store_backends[
ExpectationSuiteIdentifier
].remove_key(expectation_suite_site_key)
else:
cleaned_keys.append(expectation_suite_site_key)
expectation_suite_site_keys = cleaned_keys
for expectation_suite_key in expectation_suite_site_keys:
self.add_resource_info_to_index_links_dict(
index_links_dict=index_links_dict,
expectation_suite_name=expectation_suite_key.expectation_suite_name,
section_name="expectations",
)
validation_and_profiling_result_site_keys = []
if (
# TODO why is this duplicated?
self.site_section_builders_config.get("validations", "None")
and self.site_section_builders_config.get("validations", "None")
not in FALSEY_YAML_STRINGS
or self.site_section_builders_config.get("profiling", "None")
and self.site_section_builders_config.get("profiling", "None")
not in FALSEY_YAML_STRINGS
):
source_store = (
"validations"
# TODO why is this duplicated?
if self.site_section_builders_config.get("validations", "None")
and self.site_section_builders_config.get("validations", "None")
not in FALSEY_YAML_STRINGS
else "profiling"
)
validation_and_profiling_result_source_keys = self.data_context.stores[
self.site_section_builders_config[source_store].get("source_store_name")
].list_keys()
validation_and_profiling_result_site_keys = [
ValidationResultIdentifier.from_tuple(validation_result_tuple)
for validation_result_tuple in self.target_store.store_backends[
ValidationResultIdentifier
].list_keys()
]
if skip_and_clean_missing:
cleaned_keys = []
for (
validation_result_site_key
) in validation_and_profiling_result_site_keys:
if (
validation_result_site_key
not in validation_and_profiling_result_source_keys
):
self.target_store.store_backends[
ValidationResultIdentifier
].remove_key(validation_result_site_key)
else:
cleaned_keys.append(validation_result_site_key)
validation_and_profiling_result_site_keys = cleaned_keys
if (
# TODO why is this duplicated?
self.site_section_builders_config.get("profiling", "None")
and self.site_section_builders_config.get("profiling", "None")
not in FALSEY_YAML_STRINGS
):
profiling_run_name_filter = self.site_section_builders_config["profiling"][
"run_name_filter"
]
profiling_result_site_keys = [
validation_result_key
for validation_result_key in validation_and_profiling_result_site_keys
if resource_key_passes_run_name_filter(
validation_result_key, profiling_run_name_filter
)
]
for profiling_result_key in profiling_result_site_keys:
try:
validation = self.data_context.get_validation_result(
batch_identifier=profiling_result_key.batch_identifier,
expectation_suite_name=profiling_result_key.expectation_suite_identifier.expectation_suite_name,
run_id=profiling_result_key.run_id,
validations_store_name=self.source_stores.get("profiling"),
)
batch_kwargs = validation.meta.get("batch_kwargs", {})
batch_spec = validation.meta.get("batch_spec", {})
self.add_resource_info_to_index_links_dict(
index_links_dict=index_links_dict,
expectation_suite_name=profiling_result_key.expectation_suite_identifier.expectation_suite_name,
section_name="profiling",
batch_identifier=profiling_result_key.batch_identifier,
run_id=profiling_result_key.run_id,
run_time=profiling_result_key.run_id.run_time,
run_name=profiling_result_key.run_id.run_name,
asset_name=batch_kwargs.get("data_asset_name")
or batch_spec.get("data_asset_name"),
batch_kwargs=batch_kwargs,
batch_spec=batch_spec,
)
except Exception:
error_msg = "Profiling result not found: {:s} - skipping".format(
str(profiling_result_key.to_tuple())
)
logger.warning(error_msg)
if (
# TODO why is this duplicated?
self.site_section_builders_config.get("validations", "None")
and self.site_section_builders_config.get("validations", "None")
not in FALSEY_YAML_STRINGS
):
validations_run_name_filter = self.site_section_builders_config[
"validations"
]["run_name_filter"]
validation_result_site_keys = [
validation_result_key
for validation_result_key in validation_and_profiling_result_site_keys
if resource_key_passes_run_name_filter(
validation_result_key, validations_run_name_filter
)
]
validation_result_site_keys = sorted(
validation_result_site_keys,
key=lambda x: x.run_id.run_time,
reverse=True,
)
if self.validation_results_limit:
validation_result_site_keys = validation_result_site_keys[
: self.validation_results_limit
]
for validation_result_key in validation_result_site_keys:
try:
validation = self.data_context.get_validation_result(
batch_identifier=validation_result_key.batch_identifier,
expectation_suite_name=validation_result_key.expectation_suite_identifier.expectation_suite_name,
run_id=validation_result_key.run_id,
validations_store_name=self.source_stores.get("validations"),
)
validation_success = validation.success
batch_kwargs = validation.meta.get("batch_kwargs", {})
batch_spec = validation.meta.get("batch_spec", {})
self.add_resource_info_to_index_links_dict(
index_links_dict=index_links_dict,
expectation_suite_name=validation_result_key.expectation_suite_identifier.expectation_suite_name,
section_name="validations",
batch_identifier=validation_result_key.batch_identifier,
run_id=validation_result_key.run_id,
validation_success=validation_success,
run_time=validation_result_key.run_id.run_time,
run_name=validation_result_key.run_id.run_name,
asset_name=batch_kwargs.get("data_asset_name")
or batch_spec.get("data_asset_name"),
batch_kwargs=batch_kwargs,
batch_spec=batch_spec,
)
except Exception:
error_msg = "Validation result not found: {:s} - skipping".format(
str(validation_result_key.to_tuple())
)
logger.warning(error_msg)
try:
rendered_content = self.renderer_class.render(index_links_dict)
viewable_content = self.view_class.render(
rendered_content,
data_context_id=self.data_context_id,
show_how_to_buttons=self.show_how_to_buttons,
)
except Exception as e:
exception_message = f"""\
An unexpected Exception occurred during data docs rendering. Because of this error, certain parts of data docs will \
not be rendered properly and/or may not appear altogether. Please use the trace, included in this message, to \
diagnose and repair the underlying issue. Detailed information follows:
"""
exception_traceback = traceback.format_exc()
exception_message += (
f'{type(e).__name__}: "{str(e)}". Traceback: "{exception_traceback}".'
)
logger.error(exception_message)
return (self.target_store.write_index_page(viewable_content), index_links_dict)
class CallToActionButton:
def __init__(self, title, link):
self.title = title
self.link = link
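# A hedged usage note: in a typical project these classes are not instantiated
# directly; data docs are built through the DataContext, e.g.
#
#   import great_expectations as ge
#   context = ge.data_context.DataContext()
#   context.build_data_docs(site_names=["local_site"])
#
# which reads the data_docs_sites block of great_expectations.yml and drives
# SiteBuilder.build() for each configured site.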
|
# Obstacle Avoidance Program for the Micro:Maqueen robot based the micro:bit
# board
#
# This program starts the Maqueen robot 2 s after the A button is pressed and
# drives straight until it encounters an obstacle; it then backs up to the
# right or left while beeping a warning, and resumes driving straight. It
# stops after the set run time (RUN_TIME_MS) and can be run again by pressing
# the A button.
#
# This program was written in MIND+ (http://mindplus.cc/en.html)
# To upload it to the Maqueen, install MIND+ and copy code to the manual
# editing tab, connect to your robot and click Upload. Have Fun!
#
# Copyright (c) 2019 Jim DiNunzio MIT License
from microbit import *
import music
import urm10
import random
# Adjust this to change the demonstration run time
RUN_TIME_MS = 10 * 1000
CW = 0
CCW = 1
LEFT_MOTOR = 0
RIGHT_MOTOR = 1
DIR_LEFT = 0
DIR_RIGHT = 1
DIR_STRAIGHT = 2
SLOW_SPEED = 30
MED_SPEED = 75
TOO_CLOSE = 8
class maqueen_motor:
def __init__(self):
self.I2C = i2c
self.I2C.init(freq=100000, sda=pin20, scl=pin19)
def run(self, motor, dir, speed):
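        # The Maqueen motor driver listens on I2C address 0x10 and expects
        # three bytes: register (0x00 = left motor, 0x02 = right motor),
        # direction (0 = CW/forward, 1 = CCW/reverse) and speed (0-255).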
buf = bytearray(3)
if motor == 0:
buf[0] = 0x00
else:
buf[0] = 0x02
buf[1] = dir
buf[2] = speed
self.I2C.write(0x10, buf)
def stop(self, motor):
self.run(motor, 0, 0)
class maqueen_robot:
STOPPED = 0
FORWARD = 1
REVERSE = 2
def __init__(self):
self.motor = maqueen_motor()
self.status = maqueen_robot.STOPPED
def forward(self, speed):
music.stop()
self.motor.run(LEFT_MOTOR, CW, speed)
self.motor.run(RIGHT_MOTOR, CW, speed)
self.status = maqueen_robot.FORWARD
def reverse(self, speed, turn):
if turn == DIR_LEFT:
rSpeed = speed // 2
lSpeed = speed
elif turn == DIR_RIGHT:
rSpeed = speed
lSpeed = speed // 2
else: # DIR_STRAIGHT
rSpeed = speed
lSpeed = speed
self.motor.run(LEFT_MOTOR, CCW, lSpeed)
self.motor.run(RIGHT_MOTOR, CCW, rSpeed)
music.play(['B5:4', 'r:4'], pin=pin0, wait=False, loop=True)
self.status = maqueen_robot.REVERSE
def stop(self):
self.motor.stop(0)
self.motor.stop(1)
music.stop()
self.status = maqueen_robot.STOPPED
def onCheckTimeUp():
endTime = running_time() + RUN_TIME_MS
while True:
value = running_time() - endTime
yield value >= 0
maqueen = maqueen_robot()
func_CheckTimeUp = onCheckTimeUp()
random.seed(538)
while True:
func_CheckTimeUp = onCheckTimeUp()
while True:
if button_a.is_pressed() and not button_b.is_pressed():
break
sleep(2000)
while True:
if next(func_CheckTimeUp):
print("Times up!")
maqueen.stop()
break
echo_distance = urm10.read(2, 1)
# print("distance = ", echo_distance)
if echo_distance <= TOO_CLOSE and maqueen.status != maqueen.REVERSE:
print("Obstacle Detected, reversing and turning")
dir = random.randint(DIR_LEFT, DIR_RIGHT)
maqueen.reverse(SLOW_SPEED, dir)
sleep(3000)
elif maqueen.status != maqueen.FORWARD:
print("Going Forward")
maqueen.forward(MED_SPEED)
|
#!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes_bi,
)
import shutil
class WalletHDTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
self.extra_args = [['-usehd=0'], ['-usehd=1', '-keypool=0']]
def run_test(self):
tmpdir = self.options.tmpdir
# Make sure can't switch off usehd after wallet creation
self.stop_node(1)
self.assert_start_raises_init_error(
1, ['-usehd=0'], 'already existing HD wallet')
self.start_node(1)
connect_nodes_bi(self.nodes, 0, 1)
# Make sure we use hd, keep masterkeyid
masterkeyid = self.nodes[1].getwalletinfo()['hdmasterkeyid']
assert_equal(len(masterkeyid), 40)
# create an internal key
change_addr = self.nodes[1].getrawchangeaddress()
change_addrV = self.nodes[1].validateaddress(change_addr)
# first internal child key
assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'")
# Import a non-HD private key in the HD wallet
non_hd_add = self.nodes[0].getnewaddress()
self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))
# This should be enough to keep the master key and the non-HD key
self.nodes[1].backupwallet(tmpdir + "/hd.bak")
#self.nodes[1].dumpwallet(tmpdir + "/hd.dump")
        # Derive some HD addresses and remember the last one
        # Also send funds to each address
self.nodes[0].generate(101)
hd_add = None
num_hd_adds = 300
for i in range(num_hd_adds):
hd_add = self.nodes[1].getnewaddress()
hd_info = self.nodes[1].validateaddress(hd_add)
assert_equal(hd_info["hdkeypath"], "m/0'/0'/" + str(i) + "'")
assert_equal(hd_info["hdmasterkeyid"], masterkeyid)
self.nodes[0].sendtoaddress(hd_add, 1)
self.nodes[0].generate(1)
self.nodes[0].sendtoaddress(non_hd_add, 1)
self.nodes[0].generate(1)
# create an internal key (again)
change_addr = self.nodes[1].getrawchangeaddress()
change_addrV = self.nodes[1].validateaddress(change_addr)
# second internal child key
assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'")
self.sync_all()
assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
self.log.info("Restore backup ...")
self.stop_node(1)
        # We need to delete the complete regtest directory, otherwise node1
        # would auto-recover all funds and flag the keypool keys as used
shutil.rmtree(tmpdir + "/node1/regtest/blocks")
shutil.rmtree(tmpdir + "/node1/regtest/chainstate")
shutil.copyfile(tmpdir + "/hd.bak", tmpdir +
"/node1/regtest/wallet.dat")
self.start_node(1)
# Assert that derivation is deterministic
hd_add_2 = None
        for i in range(num_hd_adds):
            hd_add_2 = self.nodes[1].getnewaddress()
            hd_info_2 = self.nodes[1].validateaddress(hd_add_2)
            assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/" + str(i) + "'")
assert_equal(hd_info_2["hdmasterkeyid"], masterkeyid)
assert_equal(hd_add, hd_add_2)
connect_nodes_bi(self.nodes, 0, 1)
self.sync_all()
# Needs rescan
self.stop_node(1)
self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)
        # Send a tx and make sure it's using the internal chain for the change output
txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
outs = self.nodes[1].decoderawtransaction(
self.nodes[1].gettransaction(txid)['hex'])['vout']
keypath = ""
for out in outs:
if out['value'] != 1:
keypath = self.nodes[1].validateaddress(
out['scriptPubKey']['addresses'][0])['hdkeypath']
assert_equal(keypath[0:7], "m/0'/1'")
if __name__ == '__main__':
WalletHDTest().main()
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pytype: skip-file
# mypy: check-untyped-defs
from __future__ import absolute_import
from __future__ import division
import atexit
import functools
import itertools
import logging
import threading
import time
from typing import TYPE_CHECKING
from typing import Any
from typing import Dict
from typing import Iterator
from typing import Optional
from typing import Tuple
import grpc
from apache_beam.metrics import metric
from apache_beam.metrics.execution import MetricResult
from apache_beam.options.pipeline_options import DebugOptions
from apache_beam.options.pipeline_options import PortableOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.options.value_provider import ValueProvider
from apache_beam.portability import common_urns
from apache_beam.portability.api import beam_artifact_api_pb2_grpc
from apache_beam.portability.api import beam_job_api_pb2
from apache_beam.runners import runner
from apache_beam.runners.job import utils as job_utils
from apache_beam.runners.portability import artifact_service
from apache_beam.runners.portability import job_server
from apache_beam.runners.portability import portable_metrics
from apache_beam.runners.portability.fn_api_runner.fn_runner import translations
from apache_beam.runners.worker import sdk_worker_main
from apache_beam.runners.worker import worker_pool_main
from apache_beam.transforms import environments
if TYPE_CHECKING:
from google.protobuf import struct_pb2 # pylint: disable=ungrouped-imports
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.pipeline import Pipeline
from apache_beam.portability.api import beam_runner_api_pb2
__all__ = ['PortableRunner']
MESSAGE_LOG_LEVELS = {
beam_job_api_pb2.JobMessage.MESSAGE_IMPORTANCE_UNSPECIFIED: logging.INFO,
beam_job_api_pb2.JobMessage.JOB_MESSAGE_DEBUG: logging.DEBUG,
beam_job_api_pb2.JobMessage.JOB_MESSAGE_DETAILED: logging.DEBUG,
beam_job_api_pb2.JobMessage.JOB_MESSAGE_BASIC: logging.INFO,
beam_job_api_pb2.JobMessage.JOB_MESSAGE_WARNING: logging.WARNING,
beam_job_api_pb2.JobMessage.JOB_MESSAGE_ERROR: logging.ERROR,
}
TERMINAL_STATES = [
beam_job_api_pb2.JobState.DONE,
beam_job_api_pb2.JobState.DRAINED,
beam_job_api_pb2.JobState.FAILED,
beam_job_api_pb2.JobState.CANCELLED,
]
ENV_TYPE_ALIASES = {'LOOPBACK': 'EXTERNAL'}
_LOGGER = logging.getLogger(__name__)
class JobServiceHandle(object):
"""
Encapsulates the interactions necessary to submit a pipeline to a job service.
The base set of interactions consists of 3 steps:
- prepare
- stage
- run
"""
def __init__(self, job_service, options, retain_unknown_options=False):
self.job_service = job_service
self.options = options
self.timeout = options.view_as(PortableOptions).job_server_timeout
self.artifact_endpoint = options.view_as(PortableOptions).artifact_endpoint
self._retain_unknown_options = retain_unknown_options
def submit(self, proto_pipeline):
    # type: (beam_runner_api_pb2.Pipeline) -> Tuple[str, Iterator[beam_job_api_pb2.JobMessagesResponse], Iterator[beam_job_api_pb2.JobStateEvent]]
"""
Submit and run the pipeline defined by `proto_pipeline`.
"""
prepare_response = self.prepare(proto_pipeline)
artifact_endpoint = (
self.artifact_endpoint or
prepare_response.artifact_staging_endpoint.url)
self.stage(
proto_pipeline,
artifact_endpoint,
prepare_response.staging_session_token)
return self.run(prepare_response.preparation_id)
def get_pipeline_options(self):
# type: () -> struct_pb2.Struct
"""
Get `self.options` as a protobuf Struct
"""
# fetch runner options from job service
# retries in case the channel is not ready
def send_options_request(max_retries=5):
num_retries = 0
while True:
try:
# This reports channel is READY but connections may fail
          # Seems to be only an issue on Mac with port forwarding
return self.job_service.DescribePipelineOptions(
beam_job_api_pb2.DescribePipelineOptionsRequest(),
timeout=self.timeout)
except grpc.FutureTimeoutError:
# no retry for timeout errors
raise
except grpc.RpcError as e:
num_retries += 1
if num_retries > max_retries:
raise e
time.sleep(1)
options_response = send_options_request()
def add_runner_options(parser):
for option in options_response.options:
try:
# no default values - we don't want runner options
# added unless they were specified by the user
add_arg_args = {'action': 'store', 'help': option.description}
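          # Boolean options are exposed as flags: when the server-side default
          # is 'true' the flag stores False, otherwise it stores True.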
if option.type == beam_job_api_pb2.PipelineOptionType.BOOLEAN:
add_arg_args['action'] = 'store_true' \
if option.default_value != 'true' else 'store_false'
elif option.type == beam_job_api_pb2.PipelineOptionType.INTEGER:
add_arg_args['type'] = int
elif option.type == beam_job_api_pb2.PipelineOptionType.ARRAY:
add_arg_args['action'] = 'append'
parser.add_argument("--%s" % option.name, **add_arg_args)
except Exception as e:
          # ignore runner options that are already present;
          # only in this case is a duplicate not treated as an error
if 'conflicting option string' not in str(e):
raise
_LOGGER.debug("Runner option '%s' was already added" % option.name)
all_options = self.options.get_all_options(
add_extra_args_fn=add_runner_options,
retain_unknown_options=self._retain_unknown_options)
return self.encode_pipeline_options(all_options)
@staticmethod
def encode_pipeline_options(
all_options: Dict[str, Any]) -> 'struct_pb2.Struct':
def convert_pipeline_option_value(v):
# convert int values: BEAM-5509
if type(v) == int:
return str(v)
elif isinstance(v, ValueProvider):
return convert_pipeline_option_value(
v.get()) if v.is_accessible() else None
return v
# TODO: Define URNs for options.
p_options = {
'beam:option:' + k + ':v1': convert_pipeline_option_value(v)
for k,
v in all_options.items() if v is not None
}
return job_utils.dict_to_struct(p_options)
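  # For example (illustrative values): encode_pipeline_options(
  #     {'job_name': 'wordcount', 'num_workers': 2})
  # yields a Struct mapping 'beam:option:job_name:v1' -> 'wordcount' and
  # 'beam:option:num_workers:v1' -> '2' (integers are stringified, BEAM-5509).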
def prepare(self, proto_pipeline):
# type: (beam_runner_api_pb2.Pipeline) -> beam_job_api_pb2.PrepareJobResponse
"""Prepare the job on the job service"""
return self.job_service.Prepare(
beam_job_api_pb2.PrepareJobRequest(
job_name='job',
pipeline=proto_pipeline,
pipeline_options=self.get_pipeline_options()),
timeout=self.timeout)
def stage(self,
proto_pipeline, # type: beam_runner_api_pb2.Pipeline
artifact_staging_endpoint,
staging_session_token
):
# type: (...) -> None
"""Stage artifacts"""
if artifact_staging_endpoint:
artifact_service.offer_artifacts(
beam_artifact_api_pb2_grpc.ArtifactStagingServiceStub(
channel=grpc.insecure_channel(artifact_staging_endpoint)),
artifact_service.ArtifactRetrievalService(
artifact_service.BeamFilesystemHandler(None).file_reader),
staging_session_token)
def run(self, preparation_id):
    # type: (str) -> Tuple[str, Iterator[beam_job_api_pb2.JobMessagesResponse], Iterator[beam_job_api_pb2.JobStateEvent]]
"""Run the job"""
try:
state_stream = self.job_service.GetStateStream(
beam_job_api_pb2.GetJobStateRequest(job_id=preparation_id),
timeout=self.timeout)
# If there's an error, we don't always get it until we try to read.
# Fortunately, there's always an immediate current state published.
state_stream = itertools.chain([next(state_stream)], state_stream)
message_stream = self.job_service.GetMessageStream(
beam_job_api_pb2.JobMessagesRequest(job_id=preparation_id),
timeout=self.timeout)
except Exception:
# TODO(BEAM-6442): Unify preparation_id and job_id for all runners.
state_stream = message_stream = None
    # Run the job and wait for a result; we don't set a timeout here because
# it may take a long time for a job to complete and streaming
# jobs currently never return a response.
run_response = self.job_service.Run(
beam_job_api_pb2.RunJobRequest(preparation_id=preparation_id))
if state_stream is None:
state_stream = self.job_service.GetStateStream(
beam_job_api_pb2.GetJobStateRequest(job_id=run_response.job_id))
message_stream = self.job_service.GetMessageStream(
beam_job_api_pb2.JobMessagesRequest(job_id=run_response.job_id))
return run_response.job_id, message_stream, state_stream
class PortableRunner(runner.PipelineRunner):
"""
Experimental: No backward compatibility guaranteed.
A BeamRunner that executes Python pipelines via the Beam Job API.
This runner is a stub and does not run the actual job.
This runner schedules the job on a job service. The responsibility of
running and managing the job lies with the job service used.
"""
def __init__(self):
self._dockerized_job_server = None # type: Optional[job_server.JobServer]
@staticmethod
def _create_environment(options):
# type: (PipelineOptions) -> environments.Environment
portable_options = options.view_as(PortableOptions)
# Do not set a Runner. Otherwise this can cause problems in Java's
# PipelineOptions, i.e. ClassNotFoundException, if the corresponding Runner
# does not exist in the Java SDK. In portability, the entry point is clearly
# defined via the JobService.
portable_options.view_as(StandardOptions).runner = None
environment_type = portable_options.environment_type
if not environment_type:
environment_urn = common_urns.environments.DOCKER.urn
elif environment_type.startswith('beam:env:'):
environment_urn = environment_type
else:
# e.g. handle LOOPBACK -> EXTERNAL
environment_type = ENV_TYPE_ALIASES.get(
environment_type, environment_type)
try:
environment_urn = getattr(
common_urns.environments, environment_type).urn
except AttributeError:
raise ValueError('Unknown environment type: %s' % environment_type)
env_class = environments.Environment.get_env_cls_from_urn(environment_urn)
return env_class.from_options(portable_options)
def default_job_server(self, options):
raise NotImplementedError(
'You must specify a --job_endpoint when using --runner=PortableRunner. '
'Alternatively, you may specify which portable runner you intend to '
'use, such as --runner=FlinkRunner or --runner=SparkRunner.')
def create_job_service_handle(self, job_service, options):
# type: (...) -> JobServiceHandle
return JobServiceHandle(job_service, options)
def create_job_service(self, options):
# type: (PipelineOptions) -> JobServiceHandle
"""
Start the job service and return a `JobServiceHandle`
"""
job_endpoint = options.view_as(PortableOptions).job_endpoint
if job_endpoint:
if job_endpoint == 'embed':
server = job_server.EmbeddedJobServer() # type: job_server.JobServer
else:
job_server_timeout = options.view_as(PortableOptions).job_server_timeout
server = job_server.ExternalJobServer(job_endpoint, job_server_timeout)
else:
server = self.default_job_server(options)
return self.create_job_service_handle(server.start(), options)
@staticmethod
def get_proto_pipeline(pipeline, options):
# type: (Pipeline, PipelineOptions) -> beam_runner_api_pb2.Pipeline
portable_options = options.view_as(PortableOptions)
proto_pipeline = pipeline.to_runner_api(
default_environment=PortableRunner._create_environment(
portable_options))
# TODO: https://issues.apache.org/jira/browse/BEAM-7199
    # Eventually remove the 'pre_optimize' option altogether and only perform
# the equivalent of the 'default' case below (minus the 'lift_combiners'
# part).
pre_optimize = options.view_as(DebugOptions).lookup_experiment(
'pre_optimize', 'default').lower()
if (not options.view_as(StandardOptions).streaming and
pre_optimize != 'none'):
if pre_optimize == 'default':
phases = [
# TODO: https://issues.apache.org/jira/browse/BEAM-4678
# https://issues.apache.org/jira/browse/BEAM-11478
# Eventually remove the 'lift_combiners' phase from 'default'.
translations.lift_combiners,
translations.sort_stages
]
partial = True
elif pre_optimize == 'all':
phases = [
translations.annotate_downstream_side_inputs,
translations.annotate_stateful_dofns_as_roots,
translations.fix_side_input_pcoll_coders,
translations.pack_combiners,
translations.lift_combiners,
translations.expand_sdf,
translations.fix_flatten_coders,
# translations.sink_flattens,
translations.greedily_fuse,
translations.read_to_impulse,
translations.extract_impulse_stages,
translations.remove_data_plane_ops,
translations.sort_stages
]
partial = False
elif pre_optimize == 'all_except_fusion':
# TODO(BEAM-7248): Delete this branch after PortableRunner supports
# beam:runner:executable_stage:v1.
phases = [
translations.annotate_downstream_side_inputs,
translations.annotate_stateful_dofns_as_roots,
translations.fix_side_input_pcoll_coders,
translations.pack_combiners,
translations.lift_combiners,
translations.expand_sdf,
translations.fix_flatten_coders,
# translations.sink_flattens,
# translations.greedily_fuse,
translations.read_to_impulse,
translations.extract_impulse_stages,
translations.remove_data_plane_ops,
translations.sort_stages
]
partial = True
else:
phases = []
for phase_name in pre_optimize.split(','):
# For now, these are all we allow.
if phase_name in ('pack_combiners', 'lift_combiners'):
phases.append(getattr(translations, phase_name))
else:
raise ValueError(
'Unknown or inapplicable phase for pre_optimize: %s' %
phase_name)
phases.append(translations.sort_stages)
partial = True
    # All (known) portable runners (i.e. Flink and Spark) support these URNs.
known_urns = frozenset([
common_urns.composites.RESHUFFLE.urn,
common_urns.primitives.IMPULSE.urn,
common_urns.primitives.FLATTEN.urn,
common_urns.primitives.GROUP_BY_KEY.urn
])
proto_pipeline = translations.optimize_pipeline(
proto_pipeline,
phases=phases,
known_runner_urns=known_urns,
partial=partial)
return proto_pipeline
def run_pipeline(self, pipeline, options):
# type: (Pipeline, PipelineOptions) -> PipelineResult
portable_options = options.view_as(PortableOptions)
# TODO: https://issues.apache.org/jira/browse/BEAM-5525
# portable runner specific default
if options.view_as(SetupOptions).sdk_location == 'default':
options.view_as(SetupOptions).sdk_location = 'container'
experiments = options.view_as(DebugOptions).experiments or []
# This is needed as we start a worker server if one is requested
# but none is provided.
if portable_options.environment_type == 'LOOPBACK':
use_loopback_process_worker = options.view_as(
DebugOptions).lookup_experiment('use_loopback_process_worker', False)
portable_options.environment_config, server = (
worker_pool_main.BeamFnExternalWorkerPoolServicer.start(
state_cache_size=
sdk_worker_main._get_state_cache_size(experiments),
data_buffer_time_limit_ms=
sdk_worker_main._get_data_buffer_time_limit_ms(experiments),
use_process=use_loopback_process_worker))
cleanup_callbacks = [functools.partial(server.stop, 1)]
else:
cleanup_callbacks = []
proto_pipeline = self.get_proto_pipeline(pipeline, options)
job_service_handle = self.create_job_service(options)
job_id, message_stream, state_stream = \
job_service_handle.submit(proto_pipeline)
result = PipelineResult(
job_service_handle.job_service,
job_id,
message_stream,
state_stream,
cleanup_callbacks)
if cleanup_callbacks:
# Register an exit handler to ensure cleanup on exit.
atexit.register(functools.partial(result._cleanup, on_exit=True))
_LOGGER.info(
'Environment "%s" has started a component necessary for the '
'execution. Be sure to run the pipeline using\n'
' with Pipeline() as p:\n'
' p.apply(..)\n'
'This ensures that the pipeline finishes before this program exits.',
portable_options.environment_type)
return result
class PortableMetrics(metric.MetricResults):
def __init__(self, job_metrics_response):
metrics = job_metrics_response.metrics
self.attempted = portable_metrics.from_monitoring_infos(metrics.attempted)
self.committed = portable_metrics.from_monitoring_infos(metrics.committed)
@staticmethod
def _combine(committed, attempted, filter):
all_keys = set(committed.keys()) | set(attempted.keys())
return [
MetricResult(key, committed.get(key), attempted.get(key))
for key in all_keys if metric.MetricResults.matches(filter, key)
]
def query(self, filter=None):
counters, distributions, gauges = [
self._combine(x, y, filter)
for x, y in zip(self.committed, self.attempted)
]
return {
self.COUNTERS: counters,
self.DISTRIBUTIONS: distributions,
self.GAUGES: gauges
}
class PipelineResult(runner.PipelineResult):
def __init__(
self,
job_service,
job_id,
message_stream,
state_stream,
cleanup_callbacks=()):
super(PipelineResult, self).__init__(beam_job_api_pb2.JobState.UNSPECIFIED)
self._job_service = job_service
self._job_id = job_id
self._messages = []
self._message_stream = message_stream
self._state_stream = state_stream
self._cleanup_callbacks = cleanup_callbacks
self._metrics = None
self._runtime_exception = None
def cancel(self):
# type: () -> None
try:
self._job_service.Cancel(
beam_job_api_pb2.CancelJobRequest(job_id=self._job_id))
finally:
self._cleanup()
@property
def state(self):
runner_api_state = self._job_service.GetState(
beam_job_api_pb2.GetJobStateRequest(job_id=self._job_id)).state
self._state = self._runner_api_state_to_pipeline_state(runner_api_state)
return self._state
@staticmethod
def _runner_api_state_to_pipeline_state(runner_api_state):
return getattr(
runner.PipelineState,
beam_job_api_pb2.JobState.Enum.Name(runner_api_state))
@staticmethod
def _pipeline_state_to_runner_api_state(pipeline_state):
return beam_job_api_pb2.JobState.Enum.Value(pipeline_state)
def metrics(self):
if not self._metrics:
job_metrics_response = self._job_service.GetJobMetrics(
beam_job_api_pb2.GetJobMetricsRequest(job_id=self._job_id))
self._metrics = PortableMetrics(job_metrics_response)
return self._metrics
def _last_error_message(self):
# type: () -> str
    # Keep only messages that carry a message_response, then pick out errors.
messages = [
m.message_response for m in self._messages
if m.HasField('message_response')
]
error_messages = [
m for m in messages
if m.importance == beam_job_api_pb2.JobMessage.JOB_MESSAGE_ERROR
]
if error_messages:
return error_messages[-1].message_text
else:
return 'unknown error'
def wait_until_finish(self, duration=None):
"""
:param duration: The maximum time in milliseconds to wait for the result of
the execution. If None or zero, will wait until the pipeline finishes.
:return: The result of the pipeline, i.e. PipelineResult.
"""
def read_messages():
# type: () -> None
previous_state = -1
for message in self._message_stream:
if message.HasField('message_response'):
logging.log(
MESSAGE_LOG_LEVELS[message.message_response.importance],
"%s",
message.message_response.message_text)
else:
current_state = message.state_response.state
if current_state != previous_state:
_LOGGER.info(
"Job state changed to %s",
self._runner_api_state_to_pipeline_state(current_state))
previous_state = current_state
self._messages.append(message)
message_thread = threading.Thread(
target=read_messages, name='wait_until_finish_read')
message_thread.daemon = True
message_thread.start()
if duration:
state_thread = threading.Thread(
target=functools.partial(self._observe_state, message_thread),
name='wait_until_finish_state_observer')
state_thread.daemon = True
state_thread.start()
start_time = time.time()
duration_secs = duration / 1000
while (time.time() - start_time < duration_secs and
state_thread.is_alive()):
time.sleep(1)
else:
self._observe_state(message_thread)
if self._runtime_exception:
raise self._runtime_exception
return self._state
def _observe_state(self, message_thread):
try:
for state_response in self._state_stream:
self._state = self._runner_api_state_to_pipeline_state(
state_response.state)
if state_response.state in TERMINAL_STATES:
# Wait for any last messages.
message_thread.join(10)
break
if self._state != runner.PipelineState.DONE:
self._runtime_exception = RuntimeError(
'Pipeline %s failed in state %s: %s' %
(self._job_id, self._state, self._last_error_message()))
except Exception as e:
self._runtime_exception = e
finally:
self._cleanup()
def _cleanup(self, on_exit=False):
# type: (bool) -> None
if on_exit and self._cleanup_callbacks:
_LOGGER.info(
'Running cleanup on exit. If your pipeline should continue running, '
'be sure to use the following syntax:\n'
' with Pipeline() as p:\n'
' p.apply(..)\n'
'This ensures that the pipeline finishes before this program exits.')
    last_exception = None
    for callback in self._cleanup_callbacks:
      try:
        callback()
      except Exception as e:
        # Remember the failure but keep running the remaining callbacks.
        last_exception = e
    self._cleanup_callbacks = ()
    if last_exception is not None:
      # Re-raise explicitly; a bare `raise` outside an except block would fail.
      raise last_exception
|
# SPDX-Package: FAVD
# SPDX-PackageHomePage: https://github.com/secureIT-project/FAVD
# SPDX-License-Identifier: MIT
# SPDX-FileCopyrightText: 2021-2022 David Binkley, Leon Moonen, Sibren Isaacman
import numpy as np
class CharacterTable(object):
"""Given a set of characters:
+ Encode them to a one hot integer representation
+ Decode the one hot integer representation to their character output
+ Decode a vector of probabilities to their character output
"""
def __init__(self, chars):
"""Initialize character table.
# Arguments
chars: Characters that can appear in the input.
"""
self.chars = sorted(set(chars))
self.char_indices = dict((c, i) for i, c in enumerate(self.chars))
self.indices_char = dict((i, c) for i, c in enumerate(self.chars))
def encode(self, C, num_rows):
"""One hot encode given string C.
# Arguments
            num_rows: Number of rows in the returned one hot encoding. This is
                used to keep the number of rows the same for every sample.
"""
x = np.zeros((num_rows, len(self.chars)))
for i, c in enumerate(C):
x[i, self.char_indices[c]] = 1
return x
def decode(self, x, calc_argmax=True):
if calc_argmax:
x = x.argmax(axis=-1)
        return ''.join(self.indices_char[i] for i in x)
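# A minimal usage sketch (hypothetical values, not part of the original
# module): encode a short string into a fixed number of one-hot rows and
# decode it back.
#
#   table = CharacterTable('abcd ')
#   onehot = table.encode('abc', num_rows=5)  # shape (5, 5); rows 3-4 stay zero
#   roundtrip = table.decode(onehot)          # 'abc  ' (zero rows decode to ' ')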
|
"""Operations for dannce."""
import tensorflow as tf
import numpy as np
from scipy.interpolate import RegularGridInterpolator
import tensorflow.keras.backend as K
import tensorflow.keras.initializers as initializers
import tensorflow.keras.constraints as constraints
import tensorflow.keras.regularizers as regularizers
from tensorflow.keras.layers import Layer, InputSpec
from tensorflow.keras.utils import get_custom_objects
import cv2
import time
from typing import Text, List, Dict, Tuple, Union
import torch
def camera_matrix(K: np.ndarray, R: np.ndarray, t: np.ndarray) -> np.ndarray:
"""Derive the camera matrix.
    Derive the camera matrix from the camera intrinsic matrix (K),
    the extrinsic rotation matrix (R), and the extrinsic
    translation vector (t).
Note that this uses the matlab convention, such that
M = [R;t] * K
"""
return np.concatenate((R, t), axis=0) @ K
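# Shape sketch for camera_matrix (illustrative, following the Matlab-style
# row-vector convention above): K is (3, 3), R is (3, 3) and t is (1, 3), so
# [R; t] is (4, 3) and M = [R; t] @ K is (4, 3). A homogeneous point row
# vector of shape (1, 4) times M then yields a (1, 3) projective 2-D point.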
def project_to2d(
pts: np.ndarray, K: np.ndarray, R: np.ndarray, t: np.ndarray
) -> np.ndarray:
"""Project 3d points to 2d.
Projects a set of 3-D points, pts, into 2-D using the camera intrinsic
    matrix (K), the extrinsic rotation matrix (R), and the extrinsic
    translation vector (t). Note that this uses the matlab
convention, such that
M = [R;t] * K, and pts2d = pts3d * M
"""
M = np.concatenate((R, t), axis=0) @ K
projPts = np.concatenate((pts, np.ones((pts.shape[0], 1))), axis=1) @ M
projPts[:, :2] = projPts[:, :2] / projPts[:, 2:]
return projPts
def project_to2d_torch(pts, M: np.ndarray, device: Text) -> torch.Tensor:
"""Project 3d points to 2d.
    Projects a set of 3-D points, pts, into 2-D using the precomputed camera
    matrix M (see camera_matrix). Note that this uses the matlab
    convention, such that
    M = [R;t] * K, and pts2d = pts3d * M
"""
import torch
# pts = torch.Tensor(pts.copy()).to(device)
M = M.to(device=device)
pts1 = torch.ones(pts.shape[0], 1, dtype=torch.float32, device=device)
projPts = torch.matmul(torch.cat((pts, pts1), 1), M)
projPts[:, :2] = projPts[:, :2] / projPts[:, 2:]
return projPts
def project_to2d_tf(projPts, M):
"""Project 3d points to 2d.
    Projects a set of homogeneous 3-D points, projPts, into 2-D using the
    precomputed camera matrix M. Note that this uses the matlab
    convention, such that
    M = [R;t] * K, and pts2d = pts3d * M
"""
projPts = tf.matmul(projPts, M)
projPts = projPts[:, :2] / projPts[:, 2:]
return projPts
def sample_grid(im: np.ndarray, projPts: np.ndarray, method: Text = "linear"):
"""Transfer 3d features to 2d by projecting down to 2d grid.
Use 2d interpolation to transfer features to 3d points that have
projected down onto a 2d grid
Note that function expects proj_grid to be flattened, so results should be
reshaped after being returned
"""
if method == "linear":
f_r = RegularGridInterpolator(
(np.arange(im.shape[0]), np.arange(im.shape[1])),
im[:, :, 0],
method="linear",
bounds_error=False,
fill_value=0,
)
f_g = RegularGridInterpolator(
(np.arange(im.shape[0]), np.arange(im.shape[1])),
im[:, :, 1],
method="linear",
bounds_error=False,
fill_value=0,
)
f_b = RegularGridInterpolator(
(np.arange(im.shape[0]), np.arange(im.shape[1])),
im[:, :, 2],
method="linear",
bounds_error=False,
fill_value=0,
)
proj_r = f_r(projPts[:, ::-1])
proj_g = f_g(projPts[:, ::-1])
proj_b = f_b(projPts[:, ::-1])
# Nearest neighbor rounding technique
# Remember that projPts[:,0] is the "x" coordinate, i.e. the
# column dimension, and projPts[:,1] is "y", indexing in the row
# dimension, matrix-wise (i.e. from the top of the image)
elif method == "nearest":
# Now I could index an array with the values
projPts = np.round(projPts[:, ::-1]).astype("int")
# But some of them could be rounded outside of the image
projPts[projPts[:, 0] < 0, 0] = 0
projPts[projPts[:, 0] >= im.shape[0], 0] = im.shape[0] - 1
projPts[projPts[:, 1] < 0, 1] = 0
projPts[projPts[:, 1] >= im.shape[1], 1] = im.shape[1] - 1
projPts = (projPts[:, 0], projPts[:, 1])
proj_r = im[:, :, 0]
proj_r = proj_r[projPts]
proj_g = im[:, :, 1]
proj_g = proj_g[projPts]
proj_b = im[:, :, 2]
proj_b = proj_b[projPts]
# Do nearest, but because the channel dimension can be arbitrarily large,
# we put the final part of this in a loop
elif method == "out2d":
# Now I could index an array with the values
projPts = np.round(projPts[:, ::-1]).astype("int")
# But some of them could be outside of the image
projPts[projPts[:, 0] < 0, 0] = 0
projPts[projPts[:, 0] >= im.shape[0], 0] = im.shape[0] - 1
projPts[projPts[:, 1] < 0, 1] = 0
projPts[projPts[:, 1] >= im.shape[1], 1] = im.shape[1] - 1
imout = np.zeros((projPts.shape[0], im.shape[-1]))
projPts = (projPts[:, 0], projPts[:, 1])
for ii in range(im.shape[-1]):
tmp = im[:, :, ii]
imout[:, ii] = tmp[projPts]
return imout
else:
raise Exception("not a valid interpolation method")
return proj_r, proj_g, proj_b
def sample_grid_torch_nearest(
im: np.ndarray, projPts: np.ndarray, device: Text
) -> torch.Tensor:
"""Unproject features."""
# im_x, im_y are the x and y coordinates of each projected 3D position.
# These are concatenated here for every image in each batch,
import torch
feats = torch.as_tensor(im.copy(), device=device)
grid = projPts
c = int(round(projPts.shape[0] ** (1 / 3.0)))
fh, fw, fdim = list(feats.shape)
# # make sure all projected indices fit onto the feature map
im_x = torch.clamp(grid[:, 0], 0, fw - 1)
im_y = torch.clamp(grid[:, 1], 0, fh - 1)
im_xr = im_x.round().type(torch.long)
im_yr = im_y.round().type(torch.long)
im_xr[im_xr < 0] = 0
im_yr[im_yr < 0] = 0
Ir = feats[im_yr, im_xr]
return Ir.reshape((c, c, c, -1)).permute(3, 0, 1, 2).unsqueeze(0)
def sample_grid_torch_linear(
im: np.ndarray, projPts: np.ndarray, device: Text
) -> torch.Tensor:
"""Unproject features."""
# im_x, im_y are the x and y coordinates of each projected 3D position.
# These are concatenated here for every image in each batch,
import torch
feats = torch.as_tensor(im.copy(), device=device)
grid = projPts
c = int(round(projPts.shape[0] ** (1 / 3.0)))
fh, fw, fdim = list(feats.shape)
# # make sure all projected indices fit onto the feature map
im_x = torch.clamp(grid[:, 0], 0, fw - 1)
im_y = torch.clamp(grid[:, 1], 0, fh - 1)
# round all indices
im_x0 = torch.floor(im_x).type(torch.long)
# new array with rounded projected indices + 1
im_x1 = im_x0 + 1
im_y0 = torch.floor(im_y).type(torch.long)
im_y1 = im_y0 + 1
# Convert from int to float -- but these are still round
# numbers because of rounding step above
im_x0_f, im_x1_f = im_x0.type(torch.float), im_x1.type(torch.float)
im_y0_f, im_y1_f = im_y0.type(torch.float), im_y1.type(torch.float)
# Gather values
# Samples all featuremaps at the projected indices,
# and their +1 counterparts. Stop at Ia for nearest neighbor interpolation.
# need to clip the corner indices because they might be out of bounds...
# This could lead to different behavior compared to TF/numpy, which return 0
# when an index is out of bounds
im_x1_safe = torch.clamp(im_x1, 0, fw - 1)
im_y1_safe = torch.clamp(im_y1, 0, fh - 1)
im_x1[im_x1 < 0] = 0
im_y1[im_y1 < 0] = 0
im_x0[im_x0 < 0] = 0
im_y0[im_y0 < 0] = 0
im_x1_safe[im_x1_safe < 0] = 0
im_y1_safe[im_y1_safe < 0] = 0
Ia = feats[im_y0, im_x0]
Ib = feats[im_y0, im_x1_safe]
Ic = feats[im_y1_safe, im_x0]
Id = feats[im_y1_safe, im_x1_safe]
    # To recapitulate behavior in numpy/TF, zero out values that fall outside bounds
Ib[im_x1 > fw - 1] = 0
Ic[im_y1 > fh - 1] = 0
Id[(im_x1 > fw - 1) | (im_y1 > fh - 1)] = 0
# Calculate bilinear weights
# We've now sampled the feature maps at corners around the projected values
# Here, the corners are weighted by distance from the projected value
wa = (im_x1_f - im_x) * (im_y1_f - im_y)
wb = (im_x1_f - im_x) * (im_y - im_y0_f)
wc = (im_x - im_x0_f) * (im_y1_f - im_y)
wd = (im_x - im_x0_f) * (im_y - im_y0_f)
Ibilin = (
wa.unsqueeze(1) * Ia
+ wb.unsqueeze(1) * Ib
+ wc.unsqueeze(1) * Ic
+ wd.unsqueeze(1) * Id
)
return Ibilin.reshape((c, c, c, -1)).permute(3, 0, 1, 2).unsqueeze(0)
def sample_grid_torch(im: np.ndarray, projPts: np.ndarray, device: Text, method: Text = "linear"):
"""Transfer 3d features to 2d by projecting down to 2d grid, using torch.
Use 2d interpolation to transfer features to 3d points that have
projected down onto a 2d grid
Note that function expects proj_grid to be flattened, so results should be
reshaped after being returned
"""
if method == "nearest" or method == "out2d":
proj_rgb = sample_grid_torch_nearest(im, projPts, device)
elif method == "linear" or method == "bilinear":
proj_rgb = sample_grid_torch_linear(im, projPts, device)
else:
raise Exception("{} not a valid interpolation method".format(method))
return proj_rgb
def sample_grid_tf(im, projPts, device, method="linear"):
"""Transfer 3d features to 2d by projecting down to 2d grid.
Use 2d interpolation to transfer features to 3d points that have
projected down onto a 2d grid
Note that function expects proj_grid to be flattened, so results should be
reshaped after being returned
"""
with tf.device(device):
im = tf.constant(im)
im = tf.expand_dims(im, 0)
if method == "nearest":
projPts = tf.expand_dims(projPts, 0)
proj_rgb = unproj_tf_nearest(im, projPts, 1)
elif method == "linear":
im = tf.cast(im, "float32")
projPts = tf.expand_dims(projPts, 0)
projPts = tf.reverse(projPts, [1])
proj_rgb = tf.cast(unproj_tf_linear(im, projPts, 1), "uint8")
proj_rgb = tf.reshape(proj_rgb, (tf.shape(projPts)[1], 3))
proj_rgb = tf.reverse(proj_rgb, [0])
else:
raise Exception("not a valid interpolation method")
return proj_rgb
@tf.function
def unproj_tf_nearest(feats, grid, batch_size):
"""Unproject features.
Modified from https://github.com/akar43/lsm
"""
# im_x, im_y are the x and y coordinates of each projected 3D position.
# These are concatenated here for every image in each batch,
nR, fh, fw, fdim = K.int_shape(feats)
nR2, nV, nD = K.int_shape(grid)
# # make sure all projected indices fit onto the feature map
im_x = tf.clip_by_value(grid[:, :, 0], 0, fw - 1)
im_y = tf.clip_by_value(grid[:, :, 1], 0, fh - 1)
# nR should be batch_size*num_cams
# eg. [0,1,2,3,4,5] for 3 cams, batch_size=2
ind_grid = tf.range(0, nR)
ind_grid = tf.expand_dims(ind_grid, 1)
# nV is the number of voxels, so this tiling operation
# produces e.g. [0,0,0,0,0,0; 1,1,1,1,1,1]
im_ind = tf.tile(ind_grid, [1, nV])
@tf.function
def _get_gather_inds(x, y):
return tf.reshape(tf.stack([im_ind, y, x], axis=2), [-1, 3])
im_xr = tf.cast(tf.round(im_x), "int32")
im_yr = tf.cast(tf.round(im_y), "int32")
Ir = tf.gather_nd(feats, _get_gather_inds(im_xr, im_yr))
return Ir
@tf.function
def unproj_tf_linear(feats, grid, batch_size):
"""Unproject features.
Modified from https://github.com/akar43/lsm
"""
# im_x, im_y are the x and y coordinates of each projected 3D position.
# These are concatenated here for every image in each batch,
nR, fh, fw, fdim = K.int_shape(feats)
nR2, nV, nD = K.int_shape(grid)
# make sure all projected indices fit onto the feature map
im_x = tf.clip_by_value(grid[:, :, 0], 0, fw - 1)
im_y = tf.clip_by_value(grid[:, :, 1], 0, fh - 1)
# round all indices
im_x0 = tf.cast(tf.floor(im_x), "int32")
# new array with rounded projected indices + 1
im_x1 = im_x0 + 1
im_y0 = tf.cast(tf.floor(im_y), "int32")
im_y1 = im_y0 + 1
# Convert from int to float -- but these are still round
# numbers because of rounding step above
im_x0_f, im_x1_f = tf.cast(im_x0, "float32"), tf.cast(im_x1, "float32")
im_y0_f, im_y1_f = tf.cast(im_y0, "float32"), tf.cast(im_y1, "float32")
# nR should be batch_size*num_cams
# eg. [0,1,2,3,4,5] for 3 cams, batch_size=2
ind_grid = tf.range(0, nR)
ind_grid = tf.expand_dims(ind_grid, 1)
# nV is the number of voxels, so this tiling operation
# produces e.g. [0,0,0,0,0,0; 1,1,1,1,1,1]
im_ind = tf.tile(ind_grid, [1, nV])
@tf.function
def _get_gather_inds(x, y):
return tf.reshape(tf.stack([im_ind, y, x], axis=2), [-1, 3])
# Gather values
# Samples all featuremaps per batch/camera at the projected indices,
# and their +1 counterparts. Stop at Ia for nearest neighbor interpolation.
# I* should be a tensor of shape:
# (num_cams*batch_size*len(im_x0)*len(im_y0), fdim)
Ia = tf.gather_nd(feats, _get_gather_inds(im_x0, im_y0))
Ib = tf.gather_nd(feats, _get_gather_inds(im_x0, im_y1))
Ic = tf.gather_nd(feats, _get_gather_inds(im_x1, im_y0))
Id = tf.gather_nd(feats, _get_gather_inds(im_x1, im_y1))
# Calculate bilinear weights
# We've now sampled the feature maps at corners around the projected values
    # Here, the corners are weighted by distance from the projected value
wa = (im_x1_f - im_x) * (im_y1_f - im_y)
wb = (im_x1_f - im_x) * (im_y - im_y0_f)
wc = (im_x - im_x0_f) * (im_y1_f - im_y)
wd = (im_x - im_x0_f) * (im_y - im_y0_f)
# TODO(reshape): Why is this reshape necessary?
wa, wb = tf.reshape(wa, [-1, 1]), tf.reshape(wb, [-1, 1])
wc, wd = tf.reshape(wc, [-1, 1]), tf.reshape(wd, [-1, 1])
Ibilin = tf.add_n([wa * Ia, wb * Ib, wc * Ic, wd * Id])
Ibilin = tf.reshape(
Ibilin,
[
batch_size,
nR // batch_size,
int((nV + 1) ** (1 / 3)),
int((nV + 1) ** (1 / 3)),
int((nV + 1) ** (1 / 3)),
fdim,
],
)
Ibilin = tf.transpose(Ibilin, [0, 1, 3, 2, 4, 5])
return Ibilin
def unproj(feats, grid, batch_size):
"""Unproject features.
Modified from https://github.com/akar43/lsm
"""
# im_x, im_y are the x and y coordinates of each projected 3D position.
# These are concatenated here for every image in each batch,
nR, fh, fw, fdim = K.int_shape(feats)
nR2, nV, nD = K.int_shape(grid)
# make sure all projected indices fit onto the feature map
im_x = tf.clip_by_value(grid[:, :, 0], 0, fw - 1)
im_y = tf.clip_by_value(grid[:, :, 1], 0, fh - 1)
    # round all indices down
im_x0 = tf.cast(tf.floor(im_x), "int32")
# new array with rounded projected indices + 1
im_x1 = im_x0 + 1
im_y0 = tf.cast(tf.floor(im_y), "int32")
im_y1 = im_y0 + 1
# Convert from int to float -- but these are still round
# numbers because of rounding step above
im_x0_f, im_x1_f = tf.to_float(im_x0), tf.to_float(im_x1)
im_y0_f, im_y1_f = tf.to_float(im_y0), tf.to_float(im_y1)
# nR should be batch_size*num_cams
# eg. [0,1,2,3,4,5] for 3 cams, batch_size=2
ind_grid = tf.range(0, nR)
ind_grid = tf.expand_dims(ind_grid, 1)
# nV is the number of voxels, so this tiling operation
# produces e.g. [0,0,0,0,0,0; 1,1,1,1,1,1]
im_ind = tf.tile(ind_grid, [1, nV])
def _get_gather_inds(x, y):
return tf.reshape(tf.stack([im_ind, y, x], axis=2), [-1, 3])
# Gather values
# Samples all featuremaps per batch/camera at the projected indices,
# and their +1 counterparts. Stop at Ia for nearest neighbor interpolation.
# I* should be a tensor of shape:
# (num_cams*batch_size*len(im_x0)*len(im_y0), fdim)
Ia = tf.gather_nd(feats, _get_gather_inds(im_x0, im_y0))
Ib = tf.gather_nd(feats, _get_gather_inds(im_x0, im_y1))
Ic = tf.gather_nd(feats, _get_gather_inds(im_x1, im_y0))
Id = tf.gather_nd(feats, _get_gather_inds(im_x1, im_y1))
# Calculate bilinear weights
# We've now sampled the feature maps at corners around the projected values
    # Here, the corners are weighted by distance from the projected value
wa = (im_x1_f - im_x) * (im_y1_f - im_y)
wb = (im_x1_f - im_x) * (im_y - im_y0_f)
wc = (im_x - im_x0_f) * (im_y1_f - im_y)
wd = (im_x - im_x0_f) * (im_y - im_y0_f)
# TODO(reshape): Why is this reshape necessary?
wa, wb = tf.reshape(wa, [-1, 1]), tf.reshape(wb, [-1, 1])
wc, wd = tf.reshape(wc, [-1, 1]), tf.reshape(wd, [-1, 1])
Ibilin = tf.add_n([wa * Ia, wb * Ib, wc * Ic, wd * Id])
Ibilin = tf.reshape(
Ibilin,
[
batch_size,
nR // batch_size,
int((nV + 1) ** (1 / 3)),
int((nV + 1) ** (1 / 3)),
int((nV + 1) ** (1 / 3)),
fdim,
],
)
Ibilin = tf.transpose(Ibilin, [0, 1, 3, 2, 4, 5])
return Ibilin
def unDistortPoints(
pts,
intrinsicMatrix,
radialDistortion,
tangentDistortion,
rotationMatrix,
translationVector,
):
"""Remove lens distortion from the input points.
Input is size (M,2), where M is the number of points
"""
dcoef = radialDistortion.ravel()[:2].tolist() + tangentDistortion.ravel().tolist()
if len(radialDistortion.ravel()) == 3:
dcoef = dcoef + [radialDistortion.ravel()[-1]]
else:
dcoef = dcoef + [0]
ts = time.time()
pts_u = cv2.undistortPoints(
np.reshape(pts, (-1, 1, 2)).astype("float32"),
intrinsicMatrix.T,
np.array(dcoef),
P=intrinsicMatrix.T,
)
pts_u = np.reshape(pts_u, (-1, 2))
return pts_u
def triangulate(pts1, pts2, cam1, cam2):
"""Return triangulated 3- coordinates.
Following Matlab convetion, given lists of matching points, and their
respective camera matrices, returns the triangulated 3- coordinates.
pts1 and pts2 must be Mx2, where M is the number of points with
(x,y) positions. M 3-D points will be returned after triangulation
"""
pts1 = pts1.T
pts2 = pts2.T
cam1 = cam1.T
cam2 = cam2.T
out_3d = np.zeros((3, pts1.shape[1]))
for i in range(out_3d.shape[1]):
if ~np.isnan(pts1[0, i]):
pt1 = pts1[:, i : i + 1]
pt2 = pts2[:, i : i + 1]
A = np.zeros((4, 4))
A[0:2, :] = pt1 @ cam1[2:3, :] - cam1[0:2, :]
A[2:, :] = pt2 @ cam2[2:3, :] - cam2[0:2, :]
u, s, vh = np.linalg.svd(A)
v = vh.T
X = v[:, -1]
X = X / X[-1]
out_3d[:, i] = X[0:3].T
else:
out_3d[:, i] = np.nan
return out_3d
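# A minimal usage sketch for triangulate (hypothetical shapes, for
# illustration only): pts1 and pts2 are matching 2-D detections from two
# views and cam1/cam2 are the corresponding (4, 3) matrices from
# camera_matrix().
#
#   # pts1, pts2: (M, 2) arrays of matched image points
#   # xyz = triangulate(pts1, pts2, cam1, cam2)  # -> (3, M) world coordinates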
def triangulate_multi_instance(pts, cams):
"""Return triangulated 3- coordinates.
Following Matlab convetion, given lists of matching points, and their
respective camera matrices, returns the triangulated 3- coordinates.
pts1 and pts2 must be Mx2, where M is the number of points with
(x,y) positions. M 3-D points will be returned after triangulation
"""
pts = [pt.T for pt in pts]
cams = [c.T for c in cams]
out_3d = np.zeros((3, pts[0].shape[1]))
# traces = np.zeros((out_3d.shape[1],))
for i in range(out_3d.shape[1]):
if ~np.isnan(pts[0][0, i]):
p = [p[:, i : i + 1] for p in pts]
A = np.zeros((2 * len(cams), 4))
for j in range(len(cams)):
A[(j) * 2 : (j + 1) * 2] = p[j] @ cams[j][2:3, :] - cams[j][0:2, :]
u, s, vh = np.linalg.svd(A)
v = vh.T
X = v[:, -1]
X = X / X[-1]
out_3d[:, i] = X[0:3].T
# traces[i] = np.sum(s[0:3])
else:
out_3d[:, i] = np.nan
return out_3d
def ravel_multi_index(I, J, shape):
"""Create an array of flat indices from coordinate arrays.
shape is (rows, cols)
"""
r, c = shape
return I * c + J
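# For example (illustrative): with shape (3, 4), row 1 and column 2 map to the
# flat index 1 * 4 + 2 == 6, matching np.ravel_multi_index((1, 2), (3, 4)).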
def collapse_dims(T):
"""Collapse dimensions."""
shape = list(K.int_shape(T))
return tf.reshape(T, [-1] + shape[2:])
def repeat_tensor(T, nrep, rep_dim=1):
"""Repeat tensor."""
repT = tf.expand_dims(T, rep_dim)
tile_dim = [1] * len(K.int_shape(repT))
tile_dim[rep_dim] = nrep
repT = tf.tile(repT, tile_dim)
return repT
def nearest3(grid, idx, clip=False):
"""TODO(Describe): I'm having a hard time reading this one."""
with tf.variable_scope("NearestInterp"):
_, h, w, d, f = grid.get_shape().as_list()
x, y, z = idx[:, 1], idx[:, 2], idx[:, 3]
g_val = tf.gather_nd(grid, tf.cast(tf.round(idx), "int32"))
if clip:
x_inv = tf.logical_or(x < 0, x > h - 1)
y_inv = tf.logical_or(y < 0, y > w - 1)
            z_inv = tf.logical_or(z < 0, z > d - 1)
valid_idx = 1 - tf.to_float(
tf.logical_or(tf.logical_or(x_inv, y_inv), z_inv)
)
g_val = g_val * valid_idx[..., tf.newaxis]
return g_val
# Todo(simplify): This function had many comments that could be condensed
def proj_slice(
vmin,
vmax,
nvox,
rs_grid,
grid,
K_,
R,
proj_size=512,
samples=64,
min_z=1000.0,
max_z=2100.0,
):
"""Project slice.
grid = nv grids, R = nv x nr rotation matrices.
R = (bs, im, 3, 4), K = (bs, im, 3, 3), grid = (bs, im, h, w, d, ch)
Modified from https://github.com/akar43/lsm
"""
# Scale the camera intrinsic matrix accordingly if the final output is
# a different shape than the input
# Maybe best to start with the native image size so we don't have to deal
# with this headache
rsz_factor = 1
K_ = K_ * rsz_factor
K_shape = K.int_shape(K_)
bs, im_bs, h, w, d, ch = K.int_shape(grid)
npix = proj_size ** 2
# Compute Xc - points in camera frame
Xc = tf.matrix_triangular_solve(K_, rs_grid, lower=False, name="KinvX")
print(K.int_shape(Xc))
# Define z values of samples along ray
z_samples = tf.linspace(min_z, max_z, samples)
# Transform Xc to Xw using transpose of rotation matrix
Xc = repeat_tensor(Xc, samples, rep_dim=2)
Xc = Xc * z_samples[tf.newaxis, tf.newaxis, :, tf.newaxis, tf.newaxis]
Xc = tf.concat([Xc, tf.ones([K_shape[0], K_shape[1], samples, 1, npix])], axis=-2)
# Construct [R^{T}|-R^{T}t]
Rt = tf.matrix_transpose(R[:, :, :, :3])
tr = tf.expand_dims(R[:, :, :, 3], axis=-1)
R_c2w = tf.concat([Rt, -tf.matmul(Rt, tr)], axis=3)
R_c2w = repeat_tensor(R_c2w, samples, rep_dim=2)
Xw = tf.matmul(R_c2w, Xc)
# But remember, some rays/world points will not contact the grid --
# Took me a day to figure out, but the trick is that the x-coordinate was
# indexing the rows rather than the columns, so the grid needs to be fed
# in with the first two grid dimensions permuted...
vmin = vmin[:, tf.newaxis, tf.newaxis, :, tf.newaxis]
vmax = vmax[:, tf.newaxis, tf.newaxis, :, tf.newaxis]
Xw = ((Xw - vmin) / (vmax - vmin)) * nvox
# size now (bs, num_cams, samples, npix, 3)
Xw = tf.transpose(Xw, [0, 1, 2, 4, 3])
# size now (bs, num_grids, num_cams, samples, npix, 3)
Xw = repeat_tensor(Xw, im_bs, rep_dim=1)
# Todo(describe): Describe these operations in concepts rather than linalg
sample_grid = collapse_dims(grid)
sample_locs = collapse_dims(Xw)
lshape = K.int_shape(sample_locs)
vox_idx = tf.range(lshape[0])
vox_idx = repeat_tensor(vox_idx, lshape[1], rep_dim=1)
vox_idx = tf.reshape(vox_idx, [-1, 1])
vox_idx = repeat_tensor(vox_idx, samples * npix, rep_dim=1)
vox_idx = tf.reshape(vox_idx, [-1, 1])
sample_idx = tf.concat(
[tf.to_float(vox_idx), tf.reshape(sample_locs, [-1, 3])], axis=1
)
# The first column indicates which "grid" should be sampled for each
# x,y,z position. In my case, there should only be as many grids as there
# are samples in the mini-batch,
# but for some reason this code allows multiple 3D grids per sample.
    # the order in rows (for the last 3 cols) should be roughly like this:
# [batch1_grid1_allcam1samples_locs, batch1_grid1_allcam2sample_locs,
# batch1_grid1_allcam3sample_locs, batch1_grid2_allcam1samples_locs, ...]
g_val = nearest3(sample_grid, sample_idx, clip=True)
g_val = tf.reshape(
g_val, [bs, im_bs, K_shape[1], samples, proj_size, proj_size, -1]
)
ray_slices = tf.transpose(g_val, [0, 1, 2, 4, 5, 6, 3])
return K.max(ray_slices, axis=-1, keepdims=False)
class InstanceNormalization(Layer):
"""Instance normalization layer (Lei Ba et al, 2016, Ulyanov et al., 2016).
Modified from from keras_contrib/layer/normalization
Normalize the activations of the previous layer at each step,
i.e. applies a transformation that maintains the mean activation
close to 0 and the activation standard deviation close to 1.
# Arguments
axis: Integer, the axis that should be normalized
(typically the features axis).
For instance, after a `Conv2D` layer with
`data_format="channels_first"`,
set `axis=1` in `InstanceNormalization`.
Setting `axis=None` will normalize all values in each instance of
the batch.
Axis 0 is the batch dimension. `axis` cannot be set to 0 to avoid
errors.
epsilon: Small float added to variance to avoid dividing by zero.
center: If True, add offset of `beta` to normalized tensor.
If False, `beta` is ignored.
scale: If True, multiply by `gamma`.
If False, `gamma` is not used.
When the next layer is linear (also e.g. `nn.relu`),
this can be disabled since the scaling
will be done by the next layer.
beta_initializer: Initializer for the beta weight.
gamma_initializer: Initializer for the gamma weight.
beta_regularizer: Optional regularizer for the beta weight.
gamma_regularizer: Optional regularizer for the gamma weight.
beta_constraint: Optional constraint for the beta weight.
gamma_constraint: Optional constraint for the gamma weight.
# Input shape
Arbitrary. Use the keyword argument `input_shape`
(tuple of integers, does not include the samples axis)
when using this layer as the first layer in a model.
# Output shape
Same shape as input.
# References
- [Layer Normalization](https://arxiv.org/abs/1607.06450)
- [Instance Normalization: The Missing Ingredient for Fast Stylization]
(https://arxiv.org/abs/1607.08022)
"""
def __init__(
self,
axis=None,
epsilon=1e-3,
center=True,
scale=True,
beta_initializer="zeros",
gamma_initializer="ones",
beta_regularizer=None,
gamma_regularizer=None,
beta_constraint=None,
gamma_constraint=None,
**kwargs
):
"""Initialize instance normalization."""
super(InstanceNormalization, self).__init__(**kwargs)
self.supports_masking = True
self.axis = axis
self.epsilon = epsilon
self.center = center
self.scale = scale
self.beta_initializer = initializers.get(beta_initializer)
self.gamma_initializer = initializers.get(gamma_initializer)
self.beta_regularizer = regularizers.get(beta_regularizer)
self.gamma_regularizer = regularizers.get(gamma_regularizer)
self.beta_constraint = constraints.get(beta_constraint)
self.gamma_constraint = constraints.get(gamma_constraint)
def build(self, input_shape):
"""Build instance normalization."""
ndim = len(input_shape)
if self.axis == 0:
raise ValueError("Axis cannot be zero")
if (self.axis is not None) and (ndim == 2):
raise ValueError("Cannot specify axis for rank 1 tensor")
self.input_spec = InputSpec(ndim=ndim)
if self.axis is None:
shape = (1,)
else:
shape = (input_shape[self.axis],)
if self.scale:
self.gamma = self.add_weight(
shape=shape,
name="gamma",
initializer=self.gamma_initializer,
regularizer=self.gamma_regularizer,
constraint=self.gamma_constraint,
)
else:
self.gamma = None
if self.center:
self.beta = self.add_weight(
shape=shape,
name="beta",
initializer=self.beta_initializer,
regularizer=self.beta_regularizer,
constraint=self.beta_constraint,
)
else:
self.beta = None
self.built = True
def call(self, inputs, training=None):
"""Call instance normalization."""
input_shape = K.int_shape(inputs)
reduction_axes = list(range(0, len(input_shape)))
if self.axis is not None:
del reduction_axes[self.axis]
del reduction_axes[0]
mean = K.mean(inputs, reduction_axes, keepdims=True)
stddev = K.std(inputs, reduction_axes, keepdims=True) + self.epsilon
normed = (inputs - mean) / stddev
broadcast_shape = [1] * len(input_shape)
if self.axis is not None:
broadcast_shape[self.axis] = input_shape[self.axis]
if self.scale:
broadcast_gamma = K.reshape(self.gamma, broadcast_shape)
normed = normed * broadcast_gamma
if self.center:
broadcast_beta = K.reshape(self.beta, broadcast_shape)
normed = normed + broadcast_beta
return normed
def get_config(self):
"""Reuturn configuration."""
config = {
"axis": self.axis,
"epsilon": self.epsilon,
"center": self.center,
"scale": self.scale,
"beta_initializer": initializers.serialize(self.beta_initializer),
"gamma_initializer": initializers.serialize(self.gamma_initializer),
"beta_regularizer": regularizers.serialize(self.beta_regularizer),
"gamma_regularizer": regularizers.serialize(self.gamma_regularizer),
"beta_constraint": constraints.serialize(self.beta_constraint),
"gamma_constraint": constraints.serialize(self.gamma_constraint),
}
base_config = super(InstanceNormalization, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
# TODO(comment): Reading requires knowledge of the get_custom_objects function
get_custom_objects().update({"InstanceNormalization": InstanceNormalization})
def distortPoints(points, intrinsicMatrix, radialDistortion, tangentialDistortion):
"""Distort points according to camera parameters.
Ported from Matlab 2018a
"""
    # unpack the intrinsic matrix
cx = intrinsicMatrix[2, 0]
cy = intrinsicMatrix[2, 1]
fx = intrinsicMatrix[0, 0]
fy = intrinsicMatrix[1, 1]
skew = intrinsicMatrix[1, 0]
# center the points
center = np.array([cx, cy])
centeredPoints = points - center[np.newaxis, :]
# normalize the points
yNorm = centeredPoints[:, 1] / fy
xNorm = (centeredPoints[:, 0] - skew * yNorm) / fx
# compute radial distortion
r2 = xNorm ** 2 + yNorm ** 2
r4 = r2 * r2
r6 = r2 * r4
k = np.zeros((3,))
k[:2] = radialDistortion[:2]
if len(radialDistortion) < 3:
k[2] = 0
else:
k[2] = radialDistortion[2]
alpha = k[0] * r2 + k[1] * r4 + k[2] * r6
# compute tangential distortion
p = tangentialDistortion
xyProduct = xNorm * yNorm
dxTangential = 2 * p[0] * xyProduct + p[1] * (r2 + 2 * xNorm ** 2)
dyTangential = p[0] * (r2 + 2 * yNorm ** 2) + 2 * p[1] * xyProduct
# apply the distortion to the points
normalizedPoints = np.stack((xNorm, yNorm)).T
distortedNormalizedPoints = (
normalizedPoints
+ normalizedPoints * np.array([alpha, alpha]).T
+ np.stack((dxTangential, dyTangential)).T
)
# # convert back to pixels
distortedPointsX = (
(distortedNormalizedPoints[:, 0] * fx)
+ cx
+ (skew * distortedNormalizedPoints[:, 1])
)
distortedPointsY = distortedNormalizedPoints[:, 1] * fy + cy
distortedPoints = np.stack((distortedPointsX, distortedPointsY))
return distortedPoints
def distortPoints_torch(
points, intrinsicMatrix, radialDistortion, tangentialDistortion, device
):
"""Distort points according to camera parameters.
Ported from Matlab 2018a
"""
import torch
# unpack the intrinsic matrix
cx = intrinsicMatrix[2, 0]
cy = intrinsicMatrix[2, 1]
fx = intrinsicMatrix[0, 0]
fy = intrinsicMatrix[1, 1]
skew = intrinsicMatrix[1, 0]
# center the points
center = torch.as_tensor((cx, cy), dtype=torch.float32, device=device)
centeredPoints = points - center
    # normalize the points
yNorm = centeredPoints[:, 1] / fy
xNorm = (centeredPoints[:, 0] - skew * yNorm) / fx
# compute radial distortion
r2 = xNorm ** 2 + yNorm ** 2
r4 = r2 * r2
r6 = r2 * r4
k = np.zeros((3,))
k[:2] = radialDistortion[:2]
if list(radialDistortion.shape)[0] < 3:
k[2] = 0
else:
k[2] = radialDistortion[2]
alpha = k[0] * r2 + k[1] * r4 + k[2] * r6
# compute tangential distortion
p = tangentialDistortion
xyProduct = xNorm * yNorm
dxTangential = 2 * p[0] * xyProduct + p[1] * (r2 + 2 * xNorm ** 2)
dyTangential = p[0] * (r2 + 2 * yNorm ** 2) + 2 * p[1] * xyProduct
# apply the distortion to the points
normalizedPoints = torch.transpose(torch.stack((xNorm, yNorm)), 0, 1)
distortedNormalizedPoints = (
normalizedPoints
+ normalizedPoints * torch.transpose(torch.stack((alpha, alpha)), 0, 1)
+ torch.transpose(torch.stack((dxTangential, dyTangential)), 0, 1)
)
distortedPointsX = (
distortedNormalizedPoints[:, 0] * fx
+ cx
+ skew * distortedNormalizedPoints[:, 1]
)
distortedPointsY = distortedNormalizedPoints[:, 1] * fy + cy
distortedPoints = torch.stack((distortedPointsX, distortedPointsY))
return distortedPoints
@tf.function
def distortPoints_tf(points, intrinsicMatrix, radialDistortion, tangentialDistortion):
"""Distort points according to camera parameters.
Ported from Matlab 2018a
"""
# unpack the intrinsic matrix
cx = intrinsicMatrix[2, 0]
cy = intrinsicMatrix[2, 1]
fx = intrinsicMatrix[0, 0]
fy = intrinsicMatrix[1, 1]
skew = intrinsicMatrix[1, 0]
# center the points
center = tf.stack((cx, cy))
p = tangentialDistortion
centeredPoints = points - center
    # normalize the points
yNorm = centeredPoints[:, 1] / fy
xNorm = (centeredPoints[:, 0] - skew * yNorm) / fx
# compute radial distortion
r2 = xNorm ** 2 + yNorm ** 2
r4 = r2 * r2
r6 = r2 * r4
    k = radialDistortion
    if list(radialDistortion.shape)[0] < 3:
        # Tensors do not support item assignment, so pad the missing third
        # radial coefficient with a zero instead.
        k = tf.concat([k[:2], tf.zeros((1,), dtype=k.dtype)], axis=0)
alpha = k[0] * r2 + k[1] * r4 + k[2] * r6
# compute tangential distortion
xyProduct = xNorm * yNorm
dxTangential = 2 * p[0] * xyProduct + p[1] * (r2 + 2 * xNorm ** 2)
dyTangential = p[0] * (r2 + 2 * yNorm ** 2) + 2 * p[1] * xyProduct
# apply the distortion to the points
normalizedPoints = tf.transpose(tf.stack((xNorm, yNorm)))
distortedNormalizedPoints = (
normalizedPoints
+ normalizedPoints * tf.transpose(tf.stack((alpha, alpha)))
+ tf.transpose(tf.stack((dxTangential, dyTangential)))
)
distortedPointsX = (
distortedNormalizedPoints[:, 0] * fx
+ cx
+ skew * distortedNormalizedPoints[:, 1]
)
distortedPointsY = distortedNormalizedPoints[:, 1] * fy + cy
distortedPoints = tf.stack((distortedPointsX, distortedPointsY))
return distortedPoints
def expected_value_3d(prob_map, grid_centers):
"""Calculate expected value of spatial distribution over output 3D grid.
prob_map should be (batch_size,h,w,d,channels)
grid_centers should be (batch_size,h*w*d,3)
# For this to work, the values in a single prob map channel must sum to one
"""
bs, h, w, d, channels = K.int_shape(prob_map)
prob_map = tf.reshape(prob_map, [-1, channels])
grid_centers = tf.reshape(grid_centers, [-1, 3])
weighted_centers = prob_map[:, tf.newaxis, :] * grid_centers[:, :, tf.newaxis]
# weighted centers now (bs*h*w*d,3,channels).
# So we now sum over the grid to get 3D coordinates
# first reshape to put batch_size back on its own axis
weighted_centers = tf.reshape(weighted_centers, [-1, h * w * d, 3, channels])
weighted_centers = tf.reduce_sum(weighted_centers, axis=1)
return weighted_centers
def spatial_softmax(feats):
"""Normalize acros channels.
Channel/marker-wise softmax normalization so that each 3d probability map
represents a normalized probability distribution feats enters as size
(bs, h, w, d, channels) but needs to reshapes to
(bs, h*w*d, channels) for the softmax, then
reshaped back again
"""
bs, h, w, d, channels = K.int_shape(feats)
feats = tf.reshape(feats, [-1, h * w * d, channels])
feats = tf.nn.softmax(feats, axis=1)
feats = tf.reshape(feats, [-1, h, w, d, channels])
return feats
def var_3d(prob_map, grid_centers, markerlocs):
"""Return the average variance across all marker probability maps.
    Used as a loss to promote "peakiness" in the probability map output.
prob_map should be (batch_size,h,w,d,channels)
grid_centers should be (batch_size,h*w*d,3)
markerlocs is (batch_size,3,channels)
"""
bs, h, w, d, channels = K.int_shape(prob_map)
prob_map = tf.reshape(prob_map, [-1, channels])
# we need the squared distance between all grid centers and
    # the mean for each channel; grid_dist is now (bs, h*w*d, channels)
grid_dist = tf.reduce_sum(
(grid_centers[:, :, :, tf.newaxis] - markerlocs[:, tf.newaxis, :, :]) ** 2,
axis=2,
)
grid_dist = tf.reshape(grid_dist, [-1, channels])
weighted_var = prob_map * grid_dist
weighted_var = tf.reshape(weighted_var, [-1, h * w * d, channels])
weighted_var = tf.reduce_sum(weighted_var, axis=1)
return tf.reduce_mean(weighted_var, axis=-1)[:, tf.newaxis]
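# A minimal sketch tying the helpers above together (hypothetical shapes, for
# illustration only): softmax-normalize a raw 3D heatmap per channel, then take
# its expected 3D coordinate for every channel/marker.
def _example_expected_value_3d():
    bs, h, w, d, channels = 2, 4, 4, 4, 3
    raw = tf.random.normal((bs, h, w, d, channels))
    probs = spatial_softmax(raw)                  # each channel now sums to 1
    grid = tf.random.uniform((bs, h * w * d, 3))  # 3D centers of the grid cells
    return expected_value_3d(probs, grid)         # shape (bs, 3, channels)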
|
import os
from celery import Celery
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pycon.settings.celery")
app = Celery("pycon")
app.config_from_object("django.conf:settings", namespace="CELERY")
app.autodiscover_tasks(["integrations"])
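# A minimal sketch of a task the autodiscovery above could pick up from the
# "integrations" package (hypothetical task name, shown only as an example):
#
#     from celery import shared_task
#
#     @shared_task
#     def sync_integration(integration_id):
#         ...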
|
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
# pylint: disable=too-many-statements
from azure.cli.core.commands.parameters import (
tags_type,
get_enum_type,
resource_group_name_type,
get_location_type
)
from azure.cli.core.commands.validators import (
get_default_location_from_resource_group,
validate_file_or_dict
)
from azext_datafactory.action import (
AddFactoryVstsConfiguration,
AddFactoryGitHubConfiguration,
AddFakeIdentity
)
def load_arguments(self, _):
with self.argument_context('datafactory list') as c:
c.argument('resource_group_name', resource_group_name_type)
with self.argument_context('datafactory show') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.',
id_part='name', configured_default='factory')
c.argument('if_none_match', type=str, help='ETag of the factory entity. Should only be specified for get. If '
'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.')
with self.argument_context('datafactory create') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.',
configured_default='factory')
c.argument('if_match', type=str, help='ETag of the factory entity. Should only be specified for update, for '
'which it should match existing entity or can be * for unconditional update.')
c.argument('location', arg_type=get_location_type(self.cli_ctx),
validator=get_default_location_from_resource_group)
c.argument('tags', tags_type)
c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='*', help='Factory\'s VSTS '
'repo information.', arg_group='RepoConfiguration')
c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='*', help='Factory\'s '
'GitHub repo information.', arg_group='RepoConfiguration')
c.argument('fake_identity', action=AddFakeIdentity, nargs='*', help='This is only for az test.')
c.argument('zones', nargs='*', help='This is only for az test.')
with self.argument_context('datafactory update') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.',
id_part='name', configured_default='factory')
c.argument('tags', tags_type)
with self.argument_context('datafactory delete') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.',
id_part='name', configured_default='factory')
with self.argument_context('datafactory configure-factory-repo') as c:
c.argument('location_id', type=str, help='The location identifier.', id_part='name')
c.argument('factory_resource_id', type=str, help='The factory resource id.')
c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='*', help='Factory\'s VSTS '
'repo information.', arg_group='RepoConfiguration')
c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='*', help='Factory\'s '
'GitHub repo information.', arg_group='RepoConfiguration')
with self.argument_context('datafactory get-data-plane-access') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.',
id_part='name', configured_default='factory')
c.argument('permissions', type=str, help='The string with permissions for Data Plane access. Currently only '
'\'r\' is supported which grants read only access.')
c.argument('access_resource_path', type=str, help='The resource path to get access relative to factory. '
'Currently only empty string is supported which corresponds to the factory resource.')
c.argument('profile_name', type=str, help='The name of the profile. Currently only the default is supported. '
'The default value is DefaultProfile.')
c.argument('start_time', type=str, help='Start time for the token. If not specified the current time will be '
'used.')
c.argument('expire_time', type=str, help='Expiration time for the token. Maximum duration for the token is '
'eight hours and by default the token will expire in eight hours.')
with self.argument_context('datafactory get-git-hub-access-token') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', options_list=['--name', '-n', '--factory-name'], type=str, help='The factory name.',
id_part='name', configured_default='factory')
c.argument('git_hub_access_code', type=str, help='GitHub access code.')
c.argument('git_hub_client_id', type=str, help='GitHub application client ID.')
c.argument('git_hub_access_token_base_url', type=str, help='GitHub access token base URL.')
with self.argument_context('datafactory trigger list') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', configured_default='factory')
with self.argument_context('datafactory trigger show') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.',
id_part='child_name_1')
c.argument('if_none_match', type=str, help='ETag of the trigger entity. Should only be specified for get. If '
'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.')
with self.argument_context('datafactory trigger create') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', configured_default='factory')
c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str,
help='The trigger name.')
c.argument('if_match', type=str, help='ETag of the trigger entity. Should only be specified for update, for '
'which it should match existing entity or can be * for unconditional update.')
c.argument('properties', type=validate_file_or_dict, help='Properties of the trigger. Expected value: '
'json-string/@json-file.')
with self.argument_context('datafactory trigger update') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.',
id_part='child_name_1')
c.argument('if_match', type=str, help='ETag of the trigger entity. Should only be specified for update, for '
'which it should match existing entity or can be * for unconditional update.')
c.argument('description', type=str, help='Trigger description.')
c.argument('annotations', type=validate_file_or_dict, help='List of tags that can be used for describing the '
'trigger. Expected value: json-string/@json-file.')
c.ignore('properties')
with self.argument_context('datafactory trigger delete') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.',
id_part='child_name_1')
with self.argument_context('datafactory trigger get-event-subscription-status') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.',
id_part='child_name_1')
with self.argument_context('datafactory trigger query-by-factory') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('continuation_token', type=str, help='The continuation token for getting the next page of results. '
'Null for first page.')
c.argument('parent_trigger_name', type=str, help='The name of the parent TumblingWindowTrigger to get the '
'child rerun triggers')
with self.argument_context('datafactory trigger start') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.',
id_part='child_name_1')
with self.argument_context('datafactory trigger stop') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.',
id_part='child_name_1')
with self.argument_context('datafactory trigger subscribe-to-event') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.',
id_part='child_name_1')
with self.argument_context('datafactory trigger unsubscribe-from-event') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.',
id_part='child_name_1')
with self.argument_context('datafactory trigger wait') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('trigger_name', options_list=['--name', '-n', '--trigger-name'], type=str, help='The trigger name.',
id_part='child_name_1')
c.argument('if_none_match', type=str, help='ETag of the trigger entity. Should only be specified for get. If '
'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.')
with self.argument_context('datafactory integration-runtime list') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', configured_default='factory')
with self.argument_context('datafactory integration-runtime show') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
c.argument('if_none_match', type=str, help='ETag of the integration runtime entity. Should only be specified '
'for get. If the ETag matches the existing entity tag, or if * was provided, then no content will '
'be returned.')
with self.argument_context('datafactory integration-runtime linked-integration-runtime create') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', configured_default='factory')
c.argument('integration_runtime_name', type=str, help='The integration runtime name.')
c.argument('name', type=str, help='The name of the linked integration runtime.')
c.argument('subscription_id', type=str, help='The ID of the subscription that the linked integration runtime '
'belongs to.')
c.argument('data_factory_name', type=str, help='The name of the data factory that the linked integration '
'runtime belongs to.')
c.argument('data_factory_location', type=str, help='The location of the data factory that the linked '
'integration runtime belongs to.')
with self.argument_context('datafactory integration-runtime managed create') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.')
c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for '
'update, for which it should match existing entity or can be * for unconditional update.')
c.argument('description', type=str, help='Integration runtime description.')
c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='*', help='Factory\'s VSTS '
'repo information.', arg_group='RepoConfiguration')
c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='*', help='Factory\'s '
'GitHub repo information.', arg_group='RepoConfiguration')
c.argument('fake_identity', action=AddFakeIdentity, nargs='*', help='This is only for az test.')
c.argument('zones', nargs='*', help='This is only for az test.')
c.argument('type_properties_compute_properties', type=validate_file_or_dict, help='The compute resource for '
'managed integration runtime. Expected value: json-string/@json-file.')
c.argument('type_properties_ssis_properties', type=validate_file_or_dict, help='SSIS properties for managed '
'integration runtime. Expected value: json-string/@json-file.')
with self.argument_context('datafactory integration-runtime self-hosted create') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.')
c.argument('if_match', type=str, help='ETag of the integration runtime entity. Should only be specified for '
'update, for which it should match existing entity or can be * for unconditional update.')
c.argument('description', type=str, help='Integration runtime description.')
c.argument('type_properties_linked_info', type=validate_file_or_dict, help='The base definition of a linked '
'integration runtime. Expected value: json-string/@json-file.')
with self.argument_context('datafactory integration-runtime update') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
        c.argument('auto_update', arg_type=get_enum_type(['On', 'Off', 'fakeValue1', 'fakeValue2', 'fakeValue3',
                                                          'fakeValue4', 'fakeValue5', 'fakeValue6']), help='Enables or '
'disables the auto-update feature of the self-hosted integration runtime. See '
'https://go.microsoft.com/fwlink/?linkid=854189.')
c.argument('update_delay_offset', type=str, help='The time offset (in hours) in the day, e.g., PT03H is 3 '
'hours. The integration runtime auto update will happen on that time.')
with self.argument_context('datafactory integration-runtime delete') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
with self.argument_context('datafactory integration-runtime get-connection-info') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
with self.argument_context('datafactory integration-runtime get-monitoring-data') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
with self.argument_context('datafactory integration-runtime get-status') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
with self.argument_context('datafactory integration-runtime list-auth-key') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.')
with self.argument_context('datafactory integration-runtime regenerate-auth-key') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
c.argument('key_name', arg_type=get_enum_type(['authKey1', 'authKey2']), help='The name of the authentication '
'key to regenerate.')
with self.argument_context('datafactory integration-runtime remove-link') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
c.argument('linked_factory_name', type=str, help='The data factory name for linked integration runtime.')
with self.argument_context('datafactory integration-runtime start') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
with self.argument_context('datafactory integration-runtime stop') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
with self.argument_context('datafactory integration-runtime sync-credentials') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
with self.argument_context('datafactory integration-runtime upgrade') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
with self.argument_context('datafactory integration-runtime wait') as c:
c.argument('resource_group_name', resource_group_name_type)
c.argument('factory_name', type=str, help='The factory name.', id_part='name', configured_default='factory')
c.argument('integration_runtime_name', options_list=['--name', '-n', '--integration-runtime-name'], type=str,
help='The integration runtime name.', id_part='child_name_1')
c.argument('if_none_match', type=str, help='ETag of the integration runtime entity. Should only be specified '
'for get. If the ETag matches the existing entity tag, or if * was provided, then no content will '
'be returned.')
|
# @Author: Manuel Rodriguez <valle>
# @Date: 01-Jan-2018
# @Email: valle.mrv@gmail.com
# @Last modified by: valle
# @Last modified time: 13-Jul-2018
# @License: Apache license version 2.0
"""
Django settings for service_web project.
Generated by 'django-admin startproject' using Django 1.10.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BRAND_TITLE = "Farmacia Tapas"
BRAND = "Farmacia"
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = ""
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ["*"]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'channels',
'impresion',
"almacen",
'contabilidad',
'gestion',
'inicio',
'global',
'ventas'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
AUTHENTICATION_BACKENDS = [
'django.contrib.auth.backends.ModelBackend',
'tokenapi.backends.TokenBackend'
]
ROOT_URLCONF = 'service_web.urls'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static/resources'),)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'service_web.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
#channels
ASGI_APPLICATION = "service_web.routing.application"
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'channels_redis.core.RedisChannelLayer',
'CONFIG': {
"hosts": [('127.0.0.1', 6379)],
},
},
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': '',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'init_command': 'SET default_storage_engine=INNODB',
},
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
#smtp config
EMAIL_HOST_PASSWORD = ''
EMAIL_HOST_USER = ''
EMAIL_PORT = ''
EMAIL_HOST = ''
EMAIL_USE_TLS = True
# session expire at browser close
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
TOKEN_TIMEOUT_DAYS = 360
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'es-es'
TIME_ZONE = 'Europe/Madrid'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
MEDIA_URL = '/media/'
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
MEDIA_ROOT = os.path.join(STATIC_ROOT, 'media')
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0022_auto_20151118_1642'),
]
operations = [
migrations.AddField(
model_name='role',
name='commit',
field=models.CharField(max_length=256, blank=True),
),
migrations.AddField(
model_name='role',
name='commit_message',
field=models.CharField(max_length=256, blank=True),
)
]
|
# coding: utf-8
"""
Engine api
Engine APIs # noqa: E501
The version of the OpenAPI document: 1.0.4
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import vtpl_api
from vtpl_api.models.extra import Extra # noqa: E501
from vtpl_api.rest import ApiException
class TestExtra(unittest.TestCase):
"""Extra unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testExtra(self):
"""Test Extra"""
# FIXME: construct object with mandatory attributes with example values
# model = vtpl_api.models.extra.Extra() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
valor = float(input("What is the current price of the product you want to discount? "))
desconto = float(input("How much of a discount do you want to give, as a percentage? "))
print("The product cost {}, but with a {}% discount it now costs {:.2f} R$".format(valor, desconto, valor - (desconto / 100) * valor))
|
from plenum.common.constants import TXN_TYPE, DATA
from plenum.test.helper import sdk_gen_request, sdk_sign_and_submit_req_obj, sdk_get_reply
from plenum.test.plugin.demo_plugin.constants import AUCTION_START
def send_auction_txn(looper,
sdk_pool_handle, sdk_wallet_steward):
op = {
TXN_TYPE: AUCTION_START,
DATA: {'id': 'abc'}
}
successful_op(looper, op, sdk_wallet_steward, sdk_pool_handle)
def successful_op(looper, op, sdk_wallet, sdk_pool_handle):
req_obj = sdk_gen_request(op, identifier=sdk_wallet[1])
req = sdk_sign_and_submit_req_obj(looper, sdk_pool_handle,
sdk_wallet, req_obj)
sdk_get_reply(looper, req)
|
# Can 31 dominoes cover all but two opposite corners of a chess board?
def dominoes():
    # Each domino must cover one black and one white square.
# But there are different numbers of black and white squares.
# Read up on such parity arguments here:
# http://ihxrelation.blogspot.com/2015/10/tiling-problems.html
return False
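# A small sketch of the counting behind the answer (assumes the standard 8x8
# board with opposite corners removed; the helper name is made up for
# illustration): the two removed corners share a colour, leaving 30 squares of
# one colour and 32 of the other, so 31 one-black-one-white dominoes can never
# cover the board.
def _count_colours_without_opposite_corners(n=8):
    removed = {(0, 0), (n - 1, n - 1)}
    colours = [(r + c) % 2 for r in range(n) for c in range(n) if (r, c) not in removed]
    return colours.count(0), colours.count(1)  # (30, 32) for n=8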
|
import numpy as np
import pandas as pd
csv_data = pd.read_csv("../data/double11_1020_1120.csv")
csv_data *= 100.0
# csv_data.fillna(0.0,inplace=True)
# print(csv_data.isnull().any())
csv_data_u = csv_data.round(5).drop_duplicates(subset=csv_data.columns[1:],keep='first')
# csv_data_u = csv_data
csv_data_u = csv_data_u.sample(n=65536, frac=None, replace=False, weights=None, random_state=None, axis=0)
csv_data_u_cut = csv_data_u.iloc[:,1:]
csv_data_u_float = csv_data_u_cut.astype('float32')
# print(csv_data_u_float.isnull().any())
# print(csv_data_u.iloc[:10,:])
print(csv_data_u_float.shape)
for x in csv_data_u_float.duplicated():
    if x:
print("duplication exist")
break
with open("../data/eco_nodes",'wb') as bin_output:
csv_data_u_float.values.tofile(bin_output)
with open("../data/eco_nodes.csv",'w') as csv_output:
csv_data_u.to_csv(csv_output)
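# Reading the binary back (a sketch, assuming the 65536-row sample above): the
# float32 values are written row-major, so the matrix can be recovered with
# np.fromfile("../data/eco_nodes", dtype=np.float32).reshape(65536, -1)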
|
import unittest
from mysql.toolkit import MySQL
from tests import config
from looptools import Timer
def printer(title, data, sep='-' * 100):
print('\n{title}:\n{sep}\n{data}\n{sep}\n'.format(title=title.upper(), sep=sep, data=data))
class TestStructureDefinition(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.sql = MySQL(config('testing_models'))
@classmethod
def tearDownClass(cls):
cls.sql.disconnect()
@Timer.decorator
def test_get_table_definition(self):
table = 'customers'
td = self.sql.get_table_definition(table)
printer(title='TABLE DEFINITION', data=td)
self.assertEqual(781, len(td))
@Timer.decorator
def test_get_column_definition_all(self):
definitions = ['`productCode` varchar(15) NOT NULL',
'`productName` varchar(70) NOT NULL',
'`productLine` varchar(50) NOT NULL',
'`productScale` varchar(10) NOT NULL',
'`productVendor` varchar(50) NOT NULL',
'`productDescription` text NOT NULL',
'`quantityInStock` smallint(6) NOT NULL',
'`buyPrice` decimal(10, 2) NOT NULL',
'`MSRP` decimal(10, 2) NOT NULL']
table = 'products'
td = self.sql.get_column_definition_all(table)
printer('Column definition', td)
for i in range(0, len(td)):
self.assertEqual(definitions[i], td[i])
self.assertEqual(len(td), 9)
@Timer.decorator
def test_get_column_definition(self):
table = 'offices'
col = 'addressLine1'
td = self.sql.get_column_definition(table, col)
self.assertEqual(35, len(td))
if __name__ == '__main__':
unittest.main()
|
import requests
from bs4 import BeautifulSoup
def GetPubSubdir():
response = requests.get(
"https://mirrors.edge.kernel.org/pub/linux/utils/util-linux/")
html = response.content.decode("utf-8")
parsedHtml = BeautifulSoup(html, "html.parser")
links = parsedHtml.find_all("a")
return ("https://mirrors.edge.kernel.org/pub/linux/utils/util-linux/"
+ links[-1]["href"] + "/")
def Check(auth, mirrors):
response = requests.get(GetPubSubdir())
html = response.content.decode("utf-8")
parsedHtml = BeautifulSoup(html, "html.parser")
links = parsedHtml.find_all("a")
maxVersionRc = -1
for link in links:
if ("util-linux-" in link.text and ".tar.gz" in link.text):
versionMajor = int(link.text.split("-")[2].split(".")[0])
versionMinor = int(link.text.split("-")[2].split(".")[1])
if len(link.text.split("-")) == 3:
return str(versionMajor) + "." + str(versionMinor)
versionRc = int(
link.text.split("-")[3].split(".")[0].split("rc")[1])
if versionRc > maxVersionRc:
maxVersionRc = versionRc
return (
str(versionMajor) + "." + str(versionMinor) + "-rc" + str(maxVersionRc))
|
import pyfirmata
board = pyfirmata.Arduino('/dev/ttyACM0')
led_pin = board.get_pin('d:10:p')
while True:
duty_s = input("Enter Brightness (0 to 100):")
duty = int(duty_s)
led_pin.write(duty / 100.0)
|
import logging
import sys
from logging.handlers import RotatingFileHandler
import os
FORMATTER = logging.Formatter("%(asctime)s — %(threadName)s — %(name)s — %(levelname)s — %(funcName)s:%(lineno)d — %(message)s")
def get_console_handler():
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setFormatter(FORMATTER)
return console_handler
def get_file_handler(file_name):
direction = '/var/log'
os.makedirs(direction, exist_ok=True)
file_handler = RotatingFileHandler(f'{direction}/{file_name}.log', mode='w', maxBytes=5000000, backupCount=2)
file_handler.setFormatter(FORMATTER)
return file_handler
def get_logger(logger_name):
logger = logging.getLogger(logger_name)
logger.setLevel(logging.DEBUG) # better to have too much log than not enough
# logger.addHandler(get_console_handler())
logger.addHandler(get_file_handler(logger_name))
# with this pattern, it's rarely necessary to propagate the error up to parent
logger.propagate = False
return logger
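if __name__ == "__main__":
    # Minimal usage sketch: "demo" is a hypothetical logger name; records go to
    # /var/log/demo.log through the rotating file handler configured above.
    log = get_logger("demo")
    log.info("logger configured")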
|
"""
Given a matrix of m x n elements (m rows, n columns), return all elements of the matrix in spiral order.
Example 1:
Input:
[
[ 1, 2, 3 ],
[ 4, 5, 6 ],
[ 7, 8, 9 ]
]
Output: [1,2,3,6,9,8,7,4,5]
Game plan
1. Keep 4 pointers: top, bottom, left, right.
2. Walk the top row from left to right, then move top down by one.
3. Walk the right column from top to bottom, then move right in by one.
4. Walk the bottom row from right to left, then move bottom up by one.
5. Walk the left column from bottom to top, then move left in by one, and
   repeat until the pointers cross.
"""
def spiralOrder(matrix):
    result = []
    if len(matrix) == 0:
        return result
    top, bottom = 0, len(matrix) - 1
    left, right = 0, len(matrix[0]) - 1
    while top <= bottom and left <= right:
        # top row, left to right
        for col in range(left, right + 1):
            result.append(matrix[top][col])
        top += 1
        # right column, top to bottom
        for row in range(top, bottom + 1):
            result.append(matrix[row][right])
        right -= 1
        # bottom row, right to left (if any rows remain)
        if top <= bottom:
            for col in range(right, left - 1, -1):
                result.append(matrix[bottom][col])
            bottom -= 1
        # left column, bottom to top (if any columns remain)
        if left <= right:
            for row in range(bottom, top - 1, -1):
                result.append(matrix[row][left])
            left += 1
    return result
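if __name__ == "__main__":
    # Quick check against the example in the docstring above.
    print(spiralOrder([[1, 2, 3],
                       [4, 5, 6],
                       [7, 8, 9]]))  # expected: [1, 2, 3, 6, 9, 8, 7, 4, 5]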
|
#This project works well if your camera (webcam) is working. If not, check my other project where the video is loaded manually (from a directory).
#----------------------------------------#
#FACE DETECTION USING PYTHON3 AND OPENCV #
#--------AUTHOR- Ritesh Aggarwal---------#
#-----------Language->Python3------------#
#-----------Github:->imkiller32----------#
#----------------Appendix----------------#
# Select the camera index according to your laptop.#
#for more camera hints, comment below
#---------Enjoy Your Live Feed----------#
#importing useful library
import cv2
#import numpy as np
def main():
path = "C:\\Users\\imkiller\\AppData\\Local\\Programs\\Python\\Python36-32\\Lib\\site-packages\\cv2\\data\\"
ClassifierPath= path + "haarcascade_frontalface_default.xml"
facedetect=cv2.CascadeClassifier(ClassifierPath)
#Resolution if available
w=800
h=600
#Capturing LiveFeed
cap=cv2.VideoCapture(1)
#setting Width and Height
cap.set(3,w)
cap.set(4,h)
#Checking Whether Cam is open or Not
if cap.isOpened():
ret, frame = cap.read()
else:
        ret = False
while ret:
#Reading webcam
ret, frame = cap.read()
grayFrame=cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
#cv2.imshow('gray',grayFrame) For Debug
        #There can be more than one face; parameters are set to get a good result
faces = facedetect.detectMultiScale(grayFrame,1.3,5)
for (x,y,w,h) in faces:
#for Debug
print('Ok')
            #Draw a red rectangle over the image if there is a face
cv2.rectangle(frame,(x,y),(x+w,y+h),(0,0,255),2)
cv2.imshow('FaceDetection Soft',frame)
if cv2.waitKey(1) == 27: #exit on ESC
break
#Releasing Camera
cap.release()
#destroying All windows Created
cv2.destroyAllWindows()
print('Thanks For checking...visit again')
if __name__ == "__main__":
print('Starting software...')
main()
|
# Copyright (c) 2021, NVIDIA CORPORATION.
import numpy as np
import pandas as pd
import pytest
from dask import dataframe as dd
import dask_cudf as dgd
import cudf
def _make_random_frame(nelem, npartitions=2):
df = pd.DataFrame(
{
"x": np.random.randint(0, 5, size=nelem),
"y": np.random.normal(size=nelem) + 1,
}
)
gdf = cudf.DataFrame.from_pandas(df)
dgf = dgd.from_cudf(gdf, npartitions=npartitions)
return df, dgf
_reducers = ["sum", "count", "mean", "var", "std", "min", "max"]
def _get_reduce_fn(name):
def wrapped(series):
fn = getattr(series, name)
return fn()
return wrapped
@pytest.mark.parametrize("reducer", _reducers)
def test_series_reduce(reducer):
reducer = _get_reduce_fn(reducer)
np.random.seed(0)
size = 10
df, gdf = _make_random_frame(size)
got = reducer(gdf.x)
exp = reducer(df.x)
dd.assert_eq(got, exp)
@pytest.mark.parametrize(
"data",
[
cudf.datasets.randomdata(
nrows=10000,
dtypes={"a": "category", "b": int, "c": float, "d": int},
),
cudf.datasets.randomdata(
nrows=10000,
dtypes={"a": "category", "b": int, "c": float, "d": str},
),
cudf.datasets.randomdata(
nrows=10000, dtypes={"a": bool, "b": int, "c": float, "d": str}
),
],
)
@pytest.mark.parametrize(
"op", ["max", "min", "sum", "prod", "mean", "var", "std"]
)
def test_rowwise_reductions(data, op):
gddf = dgd.from_cudf(data, npartitions=10)
pddf = gddf.to_dask_dataframe()
if op in ("var", "std"):
expected = getattr(pddf, op)(axis=1, ddof=0)
got = getattr(gddf, op)(axis=1, ddof=0)
else:
expected = getattr(pddf, op)(axis=1)
        got = getattr(gddf, op)(axis=1)
dd.assert_eq(expected.compute(), got.compute(), check_exact=False)
|
#!/usr/bin/python
#
# Copyright (c) 2020 by VMware, Inc. ("VMware")
# Used Copyright (c) 2018 by Network Device Education Foundation, Inc.
# ("NetDEF") in this file.
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose with or without fee is hereby granted, provided
# that the above copyright notice and this permission notice appear
# in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND VMWARE DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL VMWARE BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
#
"""OSPF Basic Functionality Automation."""
import os
import sys
import time
import pytest
from time import sleep
from copy import deepcopy
from lib.topotest import frr_unicode
# Save the Current Working Directory to find configuration files.
CWD = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(CWD, "../"))
sys.path.append(os.path.join(CWD, "../lib/"))
# pylint: disable=C0413
# Import topogen and topotest helpers
from lib.topogen import Topogen, get_topogen
# Import topoJson from lib, to create topology and initial configuration
from lib.common_config import (
start_topology,
write_test_header,
write_test_footer,
reset_config_on_routers,
step,
shutdown_bringup_interface,
topo_daemons,
)
from lib.topolog import logger
from lib.topojson import build_config_from_json
from lib.ospf import verify_ospf_neighbor, config_ospf_interface, clear_ospf
from ipaddress import IPv4Address
pytestmark = [pytest.mark.ospfd]
# Global variables
topo = None
"""
TOPOLOGY =
Please view in a fixed-width font such as Courier.
+---+ A1 +---+
+R1 +------------+R2 |
+-+-+- +--++
| -- -- |
| -- A0 -- |
A0| ---- |
| ---- | A2
| -- -- |
| -- -- |
+-+-+- +-+-+
+R0 +-------------+R3 |
+---+ A3 +---+
TESTCASES =
1. Verify ospf authentication with Simple password authentication.
2. Verify ospf authentication with MD5 authentication.
3. Verify ospf authentication with different authentication methods.
"""
def setup_module(mod):
"""
Sets up the pytest environment
* `mod`: module name
"""
testsuite_run_time = time.asctime(time.localtime(time.time()))
logger.info("Testsuite start time: {}".format(testsuite_run_time))
logger.info("=" * 40)
logger.info("Running setup_module to create topology")
# This function initiates the topology build with Topogen...
json_file = "{}/ospf_authentication.json".format(CWD)
tgen = Topogen(json_file, mod.__name__)
global topo
topo = tgen.json_topo
# ... and here it calls Mininet initialization functions.
    # get the list of daemons that need to be started for this suite.
daemons = topo_daemons(tgen, topo)
# Starting topology, create tmp files which are loaded to routers
    # to start daemons and then start routers
start_topology(tgen, daemons)
# Creating configuration from JSON
build_config_from_json(tgen, topo)
# Don't run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
ospf_covergence = verify_ospf_neighbor(tgen, topo)
assert ospf_covergence is True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
logger.info("Running setup_module() done")
def teardown_module(mod):
"""
Teardown the pytest environment.
* `mod`: module name
"""
logger.info("Running teardown_module to delete topology")
tgen = get_topogen()
    # Stop topology and remove tmp files
tgen.stop_topology()
logger.info(
"Testsuite end time: {}".format(time.asctime(time.localtime(time.time())))
)
logger.info("=" * 40)
# ##################################
# Test cases start here.
# ##################################
def test_ospf_authentication_simple_pass_tc28_p1(request):
"""
OSPF Authentication - Verify ospf authentication with Simple
password authentication.
"""
tc_name = request.node.name
write_test_header(tc_name)
tgen = get_topogen()
global topo
step("Bring up the base config.")
reset_config_on_routers(tgen)
step(
"Configure ospf with on R1 and R2, enable ospf on R1 interface"
"connected to R2 with simple password authentication using ip ospf "
"authentication Simple password cmd."
)
r1_ospf_auth = {
"r1": {
"links": {
"r2": {"ospf": {"authentication": True, "authentication-key": "ospf"}}
}
}
}
result = config_ospf_interface(tgen, topo, r1_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step("clear ip ospf after configuring the authentication.")
clear_ospf(tgen, "r1")
step("Verify that the neighbour is not FULL between R1 and R2.")
dut = "r1"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut, expected=False)
assert ospf_covergence is not True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step(
"On R2 enable ospf on interface with simple password authentication "
"using ip ospf authentication Simple password cmd."
)
r2_ospf_auth = {
"r2": {
"links": {
"r1": {"ospf": {"authentication": True, "authentication-key": "ospf"}}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step(
"Verify that the neighbour is FULL between R1 and R2 "
"using show ip ospf neighbor cmd."
)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
assert ospf_covergence is True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step(
"Disable simple password authentication on R2 using no ip ospf "
"authentication Simple password cmd."
)
r2_ospf_auth = {
"r2": {
"links": {
"r1": {
"ospf": {
"authentication": True,
"authentication-key": "ospf",
"del_action": True,
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step("Verify on R1 neighbour is deleted for R2 after dead interval expiry")
# wait till the dead time expiry
sleep(6)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(
tgen, topo, dut=dut, expected=False, retry_timeout=10
)
assert ospf_covergence is not True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step("Again On R2 enable ospf on interface with Simple password auth")
r2_ospf_auth = {
"r2": {
"links": {
"r1": {"ospf": {"authentication": True, "authentication-key": "ospf"}}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step(
"Verify that the neighbour is FULL between R1 and R2 using"
" show ip ospf neighbor cmd."
)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
assert ospf_covergence is True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step("Shut no shut interface on R1")
dut = "r1"
intf = topo["routers"]["r1"]["links"]["r2"]["interface"]
shutdown_bringup_interface(tgen, dut, intf, False)
dut = "r2"
step(
"Verify that the neighbour is not FULL between R1 and R2 using "
"show ip ospf neighbor cmd."
)
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut, expected=False)
assert ospf_covergence is not True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
dut = "r1"
shutdown_bringup_interface(tgen, dut, intf, True)
step(
"Verify that the neighbour is FULL between R1 and R2 using "
"show ip ospf neighbor cmd."
)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
assert ospf_covergence is True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step("Change Ip address on R1 and R2")
topo_modify_change_ip = deepcopy(topo)
intf_ip = topo_modify_change_ip["routers"]["r1"]["links"]["r2"]["ipv4"]
topo_modify_change_ip["routers"]["r1"]["links"]["r2"]["ipv4"] = str(
IPv4Address(frr_unicode(intf_ip.split("/")[0])) + 3
) + "/{}".format(intf_ip.split("/")[1])
build_config_from_json(tgen, topo_modify_change_ip, save_bkup=False)
reset_config_on_routers(tgen, routerName="r1")
dut = "r1"
intf = topo["routers"]["r1"]["links"]["r2"]["interface"]
shutdown_bringup_interface(tgen, dut, intf, False)
shutdown_bringup_interface(tgen, dut, intf, True)
# clear ip ospf after configuring the authentication.
clear_ospf(tgen, "r1")
r1_ospf_auth = {
"r1": {
"links": {
"r2": {"ospf": {"authentication": True, "authentication-key": "ospf"}}
}
}
}
result = config_ospf_interface(tgen, topo, r1_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step(
"Verify that the neighbour is FULL between R1 and R2 with new "
"ip address using show ip ospf "
)
dut = "r1"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
assert ospf_covergence is True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
write_test_footer(tc_name)
def test_ospf_authentication_md5_tc29_p1(request):
"""
OSPF Authentication - Verify ospf authentication with MD5 authentication.
"""
tc_name = request.node.name
write_test_header(tc_name)
tgen = get_topogen()
global topo
step("Bring up the base config.")
reset_config_on_routers(tgen)
step(
"Configure ospf with on R1 and R2, enable ospf on R1 interface "
"connected to R2 with message-digest authentication using ip "
"ospf authentication message-digest cmd."
)
r1_ospf_auth = {
"r1": {
"links": {
"r2": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "ospf",
"message-digest-key": "10",
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r1_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step("Verify that the neighbour is not FULL between R1 and R2.")
# wait for dead time expiry.
sleep(6)
dut = "r1"
ospf_covergence = verify_ospf_neighbor(
tgen, topo, dut=dut, expected=False, retry_timeout=6
)
assert ospf_covergence is not True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step(
"On R2 enable ospf on interface with message-digest authentication"
" using ip ospf authentication message-digest password cmd."
)
r2_ospf_auth = {
"r2": {
"links": {
"r1": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "ospf",
"message-digest-key": "10",
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step(
"Verify that the neighbour is FULL between R1 and R2 "
"using show ip ospf neighbor cmd."
)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
assert ospf_covergence is True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step(
"Disable message-digest authentication on R2 using no ip ospf "
"authentication message-digest password cmd."
)
r2_ospf_auth = {
"r2": {
"links": {
"r1": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "ospf",
"message-digest-key": "10",
"del_action": True,
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step("Verify on R1 ,nbr is deleted for R2 after dead interval expiry")
# wait till the dead timer expiry
sleep(6)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(
tgen, topo, dut=dut, expected=False, retry_timeout=10
)
assert ospf_covergence is not True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step("Again On R2 enable ospf on interface with message-digest auth")
r2_ospf_auth = {
"r2": {
"links": {
"r1": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "ospf",
"message-digest-key": "10",
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step(
"Verify that the neighbour is FULL between R1 and R2 using"
" show ip ospf neighbor cmd."
)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
assert ospf_covergence is True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step("Shut no shut interface on R1")
dut = "r1"
intf = topo["routers"]["r1"]["links"]["r2"]["interface"]
shutdown_bringup_interface(tgen, dut, intf, False)
dut = "r2"
step(
"Verify that the neighbour is not FULL between R1 and R2 using "
"show ip ospf neighbor cmd."
)
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut, expected=False)
assert ospf_covergence is not True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
dut = "r1"
shutdown_bringup_interface(tgen, dut, intf, True)
step(
"Verify that the neighbour is FULL between R1 and R2 using "
"show ip ospf neighbor cmd."
)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
assert ospf_covergence is True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step("Change Ip address on R1 and R2")
topo_modify_change_ip = deepcopy(topo)
intf_ip = topo_modify_change_ip["routers"]["r1"]["links"]["r2"]["ipv4"]
topo_modify_change_ip["routers"]["r1"]["links"]["r2"]["ipv4"] = str(
IPv4Address(frr_unicode(intf_ip.split("/")[0])) + 3
) + "/{}".format(intf_ip.split("/")[1])
build_config_from_json(tgen, topo_modify_change_ip, save_bkup=False)
reset_config_on_routers(tgen, routerName="r1")
dut = "r1"
intf = topo["routers"]["r1"]["links"]["r2"]["interface"]
shutdown_bringup_interface(tgen, dut, intf, False)
shutdown_bringup_interface(tgen, dut, intf, True)
clear_ospf(tgen, "r1")
r1_ospf_auth = {
"r1": {
"links": {
"r2": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "ospf",
"message-digest-key": "10",
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r1_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step(
"Verify that the neighbour is FULL between R1 and R2 with new "
"ip address using show ip ospf "
)
dut = "r1"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
assert ospf_covergence is True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
write_test_footer(tc_name)
def test_ospf_authentication_different_auths_tc30_p1(request):
"""
OSPF Authentication - Verify ospf authentication with different
authentication methods.
"""
tc_name = request.node.name
write_test_header(tc_name)
tgen = get_topogen()
global topo
step("Bring up the base config.")
reset_config_on_routers(tgen)
step(
"Configure ospf with on R1 and R2, enable ospf on R1 interface "
"connected to R2 with message-digest authentication using ip "
"ospf authentication message-digest cmd."
)
r1_ospf_auth = {
"r1": {
"links": {
"r2": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "ospf",
"message-digest-key": "10",
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r1_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
# wait for dead timer expiry
sleep(6)
step("Verify that the neighbour is not FULL between R1 and R2.")
dut = "r1"
ospf_covergence = verify_ospf_neighbor(
tgen, topo, dut=dut, expected=False, retry_timeout=10
)
assert ospf_covergence is not True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step(
"On R2 enable ospf on interface with message-digest authentication"
" using ip ospf authentication message-digest password cmd."
)
r2_ospf_auth = {
"r2": {
"links": {
"r1": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "ospf",
"message-digest-key": "10",
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step(
"Verify that the neighbour is FULL between R1 and R2 "
"using show ip ospf neighbor cmd."
)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
assert ospf_covergence is True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step(" Delete the configured password on both the routers.")
r2_ospf_auth = {
"r2": {
"links": {
"r1": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "ospf",
"message-digest-key": "10",
"del_action": True,
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
r1_ospf_auth = {
"r1": {
"links": {
"r2": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "ospf",
"message-digest-key": "10",
"del_action": True,
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r1_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step(
"Verify that the deletion is successful and neighbour is FULL"
" between R1 and R2 using show ip ospf neighbor cmd."
)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
assert ospf_covergence is True, "setup_module :Failed \n Error:" " {}".format(
ospf_covergence
)
step("Change the authentication type to simple password.")
r1_ospf_auth = {
"r1": {
"links": {
"r2": {"ospf": {"authentication": True, "authentication-key": "ospf"}}
}
}
}
result = config_ospf_interface(tgen, topo, r1_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
r2_ospf_auth = {
"r2": {
"links": {
"r1": {"ospf": {"authentication": True, "authentication-key": "ospf"}}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step(
"Verify that the deletion is successful and neighbour is"
" FULL between R1 and R2 using show ip "
)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
    assert ospf_covergence is True, "Testcase {} :Failed \n Error: {}".format(
        tc_name, ospf_covergence
    )
step("Change the password in simple password.")
r1_ospf_auth = {
"r1": {
"links": {
"r2": {"ospf": {"authentication": True, "authentication-key": "OSPFv4"}}
}
}
}
result = config_ospf_interface(tgen, topo, r1_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
r2_ospf_auth = {
"r2": {
"links": {
"r1": {"ospf": {"authentication": True, "authentication-key": "OSPFv4"}}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step(
"Verify that the deletion is successful and neighbour is"
" FULL between R1 and R2 using show ip "
)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
    assert ospf_covergence is True, "Testcase {} :Failed \n Error: {}".format(
        tc_name, ospf_covergence
    )
step("Delete the password authentication on the interface ")
r1_ospf_auth = {
"r1": {
"links": {
"r2": {
"ospf": {
"authentication": True,
"authentication-key": "OSPFv4",
"del_action": True,
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r1_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
r2_ospf_auth = {
"r2": {
"links": {
"r1": {
"ospf": {
"authentication": True,
"authentication-key": "OSPFv4",
"del_action": True,
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step(
"Verify that the deletion is successful and neighbour is"
" FULL between R1 and R2 using show ip "
)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
    assert ospf_covergence is True, "Testcase {} :Failed \n Error: {}".format(
        tc_name, ospf_covergence
    )
step("Enable Md5 authentication on the interface")
r1_ospf_auth = {
"r1": {
"links": {
"r2": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "ospf",
"message-digest-key": "10",
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r1_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
r2_ospf_auth = {
"r2": {
"links": {
"r1": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "ospf",
"message-digest-key": "10",
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step(
"Verify that the neighbour is FULL between R1 and R2 using"
" show ip ospf neighbor cmd."
)
dut = "r2"
ospf_covergence = verify_ospf_neighbor(tgen, topo, dut=dut)
    assert ospf_covergence is True, "Testcase {} :Failed \n Error: {}".format(
        tc_name, ospf_covergence
    )
step("Change the MD5 authentication password")
r1_ospf_auth = {
"r1": {
"links": {
"r2": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "OSPFv4",
"message-digest-key": "10",
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r1_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
r2_ospf_auth = {
"r2": {
"links": {
"r1": {
"ospf": {
"authentication": "message-digest",
"authentication-key": "OSPFv4",
"message-digest-key": "10",
}
}
}
}
}
result = config_ospf_interface(tgen, topo, r2_ospf_auth)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
write_test_footer(tc_name)
if __name__ == "__main__":
args = ["-s"] + sys.argv[1:]
sys.exit(pytest.main(args))
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from .. import _utilities
import typing
# Export this package's modules as members:
from ._enums import *
from .backup_short_term_retention_policy import *
from .data_masking_policy import *
from .database import *
from .database_advisor import *
from .database_blob_auditing_policy import *
from .database_security_alert_policy import *
from .database_vulnerability_assessment import *
from .database_vulnerability_assessment_rule_baseline import *
from .disaster_recovery_configuration import *
from .elastic_pool import *
from .extended_database_blob_auditing_policy import *
from .extended_server_blob_auditing_policy import *
from .failover_group import *
from .firewall_rule import *
from .geo_backup_policy import *
from .get_backup_short_term_retention_policy import *
from .get_data_masking_policy import *
from .get_database import *
from .get_database_advisor import *
from .get_database_blob_auditing_policy import *
from .get_database_security_alert_policy import *
from .get_database_vulnerability_assessment import *
from .get_database_vulnerability_assessment_rule_baseline import *
from .get_disaster_recovery_configuration import *
from .get_elastic_pool import *
from .get_extended_database_blob_auditing_policy import *
from .get_extended_server_blob_auditing_policy import *
from .get_failover_group import *
from .get_firewall_rule import *
from .get_geo_backup_policy import *
from .get_instance_failover_group import *
from .get_instance_pool import *
from .get_job import *
from .get_job_agent import *
from .get_job_credential import *
from .get_job_step import *
from .get_job_target_group import *
from .get_long_term_retention_policy import *
from .get_managed_database import *
from .get_managed_database_sensitivity_label import *
from .get_managed_database_vulnerability_assessment import *
from .get_managed_database_vulnerability_assessment_rule_baseline import *
from .get_managed_instance import *
from .get_managed_instance_administrator import *
from .get_managed_instance_azure_ad_only_authentication import *
from .get_managed_instance_key import *
from .get_managed_instance_private_endpoint_connection import *
from .get_managed_instance_vulnerability_assessment import *
from .get_outbound_firewall_rule import *
from .get_private_endpoint_connection import *
from .get_sensitivity_label import *
from .get_server import *
from .get_server_advisor import *
from .get_server_azure_ad_administrator import *
from .get_server_azure_ad_only_authentication import *
from .get_server_blob_auditing_policy import *
from .get_server_communication_link import *
from .get_server_dns_alias import *
from .get_server_key import *
from .get_server_security_alert_policy import *
from .get_server_trust_group import *
from .get_server_vulnerability_assessment import *
from .get_sync_agent import *
from .get_sync_group import *
from .get_sync_member import *
from .get_transparent_data_encryption import *
from .get_virtual_network_rule import *
from .get_workload_classifier import *
from .get_workload_group import *
from .instance_failover_group import *
from .instance_pool import *
from .job import *
from .job_agent import *
from .job_credential import *
from .job_step import *
from .job_target_group import *
from .long_term_retention_policy import *
from .managed_database import *
from .managed_database_sensitivity_label import *
from .managed_database_vulnerability_assessment import *
from .managed_database_vulnerability_assessment_rule_baseline import *
from .managed_instance import *
from .managed_instance_administrator import *
from .managed_instance_azure_ad_only_authentication import *
from .managed_instance_key import *
from .managed_instance_private_endpoint_connection import *
from .managed_instance_vulnerability_assessment import *
from .outbound_firewall_rule import *
from .private_endpoint_connection import *
from .sensitivity_label import *
from .server import *
from .server_advisor import *
from .server_azure_ad_administrator import *
from .server_azure_ad_only_authentication import *
from .server_blob_auditing_policy import *
from .server_communication_link import *
from .server_dns_alias import *
from .server_key import *
from .server_security_alert_policy import *
from .server_trust_group import *
from .server_vulnerability_assessment import *
from .sync_agent import *
from .sync_group import *
from .sync_member import *
from .transparent_data_encryption import *
from .virtual_network_rule import *
from .workload_classifier import *
from .workload_group import *
from ._inputs import *
from . import outputs
# Make subpackages available:
if typing.TYPE_CHECKING:
import pulumi_azure_native.sql.v20140401 as __v20140401
v20140401 = __v20140401
import pulumi_azure_native.sql.v20150501preview as __v20150501preview
v20150501preview = __v20150501preview
import pulumi_azure_native.sql.v20170301preview as __v20170301preview
v20170301preview = __v20170301preview
import pulumi_azure_native.sql.v20171001preview as __v20171001preview
v20171001preview = __v20171001preview
import pulumi_azure_native.sql.v20180601preview as __v20180601preview
v20180601preview = __v20180601preview
import pulumi_azure_native.sql.v20190601preview as __v20190601preview
v20190601preview = __v20190601preview
import pulumi_azure_native.sql.v20200202preview as __v20200202preview
v20200202preview = __v20200202preview
import pulumi_azure_native.sql.v20200801preview as __v20200801preview
v20200801preview = __v20200801preview
import pulumi_azure_native.sql.v20201101preview as __v20201101preview
v20201101preview = __v20201101preview
import pulumi_azure_native.sql.v20210201preview as __v20210201preview
v20210201preview = __v20210201preview
else:
v20140401 = _utilities.lazy_import('pulumi_azure_native.sql.v20140401')
v20150501preview = _utilities.lazy_import('pulumi_azure_native.sql.v20150501preview')
v20170301preview = _utilities.lazy_import('pulumi_azure_native.sql.v20170301preview')
v20171001preview = _utilities.lazy_import('pulumi_azure_native.sql.v20171001preview')
v20180601preview = _utilities.lazy_import('pulumi_azure_native.sql.v20180601preview')
v20190601preview = _utilities.lazy_import('pulumi_azure_native.sql.v20190601preview')
v20200202preview = _utilities.lazy_import('pulumi_azure_native.sql.v20200202preview')
v20200801preview = _utilities.lazy_import('pulumi_azure_native.sql.v20200801preview')
v20201101preview = _utilities.lazy_import('pulumi_azure_native.sql.v20201101preview')
v20210201preview = _utilities.lazy_import('pulumi_azure_native.sql.v20210201preview')
|
import sys
import logging as log
import time
import datetime
from . import gitlab
from .approvals import Approvals
GET, POST, PUT, DELETE = gitlab.GET, gitlab.POST, gitlab.PUT, gitlab.DELETE
class MergeRequest(gitlab.Resource):
@classmethod
def create(cls, api, project_id, params):
merge_request_info = api.call(POST(
'/projects/{project_id}/merge_requests'.format(project_id=project_id),
params,
))
merge_request = cls(api, merge_request_info)
return merge_request
@classmethod
def search(cls, api, project_id, params):
merge_requests = api.collect_all_pages(GET(
'/projects/{project_id}/merge_requests'.format(project_id=project_id),
params,
))
return [cls(api, merge_request) for merge_request in merge_requests]
@classmethod
def fetch_by_iid(cls, project_id, merge_request_iid, api):
merge_request = cls(api, {'iid': merge_request_iid, 'project_id': project_id})
merge_request.refetch_info()
return merge_request
@classmethod
def fetch_assigned_at(cls, user, api, merge_request):
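        """Return the POSIX timestamp of the latest 'assigned to @<username>' note for `user`.

        Scans every discussion note of `merge_request`; returns 0 if no matching note exists.
        """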
assigned_at = 0
all_discussions = api.collect_all_pages(
GET('/projects/{project_id}/merge_requests/{merge_requests_id}/discussions'.format(
project_id=merge_request.get('project_id'),
merge_requests_id=merge_request.get('iid')
)))
match_body = 'assigned to @{username}'.format(username=user.username)
for discussion in all_discussions:
for note in discussion.get('notes'):
if match_body in note.get('body'):
date_string = note.get('created_at')
date_format = "%Y-%m-%dT%H:%M:%S.%f%z"
if (sys.version_info.major, sys.version_info.minor) <= (3, 6):
assigned = datetime.datetime.strptime(date_string[:-1], date_format[:-2]) \
.replace(tzinfo=datetime.timezone.utc).timestamp()
else:
assigned = datetime.datetime.strptime(date_string, date_format).timestamp()
if assigned > assigned_at:
assigned_at = assigned
return assigned_at
@classmethod
def fetch_all_open_for_user(cls, project_id, user, api, merge_order):
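        """Return open merge requests of `project_id` assigned to `user`, oldest first.

        When `merge_order` is 'assigned_at', results are re-sorted by the time the user
        was assigned (see fetch_assigned_at).
        """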
request_merge_order = 'created_at' if merge_order == 'assigned_at' else merge_order
all_merge_request_infos = api.collect_all_pages(GET(
'/projects/{project_id}/merge_requests'.format(project_id=project_id),
{'state': 'opened', 'order_by': request_merge_order, 'sort': 'asc'},
))
my_merge_request_infos = [
mri for mri in all_merge_request_infos
if ((mri.get('assignee', {}) or {}).get('id') == user.id) or
(user.id in [assignee.get('id') for assignee in (mri.get('assignees', []) or [])])
]
if merge_order == 'assigned_at':
my_merge_request_infos.sort(key=lambda mri: cls.fetch_assigned_at(user, api, mri))
return [cls(api, merge_request_info) for merge_request_info in my_merge_request_infos]
@property
def project_id(self):
return self.info['project_id']
@property
def iid(self):
return self.info['iid']
@property
def title(self):
return self.info['title']
@property
def state(self):
return self.info['state']
@property
def merge_status(self):
return self.info['merge_status']
@property
def rebase_in_progress(self):
return self.info.get('rebase_in_progress', False)
@property
def merge_error(self):
return self.info.get('merge_error')
@property
def assignee_ids(self):
if 'assignees' in self.info:
return [assignee.get('id') for assignee in (self.info['assignees'] or [])]
return [(self.info.get('assignee', {}) or {}).get('id')]
@property
def author_id(self):
return self.info['author'].get('id')
@property
def source_branch(self):
return self.info['source_branch']
@property
def target_branch(self):
return self.info['target_branch']
@property
def sha(self):
return self.info['sha']
@property
def squash(self):
return self.info.get('squash', False) # missing means auto-squash not supported
@property
def source_project_id(self):
return self.info['source_project_id']
@property
def target_project_id(self):
return self.info['target_project_id']
@property
def work_in_progress(self):
return self.info['work_in_progress']
@property
def approved_by(self):
return self.info['approved_by']
@property
def web_url(self):
return self.info['web_url']
@property
def blocking_discussions_resolved(self):
return self.info['blocking_discussions_resolved']
@property
def force_remove_source_branch(self):
return self.info['force_remove_source_branch']
def update_sha(self, sha):
"""record the updated sha. We don't use refetch_info instead as it may hit cache."""
self._info['sha'] = sha
def refetch_info(self):
self._info = self._api.call(GET('/projects/{0.project_id}/merge_requests/{0.iid}'.format(self)))
def comment(self, message):
if self._api.version().release >= (9, 2, 2):
notes_url = '/projects/{0.project_id}/merge_requests/{0.iid}/notes'.format(self)
else:
# GitLab botched the v4 api before 9.2.2
notes_url = '/projects/{0.project_id}/merge_requests/{0.id}/notes'.format(self)
return self._api.call(POST(notes_url, {'body': message}))
def rebase(self):
self.refetch_info()
if not self.rebase_in_progress:
self._api.call(PUT(
'/projects/{0.project_id}/merge_requests/{0.iid}/rebase'.format(self),
))
else:
# We wanted to rebase and someone just happened to press the button for us!
log.info('A rebase was already in progress on the merge request!')
max_attempts = 30
wait_between_attempts_in_secs = 1
for _ in range(max_attempts):
self.refetch_info()
if not self.rebase_in_progress:
if self.merge_error:
raise MergeRequestRebaseFailed(self.merge_error)
return
time.sleep(wait_between_attempts_in_secs)
raise TimeoutError('Waiting for merge request to be rebased by GitLab')
def accept(self, remove_branch=False, sha=None, merge_when_pipeline_succeeds=True):
return self._api.call(PUT(
'/projects/{0.project_id}/merge_requests/{0.iid}/merge'.format(self),
dict(
should_remove_source_branch=remove_branch,
merge_when_pipeline_succeeds=merge_when_pipeline_succeeds,
sha=sha or self.sha, # if provided, ensures what is merged is what we want (or fails)
),
))
def close(self):
return self._api.call(PUT(
'/projects/{0.project_id}/merge_requests/{0.iid}'.format(self),
{'state_event': 'close'},
))
def assign_to(self, user_id):
return self._api.call(PUT(
'/projects/{0.project_id}/merge_requests/{0.iid}'.format(self),
{'assignee_id': user_id},
))
def unassign(self):
return self.assign_to(0)
def fetch_approvals(self):
        # 'id' needed for the GitLab 9.2.2 hack (see Approvals.refetch_info())
info = {'id': self.id, 'iid': self.iid, 'project_id': self.project_id}
approvals = Approvals(self.api, info)
approvals.refetch_info()
return approvals
def fetch_commits(self):
return self._api.call(GET('/projects/{0.project_id}/merge_requests/{0.iid}/commits'.format(self)))
class MergeRequestRebaseFailed(Exception):
pass
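# Minimal usage sketch (illustrative only; assumes an authenticated `api` object and real IDs):
#     mr = MergeRequest.fetch_by_iid(project_id=42, merge_request_iid=7, api=api)
#     mr.comment('Looks good, merging.')
#     mr.accept(remove_branch=True)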
|
from core import Window, alerts
import imgui
from utils import singleton
@singleton
class ExceptionAlert():
def __init__(self, exception, traceinfo):
self.exception = exception
self.traceinfo = traceinfo
def render(self):
expanded, visible = imgui.collapsing_header(self.exception)
if expanded:
imgui.text(self.traceinfo)
def __str__(self):
return self.exception
def render_alert(alertItem):
alert, count = alertItem
imgui.text(str(count))
imgui.next_column()
alert.render()
imgui.next_column()
imgui.separator()
class AlertViewer(Window):
def render(self):
imgui.columns(2, 'alertList')
imgui.set_column_offset(1, 45)
imgui.text("Count")
imgui.next_column()
imgui.text("Alert")
imgui.next_column()
imgui.separator()
        # ToDo: In the future, don't reverse this; simply have it lock the scroll
        # to the bottom like a terminal? Might be more effort than it's worth.
list(map(render_alert, reversed(alerts.items())))
imgui.text("Count")
imgui.next_column()
imgui.text("Alert")
imgui.next_column()
imgui.columns(1)
|
# K2000 KIT Python Module
# to be used in conjunction with the arduinoMicroShiftRegisterTest(s), to produce effect(s) such as K2000 KIT
import time
from collections import deque # used to cycle/rotate the leds deck pattern to be sent over serial to the uC/Ar
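# Illustrative sketch (not part of the original module): shows how deque.rotate() shifts
# a LED pattern one step to the right and back; the pattern string is only an example
# value. Define-only helper, call _demo_rotate() manually to see the effect.
def _demo_rotate(pattern='00011000'):
    deck = deque(pattern)
    deck.rotate(1)   # one step to the right
    right = ''.join(deck)
    deck.rotate(-1)  # back to the original position
    print 'original:', pattern, 'rotated right:', right, 'rotated back:', ''.join(deck)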
# == debug tests ==
'''
larger_list = [0,'A','B','C',1, 1,'F','G', 'H', 0]
#print larger_list
smaller_list = larger_list[1:len(larger_list)-1]
#print smaller_list
deck = deque( smaller_list )
#print deck
deck.rotate(1)
#print deck
deck.rotate(-1) # rotate by one to the left
#print deck
'''
config = {
'large_pattern': '0000000000',
'actual_pattern': '00011000',
'direction': 1,
'position': 4,
'deck_len': 2
#'pattern_len': len(config['actual_pattern'])
}
def loopOverConfig():
if config['direction'] == 1:
print 'direction => RIGHT'
deckEndIndex = config['position'] + config['deck_len'] - 1
if deckEndIndex < len(config['large_pattern']):
# can go right
print 'free space on right => TRUE'
            large_pattern = list(config['large_pattern'])
            print 'large pattern as list => ', large_pattern
else:
# cannot go right
print 'free space on right => FALSE'
    elif config['direction'] == 0:
print 'direction => LEFT'
if config['position'] > 0:
# can go left
print 'free space on left => TRUE'
else:
# cannot go left
print 'free space on left => FALSE'
# == sketch ==
larger_pattern = '0000000000'
pattern = '00011000'
start_direction = 1 # right
start_position = 4 # index of 1st led of the deck
deck_len = 2 # number of leds on at the same time
current_direction = 1 # 1 = right, 0 = left
current_position = 4
current_pattern = '00000000'
# dirty helper function
def loopK200Kit():
global current_direction
global current_position
global larger_pattern
global current_pattern
global deck_len
# could be done: check the mode & its options ( .. )
# check the direction we're set to go to
if current_direction == 1: # going right
print 'going right'
if current_position + deck_len - 1 < len(larger_pattern):
print 'still free space on the right ..'
large_pattern_list = str(larger_pattern)
# strip the first & last elements of the list
sized_pattern = large_pattern_list[1:len(large_pattern_list)-1]
# convert it to a deque
deck = deque( sized_pattern )
deck.rotate(1)
# increment the current position
current_position += 1
# get back current pattern list from deque
current_pattern_list = list(deque(deck))
# write the current pattern as string, not list
current_pattern = ''.join(current_pattern_list)
# write the updated larger pattern
            large_pattern = current_pattern_list
            large_pattern[:0] = '0'
            large_pattern[len(large_pattern):] = '0'
            larger_pattern = ''.join(large_pattern)
# display the current pattern
print 'pattern: ', current_pattern, ' position: ', current_position
else:
print 'no more free space on the right ..'
current_direction = 0
#return
elif current_direction == 0: #going left
print 'going left'
if current_position > 0:
print 'still free space on the left ..'
            large_pattern_list = list(larger_pattern)
# strip the first & last elements of the list
sized_pattern = large_pattern_list[1:len(large_pattern_list)-1]
# convert it to a deque
deck = deque( sized_pattern )
deck.rotate(-1)
# decrement the current position
current_position -= 1
# get back current pattern list from deque
current_pattern_list = list(deque(deck))
# write the current pattern as string, not list
current_pattern = ''.join(current_pattern_list)
# write the updated larger pattern
            large_pattern = current_pattern_list
            large_pattern[:0] = '0'
            large_pattern[len(large_pattern):] = '0'
            larger_pattern = ''.join(large_pattern)
# display the current pattern
print 'pattern: ', current_pattern, ' position: ', current_position
else:
print 'no more free space on the left ..'
current_direction = 1
#return
# infinite loop
while 1 == 1:
loopK200Kit()
time.sleep(2)
print 'the program ended'
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import typing
import pkg_resources
from google import auth
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.ads.googleads.v5.resources.types import keyword_plan
from google.ads.googleads.v5.services.types import keyword_plan_service
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-ads-googleads",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class KeywordPlanServiceTransport(metaclass=abc.ABCMeta):
"""Abstract transport class for KeywordPlanService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/adwords",)
def __init__(
self,
*,
host: str = "googleads.googleapis.com",
credentials: credentials.Credentials = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]): The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials is None:
credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
# Save the credentials.
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
self._prep_wrapped_messages(client_info)
def _prep_wrapped_messages(self, client_info):
# Precomputed wrapped methods
self._wrapped_methods = {
self.get_keyword_plan: gapic_v1.method.wrap_method(
self.get_keyword_plan,
default_timeout=None,
client_info=client_info,
),
self.mutate_keyword_plans: gapic_v1.method.wrap_method(
self.mutate_keyword_plans,
default_timeout=None,
client_info=client_info,
),
self.generate_forecast_curve: gapic_v1.method.wrap_method(
self.generate_forecast_curve,
default_timeout=None,
client_info=client_info,
),
self.generate_forecast_time_series: gapic_v1.method.wrap_method(
self.generate_forecast_time_series,
default_timeout=None,
client_info=client_info,
),
self.generate_forecast_metrics: gapic_v1.method.wrap_method(
self.generate_forecast_metrics,
default_timeout=None,
client_info=client_info,
),
self.generate_historical_metrics: gapic_v1.method.wrap_method(
self.generate_historical_metrics,
default_timeout=None,
client_info=client_info,
),
}
@property
def get_keyword_plan(
self,
) -> typing.Callable[
[keyword_plan_service.GetKeywordPlanRequest], keyword_plan.KeywordPlan
]:
raise NotImplementedError
@property
def mutate_keyword_plans(
self,
) -> typing.Callable[
[keyword_plan_service.MutateKeywordPlansRequest],
keyword_plan_service.MutateKeywordPlansResponse,
]:
raise NotImplementedError
@property
def generate_forecast_curve(
self,
) -> typing.Callable[
[keyword_plan_service.GenerateForecastCurveRequest],
keyword_plan_service.GenerateForecastCurveResponse,
]:
raise NotImplementedError
@property
def generate_forecast_time_series(
self,
) -> typing.Callable[
[keyword_plan_service.GenerateForecastTimeSeriesRequest],
keyword_plan_service.GenerateForecastTimeSeriesResponse,
]:
raise NotImplementedError
@property
def generate_forecast_metrics(
self,
) -> typing.Callable[
[keyword_plan_service.GenerateForecastMetricsRequest],
keyword_plan_service.GenerateForecastMetricsResponse,
]:
raise NotImplementedError
@property
def generate_historical_metrics(
self,
) -> typing.Callable[
[keyword_plan_service.GenerateHistoricalMetricsRequest],
keyword_plan_service.GenerateHistoricalMetricsResponse,
]:
raise NotImplementedError
__all__ = ("KeywordPlanServiceTransport",)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Andrey Skopenko <andrey@scopenco.net>
PLUGIN_NAME = 'CageFS'
import sys
sys.path.insert(0, '/usr/local/ispmgr/addon')
from cli import ExitOk, Log, xml_doc, xml_error
from libcagefs import CageFS
from os import chdir
from sys import exit, stderr
from cgi import FieldStorage
from traceback import format_exc
if __name__ == "__main__":
chdir('/usr/local/ispmgr/')
# activate logging
# stderr ==> ispmgr.log
log = Log(plugin=PLUGIN_NAME)
    sys.stderr = log
try:
# get cgi vars
req = FieldStorage(keep_blank_values=True)
func = req.getvalue('func')
elid = req.getvalue('elid')
sok = req.getvalue('sok')
log.write('func %s, elid %s, sok %s' % (func, elid, sok))
if func not in ['cagefs.main', 'cagefs.toggle', 'cagefs.status',
'cagefs.update_start', 'cagefs.update_progress',
'cagefs.update_done', 'cagefs.init_start',
'cagefs.init_progress', 'cagefs.init_done',
'cagefs.enable-all', 'cagefs.disable-all']:
print xml_doc()
raise ExitOk('no action')
cagefs = CageFS(log)
if func == 'cagefs.main':
cagefs.cagefs_main()
if func == 'cagefs.toggle':
cagefs.cagefs_toggle()
if func == 'cagefs.status':
cagefs.cagefs_status()
if func == 'cagefs.enable-all':
cagefs.cagefsctl_run_cmd('--enable-all')
if func == 'cagefs.disable-all':
cagefs.cagefsctl_run_cmd('--disable-all')
if func == 'cagefs.init_start':
cagefs.cagefs_init_start()
if func == 'cagefs.init_progress':
cagefs.cagefs_init_progress()
if func == 'cagefs.init_done':
cagefs.cagefs_init_done()
if func == 'cagefs.update_start':
cagefs.cagefs_update_start()
if func == 'cagefs.update_progress':
cagefs.cagefs_update_progress()
if func == 'cagefs.update_done':
cagefs.cagefs_update_done()
print cagefs.get_output()
raise ExitOk('done')
except ExitOk, e:
log.write(e)
except:
print xml_error('please contact support team', code_num='1')
log.write(format_exc())
exit(0)
|
# coding: utf-8
"""
Pure Storage FlashBlade REST 1.8.1 Python SDK
Pure Storage FlashBlade REST 1.8.1 Python SDK, developed by [Pure Storage, Inc](http://www.purestorage.com/). Documentations can be found at [purity-fb.readthedocs.io](http://purity-fb.readthedocs.io/).
OpenAPI spec version: 1.8.1
Contact: info@purestorage.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class LinkAggregationGroup(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
#BEGIN_CUSTOM
# IR-51527: Prevent Pytest from attempting to collect this class based on name.
__test__ = False
#END_CUSTOM
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id': 'str',
'name': 'str',
'lag_speed': 'int',
'mac_address': 'str',
'ports': 'list[Reference]',
'port_speed': 'int',
'status': 'str'
}
attribute_map = {
'id': 'id',
'name': 'name',
'lag_speed': 'lag_speed',
'mac_address': 'mac_address',
'ports': 'ports',
'port_speed': 'port_speed',
'status': 'status'
}
def __init__(self, id=None, name=None, lag_speed=None, mac_address=None, ports=None, port_speed=None, status=None): # noqa: E501
"""LinkAggregationGroup - a model defined in Swagger""" # noqa: E501
self._id = None
self._name = None
self._lag_speed = None
self._mac_address = None
self._ports = None
self._port_speed = None
self._status = None
self.discriminator = None
if id is not None:
self.id = id
if name is not None:
self.name = name
if lag_speed is not None:
self.lag_speed = lag_speed
if mac_address is not None:
self.mac_address = mac_address
if ports is not None:
self.ports = ports
if port_speed is not None:
self.port_speed = port_speed
if status is not None:
self.status = status
@property
def id(self):
"""Gets the id of this LinkAggregationGroup. # noqa: E501
A non-modifiable, globally unique ID chosen by the system. # noqa: E501
:return: The id of this LinkAggregationGroup. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this LinkAggregationGroup.
A non-modifiable, globally unique ID chosen by the system. # noqa: E501
:param id: The id of this LinkAggregationGroup. # noqa: E501
:type: str
"""
self._id = id
@property
def name(self):
"""Gets the name of this LinkAggregationGroup. # noqa: E501
The name of the object (e.g., a file system or snapshot). # noqa: E501
:return: The name of this LinkAggregationGroup. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this LinkAggregationGroup.
The name of the object (e.g., a file system or snapshot). # noqa: E501
:param name: The name of this LinkAggregationGroup. # noqa: E501
:type: str
"""
self._name = name
@property
def lag_speed(self):
"""Gets the lag_speed of this LinkAggregationGroup. # noqa: E501
Combined speed of all ports in the LAG in bits-per-second. # noqa: E501
:return: The lag_speed of this LinkAggregationGroup. # noqa: E501
:rtype: int
"""
return self._lag_speed
@lag_speed.setter
def lag_speed(self, lag_speed):
"""Sets the lag_speed of this LinkAggregationGroup.
Combined speed of all ports in the LAG in bits-per-second. # noqa: E501
:param lag_speed: The lag_speed of this LinkAggregationGroup. # noqa: E501
:type: int
"""
self._lag_speed = lag_speed
@property
def mac_address(self):
"""Gets the mac_address of this LinkAggregationGroup. # noqa: E501
Unique MAC address assigned to the LAG # noqa: E501
:return: The mac_address of this LinkAggregationGroup. # noqa: E501
:rtype: str
"""
return self._mac_address
@mac_address.setter
def mac_address(self, mac_address):
"""Sets the mac_address of this LinkAggregationGroup.
Unique MAC address assigned to the LAG # noqa: E501
:param mac_address: The mac_address of this LinkAggregationGroup. # noqa: E501
:type: str
"""
if mac_address is not None and not re.search(r'^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$', mac_address): # noqa: E501
raise ValueError(r"Invalid value for `mac_address`, must be a follow pattern or equal to `/^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$/`") # noqa: E501
self._mac_address = mac_address
@property
def ports(self):
"""Gets the ports of this LinkAggregationGroup. # noqa: E501
Ports associated with the LAG # noqa: E501
:return: The ports of this LinkAggregationGroup. # noqa: E501
:rtype: list[Reference]
"""
return self._ports
@ports.setter
def ports(self, ports):
"""Sets the ports of this LinkAggregationGroup.
Ports associated with the LAG # noqa: E501
:param ports: The ports of this LinkAggregationGroup. # noqa: E501
:type: list[Reference]
"""
self._ports = ports
@property
def port_speed(self):
"""Gets the port_speed of this LinkAggregationGroup. # noqa: E501
Configured speed of each port in the LAG in bits-per-second # noqa: E501
:return: The port_speed of this LinkAggregationGroup. # noqa: E501
:rtype: int
"""
return self._port_speed
@port_speed.setter
def port_speed(self, port_speed):
"""Sets the port_speed of this LinkAggregationGroup.
Configured speed of each port in the LAG in bits-per-second # noqa: E501
:param port_speed: The port_speed of this LinkAggregationGroup. # noqa: E501
:type: int
"""
self._port_speed = port_speed
@property
def status(self):
"""Gets the status of this LinkAggregationGroup. # noqa: E501
Health status of the LAG. Possible values are critical, healthy, identifying, unclaimed, unhealthy, unrecognized and unused. # noqa: E501
:return: The status of this LinkAggregationGroup. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this LinkAggregationGroup.
Health status of the LAG. Possible values are critical, healthy, identifying, unclaimed, unhealthy, unrecognized and unused. # noqa: E501
:param status: The status of this LinkAggregationGroup. # noqa: E501
:type: str
"""
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(LinkAggregationGroup, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, LinkAggregationGroup):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
# qubit number=3
# total number=15
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
prog = QuantumCircuit(input_qubit)
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.cx(input_qubit[0],input_qubit[2]) # number=9
prog.x(input_qubit[2]) # number=10
prog.cx(input_qubit[0],input_qubit[2]) # number=11
prog.h(input_qubit[3]) # number=4
prog.y(input_qubit[3]) # number=5
prog.cx(input_qubit[1],input_qubit[3]) # number=12
for edge in E:
k = edge[0]
l = edge[1]
prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
prog.p(gamma, k)
prog.p(gamma, l)
prog.rx(2 * beta, range(len(V)))
prog.cx(input_qubit[1],input_qubit[0]) # number=7
prog.cx(input_qubit[1],input_qubit[0]) # number=8
prog.y(input_qubit[3]) # number=13
prog.y(input_qubit[3]) # number=14
# circuit end
return prog
if __name__ == '__main__':
n = 4
V = np.arange(0, n, 1)
E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
G = nx.Graph()
G.add_nodes_from(V)
G.add_weighted_edges_from(E)
step_size = 0.1
a_gamma = np.arange(0, np.pi, step_size)
a_beta = np.arange(0, np.pi, step_size)
a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
1 + np.cos(4 * a_gamma) ** 2)
result = np.where(F1 == np.amax(F1))
a = list(zip(result[0], result[1]))[0]
gamma = a[0] * step_size
beta = a[1] * step_size
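    # gamma and beta are taken from the grid point (multiples of step_size) at which the
    # analytic expression F1 attains its maximum over the sampled grid.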
prog = make_circuit(4)
sample_shot =3962
writefile = open("../data/startQiskit_noisy494.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = FakeYorktown()
circuit1 = transpile(prog, FakeYorktown())
circuit1.measure_all()
prog = circuit1
info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
|
import pytest
from asynch.cursors import DictCursor
from asynch.proto import constants
@pytest.mark.asyncio
async def test_fetchone(conn):
async with conn.cursor() as cursor:
await cursor.execute("SELECT 1")
ret = cursor.fetchone()
assert ret == (1,)
await cursor.execute("SELECT * FROM system.tables")
ret = cursor.fetchall()
assert isinstance(ret, list)
@pytest.mark.asyncio
async def test_fetchall(conn):
async with conn.cursor() as cursor:
await cursor.execute("SELECT 1")
ret = cursor.fetchall()
assert ret == [(1,)]
@pytest.mark.asyncio
async def test_dict_cursor(conn):
async with conn.cursor(cursor=DictCursor) as cursor:
await cursor.execute("SELECT 1")
ret = cursor.fetchall()
assert ret == [{"1": 1}]
@pytest.mark.asyncio
async def test_insert_dict(conn):
async with conn.cursor(cursor=DictCursor) as cursor:
rows = await cursor.execute(
"""INSERT INTO test.asynch(id,decimal,date,datetime,float,uuid,string,ipv4,ipv6) VALUES""",
[
{
"id": 1,
"decimal": 1,
"date": "2020-08-08",
"datetime": "2020-08-08 00:00:00",
"float": 1,
"uuid": "59e182c4-545d-4f30-8b32-cefea2d0d5ba",
"string": "1",
"ipv4": "0.0.0.0",
"ipv6": "::",
}
],
)
assert rows == 1
@pytest.mark.asyncio
async def test_insert_tuple(conn):
async with conn.cursor(cursor=DictCursor) as cursor:
rows = await cursor.execute(
"""INSERT INTO test.asynch(id,decimal,date,datetime,float,uuid,string,ipv4,ipv6) VALUES""",
[
(
1,
1,
"2020-08-08",
"2020-08-08 00:00:00",
1,
"59e182c4-545d-4f30-8b32-cefea2d0d5ba",
"1",
"0.0.0.0",
"::",
)
],
)
assert rows == 1
@pytest.mark.asyncio
async def test_executemany(conn):
async with conn.cursor(cursor=DictCursor) as cursor:
rows = await cursor.executemany(
"""INSERT INTO test.asynch(id,decimal,date,datetime,float,uuid,string,ipv4,ipv6) VALUES""",
[
(
1,
1,
"2020-08-08",
"2020-08-08 00:00:00",
1,
"59e182c4-545d-4f30-8b32-cefea2d0d5ba",
"1",
"0.0.0.0",
"::",
),
(
1,
1,
"2020-08-08",
"2020-08-08 00:00:00",
1,
"59e182c4-545d-4f30-8b32-cefea2d0d5ba",
"1",
"0.0.0.0",
"::",
),
],
)
assert rows == 2
@pytest.mark.asyncio
async def test_table_ddl(conn):
async with conn.cursor() as cursor:
await cursor.execute("drop table if exists test.alter_table")
create_table_sql = """
CREATE TABLE test.alter_table
(
`id` Int32
)
ENGINE = MergeTree
ORDER BY id
"""
await cursor.execute(create_table_sql)
add_column_sql = """alter table test.alter_table add column c String"""
await cursor.execute(add_column_sql)
show_table_sql = """show create table test.alter_table"""
await cursor.execute(show_table_sql)
assert cursor.fetchone() == (
"CREATE TABLE test.alter_table\n(\n `id` Int32,\n `c` String\n)\nENGINE = MergeTree\nORDER BY id\nSETTINGS index_granularity = 8192",
)
await cursor.execute("drop table test.alter_table")
@pytest.mark.asyncio
async def test_insert_buffer_overflow(conn):
old_buffer_size = constants.BUFFER_SIZE
constants.BUFFER_SIZE = 2 ** 6 + 1
async with conn.cursor() as cursor:
await cursor.execute("DROP TABLE if exists test.test")
create_table_sql = """CREATE TABLE test.test
(
`i` Int32,
`c1` String,
`c2` String,
`c3` String,
`c4` String
) ENGINE = MergeTree ORDER BY i"""
await cursor.execute(create_table_sql)
await cursor.execute("INSERT INTO test.test VALUES", [(1, "t", "t", "t", "t")])
await cursor.execute("DROP TABLE if exists test.test")
constants.BUFFER_SIZE = old_buffer_size
|
from .env import init_dist, get_root_logger, set_random_seed
from .train import train_detector
from .inference import inference_detector, show_result
__all__ = [
'init_dist', 'get_root_logger', 'set_random_seed', 'train_detector',
'inference_detector', 'show_result'
]
|
# Copyright 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import, division, print_function, unicode_literals
import platform
import tests.utils
from tests.utils import dedent
class PlatformTest(tests.utils.TestCase):
includes = [("@fbcode_macros//build_defs:platform_utils.bzl", "platform_utils")]
current_arch = platform.machine()
other_arch = "x86_64" if current_arch == "aarch64" else "aarch64"
third_party_config = dedent(
"""\
third_party_config = {{
"platforms": {{
"gcc5": {{
"architecture": "{current_arch}",
}},
"gcc6": {{
"architecture": "{current_arch}",
}},
"gcc7": {{
"architecture": "{current_arch}",
}},
"gcc5-other": {{
"architecture": "{other_arch}",
}},
}},
}}
""".format(
current_arch=current_arch, other_arch=other_arch
)
)
@tests.utils.with_project()
def test_transform_platform_overrides(self, root):
# This should be a load time error
platform_overrides = dedent(
"""\
platform_overrides = {
"fbcode": {
"foo/bar": ["gcc5", "gcc5-other"],
"foo": ["gcc7"],
},
}
"""
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/third_party_config.bzl", self.third_party_config
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/platform_overrides.bzl", platform_overrides
)
expected = {
"fbcode": {
"foo/bar": {self.other_arch: "gcc5-other", self.current_arch: "gcc5"},
"foo": {self.current_arch: "gcc7"},
}
}
result = root.runUnitTests(
self.includes, ["platform_utils.get_platform_overrides()"]
)
self.assertSuccess(result, expected)
@tests.utils.with_project()
def test_transform_platform_overrides_fails_with_invalid_platform(self, root):
# This should be a load time error
platform_overrides = dedent(
"""\
platform_overrides = {
"fbcode": {
"foo/bar": ["gcc5", "invalid-platform"],
},
}
"""
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/third_party_config.bzl", self.third_party_config
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/platform_overrides.bzl", platform_overrides
)
result = root.runUnitTests(
self.includes, ["platform_utils.get_platform_overrides()"]
)
self.assertFailureWithMessage(
result,
"Path foo/bar has invalid platform invalid-platform. Must be one "
"of gcc5, gcc5-other, gcc6, gcc7",
)
@tests.utils.with_project()
def test_transform_platform_overrides_fails_with_duplicate_platforms_for_arch(
self, root
):
# This should be a load time error
platform_overrides = dedent(
"""\
platform_overrides = {
"fbcode": {
"foo/bar": ["gcc5", "gcc7"],
},
}
"""
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/third_party_config.bzl", self.third_party_config
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/platform_overrides.bzl", platform_overrides
)
result = root.runUnitTests(
self.includes, ["platform_utils.get_platform_overrides()"]
)
self.assertFailureWithMessage(
result,
"Path foo/bar has both platform gcc5 and gcc7 for architecture %s"
% self.current_arch,
)
@tests.utils.with_project()
def test_get_platform_fails_when_platform_required(self, root):
statements = ['platform_utils.get_platform_for_base_path("blah")']
root.updateBuckconfig("fbcode", "require_platform", "true")
results = root.runUnitTests(self.includes, statements)
self.assertFailureWithMessage(
results,
"Cannot find fbcode platform to use for architecture {}".format(
self.current_arch
),
)
@tests.utils.with_project()
def test_get_platform_default_when_platform_not_required(self, root):
statements = ['platform_utils.get_platform_for_base_path("blah")']
results = root.runUnitTests(self.includes, statements)
self.assertSuccess(results, "default")
@tests.utils.with_project()
def test_base_name_gets_correct_platform_for_various_directories_and_archs(
self, root
):
platform_overrides = dedent(
"""\
platform_overrides = {"fbcode": {
"foo/bar": ["gcc5-other", "gcc5"],
"foo/bar-other": ["gcc5-other"],
"foo": ["gcc6"],
"": ["gcc7"],
}}
"""
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/third_party_config.bzl", self.third_party_config
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/platform_overrides.bzl", platform_overrides
)
statements = [
'platform_utils.get_platform_for_base_path("foo/bar")',
'platform_utils.get_platform_for_base_path("foo/bar-other")',
'platform_utils.get_platform_for_base_path("foo/baz")',
'platform_utils.get_platform_for_base_path("foo")',
'platform_utils.get_platform_for_base_path("foobar")',
]
result = root.runUnitTests(self.includes, statements)
self.assertSuccess(result, "gcc5", "gcc6", "gcc6", "gcc6", "gcc7")
@tests.utils.with_project()
def test_gets_correct_platform_for_various_directories_and_archs(self, root):
platform_overrides = dedent(
"""\
platform_overrides = {"fbcode": {
"foo/bar": ["gcc5-other", "gcc5"],
"foo/bar-other": ["gcc5-other"],
"foo": ["gcc6"],
"": ["gcc7"],
}}
"""
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/third_party_config.bzl", self.third_party_config
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/platform_overrides.bzl", platform_overrides
)
statements = ["platform_utils.get_platform_for_current_buildfile()"]
result1 = root.runUnitTests(self.includes, statements, buckfile="foo/bar/BUCK")
result2 = root.runUnitTests(
self.includes, statements, buckfile="foo/bar-other/BUCK"
)
result3 = root.runUnitTests(self.includes, statements, buckfile="foo/baz/BUCK")
result4 = root.runUnitTests(self.includes, statements, buckfile="foo/BUCK")
result5 = root.runUnitTests(self.includes, statements, buckfile="foobar/BUCK")
self.assertSuccess(result1, "gcc5")
self.assertSuccess(result2, "gcc6")
self.assertSuccess(result3, "gcc6")
self.assertSuccess(result4, "gcc6")
self.assertSuccess(result5, "gcc7")
@tests.utils.with_project()
def test_returns_platform_override_if_set(self, root):
platform_overrides = dedent(
"""\
platform_overrides = {"fbcode": {
"foo/bar": ["gcc5-other", "gcc5"],
"": ["gcc7"],
}}
"""
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/third_party_config.bzl", self.third_party_config
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/platform_overrides.bzl", platform_overrides
)
root.updateBuckconfig("fbcode", "platform", "gcc8")
statements = [
'platform_utils.get_platform_for_base_path("foo/bar")',
'platform_utils.get_platform_for_base_path("foobar")',
]
result = root.runUnitTests(self.includes, statements)
self.assertSuccess(result, "gcc8", "gcc8")
@tests.utils.with_project()
def test_helper_util_runs_properly(self, root):
platform_overrides = dedent(
"""\
platform_overrides = {"fbcode": {
"foo/bar": ["gcc5-other", "gcc5"],
"foo/bar-other": ["gcc5-other"],
"foo": ["gcc6"],
"": ["gcc7"],
}}
"""
)
root.project.cells["fbcode_macros"].addFile(
"build_defs/platform_overrides.bzl", platform_overrides
)
result = root.run(
[
"buck",
"run",
"fbcode_macros//tools:get_platform",
"foo/bar",
"foo/bar-other",
"foo/baz",
"foo",
"foobar",
],
{},
{},
)
self.assertSuccess(result)
expected = (
dedent(
"""
foo/bar:gcc5
foo/bar-other:gcc6
foo/baz:gcc6
foo:gcc6
foobar:gcc7
"""
)
+ "\n"
)
self.assertEqual(expected, result.stdout)
@tests.utils.with_project()
def test_get_platform_architecture(self, root):
self.assertSuccess(
root.runUnitTests(
self.includes, ['platform_utils.get_platform_architecture("gcc7")']
),
platform.machine(),
)
@tests.utils.with_project()
def test_get_all_platforms(self, root):
result = root.runUnitTests(
self.includes, ["platform_utils.get_all_platforms()"]
)
self.assertSuccess(result)
self.assertEquals(
["default", "gcc5", "gcc5-other", "gcc6", "gcc7"],
sorted(result.debug_lines[0]),
)
@tests.utils.with_project()
def test_get_platforms_for_host_architecture(self, root):
result = root.runUnitTests(
self.includes,
[
"platform_utils.get_platforms_for_host_architecture()",
],
)
self.assertSuccess(result)
self.assertEquals(
["default", "gcc5", "gcc6", "gcc7"], sorted(result.debug_lines[0])
)
@tests.utils.with_project()
def test_get_platforms_for_architecture(self, root):
result = root.runUnitTests(
self.includes,
[
'platform_utils.get_platforms_for_architecture("{}")'.format(
self.other_arch
)
],
)
self.assertSuccess(result, ["gcc5-other"])
|
import difflib
import functools
import math
import numbers
import os
import warnings
import numpy as np
from tlz import frequencies, concat
from .core import Array
from ..highlevelgraph import HighLevelGraph
from ..utils import has_keyword, ignoring, is_arraylike
try:
AxisError = np.AxisError
except AttributeError:
try:
np.array([0]).sum(axis=5)
except Exception as e:
AxisError = type(e)
def normalize_to_array(x):
if "cupy" in str(type(x)): # TODO: avoid explicit reference to cupy
return x.get()
else:
return x
def meta_from_array(x, ndim=None, dtype=None):
"""Normalize an array to appropriate meta object
Parameters
----------
x: array-like, callable
Either an object that looks sufficiently like a Numpy array,
or a callable that accepts shape and dtype keywords
ndim: int
Number of dimensions of the array
dtype: Numpy dtype
A valid input for ``np.dtype``
Returns
-------
array-like with zero elements of the correct dtype
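
    Examples
    --------
    Illustrative output for a plain NumPy input:

    >>> import numpy as np
    >>> meta = meta_from_array(np.ones((4, 4), dtype="i8"), ndim=1)
    >>> meta.shape, meta.dtype
    ((0,), dtype('int64'))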
"""
# If using x._meta, x must be a Dask Array, some libraries (e.g. zarr)
    # implement a _meta attribute that is incompatible with Dask Array._meta
if hasattr(x, "_meta") and isinstance(x, Array):
x = x._meta
if dtype is None and x is None:
raise ValueError("You must specify the meta or dtype of the array")
if np.isscalar(x):
x = np.array(x)
if x is None:
x = np.ndarray
if isinstance(x, type):
x = x(shape=(0,) * (ndim or 0), dtype=dtype)
if (
not hasattr(x, "shape")
or not hasattr(x, "dtype")
or not isinstance(x.shape, tuple)
):
return x
if isinstance(x, list) or isinstance(x, tuple):
ndims = [
0
if isinstance(a, numbers.Number)
else a.ndim
if hasattr(a, "ndim")
else len(a)
for a in x
]
a = [a if nd == 0 else meta_from_array(a, nd) for a, nd in zip(x, ndims)]
return a if isinstance(x, list) else tuple(x)
if ndim is None:
ndim = x.ndim
try:
meta = x[tuple(slice(0, 0, None) for _ in range(x.ndim))]
if meta.ndim != ndim:
if ndim > x.ndim:
meta = meta[(Ellipsis,) + tuple(None for _ in range(ndim - meta.ndim))]
meta = meta[tuple(slice(0, 0, None) for _ in range(meta.ndim))]
elif ndim == 0:
meta = meta.sum()
else:
meta = meta.reshape((0,) * ndim)
except Exception:
meta = np.empty((0,) * ndim, dtype=dtype or x.dtype)
if np.isscalar(meta):
meta = np.array(meta)
if dtype and meta.dtype != dtype:
meta = meta.astype(dtype)
return meta
def compute_meta(func, _dtype, *args, **kwargs):
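    """Best-effort computation of the output meta for ``func``.

    Calls ``func`` on zero-length meta versions of the array-like ``args``/``kwargs``
    (optionally passing ``computing_meta=True``) and casts the result to ``_dtype``
    when possible. Returns None if the call fails in a way that suggests meta
    computation is not supported by ``func``.
    """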
with np.errstate(all="ignore"), warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
args_meta = [meta_from_array(x) if is_arraylike(x) else x for x in args]
kwargs_meta = {
k: meta_from_array(v) if is_arraylike(v) else v for k, v in kwargs.items()
}
# todo: look for alternative to this, causes issues when using map_blocks()
# with np.vectorize, such as dask.array.routines._isnonzero_vec().
if isinstance(func, np.vectorize):
meta = func(*args_meta)
else:
try:
# some reduction functions need to know they are computing meta
if has_keyword(func, "computing_meta"):
kwargs_meta["computing_meta"] = True
meta = func(*args_meta, **kwargs_meta)
except TypeError as e:
if (
"unexpected keyword argument" in str(e)
or "is an invalid keyword for" in str(e)
or "Did not understand the following kwargs" in str(e)
):
raise
else:
return None
except Exception:
return None
if _dtype and getattr(meta, "dtype", None) != _dtype:
with ignoring(AttributeError):
meta = meta.astype(_dtype)
if np.isscalar(meta):
meta = np.array(meta)
return meta
def allclose(a, b, equal_nan=False, **kwargs):
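    """Like np.allclose, but normalizes device arrays to NumPy first and falls back to
    element-wise comparison for object-dtype inputs."""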
a = normalize_to_array(a)
b = normalize_to_array(b)
if getattr(a, "dtype", None) != "O":
return np.allclose(a, b, equal_nan=equal_nan, **kwargs)
if equal_nan:
return a.shape == b.shape and all(
np.isnan(b) if np.isnan(a) else a == b for (a, b) in zip(a.flat, b.flat)
)
return (a == b).all()
def same_keys(a, b):
def key(k):
if isinstance(k, str):
return (k, -1, -1, -1)
else:
return k
return sorted(a.dask, key=key) == sorted(b.dask, key=key)
def _not_empty(x):
return x.shape and 0 not in x.shape
def _check_dsk(dsk):
""" Check that graph is well named and non-overlapping """
if not isinstance(dsk, HighLevelGraph):
return
dsk.validate()
assert all(isinstance(k, (tuple, str)) for k in dsk.layers)
freqs = frequencies(concat(dsk.dicts.values()))
non_one = {k: v for k, v in freqs.items() if v != 1}
assert not non_one, non_one
def assert_eq_shape(a, b, check_nan=True):
for aa, bb in zip(a, b):
if math.isnan(aa) or math.isnan(bb):
if check_nan:
assert math.isnan(aa) == math.isnan(bb)
else:
assert aa == bb
def _get_dt_meta_computed(x, check_shape=True, check_graph=True):
x_original = x
x_meta = None
x_computed = None
if isinstance(x, Array):
assert x.dtype is not None
adt = x.dtype
if check_graph:
_check_dsk(x.dask)
x_meta = getattr(x, "_meta", None)
x = x.compute(scheduler="sync")
x_computed = x
if hasattr(x, "todense"):
x = x.todense()
if not hasattr(x, "dtype"):
x = np.array(x, dtype="O")
if _not_empty(x):
assert x.dtype == x_original.dtype
if check_shape:
assert_eq_shape(x_original.shape, x.shape, check_nan=False)
else:
if not hasattr(x, "dtype"):
x = np.array(x, dtype="O")
adt = getattr(x, "dtype", None)
return x, adt, x_meta, x_computed
def assert_eq(a, b, check_shape=True, check_graph=True, check_meta=True, **kwargs):
a_original = a
b_original = b
a, adt, a_meta, a_computed = _get_dt_meta_computed(
a, check_shape=check_shape, check_graph=check_graph
)
b, bdt, b_meta, b_computed = _get_dt_meta_computed(
b, check_shape=check_shape, check_graph=check_graph
)
if str(adt) != str(bdt):
# Ignore check for matching length of flexible dtypes, since Array._meta
# can't encode that information
if adt.type == bdt.type and not (adt.type == np.bytes_ or adt.type == np.str_):
diff = difflib.ndiff(str(adt).splitlines(), str(bdt).splitlines())
raise AssertionError(
"string repr are different" + os.linesep + os.linesep.join(diff)
)
try:
assert a.shape == b.shape
if check_meta:
if hasattr(a, "_meta") and hasattr(b, "_meta"):
assert_eq(a._meta, b._meta)
if hasattr(a_original, "_meta"):
assert a_original._meta.ndim == a.ndim
if a_meta is not None:
assert type(a_original._meta) == type(a_meta)
if not (np.isscalar(a_meta) or np.isscalar(a_computed)):
assert type(a_meta) == type(a_computed)
if hasattr(b_original, "_meta"):
assert b_original._meta.ndim == b.ndim
if b_meta is not None:
assert type(b_original._meta) == type(b_meta)
if not (np.isscalar(b_meta) or np.isscalar(b_computed)):
assert type(b_meta) == type(b_computed)
assert allclose(a, b, **kwargs)
return True
except TypeError:
pass
c = a == b
if isinstance(c, np.ndarray):
assert c.all()
else:
assert c
return True
def safe_wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS):
"""Like functools.wraps, but safe to use even if wrapped is not a function.
Only needed on Python 2.
"""
if all(hasattr(wrapped, attr) for attr in assigned):
return functools.wraps(wrapped, assigned=assigned)
else:
return lambda x: x
def empty_like_safe(a, shape, **kwargs):
"""
Return np.empty_like(a, shape=shape, **kwargs) if the shape argument
is supported (requires NumPy >= 1.17), otherwise falls back to
using the old behavior, returning np.empty(shape, **kwargs).
"""
try:
return np.empty_like(a, shape=shape, **kwargs)
except TypeError:
return np.empty(shape, **kwargs)
def full_like_safe(a, fill_value, shape, **kwargs):
"""
Return np.full_like(a, fill_value, shape=shape, **kwargs) if the
shape argument is supported (requires NumPy >= 1.17), otherwise
falls back to using the old behavior, returning
np.full(shape, fill_value, **kwargs).
"""
try:
return np.full_like(a, fill_value, shape=shape, **kwargs)
except TypeError:
return np.full(shape, fill_value, **kwargs)
def ones_like_safe(a, shape, **kwargs):
"""
Return np.ones_like(a, shape=shape, **kwargs) if the shape argument
is supported (requires NumPy >= 1.17), otherwise falls back to
using the old behavior, returning np.ones(shape, **kwargs).
"""
try:
return np.ones_like(a, shape=shape, **kwargs)
except TypeError:
return np.ones(shape, **kwargs)
def zeros_like_safe(a, shape, **kwargs):
"""
Return np.zeros_like(a, shape=shape, **kwargs) if the shape argument
is supported (requires NumPy >= 1.17), otherwise falls back to
using the old behavior, returning np.zeros(shape, **kwargs).
"""
try:
return np.zeros_like(a, shape=shape, **kwargs)
except TypeError:
return np.zeros(shape, **kwargs)
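# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# The *_like_safe helpers above forward shape= to np.*_like on NumPy >= 1.17 and
# fall back to the plain constructors on older releases; note that the fallback
# path does not preserve the prototype's dtype. The helper name is hypothetical.
def _like_safe_example():
    a = np.arange(6, dtype="float32")
    z = zeros_like_safe(a, shape=(2, 3))   # float32 zeros of shape (2, 3) on NumPy >= 1.17
    f = full_like_safe(a, 7, shape=(4,))   # float32 array of sevens on NumPy >= 1.17
    return z, f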
def validate_axis(axis, ndim):
""" Validate an input to axis= keywords """
if isinstance(axis, (tuple, list)):
return tuple(validate_axis(ax, ndim) for ax in axis)
if not isinstance(axis, numbers.Integral):
raise TypeError("Axis value must be an integer, got %s" % axis)
if axis < -ndim or axis >= ndim:
raise AxisError(
"Axis %d is out of bounds for array of dimension %d" % (axis, ndim)
)
if axis < 0:
axis += ndim
return axis
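# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# validate_axis above normalizes negative axes, validates tuples element-wise and
# rejects non-integer or out-of-range values. The helper name is hypothetical.
def _validate_axis_example():
    assert validate_axis(-1, 3) == 2
    assert validate_axis((0, -2), 3) == (0, 1)
    try:
        validate_axis(3, 3)
    except AxisError:
        pass  # axis 3 is out of bounds for a 3-dimensional array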
def svd_flip(u, v, u_based_decision=False):
"""Sign correction to ensure deterministic output from SVD.
This function is useful for orienting eigenvectors such that
they all lie in a shared but arbitrary half-space. This makes
it possible to ensure that results are equivalent across SVD
implementations and random number generator states.
Parameters
----------
u : (M, K) array_like
Left singular vectors (in columns)
v : (K, N) array_like
Right singular vectors (in rows)
u_based_decision: bool
Whether or not to choose signs based
on `u` rather than `v`, by default False
Returns
-------
u : (M, K) array_like
Left singular vectors with corrected sign
v: (K, N) array_like
Right singular vectors with corrected sign
"""
# Determine half-space in which all singular vectors
# lie relative to an arbitrary vector; summation
# equivalent to dot product with row vector of ones
if u_based_decision:
dtype = u.dtype
signs = np.sum(u, axis=0, keepdims=True)
else:
dtype = v.dtype
signs = np.sum(v, axis=1, keepdims=True).T
signs = dtype.type(2) * ((signs >= 0) - dtype.type(0.5))
# Force all singular vectors into same half-space
u, v = u * signs, v * signs.T
return u, v
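# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# svd_flip above multiplies u's columns and v's rows by +/-1 so that the summed
# singular vectors point into a fixed half-space; the product u @ diag(s) @ v is
# unchanged, so reconstructions agree while signs become deterministic.
# The helper name is hypothetical.
def _svd_flip_example():
    rng = np.random.RandomState(0)
    a = rng.standard_normal((5, 3))
    u, s, v = np.linalg.svd(a, full_matrices=False)
    u2, v2 = svd_flip(u, v)
    assert np.allclose(u @ np.diag(s) @ v, u2 @ np.diag(s) @ v2)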
def _is_nep18_active():
class A:
def __array_function__(self, *args, **kwargs):
return True
try:
return np.concatenate([A()])
except ValueError:
return False
IS_NEP18_ACTIVE = _is_nep18_active()
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.21
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1beta1APIService(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'kind': 'str',
'metadata': 'V1ObjectMeta',
'spec': 'V1beta1APIServiceSpec',
'status': 'V1beta1APIServiceStatus'
}
attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'metadata': 'metadata',
'spec': 'spec',
'status': 'status'
}
def __init__(self, api_version=None, kind=None, metadata=None, spec=None, status=None, local_vars_configuration=None): # noqa: E501
"""V1beta1APIService - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._kind = None
self._metadata = None
self._spec = None
self._status = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
if spec is not None:
self.spec = spec
if status is not None:
self.status = status
@property
def api_version(self):
"""Gets the api_version of this V1beta1APIService. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1beta1APIService. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1beta1APIService.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1beta1APIService. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""Gets the kind of this V1beta1APIService. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1beta1APIService. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1beta1APIService.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1beta1APIService. # noqa: E501
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""Gets the metadata of this V1beta1APIService. # noqa: E501
:return: The metadata of this V1beta1APIService. # noqa: E501
:rtype: V1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1beta1APIService.
:param metadata: The metadata of this V1beta1APIService. # noqa: E501
:type: V1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""Gets the spec of this V1beta1APIService. # noqa: E501
:return: The spec of this V1beta1APIService. # noqa: E501
:rtype: V1beta1APIServiceSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""Sets the spec of this V1beta1APIService.
:param spec: The spec of this V1beta1APIService. # noqa: E501
:type: V1beta1APIServiceSpec
"""
self._spec = spec
@property
def status(self):
"""Gets the status of this V1beta1APIService. # noqa: E501
:return: The status of this V1beta1APIService. # noqa: E501
:rtype: V1beta1APIServiceStatus
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this V1beta1APIService.
:param status: The status of this V1beta1APIService. # noqa: E501
:type: V1beta1APIServiceStatus
"""
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1beta1APIService):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1beta1APIService):
return True
return self.to_dict() != other.to_dict()
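# --- Illustrative usage sketch (added for clarity; not part of the generated module) ---
# The generated model exposes plain property setters plus to_dict()/to_str();
# the field values below are hypothetical.
def _v1beta1_api_service_example():
    svc = V1beta1APIService(api_version='apiregistration.k8s.io/v1beta1', kind='APIService')
    return svc.to_dict()  # e.g. {'api_version': 'apiregistration.k8s.io/v1beta1', 'kind': 'APIService', ...}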
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Optional, TYPE_CHECKING
from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
from ._configuration import ComputeManagementClientConfiguration
from .operations import Operations
from .operations import AvailabilitySetsOperations
from .operations import ProximityPlacementGroupsOperations
from .operations import VirtualMachineExtensionImagesOperations
from .operations import VirtualMachineExtensionsOperations
from .operations import VirtualMachineImagesOperations
from .operations import UsageOperations
from .operations import VirtualMachinesOperations
from .operations import VirtualMachineSizesOperations
from .operations import ImagesOperations
from .operations import VirtualMachineScaleSetsOperations
from .operations import VirtualMachineScaleSetExtensionsOperations
from .operations import VirtualMachineScaleSetRollingUpgradesOperations
from .operations import VirtualMachineScaleSetVMsOperations
from .operations import LogAnalyticsOperations
from .operations import VirtualMachineRunCommandsOperations
from .operations import GalleriesOperations
from .operations import GalleryImagesOperations
from .operations import GalleryImageVersionsOperations
from .operations import DisksOperations
from .operations import SnapshotsOperations
from .. import models
class ComputeManagementClient(object):
"""Compute Client.
:ivar operations: Operations operations
:vartype operations: azure.mgmt.compute.v2018_06_01.aio.operations.Operations
:ivar availability_sets: AvailabilitySetsOperations operations
:vartype availability_sets: azure.mgmt.compute.v2018_06_01.aio.operations.AvailabilitySetsOperations
:ivar proximity_placement_groups: ProximityPlacementGroupsOperations operations
:vartype proximity_placement_groups: azure.mgmt.compute.v2018_06_01.aio.operations.ProximityPlacementGroupsOperations
:ivar virtual_machine_extension_images: VirtualMachineExtensionImagesOperations operations
:vartype virtual_machine_extension_images: azure.mgmt.compute.v2018_06_01.aio.operations.VirtualMachineExtensionImagesOperations
:ivar virtual_machine_extensions: VirtualMachineExtensionsOperations operations
:vartype virtual_machine_extensions: azure.mgmt.compute.v2018_06_01.aio.operations.VirtualMachineExtensionsOperations
:ivar virtual_machine_images: VirtualMachineImagesOperations operations
:vartype virtual_machine_images: azure.mgmt.compute.v2018_06_01.aio.operations.VirtualMachineImagesOperations
:ivar usage: UsageOperations operations
:vartype usage: azure.mgmt.compute.v2018_06_01.aio.operations.UsageOperations
:ivar virtual_machines: VirtualMachinesOperations operations
:vartype virtual_machines: azure.mgmt.compute.v2018_06_01.aio.operations.VirtualMachinesOperations
:ivar virtual_machine_sizes: VirtualMachineSizesOperations operations
:vartype virtual_machine_sizes: azure.mgmt.compute.v2018_06_01.aio.operations.VirtualMachineSizesOperations
:ivar images: ImagesOperations operations
:vartype images: azure.mgmt.compute.v2018_06_01.aio.operations.ImagesOperations
:ivar virtual_machine_scale_sets: VirtualMachineScaleSetsOperations operations
:vartype virtual_machine_scale_sets: azure.mgmt.compute.v2018_06_01.aio.operations.VirtualMachineScaleSetsOperations
:ivar virtual_machine_scale_set_extensions: VirtualMachineScaleSetExtensionsOperations operations
:vartype virtual_machine_scale_set_extensions: azure.mgmt.compute.v2018_06_01.aio.operations.VirtualMachineScaleSetExtensionsOperations
:ivar virtual_machine_scale_set_rolling_upgrades: VirtualMachineScaleSetRollingUpgradesOperations operations
:vartype virtual_machine_scale_set_rolling_upgrades: azure.mgmt.compute.v2018_06_01.aio.operations.VirtualMachineScaleSetRollingUpgradesOperations
:ivar virtual_machine_scale_set_vms: VirtualMachineScaleSetVMsOperations operations
:vartype virtual_machine_scale_set_vms: azure.mgmt.compute.v2018_06_01.aio.operations.VirtualMachineScaleSetVMsOperations
:ivar log_analytics: LogAnalyticsOperations operations
:vartype log_analytics: azure.mgmt.compute.v2018_06_01.aio.operations.LogAnalyticsOperations
:ivar virtual_machine_run_commands: VirtualMachineRunCommandsOperations operations
:vartype virtual_machine_run_commands: azure.mgmt.compute.v2018_06_01.aio.operations.VirtualMachineRunCommandsOperations
:ivar galleries: GalleriesOperations operations
:vartype galleries: azure.mgmt.compute.v2018_06_01.aio.operations.GalleriesOperations
:ivar gallery_images: GalleryImagesOperations operations
:vartype gallery_images: azure.mgmt.compute.v2018_06_01.aio.operations.GalleryImagesOperations
:ivar gallery_image_versions: GalleryImageVersionsOperations operations
:vartype gallery_image_versions: azure.mgmt.compute.v2018_06_01.aio.operations.GalleryImageVersionsOperations
:ivar disks: DisksOperations operations
:vartype disks: azure.mgmt.compute.v2018_06_01.aio.operations.DisksOperations
:ivar snapshots: SnapshotsOperations operations
:vartype snapshots: azure.mgmt.compute.v2018_06_01.aio.operations.SnapshotsOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: Subscription credentials which uniquely identify Microsoft Azure subscription. The subscription ID forms part of the URI for every service call.
:type subscription_id: str
:param str base_url: Service URL
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: Optional[str] = None,
**kwargs: Any
) -> None:
if not base_url:
base_url = 'https://management.azure.com'
self._config = ComputeManagementClientConfiguration(credential, subscription_id, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.operations = Operations(
self._client, self._config, self._serialize, self._deserialize)
self.availability_sets = AvailabilitySetsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.proximity_placement_groups = ProximityPlacementGroupsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.virtual_machine_extension_images = VirtualMachineExtensionImagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.virtual_machine_extensions = VirtualMachineExtensionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.virtual_machine_images = VirtualMachineImagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.usage = UsageOperations(
self._client, self._config, self._serialize, self._deserialize)
self.virtual_machines = VirtualMachinesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.virtual_machine_sizes = VirtualMachineSizesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.images = ImagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.virtual_machine_scale_sets = VirtualMachineScaleSetsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.virtual_machine_scale_set_extensions = VirtualMachineScaleSetExtensionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.virtual_machine_scale_set_rolling_upgrades = VirtualMachineScaleSetRollingUpgradesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.virtual_machine_scale_set_vms = VirtualMachineScaleSetVMsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.log_analytics = LogAnalyticsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.virtual_machine_run_commands = VirtualMachineRunCommandsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.galleries = GalleriesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.gallery_images = GalleryImagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.gallery_image_versions = GalleryImageVersionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.disks = DisksOperations(
self._client, self._config, self._serialize, self._deserialize)
self.snapshots = SnapshotsOperations(
self._client, self._config, self._serialize, self._deserialize)
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "ComputeManagementClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details) -> None:
await self._client.__aexit__(*exc_details)
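# --- Illustrative usage sketch (added for clarity; not part of the generated module) ---
# The client above is an async context manager; a typical call site might look like
# the following. azure-identity is assumed to be installed and the subscription id
# is a placeholder.
async def _compute_client_example():
    from azure.identity.aio import DefaultAzureCredential  # assumed available
    async with ComputeManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
        async for vm in client.virtual_machines.list_all():
            print(vm.name)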
|
# https://www.hackerrank.com/challenges/python-sort-sort/problem
import math
import os
import random
import re
import sys
if __name__ == "__main__":
N, M = map(int, input().split())
arr = [list(map(int, input().split())) for _ in range(N)]
K = int(input())
arr.sort(key=lambda x: x[K])
[print(*el) for el in arr]
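# --- Illustrative example (added for clarity; not part of the original solution) ---
# For N=3, M=3, K=1 and rows [[10, 2, 5], [7, 1, 0], [9, 9, 9]], sorting on the
# second column (index 1) prints:
#   7 1 0
#   10 2 5
#   9 9 9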
|
import androidhelper
droid = androidhelper.Android()
import math
# define the Tocka (point) class
class Tocka (object):
def __init__(self,rbr,y,x,z):
self.rbr = str(rbr)
self.y = float(y)
self.x = float(x)
self.z = float(z)
def smjerni(self, other):  # bearing (direction angle) from self to other, in radians
dy = (other.y)-(self.y)
dx = (other.x)-(self.x)
if dx==0.:
if dy==0.:
s_kut=0.
return s_kut
elif other.y<self.y:
s_kut=math.pi/2.
return s_kut
elif self.y<other.y:
s_kut=math.pi+math.pi/2.
return s_kut
# 1st quadrant: dy+ dx+
elif dy>=0. and dx>=0. :
s_kut=math.atan(dy/dx)
return s_kut
# 2nd quadrant: dy+ dx-
elif dy>=0. and dx<=0.:
s_kut=math.atan(dy/dx)+math.pi
return s_kut
# 3rd quadrant: dy- dx-
elif dy<=0. and dx<=0.:
s_kut=math.atan(dy/dx)+math.pi
return s_kut
# 4th quadrant: dy- dx+
elif dy<=0. and dx>=0. :
s_kut=math.atan(dy/dx)+math.pi*2.
return s_kut
# define the distance (length) function
def duljina(self,other):
dy=(other.y)-(self.y)
dx=(other.x)-(self.x)
duljina=math.sqrt(dy**2+dx**2)
return duljina
# define addition of two points
def __add__(self,other):
zbroj = Tocka("zbroj:"+str(self.rbr)+"_"+str(other.rbr), self.y+other.y, self.x+other.x, self.z+other.z)
return zbroj
# define printing of a point
def __str__(self):
return str(self.rbr)+" "+str(self.y)+" "+str(self.x)+" "+str(self.z)
# define subtraction of two points
def __sub__(self, other):
minus = Tocka("minus:"+str(self.rbr)+"_"+str(other.rbr), self.y-other.y, self.x-other.x, self.z-other.z)
return minus
A = droid.dialogGetInput("Stajaliste","ime,y,x,h","").result  # station point ("Stajaliste"): name,y,x,h
A_lista = A.split(",")
A = Tocka(A_lista[0],A_lista[1],A_lista[2],A_lista[3])
B = droid.dialogGetInput("Orijentacija","ime,y,x,h","").result  # orientation point ("Orijentacija")
B_lista = B.split(",")
B = Tocka(B_lista[0],B_lista[1],B_lista[2],B_lista[3])
C = droid.dialogGetInput("Detalj","ime,y,x,h","").result  # detail point ("Detalj")
C_lista = C.split(",")
C = Tocka(C_lista[0],C_lista[1],C_lista[2],C_lista[3])
alfa = A.smjerni(C)-A.smjerni(B)
# normalize the direction angle toward the detail point into [0, 2*pi)
if alfa==0.:
alfa=0.
elif alfa<0.:
alfa+=math.pi*2
elif alfa>=2*math.pi:
alfa-=math.pi*2
d=round(A.duljina(C),3)
kut_DEG=alfa*180/math.pi
kut_D=int(kut_DEG)
temp=60*(kut_DEG-kut_D)
kut_M=int(temp)
kut_S=int(math.ceil(60*(temp-kut_M)))
kut=str(kut_D)+"-"+str(kut_M)+"-"+str(kut_S)
print("duljina", d)  # distance
print("kut", kut)    # angle as a D-M-S string
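# --- Illustrative worked example (added for clarity; not part of the original script) ---
# For alfa = 0.5 rad: 0.5 * 180/pi = 28.6479 deg -> kut_D = 28,
# 60 * 0.6479 = 38.87 -> kut_M = 38, ceil(60 * 0.87) = 53 -> kut_S = 53,
# so the printed angle string would be "28-38-53".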
|
#! /usr/bin/python
#
# Copyright (c) 2012 Tresys Technology LLC, Columbia, Maryland, USA
#
# This software was developed by Tresys Technology LLC
# with U.S. Government sponsorship.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys, re, os
def map_fixes():
FIXDIR = "scap-security-guide/RHEL6/input/fixes/"
PROFILES = "scap-security-guide/RHEL6/input/profiles/"
if len(sys.argv) != 2:
BASEDIR="/usr/local/"
else:
BASEDIR=sys.argv[1]
MANUAL = "/usr/libexec/aqueduct/SSG/tools/manual.xml"
FIXDIR = BASEDIR+FIXDIR
PROFILES = BASEDIR+PROFILES
COMMON_PROFILE = PROFILES+"common.xml"
FIX_FILE = FIXDIR+"bash-ks.xml"
exclusions=""
legit_scripts = os.listdir("/usr/libexec/aqueduct/SSG/scripts/")
if os.path.exists(MANUAL):
with open(MANUAL, "r") as manual_file:
exclusions = manual_file.read()
with open(FIX_FILE, "w") as fixes:
fixes.write('<fix-group id="bash" system="urn:xccdf:fix:script:bash" xmlns="http://checklists.nist.gov/xccdf/1.1">\n')
fixes.write('<!-- TODO: Add environment variables to each script. -->\n')
with open(COMMON_PROFILE, "r") as profile:
for line in profile:
inclusion = re.search(r'idref="[^"]*"', line)
if inclusion and (str.find(exclusions, line) < 0):
inclusion = re.sub(r'(idref="|")', "", inclusion.group(0))
if (inclusion+".sh" in legit_scripts):
fixes.write("<fix rule=\"%s\"> {\"script\" : \"/usr/libexec/aqueduct/SSG/scripts/%s.sh\"} </fix>\n" %(inclusion, inclusion))
fixes.write('<!-- Manual content. -->\n')
exclusions = re.sub("(<.*Profile.*)|(<title.*)|(<description.*)", "", exclusions)
manual_content = re.sub("(<select idref=)\"([^\"]*)\".*", "<fix rule=\"\\2\"> {\"script\" : \"/usr/libexec/aqueduct/SSG/scripts/\\2.sh\"} </fix>", exclusions)
fixes.write(manual_content)
fixes.write("</fix-group>")
map_fixes()
|
import logging
import sys
# Log configurations
LOG_FORMAT = (
"%(asctime)s [%(levelname)s] %(name)s:%(lineno)d: %(message)s"
)
LOG_FORMATTER = logging.Formatter(
LOG_FORMAT,
datefmt="%Y-%m-%d %H:%M:%S"
)
LOG_FILE = "main.log"
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
file_handler = logging.FileHandler(LOG_FILE)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(LOG_FORMATTER)
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setLevel(logging.DEBUG)
console_handler.setFormatter(LOG_FORMATTER)
logger.addHandler(file_handler)
logger.addHandler(console_handler)
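# --- Illustrative usage sketch (added for clarity; not part of the original module) ---
# Importing this module configures the root logger once; other modules then log
# through their own named loggers and inherit the file + console handlers.
if __name__ == "__main__":
    logging.getLogger("example.module").info("this line goes to main.log and stdout")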
|
from person_start import Person
bob = Person('Bob Smith', 42)
sue = Person('Sue Jones', 45, 40000)
people = [bob, sue]
for person in people:
print(person.name, person.pay)
x = [(person.name, person.pay) for person in people]
print(x)
x = [rec.name for rec in people if rec.age >= 45]
print(x)
x = [(rec.age ** 2 if rec.age >= 45 else rec.age) for rec in people]
print(x)
from person import Person
from manager import Manager
bob = Person(name = 'Bob Smith', age = 42, pay = 10000)
sue = Person(name = 'Sue Jones', age = 45, pay = 20000)
tom = Manager(name = 'Tom Doe', age = 55, pay = 30000)
db = [bob, sue, tom]
for obj in db:
obj.giveRaise(.10)
for obj in db:
print(obj.lastName(), '=>', obj.pay)
|
# Author : github.com/slingthy
# Time : March 4, 2020
# ---------------------------------------------
# The purpose is to extract one member's messages
# from a WeChat group chat and store them in a docx file.
# !!!TEXT messages only.
# ---------------------------------------------
# Before RUN:
# 1. COPY the WeChat Group message
# 2. PASTE into Word document as 'xxxxxx.docx'
# 3. SAVE it, do NOT make any change
# ---------------------------------------------
# Dependencies : python-docx, lxml
from docx import Document
from time import strftime,localtime
import os
def pymain(pathroad,groupmember,savename):
when=strftime("%m.%d", localtime())
D = Document(pathroad)
D2= Document()
para=D.paragraphs
# validate inputs: the member must appear somewhere and a save name is required
if all(p.text.find(groupmember) == -1 for p in para):
    raise ValueError('group member not found in the document')
if savename == '':
    raise ValueError('save name must not be empty')
for i in range(len(para)):
if para[i].text.find(":")>-1 and para[i].text.find(groupmember)==-1:
para[i].clear()
try:
j=1
while para[i+j].text.find(groupmember+':')==-1:
para[i+j].clear()
j+=1
except IndexError:
continue
elif para[i].text.find(groupmember)>-1:
para[i].clear()
null_list=[p.text for p in para]
true_list = [i for i in null_list if i != '']
data='\n'.join(true_list)
D2.add_paragraph(when,style='Heading 1')
D2.add_paragraph(data,style='Normal')
D2.save(os.path.join(os.path.dirname(pathroad),savename+'.docx'))
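# --- Illustrative usage sketch (added for clarity; not part of the original script) ---
# The document path, member name and output name below are hypothetical placeholders.
if __name__ == '__main__':
    pymain(r'C:\wechat\group_dump.docx', 'Alice', 'alice_messages')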
|
# exported from PySB model 'model'
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
Model()
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU', 'C3pro'])
Monomer('SmacM', ['BaxA'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('SmacC', ['Xiap'])
Monomer('ParpC')
Monomer('Xiap', ['SmacC', 'Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd', 'C6A'])
Monomer('Bcl2', ['BidM', 'BaxA'])
Monomer('C3pro', ['Apop', 'C8A'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
Monomer('BaxA', ['BaxM', 'Bcl2', 'BaxA_1', 'BaxA_2', 'SmacM', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU', 'C6pro'])
Monomer('ApafA')
Monomer('BidM', ['BaxM', 'Bcl2'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Monomer('C6A', ['C8pro'])
Monomer('C6pro', ['C3A'])
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_2kf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_1kr', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2kf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1kr', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr', 1.0)
Parameter('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('SmacM_0', 100000.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('SmacC_0', 0.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 10000.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('Bcl2_0', 140000.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
Parameter('C6A_0', 0.0)
Parameter('C6pro_0', 100.0)
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('SmacM_obs', SmacM())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('SmacC_obs', SmacC())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('Bcl2_obs', Bcl2())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
Observable('C6A_obs', C6A())
Observable('C6pro_obs', C6pro())
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None, C6A=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None, C3pro=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None, C3pro=None) + BidU(C8A=None) | C8A(BidU=1, C3pro=None) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1, C3pro=None) % BidU(C8A=1) >> C8A(BidU=None, C3pro=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, Apop=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, Apop=None, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None, C8A=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(SmacC=None, Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(SmacC=None, Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None, C6pro=None) | Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None) >> Xiap(SmacC=None, Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None, C6pro=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None, Bcl2=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None, Bcl2=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None, Bcl2=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('inhibition_0_Bcl2_inhibitor_BidM_inh_target', Bcl2(BidM=None, BaxA=None) + BidM(BaxM=None, Bcl2=None) | Bcl2(BidM=1, BaxA=None) % BidM(BaxM=None, Bcl2=1), inhibition_0_Bcl2_inhibitor_BidM_inh_target_2kf, inhibition_0_Bcl2_inhibitor_BidM_inh_target_1kr)
Rule('inhibition_0_Bcl2_inhibitor_BaxA_inh_target', Bcl2(BidM=None, BaxA=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | Bcl2(BidM=None, BaxA=1) % BaxA(BaxM=None, Bcl2=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2kf, inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1kr)
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
Rule('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=None) + C3pro(Apop=None, C8A=None) | C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1), catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1) >> C8A(BidU=None, C3pro=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=None) + C6pro(C3A=None) | C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1), catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf, catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr)
Rule('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + C6A(C8pro=None), catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc)
Rule('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=None) + C8pro(Fadd=None, C6A=None) | C6A(C8pro=1) % C8pro(Fadd=None, C6A=1), catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf, catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr)
Rule('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=1) % C8pro(Fadd=None, C6A=1) >> C6A(C8pro=None) + C8A(BidU=None, C3pro=None), catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc)
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None, C3pro=None), C8A_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(SmacC=None, Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None, C6A=None), C8pro_0)
Initial(Bcl2(BidM=None, BaxA=None), Bcl2_0)
Initial(C3pro(Apop=None, C8A=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None, C6pro=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None, Bcl2=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
Initial(C6A(C8pro=None), C6A_0)
Initial(C6pro(C3A=None), C6pro_0)
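# --- Illustrative usage sketch (added for clarity; not part of the exported model) ---
# One way to run the model above, assuming pysb and scipy are installed: integrate
# the ODEs over a time span and read an observable trajectory.
if __name__ == '__main__':
    import numpy as np
    from pysb.simulator import ScipyOdeSimulator
    tspan = np.linspace(0, 20000, 100)
    result = ScipyOdeSimulator(model, tspan=tspan).run()
    print(result.observables['ParpC_obs'][-1])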
|
# Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
import copy
import unittest
from typing import Dict, List
from databuilder.models.table_metadata import ColumnMetadata, TableMetadata
from databuilder.models.type_metadata import ArrayTypeMetadata, TypeMetadata
from databuilder.serializers import (
mysql_serializer, neo4_serializer, neptune_serializer,
)
from tests.unit.models.test_fixtures.table_metadata_fixtures import (
EXPECTED_NEPTUNE_NODES, EXPECTED_RECORDS_MYSQL, EXPECTED_RELATIONSHIPS_NEPTUNE,
)
class TestTableMetadata(unittest.TestCase):
def setUp(self) -> None:
super(TestTableMetadata, self).setUp()
TableMetadata.serialized_nodes_keys = set()
TableMetadata.serialized_rels_keys = set()
column_with_type_metadata = ColumnMetadata('has_nested_type', 'column with nested types',
'array<array<array<string>>>', 6)
column_with_type_metadata.set_column_key('hive://gold.test_schema1/test_table1/has_nested_type')
column_with_type_metadata.set_type_metadata(self._set_up_type_metadata(column_with_type_metadata))
self.table_metadata = TableMetadata(
'hive',
'gold',
'test_schema1',
'test_table1',
'test_table1',
[
ColumnMetadata('test_id1', 'description of test_table1', 'bigint', 0),
ColumnMetadata('test_id2', 'description of test_id2', 'bigint', 1),
ColumnMetadata('is_active', None, 'boolean', 2),
ColumnMetadata('source', 'description of source', 'varchar', 3),
ColumnMetadata('etl_created_at', 'description of etl_created_at', 'timestamp', 4),
ColumnMetadata('ds', None, 'varchar', 5),
column_with_type_metadata
]
)
self.table_metadata2 = TableMetadata(
'hive',
'gold',
'test_schema1',
'test_table1',
'test_table1',
[
ColumnMetadata('test_id1', 'description of test_table1', 'bigint', 0),
ColumnMetadata('test_id2', 'description of test_id2', 'bigint', 1),
ColumnMetadata('is_active', None, 'boolean', 2),
ColumnMetadata('source', 'description of source', 'varchar', 3),
ColumnMetadata('etl_created_at', 'description of etl_created_at', 'timestamp', 4),
ColumnMetadata('ds', None, 'varchar', 5),
column_with_type_metadata
]
)
def _set_up_type_metadata(self, parent_column: ColumnMetadata) -> TypeMetadata:
array_type_metadata = ArrayTypeMetadata(
name='has_nested_type',
parent=parent_column,
type_str='array<array<array<string>>>'
)
nested_array_type_metadata_level1 = ArrayTypeMetadata(
name='_inner_',
parent=array_type_metadata,
type_str='array<array<string>>'
)
nested_array_type_metadata_level2 = ArrayTypeMetadata(
name='_inner_',
parent=nested_array_type_metadata_level1,
type_str='array<string>'
)
array_type_metadata.array_inner_type = nested_array_type_metadata_level1
nested_array_type_metadata_level1.array_inner_type = nested_array_type_metadata_level2
return array_type_metadata
def test_serialize(self) -> None:
self.expected_nodes_deduped = [
{'name': 'test_table1', 'KEY': 'hive://gold.test_schema1/test_table1', 'LABEL': 'Table',
'is_view:UNQUOTED': False},
{'description': 'test_table1', 'KEY': 'hive://gold.test_schema1/test_table1/_description',
'LABEL': 'Description', 'description_source': 'description'},
{'sort_order:UNQUOTED': 0, 'col_type': 'bigint', 'name': 'test_id1',
'KEY': 'hive://gold.test_schema1/test_table1/test_id1', 'LABEL': 'Column'},
{'description': 'description of test_table1',
'KEY': 'hive://gold.test_schema1/test_table1/test_id1/_description', 'LABEL': 'Description',
'description_source': 'description'},
{'sort_order:UNQUOTED': 1, 'col_type': 'bigint', 'name': 'test_id2',
'KEY': 'hive://gold.test_schema1/test_table1/test_id2', 'LABEL': 'Column'},
{'description': 'description of test_id2',
'KEY': 'hive://gold.test_schema1/test_table1/test_id2/_description',
'LABEL': 'Description', 'description_source': 'description'},
{'sort_order:UNQUOTED': 2, 'col_type': 'boolean', 'name': 'is_active',
'KEY': 'hive://gold.test_schema1/test_table1/is_active', 'LABEL': 'Column'},
{'sort_order:UNQUOTED': 3, 'col_type': 'varchar', 'name': 'source',
'KEY': 'hive://gold.test_schema1/test_table1/source', 'LABEL': 'Column'},
{'description': 'description of source', 'KEY': 'hive://gold.test_schema1/test_table1/source/_description',
'LABEL': 'Description', 'description_source': 'description'},
{'sort_order:UNQUOTED': 4, 'col_type': 'timestamp', 'name': 'etl_created_at',
'KEY': 'hive://gold.test_schema1/test_table1/etl_created_at', 'LABEL': 'Column'},
{'description': 'description of etl_created_at',
'KEY': 'hive://gold.test_schema1/test_table1/etl_created_at/_description', 'LABEL': 'Description',
'description_source': 'description'},
{'sort_order:UNQUOTED': 5, 'col_type': 'varchar', 'name': 'ds',
'KEY': 'hive://gold.test_schema1/test_table1/ds', 'LABEL': 'Column'},
{'sort_order:UNQUOTED': 6, 'col_type': 'array<array<array<string>>>',
'name': 'has_nested_type', 'KEY': 'hive://gold.test_schema1/test_table1/has_nested_type',
'LABEL': 'Column'},
{'description': 'column with nested types',
'KEY': 'hive://gold.test_schema1/test_table1/has_nested_type/_description', 'LABEL': 'Description',
'description_source': 'description'},
{'kind': 'array', 'name': 'has_nested_type', 'LABEL': 'Type_Metadata',
'data_type': 'array<array<array<string>>>',
'KEY': 'hive://gold.test_schema1/test_table1/has_nested_type/type/has_nested_type'},
{'kind': 'array', 'name': '_inner_', 'LABEL': 'Type_Metadata', 'data_type': 'array<array<string>>',
'KEY': 'hive://gold.test_schema1/test_table1/has_nested_type/type/has_nested_type/_inner_'},
{'kind': 'array', 'name': '_inner_', 'LABEL': 'Type_Metadata', 'data_type': 'array<string>',
'KEY': 'hive://gold.test_schema1/test_table1/has_nested_type/type/has_nested_type/_inner_/_inner_'}
]
self.expected_nodes = copy.deepcopy(self.expected_nodes_deduped)
self.expected_nodes.append({'name': 'hive', 'KEY': 'database://hive', 'LABEL': 'Database'})
self.expected_nodes.append({'name': 'gold', 'KEY': 'hive://gold', 'LABEL': 'Cluster'})
self.expected_nodes.append({'name': 'test_schema1', 'KEY': 'hive://gold.test_schema1', 'LABEL': 'Schema'})
self.expected_rels_deduped = [
{'END_KEY': 'hive://gold.test_schema1/test_table1', 'START_LABEL': 'Schema', 'END_LABEL': 'Table',
'START_KEY': 'hive://gold.test_schema1', 'TYPE': 'TABLE', 'REVERSE_TYPE': 'TABLE_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/_description', 'START_LABEL': 'Table',
'END_LABEL': 'Description', 'START_KEY': 'hive://gold.test_schema1/test_table1', 'TYPE': 'DESCRIPTION',
'REVERSE_TYPE': 'DESCRIPTION_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/test_id1', 'START_LABEL': 'Table',
'END_LABEL': 'Column', 'START_KEY': 'hive://gold.test_schema1/test_table1', 'TYPE': 'COLUMN',
'REVERSE_TYPE': 'COLUMN_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/test_id1/_description', 'START_LABEL': 'Column',
'END_LABEL': 'Description', 'START_KEY': 'hive://gold.test_schema1/test_table1/test_id1',
'TYPE': 'DESCRIPTION', 'REVERSE_TYPE': 'DESCRIPTION_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/test_id2', 'START_LABEL': 'Table', 'END_LABEL': 'Column',
'START_KEY': 'hive://gold.test_schema1/test_table1', 'TYPE': 'COLUMN', 'REVERSE_TYPE': 'COLUMN_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/test_id2/_description', 'START_LABEL': 'Column',
'END_LABEL': 'Description', 'START_KEY': 'hive://gold.test_schema1/test_table1/test_id2',
'TYPE': 'DESCRIPTION', 'REVERSE_TYPE': 'DESCRIPTION_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/is_active', 'START_LABEL': 'Table', 'END_LABEL': 'Column',
'START_KEY': 'hive://gold.test_schema1/test_table1', 'TYPE': 'COLUMN', 'REVERSE_TYPE': 'COLUMN_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/source', 'START_LABEL': 'Table', 'END_LABEL': 'Column',
'START_KEY': 'hive://gold.test_schema1/test_table1', 'TYPE': 'COLUMN', 'REVERSE_TYPE': 'COLUMN_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/source/_description', 'START_LABEL': 'Column',
'END_LABEL': 'Description', 'START_KEY': 'hive://gold.test_schema1/test_table1/source',
'TYPE': 'DESCRIPTION',
'REVERSE_TYPE': 'DESCRIPTION_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/etl_created_at', 'START_LABEL': 'Table',
'END_LABEL': 'Column', 'START_KEY': 'hive://gold.test_schema1/test_table1', 'TYPE': 'COLUMN',
'REVERSE_TYPE': 'COLUMN_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/etl_created_at/_description', 'START_LABEL': 'Column',
'END_LABEL': 'Description', 'START_KEY': 'hive://gold.test_schema1/test_table1/etl_created_at',
'TYPE': 'DESCRIPTION', 'REVERSE_TYPE': 'DESCRIPTION_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/ds', 'START_LABEL': 'Table', 'END_LABEL': 'Column',
'START_KEY': 'hive://gold.test_schema1/test_table1', 'TYPE': 'COLUMN', 'REVERSE_TYPE': 'COLUMN_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/has_nested_type', 'START_LABEL': 'Table',
'END_LABEL': 'Column', 'START_KEY': 'hive://gold.test_schema1/test_table1', 'TYPE': 'COLUMN',
'REVERSE_TYPE': 'COLUMN_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/has_nested_type/_description', 'START_LABEL': 'Column',
'END_LABEL': 'Description', 'START_KEY': 'hive://gold.test_schema1/test_table1/has_nested_type',
'TYPE': 'DESCRIPTION', 'REVERSE_TYPE': 'DESCRIPTION_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/has_nested_type/type/has_nested_type',
'START_KEY': 'hive://gold.test_schema1/test_table1/has_nested_type', 'END_LABEL': 'Type_Metadata',
'START_LABEL': 'Column', 'TYPE': 'TYPE_METADATA', 'REVERSE_TYPE': 'TYPE_METADATA_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/has_nested_type/type/has_nested_type/_inner_',
'START_KEY': 'hive://gold.test_schema1/test_table1/has_nested_type/type/has_nested_type',
'END_LABEL': 'Type_Metadata', 'START_LABEL': 'Type_Metadata', 'TYPE': 'SUBTYPE',
'REVERSE_TYPE': 'SUBTYPE_OF'},
{'END_KEY': 'hive://gold.test_schema1/test_table1/has_nested_type/type/has_nested_type/_inner_/_inner_',
'START_KEY': 'hive://gold.test_schema1/test_table1/has_nested_type/type/has_nested_type/_inner_',
'END_LABEL': 'Type_Metadata', 'START_LABEL': 'Type_Metadata', 'TYPE': 'SUBTYPE',
'REVERSE_TYPE': 'SUBTYPE_OF'}
]
self.expected_rels = copy.deepcopy(self.expected_rels_deduped)
self.expected_rels.append({'END_KEY': 'hive://gold', 'START_LABEL': 'Database', 'END_LABEL': 'Cluster',
'START_KEY': 'database://hive', 'TYPE': 'CLUSTER', 'REVERSE_TYPE': 'CLUSTER_OF'})
self.expected_rels.append({'END_KEY': 'hive://gold.test_schema1', 'START_LABEL': 'Cluster',
'END_LABEL': 'Schema', 'START_KEY': 'hive://gold',
'TYPE': 'SCHEMA', 'REVERSE_TYPE': 'SCHEMA_OF'})
node_row = self.table_metadata.next_node()
actual = []
while node_row:
node_row_serialized = neo4_serializer.serialize_node(node_row)
actual.append(node_row_serialized)
node_row = self.table_metadata.next_node()
for i in range(0, len(self.expected_nodes)):
self.assertEqual(actual[i], self.expected_nodes[i])
relation_row = self.table_metadata.next_relation()
actual = []
while relation_row:
relation_row_serialized = neo4_serializer.serialize_relationship(relation_row)
actual.append(relation_row_serialized)
relation_row = self.table_metadata.next_relation()
for i in range(0, len(self.expected_rels)):
self.assertEqual(actual[i], self.expected_rels[i])
# 2nd record should not show already serialized database, cluster, and schema
node_row = self.table_metadata2.next_node()
actual = []
while node_row:
node_row_serialized = neo4_serializer.serialize_node(node_row)
actual.append(node_row_serialized)
node_row = self.table_metadata2.next_node()
self.assertEqual(self.expected_nodes_deduped, actual)
relation_row = self.table_metadata2.next_relation()
actual = []
while relation_row:
relation_row_serialized = neo4_serializer.serialize_relationship(relation_row)
actual.append(relation_row_serialized)
relation_row = self.table_metadata2.next_relation()
self.assertEqual(self.expected_rels_deduped, actual)
def test_serialize_neptune(self) -> None:
node_row = self.table_metadata.next_node()
actual = []
while node_row:
node_row_serialized = neptune_serializer.convert_node(node_row)
actual.append(node_row_serialized)
node_row = self.table_metadata.next_node()
self.assertEqual(EXPECTED_NEPTUNE_NODES, actual)
relation_row = self.table_metadata.next_relation()
neptune_actual: List[List[Dict]] = []
while relation_row:
relation_row_serialized = neptune_serializer.convert_relationship(relation_row)
neptune_actual.append(relation_row_serialized)
relation_row = self.table_metadata.next_relation()
self.maxDiff = None
self.assertEqual(EXPECTED_RELATIONSHIPS_NEPTUNE, neptune_actual)
def test_serialize_mysql(self) -> None:
actual = []
record = self.table_metadata.next_record()
while record:
serialized_record = mysql_serializer.serialize_record(record)
actual.append(serialized_record)
record = self.table_metadata.next_record()
self.assertEqual(EXPECTED_RECORDS_MYSQL, actual)
def test_table_attributes(self) -> None:
self.table_metadata3 = TableMetadata('hive', 'gold', 'test_schema3', 'test_table3', 'test_table3', [
ColumnMetadata('test_id1', 'description of test_table1', 'bigint', 0),
ColumnMetadata('test_id2', 'description of test_id2', 'bigint', 1),
ColumnMetadata('is_active', None, 'boolean', 2),
ColumnMetadata('source', 'description of source', 'varchar', 3),
ColumnMetadata('etl_created_at', 'description of etl_created_at', 'timestamp', 4),
ColumnMetadata('ds', None, 'varchar', 5)], is_view=False, attr1='uri', attr2='attr2')
node_row = self.table_metadata3.next_node()
actual = []
while node_row:
node_row_serialized = neo4_serializer.serialize_node(node_row)
actual.append(node_row_serialized)
node_row = self.table_metadata3.next_node()
self.assertEqual(actual[0].get('attr1'), 'uri')
self.assertEqual(actual[0].get('attr2'), 'attr2')
# TODO: No test can run before the serialization tests... need to fix
def test_z_custom_sources(self) -> None:
self.custom_source = TableMetadata('hive', 'gold', 'test_schema3', 'test_table4', 'test_table4', [
ColumnMetadata('test_id1', 'description of test_table1', 'bigint', 0),
ColumnMetadata('test_id2', 'description of test_id2', 'bigint', 1),
ColumnMetadata('is_active', None, 'boolean', 2),
ColumnMetadata('source', 'description of source', 'varchar', 3),
ColumnMetadata('etl_created_at', 'description of etl_created_at', 'timestamp', 4),
ColumnMetadata('ds', None, 'varchar', 5)], is_view=False, description_source="custom")
node_row = self.custom_source.next_node()
actual = []
while node_row:
node_row_serialized = neo4_serializer.serialize_node(node_row)
actual.append(node_row_serialized)
node_row = self.custom_source.next_node()
expected = {'LABEL': 'Programmatic_Description',
'KEY': 'hive://gold.test_schema3/test_table4/_custom_description',
'description_source': 'custom', 'description': 'test_table4'}
self.assertEqual(actual[1], expected)
def test_tags_field(self) -> None:
self.table_metadata4 = TableMetadata('hive', 'gold', 'test_schema4', 'test_table4', 'test_table4', [
ColumnMetadata('test_id1', 'description of test_table1', 'bigint', 0)],
is_view=False, tags=['tag1', 'tag2'], attr1='uri', attr2='attr2')
node_row = self.table_metadata4.next_node()
actual = []
while node_row:
node_row_serialized = neo4_serializer.serialize_node(node_row)
actual.append(node_row_serialized)
node_row = self.table_metadata4.next_node()
self.assertEqual(actual[0].get('attr1'), 'uri')
self.assertEqual(actual[0].get('attr2'), 'attr2')
self.assertEqual(actual[2].get('LABEL'), 'Tag')
self.assertEqual(actual[2].get('KEY'), 'tag1')
self.assertEqual(actual[3].get('KEY'), 'tag2')
relation_row = self.table_metadata4.next_relation()
actual = []
while relation_row:
relation_row_serialized = neo4_serializer.serialize_relationship(relation_row)
actual.append(relation_row_serialized)
relation_row = self.table_metadata4.next_relation()
# Table tag relationship
expected_tab_tag_rel1 = {'END_KEY': 'tag1', 'START_LABEL': 'Table', 'END_LABEL':
'Tag', 'START_KEY': 'hive://gold.test_schema4/test_table4',
'TYPE': 'TAGGED_BY', 'REVERSE_TYPE': 'TAG'}
expected_tab_tag_rel2 = {'END_KEY': 'tag2', 'START_LABEL': 'Table',
'END_LABEL': 'Tag', 'START_KEY': 'hive://gold.test_schema4/test_table4',
'TYPE': 'TAGGED_BY', 'REVERSE_TYPE': 'TAG'}
self.assertEqual(actual[2], expected_tab_tag_rel1)
self.assertEqual(actual[3], expected_tab_tag_rel2)
def test_col_badge_field(self) -> None:
self.table_metadata4 = TableMetadata('hive', 'gold', 'test_schema4', 'test_table4', 'test_table4', [
ColumnMetadata('test_id1', 'description of test_table1', 'bigint', 0, ['col-badge1', 'col-badge2'])],
is_view=False, attr1='uri', attr2='attr2')
node_row = self.table_metadata4.next_node()
actual = []
while node_row:
serialized_node_row = neo4_serializer.serialize_node(node_row)
actual.append(serialized_node_row)
node_row = self.table_metadata4.next_node()
self.assertEqual(actual[4].get('KEY'), 'col-badge1')
self.assertEqual(actual[5].get('KEY'), 'col-badge2')
relation_row = self.table_metadata4.next_relation()
actual = []
while relation_row:
serialized_relation_row = neo4_serializer.serialize_relationship(relation_row)
actual.append(serialized_relation_row)
relation_row = self.table_metadata4.next_relation()
expected_col_badge_rel1 = {'END_KEY': 'col-badge1', 'START_LABEL': 'Column',
'END_LABEL': 'Badge',
'START_KEY': 'hive://gold.test_schema4/test_table4/test_id1',
'TYPE': 'HAS_BADGE', 'REVERSE_TYPE': 'BADGE_FOR'}
expected_col_badge_rel2 = {'END_KEY': 'col-badge2', 'START_LABEL': 'Column',
'END_LABEL': 'Badge',
'START_KEY': 'hive://gold.test_schema4/test_table4/test_id1',
'TYPE': 'HAS_BADGE', 'REVERSE_TYPE': 'BADGE_FOR'}
self.assertEqual(actual[4], expected_col_badge_rel1)
self.assertEqual(actual[5], expected_col_badge_rel2)
def test_tags_populated_from_str(self) -> None:
self.table_metadata5 = TableMetadata('hive', 'gold', 'test_schema5', 'test_table5', 'test_table5', [
ColumnMetadata('test_id1', 'description of test_table1', 'bigint', 0)], tags="tag3, tag4")
# Test table tag field populated from str
node_row = self.table_metadata5.next_node()
actual = []
while node_row:
node_row_serialized = neo4_serializer.serialize_node(node_row)
actual.append(node_row_serialized)
node_row = self.table_metadata5.next_node()
self.assertEqual(actual[2].get('LABEL'), 'Tag')
self.assertEqual(actual[2].get('KEY'), 'tag3')
self.assertEqual(actual[3].get('KEY'), 'tag4')
relation_row = self.table_metadata5.next_relation()
actual = []
while relation_row:
relation_row_serialized = neo4_serializer.serialize_relationship(relation_row)
actual.append(relation_row_serialized)
relation_row = self.table_metadata5.next_relation()
# Table tag relationship
expected_tab_tag_rel3 = {'END_KEY': 'tag3', 'START_LABEL': 'Table', 'END_LABEL':
'Tag', 'START_KEY': 'hive://gold.test_schema5/test_table5',
'TYPE': 'TAGGED_BY', 'REVERSE_TYPE': 'TAG'}
expected_tab_tag_rel4 = {'END_KEY': 'tag4', 'START_LABEL': 'Table',
'END_LABEL': 'Tag', 'START_KEY': 'hive://gold.test_schema5/test_table5',
'TYPE': 'TAGGED_BY', 'REVERSE_TYPE': 'TAG'}
self.assertEqual(actual[2], expected_tab_tag_rel3)
self.assertEqual(actual[3], expected_tab_tag_rel4)
def test_tags_arent_populated_from_empty_list_and_str(self) -> None:
self.table_metadata6 = TableMetadata('hive', 'gold', 'test_schema6', 'test_table6', 'test_table6', [
ColumnMetadata('test_id1', 'description of test_table1', 'bigint', 0)], tags=[])
self.table_metadata7 = TableMetadata('hive', 'gold', 'test_schema7', 'test_table7', 'test_table7', [
ColumnMetadata('test_id1', 'description of test_table1', 'bigint', 0)], tags="")
# Test table tag fields are not populated from empty List
node_row = self.table_metadata6.next_node()
while node_row:
node_row_serialized = neo4_serializer.serialize_node(node_row)
self.assertNotEqual(node_row_serialized.get('LABEL'), 'Tag')
node_row = self.table_metadata6.next_node()
# Test table tag fields are not populated from empty str
node_row = self.table_metadata7.next_node()
while node_row:
node_row_serialized = neo4_serializer.serialize_node(node_row)
self.assertNotEqual(node_row_serialized.get('LABEL'), 'Tag')
node_row = self.table_metadata7.next_node()
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
ZMQ example using python3's asyncio
EducaCoind should be started with the command line arguments:
educacoind -testnet -daemon \
-zmqpubrawtx=tcp://127.0.0.1:28332 \
-zmqpubrawblock=tcp://127.0.0.1:28332 \
-zmqpubhashtx=tcp://127.0.0.1:28332 \
-zmqpubhashblock=tcp://127.0.0.1:28332
We use the asyncio library here. `self.handle()` installs itself as a
future at the end of the function, so the event loop never runs out of
pending futures and the handler effectively loops forever. An
alternative is to wrap the contents of `handle` inside `while True`.
A blocking example using python 2.7 can be obtained from the git history:
https://github.com/bitcoin/bitcoin/blob/37a7fe9e440b83e2364d5498931253937abe9294/contrib/zmq/zmq_sub.py
"""
import binascii
import asyncio
import zmq
import zmq.asyncio
import signal
import struct
import sys
if sys.version_info < (3, 5):
print("This example only works with Python 3.5 and greater")
sys.exit(1)
port = 28332
class ZMQHandler():
def __init__(self):
self.loop = zmq.asyncio.install()
self.zmqContext = zmq.asyncio.Context()
self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "hashblock")
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "hashtx")
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawblock")
self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawtx")
self.zmqSubSocket.connect("tcp://127.0.0.1:%i" % port)
async def handle(self) :
msg = await self.zmqSubSocket.recv_multipart()
topic = msg[0]
body = msg[1]
sequence = "Unknown"
if len(msg[-1]) == 4:
msgSequence = struct.unpack('<I', msg[-1])[-1]
sequence = str(msgSequence)
if topic == b"hashblock":
print('- HASH BLOCK ('+sequence+') -')
print(binascii.hexlify(body))
elif topic == b"hashtx":
print('- HASH TX ('+sequence+') -')
print(binascii.hexlify(body))
elif topic == b"rawblock":
print('- RAW BLOCK HEADER ('+sequence+') -')
print(binascii.hexlify(body[:80]))
elif topic == b"rawtx":
print('- RAW TX ('+sequence+') -')
print(binascii.hexlify(body))
# schedule ourselves to receive the next message
asyncio.ensure_future(self.handle())
def start(self):
self.loop.add_signal_handler(signal.SIGINT, self.stop)
self.loop.create_task(self.handle())
self.loop.run_forever()
def stop(self):
self.loop.stop()
self.zmqContext.destroy()
daemon = ZMQHandler()
daemon.start()
|
""" FizzBuzzフレームワークの利用例 """
from typing import Any, List
import framework
def FizzBuzzFilter() -> framework.OperatorChain:
""" 入力値に対してFizzBuzz変換掛けるフィルタ
中身は単純に ModReplaceFilter を組み合わせただけ。
"""
return framework.OperatorChain(
framework.ModReplaceFilter(15, "fizzbuzz"),
framework.ModReplaceFilter(3, "fizz"),
framework.ModReplaceFilter(5, "buzz"),
)
class FactorialFilter(framework.Operator):
""" 階乗的な計算をするフィルタ
前回の出力を記憶しておいて、新しい入力と掛けたものを次の出力とする。
"""
def __init__(self) -> None:
self.last_value: float = 1
def execute(self, value: float) -> List[float]:
""" 階乗的な計算をした結果を出力する
:param value: 入力値。少なくともかけ算が出来る値である必要がある。
:return: 階乗的な計算の結果。
"""
self.last_value = value * self.last_value
return [self.last_value]
if __name__ == "__main__":
framework.execute(
framework.RangeGenerator(1, 30),
# FactorialFilter(),  # uncomment this line to compute factorials instead
FizzBuzzFilter(),
framework.ConsolePrinter(),
)
|
import wxf_test as t
t.wxf('./test.jpg')
|
# coding: utf-8
# View all groups
# Created by James Raphael Tiovalen (2021)
import slack
import ast
import settings
import config
from slackers.hooks import commands
conv_db = config.conv_handler
@commands.on("viewgroups")
def viewgroups(payload):
return
|
import math
class Solution(object):
def mySqrt(self, x):
"""
:type x: int
:rtype: int
"""
return int(math.sqrt(x))
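# A hedged alternative sketch (not part of the original solution): math.sqrt
# works on floats and can round incorrectly for very large integers, so an
# integer binary search avoids that. The class name below is assumed for
# illustration only.
class SolutionIntegerSqrt(object):
    def mySqrt(self, x):
        """
        :type x: int
        :rtype: int
        """
        lo, hi = 0, x
        while lo <= hi:
            mid = (lo + hi) // 2
            if mid * mid <= x:
                lo = mid + 1
            else:
                hi = mid - 1
        return hi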
|
from abc import ABC
from rest_framework import serializers
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
# User serializer
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email')
# Register serializer
class RegisterSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
extra_kwargs = {'password': {'write_only': True}}
def create(self, validated_data):
user = User.objects.create_user(validated_data['username'], validated_data['email'], validated_data['password'])
return user
# login serializer
class LoginSerializer(serializers.Serializer):
username = serializers.CharField()
password = serializers.CharField()
def validate(self, attrs):
user = authenticate(**attrs)
if user and user.is_active:
return user
raise serializers.ValidationError('Incorrect Credentials')
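# A hedged usage sketch (not from the original file): wiring LoginSerializer and
# UserSerializer into a minimal DRF view. The view name and the response shape
# are assumptions for illustration only.
from rest_framework import generics
from rest_framework.response import Response

class LoginAPI(generics.GenericAPIView):
    serializer_class = LoginSerializer

    def post(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        # LoginSerializer.validate() returns the authenticated user object
        user = serializer.validated_data
        return Response(UserSerializer(user).data)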
|
# -*- coding: utf-8 -*-
"""Top-level package for UK Boards."""
__author__ = """Griffith Rees"""
__email__ = "griff.rees@gmail.com"
# fmt: off
__version__ = '0.5.4'
# fmt: on
|
from Jumpscale import j
class Package(j.baseclasses.threebot_package):
def _init(self, **kwargs):
if "branch" in kwargs.keys():
self.branch = kwargs["branch"]
else:
self.branch = "*"
def prepare(self):
"""
is called at install time
:return:
"""
pass
def start(self):
"""
called when the 3bot starts
:return:
"""
server = self.openresty
server.install(reset=False)
server.configure()
website = server.get_from_port(80)
locations = website.locations.get("community_location")
website_location = locations.locations_spa.new()
website_location.name = "community"
website_location.path_url = "/community"
website_location.use_jumpscale_weblibs = True
fullpath = j.sal.fs.joinPaths(self.package_root, "static/")
website_location.path_location = fullpath
proxy_location = locations.locations_proxy.new()
proxy_location.name = "connect"
proxy_location.path_url = "/connect"
proxy_location.ipaddr_dest = "0.0.0.0"
proxy_location.port_dest = 80
proxy_location.path_dest = "/chat/session/community_join"
proxy_location.scheme = "http"
proxy_location = locations.locations_proxy.new()
proxy_location.name = "register"
proxy_location.path_url = "/register"
proxy_location.ipaddr_dest = "0.0.0.0"
proxy_location.port_dest = 80
proxy_location.path_dest = "/chat/session/community_join"
proxy_location.scheme = "http"
proxy_location = locations.locations_proxy.new()
proxy_location.name = "join"
proxy_location.path_url = "/join"
proxy_location.ipaddr_dest = "0.0.0.0"
proxy_location.port_dest = 80
proxy_location.path_dest = "/chat/session/community_join"
proxy_location.scheme = "http"
locations.configure()
website.configure()
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RGgsignif(RPackage):
"""Enrich your 'ggplots' with group-wise comparisons. This package provides
an easy way to indicate if two groups are significantly different. Commonly
this is shown by a bracket on top connecting the groups of interest which
itself is annotated with the level of significance (NS, *, **, ***). The
package provides a single layer (geom_signif()) that takes the groups for
comparison and the test (t.test(), wilcox.text() etc.) as arguments and
adds the annotation to the plot."""
homepage = "https://github.com/const-ae/ggsignif"
url = "https://cloud.r-project.org/src/contrib/ggsignif_0.6.0.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/ggsignif"
version('0.6.0', sha256='6fe13efda31386483e64d466ba2f5a53a2a235ae04f5c17bba3ccc63d283499e')
depends_on('r-ggplot2@2.0.0:', type=('build', 'run'))
|
'''
Stanley Bak
Engine controller specification checking
'''
import numpy as np
from numpy import deg2rad
from RunF16Sim import RunF16Sim
from PassFailAutomaton import AirspeedPFA, FlightLimits
from CtrlLimits import CtrlLimits
from LowLevelController import LowLevelController
from Autopilot import FixedSpeedAutopilot
from controlledF16 import controlledF16
from plot import plot2d
def main():
'main function'
ctrlLimits = CtrlLimits()
flightLimits = FlightLimits()
llc = LowLevelController(ctrlLimits)
setpoint = 2220
p_gain = 0.01
ap = FixedSpeedAutopilot(setpoint, p_gain, llc.xequil, llc.uequil, flightLimits, ctrlLimits)
# Pass if the airspeed reaches the 2220 setpoint within 60 seconds, with 5% tolerance
pass_fail = AirspeedPFA(60, setpoint, 5)
### Initial Conditions ###
power = 0 # Power
# Default alpha & beta
alpha = deg2rad(2.1215) # Trim Angle of Attack (rad)
beta = 0 # Side slip angle (rad)
alt = 20000 # Initial Altitude
Vt = 1000 # Initial Speed
phi = 0 #(pi/2)*0.5 # Roll angle from wings level (rad)
theta = 0 #(-pi/2)*0.8 # Pitch angle from nose level (rad)
psi = 0 #-pi/4 # Yaw angle from North (rad)
# Build Initial Condition Vectors
# state = [VT, alpha, beta, phi, theta, psi, P, Q, R, pn, pe, h, pow]
initialState = [Vt, alpha, beta, phi, theta, psi, 0, 0, 0, 0, 0, alt, power]
# Select Desired F-16 Plant
f16_plant = 'morelli' # 'stevens' or 'morelli'
tMax = 70 # simulation time
def der_func(t, y):
'derivative function'
der = controlledF16(t, y, f16_plant, ap, llc)[0]
rv = np.zeros((y.shape[0],))
rv[0] = der[0] # speed
rv[12] = der[12] # power lag term
return rv
passed, times, states, modes, ps_list, Nz_list, u_list = \
RunF16Sim(initialState, tMax, der_func, f16_plant, ap, llc, pass_fail, sim_step=0.1)
print("Simulation Conditions Passed: {}".format(passed))
# plot
filename = "engine_e.png" # engine_e.png
plot2d(filename, times, [(states, [(0, 'Vt'), (12, 'Pow')]), (u_list, [(0, 'Throttle')])])
if __name__ == '__main__':
main()
|
"""
Heidi: Helpers related to visuals.
"""
import logging
__all__ = ['ColorizingStreamHandler', 'ReadableSqlFilter']
# Copyright (C) 2010-2012 Vinay Sajip. All rights reserved. Licensed under the new BSD license.
# https://gist.github.com/758430
class ColorizingStreamHandler(logging.StreamHandler):
# color names to indices
color_map = {
'black': 0,
'red': 1,
'green': 2,
'yellow': 3,
'blue': 4,
'magenta': 5,
'cyan': 6,
'white': 7,
}
# levels to (background, foreground, bold/intense)
level_map = {
logging.DEBUG: (None, 'blue', False),
logging.INFO: (None, 'white', False),
logging.WARNING: (None, 'yellow', False),
logging.ERROR: (None, 'red', False),
logging.CRITICAL: ('red', 'white', True),
}
csi = '\x1b['
reset = '\x1b[0m'
@property
def is_tty(self):
isatty = getattr(self.stream, 'isatty', None)
return isatty and isatty()
def emit(self, record):
try:
message = self.format(record)
stream = self.stream
if not self.is_tty:
stream.write(message)
else:
self.output_colorized(message)
stream.write(getattr(self, 'terminator', '\n'))
self.flush()
except (KeyboardInterrupt, SystemExit):
raise
except: # noqa: E722
self.handleError(record)
def output_colorized(self, message):
self.stream.write(message)
def colorize(self, message, record):
if record.levelno in self.level_map:
bg, fg, bold = self.level_map[record.levelno]
params = []
if bg in self.color_map:
params.append(str(self.color_map[bg] + 40))
if fg in self.color_map:
params.append(str(self.color_map[fg] + 30))
if bold:
params.append('1')
if params:
message = ''.join((self.csi, ';'.join(params),
'm', message, self.reset))
return message
def format(self, record):
message = logging.StreamHandler.format(self, record)
if self.is_tty:
# Don't colorize any traceback
parts = message.split('\n', 1)
parts[0] = self.colorize(parts[0], record)
message = '\n'.join(parts)
return message
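# A hedged usage sketch (logger name assumed): attach the handler to a logger;
# messages are colorized only when the stream is a tty.
#
#   logger = logging.getLogger('demo')
#   logger.addHandler(ColorizingStreamHandler())
#   logger.setLevel(logging.DEBUG)
#   logger.warning('rendered with a yellow foreground on a tty')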
# LOGGING FILTERS
#################
class ReadableSqlFilter(logging.Filter):
"""
A filter for more readable SQL by stripping out the SELECT ... column list.
Modeled after how debug toolbar displays SQL. This code is written with
performance in mind; for example, it does not check that record.name is
'django.db.backends' because the filter is assumed to be attached alongside it.
Sample Usage in Django's `settings.py`:
LOGGING = {
...
'filters': {
'readable_sql': {
'()': 'project_runpy.ReadableSqlFilter',
},
},
'loggers': {
'django.db.backends': {
'filters': ['readable_sql'],
...
},
...
},
}
"""
def filter(self, record):
# https://github.com/django/django/blob/febe136d4c3310ec8901abecca3ea5ba2be3952c/django/db/backends/utils.py#L106-L131
duration, sql, *__ = record.args
if not sql or 'SELECT' not in sql[:28]:
# WISHLIST what's the most performant way to see if 'SELECT' was
# used?
return super().filter(record)
begin = sql.index('SELECT')
try:
end = sql.index('FROM', begin + 6)
except ValueError: # not all SELECT statements also have a FROM
return super().filter(record)
sql = '{0}...{1}'.format(sql[:begin + 6], sql[end:])
# Drop "; args=%s" to shorten logging output
record.msg = '(%.3f) %s'
record.args = (duration, sql)
return super().filter(record)
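# A hedged illustration (values assumed): given a django.db.backends record whose
# args are (0.002, 'SELECT "id", "name" FROM "auth_user"', ()), the filter rewrites
# record.msg to '(%.3f) %s' and the SQL to 'SELECT...FROM "auth_user"', so the
# rendered message becomes '(0.002) SELECT...FROM "auth_user"'.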
|
import time
from abc import ABCMeta
from selenium.common.exceptions import TimeoutException, NoSuchElementException, StaleElementReferenceException, \
WebDriverException, InvalidElementStateException
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.expected_conditions import presence_of_element_located, element_to_be_clickable
from .actions import Action
from .. import retry_delay
from ...base_element import KOMElementBase
from ...general import Log
from ...mixins.action_chains import ActionChainsMixin
from ...mixins.javascript import JSElementMixin
from ...mixins.wait import WaitElementMixin
class KOMElement(KOMElementBase):
__metaclass__ = ABCMeta
def find(self, wait_time: int = 0, **kwargs):
return self.wait_for.presence_of_element_located(wait_time)
@property
def wait_for(self) -> WaitElementMixin:
return WaitElementMixin(self, self.locator)
@property
def js(self) -> JSElementMixin:
return JSElementMixin(self.ancestor, self.wait_for.presence_of_element_located(), self.name)
@property
def action_chains(self) -> ActionChainsMixin:
return ActionChainsMixin(self.ancestor, self.wait_for.presence_of_element_located(), self.name)
def exists(self, wait_time: int = 0) -> bool:
Log.info("Checking if '%s' element exists" % self.name)
try:
self.wait_for.presence_of_element_located(wait_time)
return True
except (NoSuchElementException, TimeoutException):
return False
def execute_action(self, action, element_condition: expected_conditions = presence_of_element_located, *args):
try:
obj = getattr(self.wait_for.condition(wait_time=0, condition=element_condition(self.locator)), action)
if isinstance(obj, str):
self._retry_count = 0
return obj
else:
if self.action_element:
self.js.inject_waiter()
if args:
value = obj(*args)
else:
value = obj()
if self.action_element:
self.js.wait_until_http_requests_are_finished()
return value
except (StaleElementReferenceException, WebDriverException, InvalidElementStateException) as e:
if self._retry_count <= 2:
self._retry_count += 1
Log.error('Error on performing \'%s\' action. Retrying...' % action)
Log.error(e.msg)
time.sleep(retry_delay)
if 'is not clickable at point' in e.msg:
self.js.scroll_into_view()
return self.execute_action(action, element_condition, *args)
else:
raise e
@property
def type(self):
return self.__class__.__name__
# Native WebElement methods
def click(self, element_condition: expected_conditions = element_to_be_clickable):
Log.info('Clicking on the "%s" "%s"' % (self.name, self.type))
self.execute_action(Action.CLICK, element_condition)
def get_attribute(self, name: str):
Log.info('Getting attribute value from "%s" "%s"' % (self.name, self.type))
return self.execute_action(Action.GET_ATTRIBUTE, presence_of_element_located, name)
def get_css_value(self, name: str):
Log.info('Getting attribute value from "%s" "%s" css property' % (self.name, self.type))
return self.execute_action(Action.VALUE_OF_CSS_PROPERTY, presence_of_element_located, name)
@property
def text(self) -> str:
Log.info('Getting text from "%s" "%s"' % (self.name, self.type))
text = self.execute_action(Action.TEXT)
Log.info('Text from "%s" "%s" is "%s"' % (self.name, self.type, text))
return text
def is_displayed(self) -> bool:
return self.execute_action(Action.IS_DISPLAYED)
def type_keys(self, key):
Log.info('Typing keys into "%s" "%s"' % (self.name, self.type))
self.execute_action(Action.SEND_KEYS, presence_of_element_located, key)
def is_enabled(self) -> bool:
return self.execute_action(Action.IS_ENABLED)
|
# _ __
# | |/ /___ ___ _ __ ___ _ _ ®
# | ' </ -_) -_) '_ \/ -_) '_|
# |_|\_\___\___| .__/\___|_|
# |_|
#
# Keeper Secrets Manager
# Copyright 2021 Keeper Security Inc.
# Contact: ops@keepersecurity.com
import json
import mimetypes
import os
from datetime import datetime
from pathlib import Path
import requests
from keeper_secrets_manager_core.crypto import CryptoUtils
from keeper_secrets_manager_core.exceptions import KeeperError
from keeper_secrets_manager_core import utils, helpers
class Record:
def __init__(self, record_dict, secret_key):
self.uid = ''
self.title = ''
self.type = ''
self.files = []
self.raw_json = None
self.dict = {}
self.password = None
self.revision = None
self.is_editable = None
self.uid = record_dict.get('recordUid')
if 'recordKey' in record_dict and record_dict.get('recordKey'):
# Folder Share
record_key_encrypted_str = record_dict.get('recordKey')
record_key_encrypted_bytes = utils.base64_to_bytes(record_key_encrypted_str) if \
record_key_encrypted_str else None
self.record_key_bytes = CryptoUtils.decrypt_aes(record_key_encrypted_bytes, secret_key)
else:
# Single Record Share
self.record_key_bytes = secret_key
record_encrypted_data = record_dict.get('data')
record_data_json = CryptoUtils.decrypt_record(record_encrypted_data, self.record_key_bytes)
self.raw_json = record_data_json
self.dict = utils.json_to_dict(self.raw_json)
self.title = self.dict.get('title')
self.type = self.dict.get('type')
self.revision = record_dict.get('revision')
self.is_editable = record_dict.get("isEditable")
# files
if record_dict.get('files'):
for f in record_dict.get('files'):
file = KeeperFile(f, self.record_key_bytes)
self.files.append(file)
# password (if `login` type)
if self.type == 'login':
fields = self.dict.get('fields')
password_field = next((item for item in fields if item["type"] == "password"), None)
# If the password field exists and there is a value in the array, then set the password.
if password_field is not None and len(password_field.get('value', [])) > 0:
self.password = password_field.get('value')[0]
def find_file_by_title(self, title):
"""Finds file by file title"""
found_file = next((f for f in self.files if f.title == title), None)
return found_file
def download_file_by_title(self, title, path):
found_file = self.find_file_by_title(title)
if not found_file:
raise KeeperError("File %s not found" % title)
found_file.save_file(path)
def __str__(self):
return '[Record: uid=%s, type: %s, title: %s, files count: %s]' % (self.uid, self.type, self.title,
str(len(self.files)))
def _update(self):
""" Take the values in the diction and update the attributes and raw JSON
"""
self.dict["title"] = self.title
self.dict["type"] = self.type
# Find the password in the field and update the password attribute
password_field = next((item for item in self.dict["fields"] if item["type"] == "password"), None)
if password_field is not None and len(password_field.get('value', [])) > 0:
    self.password = password_field.get('value')[0]
self.raw_json = utils.dict_to_json(self.dict)
@staticmethod
def _value(values, single):
if single is True:
if values is None or len(values) == 0:
return None
return values[0]
return values
@staticmethod
def _field_search(fields, field_key):
""" This is a generic field search that returns the field
It will work for both standard and custom fields. It
returns the field as a dictionary.
"""
# First check if the field_key matches any labels. Label matching is case sensitive.
found_item = None
for item in fields:
if item.get("label") is not None and item.get("label") == field_key:
found_item = item
break
# If the label was not found, check the field type. Field type is case insensitive.
if found_item is None:
for item in fields:
if item.get("type").lower() == field_key.lower():
found_item = item
break
return found_item
def get_standard_field(self, field_type):
return self._field_search(fields=self.dict.get('fields', []), field_key=field_type)
def get_standard_field_value(self, field_type, single=False):
field = self.get_standard_field(field_type)
if field is None:
raise ValueError("Cannot find standard field {} in record".format(field_type))
return Record._value(field.get("value", []), single)
def set_standard_field_value(self, field_type, value):
field = self.get_standard_field(field_type)
if field is None:
raise ValueError("Cannot find standard field {} in record".format(field_type))
if type(value) is not list:
value = [value]
field["value"] = value
self._update()
def get_custom_field(self, field_type):
return self._field_search(fields=self.dict.get('custom', []), field_key=field_type)
def get_custom_field_value(self, field_type, single=False):
field = self.get_custom_field(field_type)
if field is None:
raise ValueError("Cannot find custom field {} in record".format(field_type))
return Record._value(field.get("value", []), single)
def set_custom_field_value(self, field_type, value):
field = self.get_custom_field(field_type)
if field is None:
raise ValueError("Cannot find custom field {} in record".format(field_type))
if type(value) is not list:
value = [value]
field["value"] = value
self._update()
# TODO: Deprecate this for better getter and setters
def field(self, field_type, value=None, single=False):
""" Getter and setter for standard fields
A getter operation is performed when the 'value' parameter is not passed. For example, this would
return the value.
record.field("login")
A setter operation is performed when a 'value' parameter is passed. For example, this would set
the value in the field.
record.field("login", value="My New Value")
"""
field = self._field_search(fields=self.dict.get('fields', []), field_key=field_type)
if field is None:
raise ValueError("Cannot find the field for {}".format(field_type))
if value is None:
value = Record._value(field["value"], single)
else:
if type(value) is not list:
value = [value]
field["value"] = value
self._update()
return value
# TODO: Deprecate this for better getter and setters
def custom_field(self, label=None, value=None, field_type=None, single=False):
custom_field = None
if label is not None:
custom_field = self._field_search(fields=self.dict.get('custom', []), field_key=label)
if custom_field is None and field_type is not None:
custom_field = self._field_search(fields=self.dict.get('custom', []), field_key=field_type)
if custom_field is None:
raise ValueError("Cannot find the custom field label='{}', field type='{}'.".format(label, field_type))
if value is None:
value = Record._value(custom_field["value"], single)
else:
if type(value) is not list:
value = [value]
custom_field["value"] = value
self._update()
return value
def print(self):
print("===")
print("Title: {}".format(self.title))
print("UID: {}".format(self.uid))
print("Type: {}".format(self.type))
print("")
print("Fields")
print("------")
for item in self.dict.get('fields'):
if item["type"] in ["fileRef", "oneTimeCode"]:
continue
print("{} : {}".format(item["type"], ", ".join(item["value"])))
print("")
print("Custom Fields")
print("------")
for item in self.dict.get('custom', []):
print("{} ({}) : {}".format(item["label"], item["type"], ", ".join(item["value"])))
class Folder:
def __init__(self, folder, secret_key):
self.uid = ''
self.records = []
if not folder.get('folderUid'):
raise Exception("Not a folder")
folder_uid = folder.get('folderUid')
folder_key_enc = folder.get('folderKey')
folder_key = CryptoUtils.decrypt_aes(utils.base64_to_bytes(folder_key_enc), secret_key)
folder_records = folder.get('records')
self.uid = folder_uid
self.key = folder_key
for r in folder_records:
record = Record(r, folder_key)
self.records.append(record)
class KeeperFile:
def __init__(self, f, record_key_bytes):
self.file_key = ''
self.meta_dict = None
self.file_data = None
self.name = ''
self.title = ''
self.type = ''
self.last_modified = 0
self.size = 0
self.f = f
self.record_key_bytes = record_key_bytes
# Set file metadata
meta = self.__get_meta()
self.title = meta.get('title')
self.name = meta.get('name')
self.type = meta.get('type')
self.last_modified = meta.get('lastModified')
self.size = meta.get('size')
def __decrypt_file_key(self):
file_key_encrypted_base64 = self.f.get('fileKey')
file_key_encrypted = utils.base64_to_bytes(file_key_encrypted_base64)
file_key = CryptoUtils.decrypt_aes(file_key_encrypted, self.record_key_bytes)
return file_key
def __get_meta(self):
"""
Returns file metadata dictionary (file name, title, size, type, etc.)
"""
if not self.meta_dict:
file_key = self.__decrypt_file_key()
meta_json = CryptoUtils.decrypt_aes(utils.base64_to_bytes(self.f.get('data')), file_key)
self.meta_dict = utils.json_to_dict(meta_json)
return self.meta_dict
def get_file_data(self):
"""
Return decrypted raw file data
"""
if not self.file_data:  # download and cache the file data on first access
file_key = self.__decrypt_file_key()
file_url = self.f.get('url')
rs = requests.get(file_url)
file_encrypted_data = rs.content
self.file_data = CryptoUtils.decrypt_aes(file_encrypted_data, file_key)
return self.file_data
def save_file(self, path, create_folders=False):
"""
Save decrypted file data to the provided path
"""
if create_folders:
os.makedirs(os.path.dirname(path), exist_ok=True)
file_data = self.get_file_data()
dir_path = os.path.dirname(os.path.abspath(path))
if not os.path.exists(dir_path):
raise KeeperError("No such file or directory %s\nConsider adding `create_folders=True` to `save_file()` "
"method " % path)
file = open(path, "wb")
file.write(file_data)
file.close()
return True
def __str__(self):
return "[KeeperFile - name: %s, title: %s]" % (self.name, self.title)
class KeeperFileUpload:
def __init__(self, name=None, title=None, mime_type=None, data=None):
self.Name = name
self.Title = title
self.Type = mime_type
self.Data = data
@staticmethod
def from_file(path, file_name=None, file_title=None, mime_type=None):
"""Helper method to get Keeper File Upload object from the file path"""
file_name = file_name if file_name else Path(path).name
file_title = file_title if file_title else file_name
if not mime_type:
mime_type = mimetypes.guess_type(path)[0]
if not mime_type:
# fall back to `application/octet-stream` if type was not determined
mime_type = 'application/octet-stream'
with open(path, 'rb') as in_file:  # open for reading as binary
    file_bytes_data = in_file.read()
file_upload = KeeperFileUpload(name=file_name, title=file_title, mime_type=mime_type, data=file_bytes_data)
return file_upload
class KeeperFileData:
def __init__(self):
self.name = None
self.size = None
self.title = None
self.lastModified = None
self.type = None
class RecordField:
def __init__(self, field_type=None, value=None, label=None, required=None):
self.type = field_type
if isinstance(value, list):
self.value = value
else:
self.value = [value] if value else []
if label:
self.label = label
if required:
self.required = required
class RecordCreate:
def __init__(self, record_type, title):
self.record_type = record_type
self.title = title
self.notes = None
self.fields = None
self.custom = None
def to_dict(self):
rec_dict = {
'type': self.record_type,
'title': self.title,
'fields': self.fields,
}
if self.notes:
rec_dict['notes'] = self.notes
if self.custom:
rec_dict['custom'] = self.custom
return helpers.obj_to_dict(rec_dict)
def to_json(self):
json_object = json.dumps(self.to_dict(), indent=4)
return json_object
class AppData:
"""
Application info
"""
def __init__(self, title="", app_type=""):
self.title = title
self.app_type = app_type
class SecretsManagerResponse:
"""
Server response containing details about the application and the records
that were requested to be returned.
"""
def __init__(self):
self.appData = None
# self.encryptedAppKey = None
# self.appOwnerPublicKey = None
self.folders = None
self.records = None
self.expiresOn = None
self.warnings = None
self.justBound = False
def expires_on_str(self, date_format='%Y-%m-%d %H:%M:%S'):
"""
Retrieve string formatted expiration date
"""
return datetime.fromtimestamp(self.expiresOn/1000).strftime(date_format)
class SecretsManagerAddFileResponse:
def __init__(self):
self.url = None
self.parameters = None
self.successStatusCode = None
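# A hedged usage sketch (the SecretsManager setup, record UID, and field values
# below are assumptions, not part of this module):
#
#   from keeper_secrets_manager_core import SecretsManager
#   secrets_manager = SecretsManager(token='<one-time token>')
#   record = secrets_manager.get_secrets(['<record uid>'])[0]
#   login = record.get_standard_field_value('login', single=True)
#   record.set_standard_field_value('login', 'new-login@example.com')
#   cert = record.find_file_by_title('cert.pem')
#   if cert:
#       cert.save_file('/tmp/cert.pem', create_folders=True)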
|
#!/usr/bin/env python3
import re
import sys
import datetime
import subprocess
from widgets.widget import Widget
from widgets.config import colors, icons
class button(Widget):
'''
Status-bar button widget; execute('show') switches to i3 workspace 3 and
launches a terminal via i3-msg.
'''
def __init__(self, value=''):
'''
Params:
bg: background color
fg: foreground color
icon: icon
'''
Widget.__init__(self)
self.value = value
self.bg = None
self.fg = None
self.icon = icons['dropbox']
self.gaps = (10, 7)
self.show_text = False
def execute(self, cmd):
if cmd == 'show':
cmd = ['i3-msg', 'workspace 3; exec /usr/bin/terminator']
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
output = process.communicate()[0]
|
import sys
from scrimmage.client import Client
from game.engine import Engine
from game.utils.generate_game import generate
import game.config
import argparse
import subprocess
import updater
if __name__ == '__main__':
plat = sys.platform
# Setup Primary Parser
par = argparse.ArgumentParser()
# Create Subparsers
spar = par.add_subparsers(title="Commands", dest="command")
# Generate Subparser
gen_subpar = spar.add_parser('generate', aliases=['g'], help='Generates a new random game map')
# Run Subparser and optionals
run_subpar = spar.add_parser('run', aliases=['r'],
help='Runs your bot against the last generated map! "r -h" shows more options')
run_subpar.add_argument('-debug', '-d', action='store', type=int, nargs='?', const=-1,
default=None, dest='debug', help='Allows for debugging when running your code')
run_subpar.add_argument('-quiet', '-q', action='store_true', default=False,
dest='q_bool', help='Runs your AI... quietly :)')
# Scrimmage Subparser
scrim_subpar = spar.add_parser('scrimmage', aliases=['s'], help='Boot client for scrimmage server')
# Visualizer Subparser
vis_subpar = spar.add_parser('visualizer', aliases=['v'], help='Visualizes last run game')
# Updating Subparser
upd_subpar = spar.add_parser('update', aliases=['u'], help='Updates your game to match the newest version if possible')
# Parse Command Line
par_args = par.parse_args()
# Main Action variable
action = par_args.command
# Generate game options
if action in ['generate', 'g']:
generate()
# Run game options
elif action in ['run', 'r']:
# Additional args
quiet = False
if par_args.debug is not None:
if par_args.debug >= 0:
game.config.Debug.level = par_args.debug
else:
print('Valid debug input not found, using default value')
if par_args.q_bool:
quiet = True
engine = Engine(quiet)
engine.loop()
# Boot up the scrimmage server client
elif action in ['scrimmage', 's']:
cl = Client()
elif action in ['visualizer', 'v']:
# Check operating system and run corresponding visualizer
if plat == "win32":
print("You're running Windows")
subprocess.call(["./visualizer.exe"])
elif plat == "linux":
print("You're a linux man I see.")
subprocess.call(["./visualizer.x86_64"])
elif plat == "darwin":
print("We don't currently have visualizer support for Mac, we apologize. Give us a poke and we'll see what can do.")
# Attempt to update the game
elif action in ['update', 'u']:
updater.update()
# Print help if no arguments are passed
if len(sys.argv) == 1:
print("\nLooks like you didn't tell the launcher what to do!"
+ "\nHere's the basic commands in case you've forgotten.\n")
par.print_help()
|
from stellar_model.response.account_data_response import *
from stellar_model.response.account_response import *
from stellar_model.response.accounts_response import *
from stellar_model.response.assets_response import *
from stellar_model.response.claimable_balance_response import *
from stellar_model.response.claimable_balances_response import *
from stellar_model.response.effects_response import *
from stellar_model.response.error_response import *
from stellar_model.response.fee_stats_response import *
from stellar_model.response.ledger_response import *
from stellar_model.response.ledgers_response import *
from stellar_model.response.offer_response import *
from stellar_model.response.offers_response import *
from stellar_model.response.operation_response import *
from stellar_model.response.operations_response import *
from stellar_model.response.paths_response import *
from stellar_model.response.payments_response import *
from stellar_model.response.trade_aggregations_response import *
from stellar_model.response.trades_response import *
from stellar_model.response.transaction_response import *
from stellar_model.response.transactions_response import *
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualWansOperations(object):
"""VirtualWansOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
resource_group_name, # type: str
virtual_wan_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.VirtualWAN"
"""Retrieves the details of a VirtualWAN.
:param resource_group_name: The resource group name of the VirtualWan.
:type resource_group_name: str
:param virtual_wan_name: The name of the VirtualWAN being retrieved.
:type virtual_wan_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualWAN, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_07_01.models.VirtualWAN
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualWAN"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'VirtualWANName': self._serialize.url("virtual_wan_name", virtual_wan_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualWAN', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{VirtualWANName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_wan_name, # type: str
wan_parameters, # type: "models.VirtualWAN"
**kwargs # type: Any
):
# type: (...) -> "models.VirtualWAN"
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualWAN"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'VirtualWANName': self._serialize.url("virtual_wan_name", virtual_wan_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(wan_parameters, 'VirtualWAN')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualWAN', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualWAN', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{VirtualWANName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_wan_name, # type: str
wan_parameters, # type: "models.VirtualWAN"
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""Creates a VirtualWAN resource if it doesn't exist else updates the existing VirtualWAN.
:param resource_group_name: The resource group name of the VirtualWan.
:type resource_group_name: str
:param virtual_wan_name: The name of the VirtualWAN being created or updated.
:type virtual_wan_name: str
:param wan_parameters: Parameters supplied to create or update VirtualWAN.
:type wan_parameters: ~azure.mgmt.network.v2019_07_01.models.VirtualWAN
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualWAN or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_07_01.models.VirtualWAN]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualWAN"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_wan_name=virtual_wan_name,
wan_parameters=wan_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualWAN', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{VirtualWANName}'} # type: ignore
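    # A hedged usage sketch (client construction and resource names are
    # assumptions, not from this module): callers typically reach this operation
    # group through the service client and block on the returned poller.
    #
    #   poller = network_client.virtual_wans.begin_create_or_update(
    #       resource_group_name='example-rg', virtual_wan_name='example-wan',
    #       wan_parameters=models.VirtualWAN(location='westus'))
    #   virtual_wan = poller.result()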
def _update_tags_initial(
self,
resource_group_name, # type: str
virtual_wan_name, # type: str
wan_parameters, # type: "models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "models.VirtualWAN"
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualWAN"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'VirtualWANName': self._serialize.url("virtual_wan_name", virtual_wan_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(wan_parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualWAN', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualWAN', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{VirtualWANName}'} # type: ignore
def begin_update_tags(
self,
resource_group_name, # type: str
virtual_wan_name, # type: str
wan_parameters, # type: "models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""Updates a VirtualWAN tags.
:param resource_group_name: The resource group name of the VirtualWan.
:type resource_group_name: str
:param virtual_wan_name: The name of the VirtualWAN being updated.
:type virtual_wan_name: str
:param wan_parameters: Parameters supplied to Update VirtualWAN tags.
:type wan_parameters: ~azure.mgmt.network.v2019_07_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualWAN or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_07_01.models.VirtualWAN]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualWAN"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
virtual_wan_name=virtual_wan_name,
wan_parameters=wan_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualWAN', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{VirtualWANName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
virtual_wan_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'VirtualWANName': self._serialize.url("virtual_wan_name", virtual_wan_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{VirtualWANName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_wan_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller
"""Deletes a VirtualWAN.
:param resource_group_name: The resource group name of the VirtualWan.
:type resource_group_name: str
:param virtual_wan_name: The name of the VirtualWAN being deleted.
:type virtual_wan_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_wan_name=virtual_wan_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{VirtualWANName}'} # type: ignore
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.ListVirtualWANsResult"]
"""Lists all the VirtualWANs in a resource group.
:param resource_group_name: The resource group name of the VirtualWan.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListVirtualWANsResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_07_01.models.ListVirtualWANsResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ListVirtualWANsResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ListVirtualWANsResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.Error, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans'} # type: ignore
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["models.ListVirtualWANsResult"]
"""Lists all the VirtualWANs in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListVirtualWANsResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_07_01.models.ListVirtualWANsResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ListVirtualWANsResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ListVirtualWANsResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.Error, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/virtualWans'} # type: ignore
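# A minimal, hedged usage sketch (not part of the generated client). It assumes the
# operations class above is exposed as `virtual_wans` on a NetworkManagementClient and
# that azure-identity is available; resource names and tag values are illustrative only.
#
#     from azure.identity import DefaultAzureCredential
#     from azure.mgmt.network import NetworkManagementClient
#
#     client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
#     poller = client.virtual_wans.begin_update_tags(
#         resource_group_name="my-rg",
#         virtual_wan_name="my-wan",
#         wan_parameters={"tags": {"env": "dev"}},
#     )
#     wan = poller.result()  # blocks until the ARM long-running operation finishes
#     for item in client.virtual_wans.list_by_resource_group("my-rg"):
#         print(item.name)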
|
def test_del():
a: i32
b: i32
a = 4
b = 20
del a, b
|
import itertools
import os
import shutil
import tempfile
from contextlib import contextmanager
from dagster import check
from dagster.core.storage.file_manager import LocalFileHandle
def _unlink_swallow_errors(path):
check.str_param(path, "path")
try:
os.unlink(path)
except Exception:
pass
@contextmanager
def get_temp_file_handle_with_data(data):
with get_temp_file_name_with_data(data) as temp_file:
yield LocalFileHandle(temp_file)
@contextmanager
def get_temp_file_name_with_data(data):
with get_temp_file_name() as temp_file:
with open(temp_file, "wb") as ff:
ff.write(data)
yield temp_file
@contextmanager
def get_temp_file_handle():
with get_temp_file_name() as temp_file:
yield LocalFileHandle(temp_file)
@contextmanager
def get_temp_file_name():
handle, temp_file_name = tempfile.mkstemp()
os.close(handle) # just need the name - avoid leaking the file descriptor
try:
yield temp_file_name
finally:
_unlink_swallow_errors(temp_file_name)
@contextmanager
def get_temp_file_names(number):
check.int_param(number, "number")
temp_file_names = list()
for _ in itertools.repeat(None, number):
handle, temp_file_name = tempfile.mkstemp()
os.close(handle)  # just need the name - avoid leaking the file descriptor
temp_file_names.append(temp_file_name)
try:
yield tuple(temp_file_names)
finally:
for temp_file_name in temp_file_names:
_unlink_swallow_errors(temp_file_name)
@contextmanager
def get_temp_dir(in_directory=None):
temp_dir = None
try:
temp_dir = tempfile.mkdtemp(dir=in_directory)
yield temp_dir
finally:
if temp_dir:
shutil.rmtree(temp_dir)
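# Hedged usage sketch (illustrative only, not part of the module): each context manager
# above yields a temporary resource and removes it on exit, e.g.
#
#     with get_temp_file_name_with_data(b"hello") as path:
#         with open(path, "rb") as f:
#             assert f.read() == b"hello"
#     # the temporary file has been unlinked once the block exits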
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import papermill as pm
import pytest
import scrapbook as sb
# Parameters
KERNEL_NAME = "python3"
OUTPUT_NOTEBOOK = "output.ipynb"
@pytest.mark.notebooks
@pytest.mark.linuxgpu
def test_01_notebook_run(similarity_notebooks):
notebook_path = similarity_notebooks["01"]
pm.execute_notebook(
notebook_path,
OUTPUT_NOTEBOOK,
parameters=dict(PM_VERSION=pm.__version__),
kernel_name=KERNEL_NAME,
)
nb_output = sb.read_notebook(OUTPUT_NOTEBOOK)
assert nb_output.scraps["median_rank"].data <= 15
@pytest.mark.notebooks
@pytest.mark.linuxgpu
def test_11_notebook_run(similarity_notebooks, tiny_ic_data_path):
notebook_path = similarity_notebooks["11"]
pm.execute_notebook(
notebook_path,
OUTPUT_NOTEBOOK,
parameters=dict(
PM_VERSION=pm.__version__,
# Speed up testing since otherwise would take ~12 minutes on V100
DATA_PATHS=[tiny_ic_data_path],
REPS=1,
IM_SIZES=[60, 100],
),
kernel_name=KERNEL_NAME,
)
nb_output = sb.read_notebook(OUTPUT_NOTEBOOK)
assert min(nb_output.scraps["ranks"].data) <= 30
@pytest.mark.notebooks
@pytest.mark.linuxgpu
def test_12_notebook_run(similarity_notebooks):
notebook_path = similarity_notebooks["12"]
pm.execute_notebook(
notebook_path,
OUTPUT_NOTEBOOK,
parameters=dict(PM_VERSION=pm.__version__),
kernel_name=KERNEL_NAME,
)
nb_output = sb.read_notebook(OUTPUT_NOTEBOOK)
assert nb_output.scraps["median_rank"].data <= 5
assert nb_output.scraps["feature_dimension"].data == 512
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import sys
import numbers
import contextlib
from unittest.mock import patch
import numpy as np
import nevergrad.common.typing as tp
from nevergrad.parametrization import parameter as p
from nevergrad.common import testing
from nevergrad.optimization import test_base
from nevergrad.functions import ArtificialFunction
from nevergrad.functions import ExperimentFunction
from nevergrad.functions.base import MultiExperiment
from nevergrad.functions.test_functionlib import DESCRIPTION_KEYS as ARTIFICIAL_KEYS
from . import xpbase
DESCRIPTION_KEYS = {"seed", "elapsed_time", "elapsed_budget", "loss", "optimizer_name", "pseudotime",
"num_workers", "budget", "error", "batch_mode"} | ARTIFICIAL_KEYS
def test_run_artificial_function() -> None:
func = ArtificialFunction(name="sphere", block_dimension=2)
xp = xpbase.Experiment(func, optimizer="OnePlusOne", budget=24, num_workers=2, batch_mode=True, seed=12)
summary = xp.run()
assert summary["elapsed_time"] < .5 # should be much faster
np.testing.assert_almost_equal(summary["loss"], 0.00078544) # makes sure seeding works!
testing.assert_set_equal(summary.keys(), DESCRIPTION_KEYS)
np.testing.assert_equal(summary["elapsed_budget"], 24)
np.testing.assert_equal(summary["pseudotime"], 12) # defaults to 1 unit per eval ( /2 because 2 workers)
def test_run_packed_artificial_function() -> None:
func = MultiExperiment([ArtificialFunction(name="sphere", block_dimension=2) for _ in range(2)],
[100, 100])
xp = xpbase.Experiment(func, optimizer="OnePlusOne", budget=24, num_workers=2, batch_mode=True, seed=14)
summary = xp.run()
np.testing.assert_almost_equal(summary["loss"], -9961.7, decimal=1) # makes sure seeding works!
def test_noisy_artificial_function_loss() -> None:
func = ArtificialFunction(name="sphere", block_dimension=5, noise_level=.3)
seed = np.random.randint(99999)
xp = xpbase.Experiment(func, optimizer="OnePlusOne", budget=5, seed=seed)
xp.run()
loss_ref = xp.result["loss"]
# now with copy
reco = xp.recommendation
assert reco is not None
np.random.seed(seed)
pfunc = func.copy()
np.testing.assert_equal(pfunc.evaluation_function(*reco.args, **reco.kwargs), loss_ref)
np.random.seed(None)
def test_run_with_error() -> None:
func = ArtificialFunction(name="sphere", block_dimension=2)
xp = xpbase.Experiment(func, optimizer="OnePlusOne", budget=300, num_workers=1)
with patch("nevergrad.optimization.base.Optimizer.minimize") as run:
run.side_effect = ValueError("test error string")
with contextlib.redirect_stderr(sys.stdout):
summary = xp.run()
testing.assert_set_equal(summary.keys(), DESCRIPTION_KEYS)
np.testing.assert_equal(summary["error"], "ValueError")
assert xp._optimizer is not None
np.testing.assert_equal(xp._optimizer.num_tell, 0) # make sure optimizer is kept in case we need to restart (eg.: KeyboardInterrupt)
assert not np.isnan(summary["loss"]), "Loss should be recorded with the current recommendation"
@testing.parametrized(
concurrent=("OnePlusOne", 10, False), # no true case implemented for now
)
def test_is_incoherent(optimizer: str, num_workers: int, expected: bool) -> None:
func = ArtificialFunction(name="sphere", block_dimension=2)
xp = xpbase.Experiment(func, optimizer=optimizer, budget=300, num_workers=num_workers)
np.testing.assert_equal(xp.is_incoherent, expected)
@testing.parametrized(
none=(None, 12, [None, None, None, None]),
seed_no_rand=(12, 0, [363, 803, 222, 277]),
seed_with_rand=(12, 12, [363, 803, 222, 277]),
different_seed=(24, 0, [914, 555, 376, 855]),
)
def test_seed_generator(seed: tp.Optional[int], randsize: int, expected: tp.List[tp.Optional[int]]) -> None:
output = []
generator = xpbase.create_seed_generator(seed)
for _ in range(4):
if randsize: # call the standard random generator
np.random.normal(0, 1, size=randsize)
value = next(generator)
output.append(value if value is None else value % 1000)
np.testing.assert_array_equal(output, expected)
class Function(ExperimentFunction):
def __init__(self, dimension: int):
super().__init__(self.oracle_call, p.Array(shape=(dimension,)))
self.register_initialization(dimension=dimension)
def oracle_call(self, x: np.ndarray) -> float:
return float(x[0])
# pylint: disable=unused-argument
def compute_pseudotime(self, input_parameter: tp.Any, loss: tp.Loss) -> float:
assert isinstance(loss, numbers.Number)
return 5 - loss
@testing.parametrized(
w3_batch=(True, ['s0', 's1', 's2', 'u0', 'u1', 'u2', 's3', 's4', 'u3', 'u4']),
w3_steady=(False, ['s0', 's1', 's2', 'u2', 's3', 'u1', 's4', 'u0', 'u3', 'u4']), # u0 and u1 are delayed
)
def test_batch_mode_parameter(batch_mode: bool, expected: tp.List[str]) -> None:
func = Function(dimension=1)
optim = test_base.LoggingOptimizer(3)
with patch.object(xpbase.OptimizerSettings, "instantiate", return_value=optim):
xp = xpbase.Experiment(func, optimizer="OnePlusOne", budget=10, num_workers=3, batch_mode=batch_mode)
xp._run_with_error()
testing.printed_assert_equal(optim.logs, expected)
def test_equality() -> None:
func = ArtificialFunction(name="sphere", block_dimension=2)
xp1 = xpbase.Experiment(func, optimizer="OnePlusOne", budget=300, num_workers=2)
xp2 = xpbase.Experiment(func, optimizer="RandomSearch", budget=300, num_workers=2)
assert xp1 != xp2
def test_multiobjective_experiment() -> None:
mofunc = MultiExperiment([ArtificialFunction("sphere", block_dimension=7),
ArtificialFunction("cigar", block_dimension=7)],
upper_bounds=np.array((50., 50.)))
xp = xpbase.Experiment(mofunc, optimizer="TwoPointsDE", budget=100, num_workers=1)
summary = xp.run()
loss: float = summary["loss"]
assert loss < 1e9
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Model explanation: https://www.coursera.org/lecture/advanced-computer-vision-with-tensorflow/fcn-architecture-details-uwVDj
@author: anelmusic
"""
import tensorflow as tf
import config
vgg_weights_path = config.VGG_WEIGHTS_PATH
def block(x, n_convs, filters, kernel_size, activation, pool_size, pool_stride, block_name):
for i in range(n_convs):
x = tf.keras.layers.Conv2D(filters=filters, kernel_size=kernel_size, activation=activation, padding='same', name="{}_conv{}".format(block_name, i + 1))(x)
x = tf.keras.layers.MaxPooling2D(pool_size=pool_size, strides=pool_stride, name="{}_pool{}".format(block_name, i+1 ))(x)
return x
def VGG_16(image_input):
x = block(image_input,n_convs=2, filters=64, kernel_size=(3,3), activation='relu',pool_size=(2,2), pool_stride=(2,2), block_name='block1')
p1= x
x = block(x,n_convs=2, filters=128, kernel_size=(3,3), activation='relu',pool_size=(2,2), pool_stride=(2,2), block_name='block2')
p2 = x
x = block(x,n_convs=3, filters=256, kernel_size=(3,3), activation='relu',pool_size=(2,2), pool_stride=(2,2), block_name='block3')
p3 = x
x = block(x,n_convs=3, filters=512, kernel_size=(3,3), activation='relu',pool_size=(2,2), pool_stride=(2,2), block_name='block4')
p4 = x
x = block(x,n_convs=3, filters=512, kernel_size=(3,3), activation='relu',pool_size=(2,2), pool_stride=(2,2), block_name='block5')
p5 = x
vgg = tf.keras.Model(image_input , p5)
vgg.load_weights(vgg_weights_path)
# number of filters for the output convolutional layers
n = 4096
c6 = tf.keras.layers.Conv2D( n , ( 7 , 7 ) , activation='relu' , padding='same', name="conv6")(p5)
c7 = tf.keras.layers.Conv2D( n , ( 1 , 1 ) , activation='relu' , padding='same', name="conv7")(c6)
return (p1, p2, p3, p4, c7)
def fcn8_decoder(convs, n_classes):
f1, f2, f3, f4, f5 = convs
o = tf.keras.layers.Conv2DTranspose(n_classes , kernel_size=(4,4) , strides=(2,2) , use_bias=False )(f5)
o = tf.keras.layers.Cropping2D(cropping=(1,1))(o)
o2 = f4
o2 = ( tf.keras.layers.Conv2D(n_classes , ( 1 , 1 ) , activation='relu' , padding='same'))(o2)
o = tf.keras.layers.Add()([o, o2])
o = (tf.keras.layers.Conv2DTranspose( n_classes , kernel_size=(4,4) , strides=(2,2) , use_bias=False ))(o)
o = tf.keras.layers.Cropping2D(cropping=(1, 1))(o)
o2 = f3
o2 = ( tf.keras.layers.Conv2D(n_classes , ( 1 , 1 ) , activation='relu' , padding='same'))(o2)
o = tf.keras.layers.Add()([o, o2])
o = tf.keras.layers.Conv2DTranspose(n_classes , kernel_size=(8,8) , strides=(8,8) , use_bias=False )(o)
o = (tf.keras.layers.Activation('softmax'))(o)
return o
def segmentation_model():
inputs = tf.keras.layers.Input(shape=(config.NUM_PIXELS_SQRT,config.NUM_PIXELS_SQRT,3,))
convs = VGG_16(image_input=inputs)
outputs = fcn8_decoder(convs, config.OUTPUT_CHANNELS)
model = tf.keras.Model(inputs=inputs, outputs=outputs)
return model
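# Hedged usage sketch (assumes config provides NUM_PIXELS_SQRT, OUTPUT_CHANNELS and
# VGG_WEIGHTS_PATH; the optimizer/loss choices below are illustrative, not the author's
# training settings):
#
#     model = segmentation_model()
#     model.compile(optimizer="adam",
#                   loss="categorical_crossentropy",
#                   metrics=["accuracy"])
#     model.summary()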
|
import pandas as pd
import psycopg2 as pg2
import yaml
import io
import ohio.ext.pandas
from sqlalchemy import create_engine
def open_db_connection(secrets_file="secrets.yaml", verbose=True):
"""
Opens connection to psql db
:return:
connection object
"""
try:
with open(secrets_file, 'r') as f:
# loads contents of secrets.yaml into a python dictionary
secret_config = yaml.safe_load(f.read())
db_params = secret_config['db']
except FileNotFoundError:
print("Cannot establish connection to database. Please provide db_params in secrets.yaml file.")
exit(1)
conn = pg2.connect(
host=db_params['host'],
port=db_params['port'],
dbname=db_params['dbname'],
user=db_params['user'],
password=db_params['password']
)
if verbose:
print(f"Connection opened to database {db_params['dbname']}")
return conn
connection = open_db_connection()
def write_df_in_table(conn, df, schema_name, table_name):
"""write pandas dataframe in table
Args:
conn: a PostgreSQL database connection object
df: a pandas dataframe to write to the database
schema_name: name of the schema for the table
table_name: name of the table
"""
# write df to memory buffer
SEP = "~"
buffer = io.StringIO()
df.to_csv(buffer, index_label='id', header=False, sep=SEP)
buffer.seek(0)
type_mapping = {'int64': 'integer', 'float64': 'double precision', 'object': 'varchar'}
cur = conn.cursor()
cur.execute(f"DROP TABLE IF EXISTS {schema_name}.{table_name};")
cur.execute(f"CREATE TABLE {schema_name}.{table_name} (id integer PRIMARY KEY);")
# cur.execute(f"GRANT ALL PRIVILEGES ON {schema_name}.{table_name} TO bills1;")
cur.execute(f"ALTER TABLE {schema_name}.{table_name} OWNER TO bills1;")
# create table column
for col_name, col_type in zip(df.columns, df.dtypes):
print(col_name)
col_type = type_mapping[str(col_type)]
cur.execute(f"ALTER table {schema_name}.{table_name} ADD COLUMN {col_name} {col_type};")
# hard-coded for now, may be made dynamic later
# TODO: need to figure out how to change NULL values to date as well
#if col_name == "introduced_date":
# cur.execute(f"""ALTER table {schema_name}.{table_name} ALTER COLUMN {col_name}
# TYPE date using to_date({col_name}, 'YYYY-MM-DD');""")
# copy data from buffer to table
cur.copy_from(buffer, f'{schema_name}.{table_name}', sep=SEP)
conn.commit()
cur.close()
# If you need to recreate the SQL tables for whatever reason
object = pd.read_pickle(r'/data/groups/bills1/mlpolicylab_fall20_bills1/bid_groups.pkl')
white_df = pd.DataFrame(object['white'], columns=['bill_id'])
write_df_in_table(conn=connection, df=white_df, schema_name="sketch", table_name="reference_bills_w")
"""
black_df = pd.DataFrame(object['black'], columns=['bill_id'])
asian_df = pd.DataFrame(object['asian'], columns=['bill_id'])
write_df_in_table(conn=connection, df= black_df, schema_name="sketch", table_name="protected_bills_b")
write_df_in_table(conn=connection, df= asian_df, schema_name="sketch", table_name="protected_bills_a")
"""
|
import argparse
import torch
import pyro
import pyro.distributions as dist
from pyro.infer.mcmc.api import MCMC
from pyro.infer.mcmc import NUTS
"""
This simple example is intended to demonstrate how to use an LKJ prior with
a multivariate distribution.
It generates entirely random, uncorrelated data, and then attempts to fit a correlation matrix
and vector of variances.
"""
def model(y):
d = y.shape[1]
N = y.shape[0]
options = dict(dtype=y.dtype, device=y.device)
# Vector of variances for each of the d variables
theta = pyro.sample("theta", dist.HalfCauchy(torch.ones(d, **options)))
# Lower cholesky factor of a correlation matrix
eta = torch.ones(1, **options) # Implies a uniform distribution over correlation matrices
L_omega = pyro.sample("L_omega", dist.LKJCorrCholesky(d, eta))
# Lower cholesky factor of the covariance matrix
L_Omega = torch.mm(torch.diag(theta.sqrt()), L_omega)
# For inference with SVI, one might prefer to use torch.bmm(theta.sqrt().diag_embed(), L_omega)
# Vector of expectations
mu = torch.zeros(d, **options)
with pyro.plate("observations", N):
obs = pyro.sample("obs", dist.MultivariateNormal(mu, scale_tril=L_Omega), obs=y)
return obs
def main(args):
y = torch.randn(args.n, args.num_variables).to(dtype=torch.double)
if args.cuda:
y = y.cuda()
nuts_kernel = NUTS(model, jit_compile=False, step_size=1e-5)
MCMC(nuts_kernel, num_samples=args.num_samples,
warmup_steps=args.warmup_steps, num_chains=args.num_chains).run(y)
if __name__ == "__main__":
assert pyro.__version__.startswith('1.0.0')
parser = argparse.ArgumentParser(description="Demonstrate the use of an LKJ Prior")
parser.add_argument("--num-samples", nargs="?", default=200, type=int)
parser.add_argument("--n", nargs="?", default=500, type=int)
parser.add_argument("--num-chains", nargs='?', default=4, type=int)
parser.add_argument("--num-variables", nargs='?', default=5, type=int)
parser.add_argument("--warmup-steps", nargs='?', default=100, type=int)
parser.add_argument("--rng_seed", nargs='?', default=0, type=int)
parser.add_argument("--cuda", action="store_true", default=False)
args = parser.parse_args()
pyro.set_rng_seed(args.rng_seed)
# Enable validation checks
pyro.enable_validation(__debug__)
# work around the error "RuntimeError: received 0 items of ancdata"
# see https://discuss.pytorch.org/t/received-0-items-of-ancdata-pytorch-0-4-0/19823
torch.multiprocessing.set_sharing_strategy("file_system")
main(args)
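# Hedged usage note (illustrative only): to inspect the posterior draws one would
# normally keep a reference to the MCMC object instead of discarding it, e.g.
#
#     mcmc = MCMC(nuts_kernel, num_samples=args.num_samples,
#                 warmup_steps=args.warmup_steps, num_chains=args.num_chains)
#     mcmc.run(y)
#     samples = mcmc.get_samples()  # dict holding the "theta" and "L_omega" draws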
|
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""An example functional test
The module-level docstring should include a high-level description of
what the test is doing. It's the first thing people see when they open
the file and should give the reader information about *what* the test
is testing and *how* it's being tested
"""
# Imports should be in PEP8 ordering (std library first, then third party
# libraries then local imports).
from collections import defaultdict
# Avoid wildcard * imports if possible
from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.messages import (CInv, msg_block, msg_getdata)
from test_framework.mininode import (
P2PInterface,
mininode_lock,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
wait_until,
)
# P2PInterface is a class containing callbacks to be executed when a P2P
# message is received from the node-under-test. Subclass P2PInterface and
# override the on_*() methods if you need custom behaviour.
class BaseNode(P2PInterface):
def __init__(self):
"""Initialize the P2PInterface
Used to initialize custom properties for the Node that aren't
included by default in the base class. Be aware that the P2PInterface
base class already stores a counter for each P2P message type and the
last received message of each type, which should be sufficient for the
needs of most tests.
Call super().__init__() first for standard initialization and then
initialize custom properties."""
super().__init__()
# Stores a dictionary of all blocks received
self.block_receive_map = defaultdict(int)
def on_block(self, message):
"""Override the standard on_block callback
Store the hash of a received block in the dictionary."""
message.block.calc_sha256()
self.block_receive_map[message.block.sha256] += 1
def on_inv(self, message):
"""Override the standard on_inv callback"""
pass
def custom_function():
"""Do some custom behaviour
If this function is more generally useful for other tests, consider
moving it to a module in test_framework."""
# self.log.info("running custom_function") # Oops! Can't run self.log outside the BitcoinTestFramework
pass
class ExampleTest(BitcoinTestFramework):
# Each functional test is a subclass of the BitcoinTestFramework class.
# Override the set_test_params(), add_options(), setup_chain(), setup_network()
# and setup_nodes() methods to customize the test setup as required.
def set_test_params(self):
"""Override test parameters for your individual test.
This method must be overridden and num_nodes must be explicitly set."""
self.setup_clean_chain = True
self.num_nodes = 3
# Use self.extra_args to change command-line arguments for the nodes
self.extra_args = [[], ["-logips"], []]
# self.log.info("I've finished set_test_params") # Oops! Can't run self.log before run_test()
# Use add_options() to add specific command-line options for your test.
# In practice this is not used very much, since the tests are mostly written
# to be run in automated environments without command-line options.
# def add_options()
# pass
# Use setup_chain() to customize the node data directories. In practice
# this is not used very much since the default behaviour is almost always
# fine
# def setup_chain():
# pass
def setup_network(self):
"""Setup the test network topology
Often you won't need to override this, since the standard network topology
(linear: node0 <-> node1 <-> node2 <-> ...) is fine for most tests.
If you do override this method, remember to start the nodes, assign
them to self.nodes, connect them and then sync."""
self.setup_nodes()
# In this test, we're not connecting node2 to node0 or node1. Calls to
# sync_all() should not include node2, since we're not expecting it to
# sync.
connect_nodes(self.nodes[0], self.nodes[1])
self.sync_all([self.nodes[0:1]])
# Use setup_nodes() to customize the node start behaviour (for example if
# you don't want to start all nodes at the start of the test).
# def setup_nodes():
# pass
def custom_method(self):
"""Do some custom behaviour for this test
Define it in a method here because you're going to use it repeatedly.
If you think it's useful in general, consider moving it to the base
BitcoinTestFramework class so other tests can use it."""
self.log.info("Running custom_method")
def run_test(self):
"""Main test logic"""
# Creating P2P connections will wait for a verack to make sure the connection is fully up
self.nodes[0].add_p2p_connection(BaseNode())
# Generating a block on one of the nodes will get us out of IBD
blocks = [int(self.nodes[0].generate(nblocks=1)[0], 16)]
self.sync_all([self.nodes[0:1]])
# Notice above how we called an RPC by calling a method with the same
# name on the node object. Notice also how we used a keyword argument
# to specify a named RPC argument. Neither of those are defined on the
# node object. Instead there's some __getattr__() magic going on under
# the covers to dispatch unrecognised attribute calls to the RPC
# interface.
# Logs are nice. Do plenty of them. They can be used in place of comments for
# breaking the test into sub-sections.
self.log.info("Starting test!")
self.log.info("Calling a custom function")
custom_function()
self.log.info("Calling a custom method")
self.custom_method()
self.log.info("Create some blocks")
self.tip = int(self.nodes[0].getbestblockhash(), 16)
self.block_time = self.nodes[0].getblock(
self.nodes[0].getbestblockhash())['time'] + 1
height = 1
for i in range(10):
# Use the mininode and blocktools functionality to manually build a block
# Calling the generate() rpc is easier, but this allows us to exactly
# control the blocks and transactions.
block = create_block(
self.tip, create_coinbase(height), self.block_time)
block.solve()
block_message = msg_block(block)
# Send message is used to send a P2P message to the node over our P2PInterface
self.nodes[0].p2p.send_message(block_message)
self.tip = block.sha256
blocks.append(self.tip)
self.block_time += 1
height += 1
self.log.info(
"Wait for node1 to reach current tip (height 11) using RPC")
self.nodes[1].waitforblockheight(11)
self.log.info("Connect node2 and node1")
connect_nodes(self.nodes[1], self.nodes[2])
self.log.info("Add P2P connection to node2")
self.nodes[0].disconnect_p2ps()
self.nodes[2].add_p2p_connection(BaseNode())
self.log.info(
"Wait for node2 reach current tip. Test that it has propagated all the blocks to us")
getdata_request = msg_getdata()
for block in blocks:
getdata_request.inv.append(CInv(2, block))
self.nodes[2].p2p.send_message(getdata_request)
# wait_until() will loop until a predicate condition is met. Use it to test properties of the
# P2PInterface objects.
wait_until(lambda: sorted(blocks) == sorted(
list(self.nodes[2].p2p.block_receive_map.keys())), timeout=5, lock=mininode_lock)
self.log.info("Check that each block was received only once")
# The network thread uses a global lock on data access to the P2PConnection objects when sending and receiving
# messages. The test thread should acquire the global lock before accessing any P2PConnection data to avoid locking
# and synchronization issues. Note wait_until() acquires this global lock when testing the predicate.
with mininode_lock:
for block in self.nodes[2].p2p.block_receive_map.values():
assert_equal(block, 1)
if __name__ == '__main__':
ExampleTest().main()
|
# -*- coding: utf-8 -*-
# cython: language_level=3
# Copyright (c) 2020 Nekokatt
# Copyright (c) 2021-present davfsa
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Package metadata."""
from __future__ import annotations
import typing
# DO NOT CHANGE THE TYPE HINTS FOR THESE FIELDS. THESE ARE AUTOMATICALLY UPDATED
# FROM THE CI SCRIPT AND DOING THIS MAY LEAD TO THE DEPLOY PROCESS FAILING.
__author__: typing.Final[str] = "Nekokatt"
__maintainer__: typing.Final[str] = "davfsa"
__ci__: typing.Final[str] = "https://github.com/hikari-py/hikari/actions"
__copyright__: typing.Final[str] = "© 2021-present davfsa"
__coverage__: typing.Final[str] = "https://codeclimate.com/github/hikari-py/hikari"
__discord_invite__: typing.Final[str] = "https://discord.gg/Jx4cNGG"
__docs__: typing.Final[str] = "https://hikari-py.dev/hikari"
__email__: typing.Final[str] = "davfsa@gmail.com"
__issue_tracker__: typing.Final[str] = "https://github.com/hikari-py/hikari/issues"
__license__: typing.Final[str] = "MIT"
__url__: typing.Final[str] = "https://github.com/hikari-py/hikari"
__version__: typing.Final[str] = "2.0.0.dev106"
__git_sha1__: typing.Final[str] = "HEAD"
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: language_agent_v2/JVMMetric.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from python2sky.proto.common import common_pb2 as common_dot_common__pb2
from python2sky.proto.common import JVM_pb2 as common_dot_JVM__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='language_agent_v2/JVMMetric.proto',
package='',
syntax='proto3',
serialized_options=b'\n3org.apache.skywalking.apm.network.language.agent.v2P\001\252\002\032SkyWalking.NetworkProtocol',
serialized_pb=b'\n!language_agent_v2/JVMMetric.proto\x1a\x13\x63ommon/common.proto\x1a\x10\x63ommon/JVM.proto\"M\n\x13JVMMetricCollection\x12\x1b\n\x07metrics\x18\x01 \x03(\x0b\x32\n.JVMMetric\x12\x19\n\x11serviceInstanceId\x18\x02 \x01(\x05\x32\x46\n\x16JVMMetricReportService\x12,\n\x07\x63ollect\x12\x14.JVMMetricCollection\x1a\t.Commands\"\x00\x42T\n3org.apache.skywalking.apm.network.language.agent.v2P\x01\xaa\x02\x1aSkyWalking.NetworkProtocolb\x06proto3'
,
dependencies=[common_dot_common__pb2.DESCRIPTOR,common_dot_JVM__pb2.DESCRIPTOR,])
_JVMMETRICCOLLECTION = _descriptor.Descriptor(
name='JVMMetricCollection',
full_name='JVMMetricCollection',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='metrics', full_name='JVMMetricCollection.metrics', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='serviceInstanceId', full_name='JVMMetricCollection.serviceInstanceId', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=76,
serialized_end=153,
)
_JVMMETRICCOLLECTION.fields_by_name['metrics'].message_type = common_dot_JVM__pb2._JVMMETRIC
DESCRIPTOR.message_types_by_name['JVMMetricCollection'] = _JVMMETRICCOLLECTION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
JVMMetricCollection = _reflection.GeneratedProtocolMessageType('JVMMetricCollection', (_message.Message,), {
'DESCRIPTOR' : _JVMMETRICCOLLECTION,
'__module__' : 'language_agent_v2.JVMMetric_pb2'
# @@protoc_insertion_point(class_scope:JVMMetricCollection)
})
_sym_db.RegisterMessage(JVMMetricCollection)
DESCRIPTOR._options = None
_JVMMETRICREPORTSERVICE = _descriptor.ServiceDescriptor(
name='JVMMetricReportService',
full_name='JVMMetricReportService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=155,
serialized_end=225,
methods=[
_descriptor.MethodDescriptor(
name='collect',
full_name='JVMMetricReportService.collect',
index=0,
containing_service=None,
input_type=_JVMMETRICCOLLECTION,
output_type=common_dot_common__pb2._COMMANDS,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_JVMMETRICREPORTSERVICE)
DESCRIPTOR.services_by_name['JVMMetricReportService'] = _JVMMETRICREPORTSERVICE
# @@protoc_insertion_point(module_scope)
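# Hedged usage sketch (illustrative only): the generated message class behaves like any
# protobuf message; the field name below comes from the descriptor defined above.
#
#     collection = JVMMetricCollection(serviceInstanceId=1)
#     payload = collection.SerializeToString()
#     assert JVMMetricCollection.FromString(payload).serviceInstanceId == 1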
|
from django.urls import path
from traffic import views
from django.conf.urls import url
urlpatterns = [
# path('', views.index, name='index'),
url(r'^traffic_all_long', views.traffic_all_long, name='traffic_all_long'),
url(r'^traffic_all_short', views.traffic_all_short, name='traffic_all_short'),
url(r'^traffic_add_long', views.traffic_add_long, name='traffic_add_long'),
url(r'^traffic_add_short', views.traffic_add_short, name='traffic_add_short'),
url(r'^traffic_edit_long', views.traffic_edit_long, name='traffic_edit_long'),
url(r'^traffic_edit_short', views.traffic_edit_short, name='traffic_edit_short'),
]
|
from insights.parsers import init_process_cgroup
from insights.tests import context_wrap
CGROUP_HOST = """
11:hugetlb:/
10:memory:/
9:devices:/
8:pids:/
7:perf_event:/
6:net_prio,net_cls:/
5:blkio:/
4:freezer:/
3:cpuacct,cpu:/
2:cpuset:/
1:name=systemd:/
""".strip()
CGROUP_CONTAINER = """
11:hugetlb:/system.slice/docker-55b2b88feeb4fc56bb9384e55100a8581271ca7a22399c6ec52784a35dba933b.scope
10:memory:/system.slice/docker-55b2b88feeb4fc56bb9384e55100a8581271ca7a22399c6ec52784a35dba933b.scope
9:devices:/system.slice/docker-55b2b88feeb4fc56bb9384e55100a8581271ca7a22399c6ec52784a35dba933b.scope
8:pids:/system.slice/docker-55b2b88feeb4fc56bb9384e55100a8581271ca7a22399c6ec52784a35dba933b.scope
7:perf_event:/system.slice/docker-55b2b88feeb4fc56bb9384e55100a8581271ca7a22399c6ec52784a35dba933b.scope
6:net_prio,net_cls:/system.slice/docker-55b2b88feeb4fc56bb9384e55100a8581271ca7a22399c6ec52784a35dba933b.scope
5:blkio:/system.slice/docker-55b2b88feeb4fc56bb9384e55100a8581271ca7a22399c6ec52784a35dba933b.scope
4:freezer:/system.slice/docker-55b2b88feeb4fc56bb9384e55100a8581271ca7a22399c6ec52784a35dba933b.scope
3:cpuacct,cpu:/system.slice/docker-55b2b88feeb4fc56bb9384e55100a8581271ca7a22399c6ec52784a35dba933b.scope
2:cpuset:/system.slice/docker-55b2b88feeb4fc56bb9384e55100a8581271ca7a22399c6ec52784a35dba933b.scope
1:name=systemd:/system.slice/docker-55b2b88feeb4fc56bb9384e55100a8581271ca7a22399c6ec52784a35dba933b.scope
""".strip()
def test_init_process_cgroup():
result = init_process_cgroup.InitProcessCgroup(context_wrap(CGROUP_HOST))
assert result.data["memory"] == ["10", "/"]
assert result.is_container is False
result = init_process_cgroup.InitProcessCgroup(context_wrap(CGROUP_CONTAINER))
assert result.data["memory"] == ["10", "/system.slice/docker-55b2b88feeb4fc56bb9384e55100a8581271ca7a22399c6ec52784a35dba933b.scope"]
assert result.is_container is True
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2021. Huawei Technologies Co., Ltd. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""import files when initiating"""
from npu_device.npu_device import open
from npu_device.npu_device import npu_compat_function
from npu_device.npu_device import gen_npu_ops
from npu_device.npu_device import global_options
from npu_device.npu_device import set_npu_loop_size
from npu_device.utils.scope import keep_dtype_scope
from npu_device._api import distribute
from npu_device._api import train
from npu_device._api import ops
|
from direct.showbase import PythonUtil
from pandac.PandaModules import VBase4
GameActions = PythonUtil.Enum(('EnterDoor',
'RevealDoor',
'OpenDoor',
'Countdown',
'TimeAlert'))
SecondsUntilTimeout = 4.0 * 60.0
SecondsUntilGameEnds = 60.0
SecondsForTimeAlert = 60.0
MaxPlayers = 4
IntroDurationSeconds = 24.0
FinishDurationSeconds = 5.0
PlayerCollisionName = 'CogdoMazePlayer_Collision'
LocalPlayerCollisionName = 'CogdoMazeLocalPlayer_Collision'
PlayerCollisionRadius = 1.0
HitCooldownTime = 2.0
HintTimeout = 6.0
NumQuadrants = (3, 3)
FrameWallThickness = 1
QuadrantUnitGap = 3
TotalBarriers = 12
NumBarriers = 3
MazeBarriers = ([(7, 34),
(8, 34),
(9, 34),
(10, 34)],
[(24, 34),
(25, 34),
(26, 34),
(27, 34)],
[(41, 34),
(42, 34),
(43, 34),
(44, 34)],
[(7, 17),
(8, 17),
(9, 17),
(10, 17)],
[(24, 17),
(25, 17),
(26, 17),
(27, 17)],
[(41, 17),
(42, 17),
(43, 17),
(44, 17)],
[(17, 41),
(17, 42),
(17, 43),
(17, 44)],
[(17, 24),
(17, 25),
(17, 26),
(17, 27)],
[(17, 7),
(17, 8),
(17, 9),
(17, 10)],
[(34, 41),
(34, 42),
(34, 43),
(34, 44)],
[(34, 24),
(34, 25),
(34, 26),
(34, 27)],
[(34, 7),
(34, 8),
(34, 9),
(34, 10)])
ToonRunSpeed = 11.2
CameraAngle = 60
CameraRemoteToonRadius = 6
CameraMinDistance = 40
CameraMaxDistance = 61
CamCutoffFactor = 1.34
ToonAnimationInfo = {'hit': ('slip-backward', 2.25, 12)}
NumPickups = 256
PickupsUntilDoorOpens = int(NumPickups * 0.6)
SuitCollisionName = 'CogdoMazeSuit_Collision'
SuitWalkSameDirectionProb = 1
SuitWalkTurnAroundProb = 100
SuitTypes = PythonUtil.Enum(('Boss', 'FastMinion', 'SlowMinion'))
SuitData = {}
SuitData[SuitTypes.Boss] = {'dnaName': 'ms',
'cellWalkPeriod': 192,
'toonDamage': 3.0,
'scale': 2.5,
'hp': 2,
'memos': 0}
SuitData[SuitTypes.FastMinion] = {'dnaName': 'nd',
'cellWalkPeriod': 64,
'toonDamage': 1.0,
'scale': 1.3,
'hp': 1,
'memos': 3}
SuitData[SuitTypes.SlowMinion] = {'dnaName': 'cc',
'cellWalkPeriod': 160,
'toonDamage': 2.0,
'scale': 1.33,
'hp': 1,
'memos': 2}
NumSuits = (4, 5, 5)
BossSpinTime = 1.0
BossSpinCount = 2
BlinkFrequency = 1.0
BlinkSpeed = 0.5
BlinkColor = VBase4(1.0, 0.4, 0.4, 1.0)
SuitsModifier = (0, 6, 9)
DamageModifier = 9.0
DropShakeEnabled = True
BossShakeEnabled = True
DropShakeStrength = 4.0
DropMaxDistance = 20.0
BossShakeStrength = 1.2
BossMaxDistance = 25.0
BossShakeTime = 0.53
BossStompSfxCutoff = 70.0
BossCogStompAnimationPlayrateFactor = 0.75
CameraShakeFalloff = 2.2
CameraShakeMax = 5.0
QuakeSfxFalloff = 0.01
QuakeSfxMax = 2.0
QuakeSfxEnabled = True
DropFrequency = 3
DropDamage = 0
DropTime = 1.0
ShadowTime = 2.0
DropHeight = 70
DropFadeTime = 1.0
DropCollisionRadius = 1.0
DropCollisionName = 'DropCollision'
DroppedCollisionRadius = 2.0
DropChance = 0.25
GagChance = 0.5
GagSitTime = 15.0
BalloonDelay = 1.2
ThrowDistance = 18
ThrowDuration = 0.5
ThrowStartFrame = 61
ThrowEndFrame = 64
ThrowPlayRate = 1.5
GagPickupScale = 2.0
GagPickupCollisionRadius = 1.0
GagPickupCollisionName = 'PickUpCollision'
GagColors = ((1.0,
0.27,
0.27,
1.0),
(1.0,
0.66,
0.15,
1.0),
(0.31,
1.0,
0.29,
1.0),
(0.31,
0.62,
1.0,
1.0),
(0.91,
0.32,
1.0,
1.0))
GagCollisionName = 'Gag_Collision'
WaterCoolerTriggerRadius = 2.5
WaterCoolerTriggerOffset = (0, -1.5, 0)
WaterCoolerCollisionName = 'WaterCooler_Collision'
WaterCoolerShowEventName = 'CogdoMazeWaterCooler_Show'
WaterCoolerHideEventName = 'CogdoMazeWaterCooler_Hide'
AudioCutoff = 75.0
MusicFiles = {'normal': 'phase_9/audio/bgm/CHQ_FACT_bg.mid',
'timeRunningOut': 'phase_7/audio/bgm/encntr_suit_winning_indoor.mid'}
SfxFiles = {'toonHitByDrop': 'phase_5/audio/sfx/tt_s_ara_cmg_toonHit.mp3',
'toonHit': 'phase_4/audio/sfx/MG_cannon_hit_dirt.mp3',
'getMemo': 'phase_4/audio/sfx/MG_maze_pickup.mp3',
'drop': 'phase_5/audio/sfx/tt_s_ara_cmg_itemHitsFloor.mp3',
'throw': 'phase_3.5/audio/sfx/AA_pie_throw_only.mp3',
'splat': 'phase_5/audio/sfx/SA_watercooler_spray_only.mp3',
'cogSpin': 'phase_3.5/audio/sfx/Cog_Death.mp3',
'cogDeath': 'phase_3.5/audio/sfx/ENC_cogfall_apart.mp3',
'bossCogAngry': 'phase_5/audio/sfx/tt_s_ara_cmg_bossCogAngry.mp3',
'cogStomp': 'phase_5/audio/sfx/tt_s_ara_cmg_cogStomp.mp3',
'quake': 'phase_5/audio/sfx/tt_s_ara_cmg_groundquake.mp3',
'waterCoolerFill': 'phase_5/audio/sfx/tt_s_ara_cmg_waterCoolerFill.mp3',
'lose': 'phase_4/audio/sfx/MG_lose.mp3',
'win': 'phase_4/audio/sfx/MG_win.mp3',
'cogDialogue': 'phase_3.5/audio/dial/COG_VO_statement.mp3',
'toonDialogue': 'phase_3.5/audio/dial/AV_dog_long.mp3'}
MessageLabelPos = (0.0, 0.0, -0.4)
MemoGuiPos = (-0.85, 0, -0.9)
MemoGuiTextScale = 0.1
MemoGuiTextColor = (0.95,
0.95,
0,
1)
MapGuiBgColor = (0.9, 0.9, 0.9)
MapGuiFgColor = (0.5,
0.5,
0.5,
1)
MapGuiPos = (1.05, 0.0, -0.71)
MapGuiScale = 0.225
MapGuiSuitMarkerFlashColor = (1.0, 0.0, 0.0)
MapGuiSuitMarkerSize = 0.075
MapGuiWaterCoolerMarkerSize = 0.08
QuestArrowScale = 5
QuestArrowColor = (1,
1,
0,
1)
CoolerArrowScale = 8
CoolerArrowColor = (1,
1,
0,
1)
CoolerArrowZ = 10
CoolerArrowBounce = 2
CoolerArrowSpeed = 2
BossGuiScale = 0.8
BossGuiPos = (0, 0, -0.83)
BossGuiTitleLabelScale = 0.055
BossCodeFrameWidth = 0.13
BossCodeFrameGap = 0.005
BossCodeFrameLabelScale = 0.12
BossCodeFrameLabelNormalColor = (0,
0,
0,
1)
BossCodeFrameLabelHighlightColor = (0,
0.5,
0,
1)
|
import numpy as np
#import jax.numpy as np
from typing import Callable
import warnings
'''
optimization module
Provides matrix-valued prox-gradient method and its accelerated versions.
Modified from the code in https://github.com/harrispopgen/mushi/
'''
def hs_dot(A, B):
return (A*B).flatten().sum()
def prox_grad_method(x: np.ndarray,
g: Callable[[np.ndarray], np.float64],
grad_g: Callable[[np.ndarray], np.float64],
h: Callable[[np.ndarray], np.float64],
prox: Callable[[np.ndarray, np.float64], np.float64],
tol: np.float64 = 1e-6,
max_iter: int = 100,
s0: np.float64 = 1,
max_line_iter: int = 100,
gamma: np.float64 = 0.8,
verbosity = 0) -> np.ndarray:
u"""Nesterov accelerated proximal gradient method
https://people.eecs.berkeley.edu/~elghaoui/Teaching/EE227A/lecture18.pdf
x: initial point
g: differentiable term in objective function
grad_g: gradient of g
h: non-differentiable term in objective function
prox: proximal operator corresponding to h
tol: relative tolerance in objective function for convergence
max_iter: maximum number of proximal gradient steps
s0: initial step size
max_line_iter: maximum number of line search steps
gamma: step size shrinkage rate for line search
"""
# initialize step size
s = s0
# initial objective value
f = g(x) + h(x)
if verbosity > 0:
print(f'initial objective {f:.6e}', flush=True)
print(f'initial smooth part {g(x):.6e}', flush=True)
for k in range(1, max_iter + 1):
# evaluate differentiable part of objective at current point
g1 = g(x)
grad_g1 = grad_g(x)
# check for errors
if not np.all(np.isfinite(grad_g1)):
warnings.warn("gradient contains invalid values", RuntimeWarning)
return np.nan
if np.all(grad_g1 == 0):
warnings.warn("zero gradient, breaking", RuntimeWarning)
break
# store old iterate
x_old = x
# Armijo line search
for line_iter in range(max_line_iter):
# new point via prox-gradient of momentum point
x = prox(x - s * grad_g1, s)
# G_s(q) as in the notes linked above
G = (1 / s) * (x_old - x)
# test g(q - sG_s(q)) for sufficient decrease
if g(x) <= (g1 - s * hs_dot(grad_g1, G) + (s / 2) * hs_dot(G, G)):
# Armijo satisfied
break
else:
# Armijo not satisfied
s *= gamma # shrink step size
if line_iter == max_line_iter - 1:
warnings.warn("line search failed", RuntimeWarning)
s = s0
if not np.all(np.isfinite(x)):
warnings.warn("x contains invalid values", RuntimeWarning)
# terminate if objective function is constant within tolerance
f_old = f
f = g(x) + h(x)
rel_change = np.abs((f - f_old) / f_old)
if verbosity > 0:
print(f'iteration {k}, objective {f:.3e}, '
f'relative change {rel_change:.3e}', flush=True)
# print(f'iteration {k}, objective {f:.3e}, '
# f'relative change {rel_change:.3e}',
# end=' \r', flush=True)
if rel_change < tol:
if verbosity > 0:
print(f'\nrelative change in objective function {rel_change:.2g} '
f'is within tolerance {tol} after {k} iterations',
flush=True)
break
if k == max_iter and verbosity > 0:
print(f'\nmaximum iteration {max_iter} reached with relative '
f'change in objective function {rel_change:.2g}', flush=True)
return x
def acc_prox_grad_method(x: np.ndarray,
g: Callable[[np.ndarray], np.float64],
grad_g: Callable[[np.ndarray], np.float64],
h: Callable[[np.ndarray], np.float64],
prox: Callable[[np.ndarray, np.float64], np.float64],
tol: np.float64 = 1e-6,
max_iter: int = 100,
s0: np.float64 = 1,
max_line_iter: int = 100,
gamma: np.float64 = 0.8,
verbosity = 0) -> np.ndarray:
u"""Nesterov accelerated proximal gradient method
https://people.eecs.berkeley.edu/~elghaoui/Teaching/EE227A/lecture18.pdf
x: initial point
g: differentiable term in objective function
grad_g: gradient of g
h: non-differentiable term in objective function
prox: proximal operator corresponding to h
tol: relative tolerance in objective function for convergence
max_iter: maximum number of proximal gradient steps
s0: initial step size
max_line_iter: maximum number of line search steps
gamma: step size shrinkage rate for line search
"""
# initialize step size
s = s0
# initialize momentum iterate
q = x
# initial objective value
f = g(x) + h(x)
if verbosity > 0:
print(f'initial objective {f:.6e}', flush=True)
for k in range(1, max_iter + 1):
# evaluate differentiable part of objective at momentum point
g1 = g(q)
grad_g1 = grad_g(q)
if not np.all(np.isfinite(grad_g1)):
warnings.warn("gradient contains invalid values", RuntimeWarning)
return np.nan
if np.all(grad_g1 == 0):
warnings.warn("zero gradient, breaking", RuntimeWarning)
break
# store old iterate
x_old = x
# Armijo line search
for line_iter in range(max_line_iter):
# new point via prox-gradient of momentum point
x = prox(q - s * grad_g1, s)
# G_s(q) as in the notes linked above
G = (1 / s) * (q - x)
# test g(q - sG_s(q)) for sufficient decrease
if g(q - s * G) <= (g1 - s * hs_dot(grad_g1, G) + (s / 2) * hs_dot(G, G)):
# Armijo satisfied
break
else:
# Armijo not satisfied
s *= gamma # shrink step size
# update momentum point
q = x + ((k - 1) / (k + 2)) * (x - x_old)
if line_iter == max_line_iter - 1:
warnings.warn("line search failed", RuntimeWarning)
s = s0
if not np.all(np.isfinite(x)):
warnings.warn("x contains invalid values", RuntimeWarning)
return np.nan
# terminate if objective function is constant within tolerance
f_old = f
f = g(x) + h(x)
rel_change = np.abs((f - f_old) / f_old)
# print(f'iteration {k}, objective {f:.3e}, '
# f'relative change {rel_change:.3e}', flush=True)
if verbosity > 0:
print(f'iteration {k}, objective {f:.3e}, '
f'relative change {rel_change:.3e}',
end=' \r', flush=True)
if rel_change < tol:
if verbosity > 0:
print(f'\nrelative change in objective function {rel_change:.2g} '
f'is within tolerance {tol} after {k} iterations',
flush=True)
break
if k == max_iter:
if verbosity > 0:
print(f'\nmaximum iteration {max_iter} reached with relative '
f'change in objective function {rel_change:.2g}', flush=True)
return x
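if __name__ == "__main__":
    # A minimal, hedged demo (illustrative only): solve a tiny LASSO-style problem
    #     min_x 0.5 * ||A x - b||^2 + lam * ||x||_1
    # with the accelerated method above. A, b and lam are made-up test data, and the
    # soft-thresholding prox below is the standard proximal operator for the l1 term.
    rng = np.random.default_rng(0)
    A = rng.standard_normal((20, 5))
    b = rng.standard_normal(20)
    lam = 0.1

    def g_demo(x):
        r = A @ x - b
        return 0.5 * float(r @ r)

    def grad_g_demo(x):
        return A.T @ (A @ x - b)

    def h_demo(x):
        return lam * float(np.abs(x).sum())

    def prox_demo(x, s):
        # soft-thresholding: prox of s * lam * ||.||_1
        return np.sign(x) * np.maximum(np.abs(x) - s * lam, 0.0)

    x_hat = acc_prox_grad_method(np.zeros(5), g_demo, grad_g_demo, h_demo, prox_demo,
                                 verbosity=1)
    print("demo estimate:", x_hat)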
|
import pandas as pd
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.dates as dt
import matplotlib.ticker as ticker
import datetime
import pickle
import copy
import snake_case
YAXPARAMS = {
'cases': {
'total': {
'ymax': 90,
'yinterval':10
},
'adj': {
'ymax': 100,
'yinterval': 10
}
},
'deaths': {
'total': {
'ymax': 5,
'yinterval': 1
},
'adj': {
'ymax': 5,
'yinterval': 1
}
},
}
YINTERVAL_TOTAL = 5
YINTERVAL_ADJ = 10
SOURCE_LABELS = {
'nyt': 'New York Times',
'jhu': 'Johns Hopkins University'
}
STATE_COLORS = {
'Vermont': '#1f77b4',
'New Hampshire': '#871f78',
}
df_start = pickle.load(
open('output/pickles/df_us_nyt.p', 'rb')).reset_index()
# If you pass in a total_population, counts are scaled to rates per 100,000 people
# Plots are written as SVG files to output/charts/ rather than rendered to the screen
def county_plot(county, state, metrics=['cases', 'deaths'], source='nyt', total_population=None):
df = copy.deepcopy(df_start)
start_date = pd.to_datetime('2020-03-01')
location = {
'type': 'county',
'value': [county, state]
}
for metric in metrics:
for population in [False, total_population]:
count_of = f'{metric}'
county = location['value'][0]
state = location['value'][1]
color = STATE_COLORS[state]
# Re-filter from a fresh copy on every pass so the adjusted columns from a
# previous metric/population iteration do not carry over.
df = df_start[(df_start.county == county) & (df_start.state == state) & (df_start.date >= start_date)].copy()
if population:
df[count_of] = df[count_of].apply(lambda x: (x / population) * 100000)
df['count_of_diff'] = df[count_of].diff()
df['count_of_diff_7_day_mean'] = df.count_of_diff.rolling(7).mean()
df = df.iloc[1:]
fig = plt.figure(figsize=(7, 3))
ax = fig.add_subplot(111)
ax.bar('date', 'count_of_diff', data=df, color=color, alpha=0.35)
ax.plot('date', 'count_of_diff_7_day_mean', color=color, data=df)
ax.xaxis.set_major_locator(dt.MonthLocator())
ax.xaxis.set_major_formatter(dt.DateFormatter('%b'))
ax.set_ylim(ymin=0)
yaxparams = YAXPARAMS[metric]['adj' if population else 'total']
ymax = yaxparams['ymax']
yinterval = yaxparams['yinterval']
# ax.set_ylim(ymax=yaxparams['ymax'])
ax.yaxis.set_ticks(np.arange(0, ymax + yinterval, yinterval))
ax.yaxis.set_major_formatter(ticker.FormatStrFormatter('%0.0f'))
ax.tick_params(axis='y', colors=color)
ax.tick_params(axis='x', colors=color)
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.grid(axis='x')
plt.style.use('seaborn-whitegrid')
plt.text(df.date.iloc[-1] + datetime.timedelta(days=3), df.count_of_diff_7_day_mean.iloc[-1],
"7-day\navg.", color=color, style='italic')
filename = snake_case.convert(f'{county} {state} {metric}{" adjusted" if population else ""}.svg')
plt.savefig(f'output/charts/{filename}')
county_dicts = [
{'county': 'Orange', 'state': 'Vermont', 'total_population': 28892},
{'county': 'Orange', 'state': 'Vermont', 'total_population': 28892},
{'county': 'Windsor', 'state': 'Vermont', 'total_population': 55062},
{'county': 'Grafton', 'state': 'New Hampshire', 'total_population': 89886},
{'county': 'Sullivan', 'state': 'New Hampshire', 'total_population': 43146},
]
for county_dict in county_dicts:
county_plot(**county_dict)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pytest
import pandas
import modin.pandas as pd
from modin.pandas.utils import (
to_pandas,
from_pandas
)
def ray_df_equals_pandas(ray_df, pandas_df):
    # Plain helper, not a fixture: it is called directly by the tests below.
    return to_pandas(ray_df).sort_index().equals(pandas_df.sort_index())
def generate_dfs():
df = pandas.DataFrame({'col1': [0, 1, 2, 3],
'col2': [4, 5, 6, 7],
'col3': [8, 9, 10, 11],
'col4': [12, 13, 14, 15],
'col5': [0, 0, 0, 0]})
df2 = pandas.DataFrame({'col1': [0, 1, 2, 3],
'col2': [4, 5, 6, 7],
'col3': [8, 9, 10, 11],
'col6': [12, 13, 14, 15],
'col7': [0, 0, 0, 0]})
return df, df2
def generate_none_dfs():
df = pandas.DataFrame({'col1': [0, 1, 2, 3],
'col2': [4, 5, None, 7],
'col3': [8, 9, 10, 11],
'col4': [12, 13, 14, 15],
'col5': [None, None, None, None]})
df2 = pandas.DataFrame({'col1': [0, 1, 2, 3],
'col2': [4, 5, 6, 7],
'col3': [8, 9, 10, 11],
'col6': [12, 13, 14, 15],
'col7': [0, 0, 0, 0]})
return df, df2
def test_df_concat():
df, df2 = generate_dfs()
assert(ray_df_equals_pandas(pd.concat([df, df2]),
pandas.concat([df, df2])))
def test_ray_concat():
df, df2 = generate_dfs()
ray_df, ray_df2 = from_pandas(df, 2), from_pandas(df2, 2)
assert ray_df_equals_pandas(pd.concat([ray_df, ray_df2]),
pandas.concat([df, df2]))
def test_ray_concat_on_index():
df, df2 = generate_dfs()
ray_df, ray_df2 = from_pandas(df, 2), from_pandas(df2, 2)
assert ray_df_equals_pandas(pd.concat([ray_df, ray_df2], axis='index'),
pandas.concat([df, df2], axis='index'))
assert ray_df_equals_pandas(pd.concat([ray_df, ray_df2], axis='rows'),
pandas.concat([df, df2], axis='rows'))
assert ray_df_equals_pandas(pd.concat([ray_df, ray_df2], axis=0),
pandas.concat([df, df2], axis=0))
def test_ray_concat_on_column():
df, df2 = generate_dfs()
ray_df, ray_df2 = from_pandas(df, 2), from_pandas(df2, 2)
assert ray_df_equals_pandas(pd.concat([ray_df, ray_df2], axis=1),
pandas.concat([df, df2], axis=1))
assert ray_df_equals_pandas(pd.concat([ray_df, ray_df2], axis="columns"),
pandas.concat([df, df2], axis="columns"))
def test_invalid_axis_errors():
df, df2 = generate_dfs()
ray_df, ray_df2 = from_pandas(df, 2), from_pandas(df2, 2)
with pytest.raises(ValueError):
pd.concat([ray_df, ray_df2], axis=2)
def test_mixed_concat():
df, df2 = generate_dfs()
df3 = df.copy()
mixed_dfs = [from_pandas(df, 2), from_pandas(df2, 2), df3]
assert(ray_df_equals_pandas(pd.concat(mixed_dfs),
pandas.concat([df, df2, df3])))
def test_mixed_inner_concat():
df, df2 = generate_dfs()
df3 = df.copy()
mixed_dfs = [from_pandas(df, 2), from_pandas(df2, 2), df3]
assert(ray_df_equals_pandas(pd.concat(mixed_dfs, join='inner'),
pandas.concat([df, df2, df3], join='inner')))
def test_mixed_none_concat():
df, df2 = generate_none_dfs()
df3 = df.copy()
mixed_dfs = [from_pandas(df, 2), from_pandas(df2, 2), df3]
assert(ray_df_equals_pandas(pd.concat(mixed_dfs),
pandas.concat([df, df2, df3])))
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 9 22:15:47 2020
@author: mofarrag
"""
from IPython import get_ipython
get_ipython().magic("reset -f")
# import os
# os.chdir("")
import geopandas as gpd
import numpy as np
import pandas as pd
import Hapi.inputs as IN
# BasinF = "F:/02Case studies/Coello/base_data/GIS/delineation/features/basins.shp"
BasinF = "F:/02Case studies/Coello/base_data/GIS/GIS/BasinExtractParameters.shp"
ParametersPath = "F:/01Algorithms/HAPI/Hapi/Parameters"
SaveTo = "F:/02Case studies/Coello/Hapi/Data/00inputs/Basic_inputs"
#%%
Basin = gpd.read_file(BasinF)
# parameter names, in the same order used inside the Inputs module
ind = ["tt","sfcf","cfmax","cwh","cfr","fc","beta","lp","k0","k1","k2","uzl","perc","maxbas"]
Par = pd.DataFrame(index = ind)
# extract parameter boundaries
Par['UB'], Par['LB'] = IN.ExtractParametersBoundaries(Basin)
# extract the parameters of a specific scenario out of the 10 scenarios
Par['1'] = IN.ExtractParameters(Basin,"10")
"""
zoom to the place where the catchment exist to check if the basin polygon overlay
the right location, if not there is a problem in the coordinate reference system
transformation
"""
#%% save the parameters
Par['UB'].to_csv(SaveTo + "/UB-Extracted.txt", header=None)
Par['LB'].to_csv(SaveTo + "/LB-Extracted.txt", header=None)
Par['1'].to_csv(SaveTo + "/scenario10.txt", header=None)
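# Optional round-trip check: the files written above are header-less, two-column
# (index, value) CSVs; reading one back confirms the layout.
ub_check = pd.read_csv(SaveTo + "/UB-Extracted.txt", header=None, index_col=0)
print(ub_check.head())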
|
# -*- coding: utf-8 -*-
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'UserLocaleProfile'
db.create_table(u'common_userlocaleprofile', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['auth.User'], unique=True)),
('timezone', self.gf('django.db.models.fields.CharField')(max_length=48)),
('language', self.gf('django.db.models.fields.CharField')(max_length=8)),
))
db.send_create_signal(u'common', ['UserLocaleProfile'])
def backwards(self, orm):
# Deleting model 'UserLocaleProfile'
db.delete_table(u'common_userlocaleprofile')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'common.anonymoususersingleton': {
'Meta': {'object_name': 'AnonymousUserSingleton'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'common.autoadminsingleton': {
'Meta': {'object_name': 'AutoAdminSingleton'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'auto_admin_account'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'password_hash': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
u'common.shareduploadedfile': {
'Meta': {'object_name': 'SharedUploadedFile'},
'datatime': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'common.userlocaleprofile': {
'Meta': {'object_name': 'UserLocaleProfile'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
'timezone': ('django.db.models.fields.CharField', [], {'max_length': '48'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['common']
|
import basis.robot_math as rm
import modeling._ode_cdhelper as mcd
class ModelCollection(object):
"""
    A helper class to further hide pandanodes.
    Lists of collision and geom models can be added to this collection for
    visualization (a usage sketch follows the class definition).
    author: weiwei
    date: 20190825, 20201212
"""
def __init__(self, name='modelcollection'):
self._name = name
self._gm_list = []
self._cm_list = []
@property
def name(self):
return self._name
@property
def cm_list(self):
return self._cm_list
@property
def gm_list(self):
return self._gm_list
@property
def cdmesh(self):
vertices = []
vertex_normals = []
faces = []
for objcm in self._cm_list:
if objcm.cdmesh_type == 'aabb':
objtrm = objcm.objtrm.bounding_box
elif objcm.cdmesh_type == 'obb':
objtrm = objcm.objtrm.bounding_box_oriented
elif objcm.cdmesh_type == 'convexhull':
objtrm = objcm.objtrm.convex_hull
elif objcm.cdmesh_type == 'triangles':
objtrm = objcm.objtrm
            homomat = objcm.get_homomat()
            # face indices must be offset by the number of vertices gathered so far,
            # not by the number of faces
            offset = len(vertices)
            vertices += list(rm.homomat_transform_points(homomat, objtrm.vertices))
            # normals are rotated only; applying the full homogeneous transform
            # would incorrectly translate them
            vertex_normals += list(objtrm.vertex_normals.dot(homomat[:3, :3].T))
            faces += list(objtrm.faces + offset)
return mcd.gen_cdmesh_vvnf(vertices, vertex_normals, faces)
@property
def cdmesh_list(self):
return [objcm.cdmesh for objcm in self._cm_list]
def add_cm(self, objcm):
self._cm_list.append(objcm)
def remove_cm(self, objcm):
self._cm_list.remove(objcm)
def add_gm(self, objcm):
self._gm_list.append(objcm)
def remove_gm(self, objcm):
self._gm_list.remove(objcm)
def attach_to(self, obj):
# TODO check if obj is ShowBase
for cm in self._cm_list:
cm.attach_to(obj)
for gm in self._gm_list:
gm.attach_to(obj)
def detach(self):
for cm in self._cm_list:
cm.detach()
for gm in self._gm_list:
gm.detach()
def show_cdprimit(self): # only work for cm
for cm in self._cm_list:
cm.show_cdprimit()
def unshow_cdprimit(self): # only work for cm
for cm in self._cm_list:
cm.unshow_cdprimit()
def show_cdmesh(self):
for objcm in self._cm_list:
objcm.show_cdmesh()
def unshow_cdmesh(self):
for objcm in self._cm_list:
objcm.unshow_cdmesh()
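if __name__ == '__main__':
    # Minimal usage sketch: the module paths, constructors, and the mesh path below
    # are assumptions based on the sibling modules this package is usually shipped
    # with, not taken from this file; adjust them to the actual package layout.
    import visualization.panda.world as wd      # assumed ShowBase wrapper
    import modeling.collision_model as cm       # assumed collision model module
    import modeling.geometric_model as gm       # assumed geometric model module

    base = wd.World(cam_pos=[1, 1, 1], lookat_pos=[0, 0, 0])
    mc = ModelCollection(name='demo_collection')
    mc.add_cm(cm.CollisionModel('objects/bunny.stl'))   # hypothetical mesh path
    mc.add_gm(gm.gen_frame())                           # simple frame geometric model
    mc.attach_to(base)     # attaches every cm/gm in the collection to the scene
    mc.show_cdprimit()     # visualize collision primitives of the collision models
    base.run()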
|