hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f738cf24af7138d988dffecb2ef44f559fc23734 | 925 | py | Python | password-generator.py | AkashSDas/Mini-Projects | 908cea6df6ef6c62abdfe05585634b786e60b812 | [
"MIT"
] | null | null | null | password-generator.py | AkashSDas/Mini-Projects | 908cea6df6ef6c62abdfe05585634b786e60b812 | [
"MIT"
] | null | null | null | password-generator.py | AkashSDas/Mini-Projects | 908cea6df6ef6c62abdfe05585634b786e60b812 | [
"MIT"
] | null | null | null | import random
import string
# Generating characters for password
def generate_password_characters():
    """Return the pools of characters a password may draw from.

    Returns:
        list[list[str]]: three pools -- ASCII letters, digits, and
        punctuation characters, each expanded to a list of single
        characters.
    """
    # list() replaces the identity comprehensions of the original.
    alphabets = list(string.ascii_letters)
    numbers = list(string.digits)
    special_characters = list(string.punctuation)
    return [alphabets, numbers, special_characters]
# Creating a 15 character random password
def password_generator():
    """Create and return a random 15-character password.

    Each position is filled by first choosing one of the three pools
    (letters / digits / punctuation) uniformly at random, and then
    choosing a random character from that pool.

    Returns:
        str: a 15-character password.

    NOTE(review): the ``random`` module is not cryptographically
    secure; use ``secrets.choice`` for real credentials.
    """
    characters = generate_password_characters()
    # random.choice replaces the manual randint()-based indexing;
    # the distribution (pool first, then character) is unchanged.
    password = [random.choice(random.choice(characters)) for _ in range(15)]
    return "".join(password)


print(password_generator())
| 25.694444 | 73 | 0.726486 | import random
import string
def generate_password_characters():
    """Collect the character pools (letters, digits, punctuation) a
    password is built from, each as a list of single characters."""
    pools = []
    for source in (string.ascii_letters, string.digits, string.punctuation):
        pools.append([symbol for symbol in source])
    return pools
def password_generator():
    """Assemble a random 15-character password.

    For every position, one of the three character pools is picked at
    random and then a random character is taken from it.
    """
    pools = generate_password_characters()
    chosen = []
    for _ in range(15):
        pool = pools[random.randint(0, len(pools) - 1)]
        chosen.append(pool[random.randint(0, len(pool) - 1)])
    return "".join(chosen)


print(password_generator())
| true | true |
f738cf63d0defe7800ee62d77cb5f24a10290e99 | 2,560 | py | Python | test/functional/ncc_state_root.py | nccproject/ncc | 068ccc82a73d28136546095261ad8ccef7e541a3 | [
"MIT"
] | null | null | null | test/functional/ncc_state_root.py | nccproject/ncc | 068ccc82a73d28136546095261ad8ccef7e541a3 | [
"MIT"
] | null | null | null | test/functional/ncc_state_root.py | nccproject/ncc | 068ccc82a73d28136546095261ad8ccef7e541a3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.messages import COIN
from test_framework.nccconfig import *
import sys
class StateRootTest(BitcoinTestFramework):
    """Functional test of the per-block EVM state root (hashStateRoot).

    Verifies the state root is non-zero at genesis, changes when a
    contract is deployed, and is unchanged by a node restart.
    """

    def set_test_params(self):
        # One node on a fresh regtest chain.
        self.setup_clean_chain = True
        self.num_nodes = 1

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    # verify that the state hash is not 0 on genesis
    def verify_not_null_test(self):
        block_hash = self.node.getblockhash(0)
        block = self.node.getblock(block_hash)
        assert(int(block['hashStateRoot'], 16) != 0)

    # verify that the state hash changes on contract creation
    def verify_state_hash_changes(self):
        # NOTE(review): 'amount' is assigned but never used.
        amount = 20000*COIN
        # Mature enough coinbase outputs to fund the contract creation.
        self.node.generate(COINBASE_MATURITY+50)
        block_hash_a = self.node.getblockhash(COINBASE_MATURITY+50)
        block_a = self.node.getblock(block_hash_a)
        # Bytecode below is the compiled form of this contract:
        """
        pragma solidity ^0.4.10;

        contract Example {
            function () payable {}
        }
        """
        self.node.createcontract("60606040523415600b57fe5b5b60398060196000396000f30060606040525b600b5b5b565b0000a165627a7a7230582092926a9814888ff08700cbd86cf4ff8c50052f5fd894e794570d9551733591d60029")
        self.node.generate(1)
        block_hash_b = self.node.getblockhash(COINBASE_MATURITY+51)
        block_b = self.node.getblock(block_hash_b)
        assert(block_a['hashStateRoot'] != block_b['hashStateRoot'])

    # verify that the state hash remains the same on restart
    def verify_state_hash_remains_on_restart(self):
        block_hash_a = self.node.getblockhash(COINBASE_MATURITY+51)
        block_a = self.node.getblock(block_hash_a)
        self.stop_nodes()
        self.start_nodes()
        self.node = self.nodes[0]
        self.node.generate(1)
        block_hash_b = self.node.getblockhash(COINBASE_MATURITY+52)
        block_b = self.node.getblock(block_hash_b)
        # The earlier block's state root must be unaffected by restart.
        assert(block_a['hashStateRoot'] == block_b['hashStateRoot'])

    def run_test(self):
        # The checks are order-dependent: each builds on the chain
        # height produced by the previous one.
        self.node = self.nodes[0]
        self.verify_not_null_test()
        self.verify_state_hash_changes()
        self.verify_state_hash_remains_on_restart()


if __name__ == '__main__':
    StateRootTest().main()
| 38.787879 | 200 | 0.715625 |
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.messages import COIN
from test_framework.nccconfig import *
import sys
class StateRootTest(BitcoinTestFramework):
    """Functional test of the per-block EVM state root (hashStateRoot).

    Verifies the state root is non-zero at genesis, changes when a
    contract is deployed, and is unchanged by a node restart.
    """

    def set_test_params(self):
        # One node on a fresh regtest chain.
        self.setup_clean_chain = True
        self.num_nodes = 1

    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()

    # State root must not be 0 on the genesis block.
    def verify_not_null_test(self):
        block_hash = self.node.getblockhash(0)
        block = self.node.getblock(block_hash)
        assert(int(block['hashStateRoot'], 16) != 0)

    # State root must change when a contract is created.
    def verify_state_hash_changes(self):
        # NOTE(review): 'amount' is assigned but never used.
        amount = 20000*COIN
        # Mature enough coinbase outputs to fund the contract creation.
        self.node.generate(COINBASE_MATURITY+50)
        block_hash_a = self.node.getblockhash(COINBASE_MATURITY+50)
        block_a = self.node.getblock(block_hash_a)
        # Compiled bytecode of a minimal contract with a payable fallback.
        self.node.createcontract("60606040523415600b57fe5b5b60398060196000396000f30060606040525b600b5b5b565b0000a165627a7a7230582092926a9814888ff08700cbd86cf4ff8c50052f5fd894e794570d9551733591d60029")
        self.node.generate(1)
        block_hash_b = self.node.getblockhash(COINBASE_MATURITY+51)
        block_b = self.node.getblock(block_hash_b)
        assert(block_a['hashStateRoot'] != block_b['hashStateRoot'])

    # State root must remain the same across a node restart.
    def verify_state_hash_remains_on_restart(self):
        block_hash_a = self.node.getblockhash(COINBASE_MATURITY+51)
        block_a = self.node.getblock(block_hash_a)
        self.stop_nodes()
        self.start_nodes()
        self.node = self.nodes[0]
        self.node.generate(1)
        block_hash_b = self.node.getblockhash(COINBASE_MATURITY+52)
        block_b = self.node.getblock(block_hash_b)
        # The earlier block's state root must be unaffected by restart.
        assert(block_a['hashStateRoot'] == block_b['hashStateRoot'])

    def run_test(self):
        # The checks are order-dependent: each builds on the chain
        # height produced by the previous one.
        self.node = self.nodes[0]
        self.verify_not_null_test()
        self.verify_state_hash_changes()
        self.verify_state_hash_remains_on_restart()


if __name__ == '__main__':
    StateRootTest().main()
| true | true |
f738cf941c2a8ecc9a7e38fe3c49971fe26c72c6 | 65,550 | py | Python | glance_store/tests/unit/test_swift_store.py | redhat-openstack/glance_store | c816b38d9f12be75d989409cbab6dfefa8f49dc3 | [
"Apache-2.0"
] | null | null | null | glance_store/tests/unit/test_swift_store.py | redhat-openstack/glance_store | c816b38d9f12be75d989409cbab6dfefa8f49dc3 | [
"Apache-2.0"
] | null | null | null | glance_store/tests/unit/test_swift_store.py | redhat-openstack/glance_store | c816b38d9f12be75d989409cbab6dfefa8f49dc3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests the Swift backend store"""
import copy
import fixtures
import hashlib
import mock
import tempfile
import uuid
from oslo_config import cfg
from oslo_utils import units
from oslotest import moxstubout
import requests_mock
import six
from six import moves
from six.moves import http_client
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
import swiftclient
from glance_store._drivers.swift import store as swift
from glance_store import backend
from glance_store import BackendException
from glance_store import capabilities
from glance_store.common import auth
from glance_store.common import utils
from glance_store import exceptions
from glance_store import location
from glance_store.tests import base
from glance_store.tests.unit import test_store_capabilities
CONF = cfg.CONF

# Factories producing fresh random UUID strings. As rendered here the
# fixture keys interpolate the lambda objects themselves ('%s' % FAKE_UUID),
# which stays consistent because the same object is formatted on both the
# stub and the test side.
FAKE_UUID = lambda: str(uuid.uuid4())
FAKE_UUID2 = lambda: str(uuid.uuid4())

Store = swift.Store
FIVE_KB = 5 * units.Ki
FIVE_GB = 5 * units.Gi
MAX_SWIFT_OBJECT_SIZE = FIVE_GB

# Counter incremented by the fake put_object stub; tests reset and
# inspect it to assert how many PUTs (chunks + manifest) happened.
SWIFT_PUT_OBJECT_CALLS = 0

# Base store configuration shared by the tests. The original literal
# repeated 'swift_store_auth_address' and 'swift_store_container' with
# identical values; the duplicate keys are removed here (no behavior
# change -- the later duplicate won anyway).
SWIFT_CONF = {'swift_store_auth_address': 'localhost:8080',
              'swift_store_container': 'glance',
              'swift_store_user': 'user',
              'swift_store_key': 'key',
              'swift_store_retry_get_count': 1,
              'default_swift_reference': 'ref1'
              }
# We stub out as little as possible to ensure that the code paths
# between swift and swiftclient are tested
# thoroughly
def stub_out_swiftclient(stubs, swift_store_auth_version):
    """Replace ``swiftclient.client`` functions with in-memory fakes.

    The fakes emulate a Swift endpoint that starts with a single
    'glance' container holding two pre-seeded 5 KB objects (keyed by
    FAKE_UUID and, as a static-large-object, FAKE_UUID2), so the store
    code can be exercised without a real cluster.

    :param stubs: a stub-out helper exposing ``Set(obj, name, fn)``
    :param swift_store_auth_version: auth version the fake get_auth
        accepts; any other version raises, simulating auth failure
    """
    fixture_containers = ['glance']
    fixture_container_headers = {}
    fixture_headers = {
        'glance/%s' % FAKE_UUID: {
            'content-length': FIVE_KB,
            'etag': 'c2e5db72bd7fd153f53ede5da5a06de3'
        },
        'glance/%s' % FAKE_UUID2: {'x-static-large-object': 'true', },
    }
    fixture_objects = {'glance/%s' % FAKE_UUID: six.BytesIO(b"*" * FIVE_KB),
                       'glance/%s' % FAKE_UUID2: six.BytesIO(b"*" * FIVE_KB), }

    def fake_head_container(url, token, container, **kwargs):
        # 404 for any container that was never created.
        if container not in fixture_containers:
            msg = "No container %s found" % container
            status = http_client.NOT_FOUND
            raise swiftclient.ClientException(msg, http_status=status)
        return fixture_container_headers

    def fake_put_container(url, token, container, **kwargs):
        fixture_containers.append(container)

    def fake_post_container(url, token, container, headers, http_conn=None):
        # POST merges headers into the (single, shared) container metadata.
        for key, value in six.iteritems(headers):
            fixture_container_headers[key] = value

    def fake_put_object(url, token, container, name, contents, **kwargs):
        # PUT returns the ETag header for the newly-added object
        # Large object manifest...
        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS += 1
        CHUNKSIZE = 64 * units.Ki
        fixture_key = "%s/%s" % (container, name)
        if fixture_key not in fixture_headers:
            if kwargs.get('headers'):
                # Manifest PUT: record metadata only, no object payload.
                etag = kwargs['headers']['ETag']
                manifest = kwargs.get('headers').get('X-Object-Manifest')
                fixture_headers[fixture_key] = {'manifest': True,
                                                'etag': etag,
                                                'x-object-manifest': manifest}
                fixture_objects[fixture_key] = None
                return etag
            if hasattr(contents, 'read'):
                # File-like payload: stream it in CHUNKSIZE pieces,
                # computing the MD5 as we go.
                fixture_object = six.BytesIO()
                read_len = 0
                chunk = contents.read(CHUNKSIZE)
                checksum = hashlib.md5()
                while chunk:
                    fixture_object.write(chunk)
                    read_len += len(chunk)
                    checksum.update(chunk)
                    chunk = contents.read(CHUNKSIZE)
                etag = checksum.hexdigest()
            else:
                fixture_object = six.BytesIO(contents)
                read_len = len(contents)
                etag = hashlib.md5(fixture_object.getvalue()).hexdigest()
            if read_len > MAX_SWIFT_OBJECT_SIZE:
                # Mirror Swift's 413 on oversize single objects.
                msg = ('Image size:%d exceeds Swift max:%d' %
                       (read_len, MAX_SWIFT_OBJECT_SIZE))
                raise swiftclient.ClientException(
                    msg, http_status=http_client.REQUEST_ENTITY_TOO_LARGE)
            fixture_objects[fixture_key] = fixture_object
            fixture_headers[fixture_key] = {
                'content-length': read_len,
                'etag': etag}
            return etag
        else:
            # Overwrites are treated as a 409 conflict in this fake.
            msg = ("Object PUT failed - Object with key %s already exists"
                   % fixture_key)
            raise swiftclient.ClientException(msg,
                                              http_status=http_client.CONFLICT)

    def fake_get_object(url, token, container, name, **kwargs):
        # GET returns the tuple (list of headers, file object)
        fixture_key = "%s/%s" % (container, name)
        if fixture_key not in fixture_headers:
            msg = "Object GET failed"
            status = http_client.NOT_FOUND
            raise swiftclient.ClientException(msg, http_status=status)

        byte_range = None
        headers = kwargs.get('headers', dict())
        if headers is not None:
            headers = dict((k.lower(), v) for k, v in six.iteritems(headers))
            if 'range' in headers:
                byte_range = headers.get('range')

        fixture = fixture_headers[fixture_key]
        if 'manifest' in fixture:
            # Large object manifest... we return a file containing
            # all objects with prefix of this fixture key
            chunk_keys = sorted([k for k in fixture_headers.keys()
                                 if k.startswith(fixture_key) and
                                 k != fixture_key])
            result = six.BytesIO()
            for key in chunk_keys:
                result.write(fixture_objects[key].getvalue())
        else:
            result = fixture_objects[fixture_key]

        if byte_range is not None:
            # Honour a 'bytes=N-' range by slicing off the first N bytes
            # and shrinking the reported content-length accordingly.
            start = int(byte_range.split('=')[1].strip('-'))
            result = six.BytesIO(result.getvalue()[start:])
            fixture_headers[fixture_key]['content-length'] = len(
                result.getvalue())

        return fixture_headers[fixture_key], result

    def fake_head_object(url, token, container, name, **kwargs):
        # HEAD returns the list of headers for an object
        try:
            fixture_key = "%s/%s" % (container, name)
            return fixture_headers[fixture_key]
        except KeyError:
            msg = "Object HEAD failed - Object does not exist"
            status = http_client.NOT_FOUND
            raise swiftclient.ClientException(msg, http_status=status)

    def fake_delete_object(url, token, container, name, **kwargs):
        # DELETE returns nothing
        fixture_key = "%s/%s" % (container, name)
        if fixture_key not in fixture_headers:
            msg = "Object DELETE failed - Object does not exist"
            status = http_client.NOT_FOUND
            raise swiftclient.ClientException(msg, http_status=status)
        else:
            del fixture_headers[fixture_key]
            del fixture_objects[fixture_key]

    def fake_http_connection(*args, **kwargs):
        return None

    def fake_get_auth(url, user, key, auth_version, **kwargs):
        if url is None:
            return None, None
        if 'http' in url and '://' not in url:
            raise ValueError('Invalid url %s' % url)
        # Check the auth version against the configured value
        if swift_store_auth_version != auth_version:
            msg = 'AUTHENTICATION failed (version mismatch)'
            raise swiftclient.ClientException(msg)
        return None, None

    stubs.Set(swiftclient.client,
              'head_container', fake_head_container)
    stubs.Set(swiftclient.client,
              'put_container', fake_put_container)
    stubs.Set(swiftclient.client,
              'post_container', fake_post_container)
    stubs.Set(swiftclient.client,
              'put_object', fake_put_object)
    stubs.Set(swiftclient.client,
              'delete_object', fake_delete_object)
    stubs.Set(swiftclient.client,
              'head_object', fake_head_object)
    stubs.Set(swiftclient.client,
              'get_object', fake_get_object)
    stubs.Set(swiftclient.client,
              'get_auth', fake_get_auth)
    stubs.Set(swiftclient.client,
              'http_connection', fake_http_connection)
class SwiftTests(object):
@property
def swift_store_user(self):
    # Default single-tenant credential used to build the test URIs.
    return 'tenant:user1'
def test_get_size(self):
    """
    Test that we can get the size of an object in the swift store
    """
    uri = "swift://%s:key@auth_address/glance/%s" % (
        self.swift_store_user, FAKE_UUID)
    loc = location.get_location_from_uri(uri, conf=self.conf)
    image_size = self.store.get_size(loc)
    # The stubbed fixture object is FIVE_KB (5120) bytes.
    self.assertEqual(image_size, 5120)
def test_get_size_with_multi_tenant_on(self):
    """Test that single tenant uris work with multi tenant on."""
    uri = ("swift://%s:key@auth_address/glance/%s" %
           (self.swift_store_user, FAKE_UUID))
    self.config(swift_store_multi_tenant=True)
    # NOTE(markwash): ensure the image is found
    ctxt = mock.MagicMock()
    size = backend.get_size_from_backend(uri, context=ctxt)
    self.assertEqual(size, 5120)
def test_get(self):
    """Test a "normal" retrieval of an image in chunks."""
    uri = "swift://%s:key@auth_address/glance/%s" % (
        self.swift_store_user, FAKE_UUID)
    loc = location.get_location_from_uri(uri, conf=self.conf)
    (image_swift, image_size) = self.store.get(loc)
    self.assertEqual(image_size, 5120)

    expected_data = b"*" * FIVE_KB
    data = b""

    # Drain the chunked reader and compare with the seeded fixture.
    for chunk in image_swift:
        data += chunk
    self.assertEqual(expected_data, data)
def test_get_with_retry(self):
    """
    Test a retrieval where Swift does not get the full image in a single
    request.
    """
    uri = "swift://%s:key@auth_address/glance/%s" % (
        self.swift_store_user, FAKE_UUID)
    loc = location.get_location_from_uri(uri, conf=self.conf)
    ctxt = mock.MagicMock()
    (image_swift, image_size) = self.store.get(loc, context=ctxt)
    resp_full = b''.join([chunk for chunk in image_swift.wrapped])
    resp_half = resp_full[:len(resp_full) // 2]
    resp_half = six.BytesIO(resp_half)
    # Replace the response with a truncated half so swift_retry_iter
    # has to issue a ranged re-request for the remaining bytes.
    image_swift.wrapped = swift.swift_retry_iter(resp_half, image_size,
                                                 self.store,
                                                 loc.store_location,
                                                 ctxt)
    self.assertEqual(image_size, 5120)

    expected_data = b"*" * FIVE_KB
    data = b""

    for chunk in image_swift:
        data += chunk
    self.assertEqual(expected_data, data)
def test_get_with_http_auth(self):
    """
    Test a retrieval from Swift with an HTTP authurl. This is
    specified either via a Location header with swift+http:// or using
    http:// in the swift_store_auth_address config value
    """
    loc = location.get_location_from_uri(
        "swift+http://%s:key@auth_address/glance/%s" %
        (self.swift_store_user, FAKE_UUID), conf=self.conf)

    ctxt = mock.MagicMock()
    (image_swift, image_size) = self.store.get(loc, context=ctxt)
    self.assertEqual(image_size, 5120)

    expected_data = b"*" * FIVE_KB
    data = b""

    for chunk in image_swift:
        data += chunk
    self.assertEqual(expected_data, data)
def test_get_non_existing(self):
    """
    Test that trying to retrieve a swift that doesn't exist
    raises an error
    """
    # 'noexist' is not seeded in the stub fixtures, so the fake GET
    # raises a 404 which the store surfaces as NotFound.
    loc = location.get_location_from_uri(
        "swift://%s:key@authurl/glance/noexist" % (self.swift_store_user),
        conf=self.conf)
    self.assertRaises(exceptions.NotFound,
                      self.store.get,
                      loc)
@mock.patch('glance_store._drivers.swift.utils'
            '.is_multiple_swift_store_accounts_enabled',
            mock.Mock(return_value=False))
def test_add(self):
    """Test that we can add an image via the swift backend."""
    # Reload to pick up the patched utils, then rebuild the store.
    moves.reload_module(swift)
    self.store = Store(self.conf)
    self.store.configure()
    expected_swift_size = FIVE_KB
    expected_swift_contents = b"*" * expected_swift_size
    expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
    expected_image_id = str(uuid.uuid4())
    loc = "swift+https://tenant%%3Auser1:key@localhost:8080/glance/%s"
    expected_location = loc % (expected_image_id)
    image_swift = six.BytesIO(expected_swift_contents)

    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0

    loc, size, checksum, _ = self.store.add(expected_image_id,
                                            image_swift,
                                            expected_swift_size)
    self.assertEqual(expected_location, loc)
    self.assertEqual(expected_swift_size, size)
    self.assertEqual(expected_checksum, checksum)
    # Expecting a single object to be created on Swift i.e. no chunking.
    self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)

    # Round-trip: read the image back and verify contents and size.
    loc = location.get_location_from_uri(expected_location, conf=self.conf)
    (new_image_swift, new_image_size) = self.store.get(loc)
    new_image_contents = b''.join([chunk for chunk in new_image_swift])
    new_image_swift_size = len(new_image_swift)

    self.assertEqual(expected_swift_contents, new_image_contents)
    self.assertEqual(expected_swift_size, new_image_swift_size)
def test_add_multi_store(self):
    """Adding with a non-default swift reference yields a
    swift+config:// location naming that reference."""
    conf = copy.deepcopy(SWIFT_CONF)
    conf['default_swift_reference'] = 'store_2'
    self.config(**conf)
    moves.reload_module(swift)
    self.store = Store(self.conf)
    self.store.configure()
    expected_swift_size = FIVE_KB
    expected_swift_contents = b"*" * expected_swift_size
    expected_image_id = str(uuid.uuid4())
    image_swift = six.BytesIO(expected_swift_contents)
    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0
    loc = 'swift+config://store_2/glance/%s'
    expected_location = loc % (expected_image_id)

    # NOTE(review): the local 'location' shadows the module-level
    # location import within this method.
    location, size, checksum, arg = self.store.add(expected_image_id,
                                                   image_swift,
                                                   expected_swift_size)
    self.assertEqual(expected_location, location)
@mock.patch('glance_store._drivers.swift.utils'
            '.is_multiple_swift_store_accounts_enabled',
            mock.Mock(return_value=True))
def test_add_auth_url_variations(self):
    """
    Test that we can add an image via the swift backend with
    a variety of different auth_address values
    """
    conf = copy.deepcopy(SWIFT_CONF)
    self.config(**conf)

    variations = {
        'store_4': 'swift+config://store_4/glance/%s',
        'store_5': 'swift+config://store_5/glance/%s',
        'store_6': 'swift+config://store_6/glance/%s'
    }

    for variation, expected_location in variations.items():
        image_id = str(uuid.uuid4())
        expected_location = expected_location % image_id
        expected_swift_size = FIVE_KB
        expected_swift_contents = b"*" * expected_swift_size
        expected_checksum = \
            hashlib.md5(expected_swift_contents).hexdigest()

        image_swift = six.BytesIO(expected_swift_contents)

        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS = 0

        # Rebuild the store for each reference so the new
        # default_swift_reference takes effect.
        conf['default_swift_reference'] = variation
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        loc, size, checksum, _ = self.store.add(image_id, image_swift,
                                                expected_swift_size)

        self.assertEqual(expected_location, loc)
        self.assertEqual(expected_swift_size, size)
        self.assertEqual(expected_checksum, checksum)
        self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)

        loc = location.get_location_from_uri(expected_location,
                                             conf=self.conf)
        (new_image_swift, new_image_size) = self.store.get(loc)
        new_image_contents = b''.join([chunk for chunk in new_image_swift])
        new_image_swift_size = len(new_image_swift)

        self.assertEqual(expected_swift_contents, new_image_contents)
        self.assertEqual(expected_swift_size, new_image_swift_size)
def test_add_no_container_no_create(self):
    """
    Tests that adding an image with a non-existing container
    raises an appropriate exception
    """
    conf = copy.deepcopy(SWIFT_CONF)
    conf['swift_store_user'] = 'tenant:user'
    conf['swift_store_create_container_on_put'] = False
    conf['swift_store_container'] = 'noexist'
    self.config(**conf)
    moves.reload_module(swift)

    self.store = Store(self.conf)
    self.store.configure()

    image_swift = six.BytesIO(b"nevergonnamakeit")

    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0

    # We check the exception text to ensure the container
    # missing text is found in it, otherwise, we would have
    # simply used self.assertRaises here
    exception_caught = False
    try:
        self.store.add(str(uuid.uuid4()), image_swift, 0)
    except BackendException as e:
        exception_caught = True
        self.assertIn("container noexist does not exist "
                      "in Swift", utils.exception_to_str(e))
    self.assertTrue(exception_caught)
    # The add must fail before any PUT reaches the fake backend.
    self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 0)
@mock.patch('glance_store._drivers.swift.utils'
            '.is_multiple_swift_store_accounts_enabled',
            mock.Mock(return_value=True))
def test_add_no_container_and_create(self):
    """
    Tests that adding an image with a non-existing container
    creates the container automatically if flag is set
    """
    expected_swift_size = FIVE_KB
    expected_swift_contents = b"*" * expected_swift_size
    expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
    expected_image_id = str(uuid.uuid4())
    loc = 'swift+config://ref1/noexist/%s'
    expected_location = loc % (expected_image_id)
    image_swift = six.BytesIO(expected_swift_contents)

    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0
    conf = copy.deepcopy(SWIFT_CONF)
    conf['swift_store_user'] = 'tenant:user'
    # With create_container_on_put the missing 'noexist' container is
    # created via the fake put_container instead of failing.
    conf['swift_store_create_container_on_put'] = True
    conf['swift_store_container'] = 'noexist'
    self.config(**conf)
    moves.reload_module(swift)

    self.store = Store(self.conf)
    self.store.configure()

    loc, size, checksum, _ = self.store.add(expected_image_id,
                                            image_swift,
                                            expected_swift_size)

    self.assertEqual(expected_location, loc)
    self.assertEqual(expected_swift_size, size)
    self.assertEqual(expected_checksum, checksum)
    self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)

    loc = location.get_location_from_uri(expected_location, conf=self.conf)
    (new_image_swift, new_image_size) = self.store.get(loc)
    new_image_contents = b''.join([chunk for chunk in new_image_swift])
    new_image_swift_size = len(new_image_swift)

    self.assertEqual(expected_swift_contents, new_image_contents)
    self.assertEqual(expected_swift_size, new_image_swift_size)
@mock.patch('glance_store._drivers.swift.utils'
            '.is_multiple_swift_store_accounts_enabled',
            mock.Mock(return_value=True))
def test_add_no_container_and_multiple_containers_create(self):
    """
    Tests that adding an image with a non-existing container while using
    multi containers will create the container automatically if flag is set
    """
    expected_swift_size = FIVE_KB
    expected_swift_contents = b"*" * expected_swift_size
    expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
    expected_image_id = str(uuid.uuid4())
    # With a 2-char seed the container name is suffixed with the first
    # two characters of the image id.
    container = 'randomname_' + expected_image_id[:2]
    loc = 'swift+config://ref1/%s/%s'
    expected_location = loc % (container, expected_image_id)
    image_swift = six.BytesIO(expected_swift_contents)

    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0
    conf = copy.deepcopy(SWIFT_CONF)
    conf['swift_store_user'] = 'tenant:user'
    conf['swift_store_create_container_on_put'] = True
    conf['swift_store_container'] = 'randomname'
    conf['swift_store_multiple_containers_seed'] = 2
    self.config(**conf)
    moves.reload_module(swift)

    self.store = Store(self.conf)
    self.store.configure()

    loc, size, checksum, _ = self.store.add(expected_image_id,
                                            image_swift,
                                            expected_swift_size)

    self.assertEqual(expected_location, loc)
    self.assertEqual(expected_swift_size, size)
    self.assertEqual(expected_checksum, checksum)
    self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)

    loc = location.get_location_from_uri(expected_location, conf=self.conf)
    (new_image_swift, new_image_size) = self.store.get(loc)
    new_image_contents = b''.join([chunk for chunk in new_image_swift])
    new_image_swift_size = len(new_image_swift)

    self.assertEqual(expected_swift_contents, new_image_contents)
    self.assertEqual(expected_swift_size, new_image_swift_size)
@mock.patch('glance_store._drivers.swift.utils'
            '.is_multiple_swift_store_accounts_enabled',
            mock.Mock(return_value=True))
def test_add_no_container_and_multiple_containers_no_create(self):
    """
    Tests that adding an image with a non-existing container while using
    multiple containers raises an appropriate exception
    """
    conf = copy.deepcopy(SWIFT_CONF)
    conf['swift_store_user'] = 'tenant:user'
    conf['swift_store_create_container_on_put'] = False
    conf['swift_store_container'] = 'randomname'
    conf['swift_store_multiple_containers_seed'] = 2
    self.config(**conf)
    moves.reload_module(swift)

    expected_image_id = str(uuid.uuid4())
    # Seeded container name: base + first two chars of the image id.
    expected_container = 'randomname_' + expected_image_id[:2]

    self.store = Store(self.conf)
    self.store.configure()

    image_swift = six.BytesIO(b"nevergonnamakeit")

    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0

    # We check the exception text to ensure the container
    # missing text is found in it, otherwise, we would have
    # simply used self.assertRaises here
    exception_caught = False
    try:
        self.store.add(expected_image_id, image_swift, 0)
    except BackendException as e:
        exception_caught = True
        expected_msg = "container %s does not exist in Swift"
        expected_msg = expected_msg % expected_container
        self.assertIn(expected_msg, utils.exception_to_str(e))
    self.assertTrue(exception_caught)
    self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 0)
@mock.patch('glance_store._drivers.swift.utils'
            '.is_multiple_swift_store_accounts_enabled',
            mock.Mock(return_value=False))
def test_multi_container_doesnt_impact_multi_tenant_add(self):
    """MultiTenantStore names containers per image id even when the
    multiple-containers seed option is set."""
    expected_swift_size = FIVE_KB
    expected_swift_contents = b"*" * expected_swift_size
    expected_image_id = str(uuid.uuid4())
    expected_container = 'container_' + expected_image_id
    loc = 'swift+https://some_endpoint/%s/%s'
    expected_location = loc % (expected_container, expected_image_id)
    image_swift = six.BytesIO(expected_swift_contents)

    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0

    self.config(swift_store_container='container')
    self.config(swift_store_create_container_on_put=True)
    self.config(swift_store_multiple_containers_seed=2)
    # Stub endpoint discovery so the store builds a predictable URL.
    fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
    self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
    ctxt = mock.MagicMock(
        user='user', tenant='tenant', auth_token='123',
        service_catalog={})
    store = swift.MultiTenantStore(self.conf)
    store.configure()
    location, size, checksum, _ = store.add(expected_image_id, image_swift,
                                            expected_swift_size,
                                            context=ctxt)
    self.assertEqual(expected_location, location)
@mock.patch('glance_store._drivers.swift.utils'
            '.is_multiple_swift_store_accounts_enabled',
            mock.Mock(return_value=True))
def test_add_large_object(self):
    """
    Tests that adding a very large image. We simulate the large
    object by setting store.large_object_size to a small number
    and then verify that there have been a number of calls to
    put_object()...
    """
    expected_swift_size = FIVE_KB
    expected_swift_contents = b"*" * expected_swift_size
    expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
    expected_image_id = str(uuid.uuid4())
    loc = 'swift+config://ref1/glance/%s'
    expected_location = loc % (expected_image_id)
    image_swift = six.BytesIO(expected_swift_contents)

    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0

    self.store = Store(self.conf)
    self.store.configure()
    orig_max_size = self.store.large_object_size
    orig_temp_size = self.store.large_object_chunk_size
    try:
        # Shrink the thresholds to 1 KB so the 5 KB upload is forced
        # through the chunked (large-object) code path.
        self.store.large_object_size = units.Ki
        self.store.large_object_chunk_size = units.Ki
        loc, size, checksum, _ = self.store.add(expected_image_id,
                                                image_swift,
                                                expected_swift_size)
    finally:
        # Restore the store thresholds even if the add fails.
        self.store.large_object_chunk_size = orig_temp_size
        self.store.large_object_size = orig_max_size

    self.assertEqual(expected_location, loc)
    self.assertEqual(expected_swift_size, size)
    self.assertEqual(expected_checksum, checksum)
    # Expecting 6 objects to be created on Swift -- 5 chunks and 1
    # manifest.
    self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 6)

    loc = location.get_location_from_uri(expected_location, conf=self.conf)
    (new_image_swift, new_image_size) = self.store.get(loc)
    new_image_contents = b''.join([chunk for chunk in new_image_swift])
    new_image_swift_size = len(new_image_contents)

    self.assertEqual(expected_swift_contents, new_image_contents)
    self.assertEqual(expected_swift_size, new_image_swift_size)
    def test_add_large_object_zero_size(self):
        """
        Tests that adding an image to Swift which has both an unknown size and
        exceeds Swift's maximum limit of 5GB is correctly uploaded.
        We avoid the overhead of creating a 5GB object for this test by
        temporarily setting MAX_SWIFT_OBJECT_SIZE to 1KB, and then adding
        an object of 5KB.
        Bug lp:891738
        """
        # Set up a 'large' image of 5KB
        expected_swift_size = FIVE_KB
        expected_swift_contents = b"*" * expected_swift_size
        expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
        expected_image_id = str(uuid.uuid4())
        loc = 'swift+config://ref1/glance/%s'
        expected_location = loc % (expected_image_id)
        image_swift = six.BytesIO(expected_swift_contents)
        # Reset the counter incremented by the fake put_object stub.
        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS = 0
        # Temporarily set Swift MAX_SWIFT_OBJECT_SIZE to 1KB and add our image,
        # explicitly setting the image_length to 0
        self.store = Store(self.conf)
        self.store.configure()
        orig_max_size = self.store.large_object_size
        orig_temp_size = self.store.large_object_chunk_size
        global MAX_SWIFT_OBJECT_SIZE
        orig_max_swift_object_size = MAX_SWIFT_OBJECT_SIZE
        try:
            MAX_SWIFT_OBJECT_SIZE = units.Ki
            self.store.large_object_size = units.Ki
            self.store.large_object_chunk_size = units.Ki
            # image_length=0 means "unknown size" to the store.
            loc, size, checksum, _ = self.store.add(expected_image_id,
                                                    image_swift, 0)
        finally:
            # Restore module and store level settings for later tests.
            self.store.large_object_chunk_size = orig_temp_size
            self.store.large_object_size = orig_max_size
            MAX_SWIFT_OBJECT_SIZE = orig_max_swift_object_size
        self.assertEqual(expected_location, loc)
        self.assertEqual(expected_swift_size, size)
        self.assertEqual(expected_checksum, checksum)
        # Expecting 7 calls to put_object -- 5 chunks, a zero chunk which is
        # then deleted, and the manifest.  Note the difference with above
        # where the image_size is specified in advance (there's no zero chunk
        # in that case).
        self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 7)
        # Round-trip the image to confirm the chunks reassemble correctly.
        loc = location.get_location_from_uri(expected_location, conf=self.conf)
        (new_image_swift, new_image_size) = self.store.get(loc)
        new_image_contents = b''.join([chunk for chunk in new_image_swift])
        new_image_swift_size = len(new_image_contents)
        self.assertEqual(expected_swift_contents, new_image_contents)
        self.assertEqual(expected_swift_size, new_image_swift_size)
def test_add_already_existing(self):
"""
Tests that adding an image with an existing identifier
raises an appropriate exception
"""
self.store = Store(self.conf)
self.store.configure()
image_swift = six.BytesIO(b"nevergonnamakeit")
self.assertRaises(exceptions.Duplicate,
self.store.add,
FAKE_UUID, image_swift, 0)
def _option_required(self, key):
conf = self.getConfig()
conf[key] = None
try:
self.config(**conf)
self.store = Store(self.conf)
return not self.store.is_capable(
capabilities.BitMasks.WRITE_ACCESS)
except Exception:
return False
return False
def test_no_store_credentials(self):
"""
Tests that options without a valid credentials disables the add method
"""
self.store = Store(self.conf)
self.store.ref_params = {'ref1': {'auth_address':
'authurl.com', 'user': '',
'key': ''}}
self.store.configure()
self.assertFalse(self.store.is_capable(
capabilities.BitMasks.WRITE_ACCESS))
def test_no_auth_address(self):
"""
Tests that options without auth address disables the add method
"""
self.store = Store(self.conf)
self.store.ref_params = {'ref1': {'auth_address':
'', 'user': 'user1',
'key': 'key1'}}
self.store.configure()
self.assertFalse(self.store.is_capable(
capabilities.BitMasks.WRITE_ACCESS))
def test_delete(self):
"""
Test we can delete an existing image in the swift store
"""
conf = copy.deepcopy(SWIFT_CONF)
self.config(**conf)
moves.reload_module(swift)
self.store = Store(self.conf)
self.store.configure()
uri = "swift://%s:key@authurl/glance/%s" % (
self.swift_store_user, FAKE_UUID)
loc = location.get_location_from_uri(uri, conf=self.conf)
self.store.delete(loc)
self.assertRaises(exceptions.NotFound, self.store.get, loc)
    @mock.patch.object(swiftclient.client, 'delete_object')
    def test_delete_slo(self, mock_del_obj):
        """
        Test we can delete an existing image stored as SLO, static large object
        """
        conf = copy.deepcopy(SWIFT_CONF)
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        # FAKE_UUID2 is fixtured with an 'x-static-large-object' header, so
        # the store should treat this object as an SLO.
        uri = "swift://%s:key@authurl/glance/%s" % (self.swift_store_user,
                                                    FAKE_UUID2)
        loc = location.get_location_from_uri(uri, conf=self.conf)
        self.store.delete(loc)
        self.assertEqual(1, mock_del_obj.call_count)
        _, kwargs = mock_del_obj.call_args
        # SLOs must be deleted with multipart-manifest=delete so Swift
        # removes the segments together with the manifest.
        self.assertEqual('multipart-manifest=delete',
                         kwargs.get('query_string'))
@mock.patch.object(swiftclient.client, 'delete_object')
def test_delete_nonslo_not_deleted_as_slo(self, mock_del_obj):
"""
Test that non-SLOs are not being deleted the SLO way
"""
conf = copy.deepcopy(SWIFT_CONF)
self.config(**conf)
moves.reload_module(swift)
self.store = Store(self.conf)
self.store.configure()
uri = "swift://%s:key@authurl/glance/%s" % (self.swift_store_user,
FAKE_UUID)
loc = location.get_location_from_uri(uri, conf=self.conf)
self.store.delete(loc)
self.assertEqual(1, mock_del_obj.call_count)
_, kwargs = mock_del_obj.call_args
self.assertEqual(None, kwargs.get('query_string'))
def test_delete_with_reference_params(self):
"""
Test we can delete an existing image in the swift store
"""
conf = copy.deepcopy(SWIFT_CONF)
self.config(**conf)
moves.reload_module(swift)
self.store = Store(self.conf)
self.store.configure()
uri = "swift+config://ref1/glance/%s" % (FAKE_UUID)
loc = location.get_location_from_uri(uri, conf=self.conf)
self.store.delete(loc)
self.assertRaises(exceptions.NotFound, self.store.get, loc)
def test_delete_non_existing(self):
"""
Test that trying to delete a swift that doesn't exist
raises an error
"""
conf = copy.deepcopy(SWIFT_CONF)
self.config(**conf)
moves.reload_module(swift)
self.store = Store(self.conf)
self.store.configure()
loc = location.get_location_from_uri(
"swift://%s:key@authurl/glance/noexist" % (self.swift_store_user),
conf=self.conf)
self.assertRaises(exceptions.NotFound, self.store.delete, loc)
    def test_delete_with_some_segments_failing(self):
        """
        Tests that delete of a segmented object recovers from error(s) while
        deleting one or more segments.
        To test this we add a segmented object first and then delete it, while
        simulating errors on one or more segments.
        """
        test_image_id = str(uuid.uuid4())
        def fake_head_object(container, object_name):
            # Pretend the object is segmented by reporting a manifest header.
            object_manifest = '/'.join([container, object_name]) + '-'
            return {'x-object-manifest': object_manifest}
        def fake_get_container(container, **kwargs):
            # Returning 5 fake segments
            return None, [{'name': '%s-%03d' % (test_image_id, x)}
                          for x in range(1, 6)]
        def fake_delete_object(container, object_name):
            # Simulate error on 1st and 3rd segments
            global SWIFT_DELETE_OBJECT_CALLS
            SWIFT_DELETE_OBJECT_CALLS += 1
            if object_name.endswith('-001') or object_name.endswith('-003'):
                raise swiftclient.ClientException('Object DELETE failed')
            else:
                pass
        conf = copy.deepcopy(SWIFT_CONF)
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        loc_uri = "swift+https://%s:key@localhost:8080/glance/%s"
        loc_uri = loc_uri % (self.swift_store_user, test_image_id)
        loc = location.get_location_from_uri(loc_uri)
        # Patch the connection's methods directly so the fakes above drive
        # the segment enumeration and deletion.
        conn = self.store.get_connection(loc.store_location)
        conn.delete_object = fake_delete_object
        conn.head_object = fake_head_object
        conn.get_container = fake_get_container
        global SWIFT_DELETE_OBJECT_CALLS
        SWIFT_DELETE_OBJECT_CALLS = 0
        # Must not raise despite the two simulated segment failures.
        self.store.delete(loc, connection=conn)
        # Expecting 6 delete calls, 5 for the segments and 1 for the manifest
        self.assertEqual(SWIFT_DELETE_OBJECT_CALLS, 6)
def test_read_acl_public(self):
"""
Test that we can set a public read acl.
"""
self.config(swift_store_multi_tenant=True)
store = Store(self.conf)
store.configure()
uri = "swift+http://storeurl/glance/%s" % FAKE_UUID
loc = location.get_location_from_uri(uri, conf=self.conf)
ctxt = mock.MagicMock()
store.set_acls(loc, public=True, context=ctxt)
container_headers = swiftclient.client.head_container('x', 'y',
'glance')
self.assertEqual(container_headers['X-Container-Read'],
"*:*")
def test_read_acl_tenants(self):
"""
Test that we can set read acl for tenants.
"""
self.config(swift_store_multi_tenant=True)
store = Store(self.conf)
store.configure()
uri = "swift+http://storeurl/glance/%s" % FAKE_UUID
loc = location.get_location_from_uri(uri, conf=self.conf)
read_tenants = ['matt', 'mark']
ctxt = mock.MagicMock()
store.set_acls(loc, read_tenants=read_tenants, context=ctxt)
container_headers = swiftclient.client.head_container('x', 'y',
'glance')
self.assertEqual(container_headers['X-Container-Read'],
'matt:*,mark:*')
def test_write_acls(self):
"""
Test that we can set write acl for tenants.
"""
self.config(swift_store_multi_tenant=True)
store = Store(self.conf)
store.configure()
uri = "swift+http://storeurl/glance/%s" % FAKE_UUID
loc = location.get_location_from_uri(uri, conf=self.conf)
read_tenants = ['frank', 'jim']
ctxt = mock.MagicMock()
store.set_acls(loc, write_tenants=read_tenants, context=ctxt)
container_headers = swiftclient.client.head_container('x', 'y',
'glance')
self.assertEqual(container_headers['X-Container-Write'],
'frank:*,jim:*')
class TestStoreAuthV1(base.StoreBaseTest, SwiftTests,
                      test_store_capabilities.TestStoreCapabilitiesChecking):
    """Run the shared SwiftTests mix-in against Swift auth version '1'."""
    _CONF = cfg.CONF
    def getConfig(self):
        # Base Swift config plus v1-style 'tenant:user' credentials.
        conf = SWIFT_CONF.copy()
        conf['swift_store_auth_version'] = '1'
        conf['swift_store_user'] = 'tenant:user1'
        return conf
    def setUp(self):
        """Establish a clean test environment."""
        super(TestStoreAuthV1, self).setUp()
        conf = self.getConfig()
        conf_file = 'glance-swift.conf'
        # Copy the canned reference file into the test dir so the 'ref1'
        # default_swift_reference can be resolved.
        self.swift_config_file = self.copy_data_file(conf_file, self.test_dir)
        conf.update({'swift_store_config_file': self.swift_config_file})
        moxfixture = self.useFixture(moxstubout.MoxStubout())
        self.stubs = moxfixture.stubs
        # Replace swiftclient's module-level calls with the in-memory fakes.
        stub_out_swiftclient(self.stubs, conf['swift_store_auth_version'])
        self.store = Store(self.conf)
        self.config(**conf)
        self.store.configure()
        self.register_store_schemes(self.store, 'swift')
        self.addCleanup(self.conf.reset)
class TestStoreAuthV2(TestStoreAuthV1):
    """Re-run the shared auth tests with auth version '2' credentials."""
    def getConfig(self):
        conf = super(TestStoreAuthV2, self).getConfig()
        conf['swift_store_auth_version'] = '2'
        conf['swift_store_user'] = 'tenant:user1'
        return conf
    def test_v2_with_no_tenant(self):
        # v2 auth requires a 'tenant:user' pair; a bare user in the URI
        # must be rejected as a bad store URI.
        uri = "swift://failme:key@auth_address/glance/%s" % (FAKE_UUID)
        loc = location.get_location_from_uri(uri, conf=self.conf)
        self.assertRaises(exceptions.BadStoreUri,
                          self.store.get,
                          loc)
    def test_v2_multi_tenant_location(self):
        # Multi-tenant URIs carry no credentials but still parse as swift.
        conf = self.getConfig()
        conf['swift_store_multi_tenant'] = True
        uri = "swift://auth_address/glance/%s" % (FAKE_UUID)
        loc = location.get_location_from_uri(uri, conf=self.conf)
        self.assertEqual('swift', loc.store_name)
class TestStoreAuthV3(TestStoreAuthV1):
    """Re-run the shared auth tests with auth version '3' credentials."""
    def getConfig(self):
        conf = super(TestStoreAuthV3, self).getConfig()
        conf.update({'swift_store_auth_version': '3',
                     'swift_store_user': 'tenant:user1'})
        return conf
class FakeConnection(object):
    """Minimal stand-in for swiftclient's Connection.

    Records the constructor arguments the store passes in so tests can
    assert on them; performs no network activity.  The signature mirrors
    swiftclient's; retries, starting_backoff and ssl_compression are
    accepted for compatibility but not stored.
    """
    def __init__(self, authurl, user, key, retries=5, preauthurl=None,
                 preauthtoken=None, starting_backoff=1, tenant_name=None,
                 os_options=None, auth_version="1", insecure=False,
                 ssl_compression=True, cacert=None):
        self.authurl = authurl
        self.user = user
        self.key = key
        self.preauthurl = preauthurl
        self.preauthtoken = preauthtoken
        self.tenant_name = tenant_name
        # Default to a fresh dict, never a shared mutable default.
        self.os_options = {} if os_options is None else os_options
        self.auth_version = auth_version
        self.insecure = insecure
        self.cacert = cacert
class TestSingleTenantStoreConnections(base.StoreBaseTest):
    """Verify how SingleTenantStore builds swiftclient connections."""
    _CONF = cfg.CONF
    def setUp(self):
        super(TestSingleTenantStoreConnections, self).setUp()
        moxfixture = self.useFixture(moxstubout.MoxStubout())
        self.stubs = moxfixture.stubs
        # Swap in FakeConnection so constructor arguments can be inspected
        # without contacting a real Swift endpoint.
        self.stubs.Set(swiftclient, 'Connection', FakeConnection)
        self.store = swift.SingleTenantStore(self.conf)
        self.store.configure()
        specs = {'scheme': 'swift',
                 'auth_or_store_url': 'example.com/v2/',
                 'user': 'tenant:user1',
                 'key': 'key1',
                 'container': 'cont',
                 'obj': 'object'}
        self.location = swift.StoreLocation(specs, self.conf)
        self.addCleanup(self.conf.reset)
    def test_basic_connection(self):
        # Defaults: https scheme, auth v2, tenant split out of 'tenant:user'.
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.authurl, 'https://example.com/v2/')
        self.assertEqual(connection.auth_version, '2')
        self.assertEqual(connection.user, 'user1')
        self.assertEqual(connection.tenant_name, 'tenant')
        self.assertEqual(connection.key, 'key1')
        self.assertIsNone(connection.preauthurl)
        self.assertFalse(connection.insecure)
        self.assertEqual(connection.os_options,
                         {'service_type': 'object-store',
                          'endpoint_type': 'publicURL'})
    def test_connection_with_conf_endpoint(self):
        # A configured swift_store_endpoint becomes the preauth URL.
        ctx = mock.MagicMock(user='tenant:user1', tenant='tenant')
        self.config(swift_store_endpoint='https://internal.com')
        self.store.configure()
        connection = self.store.get_connection(self.location, context=ctx)
        self.assertEqual(connection.authurl, 'https://example.com/v2/')
        self.assertEqual(connection.auth_version, '2')
        self.assertEqual(connection.user, 'user1')
        self.assertEqual(connection.tenant_name, 'tenant')
        self.assertEqual(connection.key, 'key1')
        self.assertEqual(connection.preauthurl, 'https://internal.com')
        self.assertFalse(connection.insecure)
        self.assertEqual(connection.os_options,
                         {'service_type': 'object-store',
                          'endpoint_type': 'publicURL'})
    def test_connection_with_conf_endpoint_no_context(self):
        # The configured endpoint is honoured even without a context.
        self.config(swift_store_endpoint='https://internal.com')
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.authurl, 'https://example.com/v2/')
        self.assertEqual(connection.auth_version, '2')
        self.assertEqual(connection.user, 'user1')
        self.assertEqual(connection.tenant_name, 'tenant')
        self.assertEqual(connection.key, 'key1')
        self.assertEqual(connection.preauthurl, 'https://internal.com')
        self.assertFalse(connection.insecure)
        self.assertEqual(connection.os_options,
                         {'service_type': 'object-store',
                          'endpoint_type': 'publicURL'})
    def test_connection_with_no_trailing_slash(self):
        # A trailing slash is appended to the auth URL if missing.
        self.location.auth_or_store_url = 'example.com/v2'
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.authurl, 'https://example.com/v2/')
    def test_connection_insecure(self):
        self.config(swift_store_auth_insecure=True)
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertTrue(connection.insecure)
    def test_connection_with_auth_v1(self):
        # v1 auth takes a plain user name; no tenant is derived.
        self.config(swift_store_auth_version='1')
        self.store.configure()
        self.location.user = 'auth_v1_user'
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.auth_version, '1')
        self.assertEqual(connection.user, 'auth_v1_user')
        self.assertIsNone(connection.tenant_name)
    def test_connection_invalid_user(self):
        # More than one ':' in the user spec is rejected.
        self.store.configure()
        self.location.user = 'invalid:format:user'
        self.assertRaises(exceptions.BadStoreUri,
                          self.store.get_connection, self.location)
    def test_connection_missing_user(self):
        self.store.configure()
        self.location.user = None
        self.assertRaises(exceptions.BadStoreUri,
                          self.store.get_connection, self.location)
    def test_connection_with_region(self):
        # swift_store_region is forwarded to swiftclient via os_options.
        self.config(swift_store_region='Sahara')
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.os_options,
                         {'region_name': 'Sahara',
                          'service_type': 'object-store',
                          'endpoint_type': 'publicURL'})
    def test_connection_with_service_type(self):
        self.config(swift_store_service_type='shoe-store')
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.os_options,
                         {'service_type': 'shoe-store',
                          'endpoint_type': 'publicURL'})
    def test_connection_with_endpoint_type(self):
        self.config(swift_store_endpoint_type='internalURL')
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.os_options,
                         {'service_type': 'object-store',
                          'endpoint_type': 'internalURL'})
    def test_bad_location_uri(self):
        # Malformed scheme/URI shapes must raise BadStoreUri on parse.
        self.store.configure()
        self.location.uri = 'http://bad_uri://'
        self.assertRaises(exceptions.BadStoreUri,
                          self.location.parse_uri,
                          self.location.uri)
    def test_bad_location_uri_invalid_credentials(self):
        self.store.configure()
        self.location.uri = 'swift://bad_creds@uri/cont/obj'
        self.assertRaises(exceptions.BadStoreUri,
                          self.location.parse_uri,
                          self.location.uri)
    def test_bad_location_uri_invalid_object_path(self):
        # A container without an object component is not a valid location.
        self.store.configure()
        self.location.uri = 'swift://user:key@uri/cont'
        self.assertRaises(exceptions.BadStoreUri,
                          self.location.parse_uri,
                          self.location.uri)
    def test_ref_overrides_defaults(self):
        # Values from the swift reference take precedence over the
        # swift_store_* configuration options.
        self.config(swift_store_auth_version='2',
                    swift_store_user='testuser',
                    swift_store_key='testpass',
                    swift_store_auth_address='testaddress',
                    swift_store_endpoint_type='internalURL',
                    swift_store_config_file='somefile')
        self.store.ref_params = {'ref1': {'auth_address': 'authurl.com',
                                          'auth_version': '3',
                                          'user': 'user:pass',
                                          'user_domain_id': 'default',
                                          'user_domain_name': 'ignored',
                                          'project_domain_id': 'default',
                                          'project_domain_name': 'ignored'}}
        self.store.configure()
        self.assertEqual('user:pass', self.store.user)
        self.assertEqual('3', self.store.auth_version)
        self.assertEqual('authurl.com', self.store.auth_address)
        self.assertEqual('default', self.store.user_domain_id)
        self.assertEqual('ignored', self.store.user_domain_name)
        self.assertEqual('default', self.store.project_domain_id)
        self.assertEqual('ignored', self.store.project_domain_name)
    def test_with_v3_auth(self):
        # Keystone v3 domain settings must be passed through os_options.
        self.store.ref_params = {'ref1': {'auth_address': 'authurl.com',
                                          'auth_version': '3',
                                          'user': 'user:pass',
                                          'key': 'password',
                                          'user_domain_id': 'default',
                                          'user_domain_name': 'ignored',
                                          'project_domain_id': 'default',
                                          'project_domain_name': 'ignored'}}
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual('3', connection.auth_version)
        self.assertEqual(connection.os_options,
                         {'service_type': 'object-store',
                          'endpoint_type': 'publicURL',
                          'user_domain_id': 'default',
                          'user_domain_name': 'ignored',
                          'project_domain_id': 'default',
                          'project_domain_name': 'ignored'})
class TestMultiTenantStoreConnections(base.StoreBaseTest):
    """Verify how MultiTenantStore builds swiftclient connections."""
    def setUp(self):
        super(TestMultiTenantStoreConnections, self).setUp()
        moxfixture = self.useFixture(moxstubout.MoxStubout())
        self.stubs = moxfixture.stubs
        self.stubs.Set(swiftclient, 'Connection', FakeConnection)
        # Multi-tenant stores authenticate with the request context's token
        # rather than configured credentials.
        self.context = mock.MagicMock(
            user='tenant:user1', tenant='tenant', auth_token='0123')
        self.store = swift.MultiTenantStore(self.conf)
        specs = {'scheme': 'swift',
                 'auth_or_store_url': 'example.com',
                 'container': 'cont',
                 'obj': 'object'}
        self.location = swift.StoreLocation(specs, self.conf)
        self.addCleanup(self.conf.reset)
    def test_basic_connection(self):
        self.store.configure()
        connection = self.store.get_connection(self.location,
                                               context=self.context)
        # No auth URL or key: the store URL and the context's token are
        # used directly as preauth values.
        self.assertIsNone(connection.authurl)
        self.assertEqual(connection.auth_version, '2')
        self.assertEqual(connection.user, 'tenant:user1')
        self.assertEqual(connection.tenant_name, 'tenant')
        self.assertIsNone(connection.key)
        self.assertEqual(connection.preauthurl, 'https://example.com')
        self.assertEqual(connection.preauthtoken, '0123')
        self.assertEqual(connection.os_options, {})
class TestMultiTenantStoreContext(base.StoreBaseTest):
    """Check that the request context's auth token reaches Swift over HTTP."""
    _CONF = cfg.CONF
    def setUp(self):
        """Establish a clean test environment."""
        super(TestMultiTenantStoreContext, self).setUp()
        conf = SWIFT_CONF.copy()
        self.store = Store(self.conf)
        self.config(**conf)
        self.store.configure()
        self.register_store_schemes(self.store, 'swift')
        # Minimal service catalog pointing object-store at a local URL so
        # requests_mock can intercept the HTTP traffic.
        self.service_catalog = [{
            "name": "Object Storage",
            "type": "object-store",
            "endpoints": [{
                "publicURL": "http://127.0.0.1:0",
                "region": "region1",
                "versionId": "1.0",
            }]
        }]
        self.addCleanup(self.conf.reset)
    @requests_mock.mock()
    def test_download_context(self, m):
        """Verify context (ie token) is passed to swift on download."""
        self.config(swift_store_multi_tenant=True)
        store = Store(self.conf)
        store.configure()
        uri = "swift+http://127.0.0.1/glance_123/123"
        loc = location.get_location_from_uri(uri, conf=self.conf)
        ctx = mock.MagicMock(
            service_catalog=self.service_catalog, user='tenant:user1',
            tenant='tenant', auth_token='0123')
        m.get("http://127.0.0.1/glance_123/123")
        store.get(loc, context=ctx)
        # The GET issued by the store must carry the context's token.
        self.assertEqual(b'0123', m.last_request.headers['X-Auth-Token'])
    @requests_mock.mock()
    def test_upload_context(self, m):
        """Verify context (ie token) is passed to swift on upload."""
        head_req = m.head("http://127.0.0.1/glance_123",
                          text='Some data',
                          status_code=201)
        put_req = m.put("http://127.0.0.1/glance_123/123")
        self.config(swift_store_multi_tenant=True)
        store = Store(self.conf)
        store.configure()
        content = b'Some data'
        pseudo_file = six.BytesIO(content)
        ctx = mock.MagicMock(
            service_catalog=self.service_catalog, user='tenant:user1',
            tenant='tenant', auth_token='0123')
        store.add('123', pseudo_file, len(content),
                  context=ctx)
        # Both the container HEAD and the object PUT must carry the token.
        self.assertEqual(b'0123',
                         head_req.last_request.headers['X-Auth-Token'])
        self.assertEqual(b'0123',
                         put_req.last_request.headers['X-Auth-Token'])
class FakeGetEndpoint(object):
    """Callable double for auth.get_endpoint that records its arguments."""
    def __init__(self, response):
        # Canned endpoint URL to hand back on every call.
        self.response = response
    def __call__(self, service_catalog, service_type=None,
                 endpoint_region=None, endpoint_type=None):
        # Remember what the caller asked for so tests can assert on it.
        self.service_type, self.endpoint_region, self.endpoint_type = (
            service_type, endpoint_region, endpoint_type)
        return self.response
class TestCreatingLocations(base.StoreBaseTest):
    """Verify create_location() output for single- and multi-tenant stores."""
    _CONF = cfg.CONF
    def setUp(self):
        super(TestCreatingLocations, self).setUp()
        moxfixture = self.useFixture(moxstubout.MoxStubout())
        self.stubs = moxfixture.stubs
        conf = copy.deepcopy(SWIFT_CONF)
        self.store = Store(self.conf)
        self.config(**conf)
        moves.reload_module(swift)
        self.addCleanup(self.conf.reset)
    def test_single_tenant_location(self):
        # The 'ref1' reference in glance-swift.conf supplies the auth URL
        # and credentials embedded in the generated location.
        conf = copy.deepcopy(SWIFT_CONF)
        conf['swift_store_container'] = 'container'
        conf_file = "glance-swift.conf"
        self.swift_config_file = self.copy_data_file(conf_file, self.test_dir)
        conf.update({'swift_store_config_file': self.swift_config_file})
        conf['default_swift_reference'] = 'ref1'
        self.config(**conf)
        moves.reload_module(swift)
        store = swift.SingleTenantStore(self.conf)
        store.configure()
        location = store.create_location('image-id')
        self.assertEqual(location.scheme, 'swift+https')
        self.assertEqual(location.swift_url, 'https://example.com')
        self.assertEqual(location.container, 'container')
        self.assertEqual(location.obj, 'image-id')
        self.assertEqual(location.user, 'tenant:user1')
        self.assertEqual(location.key, 'key1')
    def test_single_tenant_location_http(self):
        # The 'ref2' reference uses a plain-http auth address, so the
        # resulting scheme is swift+http.
        conf_file = "glance-swift.conf"
        test_dir = self.useFixture(fixtures.TempDir()).path
        self.swift_config_file = self.copy_data_file(conf_file, test_dir)
        self.config(swift_store_container='container',
                    default_swift_reference='ref2',
                    swift_store_config_file=self.swift_config_file)
        store = swift.SingleTenantStore(self.conf)
        store.configure()
        location = store.create_location('image-id')
        self.assertEqual(location.scheme, 'swift+http')
        self.assertEqual(location.swift_url, 'http://example.com')
    def test_multi_tenant_location(self):
        # Multi-tenant locations embed the catalog endpoint, carry no
        # credentials, and use a per-image container name.
        self.config(swift_store_container='container')
        fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
        self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
        ctxt = mock.MagicMock(
            user='user', tenant='tenant', auth_token='123',
            service_catalog={})
        store = swift.MultiTenantStore(self.conf)
        store.configure()
        location = store.create_location('image-id', context=ctxt)
        self.assertEqual(location.scheme, 'swift+https')
        self.assertEqual(location.swift_url, 'https://some_endpoint')
        self.assertEqual(location.container, 'container_image-id')
        self.assertEqual(location.obj, 'image-id')
        self.assertIsNone(location.user)
        self.assertIsNone(location.key)
        self.assertEqual(fake_get_endpoint.service_type, 'object-store')
    def test_multi_tenant_location_http(self):
        # An http endpoint yields a swift+http location scheme.
        fake_get_endpoint = FakeGetEndpoint('http://some_endpoint')
        self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
        ctxt = mock.MagicMock(
            user='user', tenant='tenant', auth_token='123',
            service_catalog={})
        store = swift.MultiTenantStore(self.conf)
        store.configure()
        location = store.create_location('image-id', context=ctxt)
        self.assertEqual(location.scheme, 'swift+http')
        self.assertEqual(location.swift_url, 'http://some_endpoint')
    def test_multi_tenant_location_with_region(self):
        # swift_store_region must be forwarded to the endpoint lookup.
        self.config(swift_store_region='WestCarolina')
        fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
        self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
        ctxt = mock.MagicMock(
            user='user', tenant='tenant', auth_token='123',
            service_catalog={})
        store = swift.MultiTenantStore(self.conf)
        store.configure()
        store._get_endpoint(ctxt)
        self.assertEqual(fake_get_endpoint.endpoint_region, 'WestCarolina')
    def test_multi_tenant_location_custom_service_type(self):
        # swift_store_service_type must be forwarded to the endpoint lookup.
        self.config(swift_store_service_type='toy-store')
        fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
        self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
        ctxt = mock.MagicMock(
            user='user', tenant='tenant', auth_token='123',
            service_catalog={})
        store = swift.MultiTenantStore(self.conf)
        store.configure()
        store._get_endpoint(ctxt)
        self.assertEqual(fake_get_endpoint.service_type, 'toy-store')
    def test_multi_tenant_location_custom_endpoint_type(self):
        # swift_store_endpoint_type must be forwarded to the endpoint lookup.
        self.config(swift_store_endpoint_type='InternalURL')
        fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
        self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
        ctxt = mock.MagicMock(
            user='user', tenant='tenant', auth_token='123',
            service_catalog={})
        store = swift.MultiTenantStore(self.conf)
        store.configure()
        store._get_endpoint(ctxt)
        self.assertEqual(fake_get_endpoint.endpoint_type, 'InternalURL')
class TestChunkReader(base.StoreBaseTest):
    """Exercise swift.ChunkReader against a real temporary file."""
    _CONF = cfg.CONF
    def setUp(self):
        super(TestChunkReader, self).setUp()
        conf = copy.deepcopy(SWIFT_CONF)
        Store(self.conf)
        self.config(**conf)
    def test_read_all_data(self):
        """
        Replicate what goes on in the Swift driver with the
        repeated creation of the ChunkReader object
        """
        CHUNKSIZE = 100
        checksum = hashlib.md5()
        data_file = tempfile.NamedTemporaryFile()
        data_file.write(b'*' * units.Ki)
        data_file.flush()
        infile = open(data_file.name, 'rb')
        bytes_read = 0
        while True:
            # A fresh reader per chunk, mirroring the driver's usage; the
            # underlying file position carries over between readers.
            cr = swift.ChunkReader(infile, checksum, CHUNKSIZE)
            chunk = cr.read(CHUNKSIZE)
            bytes_read += len(chunk)
            if not chunk:
                break
        # Every byte of the 1KiB file must have been consumed.
        self.assertEqual(units.Ki, bytes_read)
        data_file.close()
        infile.close()
class TestMultipleContainers(base.StoreBaseTest):
    """Verify per-image container naming driven by the containers seed."""
    _CONF = cfg.CONF
    def setUp(self):
        super(TestMultipleContainers, self).setUp()
        # Seed of 3: a 3-character prefix of the image id is appended to
        # the default container name.
        self.config(swift_store_multiple_containers_seed=3)
        self.store = swift.SingleTenantStore(self.conf)
        self.store.configure()
    def test_get_container_name_happy_path_with_seed_three(self):
        test_image_id = 'fdae39a1-bac5-4238-aba4-69bcc726e848'
        actual = self.store.get_container_name(test_image_id,
                                               'default_container')
        expected = 'default_container_fda'
        self.assertEqual(expected, actual)
    def test_get_container_name_with_negative_seed(self):
        # Negative seeds are invalid configuration.
        self.config(swift_store_multiple_containers_seed=-1)
        self.store = swift.SingleTenantStore(self.conf)
        test_image_id = 'random_id'
        self.assertRaises(exceptions.BadStoreConfiguration,
                          self.store.get_container_name, test_image_id,
                          'default_container')
    def test_get_container_name_with_seed_beyond_max(self):
        # Valid seed range is 0..32; 33 must be rejected.
        self.config(swift_store_multiple_containers_seed=33)
        self.store = swift.SingleTenantStore(self.conf)
        test_image_id = 'random_id'
        self.assertRaises(exceptions.BadStoreConfiguration,
                          self.store.get_container_name, test_image_id,
                          'default_container')
    def test_get_container_name_with_max_seed(self):
        # Seed of 32 keeps the full uuid as the suffix.
        self.config(swift_store_multiple_containers_seed=32)
        self.store = swift.SingleTenantStore(self.conf)
        test_image_id = 'fdae39a1-bac5-4238-aba4-69bcc726e848'
        actual = self.store.get_container_name(test_image_id,
                                               'default_container')
        expected = 'default_container_' + test_image_id
        self.assertEqual(expected, actual)
    def test_get_container_name_with_dash(self):
        # The dash is kept in the prefix but does not count toward the
        # seed length (seed 10 -> 'fdae39a1-ba', 11 characters).
        self.config(swift_store_multiple_containers_seed=10)
        self.store = swift.SingleTenantStore(self.conf)
        test_image_id = 'fdae39a1-bac5-4238-aba4-69bcc726e848'
        actual = self.store.get_container_name(test_image_id,
                                               'default_container')
        expected = 'default_container_' + 'fdae39a1-ba'
        self.assertEqual(expected, actual)
    def test_get_container_name_with_min_seed(self):
        self.config(swift_store_multiple_containers_seed=1)
        self.store = swift.SingleTenantStore(self.conf)
        test_image_id = 'fdae39a1-bac5-4238-aba4-69bcc726e848'
        actual = self.store.get_container_name(test_image_id,
                                               'default_container')
        expected = 'default_container_' + 'f'
        self.assertEqual(expected, actual)
    def test_get_container_name_with_multiple_containers_turned_off(self):
        # Seed of 0 disables suffixing entirely.
        self.config(swift_store_multiple_containers_seed=0)
        self.store.configure()
        test_image_id = 'random_id'
        actual = self.store.get_container_name(test_image_id,
                                               'default_container')
        expected = 'default_container'
        self.assertEqual(expected, actual)
| 41.278338 | 79 | 0.62328 |
import copy
import fixtures
import hashlib
import mock
import tempfile
import uuid
from oslo_config import cfg
from oslo_utils import units
from oslotest import moxstubout
import requests_mock
import six
from six import moves
from six.moves import http_client
from six.moves import range
import swiftclient
from glance_store._drivers.swift import store as swift
from glance_store import backend
from glance_store import BackendException
from glance_store import capabilities
from glance_store.common import auth
from glance_store.common import utils
from glance_store import exceptions
from glance_store import location
from glance_store.tests import base
from glance_store.tests.unit import test_store_capabilities
CONF = cfg.CONF
# NOTE(review): FAKE_UUID/FAKE_UUID2 are *callables* that are interpolated
# into strings below without being called, so the (stable) repr of each
# lambda object is what actually keys the fixtures.  Consistent within a
# run, but confirm this is intentional before changing it.
FAKE_UUID = lambda: str(uuid.uuid4())
FAKE_UUID2 = lambda: str(uuid.uuid4())
# Convenience alias for the swift driver's store class.
Store = swift.Store
FIVE_KB = 5 * units.Ki
FIVE_GB = 5 * units.Gi
# Ceiling the fake put_object enforces; tests shrink it to force chunking.
MAX_SWIFT_OBJECT_SIZE = FIVE_GB
# Incremented by the fake put_object stub; tests reset it to count uploads.
SWIFT_PUT_OBJECT_CALLS = 0
# Baseline store configuration shared by the test cases.  The original
# literal repeated 'swift_store_auth_address' and 'swift_store_container'
# with identical values; the duplicate keys were removed (no behavior
# change -- later duplicates simply overwrote the earlier ones).
SWIFT_CONF = {'swift_store_auth_address': 'localhost:8080',
              'swift_store_container': 'glance',
              'swift_store_user': 'user',
              'swift_store_key': 'key',
              'swift_store_retry_get_count': 1,
              'default_swift_reference': 'ref1'
              }
def stub_out_swiftclient(stubs, swift_store_auth_version):
fixture_containers = ['glance']
fixture_container_headers = {}
fixture_headers = {
'glance/%s' % FAKE_UUID: {
'content-length': FIVE_KB,
'etag': 'c2e5db72bd7fd153f53ede5da5a06de3'
},
'glance/%s' % FAKE_UUID2: {'x-static-large-object': 'true', },
}
fixture_objects = {'glance/%s' % FAKE_UUID: six.BytesIO(b"*" * FIVE_KB),
'glance/%s' % FAKE_UUID2: six.BytesIO(b"*" * FIVE_KB), }
def fake_head_container(url, token, container, **kwargs):
if container not in fixture_containers:
msg = "No container %s found" % container
status = http_client.NOT_FOUND
raise swiftclient.ClientException(msg, http_status=status)
return fixture_container_headers
def fake_put_container(url, token, container, **kwargs):
fixture_containers.append(container)
def fake_post_container(url, token, container, headers, http_conn=None):
for key, value in six.iteritems(headers):
fixture_container_headers[key] = value
def fake_put_object(url, token, container, name, contents, **kwargs):
global SWIFT_PUT_OBJECT_CALLS
SWIFT_PUT_OBJECT_CALLS += 1
CHUNKSIZE = 64 * units.Ki
fixture_key = "%s/%s" % (container, name)
if fixture_key not in fixture_headers:
if kwargs.get('headers'):
etag = kwargs['headers']['ETag']
manifest = kwargs.get('headers').get('X-Object-Manifest')
fixture_headers[fixture_key] = {'manifest': True,
'etag': etag,
'x-object-manifest': manifest}
fixture_objects[fixture_key] = None
return etag
if hasattr(contents, 'read'):
fixture_object = six.BytesIO()
read_len = 0
chunk = contents.read(CHUNKSIZE)
checksum = hashlib.md5()
while chunk:
fixture_object.write(chunk)
read_len += len(chunk)
checksum.update(chunk)
chunk = contents.read(CHUNKSIZE)
etag = checksum.hexdigest()
else:
fixture_object = six.BytesIO(contents)
read_len = len(contents)
etag = hashlib.md5(fixture_object.getvalue()).hexdigest()
if read_len > MAX_SWIFT_OBJECT_SIZE:
msg = ('Image size:%d exceeds Swift max:%d' %
(read_len, MAX_SWIFT_OBJECT_SIZE))
raise swiftclient.ClientException(
msg, http_status=http_client.REQUEST_ENTITY_TOO_LARGE)
fixture_objects[fixture_key] = fixture_object
fixture_headers[fixture_key] = {
'content-length': read_len,
'etag': etag}
return etag
else:
msg = ("Object PUT failed - Object with key %s already exists"
% fixture_key)
raise swiftclient.ClientException(msg,
http_status=http_client.CONFLICT)
def fake_get_object(url, token, container, name, **kwargs):
    """Stub for swiftclient get_object; returns (headers, body).

    Manifest entries are reassembled by concatenating their stored
    segments; a 'Range' request header of the open-ended 'bytes=N-'
    form slices the body accordingly.
    """
    fixture_key = "%s/%s" % (container, name)
    if fixture_key not in fixture_headers:
        msg = "Object GET failed"
        status = http_client.NOT_FOUND
        raise swiftclient.ClientException(msg, http_status=status)
    byte_range = None
    headers = kwargs.get('headers', dict())
    if headers is not None:
        # Header names are matched case-insensitively, like real Swift.
        headers = dict((k.lower(), v) for k, v in six.iteritems(headers))
        if 'range' in headers:
            byte_range = headers.get('range')
    fixture = fixture_headers[fixture_key]
    if 'manifest' in fixture:
        # Segment keys all share the manifest key as prefix and sort
        # lexically into upload order.
        chunk_keys = sorted([k for k in fixture_headers.keys()
                             if k.startswith(fixture_key) and
                             k != fixture_key])
        result = six.BytesIO()
        for key in chunk_keys:
            result.write(fixture_objects[key].getvalue())
    else:
        result = fixture_objects[fixture_key]
    if byte_range is not None:
        # Only the start offset of the range is honored here.
        start = int(byte_range.split('=')[1].strip('-'))
        result = six.BytesIO(result.getvalue()[start:])
        fixture_headers[fixture_key]['content-length'] = len(
            result.getvalue())
    return fixture_headers[fixture_key], result
def fake_head_object(url, token, container, name, **kwargs):
    """Stub for swiftclient head_object backed by fixture_headers."""
    fixture_key = "%s/%s" % (container, name)
    if fixture_key not in fixture_headers:
        raise swiftclient.ClientException(
            "Object HEAD failed - Object does not exist",
            http_status=http_client.NOT_FOUND)
    return fixture_headers[fixture_key]
def fake_delete_object(url, token, container, name, **kwargs):
    """Stub for swiftclient delete_object; drops the fixture entries."""
    fixture_key = "%s/%s" % (container, name)
    if fixture_key not in fixture_headers:
        raise swiftclient.ClientException(
            "Object DELETE failed - Object does not exist",
            http_status=http_client.NOT_FOUND)
    del fixture_headers[fixture_key]
    del fixture_objects[fixture_key]
def fake_http_connection(*args, **kwargs):
    """Stub for swiftclient http_connection; no real connection made."""
    return None
def fake_get_auth(url, user, key, auth_version, **kwargs):
    """Stub for swiftclient get_auth; only validates url/auth version."""
    if url is None:
        return None, None
    # Catch malformed URLs such as 'http/foo' missing the '://'.
    if 'http' in url and '://' not in url:
        raise ValueError('Invalid url %s' % url)
    # Mimic an authentication failure when the requested auth version
    # does not match the one this stub was configured with.
    if swift_store_auth_version != auth_version:
        msg = 'AUTHENTICATION failed (version mismatch)'
        raise swiftclient.ClientException(msg)
    return None, None
# Route every swiftclient.client entry point the store uses to the
# in-memory fakes above ('stubs' is supplied by the enclosing stub
# helper — presumably a mox stub-out fixture; confirm at the caller).
stubs.Set(swiftclient.client,
          'head_container', fake_head_container)
stubs.Set(swiftclient.client,
          'put_container', fake_put_container)
stubs.Set(swiftclient.client,
          'post_container', fake_post_container)
stubs.Set(swiftclient.client,
          'put_object', fake_put_object)
stubs.Set(swiftclient.client,
          'delete_object', fake_delete_object)
stubs.Set(swiftclient.client,
          'head_object', fake_head_object)
stubs.Set(swiftclient.client,
          'get_object', fake_get_object)
stubs.Set(swiftclient.client,
          'get_auth', fake_get_auth)
stubs.Set(swiftclient.client,
          'http_connection', fake_http_connection)
class SwiftTests(object):
    """Swift store tests shared by the per-auth-version test cases.

    Relies on the stubbed swiftclient fixtures: FAKE_UUID and
    FAKE_UUID2 exist in the 'glance' container holding FIVE_KB of
    '*' bytes.
    """

    @property
    def swift_store_user(self):
        # Credentials matching the stubbed fixture configuration.
        return 'tenant:user1'

    def test_get_size(self):
        """get_size() reports the stored image's byte length."""
        uri = "swift://%s:key@auth_address/glance/%s" % (
            self.swift_store_user, FAKE_UUID)
        loc = location.get_location_from_uri(uri, conf=self.conf)
        image_size = self.store.get_size(loc)
        self.assertEqual(image_size, 5120)

    def test_get_size_with_multi_tenant_on(self):
        """Size lookup also works through the multi-tenant backend."""
        uri = ("swift://%s:key@auth_address/glance/%s" %
               (self.swift_store_user, FAKE_UUID))
        self.config(swift_store_multi_tenant=True)
        ctxt = mock.MagicMock()
        size = backend.get_size_from_backend(uri, context=ctxt)
        self.assertEqual(size, 5120)

    def test_get(self):
        """get() yields the full 5 KB fixture payload."""
        uri = "swift://%s:key@auth_address/glance/%s" % (
            self.swift_store_user, FAKE_UUID)
        loc = location.get_location_from_uri(uri, conf=self.conf)
        (image_swift, image_size) = self.store.get(loc)
        self.assertEqual(image_size, 5120)
        expected_data = b"*" * FIVE_KB
        data = b""
        for chunk in image_swift:
            data += chunk
        self.assertEqual(expected_data, data)
    def test_get_with_retry(self):
        """A download resumed mid-stream via swift_retry_iter completes."""
        uri = "swift://%s:key@auth_address/glance/%s" % (
            self.swift_store_user, FAKE_UUID)
        loc = location.get_location_from_uri(uri, conf=self.conf)
        ctxt = mock.MagicMock()
        (image_swift, image_size) = self.store.get(loc, context=ctxt)
        # Replace the response iterator with one that only has the
        # second half left, simulating a dropped connection retry.
        resp_full = b''.join([chunk for chunk in image_swift.wrapped])
        resp_half = resp_full[:len(resp_full) // 2]
        resp_half = six.BytesIO(resp_half)
        image_swift.wrapped = swift.swift_retry_iter(resp_half, image_size,
                                                     self.store,
                                                     loc.store_location,
                                                     ctxt)
        self.assertEqual(image_size, 5120)
        expected_data = b"*" * FIVE_KB
        data = b""
        for chunk in image_swift:
            data += chunk
        self.assertEqual(expected_data, data)

    def test_get_with_http_auth(self):
        """get() works with the swift+http scheme and a context."""
        loc = location.get_location_from_uri(
            "swift+http://%s:key@auth_address/glance/%s" %
            (self.swift_store_user, FAKE_UUID), conf=self.conf)
        ctxt = mock.MagicMock()
        (image_swift, image_size) = self.store.get(loc, context=ctxt)
        self.assertEqual(image_size, 5120)
        expected_data = b"*" * FIVE_KB
        data = b""
        for chunk in image_swift:
            data += chunk
        self.assertEqual(expected_data, data)

    def test_get_non_existing(self):
        """get() of an unknown object raises NotFound."""
        loc = location.get_location_from_uri(
            "swift://%s:key@authurl/glance/noexist" % (self.swift_store_user),
            conf=self.conf)
        self.assertRaises(exceptions.NotFound,
                          self.store.get,
                          loc)
    @mock.patch('glance_store._drivers.swift.utils'
                '.is_multiple_swift_store_accounts_enabled',
                mock.Mock(return_value=False))
    def test_add(self):
        """add() stores the image in one PUT and it reads back intact."""
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        expected_swift_size = FIVE_KB
        expected_swift_contents = b"*" * expected_swift_size
        expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
        expected_image_id = str(uuid.uuid4())
        # '%%3A' survives the first %-interpolation as the url-encoded ':'.
        loc = "swift+https://tenant%%3Auser1:key@localhost:8080/glance/%s"
        expected_location = loc % (expected_image_id)
        image_swift = six.BytesIO(expected_swift_contents)
        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS = 0
        loc, size, checksum, _ = self.store.add(expected_image_id,
                                                image_swift,
                                                expected_swift_size)
        self.assertEqual(expected_location, loc)
        self.assertEqual(expected_swift_size, size)
        self.assertEqual(expected_checksum, checksum)
        # Small image: exactly one put_object call expected.
        self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)
        loc = location.get_location_from_uri(expected_location, conf=self.conf)
        (new_image_swift, new_image_size) = self.store.get(loc)
        new_image_contents = b''.join([chunk for chunk in new_image_swift])
        new_image_swift_size = len(new_image_swift)
        self.assertEqual(expected_swift_contents, new_image_contents)
        self.assertEqual(expected_swift_size, new_image_swift_size)

    def test_add_multi_store(self):
        """add() honors a non-default swift reference (store_2)."""
        conf = copy.deepcopy(SWIFT_CONF)
        conf['default_swift_reference'] = 'store_2'
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        expected_swift_size = FIVE_KB
        expected_swift_contents = b"*" * expected_swift_size
        expected_image_id = str(uuid.uuid4())
        image_swift = six.BytesIO(expected_swift_contents)
        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS = 0
        loc = 'swift+config://store_2/glance/%s'
        expected_location = loc % (expected_image_id)
        # NOTE: 'location' here shadows the module-level location import
        # for the rest of this method.
        location, size, checksum, arg = self.store.add(expected_image_id,
                                                       image_swift,
                                                       expected_swift_size)
        self.assertEqual(expected_location, location)
    @mock.patch('glance_store._drivers.swift.utils'
                '.is_multiple_swift_store_accounts_enabled',
                mock.Mock(return_value=True))
    def test_add_auth_url_variations(self):
        """add() succeeds for each configured swift reference variant."""
        conf = copy.deepcopy(SWIFT_CONF)
        self.config(**conf)
        variations = {
            'store_4': 'swift+config://store_4/glance/%s',
            'store_5': 'swift+config://store_5/glance/%s',
            'store_6': 'swift+config://store_6/glance/%s'
        }
        for variation, expected_location in variations.items():
            image_id = str(uuid.uuid4())
            expected_location = expected_location % image_id
            expected_swift_size = FIVE_KB
            expected_swift_contents = b"*" * expected_swift_size
            expected_checksum = \
                hashlib.md5(expected_swift_contents).hexdigest()
            image_swift = six.BytesIO(expected_swift_contents)
            global SWIFT_PUT_OBJECT_CALLS
            SWIFT_PUT_OBJECT_CALLS = 0
            # Reconfigure and reload so the store picks up this
            # reference as the default.
            conf['default_swift_reference'] = variation
            self.config(**conf)
            moves.reload_module(swift)
            self.store = Store(self.conf)
            self.store.configure()
            loc, size, checksum, _ = self.store.add(image_id, image_swift,
                                                    expected_swift_size)
            self.assertEqual(expected_location, loc)
            self.assertEqual(expected_swift_size, size)
            self.assertEqual(expected_checksum, checksum)
            self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)
            loc = location.get_location_from_uri(expected_location,
                                                 conf=self.conf)
            (new_image_swift, new_image_size) = self.store.get(loc)
            new_image_contents = b''.join([chunk for chunk in new_image_swift])
            new_image_swift_size = len(new_image_swift)
            self.assertEqual(expected_swift_contents, new_image_contents)
            self.assertEqual(expected_swift_size, new_image_swift_size)
    def test_add_no_container_no_create(self):
        """add() fails (no PUTs) when the container is missing and
        auto-creation is disabled."""
        conf = copy.deepcopy(SWIFT_CONF)
        conf['swift_store_user'] = 'tenant:user'
        conf['swift_store_create_container_on_put'] = False
        conf['swift_store_container'] = 'noexist'
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        image_swift = six.BytesIO(b"nevergonnamakeit")
        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS = 0
        exception_caught = False
        try:
            self.store.add(str(uuid.uuid4()), image_swift, 0)
        except BackendException as e:
            exception_caught = True
            self.assertIn("container noexist does not exist "
                          "in Swift", utils.exception_to_str(e))
        self.assertTrue(exception_caught)
        self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 0)

    @mock.patch('glance_store._drivers.swift.utils'
                '.is_multiple_swift_store_accounts_enabled',
                mock.Mock(return_value=True))
    def test_add_no_container_and_create(self):
        """add() creates the missing container when auto-create is on."""
        expected_swift_size = FIVE_KB
        expected_swift_contents = b"*" * expected_swift_size
        expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
        expected_image_id = str(uuid.uuid4())
        loc = 'swift+config://ref1/noexist/%s'
        expected_location = loc % (expected_image_id)
        image_swift = six.BytesIO(expected_swift_contents)
        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS = 0
        conf = copy.deepcopy(SWIFT_CONF)
        conf['swift_store_user'] = 'tenant:user'
        conf['swift_store_create_container_on_put'] = True
        conf['swift_store_container'] = 'noexist'
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        loc, size, checksum, _ = self.store.add(expected_image_id,
                                                image_swift,
                                                expected_swift_size)
        self.assertEqual(expected_location, loc)
        self.assertEqual(expected_swift_size, size)
        self.assertEqual(expected_checksum, checksum)
        self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)
        loc = location.get_location_from_uri(expected_location, conf=self.conf)
        (new_image_swift, new_image_size) = self.store.get(loc)
        new_image_contents = b''.join([chunk for chunk in new_image_swift])
        new_image_swift_size = len(new_image_swift)
        self.assertEqual(expected_swift_contents, new_image_contents)
        self.assertEqual(expected_swift_size, new_image_swift_size)
    @mock.patch('glance_store._drivers.swift.utils'
                '.is_multiple_swift_store_accounts_enabled',
                mock.Mock(return_value=True))
    def test_add_no_container_and_multiple_containers_create(self):
        """With a container seed of 2, the image lands in a container
        suffixed with the first two characters of its id."""
        expected_swift_size = FIVE_KB
        expected_swift_contents = b"*" * expected_swift_size
        expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
        expected_image_id = str(uuid.uuid4())
        container = 'randomname_' + expected_image_id[:2]
        loc = 'swift+config://ref1/%s/%s'
        expected_location = loc % (container, expected_image_id)
        image_swift = six.BytesIO(expected_swift_contents)
        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS = 0
        conf = copy.deepcopy(SWIFT_CONF)
        conf['swift_store_user'] = 'tenant:user'
        conf['swift_store_create_container_on_put'] = True
        conf['swift_store_container'] = 'randomname'
        conf['swift_store_multiple_containers_seed'] = 2
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        loc, size, checksum, _ = self.store.add(expected_image_id,
                                                image_swift,
                                                expected_swift_size)
        self.assertEqual(expected_location, loc)
        self.assertEqual(expected_swift_size, size)
        self.assertEqual(expected_checksum, checksum)
        self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)
        loc = location.get_location_from_uri(expected_location, conf=self.conf)
        (new_image_swift, new_image_size) = self.store.get(loc)
        new_image_contents = b''.join([chunk for chunk in new_image_swift])
        new_image_swift_size = len(new_image_swift)
        self.assertEqual(expected_swift_contents, new_image_contents)
        self.assertEqual(expected_swift_size, new_image_swift_size)

    @mock.patch('glance_store._drivers.swift.utils'
                '.is_multiple_swift_store_accounts_enabled',
                mock.Mock(return_value=True))
    def test_add_no_container_and_multiple_containers_no_create(self):
        """Seeded container name is reported in the error when the
        container is missing and auto-create is off."""
        conf = copy.deepcopy(SWIFT_CONF)
        conf['swift_store_user'] = 'tenant:user'
        conf['swift_store_create_container_on_put'] = False
        conf['swift_store_container'] = 'randomname'
        conf['swift_store_multiple_containers_seed'] = 2
        self.config(**conf)
        moves.reload_module(swift)
        expected_image_id = str(uuid.uuid4())
        expected_container = 'randomname_' + expected_image_id[:2]
        self.store = Store(self.conf)
        self.store.configure()
        image_swift = six.BytesIO(b"nevergonnamakeit")
        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS = 0
        exception_caught = False
        try:
            self.store.add(expected_image_id, image_swift, 0)
        except BackendException as e:
            exception_caught = True
            expected_msg = "container %s does not exist in Swift"
            expected_msg = expected_msg % expected_container
            self.assertIn(expected_msg, utils.exception_to_str(e))
        self.assertTrue(exception_caught)
        self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 0)
    @mock.patch('glance_store._drivers.swift.utils'
                '.is_multiple_swift_store_accounts_enabled',
                mock.Mock(return_value=False))
    def test_multi_container_doesnt_impact_multi_tenant_add(self):
        """Multiple-containers seeding must not alter multi-tenant
        container naming (still 'container_<image_id>')."""
        expected_swift_size = FIVE_KB
        expected_swift_contents = b"*" * expected_swift_size
        expected_image_id = str(uuid.uuid4())
        expected_container = 'container_' + expected_image_id
        loc = 'swift+https://some_endpoint/%s/%s'
        expected_location = loc % (expected_container, expected_image_id)
        image_swift = six.BytesIO(expected_swift_contents)
        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS = 0
        self.config(swift_store_container='container')
        self.config(swift_store_create_container_on_put=True)
        self.config(swift_store_multiple_containers_seed=2)
        fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
        self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
        ctxt = mock.MagicMock(
            user='user', tenant='tenant', auth_token='123',
            service_catalog={})
        store = swift.MultiTenantStore(self.conf)
        store.configure()
        # NOTE: 'location' shadows the module-level location import here.
        location, size, checksum, _ = store.add(expected_image_id, image_swift,
                                                expected_swift_size,
                                                context=ctxt)
        self.assertEqual(expected_location, location)

    @mock.patch('glance_store._drivers.swift.utils'
                '.is_multiple_swift_store_accounts_enabled',
                mock.Mock(return_value=True))
    def test_add_large_object(self):
        """An image above large_object_size is chunked: 5 segment PUTs
        plus one manifest PUT."""
        expected_swift_size = FIVE_KB
        expected_swift_contents = b"*" * expected_swift_size
        expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
        expected_image_id = str(uuid.uuid4())
        loc = 'swift+config://ref1/glance/%s'
        expected_location = loc % (expected_image_id)
        image_swift = six.BytesIO(expected_swift_contents)
        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS = 0
        self.store = Store(self.conf)
        self.store.configure()
        orig_max_size = self.store.large_object_size
        orig_temp_size = self.store.large_object_chunk_size
        try:
            # Force chunking by shrinking the thresholds to 1 KiB.
            self.store.large_object_size = units.Ki
            self.store.large_object_chunk_size = units.Ki
            loc, size, checksum, _ = self.store.add(expected_image_id,
                                                    image_swift,
                                                    expected_swift_size)
        finally:
            self.store.large_object_chunk_size = orig_temp_size
            self.store.large_object_size = orig_max_size
        self.assertEqual(expected_location, loc)
        self.assertEqual(expected_swift_size, size)
        self.assertEqual(expected_checksum, checksum)
        # 5 x 1 KiB segments + 1 manifest = 6 put_object calls.
        self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 6)
        loc = location.get_location_from_uri(expected_location, conf=self.conf)
        (new_image_swift, new_image_size) = self.store.get(loc)
        new_image_contents = b''.join([chunk for chunk in new_image_swift])
        new_image_swift_size = len(new_image_contents)
        self.assertEqual(expected_swift_contents, new_image_contents)
        self.assertEqual(expected_swift_size, new_image_swift_size)
    def test_add_large_object_zero_size(self):
        """An add with declared size 0 still streams in chunks.

        With unknown length the store keeps reading until EOF, which
        produces one extra (trailing) segment PUT compared to the
        known-size case.
        """
        expected_swift_size = FIVE_KB
        expected_swift_contents = b"*" * expected_swift_size
        expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
        expected_image_id = str(uuid.uuid4())
        loc = 'swift+config://ref1/glance/%s'
        expected_location = loc % (expected_image_id)
        image_swift = six.BytesIO(expected_swift_contents)
        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS = 0
        self.store = Store(self.conf)
        self.store.configure()
        orig_max_size = self.store.large_object_size
        orig_temp_size = self.store.large_object_chunk_size
        global MAX_SWIFT_OBJECT_SIZE
        orig_max_swift_object_size = MAX_SWIFT_OBJECT_SIZE
        try:
            # Shrink all limits so the 5 KB payload must be chunked.
            MAX_SWIFT_OBJECT_SIZE = units.Ki
            self.store.large_object_size = units.Ki
            self.store.large_object_chunk_size = units.Ki
            loc, size, checksum, _ = self.store.add(expected_image_id,
                                                    image_swift, 0)
        finally:
            self.store.large_object_chunk_size = orig_temp_size
            self.store.large_object_size = orig_max_size
            MAX_SWIFT_OBJECT_SIZE = orig_max_swift_object_size
        self.assertEqual(expected_location, loc)
        self.assertEqual(expected_swift_size, size)
        self.assertEqual(expected_checksum, checksum)
        # 6 segment PUTs (one extra due to unknown length) + 1 manifest
        # in that case).
        self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 7)
        loc = location.get_location_from_uri(expected_location, conf=self.conf)
        (new_image_swift, new_image_size) = self.store.get(loc)
        new_image_contents = b''.join([chunk for chunk in new_image_swift])
        new_image_swift_size = len(new_image_contents)
        self.assertEqual(expected_swift_contents, new_image_contents)
        self.assertEqual(expected_swift_size, new_image_swift_size)
def test_add_already_existing(self):
self.store = Store(self.conf)
self.store.configure()
image_swift = six.BytesIO(b"nevergonnamakeit")
self.assertRaises(exceptions.Duplicate,
self.store.add,
FAKE_UUID, image_swift, 0)
def _option_required(self, key):
conf = self.getConfig()
conf[key] = None
try:
self.config(**conf)
self.store = Store(self.conf)
return not self.store.is_capable(
capabilities.BitMasks.WRITE_ACCESS)
except Exception:
return False
return False
    def test_no_store_credentials(self):
        """Empty user/key in the reference disables write access."""
        self.store = Store(self.conf)
        self.store.ref_params = {'ref1': {'auth_address':
                                          'authurl.com', 'user': '',
                                          'key': ''}}
        self.store.configure()
        self.assertFalse(self.store.is_capable(
            capabilities.BitMasks.WRITE_ACCESS))

    def test_no_auth_address(self):
        """Empty auth_address in the reference disables write access."""
        self.store = Store(self.conf)
        self.store.ref_params = {'ref1': {'auth_address':
                                          '', 'user': 'user1',
                                          'key': 'key1'}}
        self.store.configure()
        self.assertFalse(self.store.is_capable(
            capabilities.BitMasks.WRITE_ACCESS))
    def test_delete(self):
        """delete() removes the object; a later get() raises NotFound."""
        conf = copy.deepcopy(SWIFT_CONF)
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        uri = "swift://%s:key@authurl/glance/%s" % (
            self.swift_store_user, FAKE_UUID)
        loc = location.get_location_from_uri(uri, conf=self.conf)
        self.store.delete(loc)
        self.assertRaises(exceptions.NotFound, self.store.get, loc)

    @mock.patch.object(swiftclient.client, 'delete_object')
    def test_delete_slo(self, mock_del_obj):
        """Deleting an SLO (FAKE_UUID2) passes the multipart-manifest
        query string so segments are removed too."""
        conf = copy.deepcopy(SWIFT_CONF)
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        uri = "swift://%s:key@authurl/glance/%s" % (self.swift_store_user,
                                                    FAKE_UUID2)
        loc = location.get_location_from_uri(uri, conf=self.conf)
        self.store.delete(loc)
        self.assertEqual(1, mock_del_obj.call_count)
        _, kwargs = mock_del_obj.call_args
        self.assertEqual('multipart-manifest=delete',
                         kwargs.get('query_string'))

    @mock.patch.object(swiftclient.client, 'delete_object')
    def test_delete_nonslo_not_deleted_as_slo(self, mock_del_obj):
        """A plain object delete must NOT send the SLO query string."""
        conf = copy.deepcopy(SWIFT_CONF)
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        uri = "swift://%s:key@authurl/glance/%s" % (self.swift_store_user,
                                                    FAKE_UUID)
        loc = location.get_location_from_uri(uri, conf=self.conf)
        self.store.delete(loc)
        self.assertEqual(1, mock_del_obj.call_count)
        _, kwargs = mock_del_obj.call_args
        self.assertEqual(None, kwargs.get('query_string'))

    def test_delete_with_reference_params(self):
        """delete() works with swift+config reference-style locations."""
        conf = copy.deepcopy(SWIFT_CONF)
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        uri = "swift+config://ref1/glance/%s" % (FAKE_UUID)
        loc = location.get_location_from_uri(uri, conf=self.conf)
        self.store.delete(loc)
        self.assertRaises(exceptions.NotFound, self.store.get, loc)

    def test_delete_non_existing(self):
        """delete() of an unknown object raises NotFound."""
        conf = copy.deepcopy(SWIFT_CONF)
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        loc = location.get_location_from_uri(
            "swift://%s:key@authurl/glance/noexist" % (self.swift_store_user),
            conf=self.conf)
        self.assertRaises(exceptions.NotFound, self.store.delete, loc)
    def test_delete_with_some_segments_failing(self):
        """Segment delete failures do not abort the manifest delete.

        All segments are attempted even when some raise; the manifest
        delete still happens afterwards.
        """
        test_image_id = str(uuid.uuid4())

        def fake_head_object(container, object_name):
            # Present every object as a DLO manifest so the store walks
            # its segments.
            object_manifest = '/'.join([container, object_name]) + '-'
            return {'x-object-manifest': object_manifest}

        def fake_get_container(container, **kwargs):
            # Returning 5 fake segments
            return None, [{'name': '%s-%03d' % (test_image_id, x)}
                          for x in range(1, 6)]

        def fake_delete_object(container, object_name):
            # Simulate error on 1st and 3rd segments
            global SWIFT_DELETE_OBJECT_CALLS
            SWIFT_DELETE_OBJECT_CALLS += 1
            if object_name.endswith('-001') or object_name.endswith('-003'):
                raise swiftclient.ClientException('Object DELETE failed')
            else:
                pass

        conf = copy.deepcopy(SWIFT_CONF)
        self.config(**conf)
        moves.reload_module(swift)
        self.store = Store(self.conf)
        self.store.configure()
        loc_uri = "swift+https://%s:key@localhost:8080/glance/%s"
        loc_uri = loc_uri % (self.swift_store_user, test_image_id)
        loc = location.get_location_from_uri(loc_uri)
        conn = self.store.get_connection(loc.store_location)
        conn.delete_object = fake_delete_object
        conn.head_object = fake_head_object
        conn.get_container = fake_get_container
        global SWIFT_DELETE_OBJECT_CALLS
        SWIFT_DELETE_OBJECT_CALLS = 0
        self.store.delete(loc, connection=conn)
        # Expecting 6 delete calls, 5 for the segments and 1 for the manifest
        self.assertEqual(SWIFT_DELETE_OBJECT_CALLS, 6)
    def test_read_acl_public(self):
        """set_acls(public=True) writes a '*:*' container read ACL."""
        self.config(swift_store_multi_tenant=True)
        store = Store(self.conf)
        store.configure()
        uri = "swift+http://storeurl/glance/%s" % FAKE_UUID
        loc = location.get_location_from_uri(uri, conf=self.conf)
        ctxt = mock.MagicMock()
        store.set_acls(loc, public=True, context=ctxt)
        container_headers = swiftclient.client.head_container('x', 'y',
                                                              'glance')
        self.assertEqual(container_headers['X-Container-Read'],
                         "*:*")

    def test_read_acl_tenants(self):
        """set_acls(read_tenants=...) grants per-tenant read ACLs."""
        self.config(swift_store_multi_tenant=True)
        store = Store(self.conf)
        store.configure()
        uri = "swift+http://storeurl/glance/%s" % FAKE_UUID
        loc = location.get_location_from_uri(uri, conf=self.conf)
        read_tenants = ['matt', 'mark']
        ctxt = mock.MagicMock()
        store.set_acls(loc, read_tenants=read_tenants, context=ctxt)
        container_headers = swiftclient.client.head_container('x', 'y',
                                                              'glance')
        self.assertEqual(container_headers['X-Container-Read'],
                         'matt:*,mark:*')

    def test_write_acls(self):
        """set_acls(write_tenants=...) grants per-tenant write ACLs."""
        self.config(swift_store_multi_tenant=True)
        store = Store(self.conf)
        store.configure()
        uri = "swift+http://storeurl/glance/%s" % FAKE_UUID
        loc = location.get_location_from_uri(uri, conf=self.conf)
        read_tenants = ['frank', 'jim']
        ctxt = mock.MagicMock()
        store.set_acls(loc, write_tenants=read_tenants, context=ctxt)
        container_headers = swiftclient.client.head_container('x', 'y',
                                                              'glance')
        self.assertEqual(container_headers['X-Container-Write'],
                         'frank:*,jim:*')
class TestStoreAuthV1(base.StoreBaseTest, SwiftTests,
                      test_store_capabilities.TestStoreCapabilitiesChecking):
    """Runs the shared SwiftTests against Swift auth version 1."""

    _CONF = cfg.CONF

    def getConfig(self):
        # Base config; subclasses override the auth version.
        conf = SWIFT_CONF.copy()
        conf['swift_store_auth_version'] = '1'
        conf['swift_store_user'] = 'tenant:user1'
        return conf

    def setUp(self):
        """Stub out swiftclient and configure/register the store."""
        super(TestStoreAuthV1, self).setUp()
        conf = self.getConfig()
        conf_file = 'glance-swift.conf'
        self.swift_config_file = self.copy_data_file(conf_file, self.test_dir)
        conf.update({'swift_store_config_file': self.swift_config_file})
        moxfixture = self.useFixture(moxstubout.MoxStubout())
        self.stubs = moxfixture.stubs
        stub_out_swiftclient(self.stubs, conf['swift_store_auth_version'])
        self.store = Store(self.conf)
        self.config(**conf)
        self.store.configure()
        self.register_store_schemes(self.store, 'swift')
        self.addCleanup(self.conf.reset)
class TestStoreAuthV2(TestStoreAuthV1):
    """Same suite against Swift auth version 2, plus v2-only cases."""

    def getConfig(self):
        conf = super(TestStoreAuthV2, self).getConfig()
        conf['swift_store_auth_version'] = '2'
        conf['swift_store_user'] = 'tenant:user1'
        return conf

    def test_v2_with_no_tenant(self):
        """A v2 user without a tenant part is a BadStoreUri."""
        uri = "swift://failme:key@auth_address/glance/%s" % (FAKE_UUID)
        loc = location.get_location_from_uri(uri, conf=self.conf)
        self.assertRaises(exceptions.BadStoreUri,
                          self.store.get,
                          loc)

    def test_v2_multi_tenant_location(self):
        """A credential-less URI parses as a swift location."""
        # NOTE(review): this conf dict is built but never applied via
        # self.config(), so the multi_tenant flag may have no effect
        # here — confirm intent.
        conf = self.getConfig()
        conf['swift_store_multi_tenant'] = True
        uri = "swift://auth_address/glance/%s" % (FAKE_UUID)
        loc = location.get_location_from_uri(uri, conf=self.conf)
        self.assertEqual('swift', loc.store_name)
class TestStoreAuthV3(TestStoreAuthV1):
    """Same suite against Swift (Keystone) auth version 3."""

    def getConfig(self):
        conf = super(TestStoreAuthV3, self).getConfig()
        conf['swift_store_auth_version'] = '3'
        conf['swift_store_user'] = 'tenant:user1'
        return conf
class FakeConnection(object):
    """Minimal stand-in for swiftclient.Connection.

    Records the constructor arguments so tests can assert on how the
    store built its connection; performs no network activity.
    """

    def __init__(self, authurl, user, key, retries=5, preauthurl=None,
                 preauthtoken=None, starting_backoff=1, tenant_name=None,
                 os_options=None, auth_version="1", insecure=False,
                 ssl_compression=True, cacert=None):
        # retries, starting_backoff and ssl_compression are accepted
        # for signature compatibility but deliberately not recorded.
        self.authurl = authurl
        self.user = user
        self.key = key
        self.preauthurl = preauthurl
        self.preauthtoken = preauthtoken
        self.tenant_name = tenant_name
        self.os_options = {} if os_options is None else os_options
        self.auth_version = auth_version
        self.insecure = insecure
        self.cacert = cacert
class TestSingleTenantStoreConnections(base.StoreBaseTest):
    """Connection-building tests for the single-tenant Swift store."""

    _CONF = cfg.CONF

    def setUp(self):
        """Replace swiftclient.Connection with FakeConnection and build
        a canonical StoreLocation to connect to."""
        super(TestSingleTenantStoreConnections, self).setUp()
        moxfixture = self.useFixture(moxstubout.MoxStubout())
        self.stubs = moxfixture.stubs
        self.stubs.Set(swiftclient, 'Connection', FakeConnection)
        self.store = swift.SingleTenantStore(self.conf)
        self.store.configure()
        specs = {'scheme': 'swift',
                 'auth_or_store_url': 'example.com/v2/',
                 'user': 'tenant:user1',
                 'key': 'key1',
                 'container': 'cont',
                 'obj': 'object'}
        self.location = swift.StoreLocation(specs, self.conf)
        self.addCleanup(self.conf.reset)

    def test_basic_connection(self):
        """Defaults: https scheme, v2 auth, tenant split out of user."""
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.authurl, 'https://example.com/v2/')
        self.assertEqual(connection.auth_version, '2')
        self.assertEqual(connection.user, 'user1')
        self.assertEqual(connection.tenant_name, 'tenant')
        self.assertEqual(connection.key, 'key1')
        self.assertIsNone(connection.preauthurl)
        self.assertFalse(connection.insecure)
        self.assertEqual(connection.os_options,
                         {'service_type': 'object-store',
                          'endpoint_type': 'publicURL'})
    def test_connection_with_conf_endpoint(self):
        """swift_store_endpoint becomes the preauthurl (with context)."""
        ctx = mock.MagicMock(user='tenant:user1', tenant='tenant')
        self.config(swift_store_endpoint='https://internal.com')
        self.store.configure()
        connection = self.store.get_connection(self.location, context=ctx)
        self.assertEqual(connection.authurl, 'https://example.com/v2/')
        self.assertEqual(connection.auth_version, '2')
        self.assertEqual(connection.user, 'user1')
        self.assertEqual(connection.tenant_name, 'tenant')
        self.assertEqual(connection.key, 'key1')
        self.assertEqual(connection.preauthurl, 'https://internal.com')
        self.assertFalse(connection.insecure)
        self.assertEqual(connection.os_options,
                         {'service_type': 'object-store',
                          'endpoint_type': 'publicURL'})

    def test_connection_with_conf_endpoint_no_context(self):
        """swift_store_endpoint is honored even without a context."""
        self.config(swift_store_endpoint='https://internal.com')
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.authurl, 'https://example.com/v2/')
        self.assertEqual(connection.auth_version, '2')
        self.assertEqual(connection.user, 'user1')
        self.assertEqual(connection.tenant_name, 'tenant')
        self.assertEqual(connection.key, 'key1')
        self.assertEqual(connection.preauthurl, 'https://internal.com')
        self.assertFalse(connection.insecure)
        self.assertEqual(connection.os_options,
                         {'service_type': 'object-store',
                          'endpoint_type': 'publicURL'})
    def test_connection_with_no_trailing_slash(self):
        """A missing trailing slash on the auth url is added."""
        self.location.auth_or_store_url = 'example.com/v2'
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.authurl, 'https://example.com/v2/')

    def test_connection_insecure(self):
        """swift_store_auth_insecure propagates to the connection."""
        self.config(swift_store_auth_insecure=True)
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertTrue(connection.insecure)

    def test_connection_with_auth_v1(self):
        """Auth v1 takes the user verbatim — no tenant split."""
        self.config(swift_store_auth_version='1')
        self.store.configure()
        self.location.user = 'auth_v1_user'
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.auth_version, '1')
        self.assertEqual(connection.user, 'auth_v1_user')
        self.assertIsNone(connection.tenant_name)

    def test_connection_invalid_user(self):
        """More than one ':' in the user is a BadStoreUri."""
        self.store.configure()
        self.location.user = 'invalid:format:user'
        self.assertRaises(exceptions.BadStoreUri,
                          self.store.get_connection, self.location)

    def test_connection_missing_user(self):
        """A missing user is a BadStoreUri."""
        self.store.configure()
        self.location.user = None
        self.assertRaises(exceptions.BadStoreUri,
                          self.store.get_connection, self.location)
    def test_connection_with_region(self):
        """swift_store_region is added to os_options as region_name."""
        self.config(swift_store_region='Sahara')
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.os_options,
                         {'region_name': 'Sahara',
                          'service_type': 'object-store',
                          'endpoint_type': 'publicURL'})

    def test_connection_with_service_type(self):
        """swift_store_service_type overrides the service_type option."""
        self.config(swift_store_service_type='shoe-store')
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.os_options,
                         {'service_type': 'shoe-store',
                          'endpoint_type': 'publicURL'})

    def test_connection_with_endpoint_type(self):
        """swift_store_endpoint_type overrides the endpoint_type option."""
        self.config(swift_store_endpoint_type='internalURL')
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual(connection.os_options,
                         {'service_type': 'object-store',
                          'endpoint_type': 'internalURL'})

    def test_bad_location_uri(self):
        """A non-swift scheme fails to parse."""
        self.store.configure()
        self.location.uri = 'http://bad_uri://'
        self.assertRaises(exceptions.BadStoreUri,
                          self.location.parse_uri,
                          self.location.uri)

    def test_bad_location_uri_invalid_credentials(self):
        """Credentials without a key part fail to parse."""
        self.store.configure()
        self.location.uri = 'swift://bad_creds@uri/cont/obj'
        self.assertRaises(exceptions.BadStoreUri,
                          self.location.parse_uri,
                          self.location.uri)

    def test_bad_location_uri_invalid_object_path(self):
        """A URI missing the object component fails to parse."""
        self.store.configure()
        self.location.uri = 'swift://user:key@uri/cont'
        self.assertRaises(exceptions.BadStoreUri,
                          self.location.parse_uri,
                          self.location.uri)
    def test_ref_overrides_defaults(self):
        """Values from ref_params override the swift_store_* defaults."""
        self.config(swift_store_auth_version='2',
                    swift_store_user='testuser',
                    swift_store_key='testpass',
                    swift_store_auth_address='testaddress',
                    swift_store_endpoint_type='internalURL',
                    swift_store_config_file='somefile')
        self.store.ref_params = {'ref1': {'auth_address': 'authurl.com',
                                          'auth_version': '3',
                                          'user': 'user:pass',
                                          'user_domain_id': 'default',
                                          'user_domain_name': 'ignored',
                                          'project_domain_id': 'default',
                                          'project_domain_name': 'ignored'}}
        self.store.configure()
        self.assertEqual('user:pass', self.store.user)
        self.assertEqual('3', self.store.auth_version)
        self.assertEqual('authurl.com', self.store.auth_address)
        self.assertEqual('default', self.store.user_domain_id)
        self.assertEqual('ignored', self.store.user_domain_name)
        self.assertEqual('default', self.store.project_domain_id)
        self.assertEqual('ignored', self.store.project_domain_name)

    def test_with_v3_auth(self):
        """v3 references propagate the domain settings into os_options."""
        self.store.ref_params = {'ref1': {'auth_address': 'authurl.com',
                                          'auth_version': '3',
                                          'user': 'user:pass',
                                          'key': 'password',
                                          'user_domain_id': 'default',
                                          'user_domain_name': 'ignored',
                                          'project_domain_id': 'default',
                                          'project_domain_name': 'ignored'}}
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual('3', connection.auth_version)
        self.assertEqual(connection.os_options,
                         {'service_type': 'object-store',
                          'endpoint_type': 'publicURL',
                          'user_domain_id': 'default',
                          'user_domain_name': 'ignored',
                          'project_domain_id': 'default',
                          'project_domain_name': 'ignored'})
class TestMultiTenantStoreConnections(base.StoreBaseTest):
    """Tests for connections created by the multi-tenant swift store."""
    def setUp(self):
        super(TestMultiTenantStoreConnections, self).setUp()
        moxfixture = self.useFixture(moxstubout.MoxStubout())
        self.stubs = moxfixture.stubs
        # Replace the real swift client Connection with the FakeConnection
        # test double so no network traffic happens
        self.stubs.Set(swiftclient, 'Connection', FakeConnection)
        self.context = mock.MagicMock(
            user='tenant:user1', tenant='tenant', auth_token='0123')
        self.store = swift.MultiTenantStore(self.conf)
        specs = {'scheme': 'swift',
                 'auth_or_store_url': 'example.com',
                 'container': 'cont',
                 'obj': 'object'}
        self.location = swift.StoreLocation(specs, self.conf)
        self.addCleanup(self.conf.reset)
    def test_basic_connection(self):
        """The connection is pre-authenticated with the context's token."""
        self.store.configure()
        connection = self.store.get_connection(self.location,
                                               context=self.context)
        # Multi-tenant mode authenticates through the request context,
        # so the connection itself carries no auth URL or key
        self.assertIsNone(connection.authurl)
        self.assertEqual(connection.auth_version, '2')
        self.assertEqual(connection.user, 'tenant:user1')
        self.assertEqual(connection.tenant_name, 'tenant')
        self.assertIsNone(connection.key)
        self.assertEqual(connection.preauthurl, 'https://example.com')
        self.assertEqual(connection.preauthtoken, '0123')
        self.assertEqual(connection.os_options, {})
class TestMultiTenantStoreContext(base.StoreBaseTest):
    """Tests that multi-tenant GET/PUT requests carry the context token."""
    _CONF = cfg.CONF
    def setUp(self):
        super(TestMultiTenantStoreContext, self).setUp()
        conf = SWIFT_CONF.copy()
        self.store = Store(self.conf)
        self.config(**conf)
        self.store.configure()
        self.register_store_schemes(self.store, 'swift')
        # Minimal service catalog pointing the object-store at a local URL
        self.service_catalog = [{
            "name": "Object Storage",
            "type": "object-store",
            "endpoints": [{
                "publicURL": "http://127.0.0.1:0",
                "region": "region1",
                "versionId": "1.0",
            }]
        }]
        self.addCleanup(self.conf.reset)
    @requests_mock.mock()
    def test_download_context(self, m):
        """Downloads must send the context's auth token as X-Auth-Token."""
        self.config(swift_store_multi_tenant=True)
        store = Store(self.conf)
        store.configure()
        uri = "swift+http://127.0.0.1/glance_123/123"
        loc = location.get_location_from_uri(uri, conf=self.conf)
        ctx = mock.MagicMock(
            service_catalog=self.service_catalog, user='tenant:user1',
            tenant='tenant', auth_token='0123')
        m.get("http://127.0.0.1/glance_123/123")
        store.get(loc, context=ctx)
        self.assertEqual(b'0123', m.last_request.headers['X-Auth-Token'])
    @requests_mock.mock()
    def test_upload_context(self, m):
        """Uploads must send the token on both HEAD and PUT requests."""
        head_req = m.head("http://127.0.0.1/glance_123",
                          text='Some data',
                          status_code=201)
        put_req = m.put("http://127.0.0.1/glance_123/123")
        self.config(swift_store_multi_tenant=True)
        store = Store(self.conf)
        store.configure()
        content = b'Some data'
        pseudo_file = six.BytesIO(content)
        ctx = mock.MagicMock(
            service_catalog=self.service_catalog, user='tenant:user1',
            tenant='tenant', auth_token='0123')
        store.add('123', pseudo_file, len(content),
                  context=ctx)
        self.assertEqual(b'0123',
                         head_req.last_request.headers['X-Auth-Token'])
        self.assertEqual(b'0123',
                         put_req.last_request.headers['X-Auth-Token'])
class FakeGetEndpoint(object):
    """Callable test double standing in for ``auth.get_endpoint``.

    Returns a canned response and records the keyword arguments it was
    called with so tests can assert on them afterwards.
    """
    def __init__(self, response):
        # The value every call will return, regardless of arguments
        self.response = response
    def __call__(self, service_catalog, service_type=None,
                 endpoint_region=None, endpoint_type=None):
        # Capture the call's keyword arguments for later inspection
        self.service_type = service_type
        self.endpoint_region = endpoint_region
        self.endpoint_type = endpoint_type
        return self.response
class TestCreatingLocations(base.StoreBaseTest):
    """Tests for the store location objects built by the swift stores."""
    _CONF = cfg.CONF
    def setUp(self):
        super(TestCreatingLocations, self).setUp()
        moxfixture = self.useFixture(moxstubout.MoxStubout())
        self.stubs = moxfixture.stubs
        conf = copy.deepcopy(SWIFT_CONF)
        self.store = Store(self.conf)
        self.config(**conf)
        # Re-import so module-level config-derived state is rebuilt
        moves.reload_module(swift)
        self.addCleanup(self.conf.reset)
    def test_single_tenant_location(self):
        """Single-tenant locations take credentials from the config ref."""
        conf = copy.deepcopy(SWIFT_CONF)
        conf['swift_store_container'] = 'container'
        conf_file = "glance-swift.conf"
        self.swift_config_file = self.copy_data_file(conf_file, self.test_dir)
        conf.update({'swift_store_config_file': self.swift_config_file})
        conf['default_swift_reference'] = 'ref1'
        self.config(**conf)
        moves.reload_module(swift)
        store = swift.SingleTenantStore(self.conf)
        store.configure()
        location = store.create_location('image-id')
        self.assertEqual(location.scheme, 'swift+https')
        self.assertEqual(location.swift_url, 'https://example.com')
        self.assertEqual(location.container, 'container')
        self.assertEqual(location.obj, 'image-id')
        self.assertEqual(location.user, 'tenant:user1')
        self.assertEqual(location.key, 'key1')
    def test_single_tenant_location_http(self):
        """An http auth address in the ref yields a swift+http scheme."""
        conf_file = "glance-swift.conf"
        test_dir = self.useFixture(fixtures.TempDir()).path
        self.swift_config_file = self.copy_data_file(conf_file, test_dir)
        self.config(swift_store_container='container',
                    default_swift_reference='ref2',
                    swift_store_config_file=self.swift_config_file)
        store = swift.SingleTenantStore(self.conf)
        store.configure()
        location = store.create_location('image-id')
        self.assertEqual(location.scheme, 'swift+http')
        self.assertEqual(location.swift_url, 'http://example.com')
    def test_multi_tenant_location(self):
        """Multi-tenant locations use the catalog endpoint, no credentials."""
        self.config(swift_store_container='container')
        fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
        self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
        ctxt = mock.MagicMock(
            user='user', tenant='tenant', auth_token='123',
            service_catalog={})
        store = swift.MultiTenantStore(self.conf)
        store.configure()
        location = store.create_location('image-id', context=ctxt)
        self.assertEqual(location.scheme, 'swift+https')
        self.assertEqual(location.swift_url, 'https://some_endpoint')
        # Multi-tenant mode appends the image id to the container name
        self.assertEqual(location.container, 'container_image-id')
        self.assertEqual(location.obj, 'image-id')
        self.assertIsNone(location.user)
        self.assertIsNone(location.key)
        self.assertEqual(fake_get_endpoint.service_type, 'object-store')
    def test_multi_tenant_location_http(self):
        """An http catalog endpoint yields a swift+http scheme."""
        fake_get_endpoint = FakeGetEndpoint('http://some_endpoint')
        self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
        ctxt = mock.MagicMock(
            user='user', tenant='tenant', auth_token='123',
            service_catalog={})
        store = swift.MultiTenantStore(self.conf)
        store.configure()
        location = store.create_location('image-id', context=ctxt)
        self.assertEqual(location.scheme, 'swift+http')
        self.assertEqual(location.swift_url, 'http://some_endpoint')
    def test_multi_tenant_location_with_region(self):
        """swift_store_region is forwarded to the endpoint lookup."""
        self.config(swift_store_region='WestCarolina')
        fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
        self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
        ctxt = mock.MagicMock(
            user='user', tenant='tenant', auth_token='123',
            service_catalog={})
        store = swift.MultiTenantStore(self.conf)
        store.configure()
        store._get_endpoint(ctxt)
        self.assertEqual(fake_get_endpoint.endpoint_region, 'WestCarolina')
    def test_multi_tenant_location_custom_service_type(self):
        """swift_store_service_type is forwarded to the endpoint lookup."""
        self.config(swift_store_service_type='toy-store')
        fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
        self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
        ctxt = mock.MagicMock(
            user='user', tenant='tenant', auth_token='123',
            service_catalog={})
        store = swift.MultiTenantStore(self.conf)
        store.configure()
        store._get_endpoint(ctxt)
        self.assertEqual(fake_get_endpoint.service_type, 'toy-store')
    def test_multi_tenant_location_custom_endpoint_type(self):
        """swift_store_endpoint_type is forwarded to the endpoint lookup."""
        self.config(swift_store_endpoint_type='InternalURL')
        fake_get_endpoint = FakeGetEndpoint('https://some_endpoint')
        self.stubs.Set(auth, 'get_endpoint', fake_get_endpoint)
        ctxt = mock.MagicMock(
            user='user', tenant='tenant', auth_token='123',
            service_catalog={})
        store = swift.MultiTenantStore(self.conf)
        store.configure()
        store._get_endpoint(ctxt)
        self.assertEqual(fake_get_endpoint.endpoint_type, 'InternalURL')
class TestChunkReader(base.StoreBaseTest):
    """Tests for swift.ChunkReader, which reads files in fixed chunks."""
    _CONF = cfg.CONF
    def setUp(self):
        super(TestChunkReader, self).setUp()
        conf = copy.deepcopy(SWIFT_CONF)
        Store(self.conf)
        self.config(**conf)
    def test_read_all_data(self):
        """Chunked reads must deliver exactly the data that was written."""
        CHUNKSIZE = 100
        checksum = hashlib.md5()
        data_file = tempfile.NamedTemporaryFile()
        # Write 1 KiB of known content to read back
        data_file.write(b'*' * units.Ki)
        data_file.flush()
        infile = open(data_file.name, 'rb')
        bytes_read = 0
        while True:
            cr = swift.ChunkReader(infile, checksum, CHUNKSIZE)
            chunk = cr.read(CHUNKSIZE)
            bytes_read += len(chunk)
            if not chunk:
                break
        # Every byte of the 1 KiB must have been read back
        self.assertEqual(units.Ki, bytes_read)
        data_file.close()
        infile.close()
class TestMultipleContainers(base.StoreBaseTest):
    """Tests for container-name sharding via the multiple-containers seed."""
    _CONF = cfg.CONF
    def setUp(self):
        super(TestMultipleContainers, self).setUp()
        self.config(swift_store_multiple_containers_seed=3)
        self.store = swift.SingleTenantStore(self.conf)
        self.store.configure()
    def test_get_container_name_happy_path_with_seed_three(self):
        """A seed of 3 appends the first 3 image-id characters."""
        test_image_id = 'fdae39a1-bac5-4238-aba4-69bcc726e848'
        actual = self.store.get_container_name(test_image_id,
                                               'default_container')
        expected = 'default_container_fda'
        self.assertEqual(expected, actual)
    def test_get_container_name_with_negative_seed(self):
        """A negative seed is invalid configuration."""
        self.config(swift_store_multiple_containers_seed=-1)
        self.store = swift.SingleTenantStore(self.conf)
        test_image_id = 'random_id'
        self.assertRaises(exceptions.BadStoreConfiguration,
                          self.store.get_container_name, test_image_id,
                          'default_container')
    def test_get_container_name_with_seed_beyond_max(self):
        """A seed above the 32-character maximum is invalid."""
        self.config(swift_store_multiple_containers_seed=33)
        self.store = swift.SingleTenantStore(self.conf)
        test_image_id = 'random_id'
        self.assertRaises(exceptions.BadStoreConfiguration,
                          self.store.get_container_name, test_image_id,
                          'default_container')
    def test_get_container_name_with_max_seed(self):
        """The maximum seed (32) appends the entire image id."""
        self.config(swift_store_multiple_containers_seed=32)
        self.store = swift.SingleTenantStore(self.conf)
        test_image_id = 'fdae39a1-bac5-4238-aba4-69bcc726e848'
        actual = self.store.get_container_name(test_image_id,
                                               'default_container')
        expected = 'default_container_' + test_image_id
        self.assertEqual(expected, actual)
    def test_get_container_name_with_dash(self):
        """Dashes in the image id count toward the seed length."""
        self.config(swift_store_multiple_containers_seed=10)
        self.store = swift.SingleTenantStore(self.conf)
        test_image_id = 'fdae39a1-bac5-4238-aba4-69bcc726e848'
        actual = self.store.get_container_name(test_image_id,
                                               'default_container')
        expected = 'default_container_' + 'fdae39a1-ba'
        self.assertEqual(expected, actual)
    def test_get_container_name_with_min_seed(self):
        """The minimum seed (1) appends a single character."""
        self.config(swift_store_multiple_containers_seed=1)
        self.store = swift.SingleTenantStore(self.conf)
        test_image_id = 'fdae39a1-bac5-4238-aba4-69bcc726e848'
        actual = self.store.get_container_name(test_image_id,
                                               'default_container')
        expected = 'default_container_' + 'f'
        self.assertEqual(expected, actual)
    def test_get_container_name_with_multiple_containers_turned_off(self):
        """A seed of 0 disables sharding; the default name is used as-is."""
        self.config(swift_store_multiple_containers_seed=0)
        self.store.configure()
        test_image_id = 'random_id'
        actual = self.store.get_container_name(test_image_id,
                                               'default_container')
        expected = 'default_container'
        self.assertEqual(expected, actual)
| true | true |
f738d1cc691e5e7835df1f29f63efb94f2ce77a6 | 1,149 | py | Python | src/scrapy_redis_loadbalancing/tools.py | EchoShoot/scrapy-redis-loadbalancing | d5c7ff76dc842f201dcaa68928dd27c24be4acb8 | [
"MIT"
] | 3 | 2019-03-05T02:30:19.000Z | 2020-03-13T13:29:34.000Z | src/scrapy_redis_loadbalancing/tools.py | EchoShoot/scrapy-redis-loadbalancing | d5c7ff76dc842f201dcaa68928dd27c24be4acb8 | [
"MIT"
] | null | null | null | src/scrapy_redis_loadbalancing/tools.py | EchoShoot/scrapy-redis-loadbalancing | d5c7ff76dc842f201dcaa68928dd27c24be4acb8 | [
"MIT"
] | 3 | 2019-12-20T04:41:04.000Z | 2020-07-12T22:26:27.000Z | class Color(object):
""" 输出各种颜色,方便 shell观察 """
@staticmethod
def black(text):
""" 黑色 """
return '\033[90m{content}\033[0m'.format(content=text)
@staticmethod
def red(text):
""" 红色 """
return '\033[91m{content}\033[0m'.format(content=text)
@staticmethod
def green(text):
""" 绿色 """
return '\033[92m{content}\033[0m'.format(content=text)
@staticmethod
def yellow(text):
""" 黄色 """
return '\033[93m{content}\033[0m'.format(content=text)
@staticmethod
def violet(text):
""" 紫罗兰色 """
return '\033[94m{content}\033[0m'.format(content=text)
@staticmethod
def purple(text):
""" 紫色 """
return '\033[95m{content}\033[0m'.format(content=text)
@staticmethod
def cyan(text):
""" 青色 """
return '\033[96m{content}\033[0m'.format(content=text)
@staticmethod
def white(text):
""" 白色 """
return '\033[97m{content}\033[0m'.format(content=text)
@staticmethod
def gray(text):
""" 灰色 """
return '\033[98m{content}\033[0m'.format(content=text) | 24.446809 | 62 | 0.552654 | class Color(object):
@staticmethod
def black(text):
return '\033[90m{content}\033[0m'.format(content=text)
@staticmethod
def red(text):
return '\033[91m{content}\033[0m'.format(content=text)
@staticmethod
def green(text):
return '\033[92m{content}\033[0m'.format(content=text)
@staticmethod
def yellow(text):
return '\033[93m{content}\033[0m'.format(content=text)
@staticmethod
def violet(text):
return '\033[94m{content}\033[0m'.format(content=text)
@staticmethod
def purple(text):
return '\033[95m{content}\033[0m'.format(content=text)
@staticmethod
def cyan(text):
return '\033[96m{content}\033[0m'.format(content=text)
@staticmethod
def white(text):
return '\033[97m{content}\033[0m'.format(content=text)
@staticmethod
def gray(text):
return '\033[98m{content}\033[0m'.format(content=text) | true | true |
f738d27a767e8d05f94578173bcb4472c4c89b20 | 4,332 | py | Python | stream_alert_cli/terraform/cloudtrail.py | yutiansut/streamalert | 7d198a3273781f66465420e90886a3ce53ec7559 | [
"Apache-2.0"
] | 2 | 2018-01-18T14:09:24.000Z | 2021-11-02T10:45:23.000Z | stream_alert_cli/terraform/cloudtrail.py | revaniki/streamalert | 7d198a3273781f66465420e90886a3ce53ec7559 | [
"Apache-2.0"
] | null | null | null | stream_alert_cli/terraform/cloudtrail.py | revaniki/streamalert | 7d198a3273781f66465420e90886a3ce53ec7559 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2017-present, Airbnb Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
from stream_alert.shared.logger import get_logger
LOGGER = get_logger(__name__)
def generate_cloudtrail(cluster_name, cluster_dict, config):
    """Add the CloudTrail module to the Terraform cluster dict.
    Args:
        cluster_name (str): The name of the currently generating cluster
        cluster_dict (defaultdict): The dict containing all Terraform config for a given cluster.
        config (dict): The loaded config from the 'conf/' directory
    Returns:
        bool: Result of applying the cloudtrail module
    """
    ct_config = config['clusters'][cluster_name]['modules']['cloudtrail']
    module_name = 'cloudtrail_{}'.format(cluster_name)
    primary_account = config['global']['account']['aws_account_id']
    legacy_enabled = ct_config.get('enabled')
    logging_enabled = ct_config.get('enable_logging', True)
    kinesis_enabled = ct_config.get('enable_kinesis', True)
    use_cloudwatch = ct_config.get('send_to_cloudwatch', False)
    skip_home_region = ct_config.get('exclude_home_region_events', False)
    # The primary account always receives CloudTrail events, optionally
    # alongside any configured cross-account IDs (deduplicated)
    account_ids = list(
        set([primary_account] + ct_config.get('cross_account_ids', [])))
    # Convert the deprecated 'enabled' flag into the explicit settings
    # and persist the updated config back to disk
    if legacy_enabled:
        del config['clusters'][cluster_name]['modules']['cloudtrail']['enabled']
        config['clusters'][cluster_name]['modules']['cloudtrail']['enable_logging'] = True
        config['clusters'][cluster_name]['modules']['cloudtrail']['enable_kinesis'] = True
        LOGGER.info('Converting legacy CloudTrail config')
        config.write()
        kinesis_enabled = True
        logging_enabled = True
    region = config['global']['account']['region']
    event_pattern = ct_config.get('event_pattern', {'account': [primary_account]})
    # Valid top-level CloudWatch Event Pattern keys: http://amzn.to/2zF7CS0
    valid_pattern_keys = {
        'version', 'id', 'detail-type', 'source', 'account', 'time', 'region', 'resources', 'detail'
    }
    if not set(event_pattern).issubset(valid_pattern_keys):
        LOGGER.error('Config Error: Invalid CloudWatch Event Pattern!')
        return False
    module_info = {
        'source': 'modules/tf_stream_alert_cloudtrail',
        'primary_account_id': primary_account,
        'account_ids': account_ids,
        'cluster': cluster_name,
        'prefix': config['global']['account']['prefix'],
        'enable_logging': logging_enabled,
        'enable_kinesis': kinesis_enabled,
        's3_logging_bucket': config['global']['s3_access_logging']['logging_bucket'],
        'existing_trail': ct_config.get('existing_trail', False),
        'send_to_cloudwatch': use_cloudwatch,
        'exclude_home_region_events': skip_home_region,
        'region': region,
        'is_global_trail': ct_config.get('is_global_trail', True)
    }
    # Wire up the kinesis output from the kinesis streams module
    if kinesis_enabled:
        module_info['kinesis_arn'] = '${{module.kinesis_{}.arn}}'.format(cluster_name)
        module_info['event_pattern'] = json.dumps(event_pattern)
    if use_cloudwatch:
        module_info['cloudwatch_destination_arn'] = ct_config.get(
            'cloudwatch_destination_arn',
            '${{module.cloudwatch_{}_{}.cloudwatch_destination_arn}}'.format(cluster_name,
                                                                             region)
        )
    cluster_dict['module'][module_name] = module_info
    return True
| 41.257143 | 100 | 0.686288 | import json
from stream_alert.shared.logger import get_logger
LOGGER = get_logger(__name__)
def generate_cloudtrail(cluster_name, cluster_dict, config):
modules = config['clusters'][cluster_name]['modules']
cloudtrail_module = 'cloudtrail_{}'.format(cluster_name)
enabled_legacy = modules['cloudtrail'].get('enabled')
cloudtrail_enabled = modules['cloudtrail'].get('enable_logging', True)
kinesis_enabled = modules['cloudtrail'].get('enable_kinesis', True)
send_to_cloudwatch = modules['cloudtrail'].get('send_to_cloudwatch', False)
exclude_home_region = modules['cloudtrail'].get('exclude_home_region_events', False)
account_ids = list(
set([config['global']['account']['aws_account_id']] + modules['cloudtrail'].get(
'cross_account_ids', [])))
if enabled_legacy:
del config['clusters'][cluster_name]['modules']['cloudtrail']['enabled']
config['clusters'][cluster_name]['modules']['cloudtrail']['enable_logging'] = True
config['clusters'][cluster_name]['modules']['cloudtrail']['enable_kinesis'] = True
LOGGER.info('Converting legacy CloudTrail config')
config.write()
kinesis_enabled = True
cloudtrail_enabled = True
existing_trail = modules['cloudtrail'].get('existing_trail', False)
is_global_trail = modules['cloudtrail'].get('is_global_trail', True)
region = config['global']['account']['region']
event_pattern_default = {'account': [config['global']['account']['aws_account_id']]}
event_pattern = modules['cloudtrail'].get('event_pattern', event_pattern_default)
valid_event_pattern_keys = {
'version', 'id', 'detail-type', 'source', 'account', 'time', 'region', 'resources', 'detail'
}
if not set(event_pattern.keys()).issubset(valid_event_pattern_keys):
LOGGER.error('Config Error: Invalid CloudWatch Event Pattern!')
return False
module_info = {
'source': 'modules/tf_stream_alert_cloudtrail',
'primary_account_id': config['global']['account']['aws_account_id'],
'account_ids': account_ids,
'cluster': cluster_name,
'prefix': config['global']['account']['prefix'],
'enable_logging': cloudtrail_enabled,
'enable_kinesis': kinesis_enabled,
's3_logging_bucket': config['global']['s3_access_logging']['logging_bucket'],
'existing_trail': existing_trail,
'send_to_cloudwatch': send_to_cloudwatch,
'exclude_home_region_events': exclude_home_region,
'region': region,
'is_global_trail': is_global_trail
}
if kinesis_enabled:
module_info['kinesis_arn'] = '${{module.kinesis_{}.arn}}'.format(cluster_name)
module_info['event_pattern'] = json.dumps(event_pattern)
if send_to_cloudwatch:
destination_arn = modules['cloudtrail'].get(
'cloudwatch_destination_arn',
'${{module.cloudwatch_{}_{}.cloudwatch_destination_arn}}'.format(cluster_name,
region)
)
module_info['cloudwatch_destination_arn'] = destination_arn
cluster_dict['module'][cloudtrail_module] = module_info
return True
| true | true |
f738d34b0b45fa9d7ce0797c816edf87cad25272 | 7,402 | py | Python | src/dials/util/batch_handling.py | dials-src/dials | 25055c1f6164dc33e672e7c5c6a9c5a35e870660 | [
"BSD-3-Clause"
] | 1 | 2021-12-10T17:28:16.000Z | 2021-12-10T17:28:16.000Z | src/dials/util/batch_handling.py | dials-src/dials | 25055c1f6164dc33e672e7c5c6a9c5a35e870660 | [
"BSD-3-Clause"
] | null | null | null | src/dials/util/batch_handling.py | dials-src/dials | 25055c1f6164dc33e672e7c5c6a9c5a35e870660 | [
"BSD-3-Clause"
] | 1 | 2021-12-07T12:39:04.000Z | 2021-12-07T12:39:04.000Z | """
Functions to help with calculating batch properties for experiments objects.
"""
from __future__ import annotations
import logging
from dials.array_family import flex
logger = logging.getLogger("dials")
class batch_manager:
    """Map experiment batch numbers onto a contiguous, reduced scale.

    The configured batch ranges are collapsed so they sit back-to-back
    starting at 1, which is used for per-batch plotting.
    """
    def __init__(self, batches, batch_params):
        # batch params is a list of dicts with "id" and "range" - used to be
        # a 'scope extract' object
        self.batch_params = sorted(batch_params, key=lambda b: b["range"][0])
        self.batches = batches
        self.reduced_batches, self._batch_increments = self._reduce()
    def _reduce(self):
        """Collapse batch numbers so configured ranges become contiguous.

        Returns:
            (list, list): the reduced batch number for every input batch,
            and the starting offset of each range on the reduced scale.
        """
        reduced_batches = flex.int(self.batches)
        batch_increments = []
        incr = 0
        for batch in self.batch_params:
            # Select the batch numbers that fall inside this configured range
            sel = (reduced_batches >= batch["range"][0]) & (
                reduced_batches <= batch["range"][1]
            )
            # Shift the selected range so it starts at incr + 1
            reduced_batches.set_selected(
                sel, reduced_batches.select(sel) - (batch["range"][0] - incr) + 1
            )
            batch_increments.append(incr)
            incr += batch["range"][1] - batch["range"][0] + 1
        # Every reduced batch number must be unique across all ranges
        assert len(set(reduced_batches)) == len(reduced_batches)
        return list(reduced_batches), batch_increments
    def batch_plot_shapes_and_annotations(self):
        """Build plotly shapes/annotations marking each batch range.

        Returns:
            (list, list, list): rectangle shapes shading alternate ranges,
            text annotations labelling each range with its batch id, and
            per-batch text labels of the form "id: position-in-range".
        """
        light_grey = "#d3d3d3"
        grey = "#808080"
        shapes = []
        annotations = []
        batches = flex.int(self.batches)
        text = flex.std_string(batches.size())
        for i, batch in enumerate(self.batch_params):
            fillcolor = [light_grey, grey][i % 2]  # alternate colours
            shapes.append(
                {
                    "type": "rect",
                    # x-reference is assigned to the x-values
                    "xref": "x",
                    # y-reference is assigned to the plot paper [0,1]
                    "yref": "paper",
                    "x0": self._batch_increments[i],
                    "y0": 0,
                    "x1": self._batch_increments[i]
                    + (batch["range"][1] - batch["range"][0]),
                    "y1": 1,
                    "fillcolor": fillcolor,
                    "opacity": 0.2,
                    "line": {"width": 0},
                }
            )
            annotations.append(
                {
                    # x-reference is assigned to the x-values
                    "xref": "x",
                    # y-reference is assigned to the plot paper [0,1]
                    "yref": "paper",
                    # Centre the label horizontally within the range
                    "x": self._batch_increments[i]
                    + (batch["range"][1] - batch["range"][0]) / 2,
                    "y": 1,
                    "text": f"{batch['id']}",
                    "showarrow": False,
                    "yshift": 20,
                    # 'arrowhead': 7,
                    # 'ax': 0,
                    # 'ay': -40
                }
            )
            sel = (batches >= batch["range"][0]) & (batches <= batch["range"][1])
            # Label each batch with its id and 1-based position in the range
            text.set_selected(
                sel,
                flex.std_string(
                    [
                        f"{batch['id']}: {j - batch['range'][0] + 1}"
                        for j in batches.select(sel)
                    ]
                ),
            )
        return shapes, annotations, list(text)
def assign_batches_to_reflections(reflections, batch_offsets):
    """Assign a 'batch' column to each reflection table and return them."""
    for offset, table in zip(batch_offsets, reflections):
        xdet, ydet, zdet = [flex.double(col) for col in table["xyzobs.px.value"].parts()]
        # floor() gives the (fortran) image the spot was captured within;
        # +1 because of FORTRAN counting (zdet + 1 = image_index) and
        # image_index + offset = batch
        table["batch"] = (flex.floor(zdet).iround() + 1) + offset
    return reflections
def get_batch_ranges(experiments, batch_offsets):
    """Get batch ranges for a list of experiments and offsets."""
    assert len(experiments) == len(batch_offsets)
    image_ranges = get_image_ranges(experiments)
    # Shift every image range by its experiment's batch offset
    return [
        (first + offset, last + offset)
        for offset, (first, last) in zip(batch_offsets, image_ranges)
    ]
def get_image_ranges(experiments):
    """Get image ranges for a list of experiments (including scanless exp.)"""
    # Note: scanless experiments report (0, 0), not (1, 1) - otherwise the
    # first batch offset in _calculate_batch_offsets would be zero, bad!
    ranges = []
    for experiment in experiments:
        scan = experiment.scan
        ranges.append(scan.get_image_range() if scan else (0, 0))
    return ranges
def calculate_batch_offsets(experiment_list):
    """Take a list of experiments and resolve and return the batch offsets.

    Image ranges are derived first, as not all experiments have scans."""
    return _calculate_batch_offsets(get_image_ranges(experiment_list))
def set_batch_offsets(experiment_list, batch_offsets):
    """Set batch offsets in scan objects.

    Scanless experiments are skipped, as these are not used with the
    batch system."""
    for experiment, offset in zip(experiment_list, batch_offsets):
        scan = experiment.scan
        if scan:
            scan.set_batch_offset(offset)
def _calculate_batch_offsets(image_ranges):
"""Take a list of (modified) experiments and resolve and return the batch
offsets.
This is the number added to the image number to give the
batch number, such that:
- Each experiment has a unique, nonoverlapping, nonconsecutive range
- None are zero
- Image number ranges are kept if at all possible
"""
experiments_to_shift = []
existing_ranges = set()
maximum_batch_number = 0
batch_offsets = [0] * len(image_ranges)
# Handle zeroth shifts and kept ranges
for i, image_range in enumerate(image_ranges):
ilow, ihigh = image_range
# Check assumptions
assert ilow <= ihigh, "Inverted image order!?"
assert ilow >= 0, "Negative image indices are not expected"
# Don't emit zero: Causes problems with C/fortran number conversion
if ilow == 0:
ilow, ihigh = ilow + 1, ihigh + 1
# If we overlap with anything, then process later
if any(ilow < high + 1 and ihigh >= low - 1 for low, high in existing_ranges):
experiments_to_shift.append((i, image_range))
else:
batch_offsets[i] = ilow - image_range[0]
existing_ranges.add((ilow, ihigh))
maximum_batch_number = max(maximum_batch_number, ihigh)
# Now handle all the experiments that overlapped by pushing them higher
for i, image_range in experiments_to_shift:
start_number = _next_epoch(maximum_batch_number)
range_width = image_range[1] - image_range[0] + 1
end_number = start_number + range_width - 1
batch_offsets[i] = start_number - image_range[0]
maximum_batch_number = end_number
return batch_offsets
def _next_epoch(val):
"""Find the next number above the existing value that ends in 1, that is
not consecutive with the current value."""
if val % 100 == 99:
return val + 2
elif val % 100 == 0:
return val + 101
else:
rem = val % 100
return val - rem + 101
| 38.552083 | 86 | 0.579168 |
from __future__ import annotations
import logging
from dials.array_family import flex
logger = logging.getLogger("dials")
class batch_manager:
def __init__(self, batches, batch_params):
self.batch_params = sorted(batch_params, key=lambda b: b["range"][0])
self.batches = batches
self.reduced_batches, self._batch_increments = self._reduce()
def _reduce(self):
reduced_batches = flex.int(self.batches)
batch_increments = []
incr = 0
for batch in self.batch_params:
sel = (reduced_batches >= batch["range"][0]) & (
reduced_batches <= batch["range"][1]
)
reduced_batches.set_selected(
sel, reduced_batches.select(sel) - (batch["range"][0] - incr) + 1
)
batch_increments.append(incr)
incr += batch["range"][1] - batch["range"][0] + 1
assert len(set(reduced_batches)) == len(reduced_batches)
return list(reduced_batches), batch_increments
def batch_plot_shapes_and_annotations(self):
light_grey = "#d3d3d3"
grey = "#808080"
shapes = []
annotations = []
batches = flex.int(self.batches)
text = flex.std_string(batches.size())
for i, batch in enumerate(self.batch_params):
fillcolor = [light_grey, grey][i % 2]
shapes.append(
{
"type": "rect",
"xref": "x",
"yref": "paper",
"x0": self._batch_increments[i],
"y0": 0,
"x1": self._batch_increments[i]
+ (batch["range"][1] - batch["range"][0]),
"y1": 1,
"fillcolor": fillcolor,
"opacity": 0.2,
"line": {"width": 0},
}
)
annotations.append(
{
"xref": "x",
"yref": "paper",
"x": self._batch_increments[i]
+ (batch["range"][1] - batch["range"][0]) / 2,
"y": 1,
"text": f"{batch['id']}",
"showarrow": False,
"yshift": 20,
}
)
sel = (batches >= batch["range"][0]) & (batches <= batch["range"][1])
text.set_selected(
sel,
flex.std_string(
[
f"{batch['id']}: {j - batch['range'][0] + 1}"
for j in batches.select(sel)
]
),
)
return shapes, annotations, list(text)
def assign_batches_to_reflections(reflections, batch_offsets):
for batch_offset, refl in zip(batch_offsets, reflections):
xdet, ydet, zdet = [flex.double(x) for x in refl["xyzobs.px.value"].parts()]
refl["batch"] = (flex.floor(zdet).iround() + 1) + batch_offset
return reflections
def get_batch_ranges(experiments, batch_offsets):
batch_ranges = []
assert len(experiments) == len(batch_offsets)
image_ranges = get_image_ranges(experiments)
for batch_offset, image_range in zip(batch_offsets, image_ranges):
batch_ranges.append(
(image_range[0] + batch_offset, image_range[1] + batch_offset)
)
return batch_ranges
def get_image_ranges(experiments):
return [e.scan.get_image_range() if e.scan else (0, 0) for e in experiments]
def calculate_batch_offsets(experiment_list):
image_ranges = get_image_ranges(experiment_list)
offsets = _calculate_batch_offsets(image_ranges)
return offsets
def set_batch_offsets(experiment_list, batch_offsets):
for exp, offset in zip(experiment_list, batch_offsets):
if exp.scan:
exp.scan.set_batch_offset(offset)
def _calculate_batch_offsets(image_ranges):
experiments_to_shift = []
existing_ranges = set()
maximum_batch_number = 0
batch_offsets = [0] * len(image_ranges)
for i, image_range in enumerate(image_ranges):
ilow, ihigh = image_range
assert ilow <= ihigh, "Inverted image order!?"
assert ilow >= 0, "Negative image indices are not expected"
if ilow == 0:
ilow, ihigh = ilow + 1, ihigh + 1
# If we overlap with anything, then process later
if any(ilow < high + 1 and ihigh >= low - 1 for low, high in existing_ranges):
experiments_to_shift.append((i, image_range))
else:
batch_offsets[i] = ilow - image_range[0]
existing_ranges.add((ilow, ihigh))
maximum_batch_number = max(maximum_batch_number, ihigh)
# Now handle all the experiments that overlapped by pushing them higher
for i, image_range in experiments_to_shift:
start_number = _next_epoch(maximum_batch_number)
range_width = image_range[1] - image_range[0] + 1
end_number = start_number + range_width - 1
batch_offsets[i] = start_number - image_range[0]
maximum_batch_number = end_number
return batch_offsets
def _next_epoch(val):
if val % 100 == 99:
return val + 2
elif val % 100 == 0:
return val + 101
else:
rem = val % 100
return val - rem + 101
| true | true |
f738d3546b92ea29140748d9f8745f245cb5fc8e | 33,002 | py | Python | salt/modules/rh_ip.py | wikimedia/operations-debs-salt | be6342abc7401ff92f67ed59f7834f1359f35314 | [
"Apache-2.0"
] | null | null | null | salt/modules/rh_ip.py | wikimedia/operations-debs-salt | be6342abc7401ff92f67ed59f7834f1359f35314 | [
"Apache-2.0"
] | null | null | null | salt/modules/rh_ip.py | wikimedia/operations-debs-salt | be6342abc7401ff92f67ed59f7834f1359f35314 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
The networking module for RHEL/Fedora based distros
'''
# Import python libs
import logging
import os.path
import os
import StringIO
# Import third party libs
import jinja2
import jinja2.exceptions
# Import salt libs
import salt.utils
import salt.utils.templates
import salt.utils.validate.net
# Set up logging
log = logging.getLogger(__name__)
# Set up template environment
JINJA = jinja2.Environment(
loader=jinja2.FileSystemLoader(
os.path.join(salt.utils.templates.TEMPLATE_DIRNAME, 'rh_ip')
)
)
# Define the module's virtual name
__virtualname__ = 'ip'
def __virtual__():
    '''
    Confine this module to RHEL/Fedora based distros
    '''
    # __grains__ is injected into the module namespace by the salt loader
    if __grains__['os_family'] == 'RedHat':
        return __virtualname__
    return False
# Setup networking attributes
# ethtool settings accepted in an interface's ETHTOOL_OPTS
_ETHTOOL_CONFIG_OPTS = [
    'autoneg', 'speed', 'duplex',
    'rx', 'tx', 'sg', 'tso', 'ufo',
    'gso', 'gro', 'lro'
]
# General ifcfg-* options handled by this module
_RH_CONFIG_OPTS = [
    'domain', 'peerdns', 'defroute',
    'mtu', 'static-routes', 'gateway'
]
# Bonding driver options accepted in BONDING_OPTS
_RH_CONFIG_BONDING_OPTS = [
    'mode', 'miimon', 'arp_interval',
    'arp_ip_target', 'downdelay', 'updelay',
    'use_carrier', 'lacp_rate', 'hashing-algorithm',
    'max_bonds', 'tx_queues', 'num_grat_arp',
    'num_unsol_na', 'primary', 'primary_reselect',
    'ad_select', 'xmit_hash_policy', 'arp_validate',
    'fail_over_mac', 'all_slaves_active', 'resend_igmp'
]
# Filesystem locations of the RH network configuration
_RH_NETWORK_SCRIPT_DIR = '/etc/sysconfig/network-scripts'
_RH_NETWORK_FILE = '/etc/sysconfig/network'
_RH_NETWORK_CONF_FILES = '/etc/modprobe.d'
# Values treated as boolean true/false in user-supplied settings
_CONFIG_TRUE = ['yes', 'on', 'true', '1', True]
_CONFIG_FALSE = ['no', 'off', 'false', '0', False]
# Interface types this module knows how to configure
_IFACE_TYPES = [
    'eth', 'bond', 'alias', 'clone',
    'ipsec', 'dialup', 'bridge', 'slave', 'vlan',
]
def _error_msg_iface(iface, option, expected):
'''
Build an appropriate error message from a given option and
a list of expected values.
'''
msg = 'Invalid option -- Interface: {0}, Option: {1}, Expected: [{2}]'
return msg.format(iface, option, '|'.join(expected))
def _error_msg_routes(iface, option, expected):
'''
Build an appropriate error message from a given option and
a list of expected values.
'''
msg = 'Invalid option -- Route interface: {0}, Option: {1}, Expected: [{2}]'
return msg.format(iface, option, expected)
def _log_default_iface(iface, opt, value):
    '''Log that a default value is being used for an interface option.'''
    log.info(
        'Using default option -- Interface: {0} Option: {1} Value: {2}'.format(
            iface, opt, value
        )
    )
def _error_msg_network(option, expected):
'''
Build an appropriate error message from a given option and
a list of expected values.
'''
msg = 'Invalid network setting -- Setting: {0}, Expected: [{1}]'
return msg.format(option, '|'.join(expected))
def _log_default_network(opt, value):
    '''Log that an existing global network setting is being kept.'''
    log.info(
        'Using existing setting -- Setting: {0} Value: {1}'.format(opt, value)
    )
def _parse_rh_config(path):
    '''
    Parse a RH-style KEY=VALUE config file into a dict (keys upper-cased).

    Blank lines and lines starting with '!' or '#' are skipped; lines
    without an '=' are ignored.
    '''
    parsed = {}
    # _read_file returns '' on failure, which iterates to nothing
    for raw in _read_file(path) or []:
        stripped = raw.strip()
        if not stripped or stripped[0] in ('!', '#'):
            continue
        parts = [piece.rstrip() for piece in stripped.split('=', 1)]
        if len(parts) == 2:
            key, value = parts
            parsed[key.upper()] = value
    return parsed
def _parse_ethtool_opts(opts, iface):
    '''
    Build the dict used for ETHTOOL_OPTS from *opts*.

    Each recognized setting is validated; an unexpected value is logged
    and raised via _raise_error_iface with the interface, the setting,
    and the values that were expected.
    '''
    config = {}
    on_off = _CONFIG_TRUE + _CONFIG_FALSE
    if 'autoneg' in opts:
        if opts['autoneg'] in _CONFIG_TRUE:
            config['autoneg'] = 'on'
        elif opts['autoneg'] in _CONFIG_FALSE:
            config['autoneg'] = 'off'
        else:
            _raise_error_iface(iface, 'autoneg', on_off)
    if 'duplex' in opts:
        if opts['duplex'] in ('full', 'half'):
            config['duplex'] = opts['duplex']
        else:
            _raise_error_iface(iface, 'duplex', ['full', 'half'])
    if 'speed' in opts:
        speeds = ['10', '100', '1000', '10000']
        if str(opts['speed']) in speeds:
            config['speed'] = opts['speed']
        else:
            _raise_error_iface(iface, opts['speed'], speeds)
    # Simple on/off offload flags
    for flag in ('rx', 'tx', 'sg', 'tso', 'ufo', 'gso', 'gro', 'lro'):
        if flag not in opts:
            continue
        if opts[flag] in _CONFIG_TRUE:
            config[flag] = 'on'
        elif opts[flag] in _CONFIG_FALSE:
            config[flag] = 'off'
        else:
            _raise_error_iface(iface, flag, on_off)
    return config
def _parse_settings_bond(opts, iface):
    '''
    Dispatch bond option parsing to the mode-specific parser.

    Logs the selected bonding mode, supplies kernel defaults, and raises
    (via _raise_error_iface) when opts['mode'] is not a known mode.
    '''
    bond_def = {
        # 802.3ad aggregation selection logic
        # 0 for stable (default)
        # 1 for bandwidth
        # 2 for count
        'ad_select': '0',
        # Max number of transmit queues (default = 16)
        'tx_queues': '16',
        # Link monitoring in milliseconds. Most NICs support this
        'miimon': '100',
        # ARP interval in milliseconds
        'arp_interval': '250',
        # Delay before considering link down in milliseconds (miimon * 2)
        'downdelay': '200',
        # lacp_rate 0: Slow - every 30 seconds
        # lacp_rate 1: Fast - every 1 second
        'lacp_rate': '0',
        # Max bonds for this driver
        'max_bonds': '1',
        # Specifies the time, in milliseconds, to wait before
        # enabling a slave after a link recovery has been
        # detected. Only used with miimon.
        'updelay': '0',
        # Used with miimon.
        # On: driver sends mii
        # Off: ethtool sends mii
        'use_carrier': 'on',
        # Default. Don't change unless you know what you are doing.
        'xmit_hash_policy': 'layer2',
    }
    # (aliases accepted for the mode, human description, parser)
    dispatch = (
        (('balance-rr', '0'), 'load balancing (round-robin)',
         _parse_settings_bond_0),
        (('active-backup', '1'), 'fault-tolerance (active-backup)',
         _parse_settings_bond_1),
        (('balance-xor', '2'), 'load balancing (xor)',
         _parse_settings_bond_2),
        (('broadcast', '3'), 'fault-tolerance (broadcast)',
         _parse_settings_bond_3),
        (('802.3ad', '4'), 'IEEE 802.3ad Dynamic link aggregation',
         _parse_settings_bond_4),
        (('balance-tlb', '5'), 'transmit load balancing',
         _parse_settings_bond_5),
        (('balance-alb', '6'), 'adaptive load balancing',
         _parse_settings_bond_6),
    )
    for aliases, description, parser in dispatch:
        if opts['mode'] in aliases:
            log.info(
                'Device: {0} Bonding Mode: {1}'.format(iface, description)
            )
            return parser(opts, iface, bond_def)
    _raise_error_iface(iface, 'mode', [
        '0', '1', '2', '3', '4', '5', '6',
        'balance-rr', 'active-backup', 'balance-xor',
        'broadcast', '802.3ad', 'balance-tlb', 'balance-alb'
    ])
def _parse_settings_bond_0(opts, iface, bond_def):
'''
Filters given options and outputs valid settings for bond0.
If an option has a value that is not expected, this
function will log what the Interface, Setting and what it was
expecting.
'''
bond = {'mode': '0'}
# ARP targets in n.n.n.n form
valid = ['list of ips (up to 16)']
if 'arp_ip_target' in opts:
if isinstance(opts['arp_ip_target'], list):
if 1 <= len(opts['arp_ip_target']) <= 16:
bond.update({'arp_ip_target': []})
for ip in opts['arp_ip_target']: # pylint: disable=C0103
bond['arp_ip_target'].append(ip)
else:
_raise_error_iface(iface, 'arp_ip_target', valid)
else:
_raise_error_iface(iface, 'arp_ip_target', valid)
else:
_raise_error_iface(iface, 'arp_ip_target', valid)
if 'arp_interval' in opts:
try:
int(opts['arp_interval'])
bond.update({'arp_interval': opts['arp_interval']})
except Exception:
_raise_error_iface(iface, 'arp_interval', ['integer'])
else:
_log_default_iface(iface, 'arp_interval', bond_def['arp_interval'])
bond.update({'arp_interval': bond_def['arp_interval']})
return bond
def _parse_settings_bond_1(opts, iface, bond_def):
    '''
    Validate and collect settings for bonding mode 1 (active-backup).

    Integer link-monitoring options fall back to the kernel defaults in
    *bond_def*; bad values are raised via _raise_error_iface.
    '''
    bond = {'mode': '1'}
    for key in ('miimon', 'downdelay', 'updelay'):
        if key not in opts:
            _log_default_iface(iface, key, bond_def[key])
            bond[key] = bond_def[key]
            continue
        try:
            int(opts[key])
        except Exception:
            _raise_error_iface(iface, key, ['integer'])
        bond[key] = opts[key]
    if 'use_carrier' not in opts:
        _log_default_iface(iface, 'use_carrier', bond_def['use_carrier'])
        bond['use_carrier'] = bond_def['use_carrier']
    elif opts['use_carrier'] in _CONFIG_TRUE:
        bond['use_carrier'] = 'on'
    elif opts['use_carrier'] in _CONFIG_FALSE:
        bond['use_carrier'] = 'off'
    else:
        _raise_error_iface(iface, 'use_carrier', _CONFIG_TRUE + _CONFIG_FALSE)
    return bond
def _parse_settings_bond_2(opts, iface, bond_def):
'''
Filters given options and outputs valid settings for bond2.
If an option has a value that is not expected, this
function will log what the Interface, Setting and what it was
expecting.
'''
bond = {'mode': '2'}
valid = ['list of ips (up to 16)']
if 'arp_ip_target' in opts:
if isinstance(opts['arp_ip_target'], list):
if 1 <= len(opts['arp_ip_target']) <= 16:
bond.update({'arp_ip_target': []})
for ip in opts['arp_ip_target']: # pylint: disable=C0103
bond['arp_ip_target'].append(ip)
else:
_raise_error_iface(iface, 'arp_ip_target', valid)
else:
_raise_error_iface(iface, 'arp_ip_target', valid)
else:
_raise_error_iface(iface, 'arp_ip_target', valid)
if 'arp_interval' in opts:
try:
int(opts['arp_interval'])
bond.update({'arp_interval': opts['arp_interval']})
except Exception:
_raise_error_iface(iface, 'arp_interval', ['integer'])
else:
_log_default_iface(iface, 'arp_interval', bond_def['arp_interval'])
bond.update({'arp_interval': bond_def['arp_interval']})
if 'primary' in opts:
bond.update({'primary': opts['primary']})
if 'hashing-algorithm' in opts:
valid = ['layer2', 'layer2+3', 'layer3+4']
if opts['hashing-algorithm'] in valid:
bond.update({'xmit_hash_policy': opts['hashing-algorithm']})
else:
_raise_error_iface(iface, 'hashing-algorithm', valid)
return bond
def _parse_settings_bond_3(opts, iface, bond_def):
    '''
    Validate and collect settings for bonding mode 3 (broadcast).

    Integer link-monitoring options fall back to the kernel defaults in
    *bond_def*; bad values are raised via _raise_error_iface.
    '''
    bond = {'mode': '3'}
    for key in ('miimon', 'downdelay', 'updelay'):
        if key not in opts:
            _log_default_iface(iface, key, bond_def[key])
            bond[key] = bond_def[key]
            continue
        try:
            int(opts[key])
        except Exception:
            _raise_error_iface(iface, key, ['integer'])
        bond[key] = opts[key]
    if 'use_carrier' not in opts:
        _log_default_iface(iface, 'use_carrier', bond_def['use_carrier'])
        bond['use_carrier'] = bond_def['use_carrier']
    elif opts['use_carrier'] in _CONFIG_TRUE:
        bond['use_carrier'] = 'on'
    elif opts['use_carrier'] in _CONFIG_FALSE:
        bond['use_carrier'] = 'off'
    else:
        _raise_error_iface(iface, 'use_carrier', _CONFIG_TRUE + _CONFIG_FALSE)
    return bond
def _parse_settings_bond_4(opts, iface, bond_def):
    '''
    Validate and collect settings for bonding mode 4 (IEEE 802.3ad).

    Integer options fall back to the kernel defaults in *bond_def*;
    'lacp_rate' additionally accepts the symbolic names 'fast'/'slow',
    which are normalized to '1'/'0'. Bad values are raised via
    _raise_error_iface.
    '''
    bond = {'mode': '4'}
    for key in ('miimon', 'downdelay', 'updelay', 'lacp_rate', 'ad_select'):
        if key not in opts:
            _log_default_iface(iface, key, bond_def[key])
            bond[key] = bond_def[key]
            continue
        if key == 'lacp_rate':
            # Normalize symbolic rate names to the kernel's 0/1 values
            if opts[key] == 'fast':
                opts.update({key: '1'})
            if opts[key] == 'slow':
                opts.update({key: '0'})
            expected = ['fast', '1', 'slow', '0']
        else:
            expected = ['integer']
        try:
            int(opts[key])
        except Exception:
            _raise_error_iface(iface, key, expected)
        bond[key] = opts[key]
    if 'use_carrier' not in opts:
        _log_default_iface(iface, 'use_carrier', bond_def['use_carrier'])
        bond['use_carrier'] = bond_def['use_carrier']
    elif opts['use_carrier'] in _CONFIG_TRUE:
        bond['use_carrier'] = 'on'
    elif opts['use_carrier'] in _CONFIG_FALSE:
        bond['use_carrier'] = 'off'
    else:
        _raise_error_iface(iface, 'use_carrier', _CONFIG_TRUE + _CONFIG_FALSE)
    if 'hashing-algorithm' in opts:
        algos = ['layer2', 'layer2+3', 'layer3+4']
        if opts['hashing-algorithm'] not in algos:
            _raise_error_iface(iface, 'hashing-algorithm', algos)
        bond['xmit_hash_policy'] = opts['hashing-algorithm']
    return bond
def _parse_settings_bond_5(opts, iface, bond_def):
    '''
    Validate and collect settings for bonding mode 5 (balance-tlb).

    Integer link-monitoring options fall back to the kernel defaults in
    *bond_def*; bad values are raised via _raise_error_iface.
    '''
    bond = {'mode': '5'}
    for key in ('miimon', 'downdelay', 'updelay'):
        if key not in opts:
            _log_default_iface(iface, key, bond_def[key])
            bond[key] = bond_def[key]
            continue
        try:
            int(opts[key])
        except Exception:
            _raise_error_iface(iface, key, ['integer'])
        bond[key] = opts[key]
    if 'use_carrier' not in opts:
        _log_default_iface(iface, 'use_carrier', bond_def['use_carrier'])
        bond['use_carrier'] = bond_def['use_carrier']
    elif opts['use_carrier'] in _CONFIG_TRUE:
        bond['use_carrier'] = 'on'
    elif opts['use_carrier'] in _CONFIG_FALSE:
        bond['use_carrier'] = 'off'
    else:
        _raise_error_iface(iface, 'use_carrier', _CONFIG_TRUE + _CONFIG_FALSE)
    return bond
def _parse_settings_bond_6(opts, iface, bond_def):
    '''
    Validate and collect settings for bonding mode 6 (balance-alb).

    Integer link-monitoring options fall back to the kernel defaults in
    *bond_def*; bad values are raised via _raise_error_iface.
    '''
    bond = {'mode': '6'}
    for key in ('miimon', 'downdelay', 'updelay'):
        if key not in opts:
            _log_default_iface(iface, key, bond_def[key])
            bond[key] = bond_def[key]
            continue
        try:
            int(opts[key])
        except Exception:
            _raise_error_iface(iface, key, ['integer'])
        bond[key] = opts[key]
    if 'use_carrier' not in opts:
        _log_default_iface(iface, 'use_carrier', bond_def['use_carrier'])
        bond['use_carrier'] = bond_def['use_carrier']
    elif opts['use_carrier'] in _CONFIG_TRUE:
        bond['use_carrier'] = 'on'
    elif opts['use_carrier'] in _CONFIG_FALSE:
        bond['use_carrier'] = 'off'
    else:
        _raise_error_iface(iface, 'use_carrier', _CONFIG_TRUE + _CONFIG_FALSE)
    return bond
def _parse_settings_eth(opts, iface_type, enabled, iface):
    '''
    Filters given options and outputs valid settings for a
    network interface of the given *iface_type* ('eth', 'bond',
    'bridge', 'slave', 'vlan', ...).

    *enabled* controls the resulting ONBOOT value. Invalid values are
    raised via _raise_error_iface. NOTE(review): for bridge interfaces
    this function also persists sysctl settings as a side effect.
    '''
    result = {'name': iface}
    if 'proto' in opts:
        valid = ['none', 'bootp', 'dhcp']
        if opts['proto'] in valid:
            result['proto'] = opts['proto']
        else:
            _raise_error_iface(iface, opts['proto'], valid)
    if 'dns' in opts:
        result['dns'] = opts['dns']
        # DNS entries imply PEERDNS=yes in the rendered ifcfg file
        result['peerdns'] = 'yes'
    if 'mtu' in opts:
        try:
            result['mtu'] = int(opts['mtu'])
        except Exception:
            _raise_error_iface(iface, 'mtu', ['integer'])
    # ethtool options apply to everything except bridges
    if iface_type not in ['bridge']:
        ethtool = _parse_ethtool_opts(opts, iface)
        if ethtool:
            result['ethtool'] = ethtool
    # Bond slaves never run their own protocol
    if iface_type == 'slave':
        result['proto'] = 'none'
    if iface_type == 'bond':
        bonding = _parse_settings_bond(opts, iface)
        if bonding:
            result['bonding'] = bonding
    # Hardware address handling (not meaningful for bond/vlan/bridge)
    if iface_type not in ['bond', 'vlan', 'bridge']:
        if 'addr' in opts:
            if salt.utils.validate.net.mac(opts['addr']):
                result['addr'] = opts['addr']
            else:
                _raise_error_iface(iface, opts['addr'], ['AA:BB:CC:DD:EE:FF'])
        else:
            # If interface type is slave for bond, not setting hwaddr
            if iface_type != 'slave':
                ifaces = __salt__['network.interfaces']()
                if iface in ifaces and 'hwaddr' in ifaces[iface]:
                    result['addr'] = ifaces[iface]['hwaddr']
    if iface_type == 'bridge':
        result['devtype'] = 'Bridge'
        # Default: do not firewall bridged traffic (bridge-nf-call-* = 0)
        bypassfirewall = True
        valid = _CONFIG_TRUE + _CONFIG_FALSE
        for opt in ['bypassfirewall']:
            if opt in opts:
                if opts[opt] in _CONFIG_TRUE:
                    bypassfirewall = True
                elif opts[opt] in _CONFIG_FALSE:
                    bypassfirewall = False
                else:
                    _raise_error_iface(iface, opts[opt], valid)
        # Side effect: persist the bridge-netfilter sysctls accordingly
        if bypassfirewall:
            __salt__['sysctl.persist']('net.bridge.bridge-nf-call-ip6tables', '0')
            __salt__['sysctl.persist']('net.bridge.bridge-nf-call-iptables', '0')
            __salt__['sysctl.persist']('net.bridge.bridge-nf-call-arptables', '0')
        else:
            __salt__['sysctl.persist']('net.bridge.bridge-nf-call-ip6tables', '1')
            __salt__['sysctl.persist']('net.bridge.bridge-nf-call-iptables', '1')
            __salt__['sysctl.persist']('net.bridge.bridge-nf-call-arptables', '1')
    else:
        # Non-bridge interfaces may be attached to a bridge
        if 'bridge' in opts:
            result['bridge'] = opts['bridge']
    # Plain pass-through options
    for opt in ['ipaddr', 'master', 'netmask', 'srcaddr', 'delay', 'domain', 'gateway']:
        if opt in opts:
            result[opt] = opts[opt]
    for opt in ['ipv6addr', 'ipv6gateway']:
        if opt in opts:
            result[opt] = opts[opt]
    if 'ipv6_autoconf' in opts:
        result['ipv6_autoconf'] = opts['ipv6_autoconf']
    if 'enable_ipv6' in opts:
        result['enable_ipv6'] = opts['enable_ipv6']
    # Boolean options rendered as yes/no
    valid = _CONFIG_TRUE + _CONFIG_FALSE
    for opt in ['onparent', 'peerdns', 'slave', 'vlan', 'defroute', 'stp']:
        if opt in opts:
            if opts[opt] in _CONFIG_TRUE:
                result[opt] = 'yes'
            elif opts[opt] in _CONFIG_FALSE:
                result[opt] = 'no'
            else:
                _raise_error_iface(iface, opts[opt], valid)
    # ONBOOT always follows *enabled*; a user-supplied 'onboot' is ignored
    if 'onboot' in opts:
        log.warning(
            'The \'onboot\' option is controlled by the \'enabled\' option. '
            'Interface: {0} Enabled: {1}'.format(iface, enabled)
        )
    if enabled:
        result['onboot'] = 'yes'
    else:
        result['onboot'] = 'no'
    # If the interface is defined then we want to always take
    # control away from non-root users; unless the administrator
    # wants to allow non-root users to control the device.
    if 'userctl' in opts:
        if opts['userctl'] in _CONFIG_TRUE:
            result['userctl'] = 'yes'
        elif opts['userctl'] in _CONFIG_FALSE:
            result['userctl'] = 'no'
        else:
            _raise_error_iface(iface, opts['userctl'], valid)
    else:
        result['userctl'] = 'no'
    return result
def _parse_routes(iface, opts):
'''
Filters given options and outputs valid settings for
the route settings file.
'''
# Normalize keys
opts = dict((k.lower(), v) for (k, v) in opts.iteritems())
result = {}
if 'routes' not in opts:
_raise_error_routes(iface, 'routes', 'List of routes')
for opt in opts:
result[opt] = opts[opt]
return result
def _parse_network_settings(opts, current):
    '''
    Filters given options and outputs valid settings for
    the global network settings file (/etc/sysconfig/network).

    opts    -- requested settings (keys are case-insensitive)
    current -- settings parsed from the existing file, used as defaults

    Raises AttributeError (via _raise_error_network) on invalid values.
    '''
    # Normalize keys (.items() also keeps this working on Python 3)
    opts = dict((k.lower(), v) for (k, v) in opts.items())
    current = dict((k.lower(), v) for (k, v) in current.items())
    result = {}
    valid = _CONFIG_TRUE + _CONFIG_FALSE
    if 'enabled' not in opts:
        try:
            opts['networking'] = current['networking']
            _log_default_network('networking', current['networking'])
        except KeyError:
            # BUG FIX: a missing dict key raises KeyError, not ValueError;
            # the old 'except ValueError' let the bare KeyError escape
            # instead of raising the informative network error.
            _raise_error_network('networking', valid)
    else:
        opts['networking'] = opts['enabled']
    if opts['networking'] in valid:
        if opts['networking'] in _CONFIG_TRUE:
            result['networking'] = 'yes'
        elif opts['networking'] in _CONFIG_FALSE:
            result['networking'] = 'no'
    else:
        _raise_error_network('networking', valid)
    if 'hostname' not in opts:
        try:
            opts['hostname'] = current['hostname']
            _log_default_network('hostname', current['hostname'])
        except KeyError:
            # Narrowed from 'except Exception': only the missing key
            # is an expected failure here.
            _raise_error_network('hostname', ['server1.example.com'])
    if opts['hostname']:
        result['hostname'] = opts['hostname']
    else:
        _raise_error_network('hostname', ['server1.example.com'])
    if 'nozeroconf' in opts:
        if opts['nozeroconf'] in valid:
            if opts['nozeroconf'] in _CONFIG_TRUE:
                result['nozeroconf'] = 'true'
            elif opts['nozeroconf'] in _CONFIG_FALSE:
                result['nozeroconf'] = 'false'
        else:
            _raise_error_network('nozeroconf', valid)
    # Any remaining settings are passed straight through
    for opt in opts:
        if opt not in ['networking', 'hostname', 'nozeroconf']:
            result[opt] = opts[opt]
    return result
def _raise_error_iface(iface, option, expected):
    '''
    Log an invalid interface option and raise it as AttributeError.
    '''
    message = _error_msg_iface(iface, option, expected)
    log.error(message)
    raise AttributeError(message)
def _raise_error_network(option, expected):
    '''
    Log an invalid global network setting and raise it as AttributeError.
    '''
    message = _error_msg_network(option, expected)
    log.error(message)
    raise AttributeError(message)
def _raise_error_routes(iface, option, expected):
    '''
    Log an invalid route option and raise it as AttributeError.
    '''
    message = _error_msg_routes(iface, option, expected)
    log.error(message)
    raise AttributeError(message)
def _read_file(path):
    '''
    Return the contents of *path* as a list of lines (without trailing
    newline characters), or '' when the file cannot be read.
    '''
    try:
        with salt.utils.fopen(path, 'rb') as fhandle:
            # splitlines() drops the newline characters:
            # http://stackoverflow.com/questions/12330522/reading-a-file-without-newlines
            return fhandle.read().splitlines()
    except Exception:
        # Missing/unreadable file: '' iterates to nothing for callers
        return ''
def _write_file_iface(iface, data, folder, pattern):
    '''
    Write an interface script named pattern.format(iface) into *folder*.

    Raises AttributeError when the target directory does not exist.
    '''
    filename = os.path.join(folder, pattern.format(iface))
    if not os.path.exists(folder):
        msg = '{0} cannot be written. {1} does not exist'.format(
            filename, folder
        )
        log.error(msg)
        raise AttributeError(msg)
    with salt.utils.fopen(filename, 'w') as fout:
        fout.write(data)
def _write_file_network(data, filename):
    '''
    Write *data* to *filename*, replacing any existing contents.
    '''
    with salt.utils.fopen(filename, 'w') as fout:
        fout.write(data)
def _read_temp(data):
tout = StringIO.StringIO()
tout.write(data)
tout.seek(0)
output = tout.read().splitlines() # Discard newlines
tout.close()
return output
def build_bond(iface, **settings):
    '''
    Create a bond script in /etc/modprobe.d with the passed settings
    and load the bonding kernel module.

    CLI Example:

    .. code-block:: bash

        salt '*' ip.build_bond bond0 mode=balance-alb
    '''
    rh_major = __grains__['osrelease'][:1]
    opts = _parse_settings_bond(settings, iface)
    try:
        template = JINJA.get_template('conf.jinja')
    except jinja2.exceptions.TemplateNotFound:
        log.error('Could not load template conf.jinja')
        return ''
    data = template.render({'name': iface, 'bonding': opts})
    _write_file_iface(iface, data, _RH_NETWORK_CONF_FILES, '{0}.conf'.format(iface))
    path = os.path.join(_RH_NETWORK_CONF_FILES, '{0}.conf'.format(iface))
    if rh_major == '5':
        # RHEL5 keeps module options in /etc/modprobe.conf: drop any
        # stale alias/options lines for this interface, then include
        # the freshly written conf file.
        __salt__['cmd.run'](
            'sed -i -e "/^alias\\s{0}.*/d" /etc/modprobe.conf'.format(iface),
            python_shell=False
        )
        __salt__['cmd.run'](
            'sed -i -e "/^options\\s{0}.*/d" /etc/modprobe.conf'.format(iface),
            python_shell=False
        )
        __salt__['file.append']('/etc/modprobe.conf', path)
    __salt__['kmod.load']('bonding')
    # BUG FIX: 'test' is optional (build_interface guards it the same
    # way); settings['test'] raised KeyError when invoked without it.
    if settings.get('test'):
        return _read_temp(data)
    return _read_file(path)
def build_interface(iface, iface_type, enabled, **settings):
    '''
    Build an interface script for a network interface.

    CLI Example:

    .. code-block:: bash

        salt '*' ip.build_interface eth0 eth <settings>
    '''
    # Fedora uses the RHEL6-style ifcfg templates
    if __grains__['os'] == 'Fedora':
        os_major = '6'
    else:
        os_major = __grains__['osrelease'][:1]
    iface = iface.lower()
    iface_type = iface_type.lower()
    if iface_type not in _IFACE_TYPES:
        _raise_error_iface(iface, iface_type, _IFACE_TYPES)
    if iface_type == 'slave':
        settings['slave'] = 'yes'
        if 'master' not in settings:
            msg = 'master is a required setting for slave interfaces'
            log.error(msg)
            raise AttributeError(msg)
    elif iface_type == 'vlan':
        settings['vlan'] = 'yes'
    elif iface_type == 'bridge':
        __salt__['pkg.install']('bridge-utils')
    # Only these types have an ifcfg template to render
    if iface_type not in ('eth', 'bond', 'bridge', 'slave', 'vlan'):
        return None
    opts = _parse_settings_eth(settings, iface_type, enabled, iface)
    try:
        template = JINJA.get_template('rh{0}_eth.jinja'.format(os_major))
    except jinja2.exceptions.TemplateNotFound:
        log.error(
            'Could not load template rh{0}_eth.jinja'.format(os_major)
        )
        return ''
    ifcfg = template.render(opts)
    if settings.get('test'):
        return _read_temp(ifcfg)
    _write_file_iface(iface, ifcfg, _RH_NETWORK_SCRIPT_DIR, 'ifcfg-{0}')
    return _read_file(
        os.path.join(_RH_NETWORK_SCRIPT_DIR, 'ifcfg-{0}'.format(iface))
    )
def build_routes(iface, **settings):
    '''
    Build a route script for a network interface.

    CLI Example:

    .. code-block:: bash

        salt '*' ip.build_routes eth0 <settings>
    '''
    iface = iface.lower()
    opts = _parse_routes(iface, settings)
    try:
        template = JINJA.get_template('route_eth.jinja')
    except jinja2.exceptions.TemplateNotFound:
        log.error('Could not load template route_eth.jinja')
        return ''
    routecfg = template.render(routes=opts['routes'])
    # BUG FIX: 'test' is optional (build_interface guards it the same
    # way); settings['test'] raised KeyError when invoked without it.
    if settings.get('test'):
        return _read_temp(routecfg)
    _write_file_iface(iface, routecfg, _RH_NETWORK_SCRIPT_DIR, 'route-{0}')
    path = os.path.join(_RH_NETWORK_SCRIPT_DIR, 'route-{0}'.format(iface))
    return _read_file(path)
def down(iface, iface_type):
    '''
    Shutdown a network interface

    CLI Example:

    .. code-block:: bash

        salt '*' ip.down eth0
    '''
    # Slave devices are controlled by the master; nothing to do here.
    if iface_type in ['slave']:
        return None
    return __salt__['cmd.run']('ifdown {0}'.format(iface))
def get_bond(iface):
    '''
    Return the content of a bond script

    CLI Example:

    .. code-block:: bash

        salt '*' ip.get_bond bond0
    '''
    return _read_file(
        os.path.join(_RH_NETWORK_CONF_FILES, '{0}.conf'.format(iface))
    )
def get_interface(iface):
    '''
    Return the contents of an interface script

    CLI Example:

    .. code-block:: bash

        salt '*' ip.get_interface eth0
    '''
    return _read_file(
        os.path.join(_RH_NETWORK_SCRIPT_DIR, 'ifcfg-{0}'.format(iface))
    )
def up(iface, iface_type):  # pylint: disable=C0103
    '''
    Start up a network interface

    CLI Example:

    .. code-block:: bash

        salt '*' ip.up eth0
    '''
    # Slave devices are controlled by the master; nothing to do here.
    if iface_type in ['slave']:
        return None
    return __salt__['cmd.run']('ifup {0}'.format(iface))
def get_routes(iface):
    '''
    Return the contents of the interface routes script.

    CLI Example:

    .. code-block:: bash

        salt '*' ip.get_routes eth0
    '''
    return _read_file(
        os.path.join(_RH_NETWORK_SCRIPT_DIR, 'route-{0}'.format(iface))
    )
def get_network_settings():
    '''
    Return the contents of the global network script.
    CLI Example:
    .. code-block:: bash
    salt '*' ip.get_network_settings
    '''
    # Reads /etc/sysconfig/network as a list of lines ('' if unreadable)
    return _read_file(_RH_NETWORK_FILE)
def apply_network_settings(**settings):
    '''
    Apply global network configuration.

    CLI Example:

    .. code-block:: bash

        salt '*' ip.apply_network_settings
    '''
    settings.setdefault('require_reboot', False)
    if settings['require_reboot'] not in _CONFIG_TRUE:
        return __salt__['service.restart']('network')
    log.warning(
        'The network state sls is requiring a reboot of the system to '
        'properly apply network configuration.'
    )
    return True
def build_network_settings(**settings):
    '''
    Build the global network script.

    CLI Example:

    .. code-block:: bash

        salt '*' ip.build_network_settings <settings>
    '''
    # Read current configuration and store default values
    current_network_settings = _parse_rh_config(_RH_NETWORK_FILE)
    # Build settings
    opts = _parse_network_settings(settings, current_network_settings)
    try:
        template = JINJA.get_template('network.jinja')
    except jinja2.exceptions.TemplateNotFound:
        log.error('Could not load template network.jinja')
        return ''
    network = template.render(opts)
    # BUG FIX: 'test' is optional (build_interface guards it the same
    # way); settings['test'] raised KeyError when invoked without it.
    if settings.get('test'):
        return _read_temp(network)
    # Write settings
    _write_file_network(network, _RH_NETWORK_FILE)
    return _read_file(_RH_NETWORK_FILE)
| 30.699535 | 117 | 0.597146 |
import logging
import os.path
import os
import StringIO
import jinja2
import jinja2.exceptions
import salt.utils
import salt.utils.templates
import salt.utils.validate.net
log = logging.getLogger(__name__)
JINJA = jinja2.Environment(
loader=jinja2.FileSystemLoader(
os.path.join(salt.utils.templates.TEMPLATE_DIRNAME, 'rh_ip')
)
)
__virtualname__ = 'ip'
def __virtual__():
if __grains__['os_family'] == 'RedHat':
return __virtualname__
return False
# Setup networking attributes
_ETHTOOL_CONFIG_OPTS = [
'autoneg', 'speed', 'duplex',
'rx', 'tx', 'sg', 'tso', 'ufo',
'gso', 'gro', 'lro'
]
_RH_CONFIG_OPTS = [
'domain', 'peerdns', 'defroute',
'mtu', 'static-routes', 'gateway'
]
_RH_CONFIG_BONDING_OPTS = [
'mode', 'miimon', 'arp_interval',
'arp_ip_target', 'downdelay', 'updelay',
'use_carrier', 'lacp_rate', 'hashing-algorithm',
'max_bonds', 'tx_queues', 'num_grat_arp',
'num_unsol_na', 'primary', 'primary_reselect',
'ad_select', 'xmit_hash_policy', 'arp_validate',
'fail_over_mac', 'all_slaves_active', 'resend_igmp'
]
_RH_NETWORK_SCRIPT_DIR = '/etc/sysconfig/network-scripts'
_RH_NETWORK_FILE = '/etc/sysconfig/network'
_RH_NETWORK_CONF_FILES = '/etc/modprobe.d'
_CONFIG_TRUE = ['yes', 'on', 'true', '1', True]
_CONFIG_FALSE = ['no', 'off', 'false', '0', False]
_IFACE_TYPES = [
'eth', 'bond', 'alias', 'clone',
'ipsec', 'dialup', 'bridge', 'slave', 'vlan',
]
def _error_msg_iface(iface, option, expected):
msg = 'Invalid option -- Interface: {0}, Option: {1}, Expected: [{2}]'
return msg.format(iface, option, '|'.join(expected))
def _error_msg_routes(iface, option, expected):
msg = 'Invalid option -- Route interface: {0}, Option: {1}, Expected: [{2}]'
return msg.format(iface, option, expected)
def _log_default_iface(iface, opt, value):
msg = 'Using default option -- Interface: {0} Option: {1} Value: {2}'
log.info(msg.format(iface, opt, value))
def _error_msg_network(option, expected):
msg = 'Invalid network setting -- Setting: {0}, Expected: [{1}]'
return msg.format(option, '|'.join(expected))
def _log_default_network(opt, value):
msg = 'Using existing setting -- Setting: {0} Value: {1}'
log.info(msg.format(opt, value))
def _parse_rh_config(path):
rh_config = _read_file(path)
cv_rh_config = {}
if rh_config:
for line in rh_config:
line = line.strip()
if len(line) == 0 or line.startswith('!') or line.startswith('
continue
pair = [p.rstrip() for p in line.split('=', 1)]
if len(pair) != 2:
continue
name, value = pair
cv_rh_config[name.upper()] = value
return cv_rh_config
def _parse_ethtool_opts(opts, iface):
config = {}
if 'autoneg' in opts:
if opts['autoneg'] in _CONFIG_TRUE:
config.update({'autoneg': 'on'})
elif opts['autoneg'] in _CONFIG_FALSE:
config.update({'autoneg': 'off'})
else:
_raise_error_iface(iface, 'autoneg', _CONFIG_TRUE + _CONFIG_FALSE)
if 'duplex' in opts:
valid = ['full', 'half']
if opts['duplex'] in valid:
config.update({'duplex': opts['duplex']})
else:
_raise_error_iface(iface, 'duplex', valid)
if 'speed' in opts:
valid = ['10', '100', '1000', '10000']
if str(opts['speed']) in valid:
config.update({'speed': opts['speed']})
else:
_raise_error_iface(iface, opts['speed'], valid)
valid = _CONFIG_TRUE + _CONFIG_FALSE
for option in ('rx', 'tx', 'sg', 'tso', 'ufo', 'gso', 'gro', 'lro'):
if option in opts:
if opts[option] in _CONFIG_TRUE:
config.update({option: 'on'})
elif opts[option] in _CONFIG_FALSE:
config.update({option: 'off'})
else:
_raise_error_iface(iface, option, valid)
return config
def _parse_settings_bond(opts, iface):
bond_def = {
# 803.ad aggregation selection logic
# 0 for stable (default)
# 1 for bandwidth
# 2 for count
'ad_select': '0',
# Max number of transmit queues (default = 16)
'tx_queues': '16',
# Link monitoring in milliseconds. Most NICs support this
'miimon': '100',
# ARP interval in milliseconds
'arp_interval': '250',
# Delay before considering link down in milliseconds (miimon * 2)
'downdelay': '200',
# lacp_rate 0: Slow - every 30 seconds
# lacp_rate 1: Fast - every 1 second
'lacp_rate': '0',
# Max bonds for this driver
'max_bonds': '1',
# Specifies the time, in milliseconds, to wait before
# enabling a slave after a link recovery has been
# detected. Only used with miimon.
'updelay': '0',
# Used with miimon.
# On: driver sends mii
# Off: ethtool sends mii
'use_carrier': 'on',
# Default. Don't change unless you know what you are doing.
'xmit_hash_policy': 'layer2',
}
if opts['mode'] in ['balance-rr', '0']:
log.info(
'Device: {0} Bonding Mode: load balancing (round-robin)'.format(
iface
)
)
return _parse_settings_bond_0(opts, iface, bond_def)
elif opts['mode'] in ['active-backup', '1']:
log.info(
'Device: {0} Bonding Mode: fault-tolerance (active-backup)'.format(
iface
)
)
return _parse_settings_bond_1(opts, iface, bond_def)
elif opts['mode'] in ['balance-xor', '2']:
log.info(
'Device: {0} Bonding Mode: load balancing (xor)'.format(iface)
)
return _parse_settings_bond_2(opts, iface, bond_def)
elif opts['mode'] in ['broadcast', '3']:
log.info(
'Device: {0} Bonding Mode: fault-tolerance (broadcast)'.format(
iface
)
)
return _parse_settings_bond_3(opts, iface, bond_def)
elif opts['mode'] in ['802.3ad', '4']:
log.info(
'Device: {0} Bonding Mode: IEEE 802.3ad Dynamic link '
'aggregation'.format(iface)
)
return _parse_settings_bond_4(opts, iface, bond_def)
elif opts['mode'] in ['balance-tlb', '5']:
log.info(
'Device: {0} Bonding Mode: transmit load balancing'.format(iface)
)
return _parse_settings_bond_5(opts, iface, bond_def)
elif opts['mode'] in ['balance-alb', '6']:
log.info(
'Device: {0} Bonding Mode: adaptive load balancing'.format(iface)
)
return _parse_settings_bond_6(opts, iface, bond_def)
else:
valid = [
'0', '1', '2', '3', '4', '5', '6',
'balance-rr', 'active-backup', 'balance-xor',
'broadcast', '802.3ad', 'balance-tlb', 'balance-alb'
]
_raise_error_iface(iface, 'mode', valid)
def _parse_settings_bond_0(opts, iface, bond_def):
bond = {'mode': '0'}
valid = ['list of ips (up to 16)']
if 'arp_ip_target' in opts:
if isinstance(opts['arp_ip_target'], list):
if 1 <= len(opts['arp_ip_target']) <= 16:
bond.update({'arp_ip_target': []})
for ip in opts['arp_ip_target']:
bond['arp_ip_target'].append(ip)
else:
_raise_error_iface(iface, 'arp_ip_target', valid)
else:
_raise_error_iface(iface, 'arp_ip_target', valid)
else:
_raise_error_iface(iface, 'arp_ip_target', valid)
if 'arp_interval' in opts:
try:
int(opts['arp_interval'])
bond.update({'arp_interval': opts['arp_interval']})
except Exception:
_raise_error_iface(iface, 'arp_interval', ['integer'])
else:
_log_default_iface(iface, 'arp_interval', bond_def['arp_interval'])
bond.update({'arp_interval': bond_def['arp_interval']})
return bond
def _parse_settings_bond_1(opts, iface, bond_def):
    """
    Filter the given options into valid settings for bonding mode 1
    (active-backup). Invalid values abort via _raise_error_iface.
    """
    bond = {'mode': '1'}
    # Link-monitoring timers must all look like integers; missing ones
    # fall back to the supplied defaults.
    for timer in ('miimon', 'downdelay', 'updelay'):
        if timer not in opts:
            _log_default_iface(iface, timer, bond_def[timer])
            bond[timer] = bond_def[timer]
            continue
        try:
            int(opts[timer])
        except Exception:
            _raise_error_iface(iface, timer, ['integer'])
        bond[timer] = opts[timer]
    # use_carrier must be a recognised boolean; normalised to 'on'/'off'.
    if 'use_carrier' in opts:
        flag = opts['use_carrier']
        if flag in _CONFIG_TRUE:
            bond['use_carrier'] = 'on'
        elif flag in _CONFIG_FALSE:
            bond['use_carrier'] = 'off'
        else:
            _raise_error_iface(iface, 'use_carrier', _CONFIG_TRUE + _CONFIG_FALSE)
    else:
        _log_default_iface(iface, 'use_carrier', bond_def['use_carrier'])
        bond['use_carrier'] = bond_def['use_carrier']
    return bond
def _parse_settings_bond_2(opts, iface, bond_def):
    """
    Filter the given options into valid settings for bonding mode 2
    (balance-xor). Invalid values abort via _raise_error_iface.
    """
    bond = {'mode': '2'}
    # arp_ip_target is mandatory for this mode: a list of 1-16 addresses.
    targets = opts.get('arp_ip_target')
    if isinstance(targets, list) and 1 <= len(targets) <= 16:
        bond['arp_ip_target'] = []
        for addr in targets:
            bond['arp_ip_target'].append(addr)
    else:
        _raise_error_iface(iface, 'arp_ip_target', ['list of ips (up to 16)'])
    # arp_interval must look like an integer; otherwise use the default.
    if 'arp_interval' in opts:
        try:
            int(opts['arp_interval'])
        except Exception:
            _raise_error_iface(iface, 'arp_interval', ['integer'])
        bond['arp_interval'] = opts['arp_interval']
    else:
        _log_default_iface(iface, 'arp_interval', bond_def['arp_interval'])
        bond['arp_interval'] = bond_def['arp_interval']
    # Optional primary slave, passed through untouched.
    if 'primary' in opts:
        bond['primary'] = opts['primary']
    # Optional transmit hash policy; only the known layer policies count.
    if 'hashing-algorithm' in opts:
        policies = ['layer2', 'layer2+3', 'layer3+4']
        if opts['hashing-algorithm'] not in policies:
            _raise_error_iface(iface, 'hashing-algorithm', policies)
        bond['xmit_hash_policy'] = opts['hashing-algorithm']
    return bond
def _parse_settings_bond_3(opts, iface, bond_def):
    """
    Filter the given options into valid settings for bonding mode 3
    (broadcast). Invalid values abort via _raise_error_iface.
    """
    bond = {'mode': '3'}
    # Link-monitoring timers must all look like integers; missing ones
    # fall back to the supplied defaults.
    for timer in ('miimon', 'downdelay', 'updelay'):
        if timer not in opts:
            _log_default_iface(iface, timer, bond_def[timer])
            bond[timer] = bond_def[timer]
            continue
        try:
            int(opts[timer])
        except Exception:
            _raise_error_iface(iface, timer, ['integer'])
        bond[timer] = opts[timer]
    # use_carrier must be a recognised boolean; normalised to 'on'/'off'.
    if 'use_carrier' in opts:
        flag = opts['use_carrier']
        if flag in _CONFIG_TRUE:
            bond['use_carrier'] = 'on'
        elif flag in _CONFIG_FALSE:
            bond['use_carrier'] = 'off'
        else:
            _raise_error_iface(iface, 'use_carrier', _CONFIG_TRUE + _CONFIG_FALSE)
    else:
        _log_default_iface(iface, 'use_carrier', bond_def['use_carrier'])
        bond['use_carrier'] = bond_def['use_carrier']
    return bond
def _parse_settings_bond_4(opts, iface, bond_def):
    """
    Filter the given options into valid settings for bonding mode 4
    (802.3ad dynamic link aggregation). Invalid values abort via
    _raise_error_iface.
    """
    bond = {'mode': '4'}
    for option in ('miimon', 'downdelay', 'updelay', 'lacp_rate', 'ad_select'):
        if option not in opts:
            _log_default_iface(iface, option, bond_def[option])
            bond[option] = bond_def[option]
            continue
        if option == 'lacp_rate':
            # Map the symbolic LACP rates onto their numeric equivalents
            # before the integer check below.
            if opts[option] == 'fast':
                opts.update({option: '1'})
            if opts[option] == 'slow':
                opts.update({option: '0'})
            expected = ['fast', '1', 'slow', '0']
        else:
            expected = ['integer']
        try:
            int(opts[option])
        except Exception:
            _raise_error_iface(iface, option, expected)
        bond[option] = opts[option]
    # use_carrier must be a recognised boolean; normalised to 'on'/'off'.
    if 'use_carrier' in opts:
        flag = opts['use_carrier']
        if flag in _CONFIG_TRUE:
            bond['use_carrier'] = 'on'
        elif flag in _CONFIG_FALSE:
            bond['use_carrier'] = 'off'
        else:
            _raise_error_iface(iface, 'use_carrier', _CONFIG_TRUE + _CONFIG_FALSE)
    else:
        _log_default_iface(iface, 'use_carrier', bond_def['use_carrier'])
        bond['use_carrier'] = bond_def['use_carrier']
    # Optional transmit hash policy; only the known layer policies count.
    if 'hashing-algorithm' in opts:
        policies = ['layer2', 'layer2+3', 'layer3+4']
        if opts['hashing-algorithm'] not in policies:
            _raise_error_iface(iface, 'hashing-algorithm', policies)
        bond['xmit_hash_policy'] = opts['hashing-algorithm']
    return bond
def _parse_settings_bond_5(opts, iface, bond_def):
    """
    Filter the given options into valid settings for bonding mode 5
    (balance-tlb, transmit load balancing). Invalid values abort via
    _raise_error_iface.
    """
    bond = {'mode': '5'}
    # Link-monitoring timers must all look like integers; missing ones
    # fall back to the supplied defaults.
    for timer in ('miimon', 'downdelay', 'updelay'):
        if timer not in opts:
            _log_default_iface(iface, timer, bond_def[timer])
            bond[timer] = bond_def[timer]
            continue
        try:
            int(opts[timer])
        except Exception:
            _raise_error_iface(iface, timer, ['integer'])
        bond[timer] = opts[timer]
    # use_carrier must be a recognised boolean; normalised to 'on'/'off'.
    if 'use_carrier' in opts:
        flag = opts['use_carrier']
        if flag in _CONFIG_TRUE:
            bond['use_carrier'] = 'on'
        elif flag in _CONFIG_FALSE:
            bond['use_carrier'] = 'off'
        else:
            _raise_error_iface(iface, 'use_carrier', _CONFIG_TRUE + _CONFIG_FALSE)
    else:
        _log_default_iface(iface, 'use_carrier', bond_def['use_carrier'])
        bond['use_carrier'] = bond_def['use_carrier']
    return bond
def _parse_settings_bond_6(opts, iface, bond_def):
    """
    Filter the given options into valid settings for bonding mode 6
    (balance-alb, adaptive load balancing). Invalid values abort via
    _raise_error_iface.
    """
    bond = {'mode': '6'}
    # Link-monitoring timers must all look like integers; missing ones
    # fall back to the supplied defaults.
    for timer in ('miimon', 'downdelay', 'updelay'):
        if timer not in opts:
            _log_default_iface(iface, timer, bond_def[timer])
            bond[timer] = bond_def[timer]
            continue
        try:
            int(opts[timer])
        except Exception:
            _raise_error_iface(iface, timer, ['integer'])
        bond[timer] = opts[timer]
    # use_carrier must be a recognised boolean; normalised to 'on'/'off'.
    if 'use_carrier' in opts:
        flag = opts['use_carrier']
        if flag in _CONFIG_TRUE:
            bond['use_carrier'] = 'on'
        elif flag in _CONFIG_FALSE:
            bond['use_carrier'] = 'off'
        else:
            _raise_error_iface(iface, 'use_carrier', _CONFIG_TRUE + _CONFIG_FALSE)
    else:
        _log_default_iface(iface, 'use_carrier', bond_def['use_carrier'])
        bond['use_carrier'] = bond_def['use_carrier']
    return bond
def _parse_settings_eth(opts, iface_type, enabled, iface):
    """
    Filter the given options into a settings dict usable by the ifcfg
    templates. Invalid values abort via _raise_error_iface; note that the
    bridge branch persists sysctl values as a side effect.
    """
    result = {'name': iface}
    # proto is restricted to the three values the templates understand.
    if 'proto' in opts:
        valid = ['none', 'bootp', 'dhcp']
        if opts['proto'] in valid:
            result['proto'] = opts['proto']
        else:
            _raise_error_iface(iface, opts['proto'], valid)
    # Supplying DNS servers implicitly enables peerdns in the ifcfg file.
    if 'dns' in opts:
        result['dns'] = opts['dns']
        result['peerdns'] = 'yes'
    if 'mtu' in opts:
        try:
            result['mtu'] = int(opts['mtu'])
        except Exception:
            _raise_error_iface(iface, 'mtu', ['integer'])
    # ethtool options apply to everything except bridges.
    if iface_type not in ['bridge']:
        ethtool = _parse_ethtool_opts(opts, iface)
        if ethtool:
            result['ethtool'] = ethtool
    # Slaves never configure their own protocol; the master does.
    if iface_type == 'slave':
        result['proto'] = 'none'
    if iface_type == 'bond':
        bonding = _parse_settings_bond(opts, iface)
        if bonding:
            result['bonding'] = bonding
    # Hardware address handling: explicit addr must be a valid MAC;
    # otherwise reuse the currently reported hwaddr (except for slaves).
    if iface_type not in ['bond', 'vlan', 'bridge']:
        if 'addr' in opts:
            if salt.utils.validate.net.mac(opts['addr']):
                result['addr'] = opts['addr']
            else:
                _raise_error_iface(iface, opts['addr'], ['AA:BB:CC:DD:EE:FF'])
        else:
            if iface_type != 'slave':
                ifaces = __salt__['network.interfaces']()
                if iface in ifaces and 'hwaddr' in ifaces[iface]:
                    result['addr'] = ifaces[iface]['hwaddr']
    if iface_type == 'bridge':
        result['devtype'] = 'Bridge'
        # By default bridge traffic bypasses the firewall; the sysctl
        # values are persisted immediately (a side effect of parsing).
        bypassfirewall = True
        valid = _CONFIG_TRUE + _CONFIG_FALSE
        for opt in ['bypassfirewall']:
            if opt in opts:
                if opts[opt] in _CONFIG_TRUE:
                    bypassfirewall = True
                elif opts[opt] in _CONFIG_FALSE:
                    bypassfirewall = False
                else:
                    _raise_error_iface(iface, opts[opt], valid)
        if bypassfirewall:
            __salt__['sysctl.persist']('net.bridge.bridge-nf-call-ip6tables', '0')
            __salt__['sysctl.persist']('net.bridge.bridge-nf-call-iptables', '0')
            __salt__['sysctl.persist']('net.bridge.bridge-nf-call-arptables', '0')
        else:
            __salt__['sysctl.persist']('net.bridge.bridge-nf-call-ip6tables', '1')
            __salt__['sysctl.persist']('net.bridge.bridge-nf-call-iptables', '1')
            __salt__['sysctl.persist']('net.bridge.bridge-nf-call-arptables', '1')
    else:
        if 'bridge' in opts:
            result['bridge'] = opts['bridge']
    # Plain pass-through options.
    for opt in ['ipaddr', 'master', 'netmask', 'srcaddr', 'delay', 'domain', 'gateway']:
        if opt in opts:
            result[opt] = opts[opt]
    for opt in ['ipv6addr', 'ipv6gateway']:
        if opt in opts:
            result[opt] = opts[opt]
    if 'ipv6_autoconf' in opts:
        result['ipv6_autoconf'] = opts['ipv6_autoconf']
    if 'enable_ipv6' in opts:
        result['enable_ipv6'] = opts['enable_ipv6']
    # Boolean options normalised to the 'yes'/'no' strings ifcfg expects.
    valid = _CONFIG_TRUE + _CONFIG_FALSE
    for opt in ['onparent', 'peerdns', 'slave', 'vlan', 'defroute', 'stp']:
        if opt in opts:
            if opts[opt] in _CONFIG_TRUE:
                result[opt] = 'yes'
            elif opts[opt] in _CONFIG_FALSE:
                result[opt] = 'no'
            else:
                _raise_error_iface(iface, opts[opt], valid)
    # onboot always follows the 'enabled' argument; a user-supplied value
    # is ignored (with a warning).
    if 'onboot' in opts:
        log.warning(
            'The \'onboot\' option is controlled by the \'enabled\' option. '
            'Interface: {0} Enabled: {1}'.format(iface, enabled)
        )
    if enabled:
        result['onboot'] = 'yes'
    else:
        result['onboot'] = 'no'
    # userctl defaults to 'no' unless explicitly set to a valid boolean.
    if 'userctl' in opts:
        if opts['userctl'] in _CONFIG_TRUE:
            result['userctl'] = 'yes'
        elif opts['userctl'] in _CONFIG_FALSE:
            result['userctl'] = 'no'
        else:
            _raise_error_iface(iface, opts['userctl'], valid)
    else:
        result['userctl'] = 'no'
    return result
def _parse_routes(iface, opts):
    """
    Filter and normalize the route options for an interface.

    Keys are lower-cased; a 'routes' entry is mandatory.
    """
    opts = dict((key.lower(), value) for (key, value) in opts.iteritems())
    if 'routes' not in opts:
        _raise_error_routes(iface, 'routes', 'List of routes')
    # Everything (including 'routes' itself) is passed through verbatim.
    return dict(opts)
def _parse_network_settings(opts, current):
    """
    Filter the given options into valid settings for the global network
    file, falling back to the values in *current* where possible.
    """
    # Normalize keys to lower case so option lookup is case-insensitive.
    opts = dict((k.lower(), v) for (k, v) in opts.iteritems())
    current = dict((k.lower(), v) for (k, v) in current.iteritems())
    result = {}
    valid = _CONFIG_TRUE + _CONFIG_FALSE
    if 'enabled' not in opts:
        try:
            opts['networking'] = current['networking']
            _log_default_network('networking', current['networking'])
        except KeyError:
            # BUGFIX: a missing dict key raises KeyError, not ValueError,
            # so the old `except ValueError` never fired and the raw
            # KeyError escaped instead of this descriptive error.
            _raise_error_network('networking', valid)
    else:
        opts['networking'] = opts['enabled']
    if opts['networking'] in valid:
        if opts['networking'] in _CONFIG_TRUE:
            result['networking'] = 'yes'
        elif opts['networking'] in _CONFIG_FALSE:
            result['networking'] = 'no'
    else:
        _raise_error_network('networking', valid)
    if 'hostname' not in opts:
        try:
            opts['hostname'] = current['hostname']
            _log_default_network('hostname', current['hostname'])
        except Exception:
            _raise_error_network('hostname', ['server1.example.com'])
    if opts['hostname']:
        result['hostname'] = opts['hostname']
    else:
        _raise_error_network('hostname', ['server1.example.com'])
    if 'nozeroconf' in opts:
        if opts['nozeroconf'] in valid:
            if opts['nozeroconf'] in _CONFIG_TRUE:
                result['nozeroconf'] = 'true'
            elif opts['nozeroconf'] in _CONFIG_FALSE:
                result['nozeroconf'] = 'false'
        else:
            _raise_error_network('nozeroconf', valid)
    # Any remaining options are passed straight through.
    for opt in opts:
        if opt not in ['networking', 'hostname', 'nozeroconf']:
            result[opt] = opts[opt]
    return result
def _raise_error_iface(iface, option, expected):
    """Log and raise a descriptive error for an invalid interface option."""
    message = _error_msg_iface(iface, option, expected)
    log.error(message)
    raise AttributeError(message)
def _raise_error_network(option, expected):
    """Log and raise a descriptive error for an invalid network option."""
    message = _error_msg_network(option, expected)
    log.error(message)
    raise AttributeError(message)
def _raise_error_routes(iface, option, expected):
    """Log and raise a descriptive error for an invalid route option."""
    message = _error_msg_routes(iface, option, expected)
    log.error(message)
    raise AttributeError(message)
def _read_file(path):
    """
    Read a file and return its contents as a list of lines.

    Returns an empty list when the file cannot be read (best-effort).
    """
    try:
        with salt.utils.fopen(path, 'rb') as contents:
            return contents.read().splitlines()
    except Exception:
        # BUGFIX: the failure path used to return '' while the success
        # path returns a list; keep the return type consistent.
        return []
def _write_file_iface(iface, data, folder, pattern):
    """
    Write an interface configuration file named per *pattern* inside
    *folder*, raising AttributeError if the folder does not exist.
    """
    filename = os.path.join(folder, pattern.format(iface))
    if not os.path.exists(folder):
        msg = '{0} cannot be written. {1} does not exist'
        msg = msg.format(filename, folder)
        log.error(msg)
        raise AttributeError(msg)
    # Context manager guarantees the handle is closed even if write()
    # fails (the old open/write/close leaked the handle on error).
    with salt.utils.fopen(filename, 'w') as fout:
        fout.write(data)
def _write_file_network(data, filename):
    """
    Write *data* to the global network configuration file *filename*.
    """
    # Context manager guarantees the handle is closed even if write()
    # fails (the old open/write/close leaked the handle on error).
    with salt.utils.fopen(filename, 'w') as fout:
        fout.write(data)
def _read_temp(data):
tout = StringIO.StringIO()
tout.write(data)
tout.seek(0)
output = tout.read().splitlines()
tout.close()
return output
def build_bond(iface, **settings):
    """
    Build a bond script for a network interface and return its lines.
    """
    rh_major = __grains__['osrelease'][:1]
    opts = _parse_settings_bond(settings, iface)
    try:
        template = JINJA.get_template('conf.jinja')
    except jinja2.exceptions.TemplateNotFound:
        log.error('Could not load template conf.jinja')
        return ''
    data = template.render({'name': iface, 'bonding': opts})
    # BUGFIX: check the dry-run flag *before* mutating the system (the
    # old code wrote files, edited modprobe.conf and loaded the kernel
    # module even in test mode, and `settings['test']` raised KeyError
    # when 'test' was omitted). The other build_* functions check first.
    if settings.get('test'):
        return _read_temp(data)
    _write_file_iface(iface, data, _RH_NETWORK_CONF_FILES, '{0}.conf'.format(iface))
    path = os.path.join(_RH_NETWORK_CONF_FILES, '{0}.conf'.format(iface))
    if rh_major == '5':
        # RHEL 5 keeps bonding aliases in /etc/modprobe.conf: drop stale
        # entries for this interface and append the freshly built config.
        __salt__['cmd.run'](
            'sed -i -e "/^alias\\s{0}.*/d" /etc/modprobe.conf'.format(iface),
            python_shell=False
        )
        __salt__['cmd.run'](
            'sed -i -e "/^options\\s{0}.*/d" /etc/modprobe.conf'.format(iface),
            python_shell=False
        )
        __salt__['file.append']('/etc/modprobe.conf', path)
    __salt__['kmod.load']('bonding')
    return _read_file(path)
def build_interface(iface, iface_type, enabled, **settings):
    """
    Build an interface script for a network interface and return its
    lines (or the rendered text in test mode).
    """
    # Fedora always uses the RHEL 6 template regardless of its release.
    if __grains__['os'] == 'Fedora':
        rh_major = '6'
    else:
        rh_major = __grains__['osrelease'][:1]
    iface = iface.lower()
    iface_type = iface_type.lower()
    if iface_type not in _IFACE_TYPES:
        _raise_error_iface(iface, iface_type, _IFACE_TYPES)
    if iface_type == 'slave':
        settings['slave'] = 'yes'
        if 'master' not in settings:
            msg = 'master is a required setting for slave interfaces'
            log.error(msg)
            raise AttributeError(msg)
    if iface_type == 'vlan':
        settings['vlan'] = 'yes'
    if iface_type == 'bridge':
        __salt__['pkg.install']('bridge-utils')
    if iface_type in ['eth', 'bond', 'bridge', 'slave', 'vlan']:
        opts = _parse_settings_eth(settings, iface_type, enabled, iface)
        template_name = 'rh{0}_eth.jinja'.format(rh_major)
        try:
            template = JINJA.get_template(template_name)
        except jinja2.exceptions.TemplateNotFound:
            log.error('Could not load template {0}'.format(template_name))
            return ''
        ifcfg = template.render(opts)
        # In test mode return the rendered text without writing anything.
        if 'test' in settings and settings['test']:
            return _read_temp(ifcfg)
        _write_file_iface(iface, ifcfg, _RH_NETWORK_SCRIPT_DIR, 'ifcfg-{0}')
        path = os.path.join(_RH_NETWORK_SCRIPT_DIR, 'ifcfg-{0}'.format(iface))
        return _read_file(path)
def build_routes(iface, **settings):
    """
    Build a route script for a network interface and return its lines.
    """
    iface = iface.lower()
    opts = _parse_routes(iface, settings)
    try:
        template = JINJA.get_template('route_eth.jinja')
    except jinja2.exceptions.TemplateNotFound:
        log.error(
            'Could not load template route_eth.jinja'
        )
        return ''
    routecfg = template.render(routes=opts['routes'])
    # BUGFIX: 'test' is optional; .get() avoids the KeyError that the
    # bare settings['test'] raised when it was omitted.
    if settings.get('test'):
        return _read_temp(routecfg)
    _write_file_iface(iface, routecfg, _RH_NETWORK_SCRIPT_DIR, 'route-{0}')
    path = os.path.join(_RH_NETWORK_SCRIPT_DIR, 'route-{0}'.format(iface))
    return _read_file(path)
def down(iface, iface_type):
    """Shut down an interface; slaves are left to their bond master."""
    if iface_type == 'slave':
        return None
    return __salt__['cmd.run']('ifdown {0}'.format(iface))
def get_bond(iface):
    """Return the lines of the bond configuration for *iface*."""
    conf_path = os.path.join(_RH_NETWORK_CONF_FILES, '{0}.conf'.format(iface))
    return _read_file(conf_path)
def get_interface(iface):
    """Return the lines of the ifcfg script for *iface*."""
    script_path = os.path.join(_RH_NETWORK_SCRIPT_DIR, 'ifcfg-{0}'.format(iface))
    return _read_file(script_path)
def up(iface, iface_type):
    """Start up an interface; slaves are left to their bond master."""
    if iface_type == 'slave':
        return None
    return __salt__['cmd.run']('ifup {0}'.format(iface))
def get_routes(iface):
    """Return the lines of the route script for *iface*."""
    route_path = os.path.join(_RH_NETWORK_SCRIPT_DIR, 'route-{0}'.format(iface))
    return _read_file(route_path)
def get_network_settings():
    """Return the lines of the global network file (_RH_NETWORK_FILE)."""
    return _read_file(_RH_NETWORK_FILE)
def apply_network_settings(**settings):
    """
    Apply global network configuration: either defer to a reboot (when
    require_reboot is truthy) or restart the network service.
    """
    require_reboot = settings.get('require_reboot', False)
    if require_reboot in _CONFIG_TRUE:
        log.warning(
            'The network state sls is requiring a reboot of the system to '
            'properly apply network configuration.'
        )
        return True
    return __salt__['service.restart']('network')
def build_network_settings(**settings):
    """
    Build the global network script and return its lines.
    """
    current_network_settings = _parse_rh_config(_RH_NETWORK_FILE)
    opts = _parse_network_settings(settings, current_network_settings)
    try:
        template = JINJA.get_template('network.jinja')
    except jinja2.exceptions.TemplateNotFound:
        log.error('Could not load template network.jinja')
        return ''
    network = template.render(opts)
    # BUGFIX: 'test' is optional; .get() avoids the KeyError that the
    # bare settings['test'] raised when it was omitted.
    if settings.get('test'):
        return _read_temp(network)
    _write_file_network(network, _RH_NETWORK_FILE)
    return _read_file(_RH_NETWORK_FILE)
| true | true |
f738d5716ac500af9359849c11832bf0363239e6 | 991 | py | Python | problems/predictingofficespaceprice/submissions/accepted/stefan2.py | stoman/CompetitiveProgramming | 0000b64369b50e31c6f48939e837bdf6cece8ce4 | [
"MIT"
] | 2 | 2020-12-22T13:21:25.000Z | 2021-12-12T22:26:26.000Z | problems/predictingofficespaceprice/submissions/accepted/stefan2.py | stoman/CompetitiveProgramming | 0000b64369b50e31c6f48939e837bdf6cece8ce4 | [
"MIT"
] | null | null | null | problems/predictingofficespaceprice/submissions/accepted/stefan2.py | stoman/CompetitiveProgramming | 0000b64369b50e31c6f48939e837bdf6cece8ce4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
#Author: Stefan Toman
import itertools
import numpy as np
from operator import mul
from sklearn.linear_model import LinearRegression
if __name__ == '__main__':
    # Python 2 script (raw_input/reduce are builtins there).
    # First input line holds the feature count and the training-set size.
    num_feat, num_train = map(int, raw_input().split())
    train_x = []
    train_y = []
    for _ in range(num_train):
        fields = raw_input().split()
        train_x.append([float(v) for v in fields[:-1]])
        train_y.append([float(fields[-1])])
    num_query = int(raw_input())
    query_x = []
    for _ in range(num_query):
        query_x.append([float(v) for v in raw_input().split()])
    # Augment both matrices with every monomial of degree 2 and 3 over the
    # original feature columns so a linear model can fit polynomial terms.
    train_x = np.array(train_x)
    query_x = np.array(query_x)
    for degree in range(2, 4):
        for combo in itertools.product(range(num_feat), repeat=degree):
            train_x = np.hstack((train_x, reduce(mul, [train_x[:, j] for j in combo]).reshape(-1, 1)))
            query_x = np.hstack((query_x, reduce(mul, [query_x[:, j] for j in combo]).reshape(-1, 1)))
    # Ordinary least squares on the expanded features; one prediction per row.
    for prediction in LinearRegression().fit(train_x, train_y).predict(query_x):
        print(prediction[0])
| 30.030303 | 85 | 0.572149 |
import itertools
import numpy as np
from operator import mul
from sklearn.linear_model import LinearRegression
if __name__ == '__main__':
    # Python 2 script (raw_input/reduce are builtins there).
    # First line: number of features f and number of training samples n.
    f, n = map(int, raw_input().split())
    X = []
    y = []
    for _ in range(n):
        line = raw_input().split()
        X.append([float(x) for x in line[:-1]])
        y.append([float(line[-1])])
    # q query rows follow, containing features only.
    q = int(raw_input())
    Xt = []
    for _ in range(q):
        Xt.append([float(x) for x in raw_input().split()])
    # Augment both matrices with all monomials of degree 2 and 3 built
    # from the original f feature columns.
    X = np.array(X)
    Xt = np.array(Xt)
    for i in range(2, 4):
        for var in itertools.product(range(f), repeat=i):
            X = np.hstack((X, reduce(mul, [X[:, j] for j in var]).reshape(-1, 1)))
            Xt = np.hstack((Xt, reduce(mul, [Xt[:, j] for j in var]).reshape(-1, 1)))
    # Ordinary least squares on the expanded features; print predictions.
    for yt in LinearRegression().fit(X, y).predict(Xt):
        print(yt[0])
| true | true |
f738d5b0215c5883ba81ab9a0ea08660d6114dd6 | 584 | py | Python | examples/plotting/server/iris.py | rothnic/bokeh | 8da5e16b260a75caa8e7ef4caf215bb93dd784db | [
"BSD-3-Clause"
] | 1 | 2015-07-17T13:57:01.000Z | 2015-07-17T13:57:01.000Z | examples/plotting/server/iris.py | evidation-health/bokeh | 2c580d93419033b962d36e3c46d7606cc2f24606 | [
"BSD-3-Clause"
] | null | null | null | examples/plotting/server/iris.py | evidation-health/bokeh | 2c580d93419033b962d36e3c46d7606cc2f24606 | [
"BSD-3-Clause"
] | 1 | 2016-03-18T03:01:59.000Z | 2016-03-18T03:01:59.000Z | # The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot
from bokeh.sampledata.iris import flowers
from bokeh.plotting import figure, show, output_server
# Map each iris species onto a display color and tag every row with it.
colormap = {'setosa': 'red', 'versicolor': 'green', 'virginica': 'blue'}
flowers['color'] = flowers['species'].map(lambda species: colormap[species])
# Publish the figure to the running Bokeh plot server session "iris".
output_server("iris")
fig = figure(title="Iris Morphology")
fig.xaxis.axis_label = 'Petal Length'
fig.yaxis.axis_label = 'Petal Width'
fig.circle(flowers["petal_length"], flowers["petal_width"],
           color=flowers["color"], fill_alpha=0.2, size=10)
show(fig)
| 30.736842 | 72 | 0.715753 |
from bokeh.sampledata.iris import flowers
from bokeh.plotting import figure, show, output_server
# Color each sample point by its species.
colormap = {'setosa': 'red', 'versicolor': 'green', 'virginica': 'blue'}
flowers['color'] = flowers['species'].map(lambda x: colormap[x])
# Publish the figure to the running Bokeh plot server session "iris".
output_server("iris")
p = figure(title = "Iris Morphology")
p.xaxis.axis_label = 'Petal Length'
p.yaxis.axis_label = 'Petal Width'
# One translucent circle per flower, petal length vs. petal width.
p.circle(flowers["petal_length"], flowers["petal_width"],
         color=flowers["color"], fill_alpha=0.2, size=10, )
show(p)
| true | true |
f738d5eb525196b32ef354d702e313f16b6ed068 | 1,464 | py | Python | binarytree.py | sanofi2104/algorithms | a331a143aa51837df876b93993e1e1930d823331 | [
"MIT"
] | 1 | 2015-10-11T11:54:07.000Z | 2015-10-11T11:54:07.000Z | binarytree.py | sanofi2104/algorithms | a331a143aa51837df876b93993e1e1930d823331 | [
"MIT"
] | null | null | null | binarytree.py | sanofi2104/algorithms | a331a143aa51837df876b93993e1e1930d823331 | [
"MIT"
] | null | null | null | import Queue
class binary_node(object):
    """A single binary-tree node holding one value."""
    def __init__(self, _value):
        # Store the payload; children are attached later by the caller.
        self.value = _value
        self.left = self.right = None
'''
Breadth-First search
'''
def breadth_first(_tree):
    """
    Return the node values of *_tree* in level (breadth-first) order.

    BUGFIX: an empty tree now yields an empty list; the old code
    returned False, breaking the "always a list" contract shared with
    the depth-first traversals.
    """
    from collections import deque  # local import keeps module imports intact
    if not _tree:
        return []
    order = []
    pending = deque([_tree])
    while pending:
        node = pending.popleft()
        order.append(node.value)
        # Enqueue children left-to-right so siblings stay ordered.
        if node.left:
            pending.append(node.left)
        if node.right:
            pending.append(node.right)
    return order
'''
Depth-first search
'''
# Pre-Order depth-first searching algorithm
def pre_order(_node, result = None):
    """Root-left-right traversal; appends into *result* and returns it.

    Returns None (not []) when called on an empty tree.
    """
    if _node is None:
        return
    acc = [] if result is None else result
    acc.append(_node.value)
    pre_order(_node.left, acc)
    pre_order(_node.right, acc)
    return acc
# In-Order depth-first searching algorithm
def in_order(_node, result = None):
    """Left-root-right traversal; appends into *result* and returns it.

    Returns None (not []) when called on an empty tree.
    """
    if _node is None:
        return
    acc = [] if result is None else result
    in_order(_node.left, acc)
    acc.append(_node.value)
    in_order(_node.right, acc)
    return acc
# Post-Order depth-first searching algorithm
def post_order(_node, result = None):
    """Left-right-root traversal; appends into *result* and returns it.

    Returns None (not []) when called on an empty tree.
    """
    if _node is None:
        return
    acc = [] if result is None else result
    post_order(_node.left, acc)
    post_order(_node.right, acc)
    acc.append(_node.value)
    return acc
| 18.531646 | 44 | 0.594262 | import Queue
class binary_node(object):
    """A single binary-tree node: a value plus optional left/right children."""
    def __init__(self, _value):
        # Payload stored at this node.
        self.value = _value
        # Children are attached by the caller after construction.
        self.left = None
        self.right = None
def breadth_first(_tree):
    """
    Return the node values of *_tree* in level (breadth-first) order.

    BUGFIX: an empty tree now yields an empty list; the old code
    returned False, breaking the "always a list" contract shared with
    the depth-first traversals.
    """
    from collections import deque  # local import keeps module imports intact
    if not _tree:
        return []
    order = []
    pending = deque([_tree])
    while pending:
        node = pending.popleft()
        order.append(node.value)
        # Enqueue children left-to-right so siblings stay ordered.
        if node.left:
            pending.append(node.left)
        if node.right:
            pending.append(node.right)
    return order
def pre_order(_node, result = None):
    """Root-left-right traversal; returns the accumulated value list.

    Returns None (not []) when called on an empty tree.
    """
    if _node == None:
        return
    if result is None:
        result = []
    result.append(_node.value)
    pre_order(_node.left, result)
    pre_order(_node.right, result)
    return result
def in_order(_node, result = None):
    """Left-root-right traversal; returns the accumulated value list.

    Returns None (not []) when called on an empty tree.
    """
    if _node == None:
        return
    if result is None:
        result = []
    in_order(_node.left, result)
    result.append(_node.value)
    in_order(_node.right, result)
    return result
def post_order(_node, result = None):
    """Left-right-root traversal; returns the accumulated value list.

    Returns None (not []) when called on an empty tree.
    """
    if _node == None:
        return
    if result is None:
        result = []
    post_order(_node.left, result)
    post_order(_node.right, result)
    result.append(_node.value)
    return result
| true | true |
f738d63d673034757db9e2a842b937dc2fff5775 | 8,276 | py | Python | azure-cosmosdb-table/azure/cosmosdb/table/sharedaccesssignature.py | bluca/azure-cosmos-table-python | 25e99050f93b1f76f19bbd2534502955d0247aac | [
"Apache-2.0"
] | 20 | 2018-10-03T04:36:03.000Z | 2021-10-03T09:01:05.000Z | azure-cosmosdb-table/azure/cosmosdb/table/sharedaccesssignature.py | bluca/azure-cosmos-table-python | 25e99050f93b1f76f19bbd2534502955d0247aac | [
"Apache-2.0"
] | 25 | 2018-12-13T15:31:06.000Z | 2021-12-13T19:29:36.000Z | azure-cosmosdb-table/azure/cosmosdb/table/sharedaccesssignature.py | bluca/azure-cosmos-table-python | 25e99050f93b1f76f19bbd2534502955d0247aac | [
"Apache-2.0"
] | 16 | 2019-01-19T09:04:26.000Z | 2021-09-10T19:05:35.000Z | # -------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# --------------------------------------------------------------------------
from azure.cosmosdb.table.common._common_conversion import (
_sign_string,
)
from azure.cosmosdb.table.common.sharedaccesssignature import (
SharedAccessSignature,
_SharedAccessHelper,
_QueryStringConstants,
)
from ._constants import X_MS_VERSION
class TableSharedAccessSignature(SharedAccessSignature):
    '''
    Factory for table shared access signature tokens bound to a single
    account name/key pair. Users can either use this factory or call the
    appropriate service's generate_*_shared_access_signature method.
    '''

    def __init__(self, account_name, account_key):
        '''
        :param str account_name:
            The storage account name used to generate the shared access signatures.
        :param str account_key:
            The access key to generate the shares access signatures.
        '''
        super(TableSharedAccessSignature, self).__init__(
            account_name, account_key, x_ms_version=X_MS_VERSION)

    def generate_table(self, table_name, permission=None,
                       expiry=None, start=None, id=None,
                       ip=None, protocol=None,
                       start_pk=None, start_rk=None,
                       end_pk=None, end_rk=None):
        '''
        Generates a shared access signature for the table.
        Use the returned signature with the sas_token parameter of TableService.

        :param str table_name:
            Name of table.
        :param TablePermissions permission:
            The permissions the SAS grants. Required unless *id* references
            a stored access policy that supplies them; must be omitted when
            the policy already specifies them.
        :param expiry:
            The time at which the SAS becomes invalid. Required unless
            supplied by the referenced stored access policy. Naive
            datetimes are interpreted as UTC.
        :type expiry: datetime or str
        :param start:
            The time at which the SAS becomes valid; defaults to the time
            the storage service receives the request. Naive datetimes are
            interpreted as UTC.
        :type start: datetime or str
        :param str id:
            A unique value up to 64 characters correlating to a stored
            access policy (see set_table_service_properties).
        :param str ip:
            An IP address or range (e.g. 168.1.5.65 or 168.1.5.60-168.1.5.70)
            from which requests are accepted.
        :param str protocol:
            The protocol permitted for requests; default is https,http.
            See :class:`~azure.cosmosdb.table.common.models.Protocol`.
        :param str start_pk:
            Minimum accessible partition key (inclusive); must accompany
            start_rk. Omitted means no lower bound.
        :param str start_rk:
            Minimum accessible row key (inclusive); must accompany
            start_pk. Omitted means no lower bound.
        :param str end_pk:
            Maximum accessible partition key (inclusive); must accompany
            end_rk. Omitted means no upper bound.
        :param str end_rk:
            Maximum accessible row key (inclusive); must accompany end_pk.
            Omitted means no upper bound.
        '''
        helper = _TableSharedAccessHelper()
        helper.add_base(permission, expiry, start, ip, protocol, X_MS_VERSION)
        helper.add_id(id)
        helper.add_table_access_ranges(table_name, start_pk, start_rk,
                                       end_pk, end_rk)

        # Table names must be signed lower case.
        helper.add_resource_signature(self.account_name, self.account_key,
                                      'table', table_name.lower())

        return helper.get_token()
class _TableQueryStringConstants(_QueryStringConstants):
    """Base query-string constants plus the table-specific 'tn' parameter."""
    # Name of the table the SAS token applies to.
    TABLE_NAME = 'tn'
class _TableSharedAccessHelper(_SharedAccessHelper):
    """Accumulates table-SAS query parameters and computes the signature."""

    def __init__(self):
        # Start from an empty parameter map rather than the base defaults.
        self.query_dict = {}

    def add_table_access_ranges(self, table_name, start_pk, start_rk,
                                end_pk, end_rk):
        # Record the table plus the optional PK/RK bounds of the SAS.
        constants = _TableQueryStringConstants
        self._add_query(constants.TABLE_NAME, table_name)
        self._add_query(constants.START_PK, start_pk)
        self._add_query(constants.START_RK, start_rk)
        self._add_query(constants.END_PK, end_pk)
        self._add_query(constants.END_RK, end_rk)

    def add_resource_signature(self, account_name, account_key, service, path):
        # The string-to-sign is the newline-joined parameter values in a
        # fixed order, with the canonicalized resource spliced in; the
        # order of the pieces is significant.
        def value_of(query):
            return self.query_dict.get(query) or ''

        if path[0] != '/':
            path = '/' + path
        canonicalized_resource = '/' + service + '/' + account_name + path

        qsc = _QueryStringConstants
        pieces = [
            value_of(qsc.SIGNED_PERMISSION),
            value_of(qsc.SIGNED_START),
            value_of(qsc.SIGNED_EXPIRY),
            canonicalized_resource,
            value_of(qsc.SIGNED_IDENTIFIER),
            value_of(qsc.SIGNED_IP),
            value_of(qsc.SIGNED_PROTOCOL),
            value_of(qsc.SIGNED_VERSION),
            value_of(qsc.START_PK),
            value_of(qsc.START_RK),
            value_of(qsc.END_PK),
            value_of(qsc.END_RK),
        ]
        string_to_sign = '\n'.join(pieces)

        self._add_query(qsc.SIGNED_SIGNATURE,
                        _sign_string(account_key, string_to_sign))
| 47.83815 | 110 | 0.666989 |
from azure.cosmosdb.table.common._common_conversion import (
_sign_string,
)
from azure.cosmosdb.table.common.sharedaccesssignature import (
SharedAccessSignature,
_SharedAccessHelper,
_QueryStringConstants,
)
from ._constants import X_MS_VERSION
class TableSharedAccessSignature(SharedAccessSignature):
    """Factory for table shared access signature tokens bound to one
    storage account name/key pair."""
    def __init__(self, account_name, account_key):
        # Pin the service version so generated tokens are reproducible.
        super(TableSharedAccessSignature, self).__init__(account_name, account_key, x_ms_version=X_MS_VERSION)
    def generate_table(self, table_name, permission=None,
                       expiry=None, start=None, id=None,
                       ip=None, protocol=None,
                       start_pk=None, start_rk=None,
                       end_pk=None, end_rk=None):
        """Generate a table SAS token string for use as a sas_token.

        permission/expiry are required unless *id* references a stored
        access policy supplying them; start_pk/start_rk and end_pk/end_rk
        optionally bound the accessible partition/row key range.
        """
        sas = _TableSharedAccessHelper()
        sas.add_base(permission, expiry, start, ip, protocol, X_MS_VERSION)
        sas.add_id(id)
        sas.add_table_access_ranges(table_name, start_pk, start_rk, end_pk, end_rk)
        # Table names must be signed lower case.
        resource_path = table_name.lower()
        sas.add_resource_signature(self.account_name, self.account_key, 'table', resource_path)
        return sas.get_token()
class _TableQueryStringConstants( _QueryStringConstants):
    # 'tn' carries the table name in the SAS query string.
    TABLE_NAME = 'tn'
class _TableSharedAccessHelper(_SharedAccessHelper):
    """Accumulates table-SAS query parameters and computes the signature."""
    def __init__(self):
        # Start from an empty parameter map rather than the base defaults.
        self.query_dict = {}
    def add_table_access_ranges(self, table_name, start_pk, start_rk,
                                end_pk, end_rk):
        # Record the table plus the optional PK/RK bounds of the SAS.
        self._add_query(_TableQueryStringConstants.TABLE_NAME, table_name)
        self._add_query(_TableQueryStringConstants.START_PK, start_pk)
        self._add_query(_TableQueryStringConstants.START_RK, start_rk)
        self._add_query(_TableQueryStringConstants.END_PK, end_pk)
        self._add_query(_TableQueryStringConstants.END_RK, end_rk)
    def add_resource_signature(self, account_name, account_key, service, path):
        # Missing parameters contribute an empty value to the string-to-sign.
        def get_value_to_append(query):
            return_value = self.query_dict.get(query) or ''
            return return_value + '\n'
        if path[0] != '/':
            path = '/' + path
        canonicalized_resource = '/' + service + '/' + account_name + path + '\n'
        # Build the string-to-sign from the parameter values and the
        # canonicalized resource; the order of the pieces is significant.
        string_to_sign = \
            (get_value_to_append(_QueryStringConstants.SIGNED_PERMISSION) +
             get_value_to_append(_QueryStringConstants.SIGNED_START) +
             get_value_to_append(_QueryStringConstants.SIGNED_EXPIRY) +
             canonicalized_resource +
             get_value_to_append(_QueryStringConstants.SIGNED_IDENTIFIER) +
             get_value_to_append(_QueryStringConstants.SIGNED_IP) +
             get_value_to_append(_QueryStringConstants.SIGNED_PROTOCOL) +
             get_value_to_append(_QueryStringConstants.SIGNED_VERSION))
        string_to_sign += \
            (get_value_to_append(_QueryStringConstants.START_PK) +
             get_value_to_append(_QueryStringConstants.START_RK) +
             get_value_to_append(_QueryStringConstants.END_PK) +
             get_value_to_append(_QueryStringConstants.END_RK))
        # Drop the trailing newline added by the last piece.
        if string_to_sign[-1] == '\n':
            string_to_sign = string_to_sign[:-1]
        self._add_query(_QueryStringConstants.SIGNED_SIGNATURE,
                        _sign_string(account_key, string_to_sign))
| true | true |
f738d6c9152aaf0bc30172d9c59308cf84ce2746 | 2,011 | py | Python | 21-fs-ias-lec/16-nicknames-forward/subChat/Colorize.py | paultroeger/BACnet | 855b931f2a0e9b64e9571f41de2a8cd71d7a01f4 | [
"MIT"
] | 8 | 2020-03-17T21:12:18.000Z | 2021-12-12T15:55:54.000Z | 21-fs-ias-lec/16-nicknames-forward/subChat/Colorize.py | paultroeger/BACnet | 855b931f2a0e9b64e9571f41de2a8cd71d7a01f4 | [
"MIT"
] | 2 | 2021-07-19T06:18:43.000Z | 2022-02-10T12:17:58.000Z | 21-fs-ias-lec/16-nicknames-forward/subChat/Colorize.py | paultroeger/BACnet | 855b931f2a0e9b64e9571f41de2a8cd71d7a01f4 | [
"MIT"
] | 25 | 2020-03-20T09:32:45.000Z | 2021-07-18T18:12:59.000Z | #version 15:38
import random
import string
#name = 'zzz'
set_off = 23
def convert(name):
for i in range(len(name)):
if name[i].lower() == 'i' or name[i].lower() == 'y' or name[i].lower() == '9':
name = list(name)
name[i] = 'g'
name = ''.join(name)
indx = 0
c=0
while len(name) < 6:
if c >16:
return '#ffb300' # just in case it goes into an infinate Loop (probability is very, very low)
c +=1
new_letter = chr(65 + (ord(name[indx]) + set_off + (indx*6) )%25) # this keeps the char within the range of A-Z in the asci table and adds variation in case the letter is the same (indx*6)
if new_letter.lower() != 'i' and new_letter.lower() != 'y' and new_letter != '9':
name = name + new_letter #add the letter
indx = (indx+1)%len(name)
if len(name) > 6:
name = name[:6] #cut name if too long
name = list(name) # make it a list so we can edit it more easily
for i in range(len(name)):
Integer = (ord(name[i])+set_off)%16
Hex = Integer.to_bytes(((Integer.bit_length() + 7) // 8),"big").hex()
#print("...."+Hex)
Hex = Hex[1:]
name[i] = Hex
name = ''.join(name)
color = '#' + name
return color
def name_to_color(name):
color = convert(name)
r = int(color[1:3], 16)
g = int(color[3:5], 16)
b = int(color[5:7], 16)
if r<128 or g<128 or b<128 and len(name) == 7:
return color
else:
return '#00f7ff' # some ord() chars aren't convertable. When we checked all, we found this to be the case with i, y and 9 which is why we prevent the program from outputting them. Just in case there are any other letters that we forgot to check, we added this clause. Should never get here but the presentation got us worried cuase if the color is not exactly 6 digits long, tkinter crashes.
| 38.673077 | 401 | 0.558429 |
import random
import string
set_off = 23
def convert(name):
for i in range(len(name)):
if name[i].lower() == 'i' or name[i].lower() == 'y' or name[i].lower() == '9':
name = list(name)
name[i] = 'g'
name = ''.join(name)
indx = 0
c=0
while len(name) < 6:
if c >16:
return '#ffb300'
c +=1
new_letter = chr(65 + (ord(name[indx]) + set_off + (indx*6) )%25)
if new_letter.lower() != 'i' and new_letter.lower() != 'y' and new_letter != '9':
name = name + new_letter
indx = (indx+1)%len(name)
if len(name) > 6:
name = name[:6]
name = list(name)
for i in range(len(name)):
Integer = (ord(name[i])+set_off)%16
Hex = Integer.to_bytes(((Integer.bit_length() + 7) // 8),"big").hex()
Hex = Hex[1:]
name[i] = Hex
name = ''.join(name)
color = '#' + name
return color
def name_to_color(name):
color = convert(name)
r = int(color[1:3], 16)
g = int(color[3:5], 16)
b = int(color[5:7], 16)
if r<128 or g<128 or b<128 and len(name) == 7:
return color
else:
return '#00f7ff'
| true | true |
f738d788b59babbab994d85a7f5b2804becccbe1 | 14,640 | py | Python | tests/integration/conftest.py | linsicai/peloton | 4706ce094432f8fcdacdf7d8045a4aaa7f5259af | [
"Apache-2.0"
] | null | null | null | tests/integration/conftest.py | linsicai/peloton | 4706ce094432f8fcdacdf7d8045a4aaa7f5259af | [
"Apache-2.0"
] | null | null | null | tests/integration/conftest.py | linsicai/peloton | 4706ce094432f8fcdacdf7d8045a4aaa7f5259af | [
"Apache-2.0"
] | null | null | null | import logging
import os
import pytest
import time
import grpc
import requests
from docker import Client
from tools.minicluster.main import setup, teardown, config as mc_config
from tools.minicluster.minicluster import run_mesos_agent, teardown_mesos_agent
from host import start_maintenance, complete_maintenance, wait_for_host_state
from job import Job
from job import query_jobs as batch_query_jobs
from job import kill_jobs as batch_kill_jobs
from stateless_job import StatelessJob
from stateless_job import query_jobs as stateless_query_jobs
from stateless_job import delete_jobs as stateless_delete_jobs
from m3.client import M3
from m3.emitter import BatchedEmitter
from peloton_client.pbgen.peloton.api.v0.host import host_pb2
from peloton_client.pbgen.peloton.api.v0.job import job_pb2
from conf_util import (
TERMINAL_JOB_STATES,
ACTIVE_JOB_STATES,
MESOS_MASTER,
MESOS_AGENTS,
)
import conf_util as util
log = logging.getLogger(__name__)
class TestMetrics(object):
def __init__(self):
self.failed = 0
self.passed = 0
self.duration = 0.0
def increment_passed(self, duration):
self.passed += 1
self.duration += duration
def increment_failed(self, duration):
self.failed += 1
self.duration += duration
collect_metrics = TestMetrics()
#
# Module scoped setup / teardown across test suites.
#
@pytest.fixture(scope="module", autouse=True)
def setup_cluster(request):
tests_failed_before_module = request.session.testsfailed
setup_minicluster()
def teardown_cluster():
dump_logs = False
if (request.session.testsfailed - tests_failed_before_module) > 0:
dump_logs = True
teardown_minicluster(dump_logs)
request.addfinalizer(teardown_cluster)
@pytest.fixture(autouse=True)
def run_around_tests():
# before each test
yield
# after each test
cleanup_batch_jobs()
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
global collect_metrics
outcome = yield
rep = outcome.get_result()
if rep.outcome == "passed" and rep.when == "call":
collect_metrics.increment_passed(rep.duration)
if rep.outcome == "failed" and rep.when == "call":
collect_metrics.increment_failed(rep.duration)
rep = outcome.get_result()
setattr(item, "rep_" + rep.when, rep)
if "incremental" in item.keywords:
if call.excinfo is not None:
parent = item.parent
parent._previousfailed = item
def pytest_sessionfinish(session, exitstatus):
emitter = BatchedEmitter()
m3 = M3(
application_identifier="peloton",
emitter=emitter,
environment="production",
default_tags={"result": "watchdog", "cluster": os.getenv("CLUSTER")},
)
if collect_metrics.failed > 0:
m3.gauge("watchdog_result", 1)
else:
m3.gauge("watchdog_result", 0)
m3.gauge("total_tests", collect_metrics.failed + collect_metrics.passed)
m3.gauge("failed_tests", collect_metrics.failed)
m3.gauge("passed_tests", collect_metrics.passed)
m3.gauge("duration_tests", collect_metrics.duration)
class Container(object):
def __init__(self, names):
self._cli = Client(base_url="unix://var/run/docker.sock")
self._names = names
def start(self):
for name in self._names:
self._cli.start(name)
log.info("%s started", name)
if self._names[0] in MESOS_MASTER:
wait_for_mesos_master_leader()
def stop(self):
for name in self._names:
self._cli.stop(name, timeout=0)
log.info("%s stopped", name)
def restart(self):
for name in self._names:
self._cli.restart(name, timeout=0)
log.info("%s restarted", name)
if self._names[0] in MESOS_MASTER:
wait_for_mesos_master_leader()
def get_container(container_name):
return Container(container_name)
def wait_for_mesos_master_leader(
url="http://127.0.0.1:5050/state.json", timeout_secs=20
):
"""
util method to wait for mesos master leader elected
"""
deadline = time.time() + timeout_secs
while time.time() < deadline:
try:
resp = requests.get(url)
if resp.status_code != 200:
time.sleep(2)
continue
return
except Exception:
pass
assert False, "timed out waiting for mesos master leader"
def wait_for_all_agents_to_register(
url="http://127.0.0.1:5050/state.json",
timeout_secs=300,
):
"""
util method to wait for all agents to register
"""
deadline = time.time() + timeout_secs
while time.time() < deadline:
try:
resp = requests.get(url)
if resp.status_code == 200:
registered_agents = 0
for a in resp.json()['slaves']:
if a['active'] == True:
registered_agents += 1
if registered_agents == 3:
return
time.sleep(10)
except Exception:
pass
assert False, "timed out waiting for agents to register"
def setup_minicluster(enable_k8s=False):
"""
setup minicluster
"""
log.info("setup cluster")
if os.getenv("CLUSTER", ""):
log.info("cluster mode")
else:
log.info("local minicluster mode")
setup(enable_peloton=True, enable_k8s=enable_k8s)
time.sleep(5)
def teardown_minicluster(dump_logs=False):
"""
teardown minicluster
"""
log.info("\nteardown cluster")
if os.getenv("CLUSTER", ""):
log.info("cluster mode, no teardown actions")
elif os.getenv("NO_TEARDOWN", ""):
log.info("skip teardown")
else:
log.info("tearing down")
# dump logs only if tests have failed in the current module
if dump_logs:
# stop containers so that log stream will not block
teardown(stop=True)
try:
# TODO (varung): enable PE and mesos-master logs if needed
cli = Client(base_url="unix://var/run/docker.sock")
for c in ("peloton-jobmgr0",
"peloton-resmgr0"):
for l in cli.logs(c, stream=True):
# remove newline character when logging
log.info(l.rstrip())
except Exception as e:
log.info(e)
teardown()
def cleanup_batch_jobs():
"""
stop all batch jobs from minicluster
"""
jobs = batch_query_jobs()
batch_kill_jobs(jobs)
def cleanup_stateless_jobs(timeout_secs=10):
"""
delete all service jobs from minicluster
"""
jobs = stateless_query_jobs()
# opportunistic delete for jobs, if not deleted within
# timeout period, it will get cleanup in next test run.
stateless_delete_jobs(jobs)
# Wait for job deletion to complete.
deadline = time.time() + timeout_secs
while time.time() < deadline:
try:
jobs = stateless_query_jobs()
if len(jobs) == 0:
return
time.sleep(2)
except grpc.RpcError as e:
# Catch "not-found" error here because QueryJobs endpoint does
# two db queries in sequence: "QueryJobs" and "GetUpdate".
# However, when we delete a job, updates are deleted first,
# there is a slight chance QueryJobs will fail to query the
# update, returning "not-found" error.
if e.code() == grpc.StatusCode.NOT_FOUND:
time.sleep(2)
continue
@pytest.fixture()
def mesos_master():
return Container(MESOS_MASTER)
@pytest.fixture()
def mesos_agent():
# TODO: We need to pick up the count dynamically.
return Container(MESOS_AGENTS)
@pytest.fixture()
def placement_engines():
return Container(util.PLACEMENT_ENGINES)
@pytest.fixture()
def jobmgr():
# TODO: We need to pick up the count dynamically.
return Container(util.JOB_MGRS)
@pytest.fixture()
def resmgr():
# TODO: We need to pick up the count dynamically.
return Container(util.RES_MGRS)
@pytest.fixture()
def hostmgr():
# TODO: We need to pick up the count dynamically.
return Container(util.HOST_MGRS)
@pytest.fixture()
def aurorabridge():
# TODO: We need to pick up the count dynamically.
return Container(util.AURORA_BRIDGE)
@pytest.fixture
def long_running_job(request):
job = Job(job_file="long_running_job.yaml")
# teardown
def kill_long_running_job():
print("\nstopping long running job")
job.stop()
request.addfinalizer(kill_long_running_job)
return job
@pytest.fixture
def stateless_job(request):
job = StatelessJob()
# teardown
def kill_stateless_job():
print("\nstopping stateless job")
job.stop()
request.addfinalizer(kill_stateless_job)
return job
@pytest.fixture
def host_affinity_job(request):
job = Job(job_file="test_job_host_affinity_constraint.yaml")
# Kill job
def kill_host_affinity_job():
print("\nstopping host affinity job")
job.stop()
request.addfinalizer(kill_host_affinity_job)
return job
# For unit tests of update/restart running with in_place, it would
# be tested with both in_place feature enabled and disabled
@pytest.fixture(params=[True, False])
def in_place(request):
return request.param
@pytest.fixture
def maintenance(request):
draining_hosts = []
client = [None] # Use list to store a reference to the client object
def update_client(new_client):
client[0] = new_client
def start(hosts):
resp = start_maintenance(hosts)
if not resp:
log.error("Start maintenance failed:" + resp)
return resp
draining_hosts.extend(hosts)
return resp
def stop(hosts):
resp = complete_maintenance(hosts)
if not resp:
log.error("Complete maintenance failed:" + resp)
return resp
# The mesos-agent containers needs to be started explicitly as they would
# have been stopped when the agents transition to DOWN
Container(hosts).start()
del draining_hosts[:]
return resp
def clean_up():
# kill stateless jobs. This is needed since host draining
# is done in SLA aware manner for stateless jobs.
for j in stateless_query_jobs(client=client[0]):
j.stop()
if not draining_hosts:
return
for h in draining_hosts:
wait_for_host_state(h, host_pb2.HOST_STATE_DOWN)
stop(draining_hosts)
request.addfinalizer(clean_up)
response = dict()
response["start"] = start
response["stop"] = stop
response["update_client"] = update_client
return response
"""
Setup fixture for getting a dict of job objects per state
"""
@pytest.fixture
def jobs_by_state(request):
return util.create_job_config_by_state(_num_jobs_per_state=1)
"""
Setup/Cleanup fixture that starts a set of RUNNING, SUCCEEDED and
FAILED jobs scoped per module. This is to give each module a set
of active and completed jobs to test on.
Returns:
common salt identifier, respoolID and dict of created jobs
"""
@pytest.fixture(scope="module")
def create_jobs(request):
jobs_by_state = util.create_job_config_by_state()
salt = jobs_by_state[0]
jobs_dict = jobs_by_state[1]
log.info("Create jobs")
respoolID = None
for state in TERMINAL_JOB_STATES:
jobs = jobs_dict[state]
for job in jobs:
job.create()
if state == "FAILED":
job.wait_for_state(
goal_state="FAILED", failed_state="SUCCEEDED"
)
else:
job.wait_for_state(goal_state=state)
if respoolID is None:
respoolID = job.get_config().respoolID
def stop_jobs():
log.info("Stop jobs")
for state in TERMINAL_JOB_STATES:
jobs = jobs_dict[state]
for job in jobs:
state = job_pb2.JobState.Name(job.get_runtime().state)
if state in ACTIVE_JOB_STATES:
job.stop()
job.wait_for_state(goal_state="KILLED")
request.addfinalizer(stop_jobs)
# Job Query accuracy depends on lucene index being up to date
# lucene index refresh time is 10 seconds. Sleep for 12 sec.
time.sleep(12)
return salt, respoolID, jobs_dict
"""
Setup/Cleanup fixture for tasks query integ-tests.
Within fixture parameter, a list of tuples,
such as [(task_state, count)], is passed to give each test case
a varied number of tasks to test on.
Returns:
The job id of the job created.
"""
@pytest.fixture
def task_test_fixture(request):
# task_states is a list of tuples, e.g. [('SUCCEEDED', 2)].
task_states = request.param
assert task_states is not None
if len(task_states) > 1:
mixed_task_states = True
else:
mixed_task_states = False
test_config = util.generate_job_config(
file_name="test_task.yaml", task_states=task_states
)
# Create job with customized tasks.
job = Job(job_config=test_config)
job.create()
log.info("Job for task query is created: %s", job.job_id)
# Determine terminating state.
job_state = task_states[0][0] if not mixed_task_states else "FAILED"
if job_state == "FAILED":
job.wait_for_state(goal_state="FAILED", failed_state="SUCCEEDED")
else:
job.wait_for_state(goal_state=job_state)
def stop_job():
state = job_pb2.JobState.Name(job.get_runtime().state)
if state in ACTIVE_JOB_STATES:
job.stop()
job.wait_for_state(goal_state="KILLED")
request.addfinalizer(stop_job)
return job.job_id
"""
Setup/cleanup fixture that replaces a regular Mesos agent with
another one that has "peloton/exclusive" attribute. Cleanup does
the exact opposite.
"""
@pytest.fixture
def exclusive_host(request):
def clean_up():
teardown_mesos_agent(mc_config, 0, is_exclusive=True)
run_mesos_agent(mc_config, 0, 0)
time.sleep(5)
# Remove agent #0 and instead create exclusive agent #0
teardown_mesos_agent(mc_config, 0)
run_mesos_agent(
mc_config,
0,
3,
is_exclusive=True,
exclusive_label_value="exclusive-test-label",
)
time.sleep(5)
request.addfinalizer(clean_up)
| 26.961326 | 81 | 0.647746 | import logging
import os
import pytest
import time
import grpc
import requests
from docker import Client
from tools.minicluster.main import setup, teardown, config as mc_config
from tools.minicluster.minicluster import run_mesos_agent, teardown_mesos_agent
from host import start_maintenance, complete_maintenance, wait_for_host_state
from job import Job
from job import query_jobs as batch_query_jobs
from job import kill_jobs as batch_kill_jobs
from stateless_job import StatelessJob
from stateless_job import query_jobs as stateless_query_jobs
from stateless_job import delete_jobs as stateless_delete_jobs
from m3.client import M3
from m3.emitter import BatchedEmitter
from peloton_client.pbgen.peloton.api.v0.host import host_pb2
from peloton_client.pbgen.peloton.api.v0.job import job_pb2
from conf_util import (
TERMINAL_JOB_STATES,
ACTIVE_JOB_STATES,
MESOS_MASTER,
MESOS_AGENTS,
)
import conf_util as util
log = logging.getLogger(__name__)
class TestMetrics(object):
def __init__(self):
self.failed = 0
self.passed = 0
self.duration = 0.0
def increment_passed(self, duration):
self.passed += 1
self.duration += duration
def increment_failed(self, duration):
self.failed += 1
self.duration += duration
collect_metrics = TestMetrics()
@pytest.fixture(scope="module", autouse=True)
def setup_cluster(request):
tests_failed_before_module = request.session.testsfailed
setup_minicluster()
def teardown_cluster():
dump_logs = False
if (request.session.testsfailed - tests_failed_before_module) > 0:
dump_logs = True
teardown_minicluster(dump_logs)
request.addfinalizer(teardown_cluster)
@pytest.fixture(autouse=True)
def run_around_tests():
yield
cleanup_batch_jobs()
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
global collect_metrics
outcome = yield
rep = outcome.get_result()
if rep.outcome == "passed" and rep.when == "call":
collect_metrics.increment_passed(rep.duration)
if rep.outcome == "failed" and rep.when == "call":
collect_metrics.increment_failed(rep.duration)
rep = outcome.get_result()
setattr(item, "rep_" + rep.when, rep)
if "incremental" in item.keywords:
if call.excinfo is not None:
parent = item.parent
parent._previousfailed = item
def pytest_sessionfinish(session, exitstatus):
emitter = BatchedEmitter()
m3 = M3(
application_identifier="peloton",
emitter=emitter,
environment="production",
default_tags={"result": "watchdog", "cluster": os.getenv("CLUSTER")},
)
if collect_metrics.failed > 0:
m3.gauge("watchdog_result", 1)
else:
m3.gauge("watchdog_result", 0)
m3.gauge("total_tests", collect_metrics.failed + collect_metrics.passed)
m3.gauge("failed_tests", collect_metrics.failed)
m3.gauge("passed_tests", collect_metrics.passed)
m3.gauge("duration_tests", collect_metrics.duration)
class Container(object):
def __init__(self, names):
self._cli = Client(base_url="unix://var/run/docker.sock")
self._names = names
def start(self):
for name in self._names:
self._cli.start(name)
log.info("%s started", name)
if self._names[0] in MESOS_MASTER:
wait_for_mesos_master_leader()
def stop(self):
for name in self._names:
self._cli.stop(name, timeout=0)
log.info("%s stopped", name)
def restart(self):
for name in self._names:
self._cli.restart(name, timeout=0)
log.info("%s restarted", name)
if self._names[0] in MESOS_MASTER:
wait_for_mesos_master_leader()
def get_container(container_name):
return Container(container_name)
def wait_for_mesos_master_leader(
url="http://127.0.0.1:5050/state.json", timeout_secs=20
):
deadline = time.time() + timeout_secs
while time.time() < deadline:
try:
resp = requests.get(url)
if resp.status_code != 200:
time.sleep(2)
continue
return
except Exception:
pass
assert False, "timed out waiting for mesos master leader"
def wait_for_all_agents_to_register(
url="http://127.0.0.1:5050/state.json",
timeout_secs=300,
):
deadline = time.time() + timeout_secs
while time.time() < deadline:
try:
resp = requests.get(url)
if resp.status_code == 200:
registered_agents = 0
for a in resp.json()['slaves']:
if a['active'] == True:
registered_agents += 1
if registered_agents == 3:
return
time.sleep(10)
except Exception:
pass
assert False, "timed out waiting for agents to register"
def setup_minicluster(enable_k8s=False):
log.info("setup cluster")
if os.getenv("CLUSTER", ""):
log.info("cluster mode")
else:
log.info("local minicluster mode")
setup(enable_peloton=True, enable_k8s=enable_k8s)
time.sleep(5)
def teardown_minicluster(dump_logs=False):
log.info("\nteardown cluster")
if os.getenv("CLUSTER", ""):
log.info("cluster mode, no teardown actions")
elif os.getenv("NO_TEARDOWN", ""):
log.info("skip teardown")
else:
log.info("tearing down")
if dump_logs:
teardown(stop=True)
try:
cli = Client(base_url="unix://var/run/docker.sock")
for c in ("peloton-jobmgr0",
"peloton-resmgr0"):
for l in cli.logs(c, stream=True):
log.info(l.rstrip())
except Exception as e:
log.info(e)
teardown()
def cleanup_batch_jobs():
jobs = batch_query_jobs()
batch_kill_jobs(jobs)
def cleanup_stateless_jobs(timeout_secs=10):
jobs = stateless_query_jobs()
stateless_delete_jobs(jobs)
deadline = time.time() + timeout_secs
while time.time() < deadline:
try:
jobs = stateless_query_jobs()
if len(jobs) == 0:
return
time.sleep(2)
except grpc.RpcError as e:
if e.code() == grpc.StatusCode.NOT_FOUND:
time.sleep(2)
continue
@pytest.fixture()
def mesos_master():
return Container(MESOS_MASTER)
@pytest.fixture()
def mesos_agent():
return Container(MESOS_AGENTS)
@pytest.fixture()
def placement_engines():
return Container(util.PLACEMENT_ENGINES)
@pytest.fixture()
def jobmgr():
return Container(util.JOB_MGRS)
@pytest.fixture()
def resmgr():
return Container(util.RES_MGRS)
@pytest.fixture()
def hostmgr():
return Container(util.HOST_MGRS)
@pytest.fixture()
def aurorabridge():
return Container(util.AURORA_BRIDGE)
@pytest.fixture
def long_running_job(request):
job = Job(job_file="long_running_job.yaml")
def kill_long_running_job():
print("\nstopping long running job")
job.stop()
request.addfinalizer(kill_long_running_job)
return job
@pytest.fixture
def stateless_job(request):
job = StatelessJob()
def kill_stateless_job():
print("\nstopping stateless job")
job.stop()
request.addfinalizer(kill_stateless_job)
return job
@pytest.fixture
def host_affinity_job(request):
job = Job(job_file="test_job_host_affinity_constraint.yaml")
def kill_host_affinity_job():
print("\nstopping host affinity job")
job.stop()
request.addfinalizer(kill_host_affinity_job)
return job
@pytest.fixture(params=[True, False])
def in_place(request):
return request.param
@pytest.fixture
def maintenance(request):
draining_hosts = []
client = [None]
def update_client(new_client):
client[0] = new_client
def start(hosts):
resp = start_maintenance(hosts)
if not resp:
log.error("Start maintenance failed:" + resp)
return resp
draining_hosts.extend(hosts)
return resp
def stop(hosts):
resp = complete_maintenance(hosts)
if not resp:
log.error("Complete maintenance failed:" + resp)
return resp
Container(hosts).start()
del draining_hosts[:]
return resp
def clean_up():
for j in stateless_query_jobs(client=client[0]):
j.stop()
if not draining_hosts:
return
for h in draining_hosts:
wait_for_host_state(h, host_pb2.HOST_STATE_DOWN)
stop(draining_hosts)
request.addfinalizer(clean_up)
response = dict()
response["start"] = start
response["stop"] = stop
response["update_client"] = update_client
return response
@pytest.fixture
def jobs_by_state(request):
return util.create_job_config_by_state(_num_jobs_per_state=1)
@pytest.fixture(scope="module")
def create_jobs(request):
jobs_by_state = util.create_job_config_by_state()
salt = jobs_by_state[0]
jobs_dict = jobs_by_state[1]
log.info("Create jobs")
respoolID = None
for state in TERMINAL_JOB_STATES:
jobs = jobs_dict[state]
for job in jobs:
job.create()
if state == "FAILED":
job.wait_for_state(
goal_state="FAILED", failed_state="SUCCEEDED"
)
else:
job.wait_for_state(goal_state=state)
if respoolID is None:
respoolID = job.get_config().respoolID
def stop_jobs():
log.info("Stop jobs")
for state in TERMINAL_JOB_STATES:
jobs = jobs_dict[state]
for job in jobs:
state = job_pb2.JobState.Name(job.get_runtime().state)
if state in ACTIVE_JOB_STATES:
job.stop()
job.wait_for_state(goal_state="KILLED")
request.addfinalizer(stop_jobs)
time.sleep(12)
return salt, respoolID, jobs_dict
@pytest.fixture
def task_test_fixture(request):
task_states = request.param
assert task_states is not None
if len(task_states) > 1:
mixed_task_states = True
else:
mixed_task_states = False
test_config = util.generate_job_config(
file_name="test_task.yaml", task_states=task_states
)
job = Job(job_config=test_config)
job.create()
log.info("Job for task query is created: %s", job.job_id)
job_state = task_states[0][0] if not mixed_task_states else "FAILED"
if job_state == "FAILED":
job.wait_for_state(goal_state="FAILED", failed_state="SUCCEEDED")
else:
job.wait_for_state(goal_state=job_state)
def stop_job():
state = job_pb2.JobState.Name(job.get_runtime().state)
if state in ACTIVE_JOB_STATES:
job.stop()
job.wait_for_state(goal_state="KILLED")
request.addfinalizer(stop_job)
return job.job_id
@pytest.fixture
def exclusive_host(request):
def clean_up():
teardown_mesos_agent(mc_config, 0, is_exclusive=True)
run_mesos_agent(mc_config, 0, 0)
time.sleep(5)
run_mesos_agent(
mc_config,
0,
3,
is_exclusive=True,
exclusive_label_value="exclusive-test-label",
)
time.sleep(5)
request.addfinalizer(clean_up)
| true | true |
f738daab38d0ea2df36ed71b54b64b00c4cdf066 | 2,876 | py | Python | google/cloud/securitycenter/v1/securitycenter-v1-py/google/cloud/securitycenter_v1/types/source.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 7 | 2021-02-21T10:39:41.000Z | 2021-12-07T07:31:28.000Z | google/cloud/securitycenter/v1/securitycenter-v1-py/google/cloud/securitycenter_v1/types/source.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 6 | 2021-02-02T23:46:11.000Z | 2021-11-15T01:46:02.000Z | google/cloud/securitycenter/v1/securitycenter-v1-py/google/cloud/securitycenter_v1/types/source.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 4 | 2021-01-28T23:25:45.000Z | 2021-08-30T01:55:16.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.cloud.securitycenter.v1',
manifest={
'Source',
},
)
class Source(proto.Message):
r"""Security Command Center finding source. A finding source
is an entity or a mechanism that can produce a finding. A source
is like a container of findings that come from the same scanner,
logger, monitor, and other tools.
Attributes:
name (str):
The relative resource name of this source. See:
https://cloud.google.com/apis/design/resource_names#relative_resource_name
Example:
"organizations/{organization_id}/sources/{source_id}".
display_name (str):
The source's display name.
A source's display name must be unique amongst
its siblings, for example, two sources with the
same parent can't share the same display name.
The display name must have a length between 1
and 64 characters (inclusive).
description (str):
The description of the source (max of 1024
characters). Example:
"Web Security Scanner is a web security scanner
for common vulnerabilities in App Engine
applications. It can automatically scan and
detect four common vulnerabilities, including
cross-site-scripting (XSS), Flash injection,
mixed content (HTTP in HTTPS), and outdated or
insecure libraries.".
canonical_name (str):
The canonical name of the finding. It's either
"organizations/{organization_id}/sources/{source_id}",
"folders/{folder_id}/sources/{source_id}" or
"projects/{project_number}/sources/{source_id}", depending
on the closest CRM ancestor of the resource associated with
the finding.
"""
name = proto.Field(
proto.STRING,
number=1,
)
display_name = proto.Field(
proto.STRING,
number=2,
)
description = proto.Field(
proto.STRING,
number=3,
)
canonical_name = proto.Field(
proto.STRING,
number=14,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| 34.238095 | 86 | 0.645688 |
import proto
__protobuf__ = proto.module(
package='google.cloud.securitycenter.v1',
manifest={
'Source',
},
)
class Source(proto.Message):
name = proto.Field(
proto.STRING,
number=1,
)
display_name = proto.Field(
proto.STRING,
number=2,
)
description = proto.Field(
proto.STRING,
number=3,
)
canonical_name = proto.Field(
proto.STRING,
number=14,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| true | true |
f738db5484d20dc4c77a6d9f4274781afffe802a | 5,607 | py | Python | recip/network/messages/extensions/types/Account.py | anthonybuckle/Reciprocity-Core | 3254073f44e8fe2222aed9879885a2e609d4044a | [
"MIT"
] | null | null | null | recip/network/messages/extensions/types/Account.py | anthonybuckle/Reciprocity-Core | 3254073f44e8fe2222aed9879885a2e609d4044a | [
"MIT"
] | null | null | null | recip/network/messages/extensions/types/Account.py | anthonybuckle/Reciprocity-Core | 3254073f44e8fe2222aed9879885a2e609d4044a | [
"MIT"
] | null | null | null | from recip.network.messages.extensions.ExtMessage import ExtMessage
from recip.network.messages.extensions import ExtMessageType
from recip.core.Account import Account as CoreAccount
from recip.core import AccountType
from recip.storage import Accounts
from recip.util import Address
from recip.util import Crypto
from recip.util import DataType
from recip.util import JSONRPC
from recip.util import Validator
from recip.util import Units
class Account(ExtMessage):
def __init__(self):
super().__init__()
self.publicKeys = []
self.address = []
def validate(self):
if self.address == None or self.publicKeys == None:
return False
for publicKey in self.publicKeys:
if not Validator.public(publicKey):
return False
for addr in self.address:
if not Validator.address(addr):
return False
return True
def deserialize(self, payload):
if self.validatePayload(payload):
self.deserializePayload(payload)
if self.validateParameters():
if self.isMultiSig():
for publicKey in self.params:
publicKey = DataType.fromHex(publicKey)
self.publicKeys.append(publicKey)
else:
for addr in self.params:
addr = Address.toAddressBytes(addr)
self.address.append(addr)
if self.validate():
return True
return False
def onSuccess(self, callback = None):
if ExtMessageType.CREATE_ATOMIC_SWAP_ACCOUNT == self.method:
self.createAtomicSwapAccount(callback)
elif ExtMessageType.CREATE_MULTISIG_ACCOUNT == self.method:
self.createMultiSigAccount(callback)
elif ExtMessageType.GET_ACCOUNTS == self.method:
self.get(callback)
elif ExtMessageType.GET_NEW_ACCOUNT == self.method:
self.add(callback)
elif ExtMessageType.DELETE_ACCOUNTS == self.method:
self.remove(callback)
else:
self.onFailure(callback)
def isMultiSig(self):
if ExtMessageType.CREATE_MULTISIG_ACCOUNT == self.method:
return True
if ExtMessageType.CREATE_ATOMIC_SWAP_ACCOUNT == self.method:
return True
return False
def createAtomicSwapAccount(self, callback = None):
_address, _public, _private = Crypto.generateKeys()
privateBytes = bytearray()
privateBytes.extend(_address)
privateBytes.extend(_public)
privateBytes.extend(_private)
privateKey = Crypto.generateHash(privateBytes)
self.createNewMultiSigAccountType(AccountType.ATOMIC_SWAP, privateKey, callback)
def createMultiSigAccount(self, callback = None):
self.createNewMultiSigAccountType(AccountType.MULTISIGNATURE, None, callback)
def createNewMultiSigAccountType(self, accountType, privateKey, callback = None):
multiSigAddressBytes = bytearray()
for publicKey in self.publicKeys:
multiSigAddressBytes.extend(publicKey)
multiSigAddress = Crypto.generateAddress(multiSigAddressBytes)
account = CoreAccount(multiSigAddress, self.publicKeys, privateKey, accountType)
Accounts.addAccount(account)
multiSigAddress = Address.to0xAddress(multiSigAddress)
callback(
JSONRPC.createResultObject(multiSigAddress, self.id)
)
def get(self, callback = None):
accounts = []
# Account Types
standard = []
multisig = []
atomicswap = []
for account in Accounts.getAccounts():
confirmedBalance = Accounts.getConfirmedBalanceByAddress(account.address)
confirmedBalance = Units.toValue(confirmedBalance)
confirmedBalance = DataType.asFloat(confirmedBalance)
address = Address.to0xAddress(account.address)
accountInfo = {
'address': address,
'type': account.type,
'balance': confirmedBalance
}
if account.type == AccountType.STANDARD:
standard.append(accountInfo)
elif account.type == AccountType.MULTISIGNATURE:
multisig.append(accountInfo)
elif account.type == AccountType.ATOMIC_SWAP:
atomicswap.append(accountInfo)
accounts.extend(standard)
accounts.extend(multisig)
accounts.extend(atomicswap)
callback(
JSONRPC.createResultObject(accounts, self.id)
)
def add(self, callback = None):
account = CoreAccount()
Accounts.addAccount(account)
address = Address.to0xAddress(account.address)
callback(
JSONRPC.createResultObject(address, self.id)
)
def remove(self, callback = None):
removed = False
for addr in self.address:
removed = Accounts.removeAccount(addr)
if not removed:
break
if removed:
callback(
JSONRPC.createResultObject('accounts removed', self.id)
)
else:
callback(
JSONRPC.createErrorObject(-32003, 'delete failed', 'failed to remove accounts', self.id)
)
    def onFailure(self, callback = None):
        """Report a generic invalid-request JSON-RPC error for this message id."""
        callback(
            JSONRPC.createErrorObject(-32000, 'invalid message', 'invalid account request', self.id)
        )
| 38.142857 | 104 | 0.622258 | from recip.network.messages.extensions.ExtMessage import ExtMessage
from recip.network.messages.extensions import ExtMessageType
from recip.core.Account import Account as CoreAccount
from recip.core import AccountType
from recip.storage import Accounts
from recip.util import Address
from recip.util import Crypto
from recip.util import DataType
from recip.util import JSONRPC
from recip.util import Validator
from recip.util import Units
class Account(ExtMessage):
    """Extension JSON-RPC message handling account operations: create
    (standard / multisig / atomic swap), list, and delete accounts."""
    def __init__(self):
        """Start with empty public-key and address parameter lists."""
        super().__init__()
        self.publicKeys = []   # raw public keys for multi-sig creation
        self.address = []      # address bytes for deletion requests
    def validate(self):
        """Return True when every collected key and address is well formed."""
        if self.address == None or self.publicKeys == None:
            return False
        for publicKey in self.publicKeys:
            if not Validator.public(publicKey):
                return False
        for addr in self.address:
            if not Validator.address(addr):
                return False
        return True
    def deserialize(self, payload):
        """Parse a JSON-RPC payload; True only when everything validates."""
        if self.validatePayload(payload):
            self.deserializePayload(payload)
            if self.validateParameters():
                # Multi-sig style requests carry public keys; others addresses.
                if self.isMultiSig():
                    for publicKey in self.params:
                        publicKey = DataType.fromHex(publicKey)
                        self.publicKeys.append(publicKey)
                else:
                    for addr in self.params:
                        addr = Address.toAddressBytes(addr)
                        self.address.append(addr)
                if self.validate():
                    return True
        return False
    def onSuccess(self, callback = None):
        """Dispatch the request to the handler matching its method."""
        if ExtMessageType.CREATE_ATOMIC_SWAP_ACCOUNT == self.method:
            self.createAtomicSwapAccount(callback)
        elif ExtMessageType.CREATE_MULTISIG_ACCOUNT == self.method:
            self.createMultiSigAccount(callback)
        elif ExtMessageType.GET_ACCOUNTS == self.method:
            self.get(callback)
        elif ExtMessageType.GET_NEW_ACCOUNT == self.method:
            self.add(callback)
        elif ExtMessageType.DELETE_ACCOUNTS == self.method:
            self.remove(callback)
        else:
            self.onFailure(callback)
    def isMultiSig(self):
        """True for the two multi-signature style creation methods."""
        if ExtMessageType.CREATE_MULTISIG_ACCOUNT == self.method:
            return True
        if ExtMessageType.CREATE_ATOMIC_SWAP_ACCOUNT == self.method:
            return True
        return False
    def createAtomicSwapAccount(self, callback = None):
        """Create an ATOMIC_SWAP account seeded by a hashed fresh key triple."""
        _address, _public, _private = Crypto.generateKeys()
        privateBytes = bytearray()
        privateBytes.extend(_address)
        privateBytes.extend(_public)
        privateBytes.extend(_private)
        privateKey = Crypto.generateHash(privateBytes)
        self.createNewMultiSigAccountType(AccountType.ATOMIC_SWAP, privateKey, callback)
    def createMultiSigAccount(self, callback = None):
        """Create a plain MULTISIGNATURE account (no private key)."""
        self.createNewMultiSigAccountType(AccountType.MULTISIGNATURE, None, callback)
    def createNewMultiSigAccountType(self, accountType, privateKey, callback = None):
        """Derive an address from the public keys, store, and report it."""
        multiSigAddressBytes = bytearray()
        for publicKey in self.publicKeys:
            multiSigAddressBytes.extend(publicKey)
        multiSigAddress = Crypto.generateAddress(multiSigAddressBytes)
        account = CoreAccount(multiSigAddress, self.publicKeys, privateKey, accountType)
        Accounts.addAccount(account)
        multiSigAddress = Address.to0xAddress(multiSigAddress)
        callback(
            JSONRPC.createResultObject(multiSigAddress, self.id)
        )
    def get(self, callback = None):
        """Report all accounts with balances, standard/multisig/swap order."""
        accounts = []
        standard = []
        multisig = []
        atomicswap = []
        for account in Accounts.getAccounts():
            confirmedBalance = Accounts.getConfirmedBalanceByAddress(account.address)
            confirmedBalance = Units.toValue(confirmedBalance)
            confirmedBalance = DataType.asFloat(confirmedBalance)
            address = Address.to0xAddress(account.address)
            accountInfo = {
                'address': address,
                'type': account.type,
                'balance': confirmedBalance
            }
            if account.type == AccountType.STANDARD:
                standard.append(accountInfo)
            elif account.type == AccountType.MULTISIGNATURE:
                multisig.append(accountInfo)
            elif account.type == AccountType.ATOMIC_SWAP:
                atomicswap.append(accountInfo)
        accounts.extend(standard)
        accounts.extend(multisig)
        accounts.extend(atomicswap)
        callback(
            JSONRPC.createResultObject(accounts, self.id)
        )
    def add(self, callback = None):
        """Create a new account and report its 0x address."""
        account = CoreAccount()
        Accounts.addAccount(account)
        address = Address.to0xAddress(account.address)
        callback(
            JSONRPC.createResultObject(address, self.id)
        )
    def remove(self, callback = None):
        """Delete the requested accounts; all must succeed (empty list fails)."""
        removed = False
        for addr in self.address:
            removed = Accounts.removeAccount(addr)
            if not removed:
                break
        if removed:
            callback(
                JSONRPC.createResultObject('accounts removed', self.id)
            )
        else:
            callback(
                JSONRPC.createErrorObject(-32003, 'delete failed', 'failed to remove accounts', self.id)
            )
    def onFailure(self, callback = None):
        """Report a generic invalid-request error."""
        callback(
            JSONRPC.createErrorObject(-32000, 'invalid message', 'invalid account request', self.id)
        )
| true | true |
f738db80f9cc00ed7e5eb5f0b2d93c033d9219fc | 6,378 | py | Python | src/datasets/Fruits360/f360_dataset.py | JoseLuisRojasAranda/tfmodels | 56dce0236f0cc03dd7031aecf305d470c9fb97a9 | [
"MIT"
] | 1 | 2020-06-05T23:25:03.000Z | 2020-06-05T23:25:03.000Z | src/datasets/Fruits360/f360_dataset.py | JoseLuisRojasAranda/tfmodels | 56dce0236f0cc03dd7031aecf305d470c9fb97a9 | [
"MIT"
] | null | null | null | src/datasets/Fruits360/f360_dataset.py | JoseLuisRojasAranda/tfmodels | 56dce0236f0cc03dd7031aecf305d470c9fb97a9 | [
"MIT"
] | null | null | null | import tensorflow as tf
import cv2
from glob import glob
import sys
import os
from os import path
import json
import random
from datasets.datasets_features import bytes_feature
# Metodo que regresa el dataset de f360 ya procesado a tfrecord
# Los data set tiene el formato:
# x: tensor con la imagen normalizada
# y: tensor con onehot encoding de la categoria
# Returns:
# train_data: Dataset de entrenameinto
# test_data: Dataset de pruebas
def f360_load_dataset(path=None, resize=None, num_classes=None):
    """Load the preprocessed Fruits 360 tfrecord dataset.

    Each example is a pair (x, y): x a normalized float32 image tensor of
    shape (100, 100, 3) (optionally resized), y a flat one-hot label tensor.

    Args:
        path: directory containing f360_train.tfrecord, f360_test.tfrecord
            and dataset_info.json; defaults to the current directory.
        resize: optional target side length; images become (resize, resize).
        num_classes: unused; kept for backward compatibility.

    Returns:
        (train_data, test_data, info): two tf.data.Dataset objects and the
        dataset_info.json contents as a dict.
    """
    train_path = "f360_train.tfrecord"
    test_path = "f360_test.tfrecord"
    if path is None:  # was `== None`; identity test is the correct idiom
        path = ""
    train_raw_data = tf.data.TFRecordDataset(path + train_path)
    test_raw_data = tf.data.TFRecordDataset(path + test_path)
    _format = {
        "x": tf.io.FixedLenFeature([], tf.string),
        "y": tf.io.FixedLenFeature([], tf.string)
    }
    def _parse_example(example):
        # Deserialize the stored tensors; the unused intermediate dict the
        # original built has been removed (dead code).
        ex = tf.io.parse_single_example(example, _format)
        x = tf.io.parse_tensor(ex["x"], tf.float32)
        y = tf.io.parse_tensor(ex["y"], tf.float32)
        y = tf.reshape(y, [-1])
        return x, y
    train_data = train_raw_data.map(_parse_example)
    test_data = test_raw_data.map(_parse_example)
    # parse_tensor drops static shape info; restore it so downstream layers
    # can infer their input shapes.
    def _set_dataset_shape(x, y):
        x.set_shape([100, 100, 3])
        return x, y
    train_data = train_data.map(_set_dataset_shape)
    test_data = test_data.map(_set_dataset_shape)
    if resize is not None:
        def _resize_dataset(x, y):
            x = tf.image.resize(x, [resize, resize])
            return x, y
        train_data = train_data.map(_resize_dataset)
        test_data = test_data.map(_resize_dataset)
    with open(path + "dataset_info.json", "r") as data:
        info = json.load(data)
    return train_data, test_data, info
# Metodo que convierte el dataset de Fruits 360 a tfrecord, para despues usarlo
# con el Dataset API de tensorflow
# Args:
# training_path: el path al dataset de training
# test_path: el path al dataset de pruebas
# num_imgs: numero de images a obtener, -1 para todas
# result_path: el path donde se guarda el resultado
def f360_create_dataset(training_path=None, test_path=None, num_imgs=-1,
        result_path=None, delta=1, offset=0):
    """Convert Fruits 360 image folders into train/test tfrecord files plus
    a dataset_info.json summary under *result_path*.

    Args:
        training_path: path to the training image folders (one per class).
        test_path: path to the test image folders.
        num_imgs: number of images to take per class, -1 for all.
        result_path: output directory for the tfrecord files.
        delta: stride between consecutive training images (wraps around).
        offset: starting index into each class's sorted training images.
    """
    # Create the result folder if it does not already exist
    if not path.exists(result_path):
        os.makedirs(result_path)
    process_cats = ["Apple Golden 1", "Banana", "Orange"]
    """
    process_cats = ["Apple Braeburn", "Apple Golden 1", "Avocado", "Lemon",
        "Limes", "Lychee", "Mandarine", "Banana", "Onion White", "Onion White",
        "Pear", "Orange", "Pineapple", "Potato White", "Strawberry", "Tomato 4"]
    """
    onehot_depth = len(process_cats)
    onehot_dict = { }
    for i in range(len(process_cats)):
        cat = process_cats[i]
        onehot_dict[cat] = i
    # Collect every category folder present on disk
    cats = [x[1] for x in os.walk(training_path)][0]
    # tfrecord writers
    train_writer = tf.io.TFRecordWriter(result_path+"f360_train.tfrecord")
    test_writer = tf.io.TFRecordWriter(result_path+"f360_test.tfrecord")
    train_size = 0
    test_size = 0
    total_train_size = 0
    total_test_size = 0
    categories_size = { }
    # helper that writes one image example to a tfrecord
    def encode_image_info(image, category, writer):
        # Convert the image to a tensor and normalize it to [0, 1]
        image_tensor = tf.convert_to_tensor(image)
        image_tensor /= 255
        category = tf.one_hot([onehot_dict[category]], onehot_depth)
        # Build the features for the Example
        data = {
            "x": bytes_feature(tf.io.serialize_tensor(image_tensor)),
            "y": bytes_feature(tf.io.serialize_tensor(category))
        }
        example = tf.train.Example(features=tf.train.Features(feature=data))
        writer.write(example.SerializeToString())
    print("[INFO] Writing dataset to tfrecord")
    # iterate over every category to process
    for cat in process_cats:
        # only when the category exists on disk
        if cat in cats:
            print("[INFO] Writing {}...".format(cat))
            train_size = test_size = 0
            # gather the image paths
            train_img_path = glob(training_path+cat+"/*.jpg")
            test_img_path = glob(test_path+cat+"/*.jpg")
            # sort the paths deterministically
            train_img_path = sorted(train_img_path)
            test_img_path = sorted(test_img_path)
            # number of images to cycle through
            n_train = n_test = num_imgs
            if n_train == -1:
                n_train = len(train_img_path)
                n_test = len(test_img_path)
            i = offset
            j = 0
            total = 0
            # write training images
            """
            for i in range(n_train):
                img_path = train_img_path[i]
                image = cv2.imread(img_path)
                encode_image_info(image, cat, train_writer)
                train_size += 1
            """
            while total < n_train:
                img_path = train_img_path[i]
                image = cv2.imread(img_path)
                encode_image_info(image, cat, train_writer)
                train_size += 1
                #i += random.randint(10, 20)
                i += delta
                if i >= n_train: i = i - n_train
                total += delta
            # write test images
            for j in range(n_test):
                img_path = test_img_path[j]
                image = cv2.imread(img_path)
                encode_image_info(image, cat, test_writer)
                test_size += 1
            categories_size[cat] = (train_size, test_size)
            total_train_size += train_size
            total_test_size += test_size
    train_writer.close()
    test_writer.close()
    dataset_info = {
        "name": "Fruits 360 dataset",
        "num_classes": len(process_cats),
        "delta": delta,
        "offset": offset,
        "categories": process_cats,
        "train_size": total_train_size,
        "test_size": total_test_size,
        "categories_size": categories_size
    }
    # Write the dataset summary
    with open(result_path+"dataset_info.json", "w") as writer:
        json.dump(dataset_info, writer, indent=4)
| 31.418719 | 80 | 0.611007 | import tensorflow as tf
import cv2
from glob import glob
import sys
import os
from os import path
import json
import random
from datasets.datasets_features import bytes_feature
def f360_load_dataset(path=None, resize=None, num_classes=None):
train_path = "f360_train.tfrecord"
test_path = "f360_test.tfrecord"
if path == None:
path = ""
train_raw_data = tf.data.TFRecordDataset(path+train_path)
test_raw_data = tf.data.TFRecordDataset(path+test_path)
_format = {
"x": tf.io.FixedLenFeature([], tf.string),
"y": tf.io.FixedLenFeature([], tf.string)
}
def _parse_example(example):
ex = tf.io.parse_single_example(example, _format)
x = tf.io.parse_tensor(ex["x"], tf.float32)
y = tf.io.parse_tensor(ex["y"], tf.float32)
y = tf.reshape(y, [-1])
data_dict = {
"x": x,
"y": y
}
return x, y
train_data = train_raw_data.map(_parse_example)
test_data = test_raw_data.map(_parse_example)
def _set_dataset_shape(x, y):
x.set_shape([100, 100, 3])
return x, y
train_data = train_data.map(_set_dataset_shape)
test_data = test_data.map(_set_dataset_shape)
if resize != None:
def _resize_dataset(x, y):
x = tf.image.resize(x, [resize, resize])
return x, y
train_data = train_data.map(_resize_dataset)
test_data = test_data.map(_resize_dataset)
with open(path+"dataset_info.json", "r") as data:
info = json.load(data)
return train_data, test_data, info
def f360_create_dataset(training_path=None, test_path=None, num_imgs=-1,
result_path=None, delta=1, offset=0):
if not path.exists(result_path):
os.makedirs(result_path)
process_cats = ["Apple Golden 1", "Banana", "Orange"]
onehot_depth = len(process_cats)
onehot_dict = { }
for i in range(len(process_cats)):
cat = process_cats[i]
onehot_dict[cat] = i
cats = [x[1] for x in os.walk(training_path)][0]
train_writer = tf.io.TFRecordWriter(result_path+"f360_train.tfrecord")
test_writer = tf.io.TFRecordWriter(result_path+"f360_test.tfrecord")
train_size = 0
test_size = 0
total_train_size = 0
total_test_size = 0
categories_size = { }
def encode_image_info(image, category, writer):
image_tensor = tf.convert_to_tensor(image)
image_tensor /= 255
category = tf.one_hot([onehot_dict[category]], onehot_depth)
data = {
"x": bytes_feature(tf.io.serialize_tensor(image_tensor)),
"y": bytes_feature(tf.io.serialize_tensor(category))
}
example = tf.train.Example(features=tf.train.Features(feature=data))
writer.write(example.SerializeToString())
print("[INFO] Writing dataset to tfrecord")
for cat in process_cats:
if cat in cats:
print("[INFO] Writing {}...".format(cat))
train_size = test_size = 0
train_img_path = glob(training_path+cat+"/*.jpg")
test_img_path = glob(test_path+cat+"/*.jpg")
train_img_path = sorted(train_img_path)
test_img_path = sorted(test_img_path)
n_train = n_test = num_imgs
if n_train == -1:
n_train = len(train_img_path)
n_test = len(test_img_path)
i = offset
j = 0
total = 0
while total < n_train:
img_path = train_img_path[i]
image = cv2.imread(img_path)
encode_image_info(image, cat, train_writer)
train_size += 1
i += delta
if i >= n_train: i = i - n_train
total += delta
for j in range(n_test):
img_path = test_img_path[j]
image = cv2.imread(img_path)
encode_image_info(image, cat, test_writer)
test_size += 1
categories_size[cat] = (train_size, test_size)
total_train_size += train_size
total_test_size += test_size
train_writer.close()
test_writer.close()
dataset_info = {
"name": "Fruits 360 dataset",
"num_classes": len(process_cats),
"delta": delta,
"offset": offset,
"categories": process_cats,
"train_size": total_train_size,
"test_size": total_test_size,
"categories_size": categories_size
}
with open(result_path+"dataset_info.json", "w") as writer:
json.dump(dataset_info, writer, indent=4)
| true | true |
f738dc133f66e48df7c43829949098c52214b42c | 256 | py | Python | src/airfly/_vendor/airflow/decorators/python.py | ryanchao2012/airfly | 230ddd88885defc67485fa0c51f66c4a67ae98a9 | [
"MIT"
] | 7 | 2021-09-27T11:38:48.000Z | 2022-02-01T06:06:24.000Z | src/airfly/_vendor/airflow/decorators/python.py | ryanchao2012/airfly | 230ddd88885defc67485fa0c51f66c4a67ae98a9 | [
"MIT"
] | null | null | null | src/airfly/_vendor/airflow/decorators/python.py | ryanchao2012/airfly | 230ddd88885defc67485fa0c51f66c4a67ae98a9 | [
"MIT"
] | null | null | null | # Auto generated by 'inv collect-airflow'
from airfly._vendor.airflow.decorators.base import DecoratedOperator
from airfly._vendor.airflow.operators.python import PythonOperator
class _PythonDecoratedOperator(DecoratedOperator, PythonOperator):
    """Vendored stand-in combining Airflow's DecoratedOperator behavior with
    PythonOperator execution; adds no members of its own."""
    pass
| 32 | 68 | 0.84375 |
from airfly._vendor.airflow.decorators.base import DecoratedOperator
from airfly._vendor.airflow.operators.python import PythonOperator
class _PythonDecoratedOperator(DecoratedOperator, PythonOperator):
pass
| true | true |
f738dc15aae4bc9e8704d6dc10a845408ad63b2e | 1,759 | py | Python | src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/__init__.py | arcivanov/pybuilder | 419dd58cf52b1547ca1044e8e1f6b9faa47fadec | [
"Apache-2.0"
] | null | null | null | src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/__init__.py | arcivanov/pybuilder | 419dd58cf52b1547ca1044e8e1f6b9faa47fadec | [
"Apache-2.0"
] | 6 | 2021-09-05T06:00:20.000Z | 2021-11-07T08:26:55.000Z | src/main/python/pybuilder/_vendor/virtualenv/seed/wheels/embed/__init__.py | ufo2011/pybuilder | 9cb589c11f743e4f9e3a9051184471206b1d7c3b | [
"Apache-2.0"
] | 1 | 2020-02-22T05:25:50.000Z | 2020-02-22T05:25:50.000Z | from __future__ import absolute_import, unicode_literals
from ..util import Wheel
from ....util.path import Path
BUNDLE_FOLDER = Path(__file__).absolute().parent
BUNDLE_SUPPORT = {
"3.10": {
"pip": "pip-21.3.1-py3-none-any.whl",
"setuptools": "setuptools-58.3.0-py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
"3.9": {
"pip": "pip-21.3.1-py3-none-any.whl",
"setuptools": "setuptools-58.3.0-py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
"3.8": {
"pip": "pip-21.3.1-py3-none-any.whl",
"setuptools": "setuptools-58.3.0-py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
"3.7": {
"pip": "pip-21.3.1-py3-none-any.whl",
"setuptools": "setuptools-58.3.0-py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
"3.6": {
"pip": "pip-21.3.1-py3-none-any.whl",
"setuptools": "setuptools-58.3.0-py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
"3.5": {
"pip": "pip-20.3.4-py2.py3-none-any.whl",
"setuptools": "setuptools-50.3.2-py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
"2.7": {
"pip": "pip-20.3.4-py2.py3-none-any.whl",
"setuptools": "setuptools-44.1.1-py2.py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
}
MAX = "3.10"
def get_embed_wheel(distribution, for_py_version):
    """Return the bundled Wheel for *distribution* on the given Python
    version, falling back to the newest supported version's wheels when
    *for_py_version* is not in BUNDLE_SUPPORT."""
    # An unknown version yields {} which is falsy, so `or` selects MAX.
    path = BUNDLE_FOLDER / (BUNDLE_SUPPORT.get(for_py_version, {}) or BUNDLE_SUPPORT[MAX]).get(distribution)
    return Wheel.from_path(path)
__all__ = (
"get_embed_wheel",
"BUNDLE_SUPPORT",
"MAX",
"BUNDLE_FOLDER",
)
| 30.327586 | 108 | 0.572484 | from __future__ import absolute_import, unicode_literals
from ..util import Wheel
from ....util.path import Path
BUNDLE_FOLDER = Path(__file__).absolute().parent
BUNDLE_SUPPORT = {
"3.10": {
"pip": "pip-21.3.1-py3-none-any.whl",
"setuptools": "setuptools-58.3.0-py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
"3.9": {
"pip": "pip-21.3.1-py3-none-any.whl",
"setuptools": "setuptools-58.3.0-py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
"3.8": {
"pip": "pip-21.3.1-py3-none-any.whl",
"setuptools": "setuptools-58.3.0-py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
"3.7": {
"pip": "pip-21.3.1-py3-none-any.whl",
"setuptools": "setuptools-58.3.0-py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
"3.6": {
"pip": "pip-21.3.1-py3-none-any.whl",
"setuptools": "setuptools-58.3.0-py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
"3.5": {
"pip": "pip-20.3.4-py2.py3-none-any.whl",
"setuptools": "setuptools-50.3.2-py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
"2.7": {
"pip": "pip-20.3.4-py2.py3-none-any.whl",
"setuptools": "setuptools-44.1.1-py2.py3-none-any.whl",
"wheel": "wheel-0.37.0-py2.py3-none-any.whl",
},
}
MAX = "3.10"
def get_embed_wheel(distribution, for_py_version):
path = BUNDLE_FOLDER / (BUNDLE_SUPPORT.get(for_py_version, {}) or BUNDLE_SUPPORT[MAX]).get(distribution)
return Wheel.from_path(path)
__all__ = (
"get_embed_wheel",
"BUNDLE_SUPPORT",
"MAX",
"BUNDLE_FOLDER",
)
| true | true |
f738dc24263c0009dbd5d4831b0b1b62ca5c82ef | 309 | py | Python | 05/5.5.py | abe-101/ThinkPython-2 | bcebb1e9b3cc63c403f59c3cc0f33017bb017363 | [
"MIT"
] | 1 | 2021-12-16T16:46:47.000Z | 2021-12-16T16:46:47.000Z | 05/5.5.py | abe-101/ThinkPython-2 | bcebb1e9b3cc63c403f59c3cc0f33017bb017363 | [
"MIT"
] | null | null | null | 05/5.5.py | abe-101/ThinkPython-2 | bcebb1e9b3cc63c403f59c3cc0f33017bb017363 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import turtle
def draw(t, length, n):
    """Recursively draw a binary tree with turtle *t*: a trunk of length
    ``length * n`` forking into two branches one level shallower, splayed
    50 degrees to each side."""
    if n == 0:
        return
    branch_angle = 50
    trunk = length * n
    t.fd(trunk)
    t.lt(branch_angle)
    draw(t, length, n - 1)
    t.rt(branch_angle * 2)
    draw(t, length, n - 1)
    t.lt(branch_angle)
    t.bk(trunk)
# Create the turtle, draw a depth-5 tree, then keep the window open.
bob = turtle.Turtle()
print(bob)
draw(bob, 7, 5)
turtle.mainloop()
| 15.45 | 24 | 0.563107 |
import turtle
def draw(t, length, n):
if n == 0:
return
angle = 50
t.fd(length*n)
t.lt(angle)
draw(t, length, n-1)
t.rt(2*angle)
draw(t, length, n-1)
t.lt(angle)
t.bk(length*n)
bob = turtle.Turtle()
print(bob)
draw(bob, 7, 5)
turtle.mainloop()
| true | true |
f738dc71bfd7d43ffae7f7d4c2c4ec1081620eff | 890 | py | Python | tests/test_html_escaping.py | PureDreamer/lms | 4e4c2030f4f51a552e1fbfe82c16dc837e95310c | [
"BSD-3-Clause"
] | 85 | 2020-03-21T18:52:57.000Z | 2022-03-22T06:33:38.000Z | tests/test_html_escaping.py | PureDreamer/lms | 4e4c2030f4f51a552e1fbfe82c16dc837e95310c | [
"BSD-3-Clause"
] | 210 | 2020-03-21T22:44:57.000Z | 2022-01-06T22:47:50.000Z | tests/test_html_escaping.py | PureDreamer/lms | 4e4c2030f4f51a552e1fbfe82c16dc837e95310c | [
"BSD-3-Clause"
] | 27 | 2020-03-31T13:57:33.000Z | 2022-03-06T15:57:32.000Z | from flask import json
from lms.lmsdb.models import Solution, User
from tests import conftest
USER_COMMENT_BEFORE_ESCAPING = '<html><body><p>Welcome "LMS"</p></body></html>'
USER_COMMENT_AFTER_ESCAPING = (
'<html><body><p>Welcome "LMS"'
'</p></body></html>'
)
class TestHtmlEscaping:
    """Verify user-supplied comment text is HTML-escaped before storage."""
    @staticmethod
    def test_comment_text_escaping(student_user: User, solution: Solution):
        """Posting a comment containing markup stores only the escaped form."""
        client = conftest.get_logged_user(student_user.username)
        comment_response = client.post('/comments', data=json.dumps({
            'fileId': solution.files[0].id, 'act': 'create', 'kind': 'text',
            'comment': USER_COMMENT_BEFORE_ESCAPING, 'line': 1,
        }), content_type='application/json')
        assert comment_response.status_code == 200
        assert solution.comments[0].comment.text == USER_COMMENT_AFTER_ESCAPING
| 35.6 | 79 | 0.689888 | from flask import json
from lms.lmsdb.models import Solution, User
from tests import conftest
USER_COMMENT_BEFORE_ESCAPING = '<html><body><p>Welcome "LMS"</p></body></html>'
USER_COMMENT_AFTER_ESCAPING = (
'<html><body><p>Welcome "LMS"'
'</p></body></html>'
)
class TestHtmlEscaping:
@staticmethod
def test_comment_text_escaping(student_user: User, solution: Solution):
client = conftest.get_logged_user(student_user.username)
comment_response = client.post('/comments', data=json.dumps({
'fileId': solution.files[0].id, 'act': 'create', 'kind': 'text',
'comment': USER_COMMENT_BEFORE_ESCAPING, 'line': 1,
}), content_type='application/json')
assert comment_response.status_code == 200
assert solution.comments[0].comment.text == USER_COMMENT_AFTER_ESCAPING
| true | true |
f738dce996b7b83b3e2c5c20eda0dab44a0f28d3 | 64 | py | Python | numbers.py | Arif8099/myNewRepository | 7fe087a5fa0ce6b81238c0a2a1229cc30ac21aa7 | [
"MIT"
] | null | null | null | numbers.py | Arif8099/myNewRepository | 7fe087a5fa0ce6b81238c0a2a1229cc30ac21aa7 | [
"MIT"
] | null | null | null | numbers.py | Arif8099/myNewRepository | 7fe087a5fa0ce6b81238c0a2a1229cc30ac21aa7 | [
"MIT"
] | null | null | null | for i in range(1,11):
print(str(i))
print(list(range(1,11)))
| 12.8 | 24 | 0.625 | for i in range(1,11):
print(str(i))
print(list(range(1,11)))
| true | true |
f738dd6ff6034d08207c1bcd65c39091e0e5c9fb | 6,621 | py | Python | src/kestrel/codegen/pattern.py | vincentywdeng/kestrel-lang | 91e61c179bef433f5dc2e9fb6edf184d38ae6173 | [
"Apache-2.0"
] | 119 | 2021-06-04T15:40:10.000Z | 2022-03-24T09:56:53.000Z | src/kestrel/codegen/pattern.py | vincentywdeng/kestrel-lang | 91e61c179bef433f5dc2e9fb6edf184d38ae6173 | [
"Apache-2.0"
] | 76 | 2021-06-04T15:06:10.000Z | 2022-03-20T21:03:13.000Z | src/kestrel/codegen/pattern.py | vincentywdeng/kestrel-lang | 91e61c179bef433f5dc2e9fb6edf184d38ae6173 | [
"Apache-2.0"
] | 28 | 2021-06-05T07:27:15.000Z | 2022-01-20T18:43:47.000Z | import dateutil.parser
import datetime
import logging
import re
from kestrel.utils import dedup_dicts
from kestrel.semantics import get_entity_table
from kestrel.syntax.paramstix import parse_extended_stix_pattern
from kestrel.exceptions import (
InvalidAttribute,
UnsupportedStixSyntax,
KestrelInternalError,
)
from firepit.exceptions import InvalidAttr
_logger = logging.getLogger(__name__)
def or_patterns(patterns):
    """OR-combine STIX patterns, merging their START/STOP time qualifiers.

    Patterns ending in ``START t'...' STOP t'...'`` are stripped of the
    qualifier; the merged pattern gets the min start / max end of all
    qualified inputs. Returns None when every input is None/empty.
    """
    bodies = []
    time_range = []
    for pattern in patterns:
        if pattern:
            pieces = pattern.split()
            if len(pieces) > 4 and pieces[-4] == "START" and pieces[-2] == "STOP":
                # peel off the trailing time qualifier, keep the bare body
                time_range.append((pieces[-3], pieces[-1]))
                bodies.append("(" + " ".join(pieces[:-4]) + ")")
            else:
                bodies.append(pattern)
    if bodies:
        if time_range:
            # min/max compare the timestamp tokens as strings; presumably
            # all are same-format ISO timestamps — TODO confirm
            start = min([t[0] for t in time_range])
            end = max([t[1] for t in time_range])
            final_pattern = (
                "(" + " OR ".join(bodies) + ")" + " START " + start + " STOP " + end
            )
        else:
            final_pattern = " OR ".join(bodies)
        _logger.debug(f"or pattern merged: {final_pattern}")
    else:
        final_pattern = None
        _logger.info(f"all None patterns input into or_patterns()")
    return final_pattern
def build_pattern(
    raw_pattern_body, time_range, start_offset, end_offset, symtable, store
):
    """Dereference variables in a STIX pattern and output the unfolded pattern.

    When no explicit *time_range* is given, one is derived from the
    first/last observation times of the referenced variables, padded by
    *start_offset* / *end_offset* seconds.

    Returns:
        str or None: the assembled pattern, or None when the body is empty.
    """
    references = parse_extended_stix_pattern(raw_pattern_body)
    pattern_body = raw_pattern_body
    _logger.debug(f"building pattern for: {raw_pattern_body}")
    if references:
        _logger.debug(f"references found: {list(references.keys())}")
        var_attr_to_vals_str = _dereference_multiple_variables(
            store, symtable, references
        )
        for var_attr, vals_str in var_attr_to_vals_str.items():
            pattern_body = _replace_ref_with_op(pattern_body, var_attr, vals_str)
        _logger.debug(f'pattern body dereferred: "{pattern_body}"')
        if pattern_body and not time_range:
            try:
                # Explicit loop (not a comprehension) so that `var_name`
                # stays bound for the except clause below; a comprehension
                # variable does not leak in Python 3 and would raise
                # NameError in the handler.
                ref_var_time_ranges = []
                for var_name in references.keys():
                    ref_var_time_ranges.append(
                        _get_variable_time_range(store, symtable, var_name)
                    )
                start = min([t[0] for t in ref_var_time_ranges])
                end = max([t[1] for t in ref_var_time_ranges])
                start_adj = start + datetime.timedelta(seconds=start_offset)
                end_adj = end + datetime.timedelta(seconds=end_offset)
                start_stix = start_adj.strftime("%Y-%m-%dT%H:%M:%S.000Z")
                stop_stix = end_adj.strftime("%Y-%m-%dT%H:%M:%S.000Z")
                time_range = (start_stix, stop_stix)
                _logger.debug(f"pattern time range computed: {time_range}")
            except InvalidAttribute:
                time_range = None
                _logger.warning(
                    f"pattern time range searching failed on variable {var_name}"
                )
    if pattern_body:
        if time_range:
            pattern = (
                f"({pattern_body}) START t'{time_range[0]}' STOP t'{time_range[1]}'"
            )
        else:
            pattern = f"{pattern_body}"
        _logger.debug(f'final pattern assembled: "{pattern}"')
    else:
        pattern = None
        _logger.info(f"empty pattern assembled")
    return pattern
def build_pattern_from_ids(return_type, ids):
    """Build a STIX pattern matching the given entity ids, or None if empty."""
    if not ids:
        return None
    id_list = ", ".join(_type_value(entity_id) for entity_id in ids)
    return "[" + return_type + ":id IN (" + id_list + ")]"
def _dereference_multiple_variables(store, symtable, references):
    """Map every referenced "var.attr" to a "(v1, v2, ...)" value-list
    string suitable for a STIX ``IN`` clause."""
    return {
        var + "." + attr: "(" + ", ".join(map(_type_value, vals)) + ")"
        for var, attrs in references.items()
        for attr, vals in _dereference_variable(store, symtable, var, attrs).items()
    }
def _dereference_variable(store, symtable, var_name, attributes):
    """Look up *attributes* of variable *var_name* in the firepit store.

    Returns:
        dict: attribute -> list of distinct non-null values (first-seen order).

    Raises:
        InvalidAttribute: when an attribute is unknown to the store or
            yields no values at all.
    """
    attr_line = ",".join(attributes)
    _logger.debug(f'deref "{var_name}" with attributes "{attr_line}"')
    var_entity_table = get_entity_table(var_name, symtable)
    try:
        store_return = store.lookup(var_entity_table, attr_line)
    except InvalidAttr as e:
        _logger.warning(f"cannot deref {attr_line}. Invalid attribute in firepit.")
        raise InvalidAttribute(e.message)
    attr_to_values = {k: [] for k in attributes}
    for row in store_return:
        for k, v in row.items():
            # de-duplicate while preserving order; drop null/empty values
            if v and v not in attr_to_values[k]:
                attr_to_values[k].append(v)
    # an attribute with no values at all cannot be dereferenced
    for k, v in attr_to_values.items():
        if not v:
            raise InvalidAttribute(var_name + "." + k)
    _logger.debug(f"deref results: {str(attr_to_values)}")
    return attr_to_values
def _get_variable_time_range(store, symtable, var_name):
    """Compute the observation time span of a variable's entities.

    Returns:
        start (datetime.datetime): the time any entities is observed first.
        end (datetime.datetime): the time any entities is observed last.

    Raises:
        InvalidAttribute: when the store has no observation-time columns.
    """
    time_attr_line = ",".join(["first_observed", "last_observed"])
    var_entity_table = get_entity_table(var_name, symtable)
    try:
        store_return = store.lookup(var_entity_table, time_attr_line)
    except InvalidAttr as e:
        raise InvalidAttribute(e.message)
    life_span = dedup_dicts(store_return)
    # min of all first-observed / max of all last-observed timestamps
    start = min([dateutil.parser.isoparse(e["first_observed"]) for e in life_span])
    end = max([dateutil.parser.isoparse(e["last_observed"]) for e in life_span])
    return start, end
def _type_value(value):
if isinstance(value, str):
return f"'{value}'"
elif isinstance(value, int):
return str(value)
elif isinstance(value, float):
# pandas dataframe and sqlite may save integers as floats
return str(round(value))
else:
return str(value)
def _replace_ref_with_op(pattern, var_attr, vals_str):
# avoid adhesive parans/ops that prevent correct splitting
pattern = re.sub(r"([=><\[\]])", r" \1 ", pattern)
pieces = pattern.split()
try:
ref_index = pieces.index(var_attr)
except ValueError:
err_msg = f'cannot find "{var_attr}" when assembling pattern "{pattern}"'
_logger.error(err_msg)
raise KestrelInternalError(err_msg)
if pieces[ref_index - 1] == "=":
pieces[ref_index - 1] = "IN"
pieces[ref_index] = vals_str
else:
raise UnsupportedStixSyntax(
'only "=" is supported before referred variable in parameterized STIX'
)
return " ".join(pieces)
| 33.105 | 87 | 0.622414 | import dateutil.parser
import datetime
import logging
import re
from kestrel.utils import dedup_dicts
from kestrel.semantics import get_entity_table
from kestrel.syntax.paramstix import parse_extended_stix_pattern
from kestrel.exceptions import (
InvalidAttribute,
UnsupportedStixSyntax,
KestrelInternalError,
)
from firepit.exceptions import InvalidAttr
_logger = logging.getLogger(__name__)
def or_patterns(patterns):
bodies = []
time_range = []
for pattern in patterns:
if pattern:
pieces = pattern.split()
if len(pieces) > 4 and pieces[-4] == "START" and pieces[-2] == "STOP":
time_range.append((pieces[-3], pieces[-1]))
bodies.append("(" + " ".join(pieces[:-4]) + ")")
else:
bodies.append(pattern)
if bodies:
if time_range:
start = min([t[0] for t in time_range])
end = max([t[1] for t in time_range])
final_pattern = (
"(" + " OR ".join(bodies) + ")" + " START " + start + " STOP " + end
)
else:
final_pattern = " OR ".join(bodies)
_logger.debug(f"or pattern merged: {final_pattern}")
else:
final_pattern = None
_logger.info(f"all None patterns input into or_patterns()")
return final_pattern
def build_pattern(
raw_pattern_body, time_range, start_offset, end_offset, symtable, store
):
references = parse_extended_stix_pattern(raw_pattern_body)
pattern_body = raw_pattern_body
_logger.debug(f"building pattern for: {raw_pattern_body}")
if references:
_logger.debug(f"references found: {list(references.keys())}")
var_attr_to_vals_str = _dereference_multiple_variables(
store, symtable, references
)
for var_attr, vals_str in var_attr_to_vals_str.items():
pattern_body = _replace_ref_with_op(pattern_body, var_attr, vals_str)
_logger.debug(f'pattern body dereferred: "{pattern_body}"')
if pattern_body and not time_range:
try:
ref_var_time_ranges = [
_get_variable_time_range(store, symtable, var_name)
for var_name in references.keys()
]
start = min([t[0] for t in ref_var_time_ranges])
end = max([t[1] for t in ref_var_time_ranges])
start_adj = start + datetime.timedelta(seconds=start_offset)
end_adj = end + datetime.timedelta(seconds=end_offset)
start_stix = start_adj.strftime("%Y-%m-%dT%H:%M:%S.000Z")
stop_stix = end_adj.strftime("%Y-%m-%dT%H:%M:%S.000Z")
time_range = (start_stix, stop_stix)
_logger.debug(f"pattern time range computed: {time_range}")
except InvalidAttribute:
time_range = None
_logger.warning(
f"pattern time range searching failed on variable {var_name}"
)
if pattern_body:
if time_range:
pattern = (
f"({pattern_body}) START t'{time_range[0]}' STOP t'{time_range[1]}'"
)
else:
pattern = f"{pattern_body}"
_logger.debug(f'final pattern assembled: "{pattern}"')
else:
pattern = None
_logger.info(f"empty pattern assembled")
return pattern
def build_pattern_from_ids(return_type, ids):
    """Build a STIX pattern matching entities of ``return_type`` by ID.

    Returns None when no IDs are supplied.
    """
    if not ids:
        return None
    id_list = ", ".join(map(_type_value, ids))
    return "[" + return_type + ":id IN (" + id_list + ")]"
def _dereference_multiple_variables(store, symtable, references):
    """Map every referenced "var.attr" to a "(v1, v2, ...)" value-list string.

    ``references`` maps variable names to the attributes referenced on them;
    each attribute's distinct values are fetched via _dereference_variable
    and rendered as a parenthesized STIX value list.
    """
    deref_map = {}
    for var, attrs in references.items():
        attr_to_vals = _dereference_variable(store, symtable, var, attrs)
        for attr, vals in attr_to_vals.items():
            deref_map[var + "." + attr] = (
                "(" + ", ".join(map(_type_value, vals)) + ")"
            )
    return deref_map
def _dereference_variable(store, symtable, var_name, attributes):
    """Look up the given attributes of a Kestrel variable in the firepit store.

    Returns {attribute: [distinct non-null values]} preserving first-seen
    order. Raises InvalidAttribute when an attribute is unknown to the
    store or ends up with no usable (non-null) values.
    """
    attr_line = ",".join(attributes)
    _logger.debug(f'deref "{var_name}" with attributes "{attr_line}"')
    var_entity_table = get_entity_table(var_name, symtable)
    try:
        store_return = store.lookup(var_entity_table, attr_line)
    except InvalidAttr as e:
        # Translate the firepit-level error into Kestrel's own exception.
        _logger.warning(f"cannot deref {attr_line}. Invalid attribute in firepit.")
        raise InvalidAttribute(e.message)
    # Collect distinct values per attribute; falsy values (None, "", 0) are
    # dropped, and insertion order is preserved for stable pattern output.
    attr_to_values = {k: [] for k in attributes}
    for row in store_return:
        for k, v in row.items():
            if v and v not in attr_to_values[k]:
                attr_to_values[k].append(v)
    # An attribute with no values cannot be substituted into a pattern.
    for k, v in attr_to_values.items():
        if not v:
            raise InvalidAttribute(var_name + "." + k)
    _logger.debug(f"deref results: {str(attr_to_values)}")
    return attr_to_values
def _get_variable_time_range(store, symtable, var_name):
    """Return (earliest first_observed, latest last_observed) datetimes
    across all records of the given variable.

    Raises InvalidAttribute when the store does not know the observation
    timestamp columns for this variable.
    """
    time_attr_line = ",".join(["first_observed", "last_observed"])
    var_entity_table = get_entity_table(var_name, symtable)
    try:
        store_return = store.lookup(var_entity_table, time_attr_line)
    except InvalidAttr as e:
        raise InvalidAttribute(e.message)
    life_span = dedup_dicts(store_return)
    start = min(
        dateutil.parser.isoparse(entry["first_observed"]) for entry in life_span
    )
    end = max(
        dateutil.parser.isoparse(entry["last_observed"]) for entry in life_span
    )
    return start, end
def _type_value(value):
if isinstance(value, str):
return f"'{value}'"
elif isinstance(value, int):
return str(value)
elif isinstance(value, float):
return str(round(value))
else:
return str(value)
def _replace_ref_with_op(pattern, var_attr, vals_str):
pattern = re.sub(r"([=><\[\]])", r" \1 ", pattern)
pieces = pattern.split()
try:
ref_index = pieces.index(var_attr)
except ValueError:
err_msg = f'cannot find "{var_attr}" when assembling pattern "{pattern}"'
_logger.error(err_msg)
raise KestrelInternalError(err_msg)
if pieces[ref_index - 1] == "=":
pieces[ref_index - 1] = "IN"
pieces[ref_index] = vals_str
else:
raise UnsupportedStixSyntax(
'only "=" is supported before referred variable in parameterized STIX'
)
return " ".join(pieces)
| true | true |
f738ddd9c66d2ab195c75e9d45d2b52105e9e45c | 10,263 | py | Python | systolic/test/SystolicCL_test.py | pnnl/open-cgra | c19bc3a1baca3da659dc8f3cbfe32dd6003a2c65 | [
"BSD-3-Clause"
] | 13 | 2020-09-01T16:55:42.000Z | 2021-09-01T16:39:44.000Z | systolic/test/SystolicCL_test.py | pnnl/open-cgra | c19bc3a1baca3da659dc8f3cbfe32dd6003a2c65 | [
"BSD-3-Clause"
] | 5 | 2020-08-19T05:40:01.000Z | 2021-07-15T22:05:03.000Z | systolic/test/SystolicCL_test.py | pnnl/open-cgra | c19bc3a1baca3da659dc8f3cbfe32dd6003a2c65 | [
"BSD-3-Clause"
] | 4 | 2020-09-01T16:44:02.000Z | 2021-07-11T04:40:05.000Z | """
==========================================================================
SystolicCL_test.py
==========================================================================
Test cases for Systolic Array with CL data/config memory.
Author : Cheng Tan
Date : Dec 28, 2019
"""
from pymtl3 import *
from pymtl3.stdlib.test import TestSinkCL
from pymtl3.stdlib.test.test_srcs import TestSrcRTL
from ...lib.opt_type import *
from ...lib.messages import *
from ...lib.ctrl_helper import *
from ...fu.flexible.FlexibleFuRTL import FlexibleFuRTL
from ...fu.single.AdderRTL import AdderRTL
from ...fu.single.MemUnitRTL import MemUnitRTL
from ...fu.double.SeqMulAdderRTL import SeqMulAdderRTL
from ..SystolicCL import SystolicCL
import os
#-------------------------------------------------------------------------
# Test harness
#-------------------------------------------------------------------------
class TestHarness( Component ):
  # Test wrapper: instantiates the systolic-array DUT and attaches one
  # CL test sink per output channel (height-1 of them) holding the
  # expected result stream.
  def construct( s, DUT, FunctionUnit, FuList, DataType, PredicateType,
                 CtrlType, width, height, ctrl_mem_size, data_mem_size,
                 src_opt, preload_data, preload_const, sink_out ):
    s.num_tiles = width * height
    AddrType = mk_bits( clog2( ctrl_mem_size ) )
    # One sink per output row; expected values come from sink_out.
    s.sink_out = [ TestSinkCL( DataType, sink_out[i] )
                   for i in range( height-1 ) ]
    # NOTE(review): ctrl_mem_size is passed twice (also as the argument
    # after data_mem_size) -- presumably the control-step count equals the
    # control memory size here; confirm against SystolicCL's signature.
    s.dut = DUT( FunctionUnit, FuList, DataType, PredicateType, CtrlType,
                 width, height, ctrl_mem_size, data_mem_size, ctrl_mem_size,
                 src_opt, preload_data, preload_const )
    for i in range( height-1 ):
      connect( s.dut.send_data[i], s.sink_out[i].recv )
  def line_trace( s ):
    # Delegate tracing to the DUT for the per-cycle printout in run_sim.
    return s.dut.line_trace()
def run_sim( test_harness, max_cycles=6 ):
  """Elaborate the harness, run it for max_cycles, printing a line trace
  each cycle, then tick three extra cycles to drain in-flight messages."""
  test_harness.elaborate()
  test_harness.apply( SimulationPass() )
  test_harness.sim_reset()
  # Cycle 0 trace, then one trace line per simulated cycle.
  print()
  print( "{}:{}".format( 0, test_harness.line_trace() ))
  for cycle in range( 1, max_cycles + 1 ):
    test_harness.tick()
    print( "----------------------------------------------------" )
    print( "{}:{}".format( cycle, test_harness.line_trace() ))
  # Extra ticks so trailing transactions reach the sinks.
  for _ in range( 3 ):
    test_harness.tick()
# ------------------------------------------------------------------
# To emulate systolic array
# left bottom is 0, 0
# right top is 1, 1
# 1: North, 2: South, 3: West, 4: East
# 5 - 8: registers
# ------------------------------------------------------------------
def test_systolic_2x2():
  # End-to-end test of a 2x2 systolic matrix multiply; height is 3 because
  # one extra row of tiles feeds/loads data (memory row).
  num_tile_inports = 4
  num_tile_outports = 4
  num_xbar_inports = 6
  num_xbar_outports = 8
  ctrl_mem_size = 8
  width = 2
  height = 3
  RouteType = mk_bits( clog2( num_xbar_inports + 1 ) )
  AddrType = mk_bits( clog2( ctrl_mem_size ) )
  num_tiles = width * height
  ctrl_mem_size = 8
  data_mem_size = 2
  # number of inputs of FU is fixed inside the tile
  num_fu_in = 4
  DUT = SystolicCL
  FunctionUnit = FlexibleFuRTL
  FuList = [AdderRTL, MemUnitRTL, SeqMulAdderRTL]
  DataType = mk_data( 16, 1 )
  PredicateType = mk_predicate( 1, 1 )
  CtrlType = mk_ctrl( num_fu_in, num_xbar_inports, num_xbar_outports )
  FuInType = mk_bits( clog2( num_fu_in + 1 ) )
  pickRegister = [ FuInType( x+1 ) for x in range( num_fu_in ) ]
  # Per-tile control programs: one sub-list per tile, one CtrlType entry per
  # control step. Routing codes: 1-4 = N/S/W/E neighbors, 5-8 = registers,
  # 0 = unused (see the banner comment above this function).
  src_opt = [[CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
              RouteType(5), RouteType(0), RouteType(0), RouteType(0),
              RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
              RouteType(5), RouteType(0), RouteType(0), RouteType(0),
              RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
              RouteType(5), RouteType(0), RouteType(0), RouteType(0),
              RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
              RouteType(5), RouteType(0), RouteType(0), RouteType(0),
              RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
              ],
             [CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
              RouteType(5), RouteType(0), RouteType(0), RouteType(0),
              RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
              RouteType(5), RouteType(0), RouteType(0), RouteType(0),
              RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
              RouteType(5), RouteType(0), RouteType(0), RouteType(0),
              RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
              RouteType(5), RouteType(0), RouteType(0), RouteType(0),
              RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
              RouteType(5), RouteType(0), RouteType(0), RouteType(0),
              RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
              ],
             [CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
              RouteType(2), RouteType(0), RouteType(0), RouteType(0),
              RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_MUL_CONST, b1( 0 ), pickRegister, [
              RouteType(2), RouteType(0), RouteType(0), RouteType(5),
              RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_MUL_CONST, b1( 0 ), pickRegister, [
              RouteType(2), RouteType(0), RouteType(0), RouteType(5),
              RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_MUL_CONST, b1( 0 ), pickRegister, [
              RouteType(2), RouteType(0), RouteType(0), RouteType(5),
              RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
              ],
             [CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
              RouteType(2), RouteType(0), RouteType(0), RouteType(0),
              RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
              RouteType(2), RouteType(0), RouteType(0), RouteType(0),
              RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
              CtrlType( OPT_MUL_CONST_ADD, b1( 0 ), pickRegister, [
              RouteType(2), RouteType(0), RouteType(0), RouteType(5),
              RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
              CtrlType( OPT_MUL_CONST_ADD, b1( 0 ), pickRegister, [
              RouteType(2), RouteType(0), RouteType(0), RouteType(5),
              RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
              ],
             [CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
              RouteType(0), RouteType(0), RouteType(0), RouteType(0),
              RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
              RouteType(0), RouteType(0), RouteType(0), RouteType(0),
              RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_MUL_CONST, b1( 0 ), pickRegister, [
              RouteType(0), RouteType(0), RouteType(0), RouteType(5),
              RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
              CtrlType( OPT_MUL_CONST, b1( 0 ), pickRegister, [
              RouteType(0), RouteType(0), RouteType(0), RouteType(5),
              RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
              ],
             [CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
              RouteType(0), RouteType(0), RouteType(0), RouteType(0),
              RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
              CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
              RouteType(0), RouteType(0), RouteType(0), RouteType(0),
              RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
              CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
              RouteType(0), RouteType(0), RouteType(0), RouteType(0),
              RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
              CtrlType( OPT_MUL_CONST_ADD, b1( 0 ), pickRegister, [
              RouteType(0), RouteType(0), RouteType(0), RouteType(5),
              RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
              CtrlType( OPT_MUL_CONST_ADD, b1( 0 ), pickRegister, [
              RouteType(0), RouteType(0), RouteType(0), RouteType(5),
              RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
              ]
            ]
  # Data memory holds the left-hand matrix elements.
  preload_mem = [DataType(1, 1), DataType(2, 1), DataType(3, 1), DataType(4, 1)]
  preload_const = [[DataType(0, 1), DataType(1, 1)],
                   [DataType(0, 0), DataType(2, 1), DataType(3, 1)], # offset address used for loading
                   [DataType(2, 1)], [DataType(4, 1)], # preloaded data
                   [DataType(6, 1)], [DataType(8, 1)]] # preloaded data
  """
  1 3     2 6     14 20
      x         =
  2 4     4 8     30 44
  """
  # Expected product matrix, one list per output row.
  sink_out = [[DataType(14, 1), DataType(20, 1)], [DataType(30, 1), DataType(44, 1)]]
  th = TestHarness( DUT, FunctionUnit, FuList, DataType, PredicateType,
                    CtrlType, width, height, ctrl_mem_size, len(preload_mem),
                    src_opt, preload_mem, preload_const, sink_out )
  run_sim( th )
| 48.871429 | 102 | 0.518952 |
from pymtl3 import *
from pymtl3.stdlib.test import TestSinkCL
from pymtl3.stdlib.test.test_srcs import TestSrcRTL
from ...lib.opt_type import *
from ...lib.messages import *
from ...lib.ctrl_helper import *
from ...fu.flexible.FlexibleFuRTL import FlexibleFuRTL
from ...fu.single.AdderRTL import AdderRTL
from ...fu.single.MemUnitRTL import MemUnitRTL
from ...fu.double.SeqMulAdderRTL import SeqMulAdderRTL
from ..SystolicCL import SystolicCL
import os
class TestHarness( Component ):
def construct( s, DUT, FunctionUnit, FuList, DataType, PredicateType,
CtrlType, width, height, ctrl_mem_size, data_mem_size,
src_opt, preload_data, preload_const, sink_out ):
s.num_tiles = width * height
AddrType = mk_bits( clog2( ctrl_mem_size ) )
s.sink_out = [ TestSinkCL( DataType, sink_out[i] )
for i in range( height-1 ) ]
s.dut = DUT( FunctionUnit, FuList, DataType, PredicateType, CtrlType,
width, height, ctrl_mem_size, data_mem_size, ctrl_mem_size,
src_opt, preload_data, preload_const )
for i in range( height-1 ):
connect( s.dut.send_data[i], s.sink_out[i].recv )
def line_trace( s ):
return s.dut.line_trace()
def run_sim( test_harness, max_cycles=6 ):
test_harness.elaborate()
test_harness.apply( SimulationPass() )
test_harness.sim_reset()
ncycles = 0
print()
print( "{}:{}".format( ncycles, test_harness.line_trace() ))
while ncycles < max_cycles:
test_harness.tick()
ncycles += 1
print( "----------------------------------------------------" )
print( "{}:{}".format( ncycles, test_harness.line_trace() ))
test_harness.tick()
test_harness.tick()
test_harness.tick()
def test_systolic_2x2():
num_tile_inports = 4
num_tile_outports = 4
num_xbar_inports = 6
num_xbar_outports = 8
ctrl_mem_size = 8
width = 2
height = 3
RouteType = mk_bits( clog2( num_xbar_inports + 1 ) )
AddrType = mk_bits( clog2( ctrl_mem_size ) )
num_tiles = width * height
ctrl_mem_size = 8
data_mem_size = 2
num_fu_in = 4
DUT = SystolicCL
FunctionUnit = FlexibleFuRTL
FuList = [AdderRTL, MemUnitRTL, SeqMulAdderRTL]
DataType = mk_data( 16, 1 )
PredicateType = mk_predicate( 1, 1 )
CtrlType = mk_ctrl( num_fu_in, num_xbar_inports, num_xbar_outports )
FuInType = mk_bits( clog2( num_fu_in + 1 ) )
pickRegister = [ FuInType( x+1 ) for x in range( num_fu_in ) ]
src_opt = [[CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
RouteType(5), RouteType(0), RouteType(0), RouteType(0),
RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
RouteType(5), RouteType(0), RouteType(0), RouteType(0),
RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
RouteType(5), RouteType(0), RouteType(0), RouteType(0),
RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
RouteType(5), RouteType(0), RouteType(0), RouteType(0),
RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
],
[CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
RouteType(5), RouteType(0), RouteType(0), RouteType(0),
RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
RouteType(5), RouteType(0), RouteType(0), RouteType(0),
RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
RouteType(5), RouteType(0), RouteType(0), RouteType(0),
RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
RouteType(5), RouteType(0), RouteType(0), RouteType(0),
RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_LD_CONST, b1( 0 ), pickRegister, [
RouteType(5), RouteType(0), RouteType(0), RouteType(0),
RouteType(0), RouteType(0), RouteType(0), RouteType(0)] ),
],
[CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
RouteType(2), RouteType(0), RouteType(0), RouteType(0),
RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_MUL_CONST, b1( 0 ), pickRegister, [
RouteType(2), RouteType(0), RouteType(0), RouteType(5),
RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_MUL_CONST, b1( 0 ), pickRegister, [
RouteType(2), RouteType(0), RouteType(0), RouteType(5),
RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_MUL_CONST, b1( 0 ), pickRegister, [
RouteType(2), RouteType(0), RouteType(0), RouteType(5),
RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
],
[CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
RouteType(2), RouteType(0), RouteType(0), RouteType(0),
RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
RouteType(2), RouteType(0), RouteType(0), RouteType(0),
RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
CtrlType( OPT_MUL_CONST_ADD, b1( 0 ), pickRegister, [
RouteType(2), RouteType(0), RouteType(0), RouteType(5),
RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
CtrlType( OPT_MUL_CONST_ADD, b1( 0 ), pickRegister, [
RouteType(2), RouteType(0), RouteType(0), RouteType(5),
RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
],
[CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
RouteType(0), RouteType(0), RouteType(0), RouteType(0),
RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
RouteType(0), RouteType(0), RouteType(0), RouteType(0),
RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_MUL_CONST, b1( 0 ), pickRegister, [
RouteType(0), RouteType(0), RouteType(0), RouteType(5),
RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
CtrlType( OPT_MUL_CONST, b1( 0 ), pickRegister, [
RouteType(0), RouteType(0), RouteType(0), RouteType(5),
RouteType(2), RouteType(0), RouteType(0), RouteType(0)] ),
],
[CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
RouteType(0), RouteType(0), RouteType(0), RouteType(0),
RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
RouteType(0), RouteType(0), RouteType(0), RouteType(0),
RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
CtrlType( OPT_NAH, b1( 0 ), pickRegister, [
RouteType(0), RouteType(0), RouteType(0), RouteType(0),
RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
CtrlType( OPT_MUL_CONST_ADD, b1( 0 ), pickRegister, [
RouteType(0), RouteType(0), RouteType(0), RouteType(5),
RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
CtrlType( OPT_MUL_CONST_ADD, b1( 0 ), pickRegister, [
RouteType(0), RouteType(0), RouteType(0), RouteType(5),
RouteType(2), RouteType(0), RouteType(3), RouteType(0)] ),
]
]
preload_mem = [DataType(1, 1), DataType(2, 1), DataType(3, 1), DataType(4, 1)]
preload_const = [[DataType(0, 1), DataType(1, 1)],
[DataType(0, 0), DataType(2, 1), DataType(3, 1)],
[DataType(2, 1)], [DataType(4, 1)],
[DataType(6, 1)], [DataType(8, 1)]]
sink_out = [[DataType(14, 1), DataType(20, 1)], [DataType(30, 1), DataType(44, 1)]]
th = TestHarness( DUT, FunctionUnit, FuList, DataType, PredicateType,
CtrlType, width, height, ctrl_mem_size, len(preload_mem),
src_opt, preload_mem, preload_const, sink_out )
run_sim( th )
| true | true |
f738df852121a8d04e083354e6672df79a7edc21 | 1,216 | py | Python | Attic/migrations/0002_auto_20210508_1848.py | RemeoLong/Attic | 8d9458b86a924b639001fe0bee052ba5be66dbce | [
"MIT"
] | null | null | null | Attic/migrations/0002_auto_20210508_1848.py | RemeoLong/Attic | 8d9458b86a924b639001fe0bee052ba5be66dbce | [
"MIT"
] | null | null | null | Attic/migrations/0002_auto_20210508_1848.py | RemeoLong/Attic | 8d9458b86a924b639001fe0bee052ba5be66dbce | [
"MIT"
] | null | null | null | # Generated by Django 2.2.16 on 2021-05-08 23:48
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration (see file header): widens the choice
    # lists of Consultation.service and Consultation.status. Do not edit the
    # field definitions by hand -- regenerate with `makemigrations` instead.
    dependencies = [
        ('Attic', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='consultation',
            name='service',
            field=models.CharField(choices=[('Pest: Mice', 'Pest: Mice'), ('Pest: Rats', 'Pest: Rats'), ('Pest: Racoons', 'Pest: Racoons'), ('Pest: Squirrels', 'Pest: Squirrels'), ('Pest: Skunks', 'Pest: Skunks'), ('Pest: Opossums', 'Pest: Opossums'), ('Pest: Snakes', 'Pest: Snakes'), ('Pest: Bats', 'Pest: Bats'), ('Pest: Birds', 'Pest: Birds'), ('Pest: Insects', 'Pest: Insects'), ('Insulation Install', 'Insulation Install'), ('Roofing Repair', 'Roofing Repair'), ('Construction', 'Construction'), ('Sheet Rock', 'Sheet Rock'), ('Cement Small Jobs', 'Cement Small Jobs')], max_length=100),
        ),
        migrations.AlterField(
            model_name='consultation',
            name='status',
            field=models.CharField(choices=[('New', 'New'), ('Working', 'Working'), ('Pending Customer', 'Pending Customer'), ('Customer', 'Customer')], default='New', max_length=20),
        ),
    ]
| 50.666667 | 593 | 0.602796 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Attic', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='consultation',
name='service',
field=models.CharField(choices=[('Pest: Mice', 'Pest: Mice'), ('Pest: Rats', 'Pest: Rats'), ('Pest: Racoons', 'Pest: Racoons'), ('Pest: Squirrels', 'Pest: Squirrels'), ('Pest: Skunks', 'Pest: Skunks'), ('Pest: Opossums', 'Pest: Opossums'), ('Pest: Snakes', 'Pest: Snakes'), ('Pest: Bats', 'Pest: Bats'), ('Pest: Birds', 'Pest: Birds'), ('Pest: Insects', 'Pest: Insects'), ('Insulation Install', 'Insulation Install'), ('Roofing Repair', 'Roofing Repair'), ('Construction', 'Construction'), ('Sheet Rock', 'Sheet Rock'), ('Cement Small Jobs', 'Cement Small Jobs')], max_length=100),
),
migrations.AlterField(
model_name='consultation',
name='status',
field=models.CharField(choices=[('New', 'New'), ('Working', 'Working'), ('Pending Customer', 'Pending Customer'), ('Customer', 'Customer')], default='New', max_length=20),
),
]
| true | true |
f738e07889769bc26b37aa793f9905064e6a4642 | 6,970 | py | Python | databricks_cli/pipelines/api.py | itaiw/databricks-cli | fdcb92499da5cda90c4436d5f09cdc697a8f46b9 | [
"Apache-2.0"
] | null | null | null | databricks_cli/pipelines/api.py | itaiw/databricks-cli | fdcb92499da5cda90c4436d5f09cdc697a8f46b9 | [
"Apache-2.0"
] | null | null | null | databricks_cli/pipelines/api.py | itaiw/databricks-cli | fdcb92499da5cda90c4436d5f09cdc697a8f46b9 | [
"Apache-2.0"
] | null | null | null | # Databricks CLI
# Copyright 2017 Databricks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"), except
# that the use of services to which certain application programming
# interfaces (each, an "API") connect requires that the user first obtain
# a license for the use of the APIs from Databricks, Inc. ("Databricks"),
# by creating an account at www.databricks.com and agreeing to either (a)
# the Community Edition Terms of Service, (b) the Databricks Terms of
# Service, or (c) another written agreement between Licensee and Databricks
# for the use of the APIs.
#
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from hashlib import sha1
import os
from six.moves import urllib
from databricks_cli.sdk import DeltaPipelinesService
from databricks_cli.dbfs.api import DbfsApi
from databricks_cli.dbfs.dbfs_path import DbfsPath
# Chunk size used when hashing local library files in _get_hashed_path.
BUFFER_SIZE = 1024 * 64
# DBFS directory under which locally-supplied libraries are uploaded.
base_pipelines_dir = 'dbfs:/pipelines/code'
# Library types that may reference a local file; maven coordinates are
# always treated as external in _identify_local_libraries.
supported_lib_types = {'jar', 'whl', 'maven'}
class PipelinesApi(object):
    """Client-side helper for the Databricks (Delta) Pipelines API.

    Wraps DeltaPipelinesService and transparently uploads any
    locally-referenced libraries to DBFS before deploying a pipeline spec.
    """
    def __init__(self, api_client):
        self.client = DeltaPipelinesService(api_client)
        self.dbfs_client = DbfsApi(api_client)
    def deploy(self, spec, headers=None):
        # Upload local libraries to DBFS and rewrite the spec's library
        # list to point at the uploaded (content-addressed) copies, then
        # PUT the spec to create/update the pipeline.
        lib_objects = LibraryObject.from_json(spec.get('libraries', []))
        local_lib_objects, external_lib_objects = \
            self._identify_local_libraries(lib_objects)
        spec['libraries'] = LibraryObject.to_json(external_lib_objects +
                                                  self._upload_local_libraries(local_lib_objects))
        pipeline_id = spec['id']
        self.client.client.perform_query('PUT', '/pipelines/{}'.format(pipeline_id), data=spec,
                                         headers=headers)
    def delete(self, pipeline_id, headers=None):
        # Thin pass-through to the service layer.
        self.client.delete(pipeline_id, headers)
    def get(self, pipeline_id, headers=None):
        # Thin pass-through to the service layer.
        return self.client.get(pipeline_id, headers)
    def reset(self, pipeline_id, headers=None):
        # Thin pass-through to the service layer.
        self.client.reset(pipeline_id, headers)
    @staticmethod
    def _identify_local_libraries(lib_objects):
        """
        Partitions the given set of libraries into local and those already present in dbfs/s3 etc.
        Local libraries are (currently) jar files with a file scheme or no scheme at all.
        All other libraries should be present in a supported external source.
        :param lib_objects: List[LibraryObject]
        :return: List[List[LibraryObject], List[LibraryObject]] ([Local, External])
        """
        local_lib_objects, external_lib_objects = [], []
        for lib_object in lib_objects:
            if lib_object.lib_type == 'maven':
                # Maven coordinates are never local files.
                external_lib_objects.append(lib_object)
                continue
            parsed_uri = urllib.parse.urlparse(lib_object.path)
            if lib_object.lib_type in supported_lib_types and parsed_uri.scheme == '':
                local_lib_objects.append(lib_object)
            elif lib_object.lib_type in supported_lib_types and parsed_uri.scheme.lower() == 'file':
                # exactly 1 or 3
                if parsed_uri.path.startswith('//') or parsed_uri.netloc != '':
                    raise RuntimeError('invalid file uri scheme, '
                                       'did you mean to use file:/ or file:///')
                local_lib_objects.append(LibraryObject(lib_object.lib_type, parsed_uri.path))
            else:
                external_lib_objects.append(lib_object)
        return local_lib_objects, external_lib_objects
    def _upload_local_libraries(self, local_lib_objects):
        # Map each local library to its content-addressed DBFS path and
        # upload only the files that DBFS does not already have.
        remote_lib_objects = [LibraryObject(llo.lib_type, self._get_hashed_path(llo.path))
                              for llo in local_lib_objects]
        transformed_remote_lib_objects = [LibraryObject(rlo.lib_type, DbfsPath(rlo.path))
                                          for rlo in remote_lib_objects]
        upload_files = [llo_tuple for llo_tuple in
                        zip(local_lib_objects, transformed_remote_lib_objects)
                        if not self.dbfs_client.file_exists(llo_tuple[1].path)]
        for llo, rlo in upload_files:
            self.dbfs_client.put_file(llo.path, rlo.path, False)
        return remote_lib_objects
    @staticmethod
    def _get_hashed_path(path):
        """
        Finds the corresponding dbfs file path for the file located at the supplied path by
        calculating its hash using SHA1.
        :param path: Local File Path
        :return: Remote Path (pipeline_base_dir + file_hash (dot) file_extension)
        """
        hash_buffer = sha1()
        with open(path, 'rb') as f:
            while True:
                data = f.read(BUFFER_SIZE)
                if not data:
                    break
                hash_buffer.update(data)
        file_hash = hash_buffer.hexdigest()
        # splitext includes the period in the extension
        extension = os.path.splitext(path)[1][1:]
        if extension == 'whl':
            # Wheels need to follow the format described in the PEP, so we simply
            # pre-pend the content hash to the wheel_name
            # basename in Python returns the extension as well
            wheel_name = os.path.basename(path)
            path = '{}/{}/{}'.format(base_pipelines_dir, file_hash, wheel_name)
        else:
            path = '{}/{}.{}'.format(base_pipelines_dir, file_hash, extension)
        return path
class LibraryObject(object):
    """A pipeline library reference: a type tag ('jar', 'whl', 'maven', ...)
    paired with its path or coordinates."""
    def __init__(self, lib_type, lib_path):
        self.path = lib_type and lib_path or lib_path  # keep simple assignment below
        self.path = lib_path
        self.lib_type = lib_type
    @classmethod
    def from_json(cls, libraries):
        """
        Serialize Libraries into LibraryObjects
        :param libraries: List[Dictionary{String, String}]
        :return: List[LibraryObject]
        """
        return [
            cls(lib_type, path)
            for library in libraries
            for lib_type, path in library.items()
        ]
    @classmethod
    def to_json(cls, lib_objects):
        """
        Deserialize LibraryObjects
        :param lib_objects: List[LibraryObject]
        :return: List[Dictionary{String, String}]
        """
        return [{lib_object.lib_type: lib_object.path} for lib_object in lib_objects]
    def __repr__(self):
        # Added for debuggability; not part of any wire format.
        return 'LibraryObject(lib_type={!r}, lib_path={!r})'.format(
            self.lib_type, self.path)
    def __eq__(self, other):
        if not isinstance(other, LibraryObject):
            return NotImplemented
        return self.path == other.path and self.lib_type == other.lib_type
    def __hash__(self):
        # Defining __eq__ alone implicitly sets __hash__ to None; restore
        # hashability consistently with equality so instances can be used
        # in sets and as dict keys.
        return hash((self.lib_type, self.path))
| 41 | 100 | 0.649785 |
from hashlib import sha1
import os
from six.moves import urllib
from databricks_cli.sdk import DeltaPipelinesService
from databricks_cli.dbfs.api import DbfsApi
from databricks_cli.dbfs.dbfs_path import DbfsPath
BUFFER_SIZE = 1024 * 64
base_pipelines_dir = 'dbfs:/pipelines/code'
supported_lib_types = {'jar', 'whl', 'maven'}
class PipelinesApi(object):
def __init__(self, api_client):
self.client = DeltaPipelinesService(api_client)
self.dbfs_client = DbfsApi(api_client)
def deploy(self, spec, headers=None):
lib_objects = LibraryObject.from_json(spec.get('libraries', []))
local_lib_objects, external_lib_objects = \
self._identify_local_libraries(lib_objects)
spec['libraries'] = LibraryObject.to_json(external_lib_objects +
self._upload_local_libraries(local_lib_objects))
pipeline_id = spec['id']
self.client.client.perform_query('PUT', '/pipelines/{}'.format(pipeline_id), data=spec,
headers=headers)
def delete(self, pipeline_id, headers=None):
self.client.delete(pipeline_id, headers)
def get(self, pipeline_id, headers=None):
return self.client.get(pipeline_id, headers)
def reset(self, pipeline_id, headers=None):
self.client.reset(pipeline_id, headers)
@staticmethod
def _identify_local_libraries(lib_objects):
local_lib_objects, external_lib_objects = [], []
for lib_object in lib_objects:
if lib_object.lib_type == 'maven':
external_lib_objects.append(lib_object)
continue
parsed_uri = urllib.parse.urlparse(lib_object.path)
if lib_object.lib_type in supported_lib_types and parsed_uri.scheme == '':
local_lib_objects.append(lib_object)
elif lib_object.lib_type in supported_lib_types and parsed_uri.scheme.lower() == 'file':
if parsed_uri.path.startswith('//') or parsed_uri.netloc != '':
raise RuntimeError('invalid file uri scheme, '
'did you mean to use file:/ or file:///')
local_lib_objects.append(LibraryObject(lib_object.lib_type, parsed_uri.path))
else:
external_lib_objects.append(lib_object)
return local_lib_objects, external_lib_objects
def _upload_local_libraries(self, local_lib_objects):
remote_lib_objects = [LibraryObject(llo.lib_type, self._get_hashed_path(llo.path))
for llo in local_lib_objects]
transformed_remote_lib_objects = [LibraryObject(rlo.lib_type, DbfsPath(rlo.path))
for rlo in remote_lib_objects]
upload_files = [llo_tuple for llo_tuple in
zip(local_lib_objects, transformed_remote_lib_objects)
if not self.dbfs_client.file_exists(llo_tuple[1].path)]
for llo, rlo in upload_files:
self.dbfs_client.put_file(llo.path, rlo.path, False)
return remote_lib_objects
@staticmethod
def _get_hashed_path(path):
hash_buffer = sha1()
with open(path, 'rb') as f:
while True:
data = f.read(BUFFER_SIZE)
if not data:
break
hash_buffer.update(data)
file_hash = hash_buffer.hexdigest()
extension = os.path.splitext(path)[1][1:]
if extension == 'whl':
wheel_name = os.path.basename(path)
path = '{}/{}/{}'.format(base_pipelines_dir, file_hash, wheel_name)
else:
path = '{}/{}.{}'.format(base_pipelines_dir, file_hash, extension)
return path
class LibraryObject(object):
def __init__(self, lib_type, lib_path):
self.path = lib_path
self.lib_type = lib_type
@classmethod
def from_json(cls, libraries):
lib_objects = []
for library in libraries:
for lib_type, path in library.items():
lib_objects.append(LibraryObject(lib_type, path))
return lib_objects
@classmethod
def to_json(cls, lib_objects):
libraries = []
for lib_object in lib_objects:
libraries.append({lib_object.lib_type: lib_object.path})
return libraries
def __eq__(self, other):
if not isinstance(other, LibraryObject):
return NotImplemented
return self.path == other.path and self.lib_type == other.lib_type
| true | true |
f738e07fab3be3ad49fc0d2c35aa572ce82c17c9 | 3,616 | py | Python | amical/_cli/commands/clean.py | tomasstolker/AMICAL | c9bbf8e4a468313efff3b349fffea7648c411a51 | [
"MIT"
] | null | null | null | amical/_cli/commands/clean.py | tomasstolker/AMICAL | c9bbf8e4a468313efff3b349fffea7648c411a51 | [
"MIT"
] | null | null | null | amical/_cli/commands/clean.py | tomasstolker/AMICAL | c9bbf8e4a468313efff3b349fffea7648c411a51 | [
"MIT"
] | null | null | null | import os
from datetime import datetime
from glob import glob
from pathlib import Path
from astropy.io import fits
from matplotlib import pyplot as plt
from tabulate import tabulate
from termcolor import cprint
from tqdm import tqdm
import amical
def _select_data_file(args, process):
    """Show report with the data found and allow to select one to be treated.

    Prints a table of the fits files in args.datadir (target/date/instrument
    from the primary header) and returns (filename, header) for the file
    chosen via --file or interactive input.
    """
    l_file = sorted(glob("%s/*.fits" % args.datadir))
    if len(l_file) == 0:
        print("No fits files found in %s, check --datadir." % args.datadir)
        # NOTE(review): returns an int here while the success path returns a
        # (filename, header) tuple; callers guard against the empty case first.
        return 1
    headers = ["FILENAME", "TARGET", "DATE", "INSTRUM", "INDEX"]
    index_file = []
    d = []
    # One table row per fits file, built from primary-header keywords.
    for i, f in enumerate(l_file):
        with fits.open(f) as hdu:
            hdr = hdu[0].header
        target = hdr.get("OBJECT", None)
        date = hdr.get("DATE-OBS", None)
        ins = hdr.get("INSTRUME", None)
        index_file.append(i)
        filename = f.split("/")[-1]
        d.append([filename, target, date, ins, i])
    print(tabulate(d, headers=headers))
    # --file >= 0 selects non-interactively; otherwise prompt the user.
    if args.file >= 0:
        choosen_index = args.file
    else:
        choosen_index = int(input("\nWhich file to %s?\n" % process))
    try:
        filename = l_file[choosen_index]
    except IndexError:
        print(
            "Selected index (%i) not valid (only %i files found)."
            % (choosen_index, len(l_file))
        )
        raise SystemExit
    else:
        # Re-open the chosen file to hand the fresh primary header back.
        with fits.open(filename) as hdul:
            hdr = hdul[0].header
    return filename, hdr
def perform_clean(args):
    """Clean the data with AMICAL (CLI entry point).

    Cleans either every FITS file in ``args.datadir`` (``--all``) or a
    single interactively selected file, writing the cleaned cube(s) to
    ``args.outdir`` with AMICAL provenance keywords in the header.

    Returns 0 on success, 1 on a bad ``--datadir`` (CLI exit-code
    convention).
    """
    cprint("---- AMICAL clean process ----", "cyan")

    clean_param = {
        "isz": args.isz,
        "r1": args.r1,
        "dr": args.dr,
        "apod": args.apod,
        "window": args.window,
        "f_kernel": args.kernel,
    }

    if not os.path.exists(args.datadir):
        print(
            "%s directory not found, check --datadir. AMICAL look for data only in this specified directory."
            % args.datadir
        )
        return 1

    l_file = sorted(glob("%s/*.fits" % args.datadir))
    if len(l_file) == 0:
        print("No fits files found in %s, check --datadir." % args.datadir)
        return 1

    if not args.all:
        filename, hdr = _select_data_file(args, process="clean")

        # --check only makes sense for a single selected file; previously a
        # combined --all --check crashed with a NameError on `filename`.
        if args.check:
            amical.show_clean_params(filename, **clean_param)
            plt.show(block=True)
            return 0

    if not os.path.exists(args.outdir):
        os.mkdir(args.outdir)

    clean_param["clip"] = args.clip
    clean_param["sky"] = args.sky

    def _stamp_provenance(hdr):
        # Record what was done, when, and with which parameters (previously
        # only the single-file branch recorded time and parameters).
        hdr["HIERARCH AMICAL step"] = "CLEANED"
        hdr["HIERARCH AMICAL time"] = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
        for k in clean_param:
            hdr["HIERARCH AMICAL params %s" % k] = clean_param[k]

    if args.all:
        # Clean all files in --datadir
        for f in tqdm(l_file, ncols=100, desc="# files"):
            # Context manager closes the FITS file; fits.open(f)[0].header
            # leaked one file handle per iteration.
            with fits.open(f) as hdul:
                hdr = hdul[0].header
            _stamp_provenance(hdr)
            cube = amical.select_clean_data(f, **clean_param, display=True)
            f_clean = os.path.join(args.outdir, Path(f).stem + "_cleaned.fits")
            fits.writeto(f_clean, cube, header=hdr, overwrite=True)
    else:
        # Or clean just the specified file (in --datadir)
        _stamp_provenance(hdr)
        cube = amical.select_clean_data(filename, **clean_param, display=True)
        if args.plot:
            plt.show()
        f_clean = os.path.join(args.outdir, Path(filename).stem + "_cleaned.fits")
        fits.writeto(f_clean, cube, header=hdr, overwrite=True)
    return 0
| 30.386555 | 109 | 0.589878 | import os
from datetime import datetime
from glob import glob
from pathlib import Path
from astropy.io import fits
from matplotlib import pyplot as plt
from tabulate import tabulate
from termcolor import cprint
from tqdm import tqdm
import amical
def _select_data_file(args, process):
l_file = sorted(glob("%s/*.fits" % args.datadir))
if len(l_file) == 0:
print("No fits files found in %s, check --datadir." % args.datadir)
return 1
headers = ["FILENAME", "TARGET", "DATE", "INSTRUM", "INDEX"]
index_file = []
d = []
for i, f in enumerate(l_file):
with fits.open(f) as hdu:
hdr = hdu[0].header
target = hdr.get("OBJECT", None)
date = hdr.get("DATE-OBS", None)
ins = hdr.get("INSTRUME", None)
index_file.append(i)
filename = f.split("/")[-1]
d.append([filename, target, date, ins, i])
print(tabulate(d, headers=headers))
if args.file >= 0:
choosen_index = args.file
else:
choosen_index = int(input("\nWhich file to %s?\n" % process))
try:
filename = l_file[choosen_index]
except IndexError:
print(
"Selected index (%i) not valid (only %i files found)."
% (choosen_index, len(l_file))
)
raise SystemExit
else:
with fits.open(filename) as hdul:
hdr = hdul[0].header
return filename, hdr
def perform_clean(args):
cprint("---- AMICAL clean process ----", "cyan")
clean_param = {
"isz": args.isz,
"r1": args.r1,
"dr": args.dr,
"apod": args.apod,
"window": args.window,
"f_kernel": args.kernel,
}
if not os.path.exists(args.datadir):
print(
"%s directory not found, check --datadir. AMICAL look for data only in this specified directory."
% args.datadir
)
return 1
l_file = sorted(glob("%s/*.fits" % args.datadir))
if len(l_file) == 0:
print("No fits files found in %s, check --datadir." % args.datadir)
return 1
if not args.all:
filename, hdr = _select_data_file(args, process="clean")
if args.check:
amical.show_clean_params(filename, **clean_param)
plt.show(block=True)
return 0
if not os.path.exists(args.outdir):
os.mkdir(args.outdir)
clean_param["clip"] = args.clip
clean_param["sky"] = args.sky
if args.all:
for f in tqdm(l_file, ncols=100, desc="# files"):
hdr = fits.open(f)[0].header
hdr["HIERARCH AMICAL step"] = "CLEANED"
cube = amical.select_clean_data(f, **clean_param, display=True)
f_clean = os.path.join(args.outdir, Path(f).stem + "_cleaned.fits")
fits.writeto(f_clean, cube, header=hdr, overwrite=True)
else:
hdr["HIERARCH AMICAL step"] = "CLEANED"
now = datetime.now()
dt_string = now.strftime("%d/%m/%Y %H:%M:%S")
hdr["HIERARCH AMICAL time"] = dt_string
for k in clean_param:
hdr["HIERARCH AMICAL params %s" % k] = clean_param[k]
cube = amical.select_clean_data(filename, **clean_param, display=True)
if args.plot:
plt.show()
f_clean = os.path.join(args.outdir, Path(filename).stem + "_cleaned.fits")
fits.writeto(f_clean, cube, header=hdr, overwrite=True)
return 0
| true | true |
f738e084271100fae4934591514291316a9bafdd | 1,500 | py | Python | ui/mext.py | szymonkaliski/nott | fa85e64b570f71733ea199dddbd0bc0f013a613b | [
"MIT"
] | 25 | 2019-07-01T14:58:48.000Z | 2021-11-13T17:00:44.000Z | ui/mext.py | szymonkaliski/nott | fa85e64b570f71733ea199dddbd0bc0f013a613b | [
"MIT"
] | 6 | 2019-12-30T02:50:19.000Z | 2021-05-10T16:41:47.000Z | ui/mext.py | szymonkaliski/nott | fa85e64b570f71733ea199dddbd0bc0f013a613b | [
"MIT"
] | 2 | 2020-01-05T13:02:07.000Z | 2020-05-21T15:54:57.000Z | # FIXME: fix all "happy paths coding" issues
import liblo
from threading import Thread
class Mext(object):
    """Thin OSC wrapper around a monome grid ("mext") device.

    The device's OSC address is discovered asynchronously through serialosc;
    grid key presses are forwarded to an optional user callback, and LED
    updates are sent on background threads so callers never block.
    """

    # OSC address of the grid; stays None until serialosc answers
    # (set in on_serialosc_device).
    device = None
    # fn(x, y, edge) registered via set_grid_key_callback; None until then.
    # Class-level default prevents an AttributeError when a key event
    # arrives before any callback has been registered.
    grid_key_callback = None

    def __init__(self, device_port=5000):
        self.device_receiver = liblo.ServerThread(device_port)
        self.device_receiver.add_method("/monome/grid/key", "iii", self.on_grid_key)
        self.device_receiver.add_method(
            "/serialosc/device", "ssi", self.on_serialosc_device
        )
        self.device_receiver.start()

        # Ask serialosc (on port 12002) for connected devices; the answer
        # comes back on /serialosc/device and sets self.device.
        liblo.send(liblo.Address(12002), "/serialosc/list", "127.0.0.1", device_port)

    def set_grid_key_callback(self, fn):
        """Register ``fn(x, y, edge)`` to be called on every key event."""
        self.grid_key_callback = fn

    def set_led_level(self, x, y, value):
        """Set a single LED's brightness (non-blocking)."""
        if self.device is None:
            # Device not discovered yet; sending would fail in the thread.
            return
        Thread(
            target=(
                lambda: liblo.send(
                    self.device, "/monome/grid/led/level/set", x, y, value
                )
            )
        ).start()

    def set_led_map(self, offset_x, offset_y, values):
        """Set a block of LED levels at the given offset (non-blocking)."""
        if self.device is None:
            return
        Thread(
            target=(
                lambda: liblo.send(
                    self.device,
                    "/monome/grid/led/level/map",
                    offset_x,
                    offset_y,
                    *values
                )
            )
        ).start()

    def on_grid_key(self, path, args):
        # OSC handler for /monome/grid/key ("iii": x, y, press/release edge).
        x, y, edge = args
        if self.grid_key_callback:
            self.grid_key_callback(x, y, edge)

    def on_serialosc_device(self, path, args):
        # OSC handler for /serialosc/device ("ssi"); only the port is used.
        _, sysId, port = args
        self.device = liblo.Address(port)
| 26.315789 | 85 | 0.544 |
import liblo
from threading import Thread
class Mext(object):
device = None
def __init__(self, device_port=5000):
self.device_receiver = liblo.ServerThread(device_port)
self.device_receiver.add_method("/monome/grid/key", "iii", self.on_grid_key)
self.device_receiver.add_method(
"/serialosc/device", "ssi", self.on_serialosc_device
)
self.device_receiver.start()
liblo.send(liblo.Address(12002), "/serialosc/list", "127.0.0.1", device_port)
def set_grid_key_callback(self, fn):
self.grid_key_callback = fn
def set_led_level(self, x, y, value):
Thread(
target=(
lambda: liblo.send(
self.device, "/monome/grid/led/level/set", x, y, value
)
)
).start()
def set_led_map(self, offset_x, offset_y, values):
Thread(
target=(
lambda: liblo.send(
self.device,
"/monome/grid/led/level/map",
offset_x,
offset_y,
*values
)
)
).start()
def on_grid_key(self, path, args):
x, y, edge = args
if self.grid_key_callback:
self.grid_key_callback(x, y, edge)
def on_serialosc_device(self, path, args):
_, sysId, port = args
self.device = liblo.Address(port)
| true | true |
f738e0bea76b5ce7f1a12a4e6443af286201917a | 33,549 | py | Python | src/at/utils/_vendor/tomlkit/items.py | alextremblay/si-utils | 1377ffd9dbefe63ac51efcca638fb583b2c89628 | [
"MIT"
] | 1 | 2021-08-21T03:26:14.000Z | 2021-08-21T03:26:14.000Z | src/at/utils/_vendor/tomlkit/items.py | alextremblay/si-utils | 1377ffd9dbefe63ac51efcca638fb583b2c89628 | [
"MIT"
] | null | null | null | src/at/utils/_vendor/tomlkit/items.py | alextremblay/si-utils | 1377ffd9dbefe63ac51efcca638fb583b2c89628 | [
"MIT"
] | null | null | null | from __future__ import unicode_literals
import re
import string
from datetime import date
from datetime import datetime
from datetime import time
from enum import Enum
from typing import Any
from typing import Dict
from typing import Generator
from typing import List
from typing import Optional
from typing import Union
from ._compat import PY2
from ._compat import PY38
from ._compat import MutableMapping
from ._compat import decode
from ._compat import long
from ._compat import unicode
from ._utils import escape_string
if PY2:
from functools32 import lru_cache
else:
from functools import lru_cache
def item(value, _parent=None, _sort_keys=False):
    """Convert a plain Python value into its TOML :class:`Item` equivalent.

    ``Item`` instances are returned unchanged.  The ``bool`` check precedes
    the ``int`` check on purpose (``bool`` is a subclass of ``int``), and
    ``datetime`` precedes ``date`` for the same reason.  Dicts become
    ``Table``; lists become an ``AoT`` when the first element is a dict,
    otherwise an ``Array``.  Raises ``ValueError`` for unsupported types.
    """
    from .container import Container

    if isinstance(value, Item):
        return value

    if isinstance(value, bool):
        return Bool(value, Trivia())
    elif isinstance(value, int):
        return Integer(value, Trivia(), str(value))
    elif isinstance(value, float):
        return Float(value, Trivia(), str(value))
    elif isinstance(value, dict):
        val = Table(Container(), Trivia(), False)
        # Sort key puts non-dict entries before sub-tables; when _sort_keys
        # is set, entries are additionally ordered alphabetically.
        for k, v in sorted(
            value.items(),
            key=lambda i: (isinstance(i[1], dict), i[0] if _sort_keys else 1),
        ):
            val[k] = item(v, _parent=val, _sort_keys=_sort_keys)

        return val
    elif isinstance(value, list):
        if value and isinstance(value[0], dict):
            a = AoT([])
        else:
            a = Array([], Trivia())

        for v in value:
            if isinstance(v, dict):
                table = Table(Container(), Trivia(), True)

                for k, _v in sorted(
                    v.items(),
                    key=lambda i: (isinstance(i[1], dict), i[0] if _sort_keys else 1),
                ):
                    i = item(_v, _sort_keys=_sort_keys)
                    # NOTE(review): `table` was just built as Table, so this
                    # branch never triggers here — presumably shared logic
                    # with an InlineTable path; confirm before relying on it.
                    if isinstance(table, InlineTable):
                        i.trivia.trail = ""

                    # `i` is already an Item, so item() here is a no-op.
                    table[k] = item(i, _sort_keys=_sort_keys)

                v = table

            a.append(v)

        return a
    elif isinstance(value, (str, unicode)):
        escaped = escape_string(value)

        return String(StringType.SLB, decode(value), escaped, Trivia())
    elif isinstance(value, datetime):
        return DateTime(
            value.year,
            value.month,
            value.day,
            value.hour,
            value.minute,
            value.second,
            value.microsecond,
            value.tzinfo,
            Trivia(),
            value.isoformat().replace("+00:00", "Z"),
        )
    elif isinstance(value, date):
        return Date(value.year, value.month, value.day, Trivia(), value.isoformat())
    elif isinstance(value, time):
        return Time(
            value.hour,
            value.minute,
            value.second,
            value.microsecond,
            value.tzinfo,
            Trivia(),
            value.isoformat(),
        )

    raise ValueError("Invalid type {}".format(type(value)))
class StringType(Enum):
    """
    The four TOML string delimiters.

    ``lru_cache`` on the predicate methods is safe here because enum
    members are singletons, so each cache holds at most four entries.
    """

    # Single Line Basic
    SLB = '"'
    # Multi Line Basic
    MLB = '"""'
    # Single Line Literal
    SLL = "'"
    # Multi Line Literal
    MLL = "'''"

    @property
    @lru_cache(maxsize=None)
    def unit(self):  # type: () -> str
        # Single delimiter character (multi-line values repeat it 3 times).
        return self.value[0]

    @lru_cache(maxsize=None)
    def is_basic(self):  # type: () -> bool
        return self in {StringType.SLB, StringType.MLB}

    @lru_cache(maxsize=None)
    def is_literal(self):  # type: () -> bool
        return self in {StringType.SLL, StringType.MLL}

    @lru_cache(maxsize=None)
    def is_singleline(self):  # type: () -> bool
        return self in {StringType.SLB, StringType.SLL}

    @lru_cache(maxsize=None)
    def is_multiline(self):  # type: () -> bool
        return self in {StringType.MLB, StringType.MLL}

    @lru_cache(maxsize=None)
    def toggle(self):  # type: () -> StringType
        # Flip single-line <-> multi-line while keeping basic/literal kind.
        return {
            StringType.SLB: StringType.MLB,
            StringType.MLB: StringType.SLB,
            StringType.SLL: StringType.MLL,
            StringType.MLL: StringType.SLL,
        }[self]
class BoolType(Enum):
    """
    The two TOML boolean keywords; truthiness matches the keyword.
    """

    TRUE = "true"
    FALSE = "false"

    @lru_cache(maxsize=None)
    def __bool__(self):
        return {BoolType.TRUE: True, BoolType.FALSE: False}[self]

    # Evaluated at class-creation time: only defines __nonzero__ on Python 2.
    if PY2:
        __nonzero__ = __bool__  # for PY2

    def __iter__(self):
        # Iterate/measure the raw keyword ("true"/"false").
        return iter(self.value)

    def __len__(self):
        return len(self.value)
class Trivia:
    """
    Whitespace and comment metadata attached to a TOML item.
    """

    def __init__(
        self, indent=None, comment_ws=None, comment=None, trail=None
    ):  # type: (str, str, str, str) -> None
        # Leading whitespace (before the value).
        self.indent = "" if not indent else indent
        # Whitespace between the value and its comment.
        self.comment_ws = "" if not comment_ws else comment_ws
        # The comment text itself, "#"-prefixed, or "" when absent.
        self.comment = "" if not comment else comment
        # Trailing newline; defaults to "\n" (an explicit "" is preserved).
        self.trail = "\n" if trail is None else trail
class KeyType(Enum):
    """
    The type of a Key.

    Keys can be bare (unquoted), or quoted using basic ("), or literal (')
    quotes following the same escaping rules as single-line StringType.
    """

    # Unquoted key (letters, digits, "-", "_").
    Bare = ""
    # Double-quoted key.
    Basic = '"'
    # Single-quoted key.
    Literal = "'"
class Key:
    """
    A TOML key: bare, basic-quoted, or literal-quoted.
    """

    def __init__(
        self, k, t=None, sep=None, dotted=False, original=None
    ):  # type: (str, Optional[KeyType], Optional[str], bool, Optional[str]) -> None
        if t is None:
            # Bare keys may only contain ASCII letters, digits, "-" and "_";
            # anything else forces basic quoting.
            bare_chars = string.ascii_letters + string.digits + "-" + "_"
            t = KeyType.Bare if all(c in bare_chars for c in k) else KeyType.Basic

        self.t = t
        self.sep = " = " if sep is None else sep
        self.key = k
        self._original = k if original is None else original
        self._dotted = dotted

    @property
    def delimiter(self):  # type: () -> str
        """Quote character for this key type ("" for bare keys)."""
        return self.t.value

    def is_dotted(self):  # type: () -> bool
        """Whether this key is part of a dotted key."""
        return self._dotted

    def is_bare(self):  # type: () -> bool
        """Whether this key is unquoted."""
        return self.t == KeyType.Bare

    def as_string(self):  # type: () -> str
        """Render the key with its surrounding delimiters."""
        return self.delimiter + self._original + self.delimiter

    def __hash__(self):  # type: () -> int
        return hash(self.key)

    def __eq__(self, other):  # type: (Key) -> bool
        # Compare key-to-key, or directly against a plain string.
        if isinstance(other, Key):
            other = other.key

        return self.key == other

    def __str__(self):  # type: () -> str
        return self.as_string()

    def __repr__(self):  # type: () -> str
        return "<Key {}>".format(self.as_string())
class Item(object):
    """
    An item within a TOML document.

    Base class of every value/structure node; holds the surrounding
    whitespace/comment metadata (``Trivia``) and defines the pickling
    protocol (``__reduce_ex__`` + per-class ``_getstate``).
    """

    def __init__(self, trivia):  # type: (Trivia) -> None
        self._trivia = trivia

    @property
    def trivia(self):  # type: () -> Trivia
        # Whitespace/comment metadata attached to this item.
        return self._trivia

    @property
    def discriminant(self):  # type: () -> int
        # Integer tag identifying the concrete item kind; set by subclasses.
        raise NotImplementedError()

    def as_string(self):  # type: () -> str
        # TOML representation of this item; set by subclasses.
        raise NotImplementedError()

    # Helpers

    def comment(self, comment):  # type: (str) -> Item
        """Attach an inline comment to this item (fluent, returns self)."""
        if not comment.strip().startswith("#"):
            # Normalize to a "#"-prefixed comment.
            comment = "# " + comment

        self._trivia.comment_ws = " "
        self._trivia.comment = comment

        return self

    def indent(self, indent):  # type: (int) -> Item
        """Set the item's leading indentation to `indent` spaces (fluent)."""
        if self._trivia.indent.startswith("\n"):
            # Preserve a leading blank-line separator.
            self._trivia.indent = "\n" + " " * indent
        else:
            self._trivia.indent = " " * indent

        return self

    def is_boolean(self):  # type: () -> bool
        return isinstance(self, Bool)

    def is_table(self):  # type: () -> bool
        return isinstance(self, Table)

    def is_inline_table(self):  # type: () -> bool
        return isinstance(self, InlineTable)

    def is_aot(self):  # type: () -> bool
        return isinstance(self, AoT)

    def _getstate(self, protocol=3):
        # Constructor arguments used to rebuild this item on unpickling.
        return (self._trivia,)

    def __reduce__(self):
        return self.__reduce_ex__(2)

    def __reduce_ex__(self, protocol):
        return self.__class__, self._getstate(protocol)
class Whitespace(Item):
    """
    A whitespace literal (raw spaces/newlines between items).
    """

    def __init__(self, s, fixed=False):  # type: (str, bool) -> None
        # Intentionally does not call Item.__init__: whitespace has no trivia.
        self._s = s
        self._fixed = fixed

    @property
    def s(self):  # type: () -> str
        return self._s

    @property
    def value(self):  # type: () -> str
        return self._s

    @property
    def trivia(self):  # type: () -> Trivia
        # Accessing trivia on whitespace is a programming error.
        raise RuntimeError("Called trivia on a Whitespace variant.")

    @property
    def discriminant(self):  # type: () -> int
        return 0

    def is_fixed(self):  # type: () -> bool
        # Whether this whitespace must be preserved as-is.
        return self._fixed

    def as_string(self):  # type: () -> str
        return self._s

    def __repr__(self):  # type: () -> str
        return "<{} {}>".format(self.__class__.__name__, repr(self._s))

    def _getstate(self, protocol=3):
        return self._s, self._fixed
class Comment(Item):
    """
    A standalone comment line; the text itself lives in the trivia.
    """

    @property
    def discriminant(self):  # type: () -> int
        return 1

    def as_string(self):  # type: () -> str
        # Full rendering including indentation and trailing newline.
        return "{}{}{}".format(
            self._trivia.indent, decode(self._trivia.comment), self._trivia.trail
        )

    def __str__(self):  # type: () -> str
        # Like as_string() but without the trailing newline.
        return "{}{}".format(self._trivia.indent, decode(self._trivia.comment))
class Integer(long, Item):
    """
    An integer literal.

    Subclasses ``int`` so it behaves like a number while retaining the raw
    source text for round-tripping.  Arithmetic with another Integer yields
    a new Integer; an explicit leading sign in the source ("+5") is
    preserved through arithmetic.
    """

    def __new__(cls, value, trivia, raw):  # type: (int, Trivia, str) -> Integer
        return super(Integer, cls).__new__(cls, value)

    def __init__(self, _, trivia, raw):  # type: (int, Trivia, str) -> None
        super(Integer, self).__init__(trivia)

        self._raw = raw
        self._sign = False

        # Remember whether the source spelled out an explicit sign.
        if re.match(r"^[+\-]\d+$", raw):
            self._sign = True

    @property
    def discriminant(self):  # type: () -> int
        return 2

    @property
    def value(self):  # type: () -> int
        return self

    def as_string(self):  # type: () -> str
        return self._raw

    def __add__(self, other):
        result = super(Integer, self).__add__(other)

        return self._new(result)

    def __radd__(self, other):
        result = super(Integer, self).__radd__(other)

        if isinstance(other, Integer):
            return self._new(result)

        return result

    def __sub__(self, other):
        result = super(Integer, self).__sub__(other)

        return self._new(result)

    def __rsub__(self, other):
        result = super(Integer, self).__rsub__(other)

        if isinstance(other, Integer):
            return self._new(result)

        return result

    def _new(self, result):
        """Wrap an arithmetic result, keeping the explicit-sign style."""
        raw = str(result)

        if self._sign:
            sign = "+" if result >= 0 else "-"
            # Use abs() so a negative result is not rendered with a doubled
            # sign (str(-5) already contains "-"; the old code produced "--5").
            raw = sign + str(abs(result))

        return Integer(result, self._trivia, raw)

    def _getstate(self, protocol=3):
        return int(self), self._trivia, self._raw
class Float(float, Item):
    """
    A float literal.

    Subclasses ``float`` so it behaves like a number while retaining the
    raw source text for round-tripping.  Arithmetic with another Float
    yields a new Float; an explicit leading sign in the source ("+1.5") is
    preserved through arithmetic.
    """

    def __new__(cls, value, trivia, raw):  # type: (float, Trivia, str) -> Integer
        return super(Float, cls).__new__(cls, value)

    def __init__(self, _, trivia, raw):  # type: (float, Trivia, str) -> None
        super(Float, self).__init__(trivia)

        self._raw = raw
        self._sign = False

        # Remember whether the source spelled out an explicit sign.
        if re.match(r"^[+\-].+$", raw):
            self._sign = True

    @property
    def discriminant(self):  # type: () -> int
        return 3

    @property
    def value(self):  # type: () -> float
        return self

    def as_string(self):  # type: () -> str
        return self._raw

    def __add__(self, other):
        result = super(Float, self).__add__(other)

        return self._new(result)

    def __radd__(self, other):
        result = super(Float, self).__radd__(other)

        if isinstance(other, Float):
            return self._new(result)

        return result

    def __sub__(self, other):
        result = super(Float, self).__sub__(other)

        return self._new(result)

    def __rsub__(self, other):
        result = super(Float, self).__rsub__(other)

        if isinstance(other, Float):
            return self._new(result)

        return result

    def _new(self, result):
        """Wrap an arithmetic result, keeping the explicit-sign style."""
        raw = str(result)

        if self._sign:
            sign = "+" if result >= 0 else "-"
            # Use abs() so a negative result is not rendered with a doubled
            # sign (str(-1.5) already contains "-"; the old code gave "--1.5").
            raw = sign + str(abs(result))

        return Float(result, self._trivia, raw)

    def _getstate(self, protocol=3):
        return float(self), self._trivia, self._raw
class Bool(Item):
    """
    A boolean literal.

    Unlike Integer/Float this does not subclass ``bool`` (which cannot be
    subclassed); it wraps the value and forwards truthiness and equality.
    """

    def __init__(self, t, trivia):  # type: (int, Trivia) -> None
        super(Bool, self).__init__(trivia)

        self._value = bool(t)

    @property
    def discriminant(self):  # type: () -> int
        return 4

    @property
    def value(self):  # type: () -> bool
        return self._value

    def as_string(self):  # type: () -> str
        # TOML booleans are lowercase "true"/"false".
        return str(self._value).lower()

    def _getstate(self, protocol=3):
        return self._value, self._trivia

    def __bool__(self):
        return self._value

    __nonzero__ = __bool__

    def __eq__(self, other):
        # Only comparable to genuine bools; everything else defers to the
        # other operand via NotImplemented.
        if not isinstance(other, bool):
            return NotImplemented

        return other == self._value

    def __hash__(self):
        return hash(self._value)

    def __repr__(self):
        return repr(self._value)
class DateTime(Item, datetime):
    """
    A datetime literal.

    Subclasses ``datetime`` so it behaves like one, while keeping the raw
    source text for round-tripping.  Arithmetic re-wraps the result via
    ``_new`` so the outcome stays a DateTime with the same trivia.
    """

    def __new__(
        cls,
        year,
        month,
        day,
        hour,
        minute,
        second,
        microsecond,
        tzinfo,
        trivia,
        raw,
        **kwargs
    ):  # type: (int, int, int, int, int, int, int, Optional[datetime.tzinfo], Trivia, str, Any) -> datetime
        return datetime.__new__(
            cls,
            year,
            month,
            day,
            hour,
            minute,
            second,
            microsecond,
            tzinfo=tzinfo,
            **kwargs
        )

    def __init__(
        self, year, month, day, hour, minute, second, microsecond, tzinfo, trivia, raw
    ):  # type: (int, int, int, int, int, int, int, Optional[datetime.tzinfo], Trivia, str) -> None
        super(DateTime, self).__init__(trivia)

        self._raw = raw

    @property
    def discriminant(self):  # type: () -> int
        return 5

    @property
    def value(self):  # type: () -> datetime
        return self

    def as_string(self):  # type: () -> str
        return self._raw

    def __add__(self, other):
        # On 3.8+ arithmetic is done on a plain datetime first — presumably
        # because subclass-preserving arithmetic there would call DateTime's
        # constructor with the wrong argument layout; confirm vs upstream.
        if PY38:
            result = datetime(
                self.year,
                self.month,
                self.day,
                self.hour,
                self.minute,
                self.second,
                self.microsecond,
                self.tzinfo,
            ).__add__(other)
        else:
            result = super(DateTime, self).__add__(other)

        return self._new(result)

    def __sub__(self, other):
        if PY38:
            result = datetime(
                self.year,
                self.month,
                self.day,
                self.hour,
                self.minute,
                self.second,
                self.microsecond,
                self.tzinfo,
            ).__sub__(other)
        else:
            result = super(DateTime, self).__sub__(other)

        # datetime - datetime yields a timedelta, which must not be wrapped.
        if isinstance(result, datetime):
            result = self._new(result)

        return result

    def _new(self, result):
        """Wrap an arithmetic result in a DateTime with regenerated raw text."""
        raw = result.isoformat()

        return DateTime(
            result.year,
            result.month,
            result.day,
            result.hour,
            result.minute,
            result.second,
            result.microsecond,
            result.tzinfo,
            self._trivia,
            raw,
        )

    def _getstate(self, protocol=3):
        return (
            self.year,
            self.month,
            self.day,
            self.hour,
            self.minute,
            self.second,
            self.microsecond,
            self.tzinfo,
            self._trivia,
            self._raw,
        )
class Date(Item, date):
    """
    A date literal.

    Subclasses ``date``; keeps the raw source text for round-tripping and
    re-wraps arithmetic results via ``_new``.
    """

    def __new__(cls, year, month, day, *_):  # type: (int, int, int, Any) -> date
        return date.__new__(cls, year, month, day)

    def __init__(
        self, year, month, day, trivia, raw
    ):  # type: (int, int, int, Trivia, str) -> None
        super(Date, self).__init__(trivia)

        self._raw = raw

    @property
    def discriminant(self):  # type: () -> int
        return 6

    @property
    def value(self):  # type: () -> date
        return self

    def as_string(self):  # type: () -> str
        return self._raw

    def __add__(self, other):
        # See DateTime.__add__ for the PY38 plain-date workaround.
        if PY38:
            result = date(self.year, self.month, self.day).__add__(other)
        else:
            result = super(Date, self).__add__(other)

        return self._new(result)

    def __sub__(self, other):
        if PY38:
            result = date(self.year, self.month, self.day).__sub__(other)
        else:
            result = super(Date, self).__sub__(other)

        # date - date yields a timedelta, which must not be wrapped.
        if isinstance(result, date):
            result = self._new(result)

        return result

    def _new(self, result):
        raw = result.isoformat()

        return Date(result.year, result.month, result.day, self._trivia, raw)

    def _getstate(self, protocol=3):
        return (self.year, self.month, self.day, self._trivia, self._raw)
class Time(Item, time):
    """
    A time literal.

    Subclasses ``time``; keeps the raw source text for round-tripping.
    No arithmetic wrapping is needed (``time`` defines none).
    """

    def __new__(
        cls, hour, minute, second, microsecond, tzinfo, *_
    ):  # type: (int, int, int, int, Optional[datetime.tzinfo], Any) -> time
        return time.__new__(cls, hour, minute, second, microsecond, tzinfo)

    def __init__(
        self, hour, minute, second, microsecond, tzinfo, trivia, raw
    ):  # type: (int, int, int, int, Optional[datetime.tzinfo], Trivia, str) -> None
        super(Time, self).__init__(trivia)

        self._raw = raw

    @property
    def discriminant(self):  # type: () -> int
        return 7

    @property
    def value(self):  # type: () -> time
        return self

    def as_string(self):  # type: () -> str
        return self._raw

    def _getstate(self, protocol=3):
        return (
            self.hour,
            self.minute,
            self.second,
            self.microsecond,
            self.tzinfo,
            self._trivia,
            self._raw,
        )
class Array(Item, list):
    """
    An array literal.

    Subclasses ``list`` with the *plain values* only; the parallel
    ``_value`` list additionally holds Whitespace/Comment items so the
    original formatting can be reproduced by ``as_string``.
    """

    def __init__(
        self, value, trivia, multiline=False
    ):  # type: (list, Trivia, bool) -> None
        super(Array, self).__init__(trivia)

        list.__init__(
            self, [v.value for v in value if not isinstance(v, (Whitespace, Comment))]
        )

        self._value = value
        self._multiline = multiline

    @property
    def discriminant(self):  # type: () -> int
        return 8

    @property
    def value(self):  # type: () -> list
        return self

    def multiline(self, multiline):  # type: (bool) -> self
        """Toggle one-element-per-line rendering (fluent, returns self)."""
        self._multiline = multiline

        return self

    def as_string(self):  # type: () -> str
        if not self._multiline:
            return "[{}]".format("".join(v.as_string() for v in self._value))

        # Multiline form: 4-space indented elements, one per line, with a
        # trailing comma; whitespace entries are skipped (newlines are added
        # here instead).
        s = "[\n" + self.trivia.indent + " " * 4
        s += (",\n" + self.trivia.indent + " " * 4).join(
            v.as_string() for v in self._value if not isinstance(v, Whitespace)
        )
        s += ",\n"
        s += "]"

        return s

    def append(self, _item):  # type: (Any) -> None
        # Keep the value list and the formatting list in sync; a ", "
        # separator is inserted before every element after the first.
        if self._value:
            self._value.append(Whitespace(", "))

        it = item(_item)
        super(Array, self).append(it.value)

        self._value.append(it)

    if not PY2:

        def clear(self):
            super(Array, self).clear()

            self._value.clear()

    def __iadd__(self, other):  # type: (list) -> Array
        if not isinstance(other, list):
            return NotImplemented

        for v in other:
            self.append(v)

        return self

    def __delitem__(self, key):
        super(Array, self).__delitem__(key)

        # Map the public (value-only) index onto _value, skipping
        # Whitespace/Comment entries; negative keys walk from the end.
        j = 0 if key >= 0 else -1
        for i, v in enumerate(self._value if key >= 0 else reversed(self._value)):
            if key < 0:
                i = -i - 1

            if isinstance(v, (Comment, Whitespace)):
                continue

            if j == key:
                del self._value[i]

                if i < 0 and abs(i) > len(self._value):
                    i += 1

                # Also drop the separator whitespace that followed the
                # removed element, if any.
                if i < len(self._value) - 1 and isinstance(self._value[i], Whitespace):
                    del self._value[i]

                break

            j += 1 if key >= 0 else -1

    def __str__(self):
        return str(
            [v.value for v in self._value if not isinstance(v, (Whitespace, Comment))]
        )

    def __repr__(self):
        return str(self)

    def _getstate(self, protocol=3):
        return self._value, self._trivia
class Table(Item, MutableMapping, dict):
    """
    A table literal.

    The authoritative content lives in the ``_value`` Container (which
    preserves ordering/whitespace); the ``dict`` base is kept as a mirror of
    the keyed entries so plain-dict operations keep working.
    """

    def __init__(
        self,
        value,
        trivia,
        is_aot_element,
        is_super_table=False,
        name=None,
        display_name=None,
    ):  # type: (tomlkit.container.Container, Trivia, bool, bool, Optional[str], Optional[str]) -> None
        super(Table, self).__init__(trivia)

        self.name = name
        self.display_name = display_name
        self._value = value
        self._is_aot_element = is_aot_element
        self._is_super_table = is_super_table

        # Mirror the keyed container entries into the dict base.
        for k, v in self._value.body:
            if k is not None:
                dict.__setitem__(self, k.key, v)

    @property
    def value(self):  # type: () -> tomlkit.container.Container
        return self._value

    @property
    def discriminant(self):  # type: () -> int
        return 9

    def add(self, key, item=None):  # type: (Union[Key, Item, str], Any) -> Item
        """Add a keyed item, or a keyless Comment/Whitespace item."""
        if item is None:
            if not isinstance(key, (Comment, Whitespace)):
                raise ValueError(
                    "Non comment/whitespace items must have an associated key"
                )

            key, item = None, key

        return self.append(key, item)

    def append(self, key, _item):  # type: (Union[Key, str], Any) -> Table
        """
        Appends a (key, item) to the table.
        """
        if not isinstance(_item, Item):
            _item = item(_item)

        self._value.append(key, _item)

        if isinstance(key, Key):
            key = key.key

        if key is not None:
            dict.__setitem__(self, key, _item)

        # Propagate this table's indentation to the appended item
        # (first capture group = the table's own indent run).
        m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
        if not m:
            return self

        indent = m.group(1)

        if not isinstance(_item, Whitespace):
            m = re.match("(?s)^([^ ]*)(.*)$", _item.trivia.indent)
            if not m:
                _item.trivia.indent = indent
            else:
                _item.trivia.indent = m.group(1) + indent + m.group(2)

        return self

    def raw_append(self, key, _item):  # type: (Union[Key, str], Any) -> Table
        """Like append() but without the indentation fix-up."""
        if not isinstance(_item, Item):
            _item = item(_item)

        self._value.append(key, _item)

        if isinstance(key, Key):
            key = key.key

        if key is not None:
            dict.__setitem__(self, key, _item)

        return self

    def remove(self, key):  # type: (Union[Key, str]) -> Table
        """Remove an entry from both the container and the dict mirror."""
        self._value.remove(key)

        if isinstance(key, Key):
            key = key.key

        if key is not None:
            dict.__delitem__(self, key)

        return self

    def is_aot_element(self):  # type: () -> bool
        # Whether this table is an element of an array of tables.
        return self._is_aot_element

    def is_super_table(self):  # type: () -> bool
        # Whether this table is an implicit parent ("super") table.
        return self._is_super_table

    def as_string(self):  # type: () -> str
        return self._value.as_string()

    # Helpers

    def indent(self, indent):  # type: (int) -> Table
        """Indent the table and every non-whitespace child by `indent` spaces."""
        super(Table, self).indent(indent)

        m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
        if not m:
            indent = ""
        else:
            indent = m.group(1)

        for k, item in self._value.body:
            if not isinstance(item, Whitespace):
                item.trivia.indent = indent + item.trivia.indent

        return self

    def get(self, key, default=None):  # type: (Any, Optional[Any]) -> Any
        return self._value.get(key, default)

    def setdefault(
        self, key, default=None
    ):  # type: (Union[Key, str], Any) -> Union[Item, Container]
        super(Table, self).setdefault(key, default=default)
        return self[key]

    def __getitem__(self, key):  # type: (Union[Key, str]) -> Item
        return self._value[key]

    def __setitem__(self, key, value):  # type: (Union[Key, str], Any) -> None
        # Only fix indentation for brand-new keys, not replacements.
        fix_indent = key not in self
        if not isinstance(value, Item):
            value = item(value)

        self._value[key] = value

        if key is not None:
            dict.__setitem__(self, key, value)

        m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
        if not m or not fix_indent:
            return

        indent = m.group(1)

        if not isinstance(value, Whitespace):
            m = re.match("(?s)^([^ ]*)(.*)$", value.trivia.indent)
            if not m:
                value.trivia.indent = indent
            else:
                value.trivia.indent = m.group(1) + indent + m.group(2)

    def __delitem__(self, key):  # type: (Union[Key, str]) -> None
        self.remove(key)

    def __len__(self):  # type: () -> int
        return len(self._value)

    def __iter__(self):  # type: () -> Iterator[str]
        return iter(self._value)

    def __repr__(self):  # type: () -> str
        return repr(self._value)

    def _getstate(self, protocol=3):
        return (
            self._value,
            self._trivia,
            self._is_aot_element,
            self._is_super_table,
            self.name,
            self.display_name,
        )
class InlineTable(Item, MutableMapping, dict):
    """
    An inline table literal (``{k = v, ...}``).

    Like Table, the authoritative content is the ``_value`` Container and
    the ``dict`` base is a mirror of the keyed entries.  ``_new`` marks
    tables created programmatically (vs. parsed), which changes the
    comma/space style emitted by ``as_string``.
    """

    def __init__(
        self, value, trivia, new=False
    ):  # type: (tomlkit.container.Container, Trivia, bool) -> None
        super(InlineTable, self).__init__(trivia)

        self._value = value
        self._new = new

        # Mirror the keyed container entries into the dict base.
        for k, v in self._value.body:
            if k is not None:
                dict.__setitem__(self, k.key, v)

    @property
    def discriminant(self):  # type: () -> int
        return 10

    @property
    def value(self):  # type: () -> Dict
        return self._value

    def append(self, key, _item):  # type: (Union[Key, str], Any) -> InlineTable
        """
        Appends a (key, item) to the table.
        """
        if not isinstance(_item, Item):
            _item = item(_item)

        if not isinstance(_item, (Whitespace, Comment)):
            # Separate entries with a single space; inline tables cannot
            # carry per-item comments, so any comment is dropped.
            if not _item.trivia.indent and len(self._value) > 0 and not self._new:
                _item.trivia.indent = " "

            if _item.trivia.comment:
                _item.trivia.comment = ""

        self._value.append(key, _item)

        if isinstance(key, Key):
            key = key.key

        if key is not None:
            dict.__setitem__(self, key, _item)

        return self

    def remove(self, key):  # type: (Union[Key, str]) -> InlineTable
        self._value.remove(key)

        if isinstance(key, Key):
            key = key.key

        if key is not None:
            dict.__delitem__(self, key)

        return self

    def as_string(self):  # type: () -> str
        buf = "{"
        for i, (k, v) in enumerate(self._value.body):
            if k is None:
                # Keyless trailing entry (whitespace): strip any dangling
                # separator before emitting it.
                if i == len(self._value.body) - 1:
                    if self._new:
                        buf = buf.rstrip(", ")
                    else:
                        buf = buf.rstrip(",")

                buf += v.as_string()

                continue

            buf += "{}{}{}{}{}{}".format(
                v.trivia.indent,
                k.as_string() + ("." if k.is_dotted() else ""),
                k.sep,
                v.as_string(),
                v.trivia.comment,
                v.trivia.trail.replace("\n", ""),
            )

            if i != len(self._value.body) - 1:
                buf += ","
                if self._new:
                    buf += " "

        buf += "}"

        return buf

    def get(self, key, default=None):  # type: (Any, Optional[Any]) -> Any
        return self._value.get(key, default)

    def setdefault(
        self, key, default=None
    ):  # type: (Union[Key, str], Any) -> Union[Item, Container]
        super(InlineTable, self).setdefault(key, default=default)
        return self[key]

    def __contains__(self, key):  # type: (Union[Key, str]) -> bool
        return key in self._value

    def __getitem__(self, key):  # type: (Union[Key, str]) -> Item
        return self._value[key]

    def __setitem__(self, key, value):  # type: (Union[Key, str], Any) -> None
        if not isinstance(value, Item):
            value = item(value)

        self._value[key] = value

        if key is not None:
            dict.__setitem__(self, key, value)
        # Inline-table values cannot carry comments.
        if value.trivia.comment:
            value.trivia.comment = ""

        # Propagate this table's indentation to the new value
        # (same fix-up as Table.__setitem__).
        m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
        if not m:
            return

        indent = m.group(1)

        if not isinstance(value, Whitespace):
            m = re.match("(?s)^([^ ]*)(.*)$", value.trivia.indent)
            if not m:
                value.trivia.indent = indent
            else:
                value.trivia.indent = m.group(1) + indent + m.group(2)

    def __delitem__(self, key):  # type: (Union[Key, str]) -> None
        self.remove(key)

    def __len__(self):  # type: () -> int
        return len(self._value)

    def __iter__(self):  # type: () -> Iterator[str]
        return iter(self._value)

    def __repr__(self):
        return repr(self._value)

    def _getstate(self, protocol=3):
        return (self._value, self._trivia)
class String(unicode, Item):
    """
    A string literal.

    Subclasses ``str`` with the *decoded* value; ``_original`` keeps the
    escaped source text and ``_t`` the delimiter type, so ``as_string``
    can reproduce the exact TOML representation.
    """

    def __new__(cls, t, value, original, trivia):
        return super(String, cls).__new__(cls, value)

    def __init__(
        self, t, _, original, trivia
    ):  # type: (StringType, str, original, Trivia) -> None
        super(String, self).__init__(trivia)

        self._t = t
        self._original = original

    @property
    def discriminant(self):  # type: () -> int
        return 11

    @property
    def value(self):  # type: () -> str
        return self

    def as_string(self):  # type: () -> str
        return "{}{}{}".format(self._t.value, decode(self._original), self._t.value)

    def __add__(self, other):
        result = super(String, self).__add__(other)

        return self._new(result)

    def __sub__(self, other):
        # NOTE(review): str defines no __sub__, so this super() call would
        # raise AttributeError if ever reached — presumably dead code copied
        # from the numeric items; confirm before relying on it.
        result = super(String, self).__sub__(other)

        return self._new(result)

    def _new(self, result):
        # The concatenated result is used as both value and "original",
        # i.e. the escaped source text is not recomputed here.
        return String(self._t, result, result, self._trivia)

    def _getstate(self, protocol=3):
        return self._t, unicode(self), self._original, self._trivia
class AoT(Item, list):
    """
    An array of tables literal.

    Subclasses ``list`` of the member Tables; ``_body`` holds the same
    tables and is what ``as_string`` renders.  ``parsed`` distinguishes
    tables read from a document (left untouched) from programmatically
    appended ones (which get separator newlines).
    """

    def __init__(
        self, body, name=None, parsed=False
    ):  # type: (List[Table], Optional[str], bool) -> None
        self.name = name
        self._body = []
        self._parsed = parsed

        super(AoT, self).__init__(Trivia(trail=""))

        for table in body:
            self.append(table)

    @property
    def body(self):  # type: () -> List[Table]
        return self._body

    @property
    def discriminant(self):  # type: () -> int
        return 12

    @property
    def value(self):  # type: () -> List[Dict[Any, Any]]
        return [v.value for v in self._body]

    def append(self, table):  # type: (Table) -> Table
        """Append a member table, propagating this AoT's indentation."""
        m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
        if m:
            indent = m.group(1)

            m = re.match("(?s)^([^ ]*)(.*)$", table.trivia.indent)
            if not m:
                table.trivia.indent = indent
            else:
                table.trivia.indent = m.group(1) + indent + m.group(2)

        # Programmatically added tables are separated by a blank line.
        if not self._parsed and "\n" not in table.trivia.indent and self._body:
            table.trivia.indent = "\n" + table.trivia.indent

        self._body.append(table)

        super(AoT, self).append(table)

        return table

    def as_string(self):  # type: () -> str
        b = ""
        for table in self._body:
            b += table.as_string()

        return b

    def __repr__(self):  # type: () -> str
        return "<AoT {}>".format(self.value)

    def _getstate(self, protocol=3):
        return self._body, self.name, self._parsed
class Null(Item):
    """A placeholder item representing the absence of a value."""

    def __init__(self):  # type: () -> None
        # Deliberately skip Item.__init__: a Null carries no trivia.
        pass

    @property
    def discriminant(self):  # type: () -> int
        return -1

    @property
    def value(self):  # type: () -> None
        return None

    def as_string(self):  # type: () -> str
        return ""

    def _getstate(self, protocol=3):
        return ()
| 25.186937 | 108 | 0.534442 | from __future__ import unicode_literals
import re
import string
from datetime import date
from datetime import datetime
from datetime import time
from enum import Enum
from typing import Any
from typing import Dict
from typing import Generator
from typing import List
from typing import Optional
from typing import Union
from ._compat import PY2
from ._compat import PY38
from ._compat import MutableMapping
from ._compat import decode
from ._compat import long
from ._compat import unicode
from ._utils import escape_string
if PY2:
from functools32 import lru_cache
else:
from functools import lru_cache
def item(value, _parent=None, _sort_keys=False):
    """Convert a plain Python *value* into the corresponding TOML ``Item``.

    Already-wrapped ``Item`` instances are returned unchanged.  Raises
    ``ValueError`` for unsupported types.
    """
    from .container import Container

    if isinstance(value, Item):
        return value

    # bool must be tested before int: bool is a subclass of int.
    if isinstance(value, bool):
        return Bool(value, Trivia())
    elif isinstance(value, int):
        return Integer(value, Trivia(), str(value))
    elif isinstance(value, float):
        return Float(value, Trivia(), str(value))
    elif isinstance(value, dict):
        val = Table(Container(), Trivia(), False)
        # Sub-tables sort after scalar values; keys are sorted only on request.
        for k, v in sorted(
            value.items(),
            key=lambda i: (isinstance(i[1], dict), i[0] if _sort_keys else 1),
        ):
            val[k] = item(v, _parent=val, _sort_keys=_sort_keys)

        return val
    elif isinstance(value, list):
        if value and isinstance(value[0], dict):
            # A list of dicts becomes an array of tables.
            a = AoT([])
        else:
            a = Array([], Trivia())

        for v in value:
            if isinstance(v, dict):
                table = Table(Container(), Trivia(), True)

                for k, _v in sorted(
                    v.items(),
                    key=lambda i: (isinstance(i[1], dict), i[0] if _sort_keys else 1),
                ):
                    i = item(_v, _sort_keys=_sort_keys)
                    if isinstance(table, InlineTable):
                        i.trivia.trail = ""

                    table[k] = item(i, _sort_keys=_sort_keys)

                v = table

            a.append(v)

        return a
    elif isinstance(value, (str, unicode)):
        escaped = escape_string(value)

        return String(StringType.SLB, decode(value), escaped, Trivia())
    # datetime must be tested before date: datetime is a date subclass.
    elif isinstance(value, datetime):
        return DateTime(
            value.year,
            value.month,
            value.day,
            value.hour,
            value.minute,
            value.second,
            value.microsecond,
            value.tzinfo,
            Trivia(),
            value.isoformat().replace("+00:00", "Z"),
        )
    elif isinstance(value, date):
        return Date(value.year, value.month, value.day, Trivia(), value.isoformat())
    elif isinstance(value, time):
        return Time(
            value.hour,
            value.minute,
            value.second,
            value.microsecond,
            value.tzinfo,
            Trivia(),
            value.isoformat(),
        )

    raise ValueError("Invalid type {}".format(type(value)))
class StringType(Enum):
    """The four TOML string quoting styles, keyed by their delimiter."""

    # Single Line Basic
    SLB = '"'
    # Multi Line Basic
    MLB = '"""'
    # Single Line Literal
    SLL = "'"
    # Multi Line Literal
    MLL = "'''"

    @property
    @lru_cache(maxsize=None)
    def unit(self):  # type: () -> str
        """A single delimiter character (first char of the full delimiter)."""
        return self.value[0]

    # The lru_cache on these methods is keyed on `self`; enum members are a
    # small fixed set of singletons, so the caches stay bounded.
    @lru_cache(maxsize=None)
    def is_basic(self):  # type: () -> bool
        return self in {StringType.SLB, StringType.MLB}

    @lru_cache(maxsize=None)
    def is_literal(self):  # type: () -> bool
        return self in {StringType.SLL, StringType.MLL}

    @lru_cache(maxsize=None)
    def is_singleline(self):  # type: () -> bool
        return self in {StringType.SLB, StringType.SLL}

    @lru_cache(maxsize=None)
    def is_multiline(self):  # type: () -> bool
        return self in {StringType.MLB, StringType.MLL}

    @lru_cache(maxsize=None)
    def toggle(self):  # type: () -> StringType
        """Return the style with single/multi-line flipped, same basic/literal kind."""
        return {
            StringType.SLB: StringType.MLB,
            StringType.MLB: StringType.SLB,
            StringType.SLL: StringType.MLL,
            StringType.MLL: StringType.SLL,
        }[self]
class BoolType(Enum):
    """The two TOML boolean literals, keyed by their source text."""

    TRUE = "true"
    FALSE = "false"

    @lru_cache(maxsize=None)
    def __bool__(self):
        return {BoolType.TRUE: True, BoolType.FALSE: False}[self]

    if PY2:
        __nonzero__ = __bool__  # for PY2

    # Iteration/length delegate to the literal text ("true"/"false").
    def __iter__(self):
        return iter(self.value)

    def __len__(self):
        return len(self.value)
class Trivia:
    """Whitespace and comment metadata that surrounds an item in a document."""

    def __init__(
        self, indent=None, comment_ws=None, comment=None, trail=None
    ):  # type: (str, str, str, str) -> None
        # Whitespace before a value.
        self.indent = indent if indent else ""
        # Whitespace after a value, but before a comment.
        self.comment_ws = comment_ws if comment_ws else ""
        # Comment, starting with # character, or empty string if no comment.
        self.comment = comment if comment else ""
        # Trailing newline; only an explicit None falls back to "\n".
        self.trail = "\n" if trail is None else trail
class KeyType(Enum):
    """The rendering style of a TOML key, keyed by its quote character."""

    # Unquoted key (letters, digits, '-' and '_' only).
    Bare = ""
    # Double-quoted key.
    Basic = '"'
    # Single-quoted key.
    Literal = "'"
class Key:
    """A TOML key, tracking its quoting style, separator and original rendering."""

    def __init__(
        self, k, t=None, sep=None, dotted=False, original=None
    ):  # type: (str, Optional[KeyType], Optional[str], bool, Optional[str]) -> None
        if t is None:
            # Keys restricted to [A-Za-z0-9_-] may stay bare; anything else
            # must be quoted.  Idiom fix: use a generator with any() instead
            # of materializing a throwaway list — it short-circuits.
            if any(
                c not in string.ascii_letters + string.digits + "-" + "_" for c in k
            ):
                t = KeyType.Basic
            else:
                t = KeyType.Bare

        self.t = t
        if sep is None:
            sep = " = "

        self.sep = sep
        self.key = k
        if original is None:
            original = k

        self._original = original
        self._dotted = dotted

    @property
    def delimiter(self):  # type: () -> str
        """The quote character surrounding the key ('' for bare keys)."""
        return self.t.value

    def is_dotted(self):  # type: () -> bool
        """Whether this key is part of a dotted key."""
        return self._dotted

    def is_bare(self):  # type: () -> bool
        """Whether this key is rendered without quotes."""
        return self.t == KeyType.Bare

    def as_string(self):  # type: () -> str
        """Render the key exactly as it appears in the TOML document."""
        return "{}{}{}".format(self.delimiter, self._original, self.delimiter)

    def __hash__(self):  # type: () -> int
        return hash(self.key)

    def __eq__(self, other):  # type: (Key) -> bool
        # Compares equal to other Keys and to plain strings with the same value.
        if isinstance(other, Key):
            return self.key == other.key

        return self.key == other

    def __str__(self):  # type: () -> str
        return self.as_string()

    def __repr__(self):  # type: () -> str
        return "<Key {}>".format(self.as_string())
class Item(object):
    """Base class for all TOML items; couples a value with its ``Trivia``."""

    def __init__(self, trivia):  # type: (Trivia) -> None
        self._trivia = trivia

    @property
    def trivia(self):  # type: () -> Trivia
        """The whitespace/comment metadata attached to this item."""
        return self._trivia

    @property
    def discriminant(self):  # type: () -> int
        # Each concrete subclass returns a distinct integer tag.
        raise NotImplementedError()

    def as_string(self):  # type: () -> str
        """Render the item as it appears in the TOML document."""
        raise NotImplementedError()

    # Helpers

    def comment(self, comment):  # type: (str) -> Item
        """Attach an inline comment to this item; returns self for chaining."""
        if not comment.strip().startswith("#"):
            comment = "# " + comment

        self._trivia.comment_ws = " "
        self._trivia.comment = comment

        return self

    def indent(self, indent):  # type: (int) -> Item
        """Set this item's indentation to *indent* spaces; returns self."""
        if self._trivia.indent.startswith("\n"):
            self._trivia.indent = "\n" + " " * indent
        else:
            self._trivia.indent = " " * indent

        return self

    def is_boolean(self):  # type: () -> bool
        return isinstance(self, Bool)

    def is_table(self):  # type: () -> bool
        return isinstance(self, Table)

    def is_inline_table(self):  # type: () -> bool
        return isinstance(self, InlineTable)

    def is_aot(self):  # type: () -> bool
        return isinstance(self, AoT)

    def _getstate(self, protocol=3):
        # Subclasses override to return their constructor arguments.
        return (self._trivia,)

    def __reduce__(self):
        return self.__reduce_ex__(2)

    def __reduce_ex__(self, protocol):
        return self.__class__, self._getstate(protocol)
class Whitespace(Item):
    """An item consisting solely of whitespace, carrying no value."""

    def __init__(self, s, fixed=False):  # type: (str, bool) -> None
        # No call to Item.__init__: whitespace has no trivia of its own.
        self._s = s
        self._fixed = fixed

    @property
    def s(self):  # type: () -> str
        return self._s

    @property
    def value(self):  # type: () -> str
        return self._s

    @property
    def trivia(self):  # type: () -> Trivia
        raise RuntimeError("Called trivia on a Whitespace variant.")

    @property
    def discriminant(self):  # type: () -> int
        return 0

    def is_fixed(self):  # type: () -> bool
        return self._fixed

    def as_string(self):  # type: () -> str
        return self._s

    def __repr__(self):  # type: () -> str
        return "<{} {!r}>".format(type(self).__name__, self._s)

    def _getstate(self, protocol=3):
        return self._s, self._fixed
class Comment(Item):
    """A standalone comment line (no associated value)."""

    @property
    def discriminant(self):  # type: () -> int
        return 1

    def as_string(self):  # type: () -> str
        return "{}{}{}".format(
            self._trivia.indent, decode(self._trivia.comment), self._trivia.trail
        )

    def __str__(self):  # type: () -> str
        # Like as_string() but without the trailing newline.
        return "{}{}".format(self._trivia.indent, decode(self._trivia.comment))
class Integer(long, Item):
    """An integer literal, remembering its raw source representation."""

    def __new__(cls, value, trivia, raw):  # type: (int, Trivia, str) -> Integer
        return super(Integer, cls).__new__(cls, value)

    def __init__(self, _, trivia, raw):  # type: (int, Trivia, str) -> None
        super(Integer, self).__init__(trivia)

        self._raw = raw
        # Whether the source spelled an explicit leading sign (e.g. "+5"),
        # so arithmetic results keep that style.
        self._sign = False

        if re.match(r"^[+\-]\d+$", raw):
            self._sign = True

    @property
    def discriminant(self):  # type: () -> int
        return 2

    @property
    def value(self):  # type: () -> int
        return self

    def as_string(self):  # type: () -> str
        return self._raw

    def __add__(self, other):
        result = super(Integer, self).__add__(other)

        return self._new(result)

    def __radd__(self, other):
        result = super(Integer, self).__radd__(other)

        if isinstance(other, Integer):
            return self._new(result)

        return result

    def __sub__(self, other):
        result = super(Integer, self).__sub__(other)

        return self._new(result)

    def __rsub__(self, other):
        result = super(Integer, self).__rsub__(other)

        if isinstance(other, Integer):
            return self._new(result)

        return result

    def _new(self, result):
        """Wrap an arithmetic *result* in a new Integer with a matching raw form."""
        raw = str(result)
        # Bug fix: only prepend "+" for non-negative results.  Negative
        # results already carry "-" in str(result); the previous code
        # prepended a second sign, producing invalid raws such as "--4".
        if self._sign and result >= 0:
            raw = "+" + raw

        return Integer(result, self._trivia, raw)

    def _getstate(self, protocol=3):
        return int(self), self._trivia, self._raw
class Float(float, Item):
    """A float literal, remembering its raw source representation."""

    def __new__(cls, value, trivia, raw):  # type: (float, Trivia, str) -> Float
        return super(Float, cls).__new__(cls, value)

    def __init__(self, _, trivia, raw):  # type: (float, Trivia, str) -> None
        super(Float, self).__init__(trivia)

        self._raw = raw
        # Whether the source spelled an explicit leading sign (e.g. "+1.5"),
        # so arithmetic results keep that style.
        self._sign = False

        if re.match(r"^[+\-].+$", raw):
            self._sign = True

    @property
    def discriminant(self):  # type: () -> int
        return 3

    @property
    def value(self):  # type: () -> float
        return self

    def as_string(self):  # type: () -> str
        return self._raw

    def __add__(self, other):
        result = super(Float, self).__add__(other)

        return self._new(result)

    def __radd__(self, other):
        result = super(Float, self).__radd__(other)

        if isinstance(other, Float):
            return self._new(result)

        return result

    def __sub__(self, other):
        result = super(Float, self).__sub__(other)

        return self._new(result)

    def __rsub__(self, other):
        result = super(Float, self).__rsub__(other)

        if isinstance(other, Float):
            return self._new(result)

        return result

    def _new(self, result):
        """Wrap an arithmetic *result* in a new Float with a matching raw form."""
        raw = str(result)
        # Bug fix: only prepend "+" for non-negative results.  Negative
        # results already carry "-" in str(result); the previous code
        # prepended a second sign, producing invalid raws such as "--4.0".
        if self._sign and result >= 0:
            raw = "+" + raw

        return Float(result, self._trivia, raw)

    def _getstate(self, protocol=3):
        return float(self), self._trivia, self._raw
class Bool(Item):
    """A boolean literal."""

    def __init__(self, t, trivia):  # type: (int, Trivia) -> None
        super(Bool, self).__init__(trivia)

        self._value = bool(t)

    @property
    def discriminant(self):  # type: () -> int
        return 4

    @property
    def value(self):  # type: () -> bool
        return self._value

    def as_string(self):  # type: () -> str
        # TOML booleans are lowercase "true"/"false".
        return str(self._value).lower()

    def _getstate(self, protocol=3):
        return self._value, self._trivia

    def __bool__(self):
        return self._value

    __nonzero__ = __bool__

    def __eq__(self, other):
        # Only comparable to plain bools; defer otherwise.
        if not isinstance(other, bool):
            return NotImplemented

        return other == self._value

    def __hash__(self):
        return hash(self._value)

    def __repr__(self):
        return repr(self._value)
class DateTime(Item, datetime):
    """A datetime literal; behaves as a ``datetime`` while keeping its raw form."""

    def __new__(
        cls,
        year,
        month,
        day,
        hour,
        minute,
        second,
        microsecond,
        tzinfo,
        trivia,
        raw,
        **kwargs
    ):  # type: (int, int, int, int, int, int, int, Optional[datetime.tzinfo], Trivia, str, Any) -> datetime
        return datetime.__new__(
            cls,
            year,
            month,
            day,
            hour,
            minute,
            second,
            microsecond,
            tzinfo=tzinfo,
            **kwargs
        )

    def __init__(
        self, year, month, day, hour, minute, second, microsecond, tzinfo, trivia, raw
    ):  # type: (int, int, int, int, int, int, int, Optional[datetime.tzinfo], Trivia, str) -> None
        super(DateTime, self).__init__(trivia)

        self._raw = raw

    @property
    def discriminant(self):  # type: () -> int
        return 5

    @property
    def value(self):  # type: () -> datetime
        return self

    def as_string(self):  # type: () -> str
        return self._raw

    def __add__(self, other):
        # On Python 3.8 arithmetic is done on a plain datetime copy instead of
        # through super() — presumably to work around subclass arithmetic
        # behavior changes in 3.8; confirm before removing the branch.
        if PY38:
            result = datetime(
                self.year,
                self.month,
                self.day,
                self.hour,
                self.minute,
                self.second,
                self.microsecond,
                self.tzinfo,
            ).__add__(other)
        else:
            result = super(DateTime, self).__add__(other)

        return self._new(result)

    def __sub__(self, other):
        if PY38:
            result = datetime(
                self.year,
                self.month,
                self.day,
                self.hour,
                self.minute,
                self.second,
                self.microsecond,
                self.tzinfo,
            ).__sub__(other)
        else:
            result = super(DateTime, self).__sub__(other)

        # datetime - datetime yields a timedelta, which must not be wrapped.
        if isinstance(result, datetime):
            result = self._new(result)

        return result

    def _new(self, result):
        """Wrap an arithmetic *result* in a new DateTime with a fresh raw form."""
        raw = result.isoformat()

        return DateTime(
            result.year,
            result.month,
            result.day,
            result.hour,
            result.minute,
            result.second,
            result.microsecond,
            result.tzinfo,
            self._trivia,
            raw,
        )

    def _getstate(self, protocol=3):
        return (
            self.year,
            self.month,
            self.day,
            self.hour,
            self.minute,
            self.second,
            self.microsecond,
            self.tzinfo,
            self._trivia,
            self._raw,
        )
class Date(Item, date):
    """A date literal; behaves as a ``date`` while keeping its raw form."""

    def __new__(cls, year, month, day, *_):  # type: (int, int, int, Any) -> date
        return date.__new__(cls, year, month, day)

    def __init__(
        self, year, month, day, trivia, raw
    ):  # type: (int, int, int, Trivia, str) -> None
        super(Date, self).__init__(trivia)

        self._raw = raw

    @property
    def discriminant(self):  # type: () -> int
        return 6

    @property
    def value(self):  # type: () -> date
        return self

    def as_string(self):  # type: () -> str
        return self._raw

    def __add__(self, other):
        # See DateTime.__add__ for the PY38 plain-copy workaround.
        if PY38:
            result = date(self.year, self.month, self.day).__add__(other)
        else:
            result = super(Date, self).__add__(other)

        return self._new(result)

    def __sub__(self, other):
        if PY38:
            result = date(self.year, self.month, self.day).__sub__(other)
        else:
            result = super(Date, self).__sub__(other)

        # date - date yields a timedelta, which must not be wrapped.
        if isinstance(result, date):
            result = self._new(result)

        return result

    def _new(self, result):
        """Wrap an arithmetic *result* in a new Date with a fresh raw form."""
        raw = result.isoformat()

        return Date(result.year, result.month, result.day, self._trivia, raw)

    def _getstate(self, protocol=3):
        return (self.year, self.month, self.day, self._trivia, self._raw)
class Time(Item, time):
    """A time literal; behaves as a ``time`` while keeping its raw form."""

    def __new__(
        cls, hour, minute, second, microsecond, tzinfo, *_
    ):  # type: (int, int, int, int, Optional[datetime.tzinfo], Any) -> time
        return time.__new__(cls, hour, minute, second, microsecond, tzinfo)

    def __init__(
        self, hour, minute, second, microsecond, tzinfo, trivia, raw
    ):  # type: (int, int, int, int, Optional[datetime.tzinfo], Trivia, str) -> None
        super(Time, self).__init__(trivia)

        self._raw = raw

    @property
    def discriminant(self):  # type: () -> int
        return 7

    @property
    def value(self):  # type: () -> time
        return self

    def as_string(self):  # type: () -> str
        return self._raw

    def _getstate(self, protocol=3):
        return (
            self.hour,
            self.minute,
            self.second,
            self.microsecond,
            self.tzinfo,
            self._trivia,
            self._raw,
        )
class Array(Item, list):
    """An array literal.

    Keeps two representations in sync: the inherited list of plain values,
    and ``_value``, the full list of items including whitespace and comments.
    """

    def __init__(
        self, value, trivia, multiline=False
    ):  # type: (list, Trivia, bool) -> None
        super(Array, self).__init__(trivia)

        list.__init__(
            self, [v.value for v in value if not isinstance(v, (Whitespace, Comment))]
        )

        self._value = value
        self._multiline = multiline

    @property
    def discriminant(self):  # type: () -> int
        return 8

    @property
    def value(self):  # type: () -> list
        return self

    def multiline(self, multiline):  # type: (bool) -> Array
        """Toggle multiline rendering; returns self for chaining."""
        self._multiline = multiline

        return self

    def as_string(self):  # type: () -> str
        if not self._multiline:
            return "[{}]".format("".join(v.as_string() for v in self._value))

        # Multiline: one element per line, indented 4 spaces, trailing comma.
        s = "[\n" + self.trivia.indent + " " * 4
        s += (",\n" + self.trivia.indent + " " * 4).join(
            v.as_string() for v in self._value if not isinstance(v, Whitespace)
        )
        s += ",\n"
        s += "]"

        return s

    def append(self, _item):  # type: (Any) -> None
        # Separate successive elements with ", " in the raw representation.
        if self._value:
            self._value.append(Whitespace(", "))

        it = item(_item)
        super(Array, self).append(it.value)

        self._value.append(it)

    if not PY2:

        def clear(self):
            super(Array, self).clear()

            self._value.clear()

    def __iadd__(self, other):  # type: (list) -> Array
        if not isinstance(other, list):
            return NotImplemented

        for v in other:
            self.append(v)

        return self

    def __delitem__(self, key):
        # Delete from the list view, then locate and delete the matching
        # entry (plus its trailing whitespace) in the raw item list,
        # skipping comments/whitespace while counting.
        super(Array, self).__delitem__(key)

        j = 0 if key >= 0 else -1
        for i, v in enumerate(self._value if key >= 0 else reversed(self._value)):
            if key < 0:
                i = -i - 1

            if isinstance(v, (Comment, Whitespace)):
                continue

            if j == key:
                del self._value[i]

                if i < 0 and abs(i) > len(self._value):
                    i += 1

                if i < len(self._value) - 1 and isinstance(
                    self._value[i], Whitespace
                ):
                    del self._value[i]

                break

            j += 1 if key >= 0 else -1

    def __str__(self):
        return str(
            [v.value for v in self._value if not isinstance(v, (Whitespace, Comment))]
        )

    def __repr__(self):
        return str(self)

    def _getstate(self, protocol=3):
        return self._value, self._trivia
class Table(Item, MutableMapping, dict):
    """A standard TOML table.

    Wraps a ``Container`` (``_value``) holding the ordered key/item pairs,
    while mirroring keyed entries into the inherited dict for fast lookup.
    """

    def __init__(
        self,
        value,
        trivia,
        is_aot_element,
        is_super_table=False,
        name=None,
        display_name=None,
    ):  # type: (tomlkit.container.Container, Trivia, bool, bool, Optional[str], Optional[str]) -> None
        super(Table, self).__init__(trivia)

        self.name = name
        self.display_name = display_name
        self._value = value
        self._is_aot_element = is_aot_element
        self._is_super_table = is_super_table

        # Mirror keyed container entries into the dict base.
        for k, v in self._value.body:
            if k is not None:
                dict.__setitem__(self, k.key, v)

    @property
    def value(self):  # type: () -> tomlkit.container.Container
        return self._value

    @property
    def discriminant(self):  # type: () -> int
        return 9

    def add(self, key, item=None):  # type: (Union[Key, Item, str], Any) -> Item
        """Add a keyed value, or a bare comment/whitespace item."""
        if item is None:
            if not isinstance(key, (Comment, Whitespace)):
                raise ValueError(
                    "Non comment/whitespace items must have an associated key"
                )

            key, item = None, key

        return self.append(key, item)

    def append(self, key, _item):  # type: (Union[Key, str], Any) -> Table
        """Append an item, propagating this table's indentation onto it."""
        if not isinstance(_item, Item):
            _item = item(_item)

        self._value.append(key, _item)

        if isinstance(key, Key):
            key = key.key

        if key is not None:
            dict.__setitem__(self, key, _item)

        m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
        if not m:
            return self

        indent = m.group(1)

        if not isinstance(_item, Whitespace):
            m = re.match("(?s)^([^ ]*)(.*)$", _item.trivia.indent)
            if not m:
                _item.trivia.indent = indent
            else:
                _item.trivia.indent = m.group(1) + indent + m.group(2)

        return self

    def raw_append(self, key, _item):  # type: (Union[Key, str], Any) -> Table
        """Append an item without touching its indentation."""
        if not isinstance(_item, Item):
            _item = item(_item)

        self._value.append(key, _item)

        if isinstance(key, Key):
            key = key.key

        if key is not None:
            dict.__setitem__(self, key, _item)

        return self

    def remove(self, key):  # type: (Union[Key, str]) -> Table
        """Remove *key* from both the container and the dict view."""
        self._value.remove(key)

        if isinstance(key, Key):
            key = key.key

        if key is not None:
            dict.__delitem__(self, key)

        return self

    def is_aot_element(self):  # type: () -> bool
        """Whether this table is an element of an array of tables."""
        return self._is_aot_element

    def is_super_table(self):  # type: () -> bool
        """Whether this table is an implicit parent (never rendered itself)."""
        return self._is_super_table

    def as_string(self):  # type: () -> str
        return self._value.as_string()

    # Helpers

    def indent(self, indent):  # type: (int) -> Table
        """Indent the table and all of its non-whitespace children."""
        super(Table, self).indent(indent)

        m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
        if not m:
            indent = ""
        else:
            indent = m.group(1)

        for k, item in self._value.body:
            if not isinstance(item, Whitespace):
                item.trivia.indent = indent + item.trivia.indent

        return self

    def get(self, key, default=None):  # type: (Any, Optional[Any]) -> Any
        return self._value.get(key, default)

    def setdefault(
        self, key, default=None
    ):  # type: (Union[Key, str], Any) -> Union[Item, Container]
        super(Table, self).setdefault(key, default=default)

        return self[key]

    def __getitem__(self, key):  # type: (Union[Key, str]) -> Item
        return self._value[key]

    def __setitem__(self, key, value):  # type: (Union[Key, str], Any) -> None
        # Only fix the indentation of entries that are new to this table.
        fix_indent = key not in self

        if not isinstance(value, Item):
            value = item(value)

        self._value[key] = value

        if key is not None:
            dict.__setitem__(self, key, value)

        m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
        if not m or not fix_indent:
            return

        indent = m.group(1)

        if not isinstance(value, Whitespace):
            m = re.match("(?s)^([^ ]*)(.*)$", value.trivia.indent)
            if not m:
                value.trivia.indent = indent
            else:
                value.trivia.indent = m.group(1) + indent + m.group(2)

    def __delitem__(self, key):  # type: (Union[Key, str]) -> None
        self.remove(key)

    def __len__(self):  # type: () -> int
        return len(self._value)

    def __iter__(self):  # type: () -> Iterator[str]
        return iter(self._value)

    def __repr__(self):  # type: () -> str
        return repr(self._value)

    def _getstate(self, protocol=3):
        return (
            self._value,
            self._trivia,
            self._is_aot_element,
            self._is_super_table,
            self.name,
            self.display_name,
        )
class InlineTable(Item, MutableMapping, dict):
    """An inline table literal (``{key = value, ...}``).

    Wraps a ``Container`` (``_value``) while mirroring keyed entries into
    the inherited dict.  ``_new`` marks tables built programmatically, which
    are rendered with slightly different spacing.
    """

    def __init__(
        self, value, trivia, new=False
    ):  # type: (tomlkit.container.Container, Trivia, bool) -> None
        super(InlineTable, self).__init__(trivia)

        self._value = value
        self._new = new

        for k, v in self._value.body:
            if k is not None:
                dict.__setitem__(self, k.key, v)

    @property
    def discriminant(self):  # type: () -> int
        return 10

    @property
    def value(self):  # type: () -> Dict
        return self._value

    def append(self, key, _item):  # type: (Union[Key, str], Any) -> InlineTable
        """Append an item; inline tables strip comments and normalize spacing."""
        if not isinstance(_item, Item):
            _item = item(_item)

        if not isinstance(_item, (Whitespace, Comment)):
            if not _item.trivia.indent and len(self._value) > 0 and not self._new:
                _item.trivia.indent = " "
            # Comments cannot appear inside an inline table.
            if _item.trivia.comment:
                _item.trivia.comment = ""

        self._value.append(key, _item)

        if isinstance(key, Key):
            key = key.key

        if key is not None:
            dict.__setitem__(self, key, _item)

        return self

    def remove(self, key):  # type: (Union[Key, str]) -> InlineTable
        """Remove *key* from both the container and the dict view."""
        self._value.remove(key)

        if isinstance(key, Key):
            key = key.key

        if key is not None:
            dict.__delitem__(self, key)

        return self

    def as_string(self):  # type: () -> str
        buf = "{"
        for i, (k, v) in enumerate(self._value.body):
            if k is None:
                # Trailing keyless item: drop the dangling separator first.
                if i == len(self._value.body) - 1:
                    if self._new:
                        buf = buf.rstrip(", ")
                    else:
                        buf = buf.rstrip(",")

                buf += v.as_string()

                continue

            buf += "{}{}{}{}{}{}".format(
                v.trivia.indent,
                k.as_string() + ("." if k.is_dotted() else ""),
                k.sep,
                v.as_string(),
                v.trivia.comment,
                v.trivia.trail.replace("\n", ""),
            )

            if i != len(self._value.body) - 1:
                buf += ","
                if self._new:
                    buf += " "

        buf += "}"

        return buf

    def get(self, key, default=None):  # type: (Any, Optional[Any]) -> Any
        return self._value.get(key, default)

    def setdefault(
        self, key, default=None
    ):  # type: (Union[Key, str], Any) -> Union[Item, Container]
        super(InlineTable, self).setdefault(key, default=default)

        return self[key]

    def __contains__(self, key):  # type: (Union[Key, str]) -> bool
        return key in self._value

    def __getitem__(self, key):  # type: (Union[Key, str]) -> Item
        return self._value[key]

    def __setitem__(self, key, value):  # type: (Union[Key, str], Any) -> None
        if not isinstance(value, Item):
            value = item(value)

        self._value[key] = value

        if key is not None:
            dict.__setitem__(self, key, value)

        # Comments cannot appear inside an inline table.
        if value.trivia.comment:
            value.trivia.comment = ""

        m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
        if not m:
            return

        indent = m.group(1)

        if not isinstance(value, Whitespace):
            m = re.match("(?s)^([^ ]*)(.*)$", value.trivia.indent)
            if not m:
                value.trivia.indent = indent
            else:
                value.trivia.indent = m.group(1) + indent + m.group(2)

    def __delitem__(self, key):  # type: (Union[Key, str]) -> None
        self.remove(key)

    def __len__(self):  # type: () -> int
        return len(self._value)

    def __iter__(self):  # type: () -> Iterator[str]
        return iter(self._value)

    def __repr__(self):
        return repr(self._value)

    def _getstate(self, protocol=3):
        return (self._value, self._trivia)
class String(unicode, Item):
    """A string literal.

    Subclasses ``unicode``/``str`` so the item behaves like its value, while
    remembering the quote style (``_t``) and the raw escaped source form
    (``_original``) for lossless round-tripping.
    """

    def __new__(cls, t, value, original, trivia):
        # The str payload is fixed at construction; only `value` feeds __new__.
        return super(String, cls).__new__(cls, value)

    def __init__(
        self, t, _, original, trivia
    ):  # type: (StringType, str, original, Trivia) -> None
        super(String, self).__init__(trivia)

        self._t = t
        self._original = original

    @property
    def discriminant(self):  # type: () -> int
        return 11

    @property
    def value(self):  # type: () -> str
        return self

    def as_string(self):  # type: () -> str
        # Re-wrap the original (still escaped) text in its quote characters.
        return "{}{}{}".format(self._t.value, decode(self._original), self._t.value)

    def __add__(self, other):
        result = super(String, self).__add__(other)

        return self._new(result)

    def __sub__(self, other):
        # NOTE(review): str/unicode defines no __sub__, so this super() call
        # raises AttributeError at runtime; appears to be dead code — confirm
        # before relying on it.
        result = super(String, self).__sub__(other)

        return self._new(result)

    def _new(self, result):
        # The result is used both as the value and as the raw original form.
        return String(self._t, result, result, self._trivia)

    def _getstate(self, protocol=3):
        return self._t, unicode(self), self._original, self._trivia
class AoT(Item, list):
    """An array of table literal (``[[name]]`` sections).

    Maintains ``_body`` (the ordered ``Table`` elements) alongside the
    inherited list view.
    """

    def __init__(
        self, body, name=None, parsed=False
    ):  # type: (List[Table], Optional[str], bool) -> None
        self.name = name
        self._body = []
        # When parsed from a document, indentation fix-ups are skipped.
        self._parsed = parsed

        super(AoT, self).__init__(Trivia(trail=""))

        for table in body:
            self.append(table)

    @property
    def body(self):  # type: () -> List[Table]
        return self._body

    @property
    def discriminant(self):  # type: () -> int
        return 12

    @property
    def value(self):  # type: () -> List[Dict[Any, Any]]
        return [v.value for v in self._body]

    def append(self, table):  # type: (Table) -> Table
        """Append *table*, propagating this AoT's indentation onto it."""
        m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
        if m:
            indent = m.group(1)

            m = re.match("(?s)^([^ ]*)(.*)$", table.trivia.indent)
            if not m:
                table.trivia.indent = indent
            else:
                table.trivia.indent = m.group(1) + indent + m.group(2)

        # Separate programmatically added tables with a newline.
        if not self._parsed and "\n" not in table.trivia.indent and self._body:
            table.trivia.indent = "\n" + table.trivia.indent

        self._body.append(table)
        super(AoT, self).append(table)

        return table

    def as_string(self):  # type: () -> str
        b = ""
        for table in self._body:
            b += table.as_string()

        return b

    def __repr__(self):  # type: () -> str
        return "<AoT {}>".format(self.value)

    def _getstate(self, protocol=3):
        return self._body, self.name, self._parsed
class Null(Item):
    """A null item, representing the absence of a value."""

    def __init__(self):  # type: () -> None
        # Deliberately skip Item.__init__: a Null carries no trivia.
        pass

    @property
    def discriminant(self):  # type: () -> int
        return -1

    @property
    def value(self):  # type: () -> None
        return None

    def as_string(self):  # type: () -> str
        return ""

    def _getstate(self, protocol=3):
        return tuple()
| true | true |
f738e33442f8fe750877550660aea02e805f4126 | 646 | py | Python | winlib/pyreadline/__init__.py | netsec/pytan | 29a3484d21cb90d8896275febd1c535e4f3cdc7e | [
"MIT"
] | 1 | 2019-01-29T21:22:06.000Z | 2019-01-29T21:22:06.000Z | winlib/pyreadline/__init__.py | c1rdan/pytan | 5e537a6dcf4136e3b9c3905a39f073396e7f044f | [
"MIT"
] | null | null | null | winlib/pyreadline/__init__.py | c1rdan/pytan | 5e537a6dcf4136e3b9c3905a39f073396e7f044f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#*****************************************************************************
# Copyright (C) 2003-2006 Gary Bishop.
# Copyright (C) 2006 Jorgen Stenarson. <jorgen.stenarson@bostream.nu>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#*****************************************************************************
from __future__ import print_function, unicode_literals, absolute_import
from . import unicode_helper, logger, clipboard, lineeditor, modes, console
from .rlmain import *
from . import rlmain
| 46.142857 | 79 | 0.544892 |
from __future__ import print_function, unicode_literals, absolute_import
from . import unicode_helper, logger, clipboard, lineeditor, modes, console
from .rlmain import *
from . import rlmain
| true | true |
f738e52b4e0b5f53940c8f7c5d99b58167225f41 | 5,934 | py | Python | trustlab/lab/scenarios/basic_scenario.py | ValentinSiegert/aTLAS | a2e7521447f81add83a0c958d61846a1cbfbbe9c | [
"MIT"
] | null | null | null | trustlab/lab/scenarios/basic_scenario.py | ValentinSiegert/aTLAS | a2e7521447f81add83a0c958d61846a1cbfbbe9c | [
"MIT"
] | null | null | null | trustlab/lab/scenarios/basic_scenario.py | ValentinSiegert/aTLAS | a2e7521447f81add83a0c958d61846a1cbfbbe9c | [
"MIT"
] | null | null | null | """
This file was auto-generated by an ObjectFactory of aTLAS
"""
NAME = 'Basic Scenario'
AGENTS = ['A', 'B', 'C', 'D']
OBSERVATIONS = [{'authors': ['A'],
'before': [],
'details': {'content_trust.topics': ['Web Engineering'],
'uri': 'http://example.com/Redecentralization_of_the_Web'},
'message': 'Redecentralization of the Web',
'observation_id': 1,
'receiver': 'B',
'sender': 'A'},
{'authors': ['A'],
'before': [1],
'details': {'content_trust.topics': ['Web Engineering'],
'uri': 'http://example.com/Web_of_Things'},
'message': 'Web of Things',
'observation_id': 2,
'receiver': 'B',
'sender': 'A'},
{'authors': ['A'],
'before': [2],
'details': {'content_trust.topics': ['Web Engineering'],
'uri': 'http://example.com/Web_Assembly'},
'message': 'Web Assembly',
'observation_id': 3,
'receiver': 'B',
'sender': 'A'},
{'authors': ['C'],
'before': [3],
'details': {'content_trust.topics': ['Web Engineering'],
'uri': 'http://example.com/Semantic_Web_and_Linked_Open_Data'},
'message': 'Semantic Web and Linked Open Data',
'observation_id': 4,
'receiver': 'B',
'sender': 'C'},
{'authors': ['C'],
'before': [4],
'details': {'content_trust.topics': ['Web Engineering'],
'uri': 'http://example.com/Redecentralization_of_the_Web'},
'message': 'Redecentralization of the Web',
'observation_id': 5,
'receiver': 'B',
'sender': 'C'},
{'authors': ['C'],
'before': [5],
'details': {'content_trust.topics': ['Web Engineering'],
'uri': 'http://example.com/Web-based_learning'},
'message': 'Web-based learning',
'observation_id': 6,
'receiver': 'B',
'sender': 'C'}]
HISTORY = {'A': [['B', 'http://example.com/Semantic_Web_and_Linked_Open_Data', 1.0],
['C', 'http://example.com/Web-based_learning', 1.0],
['D', 'http://example.com/Redecentralization_of_the_Web', 1.0]],
'B': [['A', 'http://example.com/Redecentralization_of_the_Web', 0.0],
['C', 'http://example.com/Web-based_learning', 0.0],
['D', 'http://example.com/Web_Assembly', 1.0]],
'C': [['A', 'http://example.com/Semantic_Web_and_Linked_Open_Data', 1.0],
['B', 'http://example.com/Web-based_learning', 1.0],
['D', 'http://example.com/Web_of_Things', 1.0]],
'D': [['A', 'http://example.com/Redecentralization_of_the_Web', 1.0],
['B', 'http://example.com/Redecentralization_of_the_Web', 1.0],
['C', 'http://example.com/Redecentralization_of_the_Web', 1.0]]}
SCALES_PER_AGENT = {'A': {'cooperation': 0.5,
'default': 0.0,
'forgivability': -0.5,
'maximum': 1.0,
'minimum': -1.0,
'name': 'Trust Scale by Marsh and Briggs (2009)',
'package': 'marsh_briggs_scale'},
'B': {'cooperation': 0.5,
'default': 0.0,
'forgivability': -0.5,
'maximum': 1.0,
'minimum': -1.0,
'name': 'Trust Scale by Marsh and Briggs (2009)',
'package': 'marsh_briggs_scale'},
'C': {'cooperation': 0.5,
'default': 0.0,
'forgivability': -0.5,
'maximum': 1.0,
'minimum': -1.0,
'name': 'Trust Scale by Marsh and Briggs (2009)',
'package': 'marsh_briggs_scale'},
'D': {'cooperation': 0.5,
'default': 0.0,
'forgivability': -0.5,
'maximum': 1.0,
'minimum': -1.0,
'name': 'Trust Scale by Marsh and Briggs (2009)',
'package': 'marsh_briggs_scale'}}
METRICS_PER_AGENT = {'A': {'__final__': {'name': 'weighted_average', 'weights': {}},
'content_trust.direct_experience': {},
'content_trust.popularity': {},
'content_trust.recommendation': {}},
'B': {'__final__': {'name': 'weighted_average', 'weights': {}},
'content_trust.direct_experience': {},
'content_trust.popularity': {},
'content_trust.recommendation': {}},
'C': {'__final__': {'name': 'weighted_average', 'weights': {}},
'content_trust.direct_experience': {},
'content_trust.popularity': {},
'content_trust.recommendation': {}},
'D': {'__final__': {'name': 'weighted_average', 'weights': {}},
'content_trust.direct_experience': {},
'content_trust.popularity': {},
'content_trust.recommendation': {}}}
DESCRIPTION = 'This is a basic scenario with four agents.'
| 47.472 | 92 | 0.422144 |
NAME = 'Basic Scenario'
AGENTS = ['A', 'B', 'C', 'D']
OBSERVATIONS = [{'authors': ['A'],
'before': [],
'details': {'content_trust.topics': ['Web Engineering'],
'uri': 'http://example.com/Redecentralization_of_the_Web'},
'message': 'Redecentralization of the Web',
'observation_id': 1,
'receiver': 'B',
'sender': 'A'},
{'authors': ['A'],
'before': [1],
'details': {'content_trust.topics': ['Web Engineering'],
'uri': 'http://example.com/Web_of_Things'},
'message': 'Web of Things',
'observation_id': 2,
'receiver': 'B',
'sender': 'A'},
{'authors': ['A'],
'before': [2],
'details': {'content_trust.topics': ['Web Engineering'],
'uri': 'http://example.com/Web_Assembly'},
'message': 'Web Assembly',
'observation_id': 3,
'receiver': 'B',
'sender': 'A'},
{'authors': ['C'],
'before': [3],
'details': {'content_trust.topics': ['Web Engineering'],
'uri': 'http://example.com/Semantic_Web_and_Linked_Open_Data'},
'message': 'Semantic Web and Linked Open Data',
'observation_id': 4,
'receiver': 'B',
'sender': 'C'},
{'authors': ['C'],
'before': [4],
'details': {'content_trust.topics': ['Web Engineering'],
'uri': 'http://example.com/Redecentralization_of_the_Web'},
'message': 'Redecentralization of the Web',
'observation_id': 5,
'receiver': 'B',
'sender': 'C'},
{'authors': ['C'],
'before': [5],
'details': {'content_trust.topics': ['Web Engineering'],
'uri': 'http://example.com/Web-based_learning'},
'message': 'Web-based learning',
'observation_id': 6,
'receiver': 'B',
'sender': 'C'}]
HISTORY = {'A': [['B', 'http://example.com/Semantic_Web_and_Linked_Open_Data', 1.0],
['C', 'http://example.com/Web-based_learning', 1.0],
['D', 'http://example.com/Redecentralization_of_the_Web', 1.0]],
'B': [['A', 'http://example.com/Redecentralization_of_the_Web', 0.0],
['C', 'http://example.com/Web-based_learning', 0.0],
['D', 'http://example.com/Web_Assembly', 1.0]],
'C': [['A', 'http://example.com/Semantic_Web_and_Linked_Open_Data', 1.0],
['B', 'http://example.com/Web-based_learning', 1.0],
['D', 'http://example.com/Web_of_Things', 1.0]],
'D': [['A', 'http://example.com/Redecentralization_of_the_Web', 1.0],
['B', 'http://example.com/Redecentralization_of_the_Web', 1.0],
['C', 'http://example.com/Redecentralization_of_the_Web', 1.0]]}
SCALES_PER_AGENT = {'A': {'cooperation': 0.5,
'default': 0.0,
'forgivability': -0.5,
'maximum': 1.0,
'minimum': -1.0,
'name': 'Trust Scale by Marsh and Briggs (2009)',
'package': 'marsh_briggs_scale'},
'B': {'cooperation': 0.5,
'default': 0.0,
'forgivability': -0.5,
'maximum': 1.0,
'minimum': -1.0,
'name': 'Trust Scale by Marsh and Briggs (2009)',
'package': 'marsh_briggs_scale'},
'C': {'cooperation': 0.5,
'default': 0.0,
'forgivability': -0.5,
'maximum': 1.0,
'minimum': -1.0,
'name': 'Trust Scale by Marsh and Briggs (2009)',
'package': 'marsh_briggs_scale'},
'D': {'cooperation': 0.5,
'default': 0.0,
'forgivability': -0.5,
'maximum': 1.0,
'minimum': -1.0,
'name': 'Trust Scale by Marsh and Briggs (2009)',
'package': 'marsh_briggs_scale'}}
METRICS_PER_AGENT = {'A': {'__final__': {'name': 'weighted_average', 'weights': {}},
'content_trust.direct_experience': {},
'content_trust.popularity': {},
'content_trust.recommendation': {}},
'B': {'__final__': {'name': 'weighted_average', 'weights': {}},
'content_trust.direct_experience': {},
'content_trust.popularity': {},
'content_trust.recommendation': {}},
'C': {'__final__': {'name': 'weighted_average', 'weights': {}},
'content_trust.direct_experience': {},
'content_trust.popularity': {},
'content_trust.recommendation': {}},
'D': {'__final__': {'name': 'weighted_average', 'weights': {}},
'content_trust.direct_experience': {},
'content_trust.popularity': {},
'content_trust.recommendation': {}}}
DESCRIPTION = 'This is a basic scenario with four agents.'
| true | true |
f738e53dae0449ebfaecf3213895f97f66501890 | 1,148 | py | Python | scripts/gen__dfd_pyx.py | jgukelberger/fdint | 0237323d6fd5d4161190ff7982811d8ae290f89e | [
"BSD-3-Clause"
] | 11 | 2015-10-25T18:51:55.000Z | 2021-02-26T13:05:07.000Z | scripts/gen__dfd_pyx.py | jgukelberger/fdint | 0237323d6fd5d4161190ff7982811d8ae290f89e | [
"BSD-3-Clause"
] | 19 | 2015-04-23T19:41:20.000Z | 2017-08-01T02:04:04.000Z | scripts/gen__dfd_pyx.py | jgukelberger/fdint | 0237323d6fd5d4161190ff7982811d8ae290f89e | [
"BSD-3-Clause"
] | 10 | 2017-05-31T07:27:16.000Z | 2021-08-28T15:34:09.000Z | # Copyright (c) 2015, Scott J Maddox. All rights reserved.
# Use of this source code is governed by the BSD-3-Clause
# license that can be found in the LICENSE file.
import os
import sys

# Destination: the generated Cython source that holds the first-derivative
# wrappers, located relative to this script inside the source tree.
fpath = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                     '../fdint/_dfd.pyx'))
with open(fpath, 'w') as f:
    # Emit dfd{k}h(phi) for both the half-integer orders (odd k) and the
    # integer orders (even k).  Each derivative is c * fd{k-2}h with
    # c = (k)/2, i.e. the generated code encodes d/dphi F_j = j * F_{j-1}.
    # Orders are spelled with 'm' in place of '-' in identifiers (e.g. m9h).
    # Bug fix: the original `range(...) + range(...)` is Python 2 only
    # (range returns an iterator in Python 3, which cannot be added);
    # materializing both ranges works identically on Python 2 and 3.
    for i in list(range(-9, 22, 2)) + list(range(0, 21, 2)):
        a = str(i).replace('-', 'm')
        b = str(i + 2).replace('-', 'm')
        c = (i + 2) / 2.
        f.write("""
@cython.cdivision(True)
cdef inline double dfd{b}h(double phi):
    '''
    First derivative of fd{b}h.
    '''
    return {c}*fd{a}h(phi)
""".format(a=a, b=b, c=repr(c)))
    # Region-specialized variants exist only for the half-integer orders.
    # Bug fix: `xrange` was removed in Python 3; `range` iterates the same
    # values on both major versions.
    for i in range(-9, 22, 2):
        a = str(i).replace('-', 'm')
        b = str(i + 2).replace('-', 'm')
        c = (i + 2) / 2.
        for ext in ['lt_m2', 'm2_to_0', '0_to_2', '2_to_5', '5_to_10',
                    '10_to_20', '20_to_40', 'gt_40']:
            f.write("""
@cython.cdivision(True)
cdef inline double dfd{b}h_{ext}(double phi):
    '''
    First derivative of fd{b}h_{ext}.
    '''
    return {c}*fd{a}h_{ext}(phi)
""".format(a=a, b=b, ext=ext, c=repr(c)))
import os
import sys
fpath = os.path.abspath(os.path.join(os.path.dirname(__file__),
'../fdint/_dfd.pyx'))
with open(fpath, 'w') as f:
for i in range(-9,22,2)+range(0,21,2):
a = str(i).replace('-','m')
b = str(i+2).replace('-','m')
c = (i+2)/2.
f.write("""
@cython.cdivision(True)
cdef inline double dfd{b}h(double phi):
'''
First derivative of fd{b}h.
'''
return {c}*fd{a}h(phi)
""".format(a=a, b=b, c=repr(c)))
for i in xrange(-9,22,2):
a = str(i).replace('-','m')
b = str(i+2).replace('-','m')
c = (i+2)/2.
for ext in ['lt_m2', 'm2_to_0', '0_to_2', '2_to_5', '5_to_10',
'10_to_20', '20_to_40', 'gt_40']:
f.write("""
@cython.cdivision(True)
cdef inline double dfd{b}h_{ext}(double phi):
'''
First derivative of fd{b}h_{ext}.
'''
return {c}*fd{a}h_{ext}(phi)
""".format(a=a, b=b, ext=ext, c=repr(c))) | true | true |
f738e6f8d7b8453bcf33d0004fdbca2710d10727 | 463 | py | Python | redditBotColorize/colorize_gif.py | ForeverRecompin/reddit_crawlers | e72b317dba5a8f2195de186f766192dfe873fbd4 | [
"BSD-2-Clause"
] | 422 | 2016-07-20T20:07:11.000Z | 2022-02-22T18:58:11.000Z | redditBotColorize/colorize_gif.py | ForeverRecompin/reddit_crawlers | e72b317dba5a8f2195de186f766192dfe873fbd4 | [
"BSD-2-Clause"
] | 2 | 2016-08-05T15:20:04.000Z | 2017-01-28T11:53:55.000Z | redditBotColorize/colorize_gif.py | ForeverRecompin/reddit_crawlers | e72b317dba5a8f2195de186f766192dfe873fbd4 | [
"BSD-2-Clause"
] | 61 | 2016-07-25T19:26:39.000Z | 2021-12-17T17:23:06.000Z | import sys
import cv2
import colorize
import os

# Initialize the colorization network once, before any frame is processed.
# (The boolean is forwarded verbatim to the colorize module — presumably a
# GPU on/off switch; confirm against colorize.loadDNN.)
colorize.loadDNN(False)

gif_path = sys.argv[1]
cam = cv2.VideoCapture(gif_path)

# Decode the GIF frame by frame, colorize each frame, and overwrite the
# temporary JPEG in place.  Zero-padded names (000000.jpg, 000001.jpg, ...)
# are what ffmpeg's %06d input pattern expects, in order.
counter = 0
while True:
    ret, img = cam.read()
    if not ret:  # no more frames
        break
    temp_img_path = '/tmp/%06d.jpg' % counter
    cv2.imwrite(temp_img_path, img)
    coloredImage = colorize.runDNN(temp_img_path)
    cv2.imwrite(temp_img_path, coloredImage)
    counter += 1
cam.release()  # free the decoder handle before shelling out to ffmpeg

# Bug fix: the original was 'ffmpeg -i /tmp/\%06d.jpg colorized_%s' % gif_path,
# which raises TypeError at runtime — a backslash does not escape '%' in
# %-formatting, so '%06d' tried to consume gif_path as an int.  '%%' is the
# correct escape, giving ffmpeg the literal /tmp/%06d.jpg pattern.  basename()
# keeps the output filename valid even when the input path contains
# directories ('colorized_' + a full path is not a single valid filename).
# NOTE(review): gif_path is interpolated into a shell command unescaped;
# paths with spaces or shell metacharacters will break or be unsafe.
os.system('ffmpeg -i /tmp/%%06d.jpg colorized_%s' % os.path.basename(gif_path))
| 18.52 | 59 | 0.701944 | import sys
import cv2
import colorize
import os
colorize.loadDNN(False)
gif_path = sys.argv[1]
cam = cv2.VideoCapture(gif_path)
counter = 0
while True:
ret,img = cam.read()
if not ret:
break
temp_img_path = '/tmp/%06d.jpg'%counter
cv2.imwrite(temp_img_path,img)
coloredImage = colorize.runDNN(temp_img_path)
cv2.imwrite(temp_img_path,coloredImage)
counter += 1
os.system('ffmpeg -i /tmp/\%06d.jpg colorized_%s'%gif_path)
| true | true |
f738e719fb08c24a6742e31d1ec58bb25023459d | 11,535 | py | Python | dashboard/lib/flanker/addresslib/_parser/mailbox_or_url_parsetab.py | robertsimmons514/isthislegit | aa8f2b6cb2ac3de2b0fe03bb93dbceccc4c1f495 | [
"BSD-3-Clause"
] | 929 | 2015-01-01T11:14:21.000Z | 2022-03-28T23:47:40.000Z | dashboard/lib/flanker/addresslib/_parser/mailbox_or_url_parsetab.py | robertsimmons514/isthislegit | aa8f2b6cb2ac3de2b0fe03bb93dbceccc4c1f495 | [
"BSD-3-Clause"
] | 141 | 2015-01-10T19:02:03.000Z | 2021-07-26T18:04:14.000Z | dashboard/lib/flanker/addresslib/_parser/mailbox_or_url_parsetab.py | robertsimmons514/isthislegit | aa8f2b6cb2ac3de2b0fe03bb93dbceccc4c1f495 | [
"BSD-3-Clause"
] | 179 | 2015-01-01T18:42:46.000Z | 2022-02-16T21:57:14.000Z |
# mailbox_or_url_parsetab.py
# This file is automatically generated. Do not edit.
_tabversion = '3.10'
_lr_method = 'LALR'
_lr_signature = 'mailbox_or_urlFWSP AT DOT COMMA SEMICOLON LANGLE RANGLE ATOM DOT_ATOM LBRACKET RBRACKET DTEXT DQUOTE QTEXT QPAIR LPAREN RPAREN CTEXT URLmailbox_or_url_list : mailbox_or_url_list delim mailbox_or_url\n | mailbox_or_url_list delim\n | mailbox_or_urldelim : delim fwsp COMMA\n | delim fwsp SEMICOLON\n | COMMA\n | SEMICOLONmailbox_or_url : mailbox\n | urlurl : ofwsp URL ofwspmailbox : addr_spec\n | angle_addr\n | name_addrname_addr : ofwsp phrase angle_addrangle_addr : ofwsp LANGLE addr_spec RANGLE ofwspaddr_spec : ofwsp local_part AT domain ofwsplocal_part : DOT_ATOM\n | ATOM\n | quoted_stringdomain : DOT_ATOM\n | ATOM\n | domain_literalquoted_string : DQUOTE quoted_string_text DQUOTE\n | DQUOTE DQUOTEquoted_string_text : quoted_string_text QTEXT\n | quoted_string_text QPAIR\n | quoted_string_text fwsp\n | QTEXT\n | QPAIR\n | fwspdomain_literal : LBRACKET domain_literal_text RBRACKET\n | LBRACKET RBRACKETdomain_literal_text : domain_literal_text DTEXT\n | domain_literal_text fwsp\n | DTEXT\n | fwspcomment : LPAREN comment_text RPAREN\n | LPAREN RPARENcomment_text : comment_text CTEXT\n | comment_text fwsp\n | CTEXT\n | fwspphrase : phrase fwsp ATOM\n | phrase fwsp DOT_ATOM\n | phrase fwsp DOT\n | phrase fwsp quoted_string\n | phrase ATOM\n | phrase DOT_ATOM\n | phrase DOT\n | phrase quoted_string\n | ATOM\n | DOT_ATOM\n | DOT\n | quoted_stringofwsp : fwsp comment fwsp\n | fwsp comment\n | comment fwsp\n | comment\n | fwsp\n |fwsp : FWSP'
_lr_action_items = {'FWSP':([0,2,7,11,12,14,15,17,18,19,20,21,22,23,24,25,26,32,33,34,35,36,40,41,42,43,44,45,46,50,51,52,53,54,55,56,57,58,59,60,61,62,63,66,67,68,69,70,71,72,],[7,7,-61,7,7,7,7,-51,7,-52,7,-54,-53,-42,7,-38,-41,-30,-29,-28,-24,7,-48,-47,-50,-49,-40,-37,-39,7,7,7,-20,-21,-22,-27,-26,-25,-23,-44,-43,-46,-45,-36,-35,7,-32,-34,-33,-31,]),'LANGLE':([0,1,2,4,7,12,13,17,19,20,21,22,25,27,35,37,38,40,41,42,43,45,59,60,61,62,63,],[-60,-59,-58,14,-61,-56,-57,-51,-52,-60,-54,-53,-38,-55,-24,-59,14,-48,-47,-50,-49,-37,-23,-44,-43,-46,-45,]),'QPAIR':([7,18,32,33,34,36,56,57,58,],[-61,33,-30,-29,-28,57,-27,-26,-25,]),'URL':([0,1,2,4,7,12,13,25,27,45,],[-60,-59,-58,15,-61,-56,-57,-38,-55,-37,]),'QTEXT':([7,18,32,33,34,36,56,57,58,],[-61,34,-30,-29,-28,58,-27,-26,-25,]),'DTEXT':([7,52,66,67,68,70,71,],[-61,67,-36,-35,71,-34,-33,]),'DQUOTE':([0,1,2,4,7,12,13,14,17,18,19,20,21,22,25,27,28,32,33,34,35,36,37,40,41,42,43,45,56,57,58,59,60,61,62,63,],[-60,-59,-58,18,-61,-56,-57,-60,-51,35,-52,18,-54,-53,-38,-55,18,-30,-29,-28,-24,59,18,-48,-47,-50,-49,-37,-27,-26,-25,-23,-44,-43,-46,-45,]),'LBRACKET':([31,],[52,]),'DOT_ATOM':([0,1,2,4,7,12,13,14,17,19,20,21,22,25,27,28,31,35,37,40,41,42,43,45,59,60,61,62,63,],[-60,-59,-58,19,-61,-56,-57,-60,-51,-52,40,-54,-53,-38,-55,47,53,-24,60,-48,-47,-50,-49,-37,-23,-44,-43,-46,-45,]),'RPAREN':([7,11,23,24,26,44,46,],[-61,25,-42,45,-41,-40,-39,]),'AT':([16,17,19,21,35,47,48,49,59,],[31,-18,-17,-19,-24,-17,-18,-19,-23,]),'LPAREN':([0,1,7,14,15,17,19,20,21,22,35,37,40,41,42,43,50,51,53,54,55,59,60,61,62,63,69,72,],[11,11,-61,11,11,-51,-52,11,-54,-53,-24,11,-48,-47,-50,-49,11,11,-20,-21,-22,-23,-44,-43,-46,-45,-32,-31,]),'ATOM':([0,1,2,4,7,12,13,14,17,19,20,21,22,25,27,28,31,35,37,40,41,42,43,45,59,60,61,62,63,],[-60,-59,-58,17,-61,-56,-57,-60,-51,-52,41,-54,-53,-38,-55,48,54,-24,61,-48,-47,-50,-49,-37,-23,-44,-43,-46,-45,]),'RANGLE':([1,2,7,12,13,25,27,29,45,51,53,54,55,65,69,72,],[-59,-58,-61,-56,-57,-38,-55,50,-37,-60,-20,-21,-22,-1
6,-32,-31,]),'RBRACKET':([7,52,66,67,68,70,71,],[-61,69,-36,-35,72,-34,-33,]),'CTEXT':([7,11,23,24,26,44,46,],[-61,26,-42,46,-41,-40,-39,]),'DOT':([0,1,2,4,7,12,13,17,19,20,21,22,25,27,35,37,40,41,42,43,45,59,60,61,62,63,],[-60,-59,-58,22,-61,-56,-57,-51,-52,43,-54,-53,-38,-55,-24,63,-48,-47,-50,-49,-37,-23,-44,-43,-46,-45,]),'$end':([1,2,3,5,6,7,8,9,10,12,13,15,25,27,30,39,45,50,51,53,54,55,64,65,69,72,],[-59,-58,-13,-12,0,-61,-8,-9,-11,-56,-57,-60,-38,-55,-10,-14,-37,-60,-60,-20,-21,-22,-15,-16,-32,-31,]),}
# Expand the compressed ACTION table.  _lr_action_items maps each token to a
# pair of parallel lists (states, actions); invert it into the state-major
# form the parser consults at run time: _lr_action[state][token] = action.
_lr_action = {}
for _k, _v in _lr_action_items.items():
   for _x,_y in zip(_v[0],_v[1]):
      # _x = parser state, _y = action code for token _k in that state
      if not _x in _lr_action: _lr_action[_x] = {}
      _lr_action[_x][_k] = _y
# The compressed form is not needed once the expansion is done.
del _lr_action_items
_lr_goto_items = {'fwsp':([0,2,11,12,14,15,18,20,24,36,50,51,52,68,],[1,13,23,27,1,1,32,37,44,56,1,1,66,70,]),'comment':([0,1,14,15,20,37,50,51,],[2,12,2,2,2,12,2,2,]),'domain':([31,],[51,]),'comment_text':([11,],[24,]),'name_addr':([0,],[3,]),'ofwsp':([0,14,15,20,50,51,],[4,28,30,38,64,65,]),'angle_addr':([0,20,],[5,39,]),'mailbox_or_url':([0,],[6,]),'local_part':([4,28,],[16,16,]),'domain_literal_text':([52,],[68,]),'mailbox':([0,],[8,]),'quoted_string_text':([18,],[36,]),'url':([0,],[9,]),'addr_spec':([0,14,],[10,29,]),'phrase':([4,],[20,]),'quoted_string':([4,20,28,37,],[21,42,49,62,]),'domain_literal':([31,],[55,]),}
# Expand the compressed GOTO table the same way as the ACTION table above:
# token-major (nonterminal -> (states, targets)) becomes state-major
# _lr_goto[state][nonterminal] = target state.
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
    for _x, _y in zip(_v[0], _v[1]):
       # _x = parser state, _y = goto target for nonterminal _k
       if not _x in _lr_goto: _lr_goto[_x] = {}
       _lr_goto[_x][_k] = _y
# The compressed form is not needed once the expansion is done.
del _lr_goto_items
_lr_productions = [
("S' -> mailbox_or_url","S'",1,None,None,None),
('mailbox_or_url_list -> mailbox_or_url_list delim mailbox_or_url','mailbox_or_url_list',3,'p_expression_mailbox_or_url_list','parser.py',19),
('mailbox_or_url_list -> mailbox_or_url_list delim','mailbox_or_url_list',2,'p_expression_mailbox_or_url_list','parser.py',20),
('mailbox_or_url_list -> mailbox_or_url','mailbox_or_url_list',1,'p_expression_mailbox_or_url_list','parser.py',21),
('delim -> delim fwsp COMMA','delim',3,'p_delim','parser.py',30),
('delim -> delim fwsp SEMICOLON','delim',3,'p_delim','parser.py',31),
('delim -> COMMA','delim',1,'p_delim','parser.py',32),
('delim -> SEMICOLON','delim',1,'p_delim','parser.py',33),
('mailbox_or_url -> mailbox','mailbox_or_url',1,'p_expression_mailbox_or_url','parser.py',36),
('mailbox_or_url -> url','mailbox_or_url',1,'p_expression_mailbox_or_url','parser.py',37),
('url -> ofwsp URL ofwsp','url',3,'p_expression_url','parser.py',41),
('mailbox -> addr_spec','mailbox',1,'p_expression_mailbox','parser.py',45),
('mailbox -> angle_addr','mailbox',1,'p_expression_mailbox','parser.py',46),
('mailbox -> name_addr','mailbox',1,'p_expression_mailbox','parser.py',47),
('name_addr -> ofwsp phrase angle_addr','name_addr',3,'p_expression_name_addr','parser.py',51),
('angle_addr -> ofwsp LANGLE addr_spec RANGLE ofwsp','angle_addr',5,'p_expression_angle_addr','parser.py',55),
('addr_spec -> ofwsp local_part AT domain ofwsp','addr_spec',5,'p_expression_addr_spec','parser.py',59),
('local_part -> DOT_ATOM','local_part',1,'p_expression_local_part','parser.py',63),
('local_part -> ATOM','local_part',1,'p_expression_local_part','parser.py',64),
('local_part -> quoted_string','local_part',1,'p_expression_local_part','parser.py',65),
('domain -> DOT_ATOM','domain',1,'p_expression_domain','parser.py',69),
('domain -> ATOM','domain',1,'p_expression_domain','parser.py',70),
('domain -> domain_literal','domain',1,'p_expression_domain','parser.py',71),
('quoted_string -> DQUOTE quoted_string_text DQUOTE','quoted_string',3,'p_expression_quoted_string','parser.py',75),
('quoted_string -> DQUOTE DQUOTE','quoted_string',2,'p_expression_quoted_string','parser.py',76),
('quoted_string_text -> quoted_string_text QTEXT','quoted_string_text',2,'p_expression_quoted_string_text','parser.py',83),
('quoted_string_text -> quoted_string_text QPAIR','quoted_string_text',2,'p_expression_quoted_string_text','parser.py',84),
('quoted_string_text -> quoted_string_text fwsp','quoted_string_text',2,'p_expression_quoted_string_text','parser.py',85),
('quoted_string_text -> QTEXT','quoted_string_text',1,'p_expression_quoted_string_text','parser.py',86),
('quoted_string_text -> QPAIR','quoted_string_text',1,'p_expression_quoted_string_text','parser.py',87),
('quoted_string_text -> fwsp','quoted_string_text',1,'p_expression_quoted_string_text','parser.py',88),
('domain_literal -> LBRACKET domain_literal_text RBRACKET','domain_literal',3,'p_expression_domain_literal','parser.py',92),
('domain_literal -> LBRACKET RBRACKET','domain_literal',2,'p_expression_domain_literal','parser.py',93),
('domain_literal_text -> domain_literal_text DTEXT','domain_literal_text',2,'p_expression_domain_literal_text','parser.py',100),
('domain_literal_text -> domain_literal_text fwsp','domain_literal_text',2,'p_expression_domain_literal_text','parser.py',101),
('domain_literal_text -> DTEXT','domain_literal_text',1,'p_expression_domain_literal_text','parser.py',102),
('domain_literal_text -> fwsp','domain_literal_text',1,'p_expression_domain_literal_text','parser.py',103),
('comment -> LPAREN comment_text RPAREN','comment',3,'p_expression_comment','parser.py',107),
('comment -> LPAREN RPAREN','comment',2,'p_expression_comment','parser.py',108),
('comment_text -> comment_text CTEXT','comment_text',2,'p_expression_comment_text','parser.py',112),
('comment_text -> comment_text fwsp','comment_text',2,'p_expression_comment_text','parser.py',113),
('comment_text -> CTEXT','comment_text',1,'p_expression_comment_text','parser.py',114),
('comment_text -> fwsp','comment_text',1,'p_expression_comment_text','parser.py',115),
('phrase -> phrase fwsp ATOM','phrase',3,'p_expression_phrase','parser.py',119),
('phrase -> phrase fwsp DOT_ATOM','phrase',3,'p_expression_phrase','parser.py',120),
('phrase -> phrase fwsp DOT','phrase',3,'p_expression_phrase','parser.py',121),
('phrase -> phrase fwsp quoted_string','phrase',3,'p_expression_phrase','parser.py',122),
('phrase -> phrase ATOM','phrase',2,'p_expression_phrase','parser.py',123),
('phrase -> phrase DOT_ATOM','phrase',2,'p_expression_phrase','parser.py',124),
('phrase -> phrase DOT','phrase',2,'p_expression_phrase','parser.py',125),
('phrase -> phrase quoted_string','phrase',2,'p_expression_phrase','parser.py',126),
('phrase -> ATOM','phrase',1,'p_expression_phrase','parser.py',127),
('phrase -> DOT_ATOM','phrase',1,'p_expression_phrase','parser.py',128),
('phrase -> DOT','phrase',1,'p_expression_phrase','parser.py',129),
('phrase -> quoted_string','phrase',1,'p_expression_phrase','parser.py',130),
('ofwsp -> fwsp comment fwsp','ofwsp',3,'p_expression_ofwsp','parser.py',139),
('ofwsp -> fwsp comment','ofwsp',2,'p_expression_ofwsp','parser.py',140),
('ofwsp -> comment fwsp','ofwsp',2,'p_expression_ofwsp','parser.py',141),
('ofwsp -> comment','ofwsp',1,'p_expression_ofwsp','parser.py',142),
('ofwsp -> fwsp','ofwsp',1,'p_expression_ofwsp','parser.py',143),
('ofwsp -> <empty>','ofwsp',0,'p_expression_ofwsp','parser.py',144),
('fwsp -> FWSP','fwsp',1,'p_expression_fwsp','parser.py',148),
]
| 126.758242 | 2,514 | 0.638231 |
_tabversion = '3.10'
_lr_method = 'LALR'
_lr_signature = 'mailbox_or_urlFWSP AT DOT COMMA SEMICOLON LANGLE RANGLE ATOM DOT_ATOM LBRACKET RBRACKET DTEXT DQUOTE QTEXT QPAIR LPAREN RPAREN CTEXT URLmailbox_or_url_list : mailbox_or_url_list delim mailbox_or_url\n | mailbox_or_url_list delim\n | mailbox_or_urldelim : delim fwsp COMMA\n | delim fwsp SEMICOLON\n | COMMA\n | SEMICOLONmailbox_or_url : mailbox\n | urlurl : ofwsp URL ofwspmailbox : addr_spec\n | angle_addr\n | name_addrname_addr : ofwsp phrase angle_addrangle_addr : ofwsp LANGLE addr_spec RANGLE ofwspaddr_spec : ofwsp local_part AT domain ofwsplocal_part : DOT_ATOM\n | ATOM\n | quoted_stringdomain : DOT_ATOM\n | ATOM\n | domain_literalquoted_string : DQUOTE quoted_string_text DQUOTE\n | DQUOTE DQUOTEquoted_string_text : quoted_string_text QTEXT\n | quoted_string_text QPAIR\n | quoted_string_text fwsp\n | QTEXT\n | QPAIR\n | fwspdomain_literal : LBRACKET domain_literal_text RBRACKET\n | LBRACKET RBRACKETdomain_literal_text : domain_literal_text DTEXT\n | domain_literal_text fwsp\n | DTEXT\n | fwspcomment : LPAREN comment_text RPAREN\n | LPAREN RPARENcomment_text : comment_text CTEXT\n | comment_text fwsp\n | CTEXT\n | fwspphrase : phrase fwsp ATOM\n | phrase fwsp DOT_ATOM\n | phrase fwsp DOT\n | phrase fwsp quoted_string\n | phrase ATOM\n | phrase DOT_ATOM\n | phrase DOT\n | phrase quoted_string\n | ATOM\n | DOT_ATOM\n | DOT\n | quoted_stringofwsp : fwsp comment fwsp\n | fwsp comment\n | comment fwsp\n | comment\n | fwsp\n |fwsp : FWSP'
_lr_action_items = {'FWSP':([0,2,7,11,12,14,15,17,18,19,20,21,22,23,24,25,26,32,33,34,35,36,40,41,42,43,44,45,46,50,51,52,53,54,55,56,57,58,59,60,61,62,63,66,67,68,69,70,71,72,],[7,7,-61,7,7,7,7,-51,7,-52,7,-54,-53,-42,7,-38,-41,-30,-29,-28,-24,7,-48,-47,-50,-49,-40,-37,-39,7,7,7,-20,-21,-22,-27,-26,-25,-23,-44,-43,-46,-45,-36,-35,7,-32,-34,-33,-31,]),'LANGLE':([0,1,2,4,7,12,13,17,19,20,21,22,25,27,35,37,38,40,41,42,43,45,59,60,61,62,63,],[-60,-59,-58,14,-61,-56,-57,-51,-52,-60,-54,-53,-38,-55,-24,-59,14,-48,-47,-50,-49,-37,-23,-44,-43,-46,-45,]),'QPAIR':([7,18,32,33,34,36,56,57,58,],[-61,33,-30,-29,-28,57,-27,-26,-25,]),'URL':([0,1,2,4,7,12,13,25,27,45,],[-60,-59,-58,15,-61,-56,-57,-38,-55,-37,]),'QTEXT':([7,18,32,33,34,36,56,57,58,],[-61,34,-30,-29,-28,58,-27,-26,-25,]),'DTEXT':([7,52,66,67,68,70,71,],[-61,67,-36,-35,71,-34,-33,]),'DQUOTE':([0,1,2,4,7,12,13,14,17,18,19,20,21,22,25,27,28,32,33,34,35,36,37,40,41,42,43,45,56,57,58,59,60,61,62,63,],[-60,-59,-58,18,-61,-56,-57,-60,-51,35,-52,18,-54,-53,-38,-55,18,-30,-29,-28,-24,59,18,-48,-47,-50,-49,-37,-27,-26,-25,-23,-44,-43,-46,-45,]),'LBRACKET':([31,],[52,]),'DOT_ATOM':([0,1,2,4,7,12,13,14,17,19,20,21,22,25,27,28,31,35,37,40,41,42,43,45,59,60,61,62,63,],[-60,-59,-58,19,-61,-56,-57,-60,-51,-52,40,-54,-53,-38,-55,47,53,-24,60,-48,-47,-50,-49,-37,-23,-44,-43,-46,-45,]),'RPAREN':([7,11,23,24,26,44,46,],[-61,25,-42,45,-41,-40,-39,]),'AT':([16,17,19,21,35,47,48,49,59,],[31,-18,-17,-19,-24,-17,-18,-19,-23,]),'LPAREN':([0,1,7,14,15,17,19,20,21,22,35,37,40,41,42,43,50,51,53,54,55,59,60,61,62,63,69,72,],[11,11,-61,11,11,-51,-52,11,-54,-53,-24,11,-48,-47,-50,-49,11,11,-20,-21,-22,-23,-44,-43,-46,-45,-32,-31,]),'ATOM':([0,1,2,4,7,12,13,14,17,19,20,21,22,25,27,28,31,35,37,40,41,42,43,45,59,60,61,62,63,],[-60,-59,-58,17,-61,-56,-57,-60,-51,-52,41,-54,-53,-38,-55,48,54,-24,61,-48,-47,-50,-49,-37,-23,-44,-43,-46,-45,]),'RANGLE':([1,2,7,12,13,25,27,29,45,51,53,54,55,65,69,72,],[-59,-58,-61,-56,-57,-38,-55,50,-37,-60,-20,-21,-22,-1
6,-32,-31,]),'RBRACKET':([7,52,66,67,68,70,71,],[-61,69,-36,-35,72,-34,-33,]),'CTEXT':([7,11,23,24,26,44,46,],[-61,26,-42,46,-41,-40,-39,]),'DOT':([0,1,2,4,7,12,13,17,19,20,21,22,25,27,35,37,40,41,42,43,45,59,60,61,62,63,],[-60,-59,-58,22,-61,-56,-57,-51,-52,43,-54,-53,-38,-55,-24,63,-48,-47,-50,-49,-37,-23,-44,-43,-46,-45,]),'$end':([1,2,3,5,6,7,8,9,10,12,13,15,25,27,30,39,45,50,51,53,54,55,64,65,69,72,],[-59,-58,-13,-12,0,-61,-8,-9,-11,-56,-57,-60,-38,-55,-10,-14,-37,-60,-60,-20,-21,-22,-15,-16,-32,-31,]),}
_lr_action = {}
for _k, _v in _lr_action_items.items():
for _x,_y in zip(_v[0],_v[1]):
if not _x in _lr_action: _lr_action[_x] = {}
_lr_action[_x][_k] = _y
del _lr_action_items
_lr_goto_items = {'fwsp':([0,2,11,12,14,15,18,20,24,36,50,51,52,68,],[1,13,23,27,1,1,32,37,44,56,1,1,66,70,]),'comment':([0,1,14,15,20,37,50,51,],[2,12,2,2,2,12,2,2,]),'domain':([31,],[51,]),'comment_text':([11,],[24,]),'name_addr':([0,],[3,]),'ofwsp':([0,14,15,20,50,51,],[4,28,30,38,64,65,]),'angle_addr':([0,20,],[5,39,]),'mailbox_or_url':([0,],[6,]),'local_part':([4,28,],[16,16,]),'domain_literal_text':([52,],[68,]),'mailbox':([0,],[8,]),'quoted_string_text':([18,],[36,]),'url':([0,],[9,]),'addr_spec':([0,14,],[10,29,]),'phrase':([4,],[20,]),'quoted_string':([4,20,28,37,],[21,42,49,62,]),'domain_literal':([31,],[55,]),}
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
for _x, _y in zip(_v[0], _v[1]):
if not _x in _lr_goto: _lr_goto[_x] = {}
_lr_goto[_x][_k] = _y
del _lr_goto_items
_lr_productions = [
("S' -> mailbox_or_url","S'",1,None,None,None),
('mailbox_or_url_list -> mailbox_or_url_list delim mailbox_or_url','mailbox_or_url_list',3,'p_expression_mailbox_or_url_list','parser.py',19),
('mailbox_or_url_list -> mailbox_or_url_list delim','mailbox_or_url_list',2,'p_expression_mailbox_or_url_list','parser.py',20),
('mailbox_or_url_list -> mailbox_or_url','mailbox_or_url_list',1,'p_expression_mailbox_or_url_list','parser.py',21),
('delim -> delim fwsp COMMA','delim',3,'p_delim','parser.py',30),
('delim -> delim fwsp SEMICOLON','delim',3,'p_delim','parser.py',31),
('delim -> COMMA','delim',1,'p_delim','parser.py',32),
('delim -> SEMICOLON','delim',1,'p_delim','parser.py',33),
('mailbox_or_url -> mailbox','mailbox_or_url',1,'p_expression_mailbox_or_url','parser.py',36),
('mailbox_or_url -> url','mailbox_or_url',1,'p_expression_mailbox_or_url','parser.py',37),
('url -> ofwsp URL ofwsp','url',3,'p_expression_url','parser.py',41),
('mailbox -> addr_spec','mailbox',1,'p_expression_mailbox','parser.py',45),
('mailbox -> angle_addr','mailbox',1,'p_expression_mailbox','parser.py',46),
('mailbox -> name_addr','mailbox',1,'p_expression_mailbox','parser.py',47),
('name_addr -> ofwsp phrase angle_addr','name_addr',3,'p_expression_name_addr','parser.py',51),
('angle_addr -> ofwsp LANGLE addr_spec RANGLE ofwsp','angle_addr',5,'p_expression_angle_addr','parser.py',55),
('addr_spec -> ofwsp local_part AT domain ofwsp','addr_spec',5,'p_expression_addr_spec','parser.py',59),
('local_part -> DOT_ATOM','local_part',1,'p_expression_local_part','parser.py',63),
('local_part -> ATOM','local_part',1,'p_expression_local_part','parser.py',64),
('local_part -> quoted_string','local_part',1,'p_expression_local_part','parser.py',65),
('domain -> DOT_ATOM','domain',1,'p_expression_domain','parser.py',69),
('domain -> ATOM','domain',1,'p_expression_domain','parser.py',70),
('domain -> domain_literal','domain',1,'p_expression_domain','parser.py',71),
('quoted_string -> DQUOTE quoted_string_text DQUOTE','quoted_string',3,'p_expression_quoted_string','parser.py',75),
('quoted_string -> DQUOTE DQUOTE','quoted_string',2,'p_expression_quoted_string','parser.py',76),
('quoted_string_text -> quoted_string_text QTEXT','quoted_string_text',2,'p_expression_quoted_string_text','parser.py',83),
('quoted_string_text -> quoted_string_text QPAIR','quoted_string_text',2,'p_expression_quoted_string_text','parser.py',84),
('quoted_string_text -> quoted_string_text fwsp','quoted_string_text',2,'p_expression_quoted_string_text','parser.py',85),
('quoted_string_text -> QTEXT','quoted_string_text',1,'p_expression_quoted_string_text','parser.py',86),
('quoted_string_text -> QPAIR','quoted_string_text',1,'p_expression_quoted_string_text','parser.py',87),
('quoted_string_text -> fwsp','quoted_string_text',1,'p_expression_quoted_string_text','parser.py',88),
('domain_literal -> LBRACKET domain_literal_text RBRACKET','domain_literal',3,'p_expression_domain_literal','parser.py',92),
('domain_literal -> LBRACKET RBRACKET','domain_literal',2,'p_expression_domain_literal','parser.py',93),
('domain_literal_text -> domain_literal_text DTEXT','domain_literal_text',2,'p_expression_domain_literal_text','parser.py',100),
('domain_literal_text -> domain_literal_text fwsp','domain_literal_text',2,'p_expression_domain_literal_text','parser.py',101),
('domain_literal_text -> DTEXT','domain_literal_text',1,'p_expression_domain_literal_text','parser.py',102),
('domain_literal_text -> fwsp','domain_literal_text',1,'p_expression_domain_literal_text','parser.py',103),
('comment -> LPAREN comment_text RPAREN','comment',3,'p_expression_comment','parser.py',107),
('comment -> LPAREN RPAREN','comment',2,'p_expression_comment','parser.py',108),
('comment_text -> comment_text CTEXT','comment_text',2,'p_expression_comment_text','parser.py',112),
('comment_text -> comment_text fwsp','comment_text',2,'p_expression_comment_text','parser.py',113),
('comment_text -> CTEXT','comment_text',1,'p_expression_comment_text','parser.py',114),
('comment_text -> fwsp','comment_text',1,'p_expression_comment_text','parser.py',115),
('phrase -> phrase fwsp ATOM','phrase',3,'p_expression_phrase','parser.py',119),
('phrase -> phrase fwsp DOT_ATOM','phrase',3,'p_expression_phrase','parser.py',120),
('phrase -> phrase fwsp DOT','phrase',3,'p_expression_phrase','parser.py',121),
('phrase -> phrase fwsp quoted_string','phrase',3,'p_expression_phrase','parser.py',122),
('phrase -> phrase ATOM','phrase',2,'p_expression_phrase','parser.py',123),
('phrase -> phrase DOT_ATOM','phrase',2,'p_expression_phrase','parser.py',124),
('phrase -> phrase DOT','phrase',2,'p_expression_phrase','parser.py',125),
('phrase -> phrase quoted_string','phrase',2,'p_expression_phrase','parser.py',126),
('phrase -> ATOM','phrase',1,'p_expression_phrase','parser.py',127),
('phrase -> DOT_ATOM','phrase',1,'p_expression_phrase','parser.py',128),
('phrase -> DOT','phrase',1,'p_expression_phrase','parser.py',129),
('phrase -> quoted_string','phrase',1,'p_expression_phrase','parser.py',130),
('ofwsp -> fwsp comment fwsp','ofwsp',3,'p_expression_ofwsp','parser.py',139),
('ofwsp -> fwsp comment','ofwsp',2,'p_expression_ofwsp','parser.py',140),
('ofwsp -> comment fwsp','ofwsp',2,'p_expression_ofwsp','parser.py',141),
('ofwsp -> comment','ofwsp',1,'p_expression_ofwsp','parser.py',142),
('ofwsp -> fwsp','ofwsp',1,'p_expression_ofwsp','parser.py',143),
('ofwsp -> <empty>','ofwsp',0,'p_expression_ofwsp','parser.py',144),
('fwsp -> FWSP','fwsp',1,'p_expression_fwsp','parser.py',148),
]
| true | true |
f738e78252750acaa726dff41bb06d29194b1d5d | 1,556 | py | Python | 2018/7/7.py | kristianwiklund/AOC2019 | a98affaccd53ca4ea2d3a8c3fa125680f1e8cc08 | [
"MIT"
] | 3 | 2020-12-02T18:18:05.000Z | 2021-12-03T18:39:26.000Z | 2018/7/7.py | kristianwiklund/AOC2019 | a98affaccd53ca4ea2d3a8c3fa125680f1e8cc08 | [
"MIT"
] | null | null | null | 2018/7/7.py | kristianwiklund/AOC2019 | a98affaccd53ca4ea2d3a8c3fa125680f1e8cc08 | [
"MIT"
] | null | null | null | import networkx as nx
import matplotlib.pyplot as plt
# --- Part A: build the step-dependency graph --------------------------------
# Input lines look like "Step C must be finished before step A can begin.";
# whitespace token 1 is the prerequisite step, token 7 the dependent step.
G = nx.DiGraph()
#with open ("shortinput.txt") as fd:
with open ("input.txt") as fd:
    for line in fd:
        x = line.split(" ")
        before = x[1]
        after = x[7]
        # weight = 1-based letter index of the dependent step (A=1 ... Z=26);
        # stored on the edge but never read again below.
        G.add_edge(before, after, weight=ord(after)-64)
# Render the dependency graph for inspection.
nx.draw(G, with_labels=True)
plt.savefig("maze.png")
# The lexicographically smallest topological order is exactly the required
# assembly order (answer to part A).
helalistan=list(nx.lexicographical_topological_sort(G))
print("7A :"+"".join(helalistan))
# ---------------------
#ACHOQRXSEKUGMYIWDZLNBFTJVP
# --- Part B: simulate parallel workers --------------------------------------
# Per tick, each worker whose timer ran out first finishes its step (removing
# the node, which unblocks its successors) and then pulls the first step in
# topological order that nobody is working on and has no remaining
# predecessors.  The timer is initialized to 60 + (0-based letter index).
# NOTE(review): range(0,6) simulates 6 workers while AoC 2018 day 7 specifies
# 5 (and `workers`/`doing` are padded to 10/9 slots) -- confirm against the
# accepted answer.
time=0
workers = [0,0,0,0,0,0,0,0,0,0]
doing = [None, None,None,None,None,None,None,None,None]
while list(G.nodes()) != []:
    for i in range(0,6):
        if workers[i] <= 0:
            # finish what was done, then pull something
            if doing[i]:
                # print ("Worker "+str(i)+" is done with "+doing[i])
                G.remove_node(doing[i])
                doing[i] = None
            for j in helalistan:
                #print ("Trying to pull node "+j)
                if not j in doing:
                    #print ("Nobody is working on "+j)
                    if G.has_node(j) and list(G.predecessors(j)) == []:
                        # print ("Worker "+str(i)+" pulls node "+j)
                        doing[i] = j
                        workers[i] = 60+ord(j)-65
                        break
        else:
            workers[i] = workers[i] - 1
    # print("Tick: "+str(time) + " working on "+str(doing))
    time=time+1
# The loop needs one extra iteration just to observe that the last step
# finished (removal happens at the start of the next tick), hence time-1.
print("Total time for assembly: "+str(time-1))
| 24.3125 | 71 | 0.492288 | import networkx as nx
import matplotlib.pyplot as plt
G = nx.DiGraph()
with open ("input.txt") as fd:
for line in fd:
x = line.split(" ")
before = x[1]
after = x[7]
G.add_edge(before, after, weight=ord(after)-64)
nx.draw(G, with_labels=True)
plt.savefig("maze.png")
helalistan=list(nx.lexicographical_topological_sort(G))
print("7A :"+"".join(helalistan))
time=0
workers = [0,0,0,0,0,0,0,0,0,0]
doing = [None, None,None,None,None,None,None,None,None]
while list(G.nodes()) != []:
for i in range(0,6):
if workers[i] <= 0:
if doing[i]:
G.remove_node(doing[i])
doing[i] = None
for j in helalistan:
if not j in doing:
if G.has_node(j) and list(G.predecessors(j)) == []:
doing[i] = j
workers[i] = 60+ord(j)-65
break
else:
workers[i] = workers[i] - 1
time=time+1
print("Total time for assembly: "+str(time-1))
| true | true |
f738e7edc1e0b3c55014c9a208313ddbb2ef668a | 1,812 | py | Python | api/main.py | MineiToshio/cryptongo | 3cb1eddf45bf87a47a32597f7fa66eb8ac932f4a | [
"MIT"
] | 2 | 2019-09-25T18:13:34.000Z | 2020-02-13T16:54:13.000Z | api/main.py | MineiToshio/cryptongo | 3cb1eddf45bf87a47a32597f7fa66eb8ac932f4a | [
"MIT"
] | null | null | null | api/main.py | MineiToshio/cryptongo | 3cb1eddf45bf87a47a32597f7fa66eb8ac932f4a | [
"MIT"
] | null | null | null | import pymongo
from flask import Flask, jsonify, request
def get_db_connection(uri):
    """Return the ``cryptongo`` database handle for the given MongoDB URI."""
    return pymongo.MongoClient(uri).cryptongo
app = Flask(__name__)
# Module-level handle shared by every request handler below; assumes a
# MongoDB instance listening on localhost's default port.
db_connection = get_db_connection('mongodb://localhost:27017/')
def get_documents():
    """List tickers filtered by the request's ``name``/``limit`` query args.

    ``limit=0`` (the default) means no limit; ``_id`` and ``ticker_hash``
    are stripped from the projection.
    """
    query = {}
    name = request.args.get('name', '')
    limit = int(request.args.get('limit', 0))
    if name:
        query['name'] = name
    docs = db_connection.tickers.find(query, {'_id': 0, 'ticker_hash': 0}).limit(limit)
    return list(docs)
def get_top20():
    """List tickers ranked 20 or better, honouring ``name``/``limit`` args."""
    query = {}
    name = request.args.get('name', '')
    limit = int(request.args.get('limit', 0))
    if name:
        query['name'] = name
    # Restrict to the top of the ranking regardless of the name filter.
    query['rank'] = {'$lte': 20}
    docs = db_connection.tickers.find(query, {'_id': 0, 'ticker_hash': 0}).limit(limit)
    return list(docs)
def remove_currency():
    """Delete every ticker whose name matches the request's ``name`` arg.

    Returns the number of deleted documents. When no ``name`` was supplied
    nothing is deleted and 0 is returned (the original returned ``False``
    here, mixing bool and int return types; 0 keeps the caller's
    ``result > 0`` check behaving identically while making the return type
    consistently numeric).
    """
    name = request.args.get('name', '')
    if not name:
        return 0
    return db_connection.tickers.delete_many({'name': name}).deleted_count
# Run with: FLASK_APP=main.py flask run
@app.route("/")
def index():
return jsonify(
{
'name': 'Cryptongo API'
}
)
@app.route('/tickers', methods=['GET', 'DELETE'])
def tickers():
    """List tickers (GET) or delete tickers matching ``name`` (DELETE)."""
    if request.method == 'DELETE':
        deleted = remove_currency()
        if deleted > 0:
            return jsonify({
                'text': 'Documentos eliminados'
            }), 204
        return jsonify({
            'error': 'No se encontraron los documentos'
        }), 404
    # Only GET and DELETE are routed here, so this is the GET branch.
    return jsonify(get_documents())
@app.route("/top20", methods=['GET'])
def top20():
return jsonify(
get_top20()
)
| 25.521127 | 88 | 0.587196 | import pymongo
from flask import Flask, jsonify, request
def get_db_connection(uri):
client = pymongo.MongoClient(uri)
return client.cryptongo
app = Flask(__name__)
db_connection = get_db_connection('mongodb://localhost:27017/')
def get_documents():
params = {}
name = request.args.get('name', '')
limit = int(request.args.get('limit', 0))
if name:
params.update({'name': name})
cursor = db_connection.tickers.find(params, {'_id':0, 'ticker_hash':0}).limit(limit)
return list(cursor)
def get_top20():
params = {}
name = request.args.get('name', '')
limit = int(request.args.get('limit', 0))
if name:
params.update({'name': name})
params.update({'rank': {'$lte': 20}})
cursor = db_connection.tickers.find(
params, {'_id': 0, 'ticker_hash': 0}
).limit(limit)
return list(cursor)
def remove_currency():
params = {}
name = request.args.get('name', '')
if name:
params.update({'name': name})
else:
return False
return db_connection.tickers.delete_many(params).deleted_count
@app.route("/")
def index():
return jsonify(
{
'name': 'Cryptongo API'
}
)
@app.route('/tickers', methods=['GET', 'DELETE'])
def tickers():
if request.method == 'GET':
return jsonify(get_documents())
elif request.method == 'DELETE':
result = remove_currency()
if result > 0:
return jsonify({
'text': 'Documentos eliminados'
}), 204
else:
return jsonify({
'error': 'No se encontraron los documentos'
}), 404
@app.route("/top20", methods=['GET'])
def top20():
return jsonify(
get_top20()
)
| true | true |
f738e8009f71e7b6b6f5602de347597c0a56c06c | 1,302 | py | Python | node_modules/_npm@6.14.9@npm/node_modules/node-gyp/gyp/gyp_main.py | TT555666/cms-server | 8b8302af597615501743042163504987fb27269e | [
"MIT"
] | 1,666 | 2017-01-12T03:58:44.000Z | 2017-08-20T23:39:20.000Z | node_modules/node-gyp/gyp/gyp_main.py | BABY636-Guarda-CRUX/cli | abdf52879fcf0e0f534ad977931f6935f5d1dce3 | [
"Artistic-2.0"
] | 95 | 2017-08-21T07:38:56.000Z | 2022-02-18T21:59:54.000Z | node_modules/node-gyp/gyp/gyp_main.py | mtdev2/cli | d73879893ca7c1d0ce2b5fcb2364200955febfc0 | [
"Artistic-2.0"
] | 161 | 2017-08-25T20:20:01.000Z | 2022-02-08T02:59:03.000Z | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
PY3 = bytes != str
# Below IsCygwin() function copied from pylib/gyp/common.py
def IsCygwin():
    """Best-effort detection of a Cygwin environment via ``uname``.

    Any failure (missing binary, decode error, ...) is treated as
    "not Cygwin".
    """
    try:
        proc = subprocess.Popen("uname",
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        output, _ = proc.communicate()
        if PY3:
            output = output.decode("utf-8")
        return "CYGWIN" in str(output)
    except Exception:
        return False
def UnixifyPath(path):
    """Translate *path* to a Unix-style path via ``cygpath`` on Cygwin.

    On non-Cygwin hosts, or on any error, the input is returned unchanged.
    """
    try:
        if not IsCygwin():
            return path
        proc = subprocess.Popen(["cygpath", "-u", path],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        output, _ = proc.communicate()
        if PY3:
            output = output.decode("utf-8")
        return str(output)
    except Exception:
        return path
# Make sure we're using the version of pylib in this repo, not one installed
# elsewhere on the system. Also convert to Unix style path on Cygwin systems,
# else the 'gyp' library will not be found
path = UnixifyPath(sys.argv[0])
sys.path.insert(0, os.path.join(os.path.dirname(path), 'pylib'))
import gyp

# Entry point: delegate to gyp's main and propagate its exit status.
if __name__ == '__main__':
  sys.exit(gyp.script_main())
| 25.529412 | 77 | 0.672811 |
import os
import sys
import subprocess
PY3 = bytes != str
def IsCygwin():
try:
out = subprocess.Popen("uname",
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, stderr = out.communicate()
if PY3:
stdout = stdout.decode("utf-8")
return "CYGWIN" in str(stdout)
except Exception:
return False
def UnixifyPath(path):
try:
if not IsCygwin():
return path
out = subprocess.Popen(["cygpath", "-u", path],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
stdout, _ = out.communicate()
if PY3:
stdout = stdout.decode("utf-8")
return str(stdout)
except Exception:
return path
# elsewhere on the system. Also convert to Unix style path on Cygwin systems,
# else the 'gyp' library will not be found
path = UnixifyPath(sys.argv[0])
sys.path.insert(0, os.path.join(os.path.dirname(path), 'pylib'))
import gyp
if __name__ == '__main__':
sys.exit(gyp.script_main())
| true | true |
f738e865f19a3185e155cb3d9afeef9554d24a04 | 5,739 | py | Python | data/anthro.py | WadhwaniAI/ESTRNN | 4af8d53b0ebb1655c40aaf4f6950904580a34aa2 | [
"MIT"
] | null | null | null | data/anthro.py | WadhwaniAI/ESTRNN | 4af8d53b0ebb1655c40aaf4f6950904580a34aa2 | [
"MIT"
] | null | null | null | data/anthro.py | WadhwaniAI/ESTRNN | 4af8d53b0ebb1655c40aaf4f6950904580a34aa2 | [
"MIT"
] | null | null | null | import os
import random
from os.path import join, basename, dirname
import cv2
import numpy as np
import torch
from glob import glob
import ipdb
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms
from utils import normalize, Crop, Flip, ToTensor
class AnthroDeblurDataset(Dataset):
    """Sliding-window dataset over one anthro video sequence.

    Structure of self_.records:
        seq:
            frame:
                path of images -> {'Blur': <path>, 'Sharp': <path>}

    NOTE(review): 'Blur' and 'Sharp' point at the same .jpg here, so the
    "sharp" target equals the blurred input for this dataset.
    """
    def __init__(self, path, frames, future_frames, past_frames, crop_size=(256, 256), data_format='RGB',
                 centralize=True, normalize=True):
        # At least one output frame must remain after dropping the
        # past/future context frames from each window.
        assert frames - future_frames - past_frames >= 1
        self.frames = frames
        self.num_ff = future_frames
        self.num_pf = past_frames
        self.data_format = data_format
        self.W = None  # frame width, filled in by _generate_samples
        self.H = None  # frame height, filled in by _generate_samples
        self.crop_h, self.crop_w = crop_size
        self.normalize = normalize
        self.centralize = centralize
        self.transform = transforms.Compose([Crop(crop_size), ToTensor()])
        self._seq_length = 200  # only the first 200 frames of the sequence are used
        self._samples = self._generate_samples(path, data_format)
    def _generate_samples(self, dataset_path, data_format):
        # Build every length-`frames` sliding window over the sequence.
        samples = list()
        records = dict()
        seq = basename(dataset_path)
        records[seq] = list()
        frames = sorted(glob(join(dataset_path, '*.jpg')))
        for frame in frames[:self._seq_length]:
            sample = dict()
            sample['Blur'] = frame
            sample['Sharp'] = frame
            records[seq].append(sample)
        # Frame size is read from the LAST frame of the loop above.
        # NOTE(review): raises NameError if the directory has no .jpg files
        # (`frame` would be unbound) — confirm callers guarantee frames.
        self.H, self.W, _ = cv2.imread(frame).shape
        for seq_records in records.values():
            temp_length = len(seq_records) - (self.frames - 1)
            if temp_length <= 0:
                raise IndexError('Exceed the maximum length of the video sequence')
            for idx in range(temp_length):
                samples.append(seq_records[idx:idx + self.frames])
        return samples
    def __getitem__(self, item):
        # One shared random crop origin and flip flags for every frame of
        # the window.  NOTE(review): the flip flags are placed in the
        # sample dict but the transform pipeline contains no Flip step,
        # so they appear unused here — confirm intent.
        top = random.randint(0, self.H - self.crop_h)
        left = random.randint(0, self.W - self.crop_w)
        flip_lr = random.randint(0, 1)
        flip_ud = random.randint(0, 1)
        sample = {'top': top, 'left': left, 'flip_lr': flip_lr, 'flip_ud': flip_ud}
        blur_imgs, sharp_imgs = [], []
        for sample_dict in self._samples[item]:
            blur_img, sharp_img = self._load_sample(sample_dict, sample)
            blur_imgs.append(blur_img)
            sharp_imgs.append(sharp_img)
        # Ground-truth frames exclude the past/future context frames.
        sharp_imgs = sharp_imgs[self.num_pf:self.frames - self.num_ff]
        return [torch.cat(item, dim=0) for item in [blur_imgs, sharp_imgs]]
    def _load_sample(self, sample_dict, sample):
        # Load one blur/sharp image pair, crop + tensorize, then scale
        # according to the centralize/normalize flags.
        if self.data_format == 'RGB':
            sample['image'] = cv2.imread(sample_dict['Blur'])
            sample['label'] = cv2.imread(sample_dict['Sharp'])
        else:
            raise NotImplementedError
        # elif self.data_format == 'RAW':
        #     sample['image'] = cv2.imread(sample_dict['Blur'], -1)[..., np.newaxis].astype(np.int32)
        #     sample['label'] = cv2.imread(sample_dict['Sharp'], -1)[..., np.newaxis].astype(np.int32)
        sample = self.transform(sample)
        # 8-bit range for RGB, 16-bit for (unimplemented) RAW.
        val_range = 2.0 ** 8 - 1 if self.data_format == 'RGB' else 2.0 ** 16 - 1
        blur_img = normalize(sample['image'], centralize=self.centralize, normalize=self.normalize, val_range=val_range)
        sharp_img = normalize(sample['label'], centralize=self.centralize, normalize=self.normalize, val_range=val_range)
        return blur_img, sharp_img
    def __len__(self):
        # Number of sliding windows, not number of frames.
        return len(self._samples)
class Dataloader:
    """Thin wrapper building a torch DataLoader for AnthroDeblurDataset.

    Supports distributed ('ddp') and data-parallel ('dp') trainer modes.
    NOTE(review): no else-branch — any other trainer_mode leaves
    self.loader/self.loader_len unset and later use raises AttributeError.
    """
    def __init__(self, para, device_id, ds_type='train'):
        # `ds_type` is currently unused by this loader.
        path = join(para.data_root, para.dataset)
        frames = para.frames
        dataset = AnthroDeblurDataset(path, frames, para.future_frames, para.past_frames, para.patch_size, para.data_format,
                                      para.centralize, para.normalize)
        gpus = para.num_gpus
        bs = para.batch_size
        ds_len = len(dataset)
        if para.trainer_mode == 'ddp':
            # Each rank sees a disjoint shard of the dataset.
            sampler = torch.utils.data.distributed.DistributedSampler(
                dataset,
                num_replicas=para.num_gpus,
                rank=device_id
            )
            self.loader = DataLoader(
                dataset=dataset,
                batch_size=para.batch_size,
                shuffle=False,
                num_workers=para.threads,
                pin_memory=True,
                sampler=sampler,
                drop_last=True
            )
            # Per-GPU length, rounded up to a whole number of batches.
            loader_len = np.ceil(ds_len / gpus)
            self.loader_len = int(np.ceil(loader_len / bs) * bs)
        elif para.trainer_mode == 'dp':
            self.loader = DataLoader(
                dataset=dataset,
                batch_size=para.batch_size,
                shuffle=True,
                num_workers=para.threads,
                pin_memory=True,
                drop_last=True
            )
            self.loader_len = int(np.ceil(ds_len / bs) * bs)
    def __iter__(self):
        return iter(self.loader)
    def __len__(self):
        # Sample count rounded up to a batch multiple, not batch count.
        return self.loader_len
# Smoke test: load one hard-coded sequence and print the first batch's
# shapes, dtypes and value range.
if __name__ == '__main__':
    from para import Parameter
    para = Parameter().args
    para.data_format = 'RGB'
    para.data_root = '/home/users/aditya/projects/ESTRNN/data/'
    para.dataset = 'anthro/358129091084785_19032020105115/video_1_baby_chessboard_ruler_4046788426114666387/'
    dataloader = Dataloader(para, 0)
    for x, y in dataloader:
        print(x.shape, y.shape)
        break
    print(x.type(), y.type())
    print(np.max(x.numpy()), np.min(x.numpy()))
    print(np.max(y.numpy()), np.min(y.numpy()))
| 35.208589 | 124 | 0.600453 | import os
import random
from os.path import join, basename, dirname
import cv2
import numpy as np
import torch
from glob import glob
import ipdb
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms
from utils import normalize, Crop, Flip, ToTensor
class AnthroDeblurDataset(Dataset):
def __init__(self, path, frames, future_frames, past_frames, crop_size=(256, 256), data_format='RGB',
centralize=True, normalize=True):
assert frames - future_frames - past_frames >= 1
self.frames = frames
self.num_ff = future_frames
self.num_pf = past_frames
self.data_format = data_format
self.W = None
self.H = None
self.crop_h, self.crop_w = crop_size
self.normalize = normalize
self.centralize = centralize
self.transform = transforms.Compose([Crop(crop_size), ToTensor()])
self._seq_length = 200
self._samples = self._generate_samples(path, data_format)
def _generate_samples(self, dataset_path, data_format):
samples = list()
records = dict()
seq = basename(dataset_path)
records[seq] = list()
frames = sorted(glob(join(dataset_path, '*.jpg')))
for frame in frames[:self._seq_length]:
sample = dict()
sample['Blur'] = frame
sample['Sharp'] = frame
records[seq].append(sample)
self.H, self.W, _ = cv2.imread(frame).shape
for seq_records in records.values():
temp_length = len(seq_records) - (self.frames - 1)
if temp_length <= 0:
raise IndexError('Exceed the maximum length of the video sequence')
for idx in range(temp_length):
samples.append(seq_records[idx:idx + self.frames])
return samples
def __getitem__(self, item):
top = random.randint(0, self.H - self.crop_h)
left = random.randint(0, self.W - self.crop_w)
flip_lr = random.randint(0, 1)
flip_ud = random.randint(0, 1)
sample = {'top': top, 'left': left, 'flip_lr': flip_lr, 'flip_ud': flip_ud}
blur_imgs, sharp_imgs = [], []
for sample_dict in self._samples[item]:
blur_img, sharp_img = self._load_sample(sample_dict, sample)
blur_imgs.append(blur_img)
sharp_imgs.append(sharp_img)
sharp_imgs = sharp_imgs[self.num_pf:self.frames - self.num_ff]
return [torch.cat(item, dim=0) for item in [blur_imgs, sharp_imgs]]
def _load_sample(self, sample_dict, sample):
if self.data_format == 'RGB':
sample['image'] = cv2.imread(sample_dict['Blur'])
sample['label'] = cv2.imread(sample_dict['Sharp'])
else:
raise NotImplementedError
sample = self.transform(sample)
val_range = 2.0 ** 8 - 1 if self.data_format == 'RGB' else 2.0 ** 16 - 1
blur_img = normalize(sample['image'], centralize=self.centralize, normalize=self.normalize, val_range=val_range)
sharp_img = normalize(sample['label'], centralize=self.centralize, normalize=self.normalize, val_range=val_range)
return blur_img, sharp_img
def __len__(self):
return len(self._samples)
class Dataloader:
def __init__(self, para, device_id, ds_type='train'):
path = join(para.data_root, para.dataset)
frames = para.frames
dataset = AnthroDeblurDataset(path, frames, para.future_frames, para.past_frames, para.patch_size, para.data_format,
para.centralize, para.normalize)
gpus = para.num_gpus
bs = para.batch_size
ds_len = len(dataset)
if para.trainer_mode == 'ddp':
sampler = torch.utils.data.distributed.DistributedSampler(
dataset,
num_replicas=para.num_gpus,
rank=device_id
)
self.loader = DataLoader(
dataset=dataset,
batch_size=para.batch_size,
shuffle=False,
num_workers=para.threads,
pin_memory=True,
sampler=sampler,
drop_last=True
)
loader_len = np.ceil(ds_len / gpus)
self.loader_len = int(np.ceil(loader_len / bs) * bs)
elif para.trainer_mode == 'dp':
self.loader = DataLoader(
dataset=dataset,
batch_size=para.batch_size,
shuffle=True,
num_workers=para.threads,
pin_memory=True,
drop_last=True
)
self.loader_len = int(np.ceil(ds_len / bs) * bs)
def __iter__(self):
return iter(self.loader)
def __len__(self):
return self.loader_len
if __name__ == '__main__':
from para import Parameter
para = Parameter().args
para.data_format = 'RGB'
para.data_root = '/home/users/aditya/projects/ESTRNN/data/'
para.dataset = 'anthro/358129091084785_19032020105115/video_1_baby_chessboard_ruler_4046788426114666387/'
dataloader = Dataloader(para, 0)
for x, y in dataloader:
print(x.shape, y.shape)
break
print(x.type(), y.type())
print(np.max(x.numpy()), np.min(x.numpy()))
print(np.max(y.numpy()), np.min(y.numpy()))
| true | true |
f738e8964def8ab2e2e98eee4038f5f125d6f96c | 3,594 | py | Python | myapp/DeepSegmentor/data/__init__.py | AbelardoDV/crack-detector | c8f1ecf6eb617f1078c9bdf0ef7af89d95ba33ed | [
"MIT"
] | 6 | 2021-02-14T00:46:58.000Z | 2022-03-25T10:50:26.000Z | myapp/DeepSegmentor/data/__init__.py | AbelardoDV/crack-detector | c8f1ecf6eb617f1078c9bdf0ef7af89d95ba33ed | [
"MIT"
] | null | null | null | myapp/DeepSegmentor/data/__init__.py | AbelardoDV/crack-detector | c8f1ecf6eb617f1078c9bdf0ef7af89d95ba33ed | [
"MIT"
] | 6 | 2021-05-09T12:44:53.000Z | 2022-03-17T00:02:10.000Z | """This package includes all the modules related to data loading and preprocessing
To add a custom dataset class called 'dummy', you need to add a file called 'dummy_dataset.py' and define a subclass 'DummyDataset' inherited from BaseDataset.
You need to implement four functions:
-- <__init__>: initialize the class, first call BaseDataset.__init__(self, opt).
-- <__len__>: return the size of dataset.
-- <__getitem__>: get a data point from data loader.
-- <modify_commandline_options>: (optionally) add dataset-specific options and set default options.
Now you can use the dataset class by specifying flag '--dataset_mode dummy'.
See our template dataset class 'template_dataset.py' for more details.
"""
import importlib
import torch.utils.data
from myapp.DeepSegmentor.data.base_dataset import BaseDataset
def find_dataset_using_name(dataset_name):
    """Locate the dataset class for *dataset_name*.

    Imports ``myapp.DeepSegmentor.data.<dataset_name>_dataset`` and returns
    the ``BaseDataset`` subclass whose name matches ``<DatasetName>Dataset``
    case-insensitively (underscores in *dataset_name* are ignored).
    """
    dataset_filename = "myapp.DeepSegmentor.data." + dataset_name + "_dataset"
    module = importlib.import_module(dataset_filename)
    target_dataset_name = dataset_name.replace('_', '') + 'dataset'
    dataset = None
    # Keep the last matching attribute, mirroring dict iteration order.
    for attr_name, attr in module.__dict__.items():
        if attr_name.lower() == target_dataset_name.lower() and issubclass(attr, BaseDataset):
            dataset = attr
    if dataset is None:
        raise NotImplementedError("In %s.py, there should be a subclass of BaseDataset with class name that matches %s in lowercase." % (dataset_filename, target_dataset_name))
    return dataset
def get_option_setter(dataset_name):
    """Return the dataset class's ``modify_commandline_options`` static method."""
    return find_dataset_using_name(dataset_name).modify_commandline_options
def create_dataset(opt):
    """Build a multi-threaded dataset loader from *opt*.

    Wraps :class:`CustomDatasetDataLoader`; this is the main interface
    between this package and ``train.py``/``test.py``.

    Example:
        >>> from data import create_dataset
        >>> dataset = create_dataset(opt)
    """
    return CustomDatasetDataLoader(opt).load_data()
class CustomDatasetDataLoader():
    """Wraps a Dataset in a multi-threaded torch DataLoader."""

    def __init__(self, opt):
        """Instantiate the dataset named by ``opt.dataset_mode`` plus its loader."""
        self.opt = opt
        dataset_class = find_dataset_using_name(opt.dataset_mode)
        self.dataset = dataset_class(opt)
        print("dataset [%s] was created" % type(self.dataset).__name__)
        self.dataloader = torch.utils.data.DataLoader(
            self.dataset,
            batch_size=opt.batch_size,
            shuffle=not opt.serial_batches,
            num_workers=int(opt.num_threads))

    def load_data(self):
        """Return self (kept for interface compatibility with callers)."""
        return self

    def __len__(self):
        """Number of samples, capped at ``opt.max_dataset_size``."""
        return min(len(self.dataset), self.opt.max_dataset_size)

    def __iter__(self):
        """Yield batches until ``opt.max_dataset_size`` samples have been seen."""
        for batch_index, batch in enumerate(self.dataloader):
            if batch_index * self.opt.batch_size >= self.opt.max_dataset_size:
                break
            yield batch
| 38.234043 | 176 | 0.684474 | import importlib
import torch.utils.data
from myapp.DeepSegmentor.data.base_dataset import BaseDataset
def find_dataset_using_name(dataset_name):
dataset_filename = "myapp.DeepSegmentor.data." + dataset_name + "_dataset"
datasetlib = importlib.import_module(dataset_filename)
dataset = None
target_dataset_name = dataset_name.replace('_', '') + 'dataset'
for name, cls in datasetlib.__dict__.items():
if name.lower() == target_dataset_name.lower() \
and issubclass(cls, BaseDataset):
dataset = cls
if dataset is None:
raise NotImplementedError("In %s.py, there should be a subclass of BaseDataset with class name that matches %s in lowercase." % (dataset_filename, target_dataset_name))
return dataset
def get_option_setter(dataset_name):
dataset_class = find_dataset_using_name(dataset_name)
return dataset_class.modify_commandline_options
def create_dataset(opt):
data_loader = CustomDatasetDataLoader(opt)
dataset = data_loader.load_data()
return dataset
class CustomDatasetDataLoader():
def __init__(self, opt):
self.opt = opt
dataset_class = find_dataset_using_name(opt.dataset_mode)
self.dataset = dataset_class(opt)
print("dataset [%s] was created" % type(self.dataset).__name__)
self.dataloader = torch.utils.data.DataLoader(
self.dataset,
batch_size=opt.batch_size,
shuffle=not opt.serial_batches,
num_workers=int(opt.num_threads))
def load_data(self):
return self
def __len__(self):
return min(len(self.dataset), self.opt.max_dataset_size)
def __iter__(self):
for i, data in enumerate(self.dataloader):
if i * self.opt.batch_size >= self.opt.max_dataset_size:
break
yield data
| true | true |
f738e8ef5f24e0b2ca574e60daacbd4ce87ebfab | 3,472 | py | Python | tests/test_reduce_memory.py | nagomiso/komono | 3158dc14ebaee724defe63d54c214d40065558d7 | [
"MIT"
] | null | null | null | tests/test_reduce_memory.py | nagomiso/komono | 3158dc14ebaee724defe63d54c214d40065558d7 | [
"MIT"
] | null | null | null | tests/test_reduce_memory.py | nagomiso/komono | 3158dc14ebaee724defe63d54c214d40065558d7 | [
"MIT"
] | null | null | null | import pandas as pd
import pytest
from pandas.testing import assert_frame_equal, assert_series_equal
import komono.pandas._reduce_memory as rd
@pytest.fixture
def base_data():
    """Raw columns: int8 boundaries, values forcing int16, a nullable
    integer column (contains None) and a string column."""
    return {
        "int8": [-128, 127],
        "int16": [-129, 127],
        "Int8": [None, 127],
        "Str": ["foo", "bar"],
    }
@pytest.fixture
def base_dtype():
    """Wide dtypes for each base_data column, before memory reduction."""
    return {
        "int8": "int64",
        "int16": "int64",
        "Int8": "Int64",
        "Str": "string",
    }
@pytest.fixture
def base_dataframe(base_data, base_dtype) -> pd.DataFrame:
    """Assemble the baseline frame from the raw-data and dtype fixtures."""
    frame = pd.DataFrame.from_dict(base_data)
    return frame.astype(base_dtype)
# Each (min_, max_) pair straddles a signed-integer dtype boundary; the
# expected dtype is the narrowest type that still holds both values.
@pytest.mark.parametrize(
    "min_,max_,expected_dtype",
    [
        (-128, 127, "int8"),
        (-128, 128, "int16"),
        (-129, 127, "int16"),
        (-129, 128, "int16"),
        (-32_768, 32_767, "int16"),
        (-32_768, 32_768, "int32"),
        (-32_769, 32_767, "int32"),
        (-32_769, 32_768, "int32"),
        (-2_147_483_648, 2_147_483_647, "int32"),
        (-2_147_483_648, 2_147_483_648, "int64"),
        (-2_147_483_649, 2_147_483_647, "int64"),
        (-2_147_483_649, 2_147_483_648, "int64"),
    ],
)
def test_reduce_integer_series_not_nullable(min_, max_, expected_dtype):
    """Non-nullable int64 series downcast to the narrowest fitting dtype."""
    series = pd.Series([min_, max_], dtype="int64")
    dtype = str(series.dtype)
    expected = pd.Series([min_, max_], dtype=expected_dtype)
    actual = rd._reduce_integer_series(series, dtype=dtype)
    assert_series_equal(actual, expected)
# Same boundary cases as the non-nullable test, but with a None in the
# middle so pandas' nullable (capital-I) Int dtypes are exercised.
@pytest.mark.parametrize(
    "min_,mid,max_,expected_dtype",
    [
        (-128, None, 127, "Int8"),
        (-128, None, 128, "Int16"),
        (-129, None, 127, "Int16"),
        (-129, None, 128, "Int16"),
        (-32_768, None, 32_767, "Int16"),
        (-32_768, None, 32_768, "Int32"),
        (-32_769, None, 32_767, "Int32"),
        (-32_769, None, 32_768, "Int32"),
        (-2_147_483_648, None, 2_147_483_647, "Int32"),
        (-2_147_483_648, None, 2_147_483_648, "Int64"),
        (-2_147_483_649, None, 2_147_483_647, "Int64"),
        (-2_147_483_649, None, 2_147_483_648, "Int64"),
    ],
)
def test_reduce_integer_series_nullable(min_, mid, max_, expected_dtype):
    """Nullable Int64 series downcast to the narrowest fitting Int dtype."""
    series = pd.Series([min_, mid, max_], dtype="Int64")
    dtype = str(series.dtype)
    expected = pd.Series([min_, mid, max_], dtype=expected_dtype)
    actual = rd._reduce_integer_series(series, dtype=dtype)
    assert_series_equal(actual, expected)
# Boundary values around float16 (~65504) and float32 (~3.4028e38) limits;
# the expected dtype is the narrowest float type holding both endpoints.
@pytest.mark.parametrize(
    "min_,max_,expected_dtype",
    [
        (-65500.0, 65500.0, "float16"),
        (-65500.0, 65600.0, "float32"),
        (-65600.0, 65500.0, "float32"),
        (-65600.0, 65600.0, "float32"),
        (-3.4028e38, 3.4028e38, "float32"),
        (-3.4028235e38, 3.4028335e38, "float64"),
        (-3.4028335e38, 3.4028235e38, "float64"),
        (-3.4028335e38, 3.4028335e38, "float64"),
    ],
)
def test_reduce_float_series(min_, max_, expected_dtype):
    """float64 series downcast to the narrowest fitting float dtype."""
    series = pd.Series([min_, max_], dtype="float64")
    expected = pd.Series([min_, max_], dtype=expected_dtype)
    actual = rd._reduce_float_series(series)
    assert_series_equal(actual, expected)
def test_reduce_memory_usage(base_data, base_dataframe):
    """Whole-frame reduction narrows every column to its minimal dtype."""
    reduced_dtypes = {
        "int8": "int8",
        "int16": "int16",
        "Int8": "Int8",
        "Str": "string",
    }
    expected = pd.DataFrame.from_dict(data=base_data).astype(reduced_dtypes)
    actual = rd.reduce_memory_usage(base_dataframe, verbose=True)
    assert_frame_equal(actual, expected)
assert_frame_equal(actual, expected)
| 30.45614 | 73 | 0.604839 | import pandas as pd
import pytest
from pandas.testing import assert_frame_equal, assert_series_equal
import komono.pandas._reduce_memory as rd
@pytest.fixture
def base_data():
return {
"int8": [-128, 127],
"int16": [-129, 127],
"Int8": [None, 127],
"Str": ["foo", "bar"],
}
@pytest.fixture
def base_dtype():
return {
"int8": "int64",
"int16": "int64",
"Int8": "Int64",
"Str": "string",
}
@pytest.fixture
def base_dataframe(base_data, base_dtype) -> pd.DataFrame:
return pd.DataFrame.from_dict(base_data).astype(base_dtype)
@pytest.mark.parametrize(
"min_,max_,expected_dtype",
[
(-128, 127, "int8"),
(-128, 128, "int16"),
(-129, 127, "int16"),
(-129, 128, "int16"),
(-32_768, 32_767, "int16"),
(-32_768, 32_768, "int32"),
(-32_769, 32_767, "int32"),
(-32_769, 32_768, "int32"),
(-2_147_483_648, 2_147_483_647, "int32"),
(-2_147_483_648, 2_147_483_648, "int64"),
(-2_147_483_649, 2_147_483_647, "int64"),
(-2_147_483_649, 2_147_483_648, "int64"),
],
)
def test_reduce_integer_series_not_nullable(min_, max_, expected_dtype):
series = pd.Series([min_, max_], dtype="int64")
dtype = str(series.dtype)
expected = pd.Series([min_, max_], dtype=expected_dtype)
actual = rd._reduce_integer_series(series, dtype=dtype)
assert_series_equal(actual, expected)
@pytest.mark.parametrize(
"min_,mid,max_,expected_dtype",
[
(-128, None, 127, "Int8"),
(-128, None, 128, "Int16"),
(-129, None, 127, "Int16"),
(-129, None, 128, "Int16"),
(-32_768, None, 32_767, "Int16"),
(-32_768, None, 32_768, "Int32"),
(-32_769, None, 32_767, "Int32"),
(-32_769, None, 32_768, "Int32"),
(-2_147_483_648, None, 2_147_483_647, "Int32"),
(-2_147_483_648, None, 2_147_483_648, "Int64"),
(-2_147_483_649, None, 2_147_483_647, "Int64"),
(-2_147_483_649, None, 2_147_483_648, "Int64"),
],
)
def test_reduce_integer_series_nullable(min_, mid, max_, expected_dtype):
series = pd.Series([min_, mid, max_], dtype="Int64")
dtype = str(series.dtype)
expected = pd.Series([min_, mid, max_], dtype=expected_dtype)
actual = rd._reduce_integer_series(series, dtype=dtype)
assert_series_equal(actual, expected)
@pytest.mark.parametrize(
"min_,max_,expected_dtype",
[
(-65500.0, 65500.0, "float16"),
(-65500.0, 65600.0, "float32"),
(-65600.0, 65500.0, "float32"),
(-65600.0, 65600.0, "float32"),
(-3.4028e38, 3.4028e38, "float32"),
(-3.4028235e38, 3.4028335e38, "float64"),
(-3.4028335e38, 3.4028235e38, "float64"),
(-3.4028335e38, 3.4028335e38, "float64"),
],
)
def test_reduce_float_series(min_, max_, expected_dtype):
series = pd.Series([min_, max_], dtype="float64")
expected = pd.Series([min_, max_], dtype=expected_dtype)
actual = rd._reduce_float_series(series)
assert_series_equal(actual, expected)
def test_reduce_memory_usage(base_data, base_dataframe):
expected = pd.DataFrame.from_dict(data=base_data,).astype(
{
"int8": "int8",
"int16": "int16",
"Int8": "Int8",
"Str": "string",
}
)
actual = rd.reduce_memory_usage(base_dataframe, verbose=True)
assert_frame_equal(actual, expected)
| true | true |
f738e8f3c2ffddb15e62af6c07e36ec6aa5f61fb | 1,782 | py | Python | easy_dict/tests/test_04_mods.py | cahoy/NestedDictionary | 881f0ea8af36a60fcd1b9d7a84b1aec4cd7072b2 | [
"MIT"
] | 2 | 2021-02-13T03:58:59.000Z | 2021-09-15T01:02:12.000Z | easy_dict/tests/test_04_mods.py | cahoy/NestedDictionary | 881f0ea8af36a60fcd1b9d7a84b1aec4cd7072b2 | [
"MIT"
] | 1 | 2017-01-12T00:18:38.000Z | 2017-01-12T00:18:38.000Z | easy_dict/tests/test_04_mods.py | cahoy/NestedDictionary | 881f0ea8af36a60fcd1b9d7a84b1aec4cd7072b2 | [
"MIT"
] | 1 | 2022-02-16T18:28:52.000Z | 2022-02-16T18:28:52.000Z | from pytest import fixture, raises
import easy_dict as nd
@fixture()
def n():
    """A three-branch NestedDict shared by every mutation test."""
    initial = {'a': {'b': {'c': 123}}, 'd': {'e': 456}, 'f': {'e': 789}}
    return nd.NestedDict(initial)
def test_mod_rooted_chain(n):
    """Chained indexing from the root updates the leaf in place."""
    n['a']['b']['c'] = 234
    expected = {'a': {'b': {'c': 234}}, 'd': {'e': 456}, 'f': {'e': 789}}
    assert n == expected
def test_mod_floating_chain(n):
    """Chained indexing from a floating (non-root) key updates the leaf."""
    n['b']['c'] = 345
    expected = {'a': {'b': {'c': 345}}, 'd': {'e': 456}, 'f': {'e': 789}}
    assert n == expected
def test_mod_floating_single_key(n):
    """A lone nested key is resolved and assigned anywhere in the tree."""
    n['c'] = 456
    expected = {'a': {'b': {'c': 456}}, 'd': {'e': 456}, 'f': {'e': 789}}
    assert n == expected
def test_mod_with_rooted_slash(n):
    """A full slash-separated path from the root assigns the leaf."""
    n['a/b/c'] = 567
    expected = {'a': {'b': {'c': 567}}, 'd': {'e': 456}, 'f': {'e': 789}}
    assert n == expected
def test_mod_with_floating_slash(n):
    """A partial slash-separated path assigns the unique matching leaf."""
    n['b/c'] = 678
    expected = {'a': {'b': {'c': 678}}, 'd': {'e': 456}, 'f': {'e': 789}}
    assert n == expected
def test_mod_with_floating_list(n):
    """A key path given as a list behaves like its slash form."""
    n[['b', 'c']] = 789
    expected = {'a': {'b': {'c': 789}}, 'd': {'e': 456}, 'f': {'e': 789}}
    assert n == expected
def test_mod_multiple_keys_multi_target(n):
    """A leaf key occurring in several branches updates every occurrence."""
    n['e'] = 234
    expected = {'a': {'b': {'c': 123}}, 'd': {'e': 234}, 'f': {'e': 234}}
    assert n == expected
def test_mod_multiple_keys_single_target(n):
    """A qualified path updates only the branch it names."""
    n['d/e'] = 234
    expected = {'a': {'b': {'c': 123}}, 'd': {'e': 234}, 'f': {'e': 789}}
    assert n == expected
def test_mod_one_out_of_many_keys():
    """A longer path disambiguates between identical leaf keys."""
    tree = nd.NestedDict({'a': {'b': {'c': {'e': 123}}}, 'd': {'e': 123}, 'f': {'c': {'e': 123}}})
    tree['a/b/c/e'] = 234
    assert tree == {'a': {'b': {'c': {'e': 234}}}, 'd': {'e': 123}, 'f': {'c': {'e': 123}}}
    # A shorter floating path still resolves to the same unique leaf.
    tree['b/c/e'] = 345
    assert tree == {'a': {'b': {'c': {'e': 345}}}, 'd': {'e': 123}, 'f': {'c': {'e': 123}}}
def test_mod_ambiguous_key():
    """A path matching several distinct branches must raise KeyError."""
    tree = nd.NestedDict({'a': {'b': {'c': {'e': 123}}}, 'd': {'e': 123}, 'f': {'c': {'e': 123}}})
    with raises(KeyError):
        tree['c/e'] = 456
| 27 | 95 | 0.442761 | from pytest import fixture, raises
import easy_dict as nd
@fixture()
def n():
return nd.NestedDict({'a': {'b': {'c': 123}}, 'd': {'e': 456}, 'f': {'e': 789}})
def test_mod_rooted_chain(n):
n['a']['b']['c'] = 234
assert n == {'a': {'b': {'c': 234}}, 'd': {'e': 456}, 'f': {'e': 789}}
def test_mod_floating_chain(n):
n['b']['c'] = 345
assert n == {'a': {'b': {'c': 345}}, 'd': {'e': 456}, 'f': {'e': 789}}
def test_mod_floating_single_key(n):
n['c'] = 456
assert n == {'a': {'b': {'c': 456}}, 'd': {'e': 456}, 'f': {'e': 789}}
def test_mod_with_rooted_slash(n):
n['a/b/c'] = 567
assert n == {'a': {'b': {'c': 567}}, 'd': {'e': 456}, 'f': {'e': 789}}
def test_mod_with_floating_slash(n):
n['b/c'] = 678
assert n == {'a': {'b': {'c': 678}}, 'd': {'e': 456}, 'f': {'e': 789}}
def test_mod_with_floating_list(n):
n[['b', 'c']] = 789
assert n == {'a': {'b': {'c': 789}}, 'd': {'e': 456}, 'f': {'e': 789}}
def test_mod_multiple_keys_multi_target(n):
n['e'] = 234
assert n == {'a': {'b': {'c': 123}}, 'd': {'e': 234}, 'f': {'e': 234}}
def test_mod_multiple_keys_single_target(n):
n['d/e'] = 234
assert n == {'a': {'b': {'c': 123}}, 'd': {'e': 234}, 'f': {'e': 789}}
def test_mod_one_out_of_many_keys():
k = nd.NestedDict({'a': {'b': {'c': {'e': 123}}}, 'd': {'e': 123}, 'f': {'c': {'e': 123}}})
k['a/b/c/e'] = 234
assert k == {'a': {'b': {'c': {'e': 234}}}, 'd': {'e': 123}, 'f': {'c': {'e': 123}}}
k['b/c/e'] = 345
assert k == {'a': {'b': {'c': {'e': 345}}}, 'd': {'e': 123}, 'f': {'c': {'e': 123}}}
def test_mod_ambiguous_key():
k = nd.NestedDict({'a': {'b': {'c': {'e': 123}}}, 'd': {'e': 123}, 'f': {'c': {'e': 123}}})
with raises(KeyError):
k['c/e'] = 456
| true | true |
f738e8fe93b26fc4b1d67c654e45efdee0d4fc59 | 1,114 | py | Python | app/service/auth/group_user.py | ryan4yin/flypy-backend | 7fcc2971ac27d3b44e352dfed73acd12e1913d65 | [
"MIT"
] | 6 | 2019-03-14T02:39:17.000Z | 2021-10-31T11:43:58.000Z | app/service/auth/group_user.py | ryan4yin/flypy-backend | 7fcc2971ac27d3b44e352dfed73acd12e1913d65 | [
"MIT"
] | null | null | null | app/service/auth/group_user.py | ryan4yin/flypy-backend | 7fcc2971ac27d3b44e352dfed73acd12e1913d65 | [
"MIT"
] | 2 | 2020-02-04T07:44:37.000Z | 2021-04-02T23:02:20.000Z | """
群成员权限验证
"""
from typing import Iterable, List
from app import db
from app.models import Group, GroupUser, MainUser, GroupUserRelation
from app.utils.db import get_group
def is_(role: List[str], main_user: MainUser, group_id, platform):
    """Check whether *main_user* holds one of *role* in the given group.

    The user must already be bound to the group.

    :param role: group roles to test for — 'admin' and/or 'owner'
    :param main_user: the main user object
    :param group_id: id of the group
    :param platform: platform the group belongs to
    :return: boolean
    """
    group = get_group(group_id, platform)
    # Find the group accounts bound to main_user on this platform.
    group_users_id: Iterable[GroupUser.id] = db.session.query(GroupUser.id) \
        .filter_by(platform=platform, main_user_id=main_user.id)
    # Relations of this group restricted to those bound accounts.
    group_user_relationship: Iterable[GroupUserRelation] = db.session.query(GroupUserRelation) \
        .filter_by(platform=platform, group_db_id=group.id) \
        .filter(GroupUserRelation.user_db_id.in_(group_users_id))
    # Check each account in turn; return True as soon as one has a
    # requested role.
    for r in group_user_relationship:
        if 'admin' in role and r.is_admin:
            return True
        elif 'owner' in role and r.is_owner:
            return True
    return False
| 26.52381 | 96 | 0.691203 | from typing import Iterable, List
from app import db
from app.models import Group, GroupUser, MainUser, GroupUserRelation
from app.utils.db import get_group
def is_(role: List[str], main_user: MainUser, group_id, platform):
group = get_group(group_id, platform)
group_users_id: Iterable[GroupUser.id] = db.session.query(GroupUser.id) \
.filter_by(platform=platform, main_user_id=main_user.id)
group_user_relationship: Iterable[GroupUserRelation] = db.session.query(GroupUserRelation) \
.filter_by(platform=platform, group_db_id=group.id) \
.filter(GroupUserRelation.user_db_id.in_(group_users_id))
for r in group_user_relationship:
if 'admin' in role and r.is_admin:
return True
elif 'owner' in role and r.is_owner:
return True
return False
| true | true |
f738e9ae1be3a751cceefa326d7426a350d1e65f | 7,573 | py | Python | Pipelines/ADNI_AV1451/ADNI_V1_AV1451.py | sulantha2006/Processing_Pipeline | fb135560b7db79d811177cf02ee96e6081ce3364 | [
"Apache-2.0"
] | 1 | 2015-08-11T17:39:26.000Z | 2015-08-11T17:39:26.000Z | Pipelines/ADNI_AV1451/ADNI_V1_AV1451.py | sulantha2006/Processing_Pipeline | fb135560b7db79d811177cf02ee96e6081ce3364 | [
"Apache-2.0"
] | null | null | null | Pipelines/ADNI_AV1451/ADNI_V1_AV1451.py | sulantha2006/Processing_Pipeline | fb135560b7db79d811177cf02ee96e6081ce3364 | [
"Apache-2.0"
] | 2 | 2015-11-13T18:05:48.000Z | 2020-01-17T17:28:08.000Z | __author__ = 'sulantha'
from Utils.DbUtils import DbUtils
import Config.PipelineConfig as pc
from Pipelines.ADNI_T1.ADNI_T1_Helper import ADNI_T1_Helper
from Utils.PipelineLogger import PipelineLogger
import distutils.dir_util
import distutils.file_util
import shutil
import subprocess
from Manager.QSubJob import QSubJob
from Manager.QSubJobHanlder import QSubJobHandler
import socket,os
import ast
from Pipelines.Helpers.PETHelper import PETHelper
class ProcessingItemObj:
    """Value object exposing one processing-table row as named attributes.

    Wraps the raw DB result tuple ``processingItem``; most columns are copied
    verbatim, while the scan date/time columns are normalised to strings.
    """

    # (attribute name, tuple index) for the columns copied verbatim.
    _PLAIN_FIELDS = (
        ("processing_rid", 0),
        ("study", 1),
        ("subject_rid", 2),
        ("modality", 3),
        ("s_identifier", 6),
        ("i_identifier", 7),
        ("root_folder", 8),
        ("converted_folder", 9),
        ("version", 10),
        ("table_id", 17),
        ("parameters", 19),
        ("manual_xfm", 20),
        ("qc", 21),
    )

    def __init__(self, processingItem):
        for attr, idx in self._PLAIN_FIELDS:
            setattr(self, attr, processingItem[idx])
        # Date column is a datetime-like object; render as ISO 'YYYY-MM-DD'.
        self.scan_date = processingItem[4].strftime("%Y-%m-%d")
        self.scan_time = str(processingItem[5])
class ADNI_V1_AV1451:
    """AV1451 (tau PET) processing pipeline step for the ADNI study.

    Matches each PET scan to a processed T1, resolves the manual registration
    transform (requesting one if missing), and launches the external
    processing script as a monitored qsub job.
    """
    def __init__(self):
        # Main pipeline DB plus the data-matching DB and PET utility helper.
        self.DBClient = DbUtils()
        self.MatchDBClient = DbUtils(database=pc.ADNI_dataMatchDBName)
        self.PETHelper = PETHelper()
    def process(self, processingItem):
        """Validate prerequisites for one PET row and kick off processing.

        Returns 0 when processing cannot proceed (no matching T1, T1 not yet
        processed, or no manual transform available); otherwise delegates to
        :meth:`processPET`.
        """
        processingItemObj = ProcessingItemObj(processingItem)
        matching_t1 = ADNI_T1_Helper().getMatchingT1(processingItemObj)
        if not matching_t1:
            PipelineLogger.log('root', 'error', 'PET cannot be processed no matching T1 found. - {0} - {1} - {2}.'.format(processingItemObj.subject_rid, processingItemObj.modality, processingItemObj.scan_date))
            return 0
        # NOTE(review): `processed` is presumably the processed-T1 location;
        # it is later passed to processPET as matchT1Path -- confirm.
        processed = ADNI_T1_Helper().checkProcessed(matching_t1)
        if not processed:
            PipelineLogger.log('root', 'error', 'PET cannot be processed due to matching T1 not being processed - {0}'.format(matching_t1))
            return 0
        else:
            PipelineLogger.log('root', 'INFO', '+++++++++ PET ready to be processed. Will check for xfm. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
        # Resolve the manual registration transform:
        #   ''            -> look it up via the PET helper,
        #   'Req_man_reg' -> reuse a finished co-registration or request one,
        #   anything else -> already stored on the row.
        if processingItemObj.manual_xfm == '':
            manualXFM = self.PETHelper.getManualXFM(processingItemObj, matching_t1)
            processingItemObj.manual_xfm = manualXFM
        elif processingItemObj.manual_xfm == 'Req_man_reg':
            coregDone = self.PETHelper.checkIfAlreadyDone(processingItemObj, matching_t1)
            if coregDone:
                manualXFM = coregDone
                # Persist the found transform back onto the pipeline row.
                setPPTableSQL = "UPDATE {0}_{1}_Pipeline SET MANUAL_XFM = '{2}' WHERE RECORD_ID = {3}".format(processingItemObj.study, processingItemObj.modality, manualXFM, processingItemObj.table_id)
                self.DBClient.executeNoResult(setPPTableSQL)
            else:
                self.PETHelper.requestCoreg(processingItemObj, matching_t1)
                PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. Request to create one may have added. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
                return 0
        else:
            manualXFM = processingItemObj.manual_xfm
        if manualXFM:
            self.processPET(processingItemObj, processed)
        else:
            PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. Request to create one may have added. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
            return 0
    def getScanType(self, processingItemObj):
        """Look up the scan type recorded in the Conversion table for this scan."""
        r = self.DBClient.executeAllResults("SELECT SCAN_TYPE FROM Conversion WHERE STUDY = '{0}' AND RID = '{1}' "
                                            "AND SCAN_DATE = '{2}' AND S_IDENTIFIER = '{3}' "
                                            "AND I_IDENTIFIER = '{4}'".format(processingItemObj.study,
                                                                              processingItemObj.subject_rid,
                                                                              processingItemObj.scan_date,
                                                                              processingItemObj.s_identifier,
                                                                              processingItemObj.i_identifier))
        return r[0][0]
    def processPET(self, processingItemObj, matchT1Path):
        """Prepare folders and launch the external AV1451 processing job.

        Builds the converted-file name, recreates the processed folder, shells
        out to the ADNI_V1_AV1451_Process script and registers a qsub job to
        track it. Returns 1 on launch, 0 on a folder-setup failure.
        """
        petFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(processingItemObj.converted_folder, processingItemObj.study,
                                                            processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''),
                                                            processingItemObj.s_identifier, processingItemObj.i_identifier,
                                                            self.getScanType(processingItemObj))
        processedFolder = '{0}/processed'.format(processingItemObj.root_folder)
        logDir = '{0}/logs'.format(processingItemObj.root_folder)
        PipelineLogger.log('manager', 'info', 'PET processing starting for {0}'.format(petFileName))
        try:
            distutils.dir_util.mkpath(logDir)
        except Exception as e:
            PipelineLogger.log('manager', 'error', 'Error in creating log folder \n {0}'.format(e))
            return 0
        # NOTE(review): `id` shadows the builtin id() for the rest of this method.
        id = '{0}{1}{2}{3}'.format(processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier)
        # Row parameters are a Python-literal dict; render them as a bash
        # associative-array initialiser for the processing script.
        paramStrd = ast.literal_eval(processingItemObj.parameters)
        paramStrt = ' '.join(['[\"{0}\"]=\"{1}\"'.format(k, v) for k,v in paramStrd.items()])
        paramStr = '({0})'.format(paramStrt)
        petCMD = "source /opt/minc-1.9.15/minc-toolkit-config.sh; Pipelines/ADNI_AV1451/ADNI_V1_AV1451_Process {0} {1} {2} {3} {4} {5} '{6}' {7} {8}".format(id, petFileName, processedFolder, matchT1Path, processingItemObj.manual_xfm, logDir, paramStr,socket.gethostname(), 50500)
        # Remove any previous output (rename first so a half-deleted folder
        # is never left under the live name); failure here is non-fatal.
        try:
            processedFolder_del = '{0}/processed_del'.format(processingItemObj.root_folder)
            os.rename(processedFolder, processedFolder_del)
            shutil.rmtree(processedFolder_del)
        except Exception as e:
            PipelineLogger.log('manager', 'error', 'Error in deleting old processing folder. \n {0}'.format(e))
        try:
            distutils.dir_util.mkpath(processedFolder)
        except Exception as e:
            PipelineLogger.log('manager', 'error', 'Error in creating processing folder. \n {0}'.format(e))
            return 0
        ### This section is new for ADNI Pre processing - Per scanner type blurring. Only required if
        ### the images are aquired from different scanners and need to get to same PSF.
        # NOTE(review): blur_x/blur_y/blur_z are computed but never used below -- confirm intent.
        blur_x, blur_y, blur_z = self.PETHelper.getBlurringParams(processingItemObj)
        ### End pre processing.
        PipelineLogger.log('manager', 'debug', 'Command : {0}'.format(petCMD))
        p = subprocess.Popen(petCMD, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, executable='/bin/bash')
        out, err = p.communicate()
        PipelineLogger.log('manager', 'debug', 'Process Log Output : \n{0}'.format(out))
        PipelineLogger.log('manager', 'debug', 'Process Log Err : \n{0}'.format(err))
        # Register the launched job so the qsub handler can track completion.
        QSubJobHandler.submittedJobs[id] = QSubJob(id, '02:00:00', processingItemObj, 'av1451')
        return 1
| 57.80916 | 279 | 0.628549 | __author__ = 'sulantha'
from Utils.DbUtils import DbUtils
import Config.PipelineConfig as pc
from Pipelines.ADNI_T1.ADNI_T1_Helper import ADNI_T1_Helper
from Utils.PipelineLogger import PipelineLogger
import distutils.dir_util
import distutils.file_util
import shutil
import subprocess
from Manager.QSubJob import QSubJob
from Manager.QSubJobHanlder import QSubJobHandler
import socket,os
import ast
from Pipelines.Helpers.PETHelper import PETHelper
class ProcessingItemObj:
def __init__(self, processingItem):
self.processing_rid = processingItem[0]
self.study = processingItem[1]
self.subject_rid = processingItem[2]
self.modality = processingItem[3]
self.scan_date = processingItem[4].strftime("%Y-%m-%d")
self.scan_time = str(processingItem[5])
self.s_identifier = processingItem[6]
self.i_identifier = processingItem[7]
self.root_folder = processingItem[8]
self.converted_folder = processingItem[9]
self.version = processingItem[10]
self.table_id = processingItem[17]
self.parameters = processingItem[19]
self.manual_xfm = processingItem[20]
self.qc = processingItem[21]
class ADNI_V1_AV1451:
def __init__(self):
self.DBClient = DbUtils()
self.MatchDBClient = DbUtils(database=pc.ADNI_dataMatchDBName)
self.PETHelper = PETHelper()
def process(self, processingItem):
processingItemObj = ProcessingItemObj(processingItem)
matching_t1 = ADNI_T1_Helper().getMatchingT1(processingItemObj)
if not matching_t1:
PipelineLogger.log('root', 'error', 'PET cannot be processed no matching T1 found. - {0} - {1} - {2}.'.format(processingItemObj.subject_rid, processingItemObj.modality, processingItemObj.scan_date))
return 0
processed = ADNI_T1_Helper().checkProcessed(matching_t1)
if not processed:
PipelineLogger.log('root', 'error', 'PET cannot be processed due to matching T1 not being processed - {0}'.format(matching_t1))
return 0
else:
PipelineLogger.log('root', 'INFO', '+++++++++ PET ready to be processed. Will check for xfm. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
if processingItemObj.manual_xfm == '':
manualXFM = self.PETHelper.getManualXFM(processingItemObj, matching_t1)
processingItemObj.manual_xfm = manualXFM
elif processingItemObj.manual_xfm == 'Req_man_reg':
coregDone = self.PETHelper.checkIfAlreadyDone(processingItemObj, matching_t1)
if coregDone:
manualXFM = coregDone
setPPTableSQL = "UPDATE {0}_{1}_Pipeline SET MANUAL_XFM = '{2}' WHERE RECORD_ID = {3}".format(processingItemObj.study, processingItemObj.modality, manualXFM, processingItemObj.table_id)
self.DBClient.executeNoResult(setPPTableSQL)
else:
self.PETHelper.requestCoreg(processingItemObj, matching_t1)
PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. Request to create one may have added. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
return 0
else:
manualXFM = processingItemObj.manual_xfm
if manualXFM:
self.processPET(processingItemObj, processed)
else:
PipelineLogger.log('root', 'INFO', 'Manual XFM was not found. Request to create one may have added. - {0} - {1}'.format(processingItemObj.subject_rid, processingItemObj.scan_date))
return 0
def getScanType(self, processingItemObj):
r = self.DBClient.executeAllResults("SELECT SCAN_TYPE FROM Conversion WHERE STUDY = '{0}' AND RID = '{1}' "
"AND SCAN_DATE = '{2}' AND S_IDENTIFIER = '{3}' "
"AND I_IDENTIFIER = '{4}'".format(processingItemObj.study,
processingItemObj.subject_rid,
processingItemObj.scan_date,
processingItemObj.s_identifier,
processingItemObj.i_identifier))
return r[0][0]
def processPET(self, processingItemObj, matchT1Path):
petFileName = '{0}/{1}_{2}{3}{4}{5}_{6}.mnc'.format(processingItemObj.converted_folder, processingItemObj.study,
processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''),
processingItemObj.s_identifier, processingItemObj.i_identifier,
self.getScanType(processingItemObj))
processedFolder = '{0}/processed'.format(processingItemObj.root_folder)
logDir = '{0}/logs'.format(processingItemObj.root_folder)
PipelineLogger.log('manager', 'info', 'PET processing starting for {0}'.format(petFileName))
try:
distutils.dir_util.mkpath(logDir)
except Exception as e:
PipelineLogger.log('manager', 'error', 'Error in creating log folder \n {0}'.format(e))
return 0
id = '{0}{1}{2}{3}'.format(processingItemObj.subject_rid, processingItemObj.scan_date.replace('-', ''), processingItemObj.s_identifier, processingItemObj.i_identifier)
paramStrd = ast.literal_eval(processingItemObj.parameters)
paramStrt = ' '.join(['[\"{0}\"]=\"{1}\"'.format(k, v) for k,v in paramStrd.items()])
paramStr = '({0})'.format(paramStrt)
petCMD = "source /opt/minc-1.9.15/minc-toolkit-config.sh; Pipelines/ADNI_AV1451/ADNI_V1_AV1451_Process {0} {1} {2} {3} {4} {5} '{6}' {7} {8}".format(id, petFileName, processedFolder, matchT1Path, processingItemObj.manual_xfm, logDir, paramStr,socket.gethostname(), 50500)
try:
processedFolder_del = '{0}/processed_del'.format(processingItemObj.root_folder)
os.rename(processedFolder, processedFolder_del)
shutil.rmtree(processedFolder_del)
except Exception as e:
PipelineLogger.log('manager', 'error', 'Error in deleting old processing folder. \n {0}'.format(e))
try:
distutils.dir_util.mkpath(processedFolder)
except Exception as e:
PipelineLogger.log('manager', 'error', 'Error in creating processing folder. \n {0}'.format(e))
return 0
bug', 'Process Log Output : \n{0}'.format(out))
PipelineLogger.log('manager', 'debug', 'Process Log Err : \n{0}'.format(err))
QSubJobHandler.submittedJobs[id] = QSubJob(id, '02:00:00', processingItemObj, 'av1451')
return 1
| true | true |
f738ea5fe3f4affcffd0189bbf82496b2931a05e | 1,893 | py | Python | source/force.py | swyang50066/sun-jupiter-earth-orbit | c50012ff1a187485b717d86a24c25cfe6edd78a1 | [
"MIT"
] | null | null | null | source/force.py | swyang50066/sun-jupiter-earth-orbit | c50012ff1a187485b717d86a24c25cfe6edd78a1 | [
"MIT"
] | null | null | null | source/force.py | swyang50066/sun-jupiter-earth-orbit | c50012ff1a187485b717d86a24c25cfe6edd78a1 | [
"MIT"
] | null | null | null | import numpy as np
from allvar import *
def _distance(r1, r2):
"""Return Euclidean _distance between positions"""
return np.sqrt(np.sum((r1 - r2)**2.))
def drdt(r, v):
"""Return position derivative
:param r: shape: (x_earth, y_earth, x_jupiter, y_jupiter))
:param v: shape: (vx_earth, vy_earth, vx_jupiter, vy_jupiter)
:return: velocities
"""
return v
def dvdt(r, v, eps=1.e-20):
    """Return the velocity derivative (accelerations) of Earth and Jupiter.

    The central star is held fixed at the origin (0, 0); ``eps`` guards the
    angle and inverse-square terms against division by zero.

    :param r: positions (x_earth, y_earth, x_jupiter, y_jupiter)
    :param v: velocities (vx_earth, vy_earth, vx_jupiter, vy_jupiter) -- unused,
        kept for a uniform derivative signature
    :param eps: small regulariser added to denominators
    :return: accelerations (ax_earth, ay_earth, ax_jupiter, ay_jupiter)
    """
    # Geometric measurements: star->earth, star->jupiter and earth->jupiter
    # separation vectors, their lengths, and first-quadrant angles (atan of
    # absolute component ratios).
    r_se, r_sj, r_ej = r[:2], r[2:], r[2:] - r[:2]
    dist_se = _distance((0, 0), r_se)
    dist_sj = _distance((0, 0), r_sj)
    dist_ej = _distance(r_se, r_sj)  # equals |r_ej|
    theta_se = np.math.atan(np.abs(r_se[1])/(np.abs(r_se[0]) + eps))
    theta_sj = np.math.atan(np.abs(r_sj[1])/(np.abs(r_sj[0]) + eps))
    theta_ej = np.math.atan(np.abs(r_ej[1])/(np.abs(r_ej[0]) + eps))
    # Unit force functionals: inverse-square magnitudes resolved into x/y via
    # cos/sin of the quadrant angle, with -np.sign(...) restoring direction.
    # NOTE(review): this per-quadrant sign convention is only valid if every
    # component is handled symmetrically -- verify the mutual (earth-jupiter)
    # term against a direct vector formulation.
    const_se = GG*(EARTH_MASS/SOLAR_MASS)
    f_se = -np.sign(r_se)*const_se*np.array(
        [
            np.cos(theta_se)/(dist_se + eps)**2.,
            np.sin(theta_se)/(dist_se + eps)**2.
        ]
    )
    const_sj = GG*(JUPITER_MASS/SOLAR_MASS)
    f_sj = -np.sign(r_sj)*const_sj*np.array(
        [
            np.cos(theta_sj)/(dist_sj + eps)**2.,
            np.sin(theta_sj)/(dist_sj + eps)**2.
        ]
    )
    const_ej = GG*(EARTH_MASS*JUPITER_MASS/SOLAR_MASS**2.)
    f_ej = -np.sign(r_ej)*const_ej*np.array(
        [
            np.cos(theta_ej)/(dist_ej + eps)**2.,
            np.sin(theta_ej)/(dist_ej + eps)**2.
        ]
    )
    # Forces -> accelerations: divide by each planet's mass (in solar units);
    # the mutual term enters Earth's and Jupiter's equations with opposite signs.
    return np.hstack([
        (f_se - f_ej)/(EARTH_MASS/SOLAR_MASS),
        (f_sj + f_ej)/(JUPITER_MASS/SOLAR_MASS),
    ])
| 28.253731 | 68 | 0.578975 | import numpy as np
from allvar import *
def _distance(r1, r2):
return np.sqrt(np.sum((r1 - r2)**2.))
def drdt(r, v):
return v
def dvdt(r, v, eps=1.e-20):
r_se, r_sj, r_ej = r[:2], r[2:], r[2:] - r[:2]
dist_se = _distance((0, 0), r_se)
dist_sj = _distance((0, 0), r_sj)
dist_ej = _distance(r_se, r_sj)
theta_se = np.math.atan(np.abs(r_se[1])/(np.abs(r_se[0]) + eps))
theta_sj = np.math.atan(np.abs(r_sj[1])/(np.abs(r_sj[0]) + eps))
theta_ej = np.math.atan(np.abs(r_ej[1])/(np.abs(r_ej[0]) + eps))
const_se = GG*(EARTH_MASS/SOLAR_MASS)
f_se = -np.sign(r_se)*const_se*np.array(
[
np.cos(theta_se)/(dist_se + eps)**2.,
np.sin(theta_se)/(dist_se + eps)**2.
]
)
const_sj = GG*(JUPITER_MASS/SOLAR_MASS)
f_sj = -np.sign(r_sj)*const_sj*np.array(
[
np.cos(theta_sj)/(dist_sj + eps)**2.,
np.sin(theta_sj)/(dist_sj + eps)**2.
]
)
const_ej = GG*(EARTH_MASS*JUPITER_MASS/SOLAR_MASS**2.)
f_ej = -np.sign(r_ej)*const_ej*np.array(
[
np.cos(theta_ej)/(dist_ej + eps)**2.,
np.sin(theta_ej)/(dist_ej + eps)**2.
]
)
return np.hstack([
(f_se - f_ej)/(EARTH_MASS/SOLAR_MASS),
(f_sj + f_ej)/(JUPITER_MASS/SOLAR_MASS),
])
| true | true |
f738eae3572e7241435ab4c22191359b1e9a5923 | 4,073 | py | Python | tests/test_end2end.py | haojiepan1/CrossWOZ | 6d7b4c4cfb73a528b76074764687906abecc90b6 | [
"Apache-2.0"
] | 1 | 2020-03-09T02:09:10.000Z | 2020-03-09T02:09:10.000Z | tests/test_end2end.py | haojiepan1/CrossWOZ | 6d7b4c4cfb73a528b76074764687906abecc90b6 | [
"Apache-2.0"
] | null | null | null | tests/test_end2end.py | haojiepan1/CrossWOZ | 6d7b4c4cfb73a528b76074764687906abecc90b6 | [
"Apache-2.0"
] | null | null | null | from convlab2.nlu.svm.multiwoz import SVMNLU
from convlab2.nlu.jointBERT.multiwoz import BERTNLU
from convlab2.nlu.milu.multiwoz import MILU
from convlab2.dst.rule.multiwoz import RuleDST
from convlab2.policy.rule.multiwoz import RulePolicy
from convlab2.nlg.template.multiwoz import TemplateNLG
from convlab2.dialog_agent import PipelineAgent, BiSession
from convlab2.evaluator.multiwoz_eval import MultiWozEvaluator
from pprint import pprint
import random
import numpy as np
import torch
# --- Assemble the system-side dialogue agent: BERT NLU -> rule DST ->
# --- rule policy -> template NLG.
sys_nlu = BERTNLU(mode='all', config_file='multiwoz_all.json',
                  model_file='https://tatk-data.s3-ap-northeast-1.amazonaws.com/bert_multiwoz_all.zip')
# sys_nlu = SVMNLU(mode='sys')
# simple rule DST
sys_dst = RuleDST()
# rule policy
sys_policy = RulePolicy(character='sys')
# template NLG
sys_nlg = TemplateNLG(is_user=False)
# assemble
sys_agent = PipelineAgent(sys_nlu, sys_dst, sys_policy, sys_nlg, 'sys')
# --- Assemble the user simulator: MILU NLU, no DST, rule policy, template NLG.
# user_nlu = sys_nlu
# user_nlu = SVMNLU(mode='all')
user_nlu = MILU(model_file="https://convlab.blob.core.windows.net/models/milu.tar.gz")
# not use dst (user_dst is kept for reference but None is passed below)
user_dst = None
# rule policy
user_policy = RulePolicy(character='usr')
# template NLG
user_nlg = TemplateNLG(is_user=True)
# assemble
user_agent = PipelineAgent(user_nlu, None, user_policy, user_nlg, 'user')
# Pair both agents in a bidirectional session scored by the MultiWOZ evaluator.
evaluator = MultiWozEvaluator()
sess = BiSession(sys_agent=sys_agent, user_agent=user_agent, kb_query=None, evaluator=evaluator)
# --- Demo: run a single dialogue end-to-end and print the transcript. ---
# NOTE(review): np.random is seeded with a different constant (20190827)
# than random/torch (20200131) -- confirm this is intentional.
random.seed(20200131)
np.random.seed(20190827)
torch.manual_seed(20200131)
sys_response = ''
sess.init_session()
print('init goal:')
pprint(sess.evaluator.goal)
print('-'*50)
# Cap the dialogue at 40 turns in case the session never terminates.
for i in range(40):
    sys_response, user_response, session_over, reward = sess.next_turn(sys_response)
    print('user:', user_response)
    print('sys:', sys_response)
    print()
    if session_over is True:
        # Session finished: report completion/success metrics and final goal.
        print('task complete:', user_policy.policy.goal.task_complete())
        print('task success:', sess.evaluator.task_success())
        print('book rate:', sess.evaluator.book_rate())
        print('inform precision/recall/f1:', sess.evaluator.inform_F1())
        print('-'*50)
        print('final goal:')
        pprint(sess.evaluator.goal)
        print('='*100)
        break
# --- Evaluation: run `total_dialog` dialogues with per-dialogue seeds and
# --- aggregate completion/success and inform precision/recall/F1. ---
total_dialog = 10
random.seed(20200131)
# One pre-drawn goal seed per dialogue; consumed front-to-back below.
goal_seeds = [random.randint(1,100000) for _ in range(total_dialog)]
precision = 0
recall = 0
f1 = 0
suc_num = 0
complete_num = 0
for j in range(total_dialog):
    sys_response = ''
    # Seed everything from the next goal seed, then discard it.
    random.seed(goal_seeds[0])
    np.random.seed(goal_seeds[0])
    torch.manual_seed(goal_seeds[0])
    goal_seeds.pop(0)
    sess.init_session()
    # print('init goal:')
    # pprint(sess.evaluator.goal)
    # print('-'*50)
    for i in range(40):
        sys_response, user_response, session_over, reward = sess.next_turn(
            sys_response)
        # print('user:', user_response)
        # print('sys:', sys_response)
        if session_over is True:
            if sess.evaluator.task_success() == 1:
                suc_num = suc_num+1
            if user_policy.policy.goal.task_complete():
                complete_num += 1
            print('task complete:', user_policy.policy.goal.task_complete())
            print('task success:', sess.evaluator.task_success())
            print('book rate:', sess.evaluator.book_rate())
            print('inform precision/recall/f1:', sess.evaluator.inform_F1())
            stats = sess.evaluator.inform_F1()
            if(stats[0] != None):
                precision = precision+stats[0]
            if(stats[1] != None):
                recall = recall+stats[1]
            if(stats[2] != None):
                f1 = f1+stats[2]
            # NOTE(review): this `else` binds to `if(stats[2] != None)` above,
            # so suc_num is decremented whenever F1 is None -- confirm this is
            # the intended penalty and not a misplaced branch.
            else:
                suc_num = suc_num-1
            # print('-'*50)
            # print('final goal:')
            # pprint(sess.evaluator.goal)
            # print('='*100)
            break
print("complete number of dialogs/tot:", complete_num/total_dialog)
print("success number of dialogs/tot:", suc_num/total_dialog)
print("average precision:", precision/total_dialog)
print("average recall:", recall/total_dialog)
print("average f1:", f1/total_dialog)
print("average f1:", f1/total_dialog) | 35.417391 | 103 | 0.674687 | from convlab2.nlu.svm.multiwoz import SVMNLU
from convlab2.nlu.jointBERT.multiwoz import BERTNLU
from convlab2.nlu.milu.multiwoz import MILU
from convlab2.dst.rule.multiwoz import RuleDST
from convlab2.policy.rule.multiwoz import RulePolicy
from convlab2.nlg.template.multiwoz import TemplateNLG
from convlab2.dialog_agent import PipelineAgent, BiSession
from convlab2.evaluator.multiwoz_eval import MultiWozEvaluator
from pprint import pprint
import random
import numpy as np
import torch
sys_nlu = BERTNLU(mode='all', config_file='multiwoz_all.json',
model_file='https://tatk-data.s3-ap-northeast-1.amazonaws.com/bert_multiwoz_all.zip')
sys_dst = RuleDST()
sys_policy = RulePolicy(character='sys')
sys_nlg = TemplateNLG(is_user=False)
sys_agent = PipelineAgent(sys_nlu, sys_dst, sys_policy, sys_nlg, 'sys')
user_nlu = MILU(model_file="https://convlab.blob.core.windows.net/models/milu.tar.gz")
user_dst = None
user_policy = RulePolicy(character='usr')
user_nlg = TemplateNLG(is_user=True)
user_agent = PipelineAgent(user_nlu, None, user_policy, user_nlg, 'user')
evaluator = MultiWozEvaluator()
sess = BiSession(sys_agent=sys_agent, user_agent=user_agent, kb_query=None, evaluator=evaluator)
random.seed(20200131)
np.random.seed(20190827)
torch.manual_seed(20200131)
sys_response = ''
sess.init_session()
print('init goal:')
pprint(sess.evaluator.goal)
print('-'*50)
for i in range(40):
sys_response, user_response, session_over, reward = sess.next_turn(sys_response)
print('user:', user_response)
print('sys:', sys_response)
print()
if session_over is True:
print('task complete:', user_policy.policy.goal.task_complete())
print('task success:', sess.evaluator.task_success())
print('book rate:', sess.evaluator.book_rate())
print('inform precision/recall/f1:', sess.evaluator.inform_F1())
print('-'*50)
print('final goal:')
pprint(sess.evaluator.goal)
print('='*100)
break
total_dialog = 10
random.seed(20200131)
goal_seeds = [random.randint(1,100000) for _ in range(total_dialog)]
precision = 0
recall = 0
f1 = 0
suc_num = 0
complete_num = 0
for j in range(total_dialog):
sys_response = ''
random.seed(goal_seeds[0])
np.random.seed(goal_seeds[0])
torch.manual_seed(goal_seeds[0])
goal_seeds.pop(0)
sess.init_session()
for i in range(40):
sys_response, user_response, session_over, reward = sess.next_turn(
sys_response)
if session_over is True:
if sess.evaluator.task_success() == 1:
suc_num = suc_num+1
if user_policy.policy.goal.task_complete():
complete_num += 1
print('task complete:', user_policy.policy.goal.task_complete())
print('task success:', sess.evaluator.task_success())
print('book rate:', sess.evaluator.book_rate())
print('inform precision/recall/f1:', sess.evaluator.inform_F1())
stats = sess.evaluator.inform_F1()
if(stats[0] != None):
precision = precision+stats[0]
if(stats[1] != None):
recall = recall+stats[1]
if(stats[2] != None):
f1 = f1+stats[2]
else:
suc_num = suc_num-1
break
print("complete number of dialogs/tot:", complete_num/total_dialog)
print("success number of dialogs/tot:", suc_num/total_dialog)
print("average precision:", precision/total_dialog)
print("average recall:", recall/total_dialog)
print("average f1:", f1/total_dialog) | true | true |
f738eb6ca204ada7a97361995987942b2201b0e0 | 5,821 | py | Python | legacy/brats/original_unet_train.py | zjdcts/CSAM-U-Net | 91fae3c6b4fc7247ba9ee2dc6e64b51da569bf2e | [
"MIT"
] | 1 | 2021-07-28T03:36:34.000Z | 2021-07-28T03:36:34.000Z | legacy/brats/original_unet_train.py | zjdcts/CSAM-U-Net | 91fae3c6b4fc7247ba9ee2dc6e64b51da569bf2e | [
"MIT"
] | null | null | null | legacy/brats/original_unet_train.py | zjdcts/CSAM-U-Net | 91fae3c6b4fc7247ba9ee2dc6e64b51da569bf2e | [
"MIT"
] | 1 | 2021-07-28T03:36:37.000Z | 2021-07-28T03:36:37.000Z | import os
import glob
from legacy.unet3dlegacy.data import write_data_to_file, open_data_file
from legacy.unet3dlegacy.generator import get_training_and_validation_generators
from legacy.unet3dlegacy.model import unet_model_3d
from legacy.unet3dlegacy.training import load_old_model, train_model
# Global pipeline configuration: image geometry, training hyper-parameters,
# augmentation switches and on-disk file locations.
config = dict()
config["pool_size"] = (2, 2, 2)  # pool size for the max pooling operations
config["image_shape"] = (144, 144, 144)  # This determines what shape the images will be cropped/resampled to.
config["patch_shape"] = (64, 64, 64)  # switch to None to train on the whole image
config["labels"] = (1, 2, 4)  # the label numbers on the input image
config["n_labels"] = len(config["labels"])
config["all_modalities"] = ["t1", "t1ce", "flair", "t2"]
config["training_modalities"] = config["all_modalities"]  # change this if you want to only use some of the modalities
config["nb_channels"] = len(config["training_modalities"])
# Network input is (channels, patch) when patch training, else (channels, image).
if "patch_shape" in config and config["patch_shape"] is not None:
    config["input_shape"] = tuple([config["nb_channels"]] + list(config["patch_shape"]))
else:
    config["input_shape"] = tuple([config["nb_channels"]] + list(config["image_shape"]))
config["truth_channel"] = config["nb_channels"]
config["deconvolution"] = True  # if False, will use upsampling instead of deconvolution
config["batch_size"] = 6
config["validation_batch_size"] = 12
config["n_epochs"] = 500  # cutoff the training after this many epochs
config["patience"] = 10  # learning rate will be reduced after this many epochs if the validation loss is not improving
config["early_stop"] = 50  # training will be stopped after this many epochs without the validation loss improving
config["initial_learning_rate"] = 0.00001
config["learning_rate_drop"] = 0.5  # factor by which the learning rate will be reduced
config["validation_split"] = 0.8  # portion of the data that will be used for training
config["flip"] = False  # augments the data by randomly flipping an axis during training
config["permute"] = False  # data shape must be a cube. Augments the data by permuting in various directions
config["distort"] = None  # switch to None if you want no distortion
config["augment"] = config["flip"] or config["distort"]
config["validation_patch_overlap"] = 0  # if > 0, during training, validation patches will be overlapping
config["training_patch_start_offset"] = (16, 16, 16)  # randomly offset the first patch index by up to this offset
config["skip_blank"] = True  # if True, then patches without any target will be skipped
config["data_file"] = os.path.abspath("brats_data.h5")
config["model_file"] = os.path.abspath("tumor_segmentation_model.h5")
config["training_file"] = os.path.abspath("training_ids.pkl")
config["validation_file"] = os.path.abspath("validation_ids.pkl")
config["overwrite"] = False  # If True, will overwrite previous files. If False, will use previously written files.
def fetch_training_data_files():
    """Collect per-subject tuples of modality and ground-truth image paths.

    Scans data/preprocessed/<group>/<subject> next to this script; each subject
    directory yields one tuple of "<name>.nii.gz" paths, ordered as the
    configured training modalities followed by "truth".
    """
    pattern = os.path.join(os.path.dirname(__file__), "data", "preprocessed", "*", "*")
    data_files = []
    for subject_dir in glob.glob(pattern):
        names = config["training_modalities"] + ["truth"]
        data_files.append(tuple(os.path.join(subject_dir, name + ".nii.gz") for name in names))
    return data_files
def main(overwrite=False):
    """Run the full training pipeline.

    Builds (or reuses) the HDF5 data file, loads or instantiates the 3D U-Net,
    creates the training/validation generators and trains the model.

    :param overwrite: if True, regenerate the data file and train a fresh
        model; if False, reuse any previously written files.
    """
    # convert input images into an hdf5 file
    if overwrite or not os.path.exists(config["data_file"]):
        training_files = fetch_training_data_files()
        write_data_to_file(training_files, config["data_file"], image_shape=config["image_shape"])
    data_file_opened = open_data_file(config["data_file"])
    if not overwrite and os.path.exists(config["model_file"]):
        model = load_old_model(config["model_file"])
    else:
        # instantiate new model
        model = unet_model_3d(input_shape=config["input_shape"],
                              pool_size=config["pool_size"],
                              n_labels=config["n_labels"],
                              initial_learning_rate=config["initial_learning_rate"],
                              deconvolution=config["deconvolution"])
    # get training and testing generators
    train_generator, validation_generator, n_train_steps, n_validation_steps = get_training_and_validation_generators(
        data_file_opened,
        batch_size=config["batch_size"],
        data_split=config["validation_split"],
        overwrite=overwrite,
        validation_keys_file=config["validation_file"],
        training_keys_file=config["training_file"],
        n_labels=config["n_labels"],
        labels=config["labels"],
        patch_shape=config["patch_shape"],
        validation_batch_size=config["validation_batch_size"],
        validation_patch_overlap=config["validation_patch_overlap"],
        training_patch_start_offset=config["training_patch_start_offset"],
        permute=config["permute"],
        augment=config["augment"],
        skip_blank=config["skip_blank"],
        augment_flip=config["flip"],
        augment_distortion_factor=config["distort"])
    # run training
    train_model(model=model,
                model_file=config["model_file"],
                training_generator=train_generator,
                validation_generator=validation_generator,
                steps_per_epoch=n_train_steps,
                validation_steps=n_validation_steps,
                initial_learning_rate=config["initial_learning_rate"],
                learning_rate_drop=config["learning_rate_drop"],
                learning_rate_patience=config["patience"],
                early_stopping_patience=config["early_stop"],
                n_epochs=config["n_epochs"])
    # Close the HDF5 handle once training completes.
    data_file_opened.close()
# Script entry point: honour the module-level overwrite flag.
if __name__ == "__main__":
    main(overwrite=config["overwrite"])
| 51.061404 | 119 | 0.706923 | import os
import glob
from legacy.unet3dlegacy.data import write_data_to_file, open_data_file
from legacy.unet3dlegacy.generator import get_training_and_validation_generators
from legacy.unet3dlegacy.model import unet_model_3d
from legacy.unet3dlegacy.training import load_old_model, train_model
config = dict()
config["pool_size"] = (2, 2, 2)
config["image_shape"] = (144, 144, 144)
config["patch_shape"] = (64, 64, 64)
config["labels"] = (1, 2, 4)
config["n_labels"] = len(config["labels"])
config["all_modalities"] = ["t1", "t1ce", "flair", "t2"]
config["training_modalities"] = config["all_modalities"]
config["nb_channels"] = len(config["training_modalities"])
if "patch_shape" in config and config["patch_shape"] is not None:
config["input_shape"] = tuple([config["nb_channels"]] + list(config["patch_shape"]))
else:
config["input_shape"] = tuple([config["nb_channels"]] + list(config["image_shape"]))
config["truth_channel"] = config["nb_channels"]
config["deconvolution"] = True
config["batch_size"] = 6
config["validation_batch_size"] = 12
config["n_epochs"] = 500
config["patience"] = 10
config["early_stop"] = 50
config["initial_learning_rate"] = 0.00001
config["learning_rate_drop"] = 0.5
config["validation_split"] = 0.8
config["flip"] = False
config["permute"] = False
config["distort"] = None
config["augment"] = config["flip"] or config["distort"]
config["validation_patch_overlap"] = 0
config["training_patch_start_offset"] = (16, 16, 16)
config["skip_blank"] = True
config["data_file"] = os.path.abspath("brats_data.h5")
config["model_file"] = os.path.abspath("tumor_segmentation_model.h5")
config["training_file"] = os.path.abspath("training_ids.pkl")
config["validation_file"] = os.path.abspath("validation_ids.pkl")
config["overwrite"] = False
def fetch_training_data_files():
training_data_files = list()
for subject_dir in glob.glob(os.path.join(os.path.dirname(__file__), "data", "preprocessed", "*", "*")):
subject_files = list()
for modality in config["training_modalities"] + ["truth"]:
subject_files.append(os.path.join(subject_dir, modality + ".nii.gz"))
training_data_files.append(tuple(subject_files))
return training_data_files
def main(overwrite=False):
    """End-to-end training entry point.

    Builds (or reuses) the HDF5 data file from the preprocessed images,
    builds (or loads) the 3D U-Net, wires up the patch generators and trains.

    :param overwrite: when True, regenerate the data file and train a fresh
        model even if artifacts already exist on disk.
    """
    # Convert raw data to an HDF5 file if it does not exist yet.
    # NOTE(review): write_data_to_file/open_data_file are not imported in the
    # visible chunk — presumably imported earlier in the file; confirm.
    if overwrite or not os.path.exists(config["data_file"]):
        training_files = fetch_training_data_files()
        write_data_to_file(training_files, config["data_file"], image_shape=config["image_shape"])
    data_file_opened = open_data_file(config["data_file"])
    # Resume from a saved model unless asked to overwrite.
    if not overwrite and os.path.exists(config["model_file"]):
        model = load_old_model(config["model_file"])
    else:
        # Instantiate a new 3D U-Net from the configured input shape.
        model = unet_model_3d(input_shape=config["input_shape"],
                              pool_size=config["pool_size"],
                              n_labels=config["n_labels"],
                              initial_learning_rate=config["initial_learning_rate"],
                              deconvolution=config["deconvolution"])
    # Build training/validation patch generators and their step counts.
    train_generator, validation_generator, n_train_steps, n_validation_steps = get_training_and_validation_generators(
        data_file_opened,
        batch_size=config["batch_size"],
        data_split=config["validation_split"],
        overwrite=overwrite,
        validation_keys_file=config["validation_file"],
        training_keys_file=config["training_file"],
        n_labels=config["n_labels"],
        labels=config["labels"],
        patch_shape=config["patch_shape"],
        validation_batch_size=config["validation_batch_size"],
        validation_patch_overlap=config["validation_patch_overlap"],
        training_patch_start_offset=config["training_patch_start_offset"],
        permute=config["permute"],
        augment=config["augment"],
        skip_blank=config["skip_blank"],
        augment_flip=config["flip"],
        augment_distortion_factor=config["distort"])
    # Run the training loop (checkpoints to model_file, LR schedule + early stop).
    train_model(model=model,
                model_file=config["model_file"],
                training_generator=train_generator,
                validation_generator=validation_generator,
                steps_per_epoch=n_train_steps,
                validation_steps=n_validation_steps,
                initial_learning_rate=config["initial_learning_rate"],
                learning_rate_drop=config["learning_rate_drop"],
                learning_rate_patience=config["patience"],
                early_stopping_patience=config["early_stop"],
                n_epochs=config["n_epochs"])
    data_file_opened.close()
if __name__ == "__main__":
main(overwrite=config["overwrite"])
| true | true |
f738ec615bbc59a200166241a693db04f8899803 | 1,074 | py | Python | bin/test-object-detector.py | ririko-nakamura/HOG-SVM-python | 5baa2d715b7c7a69a33328ef01f67d0af3c0b682 | [
"MIT"
] | 44 | 2018-12-07T09:44:36.000Z | 2022-03-08T02:43:13.000Z | bin/test-object-detector.py | deng88598424/HOG-SVM-python | 5baa2d715b7c7a69a33328ef01f67d0af3c0b682 | [
"MIT"
] | 5 | 2019-06-04T13:26:29.000Z | 2021-12-28T15:27:06.000Z | bin/test-object-detector.py | deng88598424/HOG-SVM-python | 5baa2d715b7c7a69a33328ef01f67d0af3c0b682 | [
"MIT"
] | 29 | 2019-01-17T07:36:35.000Z | 2022-03-22T12:59:23.000Z | #!/usr/bin/python
import os
# Link to the UIUC Car Database
# http://l2r.cs.uiuc.edu/~cogcomp/Data/Car/CarData.tar.gz
# dataset_url = "http://l2r.cs.uiuc.edu/~cogcomp/Data/Car/CarData.tar.gz"
# dataset_path = "../data/dataset/CarData.tar.gz"
# Fetch and extract the dataset
# if not os.path.exists(dataset_path):
# os.system("wget {} -O {}".format(dataset_url, dataset_path))
# os.system("tar -xvzf {} -C {}".format(dataset_path, os.path.split(dataset_path)[0]))
# Extract the features
# pos_path = "../data/dataset/CarData/pos"
# neg_path = "../data/dataset/CarData/neg"
# os.system("python ../object-detector/extract-features.py -p {} -n {}".format(pos_path, neg_path))
# Perform training
# pos_feat_path = "../data/features/pos"
# neg_feat_path = "../data/features/neg"
# os.system("python ../object-detector/train-classifier.py -p {} -n {}".format(pos_feat_path, neg_feat_path))
# Perform testing
# Run the trained classifier on one sample image; the second format argument
# fills the -d flag (presumably a downscale factor — confirm against
# test-classifier.py's argument parser).
test_im_path = "../data/dataset/CarData/TestImages/test-18.pgm"
os.system("python ../object-detector/test-classifier.py -i {} -d {}".format(test_im_path,2))
| 39.777778 | 109 | 0.699255 |
import os
test_im_path = "../data/dataset/CarData/TestImages/test-18.pgm"
os.system("python ../object-detector/test-classifier.py -i {} -d {}".format(test_im_path,2))
| true | true |
f738ecb6bc186b66228336eb1ee27467d2b2701a | 1,241 | py | Python | crudProj/crudProj/urls.py | Dishant15/simform-crud | 93a02898f6dc5e8fa509f324a1e6d4137d7d26ad | [
"Apache-2.0"
] | null | null | null | crudProj/crudProj/urls.py | Dishant15/simform-crud | 93a02898f6dc5e8fa509f324a1e6d4137d7d26ad | [
"Apache-2.0"
] | null | null | null | crudProj/crudProj/urls.py | Dishant15/simform-crud | 93a02898f6dc5e8fa509f324a1e6d4137d7d26ad | [
"Apache-2.0"
] | null | null | null | """crudProj URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf.urls import url, include
from crudApp import views
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
    path('admin/', admin.site.urls),
    # Authentication endpoints served by crudApp's views.
    url(r'^register/$', views.register, name='register'),
    url(r'^user_login/$', views.user_login, name='user_login'),
    url(r'^logout/$', views.user_logout, name='logout'),
    # All remaining URLs are delegated to the app-level URLconf.
    url(r'^', include('crudApp.urls')),
]
# Serve user-uploaded media through Django only while DEBUG is on
# (a real web server should serve media in production).
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL,
                          document_root=settings.MEDIA_ROOT)
| 35.457143 | 77 | 0.701048 | from django.contrib import admin
from django.urls import path
from django.conf.urls import url, include
from crudApp import views
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
path('admin/', admin.site.urls),
url(r'^register/$', views.register, name='register'),
url(r'^user_login/$', views.user_login, name='user_login'),
url(r'^logout/$', views.user_logout, name='logout'),
url(r'^', include('crudApp.urls')),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
| true | true |
f738ed20226c816f2abe1cbbc4bdbd9d9c9c81d1 | 2,904 | py | Python | tests/on_policy_models/test_on_policy_model.py | HenryJia/lightning-baselines3 | 10d1a0eed6136978204323250e37d49915a12e14 | [
"MIT"
] | 3 | 2021-01-18T23:27:38.000Z | 2021-10-04T12:07:16.000Z | tests/on_policy_models/test_on_policy_model.py | HenryJia/lightning-baselines3 | 10d1a0eed6136978204323250e37d49915a12e14 | [
"MIT"
] | 8 | 2021-01-21T03:29:29.000Z | 2021-07-25T18:45:39.000Z | tests/on_policy_models/test_on_policy_model.py | HenryJia/lightning-baselines3 | 10d1a0eed6136978204323250e37d49915a12e14 | [
"MIT"
] | null | null | null | from collections import OrderedDict
import pytest
import gym
from gym import spaces
import torch
from torch import nn
import torch.nn.functional as F
from torch import distributions
import pytorch_lightning as pl
from lightning_baselines3.on_policy_models.on_policy_model import OnPolicyModel
class DummyModel(OnPolicyModel):
    """Minimal on-policy model used to exercise the OnPolicyModel loop.

    A single learnable parameter row drives the whole policy: for Discrete
    action spaces it becomes Categorical logits, for Box spaces it is split
    into (loc, raw-scale) halves of a Normal distribution.
    """

    def __init__(self, *args, **kwargs):
        super(DummyModel, self).__init__(*args, **kwargs)
        if isinstance(self.action_space, spaces.Discrete):
            width = self.action_space.n
        elif isinstance(self.action_space, spaces.Box):
            # One (loc, scale) pair per action dimension.
            width = self.action_space.shape[0] * 2
        else:
            raise Exception('Incompatible environment action space')
        self.p = nn.Parameter(torch.ones(1, width) * 0.5)

    def _expanded_params(self, x):
        # Broadcast the single parameter row across the batch dimension.
        return self.p.expand(x.shape[0], self.p.shape[-1])

    def forward(self, x, **kwargs):
        params = self._expanded_params(x)
        if isinstance(self.action_space, spaces.Discrete):
            dist = distributions.Categorical(probs=F.softmax(params, dim=1))
        elif isinstance(self.action_space, spaces.Box):
            loc, raw_scale = torch.chunk(params, 2, dim=1)
            dist = distributions.Normal(loc=loc, scale=1 + raw_scale ** 2)
        # Dummy value estimate: one column of ones per batch element.
        return dist, torch.ones_like(x)[:, :1]

    def predict(self, x, deterministic=True):
        params = self._expanded_params(x)
        if deterministic:
            if isinstance(self.action_space, spaces.Discrete):
                out = torch.max(params, dim=1)[1]
            elif isinstance(self.action_space, spaces.Box):
                out = torch.chunk(params, 2, dim=1)[0]
        else:
            if isinstance(self.action_space, spaces.Discrete):
                out = distributions.Categorical(probs=F.softmax(params, dim=1)).sample()
            elif isinstance(self.action_space, spaces.Box):
                loc, raw_scale = torch.chunk(params, 2, dim=1)
                out = distributions.Normal(loc=loc, scale=1 + raw_scale ** 2).sample()
        return out.cpu().numpy()

    def training_step(self, x, batch_idx):
        # The policy's mean entropy serves as a dummy differentiable loss.
        loss = self(x.observations)[0].entropy().mean()
        self.log('loss', loss)
        return loss

    def configure_optimizers(self):
        return torch.optim.Adam(self.parameters(), lr=1e-3)
@pytest.mark.parametrize("env_id", ["CartPole-v1", "MountainCar-v0", "MountainCarContinuous-v0"])
def test_on_policy_model(env_id):
    """
    Smoke-test OnPolicyModel end-to-end: run two trainer epochs on built-in
    Gym environments covering both discrete and continuous action spaces.

    :param env_id: (str) Gym environment id to train on.
    """
    # Small rollout/batch settings keep the test fast; seed for determinism.
    model = DummyModel(
        env_id,
        eval_env=env_id,
        buffer_length=512,
        num_rollouts=1,
        batch_size=32,
        epochs_per_rollout=10,
        num_eval_episodes=10,
        gamma=0.9,
        gae_lambda=0.95,
        use_sde=False,
        sde_sample_freq=-1,
        verbose=1,
        seed=1234)
    # terminate_on_nan makes the test fail fast if the loss diverges.
    trainer = pl.Trainer(max_epochs=2, terminate_on_nan=True)
    trainer.fit(model)
| 30.893617 | 97 | 0.625 | from collections import OrderedDict
import pytest
import gym
from gym import spaces
import torch
from torch import nn
import torch.nn.functional as F
from torch import distributions
import pytorch_lightning as pl
from lightning_baselines3.on_policy_models.on_policy_model import OnPolicyModel
class DummyModel(OnPolicyModel):
def __init__(self, *args, **kwargs):
super(DummyModel, self).__init__(*args, **kwargs)
if isinstance(self.action_space, spaces.Discrete):
self.p = nn.Parameter(torch.ones(1, self.action_space.n) * 0.5)
elif isinstance(self.action_space, spaces.Box):
self.p = nn.Parameter(torch.ones(1, self.action_space.shape[0] * 2) * 0.5)
else:
raise Exception('Incompatible environment action space')
def forward(self, x, **kwargs):
p = self.p.expand(x.shape[0], self.p.shape[-1])
if isinstance(self.action_space, spaces.Discrete):
dist = distributions.Categorical(probs=F.softmax(p, dim=1))
elif isinstance(self.action_space, spaces.Box):
p = torch.chunk(p, 2, dim=1)
dist = distributions.Normal(loc=p[0], scale=1 + p[1] ** 2)
return dist, torch.ones_like(x)[:, :1]
def predict(self, x, deterministic=True):
p = self.p.expand(x.shape[0], self.p.shape[-1])
if deterministic:
if isinstance(self.action_space, spaces.Discrete):
out = torch.max(p, dim=1)[1]
elif isinstance(self.action_space, spaces.Box):
out = torch.chunk(p, 2, dim=1)[0]
else:
if isinstance(self.action_space, spaces.Discrete):
out = distributions.Categorical(probs=F.softmax(p, dim=1)).sample()
elif isinstance(self.action_space, spaces.Box):
p = torch.chunk(p, 2, dim=1)
out = distributions.Normal(loc=p[0], scale=1 + p[1] ** 2).sample()
return out.cpu().numpy()
def training_step(self, x, batch_idx):
loss = self(x.observations)[0].entropy().mean()
self.log('loss', loss)
return loss
def configure_optimizers(self):
optimizer = torch.optim.Adam(self.parameters(), lr=1e-3)
return optimizer
@pytest.mark.parametrize("env_id", ["CartPole-v1", "MountainCar-v0", "MountainCarContinuous-v0"])
def test_on_policy_model(env_id):
model = DummyModel(
env_id,
eval_env=env_id,
buffer_length=512,
num_rollouts=1,
batch_size=32,
epochs_per_rollout=10,
num_eval_episodes=10,
gamma=0.9,
gae_lambda=0.95,
use_sde=False,
sde_sample_freq=-1,
verbose=1,
seed=1234)
trainer = pl.Trainer(max_epochs=2, terminate_on_nan=True)
trainer.fit(model)
| true | true |
f738ee018a1e60522d6b78e953990ae94a7f8ee5 | 13,942 | py | Python | vsts/vsts/vss_client.py | mariusvniekerk/vsts-python-api | db69b3bdb08d926ff64239c3d651741a2ae4ea87 | [
"MIT"
] | null | null | null | vsts/vsts/vss_client.py | mariusvniekerk/vsts-python-api | db69b3bdb08d926ff64239c3d651741a2ae4ea87 | [
"MIT"
] | null | null | null | vsts/vsts/vss_client.py | mariusvniekerk/vsts-python-api | db69b3bdb08d926ff64239c3d651741a2ae4ea87 | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from __future__ import print_function
import logging
import os
import re
import uuid
from msrest import Deserializer, Serializer
from msrest.exceptions import DeserializationError, SerializationError
from msrest.pipeline import ClientRequest
from msrest.service_client import ServiceClient
from .exceptions import VstsAuthenticationError, VstsClientRequestError, VstsServiceError
from .vss_client_configuration import VssClientConfiguration
from . import models
from ._file_cache import OPTIONS_CACHE as OPTIONS_FILE_CACHE
logger = logging.getLogger(__name__)
class VssClient(object):
    """VssClient.

    Base REST client for VSTS services. Resolves API resource locations
    (cached in memory and on disk), negotiates the api-version per request,
    and translates error responses into Vsts* exceptions.

    :param str base_url: Service URL
    :param Authentication creds: Authenticated credentials.
    """

    def __init__(self, base_url=None, creds=None):
        self.config = VssClientConfiguration(base_url)
        self._client = ServiceClient(creds, self.config)
        # Register every model type from the `models` package with msrest.
        _base_client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._base_deserialize = Deserializer(_base_client_models)
        self._base_serialize = Serializer(_base_client_models)
        # Lazily-populated OPTIONS results (see _get_resource_locations).
        self._all_host_types_locations = None
        self._locations = None
        # Ask the service for a 401 instead of an HTML sign-in redirect.
        self._suppress_fedauth_redirect = True
        self.normalized_url = VssClient._normalize_url(base_url)

    def add_user_agent(self, user_agent):
        """Append *user_agent* to the User-Agent header sent with requests."""
        if user_agent is not None:
            self.config.add_user_agent(user_agent)

    def _send_request(self, request, headers=None, content=None, **operation_config):
        """Prepare and send request object according to configuration.

        :param ClientRequest request: The request object to be sent.
        :param dict headers: Any headers to add to the request.
        :param content: Any body data to add to the request.
        :param config: Any specific config overrides
        """
        # TRACE_ENV_VAR is a module-level constant defined after this class.
        if TRACE_ENV_VAR in os.environ and os.environ[TRACE_ENV_VAR] == 'true':
            print(request.method + ' ' + request.url)
        logger.debug('%s %s', request.method, request.url)
        logger.debug('Request content: %s', content)
        response = self._client.send(request=request, headers=headers,
                                     content=content, **operation_config)
        logger.debug('Response content: %s', response.content)
        # Anything outside 2xx is treated as an error and raised.
        if response.status_code < 200 or response.status_code >= 300:
            self._handle_error(request, response)
        return response

    def _send(self, http_method, location_id, version, route_values=None,
              query_parameters=None, content=None, media_type='application/json',
              returns_collection=False):
        """Build, version-negotiate and send a request to a resource location.

        :param str http_method: HTTP verb (GET, POST, ...).
        :param str location_id: GUID of the registered API resource location.
        :param str version: requested api-version (may include '-preview.N').
        :param dict route_values: values for the location's route template.
        :param dict query_parameters: query string parameters.
        :param content: request body (already serialized).
        :param str media_type: Content-Type of the body.
        :param bool returns_collection: when True, unwrap the
            VssJsonCollectionWrapper and return its ``value``.
        """
        request = self._create_request_message(http_method=http_method,
                                               location_id=location_id,
                                               route_values=route_values,
                                               query_parameters=query_parameters)
        negotiated_version = self._negotiate_request_version(
            self._get_resource_location(location_id),
            version)

        if version != negotiated_version:
            logger.info("Negotiated api version from '%s' down to '%s'. This means the client is newer than the server.",
                        version,
                        negotiated_version)
        else:
            logger.debug("Api version '%s'", negotiated_version)

        # Construct headers
        headers = {'Content-Type': media_type + '; charset=utf-8',
                   'Accept': 'application/json;api-version=' + negotiated_version}
        if self.config.additional_headers is not None:
            for key in self.config.additional_headers:
                headers[key] = self.config.additional_headers[key]
        if self._suppress_fedauth_redirect:
            headers['X-TFS-FedAuthRedirect'] = 'Suppress'
        # Reuse the process-wide session id once the server has echoed one back.
        if VssClient._session_header_key in VssClient._session_data and VssClient._session_header_key not in headers:
            headers[VssClient._session_header_key] = VssClient._session_data[VssClient._session_header_key]
        response = self._send_request(request=request, headers=headers, content=content)
        if VssClient._session_header_key in response.headers:
            VssClient._session_data[VssClient._session_header_key] = response.headers[VssClient._session_header_key]
        if returns_collection:
            # Chunked responses must be parsed from the JSON payload directly.
            if response.headers.get("transfer-encoding") == 'chunked':
                wrapper = self._base_deserialize.deserialize_data(response.json(), 'VssJsonCollectionWrapper')
            else:
                wrapper = self._base_deserialize('VssJsonCollectionWrapper', response)
            collection = wrapper.value
            return collection
        else:
            return response

    def _create_request_message(self, http_method, location_id, route_values=None,
                                query_parameters=None):
        """Create a ClientRequest for *location_id* with the route filled in.

        :raises ValueError: if the location id is not registered on the server.
        """
        location = self._get_resource_location(location_id)
        if location is None:
            raise ValueError('API resource location ' + location_id + ' is not registered on '
                             + self.config.base_url + '.')
        if route_values is None:
            route_values = {}
        route_values['area'] = location.area
        route_values['resource'] = location.resource_name
        route_template = self._remove_optional_route_parameters(location.route_template,
                                                                route_values)
        logger.debug('Route template: %s', location.route_template)
        url = self._client.format_url(route_template, **route_values)
        request = ClientRequest()
        request.url = self._client.format_url(url)
        if query_parameters:
            request.format_parameters(query_parameters)
        request.method = http_method
        return request

    @staticmethod
    def _remove_optional_route_parameters(route_template, route_values):
        """Drop ``{param}`` segments from the template that have no value supplied."""
        new_template = ''
        # '{*param}' (wildcard) segments are treated like plain '{param}'.
        route_template = route_template.replace('{*', '{')
        for path_segment in route_template.split('/'):
            if (len(path_segment) <= 2 or not path_segment[0] == '{'
                    or not path_segment[len(path_segment) - 1] == '}'
                    or path_segment[1:len(path_segment) - 1] in route_values):
                new_template = new_template + '/' + path_segment
        return new_template

    def _get_resource_location(self, location_id):
        """Look up one ApiResourceLocation by GUID (fills the per-URL cache)."""
        if self.config.base_url not in VssClient._locations_cache:
            VssClient._locations_cache[self.config.base_url] = self._get_resource_locations(all_host_types=False)
        for location in VssClient._locations_cache[self.config.base_url]:
            if location.id == location_id:
                return location

    def _get_resource_locations(self, all_host_types):
        """Fetch the list of ApiResourceLocation objects for this service.

        Checks, in order: the in-memory cache on this instance, the on-disk
        options cache, and finally an OPTIONS request to ``<base_url>/_apis``.
        """
        # Check local client's cached Options first
        if all_host_types:
            if self._all_host_types_locations is not None:
                return self._all_host_types_locations
        elif self._locations is not None:
            return self._locations

        # Next check for options cached on disk
        if not all_host_types and OPTIONS_FILE_CACHE[self.normalized_url]:
            try:
                logger.debug('File cache hit for options on: %s', self.normalized_url)
                self._locations = self._base_deserialize.deserialize_data(OPTIONS_FILE_CACHE[self.normalized_url],
                                                                          '[ApiResourceLocation]')
                return self._locations
            except DeserializationError as ex:
                logger.debug(ex, exc_info=True)
        else:
            logger.debug('File cache miss for options on: %s', self.normalized_url)

        # Last resort, make the call to the server
        options_uri = self._combine_url(self.config.base_url, '_apis')
        request = ClientRequest()
        request.url = self._client.format_url(options_uri)
        if all_host_types:
            query_parameters = {'allHostTypes': True}
            request.format_parameters(query_parameters)
        request.method = 'OPTIONS'
        headers = {'Accept': 'application/json'}
        if self._suppress_fedauth_redirect:
            headers['X-TFS-FedAuthRedirect'] = 'Suppress'
        response = self._send_request(request, headers=headers)
        wrapper = self._base_deserialize('VssJsonCollectionWrapper', response)
        if wrapper is None:
            raise VstsClientRequestError("Failed to retrieve resource locations from: {}".format(options_uri))
        collection = wrapper.value
        returned_locations = self._base_deserialize('[ApiResourceLocation]',
                                                    collection)
        if all_host_types:
            self._all_host_types_locations = returned_locations
        else:
            self._locations = returned_locations
            # Persist to the on-disk cache; serialization failures only logged.
            try:
                OPTIONS_FILE_CACHE[self.normalized_url] = wrapper.value
            except SerializationError as ex:
                logger.debug(ex, exc_info=True)
        return returned_locations

    @staticmethod
    def _negotiate_request_version(location, version):
        """Reconcile the requested api-version with what *location* supports.

        Returns None when the server has deprecated the resource, otherwise a
        version string (possibly downgraded, possibly with '-preview[.N]').
        NOTE(review): in the pattern below the dot in '(.(\\d+))?' is
        unescaped, so it matches any character, not just '.'; harmless for
        well-formed versions like '4.1-preview.2' but worth confirming.
        """
        if location is None or version is None:
            return version

        pattern = r'(\d+(\.\d)?)(-preview(.(\d+))?)?'
        match = re.match(pattern, version)
        requested_api_version = match.group(1)
        if requested_api_version is not None:
            requested_api_version = float(requested_api_version)

        if location.min_version > requested_api_version:
            # Client is older than the server. The server no longer supports this
            # resource (deprecated).
            return
        elif location.max_version < requested_api_version:
            # Client is newer than the server. Negotiate down to the latest version
            # on the server
            negotiated_version = str(location.max_version)
            if float(location.released_version) < location.max_version:
                negotiated_version += '-preview'
            return negotiated_version
        else:
            # We can send at the requested api version. Make sure the resource version
            # is not bigger than what the server supports
            negotiated_version = str(requested_api_version)
            is_preview = match.group(3) is not None
            if is_preview:
                negotiated_version += '-preview'
                if match.group(5) is not None:
                    if location.resource_version < int(match.group(5)):
                        negotiated_version += '.' + str(location.resource_version)
                    else:
                        negotiated_version += '.' + match.group(5)
            return negotiated_version

    @staticmethod
    def _combine_url(part1, part2):
        """Join two URL fragments with exactly one '/' between them."""
        return part1.rstrip('/') + '/' + part2.strip('/')

    def _handle_error(self, request, response):
        """Translate a non-2xx *response* into the appropriate Vsts* exception.

        :raises VstsServiceError: for server-side wrapped exceptions.
        :raises VstsAuthenticationError: on HTTP 401.
        :raises VstsClientRequestError: for all other failures.
        """
        content_type = response.headers.get('Content-Type')
        error_message = ''
        if content_type is None or content_type.find('text/plain') < 0:
            try:
                wrapped_exception = self._base_deserialize('WrappedException', response)
                if wrapped_exception is not None and wrapped_exception.message is not None:
                    raise VstsServiceError(wrapped_exception)
                else:
                    # System exceptions from controllers are not returning wrapped exceptions.
                    # Following code is to handle this unusual exception json case.
                    # TODO: dig into this.
                    collection_wrapper = self._base_deserialize('VssJsonCollectionWrapper', response)
                    if collection_wrapper is not None and collection_wrapper.value is not None:
                        wrapped_exception = self._base_deserialize('ImproperException', collection_wrapper.value)
                        if wrapped_exception is not None and wrapped_exception.message is not None:
                            raise VstsClientRequestError(wrapped_exception.message)
                # if we get here we still have not raised an exception, try to deserialize as a System Exception
                system_exception = self._base_deserialize('SystemException', response)
                if system_exception is not None and system_exception.message is not None:
                    raise VstsClientRequestError(system_exception.message)
            except DeserializationError:
                pass
        elif response.content is not None:
            error_message = response.content.decode("utf-8") + ' '
        if response.status_code == 401:
            full_message_format = '{error_message}The requested resource requires user authentication: {url}'
            raise VstsAuthenticationError(full_message_format.format(error_message=error_message,
                                                                     url=request.url))
        else:
            full_message_format = '{error_message}Operation returned an invalid status code of {status_code}.'
            raise VstsClientRequestError(full_message_format.format(error_message=error_message,
                                                                    status_code=response.status_code))

    @staticmethod
    def _normalize_url(url):
        """Canonical form of a base URL: lowercase, no trailing slash."""
        return url.rstrip('/').lower()

    # Process-wide cache of resource locations, keyed by base URL.
    _locations_cache = {}
    # Header used to correlate requests into one server-side session.
    _session_header_key = 'X-TFS-Session'
    # Session id generated once per process; updated from server responses.
    _session_data = {_session_header_key: str(uuid.uuid4())}
# Environment variable checked in VssClient._send_request; set it to 'true'
# to print each request's method and URL to stdout.
TRACE_ENV_VAR = 'vsts_python_print_urls'
| 50.698182 | 121 | 0.632047 |
from __future__ import print_function
import logging
import os
import re
import uuid
from msrest import Deserializer, Serializer
from msrest.exceptions import DeserializationError, SerializationError
from msrest.pipeline import ClientRequest
from msrest.service_client import ServiceClient
from .exceptions import VstsAuthenticationError, VstsClientRequestError, VstsServiceError
from .vss_client_configuration import VssClientConfiguration
from . import models
from ._file_cache import OPTIONS_CACHE as OPTIONS_FILE_CACHE
logger = logging.getLogger(__name__)
class VssClient(object):
def __init__(self, base_url=None, creds=None):
self.config = VssClientConfiguration(base_url)
self._client = ServiceClient(creds, self.config)
_base_client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._base_deserialize = Deserializer(_base_client_models)
self._base_serialize = Serializer(_base_client_models)
self._all_host_types_locations = None
self._locations = None
self._suppress_fedauth_redirect = True
self.normalized_url = VssClient._normalize_url(base_url)
def add_user_agent(self, user_agent):
if user_agent is not None:
self.config.add_user_agent(user_agent)
def _send_request(self, request, headers=None, content=None, **operation_config):
if TRACE_ENV_VAR in os.environ and os.environ[TRACE_ENV_VAR] == 'true':
print(request.method + ' ' + request.url)
logger.debug('%s %s', request.method, request.url)
logger.debug('Request content: %s', content)
response = self._client.send(request=request, headers=headers,
content=content, **operation_config)
logger.debug('Response content: %s', response.content)
if response.status_code < 200 or response.status_code >= 300:
self._handle_error(request, response)
return response
def _send(self, http_method, location_id, version, route_values=None,
query_parameters=None, content=None, media_type='application/json',
returns_collection=False):
request = self._create_request_message(http_method=http_method,
location_id=location_id,
route_values=route_values,
query_parameters=query_parameters)
negotiated_version = self._negotiate_request_version(
self._get_resource_location(location_id),
version)
if version != negotiated_version:
logger.info("Negotiated api version from '%s' down to '%s'. This means the client is newer than the server.",
version,
negotiated_version)
else:
logger.debug("Api version '%s'", negotiated_version)
headers = {'Content-Type': media_type + '; charset=utf-8',
'Accept': 'application/json;api-version=' + negotiated_version}
if self.config.additional_headers is not None:
for key in self.config.additional_headers:
headers[key] = self.config.additional_headers[key]
if self._suppress_fedauth_redirect:
headers['X-TFS-FedAuthRedirect'] = 'Suppress'
if VssClient._session_header_key in VssClient._session_data and VssClient._session_header_key not in headers:
headers[VssClient._session_header_key] = VssClient._session_data[VssClient._session_header_key]
response = self._send_request(request=request, headers=headers, content=content)
if VssClient._session_header_key in response.headers:
VssClient._session_data[VssClient._session_header_key] = response.headers[VssClient._session_header_key]
if returns_collection:
if response.headers.get("transfer-encoding") == 'chunked':
wrapper = self._base_deserialize.deserialize_data(response.json(), 'VssJsonCollectionWrapper')
else:
wrapper = self._base_deserialize('VssJsonCollectionWrapper', response)
collection = wrapper.value
return collection
else:
return response
def _create_request_message(self, http_method, location_id, route_values=None,
query_parameters=None):
location = self._get_resource_location(location_id)
if location is None:
raise ValueError('API resource location ' + location_id + ' is not registered on '
+ self.config.base_url + '.')
if route_values is None:
route_values = {}
route_values['area'] = location.area
route_values['resource'] = location.resource_name
route_template = self._remove_optional_route_parameters(location.route_template,
route_values)
logger.debug('Route template: %s', location.route_template)
url = self._client.format_url(route_template, **route_values)
request = ClientRequest()
request.url = self._client.format_url(url)
if query_parameters:
request.format_parameters(query_parameters)
request.method = http_method
return request
@staticmethod
def _remove_optional_route_parameters(route_template, route_values):
new_template = ''
route_template = route_template.replace('{*', '{')
for path_segment in route_template.split('/'):
if (len(path_segment) <= 2 or not path_segment[0] == '{'
or not path_segment[len(path_segment) - 1] == '}'
or path_segment[1:len(path_segment) - 1] in route_values):
new_template = new_template + '/' + path_segment
return new_template
def _get_resource_location(self, location_id):
if self.config.base_url not in VssClient._locations_cache:
VssClient._locations_cache[self.config.base_url] = self._get_resource_locations(all_host_types=False)
for location in VssClient._locations_cache[self.config.base_url]:
if location.id == location_id:
return location
def _get_resource_locations(self, all_host_types):
if all_host_types:
if self._all_host_types_locations is not None:
return self._all_host_types_locations
elif self._locations is not None:
return self._locations
# Next check for options cached on disk
if not all_host_types and OPTIONS_FILE_CACHE[self.normalized_url]:
try:
logger.debug('File cache hit for options on: %s', self.normalized_url)
self._locations = self._base_deserialize.deserialize_data(OPTIONS_FILE_CACHE[self.normalized_url],
'[ApiResourceLocation]')
return self._locations
except DeserializationError as ex:
logger.debug(ex, exc_info=True)
else:
logger.debug('File cache miss for options on: %s', self.normalized_url)
# Last resort, make the call to the server
options_uri = self._combine_url(self.config.base_url, '_apis')
request = ClientRequest()
request.url = self._client.format_url(options_uri)
if all_host_types:
query_parameters = {'allHostTypes': True}
request.format_parameters(query_parameters)
request.method = 'OPTIONS'
headers = {'Accept': 'application/json'}
if self._suppress_fedauth_redirect:
headers['X-TFS-FedAuthRedirect'] = 'Suppress'
response = self._send_request(request, headers=headers)
wrapper = self._base_deserialize('VssJsonCollectionWrapper', response)
if wrapper is None:
raise VstsClientRequestError("Failed to retrieve resource locations from: {}".format(options_uri))
collection = wrapper.value
returned_locations = self._base_deserialize('[ApiResourceLocation]',
collection)
if all_host_types:
self._all_host_types_locations = returned_locations
else:
self._locations = returned_locations
try:
OPTIONS_FILE_CACHE[self.normalized_url] = wrapper.value
except SerializationError as ex:
logger.debug(ex, exc_info=True)
return returned_locations
@staticmethod
def _negotiate_request_version(location, version):
if location is None or version is None:
return version
pattern = r'(\d+(\.\d)?)(-preview(.(\d+))?)?'
match = re.match(pattern, version)
requested_api_version = match.group(1)
if requested_api_version is not None:
requested_api_version = float(requested_api_version)
if location.min_version > requested_api_version:
# Client is older than the server. The server no longer supports this
# resource (deprecated).
return
elif location.max_version < requested_api_version:
# Client is newer than the server. Negotiate down to the latest version
# on the server
negotiated_version = str(location.max_version)
if float(location.released_version) < location.max_version:
negotiated_version += '-preview'
return negotiated_version
else:
# We can send at the requested api version. Make sure the resource version
# is not bigger than what the server supports
negotiated_version = str(requested_api_version)
is_preview = match.group(3) is not None
if is_preview:
negotiated_version += '-preview'
if match.group(5) is not None:
if location.resource_version < int(match.group(5)):
negotiated_version += '.' + str(location.resource_version)
else:
negotiated_version += '.' + match.group(5)
return negotiated_version
@staticmethod
def _combine_url(part1, part2):
return part1.rstrip('/') + '/' + part2.strip('/')
def _handle_error(self, request, response):
content_type = response.headers.get('Content-Type')
error_message = ''
if content_type is None or content_type.find('text/plain') < 0:
try:
wrapped_exception = self._base_deserialize('WrappedException', response)
if wrapped_exception is not None and wrapped_exception.message is not None:
raise VstsServiceError(wrapped_exception)
else:
# System exceptions from controllers are not returning wrapped exceptions.
# Following code is to handle this unusual exception json case.
# TODO: dig into this.
collection_wrapper = self._base_deserialize('VssJsonCollectionWrapper', response)
if collection_wrapper is not None and collection_wrapper.value is not None:
wrapped_exception = self._base_deserialize('ImproperException', collection_wrapper.value)
if wrapped_exception is not None and wrapped_exception.message is not None:
raise VstsClientRequestError(wrapped_exception.message)
# if we get here we still have not raised an exception, try to deserialize as a System Exception
system_exception = self._base_deserialize('SystemException', response)
if system_exception is not None and system_exception.message is not None:
raise VstsClientRequestError(system_exception.message)
except DeserializationError:
pass
elif response.content is not None:
error_message = response.content.decode("utf-8") + ' '
if response.status_code == 401:
full_message_format = '{error_message}The requested resource requires user authentication: {url}'
raise VstsAuthenticationError(full_message_format.format(error_message=error_message,
url=request.url))
else:
full_message_format = '{error_message}Operation returned an invalid status code of {status_code}.'
raise VstsClientRequestError(full_message_format.format(error_message=error_message,
status_code=response.status_code))
@staticmethod
def _normalize_url(url):
return url.rstrip('/').lower()
_locations_cache = {}
_session_header_key = 'X-TFS-Session'
_session_data = {_session_header_key: str(uuid.uuid4())}
TRACE_ENV_VAR = 'vsts_python_print_urls'
| true | true |
f738ee36d86611a0e4f64027426f2e2fee301bdd | 1,505 | py | Python | openapi/pagination/search.py | lendingblock/aio-openapi | cd274c618dfeb67c6e77b0293e6f5941e4c98f97 | [
"BSD-3-Clause"
] | 7 | 2018-05-22T16:15:32.000Z | 2019-01-23T19:50:58.000Z | openapi/pagination/search.py | lendingblock/aio-openapi | cd274c618dfeb67c6e77b0293e6f5941e4c98f97 | [
"BSD-3-Clause"
] | 116 | 2018-05-17T21:55:02.000Z | 2019-02-14T08:35:31.000Z | openapi/pagination/search.py | lendingblock/aio-openapi | cd274c618dfeb67c6e77b0293e6f5941e4c98f97 | [
"BSD-3-Clause"
] | null | null | null | from dataclasses import dataclass
from typing import Sequence
from openapi.data.fields import str_field
from openapi.utils import docjoin
from .pagination import from_filters_and_dataclass
class SearchVisitor:
    """Interface implemented by query backends that can apply a search term."""
    def apply_search(self, search: str, search_fields: Sequence[str]) -> None:
        """Apply ``search`` across ``search_fields``; subclasses must override."""
        raise NotImplementedError
@dataclass
class Search:
    """Base search: carries no query string and applies nothing."""
    @classmethod
    def create_search(cls, data: dict) -> "Search":
        """Build a search object from request data; the base class ignores it."""
        return cls()
    def apply(self, visitor: SearchVisitor) -> None:
        """Apply this search via ``visitor``; no-op in the base class."""
        pass
def searchable(*searchable_fields) -> type:
    """Create a dataclass with `search_fields` class attribute and `search` field.

    The `search_fields` attribute is the set of fields which can be used for
    searching and is used internally by the library, while the `search` field
    is the query string passed in the url.

    :param searchable_fields: fields which can be used for searching
    """
    # Human-readable enumeration of the fields for the generated description.
    fields = docjoin(searchable_fields)
    @dataclass
    class Searchable(Search):
        # frozenset: immutable field set shared by all instances of this class
        search_fields = frozenset(searchable_fields)
        search: str = str_field(
            description=(
                "Search query string. " f"The search is performed on {fields} fields."
            )
        )
        @classmethod
        def create_search(cls, data: dict) -> "Searchable":
            """Build a Searchable from the request's filter data."""
            return from_filters_and_dataclass(Searchable, data)
        def apply(self, visitor: SearchVisitor) -> None:
            """Hand the query string and the field set to the backend visitor."""
            visitor.apply_search(self.search, self.search_fields)
    return Searchable
| 28.942308 | 86 | 0.685714 | from dataclasses import dataclass
from typing import Sequence
from openapi.data.fields import str_field
from openapi.utils import docjoin
from .pagination import from_filters_and_dataclass
class SearchVisitor:
def apply_search(self, search: str, search_fields: Sequence[str]) -> None:
raise NotImplementedError
@dataclass
class Search:
@classmethod
def create_search(cls, data: dict) -> "Search":
return cls()
def apply(self, visitor: SearchVisitor) -> None:
pass
def searchable(*searchable_fields) -> type:
fields = docjoin(searchable_fields)
@dataclass
class Searchable(Search):
search_fields = frozenset(searchable_fields)
search: str = str_field(
description=(
"Search query string. " f"The search is performed on {fields} fields."
)
)
@classmethod
def create_search(cls, data: dict) -> "Searchable":
return from_filters_and_dataclass(Searchable, data)
def apply(self, visitor: SearchVisitor) -> None:
visitor.apply_search(self.search, self.search_fields)
return Searchable
| true | true |
f738ee521433ce2b9bd1a33e489995858abb8aa1 | 670 | py | Python | main/byacc/template.py | matu3ba/cports | deab766f255539c3679b78706ec4d194bc019dc9 | [
"BSD-2-Clause"
] | 46 | 2021-06-10T02:27:32.000Z | 2022-03-27T11:33:24.000Z | main/byacc/template.py | matu3ba/cports | deab766f255539c3679b78706ec4d194bc019dc9 | [
"BSD-2-Clause"
] | 58 | 2021-07-03T13:58:20.000Z | 2022-03-13T16:45:35.000Z | main/byacc/template.py | matu3ba/cports | deab766f255539c3679b78706ec4d194bc019dc9 | [
"BSD-2-Clause"
] | 6 | 2021-07-04T10:46:40.000Z | 2022-01-09T00:03:59.000Z | pkgname = "byacc"
pkgver = "20210808"
pkgrel = 0
build_style = "gnu_configure"
configure_args = ["--program-transform=s,^,b,"]
pkgdesc = "Berkeley yacc, a LALR(1) parser generator"
maintainer = "q66 <q66@chimera-linux.org>"
license="custom:byacc"
url = "http://invisible-island.net/byacc"
source = f"ftp://ftp.invisible-island.net/{pkgname}/{pkgname}-{pkgver}.tgz"
sha256 = "f158529be9d0594263c7f11a87616a49ea23e55ac63691252a2304fbbc7d3a83"
options = ["bootstrap"]
def post_install(self):
    # Ship both license documents verbatim.
    for doc in ("README", "LICENSE"):
        self.install_license(doc)
    # Expose the traditional "yacc" names as links to the renamed binaries
    # (the build prefixes everything with "b" via --program-transform).
    links = {
        "byacc": "usr/bin/yacc",
        "byacc.1": "usr/share/man/man1/yacc.1",
    }
    for target, dest in links.items():
        self.install_link(target, dest)
| 35.263158 | 75 | 0.732836 | pkgname = "byacc"
pkgver = "20210808"
pkgrel = 0
build_style = "gnu_configure"
configure_args = ["--program-transform=s,^,b,"]
pkgdesc = "Berkeley yacc, a LALR(1) parser generator"
maintainer = "q66 <q66@chimera-linux.org>"
license="custom:byacc"
url = "http://invisible-island.net/byacc"
source = f"ftp://ftp.invisible-island.net/{pkgname}/{pkgname}-{pkgver}.tgz"
sha256 = "f158529be9d0594263c7f11a87616a49ea23e55ac63691252a2304fbbc7d3a83"
options = ["bootstrap"]
def post_install(self):
self.install_license("README")
self.install_license("LICENSE")
self.install_link("byacc", "usr/bin/yacc")
self.install_link("byacc.1", "usr/share/man/man1/yacc.1")
| true | true |
f738ef9698057d4e6f0320a579c6146780b82dba | 584 | py | Python | odds/common/utils.py | conrad-evans/sports_betting_api | baa80df5608c1cc244f51be86ba29eaabd8f031e | [
"MIT"
] | null | null | null | odds/common/utils.py | conrad-evans/sports_betting_api | baa80df5608c1cc244f51be86ba29eaabd8f031e | [
"MIT"
] | null | null | null | odds/common/utils.py | conrad-evans/sports_betting_api | baa80df5608c1cc244f51be86ba29eaabd8f031e | [
"MIT"
] | null | null | null | import secrets
import time
class Utils:
_instance = None
def __init__(self) -> None:
pass
@staticmethod
def generateRandomId():
"""
Generates a random hexicdecimal string
Returns:
`str`: string with hexidecimal values
>>>
"""
token = secrets.token_hex()
now = time.time()
id = now + token
return id
@staticmethod
def getInstance(re_init=False):
if Utils._instance is None or re_init:
Utils._instance = Utils()
return Utils._instance
| 18.83871 | 49 | 0.563356 | import secrets
import time
class Utils:
_instance = None
def __init__(self) -> None:
pass
@staticmethod
def generateRandomId():
token = secrets.token_hex()
now = time.time()
id = now + token
return id
@staticmethod
def getInstance(re_init=False):
if Utils._instance is None or re_init:
Utils._instance = Utils()
return Utils._instance
| true | true |
f738f3a02a6643116edf664e741054e24c835882 | 332 | py | Python | sdk/python/pulumi_aws/ram/__init__.py | lemonade-hq/pulumi-aws | 9ee22c65c7bad42d38b16879ccd56526d856a01a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/ram/__init__.py | lemonade-hq/pulumi-aws | 9ee22c65c7bad42d38b16879ccd56526d856a01a | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_aws/ram/__init__.py | lemonade-hq/pulumi-aws | 9ee22c65c7bad42d38b16879ccd56526d856a01a | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2021-03-08T15:05:29.000Z | 2021-03-08T15:05:29.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from .principal_association import *
from .resource_association import *
from .resource_share import *
| 36.888889 | 87 | 0.740964 |
# Export this package's modules as members:
from .principal_association import *
from .resource_association import *
from .resource_share import *
| true | true |
f738f4f4e9aaf552abff28f5e76349433a08aac1 | 396 | py | Python | trio-stuff/interleaved_output.py | bmintz/python-snippets | 982861c173bf4bcd5d908514a9e8b1914a580a5d | [
"CC0-1.0"
] | 2 | 2018-11-12T10:33:13.000Z | 2019-02-24T05:01:40.000Z | trio-stuff/interleaved_output.py | iomintz/python-snippets | 982861c173bf4bcd5d908514a9e8b1914a580a5d | [
"CC0-1.0"
] | null | null | null | trio-stuff/interleaved_output.py | iomintz/python-snippets | 982861c173bf4bcd5d908514a9e8b1914a580a5d | [
"CC0-1.0"
] | 2 | 2018-11-24T08:16:59.000Z | 2019-02-24T04:41:30.000Z | #!/usr/bin/env python3
import trio
async def a():
    # Marker prints let us observe how the two tasks interleave.
    print('enter a')
    await trio.sleep(0)  # yields control back to the trio scheduler
    print('leave a')
async def b():
    # Same shape as a(), so their output can interleave.
    print('enter b')
    await trio.sleep(0)  # yields control back to the trio scheduler
    print('leave b')
async def main():
    # Run a() and b() concurrently under one nursery.
    async with trio.open_nursery() as nursery:
        # start_soon returns None, so this prints "None" — presumably
        # leftover debugging. NOTE(review): consider dropping the print.
        print(nursery.start_soon(a))
        nursery.start_soon(b)
# seems like the output order is non-deterministic
if __name__ == '__main__':
trio.run(main)
| 16.5 | 50 | 0.694444 |
import trio
async def a():
print('enter a')
await trio.sleep(0)
print('leave a')
async def b():
print('enter b')
await trio.sleep(0)
print('leave b')
async def main():
async with trio.open_nursery() as nursery:
print(nursery.start_soon(a))
nursery.start_soon(b)
if __name__ == '__main__':
trio.run(main)
| true | true |
f738f581a62bbf2ba1039ce79893ba2e39af5318 | 179 | py | Python | fretboard/__init__.py | honorabel/python-fretboard | fb6a37e29bf6a19bee11940e9b42d58220c1306c | [
"MIT"
] | 69 | 2016-11-18T02:11:01.000Z | 2022-03-26T12:54:55.000Z | fretboard/__init__.py | honorabel/python-fretboard | fb6a37e29bf6a19bee11940e9b42d58220c1306c | [
"MIT"
] | 3 | 2018-05-24T15:08:46.000Z | 2020-09-19T21:49:36.000Z | fretboard/__init__.py | honorabel/python-fretboard | fb6a37e29bf6a19bee11940e9b42d58220c1306c | [
"MIT"
] | 16 | 2018-06-12T10:34:26.000Z | 2021-01-09T15:21:55.000Z | from .chord import Chord, BassChord, UkuleleChord
from .fretboard import Fretboard
__version__ = '1.0.0'
__author__ = 'Derek Payton <derek.payton@gmail.com>'
__license__ = 'MIT'
| 25.571429 | 52 | 0.765363 | from .chord import Chord, BassChord, UkuleleChord
from .fretboard import Fretboard
__version__ = '1.0.0'
__author__ = 'Derek Payton <derek.payton@gmail.com>'
__license__ = 'MIT'
| true | true |
f738f63479f804d9170fe21b0f803e86f9f518c6 | 4,680 | py | Python | tests/www/api/experimental/test_dag_runs_endpoint.py | rliuamzn/airflow | 177dfbd12a42a5c229640c6c830f43f280ea5caa | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 3 | 2021-07-30T17:29:38.000Z | 2022-03-06T08:44:23.000Z | tests/www/api/experimental/test_dag_runs_endpoint.py | rliuamzn/airflow | 177dfbd12a42a5c229640c6c830f43f280ea5caa | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 210 | 2021-07-17T00:25:52.000Z | 2021-12-29T00:44:48.000Z | tests/www/api/experimental/test_dag_runs_endpoint.py | rliuamzn/airflow | 177dfbd12a42a5c229640c6c830f43f280ea5caa | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 3 | 2020-06-30T02:38:17.000Z | 2022-01-19T06:14:08.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import pytest
from airflow.api.common.experimental.trigger_dag import trigger_dag
from airflow.models import DagBag, DagRun
from airflow.models.serialized_dag import SerializedDagModel
from airflow.settings import Session
class TestDagRunsEndpoint:
    """Tests for the experimental ``/dags/<dag_id>/dag_runs`` endpoint.

    The request/parse/assert boilerplate shared by the original tests is
    factored into ``_get_data`` and ``_assert_single_run``; test method
    names and fixtures are unchanged so pytest collection is unaffected.
    """

    @pytest.fixture(scope="class", autouse=True)
    def _setup_session(self):
        # Start from a clean DagRun table and serialize the example DAGs
        # so the endpoint can resolve them.
        session = Session()
        session.query(DagRun).delete()
        session.commit()
        session.close()

        dagbag = DagBag(include_examples=True)
        for dag in dagbag.dags.values():
            dag.sync_to_db()
            SerializedDagModel.write_dag(dag)

    @pytest.fixture(autouse=True)
    def _reset_test_session(self, experiemental_api_app):
        self.app = experiemental_api_app.test_client()
        yield
        # Remove DagRuns created by the individual test.
        session = Session()
        session.query(DagRun).delete()
        session.commit()
        session.close()

    def _get_data(self, url, expected_status=200):
        """GET ``url``, assert the status code, and return the decoded JSON."""
        response = self.app.get(url)
        assert expected_status == response.status_code
        return json.loads(response.data.decode('utf-8'))

    def _assert_single_run(self, data, dag_id, dag_run):
        """Assert ``data`` lists exactly the run created for ``dag_id``."""
        assert isinstance(data, list)
        assert len(data) == 1
        assert data[0]['dag_id'] == dag_id
        assert data[0]['id'] == dag_run.id

    def test_get_dag_runs_success(self):
        url_template = '/api/experimental/dags/{}/dag_runs'
        dag_id = 'example_bash_operator'
        # Create DagRun
        dag_run = trigger_dag(dag_id=dag_id, run_id='test_get_dag_runs_success')

        data = self._get_data(url_template.format(dag_id))
        self._assert_single_run(data, dag_id, dag_run)

    def test_get_dag_runs_success_with_state_parameter(self):
        url_template = '/api/experimental/dags/{}/dag_runs?state=running'
        dag_id = 'example_bash_operator'
        # Create DagRun
        dag_run = trigger_dag(dag_id=dag_id, run_id='test_get_dag_runs_success')

        data = self._get_data(url_template.format(dag_id))
        self._assert_single_run(data, dag_id, dag_run)

    def test_get_dag_runs_success_with_capital_state_parameter(self):
        # The state filter should be case-insensitive.
        url_template = '/api/experimental/dags/{}/dag_runs?state=RUNNING'
        dag_id = 'example_bash_operator'
        # Create DagRun
        dag_run = trigger_dag(dag_id=dag_id, run_id='test_get_dag_runs_success')

        data = self._get_data(url_template.format(dag_id))
        self._assert_single_run(data, dag_id, dag_run)

    def test_get_dag_runs_success_with_state_no_result(self):
        # An unknown state matches no runs but still returns 200 with [].
        url_template = '/api/experimental/dags/{}/dag_runs?state=dummy'
        dag_id = 'example_bash_operator'
        # Create DagRun
        trigger_dag(dag_id=dag_id, run_id='test_get_dag_runs_success')

        data = self._get_data(url_template.format(dag_id))
        assert isinstance(data, list)
        assert len(data) == 0

    def test_get_dag_runs_invalid_dag_id(self):
        # Unknown DAG ids are rejected with 400 and a non-list error payload.
        url_template = '/api/experimental/dags/{}/dag_runs'
        dag_id = 'DUMMY_DAG'

        data = self._get_data(url_template.format(dag_id), expected_status=400)
        assert not isinstance(data, list)

    def test_get_dag_runs_no_runs(self):
        url_template = '/api/experimental/dags/{}/dag_runs'
        dag_id = 'example_bash_operator'

        data = self._get_data(url_template.format(dag_id))
        assert isinstance(data, list)
        assert len(data) == 0
| 36.850394 | 80 | 0.680556 |
import json
import pytest
from airflow.api.common.experimental.trigger_dag import trigger_dag
from airflow.models import DagBag, DagRun
from airflow.models.serialized_dag import SerializedDagModel
from airflow.settings import Session
class TestDagRunsEndpoint:
@pytest.fixture(scope="class", autouse=True)
def _setup_session(self):
session = Session()
session.query(DagRun).delete()
session.commit()
session.close()
dagbag = DagBag(include_examples=True)
for dag in dagbag.dags.values():
dag.sync_to_db()
SerializedDagModel.write_dag(dag)
@pytest.fixture(autouse=True)
def _reset_test_session(self, experiemental_api_app):
self.app = experiemental_api_app.test_client()
yield
session = Session()
session.query(DagRun).delete()
session.commit()
session.close()
def test_get_dag_runs_success(self):
url_template = '/api/experimental/dags/{}/dag_runs'
dag_id = 'example_bash_operator'
dag_run = trigger_dag(dag_id=dag_id, run_id='test_get_dag_runs_success')
response = self.app.get(url_template.format(dag_id))
assert 200 == response.status_code
data = json.loads(response.data.decode('utf-8'))
assert isinstance(data, list)
assert len(data) == 1
assert data[0]['dag_id'] == dag_id
assert data[0]['id'] == dag_run.id
def test_get_dag_runs_success_with_state_parameter(self):
url_template = '/api/experimental/dags/{}/dag_runs?state=running'
dag_id = 'example_bash_operator'
dag_run = trigger_dag(dag_id=dag_id, run_id='test_get_dag_runs_success')
response = self.app.get(url_template.format(dag_id))
assert 200 == response.status_code
data = json.loads(response.data.decode('utf-8'))
assert isinstance(data, list)
assert len(data) == 1
assert data[0]['dag_id'] == dag_id
assert data[0]['id'] == dag_run.id
def test_get_dag_runs_success_with_capital_state_parameter(self):
url_template = '/api/experimental/dags/{}/dag_runs?state=RUNNING'
dag_id = 'example_bash_operator'
dag_run = trigger_dag(dag_id=dag_id, run_id='test_get_dag_runs_success')
response = self.app.get(url_template.format(dag_id))
assert 200 == response.status_code
data = json.loads(response.data.decode('utf-8'))
assert isinstance(data, list)
assert len(data) == 1
assert data[0]['dag_id'] == dag_id
assert data[0]['id'] == dag_run.id
def test_get_dag_runs_success_with_state_no_result(self):
url_template = '/api/experimental/dags/{}/dag_runs?state=dummy'
dag_id = 'example_bash_operator'
trigger_dag(dag_id=dag_id, run_id='test_get_dag_runs_success')
response = self.app.get(url_template.format(dag_id))
assert 200 == response.status_code
data = json.loads(response.data.decode('utf-8'))
assert isinstance(data, list)
assert len(data) == 0
def test_get_dag_runs_invalid_dag_id(self):
url_template = '/api/experimental/dags/{}/dag_runs'
dag_id = 'DUMMY_DAG'
response = self.app.get(url_template.format(dag_id))
assert 400 == response.status_code
data = json.loads(response.data.decode('utf-8'))
assert not isinstance(data, list)
def test_get_dag_runs_no_runs(self):
url_template = '/api/experimental/dags/{}/dag_runs'
dag_id = 'example_bash_operator'
response = self.app.get(url_template.format(dag_id))
assert 200 == response.status_code
data = json.loads(response.data.decode('utf-8'))
assert isinstance(data, list)
assert len(data) == 0
| true | true |
f738f6c80a1a23e674eb48045329f0dee12e3ee9 | 21,450 | py | Python | salt/utils/templates.py | fake-name/salt | d8f04936e4407f51946e32e8166159778f6c31a5 | [
"Apache-2.0"
] | null | null | null | salt/utils/templates.py | fake-name/salt | d8f04936e4407f51946e32e8166159778f6c31a5 | [
"Apache-2.0"
] | null | null | null | salt/utils/templates.py | fake-name/salt | d8f04936e4407f51946e32e8166159778f6c31a5 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Template render systems
"""
from __future__ import absolute_import, print_function, unicode_literals
# Import Python libs
import codecs
import logging
import os
import sys
import tempfile
import traceback
# Import 3rd-party libs
import jinja2
import jinja2.ext
# Import Salt libs
import salt.utils.data
import salt.utils.dateutils
import salt.utils.files
import salt.utils.hashutils
import salt.utils.http
import salt.utils.jinja
import salt.utils.network
import salt.utils.platform
import salt.utils.stringutils
import salt.utils.yamlencoding
from salt import __path__ as saltpath
from salt.exceptions import CommandExecutionError, SaltInvocationError, SaltRenderError
from salt.ext import six
from salt.utils.decorators.jinja import JinjaFilter, JinjaGlobal, JinjaTest
from salt.utils.odict import OrderedDict
from salt.utils.versions import LooseVersion
if sys.version_info[:2] >= (3, 5):
import importlib.machinery # pylint: disable=no-name-in-module,import-error
import importlib.util # pylint: disable=no-name-in-module,import-error
USE_IMPORTLIB = True
else:
import imp
USE_IMPORTLIB = False
log = logging.getLogger(__name__)
TEMPLATE_DIRNAME = os.path.join(saltpath[0], "templates")
# FIXME: also in salt/template.py
SLS_ENCODING = "utf-8" # this one has no BOM.
SLS_ENCODER = codecs.getencoder(SLS_ENCODING)
class AliasedLoader(object):
    """
    Thin delegating proxy over the LazyLoader, used so templating calls can
    transparently redirect shellisms (e.g. 'cmd.run' -> 'cmd.shell').

    Dotted aliases ('cmd.run') must resolve to another dotted alias
    (e.g. 'cmd.shell'); non-dotted aliases ('cmd') must resolve to a
    dictionary of function aliases for that module (e.g. {'run': 'shell'}).

    Item access, attribute access, and membership tests are all forwarded
    to the wrapped loader.
    """

    def __init__(self, wrapped):
        self.wrapped = wrapped

    def __getitem__(self, key):
        # Forward dotted lookups ('cmd.run') straight to the loader.
        return self.wrapped[key]

    def __getattr__(self, attr):
        # Only reached for attributes not defined on the proxy itself.
        return getattr(self.wrapped, attr)

    def __contains__(self, key):
        return key in self.wrapped
class AliasedModule(object):
    """
    Attribute-forwarding wrapper for module objects returned by the
    LazyLoader's getattr, enabling `salt.cmd.run()` syntax in templates.

    The ``aliases`` mapping records per-module function aliases (such as
    ``run`` -> ``shell``); attributes not found on the wrapper itself fall
    through to the wrapped module object.
    """

    def __init__(self, wrapped, aliases):
        self.aliases = aliases
        self.wrapped = wrapped

    def __getattr__(self, attr):
        # Invoked only when normal attribute lookup fails on the wrapper.
        return getattr(self.wrapped, attr)
def wrap_tmpl_func(render_str):
    """
    Wrap a low-level ``render_str(tmplstr, context, tmplpath)`` renderer so
    that it can consume a file path, a raw string (``from_str=True``), or a
    file-like object, and return either the rendered text (``to_str=True``)
    or the path of a temporary file holding the rendered output.

    The wrapped function returns ``{'result': bool, 'data': ...}`` where
    ``data`` is the output (or temp-file name) on success, or a formatted
    traceback on non-SaltRenderError failures.
    """
    def render_tmpl(
        tmplsrc, from_str=False, to_str=False, context=None, tmplpath=None, **kws
    ):
        if context is None:
            context = {}
        # Alias cmd.run to cmd.shell to make python_shell=True the default for
        # templated calls
        if "salt" in kws:
            kws["salt"] = AliasedLoader(kws["salt"])
        # We want explicit context to overwrite the **kws
        kws.update(context)
        context = kws
        assert "opts" in context
        assert "saltenv" in context
        # Derive the tpl*/sls* convenience variables from the sls name and
        # the on-disk template path.
        if "sls" in context:
            slspath = context["sls"].replace(".", "/")
            if tmplpath is not None:
                context["tplpath"] = tmplpath
                if not tmplpath.lower().replace("\\", "/").endswith("/init.sls"):
                    slspath = os.path.dirname(slspath)
                template = tmplpath.replace("\\", "/")
                i = template.rfind(slspath.replace(".", "/"))
                if i != -1:
                    template = template[i:]
                    tpldir = os.path.dirname(template).replace("\\", "/")
                    tpldata = {
                        'tplfile': template,
                        'tpldir': '.' if tpldir == '' else tpldir,
                        'tpldot': tpldir.replace('/', '.'),
                        'tplroot': tpldir.split('/')[0],
                    }
                    context.update(tpldata)
            context["slsdotpath"] = slspath.replace("/", ".")
            context["slscolonpath"] = slspath.replace("/", ":")
            context["sls_path"] = slspath.replace("/", "_")
            context["slspath"] = slspath
        # Load the template source: literal string, file path, or file-like.
        if isinstance(tmplsrc, six.string_types):
            if from_str:
                tmplstr = tmplsrc
            else:
                try:
                    if tmplpath is not None:
                        tmplsrc = os.path.join(tmplpath, tmplsrc)
                    with codecs.open(tmplsrc, "r", SLS_ENCODING) as _tmplsrc:
                        tmplstr = _tmplsrc.read()
                except (UnicodeDecodeError, ValueError, OSError, IOError) as exc:
                    if salt.utils.files.is_binary(tmplsrc):
                        # Template is a bin file, return the raw file
                        return dict(result=True, data=tmplsrc)
                    log.error(
                        "Exception occurred while reading file %s: %s",
                        tmplsrc,
                        exc,
                        exc_info_on_loglevel=logging.DEBUG,
                    )
                    six.reraise(*sys.exc_info())
        else:  # assume tmplsrc is file-like.
            tmplstr = tmplsrc.read()
            tmplsrc.close()
        try:
            output = render_str(tmplstr, context, tmplpath)
            if salt.utils.platform.is_windows():
                newline = False
                if salt.utils.stringutils.to_unicode(
                    output, encoding=SLS_ENCODING
                ).endswith(("\n", os.linesep)):
                    newline = True
                # Write out with Windows newlines
                output = os.linesep.join(output.splitlines())
                if newline:
                    output += os.linesep
        except SaltRenderError as exc:
            log.exception("Rendering exception occurred")
            # return dict(result=False, data=six.text_type(exc))
            raise
        except Exception:  # pylint: disable=broad-except
            return dict(result=False, data=traceback.format_exc())
        else:
            if to_str:  # then render as string
                return dict(result=True, data=output)
            with tempfile.NamedTemporaryFile(
                "wb", delete=False, prefix=salt.utils.files.TEMPFILE_PREFIX
            ) as outf:
                outf.write(
                    salt.utils.stringutils.to_bytes(output, encoding=SLS_ENCODING)
                )
                # Note: If nothing is replaced or added by the rendering
                # function, then the contents of the output file will
                # be exactly the same as the input.
            return dict(result=True, data=outf.name)
    # Expose the low-level renderer on the wrapper for introspection.
    render_tmpl.render_str = render_str
    return render_tmpl
def _get_jinja_error_slug(tb_data):
"""
Return the line number where the template error was found
"""
try:
return [
x
for x in tb_data
if x[2] in ("top-level template code", "template", "<module>")
][-1]
except IndexError:
pass
def _get_jinja_error_message(tb_data):
    """
    Return an understandable ``file(line):\\ncode`` message from jinja error
    output, or ``None`` when no template frame is present in ``tb_data``.
    """
    slug = _get_jinja_error_slug(tb_data)
    if slug is None:
        # _get_jinja_error_slug swallows its own IndexError and returns None,
        # so the original "except IndexError" here was unreachable and
        # format(*None) raised TypeError instead of returning None.
        return None
    return "{0}({1}):\n{3}".format(*slug)
def _get_jinja_error_line(tb_data):
    """
    Return the line number where the template error was found, or ``None``
    when the traceback contains no template frame.
    """
    slug = _get_jinja_error_slug(tb_data)
    if slug is None:
        # _get_jinja_error_slug returns None rather than raising IndexError,
        # so the original "except IndexError" guard never fired and None[1]
        # raised TypeError. Guard on None explicitly instead.
        return None
    return slug[1]
def _get_jinja_error(trace, context=None):
    """
    Return the error line and error message output from
    a stacktrace.

    If we are in a macro, also output inside the message the
    exact location of the error in the macro.

    :param trace: traceback entries from ``traceback.extract_tb``.
    :param context: render context; ``context['sls']`` (when present) is used
        to tell whether the failure happened in the rendered sls itself or in
        an included/imported template file.
    :return: ``(line, out)`` -- ``line`` is reset to 0 when only the bare
        message (no file excerpt) could be produced.
    """
    if not context:
        context = {}
    out = ""
    error = _get_jinja_error_slug(trace)
    line = _get_jinja_error_line(trace)
    msg = _get_jinja_error_message(trace)
    # if we failed on a nested macro, output a little more info
    # to help debugging
    # if sls is not found in context, add output only if we can
    # resolve the filename
    add_log = False
    template_path = None
    if "sls" not in context:
        if (error[0] != "<unknown>") and os.path.exists(error[0]):
            template_path = error[0]
            add_log = True
    else:
        # the offender error is not from the called sls
        filen = context["sls"].replace(".", "/")
        if not error[0].endswith(filen) and os.path.exists(error[0]):
            add_log = True
            template_path = error[0]
    # if we add a log, format explicitly the exception here
    # by telling to output the macro context after the macro
    # error log place at the beginning
    if add_log:
        if template_path:
            # Show the first line of the message, then an excerpt of the
            # offending template file with the bad line marked.
            out = "\n{0}\n".format(msg.splitlines()[0])
            with salt.utils.files.fopen(template_path) as fp_:
                template_contents = salt.utils.stringutils.to_unicode(fp_.read())
            out += salt.utils.stringutils.get_context(
                template_contents, line, marker=" <======================"
            )
        else:
            out = "\n{0}\n".format(msg)
            line = 0
    return line, out
def render_jinja_tmpl(tmplstr, context, tmplpath=None):
    """
    Render ``tmplstr`` as a Jinja template.

    :param tmplstr: template source text.
    :param context: render context; must contain ``opts`` and ``saltenv``.
    :param tmplpath: on-disk path of the template; used as the filesystem
        loader root when no saltenv is given.
    :raises SaltRenderError: on any Jinja undefined/syntax/runtime error or
        salt function failure, with the offending line and surrounding
        template context attached where possible.
    """
    opts = context["opts"]
    saltenv = context["saltenv"]
    loader = None
    newline = False
    if tmplstr and not isinstance(tmplstr, six.text_type):
        # http://jinja.pocoo.org/docs/api/#unicode
        tmplstr = tmplstr.decode(SLS_ENCODING)
    # Remember whether the template ended with a newline so it can be
    # restored after rendering (Jinja strips the final newline, see below).
    if tmplstr.endswith(os.linesep):
        newline = os.linesep
    elif tmplstr.endswith("\n"):
        newline = "\n"
    # Choose a template loader: plain filesystem when rendering outside a
    # saltenv, otherwise the salt fileserver-backed cache loader.
    if not saltenv:
        if tmplpath:
            loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath))
    else:
        loader = salt.utils.jinja.SaltCacheLoader(
            opts, saltenv, pillar_rend=context.get("_pillar_rend", False)
        )
    env_args = {"extensions": [], "loader": loader}
    # Only enable the Jinja extensions that this jinja2 version provides.
    if hasattr(jinja2.ext, "with_"):
        env_args["extensions"].append("jinja2.ext.with_")
    if hasattr(jinja2.ext, "do"):
        env_args["extensions"].append("jinja2.ext.do")
    if hasattr(jinja2.ext, "loopcontrols"):
        env_args["extensions"].append("jinja2.ext.loopcontrols")
    env_args["extensions"].append(salt.utils.jinja.SerializerExtension)
    # Non-dict values for these options are silently ignored.
    opt_jinja_env = opts.get("jinja_env", {})
    opt_jinja_sls_env = opts.get("jinja_sls_env", {})
    opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {}
    opt_jinja_sls_env = opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {}
    # Pass through trim_blocks and lstrip_blocks Jinja parameters
    # trim_blocks removes newlines around Jinja blocks
    # lstrip_blocks strips tabs and spaces from the beginning of
    # line to the start of a block.
    if opts.get("jinja_trim_blocks", False):
        log.debug("Jinja2 trim_blocks is enabled")
        log.warning(
            "jinja_trim_blocks is deprecated and will be removed in a future release, please use jinja_env and/or jinja_sls_env instead"
        )
        opt_jinja_env["trim_blocks"] = True
        opt_jinja_sls_env["trim_blocks"] = True
    if opts.get("jinja_lstrip_blocks", False):
        log.debug("Jinja2 lstrip_blocks is enabled")
        log.warning(
            "jinja_lstrip_blocks is deprecated and will be removed in a future release, please use jinja_env and/or jinja_sls_env instead"
        )
        opt_jinja_env["lstrip_blocks"] = True
        opt_jinja_sls_env["lstrip_blocks"] = True
    def opt_jinja_env_helper(opts, optname):
        # Copy recognized settings into the Environment arguments, matching
        # keys case-insensitively against jinja2.defaults.
        for k, v in six.iteritems(opts):
            k = k.lower()
            if hasattr(jinja2.defaults, k.upper()):
                log.debug("Jinja2 environment %s was set to %s by %s", k, v, optname)
                env_args[k] = v
            else:
                log.warning("Jinja2 environment %s is not recognized", k)
    if "sls" in context and context["sls"] != "":
        opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env")
    else:
        opt_jinja_env_helper(opt_jinja_env, "jinja_env")
    # StrictUndefined makes references to missing variables raise instead of
    # rendering as empty, unless explicitly allowed by the opts.
    if opts.get("allow_undefined", False):
        jinja_env = jinja2.Environment(**env_args)
    else:
        jinja_env = jinja2.Environment(undefined=jinja2.StrictUndefined, **env_args)
    # Save the built-in filters before salt's filters overwrite them.
    tojson_filter = jinja_env.filters.get("tojson")
    indent_filter = jinja_env.filters.get("indent")
    jinja_env.tests.update(JinjaTest.salt_jinja_tests)
    jinja_env.filters.update(JinjaFilter.salt_jinja_filters)
    if tojson_filter is not None:
        # Use the existing tojson filter, if present (jinja2 >= 2.9)
        jinja_env.filters["tojson"] = tojson_filter
    if salt.utils.jinja.JINJA_VERSION >= LooseVersion("2.11"):
        # Use the existing indent filter on Jinja versions where it's not broken
        jinja_env.filters["indent"] = indent_filter
    jinja_env.globals.update(JinjaGlobal.salt_jinja_globals)
    # globals
    jinja_env.globals["odict"] = OrderedDict
    jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context
    jinja_env.tests["list"] = salt.utils.data.is_list
    # Decode string values up front so rendering only sees unicode.
    decoded_context = {}
    for key, value in six.iteritems(context):
        if not isinstance(value, six.string_types):
            decoded_context[key] = value
            continue
        try:
            decoded_context[key] = salt.utils.stringutils.to_unicode(
                value, encoding=SLS_ENCODING
            )
        except UnicodeDecodeError as ex:
            log.debug(
                "Failed to decode using default encoding (%s), trying system encoding",
                SLS_ENCODING,
            )
            decoded_context[key] = salt.utils.data.decode(value)
    try:
        template = jinja_env.from_string(tmplstr)
        template.globals.update(decoded_context)
        output = template.render(**decoded_context)
    except jinja2.exceptions.UndefinedError as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        out = _get_jinja_error(trace, context=decoded_context)[1]
        tmplstr = ""
        # Don't include the line number, since it is misreported
        # https://github.com/mitsuhiko/jinja2/issues/276
        raise SaltRenderError("Jinja variable {0}{1}".format(exc, out), buf=tmplstr)
    except (
        jinja2.exceptions.TemplateRuntimeError,
        jinja2.exceptions.TemplateSyntaxError,
    ) as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        line, out = _get_jinja_error(trace, context=decoded_context)
        if not line:
            tmplstr = ""
        raise SaltRenderError(
            "Jinja syntax error: {0}{1}".format(exc, out), line, tmplstr
        )
    except (SaltInvocationError, CommandExecutionError) as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        line, out = _get_jinja_error(trace, context=decoded_context)
        if not line:
            tmplstr = ""
        raise SaltRenderError(
            "Problem running salt function in Jinja template: {0}{1}".format(exc, out),
            line,
            tmplstr,
        )
    except Exception as exc:  # pylint: disable=broad-except
        tracestr = traceback.format_exc()
        trace = traceback.extract_tb(sys.exc_info()[2])
        line, out = _get_jinja_error(trace, context=decoded_context)
        if not line:
            tmplstr = ""
        else:
            tmplstr += "\n{0}".format(tracestr)
        log.debug("Jinja Error")
        log.debug("Exception:", exc_info=True)
        log.debug("Out: %s", out)
        log.debug("Line: %s", line)
        log.debug("TmplStr: %s", tmplstr)
        log.debug("TraceStr: %s", tracestr)
        raise SaltRenderError(
            "Jinja error: {0}{1}".format(exc, out), line, tmplstr, trace=tracestr
        )
    # Workaround a bug in Jinja that removes the final newline
    # (https://github.com/mitsuhiko/jinja2/issues/75)
    if newline:
        output += newline
    return output
# pylint: disable=3rd-party-module-not-gated
def render_mako_tmpl(tmplstr, context, tmplpath=None):
    """
    Render a Mako template.

    :param tmplstr: template source text.
    :param context: render context; ``saltenv`` selects the fileserver-backed
        lookup and ``sls`` (when present) becomes the template uri.
    :param tmplpath: on-disk template path, used as the lookup directory when
        rendering outside of a saltenv.
    :raises SaltRenderError: wrapping Mako's own error report on any failure.
    """
    import mako.exceptions  # pylint: disable=no-name-in-module
    from mako.template import Template  # pylint: disable=no-name-in-module
    from salt.utils.mako import SaltMakoTemplateLookup
    saltenv = context["saltenv"]
    lookup = None
    if not saltenv:
        if tmplpath:
            # i.e., the template is from a file outside the state tree
            from mako.lookup import TemplateLookup  # pylint: disable=no-name-in-module
            lookup = TemplateLookup(directories=[os.path.dirname(tmplpath)])
    else:
        lookup = SaltMakoTemplateLookup(
            context["opts"], saltenv, pillar_rend=context.get("_pillar_rend", False)
        )
    try:
        # strict_undefined: missing variables raise instead of rendering empty
        return Template(
            tmplstr,
            strict_undefined=True,
            uri=context["sls"].replace(".", "/") if "sls" in context else None,
            lookup=lookup,
        ).render(**context)
    except Exception:  # pylint: disable=broad-except
        raise SaltRenderError(mako.exceptions.text_error_template().render())
def render_wempy_tmpl(tmplstr, context, tmplpath=None):
    """Render a wempy template string against ``context``; ``tmplpath`` is unused."""
    from wemplate.wemplate import TemplateParser
    parser = TemplateParser(tmplstr)
    return parser.render(**context)
def render_genshi_tmpl(tmplstr, context, tmplpath=None):
    """
    Render a Genshi template. The rendering method is read from
    ``context['method']`` and defaults to ``xml``. Valid methods are:

    .. code-block:

        - xml
        - xhtml
        - html
        - text
        - newtext
        - oldtext

    ``text`` maps onto ``NewTextTemplate``; pass ``oldtext`` explicitly if
    ``OldTextTemplate`` is desired.
    """
    render_method = context.get("method", "xml")
    if render_method in ("text", "newtext"):
        from genshi.template import NewTextTemplate  # pylint: disable=no-name-in-module
        template = NewTextTemplate(tmplstr)
    elif render_method == "oldtext":
        from genshi.template import OldTextTemplate  # pylint: disable=no-name-in-module
        template = OldTextTemplate(tmplstr)
    else:
        from genshi.template import MarkupTemplate  # pylint: disable=no-name-in-module
        template = MarkupTemplate(tmplstr)
    return template.generate(**context).render(render_method)
def render_cheetah_tmpl(tmplstr, context, tmplpath=None):
    """
    Render a Cheetah template.
    """
    from Cheetah.Template import Template
    # Compile the template source, then instantiate it with the context as
    # its namespace.
    compiled = Template.compile(tmplstr)
    instance = compiled(namespaces=[context])
    # Choose the string conversion that matches the type of the template
    # source we were given.
    if six.PY3 and isinstance(tmplstr, six.string_types):
        # str() goes through __str__/__unicode__ on Python 3.
        rendered = str(instance)
    elif six.PY2 and isinstance(tmplstr, six.text_type):
        # Explicitly call .__unicode__() for Python 2 text input.
        rendered = instance.__unicode__()
    elif isinstance(tmplstr, six.binary_type):
        rendered = str(instance)
    else:
        raise SaltRenderError(
            "Unknown type {!s} for Cheetah template while trying to render.".format(
                type(tmplstr)
            )
        )
    # Normalize whatever we got back to unicode.
    return salt.utils.data.decode(rendered)
# pylint: enable=3rd-party-module-not-gated
def py(sfn, string=False, **kwargs):  # pylint: disable=C0103
    """
    Render a template from a python source file.

    The file at ``sfn`` is imported as a module, decorated with the salt
    dunders from ``kwargs``, and its ``run()`` function is executed.

    Returns::

        {'result': bool,
         'data': <Error data or rendered file path>}

    With ``string=True`` the rendered data itself is returned instead of a
    temp-file path.
    """
    if not os.path.isfile(sfn):
        return {}
    base_fname = os.path.basename(sfn)
    name = base_fname.split(".")[0]
    if USE_IMPORTLIB:
        # Python >= 3.5: load the source file through importlib machinery.
        # pylint: disable=no-member
        loader = importlib.machinery.SourceFileLoader(name, sfn)
        spec = importlib.util.spec_from_file_location(name, sfn, loader=loader)
        if spec is None:
            raise ImportError()
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        # pylint: enable=no-member
        sys.modules[name] = mod
    else:
        # Legacy interpreters fall back to the deprecated imp module.
        mod = imp.load_source(name, sfn)
    # File templates need these set as __var__
    if "__env__" not in kwargs and "saltenv" in kwargs:
        setattr(mod, "__env__", kwargs["saltenv"])
    builtins = ["salt", "grains", "pillar", "opts"]
    for builtin in builtins:
        arg = "__{0}__".format(builtin)
        setattr(mod, arg, kwargs[builtin])
    # Every remaining kwarg is exposed to the template module as-is.
    for kwarg in kwargs:
        setattr(mod, kwarg, kwargs[kwarg])
    try:
        data = mod.run()
        if string:
            return {"result": True, "data": data}
        # Write the rendered data to a temp file and hand back its path.
        tgt = salt.utils.files.mkstemp()
        with salt.utils.files.fopen(tgt, "w+") as target:
            target.write(salt.utils.stringutils.to_str(data))
        return {"result": True, "data": tgt}
    except Exception:  # pylint: disable=broad-except
        trb = traceback.format_exc()
        return {"result": False, "data": trb}
# Public rendering callables: each template engine is wrapped so it shares
# the common file/string handling in wrap_tmpl_func. ``py`` is registered
# directly because it executes a Python source file rather than rendering
# a template string.
JINJA = wrap_tmpl_func(render_jinja_tmpl)
MAKO = wrap_tmpl_func(render_mako_tmpl)
WEMPY = wrap_tmpl_func(render_wempy_tmpl)
GENSHI = wrap_tmpl_func(render_genshi_tmpl)
CHEETAH = wrap_tmpl_func(render_cheetah_tmpl)
# Lookup table mapping renderer names to their rendering callables.
TEMPLATE_REGISTRY = {
    "jinja": JINJA,
    "mako": MAKO,
    "py": py,
    "wempy": WEMPY,
    "genshi": GENSHI,
    "cheetah": CHEETAH,
}
| 34.265176 | 138 | 0.619068 |
from __future__ import absolute_import, print_function, unicode_literals
import codecs
import logging
import os
import sys
import tempfile
import traceback
import jinja2
import jinja2.ext
import salt.utils.data
import salt.utils.dateutils
import salt.utils.files
import salt.utils.hashutils
import salt.utils.http
import salt.utils.jinja
import salt.utils.network
import salt.utils.platform
import salt.utils.stringutils
import salt.utils.yamlencoding
from salt import __path__ as saltpath
from salt.exceptions import CommandExecutionError, SaltInvocationError, SaltRenderError
from salt.ext import six
from salt.utils.decorators.jinja import JinjaFilter, JinjaGlobal, JinjaTest
from salt.utils.odict import OrderedDict
from salt.utils.versions import LooseVersion
if sys.version_info[:2] >= (3, 5):
import importlib.machinery
import importlib.util
USE_IMPORTLIB = True
else:
import imp
USE_IMPORTLIB = False
log = logging.getLogger(__name__)
TEMPLATE_DIRNAME = os.path.join(saltpath[0], "templates")
SLS_ENCODING = "utf-8"
SLS_ENCODER = codecs.getencoder(SLS_ENCODING)
class AliasedLoader(object):
    """Thin proxy around the loader object handed to templates as ``salt``.

    Item lookup, attribute lookup and ``in`` membership checks are all
    forwarded to the wrapped loader.
    """
    def __init__(self, wrapped):
        # wrapped: the real loader/dict being proxied
        self.wrapped = wrapped
    def __getitem__(self, name):
        return self.wrapped[name]
    def __getattr__(self, name):
        return getattr(self.wrapped, name)
    def __contains__(self, name):
        return name in self.wrapped
class AliasedModule(object):
    """Proxy for a wrapped module object that also records alias names.

    Attribute access is forwarded to the wrapped object; ``aliases`` is
    stored but not otherwise used within this class.
    """
    def __init__(self, wrapped, aliases):
        # aliases: alternative names associated with this module
        self.aliases = aliases
        self.wrapped = wrapped
    def __getattr__(self, name):
        return getattr(self.wrapped, name)
def wrap_tmpl_func(render_str):
    """Wrap a ``render_str(tmplstr, context, tmplpath)`` engine function.

    The returned ``render_tmpl`` handles the shared work: building the
    context (sls path variables, aliased ``salt`` loader), reading the
    template from a string/path/file-like object, normalizing Windows line
    endings, and writing the rendered output to a temp file unless
    ``to_str`` is set.
    """
    def render_tmpl(
        tmplsrc, from_str=False, to_str=False, context=None, tmplpath=None, **kws
    ):
        if context is None:
            context = {}
        if "salt" in kws:
            # Hide the real loader behind a proxy object.
            kws["salt"] = AliasedLoader(kws["salt"])
        # Merge the explicit context over the keyword args.
        kws.update(context)
        context = kws
        assert "opts" in context
        assert "saltenv" in context
        if "sls" in context:
            # Derive the various sls path representations exposed to
            # templates (slspath, slsdotpath, tpldir, ...).
            slspath = context["sls"].replace(".", "/")
            if tmplpath is not None:
                context["tplpath"] = tmplpath
                if not tmplpath.lower().replace("\\", "/").endswith("/init.sls"):
                    slspath = os.path.dirname(slspath)
                template = tmplpath.replace("\\", "/")
                i = template.rfind(slspath.replace(".", "/"))
                if i != -1:
                    template = template[i:]
                tpldir = os.path.dirname(template).replace("\\", "/")
                tpldata = {
                    'tplfile': template,
                    'tpldir': '.' if tpldir == '' else tpldir,
                    'tpldot': tpldir.replace('/', '.'),
                    'tplroot': tpldir.split('/')[0],
                }
                context.update(tpldata)
            context["slsdotpath"] = slspath.replace("/", ".")
            context["slscolonpath"] = slspath.replace("/", ":")
            context["sls_path"] = slspath.replace("/", "_")
            context["slspath"] = slspath
        if isinstance(tmplsrc, six.string_types):
            if from_str:
                # The template source IS the template text.
                tmplstr = tmplsrc
            else:
                try:
                    if tmplpath is not None:
                        tmplsrc = os.path.join(tmplpath, tmplsrc)
                    with codecs.open(tmplsrc, "r", SLS_ENCODING) as _tmplsrc:
                        tmplstr = _tmplsrc.read()
                except (UnicodeDecodeError, ValueError, OSError, IOError) as exc:
                    if salt.utils.files.is_binary(tmplsrc):
                        # Template is a bin file, return the raw file
                        return dict(result=True, data=tmplsrc)
                    log.error(
                        "Exception occurred while reading file %s: %s",
                        tmplsrc,
                        exc,
                        exc_info_on_loglevel=logging.DEBUG,
                    )
                    six.reraise(*sys.exc_info())
        else:
            # Assume a file-like object.
            tmplstr = tmplsrc.read()
            tmplsrc.close()
        try:
            output = render_str(tmplstr, context, tmplpath)
            if salt.utils.platform.is_windows():
                # Normalize line endings to os.linesep, preserving a
                # trailing newline if one was present.
                newline = False
                if salt.utils.stringutils.to_unicode(
                    output, encoding=SLS_ENCODING
                ).endswith(("\n", os.linesep)):
                    newline = True
                output = os.linesep.join(output.splitlines())
                if newline:
                    output += os.linesep
        except SaltRenderError as exc:
            log.exception("Rendering exception occurred")
            raise
        except Exception:  # pylint: disable=broad-except
            return dict(result=False, data=traceback.format_exc())
        else:
            if to_str:  # then render as string
                return dict(result=True, data=output)
            # Otherwise persist the rendered output and return its path.
            with tempfile.NamedTemporaryFile(
                "wb", delete=False, prefix=salt.utils.files.TEMPFILE_PREFIX
            ) as outf:
                outf.write(
                    salt.utils.stringutils.to_bytes(output, encoding=SLS_ENCODING)
                )
            return dict(result=True, data=outf.name)
    render_tmpl.render_str = render_str
    return render_tmpl
def _get_jinja_error_slug(tb_data):
try:
return [
x
for x in tb_data
if x[2] in ("top-level template code", "template", "<module>")
][-1]
except IndexError:
pass
def _get_jinja_error_message(tb_data):
    """Return ``filename(lineno):\\n<source>`` for the template frame in
    ``tb_data``, or ``None`` when no template frame is present.
    """
    line = _get_jinja_error_slug(tb_data)
    if line is None:
        # _get_jinja_error_slug signals "no template frame" by returning
        # None rather than raising, so the old ``except IndexError`` could
        # never fire and ``format(*None)`` raised an uncaught TypeError.
        return None
    # Positions: 0=filename, 1=lineno, 3=source line; the function name
    # (index 2) is deliberately skipped.
    return "{0}({1}):\n{3}".format(*line)
def _get_jinja_error_line(tb_data):
    """Return the line number of the template frame in ``tb_data``, or
    ``None`` when no template frame is present.
    """
    slug = _get_jinja_error_slug(tb_data)
    if slug is None:
        # The slug helper returns None (it never lets IndexError escape),
        # so subscripting it unchecked raised TypeError rather than being
        # caught by the old ``except IndexError`` handler.
        return None
    return slug[1]
def _get_jinja_error(trace, context=None):
    """Build a ``(line, out)`` pair describing a Jinja rendering error.

    ``trace`` is the extracted traceback; ``out`` is a human-readable
    snippet of the failing template with a marker, and ``line`` is the
    failing line number (0 when no on-disk template could be read).
    """
    if not context:
        context = {}
    out = ""
    # NOTE(review): ``error`` may be None when no template frame exists in
    # the trace; ``error[0]`` below would then raise TypeError — confirm
    # callers always pass a trace containing a template frame.
    error = _get_jinja_error_slug(trace)
    line = _get_jinja_error_line(trace)
    msg = _get_jinja_error_message(trace)
    add_log = False
    template_path = None
    if "sls" not in context:
        # Rendering a plain file: point at it if it exists on disk.
        if (error[0] != "<unknown>") and os.path.exists(error[0]):
            template_path = error[0]
            add_log = True
    else:
        # Rendering an sls: only show context when the error comes from a
        # different file than the sls itself.
        filen = context["sls"].replace(".", "/")
        if not error[0].endswith(filen) and os.path.exists(error[0]):
            add_log = True
            template_path = error[0]
    if add_log:
        if template_path:
            # Include the surrounding template lines with a marker on the
            # failing line.
            out = "\n{0}\n".format(msg.splitlines()[0])
            with salt.utils.files.fopen(template_path) as fp_:
                template_contents = salt.utils.stringutils.to_unicode(fp_.read())
            out += salt.utils.stringutils.get_context(
                template_contents, line, marker="    <======================"
            )
        else:
            out = "\n{0}\n".format(msg)
            line = 0
    return line, out
def render_jinja_tmpl(tmplstr, context, tmplpath=None):
    """Render a Jinja template string against ``context``.

    Builds a Jinja environment from the master/minion ``opts`` (extensions,
    jinja_env/jinja_sls_env overrides, salt filters/tests/globals), renders
    the template, and re-raises any failure as ``SaltRenderError`` with a
    template-context snippet attached.
    """
    opts = context["opts"]
    saltenv = context["saltenv"]
    loader = None
    newline = False
    if tmplstr and not isinstance(tmplstr, six.text_type):
        # http://jinja.pocoo.org/docs/api/#unicode
        tmplstr = tmplstr.decode(SLS_ENCODING)
    # Remember whether the source ended with a newline so it can be
    # restored after rendering (Jinja drops the final newline).
    if tmplstr.endswith(os.linesep):
        newline = os.linesep
    elif tmplstr.endswith("\n"):
        newline = "\n"
    if not saltenv:
        if tmplpath:
            # I.e., the template is from a file outside the state tree.
            loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath))
    else:
        loader = salt.utils.jinja.SaltCacheLoader(
            opts, saltenv, pillar_rend=context.get("_pillar_rend", False)
        )
    env_args = {"extensions": [], "loader": loader}
    # Only enable extensions the installed Jinja version actually provides.
    if hasattr(jinja2.ext, "with_"):
        env_args["extensions"].append("jinja2.ext.with_")
    if hasattr(jinja2.ext, "do"):
        env_args["extensions"].append("jinja2.ext.do")
    if hasattr(jinja2.ext, "loopcontrols"):
        env_args["extensions"].append("jinja2.ext.loopcontrols")
    env_args["extensions"].append(salt.utils.jinja.SerializerExtension)
    opt_jinja_env = opts.get("jinja_env", {})
    opt_jinja_sls_env = opts.get("jinja_sls_env", {})
    opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {}
    opt_jinja_sls_env = opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {}
    # Legacy flat options are folded into both env dicts with a
    # deprecation warning.
    if opts.get("jinja_trim_blocks", False):
        log.debug("Jinja2 trim_blocks is enabled")
        log.warning(
            "jinja_trim_blocks is deprecated and will be removed in a future release, please use jinja_env and/or jinja_sls_env instead"
        )
        opt_jinja_env["trim_blocks"] = True
        opt_jinja_sls_env["trim_blocks"] = True
    if opts.get("jinja_lstrip_blocks", False):
        log.debug("Jinja2 lstrip_blocks is enabled")
        log.warning(
            "jinja_lstrip_blocks is deprecated and will be removed in a future release, please use jinja_env and/or jinja_sls_env instead"
        )
        opt_jinja_env["lstrip_blocks"] = True
        opt_jinja_sls_env["lstrip_blocks"] = True
    def opt_jinja_env_helper(opts, optname):
        # Copy recognized Jinja environment settings into env_args,
        # warning about anything jinja2.defaults does not know.
        for k, v in six.iteritems(opts):
            k = k.lower()
            if hasattr(jinja2.defaults, k.upper()):
                log.debug("Jinja2 environment %s was set to %s by %s", k, v, optname)
                env_args[k] = v
            else:
                log.warning("Jinja2 environment %s is not recognized", k)
    if "sls" in context and context["sls"] != "":
        opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env")
    else:
        opt_jinja_env_helper(opt_jinja_env, "jinja_env")
    if opts.get("allow_undefined", False):
        jinja_env = jinja2.Environment(**env_args)
    else:
        # Undefined variables raise rather than rendering empty.
        jinja_env = jinja2.Environment(undefined=jinja2.StrictUndefined, **env_args)
    # Preserve Jinja's own tojson/indent filters over salt's versions when
    # the installed Jinja provides them.
    tojson_filter = jinja_env.filters.get("tojson")
    indent_filter = jinja_env.filters.get("indent")
    jinja_env.tests.update(JinjaTest.salt_jinja_tests)
    jinja_env.filters.update(JinjaFilter.salt_jinja_filters)
    if tojson_filter is not None:
        # Use the existing tojson filter, if present (jinja2 >= 2.9)
        jinja_env.filters["tojson"] = tojson_filter
    if salt.utils.jinja.JINJA_VERSION >= LooseVersion("2.11"):
        # Use the existing indent filter on Jinja versions where it's not broken
        jinja_env.filters["indent"] = indent_filter
    jinja_env.globals.update(JinjaGlobal.salt_jinja_globals)
    # globals
    jinja_env.globals["odict"] = OrderedDict
    jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context
    jinja_env.tests["list"] = salt.utils.data.is_list
    # Decode every string value in the context to unicode up front.
    decoded_context = {}
    for key, value in six.iteritems(context):
        if not isinstance(value, six.string_types):
            decoded_context[key] = value
            continue
        try:
            decoded_context[key] = salt.utils.stringutils.to_unicode(
                value, encoding=SLS_ENCODING
            )
        except UnicodeDecodeError as ex:
            log.debug(
                "Failed to decode using default encoding (%s), trying system encoding",
                SLS_ENCODING,
            )
            decoded_context[key] = salt.utils.data.decode(value)
    try:
        template = jinja_env.from_string(tmplstr)
        template.globals.update(decoded_context)
        output = template.render(**decoded_context)
    except jinja2.exceptions.UndefinedError as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        out = _get_jinja_error(trace, context=decoded_context)[1]
        tmplstr = ""
        # Don't include the line number, since it is misreported
        raise SaltRenderError("Jinja variable {0}{1}".format(exc, out), buf=tmplstr)
    except (
        jinja2.exceptions.TemplateRuntimeError,
        jinja2.exceptions.TemplateSyntaxError,
    ) as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        line, out = _get_jinja_error(trace, context=decoded_context)
        if not line:
            tmplstr = ""
        raise SaltRenderError(
            "Jinja syntax error: {0}{1}".format(exc, out), line, tmplstr
        )
    except (SaltInvocationError, CommandExecutionError) as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        line, out = _get_jinja_error(trace, context=decoded_context)
        if not line:
            tmplstr = ""
        raise SaltRenderError(
            "Problem running salt function in Jinja template: {0}{1}".format(exc, out),
            line,
            tmplstr,
        )
    except Exception as exc:
        # Catch-all: log everything we know and wrap in SaltRenderError.
        tracestr = traceback.format_exc()
        trace = traceback.extract_tb(sys.exc_info()[2])
        line, out = _get_jinja_error(trace, context=decoded_context)
        if not line:
            tmplstr = ""
        else:
            tmplstr += "\n{0}".format(tracestr)
        log.debug("Jinja Error")
        log.debug("Exception:", exc_info=True)
        log.debug("Out: %s", out)
        log.debug("Line: %s", line)
        log.debug("TmplStr: %s", tmplstr)
        log.debug("TraceStr: %s", tracestr)
        raise SaltRenderError(
            "Jinja error: {0}{1}".format(exc, out), line, tmplstr, trace=tracestr
        )
    # Workaround a bug in Jinja that removes the final newline
    # (https://github.com/mitsuhiko/jinja2/issues/75)
    if newline:
        output += newline
    return output
def render_mako_tmpl(tmplstr, context, tmplpath=None):
    """Render a Mako template string against ``context``.

    Includes are resolved through the salt fileserver when ``saltenv`` is
    set, or relative to ``tmplpath`` for templates outside the state tree.
    """
    import mako.exceptions
    from mako.template import Template
    from salt.utils.mako import SaltMakoTemplateLookup
    saltenv = context["saltenv"]
    lookup = None
    if not saltenv:
        if tmplpath:
            # i.e., the template is from a file outside the state tree
            from mako.lookup import TemplateLookup
            lookup = TemplateLookup(directories=[os.path.dirname(tmplpath)])
    else:
        lookup = SaltMakoTemplateLookup(
            context["opts"], saltenv, pillar_rend=context.get("_pillar_rend", False)
        )
    try:
        return Template(
            tmplstr,
            strict_undefined=True,
            uri=context["sls"].replace(".", "/") if "sls" in context else None,
            lookup=lookup,
        ).render(**context)
    except Exception:
        # Re-raise with Mako's own formatted error text.
        raise SaltRenderError(mako.exceptions.text_error_template().render())
def render_wempy_tmpl(tmplstr, context, tmplpath=None):
    """Render a wempy template string against ``context``; ``tmplpath`` is unused."""
    from wemplate.wemplate import TemplateParser as Template
    return Template(tmplstr).render(**context)
def render_genshi_tmpl(tmplstr, context, tmplpath=None):
    """Render a Genshi template.

    The rendering method is read from ``context['method']`` and defaults
    to ``xml``; ``text``/``newtext`` use ``NewTextTemplate``, ``oldtext``
    uses ``OldTextTemplate``, anything else uses ``MarkupTemplate``.
    """
    method = context.get("method", "xml")
    if method == "text" or method == "newtext":
        from genshi.template import NewTextTemplate
        tmpl = NewTextTemplate(tmplstr)
    elif method == "oldtext":
        from genshi.template import OldTextTemplate
        tmpl = OldTextTemplate(tmplstr)
    else:
        from genshi.template import MarkupTemplate
        tmpl = MarkupTemplate(tmplstr)
    return tmpl.generate(**context).render(method)
def render_cheetah_tmpl(tmplstr, context, tmplpath=None):
    """Render a Cheetah template string against ``context``."""
    from Cheetah.Template import Template
    # Compile the template and instantiate it with the context namespace.
    tclass = Template.compile(tmplstr)
    data = tclass(namespaces=[context])
    # Pick the string conversion matching the type of the template source.
    if six.PY3 and isinstance(tmplstr, six.string_types):
        # str() goes through __str__/__unicode__ on Python 3.
        res = str(data)
    elif six.PY2 and isinstance(tmplstr, six.text_type):
        # Explicitly call .__unicode__() for Python 2 text input.
        res = data.__unicode__()
    elif isinstance(tmplstr, six.binary_type):
        res = str(data)
    else:
        raise SaltRenderError(
            "Unknown type {!s} for Cheetah template while trying to render.".format(
                type(tmplstr)
            )
        )
    # Normalize the rendered result to unicode.
    return salt.utils.data.decode(res)
def py(sfn, string=False, **kwargs):
    """Render a template from a python source file.

    The file at ``sfn`` is imported as a module, decorated with salt
    dunders from ``kwargs``, and its ``run()`` function is executed.
    Returns ``{'result': bool, 'data': <error text or rendered path>}``;
    with ``string=True`` the rendered data itself is returned.
    """
    if not os.path.isfile(sfn):
        return {}
    base_fname = os.path.basename(sfn)
    name = base_fname.split(".")[0]
    if USE_IMPORTLIB:
        # Python >= 3.5: load the source file through importlib machinery.
        loader = importlib.machinery.SourceFileLoader(name, sfn)
        spec = importlib.util.spec_from_file_location(name, sfn, loader=loader)
        if spec is None:
            raise ImportError()
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        sys.modules[name] = mod
    else:
        mod = imp.load_source(name, sfn)
    if "__env__" not in kwargs and "saltenv" in kwargs:
        setattr(mod, "__env__", kwargs["saltenv"])
        # NOTE(review): this builtins block is nested under the __env__
        # conditional here, whereas the commented variant of this function
        # sets the __salt__/__grains__/__pillar__/__opts__ dunders
        # unconditionally at function level — confirm intended nesting.
        builtins = ["salt", "grains", "pillar", "opts"]
        for builtin in builtins:
            arg = "__{0}__".format(builtin)
            setattr(mod, arg, kwargs[builtin])
    # Expose every remaining kwarg to the template module as-is.
    for kwarg in kwargs:
        setattr(mod, kwarg, kwargs[kwarg])
    try:
        data = mod.run()
        if string:
            return {"result": True, "data": data}
        # Persist the rendered data to a temp file and return its path.
        tgt = salt.utils.files.mkstemp()
        with salt.utils.files.fopen(tgt, "w+") as target:
            target.write(salt.utils.stringutils.to_str(data))
        return {"result": True, "data": tgt}
    except Exception:
        trb = traceback.format_exc()
        return {"result": False, "data": trb}
# Public rendering callables: each engine is wrapped so it shares the
# common file/string handling in wrap_tmpl_func; ``py`` is registered
# directly since it executes a Python source file.
JINJA = wrap_tmpl_func(render_jinja_tmpl)
MAKO = wrap_tmpl_func(render_mako_tmpl)
WEMPY = wrap_tmpl_func(render_wempy_tmpl)
GENSHI = wrap_tmpl_func(render_genshi_tmpl)
CHEETAH = wrap_tmpl_func(render_cheetah_tmpl)
# Lookup table mapping renderer names to rendering callables.
TEMPLATE_REGISTRY = {
    "jinja": JINJA,
    "mako": MAKO,
    "py": py,
    "wempy": WEMPY,
    "genshi": GENSHI,
    "cheetah": CHEETAH,
}
| true | true |
f738f711c7c8087a0ebd860809e2161fd2425045 | 49,489 | py | Python | test/backend/test_node.py | golunovas/onnx-tensorflow | b6340b3e66aa08af1ea4382e98257c2098177371 | [
"Apache-2.0"
] | null | null | null | test/backend/test_node.py | golunovas/onnx-tensorflow | b6340b3e66aa08af1ea4382e98257c2098177371 | [
"Apache-2.0"
] | null | null | null | test/backend/test_node.py | golunovas/onnx-tensorflow | b6340b3e66aa08af1ea4382e98257c2098177371 | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import unittest
import numpy as np
import tensorflow as tf
from onnx_tf.backend import run_node
from onnx_tf.common import supports_device
from onnx_tf.common.legacy import legacy_onnx_pre_ver, legacy_opset_pre_ver
from onnx import helper
from onnx import TensorProto
from onnx import defs
class TestNode(unittest.TestCase):
""" Tests for nodes
"""
def _get_rnd_float32(self, low=-1.0, high=1.0, shape=None):
output = np.random.uniform(low, high, shape)
if shape == None:
return np.float32(output)
else:
return output.astype(np.float32)
def _get_rnd_int(self, low, high=None, shape=None, dtype=np.int32):
return np.random.randint(low, high, size=shape, dtype=dtype)
def _elu(self, x):
# f(x) = alpha * (exp(x) - 1.) for x < 0,
# f(x) = x for x >= 0
if x < 0.:
return np.expm1(x)
return x
def _leaky_relu(self, x, alpha):
# f(x) = alpha * x for x < 0,
# f(x) = x for x >= 0
if x < 0.:
return alpha * x
return x
def test_abs(self):
node_def = helper.make_node("Abs", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.abs(x))
def test_acosh(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Acosh.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Acosh", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.arccosh(x))
def test_add(self):
node_def = helper.make_node("Add", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=[5, 10, 5, 5])
y = self._get_rnd_float32(shape=[10, 1, 1])
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"],
np.add(x, y.reshape([1, 10, 1, 1])))
# node_def = helper.make_node("Add", ["A", "B"], ["C"], broadcast=1)
# a = self._get_rnd([10, 10])
# b = self._get_rnd([10, 10])
# output = run_node(node_def, [a, b])
# np.testing.assert_almost_equal(output["C"], np.add(a, b))
# node_def = helper.make_node("Add", ["A", "B"], ["C"], broadcast=1)
# a = self._get_rnd([10, 10])
# b = self._get_rnd([10,])
# output = run_node(node_def, [a, b])
# np.testing.assert_almost_equal(output["C"], np.add(a, b))
def test_arg_max(self):
# TODO: need to fix this test
return
for axis in [0, 1]:
node_def = helper.make_node(
"ArgMax", ["data"], ["reduced"], axis=axis, keepdims=0)
data = self._get_rnd_float32(shape=[10, 10])
output = run_node(node_def, [data])
np.testing.assert_almost_equal(output["reduced"],
np.argmax(data, axis=axis))
def test_arg_min(self):
# TODO: need to fix this test
return
for axis in [0, 1]:
node_def = helper.make_node(
"ArgMin", ["data"], ["reduced"], axis=axis, keepdims=0)
data = self._get_rnd_float32(shape=[10, 10])
output = run_node(node_def, [data])
np.testing.assert_almost_equal(output["reduced"],
np.argmin(data, axis=axis))
def test_asinh(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Asinh.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Asinh", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.arcsinh(x))
def test_atanh(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Atanh.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Atanh", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.arctanh(x))
def test_average_pool(self):
# TODO: fix this test
return
device = "CUDA"
if not supports_device(device):
raise unittest.SkipTest(
"Backend doesn't support device {}".format(device))
shape = [1, 1, 40, 40]
node_def = helper.make_node(
"AveragePool", ["X"], ["Y"],
kernel_shape=[1, 2],
pads=[1, 1],
strides=[1, 1])
x = self._get_rnd_float32(shape=shape)
output = run_node(node_def, [x], device=device)
test_output = np.zeros(shape)
for i1 in range(0, shape[0]):
for i2 in range(0, shape[1]):
for j1 in range(0, shape[2]):
for j2 in range(0, shape[3]):
test_output[i1][i2][j1][j2] = 0
count = 0
for k in range(j2, min(j2 + 2, shape[3])):
test_output[i1][i2][j1][j2] += x[i1][i2][j1][k]
count += 1
test_output[i1][i2][j1][j2] /= count
np.testing.assert_almost_equal(output["Y"], test_output)
def _batch_normalization(self, x, mean, variance, bias, scale,
variance_epsilon):
inv = np.reciprocal(np.sqrt(variance + variance_epsilon))
if scale is not None:
inv *= scale
return x * inv + (bias - mean * inv if bias is not None else -mean * inv)
def test_batch_normalization(self):
if legacy_opset_pre_ver(6):
raise unittest.SkipTest("Backend doesn't support consumed flag")
node_def = helper.make_node(
"BatchNormalization", ["X", "scale", "bias", "mean", "var"], ["Y"],
epsilon=0.001)
x_shape = [3, 5, 4, 2]
param_shape = [5]
_param_shape = [1, 5, 1, 1]
x = self._get_rnd_float32(0, 1, shape=x_shape)
m = self._get_rnd_float32(0, 1, shape=param_shape)
_m = m.reshape(_param_shape)
v = self._get_rnd_float32(0, 1, shape=param_shape)
_v = v.reshape(_param_shape)
scale = self._get_rnd_float32(0, 1, shape=param_shape)
_scale = scale.reshape(_param_shape)
bias = self._get_rnd_float32(0, 1, shape=param_shape)
_bias = bias.reshape(_param_shape)
golden = self._batch_normalization(x, _m, _v, _bias, _scale, 0.001)
output = run_node(node_def, [x, scale, bias, m, v])
np.testing.assert_almost_equal(output["Y"], golden, decimal=5)
def test_cast(self):
if legacy_onnx_pre_ver(1, 2) or legacy_opset_pre_ver(6):
test_cases = [("FLOAT", tf.float32), ("UINT8", tf.uint8),
("INT8", tf.int8), ("UINT16", tf.uint16), ("INT16",
tf.int16),
("INT32", tf.int32), ("INT64", tf.int64), ("BOOL", tf.bool),
("FLOAT16", tf.float16), ("DOUBLE", tf.float64),
("COMPLEX64", tf.complex64), ("COMPLEX128", tf.complex128)]
else:
test_cases = [(TensorProto.FLOAT,
tf.float32), (TensorProto.UINT8,
tf.uint8), (TensorProto.INT8, tf.int8),
(TensorProto.UINT16,
tf.uint16), (TensorProto.INT16,
tf.int16), (TensorProto.INT32, tf.int32),
(TensorProto.INT64,
tf.int64), (TensorProto.BOOL,
tf.bool), (TensorProto.FLOAT16, tf.float16),
(TensorProto.DOUBLE,
tf.float64), (TensorProto.COMPLEX64,
tf.complex64), (TensorProto.COMPLEX128,
tf.complex128)]
if not legacy_opset_pre_ver(9):
test_cases.append((TensorProto.STRING, tf.string))
for ty, tf_type in test_cases:
node_def = helper.make_node("Cast", ["input"], ["output"], to=ty)
vector = [2, 3]
output = run_node(node_def, [vector])
np.testing.assert_equal(output["output"].dtype, tf_type)
if not legacy_opset_pre_ver(9):
test_cases2 = [(TensorProto.FLOAT, tf.float32), (TensorProto.INT32,
tf.int32),
(TensorProto.INT64, tf.int64), (TensorProto.DOUBLE,
tf.float64)]
for ty, tf_type in test_cases2:
node_def = helper.make_node("Cast", ["input"], ["output"], to=ty)
vector = ['2', '3']
output = run_node(node_def, [vector])
np.testing.assert_equal(output["output"].dtype, tf_type)
def test_ceil(self):
node_def = helper.make_node("Ceil", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.ceil(x))
def test_compress(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest(
"ONNX version {} doesn't support Compress.".format(
defs.onnx_opset_version()))
axis = 1
node_def = helper.make_node(
"Compress", inputs=['X', 'condition'], outputs=['Y'], axis=axis)
x = self._get_rnd_float32(shape=[5, 5, 5])
cond = np.array([1, 0, 1])
output = run_node(node_def, inputs=[x, cond])
np.testing.assert_almost_equal(output['Y'], np.compress(cond, x, axis=axis))
def test_concat(self):
shape = [10, 20, 5]
for axis in range(len(shape)):
node_def = helper.make_node("Concat", ["X1", "X2"], ["Y"], axis=axis)
x1 = self._get_rnd_float32(shape=shape)
x2 = self._get_rnd_float32(shape=shape)
output = run_node(node_def, [x1, x2])
np.testing.assert_almost_equal(output["Y"], np.concatenate((x1, x2),
axis))
def test_constant(self):
shape = [16, 16]
values = np.random.randn(*shape).flatten().astype(float)
const2_onnx = helper.make_tensor("const2", TensorProto.DOUBLE, shape,
values)
node_def = helper.make_node("Constant", [], ["Y"], value=const2_onnx)
output = run_node(node_def, [])
np.testing.assert_equal(output["Y"].shape, shape)
np.testing.assert_almost_equal(output["Y"].flatten(), values)
# test sparse tensor
if not legacy_opset_pre_ver(11):
expected = np.array([[1, 0, 0, 0], [0, 0, 2, 0], [0, 0, 0, 0]])
x = np.array([[0, 0], [1, 2]]).flatten().astype(np.int64)
values = helper.make_tensor("values", TensorProto.INT32, [2], [1, 2])
indices = helper.make_tensor("indices", TensorProto.INT64, [2, 2], x)
a = helper.make_sparse_tensor(values, indices,[3, 4])
node_def = helper.make_node("Constant", [], ["Y"], sparse_value=a)
output = run_node(node_def, [])
b = tf.sparse_to_dense(output["Y"].indices, output["Y"].dense_shape, output["Y"].values)
result = b.eval(session=tf.Session())
np.testing.assert_equal(result, expected)
def test_constant_fill(self):
if not legacy_opset_pre_ver(9):
raise unittest.SkipTest(
"ONNX version {} doesn't support ConstantFill.".format(
defs.onnx_opset_version()))
shape = [1, 2, 3, 4]
extra_shape = [5, 6]
value = 3.
node_def = helper.make_node(
"ConstantFill",
["X"],
["Y"],
value=value,
extra_shape=extra_shape,
dtype=1,
)
x = self._get_rnd_float32(shape=shape)
y = np.zeros(shape + extra_shape)
y.fill(value)
output = run_node(node_def, [x])
np.testing.assert_equal(output["Y"].dtype, tf.float32)
np.testing.assert_equal(output["Y"], y)
def test_constant_of_shape(self):
if defs.onnx_opset_version() < 9:
raise unittest.SkipTest(
"ONNX version {} doesn't support ConstantOfShape.".format(
defs.onnx_opset_version()))
v = helper.make_tensor("value", TensorProto.FLOAT, [1], [1])
node_def = helper.make_node("ConstantOfShape", ["X"], ["Y"], value=v)
x = np.array([4, 3, 2])
output = run_node(node_def, inputs=[x])
np.testing.assert_almost_equal(output["Y"], np.ones(x, dtype=np.float32))
v = helper.make_tensor("value", TensorProto.INT32, [1], [0])
node_def = helper.make_node("ConstantOfShape", ["X"], ["Y"], value=v)
x = np.array([10, 6])
output = run_node(node_def, inputs=[x])
np.testing.assert_almost_equal(output["Y"], np.zeros(x, dtype=np.int32))
def test_conv(self):
device = "CUDA"
if not supports_device(device):
raise unittest.SkipTest(
"Backend doesn't support device {}".format(device))
N, C, H, W = 4, 3, 5, 5
x_shape = [N, C, H, W]
K, kH, kW = 6, 3, 3
weight_shape = [K, C, kH, kW]
node_def = helper.make_node(
"Conv", ["X", "weights"], ["Y"],
pads=[1, 1, 1, 1],
kernel_shape=[kH, kW])
x = self._get_rnd_float32(shape=x_shape)
weights = self._get_rnd_float32(shape=weight_shape)
output = run_node(node_def, [x, weights], device=device)
out_shape = [N, K, H, W]
test_output = np.zeros(out_shape)
for n in range(N):
for c in range(C):
for h in range(H):
for w in range(W):
for k in range(K):
for kh in range(kH):
for kw in range(kW):
h_in_range = (h - kH // 2 + kh) < H and (
h - kH // 2 + kh) >= 0
w_in_range = (w - kW // 2 + kw) < W and (
w - kW // 2 + kw) >= 0
if h_in_range and w_in_range:
test_output[n][k][h][w] += (x[n][c][h - kH // 2 + kh][
w - kW // 2 + kw] * weights[k][c][kh][kw])
np.testing.assert_almost_equal(output["Y"], test_output, decimal=5)
def test_conv_transpose(self):
# Fix test in the future.
return
device = "CUDA"
if not supports_device(device):
raise unittest.SkipTest(
"Backend doesn't support device {}".format(device))
node_def = helper.make_node(
"ConvTranspose", ["X", "weights"], ["Y"], pads=[1, 1])
x_shape = [1, 5, 4]
x = self._get_rnd(x_shape)
weight_shape = [5, 3, 2]
weights = self._get_rnd_float32(shape=weight_shape)
output = run_node(node_def, [x, weights], device=device)
out_shape = [x_shape[0], weight_shape[1], x_shape[2]]
test_output = np.zeros(out_shape)
for b in range(0, x_shape[0]):
for m in range(0, weight_shape[1]):
for h in range(0, x_shape[2]):
v = 0
for c in range(0, x_shape[1]):
for k in range(h, min(h + weight_shape[2], x_shape[2])):
v += x[b][c][k] * weights[c][m][k - h]
test_output[b][m][h] = v
np.testing.assert_almost_equal(output["Y"], test_output, decimal=5)
def test_cosh(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Cosh.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Cosh", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.cosh(x))
def test_depth_to_space(self):
node_def = helper.make_node("DepthToSpace", ["X"], ["Y"], blocksize=2)
x_shape = [1, 12, 1, 1]
x = self._get_rnd_float32(shape=x_shape)
output = run_node(node_def, [x])
x = np.transpose(x, (0, 2, 3, 1))
y = np.reshape(np.swapaxes(x.reshape(1, 1, 1, 2, 2, 3), 2, 3), (1, 2, 2, 3))
y = np.transpose(y, (0, 3, 1, 2))
np.testing.assert_almost_equal(output["Y"], y, decimal=5)
def test_dequantize_linear(self):
node_def = helper.make_node("DequantizeLinear",
["x", "x_scale", "x_zero_point"], ["y"])
for x, x_zero_point in [
[
self._get_rnd_int(-128, 127, [2, 6], np.int8),
self._get_rnd_int(-128, 127, dtype=np.int8)
],
[
self._get_rnd_int(0, 255, [2, 6], np.uint8),
self._get_rnd_int(0, 255, dtype=np.uint8)
],
[
self._get_rnd_int(-512, 512, [2, 6]),
np.int32(0)
]
]:
x_scale = self._get_rnd_float32(-10., 10)
y = np.subtract(np.float32(x), np.float32(x_zero_point))
y = np.multiply(y, x_scale)
output = run_node(node_def, [x, x_scale, x_zero_point])
np.testing.assert_almost_equal(output["y"], y)
def test_div(self):
node_def = helper.make_node("Div", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=[10, 10])
y = self._get_rnd_float32(shape=[10, 10])
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"], np.divide(x, y))
def test_dropout(self):
# Since current ONNX only support inference and
# dropout at inference mode is a no-op,
# therefore dropout is always a no-op operator
# in ONNX.
node_def = helper.make_node("Dropout", ["X"], ["Y"])
if legacy_opset_pre_ver(7):
# at inference mode, is_test is always set to 1
node_def = helper.make_node("Dropout", ["X"], ["Y"], is_test=1)
x = self._get_rnd_float32(shape=[3, 4, 5])
y = x
output = run_node(node_def, [x])
np.testing.assert_equal(output["Y"], y)
def test_dot(self):
# this op is removed
# remove this test in the future
return
node_def = helper.make_node("Dot", ["X", "Y"], ["Z"])
x = np.floor(self._get_rnd_float32(shape=[10, 10]))
y = np.floor(self._get_rnd_float32(shape=[10, 10]))
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"], np.dot(x, y))
def test_elu(self):
node_def = helper.make_node("Elu", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[100])
output = run_node(node_def, [x])
test_output = [self._elu(a) for a in x]
np.testing.assert_almost_equal(output["Y"], test_output)
def test_equal(self):
node_def = helper.make_node("Equal", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=[5, 3, 3, 2])
y = self._get_rnd_float32(shape=[3, 3, 1])
output = run_node(node_def, [x, y])
np.testing.assert_equal(output["Z"], np.equal(x, np.reshape(
y, [1, 3, 3, 1])))
def test_erf(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Erf.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Erf", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
exp_output = np.vectorize(math.erf)(x).astype(np.float32)
np.testing.assert_almost_equal(output["Y"], exp_output)
def test_exp(self):
node_def = helper.make_node("Exp", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[100])
x = x - 3.6
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.exp(x))
def test_eye_like(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support EyeLike.".format(
defs.onnx_opset_version()))
for shape in [[6, 10], [10, 6]]:
for off_diagonal_offset in [-10, -6, -3, 0, 3, 6, 7, 10]:
node_def = helper.make_node(
"EyeLike", ['x'], ['y'], dtype=1, k=off_diagonal_offset)
x = self._get_rnd_int(0, 100, shape=shape)
y = np.eye(shape[0], shape[1], k=off_diagonal_offset, dtype=np.float32)
output = run_node(node_def, [x])
np.testing.assert_equal(output['y'], y)
def test_flatten(self):
# If input tensor has shape (d_0, d_1, ... d_n) then the
# output will have shape:
#
# (d_0 X d_1 ... d_(axis-1), d_axis X d_(axis+1) ... X dn)
#
# TODO: pass axis attribute which is supported in newer
# versions of onnx
node_def = helper.make_node("Flatten", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[10, 2, 3, 4, 5])
output = run_node(node_def, [x])
# TODO: pass axis=3 and uncomment the line below
# np.testing.assert_almost_equal(output["Y"], x.reshape([60, 20]))
np.testing.assert_almost_equal(output["Y"], x.reshape([10, 120]))
def test_gather(self):
node_def = helper.make_node("Gather", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=[10, 10])
y = [[0, 1], [1, 2]]
output = run_node(node_def, [x, y])
test_output = np.zeros((2, 2, 10))
for i in range(0, 2):
for j in range(0, 10):
test_output[0][i][j] = x[i][j]
for i in range(0, 2):
for j in range(0, 10):
test_output[1][i][j] = x[i + 1][j]
np.testing.assert_almost_equal(output["Z"], test_output)
def test_gemm(self):
# Compute Y = alpha * A * B + beta * C
node_def = helper.make_node(
"Gemm", ["A", "B", "C"], ["Y"], transA=0, transB=0, alpha=1.0, beta=1.0)
x = np.floor(self._get_rnd_float32(shape=[10, 10]))
y = np.floor(self._get_rnd_float32(shape=[10, 10]))
z = np.floor(self._get_rnd_float32(shape=[10, 10]))
output = run_node(node_def, [x, y, z])
test_output = np.matmul(x, y) + z
np.testing.assert_almost_equal(output["Y"], test_output)
def test_global_average_pool(self):
# Image case: (N x C x H x W), where N is the batch size,
# C is the number of channels, and H and W are the height
# and the width of the data
#
# Non-image case: (N x C x D1 x D2 ... Dn)
#
# Output data tensor from pooling across the input tensor.
# Dimensions will be N x C x 1 x 1
node_def = helper.make_node("GlobalAveragePool", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[10, 10, 2, 3])
output = run_node(node_def, [x])
test_output = np.zeros([10, 10, 1, 1])
for i1 in range(0, 10):
for i2 in range(0, 10):
sum = 0
for j1 in range(0, 2):
for j2 in range(0, 3):
sum += x[i1][i2][j1][j2]
test_output[i1][i2][0][0] = sum / 6.
np.testing.assert_almost_equal(output["Y"], test_output)
  def test_image_sacler(self):
    # NOTE(review): method name has a typo ("sacler" -> "scaler"); kept so
    # any external test selection by name keeps working.
    #
    # ImageScaler contract:
    # Input: (N x C x H x W), where N is the batch size,
    # C is the number of channels, and H and W are the height
    # and the width of the data
    # Scale: (float, default 1.0) the scale to apply to every element
    # Bias: applied per channel, same length as C
    # Output has same shape and type as input
    x = self._get_rnd_float32(shape=[1, 3, 224, 224])
    # np.random.rand samples [0, 1); add 0.1 so the scale is never ~0
    scale = np.random.rand(1)[0] + 0.1
    bias = np.random.rand(3)
    node_def = helper.make_node(
        "ImageScaler", ["X"], ["Y"], scale=scale, bias=bias)
    output = run_node(node_def, [x])
    test_out = np.multiply(x, scale)
    # Move C to the last axis so the length-3 bias broadcasts per channel,
    # then transpose back to NCHW.
    test_out = np.transpose(test_out, [0, 2, 3, 1])
    test_out = np.add(test_out, bias)
    test_out = np.transpose(test_out, [0, 3, 1, 2])
    np.testing.assert_almost_equal(output["Y"], test_out)
def test_is_inf(self):
if legacy_opset_pre_ver(10):
raise unittest.SkipTest("ONNX version {} doesn't support IsInf.".format(
defs.onnx_opset_version()))
input = np.array(
[-1.2, np.nan, np.inf, 2.8, np.NINF, np.inf], dtype=np.float32)
expected_output = {
"node_def": np.isinf(input),
"node_def_neg_false": np.isposinf(input),
"node_def_pos_false": np.isneginf(input)
}
node_defs = {
"node_def":
helper.make_node("IsInf", ["X"], ["Y"]),
"node_def_neg_false":
helper.make_node("IsInf", ["X"], ["Y"], detect_negative=0),
"node_def_pos_false":
helper.make_node("IsInf", ["X"], ["Y"], detect_positive=0)
}
for key in node_defs:
output = run_node(node_defs[key], [input])
np.testing.assert_equal(output["Y"], expected_output[key])
def test_isnan(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support IsNaN.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("IsNaN", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 3])
x[0][1] = x[1][0] = x[2][2] = np.nan
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.isnan(x))
def test_global_lp_pool(self):
# Image case: (N x C x H x W), where N is the batch size,
# C is the number of channels, and H and W are the height
# and the width of the data
#
# Non-image case: (N x C x D1 x D2 ... Dn)
#
# Output data tensor from pooling across the input tensor.
# Dimensions will be N x C x 1 x 1
node_def = helper.make_node("GlobalLpPool", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[10, 10, 2, 3])
output = run_node(node_def, [x])
test_output = np.zeros([10, 10, 1, 1])
for i1 in range(0, 10):
for i2 in range(0, 10):
tmp = np.zeros([2, 3])
for j1 in range(0, 2):
for j2 in range(0, 3):
tmp[j1][j2] = x[i1][i2][j1][j2]
test_output[i1][i2][0][0] = np.linalg.norm(tmp)
np.testing.assert_almost_equal(output["Y"], test_output, decimal=5)
def test_global_max_pool(self):
# Image case: (N x C x H x W), where N is the batch size,
# C is the number of channels, and H and W are the height
# and the width of the data
#
# Non-image case: (N x C x D1 x D2 ... Dn)
#
# Output data tensor from pooling across the input tensor.
# Dimensions will be N x C x 1 x 1
node_def = helper.make_node("GlobalMaxPool", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[10, 10, 2, 3])
output = run_node(node_def, [x])
test_output = np.zeros([10, 10, 1, 1])
for i1 in range(0, 10):
for i2 in range(0, 10):
max = x[i1][i2][0][0]
for j1 in range(0, 2):
for j2 in range(0, 3):
if max < x[i1][i2][j1][j2]:
max = x[i1][i2][j1][j2]
test_output[i1][i2][0][0] = max
np.testing.assert_almost_equal(output["Y"], test_output)
def test_less(self):
node_def = helper.make_node("Less", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=[5, 3, 3, 2])
y = self._get_rnd_float32(shape=[3, 3, 1])
output = run_node(node_def, [x, y])
np.testing.assert_equal(output["Z"], np.less(x, np.reshape(y,
[1, 3, 3, 1])))
def test_lp_normalization(self):
for ordr in range(1, 3):
node_def = helper.make_node("LpNormalization", ["X"], ["Y"], p=ordr)
x = self._get_rnd([2, 2, 3, 2])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"],
x / np.expand_dims(np.linalg.norm(x, axis=-1, ord=ordr), -1),
rtol=1e-3)
def test_l_r_n(self):
# Each input value is divided by:
#
# (bias+(alpha/size)*sum(xi^2 for every xi in the local region))^beta
alpha = 2.0
beta = 1.0
bias = 5.0
size = 3
node_def = helper.make_node(
"LRN", ["X"], ["Y"], alpha=alpha, beta=beta, bias=bias, size=size)
x = self._get_rnd_float32(shape=[10, 2, 10, 10])
output = run_node(node_def, [x])
test_output = np.zeros([10, 10, 10, 2])
x = np.transpose(x, axes=[0, 2, 3, 1])
for i1 in range(0, 10):
for i2 in range(0, 10):
for j1 in range(0, 10):
for j2 in range(0, 2):
sqr_sum = 0.
# size of 3 means radius 1 in TF speak
# i.e. the immediate neighbouring values
# if "previous" neighbour exists
if j2 > 0:
sqr_sum += x[i1][i2][j1][j2 - 1] * x[i1][i2][j1][j2 - 1]
# current value
sqr_sum += x[i1][i2][j1][j2] * x[i1][i2][j1][j2]
# if "next" neighbour exists
if j2 < 2 - 1:
sqr_sum += x[i1][i2][j1][j2 + 1] * x[i1][i2][j1][j2 + 1]
test_output[i1][i2][j1][j2] = \
x[i1][i2][j1][j2] / ((bias + (alpha * 1. / size) * sqr_sum) ** beta)
test_output = np.transpose(test_output, axes=[0, 3, 1, 2])
np.testing.assert_almost_equal(output["Y"], test_output)
def test_floor(self):
node_def = helper.make_node("Floor", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[100])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.floor(x))
def test_leakyrelu(self):
node_def = helper.make_node("LeakyRelu", ["X"], ["Y"], alpha=0.8)
x = np.floor(self._get_rnd_float32(shape=[100]))
output = run_node(node_def, [x])
test_output = [self._leaky_relu(a, 0.8) for a in x]
np.testing.assert_almost_equal(output["Y"], test_output)
def test_log(self):
node_def = helper.make_node("Log", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[100])
x = x + 3.6
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.log(x))
def test_max(self):
node_def = helper.make_node("Max", ["X1", "X2", "X3", "X4"], ["Z"])
x1 = self._get_rnd_float32(shape=[10, 10])
x2 = self._get_rnd_float32(shape=[10, 10])
x3 = self._get_rnd_float32(shape=[10, 10])
x4 = self._get_rnd_float32(shape=[10, 10])
output = run_node(node_def, [x1, x2, x3, x4])
test_output = np.maximum(np.maximum(np.maximum(x1, x2), x3), x4)
np.testing.assert_almost_equal(output["Z"], test_output)
def test_max_pool(self):
return
node_def = helper.make_node(
"MaxPool", ["X"], ["Y"],
dilations=[1, 1],
kernel_shape=[1, 2],
pads=[0, 0],
strides=[1, 2])
x = self._get_rnd_float32(shape=[10, 10, 4, 4])
output = run_node(node_def, [x])
test_output = np.zeros([10, 10, 4, 2])
for i1 in range(0, 10):
for i2 in range(0, 10):
for j1 in range(0, 4):
for j2 in range(0, 2):
test_output[i1][i2][j1][j2] = \
max(x[i1][i2][j1][2*j2], x[i1][i2][j1][2*j2 + 1])
np.testing.assert_almost_equal(output["Y"], test_output)
def test_mean_variance_normalization(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest(
"ONNX version {} doesn't have test for MeanVarianceNormalization"
.format(defs.onnx_opset_version()))
input_data = self._get_rnd_float32(shape=[2,2,2,2])
# Calculate expected output data using formula:
# (Input - Mean)/SD
mean = np.mean(input_data, keepdims=1, axis=(0,2,3))
std = np.std(input_data, keepdims=1, axis=(0,2,3))
expected_output = (input_data - mean) / std
# Testing without "axes" argument should default to axes=[0,2,3]
node_def = helper.make_node("MeanVarianceNormalization", ["X"], ["Y"])
output = run_node(node_def, [input_data])
np.testing.assert_almost_equal(output["Y"], expected_output, decimal=5)
def test_min(self):
node_def = helper.make_node("Min", ["X1", "X2", "X3", "X4"], ["Z"])
x1 = self._get_rnd_float32(shape=[10, 10])
x2 = self._get_rnd_float32(shape=[10, 10])
x3 = self._get_rnd_float32(shape=[10, 10])
x4 = self._get_rnd_float32(shape=[10, 10])
output = run_node(node_def, [x1, x2, x3, x4])
test_output = np.minimum(np.minimum(np.minimum(x1, x2), x3), x4)
np.testing.assert_almost_equal(output["Z"], test_output)
def test_mul(self):
node_def = helper.make_node("Mul", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=[5, 10, 5, 5])
y = self._get_rnd_float32(shape=[10, 1, 1])
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"],
np.multiply(x, y.reshape([1, 10, 1, 1])))
def test_mod(self):
if legacy_opset_pre_ver(10):
raise unittest.SkipTest("ONNX version {} doesn't support Mod.".format(
defs.onnx_opset_version()))
x = self._get_rnd_float32(shape=[5, 5])
y = self._get_rnd_float32(shape=[5, 5])
node_def = helper.make_node("Mod", ["X", "Y"], ["Z"], fmod=0)
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"], np.mod(x, y))
node_def = helper.make_node("Mod", ["X", "Y"], ["Z"], fmod=1)
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"], np.fmod(x, y))
def test_neg(self):
node_def = helper.make_node("Neg", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.negative(x))
def test_non_zero(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support NonZero.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("NonZero", ["x"], ["y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
y = np.array(np.nonzero(x))
output = run_node(node_def, [x])
np.testing.assert_equal(output["y"], y)
def test_onehot(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support OneHot.".format(
defs.onnx_opset_version()))
indices = np.array([[0, 2], [1, 2], [0, 1]])
depth = np.int32(5)
on_value = 6.0
off_value = 2.0
values = np.array([off_value, on_value])
node_def = helper.make_node(
'OneHot', inputs=['indices', 'depth', 'values'], outputs=['y'], axis=-1)
y = (np.arange(depth) == indices[..., None]).astype(int)
y = y * (on_value - off_value) + off_value
output = run_node(node_def, inputs=[indices, depth, values])
np.testing.assert_equal(output['y'], y)
def test_range(self):
if legacy_opset_pre_ver(11):
raise unittest.SkipTest("ONNX version {} doesn't support Range.".format(
defs.onnx_opset_version()))
node_def = helper.make_node(
"Range", ['start', 'limit', 'delta'], ['y'])
# test positive_delta
start = self._get_rnd_int(low=0, high=3)
limit = self._get_rnd_int(low=10, high=30)
delta = np.int32(3)
output = run_node(node_def, [start, limit, delta])
np.testing.assert_equal(output['y'], range(start, limit, delta))
# test negative_delta
start = self._get_rnd_int(low=20, high=30)
limit = self._get_rnd_int(low=1, high=5)
delta = np.int32(-2)
output = run_node(node_def, [start, limit, delta])
np.testing.assert_equal(output['y'], range(start, limit, delta))
def test_round(self):
if legacy_opset_pre_ver(11):
raise unittest.SkipTest("ONNX version {} doesn't support Round.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Round", ["X"], ["Y"])
x = self._get_rnd_float32(-20.0, 20.0, shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.round(x))
def test_relu(self):
node_def = helper.make_node("Relu", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.maximum(x, 0))
def test_pad(self):
node_def = helper.make_node(
"Pad", ["X"], ["Y"], mode="constant", pads=[1, 1, 1, 1], value=2.0)
x = self._get_rnd_float32(shape=[100, 100])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"],
np.lib.pad(
x, ((1, 1), (1, 1)),
'constant',
constant_values=(2, 2)))
def test_quantize_linear(self):
node_def = helper.make_node("QuantizeLinear",
["x", "y_scale", "y_zero_point"], ["y"])
for x in [
self._get_rnd_float32(-512., 512., [2, 6]),
self._get_rnd_int(-512, 512, [2, 6])
]:
y_scale = self._get_rnd_float32(-10., 10.)
for y_zero_point in [
self._get_rnd_int(-128, 127, dtype=np.int8),
self._get_rnd_int(0, 255, dtype=np.uint8)
]:
y = np.divide(x, y_scale)
y = np.round(y)
y = np.add(y, y_zero_point)
if y_zero_point.dtype.type is np.int8:
y = np.clip(y, -128, 127).astype(np.int8)
else:
y = np.clip(y, 0, 255).astype(np.uint8)
output = run_node(node_def, [x, y_scale, y_zero_point])
np.testing.assert_almost_equal(output["y"], y)
def test_reciprocal(self):
node_def = helper.make_node("Reciprocal", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], 1.0 / x)
def test_reduce_l1(self):
node_def = helper.make_node("ReduceL1", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"],
np.linalg.norm(x, 1, (1, 2), True))
def test_reduce_log_sum_exp(self):
node_def = helper.make_node("ReduceLogSumExp", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"],
np.log(np.sum(np.exp(x), axis=(1, 2), keepdims=True)),
rtol=1e-3)
def test_reduce_max(self):
node_def = helper.make_node("ReduceMax", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"], np.max(x, (1, 2), keepdims=True), rtol=1e-3)
def test_reduce_mean(self):
node_def = helper.make_node("ReduceMean", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"], np.mean(x, (1, 2), keepdims=True), rtol=1e-3)
def test_reduce_min(self):
node_def = helper.make_node("ReduceMin", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"], np.min(x, (1, 2), keepdims=True), rtol=1e-3)
def test_reduce_prod(self):
node_def = helper.make_node("ReduceProd", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[1, 5, 5, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"], np.prod(x, (1, 2), keepdims=True), rtol=1e-3)
def test_reduce_sum(self):
node_def = helper.make_node("ReduceSum", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"], np.sum(x, (1, 2), keepdims=True), rtol=1e-3)
def test_reduce_sum_square(self):
node_def = helper.make_node("ReduceSumSquare", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"], np.sum(np.square(x), (1, 2), keepdims=True), rtol=1e-3)
def test_pow(self):
node_def = helper.make_node("Pow", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=1000) / 2.0 + 0.5
y = self._get_rnd_float32(shape=1000) / 2.0 + 0.5
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"], np.power(x, y))
def test_reshape(self):
x = self._get_rnd_float32(shape=100)
shape = [10, 10]
if defs.onnx_opset_version() < 5:
node_def = helper.make_node("Reshape", ["X"], ["Z"], shape=shape)
output = run_node(node_def, [x])
else:
node_def = helper.make_node("Reshape", ["X", "Y"], ["Z"])
output = run_node(node_def, [x, shape])
np.testing.assert_almost_equal(output["Z"], x.reshape([10, 10]))
def test_reshape_with_copy(self):
x = self._get_rnd_float32(shape=[10, 20 * 30])
shape = [0, 20, 30]
if defs.onnx_opset_version() < 5:
node_def = helper.make_node("Reshape", ["X"], ["Z"], shape=shape)
output = run_node(node_def, [x])
else:
node_def = helper.make_node("Reshape", ["X", "Y"], ["Z"])
output = run_node(node_def, [x, shape])
np.testing.assert_almost_equal(output["Z"], x.reshape([10, 20, 30]))
def test_selu(self):
node_def = helper.make_node("Selu", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
alpha = 1.6732
gamma = 1.0507
x[x <= 0] = gamma * (alpha * np.exp(x[x <= 0]) - alpha)
x[x > 0] = gamma * x[x > 0]
np.testing.assert_allclose(output["Y"], x, rtol=1e-3, atol=1e-7)
def test_shape(self):
node_def = helper.make_node("Shape", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(output["Y"], np.shape(x))
def test_shrink(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Shrink.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Shrink", ["X"], ["Y"], bias=1.5, lambd=1.5)
X = np.arange(-2.0, 2.1, dtype=np.float32)
Y = np.array([-0.5, 0, 0, 0, 0.5], dtype=np.float32)
output = run_node(node_def, [X])
np.testing.assert_almost_equal(output["Y"], Y)
def test_sigmoid(self):
node_def = helper.make_node("Sigmoid", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], 1 / (1 + np.exp(-x)))
def test_sign(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Sign.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Sign", ["X"], ["Y"])
x = self._get_rnd_float32(-10, 10, [3, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.sign(x))
def test_sinh(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Sinh.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Sinh", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.sinh(x))
def test_size(self):
node_def = helper.make_node("Size", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.size(x))
def test_slice(self):
# test case 1 with normal inputs
axes = [0, 1, 2]
starts = [0, 0, 0]
ends = [2, 2, 2]
steps = [1, 1, 1]
if legacy_opset_pre_ver(10):
node_def = helper.make_node(
"Slice", ["X"], ["S"], axes=axes, starts=starts, ends=ends)
x = self._get_rnd_float32(shape=[1000]).reshape([10, 10, 10])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["S"], x[0:2, 0:2, 0:2])
else:
node_def = helper.make_node(
"Slice", ["X", "starts", "ends", "axes", "steps"], ["S"])
x = self._get_rnd_float32(shape=[1000]).reshape([10, 10, 10])
output = run_node(node_def, [x, starts, ends, axes, steps])
np.testing.assert_almost_equal(output["S"], x[0:2, 0:2, 0:2])
# test case 2 with negative, out-of-bound and default inputs
axes = [0, 2]
starts = [0, -7]
ends = [-8, 20]
if legacy_opset_pre_ver(10):
node_def = helper.make_node(
"Slice", ["X"], ["S"], axes=axes, starts=starts, ends=ends)
x = self._get_rnd_float32(shape=[1000]).reshape([10, 10, 10])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["S"], x[0:-8, :, -7:20])
else:
node_def = helper.make_node(
"Slice", ["X", "starts", "ends", "axes"], ["S"])
x = self._get_rnd_float32(shape=[1000]).reshape([10, 10, 10])
output = run_node(node_def, [x, starts, ends, axes])
np.testing.assert_almost_equal(output["S"], x[0:-8, :, -7:20])
# test case 3 with non-default steps
axes = [0, 1, 2]
starts = [0, 0, 0]
ends = [2, 2, 2]
steps = [2, -2, -1]
if legacy_opset_pre_ver(10) == False:
node_def = helper.make_node(
"Slice", ["X", "starts", "ends", "axes", "steps"], ["S"])
x = self._get_rnd_float32(shape=[1000]).reshape([10, 10, 10])
output = run_node(node_def, [x, starts, ends, axes, steps])
np.testing.assert_almost_equal(output["S"], x[0:2:2, 0:2:-2, 0:2:-1])
def test_softplus(self):
node_def = helper.make_node("Softplus", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.log(np.exp(x) + 1))
def test_softsign(self):
node_def = helper.make_node("Softsign", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], x / (1 + np.abs(x)))
def test_space_to_depth(self):
node_def = helper.make_node("SpaceToDepth", ["X"], ["Y"], blocksize=2)
x_shape = [1, 3, 2, 2]
x = self._get_rnd_float32(shape=x_shape)
output = run_node(node_def, [x])
x = np.transpose(x, (0, 2, 3, 1))
y = np.reshape(
np.swapaxes(x.reshape(1, 1, 1, 1, 1, 12), 2, 3), (1, 1, 1, 12))
y = np.transpose(y, (0, 3, 1, 2))
np.testing.assert_allclose(output["Y"], y, rtol=1e-3)
def test_split(self):
split = [3, 3, 4]
node_def = helper.make_node(
"Split", ["X"], ["Z%i" % i for i in range(len(split))],
axis=0,
split=split)
x = self._get_rnd_float32(shape=[100]).reshape([10, 10])
output = run_node(node_def, [x])
for a, b in zip(list(output), np.split(x, np.cumsum(split))[:-1]):
np.testing.assert_almost_equal(a, b)
def test_sqrt(self):
node_def = helper.make_node("Sqrt", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000]) + 1.0
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.sqrt(x), decimal=5)
def test_squeeze(self):
node_def = helper.make_node("Squeeze", ["X"], ["Y"], axes=[2])
x = np.array([[[0], [1], [2]]])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.squeeze(x, axis=2))
def test_sub(self):
node_def = helper.make_node("Sub", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=[10, 10])
y = self._get_rnd_float32(shape=[10, 10])
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"], np.subtract(x, y))
def test_sum(self):
node_def = helper.make_node("Sum", ["X1", "X2", "X3", "X4"], ["Z"])
x1 = self._get_rnd_float32(shape=[10, 10])
x2 = self._get_rnd_float32(shape=[10, 10])
x3 = self._get_rnd_float32(shape=[10, 10])
x4 = self._get_rnd_float32(shape=[10, 10])
output = run_node(node_def, [x1, x2, x3, x4])
test_output = x1 + x2 + x3 + x4
np.testing.assert_almost_equal(output["Z"], test_output)
def test_tanh(self):
node_def = helper.make_node("Tanh", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000]) + 1.0
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.tanh(x), decimal=5)
def test_thresholded_relu(self):
alpha = 2.0
node_def = helper.make_node(
"ThresholdedRelu", ["X"], ["Y"], alpha=alpha)
x = self._get_rnd_float32(-3.0, 3.0, [10])
y = np.clip(x, alpha, np.inf)
y[y == alpha] = 0
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], y)
def test_tile(self):
if legacy_onnx_pre_ver(1, 2):
raise unittest.SkipTest(
"The current version of ONNX does not record correctly the opset of Tile."
)
node_def = helper.make_node("Tile", ["X1", "X2"], ["Z"])
x = self._get_rnd_float32(shape=[3, 5, 5, 3])
repeats = [1, 1, 2, 1]
output = run_node(node_def, [x, repeats])
np.testing.assert_allclose(output["Z"], np.tile(x, repeats), rtol=1e-3)
def test_transpose(self):
node_def = helper.make_node("Transpose", ["X"], ["Y"], perm=[0, 2, 1])
x = self._get_rnd_float32(shape=[1000]).reshape([10, 10, 10])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.transpose(x, (0, 2, 1)))
def test_topk(self):
x = np.arange(15, dtype=np.float32).reshape(3, 5)
values = np.array([[4, 3], [9, 8], [14, 13]], dtype=np.float32)
indices = np.array([[4, 3], [4, 3], [4, 3]], dtype=np.int64)
if legacy_opset_pre_ver(10): # for opset = 1
node_def = helper.make_node("TopK", ["x"], ["values", "indices"], k=2)
output = run_node(node_def, [x])
elif legacy_opset_pre_ver(11): # for opset = 10
k = np.array([2], dtype=np.int64)
node_def = helper.make_node("TopK", ["x", "k"], ["values", "indices"])
output = run_node(node_def, [x, k])
else: # for opset = 11
x = np.array([[3, 2, 5, 10, 7], [12, 15, 10, 7, 20], [21, 16, 5, 3, 6]],
dtype=np.float32)
values = np.array([[3, 2], [10, 7], [5, 3]], dtype=np.float32)
indices = np.array([[0, 1], [2, 3], [2, 3]], dtype=np.int64)
k = np.array([2], dtype=np.int64)
node_def = helper.make_node(
"TopK", ["x", "k"], ["values", "indices"], largest=0, sorted=0)
output = run_node(node_def, [x, k])
np.testing.assert_almost_equal(output["values"], values)
np.testing.assert_almost_equal(output["indices"], indices)
def test_where(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Where.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Where", ["C", "X", "Y"], ["Z"])
c = np.array([[1, 0], [1, 1]], dtype=np.bool)
x = np.array([[1, 2], [3, 4]], dtype=np.float32)
y = np.array([[9, 8], [7, 6]], dtype=np.float32)
output = run_node(node_def, [c, x, y])
np.testing.assert_almost_equal(output["Z"], np.where(c, x, y))
if __name__ == '__main__':
  # Run every TestNode case when this file is executed as a script.
  unittest.main()
| 39.97496 | 94 | 0.591121 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import math
import unittest
import numpy as np
import tensorflow as tf
from onnx_tf.backend import run_node
from onnx_tf.common import supports_device
from onnx_tf.common.legacy import legacy_onnx_pre_ver, legacy_opset_pre_ver
from onnx import helper
from onnx import TensorProto
from onnx import defs
class TestNode(unittest.TestCase):
def _get_rnd_float32(self, low=-1.0, high=1.0, shape=None):
output = np.random.uniform(low, high, shape)
if shape == None:
return np.float32(output)
else:
return output.astype(np.float32)
def _get_rnd_int(self, low, high=None, shape=None, dtype=np.int32):
return np.random.randint(low, high, size=shape, dtype=dtype)
def _elu(self, x):
if x < 0.:
return np.expm1(x)
return x
def _leaky_relu(self, x, alpha):
if x < 0.:
return alpha * x
return x
def test_abs(self):
node_def = helper.make_node("Abs", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.abs(x))
def test_acosh(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Acosh.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Acosh", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.arccosh(x))
def test_add(self):
node_def = helper.make_node("Add", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=[5, 10, 5, 5])
y = self._get_rnd_float32(shape=[10, 1, 1])
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"],
np.add(x, y.reshape([1, 10, 1, 1])))
# node_def = helper.make_node("Add", ["A", "B"], ["C"], broadcast=1)
# a = self._get_rnd([10, 10])
# b = self._get_rnd([10, 10])
# output = run_node(node_def, [a, b])
# np.testing.assert_almost_equal(output["C"], np.add(a, b))
# node_def = helper.make_node("Add", ["A", "B"], ["C"], broadcast=1)
# a = self._get_rnd([10, 10])
# b = self._get_rnd([10,])
# output = run_node(node_def, [a, b])
# np.testing.assert_almost_equal(output["C"], np.add(a, b))
def test_arg_max(self):
# TODO: need to fix this test
return
for axis in [0, 1]:
node_def = helper.make_node(
"ArgMax", ["data"], ["reduced"], axis=axis, keepdims=0)
data = self._get_rnd_float32(shape=[10, 10])
output = run_node(node_def, [data])
np.testing.assert_almost_equal(output["reduced"],
np.argmax(data, axis=axis))
def test_arg_min(self):
# TODO: need to fix this test
return
for axis in [0, 1]:
node_def = helper.make_node(
"ArgMin", ["data"], ["reduced"], axis=axis, keepdims=0)
data = self._get_rnd_float32(shape=[10, 10])
output = run_node(node_def, [data])
np.testing.assert_almost_equal(output["reduced"],
np.argmin(data, axis=axis))
def test_asinh(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Asinh.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Asinh", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.arcsinh(x))
def test_atanh(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Atanh.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Atanh", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.arctanh(x))
def test_average_pool(self):
# TODO: fix this test
return
device = "CUDA"
if not supports_device(device):
raise unittest.SkipTest(
"Backend doesn't support device {}".format(device))
shape = [1, 1, 40, 40]
node_def = helper.make_node(
"AveragePool", ["X"], ["Y"],
kernel_shape=[1, 2],
pads=[1, 1],
strides=[1, 1])
x = self._get_rnd_float32(shape=shape)
output = run_node(node_def, [x], device=device)
test_output = np.zeros(shape)
for i1 in range(0, shape[0]):
for i2 in range(0, shape[1]):
for j1 in range(0, shape[2]):
for j2 in range(0, shape[3]):
test_output[i1][i2][j1][j2] = 0
count = 0
for k in range(j2, min(j2 + 2, shape[3])):
test_output[i1][i2][j1][j2] += x[i1][i2][j1][k]
count += 1
test_output[i1][i2][j1][j2] /= count
np.testing.assert_almost_equal(output["Y"], test_output)
def _batch_normalization(self, x, mean, variance, bias, scale,
variance_epsilon):
inv = np.reciprocal(np.sqrt(variance + variance_epsilon))
if scale is not None:
inv *= scale
return x * inv + (bias - mean * inv if bias is not None else -mean * inv)
def test_batch_normalization(self):
if legacy_opset_pre_ver(6):
raise unittest.SkipTest("Backend doesn't support consumed flag")
node_def = helper.make_node(
"BatchNormalization", ["X", "scale", "bias", "mean", "var"], ["Y"],
epsilon=0.001)
x_shape = [3, 5, 4, 2]
param_shape = [5]
_param_shape = [1, 5, 1, 1]
x = self._get_rnd_float32(0, 1, shape=x_shape)
m = self._get_rnd_float32(0, 1, shape=param_shape)
_m = m.reshape(_param_shape)
v = self._get_rnd_float32(0, 1, shape=param_shape)
_v = v.reshape(_param_shape)
scale = self._get_rnd_float32(0, 1, shape=param_shape)
_scale = scale.reshape(_param_shape)
bias = self._get_rnd_float32(0, 1, shape=param_shape)
_bias = bias.reshape(_param_shape)
golden = self._batch_normalization(x, _m, _v, _bias, _scale, 0.001)
output = run_node(node_def, [x, scale, bias, m, v])
np.testing.assert_almost_equal(output["Y"], golden, decimal=5)
def test_cast(self):
if legacy_onnx_pre_ver(1, 2) or legacy_opset_pre_ver(6):
test_cases = [("FLOAT", tf.float32), ("UINT8", tf.uint8),
("INT8", tf.int8), ("UINT16", tf.uint16), ("INT16",
tf.int16),
("INT32", tf.int32), ("INT64", tf.int64), ("BOOL", tf.bool),
("FLOAT16", tf.float16), ("DOUBLE", tf.float64),
("COMPLEX64", tf.complex64), ("COMPLEX128", tf.complex128)]
else:
test_cases = [(TensorProto.FLOAT,
tf.float32), (TensorProto.UINT8,
tf.uint8), (TensorProto.INT8, tf.int8),
(TensorProto.UINT16,
tf.uint16), (TensorProto.INT16,
tf.int16), (TensorProto.INT32, tf.int32),
(TensorProto.INT64,
tf.int64), (TensorProto.BOOL,
tf.bool), (TensorProto.FLOAT16, tf.float16),
(TensorProto.DOUBLE,
tf.float64), (TensorProto.COMPLEX64,
tf.complex64), (TensorProto.COMPLEX128,
tf.complex128)]
if not legacy_opset_pre_ver(9):
test_cases.append((TensorProto.STRING, tf.string))
for ty, tf_type in test_cases:
node_def = helper.make_node("Cast", ["input"], ["output"], to=ty)
vector = [2, 3]
output = run_node(node_def, [vector])
np.testing.assert_equal(output["output"].dtype, tf_type)
if not legacy_opset_pre_ver(9):
test_cases2 = [(TensorProto.FLOAT, tf.float32), (TensorProto.INT32,
tf.int32),
(TensorProto.INT64, tf.int64), (TensorProto.DOUBLE,
tf.float64)]
for ty, tf_type in test_cases2:
node_def = helper.make_node("Cast", ["input"], ["output"], to=ty)
vector = ['2', '3']
output = run_node(node_def, [vector])
np.testing.assert_equal(output["output"].dtype, tf_type)
def test_ceil(self):
node_def = helper.make_node("Ceil", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.ceil(x))
def test_compress(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest(
"ONNX version {} doesn't support Compress.".format(
defs.onnx_opset_version()))
axis = 1
node_def = helper.make_node(
"Compress", inputs=['X', 'condition'], outputs=['Y'], axis=axis)
x = self._get_rnd_float32(shape=[5, 5, 5])
cond = np.array([1, 0, 1])
output = run_node(node_def, inputs=[x, cond])
np.testing.assert_almost_equal(output['Y'], np.compress(cond, x, axis=axis))
def test_concat(self):
shape = [10, 20, 5]
for axis in range(len(shape)):
node_def = helper.make_node("Concat", ["X1", "X2"], ["Y"], axis=axis)
x1 = self._get_rnd_float32(shape=shape)
x2 = self._get_rnd_float32(shape=shape)
output = run_node(node_def, [x1, x2])
np.testing.assert_almost_equal(output["Y"], np.concatenate((x1, x2),
axis))
  def test_constant(self):
    """Constant node with a dense tensor value and, from opset 11 on, a
    sparse tensor value."""
    shape = [16, 16]
    values = np.random.randn(*shape).flatten().astype(float)
    const2_onnx = helper.make_tensor("const2", TensorProto.DOUBLE, shape,
                                     values)
    node_def = helper.make_node("Constant", [], ["Y"], value=const2_onnx)
    output = run_node(node_def, [])
    np.testing.assert_equal(output["Y"].shape, shape)
    np.testing.assert_almost_equal(output["Y"].flatten(), values)
    # Sparse constant values (sparse_value attribute) exist from opset 11 on.
    if not legacy_opset_pre_ver(11):
      expected = np.array([[1, 0, 0, 0], [0, 0, 2, 0], [0, 0, 0, 0]])
      x = np.array([[0, 0], [1, 2]]).flatten().astype(np.int64)
      values = helper.make_tensor("values", TensorProto.INT32, [2], [1, 2])
      indices = helper.make_tensor("indices", TensorProto.INT64, [2, 2], x)
      a = helper.make_sparse_tensor(values, indices,[3, 4])
      node_def = helper.make_node("Constant", [], ["Y"], sparse_value=a)
      output = run_node(node_def, [])
      # NOTE(review): tf.sparse_to_dense and tf.Session are TF1-only APIs;
      # presumably this runs under TF1 compatibility -- verify against the
      # project's TensorFlow version.
      b = tf.sparse_to_dense(output["Y"].indices, output["Y"].dense_shape, output["Y"].values)
      result = b.eval(session=tf.Session())
      np.testing.assert_equal(result, expected)
def test_constant_fill(self):
if not legacy_opset_pre_ver(9):
raise unittest.SkipTest(
"ONNX version {} doesn't support ConstantFill.".format(
defs.onnx_opset_version()))
shape = [1, 2, 3, 4]
extra_shape = [5, 6]
value = 3.
node_def = helper.make_node(
"ConstantFill",
["X"],
["Y"],
value=value,
extra_shape=extra_shape,
dtype=1,
)
x = self._get_rnd_float32(shape=shape)
y = np.zeros(shape + extra_shape)
y.fill(value)
output = run_node(node_def, [x])
np.testing.assert_equal(output["Y"].dtype, tf.float32)
np.testing.assert_equal(output["Y"], y)
def test_constant_of_shape(self):
if defs.onnx_opset_version() < 9:
raise unittest.SkipTest(
"ONNX version {} doesn't support ConstantOfShape.".format(
defs.onnx_opset_version()))
v = helper.make_tensor("value", TensorProto.FLOAT, [1], [1])
node_def = helper.make_node("ConstantOfShape", ["X"], ["Y"], value=v)
x = np.array([4, 3, 2])
output = run_node(node_def, inputs=[x])
np.testing.assert_almost_equal(output["Y"], np.ones(x, dtype=np.float32))
v = helper.make_tensor("value", TensorProto.INT32, [1], [0])
node_def = helper.make_node("ConstantOfShape", ["X"], ["Y"], value=v)
x = np.array([10, 6])
output = run_node(node_def, inputs=[x])
np.testing.assert_almost_equal(output["Y"], np.zeros(x, dtype=np.int32))
  def test_conv(self):
    """Conv node (CUDA only) vs. a naive same-padded 2-D convolution.

    The reference below loops over batch, channel, spatial position, output
    filter and kernel offsets, skipping taps that fall outside the input.
    """
    device = "CUDA"
    if not supports_device(device):
      raise unittest.SkipTest(
          "Backend doesn't support device {}".format(device))
    N, C, H, W = 4, 3, 5, 5
    x_shape = [N, C, H, W]
    K, kH, kW = 6, 3, 3
    weight_shape = [K, C, kH, kW]
    node_def = helper.make_node(
        "Conv", ["X", "weights"], ["Y"],
        pads=[1, 1, 1, 1],
        kernel_shape=[kH, kW])
    x = self._get_rnd_float32(shape=x_shape)
    weights = self._get_rnd_float32(shape=weight_shape)
    output = run_node(node_def, [x, weights], device=device)
    out_shape = [N, K, H, W]
    test_output = np.zeros(out_shape)
    for n in range(N):
      for c in range(C):
        for h in range(H):
          for w in range(W):
            for k in range(K):
              for kh in range(kH):
                for kw in range(kW):
                  # Only accumulate taps that land inside the input plane
                  # (implicit zero padding elsewhere).
                  h_in_range = (h - kH // 2 + kh) < H and (
                      h - kH // 2 + kh) >= 0
                  w_in_range = (w - kW // 2 + kw) < W and (
                      w - kW // 2 + kw) >= 0
                  if h_in_range and w_in_range:
                    test_output[n][k][h][w] += (x[n][c][h - kH // 2 + kh][
                        w - kW // 2 + kw] * weights[k][c][kh][kw])
    np.testing.assert_almost_equal(output["Y"], test_output, decimal=5)
  def test_conv_transpose(self):
    """ConvTranspose node test -- currently disabled (early return below)."""
    # Fix test in the future.
    return
    device = "CUDA"
    if not supports_device(device):
      raise unittest.SkipTest(
          "Backend doesn't support device {}".format(device))
    node_def = helper.make_node(
        "ConvTranspose", ["X", "weights"], ["Y"], pads=[1, 1])
    x_shape = [1, 5, 4]
    # NOTE(review): uses self._get_rnd, unlike the _get_rnd_float32 helper
    # used elsewhere -- verify the helper still exists before re-enabling.
    x = self._get_rnd(x_shape)
    weight_shape = [5, 3, 2]
    weights = self._get_rnd_float32(shape=weight_shape)
    output = run_node(node_def, [x, weights], device=device)
    out_shape = [x_shape[0], weight_shape[1], x_shape[2]]
    test_output = np.zeros(out_shape)
    for b in range(0, x_shape[0]):
      for m in range(0, weight_shape[1]):
        for h in range(0, x_shape[2]):
          v = 0
          for c in range(0, x_shape[1]):
            for k in range(h, min(h + weight_shape[2], x_shape[2])):
              v += x[b][c][k] * weights[c][m][k - h]
          test_output[b][m][h] = v
    np.testing.assert_almost_equal(output["Y"], test_output, decimal=5)
def test_cosh(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Cosh.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Cosh", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.cosh(x))
def test_depth_to_space(self):
node_def = helper.make_node("DepthToSpace", ["X"], ["Y"], blocksize=2)
x_shape = [1, 12, 1, 1]
x = self._get_rnd_float32(shape=x_shape)
output = run_node(node_def, [x])
x = np.transpose(x, (0, 2, 3, 1))
y = np.reshape(np.swapaxes(x.reshape(1, 1, 1, 2, 2, 3), 2, 3), (1, 2, 2, 3))
y = np.transpose(y, (0, 3, 1, 2))
np.testing.assert_almost_equal(output["Y"], y, decimal=5)
  def test_dequantize_linear(self):
    """DequantizeLinear for int8, uint8 and int32 inputs:
    y = (x - x_zero_point) * x_scale."""
    node_def = helper.make_node("DequantizeLinear",
                                ["x", "x_scale", "x_zero_point"], ["y"])
    # Each pair is (quantized input, matching zero point); int32 inputs use
    # a fixed zero point of 0.
    for x, x_zero_point in [
        [
            self._get_rnd_int(-128, 127, [2, 6], np.int8),
            self._get_rnd_int(-128, 127, dtype=np.int8)
        ],
        [
            self._get_rnd_int(0, 255, [2, 6], np.uint8),
            self._get_rnd_int(0, 255, dtype=np.uint8)
        ],
        [
            self._get_rnd_int(-512, 512, [2, 6]),
            np.int32(0)
        ]
    ]:
      x_scale = self._get_rnd_float32(-10., 10)
      # Reference computed in float32 to avoid integer overflow.
      y = np.subtract(np.float32(x), np.float32(x_zero_point))
      y = np.multiply(y, x_scale)
      output = run_node(node_def, [x, x_scale, x_zero_point])
      np.testing.assert_almost_equal(output["y"], y)
def test_div(self):
node_def = helper.make_node("Div", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=[10, 10])
y = self._get_rnd_float32(shape=[10, 10])
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"], np.divide(x, y))
def test_dropout(self):
# Since current ONNX only support inference and
# dropout at inference mode is a no-op,
# therefore dropout is always a no-op operator
# in ONNX.
node_def = helper.make_node("Dropout", ["X"], ["Y"])
if legacy_opset_pre_ver(7):
# at inference mode, is_test is always set to 1
node_def = helper.make_node("Dropout", ["X"], ["Y"], is_test=1)
x = self._get_rnd_float32(shape=[3, 4, 5])
y = x
output = run_node(node_def, [x])
np.testing.assert_equal(output["Y"], y)
  def test_dot(self):
    """Dot node test -- permanently disabled: the op was removed from ONNX."""
    # this op is removed
    # remove this test in the future
    return
    node_def = helper.make_node("Dot", ["X", "Y"], ["Z"])
    x = np.floor(self._get_rnd_float32(shape=[10, 10]))
    y = np.floor(self._get_rnd_float32(shape=[10, 10]))
    output = run_node(node_def, [x, y])
    np.testing.assert_almost_equal(output["Z"], np.dot(x, y))
def test_elu(self):
node_def = helper.make_node("Elu", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[100])
output = run_node(node_def, [x])
test_output = [self._elu(a) for a in x]
np.testing.assert_almost_equal(output["Y"], test_output)
def test_equal(self):
node_def = helper.make_node("Equal", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=[5, 3, 3, 2])
y = self._get_rnd_float32(shape=[3, 3, 1])
output = run_node(node_def, [x, y])
np.testing.assert_equal(output["Z"], np.equal(x, np.reshape(
y, [1, 3, 3, 1])))
def test_erf(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Erf.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Erf", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
exp_output = np.vectorize(math.erf)(x).astype(np.float32)
np.testing.assert_almost_equal(output["Y"], exp_output)
def test_exp(self):
node_def = helper.make_node("Exp", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[100])
x = x - 3.6
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.exp(x))
def test_eye_like(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support EyeLike.".format(
defs.onnx_opset_version()))
for shape in [[6, 10], [10, 6]]:
for off_diagonal_offset in [-10, -6, -3, 0, 3, 6, 7, 10]:
node_def = helper.make_node(
"EyeLike", ['x'], ['y'], dtype=1, k=off_diagonal_offset)
x = self._get_rnd_int(0, 100, shape=shape)
y = np.eye(shape[0], shape[1], k=off_diagonal_offset, dtype=np.float32)
output = run_node(node_def, [x])
np.testing.assert_equal(output['y'], y)
  def test_flatten(self):
    """Flatten with the default axis collapses everything after dim 0."""
    # If input tensor has shape (d_0, d_1, ... d_n) then the
    # output will have shape:
    #
    # (d_0 X d_1 ... d_(axis-1), d_axis X d_(axis+1) ... X dn)
    #
    # TODO: pass axis attribute which is supported in newer
    # versions of onnx
    node_def = helper.make_node("Flatten", ["X"], ["Y"])
    x = self._get_rnd_float32(shape=[10, 2, 3, 4, 5])
    output = run_node(node_def, [x])
    # TODO: pass axis=3 and uncomment the line below
    # np.testing.assert_almost_equal(output["Y"], x.reshape([60, 20]))
    np.testing.assert_almost_equal(output["Y"], x.reshape([10, 120]))
  def test_gather(self):
    """Gather with 2-D indices [[0, 1], [1, 2]]: output[i][j] selects row
    indices[i][j] of x."""
    node_def = helper.make_node("Gather", ["X", "Y"], ["Z"])
    x = self._get_rnd_float32(shape=[10, 10])
    y = [[0, 1], [1, 2]]
    output = run_node(node_def, [x, y])
    test_output = np.zeros((2, 2, 10))
    # First index row selects x rows 0 and 1.
    for i in range(0, 2):
      for j in range(0, 10):
        test_output[0][i][j] = x[i][j]
    # Second index row selects x rows 1 and 2 (hence the i + 1 offset).
    for i in range(0, 2):
      for j in range(0, 10):
        test_output[1][i][j] = x[i + 1][j]
    np.testing.assert_almost_equal(output["Z"], test_output)
def test_gemm(self):
# Compute Y = alpha * A * B + beta * C
node_def = helper.make_node(
"Gemm", ["A", "B", "C"], ["Y"], transA=0, transB=0, alpha=1.0, beta=1.0)
x = np.floor(self._get_rnd_float32(shape=[10, 10]))
y = np.floor(self._get_rnd_float32(shape=[10, 10]))
z = np.floor(self._get_rnd_float32(shape=[10, 10]))
output = run_node(node_def, [x, y, z])
test_output = np.matmul(x, y) + z
np.testing.assert_almost_equal(output["Y"], test_output)
def test_global_average_pool(self):
# Image case: (N x C x H x W), where N is the batch size,
# C is the number of channels, and H and W are the height
# and the width of the data
#
# Non-image case: (N x C x D1 x D2 ... Dn)
#
# Output data tensor from pooling across the input tensor.
# Dimensions will be N x C x 1 x 1
node_def = helper.make_node("GlobalAveragePool", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[10, 10, 2, 3])
output = run_node(node_def, [x])
test_output = np.zeros([10, 10, 1, 1])
for i1 in range(0, 10):
for i2 in range(0, 10):
sum = 0
for j1 in range(0, 2):
for j2 in range(0, 3):
sum += x[i1][i2][j1][j2]
test_output[i1][i2][0][0] = sum / 6.
np.testing.assert_almost_equal(output["Y"], test_output)
  def test_image_sacler(self):
    """ImageScaler: multiply input by a scalar scale, then add a per-channel
    bias.

    NOTE(review): the method name misspells "scaler" as "sacler"; renaming
    would change the test's discovered name, so it is only flagged here.
    """
    # Input: (N x C x H x W), where N is the batch size,
    # C is the number of channels, and H and W are the height
    # and the width of the data
    # Scale: (float, default 1.0) the scale to apply
    # Bias: applied to each channel, same size as C
    # Output has same shape and type as input
    x = self._get_rnd_float32(shape=[1, 3, 224, 224])
    # random distribution over [0,1), so add 0.1 to keep the scale nonzero
    scale = np.random.rand(1)[0] + 0.1
    bias = np.random.rand(3)
    node_def = helper.make_node(
        "ImageScaler", ["X"], ["Y"], scale=scale, bias=bias)
    output = run_node(node_def, [x])
    test_out = np.multiply(x, scale)
    # Transpose to NHWC so the C-sized bias broadcasts over the last axis.
    test_out = np.transpose(test_out, [0, 2, 3, 1])
    test_out = np.add(test_out, bias)
    test_out = np.transpose(test_out, [0, 3, 1, 2])
    np.testing.assert_almost_equal(output["Y"], test_out)
def test_is_inf(self):
if legacy_opset_pre_ver(10):
raise unittest.SkipTest("ONNX version {} doesn't support IsInf.".format(
defs.onnx_opset_version()))
input = np.array(
[-1.2, np.nan, np.inf, 2.8, np.NINF, np.inf], dtype=np.float32)
expected_output = {
"node_def": np.isinf(input),
"node_def_neg_false": np.isposinf(input),
"node_def_pos_false": np.isneginf(input)
}
node_defs = {
"node_def":
helper.make_node("IsInf", ["X"], ["Y"]),
"node_def_neg_false":
helper.make_node("IsInf", ["X"], ["Y"], detect_negative=0),
"node_def_pos_false":
helper.make_node("IsInf", ["X"], ["Y"], detect_positive=0)
}
for key in node_defs:
output = run_node(node_defs[key], [input])
np.testing.assert_equal(output["Y"], expected_output[key])
def test_isnan(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support IsNaN.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("IsNaN", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 3])
x[0][1] = x[1][0] = x[2][2] = np.nan
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.isnan(x))
def test_global_lp_pool(self):
# Image case: (N x C x H x W), where N is the batch size,
# C is the number of channels, and H and W are the height
# and the width of the data
#
# Non-image case: (N x C x D1 x D2 ... Dn)
#
# Output data tensor from pooling across the input tensor.
# Dimensions will be N x C x 1 x 1
node_def = helper.make_node("GlobalLpPool", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[10, 10, 2, 3])
output = run_node(node_def, [x])
test_output = np.zeros([10, 10, 1, 1])
for i1 in range(0, 10):
for i2 in range(0, 10):
tmp = np.zeros([2, 3])
for j1 in range(0, 2):
for j2 in range(0, 3):
tmp[j1][j2] = x[i1][i2][j1][j2]
test_output[i1][i2][0][0] = np.linalg.norm(tmp)
np.testing.assert_almost_equal(output["Y"], test_output, decimal=5)
def test_global_max_pool(self):
# Image case: (N x C x H x W), where N is the batch size,
# C is the number of channels, and H and W are the height
# and the width of the data
#
# Non-image case: (N x C x D1 x D2 ... Dn)
#
# Output data tensor from pooling across the input tensor.
# Dimensions will be N x C x 1 x 1
node_def = helper.make_node("GlobalMaxPool", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[10, 10, 2, 3])
output = run_node(node_def, [x])
test_output = np.zeros([10, 10, 1, 1])
for i1 in range(0, 10):
for i2 in range(0, 10):
max = x[i1][i2][0][0]
for j1 in range(0, 2):
for j2 in range(0, 3):
if max < x[i1][i2][j1][j2]:
max = x[i1][i2][j1][j2]
test_output[i1][i2][0][0] = max
np.testing.assert_almost_equal(output["Y"], test_output)
def test_less(self):
node_def = helper.make_node("Less", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=[5, 3, 3, 2])
y = self._get_rnd_float32(shape=[3, 3, 1])
output = run_node(node_def, [x, y])
np.testing.assert_equal(output["Z"], np.less(x, np.reshape(y,
[1, 3, 3, 1])))
def test_lp_normalization(self):
for ordr in range(1, 3):
node_def = helper.make_node("LpNormalization", ["X"], ["Y"], p=ordr)
x = self._get_rnd([2, 2, 3, 2])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"],
x / np.expand_dims(np.linalg.norm(x, axis=-1, ord=ordr), -1),
rtol=1e-3)
  def test_l_r_n(self):
    """LRN node vs. a naive channel-wise local response normalization."""
    # Each input value is divided by:
    #
    # (bias+(alpha/size)*sum(xi^2 for every xi in the local region))^beta
    alpha = 2.0
    beta = 1.0
    bias = 5.0
    size = 3
    node_def = helper.make_node(
        "LRN", ["X"], ["Y"], alpha=alpha, beta=beta, bias=bias, size=size)
    x = self._get_rnd_float32(shape=[10, 2, 10, 10])
    output = run_node(node_def, [x])
    test_output = np.zeros([10, 10, 10, 2])
    # Compute the reference in NHWC (channels last), then transpose back.
    x = np.transpose(x, axes=[0, 2, 3, 1])
    for i1 in range(0, 10):
      for i2 in range(0, 10):
        for j1 in range(0, 10):
          for j2 in range(0, 2):
            sqr_sum = 0.
            # size of 3 means radius 1 in TF speak
            # i.e. the immediate neighbouring values
            # if "previous" neighbour exists
            if j2 > 0:
              sqr_sum += x[i1][i2][j1][j2 - 1] * x[i1][i2][j1][j2 - 1]
            # current value
            sqr_sum += x[i1][i2][j1][j2] * x[i1][i2][j1][j2]
            # if "next" neighbour exists
            if j2 < 2 - 1:
              sqr_sum += x[i1][i2][j1][j2 + 1] * x[i1][i2][j1][j2 + 1]
            test_output[i1][i2][j1][j2] = \
              x[i1][i2][j1][j2] / ((bias + (alpha * 1. / size) * sqr_sum) ** beta)
    test_output = np.transpose(test_output, axes=[0, 3, 1, 2])
    np.testing.assert_almost_equal(output["Y"], test_output)
def test_floor(self):
node_def = helper.make_node("Floor", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[100])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.floor(x))
def test_leakyrelu(self):
node_def = helper.make_node("LeakyRelu", ["X"], ["Y"], alpha=0.8)
x = np.floor(self._get_rnd_float32(shape=[100]))
output = run_node(node_def, [x])
test_output = [self._leaky_relu(a, 0.8) for a in x]
np.testing.assert_almost_equal(output["Y"], test_output)
def test_log(self):
node_def = helper.make_node("Log", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[100])
x = x + 3.6
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.log(x))
def test_max(self):
node_def = helper.make_node("Max", ["X1", "X2", "X3", "X4"], ["Z"])
x1 = self._get_rnd_float32(shape=[10, 10])
x2 = self._get_rnd_float32(shape=[10, 10])
x3 = self._get_rnd_float32(shape=[10, 10])
x4 = self._get_rnd_float32(shape=[10, 10])
output = run_node(node_def, [x1, x2, x3, x4])
test_output = np.maximum(np.maximum(np.maximum(x1, x2), x3), x4)
np.testing.assert_almost_equal(output["Z"], test_output)
  def test_max_pool(self):
    """MaxPool node test -- currently disabled (early return below)."""
    return
    node_def = helper.make_node(
        "MaxPool", ["X"], ["Y"],
        dilations=[1, 1],
        kernel_shape=[1, 2],
        pads=[0, 0],
        strides=[1, 2])
    x = self._get_rnd_float32(shape=[10, 10, 4, 4])
    output = run_node(node_def, [x])
    test_output = np.zeros([10, 10, 4, 2])
    # Reference: max over non-overlapping 1x2 windows along the last axis.
    for i1 in range(0, 10):
      for i2 in range(0, 10):
        for j1 in range(0, 4):
          for j2 in range(0, 2):
            test_output[i1][i2][j1][j2] = \
              max(x[i1][i2][j1][2*j2], x[i1][i2][j1][2*j2 + 1])
    np.testing.assert_almost_equal(output["Y"], test_output)
def test_mean_variance_normalization(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest(
"ONNX version {} doesn't have test for MeanVarianceNormalization"
.format(defs.onnx_opset_version()))
input_data = self._get_rnd_float32(shape=[2,2,2,2])
mean = np.mean(input_data, keepdims=1, axis=(0,2,3))
std = np.std(input_data, keepdims=1, axis=(0,2,3))
expected_output = (input_data - mean) / std
node_def = helper.make_node("MeanVarianceNormalization", ["X"], ["Y"])
output = run_node(node_def, [input_data])
np.testing.assert_almost_equal(output["Y"], expected_output, decimal=5)
def test_min(self):
node_def = helper.make_node("Min", ["X1", "X2", "X3", "X4"], ["Z"])
x1 = self._get_rnd_float32(shape=[10, 10])
x2 = self._get_rnd_float32(shape=[10, 10])
x3 = self._get_rnd_float32(shape=[10, 10])
x4 = self._get_rnd_float32(shape=[10, 10])
output = run_node(node_def, [x1, x2, x3, x4])
test_output = np.minimum(np.minimum(np.minimum(x1, x2), x3), x4)
np.testing.assert_almost_equal(output["Z"], test_output)
def test_mul(self):
node_def = helper.make_node("Mul", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=[5, 10, 5, 5])
y = self._get_rnd_float32(shape=[10, 1, 1])
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"],
np.multiply(x, y.reshape([1, 10, 1, 1])))
def test_mod(self):
if legacy_opset_pre_ver(10):
raise unittest.SkipTest("ONNX version {} doesn't support Mod.".format(
defs.onnx_opset_version()))
x = self._get_rnd_float32(shape=[5, 5])
y = self._get_rnd_float32(shape=[5, 5])
node_def = helper.make_node("Mod", ["X", "Y"], ["Z"], fmod=0)
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"], np.mod(x, y))
node_def = helper.make_node("Mod", ["X", "Y"], ["Z"], fmod=1)
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"], np.fmod(x, y))
def test_neg(self):
node_def = helper.make_node("Neg", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.negative(x))
def test_non_zero(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support NonZero.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("NonZero", ["x"], ["y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
y = np.array(np.nonzero(x))
output = run_node(node_def, [x])
np.testing.assert_equal(output["y"], y)
def test_onehot(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support OneHot.".format(
defs.onnx_opset_version()))
indices = np.array([[0, 2], [1, 2], [0, 1]])
depth = np.int32(5)
on_value = 6.0
off_value = 2.0
values = np.array([off_value, on_value])
node_def = helper.make_node(
'OneHot', inputs=['indices', 'depth', 'values'], outputs=['y'], axis=-1)
y = (np.arange(depth) == indices[..., None]).astype(int)
y = y * (on_value - off_value) + off_value
output = run_node(node_def, inputs=[indices, depth, values])
np.testing.assert_equal(output['y'], y)
def test_range(self):
if legacy_opset_pre_ver(11):
raise unittest.SkipTest("ONNX version {} doesn't support Range.".format(
defs.onnx_opset_version()))
node_def = helper.make_node(
"Range", ['start', 'limit', 'delta'], ['y'])
start = self._get_rnd_int(low=0, high=3)
limit = self._get_rnd_int(low=10, high=30)
delta = np.int32(3)
output = run_node(node_def, [start, limit, delta])
np.testing.assert_equal(output['y'], range(start, limit, delta))
start = self._get_rnd_int(low=20, high=30)
limit = self._get_rnd_int(low=1, high=5)
delta = np.int32(-2)
output = run_node(node_def, [start, limit, delta])
np.testing.assert_equal(output['y'], range(start, limit, delta))
def test_round(self):
if legacy_opset_pre_ver(11):
raise unittest.SkipTest("ONNX version {} doesn't support Round.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Round", ["X"], ["Y"])
x = self._get_rnd_float32(-20.0, 20.0, shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.round(x))
def test_relu(self):
node_def = helper.make_node("Relu", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.maximum(x, 0))
def test_pad(self):
node_def = helper.make_node(
"Pad", ["X"], ["Y"], mode="constant", pads=[1, 1, 1, 1], value=2.0)
x = self._get_rnd_float32(shape=[100, 100])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"],
np.lib.pad(
x, ((1, 1), (1, 1)),
'constant',
constant_values=(2, 2)))
  def test_quantize_linear(self):
    """QuantizeLinear for float and int inputs against a numpy reference:
    y = clip(round(x / y_scale) + y_zero_point) in the zero point's dtype."""
    node_def = helper.make_node("QuantizeLinear",
                                ["x", "y_scale", "y_zero_point"], ["y"])
    for x in [
        self._get_rnd_float32(-512., 512., [2, 6]),
        self._get_rnd_int(-512, 512, [2, 6])
    ]:
      y_scale = self._get_rnd_float32(-10., 10.)
      # Both supported quantized dtypes: signed and unsigned 8-bit.
      for y_zero_point in [
          self._get_rnd_int(-128, 127, dtype=np.int8),
          self._get_rnd_int(0, 255, dtype=np.uint8)
      ]:
        y = np.divide(x, y_scale)
        y = np.round(y)
        y = np.add(y, y_zero_point)
        # Clip to the zero point's dtype range before casting.
        if y_zero_point.dtype.type is np.int8:
          y = np.clip(y, -128, 127).astype(np.int8)
        else:
          y = np.clip(y, 0, 255).astype(np.uint8)
        output = run_node(node_def, [x, y_scale, y_zero_point])
        np.testing.assert_almost_equal(output["y"], y)
def test_reciprocal(self):
node_def = helper.make_node("Reciprocal", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], 1.0 / x)
def test_reduce_l1(self):
node_def = helper.make_node("ReduceL1", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"],
np.linalg.norm(x, 1, (1, 2), True))
def test_reduce_log_sum_exp(self):
node_def = helper.make_node("ReduceLogSumExp", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"],
np.log(np.sum(np.exp(x), axis=(1, 2), keepdims=True)),
rtol=1e-3)
def test_reduce_max(self):
node_def = helper.make_node("ReduceMax", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"], np.max(x, (1, 2), keepdims=True), rtol=1e-3)
def test_reduce_mean(self):
node_def = helper.make_node("ReduceMean", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"], np.mean(x, (1, 2), keepdims=True), rtol=1e-3)
def test_reduce_min(self):
node_def = helper.make_node("ReduceMin", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"], np.min(x, (1, 2), keepdims=True), rtol=1e-3)
def test_reduce_prod(self):
node_def = helper.make_node("ReduceProd", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[1, 5, 5, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"], np.prod(x, (1, 2), keepdims=True), rtol=1e-3)
def test_reduce_sum(self):
node_def = helper.make_node("ReduceSum", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"], np.sum(x, (1, 2), keepdims=True), rtol=1e-3)
def test_reduce_sum_square(self):
node_def = helper.make_node("ReduceSumSquare", ["X"], ["Y"], axes=[1, 2])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(
output["Y"], np.sum(np.square(x), (1, 2), keepdims=True), rtol=1e-3)
def test_pow(self):
node_def = helper.make_node("Pow", ["X", "Y"], ["Z"])
x = self._get_rnd_float32(shape=1000) / 2.0 + 0.5
y = self._get_rnd_float32(shape=1000) / 2.0 + 0.5
output = run_node(node_def, [x, y])
np.testing.assert_almost_equal(output["Z"], np.power(x, y))
def test_reshape(self):
x = self._get_rnd_float32(shape=100)
shape = [10, 10]
if defs.onnx_opset_version() < 5:
node_def = helper.make_node("Reshape", ["X"], ["Z"], shape=shape)
output = run_node(node_def, [x])
else:
node_def = helper.make_node("Reshape", ["X", "Y"], ["Z"])
output = run_node(node_def, [x, shape])
np.testing.assert_almost_equal(output["Z"], x.reshape([10, 10]))
def test_reshape_with_copy(self):
x = self._get_rnd_float32(shape=[10, 20 * 30])
shape = [0, 20, 30]
if defs.onnx_opset_version() < 5:
node_def = helper.make_node("Reshape", ["X"], ["Z"], shape=shape)
output = run_node(node_def, [x])
else:
node_def = helper.make_node("Reshape", ["X", "Y"], ["Z"])
output = run_node(node_def, [x, shape])
np.testing.assert_almost_equal(output["Z"], x.reshape([10, 20, 30]))
def test_selu(self):
node_def = helper.make_node("Selu", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
alpha = 1.6732
gamma = 1.0507
x[x <= 0] = gamma * (alpha * np.exp(x[x <= 0]) - alpha)
x[x > 0] = gamma * x[x > 0]
np.testing.assert_allclose(output["Y"], x, rtol=1e-3, atol=1e-7)
def test_shape(self):
node_def = helper.make_node("Shape", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_allclose(output["Y"], np.shape(x))
def test_shrink(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Shrink.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Shrink", ["X"], ["Y"], bias=1.5, lambd=1.5)
X = np.arange(-2.0, 2.1, dtype=np.float32)
Y = np.array([-0.5, 0, 0, 0, 0.5], dtype=np.float32)
output = run_node(node_def, [X])
np.testing.assert_almost_equal(output["Y"], Y)
def test_sigmoid(self):
node_def = helper.make_node("Sigmoid", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[1000])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], 1 / (1 + np.exp(-x)))
def test_sign(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Sign.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Sign", ["X"], ["Y"])
x = self._get_rnd_float32(-10, 10, [3, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.sign(x))
def test_sinh(self):
if legacy_opset_pre_ver(9):
raise unittest.SkipTest("ONNX version {} doesn't support Sinh.".format(
defs.onnx_opset_version()))
node_def = helper.make_node("Sinh", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[3, 4, 5])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.sinh(x))
def test_size(self):
node_def = helper.make_node("Size", ["X"], ["Y"])
x = self._get_rnd_float32(shape=[5, 10, 10, 3])
output = run_node(node_def, [x])
np.testing.assert_almost_equal(output["Y"], np.size(x))
def test_slice(self):
    """Slice op: attribute form (opset < 10) vs tensor-input form (>= 10).

    Covers three cases: a basic positive slice, negative/out-of-range
    bounds, and (opset >= 10 only) non-unit and negative steps.
    """
    # Case 1: basic positive slice along all three axes.
    axes = [0, 1, 2]
    starts = [0, 0, 0]
    ends = [2, 2, 2]
    steps = [1, 1, 1]
    x = self._get_rnd_float32(shape=[1000]).reshape([10, 10, 10])
    if legacy_opset_pre_ver(10):
        node_def = helper.make_node(
            "Slice", ["X"], ["S"], axes=axes, starts=starts, ends=ends)
        output = run_node(node_def, [x])
    else:
        node_def = helper.make_node(
            "Slice", ["X", "starts", "ends", "axes", "steps"], ["S"])
        output = run_node(node_def, [x, starts, ends, axes, steps])
    np.testing.assert_almost_equal(output["S"], x[0:2, 0:2, 0:2])

    # Case 2: negative / past-the-end starts and ends (no steps input).
    axes = [0, 2]
    starts = [0, -7]
    ends = [-8, 20]
    x = self._get_rnd_float32(shape=[1000]).reshape([10, 10, 10])
    if legacy_opset_pre_ver(10):
        node_def = helper.make_node(
            "Slice", ["X"], ["S"], axes=axes, starts=starts, ends=ends)
        output = run_node(node_def, [x])
    else:
        node_def = helper.make_node(
            "Slice", ["X", "starts", "ends", "axes"], ["S"])
        output = run_node(node_def, [x, starts, ends, axes])
    np.testing.assert_almost_equal(output["S"], x[0:-8, :, -7:20])

    # Case 3: non-unit / negative steps are only expressible with the
    # opset-10 input form.  (Was `if legacy_opset_pre_ver(10) == False:`;
    # `if not ...:` is the idiomatic equivalent.)
    if not legacy_opset_pre_ver(10):
        axes = [0, 1, 2]
        starts = [0, 0, 0]
        ends = [2, 2, 2]
        steps = [2, -2, -1]
        node_def = helper.make_node(
            "Slice", ["X", "starts", "ends", "axes", "steps"], ["S"])
        x = self._get_rnd_float32(shape=[1000]).reshape([10, 10, 10])
        output = run_node(node_def, [x, starts, ends, axes, steps])
        np.testing.assert_almost_equal(output["S"], x[0:2:2, 0:2:-2, 0:2:-1])
def test_softplus(self):
    """Softplus op vs log(exp(x) + 1)."""
    node_def = helper.make_node("Softplus", ["X"], ["Y"])
    data = self._get_rnd_float32(shape=[3, 4, 5])
    result = run_node(node_def, [data])
    np.testing.assert_almost_equal(result["Y"], np.log(np.exp(data) + 1))
def test_softsign(self):
    """Softsign op vs x / (1 + |x|)."""
    node_def = helper.make_node("Softsign", ["X"], ["Y"])
    data = self._get_rnd_float32(shape=[3, 4, 5])
    result = run_node(node_def, [data])
    np.testing.assert_almost_equal(result["Y"], data / (1 + np.abs(data)))
def test_space_to_depth(self):
    """SpaceToDepth with blocksize 2 against a NumPy re-layout."""
    node_def = helper.make_node("SpaceToDepth", ["X"], ["Y"], blocksize=2)
    data = self._get_rnd_float32(shape=[1, 3, 2, 2])
    result = run_node(node_def, [data])
    # Build the expected value in NHWC layout, then transpose back to NCHW.
    nhwc = np.transpose(data, (0, 2, 3, 1))
    expected = np.reshape(
        np.swapaxes(nhwc.reshape(1, 1, 1, 1, 1, 12), 2, 3), (1, 1, 1, 12))
    expected = np.transpose(expected, (0, 3, 1, 2))
    np.testing.assert_allclose(result["Y"], expected, rtol=1e-3)
def test_split(self):
    """Split along axis 0 with uneven sizes, checked via np.split."""
    sizes = [3, 3, 4]
    node_def = helper.make_node(
        "Split", ["X"], ["Z%i" % i for i in range(len(sizes))],
        axis=0,
        split=sizes)
    data = self._get_rnd_float32(shape=[100]).reshape([10, 10])
    result = run_node(node_def, [data])
    # np.split at the cumulative offsets yields one trailing empty chunk;
    # drop it with [:-1].
    expected_parts = np.split(data, np.cumsum(sizes))[:-1]
    for actual, expected in zip(list(result), expected_parts):
        np.testing.assert_almost_equal(actual, expected)
def test_sqrt(self):
    """Sqrt op on strictly positive inputs vs np.sqrt."""
    node_def = helper.make_node("Sqrt", ["X"], ["Y"])
    data = self._get_rnd_float32(shape=[1000]) + 1.0
    result = run_node(node_def, [data])
    np.testing.assert_almost_equal(result["Y"], np.sqrt(data), decimal=5)
def test_squeeze(self):
    """Squeeze op removing axis 2 vs np.squeeze."""
    node_def = helper.make_node("Squeeze", ["X"], ["Y"], axes=[2])
    data = np.array([[[0], [1], [2]]])
    result = run_node(node_def, [data])
    np.testing.assert_almost_equal(result["Y"], np.squeeze(data, axis=2))
def test_sub(self):
    """Sub op vs np.subtract on same-shaped operands."""
    node_def = helper.make_node("Sub", ["X", "Y"], ["Z"])
    minuend = self._get_rnd_float32(shape=[10, 10])
    subtrahend = self._get_rnd_float32(shape=[10, 10])
    result = run_node(node_def, [minuend, subtrahend])
    np.testing.assert_almost_equal(result["Z"],
                                   np.subtract(minuend, subtrahend))
def test_sum(self):
    """Sum op over four inputs vs elementwise addition."""
    node_def = helper.make_node("Sum", ["X1", "X2", "X3", "X4"], ["Z"])
    inputs = [self._get_rnd_float32(shape=[10, 10]) for _ in range(4)]
    result = run_node(node_def, inputs)
    expected = inputs[0] + inputs[1] + inputs[2] + inputs[3]
    np.testing.assert_almost_equal(result["Z"], expected)
def test_tanh(self):
    """Tanh op vs np.tanh."""
    node_def = helper.make_node("Tanh", ["X"], ["Y"])
    data = self._get_rnd_float32(shape=[1000]) + 1.0
    result = run_node(node_def, [data])
    np.testing.assert_almost_equal(result["Y"], np.tanh(data), decimal=5)
def test_thresholded_relu(self):
    """ThresholdedRelu: values <= alpha map to 0, others pass through."""
    alpha = 2.0
    node_def = helper.make_node(
        "ThresholdedRelu", ["X"], ["Y"], alpha=alpha)
    data = self._get_rnd_float32(-3.0, 3.0, [10])
    # Clip to [alpha, inf) then zero out the clipped entries, so only
    # values strictly greater than alpha survive.
    expected = np.clip(data, alpha, np.inf)
    expected[expected == alpha] = 0
    result = run_node(node_def, [data])
    np.testing.assert_almost_equal(result["Y"], expected)
def test_tile(self):
    """Tile op vs np.tile (requires ONNX >= 1.2 for a correct opset record)."""
    if legacy_onnx_pre_ver(1, 2):
        raise unittest.SkipTest(
            "The current version of ONNX does not record correctly the opset of Tile."
        )
    node_def = helper.make_node("Tile", ["X1", "X2"], ["Z"])
    data = self._get_rnd_float32(shape=[3, 5, 5, 3])
    repeats = [1, 1, 2, 1]
    result = run_node(node_def, [data, repeats])
    np.testing.assert_allclose(result["Z"], np.tile(data, repeats), rtol=1e-3)
def test_transpose(self):
    """Transpose op with perm [0, 2, 1] vs np.transpose."""
    node_def = helper.make_node("Transpose", ["X"], ["Y"], perm=[0, 2, 1])
    data = self._get_rnd_float32(shape=[1000]).reshape([10, 10, 10])
    result = run_node(node_def, [data])
    np.testing.assert_almost_equal(result["Y"], np.transpose(data, (0, 2, 1)))
def test_topk(self):
    """TopK across the three opset signatures (attr k / input k / flags)."""
    x = np.arange(15, dtype=np.float32).reshape(3, 5)
    values = np.array([[4, 3], [9, 8], [14, 13]], dtype=np.float32)
    indices = np.array([[4, 3], [4, 3], [4, 3]], dtype=np.int64)
    if legacy_opset_pre_ver(10):
        # Opset < 10: k is an attribute.
        node_def = helper.make_node("TopK", ["x"], ["values", "indices"], k=2)
        output = run_node(node_def, [x])
    elif legacy_opset_pre_ver(11):
        # Opset 10: k becomes a tensor input.
        k = np.array([2], dtype=np.int64)
        node_def = helper.make_node("TopK", ["x", "k"], ["values", "indices"])
        output = run_node(node_def, [x, k])
    else:
        # Opset >= 11: exercise largest=0 / sorted=0 on an unsorted input.
        x = np.array([[3, 2, 5, 10, 7], [12, 15, 10, 7, 20], [21, 16, 5, 3, 6]],
                     dtype=np.float32)
        values = np.array([[3, 2], [10, 7], [5, 3]], dtype=np.float32)
        indices = np.array([[0, 1], [2, 3], [2, 3]], dtype=np.int64)
        k = np.array([2], dtype=np.int64)
        node_def = helper.make_node(
            "TopK", ["x", "k"], ["values", "indices"], largest=0, sorted=0)
        output = run_node(node_def, [x, k])
    np.testing.assert_almost_equal(output["values"], values)
    np.testing.assert_almost_equal(output["indices"], indices)
def test_where(self):
    """Where op: elementwise select between X and Y by condition C."""
    if legacy_opset_pre_ver(9):
        raise unittest.SkipTest("ONNX version {} doesn't support Where.".format(
            defs.onnx_opset_version()))
    node_def = helper.make_node("Where", ["C", "X", "Y"], ["Z"])
    # Fixed: the deprecated np.bool alias was removed in NumPy 1.24;
    # np.bool_ is the supported boolean scalar type.
    c = np.array([[1, 0], [1, 1]], dtype=np.bool_)
    x = np.array([[1, 2], [3, 4]], dtype=np.float32)
    y = np.array([[9, 8], [7, 6]], dtype=np.float32)
    output = run_node(node_def, [c, x, y])
    np.testing.assert_almost_equal(output["Z"], np.where(c, x, y))
if __name__ == '__main__':
    # Run the full backend-node test suite when executed as a script.
    unittest.main()
| true | true |
f738f76413d33197ccb79eae071324cc3b4852b8 | 85 | py | Python | django/django_fundamentals/django_intro/SemiRestfulTvShows/shows/apps.py | fatimaalheeh/python_stack | 9ba84e6dc030a65494f105152a97f0a38aa2e4f3 | [
"MIT"
] | 1 | 2021-07-21T23:50:14.000Z | 2021-07-21T23:50:14.000Z | django/app/shows/apps.py | danieltorrescode/themoviedb | b963b15c2812a33ff707a8077e99106204efe023 | [
"CNRI-Python"
] | 26 | 2021-02-08T07:57:42.000Z | 2022-03-01T02:07:58.000Z | django/app/shows/apps.py | danieltorrescode/themoviedb | b963b15c2812a33ff707a8077e99106204efe023 | [
"CNRI-Python"
] | null | null | null | from django.apps import AppConfig
class ShowsConfig(AppConfig):
name = 'shows'
| 14.166667 | 33 | 0.741176 | from django.apps import AppConfig
class ShowsConfig(AppConfig):
name = 'shows'
| true | true |
f738f7bdd36618d581b5ca730bc93d8921e26533 | 9,321 | py | Python | src/lib/Bcfg2/Server/Reports/reports/templatetags/bcfg2_tags.py | espro/bcfg2 | 8470a15940309428d9286f2ef6372749dd14bddf | [
"mpich2"
] | null | null | null | src/lib/Bcfg2/Server/Reports/reports/templatetags/bcfg2_tags.py | espro/bcfg2 | 8470a15940309428d9286f2ef6372749dd14bddf | [
"mpich2"
] | null | null | null | src/lib/Bcfg2/Server/Reports/reports/templatetags/bcfg2_tags.py | espro/bcfg2 | 8470a15940309428d9286f2ef6372749dd14bddf | [
"mpich2"
] | null | null | null | from django import template
from django.core.urlresolvers import resolve, reverse, Resolver404, NoReverseMatch
from django.utils.encoding import smart_unicode, smart_str
from datetime import datetime, timedelta
from Bcfg2.Server.Reports.utils import filter_list
register = template.Library()
__PAGE_NAV_LIMITS__ = (10, 25, 50, 100)
@register.inclusion_tag('widgets/page_bar.html', takes_context=True)
def page_navigator(context):
    """
    Creates paginated links.

    Expects the context to be a RequestContext and
    views.prepare_paginated_list() to have populated page information.
    """
    fragment = dict()
    try:
        path = context['request'].META['PATH_INFO']
        total_pages = int(context['total_pages'])
        records_per_page = int(context['records_per_page'])
    except (KeyError, ValueError):
        # Missing or malformed pagination data: render nothing.
        return fragment

    if total_pages < 2:
        return {}

    try:
        view, args, kwargs = resolve(path)
        current_page = int(kwargs.get('page_number', 1))
        fragment['current_page'] = current_page
        fragment['page_number'] = current_page
        fragment['total_pages'] = total_pages
        fragment['records_per_page'] = records_per_page
        if current_page > 1:
            kwargs['page_number'] = current_page - 1
            fragment['prev_page'] = reverse(view, args=args, kwargs=kwargs)
        if current_page < total_pages:
            kwargs['page_number'] = current_page + 1
            fragment['next_page'] = reverse(view, args=args, kwargs=kwargs)

        # Show up to 5 page links, clamped to the valid page range.
        view_range = 5
        if total_pages > view_range:
            pager_start = current_page - 2
            pager_end = current_page + 2
            if pager_start < 1:
                pager_end += (1 - pager_start)
                pager_start = 1
            if pager_end > total_pages:
                pager_start -= (pager_end - total_pages)
                pager_end = total_pages
        else:
            pager_start = 1
            pager_end = total_pages

        if pager_start > 1:
            kwargs['page_number'] = 1
            fragment['first_page'] = reverse(view, args=args, kwargs=kwargs)
        if pager_end < total_pages:
            kwargs['page_number'] = total_pages
            fragment['last_page'] = reverse(view, args=args, kwargs=kwargs)

        pager = []
        for page in range(pager_start, int(pager_end) + 1):
            kwargs['page_number'] = page
            pager.append((page, reverse(view, args=args, kwargs=kwargs)))

        kwargs['page_number'] = 1
        page_limits = []
        for limit in __PAGE_NAV_LIMITS__:
            kwargs['page_limit'] = limit
            page_limits.append((limit, reverse(view, args=args, kwargs=kwargs)))
        # resolver doesn't like this
        del kwargs['page_number']
        del kwargs['page_limit']
        page_limits.append(('all',
                            reverse(view, args=args, kwargs=kwargs) + "|all"))

        fragment['pager'] = pager
        fragment['page_limits'] = page_limits
    except Resolver404:
        path = "404"
    except NoReverseMatch as nr:
        # Fixed: the original read the exception via sys.exc_info(), but
        # this module never imports sys, so the handler itself raised a
        # NameError.  Binding with `as` is the equivalent, working form.
        path = "NoReverseMatch: %s" % nr
    except ValueError:
        path = "ValueError"
    #FIXME - Handle these
    fragment['path'] = path
    return fragment
@register.inclusion_tag('widgets/filter_bar.html', takes_context=True)
def filter_navigator(context):
    """Build (filter_name, removal_url) pairs for the active URL filters.

    Each pair maps an active filter to the URL of the same view with that
    filter removed.  Returns an empty dict on any resolution failure.
    """
    try:
        path = context['request'].META['PATH_INFO']
        view, args, kwargs = resolve(path)

        # Strip any page limits and numbers
        if 'page_number' in kwargs:
            del kwargs['page_number']
        if 'page_limit' in kwargs:
            del kwargs['page_limit']

        filters = []
        # Renamed the loop variable: `filter` shadowed the builtin.
        for filter_name in filter_list:
            if filter_name in kwargs:
                myargs = kwargs.copy()
                del myargs[filter_name]
                filters.append(
                    (filter_name, reverse(view, args=args, kwargs=myargs)))
        # key= replaces the Python 2-only cmp() comparator (removed in Py3).
        filters.sort(key=lambda pair: pair[0])
        return {'filters': filters}
    except (Resolver404, NoReverseMatch, ValueError, KeyError):
        pass
    return dict()
def _subtract_or_na(mdict, x, y):
"""
Shortcut for build_metric_list
"""
try:
return round(mdict[x] - mdict[y], 4)
except:
return "n/a"
@register.filter
def build_metric_list(mdict):
    """
    Create a list of metric table entries.

    Moving this here to simplify the view. Should really handle the case
    where these are missing...
    """
    # Each entry is an elapsed time computed as end_stat - start_stat,
    # in the fixed display order of the metrics table.
    stages = [
        ('config_parse', 'config_download'),  # parse
        ('probe_upload', 'start'),            # probe
        ('inventory', 'initialization'),      # inventory
        ('install', 'inventory'),             # install
        ('config_parse', 'probe_upload'),     # cfg download & parse
        ('finished', 'start'),                # total
    ]
    return [_subtract_or_na(mdict, end, start) for end, start in stages]
@register.filter
def isstale(timestamp, entry_max=None):
    """
    Check for a stale timestamp.

    Compares two timestamps and returns True if the difference is greater
    than 24 hours.  When entry_max is not given, "now" is used.
    """
    reference = entry_max if entry_max else datetime.now()
    return reference - timestamp > timedelta(hours=24)
@register.filter
def sort_interactions_by_name(value):
    """
    Sort an interaction list by client name.

    Evaluates the (possibly lazy) iterable into a list first.
    """
    inters = list(value)
    # key= replaces the Python 2-only cmp() comparator (removed in Py3).
    inters.sort(key=lambda inter: inter.client.name)
    return inters
class AddUrlFilter(template.Node):
    """Template node rendering the current view's URL with one extra
    filter applied (used by the add_url_filter tag)."""

    def __init__(self, filter_name, filter_value):
        self.filter_name = filter_name
        self.filter_value = filter_value
        # View used when the current view cannot take the new filter.
        self.fallback_view = 'Bcfg2.Server.Reports.reports.views.render_history_view'

    def render(self, context):
        link = '#'
        try:
            path = context['request'].META['PATH_INFO']
            view, args, kwargs = resolve(path)
            filter_value = self.filter_value.resolve(context, True)
            if filter_value:
                filter_name = smart_str(self.filter_name)
                filter_value = smart_unicode(filter_value)
                kwargs[filter_name] = filter_value
                # These two don't make sense
                if filter_name == 'server' and 'hostname' in kwargs:
                    del kwargs['hostname']
                elif filter_name == 'hostname' and 'server' in kwargs:
                    del kwargs['server']
                try:
                    link = reverse(view, args=args, kwargs=kwargs)
                except NoReverseMatch:
                    link = reverse(self.fallback_view, args=None,
                                   kwargs={filter_name: filter_value})
        except NoReverseMatch:
            # Fixed: the original re-raised via sys.exc_info(), but this
            # module never imports sys, so the handler itself raised a
            # NameError.  A bare `raise` re-raises the active exception.
            raise
        except (Resolver404, ValueError):
            pass
        return link
@register.tag
def add_url_filter(parser, token):
    """
    Return a url with the filter added to the current view.

    Takes a new filter and resolves the current view with the new filter
    applied. Resolves to Bcfg2.Server.Reports.reports.views.client_history
    by default.

    {% add_url_filter server=interaction.server %}
    """
    try:
        tag_name, filter_pair = token.split_contents()
        filter_name, filter_value = filter_pair.split('=', 1)
        filter_name = filter_name.strip()
        filter_value = parser.compile_filter(filter_value)
    except ValueError:
        # Wrong number of tokens, or no '=' in the pair.
        raise template.TemplateSyntaxError(
            "%r tag requires exactly one argument" % token.contents.split()[0])
    if not filter_name or not filter_value:
        raise template.TemplateSyntaxError(
            "argument should be a filter=value pair")
    return AddUrlFilter(filter_name, filter_value)
@register.filter
def sortwell(value):
    """
    Sorts a list(or evaluates queryset to list) of bad, extra, or modified
    items in the best way for presentation.
    """
    configItems = list(value)
    # One stable sort on (kind, name) replaces the original pair of
    # Python 2-only cmp() sorts (by name, then stably by kind) — cmp()
    # comparators were removed in Python 3, and the tuple key yields the
    # same ordering: kind primary, name secondary.
    configItems.sort(key=lambda item: (item.entry.kind, item.entry.name))
    return configItems
class MediaTag(template.Node):
    """Template node emitting a URL beneath the media root (to_media_url tag)."""

    def __init__(self, filter_value):
        self.filter_value = filter_value

    def render(self, context):
        base = context['MEDIA_URL']
        try:
            request = context['request']
            try:
                # A reverse proxy may inject the real media URL here.
                base = request.environ['bcfg2.media_url']
            except (KeyError, AttributeError):
                # Narrowed from a bare `except:` — missing WSGI key or a
                # request object without `environ` are the expected cases.
                if request.path != request.META['PATH_INFO']:
                    # Served under a script prefix: prepend it to MEDIA_URL.
                    offset = request.path.find(request.META['PATH_INFO'])
                    if offset > 0:
                        base = "%s/%s" % (request.path[:offset],
                                          context['MEDIA_URL'].strip('/'))
        except (KeyError, AttributeError):
            # No usable request in the context; keep the plain MEDIA_URL.
            pass
        return "%s/%s" % (base, self.filter_value)
@register.tag
def to_media_url(parser, token):
    """
    Return a url relative to the media_url.

    {% to_media_url /bcfg2.css %}
    """
    try:
        tag_name, filter_value = token.split_contents()
        filter_value = parser.compile_filter(filter_value)
    except ValueError:
        # The tag takes exactly one argument after its own name.
        raise template.TemplateSyntaxError(
            "%r tag requires exactly one argument" % token.contents.split()[0])
    return MediaTag(filter_value)
| 33.649819 | 110 | 0.623216 | from django import template
from django.core.urlresolvers import resolve, reverse, Resolver404, NoReverseMatch
from django.utils.encoding import smart_unicode, smart_str
from datetime import datetime, timedelta
from Bcfg2.Server.Reports.utils import filter_list
register = template.Library()
__PAGE_NAV_LIMITS__ = (10, 25, 50, 100)
@register.inclusion_tag('widgets/page_bar.html', takes_context=True)
def page_navigator(context):
fragment = dict()
try:
path = context['request'].META['PATH_INFO']
total_pages = int(context['total_pages'])
records_per_page = int(context['records_per_page'])
except KeyError:
return fragment
except ValueError:
return fragment
if total_pages < 2:
return {}
try:
view, args, kwargs = resolve(path)
current_page = int(kwargs.get('page_number',1))
fragment['current_page'] = current_page
fragment['page_number'] = current_page
fragment['total_pages'] = total_pages
fragment['records_per_page'] = records_per_page
if current_page > 1:
kwargs['page_number'] = current_page - 1
fragment['prev_page'] = reverse(view, args=args, kwargs=kwargs)
if current_page < total_pages:
kwargs['page_number'] = current_page + 1
fragment['next_page'] = reverse(view, args=args, kwargs=kwargs)
view_range = 5
if total_pages > view_range:
pager_start = current_page - 2
pager_end = current_page + 2
if pager_start < 1:
pager_end += (1 - pager_start)
pager_start = 1
if pager_end > total_pages:
pager_start -= (pager_end - total_pages)
pager_end = total_pages
else:
pager_start = 1
pager_end = total_pages
if pager_start > 1:
kwargs['page_number'] = 1
fragment['first_page'] = reverse(view, args=args, kwargs=kwargs)
if pager_end < total_pages:
kwargs['page_number'] = total_pages
fragment['last_page'] = reverse(view, args=args, kwargs=kwargs)
pager = []
for page in range(pager_start, int(pager_end) + 1):
kwargs['page_number'] = page
pager.append( (page, reverse(view, args=args, kwargs=kwargs)) )
kwargs['page_number'] = 1
page_limits = []
for limit in __PAGE_NAV_LIMITS__:
kwargs['page_limit'] = limit
page_limits.append( (limit, reverse(view, args=args, kwargs=kwargs)) )
del kwargs['page_number']
del kwargs['page_limit']
page_limits.append( ('all', reverse(view, args=args, kwargs=kwargs) + "|all") )
fragment['pager'] = pager
fragment['page_limits'] = page_limits
except Resolver404:
path = "404"
except NoReverseMatch:
nr = sys.exc_info()[1]
path = "NoReverseMatch: %s" % nr
except ValueError:
path = "ValueError"
#FIXME - Handle these
fragment['path'] = path
return fragment
@register.inclusion_tag('widgets/filter_bar.html', takes_context=True)
def filter_navigator(context):
try:
path = context['request'].META['PATH_INFO']
view, args, kwargs = resolve(path)
# Strip any page limits and numbers
if 'page_number' in kwargs:
del kwargs['page_number']
if 'page_limit' in kwargs:
del kwargs['page_limit']
filters = []
for filter in filter_list:
if filter in kwargs:
myargs = kwargs.copy()
del myargs[filter]
filters.append( (filter, reverse(view, args=args, kwargs=myargs) ) )
filters.sort(lambda x,y: cmp(x[0], y[0]))
return { 'filters': filters }
except (Resolver404, NoReverseMatch, ValueError, KeyError):
pass
return dict()
def _subtract_or_na(mdict, x, y):
try:
return round(mdict[x] - mdict[y], 4)
except:
return "n/a"
@register.filter
def build_metric_list(mdict):
td_list = []
# parse
td_list.append( _subtract_or_na(mdict, 'config_parse', 'config_download'))
#probe
td_list.append( _subtract_or_na(mdict, 'probe_upload', 'start'))
#inventory
td_list.append( _subtract_or_na(mdict, 'inventory', 'initialization'))
#install
td_list.append( _subtract_or_na(mdict, 'install', 'inventory'))
#cfg download & parse
td_list.append( _subtract_or_na(mdict, 'config_parse', 'probe_upload'))
#total
td_list.append( _subtract_or_na(mdict, 'finished', 'start'))
return td_list
@register.filter
def isstale(timestamp, entry_max=None):
if not entry_max:
entry_max = datetime.now()
return entry_max - timestamp > timedelta(hours=24)
@register.filter
def sort_interactions_by_name(value):
inters = list(value)
inters.sort(lambda a,b: cmp(a.client.name, b.client.name))
return inters
class AddUrlFilter(template.Node):
def __init__(self, filter_name, filter_value):
self.filter_name = filter_name
self.filter_value = filter_value
self.fallback_view = 'Bcfg2.Server.Reports.reports.views.render_history_view'
def render(self, context):
link = '
try:
path = context['request'].META['PATH_INFO']
view, args, kwargs = resolve(path)
filter_value = self.filter_value.resolve(context, True)
if filter_value:
filter_name = smart_str(self.filter_name)
filter_value = smart_unicode(filter_value)
kwargs[filter_name] = filter_value
# These two don't make sense
if filter_name == 'server' and 'hostname' in kwargs:
del kwargs['hostname']
elif filter_name == 'hostname' and 'server' in kwargs:
del kwargs['server']
try:
link = reverse(view, args=args, kwargs=kwargs)
except NoReverseMatch:
link = reverse(self.fallback_view, args=None,
kwargs={ filter_name: filter_value })
except NoReverseMatch:
rm = sys.exc_info()[1]
raise rm
except (Resolver404, ValueError):
pass
return link
@register.tag
def add_url_filter(parser, token):
try:
tag_name, filter_pair = token.split_contents()
filter_name, filter_value = filter_pair.split('=', 1)
filter_name = filter_name.strip()
filter_value = parser.compile_filter(filter_value)
except ValueError:
raise template.TemplateSyntaxError("%r tag requires exactly one argument" % token.contents.split()[0])
if not filter_name or not filter_value:
raise template.TemplateSyntaxError("argument should be a filter=value pair")
return AddUrlFilter(filter_name, filter_value)
@register.filter
def sortwell(value):
configItems = list(value)
configItems.sort(lambda x,y: cmp(x.entry.name, y.entry.name))
configItems.sort(lambda x,y: cmp(x.entry.kind, y.entry.kind))
return configItems
class MediaTag(template.Node):
def __init__(self, filter_value):
self.filter_value = filter_value
def render(self, context):
base = context['MEDIA_URL']
try:
request = context['request']
try:
base = request.environ['bcfg2.media_url']
except:
if request.path != request.META['PATH_INFO']:
offset = request.path.find(request.META['PATH_INFO'])
if offset > 0:
base = "%s/%s" % (request.path[:offset], \
context['MEDIA_URL'].strip('/'))
except:
pass
return "%s/%s" % (base, self.filter_value)
@register.tag
def to_media_url(parser, token):
try:
tag_name, filter_value = token.split_contents()
filter_value = parser.compile_filter(filter_value)
except ValueError:
raise template.TemplateSyntaxError("%r tag requires exactly one argument" % token.contents.split()[0])
return MediaTag(filter_value)
| true | true |
f738f8af0f2cf36e0effa21aab9241a41cb0ed9c | 381 | py | Python | cracking_the_coding_interview_qs/17.15/get_longest_composite_test.py | angelusualle/algorithms | 86286a49db2a755bc57330cb455bcbd8241ea6be | [
"Apache-2.0"
] | null | null | null | cracking_the_coding_interview_qs/17.15/get_longest_composite_test.py | angelusualle/algorithms | 86286a49db2a755bc57330cb455bcbd8241ea6be | [
"Apache-2.0"
] | null | null | null | cracking_the_coding_interview_qs/17.15/get_longest_composite_test.py | angelusualle/algorithms | 86286a49db2a755bc57330cb455bcbd8241ea6be | [
"Apache-2.0"
] | null | null | null | import unittest
from get_longest_composite import get_longest_composite
class Test_Case_Get_Longest_Composite(unittest.TestCase):
    """Unit test for get_longest_composite."""

    def test_get_longest_composite(self):
        # 'applejohn' is the longest word built from other list entries.
        words = ['bobby', 'brosef', 'john', 'apple', 'seed', 'pear',
                 'punch', 'bottom', 'appleseeds', 'applejohn']
        self.assertEqual(get_longest_composite(words), 'applejohn')
if __name__ == '__main__':
unittest.main() | 42.333333 | 160 | 0.745407 | import unittest
from get_longest_composite import get_longest_composite
class Test_Case_Get_Longest_Composite(unittest.TestCase):
def test_get_longest_composite(self):
self.assertEqual(get_longest_composite(['bobby', 'brosef', 'john', 'apple', 'seed', 'pear', 'punch', 'bottom', 'appleseeds', 'applejohn']), 'applejohn')
if __name__ == '__main__':
unittest.main() | true | true |
f738f96e3c7e444aa88fd83fb171e57d9f3c193d | 1,698 | py | Python | Picture/Exbar.py | hashtagSELFIE/That-s-a-Wrap- | 31c8b824742fee01c384eefa49f9f82d85518651 | [
"MIT"
] | 2 | 2018-11-30T04:13:04.000Z | 2018-11-30T13:01:12.000Z | Picture/Exbar.py | hashtagSELFIE/That-s-a-Wrap- | 31c8b824742fee01c384eefa49f9f82d85518651 | [
"MIT"
] | 1 | 2022-02-12T05:05:55.000Z | 2022-02-12T05:05:55.000Z | Picture/Exbar.py | hashtagSELFIE/That-s-a-Wrap- | 31c8b824742fee01c384eefa49f9f82d85518651 | [
"MIT"
] | 1 | 2018-12-03T07:33:39.000Z | 2018-12-03T07:33:39.000Z | import pygal
def picture():
    """Render the 'Director (in %)' grouped bar chart to bar-chart.svg.

    The original body repeated the same four data series verbatim across
    sixteen genre labels; the series are now defined once and added in a
    loop, preserving the exact genre order (and therefore the chart).
    """
    series_a = [None, None, 0, 16.6, 25, 31, 36.4, 45.5, 46.3, 42.8, 37.1]
    series_b = [None, None, None, None, None, None, 0, 3.9, 10.8, 23.8, 35.3]
    series_c = [85.8, 84.6, 84.7, 74.5, 66, 58.6, 54.7, 44.8, 36.2, 26.6, 20.1]
    series_d = [14.2, 15.4, 15.3, 8.9, 9, 10.4, 8.9, 5.8, 6.7, 6.8, 7.5]
    genres = [
        ('Action', series_a), ('Adventure', series_b),
        ('War', series_c), ('Drama', series_d),
        ('Science', series_a), ('Family', series_b),
        ('Thriller', series_c), ('Crime', series_d),
        ('Documentaries', series_a), ('Animation', series_b),
        ('Comedy', series_c), ('Erotic', series_d),
        ('Fantasy', series_a), ('Musicals', series_b),
        ('Romance', series_c), ('Western', series_d),
    ]
    line_chart = pygal.Bar()
    line_chart.title = 'Director (in %)'
    line_chart.x_labels = map(str, range(2002, 2013))
    for genre, data in genres:
        line_chart.add(genre, data)
    line_chart.render_to_file('bar-chart.svg')
picture() | 70.75 | 95 | 0.570671 | import pygal
def picture():
line_chart = pygal.Bar()
line_chart.title = 'Director (in %)'
line_chart.x_labels = map(str, range(2002, 2013))
line_chart.add('Action',[None, None, 0, 16.6, 25, 31, 36.4, 45.5, 46.3, 42.8, 37.1])
line_chart.add('Adventure',[None, None, None, None, None, None, 0, 3.9, 10.8, 23.8, 35.3])
line_chart.add('War',[85.8, 84.6, 84.7, 74.5, 66, 58.6, 54.7, 44.8, 36.2, 26.6, 20.1])
line_chart.add('Drama',[14.2, 15.4, 15.3, 8.9, 9, 10.4, 8.9, 5.8, 6.7, 6.8, 7.5])
line_chart.add('Science',[None, None, 0, 16.6, 25, 31, 36.4, 45.5, 46.3, 42.8, 37.1])
line_chart.add('Family',[None, None, None, None, None, None, 0, 3.9, 10.8, 23.8, 35.3])
line_chart.add('Thriller',[85.8, 84.6, 84.7, 74.5, 66, 58.6, 54.7, 44.8, 36.2, 26.6, 20.1])
line_chart.add('Crime',[14.2, 15.4, 15.3, 8.9, 9, 10.4, 8.9, 5.8, 6.7, 6.8, 7.5])
line_chart.add('Documentaries',[None, None, 0, 16.6, 25, 31, 36.4, 45.5, 46.3, 42.8, 37.1])
line_chart.add('Animation',[None, None, None, None, None, None, 0, 3.9, 10.8, 23.8, 35.3])
line_chart.add('Comedy',[85.8, 84.6, 84.7, 74.5, 66, 58.6, 54.7, 44.8, 36.2, 26.6, 20.1])
line_chart.add('Erotic',[14.2, 15.4, 15.3, 8.9, 9, 10.4, 8.9, 5.8, 6.7, 6.8, 7.5])
line_chart.add('Fantasy',[None, None, 0, 16.6, 25, 31, 36.4, 45.5, 46.3, 42.8, 37.1])
line_chart.add('Musicals',[None, None, None, None, None, None, 0, 3.9, 10.8, 23.8, 35.3])
line_chart.add('Romance',[85.8, 84.6, 84.7, 74.5, 66, 58.6, 54.7, 44.8, 36.2, 26.6, 20.1])
line_chart.add('Western',[14.2, 15.4, 15.3, 8.9, 9, 10.4, 8.9, 5.8, 6.7, 6.8, 7.5])
line_chart.render_to_file('bar-chart.svg')
picture() | true | true |
f738f99974c4e4b13c1fb2b29395384be695d5c0 | 924 | py | Python | pyalmondplus/cli.py | penright/pyalmondplus | f6f223eb650352a17f3ce536b2258bed139cf317 | [
"Apache-2.0"
] | 1 | 2018-08-05T04:28:58.000Z | 2018-08-05T04:28:58.000Z | pyalmondplus/cli.py | penright/pyalmondplus | f6f223eb650352a17f3ce536b2258bed139cf317 | [
"Apache-2.0"
] | 311 | 2018-08-04T08:09:03.000Z | 2022-03-28T05:17:18.000Z | pyalmondplus/cli.py | penright/pyalmondplus | f6f223eb650352a17f3ce536b2258bed139cf317 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""Console script for pyalmondplus."""
import sys
import time
import click
import pyalmondplus.api
import threading
import asyncio
def do_commands(url, my_api):
    """Interactive command loop: prompt until 'stop', then shut the API down.

    Recognized commands: 'stop' ends the loop, 'dl' requests the device
    list; anything else is echoed and ignored.
    """
    click.echo("Connecting to " + url)
    while True:
        value = click.prompt("What next: ")
        print("command is: " + value)
        if value == "stop":
            break
        elif value == "dl":
            my_api.get_device_list()
    print("Do command is stopped")
    my_api.stop()
    # Give the API thread a moment to wind down before the thread exits.
    time.sleep(3)
def api_start(url, my_api):
    """Start the Almond+ API connection (intended to run in a worker thread)."""
    print("Do commands 1")
    my_api.start()
    print("Connected to Almond+")
@click.command()
@click.option('--url', default='')
def main(url):
    """Console entry point: spawn the command-loop and API threads."""
    my_api = pyalmondplus.api.PyAlmondPlus(url)
    command_thread = threading.Thread(target=do_commands, args=(url, my_api))
    command_thread.start()
    api_thread = threading.Thread(target=api_start, args=(url, my_api))
    api_thread.start()
| 23.1 | 73 | 0.640693 |
import sys
import time
import click
import pyalmondplus.api
import threading
import asyncio
def do_commands(url, my_api):
click.echo("Connecting to " + url)
while True:
value = click.prompt("What next: ")
print("command is: " + value)
if value == "stop":
break
elif value == "dl":
my_api.get_device_list()
print("Do command is stopped")
my_api.stop()
time.sleep(3)
def api_start(url, my_api):
print("Do commands 1")
my_api.start()
print("Connected to Almond+")
@click.command()
@click.option('--url', default='')
def main(url):
my_api = pyalmondplus.api.PyAlmondPlus(url)
do_command = threading.Thread(target=do_commands, args=(url, my_api))
do_command.start()
start_api = threading.Thread(target=api_start, args=(url, my_api))
start_api.start()
| true | true |
f738f9d16d50cf5b6e563ba2caffbc8115609b53 | 5,354 | py | Python | tapas/models/tapas_classifier_model_utils.py | Martin36/tapas | c2578e8dfaa862ae0eaa3ae3ade6808c81200ddf | [
"Apache-2.0"
] | 816 | 2020-03-31T15:15:56.000Z | 2022-03-31T19:28:02.000Z | tapas/models/tapas_classifier_model_utils.py | Fgerald/tapas | 2987658c3b65c5ab6e698d6c57823dc30d3d0f96 | [
"Apache-2.0"
] | 155 | 2020-05-02T15:45:42.000Z | 2022-03-31T08:35:23.000Z | tapas/models/tapas_classifier_model_utils.py | Fgerald/tapas | 2987658c3b65c5ab6e698d6c57823dc30d3d0f96 | [
"Apache-2.0"
] | 173 | 2020-05-01T02:39:38.000Z | 2022-03-30T06:43:29.000Z | # coding=utf-8
# Copyright 2019 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""TAPAS BERT model utils for classification."""
from typing import Dict, Text, Tuple, Optional
from tapas.models import segmented_tensor
import tensorflow.compat.v1 as tf
EPSILON_ZERO_DIVISION = 1e-10
CLOSE_ENOUGH_TO_LOG_ZERO = -10000.0
def classification_initializer():
  """Classification layer initializer.

  A truncated normal with stddev 0.02 (the value hard-coded below).
  """
  return tf.truncated_normal_initializer(stddev=0.02)
def extract_answer_from_features(features, use_answer_as_supervision):
  """Extracts the answer, numeric_values, numeric_values_scale.

  Returns an (answer, numeric_values, numeric_values_scale) triple; all
  three are None when answer supervision is disabled.
  """
  if not use_answer_as_supervision:
    return None, None, None
  # Drop the singleton middle dimension of the answer feature.
  answer = tf.squeeze(features["answer"], axis=[1])
  return answer, features["numeric_values"], features["numeric_values_scale"]
def compute_token_logits(output_layer, temperature,
                         init_cell_selection_weights_to_zero):
  """Computes logits per token.

  Args:
    output_layer: <float>[batch_size, seq_length, hidden_dim] Output of the
      encoder layer.
    temperature: float Temperature for the Bernoulli distribution.
    init_cell_selection_weights_to_zero: Whether the initial weights should be
      set to 0. This ensures that all tokens have the same prior probability.

  Returns:
    <float>[batch_size, seq_length] Logits per token.
  """
  hidden_size = output_layer.shape.as_list()[-1]
  if init_cell_selection_weights_to_zero:
    weight_init = tf.zeros_initializer()
  else:
    weight_init = classification_initializer()
  output_weights = tf.get_variable(
      "output_weights", [hidden_size], initializer=weight_init)
  output_bias = tf.get_variable(
      "output_bias", shape=(), initializer=tf.zeros_initializer())
  # Project each token embedding to a scalar, then apply the temperature.
  scores = tf.einsum("bsj,j->bs", output_layer, output_weights) + output_bias
  return scores / temperature
# TODO(eisenjulian): Move more methods from tapas_classifier_model
def compute_column_logits(output_layer,
                          cell_index,
                          cell_mask,
                          init_cell_selection_weights_to_zero,
                          allow_empty_column_selection):
  """Computes logits for each column.
  Args:
    output_layer: <float>[batch_size, seq_length, hidden_dim] Output of the
      encoder layer.
    cell_index: segmented_tensor.IndexMap [batch_size, seq_length] Index that
      groups tokens into cells.
    cell_mask: <float>[batch_size, max_num_rows * max_num_cols] Input mask per
      cell, 1 for cells that exists in the example and 0 for padding.
    init_cell_selection_weights_to_zero: Whether the initial weights should be
      set to 0. This is also applied to column logits, as they are used to
      select the cells. This ensures that all columns have the same prior
      probability.
    allow_empty_column_selection: Allow to select no column.
  Returns:
    <float>[batch_size, max_num_cols] Logits per column. Logits will be set to
      a very low value (such that the probability is 0) for the special id 0
      (which means "outside the table") or columns that do not appear in the
      table.
  """
  hidden_size = output_layer.shape.as_list()[-1]
  column_output_weights = tf.get_variable(
      "column_output_weights", [hidden_size],
      initializer=tf.zeros_initializer()
      if init_cell_selection_weights_to_zero else classification_initializer())
  column_output_bias = tf.get_variable(
      "column_output_bias", shape=(), initializer=tf.zeros_initializer())
  # Per-token scalar scores, shared weights across all tokens.
  token_logits = (
      tf.einsum("bsj,j->bs", output_layer, column_output_weights) +
      column_output_bias)
  # Average the logits per cell and then per column.
  # Note that by linearity it doesn't matter if we do the averaging on the
  # embeddings or on the logits. For performance we do the projection first.
  # [batch_size, max_num_cols * max_num_rows]
  cell_logits, cell_logits_index = segmented_tensor.reduce_mean(
      token_logits, cell_index)
  column_index = cell_index.project_inner(cell_logits_index)
  # [batch_size, max_num_cols]
  column_logits, out_index = segmented_tensor.reduce_sum(
      cell_logits * cell_mask, column_index)
  cell_count, _ = segmented_tensor.reduce_sum(cell_mask, column_index)
  # Average masked cell logits per column; epsilon guards empty columns.
  column_logits /= cell_count + EPSILON_ZERO_DIVISION
  # Mask columns that do not appear in the example.
  is_padding = tf.logical_and(cell_count < 0.5,
                              tf.not_equal(out_index.indices, 0))
  column_logits += CLOSE_ENOUGH_TO_LOG_ZERO * tf.cast(is_padding, tf.float32)
  if not allow_empty_column_selection:
    # Drive the "no column" (id 0) probability to ~0.
    column_logits += CLOSE_ENOUGH_TO_LOG_ZERO * tf.cast(
        tf.equal(out_index.indices, 0), tf.float32)
  return column_logits
| 39.955224 | 79 | 0.734591 |
from typing import Dict, Text, Tuple, Optional
from tapas.models import segmented_tensor
import tensorflow.compat.v1 as tf
EPSILON_ZERO_DIVISION = 1e-10
CLOSE_ENOUGH_TO_LOG_ZERO = -10000.0
def classification_initializer():
  """Truncated-normal (stddev=0.02) initializer for classification layers."""
  return tf.truncated_normal_initializer(stddev=0.02)
def extract_answer_from_features(
    features, use_answer_as_supervision
):
  """Extracts (answer, numeric_values, numeric_values_scale) from *features*.
  Returns a triple of Nones when answer supervision is disabled.
  """
  if use_answer_as_supervision:
    answer = tf.squeeze(features["answer"], axis=[1])
    numeric_values = features["numeric_values"]
    numeric_values_scale = features["numeric_values_scale"]
  else:
    answer = None
    numeric_values = None
    numeric_values_scale = None
  return answer, numeric_values, numeric_values_scale
def compute_token_logits(output_layer, temperature,
                         init_cell_selection_weights_to_zero):
  """Computes one selection logit per token from the encoder output.
  Args:
    output_layer: <float>[batch_size, seq_length, hidden_dim] encoder output.
    temperature: float divisor applied to the logits.
    init_cell_selection_weights_to_zero: start from zero weights so every
      token has the same prior selection probability.
  Returns:
    <float>[batch_size, seq_length] logits per token.
  """
  hidden_size = output_layer.shape.as_list()[-1]
  output_weights = tf.get_variable(
      "output_weights", [hidden_size],
      initializer=tf.zeros_initializer()
      if init_cell_selection_weights_to_zero else classification_initializer())
  output_bias = tf.get_variable(
      "output_bias", shape=(), initializer=tf.zeros_initializer())
  # Project each token embedding to a scalar and temperature-scale it.
  logits = (tf.einsum("bsj,j->bs", output_layer, output_weights) +
            output_bias) / temperature
  return logits
def compute_column_logits(output_layer,
                          cell_index,
                          cell_mask,
                          init_cell_selection_weights_to_zero,
                          allow_empty_column_selection):
  """Computes one selection logit per table column.
  Args:
    output_layer: <float>[batch_size, seq_length, hidden_dim] encoder output.
    cell_index: segmented_tensor.IndexMap grouping tokens into cells.
    cell_mask: <float>[batch_size, max_num_rows * max_num_cols] 1 for real
      cells, 0 for padding.
    init_cell_selection_weights_to_zero: start from zero weights so all
      columns share the same prior probability.
    allow_empty_column_selection: whether selecting no column is permitted.
  Returns:
    <float>[batch_size, max_num_cols] logits per column; padding columns and
    (unless allowed) the special "no column" id 0 get ~log(0).
  """
  hidden_size = output_layer.shape.as_list()[-1]
  column_output_weights = tf.get_variable(
      "column_output_weights", [hidden_size],
      initializer=tf.zeros_initializer()
      if init_cell_selection_weights_to_zero else classification_initializer())
  column_output_bias = tf.get_variable(
      "column_output_bias", shape=(), initializer=tf.zeros_initializer())
  token_logits = (
      tf.einsum("bsj,j->bs", output_layer, column_output_weights) +
      column_output_bias)
  # By linearity it doesn't matter if we average on the
  # embeddings or on the logits. For performance we do the projection first.
  # [batch_size, max_num_cols * max_num_rows]
  cell_logits, cell_logits_index = segmented_tensor.reduce_mean(
      token_logits, cell_index)
  column_index = cell_index.project_inner(cell_logits_index)
  # [batch_size, max_num_cols]
  column_logits, out_index = segmented_tensor.reduce_sum(
      cell_logits * cell_mask, column_index)
  cell_count, _ = segmented_tensor.reduce_sum(cell_mask, column_index)
  # Average of the masked cell logits; epsilon avoids division by zero.
  column_logits /= cell_count + EPSILON_ZERO_DIVISION
  # Mask columns that do not appear in the example.
  is_padding = tf.logical_and(cell_count < 0.5,
                              tf.not_equal(out_index.indices, 0))
  column_logits += CLOSE_ENOUGH_TO_LOG_ZERO * tf.cast(is_padding, tf.float32)
  if not allow_empty_column_selection:
    column_logits += CLOSE_ENOUGH_TO_LOG_ZERO * tf.cast(
        tf.equal(out_index.indices, 0), tf.float32)
  return column_logits
| true | true |
f738f9df61c3fda0d35caee0b14e0e5a092dc611 | 942 | py | Python | Tests/generate_report.py | avpreserve/interstitial | 4ac1fa470670289350fe32227400b325a1f2a328 | [
"BSD-3-Clause"
] | 5 | 2015-11-06T18:00:11.000Z | 2017-12-14T16:22:33.000Z | Tests/generate_report.py | avpreserve/interstitial | 4ac1fa470670289350fe32227400b325a1f2a328 | [
"BSD-3-Clause"
] | 1 | 2021-02-09T18:03:10.000Z | 2021-02-09T18:03:10.000Z | Tests/generate_report.py | WeAreAVP/interstitial | 4ac1fa470670289350fe32227400b325a1f2a328 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: UTF-8 -*-
# Interstitial Error Detector
# Version 0.2, 2013-08-28
# Copyright (c) 2013 AudioVisual Preservation Solutions
# All rights reserved.
# Released under the Apache license, v. 2.0
# Created on Aug 12, 2014
# @author: Furqan Wasi <furqan@avpreserve.com>
import shlex, subprocess, os
# Constructor
# Map os.name onto the platform label used by the branch below.
if os.name == 'posix':
    OsType = 'linux'
elif os.name == 'nt':
    OsType = 'Windows'
elif os.name == 'os2':
    OsType = 'check'
# NOTE(review): OsType stays undefined (NameError below) for any other
# os.name value -- confirm only posix/nt/os2 are expected here.
# Coverage command writing the HTML report; GUI, tests and a few
# Config/Core modules are excluded from the report.
command_html_report = 'coverage html --omit=../GUI/*,../Tests/*,../Config/Validation.py,../Core/CustomException.py,../Core/Debugger.py'
if OsType == 'Windows':
    # Hide the console window that would otherwise flash up on Windows.
    startupinfo = subprocess.STARTUPINFO()
    startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    process_response = subprocess.call(command_html_report, startupinfo=startupinfo)
else:
    args = shlex.split(command_html_report)
    # NOTE(review): the Popen result is never waited on, so the script can
    # exit before the report is written -- consider p.wait()/p.communicate().
    p = subprocess.Popen(args, stdout=subprocess.PIPE)
| 29.4375 | 136 | 0.685775 |
import shlex, subprocess, os
# Detect the platform; the label drives the subprocess invocation below.
if os.name == 'posix':
    OsType = 'linux'
elif os.name == 'nt':
    OsType = 'Windows'
elif os.name == 'os2':
    OsType = 'check'
# Coverage HTML report command, excluding GUI/tests and selected modules.
command_html_report = 'coverage html --omit=../GUI/*,../Tests/*,../Config/Validation.py,../Core/CustomException.py,../Core/Debugger.py'
if OsType == 'Windows':
    # STARTF_USESHOWWINDOW suppresses the transient console window.
    startupinfo = subprocess.STARTUPINFO()
    startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    process_response = subprocess.call(command_html_report, startupinfo=startupinfo)
else:
    args = shlex.split(command_html_report)
    # NOTE(review): process is started but never waited on -- verify intent.
    p = subprocess.Popen(args, stdout=subprocess.PIPE)
| true | true |
f738fae13d36290b3bda56030bc913e1edc0187f | 713 | py | Python | full-problems/gameWithStrings.py | vikas-t/DS-Algo | ea654d1cad5374c824c52da9d3815a9546eb43fa | [
"Apache-2.0"
] | null | null | null | full-problems/gameWithStrings.py | vikas-t/DS-Algo | ea654d1cad5374c824c52da9d3815a9546eb43fa | [
"Apache-2.0"
] | null | null | null | full-problems/gameWithStrings.py | vikas-t/DS-Algo | ea654d1cad5374c824c52da9d3815a9546eb43fa | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python3
# https://practice.geeksforgeeks.org/problems/game-with-string/0
import heapq
def sol(s, k):
    """Minimise the sum of squared letter frequencies after k removals.

    Negated counts are kept in a min-heap so the most frequent letter is
    always at the root; each of the k removals decrements that maximum.
    Assumes s contains only lowercase ASCII letters.
    """
    counts = [0] * 26
    for ch in s:
        counts[ord(ch) - 97] -= 1  # negate so heapq behaves as a max-heap
    heapq.heapify(counts)
    for _ in range(k):
        top = heapq.heappop(counts)
        heapq.heappush(counts, top + 1)
    # Squaring removes the sign, so the negated counts need no flipping.
    return sum(c * c for c in counts)
import heapq
def sol(s, k):
    """Return the minimised sum of squared character frequencies after
    removing k characters, always taking from the most frequent letter.
    Only lowercase ASCII input is supported.
    """
    freq = [0] * 26
    for letter in s:
        # Store negatives: heapq's min-heap then yields the max frequency.
        freq[ord(letter) - ord('a')] -= 1
    heapq.heapify(freq)
    while k:
        heapq.heappush(freq, heapq.heappop(freq) + 1)
        k -= 1
    return sum(v * v for v in freq)
f738fb591d3e4da7bac480926eae3a268abc1135 | 37,282 | py | Python | PythonAPI/carissma_project/lib/python3.5/site-packages/matplotlib/dviread.py | AbdulHoffmann/carla_carissma | 8d382769ffa02a6c61a22c57160285505f5ff0a4 | [
"MIT"
] | 445 | 2019-01-26T13:50:26.000Z | 2022-03-18T05:17:38.000Z | venv/lib/python3.7/site-packages/matplotlib/dviread.py | John1001Song/Big-Data-Robo-Adviser | 9444dce96954c546333d5aecc92a06c3bfd19aa5 | [
"MIT"
] | 242 | 2019-01-29T15:48:27.000Z | 2022-03-31T22:09:21.000Z | venv/lib/python3.7/site-packages/matplotlib/dviread.py | John1001Song/Big-Data-Robo-Adviser | 9444dce96954c546333d5aecc92a06c3bfd19aa5 | [
"MIT"
] | 64 | 2018-04-25T08:51:57.000Z | 2022-01-29T14:13:57.000Z | """
A module for reading dvi files output by TeX. Several limitations make
this not (currently) useful as a general-purpose dvi preprocessor, but
it is currently used by the pdf backend for processing usetex text.
Interface::
with Dvi(filename, 72) as dvi:
# iterate over pages:
for page in dvi:
w, h, d = page.width, page.height, page.descent
for x, y, font, glyph, width in page.text:
fontname = font.texname
pointsize = font.size
...
for x, y, height, width in page.boxes:
...
"""
from collections import namedtuple
import enum
from functools import lru_cache, partial, wraps
import logging
import os
import re
import struct
import subprocess
import textwrap
import numpy as np
from matplotlib import cbook, rcParams
_log = logging.getLogger(__name__)
# Many dvi related files are looked for by external processes, require
# additional parsing, and are used many times per rendering, which is why they
# are cached using lru_cache().
# Dvi is a bytecode format documented in
# http://mirrors.ctan.org/systems/knuth/dist/texware/dvitype.web
# http://texdoc.net/texmf-dist/doc/generic/knuth/texware/dvitype.pdf
#
# The file consists of a preamble, some number of pages, a postamble,
# and a finale. Different opcodes are allowed in different contexts,
# so the Dvi object has a parser state:
#
# pre: expecting the preamble
# outer: between pages (followed by a page or the postamble,
# also e.g. font definitions are allowed)
# page: processing a page
# post_post: state after the postamble (our current implementation
# just stops reading)
# finale: the finale (unimplemented in our current implementation)
# Parser states for the Dvi reader (see the state-machine comments above).
_dvistate = enum.Enum('DviState', 'pre outer inpage post_post finale')
# The marks on a page consist of text and boxes. A page also has dimensions.
# Page: all glyphs (text) and rules (boxes) on one page plus its extent.
Page = namedtuple('Page', 'text boxes height width descent')
# Text: one glyph at (x, y), drawn with *font*, advancing by *width*.
Text = namedtuple('Text', 'x y font glyph width')
# Box: a filled rule of the given height/width whose corner is at (x, y).
Box = namedtuple('Box', 'x y height width')
# Opcode argument parsing
#
# Each of the following functions takes a Dvi object and delta,
# which is the difference between the opcode and the minimum opcode
# with the same meaning. Dvi opcodes often encode the number of
# argument bytes in this delta.
def _arg_raw(dvi, delta):
"""Return *delta* without reading anything more from the dvi file"""
return delta
def _arg(bytes, signed, dvi, _):
"""Read *bytes* bytes, returning the bytes interpreted as a
signed integer if *signed* is true, unsigned otherwise."""
return dvi._arg(bytes, signed)
def _arg_slen(dvi, delta):
"""Signed, length *delta*
Read *delta* bytes, returning None if *delta* is zero, and
the bytes interpreted as a signed integer otherwise."""
if delta == 0:
return None
return dvi._arg(delta, True)
def _arg_slen1(dvi, delta):
"""Signed, length *delta*+1
Read *delta*+1 bytes, returning the bytes interpreted as signed."""
return dvi._arg(delta+1, True)
def _arg_ulen1(dvi, delta):
"""Unsigned length *delta*+1
Read *delta*+1 bytes, returning the bytes interpreted as unsigned."""
return dvi._arg(delta+1, False)
def _arg_olen1(dvi, delta):
"""Optionally signed, length *delta*+1
Read *delta*+1 bytes, returning the bytes interpreted as
unsigned integer for 0<=*delta*<3 and signed if *delta*==3."""
return dvi._arg(delta + 1, delta == 3)
# Maps the argument-spec names accepted by _dispatch(args=...) to the reader
# helpers above; 'u1'/'u4'/'s4' are fixed-width, the *len* forms derive
# their width from the opcode delta.
_arg_mapping = dict(raw=_arg_raw,
                    u1=partial(_arg, 1, False),
                    u4=partial(_arg, 4, False),
                    s4=partial(_arg, 4, True),
                    slen=_arg_slen,
                    olen1=_arg_olen1,
                    slen1=_arg_slen1,
                    ulen1=_arg_ulen1)
def _dispatch(table, min, max=None, state=None, args=('raw',)):
    """Decorator for dispatch by opcode. Sets the values in *table*
    from *min* to *max* to this method, adds a check that the Dvi state
    matches *state* if not None, reads arguments from the file according
    to *args*.
    *table*
        the dispatch table to be filled in
    *min*
        minimum opcode for calling this function
    *max*
        maximum opcode for calling this function, None if only *min* is allowed
    *state*
        state of the Dvi object in which these opcodes are allowed
    *args*
        sequence of argument specifications:
        ``'raw'``: opcode minus minimum
        ``'u1'``: read one unsigned byte
        ``'u4'``: read four bytes, treat as an unsigned number
        ``'s4'``: read four bytes, treat as a signed number
        ``'slen'``: read (opcode - minimum) bytes, treat as signed
        ``'slen1'``: read (opcode - minimum + 1) bytes, treat as signed
        ``'ulen1'``: read (opcode - minimum + 1) bytes, treat as unsigned
        ``'olen1'``: read (opcode - minimum + 1) bytes, treat as unsigned
            if under four bytes, signed if four bytes
    """
    def decorate(method):
        # Resolve the argument readers once, at decoration time.
        get_args = [_arg_mapping[x] for x in args]
        @wraps(method)
        def wrapper(self, byte):
            if state is not None and self.state != state:
                raise ValueError("state precondition failed")
            return method(self, *[f(self, byte-min) for f in get_args])
        if max is None:
            table[min] = wrapper
        else:
            # Guard against two handlers claiming the same opcode range.
            for i in range(min, max+1):
                assert table[i] is None
                table[i] = wrapper
        return wrapper
    return decorate
class Dvi(object):
    """
    A reader for a dvi ("device-independent") file, as produced by TeX.
    The current implementation can only iterate through pages in order,
    and does not even attempt to verify the postamble.
    This class can be used as a context manager to close the underlying
    file upon exit. Pages can be read via iteration. Here is an overly
    simple way to extract text without trying to detect whitespace::
        >>> with matplotlib.dviread.Dvi('input.dvi', 72) as dvi:
        ...     for page in dvi:
        ...         print(''.join(chr(t.glyph) for t in page.text))
    """
    # dispatch table: opcode byte -> bound handler, filled by the
    # @_dispatch decorators on the methods below.
    _dtable = [None] * 256
    _dispatch = partial(_dispatch, _dtable)
    def __init__(self, filename, dpi):
        """
        Read the data from the file named *filename* and convert
        TeX's internal units to units of *dpi* per inch.
        *dpi* only sets the units and does not limit the resolution.
        Use None to return TeX's internal units.
        """
        _log.debug('Dvi: %s', filename)
        self.file = open(filename, 'rb')
        self.dpi = dpi
        self.fonts = {}
        self.state = _dvistate.pre
        self.baseline = self._get_baseline(filename)
    def _get_baseline(self, filename):
        # A sibling .baseline file (written when text.latex.preview is on)
        # stores height/depth/width; the depth is the descent we need.
        if rcParams['text.latex.preview']:
            base, ext = os.path.splitext(filename)
            baseline_filename = base + ".baseline"
            if os.path.exists(baseline_filename):
                with open(baseline_filename, 'rb') as fd:
                    l = fd.read().split()
                height, depth, width = l
                return float(depth)
        return None
    def __enter__(self):
        """
        Context manager enter method, does nothing.
        """
        return self
    def __exit__(self, etype, evalue, etrace):
        """
        Context manager exit method, closes the underlying file if it is open.
        """
        self.close()
    def __iter__(self):
        """
        Iterate through the pages of the file.
        Yields
        ------
        Page
            Details of all the text and box objects on the page.
            The Page tuple contains lists of Text and Box tuples and
            the page dimensions, and the Text and Box tuples contain
            coordinates transformed into a standard Cartesian
            coordinate system at the dpi value given when initializing.
            The coordinates are floating point numbers, but otherwise
            precision is not lost and coordinate values are not clipped to
            integers.
        """
        while self._read():
            yield self._output()
    def close(self):
        """
        Close the underlying file if it is open.
        """
        if not self.file.closed:
            self.file.close()
    def _output(self):
        """
        Output the text and boxes belonging to the most recent page.
        page = dvi._output()
        """
        # First pass: compute the bounding box of everything on the page.
        minx, miny, maxx, maxy = np.inf, np.inf, -np.inf, -np.inf
        maxy_pure = -np.inf
        for elt in self.text + self.boxes:
            if isinstance(elt, Box):
                x, y, h, w = elt
                e = 0  # zero depth
            else:  # glyph
                x, y, font, g, w = elt
                h, e = font._height_depth_of(g)
            minx = min(minx, x)
            miny = min(miny, y - h)
            maxx = max(maxx, x + w)
            maxy = max(maxy, y + e)
            maxy_pure = max(maxy_pure, y)
        if self.dpi is None:
            # special case for ease of debugging: output raw dvi coordinates
            return Page(text=self.text, boxes=self.boxes,
                        width=maxx-minx, height=maxy_pure-miny,
                        descent=maxy-maxy_pure)
        # convert from TeX's "scaled points" to dpi units
        d = self.dpi / (72.27 * 2**16)
        if self.baseline is None:
            descent = (maxy - maxy_pure) * d
        else:
            descent = self.baseline
        # Flip to a y-up Cartesian system anchored at the page origin.
        text = [Text((x-minx)*d, (maxy-y)*d - descent, f, g, w*d)
                for (x, y, f, g, w) in self.text]
        boxes = [Box((x-minx)*d, (maxy-y)*d - descent, h*d, w*d)
                 for (x, y, h, w) in self.boxes]
        return Page(text=text, boxes=boxes, width=(maxx-minx)*d,
                    height=(maxy_pure-miny)*d, descent=descent)
    def _read(self):
        """
        Read one page from the file. Return True if successful,
        False if there were no more pages.
        """
        while True:
            byte = self.file.read(1)[0]
            self._dtable[byte](self, byte)
            if byte == 140:  # end of page
                return True
            if self.state is _dvistate.post_post:  # end of file
                self.close()
                return False
    def _arg(self, nbytes, signed=False):
        """
        Read and return an integer argument *nbytes* long.
        Signedness is determined by the *signed* keyword.
        """
        str = self.file.read(nbytes)
        # Big-endian accumulation; sign-extend from the leading byte only.
        value = str[0]
        if signed and value >= 0x80:
            value = value - 0x100
        for i in range(1, nbytes):
            value = 0x100*value + str[i]
        return value
    @_dispatch(min=0, max=127, state=_dvistate.inpage)
    def _set_char_immediate(self, char):
        self._put_char_real(char)
        self.h += self.fonts[self.f]._width_of(char)
    @_dispatch(min=128, max=131, state=_dvistate.inpage, args=('olen1',))
    def _set_char(self, char):
        self._put_char_real(char)
        self.h += self.fonts[self.f]._width_of(char)
    @_dispatch(132, state=_dvistate.inpage, args=('s4', 's4'))
    def _set_rule(self, a, b):
        self._put_rule_real(a, b)
        self.h += b
    @_dispatch(min=133, max=136, state=_dvistate.inpage, args=('olen1',))
    def _put_char(self, char):
        self._put_char_real(char)
    def _put_char_real(self, char):
        font = self.fonts[self.f]
        if font._vf is None:
            self.text.append(Text(self.h, self.v, font, char,
                                  font._width_of(char)))
        else:
            # Virtual font: expand the character into its packet of
            # glyphs and rules, scaled into the current font's units.
            scale = font._scale
            for x, y, f, g, w in font._vf[char].text:
                newf = DviFont(scale=_mul2012(scale, f._scale),
                               tfm=f._tfm, texname=f.texname, vf=f._vf)
                self.text.append(Text(self.h + _mul2012(x, scale),
                                      self.v + _mul2012(y, scale),
                                      newf, g, newf._width_of(g)))
            self.boxes.extend([Box(self.h + _mul2012(x, scale),
                                   self.v + _mul2012(y, scale),
                                   _mul2012(a, scale), _mul2012(b, scale))
                               for x, y, a, b in font._vf[char].boxes])
    @_dispatch(137, state=_dvistate.inpage, args=('s4', 's4'))
    def _put_rule(self, a, b):
        self._put_rule_real(a, b)
    def _put_rule_real(self, a, b):
        # Rules with non-positive extent are not drawn (per the dvi spec).
        if a > 0 and b > 0:
            self.boxes.append(Box(self.h, self.v, a, b))
    @_dispatch(138)
    def _nop(self, _):
        pass
    @_dispatch(139, state=_dvistate.outer, args=('s4',)*11)
    def _bop(self, c0, c1, c2, c3, c4, c5, c6, c7, c8, c9, p):
        self.state = _dvistate.inpage
        self.h, self.v, self.w, self.x, self.y, self.z = 0, 0, 0, 0, 0, 0
        self.stack = []
        self.text = []          # list of Text objects
        self.boxes = []         # list of Box objects
    @_dispatch(140, state=_dvistate.inpage)
    def _eop(self, _):
        self.state = _dvistate.outer
        del self.h, self.v, self.w, self.x, self.y, self.z, self.stack
    @_dispatch(141, state=_dvistate.inpage)
    def _push(self, _):
        self.stack.append((self.h, self.v, self.w, self.x, self.y, self.z))
    @_dispatch(142, state=_dvistate.inpage)
    def _pop(self, _):
        self.h, self.v, self.w, self.x, self.y, self.z = self.stack.pop()
    @_dispatch(min=143, max=146, state=_dvistate.inpage, args=('slen1',))
    def _right(self, b):
        self.h += b
    @_dispatch(min=147, max=151, state=_dvistate.inpage, args=('slen',))
    def _right_w(self, new_w):
        if new_w is not None:
            self.w = new_w
        self.h += self.w
    @_dispatch(min=152, max=156, state=_dvistate.inpage, args=('slen',))
    def _right_x(self, new_x):
        if new_x is not None:
            self.x = new_x
        self.h += self.x
    @_dispatch(min=157, max=160, state=_dvistate.inpage, args=('slen1',))
    def _down(self, a):
        self.v += a
    @_dispatch(min=161, max=165, state=_dvistate.inpage, args=('slen',))
    def _down_y(self, new_y):
        if new_y is not None:
            self.y = new_y
        self.v += self.y
    @_dispatch(min=166, max=170, state=_dvistate.inpage, args=('slen',))
    def _down_z(self, new_z):
        if new_z is not None:
            self.z = new_z
        self.v += self.z
    @_dispatch(min=171, max=234, state=_dvistate.inpage)
    def _fnt_num_immediate(self, k):
        self.f = k
    @_dispatch(min=235, max=238, state=_dvistate.inpage, args=('olen1',))
    def _fnt_num(self, new_f):
        self.f = new_f
    @_dispatch(min=239, max=242, args=('ulen1',))
    def _xxx(self, datalen):
        # \special{} payload: logged for debugging but otherwise ignored.
        special = self.file.read(datalen)
        _log.debug(
            'Dvi._xxx: encountered special: %s',
            ''.join([chr(ch) if 32 <= ch < 127 else '<%02x>' % ch
                     for ch in special]))
    @_dispatch(min=243, max=246, args=('olen1', 'u4', 'u4', 'u4', 'u1', 'u1'))
    def _fnt_def(self, k, c, s, d, a, l):
        self._fnt_def_real(k, c, s, d, a, l)
    def _fnt_def_real(self, k, c, s, d, a, l):
        n = self.file.read(a + l)
        fontname = n[-l:].decode('ascii')
        tfm = _tfmfile(fontname)
        if tfm is None:
            raise FileNotFoundError("missing font metrics file: %s" % fontname)
        # c == 0 means "don't check"; so does a zero tfm checksum.
        if c != 0 and tfm.checksum != 0 and c != tfm.checksum:
            raise ValueError('tfm checksum mismatch: %s' % n)
        vf = _vffile(fontname)
        self.fonts[k] = DviFont(scale=s, tfm=tfm, texname=n, vf=vf)
    @_dispatch(247, state=_dvistate.pre, args=('u1', 'u4', 'u4', 'u4', 'u1'))
    def _pre(self, i, num, den, mag, k):
        comment = self.file.read(k)
        if i != 2:
            raise ValueError("Unknown dvi format %d" % i)
        if num != 25400000 or den != 7227 * 2**16:
            raise ValueError("nonstandard units in dvi file")
            # meaning: TeX always uses those exact values, so it
            # should be enough for us to support those
            # (There are 72.27 pt to an inch so 7227 pt =
            # 7227 * 2**16 sp to 100 in. The numerator is multiplied
            # by 10^5 to get units of 10**-7 meters.)
        if mag != 1000:
            raise ValueError("nonstandard magnification in dvi file")
            # meaning: LaTeX seems to frown on setting \mag, so
            # I think we can assume this is constant
        self.state = _dvistate.outer
    @_dispatch(248, state=_dvistate.outer)
    def _post(self, _):
        self.state = _dvistate.post_post
        # TODO: actually read the postamble and finale?
        # currently post_post just triggers closing the file
    @_dispatch(249)
    def _post_post(self, _):
        raise NotImplementedError
    @_dispatch(min=250, max=255)
    def _malformed(self, offset):
        # NOTE(review): the "%d" is never interpolated -- the message and
        # the byte value are passed to ValueError as two separate args.
        raise ValueError("unknown command: byte %d", 250 + offset)
class DviFont(object):
    """
    Encapsulation of a font that a DVI file can refer to.
    This class holds a font's texname and size, supports comparison,
    and knows the widths of glyphs in the same units as the AFM file.
    There are also internal attributes (for use by dviread.py) that
    are *not* used for comparison.
    The size is in Adobe points (converted from TeX points).
    Parameters
    ----------
    scale : float
        Factor by which the font is scaled from its natural size.
    tfm : Tfm
        TeX font metrics for this font
    texname : bytes
       Name of the font as used internally by TeX and friends, as an
       ASCII bytestring. This is usually very different from any external
       font names, and :class:`dviread.PsfontsMap` can be used to find
       the external name of the font.
    vf : Vf
       A TeX "virtual font" file, or None if this font is not virtual.
    Attributes
    ----------
    texname : bytes
    size : float
       Size of the font in Adobe points, converted from the slightly
       smaller TeX points.
    widths : list
       Widths of glyphs in glyph-space units, typically 1/1000ths of
       the point size.
    """
    __slots__ = ('texname', 'size', 'widths', '_scale', '_vf', '_tfm')
    def __init__(self, scale, tfm, texname, vf):
        if not isinstance(texname, bytes):
            raise ValueError("texname must be a bytestring, got %s"
                             % type(texname))
        self._scale, self._tfm, self.texname, self._vf = \
            scale, tfm, texname, vf
        self.size = scale * (72.0 / (72.27 * 2**16))
        try:
            # max() raises ValueError on an empty width table.
            nchars = max(tfm.width) + 1
        except ValueError:
            nchars = 0
        self.widths = [(1000*tfm.width.get(char, 0)) >> 20
                       for char in range(nchars)]
    def __eq__(self, other):
        # Only the public identity (name and size) takes part in equality.
        return self.__class__ == other.__class__ and \
            self.texname == other.texname and self.size == other.size
    def __ne__(self, other):
        return not self.__eq__(other)
    def _width_of(self, char):
        """
        Width of char in dvi units. For internal use by dviread.py.
        """
        width = self._tfm.width.get(char, None)
        if width is not None:
            return _mul2012(width, self._scale)
        _log.debug('No width for char %d in font %s.', char, self.texname)
        return 0
    def _height_depth_of(self, char):
        """
        Height and depth of char in dvi units. For internal use by dviread.py.
        """
        result = []
        for metric, name in ((self._tfm.height, "height"),
                             (self._tfm.depth, "depth")):
            value = metric.get(char, None)
            if value is None:
                _log.debug('No %s for char %d in font %s',
                           name, char, self.texname)
                result.append(0)
            else:
                result.append(_mul2012(value, self._scale))
        return result
class Vf(Dvi):
    """
    A virtual font (\\*.vf file) containing subroutines for dvi files.
    Usage::
      vf = Vf(filename)
      glyph = vf[code]
      glyph.text, glyph.boxes, glyph.width
    Parameters
    ----------
    filename : string or bytestring
    Notes
    -----
    The virtual font format is a derivative of dvi:
    http://mirrors.ctan.org/info/knuth/virtual-fonts
    This class reuses some of the machinery of `Dvi`
    but replaces the `_read` loop and dispatch mechanism.
    """
    def __init__(self, filename):
        Dvi.__init__(self, filename, 0)
        try:
            self._first_font = None
            self._chars = {}
            self._read()
        finally:
            self.close()
    def __getitem__(self, code):
        return self._chars[code]
    def _read(self):
        """
        Read the vf file in its entirety, decoding each character packet
        into a Page stored in self._chars.
        """
        packet_char, packet_ends = None, None
        packet_len, packet_width = None, None
        while True:
            byte = self.file.read(1)[0]
            # If we are in a packet, execute the dvi instructions
            if self.state is _dvistate.inpage:
                byte_at = self.file.tell()-1
                if byte_at == packet_ends:
                    self._finalize_packet(packet_char, packet_width)
                    packet_len, packet_char, packet_width = None, None, None
                    # fall through to out-of-packet code
                elif byte_at > packet_ends:
                    raise ValueError("Packet length mismatch in vf file")
                else:
                    # bop/eop and font definitions are illegal inside packets.
                    if byte in (139, 140) or byte >= 243:
                        raise ValueError(
                            "Inappropriate opcode %d in vf file" % byte)
                    Dvi._dtable[byte](self, byte)
                    continue
            # We are outside a packet
            if byte < 242:          # a short packet (length given by byte)
                packet_len = byte
                packet_char, packet_width = self._arg(1), self._arg(3)
                packet_ends = self._init_packet(byte)
                self.state = _dvistate.inpage
            elif byte == 242:       # a long packet
                packet_len, packet_char, packet_width = \
                    [self._arg(x) for x in (4, 4, 4)]
                self._init_packet(packet_len)
            elif 243 <= byte <= 246:
                # fnt_def, same layout as in a dvi file.
                k = self._arg(byte - 242, byte == 246)
                c, s, d, a, l = [self._arg(x) for x in (4, 4, 4, 1, 1)]
                self._fnt_def_real(k, c, s, d, a, l)
                if self._first_font is None:
                    self._first_font = k
            elif byte == 247:       # preamble
                i, k = self._arg(1), self._arg(1)
                x = self.file.read(k)
                cs, ds = self._arg(4), self._arg(4)
                self._pre(i, x, cs, ds)
            elif byte == 248:       # postamble (just some number of 248s)
                break
            else:
                raise ValueError("unknown vf opcode %d" % byte)
    def _init_packet(self, pl):
        if self.state != _dvistate.outer:
            raise ValueError("Misplaced packet in vf file")
        # Reset the interpreter registers for the new character packet.
        self.h, self.v, self.w, self.x, self.y, self.z = 0, 0, 0, 0, 0, 0
        self.stack, self.text, self.boxes = [], [], []
        self.f = self._first_font
        return self.file.tell() + pl
    def _finalize_packet(self, packet_char, packet_width):
        self._chars[packet_char] = Page(
            text=self.text, boxes=self.boxes, width=packet_width,
            height=None, descent=None)
        self.state = _dvistate.outer
    def _pre(self, i, x, cs, ds):
        if self.state is not _dvistate.pre:
            raise ValueError("pre command in middle of vf file")
        if i != 202:
            raise ValueError("Unknown vf format %d" % i)
        if len(x):
            _log.debug('vf file comment: %s', x)
        self.state = _dvistate.outer
        # cs = checksum, ds = design size
def _fix2comp(num):
"""
Convert from two's complement to negative.
"""
assert 0 <= num < 2**32
if num & 2**31:
return num - 2**32
else:
return num
def _mul2012(num1, num2):
"""
Multiply two numbers in 20.12 fixed point format.
"""
# Separated into a function because >> has surprising precedence
return (num1*num2) >> 20
class Tfm(object):
    """
    A TeX Font Metric file.
    This implementation covers only the bare minimum needed by the Dvi class.
    Parameters
    ----------
    filename : string or bytestring
    Attributes
    ----------
    checksum : int
       Used for verifying against the dvi file.
    design_size : int
       Design size of the font (unknown units)
    width, height, depth : dict
       Dimensions of each character, need to be scaled by the factor
       specified in the dvi file. These are dicts because indexing may
       not start from 0.
    """
    __slots__ = ('checksum', 'design_size', 'width', 'height', 'depth')
    def __init__(self, filename):
        _log.debug('opening tfm file %s', filename)
        with open(filename, 'rb') as file:
            header1 = file.read(24)
            # lh: header length; bc/ec: first/last char; nw/nh/nd: table sizes.
            lh, bc, ec, nw, nh, nd = \
                struct.unpack('!6H', header1[2:14])
            _log.debug('lh=%d, bc=%d, ec=%d, nw=%d, nh=%d, nd=%d',
                       lh, bc, ec, nw, nh, nd)
            header2 = file.read(4*lh)
            self.checksum, self.design_size = \
                struct.unpack('!2I', header2[:8])
            # there is also encoding information etc.
            char_info = file.read(4*(ec-bc+1))
            widths = file.read(4*nw)
            heights = file.read(4*nh)
            depths = file.read(4*nd)
        self.width, self.height, self.depth = {}, {}, {}
        # Use integer division: len(x) is always a multiple of 4, and '%d'
        # should not be fed a float.
        widths, heights, depths = \
            [struct.unpack('!%dI' % (len(x) // 4), x)
             for x in (widths, heights, depths)]
        for idx, char in enumerate(range(bc, ec+1)):
            # byte0 indexes the width table; byte1 packs height (high
            # nibble) and depth (low nibble) table indices.
            byte0 = char_info[4*idx]
            byte1 = char_info[4*idx+1]
            self.width[char] = _fix2comp(widths[byte0])
            self.height[char] = _fix2comp(heights[byte1 >> 4])
            self.depth[char] = _fix2comp(depths[byte1 & 0xf])
# Lookup result of PsfontsMap: a TeX font name mapped to its PostScript
# name, rendering effects, encoding file and font file.
PsFont = namedtuple('Font', 'texname psname effects encoding filename')
class PsfontsMap(object):
    """
    A psfonts.map formatted file, mapping TeX fonts to PS fonts.
    Usage::
        >>> map = PsfontsMap(find_tex_file('pdftex.map'))
        >>> entry = map[b'ptmbo8r']
        >>> entry.texname
        b'ptmbo8r'
        >>> entry.psname
        b'Times-Bold'
        >>> entry.encoding
        '/usr/local/texlive/2008/texmf-dist/fonts/enc/dvips/base/8r.enc'
        >>> entry.effects
        {'slant': 0.16700000000000001}
        >>> entry.filename
    Parameters
    ----------
    filename : string or bytestring
    Notes
    -----
    For historical reasons, TeX knows many Type-1 fonts by different
    names than the outside world. (For one thing, the names have to
    fit in eight characters.) Also, TeX's native fonts are not Type-1
    but Metafont, which is nontrivial to convert to PostScript except
    as a bitmap. While high-quality conversions to Type-1 format exist
    and are shipped with modern TeX distributions, we need to know
    which Type-1 fonts are the counterparts of which native fonts. For
    these reasons a mapping is needed from internal font names to font
    file names.
    A texmf tree typically includes mapping files called e.g.
    :file:`psfonts.map`, :file:`pdftex.map`, or :file:`dvipdfm.map`.
    The file :file:`psfonts.map` is used by :program:`dvips`,
    :file:`pdftex.map` by :program:`pdfTeX`, and :file:`dvipdfm.map`
    by :program:`dvipdfm`. :file:`psfonts.map` might avoid embedding
    the 35 PostScript fonts (i.e., have no filename for them, as in
    the Times-Bold example above), while the pdf-related files perhaps
    only avoid the "Base 14" pdf fonts. But the user may have
    configured these files differently.
    """
    __slots__ = ('_font', '_filename')
    # Create a filename -> PsfontsMap cache, so that calling
    # `PsfontsMap(filename)` with the same filename a second time immediately
    # returns the same object.
    @lru_cache()
    def __new__(cls, filename):
        self = object.__new__(cls)
        # Parsed entries, keyed by the raw TeX font name (bytes) -> PsFont.
        self._font = {}
        self._filename = os.fsdecode(filename)
        with open(filename, 'rb') as file:
            self._parse(file)
        return self
    def __getitem__(self, texname):
        assert isinstance(texname, bytes)
        # A missing entry usually means a missing font *package* in the
        # user's TeX installation; log an actionable message, then let the
        # original KeyError propagate to the caller.
        try:
            result = self._font[texname]
        except KeyError:
            fmt = ('A PostScript file for the font whose TeX name is "{0}" '
                   'could not be found in the file "{1}". The dviread module '
                   'can only handle fonts that have an associated PostScript '
                   'font file. '
                   'This problem can often be solved by installing '
                   'a suitable PostScript font package in your (TeX) '
                   'package manager.')
            msg = fmt.format(texname.decode('ascii'), self._filename)
            msg = textwrap.fill(msg, break_on_hyphens=False,
                                break_long_words=False)
            _log.info(msg)
            raise
        # Resolve relative file names through the texmf tree (kpsewhich);
        # absolute paths (starting with b'/') are used verbatim.
        fn, enc = result.filename, result.encoding
        if fn is not None and not fn.startswith(b'/'):
            fn = find_tex_file(fn)
        if enc is not None and not enc.startswith(b'/'):
            enc = find_tex_file(result.encoding)
        return result._replace(filename=fn, encoding=enc)
    def _parse(self, file):
        """
        Parse the font mapping file.
        The format is, AFAIK: texname fontname [effects and filenames]
        Effects are PostScript snippets like ".177 SlantFont",
        filenames begin with one or two less-than signs. A filename
        ending in enc is an encoding file, other filenames are font
        files. This can be overridden with a left bracket: <[foobar
        indicates an encoding file named foobar.
        There is some difference between <foo.pfb and <<bar.pfb in
        subsetting, but I have no example of << in my TeX installation.
        """
        # If the map file specifies multiple encodings for a font, we
        # follow pdfTeX in choosing the last one specified. Such
        # entries are probably mistakes but they have occurred.
        # http://tex.stackexchange.com/questions/10826/
        # http://article.gmane.org/gmane.comp.tex.pdftex/4914
        empty_re = re.compile(br'%|\s*$')
        # NOTE(review): in enc2/enc4 the '.' before 'enc' is unescaped, so
        # it matches any character (e.g. b'fooXenc') — presumably intended
        # as r'\.enc'; confirm against real map files before tightening.
        word_re = re.compile(
            br'''(?x) (?:
            "<\[ (?P<enc1> [^"]+ )" | # quoted encoding marked by [
            "< (?P<enc2> [^"]+.enc)" | # quoted encoding, ends in .enc
            "<<? (?P<file1> [^"]+ )" | # quoted font file name
            " (?P<eff1> [^"]+ )" | # quoted effects or font name
            <\[ (?P<enc3> \S+ ) | # encoding marked by [
            < (?P<enc4> \S+ .enc) | # encoding, ends in .enc
            <<? (?P<file2> \S+ ) | # font file name
            (?P<eff2> \S+ ) # effects or font name
            )''')
        effects_re = re.compile(
            br'''(?x) (?P<slant> -?[0-9]*(?:\.[0-9]+)) \s* SlantFont
                    | (?P<extend>-?[0-9]*(?:\.[0-9]+)) \s* ExtendFont''')
        # Skip comment lines (starting with %) and blank lines.
        lines = (line.strip()
                 for line in file
                 if not empty_re.match(line))
        for line in lines:
            effects, encoding, filename = b'', None, None
            words = word_re.finditer(line)
            # The named groups are mutually exclusive and are
            # referenced below at an estimated order of probability of
            # occurrence based on looking at my copy of pdftex.map.
            # The font names are probably unquoted:
            w = next(words)
            texname = w.group('eff2') or w.group('eff1')
            w = next(words)
            psname = w.group('eff2') or w.group('eff1')
            for w in words:
                # Any effects are almost always quoted:
                eff = w.group('eff1') or w.group('eff2')
                if eff:
                    effects = eff
                    continue
                # Encoding files usually have the .enc suffix
                # and almost never need quoting:
                enc = (w.group('enc4') or w.group('enc3') or
                       w.group('enc2') or w.group('enc1'))
                if enc:
                    if encoding is not None:
                        _log.debug('Multiple encodings for %s = %s',
                                   texname, psname)
                    encoding = enc
                    continue
                # File names are probably unquoted:
                filename = w.group('file2') or w.group('file1')
            # Translate the raw effects bytes into a float-valued dict.
            effects_dict = {}
            for match in effects_re.finditer(effects):
                slant = match.group('slant')
                if slant:
                    effects_dict['slant'] = float(slant)
                else:
                    effects_dict['extend'] = float(match.group('extend'))
            self._font[texname] = PsFont(
                texname=texname, psname=psname, effects=effects_dict,
                encoding=encoding, filename=filename)
class Encoding(object):
    """
    Parses a \\*.enc file referenced from a psfonts.map style file.
    The format this class understands is a very limited subset of
    PostScript.
    Usage (subject to change)::
        for name in Encoding(filename):
            whatever(name)
    Parameters
    ----------
    filename : string or bytestring
    Attributes
    ----------
    encoding : list
        List of character names
    """
    __slots__ = ('encoding',)

    def __init__(self, filename):
        with open(filename, 'rb') as file:
            _log.debug('Parsing TeX encoding %s', filename)
            self.encoding = self._parse(file)
            _log.debug('Result: %s', self.encoding)

    def __iter__(self):
        yield from self.encoding

    @staticmethod
    def _parse(file):
        """
        Extract the glyph names from *file*, an iterable of bytes lines.

        Strips %-comments, then returns every ``/Name`` token found
        between the first ``[`` and the following ``]``.

        Raises
        ------
        ValueError
            If no ``[`` (or no subsequent ``]``) delimits the encoding
            vector.
        """
        # Fixed: removed an unused local (`result = []`) that was never
        # read or returned.
        # Drop %-comments, then join so the bracketed vector may span lines.
        lines = (line.split(b'%', 1)[0].strip() for line in file)
        data = b''.join(lines)
        beginning = data.find(b'[')
        if beginning < 0:
            raise ValueError("Cannot locate beginning of encoding in {}"
                             .format(file))
        data = data[beginning:]
        end = data.find(b']')
        if end < 0:
            raise ValueError("Cannot locate end of encoding in {}"
                             .format(file))
        data = data[:end]
        # Glyph names are /-prefixed tokens; delimiter characters
        # ([]{}<> and whitespace) cannot appear inside a name.
        return re.findall(br'/([^][{}<>\s]+)', data)
@lru_cache()
def find_tex_file(filename, format=None):
    """
    Find a file in the texmf tree by delegating to :program:`kpsewhich`.

    kpsewhich is the command-line front end of the kpathsea library [1]_
    used by most Unix TeX distributions and shipped with MikTeX on
    Windows.

    Parameters
    ----------
    filename : string or bytestring
    format : string or bytestring
        Forwarded as the ``--format`` option of :program:`kpsewhich` to
        restrict the search to one file type, e.g. 'tfm' or 'vf'.

    References
    ----------
    .. [1] `Kpathsea documentation <http://www.tug.org/kpathsea/>`_
        The library that :program:`kpsewhich` is part of.
    """
    # Names are expected to be pure ASCII; decode defensively as utf-8
    # so a stray byte cannot raise here.
    if isinstance(filename, bytes):
        filename = filename.decode('utf-8', errors='replace')
    if isinstance(format, bytes):
        format = format.decode('utf-8', errors='replace')
    cmd = ['kpsewhich']
    if format is not None:
        cmd.append('--format=' + format)
    cmd.append(filename)
    _log.debug('find_tex_file(%s): %s', filename, cmd)
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    out, _ = proc.communicate()
    result = out.rstrip()
    _log.debug('find_tex_file result: %s', result)
    return result.decode('ascii')
@lru_cache()
def _fontfile(cls, suffix, texname):
    # Locate "<texname><suffix>" in the texmf tree and parse it with
    # *cls* (Tfm or Vf); return None when kpsewhich finds nothing.
    filename = find_tex_file(texname + suffix)
    if not filename:
        return None
    return cls(filename)
# Convenience loaders: _tfmfile('cmr10') parses "cmr10.tfm" into a Tfm,
# _vffile('cmr10') parses "cmr10.vf" into a Vf (each returns None when
# the file is not installed; results are cached via _fontfile).
_tfmfile = partial(_fontfile, Tfm, ".tfm")
_vffile = partial(_fontfile, Vf, ".vf")
# Debugging entry point: dump the text and boxes of a dvi file.
# Usage: python dviread.py some_file.dvi [dpi]
if __name__ == '__main__':
    import sys
    fname = sys.argv[1]
    try:
        dpi = float(sys.argv[2])
    except IndexError:
        # No dpi given: Dvi then reports raw TeX units instead of pixels.
        dpi = None
    with Dvi(fname, dpi) as dvi:
        # Loaded to exercise PsfontsMap parsing; not otherwise used below.
        fontmap = PsfontsMap(find_tex_file('pdftex.map'))
        for page in dvi:
            print('=== new page ===')
            fPrev = None
            for x, y, f, c, w in page.text:
                if f != fPrev:
                    # Report each font change once.
                    print('font', f.texname, 'scaled', f._scale/pow(2.0, 20))
                    fPrev = f
                # Show printable glyph codes as characters, others as '.'.
                print(x, y, c, 32 <= c < 128 and chr(c) or '.', w)
            for x, y, w, h in page.boxes:
                print(x, y, 'BOX', w, h)
| 35.304924 | 79 | 0.566466 |
from collections import namedtuple
import enum
from functools import lru_cache, partial, wraps
import logging
import os
import re
import struct
import subprocess
import textwrap
import numpy as np
from matplotlib import cbook, rcParams
_log = logging.getLogger(__name__)
_dvistate = enum.Enum('DviState', 'pre outer inpage post_post finale')
Page = namedtuple('Page', 'text boxes height width descent')
Text = namedtuple('Text', 'x y font glyph width')
Box = namedtuple('Box', 'x y height width')
def _arg_raw(dvi, delta):
return delta
def _arg(bytes, signed, dvi, _):
return dvi._arg(bytes, signed)
def _arg_slen(dvi, delta):
if delta == 0:
return None
return dvi._arg(delta, True)
def _arg_slen1(dvi, delta):
return dvi._arg(delta+1, True)
def _arg_ulen1(dvi, delta):
return dvi._arg(delta+1, False)
def _arg_olen1(dvi, delta):
return dvi._arg(delta + 1, delta == 3)
_arg_mapping = dict(raw=_arg_raw,
u1=partial(_arg, 1, False),
u4=partial(_arg, 4, False),
s4=partial(_arg, 4, True),
slen=_arg_slen,
olen1=_arg_olen1,
slen1=_arg_slen1,
ulen1=_arg_ulen1)
def _dispatch(table, min, max=None, state=None, args=('raw',)):
def decorate(method):
get_args = [_arg_mapping[x] for x in args]
@wraps(method)
def wrapper(self, byte):
if state is not None and self.state != state:
raise ValueError("state precondition failed")
return method(self, *[f(self, byte-min) for f in get_args])
if max is None:
table[min] = wrapper
else:
for i in range(min, max+1):
assert table[i] is None
table[i] = wrapper
return wrapper
return decorate
class Dvi(object):
_dtable = [None] * 256
_dispatch = partial(_dispatch, _dtable)
def __init__(self, filename, dpi):
_log.debug('Dvi: %s', filename)
self.file = open(filename, 'rb')
self.dpi = dpi
self.fonts = {}
self.state = _dvistate.pre
self.baseline = self._get_baseline(filename)
def _get_baseline(self, filename):
if rcParams['text.latex.preview']:
base, ext = os.path.splitext(filename)
baseline_filename = base + ".baseline"
if os.path.exists(baseline_filename):
with open(baseline_filename, 'rb') as fd:
l = fd.read().split()
height, depth, width = l
return float(depth)
return None
def __enter__(self):
return self
def __exit__(self, etype, evalue, etrace):
self.close()
def __iter__(self):
while self._read():
yield self._output()
def close(self):
if not self.file.closed:
self.file.close()
def _output(self):
minx, miny, maxx, maxy = np.inf, np.inf, -np.inf, -np.inf
maxy_pure = -np.inf
for elt in self.text + self.boxes:
if isinstance(elt, Box):
x, y, h, w = elt
e = 0
else:
x, y, font, g, w = elt
h, e = font._height_depth_of(g)
minx = min(minx, x)
miny = min(miny, y - h)
maxx = max(maxx, x + w)
maxy = max(maxy, y + e)
maxy_pure = max(maxy_pure, y)
if self.dpi is None:
return Page(text=self.text, boxes=self.boxes,
width=maxx-minx, height=maxy_pure-miny,
descent=maxy-maxy_pure)
d = self.dpi / (72.27 * 2**16)
if self.baseline is None:
descent = (maxy - maxy_pure) * d
else:
descent = self.baseline
text = [Text((x-minx)*d, (maxy-y)*d - descent, f, g, w*d)
for (x, y, f, g, w) in self.text]
boxes = [Box((x-minx)*d, (maxy-y)*d - descent, h*d, w*d)
for (x, y, h, w) in self.boxes]
return Page(text=text, boxes=boxes, width=(maxx-minx)*d,
height=(maxy_pure-miny)*d, descent=descent)
def _read(self):
while True:
byte = self.file.read(1)[0]
self._dtable[byte](self, byte)
if byte == 140: # end of page
return True
if self.state is _dvistate.post_post: # end of file
self.close()
return False
def _arg(self, nbytes, signed=False):
str = self.file.read(nbytes)
value = str[0]
if signed and value >= 0x80:
value = value - 0x100
for i in range(1, nbytes):
value = 0x100*value + str[i]
return value
@_dispatch(min=0, max=127, state=_dvistate.inpage)
def _set_char_immediate(self, char):
self._put_char_real(char)
self.h += self.fonts[self.f]._width_of(char)
@_dispatch(min=128, max=131, state=_dvistate.inpage, args=('olen1',))
def _set_char(self, char):
self._put_char_real(char)
self.h += self.fonts[self.f]._width_of(char)
@_dispatch(132, state=_dvistate.inpage, args=('s4', 's4'))
def _set_rule(self, a, b):
self._put_rule_real(a, b)
self.h += b
@_dispatch(min=133, max=136, state=_dvistate.inpage, args=('olen1',))
def _put_char(self, char):
self._put_char_real(char)
def _put_char_real(self, char):
font = self.fonts[self.f]
if font._vf is None:
self.text.append(Text(self.h, self.v, font, char,
font._width_of(char)))
else:
scale = font._scale
for x, y, f, g, w in font._vf[char].text:
newf = DviFont(scale=_mul2012(scale, f._scale),
tfm=f._tfm, texname=f.texname, vf=f._vf)
self.text.append(Text(self.h + _mul2012(x, scale),
self.v + _mul2012(y, scale),
newf, g, newf._width_of(g)))
self.boxes.extend([Box(self.h + _mul2012(x, scale),
self.v + _mul2012(y, scale),
_mul2012(a, scale), _mul2012(b, scale))
for x, y, a, b in font._vf[char].boxes])
@_dispatch(137, state=_dvistate.inpage, args=('s4', 's4'))
def _put_rule(self, a, b):
self._put_rule_real(a, b)
def _put_rule_real(self, a, b):
if a > 0 and b > 0:
self.boxes.append(Box(self.h, self.v, a, b))
@_dispatch(138)
def _nop(self, _):
pass
@_dispatch(139, state=_dvistate.outer, args=('s4',)*11)
def _bop(self, c0, c1, c2, c3, c4, c5, c6, c7, c8, c9, p):
self.state = _dvistate.inpage
self.h, self.v, self.w, self.x, self.y, self.z = 0, 0, 0, 0, 0, 0
self.stack = []
self.text = [] # list of Text objects
self.boxes = [] # list of Box objects
@_dispatch(140, state=_dvistate.inpage)
def _eop(self, _):
self.state = _dvistate.outer
del self.h, self.v, self.w, self.x, self.y, self.z, self.stack
@_dispatch(141, state=_dvistate.inpage)
def _push(self, _):
self.stack.append((self.h, self.v, self.w, self.x, self.y, self.z))
@_dispatch(142, state=_dvistate.inpage)
def _pop(self, _):
self.h, self.v, self.w, self.x, self.y, self.z = self.stack.pop()
@_dispatch(min=143, max=146, state=_dvistate.inpage, args=('slen1',))
def _right(self, b):
self.h += b
@_dispatch(min=147, max=151, state=_dvistate.inpage, args=('slen',))
def _right_w(self, new_w):
if new_w is not None:
self.w = new_w
self.h += self.w
@_dispatch(min=152, max=156, state=_dvistate.inpage, args=('slen',))
def _right_x(self, new_x):
if new_x is not None:
self.x = new_x
self.h += self.x
@_dispatch(min=157, max=160, state=_dvistate.inpage, args=('slen1',))
def _down(self, a):
self.v += a
@_dispatch(min=161, max=165, state=_dvistate.inpage, args=('slen',))
def _down_y(self, new_y):
if new_y is not None:
self.y = new_y
self.v += self.y
@_dispatch(min=166, max=170, state=_dvistate.inpage, args=('slen',))
def _down_z(self, new_z):
if new_z is not None:
self.z = new_z
self.v += self.z
@_dispatch(min=171, max=234, state=_dvistate.inpage)
def _fnt_num_immediate(self, k):
self.f = k
@_dispatch(min=235, max=238, state=_dvistate.inpage, args=('olen1',))
def _fnt_num(self, new_f):
self.f = new_f
@_dispatch(min=239, max=242, args=('ulen1',))
def _xxx(self, datalen):
special = self.file.read(datalen)
_log.debug(
'Dvi._xxx: encountered special: %s',
''.join([chr(ch) if 32 <= ch < 127 else '<%02x>' % ch
for ch in special]))
@_dispatch(min=243, max=246, args=('olen1', 'u4', 'u4', 'u4', 'u1', 'u1'))
def _fnt_def(self, k, c, s, d, a, l):
self._fnt_def_real(k, c, s, d, a, l)
def _fnt_def_real(self, k, c, s, d, a, l):
n = self.file.read(a + l)
fontname = n[-l:].decode('ascii')
tfm = _tfmfile(fontname)
if tfm is None:
raise FileNotFoundError("missing font metrics file: %s" % fontname)
if c != 0 and tfm.checksum != 0 and c != tfm.checksum:
raise ValueError('tfm checksum mismatch: %s' % n)
vf = _vffile(fontname)
self.fonts[k] = DviFont(scale=s, tfm=tfm, texname=n, vf=vf)
@_dispatch(247, state=_dvistate.pre, args=('u1', 'u4', 'u4', 'u4', 'u1'))
def _pre(self, i, num, den, mag, k):
comment = self.file.read(k)
if i != 2:
raise ValueError("Unknown dvi format %d" % i)
if num != 25400000 or den != 7227 * 2**16:
raise ValueError("nonstandard units in dvi file")
# meaning: TeX always uses those exact values, so it
# should be enough for us to support those
# (There are 72.27 pt to an inch so 7227 pt =
# 7227 * 2**16 sp to 100 in. The numerator is multiplied
# by 10^5 to get units of 10**-7 meters.)
if mag != 1000:
raise ValueError("nonstandard magnification in dvi file")
# meaning: LaTeX seems to frown on setting \mag, so
# I think we can assume this is constant
self.state = _dvistate.outer
@_dispatch(248, state=_dvistate.outer)
def _post(self, _):
self.state = _dvistate.post_post
# TODO: actually read the postamble and finale?
# currently post_post just triggers closing the file
@_dispatch(249)
def _post_post(self, _):
raise NotImplementedError
@_dispatch(min=250, max=255)
def _malformed(self, offset):
raise ValueError("unknown command: byte %d", 250 + offset)
class DviFont(object):
__slots__ = ('texname', 'size', 'widths', '_scale', '_vf', '_tfm')
def __init__(self, scale, tfm, texname, vf):
if not isinstance(texname, bytes):
raise ValueError("texname must be a bytestring, got %s"
% type(texname))
self._scale, self._tfm, self.texname, self._vf = \
scale, tfm, texname, vf
self.size = scale * (72.0 / (72.27 * 2**16))
try:
nchars = max(tfm.width) + 1
except ValueError:
nchars = 0
self.widths = [(1000*tfm.width.get(char, 0)) >> 20
for char in range(nchars)]
def __eq__(self, other):
return self.__class__ == other.__class__ and \
self.texname == other.texname and self.size == other.size
def __ne__(self, other):
return not self.__eq__(other)
def _width_of(self, char):
width = self._tfm.width.get(char, None)
if width is not None:
return _mul2012(width, self._scale)
_log.debug('No width for char %d in font %s.', char, self.texname)
return 0
def _height_depth_of(self, char):
result = []
for metric, name in ((self._tfm.height, "height"),
(self._tfm.depth, "depth")):
value = metric.get(char, None)
if value is None:
_log.debug('No %s for char %d in font %s',
name, char, self.texname)
result.append(0)
else:
result.append(_mul2012(value, self._scale))
return result
class Vf(Dvi):
def __init__(self, filename):
Dvi.__init__(self, filename, 0)
try:
self._first_font = None
self._chars = {}
self._read()
finally:
self.close()
def __getitem__(self, code):
return self._chars[code]
def _read(self):
packet_char, packet_ends = None, None
packet_len, packet_width = None, None
while True:
byte = self.file.read(1)[0]
# If we are in a packet, execute the dvi instructions
if self.state is _dvistate.inpage:
byte_at = self.file.tell()-1
if byte_at == packet_ends:
self._finalize_packet(packet_char, packet_width)
packet_len, packet_char, packet_width = None, None, None
# fall through to out-of-packet code
elif byte_at > packet_ends:
raise ValueError("Packet length mismatch in vf file")
else:
if byte in (139, 140) or byte >= 243:
raise ValueError(
"Inappropriate opcode %d in vf file" % byte)
Dvi._dtable[byte](self, byte)
continue
# We are outside a packet
if byte < 242: # a short packet (length given by byte)
packet_len = byte
packet_char, packet_width = self._arg(1), self._arg(3)
packet_ends = self._init_packet(byte)
self.state = _dvistate.inpage
elif byte == 242: # a long packet
packet_len, packet_char, packet_width = \
[self._arg(x) for x in (4, 4, 4)]
self._init_packet(packet_len)
elif 243 <= byte <= 246:
k = self._arg(byte - 242, byte == 246)
c, s, d, a, l = [self._arg(x) for x in (4, 4, 4, 1, 1)]
self._fnt_def_real(k, c, s, d, a, l)
if self._first_font is None:
self._first_font = k
elif byte == 247: # preamble
i, k = self._arg(1), self._arg(1)
x = self.file.read(k)
cs, ds = self._arg(4), self._arg(4)
self._pre(i, x, cs, ds)
elif byte == 248: # postamble (just some number of 248s)
break
else:
raise ValueError("unknown vf opcode %d" % byte)
def _init_packet(self, pl):
if self.state != _dvistate.outer:
raise ValueError("Misplaced packet in vf file")
self.h, self.v, self.w, self.x, self.y, self.z = 0, 0, 0, 0, 0, 0
self.stack, self.text, self.boxes = [], [], []
self.f = self._first_font
return self.file.tell() + pl
def _finalize_packet(self, packet_char, packet_width):
self._chars[packet_char] = Page(
text=self.text, boxes=self.boxes, width=packet_width,
height=None, descent=None)
self.state = _dvistate.outer
def _pre(self, i, x, cs, ds):
if self.state is not _dvistate.pre:
raise ValueError("pre command in middle of vf file")
if i != 202:
raise ValueError("Unknown vf format %d" % i)
if len(x):
_log.debug('vf file comment: %s', x)
self.state = _dvistate.outer
# cs = checksum, ds = design size
def _fix2comp(num):
assert 0 <= num < 2**32
if num & 2**31:
return num - 2**32
else:
return num
def _mul2012(num1, num2):
# Separated into a function because >> has surprising precedence
return (num1*num2) >> 20
class Tfm(object):
__slots__ = ('checksum', 'design_size', 'width', 'height', 'depth')
def __init__(self, filename):
_log.debug('opening tfm file %s', filename)
with open(filename, 'rb') as file:
header1 = file.read(24)
lh, bc, ec, nw, nh, nd = \
struct.unpack('!6H', header1[2:14])
_log.debug('lh=%d, bc=%d, ec=%d, nw=%d, nh=%d, nd=%d',
lh, bc, ec, nw, nh, nd)
header2 = file.read(4*lh)
self.checksum, self.design_size = \
struct.unpack('!2I', header2[:8])
# there is also encoding information etc.
char_info = file.read(4*(ec-bc+1))
widths = file.read(4*nw)
heights = file.read(4*nh)
depths = file.read(4*nd)
self.width, self.height, self.depth = {}, {}, {}
widths, heights, depths = \
[struct.unpack('!%dI' % (len(x)/4), x)
for x in (widths, heights, depths)]
for idx, char in enumerate(range(bc, ec+1)):
byte0 = char_info[4*idx]
byte1 = char_info[4*idx+1]
self.width[char] = _fix2comp(widths[byte0])
self.height[char] = _fix2comp(heights[byte1 >> 4])
self.depth[char] = _fix2comp(depths[byte1 & 0xf])
PsFont = namedtuple('Font', 'texname psname effects encoding filename')
class PsfontsMap(object):
__slots__ = ('_font', '_filename')
# Create a filename -> PsfontsMap cache, so that calling
# `PsfontsMap(filename)` with the same filename a second time immediately
# returns the same object.
@lru_cache()
def __new__(cls, filename):
self = object.__new__(cls)
self._font = {}
self._filename = os.fsdecode(filename)
with open(filename, 'rb') as file:
self._parse(file)
return self
def __getitem__(self, texname):
assert isinstance(texname, bytes)
try:
result = self._font[texname]
except KeyError:
fmt = ('A PostScript file for the font whose TeX name is "{0}" '
'could not be found in the file "{1}". The dviread module '
'can only handle fonts that have an associated PostScript '
'font file. '
'This problem can often be solved by installing '
'a suitable PostScript font package in your (TeX) '
'package manager.')
msg = fmt.format(texname.decode('ascii'), self._filename)
msg = textwrap.fill(msg, break_on_hyphens=False,
break_long_words=False)
_log.info(msg)
raise
fn, enc = result.filename, result.encoding
if fn is not None and not fn.startswith(b'/'):
fn = find_tex_file(fn)
if enc is not None and not enc.startswith(b'/'):
enc = find_tex_file(result.encoding)
return result._replace(filename=fn, encoding=enc)
def _parse(self, file):
# If the map file specifies multiple encodings for a font, we
# follow pdfTeX in choosing the last one specified. Such
# entries are probably mistakes but they have occurred.
# http://tex.stackexchange.com/questions/10826/
# http://article.gmane.org/gmane.comp.tex.pdftex/4914
empty_re = re.compile(br'%|\s*$')
word_re = re.compile(
br'''(?x) (?:
"<\[ (?P<enc1> [^"]+ )" | # quoted encoding marked by [
"< (?P<enc2> [^"]+.enc)" | # quoted encoding, ends in .enc
"<<? (?P<file1> [^"]+ )" | # quoted font file name
" (?P<eff1> [^"]+ )" | # quoted effects or font name
<\[ (?P<enc3> \S+ ) | # encoding marked by [
< (?P<enc4> \S+ .enc) | # encoding, ends in .enc
<<? (?P<file2> \S+ ) | # font file name
(?P<eff2> \S+ ) # effects or font name
)''')
effects_re = re.compile(
br'''(?x) (?P<slant> -?[0-9]*(?:\.[0-9]+)) \s* SlantFont
| (?P<extend>-?[0-9]*(?:\.[0-9]+)) \s* ExtendFont''')
lines = (line.strip()
for line in file
if not empty_re.match(line))
for line in lines:
effects, encoding, filename = b'', None, None
words = word_re.finditer(line)
# The named groups are mutually exclusive and are
# referenced below at an estimated order of probability of
# occurrence based on looking at my copy of pdftex.map.
# The font names are probably unquoted:
w = next(words)
texname = w.group('eff2') or w.group('eff1')
w = next(words)
psname = w.group('eff2') or w.group('eff1')
for w in words:
# Any effects are almost always quoted:
eff = w.group('eff1') or w.group('eff2')
if eff:
effects = eff
continue
# Encoding files usually have the .enc suffix
# and almost never need quoting:
enc = (w.group('enc4') or w.group('enc3') or
w.group('enc2') or w.group('enc1'))
if enc:
if encoding is not None:
_log.debug('Multiple encodings for %s = %s',
texname, psname)
encoding = enc
continue
# File names are probably unquoted:
filename = w.group('file2') or w.group('file1')
effects_dict = {}
for match in effects_re.finditer(effects):
slant = match.group('slant')
if slant:
effects_dict['slant'] = float(slant)
else:
effects_dict['extend'] = float(match.group('extend'))
self._font[texname] = PsFont(
texname=texname, psname=psname, effects=effects_dict,
encoding=encoding, filename=filename)
class Encoding(object):
__slots__ = ('encoding',)
def __init__(self, filename):
with open(filename, 'rb') as file:
_log.debug('Parsing TeX encoding %s', filename)
self.encoding = self._parse(file)
_log.debug('Result: %s', self.encoding)
def __iter__(self):
yield from self.encoding
@staticmethod
def _parse(file):
result = []
lines = (line.split(b'%', 1)[0].strip() for line in file)
data = b''.join(lines)
beginning = data.find(b'[')
if beginning < 0:
raise ValueError("Cannot locate beginning of encoding in {}"
.format(file))
data = data[beginning:]
end = data.find(b']')
if end < 0:
raise ValueError("Cannot locate end of encoding in {}"
.format(file))
data = data[:end]
return re.findall(br'/([^][{}<>\s]+)', data)
@lru_cache()
def find_tex_file(filename, format=None):
# we expect these to always be ascii encoded, but use utf-8
# out of caution
if isinstance(filename, bytes):
filename = filename.decode('utf-8', errors='replace')
if isinstance(format, bytes):
format = format.decode('utf-8', errors='replace')
cmd = ['kpsewhich']
if format is not None:
cmd += ['--format=' + format]
cmd += [filename]
_log.debug('find_tex_file(%s): %s', filename, cmd)
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE)
result = pipe.communicate()[0].rstrip()
_log.debug('find_tex_file result: %s', result)
return result.decode('ascii')
@lru_cache()
def _fontfile(cls, suffix, texname):
filename = find_tex_file(texname + suffix)
return cls(filename) if filename else None
_tfmfile = partial(_fontfile, Tfm, ".tfm")
_vffile = partial(_fontfile, Vf, ".vf")
if __name__ == '__main__':
import sys
fname = sys.argv[1]
try:
dpi = float(sys.argv[2])
except IndexError:
dpi = None
with Dvi(fname, dpi) as dvi:
fontmap = PsfontsMap(find_tex_file('pdftex.map'))
for page in dvi:
print('=== new page ===')
fPrev = None
for x, y, f, c, w in page.text:
if f != fPrev:
print('font', f.texname, 'scaled', f._scale/pow(2.0, 20))
fPrev = f
print(x, y, c, 32 <= c < 128 and chr(c) or '.', w)
for x, y, w, h in page.boxes:
print(x, y, 'BOX', w, h)
| true | true |
f738fb755883c2f74681fac8b121abf0c9cd6e56 | 5,175 | py | Python | vehicle_detection.py | suhongkim/SSD-Vehicle-Detector | 8337d237f4c7923e55d02747ec37a60681e5beff | [
"MIT"
] | null | null | null | vehicle_detection.py | suhongkim/SSD-Vehicle-Detector | 8337d237f4c7923e55d02747ec37a60681e5beff | [
"MIT"
] | null | null | null | vehicle_detection.py | suhongkim/SSD-Vehicle-Detector | 8337d237f4c7923e55d02747ec37a60681e5beff | [
"MIT"
] | null | null | null | import os
import torch
from torch.utils.data import DataLoader
from cityscape_dataset import CityScapeDataset
from ssd_util import load_dataset_list, load_dataset_list_original, show_loss, show_log
from ssd_net import SSD
from ssd_train import train_net
from ssd_test import test_net
# Training / evaluation driver for an SSD vehicle detector on Cityscapes.
if __name__ == '__main__':
    # Define Label Group
    # Maps each training class to the Cityscapes label names it absorbs;
    # 'background' stays empty (the implicit negative class).
    dataset_label_group = {
        'background': [],
        'sm_veh': ['motorcycle', 'motorcyclegroup', 'bicycle', 'bicyclegroup'],
        'med_veh': ['car', 'cargroup'],
        # 'ego_veh': ['ego vehicle'],
        'big_veh': ['bus', 'trailer', 'truck'],
        # 'people': ['person', 'persongroup'],
        # 'riders': ['rider', 'ridergroup']
    }
    # Define Configurations
    # 'results_path' is only consulted when testing without training
    # (is_test and not is_train) — see the last branch below.
    config = {'is_gpu': True,
              'debug': False,
              'n_aug': 1,
              'n_batch': 64,
              'n_worker': 4,
              'lr': 0.001,
              'max_epoch': 100,
              'save_epochs': [10,20,30,40,50,60,70,80,90],
              'is_lr_scheduled': False,
              # 'class_labels': ['background', 'cargroup'],
              # 'class_labels': ['background', 'persongroup', 'person', 'cargroup', 'car'],
              'label_groups': dataset_label_group,
              'class_labels': list(dataset_label_group.keys()),
              'is_train': True,
              'is_test': True,
              'results_path': '/home/suhongk/sfuhome/CMPT742/Lab3/vehicle_detection_v2/results/SSD__28th_16:47_best_model.pth'
              }
    # crop original image
    # person + persongroup , car+Cargroup
    # Overfitted data for the unaug
    # check training set
    # Default Cuda Setting -------------------------------------------------
    from torch.multiprocessing import Pool, Process, set_start_method
    try:
        # 'spawn' is required for CUDA tensors in worker processes; a
        # RuntimeError means the start method was already set — ignore it.
        set_start_method('spawn')
    except RuntimeError:
        pass
    if torch.cuda.is_available():
        torch.set_default_tensor_type('torch.cuda.FloatTensor')
        torch.backends.cudnn.benchmark = True
    # load dataset_list -------------------------------------------------
    # 'is_gpu' doubles as a cluster-vs-local switch for the dataset paths.
    if config['is_gpu']:
        sample_path = '/home/datasets/full_dataset/train_extra/'
        label_path = '/home/datasets/full_dataset_labels/train_extra'
    else:
        sample_path = '../cityscapes_samples/'
        label_path = '../cityscapes_samples_labels/'
    dataset_list = load_dataset_list(sample_path, label_path, config['label_groups'])
    # dataset_list = load_dataset_list_original(sample_path, label_path, config['class_labels'])
    # Define dataset/dataloader -------------------------------------------
    # Split: first 30% train, next 10% validation, remaining 60% test.
    num_train = int(0.3 * len(dataset_list))
    num_valid = int(0.1 * len(dataset_list))
    if config['is_train']:
        train_dataset = CityScapeDataset(dataset_list[:num_train], n_augmented=config['n_aug'], debug=config['debug'])
        train_loader = DataLoader(train_dataset, batch_size=config['n_batch'], shuffle=True, num_workers=config['n_worker'])
        print('Total training items: ', len(train_dataset))
        print('Total training batches size in one epoch: ', len(train_loader))
        valid_dataset = CityScapeDataset(dataset_list[num_train:(num_train + num_valid)], debug=config['debug'])
        valid_loader = DataLoader(valid_dataset, batch_size=config['n_batch'], shuffle=True, num_workers=config['n_worker'])
        print('Total validating items: ', len(valid_dataset))
        print('Total validating batches size in one epoch: ', len(valid_loader))
    if config['is_test']:
        test_dataset = CityScapeDataset(dataset_list[(num_train + num_valid):], debug=config['debug'])
        print('Total testing items: ', len(test_dataset))
    # Train network -----------------------------------------------------
    if config['is_train']:
        lab_results_dir = "./results/"  # for the results
        # train_net returns the path prefix of the saved artifacts; the
        # '.loss' / '__model.pth' suffixes below are appended to it.
        results_path = train_net(train_loader, valid_loader, config['class_labels'], lab_results_dir,
                                 learning_rate=config['lr'], is_lr_scheduled=config['is_lr_scheduled'],
                                 max_epoch=config['max_epoch'], save_epochs=config['save_epochs'])
        print('\n\n-----------------------\n\tresult_path:', results_path)
        if not config['is_gpu']:
            # Loss-curve plotting is only done on the local (non-GPU) setup.
            show_loss(results_path + '.loss')
            # show_log(results_path + '__train.log')
            # show_log(results_path + '__valid.log')
        if config['is_test']:
            test_net(test_dataset, config['class_labels'], (results_path + '__model.pth'))
    # Train network -----------------------------------------------------
    # Test-only mode: evaluate a previously saved checkpoint.
    if config['is_test'] and not config['is_train']:
        test_net(test_dataset, config['class_labels'], config['results_path'])
        # pass
    # Test Code ----------------------------------------------------------
    # idx, (imgs, bbox_label, bbox_indices, _) = next(enumerate(train_loader))
    # print(bbox_indices)
    # test_dataset.__getitem__(9)
    # net = SSD(len(class_labels))
    # net.cuda()
    # net.forward(torch.rand(1, 3, 300, 300))
| 45.394737 | 127 | 0.582802 | import os
import torch
from torch.utils.data import DataLoader
from cityscape_dataset import CityScapeDataset
from ssd_util import load_dataset_list, load_dataset_list_original, show_loss, show_log
from ssd_net import SSD
from ssd_train import train_net
from ssd_test import test_net
if __name__ == '__main__':
dataset_label_group = {
'background': [],
'sm_veh': ['motorcycle', 'motorcyclegroup', 'bicycle', 'bicyclegroup'],
'med_veh': ['car', 'cargroup'],
'big_veh': ['bus', 'trailer', 'truck'],
}
config = {'is_gpu': True,
'debug': False,
'n_aug': 1,
'n_batch': 64,
'n_worker': 4,
'lr': 0.001,
'max_epoch': 100,
'save_epochs': [10,20,30,40,50,60,70,80,90],
'is_lr_scheduled': False,
'label_groups': dataset_label_group,
'class_labels': list(dataset_label_group.keys()),
'is_train': True,
'is_test': True,
'results_path': '/home/suhongk/sfuhome/CMPT742/Lab3/vehicle_detection_v2/results/SSD__28th_16:47_best_model.pth'
}
from torch.multiprocessing import Pool, Process, set_start_method
try:
set_start_method('spawn')
except RuntimeError:
pass
if torch.cuda.is_available():
torch.set_default_tensor_type('torch.cuda.FloatTensor')
torch.backends.cudnn.benchmark = True
if config['is_gpu']:
sample_path = '/home/datasets/full_dataset/train_extra/'
label_path = '/home/datasets/full_dataset_labels/train_extra'
else:
sample_path = '../cityscapes_samples/'
label_path = '../cityscapes_samples_labels/'
dataset_list = load_dataset_list(sample_path, label_path, config['label_groups'])
num_train = int(0.3 * len(dataset_list))
num_valid = int(0.1 * len(dataset_list))
if config['is_train']:
train_dataset = CityScapeDataset(dataset_list[:num_train], n_augmented=config['n_aug'], debug=config['debug'])
train_loader = DataLoader(train_dataset, batch_size=config['n_batch'], shuffle=True, num_workers=config['n_worker'])
print('Total training items: ', len(train_dataset))
print('Total training batches size in one epoch: ', len(train_loader))
valid_dataset = CityScapeDataset(dataset_list[num_train:(num_train + num_valid)], debug=config['debug'])
valid_loader = DataLoader(valid_dataset, batch_size=config['n_batch'], shuffle=True, num_workers=config['n_worker'])
print('Total validating items: ', len(valid_dataset))
print('Total validating batches size in one epoch: ', len(valid_loader))
if config['is_test']:
test_dataset = CityScapeDataset(dataset_list[(num_train + num_valid):], debug=config['debug'])
print('Total testing items: ', len(test_dataset))
if config['is_train']:
lab_results_dir = "./results/"
results_path = train_net(train_loader, valid_loader, config['class_labels'], lab_results_dir,
learning_rate=config['lr'], is_lr_scheduled=config['is_lr_scheduled'],
max_epoch=config['max_epoch'], save_epochs=config['save_epochs'])
print('\n\n-----------------------\n\tresult_path:', results_path)
if not config['is_gpu']:
show_loss(results_path + '.loss')
if config['is_test']:
test_net(test_dataset, config['class_labels'], (results_path + '__model.pth'))
if config['is_test'] and not config['is_train']:
test_net(test_dataset, config['class_labels'], config['results_path'])
| true | true |
f738feb48528bc44a1c44d1a4c3cafea3b88882f | 2,149 | py | Python | CMSIS/DSP/Examples/ARM/arm_bayes_example/train.py | DavidLesnjak/CMSIS_5 | e0848410d137758a3356a5ee94ca4501cea708a8 | [
"Apache-2.0"
] | 2,293 | 2016-02-25T06:47:33.000Z | 2022-03-29T16:44:02.000Z | CMSIS/DSP/Examples/ARM/arm_bayes_example/train.py | DavidLesnjak/CMSIS_5 | e0848410d137758a3356a5ee94ca4501cea708a8 | [
"Apache-2.0"
] | 1,125 | 2016-02-27T09:56:01.000Z | 2022-03-31T13:57:05.000Z | CMSIS/DSP/Examples/ARM/arm_bayes_example/train.py | DavidLesnjak/CMSIS_5 | e0848410d137758a3356a5ee94ca4501cea708a8 | [
"Apache-2.0"
] | 1,160 | 2016-02-27T09:06:10.000Z | 2022-03-31T19:06:24.000Z | from sklearn.naive_bayes import GaussianNB
import random
import numpy as np
import math
from pylab import scatter,figure, clf, plot, xlabel, ylabel, xlim, ylim, title, grid, axes, show,semilogx, semilogy
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties
# Generation of data to train the classifier
# 100 vectors are generated. Vector have dimension 2 so can be represented as points
NBVECS = 100
VECDIM = 2
# 3 cluster of points are generated
ballRadius = 1.0
x1 = [1.5, 1] + ballRadius * np.random.randn(NBVECS,VECDIM)
x2 = [-1.5, 1] + ballRadius * np.random.randn(NBVECS,VECDIM)
x3 = [0, -3] + ballRadius * np.random.randn(NBVECS,VECDIM)
# All points are concatenated
X_train=np.concatenate((x1,x2,x3))
# The classes are 0,1 and 2.
Y_train=np.concatenate((np.zeros(NBVECS),np.ones(NBVECS),2*np.ones(NBVECS)))
gnb = GaussianNB()
gnb.fit(X_train, Y_train)
print("Testing")
y_pred = gnb.predict([[1.5,1.0]])
print(y_pred)
y_pred = gnb.predict([[-1.5,1.0]])
print(y_pred)
y_pred = gnb.predict([[0,-3.0]])
print(y_pred)
# Dump of data for CMSIS-DSP
print("Parameters")
# Gaussian averages
print("Theta = ",list(np.reshape(gnb.theta_,np.size(gnb.theta_))))
# Gaussian variances
print("Sigma = ",list(np.reshape(gnb.sigma_,np.size(gnb.sigma_))))
# Class priors
print("Prior = ",list(np.reshape(gnb.class_prior_,np.size(gnb.class_prior_))))
print("Epsilon = ",gnb.epsilon_)
# Some bounds are computed for the graphical representation
x_min = X_train[:, 0].min()
x_max = X_train[:, 0].max()
y_min = X_train[:, 1].min()
y_max = X_train[:, 1].max()
font = FontProperties()
font.set_size(20)
r=plt.figure()
plt.axis('off')
plt.text(1.5,1.0,"A", verticalalignment='center', horizontalalignment='center',fontproperties=font)
plt.text(-1.5,1.0,"B",verticalalignment='center', horizontalalignment='center', fontproperties=font)
plt.text(0,-3,"C", verticalalignment='center', horizontalalignment='center',fontproperties=font)
scatter(x1[:,0],x1[:,1],s=1.0,color='#FF6B00')
scatter(x2[:,0],x2[:,1],s=1.0,color='#95D600')
scatter(x3[:,0],x3[:,1],s=1.0,color='#00C1DE')
#r.savefig('fig.jpeg')
#plt.close(r)
show() | 29.040541 | 115 | 0.717543 | from sklearn.naive_bayes import GaussianNB
import random
import numpy as np
import math
from pylab import scatter,figure, clf, plot, xlabel, ylabel, xlim, ylim, title, grid, axes, show,semilogx, semilogy
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties
NBVECS = 100
VECDIM = 2
ballRadius = 1.0
x1 = [1.5, 1] + ballRadius * np.random.randn(NBVECS,VECDIM)
x2 = [-1.5, 1] + ballRadius * np.random.randn(NBVECS,VECDIM)
x3 = [0, -3] + ballRadius * np.random.randn(NBVECS,VECDIM)
X_train=np.concatenate((x1,x2,x3))
Y_train=np.concatenate((np.zeros(NBVECS),np.ones(NBVECS),2*np.ones(NBVECS)))
gnb = GaussianNB()
gnb.fit(X_train, Y_train)
print("Testing")
y_pred = gnb.predict([[1.5,1.0]])
print(y_pred)
y_pred = gnb.predict([[-1.5,1.0]])
print(y_pred)
y_pred = gnb.predict([[0,-3.0]])
print(y_pred)
print("Parameters")
print("Theta = ",list(np.reshape(gnb.theta_,np.size(gnb.theta_))))
print("Sigma = ",list(np.reshape(gnb.sigma_,np.size(gnb.sigma_))))
print("Prior = ",list(np.reshape(gnb.class_prior_,np.size(gnb.class_prior_))))
print("Epsilon = ",gnb.epsilon_)
x_min = X_train[:, 0].min()
x_max = X_train[:, 0].max()
y_min = X_train[:, 1].min()
y_max = X_train[:, 1].max()
font = FontProperties()
font.set_size(20)
r=plt.figure()
plt.axis('off')
plt.text(1.5,1.0,"A", verticalalignment='center', horizontalalignment='center',fontproperties=font)
plt.text(-1.5,1.0,"B",verticalalignment='center', horizontalalignment='center', fontproperties=font)
plt.text(0,-3,"C", verticalalignment='center', horizontalalignment='center',fontproperties=font)
scatter(x1[:,0],x1[:,1],s=1.0,color='#FF6B00')
scatter(x2[:,0],x2[:,1],s=1.0,color='#95D600')
scatter(x3[:,0],x3[:,1],s=1.0,color='#00C1DE')
show() | true | true |
f738ff1b59bd6407c45587658b34887b5f3bc25d | 1,511 | py | Python | bilibili/bilibili/spiders/relationship.py | PyDee/Spiders | 6fc0a414060032b5ba4332302285e3fcc9a6113e | [
"Apache-2.0"
] | 6 | 2020-06-02T16:22:58.000Z | 2021-09-18T03:20:16.000Z | bilibili/bilibili/spiders/relationship.py | PyDee/Spiders | 6fc0a414060032b5ba4332302285e3fcc9a6113e | [
"Apache-2.0"
] | 4 | 2021-03-31T19:54:37.000Z | 2022-03-12T00:33:41.000Z | bilibili/bilibili/spiders/relationship.py | PyDee/Spiders | 6fc0a414060032b5ba4332302285e3fcc9a6113e | [
"Apache-2.0"
] | 5 | 2020-06-02T16:23:00.000Z | 2021-09-03T02:16:15.000Z | # -*- coding: utf-8 -*-
"""
获取用户关注
"""
import json
import redis
from scrapy_redis.spiders import RedisSpider
from ..items import Relationship
from .bos_filter import RedisDB, BosFilter
class RelationshipSpider(RedisSpider):
rdb = RedisDB()
r = redis.Redis(host="127.0.0.1")
name = 'relationship'
allowed_domains = ['api.bilibili.com']
redis_key = "bili_relation_list"
redis_set = "bili_relation_set"
def parse(self, response):
ret_dict = json.loads(response.text)
status_code = ret_dict.get('code')
if not status_code:
if 'data' in ret_dict.keys():
info_dict = ret_dict.get('data')
total = info_dict.get('total')
focus_list = info_dict.get('list')
user_id = response.url.strip('https://api.bilibili.com/x/relation/followings?vmid=').split('&')[0]
for focus_item in focus_list:
focus_info = Relationship()
focus_info['total'] = total
focus_info['user_id'] = user_id
focus_info['focus_id'] = focus_item.get('mid')
focus_info['focus_name'] = focus_item.get('uname')
focus_info['focus_face'] = focus_item.get('face')
focus_info['introduction'] = focus_item.get('sign')
# 将 follower id 写入到待抓取队列
self.r.sadd(self.redis_set, focus_info.get('focus_id'))
yield focus_info
| 36.853659 | 114 | 0.577763 |
import json
import redis
from scrapy_redis.spiders import RedisSpider
from ..items import Relationship
from .bos_filter import RedisDB, BosFilter
class RelationshipSpider(RedisSpider):
rdb = RedisDB()
r = redis.Redis(host="127.0.0.1")
name = 'relationship'
allowed_domains = ['api.bilibili.com']
redis_key = "bili_relation_list"
redis_set = "bili_relation_set"
def parse(self, response):
ret_dict = json.loads(response.text)
status_code = ret_dict.get('code')
if not status_code:
if 'data' in ret_dict.keys():
info_dict = ret_dict.get('data')
total = info_dict.get('total')
focus_list = info_dict.get('list')
user_id = response.url.strip('https://api.bilibili.com/x/relation/followings?vmid=').split('&')[0]
for focus_item in focus_list:
focus_info = Relationship()
focus_info['total'] = total
focus_info['user_id'] = user_id
focus_info['focus_id'] = focus_item.get('mid')
focus_info['focus_name'] = focus_item.get('uname')
focus_info['focus_face'] = focus_item.get('face')
focus_info['introduction'] = focus_item.get('sign')
self.r.sadd(self.redis_set, focus_info.get('focus_id'))
yield focus_info
| true | true |
f738ff759b6f7cf53b2e7583c0b4df87d9a35a39 | 1,352 | py | Python | kaggle_SIIM-ACR_Pneumothorax_Segmentation/utils/loss.py | allen050883/Project | 22a9f1e466e595d8808e59fc58801881f3399df4 | [
"MIT"
] | null | null | null | kaggle_SIIM-ACR_Pneumothorax_Segmentation/utils/loss.py | allen050883/Project | 22a9f1e466e595d8808e59fc58801881f3399df4 | [
"MIT"
] | null | null | null | kaggle_SIIM-ACR_Pneumothorax_Segmentation/utils/loss.py | allen050883/Project | 22a9f1e466e595d8808e59fc58801881f3399df4 | [
"MIT"
] | null | null | null | import torch
import torch.nn.functional as F
def dice_score(inputs, targets, smooth=1):
# Flatten label and prediction tensors
inputs = inputs.view(-1)
targets = targets.view(-1)
intersection = (inputs * targets).sum()
dice_score = (2.*intersection + smooth)/(inputs.sum() + targets.sum() + smooth)
return dice_score
def get_dice_loss(inputs, targets, smooth=1):
# Flatten label and prediction tensors
inputs = inputs.view(-1)
targets = targets.view(-1)
intersection = (inputs * targets).sum()
dice_loss = 1 - (2.*intersection + smooth)/(inputs.sum() + targets.sum() + smooth)
return dice_loss
def get_focal_loss(inputs, targets, alpha=0.8, gamma=2):
# Flatten label and prediction tensors
inputs = inputs.view(-1)
targets = targets.view(-1)
# First compute binary cross-entropy
BCE = F.binary_cross_entropy(inputs, targets, reduction='mean')
BCE_EXP = torch.exp(-BCE)
focal_loss = alpha * (1-BCE_EXP)**gamma * BCE
return focal_loss
def combo_loss(inputs, targets):
dice_loss = get_dice_loss(inputs, targets)
BCE = F.binary_cross_entropy(inputs, targets, reduction='mean')
focal_loss = get_focal_loss(inputs, targets)
return focal_loss - torch.log(1-dice_loss)
| 32.190476 | 88 | 0.647189 | import torch
import torch.nn.functional as F
def dice_score(inputs, targets, smooth=1):
inputs = inputs.view(-1)
targets = targets.view(-1)
intersection = (inputs * targets).sum()
dice_score = (2.*intersection + smooth)/(inputs.sum() + targets.sum() + smooth)
return dice_score
def get_dice_loss(inputs, targets, smooth=1):
inputs = inputs.view(-1)
targets = targets.view(-1)
intersection = (inputs * targets).sum()
dice_loss = 1 - (2.*intersection + smooth)/(inputs.sum() + targets.sum() + smooth)
return dice_loss
def get_focal_loss(inputs, targets, alpha=0.8, gamma=2):
inputs = inputs.view(-1)
targets = targets.view(-1)
BCE = F.binary_cross_entropy(inputs, targets, reduction='mean')
BCE_EXP = torch.exp(-BCE)
focal_loss = alpha * (1-BCE_EXP)**gamma * BCE
return focal_loss
def combo_loss(inputs, targets):
dice_loss = get_dice_loss(inputs, targets)
BCE = F.binary_cross_entropy(inputs, targets, reduction='mean')
focal_loss = get_focal_loss(inputs, targets)
return focal_loss - torch.log(1-dice_loss)
| true | true |
f738ffc00adfe3ae0e5db9437b05b358c231180f | 296 | py | Python | daiquiri/stats/admin.py | agy-why/daiquiri | 4d3e2ce51e202d5a8f1df404a0094a4e018dcb4d | [
"Apache-2.0"
] | 14 | 2018-12-23T18:35:02.000Z | 2021-12-15T04:55:12.000Z | daiquiri/stats/admin.py | agy-why/daiquiri | 4d3e2ce51e202d5a8f1df404a0094a4e018dcb4d | [
"Apache-2.0"
] | 40 | 2018-12-20T12:44:05.000Z | 2022-03-21T11:35:20.000Z | daiquiri/stats/admin.py | agy-why/daiquiri | 4d3e2ce51e202d5a8f1df404a0094a4e018dcb4d | [
"Apache-2.0"
] | 5 | 2019-05-16T08:03:35.000Z | 2021-08-23T20:03:11.000Z | from django.contrib import admin
from .models import Record
class RecordAdmin(admin.ModelAdmin):
search_fields = ('resource', 'user__username')
list_display = ('time', 'resource_type', 'client_ip', 'user')
list_display_links = ('time', )
admin.site.register(Record, RecordAdmin)
| 22.769231 | 65 | 0.722973 | from django.contrib import admin
from .models import Record
class RecordAdmin(admin.ModelAdmin):
search_fields = ('resource', 'user__username')
list_display = ('time', 'resource_type', 'client_ip', 'user')
list_display_links = ('time', )
admin.site.register(Record, RecordAdmin)
| true | true |
f73900a0283ab4836b6eccbee5cb864bc5a14b42 | 3,725 | py | Python | main.py | wcontreras785/Controlador-Termohigrometro-Micropayhon | a15c4b71eec8f0e8fb72f20e9de2e1032b97685c | [
"MIT"
] | null | null | null | main.py | wcontreras785/Controlador-Termohigrometro-Micropayhon | a15c4b71eec8f0e8fb72f20e9de2e1032b97685c | [
"MIT"
] | null | null | null | main.py | wcontreras785/Controlador-Termohigrometro-Micropayhon | a15c4b71eec8f0e8fb72f20e9de2e1032b97685c | [
"MIT"
] | null | null | null | from machine import SoftI2C, Pin, RTC
import onewire, ds18x20, time
import utime, dht, network, urequests
import OLED, ntptime
temp=30
url = "https://api.thingspeak.com/update?api_key=CTVG0E49RI7RSV78"
#------------------------------------------WIFI-------------------
def conectaWifi (red, password):
global miRed
miRed = network.WLAN(network.STA_IF)
if not miRed.isconnected(): #Si no está conectado…
miRed.active(True) #activa la interface
miRed.connect(red, password) #Intenta conectar con la red
print('Conectando a la red', red +"…")
timeout = time.time ()
while not miRed.isconnected(): #Mientras no se conecte..
if (time.ticks_diff (time.time (), timeout) > 10):
return False
return True
if conectaWifi (".", "V5MVH6F3SVDU"):
print ("Conexión exitosa!")
print('Datos de la red (IP/netmask/gw/DNS):', miRed.ifconfig())
#--------------------------SINCRONIZACIÓN DEL RELOJ INTERNO E IMPRESIÓN DE FECHA Y HORA----------------------
ntptime.settime()
rtc= RTC()
(year, month, mday, weekday, hour, minute, second, milisecond)=rtc.datetime()
rtc.init((year, month, mday, weekday, hour-5, minute, second, milisecond)) # GMT corrección -Colombia-: GMT-5
def rfecha():
print ("Fecha: {:02d}/{:02d}/{}".format(rtc.datetime()[2],
rtc.datetime()[1],
rtc.datetime()[0]))
def rhora():
print ("Hora: {:02d}:{:02d}:{:02d}".format(rtc.datetime()[4],
rtc.datetime()[5],
rtc.datetime()[6]))
rtcdate=rfecha()
#---------------------------------Oled------
i2c = SoftI2C(scl=Pin(4), sda=Pin(16)) # pines I2C
oled = OLED.SSD1306_I2C(128,64,i2c)
#-----------------------------------DHT-------
pindht =Pin(15, Pin.IN)
dht11 = dht.DHT11(pindht)
#-----------------------------------DS18B20----------
ds_pin=Pin(5)
ds = ds18x20.DS18X20(onewire.OneWire(ds_pin))
roms = ds.scan()
#-----------------------------------mocs------------------
moc1=Pin(27, Pin.OUT)
moc2=Pin(32, Pin.OUT)
#-----------------------------------Botones--------------
btn1=Pin(23, Pin.IN, Pin.PULL_UP) # 1 AL OPRIMIR 0
btn2=Pin(22, Pin.IN, Pin.PULL_DOWN)# 0 AL OPRIMIR 1
def ftemp():
global temp
print(round(temp,2))
utime.sleep_ms(200)
if btn1.value()==0:
temp=temp+0.1
if btn2.value()==1:
temp=temp-0.1
def calor():
if t >= temp or t2 >= temp:
moc1.value(0)
else:
utime.sleep_ms(500)
moc1.value(1)
print("Ajustando Temperatura")
rfecha()
rhora()
def motor():
utime.sleep(2)
moc2.value(1)
utime.sleep(2)
moc2.value(0)
while True:
ftemp()
utime.sleep(1)
dht11.measure()
t=dht11.temperature()
h=dht11.humidity()
#----------------------------------DS18B20----------
ds.convert_temp()
utime.sleep_ms(750) #The reading temperature needs at least 750ms
for rom in roms:
oled.fill(0)
t2=ds.read_temp(rom)
oled.text("T2: "+ str(round(t2,2)),5,30)
oled.text("T1: "+ str(t) +" C",5,20)
oled.text("Hum:"+ str(h) +" %",5,50)
oled.text("tem:"+ str(round(temp,2)) +" C",5,0)
oled.show()
motor()
calor()
time.sleep(4)
respuesta = urequests.get(url+"&field1="+str(temp)+"&field2="+str(t)+"&field3="+str(t2)+"&field4="+str(h))
print(respuesta.text)
print (respuesta.status_code)
respuesta.close ()
if __name__==("__main__"):
main()
| 30.040323 | 112 | 0.506577 | from machine import SoftI2C, Pin, RTC
import onewire, ds18x20, time
import utime, dht, network, urequests
import OLED, ntptime
temp=30
url = "https://api.thingspeak.com/update?api_key=CTVG0E49RI7RSV78"
def conectaWifi (red, password):
global miRed
miRed = network.WLAN(network.STA_IF)
if not miRed.isconnected():
miRed.active(True)
miRed.connect(red, password)
print('Conectando a la red', red +"…")
timeout = time.time ()
while not miRed.isconnected():
if (time.ticks_diff (time.time (), timeout) > 10):
return False
return True
if conectaWifi (".", "V5MVH6F3SVDU"):
print ("Conexión exitosa!")
print('Datos de la red (IP/netmask/gw/DNS):', miRed.ifconfig())
ntptime.settime()
rtc= RTC()
(year, month, mday, weekday, hour, minute, second, milisecond)=rtc.datetime()
rtc.init((year, month, mday, weekday, hour-5, minute, second, milisecond))
def rfecha():
print ("Fecha: {:02d}/{:02d}/{}".format(rtc.datetime()[2],
rtc.datetime()[1],
rtc.datetime()[0]))
def rhora():
print ("Hora: {:02d}:{:02d}:{:02d}".format(rtc.datetime()[4],
rtc.datetime()[5],
rtc.datetime()[6]))
rtcdate=rfecha()
i2c = SoftI2C(scl=Pin(4), sda=Pin(16))
oled = OLED.SSD1306_I2C(128,64,i2c)
pindht =Pin(15, Pin.IN)
dht11 = dht.DHT11(pindht)
ds_pin=Pin(5)
ds = ds18x20.DS18X20(onewire.OneWire(ds_pin))
roms = ds.scan()
moc1=Pin(27, Pin.OUT)
moc2=Pin(32, Pin.OUT)
btn1=Pin(23, Pin.IN, Pin.PULL_UP)
btn2=Pin(22, Pin.IN, Pin.PULL_DOWN)
def ftemp():
global temp
print(round(temp,2))
utime.sleep_ms(200)
if btn1.value()==0:
temp=temp+0.1
if btn2.value()==1:
temp=temp-0.1
def calor():
if t >= temp or t2 >= temp:
moc1.value(0)
else:
utime.sleep_ms(500)
moc1.value(1)
print("Ajustando Temperatura")
rfecha()
rhora()
def motor():
utime.sleep(2)
moc2.value(1)
utime.sleep(2)
moc2.value(0)
while True:
ftemp()
utime.sleep(1)
dht11.measure()
t=dht11.temperature()
h=dht11.humidity()
ds.convert_temp()
utime.sleep_ms(750)
for rom in roms:
oled.fill(0)
t2=ds.read_temp(rom)
oled.text("T2: "+ str(round(t2,2)),5,30)
oled.text("T1: "+ str(t) +" C",5,20)
oled.text("Hum:"+ str(h) +" %",5,50)
oled.text("tem:"+ str(round(temp,2)) +" C",5,0)
oled.show()
motor()
calor()
time.sleep(4)
respuesta = urequests.get(url+"&field1="+str(temp)+"&field2="+str(t)+"&field3="+str(t2)+"&field4="+str(h))
print(respuesta.text)
print (respuesta.status_code)
respuesta.close ()
if __name__==("__main__"):
main()
| true | true |
f73900beec9d1bdce44f7fe9ddbae6a26c87436c | 34,396 | py | Python | scripts/interp_sea_level_ICESat2_ATL07.py | mrsiegfried/read-ICESat-2 | 1406b92691d284616ca6c9d72646eca4592d1f1d | [
"MIT"
] | null | null | null | scripts/interp_sea_level_ICESat2_ATL07.py | mrsiegfried/read-ICESat-2 | 1406b92691d284616ca6c9d72646eca4592d1f1d | [
"MIT"
] | null | null | null | scripts/interp_sea_level_ICESat2_ATL07.py | mrsiegfried/read-ICESat-2 | 1406b92691d284616ca6c9d72646eca4592d1f1d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
u"""
interp_sea_level_ICESat2_ATL07.py
Written by Tyler Sutterley (05/2021)
Interpolates sea level anomalies (sla), absolute dynamic topography (adt) and
mean dynamic topography (mdt) to times and locations of ICESat-2 ATL07 data
https://www.aviso.altimetry.fr/en/data/products/sea-surface-height-products/
global/msla-h.html
ftp://ftp.sltac.cls.fr/Core/SEALEVEL_GLO_PHY_L4_REP_OBSERVATIONS_008_047/
dataset-duacs-rep-global-merged-allsat-phy-l4-v3
Note that the AVISO sea level data are gzip compressed netCDF4 files
COMMAND LINE OPTIONS:
-D X, --directory X: Working data directory
-V, --verbose: Output information about each created file
-M X, --mode X: Permission mode of directories and files created
PYTHON DEPENDENCIES:
numpy: Scientific Computing Tools For Python
https://numpy.org
https://numpy.org/doc/stable/user/numpy-for-matlab-users.html
pyproj: Python interface to PROJ library
https://pypi.org/project/pyproj/
scikit-learn: Machine Learning in Python
https://scikit-learn.org/stable/index.html
https://github.com/scikit-learn/scikit-learn
h5py: Python interface for Hierarchal Data Format 5 (HDF5)
https://h5py.org
netCDF4: Python interface to the netCDF C library
https://unidata.github.io/netcdf4-python/netCDF4/index.html
PROGRAM DEPENDENCIES:
read_ICESat2_ATL07.py: reads ICESat-2 sea ice height data files
time.py: utilities for calculating time operations
utilities.py: download and management utilities for syncing files
UPDATE HISTORY:
Updated 05/2021: print full path of output filename
Written 03/2021
"""
from __future__ import print_function
import os
import re
import gzip
import h5py
import pyproj
import netCDF4
import argparse
import datetime
import numpy as np
import sklearn.neighbors
import icesat2_toolkit.time
from icesat2_toolkit.read_ICESat2_ATL07 import read_HDF5_ATL07
#-- PURPOSE: set the hemisphere of interest based on the granule
def set_hemisphere(GRANULE):
if GRANULE in ('10','11','12'):
projection_flag = 'S'
elif GRANULE in ('03','04','05'):
projection_flag = 'N'
return projection_flag
#-- PURPOSE: interpolates to coordinates with inverse distance weighting
def inverse_distance(x, y, z, xi, yi, SEARCH='BallTree', N=10, POWER=2.0):
#-- number of output points
npts = len(xi)
#-- create neighbors object for coordinates
if (SEARCH == 'BallTree'):
tree = sklearn.neighbors.BallTree(np.c_[x,y])
elif (SEARCH == 'KDTree'):
tree = sklearn.neighbors.KDTree(np.c_[x,y])
#-- query the search tree to find the N closest points
dist,indices = tree.query(np.c_[xi,yi], k=N, return_distance=True)
#-- normalized weights if POWER > 0 (typically between 1 and 3)
#-- in the inverse distance weighting
power_inverse_distance = dist**(-POWER)
s = np.sum(power_inverse_distance, axis=1)
w = power_inverse_distance/np.broadcast_to(s[:,None],(npts,N))
#-- calculate interpolated fields by inverse distance weighting
return np.sum(w*z[indices],axis=1)
#-- PURPOSE interpolate sea level anomalies to lat/lon and then to time
def interpolate_sea_level(base_dir, xi, yi, CJD, HEM):
#-- EPSG projections for converting lat/lon to polar stereographic
EPSG = dict(N=3413,S=3031)
#-- pyproj transformer for converting to polar stereographic
crs1 = pyproj.CRS.from_string('epsg:4326')
crs2 = pyproj.CRS.from_string(EPSG[HEM])
transformer = pyproj.Transformer.from_crs(crs1, crs2, always_xy=True)
#-- interpolate mean dynamic topography
input_file = 'mdt_cnes_cls2013_global.nc.gz'
#-- read bytes from compressed file
fd = gzip.open(os.path.join(base_dir,input_file),'rb')
#-- dictionary with input fields
dinput = {}
#-- read netCDF file for mean dynamic topography
with netCDF4.Dataset('mdt', mode='r', memory=fd.read()) as fileID:
dinput['lon'] = fileID['lon'][:].copy()
dinput['lat'] = fileID['lat'][:].copy()
dinput['mdt'] = np.ma.array(fileID['mdt'][0,:,:].copy(),
fill_value=fileID['mdt']._FillValue)
dinput['mdt'].mask = (dinput['mdt'].data == dinput['mdt'].fill_value)
#-- close the compressed file objects
fd.close()
#-- create 2-D grid coordinates from longitude and latitude vectors
gridlon,gridlat = np.meshgrid(dinput['lon'],dinput['lat'])
#-- convert from latitude/longitude into polar stereographic
xg,yg = transformer.transform(gridlon,gridlat)
#-- reduce to local coordinates to improve computational time
gridmask = np.logical_not(dinput['mdt'].mask)
if (HEM.upper() == 'N'):
gridmask &= (gridlat >= 50.0)
elif (HEM.upper() == 'S'):
gridmask &= (gridlat <= -50.0)
indy,indx = np.nonzero(gridmask)
#-- calculate mean dynamic topography by inverse distance weighting
MDT = inverse_distance(xg[indy,indx], yg[indy,indx],
dinput['mdt'].data[indy,indx], xi, yi)
#-- CNES Julian Days before and after measurement
CJD1 = np.floor(CJD)
#-- scale for linearly interpolating to date
dt = (CJD - CJD1[0])
#-- output sea level anomaly and absolute dynamic topography
SLA = np.zeros_like(CJD)
ADT = np.zeros_like(CJD)
#-- for the range of dates
for day in range(2):
#-- convert from CNES Julians Days to calendar dates for time
JD1 = CJD1 + day + 2433282.5
YY,MM,DD,HH,MN,SS = icesat2_toolkit.time.convert_julian(JD1[0],
FORMAT='tuple', ASTYPE=int)
#-- sea level directory
ddir = os.path.join(base_dir, '{0:0.0f}'.format(YY))
#-- input file for day before the measurement
regex = re.compile(('dt_global_allsat_phy_l4_{0:4d}{1:02d}{2:02d}_'
'(\d{{4}})(\d{{2}})(\d{{2}}).nc.gz').format(YY,MM,DD))
input_file, = [fi for fi in os.listdir(ddir) if regex.match(fi)]
#-- dictionary with input fields
dinput = {}
#-- read bytes from compressed file
fd = gzip.open(os.path.join(ddir,input_file),'rb')
#-- read netCDF file for time
with netCDF4.Dataset('sla', mode='r', memory=fd.read()) as fileID:
dinput['lon'] = fileID['lon'][:].copy()
dinput['lat'] = fileID['lat'][:].copy()
dinput['sla'] = np.ma.array(fileID['sla'][0,:,:].copy(),
fill_value=fileID['sla']._FillValue)
dinput['adt'] = np.ma.array(fileID['adt'][0,:,:].copy(),
fill_value=fileID['adt']._FillValue)
#-- close the compressed file objects
fd.close()
#-- for each variable to interpolate
out = {}
for var in ['sla','adt']:
#-- reduce to local coordinates to improve computational time
gridmask = np.logical_not(dinput[var].mask)
if (HEM.upper() == 'N'):
gridmask &= (gridlat >= 50.0)
elif (HEM.upper() == 'S'):
gridmask &= (gridlat <= -50.0)
indy,indx = np.nonzero(gridmask)
#-- calculate variable by inverse distance weighting
out[var] = inverse_distance(xg[indy,indx], yg[indy,indx],
dinput[var].data[indy,indx], xi, yi)
#-- linearly interpolate to date for iteration
SLA += out['sla']*(2.0*dt*day - dt - day + 1.0)
ADT += out['adt']*(2.0*dt*day - dt - day + 1.0)
#-- return interpolated values
return dict(h_mdt=MDT,h_sla=SLA,h_adt=ADT)
#-- PURPOSE: read ICESat-2 sea ice height (ATL07) from NSIDC
#-- interpolate AVISO sea level at points and times
def interp_sea_level_ICESat2(base_dir, FILE, VERBOSE=False, MODE=0o775):
    """
    Read an ICESat-2 sea ice height (ATL07) granule, interpolate AVISO
    sea level products (mean dynamic topography, sea level anomalies and
    absolute dynamic topography) to the sea ice segment locations and
    times, and write the interpolated values to a new HDF5 file in the
    granule's directory

    Arguments
    ---------
    base_dir: working data directory with the AVISO sea level products
    FILE: full path to an input ICESat-2 ATL07 granule

    Keyword arguments
    -----------------
    VERBOSE: print information about each input and output file
    MODE: permission mode of the output HDF5 file (number in octal)
    """
    #-- read data from input_file
    print('{0} -->'.format(os.path.basename(FILE))) if VERBOSE else None
    IS2_atl07_mds,IS2_atl07_attrs,IS2_atl07_beams = read_HDF5_ATL07(FILE,
        ATTRIBUTES=True)
    DIRECTORY = os.path.dirname(FILE)
    #-- extract parameters from ICESat-2 ATLAS HDF5 sea ice file name
    rx = re.compile(r'(processed_)?(ATL\d{2})-(\d{2})_(\d{4})(\d{2})(\d{2})'
        r'(\d{2})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$')
    SUB,PRD,HMN,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX=rx.findall(FILE).pop()
    #-- set the hemisphere flag ('N'/'S') based on ICESat-2 granule region
    HEM = set_hemisphere(HMN)
    #-- HDF5 file attributes for each interpolated sea level product
    attrib = {}
    #-- mean dynamic topography
    attrib['h_mdt'] = {}
    attrib['h_mdt']['long_name'] = 'Mean Dynamic Topography'
    attrib['h_mdt']['description'] = 'Sea surface height above geoid'
    attrib['h_mdt']['reference'] = ('https://www.aviso.altimetry.fr/en/data/'
        'products/sea-surface-height-products/global/msla-h.html')
    #-- sea level anomalies
    attrib['h_sla'] = {}
    attrib['h_sla']['long_name'] = 'Sea Level Anomaly'
    attrib['h_sla']['description'] = 'Sea surface anomalies'
    attrib['h_sla']['reference'] = ('https://www.aviso.altimetry.fr/en/data/'
        'products/sea-surface-height-products/global/msla-h.html')
    #-- absolute dynamic topography
    attrib['h_adt'] = {}
    attrib['h_adt']['long_name'] = 'Absolute Dynamic Topography'
    attrib['h_adt']['description'] = ('Sea surface height above geoid calculated '
        'by adding the mean dynamic topography to the sea level anomalies')
    attrib['h_adt']['reference'] = ('https://www.aviso.altimetry.fr/en/data/'
        'products/sea-surface-height-products/global/msla-h.html')
    #-- EPSG projections for converting lat/lon to polar stereographic
    EPSG = dict(N=3413,S=3031)
    #-- pyproj transformer for converting to polar stereographic
    crs1 = pyproj.CRS.from_string("epsg:{0:d}".format(4326))
    crs2 = pyproj.CRS.from_string("epsg:{0:d}".format(EPSG[HEM]))
    transformer = pyproj.Transformer.from_crs(crs1, crs2, always_xy=True)
    #-- number of GPS seconds between the GPS epoch
    #-- and ATLAS Standard Data Product (SDP) epoch
    atlas_sdp_gps_epoch = IS2_atl07_mds['ancillary_data']['atlas_sdp_gps_epoch']
    #-- copy variables for outputting to HDF5 file
    IS2_atl07_corr = {}
    IS2_atl07_fill = {}
    IS2_atl07_dims = {}
    IS2_atl07_corr_attrs = {}
    #-- number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC)
    #-- and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC)
    #-- Add this value to delta time parameters to compute full gps_seconds
    IS2_atl07_corr['ancillary_data'] = {}
    IS2_atl07_corr_attrs['ancillary_data'] = {}
    for key in ['atlas_sdp_gps_epoch']:
        #-- get each HDF5 variable
        IS2_atl07_corr['ancillary_data'][key] = IS2_atl07_mds['ancillary_data'][key]
        #-- Getting attributes of group and included variables
        IS2_atl07_corr_attrs['ancillary_data'][key] = {}
        for att_name,att_val in IS2_atl07_attrs['ancillary_data'][key].items():
            IS2_atl07_corr_attrs['ancillary_data'][key][att_name] = att_val
    #-- for each input beam within the file
    for gtx in sorted(IS2_atl07_beams):
        #-- output data dictionaries for beam
        IS2_atl07_corr[gtx] = dict(sea_ice_segments={})
        IS2_atl07_fill[gtx] = dict(sea_ice_segments={})
        IS2_atl07_dims[gtx] = dict(sea_ice_segments={})
        IS2_atl07_corr_attrs[gtx] = dict(sea_ice_segments={})
        #-- number of segments
        val = IS2_atl07_mds[gtx]['sea_ice_segments']
        n_seg = len(val['height_segment_id'])
        #-- convert time from ATLAS SDP to CNES JD
        #-- days relative to 1950-01-01T00:00:00
        gps_seconds = atlas_sdp_gps_epoch + val['delta_time']
        leap_seconds = icesat2_toolkit.time.count_leap_seconds(gps_seconds)
        cnes_time = icesat2_toolkit.time.convert_delta_time(gps_seconds-leap_seconds,
            epoch1=(1980,1,6,0,0,0), epoch2=(1950,1,1,0,0,0), scale=1.0/86400.0)
        #-- extract lat/lon and convert to polar stereographic
        X,Y = transformer.transform(val['longitude'],val['latitude'])
        #-- interpolate sea level anomalies and dynamic topographies
        interp = interpolate_sea_level(base_dir,X,Y,cnes_time,HEM)
        #-- group attributes for beam
        IS2_atl07_corr_attrs[gtx]['Description'] = IS2_atl07_attrs[gtx]['Description']
        IS2_atl07_corr_attrs[gtx]['atlas_pce'] = IS2_atl07_attrs[gtx]['atlas_pce']
        IS2_atl07_corr_attrs[gtx]['atlas_beam_type'] = IS2_atl07_attrs[gtx]['atlas_beam_type']
        IS2_atl07_corr_attrs[gtx]['groundtrack_id'] = IS2_atl07_attrs[gtx]['groundtrack_id']
        IS2_atl07_corr_attrs[gtx]['atmosphere_profile'] = IS2_atl07_attrs[gtx]['atmosphere_profile']
        IS2_atl07_corr_attrs[gtx]['atlas_spot_number'] = IS2_atl07_attrs[gtx]['atlas_spot_number']
        IS2_atl07_corr_attrs[gtx]['sc_orientation'] = IS2_atl07_attrs[gtx]['sc_orientation']
        #-- group attributes for sea_ice_segments
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['Description'] = ("Top group for sea "
            "ice segments as computed by the ATBD algorithm.")
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['data_rate'] = ("Data within this "
            "group are stored at the variable segment rate.")
        #-- geolocation, time and segment ID
        #-- delta time
        IS2_atl07_corr[gtx]['sea_ice_segments']['delta_time'] = val['delta_time'].copy()
        IS2_atl07_fill[gtx]['sea_ice_segments']['delta_time'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['delta_time'] = None
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['units'] = "seconds since 2018-01-01"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['long_name'] = "Elapsed GPS seconds"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['standard_name'] = "time"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['source'] = "telemetry"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['calendar'] = "standard"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['description'] = ("Number of "
            "GPS seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch "
            "offset is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS "
            "seconds between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP "
            "epoch. By adding the offset contained within atlas_sdp_gps_epoch to delta time "
            "parameters, the time in gps_seconds relative to the GPS epoch can be computed.")
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['coordinates'] = \
            "height_segment_id latitude longitude"
        #-- latitude
        IS2_atl07_corr[gtx]['sea_ice_segments']['latitude'] = val['latitude'].copy()
        IS2_atl07_fill[gtx]['sea_ice_segments']['latitude'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['latitude'] = ['delta_time']
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['units'] = "degrees_north"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['contentType'] = "physicalMeasurement"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['long_name'] = "Latitude"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['standard_name'] = "latitude"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['description'] = ("Latitude of "
            "segment center")
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['valid_min'] = -90.0
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['valid_max'] = 90.0
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['coordinates'] = \
            "height_segment_id delta_time longitude"
        #-- longitude
        IS2_atl07_corr[gtx]['sea_ice_segments']['longitude'] = val['longitude'].copy()
        IS2_atl07_fill[gtx]['sea_ice_segments']['longitude'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['longitude'] = ['delta_time']
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['units'] = "degrees_east"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['contentType'] = "physicalMeasurement"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['long_name'] = "Longitude"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['standard_name'] = "longitude"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['description'] = ("Longitude of "
            "segment center")
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['valid_min'] = -180.0
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['valid_max'] = 180.0
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['coordinates'] = \
            "height_segment_id delta_time latitude"
        #-- segment ID
        IS2_atl07_corr[gtx]['sea_ice_segments']['height_segment_id'] = val['height_segment_id']
        IS2_atl07_fill[gtx]['sea_ice_segments']['height_segment_id'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['height_segment_id'] = ['delta_time']
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['units'] = "1"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['contentType'] = "referenceInformation"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['long_name'] = \
            "Identifier of each height segment"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['description'] = \
            "Identifier of each height segment"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['coordinates'] = \
            "delta_time latitude longitude"
        #-- geolocation segment beginning
        IS2_atl07_corr[gtx]['sea_ice_segments']['geoseg_beg'] = val['geoseg_beg'].copy()
        IS2_atl07_fill[gtx]['sea_ice_segments']['geoseg_beg'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['geoseg_beg'] = ['delta_time']
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['units'] = "1"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['contentType'] = "referenceInformation"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['long_name'] = "Beginning GEOSEG"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['description'] = \
            "Geolocation segment (geoseg) ID associated with the first photon used in this sea ice segment"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['coordinates'] = \
            "height_segment_id delta_time latitude longitude"
        #-- geolocation segment ending
        IS2_atl07_corr[gtx]['sea_ice_segments']['geoseg_end'] = val['geoseg_end'].copy()
        IS2_atl07_fill[gtx]['sea_ice_segments']['geoseg_end'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['geoseg_end'] = ['delta_time']
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['units'] = "1"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['contentType'] = "referenceInformation"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['long_name'] = "Ending GEOSEG"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['description'] = \
            "Geolocation segment (geoseg) ID associated with the last photon used in this sea ice segment"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['coordinates'] = \
            "height_segment_id delta_time latitude longitude"
        #-- along track distance
        IS2_atl07_corr[gtx]['sea_ice_segments']['seg_dist_x'] = val['seg_dist_x'].copy()
        IS2_atl07_fill[gtx]['sea_ice_segments']['seg_dist_x'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['seg_dist_x'] = ['delta_time']
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['units'] = "meters"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['contentType'] = "referenceInformation"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['long_name'] = "Along track distance"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['description'] = \
            "Along-track distance from the equator crossing to the segment center."
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['coordinates'] = \
            "height_segment_id delta_time latitude longitude"
        #-- geophysical variables
        IS2_atl07_corr[gtx]['sea_ice_segments']['geophysical'] = {}
        IS2_atl07_fill[gtx]['sea_ice_segments']['geophysical'] = {}
        IS2_atl07_dims[gtx]['sea_ice_segments']['geophysical'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical']['Description'] = ("Contains geophysical "
            "parameters and corrections used to correct photon heights for geophysical effects, such as tides.")
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical']['data_rate'] = ("Data within this group "
            "are stored at the sea_ice_height segment rate.")
        #-- interpolated sea level products
        #-- NOTE(review): loop variable val shadows the beam group dict read
        #-- above; not reused afterwards in this iteration, but worth renaming
        for key,val in interp.items():
            #-- copy output variables
            sea_level = np.ma.zeros((n_seg))
            sea_level.data[:] = np.copy(val)
            #-- replace nan values with fill value
            sea_level.mask = np.isnan(sea_level.data)
            sea_level.data[sea_level.mask] = sea_level.fill_value
            #-- add to output
            IS2_atl07_corr[gtx]['sea_ice_segments']['geophysical'][key] = sea_level.copy()
            IS2_atl07_fill[gtx]['sea_ice_segments']['geophysical'][key] = sea_level.fill_value
            IS2_atl07_dims[gtx]['sea_ice_segments']['geophysical'][key] = ['delta_time']
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key] = {}
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['units'] = "meters"
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['contentType'] = "referenceInformation"
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['long_name'] = attrib[key]['long_name']
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['description'] = attrib[key]['description']
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['source'] = 'AVISO/Copernicus'
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['reference'] = attrib[key]['reference']
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['coordinates'] = \
                "../height_segment_id ../delta_time ../latitude ../longitude"
    #-- output HDF5 files with interpolated sea level data
    fargs = (PRD,HEM,'AVISO_SEA_LEVEL',YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX)
    file_format = '{0}-{1}_{2}_{3}{4}{5}{6}{7}{8}_{9}{10}{11}_{12}_{13}{14}.h5'
    output_file = os.path.join(DIRECTORY,file_format.format(*fargs))
    #-- print file information
    print('\t{0}'.format(output_file)) if VERBOSE else None
    HDF5_ATL07_corr_write(IS2_atl07_corr, IS2_atl07_corr_attrs,
        CLOBBER=True, INPUT=os.path.basename(FILE),
        FILL_VALUE=IS2_atl07_fill, DIMENSIONS=IS2_atl07_dims,
        FILENAME=output_file)
    #-- change the permissions mode
    os.chmod(output_file, MODE)
#-- PURPOSE: outputting the correction values for ICESat-2 data to HDF5
def HDF5_ATL07_corr_write(IS2_atl07_corr, IS2_atl07_attrs, INPUT=None,
    FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False):
    """
    Write interpolated sea level corrections for ICESat-2 ATL07 data to an
    output HDF5 file

    Arguments
    ---------
    IS2_atl07_corr: dictionary of output variables (ancillary data and
        per-beam sea_ice_segments groups)
    IS2_atl07_attrs: dictionary of attributes for each output variable

    Keyword arguments
    -----------------
    INPUT: basename of the input ATL07 granule (stored as a file attribute)
    FILENAME: full path to the output HDF5 file
    FILL_VALUE: dictionary of fill values for each output variable
    DIMENSIONS: dictionary of dimension names for attaching HDF5 scales
    CLOBBER: overwrite an existing output file
    """
    #-- setting HDF5 clobber attribute
    if CLOBBER:
        clobber = 'w'
    else:
        clobber = 'w-'
    #-- open output HDF5 file
    fileID = h5py.File(os.path.expanduser(FILENAME), clobber)
    #-- create HDF5 records
    h5 = {}
    #-- number of GPS seconds between the GPS epoch (1980-01-06T00:00:00Z UTC)
    #-- and ATLAS Standard Data Product (SDP) epoch (2018-01-01T00:00:00Z UTC)
    h5['ancillary_data'] = {}
    for k,v in IS2_atl07_corr['ancillary_data'].items():
        #-- Defining the HDF5 dataset variables
        val = 'ancillary_data/{0}'.format(k)
        h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v,
            dtype=v.dtype, compression='gzip')
        #-- add HDF5 variable attributes
        for att_name,att_val in IS2_atl07_attrs['ancillary_data'][k].items():
            h5['ancillary_data'][k].attrs[att_name] = att_val
    #-- write each output beam
    beams = [k for k in IS2_atl07_corr.keys() if bool(re.match(r'gt\d[lr]',k))]
    for gtx in beams:
        fileID.create_group(gtx)
        #-- add HDF5 group attributes for beam
        for att_name in ['Description','atlas_pce','atlas_beam_type',
            'groundtrack_id','atmosphere_profile','atlas_spot_number',
            'sc_orientation']:
            fileID[gtx].attrs[att_name] = IS2_atl07_attrs[gtx][att_name]
        #-- create sea_ice_segments group
        fileID[gtx].create_group('sea_ice_segments')
        h5[gtx] = dict(sea_ice_segments={})
        for att_name in ['Description','data_rate']:
            att_val = IS2_atl07_attrs[gtx]['sea_ice_segments'][att_name]
            fileID[gtx]['sea_ice_segments'].attrs[att_name] = att_val
        #-- delta_time, geolocation and segment identification variables
        for k in ['delta_time','latitude','longitude','height_segment_id',
            'geoseg_beg','geoseg_end','seg_dist_x']:
            #-- values and attributes
            v = IS2_atl07_corr[gtx]['sea_ice_segments'][k]
            attrs = IS2_atl07_attrs[gtx]['sea_ice_segments'][k]
            fillvalue = FILL_VALUE[gtx]['sea_ice_segments'][k]
            #-- Defining the HDF5 dataset variables
            val = '{0}/{1}/{2}'.format(gtx,'sea_ice_segments',k)
            #-- NOTE(review): truthiness test skips a fill value of 0 --
            #-- confirm no variable uses 0 as its fill value
            if fillvalue:
                h5[gtx]['sea_ice_segments'][k] = fileID.create_dataset(val,
                    np.shape(v), data=v, dtype=v.dtype, fillvalue=fillvalue,
                    compression='gzip')
            else:
                h5[gtx]['sea_ice_segments'][k] = fileID.create_dataset(val,
                    np.shape(v), data=v, dtype=v.dtype, compression='gzip')
            #-- create or attach dimensions for HDF5 variable
            if DIMENSIONS[gtx]['sea_ice_segments'][k]:
                #-- attach dimensions
                for i,dim in enumerate(DIMENSIONS[gtx]['sea_ice_segments'][k]):
                    h5[gtx]['sea_ice_segments'][k].dims[i].attach_scale(
                        h5[gtx]['sea_ice_segments'][dim])
            else:
                #-- make dimension
                h5[gtx]['sea_ice_segments'][k].make_scale(k)
            #-- add HDF5 variable attributes
            for att_name,att_val in attrs.items():
                h5[gtx]['sea_ice_segments'][k].attrs[att_name] = att_val
        #-- add to geophysical corrections
        key = 'geophysical'
        fileID[gtx]['sea_ice_segments'].create_group(key)
        h5[gtx]['sea_ice_segments'][key] = {}
        for att_name in ['Description','data_rate']:
            att_val=IS2_atl07_attrs[gtx]['sea_ice_segments'][key][att_name]
            fileID[gtx]['sea_ice_segments'][key].attrs[att_name] = att_val
        for k,v in IS2_atl07_corr[gtx]['sea_ice_segments'][key].items():
            #-- attributes
            attrs = IS2_atl07_attrs[gtx]['sea_ice_segments'][key][k]
            fillvalue = FILL_VALUE[gtx]['sea_ice_segments'][key][k]
            #-- Defining the HDF5 dataset variables
            val = '{0}/{1}/{2}/{3}'.format(gtx,'sea_ice_segments',key,k)
            if fillvalue:
                h5[gtx]['sea_ice_segments'][key][k] = \
                    fileID.create_dataset(val, np.shape(v), data=v,
                    dtype=v.dtype, fillvalue=fillvalue, compression='gzip')
            else:
                h5[gtx]['sea_ice_segments'][key][k] = \
                    fileID.create_dataset(val, np.shape(v), data=v,
                    dtype=v.dtype, compression='gzip')
            #-- attach dimensions
            for i,dim in enumerate(DIMENSIONS[gtx]['sea_ice_segments'][key][k]):
                h5[gtx]['sea_ice_segments'][key][k].dims[i].attach_scale(
                    h5[gtx]['sea_ice_segments'][dim])
            #-- add HDF5 variable attributes
            for att_name,att_val in attrs.items():
                h5[gtx]['sea_ice_segments'][key][k].attrs[att_name] = att_val
    #-- HDF5 file title
    fileID.attrs['featureType'] = 'trajectory'
    fileID.attrs['title'] = 'ATLAS/ICESat-2 L3A Sea Ice Height'
    fileID.attrs['summary'] = ('Estimates of the sea ice correction parameters '
        'needed to interpret and assess the quality of sea height estimates.')
    fileID.attrs['description'] = ('The data set (ATL07) contains along-track '
        'heights for sea ice and open water leads (at varying length scales) '
        'relative to the WGS84 ellipsoid (ITRF2014 reference frame) after '
        'adjustment for geoidal and tidal variations, and inverted barometer '
        'effects.')
    date_created = datetime.datetime.today()
    fileID.attrs['date_created'] = date_created.isoformat()
    project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['project'] = project
    platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    #-- bug fix: this previously wrote to attrs['project'] a second time,
    #-- clobbering the project attribute and leaving platform unset
    fileID.attrs['platform'] = platform
    #-- add attribute for elevation instrument and designated processing level
    instrument = 'ATLAS > Advanced Topographic Laser Altimeter System'
    fileID.attrs['instrument'] = instrument
    fileID.attrs['source'] = 'Spacecraft'
    fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2'
    fileID.attrs['processing_level'] = '4'
    #-- add attributes for input ATL07 file
    fileID.attrs['input_files'] = os.path.basename(INPUT)
    #-- find geospatial and temporal ranges
    lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf)
    for gtx in beams:
        lon = IS2_atl07_corr[gtx]['sea_ice_segments']['longitude']
        lat = IS2_atl07_corr[gtx]['sea_ice_segments']['latitude']
        delta_time = IS2_atl07_corr[gtx]['sea_ice_segments']['delta_time']
        #-- setting the geospatial and temporal ranges
        lnmn = lon.min() if (lon.min() < lnmn) else lnmn
        lnmx = lon.max() if (lon.max() > lnmx) else lnmx
        ltmn = lat.min() if (lat.min() < ltmn) else ltmn
        ltmx = lat.max() if (lat.max() > ltmx) else ltmx
        tmn = delta_time.min() if (delta_time.min() < tmn) else tmn
        tmx = delta_time.max() if (delta_time.max() > tmx) else tmx
    #-- add geospatial and temporal attributes
    fileID.attrs['geospatial_lat_min'] = ltmn
    fileID.attrs['geospatial_lat_max'] = ltmx
    fileID.attrs['geospatial_lon_min'] = lnmn
    fileID.attrs['geospatial_lon_max'] = lnmx
    fileID.attrs['geospatial_lat_units'] = "degrees_north"
    fileID.attrs['geospatial_lon_units'] = "degrees_east"
    fileID.attrs['geospatial_ellipsoid'] = "WGS84"
    fileID.attrs['date_type'] = 'UTC'
    fileID.attrs['time_type'] = 'CCSDS UTC-A'
    #-- convert start and end time from ATLAS SDP seconds into GPS seconds
    atlas_sdp_gps_epoch=IS2_atl07_corr['ancillary_data']['atlas_sdp_gps_epoch']
    gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx])
    #-- calculate leap seconds
    leaps = icesat2_toolkit.time.count_leap_seconds(gps_seconds)
    #-- convert from seconds since 1980-01-06T00:00:00 to Modified Julian days
    MJD = icesat2_toolkit.time.convert_delta_time(gps_seconds - leaps,
        epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0)
    #-- convert to calendar date
    YY,MM,DD,HH,MN,SS = icesat2_toolkit.time.convert_julian(MJD + 2400000.5,
        FORMAT='tuple')
    #-- add attributes with measurement date start, end and duration
    tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]),
        int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1)))
    fileID.attrs['time_coverage_start'] = tcs.isoformat()
    tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]),
        int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1)))
    fileID.attrs['time_coverage_end'] = tce.isoformat()
    fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn)
    #-- Closing the HDF5 file
    fileID.close()
#-- Main program that calls interp_sea_level_ICESat2()
def main():
    """
    Main program: parse the command line and interpolate AVISO sea level
    products to each input ICESat-2 ATL07 granule
    """
    #-- build the command line argument parser
    parser = argparse.ArgumentParser(
        description="""Interpolates AVISO sea level anomalies, absolute
            dynamic topography and mean dynamic topography to ICESat-2
            ATL07 sea ice height data
            """
    )
    #-- positional arguments: one or more ATL07 granules to process
    parser.add_argument('infile',
        type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+',
        help='ICESat-2 ATL07 file to run')
    #-- location of the AVISO sea level products
    parser.add_argument('--directory','-D',
        type=lambda p: os.path.abspath(os.path.expanduser(p)),
        default=os.getcwd(),
        help='Working data directory')
    #-- toggle printing of input and output file information
    parser.add_argument('--verbose','-V',
        default=False, action='store_true',
        help='Output information about each created file')
    #-- octal permissions applied to each created file
    parser.add_argument('--mode','-M',
        type=lambda x: int(x,base=8), default=0o775,
        help='Permission mode of directories and files created')
    opts = parser.parse_args()
    #-- process every granule given on the command line
    for granule in opts.infile:
        interp_sea_level_ICESat2(opts.directory, granule,
            VERBOSE=opts.verbose, MODE=opts.mode)
#-- run main program when the module is executed as a script
if __name__ == '__main__':
    main()
from __future__ import print_function
import os
import re
import gzip
import h5py
import pyproj
import netCDF4
import argparse
import datetime
import numpy as np
import sklearn.neighbors
import icesat2_toolkit.time
from icesat2_toolkit.read_ICESat2_ATL07 import read_HDF5_ATL07
def set_hemisphere(GRANULE):
if GRANULE in ('10','11','12'):
projection_flag = 'S'
elif GRANULE in ('03','04','05'):
projection_flag = 'N'
return projection_flag
def inverse_distance(x, y, z, xi, yi, SEARCH='BallTree', N=10, POWER=2.0):
npts = len(xi)
if (SEARCH == 'BallTree'):
tree = sklearn.neighbors.BallTree(np.c_[x,y])
elif (SEARCH == 'KDTree'):
tree = sklearn.neighbors.KDTree(np.c_[x,y])
dist,indices = tree.query(np.c_[xi,yi], k=N, return_distance=True)
power_inverse_distance = dist**(-POWER)
s = np.sum(power_inverse_distance, axis=1)
w = power_inverse_distance/np.broadcast_to(s[:,None],(npts,N))
return np.sum(w*z[indices],axis=1)
def interpolate_sea_level(base_dir, xi, yi, CJD, HEM):
EPSG = dict(N=3413,S=3031)
crs1 = pyproj.CRS.from_string('epsg:4326')
crs2 = pyproj.CRS.from_string(EPSG[HEM])
transformer = pyproj.Transformer.from_crs(crs1, crs2, always_xy=True)
input_file = 'mdt_cnes_cls2013_global.nc.gz'
fd = gzip.open(os.path.join(base_dir,input_file),'rb')
dinput = {}
with netCDF4.Dataset('mdt', mode='r', memory=fd.read()) as fileID:
dinput['lon'] = fileID['lon'][:].copy()
dinput['lat'] = fileID['lat'][:].copy()
dinput['mdt'] = np.ma.array(fileID['mdt'][0,:,:].copy(),
fill_value=fileID['mdt']._FillValue)
dinput['mdt'].mask = (dinput['mdt'].data == dinput['mdt'].fill_value)
fd.close()
gridlon,gridlat = np.meshgrid(dinput['lon'],dinput['lat'])
xg,yg = transformer.transform(gridlon,gridlat)
gridmask = np.logical_not(dinput['mdt'].mask)
if (HEM.upper() == 'N'):
gridmask &= (gridlat >= 50.0)
elif (HEM.upper() == 'S'):
gridmask &= (gridlat <= -50.0)
indy,indx = np.nonzero(gridmask)
MDT = inverse_distance(xg[indy,indx], yg[indy,indx],
dinput['mdt'].data[indy,indx], xi, yi)
CJD1 = np.floor(CJD)
dt = (CJD - CJD1[0])
SLA = np.zeros_like(CJD)
ADT = np.zeros_like(CJD)
for day in range(2):
JD1 = CJD1 + day + 2433282.5
YY,MM,DD,HH,MN,SS = icesat2_toolkit.time.convert_julian(JD1[0],
FORMAT='tuple', ASTYPE=int)
ddir = os.path.join(base_dir, '{0:0.0f}'.format(YY))
regex = re.compile(('dt_global_allsat_phy_l4_{0:4d}{1:02d}{2:02d}_'
'(\d{{4}})(\d{{2}})(\d{{2}}).nc.gz').format(YY,MM,DD))
input_file, = [fi for fi in os.listdir(ddir) if regex.match(fi)]
dinput = {}
fd = gzip.open(os.path.join(ddir,input_file),'rb')
with netCDF4.Dataset('sla', mode='r', memory=fd.read()) as fileID:
dinput['lon'] = fileID['lon'][:].copy()
dinput['lat'] = fileID['lat'][:].copy()
dinput['sla'] = np.ma.array(fileID['sla'][0,:,:].copy(),
fill_value=fileID['sla']._FillValue)
dinput['adt'] = np.ma.array(fileID['adt'][0,:,:].copy(),
fill_value=fileID['adt']._FillValue)
fd.close()
out = {}
for var in ['sla','adt']:
gridmask = np.logical_not(dinput[var].mask)
if (HEM.upper() == 'N'):
gridmask &= (gridlat >= 50.0)
elif (HEM.upper() == 'S'):
gridmask &= (gridlat <= -50.0)
indy,indx = np.nonzero(gridmask)
out[var] = inverse_distance(xg[indy,indx], yg[indy,indx],
dinput[var].data[indy,indx], xi, yi)
SLA += out['sla']*(2.0*dt*day - dt - day + 1.0)
ADT += out['adt']*(2.0*dt*day - dt - day + 1.0)
return dict(h_mdt=MDT,h_sla=SLA,h_adt=ADT)
def interp_sea_level_ICESat2(base_dir, FILE, VERBOSE=False, MODE=0o775):
    """Interpolate AVISO sea level fields (MDT, SLA, ADT) to the segment
    locations of an ICESat-2 ATL07 sea ice height file and write the
    result to a new HDF5 file alongside the input.

    Parameters
    ----------
    base_dir : str
        Working directory containing the AVISO sea level data
    FILE : str
        Full path to the input ATL07 granule
    VERBOSE : bool, default False
        Print the names of the input and output files
    MODE : int, default 0o775
        Permission mode applied to the output file
    """
    # print the input filename when verbose output is requested
    print('{0} -->'.format(os.path.basename(FILE))) if VERBOSE else None
    # read the ATL07 granule (data, attributes and list of beam groups)
    IS2_atl07_mds,IS2_atl07_attrs,IS2_atl07_beams = read_HDF5_ATL07(FILE,
        ATTRIBUTES=True)
    DIRECTORY = os.path.dirname(FILE)
    # parse granule metadata (product, hemisphere, date, track, cycle,
    # release, version) out of the standard ATL07 file name
    rx = re.compile(r'(processed_)?(ATL\d{2})-(\d{2})_(\d{4})(\d{2})(\d{2})'
        r'(\d{2})(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$')
    SUB,PRD,HMN,YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX=rx.findall(FILE).pop()
    # map the hemisphere flag from the file name to 'N'/'S'
    # (set_hemisphere is defined elsewhere in this module)
    HEM = set_hemisphere(HMN)
    # attributes for each of the three interpolated output variables
    attrib = {}
    attrib['h_mdt'] = {}
    attrib['h_mdt']['long_name'] = 'Mean Dynamic Topography'
    attrib['h_mdt']['description'] = 'Sea surface height above geoid'
    attrib['h_mdt']['reference'] = ('https://www.aviso.altimetry.fr/en/data/'
        'products/sea-surface-height-products/global/msla-h.html')
    attrib['h_sla'] = {}
    attrib['h_sla']['long_name'] = 'Sea Level Anomaly'
    attrib['h_sla']['description'] = 'Sea surface anomalies'
    attrib['h_sla']['reference'] = ('https://www.aviso.altimetry.fr/en/data/'
        'products/sea-surface-height-products/global/msla-h.html')
    attrib['h_adt'] = {}
    attrib['h_adt']['long_name'] = 'Absolute Dynamic Topography'
    attrib['h_adt']['description'] = ('Sea surface height above geoid calculated '
        'by adding the mean dynamic topography to the sea level anomalies')
    attrib['h_adt']['reference'] = ('https://www.aviso.altimetry.fr/en/data/'
        'products/sea-surface-height-products/global/msla-h.html')
    # polar stereographic projections for each hemisphere
    # (EPSG 3413: NSIDC North; EPSG 3031: Antarctic Polar Stereographic)
    EPSG = dict(N=3413,S=3031)
    crs1 = pyproj.CRS.from_string("epsg:{0:d}".format(4326))
    crs2 = pyproj.CRS.from_string("epsg:{0:d}".format(EPSG[HEM]))
    transformer = pyproj.Transformer.from_crs(crs1, crs2, always_xy=True)
    # number of GPS seconds between the GPS epoch and the ATLAS SDP epoch
    atlas_sdp_gps_epoch = IS2_atl07_mds['ancillary_data']['atlas_sdp_gps_epoch']
    # output containers: data, fill values, dimension names and attributes
    IS2_atl07_corr = {}
    IS2_atl07_fill = {}
    IS2_atl07_dims = {}
    IS2_atl07_corr_attrs = {}
    # copy ancillary data (with attributes) to the output structure
    IS2_atl07_corr['ancillary_data'] = {}
    IS2_atl07_corr_attrs['ancillary_data'] = {}
    for key in ['atlas_sdp_gps_epoch']:
        IS2_atl07_corr['ancillary_data'][key] = IS2_atl07_mds['ancillary_data'][key]
        IS2_atl07_corr_attrs['ancillary_data'][key] = {}
        for att_name,att_val in IS2_atl07_attrs['ancillary_data'][key].items():
            IS2_atl07_corr_attrs['ancillary_data'][key][att_name] = att_val
    # process each beam group within the granule
    for gtx in sorted(IS2_atl07_beams):
        IS2_atl07_corr[gtx] = dict(sea_ice_segments={})
        IS2_atl07_fill[gtx] = dict(sea_ice_segments={})
        IS2_atl07_dims[gtx] = dict(sea_ice_segments={})
        IS2_atl07_corr_attrs[gtx] = dict(sea_ice_segments={})
        # sea ice segment variables for this beam
        val = IS2_atl07_mds[gtx]['sea_ice_segments']
        n_seg = len(val['height_segment_id'])
        # convert delta times to CNES Julian Days:
        # GPS seconds -> subtract leap seconds -> days since 1950-01-01
        gps_seconds = atlas_sdp_gps_epoch + val['delta_time']
        leap_seconds = icesat2_toolkit.time.count_leap_seconds(gps_seconds)
        cnes_time = icesat2_toolkit.time.convert_delta_time(gps_seconds-leap_seconds,
            epoch1=(1980,1,6,0,0,0), epoch2=(1950,1,1,0,0,0), scale=1.0/86400.0)
        # project segment coordinates into the hemisphere's stereographic grid
        X,Y = transformer.transform(val['longitude'],val['latitude'])
        # interpolate the sea level fields to the segment locations/times
        interp = interpolate_sea_level(base_dir,X,Y,cnes_time,HEM)
        # copy beam-level attributes to the output structure
        IS2_atl07_corr_attrs[gtx]['Description'] = IS2_atl07_attrs[gtx]['Description']
        IS2_atl07_corr_attrs[gtx]['atlas_pce'] = IS2_atl07_attrs[gtx]['atlas_pce']
        IS2_atl07_corr_attrs[gtx]['atlas_beam_type'] = IS2_atl07_attrs[gtx]['atlas_beam_type']
        IS2_atl07_corr_attrs[gtx]['groundtrack_id'] = IS2_atl07_attrs[gtx]['groundtrack_id']
        IS2_atl07_corr_attrs[gtx]['atmosphere_profile'] = IS2_atl07_attrs[gtx]['atmosphere_profile']
        IS2_atl07_corr_attrs[gtx]['atlas_spot_number'] = IS2_atl07_attrs[gtx]['atlas_spot_number']
        IS2_atl07_corr_attrs[gtx]['sc_orientation'] = IS2_atl07_attrs[gtx]['sc_orientation']
        # group-level attributes for the sea ice segments group
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['Description'] = ("Top group for sea "
            "ice segments as computed by the ATBD algorithm.")
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['data_rate'] = ("Data within this "
            "group are stored at the variable segment rate.")
        # delta time: seconds since the ATLAS SDP epoch
        IS2_atl07_corr[gtx]['sea_ice_segments']['delta_time'] = val['delta_time'].copy()
        IS2_atl07_fill[gtx]['sea_ice_segments']['delta_time'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['delta_time'] = None
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['units'] = "seconds since 2018-01-01"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['long_name'] = "Elapsed GPS seconds"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['standard_name'] = "time"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['source'] = "telemetry"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['calendar'] = "standard"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['description'] = ("Number of "
            "GPS seconds since the ATLAS SDP epoch. The ATLAS Standard Data Products (SDP) epoch "
            "offset is defined within /ancillary_data/atlas_sdp_gps_epoch as the number of GPS "
            "seconds between the GPS epoch (1980-01-06T00:00:00.000000Z UTC) and the ATLAS SDP "
            "epoch. By adding the offset contained within atlas_sdp_gps_epoch to delta time "
            "parameters, the time in gps_seconds relative to the GPS epoch can be computed.")
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['delta_time']['coordinates'] = \
            "height_segment_id latitude longitude"
        # latitude of segment centers
        IS2_atl07_corr[gtx]['sea_ice_segments']['latitude'] = val['latitude'].copy()
        IS2_atl07_fill[gtx]['sea_ice_segments']['latitude'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['latitude'] = ['delta_time']
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['units'] = "degrees_north"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['contentType'] = "physicalMeasurement"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['long_name'] = "Latitude"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['standard_name'] = "latitude"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['description'] = ("Latitude of "
            "segment center")
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['valid_min'] = -90.0
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['valid_max'] = 90.0
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['latitude']['coordinates'] = \
            "height_segment_id delta_time longitude"
        # longitude of segment centers
        IS2_atl07_corr[gtx]['sea_ice_segments']['longitude'] = val['longitude'].copy()
        IS2_atl07_fill[gtx]['sea_ice_segments']['longitude'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['longitude'] = ['delta_time']
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['units'] = "degrees_east"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['contentType'] = "physicalMeasurement"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['long_name'] = "Longitude"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['standard_name'] = "longitude"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['description'] = ("Longitude of "
            "segment center")
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['valid_min'] = -180.0
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['valid_max'] = 180.0
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['longitude']['coordinates'] = \
            "height_segment_id delta_time latitude"
        # identifier of each height segment
        IS2_atl07_corr[gtx]['sea_ice_segments']['height_segment_id'] = val['height_segment_id']
        IS2_atl07_fill[gtx]['sea_ice_segments']['height_segment_id'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['height_segment_id'] = ['delta_time']
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['units'] = "1"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['contentType'] = "referenceInformation"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['long_name'] = \
            "Identifier of each height segment"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['description'] = \
            "Identifier of each height segment"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['height_segment_id']['coordinates'] = \
            "delta_time latitude longitude"
        # geolocation segment at the beginning of the sea ice segment
        IS2_atl07_corr[gtx]['sea_ice_segments']['geoseg_beg'] = val['geoseg_beg'].copy()
        IS2_atl07_fill[gtx]['sea_ice_segments']['geoseg_beg'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['geoseg_beg'] = ['delta_time']
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['units'] = "1"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['contentType'] = "referenceInformation"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['long_name'] = "Beginning GEOSEG"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['description'] = \
            "Geolocation segment (geoseg) ID associated with the first photon used in this sea ice segment"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_beg']['coordinates'] = \
            "height_segment_id delta_time latitude longitude"
        # geolocation segment at the end of the sea ice segment
        IS2_atl07_corr[gtx]['sea_ice_segments']['geoseg_end'] = val['geoseg_end'].copy()
        IS2_atl07_fill[gtx]['sea_ice_segments']['geoseg_end'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['geoseg_end'] = ['delta_time']
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['units'] = "1"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['contentType'] = "referenceInformation"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['long_name'] = "Ending GEOSEG"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['description'] = \
            "Geolocation segment (geoseg) ID associated with the last photon used in this sea ice segment"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geoseg_end']['coordinates'] = \
            "height_segment_id delta_time latitude longitude"
        # along-track distance of the segment
        IS2_atl07_corr[gtx]['sea_ice_segments']['seg_dist_x'] = val['seg_dist_x'].copy()
        IS2_atl07_fill[gtx]['sea_ice_segments']['seg_dist_x'] = None
        IS2_atl07_dims[gtx]['sea_ice_segments']['seg_dist_x'] = ['delta_time']
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['units'] = "meters"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['contentType'] = "referenceInformation"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['long_name'] = "Along track distance"
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['description'] = \
            "Along-track distance from the equator crossing to the segment center."
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['seg_dist_x']['coordinates'] = \
            "height_segment_id delta_time latitude longitude"
        # geophysical subgroup holding the interpolated sea level variables
        IS2_atl07_corr[gtx]['sea_ice_segments']['geophysical'] = {}
        IS2_atl07_fill[gtx]['sea_ice_segments']['geophysical'] = {}
        IS2_atl07_dims[gtx]['sea_ice_segments']['geophysical'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'] = {}
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical']['Description'] = ("Contains geophysical "
            "parameters and corrections used to correct photon heights for geophysical effects, such as tides.")
        IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical']['data_rate'] = ("Data within this group "
            "are stored at the sea_ice_height segment rate.")
        # store each interpolated field (h_mdt, h_sla, h_adt) as a masked
        # array with NaNs converted to the fill value
        # NOTE: the loop variables (key,val) shadow the outer `val` beam
        # dictionary, which is no longer needed at this point
        for key,val in interp.items():
            sea_level = np.ma.zeros((n_seg))
            sea_level.data[:] = np.copy(val)
            sea_level.mask = np.isnan(sea_level.data)
            sea_level.data[sea_level.mask] = sea_level.fill_value
            IS2_atl07_corr[gtx]['sea_ice_segments']['geophysical'][key] = sea_level.copy()
            IS2_atl07_fill[gtx]['sea_ice_segments']['geophysical'][key] = sea_level.fill_value
            IS2_atl07_dims[gtx]['sea_ice_segments']['geophysical'][key] = ['delta_time']
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key] = {}
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['units'] = "meters"
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['contentType'] = "referenceInformation"
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['long_name'] = attrib[key]['long_name']
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['description'] = attrib[key]['description']
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['source'] = 'AVISO/Copernicus'
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['reference'] = attrib[key]['reference']
            IS2_atl07_corr_attrs[gtx]['sea_ice_segments']['geophysical'][key]['coordinates'] = \
                "../height_segment_id ../delta_time ../latitude ../longitude"
    # build the output file name from the parsed granule metadata
    fargs = (PRD,HEM,'AVISO_SEA_LEVEL',YY,MM,DD,HH,MN,SS,TRK,CYCL,SN,RL,VERS,AUX)
    file_format = '{0}-{1}_{2}_{3}{4}{5}{6}{7}{8}_{9}{10}{11}_{12}_{13}{14}.h5'
    output_file = os.path.join(DIRECTORY,file_format.format(*fargs))
    print('\t{0}'.format(output_file)) if VERBOSE else None
    # write the interpolated fields (and copied variables) to HDF5
    HDF5_ATL07_corr_write(IS2_atl07_corr, IS2_atl07_corr_attrs,
        CLOBBER=True, INPUT=os.path.basename(FILE),
        FILL_VALUE=IS2_atl07_fill, DIMENSIONS=IS2_atl07_dims,
        FILENAME=output_file)
    # set the permission mode of the output file
    os.chmod(output_file, MODE)
def HDF5_ATL07_corr_write(IS2_atl07_corr, IS2_atl07_attrs, INPUT=None,
    FILENAME='', FILL_VALUE=None, DIMENSIONS=None, CLOBBER=False):
    """Write the interpolated sea level corrections for an ATL07 granule
    to an HDF5 file.

    Parameters
    ----------
    IS2_atl07_corr : dict
        Output variables (ancillary data plus per-beam sea ice segments)
    IS2_atl07_attrs : dict
        HDF5 attributes for each output variable and group
    INPUT : str, optional
        Name of the input granule (recorded in the file attributes)
    FILENAME : str
        Full path of the output HDF5 file
    FILL_VALUE : dict, optional
        Fill value for each output variable (``None`` when unused)
    DIMENSIONS : dict, optional
        Dimension-scale variable names for each output variable
    CLOBBER : bool, default False
        Overwrite an existing output file
    """
    # open in 'w' to clobber an existing file, 'w-' to fail if it exists
    if CLOBBER:
        clobber = 'w'
    else:
        clobber = 'w-'
    fileID = h5py.File(os.path.expanduser(FILENAME), clobber)
    # dictionary mirroring the HDF5 structure with the created datasets
    h5 = {}
    # write the ancillary data variables and their attributes
    h5['ancillary_data'] = {}
    for k,v in IS2_atl07_corr['ancillary_data'].items():
        val = 'ancillary_data/{0}'.format(k)
        h5['ancillary_data'][k] = fileID.create_dataset(val, np.shape(v), data=v,
            dtype=v.dtype, compression='gzip')
        for att_name,att_val in IS2_atl07_attrs['ancillary_data'][k].items():
            h5['ancillary_data'][k].attrs[att_name] = att_val
    # beam group names follow the pattern gt[123][lr]
    beams = [k for k in IS2_atl07_corr.keys() if bool(re.match(r'gt\d[lr]',k))]
    for gtx in beams:
        fileID.create_group(gtx)
        # copy beam-level attributes
        for att_name in ['Description','atlas_pce','atlas_beam_type',
            'groundtrack_id','atmosphere_profile','atlas_spot_number',
            'sc_orientation']:
            fileID[gtx].attrs[att_name] = IS2_atl07_attrs[gtx][att_name]
        # create the sea_ice_segments group with its attributes
        fileID[gtx].create_group('sea_ice_segments')
        h5[gtx] = dict(sea_ice_segments={})
        for att_name in ['Description','data_rate']:
            att_val = IS2_atl07_attrs[gtx]['sea_ice_segments'][att_name]
            fileID[gtx]['sea_ice_segments'].attrs[att_name] = att_val
        # geolocation and segment-description variables
        for k in ['delta_time','latitude','longitude','height_segment_id',
            'geoseg_beg','geoseg_end','seg_dist_x']:
            v = IS2_atl07_corr[gtx]['sea_ice_segments'][k]
            attrs = IS2_atl07_attrs[gtx]['sea_ice_segments'][k]
            fillvalue = FILL_VALUE[gtx]['sea_ice_segments'][k]
            val = '{0}/{1}/{2}'.format(gtx,'sea_ice_segments',k)
            # compare against None explicitly so a valid fill value of 0
            # is not silently dropped
            if fillvalue is not None:
                h5[gtx]['sea_ice_segments'][k] = fileID.create_dataset(val,
                    np.shape(v), data=v, dtype=v.dtype, fillvalue=fillvalue,
                    compression='gzip')
            else:
                h5[gtx]['sea_ice_segments'][k] = fileID.create_dataset(val,
                    np.shape(v), data=v, dtype=v.dtype, compression='gzip')
            # attach dimension scales, or register the variable as a scale
            if DIMENSIONS[gtx]['sea_ice_segments'][k]:
                for i,dim in enumerate(DIMENSIONS[gtx]['sea_ice_segments'][k]):
                    h5[gtx]['sea_ice_segments'][k].dims[i].attach_scale(
                        h5[gtx]['sea_ice_segments'][dim])
            else:
                h5[gtx]['sea_ice_segments'][k].make_scale(k)
            for att_name,att_val in attrs.items():
                h5[gtx]['sea_ice_segments'][k].attrs[att_name] = att_val
        # geophysical subgroup with the interpolated sea level variables
        key = 'geophysical'
        fileID[gtx]['sea_ice_segments'].create_group(key)
        h5[gtx]['sea_ice_segments'][key] = {}
        for att_name in ['Description','data_rate']:
            att_val=IS2_atl07_attrs[gtx]['sea_ice_segments'][key][att_name]
            fileID[gtx]['sea_ice_segments'][key].attrs[att_name] = att_val
        for k,v in IS2_atl07_corr[gtx]['sea_ice_segments'][key].items():
            attrs = IS2_atl07_attrs[gtx]['sea_ice_segments'][key][k]
            fillvalue = FILL_VALUE[gtx]['sea_ice_segments'][key][k]
            val = '{0}/{1}/{2}/{3}'.format(gtx,'sea_ice_segments',key,k)
            if fillvalue is not None:
                h5[gtx]['sea_ice_segments'][key][k] = \
                    fileID.create_dataset(val, np.shape(v), data=v,
                    dtype=v.dtype, fillvalue=fillvalue, compression='gzip')
            else:
                h5[gtx]['sea_ice_segments'][key][k] = \
                    fileID.create_dataset(val, np.shape(v), data=v,
                    dtype=v.dtype, compression='gzip')
            for i,dim in enumerate(DIMENSIONS[gtx]['sea_ice_segments'][key][k]):
                h5[gtx]['sea_ice_segments'][key][k].dims[i].attach_scale(
                    h5[gtx]['sea_ice_segments'][dim])
            for att_name,att_val in attrs.items():
                h5[gtx]['sea_ice_segments'][key][k].attrs[att_name] = att_val
    # global attributes describing the file contents
    fileID.attrs['featureType'] = 'trajectory'
    fileID.attrs['title'] = 'ATLAS/ICESat-2 L3A Sea Ice Height'
    fileID.attrs['summary'] = ('Estimates of the sea ice correction parameters '
        'needed to interpret and assess the quality of sea height estimates.')
    fileID.attrs['description'] = ('The data set (ATL07) contains along-track '
        'heights for sea ice and open water leads (at varying length scales) '
        'relative to the WGS84 ellipsoid (ITRF2014 reference frame) after '
        'adjustment for geoidal and tidal variations, and inverted barometer '
        'effects.')
    date_created = datetime.datetime.today()
    fileID.attrs['date_created'] = date_created.isoformat()
    project = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    fileID.attrs['project'] = project
    platform = 'ICESat-2 > Ice, Cloud, and land Elevation Satellite-2'
    # BUGFIX: previously assigned to 'project' a second time, so the
    # 'platform' attribute was never written
    fileID.attrs['platform'] = platform
    instrument = 'ATLAS > Advanced Topographic Laser Altimeter System'
    fileID.attrs['instrument'] = instrument
    fileID.attrs['source'] = 'Spacecraft'
    fileID.attrs['references'] = 'https://nsidc.org/data/icesat-2'
    fileID.attrs['processing_level'] = '4'
    fileID.attrs['input_files'] = os.path.basename(INPUT)
    # find the geospatial and temporal extent over all beams
    lnmn,lnmx,ltmn,ltmx,tmn,tmx = (np.inf,-np.inf,np.inf,-np.inf,np.inf,-np.inf)
    for gtx in beams:
        lon = IS2_atl07_corr[gtx]['sea_ice_segments']['longitude']
        lat = IS2_atl07_corr[gtx]['sea_ice_segments']['latitude']
        delta_time = IS2_atl07_corr[gtx]['sea_ice_segments']['delta_time']
        lnmn = lon.min() if (lon.min() < lnmn) else lnmn
        lnmx = lon.max() if (lon.max() > lnmx) else lnmx
        ltmn = lat.min() if (lat.min() < ltmn) else ltmn
        ltmx = lat.max() if (lat.max() > ltmx) else ltmx
        tmn = delta_time.min() if (delta_time.min() < tmn) else tmn
        tmx = delta_time.max() if (delta_time.max() > tmx) else tmx
    fileID.attrs['geospatial_lat_min'] = ltmn
    fileID.attrs['geospatial_lat_max'] = ltmx
    fileID.attrs['geospatial_lon_min'] = lnmn
    fileID.attrs['geospatial_lon_max'] = lnmx
    fileID.attrs['geospatial_lat_units'] = "degrees_north"
    fileID.attrs['geospatial_lon_units'] = "degrees_east"
    fileID.attrs['geospatial_ellipsoid'] = "WGS84"
    fileID.attrs['date_type'] = 'UTC'
    fileID.attrs['time_type'] = 'CCSDS UTC-A'
    # convert the delta time extrema to calendar dates for the
    # time_coverage attributes (GPS seconds -> UTC via leap seconds -> MJD)
    atlas_sdp_gps_epoch=IS2_atl07_corr['ancillary_data']['atlas_sdp_gps_epoch']
    gps_seconds = atlas_sdp_gps_epoch + np.array([tmn,tmx])
    leaps = icesat2_toolkit.time.count_leap_seconds(gps_seconds)
    MJD = icesat2_toolkit.time.convert_delta_time(gps_seconds - leaps,
        epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0)
    YY,MM,DD,HH,MN,SS = icesat2_toolkit.time.convert_julian(MJD + 2400000.5,
        FORMAT='tuple')
    tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]),
        int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1)))
    fileID.attrs['time_coverage_start'] = tcs.isoformat()
    tce = datetime.datetime(int(YY[1]), int(MM[1]), int(DD[1]),
        int(HH[1]), int(MN[1]), int(SS[1]), int(1e6*(SS[1] % 1)))
    fileID.attrs['time_coverage_end'] = tce.isoformat()
    fileID.attrs['time_coverage_duration'] = '{0:0.0f}'.format(tmx-tmn)
    # close the output HDF5 file
    fileID.close()
def main():
    """Parse command-line arguments and run the AVISO sea level
    interpolation for each input ICESat-2 ATL07 file."""
    def expand_path(p):
        # resolve "~" and relative components in user-supplied paths
        return os.path.abspath(os.path.expanduser(p))

    def octal(x):
        # permission modes are given in octal on the command line
        return int(x, base=8)

    parser = argparse.ArgumentParser(
        description="""Interpolates AVISO sea level anomalies, absolute
        dynamic topography and mean dynamic topography to ICESat-2
        ATL07 sea ice height data
        """
    )
    parser.add_argument('infile',
        type=expand_path, nargs='+',
        help='ICESat-2 ATL07 file to run')
    parser.add_argument('--directory','-D',
        type=expand_path,
        default=os.getcwd(),
        help='Working data directory')
    parser.add_argument('--verbose','-V',
        default=False, action='store_true',
        help='Output information about each created file')
    parser.add_argument('--mode','-M',
        type=octal, default=0o775,
        help='Permission mode of directories and files created')
    arguments = parser.parse_args()
    # run the interpolation program for each input granule
    for granule in arguments.infile:
        interp_sea_level_ICESat2(arguments.directory, granule,
            VERBOSE=arguments.verbose, MODE=arguments.mode)
# run the program as a command-line script
if __name__ == '__main__':
    main()
f739014e51e38690018ed000125672340d61618f | 8,914 | py | Python | src/argteller/builder/access_object.py | mozjay0619/argteller-viz | 963c6d43019efb2b0e9bcdb4b3053b57cd4ff373 | [
"BSD-3-Clause"
] | null | null | null | src/argteller/builder/access_object.py | mozjay0619/argteller-viz | 963c6d43019efb2b0e9bcdb4b3053b57cd4ff373 | [
"BSD-3-Clause"
] | 7 | 2021-08-04T15:54:07.000Z | 2021-09-17T17:40:38.000Z | src/argteller/builder/access_object.py | mozjay0619/argteller-viz | 963c6d43019efb2b0e9bcdb4b3053b57cd4ff373 | [
"BSD-3-Clause"
] | null | null | null | from ..tree.tree_node import TreeNode
from ..tree.tree_builder import display_tree
from ..widgets.dynamic_widgets import DynamicWidget
from ..utils.data_structure_utils import nested_defaultdict
# IPython/ipywidgets are optional dependencies: record whether they are
# importable so the widget-building code can degrade gracefully without them
try:
    from IPython.display import display
    import ipywidgets as widgets
    from ipywidgets import HBox, Label, VBox
    from ipywidgets import Button, Layout, HTML
    module_found = True
except ModuleNotFoundError:
    module_found = False
from threading import Event
class AccessObject():
    """Creates the DynamicWidgets based on the input tree.

    Builds one ``DynamicWidget`` per parameter node under each topic of the
    tree and provides lookup/accessor methods for the widgets, their values
    and the underlying tree nodes.
    """
    def __init__(self, root, node_dicts):
        # events used to coordinate initial widget construction with the
        # DynamicWidget instances
        self.initial_event = Event()
        param_setter_event = Event()
        self.module_found = module_found
        # without ipywidgets/IPython no widgets can be built
        if not self.module_found:
            return
        self.root, self.node_dicts = root, node_dicts
        self.widget_dicts = nested_defaultdict(dict)
        # widget_nodes is intended to eventually replace widget_dicts; the
        # entries are the same objects, so there is no duplication of state
        self.widget_nodes = nested_defaultdict(dict)
        self.param_vboxes = {}
        # one VBox of parameter widgets per topic (direct children of root)
        for topic in self.root.children:
            param_widgets = []
            for param in topic.children:
                param_widget = DynamicWidget(topic.name, param, self.widget_dicts, self.widget_nodes, self.initial_event, param_setter_event)
                param_widgets.append(param_widget)
            param_vbox = VBox(param_widgets)
            self.param_vboxes[topic.name] = param_vbox
        # signal that the initial construction pass is complete
        self.initial_event.set()
    def display_tree(self):
        # render the parameter tree (delegates to tree_builder.display_tree)
        display_tree(self.root)
    def get_topics(self):
        # names of the top-level topic nodes
        return self.root.get_children_names()
    def get_params(self, topic=None):
        # parameter names for one topic, or all unique parameter names in
        # the whole tree when no topic is given
        if topic:
            return list(self.widget_dicts[topic].keys())
        else:
            l = []
            self._find_params(self.root, l)
            return l
    def get_effective_params(self, topic=None):
        # parameters that currently carry a value: non-empty text, a made
        # choice, or any boolean (booleans always have a value)
        # NOTE(review): with topic=None this indexes widget_nodes[None] —
        # presumably only called with an explicit topic; confirm callers
        effective_params = []
        params = self.get_params(topic)
        for param in params:
            widget_type = self.widget_nodes[topic][param].type
            if widget_type=='text':
                if not self.get_value(param, topic) == '':
                    effective_params.append(param)
            elif widget_type=='choice':
                if not self.get_value(param, topic) is None:
                    effective_params.append(param)
            elif widget_type=='boolean':
                effective_params.append(param)
        return effective_params
    def _find_params(self, node, l):
        # depth-first collection of unique 'param'/'optional' node names
        depth = node.depth
        node_type = node.primary_type
        node_name = node.name
        if node_type != 'root':
            if node_type == 'topic':
                depth += 1
            if node_type in ['param', 'optional']:
                if node_name not in l:
                    l.append(node_name)
        for child in node.children:
            self._find_params(child, l)
    def get_value(self, param, topic=None):
        """This will return the string casted user input values. We will not
        cast this value to the castable type since the access_object is meant
        for the internal uses only. All widget values are internally treated
        as strings, so we will keep it that way.

        The returned values will be casted into castable types in the class
        decorator just before the values are returned to the user.
        """
        return self.get_widget(param, topic).value
    def set_value(self, value, param, topic=None):
        # set a widget's value from a (string) saved value; booleans are
        # evaluated back into Python bools
        try:
            widget_type = self.get_widget_node(param, topic).type
        except:
            print("The parameter [ {} ] in topic [ {} ] does not exist anymore. Skipping it.".format(param, topic))
            return
        if widget_type=='boolean':
            # NOTE(review): eval() on a stored string — only 'True'/'False'
            # are expected, but eval of arbitrary content is unsafe
            self.get_widget(param, topic).value = eval(value)
        else:
            self.get_widget(param, topic).value = str(value)
    def get_vbox(self, topic):
        # the VBox containing all parameter widgets for the topic
        return self.param_vboxes[topic]
    def get_widget_node(self, param, topic=None):
        # look up the widget node by name; without a topic, search all
        # topics and require the name to be unambiguous
        if topic:
            try:
                return self.widget_nodes[topic][param]
            except:
                return None
        else:
            params = []
            topics = []
            for topic, param_dict in self.widget_nodes.items():
                if param in param_dict:
                    params.append(param_dict[param])
                    topics.append(topic)
            if len(params) > 1:
                raise TypeError('Specify the topic!', topics)
            return params[0]
    def get_widget(self, param, topic=None):
        # return the innermost input widget (last child of the container);
        # 'topic/param' strings are split into their components
        if '/' in param:
            topic, param = param.split('/')
        if topic:
            return self.widget_dicts[topic][param].children[-1]
        else:
            params = []
            topics = []
            for topic, param_dict in self.widget_dicts.items():
                if param in param_dict:
                    params.append(param_dict[param])
                    topics.append(topic)
            if len(params) > 1:
                raise TypeError('Specify the topic!', topics)
            return params[0].children[-1]
    def get_node(self, node, topic=None):
        # look up a tree node by name; without a topic, search all topics
        # and require the name to be unambiguous
        if topic:
            return self.node_dicts[topic][node]
        else:
            nodes = []
            topics = []
            for topic, node_dict in self.node_dicts.items():
                if node in node_dict:
                    nodes.append(node_dict[node])
                    topics.append(topic)
            if len(nodes) > 1:
                raise TypeError('Specify the topic!', topics)
            if len(nodes)==0:
                return None
            return nodes[0]
    def node_exists(self, node, topic=None):
        # True when the named node is present in the tree
        node = self.get_node(node, topic)
        if node is None:
            return False
        else:
            return True
    def get_active_param_values(self):
        # serialize the currently selected topics/parameters back into the
        # DSL text format ("topic\n-param:value\n...")
        # NOTE(review): self.topic_choice_widget is not assigned anywhere in
        # this class — presumably set externally or by a subclass; confirm
        dsl_gen = [""]
        added_params = []
        for topic in self.root.children:
            if topic.name not in self.topic_choice_widget.value:
                continue
            dsl_gen[0] += "{}\n".format(topic.name)
            for param in topic.children:  # genesis params
                self._follow_branch(param, topic, dsl_gen, added_params)
            if len(added_params)==0:
                # drop the topic header again when no parameter was emitted
                dsl = dsl_gen[0][0:-1]
                dsl_gen[0] = '\n'.join(dsl.split('\n')[0:-1])
                dsl_gen[0] += "\n"
        return dsl_gen[0][0:-2]
    def _follow_branch(self, param, topic, dsl_gen, added_params):
        """Notice the similarity to _add_widgets method in DynamicWidget
        class

        Recursively emits "-param:value" DSL lines for this parameter and
        any child parameters activated by its current value.
        """
        input_value = self.get_value(param.name, topic.name)
        if param.name in self.widget_nodes[topic.name]:  # For the topic/param names
            widget_type = self.widget_nodes[topic.name][param.name].type
        else:
            widget_type = None
        if widget_type=='text':
            # only emit text params that have been filled in
            if not input_value == '':
                dsl_gen[0] += "-{}:{}\n".format(param.name, input_value)
                added_params.append(param.name)
        elif widget_type=='choice':
            # only emit choice params that have a selection
            if not input_value is None:
                dsl_gen[0] += "-{}:{}\n".format(param.name, input_value)
                added_params.append(param.name)
        elif widget_type=='boolean':
            # booleans are always emitted; when True, descend into the
            # parameters they enable
            dsl_gen[0] += "-{}:{}\n".format(param.name, input_value)
            added_params.append(param.name)
            if input_value:
                for child_node in param.children:
                    if child_node.primary_type=='param' or child_node.primary_type=='optional':
                        self._follow_branch(child_node, topic, dsl_gen, added_params)
        for child_node in param.children:  # Since this is choice param, child_nodes are all options
            if child_node.name==input_value:
                for _child_node in child_node.children:
                    self._follow_branch(_child_node, topic, dsl_gen, added_params)
| 26.372781 | 141 | 0.538479 | from ..tree.tree_node import TreeNode
from ..tree.tree_builder import display_tree
from ..widgets.dynamic_widgets import DynamicWidget
from ..utils.data_structure_utils import nested_defaultdict
try:
from IPython.display import display
import ipywidgets as widgets
from ipywidgets import HBox, Label, VBox
from ipywidgets import Button, Layout, HTML
module_found = True
except ModuleNotFoundError:
module_found = False
from threading import Event
class AccessObject():
def __init__(self, root, node_dicts):
self.initial_event = Event()
param_setter_event = Event()
self.module_found = module_found
if not self.module_found:
return
self.root, self.node_dicts = root, node_dicts
self.widget_dicts = nested_defaultdict(dict)
self.widget_nodes = nested_defaultdict(dict)
# we don't have duplicates they are the same objects.
self.param_vboxes = {}
for topic in self.root.children:
param_widgets = []
for param in topic.children:
param_widget = DynamicWidget(topic.name, param, self.widget_dicts, self.widget_nodes, self.initial_event, param_setter_event)
param_widgets.append(param_widget)
param_vbox = VBox(param_widgets)
self.param_vboxes[topic.name] = param_vbox
self.initial_event.set()
def display_tree(self):
display_tree(self.root)
def get_topics(self):
return self.root.get_children_names()
def get_params(self, topic=None):
if topic:
return list(self.widget_dicts[topic].keys())
else:
l = []
self._find_params(self.root, l)
return l
def get_effective_params(self, topic=None):
effective_params = []
params = self.get_params(topic)
for param in params:
widget_type = self.widget_nodes[topic][param].type
if widget_type=='text':
if not self.get_value(param, topic) == '':
effective_params.append(param)
elif widget_type=='choice':
if not self.get_value(param, topic) is None:
effective_params.append(param)
elif widget_type=='boolean':
effective_params.append(param)
return effective_params
def _find_params(self, node, l):
depth = node.depth
node_type = node.primary_type
node_name = node.name
if node_type != 'root':
if node_type == 'topic':
depth += 1
if node_type in ['param', 'optional']:
if node_name not in l:
l.append(node_name)
for child in node.children:
self._find_params(child, l)
def get_value(self, param, topic=None):
return self.get_widget(param, topic).value
def set_value(self, value, param, topic=None):
try:
widget_type = self.get_widget_node(param, topic).type
except:
print("The parameter [ {} ] in topic [ {} ] does not exist anymore. Skipping it.".format(param, topic))
return
if widget_type=='boolean':
self.get_widget(param, topic).value = eval(value)
else:
self.get_widget(param, topic).value = str(value)
def get_vbox(self, topic):
return self.param_vboxes[topic]
def get_widget_node(self, param, topic=None):
if topic:
try:
return self.widget_nodes[topic][param]
except:
return None
else:
params = []
topics = []
for topic, param_dict in self.widget_nodes.items():
if param in param_dict:
params.append(param_dict[param])
topics.append(topic)
if len(params) > 1:
raise TypeError('Specify the topic!', topics)
return params[0]
def get_widget(self, param, topic=None):
if '/' in param:
topic, param = param.split('/')
if topic:
return self.widget_dicts[topic][param].children[-1]
else:
params = []
topics = []
for topic, param_dict in self.widget_dicts.items():
if param in param_dict:
params.append(param_dict[param])
topics.append(topic)
if len(params) > 1:
raise TypeError('Specify the topic!', topics)
return params[0].children[-1]
def get_node(self, node, topic=None):
if topic:
return self.node_dicts[topic][node]
else:
nodes = []
topics = []
for topic, node_dict in self.node_dicts.items():
if node in node_dict:
nodes.append(node_dict[node])
topics.append(topic)
if len(nodes) > 1:
raise TypeError('Specify the topic!', topics)
if len(nodes)==0:
return None
return nodes[0]
def node_exists(self, node, topic=None):
node = self.get_node(node, topic)
if node is None:
return False
else:
return True
def get_active_param_values(self):
dsl_gen = [""]
added_params = []
for topic in self.root.children:
if topic.name not in self.topic_choice_widget.value:
continue
dsl_gen[0] += "{}\n".format(topic.name)
for param in topic.children:
self._follow_branch(param, topic, dsl_gen, added_params)
if len(added_params)==0:
dsl = dsl_gen[0][0:-1]
dsl_gen[0] = '\n'.join(dsl.split('\n')[0:-1])
dsl_gen[0] += "\n"
return dsl_gen[0][0:-2]
def _follow_branch(self, param, topic, dsl_gen, added_params):
input_value = self.get_value(param.name, topic.name)
if param.name in self.widget_nodes[topic.name]:
widget_type = self.widget_nodes[topic.name][param.name].type
else:
widget_type = None
if widget_type=='text':
if not input_value == '':
dsl_gen[0] += "-{}:{}\n".format(param.name, input_value)
added_params.append(param.name)
elif widget_type=='choice':
if not input_value is None:
dsl_gen[0] += "-{}:{}\n".format(param.name, input_value)
added_params.append(param.name)
elif widget_type=='boolean':
dsl_gen[0] += "-{}:{}\n".format(param.name, input_value)
added_params.append(param.name)
if input_value:
for child_node in param.children:
if child_node.primary_type=='param' or child_node.primary_type=='optional':
self._follow_branch(child_node, topic, dsl_gen, added_params)
for child_node in param.children:
if child_node.name==input_value:
for _child_node in child_node.children:
self._follow_branch(_child_node, topic, dsl_gen, added_params)
| true | true |
f73901e1ff6cfefad9f65ca0f31da49a0cddd668 | 8,751 | py | Python | models/reid.py | FDU-VTS/Person-Search | 36a1eab8d8fdf149e32dece030edff02dbc8a915 | [
"Apache-2.0"
] | null | null | null | models/reid.py | FDU-VTS/Person-Search | 36a1eab8d8fdf149e32dece030edff02dbc8a915 | [
"Apache-2.0"
] | null | null | null | models/reid.py | FDU-VTS/Person-Search | 36a1eab8d8fdf149e32dece030edff02dbc8a915 | [
"Apache-2.0"
] | null | null | null | # encoding: utf-8
"""
@author: liaoxingyu
@contact: sherlockliao01@gmail.com
"""
import math
import torch
from torch import nn
from torch.utils import model_zoo
from models.context_block import *
# torchvision checkpoint URLs, keyed by architecture name; consulted by
# ResNet.load_pretrain() when no local checkpoint path is supplied.
model_urls = {
    'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
    'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
    'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
    'resnext50_32x4d': 'https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth',
    'resnext101_32x8d': 'https://download.pytorch.org/models/resnext101_32x8d-8ba56ff5.pth',
    'wide_resnet50_2': 'https://download.pytorch.org/models/wide_resnet50_2-95faca4d.pth',
    'wide_resnet101_2': 'https://download.pytorch.org/models/wide_resnet101_2-32ee1156.pth',
}
# Blocks per stage for the architectures ResNet.from_name() can build.
model_layers = {
    'resnet50': [3, 4, 6, 3],
    'resnet101': [3, 4, 23, 3]
}
# Public API of this module.
__all__ = ['ResNet', 'Bottleneck']
class IBN(nn.Module):
    """Instance-Batch Normalization layer.

    The first ``planes // 8`` channels are instance-normalized and the
    remaining channels are batch-normalized (a BN:IN ratio of 7:1); the two
    results are concatenated back along the channel axis.
    """

    def __init__(self, planes):
        super(IBN, self).__init__()
        in_channels = int(planes / 8)
        # `half` is the chunk size used by torch.split in forward().
        self.half = in_channels
        self.IN = nn.InstanceNorm2d(in_channels, affine=True)
        self.BN = nn.BatchNorm2d(planes - in_channels)

    def forward(self, x):
        # Split into chunks of `self.half` channels: the first chunk goes
        # through InstanceNorm, everything else through BatchNorm.
        chunks = torch.split(x, self.half, dim=1)
        in_part = self.IN(chunks[0].contiguous())
        bn_part = self.BN(torch.cat(chunks[1:], dim=1).contiguous())
        return torch.cat((in_part, bn_part), 1)
class Bottleneck(nn.Module):
    """Standard ResNet bottleneck block (1x1 reduce -> 3x3 -> 1x1 expand).

    Optionally swaps the first BatchNorm for an IBN layer and appends a
    global-context (GCNet) block after the residual branch.
    """

    expansion = 4  # output channels = planes * expansion

    def __init__(self, inplanes, planes, with_ibn=False, gcb=None, stride=1, downsample=None):
        """Create one bottleneck block.

        Args:
            inplanes: input channel count.
            planes: bottleneck width; the block outputs ``planes * 4`` channels.
            with_ibn: use IBN instead of BatchNorm after the first conv.
            gcb: ContextBlock kwargs, or None to disable the GCNet branch.
            stride: stride of the 3x3 convolution.
            downsample: optional module projecting the shortcut path.
        """
        super(Bottleneck, self).__init__()
        self.with_gcb = gcb is not None
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = IBN(planes) if with_ibn else nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(planes * 4)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride
        if self.with_gcb:
            # GCNet context block operating on the expanded channel count.
            self.context_block = ContextBlock(inplanes=planes * self.expansion, **gcb)

    def forward(self, x):
        # Shortcut path (projected when shapes differ).
        identity = self.downsample(x) if self.downsample is not None else x

        out = self.relu(self.bn1(self.conv1(x)))
        out = self.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        if self.with_gcb:
            out = self.context_block(out)

        out += identity
        return self.relu(out)
class ResNet(nn.Module):
    """ResNet backbone with optional IBN layers and global-context (GCNet) blocks.

    The fully-connected classification head is omitted: ``forward`` returns
    the final (stage-4) convolutional feature map.
    """

    def __init__(self, last_stride, with_ibn, gcb, stage_with_gcb, block, layers):
        """Build the four-stage backbone.

        Args:
            last_stride: stride of the first block of stage 4 (re-id models
                commonly use 1 to keep a larger feature map).
            with_ibn: insert IBN layers in stages 1-3 (stage 4 never uses IBN).
            gcb: ContextBlock keyword arguments, or None to disable GCNet.
            stage_with_gcb: four flags selecting which stages get GCNet blocks.
            block: residual block class (e.g. ``Bottleneck``).
            layers: number of blocks per stage, e.g. ``[3, 4, 6, 3]``.
        """
        super().__init__()
        scale = 64
        self.inplanes = scale
        self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
                               bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(block, scale, layers[0], with_ibn=with_ibn,
                                       gcb=gcb if stage_with_gcb[0] else None)
        self.layer2 = self._make_layer(block, scale * 2, layers[1], stride=2, with_ibn=with_ibn,
                                       gcb=gcb if stage_with_gcb[1] else None)
        self.layer3 = self._make_layer(block, scale * 4, layers[2], stride=2, with_ibn=with_ibn,
                                       gcb=gcb if stage_with_gcb[2] else None)
        self.layer4 = self._make_layer(block, scale * 8, layers[3], stride=last_stride,
                                       gcb=gcb if stage_with_gcb[3] else None)

    def _make_layer(self, block, planes, blocks, stride=1, with_ibn=False, gcb=None):
        """Stack ``blocks`` residual blocks for one stage, adding a projection
        shortcut on the first block when the stride or channel count changes."""
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )
        if planes == 512:  # stage 4 never uses IBN
            with_ibn = False
        layers = [block(self.inplanes, planes, with_ibn, gcb, stride, downsample)]
        self.inplanes = planes * block.expansion
        for _ in range(1, blocks):
            layers.append(block(self.inplanes, planes, with_ibn, gcb))
        return nn.Sequential(*layers)

    def forward(self, x):
        """Return the stage-4 feature map for a batch of RGB images."""
        x = self.maxpool(self.relu(self.bn1(self.conv1(x))))
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        return x

    def load_pretrain(self, model_path=''):
        """Load ImageNet weights, dropping the FC head in both cases.

        An empty ``model_path`` downloads the torchvision checkpoint for
        ``self._model_name``; otherwise ``model_path`` is read as an IBN-Net
        checkpoint (keys prefixed with ``module.``).
        """
        # BUG FIX: was `model_path is not ''` -- an identity comparison
        # against a string literal, which is unreliable and raises a
        # SyntaxWarning on Python >= 3.8.
        if model_path == '':  # plain torchvision resnet pretrain
            state_dict = model_zoo.load_url(model_urls[self._model_name])
            state_dict.pop('fc.weight')
            state_dict.pop('fc.bias')
            self.load_state_dict(state_dict)
        else:  # ibn pretrain
            state_dict = torch.load(model_path)['state_dict']
            state_dict.pop('module.fc.weight')
            state_dict.pop('module.fc.bias')
            new_state_dict = {}
            for k in state_dict:
                new_k = '.'.join(k.split('.')[1:])  # strip the 'module.' prefix
                # Keep only entries whose shape matches this model.
                if self.state_dict()[new_k].shape == state_dict[k].shape:
                    new_state_dict[new_k] = state_dict[k]
            self.load_state_dict(new_state_dict, strict=False)

    def random_init(self):
        """He-initialize convolutions and reset BatchNorm to identity."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    @classmethod
    def from_name(cls, model_name, last_stride, with_ibn, gcb, stage_with_gcb):
        """Build a backbone by name ('resnet50' or 'resnet101')."""
        # Stored on the class so load_pretrain() can pick the right URL.
        cls._model_name = model_name
        return ResNet(last_stride, with_ibn, gcb, stage_with_gcb,
                      block=Bottleneck, layers=model_layers[model_name])
class Baseline(nn.Module):
    """Re-id baseline: ResNet backbone + global average pooling + BNNeck head.

    ``forward`` returns the BNNeck feature together with a feature-weighted
    activation map over the backbone output; the classifier layer is only
    used by external training code.
    """

    in_planes = 2048  # channel width of the backbone's last stage

    def __init__(self,
                 backbone,
                 num_classes,
                 last_stride,
                 with_ibn,
                 gcb,
                 stage_with_gcb,
                 pretrain=True,
                 model_path=''):
        super().__init__()
        try:
            self.base = ResNet.from_name(backbone, last_stride, with_ibn, gcb, stage_with_gcb)
        except KeyError as err:
            # BUG FIX: the original bare `except:` printed a message and kept
            # going, which only deferred the failure to a confusing
            # AttributeError on `self.base` below. Fail fast instead.
            raise ValueError(f'not support {backbone} backbone') from err
        if pretrain:
            self.base.load_pretrain(model_path)
        self.gap = nn.AdaptiveAvgPool2d(1)
        self.num_classes = num_classes

        self.bottleneck = nn.BatchNorm1d(self.in_planes)
        self.bottleneck.bias.requires_grad_(False)  # BNNeck: no shift
        self.classifier = nn.Linear(self.in_planes, self.num_classes, bias=False)

    def forward(self, x, label=None):
        """Return ``(feat, act_map)``; *label* is unused (kept for API compat)."""
        base = self.base(x)
        global_feat = self.gap(base)  # (b, 2048, 1, 1)
        global_feat = global_feat.view(-1, global_feat.size()[1])  # flatten -> (b, 2048)
        feat = self.bottleneck(global_feat)  # normalize for angular softmax
        # Channel-wise dot product between the feature map and the pooled
        # feature: a coarse spatial activation map.
        return feat, torch.sum(base * feat.unsqueeze(-1).unsqueeze(-1), dim=1)

    def load_params_wo_fc(self, state_dict):
        """Load a checkpoint, skipping the (class-count-dependent) classifier."""
        state_dict.pop('classifier.weight')
        res = self.load_state_dict(state_dict, strict=False)
        # Only the deliberately-skipped classifier weight may be missing.
        assert str(res.missing_keys) == str(['classifier.weight', ]), \
            'issue loading pretrained weights'
if __name__ == "__main__":
    # Smoke test: build an untrained resnet50 re-id baseline and print it.
    model = Baseline(
        'resnet50',       # backbone
        1453,             # num_classes
        1,                # last_stride
        True,             # with_ibn
        "ratio",          # gcb -- NOTE(review): ContextBlock expects kwargs
                          # (**gcb); a string only works because every
                          # stage_with_gcb flag below is False. Confirm.
        (False, False, False, False),  # stage_with_gcb
        pretrain = False,  # skip checkpoint download
        model_path = '')
print(model) | 34.864542 | 116 | 0.587019 |
import math
import torch
from torch import nn
from torch.utils import model_zoo
from models.context_block import *
model_urls = {
'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
'resnext50_32x4d': 'https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth',
'resnext101_32x8d': 'https://download.pytorch.org/models/resnext101_32x8d-8ba56ff5.pth',
'wide_resnet50_2': 'https://download.pytorch.org/models/wide_resnet50_2-95faca4d.pth',
'wide_resnet101_2': 'https://download.pytorch.org/models/wide_resnet101_2-32ee1156.pth',
}
model_layers = {
'resnet50': [3, 4, 6, 3],
'resnet101': [3, 4, 23, 3]
}
__all__ = ['ResNet', 'Bottleneck']
class IBN(nn.Module):
def __init__(self, planes):
super(IBN, self).__init__()
half1 = int(planes / 8)
self.half = half1
half2 = planes - half1
self.IN = nn.InstanceNorm2d(half1, affine=True)
self.BN = nn.BatchNorm2d(half2)
def forward(self, x):
split = torch.split(x, self.half, dim=1)
out1 = self.IN(split[0].contiguous())
out2 = self.BN(torch.cat(split[1:], dim=1).contiguous())
out = torch.cat((out1, out2), 1)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, with_ibn=False, gcb=None, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.with_gcb = gcb is not None
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
if with_ibn:
self.bn1 = IBN(planes)
else:
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
if self.with_gcb:
gcb_inplanes = planes * self.expansion
self.context_block = ContextBlock(inplanes=gcb_inplanes, **gcb)
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.with_gcb:
out = self.context_block(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(self, last_stride, with_ibn, gcb, stage_with_gcb, block, layers):
scale = 64
self.inplanes = scale
super().__init__()
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, scale, layers[0], with_ibn=with_ibn,
gcb=gcb if stage_with_gcb[0] else None)
self.layer2 = self._make_layer(block, scale * 2, layers[1], stride=2, with_ibn=with_ibn,
gcb=gcb if stage_with_gcb[1] else None)
self.layer3 = self._make_layer(block, scale * 4, layers[2], stride=2, with_ibn=with_ibn,
gcb=gcb if stage_with_gcb[2] else None)
self.layer4 = self._make_layer(block, scale * 8, layers[3], stride=last_stride,
gcb=gcb if stage_with_gcb[3] else None)
def _make_layer(self, block, planes, blocks, stride=1, with_ibn=False, gcb=None):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
if planes == 512:
with_ibn = False
layers.append(block(self.inplanes, planes, with_ibn, gcb, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes, with_ibn, gcb))
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
return x
def load_pretrain(self, model_path=''):
with_model_path = (model_path is not '')
if not with_model_path:
state_dict = model_zoo.load_url(model_urls[self._model_name])
state_dict.pop('fc.weight')
state_dict.pop('fc.bias')
self.load_state_dict(state_dict)
else:
state_dict = torch.load(model_path)['state_dict']
state_dict.pop('module.fc.weight')
state_dict.pop('module.fc.bias')
new_state_dict = {}
for k in state_dict:
new_k = '.'.join(k.split('.')[1:])
if self.state_dict()[new_k].shape == state_dict[k].shape:
new_state_dict[new_k] = state_dict[k]
state_dict = new_state_dict
self.load_state_dict(state_dict, strict=False)
def random_init(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
@classmethod
def from_name(cls, model_name, last_stride, with_ibn, gcb, stage_with_gcb):
cls._model_name = model_name
return ResNet(last_stride, with_ibn, gcb, stage_with_gcb, block=Bottleneck, layers=model_layers[model_name])
class Baseline(nn.Module):
in_planes = 2048
def __init__(self,
backbone,
num_classes,
last_stride,
with_ibn,
gcb,
stage_with_gcb,
pretrain=True,
model_path=''):
super().__init__()
try:
self.base = ResNet.from_name(backbone, last_stride, with_ibn, gcb, stage_with_gcb)
except:
print(f'not support {backbone} backbone')
if pretrain:
self.base.load_pretrain(model_path)
self.gap = nn.AdaptiveAvgPool2d(1)
self.num_classes = num_classes
self.bottleneck = nn.BatchNorm1d(self.in_planes)
self.bottleneck.bias.requires_grad_(False)
self.classifier = nn.Linear(self.in_planes, self.num_classes, bias=False)
def forward(self, x, label=None):
base = self.base(x)
global_feat = self.gap(base)
global_feat = global_feat.view(-1, global_feat.size()[1])
feat = self.bottleneck(global_feat)
return feat, torch.sum(base*feat.unsqueeze(-1).unsqueeze(-1), dim=1)
def load_params_wo_fc(self, state_dict):
state_dict.pop('classifier.weight')
res = self.load_state_dict(state_dict, strict=False)
assert str(res.missing_keys) == str(['classifier.weight',]), 'issue loading pretrained weights'
if __name__ == "__main__":
model = Baseline(
'resnet50',
1453,
1,
True,
"ratio",
(False, False, False, False),
pretrain = False,
model_path = '')
print(model) | true | true |
f73901ef4984a451692d472712810fe344735601 | 965 | py | Python | qiskit/providers/basicaer/__init__.py | lerongil/qiskit-terra | a25af2a2378bc3d4f5ec73b948d048d1b707454c | [
"Apache-2.0"
] | 1 | 2021-10-13T14:37:54.000Z | 2021-10-13T14:37:54.000Z | qiskit/providers/basicaer/__init__.py | lerongil/qiskit-terra | a25af2a2378bc3d4f5ec73b948d048d1b707454c | [
"Apache-2.0"
] | null | null | null | qiskit/providers/basicaer/__init__.py | lerongil/qiskit-terra | a25af2a2378bc3d4f5ec73b948d048d1b707454c | [
"Apache-2.0"
] | 2 | 2020-02-10T16:34:18.000Z | 2020-05-22T08:37:07.000Z | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""BasicAer Provider: Contains Python simulators."""
from .basicaerprovider import BasicAerProvider
from .basicaerjob import BasicAerJob
from .qasm_simulator import QasmSimulatorPy
from .statevector_simulator import StatevectorSimulatorPy
from .unitary_simulator import UnitarySimulatorPy
from .exceptions import BasicAerError
# Global instance to be used as the entry point for convenience.
# The deliberately non-snake_case module-level name is why pylint's
# invalid-name check is disabled on the assignment.
BasicAer = BasicAerProvider()  # pylint: disable=invalid-name
| 37.115385 | 77 | 0.786528 |
from .basicaerprovider import BasicAerProvider
from .basicaerjob import BasicAerJob
from .qasm_simulator import QasmSimulatorPy
from .statevector_simulator import StatevectorSimulatorPy
from .unitary_simulator import UnitarySimulatorPy
from .exceptions import BasicAerError
BasicAer = BasicAerProvider()
| true | true |
f73904b8d9c1cb95aa33edbe0ce7ade57ca5ceab | 1,757 | py | Python | test/test_image_helpers.py | vincentriche/yoga | 583217064e31e303cfe93eeaa5a0a25a7fb2c2b1 | [
"BSD-3-Clause"
] | null | null | null | test/test_image_helpers.py | vincentriche/yoga | 583217064e31e303cfe93eeaa5a0a25a7fb2c2b1 | [
"BSD-3-Clause"
] | null | null | null | test/test_image_helpers.py | vincentriche/yoga | 583217064e31e303cfe93eeaa5a0a25a7fb2c2b1 | [
"BSD-3-Clause"
] | null | null | null | import pytest
from PIL import Image
from yoga.image import helpers
class Test_image_have_alpha(object):
    """Tests for ``helpers.image_have_alpha``."""

    @pytest.mark.parametrize(
        "image_path",
        [
            "test/images/image1.jpg",
            "test/images/unused-alpha.png",
            "test/images/indexed.png",
            "test/images/grayscale.png",
        ],
    )
    def test_image_without_alpha(self, image_path):
        # Images with no (used) alpha channel must be reported as opaque.
        image = Image.open(image_path)
        assert not helpers.image_have_alpha(image)

    def test_image_with_alpha(self):
        image = Image.open("test/images/alpha.png")
        assert helpers.image_have_alpha(image)

    @pytest.mark.parametrize(
        "image_path, threshold, is_alpha",
        [
            ("test/images/threshold.png", 0xEF, True),
            ("test/images/threshold.png", 0xE0, False),
        ],
    )
    def test_alpha_threshold(self, image_path, threshold, is_alpha):
        # BUG FIX: the parametrized image_path was ignored and the file name
        # was hard-coded, so any new parametrize case would silently test
        # the wrong image.
        image = Image.open(image_path)
        if is_alpha:
            assert helpers.image_have_alpha(image, threshold)
        else:
            assert not helpers.image_have_alpha(image, threshold)
class Test_gess_image_format(object):
    """Tests for ``helpers.guess_image_format``."""

    @pytest.mark.parametrize(
        "image_path, expected_format",
        [
            ("test/images/image1.jpg", "jpeg"),
            ("test/images/alpha.png", "png"),
        ],
    )
    def test_supported_image_format(self, image_path, expected_format):
        # FIX: use a context manager -- the original `open(...).read()`
        # leaked the file handle.
        with open(image_path, "rb") as f:
            image_bytes = f.read()
        assert helpers.guess_image_format(image_bytes) == expected_format

    def test_unsuported_image_format(self):
        with open("test/images/alpha.svg", "rb") as f:
            image_bytes = f.read()
        with pytest.raises(ValueError):
            helpers.guess_image_format(image_bytes)
| 31.375 | 73 | 0.63062 | import pytest
from PIL import Image
from yoga.image import helpers
class Test_image_have_alpha(object):
@pytest.mark.parametrize(
"image_path",
[
"test/images/image1.jpg",
"test/images/unused-alpha.png",
"test/images/indexed.png",
"test/images/grayscale.png",
],
)
def test_image_without_alpha(self, image_path):
image = Image.open(image_path)
assert not helpers.image_have_alpha(image)
def test_image_with_alpha(self):
image = Image.open("test/images/alpha.png")
assert helpers.image_have_alpha(image)
@pytest.mark.parametrize(
"image_path, threshold, is_alpha",
[
("test/images/threshold.png", 0xEF, True),
("test/images/threshold.png", 0xE0, False),
],
)
def test_alpha_threshold(self, image_path, threshold, is_alpha):
image = Image.open("test/images/threshold.png")
if is_alpha:
assert helpers.image_have_alpha(image, threshold)
else:
assert not helpers.image_have_alpha(image, threshold)
class Test_gess_image_format(object):
@pytest.mark.parametrize(
"image_path, expected_format",
[
("test/images/image1.jpg", "jpeg"),
("test/images/alpha.png", "png"),
],
)
def test_supported_image_format(self, image_path, expected_format):
image_bytes = open(image_path, "rb").read()
assert helpers.guess_image_format(image_bytes) == expected_format
def test_unsuported_image_format(self):
image_bytes = open("test/images/alpha.svg", "rb").read()
with pytest.raises(ValueError):
helpers.guess_image_format(image_bytes)
| true | true |
f739068bdaf87f5feab35741d7c2f3304166461b | 86 | py | Python | src/final_exam/q_player/main_player.py | acc-cosc-1336/cosc-1336-spring-2018-Miguelh1997 | ac4b0405c4070758d0fc07458d4dca8a8a0313de | [
"MIT"
] | null | null | null | src/final_exam/q_player/main_player.py | acc-cosc-1336/cosc-1336-spring-2018-Miguelh1997 | ac4b0405c4070758d0fc07458d4dca8a8a0313de | [
"MIT"
] | null | null | null | src/final_exam/q_player/main_player.py | acc-cosc-1336/cosc-1336-spring-2018-Miguelh1997 | ac4b0405c4070758d0fc07458d4dca8a8a0313de | [
"MIT"
] | 1 | 2018-02-13T03:32:50.000Z | 2018-02-13T03:32:50.000Z | from player import Player
# Exercise the Player class with a single come-out roll and print the result.
# NOTE(review): 7 and 11 look like craps winning come-out values -- confirm
# their meaning against the Player implementation.
p = Player(7,11)
new = p.check_come_out_roll()
print(new)
| 12.285714 | 29 | 0.732558 | from player import Player
p = Player(7,11)
new = p.check_come_out_roll()
print(new)
| true | true |
f7390997788e9f8d8c665a703e70cda0e5065dcd | 226 | py | Python | Products/admin.py | Annukumari99312/e-commerce | 3e7c206fb545f41adfa09bdbc64c765eb8be8b0c | [
"MIT"
] | null | null | null | Products/admin.py | Annukumari99312/e-commerce | 3e7c206fb545f41adfa09bdbc64c765eb8be8b0c | [
"MIT"
] | null | null | null | Products/admin.py | Annukumari99312/e-commerce | 3e7c206fb545f41adfa09bdbc64c765eb8be8b0c | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Product
class ProductAdmin(admin.ModelAdmin):
    """Admin list view for Product, showing the product and its slug."""

    list_display = ['__str__', 'slug']
    # Removed the inner `class Meta`: ModelAdmin does not read a Meta class
    # (that is a ModelForm/serializer concept), so it was dead code.


admin.site.register(Product, ProductAdmin)
| 17.384615 | 42 | 0.721239 | from django.contrib import admin
from .models import Product
class ProductAdmin(admin.ModelAdmin):
list_display = ['__str__', 'slug']
class Meta:
model = Product
admin.site.register(Product, ProductAdmin)
| true | true |
f7390b592a5ce8e43b6054ce5bf5ec8c333bb565 | 15,777 | py | Python | devel/lib/python2.7/dist-packages/robotnik_msgs/msg/_SetElevatorActionResult.py | Jam-cpu/Masters-Project---Final | 0b266b1f117a579b96507249f0a128d0e3cc082a | [
"BSD-3-Clause-Clear"
] | null | null | null | devel/lib/python2.7/dist-packages/robotnik_msgs/msg/_SetElevatorActionResult.py | Jam-cpu/Masters-Project---Final | 0b266b1f117a579b96507249f0a128d0e3cc082a | [
"BSD-3-Clause-Clear"
] | null | null | null | devel/lib/python2.7/dist-packages/robotnik_msgs/msg/_SetElevatorActionResult.py | Jam-cpu/Masters-Project---Final | 0b266b1f117a579b96507249f0a128d0e3cc082a | [
"BSD-3-Clause-Clear"
] | null | null | null | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from robotnik_msgs/SetElevatorActionResult.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import actionlib_msgs.msg
import genpy
import robotnik_msgs.msg
import std_msgs.msg
class SetElevatorActionResult(genpy.Message):
_md5sum = "d72997606702a7ef168a85ecdb795c78"
_type = "robotnik_msgs/SetElevatorActionResult"
_has_header = True # flag to mark the presence of a Header object
_full_text = """# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
Header header
actionlib_msgs/GoalStatus status
SetElevatorResult result
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
string frame_id
================================================================================
MSG: actionlib_msgs/GoalStatus
GoalID goal_id
uint8 status
uint8 PENDING = 0 # The goal has yet to be processed by the action server
uint8 ACTIVE = 1 # The goal is currently being processed by the action server
uint8 PREEMPTED = 2 # The goal received a cancel request after it started executing
# and has since completed its execution (Terminal State)
uint8 SUCCEEDED = 3 # The goal was achieved successfully by the action server (Terminal State)
uint8 ABORTED = 4 # The goal was aborted during execution by the action server due
# to some failure (Terminal State)
uint8 REJECTED = 5 # The goal was rejected by the action server without being processed,
# because the goal was unattainable or invalid (Terminal State)
uint8 PREEMPTING = 6 # The goal received a cancel request after it started executing
# and has not yet completed execution
uint8 RECALLING = 7 # The goal received a cancel request before it started executing,
# but the action server has not yet confirmed that the goal is canceled
uint8 RECALLED = 8 # The goal received a cancel request before it started executing
# and was successfully cancelled (Terminal State)
uint8 LOST = 9 # An action client can determine that a goal is LOST. This should not be
# sent over the wire by an action server
#Allow for the user to associate a string with GoalStatus for debugging
string text
================================================================================
MSG: actionlib_msgs/GoalID
# The stamp should store the time at which this goal was requested.
# It is used by an action server when it tries to preempt all
# goals that were requested before a certain time
time stamp
# The id provides a way to associate feedback and
# result message with specific goal requests. The id
# specified must be unique.
string id
================================================================================
MSG: robotnik_msgs/SetElevatorResult
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
bool result
robotnik_msgs/ElevatorStatus status
================================================================================
MSG: robotnik_msgs/ElevatorStatus
# state
string RAISING=raising
string LOWERING=lowering
string IDLE=idle
string ERROR_G_IO=error_getting_io
string ERROR_S_IO=error_setting_io
string ERROR_TIMEOUT=error_timeout_in_action
# position
string UP=up
string DOWN=down
string UNKNOWN=unknown
# IDLE, RAISING, LOWERING
string state
# UP, DOWN, UNKNOWN
string position
float32 height
"""
__slots__ = ['header','status','result']
_slot_types = ['std_msgs/Header','actionlib_msgs/GoalStatus','robotnik_msgs/SetElevatorResult']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
header,status,result
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(SetElevatorActionResult, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.header is None:
self.header = std_msgs.msg.Header()
if self.status is None:
self.status = actionlib_msgs.msg.GoalStatus()
if self.result is None:
self.result = robotnik_msgs.msg.SetElevatorResult()
else:
self.header = std_msgs.msg.Header()
self.status = actionlib_msgs.msg.GoalStatus()
self.result = robotnik_msgs.msg.SetElevatorResult()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self
buff.write(_get_struct_2I().pack(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs))
_x = self.status.goal_id.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self.status.status
buff.write(_get_struct_B().pack(_x))
_x = self.status.text
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self.result.result
buff.write(_get_struct_B().pack(_x))
_x = self.result.status.state
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self.result.status.position
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self.result.status.height
buff.write(_get_struct_f().pack(_x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
if python3:
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.status is None:
self.status = actionlib_msgs.msg.GoalStatus()
if self.result is None:
self.result = robotnik_msgs.msg.SetElevatorResult()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 8
(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status.goal_id.id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.status.goal_id.id = str[start:end]
start = end
end += 1
(self.status.status,) = _get_struct_B().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status.text = str[start:end].decode('utf-8', 'rosmsg')
else:
self.status.text = str[start:end]
start = end
end += 1
(self.result.result,) = _get_struct_B().unpack(str[start:end])
self.result.result = bool(self.result.result)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.result.status.state = str[start:end].decode('utf-8', 'rosmsg')
else:
self.result.status.state = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.result.status.position = str[start:end].decode('utf-8', 'rosmsg')
else:
self.result.status.position = str[start:end]
start = end
end += 4
(self.result.status.height,) = _get_struct_f().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self
buff.write(_get_struct_2I().pack(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs))
_x = self.status.goal_id.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self.status.status
buff.write(_get_struct_B().pack(_x))
_x = self.status.text
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self.result.result
buff.write(_get_struct_B().pack(_x))
_x = self.result.status.state
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self.result.status.position
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self.result.status.height
buff.write(_get_struct_f().pack(_x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    # NOTE(review): no array fields in this message, so `numpy` is unused;
    # field order must exactly mirror serialize_numpy above.
    if python3:
        codecs.lookup_error("rosmsg").msg_type = self._type
    try:
        # Replace any unset sub-messages with defaults before filling them in.
        if self.header is None:
            self.header = std_msgs.msg.Header()
        if self.status is None:
            self.status = actionlib_msgs.msg.GoalStatus()
        if self.result is None:
            self.result = robotnik_msgs.msg.SetElevatorResult()
        end = 0
        _x = self
        start = end
        end += 12
        # Header: three little-endian uint32 (seq, stamp.secs, stamp.nsecs).
        (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
        else:
            self.header.frame_id = str[start:end]
        _x = self
        start = end
        end += 8
        (_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.status.goal_id.id = str[start:end].decode('utf-8', 'rosmsg')
        else:
            self.status.goal_id.id = str[start:end]
        start = end
        end += 1
        (self.status.status,) = _get_struct_B().unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.status.text = str[start:end].decode('utf-8', 'rosmsg')
        else:
            self.status.text = str[start:end]
        start = end
        end += 1
        (self.result.result,) = _get_struct_B().unpack(str[start:end])
        # Wire type is uint8; normalize back to Python bool.
        self.result.result = bool(self.result.result)
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.result.status.state = str[start:end].decode('utf-8', 'rosmsg')
        else:
            self.result.status.state = str[start:end]
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.result.status.position = str[start:end].decode('utf-8', 'rosmsg')
        else:
            self.result.status.position = str[start:end]
        start = end
        end += 4
        (self.result.status.height,) = _get_struct_f().unpack(str[start:end])
        return self
    except struct.error as e:
        raise genpy.DeserializationError(e) # most likely buffer underfill
# Shared Struct for the 4-byte little-endian string-length prefix ("<I"),
# provided pre-built by genpy at import time.
_struct_I = genpy.struct_I
def _get_struct_I():
    """Return the cached length-prefix Struct."""
    # Read-only access, so no `global` declaration or lazy init is required.
    return _struct_I
_struct_2I = None
def _get_struct_2I():
global _struct_2I
if _struct_2I is None:
_struct_2I = struct.Struct("<2I")
return _struct_2I
_struct_3I = None
def _get_struct_3I():
global _struct_3I
if _struct_3I is None:
_struct_3I = struct.Struct("<3I")
return _struct_3I
_struct_B = None
def _get_struct_B():
global _struct_B
if _struct_B is None:
_struct_B = struct.Struct("<B")
return _struct_B
_struct_f = None
def _get_struct_f():
global _struct_f
if _struct_f is None:
_struct_f = struct.Struct("<f")
return _struct_f
| 36.352535 | 145 | 0.623566 |
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import actionlib_msgs.msg
import genpy
import robotnik_msgs.msg
import std_msgs.msg
class SetElevatorActionResult(genpy.Message):
    """genpy message class for robotnik_msgs/SetElevatorActionResult.

    Bundles a std_msgs/Header, an actionlib_msgs/GoalStatus and the
    robotnik_msgs/SetElevatorResult payload of a SetElevator action,
    and implements (de)serialization to the ROS wire format.
    Autogenerated-style code: field order in the (de)serializers must
    match the message definition in ``_full_text``.
    """
    # Checksum of the message definition, used for type-compatibility checks.
    _md5sum = "d72997606702a7ef168a85ecdb795c78"
    _type = "robotnik_msgs/SetElevatorActionResult"
    _has_header = True  # this message carries a std_msgs/Header
    _full_text = """# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
Header header
actionlib_msgs/GoalStatus status
SetElevatorResult result
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
string frame_id
================================================================================
MSG: actionlib_msgs/GoalStatus
GoalID goal_id
uint8 status
uint8 PENDING = 0 # The goal has yet to be processed by the action server
uint8 ACTIVE = 1 # The goal is currently being processed by the action server
uint8 PREEMPTED = 2 # The goal received a cancel request after it started executing
# and has since completed its execution (Terminal State)
uint8 SUCCEEDED = 3 # The goal was achieved successfully by the action server (Terminal State)
uint8 ABORTED = 4 # The goal was aborted during execution by the action server due
# to some failure (Terminal State)
uint8 REJECTED = 5 # The goal was rejected by the action server without being processed,
# because the goal was unattainable or invalid (Terminal State)
uint8 PREEMPTING = 6 # The goal received a cancel request after it started executing
# and has not yet completed execution
uint8 RECALLING = 7 # The goal received a cancel request before it started executing,
# but the action server has not yet confirmed that the goal is canceled
uint8 RECALLED = 8 # The goal received a cancel request before it started executing
# and was successfully cancelled (Terminal State)
uint8 LOST = 9 # An action client can determine that a goal is LOST. This should not be
# sent over the wire by an action server
#Allow for the user to associate a string with GoalStatus for debugging
string text
================================================================================
MSG: actionlib_msgs/GoalID
# The stamp should store the time at which this goal was requested.
# It is used by an action server when it tries to preempt all
# goals that were requested before a certain time
time stamp
# The id provides a way to associate feedback and
# result message with specific goal requests. The id
# specified must be unique.
string id
================================================================================
MSG: robotnik_msgs/SetElevatorResult
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
bool result
robotnik_msgs/ElevatorStatus status
================================================================================
MSG: robotnik_msgs/ElevatorStatus
# state
string RAISING=raising
string LOWERING=lowering
string IDLE=idle
string ERROR_G_IO=error_getting_io
string ERROR_S_IO=error_setting_io
string ERROR_TIMEOUT=error_timeout_in_action
# position
string UP=up
string DOWN=down
string UNKNOWN=unknown
# IDLE, RAISING, LOWERING
string state
# UP, DOWN, UNKNOWN
string position
float32 height
"""
    __slots__ = ['header','status','result']
    _slot_types = ['std_msgs/Header','actionlib_msgs/GoalStatus','robotnik_msgs/SetElevatorResult']
    def __init__(self, *args, **kwds):
        """
        Constructor. Fields may be passed positionally (in __slots__ order)
        or as keyword arguments; any field left None afterwards is replaced
        with a default-constructed sub-message.
        """
        if args or kwds:
            # genpy.Message.__init__ fills slots from args/kwds; backfill
            # anything still unset with default instances.
            super(SetElevatorActionResult, self).__init__(*args, **kwds)
            if self.header is None:
                self.header = std_msgs.msg.Header()
            if self.status is None:
                self.status = actionlib_msgs.msg.GoalStatus()
            if self.result is None:
                self.result = robotnik_msgs.msg.SetElevatorResult()
        else:
            self.header = std_msgs.msg.Header()
            self.status = actionlib_msgs.msg.GoalStatus()
            self.result = robotnik_msgs.msg.SetElevatorResult()
    def _get_types(self):
        """Return the internal message field types (see ``_slot_types``)."""
        return self._slot_types
    def serialize(self, buff):
        """
        serialize message into buffer
        :param buff: buffer, ``StringIO``
        """
        try:
            _x = self
            # Header: seq + stamp.secs + stamp.nsecs as three little-endian uint32.
            buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
            _x = self.header.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            # Strings are wire-encoded as uint32 length prefix + raw bytes.
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = self
            buff.write(_get_struct_2I().pack(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs))
            _x = self.status.goal_id.id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = self.status.status
            buff.write(_get_struct_B().pack(_x))
            _x = self.status.text
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            # result.result is a bool packed as one byte.
            _x = self.result.result
            buff.write(_get_struct_B().pack(_x))
            _x = self.result.status.state
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = self.result.status.position
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            # Elevator height: little-endian float32.
            _x = self.result.status.height
            buff.write(_get_struct_f().pack(_x))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
    def deserialize(self, str):
        """
        unpack serialized message in str into this message instance
        :param str: byte array of serialized message, ``str``
        """
        if python3:
            codecs.lookup_error("rosmsg").msg_type = self._type
        try:
            # Replace any unset sub-messages with defaults before filling them.
            if self.header is None:
                self.header = std_msgs.msg.Header()
            if self.status is None:
                self.status = actionlib_msgs.msg.GoalStatus()
            if self.result is None:
                self.result = robotnik_msgs.msg.SetElevatorResult()
            end = 0
            _x = self
            start = end
            end += 12
            (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
            else:
                self.header.frame_id = str[start:end]
            _x = self
            start = end
            end += 8
            (_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.status.goal_id.id = str[start:end].decode('utf-8', 'rosmsg')
            else:
                self.status.goal_id.id = str[start:end]
            start = end
            end += 1
            (self.status.status,) = _get_struct_B().unpack(str[start:end])
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.status.text = str[start:end].decode('utf-8', 'rosmsg')
            else:
                self.status.text = str[start:end]
            start = end
            end += 1
            (self.result.result,) = _get_struct_B().unpack(str[start:end])
            # Wire type is uint8; normalize back to Python bool.
            self.result.result = bool(self.result.result)
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.result.status.state = str[start:end].decode('utf-8', 'rosmsg')
            else:
                self.result.status.state = str[start:end]
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.result.status.position = str[start:end].decode('utf-8', 'rosmsg')
            else:
                self.result.status.position = str[start:end]
            start = end
            end += 4
            (self.result.status.height,) = _get_struct_f().unpack(str[start:end])
            return self
        except struct.error as e:
            raise genpy.DeserializationError(e)  # most likely buffer underfill
    def serialize_numpy(self, buff, numpy):
        """
        serialize message with numpy array types into buffer
        :param buff: buffer, ``StringIO``
        :param numpy: numpy python module (unused: no array fields here)
        """
        try:
            _x = self
            buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
            _x = self.header.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = self
            buff.write(_get_struct_2I().pack(_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs))
            _x = self.status.goal_id.id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = self.status.status
            buff.write(_get_struct_B().pack(_x))
            _x = self.status.text
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = self.result.result
            buff.write(_get_struct_B().pack(_x))
            _x = self.result.status.state
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = self.result.status.position
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
            _x = self.result.status.height
            buff.write(_get_struct_f().pack(_x))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
    def deserialize_numpy(self, str, numpy):
        """
        unpack serialized message in str into this message instance using numpy for array types
        :param str: byte array of serialized message, ``str``
        :param numpy: numpy python module (unused: no array fields here)
        """
        if python3:
            codecs.lookup_error("rosmsg").msg_type = self._type
        try:
            if self.header is None:
                self.header = std_msgs.msg.Header()
            if self.status is None:
                self.status = actionlib_msgs.msg.GoalStatus()
            if self.result is None:
                self.result = robotnik_msgs.msg.SetElevatorResult()
            end = 0
            _x = self
            start = end
            end += 12
            (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
            else:
                self.header.frame_id = str[start:end]
            _x = self
            start = end
            end += 8
            (_x.status.goal_id.stamp.secs, _x.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.status.goal_id.id = str[start:end].decode('utf-8', 'rosmsg')
            else:
                self.status.goal_id.id = str[start:end]
            start = end
            end += 1
            (self.status.status,) = _get_struct_B().unpack(str[start:end])
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.status.text = str[start:end].decode('utf-8', 'rosmsg')
            else:
                self.status.text = str[start:end]
            start = end
            end += 1
            (self.result.result,) = _get_struct_B().unpack(str[start:end])
            self.result.result = bool(self.result.result)
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.result.status.state = str[start:end].decode('utf-8', 'rosmsg')
            else:
                self.result.status.state = str[start:end]
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.result.status.position = str[start:end].decode('utf-8', 'rosmsg')
            else:
                self.result.status.position = str[start:end]
            start = end
            end += 4
            (self.result.status.height,) = _get_struct_f().unpack(str[start:end])
            return self
        except struct.error as e:
            raise genpy.DeserializationError(e)  # most likely buffer underfill
# Shared Struct for the 4-byte little-endian string-length prefix ("<I"),
# provided pre-built by genpy at import time.
_struct_I = genpy.struct_I
def _get_struct_I():
    """Return the cached length-prefix Struct."""
    # Read-only access, so no `global` declaration or lazy init is required.
    return _struct_I
_struct_2I = None
def _get_struct_2I():
global _struct_2I
if _struct_2I is None:
_struct_2I = struct.Struct("<2I")
return _struct_2I
_struct_3I = None
def _get_struct_3I():
global _struct_3I
if _struct_3I is None:
_struct_3I = struct.Struct("<3I")
return _struct_3I
_struct_B = None
def _get_struct_B():
global _struct_B
if _struct_B is None:
_struct_B = struct.Struct("<B")
return _struct_B
_struct_f = None
def _get_struct_f():
global _struct_f
if _struct_f is None:
_struct_f = struct.Struct("<f")
return _struct_f
| true | true |
f7390c2e11c7451649e3783cb5ad8d2db9e0dc57 | 17,027 | py | Python | setup.py | RobertHalwass/habitat-sim | a329a90a70767c92789bdbeb2a983161d1207e98 | [
"MIT"
] | null | null | null | setup.py | RobertHalwass/habitat-sim | a329a90a70767c92789bdbeb2a983161d1207e98 | [
"MIT"
] | null | null | null | setup.py | RobertHalwass/habitat-sim | a329a90a70767c92789bdbeb2a983161d1207e98 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Adapted from: http://www.benjack.io/2017/06/12/python-cpp-tests.html
"""
import argparse
import builtins
import glob
import json
import os
import os.path as osp
import re
import shlex
import shutil
import subprocess
import sys
from distutils.util import strtobool
from distutils.version import StrictVersion
from setuptools import Extension, find_packages, setup
from setuptools.command.build_ext import build_ext
try:
    import cmake
    # If the cmake python package is installed, use that exe
    CMAKE_BIN_DIR = cmake.CMAKE_BIN_DIR
except ImportError:
    # Otherwise fall back to whatever `cmake` binary is on PATH.
    CMAKE_BIN_DIR = ""
# Make the source directory importable so `import habitat_sim` works below
# before the package is installed.
sys.path.insert(0, osp.dirname(__file__))
# setup.py flags that must never be persisted by --cache-args.
ARG_CACHE_BLACKLIST = {"force_cmake", "cache_args", "inplace"}
def str2bool(input_str: str) -> bool:
    """Parse a human-friendly truth value to a bool.

    Accepts exactly the vocabulary of the old ``distutils.util.strtobool``
    (case-insensitive): y/yes/t/true/on/1 are True, n/no/f/false/off/0 are
    False.

    :param input_str: the value to parse, e.g. from an environment variable.
    :raises ValueError: if *input_str* is not a recognized truth value.
    """
    # distutils was deprecated by PEP 632 and removed in Python 3.12, so
    # replicate strtobool's exact behavior locally instead of importing it.
    value = input_str.lower()
    if value in ("y", "yes", "t", "true", "on", "1"):
        return True
    if value in ("n", "no", "f", "false", "off", "0"):
        return False
    raise ValueError("invalid truth value %r" % (value,))
def is_pip():
    """Heuristic: True when driven by pip rather than `python setup.py`."""
    # The `_` env var holds the invoking executable's path; under pip the
    # basename starts with "pip", under a direct run it ends with python.
    invoker = os.environ.get("_", "/pip/no")
    return osp.basename(invoker).startswith("pip")
# TODO refactor to the proper way to pass options to setup.py so pip can do so.
def build_parser():
    """Construct the argparse parser for setup.py's custom build options.

    Most flags mirror a CMake cache variable and many can also be driven
    via environment variables (HEADLESS, WITH_CUDA, WITH_BULLET, CMAKE_ARGS)
    so they work under `pip install`.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        "--headless",
        dest="headless",
        # Defaults to headless when driven by pip (no GUI assumed).
        default=str2bool(os.environ.get("HEADLESS", str(is_pip()))),
        action="store_true",
        help="""Build in headless mode.
Use "HEADLESS=True pip install ." to build in headless mode with pip""",
    )
    parser.add_argument(
        "--with-cuda",
        action="store_true",
        default=str2bool(os.environ.get("WITH_CUDA", "False")),
        dest="with_cuda",
        help="Build CUDA enabled features. Requires CUDA to be installed",
    )
    parser.add_argument(
        "--bullet",
        "--with-bullet",
        dest="with_bullet",
        default=str2bool(os.environ.get("WITH_BULLET", str(is_pip()))),
        action="store_true",
        help="""Build with Bullet simulation engine. Default to True when pip installing.
Default value is otherwise false or provided WITH_BULLET=ON or WITH_BULLET_OFF when doing pip install.""",
    )
    parser.add_argument("--no-bullet", dest="with_bullet", action="store_false")
    parser.add_argument(
        "--vhacd",
        dest="with_vhacd",
        action="store_true",
        help="""Build with VHACD convex hull decomposition and voxelization engine.""",
    )
    parser.add_argument(
        "--cmake",
        "--force-cmake",
        dest="force_cmake",
        action="store_true",
        help="Forces cmake to be rerun. This argument is not cached",
    )
    parser.add_argument(
        "--build-tests", dest="build_tests", action="store_true", help="Build tests"
    )
    parser.add_argument(
        "--build-datatool",
        dest="build_datatool",
        action="store_true",
        help="Build data tool",
    )
    parser.add_argument(
        "--cmake-args",
        type=str,
        default=os.environ.get("CMAKE_ARGS", ""),
        help="""Additional arguements to be passed to cmake.
Note that you will need to do `--cmake-args="..."` as `--cmake-args "..."`
will generally not be parsed correctly
You may need to use --force-cmake to ensure cmake is rerun with new args.
Use "CMAKE_ARGS="..." pip install ." to set cmake args with pip""",
    )
    parser.add_argument(
        "--no-update-submodules",
        dest="no_update_submodules",
        action="store_true",
        help="Don't update git submodules",
    )
    parser.add_argument(
        "--build-type",
        dest="build_type",
        default=None,
        help="CMake configuration to build with (Release, Debug, etc...)",
    )
    parser.add_argument(
        "--no-lto",
        dest="lto",
        default=None,
        action="store_false",
        help="Disables Link Time Optimization: faster compile times but worse performance.",
    )
    parser.add_argument(
        "--lto",
        dest="lto",
        action="store_true",
        help="Enables Link Time Optimization: better performance but longer compile time",
    )
    parser.add_argument(
        "--cache-args",
        dest="cache_args",
        action="store_true",
        help="""Caches the arguements sent to setup.py
and reloads them on the next invocation. This argument is not cached""",
    )
    parser.add_argument(
        "--skip-install-magnum",
        dest="skip_install_magnum",
        action="store_true",
        help="Don't install magnum. "
        "This is nice for incrementally building for development but "
        "can cause install magnum bindings to fall out-of-sync",
    )
    parser.add_argument(
        "--build-basis-compressor",
        "--basis-compressor",
        dest="build_basis_compressor",
        action="store_true",
        help="Wether or not to build the basis compressor."
        " Loading basis compressed meshes does NOT require this.",
    )
    return parser
# Split argv at a literal "--": everything before it may be consumed by our
# custom parser, everything from "--" on is passed through to setuptools.
parseable_args = []
unparseable_args = []
for i, arg in enumerate(sys.argv):
    if arg == "--":
        unparseable_args = sys.argv[i:]
        break
    parseable_args.append(arg)
parser = build_parser()
args, filtered_args = parser.parse_known_args(args=parseable_args)
# Hand the unrecognized arguments back to setuptools via sys.argv.
sys.argv = filtered_args + unparseable_args
def in_git():
    """Return True when running inside a git work tree (and git exists)."""
    try:
        subprocess.check_output(["git", "rev-parse", "--is-inside-work-tree"])
    except (OSError, subprocess.SubprocessError):
        # git missing, or the command failed (not a work tree).
        return False
    return True
def has_ninja():
    """Return True when a runnable `ninja` executable is on PATH."""
    try:
        subprocess.check_output(["ninja", "--version"])
    except (OSError, subprocess.SubprocessError):
        return False
    return True
class CMakeExtension(Extension):
    """A setuptools Extension whose build is delegated to CMake.

    No sources are compiled by setuptools itself; ``sourcedir`` points at
    the CMake source tree consumed by CMakeBuild.build_extension().
    """

    def __init__(self, name, sourcedir=""):
        super().__init__(name, sources=[])
        self.sourcedir = os.path.abspath(sourcedir)
# populated in CMakeBuild.build_extension()
_cmake_build_dir = None
class CMakeBuild(build_ext):
    """Custom build_ext command that drives the CMake build of habitat-sim."""

    def finalize_options(self):
        """Finalize options, loading or saving the setup.py argument cache."""
        super().finalize_options()
        # Normalize user_options entries ("foo-bar=" -> "foo_bar") so they can
        # be round-tripped through the JSON cache as attribute names.
        cacheable_params = [
            opt[0].replace("=", "").replace("-", "_") for opt in self.user_options
        ]
        args_cache_file = ".setuppy_args_cache.json"
        if not args.cache_args and osp.exists(args_cache_file):
            # Reload previously cached args onto both the global `args`
            # namespace and this command instance.
            with open(args_cache_file, "r") as f:
                cached_args = json.load(f)
            for k, v in cached_args["args"].items():
                setattr(args, k, v)
            for k, v in cached_args["build_ext"].items():
                setattr(self, k, v)
        elif args.cache_args:
            # Persist everything except the blacklisted one-shot flags.
            cache = dict(
                args={
                    k: v for k, v in vars(args).items() if k not in ARG_CACHE_BLACKLIST
                },
                build_ext={
                    k: getattr(self, k)
                    for k in cacheable_params
                    if k not in ARG_CACHE_BLACKLIST
                },
            )
            with open(args_cache_file, "w") as f:
                json.dump(cache, f, indent=4, sort_keys=True)
        if not os.path.exists(self.build_temp):
            os.makedirs(self.build_temp)
        # Save the CMake build directory -- that's where the generated setup.py
        # for magnum-bindings will appear which we need to run later
        global _cmake_build_dir
        _cmake_build_dir = self.build_temp

    def run(self):
        """Verify cmake is available, then build every extension."""
        try:
            subprocess.check_output([osp.join(CMAKE_BIN_DIR, "cmake"), "--version"])
        except (OSError, subprocess.SubprocessError):
            raise RuntimeError(
                "CMake must be installed to build the following extensions: "
                + ", ".join(e.name for e in self.extensions)
            )
        for ext in self.extensions:
            self.build_extension(ext)

    def build_extension(self, ext):
        """Configure (if needed) and build one CMakeExtension."""
        extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
        # Init & update all submodules if not already (the user might be pinned
        # on some particular commit or have working tree changes, don't destroy
        # those)
        if in_git() and not args.no_update_submodules:
            subprocess.check_call(
                ["git", "submodule", "update", "--init", "--recursive"]
            )
        cmake_args = [
            "-DBUILD_PYTHON_BINDINGS=ON",
            "-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=" + extdir,
            "-DPYTHON_EXECUTABLE=" + sys.executable,
            "-DCMAKE_EXPORT_COMPILE_COMMANDS={}".format("OFF" if is_pip() else "ON"),
            "-DREL_BUILD_RPATH={}".format("OFF" if self.inplace else "ON"),
        ]
        if args.lto is not None:
            cmake_args += [
                "-DCMAKE_INTERPROCEDURAL_OPTIMIZATION={}".format(
                    "ON" if args.lto else "OFF"
                )
            ]
        # User-provided extra cmake arguments come last so they can override.
        cmake_args += shlex.split(args.cmake_args)
        build_type = args.build_type
        assert not (
            build_type is not None and self.debug
        ), f"Debug and Build-Type flags conflict: {self.debug}, {build_type}"
        if build_type is None:
            build_type = "Debug" if self.debug else "RelWithDebInfo"
        build_args = ["--config", build_type]
        cmake_args += ["-DCMAKE_BUILD_TYPE=" + build_type]
        build_args += ["--"]
        if has_ninja():
            cmake_args += ["-GNinja"]
        # Make it possible to *reduce* the number of jobs. Ninja requires a
        # number passed to -j (and builds on all cores by default), while make
        # doesn't require a number (but builds sequentially by default), so we
        # add the argument only when it's not ninja or the number of jobs is
        # specified.
        if not has_ninja() or self.parallel:
            build_args += ["-j{}".format(self.parallel) if self.parallel else "-j"]
        cmake_args += [
            "-DBUILD_GUI_VIEWERS={}".format("ON" if not args.headless else "OFF")
        ]
        if sys.platform not in ["darwin", "win32", "win64"]:
            cmake_args += [
                # So Magnum itself prefers EGL over GLX for windowless apps.
                # Makes sense only on platforms with EGL (Linux, BSD, ...).
                "-DTARGET_HEADLESS={}".format("ON" if args.headless else "OFF")
            ]
        # NOTE: BUILD_TEST is intentional as opposed to BUILD_TESTS which collides
        # with definition used by some of our dependencies
        cmake_args += ["-DBUILD_TEST={}".format("ON" if args.build_tests else "OFF")]
        cmake_args += [
            "-DBUILD_WITH_BULLET={}".format("ON" if args.with_bullet else "OFF")
        ]
        cmake_args += [
            "-DBUILD_WITH_VHACD={}".format("ON" if args.with_vhacd else "OFF")
        ]
        cmake_args += [
            "-DBUILD_DATATOOL={}".format("ON" if args.build_datatool else "OFF")
        ]
        cmake_args += ["-DBUILD_WITH_CUDA={}".format("ON" if args.with_cuda else "OFF")]
        cmake_args += [
            "-DBUILD_BASIS_COMPRESSOR={}".format(
                "ON" if args.build_basis_compressor else "OFF"
            )
        ]
        env = os.environ.copy()
        # Embed the package version into the native build.
        env["CXXFLAGS"] = '{} -DVERSION_INFO=\\"{}\\"'.format(
            env.get("CXXFLAGS", ""), self.distribution.get_version()
        )
        if is_pip() or self.run_cmake(cmake_args):
            os.makedirs(self.build_temp, exist_ok=True)
            # Remove invalid cmakefiles if is is_pip()
            for cmake_cache_f in [
                "CMakeFiles",
                "CMakeCache.txt",
                "cmake_install.cmake",
            ]:
                cmake_cache_f = osp.join(self.build_temp, cmake_cache_f)
                if is_pip() and osp.exists(cmake_cache_f):
                    if osp.isdir(cmake_cache_f):
                        shutil.rmtree(cmake_cache_f)
                    else:
                        os.remove(cmake_cache_f)
            subprocess.check_call(
                [osp.join(CMAKE_BIN_DIR, "cmake")]
                + cmake_args
                + [osp.realpath(ext.sourcedir)],
                env=env,
                cwd=self.build_temp,
            )
            if not is_pip():
                self.create_compile_commands()
        subprocess.check_call(
            [osp.join(CMAKE_BIN_DIR, "cmake"), "--build", self.build_temp] + build_args
        )
        print()  # Add an empty line for cleaner output
        # The things following this don't work with pip
        if is_pip():
            return
        if not args.headless:
            # Convenience symlink to the built interactive viewer binary.
            link_dst = osp.join(self.build_temp, "viewer")
            if not osp.islink(link_dst):
                os.symlink(
                    osp.abspath(osp.join(self.build_temp, "utils/viewer/viewer")),
                    link_dst,
                )

    def run_cmake(self, cmake_args):
        """Return True when cmake must be (re)configured.

        Compares the requested -D definitions against CMakeCache.txt; any
        changed value forces a reconfigure.
        """
        if args.force_cmake:
            return True
        cache_parser = re.compile(r"(?P<K>\w+?)(:\w+?|)=(?P<V>.*?)$")
        cmake_cache = osp.join(self.build_temp, "CMakeCache.txt")
        if osp.exists(cmake_cache):
            with open(cmake_cache, "r") as f:
                cache_contents = f.readlines()
            for arg in cmake_args:
                # Generator choice (-G...) is not stored as a cache variable.
                if arg[0:2] == "-G":
                    continue
                k, v = arg.split("=", 1)
                # Strip +D
                k = k[2:]
                for l in cache_contents:
                    match = cache_parser.match(l)
                    if match is None:
                        continue
                    if match.group("K") == k and match.group("V") != v:
                        return True
            return False
        return True

    def create_compile_commands(self):
        """Merge per-directory compile_commands.json files into one at the repo root."""
        def load(filename):
            with open(filename) as f:
                return json.load(f)
        command_files = [osp.join(self.build_temp, "compile_commands.json")]
        command_files += glob.glob("{}/*/compile_commands.json".format(self.build_temp))
        all_commands = [entry for f in command_files for entry in load(f)]
        # cquery does not like c++ compiles that start with gcc.
        # It forgets to include the c++ header directories.
        # We can work around this by replacing the gcc calls that python
        # setup.py generates with g++ calls instead
        for command in all_commands:
            if command["command"].startswith("gcc "):
                command["command"] = "g++ " + command["command"][4:]
        new_contents = json.dumps(all_commands, indent=2)
        contents = ""
        if os.path.exists("compile_commands.json"):
            with open("compile_commands.json", "r") as f:
                contents = f.read()
        # Only rewrite when changed to avoid needless editor/indexer reloads.
        if contents != new_contents:
            with open("compile_commands.json", "w") as f:
                f.write(new_contents)
if __name__ == "__main__":
    # Guard the minimum interpreter version before doing anything else.
    assert StrictVersion(
        "{}.{}".format(sys.version_info[0], sys.version_info[1])
    ) >= StrictVersion("3.6"), "Must use python3.6 or newer"
    with open("./requirements.txt", "r") as f:
        requirements = [l.strip() for l in f.readlines() if len(l.strip()) > 0]
    # Only install pytest if we are running tests
    if {"pytest", "test", "ptr"}.intersection(sys.argv):
        setup_requires = ["pytest-runner"]
    else:
        setup_requires = []
    # Signal to the habitat_sim package that it is being imported from
    # setup.py (so it can skip loading native extensions).
    builtins.__HSIM_SETUP__ = True
    import habitat_sim
    setup(
        name="habitat_sim",
        version=habitat_sim.__version__,
        author="FAIR A-STAR",
        description="A high performance simulator for training embodied agents",
        long_description="",
        packages=find_packages(),
        install_requires=requirements,
        setup_requires=setup_requires,
        tests_require=["hypothesis", "pytest-benchmark", "pytest"],
        python_requires=">=3.6",
        # add extension module
        ext_modules=[CMakeExtension("habitat_sim._ext.habitat_sim_bindings", "src")],
        # add custom build_ext command
        cmdclass=dict(build_ext=CMakeBuild),
        zip_safe=False,
        include_package_data=True,
    )
    # After the CMake build, install the magnum python bindings generated
    # inside the build tree (unless explicitly skipped or doing an sdist).
    pymagnum_build_dir = osp.join(
        _cmake_build_dir, "deps", "magnum-bindings", "src", "python"
    )
    if (
        not args.skip_install_magnum
        and "sdist" not in sys.argv
        and os.path.exists(pymagnum_build_dir)
    ):
        subprocess.check_call(
            [sys.executable, "-m", "pip", "install", pymagnum_build_dir]
        )
    else:
        if not os.path.exists(pymagnum_build_dir) and "sdist" not in sys.argv:
            print(
                f"{pymagnum_build_dir} does not exist and therefore we cannot install magnum-bindings directly."
            )
        print(
            "Assuming magnum bindings are already installed (or we're inside pip and *\\_('-')_/*)"
        )
        print(
            f"Run '{sys.executable} -m pip install {pymagnum_build_dir}' if this assumption is incorrect"
        )
| 34.054 | 112 | 0.58789 |
import argparse
import builtins
import glob
import json
import os
import os.path as osp
import re
import shlex
import shutil
import subprocess
import sys
from distutils.util import strtobool
from distutils.version import StrictVersion
from setuptools import Extension, find_packages, setup
from setuptools.command.build_ext import build_ext
try:
    import cmake
    # Prefer the cmake executable shipped with the `cmake` PyPI package.
    CMAKE_BIN_DIR = cmake.CMAKE_BIN_DIR
except ImportError:
    # Otherwise fall back to whatever `cmake` binary is on PATH.
    CMAKE_BIN_DIR = ""
# Make the source directory importable so `import habitat_sim` works below
# before the package is installed.
sys.path.insert(0, osp.dirname(__file__))
# setup.py flags that must never be persisted by --cache-args.
ARG_CACHE_BLACKLIST = {"force_cmake", "cache_args", "inplace"}
def str2bool(input_str: str) -> bool:
    """Parse a human-friendly truth value to a bool.

    Accepts exactly the vocabulary of the old ``distutils.util.strtobool``
    (case-insensitive): y/yes/t/true/on/1 are True, n/no/f/false/off/0 are
    False.

    :param input_str: the value to parse, e.g. from an environment variable.
    :raises ValueError: if *input_str* is not a recognized truth value.
    """
    # distutils was deprecated by PEP 632 and removed in Python 3.12, so
    # replicate strtobool's exact behavior locally instead of importing it.
    value = input_str.lower()
    if value in ("y", "yes", "t", "true", "on", "1"):
        return True
    if value in ("n", "no", "f", "false", "off", "0"):
        return False
    raise ValueError("invalid truth value %r" % (value,))
def is_pip():
    """Heuristic: True when driven by pip rather than `python setup.py`."""
    # The `_` env var holds the invoking executable's path; under pip the
    # basename starts with "pip", under a direct run it ends with python.
    invoker = os.environ.get("_", "/pip/no")
    return osp.basename(invoker).startswith("pip")
def build_parser():
    """Construct the argparse parser for setup.py's custom build options.

    Most flags mirror a CMake cache variable and many can also be driven
    via environment variables (HEADLESS, WITH_CUDA, WITH_BULLET, CMAKE_ARGS)
    so they work under `pip install`.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        "--headless",
        dest="headless",
        # Defaults to headless when driven by pip (no GUI assumed).
        default=str2bool(os.environ.get("HEADLESS", str(is_pip()))),
        action="store_true",
        help="""Build in headless mode.
Use "HEADLESS=True pip install ." to build in headless mode with pip""",
    )
    parser.add_argument(
        "--with-cuda",
        action="store_true",
        default=str2bool(os.environ.get("WITH_CUDA", "False")),
        dest="with_cuda",
        help="Build CUDA enabled features. Requires CUDA to be installed",
    )
    parser.add_argument(
        "--bullet",
        "--with-bullet",
        dest="with_bullet",
        default=str2bool(os.environ.get("WITH_BULLET", str(is_pip()))),
        action="store_true",
        help="""Build with Bullet simulation engine. Default to True when pip installing.
Default value is otherwise false or provided WITH_BULLET=ON or WITH_BULLET_OFF when doing pip install.""",
    )
    parser.add_argument("--no-bullet", dest="with_bullet", action="store_false")
    parser.add_argument(
        "--vhacd",
        dest="with_vhacd",
        action="store_true",
        help="""Build with VHACD convex hull decomposition and voxelization engine.""",
    )
    parser.add_argument(
        "--cmake",
        "--force-cmake",
        dest="force_cmake",
        action="store_true",
        help="Forces cmake to be rerun. This argument is not cached",
    )
    parser.add_argument(
        "--build-tests", dest="build_tests", action="store_true", help="Build tests"
    )
    parser.add_argument(
        "--build-datatool",
        dest="build_datatool",
        action="store_true",
        help="Build data tool",
    )
    parser.add_argument(
        "--cmake-args",
        type=str,
        default=os.environ.get("CMAKE_ARGS", ""),
        help="""Additional arguements to be passed to cmake.
Note that you will need to do `--cmake-args="..."` as `--cmake-args "..."`
will generally not be parsed correctly
You may need to use --force-cmake to ensure cmake is rerun with new args.
Use "CMAKE_ARGS="..." pip install ." to set cmake args with pip""",
    )
    parser.add_argument(
        "--no-update-submodules",
        dest="no_update_submodules",
        action="store_true",
        help="Don't update git submodules",
    )
    parser.add_argument(
        "--build-type",
        dest="build_type",
        default=None,
        help="CMake configuration to build with (Release, Debug, etc...)",
    )
    parser.add_argument(
        "--no-lto",
        dest="lto",
        default=None,
        action="store_false",
        help="Disables Link Time Optimization: faster compile times but worse performance.",
    )
    parser.add_argument(
        "--lto",
        dest="lto",
        action="store_true",
        help="Enables Link Time Optimization: better performance but longer compile time",
    )
    parser.add_argument(
        "--cache-args",
        dest="cache_args",
        action="store_true",
        help="""Caches the arguements sent to setup.py
and reloads them on the next invocation. This argument is not cached""",
    )
    parser.add_argument(
        "--skip-install-magnum",
        dest="skip_install_magnum",
        action="store_true",
        help="Don't install magnum. "
        "This is nice for incrementally building for development but "
        "can cause install magnum bindings to fall out-of-sync",
    )
    parser.add_argument(
        "--build-basis-compressor",
        "--basis-compressor",
        dest="build_basis_compressor",
        action="store_true",
        help="Wether or not to build the basis compressor."
        " Loading basis compressed meshes does NOT require this.",
    )
    return parser
# Split argv at a literal "--": everything before it may be consumed by our
# custom parser, everything from "--" on is passed through to setuptools.
parseable_args = []
unparseable_args = []
for i, arg in enumerate(sys.argv):
    if arg == "--":
        unparseable_args = sys.argv[i:]
        break
    parseable_args.append(arg)
parser = build_parser()
args, filtered_args = parser.parse_known_args(args=parseable_args)
# Hand the unrecognized arguments back to setuptools via sys.argv.
sys.argv = filtered_args + unparseable_args
def in_git():
    """Return True when the current working directory is inside a git work tree."""
    probe = ["git", "rev-parse", "--is-inside-work-tree"]
    try:
        subprocess.check_output(probe)
    except (OSError, subprocess.SubprocessError):
        # git missing (OSError) or we're outside any repository (non-zero exit).
        return False
    return True
def has_ninja():
    """Return True when a working ``ninja`` executable is available on PATH."""
    try:
        subprocess.check_output(["ninja", "--version"])
    except (OSError, subprocess.SubprocessError):
        # ninja not installed, or the probe invocation failed.
        return False
    return True
class CMakeExtension(Extension):
    """A setuptools Extension whose compilation is delegated to CMake.

    No sources are listed for setuptools itself; the CMake build finds
    everything under ``sourcedir`` instead.
    """

    def __init__(self, name, sourcedir=""):
        # sources=[] — setuptools compiles nothing for this extension.
        super().__init__(name, sources=[])
        self.sourcedir = os.path.abspath(sourcedir)
_cmake_build_dir = None
class CMakeBuild(build_ext):
    """Custom build_ext command that drives the CMake build.

    Adds argument caching (``--cache-args``), incremental CMake configure
    re-runs (see :meth:`run_cmake`) and ``compile_commands.json`` generation
    for IDE tooling.
    """

    def finalize_options(self):
        """Resolve options, replaying or persisting the args cache file."""
        super().finalize_options()
        # Normalize build_ext option names ("build-temp=" -> "build_temp") so
        # they can be stored as JSON keys and restored via setattr.
        cacheable_params = [
            opt[0].replace("=", "").replace("-", "_") for opt in self.user_options
        ]
        args_cache_file = ".setuppy_args_cache.json"
        if not args.cache_args and osp.exists(args_cache_file):
            # Replay a previously cached invocation: restore both the custom
            # argparse namespace and the build_ext options.
            with open(args_cache_file, "r") as f:
                cached_args = json.load(f)
            for k, v in cached_args["args"].items():
                setattr(args, k, v)
            for k, v in cached_args["build_ext"].items():
                setattr(self, k, v)
        elif args.cache_args:
            # Persist the current invocation (minus blacklisted keys) for the
            # next run.
            cache = dict(
                args={
                    k: v for k, v in vars(args).items() if k not in ARG_CACHE_BLACKLIST
                },
                build_ext={
                    k: getattr(self, k)
                    for k in cacheable_params
                    if k not in ARG_CACHE_BLACKLIST
                },
            )
            with open(args_cache_file, "w") as f:
                json.dump(cache, f, indent=4, sort_keys=True)
        if not os.path.exists(self.build_temp):
            os.makedirs(self.build_temp)
        # Publish the build dir globally: this is where the build output
        # for magnum-bindings will appear which we need to run later.
        global _cmake_build_dir
        _cmake_build_dir = self.build_temp

    def run(self):
        """Verify cmake is installed, then build every registered extension."""
        try:
            subprocess.check_output([osp.join(CMAKE_BIN_DIR, "cmake"), "--version"])
        except (OSError, subprocess.SubprocessError):
            raise RuntimeError(
                "CMake must be installed to build the following extensions: "
                + ", ".join(e.name for e in self.extensions)
            )
        for ext in self.extensions:
            self.build_extension(ext)

    def build_extension(self, ext):
        """Configure (when needed) and build one CMakeExtension."""
        extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
        # Init & update all submodules if not already (the user might be pinned
        # on some particular commit or have working tree changes, don't destroy
        # those unless --no-update-submodules was given).
        if in_git() and not args.no_update_submodules:
            subprocess.check_call(
                ["git", "submodule", "update", "--init", "--recursive"]
            )
        cmake_args = [
            "-DBUILD_PYTHON_BINDINGS=ON",
            "-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=" + extdir,
            "-DPYTHON_EXECUTABLE=" + sys.executable,
            # compile_commands.json only matters for local dev, not pip installs.
            "-DCMAKE_EXPORT_COMPILE_COMMANDS={}".format("OFF" if is_pip() else "ON"),
            "-DREL_BUILD_RPATH={}".format("OFF" if self.inplace else "ON"),
        ]
        if args.lto is not None:
            cmake_args += [
                "-DCMAKE_INTERPROCEDURAL_OPTIMIZATION={}".format(
                    "ON" if args.lto else "OFF"
                )
            ]
        # User-supplied extra cmake args are appended last so they take precedence.
        cmake_args += shlex.split(args.cmake_args)
        build_type = args.build_type
        assert not (
            build_type is not None and self.debug
        ), f"Debug and Build-Type flags conflict: {self.debug}, {build_type}"
        if build_type is None:
            build_type = "Debug" if self.debug else "RelWithDebInfo"
        build_args = ["--config", build_type]
        cmake_args += ["-DCMAKE_BUILD_TYPE=" + build_type]
        # Everything after "--" is forwarded to the underlying build tool.
        build_args += ["--"]
        if has_ninja():
            cmake_args += ["-GNinja"]
        # add the -j argument only when it's not ninja (ninja parallelizes by
        # default) or the number of jobs is explicitly requested.
        if not has_ninja() or self.parallel:
            build_args += ["-j{}".format(self.parallel) if self.parallel else "-j"]
        cmake_args += [
            "-DBUILD_GUI_VIEWERS={}".format("ON" if not args.headless else "OFF")
        ]
        # The TARGET_HEADLESS toggle is only passed on non-mac/windows platforms.
        if sys.platform not in ["darwin", "win32", "win64"]:
            cmake_args += [
                "-DTARGET_HEADLESS={}".format("ON" if args.headless else "OFF")
            ]
        # Feature toggles driven by the command-line flags parsed above.
        cmake_args += ["-DBUILD_TEST={}".format("ON" if args.build_tests else "OFF")]
        cmake_args += [
            "-DBUILD_WITH_BULLET={}".format("ON" if args.with_bullet else "OFF")
        ]
        cmake_args += [
            "-DBUILD_WITH_VHACD={}".format("ON" if args.with_vhacd else "OFF")
        ]
        cmake_args += [
            "-DBUILD_DATATOOL={}".format("ON" if args.build_datatool else "OFF")
        ]
        cmake_args += ["-DBUILD_WITH_CUDA={}".format("ON" if args.with_cuda else "OFF")]
        cmake_args += [
            "-DBUILD_BASIS_COMPRESSOR={}".format(
                "ON" if args.build_basis_compressor else "OFF"
            )
        ]
        env = os.environ.copy()
        # Bake the package version into the binary via a preprocessor define.
        env["CXXFLAGS"] = '{} -DVERSION_INFO=\\"{}\\"'.format(
            env.get("CXXFLAGS", ""), self.distribution.get_version()
        )
        # Re-run the cmake configure step only when needed (always under pip).
        if is_pip() or self.run_cmake(cmake_args):
            os.makedirs(self.build_temp, exist_ok=True)
            # Under pip, wipe stale cmake state so configuration starts clean.
            for cmake_cache_f in [
                "CMakeFiles",
                "CMakeCache.txt",
                "cmake_install.cmake",
            ]:
                cmake_cache_f = osp.join(self.build_temp, cmake_cache_f)
                if is_pip() and osp.exists(cmake_cache_f):
                    if osp.isdir(cmake_cache_f):
                        shutil.rmtree(cmake_cache_f)
                    else:
                        os.remove(cmake_cache_f)
            subprocess.check_call(
                [osp.join(CMAKE_BIN_DIR, "cmake")]
                + cmake_args
                + [osp.realpath(ext.sourcedir)],
                env=env,
                cwd=self.build_temp,
            )
            if not is_pip():
                self.create_compile_commands()
        # Actual compilation step.
        subprocess.check_call(
            [osp.join(CMAKE_BIN_DIR, "cmake"), "--build", self.build_temp] + build_args
        )
        print()
        if is_pip():
            return
        # Convenience symlink to the built interactive viewer binary.
        if not args.headless:
            link_dst = osp.join(self.build_temp, "viewer")
            if not osp.islink(link_dst):
                os.symlink(
                    osp.abspath(osp.join(self.build_temp, "utils/viewer/viewer")),
                    link_dst,
                )

    def run_cmake(self, cmake_args):
        """Return True when the cmake configure step must be (re)run.

        Compares the requested -D definitions against CMakeCache.txt; a
        missing cache, --force-cmake, or any changed value forces a re-run.
        """
        if args.force_cmake:
            return True
        # Matches CMakeCache.txt entries of the form KEY[:TYPE]=VALUE.
        cache_parser = re.compile(r"(?P<K>\w+?)(:\w+?|)=(?P<V>.*?)$")
        cmake_cache = osp.join(self.build_temp, "CMakeCache.txt")
        if osp.exists(cmake_cache):
            with open(cmake_cache, "r") as f:
                cache_contents = f.readlines()
            for arg in cmake_args:
                # Generator flags (-GNinja) are not stored as K=V cache entries.
                if arg[0:2] == "-G":
                    continue
                k, v = arg.split("=", 1)
                # Strip the leading "-D" from the key.
                k = k[2:]
                for l in cache_contents:
                    match = cache_parser.match(l)
                    if match is None:
                        continue
                    if match.group("K") == k and match.group("V") != v:
                        return True
            return False
        return True

    def create_compile_commands(self):
        """Merge every compile_commands.json under build_temp into one file
        at the current directory, rewriting gcc -> g++ for tooling."""
        def load(filename):
            with open(filename) as f:
                return json.load(f)
        command_files = [osp.join(self.build_temp, "compile_commands.json")]
        command_files += glob.glob("{}/*/compile_commands.json".format(self.build_temp))
        all_commands = [entry for f in command_files for entry in load(f)]
        # cquery does not like c++ compiles that start with gcc.
        # It forgets to include the c++ header directories.
        # We can work around this by replacing the gcc calls that python
        # setup.py generates with g++ calls instead
        for command in all_commands:
            if command["command"].startswith("gcc "):
                command["command"] = "g++ " + command["command"][4:]
        new_contents = json.dumps(all_commands, indent=2)
        contents = ""
        if os.path.exists("compile_commands.json"):
            with open("compile_commands.json", "r") as f:
                contents = f.read()
        # Only rewrite the file when it changed, to keep its mtime stable.
        if contents != new_contents:
            with open("compile_commands.json", "w") as f:
                f.write(new_contents)
# Entry point: validate the interpreter, read requirements, run setuptools'
# setup(), then (optionally) pip-install the magnum bindings built by CMake.
if __name__ == "__main__":
    # StrictVersion comparison on "major.minor" of the running interpreter.
    assert StrictVersion(
        "{}.{}".format(sys.version_info[0], sys.version_info[1])
    ) >= StrictVersion("3.6"), "Must use python3.6 or newer"
    with open("./requirements.txt", "r") as f:
        # Drop blank lines; every remaining line is a requirement spec.
        requirements = [l.strip() for l in f.readlines() if len(l.strip()) > 0]
    # Only install pytest if we are running tests
    if {"pytest", "test", "ptr"}.intersection(sys.argv):
        setup_requires = ["pytest-runner"]
    else:
        setup_requires = []
    # Flag visible to habitat_sim at import time, signalling it is being
    # imported from setup.py (presumably to skip binding imports — confirm
    # in habitat_sim/__init__.py).
    builtins.__HSIM_SETUP__ = True
    import habitat_sim
    setup(
        name="habitat_sim",
        version=habitat_sim.__version__,
        author="FAIR A-STAR",
        description="A high performance simulator for training embodied agents",
        long_description="",
        packages=find_packages(),
        install_requires=requirements,
        setup_requires=setup_requires,
        tests_require=["hypothesis", "pytest-benchmark", "pytest"],
        python_requires=">=3.6",
        # add extension module
        ext_modules=[CMakeExtension("habitat_sim._ext.habitat_sim_bindings", "src")],
        # add custom build_ext command
        cmdclass=dict(build_ext=CMakeBuild),
        zip_safe=False,
        include_package_data=True,
    )
    # _cmake_build_dir was set by CMakeBuild.finalize_options during setup().
    pymagnum_build_dir = osp.join(
        _cmake_build_dir, "deps", "magnum-bindings", "src", "python"
    )
    if (
        not args.skip_install_magnum
        and "sdist" not in sys.argv
        and os.path.exists(pymagnum_build_dir)
    ):
        # Install the freshly built magnum python bindings into this env.
        subprocess.check_call(
            [sys.executable, "-m", "pip", "install", pymagnum_build_dir]
        )
    else:
        if not os.path.exists(pymagnum_build_dir) and "sdist" not in sys.argv:
            print(
                f"{pymagnum_build_dir} does not exist and therefore we cannot install magnum-bindings directly."
            )
        print(
            "Assuming magnum bindings are already installed (or we're inside pip and *\\_('-')_/*)"
        )
        print(
            f"Run '{sys.executable} -m pip install {pymagnum_build_dir}' if this assumption is incorrect"
        )
| true | true |
f7390cd227338410fabb9b4cd5b45f0af8f77191 | 11,974 | py | Python | ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/dcenodetopologyrange_34374b8565456318538178dbf3a92ccb.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 20 | 2019-05-07T01:59:14.000Z | 2022-02-11T05:24:47.000Z | ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/dcenodetopologyrange_34374b8565456318538178dbf3a92ccb.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 60 | 2019-04-03T18:59:35.000Z | 2022-02-22T12:05:05.000Z | ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/dcenodetopologyrange_34374b8565456318538178dbf3a92ccb.py | OpenIxia/ixnetwork_restpy | f628db450573a104f327cf3c737ca25586e067ae | [
"MIT"
] | 13 | 2019-05-20T10:48:31.000Z | 2021-10-06T07:45:44.000Z | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
# NOTE: this class follows the auto-generated ixnetwork-restpy SDM pattern —
# each property proxies one server-side attribute via _SDM_ATT_MAP.
class DceNodeTopologyRange(Base):
    """Sets the DCE Node Topology of a particular DCE ISIS Topology Range.

    The DceNodeTopologyRange class encapsulates a list of dceNodeTopologyRange resources that are managed by the user.
    A list of resources can be retrieved from the server using the DceNodeTopologyRange.find() method.
    The list can be managed by using the DceNodeTopologyRange.add() and DceNodeTopologyRange.remove() methods.
    """

    __slots__ = ()
    _SDM_NAME = 'dceNodeTopologyRange'
    # Python attribute name -> server-side (SDM) attribute name.
    _SDM_ATT_MAP = {
        'BroadcastPriority': 'broadcastPriority',
        'IncludeL2Topology': 'includeL2Topology',
        'InternodeNicknameIncrement': 'internodeNicknameIncrement',
        'NicknameCount': 'nicknameCount',
        'NoOfTreesToCompute': 'noOfTreesToCompute',
        'StartNickname': 'startNickname',
        'TopologyCount': 'topologyCount',
        'TopologyId': 'topologyId',
    }
    _SDM_ENUM_MAP = {
    }

    def __init__(self, parent, list_op=False):
        super(DceNodeTopologyRange, self).__init__(parent, list_op)

    @property
    def DceNodeInterestedVlanRange(self):
        """
        Returns
        -------
        - obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.dcenodeinterestedvlanrange_b15c6822c508a957290b5d3ed2b3ea4e.DceNodeInterestedVlanRange): An instance of the DceNodeInterestedVlanRange class

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # Local import avoids a circular dependency between generated modules.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.dcenodeinterestedvlanrange_b15c6822c508a957290b5d3ed2b3ea4e import DceNodeInterestedVlanRange
        if self._properties.get('DceNodeInterestedVlanRange', None) is not None:
            return self._properties.get('DceNodeInterestedVlanRange')
        else:
            return DceNodeInterestedVlanRange(self)

    @property
    def BroadcastPriority(self):
        # type: () -> int
        """
        Returns
        -------
        - number: Sets the priority in which the topology is broadcast.
        """
        return self._get_attribute(self._SDM_ATT_MAP['BroadcastPriority'])
    @BroadcastPriority.setter
    def BroadcastPriority(self, value):
        # type: (int) -> None
        self._set_attribute(self._SDM_ATT_MAP['BroadcastPriority'], value)

    @property
    def IncludeL2Topology(self):
        # type: () -> bool
        """
        Returns
        -------
        - bool: If true, includes the L2 topology.
        """
        return self._get_attribute(self._SDM_ATT_MAP['IncludeL2Topology'])
    @IncludeL2Topology.setter
    def IncludeL2Topology(self, value):
        # type: (bool) -> None
        self._set_attribute(self._SDM_ATT_MAP['IncludeL2Topology'], value)

    @property
    def InternodeNicknameIncrement(self):
        # type: () -> int
        """
        Returns
        -------
        - number: The increment step to be used for creating the internode increment.
        """
        return self._get_attribute(self._SDM_ATT_MAP['InternodeNicknameIncrement'])
    @InternodeNicknameIncrement.setter
    def InternodeNicknameIncrement(self, value):
        # type: (int) -> None
        self._set_attribute(self._SDM_ATT_MAP['InternodeNicknameIncrement'], value)

    @property
    def NicknameCount(self):
        # type: () -> int
        """
        Returns
        -------
        - number: The count of the nickname.
        """
        return self._get_attribute(self._SDM_ATT_MAP['NicknameCount'])
    @NicknameCount.setter
    def NicknameCount(self, value):
        # type: (int) -> None
        self._set_attribute(self._SDM_ATT_MAP['NicknameCount'], value)

    @property
    def NoOfTreesToCompute(self):
        # type: () -> int
        """
        Returns
        -------
        - number: The number of trees to compute.
        """
        return self._get_attribute(self._SDM_ATT_MAP['NoOfTreesToCompute'])
    @NoOfTreesToCompute.setter
    def NoOfTreesToCompute(self, value):
        # type: (int) -> None
        self._set_attribute(self._SDM_ATT_MAP['NoOfTreesToCompute'], value)

    @property
    def StartNickname(self):
        # type: () -> int
        """
        Returns
        -------
        - number: The starting nickname of the range. (Generated doc previously
          said "If true, uses the nickname", which describes a bool, not a number.)
        """
        return self._get_attribute(self._SDM_ATT_MAP['StartNickname'])
    @StartNickname.setter
    def StartNickname(self, value):
        # type: (int) -> None
        self._set_attribute(self._SDM_ATT_MAP['StartNickname'], value)

    @property
    def TopologyCount(self):
        # type: () -> int
        """
        Returns
        -------
        - number: The count of the topology.
        """
        return self._get_attribute(self._SDM_ATT_MAP['TopologyCount'])
    @TopologyCount.setter
    def TopologyCount(self, value):
        # type: (int) -> None
        self._set_attribute(self._SDM_ATT_MAP['TopologyCount'], value)

    @property
    def TopologyId(self):
        # type: () -> int
        """
        Returns
        -------
        - number: The unique identification number of the topology range.
        """
        return self._get_attribute(self._SDM_ATT_MAP['TopologyId'])
    @TopologyId.setter
    def TopologyId(self, value):
        # type: (int) -> None
        self._set_attribute(self._SDM_ATT_MAP['TopologyId'], value)

    def update(self, BroadcastPriority=None, IncludeL2Topology=None, InternodeNicknameIncrement=None, NicknameCount=None, NoOfTreesToCompute=None, StartNickname=None, TopologyCount=None, TopologyId=None):
        # type: (int, bool, int, int, int, int, int, int) -> DceNodeTopologyRange
        """Updates dceNodeTopologyRange resource on the server.

        Args
        ----
        - BroadcastPriority (number): Sets the priority in which the topology is broadcast.
        - IncludeL2Topology (bool): If true, includes the L2 topology.
        - InternodeNicknameIncrement (number): The increment step to be used for creating the internode increment.
        - NicknameCount (number): The count of the nickname.
        - NoOfTreesToCompute (number): The number of trees to compute.
        - StartNickname (number): The starting nickname of the range.
        - TopologyCount (number): The count of the topology.
        - TopologyId (number): The unique identification number of the topology range.

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))

    def add(self, BroadcastPriority=None, IncludeL2Topology=None, InternodeNicknameIncrement=None, NicknameCount=None, NoOfTreesToCompute=None, StartNickname=None, TopologyCount=None, TopologyId=None):
        # type: (int, bool, int, int, int, int, int, int) -> DceNodeTopologyRange
        """Adds a new dceNodeTopologyRange resource on the server and adds it to the container.

        Args
        ----
        - BroadcastPriority (number): Sets the priority in which the topology is broadcast.
        - IncludeL2Topology (bool): If true, includes the L2 topology.
        - InternodeNicknameIncrement (number): The increment step to be used for creating the internode increment.
        - NicknameCount (number): The count of the nickname.
        - NoOfTreesToCompute (number): The number of trees to compute.
        - StartNickname (number): The starting nickname of the range.
        - TopologyCount (number): The count of the topology.
        - TopologyId (number): The unique identification number of the topology range.

        Returns
        -------
        - self: This instance with all currently retrieved dceNodeTopologyRange resources using find and the newly added dceNodeTopologyRange resources available through an iterator or index

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))

    def remove(self):
        """Deletes all the contained dceNodeTopologyRange resources in this instance from the server.

        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        self._delete()

    def find(self, BroadcastPriority=None, IncludeL2Topology=None, InternodeNicknameIncrement=None, NicknameCount=None, NoOfTreesToCompute=None, StartNickname=None, TopologyCount=None, TopologyId=None):
        # type: (int, bool, int, int, int, int, int, int) -> DceNodeTopologyRange
        """Finds and retrieves dceNodeTopologyRange resources from the server.

        All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve dceNodeTopologyRange resources from the server.
        To retrieve an exact match ensure the parameter value starts with ^ and ends with $
        By default the find method takes no parameters and will retrieve all dceNodeTopologyRange resources from the server.

        Args
        ----
        - BroadcastPriority (number): Sets the priority in which the topology is broadcast.
        - IncludeL2Topology (bool): If true, includes the L2 topology.
        - InternodeNicknameIncrement (number): The increment step to be used for creating the internode increment.
        - NicknameCount (number): The count of the nickname.
        - NoOfTreesToCompute (number): The number of trees to compute.
        - StartNickname (number): The starting nickname of the range.
        - TopologyCount (number): The count of the topology.
        - TopologyId (number): The unique identification number of the topology range.

        Returns
        -------
        - self: This instance with matching dceNodeTopologyRange resources retrieved from the server available through an iterator or index

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))

    def read(self, href):
        """Retrieves a single instance of dceNodeTopologyRange data from the server.

        Args
        ----
        - href (str): An href to the instance to be retrieved

        Returns
        -------
        - self: This instance with the dceNodeTopologyRange resources from the server available through an iterator or index

        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
| 42.310954 | 219 | 0.674461 |
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
from typing import List, Any, Union
class DceNodeTopologyRange(Base):
    """Sets the DCE Node Topology of a particular DCE ISIS Topology Range.

    Generated ixnetwork-restpy SDM class: each property below proxies one
    server-side attribute through _SDM_ATT_MAP.
    """

    __slots__ = ()
    _SDM_NAME = 'dceNodeTopologyRange'
    # Python attribute name -> server-side (SDM) attribute name.
    _SDM_ATT_MAP = {
        'BroadcastPriority': 'broadcastPriority',
        'IncludeL2Topology': 'includeL2Topology',
        'InternodeNicknameIncrement': 'internodeNicknameIncrement',
        'NicknameCount': 'nicknameCount',
        'NoOfTreesToCompute': 'noOfTreesToCompute',
        'StartNickname': 'startNickname',
        'TopologyCount': 'topologyCount',
        'TopologyId': 'topologyId',
    }
    _SDM_ENUM_MAP = {
    }

    def __init__(self, parent, list_op=False):
        super(DceNodeTopologyRange, self).__init__(parent, list_op)

    @property
    def DceNodeInterestedVlanRange(self):
        """Child DceNodeInterestedVlanRange accessor (cached when present)."""
        # Local import avoids a circular dependency between generated modules.
        from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.dcenodeinterestedvlanrange_b15c6822c508a957290b5d3ed2b3ea4e import DceNodeInterestedVlanRange
        if self._properties.get('DceNodeInterestedVlanRange', None) is not None:
            return self._properties.get('DceNodeInterestedVlanRange')
        else:
            return DceNodeInterestedVlanRange(self)

    # Attribute accessors: each getter/setter pair proxies one SDM attribute.
    @property
    def BroadcastPriority(self):
        return self._get_attribute(self._SDM_ATT_MAP['BroadcastPriority'])
    @BroadcastPriority.setter
    def BroadcastPriority(self, value):
        self._set_attribute(self._SDM_ATT_MAP['BroadcastPriority'], value)
    @property
    def IncludeL2Topology(self):
        return self._get_attribute(self._SDM_ATT_MAP['IncludeL2Topology'])
    @IncludeL2Topology.setter
    def IncludeL2Topology(self, value):
        self._set_attribute(self._SDM_ATT_MAP['IncludeL2Topology'], value)
    @property
    def InternodeNicknameIncrement(self):
        return self._get_attribute(self._SDM_ATT_MAP['InternodeNicknameIncrement'])
    @InternodeNicknameIncrement.setter
    def InternodeNicknameIncrement(self, value):
        self._set_attribute(self._SDM_ATT_MAP['InternodeNicknameIncrement'], value)
    @property
    def NicknameCount(self):
        return self._get_attribute(self._SDM_ATT_MAP['NicknameCount'])
    @NicknameCount.setter
    def NicknameCount(self, value):
        self._set_attribute(self._SDM_ATT_MAP['NicknameCount'], value)
    @property
    def NoOfTreesToCompute(self):
        return self._get_attribute(self._SDM_ATT_MAP['NoOfTreesToCompute'])
    @NoOfTreesToCompute.setter
    def NoOfTreesToCompute(self, value):
        self._set_attribute(self._SDM_ATT_MAP['NoOfTreesToCompute'], value)
    @property
    def StartNickname(self):
        return self._get_attribute(self._SDM_ATT_MAP['StartNickname'])
    @StartNickname.setter
    def StartNickname(self, value):
        self._set_attribute(self._SDM_ATT_MAP['StartNickname'], value)
    @property
    def TopologyCount(self):
        return self._get_attribute(self._SDM_ATT_MAP['TopologyCount'])
    @TopologyCount.setter
    def TopologyCount(self, value):
        self._set_attribute(self._SDM_ATT_MAP['TopologyCount'], value)
    @property
    def TopologyId(self):
        return self._get_attribute(self._SDM_ATT_MAP['TopologyId'])
    @TopologyId.setter
    def TopologyId(self, value):
        self._set_attribute(self._SDM_ATT_MAP['TopologyId'], value)

    def update(self, BroadcastPriority=None, IncludeL2Topology=None, InternodeNicknameIncrement=None, NicknameCount=None, NoOfTreesToCompute=None, StartNickname=None, TopologyCount=None, TopologyId=None):
        """Update this dceNodeTopologyRange resource on the server with the given attributes."""
        return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))

    def add(self, BroadcastPriority=None, IncludeL2Topology=None, InternodeNicknameIncrement=None, NicknameCount=None, NoOfTreesToCompute=None, StartNickname=None, TopologyCount=None, TopologyId=None):
        """Create a new dceNodeTopologyRange resource on the server and add it to this container."""
        return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))

    def remove(self):
        """Delete all contained dceNodeTopologyRange resources from the server."""
        self._delete()

    def find(self, BroadcastPriority=None, IncludeL2Topology=None, InternodeNicknameIncrement=None, NicknameCount=None, NoOfTreesToCompute=None, StartNickname=None, TopologyCount=None, TopologyId=None):
        """Retrieve dceNodeTopologyRange resources matching the given (regex) attribute filters."""
        return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))

    def read(self, href):
        """Retrieve the single dceNodeTopologyRange instance identified by *href*."""
        return self._read(href)
| true | true |
f7390cf292e209f021713892d3fb85ddd364f972 | 3,048 | py | Python | packages/w3af/w3af/plugins/evasion/full_width_encode.py | ZooAtmosphereGroup/HelloPackages | 0ccffd33bf927b13d28c8f715ed35004c33465d9 | [
"Apache-2.0"
] | null | null | null | packages/w3af/w3af/plugins/evasion/full_width_encode.py | ZooAtmosphereGroup/HelloPackages | 0ccffd33bf927b13d28c8f715ed35004c33465d9 | [
"Apache-2.0"
] | null | null | null | packages/w3af/w3af/plugins/evasion/full_width_encode.py | ZooAtmosphereGroup/HelloPackages | 0ccffd33bf927b13d28c8f715ed35004c33465d9 | [
"Apache-2.0"
] | null | null | null | """
full_width_encode.py
Copyright 2006 Andres Riancho
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import urllib
from w3af.core.controllers.plugins.evasion_plugin import EvasionPlugin
from w3af.core.data.parsers.doc.url import parse_qs
class full_width_encode(EvasionPlugin):
    """
    Evade detection using full width encoding.

    :author: Andres Riancho (andres.riancho@gmail.com)
    """

    def modify_request(self, request):
        """
        Mangles the request

        :param request: HTTPRequest instance that is going to be modified by
                            the evasion plugin
        :return: The modified request
        """
        # First we mangle the URL
        path = request.url_object.get_path()
        path = self._mutate(path)
        # Now we mangle the postdata
        data = request.get_data()
        if data:
            try:
                # Only mangle the postdata if it is a url encoded string
                parse_qs(data)
            except:
                # NOTE(review): bare except is a deliberate best-effort check —
                # non-urlencoded bodies are simply left untouched.
                pass
            else:
                # We get here only if the parsing was successful
                data = self._mutate(data)
        # Finally, we set all the mutants to the request in order to return it
        new_url = request.url_object.copy()
        new_url.set_path(path)
        new_req = request.copy()
        new_req.set_data(data)
        new_req.set_uri(new_url)
        return new_req

    def _mutate(self, to_mutate):
        """Full-width-encode *to_mutate*: every character except URL syntax
        characters ('?', '/', '&', '\\', '=', '%', '+') becomes a %uFFxx escape.

        Note: urllib.unquote is the Python 2 API (this codebase is Python 2);
        in Python 3 it lives in urllib.parse.
        """
        to_mutate = urllib.unquote(to_mutate)
        mutant = ''
        for char in to_mutate:
            if char not in ['?', '/', '&', '\\', '=', '%', '+']:
                # The "- 0x20" was taken from UFF00.pdf
                char = "%%uFF%02x" % (ord(char) - 0x20)
            mutant += char
        return mutant

    def get_priority(self):
        """
        This function is called when sorting evasion plugins.
        Each evasion plugin should implement this.

        :return: An integer specifying the priority. 0 is run first, 100 last.
        """
        return 50

    def get_long_desc(self):
        """
        :return: A DETAILED description of the plugin functions and features.
        """
        return """
        This evasion plugin does full width encoding as described here:
            - http://www.kb.cert.org/vuls/id/739224
        Example:
            Input: '/bar/foo.asp'
            Output: '/b%uFF61r/%uFF66oo.asp'
        """
| 29.882353 | 78 | 0.614829 | import urllib
from w3af.core.controllers.plugins.evasion_plugin import EvasionPlugin
from w3af.core.data.parsers.doc.url import parse_qs
class full_width_encode(EvasionPlugin):
    """Evade detection by full-width-encoding the URL path and urlencoded postdata."""

    def modify_request(self, request):
        """Return a copy of *request* with its path (and urlencoded body) mutated."""
        path = request.url_object.get_path()
        path = self._mutate(path)
        data = request.get_data()
        if data:
            try:
                # Probe: only mangle the body when it parses as a query string.
                parse_qs(data)
            except:
                # NOTE(review): bare except is a deliberate best-effort check —
                # non-urlencoded bodies are simply left untouched.
                pass
            else:
                data = self._mutate(data)
        new_url = request.url_object.copy()
        new_url.set_path(path)
        new_req = request.copy()
        new_req.set_data(data)
        new_req.set_uri(new_url)
        return new_req

    def _mutate(self, to_mutate):
        """Encode every non-URL-syntax character as a %uFFxx escape.

        urllib.unquote is the Python 2 API; in Python 3 it is urllib.parse.unquote.
        """
        to_mutate = urllib.unquote(to_mutate)
        mutant = ''
        for char in to_mutate:
            if char not in ['?', '/', '&', '\\', '=', '%', '+']:
                # "- 0x20" shifts ASCII into the full-width escape range.
                char = "%%uFF%02x" % (ord(char) - 0x20)
            mutant += char
        return mutant

    def get_priority(self):
        """Plugin ordering priority: 0 runs first, 100 last."""
        return 50

    def get_long_desc(self):
        """Return the detailed plugin description shown to users."""
        return """
        This evasion plugin does full width encoding as described here:
            - http://www.kb.cert.org/vuls/id/739224
        Example:
            Input: '/bar/foo.asp'
            Output: '/b%uFF61r/%uFF66oo.asp'
        """
| true | true |
f7390dc9891b3c2959a43d730cd03c3815a2319d | 9,135 | py | Python | tests/utilities/test_storage.py | nicolasiltis/prefect | 4298105651c2fe02b21013ae8a0468e9e101154d | [
"Apache-2.0"
] | 3 | 2021-11-09T10:46:58.000Z | 2022-03-11T04:22:35.000Z | tests/utilities/test_storage.py | nicolasiltis/prefect | 4298105651c2fe02b21013ae8a0468e9e101154d | [
"Apache-2.0"
] | 8 | 2021-10-11T16:42:59.000Z | 2022-03-31T08:42:24.000Z | tests/utilities/test_storage.py | nicolasiltis/prefect | 4298105651c2fe02b21013ae8a0468e9e101154d | [
"Apache-2.0"
] | 1 | 2022-03-11T04:22:40.000Z | 2022-03-11T04:22:40.000Z | import os
import sys
import types
import textwrap
import pytest
import cloudpickle
import prefect
from prefect import Flow, Task
from prefect.storage import Docker, Local
from prefect.exceptions import FlowStorageError
from prefect.run_configs import DockerRun, UniversalRun
from prefect.utilities.storage import (
get_flow_image,
extract_flow_from_file,
extract_flow_from_module,
flow_to_bytes_pickle,
flow_from_bytes_pickle,
)
def test_get_flow_image_docker_storage():
    """Docker storage alone determines the image: registry/name:tag."""
    storage = Docker(registry_url="test", image_name="name", image_tag="tag")
    flow = Flow("test", run_config=UniversalRun(), storage=storage)
    assert get_flow_image(flow=flow) == "test/name:tag"
def test_get_flow_image_run_config():
    """A DockerRun run-config supplies the image when storage carries none."""
    flow = Flow(
        "test",
        storage=Local(),
        run_config=DockerRun(image="repo/name:tag"),
    )
    assert get_flow_image(flow=flow) == "repo/name:tag"
def test_get_flow_image_raises_on_missing_info():
    """Neither storage nor run-config names an image -> ValueError."""
    flow = Flow("test", run_config=UniversalRun(), storage=Local())
    with pytest.raises(ValueError):
        get_flow_image(flow=flow)
class TestExtractFlowFromFile:
    """Tests for extract_flow_from_file (path/contents loading and errors)."""

    @pytest.fixture
    def flow_path(self, tmpdir):
        # A module defining two flows, to exercise flow_name selection.
        contents = """from prefect import Flow\nf=Flow('flow-1')\nf2=Flow('flow-2')"""
        full_path = os.path.join(tmpdir, "flow.py")
        with open(full_path, "w") as f:
            f.write(contents)
        return full_path

    @pytest.fixture
    def flow_path_with_additional_file(self, tmpdir):
        # A module that reads a sibling file at import time — verifies that
        # extraction resolves relative paths against the flow file's directory.
        contents = """\
        from prefect import Flow
        from pathlib import Path
        with open(str(Path(__file__).resolve().parent)+"/test.txt", "r") as f:
            name = f.read()
        f2 = Flow(name)
        """
        full_path = os.path.join(tmpdir, "flow.py")
        with open(full_path, "w") as f:
            f.write(textwrap.dedent(contents))
        with open(os.path.join(tmpdir, "test.txt"), "w") as f:
            f.write("test-flow")
        return full_path

    def test_extract_flow_from_file_path(self, flow_path):
        # Without a name, the first flow found is returned.
        flow = extract_flow_from_file(file_path=flow_path)
        assert flow.name == "flow-1"
        assert flow.run().is_successful()
        # With a name, the matching flow is returned.
        flow = extract_flow_from_file(file_path=flow_path, flow_name="flow-1")
        assert flow.name == "flow-1"
        flow = extract_flow_from_file(file_path=flow_path, flow_name="flow-2")
        assert flow.name == "flow-2"

    def test_extract_flow_from_file_path_can_load_files_from_same_directory(
        self, flow_path_with_additional_file
    ):
        flow = extract_flow_from_file(file_path=flow_path_with_additional_file)
        assert flow.name == "test-flow"
        assert flow.run().is_successful()

    def test_extract_flow_from_file_contents(self, flow_path):
        # Same behavior as file_path, but fed raw source text instead.
        with open(flow_path, "r") as f:
            contents = f.read()
        flow = extract_flow_from_file(file_contents=contents)
        assert flow.name == "flow-1"
        assert flow.run().is_successful()
        flow = extract_flow_from_file(file_contents=contents, flow_name="flow-1")
        assert flow.name == "flow-1"
        flow = extract_flow_from_file(file_contents=contents, flow_name="flow-2")
        assert flow.name == "flow-2"

    def test_extract_flow_from_file_errors(self, flow_path):
        # Mutually exclusive arguments.
        with pytest.raises(ValueError, match="but not both"):
            extract_flow_from_file(file_path="", file_contents="")
        # At least one argument required.
        with pytest.raises(ValueError, match="Provide either"):
            extract_flow_from_file()
        # Unknown flow name lists the flows that were found.
        expected = (
            "Flow 'not-real' not found in file. Found flows:\n- 'flow-1'\n- 'flow-2'"
        )
        with pytest.raises(ValueError, match=expected):
            extract_flow_from_file(file_path=flow_path, flow_name="not-real")
        with pytest.raises(ValueError, match="No flows found in file."):
            extract_flow_from_file(file_contents="")

    @pytest.mark.parametrize("method", ["run", "register"])
    def test_extract_flow_from_file_raises_on_run_register(self, tmpdir, method):
        # A flow file calling flow.run()/flow.register() at import time should
        # trigger a warning when loaded under the "loading_flow" context.
        contents = f"from prefect import Flow\nf=Flow('test-flow')\nf.{method}()"
        full_path = os.path.join(tmpdir, "flow.py")
        with open(full_path, "w") as f:
            f.write(contents)
        with prefect.context({"loading_flow": True}):
            with pytest.warns(Warning):
                extract_flow_from_file(file_path=full_path)
@pytest.fixture
def mymodule(monkeypatch):
    """Provide a fresh, empty module registered in sys.modules as 'mymodule'."""
    name = "mymodule"
    mod = types.ModuleType(name)
    # monkeypatch restores sys.modules automatically after the test.
    monkeypatch.setitem(sys.modules, name, mod)
    return mod
def test_extract_flow_from_module(mymodule):
    """extract_flow_from_module: attribute paths, name matching, and errors."""

    class Obj:
        flow = Flow("multi-level flow")

    mymodule.flow = Flow("top level flow")
    mymodule.multi_level = Obj()
    mymodule.bad_type = 1
    # module with single top-level flow has flow auto-inferred
    assert extract_flow_from_module("mymodule") is mymodule.flow
    # Specifying name/attribute still works
    assert extract_flow_from_module("mymodule", "top level flow") is mymodule.flow
    assert extract_flow_from_module("mymodule:flow", "top level flow") is mymodule.flow
    # Multi-level attrs work
    assert extract_flow_from_module("mymodule:multi_level.flow") is Obj.flow
    # Multiple top-level flows
    mymodule.flow2 = Flow("a second flow")
    assert extract_flow_from_module("mymodule", "top level flow") is mymodule.flow
    assert extract_flow_from_module("mymodule", "a second flow") is mymodule.flow2
    # Multiple flows not auto-inferred
    with pytest.raises(ValueError, match="Multiple flows found"):
        extract_flow_from_module("mymodule")
    # Name not found
    with pytest.raises(ValueError, match="Failed to find flow"):
        extract_flow_from_module("mymodule", "unknown name")
    # Name doesn't match specified object
    with pytest.raises(ValueError, match="Flow at 'mymodule:flow' is named"):
        extract_flow_from_module("mymodule:flow", "incorrect name")
    # Not a flow object
    with pytest.raises(TypeError, match="Object at 'mymodule:bad_type'"):
        extract_flow_from_module("mymodule:bad_type")
def test_extract_flow_from_module_callable_objects(mymodule):
    """Module attributes that are callables returning a Flow are invoked and supported."""
    flow1 = Flow("flow 1")
    flow2 = Flow("flow 2")
    class Obj:
        def build_flow(self):
            return flow2
    mymodule.build_flow = lambda: flow1
    mymodule.multi_level = Obj()
    mymodule.bad_type = lambda: 1
    assert extract_flow_from_module("mymodule:build_flow") is flow1
    assert extract_flow_from_module("mymodule:build_flow", "flow 1") is flow1
    assert extract_flow_from_module("mymodule:multi_level.build_flow") is flow2
    # A callable whose return value is not a Flow raises TypeError.
    with pytest.raises(TypeError, match="Object at 'mymodule:bad_type'"):
        extract_flow_from_module("mymodule:bad_type")
class RaiseOnLoad(Task):
    """A Task whose *unpickling* raises the configured exception.

    ``__reduce__`` serializes the instance as a call to ``_raise(exc)``,
    so deserializing the pickle immediately raises ``exc``.
    """
    def __init__(self, exc):
        super().__init__()
        # Exception instance to raise when the pickle is loaded.
        self.exc = exc
    @staticmethod
    def _raise(exc):
        raise exc
    def __reduce__(self):
        # Pickle as "call _raise with self.exc", which raises on load.
        return (RaiseOnLoad._raise, (self.exc,))
class TestFlowToFromBytesPickle:
    """Tests for flow_to_bytes_pickle / flow_from_bytes_pickle helpers."""
    def test_serialize_deserialize(self):
        """A Flow survives a serialize/deserialize round trip."""
        s = flow_to_bytes_pickle(Flow("test"))
        assert isinstance(s, bytes)
        flow = flow_from_bytes_pickle(s)
        assert isinstance(flow, Flow)
        assert flow.name == "test"
    def test_flow_from_bytes_loads_raw_pickle(self):
        """Older versions of prefect serialized flows as straight pickle bytes.
        This checks that we can still deserialize these payloads"""
        s = cloudpickle.dumps(Flow("test"))
        flow = flow_from_bytes_pickle(s)
        assert isinstance(flow, Flow)
        assert flow.name == "test"
    def test_flow_from_bytes_warns_prefect_version_mismatch(self, monkeypatch):
        """Loading a flow built under a different Prefect version emits a UserWarning."""
        s = flow_to_bytes_pickle(Flow("test"))
        monkeypatch.setattr(prefect, "__version__", "0.1.0")
        with pytest.warns(UserWarning, match="This flow was built using Prefect"):
            flow = flow_from_bytes_pickle(s)
        assert isinstance(flow, Flow)
        assert flow.name == "test"
    @pytest.mark.parametrize("version_mismatch", [False, True])
    @pytest.mark.parametrize("import_error", [False, True])
    def test_flow_from_bytes_error(self, monkeypatch, version_mismatch, import_error):
        """Unpickling failures surface as FlowStorageError with contextual hints."""
        exc = ImportError("mymodule") if import_error else ValueError("Oh no!")
        # RaiseOnLoad raises `exc` during deserialization (see its __reduce__).
        flow = Flow("test", tasks=[RaiseOnLoad(exc)])
        s = flow_to_bytes_pickle(flow)
        if version_mismatch:
            monkeypatch.setattr(prefect, "__version__", "0.0.1")
            monkeypatch.setattr(cloudpickle, "__version__", "0.0.2")
        with pytest.raises(
            FlowStorageError, match="An error occurred while unpickling"
        ) as exc:
            flow_from_bytes_pickle(s)
        msg = "mymodule" if import_error else "Oh no!"
        assert msg in str(exc.value)
        # Extra components only present if relevant
        assert ("missing Python module" in str(exc.value)) == import_error
        assert ("version mismatches" in str(exc.value)) == version_mismatch
| 33.218182 | 87 | 0.67214 | import os
import sys
import types
import textwrap
import pytest
import cloudpickle
import prefect
from prefect import Flow, Task
from prefect.storage import Docker, Local
from prefect.exceptions import FlowStorageError
from prefect.run_configs import DockerRun, UniversalRun
from prefect.utilities.storage import (
get_flow_image,
extract_flow_from_file,
extract_flow_from_module,
flow_to_bytes_pickle,
flow_from_bytes_pickle,
)
def test_get_flow_image_docker_storage():
flow = Flow(
"test",
run_config=UniversalRun(),
storage=Docker(registry_url="test", image_name="name", image_tag="tag"),
)
image = get_flow_image(flow=flow)
assert image == "test/name:tag"
def test_get_flow_image_run_config():
flow = Flow(
"test",
run_config=DockerRun(image="repo/name:tag"),
storage=Local(),
)
image = get_flow_image(flow=flow)
assert image == "repo/name:tag"
def test_get_flow_image_raises_on_missing_info():
flow = Flow(
"test",
run_config=UniversalRun(),
storage=Local(),
)
with pytest.raises(ValueError):
get_flow_image(flow=flow)
class TestExtractFlowFromFile:
@pytest.fixture
def flow_path(self, tmpdir):
contents = """from prefect import Flow\nf=Flow('flow-1')\nf2=Flow('flow-2')"""
full_path = os.path.join(tmpdir, "flow.py")
with open(full_path, "w") as f:
f.write(contents)
return full_path
@pytest.fixture
def flow_path_with_additional_file(self, tmpdir):
contents = """\
from prefect import Flow
from pathlib import Path
with open(str(Path(__file__).resolve().parent)+"/test.txt", "r") as f:
name = f.read()
f2 = Flow(name)
"""
full_path = os.path.join(tmpdir, "flow.py")
with open(full_path, "w") as f:
f.write(textwrap.dedent(contents))
with open(os.path.join(tmpdir, "test.txt"), "w") as f:
f.write("test-flow")
return full_path
def test_extract_flow_from_file_path(self, flow_path):
flow = extract_flow_from_file(file_path=flow_path)
assert flow.name == "flow-1"
assert flow.run().is_successful()
flow = extract_flow_from_file(file_path=flow_path, flow_name="flow-1")
assert flow.name == "flow-1"
flow = extract_flow_from_file(file_path=flow_path, flow_name="flow-2")
assert flow.name == "flow-2"
def test_extract_flow_from_file_path_can_load_files_from_same_directory(
self, flow_path_with_additional_file
):
flow = extract_flow_from_file(file_path=flow_path_with_additional_file)
assert flow.name == "test-flow"
assert flow.run().is_successful()
def test_extract_flow_from_file_contents(self, flow_path):
with open(flow_path, "r") as f:
contents = f.read()
flow = extract_flow_from_file(file_contents=contents)
assert flow.name == "flow-1"
assert flow.run().is_successful()
flow = extract_flow_from_file(file_contents=contents, flow_name="flow-1")
assert flow.name == "flow-1"
flow = extract_flow_from_file(file_contents=contents, flow_name="flow-2")
assert flow.name == "flow-2"
def test_extract_flow_from_file_errors(self, flow_path):
with pytest.raises(ValueError, match="but not both"):
extract_flow_from_file(file_path="", file_contents="")
with pytest.raises(ValueError, match="Provide either"):
extract_flow_from_file()
expected = (
"Flow 'not-real' not found in file. Found flows:\n- 'flow-1'\n- 'flow-2'"
)
with pytest.raises(ValueError, match=expected):
extract_flow_from_file(file_path=flow_path, flow_name="not-real")
with pytest.raises(ValueError, match="No flows found in file."):
extract_flow_from_file(file_contents="")
@pytest.mark.parametrize("method", ["run", "register"])
def test_extract_flow_from_file_raises_on_run_register(self, tmpdir, method):
contents = f"from prefect import Flow\nf=Flow('test-flow')\nf.{method}()"
full_path = os.path.join(tmpdir, "flow.py")
with open(full_path, "w") as f:
f.write(contents)
with prefect.context({"loading_flow": True}):
with pytest.warns(Warning):
extract_flow_from_file(file_path=full_path)
@pytest.fixture
def mymodule(monkeypatch):
mod_name = "mymodule"
module = types.ModuleType(mod_name)
monkeypatch.setitem(sys.modules, mod_name, module)
return module
def test_extract_flow_from_module(mymodule):
class Obj:
flow = Flow("multi-level flow")
mymodule.flow = Flow("top level flow")
mymodule.multi_level = Obj()
mymodule.bad_type = 1
assert extract_flow_from_module("mymodule") is mymodule.flow
assert extract_flow_from_module("mymodule", "top level flow") is mymodule.flow
assert extract_flow_from_module("mymodule:flow", "top level flow") is mymodule.flow
assert extract_flow_from_module("mymodule:multi_level.flow") is Obj.flow
mymodule.flow2 = Flow("a second flow")
assert extract_flow_from_module("mymodule", "top level flow") is mymodule.flow
assert extract_flow_from_module("mymodule", "a second flow") is mymodule.flow2
with pytest.raises(ValueError, match="Multiple flows found"):
extract_flow_from_module("mymodule")
with pytest.raises(ValueError, match="Failed to find flow"):
extract_flow_from_module("mymodule", "unknown name")
with pytest.raises(ValueError, match="Flow at 'mymodule:flow' is named"):
extract_flow_from_module("mymodule:flow", "incorrect name")
# Not a flow object
with pytest.raises(TypeError, match="Object at 'mymodule:bad_type'"):
extract_flow_from_module("mymodule:bad_type")
def test_extract_flow_from_module_callable_objects(mymodule):
flow1 = Flow("flow 1")
flow2 = Flow("flow 2")
class Obj:
def build_flow(self):
return flow2
mymodule.build_flow = lambda: flow1
mymodule.multi_level = Obj()
mymodule.bad_type = lambda: 1
assert extract_flow_from_module("mymodule:build_flow") is flow1
assert extract_flow_from_module("mymodule:build_flow", "flow 1") is flow1
assert extract_flow_from_module("mymodule:multi_level.build_flow") is flow2
with pytest.raises(TypeError, match="Object at 'mymodule:bad_type'"):
extract_flow_from_module("mymodule:bad_type")
class RaiseOnLoad(Task):
def __init__(self, exc):
super().__init__()
self.exc = exc
@staticmethod
def _raise(exc):
raise exc
def __reduce__(self):
return (self._raise, (self.exc,))
class TestFlowToFromBytesPickle:
def test_serialize_deserialize(self):
s = flow_to_bytes_pickle(Flow("test"))
assert isinstance(s, bytes)
flow = flow_from_bytes_pickle(s)
assert isinstance(flow, Flow)
assert flow.name == "test"
def test_flow_from_bytes_loads_raw_pickle(self):
s = cloudpickle.dumps(Flow("test"))
flow = flow_from_bytes_pickle(s)
assert isinstance(flow, Flow)
assert flow.name == "test"
def test_flow_from_bytes_warns_prefect_version_mismatch(self, monkeypatch):
s = flow_to_bytes_pickle(Flow("test"))
monkeypatch.setattr(prefect, "__version__", "0.1.0")
with pytest.warns(UserWarning, match="This flow was built using Prefect"):
flow = flow_from_bytes_pickle(s)
assert isinstance(flow, Flow)
assert flow.name == "test"
@pytest.mark.parametrize("version_mismatch", [False, True])
@pytest.mark.parametrize("import_error", [False, True])
def test_flow_from_bytes_error(self, monkeypatch, version_mismatch, import_error):
exc = ImportError("mymodule") if import_error else ValueError("Oh no!")
flow = Flow("test", tasks=[RaiseOnLoad(exc)])
s = flow_to_bytes_pickle(flow)
if version_mismatch:
monkeypatch.setattr(prefect, "__version__", "0.0.1")
monkeypatch.setattr(cloudpickle, "__version__", "0.0.2")
with pytest.raises(
FlowStorageError, match="An error occurred while unpickling"
) as exc:
flow_from_bytes_pickle(s)
msg = "mymodule" if import_error else "Oh no!"
assert msg in str(exc.value)
# Extra components only present if relevant
assert ("missing Python module" in str(exc.value)) == import_error
assert ("version mismatches" in str(exc.value)) == version_mismatch
| true | true |
f7390dce5ed924db4eacae76988830e0d21eb18c | 7,605 | py | Python | iot/api-client/http_example/cloudiot_http_example.py | bxue16/bx_byte1 | e8c2face224f16ab2d2fa927a0c944176e4dd557 | [
"Apache-2.0"
] | null | null | null | iot/api-client/http_example/cloudiot_http_example.py | bxue16/bx_byte1 | e8c2face224f16ab2d2fa927a0c944176e4dd557 | [
"Apache-2.0"
] | null | null | null | iot/api-client/http_example/cloudiot_http_example.py | bxue16/bx_byte1 | e8c2face224f16ab2d2fa927a0c944176e4dd557 | [
"Apache-2.0"
] | 1 | 2018-09-19T05:55:27.000Z | 2018-09-19T05:55:27.000Z | #!/usr/bin/env python
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python sample for connecting to Google Cloud IoT Core via HTTP, using JWT.
This example connects to Google Cloud IoT Core via HTTP, using a JWT for device
authentication. After connecting, by default the device publishes 100 messages
to the server at a rate of one per second, and then exits.
Before you run the sample, you must register your device as described in the
README in the parent folder.
"""
import argparse
import base64
import datetime
import json
import time
from google.api_core import retry
import jwt
import requests
_BASE_URL = 'https://cloudiot-device.googleapis.com/v1beta1'
_BACKOFF_DURATION = 60
def create_jwt(project_id, private_key_file, algorithm):
    """Create a signed JWT for device authentication with Cloud IoT Core.

    Args:
        project_id: GCP project id, used as the JWT audience ('aud') claim.
        private_key_file: Path to the PEM private key used to sign the token.
        algorithm: Signing algorithm, 'RS256' or 'ES256'.

    Returns:
        The encoded JWT as a str.
    """
    token = {
        # The time the token was issued.
        'iat': datetime.datetime.utcnow(),
        # Token expiration time.
        'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=60),
        # The audience field should always be set to the GCP project id.
        'aud': project_id
    }

    # Read the private key file.
    with open(private_key_file, 'r') as f:
        private_key = f.read()

    print('Creating JWT using {} from private key file {}'.format(
        algorithm, private_key_file))

    encoded = jwt.encode(token, private_key, algorithm=algorithm)
    # PyJWT < 2.0 returns bytes while PyJWT >= 2.0 returns str; normalize so
    # this works with either version (the old unconditional .decode('ascii')
    # raised AttributeError under PyJWT 2.x).
    return encoded.decode('ascii') if isinstance(encoded, bytes) else encoded
@retry.Retry(
    predicate=retry.if_exception_type(AssertionError),
    deadline=_BACKOFF_DURATION)
def publish_message(
        message, message_type, base_url, project_id, cloud_region, registry_id,
        device_id, jwt_token):
    """POST a telemetry event or state message to the HTTP device bridge.

    Raises AssertionError on any non-200 response so the @retry.Retry
    decorator re-attempts the request until _BACKOFF_DURATION expires.
    """
    headers = {
        'authorization': 'Bearer {}'.format(jwt_token),
        'content-type': 'application/json',
        'cache-control': 'no-cache'
    }

    # Events and state updates are posted to different URL suffixes.
    url_suffix = 'publishEvent' if message_type == 'event' else 'setState'
    publish_url = (
        '{}/projects/{}/locations/{}/registries/{}/devices/{}:{}').format(
            base_url, project_id, cloud_region, registry_id, device_id,
            url_suffix)

    # The bridge expects the payload base64-encoded inside a JSON body.
    encoded_payload = base64.urlsafe_b64encode(
        message.encode('utf-8')).decode('ascii')
    if message_type == 'event':
        body = {'binary_data': encoded_payload}
    else:
        body = {'state': {'binary_data': encoded_payload}}

    resp = requests.post(
        publish_url, data=json.dumps(body), headers=headers)

    if (resp.status_code != 200):
        print('Response came back {}, retrying'.format(resp.status_code))
        raise AssertionError('Not OK response: {}'.format(resp.status_code))

    return resp
@retry.Retry(
    predicate=retry.if_exception_type(AssertionError),
    deadline=_BACKOFF_DURATION)
# [START iot_http_getconfig]
def get_config(
        version, message_type, base_url, project_id, cloud_region, registry_id,
        device_id, jwt_token):
    """Fetch the device configuration from the HTTP device bridge.

    `version` of '0' requests the latest config. Raises AssertionError on any
    non-200 response so @retry.Retry re-attempts until _BACKOFF_DURATION expires.
    """
    headers = {
        'authorization': 'Bearer {}'.format(jwt_token),
        'content-type': 'application/json',
        'cache-control': 'no-cache'
    }

    basepath = '{}/projects/{}/locations/{}/registries/{}/devices/{}/'
    template = basepath + 'config?local_version={}'
    config_url = template.format(
        base_url, project_id, cloud_region, registry_id, device_id, version)

    resp = requests.get(config_url, headers=headers)

    if (resp.status_code != 200):
        print('Error getting config: {}, retrying'.format(resp.status_code))
        raise AssertionError('Not OK response: {}'.format(resp.status_code))

    return resp
# [END iot_http_getconfig]
def parse_command_line_args():
    """Parse command line arguments.

    Returns:
        argparse.Namespace with connection, auth, and publishing options.
    """
    parser = argparse.ArgumentParser(description=(
        'Example Google Cloud IoT Core HTTP device connection code.'))
    parser.add_argument(
        '--project_id', required=True, help='GCP cloud project name')
    parser.add_argument(
        '--registry_id', required=True, help='Cloud IoT Core registry id')
    parser.add_argument(
        '--device_id', required=True, help='Cloud IoT Core device id')
    parser.add_argument(
        '--private_key_file',
        required=True,
        help='Path to private key file.')
    parser.add_argument(
        '--algorithm',
        choices=('RS256', 'ES256'),
        required=True,
        help='The encryption algorithm to use to generate the JWT.')
    parser.add_argument(
        '--cloud_region', default='us-central1', help='GCP cloud region')
    parser.add_argument(
        '--ca_certs',
        default='roots.pem',
        help=('CA root from https://pki.google.com/roots.pem'))
    parser.add_argument(
        '--num_messages',
        type=int,
        default=100,
        help='Number of messages to publish.')
    parser.add_argument(
        '--message_type',
        choices=('event', 'state'),
        default='event',
        required=True,
        help=('Indicates whether the message to be published is a '
              'telemetry event or a device state message.'))
    parser.add_argument(
        '--base_url',
        default=_BASE_URL,
        help=('Base URL for the Cloud IoT Core Device Service API'))
    parser.add_argument(
        '--jwt_expires_minutes',
        default=20,
        type=int,
        help=('Expiration time, in minutes, for JWT tokens.'))

    return parser.parse_args()
def main():
    """Connect to the HTTP bridge, print the latest config, and publish messages."""
    args = parse_command_line_args()

    jwt_token = create_jwt(
        args.project_id, args.private_key_file, args.algorithm)
    jwt_iat = datetime.datetime.utcnow()
    jwt_exp_mins = args.jwt_expires_minutes

    print('Latest configuration: {}'.format(get_config(
        '0', args.message_type, args.base_url, args.project_id,
        args.cloud_region, args.registry_id, args.device_id, jwt_token).text))

    # Publish num_messages messages to the HTTP bridge once per second.
    for i in range(1, args.num_messages + 1):
        # Refresh the JWT once it is past its configured lifetime.
        seconds_since_issue = (datetime.datetime.utcnow() - jwt_iat).seconds
        if seconds_since_issue > 60 * jwt_exp_mins:
            # Bug fix: .format() must be applied to the string literal, not to
            # the None returned by print() as the original code did.
            print('Refreshing token after {}s'.format(seconds_since_issue))
            jwt_token = create_jwt(
                args.project_id, args.private_key_file, args.algorithm)
            jwt_iat = datetime.datetime.utcnow()

        payload = '{}/{}-payload-{}'.format(
            args.registry_id, args.device_id, i)
        print('Publishing message {}/{}: \'{}\''.format(
            i, args.num_messages, payload))

        resp = publish_message(
            payload, args.message_type, args.base_url, args.project_id,
            args.cloud_region, args.registry_id, args.device_id, jwt_token)
        print('HTTP response: ', resp)

        # Send events every second. State should not be updated as often
        time.sleep(1 if args.message_type == 'event' else 5)
    print('Finished.')
if __name__ == '__main__':
main()
| 35.372093 | 79 | 0.644313 |
import argparse
import base64
import datetime
import json
import time
from google.api_core import retry
import jwt
import requests
_BASE_URL = 'https://cloudiot-device.googleapis.com/v1beta1'
_BACKOFF_DURATION = 60
def create_jwt(project_id, private_key_file, algorithm):
    """Create a signed JWT (aud=project_id, 60-minute expiry) for Cloud IoT Core."""
    token = {
        'iat': datetime.datetime.utcnow(),
        'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=60),
        'aud': project_id
    }
    with open(private_key_file, 'r') as f:
        private_key = f.read()
    print('Creating JWT using {} from private key file {}'.format(
        algorithm, private_key_file))
    encoded = jwt.encode(token, private_key, algorithm=algorithm)
    # PyJWT < 2.0 returns bytes, PyJWT >= 2.0 returns str; normalize to str
    # (the unconditional .decode('ascii') crashed under PyJWT 2.x).
    return encoded.decode('ascii') if isinstance(encoded, bytes) else encoded
@retry.Retry(
predicate=retry.if_exception_type(AssertionError),
deadline=_BACKOFF_DURATION)
def publish_message(
message, message_type, base_url, project_id, cloud_region, registry_id,
device_id, jwt_token):
headers = {
'authorization': 'Bearer {}'.format(jwt_token),
'content-type': 'application/json',
'cache-control': 'no-cache'
}
url_suffix = 'publishEvent' if message_type == 'event' else 'setState'
publish_url = (
'{}/projects/{}/locations/{}/registries/{}/devices/{}:{}').format(
base_url, project_id, cloud_region, registry_id, device_id,
url_suffix)
body = None
msg_bytes = base64.urlsafe_b64encode(message.encode('utf-8'))
if message_type == 'event':
body = {'binary_data': msg_bytes.decode('ascii')}
else:
body = {
'state': {'binary_data': msg_bytes.decode('ascii')}
}
resp = requests.post(
publish_url, data=json.dumps(body), headers=headers)
if (resp.status_code != 200):
print('Response came back {}, retrying'.format(resp.status_code))
raise AssertionError('Not OK response: {}'.format(resp.status_code))
return resp
@retry.Retry(
predicate=retry.if_exception_type(AssertionError),
deadline=_BACKOFF_DURATION)
def get_config(
version, message_type, base_url, project_id, cloud_region, registry_id,
device_id, jwt_token):
headers = {
'authorization': 'Bearer {}'.format(jwt_token),
'content-type': 'application/json',
'cache-control': 'no-cache'
}
basepath = '{}/projects/{}/locations/{}/registries/{}/devices/{}/'
template = basepath + 'config?local_version={}'
config_url = template.format(
base_url, project_id, cloud_region, registry_id, device_id, version)
resp = requests.get(config_url, headers=headers)
if (resp.status_code != 200):
print('Error getting config: {}, retrying'.format(resp.status_code))
raise AssertionError('Not OK response: {}'.format(resp.status_code))
return resp
def parse_command_line_args():
parser = argparse.ArgumentParser(description=(
'Example Google Cloud IoT Core HTTP device connection code.'))
parser.add_argument(
'--project_id', required=True, help='GCP cloud project name')
parser.add_argument(
'--registry_id', required=True, help='Cloud IoT Core registry id')
parser.add_argument(
'--device_id', required=True, help='Cloud IoT Core device id')
parser.add_argument(
'--private_key_file',
required=True,
help='Path to private key file.')
parser.add_argument(
'--algorithm',
choices=('RS256', 'ES256'),
required=True,
help='The encryption algorithm to use to generate the JWT.')
parser.add_argument(
'--cloud_region', default='us-central1', help='GCP cloud region')
parser.add_argument(
'--ca_certs',
default='roots.pem',
help=('CA root from https://pki.google.com/roots.pem'))
parser.add_argument(
'--num_messages',
type=int,
default=100,
help='Number of messages to publish.')
parser.add_argument(
'--message_type',
choices=('event', 'state'),
default='event',
required=True,
help=('Indicates whether the message to be published is a '
'telemetry event or a device state message.'))
parser.add_argument(
'--base_url',
default=_BASE_URL,
help=('Base URL for the Cloud IoT Core Device Service API'))
parser.add_argument(
'--jwt_expires_minutes',
default=20,
type=int,
help=('Expiration time, in minutes, for JWT tokens.'))
return parser.parse_args()
def main():
    """Connect to the HTTP bridge, print the latest config, and publish messages."""
    args = parse_command_line_args()
    jwt_token = create_jwt(
        args.project_id, args.private_key_file, args.algorithm)
    jwt_iat = datetime.datetime.utcnow()
    jwt_exp_mins = args.jwt_expires_minutes
    print('Latest configuration: {}'.format(get_config(
        '0', args.message_type, args.base_url, args.project_id,
        args.cloud_region, args.registry_id, args.device_id, jwt_token).text))
    for i in range(1, args.num_messages + 1):
        seconds_since_issue = (datetime.datetime.utcnow() - jwt_iat).seconds
        if seconds_since_issue > 60 * jwt_exp_mins:
            # Bug fix: .format() must be applied to the string literal, not to
            # the None returned by print() as the original code did.
            print('Refreshing token after {}s'.format(seconds_since_issue))
            jwt_token = create_jwt(
                args.project_id, args.private_key_file, args.algorithm)
            jwt_iat = datetime.datetime.utcnow()
        payload = '{}/{}-payload-{}'.format(
            args.registry_id, args.device_id, i)
        print('Publishing message {}/{}: \'{}\''.format(
            i, args.num_messages, payload))
        resp = publish_message(
            payload, args.message_type, args.base_url, args.project_id,
            args.cloud_region, args.registry_id, args.device_id, jwt_token)
        print('HTTP response: ', resp)
        time.sleep(1 if args.message_type == 'event' else 5)
    print('Finished.')
if __name__ == '__main__':
main()
| true | true |
f7390e4926bf78ad6d372fb87866cacff08ee9fb | 7,677 | py | Python | tests/e2e/test_clickhouse.py | a-dot/clickhouse-operator | cecb54ad1ade33a9d748a0108e765abad9b2412e | [
"Apache-2.0"
] | null | null | null | tests/e2e/test_clickhouse.py | a-dot/clickhouse-operator | cecb54ad1ade33a9d748a0108e765abad9b2412e | [
"Apache-2.0"
] | null | null | null | tests/e2e/test_clickhouse.py | a-dot/clickhouse-operator | cecb54ad1ade33a9d748a0108e765abad9b2412e | [
"Apache-2.0"
] | null | null | null | import time
import e2e.clickhouse as clickhouse
import e2e.kubectl as kubectl
import e2e.yaml_manifest as yaml_manifest
import e2e.settings as settings
import e2e.util as util
from testflows.core import *
from testflows.asserts import error
@TestScenario
@Name("test_ch_001. Insert quorum")
def test_ch_001(self):
    """Verify insert-quorum behaviour on a two-replica cluster.

    Deploys a CHI with two replicas, creates a replicated table t1 with
    dependent materialized views into t2/t3, then stops and resumes fetches
    on one replica to exercise quorum timeouts, block deduplication, and
    TTL-based part drops.
    """
    util.require_keeper(keeper_type=self.context.keeper_type)
    quorum_template = "manifests/chit/tpl-clickhouse-21.8.yaml"
    chit_data = yaml_manifest.get_manifest_data(util.get_full_path(quorum_template))
    # Remove a leftover template from a previous run, if any.
    kubectl.launch(f"delete chit {chit_data['metadata']['name']}", ns=settings.test_namespace, ok_to_fail=True)
    kubectl.create_and_check(
        "manifests/chi/test-ch-001-insert-quorum.yaml",
        {
            "apply_templates": {quorum_template},
            "pod_count": 2,
            "do_not_delete": 1,
        })
    chi = yaml_manifest.get_chi_name(util.get_full_path("manifests/chi/test-ch-001-insert-quorum.yaml"))
    chi_data = kubectl.get("chi", ns=settings.test_namespace, name=chi)
    util.wait_clickhouse_cluster_ready(chi_data)
    host0 = "chi-test-ch-001-insert-quorum-default-0-0"
    host1 = "chi-test-ch-001-insert-quorum-default-0-1"
    create_table = """
    create table t1 on cluster default (a Int8, d Date default today())
    Engine = ReplicatedMergeTree('/clickhouse/tables/{table}', '{replica}')
    partition by d order by a
    TTL d + interval 5 second
    SETTINGS merge_with_ttl_timeout=5""".replace('\r', '').replace('\n', '')
    create_mv_table2 = """
    create table t2 on cluster default (a Int8)
    Engine = ReplicatedMergeTree('/clickhouse/tables/{table}', '{replica}')
    partition by tuple() order by a""".replace('\r', '').replace('\n', '')
    create_mv_table3 = """
    create table t3 on cluster default (a Int8)
    Engine = ReplicatedMergeTree('/clickhouse/tables/{table}', '{replica}')
    partition by tuple() order by a""".replace('\r', '').replace('\n', '')
    create_mv2 = "create materialized view t_mv2 on cluster default to t2 as select a from t1"
    create_mv3 = "create materialized view t_mv3 on cluster default to t3 as select a from t1"
    with Given("Tables t1, t2, t3 and MVs t1->t2, t1-t3 are created"):
        clickhouse.query(chi, create_table)
        clickhouse.query(chi, create_mv_table2)
        clickhouse.query(chi, create_mv_table3)
        clickhouse.query(chi, create_mv2)
        clickhouse.query(chi, create_mv3)
    with When("Add a row to an old partition"):
        clickhouse.query(chi, "insert into t1(a,d) values(6, today()-1)", host=host0)
    with When("Stop fetches for t1 at replica1"):
        clickhouse.query(chi, "system stop fetches default.t1", host=host1)
        with Then("Wait 10 seconds and the data should be dropped by TTL"):
            time.sleep(10)
            out = clickhouse.query(chi, "select count() from t1 where a=6", host=host0)
            assert out == "0", error()
    with When("Resume fetches for t1 at replica1"):
        clickhouse.query(chi, "system start fetches default.t1", host=host1)
        time.sleep(5)
        with Then("Inserts should resume"):
            clickhouse.query(chi, "insert into t1(a) values(7)", host=host0)
            clickhouse.query(chi, "insert into t1(a) values(1)")
    with When("Stop fetches for t2 at replica1"):
        clickhouse.query(chi, "system stop fetches default.t2", host=host1)
        with Then("Insert should fail since it can not reach the quorum"):
            out = clickhouse.query_with_error(chi, "insert into t1(a) values(2)", host=host0)
            assert "Timeout while waiting for quorum" in out, error()
    # kubectl(f"exec {host0}-0 -n test -- cp /var/lib//clickhouse/data/default/t2/all_1_1_0/a.mrk2 /var/lib//clickhouse/data/default/t2/all_1_1_0/a.bin")
    # with Then("Corrupt data part in t2"):
    #     kubectl(f"exec {host0}-0 -n test -- sed -i \"s/b/c/\" /var/lib/clickhouse/data/default/t2/all_1_1_0/a.bin")
    with When("Resume fetches for t2 at replica1"):
        clickhouse.query(chi, "system start fetches default.t2", host=host1)
        i = 0
        while "2" != clickhouse.query(chi, "select active_replicas from system.replicas where database='default' and table='t1'", pod=host0) and i < 10:
            with Then("Not ready, wait 5 seconds"):
                time.sleep(5)
                i += 1
        with Then("Inserts should fail with an error regarding not satisfied quorum"):
            out = clickhouse.query_with_error(chi, "insert into t1(a) values(3)", host=host0)
            assert "Quorum for previous write has not been satisfied yet" in out, error()
        with And("Second insert of the same block should pass"):
            clickhouse.query(chi, "insert into t1(a) values(3)", host=host0)
        with And("Insert of the new block should fail"):
            out = clickhouse.query_with_error(chi, "insert into t1(a) values(4)", host=host0)
            assert "Quorum for previous write has not been satisfied yet" in out, error()
        with And("Second insert of the same block with 'deduplicate_blocks_in_dependent_materialized_views' setting should fail"):
            out = clickhouse.query_with_error(
                chi,
                "set deduplicate_blocks_in_dependent_materialized_views=1; insert into t1(a) values(5)",
                host=host0
            )
            assert "Quorum for previous write has not been satisfied yet" in out, error()
    out = clickhouse.query_with_error(
        chi, "select t1.a t1_a, t2.a t2_a from t1 left outer join t2 using (a) order by t1_a settings join_use_nulls=1"
    )
    note(out)
    # cat /var/log/clickhouse-server/clickhouse-server.log | grep t2 | grep -E "all_1_1_0|START|STOP"
@TestScenario
@Name("test_ch_002. Row-level security")
def test_ch_002(self):
    """Verify per-user row-level security: each user only sees its own team's rows."""
    kubectl.create_and_check(
        "manifests/chi/test-ch-002-row-level.yaml",
        {
            "apply_templates": {"manifests/chit/tpl-clickhouse-21.8.yaml"},
            "do_not_delete": 1,
        })
    chi = "test-ch-002-row-level"
    create_table = """create table test (d Date default today(), team LowCardinality(String), user String) Engine = MergeTree() PARTITION BY d ORDER BY d;"""
    with When("Create test table"):
        clickhouse.query(chi, create_table)
    with And("Insert some data"):
        clickhouse.query(
            chi, "INSERT INTO test(team, user) values('team1', 'user1'),('team2', 'user2'),('team3', 'user3'),('team4', 'user4')"
        )
    with Then("Make another query for different users. It should be restricted to corresponding team by row-level security"):
        for user in ['user1', 'user2', 'user3', 'user4']:
            out = clickhouse.query(chi, "select user from test", user=user, pwd=user)
            assert out == user, error()
    with Then("Make a count() query for different users. It should be restricted to corresponding team by row-level security"):
        for user in ['user1', 'user2', 'user3', 'user4']:
            out = clickhouse.query(chi, "select count() from test", user=user, pwd=user)
            assert out == "1", error()
    kubectl.delete_chi(chi)
@TestFeature
@Name("e2e.test_clickhouse")
def test(self):
    """Feature entry point: run every ClickHouse e2e scenario in order."""
    util.clean_namespace(delete_chi=False)
    scenarios = [
        test_ch_001,
        test_ch_002,
    ]
    # During development, replace `selected` with a subset, e.g. [test_ch_002].
    selected = scenarios
    for scenario in selected:
        Scenario(test=scenario)()
| 42.65 | 157 | 0.639703 | import time
import e2e.clickhouse as clickhouse
import e2e.kubectl as kubectl
import e2e.yaml_manifest as yaml_manifest
import e2e.settings as settings
import e2e.util as util
from testflows.core import *
from testflows.asserts import error
@TestScenario
@Name("test_ch_001. Insert quorum")
def test_ch_001(self):
util.require_keeper(keeper_type=self.context.keeper_type)
quorum_template = "manifests/chit/tpl-clickhouse-21.8.yaml"
chit_data = yaml_manifest.get_manifest_data(util.get_full_path(quorum_template))
kubectl.launch(f"delete chit {chit_data['metadata']['name']}", ns=settings.test_namespace, ok_to_fail=True)
kubectl.create_and_check(
"manifests/chi/test-ch-001-insert-quorum.yaml",
{
"apply_templates": {quorum_template},
"pod_count": 2,
"do_not_delete": 1,
})
chi = yaml_manifest.get_chi_name(util.get_full_path("manifests/chi/test-ch-001-insert-quorum.yaml"))
chi_data = kubectl.get("chi", ns=settings.test_namespace, name=chi)
util.wait_clickhouse_cluster_ready(chi_data)
host0 = "chi-test-ch-001-insert-quorum-default-0-0"
host1 = "chi-test-ch-001-insert-quorum-default-0-1"
create_table = """
create table t1 on cluster default (a Int8, d Date default today())
Engine = ReplicatedMergeTree('/clickhouse/tables/{table}', '{replica}')
partition by d order by a
TTL d + interval 5 second
SETTINGS merge_with_ttl_timeout=5""".replace('\r', '').replace('\n', '')
create_mv_table2 = """
create table t2 on cluster default (a Int8)
Engine = ReplicatedMergeTree('/clickhouse/tables/{table}', '{replica}')
partition by tuple() order by a""".replace('\r', '').replace('\n', '')
create_mv_table3 = """
create table t3 on cluster default (a Int8)
Engine = ReplicatedMergeTree('/clickhouse/tables/{table}', '{replica}')
partition by tuple() order by a""".replace('\r', '').replace('\n', '')
create_mv2 = "create materialized view t_mv2 on cluster default to t2 as select a from t1"
create_mv3 = "create materialized view t_mv3 on cluster default to t3 as select a from t1"
with Given("Tables t1, t2, t3 and MVs t1->t2, t1-t3 are created"):
clickhouse.query(chi, create_table)
clickhouse.query(chi, create_mv_table2)
clickhouse.query(chi, create_mv_table3)
clickhouse.query(chi, create_mv2)
clickhouse.query(chi, create_mv3)
with When("Add a row to an old partition"):
clickhouse.query(chi, "insert into t1(a,d) values(6, today()-1)", host=host0)
with When("Stop fetches for t1 at replica1"):
clickhouse.query(chi, "system stop fetches default.t1", host=host1)
with Then("Wait 10 seconds and the data should be dropped by TTL"):
time.sleep(10)
out = clickhouse.query(chi, "select count() from t1 where a=6", host=host0)
assert out == "0", error()
with When("Resume fetches for t1 at replica1"):
clickhouse.query(chi, "system start fetches default.t1", host=host1)
time.sleep(5)
with Then("Inserts should resume"):
clickhouse.query(chi, "insert into t1(a) values(7)", host=host0)
clickhouse.query(chi, "insert into t1(a) values(1)")
with When("Stop fetches for t2 at replica1"):
clickhouse.query(chi, "system stop fetches default.t2", host=host1)
with Then("Insert should fail since it can not reach the quorum"):
out = clickhouse.query_with_error(chi, "insert into t1(a) values(2)", host=host0)
assert "Timeout while waiting for quorum" in out, error()
with When("Resume fetches for t2 at replica1"):
clickhouse.query(chi, "system start fetches default.t2", host=host1)
i = 0
while "2" != clickhouse.query(chi, "select active_replicas from system.replicas where database='default' and table='t1'", pod=host0) and i < 10:
with Then("Not ready, wait 5 seconds"):
time.sleep(5)
i += 1
with Then("Inserts should fail with an error regarding not satisfied quorum"):
out = clickhouse.query_with_error(chi, "insert into t1(a) values(3)", host=host0)
assert "Quorum for previous write has not been satisfied yet" in out, error()
with And("Second insert of the same block should pass"):
clickhouse.query(chi, "insert into t1(a) values(3)", host=host0)
with And("Insert of the new block should fail"):
out = clickhouse.query_with_error(chi, "insert into t1(a) values(4)", host=host0)
assert "Quorum for previous write has not been satisfied yet" in out, error()
with And("Second insert of the same block with 'deduplicate_blocks_in_dependent_materialized_views' setting should fail"):
out = clickhouse.query_with_error(
chi,
"set deduplicate_blocks_in_dependent_materialized_views=1; insert into t1(a) values(5)",
host=host0
)
assert "Quorum for previous write has not been satisfied yet" in out, error()
out = clickhouse.query_with_error(
chi, "select t1.a t1_a, t2.a t2_a from t1 left outer join t2 using (a) order by t1_a settings join_use_nulls=1"
)
note(out)
@TestScenario
@Name("test_ch_002. Row-level security")
def test_ch_002(self):
kubectl.create_and_check(
"manifests/chi/test-ch-002-row-level.yaml",
{
"apply_templates": {"manifests/chit/tpl-clickhouse-21.8.yaml"},
"do_not_delete": 1,
})
chi = "test-ch-002-row-level"
create_table = """create table test (d Date default today(), team LowCardinality(String), user String) Engine = MergeTree() PARTITION BY d ORDER BY d;"""
with When("Create test table"):
clickhouse.query(chi, create_table)
with And("Insert some data"):
clickhouse.query(
chi, "INSERT INTO test(team, user) values('team1', 'user1'),('team2', 'user2'),('team3', 'user3'),('team4', 'user4')"
)
with Then("Make another query for different users. It should be restricted to corresponding team by row-level security"):
for user in ['user1', 'user2', 'user3', 'user4']:
out = clickhouse.query(chi, "select user from test", user=user, pwd=user)
assert out == user, error()
with Then("Make a count() query for different users. It should be restricted to corresponding team by row-level security"):
for user in ['user1', 'user2', 'user3', 'user4']:
out = clickhouse.query(chi, "select count() from test", user=user, pwd=user)
assert out == "1", error()
kubectl.delete_chi(chi)
@TestFeature
@Name("e2e.test_clickhouse")
def test(self):
util.clean_namespace(delete_chi=False)
all_tests = [
test_ch_001,
test_ch_002,
]
run_test = all_tests
for t in run_test:
Scenario(test=t)()
| true | true |
f7390f2181b2fb75bb45374f2417c1618860d2fb | 387 | py | Python | djnic/djnic/wsgi.py | avdata99/nic | 70399bd78fd2b4b496d338e7959867ad12cdf477 | [
"MIT"
] | 8 | 2021-05-01T13:03:22.000Z | 2021-12-17T21:50:04.000Z | djnic/djnic/wsgi.py | avdata99/nic | 70399bd78fd2b4b496d338e7959867ad12cdf477 | [
"MIT"
] | 16 | 2020-11-20T23:18:22.000Z | 2021-04-08T20:09:35.000Z | djnic/djnic/wsgi.py | OpenDataCordoba/nic | f9528856e13d106bdfb476cab1236bc5b8a92183 | [
"MIT"
] | null | null | null | """
WSGI config for djnic project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djnic.settings')
application = get_wsgi_application()
| 22.764706 | 78 | 0.782946 |
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djnic.settings')
application = get_wsgi_application()
| true | true |
f7390f52affc6ece0f1ab2fdfedbef5e4b8f2be3 | 7,721 | py | Python | doc/conf.py | dbolshak/secure-xgboost | fd4f240fc35b07329b39795fc17a5ad5540656ec | [
"Apache-2.0"
] | 77 | 2019-10-28T04:41:29.000Z | 2022-03-04T03:56:47.000Z | doc/conf.py | dbolshak/secure-xgboost | fd4f240fc35b07329b39795fc17a5ad5540656ec | [
"Apache-2.0"
] | 61 | 2020-02-25T21:18:39.000Z | 2020-06-20T10:13:39.000Z | doc/conf.py | chester-leung/mc2-xgboost | 71c00b7c597a7145aae179980052bff79ec61846 | [
"Apache-2.0"
] | 27 | 2019-10-02T00:04:18.000Z | 2022-03-02T07:57:46.000Z | # -*- coding: utf-8 -*-
#
# documentation build configuration file, created by
# sphinx-quickstart on Thu Jul 23 19:40:08 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from subprocess import call
# from sh.contrib import git
# import urllib.request
# from urllib.error import HTTPError
from recommonmark.parser import CommonMarkParser
import sys
import re
import os, subprocess
import shlex
# import guzzle_sphinx_theme
# git_branch = os.getenv('SPHINX_GIT_BRANCH', default=None)
# if git_branch is None:
# # If SPHINX_GIT_BRANCH environment variable is not given, run git to determine branch name
# git_branch = [re.sub(r'origin/', '', x.lstrip(' ')) for x in str(git.branch('-r', '--contains', 'HEAD')).rstrip('\n').split('\n')]
# git_branch = [x for x in git_branch if 'HEAD' not in x]
# print('git_branch = {}'.format(git_branch[0]))
# try:
# filename, _ = urllib.request.urlretrieve('https://s3-us-west-2.amazonaws.com/xgboost-docs/{}.tar.bz2'.format(git_branch[0]))
# call('if [ -d tmp ]; then rm -rf tmp; fi; mkdir -p tmp/jvm; cd tmp/jvm; tar xvf {}'.format(filename), shell=True)
# except HTTPError:
# print('JVM doc not found. Skipping...')
# try:
# filename, _ = urllib.request.urlretrieve('https://s3-us-west-2.amazonaws.com/xgboost-docs/doxygen/{}.tar.bz2'.format(git_branch[0]))
# call('mkdir -p tmp/dev; cd tmp/dev; tar xvf {}; mv doc_doxygen/html/* .; rm -rf doc_doxygen'.format(filename), shell=True)
# except HTTPError:
# print('C API doc not found. Skipping...')
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))
libpath = os.path.join(curr_path, '../python-package/')
sys.path.insert(0, libpath)
sys.path.insert(0, curr_path)
# -- mock out modules
import mock
MOCK_MODULES = ['scipy', 'scipy.sparse', 'sklearn', 'pandas']
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = mock.Mock()
# -- General configuration ------------------------------------------------
# General information about the project.
project = u'Secure XGBoost'
author = u'%s developers' % project
copyright = u'2020, %s' % author
github_doc_root = 'https://github.com/mc2-project/secure-xgboost/tree/master/doc'
os.environ['XGBOOST_BUILD_DOC'] = '1'
# Version information.
import securexgboost
# version = securexgboost.__version__
# release = securexgboost.__version__
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones
extensions = [
'matplotlib.sphinxext.plot_directive',
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.mathjax',
'sphinx.ext.intersphinx',
'breathe',
'sphinx.ext.autosectionlabel'
]
graphviz_output_format = 'png'
plot_formats = [('svg', 300), ('png', 100), ('hires.png', 300)]
plot_html_show_source_link = False
plot_html_show_formats = False
# Breathe extension variables
breathe_projects = {"xgboost": "doxyxml/"}
breathe_default_project = "xgboost"
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
source_parsers = {
'.md': CommonMarkParser,
}
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
source_suffix = ['.rst', '.md']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
autoclass_content = 'both'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
html_extra_path = ['./tmp']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "furo"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
"light_css_variables": {
"color-brand-primary": "#00B0FF",
"color-brand-content": "#00B0FF",
"color-admonition-background": "#F0F0F0",
},
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Output file base name for HTML help builder.
htmlhelp_basename = project + 'doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, '%s.tex' % project, project,
author, 'manual'),
]
intersphinx_mapping = {'python': ('https://docs.python.org/3.6', None),
'numpy': ('http://docs.scipy.org/doc/numpy/', None),
'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None),
'pandas': ('http://pandas-docs.github.io/pandas-docs-travis/', None),
'sklearn': ('http://scikit-learn.org/stable', None)}
# hook for doxygen
def run_doxygen(folder):
"""Run the doxygen make command in the designated folder."""
try:
retcode = subprocess.call("cd %s; make doxygen" % folder, shell=True)
if retcode < 0:
sys.stderr.write("doxygen terminated by signal %s" % (-retcode))
except OSError as e:
sys.stderr.write("doxygen execution failed: %s" % e)
def generate_doxygen_xml(app):
"""Run the doxygen make commands if we're on the ReadTheDocs server"""
read_the_docs_build = os.environ.get('READTHEDOCS', None) == 'True'
if read_the_docs_build:
run_doxygen('..')
def setup(app):
app.add_stylesheet('custom.css')
| 35.417431 | 137 | 0.696671 |
from subprocess import call
from recommonmark.parser import CommonMarkParser
import sys
import re
import os, subprocess
import shlex
))
libpath = os.path.join(curr_path, '../python-package/')
sys.path.insert(0, libpath)
sys.path.insert(0, curr_path)
import mock
MOCK_MODULES = ['scipy', 'scipy.sparse', 'sklearn', 'pandas']
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = mock.Mock()
project = u'Secure XGBoost'
author = u'%s developers' % project
copyright = u'2020, %s' % author
github_doc_root = 'https://github.com/mc2-project/secure-xgboost/tree/master/doc'
os.environ['XGBOOST_BUILD_DOC'] = '1'
import securexgboost
extensions = [
'matplotlib.sphinxext.plot_directive',
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.mathjax',
'sphinx.ext.intersphinx',
'breathe',
'sphinx.ext.autosectionlabel'
]
graphviz_output_format = 'png'
plot_formats = [('svg', 300), ('png', 100), ('hires.png', 300)]
plot_html_show_source_link = False
plot_html_show_formats = False
breathe_projects = {"xgboost": "doxyxml/"}
breathe_default_project = "xgboost"
templates_path = ['_templates']
source_parsers = {
'.md': CommonMarkParser,
}
source_suffix = ['.rst', '.md']
master_doc = 'index'
language = None
autoclass_content = 'both'
exclude_patterns = ['_build']
html_extra_path = ['./tmp']
pygments_style = 'sphinx'
todo_include_todos = False
html_theme = "furo"
html_theme_options = {
"light_css_variables": {
"color-brand-primary": "#00B0FF",
"color-brand-content": "#00B0FF",
"color-admonition-background": "#F0F0F0",
},
}
html_static_path = ['_static']
htmlhelp_basename = project + 'doc'
latex_elements = {
}
latex_documents = [
(master_doc, '%s.tex' % project, project,
author, 'manual'),
]
intersphinx_mapping = {'python': ('https://docs.python.org/3.6', None),
'numpy': ('http://docs.scipy.org/doc/numpy/', None),
'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None),
'pandas': ('http://pandas-docs.github.io/pandas-docs-travis/', None),
'sklearn': ('http://scikit-learn.org/stable', None)}
def run_doxygen(folder):
try:
retcode = subprocess.call("cd %s; make doxygen" % folder, shell=True)
if retcode < 0:
sys.stderr.write("doxygen terminated by signal %s" % (-retcode))
except OSError as e:
sys.stderr.write("doxygen execution failed: %s" % e)
def generate_doxygen_xml(app):
read_the_docs_build = os.environ.get('READTHEDOCS', None) == 'True'
if read_the_docs_build:
run_doxygen('..')
def setup(app):
app.add_stylesheet('custom.css')
| true | true |
f7390f5e6066b32bfb5e0deb3646be9a163e3667 | 25,060 | py | Python | SimG4Core/Application/python/g4SimHits_cfi.py | AFJohan92/cmssw | c5b36f05986c35998ddd4c873dc6812646579744 | [
"Apache-2.0"
] | 2 | 2020-01-21T11:23:39.000Z | 2020-01-21T11:23:42.000Z | SimG4Core/Application/python/g4SimHits_cfi.py | AFJohan92/cmssw | c5b36f05986c35998ddd4c873dc6812646579744 | [
"Apache-2.0"
] | null | null | null | SimG4Core/Application/python/g4SimHits_cfi.py | AFJohan92/cmssw | c5b36f05986c35998ddd4c873dc6812646579744 | [
"Apache-2.0"
] | 3 | 2019-03-09T13:06:43.000Z | 2020-07-03T00:47:30.000Z | import FWCore.ParameterSet.Config as cms
from SimG4Core.Application.hectorParameter_cfi import *
## HF Raddam Dose Class in /SimG4CMS/Calo
from SimG4CMS.Calo.HFDarkeningParams_cff import *
## This object is used to customise g4SimHits for different running scenarios
common_heavy_suppression = cms.PSet(
NeutronThreshold = cms.double(30.0),
ProtonThreshold = cms.double(30.0),
IonThreshold = cms.double(30.0)
)
common_maximum_time = cms.PSet(
MaxTrackTime = cms.double(500.0),
MaxTimeNames = cms.vstring('ZDCRegion'),
MaxTrackTimes = cms.vdouble(2000.0),
#DeadRegions = cms.vstring('QuadRegion','CastorRegion','InterimRegion'),
DeadRegions = cms.vstring('QuadRegion','InterimRegion'),
CriticalEnergyForVacuum = cms.double(2.0),
CriticalDensity = cms.double(1e-15)
)
common_UsePMT = cms.PSet(
UseR7600UPMT = cms.bool(False)
)
common_UseHF = cms.PSet(
Lambda1 = cms.double(280.0),
Lambda2 = cms.double(700.0),
Gain = cms.double(0.33),
CheckSurvive = cms.bool(False),
FibreR = cms.untracked.double(0.3)
)
common_UseLuminosity = cms.PSet(
InstLuminosity = cms.double(0.),
DelivLuminosity = cms.double(5000.)
)
g4SimHits = cms.EDProducer("OscarMTProducer",
g4GeometryDD4hepSource = cms.bool(False),
NonBeamEvent = cms.bool(False),
G4EventManagerVerbosity = cms.untracked.int32(0),
UseMagneticField = cms.bool(True),
StoreRndmSeeds = cms.bool(False),
RestoreRndmSeeds = cms.bool(False),
PhysicsTablesDirectory = cms.untracked.string('PhysicsTables'),
StorePhysicsTables = cms.untracked.bool(False),
RestorePhysicsTables = cms.untracked.bool(False),
UseParametrisedEMPhysics = cms.untracked.bool(True),
CheckGeometry = cms.untracked.bool(False),
G4CheckOverlap = cms.untracked.PSet(
OutputBaseName = cms.string('2017'),
MaterialFlag = cms.bool(True),
GeomFlag = cms.bool(True),
OverlapFlag = cms.bool(False),
RegionFlag = cms.bool(True), # if true - selection by G4Region name
gdmlFlag = cms.bool(False), # if true - dump gdml file
Verbose = cms.bool(True),
Tolerance = cms.double(0.0),
Resolution = cms.int32(10000),
ErrorThreshold = cms.int32(1),
Level = cms.int32(1),
Depth = cms.int32(3), # -1 means check whatever depth
PVname = cms.string(''),
LVname = cms.string(''),
NodeNames = cms.vstring('World')
),
G4Commands = cms.vstring(),
SteppingVerbosity = cms.untracked.int32(0),
StepVerboseThreshold = cms.untracked.double(0.1), # in GeV
VerboseEvents = cms.untracked.vint32(),
VertexNumber = cms.untracked.vint32(),
VerboseTracks = cms.untracked.vint32(),
FileNameField = cms.untracked.string(''),
FileNameGDML = cms.untracked.string(''),
FileNameRegions = cms.untracked.string(''),
Watchers = cms.VPSet(),
HepMCProductLabel = cms.InputTag("generatorSmeared"),
theLHCTlinkTag = cms.InputTag("LHCTransport"),
CustomUIsession = cms.untracked.PSet(
Type = cms.untracked.string("MessageLogger"), # alternatives: MessageLoggerThreadPrefix, FilePerThread
ThreadPrefix = cms.untracked.string("W"), # for MessageLoggerThreadPrefix
ThreadFile = cms.untracked.string("sim_output_thread"), # for FilePerThread
),
MagneticField = cms.PSet(
UseLocalMagFieldManager = cms.bool(False),
Verbosity = cms.bool(False),
ConfGlobalMFM = cms.PSet(
Volume = cms.string('OCMS'),
OCMS = cms.PSet(
Stepper = cms.string('G4DormandPrince745'),
Type = cms.string('CMSIMField'),
StepperParam = cms.PSet(
VacRegions = cms.vstring(),
# VacRegions = cms.vstring('DefaultRegionForTheWorld','BeamPipeVacuum','BeamPipeOutside'),
MaximumEpsilonStep = cms.untracked.double(0.01), ## in mm
DeltaOneStep = cms.double(0.001), ## in mm
MaximumLoopCounts = cms.untracked.double(1000.0),
DeltaChord = cms.double(0.001), ## in mm
MinStep = cms.double(0.1), ## in mm
DeltaIntersectionAndOneStep = cms.untracked.double(-1.0),
DeltaIntersection = cms.double(0.0001),## in mm
MaxStep = cms.double(150.), ## in cm
MinimumEpsilonStep = cms.untracked.double(1e-05), ## in mm
EnergyThSimple = cms.double(0.015), ## in GeV
DeltaChordSimple = cms.double(0.1), ## in mm
DeltaOneStepSimple = cms.double(0.1), ## in mm
DeltaIntersectionSimple = cms.double(0.01), ## in mm
MaxStepSimple = cms.double(50.), ## in cm
)
)
),
delta = cms.double(1.0)
),
Physics = cms.PSet(
common_maximum_time,
# NOTE : if you want EM Physics only,
# please select "SimG4Core/Physics/DummyPhysics" for type
# and turn ON DummyEMPhysics
#
type = cms.string('SimG4Core/Physics/FTFP_BERT_EMM'),
DummyEMPhysics = cms.bool(False),
CutsPerRegion = cms.bool(True),
CutsOnProton = cms.bool(True),
DefaultCutValue = cms.double(1.0), ## cuts in cm
G4BremsstrahlungThreshold = cms.double(0.5), ## cut in GeV
Verbosity = cms.untracked.int32(0),
# 1 will print cuts as they get set from DD
# 2 will do as 1 + will dump Geant4 table of cuts
MonopoleCharge = cms.untracked.int32(1),
MonopoleDeltaRay = cms.untracked.bool(True),
MonopoleMultiScatter = cms.untracked.bool(False),
MonopoleTransport = cms.untracked.bool(True),
MonopoleMass = cms.untracked.double(0),
ExoticaTransport = cms.untracked.bool(False),
ExoticaPhysicsSS = cms.untracked.bool(False),
RhadronPhysics = cms.bool(False),
DarkMPFactor = cms.double(1.0),
Region = cms.string(''),
TrackingCut = cms.bool(False),
SRType = cms.bool(True),
FlagMuNucl = cms.bool(False),
FlagFluo = cms.bool(False),
EMPhysics = cms.untracked.bool(True),
HadPhysics = cms.untracked.bool(True),
FlagBERT = cms.untracked.bool(False),
EminFTFP = cms.double(3.), # in GeV
EmaxBERT = cms.double(6.), # in GeV
EminQGSP = cms.double(12.), # in GeV
EmaxFTFP = cms.double(30.), # in GeV
EmaxBERTpi = cms.double(12.), # in GeV
LowEnergyGflashEcal = cms.bool(False),
LowEnergyGflashEcalEmax = cms.double(100),
GflashEcal = cms.bool(False),
GflashHcal = cms.bool(False),
GflashEcalHad = cms.bool(False),
GflashHcalHad = cms.bool(False),
bField = cms.double(3.8),
energyScaleEB = cms.double(1.032),
energyScaleEE = cms.double(1.024),
ThermalNeutrons = cms.untracked.bool(False),
RusRoElectronEnergyLimit = cms.double(0.0),
RusRoEcalElectron = cms.double(1.0),
RusRoHcalElectron = cms.double(1.0),
RusRoMuonIronElectron = cms.double(1.0),
RusRoPreShowerElectron = cms.double(1.0),
RusRoCastorElectron = cms.double(1.0),
RusRoWorldElectron = cms.double(1.0),
ElectronStepLimit = cms.bool(False),
ElectronRangeTest = cms.bool(False),
PositronStepLimit = cms.bool(False),
ProtonRegionLimit = cms.bool(False),
PionRegionLimit = cms.bool(False),
LimitsPerRegion = cms.vstring('EcalRegion','HcalRegion'),
EnergyLimitsE = cms.vdouble(0.,0.0),
EnergyLimitsH = cms.vdouble(0.,0.0),
EnergyFactorsE = cms.vdouble(1.,0.0),
EnergyRMSE = cms.vdouble(0.0,0.0),
MinStepLimit = cms.double(1.0),
ModifyTransportation = cms.bool(False),
ThresholdWarningEnergy = cms.untracked.double(100.0),
ThresholdImportantEnergy = cms.untracked.double(250.0),
ThresholdTrials = cms.untracked.int32(10)
),
Generator = cms.PSet(
HectorEtaCut,
HepMCProductLabel = cms.InputTag('generatorSmeared'),
ApplyPCuts = cms.bool(True),
ApplyPtransCut = cms.bool(False),
MinPCut = cms.double(0.04), ## the cut is in GeV
MaxPCut = cms.double(99999.0), ## the pmax=99.TeV
ApplyEtaCuts = cms.bool(True),
MinEtaCut = cms.double(-5.5),
MaxEtaCut = cms.double(5.5),
RDecLenCut = cms.double(2.9), ## (cm) the cut on vertex radius
LDecLenCut = cms.double(30.0), ## (cm) decay volume length
ApplyPhiCuts = cms.bool(False),
MinPhiCut = cms.double(-3.14159265359), ## (radians)
MaxPhiCut = cms.double(3.14159265359), ## according to CMS conventions
ApplyLumiMonitorCuts = cms.bool(False), ## primary for lumi monitors
Verbosity = cms.untracked.int32(0),
PDGselection = cms.PSet(
PDGfilterSel = cms.bool(False), ## filter out unwanted particles
PDGfilter = cms.vint32(21,1,2,3,4,5,6) ## list of unwanted particles (gluons and quarks)
)
),
RunAction = cms.PSet(
StopFile = cms.string('')
),
EventAction = cms.PSet(
debug = cms.untracked.bool(False),
StopFile = cms.string(''),
PrintRandomSeed = cms.bool(False),
CollapsePrimaryVertices = cms.bool(False)
),
StackingAction = cms.PSet(
common_heavy_suppression,
common_maximum_time,
KillDeltaRay = cms.bool(False),
TrackNeutrino = cms.bool(False),
KillHeavy = cms.bool(False),
KillGamma = cms.bool(True),
GammaThreshold = cms.double(0.0001), ## (MeV)
SaveFirstLevelSecondary = cms.untracked.bool(False),
SavePrimaryDecayProductsAndConversionsInTracker = cms.untracked.bool(False),
SavePrimaryDecayProductsAndConversionsInCalo = cms.untracked.bool(False),
SavePrimaryDecayProductsAndConversionsInMuon = cms.untracked.bool(False),
SaveAllPrimaryDecayProductsAndConversions = cms.untracked.bool(True),
RusRoGammaEnergyLimit = cms.double(5.0), ## (MeV)
RusRoEcalGamma = cms.double(0.3),
RusRoHcalGamma = cms.double(0.3),
RusRoMuonIronGamma = cms.double(0.3),
RusRoPreShowerGamma = cms.double(0.3),
RusRoCastorGamma = cms.double(0.3),
RusRoWorldGamma = cms.double(0.3),
RusRoNeutronEnergyLimit = cms.double(10.0), ## (MeV)
RusRoEcalNeutron = cms.double(0.1),
RusRoHcalNeutron = cms.double(0.1),
RusRoMuonIronNeutron = cms.double(0.1),
RusRoPreShowerNeutron = cms.double(0.1),
RusRoCastorNeutron = cms.double(0.1),
RusRoWorldNeutron = cms.double(0.1),
RusRoProtonEnergyLimit = cms.double(0.0),
RusRoEcalProton = cms.double(1.0),
RusRoHcalProton = cms.double(1.0),
RusRoMuonIronProton = cms.double(1.0),
RusRoPreShowerProton = cms.double(1.0),
RusRoCastorProton = cms.double(1.0),
RusRoWorldProton = cms.double(1.0)
),
TrackingAction = cms.PSet(
DetailedTiming = cms.untracked.bool(False),
CheckTrack = cms.untracked.bool(False)
),
SteppingAction = cms.PSet(
common_maximum_time,
EkinNames = cms.vstring(),
EkinThresholds = cms.vdouble(),
EkinParticles = cms.vstring()
),
TrackerSD = cms.PSet(
ZeroEnergyLoss = cms.bool(False),
PrintHits = cms.bool(False),
ElectronicSigmaInNanoSeconds = cms.double(12.06),
NeverAccumulate = cms.bool(False),
EnergyThresholdForPersistencyInGeV = cms.double(0.2),
EnergyThresholdForHistoryInGeV = cms.double(0.05)
),
MuonSD = cms.PSet(
EnergyThresholdForPersistency = cms.double(1.0),
PrintHits = cms.bool(False),
AllMuonsPersistent = cms.bool(True)
),
CaloSD = cms.PSet(
common_heavy_suppression,
SuppressHeavy = cms.bool(False),
EminTrack = cms.double(1.0),
TmaxHit = cms.double(1000.0),
HCNames = cms.vstring('EcalHitsEB','EcalHitsEE','EcalHitsES','HcalHits','ZDCHITS'),
EminHits = cms.vdouble(0.015,0.010,0.0,0.0,0.0),
EminHitsDepth = cms.vdouble(0.0,0.0,0.0,0.0,0.0),
TmaxHits = cms.vdouble(500.0,500.0,500.0,500.0,2000.0),
UseResponseTables = cms.vint32(0,0,0,0,0),
BeamPosition = cms.double(0.0),
CorrectTOFBeam = cms.bool(False),
UseFineCaloID = cms.bool(False),
DetailedTiming = cms.untracked.bool(False),
UseMap = cms.untracked.bool(False),
Verbosity = cms.untracked.int32(0),
CheckHits = cms.untracked.int32(25)
),
CaloResponse = cms.PSet(
UseResponseTable = cms.bool(True),
ResponseScale = cms.double(1.0),
ResponseFile = cms.FileInPath('SimG4CMS/Calo/data/responsTBpim50.dat')
),
ECalSD = cms.PSet(
common_UseLuminosity,
UseBirkLaw = cms.bool(True),
BirkL3Parametrization = cms.bool(True),
BirkSlope = cms.double(0.253694),
BirkCut = cms.double(0.1),
BirkC1 = cms.double(0.03333),
BirkC3 = cms.double(1.0),
BirkC2 = cms.double(0.0),
SlopeLightYield = cms.double(0.02),
StoreSecondary = cms.bool(False),
TimeSliceUnit = cms.double(1),
IgnoreTrackID = cms.bool(False),
XtalMat = cms.untracked.string('E_PbWO4'),
TestBeam = cms.untracked.bool(False),
NullNumbering = cms.untracked.bool(False),
StoreRadLength = cms.untracked.bool(False),
ScaleRadLength = cms.untracked.double(1.0),
StoreLayerTimeSim = cms.untracked.bool(False),
AgeingWithSlopeLY = cms.untracked.bool(False)
),
HCalSD = cms.PSet(
common_UseLuminosity,
UseBirkLaw = cms.bool(True),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.142),
BirkC1 = cms.double(0.0060),
UseShowerLibrary = cms.bool(True),
UseParametrize = cms.bool(False),
UsePMTHits = cms.bool(False),
UseFibreBundleHits = cms.bool(False),
TestNumberingScheme = cms.bool(False),
doNeutralDensityFilter = cms.bool(False),
EminHitHB = cms.double(0.0),
EminHitHE = cms.double(0.0),
EminHitHO = cms.double(0.0),
EminHitHF = cms.double(0.0),
BetaThreshold = cms.double(0.7),
TimeSliceUnit = cms.double(1),
IgnoreTrackID = cms.bool(False),
HBDarkening = cms.bool(False),
HEDarkening = cms.bool(False),
HFDarkening = cms.bool(False),
UseHF = cms.untracked.bool(True),
ForTBH2 = cms.untracked.bool(False),
ForTBHCAL = cms.untracked.bool(False),
UseLayerWt = cms.untracked.bool(False),
WtFile = cms.untracked.string('None'),
TestNS = cms.untracked.bool(False),
HFDarkeningParameterBlock = HFDarkeningParameterBlock
),
CaloTrkProcessing = cms.PSet(
TestBeam = cms.bool(False),
EminTrack = cms.double(0.01),
PutHistory = cms.bool(False),
DoFineCalo = cms.bool(False),
EminFineTrack = cms.double(10000.0),
EminFinePhoton = cms.double(5000.0)
),
HFShower = cms.PSet(
common_UsePMT,
common_UseHF,
ProbMax = cms.double(1.0),
CFibre = cms.double(0.5),
PEPerGeV = cms.double(0.31),
TrackEM = cms.bool(False),
UseShowerLibrary = cms.bool(True),
UseHFGflash = cms.bool(False),
EminLibrary = cms.double(0.0),
OnlyLong = cms.bool(True),
LambdaMean = cms.double(350.0),
ApplyFiducialCut = cms.bool(True),
RefIndex = cms.double(1.459),
Aperture = cms.double(0.33),
ApertureTrapped = cms.double(0.22),
CosApertureTrapped= cms.double(0.5),
SinPsiMax = cms.untracked.double(0.5),
ParametrizeLast = cms.untracked.bool(False)
),
HFShowerLibrary = cms.PSet(
FileName = cms.FileInPath('SimG4CMS/Calo/data/HFShowerLibrary_oldpmt_noatt_eta4_16en_v3.root'),
BackProbability = cms.double(0.2),
TreeEMID = cms.string('emParticles'),
TreeHadID = cms.string('hadParticles'),
Verbosity = cms.untracked.bool(False),
ApplyFiducialCut= cms.bool(True),
BranchPost = cms.untracked.string(''),
BranchEvt = cms.untracked.string(''),
BranchPre = cms.untracked.string('')
),
HFShowerPMT = cms.PSet(
common_UsePMT,
common_UseHF,
PEPerGeVPMT = cms.double(1.0),
RefIndex = cms.double(1.52),
Aperture = cms.double(0.99),
ApertureTrapped = cms.double(0.22),
CosApertureTrapped= cms.double(0.5),
SinPsiMax = cms.untracked.double(0.5)
),
HFShowerStraightBundle = cms.PSet(
common_UsePMT,
common_UseHF,
FactorBundle = cms.double(1.0),
RefIndex = cms.double(1.459),
Aperture = cms.double(0.33),
ApertureTrapped = cms.double(0.22),
CosApertureTrapped= cms.double(0.5),
SinPsiMax = cms.untracked.double(0.5)
),
HFShowerConicalBundle = cms.PSet(
common_UsePMT,
common_UseHF,
FactorBundle = cms.double(1.0),
RefIndex = cms.double(1.459),
Aperture = cms.double(0.33),
ApertureTrapped = cms.double(0.22),
CosApertureTrapped= cms.double(0.5),
SinPsiMax = cms.untracked.double(0.5)
),
HFGflash = cms.PSet(
BField = cms.untracked.double(3.8),
WatcherOn = cms.untracked.bool(True),
FillHisto = cms.untracked.bool(True)
),
CastorSD = cms.PSet(
useShowerLibrary = cms.bool(True),
minEnergyInGeVforUsingSLibrary = cms.double(1.0),
nonCompensationFactor = cms.double(0.817),
Verbosity = cms.untracked.int32(0)
),
CastorShowerLibrary = cms.PSet(
FileName = cms.FileInPath('SimG4CMS/Forward/data/CastorShowerLibrary_CMSSW500_Standard.root'),
BranchEvt = cms.untracked.string('hadShowerLibInfo.'),
BranchEM = cms.untracked.string('emParticles.'),
BranchHAD = cms.untracked.string('hadParticles.'),
Verbosity = cms.untracked.bool(False)
),
BHMSD = cms.PSet(
Verbosity = cms.untracked.int32(0)
),
MtdSD = cms.PSet(
Verbosity = cms.untracked.int32(0),
TimeSliceUnit = cms.double(0.01), #stepping = 10 ps (for timing)
IgnoreTrackID = cms.bool(False),
EminHit = cms.double(0.0),
CheckID = cms.untracked.bool(True),
),
HGCSD = cms.PSet(
Verbosity = cms.untracked.int32(0),
TimeSliceUnit = cms.double(0.001), #stepping = 1 ps (for timing)
IgnoreTrackID = cms.bool(False),
EminHit = cms.double(0.0),
FiducialCut = cms.bool(False),
DistanceFromEdge = cms.double(1.0),
StoreAllG4Hits = cms.bool(False),
RejectMouseBite = cms.bool(False),
RotatedWafer = cms.bool(False),
CornerMinMask = cms.int32(0),
WaferAngles = cms.untracked.vdouble(90.0,30.0),
WaferSize = cms.untracked.double(123.7),
MouseBite = cms.untracked.double(2.5),
CheckID = cms.untracked.bool(True),
),
HGCScintSD = cms.PSet(
Verbosity = cms.untracked.int32(0),
EminHit = cms.double(0.0),
UseBirkLaw = cms.bool(True),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.142),
BirkC1 = cms.double(0.0052),
FiducialCut = cms.bool(False),
DistanceFromEdge = cms.double(1.0),
StoreAllG4Hits = cms.bool(False),
),
HFNoseSD = cms.PSet(
Verbosity = cms.untracked.int32(0),
TimeSliceUnit = cms.double(0.001), #stepping = 1 ps (for timing)
IgnoreTrackID = cms.bool(False),
EminHit = cms.double(0.0),
FiducialCut = cms.bool(False),
DistanceFromEdge = cms.double(1.0),
StoreAllG4Hits = cms.bool(False),
RejectMouseBite = cms.bool(False),
RotatedWafer = cms.bool(False),
CornerMinMask = cms.int32(0),
WaferAngles = cms.untracked.vdouble(90.0,30.0),
CheckID = cms.untracked.bool(True),
),
TotemRPSD = cms.PSet(
Verbosity = cms.int32(0)
),
TotemSD = cms.PSet(
Verbosity = cms.untracked.int32(0)
),
TotemT2ScintSD = cms.PSet(
UseBirkLaw = cms.bool(True),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.142),
BirkC1 = cms.double(0.006),
TimeSliceUnit = cms.double(1),
IgnoreTrackID = cms.bool(False),
),
PPSDiamondSD = cms.PSet(
Verbosity = cms.int32(0)
),
PPSPixelSD = cms.PSet(
Verbosity = cms.untracked.int32(0)
),
ZdcSD = cms.PSet(
Verbosity = cms.int32(0),
UseShowerLibrary = cms.bool(True),
UseShowerHits = cms.bool(False),
FiberDirection = cms.double(45.0),
ZdcHitEnergyCut = cms.double(10.0)
),
ZdcShowerLibrary = cms.PSet(
Verbosity = cms.untracked.int32(0)
),
FP420SD = cms.PSet(
Verbosity = cms.untracked.int32(2)
),
BscSD = cms.PSet(
Verbosity = cms.untracked.int32(0)
),
PltSD = cms.PSet(
EnergyThresholdForPersistencyInGeV = cms.double(0.2),
EnergyThresholdForHistoryInGeV = cms.double(0.05)
),
Bcm1fSD = cms.PSet(
EnergyThresholdForPersistencyInGeV = cms.double(0.010),
EnergyThresholdForHistoryInGeV = cms.double(0.005)
),
HcalTB02SD = cms.PSet(
UseBirkLaw = cms.untracked.bool(False),
BirkC1 = cms.untracked.double(0.013),
BirkC3 = cms.untracked.double(1.75),
BirkC2 = cms.untracked.double(0.0568)
),
EcalTBH4BeamSD = cms.PSet(
UseBirkLaw = cms.bool(False),
BirkC1 = cms.double(0.013),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.0568)
),
HGCalTestBeamSD = cms.PSet(
Material = cms.string('Scintillator'),
UseBirkLaw = cms.bool(False),
BirkC1 = cms.double(0.013),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.0568),
),
HcalTB06BeamSD = cms.PSet(
UseBirkLaw = cms.bool(False),
BirkC1 = cms.double(0.013),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.0568)
),
AHCalSD = cms.PSet(
UseBirkLaw = cms.bool(True),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.142),
BirkC1 = cms.double(0.0052),
EminHit = cms.double(0.0),
TimeSliceUnit = cms.double(1),
IgnoreTrackID = cms.bool(False),
),
)
##
## Change the HFShowerLibrary file used for Run 2
##
from Configuration.Eras.Modifier_run2_common_cff import run2_common
run2_common.toModify( g4SimHits.HFShowerLibrary, FileName = 'SimG4CMS/Calo/data/HFShowerLibrary_npmt_noatt_eta4_16en_v4.root' )
run2_common.toModify( g4SimHits.HFShower, ProbMax = 0.5)
from Configuration.Eras.Modifier_run2_HCAL_2017_cff import run2_HCAL_2017
run2_HCAL_2017.toModify( g4SimHits, HCalSD = dict( TestNumberingScheme = True ) )
from Configuration.Eras.Modifier_phase2_timing_cff import phase2_timing
phase2_timing.toModify( g4SimHits.ECalSD,
StoreLayerTimeSim = cms.untracked.bool(True),
TimeSliceUnit = cms.double(0.001) )
from Configuration.ProcessModifiers.dd4hep_cff import dd4hep
dd4hep.toModify( g4SimHits, g4GeometryDD4hepSource = True )
| 42.691652 | 127 | 0.580686 | import FWCore.ParameterSet.Config as cms
from SimG4Core.Application.hectorParameter_cfi import *
f import *
0),
ProtonThreshold = cms.double(30.0),
IonThreshold = cms.double(30.0)
)
common_maximum_time = cms.PSet(
MaxTrackTime = cms.double(500.0),
MaxTimeNames = cms.vstring('ZDCRegion'),
MaxTrackTimes = cms.vdouble(2000.0),
DeadRegions = cms.vstring('QuadRegion','InterimRegion'),
CriticalEnergyForVacuum = cms.double(2.0),
CriticalDensity = cms.double(1e-15)
)
common_UsePMT = cms.PSet(
UseR7600UPMT = cms.bool(False)
)
common_UseHF = cms.PSet(
Lambda1 = cms.double(280.0),
Lambda2 = cms.double(700.0),
Gain = cms.double(0.33),
CheckSurvive = cms.bool(False),
FibreR = cms.untracked.double(0.3)
)
common_UseLuminosity = cms.PSet(
InstLuminosity = cms.double(0.),
DelivLuminosity = cms.double(5000.)
)
g4SimHits = cms.EDProducer("OscarMTProducer",
g4GeometryDD4hepSource = cms.bool(False),
NonBeamEvent = cms.bool(False),
G4EventManagerVerbosity = cms.untracked.int32(0),
UseMagneticField = cms.bool(True),
StoreRndmSeeds = cms.bool(False),
RestoreRndmSeeds = cms.bool(False),
PhysicsTablesDirectory = cms.untracked.string('PhysicsTables'),
StorePhysicsTables = cms.untracked.bool(False),
RestorePhysicsTables = cms.untracked.bool(False),
UseParametrisedEMPhysics = cms.untracked.bool(True),
CheckGeometry = cms.untracked.bool(False),
G4CheckOverlap = cms.untracked.PSet(
OutputBaseName = cms.string('2017'),
MaterialFlag = cms.bool(True),
GeomFlag = cms.bool(True),
OverlapFlag = cms.bool(False),
RegionFlag = cms.bool(True),
gdmlFlag = cms.bool(False),
Verbose = cms.bool(True),
Tolerance = cms.double(0.0),
Resolution = cms.int32(10000),
ErrorThreshold = cms.int32(1),
Level = cms.int32(1),
Depth = cms.int32(3),
PVname = cms.string(''),
LVname = cms.string(''),
NodeNames = cms.vstring('World')
),
G4Commands = cms.vstring(),
SteppingVerbosity = cms.untracked.int32(0),
StepVerboseThreshold = cms.untracked.double(0.1),
VerboseEvents = cms.untracked.vint32(),
VertexNumber = cms.untracked.vint32(),
VerboseTracks = cms.untracked.vint32(),
FileNameField = cms.untracked.string(''),
FileNameGDML = cms.untracked.string(''),
FileNameRegions = cms.untracked.string(''),
Watchers = cms.VPSet(),
HepMCProductLabel = cms.InputTag("generatorSmeared"),
theLHCTlinkTag = cms.InputTag("LHCTransport"),
CustomUIsession = cms.untracked.PSet(
Type = cms.untracked.string("MessageLogger"),
ThreadPrefix = cms.untracked.string("W"),
ThreadFile = cms.untracked.string("sim_output_thread"),
),
MagneticField = cms.PSet(
UseLocalMagFieldManager = cms.bool(False),
Verbosity = cms.bool(False),
ConfGlobalMFM = cms.PSet(
Volume = cms.string('OCMS'),
OCMS = cms.PSet(
Stepper = cms.string('G4DormandPrince745'),
Type = cms.string('CMSIMField'),
StepperParam = cms.PSet(
VacRegions = cms.vstring(),
MaximumEpsilonStep = cms.untracked.double(0.01), DeltaOneStep = cms.double(0.001), MaximumLoopCounts = cms.untracked.double(1000.0),
DeltaChord = cms.double(0.001), MinStep = cms.double(0.1), DeltaIntersectionAndOneStep = cms.untracked.double(-1.0),
DeltaIntersection = cms.double(0.0001), MaxStep = cms.double(150.), MinimumEpsilonStep = cms.untracked.double(1e-05), EnergyThSimple = cms.double(0.015), DeltaChordSimple = cms.double(0.1), DeltaOneStepSimple = cms.double(0.1), DeltaIntersectionSimple = cms.double(0.01), MaxStepSimple = cms.double(50.), )
)
),
delta = cms.double(1.0)
),
Physics = cms.PSet(
common_maximum_time,
type = cms.string('SimG4Core/Physics/FTFP_BERT_EMM'),
DummyEMPhysics = cms.bool(False),
CutsPerRegion = cms.bool(True),
CutsOnProton = cms.bool(True),
DefaultCutValue = cms.double(1.0), remsstrahlungThreshold = cms.double(0.5), bosity = cms.untracked.int32(0),
MonopoleCharge = cms.untracked.int32(1),
MonopoleDeltaRay = cms.untracked.bool(True),
MonopoleMultiScatter = cms.untracked.bool(False),
MonopoleTransport = cms.untracked.bool(True),
MonopoleMass = cms.untracked.double(0),
ExoticaTransport = cms.untracked.bool(False),
ExoticaPhysicsSS = cms.untracked.bool(False),
RhadronPhysics = cms.bool(False),
DarkMPFactor = cms.double(1.0),
Region = cms.string(''),
TrackingCut = cms.bool(False),
SRType = cms.bool(True),
FlagMuNucl = cms.bool(False),
FlagFluo = cms.bool(False),
EMPhysics = cms.untracked.bool(True),
HadPhysics = cms.untracked.bool(True),
FlagBERT = cms.untracked.bool(False),
EminFTFP = cms.double(3.),
EmaxBERT = cms.double(6.),
EminQGSP = cms.double(12.),
EmaxFTFP = cms.double(30.),
EmaxBERTpi = cms.double(12.),
LowEnergyGflashEcal = cms.bool(False),
LowEnergyGflashEcalEmax = cms.double(100),
GflashEcal = cms.bool(False),
GflashHcal = cms.bool(False),
GflashEcalHad = cms.bool(False),
GflashHcalHad = cms.bool(False),
bField = cms.double(3.8),
energyScaleEB = cms.double(1.032),
energyScaleEE = cms.double(1.024),
ThermalNeutrons = cms.untracked.bool(False),
RusRoElectronEnergyLimit = cms.double(0.0),
RusRoEcalElectron = cms.double(1.0),
RusRoHcalElectron = cms.double(1.0),
RusRoMuonIronElectron = cms.double(1.0),
RusRoPreShowerElectron = cms.double(1.0),
RusRoCastorElectron = cms.double(1.0),
RusRoWorldElectron = cms.double(1.0),
ElectronStepLimit = cms.bool(False),
ElectronRangeTest = cms.bool(False),
PositronStepLimit = cms.bool(False),
ProtonRegionLimit = cms.bool(False),
PionRegionLimit = cms.bool(False),
LimitsPerRegion = cms.vstring('EcalRegion','HcalRegion'),
EnergyLimitsE = cms.vdouble(0.,0.0),
EnergyLimitsH = cms.vdouble(0.,0.0),
EnergyFactorsE = cms.vdouble(1.,0.0),
EnergyRMSE = cms.vdouble(0.0,0.0),
MinStepLimit = cms.double(1.0),
ModifyTransportation = cms.bool(False),
ThresholdWarningEnergy = cms.untracked.double(100.0),
ThresholdImportantEnergy = cms.untracked.double(250.0),
ThresholdTrials = cms.untracked.int32(10)
),
Generator = cms.PSet(
HectorEtaCut,
HepMCProductLabel = cms.InputTag('generatorSmeared'),
ApplyPCuts = cms.bool(True),
ApplyPtransCut = cms.bool(False),
MinPCut = cms.double(0.04), ms.double(99999.0), uts = cms.bool(True),
MinEtaCut = cms.double(-5.5),
MaxEtaCut = cms.double(5.5),
RDecLenCut = cms.double(2.9), e(30.0), s.bool(False),
MinPhiCut = cms.double(-3.14159265359), xPhiCut = cms.double(3.14159265359), = cms.bool(False), tracked.int32(0),
PDGselection = cms.PSet(
PDGfilterSel = cms.bool(False), nt32(21,1,2,3,4,5,6) StopFile = cms.string('')
),
EventAction = cms.PSet(
debug = cms.untracked.bool(False),
StopFile = cms.string(''),
PrintRandomSeed = cms.bool(False),
CollapsePrimaryVertices = cms.bool(False)
),
StackingAction = cms.PSet(
common_heavy_suppression,
common_maximum_time,
KillDeltaRay = cms.bool(False),
TrackNeutrino = cms.bool(False),
KillHeavy = cms.bool(False),
KillGamma = cms.bool(True),
GammaThreshold = cms.double(0.0001), SaveFirstLevelSecondary = cms.untracked.bool(False),
SavePrimaryDecayProductsAndConversionsInTracker = cms.untracked.bool(False),
SavePrimaryDecayProductsAndConversionsInCalo = cms.untracked.bool(False),
SavePrimaryDecayProductsAndConversionsInMuon = cms.untracked.bool(False),
SaveAllPrimaryDecayProductsAndConversions = cms.untracked.bool(True),
RusRoGammaEnergyLimit = cms.double(5.0), RusRoEcalGamma = cms.double(0.3),
RusRoHcalGamma = cms.double(0.3),
RusRoMuonIronGamma = cms.double(0.3),
RusRoPreShowerGamma = cms.double(0.3),
RusRoCastorGamma = cms.double(0.3),
RusRoWorldGamma = cms.double(0.3),
RusRoNeutronEnergyLimit = cms.double(10.0), RusRoEcalNeutron = cms.double(0.1),
RusRoHcalNeutron = cms.double(0.1),
RusRoMuonIronNeutron = cms.double(0.1),
RusRoPreShowerNeutron = cms.double(0.1),
RusRoCastorNeutron = cms.double(0.1),
RusRoWorldNeutron = cms.double(0.1),
RusRoProtonEnergyLimit = cms.double(0.0),
RusRoEcalProton = cms.double(1.0),
RusRoHcalProton = cms.double(1.0),
RusRoMuonIronProton = cms.double(1.0),
RusRoPreShowerProton = cms.double(1.0),
RusRoCastorProton = cms.double(1.0),
RusRoWorldProton = cms.double(1.0)
),
TrackingAction = cms.PSet(
DetailedTiming = cms.untracked.bool(False),
CheckTrack = cms.untracked.bool(False)
),
SteppingAction = cms.PSet(
common_maximum_time,
EkinNames = cms.vstring(),
EkinThresholds = cms.vdouble(),
EkinParticles = cms.vstring()
),
TrackerSD = cms.PSet(
ZeroEnergyLoss = cms.bool(False),
PrintHits = cms.bool(False),
ElectronicSigmaInNanoSeconds = cms.double(12.06),
NeverAccumulate = cms.bool(False),
EnergyThresholdForPersistencyInGeV = cms.double(0.2),
EnergyThresholdForHistoryInGeV = cms.double(0.05)
),
MuonSD = cms.PSet(
EnergyThresholdForPersistency = cms.double(1.0),
PrintHits = cms.bool(False),
AllMuonsPersistent = cms.bool(True)
),
CaloSD = cms.PSet(
common_heavy_suppression,
SuppressHeavy = cms.bool(False),
EminTrack = cms.double(1.0),
TmaxHit = cms.double(1000.0),
HCNames = cms.vstring('EcalHitsEB','EcalHitsEE','EcalHitsES','HcalHits','ZDCHITS'),
EminHits = cms.vdouble(0.015,0.010,0.0,0.0,0.0),
EminHitsDepth = cms.vdouble(0.0,0.0,0.0,0.0,0.0),
TmaxHits = cms.vdouble(500.0,500.0,500.0,500.0,2000.0),
UseResponseTables = cms.vint32(0,0,0,0,0),
BeamPosition = cms.double(0.0),
CorrectTOFBeam = cms.bool(False),
UseFineCaloID = cms.bool(False),
DetailedTiming = cms.untracked.bool(False),
UseMap = cms.untracked.bool(False),
Verbosity = cms.untracked.int32(0),
CheckHits = cms.untracked.int32(25)
),
CaloResponse = cms.PSet(
UseResponseTable = cms.bool(True),
ResponseScale = cms.double(1.0),
ResponseFile = cms.FileInPath('SimG4CMS/Calo/data/responsTBpim50.dat')
),
ECalSD = cms.PSet(
common_UseLuminosity,
UseBirkLaw = cms.bool(True),
BirkL3Parametrization = cms.bool(True),
BirkSlope = cms.double(0.253694),
BirkCut = cms.double(0.1),
BirkC1 = cms.double(0.03333),
BirkC3 = cms.double(1.0),
BirkC2 = cms.double(0.0),
SlopeLightYield = cms.double(0.02),
StoreSecondary = cms.bool(False),
TimeSliceUnit = cms.double(1),
IgnoreTrackID = cms.bool(False),
XtalMat = cms.untracked.string('E_PbWO4'),
TestBeam = cms.untracked.bool(False),
NullNumbering = cms.untracked.bool(False),
StoreRadLength = cms.untracked.bool(False),
ScaleRadLength = cms.untracked.double(1.0),
StoreLayerTimeSim = cms.untracked.bool(False),
AgeingWithSlopeLY = cms.untracked.bool(False)
),
HCalSD = cms.PSet(
common_UseLuminosity,
UseBirkLaw = cms.bool(True),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.142),
BirkC1 = cms.double(0.0060),
UseShowerLibrary = cms.bool(True),
UseParametrize = cms.bool(False),
UsePMTHits = cms.bool(False),
UseFibreBundleHits = cms.bool(False),
TestNumberingScheme = cms.bool(False),
doNeutralDensityFilter = cms.bool(False),
EminHitHB = cms.double(0.0),
EminHitHE = cms.double(0.0),
EminHitHO = cms.double(0.0),
EminHitHF = cms.double(0.0),
BetaThreshold = cms.double(0.7),
TimeSliceUnit = cms.double(1),
IgnoreTrackID = cms.bool(False),
HBDarkening = cms.bool(False),
HEDarkening = cms.bool(False),
HFDarkening = cms.bool(False),
UseHF = cms.untracked.bool(True),
ForTBH2 = cms.untracked.bool(False),
ForTBHCAL = cms.untracked.bool(False),
UseLayerWt = cms.untracked.bool(False),
WtFile = cms.untracked.string('None'),
TestNS = cms.untracked.bool(False),
HFDarkeningParameterBlock = HFDarkeningParameterBlock
),
CaloTrkProcessing = cms.PSet(
TestBeam = cms.bool(False),
EminTrack = cms.double(0.01),
PutHistory = cms.bool(False),
DoFineCalo = cms.bool(False),
EminFineTrack = cms.double(10000.0),
EminFinePhoton = cms.double(5000.0)
),
HFShower = cms.PSet(
common_UsePMT,
common_UseHF,
ProbMax = cms.double(1.0),
CFibre = cms.double(0.5),
PEPerGeV = cms.double(0.31),
TrackEM = cms.bool(False),
UseShowerLibrary = cms.bool(True),
UseHFGflash = cms.bool(False),
EminLibrary = cms.double(0.0),
OnlyLong = cms.bool(True),
LambdaMean = cms.double(350.0),
ApplyFiducialCut = cms.bool(True),
RefIndex = cms.double(1.459),
Aperture = cms.double(0.33),
ApertureTrapped = cms.double(0.22),
CosApertureTrapped= cms.double(0.5),
SinPsiMax = cms.untracked.double(0.5),
ParametrizeLast = cms.untracked.bool(False)
),
HFShowerLibrary = cms.PSet(
FileName = cms.FileInPath('SimG4CMS/Calo/data/HFShowerLibrary_oldpmt_noatt_eta4_16en_v3.root'),
BackProbability = cms.double(0.2),
TreeEMID = cms.string('emParticles'),
TreeHadID = cms.string('hadParticles'),
Verbosity = cms.untracked.bool(False),
ApplyFiducialCut= cms.bool(True),
BranchPost = cms.untracked.string(''),
BranchEvt = cms.untracked.string(''),
BranchPre = cms.untracked.string('')
),
HFShowerPMT = cms.PSet(
common_UsePMT,
common_UseHF,
PEPerGeVPMT = cms.double(1.0),
RefIndex = cms.double(1.52),
Aperture = cms.double(0.99),
ApertureTrapped = cms.double(0.22),
CosApertureTrapped= cms.double(0.5),
SinPsiMax = cms.untracked.double(0.5)
),
HFShowerStraightBundle = cms.PSet(
common_UsePMT,
common_UseHF,
FactorBundle = cms.double(1.0),
RefIndex = cms.double(1.459),
Aperture = cms.double(0.33),
ApertureTrapped = cms.double(0.22),
CosApertureTrapped= cms.double(0.5),
SinPsiMax = cms.untracked.double(0.5)
),
HFShowerConicalBundle = cms.PSet(
common_UsePMT,
common_UseHF,
FactorBundle = cms.double(1.0),
RefIndex = cms.double(1.459),
Aperture = cms.double(0.33),
ApertureTrapped = cms.double(0.22),
CosApertureTrapped= cms.double(0.5),
SinPsiMax = cms.untracked.double(0.5)
),
HFGflash = cms.PSet(
BField = cms.untracked.double(3.8),
WatcherOn = cms.untracked.bool(True),
FillHisto = cms.untracked.bool(True)
),
CastorSD = cms.PSet(
useShowerLibrary = cms.bool(True),
minEnergyInGeVforUsingSLibrary = cms.double(1.0),
nonCompensationFactor = cms.double(0.817),
Verbosity = cms.untracked.int32(0)
),
CastorShowerLibrary = cms.PSet(
FileName = cms.FileInPath('SimG4CMS/Forward/data/CastorShowerLibrary_CMSSW500_Standard.root'),
BranchEvt = cms.untracked.string('hadShowerLibInfo.'),
BranchEM = cms.untracked.string('emParticles.'),
BranchHAD = cms.untracked.string('hadParticles.'),
Verbosity = cms.untracked.bool(False)
),
BHMSD = cms.PSet(
Verbosity = cms.untracked.int32(0)
),
MtdSD = cms.PSet(
Verbosity = cms.untracked.int32(0),
TimeSliceUnit = cms.double(0.01),
IgnoreTrackID = cms.bool(False),
EminHit = cms.double(0.0),
CheckID = cms.untracked.bool(True),
),
HGCSD = cms.PSet(
Verbosity = cms.untracked.int32(0),
TimeSliceUnit = cms.double(0.001),
IgnoreTrackID = cms.bool(False),
EminHit = cms.double(0.0),
FiducialCut = cms.bool(False),
DistanceFromEdge = cms.double(1.0),
StoreAllG4Hits = cms.bool(False),
RejectMouseBite = cms.bool(False),
RotatedWafer = cms.bool(False),
CornerMinMask = cms.int32(0),
WaferAngles = cms.untracked.vdouble(90.0,30.0),
WaferSize = cms.untracked.double(123.7),
MouseBite = cms.untracked.double(2.5),
CheckID = cms.untracked.bool(True),
),
HGCScintSD = cms.PSet(
Verbosity = cms.untracked.int32(0),
EminHit = cms.double(0.0),
UseBirkLaw = cms.bool(True),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.142),
BirkC1 = cms.double(0.0052),
FiducialCut = cms.bool(False),
DistanceFromEdge = cms.double(1.0),
StoreAllG4Hits = cms.bool(False),
),
HFNoseSD = cms.PSet(
Verbosity = cms.untracked.int32(0),
TimeSliceUnit = cms.double(0.001),
IgnoreTrackID = cms.bool(False),
EminHit = cms.double(0.0),
FiducialCut = cms.bool(False),
DistanceFromEdge = cms.double(1.0),
StoreAllG4Hits = cms.bool(False),
RejectMouseBite = cms.bool(False),
RotatedWafer = cms.bool(False),
CornerMinMask = cms.int32(0),
WaferAngles = cms.untracked.vdouble(90.0,30.0),
CheckID = cms.untracked.bool(True),
),
TotemRPSD = cms.PSet(
Verbosity = cms.int32(0)
),
TotemSD = cms.PSet(
Verbosity = cms.untracked.int32(0)
),
TotemT2ScintSD = cms.PSet(
UseBirkLaw = cms.bool(True),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.142),
BirkC1 = cms.double(0.006),
TimeSliceUnit = cms.double(1),
IgnoreTrackID = cms.bool(False),
),
PPSDiamondSD = cms.PSet(
Verbosity = cms.int32(0)
),
PPSPixelSD = cms.PSet(
Verbosity = cms.untracked.int32(0)
),
ZdcSD = cms.PSet(
Verbosity = cms.int32(0),
UseShowerLibrary = cms.bool(True),
UseShowerHits = cms.bool(False),
FiberDirection = cms.double(45.0),
ZdcHitEnergyCut = cms.double(10.0)
),
ZdcShowerLibrary = cms.PSet(
Verbosity = cms.untracked.int32(0)
),
FP420SD = cms.PSet(
Verbosity = cms.untracked.int32(2)
),
BscSD = cms.PSet(
Verbosity = cms.untracked.int32(0)
),
PltSD = cms.PSet(
EnergyThresholdForPersistencyInGeV = cms.double(0.2),
EnergyThresholdForHistoryInGeV = cms.double(0.05)
),
Bcm1fSD = cms.PSet(
EnergyThresholdForPersistencyInGeV = cms.double(0.010),
EnergyThresholdForHistoryInGeV = cms.double(0.005)
),
HcalTB02SD = cms.PSet(
UseBirkLaw = cms.untracked.bool(False),
BirkC1 = cms.untracked.double(0.013),
BirkC3 = cms.untracked.double(1.75),
BirkC2 = cms.untracked.double(0.0568)
),
EcalTBH4BeamSD = cms.PSet(
UseBirkLaw = cms.bool(False),
BirkC1 = cms.double(0.013),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.0568)
),
HGCalTestBeamSD = cms.PSet(
Material = cms.string('Scintillator'),
UseBirkLaw = cms.bool(False),
BirkC1 = cms.double(0.013),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.0568),
),
HcalTB06BeamSD = cms.PSet(
UseBirkLaw = cms.bool(False),
BirkC1 = cms.double(0.013),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.0568)
),
AHCalSD = cms.PSet(
UseBirkLaw = cms.bool(True),
BirkC3 = cms.double(1.75),
BirkC2 = cms.double(0.142),
BirkC1 = cms.double(0.0052),
EminHit = cms.double(0.0),
TimeSliceUnit = cms.double(1),
IgnoreTrackID = cms.bool(False),
),
)
f import run2_common
run2_common.toModify( g4SimHits.HFShowerLibrary, FileName = 'SimG4CMS/Calo/data/HFShowerLibrary_npmt_noatt_eta4_16en_v4.root' )
run2_common.toModify( g4SimHits.HFShower, ProbMax = 0.5)
from Configuration.Eras.Modifier_run2_HCAL_2017_cff import run2_HCAL_2017
run2_HCAL_2017.toModify( g4SimHits, HCalSD = dict( TestNumberingScheme = True ) )
from Configuration.Eras.Modifier_phase2_timing_cff import phase2_timing
phase2_timing.toModify( g4SimHits.ECalSD,
StoreLayerTimeSim = cms.untracked.bool(True),
TimeSliceUnit = cms.double(0.001) )
from Configuration.ProcessModifiers.dd4hep_cff import dd4hep
dd4hep.toModify( g4SimHits, g4GeometryDD4hepSource = True )
| true | true |
f7390fff6ac1b29779d480d4cd31cc41351d32f0 | 4,304 | py | Python | webcam_animal_classifier.py | MaximeSorgenfrei/cat_dog_cnn | bc1301fb683de2111db2c25b9da22608ede8e070 | [
"MIT"
] | null | null | null | webcam_animal_classifier.py | MaximeSorgenfrei/cat_dog_cnn | bc1301fb683de2111db2c25b9da22608ede8e070 | [
"MIT"
] | null | null | null | webcam_animal_classifier.py | MaximeSorgenfrei/cat_dog_cnn | bc1301fb683de2111db2c25b9da22608ede8e070 | [
"MIT"
] | null | null | null | import cv2
import keras
from keras.models import Sequential, Model
from keras.callbacks import EarlyStopping
from keras.optimizers import Adam
import json
import matplotlib.pyplot as plt
import numpy as np
import os
import pandas as pd
from random import shuffle
# import tensorflow as tf
import time
file_list = os.listdir("./")
keras_model_files = []
for file in file_list:
if file.split(".")[-1] in ["h5","npy"]:
print(file)
keras_model_files.append(file)
# load model from file
keras_model_file_i_want_to_use = keras_model_files[0]
model = keras.models.load_model(keras_model_file_i_want_to_use)
model.summary()
# classes = ["ape", "bear", "bee", "beetle", "bird", "bos", "canine", "deer", "elephants", "feline", "frogs", "gekko", "golden moles", "hare", "human", "lemur", "loris", "none", "rodent", "salamander", "scorpions", "shark", "sheep", "snake", "spider", "squirrel", "turtle", "whale"]
# read directories, resize and label data
# Write some Text
# dict
with open(keras_model_files[1],"r") as f:
class_list = json.load(f)
class_stats = pd.DataFrame(data={"classes":class_list})
classes = class_stats["classes"].to_dict()
f.close()
print("Classes: {}".format(classes))
print("Using following model file for predictions:\n{}".format(keras_model_file_i_want_to_use))
font = cv2.FONT_HERSHEY_COMPLEX
bottomLeftCornerOfText = (50,50)
bottomLeftCornerOfText2 = (50,75)
fontScale = 0.5
fontColor = (255,255,255)
lineType = 2
width, height = 50, 50
cap_width = 1280
cap_height = 720
roi_width = 400
roi_height = 300
WebCam_cap = cv2.VideoCapture(0)
WebCam_cap.set(cv2.CAP_PROP_FRAME_WIDTH, cap_width)
WebCam_cap.set(cv2.CAP_PROP_FRAME_HEIGHT, cap_height)
SETTING_PHOTOFRAME = True
while True:
# get frame
ret, frame = WebCam_cap.read()
# print(type(frame), frame.shape)
try:
# reduce frame to 50x50 pixles
# image = cv2.imread(frame, cv2.IMREAD_GRAYSCALE)
if SETTING_PHOTOFRAME:
roi = np.ones_like(frame)
roi[int((cap_height-roi_height)/2):-int((cap_height-roi_height)/2), int((cap_width-roi_width)/2):-int((cap_width-roi_width)/2), :] = frame[int((cap_height-roi_height)/2):-int((cap_height-roi_height)/2), int((cap_width-roi_width)/2):-int((cap_width-roi_width)/2), :]
image = frame[int((cap_height-roi_height)/2):-int((cap_height-roi_height)/2), int((cap_width-roi_width)/2):-int((cap_width-roi_width)/2), :]
# print("image shape: ",image.shape)
else:
image = frame
# resize, turn to gray and reshape for CNN
image = cv2.resize(image, (height, width), interpolation=cv2.INTER_AREA)
image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
image_to_predict = np.reshape(image, (1, height, width, 1))
# predict with NN
pred = model.predict_classes(image_to_predict, verbose=0)
pred_ = model.predict(image_to_predict, verbose=0)
prediction = "{}: {} | {}: {}".format(classes[0], pred_[0][0], classes[1], pred_[0][1])
if pred_[0][pred[0]] > 0.30:
prediction_class = "Predicted class: {} [{:.2f}]".format(classes[pred[0]], pred_[0][pred[0]])
else:
prediction_class = "No significant prediction possible!"
# print prediction and class to frame
# cv2.putText(frame, prediction, bottomLeftCornerOfText, font, fontScale, fontColor, lineType)
if SETTING_PHOTOFRAME:
cv2.putText(roi, prediction_class, bottomLeftCornerOfText2, font, fontScale, fontColor, lineType)
else:
cv2.putText(frame, prediction_class, bottomLeftCornerOfText2, font, fontScale, fontColor, lineType)
# ax[i].set_title("{}: {}-{} ({})".format(i, pred, classes[pred[0]], np.round(pred_, decimals=4)))
# display resut
# cv2.namedWindow("Result", cv2.WINDOW_AUTOSIZE)
# cv2.imshow("Result", image)
except Exception as e:
print(e)
else:
cv2.namedWindow("WebCam", cv2.WINDOW_AUTOSIZE)
if SETTING_PHOTOFRAME:
cv2.imshow("WebCam", roi)
else:
cv2.imshow("WebCam", frame)
if cv2.waitKey(1) & 0xFF==ord("q"):
break
WebCam_cap.release()
cv2.destroyAllWindows()
| 39.486239 | 282 | 0.655901 | import cv2
import keras
from keras.models import Sequential, Model
from keras.callbacks import EarlyStopping
from keras.optimizers import Adam
import json
import matplotlib.pyplot as plt
import numpy as np
import os
import pandas as pd
from random import shuffle
import time
file_list = os.listdir("./")
keras_model_files = []
for file in file_list:
if file.split(".")[-1] in ["h5","npy"]:
print(file)
keras_model_files.append(file)
keras_model_file_i_want_to_use = keras_model_files[0]
model = keras.models.load_model(keras_model_file_i_want_to_use)
model.summary()
with open(keras_model_files[1],"r") as f:
class_list = json.load(f)
class_stats = pd.DataFrame(data={"classes":class_list})
classes = class_stats["classes"].to_dict()
f.close()
print("Classes: {}".format(classes))
print("Using following model file for predictions:\n{}".format(keras_model_file_i_want_to_use))
font = cv2.FONT_HERSHEY_COMPLEX
bottomLeftCornerOfText = (50,50)
bottomLeftCornerOfText2 = (50,75)
fontScale = 0.5
fontColor = (255,255,255)
lineType = 2
width, height = 50, 50
cap_width = 1280
cap_height = 720
roi_width = 400
roi_height = 300
WebCam_cap = cv2.VideoCapture(0)
WebCam_cap.set(cv2.CAP_PROP_FRAME_WIDTH, cap_width)
WebCam_cap.set(cv2.CAP_PROP_FRAME_HEIGHT, cap_height)
SETTING_PHOTOFRAME = True
while True:
ret, frame = WebCam_cap.read()
try:
if SETTING_PHOTOFRAME:
roi = np.ones_like(frame)
roi[int((cap_height-roi_height)/2):-int((cap_height-roi_height)/2), int((cap_width-roi_width)/2):-int((cap_width-roi_width)/2), :] = frame[int((cap_height-roi_height)/2):-int((cap_height-roi_height)/2), int((cap_width-roi_width)/2):-int((cap_width-roi_width)/2), :]
image = frame[int((cap_height-roi_height)/2):-int((cap_height-roi_height)/2), int((cap_width-roi_width)/2):-int((cap_width-roi_width)/2), :]
else:
image = frame
image = cv2.resize(image, (height, width), interpolation=cv2.INTER_AREA)
image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
image_to_predict = np.reshape(image, (1, height, width, 1))
pred = model.predict_classes(image_to_predict, verbose=0)
pred_ = model.predict(image_to_predict, verbose=0)
prediction = "{}: {} | {}: {}".format(classes[0], pred_[0][0], classes[1], pred_[0][1])
if pred_[0][pred[0]] > 0.30:
prediction_class = "Predicted class: {} [{:.2f}]".format(classes[pred[0]], pred_[0][pred[0]])
else:
prediction_class = "No significant prediction possible!"
if SETTING_PHOTOFRAME:
cv2.putText(roi, prediction_class, bottomLeftCornerOfText2, font, fontScale, fontColor, lineType)
else:
cv2.putText(frame, prediction_class, bottomLeftCornerOfText2, font, fontScale, fontColor, lineType)
except Exception as e:
print(e)
else:
cv2.namedWindow("WebCam", cv2.WINDOW_AUTOSIZE)
if SETTING_PHOTOFRAME:
cv2.imshow("WebCam", roi)
else:
cv2.imshow("WebCam", frame)
if cv2.waitKey(1) & 0xFF==ord("q"):
break
WebCam_cap.release()
cv2.destroyAllWindows()
| true | true |
f73910657fede1ff95dd3e376594d155dc4a8249 | 1,324 | py | Python | computer_vision/learning-opencv-practical/image-process-100ask/Question_41_50/answers/answer_48.py | magic428/subjects_notes | 6930adbb3f445c11ca9d024abb12a53d6aca19e7 | [
"MIT"
] | 2 | 2020-03-18T17:13:00.000Z | 2020-03-25T02:34:03.000Z | computer_vision/learning-opencv-practical/image-process-100ask/Question_41_50/answers/answer_48.py | magic428/subjects_notes | 6930adbb3f445c11ca9d024abb12a53d6aca19e7 | [
"MIT"
] | null | null | null | computer_vision/learning-opencv-practical/image-process-100ask/Question_41_50/answers/answer_48.py | magic428/subjects_notes | 6930adbb3f445c11ca9d024abb12a53d6aca19e7 | [
"MIT"
] | null | null | null | import cv2
import numpy as np
import matplotlib.pyplot as plt
# Read image
img = cv2.imread("imori.jpg").astype(np.float32)
H, W, C = img.shape
# Otsu binary
## Grayscale
out = 0.2126 * img[..., 2] + 0.7152 * img[..., 1] + 0.0722 * img[..., 0]
out = out.astype(np.uint8)
## Determine threshold of Otsu's binarization
max_sigma = 0
max_t = 0
for _t in range(1, 255):
v0 = out[np.where(out < _t)]
m0 = np.mean(v0) if len(v0) > 0 else 0.
w0 = len(v0) / (H * W)
v1 = out[np.where(out >= _t)]
m1 = np.mean(v1) if len(v1) > 0 else 0.
w1 = len(v1) / (H * W)
sigma = w0 * w1 * ((m0 - m1) ** 2)
if sigma > max_sigma:
max_sigma = sigma
max_t = _t
## Binarization
#print("threshold >>", max_t)
th = max_t
out[out < th] = 0
out[out >= th] = 255
# Morphology filter
MF = np.array(((0, 1, 0),
(1, 0, 1),
(0, 1, 0)), dtype=np.int)
# Morphology - erode
Erode_time = 2
for i in range(Erode_time):
tmp = np.pad(out, (1, 1), 'edge')
for y in range(1, H+1):
for x in range(1, W+1):
if np.sum(MF * tmp[y-1:y+2, x-1:x+2]) < 255*4:
out[y-1, x-1] = 0
# Save result
cv2.imwrite("out.jpg", out)
cv2.imshow("result", out)
cv2.waitKey(0)
cv2.destroyAllWindows()
| 22.827586 | 73 | 0.52568 | import cv2
import numpy as np
import matplotlib.pyplot as plt
img = cv2.imread("imori.jpg").astype(np.float32)
H, W, C = img.shape
6 * img[..., 2] + 0.7152 * img[..., 1] + 0.0722 * img[..., 0]
out = out.astype(np.uint8)
1, 255):
v0 = out[np.where(out < _t)]
m0 = np.mean(v0) if len(v0) > 0 else 0.
w0 = len(v0) / (H * W)
v1 = out[np.where(out >= _t)]
m1 = np.mean(v1) if len(v1) > 0 else 0.
w1 = len(v1) / (H * W)
sigma = w0 * w1 * ((m0 - m1) ** 2)
if sigma > max_sigma:
max_sigma = sigma
max_t = _t
## Binarization
#print("threshold >>", max_t)
th = max_t
out[out < th] = 0
out[out >= th] = 255
# Morphology filter
MF = np.array(((0, 1, 0),
(1, 0, 1),
(0, 1, 0)), dtype=np.int)
# Morphology - erode
Erode_time = 2
for i in range(Erode_time):
tmp = np.pad(out, (1, 1), 'edge')
for y in range(1, H+1):
for x in range(1, W+1):
if np.sum(MF * tmp[y-1:y+2, x-1:x+2]) < 255*4:
out[y-1, x-1] = 0
# Save result
cv2.imwrite("out.jpg", out)
cv2.imshow("result", out)
cv2.waitKey(0)
cv2.destroyAllWindows()
| true | true |
f73910f59d21f2145a1c358627a1f69dca0b5486 | 86,942 | py | Python | google/ads/google_ads/v4/proto/resources/campaign_pb2.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | 1 | 2021-04-09T04:28:47.000Z | 2021-04-09T04:28:47.000Z | google/ads/google_ads/v4/proto/resources/campaign_pb2.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | null | null | null | google/ads/google_ads/v4/proto/resources/campaign_pb2.py | arammaliachi/google-ads-python | a4fe89567bd43eb784410523a6306b5d1dd9ee67 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v4/proto/resources/campaign.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v4.proto.common import bidding_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2
from google.ads.google_ads.v4.proto.common import custom_parameter_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_custom__parameter__pb2
from google.ads.google_ads.v4.proto.common import frequency_cap_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_frequency__cap__pb2
from google.ads.google_ads.v4.proto.common import real_time_bidding_setting_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_real__time__bidding__setting__pb2
from google.ads.google_ads.v4.proto.common import targeting_setting_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_targeting__setting__pb2
from google.ads.google_ads.v4.proto.enums import ad_serving_optimization_status_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_ad__serving__optimization__status__pb2
from google.ads.google_ads.v4.proto.enums import advertising_channel_sub_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_advertising__channel__sub__type__pb2
from google.ads.google_ads.v4.proto.enums import advertising_channel_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_advertising__channel__type__pb2
from google.ads.google_ads.v4.proto.enums import app_campaign_app_store_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_app__campaign__app__store__pb2
from google.ads.google_ads.v4.proto.enums import app_campaign_bidding_strategy_goal_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_app__campaign__bidding__strategy__goal__type__pb2
from google.ads.google_ads.v4.proto.enums import bidding_strategy_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_bidding__strategy__type__pb2
from google.ads.google_ads.v4.proto.enums import brand_safety_suitability_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_brand__safety__suitability__pb2
from google.ads.google_ads.v4.proto.enums import campaign_experiment_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__experiment__type__pb2
from google.ads.google_ads.v4.proto.enums import campaign_serving_status_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__serving__status__pb2
from google.ads.google_ads.v4.proto.enums import campaign_status_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__status__pb2
from google.ads.google_ads.v4.proto.enums import location_source_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_location__source__type__pb2
from google.ads.google_ads.v4.proto.enums import negative_geo_target_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_negative__geo__target__type__pb2
from google.ads.google_ads.v4.proto.enums import optimization_goal_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_optimization__goal__type__pb2
from google.ads.google_ads.v4.proto.enums import payment_mode_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_payment__mode__pb2
from google.ads.google_ads.v4.proto.enums import positive_geo_target_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_positive__geo__target__type__pb2
from google.ads.google_ads.v4.proto.enums import vanity_pharma_display_url_mode_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_vanity__pharma__display__url__mode__pb2
from google.ads.google_ads.v4.proto.enums import vanity_pharma_text_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_vanity__pharma__text__pb2
from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
# File-level descriptor for campaign.proto. The serialized_pb argument is the
# wire-format FileDescriptorProto emitted by protoc; every nested Descriptor
# below points back into it via serialized_start/serialized_end byte offsets,
# so this data must never be edited by hand.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='google/ads/googleads_v4/proto/resources/campaign.proto',
  package='google.ads.googleads.v4.resources',
  syntax='proto3',
  serialized_options=_b('\n%com.google.ads.googleads.v4.resourcesB\rCampaignProtoP\001ZJgoogle.golang.org/genproto/googleapis/ads/googleads/v4/resources;resources\242\002\003GAA\252\002!Google.Ads.GoogleAds.V4.Resources\312\002!Google\\Ads\\GoogleAds\\V4\\Resources\352\002%Google::Ads::GoogleAds::V4::Resources'),
  serialized_pb=_b('\n6google/ads/googleads_v4/proto/resources/campaign.proto\x12!google.ads.googleads.v4.resources\x1a\x32google/ads/googleads_v4/proto/common/bidding.proto\x1a;google/ads/googleads_v4/proto/common/custom_parameter.proto\x1a\x38google/ads/googleads_v4/proto/common/frequency_cap.proto\x1a\x44google/ads/googleads_v4/proto/common/real_time_bidding_setting.proto\x1a<google/ads/googleads_v4/proto/common/targeting_setting.proto\x1aHgoogle/ads/googleads_v4/proto/enums/ad_serving_optimization_status.proto\x1a\x46google/ads/googleads_v4/proto/enums/advertising_channel_sub_type.proto\x1a\x42google/ads/googleads_v4/proto/enums/advertising_channel_type.proto\x1a@google/ads/googleads_v4/proto/enums/app_campaign_app_store.proto\x1aQgoogle/ads/googleads_v4/proto/enums/app_campaign_bidding_strategy_goal_type.proto\x1a?google/ads/googleads_v4/proto/enums/bidding_strategy_type.proto\x1a\x42google/ads/googleads_v4/proto/enums/brand_safety_suitability.proto\x1a\x42google/ads/googleads_v4/proto/enums/campaign_experiment_type.proto\x1a\x41google/ads/googleads_v4/proto/enums/campaign_serving_status.proto\x1a\x39google/ads/googleads_v4/proto/enums/campaign_status.proto\x1a>google/ads/googleads_v4/proto/enums/location_source_type.proto\x1a\x42google/ads/googleads_v4/proto/enums/negative_geo_target_type.proto\x1a@google/ads/googleads_v4/proto/enums/optimization_goal_type.proto\x1a\x36google/ads/googleads_v4/proto/enums/payment_mode.proto\x1a\x42google/ads/googleads_v4/proto/enums/positive_geo_target_type.proto\x1aHgoogle/ads/googleads_v4/proto/enums/vanity_pharma_display_url_mode.proto\x1a<google/ads/googleads_v4/proto/enums/vanity_pharma_text.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto\"\xc2\x31\n\x08\x43\x61mpaign\x12@\n\rresource_name\x18\x01 \x01(\tB)\xe0\x41\x05\xfa\x41#\n!googleads.googleapis.com/Campaign\x12,\n\x02id\x18\x03 
\x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x03\xe0\x41\x03\x12*\n\x04name\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12P\n\x06status\x18\x05 \x01(\x0e\x32@.google.ads.googleads.v4.enums.CampaignStatusEnum.CampaignStatus\x12k\n\x0eserving_status\x18\x15 \x01(\x0e\x32N.google.ads.googleads.v4.enums.CampaignServingStatusEnum.CampaignServingStatusB\x03\xe0\x41\x03\x12\x82\x01\n\x1e\x61\x64_serving_optimization_status\x18\x08 \x01(\x0e\x32Z.google.ads.googleads.v4.enums.AdServingOptimizationStatusEnum.AdServingOptimizationStatus\x12w\n\x18\x61\x64vertising_channel_type\x18\t \x01(\x0e\x32P.google.ads.googleads.v4.enums.AdvertisingChannelTypeEnum.AdvertisingChannelTypeB\x03\xe0\x41\x05\x12\x81\x01\n\x1c\x61\x64vertising_channel_sub_type\x18\n \x01(\x0e\x32V.google.ads.googleads.v4.enums.AdvertisingChannelSubTypeEnum.AdvertisingChannelSubTypeB\x03\xe0\x41\x05\x12;\n\x15tracking_url_template\x18\x0b \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12N\n\x15url_custom_parameters\x18\x0c \x03(\x0b\x32/.google.ads.googleads.v4.common.CustomParameter\x12Y\n\x19real_time_bidding_setting\x18\' \x01(\x0b\x32\x36.google.ads.googleads.v4.common.RealTimeBiddingSetting\x12U\n\x10network_settings\x18\x0e \x01(\x0b\x32;.google.ads.googleads.v4.resources.Campaign.NetworkSettings\x12X\n\rhotel_setting\x18  \x01(\x0b\x32<.google.ads.googleads.v4.resources.Campaign.HotelSettingInfoB\x03\xe0\x41\x05\x12g\n\x1a\x64ynamic_search_ads_setting\x18! 
\x01(\x0b\x32\x43.google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting\x12U\n\x10shopping_setting\x18$ \x01(\x0b\x32;.google.ads.googleads.v4.resources.Campaign.ShoppingSetting\x12K\n\x11targeting_setting\x18+ \x01(\x0b\x32\x30.google.ads.googleads.v4.common.TargetingSetting\x12\x61\n\x17geo_target_type_setting\x18/ \x01(\x0b\x32@.google.ads.googleads.v4.resources.Campaign.GeoTargetTypeSetting\x12`\n\x16local_campaign_setting\x18\x32 \x01(\x0b\x32@.google.ads.googleads.v4.resources.Campaign.LocalCampaignSetting\x12\\\n\x14\x61pp_campaign_setting\x18\x33 \x01(\x0b\x32>.google.ads.googleads.v4.resources.Campaign.AppCampaignSetting\x12\\\n\x06labels\x18\x35 \x03(\x0b\x32\x1c.google.protobuf.StringValueB.\xe0\x41\x03\xfa\x41(\n&googleads.googleapis.com/CampaignLabel\x12n\n\x0f\x65xperiment_type\x18\x11 \x01(\x0e\x32P.google.ads.googleads.v4.enums.CampaignExperimentTypeEnum.CampaignExperimentTypeB\x03\xe0\x41\x03\x12^\n\rbase_campaign\x18\x1c \x01(\x0b\x32\x1c.google.protobuf.StringValueB)\xe0\x41\x03\xfa\x41#\n!googleads.googleapis.com/Campaign\x12\x63\n\x0f\x63\x61mpaign_budget\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValueB,\xfa\x41)\n\'googleads.googleapis.com/CampaignBudget\x12n\n\x15\x62idding_strategy_type\x18\x16 \x01(\x0e\x32J.google.ads.googleads.v4.enums.BiddingStrategyTypeEnum.BiddingStrategyTypeB\x03\xe0\x41\x03\x12\x30\n\nstart_date\x18\x13 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08\x65nd_date\x18\x14 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x36\n\x10\x66inal_url_suffix\x18& \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12I\n\x0e\x66requency_caps\x18( \x03(\x0b\x32\x31.google.ads.googleads.v4.common.FrequencyCapEntry\x12}\n\x1evideo_brand_safety_suitability\x18* \x01(\x0e\x32P.google.ads.googleads.v4.enums.BrandSafetySuitabilityEnum.BrandSafetySuitabilityB\x03\xe0\x41\x03\x12O\n\rvanity_pharma\x18, \x01(\x0b\x32\x38.google.ads.googleads.v4.resources.Campaign.VanityPharma\x12\x61\n\x16selective_optimization\x18- 
\x01(\x0b\x32\x41.google.ads.googleads.v4.resources.Campaign.SelectiveOptimization\x12\x66\n\x19optimization_goal_setting\x18\x36 \x01(\x0b\x32\x43.google.ads.googleads.v4.resources.Campaign.OptimizationGoalSetting\x12Z\n\x10tracking_setting\x18. \x01(\x0b\x32;.google.ads.googleads.v4.resources.Campaign.TrackingSettingB\x03\xe0\x41\x03\x12P\n\x0cpayment_mode\x18\x34 \x01(\x0e\x32:.google.ads.googleads.v4.enums.PaymentModeEnum.PaymentMode\x12=\n\x12optimization_score\x18\x37 \x01(\x0b\x32\x1c.google.protobuf.DoubleValueB\x03\xe0\x41\x03\x12g\n\x10\x62idding_strategy\x18\x17 \x01(\x0b\x32\x1c.google.protobuf.StringValueB-\xfa\x41*\n(googleads.googleapis.com/BiddingStrategyH\x00\x12@\n\ncommission\x18\x31 \x01(\x0b\x32*.google.ads.googleads.v4.common.CommissionH\x00\x12?\n\nmanual_cpc\x18\x18 \x01(\x0b\x32).google.ads.googleads.v4.common.ManualCpcH\x00\x12?\n\nmanual_cpm\x18\x19 \x01(\x0b\x32).google.ads.googleads.v4.common.ManualCpmH\x00\x12\x44\n\nmanual_cpv\x18% \x01(\x0b\x32).google.ads.googleads.v4.common.ManualCpvB\x03\xe0\x41\x03H\x00\x12S\n\x14maximize_conversions\x18\x1e \x01(\x0b\x32\x33.google.ads.googleads.v4.common.MaximizeConversionsH\x00\x12\\\n\x19maximize_conversion_value\x18\x1f \x01(\x0b\x32\x37.google.ads.googleads.v4.common.MaximizeConversionValueH\x00\x12?\n\ntarget_cpa\x18\x1a \x01(\x0b\x32).google.ads.googleads.v4.common.TargetCpaH\x00\x12X\n\x17target_impression_share\x18\x30 \x01(\x0b\x32\x35.google.ads.googleads.v4.common.TargetImpressionShareH\x00\x12\x41\n\x0btarget_roas\x18\x1d \x01(\x0b\x32*.google.ads.googleads.v4.common.TargetRoasH\x00\x12\x43\n\x0ctarget_spend\x18\x1b \x01(\x0b\x32+.google.ads.googleads.v4.common.TargetSpendH\x00\x12\x41\n\x0bpercent_cpc\x18\" \x01(\x0b\x32*.google.ads.googleads.v4.common.PercentCpcH\x00\x12?\n\ntarget_cpm\x18) \x01(\x0b\x32).google.ads.googleads.v4.common.TargetCpmH\x00\x1a\x85\x02\n\x0fNetworkSettings\x12\x38\n\x14target_google_search\x18\x01 
\x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x15target_search_network\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12:\n\x16target_content_network\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x41\n\x1dtarget_partner_search_network\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x1aM\n\x10HotelSettingInfo\x12\x39\n\x0fhotel_center_id\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x03\xe0\x41\x05\x1a~\n\x14LocalCampaignSetting\x12\x66\n\x14location_source_type\x18\x01 \x01(\x0e\x32H.google.ads.googleads.v4.enums.LocationSourceTypeEnum.LocationSourceType\x1a\xba\x02\n\x12\x41ppCampaignSetting\x12\x8c\x01\n\x1a\x62idding_strategy_goal_type\x18\x01 \x01(\x0e\x32h.google.ads.googleads.v4.enums.AppCampaignBiddingStrategyGoalTypeEnum.AppCampaignBiddingStrategyGoalType\x12\x31\n\x06\x61pp_id\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValueB\x03\xe0\x41\x05\x12\x62\n\tapp_store\x18\x03 \x01(\x0e\x32J.google.ads.googleads.v4.enums.AppCampaignAppStoreEnum.AppCampaignAppStoreB\x03\xe0\x41\x05\x1a\x91\x02\n\x17\x44ynamicSearchAdsSetting\x12\x31\n\x0b\x64omain_name\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x33\n\rlanguage_code\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12:\n\x16use_supplied_urls_only\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12R\n\x05\x66\x65\x65\x64s\x18\x05 \x03(\x0b\x32\x1c.google.protobuf.StringValueB%\xe0\x41\x03\xfa\x41\x1f\n\x1dgoogleads.googleapis.com/Feed\x1a\x81\x01\n\x15SelectiveOptimization\x12h\n\x12\x63onversion_actions\x18\x01 \x03(\x0b\x32\x1c.google.protobuf.StringValueB.\xfa\x41+\n)googleads.googleapis.com/ConversionAction\x1a\xec\x01\n\x0fShoppingSetting\x12\x35\n\x0bmerchant_id\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x03\xe0\x41\x05\x12\x38\n\rsales_country\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValueB\x03\xe0\x41\x05\x12\x36\n\x11\x63\x61mpaign_priority\x18\x03 
\x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0c\x65nable_local\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x1aJ\n\x0fTrackingSetting\x12\x37\n\x0ctracking_url\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValueB\x03\xe0\x41\x03\x1a\xfa\x01\n\x14GeoTargetTypeSetting\x12p\n\x18positive_geo_target_type\x18\x01 \x01(\x0e\x32N.google.ads.googleads.v4.enums.PositiveGeoTargetTypeEnum.PositiveGeoTargetType\x12p\n\x18negative_geo_target_type\x18\x02 \x01(\x0e\x32N.google.ads.googleads.v4.enums.NegativeGeoTargetTypeEnum.NegativeGeoTargetType\x1a\x88\x01\n\x17OptimizationGoalSetting\x12m\n\x17optimization_goal_types\x18\x01 \x03(\x0e\x32L.google.ads.googleads.v4.enums.OptimizationGoalTypeEnum.OptimizationGoalType\x1a\xf3\x01\n\x0cVanityPharma\x12\x80\x01\n\x1evanity_pharma_display_url_mode\x18\x01 \x01(\x0e\x32X.google.ads.googleads.v4.enums.VanityPharmaDisplayUrlModeEnum.VanityPharmaDisplayUrlMode\x12`\n\x12vanity_pharma_text\x18\x02 \x01(\x0e\x32\x44.google.ads.googleads.v4.enums.VanityPharmaTextEnum.VanityPharmaText:Q\xea\x41N\n!googleads.googleapis.com/Campaign\x12)customers/{customer}/campaigns/{campaign}B\x1b\n\x19\x63\x61mpaign_bidding_strategyB\xfa\x01\n%com.google.ads.googleads.v4.resourcesB\rCampaignProtoP\x01ZJgoogle.golang.org/genproto/googleapis/ads/googleads/v4/resources;resources\xa2\x02\x03GAA\xaa\x02!Google.Ads.GoogleAds.V4.Resources\xca\x02!Google\\Ads\\GoogleAds\\V4\\Resources\xea\x02%Google::Ads::GoogleAds::V4::Resourcesb\x06proto3')
  ,
  dependencies=[google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_custom__parameter__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_frequency__cap__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_real__time__bidding__setting__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_targeting__setting__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_ad__serving__optimization__status__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_advertising__channel__sub__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_advertising__channel__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_app__campaign__app__store__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_app__campaign__bidding__strategy__goal__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_bidding__strategy__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_brand__safety__suitability__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__experiment__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__serving__status__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__status__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_location__source__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_negative__geo__target__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_optimization__goal__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_payment__mode__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_positive__geo__target__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_
_v4_dot_proto_dot_enums_dot_vanity__pharma__display__url__mode__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_vanity__pharma__text__pb2.DESCRIPTOR,google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,google_dot_api_dot_resource__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
# Descriptor for the nested message Campaign.NetworkSettings: four optional
# google.protobuf.BoolValue wrapper fields (type=11/cpp_type=10 is
# TYPE_MESSAGE) selecting which ad networks the campaign targets.
# serialized_start/serialized_end are byte offsets into DESCRIPTOR.serialized_pb.
_CAMPAIGN_NETWORKSETTINGS = _descriptor.Descriptor(
  name='NetworkSettings',
  full_name='google.ads.googleads.v4.resources.Campaign.NetworkSettings',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='target_google_search', full_name='google.ads.googleads.v4.resources.Campaign.NetworkSettings.target_google_search', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_search_network', full_name='google.ads.googleads.v4.resources.Campaign.NetworkSettings.target_search_network', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_content_network', full_name='google.ads.googleads.v4.resources.Campaign.NetworkSettings.target_content_network', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_partner_search_network', full_name='google.ads.googleads.v4.resources.Campaign.NetworkSettings.target_partner_search_network', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=5747,
  serialized_end=6008,
)
# Descriptor for the nested message Campaign.HotelSettingInfo with a single
# Int64Value wrapper field, hotel_center_id. Its serialized_options bytes
# ('\340A\005') encode a google.api.field_behavior annotation from the .proto.
_CAMPAIGN_HOTELSETTINGINFO = _descriptor.Descriptor(
  name='HotelSettingInfo',
  full_name='google.ads.googleads.v4.resources.Campaign.HotelSettingInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='hotel_center_id', full_name='google.ads.googleads.v4.resources.Campaign.HotelSettingInfo.hotel_center_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\005'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6010,
  serialized_end=6087,
)
# Descriptor for the nested message Campaign.LocalCampaignSetting with one
# enum field (type=14/cpp_type=8 is TYPE_ENUM), location_source_type; the
# enum_type reference is wired up later in the generated file.
_CAMPAIGN_LOCALCAMPAIGNSETTING = _descriptor.Descriptor(
  name='LocalCampaignSetting',
  full_name='google.ads.googleads.v4.resources.Campaign.LocalCampaignSetting',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='location_source_type', full_name='google.ads.googleads.v4.resources.Campaign.LocalCampaignSetting.location_source_type', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6089,
  serialized_end=6215,
)
# Descriptor for the nested message Campaign.AppCampaignSetting: an enum
# bidding_strategy_goal_type, a StringValue app_id, and an enum app_store.
# The '\340A\005' serialized_options bytes on app_id/app_store carry a
# google.api.field_behavior annotation copied from the .proto.
_CAMPAIGN_APPCAMPAIGNSETTING = _descriptor.Descriptor(
  name='AppCampaignSetting',
  full_name='google.ads.googleads.v4.resources.Campaign.AppCampaignSetting',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='bidding_strategy_goal_type', full_name='google.ads.googleads.v4.resources.Campaign.AppCampaignSetting.bidding_strategy_goal_type', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='app_id', full_name='google.ads.googleads.v4.resources.Campaign.AppCampaignSetting.app_id', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\005'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='app_store', full_name='google.ads.googleads.v4.resources.Campaign.AppCampaignSetting.app_store', index=2,
      number=3, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\005'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6218,
  serialized_end=6532,
)
# Descriptor for the nested message Campaign.DynamicSearchAdsSetting:
# StringValue domain_name/language_code, BoolValue use_supplied_urls_only,
# and a repeated (label=3) StringValue 'feeds' field whose serialized_options
# embed a google.api resource_reference to googleads.googleapis.com/Feed.
_CAMPAIGN_DYNAMICSEARCHADSSETTING = _descriptor.Descriptor(
  name='DynamicSearchAdsSetting',
  full_name='google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='domain_name', full_name='google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting.domain_name', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='language_code', full_name='google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting.language_code', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='use_supplied_urls_only', full_name='google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting.use_supplied_urls_only', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='feeds', full_name='google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting.feeds', index=3,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\003\372A\037\n\035googleads.googleapis.com/Feed'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6535,
  serialized_end=6808,
)
# Descriptor for the nested message Campaign.SelectiveOptimization with one
# repeated StringValue field, conversion_actions, annotated (in
# serialized_options) with a resource_reference to
# googleads.googleapis.com/ConversionAction.
_CAMPAIGN_SELECTIVEOPTIMIZATION = _descriptor.Descriptor(
  name='SelectiveOptimization',
  full_name='google.ads.googleads.v4.resources.Campaign.SelectiveOptimization',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='conversion_actions', full_name='google.ads.googleads.v4.resources.Campaign.SelectiveOptimization.conversion_actions', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\372A+\n)googleads.googleapis.com/ConversionAction'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6811,
  serialized_end=6940,
)
# Descriptor for the nested message Campaign.ShoppingSetting: Int64Value
# merchant_id, StringValue sales_country, Int32Value campaign_priority, and
# BoolValue enable_local. merchant_id and sales_country carry a
# field_behavior annotation in their serialized_options bytes.
_CAMPAIGN_SHOPPINGSETTING = _descriptor.Descriptor(
  name='ShoppingSetting',
  full_name='google.ads.googleads.v4.resources.Campaign.ShoppingSetting',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='merchant_id', full_name='google.ads.googleads.v4.resources.Campaign.ShoppingSetting.merchant_id', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\005'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='sales_country', full_name='google.ads.googleads.v4.resources.Campaign.ShoppingSetting.sales_country', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\005'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='campaign_priority', full_name='google.ads.googleads.v4.resources.Campaign.ShoppingSetting.campaign_priority', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='enable_local', full_name='google.ads.googleads.v4.resources.Campaign.ShoppingSetting.enable_local', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=6943,
  serialized_end=7179,
)
# Descriptor for the nested message Campaign.TrackingSetting with a single
# StringValue field, tracking_url (serialized_options '\340A\003' is a
# field_behavior annotation from the .proto).
_CAMPAIGN_TRACKINGSETTING = _descriptor.Descriptor(
  name='TrackingSetting',
  full_name='google.ads.googleads.v4.resources.Campaign.TrackingSetting',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='tracking_url', full_name='google.ads.googleads.v4.resources.Campaign.TrackingSetting.tracking_url', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\003'), file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7181,
  serialized_end=7255,
)
# Descriptor for the nested message Campaign.GeoTargetTypeSetting: two enum
# fields, positive_geo_target_type and negative_geo_target_type; their
# enum_type references are resolved later in the generated file.
_CAMPAIGN_GEOTARGETTYPESETTING = _descriptor.Descriptor(
  name='GeoTargetTypeSetting',
  full_name='google.ads.googleads.v4.resources.Campaign.GeoTargetTypeSetting',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='positive_geo_target_type', full_name='google.ads.googleads.v4.resources.Campaign.GeoTargetTypeSetting.positive_geo_target_type', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='negative_geo_target_type', full_name='google.ads.googleads.v4.resources.Campaign.GeoTargetTypeSetting.negative_geo_target_type', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7258,
  serialized_end=7508,
)
# Descriptor for the nested message Campaign.OptimizationGoalSetting with a
# single repeated enum field, optimization_goal_types.
_CAMPAIGN_OPTIMIZATIONGOALSETTING = _descriptor.Descriptor(
  name='OptimizationGoalSetting',
  full_name='google.ads.googleads.v4.resources.Campaign.OptimizationGoalSetting',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='optimization_goal_types', full_name='google.ads.googleads.v4.resources.Campaign.OptimizationGoalSetting.optimization_goal_types', index=0,
      number=1, type=14, cpp_type=8, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7511,
  serialized_end=7647,
)
# Generated descriptor for the nested message Campaign.VanityPharma.
# Two singular enum fields (type=14) for the vanity-pharma display URL mode
# and text; enum_type links are resolved by the wiring statements below.
# Byte offsets are fixed by the serialized file descriptor.
_CAMPAIGN_VANITYPHARMA = _descriptor.Descriptor(
  name='VanityPharma',
  full_name='google.ads.googleads.v4.resources.Campaign.VanityPharma',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='vanity_pharma_display_url_mode', full_name='google.ads.googleads.v4.resources.Campaign.VanityPharma.vanity_pharma_display_url_mode', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='vanity_pharma_text', full_name='google.ads.googleads.v4.resources.Campaign.VanityPharma.vanity_pharma_text', index=1,
      number=2, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=7650,
  serialized_end=7893,
)
# Generated top-level descriptor for the Campaign message
# (google.ads.googleads.v4.resources.Campaign).
# - 48 fields: scalar wrappers and nested-setting messages (type=11), enums
#   (type=14), and the UTF-8 resource_name string (type=9). Cross-type links
#   (message_type / enum_type) are None here and resolved by the wiring
#   statements that follow this assignment.
# - serialized_options on individual fields carry field_behavior annotations
#   (e.g. \340A\003 = OUTPUT_ONLY, \340A\005 = IMMUTABLE) and resource
#   references; do not edit these escaped byte strings.
# - nested_types lists the eleven nested *Setting/VanityPharma descriptors
#   defined above.
# - One oneof, campaign_bidding_strategy, whose member fields (bidding
#   scheme messages) are appended after construction.
_CAMPAIGN = _descriptor.Descriptor(
  name='Campaign',
  full_name='google.ads.googleads.v4.resources.Campaign',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='resource_name', full_name='google.ads.googleads.v4.resources.Campaign.resource_name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\005\372A#\n!googleads.googleapis.com/Campaign'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='id', full_name='google.ads.googleads.v4.resources.Campaign.id', index=1,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\003'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='name', full_name='google.ads.googleads.v4.resources.Campaign.name', index=2,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='status', full_name='google.ads.googleads.v4.resources.Campaign.status', index=3,
      number=5, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='serving_status', full_name='google.ads.googleads.v4.resources.Campaign.serving_status', index=4,
      number=21, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\003'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='ad_serving_optimization_status', full_name='google.ads.googleads.v4.resources.Campaign.ad_serving_optimization_status', index=5,
      number=8, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='advertising_channel_type', full_name='google.ads.googleads.v4.resources.Campaign.advertising_channel_type', index=6,
      number=9, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\005'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='advertising_channel_sub_type', full_name='google.ads.googleads.v4.resources.Campaign.advertising_channel_sub_type', index=7,
      number=10, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\005'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='tracking_url_template', full_name='google.ads.googleads.v4.resources.Campaign.tracking_url_template', index=8,
      number=11, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='url_custom_parameters', full_name='google.ads.googleads.v4.resources.Campaign.url_custom_parameters', index=9,
      number=12, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='real_time_bidding_setting', full_name='google.ads.googleads.v4.resources.Campaign.real_time_bidding_setting', index=10,
      number=39, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='network_settings', full_name='google.ads.googleads.v4.resources.Campaign.network_settings', index=11,
      number=14, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='hotel_setting', full_name='google.ads.googleads.v4.resources.Campaign.hotel_setting', index=12,
      number=32, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\005'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='dynamic_search_ads_setting', full_name='google.ads.googleads.v4.resources.Campaign.dynamic_search_ads_setting', index=13,
      number=33, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='shopping_setting', full_name='google.ads.googleads.v4.resources.Campaign.shopping_setting', index=14,
      number=36, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='targeting_setting', full_name='google.ads.googleads.v4.resources.Campaign.targeting_setting', index=15,
      number=43, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='geo_target_type_setting', full_name='google.ads.googleads.v4.resources.Campaign.geo_target_type_setting', index=16,
      number=47, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='local_campaign_setting', full_name='google.ads.googleads.v4.resources.Campaign.local_campaign_setting', index=17,
      number=50, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='app_campaign_setting', full_name='google.ads.googleads.v4.resources.Campaign.app_campaign_setting', index=18,
      number=51, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='labels', full_name='google.ads.googleads.v4.resources.Campaign.labels', index=19,
      number=53, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\003\372A(\n&googleads.googleapis.com/CampaignLabel'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='experiment_type', full_name='google.ads.googleads.v4.resources.Campaign.experiment_type', index=20,
      number=17, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\003'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='base_campaign', full_name='google.ads.googleads.v4.resources.Campaign.base_campaign', index=21,
      number=28, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\003\372A#\n!googleads.googleapis.com/Campaign'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='campaign_budget', full_name='google.ads.googleads.v4.resources.Campaign.campaign_budget', index=22,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\372A)\n\'googleads.googleapis.com/CampaignBudget'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='bidding_strategy_type', full_name='google.ads.googleads.v4.resources.Campaign.bidding_strategy_type', index=23,
      number=22, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\003'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='start_date', full_name='google.ads.googleads.v4.resources.Campaign.start_date', index=24,
      number=19, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='end_date', full_name='google.ads.googleads.v4.resources.Campaign.end_date', index=25,
      number=20, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='final_url_suffix', full_name='google.ads.googleads.v4.resources.Campaign.final_url_suffix', index=26,
      number=38, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='frequency_caps', full_name='google.ads.googleads.v4.resources.Campaign.frequency_caps', index=27,
      number=40, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='video_brand_safety_suitability', full_name='google.ads.googleads.v4.resources.Campaign.video_brand_safety_suitability', index=28,
      number=42, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\003'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='vanity_pharma', full_name='google.ads.googleads.v4.resources.Campaign.vanity_pharma', index=29,
      number=44, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='selective_optimization', full_name='google.ads.googleads.v4.resources.Campaign.selective_optimization', index=30,
      number=45, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optimization_goal_setting', full_name='google.ads.googleads.v4.resources.Campaign.optimization_goal_setting', index=31,
      number=54, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='tracking_setting', full_name='google.ads.googleads.v4.resources.Campaign.tracking_setting', index=32,
      number=46, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\003'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='payment_mode', full_name='google.ads.googleads.v4.resources.Campaign.payment_mode', index=33,
      number=52, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='optimization_score', full_name='google.ads.googleads.v4.resources.Campaign.optimization_score', index=34,
      number=55, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\003'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='bidding_strategy', full_name='google.ads.googleads.v4.resources.Campaign.bidding_strategy', index=35,
      number=23, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\372A*\n(googleads.googleapis.com/BiddingStrategy'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='commission', full_name='google.ads.googleads.v4.resources.Campaign.commission', index=36,
      number=49, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='manual_cpc', full_name='google.ads.googleads.v4.resources.Campaign.manual_cpc', index=37,
      number=24, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='manual_cpm', full_name='google.ads.googleads.v4.resources.Campaign.manual_cpm', index=38,
      number=25, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='manual_cpv', full_name='google.ads.googleads.v4.resources.Campaign.manual_cpv', index=39,
      number=37, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=_b('\340A\003'), file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='maximize_conversions', full_name='google.ads.googleads.v4.resources.Campaign.maximize_conversions', index=40,
      number=30, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='maximize_conversion_value', full_name='google.ads.googleads.v4.resources.Campaign.maximize_conversion_value', index=41,
      number=31, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_cpa', full_name='google.ads.googleads.v4.resources.Campaign.target_cpa', index=42,
      number=26, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_impression_share', full_name='google.ads.googleads.v4.resources.Campaign.target_impression_share', index=43,
      number=48, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_roas', full_name='google.ads.googleads.v4.resources.Campaign.target_roas', index=44,
      number=29, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_spend', full_name='google.ads.googleads.v4.resources.Campaign.target_spend', index=45,
      number=27, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='percent_cpc', full_name='google.ads.googleads.v4.resources.Campaign.percent_cpc', index=46,
      number=34, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='target_cpm', full_name='google.ads.googleads.v4.resources.Campaign.target_cpm', index=47,
      number=41, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      serialized_options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[_CAMPAIGN_NETWORKSETTINGS, _CAMPAIGN_HOTELSETTINGINFO, _CAMPAIGN_LOCALCAMPAIGNSETTING, _CAMPAIGN_APPCAMPAIGNSETTING, _CAMPAIGN_DYNAMICSEARCHADSSETTING, _CAMPAIGN_SELECTIVEOPTIMIZATION, _CAMPAIGN_SHOPPINGSETTING, _CAMPAIGN_TRACKINGSETTING, _CAMPAIGN_GEOTARGETTYPESETTING, _CAMPAIGN_OPTIMIZATIONGOALSETTING, _CAMPAIGN_VANITYPHARMA, ],
  enum_types=[
  ],
  serialized_options=_b('\352AN\n!googleads.googleapis.com/Campaign\022)customers/{customer}/campaigns/{campaign}'),
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    # The oneof is created empty; its member fields are appended and their
    # containing_oneof back-references set in the wiring statements below.
    _descriptor.OneofDescriptor(
      name='campaign_bidding_strategy', full_name='google.ads.googleads.v4.resources.Campaign.campaign_bidding_strategy',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=1667,
  serialized_end=8005,
)
# ---------------------------------------------------------------------------
# Generated cross-reference wiring. The Descriptor objects above are built
# with message_type/enum_type set to None; the statements below patch in the
# actual descriptor objects (wrapper types, enum descriptors, sibling nested
# messages) and set each nested type's containing_type back to _CAMPAIGN.
# ---------------------------------------------------------------------------

# Nested-setting descriptors: link field message/enum types and attach each
# nested type to its containing Campaign descriptor.
_CAMPAIGN_NETWORKSETTINGS.fields_by_name['target_google_search'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_CAMPAIGN_NETWORKSETTINGS.fields_by_name['target_search_network'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_CAMPAIGN_NETWORKSETTINGS.fields_by_name['target_content_network'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_CAMPAIGN_NETWORKSETTINGS.fields_by_name['target_partner_search_network'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_CAMPAIGN_NETWORKSETTINGS.containing_type = _CAMPAIGN
_CAMPAIGN_HOTELSETTINGINFO.fields_by_name['hotel_center_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_CAMPAIGN_HOTELSETTINGINFO.containing_type = _CAMPAIGN
_CAMPAIGN_LOCALCAMPAIGNSETTING.fields_by_name['location_source_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_location__source__type__pb2._LOCATIONSOURCETYPEENUM_LOCATIONSOURCETYPE
_CAMPAIGN_LOCALCAMPAIGNSETTING.containing_type = _CAMPAIGN
_CAMPAIGN_APPCAMPAIGNSETTING.fields_by_name['bidding_strategy_goal_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_app__campaign__bidding__strategy__goal__type__pb2._APPCAMPAIGNBIDDINGSTRATEGYGOALTYPEENUM_APPCAMPAIGNBIDDINGSTRATEGYGOALTYPE
_CAMPAIGN_APPCAMPAIGNSETTING.fields_by_name['app_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_APPCAMPAIGNSETTING.fields_by_name['app_store'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_app__campaign__app__store__pb2._APPCAMPAIGNAPPSTOREENUM_APPCAMPAIGNAPPSTORE
_CAMPAIGN_APPCAMPAIGNSETTING.containing_type = _CAMPAIGN
_CAMPAIGN_DYNAMICSEARCHADSSETTING.fields_by_name['domain_name'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_DYNAMICSEARCHADSSETTING.fields_by_name['language_code'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_DYNAMICSEARCHADSSETTING.fields_by_name['use_supplied_urls_only'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_CAMPAIGN_DYNAMICSEARCHADSSETTING.fields_by_name['feeds'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_DYNAMICSEARCHADSSETTING.containing_type = _CAMPAIGN
_CAMPAIGN_SELECTIVEOPTIMIZATION.fields_by_name['conversion_actions'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_SELECTIVEOPTIMIZATION.containing_type = _CAMPAIGN
_CAMPAIGN_SHOPPINGSETTING.fields_by_name['merchant_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_CAMPAIGN_SHOPPINGSETTING.fields_by_name['sales_country'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_SHOPPINGSETTING.fields_by_name['campaign_priority'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
_CAMPAIGN_SHOPPINGSETTING.fields_by_name['enable_local'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_CAMPAIGN_SHOPPINGSETTING.containing_type = _CAMPAIGN
_CAMPAIGN_TRACKINGSETTING.fields_by_name['tracking_url'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_TRACKINGSETTING.containing_type = _CAMPAIGN
_CAMPAIGN_GEOTARGETTYPESETTING.fields_by_name['positive_geo_target_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_positive__geo__target__type__pb2._POSITIVEGEOTARGETTYPEENUM_POSITIVEGEOTARGETTYPE
_CAMPAIGN_GEOTARGETTYPESETTING.fields_by_name['negative_geo_target_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_negative__geo__target__type__pb2._NEGATIVEGEOTARGETTYPEENUM_NEGATIVEGEOTARGETTYPE
_CAMPAIGN_GEOTARGETTYPESETTING.containing_type = _CAMPAIGN
_CAMPAIGN_OPTIMIZATIONGOALSETTING.fields_by_name['optimization_goal_types'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_optimization__goal__type__pb2._OPTIMIZATIONGOALTYPEENUM_OPTIMIZATIONGOALTYPE
_CAMPAIGN_OPTIMIZATIONGOALSETTING.containing_type = _CAMPAIGN
_CAMPAIGN_VANITYPHARMA.fields_by_name['vanity_pharma_display_url_mode'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_vanity__pharma__display__url__mode__pb2._VANITYPHARMADISPLAYURLMODEENUM_VANITYPHARMADISPLAYURLMODE
_CAMPAIGN_VANITYPHARMA.fields_by_name['vanity_pharma_text'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_vanity__pharma__text__pb2._VANITYPHARMATEXTENUM_VANITYPHARMATEXT
_CAMPAIGN_VANITYPHARMA.containing_type = _CAMPAIGN
# Campaign's own fields: resolve wrapper, enum, common-message, and
# nested-setting type references.
_CAMPAIGN.fields_by_name['id'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_CAMPAIGN.fields_by_name['name'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['status'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__status__pb2._CAMPAIGNSTATUSENUM_CAMPAIGNSTATUS
_CAMPAIGN.fields_by_name['serving_status'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__serving__status__pb2._CAMPAIGNSERVINGSTATUSENUM_CAMPAIGNSERVINGSTATUS
_CAMPAIGN.fields_by_name['ad_serving_optimization_status'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_ad__serving__optimization__status__pb2._ADSERVINGOPTIMIZATIONSTATUSENUM_ADSERVINGOPTIMIZATIONSTATUS
_CAMPAIGN.fields_by_name['advertising_channel_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_advertising__channel__type__pb2._ADVERTISINGCHANNELTYPEENUM_ADVERTISINGCHANNELTYPE
_CAMPAIGN.fields_by_name['advertising_channel_sub_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_advertising__channel__sub__type__pb2._ADVERTISINGCHANNELSUBTYPEENUM_ADVERTISINGCHANNELSUBTYPE
_CAMPAIGN.fields_by_name['tracking_url_template'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['url_custom_parameters'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_custom__parameter__pb2._CUSTOMPARAMETER
_CAMPAIGN.fields_by_name['real_time_bidding_setting'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_real__time__bidding__setting__pb2._REALTIMEBIDDINGSETTING
_CAMPAIGN.fields_by_name['network_settings'].message_type = _CAMPAIGN_NETWORKSETTINGS
_CAMPAIGN.fields_by_name['hotel_setting'].message_type = _CAMPAIGN_HOTELSETTINGINFO
_CAMPAIGN.fields_by_name['dynamic_search_ads_setting'].message_type = _CAMPAIGN_DYNAMICSEARCHADSSETTING
_CAMPAIGN.fields_by_name['shopping_setting'].message_type = _CAMPAIGN_SHOPPINGSETTING
_CAMPAIGN.fields_by_name['targeting_setting'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_targeting__setting__pb2._TARGETINGSETTING
_CAMPAIGN.fields_by_name['geo_target_type_setting'].message_type = _CAMPAIGN_GEOTARGETTYPESETTING
_CAMPAIGN.fields_by_name['local_campaign_setting'].message_type = _CAMPAIGN_LOCALCAMPAIGNSETTING
_CAMPAIGN.fields_by_name['app_campaign_setting'].message_type = _CAMPAIGN_APPCAMPAIGNSETTING
_CAMPAIGN.fields_by_name['labels'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['experiment_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__experiment__type__pb2._CAMPAIGNEXPERIMENTTYPEENUM_CAMPAIGNEXPERIMENTTYPE
_CAMPAIGN.fields_by_name['base_campaign'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['campaign_budget'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['bidding_strategy_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_bidding__strategy__type__pb2._BIDDINGSTRATEGYTYPEENUM_BIDDINGSTRATEGYTYPE
_CAMPAIGN.fields_by_name['start_date'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['end_date'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['final_url_suffix'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['frequency_caps'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_frequency__cap__pb2._FREQUENCYCAPENTRY
_CAMPAIGN.fields_by_name['video_brand_safety_suitability'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_brand__safety__suitability__pb2._BRANDSAFETYSUITABILITYENUM_BRANDSAFETYSUITABILITY
_CAMPAIGN.fields_by_name['vanity_pharma'].message_type = _CAMPAIGN_VANITYPHARMA
_CAMPAIGN.fields_by_name['selective_optimization'].message_type = _CAMPAIGN_SELECTIVEOPTIMIZATION
_CAMPAIGN.fields_by_name['optimization_goal_setting'].message_type = _CAMPAIGN_OPTIMIZATIONGOALSETTING
_CAMPAIGN.fields_by_name['tracking_setting'].message_type = _CAMPAIGN_TRACKINGSETTING
_CAMPAIGN.fields_by_name['payment_mode'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_payment__mode__pb2._PAYMENTMODEENUM_PAYMENTMODE
_CAMPAIGN.fields_by_name['optimization_score'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
_CAMPAIGN.fields_by_name['bidding_strategy'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['commission'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._COMMISSION
_CAMPAIGN.fields_by_name['manual_cpc'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._MANUALCPC
_CAMPAIGN.fields_by_name['manual_cpm'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._MANUALCPM
_CAMPAIGN.fields_by_name['manual_cpv'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._MANUALCPV
_CAMPAIGN.fields_by_name['maximize_conversions'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._MAXIMIZECONVERSIONS
_CAMPAIGN.fields_by_name['maximize_conversion_value'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._MAXIMIZECONVERSIONVALUE
_CAMPAIGN.fields_by_name['target_cpa'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._TARGETCPA
_CAMPAIGN.fields_by_name['target_impression_share'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._TARGETIMPRESSIONSHARE
_CAMPAIGN.fields_by_name['target_roas'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._TARGETROAS
_CAMPAIGN.fields_by_name['target_spend'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._TARGETSPEND
_CAMPAIGN.fields_by_name['percent_cpc'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._PERCENTCPC
_CAMPAIGN.fields_by_name['target_cpm'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._TARGETCPM
# campaign_bidding_strategy oneof membership: append each bidding-scheme
# field to the oneof and set the field's containing_oneof back-reference.
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
  _CAMPAIGN.fields_by_name['bidding_strategy'])
_CAMPAIGN.fields_by_name['bidding_strategy'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
  _CAMPAIGN.fields_by_name['commission'])
_CAMPAIGN.fields_by_name['commission'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
  _CAMPAIGN.fields_by_name['manual_cpc'])
_CAMPAIGN.fields_by_name['manual_cpc'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
  _CAMPAIGN.fields_by_name['manual_cpm'])
_CAMPAIGN.fields_by_name['manual_cpm'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
  _CAMPAIGN.fields_by_name['manual_cpv'])
_CAMPAIGN.fields_by_name['manual_cpv'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
  _CAMPAIGN.fields_by_name['maximize_conversions'])
_CAMPAIGN.fields_by_name['maximize_conversions'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
  _CAMPAIGN.fields_by_name['maximize_conversion_value'])
_CAMPAIGN.fields_by_name['maximize_conversion_value'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
  _CAMPAIGN.fields_by_name['target_cpa'])
_CAMPAIGN.fields_by_name['target_cpa'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
  _CAMPAIGN.fields_by_name['target_impression_share'])
_CAMPAIGN.fields_by_name['target_impression_share'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
  _CAMPAIGN.fields_by_name['target_roas'])
_CAMPAIGN.fields_by_name['target_roas'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
  _CAMPAIGN.fields_by_name['target_spend'])
_CAMPAIGN.fields_by_name['target_spend'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
  _CAMPAIGN.fields_by_name['percent_cpc'])
_CAMPAIGN.fields_by_name['percent_cpc'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
_CAMPAIGN.oneofs_by_name['campaign_bidding_strategy'].fields.append(
_CAMPAIGN.fields_by_name['target_cpm'])
_CAMPAIGN.fields_by_name['target_cpm'].containing_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
DESCRIPTOR.message_types_by_name['Campaign'] = _CAMPAIGN
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Campaign = _reflection.GeneratedProtocolMessageType('Campaign', (_message.Message,), dict(
NetworkSettings = _reflection.GeneratedProtocolMessageType('NetworkSettings', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_NETWORKSETTINGS,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """The network settings for the campaign.
Attributes:
target_google_search:
Whether ads will be served with google.com search results.
target_search_network:
Whether ads will be served on partner sites in the Google
Search Network (requires ``target_google_search`` to also be
``true``).
target_content_network:
Whether ads will be served on specified placements in the
Google Display Network. Placements are specified using the
Placement criterion.
target_partner_search_network:
Whether ads will be served on the Google Partner Network. This
is available only to some select Google partner accounts.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.NetworkSettings)
))
,
HotelSettingInfo = _reflection.GeneratedProtocolMessageType('HotelSettingInfo', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_HOTELSETTINGINFO,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Campaign-level settings for hotel ads.
Attributes:
hotel_center_id:
Immutable. The linked Hotel Center account.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.HotelSettingInfo)
))
,
LocalCampaignSetting = _reflection.GeneratedProtocolMessageType('LocalCampaignSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_LOCALCAMPAIGNSETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Campaign setting for local campaigns.
Attributes:
location_source_type:
The location source type for this local campaign.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.LocalCampaignSetting)
))
,
AppCampaignSetting = _reflection.GeneratedProtocolMessageType('AppCampaignSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_APPCAMPAIGNSETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Campaign-level settings for App Campaigns.
Attributes:
bidding_strategy_goal_type:
Represents the goal which the bidding strategy of this app
campaign should optimize towards.
app_id:
Immutable. A string that uniquely identifies a mobile
application.
app_store:
Immutable. The application store that distributes this
specific app.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.AppCampaignSetting)
))
,
DynamicSearchAdsSetting = _reflection.GeneratedProtocolMessageType('DynamicSearchAdsSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_DYNAMICSEARCHADSSETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """The setting for controlling Dynamic Search Ads (DSA).
Attributes:
domain_name:
The Internet domain name that this setting represents, e.g.,
"google.com" or "www.google.com".
language_code:
The language code specifying the language of the domain, e.g.,
"en".
use_supplied_urls_only:
Whether the campaign uses advertiser supplied URLs
exclusively.
feeds:
Output only. The list of page feeds associated with the
campaign.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting)
))
,
SelectiveOptimization = _reflection.GeneratedProtocolMessageType('SelectiveOptimization', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_SELECTIVEOPTIMIZATION,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Selective optimization setting for this campaign, which includes a set
of conversion actions to optimize this campaign towards.
Attributes:
conversion_actions:
The selected set of conversion actions for optimizing this
campaign.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.SelectiveOptimization)
))
,
ShoppingSetting = _reflection.GeneratedProtocolMessageType('ShoppingSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_SHOPPINGSETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """The setting for Shopping campaigns. Defines the universe of products
that can be advertised by the campaign, and how this campaign interacts
with other Shopping campaigns.
Attributes:
merchant_id:
Immutable. ID of the Merchant Center account. This field is
required for create operations. This field is immutable for
Shopping campaigns.
sales_country:
Immutable. Sales country of products to include in the
campaign. This field is required for Shopping campaigns. This
field is immutable. This field is optional for non-Shopping
campaigns, but it must be equal to 'ZZ' if set.
campaign_priority:
Priority of the campaign. Campaigns with numerically higher
priorities take precedence over those with lower priorities.
This field is required for Shopping campaigns, with values
between 0 and 2, inclusive. This field is optional for Smart
Shopping campaigns, but must be equal to 3 if set.
enable_local:
Whether to include local products.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.ShoppingSetting)
))
,
TrackingSetting = _reflection.GeneratedProtocolMessageType('TrackingSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_TRACKINGSETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Campaign-level settings for tracking information.
Attributes:
tracking_url:
Output only. The url used for dynamic tracking.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.TrackingSetting)
))
,
GeoTargetTypeSetting = _reflection.GeneratedProtocolMessageType('GeoTargetTypeSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_GEOTARGETTYPESETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Represents a collection of settings related to ads geotargeting.
Attributes:
positive_geo_target_type:
The setting used for positive geotargeting in this particular
campaign.
negative_geo_target_type:
The setting used for negative geotargeting in this particular
campaign.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.GeoTargetTypeSetting)
))
,
OptimizationGoalSetting = _reflection.GeneratedProtocolMessageType('OptimizationGoalSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_OPTIMIZATIONGOALSETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Optimization goal setting for this campaign, which includes a set of
optimization goal types.
Attributes:
optimization_goal_types:
The list of optimization goal types.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.OptimizationGoalSetting)
))
,
VanityPharma = _reflection.GeneratedProtocolMessageType('VanityPharma', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_VANITYPHARMA,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Describes how unbranded pharma ads will be displayed.
Attributes:
vanity_pharma_display_url_mode:
The display mode for vanity pharma URLs.
vanity_pharma_text:
The text that will be displayed in display URL of the text ad
when website description is the selected display mode for
vanity pharma URLs.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.VanityPharma)
))
,
DESCRIPTOR = _CAMPAIGN,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """A campaign.
Attributes:
resource_name:
Immutable. The resource name of the campaign. Campaign
resource names have the form:
``customers/{customer_id}/campaigns/{campaign_id}``
id:
Output only. The ID of the campaign.
name:
The name of the campaign. This field is required and should
not be empty when creating new campaigns. It must not contain
any null (code point 0x0), NL line feed (code point 0xA) or
carriage return (code point 0xD) characters.
status:
The status of the campaign. When a new campaign is added, the
status defaults to ENABLED.
serving_status:
Output only. The ad serving status of the campaign.
ad_serving_optimization_status:
The ad serving optimization status of the campaign.
advertising_channel_type:
Immutable. The primary serving target for ads within the
campaign. The targeting options can be refined in
``network_settings``. This field is required and should not
be empty when creating new campaigns. Can be set only when
creating campaigns. After the campaign is created, the field
can not be changed.
advertising_channel_sub_type:
Immutable. Optional refinement to
``advertising_channel_type``. Must be a valid sub-type of the
parent channel type. Can be set only when creating campaigns.
After campaign is created, the field can not be changed.
tracking_url_template:
The URL template for constructing a tracking URL.
url_custom_parameters:
The list of mappings used to substitute custom parameter tags
in a ``tracking_url_template``, ``final_urls``, or
``mobile_final_urls``.
real_time_bidding_setting:
Settings for Real-Time Bidding, a feature only available for
campaigns targeting the Ad Exchange network.
network_settings:
The network settings for the campaign.
hotel_setting:
Immutable. The hotel setting for the campaign.
dynamic_search_ads_setting:
The setting for controlling Dynamic Search Ads (DSA).
shopping_setting:
The setting for controlling Shopping campaigns.
targeting_setting:
Setting for targeting related features.
geo_target_type_setting:
The setting for ads geotargeting.
local_campaign_setting:
The setting for local campaign.
app_campaign_setting:
The setting related to App Campaign.
labels:
Output only. The resource names of labels attached to this
campaign.
experiment_type:
Output only. The type of campaign: normal, draft, or
experiment.
base_campaign:
Output only. The resource name of the base campaign of a draft
or experiment campaign. For base campaigns, this is equal to
``resource_name``. This field is read-only.
campaign_budget:
The budget of the campaign.
bidding_strategy_type:
Output only. The type of bidding strategy. A bidding strategy
can be created by setting either the bidding scheme to create
a standard bidding strategy or the ``bidding_strategy`` field
to create a portfolio bidding strategy. This field is read-
only.
start_date:
The date when campaign started. This field must not be used in
WHERE clauses.
end_date:
The date when campaign ended. This field must not be used in
WHERE clauses.
final_url_suffix:
Suffix used to append query parameters to landing pages that
are served with parallel tracking.
frequency_caps:
A list that limits how often each user will see this
campaign's ads.
video_brand_safety_suitability:
Output only. 3-Tier Brand Safety setting for the campaign.
vanity_pharma:
Describes how unbranded pharma ads will be displayed.
selective_optimization:
Selective optimization setting for this campaign, which
includes a set of conversion actions to optimize this campaign
towards.
optimization_goal_setting:
Optimization goal setting for this campaign, which includes a
set of optimization goal types.
tracking_setting:
Output only. Campaign-level settings for tracking information.
payment_mode:
Payment mode for the campaign.
optimization_score:
Output only. Optimization score of the campaign. Optimization
score is an estimate of how well a campaign is set to perform.
It ranges from 0% (0.0) to 100% (1.0), with 100% indicating
that the campaign is performing at full potential. See "About
optimization score" at https://support.google.com/google-
ads/answer/9061546. This field is read-only.
campaign_bidding_strategy:
The bidding strategy for the campaign. Must be either
portfolio (created via BiddingStrategy service) or standard,
that is embedded into the campaign.
bidding_strategy:
Portfolio bidding strategy used by campaign.
commission:
Commission is an automatic bidding strategy in which the
advertiser pays a certain portion of the conversion value.
manual_cpc:
Standard Manual CPC bidding strategy. Manual click-based
bidding where user pays per click.
manual_cpm:
Standard Manual CPM bidding strategy. Manual impression-based
bidding where user pays per thousand impressions.
manual_cpv:
Output only. A bidding strategy that pays a configurable
amount per video view.
maximize_conversions:
Standard Maximize Conversions bidding strategy that
automatically maximizes number of conversions given a daily
budget.
maximize_conversion_value:
Standard Maximize Conversion Value bidding strategy that
automatically sets bids to maximize revenue while spending
your budget.
target_cpa:
Standard Target CPA bidding strategy that automatically sets
bids to help get as many conversions as possible at the target
cost-per-acquisition (CPA) you set.
target_impression_share:
Target Impression Share bidding strategy. An automated bidding
strategy that sets bids to achieve a desired percentage of
impressions.
target_roas:
Standard Target ROAS bidding strategy that automatically
maximizes revenue while averaging a specific target return on
ad spend (ROAS).
target_spend:
Standard Target Spend bidding strategy that automatically sets
your bids to help get as many clicks as possible within your
budget.
percent_cpc:
Standard Percent Cpc bidding strategy where bids are a
fraction of the advertised price for some good or service.
target_cpm:
A bidding strategy that automatically optimizes cost per
thousand impressions.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign)
))
# Register the Campaign class and every nested settings class with the
# default symbol database so they are resolvable by full name at runtime.
_sym_db.RegisterMessage(Campaign)
_sym_db.RegisterMessage(Campaign.NetworkSettings)
_sym_db.RegisterMessage(Campaign.HotelSettingInfo)
_sym_db.RegisterMessage(Campaign.LocalCampaignSetting)
_sym_db.RegisterMessage(Campaign.AppCampaignSetting)
_sym_db.RegisterMessage(Campaign.DynamicSearchAdsSetting)
_sym_db.RegisterMessage(Campaign.SelectiveOptimization)
_sym_db.RegisterMessage(Campaign.ShoppingSetting)
_sym_db.RegisterMessage(Campaign.TrackingSetting)
_sym_db.RegisterMessage(Campaign.GeoTargetTypeSetting)
_sym_db.RegisterMessage(Campaign.OptimizationGoalSetting)
_sym_db.RegisterMessage(Campaign.VanityPharma)
# Standard protoc boilerplate: setting _options to None on descriptors that
# carry custom options (presumably the field_behavior / resource annotations
# declared in the .proto — verify against the serialized descriptor) so the
# options are re-parsed lazily from the serialized form.
DESCRIPTOR._options = None
_CAMPAIGN_HOTELSETTINGINFO.fields_by_name['hotel_center_id']._options = None
_CAMPAIGN_APPCAMPAIGNSETTING.fields_by_name['app_id']._options = None
_CAMPAIGN_APPCAMPAIGNSETTING.fields_by_name['app_store']._options = None
_CAMPAIGN_DYNAMICSEARCHADSSETTING.fields_by_name['feeds']._options = None
_CAMPAIGN_SELECTIVEOPTIMIZATION.fields_by_name['conversion_actions']._options = None
_CAMPAIGN_SHOPPINGSETTING.fields_by_name['merchant_id']._options = None
_CAMPAIGN_SHOPPINGSETTING.fields_by_name['sales_country']._options = None
_CAMPAIGN_TRACKINGSETTING.fields_by_name['tracking_url']._options = None
_CAMPAIGN.fields_by_name['resource_name']._options = None
_CAMPAIGN.fields_by_name['id']._options = None
_CAMPAIGN.fields_by_name['serving_status']._options = None
_CAMPAIGN.fields_by_name['advertising_channel_type']._options = None
_CAMPAIGN.fields_by_name['advertising_channel_sub_type']._options = None
_CAMPAIGN.fields_by_name['hotel_setting']._options = None
_CAMPAIGN.fields_by_name['labels']._options = None
_CAMPAIGN.fields_by_name['experiment_type']._options = None
_CAMPAIGN.fields_by_name['base_campaign']._options = None
_CAMPAIGN.fields_by_name['campaign_budget']._options = None
_CAMPAIGN.fields_by_name['bidding_strategy_type']._options = None
_CAMPAIGN.fields_by_name['video_brand_safety_suitability']._options = None
_CAMPAIGN.fields_by_name['tracking_setting']._options = None
_CAMPAIGN.fields_by_name['optimization_score']._options = None
_CAMPAIGN.fields_by_name['bidding_strategy']._options = None
_CAMPAIGN.fields_by_name['manual_cpv']._options = None
_CAMPAIGN._options = None
# @@protoc_insertion_point(module_scope)
| 62.503235 | 10,767 | 0.790826 |
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v4.proto.common import bidding_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2
from google.ads.google_ads.v4.proto.common import custom_parameter_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_custom__parameter__pb2
from google.ads.google_ads.v4.proto.common import frequency_cap_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_frequency__cap__pb2
from google.ads.google_ads.v4.proto.common import real_time_bidding_setting_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_real__time__bidding__setting__pb2
from google.ads.google_ads.v4.proto.common import targeting_setting_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_targeting__setting__pb2
from google.ads.google_ads.v4.proto.enums import ad_serving_optimization_status_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_ad__serving__optimization__status__pb2
from google.ads.google_ads.v4.proto.enums import advertising_channel_sub_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_advertising__channel__sub__type__pb2
from google.ads.google_ads.v4.proto.enums import advertising_channel_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_advertising__channel__type__pb2
from google.ads.google_ads.v4.proto.enums import app_campaign_app_store_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_app__campaign__app__store__pb2
from google.ads.google_ads.v4.proto.enums import app_campaign_bidding_strategy_goal_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_app__campaign__bidding__strategy__goal__type__pb2
from google.ads.google_ads.v4.proto.enums import bidding_strategy_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_bidding__strategy__type__pb2
from google.ads.google_ads.v4.proto.enums import brand_safety_suitability_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_brand__safety__suitability__pb2
from google.ads.google_ads.v4.proto.enums import campaign_experiment_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__experiment__type__pb2
from google.ads.google_ads.v4.proto.enums import campaign_serving_status_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__serving__status__pb2
from google.ads.google_ads.v4.proto.enums import campaign_status_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__status__pb2
from google.ads.google_ads.v4.proto.enums import location_source_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_location__source__type__pb2
from google.ads.google_ads.v4.proto.enums import negative_geo_target_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_negative__geo__target__type__pb2
from google.ads.google_ads.v4.proto.enums import optimization_goal_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_optimization__goal__type__pb2
from google.ads.google_ads.v4.proto.enums import payment_mode_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_payment__mode__pb2
from google.ads.google_ads.v4.proto.enums import positive_geo_target_type_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_positive__geo__target__type__pb2
from google.ads.google_ads.v4.proto.enums import vanity_pharma_display_url_mode_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_vanity__pharma__display__url__mode__pb2
from google.ads.google_ads.v4.proto.enums import vanity_pharma_text_pb2 as google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_vanity__pharma__text__pb2
from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v4/proto/resources/campaign.proto',
package='google.ads.googleads.v4.resources',
syntax='proto3',
serialized_options=_b('\n%com.google.ads.googleads.v4.resourcesB\rCampaignProtoP\001ZJgoogle.golang.org/genproto/googleapis/ads/googleads/v4/resources;resources\242\002\003GAA\252\002!Google.Ads.GoogleAds.V4.Resources\312\002!Google\\Ads\\GoogleAds\\V4\\Resources\352\002%Google::Ads::GoogleAds::V4::Resources'),
serialized_pb=_b('\n6google/ads/googleads_v4/proto/resources/campaign.proto\x12!google.ads.googleads.v4.resources\x1a\x32google/ads/googleads_v4/proto/common/bidding.proto\x1a;google/ads/googleads_v4/proto/common/custom_parameter.proto\x1a\x38google/ads/googleads_v4/proto/common/frequency_cap.proto\x1a\x44google/ads/googleads_v4/proto/common/real_time_bidding_setting.proto\x1a<google/ads/googleads_v4/proto/common/targeting_setting.proto\x1aHgoogle/ads/googleads_v4/proto/enums/ad_serving_optimization_status.proto\x1a\x46google/ads/googleads_v4/proto/enums/advertising_channel_sub_type.proto\x1a\x42google/ads/googleads_v4/proto/enums/advertising_channel_type.proto\x1a@google/ads/googleads_v4/proto/enums/app_campaign_app_store.proto\x1aQgoogle/ads/googleads_v4/proto/enums/app_campaign_bidding_strategy_goal_type.proto\x1a?google/ads/googleads_v4/proto/enums/bidding_strategy_type.proto\x1a\x42google/ads/googleads_v4/proto/enums/brand_safety_suitability.proto\x1a\x42google/ads/googleads_v4/proto/enums/campaign_experiment_type.proto\x1a\x41google/ads/googleads_v4/proto/enums/campaign_serving_status.proto\x1a\x39google/ads/googleads_v4/proto/enums/campaign_status.proto\x1a>google/ads/googleads_v4/proto/enums/location_source_type.proto\x1a\x42google/ads/googleads_v4/proto/enums/negative_geo_target_type.proto\x1a@google/ads/googleads_v4/proto/enums/optimization_goal_type.proto\x1a\x36google/ads/googleads_v4/proto/enums/payment_mode.proto\x1a\x42google/ads/googleads_v4/proto/enums/positive_geo_target_type.proto\x1aHgoogle/ads/googleads_v4/proto/enums/vanity_pharma_display_url_mode.proto\x1a<google/ads/googleads_v4/proto/enums/vanity_pharma_text.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto\"\xc2\x31\n\x08\x43\x61mpaign\x12@\n\rresource_name\x18\x01 \x01(\tB)\xe0\x41\x05\xfa\x41#\n!googleads.googleapis.com/Campaign\x12,\n\x02id\x18\x03 
\x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x03\xe0\x41\x03\x12*\n\x04name\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12P\n\x06status\x18\x05 \x01(\x0e\x32@.google.ads.googleads.v4.enums.CampaignStatusEnum.CampaignStatus\x12k\n\x0eserving_status\x18\x15 \x01(\x0e\x32N.google.ads.googleads.v4.enums.CampaignServingStatusEnum.CampaignServingStatusB\x03\xe0\x41\x03\x12\x82\x01\n\x1e\x61\x64_serving_optimization_status\x18\x08 \x01(\x0e\x32Z.google.ads.googleads.v4.enums.AdServingOptimizationStatusEnum.AdServingOptimizationStatus\x12w\n\x18\x61\x64vertising_channel_type\x18\t \x01(\x0e\x32P.google.ads.googleads.v4.enums.AdvertisingChannelTypeEnum.AdvertisingChannelTypeB\x03\xe0\x41\x05\x12\x81\x01\n\x1c\x61\x64vertising_channel_sub_type\x18\n \x01(\x0e\x32V.google.ads.googleads.v4.enums.AdvertisingChannelSubTypeEnum.AdvertisingChannelSubTypeB\x03\xe0\x41\x05\x12;\n\x15tracking_url_template\x18\x0b \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12N\n\x15url_custom_parameters\x18\x0c \x03(\x0b\x32/.google.ads.googleads.v4.common.CustomParameter\x12Y\n\x19real_time_bidding_setting\x18\' \x01(\x0b\x32\x36.google.ads.googleads.v4.common.RealTimeBiddingSetting\x12U\n\x10network_settings\x18\x0e \x01(\x0b\x32;.google.ads.googleads.v4.resources.Campaign.NetworkSettings\x12X\n\rhotel_setting\x18 \x01(\x0b\x32<.google.ads.googleads.v4.resources.Campaign.HotelSettingInfoB\x03\xe0\x41\x05\x12g\n\x1a\x64ynamic_search_ads_setting\x18! 
\x01(\x0b\x32\x43.google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting\x12U\n\x10shopping_setting\x18$ \x01(\x0b\x32;.google.ads.googleads.v4.resources.Campaign.ShoppingSetting\x12K\n\x11targeting_setting\x18+ \x01(\x0b\x32\x30.google.ads.googleads.v4.common.TargetingSetting\x12\x61\n\x17geo_target_type_setting\x18/ \x01(\x0b\x32@.google.ads.googleads.v4.resources.Campaign.GeoTargetTypeSetting\x12`\n\x16local_campaign_setting\x18\x32 \x01(\x0b\x32@.google.ads.googleads.v4.resources.Campaign.LocalCampaignSetting\x12\\\n\x14\x61pp_campaign_setting\x18\x33 \x01(\x0b\x32>.google.ads.googleads.v4.resources.Campaign.AppCampaignSetting\x12\\\n\x06labels\x18\x35 \x03(\x0b\x32\x1c.google.protobuf.StringValueB.\xe0\x41\x03\xfa\x41(\n&googleads.googleapis.com/CampaignLabel\x12n\n\x0f\x65xperiment_type\x18\x11 \x01(\x0e\x32P.google.ads.googleads.v4.enums.CampaignExperimentTypeEnum.CampaignExperimentTypeB\x03\xe0\x41\x03\x12^\n\rbase_campaign\x18\x1c \x01(\x0b\x32\x1c.google.protobuf.StringValueB)\xe0\x41\x03\xfa\x41#\n!googleads.googleapis.com/Campaign\x12\x63\n\x0f\x63\x61mpaign_budget\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.StringValueB,\xfa\x41)\n\'googleads.googleapis.com/CampaignBudget\x12n\n\x15\x62idding_strategy_type\x18\x16 \x01(\x0e\x32J.google.ads.googleads.v4.enums.BiddingStrategyTypeEnum.BiddingStrategyTypeB\x03\xe0\x41\x03\x12\x30\n\nstart_date\x18\x13 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12.\n\x08\x65nd_date\x18\x14 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x36\n\x10\x66inal_url_suffix\x18& \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12I\n\x0e\x66requency_caps\x18( \x03(\x0b\x32\x31.google.ads.googleads.v4.common.FrequencyCapEntry\x12}\n\x1evideo_brand_safety_suitability\x18* \x01(\x0e\x32P.google.ads.googleads.v4.enums.BrandSafetySuitabilityEnum.BrandSafetySuitabilityB\x03\xe0\x41\x03\x12O\n\rvanity_pharma\x18, \x01(\x0b\x32\x38.google.ads.googleads.v4.resources.Campaign.VanityPharma\x12\x61\n\x16selective_optimization\x18- 
\x01(\x0b\x32\x41.google.ads.googleads.v4.resources.Campaign.SelectiveOptimization\x12\x66\n\x19optimization_goal_setting\x18\x36 \x01(\x0b\x32\x43.google.ads.googleads.v4.resources.Campaign.OptimizationGoalSetting\x12Z\n\x10tracking_setting\x18. \x01(\x0b\x32;.google.ads.googleads.v4.resources.Campaign.TrackingSettingB\x03\xe0\x41\x03\x12P\n\x0cpayment_mode\x18\x34 \x01(\x0e\x32:.google.ads.googleads.v4.enums.PaymentModeEnum.PaymentMode\x12=\n\x12optimization_score\x18\x37 \x01(\x0b\x32\x1c.google.protobuf.DoubleValueB\x03\xe0\x41\x03\x12g\n\x10\x62idding_strategy\x18\x17 \x01(\x0b\x32\x1c.google.protobuf.StringValueB-\xfa\x41*\n(googleads.googleapis.com/BiddingStrategyH\x00\x12@\n\ncommission\x18\x31 \x01(\x0b\x32*.google.ads.googleads.v4.common.CommissionH\x00\x12?\n\nmanual_cpc\x18\x18 \x01(\x0b\x32).google.ads.googleads.v4.common.ManualCpcH\x00\x12?\n\nmanual_cpm\x18\x19 \x01(\x0b\x32).google.ads.googleads.v4.common.ManualCpmH\x00\x12\x44\n\nmanual_cpv\x18% \x01(\x0b\x32).google.ads.googleads.v4.common.ManualCpvB\x03\xe0\x41\x03H\x00\x12S\n\x14maximize_conversions\x18\x1e \x01(\x0b\x32\x33.google.ads.googleads.v4.common.MaximizeConversionsH\x00\x12\\\n\x19maximize_conversion_value\x18\x1f \x01(\x0b\x32\x37.google.ads.googleads.v4.common.MaximizeConversionValueH\x00\x12?\n\ntarget_cpa\x18\x1a \x01(\x0b\x32).google.ads.googleads.v4.common.TargetCpaH\x00\x12X\n\x17target_impression_share\x18\x30 \x01(\x0b\x32\x35.google.ads.googleads.v4.common.TargetImpressionShareH\x00\x12\x41\n\x0btarget_roas\x18\x1d \x01(\x0b\x32*.google.ads.googleads.v4.common.TargetRoasH\x00\x12\x43\n\x0ctarget_spend\x18\x1b \x01(\x0b\x32+.google.ads.googleads.v4.common.TargetSpendH\x00\x12\x41\n\x0bpercent_cpc\x18\" \x01(\x0b\x32*.google.ads.googleads.v4.common.PercentCpcH\x00\x12?\n\ntarget_cpm\x18) \x01(\x0b\x32).google.ads.googleads.v4.common.TargetCpmH\x00\x1a\x85\x02\n\x0fNetworkSettings\x12\x38\n\x14target_google_search\x18\x01 
\x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x39\n\x15target_search_network\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12:\n\x16target_content_network\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x41\n\x1dtarget_partner_search_network\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x1aM\n\x10HotelSettingInfo\x12\x39\n\x0fhotel_center_id\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x03\xe0\x41\x05\x1a~\n\x14LocalCampaignSetting\x12\x66\n\x14location_source_type\x18\x01 \x01(\x0e\x32H.google.ads.googleads.v4.enums.LocationSourceTypeEnum.LocationSourceType\x1a\xba\x02\n\x12\x41ppCampaignSetting\x12\x8c\x01\n\x1a\x62idding_strategy_goal_type\x18\x01 \x01(\x0e\x32h.google.ads.googleads.v4.enums.AppCampaignBiddingStrategyGoalTypeEnum.AppCampaignBiddingStrategyGoalType\x12\x31\n\x06\x61pp_id\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValueB\x03\xe0\x41\x05\x12\x62\n\tapp_store\x18\x03 \x01(\x0e\x32J.google.ads.googleads.v4.enums.AppCampaignAppStoreEnum.AppCampaignAppStoreB\x03\xe0\x41\x05\x1a\x91\x02\n\x17\x44ynamicSearchAdsSetting\x12\x31\n\x0b\x64omain_name\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12\x33\n\rlanguage_code\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValue\x12:\n\x16use_supplied_urls_only\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12R\n\x05\x66\x65\x65\x64s\x18\x05 \x03(\x0b\x32\x1c.google.protobuf.StringValueB%\xe0\x41\x03\xfa\x41\x1f\n\x1dgoogleads.googleapis.com/Feed\x1a\x81\x01\n\x15SelectiveOptimization\x12h\n\x12\x63onversion_actions\x18\x01 \x03(\x0b\x32\x1c.google.protobuf.StringValueB.\xfa\x41+\n)googleads.googleapis.com/ConversionAction\x1a\xec\x01\n\x0fShoppingSetting\x12\x35\n\x0bmerchant_id\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x03\xe0\x41\x05\x12\x38\n\rsales_country\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.StringValueB\x03\xe0\x41\x05\x12\x36\n\x11\x63\x61mpaign_priority\x18\x03 
\x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0c\x65nable_local\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x1aJ\n\x0fTrackingSetting\x12\x37\n\x0ctracking_url\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.StringValueB\x03\xe0\x41\x03\x1a\xfa\x01\n\x14GeoTargetTypeSetting\x12p\n\x18positive_geo_target_type\x18\x01 \x01(\x0e\x32N.google.ads.googleads.v4.enums.PositiveGeoTargetTypeEnum.PositiveGeoTargetType\x12p\n\x18negative_geo_target_type\x18\x02 \x01(\x0e\x32N.google.ads.googleads.v4.enums.NegativeGeoTargetTypeEnum.NegativeGeoTargetType\x1a\x88\x01\n\x17OptimizationGoalSetting\x12m\n\x17optimization_goal_types\x18\x01 \x03(\x0e\x32L.google.ads.googleads.v4.enums.OptimizationGoalTypeEnum.OptimizationGoalType\x1a\xf3\x01\n\x0cVanityPharma\x12\x80\x01\n\x1evanity_pharma_display_url_mode\x18\x01 \x01(\x0e\x32X.google.ads.googleads.v4.enums.VanityPharmaDisplayUrlModeEnum.VanityPharmaDisplayUrlMode\x12`\n\x12vanity_pharma_text\x18\x02 \x01(\x0e\x32\x44.google.ads.googleads.v4.enums.VanityPharmaTextEnum.VanityPharmaText:Q\xea\x41N\n!googleads.googleapis.com/Campaign\x12)customers/{customer}/campaigns/{campaign}B\x1b\n\x19\x63\x61mpaign_bidding_strategyB\xfa\x01\n%com.google.ads.googleads.v4.resourcesB\rCampaignProtoP\x01ZJgoogle.golang.org/genproto/googleapis/ads/googleads/v4/resources;resources\xa2\x02\x03GAA\xaa\x02!Google.Ads.GoogleAds.V4.Resources\xca\x02!Google\\Ads\\GoogleAds\\V4\\Resources\xea\x02%Google::Ads::GoogleAds::V4::Resourcesb\x06proto3')
,
dependencies=[google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_custom__parameter__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_frequency__cap__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_real__time__bidding__setting__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_targeting__setting__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_ad__serving__optimization__status__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_advertising__channel__sub__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_advertising__channel__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_app__campaign__app__store__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_app__campaign__bidding__strategy__goal__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_bidding__strategy__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_brand__safety__suitability__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__experiment__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__serving__status__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__status__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_location__source__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_negative__geo__target__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_optimization__goal__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_payment__mode__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_positive__geo__target__type__pb2.DESCRIPTOR,google_dot_ads_dot_googleads_
_v4_dot_proto_dot_enums_dot_vanity__pharma__display__url__mode__pb2.DESCRIPTOR,google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_vanity__pharma__text__pb2.DESCRIPTOR,google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,google_dot_api_dot_resource__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
# Protoc-generated descriptor for the nested message
# google.ads.googleads.v4.resources.Campaign.NetworkSettings.
# Machine-generated protobuf metadata -- do not edit by hand.
# All four fields are type=11/cpp_type=10 (singular message fields; presumably
# google.protobuf.BoolValue wrappers per the serialized proto -- confirm there).
# serialized_start/serialized_end are byte offsets into DESCRIPTOR.serialized_pb.
_CAMPAIGN_NETWORKSETTINGS = _descriptor.Descriptor(
name='NetworkSettings',
full_name='google.ads.googleads.v4.resources.Campaign.NetworkSettings',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='target_google_search', full_name='google.ads.googleads.v4.resources.Campaign.NetworkSettings.target_google_search', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_search_network', full_name='google.ads.googleads.v4.resources.Campaign.NetworkSettings.target_search_network', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_content_network', full_name='google.ads.googleads.v4.resources.Campaign.NetworkSettings.target_content_network', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_partner_search_network', full_name='google.ads.googleads.v4.resources.Campaign.NetworkSettings.target_partner_search_network', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5747,
serialized_end=6008,
)
# Protoc-generated descriptor for the nested message
# google.ads.googleads.v4.resources.Campaign.HotelSettingInfo.
# Machine-generated protobuf metadata -- do not edit by hand.
# Single message-typed field (hotel_center_id); the option bytes
# '\340A\005' are the google.api.field_behavior extension (value 5 = IMMUTABLE).
_CAMPAIGN_HOTELSETTINGINFO = _descriptor.Descriptor(
name='HotelSettingInfo',
full_name='google.ads.googleads.v4.resources.Campaign.HotelSettingInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='hotel_center_id', full_name='google.ads.googleads.v4.resources.Campaign.HotelSettingInfo.hotel_center_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\005'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6010,
serialized_end=6087,
)
# Protoc-generated descriptor for the nested message
# google.ads.googleads.v4.resources.Campaign.LocalCampaignSetting.
# Machine-generated protobuf metadata -- do not edit by hand.
# location_source_type is an enum field (type=14, cpp_type=8) with the
# proto3 default value 0 (the enum's UNSPECIFIED entry, by convention).
_CAMPAIGN_LOCALCAMPAIGNSETTING = _descriptor.Descriptor(
name='LocalCampaignSetting',
full_name='google.ads.googleads.v4.resources.Campaign.LocalCampaignSetting',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='location_source_type', full_name='google.ads.googleads.v4.resources.Campaign.LocalCampaignSetting.location_source_type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6089,
serialized_end=6215,
)
# Protoc-generated descriptor for the nested message
# google.ads.googleads.v4.resources.Campaign.AppCampaignSetting.
# Machine-generated protobuf metadata -- do not edit by hand.
# app_id and app_store carry option bytes '\340A\005'
# (google.api.field_behavior = IMMUTABLE).
_CAMPAIGN_APPCAMPAIGNSETTING = _descriptor.Descriptor(
name='AppCampaignSetting',
full_name='google.ads.googleads.v4.resources.Campaign.AppCampaignSetting',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='bidding_strategy_goal_type', full_name='google.ads.googleads.v4.resources.Campaign.AppCampaignSetting.bidding_strategy_goal_type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='app_id', full_name='google.ads.googleads.v4.resources.Campaign.AppCampaignSetting.app_id', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\005'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='app_store', full_name='google.ads.googleads.v4.resources.Campaign.AppCampaignSetting.app_store', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\005'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6218,
serialized_end=6532,
)
# Protoc-generated descriptor for the nested message
# google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting.
# Machine-generated protobuf metadata -- do not edit by hand.
# 'feeds' is repeated (label=3) and carries option bytes for
# google.api.field_behavior OUTPUT_ONLY ('\340A\003') plus a
# google.api.resource_reference to 'googleads.googleapis.com/Feed'.
_CAMPAIGN_DYNAMICSEARCHADSSETTING = _descriptor.Descriptor(
name='DynamicSearchAdsSetting',
full_name='google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='domain_name', full_name='google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting.domain_name', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='language_code', full_name='google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting.language_code', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='use_supplied_urls_only', full_name='google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting.use_supplied_urls_only', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='feeds', full_name='google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting.feeds', index=3,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\003\372A\037\n\035googleads.googleapis.com/Feed'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6535,
serialized_end=6808,
)
# Protoc-generated descriptor for the nested message
# google.ads.googleads.v4.resources.Campaign.SelectiveOptimization.
# Machine-generated protobuf metadata -- do not edit by hand.
# conversion_actions is a repeated (label=3) message field whose option bytes
# encode a google.api.resource_reference to
# 'googleads.googleapis.com/ConversionAction'.
_CAMPAIGN_SELECTIVEOPTIMIZATION = _descriptor.Descriptor(
name='SelectiveOptimization',
full_name='google.ads.googleads.v4.resources.Campaign.SelectiveOptimization',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='conversion_actions', full_name='google.ads.googleads.v4.resources.Campaign.SelectiveOptimization.conversion_actions', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\372A+\n)googleads.googleapis.com/ConversionAction'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6811,
serialized_end=6940,
)
# Protoc-generated descriptor for the nested message
# google.ads.googleads.v4.resources.Campaign.ShoppingSetting.
# Machine-generated protobuf metadata -- do not edit by hand.
# merchant_id and sales_country carry '\340A\005'
# (google.api.field_behavior = IMMUTABLE).
_CAMPAIGN_SHOPPINGSETTING = _descriptor.Descriptor(
name='ShoppingSetting',
full_name='google.ads.googleads.v4.resources.Campaign.ShoppingSetting',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='merchant_id', full_name='google.ads.googleads.v4.resources.Campaign.ShoppingSetting.merchant_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\005'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sales_country', full_name='google.ads.googleads.v4.resources.Campaign.ShoppingSetting.sales_country', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\005'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='campaign_priority', full_name='google.ads.googleads.v4.resources.Campaign.ShoppingSetting.campaign_priority', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='enable_local', full_name='google.ads.googleads.v4.resources.Campaign.ShoppingSetting.enable_local', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6943,
serialized_end=7179,
)
# Protoc-generated descriptor for the nested message
# google.ads.googleads.v4.resources.Campaign.TrackingSetting.
# Machine-generated protobuf metadata -- do not edit by hand.
# tracking_url carries '\340A\003' (google.api.field_behavior = OUTPUT_ONLY).
_CAMPAIGN_TRACKINGSETTING = _descriptor.Descriptor(
name='TrackingSetting',
full_name='google.ads.googleads.v4.resources.Campaign.TrackingSetting',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tracking_url', full_name='google.ads.googleads.v4.resources.Campaign.TrackingSetting.tracking_url', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\003'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7181,
serialized_end=7255,
)
# Protoc-generated descriptor for the nested message
# google.ads.googleads.v4.resources.Campaign.GeoTargetTypeSetting.
# Machine-generated protobuf metadata -- do not edit by hand.
# Both fields are singular enums (type=14, cpp_type=8) with proto3 default 0.
_CAMPAIGN_GEOTARGETTYPESETTING = _descriptor.Descriptor(
name='GeoTargetTypeSetting',
full_name='google.ads.googleads.v4.resources.Campaign.GeoTargetTypeSetting',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='positive_geo_target_type', full_name='google.ads.googleads.v4.resources.Campaign.GeoTargetTypeSetting.positive_geo_target_type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='negative_geo_target_type', full_name='google.ads.googleads.v4.resources.Campaign.GeoTargetTypeSetting.negative_geo_target_type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7258,
serialized_end=7508,
)
# Protoc-generated descriptor for the nested message
# google.ads.googleads.v4.resources.Campaign.OptimizationGoalSetting.
# Machine-generated protobuf metadata -- do not edit by hand.
# optimization_goal_types is a repeated enum field (type=14, label=3).
_CAMPAIGN_OPTIMIZATIONGOALSETTING = _descriptor.Descriptor(
name='OptimizationGoalSetting',
full_name='google.ads.googleads.v4.resources.Campaign.OptimizationGoalSetting',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='optimization_goal_types', full_name='google.ads.googleads.v4.resources.Campaign.OptimizationGoalSetting.optimization_goal_types', index=0,
number=1, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7511,
serialized_end=7647,
)
# Protoc-generated descriptor for the nested message
# google.ads.googleads.v4.resources.Campaign.VanityPharma.
# Machine-generated protobuf metadata -- do not edit by hand.
# Both fields are singular enums (type=14, cpp_type=8) with proto3 default 0.
_CAMPAIGN_VANITYPHARMA = _descriptor.Descriptor(
name='VanityPharma',
full_name='google.ads.googleads.v4.resources.Campaign.VanityPharma',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vanity_pharma_display_url_mode', full_name='google.ads.googleads.v4.resources.Campaign.VanityPharma.vanity_pharma_display_url_mode', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='vanity_pharma_text', full_name='google.ads.googleads.v4.resources.Campaign.VanityPharma.vanity_pharma_text', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7650,
serialized_end=7893,
)
_CAMPAIGN = _descriptor.Descriptor(
name='Campaign',
full_name='google.ads.googleads.v4.resources.Campaign',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='resource_name', full_name='google.ads.googleads.v4.resources.Campaign.resource_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\005\372A#\n!googleads.googleapis.com/Campaign'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id', full_name='google.ads.googleads.v4.resources.Campaign.id', index=1,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\003'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='google.ads.googleads.v4.resources.Campaign.name', index=2,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='google.ads.googleads.v4.resources.Campaign.status', index=3,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='serving_status', full_name='google.ads.googleads.v4.resources.Campaign.serving_status', index=4,
number=21, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\003'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ad_serving_optimization_status', full_name='google.ads.googleads.v4.resources.Campaign.ad_serving_optimization_status', index=5,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='advertising_channel_type', full_name='google.ads.googleads.v4.resources.Campaign.advertising_channel_type', index=6,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\005'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='advertising_channel_sub_type', full_name='google.ads.googleads.v4.resources.Campaign.advertising_channel_sub_type', index=7,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\005'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tracking_url_template', full_name='google.ads.googleads.v4.resources.Campaign.tracking_url_template', index=8,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='url_custom_parameters', full_name='google.ads.googleads.v4.resources.Campaign.url_custom_parameters', index=9,
number=12, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='real_time_bidding_setting', full_name='google.ads.googleads.v4.resources.Campaign.real_time_bidding_setting', index=10,
number=39, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='network_settings', full_name='google.ads.googleads.v4.resources.Campaign.network_settings', index=11,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hotel_setting', full_name='google.ads.googleads.v4.resources.Campaign.hotel_setting', index=12,
number=32, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\005'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dynamic_search_ads_setting', full_name='google.ads.googleads.v4.resources.Campaign.dynamic_search_ads_setting', index=13,
number=33, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='shopping_setting', full_name='google.ads.googleads.v4.resources.Campaign.shopping_setting', index=14,
number=36, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='targeting_setting', full_name='google.ads.googleads.v4.resources.Campaign.targeting_setting', index=15,
number=43, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='geo_target_type_setting', full_name='google.ads.googleads.v4.resources.Campaign.geo_target_type_setting', index=16,
number=47, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='local_campaign_setting', full_name='google.ads.googleads.v4.resources.Campaign.local_campaign_setting', index=17,
number=50, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='app_campaign_setting', full_name='google.ads.googleads.v4.resources.Campaign.app_campaign_setting', index=18,
number=51, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='labels', full_name='google.ads.googleads.v4.resources.Campaign.labels', index=19,
number=53, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\003\372A(\n&googleads.googleapis.com/CampaignLabel'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='experiment_type', full_name='google.ads.googleads.v4.resources.Campaign.experiment_type', index=20,
number=17, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\003'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='base_campaign', full_name='google.ads.googleads.v4.resources.Campaign.base_campaign', index=21,
number=28, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\003\372A#\n!googleads.googleapis.com/Campaign'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='campaign_budget', full_name='google.ads.googleads.v4.resources.Campaign.campaign_budget', index=22,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\372A)\n\'googleads.googleapis.com/CampaignBudget'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidding_strategy_type', full_name='google.ads.googleads.v4.resources.Campaign.bidding_strategy_type', index=23,
number=22, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\003'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='start_date', full_name='google.ads.googleads.v4.resources.Campaign.start_date', index=24,
number=19, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='end_date', full_name='google.ads.googleads.v4.resources.Campaign.end_date', index=25,
number=20, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='final_url_suffix', full_name='google.ads.googleads.v4.resources.Campaign.final_url_suffix', index=26,
number=38, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='frequency_caps', full_name='google.ads.googleads.v4.resources.Campaign.frequency_caps', index=27,
number=40, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='video_brand_safety_suitability', full_name='google.ads.googleads.v4.resources.Campaign.video_brand_safety_suitability', index=28,
number=42, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\003'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='vanity_pharma', full_name='google.ads.googleads.v4.resources.Campaign.vanity_pharma', index=29,
number=44, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='selective_optimization', full_name='google.ads.googleads.v4.resources.Campaign.selective_optimization', index=30,
number=45, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='optimization_goal_setting', full_name='google.ads.googleads.v4.resources.Campaign.optimization_goal_setting', index=31,
number=54, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tracking_setting', full_name='google.ads.googleads.v4.resources.Campaign.tracking_setting', index=32,
number=46, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\003'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='payment_mode', full_name='google.ads.googleads.v4.resources.Campaign.payment_mode', index=33,
number=52, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='optimization_score', full_name='google.ads.googleads.v4.resources.Campaign.optimization_score', index=34,
number=55, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\003'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bidding_strategy', full_name='google.ads.googleads.v4.resources.Campaign.bidding_strategy', index=35,
number=23, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\372A*\n(googleads.googleapis.com/BiddingStrategy'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commission', full_name='google.ads.googleads.v4.resources.Campaign.commission', index=36,
number=49, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='manual_cpc', full_name='google.ads.googleads.v4.resources.Campaign.manual_cpc', index=37,
number=24, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='manual_cpm', full_name='google.ads.googleads.v4.resources.Campaign.manual_cpm', index=38,
number=25, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='manual_cpv', full_name='google.ads.googleads.v4.resources.Campaign.manual_cpv', index=39,
number=37, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\003'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='maximize_conversions', full_name='google.ads.googleads.v4.resources.Campaign.maximize_conversions', index=40,
number=30, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='maximize_conversion_value', full_name='google.ads.googleads.v4.resources.Campaign.maximize_conversion_value', index=41,
number=31, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_cpa', full_name='google.ads.googleads.v4.resources.Campaign.target_cpa', index=42,
number=26, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_impression_share', full_name='google.ads.googleads.v4.resources.Campaign.target_impression_share', index=43,
number=48, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_roas', full_name='google.ads.googleads.v4.resources.Campaign.target_roas', index=44,
number=29, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_spend', full_name='google.ads.googleads.v4.resources.Campaign.target_spend', index=45,
number=27, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='percent_cpc', full_name='google.ads.googleads.v4.resources.Campaign.percent_cpc', index=46,
number=34, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='target_cpm', full_name='google.ads.googleads.v4.resources.Campaign.target_cpm', index=47,
number=41, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_CAMPAIGN_NETWORKSETTINGS, _CAMPAIGN_HOTELSETTINGINFO, _CAMPAIGN_LOCALCAMPAIGNSETTING, _CAMPAIGN_APPCAMPAIGNSETTING, _CAMPAIGN_DYNAMICSEARCHADSSETTING, _CAMPAIGN_SELECTIVEOPTIMIZATION, _CAMPAIGN_SHOPPINGSETTING, _CAMPAIGN_TRACKINGSETTING, _CAMPAIGN_GEOTARGETTYPESETTING, _CAMPAIGN_OPTIMIZATIONGOALSETTING, _CAMPAIGN_VANITYPHARMA, ],
enum_types=[
],
serialized_options=_b('\352AN\n!googleads.googleapis.com/Campaign\022)customers/{customer}/campaigns/{campaign}'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='campaign_bidding_strategy', full_name='google.ads.googleads.v4.resources.Campaign.campaign_bidding_strategy',
index=0, containing_type=None, fields=[]),
],
serialized_start=1667,
serialized_end=8005,
)
# --- Post-construction wiring for Campaign's nested-setting descriptors ---
# protoc emits the Descriptor objects first with message_type/enum_type left
# as None (see the FieldDescriptor constructions above), then patches the
# cross-references here once every referenced descriptor exists.  Each nested
# type also gets its containing_type set back to _CAMPAIGN.
# NOTE(review): generated code — do not edit by hand; regenerate from the
# .proto definition instead.

# Campaign.NetworkSettings: all four targeting flags are google.protobuf.BoolValue wrappers.
_CAMPAIGN_NETWORKSETTINGS.fields_by_name['target_google_search'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_CAMPAIGN_NETWORKSETTINGS.fields_by_name['target_search_network'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_CAMPAIGN_NETWORKSETTINGS.fields_by_name['target_content_network'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_CAMPAIGN_NETWORKSETTINGS.fields_by_name['target_partner_search_network'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_CAMPAIGN_NETWORKSETTINGS.containing_type = _CAMPAIGN
# Campaign.HotelSettingInfo: linked Hotel Center account id (Int64Value wrapper).
_CAMPAIGN_HOTELSETTINGINFO.fields_by_name['hotel_center_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_CAMPAIGN_HOTELSETTINGINFO.containing_type = _CAMPAIGN
# Campaign.LocalCampaignSetting: enum-typed location source.
_CAMPAIGN_LOCALCAMPAIGNSETTING.fields_by_name['location_source_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_location__source__type__pb2._LOCATIONSOURCETYPEENUM_LOCATIONSOURCETYPE
_CAMPAIGN_LOCALCAMPAIGNSETTING.containing_type = _CAMPAIGN
# Campaign.AppCampaignSetting: goal-type / app-store enums plus app_id string wrapper.
_CAMPAIGN_APPCAMPAIGNSETTING.fields_by_name['bidding_strategy_goal_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_app__campaign__bidding__strategy__goal__type__pb2._APPCAMPAIGNBIDDINGSTRATEGYGOALTYPEENUM_APPCAMPAIGNBIDDINGSTRATEGYGOALTYPE
_CAMPAIGN_APPCAMPAIGNSETTING.fields_by_name['app_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_APPCAMPAIGNSETTING.fields_by_name['app_store'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_app__campaign__app__store__pb2._APPCAMPAIGNAPPSTOREENUM_APPCAMPAIGNAPPSTORE
_CAMPAIGN_APPCAMPAIGNSETTING.containing_type = _CAMPAIGN
# Campaign.DynamicSearchAdsSetting: string/bool wrapper fields for the DSA configuration.
_CAMPAIGN_DYNAMICSEARCHADSSETTING.fields_by_name['domain_name'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_DYNAMICSEARCHADSSETTING.fields_by_name['language_code'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_DYNAMICSEARCHADSSETTING.fields_by_name['use_supplied_urls_only'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_CAMPAIGN_DYNAMICSEARCHADSSETTING.fields_by_name['feeds'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_DYNAMICSEARCHADSSETTING.containing_type = _CAMPAIGN
# Campaign.SelectiveOptimization: repeated conversion-action resource names.
_CAMPAIGN_SELECTIVEOPTIMIZATION.fields_by_name['conversion_actions'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_SELECTIVEOPTIMIZATION.containing_type = _CAMPAIGN
# Campaign.ShoppingSetting: merchant id, sales country, priority and local flag.
_CAMPAIGN_SHOPPINGSETTING.fields_by_name['merchant_id'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_CAMPAIGN_SHOPPINGSETTING.fields_by_name['sales_country'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_SHOPPINGSETTING.fields_by_name['campaign_priority'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
_CAMPAIGN_SHOPPINGSETTING.fields_by_name['enable_local'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE
_CAMPAIGN_SHOPPINGSETTING.containing_type = _CAMPAIGN
# Campaign.TrackingSetting: output-only dynamic tracking URL.
_CAMPAIGN_TRACKINGSETTING.fields_by_name['tracking_url'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN_TRACKINGSETTING.containing_type = _CAMPAIGN
# Campaign.GeoTargetTypeSetting: positive/negative geo-target enums.
_CAMPAIGN_GEOTARGETTYPESETTING.fields_by_name['positive_geo_target_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_positive__geo__target__type__pb2._POSITIVEGEOTARGETTYPEENUM_POSITIVEGEOTARGETTYPE
_CAMPAIGN_GEOTARGETTYPESETTING.fields_by_name['negative_geo_target_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_negative__geo__target__type__pb2._NEGATIVEGEOTARGETTYPEENUM_NEGATIVEGEOTARGETTYPE
_CAMPAIGN_GEOTARGETTYPESETTING.containing_type = _CAMPAIGN
# Campaign.OptimizationGoalSetting: repeated optimization-goal-type enum.
_CAMPAIGN_OPTIMIZATIONGOALSETTING.fields_by_name['optimization_goal_types'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_optimization__goal__type__pb2._OPTIMIZATIONGOALTYPEENUM_OPTIMIZATIONGOALTYPE
_CAMPAIGN_OPTIMIZATIONGOALSETTING.containing_type = _CAMPAIGN
# Campaign.VanityPharma: display-url-mode and text enums.
_CAMPAIGN_VANITYPHARMA.fields_by_name['vanity_pharma_display_url_mode'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_vanity__pharma__display__url__mode__pb2._VANITYPHARMADISPLAYURLMODEENUM_VANITYPHARMADISPLAYURLMODE
_CAMPAIGN_VANITYPHARMA.fields_by_name['vanity_pharma_text'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_vanity__pharma__text__pb2._VANITYPHARMATEXTENUM_VANITYPHARMATEXT
_CAMPAIGN_VANITYPHARMA.containing_type = _CAMPAIGN
# --- Wiring of _CAMPAIGN's own fields to their value descriptors ---
# As with the nested settings above, protoc constructs the FieldDescriptors
# with message_type/enum_type=None and resolves the references here.
# Scalar-looking fields (id, name, dates, ...) are google.protobuf wrapper
# messages (v4 API style); status/type fields point at the generated enum
# descriptors; setting fields point at the nested-type descriptors.
# NOTE(review): generated code — regenerate from the .proto rather than edit.

# Identity and status.
_CAMPAIGN.fields_by_name['id'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT64VALUE
_CAMPAIGN.fields_by_name['name'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['status'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__status__pb2._CAMPAIGNSTATUSENUM_CAMPAIGNSTATUS
_CAMPAIGN.fields_by_name['serving_status'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__serving__status__pb2._CAMPAIGNSERVINGSTATUSENUM_CAMPAIGNSERVINGSTATUS
_CAMPAIGN.fields_by_name['ad_serving_optimization_status'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_ad__serving__optimization__status__pb2._ADSERVINGOPTIMIZATIONSTATUSENUM_ADSERVINGOPTIMIZATIONSTATUS
_CAMPAIGN.fields_by_name['advertising_channel_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_advertising__channel__type__pb2._ADVERTISINGCHANNELTYPEENUM_ADVERTISINGCHANNELTYPE
_CAMPAIGN.fields_by_name['advertising_channel_sub_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_advertising__channel__sub__type__pb2._ADVERTISINGCHANNELSUBTYPEENUM_ADVERTISINGCHANNELSUBTYPE
# URL tracking and custom parameters.
_CAMPAIGN.fields_by_name['tracking_url_template'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['url_custom_parameters'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_custom__parameter__pb2._CUSTOMPARAMETER
_CAMPAIGN.fields_by_name['real_time_bidding_setting'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_real__time__bidding__setting__pb2._REALTIMEBIDDINGSETTING
# Nested setting messages defined above.
_CAMPAIGN.fields_by_name['network_settings'].message_type = _CAMPAIGN_NETWORKSETTINGS
_CAMPAIGN.fields_by_name['hotel_setting'].message_type = _CAMPAIGN_HOTELSETTINGINFO
_CAMPAIGN.fields_by_name['dynamic_search_ads_setting'].message_type = _CAMPAIGN_DYNAMICSEARCHADSSETTING
_CAMPAIGN.fields_by_name['shopping_setting'].message_type = _CAMPAIGN_SHOPPINGSETTING
_CAMPAIGN.fields_by_name['targeting_setting'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_targeting__setting__pb2._TARGETINGSETTING
_CAMPAIGN.fields_by_name['geo_target_type_setting'].message_type = _CAMPAIGN_GEOTARGETTYPESETTING
_CAMPAIGN.fields_by_name['local_campaign_setting'].message_type = _CAMPAIGN_LOCALCAMPAIGNSETTING
_CAMPAIGN.fields_by_name['app_campaign_setting'].message_type = _CAMPAIGN_APPCAMPAIGNSETTING
# Labels, experiments, budget and schedule.
_CAMPAIGN.fields_by_name['labels'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['experiment_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_campaign__experiment__type__pb2._CAMPAIGNEXPERIMENTTYPEENUM_CAMPAIGNEXPERIMENTTYPE
_CAMPAIGN.fields_by_name['base_campaign'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['campaign_budget'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['bidding_strategy_type'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_bidding__strategy__type__pb2._BIDDINGSTRATEGYTYPEENUM_BIDDINGSTRATEGYTYPE
_CAMPAIGN.fields_by_name['start_date'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['end_date'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['final_url_suffix'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['frequency_caps'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_frequency__cap__pb2._FREQUENCYCAPENTRY
_CAMPAIGN.fields_by_name['video_brand_safety_suitability'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_brand__safety__suitability__pb2._BRANDSAFETYSUITABILITYENUM_BRANDSAFETYSUITABILITY
_CAMPAIGN.fields_by_name['vanity_pharma'].message_type = _CAMPAIGN_VANITYPHARMA
_CAMPAIGN.fields_by_name['selective_optimization'].message_type = _CAMPAIGN_SELECTIVEOPTIMIZATION
_CAMPAIGN.fields_by_name['optimization_goal_setting'].message_type = _CAMPAIGN_OPTIMIZATIONGOALSETTING
_CAMPAIGN.fields_by_name['tracking_setting'].message_type = _CAMPAIGN_TRACKINGSETTING
_CAMPAIGN.fields_by_name['payment_mode'].enum_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_enums_dot_payment__mode__pb2._PAYMENTMODEENUM_PAYMENTMODE
_CAMPAIGN.fields_by_name['optimization_score'].message_type = google_dot_protobuf_dot_wrappers__pb2._DOUBLEVALUE
# Bidding-strategy alternatives (members of the campaign_bidding_strategy oneof below).
_CAMPAIGN.fields_by_name['bidding_strategy'].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE
_CAMPAIGN.fields_by_name['commission'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._COMMISSION
_CAMPAIGN.fields_by_name['manual_cpc'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._MANUALCPC
_CAMPAIGN.fields_by_name['manual_cpm'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._MANUALCPM
_CAMPAIGN.fields_by_name['manual_cpv'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._MANUALCPV
_CAMPAIGN.fields_by_name['maximize_conversions'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._MAXIMIZECONVERSIONS
_CAMPAIGN.fields_by_name['maximize_conversion_value'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._MAXIMIZECONVERSIONVALUE
_CAMPAIGN.fields_by_name['target_cpa'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._TARGETCPA
_CAMPAIGN.fields_by_name['target_impression_share'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._TARGETIMPRESSIONSHARE
_CAMPAIGN.fields_by_name['target_roas'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._TARGETROAS
_CAMPAIGN.fields_by_name['target_spend'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._TARGETSPEND
_CAMPAIGN.fields_by_name['percent_cpc'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._PERCENTCPC
_CAMPAIGN.fields_by_name['target_cpm'].message_type = google_dot_ads_dot_googleads__v4_dot_proto_dot_common_dot_bidding__pb2._TARGETCPM
# Wire the mutually-exclusive bidding-strategy fields into the
# 'campaign_bidding_strategy' oneof.  Each field is appended to the oneof's
# field list (in declaration order) and back-references the oneof as its
# containing_oneof — the same two-way link protoc emits as repeated
# append/assign pairs.
_campaign_bidding_strategy_oneof = _CAMPAIGN.oneofs_by_name['campaign_bidding_strategy']
for _oneof_field_name in (
    'bidding_strategy',
    'commission',
    'manual_cpc',
    'manual_cpm',
    'manual_cpv',
    'maximize_conversions',
    'maximize_conversion_value',
    'target_cpa',
    'target_impression_share',
    'target_roas',
    'target_spend',
    'percent_cpc',
    'target_cpm',
):
    _oneof_field = _CAMPAIGN.fields_by_name[_oneof_field_name]
    _campaign_bidding_strategy_oneof.fields.append(_oneof_field)
    _oneof_field.containing_oneof = _campaign_bidding_strategy_oneof
# Expose the fully-wired Campaign descriptor on the file descriptor and
# register the file with the default symbol database so reflection-based
# lookup (and GeneratedProtocolMessageType below) can find it.
DESCRIPTOR.message_types_by_name['Campaign'] = _CAMPAIGN
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Campaign = _reflection.GeneratedProtocolMessageType('Campaign', (_message.Message,), dict(
NetworkSettings = _reflection.GeneratedProtocolMessageType('NetworkSettings', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_NETWORKSETTINGS,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """The network settings for the campaign.
Attributes:
target_google_search:
Whether ads will be served with google.com search results.
target_search_network:
Whether ads will be served on partner sites in the Google
Search Network (requires ``target_google_search`` to also be
``true``).
target_content_network:
Whether ads will be served on specified placements in the
Google Display Network. Placements are specified using the
Placement criterion.
target_partner_search_network:
Whether ads will be served on the Google Partner Network. This
is available only to some select Google partner accounts.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.NetworkSettings)
))
,
HotelSettingInfo = _reflection.GeneratedProtocolMessageType('HotelSettingInfo', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_HOTELSETTINGINFO,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Campaign-level settings for hotel ads.
Attributes:
hotel_center_id:
Immutable. The linked Hotel Center account.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.HotelSettingInfo)
))
,
LocalCampaignSetting = _reflection.GeneratedProtocolMessageType('LocalCampaignSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_LOCALCAMPAIGNSETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Campaign setting for local campaigns.
Attributes:
location_source_type:
The location source type for this local campaign.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.LocalCampaignSetting)
))
,
AppCampaignSetting = _reflection.GeneratedProtocolMessageType('AppCampaignSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_APPCAMPAIGNSETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Campaign-level settings for App Campaigns.
Attributes:
bidding_strategy_goal_type:
Represents the goal which the bidding strategy of this app
campaign should optimize towards.
app_id:
Immutable. A string that uniquely identifies a mobile
application.
app_store:
Immutable. The application store that distributes this
specific app.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.AppCampaignSetting)
))
,
DynamicSearchAdsSetting = _reflection.GeneratedProtocolMessageType('DynamicSearchAdsSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_DYNAMICSEARCHADSSETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """The setting for controlling Dynamic Search Ads (DSA).
Attributes:
domain_name:
The Internet domain name that this setting represents, e.g.,
"google.com" or "www.google.com".
language_code:
The language code specifying the language of the domain, e.g.,
"en".
use_supplied_urls_only:
Whether the campaign uses advertiser supplied URLs
exclusively.
feeds:
Output only. The list of page feeds associated with the
campaign.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.DynamicSearchAdsSetting)
))
,
SelectiveOptimization = _reflection.GeneratedProtocolMessageType('SelectiveOptimization', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_SELECTIVEOPTIMIZATION,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Selective optimization setting for this campaign, which includes a set
of conversion actions to optimize this campaign towards.
Attributes:
conversion_actions:
The selected set of conversion actions for optimizing this
campaign.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.SelectiveOptimization)
))
,
ShoppingSetting = _reflection.GeneratedProtocolMessageType('ShoppingSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_SHOPPINGSETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """The setting for Shopping campaigns. Defines the universe of products
that can be advertised by the campaign, and how this campaign interacts
with other Shopping campaigns.
Attributes:
merchant_id:
Immutable. ID of the Merchant Center account. This field is
required for create operations. This field is immutable for
Shopping campaigns.
sales_country:
Immutable. Sales country of products to include in the
campaign. This field is required for Shopping campaigns. This
field is immutable. This field is optional for non-Shopping
campaigns, but it must be equal to 'ZZ' if set.
campaign_priority:
Priority of the campaign. Campaigns with numerically higher
priorities take precedence over those with lower priorities.
This field is required for Shopping campaigns, with values
between 0 and 2, inclusive. This field is optional for Smart
Shopping campaigns, but must be equal to 3 if set.
enable_local:
Whether to include local products.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.ShoppingSetting)
))
,
TrackingSetting = _reflection.GeneratedProtocolMessageType('TrackingSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_TRACKINGSETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Campaign-level settings for tracking information.
Attributes:
tracking_url:
Output only. The url used for dynamic tracking.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.TrackingSetting)
))
,
GeoTargetTypeSetting = _reflection.GeneratedProtocolMessageType('GeoTargetTypeSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_GEOTARGETTYPESETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Represents a collection of settings related to ads geotargeting.
Attributes:
positive_geo_target_type:
The setting used for positive geotargeting in this particular
campaign.
negative_geo_target_type:
The setting used for negative geotargeting in this particular
campaign.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.GeoTargetTypeSetting)
))
,
OptimizationGoalSetting = _reflection.GeneratedProtocolMessageType('OptimizationGoalSetting', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_OPTIMIZATIONGOALSETTING,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Optimization goal setting for this campaign, which includes a set of
optimization goal types.
Attributes:
optimization_goal_types:
The list of optimization goal types.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.OptimizationGoalSetting)
))
,
VanityPharma = _reflection.GeneratedProtocolMessageType('VanityPharma', (_message.Message,), dict(
DESCRIPTOR = _CAMPAIGN_VANITYPHARMA,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """Describes how unbranded pharma ads will be displayed.
Attributes:
vanity_pharma_display_url_mode:
The display mode for vanity pharma URLs.
vanity_pharma_text:
The text that will be displayed in display URL of the text ad
when website description is the selected display mode for
vanity pharma URLs.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v4.resources.Campaign.VanityPharma)
))
,
DESCRIPTOR = _CAMPAIGN,
__module__ = 'google.ads.googleads_v4.proto.resources.campaign_pb2'
,
__doc__ = """A campaign.
Attributes:
resource_name:
Immutable. The resource name of the campaign. Campaign
resource names have the form:
``customers/{customer_id}/campaigns/{campaign_id}``
id:
Output only. The ID of the campaign.
name:
The name of the campaign. This field is required and should
not be empty when creating new campaigns. It must not contain
any null (code point 0x0), NL line feed (code point 0xA) or
carriage return (code point 0xD) characters.
status:
The status of the campaign. When a new campaign is added, the
status defaults to ENABLED.
serving_status:
Output only. The ad serving status of the campaign.
ad_serving_optimization_status:
The ad serving optimization status of the campaign.
advertising_channel_type:
Immutable. The primary serving target for ads within the
campaign. The targeting options can be refined in
``network_settings``. This field is required and should not
be empty when creating new campaigns. Can be set only when
creating campaigns. After the campaign is created, the field
can not be changed.
advertising_channel_sub_type:
Immutable. Optional refinement to
``advertising_channel_type``. Must be a valid sub-type of the
parent channel type. Can be set only when creating campaigns.
After campaign is created, the field can not be changed.
tracking_url_template:
The URL template for constructing a tracking URL.
url_custom_parameters:
The list of mappings used to substitute custom parameter tags
in a ``tracking_url_template``, ``final_urls``, or
``mobile_final_urls``.
real_time_bidding_setting:
Settings for Real-Time Bidding, a feature only available for
campaigns targeting the Ad Exchange network.
network_settings:
The network settings for the campaign.
hotel_setting:
Immutable. The hotel setting for the campaign.
dynamic_search_ads_setting:
The setting for controlling Dynamic Search Ads (DSA).
shopping_setting:
The setting for controlling Shopping campaigns.
targeting_setting:
Setting for targeting related features.
geo_target_type_setting:
The setting for ads geotargeting.
local_campaign_setting:
The setting for local campaign.
app_campaign_setting:
The setting related to App Campaign.
labels:
Output only. The resource names of labels attached to this
campaign.
experiment_type:
Output only. The type of campaign: normal, draft, or
experiment.
base_campaign:
Output only. The resource name of the base campaign of a draft
or experiment campaign. For base campaigns, this is equal to
``resource_name``. This field is read-only.
campaign_budget:
The budget of the campaign.
bidding_strategy_type:
Output only. The type of bidding strategy. A bidding strategy
can be created by setting either the bidding scheme to create
a standard bidding strategy or the ``bidding_strategy`` field
to create a portfolio bidding strategy. This field is read-
only.
start_date:
The date when campaign started. This field must not be used in
WHERE clauses.
end_date:
The date when campaign ended. This field must not be used in
WHERE clauses.
final_url_suffix:
Suffix used to append query parameters to landing pages that
are served with parallel tracking.
frequency_caps:
A list that limits how often each user will see this
campaign's ads.
video_brand_safety_suitability:
Output only. 3-Tier Brand Safety setting for the campaign.
vanity_pharma:
Describes how unbranded pharma ads will be displayed.
selective_optimization:
Selective optimization setting for this campaign, which
includes a set of conversion actions to optimize this campaign
towards.
optimization_goal_setting:
Optimization goal setting for this campaign, which includes a
set of optimization goal types.
tracking_setting:
Output only. Campaign-level settings for tracking information.
payment_mode:
Payment mode for the campaign.
optimization_score:
Output only. Optimization score of the campaign. Optimization
score is an estimate of how well a campaign is set to perform.
It ranges from 0% (0.0) to 100% (1.0), with 100% indicating
that the campaign is performing at full potential. See "About
optimization score" at https://support.google.com/google-
ads/answer/9061546. This field is read-only.
campaign_bidding_strategy:
The bidding strategy for the campaign. Must be either
portfolio (created via BiddingStrategy service) or standard,
that is embedded into the campaign.
bidding_strategy:
Portfolio bidding strategy used by campaign.
commission:
Commission is an automatic bidding strategy in which the
advertiser pays a certain portion of the conversion value.
manual_cpc:
Standard Manual CPC bidding strategy. Manual click-based
bidding where user pays per click.
manual_cpm:
Standard Manual CPM bidding strategy. Manual impression-based
bidding where user pays per thousand impressions.
manual_cpv:
Output only. A bidding strategy that pays a configurable
amount per video view.
maximize_conversions:
Standard Maximize Conversions bidding strategy that
automatically maximizes number of conversions given a daily
budget.
maximize_conversion_value:
Standard Maximize Conversion Value bidding strategy that
automatically sets bids to maximize revenue while spending
your budget.
target_cpa:
Standard Target CPA bidding strategy that automatically sets
bids to help get as many conversions as possible at the target
cost-per-acquisition (CPA) you set.
target_impression_share:
Target Impression Share bidding strategy. An automated bidding
strategy that sets bids to achieve a desired percentage of
impressions.
target_roas:
Standard Target ROAS bidding strategy that automatically
maximizes revenue while averaging a specific target return on
ad spend (ROAS).
target_spend:
Standard Target Spend bidding strategy that automatically sets
your bids to help get as many clicks as possible within your
budget.
percent_cpc:
Standard Percent Cpc bidding strategy where bids are a
fraction of the advertised price for some good or service.
target_cpm:
A bidding strategy that automatically optimizes cost per
thousand impressions.
""",
))
# Register the generated Campaign message and every nested setting message
# type with the default symbol database so they can be resolved by full
# protobuf name at runtime.
_sym_db.RegisterMessage(Campaign)
_sym_db.RegisterMessage(Campaign.NetworkSettings)
_sym_db.RegisterMessage(Campaign.HotelSettingInfo)
_sym_db.RegisterMessage(Campaign.LocalCampaignSetting)
_sym_db.RegisterMessage(Campaign.AppCampaignSetting)
_sym_db.RegisterMessage(Campaign.DynamicSearchAdsSetting)
_sym_db.RegisterMessage(Campaign.SelectiveOptimization)
_sym_db.RegisterMessage(Campaign.ShoppingSetting)
_sym_db.RegisterMessage(Campaign.TrackingSetting)
_sym_db.RegisterMessage(Campaign.GeoTargetTypeSetting)
_sym_db.RegisterMessage(Campaign.OptimizationGoalSetting)
_sym_db.RegisterMessage(Campaign.VanityPharma)
# Reset the serialized options on the file/field/message descriptors.
# This is standard protoc-generated boilerplate: the options were already
# consumed while building the descriptors above and are cleared here.
DESCRIPTOR._options = None
_CAMPAIGN_HOTELSETTINGINFO.fields_by_name['hotel_center_id']._options = None
_CAMPAIGN_APPCAMPAIGNSETTING.fields_by_name['app_id']._options = None
_CAMPAIGN_APPCAMPAIGNSETTING.fields_by_name['app_store']._options = None
_CAMPAIGN_DYNAMICSEARCHADSSETTING.fields_by_name['feeds']._options = None
_CAMPAIGN_SELECTIVEOPTIMIZATION.fields_by_name['conversion_actions']._options = None
_CAMPAIGN_SHOPPINGSETTING.fields_by_name['merchant_id']._options = None
_CAMPAIGN_SHOPPINGSETTING.fields_by_name['sales_country']._options = None
_CAMPAIGN_TRACKINGSETTING.fields_by_name['tracking_url']._options = None
_CAMPAIGN.fields_by_name['resource_name']._options = None
_CAMPAIGN.fields_by_name['id']._options = None
_CAMPAIGN.fields_by_name['serving_status']._options = None
_CAMPAIGN.fields_by_name['advertising_channel_type']._options = None
_CAMPAIGN.fields_by_name['advertising_channel_sub_type']._options = None
_CAMPAIGN.fields_by_name['hotel_setting']._options = None
_CAMPAIGN.fields_by_name['labels']._options = None
_CAMPAIGN.fields_by_name['experiment_type']._options = None
_CAMPAIGN.fields_by_name['base_campaign']._options = None
_CAMPAIGN.fields_by_name['campaign_budget']._options = None
_CAMPAIGN.fields_by_name['bidding_strategy_type']._options = None
_CAMPAIGN.fields_by_name['video_brand_safety_suitability']._options = None
_CAMPAIGN.fields_by_name['tracking_setting']._options = None
_CAMPAIGN.fields_by_name['optimization_score']._options = None
_CAMPAIGN.fields_by_name['bidding_strategy']._options = None
_CAMPAIGN.fields_by_name['manual_cpv']._options = None
_CAMPAIGN._options = None
| true | true |
f73910ff97bc6e0563c1408384190999e737fcfd | 12,628 | py | Python | build.py | charlie45000/corunners-example | 0de8463074fa9a02d0a361fc3dd50bf620991ea2 | [
"Apache-2.0"
] | null | null | null | build.py | charlie45000/corunners-example | 0de8463074fa9a02d0a361fc3dd50bf620991ea2 | [
"Apache-2.0"
] | 1 | 2021-06-02T09:10:15.000Z | 2021-06-02T09:10:15.000Z | build.py | charlie45000/corunners-example | 0de8463074fa9a02d0a361fc3dd50bf620991ea2 | [
"Apache-2.0"
] | 1 | 2021-03-08T13:18:03.000Z | 2021-03-08T13:18:03.000Z | #! /usr/bin/env python3
import argparse
import os
import subprocess
import sys
from operator import itemgetter
from pathlib import Path

import scripts.templates
from scripts.scriptutil import load_db, load_json, dump_json, write_template, psyko
from scripts.templates import P2020, MPC5777M, CORES, TOP_DIR, PSY_DIR, STUBS_DIR, SRC_DIR, CFG_DIR, Help, AGENT_CONFIG_HJSON_TEMPLATE, CORUNNER_CONFIG_HJSON_TEMPLATE, CORUNNER_KMEMORY_JSON_TEMPLATE, COMPILE_CONFIG_HJSON_TEMPLATE, PSYMODULE_CONFIG_HJSON_TEMPLATE, FLASHLIKE
def corunner_to_list(s):
    """Parse a ``--corunner`` argument of the form ``<core>[,<start address>]``.

    Returns ``[core, start]`` where ``start`` is ``None`` when no start
    address was given.  Allows a start address to be set per co-runner when
    at least two are used.  Intended only as an argparse ``type=`` callback.

    Raises:
        argparse.ArgumentTypeError: if more than two comma-separated fields
            are given.
        ValueError: if the core field is not an integer (argparse reports
            this as an invalid value).
    """
    pars = s.split(',')
    pars[0] = int(pars[0])
    # Bug fix: the message used CORES.join(', '), but CORES is not a string,
    # so a failing assertion would itself have raised AttributeError.
    assert pars[0] in CORES, \
        f"The corunner id must be one of {', '.join(str(c) for c in CORES)}"
    if len(pars) > 2:
        raise argparse.ArgumentTypeError(
            "Corunners parameters must be of type <core>[,<start address of read>]")
    if len(pars) == 2 and pars[1] != '':
        return pars
    return [pars[0], None]
def cor_cores(cors):
    """Extract the core ids, in order, from a list of corunner_to_list()
    results, so the set of occupied cores is known."""
    return list(map(itemgetter(0), cors))
def getopts(argv):
    """Parse the command line *argv* (argv[0] is the program name).

    Validates that the measured core does not also host a co-runner, and
    defaults ``--output`` to ``<build-dir>/program.elf`` when unset.
    """
    cli = argparse.ArgumentParser(description='Corunners builder')
    cli.add_argument("--psyko", "-P", type=Path, required=True,
                     help=Help.PSYKO)
    cli.add_argument("--kdbv", type=Path, required=True)
    cli.add_argument("--rtk-dir", "-K", type=Path, required=True,
                     help=Help.RTK_DIR)
    cli.add_argument("--product", "-p", type=str, required=True,
                     choices=[P2020, MPC5777M], help=Help.PRODUCT)
    cli.add_argument("--corunner", "-C", type=corunner_to_list,
                     action="append", default=[], help=Help.CORUNNER)
    cli.add_argument("--task", "-T", type=str, required=True,
                     choices=["H", "G"] + FLASHLIKE, help=Help.TASK)
    cli.add_argument("--core", "-c", type=int, required=True,
                     choices=CORES, help=Help.CORE)
    cli.add_argument("--local-corunners", action='store_true',
                     help=Help.LOCAL_CORUNNERS)
    cli.add_argument("--build-dir", type=Path, default=TOP_DIR / "build",
                     help=Help.BUILD_DIR)
    cli.add_argument("--mem-conf", type=Path, help=Help.MEM_CONF)
    cli.add_argument("--output", "-o", type=Path, help=Help.OUTPUT)
    opts = cli.parse_args(argv[1:])
    # The measured core must not be occupied by a co-runner as well.
    assert opts.core not in cor_cores(opts.corunner)
    if opts.output is None:
        opts.output = opts.build_dir / "program.elf"
    return opts
def gen_agent_config(output_filename, name, core):
    """Write the hjson configuration of one agent (its name and the core
    it is mapped on) to *output_filename*."""
    substitutions = {"agent_name": name, "agent_core": core}
    write_template(output_filename, AGENT_CONFIG_HJSON_TEMPLATE, substitutions)
def gen_corunner_config(conf_filename, identifier, symbol, object_file, kmem_filename):
    """Write both configuration files of one co-runner: its hjson config
    (*conf_filename*) and its kmemory JSON fragment (*kmem_filename*)."""
    conf_subst = {
        "corunner_id": identifier,
        "corunner_object": str(object_file),
        "corunner_symbol": symbol,
    }
    write_template(conf_filename, CORUNNER_CONFIG_HJSON_TEMPLATE, conf_subst)
    write_template(kmem_filename, CORUNNER_KMEMORY_JSON_TEMPLATE, {"symbol": symbol})
def gen_corunner_source(output_filename, symbol, read=None):
    """Generate the assembly source of one co-runner into *output_filename*.

    Runs ``scripts/gen-corunner.py`` with *symbol* as the entry symbol.
    When *read* is a non-empty dict, a "read" co-runner is generated and
    the optional keys ``'nop'``, ``'start'``, ``'size'`` and ``'stride'``
    are forwarded as command-line options; otherwise a jump-loop co-runner
    is generated.

    Raises:
        subprocess.CalledProcessError: if the generator script fails.
            (Previously the exit status was ignored and a truncated or
            empty file could be written silently.)
    """
    # read=None instead of the mutable default read=dict(); an absent
    # argument still behaves as an empty mapping.
    cmd = [sys.executable, TOP_DIR / "scripts" / "gen-corunner.py", symbol]
    if read:
        cmd.append("--read")
        for key, option in (("nop", "--nop"), ("start", "--startaddr"),
                            ("size", "--tablesize"), ("stride", "--stride")):
            if key in read:
                cmd += [option, str(read[key])]
    else:
        cmd += ["--jump", "2048"]
    result = subprocess.run(cmd, stdout=subprocess.PIPE,
                            universal_newlines=True, check=True)
    with open(output_filename, "w") as fileh:
        fileh.write(result.stdout)
def gen_kmem_final(default, config, memreport, kdbv, tasks, corunners=list()):
    """Rewrite *config* in place for the final memory layout, then run
    ``scripts/gen-kmem.py`` on it.

    The config gets the *memreport* and *default* kmemory paths injected;
    'task' elements are bound to *tasks*, 'corunner' elements to
    *corunners* (and dropped when no co-runner is used); every other
    element type is removed.

    Note: *kdbv* is currently unused but kept for interface stability.
    Raises subprocess.CalledProcessError if gen-kmem.py fails.
    """
    config_json = load_json(config)
    config_json['memreport'] = str(memreport)
    config_json['default_kmemory'] = str(default)
    # Keep only the elements we can bind, instead of collecting a deletion
    # list and removing afterwards (the previous unused 'ret' local is gone).
    kept = []
    for el in config_json['elements']:
        if el['type'] == 'corunner' and corunners:
            el['names'] = corunners
            kept.append(el)
        elif el['type'] == 'task':
            el['names'] = tasks
            kept.append(el)
    config_json['elements'] = kept
    dump_json(config_json, config)
    subprocess.check_call([sys.executable, TOP_DIR / 'scripts' / 'gen-kmem.py',
                           '--config', config])
def get_sources(task_name):
    """Return the sources needed to build *task_name*, as a dict with the
    keys "c" (C files), "asm" (assembly, filled later for co-runners) and
    "psy" (PsyC modules).  Tasks that are not flash-like additionally need
    their stub/test-suite sources."""
    c_sources = [SRC_DIR / basename
                 for basename in ("crc.c", "filter.c", "filter2.c")]
    psy_sources = [PSY_DIR / f"task_{task_name}.psy"]
    if task_name not in FLASHLIKE:
        c_sources.append(STUBS_DIR / f"suite_task_{task_name}.c")
        psy_sources.append(STUBS_DIR / f"for_task_{task_name}.psy")
    return {"c": c_sources, "asm": [], "psy": psy_sources}
def main(argv):
    """Build the example application.

    Steps: parse options, generate the co-runner sources and all hjson
    configuration fragments, compile every C/ASM/PsyC source, link a first
    executable to obtain the section sizes, and — when ``--mem-conf`` is
    given — regenerate the final memory configuration and relink.
    """
    args = getopts(argv)
    # Cores occupied by co-runners plus the core running the task itself.
    used_cores = cor_cores(args.corunner) + [args.core]
    args.corunner.sort(key=itemgetter(0))

    def object_of(source_filename, extension=".o"):
        """Map a source path to its object file inside the build directory."""
        return args.build_dir / (source_filename.name + extension)

    sources = get_sources(args.task)
    ag_config = args.build_dir / "task.hjson"
    app_configs = [
        args.build_dir / "app.hjson",
        CFG_DIR / f"task_{args.task}.hjson",
        ag_config,
    ]
    tasks = [f'task_{args.task}']
    part_configs = []
    compile_config = args.build_dir / "compile.hjson"
    psymodule_config = args.build_dir / "psymodule.hjson"
    gen_agent_config(ag_config, f"task_{args.task}", args.core)
    mem_configs = []
    corunners = []
    for corunner, cor_start in args.corunner:
        # A "read" co-runner is created only when a start address is
        # provided for it; otherwise a flash-like (jump) co-runner is used.
        use_read = bool(cor_start)
        read_args = dict()
        co_config = args.build_dir / f"corunner_{corunner}.hjson"
        co_kmem = args.build_dir / f"corunner_{corunner}_kmem.json"
        co_file = args.build_dir / f"corunner_{corunner}"
        if use_read:
            read_args['start'] = cor_start
            # Bug fix: this line referenced an undefined name 'env'
            # (NameError); the size override comes from the environment.
            read_args['size'] = int(
                os.environ.get(f"CORUNNER_READ_SIZE_{corunner}", "0x2000"), 16)
        symbol = f"co_runner_read{corunner}" if read_args else f"co_runner_flash{corunner}"
        co_file = co_file.with_suffix('.asm')
        sources["asm"].append(co_file)
        gen_corunner_source(co_file, symbol, read_args)
        app_configs.append(co_config)
        mem_configs.append(co_kmem)
        corunners.append(symbol)
        gen_corunner_config(co_config, corunner, symbol, object_of(co_file), co_kmem)
    if args.task not in FLASHLIKE:
        # Non flash-like tasks are driven by a stub agent on the same core.
        stub_config = args.build_dir / "stub.hjson"
        gen_agent_config(stub_config, f"sends_to_task_{args.task}", args.core)
        app_configs.append(stub_config)
        tasks.append(f'sends_to_task_{args.task}')
    write_template(compile_config, COMPILE_CONFIG_HJSON_TEMPLATE, {})
    write_template(psymodule_config, PSYMODULE_CONFIG_HJSON_TEMPLATE, {})

    #==========================================================================
    # The functions below are just helpers to call the PsyC compiler psyko,
    # with convenient access to shared state such as the path to the
    # compiler and the path to the RTK.
    psykonf = {'product': args.product, 'rtk_dir': args.rtk_dir,
               'psyko': args.psyko, 'cwd': TOP_DIR}

    def psyko_cc(c_source):
        """Compile one C source file; return the generated object path."""
        generated_object = object_of(c_source)
        psyko(psykonf, "cc", c_source, compile_config, "-o", generated_object)
        return generated_object

    def psyko_as(asm_source):
        """Assemble one ASM source file; return the generated object path."""
        generated_object = object_of(asm_source)
        psyko(psykonf, "as", asm_source, compile_config, "-o", generated_object)
        return generated_object

    def psyko_module(psy_source):
        """Compile one PsyC module; return the generated .psyo path."""
        generated_object = object_of(psy_source, ".psyo")
        psyko(psykonf, "module", psy_source, psymodule_config, "-o", generated_object)
        return generated_object

    def psyko_partition(name, objects, configs):
        """Link objects into the partition *name*; return the .parto path."""
        generated_object = args.build_dir / (name + ".parto")
        psyko(psykonf, "partition", "-o", generated_object, '--gendir',
              args.build_dir / 'gen' / 'part', *objects, *configs)
        return generated_object

    def psyko_app(partos, configs):
        """Link the final application/ELF; return the generation directory."""
        gendir = args.build_dir / "gen" / "app"
        psyko(psykonf, "app", "-a", args.build_dir / "program.app", "-b", args.output,
              '--gendir', gendir, *partos, *configs)
        return gendir

    def psyko_memconf(t, files, configs=[], cor_kmems=[]):
        """Generate a valid default memory configuration for the first
        compilation: a default kmemory for the task, extended with the
        kmemory fragments of every co-runner in *cor_kmems*."""
        kmemconf = args.build_dir / ('kmemconf_' + t + '.json')
        psyko(psykonf, 'gen-mem-conf', '-t', t, '--gendir',
              args.build_dir / 'gen' / 'memconf', '-o', kmemconf, *files, *configs)
        if cor_kmems:
            def_memconf = load_json(kmemconf)
            cor_memconf = [load_json(kmem) for kmem in cor_kmems]
            # Place the co-runner domains in the largest memory region.
            max_reg = def_memconf['kmemory']['regions'][0]
            if len(def_memconf['kmemory']['regions']) > 1:
                for reg in def_memconf['kmemory']['regions'][1:]:
                    if reg['size'] > max_reg['size']:
                        max_reg = reg
            if 'domains' not in max_reg:
                max_reg['domains'] = []
            out = cor_memconf[0]['domains'][0]['output_sections'][0]
            # Bug fix: 'mar_reg' was a typo for 'max_reg' (NameError).
            out['physical_address'] = max_reg['physical_address']
            stacks = {obj['id']: obj
                      for obj in def_memconf['kmemory']['objects']
                      if obj['id'] in [f"core_{core}_co_runner_stack.c"
                                       for core in used_cores]}
            for core in cor_cores(args.corunner):
                stack = f"core_{core}_co_runner_stack.c"
                for corunner in corunners:
                    # NOTE(review): the last co-runner in the list wins; when
                    # it does not match *core* the group becomes '.stack_'.
                    # Preserved as-is — TODO confirm intended behaviour.
                    symbol = corunner if corunner[-1] == str(core) else ''
                    stacks[stack]['groups'] = [f'.stack_{symbol}']
            for cor in cor_memconf:
                max_reg['domains'] += cor['domains']
                def_memconf['kmemory']['groups'] += cor['groups']
                def_memconf['kmemory']['objects'] += cor['objects']
            dump_json(def_memconf, f=kmemconf)
        return kmemconf

    #==========================================================================
    # Compile all the C, ASM and PsyC sources.
    # ASM sources are only present when co-runners are enabled.
    parto_objects = []
    for c_source in sources["c"]:
        parto_objects.append(psyko_cc(c_source))
    for asm_source in sources.get("asm", []):
        parto_objects.append(psyko_as(asm_source))
    for psy_source in sources["psy"]:
        parto_objects.append(psyko_module(psy_source))

    #==========================================================================
    # Generate a single partition, then a first executable, to be able to
    # measure the size of the sections.
    parto = psyko_partition("main", parto_objects, part_configs)
    mem_configs = [psyko_memconf('app', [parto], app_configs, mem_configs)]
    mem_configs.append("--overwrite-memory-configuration")
    gendir = psyko_app([parto], app_configs + mem_configs)
    assert args.output.is_file(), "first app compilation not successfull"
    # Finally generate the final memory configuration and relink.
    if args.mem_conf:
        args.output.unlink()
        gen_kmem_final(mem_configs[0], args.mem_conf,
                       gendir / 'applink' / 'memreport_out.ks', args.kdbv, tasks, corunners)
        psyko_app([parto], app_configs + mem_configs)
        assert args.output.is_file(), "final app compilation not successfull"
if __name__ == "__main__":
    # Script entry point: forward the raw argv (including the program name,
    # which getopts() skips via argv[1:]).
    main(sys.argv)
| 42.375839 | 282 | 0.617041 |
import argparse
from pathlib import Path
import subprocess
import sys
import scripts.templates
from scripts.templates import P2020, MPC5777M, CORES, TOP_DIR, PSY_DIR, STUBS_DIR, SRC_DIR, CFG_DIR, Help, AGENT_CONFIG_HJSON_TEMPLATE, CORUNNER_CONFIG_HJSON_TEMPLATE, CORUNNER_KMEMORY_JSON_TEMPLATE, COMPILE_CONFIG_HJSON_TEMPLATE, PSYMODULE_CONFIG_HJSON_TEMPLATE, FLASHLIKE
from scripts.scriptutil import load_db, load_json, dump_json, write_template, psyko
from operator import itemgetter
def corunner_to_list(s):
pars = s.split(',')
pars[0] = int(pars[0])
assert pars[0] in CORES, \
f"The corunner id must be one of {CORES.join(', ')}"
l = len(pars)
if l > 2:
raise argparse.ArgumentTypeError("Corunners parameters must be of type <core>[,<start address of read>]")
elif l == 2 and pars[1] != '':
return pars
else:
return [pars[0], None]
def cor_cores(cors):
return [i[0] for i in cors]
def getopts(argv):
parser = argparse.ArgumentParser(description='Corunners builder')
parser.add_argument("--psyko", "-P", type=Path,
help=Help.PSYKO, required=True)
parser.add_argument("--kdbv", type=Path, required=True)
parser.add_argument("--rtk-dir", "-K", type=Path,
help=Help.RTK_DIR, required=True)
parser.add_argument("--product", "-p", type=str,
help=Help.PRODUCT, required=True,
choices=[P2020,MPC5777M])
parser.add_argument("--corunner", "-C", type=corunner_to_list,
action="append", help=Help.CORUNNER, default=[])
parser.add_argument("--task", "-T", type=str, choices=["H", "G"]+FLASHLIKE,
help=Help.TASK, required=True)
parser.add_argument("--core", "-c", type=int, choices=CORES,
help=Help.CORE, required=True)
parser.add_argument("--local-corunners", action='store_true',
help=Help.LOCAL_CORUNNERS)
parser.add_argument("--build-dir", type=Path, default=TOP_DIR / "build",
help=Help.BUILD_DIR)
parser.add_argument("--mem-conf", type=Path,
help=Help.MEM_CONF)
parser.add_argument("--output", "-o", type=Path,
help=Help.OUTPUT)
args = parser.parse_args(argv[1:])
assert args.core not in cor_cores(args.corunner)
if args.output is None:
args.output = args.build_dir / "program.elf"
return args
def gen_agent_config(output_filename, name, core):
write_template(output_filename, AGENT_CONFIG_HJSON_TEMPLATE, {
"agent_name": name,
"agent_core": core,
})
def gen_corunner_config(conf_filename, identifier, symbol, object_file, kmem_filename):
write_template(conf_filename, CORUNNER_CONFIG_HJSON_TEMPLATE, {
"corunner_id": identifier,
"corunner_symbol": symbol,
"corunner_object": str(object_file)
})
write_template(kmem_filename, CORUNNER_KMEMORY_JSON_TEMPLATE, {
'symbol': symbol,
})
def gen_corunner_source(output_filename, symbol, read=dict()):
cmd = [sys.executable, TOP_DIR / "scripts" / "gen-corunner.py", symbol]
if read:
cmd += ["--read"]
if 'nop' in read:
cmd += ["--nop", str(read['nop'])]
if 'start' in read:
cmd += ["--startaddr", str(read['start'])]
if 'size' in read:
cmd += ["--tablesize", str(read['size'])]
if 'stride' in read:
cmd += ["--stride", str(read['stride'])]
else:
cmd += ["--jump", "2048"]
with subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True) as proc:
with open(output_filename, "w") as fileh:
fileh.write(proc.stdout.read())
def gen_kmem_final(default, config, memreport, kdbv, tasks, corunners=list()):
config_json = load_json(config)
cmd = [sys.executable, TOP_DIR / 'scripts' / 'gen-kmem.py', '--config', config]
del_list = []
config_json['memreport'] = str(memreport)
config_json['default_kmemory'] = str(default)
for el in config_json['elements']:
if el['type'] == 'corunner':
if corunners:
el['names'] = corunners
else:
del_list.append(el)
elif el['type'] == 'task':
el['names'] = tasks
else:
del_list.append(el)
for el in del_list:
config_json['elements'].remove(el)
dump_json(config_json, config)
ret = subprocess.check_call(cmd)
def get_sources(task_name):
c_sources = [
SRC_DIR / "crc.c",
SRC_DIR / "filter.c",
SRC_DIR / "filter2.c",
]
psy_sources = [PSY_DIR / f"task_{task_name}.psy"]
if task_name not in FLASHLIKE:
c_sources += [
STUBS_DIR / f"suite_task_{task_name}.c",
]
psy_sources += [STUBS_DIR / f"for_task_{task_name}.psy"]
return {
"c": c_sources,
"asm": [],
"psy": psy_sources,
}
def main(argv):
args = getopts(argv)
used_cores = cor_cores(args.corunner) + [args.core]
args.corunner.sort(key=itemgetter(0))
def object_of(source_filename, extension = ".o"):
return args.build_dir / (source_filename.name + extension)
sources = get_sources(args.task)
ag_config = args.build_dir / "task.hjson"
app_configs = [
args.build_dir / "app.hjson",
CFG_DIR / f"task_{args.task}.hjson",
ag_config,
]
tasks = [f'task_{args.task}']
part_configs = []
compile_config = args.build_dir / "compile.hjson"
partition_config = args.build_dir / "partition.hjson"
psymodule_config = args.build_dir / "psymodule.hjson"
gen_agent_config(ag_config, f"task_{args.task}", args.core)
mem_configs = []
corunners = []
for corunner, cor_start in args.corunner:
use_read = bool(cor_start)
read_args = dict()
co_config = args.build_dir / f"corunner_{corunner}.hjson"
co_kmem = args.build_dir / f"corunner_{corunner}_kmem.json"
co_file = args.build_dir / f"corunner_{corunner}"
if use_read:
read_args['start'] = cor_start
read_args['size'] = int(env.get(f"CORUNNER_READ_SIZE_{corunner}", "0x2000"), 16)
symbol = f"co_runner_read{corunner}" if read_args else f"co_runner_flash{corunner}"
co_file = co_file.with_suffix('.asm')
sources["asm"].append(co_file)
gen_corunner_source(co_file, symbol, read_args)
app_configs.append(co_config)
mem_configs.append(co_kmem)
corunners.append(symbol)
gen_corunner_config(co_config, corunner, symbol, object_of(co_file), co_kmem)
if args.task not in FLASHLIKE:
stub_config = args.build_dir / "stub.hjson"
gen_agent_config(stub_config, f"sends_to_task_{args.task}", args.core)
app_configs.append(stub_config)
tasks.append(f'sends_to_task_{args.task}')
write_template(compile_config, COMPILE_CONFIG_HJSON_TEMPLATE, {})
write_template(psymodule_config, PSYMODULE_CONFIG_HJSON_TEMPLATE, {})
psykonf = {'product': args.product, 'rtk_dir': args.rtk_dir, 'psyko': args.psyko, 'cwd': TOP_DIR}
def psyko_cc(c_source):
generated_object = object_of(c_source)
psyko(psykonf, "cc", c_source, compile_config, "-o", generated_object)
return generated_object
def psyko_as(asm_source):
generated_object = object_of(asm_source)
psyko(psykonf, "as", asm_source, compile_config, "-o", generated_object)
return generated_object
def psyko_module(psy_source):
generated_object = object_of(psy_source, ".psyo")
psyko(psykonf, "module", psy_source, psymodule_config, "-o", generated_object)
return generated_object
def psyko_partition(name, objects, configs):
generated_object = args.build_dir / (name + ".parto")
psyko(psykonf, "partition", "-o", generated_object, '--gendir',
args.build_dir / 'gen' / 'part', *objects, *configs)
return generated_object
def psyko_app(partos, configs):
elf = args.build_dir / "program.elf"
gendir = args.build_dir / "gen" / "app"
psyko(psykonf, "app", "-a", args.build_dir / "program.app", "-b", args.output,
'--gendir', gendir, *partos, *configs)
return gendir
def psyko_memconf(t, files, configs=[], cor_kmems=[]):
kmemconf = args.build_dir / ('kmemconf_'+t+'.json')
psyko(psykonf, 'gen-mem-conf', '-t', t, '--gendir', args.build_dir / 'gen' / 'memconf', '-o', kmemconf, *files, *configs)
if cor_kmems:
def_memconf = load_json(kmemconf)
cor_memconf = []
for kmem in cor_kmems:
cor_memconf.append(load_json(kmem))
max_reg = def_memconf['kmemory']['regions'][0]
if len(def_memconf['kmemory']['regions']) > 1:
for reg in def_memconf['kmemory']['regions'][1:]:
if reg['size'] > max_reg['size']:
max_reg = reg
if 'domains' not in max_reg:
max_reg['domains'] = []
out = cor_memconf[0]['domains'][0]['output_sections'][0]
out['physical_address'] = mar_reg['physical_address']
stacks = {obj['id']: obj
for obj in def_memconf['kmemory']['objects']
if obj['id'] in [f"core_{core}_co_runner_stack.c"
for core in used_cores]}
for core in cor_cores(args.corunner):
stack = f"core_{core}_co_runner_stack.c"
for corunner in corunners:
symbol = corunner if corunner[-1] == str(core) else ''
stacks[stack]['groups'] = [f'.stack_{symbol}']
for cor in cor_memconf:
max_reg['domains'] += cor['domains']
def_memconf['kmemory']['groups'] += cor['groups']
def_memconf['kmemory']['objects'] += cor['objects']
dump_json(def_memconf, f=kmemconf)
return kmemconf
parto_objects = []
for c_source in sources["c"]:
parto_objects.append(psyko_cc(c_source))
for asm_source in sources.get("asm", []):
parto_objects.append(psyko_as(asm_source))
for psy_source in sources["psy"]:
parto_objects.append(psyko_module(psy_source))
parto = psyko_partition("main", parto_objects, part_configs)
mem_configs = [psyko_memconf('app', [parto], app_configs, mem_configs)]
mem_configs.append("--overwrite-memory-configuration")
gendir = psyko_app([parto], app_configs+mem_configs)
assert args.output.is_file(), "first app compilation not successfull"
if args.mem_conf:
args.output.unlink()
gen_kmem_final(mem_configs[0], args.mem_conf,
gendir / 'applink' / 'memreport_out.ks', args.kdbv, tasks, corunners)
psyko_app([parto], app_configs+mem_configs)
assert args.output.is_file(), "final app compilation not successfull"
if __name__ == "__main__":
main(sys.argv)
| true | true |
f7391168b99f6f2fdae5a55cffae7b304ded5b5e | 1,429 | py | Python | ctsutils/test.py | 534ttl3/ctsutils | b070bf349d4a112df576404a3948e0de60f24927 | [
"MIT"
] | null | null | null | ctsutils/test.py | 534ttl3/ctsutils | b070bf349d4a112df576404a3948e0de60f24927 | [
"MIT"
] | null | null | null | ctsutils/test.py | 534ttl3/ctsutils | b070bf349d4a112df576404a3948e0de60f24927 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
from ctsutils.cparameterspace import CParam, CParameterSpace
def foo(X, Y, Y2):
""" """
return (1 - X / 2 + X ** 5 + (Y + Y2 ) ** 3) * np.exp(-X ** 2 - (Y + Y2 ) ** 2) # calcul du tableau des valeurs de Z
def foo(X, Y, Y2, Y3):
""" """
return (1 - X / 2 + X ** 5 + (Y + Y2 + Y3) ** 3) * np.exp(-X ** 2 - (Y + Y2 + Y3) ** 2) # calcul du tableau des valeurs de Z
ps = CParameterSpace([CParam("x", np.linspace(-3, 3, 51), unit="m"),
CParam("y", np.linspace(-2, 2, 41)),
CParam("y2", np.linspace(-1, 1, 31)),
CParam("y3", np.linspace(-1, 1, 10))])
# import pdb; pdb.set_trace() # noqa BREAKPOINT
# x = ps.get_arr("x")
Z = ps.calc_function(foo, args_param_names=("x", "y", "y2", "y3"))
integrals = ps.calc_integral(Z, "x")
# import pdb; pdb.set_trace() # noqa BREAKPOINT
# fig, ax = plt.subplots(1, 1)
# ps.plot(Z, ordering_of_params_names=("y2", "y"), ax=ax)
# plt.show()
# import pdb; pdb.set_trace() # noqa BREAKPOINT
fig, ax = plt.subplots(1, 1)
#ps.plot(Z, z_label="Z", ordering_of_params_name_and_value=(("y3", None), ("y2", None)), ax=ax)
ps.plot(integrals, z_label="integrals", ordering_of_params_name_and_value=(("y3", None), ("y2", None)), ax=ax)
# ps.plot(integrals, z_label="integrals", ordering_of_params_name_and_value=(("y2", None), ("y", None)), ax=ax)
plt.show()
| 32.477273 | 129 | 0.582225 | import numpy as np
import matplotlib.pyplot as plt
from ctsutils.cparameterspace import CParam, CParameterSpace
def foo(X, Y, Y2):
return (1 - X / 2 + X ** 5 + (Y + Y2 ) ** 3) * np.exp(-X ** 2 - (Y + Y2 ) ** 2)
def foo(X, Y, Y2, Y3):
return (1 - X / 2 + X ** 5 + (Y + Y2 + Y3) ** 3) * np.exp(-X ** 2 - (Y + Y2 + Y3) ** 2)
ps = CParameterSpace([CParam("x", np.linspace(-3, 3, 51), unit="m"),
CParam("y", np.linspace(-2, 2, 41)),
CParam("y2", np.linspace(-1, 1, 31)),
CParam("y3", np.linspace(-1, 1, 10))])
nction(foo, args_param_names=("x", "y", "y2", "y3"))
integrals = ps.calc_integral(Z, "x")
1)
ps.plot(integrals, z_label="integrals", ordering_of_params_name_and_value=(("y3", None), ("y2", None)), ax=ax)
plt.show()
| true | true |
f73911a7c4582d4ab6d999c25103c7c89a6d2334 | 243 | py | Python | globalvalues/constants/settingsconstants.py | adriangrepo/qreservoir | 20fba1b1fd1a42add223d9e8af2d267665bec493 | [
"MIT"
] | 2 | 2019-10-04T13:54:51.000Z | 2021-05-21T19:36:15.000Z | globalvalues/constants/settingsconstants.py | adriangrepo/qreservoir | 20fba1b1fd1a42add223d9e8af2d267665bec493 | [
"MIT"
] | 3 | 2019-11-19T17:06:09.000Z | 2020-01-18T20:39:54.000Z | globalvalues/constants/settingsconstants.py | adriangrepo/qreservoir | 20fba1b1fd1a42add223d9e8af2d267665bec493 | [
"MIT"
] | 2 | 2020-07-02T13:20:48.000Z | 2020-11-11T00:18:51.000Z | '''
Created on 28 Jun 2015
@author: a
'''
class SettingsConstants(object):
'''
Constants related to settings dialogs
'''
SETTINGS_ITEM_HISTORY_HEADERS = ["Item", "Date", "Action", "Details", "User", "Version", "Build date"]
| 18.692308 | 106 | 0.641975 |
class SettingsConstants(object):
SETTINGS_ITEM_HISTORY_HEADERS = ["Item", "Date", "Action", "Details", "User", "Version", "Build date"]
| true | true |
f739129a416d3f39cfab817aa6633728cddb057c | 6,298 | py | Python | tests/examples/test_examples.py | brucebcampbell/mlflow | 9aca8e27198f16ce4fa1e7a0a502554f2f81068b | [
"Apache-2.0"
] | null | null | null | tests/examples/test_examples.py | brucebcampbell/mlflow | 9aca8e27198f16ce4fa1e7a0a502554f2f81068b | [
"Apache-2.0"
] | null | null | null | tests/examples/test_examples.py | brucebcampbell/mlflow | 9aca8e27198f16ce4fa1e7a0a502554f2f81068b | [
"Apache-2.0"
] | null | null | null | import os
import os.path
import re
import shutil
import mlflow
from mlflow import cli
from mlflow.utils import process
from tests.integration.utils import invoke_cli_runner
import pytest
import json
import hashlib
EXAMPLES_DIR = "examples"
def hash_conda_env(conda_env_path):
# use the same hashing logic as `_get_conda_env_name` in mlflow/utils/conda.py
return hashlib.sha1(open(conda_env_path).read().encode("utf-8")).hexdigest()
def get_conda_envs():
stdout = process.exec_cmd(["conda", "env", "list", "--json"])[1]
return [os.path.basename(env) for env in json.loads(stdout)["envs"]]
def is_mlflow_conda_env(env_name):
return re.search(r"^mlflow-\w{40}$", env_name) is not None
def remove_conda_env(env_name):
process.exec_cmd(["conda", "remove", "--name", env_name, "--yes", "--all"])
def get_free_disk_space():
# https://stackoverflow.com/a/48929832/6943581
return shutil.disk_usage("/")[-1] / (2 ** 30)
def is_conda_yaml(path):
return bool(re.search("conda.ya?ml$", path))
def find_conda_yaml(directory):
conda_yaml = list(filter(is_conda_yaml, os.listdir(directory)))[0]
return os.path.join(directory, conda_yaml)
def replace_mlflow_with_dev_version(yml_path):
with open(yml_path, "r") as f:
old_src = f.read()
mlflow_dir = os.path.dirname(mlflow.__path__[0])
new_src = re.sub(r"- mlflow.*\n", "- {}\n".format(mlflow_dir), old_src)
with open(yml_path, "w") as f:
f.write(new_src)
@pytest.fixture(scope="function", autouse=True)
def clean_envs_and_cache():
yield
if get_free_disk_space() < 7.0: # unit: GiB
process.exec_cmd(["./dev/remove-conda-envs.sh"])
@pytest.fixture(scope="function", autouse=True)
def report_free_disk_space(capsys):
yield
with capsys.disabled():
print(" | Free disk space: {:.1f} GiB".format(get_free_disk_space()), end="")
@pytest.mark.large
@pytest.mark.parametrize(
"directory, params",
[
("h2o", []),
("hyperparam", ["-e", "train", "-P", "epochs=1"]),
("hyperparam", ["-e", "random", "-P", "epochs=1"]),
("hyperparam", ["-e", "gpyopt", "-P", "epochs=1"]),
("hyperparam", ["-e", "hyperopt", "-P", "epochs=1"]),
(
"lightgbm",
["-P", "learning_rate=0.1", "-P", "colsample_bytree=0.8", "-P", "subsample=0.9"],
),
("statsmodels", ["-P", "inverse_method=qr"]),
("pytorch", ["-P", "epochs=2"]),
("sklearn_logistic_regression", []),
("sklearn_elasticnet_wine", ["-P", "alpha=0.5"]),
(os.path.join("sklearn_elasticnet_diabetes", "linux"), []),
("spacy", []),
(os.path.join("tensorflow", "tf1"), ["-P", "steps=10"]),
(
"xgboost",
["-P", "learning_rate=0.3", "-P", "colsample_bytree=0.8", "-P", "subsample=0.9"],
),
("fastai", ["-P", "lr=0.02", "-P", "epochs=3"]),
(os.path.join("pytorch", "MNIST"), ["-P", "max_epochs=1"]),
(
os.path.join("pytorch", "BertNewsClassification"),
["-P", "max_epochs=1", "-P", "num_samples=100", "-P", "dataset=20newsgroups"],
),
(
os.path.join("pytorch", "AxHyperOptimizationPTL"),
["-P", "max_epochs=10", "-P", "total_trials=1"],
),
(
os.path.join("pytorch", "IterativePruning"),
["-P", "max_epochs=1", "-P", "total_trials=1"],
),
(os.path.join("pytorch", "CaptumExample"), ["-P", "max_epochs=50"]),
],
)
def test_mlflow_run_example(directory, params, tmpdir):
example_dir = os.path.join(EXAMPLES_DIR, directory)
tmp_example_dir = os.path.join(tmpdir.strpath, directory)
shutil.copytree(example_dir, tmp_example_dir)
conda_yml_path = find_conda_yaml(tmp_example_dir)
replace_mlflow_with_dev_version(conda_yml_path)
# remove old conda environments to free disk space
envs = list(filter(is_mlflow_conda_env, get_conda_envs()))
current_env_name = "mlflow-" + hash_conda_env(conda_yml_path)
envs_to_remove = list(filter(lambda e: e != current_env_name, envs))
for env in envs_to_remove:
remove_conda_env(env)
cli_run_list = [tmp_example_dir] + params
invoke_cli_runner(cli.run, cli_run_list)
@pytest.mark.large
@pytest.mark.parametrize(
"directory, command",
[
("docker", ["docker", "build", "-t", "mlflow-docker-example", "-f", "Dockerfile", "."]),
("gluon", ["python", "train.py"]),
("keras", ["python", "train.py"]),
(
"lightgbm",
[
"python",
"train.py",
"--learning-rate",
"0.2",
"--colsample-bytree",
"0.8",
"--subsample",
"0.9",
],
),
("statsmodels", ["python", "train.py", "--inverse-method", "qr"]),
("quickstart", ["python", "mlflow_tracking.py"]),
("remote_store", ["python", "remote_server.py"]),
(
"xgboost",
[
"python",
"train.py",
"--learning-rate",
"0.2",
"--colsample-bytree",
"0.8",
"--subsample",
"0.9",
],
),
("catboost", ["python", "train.py"]),
("prophet", ["python", "train.py"]),
("sklearn_autolog", ["python", "linear_regression.py"]),
("sklearn_autolog", ["python", "pipeline.py"]),
("sklearn_autolog", ["python", "grid_search_cv.py"]),
("pyspark_ml_autologging", ["python", "logistic_regression.py"]),
("pyspark_ml_autologging", ["python", "one_vs_rest.py"]),
("pyspark_ml_autologging", ["python", "pipeline.py"]),
("shap", ["python", "regression.py"]),
("shap", ["python", "binary_classification.py"]),
("shap", ["python", "multiclass_classification.py"]),
("shap", ["python", "explainer_logging.py"]),
("ray_serve", ["python", "train_model.py"]),
("pip_requirements", ["python", "pip_requirements.py"]),
],
)
def test_command_example(directory, command):
cwd_dir = os.path.join(EXAMPLES_DIR, directory)
process.exec_cmd(command, cwd=cwd_dir)
| 33.322751 | 96 | 0.566529 | import os
import os.path
import re
import shutil
import mlflow
from mlflow import cli
from mlflow.utils import process
from tests.integration.utils import invoke_cli_runner
import pytest
import json
import hashlib
EXAMPLES_DIR = "examples"
def hash_conda_env(conda_env_path):
return hashlib.sha1(open(conda_env_path).read().encode("utf-8")).hexdigest()
def get_conda_envs():
stdout = process.exec_cmd(["conda", "env", "list", "--json"])[1]
return [os.path.basename(env) for env in json.loads(stdout)["envs"]]
def is_mlflow_conda_env(env_name):
return re.search(r"^mlflow-\w{40}$", env_name) is not None
def remove_conda_env(env_name):
process.exec_cmd(["conda", "remove", "--name", env_name, "--yes", "--all"])
def get_free_disk_space():
return shutil.disk_usage("/")[-1] / (2 ** 30)
def is_conda_yaml(path):
return bool(re.search("conda.ya?ml$", path))
def find_conda_yaml(directory):
conda_yaml = list(filter(is_conda_yaml, os.listdir(directory)))[0]
return os.path.join(directory, conda_yaml)
def replace_mlflow_with_dev_version(yml_path):
with open(yml_path, "r") as f:
old_src = f.read()
mlflow_dir = os.path.dirname(mlflow.__path__[0])
new_src = re.sub(r"- mlflow.*\n", "- {}\n".format(mlflow_dir), old_src)
with open(yml_path, "w") as f:
f.write(new_src)
@pytest.fixture(scope="function", autouse=True)
def clean_envs_and_cache():
yield
if get_free_disk_space() < 7.0:
process.exec_cmd(["./dev/remove-conda-envs.sh"])
@pytest.fixture(scope="function", autouse=True)
def report_free_disk_space(capsys):
yield
with capsys.disabled():
print(" | Free disk space: {:.1f} GiB".format(get_free_disk_space()), end="")
@pytest.mark.large
@pytest.mark.parametrize(
"directory, params",
[
("h2o", []),
("hyperparam", ["-e", "train", "-P", "epochs=1"]),
("hyperparam", ["-e", "random", "-P", "epochs=1"]),
("hyperparam", ["-e", "gpyopt", "-P", "epochs=1"]),
("hyperparam", ["-e", "hyperopt", "-P", "epochs=1"]),
(
"lightgbm",
["-P", "learning_rate=0.1", "-P", "colsample_bytree=0.8", "-P", "subsample=0.9"],
),
("statsmodels", ["-P", "inverse_method=qr"]),
("pytorch", ["-P", "epochs=2"]),
("sklearn_logistic_regression", []),
("sklearn_elasticnet_wine", ["-P", "alpha=0.5"]),
(os.path.join("sklearn_elasticnet_diabetes", "linux"), []),
("spacy", []),
(os.path.join("tensorflow", "tf1"), ["-P", "steps=10"]),
(
"xgboost",
["-P", "learning_rate=0.3", "-P", "colsample_bytree=0.8", "-P", "subsample=0.9"],
),
("fastai", ["-P", "lr=0.02", "-P", "epochs=3"]),
(os.path.join("pytorch", "MNIST"), ["-P", "max_epochs=1"]),
(
os.path.join("pytorch", "BertNewsClassification"),
["-P", "max_epochs=1", "-P", "num_samples=100", "-P", "dataset=20newsgroups"],
),
(
os.path.join("pytorch", "AxHyperOptimizationPTL"),
["-P", "max_epochs=10", "-P", "total_trials=1"],
),
(
os.path.join("pytorch", "IterativePruning"),
["-P", "max_epochs=1", "-P", "total_trials=1"],
),
(os.path.join("pytorch", "CaptumExample"), ["-P", "max_epochs=50"]),
],
)
def test_mlflow_run_example(directory, params, tmpdir):
example_dir = os.path.join(EXAMPLES_DIR, directory)
tmp_example_dir = os.path.join(tmpdir.strpath, directory)
shutil.copytree(example_dir, tmp_example_dir)
conda_yml_path = find_conda_yaml(tmp_example_dir)
replace_mlflow_with_dev_version(conda_yml_path)
envs = list(filter(is_mlflow_conda_env, get_conda_envs()))
current_env_name = "mlflow-" + hash_conda_env(conda_yml_path)
envs_to_remove = list(filter(lambda e: e != current_env_name, envs))
for env in envs_to_remove:
remove_conda_env(env)
cli_run_list = [tmp_example_dir] + params
invoke_cli_runner(cli.run, cli_run_list)
@pytest.mark.large
@pytest.mark.parametrize(
"directory, command",
[
("docker", ["docker", "build", "-t", "mlflow-docker-example", "-f", "Dockerfile", "."]),
("gluon", ["python", "train.py"]),
("keras", ["python", "train.py"]),
(
"lightgbm",
[
"python",
"train.py",
"--learning-rate",
"0.2",
"--colsample-bytree",
"0.8",
"--subsample",
"0.9",
],
),
("statsmodels", ["python", "train.py", "--inverse-method", "qr"]),
("quickstart", ["python", "mlflow_tracking.py"]),
("remote_store", ["python", "remote_server.py"]),
(
"xgboost",
[
"python",
"train.py",
"--learning-rate",
"0.2",
"--colsample-bytree",
"0.8",
"--subsample",
"0.9",
],
),
("catboost", ["python", "train.py"]),
("prophet", ["python", "train.py"]),
("sklearn_autolog", ["python", "linear_regression.py"]),
("sklearn_autolog", ["python", "pipeline.py"]),
("sklearn_autolog", ["python", "grid_search_cv.py"]),
("pyspark_ml_autologging", ["python", "logistic_regression.py"]),
("pyspark_ml_autologging", ["python", "one_vs_rest.py"]),
("pyspark_ml_autologging", ["python", "pipeline.py"]),
("shap", ["python", "regression.py"]),
("shap", ["python", "binary_classification.py"]),
("shap", ["python", "multiclass_classification.py"]),
("shap", ["python", "explainer_logging.py"]),
("ray_serve", ["python", "train_model.py"]),
("pip_requirements", ["python", "pip_requirements.py"]),
],
)
def test_command_example(directory, command):
cwd_dir = os.path.join(EXAMPLES_DIR, directory)
process.exec_cmd(command, cwd=cwd_dir)
| true | true |
f739129edf4dad67c2248bd06ec3d471d9ba24b7 | 1,126 | py | Python | almost-triangles/almost-triangles.py | cdstanford/curiosities | 511e55b9dbf2b49db4593be92a0e4fce07888eaf | [
"MIT"
] | 1 | 2022-01-18T06:02:01.000Z | 2022-01-18T06:02:01.000Z | almost-triangles/almost-triangles.py | cdstanford/curiosities | 511e55b9dbf2b49db4593be92a0e4fce07888eaf | [
"MIT"
] | null | null | null | almost-triangles/almost-triangles.py | cdstanford/curiosities | 511e55b9dbf2b49db4593be92a0e4fce07888eaf | [
"MIT"
] | null | null | null | """
A triangle is an "almost right triangle" if one of its angles differs from
90 degrees by at most 15 degrees. A triangle is an "almost isosceles
triangle" if two of its angles differ from each other by at most 15
degrees. Prove that all acute triangles are either almost right or almost
isosceles.
Note: if "at most 15" is replaced by "less than 15" in the problem statement
(change "<= 15" to "< 15" everywhere below), the formula becomes satisfiable
and we get the following counterexample: a triangle with angles 45, 60, and 75.
"""
import z3
def triangle(x, y, z):
return z3.And(x > 0, y > 0, z > 0, x + y + z == 180)
def acute(x, y, z):
return z3.And(x < 90, y < 90, z < 90)
def abs(x):
return z3.If(x > 0, x, -x)
def almost_right(x, y, z):
return z3.Or(abs(x - 90) <= 15, abs(y - 90) <= 15, abs(z - 90) <= 15)
def almost_isosceles(x, y, z):
return z3.Or(abs(x - y) <= 15, abs(x - z) <= 15, abs(y - z) <= 15)
x = z3.Real("x")
y = z3.Real("y")
z = z3.Real("z")
z3.solve(
triangle(x, y, z),
acute(x, y, z),
z3.Not(almost_right(x, y, z)),
z3.Not(almost_isosceles(x, y, z)),
)
| 28.15 | 79 | 0.626998 |
import z3
def triangle(x, y, z):
return z3.And(x > 0, y > 0, z > 0, x + y + z == 180)
def acute(x, y, z):
return z3.And(x < 90, y < 90, z < 90)
def abs(x):
return z3.If(x > 0, x, -x)
def almost_right(x, y, z):
return z3.Or(abs(x - 90) <= 15, abs(y - 90) <= 15, abs(z - 90) <= 15)
def almost_isosceles(x, y, z):
return z3.Or(abs(x - y) <= 15, abs(x - z) <= 15, abs(y - z) <= 15)
x = z3.Real("x")
y = z3.Real("y")
z = z3.Real("z")
z3.solve(
triangle(x, y, z),
acute(x, y, z),
z3.Not(almost_right(x, y, z)),
z3.Not(almost_isosceles(x, y, z)),
)
| true | true |
f739132662219264ff68cf9039a96b10b8ef6495 | 4,179 | py | Python | lib/streamlit/util.py | sdrees/streamlit | d02c3fca9114c6b550785cca2fbe05ec37295f2b | [
"Apache-2.0"
] | 1 | 2019-11-01T08:37:00.000Z | 2019-11-01T08:37:00.000Z | lib/streamlit/util.py | sdrees/streamlit | d02c3fca9114c6b550785cca2fbe05ec37295f2b | [
"Apache-2.0"
] | null | null | null | lib/streamlit/util.py | sdrees/streamlit | d02c3fca9114c6b550785cca2fbe05ec37295f2b | [
"Apache-2.0"
] | null | null | null | # Copyright 2018-2022 Streamlit Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A bunch of useful utilities."""
import functools
import os
import subprocess
from typing import Any, Dict, List, TypeVar
from typing_extensions import Final
from streamlit import env_util
# URL of Streamlit's help page.
HELP_DOC: Final = "https://docs.streamlit.io/"
def memoize(func):
"""Decorator to memoize the result of a no-args func."""
result = [] # type: List[Any]
@functools.wraps(func)
def wrapped_func():
if not result:
result.append(func())
return result[0]
return wrapped_func
def open_browser(url):
"""Open a web browser pointing to a given URL.
We use this function instead of Python's `webbrowser` module because this
way we can capture stdout/stderr to avoid polluting the terminal with the
browser's messages. For example, Chrome always prints things like "Created
new window in existing browser session", and those get on the user's way.
url : str
The URL. Must include the protocol.
"""
# Treat Windows separately because:
# 1. /dev/null doesn't exist.
# 2. subprocess.Popen(['start', url]) doesn't actually pop up the
# browser even though 'start url' works from the command prompt.
# Fun!
# Also, use webbrowser if we are on Linux and xdg-open is not installed.
#
# We don't use the webbrowser module on Linux and Mac because some browsers
# (ahem... Chrome) always print "Opening in existing browser session" to
# the terminal, which is spammy and annoying. So instead we start the
# browser ourselves and send all its output to /dev/null.
if env_util.IS_WINDOWS:
_open_browser_with_webbrowser(url)
return
if env_util.IS_LINUX_OR_BSD:
if env_util.is_executable_in_path("xdg-open"):
_open_browser_with_command("xdg-open", url)
return
_open_browser_with_webbrowser(url)
return
if env_util.IS_DARWIN:
_open_browser_with_command("open", url)
return
import platform
raise Error('Cannot open browser in platform "%s"' % platform.system())
def _open_browser_with_webbrowser(url):
import webbrowser
webbrowser.open(url)
def _open_browser_with_command(command, url):
cmd_line = [command, url]
with open(os.devnull, "w") as devnull:
subprocess.Popen(cmd_line, stdout=devnull, stderr=subprocess.STDOUT)
def _maybe_tuple_to_list(item: Any) -> Any:
"""Convert a tuple to a list. Leave as is if it's not a tuple."""
if isinstance(item, tuple):
return list(item)
return item
def repr_(cls) -> str:
classname = cls.__class__.__name__
args = ", ".join([f"{k}={repr(v)}" for (k, v) in cls.__dict__.items()])
return f"{classname}({args})"
def index_(iterable, x) -> int:
"""Return zero-based index of the first item whose value is equal to x.
Raises a ValueError if there is no such item.
We need a custom implementation instead of the built-in list .index() to
be compatible with NumPy array and Pandas Series.
Parameters
----------
iterable : list, tuple, numpy.ndarray, pandas.Series
Returns
-------
int
"""
for i, value in enumerate(iterable):
if x == value:
return i
raise ValueError("{} is not in iterable".format(str(x)))
_Value = TypeVar("_Value")
def lower_clean_dict_keys(dict: Dict[str, _Value]) -> Dict[str, _Value]:
return {k.lower().strip(): v for k, v in dict.items()}
# TODO: Move this into errors.py? Replace with StreamlitAPIException?
class Error(Exception):
pass
| 29.223776 | 79 | 0.682699 |
import functools
import os
import subprocess
from typing import Any, Dict, List, TypeVar
from typing_extensions import Final
from streamlit import env_util
HELP_DOC: Final = "https://docs.streamlit.io/"
def memoize(func):
result = [] # type: List[Any]
@functools.wraps(func)
def wrapped_func():
if not result:
result.append(func())
return result[0]
return wrapped_func
def open_browser(url):
# Treat Windows separately because:
# 1. /dev/null doesn't exist.
# browser even though 'start url' works from the command prompt.
# Fun!
# Also, use webbrowser if we are on Linux and xdg-open is not installed.
#
# We don't use the webbrowser module on Linux and Mac because some browsers
if env_util.IS_WINDOWS:
_open_browser_with_webbrowser(url)
return
if env_util.IS_LINUX_OR_BSD:
if env_util.is_executable_in_path("xdg-open"):
_open_browser_with_command("xdg-open", url)
return
_open_browser_with_webbrowser(url)
return
if env_util.IS_DARWIN:
_open_browser_with_command("open", url)
return
import platform
raise Error('Cannot open browser in platform "%s"' % platform.system())
def _open_browser_with_webbrowser(url):
import webbrowser
webbrowser.open(url)
def _open_browser_with_command(command, url):
cmd_line = [command, url]
with open(os.devnull, "w") as devnull:
subprocess.Popen(cmd_line, stdout=devnull, stderr=subprocess.STDOUT)
def _maybe_tuple_to_list(item: Any) -> Any:
if isinstance(item, tuple):
return list(item)
return item
def repr_(cls) -> str:
classname = cls.__class__.__name__
args = ", ".join([f"{k}={repr(v)}" for (k, v) in cls.__dict__.items()])
return f"{classname}({args})"
def index_(iterable, x) -> int:
for i, value in enumerate(iterable):
if x == value:
return i
raise ValueError("{} is not in iterable".format(str(x)))
_Value = TypeVar("_Value")
def lower_clean_dict_keys(dict: Dict[str, _Value]) -> Dict[str, _Value]:
return {k.lower().strip(): v for k, v in dict.items()}
class Error(Exception):
pass
| true | true |
f7391359e8c5b762abefb4eac0e7c17892575845 | 250 | py | Python | src/atcoder/abc212/b/sol_0.py | kagemeka/competitive-programming | c70fe481bcd518f507b885fc9234691d8ce63171 | [
"MIT"
] | 1 | 2021-07-11T03:20:10.000Z | 2021-07-11T03:20:10.000Z | src/atcoder/abc212/b/sol_0.py | kagemeka/competitive-programming | c70fe481bcd518f507b885fc9234691d8ce63171 | [
"MIT"
] | 39 | 2021-07-10T05:21:09.000Z | 2021-12-15T06:10:12.000Z | src/atcoder/abc212/b/sol_0.py | kagemeka/competitive-programming | c70fe481bcd518f507b885fc9234691d8ce63171 | [
"MIT"
] | null | null | null | def main():
s = input()
if len(set(s)) == 1:
print('Weak')
return
*s, = map(int, list(s))
for i in range(3):
if (
s[i + 1]
!= (s[i] + 1) % 10
):
print('Strong')
return
print('Weak')
main() | 12.5 | 25 | 0.408 | def main():
s = input()
if len(set(s)) == 1:
print('Weak')
return
*s, = map(int, list(s))
for i in range(3):
if (
s[i + 1]
!= (s[i] + 1) % 10
):
print('Strong')
return
print('Weak')
main() | true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.