hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71a0aff4fcdf231c01d2475d9139acabde40491 | 1,135 | py | Python | setup.py | hugis/robotframework-djangorobotlibrary | 89400ea24a5d8ecf4c619fd39dc7d0a547c73fe7 | [
"MIT"
] | null | null | null | setup.py | hugis/robotframework-djangorobotlibrary | 89400ea24a5d8ecf4c619fd39dc7d0a547c73fe7 | [
"MIT"
] | null | null | null | setup.py | hugis/robotframework-djangorobotlibrary | 89400ea24a5d8ecf4c619fd39dc7d0a547c73fe7 | [
"MIT"
] | null | null | null | from os import path
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="robotframework-djangorobotlibrary",
version="19.1a0",
description="A Robot Framework library for Django.",
long_description=long_description,
url="https://github.com/hugis/robotframework-djangorobotlibrary",
author="Peter Hyben",
author_email="peter.hyben@hugis.eu",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Environment :: Web Environment",
"Framework :: Robot Framework",
"Framework :: Django",
"Framework :: Django :: 2.2",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
keywords="robotframework django test",
packages=find_packages(),
install_requires=["Django>=2.2", "factory_boy", "robotframework"],
project_urls={
"Source": "https://github.com/hugis/robotframework-djangorobotlibrary"
},
)
| 31.527778 | 78 | 0.656388 | from os import path
from setuptools import setup, find_packages
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
setup(
name="robotframework-djangorobotlibrary",
version="19.1a0",
description="A Robot Framework library for Django.",
long_description=long_description,
url="https://github.com/hugis/robotframework-djangorobotlibrary",
author="Peter Hyben",
author_email="peter.hyben@hugis.eu",
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Environment :: Web Environment",
"Framework :: Robot Framework",
"Framework :: Django",
"Framework :: Django :: 2.2",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
keywords="robotframework django test",
packages=find_packages(),
install_requires=["Django>=2.2", "factory_boy", "robotframework"],
project_urls={
"Source": "https://github.com/hugis/robotframework-djangorobotlibrary"
},
)
| true | true |
f71a0b9b1f1d422978ee7d52875c6f364e06e910 | 201 | py | Python | api/words_vector/admin.py | leandrocamposcardoso/VetorDePalavras | 76d442d0343e85a0edc55ca91b76480c30b3127a | [
"MIT"
] | null | null | null | api/words_vector/admin.py | leandrocamposcardoso/VetorDePalavras | 76d442d0343e85a0edc55ca91b76480c30b3127a | [
"MIT"
] | null | null | null | api/words_vector/admin.py | leandrocamposcardoso/VetorDePalavras | 76d442d0343e85a0edc55ca91b76480c30b3127a | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Logs
# Register your models here.
@admin.register(Logs)
class TextAdmin(admin.ModelAdmin):
list_display = ('files', 'vocabulary', 'vectors')
| 20.1 | 53 | 0.746269 | from django.contrib import admin
from .models import Logs
@admin.register(Logs)
class TextAdmin(admin.ModelAdmin):
list_display = ('files', 'vocabulary', 'vectors')
| true | true |
f71a0bd6b7d9c82ddfd1fe5eeabf8b4cdd16ce54 | 1,108 | py | Python | fake_fs.py | osteotek/yamr | d54a092a8520c4b3133db9a87d4fc013879fbf33 | [
"MIT"
] | 3 | 2017-07-11T15:33:35.000Z | 2021-03-11T22:14:33.000Z | fake_fs.py | osteotek/yamr | d54a092a8520c4b3133db9a87d4fc013879fbf33 | [
"MIT"
] | null | null | null | fake_fs.py | osteotek/yamr | d54a092a8520c4b3133db9a87d4fc013879fbf33 | [
"MIT"
] | 1 | 2017-02-19T21:46:35.000Z | 2017-02-19T21:46:35.000Z | import os
from enums import Status
class FakeFS:
def __init__(self, base_dir="/var/fake_fs"):
self.base_dir = base_dir
def get_chunk(self, path):
    """Read the virtual file at base_dir + path.

    Returns {'status': Status.not_found} when the file is absent,
    otherwise {'status': Status.ok, 'data': <file contents>}.
    """
    chunk_path = self.base_dir + path
    if not os.path.isfile(chunk_path):
        return {'status': Status.not_found}
    with open(chunk_path, 'r') as fh:
        contents = fh.read()
    return {'status': Status.ok, 'data': contents}
def download_to(self, v_path, l_path):
    """Copy the virtual file at base_dir + v_path to the local path l_path.

    Returns {'status': Status.not_found} if the source does not exist,
    otherwise {'status': Status.ok} after the copy is written.
    """
    src_path = self.base_dir + v_path
    if not os.path.isfile(src_path):
        return {'status': Status.not_found}
    with open(src_path, 'r') as f:
        data = f.read()
    # os.makedirs('') raises FileNotFoundError, so only create parent
    # directories when l_path actually has a directory component.
    dst_dir = os.path.dirname(l_path)
    if dst_dir:
        os.makedirs(dst_dir, exist_ok=True)
    with open(l_path, "w") as f:
        f.write(data)
    return {'status': Status.ok}
def save(self, data, path):
full_path = self.base_dir + path
os.makedirs(os.path.dirname(full_path), exist_ok=True)
with open(full_path, 'w+') as f:
f.write(data)
return {'status': Status.ok} | 25.767442 | 62 | 0.5713 | import os
from enums import Status
class FakeFS:
def __init__(self, base_dir="/var/fake_fs"):
self.base_dir = base_dir
def get_chunk(self, path):
full_path = self.base_dir + path
if not os.path.isfile(full_path):
return {'status': Status.not_found}
data = None
with open(full_path, 'r') as f:
data = f.read()
return {'status': Status.ok, 'data': data}
def download_to(self, v_path, l_path):
full_path = self.base_dir + v_path
if not os.path.isfile(full_path):
return {'status': Status.not_found}
data = None
with open(full_path, 'r') as f:
data = f.read()
os.makedirs(os.path.dirname(l_path), exist_ok=True)
with open(l_path, "w") as f:
f.write(data)
return {'status': Status.ok}
def save(self, data, path):
full_path = self.base_dir + path
os.makedirs(os.path.dirname(full_path), exist_ok=True)
with open(full_path, 'w+') as f:
f.write(data)
return {'status': Status.ok} | true | true |
f71a0c12785a008b991a752c3e60e2420e801e74 | 879 | py | Python | MatchSocks.py | zubin-madon/PottyPunksNFT | d43234641ea3f30c963fb3af7edb249862a62788 | [
"MIT"
] | null | null | null | MatchSocks.py | zubin-madon/PottyPunksNFT | d43234641ea3f30c963fb3af7edb249862a62788 | [
"MIT"
] | null | null | null | MatchSocks.py | zubin-madon/PottyPunksNFT | d43234641ea3f30c963fb3af7edb249862a62788 | [
"MIT"
] | null | null | null | #Match socks to pant colour.
# Tint the socks overlay to match each pant layer's colour, then composite
# the recoloured socks onto every pant image in place.
# NOTE(review): numpy and urllib.request are imported but never used below.
import numpy as np
from PIL import Image
import urllib.request
import os
# Directory holding one image per pant colour variant.
directory = 'layers/layers_for_art_engine/Pant'
for filename in os.listdir(directory):
image = os.path.join(directory, filename)
pant = Image.open(image)
socks = Image.open('layers/socks.png') #change the file path with your own of course!
width, height = socks.size
# Sample one reference pixel as "the pant colour"; assumes every pant
# variant covers coordinate (200, 350) -- TODO confirm.
pant_color = pant.getpixel((200, 350))
for x in range(width):
for y in range(height):
current_color = socks.getpixel((x, y))
r = pant_color[0]
g = pant_color[1]
b = pant_color[2]
# Keep the sock pixel's own alpha; assumes socks.png is RGBA -- TODO confirm.
a = current_color[-1]
# Recolour every pixel that is not fully-transparent white.
if current_color != (255, 255, 255, 0):
socks.putpixel((x, y), (r, g, b, a))
pant.paste(socks, (0, 0), socks) #combine the new coloured socks with the pant layer.
pant.save(image)  # overwrite the pant image in place
| 35.16 | 89 | 0.622298 |
import numpy as np
from PIL import Image
import urllib.request
import os
directory = 'layers/layers_for_art_engine/Pant'
for filename in os.listdir(directory):
image = os.path.join(directory, filename)
pant = Image.open(image)
socks = Image.open('layers/socks.png')
width, height = socks.size
pant_color = pant.getpixel((200, 350))
for x in range(width):
for y in range(height):
current_color = socks.getpixel((x, y))
r = pant_color[0]
g = pant_color[1]
b = pant_color[2]
a = current_color[-1]
if current_color != (255, 255, 255, 0):
socks.putpixel((x, y), (r, g, b, a))
pant.paste(socks, (0, 0), socks)
pant.save(image)
| true | true |
f71a0cdd77d197858c517e9b653ef4a7fe7e5d24 | 1,462 | py | Python | gae/third_party/poster/__init__.py | Purus/LaunchKitDocker | b8aaf9f1d8943a76ae7e0a81e15e6bebd4b9b08e | [
"Apache-2.0"
] | 2,341 | 2016-07-27T17:23:23.000Z | 2022-03-28T03:55:15.000Z | gae/third_party/poster/__init__.py | Purus/LaunchKitDocker | b8aaf9f1d8943a76ae7e0a81e15e6bebd4b9b08e | [
"Apache-2.0"
] | 52 | 2016-07-27T23:12:21.000Z | 2022-03-11T23:17:41.000Z | gae/third_party/poster/__init__.py | Purus/LaunchKitDocker | b8aaf9f1d8943a76ae7e0a81e15e6bebd4b9b08e | [
"Apache-2.0"
] | 324 | 2016-07-27T18:34:53.000Z | 2022-03-25T08:56:24.000Z | # Copyright (c) 2011 Chris AtLee
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""poster module
Support for streaming HTTP uploads, and multipart/form-data encoding
```poster.version``` is a 3-tuple of integers representing the version number.
New releases of poster will always have a version number that compares greater
than an older version of poster.
New in version 0.6."""
import streaminghttp
import encode
version = (0, 8, 1) # Thanks JP!
| 44.30303 | 79 | 0.776334 |
import streaminghttp
import encode
version = (0, 8, 1)
| true | true |
f71a0d63e90a61ad5e75bd468ec2c1a1b9348342 | 5,306 | py | Python | test/functional/abc-p2p-avalanche.py | kryvel/bitcoin-abc | 6330d8ccc8b1b720c42c8c9239dadc8240ca5025 | [
"MIT"
] | null | null | null | test/functional/abc-p2p-avalanche.py | kryvel/bitcoin-abc | 6330d8ccc8b1b720c42c8c9239dadc8240ca5025 | [
"MIT"
] | null | null | null | test/functional/abc-p2p-avalanche.py | kryvel/bitcoin-abc | 6330d8ccc8b1b720c42c8c9239dadc8240ca5025 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the resolution of forks via avalanche."""
import random
from test_framework.mininode import P2PInterface, mininode_lock
from test_framework.messages import AvalancheVote, CInv, msg_avapoll
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, wait_until
from test_framework import schnorr
BLOCK_ACCEPTED = 0
BLOCK_REJECTED = 1
BLOCK_UNKNOWN = -1
class TestNode(P2PInterface):
# Minimal avalanche-aware P2P peer: records the most recent avaresponse
# message and can emit avapoll messages for a list of block hashes.
def __init__(self):
self.last_avaresponse = None
super().__init__()
def on_avaresponse(self, message):
# Handler invoked by the P2P framework for each avaresponse message.
self.last_avaresponse = message.response
def send_poll(self, hashes):
# Ask the node to vote on each block hash (inv type 2 = block).
msg = msg_avapoll()
for h in hashes:
msg.poll.invs.append(CInv(2, h))
self.send_message(msg)
def wait_for_avaresponse(self, timeout=10):
self.sync_with_ping()
# Block until an avaresponse differing from the last recorded one
# arrives, or raise on timeout.
def test_function():
m = self.last_message.get("avaresponse")
return m is not None and m != self.last_avaresponse
wait_until(test_function, timeout=timeout, lock=mininode_lock)
class AvalancheTest(BitcoinTestFramework):
# Functional test: poll a single node over the avalanche protocol and
# check its votes for accepted, rejected and unknown blocks.
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [['-enableavalanche=1', '-avacooldown=0']]
def run_test(self):
node = self.nodes[0]
# Create a fake node and connect it to our real node.
poll_node = TestNode()
node.add_p2p_connection(poll_node)
poll_node.wait_for_verack()
poll_node.sync_with_ping()
# Generate many blocks and poll for them.
address = node.get_deterministic_priv_key().address
node.generatetoaddress(100, address)
# Get the key so we can verify signatures.
avakey = bytes.fromhex(node.getavalanchekey())
self.log.info("Poll for the chain tip...")
best_block_hash = int(node.getbestblockhash(), 16)
poll_node.send_poll([best_block_hash])
poll_node.wait_for_avaresponse()
# Helper: check cooldown, signature, and the exact vote list.
def assert_response(response, expected):
r = response.response
assert_equal(r.cooldown, 0)
# Verify signature.
assert schnorr.verify(response.sig, avakey, r.get_hash())
votes = r.votes
self.log.info("response: {}".format(repr(response)))
assert_equal(len(votes), len(expected))
for i in range(0, len(votes)):
assert_equal(repr(votes[i]), repr(expected[i]))
assert_response(poll_node.last_avaresponse, [
AvalancheVote(BLOCK_ACCEPTED, best_block_hash)])
self.log.info("Poll for a selection of blocks...")
various_block_hashes = [
int(node.getblockhash(0), 16),
int(node.getblockhash(1), 16),
int(node.getblockhash(10), 16),
int(node.getblockhash(25), 16),
int(node.getblockhash(42), 16),
int(node.getblockhash(96), 16),
int(node.getblockhash(99), 16),
int(node.getblockhash(100), 16),
]
poll_node.send_poll(various_block_hashes)
poll_node.wait_for_avaresponse()
assert_response(poll_node.last_avaresponse,
[AvalancheVote(BLOCK_ACCEPTED, h) for h in various_block_hashes])
self.log.info(
"Poll for a selection of blocks, but some are now invalid...")
# Invalidate block 75, mine a replacement chain, then reconsider:
# blocks 96/99/100 end up on the losing fork and should be rejected.
invalidated_block = node.getblockhash(75)
node.invalidateblock(invalidated_block)
# We need to send the coin to a new address in order to make sure we do
# not regenerate the same block.
node.generatetoaddress(
30, 'bchreg:pqv2r67sgz3qumufap3h2uuj0zfmnzuv8v7ej0fffv')
node.reconsiderblock(invalidated_block)
poll_node.send_poll(various_block_hashes)
poll_node.wait_for_avaresponse()
assert_response(poll_node.last_avaresponse,
[AvalancheVote(BLOCK_ACCEPTED, h) for h in various_block_hashes[:5]] +
[AvalancheVote(BLOCK_REJECTED, h) for h in various_block_hashes[-3:]])
self.log.info("Poll for unknown blocks...")
# Mix known (accepted/rejected) hashes with random 256-bit values that
# the node cannot know about.
various_block_hashes = [
int(node.getblockhash(0), 16),
int(node.getblockhash(25), 16),
int(node.getblockhash(42), 16),
various_block_hashes[5],
various_block_hashes[6],
various_block_hashes[7],
random.randrange(1 << 255, (1 << 256) - 1),
random.randrange(1 << 255, (1 << 256) - 1),
random.randrange(1 << 255, (1 << 256) - 1),
]
poll_node.send_poll(various_block_hashes)
poll_node.wait_for_avaresponse()
assert_response(poll_node.last_avaresponse,
[AvalancheVote(BLOCK_ACCEPTED, h) for h in various_block_hashes[:3]] +
[AvalancheVote(BLOCK_REJECTED, h) for h in various_block_hashes[3:6]] +
[AvalancheVote(BLOCK_UNKNOWN, h) for h in various_block_hashes[-3:]])
if __name__ == '__main__':
AvalancheTest().main()
| 37.366197 | 95 | 0.637392 |
import random
from test_framework.mininode import P2PInterface, mininode_lock
from test_framework.messages import AvalancheVote, CInv, msg_avapoll
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, wait_until
from test_framework import schnorr
BLOCK_ACCEPTED = 0
BLOCK_REJECTED = 1
BLOCK_UNKNOWN = -1
class TestNode(P2PInterface):
def __init__(self):
self.last_avaresponse = None
super().__init__()
def on_avaresponse(self, message):
self.last_avaresponse = message.response
def send_poll(self, hashes):
msg = msg_avapoll()
for h in hashes:
msg.poll.invs.append(CInv(2, h))
self.send_message(msg)
def wait_for_avaresponse(self, timeout=10):
self.sync_with_ping()
def test_function():
m = self.last_message.get("avaresponse")
return m is not None and m != self.last_avaresponse
wait_until(test_function, timeout=timeout, lock=mininode_lock)
class AvalancheTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [['-enableavalanche=1', '-avacooldown=0']]
def run_test(self):
node = self.nodes[0]
poll_node = TestNode()
node.add_p2p_connection(poll_node)
poll_node.wait_for_verack()
poll_node.sync_with_ping()
address = node.get_deterministic_priv_key().address
node.generatetoaddress(100, address)
avakey = bytes.fromhex(node.getavalanchekey())
self.log.info("Poll for the chain tip...")
best_block_hash = int(node.getbestblockhash(), 16)
poll_node.send_poll([best_block_hash])
poll_node.wait_for_avaresponse()
def assert_response(response, expected):
r = response.response
assert_equal(r.cooldown, 0)
assert schnorr.verify(response.sig, avakey, r.get_hash())
votes = r.votes
self.log.info("response: {}".format(repr(response)))
assert_equal(len(votes), len(expected))
for i in range(0, len(votes)):
assert_equal(repr(votes[i]), repr(expected[i]))
assert_response(poll_node.last_avaresponse, [
AvalancheVote(BLOCK_ACCEPTED, best_block_hash)])
self.log.info("Poll for a selection of blocks...")
various_block_hashes = [
int(node.getblockhash(0), 16),
int(node.getblockhash(1), 16),
int(node.getblockhash(10), 16),
int(node.getblockhash(25), 16),
int(node.getblockhash(42), 16),
int(node.getblockhash(96), 16),
int(node.getblockhash(99), 16),
int(node.getblockhash(100), 16),
]
poll_node.send_poll(various_block_hashes)
poll_node.wait_for_avaresponse()
assert_response(poll_node.last_avaresponse,
[AvalancheVote(BLOCK_ACCEPTED, h) for h in various_block_hashes])
self.log.info(
"Poll for a selection of blocks, but some are now invalid...")
invalidated_block = node.getblockhash(75)
node.invalidateblock(invalidated_block)
node.generatetoaddress(
30, 'bchreg:pqv2r67sgz3qumufap3h2uuj0zfmnzuv8v7ej0fffv')
node.reconsiderblock(invalidated_block)
poll_node.send_poll(various_block_hashes)
poll_node.wait_for_avaresponse()
assert_response(poll_node.last_avaresponse,
[AvalancheVote(BLOCK_ACCEPTED, h) for h in various_block_hashes[:5]] +
[AvalancheVote(BLOCK_REJECTED, h) for h in various_block_hashes[-3:]])
self.log.info("Poll for unknown blocks...")
various_block_hashes = [
int(node.getblockhash(0), 16),
int(node.getblockhash(25), 16),
int(node.getblockhash(42), 16),
various_block_hashes[5],
various_block_hashes[6],
various_block_hashes[7],
random.randrange(1 << 255, (1 << 256) - 1),
random.randrange(1 << 255, (1 << 256) - 1),
random.randrange(1 << 255, (1 << 256) - 1),
]
poll_node.send_poll(various_block_hashes)
poll_node.wait_for_avaresponse()
assert_response(poll_node.last_avaresponse,
[AvalancheVote(BLOCK_ACCEPTED, h) for h in various_block_hashes[:3]] +
[AvalancheVote(BLOCK_REJECTED, h) for h in various_block_hashes[3:6]] +
[AvalancheVote(BLOCK_UNKNOWN, h) for h in various_block_hashes[-3:]])
if __name__ == '__main__':
AvalancheTest().main()
| true | true |
f71a0d98d569fd7b3be4fc2f4b330fae23d90e4b | 132,009 | py | Python | tofu/geom/_core_optics.py | Didou09/tofu | 4a4e1f058bab8e7556ed9d518f90807cec605476 | [
"MIT"
] | 6 | 2016-09-15T17:01:19.000Z | 2017-03-06T22:53:10.000Z | tofu/geom/_core_optics.py | Didou09/tofu | 4a4e1f058bab8e7556ed9d518f90807cec605476 | [
"MIT"
] | 9 | 2016-09-14T17:23:52.000Z | 2017-04-13T07:30:07.000Z | tofu/geom/_core_optics.py | Didou09/tofu | 4a4e1f058bab8e7556ed9d518f90807cec605476 | [
"MIT"
] | null | null | null |
"""
This module is the geometrical part of the ToFu general package
It includes all functions and object classes necessary for tomography on Tokamaks
"""
# Built-in
import sys
import os
import warnings
import copy
# Common
import numpy as np
import scipy.interpolate as scpinterp
import scipy.stats as scpstats
import datetime as dtm
import matplotlib.pyplot as plt
import matplotlib as mpl
# ToFu-specific
from tofu import __version__ as __version__
import tofu.pathfile as tfpf
import tofu.utils as utils
from . import _def as _def
from . import _GG as _GG
from . import _core
from . import _check_optics
from . import _comp_optics as _comp_optics
from . import _plot_optics as _plot_optics
import tofu.spectro._rockingcurve as _rockingcurve
__all__ = ['CrystalBragg']
_Type = 'Tor'
_NTHREADS = 16
# rotate / translate instance
_RETURN_COPY = False
_USE_NON_PARALLELISM = True
"""
###############################################################################
###############################################################################
Ves class and functions
###############################################################################
###############################################################################
"""
class CrystalBragg(utils.ToFuObject):
""" A class defining crystals for Bragg diffraction
A crystal can be of Type flat, cylindrical or spherical
It is characterized by its:
- geometry (Type, dimensions, curvature radii and position/orientation)
- Material and lattice
- Bragg parameters (angle vs lambda)
Parameters
----------
Id : str / tfpf.ID
A name string or a pre-built tfpf.ID class to be used to identify this
particular instance, if a string is provided, it is fed to tfpf.ID()
dgeom : dict
An array (2,N) or (N,2) defining the contour of the vacuum vessel in a
cross-section, if not closed, will be closed automatically
dspectral: str
Flag indicating whether the vessel will be a torus ('Tor') or a linear
device ('Lin')
SavePath : None / str
If provided, forces the default saving path of the object to the
provided value
"""
# Fixed (class-wise) dictionary of default properties
_ddef = {
'Id': {
'shot': 0, 'Exp': 'dummy', 'Diag': 'dummy',
'include': [
'Mod', 'Cls', 'Exp', 'Diag', 'Name', 'shot', 'version',
],
},
'dgeom': {'Type': 'sph', 'Typeoutline': 'rect'},
'dmat': {},
'dbragg': {'braggref': np.pi/4.},
'dmisc': {'color': 'k'},
}
_dplot = {'cross':{'Elt':'P',
'dP':{'color':'k','lw':2},
'dI':{'color':'k','ls':'--','marker':'x','ms':8,'mew':2},
'dBs':{'color':'b','ls':'--','marker':'x','ms':8,'mew':2},
'dBv':{'color':'g','ls':'--','marker':'x','ms':8,'mew':2},
'dVect':{'color':'r','scale':10}},
'hor':{'Elt':'P',
'dP':{'color':'k','lw':2},
'dI':{'color':'k','ls':'--'},
'dBs':{'color':'b','ls':'--'},
'dBv':{'color':'g','ls':'--'},
'Nstep':50},
'3d':{}}
# _DEFLAMB = 3.971561e-10
# _DEFNPEAKS = 12
# _DREFLECT_DTYPES = {'specular':0, 'diffusive':1, 'ccube':2}
# Does not exist before Python 3.6 !!!
def __init_subclass__(cls, color='k', **kwdargs):
# Give every subclass its own deep copies of the class-level default
# dicts, so subclass mutations never leak back into CrystalBragg.
# Python 2
super(CrystalBragg,cls).__init_subclass__(**kwdargs)
# Python 3
#super().__init_subclass__(**kwdargs)
cls._ddef = copy.deepcopy(CrystalBragg._ddef)
cls._dplot = copy.deepcopy(CrystalBragg._dplot)
# NOTE(review): relies on cls._color being defined on the subclass
# before subclassing happens -- confirm against callers.
cls._set_color_ddef(cls._color)
@classmethod
def _set_color_ddef(cls, color):
cls._ddef['dmisc']['color'] = mpl.colors.to_rgba(color)
def __init__(self, dgeom=None, dmat=None, dbragg=None,
Id=None, Name=None, Exp=None, Diag=None, shot=None,
fromdict=None, sep=None,
SavePath=os.path.abspath('./'),
SavePath_Include=tfpf.defInclude, color=None):
# NOTE(review): the SavePath default is evaluated once at class
# definition time (cwd at import), not at each call.
# To replace __init_subclass__ for Python 2
if sys.version[0]=='2':
self._dstrip = utils.ToFuObjectBase._dstrip.copy()
self.__class__._strip_init()
# Create a dplot at instance level
self._dplot = copy.deepcopy(self.__class__._dplot)
# Forward every constructor argument (except self) to the base class,
# which dispatches them to _init() and the Id machinery.
kwdargs = locals()
del kwdargs['self']
# super()
super(CrystalBragg,self).__init__(**kwdargs)
def _reset(self):
# Re-initialise all internal dicts to their key skeletons (all values
# set to None) on top of the base-class reset.
# super()
super(CrystalBragg,self)._reset()
self._dgeom = dict.fromkeys(self._get_keys_dgeom())
self._dmat = dict.fromkeys(self._get_keys_dmat())
self._dbragg = dict.fromkeys(self._get_keys_dbragg())
self._dmisc = dict.fromkeys(self._get_keys_dmisc())
#self._dplot = copy.deepcopy(self.__class__._ddef['dplot'])
@classmethod
def _checkformat_inputs_Id(cls, Id=None, Name=None,
Exp=None, Diag=None, shot=None, Type=None,
include=None,
**kwdargs):
# Normalise identification inputs: an existing utils.ID instance
# overrides Name/Exp/Type; anything still None falls back to the
# class-level defaults in cls._ddef.
if Id is not None:
assert isinstance(Id,utils.ID)
Name, Exp, Type = Id.Name, Id.Exp, Id.Type
if Type is None:
Type = cls._ddef['dgeom']['Type']
if Exp is None:
Exp = cls._ddef['Id']['Exp']
if Diag is None:
Diag = cls._ddef['Id']['Diag']
if shot is None:
shot = cls._ddef['Id']['shot']
if include is None:
include = cls._ddef['Id']['include']
# Generic type/value validation; only spherical crystals supported.
dins = {'Name':{'var':Name, 'cls':str},
'Exp': {'var':Exp, 'cls':str},
'Diag': {'var':Diag, 'cls':str},
'shot': {'var':shot, 'cls':int},
'Type': {'var':Type, 'in':['sph']},
'include':{'var':include, 'listof':str}}
dins, err, msg = cls._check_InputsGeneric(dins)
if err:
raise Exception(msg)
# Return the validated values merged into the remaining kwargs.
kwdargs.update({'Name':Name, 'shot':shot,
'Exp':Exp, 'Diag':Diag, 'Type':Type,
'include':include})
return kwdargs
###########
# Get largs
###########
@staticmethod
def _get_largs_dgeom(sino=True):
largs = ['dgeom']
return largs
@staticmethod
def _get_largs_dmat():
largs = ['dmat']
return largs
@staticmethod
def _get_largs_dbragg():
largs = ['dbragg']
return largs
@staticmethod
def _get_largs_dmisc():
largs = ['color']
return largs
###########
# Get keys of dictionnaries
###########
@staticmethod
def _get_keys_dgeom():
lk = ['Type', 'Typeoutline',
'summit', 'center', 'extenthalf', 'surface',
'nin', 'nout', 'e1', 'e2', 'rcurve',
'move', 'move_param', 'move_kwdargs']
return lk
@staticmethod
def _get_keys_dmat():
lk = ['formula', 'density', 'symmetry',
'lengths', 'angles', 'cut', 'd',
'alpha', 'beta', 'nin', 'nout', 'e1', 'e2']
return lk
@staticmethod
def _get_keys_dbragg():
lk = ['rockingcurve', 'lambref', 'braggref']
return lk
@staticmethod
def _get_keys_dmisc():
lk = ['color']
return lk
###########
# _init
###########
def _init(self, dgeom=None, dmat=None, dbragg=None,
color=None, **kwdargs):
# Dispatch constructor kwargs to each setter: for every sub-dict the
# matching _get_largs_* list selects which kwargs it consumes.
allkwds = dict(locals(), **kwdargs)
largs = self._get_largs_dgeom()
kwds = self._extract_kwdargs(allkwds, largs)
self.set_dgeom(**kwds)
largs = self._get_largs_dmat()
kwds = self._extract_kwdargs(allkwds, largs)
self.set_dmat(**kwds)
largs = self._get_largs_dbragg()
kwds = self._extract_kwdargs(allkwds, largs)
self.set_dbragg(**kwds)
largs = self._get_largs_dmisc()
kwds = self._extract_kwdargs(allkwds, largs)
self._set_dmisc(**kwds)
# Mark the object as fully built (strip level 0 = nothing stripped).
self._dstrip['strip'] = 0
###########
# set dictionaries
###########
def set_dgeom(self, dgeom=None):
# Validate/format the geometry dict (type, outline, summit, basis...)
# against the class defaults and the allowed key list.
self._dgeom = _check_optics._checkformat_dgeom(
dgeom=dgeom, ddef=self._ddef['dgeom'],
valid_keys=self._get_keys_dgeom(),
)
# If the geometry declares a movement, register it right away.
if self._dgeom['move'] is not None:
self.set_move(
move=self._dgeom['move'],
param=self._dgeom['move_param'],
**self._dgeom['move_kwdargs'],
)
def set_dmat(self, dmat=None):
# Validate/format the material dict (formula, lattice, cut, non-
# parallelism basis...); depends on dgeom being set first.
self._dmat = _check_optics._checkformat_dmat(
dmat=dmat, dgeom=self._dgeom,
ddef=self._ddef['dmat'],
valid_keys=self._get_keys_dmat()
)
def set_dbragg(self, dbragg=None):
# Validate/format the Bragg dict (rocking curve, reference wavelength
# and angle); uses dmat, so set_dmat() must have run first.
self._dbragg = _check_optics._checkformat_dbragg(
dbragg=dbragg,
ddef=self._ddef['dbragg'],
valid_keys=self._get_keys_dbragg(),
dmat=self._dmat,
)
def _set_color(self, color=None):
# Normalise the colour and propagate it to dmisc and the plot dicts
# so all plotting views stay consistent.
color = _check_optics._checkformat_inputs_dmisc(
color=color, ddef=self._ddef,
)
self._dmisc['color'] = color
self._dplot['cross']['dP']['color'] = color
self._dplot['hor']['dP']['color'] = color
# self._dplot['3d']['dP']['color'] = color
def _set_dmisc(self, color=None):
self._set_color(color)
###########
# strip dictionaries
###########
def _strip_dgeom(self, lkeep=None):
lkeep = self._get_keys_dgeom()
utils.ToFuObject._strip_dict(self._dgeom, lkeep=lkeep)
def _strip_dmat(self, lkeep=None):
lkeep = self._get_keys_dmat()
utils.ToFuObject._strip_dict(self._dmat, lkeep=lkeep)
def _strip_dbragg(self, lkeep=None):
lkeep = self._get_keys_dbragg()
utils.ToFuObject._strip_dict(self._dbragg, lkeep=lkeep)
def _strip_dmisc(self, lkeep=['color']):
utils.ToFuObject._strip_dict(self._dmisc, lkeep=lkeep)
###########
# rebuild dictionaries
###########
def _rebuild_dgeom(self, lkeep=None):
lkeep = self._get_keys_dgeom()
reset = utils.ToFuObject._test_Rebuild(self._dgeom, lkeep=lkeep)
if reset:
utils.ToFuObject._check_Fields4Rebuild(self._dgeom,
lkeep=lkeep, dname='dgeom')
self._set_dgeom(dgeom=self._dgeom)
def _rebuild_dmat(self, lkeep=None):
lkeep = self._get_keys_dmat()
reset = utils.ToFuObject._test_Rebuild(self._dmat, lkeep=lkeep)
if reset:
utils.ToFuObject._check_Fields4Rebuild(self._dmat,
lkeep=lkeep, dname='dmat')
self.set_dmat(self._dmat)
def _rebuild_dbragg(self, lkeep=None):
lkeep = self._get_keys_dbragg()
reset = utils.ToFuObject._test_Rebuild(self._dbragg, lkeep=lkeep)
if reset:
utils.ToFuObject._check_Fields4Rebuild(self._dbragg,
lkeep=lkeep, dname='dbragg')
self.set_dbragg(self._dbragg)
def _rebuild_dmisc(self, lkeep=['color']):
reset = utils.ToFuObject._test_Rebuild(self._dmisc, lkeep=lkeep)
if reset:
utils.ToFuObject._check_Fields4Rebuild(self._dmisc,
lkeep=lkeep, dname='dmisc')
self._set_dmisc(color=self.dmisc['color'])
###########
# _strip and get/from dict
###########
@classmethod
def _strip_init(cls):
cls._dstrip['allowed'] = [0,1]
nMax = max(cls._dstrip['allowed'])
doc = """
1: Remove nothing"""
doc = utils.ToFuObjectBase.strip.__doc__.format(doc,nMax)
if sys.version[0]=='2':
cls.strip.__func__.__doc__ = doc
else:
cls.strip.__doc__ = doc
def strip(self, strip=0):
# super()
super(CrystalBragg, self).strip(strip=strip)
def _strip(self, strip=0):
# strip == 0 restores (rebuilds) all dicts; any other level removes
# the non-essential keys from each dict to shrink the object.
if strip==0:
self._rebuild_dgeom()
self._rebuild_dmat()
self._rebuild_dbragg()
self._rebuild_dmisc()
else:
self._strip_dgeom()
self._strip_dmat()
self._strip_dbragg()
self._strip_dmisc()
def _to_dict(self):
# Serialise the internal dicts for saving; 'lexcept' lists keys to
# exclude (None = keep everything).
dout = {'dgeom':{'dict':self._dgeom, 'lexcept':None},
'dmat':{'dict':self._dmat, 'lexcept':None},
'dbragg':{'dict':self._dbragg, 'lexcept':None},
'dmisc':{'dict':self._dmisc, 'lexcept':None},
'dplot':{'dict':self._dplot, 'lexcept':None}}
return dout
def _from_dict(self, fd):
# Restore the internal dicts from a saved dict; sections missing from
# fd are simply left unchanged.
self._dgeom.update(**fd.get('dgeom', {}))
self._dmat.update(**fd.get('dmat', {}))
self._dbragg.update(**fd.get('dbragg', {}))
self._dmisc.update(**fd.get('dmisc', {}))
self._dplot.update(**fd.get('dplot', {}))
# -----------
# Properties
# -----------
@property
def Type(self):
"""Return the type of structure """
return self._Id.Type
@property
def dgeom(self):
return self._dgeom
@property
def dmat(self):
"""Return the polygon defining the structure cross-section"""
return self._dmat
@property
def dbragg(self):
"""Return the polygon defining the structure cross-section"""
return self._dbragg
@property
def dmisc(self):
return self._dmisc
# @property
# def nin(self):
# return self._dgeom['nin']
# @property
# def nout(self):
# return self._dgeom['nout']
# @property
# def e1(self):
# return self._dgeom['e1']
# @property
# def e2(self):
# return self._dgeom['e2']
@property
def summit(self):
return self._dgeom['summit']
@property
def center(self):
return self._dgeom['center']
@property
def ismobile(self):
return self._dgeom['move'] not in [None, False]
@property
def rockingcurve(self):
# Return the rocking-curve dict only if it was set and carries a
# 'type' entry; otherwise fail loudly.
if self._dbragg.get('rockingcurve') is not None:
if self._dbragg['rockingcurve'].get('type') is not None:
return self._dbragg['rockingcurve']
raise Exception("rockingcurve was not set!")
# --------------------------------------
# methods for getting unit vectors basis
# --------------------------------------
def get_unit_vectors(self, use_non_parallelism=None):
""" Return the unit vectors (direct orthonormal basis)
Depending on:
use_non_parallelism: True => return the geometrical basis
use_non_parallelism: False => return the mesh basis
"""
if use_non_parallelism is None:
use_non_parallelism = _USE_NON_PARALLELISM
if use_non_parallelism is True:
nout = self._dmat['nout']
e1 = self._dmat['e1']
e2 = self._dmat['e2']
else:
nout = self._dgeom['nout']
e1 = self._dgeom['e1']
e2 = self._dgeom['e2']
return nout, e1, e2, use_non_parallelism
# -----------------
# methods for color
# -----------------
    def set_color(self, col):
        """ Set the display color (stored in self._dmisc by _set_color) """
        self._set_color(col)
    def get_color(self):
        """ Return the current display color """
        return self._dmisc['color']
# -----------------
# methods for printing
# -----------------
    def get_summary(self, sep=' ', line='-', just='l',
                    table_sep=None, verb=True, return_=False):
        """ Summary description of the object content

        Builds two tables (material properties and geometry) and delegates
        the formatting / printing / returning to self._get_summary().
        """
        # -----------------------
        # Build material
        col0 = [
            'formula', 'symmetry', 'cut', 'density',
            'd (A)',
            'bragg({:9.6} A) (deg)'.format(self._dbragg['lambref']*1e10),
            'Type', 'outline', 'surface (cm²)', 'rcurve', 'rocking curve',
        ]
        ar0 = [self._dmat['formula'], self._dmat['symmetry'],
               str(self._dmat['cut']), str(self._dmat['density']),
               '{0:5.3f}'.format(self._dmat['d']*1.e10),
               str(self._dbragg['braggref']*180./np.pi),
               self._dgeom['Type'], self._dgeom['Typeoutline'],
               '{0:5.1f}'.format(self._dgeom['surface']*1.e4),
               '{0:6.3f}'.format(self._dgeom['rcurve'])]
        # rockingcurve property raises when not set => fall back to 'None'
        try:
            ar0.append(self.rockingcurve['type'])
        except Exception as err:
            ar0.append('None')
        # -----------------------
        # Build geometry
        col1 = ['half-extent', 'summit', 'center', 'nout', 'e1',
                'alpha', 'beta']
        ar1 = [
            str(np.round(self._dgeom['extenthalf'], decimals=3)),
            str(np.round(self._dgeom['summit'], decimals=2)),
            str(np.round(self._dgeom['center'], decimals=2)),
            str(np.round(self._dmat['nout'], decimals=3)),
            str(np.round(self._dmat['e1'], decimals=3)),
            str(np.round(self._dmat['alpha'], decimals=6)),
            str(np.round(self._dmat['beta'], decimals=6)),
        ]
        # optional columns: default movement and color, when set
        if self._dgeom.get('move') not in [None, False]:
            col1 += ['move', 'param']
            ar1 += [self._dgeom['move'],
                    str(np.round(self._dgeom['move_param'], decimals=5))]
        if self._dmisc.get('color') is not None:
            col1.append('color')
            ar1.append(str(self._dmisc['color']))
        lcol = [col0, col1]
        lar = [ar0, ar1]
        return self._get_summary(lar, lcol,
                                 sep=sep, line=line, table_sep=table_sep,
                                 verb=verb, return_=return_)
# -----------------
# methods for moving
# -----------------
def _update_or_copy(self, dgeom, pinhole=None,
return_copy=None,
name=None, diag=None, shot=None):
if return_copy is None:
return_copy = _RETURN_COPY
for kk, vv in self._dgeom.items():
if kk not in dgeom.keys():
dgeom[kk] = vv
if return_copy is True:
if name is None:
name = self.Id.Name + 'copy'
if diag is None:
diag = self.Id.Diag
if shot is None:
diag = self.Id.shot
return self.__class__(dgeom=dgeom,
dbragg=self._dbragg,
dmat=self._dmat,
color=self._dmisc['color'],
Exp=self.Id.Exp,
Diag=diag,
Name=name,
shot=shot,
SavePath=self.Id.SavePath)
else:
dgeom0 = self.dgeom
try:
self.set_dgeom(dgeom=dgeom)
self._dmat = _check_optics._checkformat_dmat(
dmat={
k0: v0 for k0, v0 in self._dmat.items()
if k0 not in ['nin', 'nout', 'e1', 'e2']
},
dgeom=self._dgeom,
ddef=self._ddef['dmat'],
valid_keys=self._get_keys_dmat()
)
except Exception as err:
# Make sure instance does not move
self.set_dgeom(dgeom=dgeom0)
msg = (str(err)
+ "\nAn exception occured during updating\n"
+ " => instance unmoved")
raise Exception(msg)
def _rotate_or_translate(self, func, **kwdargs):
pts = np.array([self._dgeom['summit'], self._dgeom['center']]).T
if 'rotate' in func.__name__:
vect = np.array([
self._dgeom['nout'],
self._dgeom['e1'],
self._dgeom['e2']
]).T
pts, vect = func(pts=pts, vect=vect, **kwdargs)
return {'summit': pts[:, 0], 'center': pts[:, 1],
'nout': vect[:, 0], 'nin': -vect[:, 0],
'e1': vect[:, 1], 'e2': vect[:, 2]}
else:
pts = func(pts=pts, **kwdargs)
return {'summit': pts[:, 0], 'center': pts[:, 1]}
    def translate_in_cross_section(self, distance=None, direction_rz=None,
                                   phi=None,
                                   return_copy=None,
                                   diag=None, name=None, shot=None):
        """ Translate the instance in the cross-section

        Parameters
        ----------
        distance: None / float
            distance of the translation (m)
        direction_rz: None / array
            (R, Z) direction of the translation in the poloidal plane
        phi: None / float
            toroidal angle of the poloidal plane;
            defaults to the summit's phi (with a warning)
        return_copy, diag, name, shot:
            forwarded to _update_or_copy (in-place vs returned copy)
        """
        if phi is None:
            # summit[1::-1] = (Y, X) => arctan2(Y, X)
            phi = np.arctan2(*self.summit[1::-1])
            msg = ("Poloidal plane was not explicitely specified\n"
                   + " => phi set to self.summit's phi ({})".format(phi))
            warnings.warn(msg)
        dgeom = self._rotate_or_translate(
            self._translate_pts_poloidal_plane,
            phi=phi, direction_rz=direction_rz, distance=distance)
        return self._update_or_copy(dgeom,
                                    return_copy=return_copy,
                                    diag=diag, name=name, shot=shot)
def translate_3d(self, distance=None, direction=None,
return_copy=None,
diag=None, name=None, shot=None):
""" Translate the instance in provided direction """
dgeom = self._rotate_or_translate(
self._translate_pts_3d,
direction=direction, distance=distance)
return self._update_or_copy(dgeom,
return_copy=return_copy,
diag=diag, name=name, shot=shot)
    def rotate_in_cross_section(self, angle=None, axis_rz=None,
                                phi=None,
                                return_copy=None,
                                diag=None, name=None, shot=None):
        """ Rotate the instance in the cross-section

        Parameters
        ----------
        angle: None / float
            rotation angle (rad)
        axis_rz: None / array
            (R, Z) coordinates of the rotation axis in the poloidal plane
        phi: None / float
            toroidal angle of the poloidal plane;
            defaults to the summit's phi (with a warning)
        return_copy, diag, name, shot:
            forwarded to _update_or_copy (in-place vs returned copy)
        """
        if phi is None:
            # summit[1::-1] = (Y, X) => arctan2(Y, X)
            phi = np.arctan2(*self.summit[1::-1])
            msg = ("Poloidal plane was not explicitely specified\n"
                   + " => phi set to self.summit's phi ({})".format(phi))
            warnings.warn(msg)
        dgeom = self._rotate_or_translate(
            self._rotate_pts_vectors_in_poloidal_plane,
            axis_rz=axis_rz, angle=angle, phi=phi)
        return self._update_or_copy(dgeom,
                                    return_copy=return_copy,
                                    diag=diag, name=name, shot=shot)
def rotate_around_torusaxis(self, angle=None,
return_copy=None,
diag=None, name=None, shot=None):
""" Rotate the instance around the torus axis """
dgeom = self._rotate_or_translate(
self._rotate_pts_vectors_around_torusaxis,
angle=angle)
return self._update_or_copy(dgeom,
return_copy=return_copy,
diag=diag, name=name, shot=shot)
def rotate_around_3daxis(self, angle=None, axis=None,
return_copy=None,
diag=None, name=None, shot=None):
""" Rotate the instance around the provided 3d axis """
dgeom = self._rotate_or_translate(
self._rotate_pts_vectors_around_3daxis,
axis=axis, angle=angle)
return self._update_or_copy(dgeom,
return_copy=return_copy,
diag=diag, name=name, shot=shot)
    def set_move(self, move=None, param=None, **kwdargs):
        """ Set the default movement parameters

        A default movement can be set for the instance, it can be any of the
        pre-implemented movement (rotations or translations)
        This default movement is the one that will be called when using
        self.move()

        Specify the type of movement via the name of the method (passed as a
        str to move)

        Specify, for the geometry of the instance at the time of defining this
        default movement, the current value of the associated movement
        parameter (angle / distance). This is used to set an arbitrary
        difference for user who want to use absolute position values
        The desired incremental movement to be performed when calling self.move
        will be deduced by substracting the stored param value to the provided
        param value. Just set the current param value to 0 if you don't care
        about a custom absolute reference.

        kwdargs must be a parameters relevant to the chosen method (axis,
        direction...)

        e.g.:
            self.set_move(move='rotate_around_3daxis',
                          param=0.,
                          axis=([0.,0.,0.], [1.,0.,0.]))
            self.set_move(move='translate_3d',
                          param=0.,
                          direction=[0.,1.,0.])
        """
        # validation / normalization delegated to the parent helper
        move, param, kwdargs = self._checkformat_set_move(move, param, kwdargs)
        self._dgeom['move'] = move
        self._dgeom['move_param'] = param
        # store None rather than an empty dict for "no extra kwargs"
        if isinstance(kwdargs, dict) and len(kwdargs) == 0:
            kwdargs = None
        self._dgeom['move_kwdargs'] = kwdargs
    def move(self, param):
        """ Set new position to desired param according to default movement

        Can only be used if default movement was set before
        See self.set_move()
        """
        # _move performs the incremental move and returns the new param value
        param = self._move(param, dictname='_dgeom')
        self._dgeom['move_param'] = param
# -----------------
# methods for rocking curve
# -----------------
    def get_rockingcurve_func(self, lamb=None, n=None):
        """ Return the rocking curve function

        Also return the wavelength (lamb) (in meters) for which it was computed
        and the associated reference bragg angle (in rad)
        """
        # raises if no rocking curve was set
        drock = self.rockingcurve
        if drock['type'] == 'tabulated-1d':
            if lamb is not None and lamb != drock['lamb']:
                msg = ("rocking curve was tabulated only for:\n"
                       + "\tlamb = {} m\n".format(lamb)
                       + "  => Please let lamb=None")
                raise Exception(msg)
            lamb = drock['lamb']
            bragg = self._checkformat_bragglamb(lamb=lamb, n=n)
            # 1d interpolation of the tabulated values, shifted to bragg
            func = scpinterp.interp1d(drock['dangle'] + bragg, drock['value'],
                                      kind='linear', bounds_error=False,
                                      fill_value=0, assume_sorted=True)

        elif drock['type'] == 'tabulated-2d':
            lmin, lmax = drock['lamb'].min(), drock['lamb'].max()
            if lamb is None:
                lamb = drock['lamb']
            # NOTE(review): if lamb defaulted to the full drock['lamb'] array
            # just above, the scalar comparison below would raise a
            # "truth value of an array" error — confirm lamb is meant scalar
            if lamb < lmin or lamb > lmax:
                msg = ("rocking curve was tabulated only in interval:\n"
                       + "\tlamb in [{}; {}] m\n".format(lmin, lmax)
                       + "  => Please set lamb accordingly")
                raise Exception(msg)
            bragg = self._checkformat_bragglamb(lamb=lamb, n=n)

            def func(angle, lamb=lamb, bragg=bragg, drock=drock):
                return scpinterp.interp2d(drock['dangle']+bragg, drock['lamb'],
                                          drock['value'], kind='linear',
                                          bounds_error=False, fill_value=0,
                                          assume_sorted=True)(angle, lamb)

        else:
            # TBC
            raise NotImplementedError
            # NOTE(review): the def below is unreachable (after raise) and
            # references names not defined in this scope (d, delta_bragg,
            # bragg) — presumably a draft Lorentzian model; kept as-is
            def func(angle, d=d, delta_bragg=delta_bragg,
                     Rmax=drock['Rmax'], sigma=drock['sigma']):
                core = sigma**2/((angle - (bragg+delta_bragg))**2 + sigma**2)
                if Rmax is None:
                    return core/(sigma*np.pi)
                else:
                    return Rmax*core

        return func, lamb, bragg
    def plot_rockingcurve(self, lamb=None, n=None, sigma=None,
                          npts=None, color=None, ang_units=None,
                          dmargin=None, fs=None, ax=None, legend=None):
        """ Plot the rocking curve of the crystal

        Thin wrapper: builds the rocking-curve function via
        get_rockingcurve_func() and delegates the plotting to
        _plot_optics.CrystalBragg_plot_rockingcurve().
        """
        # accessing the property validates a rocking curve is set
        # (raises otherwise); the value itself is not used below
        drock = self.rockingcurve
        func, lamb, bragg = self.get_rockingcurve_func(lamb=lamb, n=n)
        axtit = 'Rocking curve for ' + self.Id.Name
        return _plot_optics.CrystalBragg_plot_rockingcurve(
            func=func, bragg=bragg, lamb=lamb,
            sigma=sigma, npts=npts,
            ang_units=ang_units, axtit=axtit, color=color,
            fs=fs, ax=ax, legend=legend)
def compute_rockingcurve(
self, ih=None, ik=None, il=None, lamb=None,
use_non_parallelism=None, na=None,
alpha_limits=None,
therm_exp=None, plot_therm_exp=None,
plot_asf=None, plot_power_ratio=None,
plot_asymmetry=None, plot_cmaps=None,
verb=None, returnas=None,
):
return _rockingcurve.compute_rockingcurve(
ih=ih, ik=ik, il=il, lamb=lamb,
use_non_parallelism=use_non_parallelism, na=na,
alpha_limits=alpha_limits,
therm_exp=therm_exp, plot_therm_exp=plot_therm_exp,
plot_asf=plot_asf, plot_power_ratio=plot_power_ratio,
plot_asymmetry=plot_asymmetry, plot_cmaps=plot_cmaps,
verb=None, returnas=None,
)
    def plot_var_temp_changes_wavelengths(
        self, ih=None, ik=None, il=None, lambdas=None,
        use_non_parallelism=None, na=None,
        alpha_limits=None,
        therm_exp=None, plot_therm_exp=None,
        plot_asf=None, plot_power_ratio=None,
        plot_asymmetry=None, plot_cmaps=None,
        quantity=None,
        curv_radius=None, pixel_size=None,
    ):
        """ Plot rocking-curve temperature variations over wavelengths

        Pure delegation: all parameters are forwarded to
        _rockingcurve.plot_var_temp_changes_wavelengths()
        """
        return _rockingcurve.plot_var_temp_changes_wavelengths(
            ih=ih, ik=ik, il=il, lambdas=lambdas,
            use_non_parallelism=use_non_parallelism, na=na,
            alpha_limits=alpha_limits,
            therm_exp=therm_exp, plot_therm_exp=plot_therm_exp,
            plot_asf=plot_asf, plot_power_ratio=plot_power_ratio,
            plot_asymmetry=plot_asymmetry, plot_cmaps=plot_cmaps,
            quantity=quantity,
            curv_radius=curv_radius, pixel_size=pixel_size,
        )
# -----------------
# methods for surface and contour sampling
# -----------------
    def sample_outline_plot(self, use_non_parallelism=None, res=None):
        """ Sample the crystal outline for plotting

        Only implemented for spherical crystals with rectangular outline.

        Parameters
        ----------
        use_non_parallelism: None / bool
            basis selection, see get_unit_vectors()
        res: None / float
            resolution of the outline discretization

        Return
        ------
        outline:
            sampled outline as returned by
            _comp_optics.CrystBragg_sample_outline_plot_sphrect()
        """
        if self._dgeom['Type'] == 'sph':
            if self._dgeom['Typeoutline'] == 'rect':
                nout, e1, e2, use_non_parallelism = self.get_unit_vectors(
                    use_non_parallelism=use_non_parallelism,
                )
                # first arg = center of curvature (summit - rcurve*nout)
                outline = _comp_optics.CrystBragg_sample_outline_plot_sphrect(
                    self._dgeom['summit'] - nout*self._dgeom['rcurve'],
                    nout,
                    e1,
                    e2,
                    self._dgeom['rcurve'],
                    self._dgeom['extenthalf'],
                    res,
                )
            else:
                raise NotImplementedError
        else:
            raise NotImplementedError
        return outline
# -----------------
# methods for surface and contour sampling
# -----------------
def _checkformat_bragglamb(self, bragg=None, lamb=None, n=None):
lc = [lamb is not None, bragg is not None]
if not any(lc):
lamb = self._dbragg['lambref']
lc[0] = True
assert np.sum(lc) == 1, "Provide lamb xor bragg!"
if lc[0]:
bragg = self.get_bragg_from_lamb(
np.atleast_1d(lamb), n=n,
)
else:
bragg = np.atleast_1d(bragg)
return bragg
def _checkformat_get_Rays_from(self, phi=None, bragg=None):
assert phi is not None
assert bragg is not None
bragg = np.atleast_1d(bragg)
phi = np.atleast_1d(phi)
nrays = max(phi.size, bragg.size)
if not phi.shape == bragg.shape:
if phi.size == 1:
phi = np.full(bragg.shape, phi[0])
elif bragg.size == 1:
bragg = np.full(phi.shape, bragg[0])
else:
msg = "phi and bragg/lamb must have the same shape!\n"
msg += " phi.shape: %s\n"%str(phi.shape)
msg += " bragg/lamb.shape: %s\n"%str(bragg.shape)
raise Exception(msg)
return phi, bragg
    def _get_rays_from_cryst(
        self,
        phi=None, bragg=None,
        lamb=None, n=None,
        dtheta=None, psi=None,
        ntheta=None, npsi=None,
        use_non_parallelism=None,
        include_summit=None,
        grid=None,
    ):
        """ Compute start points and unit vectors of rays from the crystal

        Return
        ------
        pts_start: np.ndarray
            (3, ...) start points on the crystal surface
        vect: np.ndarray
            (3, ...) unit vectors of the rays
        """
        # Get phi, bragg
        bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb)
        phi, bragg = self._checkformat_get_Rays_from(phi=phi, bragg=bragg)
        # assert phi.ndim == 1

        # Get local summits, nout, e1, e2
        pts_start, nout, e1, e2 = self.get_local_noute1e2(
            dtheta=dtheta, psi=psi,
            use_non_parallelism=use_non_parallelism,
            ntheta=ntheta, npsi=npsi,
            include_summit=include_summit,
        )
        nin = -nout

        # reshape for broadcast
        if grid is True:
            # broadcast each start point against all (phi, bragg)
            nin = nin[..., None]
            e1 = e1[..., None]
            e2 = e2[..., None]
        else:
            assert bragg.shape == nin.shape[1:]

        # Compute start point (D) and unit vectors (us)
        # ray direction: bragg angle off the surface, azimuth phi in (e1, e2)
        vect = (
            np.sin(bragg)*nin
            + np.cos(bragg)*(np.cos(phi)*e1 + np.sin(phi)*e2)
        )
        return pts_start, vect
    def get_rays_from_cryst(
        self,
        phi=None, bragg=None,
        lamb=None, n=None,
        dtheta=None, psi=None,
        use_non_parallelism=None,
        ntheta=None, npsi=None,
        include_summit=None,
        det=None, config=None, length=None,
        returnas=None,
        return_xixj=None,
        grid=None,
    ):
        """ Return rays stemming from the crystal

        The rays are defined by a start point (on the crystal surface) and
        either an end point or a unit vector

        Start points
        ------------
        The start point is the crystal summit by default
        But that can be changed using:
            - ('dtheta', 'psi'): can be arbitrary but with same shape
                up to 4 dimensions
            - ('ntheta', 'npsi', 'include_summit'): will be used to
                compute the envelop (contour) of the crystal, as 2 1d arrays

        These arguments are fed to self.get_local_noute1e2() which will compute
        the start points and return them as shape (3, psi.shape)

        End point or unit vector
        ------------------------
        End point are computed automatically if:
            - 'config' is provided: ray-tracing is done like for any camera
            - 'det' is provided: xi and xj can be computed

        Returning format
        ----------------
        The rays can be returned as:
            - '(pts, vect, length)': a tuple of:
                - pts: array of start points on the crystal
                    (only the summit by default)
                - vect: array
                - length:
            - '(pts, vect)': a tuple with only pts and vect
            - 'pts': a tuple, where both start and end points are returned

        All arrays represent (X, Y, Z) cartesian coordinates in the tokamak's
        frame

        Optionally, can return the (xi, xj) coordinates of points if a detector
        (det) is provided.
        """

        # -----------
        # Check input
        if returnas is None:
            returnas = 'pts'
        if return_xixj is None:
            return_xixj = False
        lret = ['(pts, vect, length)', '(pts, vect)', 'pts']  # , object]
        if returnas not in lret:
            msg = (
                "Arg returnas must be in:\n"
                + "\t- '(pts, vect, length)': starting points, unit vector,"
                + " length\n"
                + "\t- 'pts': starting and ending points\n"
                # + "\t- object: CamLOS1D instance\n"
            )
            raise Exception(msg)

        det = self._checkformat_det(det)
        if length is None:
            length = 10.
        if grid is None:
            # default: grid mode when bragg/dtheta shapes differ
            # (or when either is None, making .shape fail)
            try:
                grid = bragg.shape != dtheta.shape
            except Exception as err:
                grid = True

        # -----------
        # Get starting point and vectors
        pts_start, vect = self._get_rays_from_cryst(
            phi=phi, bragg=bragg,
            lamb=lamb, n=n,
            dtheta=dtheta, psi=psi,
            use_non_parallelism=use_non_parallelism,
            ntheta=ntheta, npsi=npsi,
            include_summit=include_summit,
            grid=grid,
        )
        if returnas == '(pts, vect)':
            return pts_start, vect

        # -----------
        # Get length (minimum between conf, det, length)
        vshape = vect.shape
        # per-source candidate ray lengths, NaN = not available
        dk = {
            k0: np.full(vshape[1:], np.nan)
            for k0 in ['config', 'det', 'length']
        }

        xi, xj = None, None
        if config is not None:
            # Here insert ray-tracing from config!
            if vshape != pts_start.shape:
                if len(vshape) == 3 and len(pts_start.shape) == 2:
                    # broadcast start points against the extra ray dimension
                    D = np.reshape(
                        np.repeat(pts_start[..., None], vshape[-1], axis=-1),
                        (3, -1),
                    )
                    u = vect.reshape((3, -1))
                else:
                    msg = (
                        "Not treated case!\n"
                        f"\t- pts_start.shape: {pts_start.shape}\n"
                        f"\t- vect.shape: {vshape}\n"
                    )
                    raise Exception(msg)
            else:
                if len(vshape) > 2:
                    D = pts_start.reshape((3, -1))
                    u = vect.reshape((3, -1))
                else:
                    D = pts_start
                    u = vect

            # dummy Rays instance used only for its kOut computation
            rays = _core.Rays(
                dgeom=(D, u),
                config=config,
                strict=False,
                Name='dummy',
                Diag='dummy',
                Exp='dummy',
            )
            if u.shape != vshape:
                kout = rays.dgeom['kOut'].reshape(vshape[1:])
            else:
                kout = rays.dgeom['kOut']
            dk['config'] = kout

        if det is not None and det is not False:
            shape = tuple([3] + [1 for ii in range(vect.ndim-1)])
            cent = det['cent'].reshape(shape)
            nout = det['nout'].reshape(shape)
            # k = distance along vect to the detector plane
            if grid is True:
                k = (
                    np.sum((cent-pts_start[..., None])*nout, axis=0)
                    / np.sum(vect*nout, axis=0)
                )
            else:
                k = (
                    np.sum((cent-pts_start)*nout, axis=0)
                    / np.sum(vect*nout, axis=0)
                )
            # keep only forward intersections
            dk['det'][k >= 0.] = k[k >= 0.]

            if return_xixj is True:
                if grid:
                    pts_end = pts_start[..., None] + dk['det'][None, ...]*vect
                else:
                    pts_end = pts_start + dk['det'][None, ...]*vect
                ei = det['ei'].reshape(shape)
                ej = det['ej'].reshape(shape)
                # project end points on the detector's in-plane basis
                xi = np.sum((pts_end - cent)*ei, axis=0)
                xj = np.sum((pts_end - cent)*ej, axis=0)

        if length is not None:
            dk['length'][:] = length

        # shortest available candidate length per ray
        k = np.nanmin([vv for vv in dk.values() if vv is not None], axis=0)

        # -----------
        # return
        if returnas == 'pts':
            if grid:
                pts_end = pts_start[..., None] + k[None, ...]*vect
                if return_xixj:
                    return pts_start, pts_end, xi, xj
                else:
                    return pts_start, pts_end
            else:
                pts_end = pts_start + k[None, ...]*vect
                if return_xixj:
                    return pts_start, pts_end, xi, xj
                else:
                    return pts_start, pts_end
        elif returnas == '(pts, vect, length)':
            if return_xixj:
                return pts_start, vect, k, xi, xj
            else:
                return pts_start, vect, k
# -----------------
# methods for crystal splitting
# -----------------
    def split(self, direction=None, nb=None):
        """ Split the crystal into nb equal contiguous pieces

        Parameters
        ----------
        direction: None / str
            'e1' (default): split along vector 'e1' (~horizontally)
            'e2': split along vector 'e2' (~vertically)
        nb: None / int
            number of equal parts desired (default: 2)

        Return
        ------
        lobj: list
            list of nb new instances of the same class, each with its own
            local basis and reduced extenthalf
        """
        # ------------
        # check inputs
        if direction is None:
            direction = 'e1'
        if direction not in ['e1', 'e2']:
            msg = (
                "Arg direction must be either:\n"
                "\t- 'e1': split along vector 'e1' (~horizontally)\n"
                "\t- 'e2': split along vector 'e2' (~vertically)\n"
                f"You provided: {direction}"
            )
            raise Exception(msg)
        if nb is None:
            nb = 2
        if not (isinstance(nb, int) and nb > 1):
            msg = (
                "Arg nb must be a int > 1 !\n"
                "It specifies the number of equal parts desired\n"
                f"You provided: {nb}"
            )
            raise Exception(msg)

        # ---------------
        # split
        # mid = normalized center of each piece in [-1, 1]
        edges = np.linspace(-1, 1, nb+1)
        mid = 0.5*(edges[1:] + edges[:-1])[None, :]
        if direction == 'e2':
            dtheta = mid*self._dgeom['extenthalf'][1]
            psi = np.zeros((1, nb), dtype=float)
            extenthalf = [
                self._dgeom['extenthalf'][0],
                self._dgeom['extenthalf'][1]/nb,
            ]
        else:
            dtheta = np.zeros((1, nb), dtype=float)
            psi = mid*self._dgeom['extenthalf'][0]
            extenthalf = [
                self._dgeom['extenthalf'][0]/nb,
                self._dgeom['extenthalf'][1],
            ]

        # local outward normal of each piece (rotated by psi, dtheta)
        nouts = (
            np.cos(dtheta)*(
                self._dgeom['nout'][:, None]*np.cos(psi)
                + self._dgeom['e1'][:, None]*np.sin(psi)
            )
            + np.sin(dtheta)*self._dgeom['e2'][:, None]
        )
        e1s = (
            -self._dgeom['nout'][:, None]*np.sin(psi)
            + self._dgeom['e1'][:, None]*np.cos(psi)
        )
        # e2 = nout x e1 (column-wise cross-product)
        e2s = np.array([
            nouts[1, :]*e1s[2, :] - nouts[2, :]*e1s[1, :],
            nouts[2, :]*e1s[0, :] - nouts[0, :]*e1s[2, :],
            nouts[0, :]*e1s[1, :] - nouts[1, :]*e1s[0, :],
        ])

        # -----------
        # Construct list of instances
        lobj = [
            self.__class__(
                dgeom={
                    'rcurve': self._dgeom['rcurve'],
                    'center': self._dgeom['center'],
                    'nout': nouts[:, ii],
                    'e1': e1s[:, ii],
                    'e2': e2s[:, ii],
                    'extenthalf': extenthalf,
                },
                dmat={
                    k0: v0 for k0, v0 in self._dmat.items()
                    if k0 not in ['nin', 'nout', 'e1', 'e2']
                },
                dbragg=dict(self._dbragg),
                Name=f"{self.Id.Name}{ii}",
                Exp=self.Id.Exp,
            )
            for ii in range(nb)
        ]
        return lobj
# -----------------
# methods for general plotting
# -----------------
    def plot(
        self, dcryst=None,
        phi=None, bragg=None, lamb=None, pts=None,
        n=None, config=None, det=None, length=None,
        dtheta=None, psi=None,
        ntheta=None, npsi=None,
        include_summit=None,
        dax=None, proj=None, res=None, element=None,
        color=None, ddet=None,
        dleg=None, draw=True, dmargin=None,
        use_non_parallelism=None, grid=None,
        rays_npts=None, rays_color=None,
        fs=None, wintit=None, tit=None,
    ):
        """ Plot the crystal in desired projeection

        The projection is 3d, cross-section or horizontal
        Optionaly add rays reflected on cryst at:
            - lamb / phi: desired wavelength and incidence angle
        and either:
            - psi, dtheta : desired pts on the crystal surface
            - pts: emitted from desired pts (e.g.: in the plasma)
              (need to be refresh with get_rays_from_cryst method
               if new pts are wanted)

        Parameters
        ----------
        dax:        None / dict
            dict of axes to be used, with keys:
                - 'cross': axe where to plot cross-section view
                - 'hor': axe where to plot horizontal (from top) view
                - '3d': axe where to plot 3d view
            if None, a new figure and axes are created
        proj:       None / str
            key indicating which plot to make:
                - 'cross': cross-section projection
                - 'hor': horizontal projection
                - 'all': cross-section + horizontal view
                - '3d': 3d view
        element:    None / str
            char string where each letter indicates an element to plot
                - 'o': outline (edges of crystal)
                - 's': summit (geometrical center of the crystal)
                - 'c': center (of the sphere of curvature)
                - 'r': rowland circle (plotted in e1 direction)
                - 'v': local unit vectors e1, e2, nout
            If None, default to 'oscvr'
        res:        None / float
            Resolution for the discretization of the outline
        dcryst:     None / dict
            dict of dict for plotting the various elements of the crystal:
                - 'outline': dict of properties fed to plot()
                - 'cent': dict of properties fed to plot()
                - 'summit': dict of properties fed to plot()
                - 'rowland': dict of properties fed to plot()
                - 'vectors': dict of properties fed to quiver()
        ddet:       None / dict
            dict of dict for plotting the various elements of the det:
                - 'outline': dict of properties fed to plot()
                - 'cent': dict of properties fed to plot()
                - 'vectors': dict of properties fed to quiver()
        color:      None / str / tuple
            color to be used for plotting
            Overwrites all colors in dcryst and ddet
        det:        None / dict
            Optionnal associated detector to be plotted, as a dict with keys:
                - 'cent': 1d array of cartesian coordinates of the center
                - 'nout': 1d array of cartesian coordinates of unit vector
                    oriented towards the crystal
                - 'ei': 1d array of cartesian coordinates of unit vector
                - 'ej': 1d array of cartesian coordinates of unit vector
                - 'outline': 2d array of outline coordinates in (ei, ej)
        dleg:       None / dict
            dict of properties to be passed to plt.legend()
            if False legend is not plotted
        use_non_parallelism: None / str
            Return the unit vectors (direct orthonormal basis)
            Depending on:
                - use_non_parallelism: True  => return the geometrical basis
                - use_non_parallelism: False => return the mesh basis
        """
        if det is None:
            det = False
        det = self._checkformat_det(det)

        # the two ways of specifying rays are mutually exclusive
        lc = [
            dtheta is not None or psi is not None or phi is not None,
            pts is not None
        ]
        if np.sum(lc) == 2:
            msg = (
                "For ray tracing, please provide either:\n"
                + "\t- dtheta, psi, phi, lamb/bragg\n"
                + "\t- pts, lamb/bragg\n"
            )
            raise Exception(msg)

        # Add rays?
        if lc[0]:
            # Get one way
            # pts.shape = (3, nlamb, npts, ndtheta)
            pts_summit, pts1 = self.get_rays_from_cryst(
                phi=phi, lamb=lamb, bragg=bragg,
                n=n, use_non_parallelism=use_non_parallelism,
                dtheta=dtheta, psi=psi,
                ntheta=ntheta, npsi=npsi,
                include_summit=include_summit,
                config=config, det=det,
                returnas='pts', return_xixj=False,
                grid=grid,
            )
            # Get the other way (mirror ray: phi + pi)
            pts2, xi, xj = self.get_rays_from_cryst(
                phi=phi+np.pi, lamb=lamb, bragg=bragg,
                n=n, use_non_parallelism=use_non_parallelism,
                dtheta=dtheta, psi=psi,
                ntheta=ntheta, npsi=npsi,
                include_summit=include_summit,
                config=config, det=det,
                returnas='pts', return_xixj=True,
                grid=grid,
            )[1:]

        elif lc[1]:
            c0 = (
                isinstance(pts, np.ndarray)
                and pts.ndim == 2
                and pts.shape[0] == 3
            )
            if not c0:
                msg = ("Arg pts must be a (3, npts) np.array!")
                raise Exception(msg)
            # pts.shape = (nlamb, npts, ndtheta)
            # derive incidence angles from the emitting points
            dtheta, psi, phi, bragg, _, _ = self.calc_raytracing_from_lambpts(
                pts=pts,
                lamb=lamb,
                ndtheta=ntheta,
            )
            pts_summit, pts2, xi, xj = self.get_rays_from_cryst(
                phi=phi+np.pi, lamb=None, bragg=bragg,
                n=n, use_non_parallelism=use_non_parallelism,
                dtheta=dtheta, psi=psi,
                ntheta=ntheta, npsi=npsi,
                include_summit=include_summit,
                config=config, det=det,
                returnas='pts', return_xixj=True,
                grid=grid,
            )
            # broadcast the emitting points to match the rays' shape
            pts1 = np.repeat(
                np.repeat(
                    np.repeat(
                        pts[:, None, :], dtheta.shape[0], axis=1,
                    )[..., None],
                    dtheta.shape[2],
                    axis=-1,
                )[..., None],
                2,
                axis=-1,
            )
        else:
            pts_summit, pts1, pts2, xi, xj = None, None, None, None, None

        return _plot_optics.CrystalBragg_plot(
            cryst=self, dcryst=dcryst,
            det=det, ddet=ddet,
            dax=dax, proj=proj, res=res, element=element,
            color=color,
            pts_summit=pts_summit, pts1=pts1, pts2=pts2,
            xi=xi, xj=xj,
            rays_color=rays_color, rays_npts=rays_npts,
            dleg=dleg, draw=draw, fs=fs, dmargin=dmargin,
            use_non_parallelism=use_non_parallelism,
            wintit=wintit, tit=tit,
        )
# -----------------
# methods for generic first-approx
# -----------------
    def get_phi_from_magaxis_summit(
        self,
        axis_r,
        axis_z,
        axis_npts=None,
        lamb=None,
        lamb_tol=None,
        bragg=None,
        n=None,
        use_non_parallelism=None,
    ):
        """ Return phi of a magnteic axis (at lamb with tolerance)

        axis_r and axis_z must be np.ndarrays of the same shape
        The magnetic axis is discretized toroidally in axis_npts (def: 1000)

        The pts closest to the chosen lamb are picked
        If no pts is found within tolerance, an error is raised
        """

        # --------------------
        # Check / format input
        if axis_npts is None:
            axis_npts = 1000
        axis_r = np.atleast_1d(axis_r)
        axis_z = np.atleast_1d(axis_z)
        assert axis_r.shape == axis_z.shape

        if lamb_tol is None:
            lamb_tol = 0.01e-10

        bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
        lamb = self.get_lamb_from_bragg(bragg=bragg, n=n)

        # --------------
        # Disretize axis
        shaperz = axis_r.shape
        # output, NaN where no point matches within tolerance
        phi_ax = np.full(shaperz, np.nan)

        # Compute phi
        theta_cryst = np.arctan2(
            self._dgeom['summit'][1],
            self._dgeom['summit'][0],
        )
        # sample a half-turn of toroidal angles centered on the crystal
        theta_ax = theta_cryst + np.pi/2*np.linspace(-1, 1, axis_npts)
        shapetheta = np.r_[[1 for ii in shaperz], axis_npts]
        theta_ax = theta_ax.reshape(shapetheta)

        axis_x = (axis_r[..., None] * np.cos(theta_ax)).ravel()
        axis_y = (axis_r[..., None] * np.sin(theta_ax)).ravel()
        axis_z = (np.repeat(axis_z[..., None], axis_npts, axis=-1)).ravel()

        # ----------------------------------------------
        # Compute bragg, phi, lamb of each point on axis
        (
            bragg_ax_full, phi_ax_full, lamb_ax_full,
        ) = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
            pts=np.array([axis_x, axis_y, axis_z]),
            dtheta=None, psi=None,
            ntheta=None, npsi=None,
            n=None,
            use_non_parallelism=use_non_parallelism,
            grid=None,
            return_lamb=True,
        )

        # -------------------------------------
        # Select points on axis closest to lamb
        # lamb_ax_full = self.get_lamb_from_bragg(bragg_ax_full)
        shape_full = tuple(np.r_[shaperz, axis_npts])
        lamb_ax_full = lamb_ax_full.reshape(shape_full)
        phi_ax_full = phi_ax_full.reshape(shape_full)
        dlamb = np.abs(lamb_ax_full - lamb)
        # keep axis points with at least one theta sample within tolerance
        indok = np.any(dlamb <= lamb_tol, axis=-1)
        indmin = np.nanargmin(dlamb[indok, :], axis=-1)
        indtup = tuple([iii for iii in indok.nonzero()] + [indmin])
        phi_ax[indok] = phi_ax_full[indtup]
        return phi_ax
def get_bragg_from_lamb(self, lamb=None, n=None):
""" Braggs' law: n*lamb = 2dsin(bragg) """
if self._dmat['d'] is None:
msg = "Interplane distance d no set !\n"
msg += " => self.set_dmat({'d':...})"
raise Exception(msg)
if lamb is None:
lamb = self._dbragg['lambref']
return _comp_optics.get_bragg_from_lamb(
np.atleast_1d(lamb), self._dmat['d'], n=n,
)
def get_lamb_from_bragg(self, bragg=None, n=None):
""" Braggs' law: n*lamb = 2dsin(bragg) """
if self._dmat['d'] is None:
msg = "Interplane distance d no set !\n"
msg += " => self.set_dmat({'d':...})"
raise Exception(msg)
if bragg is None:
bragg = self._dbragg['braggref']
return _comp_optics.get_lamb_from_bragg(np.atleast_1d(bragg),
self._dmat['d'], n=n)
    def update_non_parallelism(self, alpha=None, beta=None):
        """ Compute new values of unit vectors nout, e1 and e2 into
        dmat basis, due to non parallelism

        Update new values into dmat dict

        Parameters
        ----------
        alpha: None / float
            non-parallelism amplitude angle (rad); defaults to 0
        beta: None / float
            non-parallelism orientation angle (rad); defaults to 0
        """
        if alpha is None:
            alpha = 0
        if beta is None:
            beta = 0
        # rotate the dgeom (mesh) basis by (alpha, beta) into the dmat basis
        (self._dmat['nin'], self._dmat['nout'], self._dmat['e1'],
         self._dmat['e2']) = _comp_optics.get_vectors_from_angles(
             alpha, beta,
             self._dgeom['nout'], self._dgeom['e1'],
             self._dgeom['e2'],
        )
        self._dmat['alpha'], self._dmat['beta'] = alpha, beta
    def calc_meridional_sagital_focus(
        self,
        rcurve=None,
        bragg=None,
        alpha=None,
        use_non_parallelism=None,
        verb=None,
    ):
        """ Compute sagittal and meridional focuses distances.

        Optionnal result according to non-parallelism, using first the
        update_non_parallelism method.

        parameters
        ----------
        rcurve: float
            in dgeom dict., curvature radius of the crystal.
        bragg: float
            in dbragg dict., reference bragg angle of the crystal.
        alpha: float
            in dmat dict., amplitude of the non-parallelism
            as an a angle defined by user, in radian.
        use_non_parallelism: str
            Need to be True to use new alpha angle

        Return
        ------
        merid_ref: float
            Distance crystal-meridional focus (m), for a perfect crystal
        sagit_ref: float
            Distance crystal-sagital focus (m), for a perfect crystal
        merid_unp: float
            Distance crystal-meridional focus (m), using non_parallelism
        sagit_unp: float
            Distance crystal-sagital focus (m), using non_parallelism
        """
        # Check inputs
        if rcurve is None:
            rcurve = self._dgeom['rcurve']
        if bragg is None:
            bragg = self._dbragg['braggref']
        # NOTE(review): when use_non_parallelism is None (neither True nor
        # False), alpha keeps the user-provided value (possibly None) —
        # confirm downstream handles that case
        if use_non_parallelism is True:
            alpha = self._dmat['alpha']
        if use_non_parallelism is False:
            alpha = 0.0

        # Compute
        return _comp_optics.calc_meridional_sagital_focus(
            rcurve=rcurve,
            bragg=bragg,
            alpha=alpha,
            use_non_parallelism=use_non_parallelism,
            verb=verb,
        )
def get_rowland_dist_from_lambbragg(self, bragg=None, lamb=None, n=None):
""" Return the array of dist from cryst summit to pts on rowland """
bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
if np.all(np.isnan(bragg)):
msg = ("There is no available bragg angle!\n"
+ " => Check the vlue of self.dmat['d'] vs lamb")
raise Exception(msg)
return _comp_optics.get_rowland_dist_from_bragg(
bragg=bragg, rcurve=self._dgeom['rcurve'],
)
def get_detector_ideal(
self,
bragg=None, lamb=None,
rcurve=None, n=None,
ddist=None, di=None, dj=None,
dtheta=None, dpsi=None, tilt=None,
lamb0=None, lamb1=None, dist01=None,
use_non_parallelism=None,
tangent_to_rowland=None, plot=False,
):
""" Return approximate ideal detector geometry
Assumes infinitesimal and ideal crystal
Returns a dict containing the position and orientation of a detector if
it was placed ideally on the rowland circle, centered on the
desired bragg angle (in rad) or wavelength (in m)
The detector can be tangential to the Rowland circle or perpendicular
to the line between the crystal and the detector
Assumes detector center matching lamb (m) / bragg (rad)
The detector can be translated towards / away from the crystal
to make sure the distance between 2 spectral lines
(lamb0 and lamb1) on the detector's plane matches
a desired distance (dist01, in m)
Finally, a desired offset (translation) can be added
via (ddist, di, dj), in m
Similarly, an extra rotation can be added via (dtheta, dpsi, tilt)
Detector is described by center position
and (nout, ei, ej) unit vectors
By convention, nout = np.cross(ei, ej)
Vectors (ei, ej) define an orthogonal frame in the detector's plane
All coordinates are 3d (X, Y, Z in the tokamak's frame)
Return:
-------
det: dict
dict of detector geometrical characteristics:
'cent': np.ndarray
(3,) array of (x, y, z) coordinates of detector center
'nout': np.ndarray
(3,) array of (x, y, z) coordinates of unit vector
perpendicular to detector' surface
oriented towards crystal
'ei': np.ndarray
(3,) array of (x, y, z) coordinates of unit vector
defining first coordinate in detector's plane
'ej': np.ndarray
(3,) array of (x, y, z) coordinates of unit vector
defining second coordinate in detector's plane
'outline': np.darray
(2, N) array to build detector's contour
where the last point is identical to the first.
(for example for WEST X2D spectrometer:
x*np.r_[-1,-1,1,1,-1], y*np.r_[-1,1,1,-1,-1])
"""
# ---------------------
# Check / format inputs
if rcurve is None:
rcurve = self._dgeom['rcurve']
bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
if np.all(np.isnan(bragg)):
msg = ("There is no available bragg angle!\n"
+ " => Check the vlue of self.dmat['d'] vs lamb")
raise Exception(msg)
lc = [lamb0 is not None, lamb1 is not None, dist01 is not None]
if any(lc) and not all(lc):
msg = (
"Arg lamb0, lamb1 and dist01 must be provided together:\n"
+ "\t- lamb0: line0 wavelength ({})\n".format(lamb0)
+ "\t- lamb1: line1 wavelength ({})\n".format(lamb1)
+ "\t- dist01: distance (m) on detector between lines "
+ "({})".format(dist01)
)
raise Exception(msg)
bragg01 = None
if all(lc):
bragg01 = self._checkformat_bragglamb(
lamb=np.r_[lamb0, lamb1], n=n,
)
# split into 2 different condition because of dmat
lc = [rcurve is None, self._dgeom['summit'] is None]
if any(lc):
msg = (
"Some missing fields in dgeom for computation:"
+ "\n\t-" + "\n\t-".join(['rcurve'] + 'summit')
)
raise Exception(msg)
nout, e1, e2, use_non_parallelism = self.get_unit_vectors(
use_non_parallelism=use_non_parallelism,
)
lc = [cc is None for cc in [nout, e1, e2]]
if any(lc):
msg = (
"""
Field 'nout', 'e1', 'e2' missing!
"""
)
raise Exception(msg)
# Compute crystal-centered parameters in (nout, e1, e2)
(det_dist, n_crystdet_rel,
det_nout_rel, det_ei_rel) = _comp_optics.get_approx_detector_rel(
rcurve, bragg,
bragg01=bragg01, dist01=dist01,
tangent_to_rowland=tangent_to_rowland)
# Deduce absolute position in (x, y, z)
det_cent, det_nout, det_ei, det_ej = _comp_optics.get_det_abs_from_rel(
det_dist, n_crystdet_rel, det_nout_rel, det_ei_rel,
self._dgeom['summit'], nout, e1, e2,
ddist=ddist, di=di, dj=dj,
dtheta=dtheta, dpsi=dpsi, tilt=tilt)
if plot:
dax = self.plot()
p0 = np.repeat(det_cent[:,None], 3, axis=1)
vv = np.vstack((det_nout, det_ei, det_ej)).T
dax['cross'].plot(np.hypot(det_cent[0], det_cent[1]),
det_cent[2], 'xb')
dax['hor'].plot(det_cent[0], det_cent[1], 'xb')
dax['cross'].quiver(np.hypot(p0[0, :], p0[1, :]), p0[2, :],
np.hypot(vv[0, :], vv[1, :]), vv[2, :],
units='xy', color='b')
dax['hor'].quiver(p0[0, :], p0[1, :], vv[0, :], vv[1, :],
units='xy', color='b')
return {'cent': det_cent, 'nout': det_nout,
'ei': det_ei, 'ej': det_ej}
def _checkformat_det(self, det=None):
lc = [det is None, det is False, isinstance(det, dict)]
msg = ("det must be:\n"
+ "\t- False: not det provided\n"
+ "\t- None: use default approx det from:\n"
+ "\t self.get_detector_ideal()\n"
+ "\t- dict: a dictionary of 3d (x,y,z) coordinates of a point"
+ " (local frame center) and 3 unit vectors forming a direct "
+ "orthonormal basis attached to the detector's frame\n"
+ "\t\t\t\t- 'cent': detector center\n"
+ "\t\t\t\t- 'nout': unit vector perpendicular to surface, "
+ "in direction of the crystal\n"
+ "\t\t\t\t- 'ei': unit vector, first coordinate on surface\n"
+ "\t\t\t\t- 'ej': unit vector, second coordinate on surfacei\n"
+ " You provided: {}".format(det))
if not any(lc):
raise Exception(msg)
if lc[0]:
det = self.get_detector_ideal(lamb=self._dbragg['lambref'])
elif lc[2]:
lk = ['cent', 'nout', 'ei', 'ej']
c0 = (isinstance(det, dict)
and all([(kk in det.keys()
and hasattr(det[kk], '__iter__')
and np.atleast_1d(det[kk]).size == 3
and not np.any(np.isnan(det[kk])))
for kk in lk]))
if not c0:
raise Exception(msg)
for k0 in lk:
det[k0] = np.atleast_1d(det[k0]).ravel()
return det
def get_local_noute1e2(
self,
dtheta=None, psi=None,
ntheta=None, npsi=None,
use_non_parallelism=None,
include_summit=None,
):
""" Return (vout, ve1, ve2) associated to pts on the crystal's surface
All points on the spherical crystal's surface are identified
by (dtheta, psi) coordinates, where:
- theta = np.pi/2 + dtheta (dtheta=0 default) for the center
(for the diffracted beam), from frame's basis vector ez
- psi = 0 for the center, positive in direction of e1
They are the spherical coordinates from a sphere centered on the
crystal's center of curvature.
Args (dtheta, psi) can be:
- arbitrary: same shape and dimension up to 4
- 'envelop': will be computed to represent the crystal contour
will be returned as 2 1d arrays
Return the pts themselves and the 3 perpendicular local unit vectors
(nout, e1, e2), where nout is towards the outside of the sphere and
nout = np.cross(e1, e2)
In all cases, the output have shape (3, psi.shape)
Return:
-------
summ: np.ndarray
coordinates of the points on the surface
vout: np.ndarray
coordinates of outward unit vector
ve1: np.ndarray
coordinates of first tangential unit vector
ve2: np.ndarray
coordinates of second tangential unit vector
All are cartesian (X, Y, Z) coordinates in the tokamak's frame
"""
# Get local basis at crystal summit
nout, e1, e2, use_non_parallelism = self.get_unit_vectors(
use_non_parallelism=use_non_parallelism,
)
nin = -nout
# Get vectors at any points from psi & dtheta
vout, ve1, ve2 = _comp_optics.CrystBragg_get_noute1e2_from_psitheta(
nout, e1, e2,
psi=psi, dtheta=dtheta,
e1e2=True, sameshape=False,
extenthalf_psi=self._dgeom['extenthalf'][0],
extenthalf_dtheta=self._dgeom['extenthalf'][1],
ntheta=ntheta, npsi=npsi,
include_summit=include_summit,
)
vin = -vout
# cent no longer dgeom['center'] because no longer a fixed point
cent = self._dgeom['summit'] + self._dgeom['rcurve']*nin
reshape = np.r_[3, [1 for ii in range(vout.ndim - 1)]]
cent = cent.reshape(reshape)
# Redefining summit according to nout at each point at crystal
summ = cent + self._dgeom['rcurve']*vout
return summ, vout, ve1, ve2
def calc_xixj_from_braggphi(
self,
phi=None,
bragg=None,
lamb=None,
n=None,
dtheta=None,
psi=None,
det=None,
use_non_parallelism=None,
strict=None,
return_strict=None,
data=None,
plot=True,
dax=None,
):
""" Assuming crystal's summit as frame origin
According to [1], this assumes a local frame centered on the crystal
These calculations are independent from the tokamak's frame:
The origin of the local frame is the crystal's summit
The (O, ez) axis is the crystal's normal
The crystal is tangent to (O, ex, ey)
[1] tofu/Notes_Upgrades/SpectroX2D/SpectroX2D_EllipsesOnPlane.pdf
Parameters:
-----------
Z: float
Detector's plane intersection with (O, ez) axis
n: np.ndarray
(3,) array containing local (x,y,z) coordinates of the plane's
normal vector
"""
if return_strict is None:
return_strict = False
# Check / format inputs
bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
phi = np.atleast_1d(phi)
# Check / get det
det = self._checkformat_det(det)
# Get local summit nout, e1, e2 if non-centered
if dtheta is None:
dtheta = 0.
if psi is None:
psi = 0.
# Probably to update with use_non_parallelism?
# Get back summit & vectors at any point at the crystal surface,
# according to parallelism properties
summit, nout, e1, e2 = self.get_local_noute1e2(
dtheta=dtheta, psi=psi,
use_non_parallelism=use_non_parallelism,
ntheta=None, npsi=None,
include_summit=False,
)
# Compute
xi, xj, strict = _comp_optics.calc_xixj_from_braggphi(
det_cent=det['cent'],
det_nout=det['nout'], det_ei=det['ei'], det_ej=det['ej'],
det_outline=det.get('outline'),
summit=summit, nout=nout, e1=e1, e2=e2,
bragg=bragg, phi=phi, strict=strict,
)
if plot:
dax = _plot_optics.CrystalBragg_plot_approx_detector_params(
bragg, xi, xj, data, dax,
)
if return_strict is True:
return xi, xj, strict
else:
return xi, xj
    def plot_line_on_det_tracing(
        self, lamb=None, n=None,
        nphi=None,
        det=None, johann=None,
        use_non_parallelism=None,
        lpsi=None, ldtheta=None,
        strict=None,
        ax=None, dleg=None,
        rocking=None, fs=None, dmargin=None,
        wintit=None, tit=None,
    ):
        """ Visualize the de-focusing by ray-tracing of chosen lamb
        Possibility to plot few wavelength' arcs on the same plot.
        Args:
            - lamb: array of min size 1, in 1e-10 [m]
            - det: dict, must contain an 'outline' (2, N) entry
                - xi_bounds: np.min & np.max of _XI
                - xj_bounds: np.min & np.max of _XJ
                (from "inputs_temp/XICS_allshots_C34.py" l.649)
            - johann: True or False; if True (or if lpsi/ldtheta given),
              rays are also traced from multiple points on the crystal
              surface to visualize the johann error
        """
        # Check / format inputs
        if lamb is None:
            lamb = self._dbragg['lambref']
        lamb = np.atleast_1d(lamb).ravel()
        nlamb = lamb.size
        # johann defaults to True as soon as custom crystal points are given
        if johann is None:
            johann = lpsi is not None or ldtheta is not None
        if rocking is None:
            rocking = False
        if det is None or det.get('outline') is None:
            msg = ("Please provide det as a dict with 'outline'!")
            raise Exception(msg)
        # Get local basis
        nout, e1, e2, use_non_parallelism = self.get_unit_vectors(
            use_non_parallelism=use_non_parallelism,
        )
        nin = -nout
        # Compute lamb / phi: phi range spanned by the detector outline
        _, phi = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
            xi=det['outline'][0, :], xj=det['outline'][1, :], det=det,
            dtheta=0, psi=0,
            use_non_parallelism=use_non_parallelism,
            n=n,
            grid=True,
            return_lamb=False,
        )
        phimin, phimax = np.nanmin(phi), np.nanmax(phi)
        # widen the phi interval by 10% on each side for plotting margin
        phimin, phimax = phimin-(phimax-phimin)/10, phimax+(phimax-phimin)/10
        # Get reference ray-tracing (from the crystal summit only)
        bragg = self._checkformat_bragglamb(lamb=lamb, n=n)
        if nphi is None:
            nphi = 100
        phi = np.linspace(phimin, phimax, nphi)
        xi = np.full((nlamb, nphi), np.nan)
        xj = np.full((nlamb, nphi), np.nan)
        for ll in range(nlamb):
            # one iso-lamb arc per wavelength
            xi[ll, :], xj[ll, :] = self.calc_xixj_from_braggphi(
                bragg=np.full(phi.shape, bragg[ll]),
                phi=phi,
                dtheta=0.,
                psi=0.,
                n=n,
                det=det,
                use_non_parallelism=use_non_parallelism,
                strict=strict,
                plot=False,
            )
        # Get johann-error raytracing (multiple positions on crystal)
        xi_er, xj_er = None, None
        if johann and not rocking:
            # default: a 15x15 grid covering the full crystal extent
            if lpsi is None:
                lpsi = np.linspace(-1., 1., 15)
            if ldtheta is None:
                ldtheta = np.linspace(-1., 1., 15)
            lpsi, ldtheta = np.meshgrid(lpsi, ldtheta)
            lpsi = lpsi.ravel()
            ldtheta = ldtheta.ravel()
            # scale the normalized grid by the crystal half-extent
            lpsi = self._dgeom['extenthalf'][0]*np.r_[lpsi]
            ldtheta = self._dgeom['extenthalf'][1]*np.r_[ldtheta]
            npsi = lpsi.size
            assert npsi == ldtheta.size
            xi_er = np.full((nlamb, npsi*nphi), np.nan)
            xj_er = np.full((nlamb, npsi*nphi), np.nan)
            for l in range(nlamb):
                for ii in range(npsi):
                    # slots [ii*nphi, (ii+1)*nphi) hold the arc traced from
                    # crystal point ii
                    i0 = np.arange(ii*nphi, (ii+1)*nphi)
                    xi_er[l, i0], xj_er[l, i0] = self.calc_xixj_from_braggphi(
                        phi=phi, bragg=bragg[l], lamb=None, n=n,
                        dtheta=ldtheta[ii], psi=lpsi[ii],
                        det=det, plot=False,
                        use_non_parallelism=use_non_parallelism,
                        strict=strict,
                    )
        # Get rocking curve error
        # NOTE(review): rocking-curve tracing not implemented (placeholder)
        if rocking:
            pass
        # Plot
        return _plot_optics.CrystalBragg_plot_line_tracing_on_det(
            lamb, xi, xj, xi_er, xj_er,
            det=det, ax=ax, dleg=dleg,
            johann=johann, rocking=rocking,
            fs=fs, dmargin=dmargin, wintit=wintit, tit=tit)
def calc_johannerror(
self,
xi=None, xj=None, err=None,
det=None, n=None,
lpsi=None, ldtheta=None,
lambda_interval_min=None,
lambda_interval_max=None,
use_non_parallelism=None,
plot=True, fs=None, cmap=None,
vmin=None, vmax=None, tit=None, wintit=None,
):
""" Plot the johann error
The johann error is the error (scattering) induced by defocalization
due to finite crystal dimensions
There is a johann error on wavelength (lamb => loss of spectral
resolution) and on directionality (phi)
If provided, lpsi and ldtheta are taken as normalized variations with
respect to the crystal summit and to its extenthalf.
Typical values are:
- lpsi = [-1, 1, 1, -1]
- ldtheta = [-1, -1, 1, 1]
They must have the same len()
First affecting a reference lambda according to:
- pixel's position
- crystal's summit
Then, computing error on bragg and phi angles on each pixels by
computing lambda and phi from the crystal's outline
Provide lambda_interval_min/max to ensure the given wavelength interval
is detected over the whole surface area.
A True/False boolean is then returned.
"""
# Check xi, xj once before to avoid doing it twice
if err is None:
err = 'abs'
if lambda_interval_min is None:
lambda_interval_min = 3.93e-10
if lambda_interval_max is None:
lambda_interval_max = 4.00e-10
xi, xj, (xii, xjj) = _comp_optics._checkformat_xixj(xi, xj)
# Check / format inputs
bragg, phi, lamb = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
xi=xii, xj=xjj, det=det,
dtheta=0, psi=0,
use_non_parallelism=use_non_parallelism,
n=n,
grid=True,
return_lamb=True,
)
# Only one summit was selected
bragg, phi, lamb = bragg[..., 0], phi[..., 0], lamb[..., 0]
# Check lambda interval into lamb array
c0 = (
np.min(lamb) < lambda_interval_min
and np.max(lamb) > lambda_interval_max
)
if c0:
test_lambda_interv = True
else:
test_lambda_interv = False
# Get err from multiple ldtheta, lpsi
if lpsi is None:
lpsi = np.r_[-1., 0., 1., 1., 1., 0., -1, -1]
lpsi = self._dgeom['extenthalf'][0]*np.r_[lpsi]
if ldtheta is None:
ldtheta = np.r_[-1., -1., -1., 0., 1., 1., 1., 0.]
ldtheta = self._dgeom['extenthalf'][1]*np.r_[ldtheta]
npsi = lpsi.size
assert npsi == ldtheta.size
(
braggerr, phierr, lamberr,
) = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
xi=xii, xj=xjj, det=det,
dtheta=ldtheta, psi=lpsi,
use_non_parallelism=use_non_parallelism,
n=n,
grid=True,
return_lamb=True,
)
err_lamb = np.nanmax(np.abs(lamb[..., None] - lamberr), axis=-1)
err_phi = np.nanmax(np.abs(phi[..., None] - phierr), axis=-1)
# absolute vs relative error
if 'rel' in err:
if err == 'rel':
err_lamb = 100.*err_lamb / (np.nanmax(lamb) - np.nanmin(lamb))
err_phi = 100.*err_phi / (np.nanmax(phi) - np.nanmin(phi))
elif err == 'rel2':
err_lamb = 100.*err_lamb / np.mean(lamb)
err_phi = 100.*err_phi / np.mean(phi)
err_lamb_units = '%'
err_phi_units = '%'
else:
err_lamb_units = 'm'
err_phi_units = 'rad'
if plot is True:
ax = _plot_optics.CrystalBragg_plot_johannerror(
xi, xj, lamb, phi,
err_lamb, err_phi,
err_lamb_units=err_lamb_units,
err_phi_units=err_phi_units,
cmap=cmap, vmin=vmin, vmax=vmax,
fs=fs, tit=tit, wintit=wintit,
)
return (
err_lamb, err_phi, err_lamb_units, err_phi_units,
test_lambda_interv,
)
def plot_focal_error_summed(
self,
dist_min=None, dist_max=None,
di_min=None, di_max=None,
ndist=None, ndi=None,
lamb=None, bragg=None,
xi=None, xj=None,
err=None,
use_non_parallelism=None,
tangent_to_rowland=None, n=None,
plot=None,
pts=None,
det_ref=None, plot_dets=None, nsort=None,
dcryst=None,
lambda_interval_min=None,
lambda_interval_max=None,
contour=None,
fs=None,
ax=None,
cmap=None,
vmin=None,
vmax=None,
return_ax=None,
):
"""
Using the calc_johannerror method, computing the sum of the
focalization error over the whole detector for different positions
characterized by the translations ddist and di in the equatorial plane
(dist_min, dist_max, ndist) (di_min, di_max, ndi).
Parameters:
-----------
- lamb/bragg : float
Automatically set to crystal's references
- xi, xj : np.ndarray
pixelization of the detector
(from "inputs_temp/XICS_allshots_C34.py" l.649)
- alpha, beta : float
Values of Non Parallelism references angles
- use_non_parallelism : str
- tangent_to_rowland : str
- plot_dets : str
Possibility to plot the nsort- detectors with the lowest
summed focalization error, next to the Best Approximate Real
detector
dict(np.load('det37_CTVD_incC4_New.npz', allow_pickle=True))
- nsort : float
Number of best detector's position to plot
- lambda_interv_min/max : float
To ensure the given wavelength interval is detected over the whole
surface area. A True/False boolean is then returned.
"""
# Check / format inputs
if dist_min is None:
dist_min = -0.15
if dist_max is None:
dist_max = 0.15
if di_min is None:
di_min = -0.40
if di_max is None:
di_max = 0.40
if ndist is None:
ndist = 21
if ndi is None:
ndi = 21
if err is None:
err = 'rel'
if plot is None:
plot = True
if plot_dets is None:
plot_dets = det_ref is not None
if nsort is None:
nsort = 5
if return_ax is None:
return_ax = True
if lambda_interval_min is None:
lambda_interval_min = 3.93e-10
if lambda_interval_max is None:
lambda_interval_max = 4.00e-10
l0 = [dist_min, dist_max, ndist, di_min, di_max, ndi]
c0 = any([l00 is not None for l00 in l0])
if not c0:
msg = (
"Please give the ranges of ddist and di translations\n"
"\t to compute the different detector's position\n"
"\t Provided:\n"
"\t\t- dist_min, dist_max, ndist: ({}, {}, {})\n".format(
dist_min, dist_max, ndist,
)
+ "\t\t- di_min, di_max, ndi: ({}, {}, {})\n".format(
di_min, di_max, ndi,
)
)
raise Exception(msg)
# ------------
# Compute local coordinates of det_ref
(
ddist0, di0, dj0,
dtheta0, dpsi0, tilt0,
) = self._get_local_coordinates_of_det(
bragg=bragg,
lamb=lamb,
det_ref=det_ref,
use_non_parallelism=use_non_parallelism,
)
# angle between nout vectors from get_det_approx() &
## get_det_approx(tangent=False)
det1 = self.get_detector_ideal(
lamb=lamb,
bragg=bragg,
use_non_parallelism=use_non_parallelism,
tangent_to_rowland=True,
)
det2 = self.get_detector_ideal(
lamb=lamb,
bragg=bragg,
use_non_parallelism=use_non_parallelism,
tangent_to_rowland=False,
)
cos_angle_nout = np.sum(
det1['nout'] * det2['nout']
) / (
np.linalg.norm(det1['nout'] * np.linalg.norm(det2['nout']))
)
angle_nout = np.arccos(cos_angle_nout)
# Compute
ddist = np.linspace(dist_min, dist_max, int(ndist))
di = np.linspace(di_min, di_max, int(ndi))
error_lambda = np.full((di.size, ddist.size), np.nan)
test_lamb_interv = np.zeros((di.size, ddist.size), dtype='bool')
end = '\r'
for ii in range(ddist.size):
for jj in range(di.size):
# print progression
if ii == ndist-1 and jj == ndi-1:
end = '\n'
msg = (
"Computing mean focal error for det "
f"({ii+1}, {jj+1})/({ndist}, {ndi})"
).ljust(60)
print(msg, end=end, flush=True)
# Get det
dpsi0bis = float(dpsi0)
if tangent_to_rowland:
dpsi0bis = dpsi0 - angle_nout
det = self.get_detector_ideal(
ddist=ddist[ii],
di=di[jj],
dj=dj0,
dtheta=dtheta0,
dpsi=dpsi0bis,
tilt=tilt0,
lamb=lamb,
bragg=bragg,
use_non_parallelism=use_non_parallelism,
tangent_to_rowland=False,
)
# Integrate error
(
error_lambda_temp, test_lamb_interv[jj, ii],
) = self.calc_johannerror(
xi=xi, xj=xj,
det=det,
err=err,
lambda_interval_min=lambda_interval_min,
lambda_interval_max=lambda_interval_max,
plot=False,
)[::4]
error_lambda[jj, ii] = np.nanmean(error_lambda_temp)
if 'rel' in err:
units = '%'
else:
units = 'm'
if plot:
ax = _plot_optics.CrystalBragg_plot_focal_error_summed(
cryst=self, dcryst=dcryst,
lamb=lamb, bragg=bragg,
error_lambda=error_lambda,
ddist=ddist, di=di,
ddist0=ddist0, di0=di0, dj0=dj0,
dtheta0=dtheta0, dpsi0=dpsi0, tilt0=tilt0,
angle_nout=angle_nout,
det_ref=det_ref,
units=units,
plot_dets=plot_dets, nsort=nsort,
tangent_to_rowland=tangent_to_rowland,
use_non_parallelism=use_non_parallelism,
pts=pts,
test_lamb_interv=test_lamb_interv,
contour=contour,
fs=fs,
ax=ax,
cmap=cmap,
vmin=vmin,
vmax=vmax,
)
if return_ax:
return error_lambda, ddist, di, test_lamb_interv, ax
else:
return error_lambda, ddist, di, test_lamb_interv
def _get_local_coordinates_of_det(
self,
bragg=None,
lamb=None,
det_ref=None,
use_non_parallelism=None,
):
"""
Computation of translation (ddist, di, dj) and angular
(dtheta, dpsi, tilt) properties of an arbitrary detector choosen by
the user.
"""
# ------------
# check inputs
if det_ref is None:
msg = (
"You need to provide your arbitrary detector\n"
+ "\t in order to compute its spatial properties !\n"
+ "\t You provided: {}".format(det)
)
raise Exception(msg)
# Checkformat det
det_ref = self._checkformat_det(det=det_ref)
# ------------
# get approx detect
det_approx = self.get_detector_ideal(
bragg=bragg, lamb=lamb,
tangent_to_rowland=False,
use_non_parallelism=use_non_parallelism,
)
# ------------
# get vector delta between centers
delta = det_ref['cent'] - det_approx['cent']
ddist = np.sum(delta * (-det_approx['nout']))
di = np.sum(delta * det_approx['ei'])
dj = np.sum(delta * det_approx['ej'])
# ---------------
# get angles from unit vectors
dtheta, dpsi, tilt = None, None, None
# use formulas in _comp_optics.get_det_abs_from_rel()
sindtheta = np.sum(det_approx['ej'] * det_ref['nout'])
costheta_cospsi = np.sum(det_approx['nout'] * det_ref['nout'])
costheta_sinpsi = np.sum(det_approx['ei'] * det_ref['nout'])
costheta = np.sqrt(costheta_cospsi**2 + costheta_sinpsi**2)
dtheta = np.arctan2(sindtheta, costheta)
dpsi = np.arctan2(
costheta_sinpsi / costheta,
costheta_cospsi / costheta,
)
# ---------
# tilt
det_ei2 = (
np.cos(dpsi)*det_approx['ei'] - np.sin(dpsi)*det_approx['nout']
)
det_ej2 = np.cross(det_ref['nout'], det_ei2)
costilt = np.sum(det_ref['ei']*det_ei2)
sintilt = np.sum(det_ref['ei']*det_ej2)
tilt = np.arctan2(sintilt, costilt)
return ddist, di, dj, dtheta, dpsi, tilt
def get_lambbraggphi_from_ptsxixj_dthetapsi(
self,
pts=None,
xi=None, xj=None, det=None,
dtheta=None, psi=None,
ntheta=None, npsi=None,
n=None,
use_non_parallelism=None,
grid=None,
return_lamb=None,
):
""" Return the lamb, bragg and phi for provided pts and dtheta/psi
if grid = True:
compute all pts / dtheta/psi comnbinations
=> return (npts, ndtheta) arrays
else:
each pts is associated to a single dtheta/psi
=> assumes npts == ndtheta == npsi
=> return (npts,) arrays
"""
# Check / Format inputs
if return_lamb is None:
return_lamb = True
det = self._checkformat_det(det)
# Get local basis
summ, vout, ve1, ve2 = self.get_local_noute1e2(
dtheta=dtheta, psi=psi,
ntheta=ntheta, npsi=npsi,
use_non_parallelism=use_non_parallelism,
include_summit=True,
)
# Derive bragg, phi
bragg, phi = _comp_optics.calc_braggphi_from_xixjpts(
pts=pts,
xi=xi, xj=xj, det=det,
summit=summ, nin=-vout, e1=ve1, e2=ve2,
grid=grid,
)
# Derive lamb
if return_lamb is True:
lamb = self.get_lamb_from_bragg(bragg=bragg, n=n)
return bragg, phi, lamb
else:
return bragg, phi
def get_lamb_avail_from_pts(
self,
pts=None,
n=None, ndtheta=None,
det=None, nlamb=None, klamb=None,
use_non_parallelism=None,
strict=None,
return_phidtheta=None,
return_xixj=None,
):
""" Return the wavelength accessible from plasma points on the crystal
For a given plasma point, only a certain lambda interval can be
bragg-diffracted on the crystal (due to bragg's law and the crystal's
dimensions)
Beware, for a given pts and lamb, there can be up to 2 sets of
solutions
All non-valid solutions are set to nans, such that most of the time
there is only one
For a set of given:
- pts (3, npts) array, (x, y, z) coordinates
Using:
- nlamb: sampling of the lamb interval (default: 100)
- ndtheta: sampling of the lamb interval (default: 20)
- det: (optional) a detector dict, for xi and xj
Returns:
- lamb: (npts, nlamb) array of sampled valid wavelength interval
- phi: (npts, nlamb, ndtheta, 2) array of phi
- dtheta: (npts, nlamb, ndtheta, 2) array of dtheta
- psi: (npts, nlamb, ndtheta, 2) array of psi
And optionally (return_xixj=True and det provided as dict):
- xi: (npts, nlamb, ndtheta, 2) array of xi
- xj: (npts, nlamb, ndtheta, 2) array of xj
The result is computed with or w/o taking into account non-parallelism
"""
# Check / format
if ndtheta is None:
ndtheta = 20
if nlamb is None:
nlamb = 100
assert nlamb >= 2, "nlamb must be >= 2"
if return_phidtheta is None:
return_phidtheta = True
if return_xixj is None:
return_xixj = det is not None
if det is None:
return_xixj = False
if det is None:
strict = False
# Get lamb min / max
bragg, phi, lamb = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
pts=pts,
dtheta='envelop', psi='envelop',
ntheta=None, npsi=None,
n=n, grid=True,
use_non_parallelism=use_non_parallelism,
return_lamb=True,
)
lambmin = np.nanmin(lamb, axis=1)
lambmax = np.nanmax(lamb, axis=1)
if klamb is None:
klamb = np.linspace(0, 1, nlamb)
elif not (isinstance(klamb, np.ndarray) and klamb.ndim == 1):
msg = "Please provide klamb as a 1d vector!"
raise Exception(msg)
nlamb = klamb.size
lamb = lambmin[:, None] + (lambmax-lambmin)[:, None]*klamb
return _comp_optics._get_lamb_avail_from_pts_phidtheta_xixj(
cryst=self,
lamb=lamb,
n=n,
ndtheta=ndtheta,
pts=pts,
use_non_parallelism=use_non_parallelism,
return_phidtheta=return_phidtheta,
return_xixj=return_xixj,
strict=strict,
det=det,
)
    def _calc_dthetapsiphi_from_lambpts(
        self,
        pts=None, bragg=None, lamb=None,
        n=None, ndtheta=None,
        use_non_parallelism=None,
        grid=None,
    ):
        """ Return (dtheta, psi, phi, bragg) for given plasma pts and lamb.

        For each (lamb, pt) pair, finds the crystal surface coordinates
        (dtheta, psi) from which the point is seen under bragg incidence;
        up to 2 solutions can exist, hence a trailing axis of size 2 in
        all outputs. Invalid solutions are set to nan.
        """
        # Check / Format inputs
        pts = _comp_optics._checkformat_pts(pts)
        npts = pts.shape[1]
        bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
        # get nout, e1, e2
        nout, e1, e2, use_non_parallelism = self.get_unit_vectors(
            use_non_parallelism=use_non_parallelism
        )
        # Compute dtheta, psi, indnan (nlamb, npts, ndtheta)
        # In general there are 2 solutions! (only close to rowland in practice)
        dtheta, psi, indok, grid = _comp_optics.calc_dthetapsiphi_from_lambpts(
            pts,
            bragg,
            summit=self._dgeom['summit'],  # To be updated (non-paralellism)?
            rcurve=self._dgeom['rcurve'],
            nout=nout, e1=e1, e2=e2,
            extenthalf=self._dgeom['extenthalf'],
            ndtheta=ndtheta,
            grid=grid,
        )
        # reshape bragg for matching dtheta.shape
        # grid=True: bragg (nlamb,) -> (nlamb, npts, ndtheta, 2),
        # and pts broadcastable against that shape
        if grid is True:
            bragg = np.repeat(
                np.repeat(
                    np.repeat(bragg[:, None], npts, axis=-1)[..., None],
                    dtheta.shape[2],
                    axis=-1,
                )[..., None],
                2,
                axis=-1,
            )
            pts = pts[:, None, :, None, None]
        else:
            # grid=False: one (dtheta, psi) per pt; bragg -> (npts, nd, 2)
            bragg = np.repeat(
                np.repeat(bragg[:, None], dtheta.shape[1], axis=1)[..., None],
                2,
                axis=-1,
            )
            pts = pts[..., None, None]
        # invalidate solutions flagged by the geometry computation
        bragg[~indok] = np.nan
        # Get corresponding phi and re-check bragg, for safety
        bragg2, phi = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
            pts=pts,
            dtheta=dtheta, psi=psi,
            grid=False,
            use_non_parallelism=use_non_parallelism,
            return_lamb=False,
        )
        # consistency check: both bragg computations must agree (nan-aware)
        c0 = (
            bragg2.shape == bragg.shape
            and np.allclose(bragg, bragg2, equal_nan=True)
        )
        if not c0:
            # best-effort debug plot; a plotting failure must not mask the
            # consistency error raised below
            try:
                plt.figure()
                plt.plot(bragg, bragg2, '.')
            except Exception as err:
                pass
            msg = (
                "Inconsistency detected in bragg angle computations:\n"
                + "\t- from the points and lamb\n"
                + "\t- from the points and (dtheta, psi)\n"
                + "\nContext:\n"
                + "\t- use_non_parallelism: {}\n".format(use_non_parallelism)
                + "\t- bragg.shape = {}\n".format(bragg.shape)
                + "\t- bragg2.shape = {}\n".format(bragg2.shape)
            )
            raise Exception(msg)
        return dtheta, psi, phi, bragg
    def calc_raytracing_from_lambpts(
        self,
        lamb=None, bragg=None, pts=None,
        xi_bounds=None, xj_bounds=None, nphi=None,
        det=None, n=None, ndtheta=None,
        johann=False, lpsi=None, ldtheta=None,
        rocking=False, strict=None, plot=None, fs=None,
        dmargin=None, wintit=None,
        tit=None, proj=None,
        legend=None, draw=None, returnas=None,
    ):
        """ Visualize the de-focusing by ray-tracing of chosen lamb
        If plot, 3 different plots can be produced:
            - det: plots the intersection of rays with detector plane
            - '2d': plots the geometry of the rays in 2d cross and hor
            - '3d': plots the geometry of the rays in 3d
        Specify the plotting option by setting plot to any of these
        (or a list)

        Returns (dtheta, psi, phi, bragg, xi, xj), or the plot axes when
        returnas == 'ax' (NOTE: plotting is currently disabled, see below).
        """
        # Check / format inputs
        if returnas is None:
            returnas = 'data'
        if plot is None or plot is True:
            plot = ['det', '3d']
        if isinstance(plot, str):
            plot = plot.split('+')
        assert all([ss in ['det', '2d', '3d'] for ss in plot])
        assert returnas in ['data', 'ax']
        pts = _comp_optics._checkformat_pts(pts)
        npts = pts.shape[1]
        # Get dtheta, psi and phi from pts/lamb
        dtheta, psi, phi, bragg = self._calc_dthetapsiphi_from_lambpts(
            pts=pts, lamb=lamb, bragg=bragg, n=n, ndtheta=ndtheta,
        )
        ndtheta = dtheta.shape[-1]
        # assert dtheta.shape == (nlamb, npts, ndtheta)
        # Check / get det
        det = self._checkformat_det(det)
        # Compute xi, xj of reflexion (phi -> phi + np.pi)
        xi, xj = self.calc_xixj_from_braggphi(
            bragg=bragg, phi=phi+np.pi, n=n,
            dtheta=dtheta, psi=psi,
            det=det, strict=strict, plot=False,
        )
        # Plot to be checked - unnecessary ?
        # NOTE(review): plotting is deliberately disabled here, making the
        # whole block below dead code (and returnas='ax' unreachable) —
        # confirm intent before re-enabling
        plot = False
        if plot is not False:
            ptscryst, ptsdet = None, None
            if '2d' in plot or '3d' in plot:
                ptscryst = self.get_local_noute1e2(dtheta, psi)[0]
                ptsdet = (det['cent'][:, None, None, None]
                          + xi[None, ...]*det['ei'][:, None, None, None]
                          + xj[None, ...]*det['ej'][:, None, None, None])
            ax = _plot_optics.CrystalBragg_plot_raytracing_from_lambpts(
                xi=xi, xj=xj, lamb=lamb,
                xi_bounds=xi_bounds, xj_bounds=xj_bounds,
                pts=pts, ptscryst=ptscryst, ptsdet=ptsdet,
                det_cent=det['cent'], det_nout=det['nout'],
                det_ei=det['ei'], det_ej=det['ej'],
                cryst=self, proj=plot, fs=fs, dmargin=dmargin,
                wintit=wintit, tit=tit, legend=legend, draw=draw)
            if returnas == 'ax':
                return ax
        return dtheta, psi, phi, bragg, xi, xj
def _calc_spect1d_from_data2d(self, data, lamb, phi,
nlambfit=None, nphifit=None,
nxi=None, nxj=None,
spect1d=None, mask=None, vertsum1d=None):
if nlambfit is None:
nlambfit = nxi
if nphifit is None:
nphifit = nxj
return _comp_optics._calc_spect1d_from_data2d(
data, lamb, phi,
nlambfit=nlambfit,
nphifit=nphifit,
spect1d=spect1d,
mask=mask,
vertsum1d=vertsum1d,
)
def plot_data_vs_lambphi(
self,
xi=None, xj=None, data=None, mask=None,
det=None, dtheta=None, psi=None, n=None,
nlambfit=None, nphifit=None,
magaxis=None, npaxis=None,
dlines=None, spect1d='mean',
lambmin=None, lambmax=None,
xjcut=None, dxj=None,
plot=True, fs=None, tit=None, wintit=None,
cmap=None, vmin=None, vmax=None,
returnas=None,
):
# Check / format inputs
assert data is not None
if returnas is None:
returnas = 'spect'
lreturn = ['ax', 'spect']
if returnas not in lreturn:
msg = ("Arg returnas must be in {}\n:".format(lreturn)
+ "\t- 'spect': return a 1d vertically averaged spectrum\n"
+ "\t- 'ax' : return a list of axes instances")
raise Exception(msg)
xi, xj, (xii, xjj) = _comp_optics._checkformat_xixj(xi, xj)
nxi = xi.size if xi is not None else np.unique(xii).size
nxj = xj.size if xj is not None else np.unique(xjj).size
# Compute lamb / phi
bragg, phi, lamb = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
xi=xii, xj=xjj, det=det,
dtheta=dtheta, psi=psi,
use_non_parallelism=use_non_parallelism,
n=n,
grid=True,
return_lamb=True,
)
# Compute lambfit / phifit and spectrum1d
(spect1d, lambfit, phifit,
vertsum1d, phiminmax) = self._calc_spect1d_from_data2d(
data, lamb, phi,
nlambfit=nlambfit, nphifit=nphifit, nxi=nxi, nxj=nxj,
spect1d=spect1d, mask=mask, vertsum1d=True
)
# Get phiref from mag axis
lambax, phiax = None, None
if magaxis is not None:
if npaxis is None:
npaxis = 1000
thetacryst = np.arctan2(self._dgeom['summit'][1],
self._dgeom['summit'][0])
thetaax = thetacryst + np.pi/2*np.linspace(-1, 1, npaxis)
pts = np.array([magaxis[0]*np.cos(thetaax),
magaxis[0]*np.sin(thetaax),
np.full((npaxis,), magaxis[1])])
braggax, phiax = self.calc_braggphi_from_pts(pts)
lambax = self.get_lamb_from_bragg(braggax)
phiax = np.arctan2(np.sin(phiax-np.pi), np.cos(phiax-np.pi))
ind = ((lambax >= lambfit[0]) & (lambax <= lambfit[-1])
& (phiax >= phifit[0]) & (phiax <= phifit[-1]))
lambax, phiax = lambax[ind], phiax[ind]
ind = np.argsort(lambax)
lambax, phiax = lambax[ind], phiax[ind]
# Get lamb / phi for xj
lambcut, phicut, spectcut = None, None, None
if xjcut is not None:
if dxj is None:
dxj = 0.002
xjcut = np.sort(np.atleast_1d(xjcut).ravel())
xicutf = np.tile(xi, (xjcut.size, 1))
xjcutf = np.repeat(xjcut[:, None], nxi, axis=1)
(
braggcut, phicut, lambcut,
) = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
xi=xicutf, xj=xjcutf, det=det,
dtheta=0, psi=0,
use_non_parallelism=use_non_parallelism,
n=1,
grid=True,
return_lamb=True,
)
indxj = [(np.abs(xj-xjc) <= dxj).nonzero()[0] for xjc in xjcut]
spectcut = np.array([np.nanmean(data[ixj, :], axis=0)
for ixj in indxj])
# plot
ax = None
if plot:
ax = _plot_optics.CrystalBragg_plot_data_vs_lambphi(
xi, xj, bragg, lamb, phi, data,
lambfit=lambfit, phifit=phifit, spect1d=spect1d,
vertsum1d=vertsum1d, lambax=lambax, phiax=phiax,
lambmin=lambmin, lambmax=lambmax, phiminmax=phiminmax,
xjcut=xjcut, lambcut=lambcut, phicut=phicut, spectcut=spectcut,
cmap=cmap, vmin=vmin, vmax=vmax, dlines=dlines,
tit=tit, wintit=wintit, fs=fs)
if returnas == 'spect':
return spect1d, lambfit
elif returnas == 'ax':
return ax
    def get_plasmadomain_at_lamb(
        self,
        config=None,
        struct=None,
        domain=None,
        res=None,
        det=None,
        xixj_lim=None,
        strict=None,
        bragg=None,
        lamb=None,
        # for available lamb determination
        ndtheta=None,
        nlamb=None,
        n=None,
        use_non_parallelism=None,
        # plotting
        plot=None,
        dax=None,
        plot_as=None,
        lcolor=None,
        return_dax=None,
    ):
        """ Return pts in the plasma domain and a wavelength-access mask

        The mask is True only for points for which the desired wavelength is
        accesible from the crystal (and from the detector if strict=True and
        det is provided)

        More than one value of lamb can be provided (nlamb >= 1)

        Parameters
        ----------
        config:     tofu Configuration holding the 'Ves' structure to sample
                    (also passed to the plotting routine)
        struct:     name of the structure in config; checked / derived by
                    _check_optics._check_config_get_Ves()
        domain:     optional (R, Z, Phi) sub-domain for the volume sampling
        res:        sampling resolution, forwarded to get_sampleV()
        det:        detector dict, used only when strict is True
        xixj_lim:   optional ((ximin, ximax), (xjmin, xjmax)) limits used to
                    build a restricted detector outline for the strict check
        strict:     if True (default), also discard points whose reflected
                    ray misses the (possibly restricted) detector
        bragg, lamb: target bragg angle(s) / wavelength(s), mutually
                    exclusive, normalized by _checkformat_bragglamb()
        ndtheta:    nb of dtheta samples for the strict accessibility check
        nlamb:      defaulted to 11 (see NOTE below)
        n:          diffraction order
        plot, dax, plot_as, lcolor, return_dax: plotting controls

        Return
        ------
        pts:    (3, npts) array of kept (R, Z, Phi) points
        lambok: (nlamb, npts) bool array, True where lamb is accessible
        dax:    dict of axes, appended only if return_dax is True
        """

        # ------------
        # check inputs
        struct = _check_optics._check_config_get_Ves(
            config=config, struct=struct,
        )

        # normalize bragg/lamb, then recompute lamb from bragg for coherence
        bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
        lamb = self.get_lamb_from_bragg(bragg=bragg, n=n)

        # To be refined if xjlim is narrow
        if ndtheta is None:
            ndtheta = 5

        # To be refined if xilim is narrow
        # NOTE(review): nlamb is defaulted here but never used below
        # (get_lamb_avail_from_pts is called with nlamb=2) — presumably
        # vestigial; confirm
        if nlamb is None:
            nlamb = 11

        if strict is None:
            strict = True

        if plot is None:
            plot = True

        if return_dax is None:
            return_dax = plot is True

        # -------------
        # sample volume: regular (R, Z, Phi) grid of the chosen structure
        (
            pts, dV, ind, (resR, resZ, resPhi),
        ) = config.dStruct['dObj']['Ves'][struct].get_sampleV(
            res=res,
            domain=domain,
            returnas='(R, Z, Phi)',
        )

        # ------------------------------
        # check access from crystal only

        # (R, Z, Phi) -> cartesian (X, Y, Z) for the geometry routines
        ptsXYZ = np.array([
            pts[0, :]*np.cos(pts[2, :]),
            pts[0, :]*np.sin(pts[2, :]),
            pts[1, :],
        ])

        # per-point accessible wavelength interval: nlamb=2 -> [min, max]
        lamb_access = self.get_lamb_avail_from_pts(
            pts=ptsXYZ,
            nlamb=2,
            use_non_parallelism=use_non_parallelism,
            return_phidtheta=False,
            return_xixj=False,
            strict=False,
        )

        # True where each target lamb lies inside the accessible interval
        lambok = np.zeros((lamb.size, pts.shape[1]), dtype=bool)
        for ii, ll in enumerate(lamb):
            lambok[ii, :] = (
                (lamb_access[:, 0] <= ll) & (ll <= lamb_access[:, 1])
            )

        # ---------------
        # refactor pts and lambok: drop points accessible at no target lamb
        indok = np.any(lambok, axis=0)
        pts = pts[:, indok]
        ptsXYZ = ptsXYZ[:, indok]
        lambok = lambok[:, indok]

        # ---------------
        # check strict

        if strict is True:

            # det vs detbis if xixj_lim
            # NOTE(review): dict(det) raises TypeError if det is None while
            # strict is True — det is presumably required here; confirm
            detbis = dict(det)
            if xixj_lim is not None:
                # rectangular outline built from the (xi, xj) limits,
                # then closed by repeating the first corner
                detbis['outline'] = np.array([
                    np.r_[
                        xixj_lim[0][0],
                        xixj_lim[0][1]*np.r_[1, 1],
                        xixj_lim[0][0],
                    ],
                    np.r_[
                        xixj_lim[1][0]*np.r_[1, 1],
                        xixj_lim[1][1]*np.r_[1, 1],
                    ],
                ])
                detbis['outline'] = np.concatenate(
                    (detbis['outline'], detbis['outline'][:, 0:1]),
                    axis=1,
                )

            # intersection with detbis: keep only points with a valid
            # (non-nan) detector solution at each target wavelength
            for kk, ll in enumerate(lamb):
                lambi = _comp_optics._get_lamb_avail_from_pts_phidtheta_xixj(
                    cryst=self,
                    lamb=np.full((lambok[kk, :].sum(), 1), ll),
                    n=n,
                    ndtheta=ndtheta,
                    pts=ptsXYZ[:, lambok[kk, :]],
                    use_non_parallelism=use_non_parallelism,
                    return_phidtheta=False,
                    return_xixj=False,
                    strict=strict,
                    det=detbis,
                )
                lambok[kk, lambok[kk, :]] = ~np.isnan(lambi[:, 0])

        # -------
        # plot (optional)
        if plot:
            dax = _plot_optics.CrystalBragg_plot_plasma_domain_at_lamb(
                cryst=self,
                det=det,
                xixj_lim=xixj_lim,
                config=config,
                lamb=lamb,
                pts=pts,
                reseff=[resR, resZ, resPhi],
                lambok=lambok,
                dax=dax,
                plot_as=plot_as,
                lcolor=lcolor,
            )

        # ---------------
        # return
        if return_dax is True:
            return pts, lambok, dax
        else:
            return pts, lambok
    def calc_signal_from_emissivity(
        self,
        emis=None,
        config=None,
        struct=None,
        domain=None,
        res=None,
        det=None,
        xixj_lim=None,
        strict=None,
        bragg=None,
        lamb=None,
        binning=None,
        # for available lamb determination
        ndtheta=None,
        nlamb=None,
        n=None,
        use_non_parallelism=None,
        # plotting
        plot=None,
        vmin=None,
        vmax=None,
        vmin_bin=None,
        vmax_bin=None,
        cmap=None,
        dax=None,
        fs=None,
        dmargin=None,
        tit=None,
        return_dax=None,
    ):
        """ Compute the detector signal from a plasma emissivity model

        Samples the chosen vessel structure on a (R, Z, Phi) grid, keeps
        only points from which each target wavelength can reach the
        (possibly xixj_lim-restricted) detector, evaluates the
        user-provided emissivity callable there, and maps the values onto
        detector coordinates (xi, xj), optionally binned into a 2d image.

        Parameters
        ----------
        emis:       callable, invoked as emis(r=..., z=..., phi=...,
                    lamb=..., t=None); checked, together with struct, lamb
                    and binning, by
                    _check_optics._check_calc_signal_from_emissivity()
        config, struct, domain, res: volume sampling, as in
                    get_plasmadomain_at_lamb()
        det, xixj_lim: detector dict and optional (xi, xj) limits
        bragg, lamb: target bragg angle(s) / wavelength(s)
        binning:    if not False, bins (xi, xj, val) into a 2d image
        ndtheta, n, use_non_parallelism: geometry controls
        plot, vmin, vmax, vmin_bin, vmax_bin, cmap, dax, fs, dmargin,
        tit, return_dax: plotting controls

        Return
        ------
        pts:    (3, npts) kept (R, Z, Phi) points
        val:    (npts, nlamb, ndtheta, 2) emissivity values (nan where
                inaccessible)
        xi, xj: detector coordinates, same shape as val
        binned: 2d binned image, or None if binning is False
        dax:    dict of axes, appended only if return_dax is True
        """

        # ------------
        # check inputs
        (
            struct, lamb, binning,
        ) = _check_optics._check_calc_signal_from_emissivity(
            emis=emis, config=config, struct=struct,
            lamb=lamb, det=det, binning=binning,
        )

        bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
        lamb = self.get_lamb_from_bragg(bragg=bragg, n=n)

        # To be refined if xjlim is narrow
        if ndtheta is None:
            ndtheta = 5

        # To be refined if xilim is narrow
        # NOTE(review): nlamb is defaulted here but never used below
        # (get_lamb_avail_from_pts is called with nlamb=2) — presumably
        # vestigial; confirm
        if nlamb is None:
            nlamb = 11

        # NOTE(review): strict is defaulted here but the detector
        # intersection below always runs with strict=True hard-coded —
        # confirm whether this arg is meant to gate it
        if strict is None:
            strict = True

        if plot is None:
            plot = True

        if return_dax is None:
            return_dax = plot is True

        # -------------
        # sample volume: regular (R, Z, Phi) grid of the chosen structure
        (
            pts, dV, ind, (resR, resZ, resPhi),
        ) = config.dStruct['dObj']['Ves'][struct].get_sampleV(
            res=res,
            domain=domain,
            returnas='(R, Z, Phi)',
        )

        # ------------------------------
        # check access from crystal only

        # (R, Z, Phi) -> cartesian (X, Y, Z) for the geometry routines
        ptsXYZ = np.array([
            pts[0, :]*np.cos(pts[2, :]),
            pts[0, :]*np.sin(pts[2, :]),
            pts[1, :],
        ])

        # per-point accessible wavelength interval: nlamb=2 -> [min, max]
        lamb_access = self.get_lamb_avail_from_pts(
            pts=ptsXYZ,
            nlamb=2,
            use_non_parallelism=use_non_parallelism,
            return_phidtheta=False,
            return_xixj=False,
            strict=False,
        )

        # True where each target lamb lies inside the accessible interval
        lambok = np.zeros((lamb.size, pts.shape[1]), dtype=bool)
        for ii, ll in enumerate(lamb):
            lambok[ii, :] = (
                (lamb_access[:, 0] <= ll) & (ll <= lamb_access[:, 1])
            )

        # ---------------
        # refactor pts and lambok: drop points accessible at no target lamb
        indok = np.any(lambok, axis=0)
        pts = pts[:, indok]
        ptsXYZ = ptsXYZ[:, indok]
        lambok = lambok[:, indok]

        # ---------------
        # check strict

        # det vs detbis if xixj_lim
        # NOTE(review): dict(det) raises TypeError if det is None — det is
        # presumably required by this method; confirm
        detbis = dict(det)
        if xixj_lim is not None:
            # rectangular outline from (xi, xj) limits, closed by
            # repeating the first corner
            detbis['outline'] = np.array([
                np.r_[
                    xixj_lim[0][0],
                    xixj_lim[0][1]*np.r_[1, 1],
                    xixj_lim[0][0],
                ],
                np.r_[
                    xixj_lim[1][0]*np.r_[1, 1],
                    xixj_lim[1][1]*np.r_[1, 1],
                ],
            ])
            detbis['outline'] = np.concatenate(
                (detbis['outline'], detbis['outline'][:, 0:1]),
                axis=1,
            )

        # intersection with detbis
        # outputs are (npts, nlamb, ndtheta, 2), nan where inaccessible
        shape = tuple(np.r_[pts.shape[1], lamb.size, ndtheta, 2])
        xi = np.full(shape, np.nan)
        xj = np.full(shape, np.nan)
        val = np.full(shape, np.nan)
        for kk, ll in enumerate(lamb):
            (
                lambi, xii, xji,
            ) = _comp_optics._get_lamb_avail_from_pts_phidtheta_xixj(
                cryst=self,
                lamb=np.full((lambok[kk, :].sum(), 1), ll),
                n=n,
                ndtheta=ndtheta,
                pts=ptsXYZ[:, lambok[kk, :]],
                use_non_parallelism=use_non_parallelism,
                return_phidtheta=False,
                return_xixj=True,
                strict=True,
                det=detbis,
            )
            # iok: valid solutions among candidates; iokf: their global
            # indices in the refactored pts arrays
            iok = ~np.isnan(lambi[:, 0])
            iokf = lambok[kk, :].nonzero()[0][iok]
            lambok[kk, lambok[kk, :]] = iok
            xi[iokf, kk, :, :] = xii[iok, 0, :, :]
            xj[iokf, kk, :, :] = xji[iok, 0, :, :]
            # evaluate emissivity at the valid points, broadcast over
            # (ndtheta, 2)
            val[iokf, kk, :, :] = emis(
                r=pts[0, iokf],
                z=pts[1, iokf],
                phi=pts[2, iokf],
                lamb=lamb[kk:kk+1],
                t=None,
            )[:, 0, None, None]

        # -------
        # Optional binning of (xi, xj, val) into a 2d mean image
        binned = None
        if binning is not False:
            iok = np.isfinite(val)
            binned = scpstats.binned_statistic_2d(
                xi[iok].ravel(),
                xj[iok].ravel(),
                val[iok].ravel(),
                statistic='mean',
                bins=binning,
                expand_binnumbers=False,
            )[0]

        # -------
        # plot (optional)
        if plot:
            dax = _plot_optics.CrystalBragg_plot_signal_from_emissivity(
                cryst=self,
                det=det,
                xixj_lim=xixj_lim,
                config=config,
                lamb=lamb,
                pts=pts,
                reseff=[resR, resZ, resPhi],
                xi=xi,
                xj=xj,
                val=val,
                lambok=lambok,
                binning=binning,
                binned=binned,
                # plotting
                vmin=vmin,
                vmax=vmax,
                vmin_bin=vmin_bin,
                vmax_bin=vmax_bin,
                cmap=cmap,
                dax=dax,
                fs=fs,
                dmargin=dmargin,
                tit=tit,
            )

        # ---------------
        # return
        if return_dax is True:
            return pts, val, xi, xj, binned, dax
        else:
            return pts, val, xi, xj, binned
@staticmethod
def fit1d_dinput(
dlines=None, dconstraints=None, dprepare=None,
data=None, lamb=None,
mask=None, domain=None, pos=None, subset=None,
same_spectrum=None, same_spectrum_dlamb=None,
focus=None, valid_fraction=None, valid_nsigma=None,
focus_half_width=None, valid_return_fract=None,
):
""" Return a formatted dict of lines and constraints
To be fed to _fit12d.multigausfit1d_from_dlines()
Provides a user-friendly way of defining constraints
"""
import tofu.spectro._fit12d as _fit12d
return _fit12d.fit1d_dinput(
dlines=dlines, dconstraints=dconstraints, dprepare=dprepare,
data=data, lamb=lamb,
mask=mask, domain=domain, pos=pos, subset=subset,
same_spectrum=same_spectrum,
same_spectrum_dlamb=same_spectrum_dlamb,
focus=focus, valid_fraction=valid_fraction,
valid_nsigma=valid_nsigma, focus_half_width=focus_half_width,
valid_return_fract=valid_return_fract)
def fit1d(
self,
# Input data kwdargs
data=None, lamb=None,
dinput=None, dprepare=None, dlines=None, dconstraints=None,
mask=None, domain=None, subset=None, pos=None,
same_spectrum=None, same_spectrum_dlamb=None,
focus=None, valid_fraction=None, valid_nsigma=None,
focus_half_width=None,
# Optimization kwdargs
dx0=None, dscales=None, x0_scale=None, bounds_scale=None,
method=None, tr_solver=None, tr_options=None, max_nfev=None,
xtol=None, ftol=None, gtol=None,
loss=None, verbose=None, chain=None, jac=None, showonly=None,
# Results extraction kwdargs
amp=None, coefs=None, ratio=None,
Ti=None, width=None, vi=None, shift=None,
pts_lamb_total=None, pts_lamb_detail=None,
# Saving and plotting kwdargs
save=None, name=None, path=None,
plot=None, fs=None, dmargin=None,
tit=None, wintit=None, returnas=None,
):
# ----------------------
# Get dinput for 1d fitting from dlines, dconstraints, dprepare...
if dinput is None:
dinput = self.fit1d_dinput(
dlines=dlines, dconstraints=dconstraints, dprepare=dprepare,
data=data, lamb=lamb,
mask=mask, domain=domain, pos=pos, subset=subset,
focus=focus, valid_fraction=valid_fraction,
valid_nsigma=valid_nsigma, focus_half_width=focus_half_width,
same_spectrum=same_spectrum,
same_spectrum_dlamb=same_spectrum_dlamb)
# ----------------------
# return
import tofu.spectro._fit12d as _fit12d
return _fit12d.fit1d(
# Input data kwdargs
data=data, lamb=lamb,
dinput=dinput, dprepare=dprepare,
dlines=dlines, dconstraints=dconstraints,
mask=mask, domain=domain, subset=subset, pos=pos,
# Optimization kwdargs
method=method, tr_solver=tr_solver, tr_options=tr_options,
xtol=xtol, ftol=ftol, gtol=gtol,
max_nfev=max_nfev, loss=loss, chain=chain,
dx0=dx0, x0_scale=x0_scale, bounds_scale=bounds_scale,
jac=jac, verbose=verbose,
save=save, name=name, path=path,
amp=amp, coefs=coefs, ratio=ratio,
Ti=Ti, width=width, vi=vi, shift=shift,
pts_lamb_total=pts_lamb_total,
pts_lamb_detail=pts_lamb_detail,
plot=plot, fs=fs, wintit=wintit, tit=tit)
@staticmethod
def fit1d_extract(
dfit1d=None,
amp=None, coefs=None, ratio=None,
Ti=None, width=None,
vi=None, shift=None,
pts_lamb_total=None, pts_lamb_detail=None,
):
import tofu.spectro._fit12d as _fit12d
return _fit12d.fit1d_extract(
dfit1d=dfit,
amp=amp, coefs=coefs, ratio=ratio,
Ti=Ti, width=width,
vi=vi, shift=shift,
pts_lamb_total=pts_lamb_total, pts_lamb_detail=pts_lamb_detail)
    def fit1d_from2d(self):
        """ Useful for optimizing detector or crystal position

        Given a set of 2d images on a detector
        Transform the 2d (xi, xj) image into (lamb, phi)
        Slice nphi 1d spectra
        Fit them using a dict of reference lines (dlines)
        Optionally provide constraints for the fitting
        Return the vertical profiles of the wavelength shift of each line
        To be used as input for a cost function and optimization

        1d fitting is used instead of 2d because:
            - faster (for optimization)
            - does not require a choice of nbsplines
            - easier to understand and decide for user

        NOTE(review): work-in-progress stub — the method takes no
        arguments, yet its body references many undefined names (lphi,
        dprepare, data, xi, xj, n, det, dtheta, psi, mask, domain, pos,
        binning, lphi_tol, dinput, dlines, dconstraints, deg, knots,
        nbsplines) and ends with 'pass', so calling it raises NameError
        as written. The intended signature still has to be defined.
        """

        # Check / format inputs
        # NOTE(review): 'lphi' is not defined in this scope -> NameError
        if lphi is None:
            msg = ("Arg lphi must be provided !")
            raise Exception(msg)

        # ----------------------
        # Prepare input data
        # (geometrical transform, domain, binning, subset, noise...)
        if dprepare is None:
            dprepare = self.fit2d_prepare(
                data=data, xi=xi, xj=xj, n=n,
                det=det, dtheta=dtheta, psi=psi,
                mask=mask, domain=domain,
                pos=pos, binning=binning,
                nbsplines=False, subset=False,
                lphi=lphi, lphi_tol=lphi_tol)

        # ----------------------
        # Get dinput for 2d fitting from dlines, and dconstraints
        if dinput is None:
            dinput = self.fit2d_dinput(
                dlines=dlines, dconstraints=dconstraints,
                deg=deg, knots=knots, nbsplines=nbsplines,
                domain=dprepare['domain'],
                dataphi1d=dprepare['dataphi1d'], phi1d=dprepare['phi1d'])

        # ----------------------
        # fit
        # NOTE(review): result is discarded and the method returns None
        out = self.fit1d(
            xi=None, xj=None, data=None, mask=None,
            det=None, dtheta=None, psi=None, n=None,
            nlambfit=None, nphifit=None,
            lambmin=None, lambmax=None,
            dlines=None, spect1d=None,
            dconstraints=None, dx0=None,
            same_spectrum=None, dlamb=None,
            double=None,
            dscales=None, x0_scale=None, bounds_scale=None,
            method=None, max_nfev=None,
            xtol=None, ftol=None, gtol=None,
            loss=None, verbose=0, chain=None,
            jac=None, showonly=None,
            plot=None, fs=None, dmargin=None,
            tit=None, wintit=None, returnas=None,
        )
        pass
def fit2d_dinput(
self, dlines=None, dconstraints=None, dprepare=None,
data=None, xi=None, xj=None, n=None,
det=None, dtheta=None, psi=None,
mask=None, domain=None, pos=None, binning=None, subset=None,
# lphi=None, lphi_tol=None,
deg=None, knots=None, nbsplines=None,
focus=None, valid_fraction=None, valid_nsigma=None,
focus_half_width=None, valid_return_fract=None,
):
""" Return a formatted dict of lines and constraints
To be fed to _fit12d.multigausfit1d_from_dlines()
Provides a user-friendly way of defining constraints
"""
import tofu.spectro._fit12d as _fit12d
if dprepare is None:
# ----------------------
# Geometrical transform
xi, xj, (xii, xjj) = _comp_optics._checkformat_xixj(xi, xj)
nxi = xi.size if xi is not None else np.unique(xii).size
nxj = xj.size if xj is not None else np.unique(xjj).size
# Compute lamb / phi
bragg, phi, lamb = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
xi=xii, xj=xjj, det=det,
dtheta=dtheta, psi=psi,
use_non_parallelism=use_non_parallelism,
n=n,
grid=True,
return_lamb=True,
)
# ----------------------
# Prepare input data (domain, binning, subset, noise...)
dprepare = _fit12d.multigausfit2d_from_dlines_prepare(
data, lamb, phi,
mask=mask, domain=domain,
pos=pos, binning=binning,
nbsplines=nbsplines, subset=subset,
nxi=nxi, nxj=nxj,
) # , lphi=lphi, lphi_tol=lphi_tol)
return _fit12d.fit2d_dinput(
dlines=dlines, dconstraints=dconstraints, dprepare=dprepare,
deg=deg, knots=knots, nbsplines=nbsplines,
focus=focus, valid_fraction=valid_fraction,
valid_nsigma=valid_nsigma, focus_half_width=focus_half_width,
valid_return_fract=valid_return_fract)
    def fit2d(
        self,
        # Input data kwdargs
        data=None, xi=None, xj=None,
        det=None, dtheta=None, psi=None, n=None,
        dinput=None, dprepare=None, dlines=None, dconstraints=None,
        mask=None, domain=None, subset=None, pos=None, binning=None,
        focus=None, valid_fraction=None, valid_nsigma=None,
        focus_half_width=None,
        deg=None, knots=None, nbsplines=None,
        # Optimization kwdargs
        dx0=None, dscales=None, x0_scale=None, bounds_scale=None,
        method=None, tr_solver=None, tr_options=None, max_nfev=None,
        xtol=None, ftol=None, gtol=None,
        loss=None, verbose=None, chain=None, jac=None, showonly=None,
        predeclare=None, debug=None,
        # Results extraction kwdargs
        amp=None, coefs=None, ratio=None,
        Ti=None, width=None, vi=None, shift=None,
        pts_lamb_total=None, pts_lamb_detail=None,
        # Saving and plotting kwdargs
        save=None, name=None, path=None,
        plot=None, fs=None, dmargin=None,
        tit=None, wintit=None, returnas=None,
    ):
        # npts=None, dax=None,
        # spect1d=None, nlambfit=None,
        # plotmode=None, angunits=None, indspect=None,
        # cmap=None, vmin=None, vmax=None):
        """ Perform 2d fitting of a 2d spectrometre image

        Fit the spectrum by a sum of gaussians
        Modulate each gaussian parameters by bsplines in the spatial direction

        data must be provided in shape (nt, nxi, nxj), where:
            - nt is the number of time steps
            - nxi is the nb. of pixels in the horizontal / spectral direction
            - nxj is the nb. of pixels in the vertical / spacial direction

        Builds dinput via self.fit2d_dinput() if not provided, then
        delegates to tofu.spectro._fit12d.fit2d()

        NOTE(review): as written the delegate call below raises NameError
        ('lamb' and 'phi' are not defined in this scope) — they presumably
        should come from dinput['dprepare']; confirm against
        _fit12d.fit2d()'s signature and fix. Several accepted kwargs
        (dscales, showonly, predeclare, debug, focus*, amp..., fs, dmargin,
        tit, wintit, returnas) are also not forwarded — confirm whether
        intentional.
        """

        # ----------------------
        # Geometrical transform in dprepare
        if dinput is None:
            dinput = self.fit2d_dinput(
                dlines=dlines, dconstraints=dconstraints, dprepare=dprepare,
                data=data, xi=xi, xj=xj, n=n,
                det=det, dtheta=dtheta, psi=psi,
                mask=mask, domain=domain,
                pos=pos, binning=binning, subset=subset,
                deg=deg, knots=knots, nbsplines=nbsplines,
                focus=focus, valid_fraction=valid_fraction,
                valid_nsigma=valid_nsigma, focus_half_width=focus_half_width)

        # ----------------------
        # return
        import tofu.spectro._fit12d as _fit12d
        # NOTE(review): 'lamb' and 'phi' below are undefined -> NameError
        return _fit12d.fit2d(
            dinput=dinput, dprepare=dprepare,
            dlines=dlines, dconstraints=dconstraints,
            lamb=lamb, phi=phi, data=data, mask=mask,
            nxi=dinput['dprepare']['nxi'], nxj=dinput['dprepare']['nxj'],
            domain=domain, pos=pos, binning=binning, subset=subset,
            deg=deg, knots=knots, nbsplines=nbsplines,
            method=method, tr_solver=tr_solver, tr_options=tr_options,
            xtol=xtol, ftol=ftol, gtol=gtol,
            max_nfev=max_nfev, loss=loss, chain=chain,
            dx0=dx0, x0_scale=x0_scale, bounds_scale=bounds_scale,
            jac=jac, verbose=verbose,
            save=save, name=name, path=path,
            plot=plot)
@staticmethod
def fit2d_extract(dfit2d=None,
amp=None, Ti=None, vi=None,
pts_phi=None, npts_phi=None,
pts_lamb_phi_total=None,
pts_lamb_phi_detail=None):
import tofu.spectro._fit12d as _fit12d
return _fit12d.fit2d_extract_data(
dfit2d=dfit2d,
amp=amp, Ti=Ti, vi=vi,
pts_phi=pts_phi, npts_phi=npts_phi,
pts_lamb_phi_total=pts_lamb_phi_total,
pts_lamb_phi_detail=pts_lamb_phi_detail)
def fit2d_plot(self, dfit2d=None, ratio=None,
dax=None, plotmode=None, angunits=None,
cmap=None, vmin=None, vmax=None,
dmargin=None, tit=None, wintit=None, fs=None):
dout = self.fit2d_extract(
dfit2d,
amp=amp, Ti=Ti, vi=vi,
pts_lamb_phi_total=pts_lamb_phi_total,
pts_lamb_phi_detail=pts_lamb_phi_detail)
return _plot_optics.CrystalBragg_plot_data_fit2d(
dfit2d=dfit2d, dout=dout, ratio=ratio,
dax=dax, plotmode=plotmode, angunits=angunits,
cmap=cmap, vmin=vmin, vmax=vmax,
dmargin=dmargin, tit=tit, wintit=wintit, fs=fs)
def noise_analysis(
self, data=None, xi=None, xj=None, n=None,
det=None, dtheta=None, psi=None,
mask=None, valid_fraction=None, nxerrbin=None,
margin=None, domain=None, nlamb=None,
deg=None, knots=None, nbsplines=None,
loss=None, max_nfev=None,
xtol=None, ftol=None, gtol=None,
method=None, tr_solver=None, tr_options=None,
verbose=None, plot=None,
ms=None, dcolor=None,
dax=None, fs=None, dmargin=None,
wintit=None, tit=None, sublab=None,
save_fig=None, name_fig=None, path_fig=None,
fmt=None, return_dax=None,
):
# ----------------------
# Geometrical transform
bragg, phi, lamb = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
xi=xi, xj=xj, det=det,
dtheta=dtheta, psi=psi,
use_non_parallelism=use_non_parallelism,
n=n,
grid=True,
return_lamb=True,
)
import tofu.spectro._fit12d as _fit12d
return _fit12d.noise_analysis_2d(
data, lamb, phi,
mask=mask, valid_fraction=valid_fraction,
margin=margin, nxerrbin=nxerrbin,
nlamb=nlamb, deg=deg, knots=knots, nbsplines=nbsplines,
loss=loss, max_nfev=max_nfev,
xtol=xtol, ftol=ftol, gtol=gtol,
method=method, tr_solver=tr_solver, tr_options=tr_options,
verbose=verbose, plot=plot,
ms=ms, dcolor=dcolor,
dax=dax, fs=fs, dmargin=dmargin,
wintit=wintit, tit=tit, sublab=sublab,
save_fig=save_fig, name_fig=name_fig, path_fig=path_fig,
fmt=fmt, return_dax=return_dax)
@staticmethod
def noise_analysis_plot(
dnoise=None, margin=None, valid_fraction=None,
ms=None, dcolor=None,
dax=None, fs=None, dmargin=None,
wintit=None, tit=None, sublab=None,
save=None, name=None, path=None, fmt=None,
):
import tofu.spectro._plot as _plot_spectro
return _plot_spectro.plot_noise_analysis(
dnoise=dnoise, margin=margin, valid_fraction=valid_fraction,
ms=ms, dcolor=dcolor,
dax=dax, fs=fs, dmargin=dmargin,
wintit=wintit, tit=tit, sublab=sublab,
save=save, name=name, path=path, fmt=fmt)
def noise_analysis_scannbs(
self, data=None, xi=None, xj=None, n=None,
det=None, dtheta=None, psi=None,
mask=None, nxerrbin=None,
domain=None, nlamb=None,
deg=None, knots=None, nbsplines=None, lnbsplines=None,
loss=None, max_nfev=None,
xtol=None, ftol=None, gtol=None,
method=None, tr_solver=None, tr_options=None,
verbose=None, plot=None,
ms=None, dax=None, fs=None, dmargin=None,
wintit=None, tit=None, sublab=None,
save_fig=None, name_fig=None, path_fig=None,
fmt=None, return_dax=None,
):
# ----------------------
# Geometrical transform
bragg, phi, lamb = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
xi=xi, xj=xj, det=det,
dtheta=0, psi=0,
use_non_parallelism=use_non_parallelism,
n=n,
grid=True,
return_lamb=True,
)
import tofu.spectro._fit12d as _fit12d
return _fit12d.noise_analysis_2d_scannbs(
data, lamb, phi,
mask=mask, nxerrbin=nxerrbin, nlamb=nlamb,
deg=deg, knots=knots, nbsplines=nbsplines, lnbsplines=lnbsplines,
loss=loss, max_nfev=max_nfev,
xtol=xtol, ftol=ftol, gtol=gtol,
method=method, tr_solver=tr_solver, tr_options=tr_options,
verbose=verbose, plot=plot,
ms=ms, dax=dax, fs=fs, dmargin=dmargin,
wintit=wintit, tit=tit, sublab=sublab,
save_fig=save_fig, name_fig=name_fig, path_fig=path_fig,
fmt=fmt, return_dax=return_dax)
@staticmethod
def noise_analysis_scannbs_plot(
dnoise_scan=None, ms=None,
dax=None, fs=None, dmargin=None,
wintit=None, tit=None, sublab=None,
save=None, name=None, path=None, fmt=None,
):
import tofu.spectro._plot as _plot_spectro
return _plot_spectro.plot_noise_analysis_scannbs(
dnoise=dnoise_scan, ms=ms,
dax=dax, fs=fs, dmargin=dmargin,
wintit=wintit, tit=tit, sublab=sublab,
save=save, name=name, path=path, fmt=fmt)
| 35.852526 | 81 | 0.521275 |
import sys
import os
import warnings
import copy
import numpy as np
import scipy.interpolate as scpinterp
import scipy.stats as scpstats
import datetime as dtm
import matplotlib.pyplot as plt
import matplotlib as mpl
from tofu import __version__ as __version__
import tofu.pathfile as tfpf
import tofu.utils as utils
from . import _def as _def
from . import _GG as _GG
from . import _core
from . import _check_optics
from . import _comp_optics as _comp_optics
from . import _plot_optics as _plot_optics
import tofu.spectro._rockingcurve as _rockingcurve
__all__ = ['CrystalBragg']
_Type = 'Tor'
_NTHREADS = 16
_RETURN_COPY = False
_USE_NON_PARALLELISM = True
class CrystalBragg(utils.ToFuObject):
_ddef = {
'Id': {
'shot': 0, 'Exp': 'dummy', 'Diag': 'dummy',
'include': [
'Mod', 'Cls', 'Exp', 'Diag', 'Name', 'shot', 'version',
],
},
'dgeom': {'Type': 'sph', 'Typeoutline': 'rect'},
'dmat': {},
'dbragg': {'braggref': np.pi/4.},
'dmisc': {'color': 'k'},
}
_dplot = {'cross':{'Elt':'P',
'dP':{'color':'k','lw':2},
'dI':{'color':'k','ls':'--','marker':'x','ms':8,'mew':2},
'dBs':{'color':'b','ls':'--','marker':'x','ms':8,'mew':2},
'dBv':{'color':'g','ls':'--','marker':'x','ms':8,'mew':2},
'dVect':{'color':'r','scale':10}},
'hor':{'Elt':'P',
'dP':{'color':'k','lw':2},
'dI':{'color':'k','ls':'--'},
'dBs':{'color':'b','ls':'--'},
'dBv':{'color':'g','ls':'--'},
'Nstep':50},
'3d':{}}
def __init_subclass__(cls, color='k', **kwdargs):
super(CrystalBragg,cls).__init_subclass__(**kwdargs)
cls._ddef = copy.deepcopy(CrystalBragg._ddef)
cls._dplot = copy.deepcopy(CrystalBragg._dplot)
cls._set_color_ddef(cls._color)
@classmethod
def _set_color_ddef(cls, color):
cls._ddef['dmisc']['color'] = mpl.colors.to_rgba(color)
def __init__(self, dgeom=None, dmat=None, dbragg=None,
Id=None, Name=None, Exp=None, Diag=None, shot=None,
fromdict=None, sep=None,
SavePath=os.path.abspath('./'),
SavePath_Include=tfpf.defInclude, color=None):
if sys.version[0]=='2':
self._dstrip = utils.ToFuObjectBase._dstrip.copy()
self.__class__._strip_init()
self._dplot = copy.deepcopy(self.__class__._dplot)
kwdargs = locals()
del kwdargs['self']
super(CrystalBragg,self).__init__(**kwdargs)
def _reset(self):
super(CrystalBragg,self)._reset()
self._dgeom = dict.fromkeys(self._get_keys_dgeom())
self._dmat = dict.fromkeys(self._get_keys_dmat())
self._dbragg = dict.fromkeys(self._get_keys_dbragg())
self._dmisc = dict.fromkeys(self._get_keys_dmisc())
@classmethod
def _checkformat_inputs_Id(cls, Id=None, Name=None,
Exp=None, Diag=None, shot=None, Type=None,
include=None,
**kwdargs):
if Id is not None:
assert isinstance(Id,utils.ID)
Name, Exp, Type = Id.Name, Id.Exp, Id.Type
if Type is None:
Type = cls._ddef['dgeom']['Type']
if Exp is None:
Exp = cls._ddef['Id']['Exp']
if Diag is None:
Diag = cls._ddef['Id']['Diag']
if shot is None:
shot = cls._ddef['Id']['shot']
if include is None:
include = cls._ddef['Id']['include']
dins = {'Name':{'var':Name, 'cls':str},
'Exp': {'var':Exp, 'cls':str},
'Diag': {'var':Diag, 'cls':str},
'shot': {'var':shot, 'cls':int},
'Type': {'var':Type, 'in':['sph']},
'include':{'var':include, 'listof':str}}
dins, err, msg = cls._check_InputsGeneric(dins)
if err:
raise Exception(msg)
kwdargs.update({'Name':Name, 'shot':shot,
'Exp':Exp, 'Diag':Diag, 'Type':Type,
'include':include})
return kwdargs
rgs
@staticmethod
def _get_largs_dmat():
largs = ['dmat']
return largs
@staticmethod
def _get_largs_dbragg():
largs = ['dbragg']
return largs
@staticmethod
def _get_largs_dmisc():
largs = ['color']
return largs
ummit', 'center', 'extenthalf', 'surface',
'nin', 'nout', 'e1', 'e2', 'rcurve',
'move', 'move_param', 'move_kwdargs']
return lk
@staticmethod
def _get_keys_dmat():
lk = ['formula', 'density', 'symmetry',
'lengths', 'angles', 'cut', 'd',
'alpha', 'beta', 'nin', 'nout', 'e1', 'e2']
return lk
@staticmethod
def _get_keys_dbragg():
lk = ['rockingcurve', 'lambref', 'braggref']
return lk
@staticmethod
def _get_keys_dmisc():
lk = ['color']
return lk
allkwds = dict(locals(), **kwdargs)
largs = self._get_largs_dgeom()
kwds = self._extract_kwdargs(allkwds, largs)
self.set_dgeom(**kwds)
largs = self._get_largs_dmat()
kwds = self._extract_kwdargs(allkwds, largs)
self.set_dmat(**kwds)
largs = self._get_largs_dbragg()
kwds = self._extract_kwdargs(allkwds, largs)
self.set_dbragg(**kwds)
largs = self._get_largs_dmisc()
kwds = self._extract_kwdargs(allkwds, largs)
self._set_dmisc(**kwds)
self._dstrip['strip'] = 0
dgeom=dgeom, ddef=self._ddef['dgeom'],
valid_keys=self._get_keys_dgeom(),
)
if self._dgeom['move'] is not None:
self.set_move(
move=self._dgeom['move'],
param=self._dgeom['move_param'],
**self._dgeom['move_kwdargs'],
)
def set_dmat(self, dmat=None):
self._dmat = _check_optics._checkformat_dmat(
dmat=dmat, dgeom=self._dgeom,
ddef=self._ddef['dmat'],
valid_keys=self._get_keys_dmat()
)
def set_dbragg(self, dbragg=None):
self._dbragg = _check_optics._checkformat_dbragg(
dbragg=dbragg,
ddef=self._ddef['dbragg'],
valid_keys=self._get_keys_dbragg(),
dmat=self._dmat,
)
def _set_color(self, color=None):
color = _check_optics._checkformat_inputs_dmisc(
color=color, ddef=self._ddef,
)
self._dmisc['color'] = color
self._dplot['cross']['dP']['color'] = color
self._dplot['hor']['dP']['color'] = color
def _set_dmisc(self, color=None):
self._set_color(color)
bject._strip_dict(self._dgeom, lkeep=lkeep)
def _strip_dmat(self, lkeep=None):
lkeep = self._get_keys_dmat()
utils.ToFuObject._strip_dict(self._dmat, lkeep=lkeep)
def _strip_dbragg(self, lkeep=None):
lkeep = self._get_keys_dbragg()
utils.ToFuObject._strip_dict(self._dbragg, lkeep=lkeep)
def _strip_dmisc(self, lkeep=['color']):
utils.ToFuObject._strip_dict(self._dmisc, lkeep=lkeep)
tils.ToFuObject._test_Rebuild(self._dgeom, lkeep=lkeep)
if reset:
utils.ToFuObject._check_Fields4Rebuild(self._dgeom,
lkeep=lkeep, dname='dgeom')
self._set_dgeom(dgeom=self._dgeom)
def _rebuild_dmat(self, lkeep=None):
lkeep = self._get_keys_dmat()
reset = utils.ToFuObject._test_Rebuild(self._dmat, lkeep=lkeep)
if reset:
utils.ToFuObject._check_Fields4Rebuild(self._dmat,
lkeep=lkeep, dname='dmat')
self.set_dmat(self._dmat)
def _rebuild_dbragg(self, lkeep=None):
lkeep = self._get_keys_dbragg()
reset = utils.ToFuObject._test_Rebuild(self._dbragg, lkeep=lkeep)
if reset:
utils.ToFuObject._check_Fields4Rebuild(self._dbragg,
lkeep=lkeep, dname='dbragg')
self.set_dbragg(self._dbragg)
def _rebuild_dmisc(self, lkeep=['color']):
reset = utils.ToFuObject._test_Rebuild(self._dmisc, lkeep=lkeep)
if reset:
utils.ToFuObject._check_Fields4Rebuild(self._dmisc,
lkeep=lkeep, dname='dmisc')
self._set_dmisc(color=self.dmisc['color'])
ax(cls._dstrip['allowed'])
doc = """
1: Remove nothing"""
doc = utils.ToFuObjectBase.strip.__doc__.format(doc,nMax)
if sys.version[0]=='2':
cls.strip.__func__.__doc__ = doc
else:
cls.strip.__doc__ = doc
def strip(self, strip=0):
super(CrystalBragg, self).strip(strip=strip)
def _strip(self, strip=0):
if strip==0:
self._rebuild_dgeom()
self._rebuild_dmat()
self._rebuild_dbragg()
self._rebuild_dmisc()
else:
self._strip_dgeom()
self._strip_dmat()
self._strip_dbragg()
self._strip_dmisc()
def _to_dict(self):
dout = {'dgeom':{'dict':self._dgeom, 'lexcept':None},
'dmat':{'dict':self._dmat, 'lexcept':None},
'dbragg':{'dict':self._dbragg, 'lexcept':None},
'dmisc':{'dict':self._dmisc, 'lexcept':None},
'dplot':{'dict':self._dplot, 'lexcept':None}}
return dout
def _from_dict(self, fd):
self._dgeom.update(**fd.get('dgeom', {}))
self._dmat.update(**fd.get('dmat', {}))
self._dbragg.update(**fd.get('dbragg', {}))
self._dmisc.update(**fd.get('dmisc', {}))
self._dplot.update(**fd.get('dplot', {}))
@property
def Type(self):
return self._Id.Type
@property
def dgeom(self):
return self._dgeom
@property
def dmat(self):
return self._dmat
@property
def dbragg(self):
return self._dbragg
@property
def dmisc(self):
return self._dmisc
@property
def summit(self):
return self._dgeom['summit']
@property
def center(self):
return self._dgeom['center']
@property
def ismobile(self):
return self._dgeom['move'] not in [None, False]
@property
def rockingcurve(self):
if self._dbragg.get('rockingcurve') is not None:
if self._dbragg['rockingcurve'].get('type') is not None:
return self._dbragg['rockingcurve']
raise Exception("rockingcurve was not set!")
def get_unit_vectors(self, use_non_parallelism=None):
if use_non_parallelism is None:
use_non_parallelism = _USE_NON_PARALLELISM
if use_non_parallelism is True:
nout = self._dmat['nout']
e1 = self._dmat['e1']
e2 = self._dmat['e2']
else:
nout = self._dgeom['nout']
e1 = self._dgeom['e1']
e2 = self._dgeom['e2']
return nout, e1, e2, use_non_parallelism
def set_color(self, col):
self._set_color(col)
def get_color(self):
return self._dmisc['color']
def get_summary(self, sep=' ', line='-', just='l',
table_sep=None, verb=True, return_=False):
col0 = [
'formula', 'symmetry', 'cut', 'density',
'd (A)',
'bragg({:9.6} A) (deg)'.format(self._dbragg['lambref']*1e10),
'Type', 'outline', 'surface (cm²)', 'rcurve', 'rocking curve',
]
ar0 = [self._dmat['formula'], self._dmat['symmetry'],
str(self._dmat['cut']), str(self._dmat['density']),
'{0:5.3f}'.format(self._dmat['d']*1.e10),
str(self._dbragg['braggref']*180./np.pi),
self._dgeom['Type'], self._dgeom['Typeoutline'],
'{0:5.1f}'.format(self._dgeom['surface']*1.e4),
'{0:6.3f}'.format(self._dgeom['rcurve'])]
try:
ar0.append(self.rockingcurve['type'])
except Exception as err:
ar0.append('None')
col1 = ['half-extent', 'summit', 'center', 'nout', 'e1',
'alpha', 'beta']
ar1 = [
str(np.round(self._dgeom['extenthalf'], decimals=3)),
str(np.round(self._dgeom['summit'], decimals=2)),
str(np.round(self._dgeom['center'], decimals=2)),
str(np.round(self._dmat['nout'], decimals=3)),
str(np.round(self._dmat['e1'], decimals=3)),
str(np.round(self._dmat['alpha'], decimals=6)),
str(np.round(self._dmat['beta'], decimals=6)),
]
if self._dgeom.get('move') not in [None, False]:
col1 += ['move', 'param']
ar1 += [self._dgeom['move'],
str(np.round(self._dgeom['move_param'], decimals=5))]
if self._dmisc.get('color') is not None:
col1.append('color')
ar1.append(str(self._dmisc['color']))
lcol = [col0, col1]
lar = [ar0, ar1]
return self._get_summary(lar, lcol,
sep=sep, line=line, table_sep=table_sep,
verb=verb, return_=return_)
def _update_or_copy(self, dgeom, pinhole=None,
return_copy=None,
name=None, diag=None, shot=None):
if return_copy is None:
return_copy = _RETURN_COPY
for kk, vv in self._dgeom.items():
if kk not in dgeom.keys():
dgeom[kk] = vv
if return_copy is True:
if name is None:
name = self.Id.Name + 'copy'
if diag is None:
diag = self.Id.Diag
if shot is None:
diag = self.Id.shot
return self.__class__(dgeom=dgeom,
dbragg=self._dbragg,
dmat=self._dmat,
color=self._dmisc['color'],
Exp=self.Id.Exp,
Diag=diag,
Name=name,
shot=shot,
SavePath=self.Id.SavePath)
else:
dgeom0 = self.dgeom
try:
self.set_dgeom(dgeom=dgeom)
self._dmat = _check_optics._checkformat_dmat(
dmat={
k0: v0 for k0, v0 in self._dmat.items()
if k0 not in ['nin', 'nout', 'e1', 'e2']
},
dgeom=self._dgeom,
ddef=self._ddef['dmat'],
valid_keys=self._get_keys_dmat()
)
except Exception as err:
self.set_dgeom(dgeom=dgeom0)
msg = (str(err)
+ "\nAn exception occured during updating\n"
+ " => instance unmoved")
raise Exception(msg)
def _rotate_or_translate(self, func, **kwdargs):
pts = np.array([self._dgeom['summit'], self._dgeom['center']]).T
if 'rotate' in func.__name__:
vect = np.array([
self._dgeom['nout'],
self._dgeom['e1'],
self._dgeom['e2']
]).T
pts, vect = func(pts=pts, vect=vect, **kwdargs)
return {'summit': pts[:, 0], 'center': pts[:, 1],
'nout': vect[:, 0], 'nin': -vect[:, 0],
'e1': vect[:, 1], 'e2': vect[:, 2]}
else:
pts = func(pts=pts, **kwdargs)
return {'summit': pts[:, 0], 'center': pts[:, 1]}
def translate_in_cross_section(self, distance=None, direction_rz=None,
phi=None,
return_copy=None,
diag=None, name=None, shot=None):
if phi is None:
phi = np.arctan2(*self.summit[1::-1])
msg = ("Poloidal plane was not explicitely specified\n"
+ " => phi set to self.summit's phi ({})".format(phi))
warnings.warn(msg)
dgeom = self._rotate_or_translate(
self._translate_pts_poloidal_plane,
phi=phi, direction_rz=direction_rz, distance=distance)
return self._update_or_copy(dgeom,
return_copy=return_copy,
diag=diag, name=name, shot=shot)
def translate_3d(self, distance=None, direction=None,
return_copy=None,
diag=None, name=None, shot=None):
dgeom = self._rotate_or_translate(
self._translate_pts_3d,
direction=direction, distance=distance)
return self._update_or_copy(dgeom,
return_copy=return_copy,
diag=diag, name=name, shot=shot)
def rotate_in_cross_section(self, angle=None, axis_rz=None,
phi=None,
return_copy=None,
diag=None, name=None, shot=None):
if phi is None:
phi = np.arctan2(*self.summit[1::-1])
msg = ("Poloidal plane was not explicitely specified\n"
+ " => phi set to self.summit's phi ({})".format(phi))
warnings.warn(msg)
dgeom = self._rotate_or_translate(
self._rotate_pts_vectors_in_poloidal_plane,
axis_rz=axis_rz, angle=angle, phi=phi)
return self._update_or_copy(dgeom,
return_copy=return_copy,
diag=diag, name=name, shot=shot)
def rotate_around_torusaxis(self, angle=None,
return_copy=None,
diag=None, name=None, shot=None):
dgeom = self._rotate_or_translate(
self._rotate_pts_vectors_around_torusaxis,
angle=angle)
return self._update_or_copy(dgeom,
return_copy=return_copy,
diag=diag, name=name, shot=shot)
def rotate_around_3daxis(self, angle=None, axis=None,
return_copy=None,
diag=None, name=None, shot=None):
dgeom = self._rotate_or_translate(
self._rotate_pts_vectors_around_3daxis,
axis=axis, angle=angle)
return self._update_or_copy(dgeom,
return_copy=return_copy,
diag=diag, name=name, shot=shot)
def set_move(self, move=None, param=None, **kwdargs):
move, param, kwdargs = self._checkformat_set_move(move, param, kwdargs)
self._dgeom['move'] = move
self._dgeom['move_param'] = param
if isinstance(kwdargs, dict) and len(kwdargs) == 0:
kwdargs = None
self._dgeom['move_kwdargs'] = kwdargs
def move(self, param):
param = self._move(param, dictname='_dgeom')
self._dgeom['move_param'] = param
def get_rockingcurve_func(self, lamb=None, n=None):
drock = self.rockingcurve
if drock['type'] == 'tabulated-1d':
if lamb is not None and lamb != drock['lamb']:
msg = ("rocking curve was tabulated only for:\n"
+ "\tlamb = {} m\n".format(lamb)
+ " => Please let lamb=None")
raise Exception(msg)
lamb = drock['lamb']
bragg = self._checkformat_bragglamb(lamb=lamb, n=n)
func = scpinterp.interp1d(drock['dangle'] + bragg, drock['value'],
kind='linear', bounds_error=False,
fill_value=0, assume_sorted=True)
elif drock['type'] == 'tabulated-2d':
lmin, lmax = drock['lamb'].min(), drock['lamb'].max()
if lamb is None:
lamb = drock['lamb']
if lamb < lmin or lamb > lmax:
msg = ("rocking curve was tabulated only in interval:\n"
+ "\tlamb in [{}; {}] m\n".format(lmin, lmax)
+ " => Please set lamb accordingly")
raise Exception(msg)
bragg = self._checkformat_bragglamb(lamb=lamb, n=n)
def func(angle, lamb=lamb, bragg=bragg, drock=drock):
return scpinterp.interp2d(drock['dangle']+bragg, drock['lamb'],
drock['value'], kind='linear',
bounds_error=False, fill_value=0,
assume_sorted=True)(angle, lamb)
else:
raise NotImplementedError
def func(angle, d=d, delta_bragg=delta_bragg,
Rmax=drock['Rmax'], sigma=drock['sigma']):
core = sigma**2/((angle - (bragg+delta_bragg))**2 + sigma**2)
if Rmax is None:
return core/(sigma*np.pi)
else:
return Rmax*core
return func, lamb, bragg
def plot_rockingcurve(self, lamb=None, n=None, sigma=None,
npts=None, color=None, ang_units=None,
dmargin=None, fs=None, ax=None, legend=None):
drock = self.rockingcurve
func, lamb, bragg = self.get_rockingcurve_func(lamb=lamb, n=n)
axtit = 'Rocking curve for ' + self.Id.Name
return _plot_optics.CrystalBragg_plot_rockingcurve(
func=func, bragg=bragg, lamb=lamb,
sigma=sigma, npts=npts,
ang_units=ang_units, axtit=axtit, color=color,
fs=fs, ax=ax, legend=legend)
def compute_rockingcurve(
self, ih=None, ik=None, il=None, lamb=None,
use_non_parallelism=None, na=None,
alpha_limits=None,
therm_exp=None, plot_therm_exp=None,
plot_asf=None, plot_power_ratio=None,
plot_asymmetry=None, plot_cmaps=None,
verb=None, returnas=None,
):
return _rockingcurve.compute_rockingcurve(
ih=ih, ik=ik, il=il, lamb=lamb,
use_non_parallelism=use_non_parallelism, na=na,
alpha_limits=alpha_limits,
therm_exp=therm_exp, plot_therm_exp=plot_therm_exp,
plot_asf=plot_asf, plot_power_ratio=plot_power_ratio,
plot_asymmetry=plot_asymmetry, plot_cmaps=plot_cmaps,
verb=None, returnas=None,
)
def plot_var_temp_changes_wavelengths(
self, ih=None, ik=None, il=None, lambdas=None,
use_non_parallelism=None, na=None,
alpha_limits=None,
therm_exp=None, plot_therm_exp=None,
plot_asf=None, plot_power_ratio=None,
plot_asymmetry=None, plot_cmaps=None,
quantity=None,
curv_radius=None, pixel_size=None,
):
return _rockingcurve.plot_var_temp_changes_wavelengths(
ih=ih, ik=ik, il=il, lambdas=lambdas,
use_non_parallelism=use_non_parallelism, na=na,
alpha_limits=alpha_limits,
therm_exp=therm_exp, plot_therm_exp=plot_therm_exp,
plot_asf=plot_asf, plot_power_ratio=plot_power_ratio,
plot_asymmetry=plot_asymmetry, plot_cmaps=plot_cmaps,
quantity=quantity,
curv_radius=curv_radius, pixel_size=pixel_size,
)
def sample_outline_plot(self, use_non_parallelism=None, res=None):
if self._dgeom['Type'] == 'sph':
if self._dgeom['Typeoutline'] == 'rect':
nout, e1, e2, use_non_parallelism = self.get_unit_vectors(
use_non_parallelism=use_non_parallelism,
)
outline = _comp_optics.CrystBragg_sample_outline_plot_sphrect(
self._dgeom['summit'] - nout*self._dgeom['rcurve'],
nout,
e1,
e2,
self._dgeom['rcurve'],
self._dgeom['extenthalf'],
res,
)
else:
raise NotImplementedError
else:
raise NotImplementedError
return outline
def _checkformat_bragglamb(self, bragg=None, lamb=None, n=None):
lc = [lamb is not None, bragg is not None]
if not any(lc):
lamb = self._dbragg['lambref']
lc[0] = True
assert np.sum(lc) == 1, "Provide lamb xor bragg!"
if lc[0]:
bragg = self.get_bragg_from_lamb(
np.atleast_1d(lamb), n=n,
)
else:
bragg = np.atleast_1d(bragg)
return bragg
def _checkformat_get_Rays_from(self, phi=None, bragg=None):
assert phi is not None
assert bragg is not None
bragg = np.atleast_1d(bragg)
phi = np.atleast_1d(phi)
nrays = max(phi.size, bragg.size)
if not phi.shape == bragg.shape:
if phi.size == 1:
phi = np.full(bragg.shape, phi[0])
elif bragg.size == 1:
bragg = np.full(phi.shape, bragg[0])
else:
msg = "phi and bragg/lamb must have the same shape!\n"
msg += " phi.shape: %s\n"%str(phi.shape)
msg += " bragg/lamb.shape: %s\n"%str(bragg.shape)
raise Exception(msg)
return phi, bragg
def _get_rays_from_cryst(
self,
phi=None, bragg=None,
lamb=None, n=None,
dtheta=None, psi=None,
ntheta=None, npsi=None,
use_non_parallelism=None,
include_summit=None,
grid=None,
):
bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb)
phi, bragg = self._checkformat_get_Rays_from(phi=phi, bragg=bragg)
pts_start, nout, e1, e2 = self.get_local_noute1e2(
dtheta=dtheta, psi=psi,
use_non_parallelism=use_non_parallelism,
ntheta=ntheta, npsi=npsi,
include_summit=include_summit,
)
nin = -nout
if grid is True:
nin = nin[..., None]
e1 = e1[..., None]
e2 = e2[..., None]
else:
assert bragg.shape == nin.shape[1:]
vect = (
np.sin(bragg)*nin
+ np.cos(bragg)*(np.cos(phi)*e1 + np.sin(phi)*e2)
)
return pts_start, vect
    def get_rays_from_cryst(
        self,
        phi=None, bragg=None,
        lamb=None, n=None,
        dtheta=None, psi=None,
        use_non_parallelism=None,
        ntheta=None, npsi=None,
        include_summit=None,
        det=None, config=None, length=None,
        returnas=None,
        return_xixj=None,
        grid=None,
    ):
        """ Return rays leaving the crystal for given (bragg/lamb, phi).

        Rays are built by _get_rays_from_cryst(), then cut at the first
        of: the config (vessel) intersection, the detector plane, or a
        fallback length (default 10.).

        Parameters
        ----------
        returnas : None / str
            'pts' (default): return (pts_start, pts_end);
            '(pts, vect)': return starting points and unit vectors only;
            '(pts, vect, length)': also return the ray lengths.
        return_xixj : None / bool
            If True, also return detector-plane coordinates (xi, xj)
            of the ray impacts (requires det).
        grid : None / bool
            Whether (bragg, phi) are gridded against the crystal surface
            points; if None, guessed from bragg/dtheta shapes.

        Other arguments are forwarded to _get_rays_from_cryst() /
        _checkformat_det().
        """
        # Check inputs / defaults
        if returnas is None:
            returnas = 'pts'
        if return_xixj is None:
            return_xixj = False
        lret = ['(pts, vect, length)', '(pts, vect)', 'pts']
        if returnas not in lret:
            # NOTE(review): the message only lists 2 of the 3 valid options
            msg = (
                "Arg returnas must be in:\n"
                + "\t- '(pts, vect, length)': starting points, unit vector,"
                + " length\n"
                + "\t- 'pts': starting and ending points\n"
            )
            raise Exception(msg)
        det = self._checkformat_det(det)
        if length is None:
            length = 10.
        if grid is None:
            # Guess gridding: shapes differing (or not both arrays) => grid
            try:
                grid = bragg.shape != dtheta.shape
            except Exception as err:
                grid = True

        # Starting points and unit vectors on the crystal surface
        pts_start, vect = self._get_rays_from_cryst(
            phi=phi, bragg=bragg,
            lamb=lamb, n=n,
            dtheta=dtheta, psi=psi,
            use_non_parallelism=use_non_parallelism,
            ntheta=ntheta, npsi=npsi,
            include_summit=include_summit,
            grid=grid,
        )
        if returnas == '(pts, vect)':
            return pts_start, vect

        # Candidate ray lengths from config, det and fallback length;
        # the shortest (nanmin) wins for each ray
        vshape = vect.shape
        dk = {
            k0: np.full(vshape[1:], np.nan)
            for k0 in ['config', 'det', 'length']
        }
        xi, xj = None, None
        if config is not None:
            # Flatten to (3, nrays) as expected by _core.Rays
            if vshape != pts_start.shape:
                if len(vshape) == 3 and len(pts_start.shape) == 2:
                    D = np.reshape(
                        np.repeat(pts_start[..., None], vshape[-1], axis=-1),
                        (3, -1),
                    )
                    u = vect.reshape((3, -1))
                else:
                    msg = (
                        "Not treated case!\n"
                        f"\t- pts_start.shape: {pts_start.shape}\n"
                        f"\t- vect.shape: {vshape}\n"
                    )
                    raise Exception(msg)
            else:
                if len(vshape) > 2:
                    D = pts_start.reshape((3, -1))
                    u = vect.reshape((3, -1))
                else:
                    D = pts_start
                    u = vect
            # Dummy Rays object used only to compute exit lengths (kOut)
            rays = _core.Rays(
                dgeom=(D, u),
                config=config,
                strict=False,
                Name='dummy',
                Diag='dummy',
                Exp='dummy',
            )
            if u.shape != vshape:
                kout = rays.dgeom['kOut'].reshape(vshape[1:])
            else:
                kout = rays.dgeom['kOut']
            dk['config'] = kout

        if det is not None and det is not False:
            # Length to intersection with the detector plane:
            # k = ((cent - pts) . nout) / (vect . nout)
            shape = tuple([3] + [1 for ii in range(vect.ndim-1)])
            cent = det['cent'].reshape(shape)
            nout = det['nout'].reshape(shape)
            if grid is True:
                k = (
                    np.sum((cent-pts_start[..., None])*nout, axis=0)
                    / np.sum(vect*nout, axis=0)
                )
            else:
                k = (
                    np.sum((cent-pts_start)*nout, axis=0)
                    / np.sum(vect*nout, axis=0)
                )
            # Keep only forward intersections
            dk['det'][k >= 0.] = k[k >= 0.]

            if return_xixj is True:
                # Detector-plane coordinates of the impacts
                if grid:
                    pts_end = pts_start[..., None] + dk['det'][None, ...]*vect
                else:
                    pts_end = pts_start + dk['det'][None, ...]*vect
                ei = det['ei'].reshape(shape)
                ej = det['ej'].reshape(shape)
                xi = np.sum((pts_end - cent)*ei, axis=0)
                xj = np.sum((pts_end - cent)*ej, axis=0)

        if length is not None:
            dk['length'][:] = length

        # Shortest available length per ray (ignoring nan)
        k = np.nanmin([vv for vv in dk.values() if vv is not None], axis=0)
        if returnas == 'pts':
            if grid:
                pts_end = pts_start[..., None] + k[None, ...]*vect
                if return_xixj:
                    return pts_start, pts_end, xi, xj
                else:
                    return pts_start, pts_end
            else:
                pts_end = pts_start + k[None, ...]*vect
                if return_xixj:
                    return pts_start, pts_end, xi, xj
                else:
                    return pts_start, pts_end
        elif returnas == '(pts, vect, length)':
            if return_xixj:
                return pts_start, vect, k, xi, xj
            else:
                return pts_start, vect, k
    def split(self, direction=None, nb=None):
        """ Split the crystal into nb equal sub-crystals.

        Parameters
        ----------
        direction : None / str
            'e1' (default): split along e1 (~horizontally);
            'e2': split along e2 (~vertically).
        nb : None / int
            Number of equal parts (int > 1, default 2).

        Returns
        -------
        list
            nb new instances of the same class, each with a reduced
            extenthalf and its own recomputed (nout, e1, e2) basis.
        """
        # Check inputs
        if direction is None:
            direction = 'e1'
        if direction not in ['e1', 'e2']:
            msg = (
                "Arg direction must be either:\n"
                "\t- 'e1': split along vector 'e1' (~horizontally)\n"
                "\t- 'e2': split along vector 'e2' (~vertically)\n"
                f"You provided: {direction}"
            )
            raise Exception(msg)
        if nb is None:
            nb = 2
        if not (isinstance(nb, int) and nb > 1):
            msg = (
                "Arg nb must be a int > 1 !\n"
                "It specifies the number of equal parts desired\n"
                f"You provided: {nb}"
            )
            raise Exception(msg)

        # Normalized mid-points of each sub-part in [-1, 1]
        edges = np.linspace(-1, 1, nb+1)
        mid = 0.5*(edges[1:] + edges[:-1])[None, :]
        if direction == 'e2':
            # Offset each sub-part in dtheta, shrink extenthalf along e2
            dtheta = mid*self._dgeom['extenthalf'][1]
            psi = np.zeros((1, nb), dtype=float)
            extenthalf = [
                self._dgeom['extenthalf'][0],
                self._dgeom['extenthalf'][1]/nb,
            ]
        else:
            # Offset each sub-part in psi, shrink extenthalf along e1
            dtheta = np.zeros((1, nb), dtype=float)
            psi = mid*self._dgeom['extenthalf'][0]
            extenthalf = [
                self._dgeom['extenthalf'][0]/nb,
                self._dgeom['extenthalf'][1],
            ]

        # Basis vectors at each sub-part summit (rotated by psi, dtheta)
        nouts = (
            np.cos(dtheta)*(
                self._dgeom['nout'][:, None]*np.cos(psi)
                + self._dgeom['e1'][:, None]*np.sin(psi)
            )
            + np.sin(dtheta)*self._dgeom['e2'][:, None]
        )
        e1s = (
            -self._dgeom['nout'][:, None]*np.sin(psi)
            + self._dgeom['e1'][:, None]*np.cos(psi)
        )
        # e2 = nout x e1 (cross product written out column-wise)
        e2s = np.array([
            nouts[1, :]*e1s[2, :] - nouts[2, :]*e1s[1, :],
            nouts[2, :]*e1s[0, :] - nouts[0, :]*e1s[2, :],
            nouts[0, :]*e1s[1, :] - nouts[1, :]*e1s[0, :],
        ])

        # One new instance per sub-part (same center / rcurve / dbragg)
        lobj = [
            self.__class__(
                dgeom={
                    'rcurve': self._dgeom['rcurve'],
                    'center': self._dgeom['center'],
                    'nout': nouts[:, ii],
                    'e1': e1s[:, ii],
                    'e2': e2s[:, ii],
                    'extenthalf': extenthalf,
                },
                dmat={
                    k0: v0 for k0, v0 in self._dmat.items()
                    if k0 not in ['nin', 'nout', 'e1', 'e2']
                },
                dbragg=dict(self._dbragg),
                Name=f"{self.Id.Name}{ii}",
                Exp=self.Id.Exp,
            )
            for ii in range(nb)
        ]
        return lobj
    def plot(
        self, dcryst=None,
        phi=None, bragg=None, lamb=None, pts=None,
        n=None, config=None, det=None, length=None,
        dtheta=None, psi=None,
        ntheta=None, npsi=None,
        include_summit=None,
        dax=None, proj=None, res=None, element=None,
        color=None, ddet=None,
        dleg=None, draw=True, dmargin=None,
        use_non_parallelism=None, grid=None,
        rays_npts=None, rays_color=None,
        fs=None, wintit=None, tit=None,
    ):
        """ Plot the crystal (and optionally detector + traced rays).

        Ray tracing can be requested in one of two mutually exclusive
        ways:
            - (dtheta, psi, phi) + lamb/bragg: rays from chosen points
              on the crystal surface;
            - pts + lamb: rays computed so that they pass through the
              given (3, npts) points (via calc_raytracing_from_lambpts).
        Everything is forwarded to _plot_optics.CrystalBragg_plot().
        """
        if det is None:
            det = False
        det = self._checkformat_det(det)

        # Exactly one of the two ray-tracing input sets may be given
        lc = [
            dtheta is not None or psi is not None or phi is not None,
            pts is not None
        ]
        if np.sum(lc) == 2:
            msg = (
                "For ray tracing, please provide either:\n"
                + "\t- dtheta, psi, phi, lamb/bragg\n"
                + "\t- pts, lamb/bragg\n"
            )
            raise Exception(msg)

        if lc[0]:
            # Rays from crystal surface points: one bundle towards the
            # plasma (phi) and one towards the detector (phi + pi)
            pts_summit, pts1 = self.get_rays_from_cryst(
                phi=phi, lamb=lamb, bragg=bragg,
                n=n, use_non_parallelism=use_non_parallelism,
                dtheta=dtheta, psi=psi,
                ntheta=ntheta, npsi=npsi,
                include_summit=include_summit,
                config=config, det=det,
                returnas='pts', return_xixj=False,
                grid=grid,
            )
            pts2, xi, xj = self.get_rays_from_cryst(
                phi=phi+np.pi, lamb=lamb, bragg=bragg,
                n=n, use_non_parallelism=use_non_parallelism,
                dtheta=dtheta, psi=psi,
                ntheta=ntheta, npsi=npsi,
                include_summit=include_summit,
                config=config, det=det,
                returnas='pts', return_xixj=True,
                grid=grid,
            )[1:]
        elif lc[1]:
            # Rays constrained to pass through user-provided 3d points
            c0 = (
                isinstance(pts, np.ndarray)
                and pts.ndim == 2
                and pts.shape[0] == 3
            )
            if not c0:
                msg = ("Arg pts must be a (3, npts) np.array!")
                raise Exception(msg)
            dtheta, psi, phi, bragg, _, _ = self.calc_raytracing_from_lambpts(
                pts=pts,
                lamb=lamb,
                ndtheta=ntheta,
            )
            pts_summit, pts2, xi, xj = self.get_rays_from_cryst(
                phi=phi+np.pi, lamb=None, bragg=bragg,
                n=n, use_non_parallelism=use_non_parallelism,
                dtheta=dtheta, psi=psi,
                ntheta=ntheta, npsi=npsi,
                include_summit=include_summit,
                config=config, det=det,
                returnas='pts', return_xixj=True,
                grid=grid,
            )
            # Repeat pts to match the (ndtheta, ..., 2) ray bundle shape
            pts1 = np.repeat(
                np.repeat(
                    np.repeat(
                        pts[:, None, :], dtheta.shape[0], axis=1,
                    )[..., None],
                    dtheta.shape[2],
                    axis=-1,
                )[..., None],
                2,
                axis=-1,
            )
        else:
            # No ray tracing requested
            pts_summit, pts1, pts2, xi, xj = None, None, None, None, None
        return _plot_optics.CrystalBragg_plot(
            cryst=self, dcryst=dcryst,
            det=det, ddet=ddet,
            dax=dax, proj=proj, res=res, element=element,
            color=color,
            pts_summit=pts_summit, pts1=pts1, pts2=pts2,
            xi=xi, xj=xj,
            rays_color=rays_color, rays_npts=rays_npts,
            dleg=dleg, draw=draw, fs=fs, dmargin=dmargin,
            use_non_parallelism=use_non_parallelism,
            wintit=wintit, tit=tit,
        )
    def get_phi_from_magaxis_summit(
        self,
        axis_r,
        axis_z,
        axis_npts=None,
        lamb=None,
        lamb_tol=None,
        bragg=None,
        n=None,
        use_non_parallelism=None,
    ):
        """ Return phi of magnetic-axis points seen at a given wavelength.

        For each (axis_r, axis_z) point, axis_npts candidate toroidal
        positions are sampled around the crystal's toroidal angle; the
        phi of the candidate whose wavelength best matches lamb (within
        lamb_tol) is returned, nan where no candidate matches.

        Parameters
        ----------
        axis_r, axis_z : array-like
            Magnetic axis coordinates (same shape).
        axis_npts : None / int
            Number of toroidal samples per point (default 1000).
        lamb, bragg, n : see _checkformat_bragglamb()
        lamb_tol : None / float
            Wavelength matching tolerance in m (default 0.01e-10).
        """
        # Check inputs
        if axis_npts is None:
            axis_npts = 1000
        axis_r = np.atleast_1d(axis_r)
        axis_z = np.atleast_1d(axis_z)
        assert axis_r.shape == axis_z.shape

        if lamb_tol is None:
            lamb_tol = 0.01e-10
        bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
        lamb = self.get_lamb_from_bragg(bragg=bragg, n=n)

        # Sample toroidal angles within +/- pi/2 of the crystal's angle
        shaperz = axis_r.shape
        phi_ax = np.full(shaperz, np.nan)
        theta_cryst = np.arctan2(
            self._dgeom['summit'][1],
            self._dgeom['summit'][0],
        )
        theta_ax = theta_cryst + np.pi/2*np.linspace(-1, 1, axis_npts)
        # Broadcast theta samples against the (r, z) shape
        shapetheta = np.r_[[1 for ii in shaperz], axis_npts]
        theta_ax = theta_ax.reshape(shapetheta)

        # 3d cartesian coordinates of all candidate points (flattened)
        axis_x = (axis_r[..., None] * np.cos(theta_ax)).ravel()
        axis_y = (axis_r[..., None] * np.sin(theta_ax)).ravel()
        axis_z = (np.repeat(axis_z[..., None], axis_npts, axis=-1)).ravel()

        # (bragg, phi, lamb) of each candidate as seen from the crystal
        (
            bragg_ax_full, phi_ax_full, lamb_ax_full,
        ) = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
            pts=np.array([axis_x, axis_y, axis_z]),
            dtheta=None, psi=None,
            ntheta=None, npsi=None,
            n=None,
            use_non_parallelism=use_non_parallelism,
            grid=None,
            return_lamb=True,
        )
        shape_full = tuple(np.r_[shaperz, axis_npts])
        lamb_ax_full = lamb_ax_full.reshape(shape_full)
        phi_ax_full = phi_ax_full.reshape(shape_full)

        # For each point, keep phi of the candidate closest in lamb
        # (only where at least one candidate is within tolerance)
        dlamb = np.abs(lamb_ax_full - lamb)
        indok = np.any(dlamb <= lamb_tol, axis=-1)
        indmin = np.nanargmin(dlamb[indok, :], axis=-1)
        indtup = tuple([iii for iii in indok.nonzero()] + [indmin])
        phi_ax[indok] = phi_ax_full[indtup]
        return phi_ax
def get_bragg_from_lamb(self, lamb=None, n=None):
if self._dmat['d'] is None:
msg = "Interplane distance d no set !\n"
msg += " => self.set_dmat({'d':...})"
raise Exception(msg)
if lamb is None:
lamb = self._dbragg['lambref']
return _comp_optics.get_bragg_from_lamb(
np.atleast_1d(lamb), self._dmat['d'], n=n,
)
def get_lamb_from_bragg(self, bragg=None, n=None):
if self._dmat['d'] is None:
msg = "Interplane distance d no set !\n"
msg += " => self.set_dmat({'d':...})"
raise Exception(msg)
if bragg is None:
bragg = self._dbragg['braggref']
return _comp_optics.get_lamb_from_bragg(np.atleast_1d(bragg),
self._dmat['d'], n=n)
def update_non_parallelism(self, alpha=None, beta=None):
if alpha is None:
alpha = 0
if beta is None:
beta = 0
(self._dmat['nin'], self._dmat['nout'], self._dmat['e1'],
self._dmat['e2']) = _comp_optics.get_vectors_from_angles(
alpha, beta,
self._dgeom['nout'], self._dgeom['e1'],
self._dgeom['e2'],
)
self._dmat['alpha'], self._dmat['beta'] = alpha, beta
def calc_meridional_sagital_focus(
self,
rcurve=None,
bragg=None,
alpha=None,
use_non_parallelism=None,
verb=None,
):
if rcurve is None:
rcurve = self._dgeom['rcurve']
if bragg is None:
bragg = self._dbragg['braggref']
if use_non_parallelism is True:
alpha = self._dmat['alpha']
if use_non_parallelism is False:
alpha = 0.0
return _comp_optics.calc_meridional_sagital_focus(
rcurve=rcurve,
bragg=bragg,
alpha=alpha,
use_non_parallelism=use_non_parallelism,
verb=verb,
)
def get_rowland_dist_from_lambbragg(self, bragg=None, lamb=None, n=None):
bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
if np.all(np.isnan(bragg)):
msg = ("There is no available bragg angle!\n"
+ " => Check the vlue of self.dmat['d'] vs lamb")
raise Exception(msg)
return _comp_optics.get_rowland_dist_from_bragg(
bragg=bragg, rcurve=self._dgeom['rcurve'],
)
def get_detector_ideal(
self,
bragg=None, lamb=None,
rcurve=None, n=None,
ddist=None, di=None, dj=None,
dtheta=None, dpsi=None, tilt=None,
lamb0=None, lamb1=None, dist01=None,
use_non_parallelism=None,
tangent_to_rowland=None, plot=False,
):
if rcurve is None:
rcurve = self._dgeom['rcurve']
bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
if np.all(np.isnan(bragg)):
msg = ("There is no available bragg angle!\n"
+ " => Check the vlue of self.dmat['d'] vs lamb")
raise Exception(msg)
lc = [lamb0 is not None, lamb1 is not None, dist01 is not None]
if any(lc) and not all(lc):
msg = (
"Arg lamb0, lamb1 and dist01 must be provided together:\n"
+ "\t- lamb0: line0 wavelength ({})\n".format(lamb0)
+ "\t- lamb1: line1 wavelength ({})\n".format(lamb1)
+ "\t- dist01: distance (m) on detector between lines "
+ "({})".format(dist01)
)
raise Exception(msg)
bragg01 = None
if all(lc):
bragg01 = self._checkformat_bragglamb(
lamb=np.r_[lamb0, lamb1], n=n,
)
lc = [rcurve is None, self._dgeom['summit'] is None]
if any(lc):
msg = (
"Some missing fields in dgeom for computation:"
+ "\n\t-" + "\n\t-".join(['rcurve'] + 'summit')
)
raise Exception(msg)
nout, e1, e2, use_non_parallelism = self.get_unit_vectors(
use_non_parallelism=use_non_parallelism,
)
lc = [cc is None for cc in [nout, e1, e2]]
if any(lc):
msg = (
"""
Field 'nout', 'e1', 'e2' missing!
"""
)
raise Exception(msg)
(det_dist, n_crystdet_rel,
det_nout_rel, det_ei_rel) = _comp_optics.get_approx_detector_rel(
rcurve, bragg,
bragg01=bragg01, dist01=dist01,
tangent_to_rowland=tangent_to_rowland)
det_cent, det_nout, det_ei, det_ej = _comp_optics.get_det_abs_from_rel(
det_dist, n_crystdet_rel, det_nout_rel, det_ei_rel,
self._dgeom['summit'], nout, e1, e2,
ddist=ddist, di=di, dj=dj,
dtheta=dtheta, dpsi=dpsi, tilt=tilt)
if plot:
dax = self.plot()
p0 = np.repeat(det_cent[:,None], 3, axis=1)
vv = np.vstack((det_nout, det_ei, det_ej)).T
dax['cross'].plot(np.hypot(det_cent[0], det_cent[1]),
det_cent[2], 'xb')
dax['hor'].plot(det_cent[0], det_cent[1], 'xb')
dax['cross'].quiver(np.hypot(p0[0, :], p0[1, :]), p0[2, :],
np.hypot(vv[0, :], vv[1, :]), vv[2, :],
units='xy', color='b')
dax['hor'].quiver(p0[0, :], p0[1, :], vv[0, :], vv[1, :],
units='xy', color='b')
return {'cent': det_cent, 'nout': det_nout,
'ei': det_ei, 'ej': det_ej}
def _checkformat_det(self, det=None):
lc = [det is None, det is False, isinstance(det, dict)]
msg = ("det must be:\n"
+ "\t- False: not det provided\n"
+ "\t- None: use default approx det from:\n"
+ "\t self.get_detector_ideal()\n"
+ "\t- dict: a dictionary of 3d (x,y,z) coordinates of a point"
+ " (local frame center) and 3 unit vectors forming a direct "
+ "orthonormal basis attached to the detector's frame\n"
+ "\t\t\t\t- 'cent': detector center\n"
+ "\t\t\t\t- 'nout': unit vector perpendicular to surface, "
+ "in direction of the crystal\n"
+ "\t\t\t\t- 'ei': unit vector, first coordinate on surface\n"
+ "\t\t\t\t- 'ej': unit vector, second coordinate on surfacei\n"
+ " You provided: {}".format(det))
if not any(lc):
raise Exception(msg)
if lc[0]:
det = self.get_detector_ideal(lamb=self._dbragg['lambref'])
elif lc[2]:
lk = ['cent', 'nout', 'ei', 'ej']
c0 = (isinstance(det, dict)
and all([(kk in det.keys()
and hasattr(det[kk], '__iter__')
and np.atleast_1d(det[kk]).size == 3
and not np.any(np.isnan(det[kk])))
for kk in lk]))
if not c0:
raise Exception(msg)
for k0 in lk:
det[k0] = np.atleast_1d(det[k0]).ravel()
return det
def get_local_noute1e2(
self,
dtheta=None, psi=None,
ntheta=None, npsi=None,
use_non_parallelism=None,
include_summit=None,
):
# Get local basis at crystal summit
nout, e1, e2, use_non_parallelism = self.get_unit_vectors(
use_non_parallelism=use_non_parallelism,
)
nin = -nout
# Get vectors at any points from psi & dtheta
vout, ve1, ve2 = _comp_optics.CrystBragg_get_noute1e2_from_psitheta(
nout, e1, e2,
psi=psi, dtheta=dtheta,
e1e2=True, sameshape=False,
extenthalf_psi=self._dgeom['extenthalf'][0],
extenthalf_dtheta=self._dgeom['extenthalf'][1],
ntheta=ntheta, npsi=npsi,
include_summit=include_summit,
)
vin = -vout
# cent no longer dgeom['center'] because no longer a fixed point
cent = self._dgeom['summit'] + self._dgeom['rcurve']*nin
reshape = np.r_[3, [1 for ii in range(vout.ndim - 1)]]
cent = cent.reshape(reshape)
# Redefining summit according to nout at each point at crystal
summ = cent + self._dgeom['rcurve']*vout
return summ, vout, ve1, ve2
def calc_xixj_from_braggphi(
self,
phi=None,
bragg=None,
lamb=None,
n=None,
dtheta=None,
psi=None,
det=None,
use_non_parallelism=None,
strict=None,
return_strict=None,
data=None,
plot=True,
dax=None,
):
if return_strict is None:
return_strict = False
# Check / format inputs
bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
phi = np.atleast_1d(phi)
# Check / get det
det = self._checkformat_det(det)
# Get local summit nout, e1, e2 if non-centered
if dtheta is None:
dtheta = 0.
if psi is None:
psi = 0.
# Probably to update with use_non_parallelism?
# Get back summit & vectors at any point at the crystal surface,
# according to parallelism properties
summit, nout, e1, e2 = self.get_local_noute1e2(
dtheta=dtheta, psi=psi,
use_non_parallelism=use_non_parallelism,
ntheta=None, npsi=None,
include_summit=False,
)
# Compute
xi, xj, strict = _comp_optics.calc_xixj_from_braggphi(
det_cent=det['cent'],
det_nout=det['nout'], det_ei=det['ei'], det_ej=det['ej'],
det_outline=det.get('outline'),
summit=summit, nout=nout, e1=e1, e2=e2,
bragg=bragg, phi=phi, strict=strict,
)
if plot:
dax = _plot_optics.CrystalBragg_plot_approx_detector_params(
bragg, xi, xj, data, dax,
)
if return_strict is True:
return xi, xj, strict
else:
return xi, xj
    def plot_line_on_det_tracing(
        self, lamb=None, n=None,
        nphi=None,
        det=None, johann=None,
        use_non_parallelism=None,
        lpsi=None, ldtheta=None,
        strict=None,
        ax=None, dleg=None,
        rocking=None, fs=None, dmargin=None,
        wintit=None, tit=None,
    ):
        """ Trace spectral line(s) on the detector and plot them.

        For each wavelength in lamb, the (xi, xj) trace is computed over
        a phi range deduced from the detector outline.  If johann is
        True (or lpsi/ldtheta are given), additional traces from other
        points on the crystal surface visualize the johann error.

        Parameters
        ----------
        lamb : None / float / array
            Wavelength(s) (m); defaults to the reference wavelength.
        nphi : None / int
            Number of phi samples per line (default 100).
        det : dict
            Detector dict; MUST contain an 'outline' key.
        lpsi, ldtheta : None / array
            Normalized ([-1, 1]) surface offsets for the johann traces.
        rocking : None / bool
            Rocking-curve error tracing — not implemented yet (no-op).
        """
        # Check / format inputs
        if lamb is None:
            lamb = self._dbragg['lambref']
        lamb = np.atleast_1d(lamb).ravel()
        nlamb = lamb.size
        if johann is None:
            johann = lpsi is not None or ldtheta is not None
        if rocking is None:
            rocking = False
        if det is None or det.get('outline') is None:
            msg = ("Please provide det as a dict with 'outline'!")
            raise Exception(msg)

        # Get local basis
        nout, e1, e2, use_non_parallelism = self.get_unit_vectors(
            use_non_parallelism=use_non_parallelism,
        )
        nin = -nout

        # Compute lamb / phi: phi range spanned by the detector outline
        _, phi = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
            xi=det['outline'][0, :], xj=det['outline'][1, :], det=det,
            dtheta=0, psi=0,
            use_non_parallelism=use_non_parallelism,
            n=n,
            grid=True,
            return_lamb=False,
        )
        phimin, phimax = np.nanmin(phi), np.nanmax(phi)
        # Widen the phi interval by 10% on each side
        phimin, phimax = phimin-(phimax-phimin)/10, phimax+(phimax-phimin)/10

        # Get reference ray-tracing (from the crystal summit)
        bragg = self._checkformat_bragglamb(lamb=lamb, n=n)
        if nphi is None:
            nphi = 100
        phi = np.linspace(phimin, phimax, nphi)

        xi = np.full((nlamb, nphi), np.nan)
        xj = np.full((nlamb, nphi), np.nan)
        for ll in range(nlamb):
            xi[ll, :], xj[ll, :] = self.calc_xixj_from_braggphi(
                bragg=np.full(phi.shape, bragg[ll]),
                phi=phi,
                dtheta=0.,
                psi=0.,
                n=n,
                det=det,
                use_non_parallelism=use_non_parallelism,
                strict=strict,
                plot=False,
            )

        # Get johann-error raytracing (multiple positions on crystal)
        xi_er, xj_er = None, None
        if johann and not rocking:
            if lpsi is None:
                lpsi = np.linspace(-1., 1., 15)
            if ldtheta is None:
                ldtheta = np.linspace(-1., 1., 15)
            lpsi, ldtheta = np.meshgrid(lpsi, ldtheta)
            lpsi = lpsi.ravel()
            ldtheta = ldtheta.ravel()
            # Scale normalized offsets to the physical half-extents
            lpsi = self._dgeom['extenthalf'][0]*np.r_[lpsi]
            ldtheta = self._dgeom['extenthalf'][1]*np.r_[ldtheta]
            npsi = lpsi.size
            assert npsi == ldtheta.size

            xi_er = np.full((nlamb, npsi*nphi), np.nan)
            xj_er = np.full((nlamb, npsi*nphi), np.nan)
            for l in range(nlamb):
                for ii in range(npsi):
                    i0 = np.arange(ii*nphi, (ii+1)*nphi)
                    xi_er[l, i0], xj_er[l, i0] = self.calc_xixj_from_braggphi(
                        phi=phi, bragg=bragg[l], lamb=None, n=n,
                        dtheta=ldtheta[ii], psi=lpsi[ii],
                        det=det, plot=False,
                        use_non_parallelism=use_non_parallelism,
                        strict=strict,
                    )

        # Get rocking curve error
        if rocking:
            # Not implemented yet
            pass

        # Plot
        return _plot_optics.CrystalBragg_plot_line_tracing_on_det(
            lamb, xi, xj, xi_er, xj_er,
            det=det, ax=ax, dleg=dleg,
            johann=johann, rocking=rocking,
            fs=fs, dmargin=dmargin, wintit=wintit, tit=tit)
    def calc_johannerror(
        self,
        xi=None, xj=None, err=None,
        det=None, n=None,
        lpsi=None, ldtheta=None,
        lambda_interval_min=None,
        lambda_interval_max=None,
        use_non_parallelism=None,
        plot=True, fs=None, cmap=None,
        vmin=None, vmax=None, tit=None, wintit=None,
    ):
        """ Compute the johann error on a detector pixel grid.

        For each pixel (xi, xj), the error is the maximum deviation of
        (lamb, phi) computed from several points on the crystal surface
        (lpsi, ldtheta; default: the 8 edge/corner points) w.r.t. the
        values computed from the summit.

        Parameters
        ----------
        err : None / str
            'abs' (default): absolute errors (m, rad);
            'rel': percent of the (max - min) span;
            'rel2': percent of the mean value.
        lambda_interval_min, lambda_interval_max : None / float
            Wavelength interval (m) that the detector is tested to cover.

        Returns
        -------
        (err_lamb, err_phi, err_lamb_units, err_phi_units,
         test_lambda_interv)
        """
        # Check xi, xj once before to avoid doing it twice
        if err is None:
            err = 'abs'
        if lambda_interval_min is None:
            lambda_interval_min = 3.93e-10
        if lambda_interval_max is None:
            lambda_interval_max = 4.00e-10
        xi, xj, (xii, xjj) = _comp_optics._checkformat_xixj(xi, xj)

        # Check / format inputs
        bragg, phi, lamb = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
            xi=xii, xj=xjj, det=det,
            dtheta=0, psi=0,
            use_non_parallelism=use_non_parallelism,
            n=n,
            grid=True,
            return_lamb=True,
        )
        # Only one summit was selected
        bragg, phi, lamb = bragg[..., 0], phi[..., 0], lamb[..., 0]

        # Check lambda interval into lamb array
        c0 = (
            np.min(lamb) < lambda_interval_min
            and np.max(lamb) > lambda_interval_max
        )
        if c0:
            test_lambda_interv = True
        else:
            test_lambda_interv = False

        # Get err from multiple ldtheta, lpsi
        # default: the 8 edge mid-points / corners of the crystal outline
        if lpsi is None:
            lpsi = np.r_[-1., 0., 1., 1., 1., 0., -1, -1]
        lpsi = self._dgeom['extenthalf'][0]*np.r_[lpsi]
        if ldtheta is None:
            ldtheta = np.r_[-1., -1., -1., 0., 1., 1., 1., 0.]
        ldtheta = self._dgeom['extenthalf'][1]*np.r_[ldtheta]
        npsi = lpsi.size
        assert npsi == ldtheta.size
        (
            braggerr, phierr, lamberr,
        ) = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
            xi=xii, xj=xjj, det=det,
            dtheta=ldtheta, psi=lpsi,
            use_non_parallelism=use_non_parallelism,
            n=n,
            grid=True,
            return_lamb=True,
        )
        # Worst-case deviation over all sampled surface points
        err_lamb = np.nanmax(np.abs(lamb[..., None] - lamberr), axis=-1)
        err_phi = np.nanmax(np.abs(phi[..., None] - phierr), axis=-1)

        # absolute vs relative error
        if 'rel' in err:
            if err == 'rel':
                err_lamb = 100.*err_lamb / (np.nanmax(lamb) - np.nanmin(lamb))
                err_phi = 100.*err_phi / (np.nanmax(phi) - np.nanmin(phi))
            elif err == 'rel2':
                err_lamb = 100.*err_lamb / np.mean(lamb)
                err_phi = 100.*err_phi / np.mean(phi)
            err_lamb_units = '%'
            err_phi_units = '%'
        else:
            err_lamb_units = 'm'
            err_phi_units = 'rad'

        if plot is True:
            ax = _plot_optics.CrystalBragg_plot_johannerror(
                xi, xj, lamb, phi,
                err_lamb, err_phi,
                err_lamb_units=err_lamb_units,
                err_phi_units=err_phi_units,
                cmap=cmap, vmin=vmin, vmax=vmax,
                fs=fs, tit=tit, wintit=wintit,
            )
        return (
            err_lamb, err_phi, err_lamb_units, err_phi_units,
            test_lambda_interv,
        )
def plot_focal_error_summed(
self,
dist_min=None, dist_max=None,
di_min=None, di_max=None,
ndist=None, ndi=None,
lamb=None, bragg=None,
xi=None, xj=None,
err=None,
use_non_parallelism=None,
tangent_to_rowland=None, n=None,
plot=None,
pts=None,
det_ref=None, plot_dets=None, nsort=None,
dcryst=None,
lambda_interval_min=None,
lambda_interval_max=None,
contour=None,
fs=None,
ax=None,
cmap=None,
vmin=None,
vmax=None,
return_ax=None,
):
# Check / format inputs
if dist_min is None:
dist_min = -0.15
if dist_max is None:
dist_max = 0.15
if di_min is None:
di_min = -0.40
if di_max is None:
di_max = 0.40
if ndist is None:
ndist = 21
if ndi is None:
ndi = 21
if err is None:
err = 'rel'
if plot is None:
plot = True
if plot_dets is None:
plot_dets = det_ref is not None
if nsort is None:
nsort = 5
if return_ax is None:
return_ax = True
if lambda_interval_min is None:
lambda_interval_min = 3.93e-10
if lambda_interval_max is None:
lambda_interval_max = 4.00e-10
l0 = [dist_min, dist_max, ndist, di_min, di_max, ndi]
c0 = any([l00 is not None for l00 in l0])
if not c0:
msg = (
"Please give the ranges of ddist and di translations\n"
"\t to compute the different detector's position\n"
"\t Provided:\n"
"\t\t- dist_min, dist_max, ndist: ({}, {}, {})\n".format(
dist_min, dist_max, ndist,
)
+ "\t\t- di_min, di_max, ndi: ({}, {}, {})\n".format(
di_min, di_max, ndi,
)
)
raise Exception(msg)
(
ddist0, di0, dj0,
dtheta0, dpsi0, tilt0,
) = self._get_local_coordinates_of_det(
bragg=bragg,
lamb=lamb,
det_ref=det_ref,
use_non_parallelism=use_non_parallelism,
)
tor_ideal(
lamb=lamb,
bragg=bragg,
use_non_parallelism=use_non_parallelism,
tangent_to_rowland=True,
)
det2 = self.get_detector_ideal(
lamb=lamb,
bragg=bragg,
use_non_parallelism=use_non_parallelism,
tangent_to_rowland=False,
)
cos_angle_nout = np.sum(
det1['nout'] * det2['nout']
) / (
np.linalg.norm(det1['nout'] * np.linalg.norm(det2['nout']))
)
angle_nout = np.arccos(cos_angle_nout)
ddist = np.linspace(dist_min, dist_max, int(ndist))
di = np.linspace(di_min, di_max, int(ndi))
error_lambda = np.full((di.size, ddist.size), np.nan)
test_lamb_interv = np.zeros((di.size, ddist.size), dtype='bool')
end = '\r'
for ii in range(ddist.size):
for jj in range(di.size):
if ii == ndist-1 and jj == ndi-1:
end = '\n'
msg = (
"Computing mean focal error for det "
f"({ii+1}, {jj+1})/({ndist}, {ndi})"
).ljust(60)
print(msg, end=end, flush=True)
dpsi0bis = float(dpsi0)
if tangent_to_rowland:
dpsi0bis = dpsi0 - angle_nout
det = self.get_detector_ideal(
ddist=ddist[ii],
di=di[jj],
dj=dj0,
dtheta=dtheta0,
dpsi=dpsi0bis,
tilt=tilt0,
lamb=lamb,
bragg=bragg,
use_non_parallelism=use_non_parallelism,
tangent_to_rowland=False,
)
(
error_lambda_temp, test_lamb_interv[jj, ii],
) = self.calc_johannerror(
xi=xi, xj=xj,
det=det,
err=err,
lambda_interval_min=lambda_interval_min,
lambda_interval_max=lambda_interval_max,
plot=False,
)[::4]
error_lambda[jj, ii] = np.nanmean(error_lambda_temp)
if 'rel' in err:
units = '%'
else:
units = 'm'
if plot:
ax = _plot_optics.CrystalBragg_plot_focal_error_summed(
cryst=self, dcryst=dcryst,
lamb=lamb, bragg=bragg,
error_lambda=error_lambda,
ddist=ddist, di=di,
ddist0=ddist0, di0=di0, dj0=dj0,
dtheta0=dtheta0, dpsi0=dpsi0, tilt0=tilt0,
angle_nout=angle_nout,
det_ref=det_ref,
units=units,
plot_dets=plot_dets, nsort=nsort,
tangent_to_rowland=tangent_to_rowland,
use_non_parallelism=use_non_parallelism,
pts=pts,
test_lamb_interv=test_lamb_interv,
contour=contour,
fs=fs,
ax=ax,
cmap=cmap,
vmin=vmin,
vmax=vmax,
)
if return_ax:
return error_lambda, ddist, di, test_lamb_interv, ax
else:
return error_lambda, ddist, di, test_lamb_interv
def _get_local_coordinates_of_det(
self,
bragg=None,
lamb=None,
det_ref=None,
use_non_parallelism=None,
):
if det_ref is None:
msg = (
"You need to provide your arbitrary detector\n"
+ "\t in order to compute its spatial properties !\n"
+ "\t You provided: {}".format(det)
)
raise Exception(msg)
det_ref = self._checkformat_det(det=det_ref)
det_approx = self.get_detector_ideal(
bragg=bragg, lamb=lamb,
tangent_to_rowland=False,
use_non_parallelism=use_non_parallelism,
)
delta = det_ref['cent'] - det_approx['cent']
ddist = np.sum(delta * (-det_approx['nout']))
di = np.sum(delta * det_approx['ei'])
dj = np.sum(delta * det_approx['ej'])
dtheta, dpsi, tilt = None, None, None
sindtheta = np.sum(det_approx['ej'] * det_ref['nout'])
costheta_cospsi = np.sum(det_approx['nout'] * det_ref['nout'])
costheta_sinpsi = np.sum(det_approx['ei'] * det_ref['nout'])
costheta = np.sqrt(costheta_cospsi**2 + costheta_sinpsi**2)
dtheta = np.arctan2(sindtheta, costheta)
dpsi = np.arctan2(
costheta_sinpsi / costheta,
costheta_cospsi / costheta,
)
det_ei2 = (
np.cos(dpsi)*det_approx['ei'] - np.sin(dpsi)*det_approx['nout']
)
det_ej2 = np.cross(det_ref['nout'], det_ei2)
costilt = np.sum(det_ref['ei']*det_ei2)
sintilt = np.sum(det_ref['ei']*det_ej2)
tilt = np.arctan2(sintilt, costilt)
return ddist, di, dj, dtheta, dpsi, tilt
def get_lambbraggphi_from_ptsxixj_dthetapsi(
self,
pts=None,
xi=None, xj=None, det=None,
dtheta=None, psi=None,
ntheta=None, npsi=None,
n=None,
use_non_parallelism=None,
grid=None,
return_lamb=None,
):
if return_lamb is None:
return_lamb = True
det = self._checkformat_det(det)
summ, vout, ve1, ve2 = self.get_local_noute1e2(
dtheta=dtheta, psi=psi,
ntheta=ntheta, npsi=npsi,
use_non_parallelism=use_non_parallelism,
include_summit=True,
)
bragg, phi = _comp_optics.calc_braggphi_from_xixjpts(
pts=pts,
xi=xi, xj=xj, det=det,
summit=summ, nin=-vout, e1=ve1, e2=ve2,
grid=grid,
)
if return_lamb is True:
lamb = self.get_lamb_from_bragg(bragg=bragg, n=n)
return bragg, phi, lamb
else:
return bragg, phi
def get_lamb_avail_from_pts(
self,
pts=None,
n=None, ndtheta=None,
det=None, nlamb=None, klamb=None,
use_non_parallelism=None,
strict=None,
return_phidtheta=None,
return_xixj=None,
):
if ndtheta is None:
ndtheta = 20
if nlamb is None:
nlamb = 100
assert nlamb >= 2, "nlamb must be >= 2"
if return_phidtheta is None:
return_phidtheta = True
if return_xixj is None:
return_xixj = det is not None
if det is None:
return_xixj = False
if det is None:
strict = False
bragg, phi, lamb = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
pts=pts,
dtheta='envelop', psi='envelop',
ntheta=None, npsi=None,
n=n, grid=True,
use_non_parallelism=use_non_parallelism,
return_lamb=True,
)
lambmin = np.nanmin(lamb, axis=1)
lambmax = np.nanmax(lamb, axis=1)
if klamb is None:
klamb = np.linspace(0, 1, nlamb)
elif not (isinstance(klamb, np.ndarray) and klamb.ndim == 1):
msg = "Please provide klamb as a 1d vector!"
raise Exception(msg)
nlamb = klamb.size
lamb = lambmin[:, None] + (lambmax-lambmin)[:, None]*klamb
return _comp_optics._get_lamb_avail_from_pts_phidtheta_xixj(
cryst=self,
lamb=lamb,
n=n,
ndtheta=ndtheta,
pts=pts,
use_non_parallelism=use_non_parallelism,
return_phidtheta=return_phidtheta,
return_xixj=return_xixj,
strict=strict,
det=det,
)
    def _calc_dthetapsiphi_from_lambpts(
        self,
        pts=None, bragg=None, lamb=None,
        n=None, ndtheta=None,
        use_non_parallelism=None,
        grid=None,
    ):
        """Compute (dtheta, psi, phi, bragg) for given wavelengths and points.

        Solves for the crystal surface coordinates (dtheta, psi) from which
        each point can be seen at each bragg angle, then cross-checks the
        result by recomputing bragg from (pts, dtheta, psi).
        """
        pts = _comp_optics._checkformat_pts(pts)
        npts = pts.shape[1]
        bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
        nout, e1, e2, use_non_parallelism = self.get_unit_vectors(
            use_non_parallelism=use_non_parallelism
        )
        dtheta, psi, indok, grid = _comp_optics.calc_dthetapsiphi_from_lambpts(
            pts,
            bragg,
            summit=self._dgeom['summit'],
            rcurve=self._dgeom['rcurve'],
            nout=nout, e1=e1, e2=e2,
            extenthalf=self._dgeom['extenthalf'],
            ndtheta=ndtheta,
            grid=grid,
        )
        # Broadcast bragg (and pts) to the shape of dtheta so the two
        # solutions (last axis of size 2) can be compared element-wise
        if grid is True:
            bragg = np.repeat(
                np.repeat(
                    np.repeat(bragg[:, None], npts, axis=-1)[..., None],
                    dtheta.shape[2],
                    axis=-1,
                )[..., None],
                2,
                axis=-1,
            )
            pts = pts[:, None, :, None, None]
        else:
            bragg = np.repeat(
                np.repeat(bragg[:, None], dtheta.shape[1], axis=1)[..., None],
                2,
                axis=-1,
            )
            pts = pts[..., None, None]
        # Invalidate entries with no geometric solution
        bragg[~indok] = np.nan
        # Consistency check: recompute bragg from the solved (dtheta, psi)
        bragg2, phi = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
            pts=pts,
            dtheta=dtheta, psi=psi,
            grid=False,
            use_non_parallelism=use_non_parallelism,
            return_lamb=False,
        )
        c0 = (
            bragg2.shape == bragg.shape
            and np.allclose(bragg, bragg2, equal_nan=True)
        )
        if not c0:
            # Best-effort debug plot; failure to plot (e.g. headless backend)
            # must not mask the real inconsistency error below
            try:
                plt.figure()
                plt.plot(bragg, bragg2, '.')
            except Exception as err:
                pass
            msg = (
                "Inconsistency detected in bragg angle computations:\n"
                + "\t- from the points and lamb\n"
                + "\t- from the points and (dtheta, psi)\n"
                + "\nContext:\n"
                + "\t- use_non_parallelism: {}\n".format(use_non_parallelism)
                + "\t- bragg.shape = {}\n".format(bragg.shape)
                + "\t- bragg2.shape = {}\n".format(bragg2.shape)
            )
            raise Exception(msg)
        return dtheta, psi, phi, bragg
    def calc_raytracing_from_lambpts(
        self,
        lamb=None, bragg=None, pts=None,
        xi_bounds=None, xj_bounds=None, nphi=None,
        det=None, n=None, ndtheta=None,
        johann=False, lpsi=None, ldtheta=None,
        rocking=False, strict=None, plot=None, fs=None,
        dmargin=None, wintit=None,
        tit=None, proj=None,
        legend=None, draw=None, returnas=None,
    ):
        """Ray-trace from plasma points at given wavelengths to the detector.

        Returns (dtheta, psi, phi, bragg, xi, xj) by default
        (returnas='data'), or the plotting axes (returnas='ax').
        """
        if returnas is None:
            returnas = 'data'
        if plot is None or plot is True:
            plot = ['det', '3d']
        if isinstance(plot, str):
            plot = plot.split('+')
        assert all([ss in ['det', '2d', '3d'] for ss in plot])
        assert returnas in ['data', 'ax']
        pts = _comp_optics._checkformat_pts(pts)
        npts = pts.shape[1]
        # Crystal coordinates and angles from which each point is seen
        dtheta, psi, phi, bragg = self._calc_dthetapsiphi_from_lambpts(
            pts=pts, lamb=lamb, bragg=bragg, n=n, ndtheta=ndtheta,
        )
        ndtheta = dtheta.shape[-1]
        det = self._checkformat_det(det)
        # phi + pi: rays leave the crystal towards the detector side
        xi, xj = self.calc_xixj_from_braggphi(
            bragg=bragg, phi=phi+np.pi, n=n,
            dtheta=dtheta, psi=psi,
            det=det, strict=strict, plot=False,
        )
        # NOTE(review): this unconditional override makes the whole plotting
        # branch below dead code, and returnas='ax' will raise NameError on
        # 'ax' — looks like leftover debugging; confirm intent before removing
        plot = False
        if plot is not False:
            ptscryst, ptsdet = None, None
            if '2d' in plot or '3d' in plot:
                ptscryst = self.get_local_noute1e2(dtheta, psi)[0]
                ptsdet = (det['cent'][:, None, None, None]
                          + xi[None, ...]*det['ei'][:, None, None, None]
                          + xj[None, ...]*det['ej'][:, None, None, None])
            ax = _plot_optics.CrystalBragg_plot_raytracing_from_lambpts(
                xi=xi, xj=xj, lamb=lamb,
                xi_bounds=xi_bounds, xj_bounds=xj_bounds,
                pts=pts, ptscryst=ptscryst, ptsdet=ptsdet,
                det_cent=det['cent'], det_nout=det['nout'],
                det_ei=det['ei'], det_ej=det['ej'],
                cryst=self, proj=plot, fs=fs, dmargin=dmargin,
                wintit=wintit, tit=tit, legend=legend, draw=draw)
        if returnas == 'ax':
            return ax
        return dtheta, psi, phi, bragg, xi, xj
def _calc_spect1d_from_data2d(self, data, lamb, phi,
nlambfit=None, nphifit=None,
nxi=None, nxj=None,
spect1d=None, mask=None, vertsum1d=None):
if nlambfit is None:
nlambfit = nxi
if nphifit is None:
nphifit = nxj
return _comp_optics._calc_spect1d_from_data2d(
data, lamb, phi,
nlambfit=nlambfit,
nphifit=nphifit,
spect1d=spect1d,
mask=mask,
vertsum1d=vertsum1d,
)
def plot_data_vs_lambphi(
self,
xi=None, xj=None, data=None, mask=None,
det=None, dtheta=None, psi=None, n=None,
nlambfit=None, nphifit=None,
magaxis=None, npaxis=None,
dlines=None, spect1d='mean',
lambmin=None, lambmax=None,
xjcut=None, dxj=None,
plot=True, fs=None, tit=None, wintit=None,
cmap=None, vmin=None, vmax=None,
returnas=None,
):
assert data is not None
if returnas is None:
returnas = 'spect'
lreturn = ['ax', 'spect']
if returnas not in lreturn:
msg = ("Arg returnas must be in {}\n:".format(lreturn)
+ "\t- 'spect': return a 1d vertically averaged spectrum\n"
+ "\t- 'ax' : return a list of axes instances")
raise Exception(msg)
xi, xj, (xii, xjj) = _comp_optics._checkformat_xixj(xi, xj)
nxi = xi.size if xi is not None else np.unique(xii).size
nxj = xj.size if xj is not None else np.unique(xjj).size
bragg, phi, lamb = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
xi=xii, xj=xjj, det=det,
dtheta=dtheta, psi=psi,
use_non_parallelism=use_non_parallelism,
n=n,
grid=True,
return_lamb=True,
)
(spect1d, lambfit, phifit,
vertsum1d, phiminmax) = self._calc_spect1d_from_data2d(
data, lamb, phi,
nlambfit=nlambfit, nphifit=nphifit, nxi=nxi, nxj=nxj,
spect1d=spect1d, mask=mask, vertsum1d=True
)
lambax, phiax = None, None
if magaxis is not None:
if npaxis is None:
npaxis = 1000
thetacryst = np.arctan2(self._dgeom['summit'][1],
self._dgeom['summit'][0])
thetaax = thetacryst + np.pi/2*np.linspace(-1, 1, npaxis)
pts = np.array([magaxis[0]*np.cos(thetaax),
magaxis[0]*np.sin(thetaax),
np.full((npaxis,), magaxis[1])])
braggax, phiax = self.calc_braggphi_from_pts(pts)
lambax = self.get_lamb_from_bragg(braggax)
phiax = np.arctan2(np.sin(phiax-np.pi), np.cos(phiax-np.pi))
ind = ((lambax >= lambfit[0]) & (lambax <= lambfit[-1])
& (phiax >= phifit[0]) & (phiax <= phifit[-1]))
lambax, phiax = lambax[ind], phiax[ind]
ind = np.argsort(lambax)
lambax, phiax = lambax[ind], phiax[ind]
lambcut, phicut, spectcut = None, None, None
if xjcut is not None:
if dxj is None:
dxj = 0.002
xjcut = np.sort(np.atleast_1d(xjcut).ravel())
xicutf = np.tile(xi, (xjcut.size, 1))
xjcutf = np.repeat(xjcut[:, None], nxi, axis=1)
(
braggcut, phicut, lambcut,
) = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
xi=xicutf, xj=xjcutf, det=det,
dtheta=0, psi=0,
use_non_parallelism=use_non_parallelism,
n=1,
grid=True,
return_lamb=True,
)
indxj = [(np.abs(xj-xjc) <= dxj).nonzero()[0] for xjc in xjcut]
spectcut = np.array([np.nanmean(data[ixj, :], axis=0)
for ixj in indxj])
ax = None
if plot:
ax = _plot_optics.CrystalBragg_plot_data_vs_lambphi(
xi, xj, bragg, lamb, phi, data,
lambfit=lambfit, phifit=phifit, spect1d=spect1d,
vertsum1d=vertsum1d, lambax=lambax, phiax=phiax,
lambmin=lambmin, lambmax=lambmax, phiminmax=phiminmax,
xjcut=xjcut, lambcut=lambcut, phicut=phicut, spectcut=spectcut,
cmap=cmap, vmin=vmin, vmax=vmax, dlines=dlines,
tit=tit, wintit=wintit, fs=fs)
if returnas == 'spect':
return spect1d, lambfit
elif returnas == 'ax':
return ax
    def get_plasmadomain_at_lamb(
        self,
        config=None,
        struct=None,
        domain=None,
        res=None,
        det=None,
        xixj_lim=None,
        strict=None,
        bragg=None,
        lamb=None,
        ndtheta=None,
        nlamb=None,
        n=None,
        use_non_parallelism=None,
        plot=None,
        dax=None,
        plot_as=None,
        lcolor=None,
        return_dax=None,
    ):
        """Return the plasma volume observable at the given wavelengths.

        Samples the vessel volume, keeps the points whose accessible
        wavelength interval contains each requested lamb, and (optionally,
        strict=True) refines the selection against the detector outline.
        Returns (pts, lambok) plus the plotting axes if return_dax is True.
        """
        struct = _check_optics._check_config_get_Ves(
            config=config, struct=struct,
        )
        bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
        lamb = self.get_lamb_from_bragg(bragg=bragg, n=n)
        if ndtheta is None:
            ndtheta = 5
        if nlamb is None:
            nlamb = 11
        if strict is None:
            strict = True
        if plot is None:
            plot = True
        if return_dax is None:
            return_dax = plot is True
        # Sample the vessel volume in (R, Z, Phi)
        (
            pts, dV, ind, (resR, resZ, resPhi),
        ) = config.dStruct['dObj']['Ves'][struct].get_sampleV(
            res=res,
            domain=domain,
            returnas='(R, Z, Phi)',
        )
        # Same points in cartesian coordinates for the optics computations
        ptsXYZ = np.array([
            pts[0, :]*np.cos(pts[2, :]),
            pts[0, :]*np.sin(pts[2, :]),
            pts[1, :],
        ])
        # [lambmin, lambmax] accessible from each sampled point (nlamb=2)
        lamb_access = self.get_lamb_avail_from_pts(
            pts=ptsXYZ,
            nlamb=2,
            use_non_parallelism=use_non_parallelism,
            return_phidtheta=False,
            return_xixj=False,
            strict=False,
        )
        lambok = np.zeros((lamb.size, pts.shape[1]), dtype=bool)
        for ii, ll in enumerate(lamb):
            lambok[ii, :] = (
                (lamb_access[:, 0] <= ll) & (ll <= lamb_access[:, 1])
            )
        # Keep only points accessible for at least one wavelength
        indok = np.any(lambok, axis=0)
        pts = pts[:, indok]
        ptsXYZ = ptsXYZ[:, indok]
        lambok = lambok[:, indok]
        if strict is True:
            # NOTE(review): dict(det) raises TypeError if det is None;
            # presumably strict=True requires a detector — confirm upstream
            detbis = dict(det)
            if xixj_lim is not None:
                # Restrict the detector outline to the requested (xi, xj) box
                detbis['outline'] = np.array([
                    np.r_[
                        xixj_lim[0][0],
                        xixj_lim[0][1]*np.r_[1, 1],
                        xixj_lim[0][0],
                    ],
                    np.r_[
                        xixj_lim[1][0]*np.r_[1, 1],
                        xixj_lim[1][1]*np.r_[1, 1],
                    ],
                ])
                # Close the outline polygon
                detbis['outline'] = np.concatenate(
                    (detbis['outline'], detbis['outline'][:, 0:1]),
                    axis=1,
                )
            for kk, ll in enumerate(lamb):
                # Refine: nan means the point misses the detector at lamb ll
                lambi = _comp_optics._get_lamb_avail_from_pts_phidtheta_xixj(
                    cryst=self,
                    lamb=np.full((lambok[kk, :].sum(), 1), ll),
                    n=n,
                    ndtheta=ndtheta,
                    pts=ptsXYZ[:, lambok[kk, :]],
                    use_non_parallelism=use_non_parallelism,
                    return_phidtheta=False,
                    return_xixj=False,
                    strict=strict,
                    det=detbis,
                )
                lambok[kk, lambok[kk, :]] = ~np.isnan(lambi[:, 0])
        if plot:
            dax = _plot_optics.CrystalBragg_plot_plasma_domain_at_lamb(
                cryst=self,
                det=det,
                xixj_lim=xixj_lim,
                config=config,
                lamb=lamb,
                pts=pts,
                reseff=[resR, resZ, resPhi],
                lambok=lambok,
                dax=dax,
                plot_as=plot_as,
                lcolor=lcolor,
            )
        if return_dax is True:
            return pts, lambok, dax
        else:
            return pts, lambok
    def calc_signal_from_emissivity(
        self,
        emis=None,
        config=None,
        struct=None,
        domain=None,
        res=None,
        det=None,
        xixj_lim=None,
        strict=None,
        bragg=None,
        lamb=None,
        binning=None,
        ndtheta=None,
        nlamb=None,
        n=None,
        use_non_parallelism=None,
        plot=None,
        vmin=None,
        vmax=None,
        vmin_bin=None,
        vmax_bin=None,
        cmap=None,
        dax=None,
        fs=None,
        dmargin=None,
        tit=None,
        return_dax=None,
    ):
        """Project a plasma emissivity onto the detector at given wavelengths.

        Samples the vessel volume, finds the detector pixels (xi, xj) each
        point illuminates at each lamb, evaluates the emissivity callable
        emis(r, z, phi, lamb, t) there, and optionally bins the result on the
        detector. Returns (pts, val, xi, xj, binned[, dax]).
        """
        (
            struct, lamb, binning,
        ) = _check_optics._check_calc_signal_from_emissivity(
            emis=emis, config=config, struct=struct,
            lamb=lamb, det=det, binning=binning,
        )
        bragg = self._checkformat_bragglamb(bragg=bragg, lamb=lamb, n=n)
        lamb = self.get_lamb_from_bragg(bragg=bragg, n=n)
        if ndtheta is None:
            ndtheta = 5
        if nlamb is None:
            nlamb = 11
        if strict is None:
            strict = True
        if plot is None:
            plot = True
        if return_dax is None:
            return_dax = plot is True
        # Sample the vessel volume in (R, Z, Phi)
        (
            pts, dV, ind, (resR, resZ, resPhi),
        ) = config.dStruct['dObj']['Ves'][struct].get_sampleV(
            res=res,
            domain=domain,
            returnas='(R, Z, Phi)',
        )
        # Cartesian coordinates for the optics computations
        ptsXYZ = np.array([
            pts[0, :]*np.cos(pts[2, :]),
            pts[0, :]*np.sin(pts[2, :]),
            pts[1, :],
        ])
        # [lambmin, lambmax] accessible from each point (nlamb=2)
        lamb_access = self.get_lamb_avail_from_pts(
            pts=ptsXYZ,
            nlamb=2,
            use_non_parallelism=use_non_parallelism,
            return_phidtheta=False,
            return_xixj=False,
            strict=False,
        )
        lambok = np.zeros((lamb.size, pts.shape[1]), dtype=bool)
        for ii, ll in enumerate(lamb):
            lambok[ii, :] = (
                (lamb_access[:, 0] <= ll) & (ll <= lamb_access[:, 1])
            )
        # Keep only points accessible for at least one wavelength
        indok = np.any(lambok, axis=0)
        pts = pts[:, indok]
        ptsXYZ = ptsXYZ[:, indok]
        lambok = lambok[:, indok]
        detbis = dict(det)
        if xixj_lim is not None:
            # Restrict the detector outline to the requested (xi, xj) box
            detbis['outline'] = np.array([
                np.r_[
                    xixj_lim[0][0],
                    xixj_lim[0][1]*np.r_[1, 1],
                    xixj_lim[0][0],
                ],
                np.r_[
                    xixj_lim[1][0]*np.r_[1, 1],
                    xixj_lim[1][1]*np.r_[1, 1],
                ],
            ])
            # Close the outline polygon
            detbis['outline'] = np.concatenate(
                (detbis['outline'], detbis['outline'][:, 0:1]),
                axis=1,
            )
        # (npts, nlamb, ndtheta, 2): 2 geometric solutions per point
        shape = tuple(np.r_[pts.shape[1], lamb.size, ndtheta, 2])
        xi = np.full(shape, np.nan)
        xj = np.full(shape, np.nan)
        val = np.full(shape, np.nan)
        for kk, ll in enumerate(lamb):
            (
                lambi, xii, xji,
            ) = _comp_optics._get_lamb_avail_from_pts_phidtheta_xixj(
                cryst=self,
                lamb=np.full((lambok[kk, :].sum(), 1), ll),
                n=n,
                ndtheta=ndtheta,
                pts=ptsXYZ[:, lambok[kk, :]],
                use_non_parallelism=use_non_parallelism,
                return_phidtheta=False,
                return_xixj=True,
                strict=True,
                det=detbis,
            )
            # nan => the point does not reach the detector at this lamb
            iok = ~np.isnan(lambi[:, 0])
            iokf = lambok[kk, :].nonzero()[0][iok]
            lambok[kk, lambok[kk, :]] = iok
            xi[iokf, kk, :, :] = xii[iok, 0, :, :]
            xj[iokf, kk, :, :] = xji[iok, 0, :, :]
            # Evaluate emissivity at the contributing points, broadcast
            # over the (ndtheta, 2) solution axes
            val[iokf, kk, :, :] = emis(
                r=pts[0, iokf],
                z=pts[1, iokf],
                phi=pts[2, iokf],
                lamb=lamb[kk:kk+1],
                t=None,
            )[:, 0, None, None]
        binned = None
        if binning is not False:
            # Bin the signal on the detector plane (mean per (xi, xj) bin)
            iok = np.isfinite(val)
            binned = scpstats.binned_statistic_2d(
                xi[iok].ravel(),
                xj[iok].ravel(),
                val[iok].ravel(),
                statistic='mean',
                bins=binning,
                expand_binnumbers=False,
            )[0]
        if plot:
            dax = _plot_optics.CrystalBragg_plot_signal_from_emissivity(
                cryst=self,
                det=det,
                xixj_lim=xixj_lim,
                config=config,
                lamb=lamb,
                pts=pts,
                reseff=[resR, resZ, resPhi],
                xi=xi,
                xj=xj,
                val=val,
                lambok=lambok,
                binning=binning,
                binned=binned,
                vmin=vmin,
                vmax=vmax,
                vmin_bin=vmin_bin,
                vmax_bin=vmax_bin,
                cmap=cmap,
                dax=dax,
                fs=fs,
                dmargin=dmargin,
                tit=tit,
            )
        if return_dax is True:
            return pts, val, xi, xj, binned, dax
        else:
            return pts, val, xi, xj, binned
@staticmethod
def fit1d_dinput(
dlines=None, dconstraints=None, dprepare=None,
data=None, lamb=None,
mask=None, domain=None, pos=None, subset=None,
same_spectrum=None, same_spectrum_dlamb=None,
focus=None, valid_fraction=None, valid_nsigma=None,
focus_half_width=None, valid_return_fract=None,
):
import tofu.spectro._fit12d as _fit12d
return _fit12d.fit1d_dinput(
dlines=dlines, dconstraints=dconstraints, dprepare=dprepare,
data=data, lamb=lamb,
mask=mask, domain=domain, pos=pos, subset=subset,
same_spectrum=same_spectrum,
same_spectrum_dlamb=same_spectrum_dlamb,
focus=focus, valid_fraction=valid_fraction,
valid_nsigma=valid_nsigma, focus_half_width=focus_half_width,
valid_return_fract=valid_return_fract)
def fit1d(
self,
data=None, lamb=None,
dinput=None, dprepare=None, dlines=None, dconstraints=None,
mask=None, domain=None, subset=None, pos=None,
same_spectrum=None, same_spectrum_dlamb=None,
focus=None, valid_fraction=None, valid_nsigma=None,
focus_half_width=None,
dx0=None, dscales=None, x0_scale=None, bounds_scale=None,
method=None, tr_solver=None, tr_options=None, max_nfev=None,
xtol=None, ftol=None, gtol=None,
loss=None, verbose=None, chain=None, jac=None, showonly=None,
amp=None, coefs=None, ratio=None,
Ti=None, width=None, vi=None, shift=None,
pts_lamb_total=None, pts_lamb_detail=None,
save=None, name=None, path=None,
plot=None, fs=None, dmargin=None,
tit=None, wintit=None, returnas=None,
):
if dinput is None:
dinput = self.fit1d_dinput(
dlines=dlines, dconstraints=dconstraints, dprepare=dprepare,
data=data, lamb=lamb,
mask=mask, domain=domain, pos=pos, subset=subset,
focus=focus, valid_fraction=valid_fraction,
valid_nsigma=valid_nsigma, focus_half_width=focus_half_width,
same_spectrum=same_spectrum,
same_spectrum_dlamb=same_spectrum_dlamb)
import tofu.spectro._fit12d as _fit12d
return _fit12d.fit1d(
data=data, lamb=lamb,
dinput=dinput, dprepare=dprepare,
dlines=dlines, dconstraints=dconstraints,
mask=mask, domain=domain, subset=subset, pos=pos,
method=method, tr_solver=tr_solver, tr_options=tr_options,
xtol=xtol, ftol=ftol, gtol=gtol,
max_nfev=max_nfev, loss=loss, chain=chain,
dx0=dx0, x0_scale=x0_scale, bounds_scale=bounds_scale,
jac=jac, verbose=verbose,
save=save, name=name, path=path,
amp=amp, coefs=coefs, ratio=ratio,
Ti=Ti, width=width, vi=vi, shift=shift,
pts_lamb_total=pts_lamb_total,
pts_lamb_detail=pts_lamb_detail,
plot=plot, fs=fs, wintit=wintit, tit=tit)
@staticmethod
def fit1d_extract(
dfit1d=None,
amp=None, coefs=None, ratio=None,
Ti=None, width=None,
vi=None, shift=None,
pts_lamb_total=None, pts_lamb_detail=None,
):
import tofu.spectro._fit12d as _fit12d
return _fit12d.fit1d_extract(
dfit1d=dfit,
amp=amp, coefs=coefs, ratio=ratio,
Ti=Ti, width=width,
vi=vi, shift=shift,
pts_lamb_total=pts_lamb_total, pts_lamb_detail=pts_lamb_detail)
    def fit1d_from2d(self):
        """Run a 1d fit built from a 2d-prepared dataset.

        NOTE(review): this method is an unfinished stub — it references many
        names that are neither parameters nor locals (lphi, dprepare, data,
        xi, xj, n, det, dtheta, psi, mask, domain, pos, binning, lphi_tol,
        dinput, dlines, dconstraints, deg, knots, nbsplines), so any call
        raises NameError immediately. Left untouched pending the intended
        signature; confirm with the author before completing or removing.
        """
        # NameError: lphi is never defined in this scope
        if lphi is None:
            msg = ("Arg lphi must be provided !")
            raise Exception(msg)
        if dprepare is None:
            dprepare = self.fit2d_prepare(
                data=data, xi=xi, xj=xj, n=n,
                det=det, dtheta=dtheta, psi=psi,
                mask=mask, domain=domain,
                pos=pos, binning=binning,
                nbsplines=False, subset=False,
                lphi=lphi, lphi_tol=lphi_tol)
        if dinput is None:
            dinput = self.fit2d_dinput(
                dlines=dlines, dconstraints=dconstraints,
                deg=deg, knots=knots, nbsplines=nbsplines,
                domain=dprepare['domain'],
                dataphi1d=dprepare['dataphi1d'], phi1d=dprepare['phi1d'])
        # NOTE(review): 'out' is computed but never returned
        out = self.fit1d(
            xi=None, xj=None, data=None, mask=None,
            det=None, dtheta=None, psi=None, n=None,
            nlambfit=None, nphifit=None,
            lambmin=None, lambmax=None,
            dlines=None, spect1d=None,
            dconstraints=None, dx0=None,
            same_spectrum=None, dlamb=None,
            double=None,
            dscales=None, x0_scale=None, bounds_scale=None,
            method=None, max_nfev=None,
            xtol=None, ftol=None, gtol=None,
            loss=None, verbose=0, chain=None,
            jac=None, showonly=None,
            plot=None, fs=None, dmargin=None,
            tit=None, wintit=None, returnas=None,
        )
        pass
def fit2d_dinput(
self, dlines=None, dconstraints=None, dprepare=None,
data=None, xi=None, xj=None, n=None,
det=None, dtheta=None, psi=None,
mask=None, domain=None, pos=None, binning=None, subset=None,
deg=None, knots=None, nbsplines=None,
focus=None, valid_fraction=None, valid_nsigma=None,
focus_half_width=None, valid_return_fract=None,
):
import tofu.spectro._fit12d as _fit12d
if dprepare is None:
xi, xj, (xii, xjj) = _comp_optics._checkformat_xixj(xi, xj)
nxi = xi.size if xi is not None else np.unique(xii).size
nxj = xj.size if xj is not None else np.unique(xjj).size
bragg, phi, lamb = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
xi=xii, xj=xjj, det=det,
dtheta=dtheta, psi=psi,
use_non_parallelism=use_non_parallelism,
n=n,
grid=True,
return_lamb=True,
)
dprepare = _fit12d.multigausfit2d_from_dlines_prepare(
data, lamb, phi,
mask=mask, domain=domain,
pos=pos, binning=binning,
nbsplines=nbsplines, subset=subset,
nxi=nxi, nxj=nxj,
)
return _fit12d.fit2d_dinput(
dlines=dlines, dconstraints=dconstraints, dprepare=dprepare,
deg=deg, knots=knots, nbsplines=nbsplines,
focus=focus, valid_fraction=valid_fraction,
valid_nsigma=valid_nsigma, focus_half_width=focus_half_width,
valid_return_fract=valid_return_fract)
    def fit2d(
        self,
        data=None, xi=None, xj=None,
        det=None, dtheta=None, psi=None, n=None,
        dinput=None, dprepare=None, dlines=None, dconstraints=None,
        mask=None, domain=None, subset=None, pos=None, binning=None,
        focus=None, valid_fraction=None, valid_nsigma=None,
        focus_half_width=None,
        deg=None, knots=None, nbsplines=None,
        dx0=None, dscales=None, x0_scale=None, bounds_scale=None,
        method=None, tr_solver=None, tr_options=None, max_nfev=None,
        xtol=None, ftol=None, gtol=None,
        loss=None, verbose=None, chain=None, jac=None, showonly=None,
        predeclare=None, debug=None,
        amp=None, coefs=None, ratio=None,
        Ti=None, width=None, vi=None, shift=None,
        pts_lamb_total=None, pts_lamb_detail=None,
        save=None, name=None, path=None,
        plot=None, fs=None, dmargin=None,
        tit=None, wintit=None, returnas=None,
    ):
        """Run a 2d spectral fit, delegating to tofu.spectro._fit12d.fit2d().

        Builds a default dinput from the camera geometry when not provided.

        NOTE(review): the delegation below forwards 'lamb' and 'phi', which
        are never defined in this scope — any call that reaches it raises
        NameError. They presumably should come from dinput['dprepare']
        (which is accessed for 'nxi'/'nxj' two lines later) — confirm with
        the author before fixing.
        """
        if dinput is None:
            dinput = self.fit2d_dinput(
                dlines=dlines, dconstraints=dconstraints, dprepare=dprepare,
                data=data, xi=xi, xj=xj, n=n,
                det=det, dtheta=dtheta, psi=psi,
                mask=mask, domain=domain,
                pos=pos, binning=binning, subset=subset,
                deg=deg, knots=knots, nbsplines=nbsplines,
                focus=focus, valid_fraction=valid_fraction,
                valid_nsigma=valid_nsigma, focus_half_width=focus_half_width)
        import tofu.spectro._fit12d as _fit12d
        return _fit12d.fit2d(
            dinput=dinput, dprepare=dprepare,
            dlines=dlines, dconstraints=dconstraints,
            lamb=lamb, phi=phi, data=data, mask=mask,
            nxi=dinput['dprepare']['nxi'], nxj=dinput['dprepare']['nxj'],
            domain=domain, pos=pos, binning=binning, subset=subset,
            deg=deg, knots=knots, nbsplines=nbsplines,
            method=method, tr_solver=tr_solver, tr_options=tr_options,
            xtol=xtol, ftol=ftol, gtol=gtol,
            max_nfev=max_nfev, loss=loss, chain=chain,
            dx0=dx0, x0_scale=x0_scale, bounds_scale=bounds_scale,
            jac=jac, verbose=verbose,
            save=save, name=name, path=path,
            plot=plot)
@staticmethod
def fit2d_extract(dfit2d=None,
amp=None, Ti=None, vi=None,
pts_phi=None, npts_phi=None,
pts_lamb_phi_total=None,
pts_lamb_phi_detail=None):
import tofu.spectro._fit12d as _fit12d
return _fit12d.fit2d_extract_data(
dfit2d=dfit2d,
amp=amp, Ti=Ti, vi=vi,
pts_phi=pts_phi, npts_phi=npts_phi,
pts_lamb_phi_total=pts_lamb_phi_total,
pts_lamb_phi_detail=pts_lamb_phi_detail)
def fit2d_plot(self, dfit2d=None, ratio=None,
dax=None, plotmode=None, angunits=None,
cmap=None, vmin=None, vmax=None,
dmargin=None, tit=None, wintit=None, fs=None):
dout = self.fit2d_extract(
dfit2d,
amp=amp, Ti=Ti, vi=vi,
pts_lamb_phi_total=pts_lamb_phi_total,
pts_lamb_phi_detail=pts_lamb_phi_detail)
return _plot_optics.CrystalBragg_plot_data_fit2d(
dfit2d=dfit2d, dout=dout, ratio=ratio,
dax=dax, plotmode=plotmode, angunits=angunits,
cmap=cmap, vmin=vmin, vmax=vmax,
dmargin=dmargin, tit=tit, wintit=wintit, fs=fs)
def noise_analysis(
self, data=None, xi=None, xj=None, n=None,
det=None, dtheta=None, psi=None,
mask=None, valid_fraction=None, nxerrbin=None,
margin=None, domain=None, nlamb=None,
deg=None, knots=None, nbsplines=None,
loss=None, max_nfev=None,
xtol=None, ftol=None, gtol=None,
method=None, tr_solver=None, tr_options=None,
verbose=None, plot=None,
ms=None, dcolor=None,
dax=None, fs=None, dmargin=None,
wintit=None, tit=None, sublab=None,
save_fig=None, name_fig=None, path_fig=None,
fmt=None, return_dax=None,
):
bragg, phi, lamb = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
xi=xi, xj=xj, det=det,
dtheta=dtheta, psi=psi,
use_non_parallelism=use_non_parallelism,
n=n,
grid=True,
return_lamb=True,
)
import tofu.spectro._fit12d as _fit12d
return _fit12d.noise_analysis_2d(
data, lamb, phi,
mask=mask, valid_fraction=valid_fraction,
margin=margin, nxerrbin=nxerrbin,
nlamb=nlamb, deg=deg, knots=knots, nbsplines=nbsplines,
loss=loss, max_nfev=max_nfev,
xtol=xtol, ftol=ftol, gtol=gtol,
method=method, tr_solver=tr_solver, tr_options=tr_options,
verbose=verbose, plot=plot,
ms=ms, dcolor=dcolor,
dax=dax, fs=fs, dmargin=dmargin,
wintit=wintit, tit=tit, sublab=sublab,
save_fig=save_fig, name_fig=name_fig, path_fig=path_fig,
fmt=fmt, return_dax=return_dax)
@staticmethod
def noise_analysis_plot(
dnoise=None, margin=None, valid_fraction=None,
ms=None, dcolor=None,
dax=None, fs=None, dmargin=None,
wintit=None, tit=None, sublab=None,
save=None, name=None, path=None, fmt=None,
):
import tofu.spectro._plot as _plot_spectro
return _plot_spectro.plot_noise_analysis(
dnoise=dnoise, margin=margin, valid_fraction=valid_fraction,
ms=ms, dcolor=dcolor,
dax=dax, fs=fs, dmargin=dmargin,
wintit=wintit, tit=tit, sublab=sublab,
save=save, name=name, path=path, fmt=fmt)
def noise_analysis_scannbs(
self, data=None, xi=None, xj=None, n=None,
det=None, dtheta=None, psi=None,
mask=None, nxerrbin=None,
domain=None, nlamb=None,
deg=None, knots=None, nbsplines=None, lnbsplines=None,
loss=None, max_nfev=None,
xtol=None, ftol=None, gtol=None,
method=None, tr_solver=None, tr_options=None,
verbose=None, plot=None,
ms=None, dax=None, fs=None, dmargin=None,
wintit=None, tit=None, sublab=None,
save_fig=None, name_fig=None, path_fig=None,
fmt=None, return_dax=None,
):
bragg, phi, lamb = self.get_lambbraggphi_from_ptsxixj_dthetapsi(
xi=xi, xj=xj, det=det,
dtheta=0, psi=0,
use_non_parallelism=use_non_parallelism,
n=n,
grid=True,
return_lamb=True,
)
import tofu.spectro._fit12d as _fit12d
return _fit12d.noise_analysis_2d_scannbs(
data, lamb, phi,
mask=mask, nxerrbin=nxerrbin, nlamb=nlamb,
deg=deg, knots=knots, nbsplines=nbsplines, lnbsplines=lnbsplines,
loss=loss, max_nfev=max_nfev,
xtol=xtol, ftol=ftol, gtol=gtol,
method=method, tr_solver=tr_solver, tr_options=tr_options,
verbose=verbose, plot=plot,
ms=ms, dax=dax, fs=fs, dmargin=dmargin,
wintit=wintit, tit=tit, sublab=sublab,
save_fig=save_fig, name_fig=name_fig, path_fig=path_fig,
fmt=fmt, return_dax=return_dax)
@staticmethod
def noise_analysis_scannbs_plot(
dnoise_scan=None, ms=None,
dax=None, fs=None, dmargin=None,
wintit=None, tit=None, sublab=None,
save=None, name=None, path=None, fmt=None,
):
import tofu.spectro._plot as _plot_spectro
return _plot_spectro.plot_noise_analysis_scannbs(
dnoise=dnoise_scan, ms=ms,
dax=dax, fs=fs, dmargin=dmargin,
wintit=wintit, tit=tit, sublab=sublab,
save=save, name=name, path=path, fmt=fmt)
| true | true |
f71a0da9d68a3d4c9024e6fcb718688385715211 | 83 | py | Python | buttonlist/src/buttonlist/__main__.py | pmfrank/beeware-tutorials | 96274b0a735bd468e946111baf441a527ff0b0d5 | [
"BSD-2-Clause"
] | 1 | 2021-06-04T05:51:39.000Z | 2021-06-04T05:51:39.000Z | buttonlist/src/buttonlist/__main__.py | pmfrank/beeware-tutorials | 96274b0a735bd468e946111baf441a527ff0b0d5 | [
"BSD-2-Clause"
] | null | null | null | buttonlist/src/buttonlist/__main__.py | pmfrank/beeware-tutorials | 96274b0a735bd468e946111baf441a527ff0b0d5 | [
"BSD-2-Clause"
] | null | null | null | from buttonlist.app import main
if __name__ == '__main__':
main().main_loop()
| 16.6 | 31 | 0.698795 | from buttonlist.app import main
if __name__ == '__main__':
main().main_loop()
| true | true |
f71a0f4dbef3bd901ce744bc93811b52faddf399 | 34,662 | py | Python | anuvaad-etl/anuvaad-extractor/document-processor/evaluator/evaluator_string/src/notebooks/tesseract_ocr_evaluation_local.py | srihari-nagaraj/anuvaad | b09b01a033a033e97db6e404c088e0e6332053e4 | [
"MIT"
] | null | null | null | anuvaad-etl/anuvaad-extractor/document-processor/evaluator/evaluator_string/src/notebooks/tesseract_ocr_evaluation_local.py | srihari-nagaraj/anuvaad | b09b01a033a033e97db6e404c088e0e6332053e4 | [
"MIT"
] | null | null | null | anuvaad-etl/anuvaad-extractor/document-processor/evaluator/evaluator_string/src/notebooks/tesseract_ocr_evaluation_local.py | srihari-nagaraj/anuvaad | b09b01a033a033e97db6e404c088e0e6332053e4 | [
"MIT"
] | null | null | null | import glob
import uuid
import json
import requests
import copy,time
import os
import cv2
import numpy as np
from time import sleep
import pandas as pd
import logging
from collections import Counter
import pytesseract
from pytesseract import Output
#from pytesseract import pytesseract
from difflib import SequenceMatcher
from io import StringIO
from dynamic_adjustment import coord_adjustment
import ast
from leven import levenshtein
from horizontal_merging import horzontal_merging
# --- OCR evaluation configuration -------------------------------------------
# Granularity at which OCR text is compared ("LINE" here; "WORD" presumably
# also supported downstream — confirm against the evaluation code).
ocr_level = "LINE"
text_processing = True
# Regions with fewer characters/tokens than this are rejected from evaluation
REJECT_FILTER = 2
# Padding (in px) applied around crops before sending them to tesseract
crop_factor= 5
crop_factor_y= 0
crop_save = True
digitization = True
# Score threshold used when visualising OCR confidence
vis_thresh=0.90
# Maps ISO language codes to tesseract script/language model names
LANG_MAPPING = {
"en" : ["Latin","eng"],
"kn" : ['Kannada',"kan"],
"gu": ["guj"],
"or": ["ori"],
"hi" : ["Devanagari","hin","eng"],
"bn" : ["Bengali","ben"],
"mr": ["Devanagari","hin","eng"],
"ta": ['Tamil',"tam"],
"te" : ["Telugu","tel"],
"ml" :["Malayalam"],
"ma" :["Marathi"]
}
# Local working directories (machine-specific; adjust per environment)
path = '/home/naresh/Tarento/testing_document_processor/test_pipeline/data/'
output_path = '/home/naresh/Tarento/testing_document_processor/test_pipeline/result/'
output_path_boxes= '/home/naresh/Tarento/testing_document_processor/test_word_boxes/'
base_path= '/home/naresh/Tarento/testing_document_processor/test_word_boxes/'
# Tesseract page-segmentation modes tried during evaluation
psms = [6,7,8,9,10,11]
# NOTE(security/review): hardcoded JWT auth token committed to source —
# should be moved to an environment variable or secrets store and rotated.
token = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VyTmFtZSI6ImRoaXJhai5kYWdhQHRhcmVudG8uY29tIiwicGFzc3dvcmQiOiJiJyQyYiQxMiRuTXdNcHpCVlBXVVUvSlVLWXBKYWkuQUd2SUNJalJVcUdIbnBPenRzai5VRU55emlSZmk1TyciLCJleHAiOjE2MTk3Njg2NjN9.14IL5_kw83F5gxjUMSw6kCDLYQhjAg306AwJj0DsxWc'
# Anuvaad service endpoints
word_url = "https://auth.anuvaad.org/anuvaad-etl/wf-manager/v1/workflow/async/initiate"
google_url = "https://auth.anuvaad.org/anuvaad-etl/wf-manager/v1/workflow/async/initiate"
layout_url = "https://auth.anuvaad.org/anuvaad-etl/wf-manager/v1/workflow/async/initiate"
segmenter_url = "https://auth.anuvaad.org/anuvaad-etl/wf-manager/v1/workflow/async/initiate"
bs_url ="https://auth.anuvaad.org/anuvaad-etl/wf-manager/v1/workflow/jobs/search/bulk"
evaluator_url = "https://auth.anuvaad.org/anuvaad-etl/document-processor/evaluator/v0/process"
download_url ="https://auth.anuvaad.org/download/"
upload_url = 'https://auth.anuvaad.org/anuvaad-api/file-uploader/v0/upload-file'
# Default headers for every API call
headers = {
    'auth-token' :token }
class Draw:
def __init__(self,input_json,save_dir,regions,prefix='',color= (255,0,0),thickness=5):
self.json = input_json
self.save_dir = save_dir
self.regions = regions
self.prefix = prefix
self.color = color
self.thickness=thickness
if self.prefix == 'seg':
#print('drawing children')
self.draw_region_children()
else:
self.draw_region__sub_children()
def get_coords(self,page_index):
return self.json['outputs'][0]['pages'][page_index][self.regions]
def get_page_count(self):
return(self.json['outputs'][0]['page_info'])
    def get_page(self,page_index):
        """Download and return the raw bytes of the page image."""
        page_path = self.json['outputs'][0]['page_info'][page_index]
        # Keep only the path after 'upload' to build the download-endpoint path
        page_path = page_path.split('upload')[1]
        return download_file(download_url,headers,page_path,f_type='image')
    def draw_region(self):
        """Draw each region's polygon and class label on every page image.

        Writes one annotated PNG per page into save_dir
        ('{regions}_{prefix}_{page}.png').
        """
        font = cv2.FONT_HERSHEY_SIMPLEX
        for page_index in range(len(self.get_page_count())) :
            # Decode the downloaded image bytes into a BGR array
            nparr = np.frombuffer(self.get_page(page_index), np.uint8)
            image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
            for region in self.get_coords(page_index) :
                ground = region['boundingBox']['vertices']
                pts = []
                for pt in ground:
                    pts.append([int(pt['x']) ,int(pt['y'])])
                cv2.polylines(image, [np.array(pts)],True, self.color, self.thickness)
                # Regions without an explicit class default to TEXT
                if 'class' not in region.keys():
                    region['class'] = 'TEXT'
                cv2.putText(image, str(region['class']), (pts[0][0],pts[0][1]), font, 
                   2, (0,125,255), 3, cv2.LINE_AA)
            image_path = os.path.join(self.save_dir , '{}_{}_{}.png'.format(self.regions,self.prefix,page_index))
            cv2.imwrite(image_path , image)
def draw_region_children(self):
font = cv2.FONT_HERSHEY_SIMPLEX
fontScale = 2
thickness =3
for page_index in range(len(self.get_page_count())) :
nparr = np.frombuffer(self.get_page(page_index), np.uint8)
image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
for region_index,region in enumerate(self.get_coords(page_index)) :
try:
ground = region['boundingBox']['vertices']
pts = []
for pt in ground:
pts.append([int(pt['x']) ,int(pt['y'])])
#print(pts)
region_color = (0 ,0,125+ 130*(region_index/ len(self.get_coords(page_index))))
cv2.polylines(image, [np.array(pts)],True, region_color, self.thickness)
cv2.putText(image, str(region_index), (pts[0][0],pts[0][1]), font,
fontScale, region_color, thickness, cv2.LINE_AA)
for line_index, line in enumerate(region['children']):
ground = line['boundingBox']['vertices']
pts = []
for pt in ground:
pts.append([int(pt['x']) ,int(pt['y'])])
line_color = (125 + 130*(region_index/ len(self.get_coords(page_index))) ,0,0)
cv2.polylines(image, [np.array(pts)],True, line_color, self.thickness -2)
cv2.putText(image, str(line_index), (pts[0][0],pts[0][1]), font,
fontScale, line_color, thickness, cv2.LINE_AA)
except Exception as e:
print(str(e))
print(region)
image_path = os.path.join(self.save_dir , '{}_{}.png'.format(self.prefix,page_index))
cv2.imwrite(image_path , image)
def draw_region__sub_children(self):
for page_index in range(len(self.get_page_count())) :
nparr = np.frombuffer(self.get_page(page_index), np.uint8)
image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
font = cv2.FONT_HERSHEY_SIMPLEX
fontScale = 2
# Blue color in BGR
color = (0 ,255,0)
# Line thickness of 2 px
thickness = 3
# Using cv2.putText() method
for region_index,region in enumerate(self.get_coords(page_index)) :
try:
ground = region['boundingBox']['vertices']
pts = []
for pt in ground:
pts.append([int(pt['x']) ,int(pt['y'])])
#print(pts)
region_color = (0,0,255)
cv2.polylines(image, [np.array(pts)],True, region_color, self.thickness)
for line_index, line in enumerate(region['regions']):
ground = line['boundingBox']['vertices']
pts = []
for pt in ground:
pts.append([int(pt['x'])-1 ,int(pt['y']) -1 ])
line_color = (255,0,0)
cv2.polylines(image, [np.array(pts)],True, line_color, self.thickness -2)
cv2.putText(image, str(line_index), (pts[0][0],pts[0][1]), font,
fontScale, (255,0,0), thickness, cv2.LINE_AA)
for word_index, word in enumerate(line['regions']):
ground = word['boundingBox']['vertices']
pts = []
for pt in ground:
pts.append([int(pt['x']) -3,int(pt['y'])-3])
word_color = (0,255,0)
cv2.polylines(image, [np.array(pts)],True, word_color, self.thickness -2)
cv2.putText(image, str(word_index), (pts[0][0],pts[0][1]), font,
fontScale-1,(0,255,0), thickness, cv2.LINE_AA)
except Exception as e:
print(str(e))
print(region)
#print(self.prefix)
image_path = os.path.join(self.save_dir , '{}_{}_{}.png'.format(self.prefix,self.regions,page_index))
cv2.imwrite(image_path , image)
# # google vision pipeline
def google_ocr_v15(url,headers,pdf_name):
    """Kick off the Google-Vision OCR workflow (WF_A_FCWDLDBSOD15GV) for an
    already-uploaded PDF and return the initiate-response JSON."""
    ocr_config = {
        "OCR": {
            "option": "HIGH_ACCURACY",
            "language": "hi",
            "top_correction":"True",
            "craft_word": "True",
            "craft_line": "True",
        }
    }
    payload = {
        "files": [
            {
                "locale": "hi",
                "path": pdf_name,
                "type": "pdf",
                "config": ocr_config,
            }
        ],
        "workflowCode": "WF_A_FCWDLDBSOD15GV",
    }
    return requests.post(url, json=payload, headers=headers).json()
def upload_file(pdf_file,headers,url):
    """Upload a local file to the Anuvaad file-uploader.

    Returns the response JSON; the server-side path is under its 'data' key.

    Fix over the original: the file was opened without ever being closed,
    leaking a file handle per upload — the handle is now managed with `with`.
    """
    with open(pdf_file, 'rb') as fh:
        files = [('file', fh)]
        response = requests.post(url, headers=headers, files=files)
    return response.json()
def download_file(download_url,headers,outputfile,f_type='json'):
    """Fetch `outputfile` from the download endpoint.

    Returns parsed JSON when f_type == 'json'; otherwise the raw response
    bytes (used for page images).
    """
    full_url = download_url + str(outputfile)
    resp = requests.get(full_url, headers=headers)
    return resp.json() if f_type == 'json' else resp.content
def save_json(path,res):
    """Serialize `res` to `path` as UTF-8 JSON, keeping non-ASCII text as-is."""
    with open(path, mode="w", encoding='utf8') as out_handle:
        json.dump(res, out_handle, ensure_ascii=False)
def bulk_search(job_id,bs_url,headers):
    """Poll the bulk job-search endpoint every 0.5 s until `job_id` reaches
    COMPLETED, then return its output file path.

    Fixes over the original: the unreachable `break` that followed the
    `return` was removed, and a job that ends in a terminal failure state
    now raises RuntimeError instead of being polled forever.
    """
    bs_request = {
        "jobIDs": [job_id],
        "taskDetails":"true"
    }
    print(job_id)
    res = requests.post(bs_url,json=bs_request,headers=headers, timeout = 10000)
    print(res.json())
    while(1):
        in_progress = res.json()['jobs'][0]['status']
        if in_progress == 'COMPLETED':
            outputfile = res.json()['jobs'][0]['output'][0]['outputFile']
            print(in_progress)
            return outputfile
        if in_progress in ('FAILED', 'KILLED'):
            # Terminal states: further polling can never succeed.
            raise RuntimeError('job {} ended with status {}'.format(job_id, in_progress))
        sleep(0.5)
        print(in_progress)
        res = requests.post(bs_url,json=bs_request,headers=headers, timeout = 10000)
def execute_module(module,url,input_file,module_code,pdf_dir,overwirte=True , draw=True):
    """Run one pipeline module (or reuse its cached output) and return
    (workflow output-file path, response JSON).

    module:      callable(url, headers, input) that initiates the workflow.
    module_code: short tag used for cache filenames ('gv', 'wd', ...); codes
                 containing 'eval' are treated as synchronous (no polling).
    overwirte:   (sic) when False and <module_code>.json exists in pdf_dir,
                 the cached response and <module_code>_wf.json are loaded
                 instead of re-running the module.
    draw:        when True, renders boxes via Draw ('lines' for wd/gv,
                 'regions' otherwise).
    """
    output_path = os.path.join(pdf_dir,'{}.json'.format(module_code))
    if os.path.exists(output_path) and not overwirte:
        # Cache hit: reload both the module response and its workflow record.
        print(' loading *****************{}'.format(module_code ))
        with open(output_path,'r') as wd_file :
            response = json.load(wd_file)
        wf_res = pdf_dir + '/{}_wf.json'.format(module_code)
        with open(wf_res,'r') as wd_file :
            json_file = json.load(wd_file)
        #json_file = upload_file(output_path,headers,upload_url)['data']
    else :
        # 'wd'/'gv' modules take a server-side path, so upload the PDF first.
        if module_code in ['wd','gv']:
            res = upload_file(input_file,headers,upload_url)
            print('upload response **********', res)
            pdf_name = res['data']
            response = module(url,headers,pdf_name)
        else :
            response = module(url,headers,input_file)
        # Evaluator-style modules respond synchronously: download and return.
        if 'eval' in module_code :
            json_file = response['outputFile']
            response = download_file(download_url,headers,json_file)
            save_json(output_path,response)
            return json_file,response
        # Async modules: poll the job, then download and cache the result.
        print(' response *****************{} {}'.format(module_code ,response ))
        job_id = response['jobID']
        json_file = bulk_search(job_id,bs_url,headers)
        save_json(pdf_dir + '/{}_wf.json'.format(module_code),json_file)
        print('bulk search response **************',json_file )
        response = download_file(download_url,headers,json_file)
        save_json(output_path,response)
    if draw :
        if module_code in ['wd','gv']:
            Draw(response,pdf_dir,regions='lines',prefix=module_code)
        else :
            Draw(response,pdf_dir,regions='regions',prefix=module_code)
    return json_file,response
def evaluate__and_save_input(pdf_files,output_dir,headers,word_url,layout_url,download_url,upload_url,bs_url):
    """Run the Google-Vision OCR ('gv') module for every PDF in `pdf_files`,
    caching each response under output_dir/<pdf basename>/.

    Only `pdf_files`, `output_dir` and `word_url` are used here; the other
    parameters are kept for signature compatibility with existing callers.

    Fixes over the original: the never-used accumulator dicts/lists were
    removed, and the shell `mkdir -p "{}"` (fragile with quotes/metacharacters
    in paths) was replaced with os.makedirs.
    """
    for pdf in pdf_files:
        pdf_name = pdf.split('/')[-1].split('.')[0]
        print(pdf , ' is being processed')
        pdf_output_dir = os.path.join(output_dir,pdf_name)
        os.makedirs(pdf_output_dir, exist_ok=True)
        wd_json,_ = execute_module(google_ocr_v15,word_url,input_file=pdf,module_code='gv',pdf_dir=pdf_output_dir,overwirte=False , draw=False)
def main(path,headers,word_url,layout_url,download_url,upload_url,bs_url):
    """Digitize every *.pdf directly under `path`.

    NOTE(review): results are written under the module-level `output_path`,
    not under `path` — confirm that is intentional.
    """
    pdf_names = glob.glob(path + '/*.pdf')
    return evaluate__and_save_input(pdf_names,output_path,headers,word_url,layout_url,download_url,upload_url,bs_url)
# Stage 1: run the OCR workflow for every input PDF (skipped when the
# module-level `digitization` flag is False).
if digitization:
    main(path,headers,word_url,layout_url,download_url,upload_url,bs_url)
def bound_coordinate(corrdinate,max):
    """Clamp a coordinate into the page, returning it as an int.

    Negative values become 0; values above `max` become max - 2.
    (Parameter names kept for caller compatibility; note `max` shadows the
    builtin inside this function.)
    """
    clamped = corrdinate
    if clamped < 0:
        clamped = 0
    if clamped > max:
        clamped = max - 2
    return int(clamped)
def get_image_from_box(image, box, height=140):
    """Perspective-crop the quadrilateral `box` (4x2 array of corners,
    ordered top-left, top-right, bottom-right, bottom-left) out of `image`.

    The `height` argument is effectively ignored: both output dimensions
    are derived from the box itself.
    """
    out_w = max(abs(box[0, 0] - box[1, 0]), abs(box[2, 0] - box[3, 0]))
    out_h = max(abs(box[0, 1] - box[3, 1]), abs(box[1, 1] - box[2, 1]))
    src_quad = np.float32(box)
    dst_quad = np.float32([[0, 0], [int(out_w), 0], [int(out_w), int(out_h)], [0, int(out_h)]])
    warp = cv2.getPerspectiveTransform(src_quad, dst_quad)
    return cv2.warpPerspective(image, warp, (int(out_w), int(out_h)))
def process_dfs(temp_df):
    """Join the non-null `text` cells of a pytesseract dataframe into one
    (leading-space separated) string and collect per-word {'text','conf'}
    dicts. Returns (text, dicts)."""
    rows = temp_df[temp_df.text.notnull()]
    pieces = []
    word_dicts = []
    running_conf = 0
    for _, row in rows.iterrows():
        running_conf += row["conf"]  # accumulated but unused, as in the original
        word_dicts.append({"text": row['text'], "conf": row['conf']})
        pieces.append(" " + str(row['text']))
    return "".join(pieces), word_dicts
def process_dfs_updated(temp_df,language,psm_val,image):
    """Like process_dfs, but re-OCRs any word whose confidence is below 50.

    For each low-confidence word, its box is cropped out of `image` and
    re-run through pytesseract once per PSM in the module-level `psms` list;
    the highest-confidence hypothesis replaces the original text/conf.
    `psm_val` is not used in this function. Returns (joined text, list of
    {'text','conf'} dicts).
    """
    temp_df = temp_df[temp_df.text.notnull()]
    text = ""
    conf=0
    temp_dict1 = []
    if len(temp_df)>0:
        for index, row in temp_df.iterrows():
            temp_dict2 = {}
            org_conf = row["conf"]
            org_text = row['text']
            flag = True  # NOTE(review): never set False, so the debug prints below always fire
            if row["conf"]<50:
                print(row["top"],row["height"],row["left"],row["width"])
                # Crop the word's box out of the already-cropped line image.
                crop_image = image[ int(row["top"]):int(row["top"]+row["height"]), int(row["left"]):int(row["left"]+row["width"])]
                for psm in psms:
                    df2 = pytesseract.image_to_data(crop_image,config='--psm '+str(psm), lang=LANG_MAPPING[language][0],output_type=Output.DATAFRAME)
                    temp_df2 = df2[df2.text.notnull()]
                    if len(temp_df2)>0:
                        # Keep the best-confidence single-word hypothesis seen so far.
                        new_conf = temp_df2.iloc[0].conf
                        if org_conf<new_conf:
                            org_conf = new_conf
                            org_text = temp_df2.iloc[0].text
                        if flag:
                            print("old text", row['text'])
                            print("new text", org_text)
            conf = conf + org_conf
            temp_dict2["text"]=org_text
            temp_dict2["conf"]=org_conf
            text = text +" "+ str(org_text)
            temp_dict1.append(temp_dict2)
    return text,temp_dict1
def check_psm(path,coord,language,mode_height,save_base_path,psm_val,org_score,org_text,line_text,org_conf):
    """Retry OCR across every candidate PSM in `psms`, keeping whichever
    hypothesis scores best against `line_text`; stops early on a perfect
    match. Returns (best_text, best_conf, best_score)."""
    best_text, best_conf, best_score = org_text, org_conf, org_score
    for candidate_psm in psms:
        hyp_text, hyp_conf = get_text(path, coord, language, mode_height, save_base_path, candidate_psm)
        if text_processing:
            hyp_text = " ".join(hyp_text.split())
        hyp_score, message, match_count = seq_matcher(hyp_text, line_text)
        if hyp_score == 1.0 or hyp_score == 1:
            best_score, best_text, best_conf = hyp_score, hyp_text, hyp_conf
            break
        if hyp_score > best_score:
            best_score, best_text, best_conf = hyp_score, hyp_text, hyp_conf
    return best_text, best_conf, best_score
def get_text(path,coord,language,mode_height,save_base_path,psm_val):
    """Download the page image, perspective-crop the quadrilateral `coord`
    and OCR the crop with pytesseract.

    Crops taller than `mode_height` are OCRed with PSM 6 (uniform block of
    text); otherwise `psm_val` is used. When the module-level `crop_save`
    flag is set, each crop is also saved under `save_base_path` for debug.
    Returns (text, conf dicts) from process_dfs_updated().
    """
    #try:
    # Keep only the server path after 'upload' for the download endpoint.
    path = path.split('upload')[1]
    image = download_file(download_url,headers,path,f_type='image')
    nparr = np.frombuffer(image, np.uint8)
    image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
    #image = cv2.imread("/home/naresh/crop.jpeg",0)
    height, width,channel = image.shape
    # left = bound_coordinate(coord[0] , width)
    # top = bound_coordinate(coord[1],height )
    # right = bound_coordinate(coord[2] ,width)
    # bottom = bound_coordinate(coord[3], height)
    # region_width = abs(right-left)
    # region_height = abs(bottom-top)
    # if left==right==top==bottom==0 or region_width==0 or region_height==0:
    #     return ""
    # Perspective crop (handles skewed boxes, unlike the rectangular slice below).
    crop_image = get_image_from_box(image, coord, height=abs(coord[0,1]-coord[2,1]))
    #crop_image = image[ top:bottom, left:right]
    #crop_image_cv = image[ coord[0,1]:coord[2,1], coord[0,0]:coord[1,0]]
    save_path = save_base_path+"/"+"_psm_pers"+str(psm_val)+"--"+str(uuid.uuid4()) + '.jpg'
    if crop_save:
        cv2.imwrite(save_path,crop_image)
    #if abs(bottom-top) > 3*mode_height:
    #print(LANG_MAPPING[language][0])
    if abs(coord[1,1]-coord[2,1])>mode_height:
        #text = pytesseract.image_to_string(crop_image,config='--psm 6', lang=LANG_MAPPING[language][1])
        dfs = pytesseract.image_to_data(crop_image,config='--psm 6', lang=LANG_MAPPING[language][0],output_type=Output.DATAFRAME)
        #text,conf_dict = process_dfs(dfs)
        text,conf_dict = process_dfs_updated(dfs,language,6,crop_image)
    else:
        #text = pytesseract.image_to_string(crop_image,config='--psm '+str(psm_val), lang=LANG_MAPPING[language][1])
        dfs = pytesseract.image_to_data(crop_image,config='--psm '+str(psm_val), lang=LANG_MAPPING[language][0],output_type=Output.DATAFRAME)
        #text,conf_dict = process_dfs(dfs)
        text,conf_dict = process_dfs_updated(dfs,language,psm_val,crop_image)
    return text,conf_dict
    #except:
        #print("xxxxxxxxxxxxxxxxxxxxxxxxxx",coord)
        #print([0.0])
        #return "",[0.0]
def merger_text(line):
    """Concatenate the non-blank word texts of a line (leading-space
    separated); returns (joined text, number of words used)."""
    kept = [word["text"] for word in line['regions']
            if "text" in word.keys() and word["text"].replace(" ", "") != ""]
    joined = "".join(" " + w for w in kept)
    return joined, len(kept)
def get_coord(bbox):
    """Unpack a region's boundingBox into two forms.

    Returns (quad, flat): a 4x2 numpy array of all four vertices, and the
    flat list [x0, y0, x2, y2] (top-left and bottom-right corners).
    """
    vertices = bbox["boundingBox"]['vertices']
    quad = np.array([[vertices[i]['x'], vertices[i]['y']] for i in range(4)])
    flat = [vertices[0]['x'], vertices[0]['y'], vertices[2]['x'], vertices[2]['y']]
    return quad, flat
def frequent_height(page_info):
    """Most common region height (y2 - y0) among `page_info` regions;
    returns 0 when the list is empty."""
    if not page_info:
        return 0
    heights = []
    for region in page_info:
        _, flat_coord = get_coord(region)
        if len(flat_coord) != 0:
            heights.append(abs(flat_coord[3] - flat_coord[1]))
    return Counter(heights).most_common(1)[0][0]
def remove_space(a):
    """Strip every ASCII space character (other whitespace is kept)."""
    return "".join(a.split(" "))
def seq_matcher(tgt_text,gt_text):
    """Character-level comparison of an OCR hypothesis (`tgt_text`) against
    ground truth (`gt_text`), ignoring ASCII spaces.

    Returns (score, message, match_count) where
      match_count = |len(gt) - levenshtein(tgt, gt)|,
      score       = match_count / len(gt),
    and `message` flags which side is missing text entirely.

    Fixes over the original: it divided by len(gt_text) unconditionally,
    raising ZeroDivisionError whenever the ground truth was empty — which
    made its own empty-ground-truth message branches unreachable. Empty
    ground truth now scores 1.0 when the hypothesis is also empty, else 0.0.
    The dead SequenceMatcher.ratio() computation (immediately overwritten)
    was removed.
    """
    tgt_text = remove_space(tgt_text)
    gt_text = remove_space(gt_text)
    if len(gt_text) == 0:
        score = 1.0 if len(tgt_text) == 0 else 0.0
        match_count = 0
    else:
        mismatch_count = levenshtein(tgt_text, gt_text)
        match_count = abs(len(gt_text)-mismatch_count)
        score = match_count/len(gt_text)
    message = {"ground":True,"input":True}
    if score==0.0:
        if len(gt_text)>0 and len(tgt_text)==0:
            message['input'] = "text missing in tesseract"
        if len(gt_text)==0 and len(tgt_text)>0:
            message['ground'] = "text missing in google vision"
    if score==1.0 and len(gt_text)==0 and len(tgt_text)==0:
        message['ground'] = "text missing in google vision"
        message['input'] = "text missing in tesseract"
    return score,message,match_count
def count_mismatch_char(gt ,tgt) :
    """Positional mismatch count: len(gt) minus the number of positions
    (over the shorter of the two strings) where the characters agree."""
    matches = sum(1 for g, t in zip(gt, tgt) if g == t)
    return abs(len(gt) - matches)
def correct_region(region):
    """Pad a region's bounding box outward using the module-level crop
    factors (`crop_factor` horizontally, `crop_factor_y` vertically),
    mutating and returning `region`."""
    v = region['boundingBox']['vertices']
    region['boundingBox'] = {'vertices': [
        {'x': v[0]['x'] - crop_factor, 'y': v[0]['y'] - crop_factor_y},
        {'x': v[1]['x'] + crop_factor, 'y': v[1]['y'] - crop_factor_y},
        {'x': v[2]['x'] + crop_factor, 'y': v[2]['y'] + crop_factor_y},
        {'x': v[3]['x'] - crop_factor, 'y': v[3]['y'] + crop_factor_y},
    ]}
    return region
def sort_line(line):
    """Order a line's word regions left-to-right (ascending x of the first
    vertex) in place; returns the same line dict."""
    def _left_edge(word):
        return word['boundingBox']['vertices'][0]['x']
    line['regions'].sort(key=_left_edge)
    return line
def cell_ocr_word(lang, page_path, line,save_base_path,mode_height):
    """OCR a table cell word-by-word (PSM 8) after dynamic coordinate
    adjustment; returns (concatenated text, list of word conf dicts)."""
    pieces = []
    conf_dicts = []
    adjusted_words = coord_adjustment(page_path, line['regions'], save_base_path)
    for raw_word in adjusted_words:
        padded = correct_region(raw_word)
        quad, flat = get_coord(padded)
        if len(flat) != 0 and abs(quad[1, 1] - quad[2, 1]) > REJECT_FILTER:
            word_text, word_conf = get_text(page_path, quad, lang, mode_height, save_base_path, 8)
            pieces.append(" " + word_text)
            conf_dicts.extend(word_conf)
    return "".join(pieces), conf_dicts
def cell_text_ocr(lang, page_path, line,save_base_path,mode_height):
    """OCR a cell's words (PSM 8) without coordinate adjustment; returns
    only the concatenated text.

    Fix over the original: the never-used `cell_regions` accumulator was
    removed.
    """
    cell_text = ""
    for word_idx, word in enumerate(line['regions']):
        word = correct_region(word)
        coord_crop, coord = get_coord(word)
        # Skip degenerate boxes shorter than REJECT_FILTER pixels.
        if len(coord) != 0 and abs(coord_crop[1, 1] - coord_crop[2, 1]) > REJECT_FILTER:
            text, conf_dict = get_text(page_path, coord_crop, lang, mode_height, save_base_path, 8)
            cell_text = cell_text + " " + text
    return cell_text
def cell_ocr(lang, page_path, line,save_base_path,mode_height,psm):
    """OCR a table cell line-by-line after horizontally merging its words
    into lines and adjusting their coordinates.

    Returns (cell_google_text, text, conf_dicts): the ground-truth text
    assembled from the merged lines, the Tesseract text, and the per-word
    confidence dicts.
    """
    text =""
    cell_google_text = ""
    conf_dicts = []
    # Merge word boxes into horizontal lines, then refine their coordinates.
    updated_lines = horzontal_merging(line['regions'])
    dynamic_line = coord_adjustment(page_path,updated_lines ,save_base_path)
    for updated_line in dynamic_line:
        line_text = updated_line['text']
        cell_google_text= cell_google_text + " "+line_text
        corrected_line = correct_region(updated_line)
        coord_crop, coord = get_coord(corrected_line)
        # Skip degenerate boxes shorter than REJECT_FILTER pixels.
        if len(coord)!=0 and abs(coord_crop[1,1]-coord_crop[2,1]) > REJECT_FILTER :
            tess_text,conf_dict = get_text(page_path, coord_crop, lang,mode_height,save_base_path,psm)
            text = text + " " + tess_text
            conf_dicts.extend(conf_dict)
    return cell_google_text,text,conf_dicts
def text_extraction(df,lang, page_path, regions,save_base_path):
    """Re-OCR one page with Tesseract and score it against the ground truth
    already present in `regions` (Google-Vision output).

    Granularity is chosen by the module-level `ocr_level` ('WORD' or
    'LINE'). Each scored unit is annotated in place (tess_text, score,
    message, ...) and appended as a row to `df`.

    Returns (regions, final_score/total_lines, df, total_chars,
    total_match_chars).

    NOTE(review): DataFrame.append is deprecated (removed in pandas 2.0) —
    pd.concat would be needed on newer pandas. Also, the final division
    raises ZeroDivisionError when no line was scored; confirm inputs always
    contain at least one scorable line.
    """
    final_score = 0
    total_words = 0
    total_lines = 0
    total_chars = 0
    total_match_chars = 0
    for idx, level in enumerate(regions):
        # Typical text height on this region, used to pick the PSM in get_text.
        mode_height = frequent_height(level['regions'])
        if ocr_level=="WORD":
            for line_idx, line in enumerate(level['regions']):
                #word_regions = coord_adjustment(page_path, line['regions'],save_base_path)
                for word_idx, word in enumerate(line['regions']):
                    word = correct_region(word)
                    coord_crop, coord = get_coord(word)
                    word_text = word['text']
                    if len(word_text)>0 and len(coord)!=0 and abs(coord_crop[1,1]-coord_crop[2,1]) > REJECT_FILTER :
                        text,conf_dict = get_text(page_path, coord_crop, lang,mode_height,save_base_path,8)
                        if text_processing:
                            # Collapse runs of whitespace into single spaces.
                            text_list = text.split()
                            text = " ".join(text_list)
                        score,message,match_count = seq_matcher(text,word['text'])
                        final_score = final_score+score
                        total_words = total_words+1
                        total_chars = total_chars+len(remove_space(word['text']))
                        total_match_chars= total_match_chars+match_count
                        word['char_match'] = match_count
                        word['tess_text'] = text
                        word['conf_dict'] = conf_dict
                        word['score'] = score
                        word['message'] = message
                        columns = word.keys()
                        df2 = pd.DataFrame([word],columns=columns)
                        df = df.append(df2, ignore_index=True)
                    elif len(word_text)>0:
                        # Box rejected (too small): record a miss against ground truth.
                        score,message,match_count = seq_matcher("",word['text'])
                        word['char_match'] = match_count
                        word['tess_text'] = " "
                        word['conf_dict'] = None
                        word['score'] = score
                        word['message'] = message
                        columns = word.keys()
                        df2 = pd.DataFrame([word],columns=columns)
                        df = df.append(df2, ignore_index=True)
        if ocr_level=="LINE":
            lines_adjusted = coord_adjustment(page_path, level['regions'],save_base_path)
            for line_idx, line_org in enumerate(lines_adjusted):
                line_sorted = copy.deepcopy(sort_line(line_org))
                line_text,total_word = merger_text(line_sorted)
                line = copy.deepcopy(correct_region(line_sorted))
                # PSM 7 = single line; PSM 8 = single word (for one-word lines).
                psm = 7
                if total_word<2:
                    #print(line_text)
                    psm=8
                coord_crop, coord = get_coord(line)
                print("line text",line_text)
                if len(remove_space(line_text))>0 and len(coord)!=0 and abs(coord_crop[1,1]-coord_crop[2,1]) > REJECT_FILTER :
                    # Table cells get the dedicated cell OCR paths.
                    if 'class' in line.keys() and line['class']=="CELL":
                        line_text,text,conf_dict = cell_ocr(lang, page_path, line,save_base_path,mode_height,psm)
                    elif 'class' in line.keys() and line['class']=="CELL_TEXT":
                        text,conf_dict = cell_ocr_word(lang, page_path, line,save_base_path,mode_height)
                    else:
                        text,conf_dict = get_text(page_path, coord_crop, lang,mode_height,save_base_path,psm)
                    if text_processing:
                        text_list = text.split()
                        text = " ".join(text_list)
                    score,message,match_count = seq_matcher(text,line_text)
                    #if score < 1.0:
                        #text, conf_dict,score = check_psm(page_path,coord_crop,lang,mode_height,save_base_path,psm,score,text,line_text,conf_dict)
                    final_score = final_score+score
                    total_lines = total_lines+1
                    total_chars = total_chars+len(remove_space(line_text))
                    total_match_chars= total_match_chars+match_count
                    line['char_match'] = match_count
                    line['tess_text'] = text
                    line['text'] = line_text
                    line['conf_dict'] = conf_dict
                    line['score'] = score
                    line['message'] = message
                    columns = line.keys()
                    df2 = pd.DataFrame([line],columns=columns)
                    df = df.append(df2, ignore_index=True)
                elif len(remove_space(line_text))>0:
                    # Box rejected (too small): record a miss against ground truth.
                    score,message,match_count = seq_matcher("",line_text)
                    line['char_match'] = match_count
                    line['tess_text'] = " "
                    line['conf_dict'] = None
                    line['text'] = line_text
                    line['score'] = score
                    line['message'] = message
                    columns = line.keys()
                    df2 = pd.DataFrame([line],columns=columns)
                    df = df.append(df2, ignore_index=True)
    #return regions,final_score/total_words,df,total_chars,total_match_chars
    return regions,final_score/total_lines,df,total_chars,total_match_chars
# Stage 2 input: one cached Google-Vision response (gv.json) per processed PDF.
json_files_path = glob.glob(output_path+"/*/gv.json")
def tesseract(json_files):
    """Evaluate Tesseract OCR against each cached Google-Vision response.

    For every gv.json path: draws the GV regions, re-OCRs each page via
    text_extraction(), and writes per-file outputs under
    base_path/<pdf name>/ — <name>.json (scores embedded in the response)
    and <name>.csv (per-page dataframes plus file-level counts).

    Returns (output, final_df); `output` stays empty and is kept only for
    signature compatibility. `final_df` is that of the last file processed.

    Fixes over the original:
      * per-page elapsed time was computed as `t1 + time.time()` (a sum of
        two timestamps); it is now `time.time() - t1`;
      * the file-level score no longer divides by zero when a file contains
        no ground-truth characters.
    """
    output = []
    dfs =[]
    for json_file in json_files:
        file_name = json_file.split('/')[-1].split('.json')[0]
        pdf_name = json_file.split('/')[-2]
        print("file name--------------------->>>>>>>>>>>>>>>>>>",pdf_name)
        if not os.path.exists(base_path+pdf_name):
            os.mkdir(base_path+pdf_name)
        save_base_path = base_path+pdf_name
        with open(json_file,'r+') as f:
            data = json.load(f)
        columns = ["page_path","page_data","file_eval_info"]
        final_df = pd.DataFrame(columns=columns)
        Draw(data,save_base_path,regions='regions')
        lang = data['outputs'][0]['config']['OCR']['language']
        total_page = len(data['outputs'][0]['pages'])
        file_score = 0; total_chars_file = 0
        file_data = []; total_match_chars_file = 0
        page_paths = []
        page_data_counts = []
        for idx,page_data in enumerate(data['outputs'][0]['pages']):
            t1 = time.time()
            print("processing started for page no. ",idx)
            page_path = page_data['path']
            regions = page_data['regions'][1:]  # first region is skipped
            df = pd.DataFrame()
            regions,score,df,total_chars,total_match_chars = text_extraction(df,lang, page_path, regions,save_base_path)
            file_score = file_score + score
            total_chars_file =total_chars_file +total_chars
            total_match_chars_file = total_match_chars_file+total_match_chars
            file_data.append(df.to_csv())
            page_paths.append(page_path)
            char_details = {"total_chars":total_chars,"total_match_chars":total_match_chars}
            page_data_counts.append(char_details)
            data['outputs'][0]['pages'][idx]["regions"][1:] = copy.deepcopy(regions)
            t2 = time.time() - t1  # fixed: was `t1 + time.time()`
            print("processing completed for page in {}".format(t2))
        # Guard the division: a file with no ground-truth characters scores 0.0.
        file_ratio = total_match_chars_file/total_chars_file if total_chars_file else 0.0
        file_eval_info = {"total_chars":total_chars_file,"total_match_chars":total_match_chars_file,"score":file_ratio}
        print(file_eval_info)
        final_df["page_path"] = page_paths
        final_df["page_data"] = file_data
        final_df["file_eval_info"] = [file_eval_info]*len(page_paths)
        print("file level evaluation result------------------->>>>>>>>>>>>>>>>>>>>>>>>>>>",file_eval_info)
        data['outputs'][0]['score'] = file_score/total_page
        with open(save_base_path+"/"+file_name+".json", 'w') as outfile:
            json.dump(data, outfile)
        final_df.to_csv(save_base_path+"/"+file_name+'.csv')
    return output,final_df
# Stage 2: run the Tesseract evaluation over every cached GV response.
output,dfs = tesseract(json_files_path)
def draw_thresh_box(df,path,page_index,save_path):
    """Redraw one page image with boxes for the rows in `df`, overlaying the
    ground-truth text (red) and Tesseract text (green), and save it as
    <page_index>.png under `save_path`.

    `df` rows come from a CSV round-trip, so 'boundingBox' is a string and
    is parsed back with ast.literal_eval.
    """
    # Keep only the server path after 'upload' for the download endpoint.
    path = path.split('upload')[1]
    image = download_file(download_url,headers,path,f_type='image')
    nparr = np.frombuffer(image, np.uint8)
    image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
    font = cv2.FONT_HERSHEY_SIMPLEX
    color= (255,0,0);thickness=5
    df =df.reset_index()
    for row in df.iterrows():
        row2 = row[1].to_dict()
        boxes = row2['boundingBox']
        boxes2 = ast.literal_eval(boxes)
        ground = boxes2['vertices']
        pts = []
        for pt in ground:
            pts.append([int(pt['x']) ,int(pt['y'])])
        cv2.polylines(image, [np.array(pts)],True, color, thickness)
        cv2.putText(image, str(row2['text']), (pts[0][0],pts[0][1]), font,
                2, (0,0,255), 2, cv2.LINE_AA)
        cv2.putText(image, str(row2['tess_text']), (pts[1][0],pts[1][1]), font,
            2, (0,255,0), 2, cv2.LINE_AA)
    image_path = os.path.join(save_path , '{}.png'.format(page_index))
    cv2.imwrite(image_path , image)
def visualize_results(df_paths,thresh):
    """For every per-file CSV under `df_paths`, redraw each page showing only
    the regions whose OCR score fell below `thresh`."""
    for csv_path in glob.glob(df_paths + "*/*.csv"):
        target_dir = base_path + csv_path.split('/')[-2] + "/"
        file_df = pd.read_csv(csv_path)
        page_pairs = zip(file_df['page_path'], file_df['page_data'])
        for page_idx, (page_path, page_csv) in enumerate(page_pairs):
            # Each page_data cell is itself a CSV string written by tesseract().
            page_df = pd.read_csv(StringIO(page_csv), sep=",")
            low_score = page_df[page_df['score'] < thresh]
            draw_thresh_box(low_score, page_path, page_idx, target_dir)
# Stage 3: visualize regions scoring below vis_thresh for manual review.
visualize_results(base_path,vis_thresh)
| 39.523375 | 267 | 0.585454 | import glob
import uuid
import json
import requests
import copy,time
import os
import cv2
import numpy as np
from time import sleep
import pandas as pd
import logging
from collections import Counter
import pytesseract
from pytesseract import Output
from difflib import SequenceMatcher
from io import StringIO
from dynamic_adjustment import coord_adjustment
import ast
from leven import levenshtein
from horizontal_merging import horzontal_merging
ocr_level = "LINE"
text_processing = True
REJECT_FILTER = 2
crop_factor= 5
crop_factor_y= 0
crop_save = True
digitization = True
vis_thresh=0.90
LANG_MAPPING = {
"en" : ["Latin","eng"],
"kn" : ['Kannada',"kan"],
"gu": ["guj"],
"or": ["ori"],
"hi" : ["Devanagari","hin","eng"],
"bn" : ["Bengali","ben"],
"mr": ["Devanagari","hin","eng"],
"ta": ['Tamil',"tam"],
"te" : ["Telugu","tel"],
"ml" :["Malayalam"],
"ma" :["Marathi"]
}
path = '/home/naresh/Tarento/testing_document_processor/test_pipeline/data/'
output_path = '/home/naresh/Tarento/testing_document_processor/test_pipeline/result/'
output_path_boxes= '/home/naresh/Tarento/testing_document_processor/test_word_boxes/'
base_path= '/home/naresh/Tarento/testing_document_processor/test_word_boxes/'
psms = [6,7,8,9,10,11]
token = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VyTmFtZSI6ImRoaXJhai5kYWdhQHRhcmVudG8uY29tIiwicGFzc3dvcmQiOiJiJyQyYiQxMiRuTXdNcHpCVlBXVVUvSlVLWXBKYWkuQUd2SUNJalJVcUdIbnBPenRzai5VRU55emlSZmk1TyciLCJleHAiOjE2MTk3Njg2NjN9.14IL5_kw83F5gxjUMSw6kCDLYQhjAg306AwJj0DsxWc'
word_url = "https://auth.anuvaad.org/anuvaad-etl/wf-manager/v1/workflow/async/initiate"
google_url = "https://auth.anuvaad.org/anuvaad-etl/wf-manager/v1/workflow/async/initiate"
layout_url = "https://auth.anuvaad.org/anuvaad-etl/wf-manager/v1/workflow/async/initiate"
segmenter_url = "https://auth.anuvaad.org/anuvaad-etl/wf-manager/v1/workflow/async/initiate"
bs_url ="https://auth.anuvaad.org/anuvaad-etl/wf-manager/v1/workflow/jobs/search/bulk"
evaluator_url = "https://auth.anuvaad.org/anuvaad-etl/document-processor/evaluator/v0/process"
download_url ="https://auth.anuvaad.org/download/"
upload_url = 'https://auth.anuvaad.org/anuvaad-api/file-uploader/v0/upload-file'
headers = {
'auth-token' :token }
class Draw:
def __init__(self,input_json,save_dir,regions,prefix='',color= (255,0,0),thickness=5):
self.json = input_json
self.save_dir = save_dir
self.regions = regions
self.prefix = prefix
self.color = color
self.thickness=thickness
if self.prefix == 'seg':
self.draw_region_children()
else:
self.draw_region__sub_children()
def get_coords(self,page_index):
return self.json['outputs'][0]['pages'][page_index][self.regions]
def get_page_count(self):
return(self.json['outputs'][0]['page_info'])
def get_page(self,page_index):
page_path = self.json['outputs'][0]['page_info'][page_index]
page_path = page_path.split('upload')[1]
return download_file(download_url,headers,page_path,f_type='image')
def draw_region(self):
font = cv2.FONT_HERSHEY_SIMPLEX
for page_index in range(len(self.get_page_count())) :
nparr = np.frombuffer(self.get_page(page_index), np.uint8)
image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
for region in self.get_coords(page_index) :
ground = region['boundingBox']['vertices']
pts = []
for pt in ground:
pts.append([int(pt['x']) ,int(pt['y'])])
cv2.polylines(image, [np.array(pts)],True, self.color, self.thickness)
if 'class' not in region.keys():
region['class'] = 'TEXT'
cv2.putText(image, str(region['class']), (pts[0][0],pts[0][1]), font,
2, (0,125,255), 3, cv2.LINE_AA)
image_path = os.path.join(self.save_dir , '{}_{}_{}.png'.format(self.regions,self.prefix,page_index))
cv2.imwrite(image_path , image)
def draw_region_children(self):
font = cv2.FONT_HERSHEY_SIMPLEX
fontScale = 2
thickness =3
for page_index in range(len(self.get_page_count())) :
nparr = np.frombuffer(self.get_page(page_index), np.uint8)
image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
for region_index,region in enumerate(self.get_coords(page_index)) :
try:
ground = region['boundingBox']['vertices']
pts = []
for pt in ground:
pts.append([int(pt['x']) ,int(pt['y'])])
region_color = (0 ,0,125+ 130*(region_index/ len(self.get_coords(page_index))))
cv2.polylines(image, [np.array(pts)],True, region_color, self.thickness)
cv2.putText(image, str(region_index), (pts[0][0],pts[0][1]), font,
fontScale, region_color, thickness, cv2.LINE_AA)
for line_index, line in enumerate(region['children']):
ground = line['boundingBox']['vertices']
pts = []
for pt in ground:
pts.append([int(pt['x']) ,int(pt['y'])])
line_color = (125 + 130*(region_index/ len(self.get_coords(page_index))) ,0,0)
cv2.polylines(image, [np.array(pts)],True, line_color, self.thickness -2)
cv2.putText(image, str(line_index), (pts[0][0],pts[0][1]), font,
fontScale, line_color, thickness, cv2.LINE_AA)
except Exception as e:
print(str(e))
print(region)
image_path = os.path.join(self.save_dir , '{}_{}.png'.format(self.prefix,page_index))
cv2.imwrite(image_path , image)
def draw_region__sub_children(self):
for page_index in range(len(self.get_page_count())) :
nparr = np.frombuffer(self.get_page(page_index), np.uint8)
image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
font = cv2.FONT_HERSHEY_SIMPLEX
fontScale = 2
color = (0 ,255,0)
thickness = 3
for region_index,region in enumerate(self.get_coords(page_index)) :
try:
ground = region['boundingBox']['vertices']
pts = []
for pt in ground:
pts.append([int(pt['x']) ,int(pt['y'])])
region_color = (0,0,255)
cv2.polylines(image, [np.array(pts)],True, region_color, self.thickness)
for line_index, line in enumerate(region['regions']):
ground = line['boundingBox']['vertices']
pts = []
for pt in ground:
pts.append([int(pt['x'])-1 ,int(pt['y']) -1 ])
line_color = (255,0,0)
cv2.polylines(image, [np.array(pts)],True, line_color, self.thickness -2)
cv2.putText(image, str(line_index), (pts[0][0],pts[0][1]), font,
fontScale, (255,0,0), thickness, cv2.LINE_AA)
for word_index, word in enumerate(line['regions']):
ground = word['boundingBox']['vertices']
pts = []
for pt in ground:
pts.append([int(pt['x']) -3,int(pt['y'])-3])
word_color = (0,255,0)
cv2.polylines(image, [np.array(pts)],True, word_color, self.thickness -2)
cv2.putText(image, str(word_index), (pts[0][0],pts[0][1]), font,
fontScale-1,(0,255,0), thickness, cv2.LINE_AA)
except Exception as e:
print(str(e))
print(region)
image_path = os.path.join(self.save_dir , '{}_{}_{}.png'.format(self.prefix,self.regions,page_index))
cv2.imwrite(image_path , image)
l,headers,pdf_name):
file = {
"files": [
{
"locale": "hi",
"path": pdf_name,
"type": "pdf",
"config":{
"OCR": {
"option": "HIGH_ACCURACY",
"language": "hi",
"top_correction":"True",
"craft_word": "True",
"craft_line": "True",
}
}}
],
"workflowCode": "WF_A_FCWDLDBSOD15GV"
}
res = requests.post(url,json=file,headers=headers)
return res.json()
def upload_file(pdf_file,headers,url):
files = [
('file',(open(pdf_file,'rb')))]
response = requests.post(url, headers=headers, files=files)
return response.json()
def download_file(download_url,headers,outputfile,f_type='json'):
download_url =download_url+str(outputfile)
res = requests.get(download_url,headers=headers)
if f_type == 'json':
return res.json()
else :
return res.content
def save_json(path,res):
with open(path, "w", encoding='utf8') as write_file:
json.dump(res, write_file,ensure_ascii=False )
def bulk_search(job_id,bs_url,headers):
bs_request = {
"jobIDs": [job_id],
"taskDetails":"true"
}
print(job_id)
res = requests.post(bs_url,json=bs_request,headers=headers, timeout = 10000)
print(res.json())
while(1):
in_progress = res.json()['jobs'][0]['status']
if in_progress == 'COMPLETED':
outputfile = res.json()['jobs'][0]['output'][0]['outputFile']
print(in_progress)
return outputfile
break
sleep(0.5)
print(in_progress)
res = requests.post(bs_url,json=bs_request,headers=headers, timeout = 10000)
def execute_module(module,url,input_file,module_code,pdf_dir,overwirte=True , draw=True):
output_path = os.path.join(pdf_dir,'{}.json'.format(module_code))
if os.path.exists(output_path) and not overwirte:
print(' loading *****************{}'.format(module_code ))
with open(output_path,'r') as wd_file :
response = json.load(wd_file)
wf_res = pdf_dir + '/{}_wf.json'.format(module_code)
with open(wf_res,'r') as wd_file :
json_file = json.load(wd_file)
else :
if module_code in ['wd','gv']:
res = upload_file(input_file,headers,upload_url)
print('upload response **********', res)
pdf_name = res['data']
response = module(url,headers,pdf_name)
else :
response = module(url,headers,input_file)
if 'eval' in module_code :
json_file = response['outputFile']
response = download_file(download_url,headers,json_file)
save_json(output_path,response)
return json_file,response
print(' response *****************{} {}'.format(module_code ,response ))
job_id = response['jobID']
json_file = bulk_search(job_id,bs_url,headers)
save_json(pdf_dir + '/{}_wf.json'.format(module_code),json_file)
print('bulk search response **************',json_file )
response = download_file(download_url,headers,json_file)
save_json(output_path,response)
if draw :
if module_code in ['wd','gv']:
Draw(response,pdf_dir,regions='lines',prefix=module_code)
else :
Draw(response,pdf_dir,regions='regions',prefix=module_code)
return json_file,response
def evaluate__and_save_input(pdf_files,output_dir,headers,word_url,layout_url,download_url,upload_url,bs_url):
    """Run the Google-Vision OCR module over each PDF, caching per-document output.

    Creates one sub-directory of `output_dir` per PDF and invokes
    `execute_module` with the 'gv' module code (cached, no drawing).

    NOTE(review): `word_responses`, `layout_responses` and
    `segmenter_responses` are populated nowhere and the function returns
    None implicitly — presumably leftovers from an earlier version.
    """
    word_responses = {}
    layout_responses = {}
    segmenter_responses = []
    for pdf in pdf_files:
        # Derive the per-document output folder from the file stem.
        pdf_name = pdf.split('/')[-1].split('.')[0]
        print(pdf , ' is being processed')
        pdf_output_dir = os.path.join(output_dir,pdf_name)
        # Quoted to survive spaces in names; consider os.makedirs instead.
        os.system('mkdir -p "{}"'.format(pdf_output_dir))
        wd_json,_ = execute_module(google_ocr_v15,word_url,input_file=pdf,module_code='gv',pdf_dir=pdf_output_dir,overwirte=False , draw=False)
def main(path,headers,word_url,layout_url,download_url,upload_url,bs_url):
    """Digitize every PDF under `path` via `evaluate__and_save_input`.

    NOTE(review): results are written under the module-level global
    `output_path`, not under `path` — confirm that is intentional.
    """
    pdf_names = glob.glob(path + '/*.pdf')
    return evaluate__and_save_input(pdf_names,output_path,headers,word_url,layout_url,download_url,upload_url,bs_url)
# Top-level switch: only run the (slow) digitization pipeline when the
# module-level `digitization` flag is set.
if digitization:
    main(path,headers,word_url,layout_url,download_url,upload_url,bs_url)
def bound_coordinate(corrdinate,max):
    """Clamp a coordinate into the image: negatives become 0, values past
    `max` become `max - 2` (the checks are applied sequentially, in that
    order), and the result is truncated to int.
    """
    value = 0 if corrdinate < 0 else corrdinate
    if value > max:
        value = max - 2
    return int(value)
def get_image_from_box(image, box, height=140):
    """Deskew-crop a quadrilateral `box` (4x2 corner array, clockwise from
    top-left) out of `image` via a perspective warp.

    NOTE(review): the `height` parameter is ignored — it is immediately
    overwritten from the box geometry below. Callers that pass height=
    (e.g. get_text) have no effect; confirm before relying on it.
    """
    # Target width/height derived from opposite edge lengths of the quad.
    w = max(abs(box[0, 0] - box[1, 0]),abs(box[2, 0] - box[3, 0]))
    height = max(abs(box[0, 1] - box[3, 1]),abs(box[1, 1] - box[2, 1]))
    pts1 = np.float32(box)
    # Map the quad corners onto an axis-aligned w x height rectangle.
    pts2 = np.float32([[0, 0], [int(w), 0],[int(w),int(height)],[0,int(height)]])
    M = cv2.getPerspectiveTransform(pts1, pts2)
    result_img = cv2.warpPerspective(image,M,(int(w), int(height)))
    return result_img
def process_dfs(temp_df):
    """Flatten a pytesseract `image_to_data` DataFrame into OCR output.

    Args:
        temp_df: DataFrame with at least `text` and `conf` columns; rows
            whose `text` is null are dropped.

    Returns:
        (text, entries): `text` is the space-joined words (with a leading
        space, matching the historical format) and `entries` is a list of
        ``{"text": ..., "conf": ...}`` dicts, one per kept row.
    """
    temp_df = temp_df[temp_df.text.notnull()]
    # Fix: the original also accumulated a `conf` total that was never
    # returned or used; that dead computation is removed.
    text = ""
    entries = []
    for _, row in temp_df.iterrows():
        entries.append({"text": row['text'], "conf": row['conf']})
        text = text + " " + str(row['text'])
    return text, entries
def process_dfs_updated(temp_df,language,psm_val,image):
    """Flatten a pytesseract result DataFrame, re-OCRing low-confidence words.

    For each non-null row, if its confidence is < 50 the word's pixel box is
    cropped out of `image` and re-run through tesseract with every psm in the
    global `psms`, keeping whichever result has the highest confidence.

    Args:
        temp_df: pytesseract `image_to_data` DataFrame (`text`, `conf`,
            `top`, `height`, `left`, `width` columns).
        language: key into the global LANG_MAPPING for the tesseract lang.
        psm_val: psm the caller used (unused here; kept for interface parity).
        image: the image the DataFrame was produced from (numpy array).

    Returns:
        (text, entries): space-joined text plus per-word dicts with the
        best text/conf found.

    NOTE(review): `flag` is always True so the old/new-text prints fire for
    every row, and the `conf` running total is never used — both look like
    debugging leftovers.
    """
    temp_df = temp_df[temp_df.text.notnull()]
    text = ""
    conf=0
    temp_dict1 = []
    if len(temp_df)>0:
        for index, row in temp_df.iterrows():
            temp_dict2 = {}
            org_conf = row["conf"]
            org_text = row['text']
            flag = True
            if row["conf"]<50:
                print(row["top"],row["height"],row["left"],row["width"])
                # Crop the word's bounding box out of the page image.
                crop_image = image[ int(row["top"]):int(row["top"]+row["height"]), int(row["left"]):int(row["left"]+row["width"])]
                for psm in psms:
                    df2 = pytesseract.image_to_data(crop_image,config='--psm '+str(psm), lang=LANG_MAPPING[language][0],output_type=Output.DATAFRAME)
                    temp_df2 = df2[df2.text.notnull()]
                    if len(temp_df2)>0:
                        # Keep the re-OCR result only when it is more confident.
                        new_conf = temp_df2.iloc[0].conf
                        if org_conf<new_conf:
                            org_conf = new_conf
                            org_text = temp_df2.iloc[0].text
            if flag:
                print("old text", row['text'])
                print("new text", org_text)
            conf = conf + org_conf
            temp_dict2["text"]=org_text
            temp_dict2["conf"]=org_conf
            text = text +" "+ str(org_text)
            temp_dict1.append(temp_dict2)
    return text,temp_dict1
def check_psm(path,coord,language,mode_height,save_base_path,psm_val,org_score,org_text,line_text,org_conf):
    """Try every psm in the global `psms` and keep the best match vs `line_text`.

    Each candidate OCR result is scored against the ground-truth `line_text`
    with `seq_matcher`; the incoming (org_text, org_conf, org_score) act as
    the baseline to beat. Stops early on a perfect score.

    Returns:
        (org_text, org_conf, org_score): the best text, its confidence
        dict, and its score after trying all psms.
    """
    for psm in psms:
        text,conf_dict = get_text(path,coord,language,mode_height,save_base_path,psm)
        if text_processing:
            # Collapse runs of whitespace before comparing.
            text_list = text.split()
            text = " ".join(text_list)
        score,message,match_count = seq_matcher(text,line_text)
        if score==1.0 or score==1:
            # Perfect match: no point trying further psms.
            org_score = score
            org_text = text
            org_conf = conf_dict
            break
        elif score>org_score:
            org_score =score
            org_text = text
            org_conf = conf_dict
    return org_text, org_conf,org_score
def get_text(path,coord,language,mode_height,save_base_path,psm_val):
    """OCR one region of a remote page image with tesseract.

    Downloads the page image (the part of `path` after 'upload' is the
    server-side name), perspective-crops the region given by `coord`
    (4x2 corner array), and runs pytesseract on the crop.

    Regions taller than `mode_height` (the page's modal text height) are
    assumed to be multi-line and forced to psm 6; otherwise `psm_val` is used.

    Returns:
        (text, conf_dict) from `process_dfs_updated`.
    """
    path = path.split('upload')[1]
    image = download_file(download_url,headers,path,f_type='image')
    # Decode the raw bytes into a BGR image.
    nparr = np.frombuffer(image, np.uint8)
    image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
    height, width,channel = image.shape
    crop_image = get_image_from_box(image, coord, height=abs(coord[0,1]-coord[2,1]))
    save_path = save_base_path+"/"+"_psm_pers"+str(psm_val)+"--"+str(uuid.uuid4()) + '.jpg'
    if crop_save:
        # Optional debug dump of the crop, gated by the global `crop_save`.
        cv2.imwrite(save_path,crop_image)
    if abs(coord[1,1]-coord[2,1])>mode_height:
        dfs = pytesseract.image_to_data(crop_image,config='--psm 6', lang=LANG_MAPPING[language][0],output_type=Output.DATAFRAME)
        text,conf_dict = process_dfs_updated(dfs,language,6,crop_image)
    else:
        dfs = pytesseract.image_to_data(crop_image,config='--psm '+str(psm_val), lang=LANG_MAPPING[language][0],output_type=Output.DATAFRAME)
        text,conf_dict = process_dfs_updated(dfs,language,psm_val,crop_image)
    return text,conf_dict
def merger_text(line):
    """Concatenate the non-blank word texts of a line region.

    Words without a "text" key, or whose text is only spaces, are skipped.

    Returns:
        (text, word_count): the space-prefixed concatenation (each kept
        word contributes " <word>") and how many words were kept.
    """
    kept = [
        word["text"]
        for word in line['regions']
        if "text" in word and word["text"].replace(" ", "") != ""
    ]
    merged = "".join(" " + piece for piece in kept)
    return merged, len(kept)
def get_coord(bbox):
    """Extract a region's bounding box in two formats.

    Args:
        bbox: dict with bbox["boundingBox"]["vertices"], a list of four
            {'x': ..., 'y': ...} corners (clockwise from top-left).

    Returns:
        (corners, flat): `corners` is a 4x2 numpy array of [x, y] points;
        `flat` is [x0, y0, x2, y2] — the top-left and bottom-right corners.
    """
    vertices = bbox["boundingBox"]['vertices']
    corner_points = [[vertices[i]['x'], vertices[i]['y']] for i in range(4)]
    flat = [vertices[0]['x'], vertices[0]['y'], vertices[2]['x'], vertices[2]['y']]
    return np.array(corner_points), flat
def frequent_height(page_info):
    """Return the most common text height among `page_info` regions, or 0
    when the list is empty. Height is |y2 - y0| of each region's box.
    """
    if len(page_info) > 0 :
        heights = []
        for region in page_info:
            _, flat_coord = get_coord(region)
            if len(flat_coord) != 0:
                heights.append(abs(flat_coord[3] - flat_coord[1]))
        return Counter(heights).most_common(1)[0][0]
    else :
        return 0
def remove_space(a):
    """Return *a* with every space character (U+0020 only) removed."""
    return "".join(a.split(" "))
def seq_matcher(tgt_text,gt_text):
    """Score OCR output `tgt_text` against ground truth `gt_text`.

    Spaces are stripped from both sides; the score is the fraction of
    ground-truth characters matched according to the Levenshtein distance.

    Returns:
        (score, message, match_count): score in [0, 1]; `message` maps
        'ground'/'input' to True or to a human-readable "missing text"
        diagnostic; `match_count` = abs(len(gt) - levenshtein distance).
    """
    tgt_text = remove_space(tgt_text)
    gt_text = remove_space(gt_text)
    mismatch_count = levenshtein(tgt_text, gt_text)
    match_count = abs(len(gt_text) - mismatch_count)
    # Fix: empty ground truth previously raised ZeroDivisionError, which
    # also made the "both empty" branch below unreachable.
    if len(gt_text) > 0:
        score = match_count / len(gt_text)
    else:
        score = 1.0 if len(tgt_text) == 0 else 0.0
    # Fix: `message` was never assigned — the dict literal was a bare
    # expression, so every call raised NameError. (The dead SequenceMatcher
    # ratio, which was immediately overwritten, has been dropped.)
    message = {"ground": True, "input": True}
    if score == 0.0:
        if len(gt_text) > 0 and len(tgt_text) == 0:
            message['input'] = "text missing in tesseract"
        if len(gt_text) == 0 and len(tgt_text) > 0:
            message['ground'] = "text missing in google vision"
    if score == 1.0 and len(gt_text) == 0 and len(tgt_text) == 0:
        message['ground'] = "text missing in google vision"
        message['input'] = "text missing in tesseract"
    return score, message, match_count
def count_mismatch_char(gt ,tgt) :
    """Positional mismatch count: compare `gt` and `tgt` character by
    character (up to the shorter length) and return how many ground-truth
    characters were NOT matched, i.e. len(gt) minus the match count.
    """
    matched = sum(1 for left, right in zip(gt, tgt) if left == right)
    return abs(len(gt) - matched)
def correct_region(region):
    """Pad a region's bounding box in place using the module-level
    `crop_factor` (horizontal) and `crop_factor_y` (vertical) margins,
    then return the same region dict.
    """
    box = region['boundingBox']['vertices']
    tmp = 0
    padded = [
        {'x': box[0]['x'] - crop_factor,       'y': box[0]['y'] - crop_factor_y},
        {'x': box[1]['x'] + crop_factor + tmp, 'y': box[1]['y'] - crop_factor_y},
        {'x': box[2]['x'] + crop_factor + tmp, 'y': box[2]['y'] + crop_factor_y},
        {'x': box[3]['x'] - crop_factor,       'y': box[3]['y'] + crop_factor_y},
    ]
    region['boundingBox'] = {'vertices': padded}
    return region
def sort_line(line):
    """Order a line's word regions left-to-right, in place, by the x
    coordinate of each region's first vertex; return the same line dict.
    """
    regions = line['regions']
    regions.sort(key=lambda region: region['boundingBox']['vertices'][0]['x'])
    return line
def cell_ocr_word(lang, page_path, line,save_base_path,mode_height):
    """OCR a table cell word by word (psm 8) and concatenate the results.

    The cell's word regions are first re-aligned via `coord_adjustment`,
    each surviving word is padded with `correct_region`, and words whose
    box height exceeds the global REJECT_FILTER are OCRed individually.

    Returns:
        (cell_text, conf_dicts): space-joined text plus the flattened
        per-word confidence entries.
    """
    cell_text =""
    conf_dicts=[]
    dynamic_line = coord_adjustment(page_path,line['regions'] ,save_base_path)
    for word_idx, word in enumerate(dynamic_line):
        word = correct_region(word)
        coord_crop, coord = get_coord(word)
        # Skip degenerate boxes below the height threshold.
        if len(coord)!=0 and abs(coord_crop[1,1]-coord_crop[2,1]) > REJECT_FILTER :
            # psm 8: treat the crop as a single word.
            text,conf_dict = get_text(page_path, coord_crop, lang,mode_height,save_base_path,8)
            cell_text = cell_text +" " +text
            conf_dicts.extend(conf_dict)
    return cell_text,conf_dicts
def cell_text_ocr(lang, page_path, line,save_base_path,mode_height):
    """OCR a cell's words (psm 8, no coordinate adjustment) into one string.

    Like `cell_ocr_word` but without `coord_adjustment` and returning only
    the concatenated text (per-word confidences are discarded).

    NOTE(review): `cell_regions` is never used — apparent leftover.
    """
    cell_text =""
    cell_regions = []
    for word_idx, word in enumerate(line['regions']):
        word = correct_region(word)
        coord_crop, coord = get_coord(word)
        # Skip degenerate boxes below the height threshold.
        if len(coord)!=0 and abs(coord_crop[1,1]-coord_crop[2,1]) > REJECT_FILTER :
            text,conf_dict = get_text(page_path, coord_crop, lang,mode_height,save_base_path,8)
            cell_text = cell_text +" " +text
    return cell_text
def cell_ocr(lang, page_path, line,save_base_path,mode_height,psm):
    """OCR a table cell line by line after merging horizontally-split words.

    The cell's regions are merged via `horzontal_merging`, re-aligned via
    `coord_adjustment`, and each merged line is OCRed with the given psm.

    Returns:
        (cell_google_text, text, conf_dicts): the ground-truth text carried
        on the merged lines, the tesseract text, and flattened confidences.
    """
    text =""
    cell_google_text = ""
    conf_dicts = []
    updated_lines = horzontal_merging(line['regions'])
    dynamic_line = coord_adjustment(page_path,updated_lines ,save_base_path)
    for updated_line in dynamic_line:
        # Accumulate the reference (google-vision) text for this cell.
        line_text = updated_line['text']
        cell_google_text= cell_google_text + " "+line_text
        corrected_line = correct_region(updated_line)
        coord_crop, coord = get_coord(corrected_line)
        # Skip degenerate boxes below the height threshold.
        if len(coord)!=0 and abs(coord_crop[1,1]-coord_crop[2,1]) > REJECT_FILTER :
            tess_text,conf_dict = get_text(page_path, coord_crop, lang,mode_height,save_base_path,psm)
            text = text + " " + tess_text
            conf_dicts.extend(conf_dict)
    return cell_google_text,text,conf_dicts
def text_extraction(df,lang, page_path, regions,save_base_path):
    """Evaluate tesseract against google-vision text for one page.

    Depending on the global `ocr_level` ('WORD' or 'LINE'), every word or
    line region is OCRed with tesseract, scored against the google-vision
    text via `seq_matcher`, annotated in place, and appended to `df`.

    Returns:
        (regions, mean_line_score, df, total_chars, total_match_chars).

    NOTE(review): in WORD mode `total_lines` stays 0, so the final
    `final_score/total_lines` raises ZeroDivisionError — LINE mode appears
    to be the only supported path. `df.append` is also removed in
    pandas >= 2.0; this code assumes an older pandas.
    """
    final_score = 0
    total_words = 0
    total_lines = 0
    total_chars = 0
    total_match_chars = 0
    for idx, level in enumerate(regions):
        # Modal text height for this block, used by get_text's psm switch.
        mode_height = frequent_height(level['regions'])
        if ocr_level=="WORD":
            for line_idx, line in enumerate(level['regions']):
                for word_idx, word in enumerate(line['regions']):
                    word = correct_region(word)
                    coord_crop, coord = get_coord(word)
                    word_text = word['text']
                    if len(word_text)>0 and len(coord)!=0 and abs(coord_crop[1,1]-coord_crop[2,1]) > REJECT_FILTER :
                        # psm 8: single-word recognition.
                        text,conf_dict = get_text(page_path, coord_crop, lang,mode_height,save_base_path,8)
                        if text_processing:
                            # Collapse runs of whitespace before scoring.
                            text_list = text.split()
                            text = " ".join(text_list)
                        score,message,match_count = seq_matcher(text,word['text'])
                        final_score = final_score+score
                        total_words = total_words+1
                        total_chars = total_chars+len(remove_space(word['text']))
                        total_match_chars= total_match_chars+match_count
                        # Annotate the region in place with the evaluation result.
                        word['char_match'] = match_count
                        word['tess_text'] = text
                        word['conf_dict'] = conf_dict
                        word['score'] = score
                        word['message'] = message
                        columns = word.keys()
                        df2 = pd.DataFrame([word],columns=columns)
                        df = df.append(df2, ignore_index=True)
                    elif len(word_text)>0:
                        # Region rejected (too small): record a miss.
                        score,message,match_count = seq_matcher("",word['text'])
                        word['char_match'] = match_count
                        word['tess_text'] = " "
                        word['conf_dict'] = None
                        word['score'] = score
                        word['message'] = message
                        columns = word.keys()
                        df2 = pd.DataFrame([word],columns=columns)
                        df = df.append(df2, ignore_index=True)
        if ocr_level=="LINE":
            lines_adjusted = coord_adjustment(page_path, level['regions'],save_base_path)
            for line_idx, line_org in enumerate(lines_adjusted):
                # Deep copies so sorting/padding never mutate the source line.
                line_sorted = copy.deepcopy(sort_line(line_org))
                line_text,total_word = merger_text(line_sorted)
                line = copy.deepcopy(correct_region(line_sorted))
                # psm 7 = single line; fall back to psm 8 for one-word lines.
                psm = 7
                if total_word<2:
                    psm=8
                coord_crop, coord = get_coord(line)
                print("line text",line_text)
                if len(remove_space(line_text))>0 and len(coord)!=0 and abs(coord_crop[1,1]-coord_crop[2,1]) > REJECT_FILTER :
                    # Table cells get dedicated OCR strategies.
                    if 'class' in line.keys() and line['class']=="CELL":
                        line_text,text,conf_dict = cell_ocr(lang, page_path, line,save_base_path,mode_height,psm)
                    elif 'class' in line.keys() and line['class']=="CELL_TEXT":
                        text,conf_dict = cell_ocr_word(lang, page_path, line,save_base_path,mode_height)
                    else:
                        text,conf_dict = get_text(page_path, coord_crop, lang,mode_height,save_base_path,psm)
                    if text_processing:
                        text_list = text.split()
                        text = " ".join(text_list)
                    score,message,match_count = seq_matcher(text,line_text)
                    final_score = final_score+score
                    total_lines = total_lines+1
                    total_chars = total_chars+len(remove_space(line_text))
                    total_match_chars= total_match_chars+match_count
                    # Annotate the region in place with the evaluation result.
                    line['char_match'] = match_count
                    line['tess_text'] = text
                    line['text'] = line_text
                    line['conf_dict'] = conf_dict
                    line['score'] = score
                    line['message'] = message
                    columns = line.keys()
                    df2 = pd.DataFrame([line],columns=columns)
                    df = df.append(df2, ignore_index=True)
                elif len(remove_space(line_text))>0:
                    # Region rejected (too small): record a miss.
                    score,message,match_count = seq_matcher("",line_text)
                    line['char_match'] = match_count
                    line['tess_text'] = " "
                    line['conf_dict'] = None
                    line['text'] = line_text
                    line['score'] = score
                    line['message'] = message
                    columns = line.keys()
                    df2 = pd.DataFrame([line],columns=columns)
                    df = df.append(df2, ignore_index=True)
    return regions,final_score/total_lines,df,total_chars,total_match_chars
# Collect every per-document google-vision result produced by the
# digitization step above (one gv.json per PDF sub-directory).
json_files_path = glob.glob(output_path+"/*/gv.json")
def tesseract(json_files):
    """Evaluate tesseract OCR against each google-vision result file.

    For every `gv.json`, each page is scored via `text_extraction`; the
    annotated JSON and a per-page CSV are written under
    `base_path/<pdf_name>/`.

    Args:
        json_files: paths of the per-document google-vision JSON files.

    Returns:
        (output, final_df): `output` is always empty (kept for interface
        compatibility — nothing is ever appended to it) and `final_df` is
        the per-page summary DataFrame of the LAST processed file.
    """
    output = []
    dfs = []
    for json_file in json_files:
        file_name = json_file.split('/')[-1].split('.json')[0]
        pdf_name = json_file.split('/')[-2]
        print("file name--------------------->>>>>>>>>>>>>>>>>>", pdf_name)
        if not os.path.exists(base_path + pdf_name):
            os.mkdir(base_path + pdf_name)
        save_base_path = base_path + pdf_name
        with open(json_file, 'r+') as f:
            data = json.load(f)
        columns = ["page_path", "page_data", "file_eval_info"]
        final_df = pd.DataFrame(columns=columns)
        Draw(data, save_base_path, regions='regions')
        lang = data['outputs'][0]['config']['OCR']['language']
        total_page = len(data['outputs'][0]['pages'])
        file_score = 0; total_chars_file = 0
        file_data = []; total_match_chars_file = 0
        page_paths = []
        page_data_counts = []
        for idx, page_data in enumerate(data['outputs'][0]['pages']):
            t1 = time.time()
            print("processing started for page no. ", idx)
            page_path = page_data['path']
            # First region is skipped (page-level background region).
            regions = page_data['regions'][1:]
            df = pd.DataFrame()
            regions, score, df, total_chars, total_match_chars = text_extraction(df, lang, page_path, regions, save_base_path)
            file_score = file_score + score
            total_chars_file = total_chars_file + total_chars
            total_match_chars_file = total_match_chars_file + total_match_chars
            file_data.append(df.to_csv())
            page_paths.append(page_path)
            char_details = {"total_chars": total_chars, "total_match_chars": total_match_chars}
            page_data_counts.append(char_details)
            data['outputs'][0]['pages'][idx]["regions"][1:] = copy.deepcopy(regions)
            # Bug fix: elapsed time was computed as `t1 + time.time()`
            # (the sum of two wall-clock timestamps), not the duration.
            t2 = time.time() - t1
            print("processing completed for page in {}".format(t2))
        file_eval_info = {"total_chars": total_chars_file, "total_match_chars": total_match_chars_file, "score": total_match_chars_file / total_chars_file}
        print(file_eval_info)
        final_df["page_path"] = page_paths
        final_df["page_data"] = file_data
        final_df["file_eval_info"] = [file_eval_info] * len(page_paths)
        print("file level evaluation result------------------->>>>>>>>>>>>>>>>>>>>>>>>>>>", file_eval_info)
        data['outputs'][0]['score'] = file_score / total_page
        with open(save_base_path + "/" + file_name + ".json", 'w') as outfile:
            json.dump(data, outfile)
        final_df.to_csv(save_base_path + "/" + file_name + '.csv')
    return output, final_df
# Run the tesseract-vs-google-vision evaluation over every collected file.
output,dfs = tesseract(json_files_path)
def draw_thresh_box(df,path,page_index,save_path):
    """Render low-score regions of one page as an annotated PNG.

    Downloads the page image, draws each region's polygon in blue, writes
    the google-vision text in red at the top-left corner and the tesseract
    text in green at the top-right corner, then saves
    `<save_path>/<page_index>.png`.
    """
    path = path.split('upload')[1]
    image = download_file(download_url,headers,path,f_type='image')
    # Decode the raw bytes into a BGR image.
    nparr = np.frombuffer(image, np.uint8)
    image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
    font = cv2.FONT_HERSHEY_SIMPLEX
    color= (255,0,0);thickness=5
    df =df.reset_index()
    for row in df.iterrows():
        row2 = row[1].to_dict()
        # boundingBox was serialized to CSV as a string; parse it back.
        boxes = row2['boundingBox']
        boxes2 = ast.literal_eval(boxes)
        ground = boxes2['vertices']
        pts = []
        for pt in ground:
            pts.append([int(pt['x']) ,int(pt['y'])])
        cv2.polylines(image, [np.array(pts)],True, color, thickness)
        # Red: ground-truth text; green: tesseract text.
        cv2.putText(image, str(row2['text']), (pts[0][0],pts[0][1]), font,
                   2, (0,0,255), 2, cv2.LINE_AA)
        cv2.putText(image, str(row2['tess_text']), (pts[1][0],pts[1][1]), font,
                   2, (0,255,0), 2, cv2.LINE_AA)
    image_path = os.path.join(save_path , '{}.png'.format(page_index))
    cv2.imwrite(image_path , image)
def visualize_results(df_paths,thresh):
    """Draw every region whose evaluation score fell below `thresh`.

    Walks all per-document CSVs under `df_paths`, re-parses each page's
    embedded CSV (the `page_data` column), filters rows with
    score < thresh and renders them via `draw_thresh_box`.
    """
    for df_path in glob.glob(df_paths+"*/*.csv"):
        save_path = base_path + df_path.split('/')[-2]+"/"
        df = pd.read_csv(df_path)
        for idx,(page_path,page_data) in enumerate(zip(df['page_path'],df['page_data'])):
            # page_data holds a whole CSV as a string; parse it in memory.
            df_string = StringIO(page_data)
            page_df = pd.read_csv(df_string, sep=",")
            filtered_df = page_df[page_df['score']<thresh]
            draw_thresh_box(filtered_df,page_path,idx,save_path)
# Render the low-score visualizations using the global threshold.
visualize_results(base_path,vis_thresh)
| true | true |
f71a1006eb8da62d4f7fca2700df5904cd0816c1 | 12,567 | py | Python | keras/wrappers/scikit_learn.py | phanvanthinh98/keras_LSTM | b22cff1e9fd762226ec3dc9d3af3e300484dd833 | [
"Apache-2.0"
] | 1 | 2021-05-03T05:10:03.000Z | 2021-05-03T05:10:03.000Z | keras/wrappers/scikit_learn.py | phanvanthinh98/keras_LSTM | b22cff1e9fd762226ec3dc9d3af3e300484dd833 | [
"Apache-2.0"
] | null | null | null | keras/wrappers/scikit_learn.py | phanvanthinh98/keras_LSTM | b22cff1e9fd762226ec3dc9d3af3e300484dd833 | [
"Apache-2.0"
] | 1 | 2021-11-25T00:17:16.000Z | 2021-11-25T00:17:16.000Z | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Wrapper for using the Scikit-Learn API with Keras models."""
# pylint: disable=g-classes-have-attributes
import copy
import types
import numpy as np
from keras import losses
from keras.models import Sequential
from keras.utils.generic_utils import has_arg
from keras.utils.np_utils import to_categorical
from tensorflow.python.util.tf_export import keras_export
class BaseWrapper(object):
  """Base class for the Keras scikit-learn wrapper.
  Warning: This class should not be used directly.
  Use descendant classes instead.
  Args:
    build_fn: callable function or class instance
    **sk_params: model parameters & fitting parameters
  The `build_fn` should construct, compile and return a Keras model, which
  will then be used to fit/predict. One of the following
  three values could be passed to `build_fn`:
  1. A function
  2. An instance of a class that implements the `__call__` method
  3. None. This means you implement a class that inherits from either
  `KerasClassifier` or `KerasRegressor`. The `__call__` method of the
  present class will then be treated as the default `build_fn`.
  `sk_params` takes both model parameters and fitting parameters. Legal model
  parameters are the arguments of `build_fn`. Note that like all other
  estimators in scikit-learn, `build_fn` should provide default values for
  its arguments, so that you could create the estimator without passing any
  values to `sk_params`.
  `sk_params` could also accept parameters for calling `fit`, `predict`,
  `predict_proba`, and `score` methods (e.g., `epochs`, `batch_size`).
  fitting (predicting) parameters are selected in the following order:
  1. Values passed to the dictionary arguments of
  `fit`, `predict`, `predict_proba`, and `score` methods
  2. Values passed to `sk_params`
  3. The default values of the `keras.models.Sequential`
  `fit`, `predict`, `predict_proba` and `score` methods
  When using scikit-learn's `grid_search` API, legal tunable parameters are
  those you could pass to `sk_params`, including fitting parameters.
  In other words, you could use `grid_search` to search for the best
  `batch_size` or `epochs` as well as the model parameters.
  """
  def __init__(self, build_fn=None, **sk_params):
    self.build_fn = build_fn
    self.sk_params = sk_params
    # Fail fast on typos rather than silently ignoring unknown params.
    self.check_params(sk_params)
  def check_params(self, params):
    """Checks for user typos in `params`.
    Args:
      params: dictionary; the parameters to be checked
    Raises:
      ValueError: if any member of `params` is not a valid argument.
    """
    legal_params_fns = [
        Sequential.fit, Sequential.predict, Sequential.predict_classes,
        Sequential.evaluate
    ]
    # The model-building callable's own arguments are also legal params;
    # for class instances inspect __call__ instead of the object itself.
    if self.build_fn is None:
      legal_params_fns.append(self.__call__)
    elif (not isinstance(self.build_fn, types.FunctionType) and
          not isinstance(self.build_fn, types.MethodType)):
      legal_params_fns.append(self.build_fn.__call__)
    else:
      legal_params_fns.append(self.build_fn)
    for params_name in params:
      for fn in legal_params_fns:
        if has_arg(fn, params_name):
          break
      else:
        # for/else: reached only when no legal function accepts the name.
        # 'nb_epoch' is tolerated for backwards compatibility.
        if params_name != 'nb_epoch':
          raise ValueError('{} is not a legal parameter'.format(params_name))
  def get_params(self, **params):  # pylint: disable=unused-argument
    """Gets parameters for this estimator.
    Args:
      **params: ignored (exists for API compatibility).
    Returns:
      Dictionary of parameter names mapped to their values.
    """
    res = self.sk_params.copy()
    res.update({'build_fn': self.build_fn})
    return res
  def set_params(self, **params):
    """Sets the parameters of this estimator.
    Args:
      **params: Dictionary of parameter names mapped to their values.
    Returns:
      self
    """
    self.check_params(params)
    self.sk_params.update(params)
    return self
  def fit(self, x, y, **kwargs):
    """Constructs a new model with `build_fn` & fit the model to `(x, y)`.
    Args:
      x : array-like, shape `(n_samples, n_features)`
        Training samples where `n_samples` is the number of samples
        and `n_features` is the number of features.
      y : array-like, shape `(n_samples,)` or `(n_samples, n_outputs)`
        True labels for `x`.
      **kwargs: dictionary arguments
        Legal arguments are the arguments of `Sequential.fit`
    Returns:
      history : object
        details about the training history at each epoch.
    """
    # Rebuild the model from scratch on every fit (sklearn clone semantics).
    if self.build_fn is None:
      self.model = self.__call__(**self.filter_sk_params(self.__call__))
    elif (not isinstance(self.build_fn, types.FunctionType) and
          not isinstance(self.build_fn, types.MethodType)):
      self.model = self.build_fn(
          **self.filter_sk_params(self.build_fn.__call__))
    else:
      self.model = self.build_fn(**self.filter_sk_params(self.build_fn))
    # One-hot encode integer labels when the loss expects a 2-D target.
    if (losses.is_categorical_crossentropy(self.model.loss) and
        len(y.shape) != 2):
      y = to_categorical(y)
    fit_args = copy.deepcopy(self.filter_sk_params(Sequential.fit))
    fit_args.update(kwargs)
    history = self.model.fit(x, y, **fit_args)
    return history
  def filter_sk_params(self, fn, override=None):
    """Filters `sk_params` and returns those in `fn`'s arguments.
    Args:
      fn : arbitrary function
      override: dictionary, values to override `sk_params`
    Returns:
      res : dictionary containing variables
        in both `sk_params` and `fn`'s arguments.
    """
    override = override or {}
    res = {}
    for name, value in self.sk_params.items():
      if has_arg(fn, name):
        res.update({name: value})
    res.update(override)
    return res
@keras_export('keras.wrappers.scikit_learn.KerasClassifier')
class KerasClassifier(BaseWrapper):
  """Implementation of the scikit-learn classifier API for Keras.
  """
  def fit(self, x, y, **kwargs):
    """Constructs a new model with `build_fn` & fit the model to `(x, y)`.
    Args:
      x : array-like, shape `(n_samples, n_features)`
        Training samples where `n_samples` is the number of samples
        and `n_features` is the number of features.
      y : array-like, shape `(n_samples,)` or `(n_samples, n_outputs)`
        True labels for `x`.
      **kwargs: dictionary arguments
        Legal arguments are the arguments of `Sequential.fit`
    Returns:
      history : object
        details about the training history at each epoch.
    Raises:
      ValueError: In case of invalid shape for `y` argument.
    """
    y = np.array(y)
    if len(y.shape) == 2 and y.shape[1] > 1:
      # Already one-hot encoded: classes are the column indices.
      self.classes_ = np.arange(y.shape[1])
    elif (len(y.shape) == 2 and y.shape[1] == 1) or len(y.shape) == 1:
      # Plain labels: remember the class set and map labels to indices.
      self.classes_ = np.unique(y)
      y = np.searchsorted(self.classes_, y)
    else:
      raise ValueError('Invalid shape for y: ' + str(y.shape))
    self.n_classes_ = len(self.classes_)
    return super(KerasClassifier, self).fit(x, y, **kwargs)
  def predict(self, x, **kwargs):
    """Returns the class predictions for the given test data.
    Args:
      x: array-like, shape `(n_samples, n_features)`
        Test samples where `n_samples` is the number of samples
        and `n_features` is the number of features.
      **kwargs: dictionary arguments
        Legal arguments are the arguments
        of `Sequential.predict_classes`.
    Returns:
      preds: array-like, shape `(n_samples,)`
        Class predictions.
    """
    # NOTE(review): Sequential.predict_classes was removed in TF 2.6 —
    # this wrapper targets older Keras/TF versions.
    kwargs = self.filter_sk_params(Sequential.predict_classes, kwargs)
    classes = self.model.predict_classes(x, **kwargs)
    # Map predicted indices back to the original class labels.
    return self.classes_[classes]
  def predict_proba(self, x, **kwargs):
    """Returns class probability estimates for the given test data.
    Args:
      x: array-like, shape `(n_samples, n_features)`
        Test samples where `n_samples` is the number of samples
        and `n_features` is the number of features.
      **kwargs: dictionary arguments
        Legal arguments are the arguments
        of `Sequential.predict_classes`.
    Returns:
      proba: array-like, shape `(n_samples, n_outputs)`
        Class probability estimates.
        In the case of binary classification,
        to match the scikit-learn API,
        will return an array of shape `(n_samples, 2)`
        (instead of `(n_sample, 1)` as in Keras).
    """
    kwargs = self.filter_sk_params(Sequential.predict_proba, kwargs)
    probs = self.model.predict(x, **kwargs)
    # check if binary classification
    if probs.shape[1] == 1:
      # first column is probability of class 0 and second is of class 1
      probs = np.hstack([1 - probs, probs])
    return probs
  def score(self, x, y, **kwargs):
    """Returns the mean accuracy on the given test data and labels.
    Args:
      x: array-like, shape `(n_samples, n_features)`
        Test samples where `n_samples` is the number of samples
        and `n_features` is the number of features.
      y: array-like, shape `(n_samples,)` or `(n_samples, n_outputs)`
        True labels for `x`.
      **kwargs: dictionary arguments
        Legal arguments are the arguments of `Sequential.evaluate`.
    Returns:
      score: float
        Mean accuracy of predictions on `x` wrt. `y`.
    Raises:
      ValueError: If the underlying model isn't configured to
        compute accuracy. You should pass `metrics=["accuracy"]` to
        the `.compile()` method of the model.
    """
    # Convert labels to the same index space used during fit.
    y = np.searchsorted(self.classes_, y)
    kwargs = self.filter_sk_params(Sequential.evaluate, kwargs)
    loss_name = self.model.loss
    if hasattr(loss_name, '__name__'):
      loss_name = loss_name.__name__
    if loss_name == 'categorical_crossentropy' and len(y.shape) != 2:
      y = to_categorical(y)
    outputs = self.model.evaluate(x, y, **kwargs)
    if not isinstance(outputs, list):
      outputs = [outputs]
    # Pick the accuracy metric out of evaluate()'s outputs.
    for name, output in zip(self.model.metrics_names, outputs):
      if name in ['accuracy', 'acc']:
        return output
    raise ValueError('The model is not configured to compute accuracy. '
                     'You should pass `metrics=["accuracy"]` to '
                     'the `model.compile()` method.')
@keras_export('keras.wrappers.scikit_learn.KerasRegressor')
class KerasRegressor(BaseWrapper):
  """Implementation of the scikit-learn regressor API for Keras.
  """
  def predict(self, x, **kwargs):
    """Returns predictions for the given test data.
    Args:
      x: array-like, shape `(n_samples, n_features)`
        Test samples where `n_samples` is the number of samples
        and `n_features` is the number of features.
      **kwargs: dictionary arguments
        Legal arguments are the arguments of `Sequential.predict`.
    Returns:
      preds: array-like, shape `(n_samples,)`
        Predictions.
    """
    kwargs = self.filter_sk_params(Sequential.predict, kwargs)
    # Squeeze the trailing singleton output dimension to match sklearn.
    return np.squeeze(self.model.predict(x, **kwargs))
  def score(self, x, y, **kwargs):
    """Returns the mean loss on the given test data and labels.
    Args:
      x: array-like, shape `(n_samples, n_features)`
        Test samples where `n_samples` is the number of samples
        and `n_features` is the number of features.
      y: array-like, shape `(n_samples,)`
        True labels for `x`.
      **kwargs: dictionary arguments
        Legal arguments are the arguments of `Sequential.evaluate`.
    Returns:
      score: float
        Mean accuracy of predictions on `x` wrt. `y`.
    """
    kwargs = self.filter_sk_params(Sequential.evaluate, kwargs)
    loss = self.model.evaluate(x, y, **kwargs)
    # Negated because sklearn scorers treat larger as better.
    if isinstance(loss, list):
      return -loss[0]
    return -loss
| 35.600567 | 80 | 0.659585 |
import copy
import types
import numpy as np
from keras import losses
from keras.models import Sequential
from keras.utils.generic_utils import has_arg
from keras.utils.np_utils import to_categorical
from tensorflow.python.util.tf_export import keras_export
class BaseWrapper(object):
  """Base class for the Keras scikit-learn wrapper.

  Holds a model-building callable (`build_fn`) plus a bag of model and
  fitting parameters (`sk_params`), and implements the sklearn estimator
  protocol (`get_params`/`set_params`/`fit`). Not for direct use — use
  `KerasClassifier` or `KerasRegressor`.
  """
  def __init__(self, build_fn=None, **sk_params):
    self.build_fn = build_fn
    self.sk_params = sk_params
    # Fail fast on typos rather than silently ignoring unknown params.
    self.check_params(sk_params)
  def check_params(self, params):
    """Raise ValueError if any name in `params` is not accepted by
    `build_fn` or by Sequential's fit/predict/predict_classes/evaluate.
    """
    legal_params_fns = [
        Sequential.fit, Sequential.predict, Sequential.predict_classes,
        Sequential.evaluate
    ]
    # Class instances expose their arguments through __call__.
    if self.build_fn is None:
      legal_params_fns.append(self.__call__)
    elif (not isinstance(self.build_fn, types.FunctionType) and
          not isinstance(self.build_fn, types.MethodType)):
      legal_params_fns.append(self.build_fn.__call__)
    else:
      legal_params_fns.append(self.build_fn)
    for params_name in params:
      for fn in legal_params_fns:
        if has_arg(fn, params_name):
          break
      else:
        # for/else: no legal function accepts this name.
        # 'nb_epoch' is tolerated for backwards compatibility.
        if params_name != 'nb_epoch':
          raise ValueError('{} is not a legal parameter'.format(params_name))
  def get_params(self, **params):
    """Return this estimator's parameters (sklearn API; `params` ignored)."""
    res = self.sk_params.copy()
    res.update({'build_fn': self.build_fn})
    return res
  def set_params(self, **params):
    """Validate and merge `params` into `sk_params`; return self."""
    self.check_params(params)
    self.sk_params.update(params)
    return self
  def fit(self, x, y, **kwargs):
    """Build a fresh model via `build_fn` and fit it to `(x, y)`.

    Returns the Keras `History` object from `model.fit`.
    """
    # Rebuild the model from scratch on every fit (sklearn clone semantics).
    if self.build_fn is None:
      self.model = self.__call__(**self.filter_sk_params(self.__call__))
    elif (not isinstance(self.build_fn, types.FunctionType) and
          not isinstance(self.build_fn, types.MethodType)):
      self.model = self.build_fn(
          **self.filter_sk_params(self.build_fn.__call__))
    else:
      self.model = self.build_fn(**self.filter_sk_params(self.build_fn))
    # One-hot encode integer labels when the loss expects a 2-D target.
    if (losses.is_categorical_crossentropy(self.model.loss) and
        len(y.shape) != 2):
      y = to_categorical(y)
    fit_args = copy.deepcopy(self.filter_sk_params(Sequential.fit))
    fit_args.update(kwargs)
    history = self.model.fit(x, y, **fit_args)
    return history
  def filter_sk_params(self, fn, override=None):
    """Return the subset of `sk_params` accepted by `fn`, with `override`
    entries taking precedence.
    """
    override = override or {}
    res = {}
    for name, value in self.sk_params.items():
      if has_arg(fn, name):
        res.update({name: value})
    res.update(override)
    return res
@keras_export('keras.wrappers.scikit_learn.KerasClassifier')
class KerasClassifier(BaseWrapper):
  """Scikit-learn classifier API for Keras models."""
  def fit(self, x, y, **kwargs):
    """Fit to `(x, y)`, recording `classes_`/`n_classes_`.

    Accepts 1-D labels, a `(n, 1)` column, or a one-hot `(n, k)` matrix;
    raises ValueError for any other shape.
    """
    y = np.array(y)
    if len(y.shape) == 2 and y.shape[1] > 1:
      # Already one-hot encoded: classes are the column indices.
      self.classes_ = np.arange(y.shape[1])
    elif (len(y.shape) == 2 and y.shape[1] == 1) or len(y.shape) == 1:
      # Plain labels: remember the class set and map labels to indices.
      self.classes_ = np.unique(y)
      y = np.searchsorted(self.classes_, y)
    else:
      raise ValueError('Invalid shape for y: ' + str(y.shape))
    self.n_classes_ = len(self.classes_)
    return super(KerasClassifier, self).fit(x, y, **kwargs)
  def predict(self, x, **kwargs):
    """Return class-label predictions for `x` (shape `(n_samples,)`)."""
    # NOTE(review): predict_classes was removed in TF 2.6 — this targets
    # older Keras/TF versions.
    kwargs = self.filter_sk_params(Sequential.predict_classes, kwargs)
    classes = self.model.predict_classes(x, **kwargs)
    # Map predicted indices back to the original class labels.
    return self.classes_[classes]
  def predict_proba(self, x, **kwargs):
    """Return class probabilities; binary outputs are expanded to the
    sklearn-style `(n_samples, 2)` shape.
    """
    kwargs = self.filter_sk_params(Sequential.predict_proba, kwargs)
    probs = self.model.predict(x, **kwargs)
    if probs.shape[1] == 1:
      # Single sigmoid output: column 0 = P(class 0), column 1 = P(class 1).
      probs = np.hstack([1 - probs, probs])
    return probs
  def score(self, x, y, **kwargs):
    """Return mean accuracy on `(x, y)`; raises ValueError unless the
    model was compiled with an accuracy metric.
    """
    # Convert labels to the same index space used during fit.
    y = np.searchsorted(self.classes_, y)
    kwargs = self.filter_sk_params(Sequential.evaluate, kwargs)
    loss_name = self.model.loss
    if hasattr(loss_name, '__name__'):
      loss_name = loss_name.__name__
    if loss_name == 'categorical_crossentropy' and len(y.shape) != 2:
      y = to_categorical(y)
    outputs = self.model.evaluate(x, y, **kwargs)
    if not isinstance(outputs, list):
      outputs = [outputs]
    # Pick the accuracy metric out of evaluate()'s outputs.
    for name, output in zip(self.model.metrics_names, outputs):
      if name in ['accuracy', 'acc']:
        return output
    raise ValueError('The model is not configured to compute accuracy. '
                     'You should pass `metrics=["accuracy"]` to '
                     'the `model.compile()` method.')
@keras_export('keras.wrappers.scikit_learn.KerasRegressor')
class KerasRegressor(BaseWrapper):
  """Scikit-learn regressor API for Keras models."""
  def predict(self, x, **kwargs):
    """Return predictions for `x`, squeezed to shape `(n_samples,)`."""
    kwargs = self.filter_sk_params(Sequential.predict, kwargs)
    return np.squeeze(self.model.predict(x, **kwargs))
  def score(self, x, y, **kwargs):
    """Return the negated mean loss on `(x, y)` — negated because sklearn
    scorers treat larger as better.
    """
    kwargs = self.filter_sk_params(Sequential.evaluate, kwargs)
    loss = self.model.evaluate(x, y, **kwargs)
    if isinstance(loss, list):
      return -loss[0]
    return -loss
| true | true |
f71a12030f0c487777bd6c37ee0b866b3054ef36 | 1,894 | py | Python | backend/user/tests/test_models.py | Ssents/stonewell_tech | 2466dbd26105f630bccd87146253ac8adfc4e0bb | [
"MIT"
] | 1 | 2022-03-25T07:44:19.000Z | 2022-03-25T07:44:19.000Z | backend/user/tests/test_models.py | Ssents/stonewell_tech | 2466dbd26105f630bccd87146253ac8adfc4e0bb | [
"MIT"
] | null | null | null | backend/user/tests/test_models.py | Ssents/stonewell_tech | 2466dbd26105f630bccd87146253ac8adfc4e0bb | [
"MIT"
] | null | null | null | from django.test import TestCase, Client
from django.contrib.auth import get_user_model
class ModelTests(TestCase):
    # Unit tests for the custom user model/manager.
    def test_create_user_with_email_successful(self):
        '''
        Test that creating a user with an email is successful
        '''
        email = 'test@gmail.com'
        password = '456@3'
        username = 'test1'
        user = get_user_model().objects.create_user(
            email = email,
            username = username
        )
        # NOTE(review): set_password only hashes in memory; without
        # user.save() the password is not persisted — the in-memory
        # check below still passes.
        user.set_password(password)
        self.assertEqual(user.email, email)
        self.assertTrue(user.check_password(password))
    def test_user_email_is_normalised(self):
        '''
        Test that user email used to sign in is normalized
        '''
        # NOTE(review): Django's normalize_email lowercases only the
        # domain part; this passes because the local part is already
        # lowercase — confirm full-lowercase is the intended contract.
        email = 'test@STONEWELLTECH.com'
        user = get_user_model().objects.create_user(email, 'test123')
        self.assertEqual(user.email, email.lower())
    def test_create_user_invalid_email(self):
        '''
        Test creating user with no email raises an error
        '''
        with self.assertRaises(ValueError):
            get_user_model().objects.create_user(None, 'test123')
    def test_create_new_super_user(self):
        '''Test creating a superuser'''
        user = get_user_model().objects.create_superuser(
            'test@stonewelltech.com',
            'test123'
        )
        self.assertTrue(user.is_superuser) # is_superuser is added by PermissionsMixin
        self.assertTrue(user.is_staff)
class UserModelTests(TestCase):
    """Tests that user characteristics are saved correctly.

    ``setUp`` prepares an authenticated admin client and a regular user
    for the individual test methods to inspect.
    """

    def setUp(self):
        # BUG FIX: the original code called
        # ``self.client.force_login(self.admin_user)`` without ever
        # assigning ``self.admin_user``, so every test in this class
        # failed with an AttributeError.  Create the superuser first.
        self.admin_user = get_user_model().objects.create_superuser(
            'admin@stonewelltech.com',
            'test123'
        )
        self.client = Client()
        self.client.force_login(self.admin_user)
        self.user = get_user_model().objects.create_user(
            email='user@stonewelltech.com',
            username='Test username'
        )
user.set_password(password) | 32.101695 | 87 | 0.621964 | from django.test import TestCase, Client
from django.contrib.auth import get_user_model
class ModelTests(TestCase):
def test_create_user_with_email_successful(self):
email = 'test@gmail.com'
password = '456@3'
username = 'test1'
user = get_user_model().objects.create_user(
email = email,
username = username
)
user.set_password(password)
self.assertEqual(user.email, email)
self.assertTrue(user.check_password(password))
def test_user_email_is_normalised(self):
email = 'test@STONEWELLTECH.com'
user = get_user_model().objects.create_user(email, 'test123')
self.assertEqual(user.email, email.lower())
def test_create_user_invalid_email(self):
with self.assertRaises(ValueError):
get_user_model().objects.create_user(None, 'test123')
def test_create_new_super_user(self):
user = get_user_model().objects.create_superuser(
'test@stonewelltech.com',
'test123'
)
self.assertTrue(user.is_superuser)
self.assertTrue(user.is_staff)
class UserModelTests(TestCase):
def setUp(self):
self.client = Client()
self.client.force_login(self.admin_user)
self.user = get_user_model().objects.create_user(
email = 'user@stonewelltech.com',
username = 'Test username'
)
user.set_password(password) | true | true |
f71a13679ad5560a4a0a810a20a468a27ec122dd | 6,128 | py | Python | devday/talk/migrations/0044_auto_20200310_2010.py | jenslauterbach/devday_website | a827c9237e656842542eff07ec9fa7b39716a0ee | [
"CC-BY-4.0",
"BSD-3-Clause"
] | 6 | 2018-09-30T20:18:01.000Z | 2020-03-12T09:03:38.000Z | devday/talk/migrations/0044_auto_20200310_2010.py | jenslauterbach/devday_website | a827c9237e656842542eff07ec9fa7b39716a0ee | [
"CC-BY-4.0",
"BSD-3-Clause"
] | 260 | 2018-09-30T14:17:57.000Z | 2022-03-04T13:48:34.000Z | devday/talk/migrations/0044_auto_20200310_2010.py | jenslauterbach/devday_website | a827c9237e656842542eff07ec9fa7b39716a0ee | [
"CC-BY-4.0",
"BSD-3-Clause"
] | 9 | 2018-09-30T13:17:21.000Z | 2020-10-03T12:55:05.000Z | # Generated by Django 2.2.10 on 2020-03-10 20:10
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
from django.db import migrations, models
def migrate_speakers(apps, schema_editor):
    """Forward data migration: copy each talk's old one-to-one speaker
    links into the new many-to-many through tables.

    Every former ``published_speaker``/``draft_speaker`` foreign key value
    becomes a single through-table row with ``order=1``.

    NOTE(review): registered below via ``RunPython`` without a reverse
    function, so this migration is irreversible.
    """
    # Use the historical model states, not the current model classes.
    Talk = apps.get_model("talk", "Talk")
    TalkPublishedSpeaker = apps.get_model("talk", "TalkPublishedSpeaker")
    TalkDraftSpeaker = apps.get_model("talk", "TalkDraftSpeaker")
    # Run against the same database connection the migration executor uses.
    db_alias = schema_editor.connection.alias
    for talk in Talk.objects.using(db_alias).all():
        if talk.published_speaker is not None:
            TalkPublishedSpeaker.objects.using(db_alias).create(
                published_speaker_id=talk.published_speaker.id, talk_id=talk.id, order=1
            )
        if talk.draft_speaker is not None:
            TalkDraftSpeaker.objects.using(db_alias).create(
                draft_speaker_id=talk.draft_speaker.id, talk_id=talk.id, order=1
            )
class Migration(migrations.Migration):
    """Replace Talk's single draft/published speaker foreign keys with
    ordered many-to-many relations.

    Order of operations: (1) create the two through models, (2) copy the
    existing FK links via ``migrate_speakers``, (3) drop the old FK
    columns, (4) add the new M2M fields using the through models.
    """

    dependencies = [
        ("speaker", "0003_auto_20181019_0948"),
        ("talk", "0043_auto_20200310_1737"),
    ]
    operations = [
        # (1) Through model linking a talk to a published speaker, with an
        # explicit ordering column.
        migrations.CreateModel(
            name="TalkPublishedSpeaker",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "created",
                    model_utils.fields.AutoCreatedField(
                        default=django.utils.timezone.now,
                        editable=False,
                        verbose_name="created",
                    ),
                ),
                (
                    "modified",
                    model_utils.fields.AutoLastModifiedField(
                        default=django.utils.timezone.now,
                        editable=False,
                        verbose_name="modified",
                    ),
                ),
                (
                    "order",
                    models.PositiveIntegerField(
                        db_index=True, editable=False, verbose_name="order"
                    ),
                ),
                (
                    "published_speaker",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="speaker.PublishedSpeaker",
                        verbose_name="Published speaker",
                    ),
                ),
                (
                    "talk",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="talk.Talk",
                        verbose_name="Talk",
                    ),
                ),
            ],
            options={
                "ordering": ("order",),
                "verbose_name": "Talk published speaker",
                "verbose_name_plural": "Talk published speakers",
                "unique_together": {("talk", "published_speaker")},
            },
        ),
        # (1) Equivalent through model for draft speakers.
        migrations.CreateModel(
            name="TalkDraftSpeaker",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "created",
                    model_utils.fields.AutoCreatedField(
                        default=django.utils.timezone.now,
                        editable=False,
                        verbose_name="created",
                    ),
                ),
                (
                    "modified",
                    model_utils.fields.AutoLastModifiedField(
                        default=django.utils.timezone.now,
                        editable=False,
                        verbose_name="modified",
                    ),
                ),
                (
                    "order",
                    models.PositiveIntegerField(
                        db_index=True, editable=False, verbose_name="order"
                    ),
                ),
                (
                    "draft_speaker",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="speaker.Speaker",
                        verbose_name="Speaker",
                    ),
                ),
                (
                    "talk",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="talk.Talk",
                        verbose_name="Talk",
                    ),
                ),
            ],
            options={
                "ordering": ("order",),
                "verbose_name": "Talk draft speaker",
                "verbose_name_plural": "Talk draft speakers",
                "unique_together": {("talk", "draft_speaker")},
            },
        ),
        # (2) Copy existing FK links; no reverse function, so irreversible.
        migrations.RunPython(migrate_speakers),
        # (3) Drop the superseded single-speaker foreign keys.
        migrations.RemoveField(model_name="talk", name="draft_speaker"),
        migrations.RemoveField(model_name="talk", name="published_speaker"),
        # (4) Expose the through tables as M2M fields on Talk.
        migrations.AddField(
            model_name="talk",
            name="draft_speakers",
            field=models.ManyToManyField(
                blank=True,
                through="talk.TalkDraftSpeaker",
                to="speaker.Speaker",
                verbose_name="Speaker (draft)",
            ),
        ),
        migrations.AddField(
            model_name="talk",
            name="published_speakers",
            field=models.ManyToManyField(
                blank=True,
                through="talk.TalkPublishedSpeaker",
                to="speaker.PublishedSpeaker",
                verbose_name="Speaker (public)",
            ),
        ),
    ]
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
from django.db import migrations, models
def migrate_speakers(apps, schema_editor):
Talk = apps.get_model("talk", "Talk")
TalkPublishedSpeaker = apps.get_model("talk", "TalkPublishedSpeaker")
TalkDraftSpeaker = apps.get_model("talk", "TalkDraftSpeaker")
db_alias = schema_editor.connection.alias
for talk in Talk.objects.using(db_alias).all():
if talk.published_speaker is not None:
TalkPublishedSpeaker.objects.using(db_alias).create(
published_speaker_id=talk.published_speaker.id, talk_id=talk.id, order=1
)
if talk.draft_speaker is not None:
TalkDraftSpeaker.objects.using(db_alias).create(
draft_speaker_id=talk.draft_speaker.id, talk_id=talk.id, order=1
)
class Migration(migrations.Migration):
dependencies = [
("speaker", "0003_auto_20181019_0948"),
("talk", "0043_auto_20200310_1737"),
]
operations = [
migrations.CreateModel(
name="TalkPublishedSpeaker",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"created",
model_utils.fields.AutoCreatedField(
default=django.utils.timezone.now,
editable=False,
verbose_name="created",
),
),
(
"modified",
model_utils.fields.AutoLastModifiedField(
default=django.utils.timezone.now,
editable=False,
verbose_name="modified",
),
),
(
"order",
models.PositiveIntegerField(
db_index=True, editable=False, verbose_name="order"
),
),
(
"published_speaker",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="speaker.PublishedSpeaker",
verbose_name="Published speaker",
),
),
(
"talk",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="talk.Talk",
verbose_name="Talk",
),
),
],
options={
"ordering": ("order",),
"verbose_name": "Talk published speaker",
"verbose_name_plural": "Talk published speakers",
"unique_together": {("talk", "published_speaker")},
},
),
migrations.CreateModel(
name="TalkDraftSpeaker",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"created",
model_utils.fields.AutoCreatedField(
default=django.utils.timezone.now,
editable=False,
verbose_name="created",
),
),
(
"modified",
model_utils.fields.AutoLastModifiedField(
default=django.utils.timezone.now,
editable=False,
verbose_name="modified",
),
),
(
"order",
models.PositiveIntegerField(
db_index=True, editable=False, verbose_name="order"
),
),
(
"draft_speaker",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="speaker.Speaker",
verbose_name="Speaker",
),
),
(
"talk",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="talk.Talk",
verbose_name="Talk",
),
),
],
options={
"ordering": ("order",),
"verbose_name": "Talk draft speaker",
"verbose_name_plural": "Talk draft speakers",
"unique_together": {("talk", "draft_speaker")},
},
),
migrations.RunPython(migrate_speakers),
migrations.RemoveField(model_name="talk", name="draft_speaker"),
migrations.RemoveField(model_name="talk", name="published_speaker"),
migrations.AddField(
model_name="talk",
name="draft_speakers",
field=models.ManyToManyField(
blank=True,
through="talk.TalkDraftSpeaker",
to="speaker.Speaker",
verbose_name="Speaker (draft)",
),
),
migrations.AddField(
model_name="talk",
name="published_speakers",
field=models.ManyToManyField(
blank=True,
through="talk.TalkPublishedSpeaker",
to="speaker.PublishedSpeaker",
verbose_name="Speaker (public)",
),
),
]
| true | true |
f71a147252b727cb58683934b78cbaab53a991a4 | 14,687 | py | Python | torchreid/models/mobilenetv3.py | daniil-lyakhov/deep-object-reid | b0f7d6a2d4cff8c417a66d82c09d16788d81ec67 | [
"Apache-2.0"
] | null | null | null | torchreid/models/mobilenetv3.py | daniil-lyakhov/deep-object-reid | b0f7d6a2d4cff8c417a66d82c09d16788d81ec67 | [
"Apache-2.0"
] | null | null | null | torchreid/models/mobilenetv3.py | daniil-lyakhov/deep-object-reid | b0f7d6a2d4cff8c417a66d82c09d16788d81ec67 | [
"Apache-2.0"
] | null | null | null | import math
import torch
import torch.nn as nn
from torch.cuda.amp import autocast
from torchreid.losses import AngleSimpleLinear
from torchreid.ops import Dropout, EvalModeSetter, rsc
from .common import HSigmoid, HSwish, ModelInterface, make_divisible
import timm
from torchreid.integration.nncf.compression import get_no_nncf_trace_context_manager, nullcontext
__all__ = ['mobilenetv3_large', 'mobilenetv3_large_075', 'mobilenetv3_small', 'mobilenetv3_large_150',
'mobilenetv3_large_125']
# Download locations for ImageNet-pretrained checkpoints, keyed by the
# builder-function name (consumed by init_pretrained_weights below).
pretrained_urls = {
    'mobilenetv3_small':
    'https://github.com/d-li14/mobilenetv3.pytorch/blob/master/pretrained/mobilenetv3-small-55df8e1f.pth?raw=true',
    'mobilenetv3_large':
    'https://github.com/d-li14/mobilenetv3.pytorch/blob/master/pretrained/mobilenetv3-large-1cd25616.pth?raw=true',
    'mobilenetv3_large_075':
    'https://github.com/d-li14/mobilenetv3.pytorch/blob/master/pretrained/mobilenetv3-large-0.75-9632d2a8.pth?raw=true',
    'mobilenetv3_large_21k':
    'https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/mobilenetv3_large_100_miil_21k.pth'
}

# When a flag is True the matching sub-graph (SE layers / classification
# head) is wrapped in a no-NNCF-trace context so model compression skips
# it; when False, the no-op nullcontext is used instead.
SHOULD_NNCF_SKIP_SE_LAYERS = False
SHOULD_NNCF_SKIP_HEAD = False

no_nncf_se_layer_context = get_no_nncf_trace_context_manager() if SHOULD_NNCF_SKIP_SE_LAYERS else nullcontext
no_nncf_head_context = get_no_nncf_trace_context_manager() if SHOULD_NNCF_SKIP_HEAD else nullcontext
class SELayer(nn.Module):
    """Squeeze-and-Excitation block: re-weights channels with a small
    bottleneck MLP applied to globally pooled activations."""

    def __init__(self, channel, reduction=4):
        super(SELayer, self).__init__()
        # Bottleneck width, rounded to a multiple of 8.
        squeezed = make_divisible(channel // reduction, 8)
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Sequential(
            nn.Linear(channel, squeezed),
            nn.ReLU(inplace=True),
            nn.Linear(squeezed, channel),
            HSigmoid()
        )

    def forward(self, x):
        # Optionally hidden from NNCF tracing (see module-level flags).
        with no_nncf_se_layer_context():
            batch, channels = x.size(0), x.size(1)
            weights = self.avg_pool(x).view(batch, channels)
            weights = self.fc(weights).view(batch, channels, 1, 1)
            return x * weights
def conv_3x3_bn(inp, oup, stride, IN_conv1=False):
    """Stem block: 3x3 conv -> (Instance|Batch)Norm -> h-swish."""
    norm_layer = nn.InstanceNorm2d(oup, affine=True) if IN_conv1 else nn.BatchNorm2d(oup)
    return nn.Sequential(
        nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
        norm_layer,
        HSwish()
    )
def conv_1x1_bn(inp, oup, loss='softmax'):
    """Final trunk block: 1x1 conv -> BN -> activation.

    Softmax-style losses use h-swish; margin-based losses use PReLU.
    """
    if loss == 'softmax':
        activation = HSwish()
    else:
        activation = nn.PReLU()
    return nn.Sequential(
        nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
        nn.BatchNorm2d(oup),
        activation
    )
class InvertedResidual(nn.Module):
    """MobileNetV3 inverted-residual block.

    Depthwise 3x3/5x5 conv between pointwise expand/project convs, with
    optional Squeeze-and-Excitation and h-swish.  A residual shortcut is
    used only when the block is stride-1 and preserves channel count.
    When ``inp == hidden_dim`` (expansion ratio 1) the expand conv is
    omitted entirely.
    """
    def __init__(self, inp, hidden_dim, oup, kernel_size, stride, use_se, use_hs):
        super(InvertedResidual, self).__init__()
        assert stride in [1, 2]
        # Residual connection is only valid when shapes match.
        self.identity = stride == 1 and inp == oup
        if inp == hidden_dim:
            self.conv = nn.Sequential(
                # dw
                nn.Conv2d(hidden_dim, hidden_dim, kernel_size, stride, (kernel_size - 1) // 2, groups=hidden_dim, bias=False),
                nn.BatchNorm2d(hidden_dim),
                HSwish() if use_hs else nn.ReLU(inplace=True),
                # Squeeze-and-Excite (after the activation in this branch)
                SELayer(hidden_dim) if use_se else nn.Identity(),
                # pw-linear projection (no activation)
                nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup),
            )
        else:
            self.conv = nn.Sequential(
                # pw expansion
                nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
                nn.BatchNorm2d(hidden_dim),
                HSwish() if use_hs else nn.ReLU(inplace=True),
                # dw
                nn.Conv2d(hidden_dim, hidden_dim, kernel_size, stride, (kernel_size - 1) // 2, groups=hidden_dim, bias=False),
                nn.BatchNorm2d(hidden_dim),
                # Squeeze-and-Excite (before the activation in this branch)
                SELayer(hidden_dim) if use_se else nn.Identity(),
                HSwish() if use_hs else nn.ReLU(inplace=True),
                # pw-linear projection (no activation)
                nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
                nn.BatchNorm2d(oup),
            )
    def forward(self, x):
        if self.identity:
            return x + self.conv(x)
        else:
            return self.conv(x)
class MobileNetV3(ModelInterface):
    """MobileNetV3 backbone (large/small) with a configurable classification head.

    Args:
        cfgs: per-block settings, one row per inverted residual:
            [kernel, expansion ratio, out channels, use_SE, use_HS, stride].
        mode: architecture variant, ``'large'`` or ``'small'`` (selects head width).
        IN_conv1: use InstanceNorm instead of BatchNorm in the stem conv.
        num_classes: number of classifier outputs.
        width_mult: channel width multiplier.
        in_channels: number of input channels (used for the optional input IN).
        input_size: expected input resolution; below 100 px the first two
            stride-2 reductions are disabled to keep feature maps usable.
        dropout_cls: kwargs forwarded to the Dropout op in the head.
        pooling_type: global pooling flavour used before the classifier.
        IN_first: apply InstanceNorm2d to the raw input.
        self_challenging_cfg: RSC (representation self-challenging) settings.
    """

    def __init__(self,
                 cfgs,
                 mode,
                 IN_conv1=False,
                 num_classes=1000,
                 width_mult=1.,
                 in_channels=3,
                 input_size=(224, 224),
                 dropout_cls=None,
                 pooling_type='avg',
                 IN_first=False,
                 self_challenging_cfg=False,
                 **kwargs):
        super().__init__(**kwargs)
        self.in_size = input_size
        self.num_classes = num_classes
        self.input_IN = nn.InstanceNorm2d(in_channels, affine=True) if IN_first else None
        self.pooling_type = pooling_type
        self.self_challenging_cfg = self_challenging_cfg
        self.width_mult = width_mult
        self.dropout_cls = dropout_cls
        # setting of inverted residual blocks
        self.cfgs = cfgs
        assert mode in ['large', 'small']
        # building first layer; small inputs (<100 px) keep stride 1 in the stem
        input_channel = make_divisible(16 * self.width_mult, 8)
        stride = 1 if self.in_size[0] < 100 else 2
        layers = [conv_3x3_bn(3, input_channel, stride, IN_conv1)]
        # building inverted residual blocks; for small inputs the first
        # stride-2 block is also forced to stride 1 (guarded by `flag`)
        block = InvertedResidual
        flag = True
        for k, t, c, use_se, use_hs, s in self.cfgs:
            if (self.in_size[0] < 100) and (s == 2) and flag:
                s = 1
                flag = False
            output_channel = make_divisible(c * self.width_mult, 8)
            exp_size = make_divisible(input_channel * t, 8)
            layers.append(block(input_channel, exp_size, output_channel, k, s, use_se, use_hs))
            input_channel = output_channel
        self.features = nn.Sequential(*layers)
        self.num_features = exp_size
        # building last several layers
        self.conv = conv_1x1_bn(input_channel, exp_size, self.loss)
        output_channel = {'large': 1280, 'small': 1024}
        output_channel = make_divisible(output_channel[mode] * self.width_mult, 8) if self.width_mult > 1.0 else output_channel[mode]
        if self.loss in ('softmax', 'asl'):
            self.classifier = nn.Sequential(
                nn.Linear(exp_size, output_channel),
                nn.BatchNorm1d(output_channel),
                HSwish(),
                Dropout(**self.dropout_cls),
                nn.Linear(output_channel, self.num_classes),
            )
        else:
            # angular-margin losses use a cosine-similarity output layer
            assert self.loss in ['am_softmax', 'am_binary']
            self.classifier = nn.Sequential(
                nn.Linear(exp_size, output_channel),
                nn.BatchNorm1d(output_channel),
                nn.PReLU(),
                Dropout(**self.dropout_cls),
                AngleSimpleLinear(output_channel, self.num_classes),
            )
        self._initialize_weights()
        # wrap forward in AMP autocast according to the configured precision
        self.forward = autocast(self.mix_precision)(self.forward)

    def extract_features(self, x):
        """Runs the conv trunk plus the final 1x1 conv; returns the
        (N, num_features, H, W) feature map."""
        return self.conv(self.features(x))

    def infer_head(self, x, skip_pool=False):
        """Applies global pooling (unless ``skip_pool``) and the classifier.

        Returns:
            Tuple of (global feature vector, classification logits).
        """
        if not skip_pool:
            glob_features = self._glob_feature_vector(x, self.pooling_type, reduce_dims=False)
        else:
            glob_features = x
        logits = self.classifier(glob_features.view(x.shape[0], -1))
        return glob_features, logits

    def _initialize_weights(self):
        """He-style init for convs, unit scale for BN, small-normal linears."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
                if m.bias is not None:
                    m.bias.data.zero_()
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                # (removed an unused local that read m.weight.size(1))
                m.weight.data.normal_(0, 0.01)
                m.bias.data.zero_()

    def forward(self, x, return_featuremaps=False, get_embeddings=False, gt_labels=None):
        """Forward pass.

        Args:
            x: input image batch.
            return_featuremaps: return the raw feature map instead of logits.
            get_embeddings: also return the pooled feature vector.
            gt_labels: ground-truth labels; required for RSC during training.
        """
        if self.input_IN is not None:
            x = self.input_IN(x)
        y = self.extract_features(x)
        if return_featuremaps:
            return y
        with no_nncf_head_context():
            glob_features, logits = self.infer_head(y, skip_pool=False)
            if self.training and self.self_challenging_cfg.enable and gt_labels is not None:
                # Representation self-challenging: suppress the most
                # predictive feature units, then recompute the logits.
                glob_features = rsc(
                    features=glob_features,
                    scores=logits,
                    labels=gt_labels,
                    retain_p=1.0 - self.self_challenging_cfg.drop_p,
                    retain_batch=1.0 - self.self_challenging_cfg.drop_batch_p
                )
                # NOTE(review): ``self.output`` is not defined anywhere in
                # this class; confirm it is provided by ModelInterface,
                # otherwise this should likely be ``self.classifier``.
                with EvalModeSetter([self.output], m_type=(nn.BatchNorm1d, nn.BatchNorm2d)):
                    # BUG FIX: recompute logits from the RSC-masked feature
                    # vector.  The original passed the raw input ``x`` with
                    # skip_pool=True, which has the wrong shape for the
                    # classifier and would crash (or silently misbehave).
                    _, logits = self.infer_head(glob_features, skip_pool=True)
        if not self.training and self.is_classification():
            return [logits]
        if get_embeddings:
            out_data = [logits, glob_features]
        elif self.loss in ['softmax', 'am_softmax', 'asl', 'am_binary']:
            out_data = [logits]
        elif self.loss in ['triplet']:
            out_data = [logits, glob_features]
        else:
            raise KeyError("Unsupported loss: {}".format(self.loss))
        return tuple(out_data)
def init_pretrained_weights(model, key='', **kwargs):
    """Initializes model with pretrained weights.

    Downloads the checkpoint registered for ``key`` in ``pretrained_urls``
    into the torch cache directory (unless already cached) and loads it
    into ``model``.  Layers that don't match with pretrained layers in
    name or size are kept unchanged.
    """
    import os
    import gdown
    from torchreid.utils import load_pretrained_weights

    def _get_torch_home():
        # Mirror torch.hub's cache resolution:
        # $TORCH_HOME, else $XDG_CACHE_HOME/torch, else ~/.cache/torch.
        ENV_TORCH_HOME = 'TORCH_HOME'
        ENV_XDG_CACHE_HOME = 'XDG_CACHE_HOME'
        DEFAULT_CACHE_DIR = '~/.cache'
        return os.path.expanduser(
            os.getenv(
                ENV_TORCH_HOME,
                os.path.join(
                    os.getenv(ENV_XDG_CACHE_HOME, DEFAULT_CACHE_DIR), 'torch'
                )
            )
        )

    model_dir = os.path.join(_get_torch_home(), 'checkpoints')
    # exist_ok replaces the manual errno.EEXIST try/except of the original.
    os.makedirs(model_dir, exist_ok=True)
    cached_file = os.path.join(model_dir, key + '_imagenet.pth')
    if not os.path.exists(cached_file):
        gdown.download(pretrained_urls[key], cached_file)
    # Loads in place; the original pointlessly rebound the result.
    load_pretrained_weights(model, cached_file, **kwargs)
def mobilenetv3_large_075(pretrained=False, **kwargs):
    """
    Constructs a MobileNetV3-Large model with width multiplier 0.75.

    Args:
        pretrained: if True, download and load the ImageNet checkpoint.
        **kwargs: forwarded to the MobileNetV3 constructor.
    """
    cfgs = [
        # kernel, expansion(t), channels, use-SE, use-HS, stride
        [3,   1,  16, 0, 0, 1],
        [3,   4,  24, 0, 0, 2],
        [3,   3,  24, 0, 0, 1],
        [5,   3,  40, 1, 0, 2],
        [5,   3,  40, 1, 0, 1],
        [5,   3,  40, 1, 0, 1],
        [3,   6,  80, 0, 1, 2],
        [3, 2.5,  80, 0, 1, 1],
        [3, 2.3,  80, 0, 1, 1],
        [3, 2.3,  80, 0, 1, 1],
        [3,   6, 112, 1, 1, 1],
        [3,   6, 112, 1, 1, 1],
        [5,   6, 160, 1, 1, 2],
        [5,   6, 160, 1, 1, 1],
        [5,   6, 160, 1, 1, 1]
    ]
    net = MobileNetV3(cfgs, mode='large', width_mult =.75, **kwargs)
    if pretrained:
        init_pretrained_weights(net, key='mobilenetv3_large_075')
    return net
def mobilenetv3_large(pretrained=False, **kwargs):
    """
    Constructs a MobileNetV3-Large model (width multiplier 1.0).

    Args:
        pretrained: if True, download and load the ImageNet checkpoint.
        **kwargs: forwarded to the MobileNetV3 constructor.
    """
    cfgs = [
        # kernel, expansion(t), channels, use-SE, use-HS, stride
        [3,   1,  16, 0, 0, 1],
        [3,   4,  24, 0, 0, 2],
        [3,   3,  24, 0, 0, 1],
        [5,   3,  40, 1, 0, 2],
        [5,   3,  40, 1, 0, 1],
        [5,   3,  40, 1, 0, 1],
        [3,   6,  80, 0, 1, 2],
        [3, 2.5,  80, 0, 1, 1],
        [3, 2.3,  80, 0, 1, 1],
        [3, 2.3,  80, 0, 1, 1],
        [3,   6, 112, 1, 1, 1],
        [3,   6, 112, 1, 1, 1],
        [5,   6, 160, 1, 1, 2],
        [5,   6, 160, 1, 1, 1],
        [5,   6, 160, 1, 1, 1]
    ]
    net = MobileNetV3(cfgs, mode='large', width_mult = 1., **kwargs)
    if pretrained:
        init_pretrained_weights(net, key='mobilenetv3_large')
    return net
def mobilenetv3_large_150(pretrained=False, **kwargs):
    """
    Constructs a MobileNetV3-Large model with width multiplier 1.5.

    Args:
        pretrained: unsupported for this width; raises NotImplementedError.
        **kwargs: forwarded to the MobileNetV3 constructor.
    """
    cfgs = [
        # kernel, expansion(t), channels, use-SE, use-HS, stride
        [3,   1,  16, 0, 0, 1],
        [3,   4,  24, 0, 0, 2],
        [3,   3,  24, 0, 0, 1],
        [5,   3,  40, 1, 0, 2],
        [5,   3,  40, 1, 0, 1],
        [5,   3,  40, 1, 0, 1],
        [3,   6,  80, 0, 1, 2],
        [3, 2.5,  80, 0, 1, 1],
        [3, 2.3,  80, 0, 1, 1],
        [3, 2.3,  80, 0, 1, 1],
        [3,   6, 112, 1, 1, 1],
        [3,   6, 112, 1, 1, 1],
        [5,   6, 160, 1, 1, 2],
        [5,   6, 160, 1, 1, 1],
        [5,   6, 160, 1, 1, 1]
    ]
    net = MobileNetV3(cfgs, mode='large', width_mult = 1.5, **kwargs)
    if pretrained:
        raise NotImplementedError("The weights for this configuration are not available")
    return net
def mobilenetv3_large_125(pretrained=False, **kwargs):
    """
    Constructs a MobileNetV3-Large model with width multiplier 1.25.

    Args:
        pretrained: unsupported for this width; raises NotImplementedError.
        **kwargs: forwarded to the MobileNetV3 constructor.
    """
    cfgs = [
        # kernel, expansion(t), channels, use-SE, use-HS, stride
        [3,   1,  16, 0, 0, 1],
        [3,   4,  24, 0, 0, 2],
        [3,   3,  24, 0, 0, 1],
        [5,   3,  40, 1, 0, 2],
        [5,   3,  40, 1, 0, 1],
        [5,   3,  40, 1, 0, 1],
        [3,   6,  80, 0, 1, 2],
        [3, 2.5,  80, 0, 1, 1],
        [3, 2.3,  80, 0, 1, 1],
        [3, 2.3,  80, 0, 1, 1],
        [3,   6, 112, 1, 1, 1],
        [3,   6, 112, 1, 1, 1],
        [5,   6, 160, 1, 1, 2],
        [5,   6, 160, 1, 1, 1],
        [5,   6, 160, 1, 1, 1]
    ]
    net = MobileNetV3(cfgs, mode='large', width_mult = 1.25, **kwargs)
    if pretrained:
        raise NotImplementedError("The weights for this configuration are not available")
    return net
def mobilenetv3_small(pretrained=False, **kwargs):
    """
    Constructs a MobileNetV3-Small model (width multiplier 1.0).

    Args:
        pretrained: if True, download and load the ImageNet checkpoint.
        **kwargs: forwarded to the MobileNetV3 constructor.
    """
    cfgs = [
        # kernel, expansion(t), channels, use-SE, use-HS, stride
        [3,    1,  16, 1, 0, 2],
        [3,  4.5,  24, 0, 0, 2],
        [3, 3.67,  24, 0, 0, 1],
        [5,    4,  40, 1, 1, 2],
        [5,    6,  40, 1, 1, 1],
        [5,    6,  40, 1, 1, 1],
        [5,    3,  48, 1, 1, 1],
        [5,    3,  48, 1, 1, 1],
        [5,    6,  96, 1, 1, 2],
        [5,    6,  96, 1, 1, 1],
        [5,    6,  96, 1, 1, 1],
    ]
    net = MobileNetV3(cfgs, mode='small', width_mult = 1., **kwargs)
    if pretrained:
        init_pretrained_weights(net, key='mobilenetv3_small')
    return net
| 34.315421 | 133 | 0.544291 | import math
import torch
import torch.nn as nn
from torch.cuda.amp import autocast
from torchreid.losses import AngleSimpleLinear
from torchreid.ops import Dropout, EvalModeSetter, rsc
from .common import HSigmoid, HSwish, ModelInterface, make_divisible
import timm
from torchreid.integration.nncf.compression import get_no_nncf_trace_context_manager, nullcontext
__all__ = ['mobilenetv3_large', 'mobilenetv3_large_075', 'mobilenetv3_small', 'mobilenetv3_large_150',
'mobilenetv3_large_125']
pretrained_urls = {
'mobilenetv3_small':
'https://github.com/d-li14/mobilenetv3.pytorch/blob/master/pretrained/mobilenetv3-small-55df8e1f.pth?raw=true',
'mobilenetv3_large':
'https://github.com/d-li14/mobilenetv3.pytorch/blob/master/pretrained/mobilenetv3-large-1cd25616.pth?raw=true',
'mobilenetv3_large_075':
'https://github.com/d-li14/mobilenetv3.pytorch/blob/master/pretrained/mobilenetv3-large-0.75-9632d2a8.pth?raw=true',
'mobilenetv3_large_21k':
'https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/mobilenetv3_large_100_miil_21k.pth'
}
SHOULD_NNCF_SKIP_SE_LAYERS = False
SHOULD_NNCF_SKIP_HEAD = False
no_nncf_se_layer_context = get_no_nncf_trace_context_manager() if SHOULD_NNCF_SKIP_SE_LAYERS else nullcontext
no_nncf_head_context = get_no_nncf_trace_context_manager() if SHOULD_NNCF_SKIP_HEAD else nullcontext
class SELayer(nn.Module):
def __init__(self, channel, reduction=4):
super(SELayer, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Sequential(
nn.Linear(channel, make_divisible(channel // reduction, 8)),
nn.ReLU(inplace=True),
nn.Linear(make_divisible(channel // reduction, 8), channel),
HSigmoid()
)
def forward(self, x):
with no_nncf_se_layer_context():
b, c, _, _ = x.size()
y = self.avg_pool(x).view(b, c)
y = self.fc(y).view(b, c, 1, 1)
return x * y
def conv_3x3_bn(inp, oup, stride, IN_conv1=False):
return nn.Sequential(
nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
nn.BatchNorm2d(oup) if not IN_conv1 else nn.InstanceNorm2d(oup, affine=True),
HSwish()
)
def conv_1x1_bn(inp, oup, loss='softmax'):
return nn.Sequential(
nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
HSwish() if loss == 'softmax' else nn.PReLU()
)
class InvertedResidual(nn.Module):
def __init__(self, inp, hidden_dim, oup, kernel_size, stride, use_se, use_hs):
super(InvertedResidual, self).__init__()
assert stride in [1, 2]
self.identity = stride == 1 and inp == oup
if inp == hidden_dim:
self.conv = nn.Sequential(
nn.Conv2d(hidden_dim, hidden_dim, kernel_size, stride, (kernel_size - 1) // 2, groups=hidden_dim, bias=False),
nn.BatchNorm2d(hidden_dim),
HSwish() if use_hs else nn.ReLU(inplace=True),
SELayer(hidden_dim) if use_se else nn.Identity(),
nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
)
else:
self.conv = nn.Sequential(
nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
nn.BatchNorm2d(hidden_dim),
HSwish() if use_hs else nn.ReLU(inplace=True),
nn.Conv2d(hidden_dim, hidden_dim, kernel_size, stride, (kernel_size - 1) // 2, groups=hidden_dim, bias=False),
nn.BatchNorm2d(hidden_dim),
SELayer(hidden_dim) if use_se else nn.Identity(),
HSwish() if use_hs else nn.ReLU(inplace=True),
nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
)
def forward(self, x):
if self.identity:
return x + self.conv(x)
else:
return self.conv(x)
class MobileNetV3(ModelInterface):
def __init__(self,
cfgs,
mode,
IN_conv1=False,
num_classes=1000,
width_mult=1.,
in_channels=3,
input_size=(224, 224),
dropout_cls = None,
pooling_type='avg',
IN_first=False,
self_challenging_cfg=False,
**kwargs):
super().__init__(**kwargs)
self.in_size = input_size
self.num_classes = num_classes
self.input_IN = nn.InstanceNorm2d(in_channels, affine=True) if IN_first else None
self.pooling_type = pooling_type
self.self_challenging_cfg = self_challenging_cfg
self.width_mult = width_mult
self.dropout_cls = dropout_cls
self.cfgs = cfgs
assert mode in ['large', 'small']
input_channel = make_divisible(16 * self.width_mult, 8)
stride = 1 if self.in_size[0] < 100 else 2
layers = [conv_3x3_bn(3, input_channel, stride, IN_conv1)]
block = InvertedResidual
flag = True
for k, t, c, use_se, use_hs, s in self.cfgs:
if (self.in_size[0] < 100) and (s == 2) and flag:
s = 1
flag = False
output_channel = make_divisible(c * self.width_mult, 8)
exp_size = make_divisible(input_channel * t, 8)
layers.append(block(input_channel, exp_size, output_channel, k, s, use_se, use_hs))
input_channel = output_channel
self.features = nn.Sequential(*layers)
self.num_features = exp_size
self.conv = conv_1x1_bn(input_channel, exp_size, self.loss)
output_channel = {'large': 1280, 'small': 1024}
output_channel = make_divisible(output_channel[mode] * self.width_mult, 8) if self.width_mult > 1.0 else output_channel[mode]
if self.loss == 'softmax' or self.loss == 'asl':
self.classifier = nn.Sequential(
nn.Linear(exp_size, output_channel),
nn.BatchNorm1d(output_channel),
HSwish(),
Dropout(**self.dropout_cls),
nn.Linear(output_channel, self.num_classes),
)
else:
assert self.loss in ['am_softmax', 'am_binary']
self.classifier = nn.Sequential(
nn.Linear(exp_size, output_channel),
nn.BatchNorm1d(output_channel),
nn.PReLU(),
Dropout(**self.dropout_cls),
AngleSimpleLinear(output_channel, self.num_classes),
)
self._initialize_weights()
self.forward = autocast(self.mix_precision)(self.forward)
def extract_features(self, x):
y = self.conv(self.features(x))
return y
def infer_head(self, x, skip_pool=False):
if not skip_pool:
glob_features = self._glob_feature_vector(x, self.pooling_type, reduce_dims=False)
else:
glob_features = x
logits = self.classifier(glob_features.view(x.shape[0], -1))
return glob_features, logits
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.Linear):
n = m.weight.size(1)
m.weight.data.normal_(0, 0.01)
m.bias.data.zero_()
def forward(self, x, return_featuremaps=False, get_embeddings=False, gt_labels=None):
if self.input_IN is not None:
x = self.input_IN(x)
y = self.extract_features(x)
if return_featuremaps:
return y
with no_nncf_head_context():
glob_features, logits = self.infer_head(y, skip_pool=False)
if self.training and self.self_challenging_cfg.enable and gt_labels is not None:
glob_features = rsc(
features = glob_features,
scores = logits,
labels = gt_labels,
retain_p = 1.0 - self.self_challenging_cfg.drop_p,
retain_batch = 1.0 - self.self_challenging_cfg.drop_batch_p
)
with EvalModeSetter([self.output], m_type=(nn.BatchNorm1d, nn.BatchNorm2d)):
_, logits = self.infer_head(x, skip_pool=True)
if not self.training and self.is_classification():
return [logits]
if get_embeddings:
out_data = [logits, glob_features]
elif self.loss in ['softmax', 'am_softmax', 'asl', 'am_binary']:
out_data = [logits]
elif self.loss in ['triplet']:
out_data = [logits, glob_features]
else:
raise KeyError("Unsupported loss: {}".format(self.loss))
return tuple(out_data)
def init_pretrained_weights(model, key='', **kwargs):
import os
import errno
import gdown
from torchreid.utils import load_pretrained_weights
def _get_torch_home():
ENV_TORCH_HOME = 'TORCH_HOME'
ENV_XDG_CACHE_HOME = 'XDG_CACHE_HOME'
DEFAULT_CACHE_DIR = '~/.cache'
torch_home = os.path.expanduser(
os.getenv(
ENV_TORCH_HOME,
os.path.join(
os.getenv(ENV_XDG_CACHE_HOME, DEFAULT_CACHE_DIR), 'torch'
)
)
)
return torch_home
torch_home = _get_torch_home()
model_dir = os.path.join(torch_home, 'checkpoints')
try:
os.makedirs(model_dir)
except OSError as e:
if e.errno == errno.EEXIST:
pass
else:
raise
filename = key + '_imagenet.pth'
cached_file = os.path.join(model_dir, filename)
if not os.path.exists(cached_file):
gdown.download(pretrained_urls[key], cached_file)
model = load_pretrained_weights(model, cached_file, **kwargs)
def mobilenetv3_large_075(pretrained=False, **kwargs):
cfgs = [
[3, 1, 16, 0, 0, 1],
[3, 4, 24, 0, 0, 2],
[3, 3, 24, 0, 0, 1],
[5, 3, 40, 1, 0, 2],
[5, 3, 40, 1, 0, 1],
[5, 3, 40, 1, 0, 1],
[3, 6, 80, 0, 1, 2],
[3, 2.5, 80, 0, 1, 1],
[3, 2.3, 80, 0, 1, 1],
[3, 2.3, 80, 0, 1, 1],
[3, 6, 112, 1, 1, 1],
[3, 6, 112, 1, 1, 1],
[5, 6, 160, 1, 1, 2],
[5, 6, 160, 1, 1, 1],
[5, 6, 160, 1, 1, 1]
]
net = MobileNetV3(cfgs, mode='large', width_mult =.75, **kwargs)
if pretrained:
init_pretrained_weights(net, key='mobilenetv3_large_075')
return net
def mobilenetv3_large(pretrained=False, **kwargs):
cfgs = [
[3, 1, 16, 0, 0, 1],
[3, 4, 24, 0, 0, 2],
[3, 3, 24, 0, 0, 1],
[5, 3, 40, 1, 0, 2],
[5, 3, 40, 1, 0, 1],
[5, 3, 40, 1, 0, 1],
[3, 6, 80, 0, 1, 2],
[3, 2.5, 80, 0, 1, 1],
[3, 2.3, 80, 0, 1, 1],
[3, 2.3, 80, 0, 1, 1],
[3, 6, 112, 1, 1, 1],
[3, 6, 112, 1, 1, 1],
[5, 6, 160, 1, 1, 2],
[5, 6, 160, 1, 1, 1],
[5, 6, 160, 1, 1, 1]
]
net = MobileNetV3(cfgs, mode='large', width_mult = 1., **kwargs)
if pretrained:
init_pretrained_weights(net, key='mobilenetv3_large')
return net
def mobilenetv3_large_150(pretrained=False, **kwargs):
cfgs = [
[3, 1, 16, 0, 0, 1],
[3, 4, 24, 0, 0, 2],
[3, 3, 24, 0, 0, 1],
[5, 3, 40, 1, 0, 2],
[5, 3, 40, 1, 0, 1],
[5, 3, 40, 1, 0, 1],
[3, 6, 80, 0, 1, 2],
[3, 2.5, 80, 0, 1, 1],
[3, 2.3, 80, 0, 1, 1],
[3, 2.3, 80, 0, 1, 1],
[3, 6, 112, 1, 1, 1],
[3, 6, 112, 1, 1, 1],
[5, 6, 160, 1, 1, 2],
[5, 6, 160, 1, 1, 1],
[5, 6, 160, 1, 1, 1]
]
net = MobileNetV3(cfgs, mode='large', width_mult = 1.5, **kwargs)
if pretrained:
raise NotImplementedError("The weights for this configuration are not available")
return net
def mobilenetv3_large_125(pretrained=False, **kwargs):
cfgs = [
[3, 1, 16, 0, 0, 1],
[3, 4, 24, 0, 0, 2],
[3, 3, 24, 0, 0, 1],
[5, 3, 40, 1, 0, 2],
[5, 3, 40, 1, 0, 1],
[5, 3, 40, 1, 0, 1],
[3, 6, 80, 0, 1, 2],
[3, 2.5, 80, 0, 1, 1],
[3, 2.3, 80, 0, 1, 1],
[3, 2.3, 80, 0, 1, 1],
[3, 6, 112, 1, 1, 1],
[3, 6, 112, 1, 1, 1],
[5, 6, 160, 1, 1, 2],
[5, 6, 160, 1, 1, 1],
[5, 6, 160, 1, 1, 1]
]
net = MobileNetV3(cfgs, mode='large', width_mult = 1.25, **kwargs)
if pretrained:
raise NotImplementedError("The weights for this configuration are not available")
return net
def mobilenetv3_small(pretrained=False, **kwargs):
    """Build a MobileNetV3-Small model at width multiplier 1.0.

    :param pretrained: if True, load the published weights for this config.
    :return: the constructed MobileNetV3 network.
    """
    layer_cfgs = [
        [3, 1, 16, 1, 0, 2],
        [3, 4.5, 24, 0, 0, 2],
        [3, 3.67, 24, 0, 0, 1],
        [5, 4, 40, 1, 1, 2],
        [5, 6, 40, 1, 1, 1],
        [5, 6, 40, 1, 1, 1],
        [5, 3, 48, 1, 1, 1],
        [5, 3, 48, 1, 1, 1],
        [5, 6, 96, 1, 1, 2],
        [5, 6, 96, 1, 1, 1],
        [5, 6, 96, 1, 1, 1],
    ]
    model = MobileNetV3(layer_cfgs, mode='small', width_mult=1., **kwargs)
    if pretrained:
        init_pretrained_weights(model, key='mobilenetv3_small')
    return model
| true | true |
f71a168b25957243708b709f360ba988096918a1 | 674 | py | Python | setup.py | ashwin153/pdpyras | 19971ec2df9ab854a91b95a25de452483ea57af0 | [
"MIT"
] | 92 | 2018-08-16T21:35:02.000Z | 2022-03-30T06:52:21.000Z | setup.py | ashwin153/pdpyras | 19971ec2df9ab854a91b95a25de452483ea57af0 | [
"MIT"
] | 53 | 2018-11-26T20:18:01.000Z | 2022-03-22T17:25:19.000Z | setup.py | ashwin153/pdpyras | 19971ec2df9ab854a91b95a25de452483ea57af0 | [
"MIT"
] | 22 | 2018-10-18T14:36:12.000Z | 2022-02-06T21:52:47.000Z | from setuptools import setup, find_packages
# Single source of truth for the package version.
__version__ = '4.3.0'

if __name__ == '__main__':
    # Metadata is gathered in one mapping so the setup() call stays compact.
    package_metadata = dict(
        name='pdpyras',
        description="PagerDuty REST API client",
        long_description="A basic REST API client for PagerDuty based on Requests' Session class",
        py_modules=['pdpyras'],
        version=__version__,
        license='MIT',
        url='https://pagerduty.github.io/pdpyras',
        download_url='https://pypi.org/project/pdpyras/',
        install_requires=['requests', 'urllib3'],
        author='Demitri Morgan',
        author_email='demitri@pagerduty.com',
        python_requires='!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, >=3.5',
    )
    setup(**package_metadata)
| 33.7 | 98 | 0.614243 | from setuptools import setup, find_packages
# Version string shared with the setup() metadata below.
__version__ = '4.3.0'

if __name__ == '__main__':
    # Pull the longer strings into locals so the call itself reads cleanly.
    short_desc = "PagerDuty REST API client"
    long_desc = "A basic REST API client for PagerDuty based on Requests' Session class"
    setup(
        name='pdpyras',
        description=short_desc,
        long_description=long_desc,
        py_modules=['pdpyras'],
        version=__version__,
        license='MIT',
        url='https://pagerduty.github.io/pdpyras',
        download_url='https://pypi.org/project/pdpyras/',
        install_requires=['requests', 'urllib3'],
        author='Demitri Morgan',
        author_email='demitri@pagerduty.com',
        python_requires='!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, >=3.5',
    )
| true | true |
f71a16f3990d1459e27c67ec2953c6e70264c9af | 421 | py | Python | configs/__init__.py | whiplash003/pytrorch_template | 4629ede6ade3359a12bd40269fced3b96e8d11b3 | [
"MIT"
] | 4 | 2019-10-11T01:08:47.000Z | 2021-02-27T13:37:05.000Z | configs/__init__.py | qilong97/PyTorch-Project-Framework | e1d791e9ac679907f94f0fbe7b9c930292cb61d3 | [
"MIT"
] | null | null | null | configs/__init__.py | qilong97/PyTorch-Project-Framework | e1d791e9ac679907f94f0fbe7b9c930292cb61d3 | [
"MIT"
] | 5 | 2019-11-01T09:25:00.000Z | 2021-08-23T02:48:45.000Z | import os
from .BaseConfig import BaseConfig
from .BaseTest import BaseTest
from .Env import env
from .Run import Run
__all__ = ['BaseConfig', 'BaseTest', 'Run', 'env', 'all']
def all(config, cfg_dir):
    """Instantiate ``config`` for every file found in ``cfg_dir``.

    Note: the name shadows the builtin ``all`` inside this module; it is kept
    for backward compatibility with existing callers/``__all__``.

    :param config: callable taking a config-file path and returning a config object.
    :param cfg_dir: directory holding the config files; created if missing.
    :return: list of config objects, in sorted filename order.
    """
    # exist_ok avoids the check-then-create race of exists() + makedirs().
    os.makedirs(cfg_dir, exist_ok=True)
    return [config(os.path.join(cfg_dir, fname))
            for fname in sorted(os.listdir(cfg_dir))]
| 21.05 | 60 | 0.684086 | import os
from .BaseConfig import BaseConfig
from .BaseTest import BaseTest
from .Env import env
from .Run import Run
__all__ = ['BaseConfig', 'BaseTest', 'Run', 'env', 'all']
def all(config, cfg_dir):
if not os.path.exists(cfg_dir):
os.makedirs(cfg_dir)
cfg_list = list()
for file in sorted(os.listdir(cfg_dir)):
cfg_list.append(config(os.path.join(cfg_dir, file)))
return cfg_list
| true | true |
f71a18336d3c0e2f947f297b8e9e9e31ea3bbe07 | 895 | py | Python | setup.py | zhs007/trdb2py | d07b874bd37085ed64b5c6c6c2c21a380024d082 | [
"Apache-2.0"
] | null | null | null | setup.py | zhs007/trdb2py | d07b874bd37085ed64b5c6c6c2c21a380024d082 | [
"Apache-2.0"
] | 43 | 2020-12-11T09:07:51.000Z | 2021-05-29T07:31:10.000Z | setup.py | zhs007/trdb2py | d07b874bd37085ed64b5c6c6c2c21a380024d082 | [
"Apache-2.0"
] | null | null | null | import setuptools
# Encoding is pinned so builds do not depend on the platform default
# (e.g. cp1252 on Windows would break on non-ASCII README content).
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
with open("VERSION", "r", encoding="utf-8") as fversion:
    # strip() drops the trailing newline most editors leave in VERSION;
    # otherwise it would end up embedded in the version string.
    version = fversion.read().strip()
setuptools.setup(
    name="trdb2py",
    version=version,
    author="Zerro Zhao",
    author_email="zerrozhao@gmail.com",
    description="tradingdb2 for python",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/zhs007/trdb2py",
    packages=setuptools.find_packages(),
    entry_points={
        'console_scripts': [
            'trdb2py=trdb2py:main'
        ],
    },
    classifiers=(
        "Programming Language :: Python :: 3",
        # "License :: OSI Approved :: Apache License",
        "Operating System :: Microsoft :: Windows",
        "Operating System :: POSIX",
        "Operating System :: Unix",
        "Operating System :: MacOS",
    ),
)
| 27.121212 | 54 | 0.620112 | import setuptools
# Encoding is pinned so builds do not depend on the platform default
# (e.g. cp1252 on Windows would break on non-ASCII README content).
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
with open("VERSION", "r", encoding="utf-8") as fversion:
    # strip() drops the trailing newline most editors leave in VERSION;
    # otherwise it would end up embedded in the version string.
    version = fversion.read().strip()
setuptools.setup(
    name="trdb2py",
    version=version,
    author="Zerro Zhao",
    author_email="zerrozhao@gmail.com",
    description="tradingdb2 for python",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/zhs007/trdb2py",
    packages=setuptools.find_packages(),
    entry_points={
        'console_scripts': [
            'trdb2py=trdb2py:main'
        ],
    },
    classifiers=(
        "Programming Language :: Python :: 3",
        "Operating System :: Microsoft :: Windows",
        "Operating System :: POSIX",
        "Operating System :: Unix",
        "Operating System :: MacOS",
    ),
)
| true | true |
f71a184c5dbe74ec302bac2087f436f411cf0919 | 2,633 | py | Python | data_config.py | XieResearchGroup/CLEIT | 226ece5a8763ac010610cbc9f66915caca92775e | [
"MIT"
] | null | null | null | data_config.py | XieResearchGroup/CLEIT | 226ece5a8763ac010610cbc9f66915caca92775e | [
"MIT"
] | null | null | null | data_config.py | XieResearchGroup/CLEIT | 226ece5a8763ac010610cbc9f66915caca92775e | [
"MIT"
] | null | null | null | import os
"""
configuration file includes all related multi-omics datasets
"""
root_data_folder = './data'
raw_data_folder = os.path.join(root_data_folder, 'raw_dat')
preprocessed_data_folder = os.path.join(root_data_folder, 'preprocessed_dat')
gex_feature_file = os.path.join(preprocessed_data_folder, 'uq1000_gex_feature.csv')
xena_mut_uq_file = os.path.join(preprocessed_data_folder, 'xena_uq_mut_standarized.csv')
ccle_mut_uq_file = os.path.join(preprocessed_data_folder, 'ccle_uq_mut_standarized.csv')
#mapping_file = os.path.join(raw_data_folder, 'mart_export.txt')
gene_feature_file = os.path.join(preprocessed_data_folder, 'CosmicHGNC_list.tsv')
#Xena datasets
xena_folder = os.path.join(raw_data_folder, 'Xena')
xena_id_mapping_file = os.path.join(xena_folder, 'gencode.v23.annotation.gene.probemap')
xena_gex_file = os.path.join(xena_folder, 'tcga_RSEM_gene_tpm.gz')
xena_preprocessed_gex_file = os.path.join(preprocessed_data_folder, 'xena_gex')
xena_mut_file = os.path.join(xena_folder, 'mc3.v0.2.8.PUBLIC.nonsilentGene.xena.gz')
xena_preprocessed_mut_file = os.path.join(preprocessed_data_folder, 'xena_mut')
xena_sample_file = os.path.join(xena_folder, 'TCGA_phenotype_denseDataOnlyDownload.tsv.gz')
#CCLE datasets
ccle_folder = os.path.join(raw_data_folder, 'CCLE')
ccle_gex_file = os.path.join(ccle_folder, 'CCLE_expression.csv')
ccle_preprocessed_gex_file = os.path.join(preprocessed_data_folder, 'ccle_gex')
ccle_mut_file = os.path.join(ccle_folder, 'CCLE_mutations.csv')
ccle_preprocessed_mut_file = os.path.join(preprocessed_data_folder, 'ccle_mut')
ccle_sample_file = os.path.join(ccle_folder, 'sample_info.csv')
#GDSC datasets
gdsc_folder = os.path.join(raw_data_folder, 'GDSC')
gdsc_target_file1 = os.path.join(gdsc_folder, 'GDSC1_fitted_dose_response_25Feb20.csv')
gdsc_target_file2 = os.path.join(gdsc_folder, 'GDSC2_fitted_dose_response_25Feb20.csv')
gdsc_target_file = os.path.join(gdsc_folder, 'sanger-dose-response.csv')
gdsc_sample_file = os.path.join(gdsc_folder, 'gdsc_cell_line_annotation.csv')
gdsc_preprocessed_target_file = os.path.join(preprocessed_data_folder, 'gdsc_target')
#PPI network files
network_folder = os.path.join(raw_data_folder, 'network')
string_network_folder = os.path.join(network_folder, 'STRING')
raw_string_network_file = os.path.join(string_network_folder, '9606.protein.links.v11.0.txt.gz')
string_id_mapping_file = os.path.join(string_network_folder, '9606.protein.info.v11.0.txt.gz')
current_network_file = os.path.join(string_network_folder, 'string_network_hgnc.txt')
propagation_kernel_file = os.path.join(string_network_folder, 'string_propagation_kernel.file') | 57.23913 | 96 | 0.821117 | import os
# Dataset path registry (duplicate copy). Top-level folders first.
root_data_folder = './data'
raw_data_folder = os.path.join(root_data_folder, 'raw_dat')
preprocessed_data_folder = os.path.join(root_data_folder, 'preprocessed_dat')

# Preprocessed feature tables.
gex_feature_file = os.path.join(
    preprocessed_data_folder, 'uq1000_gex_feature.csv')
xena_mut_uq_file = os.path.join(
    preprocessed_data_folder, 'xena_uq_mut_standarized.csv')
ccle_mut_uq_file = os.path.join(
    preprocessed_data_folder, 'ccle_uq_mut_standarized.csv')
gene_feature_file = os.path.join(
    preprocessed_data_folder, 'CosmicHGNC_list.tsv')

# Xena (TCGA).
xena_folder = os.path.join(raw_data_folder, 'Xena')
xena_id_mapping_file = os.path.join(
    xena_folder, 'gencode.v23.annotation.gene.probemap')
xena_gex_file = os.path.join(xena_folder, 'tcga_RSEM_gene_tpm.gz')
xena_preprocessed_gex_file = os.path.join(preprocessed_data_folder, 'xena_gex')
xena_mut_file = os.path.join(
    xena_folder, 'mc3.v0.2.8.PUBLIC.nonsilentGene.xena.gz')
xena_preprocessed_mut_file = os.path.join(preprocessed_data_folder, 'xena_mut')
xena_sample_file = os.path.join(
    xena_folder, 'TCGA_phenotype_denseDataOnlyDownload.tsv.gz')

# CCLE.
ccle_folder = os.path.join(raw_data_folder, 'CCLE')
ccle_gex_file = os.path.join(ccle_folder, 'CCLE_expression.csv')
ccle_preprocessed_gex_file = os.path.join(preprocessed_data_folder, 'ccle_gex')
ccle_mut_file = os.path.join(ccle_folder, 'CCLE_mutations.csv')
ccle_preprocessed_mut_file = os.path.join(preprocessed_data_folder, 'ccle_mut')
ccle_sample_file = os.path.join(ccle_folder, 'sample_info.csv')

# GDSC.
gdsc_folder = os.path.join(raw_data_folder, 'GDSC')
gdsc_target_file1 = os.path.join(
    gdsc_folder, 'GDSC1_fitted_dose_response_25Feb20.csv')
gdsc_target_file2 = os.path.join(
    gdsc_folder, 'GDSC2_fitted_dose_response_25Feb20.csv')
gdsc_target_file = os.path.join(gdsc_folder, 'sanger-dose-response.csv')
gdsc_sample_file = os.path.join(gdsc_folder, 'gdsc_cell_line_annotation.csv')
gdsc_preprocessed_target_file = os.path.join(
    preprocessed_data_folder, 'gdsc_target')

# PPI network (STRING).
network_folder = os.path.join(raw_data_folder, 'network')
string_network_folder = os.path.join(network_folder, 'STRING')
raw_string_network_file = os.path.join(
    string_network_folder, '9606.protein.links.v11.0.txt.gz')
string_id_mapping_file = os.path.join(
    string_network_folder, '9606.protein.info.v11.0.txt.gz')
current_network_file = os.path.join(
    string_network_folder, 'string_network_hgnc.txt')
propagation_kernel_file = os.path.join(string_network_folder, 'string_propagation_kernel.file') | true | true |
f71a18b20364f8e9aea1382e54d3b363fe159bcb | 4,188 | py | Python | uptimer/events/meta.py | janw/uptimer | 967b5ed907d620f79ee29ab8be52ba89f1686513 | [
"Apache-2.0"
] | 1 | 2021-08-23T18:40:03.000Z | 2021-08-23T18:40:03.000Z | uptimer/events/meta.py | janw/uptimer | 967b5ed907d620f79ee29ab8be52ba89f1686513 | [
"Apache-2.0"
] | 1 | 2021-01-17T13:31:41.000Z | 2021-01-17T13:31:41.000Z | uptimer/events/meta.py | janw/uptimer | 967b5ed907d620f79ee29ab8be52ba89f1686513 | [
"Apache-2.0"
] | null | null | null | from abc import ABCMeta
from uuid import UUID
import jsonschema
from dateutil.parser import parse as dateparse
from uptimer.events import SCHEMATA_PATH
from uptimer.events.cache import schema_cache
from uptimer.helpers import to_bool, to_none
class EventDefinitionError(ValueError):
    """Raised by EventMeta for invalid event class declarations.

    Triggered when an event class lacks a ``schema``/``table`` attribute or
    when its JSON schema title does not match the class name.
    """
    pass
class EventMeta(ABCMeta, metaclass=ABCMeta):
    """Metaclass that binds an event class to its JSON schema.

    On class creation it pops the declared ``schema``/``table`` attributes,
    loads the schema, validates the declaration, and injects derived helpers
    (``schema_spec``, ``properties``, ``property_cast_mapping``, ``_resolver``)
    as regular class variables.
    """

    schema_path: str = f"file:///{SCHEMATA_PATH}"
    """Base-URL at which the schema resolver will look up schema references."""

    def __new__(cls, name, bases, attrs, **kwargs):
        super_new = super().__new__
        schema = attrs.pop("schema", None)
        # `table` can be a valid None, so use False as placeholder of missing property
        table = attrs.pop("table", False)
        if not schema:
            raise EventDefinitionError(f"Class {name} did not declare a JSON schema.")
        if table is False:
            raise EventDefinitionError(
                f"Class {name} did not declare a database table mapping."
            )

        # Now resolve and parse the JSON schema for additional properties; generating
        # useful representations, the proper schema resolver for validation, etc.
        # Inserting them in the `attrs` dictionary will cause them to become regular
        # class variables, available in every instantiated class object.
        schema_spec = schema_cache[schema]
        # The schema's title must match the class name exactly.
        if schema_spec["title"] != name:
            raise EventDefinitionError(
                f"Name of class {name} must be equal to "
                f"JSON schema title '{schema_spec['title']}'"
            )

        properties_dict = cls._collect_properties(schema_spec)
        properties = list(properties_dict.keys())
        # Per property: the ordered list of callables used to coerce raw values
        # into Python types (see property_to_python).
        property_cast_mapping = {
            prop: cls.property_to_python(spec) for prop, spec in properties_dict.items()
        }
        # Anchors $ref lookups at schema_path for later validation.
        resolver = jsonschema.RefResolver(cls.schema_path, schema_spec)
        attrs.update(
            dict(
                schema=schema,
                table=table,
                schema_spec=schema_spec,
                properties_dict=properties_dict,
                properties=properties,
                property_cast_mapping=property_cast_mapping,
                _resolver=resolver,
            )
        )
        return super_new(cls, name, bases, attrs, **kwargs)

    @staticmethod
    def _collect_properties(schema):
        """Collects a list of all (including nested and conditional) properties."""
        props = dict()
        array_iter = []
        if isinstance(schema, list):
            array_iter = enumerate(schema)
        elif isinstance(schema, dict):
            array_iter = schema.items()
        for key, value in array_iter:
            if key == "properties":
                props.update(value)
            elif key == "required":
                # `required` only lists property names; it defines no specs.
                continue
            else:
                # Recurse into nested containers (allOf/oneOf/if-then, etc.).
                props.update(EventMeta._collect_properties(value))
        return props

    @staticmethod
    def property_to_python(property_spec):
        """
        Returns a list of appropriate python-native datatypes for a schema property.

        Based on the event class'es schema, a list of callables is returned that a
        value might be tried against. The list is ordered from most to least strict
        as to prevent falsely casting values as a less strict type.

        Possible types taken from JSON schema validation specification
        http://json-schema.org/latest/json-schema-validation.html#rfc.section.6.1.1
        """
        # `format` is more specific than `type`, so it wins outright.
        propformat = property_spec.get("format")
        if propformat == "date-time":
            return [dateparse]
        if propformat == "uuid":
            return [UUID]

        proptypes = property_spec.get("type")
        if not proptypes:
            return []
        if not isinstance(proptypes, list):
            proptypes = [proptypes]

        callables = []
        if "null" in proptypes:
            callables.append(to_none)
        if "boolean" in proptypes:
            callables.append(to_bool)
        if "integer" in proptypes:
            callables.append(int)
        if "number" in proptypes:
            callables.append(float)
        return callables
| 34.9 | 88 | 0.61915 | from abc import ABCMeta
from uuid import UUID
import jsonschema
from dateutil.parser import parse as dateparse
from uptimer.events import SCHEMATA_PATH
from uptimer.events.cache import schema_cache
from uptimer.helpers import to_bool, to_none
class EventDefinitionError(ValueError):
    """Raised by EventMeta for invalid event class declarations.

    Triggered when an event class lacks a ``schema``/``table`` attribute or
    when its JSON schema title does not match the class name.
    """
    pass
class EventMeta(ABCMeta, metaclass=ABCMeta):
    """Metaclass that binds an event class to its JSON schema.

    Pops the declared ``schema``/``table`` attributes, loads the schema,
    validates the declaration, and injects derived helpers (``schema_spec``,
    ``properties``, ``property_cast_mapping``, ``_resolver``) as class
    variables.
    """

    # Base-URL at which the schema resolver will look up schema references.
    schema_path: str = f"file:///{SCHEMATA_PATH}"

    def __new__(cls, name, bases, attrs, **kwargs):
        super_new = super().__new__
        schema = attrs.pop("schema", None)
        # `table` may legitimately be None, so False marks "not declared".
        table = attrs.pop("table", False)
        if not schema:
            raise EventDefinitionError(f"Class {name} did not declare a JSON schema.")
        if table is False:
            raise EventDefinitionError(
                f"Class {name} did not declare a database table mapping."
            )
        schema_spec = schema_cache[schema]
        # The schema's title must match the class name exactly.
        if schema_spec["title"] != name:
            raise EventDefinitionError(
                f"Name of class {name} must be equal to "
                f"JSON schema title '{schema_spec['title']}'"
            )
        properties_dict = cls._collect_properties(schema_spec)
        properties = list(properties_dict.keys())
        # Per property: ordered list of callables used to coerce raw values.
        property_cast_mapping = {
            prop: cls.property_to_python(spec) for prop, spec in properties_dict.items()
        }
        # Anchors $ref lookups at schema_path for later validation.
        resolver = jsonschema.RefResolver(cls.schema_path, schema_spec)
        # Injected into attrs so they become class variables on the new class.
        attrs.update(
            dict(
                schema=schema,
                table=table,
                schema_spec=schema_spec,
                properties_dict=properties_dict,
                properties=properties,
                property_cast_mapping=property_cast_mapping,
                _resolver=resolver,
            )
        )
        return super_new(cls, name, bases, attrs, **kwargs)

    @staticmethod
    def _collect_properties(schema):
        """Collect all (including nested and conditional) schema properties."""
        props = dict()
        array_iter = []
        if isinstance(schema, list):
            array_iter = enumerate(schema)
        elif isinstance(schema, dict):
            array_iter = schema.items()
        for key, value in array_iter:
            if key == "properties":
                props.update(value)
            elif key == "required":
                # `required` only lists property names; it defines no specs.
                continue
            else:
                # Recurse into nested containers (allOf/oneOf/if-then, etc.).
                props.update(EventMeta._collect_properties(value))
        return props

    @staticmethod
    def property_to_python(property_spec):
        """Return coercion callables for a schema property, strictest first."""
        # `format` is more specific than `type`, so it wins outright.
        propformat = property_spec.get("format")
        if propformat == "date-time":
            return [dateparse]
        if propformat == "uuid":
            return [UUID]
        proptypes = property_spec.get("type")
        if not proptypes:
            return []
        if not isinstance(proptypes, list):
            proptypes = [proptypes]
        callables = []
        if "null" in proptypes:
            callables.append(to_none)
        if "boolean" in proptypes:
            callables.append(to_bool)
        if "integer" in proptypes:
            callables.append(int)
        if "number" in proptypes:
            callables.append(float)
        return callables
| true | true |
f71a191b20700bf1958d34785c00621fcbe6eda7 | 12,820 | py | Python | hvac/api/secrets_engines/gcp.py | ddeka2910/hvac | 80cf3950157bf003ee6622e6db84bb9d6c90e5f1 | [
"Apache-2.0"
] | 1 | 2020-12-14T04:01:10.000Z | 2020-12-14T04:01:10.000Z | hvac/api/secrets_engines/gcp.py | ddeka2910/hvac | 80cf3950157bf003ee6622e6db84bb9d6c90e5f1 | [
"Apache-2.0"
] | 2 | 2019-07-08T03:09:38.000Z | 2021-07-08T18:17:51.000Z | hvac/api/secrets_engines/gcp.py | ddeka2910/hvac | 80cf3950157bf003ee6622e6db84bb9d6c90e5f1 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Gcp methods module."""
import json
import logging
from hvac import exceptions, utils
from hvac.api.vault_api_base import VaultApiBase
from hvac.constants.gcp import ALLOWED_SECRETS_TYPES, SERVICE_ACCOUNT_KEY_ALGORITHMS, SERVICE_ACCOUNT_KEY_TYPES
DEFAULT_MOUNT_POINT = 'gcp'
class Gcp(VaultApiBase):
    """Google Cloud Secrets Engine (API).

    Reference: https://www.vaultproject.io/api/secret/gcp/index.html
    """

    def configure(self, credentials=None, ttl=None, max_ttl=None, mount_point=DEFAULT_MOUNT_POINT):
        """Configure shared information for the Gcp secrets engine.

        Supported methods:
            POST: /{mount_point}/config. Produces: 204 (empty body)

        :param credentials: JSON credentials (either file contents or '@path/to/file').
        :type credentials: str | unicode
        :param ttl: Default config TTL for long-lived credentials (i.e. service account
            keys). Accepts integer seconds or a Go duration format string.
        :type ttl: int | str
        :param max_ttl: Maximum config TTL for long-lived credentials.
        :type max_ttl: int | str
        :param mount_point: The "path" the method/backend was mounted on.
        :type mount_point: str | unicode
        :return: The response of the request.
        :rtype: requests.Response
        """
        # Drop None-valued arguments so they are not sent to the API at all.
        params = utils.remove_nones({
            'credentials': credentials,
            'ttl': ttl,
            'max_ttl': max_ttl,
        })
        api_path = utils.format_url('/v1/{mount_point}/config', mount_point=mount_point)
        return self._adapter.post(
            url=api_path,
            json=params,
        )

    def read_config(self, mount_point=DEFAULT_MOUNT_POINT):
        """Read the configured shared information for the Gcp secrets engine.

        Credentials will be omitted from returned data by Vault.

        Supported methods:
            GET: /{mount_point}/config. Produces: 200 application/json

        :param mount_point: The "path" the method/backend was mounted on.
        :type mount_point: str | unicode
        :return: The JSON response of the request.
        :rtype: dict
        """
        api_path = utils.format_url('/v1/{mount_point}/config', mount_point=mount_point)
        return self._adapter.get(
            url=api_path,
        )

    def create_or_update_roleset(self, name, project, bindings, secret_type=None, token_scopes=None,
                                 mount_point=DEFAULT_MOUNT_POINT):
        """Create a roleset or update an existing roleset.

        Supported methods:
            POST: /{mount_point}/roleset/{name}. Produces: 204 (empty body)

        :param name: Name of the role. Cannot be updated.
        :type name: str | unicode
        :param project: Name of the GCP project the roleset's service account
            will belong to. Cannot be updated.
        :type project: str | unicode
        :param bindings: Bindings configuration (HCL/JSON string, raw or
            base64-encoded) or a dict, which is serialized to compact JSON.
        :type bindings: str | unicode | dict
        :param secret_type: Type of secret generated by the roleset. Cannot be updated.
        :type secret_type: str | unicode
        :param token_scopes: OAuth scopes for access_token role sets.
        :type token_scopes: list[str]
        :param mount_point: The "path" the method/backend was mounted on.
        :type mount_point: str | unicode
        :return: The response of the request.
        :rtype: requests.Response
        :raises hvac.exceptions.ParamValidationError: on unsupported secret_type.
        """
        if secret_type is not None and secret_type not in ALLOWED_SECRETS_TYPES:
            error_msg = 'unsupported secret_type argument provided "{arg}", supported types: "{secret_type}"'
            raise exceptions.ParamValidationError(error_msg.format(
                arg=secret_type,
                secret_type=','.join(ALLOWED_SECRETS_TYPES),
            ))
        if isinstance(bindings, dict):
            # Serialize compactly via `separators` instead of a blanket
            # str.replace(' ', ''), which would also corrupt legitimate spaces
            # inside binding string values (e.g. IAM condition titles).
            bindings = json.dumps(bindings, separators=(',', ':'))
            # Lazy %-style args: the message is only built when DEBUG is enabled.
            logging.debug('bindings: %s', bindings)

        params = {
            'project': project,
            'bindings': bindings,
        }
        params.update(
            utils.remove_nones({
                'secret_type': secret_type,
                'token_scopes': token_scopes,
            })
        )
        api_path = utils.format_url(
            '/v1/{mount_point}/roleset/{name}',
            mount_point=mount_point,
            name=name,
        )
        return self._adapter.post(
            url=api_path,
            json=params,
        )

    def rotate_roleset_account(self, name, mount_point=DEFAULT_MOUNT_POINT):
        """Rotate the service account this roleset uses to generate secrets.

        This also replaces the key for access_token rolesets, invalidating old
        secrets or fixing out-of-band changes made through GCP directly.

        Supported methods:
            POST: /{mount_point}/roleset/{name}/rotate. Produces: 204 (empty body)

        :param name: Name of the role.
        :type name: str | unicode
        :param mount_point: The "path" the method/backend was mounted on.
        :type mount_point: str | unicode
        :return: The response of the request.
        :rtype: requests.Response
        """
        api_path = utils.format_url(
            '/v1/{mount_point}/roleset/{name}/rotate',
            mount_point=mount_point,
            name=name,
        )
        return self._adapter.post(
            url=api_path,
        )

    def rotate_roleset_account_key(self, name, mount_point=DEFAULT_MOUNT_POINT):
        """Rotate the service account key this roleset uses for access tokens.

        This does not recreate the roleset service account itself.

        Supported methods:
            POST: /{mount_point}/roleset/{name}/rotate-key. Produces: 204 (empty body)

        :param name: Name of the role.
        :type name: str | unicode
        :param mount_point: The "path" the method/backend was mounted on.
        :type mount_point: str | unicode
        :return: The response of the request.
        :rtype: requests.Response
        """
        api_path = utils.format_url(
            '/v1/{mount_point}/roleset/{name}/rotate-key',
            mount_point=mount_point,
            name=name,
        )
        return self._adapter.post(
            url=api_path,
        )

    def read_roleset(self, name, mount_point=DEFAULT_MOUNT_POINT):
        """Read a roleset.

        Supported methods:
            GET: /{mount_point}/roleset/{name}. Produces: 200 application/json

        :param name: Name of the role.
        :type name: str | unicode
        :param mount_point: The "path" the method/backend was mounted on.
        :type mount_point: str | unicode
        :return: The JSON response of the request.
        :rtype: dict
        """
        api_path = utils.format_url(
            '/v1/{mount_point}/roleset/{name}',
            mount_point=mount_point,
            name=name,
        )
        return self._adapter.get(
            url=api_path,
        )

    def list_rolesets(self, mount_point=DEFAULT_MOUNT_POINT):
        """List configured rolesets.

        Supported methods:
            LIST: /{mount_point}/rolesets. Produces: 200 application/json

        :param mount_point: The "path" the method/backend was mounted on.
        :type mount_point: str | unicode
        :return: The JSON response of the request.
        :rtype: dict
        """
        api_path = utils.format_url('/v1/{mount_point}/rolesets', mount_point=mount_point)
        return self._adapter.list(
            url=api_path,
        )

    def delete_roleset(self, name, mount_point=DEFAULT_MOUNT_POINT):
        """Delete an existing roleset by the given name.

        Supported methods:
            DELETE: /{mount_point}/roleset/{name} Produces: 200 application/json

        :param name: Name of the role.
        :type name: str | unicode
        :param mount_point: The "path" the method/backend was mounted on.
        :type mount_point: str | unicode
        :return: The response of the request.
        :rtype: requests.Response
        """
        api_path = utils.format_url(
            '/v1/{mount_point}/roleset/{name}',
            name=name,
            mount_point=mount_point,
        )
        return self._adapter.delete(
            url=api_path,
        )

    def generate_oauth2_access_token(self, roleset, mount_point=DEFAULT_MOUNT_POINT):
        """Generate an OAuth2 token with the scopes defined on the roleset.

        Usable in GCP API calls, e.g. curl -H "Authorization: Bearer $TOKEN" ...

        Supported methods:
            GET: /{mount_point}/token/{roleset}. Produces: 200 application/json

        :param roleset: Name of an access_token roleset to generate the token under.
        :type roleset: str | unicode
        :param mount_point: The "path" the method/backend was mounted on.
        :type mount_point: str | unicode
        :return: The JSON response of the request.
        :rtype: dict
        """
        api_path = utils.format_url(
            '/v1/{mount_point}/token/{roleset}',
            mount_point=mount_point,
            roleset=roleset,
        )
        return self._adapter.get(
            url=api_path,
        )

    def generate_service_account_key(self, roleset, key_algorithm='KEY_ALG_RSA_2048',
                                     key_type='TYPE_GOOGLE_CREDENTIALS_FILE', method='POST',
                                     mount_point=DEFAULT_MOUNT_POINT):
        """Generate Secret (IAM Service Account Creds): Service Account Key.

        With GET ('read') the optional parameters keep their server defaults;
        use POST to send explicit key_algorithm/key_type values.

        :param roleset: Name of a service_account_key roleset to generate a key under.
        :type roleset: str | unicode
        :param key_algorithm: Key algorithm used to generate key (default: 2k RSA).
        :type key_algorithm: str | unicode
        :param key_type: Private key type to generate (default: JSON credentials file).
        :type key_type: str | unicode
        :param method: Supported methods:
            POST: /{mount_point}/key/{roleset}. Produces: 200 application/json
            GET: /{mount_point}/key/{roleset}. Produces: 200 application/json
        :type method: str | unicode
        :param mount_point: The "path" the method/backend was mounted on.
        :type mount_point: str | unicode
        :return: The JSON response of the request.
        :rtype: dict
        :raises hvac.exceptions.ParamValidationError: on invalid method/algorithm/type.
        """
        api_path = utils.format_url(
            '/v1/{mount_point}/key/{roleset}',
            mount_point=mount_point,
            roleset=roleset,
        )
        if method == 'POST':
            if key_algorithm not in SERVICE_ACCOUNT_KEY_ALGORITHMS:
                error_msg = 'unsupported key_algorithm argument provided "{arg}", supported algorithms: "{algorithms}"'
                raise exceptions.ParamValidationError(error_msg.format(
                    arg=key_algorithm,
                    algorithms=','.join(SERVICE_ACCOUNT_KEY_ALGORITHMS),
                ))
            if key_type not in SERVICE_ACCOUNT_KEY_TYPES:
                error_msg = 'unsupported key_type argument provided "{arg}", supported types: "{key_types}"'
                raise exceptions.ParamValidationError(error_msg.format(
                    arg=key_type,
                    key_types=','.join(SERVICE_ACCOUNT_KEY_TYPES),
                ))
            params = {
                'key_algorithm': key_algorithm,
                'key_type': key_type,
            }
            response = self._adapter.post(
                url=api_path,
                json=params,
            )
        elif method == 'GET':
            response = self._adapter.get(
                url=api_path,
            )
        else:
            error_message = '"method" parameter provided invalid value; POST or GET allowed, "{method}" provided'.format(method=method)
            raise exceptions.ParamValidationError(error_message)
        return response
| 39.690402 | 135 | 0.616147 |
import json
import logging
from hvac import exceptions, utils
from hvac.api.vault_api_base import VaultApiBase
from hvac.constants.gcp import ALLOWED_SECRETS_TYPES, SERVICE_ACCOUNT_KEY_ALGORITHMS, SERVICE_ACCOUNT_KEY_TYPES
DEFAULT_MOUNT_POINT = 'gcp'
class Gcp(VaultApiBase):
    """Google Cloud Secrets Engine (API).

    Reference: https://www.vaultproject.io/api/secret/gcp/index.html
    """

    def configure(self, credentials=None, ttl=None, max_ttl=None, mount_point=DEFAULT_MOUNT_POINT):
        """Configure shared engine settings (POST /{mount_point}/config).

        None-valued arguments are omitted from the request body.
        """
        params = utils.remove_nones({
            'credentials': credentials,
            'ttl': ttl,
            'max_ttl': max_ttl,
        })
        api_path = utils.format_url('/v1/{mount_point}/config', mount_point=mount_point)
        return self._adapter.post(
            url=api_path,
            json=params,
        )

    def read_config(self, mount_point=DEFAULT_MOUNT_POINT):
        """Read engine config (GET /{mount_point}/config)."""
        api_path = utils.format_url('/v1/{mount_point}/config', mount_point=mount_point)
        return self._adapter.get(
            url=api_path,
        )

    def create_or_update_roleset(self, name, project, bindings, secret_type=None, token_scopes=None,
                                 mount_point=DEFAULT_MOUNT_POINT):
        """Create or update a roleset (POST /{mount_point}/roleset/{name}).

        :raises hvac.exceptions.ParamValidationError: on unsupported secret_type.
        """
        if secret_type is not None and secret_type not in ALLOWED_SECRETS_TYPES:
            error_msg = 'unsupported secret_type argument provided "{arg}", supported types: "{secret_type}"'
            raise exceptions.ParamValidationError(error_msg.format(
                arg=secret_type,
                secret_type=','.join(ALLOWED_SECRETS_TYPES),
            ))
        if isinstance(bindings, dict):
            # Compact JSON via `separators`; a blanket replace(' ', '') would
            # also strip legitimate spaces inside binding string values.
            bindings = json.dumps(bindings, separators=(',', ':'))
            # Lazy %-style args so the message is built only at DEBUG level.
            logging.debug('bindings: %s', bindings)
        params = {
            'project': project,
            'bindings': bindings,
        }
        params.update(
            utils.remove_nones({
                'secret_type': secret_type,
                'token_scopes': token_scopes,
            })
        )
        api_path = utils.format_url(
            '/v1/{mount_point}/roleset/{name}',
            mount_point=mount_point,
            name=name,
        )
        return self._adapter.post(
            url=api_path,
            json=params,
        )

    def rotate_roleset_account(self, name, mount_point=DEFAULT_MOUNT_POINT):
        """Rotate the roleset's service account (POST .../roleset/{name}/rotate)."""
        api_path = utils.format_url(
            '/v1/{mount_point}/roleset/{name}/rotate',
            mount_point=mount_point,
            name=name,
        )
        return self._adapter.post(
            url=api_path,
        )

    def rotate_roleset_account_key(self, name, mount_point=DEFAULT_MOUNT_POINT):
        """Rotate the roleset's service account key (POST .../rotate-key)."""
        api_path = utils.format_url(
            '/v1/{mount_point}/roleset/{name}/rotate-key',
            mount_point=mount_point,
            name=name
        )
        return self._adapter.post(
            url=api_path,
        )

    def read_roleset(self, name, mount_point=DEFAULT_MOUNT_POINT):
        """Read a roleset (GET /{mount_point}/roleset/{name})."""
        api_path = utils.format_url(
            '/v1/{mount_point}/roleset/{name}',
            mount_point=mount_point,
            name=name,
        )
        return self._adapter.get(
            url=api_path,
        )

    def list_rolesets(self, mount_point=DEFAULT_MOUNT_POINT):
        """List configured rolesets (LIST /{mount_point}/rolesets)."""
        api_path = utils.format_url('/v1/{mount_point}/rolesets', mount_point=mount_point)
        return self._adapter.list(
            url=api_path,
        )

    def delete_roleset(self, name, mount_point=DEFAULT_MOUNT_POINT):
        """Delete a roleset by name (DELETE /{mount_point}/roleset/{name})."""
        api_path = utils.format_url(
            '/v1/{mount_point}/roleset/{name}',
            name=name,
            mount_point=mount_point,
        )
        return self._adapter.delete(
            url=api_path,
        )

    def generate_oauth2_access_token(self, roleset, mount_point=DEFAULT_MOUNT_POINT):
        """Generate an OAuth2 token for an access_token roleset (GET .../token/{roleset})."""
        api_path = utils.format_url(
            '/v1/{mount_point}/token/{roleset}',
            mount_point=mount_point,
            roleset=roleset,
        )
        return self._adapter.get(
            url=api_path,
        )

    def generate_service_account_key(self, roleset, key_algorithm='KEY_ALG_RSA_2048',
                                     key_type='TYPE_GOOGLE_CREDENTIALS_FILE', method='POST',
                                     mount_point=DEFAULT_MOUNT_POINT):
        """Generate a service account key (POST or GET /{mount_point}/key/{roleset}).

        GET uses the server defaults for the optional parameters; POST sends
        the explicit key_algorithm/key_type values.

        :raises hvac.exceptions.ParamValidationError: on invalid method/algorithm/type.
        """
        api_path = utils.format_url(
            '/v1/{mount_point}/key/{roleset}',
            mount_point=mount_point,
            roleset=roleset,
        )
        if method == 'POST':
            if key_algorithm not in SERVICE_ACCOUNT_KEY_ALGORITHMS:
                error_msg = 'unsupported key_algorithm argument provided "{arg}", supported algorithms: "{algorithms}"'
                raise exceptions.ParamValidationError(error_msg.format(
                    arg=key_algorithm,
                    algorithms=','.join(SERVICE_ACCOUNT_KEY_ALGORITHMS),
                ))
            if key_type not in SERVICE_ACCOUNT_KEY_TYPES:
                error_msg = 'unsupported key_type argument provided "{arg}", supported types: "{key_types}"'
                raise exceptions.ParamValidationError(error_msg.format(
                    arg=key_type,
                    key_types=','.join(SERVICE_ACCOUNT_KEY_TYPES),
                ))
            params = {
                'key_algorithm': key_algorithm,
                'key_type': key_type,
            }
            response = self._adapter.post(
                url=api_path,
                json=params,
            )
        elif method == 'GET':
            response = self._adapter.get(
                url=api_path,
            )
        else:
            error_message = '"method" parameter provided invalid value; POST or GET allowed, "{method}" provided'.format(method=method)
            raise exceptions.ParamValidationError(error_message)
        return response
| true | true |
f71a193cb6d839929618acd446da28cc742371b1 | 2,846 | py | Python | examples/tutorial_api_python/02_whole_body_from_image.py | ExSidius/openpose | 69f64206d63a156fa60e9a0a0de6738d27d1c00d | [
"DOC"
] | 12 | 2019-05-10T09:56:39.000Z | 2021-08-09T03:42:28.000Z | examples/tutorial_api_python/02_whole_body_from_image.py | ExSidius/openpose | 69f64206d63a156fa60e9a0a0de6738d27d1c00d | [
"DOC"
] | null | null | null | examples/tutorial_api_python/02_whole_body_from_image.py | ExSidius/openpose | 69f64206d63a156fa60e9a0a0de6738d27d1c00d | [
"DOC"
] | 7 | 2019-06-14T03:38:09.000Z | 2021-08-09T03:43:27.000Z | # From Python
# It requires OpenCV installed for Python
import sys
import cv2
import os
from sys import platform
import argparse
# Import Openpose (Windows/Ubuntu/OSX)
dir_path = os.path.dirname(os.path.realpath(__file__))
try:
# Windows Import
if platform == "win32":
# Change these variables to point to the correct folder (Release/x64 etc.)
sys.path.append(dir_path + '/../../python/openpose/Release');
os.environ['PATH'] = os.environ['PATH'] + ';' + dir_path + '/../../x64/Release;' + dir_path + '/../../bin;'
import pyopenpose as op
else:
# Change these variables to point to the correct folder (Release/x64 etc.)
sys.path.append('../../python');
# If you run `make install` (default path is `/usr/local/python` for Ubuntu), you can also access the OpenPose/python module from there. This will install OpenPose and the python library at your desired installation path. Ensure that this is in your python path in order to use it.
# sys.path.append('/usr/local/python')
from openpose import pyopenpose as op
except ImportError as e:
print('Error: OpenPose library could not be found. Did you enable `BUILD_PYTHON` in CMake and have this Python script in the right folder?')
raise e
# Flags
parser = argparse.ArgumentParser()
parser.add_argument("--image_path", default="../../../examples/media/COCO_val2014_000000000241.jpg", help="Process an image. Read all standard formats (jpg, png, bmp, etc.).")
args = parser.parse_known_args()
# Custom Params (refer to include/openpose/flags.hpp for more parameters)
params = dict()
params["model_folder"] = "../../../models/"
params["face"] = True
params["hand"] = True
# Add others in path?
for i in range(0, len(args[1])):
curr_item = args[1][i]
if i != len(args[1])-1: next_item = args[1][i+1]
else: next_item = "1"
if "--" in curr_item and "--" in next_item:
key = curr_item.replace('-','')
if key not in params: params[key] = "1"
elif "--" in curr_item and "--" not in next_item:
key = curr_item.replace('-','')
if key not in params: params[key] = next_item
# Construct it from system arguments
# op.init_argv(args[1])
# oppython = op.OpenposePython()
# Starting OpenPose
opWrapper = op.WrapperPython()
opWrapper.configure(params)
opWrapper.start()
# Process Image
datum = op.Datum()
imageToProcess = cv2.imread(args[0].image_path)
datum.cvInputData = imageToProcess
opWrapper.emplaceAndPop([datum])
# Display Image
print("Body keypoints: \n" + str(datum.poseKeypoints))
print("Face keypoints: \n" + str(datum.faceKeypoints))
print("Left hand keypoints: \n" + str(datum.handKeypoints[0]))
print("Right hand keypoints: \n" + str(datum.handKeypoints[1]))
cv2.imshow("OpenPose 1.4.0 - Tutorial Python API", datum.cvOutputData)
cv2.waitKey(0)
| 38.986301 | 289 | 0.685875 |
import sys
import cv2
import os
from sys import platform
import argparse
dir_path = os.path.dirname(os.path.realpath(__file__))
try:
if platform == "win32":
sys.path.append(dir_path + '/../../python/openpose/Release');
os.environ['PATH'] = os.environ['PATH'] + ';' + dir_path + '/../../x64/Release;' + dir_path + '/../../bin;'
import pyopenpose as op
else:
sys.path.append('../../python');
from openpose import pyopenpose as op
except ImportError as e:
print('Error: OpenPose library could not be found. Did you enable `BUILD_PYTHON` in CMake and have this Python script in the right folder?')
raise e
parser = argparse.ArgumentParser()
parser.add_argument("--image_path", default="../../../examples/media/COCO_val2014_000000000241.jpg", help="Process an image. Read all standard formats (jpg, png, bmp, etc.).")
args = parser.parse_known_args()
params = dict()
params["model_folder"] = "../../../models/"
params["face"] = True
params["hand"] = True
for i in range(0, len(args[1])):
curr_item = args[1][i]
if i != len(args[1])-1: next_item = args[1][i+1]
else: next_item = "1"
if "--" in curr_item and "--" in next_item:
key = curr_item.replace('-','')
if key not in params: params[key] = "1"
elif "--" in curr_item and "--" not in next_item:
key = curr_item.replace('-','')
if key not in params: params[key] = next_item
opWrapper = op.WrapperPython()
opWrapper.configure(params)
opWrapper.start()
datum = op.Datum()
imageToProcess = cv2.imread(args[0].image_path)
datum.cvInputData = imageToProcess
opWrapper.emplaceAndPop([datum])
print("Body keypoints: \n" + str(datum.poseKeypoints))
print("Face keypoints: \n" + str(datum.faceKeypoints))
print("Left hand keypoints: \n" + str(datum.handKeypoints[0]))
print("Right hand keypoints: \n" + str(datum.handKeypoints[1]))
cv2.imshow("OpenPose 1.4.0 - Tutorial Python API", datum.cvOutputData)
cv2.waitKey(0)
| true | true |
f71a1a2a2d27e09348b69858a543626888f37405 | 21,978 | py | Python | lingvo/core/conv_layers_builder_test.py | Harshs27/lingvo | bd396e651488b2e2c4a7416be077b4a0226c87c8 | [
"Apache-2.0"
] | 2,611 | 2018-10-16T20:14:10.000Z | 2022-03-31T14:48:41.000Z | lingvo/core/conv_layers_builder_test.py | Harshs27/lingvo | bd396e651488b2e2c4a7416be077b4a0226c87c8 | [
"Apache-2.0"
] | 249 | 2018-10-27T06:02:29.000Z | 2022-03-30T18:00:39.000Z | lingvo/core/conv_layers_builder_test.py | Harshs27/lingvo | bd396e651488b2e2c4a7416be077b4a0226c87c8 | [
"Apache-2.0"
] | 436 | 2018-10-25T05:31:45.000Z | 2022-03-31T07:26:03.000Z | # Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for conv layers builder."""
from absl.testing import parameterized
from lingvo import compat as tf
from lingvo.core import bn_layers
from lingvo.core import conv_layers_builder
from lingvo.core import conv_layers_with_time_padding
from lingvo.core import layers
from lingvo.core import test_utils
import numpy as np
class ConvPaddedLayersTest(test_utils.TestCase):
def _ConvTestHelper(self, dilation, stride, activation, batch_norm,
weight_norm, in_dim, out_dim, filter_shape, conv_last,
causal_conv):
with self.session(use_gpu=True) as sess:
p1 = layers.Conv2DLayer.Params().Set(
name='conv_2d01',
filter_shape=filter_shape + [in_dim, out_dim],
filter_stride=stride,
dilation_rate=dilation,
activation=activation,
batch_norm=batch_norm,
weight_norm=weight_norm,
bias=not batch_norm,
conv_last=conv_last,
causal_convolution=causal_conv)
builder_params = conv_layers_builder.Builder.Params().Set(
weight_norm=weight_norm)
if batch_norm:
norm_p = conv_layers_with_time_padding.ConvBatchNormLayer.Params().Set(
decay=0.999)
builder_params.norm_layer_tpl = norm_p
else:
builder_params.norm_layer_tpl = None
p2 = builder_params.Instantiate().Conv2D(
'conv_2d02',
in_dim,
out_dim,
filter_shape,
stride=stride,
dilation=dilation,
activation=activation,
conv_last=conv_last,
is_causal=causal_conv)
l1 = p1.Instantiate()
l2 = p2.Instantiate()
conv_in = tf.constant(np.random.normal(size=[4, 5, 6, 3]), tf.float32)
conv_pad = np.full([4, 5], 0.0)
conv_pad[2, 3] = 1.0
conv_pad[2, 4] = 1.0
conv_pad = tf.constant(conv_pad, tf.float32)
l1_theta = l1.theta.Transform(tf.identity)
l2_theta = l2.theta.Transform(tf.identity)
conv_out1, out1_padding = l1.FProp(l1_theta, conv_in, conv_pad)
conv_out2, out2_padding = l2.FProp(l2_theta, conv_in, conv_pad)
tf.logging.info(l1_theta)
tf.logging.info(l2_theta)
l1_num_vars = l1_theta.Flatten()
l2_num_var2 = l2_theta.Flatten()
if len(l1_num_vars) != len(l2_num_var2):
tf.logging.info(
'Mismatched number of vars: l1: %d vars, l2: %d vars',
len(l1_num_vars), len(l2_num_var2))
w1 = l1_theta.w
w2 = l2_theta.conv_2d.w
# b1 = l1_theta.b
# b2 = l2_theta.bn_or_bias.b
tf.global_variables_initializer().run()
v1, p1 = sess.run([conv_out1, out1_padding])
w1_v = sess.run(w1)
v2, p2 = sess.run([conv_out2, out2_padding], feed_dict={w2: w1_v})
self.assertAllClose(v1, v2)
self.assertAllClose(p1, p2)
def testConvBasic(self):
dilation = [1, 1]
stride = [2, 3]
activation = 'NONE'
batch_norm = False
weight_norm = False
in_dim = 3
out_dim = 3
filter_shape = [2, 2]
conv_last = False
causal_conv = False
self._ConvTestHelper(dilation, stride, activation, batch_norm, weight_norm,
in_dim, out_dim, filter_shape, conv_last, causal_conv)
def testConvBnWnTanh(self):
dilation = [1, 1]
stride = [2, 3]
activation = 'TANH'
batch_norm = True
weight_norm = True
in_dim = 3
out_dim = 3
filter_shape = [2, 2]
conv_last = False
causal_conv = False
self._ConvTestHelper(dilation, stride, activation, batch_norm, weight_norm,
in_dim, out_dim, filter_shape, conv_last, causal_conv)
def testConvGn(self):
dilation = [1, 1]
stride = [2, 3]
activation = 'TANH'
in_dim = 3
out_dim = 4
filter_shape = [2, 2]
conv_last = False
causal_conv = False
with self.session(use_gpu=True) as sess:
builder_params = conv_layers_builder.Builder.Params().Set(
weight_norm=True)
builder_params.norm_layer_tpl = bn_layers.GroupNormLayer.Params().Set(
num_groups=2)
p = builder_params.Instantiate().Conv2D(
'conv_2d02',
in_dim,
out_dim,
filter_shape,
stride=stride,
dilation=dilation,
activation=activation,
conv_last=conv_last,
is_causal=causal_conv)
l = p.Instantiate()
conv_in = tf.constant(np.random.normal(size=[4, 5, 6, 3]), tf.float32)
conv_pad = np.full([4, 5], 0.0)
conv_pad[2, 3] = 1.0
conv_pad[2, 4] = 1.0
conv_pad = tf.constant(conv_pad, tf.float32)
conv_out, _ = l.FProp(l.theta, conv_in, conv_pad)
tf.global_variables_initializer().run()
v = sess.run(tf.reduce_sum(conv_out, 0))
expected_out = [[[-0.35070014, -1.7821487, 0.8349923, 1.1709788],
[-0.18872532, 0.9702145, 0.5534694, -1.1386856]],
[[0.34970748, -0.5403709, -0.9809327, -2.0930214],
[0.54232424, 1.1565661, 1.0349312, 1.3458138]],
[[0, 0, 0, 0], [0, 0, 0, 0]]]
self.assertAllClose(v, expected_out)
def testConvLastWnTanh(self):
dilation = [1, 1]
stride = [2, 3]
activation = 'TANH'
batch_norm = False
weight_norm = True
in_dim = 3
out_dim = 3
filter_shape = [2, 2]
conv_last = True
causal_conv = False
self._ConvTestHelper(dilation, stride, activation, batch_norm, weight_norm,
in_dim, out_dim, filter_shape, conv_last, causal_conv)
def testConvLastCausal(self):
dilation = [1, 1]
stride = [2, 3]
activation = 'TANH'
batch_norm = True
weight_norm = True
in_dim = 3
out_dim = 3
filter_shape = [2, 1]
conv_last = True
causal_conv = True
self._ConvTestHelper(dilation, stride, activation, batch_norm, weight_norm,
in_dim, out_dim, filter_shape, conv_last, causal_conv)
def _DepthwiseConvTestHelper(self, dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
filter_shape, conv_last, causal_conv):
with self.session(use_gpu=True) as sess:
p1 = layers.DepthwiseConv2DLayer.Params().Set(
name='conv_2d01',
filter_shape=filter_shape + [in_dim, depth_multiplier],
filter_stride=stride,
dilation_rate=dilation,
activation=activation,
batch_norm=batch_norm,
weight_norm=weight_norm,
bias=not batch_norm,
conv_last=conv_last,
causal_convolution=causal_conv)
builder_params = conv_layers_builder.Builder.Params().Set(
weight_norm=weight_norm)
if batch_norm:
norm_p = conv_layers_with_time_padding.ConvBatchNormLayer.Params().Set(
decay=0.999)
builder_params.norm_layer_tpl = norm_p
else:
builder_params.norm_layer_tpl = None
p2 = builder_params.Instantiate().DepthwiseConv2D(
'conv_2d02',
in_dim,
depth_multiplier,
filter_shape,
stride=stride,
activation=activation,
dilation=dilation,
conv_last=conv_last,
is_causal=causal_conv)
l1 = p1.Instantiate()
l2 = p2.Instantiate()
conv_in = tf.constant(np.random.normal(size=[4, 5, 6, 3]), tf.float32)
conv_pad = np.full([4, 5], 0.0)
conv_pad[2, 3] = 1.0
conv_pad[2, 4] = 1.0
conv_pad = tf.constant(conv_pad, tf.float32)
l1_theta = l1.theta.Transform(tf.identity)
l2_theta = l2.theta.Transform(tf.identity)
conv_out1, out1_padding = l1.FProp(l1_theta, conv_in, conv_pad)
conv_out2, out2_padding = l2.FProp(l2_theta, conv_in, conv_pad)
tf.logging.info(l1_theta)
tf.logging.info(l2_theta)
l1_num_vars = l1_theta.Flatten()
l2_num_var2 = l2_theta.Flatten()
if len(l1_num_vars) != len(l2_num_var2):
tf.logging.info(
'Mismatched number of vars: l1: %d vars, l2: %d vars',
len(l1_num_vars), len(l2_num_var2))
w1 = l1_theta.w
w2 = l2_theta.conv_2d.w
# b1 = l1_theta.b
# b2 = l2_theta.bn_or_bias.b
tf.global_variables_initializer().run()
v1, p1 = sess.run([conv_out1, out1_padding])
w1_v = sess.run([w1])[0]
v2, p2 = sess.run([conv_out2, out2_padding], feed_dict={w2: w1_v})
self.assertAllClose(v1, v2)
self.assertAllClose(p1, p2)
def testDepthConvBasic(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'NONE'
batch_norm = False
weight_norm = False
in_dim = 3
depth_multiplier = 2
filter_shape = [2, 2]
conv_last = False
causal_conv = False
self._DepthwiseConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
filter_shape, conv_last, causal_conv)
def testDepthConvBnWnTanh(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
batch_norm = True
weight_norm = True
in_dim = 3
depth_multiplier = 3
filter_shape = [2, 2]
conv_last = False
causal_conv = False
self._DepthwiseConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
filter_shape, conv_last, causal_conv)
def testDepthConvGn(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
in_dim = 4
depth_multiplier = 1
filter_shape = [2, 2]
conv_last = False
causal_conv = False
with self.session(use_gpu=True) as sess:
builder_params = conv_layers_builder.Builder.Params().Set(
weight_norm=True)
builder_params.norm_layer_tpl = bn_layers.GroupNormLayer.Params().Set(
num_groups=2)
p = builder_params.Instantiate().DepthwiseConv2D(
'conv_2d02',
in_dim,
depth_multiplier,
filter_shape,
stride=stride,
activation=activation,
dilation=dilation,
conv_last=conv_last,
is_causal=causal_conv)
l = p.Instantiate()
conv_in = tf.constant(np.random.normal(size=[4, 5, 6, 4]), tf.float32)
conv_pad = np.full([4, 5], 0.0)
conv_pad[2, 3] = 1.0
conv_pad[2, 4] = 1.0
conv_pad = tf.constant(conv_pad, tf.float32)
conv_out, _ = l.FProp(l.theta, conv_in, conv_pad)
tf.global_variables_initializer().run()
v = sess.run(tf.reduce_sum(conv_out, 0))
expected_out = [[[-0.77095497, 0.30285388, -0.05714864, 1.0386012],
[0.74034333, 0.04982221, -0.41769135, -2.9531932],
[-0.2647084, -0.1936804, 0.6598473, 0.42537105]],
[[1.3095646, -0.85996866, 2.2734299, -1.8457952],
[-0.9542263, -0.14199251, 0.51472515, 0.91931283],
[0.47267163, 1.4824618, 0.4548889, 0.93488806]],
[[0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.]]]
self.assertAllClose(expected_out, v)
def testDepthConvLastWnTanh(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
batch_norm = False
weight_norm = True
in_dim = 3
depth_multiplier = 3
filter_shape = [2, 2]
conv_last = True
causal_conv = False
self._DepthwiseConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
filter_shape, conv_last, causal_conv)
def testDepthConvLastCausal(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
batch_norm = True
weight_norm = True
in_dim = 3
depth_multiplier = 3
filter_shape = [2, 1]
conv_last = True
causal_conv = True
self._DepthwiseConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
filter_shape, conv_last, causal_conv)
def _SeparableConvTestHelper(self, dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier, out_dim,
filter_shape, conv_last, causal_conv,
assert_equality=True):
with self.session(use_gpu=True) as sess:
p1 = layers.SeparableConv2DLayer.Params().Set(
name='conv_2d01',
filter_shape=filter_shape + [in_dim, out_dim],
depth_multiplier=depth_multiplier,
filter_stride=stride,
dilation_rate=dilation,
activation=activation,
batch_norm=batch_norm,
weight_norm=weight_norm,
bias=not batch_norm,
conv_last=conv_last,
causal_convolution=causal_conv)
builder_params = conv_layers_builder.Builder.Params().Set(
weight_norm=weight_norm)
if batch_norm:
norm_p = conv_layers_with_time_padding.ConvBatchNormLayer.Params().Set(
decay=0.999)
builder_params.norm_layer_tpl = norm_p
else:
builder_params.norm_layer_tpl = None
p2 = builder_params.Instantiate().SeparableConv2D(
'conv_2d02',
in_dim,
out_dim,
depth_multiplier,
filter_shape,
stride=stride,
activation=activation,
dilation=dilation,
conv_last=conv_last,
is_causal=causal_conv)
l1 = p1.Instantiate()
l2 = p2.Instantiate()
conv_in = tf.constant(np.random.normal(size=[4, 5, 6, 3]), tf.float32)
conv_pad = np.full([4, 5], 0.0)
conv_pad[2, 3] = 1.0
conv_pad[2, 4] = 1.0
conv_pad = tf.constant(conv_pad, tf.float32)
l1_theta = l1.theta.Transform(tf.identity)
l2_theta = l2.theta.Transform(tf.identity)
conv_out1, out1_padding = l1.FProp(l1_theta, conv_in, conv_pad)
conv_out2, out2_padding = l2.FProp(l2_theta, conv_in, conv_pad)
tf.logging.info(l1_theta)
tf.logging.info(l2_theta)
l1_num_vars = l1_theta.Flatten()
l2_num_var2 = l2_theta.Flatten()
if len(l1_num_vars) != len(l2_num_var2):
tf.logging.info(
'Mismatched number of vars: l1: %d vars, l2: %d vars',
len(l1_num_vars), len(l2_num_var2))
pointwise_conv_w1 = l1_theta.w
depth_conv_w1 = l1_theta.depthwise_conv.w
pointwise_conv_w2 = l2_theta.conv_1x1.w
depth_conv_w2 = l2_theta.conv_2d.w
# b1 = l1_theta.b
# b2 = l2_theta.bn_or_bias.b
tf.global_variables_initializer().run()
v1, p1 = sess.run([conv_out1, out1_padding])
p_w1_v, d_w1_v = sess.run([pointwise_conv_w1, depth_conv_w1])
v2, p2 = sess.run([conv_out2, out2_padding],
feed_dict={
pointwise_conv_w2: p_w1_v,
depth_conv_w2: d_w1_v
})
if assert_equality:
self.assertAllClose(v1, v2)
self.assertAllClose(p1, p2)
def testSeparableConv2DLayerBasic(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'NONE'
batch_norm = False
weight_norm = False
in_dim = 3
depth_multiplier = 3
out_dim = 2
filter_shape = [2, 2]
conv_last = False
causal_conv = False
self._SeparableConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
out_dim, filter_shape, conv_last, causal_conv)
def testSeparableConvWnWnTanh(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
batch_norm = False
weight_norm = True
in_dim = 3
depth_multiplier = 3
out_dim = 2
filter_shape = [2, 1]
conv_last = False
causal_conv = True
self._SeparableConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
out_dim, filter_shape, conv_last, causal_conv)
def testSeparableConvLastBnWnTanh(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
batch_norm = True
weight_norm = True
in_dim = 3
depth_multiplier = 3
out_dim = 2
filter_shape = [2, 1]
conv_last = True
causal_conv = True
# New implementation is not equivallent to the old.
self._SeparableConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
out_dim, filter_shape, conv_last, causal_conv,
assert_equality=False)
def testSeparableConvGn(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
in_dim = 4
depth_multiplier = 1
out_dim = 2
filter_shape = [2, 1]
conv_last = True
causal_conv = True
with self.session(use_gpu=True) as sess:
builder_params = conv_layers_builder.Builder.Params().Set(
weight_norm=True)
builder_params.norm_layer_tpl = bn_layers.GroupNormLayer.Params().Set(
num_groups=2)
p = builder_params.Instantiate().SeparableConv2D(
'conv_2d02',
in_dim,
out_dim,
depth_multiplier,
filter_shape,
stride=stride,
activation=activation,
dilation=dilation,
conv_last=conv_last,
is_causal=causal_conv)
l = p.Instantiate()
conv_in = tf.constant(np.random.normal(size=[4, 5, 6, 4]), tf.float32)
conv_pad = np.full([4, 5], 0.0)
conv_pad[2, 3] = 1.0
conv_pad[2, 4] = 1.0
conv_pad = tf.constant(conv_pad, tf.float32)
conv_out, _ = l.FProp(l.theta, conv_in, conv_pad)
tf.global_variables_initializer().run()
v = sess.run(tf.reduce_sum(conv_out, 0))
expected_out = [[[0.00963847, -0.04019006], [0.36265337, -0.06592329],
[0.65582913, -0.1533944]],
[[0.7512939, -0.7282307], [0.96100605, -1.9509676],
[0.4639647, 0.2485837]], [[0., 0.], [0., 0.], [0., 0.]]]
self.assertAllClose(expected_out, v)
class CausalPoolingLayerTest(test_utils.TestCase, parameterized.TestCase):
"""Tests for CausalPoolingLayer."""
@parameterized.named_parameters(
{
'testcase_name': 'max_pooling',
'pooling_type': 'MAX',
'left_context': 2,
'inputs': np.array([-2, 0, 2, 4, 0, 0]),
'input_paddings': np.array([0, 0, 0, 0, 1, 1]),
'expected_output': np.array([-2, 0, 2, 4, 0, 0]),
'expected_output_padding': np.array([0, 0, 0, 0, 1, 1]),
}, {
'testcase_name': 'avg_pooling',
'pooling_type': 'AVG',
'left_context': 2,
'inputs': np.array([-2, 0, 2, 4, 0, 0]),
'input_paddings': np.array([0, 0, 0, 0, 1, 1]),
'expected_output': np.array([-2, -1, 1, 3, 0, 0]),
'expected_output_padding': np.array([0, 0, 0, 0, 1, 1]),
}, {
'testcase_name': 'max_pooling_large_window',
'pooling_type': 'MAX',
'left_context': 10,
'inputs': np.array([-2, 0, 2, 4, 0, 0]),
'input_paddings': np.array([0, 0, 0, 0, 1, 1]),
'expected_output': np.array([-2, 0, 2, 4, 0, 0]),
'expected_output_padding': np.array([0, 0, 0, 0, 1, 1]),
}, {
'testcase_name': 'avg_pooling_large_window',
'pooling_type': 'AVG',
'left_context': 10,
'inputs': np.array([-2, 0, 2, 4, 0, 0]),
'input_paddings': np.array([0, 0, 0, 0, 1, 1]),
'expected_output': np.array([-2, -1, 0, 1, 0, 0]),
'expected_output_padding': np.array([0, 0, 0, 0, 1, 1]),
}, {
'testcase_name': 'avg_pooling_infinte_window',
'pooling_type': 'AVG',
'left_context': -1,
'inputs': np.array([-2, 0, 2, 4, 0, 0]),
'input_paddings': np.array([0, 0, 0, 0, 1, 1]),
'expected_output': np.array([-2, -1, 0, 1, 0, 0]),
'expected_output_padding': np.array([0, 0, 0, 0, 1, 1]),
})
def testSimpleCase(self, pooling_type, left_context, inputs, input_paddings,
expected_output, expected_output_padding):
inputs = inputs[np.newaxis, :, np.newaxis, np.newaxis]
input_paddings = input_paddings[np.newaxis, :]
param = conv_layers_builder.CausalPoolingLayer.Params().Set(
name='test_layer', pooling_type=pooling_type, left_context=left_context)
pooling_layer = param.Instantiate()
with self.session(use_gpu=True) as sess:
inputs = tf.convert_to_tensor(inputs, dtype=tf.float32)
input_paddings = tf.convert_to_tensor(input_paddings, dtype=tf.float32)
output, output_paddings = pooling_layer.FPropDefaultTheta(
inputs, input_paddings)
tf.global_variables_initializer().run()
output_val, output_paddings_val = sess.run([output, output_paddings])
self.assertAllClose(expected_output, output_val.flatten())
self.assertAllEqual(expected_output_padding, output_paddings_val.flatten())
if __name__ == '__main__':
tf.test.main()
| 35.620746 | 80 | 0.602193 |
from absl.testing import parameterized
from lingvo import compat as tf
from lingvo.core import bn_layers
from lingvo.core import conv_layers_builder
from lingvo.core import conv_layers_with_time_padding
from lingvo.core import layers
from lingvo.core import test_utils
import numpy as np
class ConvPaddedLayersTest(test_utils.TestCase):
def _ConvTestHelper(self, dilation, stride, activation, batch_norm,
weight_norm, in_dim, out_dim, filter_shape, conv_last,
causal_conv):
with self.session(use_gpu=True) as sess:
p1 = layers.Conv2DLayer.Params().Set(
name='conv_2d01',
filter_shape=filter_shape + [in_dim, out_dim],
filter_stride=stride,
dilation_rate=dilation,
activation=activation,
batch_norm=batch_norm,
weight_norm=weight_norm,
bias=not batch_norm,
conv_last=conv_last,
causal_convolution=causal_conv)
builder_params = conv_layers_builder.Builder.Params().Set(
weight_norm=weight_norm)
if batch_norm:
norm_p = conv_layers_with_time_padding.ConvBatchNormLayer.Params().Set(
decay=0.999)
builder_params.norm_layer_tpl = norm_p
else:
builder_params.norm_layer_tpl = None
p2 = builder_params.Instantiate().Conv2D(
'conv_2d02',
in_dim,
out_dim,
filter_shape,
stride=stride,
dilation=dilation,
activation=activation,
conv_last=conv_last,
is_causal=causal_conv)
l1 = p1.Instantiate()
l2 = p2.Instantiate()
conv_in = tf.constant(np.random.normal(size=[4, 5, 6, 3]), tf.float32)
conv_pad = np.full([4, 5], 0.0)
conv_pad[2, 3] = 1.0
conv_pad[2, 4] = 1.0
conv_pad = tf.constant(conv_pad, tf.float32)
l1_theta = l1.theta.Transform(tf.identity)
l2_theta = l2.theta.Transform(tf.identity)
conv_out1, out1_padding = l1.FProp(l1_theta, conv_in, conv_pad)
conv_out2, out2_padding = l2.FProp(l2_theta, conv_in, conv_pad)
tf.logging.info(l1_theta)
tf.logging.info(l2_theta)
l1_num_vars = l1_theta.Flatten()
l2_num_var2 = l2_theta.Flatten()
if len(l1_num_vars) != len(l2_num_var2):
tf.logging.info(
'Mismatched number of vars: l1: %d vars, l2: %d vars',
len(l1_num_vars), len(l2_num_var2))
w1 = l1_theta.w
w2 = l2_theta.conv_2d.w
tf.global_variables_initializer().run()
v1, p1 = sess.run([conv_out1, out1_padding])
w1_v = sess.run(w1)
v2, p2 = sess.run([conv_out2, out2_padding], feed_dict={w2: w1_v})
self.assertAllClose(v1, v2)
self.assertAllClose(p1, p2)
def testConvBasic(self):
dilation = [1, 1]
stride = [2, 3]
activation = 'NONE'
batch_norm = False
weight_norm = False
in_dim = 3
out_dim = 3
filter_shape = [2, 2]
conv_last = False
causal_conv = False
self._ConvTestHelper(dilation, stride, activation, batch_norm, weight_norm,
in_dim, out_dim, filter_shape, conv_last, causal_conv)
def testConvBnWnTanh(self):
dilation = [1, 1]
stride = [2, 3]
activation = 'TANH'
batch_norm = True
weight_norm = True
in_dim = 3
out_dim = 3
filter_shape = [2, 2]
conv_last = False
causal_conv = False
self._ConvTestHelper(dilation, stride, activation, batch_norm, weight_norm,
in_dim, out_dim, filter_shape, conv_last, causal_conv)
def testConvGn(self):
dilation = [1, 1]
stride = [2, 3]
activation = 'TANH'
in_dim = 3
out_dim = 4
filter_shape = [2, 2]
conv_last = False
causal_conv = False
with self.session(use_gpu=True) as sess:
builder_params = conv_layers_builder.Builder.Params().Set(
weight_norm=True)
builder_params.norm_layer_tpl = bn_layers.GroupNormLayer.Params().Set(
num_groups=2)
p = builder_params.Instantiate().Conv2D(
'conv_2d02',
in_dim,
out_dim,
filter_shape,
stride=stride,
dilation=dilation,
activation=activation,
conv_last=conv_last,
is_causal=causal_conv)
l = p.Instantiate()
conv_in = tf.constant(np.random.normal(size=[4, 5, 6, 3]), tf.float32)
conv_pad = np.full([4, 5], 0.0)
conv_pad[2, 3] = 1.0
conv_pad[2, 4] = 1.0
conv_pad = tf.constant(conv_pad, tf.float32)
conv_out, _ = l.FProp(l.theta, conv_in, conv_pad)
tf.global_variables_initializer().run()
v = sess.run(tf.reduce_sum(conv_out, 0))
expected_out = [[[-0.35070014, -1.7821487, 0.8349923, 1.1709788],
[-0.18872532, 0.9702145, 0.5534694, -1.1386856]],
[[0.34970748, -0.5403709, -0.9809327, -2.0930214],
[0.54232424, 1.1565661, 1.0349312, 1.3458138]],
[[0, 0, 0, 0], [0, 0, 0, 0]]]
self.assertAllClose(v, expected_out)
def testConvLastWnTanh(self):
dilation = [1, 1]
stride = [2, 3]
activation = 'TANH'
batch_norm = False
weight_norm = True
in_dim = 3
out_dim = 3
filter_shape = [2, 2]
conv_last = True
causal_conv = False
self._ConvTestHelper(dilation, stride, activation, batch_norm, weight_norm,
in_dim, out_dim, filter_shape, conv_last, causal_conv)
def testConvLastCausal(self):
dilation = [1, 1]
stride = [2, 3]
activation = 'TANH'
batch_norm = True
weight_norm = True
in_dim = 3
out_dim = 3
filter_shape = [2, 1]
conv_last = True
causal_conv = True
self._ConvTestHelper(dilation, stride, activation, batch_norm, weight_norm,
in_dim, out_dim, filter_shape, conv_last, causal_conv)
def _DepthwiseConvTestHelper(self, dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
filter_shape, conv_last, causal_conv):
with self.session(use_gpu=True) as sess:
p1 = layers.DepthwiseConv2DLayer.Params().Set(
name='conv_2d01',
filter_shape=filter_shape + [in_dim, depth_multiplier],
filter_stride=stride,
dilation_rate=dilation,
activation=activation,
batch_norm=batch_norm,
weight_norm=weight_norm,
bias=not batch_norm,
conv_last=conv_last,
causal_convolution=causal_conv)
builder_params = conv_layers_builder.Builder.Params().Set(
weight_norm=weight_norm)
if batch_norm:
norm_p = conv_layers_with_time_padding.ConvBatchNormLayer.Params().Set(
decay=0.999)
builder_params.norm_layer_tpl = norm_p
else:
builder_params.norm_layer_tpl = None
p2 = builder_params.Instantiate().DepthwiseConv2D(
'conv_2d02',
in_dim,
depth_multiplier,
filter_shape,
stride=stride,
activation=activation,
dilation=dilation,
conv_last=conv_last,
is_causal=causal_conv)
l1 = p1.Instantiate()
l2 = p2.Instantiate()
conv_in = tf.constant(np.random.normal(size=[4, 5, 6, 3]), tf.float32)
conv_pad = np.full([4, 5], 0.0)
conv_pad[2, 3] = 1.0
conv_pad[2, 4] = 1.0
conv_pad = tf.constant(conv_pad, tf.float32)
l1_theta = l1.theta.Transform(tf.identity)
l2_theta = l2.theta.Transform(tf.identity)
conv_out1, out1_padding = l1.FProp(l1_theta, conv_in, conv_pad)
conv_out2, out2_padding = l2.FProp(l2_theta, conv_in, conv_pad)
tf.logging.info(l1_theta)
tf.logging.info(l2_theta)
l1_num_vars = l1_theta.Flatten()
l2_num_var2 = l2_theta.Flatten()
if len(l1_num_vars) != len(l2_num_var2):
tf.logging.info(
'Mismatched number of vars: l1: %d vars, l2: %d vars',
len(l1_num_vars), len(l2_num_var2))
w1 = l1_theta.w
w2 = l2_theta.conv_2d.w
tf.global_variables_initializer().run()
v1, p1 = sess.run([conv_out1, out1_padding])
w1_v = sess.run([w1])[0]
v2, p2 = sess.run([conv_out2, out2_padding], feed_dict={w2: w1_v})
self.assertAllClose(v1, v2)
self.assertAllClose(p1, p2)
def testDepthConvBasic(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'NONE'
batch_norm = False
weight_norm = False
in_dim = 3
depth_multiplier = 2
filter_shape = [2, 2]
conv_last = False
causal_conv = False
self._DepthwiseConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
filter_shape, conv_last, causal_conv)
def testDepthConvBnWnTanh(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
batch_norm = True
weight_norm = True
in_dim = 3
depth_multiplier = 3
filter_shape = [2, 2]
conv_last = False
causal_conv = False
self._DepthwiseConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
filter_shape, conv_last, causal_conv)
def testDepthConvGn(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
in_dim = 4
depth_multiplier = 1
filter_shape = [2, 2]
conv_last = False
causal_conv = False
with self.session(use_gpu=True) as sess:
builder_params = conv_layers_builder.Builder.Params().Set(
weight_norm=True)
builder_params.norm_layer_tpl = bn_layers.GroupNormLayer.Params().Set(
num_groups=2)
p = builder_params.Instantiate().DepthwiseConv2D(
'conv_2d02',
in_dim,
depth_multiplier,
filter_shape,
stride=stride,
activation=activation,
dilation=dilation,
conv_last=conv_last,
is_causal=causal_conv)
l = p.Instantiate()
conv_in = tf.constant(np.random.normal(size=[4, 5, 6, 4]), tf.float32)
conv_pad = np.full([4, 5], 0.0)
conv_pad[2, 3] = 1.0
conv_pad[2, 4] = 1.0
conv_pad = tf.constant(conv_pad, tf.float32)
conv_out, _ = l.FProp(l.theta, conv_in, conv_pad)
tf.global_variables_initializer().run()
v = sess.run(tf.reduce_sum(conv_out, 0))
expected_out = [[[-0.77095497, 0.30285388, -0.05714864, 1.0386012],
[0.74034333, 0.04982221, -0.41769135, -2.9531932],
[-0.2647084, -0.1936804, 0.6598473, 0.42537105]],
[[1.3095646, -0.85996866, 2.2734299, -1.8457952],
[-0.9542263, -0.14199251, 0.51472515, 0.91931283],
[0.47267163, 1.4824618, 0.4548889, 0.93488806]],
[[0., 0., 0., 0.], [0., 0., 0., 0.], [0., 0., 0., 0.]]]
self.assertAllClose(expected_out, v)
def testDepthConvLastWnTanh(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
batch_norm = False
weight_norm = True
in_dim = 3
depth_multiplier = 3
filter_shape = [2, 2]
conv_last = True
causal_conv = False
self._DepthwiseConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
filter_shape, conv_last, causal_conv)
def testDepthConvLastCausal(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
batch_norm = True
weight_norm = True
in_dim = 3
depth_multiplier = 3
filter_shape = [2, 1]
conv_last = True
causal_conv = True
self._DepthwiseConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
filter_shape, conv_last, causal_conv)
  def _SeparableConvTestHelper(self, dilation, stride, activation, batch_norm,
                               weight_norm, in_dim, depth_multiplier, out_dim,
                               filter_shape, conv_last, causal_conv,
                               assert_equality=True):
    """Checks layers.SeparableConv2DLayer against the Builder-made equivalent.

    Instantiates the same separable conv two ways (the monolithic layer p1 and
    the Builder composition p2), copies p1's weights into p2 via feed_dict,
    runs both on identical random input and (optionally) asserts their outputs
    and output paddings match.

    Args:
      dilation: [h, w] dilation rate.
      stride: [h, w] filter stride.
      activation: activation name, e.g. 'TANH' or 'NONE'.
      batch_norm: whether to apply batch normalization.
      weight_norm: whether to apply weight normalization.
      in_dim: input channel count.
      depth_multiplier: channel multiplier of the depthwise stage.
      out_dim: output channel count of the pointwise stage.
      filter_shape: [h, w] filter shape.
      conv_last: whether the convolution is applied after normalization.
      causal_conv: whether the convolution is causal in time.
      assert_equality: if False, only runs both layers without comparing
        outputs (used when exact equality is not expected, e.g. BN cases).
    """
    with self.session(use_gpu=True) as sess:
      p1 = layers.SeparableConv2DLayer.Params().Set(
          name='conv_2d01',
          filter_shape=filter_shape + [in_dim, out_dim],
          depth_multiplier=depth_multiplier,
          filter_stride=stride,
          dilation_rate=dilation,
          activation=activation,
          batch_norm=batch_norm,
          weight_norm=weight_norm,
          bias=not batch_norm,
          conv_last=conv_last,
          causal_convolution=causal_conv)
      builder_params = conv_layers_builder.Builder.Params().Set(
          weight_norm=weight_norm)
      if batch_norm:
        norm_p = conv_layers_with_time_padding.ConvBatchNormLayer.Params().Set(
            decay=0.999)
        builder_params.norm_layer_tpl = norm_p
      else:
        builder_params.norm_layer_tpl = None
      p2 = builder_params.Instantiate().SeparableConv2D(
          'conv_2d02',
          in_dim,
          out_dim,
          depth_multiplier,
          filter_shape,
          stride=stride,
          activation=activation,
          dilation=dilation,
          conv_last=conv_last,
          is_causal=causal_conv)
      l1 = p1.Instantiate()
      l2 = p2.Instantiate()
      conv_in = tf.constant(np.random.normal(size=[4, 5, 6, 3]), tf.float32)
      # Pad the last two timesteps of batch item 2.
      conv_pad = np.full([4, 5], 0.0)
      conv_pad[2, 3] = 1.0
      conv_pad[2, 4] = 1.0
      conv_pad = tf.constant(conv_pad, tf.float32)
      l1_theta = l1.theta.Transform(tf.identity)
      l2_theta = l2.theta.Transform(tf.identity)
      conv_out1, out1_padding = l1.FProp(l1_theta, conv_in, conv_pad)
      conv_out2, out2_padding = l2.FProp(l2_theta, conv_in, conv_pad)
      tf.logging.info(l1_theta)
      tf.logging.info(l2_theta)
      l1_num_vars = l1_theta.Flatten()
      l2_num_var2 = l2_theta.Flatten()
      if len(l1_num_vars) != len(l2_num_var2):
        tf.logging.info(
            'Mismatched number of vars: l1: %d vars, l2: %d vars',
            len(l1_num_vars), len(l2_num_var2))
      # Weight tensors of the two implementations; l1 weights are fed into
      # l2's placeholders below so both layers compute with identical params.
      pointwise_conv_w1 = l1_theta.w
      depth_conv_w1 = l1_theta.depthwise_conv.w
      pointwise_conv_w2 = l2_theta.conv_1x1.w
      depth_conv_w2 = l2_theta.conv_2d.w
      tf.global_variables_initializer().run()
      v1, p1 = sess.run([conv_out1, out1_padding])
      p_w1_v, d_w1_v = sess.run([pointwise_conv_w1, depth_conv_w1])
      v2, p2 = sess.run([conv_out2, out2_padding],
                        feed_dict={
                            pointwise_conv_w2: p_w1_v,
                            depth_conv_w2: d_w1_v
                        })
      if assert_equality:
        self.assertAllClose(v1, v2)
        self.assertAllClose(p1, p2)
def testSeparableConv2DLayerBasic(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'NONE'
batch_norm = False
weight_norm = False
in_dim = 3
depth_multiplier = 3
out_dim = 2
filter_shape = [2, 2]
conv_last = False
causal_conv = False
self._SeparableConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
out_dim, filter_shape, conv_last, causal_conv)
def testSeparableConvWnWnTanh(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
batch_norm = False
weight_norm = True
in_dim = 3
depth_multiplier = 3
out_dim = 2
filter_shape = [2, 1]
conv_last = False
causal_conv = True
self._SeparableConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
out_dim, filter_shape, conv_last, causal_conv)
def testSeparableConvLastBnWnTanh(self):
dilation = [1, 1]
stride = [2, 2]
activation = 'TANH'
batch_norm = True
weight_norm = True
in_dim = 3
depth_multiplier = 3
out_dim = 2
filter_shape = [2, 1]
conv_last = True
causal_conv = True
self._SeparableConvTestHelper(dilation, stride, activation, batch_norm,
weight_norm, in_dim, depth_multiplier,
out_dim, filter_shape, conv_last, causal_conv,
assert_equality=False)
  def testSeparableConvGn(self):
    """Separable conv built via the Builder with GroupNorm, vs golden values."""
    dilation = [1, 1]
    stride = [2, 2]
    activation = 'TANH'
    in_dim = 4
    depth_multiplier = 1
    out_dim = 2
    filter_shape = [2, 1]
    conv_last = True
    causal_conv = True
    with self.session(use_gpu=True) as sess:
      builder_params = conv_layers_builder.Builder.Params().Set(
          weight_norm=True)
      # Swap the default normalization for GroupNorm with 2 groups.
      builder_params.norm_layer_tpl = bn_layers.GroupNormLayer.Params().Set(
          num_groups=2)
      p = builder_params.Instantiate().SeparableConv2D(
          'conv_2d02',
          in_dim,
          out_dim,
          depth_multiplier,
          filter_shape,
          stride=stride,
          activation=activation,
          dilation=dilation,
          conv_last=conv_last,
          is_causal=causal_conv)
      l = p.Instantiate()
      conv_in = tf.constant(np.random.normal(size=[4, 5, 6, 4]), tf.float32)
      # Pad the last two timesteps of batch item 2.
      conv_pad = np.full([4, 5], 0.0)
      conv_pad[2, 3] = 1.0
      conv_pad[2, 4] = 1.0
      conv_pad = tf.constant(conv_pad, tf.float32)
      conv_out, _ = l.FProp(l.theta, conv_in, conv_pad)
      tf.global_variables_initializer().run()
      v = sess.run(tf.reduce_sum(conv_out, 0))
      # Golden values — assumes the RNG is seeded by the test harness so that
      # np.random.normal above is deterministic; confirm if this test flakes.
      expected_out = [[[0.00963847, -0.04019006], [0.36265337, -0.06592329],
                       [0.65582913, -0.1533944]],
                      [[0.7512939, -0.7282307], [0.96100605, -1.9509676],
                       [0.4639647, 0.2485837]], [[0., 0.], [0., 0.], [0., 0.]]]
      self.assertAllClose(expected_out, v)
class CausalPoolingLayerTest(test_utils.TestCase, parameterized.TestCase):
  """Tests for CausalPoolingLayer over MAX/AVG pooling and window sizes."""

  @parameterized.named_parameters(
      {
          'testcase_name': 'max_pooling',
          'pooling_type': 'MAX',
          'left_context': 2,
          'inputs': np.array([-2, 0, 2, 4, 0, 0]),
          'input_paddings': np.array([0, 0, 0, 0, 1, 1]),
          'expected_output': np.array([-2, 0, 2, 4, 0, 0]),
          'expected_output_padding': np.array([0, 0, 0, 0, 1, 1]),
      }, {
          'testcase_name': 'avg_pooling',
          'pooling_type': 'AVG',
          'left_context': 2,
          'inputs': np.array([-2, 0, 2, 4, 0, 0]),
          'input_paddings': np.array([0, 0, 0, 0, 1, 1]),
          'expected_output': np.array([-2, -1, 1, 3, 0, 0]),
          'expected_output_padding': np.array([0, 0, 0, 0, 1, 1]),
      }, {
          # left_context larger than the sequence: window is clipped.
          'testcase_name': 'max_pooling_large_window',
          'pooling_type': 'MAX',
          'left_context': 10,
          'inputs': np.array([-2, 0, 2, 4, 0, 0]),
          'input_paddings': np.array([0, 0, 0, 0, 1, 1]),
          'expected_output': np.array([-2, 0, 2, 4, 0, 0]),
          'expected_output_padding': np.array([0, 0, 0, 0, 1, 1]),
      }, {
          'testcase_name': 'avg_pooling_large_window',
          'pooling_type': 'AVG',
          'left_context': 10,
          'inputs': np.array([-2, 0, 2, 4, 0, 0]),
          'input_paddings': np.array([0, 0, 0, 0, 1, 1]),
          'expected_output': np.array([-2, -1, 0, 1, 0, 0]),
          'expected_output_padding': np.array([0, 0, 0, 0, 1, 1]),
      }, {
          # left_context == -1: pool over the entire history.
          'testcase_name': 'avg_pooling_infinte_window',
          'pooling_type': 'AVG',
          'left_context': -1,
          'inputs': np.array([-2, 0, 2, 4, 0, 0]),
          'input_paddings': np.array([0, 0, 0, 0, 1, 1]),
          'expected_output': np.array([-2, -1, 0, 1, 0, 0]),
          'expected_output_padding': np.array([0, 0, 0, 0, 1, 1]),
      })
  def testSimpleCase(self, pooling_type, left_context, inputs, input_paddings,
                     expected_output, expected_output_padding):
    """Runs one pooling configuration and checks outputs and paddings."""
    # Expand 1-D fixtures to the layer's [batch, time, 1, 1] / [batch, time]
    # shapes expected by FProp.
    inputs = inputs[np.newaxis, :, np.newaxis, np.newaxis]
    input_paddings = input_paddings[np.newaxis, :]
    param = conv_layers_builder.CausalPoolingLayer.Params().Set(
        name='test_layer', pooling_type=pooling_type, left_context=left_context)
    pooling_layer = param.Instantiate()
    with self.session(use_gpu=True) as sess:
      inputs = tf.convert_to_tensor(inputs, dtype=tf.float32)
      input_paddings = tf.convert_to_tensor(input_paddings, dtype=tf.float32)
      output, output_paddings = pooling_layer.FPropDefaultTheta(
          inputs, input_paddings)
      tf.global_variables_initializer().run()
      output_val, output_paddings_val = sess.run([output, output_paddings])

    self.assertAllClose(expected_output, output_val.flatten())
    self.assertAllEqual(expected_output_padding, output_paddings_val.flatten())
if __name__ == '__main__':
tf.test.main()
| true | true |
f71a1a4b45bdc87ee38fe7fcbd95d71913d56e29 | 3,212 | py | Python | flickr.py | vicrobot/Flickr-Downloader | fecac723fca3c0f3e72b9d4581b0bcf52dfda3b5 | [
"MIT"
] | null | null | null | flickr.py | vicrobot/Flickr-Downloader | fecac723fca3c0f3e72b9d4581b0bcf52dfda3b5 | [
"MIT"
] | null | null | null | flickr.py | vicrobot/Flickr-Downloader | fecac723fca3c0f3e72b9d4581b0bcf52dfda3b5 | [
"MIT"
] | null | null | null | import flickrapi
import flickr_api
import urllib.request
import os
import sys
if __name__ != "__main__":
print("File 'flickr.py' not meant for transcendings and imports, direct use only")
sys.exit(0)
#functions
def url_list_maker(uiv):
    """Return the original-size ('url_o') photo URLs for a Flickr user.

    Args:
        uiv: Flickr user id whose public photo stream is walked.

    Returns:
        List of URL strings. Photos without an original-size URL are skipped
        (previously they were appended as None, which made the download loop
        attempt ``urlretrieve(None)`` and silently swallow the failure). The
        old bare ``except: pass`` also caught KeyboardInterrupt; no exception
        handling is needed here because Element.get simply returns None for a
        missing attribute.
    """
    photos = flickr.walk_user(user_id=uiv, per_page=100, extras='url_o')
    url_list = []
    for photo in photos:
        # 'url_o' -> original size; other size variants may be absent.
        url = photo.get('url_o')
        if url is not None:
            url_list.append(url)
    return url_list
def mkname(name):
    """Return *name* if it does not exist on disk, otherwise the first of
    name1, name2, ... that does not exist."""
    base = str(name)
    if not os.path.exists(base):
        return base
    suffix = 1
    while os.path.exists(base + str(suffix)):
        suffix += 1
    return base + str(suffix)
def checkIds(akv, skv, print_M = 0):
    # Validate an API key/secret pair by attempting a trivial user lookup.
    # Returns 1 when the keys work, 0 otherwise; optionally prints a message
    # (print_M truthy) so the interactive prompt can ask again.
    flickr_api.set_keys(api_key = akv, api_secret = skv)
    # 'vicro_bot' is just a known-existing account used as a probe.
    try: flickr_api.Person.findByUserName('vicro_bot').id
    except flickr_api.flickrerrors.FlickrAPIError:
        if print_M: print("Wrong Keys!!", "Try again")
        return 0
    return 1
# Load previously saved API credentials from the 'logs' file (one 'id1 <key>'
# and one 'id2 <secret>' line); create an empty file on first run.
try:
    with open('logs', 'r') as var:
        lines = [i.rstrip() for i in var.readlines() if len(i) ]
except FileNotFoundError:
    with open('logs', 'w+') as var:
        lines = []
# bool_contain counts found id lines (-1 start, so 1 means both present);
# bool_ask_old records whether the user chose to reuse saved keys.
bool_contain = -1
bool_ask_old = 0
dict_ids = {}
# Parse the saved key/secret lines.
for line in lines:
    if 'id1' in line:
        bool_contain += 1
        dict_ids['id1'] = ''.join(line.split(' ')[1:])
    if 'id2' in line:
        bool_contain += 1
        dict_ids['id2'] = ''.join(line.split(' ')[1:])
# Both ids found: verify them against the API before offering reuse.
if bool_contain == 1: bool_contain = checkIds(dict_ids['id1'],dict_ids['id2'])
if bool_contain == 1:
    inp_ask_old = input('Use previously saved keys?(Yes or No)').rstrip().lower()
    if inp_ask_old == 'yes':
        bool_ask_old = 1
        api_key_val = dict_ids['id1']
        secret_key_val = dict_ids['id2']
# No valid saved keys (or user declined): prompt until a working pair is
# entered, then persist it for the next run.
if not bool_ask_old:
    while 1:
        var1_ = 1
        api_key_val = input('Give your API key ').rstrip()
        secret_key_val = input('Give your API secret ').rstrip()
        var1_ = checkIds(api_key_val,secret_key_val, print_M = 1)
        if var1_: break
    writable = ['id1 {}\n'.format(api_key_val), 'id2 {}\n'.format(secret_key_val)]
    with open('logs', 'w+') as var:
        var.writelines(writable)
# Global API clients used by url_list_maker/checkIds above.
flickr=flickrapi.FlickrAPI(api_key_val, secret_key_val)
flickr_api.set_keys(api_key = api_key_val, api_secret = secret_key_val)
user_name = input('Give user name:- \n').rstrip()
user_id_val = flickr_api.Person.findByUserName(user_name).id
urls = url_list_maker(user_id_val)
# Create a fresh output directory (mkname avoids clobbering earlier runs)
# and download into it.
new_dir = mkname('Flickr_Imgs_{}'.format('_'.join(user_name.split(' '))))
os.mkdir(new_dir)
os.chdir(new_dir)
# Progress display: 'var' is the percentage contribution of one image.
# NOTE(review): this divides by len(urls) — a user with zero downloadable
# photos raises ZeroDivisionError here; confirm whether that case matters.
counter = 0
var = 100.0/(len(urls)*1.0)
print('Downloading ... {:05}%'.format(int(counter)), end = '', flush = True)
# NOTE(review): 'b' is never used after this assignment.
b, imagecount = 0, 1
for i in urls:
    # Files are named <first letter of user name><index>; failures for a
    # single image are ignored so the rest of the batch still downloads.
    try: urllib.request.urlretrieve( i, '{1}{0}'.format(imagecount, user_name[:1]))
    except KeyboardInterrupt:
        print('\nAbort')
        sys.exit()
    except Exception: pass
    counter += var
    # '\b'*6 rewinds the cursor over the 'NN.NN%' progress text.
    print('\b'*6, end = '', flush = True)
    imagecount += 1
    print('{:05}'.format(counter)[:5]+'%', end = '', flush = True)
print('\nDone')
| 29.740741 | 104 | 0.634184 | import flickrapi
import flickr_api
import urllib.request
import os
import sys
if __name__ != "__main__":
print("File 'flickr.py' not meant for transcendings and imports, direct use only")
sys.exit(0)
def url_list_maker(uiv):
count = 0
photos = flickr.walk_user(user_id = uiv, per_page = 100, extras = 'url_o')
url_list = []
for photo in photos:
try:
url_list.append(photo.get('url_o'))
except: pass
return url_list
def mkname(name):
num = 0
name = str(name)
new_n = name[:]
while os.path.exists(new_n):
num += 1
new_n = name + str(num)
return new_n
def checkIds(akv, skv, print_M = 0):
flickr_api.set_keys(api_key = akv, api_secret = skv)
try: flickr_api.Person.findByUserName('vicro_bot').id
except flickr_api.flickrerrors.FlickrAPIError:
if print_M: print("Wrong Keys!!", "Try again")
return 0
return 1
try:
with open('logs', 'r') as var:
lines = [i.rstrip() for i in var.readlines() if len(i) ]
except FileNotFoundError:
with open('logs', 'w+') as var:
lines = []
bool_contain = -1
bool_ask_old = 0
dict_ids = {}
for line in lines:
if 'id1' in line:
bool_contain += 1
dict_ids['id1'] = ''.join(line.split(' ')[1:])
if 'id2' in line:
bool_contain += 1
dict_ids['id2'] = ''.join(line.split(' ')[1:])
if bool_contain == 1: bool_contain = checkIds(dict_ids['id1'],dict_ids['id2'])
if bool_contain == 1:
inp_ask_old = input('Use previously saved keys?(Yes or No)').rstrip().lower()
if inp_ask_old == 'yes':
bool_ask_old = 1
api_key_val = dict_ids['id1']
secret_key_val = dict_ids['id2']
if not bool_ask_old:
while 1:
var1_ = 1
api_key_val = input('Give your API key ').rstrip()
secret_key_val = input('Give your API secret ').rstrip()
var1_ = checkIds(api_key_val,secret_key_val, print_M = 1)
if var1_: break
writable = ['id1 {}\n'.format(api_key_val), 'id2 {}\n'.format(secret_key_val)]
with open('logs', 'w+') as var:
var.writelines(writable)
flickr=flickrapi.FlickrAPI(api_key_val, secret_key_val)
flickr_api.set_keys(api_key = api_key_val, api_secret = secret_key_val)
user_name = input('Give user name:- \n').rstrip()
user_id_val = flickr_api.Person.findByUserName(user_name).id
urls = url_list_maker(user_id_val)
#directory work
new_dir = mkname('Flickr_Imgs_{}'.format('_'.join(user_name.split(' '))))
os.mkdir(new_dir)
os.chdir(new_dir)
# terminal show
counter = 0
var = 100.0/(len(urls)*1.0)
print('Downloading ... {:05}%'.format(int(counter)), end = '', flush = True)
b, imagecount = 0, 1
for i in urls:
try: urllib.request.urlretrieve( i, '{1}{0}'.format(imagecount, user_name[:1]))
except KeyboardInterrupt:
print('\nAbort')
sys.exit()
except Exception: pass
counter += var
print('\b'*6, end = '', flush = True)
imagecount += 1
print('{:05}'.format(counter)[:5]+'%', end = '', flush = True)
print('\nDone')
| true | true |
f71a1ac02563cd912e303318164fa03a1b3451a2 | 527 | py | Python | mydemo/matplotlibDemo/clickEvent.py | 541867329/pydata-notebook | 867f204d7abac96dbae80e6cdd2e3661e554d1dd | [
"MIT"
] | null | null | null | mydemo/matplotlibDemo/clickEvent.py | 541867329/pydata-notebook | 867f204d7abac96dbae80e6cdd2e3661e554d1dd | [
"MIT"
] | null | null | null | mydemo/matplotlibDemo/clickEvent.py | 541867329/pydata-notebook | 867f204d7abac96dbae80e6cdd2e3661e554d1dd | [
"MIT"
] | null | null | null | from matplotlib.pyplot import figure, show
import numpy as npy
from numpy.random import rand
if 1: # picking on a scatter plot (matplotlib.collections.RegularPolyCollection)
    # Four random rows: x/y positions, colors and marker sizes for 100 points.
    x, y, c, s = rand(4, 100)
    def onpick3(event):
        # event.ind holds the indices of the picked points; echo their coords.
        ind = event.ind
        print('onpick3 scatter:', ind, npy.take(x, ind), npy.take(y, ind))
    fig = figure()
    ax1 = fig.add_subplot(111)
    # picker=True makes the scatter collection emit 'pick_event's on click.
    col = ax1.scatter(x, y, 100 * s, c, picker=True)
    fig.canvas.mpl_connect('pick_event', onpick3)
    show()
| 23.954545 | 81 | 0.652751 | from matplotlib.pyplot import figure, show
import numpy as npy
from numpy.random import rand
if 1:
x, y, c, s = rand(4, 100)
def onpick3(event):
ind = event.ind
print('onpick3 scatter:', ind, npy.take(x, ind), npy.take(y, ind))
fig = figure()
ax1 = fig.add_subplot(111)
col = ax1.scatter(x, y, 100 * s, c, picker=True)
fig.canvas.mpl_connect('pick_event', onpick3)
show()
| true | true |
f71a1af80e296be1c22cd3a838643279ddd193cd | 313 | py | Python | Lib/objc/_IOAccelerator.py | kanishpatel/Pyto | feec7a1a54f635a6375fa7ede074ff35afbfbb95 | [
"MIT"
] | null | null | null | Lib/objc/_IOAccelerator.py | kanishpatel/Pyto | feec7a1a54f635a6375fa7ede074ff35afbfbb95 | [
"MIT"
] | null | null | null | Lib/objc/_IOAccelerator.py | kanishpatel/Pyto | feec7a1a54f635a6375fa7ede074ff35afbfbb95 | [
"MIT"
] | null | null | null | '''
Classes from the 'IOAccelerator' framework.
'''
# Import the Objective-C bridge; on platforms where rubicon raises ValueError
# at import time, fall back to a stub so lookups below degrade to None.
try:
    from rubicon.objc import ObjCClass
except ValueError:
    def ObjCClass(name):
        return None


def _Class(name):
    # Resolve an Objective-C class by name, returning None when the bridge
    # is unavailable (ObjCClass undefined raises NameError) instead of failing.
    try:
        return ObjCClass(name)
    except NameError:
        return None


IOAccelMTLEvent = _Class('IOAccelMTLEvent')
| 15.65 | 43 | 0.661342 |
try:
from rubicon.objc import ObjCClass
except ValueError:
def ObjCClass(name):
return None
def _Class(name):
try:
return ObjCClass(name)
except NameError:
return None
IOAccelMTLEvent = _Class('IOAccelMTLEvent')
| true | true |
f71a1b665af36fbf12688a3e2396cbb73c2862b5 | 230 | py | Python | app/books/urls.py | bayocr/example-docker-django | 550d7ce3e0dd5643616245eed9cbb9ae96812c11 | [
"MIT"
] | null | null | null | app/books/urls.py | bayocr/example-docker-django | 550d7ce3e0dd5643616245eed9cbb9ae96812c11 | [
"MIT"
] | 1 | 2021-05-25T00:56:48.000Z | 2021-05-25T00:56:48.000Z | app/books/urls.py | bayocr/example-docker-django | 550d7ce3e0dd5643616245eed9cbb9ae96812c11 | [
"MIT"
] | null | null | null | from django.urls import path
from .views import BookDetailView, BookListView
app_name = 'books'
urlpatterns = [
path('', BookListView.as_view(), name='list'),
path('<int:pk>/', BookDetailView.as_view(), name='detail')
] | 23 | 62 | 0.695652 | from django.urls import path
from .views import BookDetailView, BookListView
app_name = 'books'
urlpatterns = [
path('', BookListView.as_view(), name='list'),
path('<int:pk>/', BookDetailView.as_view(), name='detail')
] | true | true |
f71a1c4b664e4d204ee0e4819ed647e5e03c985d | 318 | py | Python | cwr_validator/__init__.py | weso/CWR-Validator | 18b83136f44f5bdd2f66c9af866b0e37acf682cb | [
"MIT"
] | 16 | 2015-04-21T15:50:14.000Z | 2021-07-14T07:22:32.000Z | cwr_validator/__init__.py | weso/CWR-Validator | 18b83136f44f5bdd2f66c9af866b0e37acf682cb | [
"MIT"
] | 12 | 2015-02-02T11:32:01.000Z | 2015-04-20T10:45:36.000Z | cwr_validator/__init__.py | weso/CWR-Validator | 18b83136f44f5bdd2f66c9af866b0e37acf682cb | [
"MIT"
] | 4 | 2015-02-01T21:45:03.000Z | 2018-08-20T07:51:02.000Z | # -*- coding: utf-8 -*-
from cwr_validator.app import create_app
"""
CWR Data API Validator WS
~~~~~~~~~~~~~~~~~~~~~~~~~
Validator Web Service for Common Works Registrations.
:copyright: (c) 2015 by WESO
:license: MIT, see LICENSE for more details.
"""
__version__ = '0.0.1'
__license__ = 'MIT'
| 21.2 | 57 | 0.613208 |
from cwr_validator.app import create_app
__version__ = '0.0.1'
__license__ = 'MIT'
| true | true |
f71a1e01f6c37695492ea9e9df0eec7b5250b6b1 | 986 | py | Python | env/Lib/site-packages/OpenGL/GLES2/EXT/texture_type_2_10_10_10_REV.py | 5gconnectedbike/Navio2 | 8c3f2b5d8bbbcea1fc08739945183c12b206712c | [
"BSD-3-Clause"
] | 210 | 2016-04-09T14:26:00.000Z | 2022-03-25T18:36:19.000Z | env/Lib/site-packages/OpenGL/GLES2/EXT/texture_type_2_10_10_10_REV.py | 5gconnectedbike/Navio2 | 8c3f2b5d8bbbcea1fc08739945183c12b206712c | [
"BSD-3-Clause"
] | 72 | 2016-09-04T09:30:19.000Z | 2022-03-27T17:06:53.000Z | env/Lib/site-packages/OpenGL/GLES2/EXT/texture_type_2_10_10_10_REV.py | 5gconnectedbike/Navio2 | 8c3f2b5d8bbbcea1fc08739945183c12b206712c | [
"BSD-3-Clause"
] | 64 | 2016-04-09T14:26:49.000Z | 2022-03-21T11:19:47.000Z | '''OpenGL extension EXT.texture_type_2_10_10_10_REV
This module customises the behaviour of the
OpenGL.raw.GLES2.EXT.texture_type_2_10_10_10_REV to provide a more
Python-friendly API
Overview (from the spec)
This extension adds a new texture data type, unsigned 2.10.10.10 ABGR,
which can be used with RGB or RGBA formats.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/EXT/texture_type_2_10_10_10_REV.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLES2 import _types, _glgets
from OpenGL.raw.GLES2.EXT.texture_type_2_10_10_10_REV import *
from OpenGL.raw.GLES2.EXT.texture_type_2_10_10_10_REV import _EXTENSION_NAME
def glInitTextureType2101010RevEXT():
    '''Return boolean indicating whether this extension is available'''
    from OpenGL import extensions
    available = extensions.hasGLExtension(_EXTENSION_NAME)
    return available
### END AUTOGENERATED SECTION | 35.214286 | 76 | 0.813387 | from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLES2 import _types, _glgets
from OpenGL.raw.GLES2.EXT.texture_type_2_10_10_10_REV import *
from OpenGL.raw.GLES2.EXT.texture_type_2_10_10_10_REV import _EXTENSION_NAME
def glInitTextureType2101010RevEXT():
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
| true | true |
f71a1e63deeffcfdc628570bf42b870b09678f9d | 621 | py | Python | debugprov/single_stepping.py | romerlrl/debugprov | 3527f6a3fa623354777aaaed2616b6b3065f8304 | [
"MIT"
] | 2 | 2019-09-26T17:46:12.000Z | 2021-04-21T00:19:59.000Z | debugprov/single_stepping.py | romerlrl/debugprov | 3527f6a3fa623354777aaaed2616b6b3065f8304 | [
"MIT"
] | null | null | null | debugprov/single_stepping.py | romerlrl/debugprov | 3527f6a3fa623354777aaaed2616b6b3065f8304 | [
"MIT"
] | 1 | 2020-09-22T20:37:19.000Z | 2020-09-22T20:37:19.000Z | from debugprov.navgiation_strategy import NavigationStrategy
from debugprov.node import Node
from debugprov.validity import Validity
class SingleStepping(NavigationStrategy):
    """Navigation strategy that evaluates every node of the execution tree
    in post-order (children before their parent)."""

    def navigate(self):
        """Evaluate the whole tree starting at the root, then finalize and
        return the (now annotated) execution tree."""
        self.recursive_navigate(self.exec_tree.root_node)
        self.finish_navigation()
        return self.exec_tree

    def recursive_navigate(self, current_node: Node):
        """Post-order walk: recurse into children first, then evaluate the
        current node; stops early once no node has unknown validity."""
        if self.there_are_nodes_with_unknown_validity():
            if current_node.has_childrens():
                for c in current_node.childrens:
                    self.recursive_navigate(c)
            self.evaluate(current_node)
| 34.5 | 60 | 0.706924 | from debugprov.navgiation_strategy import NavigationStrategy
from debugprov.node import Node
from debugprov.validity import Validity
class SingleStepping(NavigationStrategy):
def navigate(self):
self.recursive_navigate(self.exec_tree.root_node)
self.finish_navigation()
return self.exec_tree
def recursive_navigate(self, current_node: Node):
if self.there_are_nodes_with_unknown_validity():
if current_node.has_childrens():
for c in current_node.childrens:
self.recursive_navigate(c)
self.evaluate(current_node)
| true | true |
f71a1e9ab3b466d5a052c9eb0a36e082154d5dbc | 1,747 | py | Python | igibson/robots/jr2_robot.py | suresh-guttikonda/iGibson | a69e623058180146466cd52d4bb3c00d1facdacf | [
"MIT"
] | 360 | 2020-04-02T11:12:09.000Z | 2022-03-24T21:46:58.000Z | igibson/robots/jr2_robot.py | suresh-guttikonda/iGibson | a69e623058180146466cd52d4bb3c00d1facdacf | [
"MIT"
] | 169 | 2020-04-07T21:01:05.000Z | 2022-03-31T10:07:39.000Z | igibson/robots/jr2_robot.py | suresh-guttikonda/iGibson | a69e623058180146466cd52d4bb3c00d1facdacf | [
"MIT"
] | 94 | 2020-04-09T23:22:17.000Z | 2022-03-17T21:49:03.000Z | import gym
import numpy as np
from igibson.robots.robot_locomotor import LocomotorRobot
class JR2(LocomotorRobot):
    """
    JR2 robot (no arm)
    Reference: https://cvgl.stanford.edu/projects/jackrabbot/
    Uses joint velocity control
    """

    def __init__(self, config):
        self.config = config
        # Magnitude used for every discrete action and as the continuous bound.
        self.velocity = config.get("velocity", 1.0)
        LocomotorRobot.__init__(
            self,
            "jr2_urdf/jr2.urdf",
            action_dim=4,
            scale=config.get("robot_scale", 1.0),
            is_discrete=config.get("is_discrete", True),
            control="velocity",
        )

    def set_up_continuous_action_space(self):
        """
        Set up continuous action space: a 4-dim box in [-1, 1], with
        action_high/low scaled by the configured velocity.
        """
        self.action_space = gym.spaces.Box(shape=(self.action_dim,), low=-1.0, high=1.0, dtype=np.float32)
        self.action_high = self.velocity * np.ones([self.action_dim])
        self.action_low = -self.action_high

    def set_up_discrete_action_space(self):
        """
        Set up discrete action space: five fixed 4-joint velocity vectors
        (forward, backward, turn right, turn left, stop — see
        setup_keys_to_action for the index mapping).
        """
        self.action_list = [
            [self.velocity, self.velocity, 0, self.velocity],
            [-self.velocity, -self.velocity, 0, -self.velocity],
            [self.velocity, -self.velocity, -self.velocity, 0],
            [-self.velocity, self.velocity, self.velocity, 0],
            [0, 0, 0, 0],
        ]
        self.action_space = gym.spaces.Discrete(len(self.action_list))
        self.setup_keys_to_action()

    def setup_keys_to_action(self):
        # Keyboard bindings for teleoperation; indices refer to action_list.
        self.keys_to_action = {
            (ord("w"),): 0,  # forward
            (ord("s"),): 1,  # backward
            (ord("d"),): 2,  # turn right
            (ord("a"),): 3,  # turn left
            (): 4,  # no-op / stop
        }
| 31.196429 | 106 | 0.566113 | import gym
import numpy as np
from igibson.robots.robot_locomotor import LocomotorRobot
class JR2(LocomotorRobot):
def __init__(self, config):
self.config = config
self.velocity = config.get("velocity", 1.0)
LocomotorRobot.__init__(
self,
"jr2_urdf/jr2.urdf",
action_dim=4,
scale=config.get("robot_scale", 1.0),
is_discrete=config.get("is_discrete", True),
control="velocity",
)
def set_up_continuous_action_space(self):
self.action_space = gym.spaces.Box(shape=(self.action_dim,), low=-1.0, high=1.0, dtype=np.float32)
self.action_high = self.velocity * np.ones([self.action_dim])
self.action_low = -self.action_high
def set_up_discrete_action_space(self):
self.action_list = [
[self.velocity, self.velocity, 0, self.velocity],
[-self.velocity, -self.velocity, 0, -self.velocity],
[self.velocity, -self.velocity, -self.velocity, 0],
[-self.velocity, self.velocity, self.velocity, 0],
[0, 0, 0, 0],
]
self.action_space = gym.spaces.Discrete(len(self.action_list))
self.setup_keys_to_action()
def setup_keys_to_action(self):
self.keys_to_action = {
(ord("w"),): 0,
(ord("s"),): 1,
(ord("d"),): 2,
(ord("a"),): 3,
(): 4,
}
| true | true |
f71a1fa441e506dab6e2238a62846f24b22db7ce | 17,068 | py | Python | Training_Raw_data_validation/rawValidation.py | teja-ambati1202/Insurance-Fraud-Detection | a9bbdd5a2af68e0e90f8e16ba43129bab709614b | [
"Apache-2.0"
] | null | null | null | Training_Raw_data_validation/rawValidation.py | teja-ambati1202/Insurance-Fraud-Detection | a9bbdd5a2af68e0e90f8e16ba43129bab709614b | [
"Apache-2.0"
] | null | null | null | Training_Raw_data_validation/rawValidation.py | teja-ambati1202/Insurance-Fraud-Detection | a9bbdd5a2af68e0e90f8e16ba43129bab709614b | [
"Apache-2.0"
] | 1 | 2022-03-27T09:02:29.000Z | 2022-03-27T09:02:29.000Z | import sqlite3
from datetime import datetime
from os import listdir
import os
import re
import json
import shutil
import pandas as pd
from application_logging.logger import App_Logger
class Raw_Data_validation:
"""
This class shall be used for handling all the validation done on the Raw Training Data!!.
Written By: iNeuron Intelligence
Version: 1.0
Revisions: None
"""
    def __init__(self,path):
        """Store the raw-batch directory, the schema file path and a logger.

        Args:
            path: directory containing the raw training batch files.
        """
        self.Batch_Directory = path
        self.schema_path = 'schema_training.json'
        self.logger = App_Logger()
    def valuesFromSchema(self):
        """
        Method Name: valuesFromSchema
        Description: This method extracts all the relevant information from the pre-defined "Schema" file.
        Output: LengthOfDateStampInFile, LengthOfTimeStampInFile, column_names, Number of Columns
        On Failure: Raise ValueError,KeyError,Exception

        Written By: iNeuron Intelligence
        Version: 1.0
        Revisions: None
        """
        try:
            with open(self.schema_path, 'r') as f:
                dic = json.load(f)
                # NOTE(review): this close() is redundant — the with-block
                # already closes the file on exit.
                f.close()
            # 'pattern' is read but not returned; name validation builds its
            # own regex in manualRegexCreation instead.
            pattern = dic['SampleFileName']
            LengthOfDateStampInFile = dic['LengthOfDateStampInFile']
            LengthOfTimeStampInFile = dic['LengthOfTimeStampInFile']
            column_names = dic['ColName']
            NumberofColumns = dic['NumberofColumns']

            file = open("Training_Logs/valuesfromSchemaValidationLog.txt", 'a+')
            message ="LengthOfDateStampInFile:: %s" %LengthOfDateStampInFile + "\t" + "LengthOfTimeStampInFile:: %s" % LengthOfTimeStampInFile +"\t " + "NumberofColumns:: %s" % NumberofColumns + "\n"
            self.logger.log(file,message)
            file.close()

        # Log and re-raise: schema problems must abort the validation run.
        except ValueError:
            file = open("Training_Logs/valuesfromSchemaValidationLog.txt", 'a+')
            self.logger.log(file,"ValueError:Value not found inside schema_training.json")
            file.close()
            raise ValueError
        except KeyError:
            file = open("Training_Logs/valuesfromSchemaValidationLog.txt", 'a+')
            self.logger.log(file, "KeyError:Key value error incorrect key passed")
            file.close()
            raise KeyError
        except Exception as e:
            file = open("Training_Logs/valuesfromSchemaValidationLog.txt", 'a+')
            self.logger.log(file, str(e))
            file.close()
            raise e

        return LengthOfDateStampInFile, LengthOfTimeStampInFile, column_names, NumberofColumns
def manualRegexCreation(self):
"""
Method Name: manualRegexCreation
Description: This method contains a manually defined regex based on the "FileName" given in "Schema" file.
This Regex is used to validate the filename of the training data.
Output: Regex pattern
On Failure: None
Written By: iNeuron Intelligence
Version: 1.0
Revisions: None
"""
regex = "['fraudDetection']+['\_'']+[\d_]+[\d]+\.csv"
return regex
def createDirectoryForGoodBadRawData(self):
"""
Method Name: createDirectoryForGoodBadRawData
Description: This method creates directories to store the Good Data and Bad Data
after validating the training data.
Output: None
On Failure: OSError
Written By: iNeuron Intelligence
Version: 1.0
Revisions: None
"""
try:
path = os.path.join("Training_Raw_files_validated/", "Good_Raw/")
if not os.path.isdir(path):
os.makedirs(path)
path = os.path.join("Training_Raw_files_validated/", "Bad_Raw/")
if not os.path.isdir(path):
os.makedirs(path)
except OSError as ex:
file = open("Training_Logs/GeneralLog.txt", 'a+')
self.logger.log(file,"Error while creating Directory %s:" % ex)
file.close()
raise OSError
    def deleteExistingGoodDataTrainingFolder(self):
        """
        Method Name: deleteExistingGoodDataTrainingFolder
        Description: This method deletes the directory made to store the Good Data
                     after loading the data in the table. Once the good files are
                     loaded in the DB,deleting the directory ensures space optimization.
        Output: None
        On Failure: OSError

        Written By: iNeuron Intelligence
        Version: 1.0
        Revisions: None
        """
        try:
            path = 'Training_Raw_files_validated/'
            # Only Good_Raw is removed here; Bad_Raw is handled by
            # deleteExistingBadDataTrainingFolder / moveBadFilesToArchiveBad.
            if os.path.isdir(path + 'Good_Raw/'):
                shutil.rmtree(path + 'Good_Raw/')
                file = open("Training_Logs/GeneralLog.txt", 'a+')
                self.logger.log(file,"GoodRaw directory deleted successfully!!!")
                file.close()
        except OSError as s:
            file = open("Training_Logs/GeneralLog.txt", 'a+')
            self.logger.log(file,"Error while Deleting Directory : %s" %s)
            file.close()
            raise OSError
def deleteExistingBadDataTrainingFolder(self):
"""
Method Name: deleteExistingBadDataTrainingFolder
Description: This method deletes the directory made to store the bad Data.
Output: None
On Failure: OSError
Written By: iNeuron Intelligence
Version: 1.0
Revisions: None
"""
try:
path = 'Training_Raw_files_validated/'
if os.path.isdir(path + 'Bad_Raw/'):
shutil.rmtree(path + 'Bad_Raw/')
file = open("Training_Logs/GeneralLog.txt", 'a+')
self.logger.log(file,"BadRaw directory deleted before starting validation!!!")
file.close()
except OSError as s:
file = open("Training_Logs/GeneralLog.txt", 'a+')
self.logger.log(file,"Error while Deleting Directory : %s" %s)
file.close()
raise OSError
    def moveBadFilesToArchiveBad(self):
        """
        Method Name: moveBadFilesToArchiveBad
        Description: This method deletes the directory made to store the Bad Data
                     after moving the data in an archive folder. We archive the bad
                     files to send them back to the client for invalid data issue.
        Output: None
        On Failure: Exception (logged and re-raised)

        Written By: iNeuron Intelligence
        Version: 1.0
        Revisions: None
        """
        # Timestamp used to give each archive run a unique folder name.
        now = datetime.now()
        date = now.date()
        time = now.strftime("%H%M%S")
        try:
            source = 'Training_Raw_files_validated/Bad_Raw/'
            if os.path.isdir(source):
                path = "TrainingArchiveBadData"
                if not os.path.isdir(path):
                    os.makedirs(path)
                dest = 'TrainingArchiveBadData/BadData_' + str(date)+"_"+str(time)
                if not os.path.isdir(dest):
                    os.makedirs(dest)
                files = os.listdir(source)
                # Move each bad file unless a file of the same name already
                # exists in the archive destination.
                for f in files:
                    if f not in os.listdir(dest):
                        shutil.move(source + f, dest)
                file = open("Training_Logs/GeneralLog.txt", 'a+')
                self.logger.log(file,"Bad files moved to archive")
                path = 'Training_Raw_files_validated/'
                if os.path.isdir(path + 'Bad_Raw/'):
                    shutil.rmtree(path + 'Bad_Raw/')
                self.logger.log(file,"Bad Raw Data Folder Deleted successfully!!")
                file.close()
        except Exception as e:
            file = open("Training_Logs/GeneralLog.txt", 'a+')
            self.logger.log(file, "Error while moving bad files to archive:: %s" % e)
            file.close()
            raise e
    def validationFileNameRaw(self,regex,LengthOfDateStampInFile,LengthOfTimeStampInFile):
        """
        Method Name: validationFileNameRaw
        Description: This function validates the name of the training csv files as per given name in the schema!
                     Regex pattern is used to do the validation.If name format do not match the file is moved
                     to Bad Raw Data folder else in Good raw data.
        Output: None
        On Failure: Exception (logged and re-raised)

        Written By: iNeuron Intelligence
        Version: 1.0
        Revisions: None
        """
        # delete the directories for good and bad data in case last run was unsuccessful and folders were not deleted.
        self.deleteExistingBadDataTrainingFolder()
        self.deleteExistingGoodDataTrainingFolder()
        #create new directories
        self.createDirectoryForGoodBadRawData()
        onlyfiles = [f for f in listdir(self.Batch_Directory)]
        try:
            f = open("Training_Logs/nameValidationLog.txt", 'a+')
            for filename in onlyfiles:
                if (re.match(regex, filename)):
                    # Split off the extension, then split the stem on '_' to
                    # get [prefix, datestamp, timestamp].
                    # NOTE(review): '.csv' is an unescaped regex here ('.'
                    # matches any char) — confirm intended.
                    splitAtDot = re.split('.csv', filename)
                    splitAtDot = (re.split('_', splitAtDot[0]))
                    # Date and time stamps must match the schema lengths.
                    if len(splitAtDot[1]) == LengthOfDateStampInFile:
                        if len(splitAtDot[2]) == LengthOfTimeStampInFile:
                            shutil.copy("Training_Batch_Files/" + filename, "Training_Raw_files_validated/Good_Raw")
                            self.logger.log(f,"Valid File name!! File moved to GoodRaw Folder :: %s" % filename)
                        else:
                            shutil.copy("Training_Batch_Files/" + filename, "Training_Raw_files_validated/Bad_Raw")
                            self.logger.log(f,"Invalid File Name!! File moved to Bad Raw Folder :: %s" % filename)
                    else:
                        shutil.copy("Training_Batch_Files/" + filename, "Training_Raw_files_validated/Bad_Raw")
                        self.logger.log(f,"Invalid File Name!! File moved to Bad Raw Folder :: %s" % filename)
                else:
                    shutil.copy("Training_Batch_Files/" + filename, "Training_Raw_files_validated/Bad_Raw")
                    self.logger.log(f, "Invalid File Name!! File moved to Bad Raw Folder :: %s" % filename)

            f.close()

        except Exception as e:
            f = open("Training_Logs/nameValidationLog.txt", 'a+')
            self.logger.log(f, "Error occured while validating FileName %s" % e)
            f.close()
            raise e
def validateColumnLength(self,NumberofColumns):
"""
Method Name: validateColumnLength
Description: This function validates the number of columns in the csv files.
It is should be same as given in the schema file.
If not same file is not suitable for processing and thus is moved to Bad Raw Data folder.
If the column number matches, file is kept in Good Raw Data for processing.
Output: None
On Failure: Exception
Written By: iNeuron Intelligence
Version: 1.0
Revisions: None
"""
try:
f = open("Training_Logs/columnValidationLog.txt", 'a+')
self.logger.log(f,"Column Length Validation Started!!")
for file in listdir('Training_Raw_files_validated/Good_Raw/'):
csv = pd.read_csv("Training_Raw_files_validated/Good_Raw/" + file)
if csv.shape[1] == NumberofColumns:
pass
else:
shutil.move("Training_Raw_files_validated/Good_Raw/" + file, "Training_Raw_files_validated/Bad_Raw")
self.logger.log(f, "Invalid Column Length for the file!! File moved to Bad Raw Folder :: %s" % file)
self.logger.log(f, "Column Length Validation Completed!!")
except OSError:
f = open("Training_Logs/columnValidationLog.txt", 'a+')
self.logger.log(f, "Error Occured while moving the file :: %s" % OSError)
f.close()
raise OSError
except Exception as e:
f = open("Training_Logs/columnValidationLog.txt", 'a+')
self.logger.log(f, "Error Occured:: %s" % e)
f.close()
raise e
f.close()
def validateMissingValuesInWholeColumn(self):
"""
Method Name: validateMissingValuesInWholeColumn
Description: This function validates if any column in the csv file has all values missing.
If all the values are missing, the file is not suitable for processing.
SUch files are moved to bad raw data.
Output: None
On Failure: Exception
Written By: iNeuron Intelligence
Version: 1.0
Revisions: None
"""
try:
f = open("Training_Logs/missingValuesInColumn.txt", 'a+')
self.logger.log(f,"Missing Values Validation Started!!")
for file in listdir('Training_Raw_files_validated/Good_Raw/'):
csv = pd.read_csv("Training_Raw_files_validated/Good_Raw/" + file)
count = 0
for columns in csv:
if (len(csv[columns]) - csv[columns].count()) == len(csv[columns]):
count+=1
shutil.move("Training_Raw_files_validated/Good_Raw/" + file,
"Training_Raw_files_validated/Bad_Raw")
self.logger.log(f,"Invalid Column for the file!! File moved to Bad Raw Folder :: %s" % file)
break
if count==0:
csv.rename(columns={"Unnamed: 0": "Wafer"}, inplace=True)
csv.to_csv("Training_Raw_files_validated/Good_Raw/" + file, index=None, header=True)
except OSError:
f = open("Training_Logs/missingValuesInColumn.txt", 'a+')
self.logger.log(f, "Error Occured while moving the file :: %s" % OSError)
f.close()
raise OSError
except Exception as e:
f = open("Training_Logs/missingValuesInColumn.txt", 'a+')
self.logger.log(f, "Error Occured:: %s" % e)
f.close()
raise e
f.close()
| 44.563969 | 200 | 0.489278 | import sqlite3
from datetime import datetime
from os import listdir
import os
import re
import json
import shutil
import pandas as pd
from application_logging.logger import App_Logger
class Raw_Data_validation:
def __init__(self,path):
self.Batch_Directory = path
self.schema_path = 'schema_training.json'
self.logger = App_Logger()
def valuesFromSchema(self):
try:
with open(self.schema_path, 'r') as f:
dic = json.load(f)
f.close()
pattern = dic['SampleFileName']
LengthOfDateStampInFile = dic['LengthOfDateStampInFile']
LengthOfTimeStampInFile = dic['LengthOfTimeStampInFile']
column_names = dic['ColName']
NumberofColumns = dic['NumberofColumns']
file = open("Training_Logs/valuesfromSchemaValidationLog.txt", 'a+')
message ="LengthOfDateStampInFile:: %s" %LengthOfDateStampInFile + "\t" + "LengthOfTimeStampInFile:: %s" % LengthOfTimeStampInFile +"\t " + "NumberofColumns:: %s" % NumberofColumns + "\n"
self.logger.log(file,message)
file.close()
except ValueError:
file = open("Training_Logs/valuesfromSchemaValidationLog.txt", 'a+')
self.logger.log(file,"ValueError:Value not found inside schema_training.json")
file.close()
raise ValueError
except KeyError:
file = open("Training_Logs/valuesfromSchemaValidationLog.txt", 'a+')
self.logger.log(file, "KeyError:Key value error incorrect key passed")
file.close()
raise KeyError
except Exception as e:
file = open("Training_Logs/valuesfromSchemaValidationLog.txt", 'a+')
self.logger.log(file, str(e))
file.close()
raise e
return LengthOfDateStampInFile, LengthOfTimeStampInFile, column_names, NumberofColumns
def manualRegexCreation(self):
regex = "['fraudDetection']+['\_'']+[\d_]+[\d]+\.csv"
return regex
def createDirectoryForGoodBadRawData(self):
try:
path = os.path.join("Training_Raw_files_validated/", "Good_Raw/")
if not os.path.isdir(path):
os.makedirs(path)
path = os.path.join("Training_Raw_files_validated/", "Bad_Raw/")
if not os.path.isdir(path):
os.makedirs(path)
except OSError as ex:
file = open("Training_Logs/GeneralLog.txt", 'a+')
self.logger.log(file,"Error while creating Directory %s:" % ex)
file.close()
raise OSError
def deleteExistingGoodDataTrainingFolder(self):
try:
path = 'Training_Raw_files_validated/'
# if os.path.isdir("ids/" + userName):
# if os.path.isdir(path + 'Bad_Raw/'):
# shutil.rmtree(path + 'Bad_Raw/')
if os.path.isdir(path + 'Good_Raw/'):
shutil.rmtree(path + 'Good_Raw/')
file = open("Training_Logs/GeneralLog.txt", 'a+')
self.logger.log(file,"GoodRaw directory deleted successfully!!!")
file.close()
except OSError as s:
file = open("Training_Logs/GeneralLog.txt", 'a+')
self.logger.log(file,"Error while Deleting Directory : %s" %s)
file.close()
raise OSError
def deleteExistingBadDataTrainingFolder(self):
try:
path = 'Training_Raw_files_validated/'
if os.path.isdir(path + 'Bad_Raw/'):
shutil.rmtree(path + 'Bad_Raw/')
file = open("Training_Logs/GeneralLog.txt", 'a+')
self.logger.log(file,"BadRaw directory deleted before starting validation!!!")
file.close()
except OSError as s:
file = open("Training_Logs/GeneralLog.txt", 'a+')
self.logger.log(file,"Error while Deleting Directory : %s" %s)
file.close()
raise OSError
def moveBadFilesToArchiveBad(self):
now = datetime.now()
date = now.date()
time = now.strftime("%H%M%S")
try:
source = 'Training_Raw_files_validated/Bad_Raw/'
if os.path.isdir(source):
path = "TrainingArchiveBadData"
if not os.path.isdir(path):
os.makedirs(path)
dest = 'TrainingArchiveBadData/BadData_' + str(date)+"_"+str(time)
if not os.path.isdir(dest):
os.makedirs(dest)
files = os.listdir(source)
for f in files:
if f not in os.listdir(dest):
shutil.move(source + f, dest)
file = open("Training_Logs/GeneralLog.txt", 'a+')
self.logger.log(file,"Bad files moved to archive")
path = 'Training_Raw_files_validated/'
if os.path.isdir(path + 'Bad_Raw/'):
shutil.rmtree(path + 'Bad_Raw/')
self.logger.log(file,"Bad Raw Data Folder Deleted successfully!!")
file.close()
except Exception as e:
file = open("Training_Logs/GeneralLog.txt", 'a+')
self.logger.log(file, "Error while moving bad files to archive:: %s" % e)
file.close()
raise e
def validationFileNameRaw(self,regex,LengthOfDateStampInFile,LengthOfTimeStampInFile):
# delete the directories for good and bad data in case last run was unsuccessful and folders were not deleted.
self.deleteExistingBadDataTrainingFolder()
self.deleteExistingGoodDataTrainingFolder()
#create new directories
self.createDirectoryForGoodBadRawData()
onlyfiles = [f for f in listdir(self.Batch_Directory)]
try:
f = open("Training_Logs/nameValidationLog.txt", 'a+')
for filename in onlyfiles:
if (re.match(regex, filename)):
splitAtDot = re.split('.csv', filename)
splitAtDot = (re.split('_', splitAtDot[0]))
if len(splitAtDot[1]) == LengthOfDateStampInFile:
if len(splitAtDot[2]) == LengthOfTimeStampInFile:
shutil.copy("Training_Batch_Files/" + filename, "Training_Raw_files_validated/Good_Raw")
self.logger.log(f,"Valid File name!! File moved to GoodRaw Folder :: %s" % filename)
else:
shutil.copy("Training_Batch_Files/" + filename, "Training_Raw_files_validated/Bad_Raw")
self.logger.log(f,"Invalid File Name!! File moved to Bad Raw Folder :: %s" % filename)
else:
shutil.copy("Training_Batch_Files/" + filename, "Training_Raw_files_validated/Bad_Raw")
self.logger.log(f,"Invalid File Name!! File moved to Bad Raw Folder :: %s" % filename)
else:
shutil.copy("Training_Batch_Files/" + filename, "Training_Raw_files_validated/Bad_Raw")
self.logger.log(f, "Invalid File Name!! File moved to Bad Raw Folder :: %s" % filename)
f.close()
except Exception as e:
f = open("Training_Logs/nameValidationLog.txt", 'a+')
self.logger.log(f, "Error occured while validating FileName %s" % e)
f.close()
raise e
def validateColumnLength(self,NumberofColumns):
try:
f = open("Training_Logs/columnValidationLog.txt", 'a+')
self.logger.log(f,"Column Length Validation Started!!")
for file in listdir('Training_Raw_files_validated/Good_Raw/'):
csv = pd.read_csv("Training_Raw_files_validated/Good_Raw/" + file)
if csv.shape[1] == NumberofColumns:
pass
else:
shutil.move("Training_Raw_files_validated/Good_Raw/" + file, "Training_Raw_files_validated/Bad_Raw")
self.logger.log(f, "Invalid Column Length for the file!! File moved to Bad Raw Folder :: %s" % file)
self.logger.log(f, "Column Length Validation Completed!!")
except OSError:
f = open("Training_Logs/columnValidationLog.txt", 'a+')
self.logger.log(f, "Error Occured while moving the file :: %s" % OSError)
f.close()
raise OSError
except Exception as e:
f = open("Training_Logs/columnValidationLog.txt", 'a+')
self.logger.log(f, "Error Occured:: %s" % e)
f.close()
raise e
f.close()
def validateMissingValuesInWholeColumn(self):
try:
f = open("Training_Logs/missingValuesInColumn.txt", 'a+')
self.logger.log(f,"Missing Values Validation Started!!")
for file in listdir('Training_Raw_files_validated/Good_Raw/'):
csv = pd.read_csv("Training_Raw_files_validated/Good_Raw/" + file)
count = 0
for columns in csv:
if (len(csv[columns]) - csv[columns].count()) == len(csv[columns]):
count+=1
shutil.move("Training_Raw_files_validated/Good_Raw/" + file,
"Training_Raw_files_validated/Bad_Raw")
self.logger.log(f,"Invalid Column for the file!! File moved to Bad Raw Folder :: %s" % file)
break
if count==0:
csv.rename(columns={"Unnamed: 0": "Wafer"}, inplace=True)
csv.to_csv("Training_Raw_files_validated/Good_Raw/" + file, index=None, header=True)
except OSError:
f = open("Training_Logs/missingValuesInColumn.txt", 'a+')
self.logger.log(f, "Error Occured while moving the file :: %s" % OSError)
f.close()
raise OSError
except Exception as e:
f = open("Training_Logs/missingValuesInColumn.txt", 'a+')
self.logger.log(f, "Error Occured:: %s" % e)
f.close()
raise e
f.close()
| true | true |
f71a1fb42d65587e922d09e984061b07a1aaed3f | 122 | py | Python | askci/plugins/pam_auth/__init__.py | hpsee/askci | ef1e2e75481b71db7fbe774cb81938055aa596d0 | [
"MIT"
] | 3 | 2019-11-21T09:04:36.000Z | 2019-11-23T13:29:43.000Z | askci/plugins/pam_auth/__init__.py | hpsee/askci | ef1e2e75481b71db7fbe774cb81938055aa596d0 | [
"MIT"
] | 13 | 2019-11-21T20:28:23.000Z | 2019-11-26T19:34:22.000Z | askci/plugins/pam_auth/__init__.py | hpsee/askci | ef1e2e75481b71db7fbe774cb81938055aa596d0 | [
"MIT"
] | null | null | null | AUTHENTICATION_BACKENDS = (
"django_pam.auth.backends.PAMBackend",
"django.contrib.auth.backends.ModelBackend",
)
| 24.4 | 48 | 0.754098 | AUTHENTICATION_BACKENDS = (
"django_pam.auth.backends.PAMBackend",
"django.contrib.auth.backends.ModelBackend",
)
| true | true |
f71a227f18ed9f23f6798ac8a5fc17a955b9c0cb | 3,870 | py | Python | QCT/get_S_norm.py | inqlee0704/pyqct | 304612ed558e7c46fe987ecfea8145cbc5721700 | [
"MIT"
] | null | null | null | QCT/get_S_norm.py | inqlee0704/pyqct | 304612ed558e7c46fe987ecfea8145cbc5721700 | [
"MIT"
] | null | null | null | QCT/get_S_norm.py | inqlee0704/pyqct | 304612ed558e7c46fe987ecfea8145cbc5721700 | [
"MIT"
] | null | null | null | # ##############################################################################
# Usage: python get_S_norm.py Subj I1 I2
# Time: ~ 20s
# Ref:
# ##############################################################################
# 20220118, In Kyu Lee
# No version suffix
# ##############################################################################
# v1c: 08/11/2021, In Kyu Lee
# - Fixed: when V_IN < V_EX, s_norm returns nan issue.
# - ownpow is used
# v1b: 08/10/2021, In Kyu Lee
# - S* stat is added
# 03/18/2021, In Kyu Lee
# Calculate S*
# ##############################################################################
# Input:
# - displacement img, ex) PMSN03001_EX0-TO-PMSN03001_IN0-SSTVD_disp_resample.mhd'
# - IN lobe mask, ex) PMSN03001_IN0_vida-lobes.img
# Output:
# - s* image, ex) PMSN03001_EX0-TO-PMSN03001_IN0-SSTVD_s_norm.img
# - s* stat, ex) PMSN03001_EX0-TO-PMSN03001_IN0-SSTVD_lobar_s_norm.txt
# ##############################################################################w
# import libraries
import os
import sys
import numpy as np
import time
import pandas as pd
from medpy.io import load, save
import SimpleITK as sitk
sitk.ProcessObject_SetGlobalWarningDisplay(False)
import warnings
warnings.filterwarnings("ignore")
def ownpow(a, b):
if a > 0:
return a**b
if a < 0:
temp = abs(a)**b
return -1*temp
start = time.time()
Subj = str(sys.argv[1]) # PMSN03001
I1 = str(sys.argv[2]) # 'IN0'
I2 = str(sys.argv[3]) # 'EX0'
disp_path = f'{Subj}_{I2}-TO-{Subj}_{I1}-SSTVD_disp_resample.mhd'
histo_EX = pd.read_csv(f'{Subj}_{I2}_vida-histo.csv')
histo_IN = pd.read_csv(f'{Subj}_{I1}_vida-histo.csv')
s_norm_stat_path = f'{Subj}_{I2}-TO-{Subj}_{I1}-SSTVD_lobar_s_norm.txt'
IN_lobe_path = f'{Subj}_{I1}_vida-lobes.img'
if not os.path.exists(IN_lobe_path):
IN_lobe_path = f'{Subj}_{I1}_vida-lobes.img.gz'
s_norm_img_path = f'{Subj}_{I2}-TO-{Subj}_{I1}-SSTVD_s_norm.img'
# V_cm3_IN
V_EX = histo_EX.loc[histo_EX.location=='both', 'total-volume-cm3'].values[0]
V_IN = histo_IN.loc[histo_IN.location=='both', 'total-volume-cm3'].values[0]
# cm^3 -> mm^3
V_EX = V_EX * 1000
V_IN = V_IN * 1000
# Data Loading . . .
disp, disp_h = load(disp_path)
IN_lobe_img, IN_lobe_header = load(IN_lobe_path)
s_norm_h = disp_h
# [mm]
s = (disp[:,:,:,0]**2+disp[:,:,:,1]**2+disp[:,:,:,2]**2)**0.5
# This doesn't work if V_IN- V_EX is negative
# s_norm = s/((V_IN-V_EX)**(1/3))
s_norm = s/ownpow(V_IN-V_EX,1/3)
# Prep stat
s_norm_l0 = np.mean(s_norm[IN_lobe_img==8])
s_norm_l1 = np.mean(s_norm[IN_lobe_img==16])
s_norm_l2 = np.mean(s_norm[IN_lobe_img==32])
s_norm_l3 = np.mean(s_norm[IN_lobe_img==64])
s_norm_l4 = np.mean(s_norm[IN_lobe_img==128])
s_norm_mean = (s_norm_l0 + s_norm_l1 + s_norm_l2 + s_norm_l3 + s_norm_l4)/5
s_norm_l0_sd = np.std(s_norm[IN_lobe_img==8])
s_norm_l1_sd = np.std(s_norm[IN_lobe_img==16])
s_norm_l2_sd = np.std(s_norm[IN_lobe_img==32])
s_norm_l3_sd = np.std(s_norm[IN_lobe_img==64])
s_norm_l4_sd = np.std(s_norm[IN_lobe_img==128])
s_norm_sd = np.std(s_norm[IN_lobe_img!=0])
# CV = std/mean
s_norm_l0_cv = s_norm_l0_sd/s_norm_l0
s_norm_l1_cv = s_norm_l1_sd/s_norm_l1
s_norm_l2_cv = s_norm_l2_sd/s_norm_l2
s_norm_l3_cv = s_norm_l3_sd/s_norm_l3
s_norm_l4_cv = s_norm_l4_sd/s_norm_l4
s_norm_cv = s_norm_sd/s_norm_mean
s_norm_stat = pd.DataFrame({'Lobes':['Lobe0','Lobe1','Lobe2','Lobe3','Lobe4','All'],
'sStar_m':np.float16([s_norm_l0,s_norm_l1,s_norm_l2,s_norm_l3,s_norm_l4,s_norm_mean]),
'sStar_sd':np.float16([s_norm_l0_sd,s_norm_l1_sd,s_norm_l2_sd,s_norm_l3_sd,s_norm_l4_sd,s_norm_sd]),
'sStar_cv':np.float16([s_norm_l0_cv,s_norm_l1_cv,s_norm_l2_cv,s_norm_l3_cv,s_norm_l4_cv,s_norm_cv])})
# Save
save(s_norm,s_norm_img_path,hdr=s_norm_h)
s_norm_stat.to_csv(s_norm_stat_path, index=False, sep=' ')
end = time.time()
print(f'Elapsed time: {end-start}s')
| 35.181818 | 115 | 0.640052 | true | true | |
f71a22b92bee8bbe5221f6a278525d912c8b3c92 | 577 | py | Python | OLD THINGS/faceid_nabeel.py | AmirQadir/Auto-Object-Detection-and-Tracker | 24c6f4d18b0496ef19250ccc42f53a7f1f42ed3f | [
"MIT"
] | 1 | 2019-05-30T00:59:18.000Z | 2019-05-30T00:59:18.000Z | OLD THINGS/faceid_nabeel.py | AmirQadir/Auto-Object-Detection-and-Tracker | 24c6f4d18b0496ef19250ccc42f53a7f1f42ed3f | [
"MIT"
] | null | null | null | OLD THINGS/faceid_nabeel.py | AmirQadir/Auto-Object-Detection-and-Tracker | 24c6f4d18b0496ef19250ccc42f53a7f1f42ed3f | [
"MIT"
] | null | null | null | from FaceID import faceID
import numpy as np
import cv2 as cv
from matplotlib import pyplot as plt
img1 = cv.imread('nabeel.jpg',0) # queryImage
img2 = cv.imread('nabeel_train.jpg',0) # trainImage
print(img1.shape)
rec = faceID()
print("constructor finished")
# crop_img_2 = getCroppedImage(rec,crop_img_2) accepts image in np arary
print(img1.shape)
img1 = cv.resize(img1,(100,100),interpolation=cv.INTER_AREA)
print(img1.shape)
img1 = rec.prewhiten2(img1)
print(img1.shape)
# print("whiten finished")
embeds = rec.getEmbed(img1)
# print("embedding finished")
| 23.08 | 72 | 0.743501 | from FaceID import faceID
import numpy as np
import cv2 as cv
from matplotlib import pyplot as plt
img1 = cv.imread('nabeel.jpg',0)
img2 = cv.imread('nabeel_train.jpg',0)
print(img1.shape)
rec = faceID()
print("constructor finished")
print(img1.shape)
img1 = cv.resize(img1,(100,100),interpolation=cv.INTER_AREA)
print(img1.shape)
img1 = rec.prewhiten2(img1)
print(img1.shape)
embeds = rec.getEmbed(img1)
| true | true |
f71a234f7d07452f93e0a92a0eb80a7ca5668a4f | 5,007 | py | Python | maps/tests/09.py | wayne-wang-1119/maps-project-cs88 | ad330291042cd659142b1db4d5875fec5ebcfa90 | [
"MIT"
] | null | null | null | maps/tests/09.py | wayne-wang-1119/maps-project-cs88 | ad330291042cd659142b1db4d5875fec5ebcfa90 | [
"MIT"
] | null | null | null | maps/tests/09.py | wayne-wang-1119/maps-project-cs88 | ad330291042cd659142b1db4d5875fec5ebcfa90 | [
"MIT"
] | null | null | null | test = {
'name': 'Problem 9',
'points': 4,
'suites': [
{
'cases': [
{
'answer': 'restaurant names',
'choices': [
'restaurant names',
'restaurants',
'restaurant ratings'
],
'hidden': False,
'locked': False,
'question': 'rate_all returns a dictionary. What are the keys of this dictionary?'
},
{
'answer': 'numbers - a mix of user ratings and predicted ratings',
'choices': [
'numbers - a mix of user ratings and predicted ratings',
'numbers - user ratings only',
'numbers - predicted ratings only',
'numbers - mean restaurant ratings',
'lists - list of all restaurant ratings'
],
'hidden': False,
'locked': False,
'question': 'What are the values of the returned dictionary?'
},
{
'answer': 'a list of restaurants reviewed by the user',
'choices': [
'a list of restaurants reviewed by the user',
'a list of all possible restaurants',
'a list of ratings for restaurants reviewed by the user'
],
'hidden': False,
'locked': False,
'question': 'In rate_all, what does the variable reviewed represent?'
}
],
'scored': False,
'type': 'concept'
},
{
'cases': [
{
'code': r"""
>>> user = make_user('Mr. Mean Rating Minus One', [
... make_review('A', 3),
... make_review('B', 4),
... make_review('C', 1),
... ])
>>> cluster = [
... make_restaurant('A', [1, 2], [], 4, [
... make_review('A', 4),
... make_review('A', 4)
... ]),
... make_restaurant('B', [4, 2], [], 3, [
... make_review('B', 5)
... ]),
... make_restaurant('C', [-2, 6], [], 4, [
... make_review('C', 2)
... ]),
... make_restaurant('D', [4, 4], [], 3.5, [
... make_review('D', 2.5),
... make_review('D', 3.5),
... ]),
... ]
>>> restaurants = {restaurant_name(r): r for r in cluster}
>>> recommend.ALL_RESTAURANTS = cluster
>>> to_rate = cluster[2:]
>>> fns = [restaurant_price, restaurant_mean_rating]
>>> ratings = rate_all(user, to_rate, fns)
>>> type(ratings)
<class 'dict'>
>>> len(ratings) # Only the restaurants passed to rate_all
2
>>> ratings['C'] # A restaurant rated by the user (should be an integer)
1
>>> round(ratings['D'], 5) # A predicted rating (should be a decimal)
2.0
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> import tests.test_functions as test
>>> import recommend
>>> from recommend import *
""",
'teardown': '',
'type': 'doctest'
},
{
'cases': [
{
'code': r"""
>>> user = make_user('Mr. Mean Rating Minus One', [
... make_review('A', 3),
... make_review('B', 4),
... make_review('C', 1),
... ])
>>> cluster = [
... make_restaurant('A', [1, 2], [], 4, [
... make_review('A', 4),
... make_review('A', 4)
... ]),
... make_restaurant('B', [4, 2], [], 3, [
... make_review('B', 5)
... ]),
... make_restaurant('C', [-2, 6], [], 4, [
... make_review('C', 2)
... ]),
... make_restaurant('D', [4, 4], [], 3.5, [
... make_review('D', 2.5),
... make_review('D', 3.5),
... ]),
... ]
>>> recommend.ALL_RESTAURANTS = cluster
>>> to_rate = cluster[2:]
>>> fns = [restaurant_price, restaurant_mean_rating]
>>> ratings = rate_all(user, to_rate, fns)
>>> type(ratings)
<class 'dict'>
>>> len(ratings) # Only the restaurants passed to rate_all
2
>>> ratings['C'] # A restaurant rated by the user (should be an integer)
1
>>> round(ratings['D'], 5) # A predicted rating (should be a decimal)
2.0
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> import tests.test_functions as test
>>> import recommend
>>> test.swap_implementations(recommend)
>>> from recommend import *
""",
'teardown': r"""
>>> test.restore_implementations(recommend)
""",
'type': 'doctest'
}
]
}
| 32.512987 | 92 | 0.425205 | test = {
'name': 'Problem 9',
'points': 4,
'suites': [
{
'cases': [
{
'answer': 'restaurant names',
'choices': [
'restaurant names',
'restaurants',
'restaurant ratings'
],
'hidden': False,
'locked': False,
'question': 'rate_all returns a dictionary. What are the keys of this dictionary?'
},
{
'answer': 'numbers - a mix of user ratings and predicted ratings',
'choices': [
'numbers - a mix of user ratings and predicted ratings',
'numbers - user ratings only',
'numbers - predicted ratings only',
'numbers - mean restaurant ratings',
'lists - list of all restaurant ratings'
],
'hidden': False,
'locked': False,
'question': 'What are the values of the returned dictionary?'
},
{
'answer': 'a list of restaurants reviewed by the user',
'choices': [
'a list of restaurants reviewed by the user',
'a list of all possible restaurants',
'a list of ratings for restaurants reviewed by the user'
],
'hidden': False,
'locked': False,
'question': 'In rate_all, what does the variable reviewed represent?'
}
],
'scored': False,
'type': 'concept'
},
{
'cases': [
{
'code': r"""
>>> user = make_user('Mr. Mean Rating Minus One', [
... make_review('A', 3),
... make_review('B', 4),
... make_review('C', 1),
... ])
>>> cluster = [
... make_restaurant('A', [1, 2], [], 4, [
... make_review('A', 4),
... make_review('A', 4)
... ]),
... make_restaurant('B', [4, 2], [], 3, [
... make_review('B', 5)
... ]),
... make_restaurant('C', [-2, 6], [], 4, [
... make_review('C', 2)
... ]),
... make_restaurant('D', [4, 4], [], 3.5, [
... make_review('D', 2.5),
... make_review('D', 3.5),
... ]),
... ]
>>> restaurants = {restaurant_name(r): r for r in cluster}
>>> recommend.ALL_RESTAURANTS = cluster
>>> to_rate = cluster[2:]
>>> fns = [restaurant_price, restaurant_mean_rating]
>>> ratings = rate_all(user, to_rate, fns)
>>> type(ratings)
<class 'dict'>
>>> len(ratings) # Only the restaurants passed to rate_all
2
>>> ratings['C'] # A restaurant rated by the user (should be an integer)
1
>>> round(ratings['D'], 5) # A predicted rating (should be a decimal)
2.0
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> import tests.test_functions as test
>>> import recommend
>>> from recommend import *
""",
'teardown': '',
'type': 'doctest'
},
{
'cases': [
{
'code': r"""
>>> user = make_user('Mr. Mean Rating Minus One', [
... make_review('A', 3),
... make_review('B', 4),
... make_review('C', 1),
... ])
>>> cluster = [
... make_restaurant('A', [1, 2], [], 4, [
... make_review('A', 4),
... make_review('A', 4)
... ]),
... make_restaurant('B', [4, 2], [], 3, [
... make_review('B', 5)
... ]),
... make_restaurant('C', [-2, 6], [], 4, [
... make_review('C', 2)
... ]),
... make_restaurant('D', [4, 4], [], 3.5, [
... make_review('D', 2.5),
... make_review('D', 3.5),
... ]),
... ]
>>> recommend.ALL_RESTAURANTS = cluster
>>> to_rate = cluster[2:]
>>> fns = [restaurant_price, restaurant_mean_rating]
>>> ratings = rate_all(user, to_rate, fns)
>>> type(ratings)
<class 'dict'>
>>> len(ratings) # Only the restaurants passed to rate_all
2
>>> ratings['C'] # A restaurant rated by the user (should be an integer)
1
>>> round(ratings['D'], 5) # A predicted rating (should be a decimal)
2.0
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> import tests.test_functions as test
>>> import recommend
>>> test.swap_implementations(recommend)
>>> from recommend import *
""",
'teardown': r"""
>>> test.restore_implementations(recommend)
""",
'type': 'doctest'
}
]
}
| true | true |
f71a245fa32058c020191858dd725ba966da6364 | 728 | py | Python | unstar_github.py | ashwinvis/zotero-tools | fa4ede2382ba6d462325b7cb08c66575cf87ce20 | [
"Apache-2.0"
] | null | null | null | unstar_github.py | ashwinvis/zotero-tools | fa4ede2382ba6d462325b7cb08c66575cf87ce20 | [
"Apache-2.0"
] | null | null | null | unstar_github.py | ashwinvis/zotero-tools | fa4ede2382ba6d462325b7cb08c66575cf87ce20 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
import random
import time
from pygithub import Github
# Ref:
# https://pygithub.readthedocs.io/en/latest/introduction.html#very-short-tutorial
# If you are using an access token to circumvent 2FA, make sure you have
# enabled "repo" scope
g = Github("username", "password")
me = g.get_user()
starred = me.get_starred()
for repo in starred:
print("Unstarring", repo)
me.remove_from_starred(repo)
time.sleep(1 + random.random()) # try to avoid rate-limit
# Troubleshooting
# https://developer.github.com/v3/activity/starring/#unstar-a-repository
# Debug using curl:
# $ curl -H "Authorization: token $INSERT_ACCESS_TOKEN" \
# "https://api.github.com/user/starred/<owner>/<repo>" -i -s -X DELETE
| 30.333333 | 81 | 0.725275 |
import random
import time
from pygithub import Github
ame", "password")
me = g.get_user()
starred = me.get_starred()
for repo in starred:
print("Unstarring", repo)
me.remove_from_starred(repo)
time.sleep(1 + random.random())
| true | true |
f71a24882b5c3b3d085f16743970960081031c33 | 1,508 | py | Python | conda_tools/pack_non_conda.py | Amber-MD/ambertools-binary-build | 257f25cfbe829ee080807c6086d6edf8ec78c534 | [
"MIT"
] | 4 | 2018-12-02T19:43:52.000Z | 2019-12-14T01:15:50.000Z | conda_tools/pack_non_conda.py | Amber-MD/ambertools-binary-build | 257f25cfbe829ee080807c6086d6edf8ec78c534 | [
"MIT"
] | 15 | 2017-09-03T03:37:27.000Z | 2020-10-07T15:19:56.000Z | conda_tools/pack_non_conda.py | Amber-MD/ambertools-binary-build | 257f25cfbe829ee080807c6086d6edf8ec78c534 | [
"MIT"
] | 1 | 2021-06-01T19:18:54.000Z | 2021-06-01T19:18:54.000Z | # Aim: Mostly for phenix users and those don't like using Miniconda
# 1. wget url_to_tar_file.tar
# 2. tar -xf url_to_tar_file.tar
# 3. source amber17/ambersh
# 4. Just it
""" Usage example: python pack_non_conda.py ambertools-17.0.1-py27_1.tar.bz2
Note: You can use file pattern
This script will unpack that bz2 file, then do some editing, then pack it to ./non-conda-install folder.
This should be done after doing conda-build
"""
import os
import subprocess
from glob import glob
import argparse
# local file, in the same folder as this script
from edit_package import editing_conda_package
import update_shebang
def main():
parser = argparse.ArgumentParser()
parser.add_argument('tarfile', nargs='?', help='targer file')
parser.add_argument(
"--output-dir",
type=str,
default='./non-conda-install',
dest="output_dir",
help="output directory")
parser.add_argument(
"--date", action="store_true", help="Add date to output tarfile")
parser.add_argument("-d", "--dry_run", action="store_true", help="dry run")
opt = parser.parse_args()
pack_non_conda_package(opt)
def pack_non_conda_package(opt):
with editing_conda_package(
opt.tarfile,
output_dir=opt.output_dir,
add_date=opt.date,
dry_run=opt.dry_run):
update_shebang.update_python_env('./bin/')
# No need to copy here since we alread done in conda build step?
if __name__ == '__main__':
main()
| 27.925926 | 104 | 0.68634 |
# 1. wget url_to_tar_file.tar
# 2. tar -xf url_to_tar_file.tar
# 3. source amber17/ambersh
# 4. Just it
import os
import subprocess
from glob import glob
import argparse
# local file, in the same folder as this script
from edit_package import editing_conda_package
import update_shebang
def main():
parser = argparse.ArgumentParser()
parser.add_argument('tarfile', nargs='?', help='targer file')
parser.add_argument(
"--output-dir",
type=str,
default='./non-conda-install',
dest="output_dir",
help="output directory")
parser.add_argument(
"--date", action="store_true", help="Add date to output tarfile")
parser.add_argument("-d", "--dry_run", action="store_true", help="dry run")
opt = parser.parse_args()
pack_non_conda_package(opt)
def pack_non_conda_package(opt):
with editing_conda_package(
opt.tarfile,
output_dir=opt.output_dir,
add_date=opt.date,
dry_run=opt.dry_run):
update_shebang.update_python_env('./bin/')
# No need to copy here since we alread done in conda build step?
if __name__ == '__main__':
main()
| true | true |
f71a24ca46c0edd3de051b4f157eaa8487ab5b5d | 2,561 | py | Python | remoteSwitch/lib/rotation.py | zkity/remoteSwitch | 1b66baab87c81a9b79de7b161173fb0c75c03291 | [
"MIT"
] | 1 | 2021-02-19T11:24:41.000Z | 2021-02-19T11:24:41.000Z | remoteSwitch/lib/rotation.py | zkity/remoteSwitch | 1b66baab87c81a9b79de7b161173fb0c75c03291 | [
"MIT"
] | null | null | null | remoteSwitch/lib/rotation.py | zkity/remoteSwitch | 1b66baab87c81a9b79de7b161173fb0c75c03291 | [
"MIT"
] | null | null | null | '''
这段代码源于网上
原文请见 https://my.oschina.net/hechunc/blog/3020284
'''
import RPi.GPIO as GPIO
import time
# This class drives a single SG90 servo module.
# (original comments were in Chinese; translated to English here)
class Rotation:
    frequency=50       # PWM pulse frequency (Hz)
    delta_theta=0.2    # step angle of one incremental move (degrees)
    min_delay=0.0006   # theoretical time needed to rotate delta_theta (s)
    max_delay=0.4      # time needed to sweep from 0 to 180 degrees (s)
    def __init__(self,channel,min_theta,max_theta,init_theta=0):
        """
        Constructor:
            channel: BCM pin number the servo signal wire is attached to
            min_theta: minimum rotation angle; values outside [0, 180] fall back to 0
            max_theta: maximum rotation angle; values outside [0, 180] fall back to 180
            init_theta: initial angle; when outside the sanitized
                        [min_theta, max_theta] range the midpoint is used
        """
        self.channel=channel
        if(min_theta<0 or min_theta>180):
            self.min_theta=0
        else:
            self.min_theta=min_theta
        if(max_theta<0 or max_theta>180):
            self.max_theta=180
        else:
            self.max_theta=max_theta
        # Bug fix: validate init_theta against the *sanitized* bounds
        # (self.min_theta / self.max_theta) instead of the raw arguments;
        # otherwise an invalid min_theta/max_theta (e.g. -10 or 200) could
        # let an out-of-range initial angle slip through.
        if(init_theta<self.min_theta or init_theta>self.max_theta):
            self.init_theta=(self.min_theta+self.max_theta)/2
        else:
            self.init_theta=init_theta  # initial angle
        # Duty cycles of the angle limits (SG90: 2.5% at 0 deg .. 12.5% at 180 deg)
        self.min_dutycycle=2.5+self.min_theta*10/180
        self.max_dutycycle=2.5+self.max_theta*10/180
    def setup(self):
        """
        Initialize the GPIO pin and move the servo to the initial angle.
        """
        GPIO.setmode(GPIO.BCM)
        GPIO.setwarnings(False)
        GPIO.setup(self.channel,GPIO.OUT)
        self.pwm=GPIO.PWM(self.channel,Rotation.frequency)  # PWM handle
        self.dutycycle=2.5+self.init_theta*10/180           # initial duty cycle
        self.pwm.start(self.dutycycle)                      # drive to initial position
        time.sleep(Rotation.max_delay)
    def positiveRotation(self):
        """
        Step forward: each call rotates by delta_theta degrees,
        clamped at max_dutycycle.
        """
        self.dutycycle=self.dutycycle+Rotation.delta_theta*10/180
        if self.dutycycle>self.max_dutycycle:
            self.dutycycle=self.max_dutycycle
        self.pwm.ChangeDutyCycle(self.dutycycle)
        time.sleep(Rotation.min_delay)
    def reverseRotation(self):
        """
        Step backward: each call rotates by delta_theta degrees,
        clamped at min_dutycycle.
        """
        self.dutycycle=self.dutycycle-Rotation.delta_theta*10/180
        if self.dutycycle<self.min_dutycycle:
            self.dutycycle=self.min_dutycycle
        self.pwm.ChangeDutyCycle(self.dutycycle)
        time.sleep(Rotation.min_delay)
    def specifyRotation(self,theta):
        """
        Rotate to the given absolute angle (degrees).
        NOTE(review): this only rejects angles outside [0, 180]; it does not
        clamp to the configured [min_theta, max_theta] range like the step
        methods do -- confirm whether callers rely on the full sweep before
        tightening it.
        """
        if(theta<0 or theta>180):
            return
        self.dutycycle=2.5+theta*10/180
        self.pwm.ChangeDutyCycle(self.dutycycle)
        time.sleep(Rotation.max_delay)
    def cleanup(self):
        """Stop the PWM output and release the GPIO resources."""
        self.pwm.stop()
        time.sleep(Rotation.min_delay)
        GPIO.cleanup()
| 28.455556 | 65 | 0.609137 | import RPi.GPIO as GPIO
import time
class Rotation:
frequency=50
delta_theta=0.2
min_delay=0.0006
max_delay=0.4
def __init__(self,channel,min_theta,max_theta,init_theta=0):
self.channel=channel
if(min_theta<0 or min_theta>180):
self.min_theta=0
else:
self.min_theta=min_theta
if(max_theta<0 or max_theta>180):
self.max_theta=180
else:
self.max_theta=max_theta
if(init_theta<min_theta or init_theta>max_theta):
self.init_theta=(self.min_theta+self.max_theta)/2
else:
self.init_theta=init_theta
self.min_dutycycle=2.5+self.min_theta*10/180
self.max_dutycycle=2.5+self.max_theta*10/180
def setup(self):
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(self.channel,GPIO.OUT)
self.pwm=GPIO.PWM(self.channel,Rotation.frequency)
self.dutycycle=2.5+self.init_theta*10/180
self.pwm.start(self.dutycycle)
time.sleep(Rotation.max_delay)
def positiveRotation(self):
self.dutycycle=self.dutycycle+Rotation.delta_theta*10/180
if self.dutycycle>self.max_dutycycle:
self.dutycycle=self.max_dutycycle
self.pwm.ChangeDutyCycle(self.dutycycle)
time.sleep(Rotation.min_delay)
def reverseRotation(self):
self.dutycycle=self.dutycycle-Rotation.delta_theta*10/180
if self.dutycycle<self.min_dutycycle:
self.dutycycle=self.min_dutycycle
self.pwm.ChangeDutyCycle(self.dutycycle)
time.sleep(Rotation.min_delay)
def specifyRotation(self,theta):
if(theta<0 or theta>180):
return
self.dutycycle=2.5+theta*10/180
self.pwm.ChangeDutyCycle(self.dutycycle)
time.sleep(Rotation.max_delay)
def cleanup(self):
self.pwm.stop()
time.sleep(Rotation.min_delay)
GPIO.cleanup()
| true | true |
f71a2762ffafdc8fa41231f81f930197ee062c98 | 15,596 | py | Python | trainer.py | a-maumau/pixel_objectness.pytorch | f5acb972be694662d839b99eb33e66a807d6031e | [
"MIT"
] | 4 | 2018-10-28T14:44:24.000Z | 2019-10-27T11:27:12.000Z | trainer.py | a-maumau/pixel_objectness.pytorch | f5acb972be694662d839b99eb33e66a807d6031e | [
"MIT"
] | 2 | 2019-05-10T15:01:45.000Z | 2019-10-11T09:47:51.000Z | trainer.py | a-maumau/pixel_objectness.pytorch | f5acb972be694662d839b99eb33e66a807d6031e | [
"MIT"
] | null | null | null | import os
import math
import argparse
from datetime import datetime
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from tqdm import tqdm
from PIL import Image
import data_loader
from mau_ml_util.train_logger import TrainLogger
#from mau_ml_util.metric import SegmentationMetric
from metric_from_latest_mmu import SegmentationMetric
from templates import Template_Trainer
torch.backends.cudnn.benchmark = True
class ColorMap(object):
    """Linear color ramp over a list of anchor colors.

    Calling an instance with a scalar in [0, 1] interpolates between the
    anchor colors (default ramp: blue -> cyan -> green -> yellow -> red).
    """
    def __init__(self, base_color=[[0,0,1], [0,1,1], [0,1,0], [1,1,0], [1,0,0]]):
        """
        base_color: list of [R, G, B] anchors, each component in [0, 1].
        """
        self.base_color = base_color
        self.num_color_min1 = len(self.base_color)-1
    def __call__(self, val):
        return self.to_colormap(val)
    def to_colormap(self, val):
        """
        Return an (R, G, B) tuple with each component in [0, 1].
        """
        blend = 0
        if val <= 0:
            # below the ramp: clamp to the first anchor
            lo = hi = 0
        elif val >= 1:
            # above the ramp: clamp to the last anchor
            lo = hi = self.num_color_min1
        else:
            scaled = val * (self.num_color_min1)
            lo = math.floor(scaled)
            hi = lo + 1
            blend = scaled - lo
        start, end = self.base_color[lo], self.base_color[hi]
        return tuple((end[k] - start[k])*blend + start[k] for k in range(3))
class Trainer_PixelObjectness(Template_Trainer):
    """Training/validation driver for a Pixel Objectness segmentation model.

    Wires together the model, optimizer, learning-rate policy, data loaders
    and a TrainLogger; supports both epoch-wise and iteration-wise schedules
    (selected by the lr_policy's ``iteration_wise`` flag).
    """
    def __init__(self, args, model, optimizer, lr_policy):
        """
        args: parsed command-line namespace (hyper-parameters and paths)
        model: segmentation network exposing loss()/inference()/save()
        optimizer: torch optimizer over the model parameters
        lr_policy: learning-rate decay policy; its ``iteration_wise`` flag
                   chooses iteration-wise vs. epoch-wise bookkeeping
        """
        self.args = args
        self.lr_policy = lr_policy
        self.iter_wise = self.lr_policy.iteration_wise
        # for logging the training: validation log schema, one precision
        # column and one IoU column per class
        val_head = ["iter" if self.iter_wise else "epoch", "mean_pixel_accuracy"]
        for i in range(self.args.class_num):
            val_head.append("mean_precision_class_{}".format(i))
        for i in range(self.args.class_num):
            val_head.append("mean_IoU_class_{}".format(i))
        self.tlog = self.get_train_logger({"train":["iter" if self.iter_wise else "epoch", "batch_mean_total_loss"], "val":val_head},
                                          save_dir=self.args.save_dir, save_name=self.args.save_name, arguments=self.get_argparse_arguments(self.args),
                                          use_http_server=self.args.use_http_server, use_msg_server=self.args.use_msg_server, notificate=False,
                                          visualize_fetch_stride=self.args.viz_fetch_stride, http_port=self.args.http_server_port, msg_port=self.args.msg_server_port)
        # paths
        self.save_dir = self.tlog.log_save_path
        self.model_param_dir = self.tlog.mkdir("model_param")
        if torch.cuda.is_available() and not self.args.nogpu:
            self.map_device = torch.device('cuda:{}'.format(self.args.gpu_device_num))
        else:
            self.map_device = torch.device('cpu')
        self.model = model
        if torch.cuda.is_available() and not args.nogpu:
            self.model = self.model.to(self.map_device)
        self.optimizer = optimizer
        # normalization uses mean/std of 0.5 per channel; the commented pair
        # is the ImageNet statistics alternative
        self.train_loader = data_loader.get_train_loader(self.args, [(0.5, 0.5, 0.5),(0.5, 0.5, 0.5)])#[(0.485, 0.456, 0.406),(0.229, 0.224, 0.225)])
        self.val_loader = data_loader.get_val_loader(self.args, [(0.5, 0.5, 0.5),(0.5, 0.5, 0.5)])
        self.cmap = self._gen_cmap()
        if self.args.show_parameters:
            for idx, m in enumerate(model.modules()):
                print(idx, '->', m)
            print(args)
        print("\nsaving at {}\n".format(self.save_dir))
    # PASCAL VOC color maps
    # borrowed from https://gist.github.com/wllhf/a4533e0adebe57e3ed06d4b50c8419ae
    def _gen_cmap_voc(self, class_num=255):
        """Generate the PASCAL VOC palette as a (class_num+1, 3) uint8 array."""
        def bitget(byteval, idx):
            return ((byteval & (1 << idx)) != 0)
        cmap = np.zeros((class_num+1, 3), dtype='uint8')
        for i in range(class_num+1):
            r = g = b = 0
            c = i
            for j in range(8):
                r = r | (bitget(c, 0) << 7-j)
                g = g | (bitget(c, 1) << 7-j)
                b = b | (bitget(c, 2) << 7-j)
                c = c >> 3
            cmap[i] = np.array([r, g, b])
        return cmap
    def _gen_cmap(self, max_value=255):
        """Build a heat-map palette: list of max_value+1 uint8 RGB triples."""
        mapper = ColorMap()
        cmap = []
        for v in range(max_value+1):
            cmap.append(np.uint8(np.array(mapper(v/max_value))*255))
        return cmap
    def convert_to_color_map(self, img_array, color_map=None, class_num=255):
        """
        Map each value of ``img_array`` to its palette color.

        img_array: numpy.ndarray
            shape must be (width, height)
        color_map: optional palette indexable by value; generated when None
        class_num: highest value expected in img_array
        Returns an RGB uint8 image of shape (width, height, 3).
        """
        if color_map is None:
            color_map = self._gen_cmap()
        new_img = np.empty(shape=(img_array.shape[0], img_array.shape[1], 3), dtype='uint8')
        for c in range(class_num+1):
            index = np.where(img_array == c)
            new_img[index] = color_map[c]
        return new_img
    def validate(self, count):
        """Run one validation pass under ``torch.no_grad()``.

        Logs pixel accuracy and per-class precision/IoU, and dumps sample
        images (input / probability color map / masks) for the first batch.
        count: current iteration or epoch number, used as the log key.
        """
        with torch.no_grad():
            self.model.eval()
            # logging
            pix_acc = 0.0
            precision_class = []
            jaccard_class = []
            #data_count_precision = [0 for i in range(self.args.class_num)]
            #data_count_jaccard = [0 for i in range(self.args.class_num)]
            metric = SegmentationMetric(self.args.class_num, map_device=self.map_device)
            if self.args.quiet:
                _trainval_loader = self.val_loader
            else:
                _trainval_loader = self.to_tqdm(self.val_loader, desc="train val")
            for b, (image, mask, original_image) in enumerate(_trainval_loader):
                batch_size = image.shape[0]
                img = self.format_tensor(image, requires_grad=False, map_device=self.map_device)
                mask = self.format_tensor(mask, requires_grad=False, map_device=self.map_device)
                outputs, prob_maps = self.model.inference(img)
                # upsample predictions back to the input crop resolution
                outputs = F.interpolate(outputs, size=[self.args.crop_size, self.args.crop_size], mode='bilinear', align_corners=False)
                prob_maps = F.interpolate(prob_maps, size=[self.args.crop_size, self.args.crop_size], mode='bilinear', align_corners=False)
                metric(outputs, mask)
                # save only few batch for sample
                if b < 1:
                    self.tlog.setup_output("{}_{}_batch_{}_sample".format("iter" if self.iter_wise else "epoch", count, b))
                    # test color image
                    #test_img = np.ones((256,256))
                    #for i in range(256):
                    #    test_img[i] = test_img[i]*i
                    #
                    #self.tlog.pack_output(Image.fromarray(self.convert_to_color_map(np.uint8(test_img))))
                    for n in range(batch_size):
                        self.tlog.pack_output(Image.fromarray(np.uint8(original_image[n].detach().numpy())))
                        pred_img = np.uint8(outputs[n].squeeze(0).cpu().detach().numpy())
                        prob_img = prob_maps[n].squeeze(0).cpu().detach().numpy()
                        self.tlog.pack_output(Image.fromarray(pred_img*255), not_in_schema=True)
                        self.tlog.pack_output(Image.fromarray(self.convert_to_color_map(np.uint8(prob_img[1]*255), self.cmap)))
                        gt_img = np.uint8(mask[n].cpu().detach().numpy())
                        self.tlog.pack_output(Image.fromarray(gt_img*255), not_in_schema=True)
                        self.tlog.pack_output(None, " ")
                    self.tlog.pack_output(None, "validation sample", ["left: input", "center: pred cmap", "right: output mask"])
                    self.tlog.flush_output()
            pix_acc = metric.calc_pix_acc()
            precision = metric.calc_mean_precision()
            jaccard_index = metric.calc_mean_jaccard_index()
            # might I should return the non evaluated class with nan and filter the list
            # by filter(lambda n: n!=float("nan"), items)
            for class_id in range(self.args.class_num):
                precision_class.append(precision["class_{}".format(class_id)])
                jaccard_class.append(jaccard_index["class_{}".format(class_id)])
                #data_count_precision[class_id] += len(precision["class_{}".format(str(class_id))])
                #data_count_jaccard[class_id] += len(jaccard_index["class_{}".format(str(class_id))])
            # logging, this implementation is not caring missing value
            #mean_precision_classes = [y/x if x > 0 else 0 for y, x in zip(precision_class, data_count_precision)]
            #mean_iou_classes = [y/x if x > 0 else 0 for y, x in zip(jaccard_class, data_count_jaccard)]
            # calc. without background (class 0 is excluded from the means)
            log_msg_data = [count, pix_acc, np.mean(precision_class[1:]), np.mean(jaccard_class[1:])]
            self.tlog.log("val", [count, pix_acc]+precision_class+jaccard_class)
            self.tlog.log_message("[{}] mean pix acc.:{:.5f}, precision:{:.5f}, IoU:{:.5f}".format(*log_msg_data), "LOG", "validation")
            if not self.args.quiet:
                tqdm.write("[{}] mean pix acc.:{:.5f}, precision:{:.5f}, IoU:{:.5f}".format(*log_msg_data))
        self.model.train()
    def train(self):
        """Main optimization loop.

        Iterates over epochs/batches, backpropagates the model loss, and —
        depending on ``self.iter_wise`` — decays the learning rate, runs
        validation, logs the mean loss and checkpoints either per iteration
        or per epoch. Stops early at ``args.max_iter`` in iteration mode and
        always saves a final checkpoint at the end.
        """
        train_finish = False
        if self.args.quiet:
            epochs = range(1, self.args.epochs+1)
        else:
            epochs = self.to_tqdm(range(1, self.args.epochs+1), desc="train")
        curr_iter = 0
        epoch = 0
        total_loss = 0.0
        data_num = 0
        # for epoch wise and iter wise
        # NOTE(review): this dict is built once with the initial zeros; it is
        # never refreshed with the advancing counters — confirm the lr_policy
        # does not rely on these values.
        decay_arg = {"curr_iter":curr_iter, "curr_epoch":epoch}
        for epoch in epochs:
            if not self.iter_wise:
                total_loss = 0.0
                data_num = 0
            if self.args.quiet:
                _train_loader = self.train_loader
            else:
                _train_loader = self.to_tqdm(self.train_loader)
            for img, mask in _train_loader:
                # loss log will be showed in size averaged
                data_num += 1
                self.optimizer.zero_grad()
                images = self.format_tensor(img, map_device=self.map_device)
                masks = self.format_tensor(mask, map_device=self.map_device)
                output = self.model(images)
                output = F.interpolate(output, size=[self.args.crop_size, self.args.crop_size], mode='bilinear', align_corners=False)
                batch_loss = self.model.loss(output, masks)
                total_loss += batch_loss.item()
                batch_loss.backward()
                self.optimizer.step()
                curr_iter += 1
                if not self.args.quiet:
                    _train_loader.set_description("{: 3d}: train[{}] loss: {:.5f}".format(curr_iter if self.iter_wise else epoch, self.args.save_name, total_loss/data_num))
                if self.iter_wise:
                    self.lr_policy.decay_lr(**decay_arg)
                    if curr_iter % self.args.trainval_every == 0:
                        self.validate(curr_iter)
                    if curr_iter % self.args.save_every == 0:
                        state = {'iter': curr_iter,
                                 'optimizer_state_dict' : self.optimizer.state_dict()}
                        self.model.save(add_state=state, file_name=os.path.join(self.model_param_dir,'model_param_iter{}.pth'.format(curr_iter)))
                        self.tlog.log_message("[iter:{}] model saved.".format(curr_iter), "LOG", "train")
                    if curr_iter % self.args.log_every == 0:
                        if not self.args.quiet:
                            tqdm.write("[#{: 3d}] {} iter mean loss: {:.5f}".format(curr_iter, self.args.log_every, total_loss/data_num))
                        self.tlog.log("train", [curr_iter, float(total_loss/data_num)])
                        self.tlog.log_message("[{}] {} iter mean loss:{:.5f}".format("iter:{}".format(curr_iter), self.args.log_every, float(total_loss/data_num)), "LOG", "train")
                        total_loss = 0
                        data_num = 0
                    if curr_iter == self.args.max_iter:
                        train_finish = True
                        _train_loader.close()
                        break
            if train_finish:
                epochs.close()
                break
            if not self.iter_wise:
                if not self.args.quiet:
                    tqdm.write("[# {: 3d}] batch mean loss: {:.5f}".format(epoch, total_loss/data_num))
                if epoch % self.args.log_every == 0:
                    self.tlog.log("train", [epoch, float(total_loss/data_num)])
                    self.tlog.log_message("[{}] batch mean loss:{:.5f}".format("epoch:{}".format(epoch), float(total_loss/data_num)), "LOG", "train")
                # check train validation
                if epoch % self.args.trainval_every == 0:
                    self.validate(epoch)
                self.lr_policy.decay_lr(**decay_arg)
                #if epoch % self.args.decay_every == 0:
                #    for param_group in self.optimizer.param_groups:
                #        param_group['lr'] *= self.args.decay_value
                #
                #    self.tlog.log_message("[epoch:{}] decay learning rate by {}".format(epoch, self.args.decay_value), "LOG", "train")
                # save model
                if epoch % self.args.save_every == 0:
                    state = {'epoch': epoch,
                             'optimizer_state_dict' : self.optimizer.state_dict()}
                    self.model.save(add_state=state, file_name=os.path.join(self.model_param_dir,'model_param_e{}.pth'.format(epoch)))
                    self.tlog.log_message("[epoch:{}] model saved.".format(epoch), "LOG", "train")
        # final checkpoint, timestamped so it never collides with periodic saves
        self.model.save(add_state={'optimizer_state_dict' : self.optimizer.state_dict()},
                        file_name=os.path.join(self.model_param_dir, 'model_param_fin_{}.pth'.format(datetime.now().strftime("%Y%m%d_%H-%M-%S"))))
        print("data is saved at {}".format(self.save_dir))
    def test_loader(self):
        """Debug helper: visually inspect the training loader by showing each
        input image next to its color-mapped mask with matplotlib.
        """
        from matplotlib import pylab as plt
        import time  # NOTE(review): unused import
        if self.args.quiet:
            epochs = range(1, self.args.epochs+1)
        else:
            epochs = self.to_tqdm(range(1, self.args.epochs+1), desc="train")
        for epoch in epochs:
            if self.args.quiet:
                _train_loader = self.train_loader
            else:
                _train_loader = self.to_tqdm(self.train_loader)
            for img, mask in _train_loader:
                batch_size = img.shape[0]
                img = img.numpy()
                mask = mask.numpy()
                for i in range(batch_size):
                    # CHW float [0,1] -> HWC uint8 for display
                    _img = np.uint8(img[i]*255).transpose(1,2,0)
                    _mask = self.convert_to_color_map(np.uint8(mask[i]), self.cmap)
                    merged_img = np.concatenate([_img, _mask], axis=1)
                    plt.imshow(merged_img)
                    plt.show()
| 40.934383 | 179 | 0.55604 | import os
import math
import argparse
from datetime import datetime
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from tqdm import tqdm
from PIL import Image
import data_loader
from mau_ml_util.train_logger import TrainLogger
from metric_from_latest_mmu import SegmentationMetric
from templates import Template_Trainer
torch.backends.cudnn.benchmark = True
class ColorMap(object):
def __init__(self, base_color=[[0,0,1], [0,1,1], [0,1,0], [1,1,0], [1,0,0]]):
self.base_color = base_color
self.num_color_min1 = len(self.base_color)-1
def __call__(self, val):
return self.to_colormap(val)
def to_colormap(self, val):
fract_between = 0
if val <= 0:
idx1 = idx2 = 0
elif val >= 1:
idx1 = idx2 = self.num_color_min1
else:
val = val * (self.num_color_min1)
idx1 = math.floor(val);
idx2 = idx1+1;
fract_between = val - idx1
r = (self.base_color[idx2][0] - self.base_color[idx1][0])*fract_between + self.base_color[idx1][0]
g = (self.base_color[idx2][1] - self.base_color[idx1][1])*fract_between + self.base_color[idx1][1]
b = (self.base_color[idx2][2] - self.base_color[idx1][2])*fract_between + self.base_color[idx1][2]
return (r,g,b)
class Trainer_PixelObjectness(Template_Trainer):
def __init__(self, args, model, optimizer, lr_policy):
self.args = args
self.lr_policy = lr_policy
self.iter_wise = self.lr_policy.iteration_wise
val_head = ["iter" if self.iter_wise else "epoch", "mean_pixel_accuracy"]
for i in range(self.args.class_num):
val_head.append("mean_precision_class_{}".format(i))
for i in range(self.args.class_num):
val_head.append("mean_IoU_class_{}".format(i))
self.tlog = self.get_train_logger({"train":["iter" if self.iter_wise else "epoch", "batch_mean_total_loss"], "val":val_head},
save_dir=self.args.save_dir, save_name=self.args.save_name, arguments=self.get_argparse_arguments(self.args),
use_http_server=self.args.use_http_server, use_msg_server=self.args.use_msg_server, notificate=False,
visualize_fetch_stride=self.args.viz_fetch_stride, http_port=self.args.http_server_port, msg_port=self.args.msg_server_port)
self.save_dir = self.tlog.log_save_path
self.model_param_dir = self.tlog.mkdir("model_param")
if torch.cuda.is_available() and not self.args.nogpu:
self.map_device = torch.device('cuda:{}'.format(self.args.gpu_device_num))
else:
self.map_device = torch.device('cpu')
self.model = model
if torch.cuda.is_available() and not args.nogpu:
self.model = self.model.to(self.map_device)
self.optimizer = optimizer
self.train_loader = data_loader.get_train_loader(self.args, [(0.5, 0.5, 0.5),(0.5, 0.5, 0.5)])
self.val_loader = data_loader.get_val_loader(self.args, [(0.5, 0.5, 0.5),(0.5, 0.5, 0.5)])
self.cmap = self._gen_cmap()
if self.args.show_parameters:
for idx, m in enumerate(model.modules()):
print(idx, '->', m)
print(args)
print("\nsaving at {}\n".format(self.save_dir))
def _gen_cmap_voc(self, class_num=255):
def bitget(byteval, idx):
return ((byteval & (1 << idx)) != 0)
cmap = np.zeros((class_num+1, 3), dtype='uint8')
for i in range(class_num+1):
r = g = b = 0
c = i
for j in range(8):
r = r | (bitget(c, 0) << 7-j)
g = g | (bitget(c, 1) << 7-j)
b = b | (bitget(c, 2) << 7-j)
c = c >> 3
cmap[i] = np.array([r, g, b])
return cmap
def _gen_cmap(self, max_value=255):
mapper = ColorMap()
cmap = []
for v in range(max_value+1):
cmap.append(np.uint8(np.array(mapper(v/max_value))*255))
return cmap
def convert_to_color_map(self, img_array, color_map=None, class_num=255):
if color_map is None:
color_map = self._gen_cmap()
new_img = np.empty(shape=(img_array.shape[0], img_array.shape[1], 3), dtype='uint8')
for c in range(class_num+1):
index = np.where(img_array == c)
new_img[index] = color_map[c]
return new_img
def validate(self, count):
with torch.no_grad():
self.model.eval()
pix_acc = 0.0
precision_class = []
jaccard_class = []
metric = SegmentationMetric(self.args.class_num, map_device=self.map_device)
if self.args.quiet:
_trainval_loader = self.val_loader
else:
_trainval_loader = self.to_tqdm(self.val_loader, desc="train val")
for b, (image, mask, original_image) in enumerate(_trainval_loader):
batch_size = image.shape[0]
img = self.format_tensor(image, requires_grad=False, map_device=self.map_device)
mask = self.format_tensor(mask, requires_grad=False, map_device=self.map_device)
outputs, prob_maps = self.model.inference(img)
outputs = F.interpolate(outputs, size=[self.args.crop_size, self.args.crop_size], mode='bilinear', align_corners=False)
prob_maps = F.interpolate(prob_maps, size=[self.args.crop_size, self.args.crop_size], mode='bilinear', align_corners=False)
metric(outputs, mask)
if b < 1:
self.tlog.setup_output("{}_{}_batch_{}_sample".format("iter" if self.iter_wise else "epoch", count, b))
for n in range(batch_size):
self.tlog.pack_output(Image.fromarray(np.uint8(original_image[n].detach().numpy())))
pred_img = np.uint8(outputs[n].squeeze(0).cpu().detach().numpy())
prob_img = prob_maps[n].squeeze(0).cpu().detach().numpy()
self.tlog.pack_output(Image.fromarray(pred_img*255), not_in_schema=True)
self.tlog.pack_output(Image.fromarray(self.convert_to_color_map(np.uint8(prob_img[1]*255), self.cmap)))
gt_img = np.uint8(mask[n].cpu().detach().numpy())
self.tlog.pack_output(Image.fromarray(gt_img*255), not_in_schema=True)
self.tlog.pack_output(None, " ")
self.tlog.pack_output(None, "validation sample", ["left: input", "center: pred cmap", "right: output mask"])
self.tlog.flush_output()
pix_acc = metric.calc_pix_acc()
precision = metric.calc_mean_precision()
jaccard_index = metric.calc_mean_jaccard_index()
for class_id in range(self.args.class_num):
precision_class.append(precision["class_{}".format(class_id)])
jaccard_class.append(jaccard_index["class_{}".format(class_id)])
log_msg_data = [count, pix_acc, np.mean(precision_class[1:]), np.mean(jaccard_class[1:])]
self.tlog.log("val", [count, pix_acc]+precision_class+jaccard_class)
self.tlog.log_message("[{}] mean pix acc.:{:.5f}, precision:{:.5f}, IoU:{:.5f}".format(*log_msg_data), "LOG", "validation")
if not self.args.quiet:
tqdm.write("[{}] mean pix acc.:{:.5f}, precision:{:.5f}, IoU:{:.5f}".format(*log_msg_data))
self.model.train()
def train(self):
train_finish = False
if self.args.quiet:
epochs = range(1, self.args.epochs+1)
else:
epochs = self.to_tqdm(range(1, self.args.epochs+1), desc="train")
curr_iter = 0
epoch = 0
total_loss = 0.0
data_num = 0
decay_arg = {"curr_iter":curr_iter, "curr_epoch":epoch}
for epoch in epochs:
if not self.iter_wise:
total_loss = 0.0
data_num = 0
if self.args.quiet:
_train_loader = self.train_loader
else:
_train_loader = self.to_tqdm(self.train_loader)
for img, mask in _train_loader:
data_num += 1
self.optimizer.zero_grad()
images = self.format_tensor(img, map_device=self.map_device)
masks = self.format_tensor(mask, map_device=self.map_device)
output = self.model(images)
output = F.interpolate(output, size=[self.args.crop_size, self.args.crop_size], mode='bilinear', align_corners=False)
batch_loss = self.model.loss(output, masks)
total_loss += batch_loss.item()
batch_loss.backward()
self.optimizer.step()
curr_iter += 1
if not self.args.quiet:
_train_loader.set_description("{: 3d}: train[{}] loss: {:.5f}".format(curr_iter if self.iter_wise else epoch, self.args.save_name, total_loss/data_num))
if self.iter_wise:
self.lr_policy.decay_lr(**decay_arg)
if curr_iter % self.args.trainval_every == 0:
self.validate(curr_iter)
if curr_iter % self.args.save_every == 0:
state = {'iter': curr_iter,
'optimizer_state_dict' : self.optimizer.state_dict()}
self.model.save(add_state=state, file_name=os.path.join(self.model_param_dir,'model_param_iter{}.pth'.format(curr_iter)))
self.tlog.log_message("[iter:{}] model saved.".format(curr_iter), "LOG", "train")
if curr_iter % self.args.log_every == 0:
if not self.args.quiet:
tqdm.write("[#{: 3d}] {} iter mean loss: {:.5f}".format(curr_iter, self.args.log_every, total_loss/data_num))
self.tlog.log("train", [curr_iter, float(total_loss/data_num)])
self.tlog.log_message("[{}] {} iter mean loss:{:.5f}".format("iter:{}".format(curr_iter), self.args.log_every, float(total_loss/data_num)), "LOG", "train")
total_loss = 0
data_num = 0
if curr_iter == self.args.max_iter:
train_finish = True
_train_loader.close()
break
if train_finish:
epochs.close()
break
if not self.iter_wise:
if not self.args.quiet:
tqdm.write("[# {: 3d}] batch mean loss: {:.5f}".format(epoch, total_loss/data_num))
if epoch % self.args.log_every == 0:
self.tlog.log("train", [epoch, float(total_loss/data_num)])
self.tlog.log_message("[{}] batch mean loss:{:.5f}".format("epoch:{}".format(epoch), float(total_loss/data_num)), "LOG", "train")
if epoch % self.args.trainval_every == 0:
self.validate(epoch)
self.lr_policy.decay_lr(**decay_arg)
if epoch % self.args.save_every == 0:
state = {'epoch': epoch,
'optimizer_state_dict' : self.optimizer.state_dict()}
self.model.save(add_state=state, file_name=os.path.join(self.model_param_dir,'model_param_e{}.pth'.format(epoch)))
self.tlog.log_message("[epoch:{}] model saved.".format(epoch), "LOG", "train")
self.model.save(add_state={'optimizer_state_dict' : self.optimizer.state_dict()},
file_name=os.path.join(self.model_param_dir, 'model_param_fin_{}.pth'.format(datetime.now().strftime("%Y%m%d_%H-%M-%S"))))
print("data is saved at {}".format(self.save_dir))
def test_loader(self):
from matplotlib import pylab as plt
import time
if self.args.quiet:
epochs = range(1, self.args.epochs+1)
else:
epochs = self.to_tqdm(range(1, self.args.epochs+1), desc="train")
for epoch in epochs:
if self.args.quiet:
_train_loader = self.train_loader
else:
_train_loader = self.to_tqdm(self.train_loader)
for img, mask in _train_loader:
batch_size = img.shape[0]
img = img.numpy()
mask = mask.numpy()
for i in range(batch_size):
_img = np.uint8(img[i]*255).transpose(1,2,0)
_mask = self.convert_to_color_map(np.uint8(mask[i]), self.cmap)
merged_img = np.concatenate([_img, _mask], axis=1)
plt.imshow(merged_img)
plt.show()
| true | true |
f71a280976585c5919618be25b73b5e66de54cdf | 4,197 | py | Python | ucsmsdk/mometa/comm/CommSyslogClient.py | anoop1984/python_sdk | c4a226bad5e10ad233eda62bc8f6d66a5a82b651 | [
"Apache-2.0"
] | null | null | null | ucsmsdk/mometa/comm/CommSyslogClient.py | anoop1984/python_sdk | c4a226bad5e10ad233eda62bc8f6d66a5a82b651 | [
"Apache-2.0"
] | null | null | null | ucsmsdk/mometa/comm/CommSyslogClient.py | anoop1984/python_sdk | c4a226bad5e10ad233eda62bc8f6d66a5a82b651 | [
"Apache-2.0"
] | null | null | null | """This module contains the general information for CommSyslogClient ManagedObject."""
import sys, os
from ...ucsmo import ManagedObject
from ...ucscoremeta import UcsVersion, MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class CommSyslogClientConsts():
    """Allowed string values for CommSyslogClient properties."""
    # admin_state values
    ADMIN_STATE_DISABLED = "disabled"
    ADMIN_STATE_ENABLED = "enabled"
    # forwarding_facility values (syslog local facilities)
    FORWARDING_FACILITY_LOCAL0 = "local0"
    FORWARDING_FACILITY_LOCAL1 = "local1"
    FORWARDING_FACILITY_LOCAL2 = "local2"
    FORWARDING_FACILITY_LOCAL3 = "local3"
    FORWARDING_FACILITY_LOCAL4 = "local4"
    FORWARDING_FACILITY_LOCAL5 = "local5"
    FORWARDING_FACILITY_LOCAL6 = "local6"
    FORWARDING_FACILITY_LOCAL7 = "local7"
    # name values (naming property: which remote syslog server slot)
    NAME_PRIMARY = "primary"
    NAME_SECONDARY = "secondary"
    NAME_TERTIARY = "tertiary"
    # severity values (minimum syslog level forwarded)
    SEVERITY_ALERTS = "alerts"
    SEVERITY_CRITICAL = "critical"
    SEVERITY_DEBUGGING = "debugging"
    SEVERITY_EMERGENCIES = "emergencies"
    SEVERITY_ERRORS = "errors"
    SEVERITY_INFORMATION = "information"
    SEVERITY_NOTIFICATIONS = "notifications"
    SEVERITY_WARNINGS = "warnings"
class CommSyslogClient(ManagedObject):
    """This is CommSyslogClient class.

    Managed object for one remote syslog destination
    (primary/secondary/tertiary) configured under a commSyslog parent.
    """
    consts = CommSyslogClientConsts()
    # 'name' is the naming property used to build the rn "client-[name]"
    naming_props = set([u'name'])
    # MO metadata: class ids, rn pattern, version, access, parents, verbs
    mo_meta = MoMeta("CommSyslogClient", "commSyslogClient", "client-[name]", VersionMeta.Version101e, "InputOutput", 0x3ff, [], ["admin", "operations"], [u'commSyslog'], [], ["Get", "Set"])
    # per-property metadata: XML name, type, version, access, mask, limits,
    # regex and allowed values
    prop_meta = {
        "admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["disabled", "enabled"], []),
        "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version101e, MoPropertyMeta.INTERNAL, 0x4, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
        "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, 0x8, 0, 256, None, [], []),
        "forwarding_facility": MoPropertyMeta("forwarding_facility", "forwardingFacility", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["local0", "local1", "local2", "local3", "local4", "local5", "local6", "local7"], []),
        "hostname": MoPropertyMeta("hostname", "hostname", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, [], []),
        "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version101e, MoPropertyMeta.NAMING, 0x40, None, None, None, ["primary", "secondary", "tertiary"], []),
        "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, 0x80, 0, 256, None, [], []),
        "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
        "severity": MoPropertyMeta("severity", "severity", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x100, None, None, None, ["alerts", "critical", "debugging", "emergencies", "errors", "information", "notifications", "warnings"], []),
        "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x200, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
    }
    # XML attribute name -> python attribute name
    prop_map = {
        "adminState": "admin_state",
        "childAction": "child_action",
        "dn": "dn",
        "forwardingFacility": "forwarding_facility",
        "hostname": "hostname",
        "name": "name",
        "rn": "rn",
        "sacl": "sacl",
        "severity": "severity",
        "status": "status",
    }
    def __init__(self, parent_mo_or_dn, name, **kwargs):
        """
        parent_mo_or_dn: parent ManagedObject or its dn string
        name: naming property value ("primary" / "secondary" / "tertiary")
        kwargs: optional initial property values (see prop_meta keys)
        """
        self._dirty_mask = 0
        self.name = name
        self.admin_state = None
        self.child_action = None
        self.forwarding_facility = None
        self.hostname = None
        self.sacl = None
        self.severity = None
        self.status = None
        ManagedObject.__init__(self, "CommSyslogClient", parent_mo_or_dn, **kwargs)
| 52.4625 | 264 | 0.671432 | import sys, os
from ...ucsmo import ManagedObject
from ...ucscoremeta import UcsVersion, MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class CommSyslogClientConsts():
ADMIN_STATE_DISABLED = "disabled"
ADMIN_STATE_ENABLED = "enabled"
FORWARDING_FACILITY_LOCAL0 = "local0"
FORWARDING_FACILITY_LOCAL1 = "local1"
FORWARDING_FACILITY_LOCAL2 = "local2"
FORWARDING_FACILITY_LOCAL3 = "local3"
FORWARDING_FACILITY_LOCAL4 = "local4"
FORWARDING_FACILITY_LOCAL5 = "local5"
FORWARDING_FACILITY_LOCAL6 = "local6"
FORWARDING_FACILITY_LOCAL7 = "local7"
NAME_PRIMARY = "primary"
NAME_SECONDARY = "secondary"
NAME_TERTIARY = "tertiary"
SEVERITY_ALERTS = "alerts"
SEVERITY_CRITICAL = "critical"
SEVERITY_DEBUGGING = "debugging"
SEVERITY_EMERGENCIES = "emergencies"
SEVERITY_ERRORS = "errors"
SEVERITY_INFORMATION = "information"
SEVERITY_NOTIFICATIONS = "notifications"
SEVERITY_WARNINGS = "warnings"
class CommSyslogClient(ManagedObject):
consts = CommSyslogClientConsts()
naming_props = set([u'name'])
mo_meta = MoMeta("CommSyslogClient", "commSyslogClient", "client-[name]", VersionMeta.Version101e, "InputOutput", 0x3ff, [], ["admin", "operations"], [u'commSyslog'], [], ["Get", "Set"])
prop_meta = {
"admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, ["disabled", "enabled"], []),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version101e, MoPropertyMeta.INTERNAL, 0x4, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, 0x8, 0, 256, None, [], []),
"forwarding_facility": MoPropertyMeta("forwarding_facility", "forwardingFacility", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["local0", "local1", "local2", "local3", "local4", "local5", "local6", "local7"], []),
"hostname": MoPropertyMeta("hostname", "hostname", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, [], []),
"name": MoPropertyMeta("name", "name", "string", VersionMeta.Version101e, MoPropertyMeta.NAMING, 0x40, None, None, None, ["primary", "secondary", "tertiary"], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, 0x80, 0, 256, None, [], []),
"sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
"severity": MoPropertyMeta("severity", "severity", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x100, None, None, None, ["alerts", "critical", "debugging", "emergencies", "errors", "information", "notifications", "warnings"], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x200, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
}
prop_map = {
"adminState": "admin_state",
"childAction": "child_action",
"dn": "dn",
"forwardingFacility": "forwarding_facility",
"hostname": "hostname",
"name": "name",
"rn": "rn",
"sacl": "sacl",
"severity": "severity",
"status": "status",
}
    def __init__(self, parent_mo_or_dn, name, **kwargs):
        """Create a CommSyslogClient managed object.

        parent_mo_or_dn: the parent managed object, or its DN string.
        name: the naming property; per the property metadata above it is
            one of "primary", "secondary" or "tertiary".
        **kwargs: optional property values, forwarded to ManagedObject.
        """
        self._dirty_mask = 0
        self.name = name
        self.admin_state = None
        self.child_action = None
        self.forwarding_facility = None
        self.hostname = None
        self.sacl = None
        self.severity = None
        self.status = None

        ManagedObject.__init__(self, "CommSyslogClient", parent_mo_or_dn, **kwargs)
| true | true |
f71a28fae36dc01961cc60b2d06bc962234e0ce7 | 12,999 | py | Python | hy/macros.py | silver-dragon/hy | c7b2f47681f54b365da22ec8d65c7dbc59ab7501 | [
"MIT"
] | null | null | null | hy/macros.py | silver-dragon/hy | c7b2f47681f54b365da22ec8d65c7dbc59ab7501 | [
"MIT"
] | null | null | null | hy/macros.py | silver-dragon/hy | c7b2f47681f54b365da22ec8d65c7dbc59ab7501 | [
"MIT"
] | null | null | null | # Copyright 2021 the authors.
# This file is part of Hy, which is free software licensed under the Expat
# license. See the LICENSE.
import sys
import builtins
import importlib
import inspect
import pkgutil
import traceback
from ast import AST
from funcparserlib.parser import NoParseError
from hy._compat import PY3_8
from hy.model_patterns import whole
from hy.models import replace_hy_obj, Expression, Symbol, as_model, is_unpack
from hy.lex import mangle, unmangle
from hy.errors import (HyLanguageError, HyMacroExpansionError, HyTypeError,
HyRequireError)
import hy.compiler
EXTRA_MACROS = ["hy.core.result_macros", "hy.core.macros"]
def macro(name):
    """Decorator that registers the wrapped function as the macro `name`."""
    def decorator(fn):
        return install_macro(name, fn, fn)
    return decorator
def pattern_macro(names, pattern, shadow = None):
    """Define a "pattern macro" under one or more `names`.

    `pattern` is a funcparserlib parser that the macro's arguments must
    match; the resulting parse tree is passed to the decorated function
    along with the compiler, the whole expression, and the unmangled
    macro name.

    `names` may be a single string, a list of strings, or a tuple
    ``(py_version_required, names)`` restricting the macro to newer
    Pythons.  If `shadow` is true and any argument is an unpacking form,
    the macro instead compiles to a call of the same-named function in
    ``hy.core.shadow``.
    """
    # Require the pattern to consume all of the macro's arguments.
    pattern = whole(pattern)
    py_version_required = None
    if isinstance(names, tuple):
        py_version_required, names = names

    def dec(fn):
        def wrapper_maker(name):
            def wrapper(hy_compiler, *args):
                if (shadow and
                        any(is_unpack("iterable", x) for x in args)):
                    # Try a shadow function call with this name instead.
                    return Expression([
                        Symbol('hy.core.shadow.' + name),
                        *args]).replace(hy_compiler.this)

                expr = hy_compiler.this
                root = unmangle(expr[0])

                if (py_version_required and
                        sys.version_info < py_version_required):
                    raise hy_compiler._syntax_error(expr,
                        '`{}` requires Python {} or later'.format(
                            root,
                            '.'.join(map(str, py_version_required))))

                try:
                    parse_tree = pattern.parse(args)
                except NoParseError as e:
                    # Point the error at the offending argument, clamping
                    # the index to the expression's last element.
                    raise hy_compiler._syntax_error(
                        expr[min(e.state.pos + 1, len(expr) - 1)],
                        "parse error for pattern macro '{}': {}".format(
                            root, e.msg.replace("<EOF>", "end of form")))
                return fn(hy_compiler, expr, root, *parse_tree)
            return wrapper
        # Install the same implementation under each requested name.
        for name in ([names] if isinstance(names, str) else names):
            install_macro(name, wrapper_maker(name), fn)
        return fn
    return dec
def install_macro(name, fn, module_of):
    """Register `fn` as the macro `name` in the ``__macros__`` table of
    the module that defines `module_of` (typically `fn` itself).

    Returns the renamed macro function.
    """
    name = mangle(name)
    # Rename so tracebacks show the mangled macro name.
    fn = rename_function(fn, name)
    (inspect.getmodule(module_of).__dict__
        .setdefault('__macros__', {})[name]) = fn
    return fn
def _same_modules(source_module, target_module):
"""Compare the filenames associated with the given modules names.
This tries to not actually load the modules.
"""
if not (source_module or target_module):
return False
if target_module == source_module:
return True
def _get_filename(module):
filename = None
try:
if not inspect.ismodule(module):
loader = pkgutil.get_loader(module)
if isinstance(loader, importlib.machinery.SourceFileLoader):
filename = loader.get_filename()
else:
filename = inspect.getfile(module)
except (TypeError, ImportError):
pass
return filename
source_filename = _get_filename(source_module)
target_filename = _get_filename(target_module)
return (source_filename and target_filename and
source_filename == target_filename)
def require(source_module, target_module, assignments, prefix=""):
    """Load macros from one module into the namespace of another.

    This function is called from the macro also named `require`.

    Parameters
    ----------
    source_module: str or types.ModuleType
        The module from which macros are to be imported.  A string may be
        a dotted module name, including a relative one (leading dots).
    target_module: str, types.ModuleType or None
        The module into which the macros will be loaded. If `None`, then
        the caller's namespace.
        The latter is useful during evaluation of generated AST/bytecode.
    assignments: str or list of tuples of strs
        The string "ALL" or a list of macro name and alias pairs.
    prefix: str, optional ("")
        If nonempty, its value is prepended to the name of each imported macro.
        This allows one to emulate namespaced macros, like
        "mymacromodule.mymacro", which looks like an attribute of a module.

    Returns
    -------
    out: boolean
        Whether or not macros were actually transferred.
    """
    if target_module is None:
        # Default to the caller's global namespace.
        parent_frame = inspect.stack()[1][0]
        target_namespace = parent_frame.f_globals
        target_module = target_namespace.get('__name__', None)
    elif isinstance(target_module, str):
        target_module = importlib.import_module(target_module)
        target_namespace = target_module.__dict__
    elif inspect.ismodule(target_module):
        target_namespace = target_module.__dict__
    else:
        raise HyTypeError('`target_module` is not a recognized type: {}'.format(
            type(target_module)))

    # Let's do a quick check to make sure the source module isn't actually
    # the module being compiled (e.g. when `runpy` executes a module's code
    # in `__main__`).
    # We use the module's underlying filename for this (when they exist), since
    # it's the most "fixed" attribute.
    if _same_modules(source_module, target_module):
        return False

    if not inspect.ismodule(source_module):
        try:
            if source_module.startswith("."):
                # Resolve a relative module name against the target
                # module's package: strip one leading dot per parent.
                source_dirs = source_module.split(".")
                target_dirs = (getattr(target_module, "__name__", target_module)
                               .split("."))
                while (len(source_dirs) > 1
                        and source_dirs[0] == ""
                        and target_dirs):
                    source_dirs.pop(0)
                    target_dirs.pop()
                package = ".".join(target_dirs + source_dirs[:-1])
            else:
                package = None
            source_module = importlib.import_module(source_module, package)
        except ImportError as e:
            raise HyRequireError(e.args[0]).with_traceback(None)

    source_macros = source_module.__dict__.setdefault('__macros__', {})

    if not source_module.__macros__:
        # The module defines no macros of its own; each requested name
        # might instead be a submodule that contains macros.
        if assignments != "ALL":
            for name, alias in assignments:
                try:
                    require(f"{source_module.__name__}.{mangle(name)}",
                            target_module,
                            "ALL",
                            prefix=alias)
                except HyRequireError as e:
                    raise HyRequireError(f"Cannot import name '{name}'"
                                         f" from '{source_module.__name__}'"
                                         f" ({source_module.__file__})")
            return True
        else:
            return False

    target_macros = target_namespace.setdefault('__macros__', {})

    if prefix:
        prefix += "."

    if assignments == "ALL":
        name_assigns = [(k, k) for k in source_macros.keys()]
    else:
        name_assigns = assignments

    for name, alias in name_assigns:
        _name = mangle(name)
        # Reader-macro aliases (starting with "#") keep the "#" in front
        # of the prefix.
        alias = mangle('#' + prefix + unmangle(alias)[1:]
                       if unmangle(alias).startswith('#')
                       else prefix + alias)
        if _name in source_module.__macros__:
            target_macros[alias] = source_macros[_name]
        else:
            raise HyRequireError('Could not require name {} from {}'.format(
                _name, source_module))

    return True
def load_macros(module):
    """Reset `module`'s macro table and fill it with Hy's builtin macros
    (the modules listed in ``EXTRA_MACROS``).

    It is an error to call this on any module in `hy.core`.
    """
    module.__macros__ = {}
    for builtin_name in EXTRA_MACROS:
        builtin_mod = importlib.import_module(builtin_name)
        # Later builtin modules may overwrite entries from earlier ones.
        module.__macros__.update(getattr(builtin_mod, '__macros__', {}))
class MacroExceptions:
    """Context manager that wraps non-``HyLanguageError`` exceptions raised
    during macro expansion in a ``HyMacroExpansionError``, preserving the
    stack trace.

    Used in lieu of ``@contextmanager`` to ensure the stack trace contains
    only internal hy modules, for consistent filtering.
    """

    def __init__(self, module, macro_tree, compiler=None):
        self.module = module
        self.macro_tree = macro_tree
        self.compiler = compiler

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        if exc_type is None:
            # Nothing was raised; suppress nothing.
            return True
        if issubclass(exc_type, HyLanguageError):
            # Already a Hy error: let it propagate unchanged.
            return False
        # Translate any other exception into a macro-expansion error
        # attributed to the macro form being expanded.
        filename = self.compiler.filename if self.compiler else None
        source = self.compiler.source if self.compiler else None
        tp, val = sys.exc_info()[:2]
        detail = ' '.join(traceback.format_exception_only(tp, val))
        msg = "expanding macro {}\n ".format(str(self.macro_tree[0])) + detail
        raise HyMacroExpansionError(msg, self.macro_tree, filename, source)
def macroexpand(tree, module, compiler=None, once=False, result_ok=True):
    """Expand the toplevel macros for the given Hy AST tree.

    Load the macros from the given `module`, then expand the (top-level) macros
    in `tree` until we no longer can.

    `Expression` resulting from macro expansions are assigned the module in
    which the macro function is defined (determined using `inspect.getmodule`).
    If the resulting `Expression` is itself macro expanded, then the namespace
    of the assigned module is checked first for a macro corresponding to the
    expression's head/car symbol. If the head/car symbol of such a `Expression`
    is not found among the macros of its assigned module's namespace, the
    outer-most namespace--e.g. the one given by the `module` parameter--is used
    as a fallback.

    Parameters
    ----------
    tree: hy.models.Object or list
        Hy AST tree.
    module: str or types.ModuleType
        Module used to determine the local namespace for macros.
    compiler: HyASTCompiler, optional
        The compiler object passed to expanded macros.
    once: boolean, optional
        Only expand the first macro in `tree`.
    result_ok: boolean, optional
        Whether a macro may return a compiler `Result` or AST object; if
        false, such a result leaves `tree` unexpanded.

    Returns
    -------
    out: hy.models.Object
        Returns a mutated tree with macros expanded.
    """
    if not inspect.ismodule(module):
        module = importlib.import_module(module)

    assert not compiler or compiler.module == module

    while isinstance(tree, Expression) and tree:
        fn = tree[0]
        # Quoted forms and non-symbol heads are never macro calls.
        if fn in ("quote", "quasiquote") or not isinstance(fn, Symbol):
            break

        fn = mangle(fn)
        # Search order: the expression's own module (if any), then the
        # caller-supplied module, then builtins.
        expr_modules = (([] if not hasattr(tree, 'module') else [tree.module])
                        + [module])
        expr_modules.append(builtins)

        # Choose the first namespace with the macro.
        m = next((mod.__macros__[fn]
                  for mod in expr_modules
                  if fn in getattr(mod, '__macros__', ())),
                 None)
        if not m:
            break

        with MacroExceptions(module, tree, compiler):
            if compiler:
                compiler.this = tree
            obj = m(compiler, *tree[1:])
            if isinstance(obj, (hy.compiler.Result, AST)):
                # The macro compiled the form itself; no further
                # expansion is possible.
                return obj if result_ok else tree

            if isinstance(obj, Expression):
                obj.module = inspect.getmodule(m)

            tree = replace_hy_obj(obj, tree)

        if once:
            break

    tree = as_model(tree)
    return tree
def macroexpand_1(tree, module, compiler=None):
    """Perform a single step of macro expansion on `tree`, in the context
    of `compiler`.

    Equivalent to ``macroexpand`` with ``once=True``.
    """
    return macroexpand(tree, module, compiler=compiler, once=True)
def rename_function(func, new_name):
    """Return a copy of `func` renamed to `new_name`, including at the
    code-object level (so tracebacks show the new name).

    The copy shares `func`'s globals, defaults, closure, and attribute
    dict.  Unlike a bare ``types.FunctionType(...)`` call, keyword-only
    defaults are preserved: the 5-argument function constructor does not
    carry ``__kwdefaults__`` over, which silently dropped keyword-only
    defaults from renamed functions.
    """
    c = func.__code__
    if hasattr(c, "replace"):
        # Python 3.8+: CodeType.replace is robust against changes in the
        # code-object constructor's positional signature.
        new_code = c.replace(co_name=str(new_name))
    else:
        # Older Pythons: rebuild the code object positionally from the
        # module-level `code_obj_args` list, swapping in the new name.
        new_code = type(c)(*[getattr(c, 'co_{}'.format(a))
                             if a != 'name' else str(new_name)
                             for a in code_obj_args])
    _fn = type(func)(new_code, func.__globals__, str(new_name),
                     func.__defaults__, func.__closure__)
    # The function constructor does not copy keyword-only defaults.
    _fn.__kwdefaults__ = func.__kwdefaults__
    _fn.__dict__.update(func.__dict__)
    return _fn
# Positional-argument names of the CPython code-object constructor, in
# order; used by `rename_function` to rebuild a code object positionally.
code_obj_args = ['argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize',
                 'flags', 'code', 'consts', 'names', 'varnames', 'filename', 'name',
                 'firstlineno', 'lnotab', 'freevars', 'cellvars']
# `co_posonlyargcount` only exists on Python 3.8 and later.
if not PY3_8:
    code_obj_args.remove("posonlyargcount")
| 34.11811 | 89 | 0.605123 |
import sys
import builtins
import importlib
import inspect
import pkgutil
import traceback
from ast import AST
from funcparserlib.parser import NoParseError
from hy._compat import PY3_8
from hy.model_patterns import whole
from hy.models import replace_hy_obj, Expression, Symbol, as_model, is_unpack
from hy.lex import mangle, unmangle
from hy.errors import (HyLanguageError, HyMacroExpansionError, HyTypeError,
HyRequireError)
import hy.compiler
EXTRA_MACROS = ["hy.core.result_macros", "hy.core.macros"]
def macro(name):
return lambda fn: install_macro(name, fn, fn)
def pattern_macro(names, pattern, shadow = None):
pattern = whole(pattern)
py_version_required = None
if isinstance(names, tuple):
py_version_required, names = names
def dec(fn):
def wrapper_maker(name):
def wrapper(hy_compiler, *args):
if (shadow and
any(is_unpack("iterable", x) for x in args)):
return Expression([
Symbol('hy.core.shadow.' + name),
*args]).replace(hy_compiler.this)
expr = hy_compiler.this
root = unmangle(expr[0])
if (py_version_required and
sys.version_info < py_version_required):
raise hy_compiler._syntax_error(expr,
'`{}` requires Python {} or later'.format(
root,
'.'.join(map(str, py_version_required))))
try:
parse_tree = pattern.parse(args)
except NoParseError as e:
raise hy_compiler._syntax_error(
expr[min(e.state.pos + 1, len(expr) - 1)],
"parse error for pattern macro '{}': {}".format(
root, e.msg.replace("<EOF>", "end of form")))
return fn(hy_compiler, expr, root, *parse_tree)
return wrapper
for name in ([names] if isinstance(names, str) else names):
install_macro(name, wrapper_maker(name), fn)
return fn
return dec
def install_macro(name, fn, module_of):
name = mangle(name)
fn = rename_function(fn, name)
(inspect.getmodule(module_of).__dict__
.setdefault('__macros__', {})[name]) = fn
return fn
def _same_modules(source_module, target_module):
if not (source_module or target_module):
return False
if target_module == source_module:
return True
def _get_filename(module):
filename = None
try:
if not inspect.ismodule(module):
loader = pkgutil.get_loader(module)
if isinstance(loader, importlib.machinery.SourceFileLoader):
filename = loader.get_filename()
else:
filename = inspect.getfile(module)
except (TypeError, ImportError):
pass
return filename
source_filename = _get_filename(source_module)
target_filename = _get_filename(target_module)
return (source_filename and target_filename and
source_filename == target_filename)
def require(source_module, target_module, assignments, prefix=""):
if target_module is None:
parent_frame = inspect.stack()[1][0]
target_namespace = parent_frame.f_globals
target_module = target_namespace.get('__name__', None)
elif isinstance(target_module, str):
target_module = importlib.import_module(target_module)
target_namespace = target_module.__dict__
elif inspect.ismodule(target_module):
target_namespace = target_module.__dict__
else:
raise HyTypeError('`target_module` is not a recognized type: {}'.format(
type(target_module)))
# in `__main__`).
# We use the module's underlying filename for this (when they exist), since
if _same_modules(source_module, target_module):
return False
if not inspect.ismodule(source_module):
try:
if source_module.startswith("."):
source_dirs = source_module.split(".")
target_dirs = (getattr(target_module, "__name__", target_module)
.split("."))
while (len(source_dirs) > 1
and source_dirs[0] == ""
and target_dirs):
source_dirs.pop(0)
target_dirs.pop()
package = ".".join(target_dirs + source_dirs[:-1])
else:
package = None
source_module = importlib.import_module(source_module, package)
except ImportError as e:
raise HyRequireError(e.args[0]).with_traceback(None)
source_macros = source_module.__dict__.setdefault('__macros__', {})
if not source_module.__macros__:
if assignments != "ALL":
for name, alias in assignments:
try:
require(f"{source_module.__name__}.{mangle(name)}",
target_module,
"ALL",
prefix=alias)
except HyRequireError as e:
raise HyRequireError(f"Cannot import name '{name}'"
f" from '{source_module.__name__}'"
f" ({source_module.__file__})")
return True
else:
return False
target_macros = target_namespace.setdefault('__macros__', {})
if prefix:
prefix += "."
if assignments == "ALL":
name_assigns = [(k, k) for k in source_macros.keys()]
else:
name_assigns = assignments
for name, alias in name_assigns:
_name = mangle(name)
alias = mangle('
if unmangle(alias).startswith('
else prefix + alias)
if _name in source_module.__macros__:
target_macros[alias] = source_macros[_name]
else:
raise HyRequireError('Could not require name {} from {}'.format(
_name, source_module))
return True
def load_macros(module):
builtin_macros = EXTRA_MACROS
module.__macros__ = {}
for builtin_mod_name in builtin_macros:
builtin_mod = importlib.import_module(builtin_mod_name)
# This may overwrite macros in the module.
if hasattr(builtin_mod, '__macros__'):
module.__macros__.update(getattr(builtin_mod, '__macros__', {}))
class MacroExceptions():
def __init__(self, module, macro_tree, compiler=None):
self.module = module
self.macro_tree = macro_tree
self.compiler = compiler
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
if exc_type is None:
return True
elif not issubclass(exc_type, HyLanguageError):
if self.compiler:
filename = self.compiler.filename
source = self.compiler.source
else:
filename = None
source = None
exc_msg = ' '.join(traceback.format_exception_only(
sys.exc_info()[0], sys.exc_info()[1]))
msg = "expanding macro {}\n ".format(str(self.macro_tree[0]))
msg += exc_msg
raise HyMacroExpansionError(msg, self.macro_tree, filename, source)
else:
return False
def macroexpand(tree, module, compiler=None, once=False, result_ok=True):
if not inspect.ismodule(module):
module = importlib.import_module(module)
assert not compiler or compiler.module == module
while isinstance(tree, Expression) and tree:
fn = tree[0]
if fn in ("quote", "quasiquote") or not isinstance(fn, Symbol):
break
fn = mangle(fn)
expr_modules = (([] if not hasattr(tree, 'module') else [tree.module])
+ [module])
expr_modules.append(builtins)
# Choose the first namespace with the macro.
m = next((mod.__macros__[fn]
for mod in expr_modules
if fn in getattr(mod, '__macros__', ())),
None)
if not m:
break
with MacroExceptions(module, tree, compiler):
if compiler:
compiler.this = tree
obj = m(compiler, *tree[1:])
if isinstance(obj, (hy.compiler.Result, AST)):
return obj if result_ok else tree
if isinstance(obj, Expression):
obj.module = inspect.getmodule(m)
tree = replace_hy_obj(obj, tree)
if once:
break
tree = as_model(tree)
return tree
def macroexpand_1(tree, module, compiler=None):
return macroexpand(tree, module, compiler, once=True)
def rename_function(func, new_name):
c = func.__code__
new_code = type(c)(*[getattr(c, 'co_{}'.format(a))
if a != 'name' else str(new_name)
for a in code_obj_args])
_fn = type(func)(new_code, func.__globals__, str(new_name),
func.__defaults__, func.__closure__)
_fn.__dict__.update(func.__dict__)
return _fn
code_obj_args = ['argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize',
'flags', 'code', 'consts', 'names', 'varnames', 'filename', 'name',
'firstlineno', 'lnotab', 'freevars', 'cellvars']
if not PY3_8:
code_obj_args.remove("posonlyargcount")
| true | true |
f71a2b94b5be2676eac49b95b663de23170408de | 9,927 | py | Python | gpt2_model.py | solad5/acgan-gpt2 | 52901a996fd235355f8c3f6b83037c85b1fdb415 | [
"MIT"
] | null | null | null | gpt2_model.py | solad5/acgan-gpt2 | 52901a996fd235355f8c3f6b83037c85b1fdb415 | [
"MIT"
] | null | null | null | gpt2_model.py | solad5/acgan-gpt2 | 52901a996fd235355f8c3f6b83037c85b1fdb415 | [
"MIT"
] | null | null | null | '''
code by TaeHwan Jung(@graykode)
Original Paper and repository here : https://github.com/openai/gpt-2
GPT2 Pytorch Model : https://github.com/huggingface/pytorch-pretrained-BERT
'''
import copy
import torch
import math
import torch.nn as nn
from torch.nn.parameter import Parameter
def gelu(x):
    """Gaussian Error Linear Unit, tanh approximation:

        0.5 * x * (1 + tanh(sqrt(2/pi) * (x + 0.044715 * x^3)))
    """
    inner = math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))
    return 0.5 * x * (1 + torch.tanh(inner))
def load_weight(model, state_dict):
    """Load a GPT-2 `state_dict` into `model`, normalizing key names.

    Keys ending in ".g", ".b" and ".w" (TF-converted checkpoints) are
    renamed to ".weight"/".bias" to match this implementation's parameter
    names.  Returns `model` with tied input/output embeddings restored.
    """
    old_keys = []
    new_keys = []
    for key in state_dict.keys():
        new_key = None
        if key.endswith(".g"):
            new_key = key[:-2] + ".weight"
        elif key.endswith(".b"):
            new_key = key[:-2] + ".bias"
        elif key.endswith(".w"):
            new_key = key[:-2] + ".weight"
        if new_key:
            old_keys.append(key)
            new_keys.append(new_key)
    for old_key, new_key in zip(old_keys, new_keys):
        state_dict[new_key] = state_dict.pop(old_key)

    missing_keys = []
    unexpected_keys = []
    error_msgs = []
    # copy state_dict so _load_from_state_dict can modify it
    metadata = getattr(state_dict, "_metadata", None)
    state_dict = state_dict.copy()
    if metadata is not None:
        state_dict._metadata = metadata

    def load(module, prefix=""):
        # Recursively load parameters into each submodule, mirroring
        # nn.Module.load_state_dict's internal traversal.
        local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
        module._load_from_state_dict(
            state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs
        )
        for name, child in module._modules.items():
            if child is not None:
                load(child, prefix + name + ".")

    start_model = model
    # If no checkpoint key carries the "transformer." prefix, the
    # checkpoint covers only the transformer; load into that submodule.
    if hasattr(model, "transformer") and all(not s.startswith('transformer.') for s in state_dict.keys()):
        start_model = model.transformer
    load(start_model, prefix="")

    # Make sure we are still sharing the output and input embeddings after loading weights
    model.set_tied()
    return model
class LayerNorm(nn.Module):
    """Layer normalization in the TF style: epsilon is added inside the
    square root.
    """

    def __init__(self, hidden_size, eps=1e-12):
        super().__init__()
        self.weight = nn.Parameter(torch.ones(hidden_size))
        self.bias = nn.Parameter(torch.zeros(hidden_size))
        self.variance_epsilon = eps

    def forward(self, x):
        # Normalize over the last dimension only.
        mean = x.mean(-1, keepdim=True)
        var = (x - mean).pow(2).mean(-1, keepdim=True)
        normed = (x - mean) / torch.sqrt(var + self.variance_epsilon)
        return self.weight * normed + self.bias
class Conv1D(nn.Module):
    """A linear projection whose weight is stored transposed (shape
    (nx, nf)), matching the original TF GPT-2 checkpoint layout.
    """

    def __init__(self, nf, nx):
        super().__init__()
        self.nf = nf
        weight = torch.empty(nx, nf)
        nn.init.normal_(weight, std=0.02)
        self.weight = Parameter(weight)
        self.bias = Parameter(torch.zeros(nf))

    def forward(self, x):
        # Flatten all leading dims, apply bias + x @ weight, then restore.
        out_shape = x.size()[:-1] + (self.nf,)
        flat = x.view(-1, x.size(-1))
        out = torch.addmm(self.bias, flat, self.weight)
        return out.view(*out_shape)
class Attention(nn.Module):
    """Multi-head causal self-attention, with optional cached key/values
    (`layer_past`) for incremental decoding.
    """

    def __init__(self, nx, n_ctx, config, scale=False):
        # nx: embedding size; n_ctx: maximum context length (mask size);
        # scale: divide attention scores by sqrt(head dim) when true.
        super(Attention, self).__init__()
        n_state = nx  # in Attention: n_state=768 (nx=n_embd)
        # [switch nx => n_state from Block to Attention to keep identical to TF implem]
        assert n_state % config.n_head == 0
        # Lower-triangular causal mask, shaped to broadcast over
        # (batch, head, query, key).
        self.register_buffer("bias", torch.tril(torch.ones(n_ctx, n_ctx)).view(1, 1, n_ctx, n_ctx))
        self.n_head = config.n_head
        self.split_size = n_state
        self.scale = scale
        self.c_attn = Conv1D(n_state * 3, nx)  # fused q, k, v projection
        self.c_proj = Conv1D(n_state, nx)  # output projection

    def _attn(self, q, k, v):
        # Note: k arrives pre-transposed (see split_heads), so this is q @ k^T.
        w = torch.matmul(q, k)
        if self.scale:
            w = w / math.sqrt(v.size(-1))
        nd, ns = w.size(-2), w.size(-1)
        b = self.bias[:, :, ns - nd:ns, :ns]
        # Here the bias b also serves as the mask to remove future information
        w = w * b - 1e10 * (1 - b)
        w = nn.Softmax(dim=-1)(w)
        return torch.matmul(w, v)

    def merge_heads(self, x):
        # (batch, head, seq, head_feat) -> (batch, seq, head * head_feat)
        x = x.permute(0, 2, 1, 3).contiguous()
        new_x_shape = x.size()[:-2] + (x.size(-2) * x.size(-1),)
        return x.view(*new_x_shape)  # in Tensorflow implem: fct merge_states

    def split_heads(self, x, k=False):
        new_x_shape = x.size()[:-1] + (self.n_head, x.size(-1) // self.n_head)
        x = x.view(*new_x_shape)  # in Tensorflow implem: fct split_states
        if k:
            return x.permute(0, 2, 3, 1)  # (batch, head, head_features, seq_length)
        else:
            return x.permute(0, 2, 1, 3)  # (batch, head, seq_length, head_features)

    def forward(self, x, layer_past=None):
        x = self.c_attn(x)
        query, key, value = x.split(self.split_size, dim=2)
        query = self.split_heads(query)
        key = self.split_heads(key, k=True)
        value = self.split_heads(value)
        if layer_past is not None:
            # Prepend keys/values cached from previous decoding steps.
            past_key, past_value = layer_past[0].transpose(-2, -1), layer_past[1]  # transpose back cf below
            key = torch.cat((past_key, key), dim=-1)
            value = torch.cat((past_value, value), dim=-2)
        present = torch.stack((key.transpose(-2, -1), value))  # transpose to have same shapes for stacking
        a = self._attn(query, key, value)
        a = self.merge_heads(a)
        a = self.c_proj(a)
        return a, present
class MLP(nn.Module):
    """Position-wise feed-forward block: Conv1D -> gelu -> Conv1D."""

    def __init__(self, n_state, config):  # n_state is typically 4 * n_embd
        super().__init__()
        embed_dim = config.n_embd
        self.c_fc = Conv1D(n_state, embed_dim)
        self.c_proj = Conv1D(embed_dim, n_state)
        self.act = gelu

    def forward(self, x):
        hidden = self.act(self.c_fc(x))
        return self.c_proj(hidden)
class Block(nn.Module):
    """One transformer layer: pre-LN self-attention and pre-LN MLP, each
    wrapped in a residual connection.
    """

    def __init__(self, n_ctx, config, scale=False):
        super().__init__()
        embed_dim = config.n_embd
        self.ln_1 = LayerNorm(embed_dim, eps=config.layer_norm_epsilon)
        self.attn = Attention(embed_dim, n_ctx, config, scale)
        self.ln_2 = LayerNorm(embed_dim, eps=config.layer_norm_epsilon)
        self.mlp = MLP(4 * embed_dim, config)

    def forward(self, x, layer_past=None):
        attn_out, present = self.attn(self.ln_1(x), layer_past=layer_past)
        x = x + attn_out
        x = x + self.mlp(self.ln_2(x))
        return x, present
class Transformer(nn.Module):
    """GPT-2 transformer stack: token + position embeddings, `n_layer`
    identical blocks, and a final layer norm.
    """

    def __init__(self, config):
        super().__init__()
        self.n_layer = config.n_layer
        self.n_embd = config.n_embd
        self.n_vocab = config.vocab_size

        self.wte = nn.Embedding(config.vocab_size, config.n_embd)  # token embeddings
        self.wpe = nn.Embedding(config.n_positions, config.n_embd)  # position embeddings
        block = Block(config.n_ctx, config, scale=True)
        self.h = nn.ModuleList([copy.deepcopy(block) for _ in range(config.n_layer)])
        self.ln_f = LayerNorm(config.n_embd, eps=config.layer_norm_epsilon)

    def set_embeddings_weights(self, model_embeddings_weights):
        embed_shape = model_embeddings_weights.shape
        self.decoder = nn.Linear(embed_shape[1], embed_shape[0], bias=False)
        self.decoder.weight = model_embeddings_weights  # Tied weights

    def forward(self, input_ids, position_ids=None, token_type_ids=None, past=None):
        # `past` holds one cached (key, value) stack per layer from
        # previous calls; default position ids continue from its length.
        if past is None:
            past_length = 0
            past = [None] * len(self.h)
        else:
            past_length = past[0][0].size(-2)
        if position_ids is None:
            position_ids = torch.arange(past_length, input_ids.size(-1) + past_length, dtype=torch.long,
                                        device=input_ids.device)
            position_ids = position_ids.unsqueeze(0).expand_as(input_ids)

        input_shape = input_ids.size()
        input_ids = input_ids.view(-1, input_ids.size(-1))
        position_ids = position_ids.view(-1, position_ids.size(-1))

        inputs_embeds = self.wte(input_ids)
        position_embeds = self.wpe(position_ids)
        if token_type_ids is not None:
            # Token-type embeddings share the token-embedding table.
            token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1))
            token_type_embeds = self.wte(token_type_ids)
        else:
            token_type_embeds = 0
        hidden_states = inputs_embeds + position_embeds + token_type_embeds
        presents = []
        for block, layer_past in zip(self.h, past):
            hidden_states, present = block(hidden_states, layer_past)
            presents.append(present)
        hidden_states = self.ln_f(hidden_states)
        output_shape = input_shape + (hidden_states.size(-1),)
        return hidden_states.view(*output_shape), presents
class LinearReadoutHead(nn.Module):
    """Projects hidden states to vocabulary logits, using the (tied)
    token-embedding matrix as the decoder weight.
    """

    def __init__(self, model_embeddings_weights, config):
        super().__init__()
        self.n_embd = config.n_embd
        self.set_embeddings_weights(model_embeddings_weights)

    def set_embeddings_weights(self, model_embeddings_weights):
        embed_shape = model_embeddings_weights.shape
        self.decoder = nn.Linear(embed_shape[1], embed_shape[0], bias=False)
        # Tie the decoder weight to the embedding matrix.
        self.decoder.weight = model_embeddings_weights

    def forward(self, hidden_state):
        # Truncated Language modeling logits (we remove the last token)
        # h_trunc = h[:, :-1].contiguous().view(-1, self.n_embd)
        return self.decoder(hidden_state)
class GPT2(nn.Module):
    """GPT-2 language model: a Transformer stack plus a readout head tied
    to the input embedding matrix.
    """

    def __init__(self, config):
        super().__init__()
        self.transformer = Transformer(config)
        self.readout_head = LinearReadoutHead(self.transformer.wte.weight, config)

    def set_tied(self):
        """Make sure the readout head shares the embedding weights."""
        self.readout_head.set_embeddings_weights(self.transformer.wte.weight)

    def forward(self, input_ids, position_ids=None, token_type_ids=None, past=None):
        # The per-layer key/value cache (`presents`) is computed by the
        # transformer but not returned here.
        hidden_states, presents = self.transformer(input_ids, position_ids,
                                                   token_type_ids, past)
        return hidden_states
import copy
import torch
import math
import torch.nn as nn
from torch.nn.parameter import Parameter
def gelu(x):
return 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3))))
def load_weight(model, state_dict):
old_keys = []
new_keys = []
for key in state_dict.keys():
new_key = None
if key.endswith(".g"):
new_key = key[:-2] + ".weight"
elif key.endswith(".b"):
new_key = key[:-2] + ".bias"
elif key.endswith(".w"):
new_key = key[:-2] + ".weight"
if new_key:
old_keys.append(key)
new_keys.append(new_key)
for old_key, new_key in zip(old_keys, new_keys):
state_dict[new_key] = state_dict.pop(old_key)
missing_keys = []
unexpected_keys = []
error_msgs = []
metadata = getattr(state_dict, "_metadata", None)
state_dict = state_dict.copy()
if metadata is not None:
state_dict._metadata = metadata
def load(module, prefix=""):
local_metadata = {} if metadata is None else metadata.get(prefix[:-1], {})
module._load_from_state_dict(
state_dict, prefix, local_metadata, True, missing_keys, unexpected_keys, error_msgs
)
for name, child in module._modules.items():
if child is not None:
load(child, prefix + name + ".")
start_model = model
if hasattr(model, "transformer") and all(not s.startswith('transformer.') for s in state_dict.keys()):
start_model = model.transformer
load(start_model, prefix="")
model.set_tied()
return model
class LayerNorm(nn.Module):
def __init__(self, hidden_size, eps=1e-12):
super(LayerNorm, self).__init__()
self.weight = nn.Parameter(torch.ones(hidden_size))
self.bias = nn.Parameter(torch.zeros(hidden_size))
self.variance_epsilon = eps
def forward(self, x):
u = x.mean(-1, keepdim=True)
s = (x - u).pow(2).mean(-1, keepdim=True)
x = (x - u) / torch.sqrt(s + self.variance_epsilon)
return self.weight * x + self.bias
class Conv1D(nn.Module):
def __init__(self, nf, nx):
super(Conv1D, self).__init__()
self.nf = nf
w = torch.empty(nx, nf)
nn.init.normal_(w, std=0.02)
self.weight = Parameter(w)
self.bias = Parameter(torch.zeros(nf))
def forward(self, x):
size_out = x.size()[:-1] + (self.nf,)
x = torch.addmm(self.bias, x.view(-1, x.size(-1)), self.weight)
x = x.view(*size_out)
return x
class Attention(nn.Module):
def __init__(self, nx, n_ctx, config, scale=False):
super(Attention, self).__init__()
n_state = nx
assert n_state % config.n_head == 0
self.register_buffer("bias", torch.tril(torch.ones(n_ctx, n_ctx)).view(1, 1, n_ctx, n_ctx))
self.n_head = config.n_head
self.split_size = n_state
self.scale = scale
self.c_attn = Conv1D(n_state * 3, nx)
self.c_proj = Conv1D(n_state, nx)
def _attn(self, q, k, v):
w = torch.matmul(q, k)
if self.scale:
w = w / math.sqrt(v.size(-1))
nd, ns = w.size(-2), w.size(-1)
b = self.bias[:, :, ns - nd:ns, :ns]
w = w * b - 1e10 * (1 - b)
w = nn.Softmax(dim=-1)(w)
return torch.matmul(w, v)
def merge_heads(self, x):
x = x.permute(0, 2, 1, 3).contiguous()
new_x_shape = x.size()[:-2] + (x.size(-2) * x.size(-1),)
return x.view(*new_x_shape)
def split_heads(self, x, k=False):
new_x_shape = x.size()[:-1] + (self.n_head, x.size(-1) // self.n_head)
x = x.view(*new_x_shape)
if k:
return x.permute(0, 2, 3, 1)
else:
return x.permute(0, 2, 1, 3)
def forward(self, x, layer_past=None):
x = self.c_attn(x)
query, key, value = x.split(self.split_size, dim=2)
query = self.split_heads(query)
key = self.split_heads(key, k=True)
value = self.split_heads(value)
if layer_past is not None:
past_key, past_value = layer_past[0].transpose(-2, -1), layer_past[1]
key = torch.cat((past_key, key), dim=-1)
value = torch.cat((past_value, value), dim=-2)
present = torch.stack((key.transpose(-2, -1), value))
a = self._attn(query, key, value)
a = self.merge_heads(a)
a = self.c_proj(a)
return a, present
class MLP(nn.Module):
def __init__(self, n_state, config):
super(MLP, self).__init__()
nx = config.n_embd
self.c_fc = Conv1D(n_state, nx)
self.c_proj = Conv1D(nx, n_state)
self.act = gelu
def forward(self, x):
h = self.act(self.c_fc(x))
h2 = self.c_proj(h)
return h2
class Block(nn.Module):
def __init__(self, n_ctx, config, scale=False):
super(Block, self).__init__()
nx = config.n_embd
self.ln_1 = LayerNorm(nx, eps=config.layer_norm_epsilon)
self.attn = Attention(nx, n_ctx, config, scale)
self.ln_2 = LayerNorm(nx, eps=config.layer_norm_epsilon)
self.mlp = MLP(4 * nx, config)
def forward(self, x, layer_past=None):
a, present = self.attn(self.ln_1(x), layer_past=layer_past)
x = x + a
m = self.mlp(self.ln_2(x))
x = x + m
return x, present
class Transformer(nn.Module):
    """GPT-2 style decoder stack: token + position embeddings, ``n_layer``
    Blocks, and a final LayerNorm. forward() returns hidden states plus the
    per-layer key/value caches ("presents")."""

    def __init__(self, config):
        super().__init__()
        self.n_layer = config.n_layer
        self.n_embd = config.n_embd
        self.n_vocab = config.vocab_size
        self.wte = nn.Embedding(config.vocab_size, config.n_embd)  # token embeddings
        self.wpe = nn.Embedding(config.n_positions, config.n_embd)  # position embeddings
        block = Block(config.n_ctx, config, scale=True)
        # deepcopy gives each layer its own (unshared) parameters.
        self.h = nn.ModuleList([copy.deepcopy(block) for _ in range(config.n_layer)])
        self.ln_f = LayerNorm(config.n_embd, eps=config.layer_norm_epsilon)

    def set_embeddings_weights(self, model_embeddings_weights):
        # NOTE(review): builds a decoder tied to the embedding weights, but
        # self.decoder is never used in forward(); looks like a leftover copy
        # of LinearReadoutHead.set_embeddings_weights — confirm before relying
        # on it.
        embed_shape = model_embeddings_weights.shape
        self.decoder = nn.Linear(embed_shape[1], embed_shape[0], bias=False)
        self.decoder.weight = model_embeddings_weights

    def forward(self, input_ids, position_ids=None, token_type_ids=None, past=None):
        """Embed the inputs and run them through the layer stack.

        past: optional per-layer key/value caches from a previous call; when
        given, position ids continue from the cached sequence length.
        Returns (hidden_states, presents) where presents is the new cache list.
        """
        if past is None:
            past_length = 0
            past = [None] * len(self.h)
        else:
            past_length = past[0][0].size(-2)
        if position_ids is None:
            position_ids = torch.arange(past_length, input_ids.size(-1) + past_length, dtype=torch.long,
                                        device=input_ids.device)
            position_ids = position_ids.unsqueeze(0).expand_as(input_ids)
        input_shape = input_ids.size()
        input_ids = input_ids.view(-1, input_ids.size(-1))  # flatten leading dims for embedding
        position_ids = position_ids.view(-1, position_ids.size(-1))
        inputs_embeds = self.wte(input_ids)
        position_embeds = self.wpe(position_ids)
        if token_type_ids is not None:
            token_type_ids = token_type_ids.view(-1, token_type_ids.size(-1))
            token_type_embeds = self.wte(token_type_ids)  # token types share the token table
        else:
            token_type_embeds = 0
        hidden_states = inputs_embeds + position_embeds + token_type_embeds
        presents = []
        for block, layer_past in zip(self.h, past):
            hidden_states, present = block(hidden_states, layer_past)
            presents.append(present)
        hidden_states = self.ln_f(hidden_states)
        output_shape = input_shape + (hidden_states.size(-1),)
        return hidden_states.view(*output_shape), presents
class LinearReadoutHead(nn.Module):
    """Language-model head whose projection weight is tied to the token
    embedding matrix (no bias term)."""

    def __init__(self, model_embeddings_weights, config):
        super().__init__()
        self.n_embd = config.n_embd
        self.set_embeddings_weights(model_embeddings_weights)

    def set_embeddings_weights(self, model_embeddings_weights):
        """(Re)tie the output projection to *model_embeddings_weights*
        (shape: vocab_size x embed_dim, as in nn.Embedding.weight)."""
        vocab_size, embed_dim = model_embeddings_weights.shape
        self.decoder = nn.Linear(embed_dim, vocab_size, bias=False)
        self.decoder.weight = model_embeddings_weights  # weight tying

    def forward(self, hidden_state):
        """Project hidden states to vocabulary logits."""
        return self.decoder(hidden_state)
class GPT2(nn.Module):
    """Bundles the Transformer body with a weight-tied readout head.

    NOTE(review): forward() returns only the transformer hidden states — the
    readout head and the KV caches (``presents``) are computed but unused
    here. Presumably callers apply ``readout_head`` themselves; confirm.
    """
    def __init__(self, config):
        super().__init__()
        self.transformer = Transformer(config)
        self.readout_head = LinearReadoutHead(self.transformer.wte.weight, config)
    def set_tied(self):
        # Re-tie the head to the embedding table (e.g. after reloading weights).
        self.readout_head.set_embeddings_weights(self.transformer.wte.weight)
    def forward(self, input_ids, position_ids=None, token_type_ids=None, past=None):
        """Run the transformer; returns hidden states only (see class note)."""
        hidden_states, presents = self.transformer(input_ids, position_ids, token_type_ids, past)
        return hidden_states
f71a2c9c59e0ff4712893eebaf781a9ad92104c2 | 4,896 | py | Python | library/bigip_software_update.py | Larsende/f5_ansible | 93b0747ba663128e2c8dfc456dad4653cdde4f38 | [
"Apache-2.0"
] | 12 | 2016-12-29T16:09:21.000Z | 2019-06-29T14:12:17.000Z | library/bigip_software_update.py | Larsende/f5_ansible | 93b0747ba663128e2c8dfc456dad4653cdde4f38 | [
"Apache-2.0"
] | 24 | 2017-05-24T07:56:56.000Z | 2017-11-30T09:31:56.000Z | library/bigip_software_update.py | Larsende/f5_ansible | 93b0747ba663128e2c8dfc456dad4653cdde4f38 | [
"Apache-2.0"
] | 26 | 2017-05-31T17:15:32.000Z | 2021-03-29T03:45:06.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: bigip_software_update
short_description: Manage the software update settings of a BIG-IP
description:
- Manage the software update settings of a BIG-IP.
version_added: "2.4"
options:
auto_check:
description:
- Specifies whether to automatically check for updates on the F5
Networks downloads server.
required: False
default: None
choices:
- yes
- no
frequency:
description:
- Specifies the schedule for the automatic update check.
required: False
default: None
choices:
- daily
- monthly
- weekly
notes:
- Requires the f5-sdk Python package on the host This is as easy as pip
install f5-sdk
extends_documentation_fragment: f5
requirements:
- f5-sdk >= 2.2.3
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = '''
'''
RETURN = '''
'''
from ansible.module_utils.f5_utils import (
AnsibleF5Client,
AnsibleF5Parameters,
HAS_F5SDK,
F5ModuleError,
iControlUnexpectedHTTPError
)
class Parameters(AnsibleF5Parameters):
    """Maps module parameters to/from the BIG-IP REST representation."""

    # REST attribute name -> module parameter name
    api_map = {
        'autoCheck': 'auto_check'
    }
    updatables = [
        'auto_check', 'frequency'
    ]
    returnables = [
        'auto_check', 'frequency'
    ]

    @property
    def auto_check(self):
        """Normalize the bool-ish ``auto_check`` value to the API's
        'enabled'/'disabled' strings; None passes through untouched."""
        if self._values['auto_check'] is None:
            return None
        elif self._values['auto_check'] in [True, 'enabled']:
            return 'enabled'
        else:
            return 'disabled'

    def api_params(self):
        """Build the dict of attributes to send to the device API.

        Bug fix: the original had an ``if self.network == 'default'`` branch
        copy-pasted from the bigip_vlan module; this module declares no
        ``network`` option, so the branch was dead at best and an
        AttributeError at worst. It has been removed.
        """
        result = {}
        # NOTE(review): ``api_attributes`` is expected to come from the
        # AnsibleF5Parameters base class — it is not defined in this class.
        for api_attribute in self.api_attributes:
            if self.api_map is not None and api_attribute in self.api_map:
                result[api_attribute] = getattr(self, self.api_map[api_attribute])
            else:
                result[api_attribute] = getattr(self, api_attribute)
        result = self._filter_params(result)
        return result
class ModuleManager(object):
    """Drives the workflow: read device state, diff against the desired
    parameters, and push changes to the device when needed."""

    def __init__(self, client):
        self.client = client
        self.have = None  # device state, populated by update()
        self.want = Parameters(self.client.module.params)
        self.changes = Parameters()

    def exec_module(self):
        """Entry point; returns the Ansible result dict."""
        try:
            changed = self.update()
        except iControlUnexpectedHTTPError as e:
            raise F5ModuleError(str(e))
        result = dict()
        result.update(**self.changes.to_return())
        result.update(dict(changed=changed))
        return result

    def _update_changed_options(self):
        # Collect every updatable the user specified whose value differs
        # from what the device currently reports.
        diff = {}
        for key in Parameters.updatables:
            desired = getattr(self.want, key)
            if desired is None:
                continue
            if desired != getattr(self.have, key):
                diff[key] = desired
        if not diff:
            return False
        self.changes = Parameters(diff)
        return True

    def should_update(self):
        return self._update_changed_options()

    def update(self):
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.client.check_mode:
            # Report "would change" without touching the device.
            return True
        self.update_on_device()
        return True

    def update_on_device(self):
        resource = self.client.api.tm.sys.software.update.load()
        resource.modify(**self.want.api_params())

    def read_current_from_device(self):
        resource = self.client.api.tm.sys.software.update.load()
        return Parameters(resource.attrs)
class ArgumentSpec(object):
    """Argument specification for the bigip_software_update module."""

    def __init__(self):
        self.supports_check_mode = True
        frequency_choices = ['daily', 'monthly', 'weekly']
        self.argument_spec = dict(
            auto_check=dict(type='bool'),
            frequency=dict(choices=frequency_choices)
        )
        self.f5_product_name = 'bigip'
def main():
    """Module entry point: build the F5 client, run the manager, exit JSON."""
    if not HAS_F5SDK:
        raise F5ModuleError("The python f5-sdk module is required")
    spec = ArgumentSpec()
    client = AnsibleF5Client(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
        f5_product_name=spec.f5_product_name
    )
    manager = ModuleManager(client)
    client.module.exit_json(**manager.exec_module())
if __name__ == '__main__':
main()
| 25.5 | 91 | 0.607639 |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: bigip_software_update
short_description: Manage the software update settings of a BIG-IP
description:
- Manage the software update settings of a BIG-IP.
version_added: "2.4"
options:
auto_check:
description:
- Specifies whether to automatically check for updates on the F5
Networks downloads server.
required: False
default: None
choices:
- yes
- no
frequency:
description:
- Specifies the schedule for the automatic update check.
required: False
default: None
choices:
- daily
- monthly
- weekly
notes:
- Requires the f5-sdk Python package on the host This is as easy as pip
install f5-sdk
extends_documentation_fragment: f5
requirements:
- f5-sdk >= 2.2.3
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = '''
'''
RETURN = '''
'''
from ansible.module_utils.f5_utils import (
AnsibleF5Client,
AnsibleF5Parameters,
HAS_F5SDK,
F5ModuleError,
iControlUnexpectedHTTPError
)
class Parameters(AnsibleF5Parameters):
api_map = {
'autoCheck': 'auto_check'
}
updatables = [
'auto_check', 'frequency'
]
returnables = [
'auto_check', 'frequency'
]
@property
def auto_check(self):
if self._values['auto_check'] is None:
return None
elif self._values['auto_check'] in [True, 'enabled']:
return 'enabled'
else:
return 'disabled'
def api_params(self):
result = {}
for api_attribute in self.api_attributes:
if self.network == 'default':
result['network'] = None
elif self.api_map is not None and api_attribute in self.api_map:
result[api_attribute] = getattr(self, self.api_map[api_attribute])
else:
result[api_attribute] = getattr(self, api_attribute)
result = self._filter_params(result)
return result
class ModuleManager(object):
def __init__(self, client):
self.client = client
self.have = None
self.want = Parameters(self.client.module.params)
self.changes = Parameters()
def exec_module(self):
result = dict()
try:
changed = self.update()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
changes = self.changes.to_return()
result.update(**changes)
result.update(dict(changed=changed))
return result
def _update_changed_options(self):
changed = {}
for key in Parameters.updatables:
if getattr(self.want, key) is not None:
attr1 = getattr(self.want, key)
attr2 = getattr(self.have, key)
if attr1 != attr2:
changed[key] = attr1
if changed:
self.changes = Parameters(changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.client.check_mode:
return True
self.update_on_device()
return True
def update_on_device(self):
params = self.want.api_params()
result = self.client.api.tm.sys.software.update.load()
result.modify(**params)
def read_current_from_device(self):
resource = self.client.api.tm.sys.software.update.load()
result = resource.attrs
return Parameters(result)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
self.argument_spec = dict(
auto_check=dict(
type='bool'
),
frequency=dict(
choices=['daily', 'monthly', 'weekly']
)
)
self.f5_product_name = 'bigip'
def main():
if not HAS_F5SDK:
raise F5ModuleError("The python f5-sdk module is required")
spec = ArgumentSpec()
client = AnsibleF5Client(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
f5_product_name=spec.f5_product_name
)
mm = ModuleManager(client)
results = mm.exec_module()
client.module.exit_json(**results)
if __name__ == '__main__':
main()
| true | true |
f71a2cf03b51c5cbf16bd9aeb093968dd349cef9 | 7,353 | py | Python | take_images.py | ManuLado/Enviar-comandos-a-marlin | f7f474ad0459602176114c62e7c97874cb69191b | [
"MIT"
] | 2 | 2021-10-02T20:20:45.000Z | 2021-10-02T20:20:53.000Z | take_images.py | ManuLado/2D-XRay_Scan_control | 5ba596c9b0db47125e2e29ed8084e61d326e8777 | [
"MIT"
] | null | null | null | take_images.py | ManuLado/2D-XRay_Scan_control | 5ba596c9b0db47125e2e29ed8084e61d326e8777 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Graba video leido desde la arducam
# Se le debe indicar el archivo de video a grabar y
# la duración de la captura en segundos.
# SINTAXIS: python capturar_video.py VIDEO TIEMPO
# 1- Ruta del video
# 2- Tiempo de grabacion en segundos
from ctypes import *
import ctypes
import sys
import os
import time
from PIL import Image
import numpy as np
import thread as thread
import math
from select import select
from evdev import InputDevice
from evdev import ecodes
from astropy.io import fits
import ArducamSDK
# Analisis de argumentos
# Parse CLI arguments: <image base name> <number of images to capture>.
if (len(sys.argv)==3):
    NOMBREIMG = sys.argv[1];  # output image base name
    NUMIMG = int(sys.argv[2]);  # number of frames to acquire
else:
    # User-facing usage message (Spanish): "2 arguments required".
    print ("Se requieren 2 argumentos: NOMBRE_IMAGENES NUMERO_IMAGENES")
    exit()
#### CONFIGURACION ARDUCAMSDK ################
COLOR_BYTE2RGB = 47 # No se modifico del original
CAMERA_MT9M001 = 0x4D091031 # No se modifico del original
SensorShipAddr = 186
I2C_MODE_8_16 = 1
usbVid = 0x52CB # No se modifico del original
Width = 1280 #1280
Height = 1024 #1024
cfg ={"u32CameraType":CAMERA_MT9M001,
"u32Width":Width,"u32Height":Height,
"u32UsbVersion":1,
"u8PixelBytes":1,
"u16Vid":0x52cb,
"u8PixelBits":8,
"u32SensorShipAddr":SensorShipAddr,
"emI2cMode":I2C_MODE_8_16 }
# FLAGS
global saveFlag,downFlag,flag,H_value,V_value,lx,ly,mx,my,dx,dy,W_zoom,H_zooM,handle,openFlag,initTime,storeFlag,bufferData,globalGain
global testPatternFlag
global integrationTime
global shutterWidth
openFlag = False
handle = {}
downFlag = False
flag = True
saveFlag = False
storeFlag = False
saveNum=0
H_value = 0
V_value = 0
W_zoom = 0
H_zoom = 0
lx = 0
ly = 0
mx = 0
my = 0
dx = 0
dy = 0
testPatternFlag = False;
regArr=[[0x01, 0x000C], # Row Start
[0x02, 0x0014], # Column Start
[0x03, Height - 1], # Window Height 0x03FF
[0x04, Width - 1], # Window Width 0x04FF
[0x05, 0x0009], # Horizontal Blanking
[0x06, 0x0019], # Vertical Blanking
[0x07, 0x0002], # Output Control
[0x09, 0x0419], # Shutter Width 0x0419 (max: 0x3FFF)
[0x0B, 0x0000], # Frame Restart
[0x0C, 0x0000],#0x0100],
[0x0D, 0x0000],
[0x1E, 0x8000], # Read Mode 1 0x8000
[0x20, 0x1104],
[0x2B, 0x0008],
[0x2C, 0x0008],
[0x2D, 0x0008],
[0x2E, 0x0008],
[0x32, 0x0FFC], # Test Data Register
[0x35, 0x0067], # Global Gain 0x0008 (max: 0x0067)
[0x5F, 0x0904],
#[0x60, 0x0000], # BLC offset: Even row, even column
#[0x61, 0x0000], # BLC offset: Odd row, odd column
#[0x62, 0x049F], # Black Level Calibration Control 0x0498 (No-BLC: 0x049F; Manual-BLC: 0x0499 & reg0x60/61/63/64)
#[0x63, 0x0000], # BLC offset: Even row, odd column
#[0x64, 0x0000], # BLC offset: Odd row, Even column
[0x60, 0x002F], # BLC offset: Even row, even column
[0x61, 0x002F], # BLC offset: Odd row, odd column
[0x62, 0x0499], # Black Level Calibration Control 0x0498 (No-BLC: 0x049F; Manual-BLC: 0x0499 & reg0x60/61/63/64)
[0x63, 0x000F], # BLC offset: Even row, odd column
[0x64, 0x000F], # BLC offset: Odd row, Even column
[0xF1, 0x0001],
[0xFFFF, 0xFFFF]
]
globalGain = regArr[18][1];
# Cálculo del tiempo de integración inicial (pag 16 del datasheet)
rowTime = regArr[3][1] + 1 + 244 + regArr[4][1] - 19; #[pixel clock periods] default: 1514
resetDelay = 4*regArr[9][1] #[pixel clock periods] default: 0
overheadTime = 180; #[pixel clock periods]
shutterWidth = regArr[7][1]
integrationPeriods = shutterWidth*rowTime - overheadTime - resetDelay;
clockPeriod = 1000.0/24e6; #[ms]
integrationTime = integrationPeriods * clockPeriod; #[ms]
with open('integrationtime.txt','w') as it:
it.write(str(integrationTime)+"\n")
print ("Initial integration time: %.3fms"%(integrationTime));
print ("Initial gain: 0x%02x"%(globalGain));
a_lock = thread.allocate_lock();
def readThread(threadName, read_Flag):
    """Producer thread: poll the ArduCam for frames and hand each complete
    frame to the consumer thread via the shared ``bufferData``/``storeFlag``
    pair, guarded by ``a_lock``.

    Fixes vs. the original:
      * the readiness-wait timeout used a bare ``exit;`` statement, which is a
        no-op (it merely references the ``site`` builtin without calling it),
        so the loop could spin forever if the camera never opened — the thread
        now returns on timeout;
      * the readiness-wait loop sleeps briefly instead of busy-spinning.
    """
    global flag, handle, storeFlag, bufferData, openFlag
    global a_lock
    count = 0
    time0 = time.time()
    time1 = time.time()
    data = {}
    # Wait (max ~20 s) for the main thread to open the camera.
    while openFlag == False:
        time1 = time.time()
        if time1 - time0 > 20:
            return  # timeout: camera never became ready, end the thread
        time.sleep(0.01)
    while flag:
        res = ArducamSDK.Py_ArduCam_available(handle)
        if res > 0:
            res, data = ArducamSDK.Py_ArduCam_read(handle, Width * Height)
            if res == 0:
                count += 1
                time1 = time.time()
                ArducamSDK.Py_ArduCam_del(handle)  # release the SDK frame slot
            else:
                print ("read data fail!")
        else:
            # No frame available yet; back off briefly.
            time.sleep(.01)
        if len(data) >= Width * Height:
            if time1 - time0 >= 5:
                # Periodic FPS report (roughly every 5 s).
                print ("%s %f %s\n"%("fps:",count*1.0/(time1-time0),"/s"))
                count = 0
                time0 = time1
            # Publish the frame to the consumer under the lock.
            a_lock.acquire()
            bufferData = data
            data = []
            storeFlag = True
            a_lock.release()
        if flag == False:
            break
thread.start_new_thread( readThread,("Thread-2", flag,))
pass
def showAndSave(threadName,algoquenoseusa):
    """Consumer thread: save frames published by readThread to FITS and stop
    the pipeline once NUMIMG frames have been written.

    NOTE(review): the output name is constant (NOMBREIMG + ".fits"), so every
    frame overwrites the previous file; the commented-out name variants below
    suggest per-frame filenames were used before — confirm intent.
    NOTE(review): the loop re-acquires the lock without sleeping when no frame
    is pending, which busy-spins one CPU core.
    """
    global flag,W_zoom,H_zoom,V_value,H_value,lx,ly,downFlag,saveFlag,saveNum,bufferData,storeFlag
    global a_lock
    global hist_ax
    global NOMBREIMG
    img = np.zeros((Height, Width), dtype=np.uint8);
    while flag:
        a_lock.acquire();
        if storeFlag == True:
            # readThread published a full frame; consume and persist it.
            storeFlag = False;
            img = np.frombuffer(bufferData, np.uint8)
            img = np.reshape(img, (Height, Width));
            saveNum += 1
            #name = NOMBREIMG + str(saveNum) + ".fits"
            #name = NOMBREIMG + "_" + str(saveNum) + ".jpeg"
            name = NOMBREIMG + ".fits"
            hdu=fits.PrimaryHDU()
            hdu.data=img
            hdu.writeto(name,overwrite=True)
            print ("Frame saved to %s"%(name))
        a_lock.release();
        if saveNum == NUMIMG:
            # Acquired the requested number of frames: signal all loops to stop.
            flag=False;
            print ("Total number of adq images = %d"%(saveNum))
        if flag == False:
            break
thread.start_new_thread( showAndSave,("Thread-3",flag))
pass
def init_and_read_arducam():
    """Open the ArduCam, program the sensor registers, and run the capture
    loop until ``flag`` is cleared (by showAndSave) or a capture fails.

    Sequence: autoopen -> write regArr until the 0xFFFF sentinel ->
    beginCapture -> repeated capture calls -> close. ``openFlag`` is the
    handshake the reader thread waits on before touching ``handle``.
    """
    global flag,regArr,handle,openFlag
    regNum = 0
    res,handle = ArducamSDK.Py_ArduCam_autoopen(cfg)
    if res == 0:
        openFlag = True
        print ("device open success!")
        # Program sensor registers; regArr is terminated by a 0xFFFF sentinel.
        while (regArr[regNum][0] != 0xFFFF):
            ArducamSDK.Py_ArduCam_writeSensorReg(handle,regArr[regNum][0],regArr[regNum][1])
            regNum = regNum + 1
        res = ArducamSDK.Py_ArduCam_beginCapture(handle)
        if res == 0:
            print ("transfer task create success!")
            while flag :
                # Each capture call queues a frame for readThread to consume.
                res = ArducamSDK.Py_ArduCam_capture(handle)
                if res != 0:
                    print ("capture failed!")
                    flag = False;
                    break;
                time.sleep(0.1)
                if flag == False:
                    break
        else:
            print ("transfer task create fail!")
        # Grace period so the reader can drain pending frames before close.
        time.sleep(2);
        res = ArducamSDK.Py_ArduCam_close(handle)
        if res == 0:
            openFlag = False
            print ("device close success!")
        else:
            print ("device close fail!")
    else:
        print ("device open fail!")
if __name__ == "__main__":
initTime = time.time();
init_and_read_arducam();
| 28.610895 | 134 | 0.622195 |
from ctypes import *
import ctypes
import sys
import os
import time
from PIL import Image
import numpy as np
import thread as thread
import math
from select import select
from evdev import InputDevice
from evdev import ecodes
from astropy.io import fits
import ArducamSDK
if (len(sys.argv)==3):
NOMBREIMG = sys.argv[1];
NUMIMG = int(sys.argv[2]);
else:
print ("Se requieren 2 argumentos: NOMBRE_IMAGENES NUMERO_IMAGENES")
exit()
"u16Vid":0x52cb,
"u8PixelBits":8,
"u32SensorShipAddr":SensorShipAddr,
"emI2cMode":I2C_MODE_8_16 }
global saveFlag,downFlag,flag,H_value,V_value,lx,ly,mx,my,dx,dy,W_zoom,H_zooM,handle,openFlag,initTime,storeFlag,bufferData,globalGain
global testPatternFlag
global integrationTime
global shutterWidth
openFlag = False
handle = {}
downFlag = False
flag = True
saveFlag = False
storeFlag = False
saveNum=0
H_value = 0
V_value = 0
W_zoom = 0
H_zoom = 0
lx = 0
ly = 0
mx = 0
my = 0
dx = 0
dy = 0
testPatternFlag = False;
regArr=[[0x01, 0x000C],
[0x02, 0x0014],
[0x03, Height - 1],
[0x04, Width - 1],
[0x05, 0x0009],
[0x06, 0x0019],
[0x07, 0x0002],
[0x09, 0x0419],
[0x0B, 0x0000],
[0x0C, 0x0000],
[0x0D, 0x0000],
[0x1E, 0x8000],
[0x20, 0x1104],
[0x2B, 0x0008],
[0x2C, 0x0008],
[0x2D, 0x0008],
[0x2E, 0x0008],
[0x32, 0x0FFC],
[0x35, 0x0067],
[0x5F, 0x0904],
4 + regArr[4][1] - 19;
resetDelay = 4*regArr[9][1]
overheadTime = 180;
shutterWidth = regArr[7][1]
integrationPeriods = shutterWidth*rowTime - overheadTime - resetDelay;
clockPeriod = 1000.0/24e6;
integrationTime = integrationPeriods * clockPeriod;
with open('integrationtime.txt','w') as it:
it.write(str(integrationTime)+"\n")
print ("Initial integration time: %.3fms"%(integrationTime));
print ("Initial gain: 0x%02x"%(globalGain));
a_lock = thread.allocate_lock();
def readThread(threadName,read_Flag):
global flag,handle,storeFlag,bufferData,openFlag
global a_lock
count = 0
time0 = time.time()
time1 = time.time()
data = {}
while openFlag == False:
time1 = time.time();
if time1 - time0 > 20:
exit;
while flag:
res = ArducamSDK.Py_ArduCam_available(handle)
if res > 0:
res,data = ArducamSDK.Py_ArduCam_read(handle,Width * Height)
if res == 0:
count += 1
time1 = time.time()
ArducamSDK.Py_ArduCam_del(handle)
else:
print ("read data fail!")
else:
time.sleep(.01);
if len(data) >= Width * Height:
if time1 - time0 >= 5:
print ("%s %f %s\n"%("fps:",count*1.0/(time1-time0),"/s"))
count = 0
time0 = time1
a_lock.acquire();
bufferData = data;
data = [];
storeFlag = True;
a_lock.release();
if flag == False:
break
thread.start_new_thread( readThread,("Thread-2", flag,))
pass
def showAndSave(threadName,algoquenoseusa):
global flag,W_zoom,H_zoom,V_value,H_value,lx,ly,downFlag,saveFlag,saveNum,bufferData,storeFlag
global a_lock
global hist_ax
global NOMBREIMG
img = np.zeros((Height, Width), dtype=np.uint8);
while flag:
a_lock.acquire();
if storeFlag == True:
storeFlag = False;
img = np.frombuffer(bufferData, np.uint8)
img = np.reshape(img, (Height, Width));
saveNum += 1
name = NOMBREIMG + ".fits"
hdu=fits.PrimaryHDU()
hdu.data=img
hdu.writeto(name,overwrite=True)
print ("Frame saved to %s"%(name))
a_lock.release();
if saveNum == NUMIMG:
flag=False;
print ("Total number of adq images = %d"%(saveNum))
if flag == False:
break
thread.start_new_thread( showAndSave,("Thread-3",flag))
pass
def init_and_read_arducam():
global flag,regArr,handle,openFlag
regNum = 0
res,handle = ArducamSDK.Py_ArduCam_autoopen(cfg)
if res == 0:
openFlag = True
print ("device open success!")
while (regArr[regNum][0] != 0xFFFF):
ArducamSDK.Py_ArduCam_writeSensorReg(handle,regArr[regNum][0],regArr[regNum][1])
regNum = regNum + 1
res = ArducamSDK.Py_ArduCam_beginCapture(handle)
if res == 0:
print ("transfer task create success!")
while flag :
res = ArducamSDK.Py_ArduCam_capture(handle)
if res != 0:
print ("capture failed!")
flag = False;
break;
time.sleep(0.1)
if flag == False:
break
else:
print ("transfer task create fail!")
time.sleep(2);
res = ArducamSDK.Py_ArduCam_close(handle)
if res == 0:
openFlag = False
print ("device close success!")
else:
print ("device close fail!")
else:
print ("device open fail!")
if __name__ == "__main__":
initTime = time.time();
init_and_read_arducam();
| true | true |
f71a2d96365d53c5ef530130fb564554ef725c20 | 1,117 | py | Python | lib/surface/eventflow/triggers/__init__.py | kustodian/google-cloud-sdk | b6bae4137d4b58030adb3dcb1271216dfb19f96d | [
"Apache-2.0"
] | null | null | null | lib/surface/eventflow/triggers/__init__.py | kustodian/google-cloud-sdk | b6bae4137d4b58030adb3dcb1271216dfb19f96d | [
"Apache-2.0"
] | null | null | null | lib/surface/eventflow/triggers/__init__.py | kustodian/google-cloud-sdk | b6bae4137d4b58030adb3dcb1271216dfb19f96d | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The gcloud eventflow triggers group."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
class Triggers(base.Group):
"""View and manage your Eventflow triggers.
This set of commands can be used to view and manage your Eventflow resources.
"""
detailed_help = {
'EXAMPLES': """\
To list your existing triggers, run:
$ {command} list
""",
}
| 30.189189 | 79 | 0.726052 |
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
class Triggers(base.Group):
detailed_help = {
'EXAMPLES': """\
To list your existing triggers, run:
$ {command} list
""",
}
| true | true |
f71a2de92ecf79a70555c5ed5b4cafbc45bf3a74 | 4,851 | py | Python | tempest/cli/simple_read_only/test_cinder.py | BeenzSyed/tempest | 7a64ee1216d844f6b99928b53f5c665b84cb8719 | [
"Apache-2.0"
] | null | null | null | tempest/cli/simple_read_only/test_cinder.py | BeenzSyed/tempest | 7a64ee1216d844f6b99928b53f5c665b84cb8719 | [
"Apache-2.0"
] | null | null | null | tempest/cli/simple_read_only/test_cinder.py | BeenzSyed/tempest | 7a64ee1216d844f6b99928b53f5c665b84cb8719 | [
"Apache-2.0"
] | null | null | null | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import re
import subprocess
import tempest.cli
LOG = logging.getLogger(__name__)
class SimpleReadOnlyCinderClientTest(tempest.cli.ClientTestBase):
    """Basic, read-only tests for Cinder CLI client.

    Checks return values and output of read-only commands.
    These tests do not presume any content, nor do they create
    their own. They only verify the structure of output if present.
    """
    def test_cinder_fake_action(self):
        # An unknown subcommand must make the CLI exit non-zero.
        self.assertRaises(subprocess.CalledProcessError,
                          self.cinder,
                          'this-does-not-exist')
    def test_cinder_absolute_limit_list(self):
        roles = self.parser.listing(self.cinder('absolute-limits'))
        self.assertTableStruct(roles, ['Name', 'Value'])
    # The following commands only need to exit successfully; no output
    # structure is asserted.
    def test_cinder_backup_list(self):
        self.cinder('backup-list')
    def test_cinder_extra_specs_list(self):
        self.cinder('extra-specs-list')
    def test_cinder_volumes_list(self):
        self.cinder('list')
    def test_cinder_quota_class_show(self):
        """This CLI can accept any string as param."""
        roles = self.parser.listing(self.cinder('quota-class-show',
                                    params='abc'))
        self.assertTableStruct(roles, ['Property', 'Value'])
    def test_cinder_quota_defaults(self):
        """This CLI can accept any string as param."""
        roles = self.parser.listing(self.cinder('quota-defaults',
                                    params=self.identity.
                                    admin_tenant_name))
        self.assertTableStruct(roles, ['Property', 'Value'])
    def test_cinder_quota_show(self):
        """This CLI can accept any string as param."""
        roles = self.parser.listing(self.cinder('quota-show',
                                    params=self.identity.
                                    admin_tenant_name))
        self.assertTableStruct(roles, ['Property', 'Value'])
    def test_cinder_rate_limits(self):
        self.cinder('rate-limits')
    def test_cinder_snapshot_list(self):
        self.cinder('snapshot-list')
    def test_cinder_type_list(self):
        self.cinder('type-list')
    def test_cinder_list_extensions(self):
        self.cinder('list-extensions')
        roles = self.parser.listing(self.cinder('list-extensions'))
        self.assertTableStruct(roles, ['Name', 'Summary', 'Alias', 'Updated'])
    def test_cinder_credentials(self):
        self.cinder('credentials')
    def test_cinder_availability_zone_list(self):
        self.cinder('availability-zone-list')
    def test_cinder_endpoints(self):
        self.cinder('endpoints')
    def test_cinder_service_list(self):
        self.cinder('service-list')
    def test_cinder_transfer_list(self):
        self.cinder('transfer-list')
    def test_cinder_bash_completion(self):
        self.cinder('bash-completion')
    def test_admin_help(self):
        # Parse the help text and check a representative subset of the
        # expected subcommands appears in the "Positional arguments" section.
        help_text = self.cinder('help')
        lines = help_text.split('\n')
        self.assertFirstLineStartsWith(lines, 'usage: cinder')
        commands = []
        cmds_start = lines.index('Positional arguments:')
        cmds_end = lines.index('Optional arguments:')
        # Subcommands are listed indented by four spaces.
        command_pattern = re.compile('^ {4}([a-z0-9\-\_]+)')
        for line in lines[cmds_start:cmds_end]:
            match = command_pattern.match(line)
            if match:
                commands.append(match.group(1))
        commands = set(commands)
        wanted_commands = set(('absolute-limits', 'list', 'help',
                               'quota-show', 'type-list', 'snapshot-list'))
        self.assertFalse(wanted_commands - commands)
    # Optional arguments:
    def test_cinder_version(self):
        self.cinder('', flags='--version')
    def test_cinder_debug_list(self):
        self.cinder('list', flags='--debug')
    def test_cinder_retries_list(self):
        self.cinder('list', flags='--retries 3')
    def test_cinder_region_list(self):
        # Fall back to the identity region when no volume region is set.
        region = self.config.volume.region
        if not region:
            region = self.config.identity.region
        self.cinder('list', flags='--os-region-name ' + region)
| 35.408759 | 78 | 0.632035 |
import logging
import re
import subprocess
import tempest.cli
LOG = logging.getLogger(__name__)
class SimpleReadOnlyCinderClientTest(tempest.cli.ClientTestBase):
def test_cinder_fake_action(self):
self.assertRaises(subprocess.CalledProcessError,
self.cinder,
'this-does-not-exist')
def test_cinder_absolute_limit_list(self):
roles = self.parser.listing(self.cinder('absolute-limits'))
self.assertTableStruct(roles, ['Name', 'Value'])
def test_cinder_backup_list(self):
self.cinder('backup-list')
def test_cinder_extra_specs_list(self):
self.cinder('extra-specs-list')
def test_cinder_volumes_list(self):
self.cinder('list')
def test_cinder_quota_class_show(self):
roles = self.parser.listing(self.cinder('quota-class-show',
params='abc'))
self.assertTableStruct(roles, ['Property', 'Value'])
def test_cinder_quota_defaults(self):
roles = self.parser.listing(self.cinder('quota-defaults',
params=self.identity.
admin_tenant_name))
self.assertTableStruct(roles, ['Property', 'Value'])
def test_cinder_quota_show(self):
roles = self.parser.listing(self.cinder('quota-show',
params=self.identity.
admin_tenant_name))
self.assertTableStruct(roles, ['Property', 'Value'])
def test_cinder_rate_limits(self):
self.cinder('rate-limits')
def test_cinder_snapshot_list(self):
self.cinder('snapshot-list')
def test_cinder_type_list(self):
self.cinder('type-list')
def test_cinder_list_extensions(self):
self.cinder('list-extensions')
roles = self.parser.listing(self.cinder('list-extensions'))
self.assertTableStruct(roles, ['Name', 'Summary', 'Alias', 'Updated'])
def test_cinder_credentials(self):
self.cinder('credentials')
def test_cinder_availability_zone_list(self):
self.cinder('availability-zone-list')
def test_cinder_endpoints(self):
self.cinder('endpoints')
def test_cinder_service_list(self):
self.cinder('service-list')
def test_cinder_transfer_list(self):
self.cinder('transfer-list')
def test_cinder_bash_completion(self):
self.cinder('bash-completion')
def test_admin_help(self):
help_text = self.cinder('help')
lines = help_text.split('\n')
self.assertFirstLineStartsWith(lines, 'usage: cinder')
commands = []
cmds_start = lines.index('Positional arguments:')
cmds_end = lines.index('Optional arguments:')
command_pattern = re.compile('^ {4}([a-z0-9\-\_]+)')
for line in lines[cmds_start:cmds_end]:
match = command_pattern.match(line)
if match:
commands.append(match.group(1))
commands = set(commands)
wanted_commands = set(('absolute-limits', 'list', 'help',
'quota-show', 'type-list', 'snapshot-list'))
self.assertFalse(wanted_commands - commands)
def test_cinder_version(self):
self.cinder('', flags='--version')
def test_cinder_debug_list(self):
self.cinder('list', flags='--debug')
def test_cinder_retries_list(self):
self.cinder('list', flags='--retries 3')
def test_cinder_region_list(self):
region = self.config.volume.region
if not region:
region = self.config.identity.region
self.cinder('list', flags='--os-region-name ' + region)
| true | true |
f71a2e2450c7afe71a1025c53865035c1ff60cb5 | 268 | py | Python | highiq/io/__init__.py | ClariNerd617/HighIQ | 0305902f889da869535834620bb4fb15ac54b11d | [
"BSD-3-Clause"
] | 6 | 2020-03-16T14:14:45.000Z | 2021-09-21T06:39:57.000Z | highiq/io/__init__.py | ClariNerd617/HighIQ | 0305902f889da869535834620bb4fb15ac54b11d | [
"BSD-3-Clause"
] | null | null | null | highiq/io/__init__.py | ClariNerd617/HighIQ | 0305902f889da869535834620bb4fb15ac54b11d | [
"BSD-3-Clause"
] | 3 | 2019-12-16T19:56:35.000Z | 2021-06-09T14:14:47.000Z | """
=========
highiq.io
=========
.. currentmodule:: highiq.io
This module contains the I/O methods for loading data into and saving data from HighIQ analyses.
.. autosummary::
:toctree: generated/
load_arm_netcdf
"""
from .arm_data import load_arm_netcdf | 16.75 | 96 | 0.682836 | from .arm_data import load_arm_netcdf | true | true |
f71a2e415b2e9d0db183f02c832c777618bce8e9 | 1,292 | py | Python | model-optimizer/extensions/back/RNNSequenceTypeRename.py | calvinfeng/openvino | 11f591c16852637506b1b40d083b450e56d0c8ac | [
"Apache-2.0"
] | null | null | null | model-optimizer/extensions/back/RNNSequenceTypeRename.py | calvinfeng/openvino | 11f591c16852637506b1b40d083b450e56d0c8ac | [
"Apache-2.0"
] | 19 | 2021-03-26T08:11:00.000Z | 2022-02-21T13:06:26.000Z | model-optimizer/extensions/back/RNNSequenceTypeRename.py | calvinfeng/openvino | 11f591c16852637506b1b40d083b450e56d0c8ac | [
"Apache-2.0"
] | 1 | 2021-07-28T17:30:46.000Z | 2021-07-28T17:30:46.000Z | """
Copyright (C) 2018-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from mo.back.replacement import BackReplacementPattern
from mo.graph.graph import Graph
class RNNSequence(BackReplacementPattern):
    """Rename the internal ``RNNSequence`` type to the concrete operation name.

    Model Optimizer uses the internal type ``RNNSequence`` for every
    recurrent layer; this back replacement rewrites that type attribute to
    the final IR name derived from the node's ``op`` attribute
    (``LSTM`` -> ``LSTMSequence``, ``GRU`` -> ``GRUSequence``,
    ``RNN`` -> ``RNNSequence``).
    """
    enabled = True

    # The only recurrent ops this rename is defined for.
    _supported_ops = ['RNN', 'LSTM', 'GRU']

    def pattern(self):
        # Match any single node whose type is the internal 'RNNSequence'.
        return {
            'nodes': [
                ('rnn_layer', {'type': 'RNNSequence'}),
            ],
            'edges': [],
        }

    def replace_pattern(self, graph: Graph, match: dict):
        node = match['rnn_layer']
        op = node['op']
        assert op in self._supported_ops
        node['type'] = '{}Sequence'.format(op)
| 31.512195 | 86 | 0.681889 | from mo.back.replacement import BackReplacementPattern
from mo.graph.graph import Graph
class RNNSequence(BackReplacementPattern):
enabled = True
def pattern(self):
return dict(
nodes=[
('rnn_layer', {'type': 'RNNSequence'})
],
edges=[]
)
_supported_ops = ['RNN', 'LSTM', 'GRU']
def replace_pattern(self, graph: Graph, match: dict):
rnn_layer = match['rnn_layer']
assert rnn_layer['op'] in self._supported_ops
rnn_layer['type'] = rnn_layer['op'] + 'Sequence'
| true | true |
f71a2e67d16d278f046fedc42260f77f54a931dc | 2,802 | py | Python | vplexapi-7.0.0.0/vplexapi/models/rule_set.py | lhernand3z/python-vplex | 0f94723fd56c7a3a85c4afb3b78046b9c66b93e4 | [
"Apache-2.0"
] | null | null | null | vplexapi-7.0.0.0/vplexapi/models/rule_set.py | lhernand3z/python-vplex | 0f94723fd56c7a3a85c4afb3b78046b9c66b93e4 | [
"Apache-2.0"
] | null | null | null | vplexapi-7.0.0.0/vplexapi/models/rule_set.py | lhernand3z/python-vplex | 0f94723fd56c7a3a85c4afb3b78046b9c66b93e4 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
VPlex REST API
A definition for the next-gen VPlex API # noqa: E501
OpenAPI spec version: 0.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class RuleSet(object):
    """Swagger-generated model for a VPlex rule set.

    Carries only the rule set ``name``.  Mirrors the original
    swagger-codegen output, but drops the ``six`` Python-2 compatibility
    dependency in favour of native Python 3 dict iteration.
    """
    # swagger_types: attribute name -> attribute type name
    swagger_types = {
        'name': 'str'
    }
    # attribute_map: attribute name -> JSON key in the API definition
    attribute_map = {
        'name': 'name'
    }

    def __init__(self, name=None):  # noqa: E501
        """RuleSet - a model defined in Swagger.

        :param name: optional rule set name; left as ``None`` when omitted
        """
        self._name = None
        self.discriminator = None
        if name is not None:
            self.name = name

    @property
    def name(self):
        """Gets the name of this RuleSet.

        :return: The name of this RuleSet.
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this RuleSet.

        :param name: The name of this RuleSet.
        :type: str
        """
        self._name = name

    def to_dict(self):
        """Returns the model properties as a dict."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize model elements inside lists.
                result[attr] = [
                    item.to_dict() if hasattr(item, 'to_dict') else item
                    for item in value
                ]
            elif hasattr(value, 'to_dict'):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize model values inside dicts.
                result[attr] = {
                    k: v.to_dict() if hasattr(v, 'to_dict') else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, RuleSet):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
| 24.79646 | 80 | 0.533904 |
import pprint
import re
import six
class RuleSet(object):
swagger_types = {
'name': 'str'
}
attribute_map = {
'name': 'name'
}
def __init__(self, name=None):
self._name = None
self.discriminator = None
if name is not None:
self.name = name
@property
def name(self):
return self._name
@name.setter
def name(self, name):
self._name = name
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, RuleSet):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f71a2e87d4b8d901b178fcd9d35e179c33a8334f | 4,868 | py | Python | BoxThermal.py | AndrewFalkowski/SODIS_SIM | 4d5da3e0872ee747d399d66fdee1633e7d2b8ab1 | [
"MIT"
] | null | null | null | BoxThermal.py | AndrewFalkowski/SODIS_SIM | 4d5da3e0872ee747d399d66fdee1633e7d2b8ab1 | [
"MIT"
] | null | null | null | BoxThermal.py | AndrewFalkowski/SODIS_SIM | 4d5da3e0872ee747d399d66fdee1633e7d2b8ab1 | [
"MIT"
] | null | null | null | import numpy as np
from math import sqrt
import matplotlib.pyplot as plt
import numba
import time
from scipy.integrate import odeint
# a sample differential equation dy/dx = (x-y)/2
# def dydx(x,y):
# return ((x-y)/2)
# # find the value of y for a given x using step size h
# # and an initial value y0 at x0
# def rungeKutta(x0, y0, x, h):
# #count num iteratings using step size or step height h
# n = int(((x - x0)/h))
# # iterate for number of iterations
# y = y0
# for i in range(1, n + 1):
# # apply runge kutta formulas to find the next value of y
# k1 = h * dydx(x0, y)
# k2 = h * dydx(x0 + 0.5 * h, y + 0.5 * k1)
# k3 = h * dydx(x0 + 0.5 * h, y + 0.5 * k2)
# k4 = h * dydx(x0 + h, y + k3)
# # update the next value of y
# y = y + (1.0 / 6.0) * (k1 + 2*k2 + 2*k3 + k4)
# # update the next value of x
# x0 = x0 + h
# return y
# # driver method
# x0 = 0
# y = 1
# x = 2
# h = 0.2
# print('The value of y at x is:', rungeKutta(x0, y, x, h))
def box_dim(A_c, h, prct_f):
    """Derive contained masses and side-wall area for the solar box.

    :param A_c: collector (top) area in m^2
    :param h: box height in m
    :param prct_f: fraction of the box volume filled with water (0..1)
    :return: tuple ``(m_f, m_a, A_s)`` -- water mass [kg], air mass [kg],
             and total side-wall area [m^2]
    """
    # Total enclosed volume and the water-filled part of it.
    total_volume = A_c * h
    water_volume = total_volume * prct_f
    # Air fills the remainder; 1.225 kg/m^3 is air density at sea level.
    m_a = total_volume * (1 - prct_f) * 1.225
    # 997 kg/m^3 is the density of water near room temperature.
    m_f = water_volume * 997
    print('Contained Water: ', m_f, 'Liters')
    # Four rectangular side walls, each of height h and width sqrt(A_c).
    A_s = 4 * h * np.sqrt(A_c)
    return m_f, m_a, A_s
# m_f, m_a, A_s = box_dim(0.25, 0.15, 0.9)
def boxODE(x, t, m_f, m_a, A_s):
    """Right-hand side of the solar-box thermal ODE system for odeint.

    The state vector ``x`` holds five temperatures [K], in order:
    outer glazing, inner glazing, trapped air, absorber plate, and water.

    :param x: state vector [T_g1, T_g2, T_a, T_p, T_f] in kelvin
    :param t: time in seconds (drives the irradiance polynomial below)
    :param m_f: water mass [kg]
    :param m_a: air mass [kg]
    :param A_s: total side-wall area [m^2]
    :return: list of the five temperature derivatives dT/dt
    """
    # constants
    A_c = 0.25 # square meters -- collector (top) area
    A_s = A_s  # side-wall area, passed through unchanged
    A_f = A_c # square meters -- fluid surface area (currently unused below)
    T_amb = 298 # kelvin -- ambient air temperature
    T_sky = T_amb - 6 # kelvin -- effective sky temperature for radiation
    alpha_g = 0.02 # % -- glazing absorptivity
    alpha_p = 0.98  # absorber-plate absorptivity
    t_g = 0.9 # % -- glazing transmissivity
    t_f = 0.85 # % -- fluid transmissivity
    # print(t)
    # Polynomial fit of solar irradiance vs. time of day in seconds
    # (presumably W/m^2 -- TODO confirm units against the fit source).
    Irr = 0.0426*(t) + 1.38E-6*(t)**2 - 7.94E-11*(t)**3 + 7.3E-16*(t)**4
    # Irr = 600
    x_b = 0.065 # insulation thickness meters (base)
    x_s = 0.065 # insulation thickness meters (sides)
    k_i = 1.0 # thermal conductivity of side materials, foamed glass # W/mK
    # Heat-transfer coefficients; 'rad' = radiative, 'cov' = convective,
    # subscripts name the two surfaces exchanging heat.
    h_rad_g2_g1 = 8
    h_cov_g2_g1 = 20
    h_rad_g1_sky = 8
    h_rad_g1_amb = 8
    h_rad_p_g2 = 20
    h_cov_a_g2 = 8
    h_cov_f_a = 8
    h_cov_p_f = 30
    h_cov_g1_amb = 65
    # Thermal masses: mass (or density * volume) times specific heat.
    M_f = m_f * 4.187
    M_g1 = 1150 * (A_c * 0.001) * 1.67 # assuming acrylic
    M_g2 = M_g1
    M_p = 8960 * (A_c * 0.065) * 1.0
    # assuming coper
    M_a = 0.718 * m_a
    # assign each ODE to a vector element
    T_g1 = x[0]
    T_g2 = x[1]
    T_a = x[2]
    T_p = x[3]
    T_f = x[4]
    # Inter-component heat flows (coefficient * area * temperature delta).
    Q_rad_g2_g1 = h_rad_g2_g1 * A_c * (T_g2 - T_g1)
    Q_cov_g2_g1 = h_cov_g2_g1 * A_c * (T_g2 - T_g1)
    Q_rad_g1_sky = h_rad_g1_sky * A_c * (T_g1 - T_sky)
    # NOTE(review): this convective loss term uses the radiative
    # coefficient h_rad_g1_amb -- confirm that is intended.
    Q_cov_g1_amb = h_rad_g1_amb * A_c * (T_g1 - T_amb)
    Q_rad_p_g2 = h_rad_p_g2 * A_c * (T_p - T_g2)
    Q_cov_a_g2 = h_cov_a_g2 * A_c * (T_a - T_g2)
    Q_cov_f_a = h_cov_f_a * (A_c) * (T_f - T_a)
    Q_cov_p_f = h_cov_p_f * A_c * (T_p - T_f)
    # Conduction losses through the insulated base and side walls:
    # series resistance of insulation plus outer convective film.
    U_base = ((x_b/k_i) + 1/(h_cov_g1_amb))**(-1)
    U_side = ((x_s/k_i) + 1/(h_cov_g1_amb))**(-1)
    Q_amb_loss = (U_base*A_c + U_side*A_s)*(T_p - T_amb)
    # define each ODE: net heat flow into each component over its thermal mass
    dT_g1dt = (Irr * alpha_g * A_c + Q_rad_g2_g1 + Q_cov_g2_g1 - Q_rad_g1_sky - Q_cov_g1_amb) / M_g1
    dT_g2dt = (Irr * alpha_g * t_g * A_c + Q_rad_p_g2 + Q_cov_a_g2 - Q_rad_g2_g1) / M_g2
    dT_adt = (Q_cov_f_a - Q_cov_a_g2)/M_a
    dT_pdt = (Irr * alpha_p * t_g**2 * t_f * A_c - Q_rad_p_g2 - Q_amb_loss - Q_cov_p_f) / M_p
    dT_fdt = (Q_cov_p_f + Q_cov_f_a) / M_f
    return [dT_g1dt, dT_g2dt, dT_adt, dT_pdt, dT_fdt]
# x0 = [298, 298, 298, 298, 285]
# # test the defined ODES
# print(boxODE(x=x0, t=0, m_f=m_f, m_a=m_a, A_s=A_s))
# # declare a time vector (time window)
# t = np.linspace(0,54000,1000)
# x = odeint(boxODE,x0,t, args=(m_f, m_a, A_s))
# Tf= x[:,4]
# Tp = x[:,3]
# # plot the results
# plt.plot((t/3600)+5.8,Tf_2, label='fluid')
# # plt.plot(t/3600,Tp, label='plate')
# plt.legend()
# plt.ylim(298, 340)
# plt.xlim(0,24)
# plt.show()
#%%
# xs = np.arange(27000,28201,1)
# ys = 0.0226*xs - 295
# #%%
# fig = plt.figure(figsize=(5,5))
# fig, ax1 = plt.subplots()
# plt.plot((t/3600)+5.8,Tf, color='r')
# plt.plot(xs/3600 + 5.8, ys, color='r')
# plt.plot(np.arange(27000,27601,1)/3600+5.8, )
# plt.hlines(338, -100, 100, linestyle=':', color='k')
# plt.text(6.5, 339, 'Pasteurization Temperature')
# ax1.tick_params(direction='in', length=7,top=True, right=True, left=True)
# minor_locator_x = AutoMinorLocator(2)
# minor_locator_y = AutoMinorLocator(2)
# ax1.get_xaxis().set_minor_locator(minor_locator_x)
# ax1.get_yaxis().set_minor_locator(minor_locator_y)
# # rotate and align the tick labels so they look better
# plt.tick_params(which='minor',
# direction='in',
# length=4,
# right=True,
# left=True,
# top=True)
# plt.xlim(6,21)
# plt.xlabel('Hour of Day')
# plt.ylim(298, 350)
# plt.ylabel('Water Temperature (K)')
# plt.savefig('Figures/comb_img.png', dpi=300) | 27.044444 | 100 | 0.581758 | import numpy as np
from math import sqrt
import matplotlib.pyplot as plt
import numba
import time
from scipy.integrate import odeint
t, m_f, m_a, A_s):
A_c = 0.25
A_s = A_s
A_f = A_c
T_amb = 298
T_sky = T_amb - 6
alpha_g = 0.02
alpha_p = 0.98
t_g = 0.9
t_f = 0.85
Irr = 0.0426*(t) + 1.38E-6*(t)**2 - 7.94E-11*(t)**3 + 7.3E-16*(t)**4
x_b = 0.065
x_s = 0.065
k_i = 1.0 _rad_g2_g1 = 8
h_cov_g2_g1 = 20
h_rad_g1_sky = 8
h_rad_g1_amb = 8
h_rad_p_g2 = 20
h_cov_a_g2 = 8
h_cov_f_a = 8
h_cov_p_f = 30
h_cov_g1_amb = 65
M_f = m_f * 4.187
M_g1 = 1150 * (A_c * 0.001) * 1.67
M_g2 = M_g1
M_p = 8960 * (A_c * 0.065) * 1.0
M_a = 0.718 * m_a
T_g1 = x[0]
T_g2 = x[1]
T_a = x[2]
T_p = x[3]
T_f = x[4]
Q_rad_g2_g1 = h_rad_g2_g1 * A_c * (T_g2 - T_g1)
Q_cov_g2_g1 = h_cov_g2_g1 * A_c * (T_g2 - T_g1)
Q_rad_g1_sky = h_rad_g1_sky * A_c * (T_g1 - T_sky)
Q_cov_g1_amb = h_rad_g1_amb * A_c * (T_g1 - T_amb)
Q_rad_p_g2 = h_rad_p_g2 * A_c * (T_p - T_g2)
Q_cov_a_g2 = h_cov_a_g2 * A_c * (T_a - T_g2)
Q_cov_f_a = h_cov_f_a * (A_c) * (T_f - T_a)
Q_cov_p_f = h_cov_p_f * A_c * (T_p - T_f)
U_base = ((x_b/k_i) + 1/(h_cov_g1_amb))**(-1)
U_side = ((x_s/k_i) + 1/(h_cov_g1_amb))**(-1)
Q_amb_loss = (U_base*A_c + U_side*A_s)*(T_p - T_amb)
dT_g1dt = (Irr * alpha_g * A_c + Q_rad_g2_g1 + Q_cov_g2_g1 - Q_rad_g1_sky - Q_cov_g1_amb) / M_g1
dT_g2dt = (Irr * alpha_g * t_g * A_c + Q_rad_p_g2 + Q_cov_a_g2 - Q_rad_g2_g1) / M_g2
dT_adt = (Q_cov_f_a - Q_cov_a_g2)/M_a
dT_pdt = (Irr * alpha_p * t_g**2 * t_f * A_c - Q_rad_p_g2 - Q_amb_loss - Q_cov_p_f) / M_p
dT_fdt = (Q_cov_p_f + Q_cov_f_a) / M_f
return [dT_g1dt, dT_g2dt, dT_adt, dT_pdt, dT_fdt]
| true | true |
f71a2e97febc43b9fe06cbb74dd070431e79c852 | 5,121 | py | Python | libweasyl/libweasyl/alembic/versions/e2bedd00b085_fill_journal_and_character_hidden_.py | kfkitsune/weasyl | 7e63c6db98ed2debfadbc277509533f72ea078a5 | [
"Apache-2.0"
] | 111 | 2016-05-18T04:18:18.000Z | 2021-11-03T02:05:19.000Z | libweasyl/libweasyl/alembic/versions/e2bedd00b085_fill_journal_and_character_hidden_.py | Weasyl/weasyl | 80c86942c6f20a815086e2895fdad51d3aa77eed | [
"Apache-2.0"
] | 1,103 | 2016-05-29T05:17:53.000Z | 2022-03-31T18:12:40.000Z | libweasyl/libweasyl/alembic/versions/e2bedd00b085_fill_journal_and_character_hidden_.py | kfkitsune/weasyl | 7e63c6db98ed2debfadbc277509533f72ea078a5 | [
"Apache-2.0"
] | 47 | 2016-05-29T20:48:37.000Z | 2021-11-12T09:40:40.000Z | """Fill journal and character hidden/friends-only columns
Revision ID: e2bedd00b085
Revises: 1fbcfecd195e
Create Date: 2021-07-26 05:43:43.742595
"""
# revision identifiers, used by Alembic.
revision = 'e2bedd00b085'
down_revision = '1fbcfecd195e'
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
BATCH_SIZE = 10_000
def upgrade():
    """Backfill hidden/friends_only booleans, then make them NOT NULL.

    The legacy ``settings`` column encodes flags as characters ('h' =
    hidden, 'f' = friends-only).  Rows are updated in BATCH_SIZE id ranges
    inside an autocommit block so each batch commits independently and
    long-running locks are avoided; a final unbatched pass catches any
    stragglers (e.g. rows inserted after max(id) was read) before the
    columns are constrained.
    """
    context = op.get_context()
    with context.autocommit_block():
        # --- character table: batched backfill by charid range ---
        max_charid = context.bind.scalar(text("SELECT max(charid) FROM character"))
        for i in range(1, max_charid + 1, BATCH_SIZE):
            context.bind.execute(
                text("UPDATE character SET hidden = settings ~ 'h', friends_only = settings ~ 'f' WHERE (charid BETWEEN :start AND :end) AND (hidden IS NULL OR friends_only IS NULL)"),
                {"start": i, "end": i + BATCH_SIZE - 1},
            )
        # Final sweep for any rows the batches missed.
        context.bind.execute(
            text("UPDATE character SET hidden = settings ~ 'h', friends_only = settings ~ 'f' WHERE (hidden IS NULL OR friends_only IS NULL)"),
        )
        # --- journal table: same batched backfill by journalid range ---
        max_journalid = context.bind.scalar(text("SELECT max(journalid) FROM journal"))
        for i in range(1, max_journalid + 1, BATCH_SIZE):
            context.bind.execute(
                text("UPDATE journal SET hidden = settings ~ 'h', friends_only = settings ~ 'f' WHERE (journalid BETWEEN :start AND :end) AND (hidden IS NULL OR friends_only IS NULL)"),
                {"start": i, "end": i + BATCH_SIZE - 1},
            )
        context.bind.execute(
            text("UPDATE journal SET hidden = settings ~ 'h', friends_only = settings ~ 'f' WHERE (hidden IS NULL OR friends_only IS NULL)"),
        )
    # With every row populated, tighten the columns: NOT NULL, default false.
    op.alter_column('character', 'hidden',
               existing_type=sa.BOOLEAN(),
               server_default='f',
               nullable=False)
    op.alter_column('character', 'friends_only',
               existing_type=sa.BOOLEAN(),
               server_default='f',
               nullable=False)
    op.alter_column('journal', 'hidden',
               existing_type=sa.BOOLEAN(),
               server_default='f',
               nullable=False)
    op.alter_column('journal', 'friends_only',
               existing_type=sa.BOOLEAN(),
               server_default='f',
               nullable=False)
def downgrade():
    """Relax the boolean columns and rebuild the legacy settings flags.

    Reverses :func:`upgrade`: first makes hidden/friends_only nullable
    again (dropping the defaults), then rewrites the 'h'/'f' characters in
    ``settings`` from the boolean columns, batched by id range inside an
    autocommit block, with a final unbatched sweep for stragglers.
    """
    op.alter_column('character', 'hidden',
               existing_type=sa.BOOLEAN(),
               server_default=None,
               nullable=True)
    op.alter_column('character', 'friends_only',
               existing_type=sa.BOOLEAN(),
               server_default=None,
               nullable=True)
    op.alter_column('journal', 'hidden',
               existing_type=sa.BOOLEAN(),
               server_default=None,
               nullable=True)
    op.alter_column('journal', 'friends_only',
               existing_type=sa.BOOLEAN(),
               server_default=None,
               nullable=True)
    context = op.get_context()
    with context.autocommit_block():
        # --- character table: strip old 'h'/'f' flags, re-append from the
        # boolean columns, only touching rows whose flags are out of sync ---
        max_charid = context.bind.scalar(text("SELECT max(charid) FROM character"))
        for i in range(1, max_charid + 1, BATCH_SIZE):
            context.bind.execute(
                text(
                    "UPDATE character SET settings = regexp_replace(settings, '[hf]', '', 'g')"
                    " || (CASE WHEN hidden THEN 'h' ELSE '' END)"
                    " || (CASE WHEN friends_only THEN 'f' ELSE '' END)"
                    " WHERE ((settings ~ 'h') != hidden OR (settings ~ 'f') != friends_only)"
                    " AND (charid BETWEEN :start AND :end)"
                ),
                {"start": i, "end": i + BATCH_SIZE - 1},
            )
        # Final sweep for any character rows the batches missed.
        context.bind.execute(
            text(
                "UPDATE character SET settings = regexp_replace(settings, '[hf]', '', 'g')"
                " || (CASE WHEN hidden THEN 'h' ELSE '' END)"
                " || (CASE WHEN friends_only THEN 'f' ELSE '' END)"
                " WHERE ((settings ~ 'h') != hidden OR (settings ~ 'f') != friends_only)"
            ),
        )
        # --- journal table: same rebuild, batched by journalid range ---
        max_journalid = context.bind.scalar(text("SELECT max(journalid) FROM journal"))
        for i in range(1, max_journalid + 1, BATCH_SIZE):
            context.bind.execute(
                text(
                    "UPDATE journal SET settings = regexp_replace(settings, '[hf]', '', 'g')"
                    " || (CASE WHEN hidden THEN 'h' ELSE '' END)"
                    " || (CASE WHEN friends_only THEN 'f' ELSE '' END)"
                    " WHERE ((settings ~ 'h') != hidden OR (settings ~ 'f') != friends_only)"
                    " AND (journalid BETWEEN :start AND :end)"
                ),
                {"start": i, "end": i + BATCH_SIZE - 1},
            )
        context.bind.execute(
            text(
                "UPDATE journal SET settings = regexp_replace(settings, '[hf]', '', 'g')"
                " || (CASE WHEN hidden THEN 'h' ELSE '' END)"
                " || (CASE WHEN friends_only THEN 'f' ELSE '' END)"
                " WHERE ((settings ~ 'h') != hidden OR (settings ~ 'f') != friends_only)"
            ),
        )
| 38.503759 | 185 | 0.540129 |
revision = 'e2bedd00b085'
down_revision = '1fbcfecd195e'
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
BATCH_SIZE = 10_000
def upgrade():
context = op.get_context()
with context.autocommit_block():
max_charid = context.bind.scalar(text("SELECT max(charid) FROM character"))
for i in range(1, max_charid + 1, BATCH_SIZE):
context.bind.execute(
text("UPDATE character SET hidden = settings ~ 'h', friends_only = settings ~ 'f' WHERE (charid BETWEEN :start AND :end) AND (hidden IS NULL OR friends_only IS NULL)"),
{"start": i, "end": i + BATCH_SIZE - 1},
)
context.bind.execute(
text("UPDATE character SET hidden = settings ~ 'h', friends_only = settings ~ 'f' WHERE (hidden IS NULL OR friends_only IS NULL)"),
)
max_journalid = context.bind.scalar(text("SELECT max(journalid) FROM journal"))
for i in range(1, max_journalid + 1, BATCH_SIZE):
context.bind.execute(
text("UPDATE journal SET hidden = settings ~ 'h', friends_only = settings ~ 'f' WHERE (journalid BETWEEN :start AND :end) AND (hidden IS NULL OR friends_only IS NULL)"),
{"start": i, "end": i + BATCH_SIZE - 1},
)
context.bind.execute(
text("UPDATE journal SET hidden = settings ~ 'h', friends_only = settings ~ 'f' WHERE (hidden IS NULL OR friends_only IS NULL)"),
)
op.alter_column('character', 'hidden',
existing_type=sa.BOOLEAN(),
server_default='f',
nullable=False)
op.alter_column('character', 'friends_only',
existing_type=sa.BOOLEAN(),
server_default='f',
nullable=False)
op.alter_column('journal', 'hidden',
existing_type=sa.BOOLEAN(),
server_default='f',
nullable=False)
op.alter_column('journal', 'friends_only',
existing_type=sa.BOOLEAN(),
server_default='f',
nullable=False)
def downgrade():
op.alter_column('character', 'hidden',
existing_type=sa.BOOLEAN(),
server_default=None,
nullable=True)
op.alter_column('character', 'friends_only',
existing_type=sa.BOOLEAN(),
server_default=None,
nullable=True)
op.alter_column('journal', 'hidden',
existing_type=sa.BOOLEAN(),
server_default=None,
nullable=True)
op.alter_column('journal', 'friends_only',
existing_type=sa.BOOLEAN(),
server_default=None,
nullable=True)
context = op.get_context()
with context.autocommit_block():
max_charid = context.bind.scalar(text("SELECT max(charid) FROM character"))
for i in range(1, max_charid + 1, BATCH_SIZE):
context.bind.execute(
text(
"UPDATE character SET settings = regexp_replace(settings, '[hf]', '', 'g')"
" || (CASE WHEN hidden THEN 'h' ELSE '' END)"
" || (CASE WHEN friends_only THEN 'f' ELSE '' END)"
" WHERE ((settings ~ 'h') != hidden OR (settings ~ 'f') != friends_only)"
" AND (charid BETWEEN :start AND :end)"
),
{"start": i, "end": i + BATCH_SIZE - 1},
)
context.bind.execute(
text(
"UPDATE character SET settings = regexp_replace(settings, '[hf]', '', 'g')"
" || (CASE WHEN hidden THEN 'h' ELSE '' END)"
" || (CASE WHEN friends_only THEN 'f' ELSE '' END)"
" WHERE ((settings ~ 'h') != hidden OR (settings ~ 'f') != friends_only)"
),
)
max_journalid = context.bind.scalar(text("SELECT max(journalid) FROM journal"))
for i in range(1, max_journalid + 1, BATCH_SIZE):
context.bind.execute(
text(
"UPDATE journal SET settings = regexp_replace(settings, '[hf]', '', 'g')"
" || (CASE WHEN hidden THEN 'h' ELSE '' END)"
" || (CASE WHEN friends_only THEN 'f' ELSE '' END)"
" WHERE ((settings ~ 'h') != hidden OR (settings ~ 'f') != friends_only)"
" AND (journalid BETWEEN :start AND :end)"
),
{"start": i, "end": i + BATCH_SIZE - 1},
)
context.bind.execute(
text(
"UPDATE journal SET settings = regexp_replace(settings, '[hf]', '', 'g')"
" || (CASE WHEN hidden THEN 'h' ELSE '' END)"
" || (CASE WHEN friends_only THEN 'f' ELSE '' END)"
" WHERE ((settings ~ 'h') != hidden OR (settings ~ 'f') != friends_only)"
),
)
| true | true |
f71a2f238671395b100919c093a517ccf04d98ac | 2,876 | py | Python | resolwe_bio/processes/slamdunk/alleyoop_utrrates.py | plojyon/resolwe-bio | 45d001a78fcc387b5e3239a34c9da7f40d789022 | [
"Apache-2.0"
] | 12 | 2015-12-07T18:29:27.000Z | 2022-03-16T08:00:18.000Z | resolwe_bio/processes/slamdunk/alleyoop_utrrates.py | plojyon/resolwe-bio | 45d001a78fcc387b5e3239a34c9da7f40d789022 | [
"Apache-2.0"
] | 480 | 2015-11-20T21:46:43.000Z | 2022-03-28T12:40:57.000Z | resolwe_bio/processes/slamdunk/alleyoop_utrrates.py | plojyon/resolwe-bio | 45d001a78fcc387b5e3239a34c9da7f40d789022 | [
"Apache-2.0"
] | 45 | 2015-11-19T14:54:07.000Z | 2022-02-13T21:36:50.000Z | """Run Alleyoop utrrates tool on Slamdunk results."""
import os
from plumbum import TEE
from resolwe.process import (
Cmd,
DataField,
FileField,
IntegerField,
Process,
StringField,
)
class AlleyoopUtrRates(Process):
    """Run the Alleyoop ``utrrates`` tool on Slamdunk alignment results.

    Produces per-region conversion-rate statistics (renamed to a ``.txt``
    report) and the corresponding PDF plot.
    """
    slug = "alleyoop-utr-rates"
    process_type = "data:alleyoop:utrrates"
    name = "Alleyoop UTR Rates"
    requirements = {
        "expression-engine": "jinja",
        "executor": {
            "docker": {"image": "public.ecr.aws/s4q6j6e8/resolwebio/slamdunk:2.0.0"},
        },
        "resources": {
            "cores": 1,
            "memory": 16384,
        },
    }
    entity = {
        "type": "sample",
    }
    category = "Slamdunk"
    data_name = '{{ slamdunk|sample_name|default("?") }}'
    version = "1.2.1"
    class Input:
        """Input fields for AlleyoopUtrRates."""
        ref_seq = DataField(
            "seq:nucleotide", label="FASTA file containig sequences for aligning"
        )
        regions = DataField(
            "bed", label="BED file with coordinates of regions of interest"
        )
        slamdunk = DataField("alignment:bam:slamdunk", label="Slamdunk results")
        read_length = IntegerField(
            label="Maximum read length",
            description="Maximum length of reads in the input FASTQ file",
            default=150,
        )
    class Output:
        """Output fields to process AlleyoopUtrRates."""
        report = FileField(
            label="Tab-separated file containing conversion rates on each region of interest"
        )
        plot = FileField(label="Region of interest conversion rate plot")
        species = StringField(label="Species")
        build = StringField(label="Build")
    def run(self, inputs, outputs):
        """Run the ``alleyoop utrrates`` analysis and collect its outputs."""
        # Derive the sample name from the input BAM filename.
        basename = os.path.basename(inputs.slamdunk.output.bam.path)
        assert basename.endswith(".bam")
        name = basename[:-4]
        args = [
            "-o",
            "utrrates",
            "-r",
            inputs.ref_seq.output.fasta.path,
            "-b",
            inputs.regions.output.bed.path,
            "-l",
            inputs.read_length,
        ]
        # Run the external tool; TEE streams output and returns the exit code
        # instead of raising on failure (retcode=None).
        return_code, _, _ = Cmd["alleyoop"]["utrrates"][args][
            inputs.slamdunk.output.bam.path
        ] & TEE(retcode=None)
        if return_code:
            self.error("Alleyoop utrrates analysis failed.")
        # Rename the CSV report to the .txt name expected by the output field.
        rates_file = os.path.join("utrrates", f"{name}_mutationrates_utr.csv")
        rates_file_renamed = os.path.join("utrrates", f"{name}_mutationrates.txt")
        os.rename(rates_file, rates_file_renamed)
        outputs.report = rates_file_renamed
        outputs.plot = os.path.join("utrrates", f"{name}_mutationrates_utr.pdf")
        # Propagate species/build annotations from the upstream Slamdunk data.
        outputs.species = inputs.slamdunk.output.species
        outputs.build = inputs.slamdunk.output.build
| 30.273684 | 93 | 0.585883 | import os
from plumbum import TEE
from resolwe.process import (
Cmd,
DataField,
FileField,
IntegerField,
Process,
StringField,
)
class AlleyoopUtrRates(Process):
slug = "alleyoop-utr-rates"
process_type = "data:alleyoop:utrrates"
name = "Alleyoop UTR Rates"
requirements = {
"expression-engine": "jinja",
"executor": {
"docker": {"image": "public.ecr.aws/s4q6j6e8/resolwebio/slamdunk:2.0.0"},
},
"resources": {
"cores": 1,
"memory": 16384,
},
}
entity = {
"type": "sample",
}
category = "Slamdunk"
data_name = '{{ slamdunk|sample_name|default("?") }}'
version = "1.2.1"
class Input:
ref_seq = DataField(
"seq:nucleotide", label="FASTA file containig sequences for aligning"
)
regions = DataField(
"bed", label="BED file with coordinates of regions of interest"
)
slamdunk = DataField("alignment:bam:slamdunk", label="Slamdunk results")
read_length = IntegerField(
label="Maximum read length",
description="Maximum length of reads in the input FASTQ file",
default=150,
)
class Output:
report = FileField(
label="Tab-separated file containing conversion rates on each region of interest"
)
plot = FileField(label="Region of interest conversion rate plot")
species = StringField(label="Species")
build = StringField(label="Build")
def run(self, inputs, outputs):
basename = os.path.basename(inputs.slamdunk.output.bam.path)
assert basename.endswith(".bam")
name = basename[:-4]
args = [
"-o",
"utrrates",
"-r",
inputs.ref_seq.output.fasta.path,
"-b",
inputs.regions.output.bed.path,
"-l",
inputs.read_length,
]
return_code, _, _ = Cmd["alleyoop"]["utrrates"][args][
inputs.slamdunk.output.bam.path
] & TEE(retcode=None)
if return_code:
self.error("Alleyoop utrrates analysis failed.")
rates_file = os.path.join("utrrates", f"{name}_mutationrates_utr.csv")
rates_file_renamed = os.path.join("utrrates", f"{name}_mutationrates.txt")
os.rename(rates_file, rates_file_renamed)
outputs.report = rates_file_renamed
outputs.plot = os.path.join("utrrates", f"{name}_mutationrates_utr.pdf")
outputs.species = inputs.slamdunk.output.species
outputs.build = inputs.slamdunk.output.build
| true | true |
f71a2fbd3261e086d9f3bcb7623757c304921595 | 3,328 | py | Python | fixture/orm.py | IKeiran/FPT-Sinyakov | 08c5121d84c394bcee91d087ac2d14581179d2fd | [
"Apache-2.0"
] | null | null | null | fixture/orm.py | IKeiran/FPT-Sinyakov | 08c5121d84c394bcee91d087ac2d14581179d2fd | [
"Apache-2.0"
] | null | null | null | fixture/orm.py | IKeiran/FPT-Sinyakov | 08c5121d84c394bcee91d087ac2d14581179d2fd | [
"Apache-2.0"
] | null | null | null | from pony.orm import *
from datetime import datetime
from model.contact import Contact
from model.group import Group
from pymysql.converters import decoders
class ORMFixtue:
    """Pony-ORM fixture for reading groups and contacts from the app DB.

    Maps the ``group_list`` and ``addressbook`` tables (linked through the
    ``address_in_groups`` join table) and converts query results into the
    test model objects ``Group`` and ``Contact``.
    """
    db = Database()
    class ORMGroup(db.Entity):
        # Entity mapped onto the 'group_list' table.
        _table_ = 'group_list'
        id = PrimaryKey(int, column='group_id')
        name = Optional(str, column='group_name')
        header = Optional(str, column='group_header')
        footer = Optional(str, column='group_footer')
        # Many-to-many link to contacts via the 'address_in_groups' table.
        contacts = Set(lambda: ORMFixtue.ORMContact, table='address_in_groups', column='id', reverse='groups', lazy=True)
    class ORMContact(db.Entity):
        # Entity mapped onto the 'addressbook' table.
        _table_ = 'addressbook'
        id = PrimaryKey(int, column='id')
        first_name = Optional(str, column='firstname')
        last_name = Optional(str, column='lastname')
        address = Optional(str, column='address')
        home_phone = Optional(str, column='home')
        mobile_phone = Optional(str, column='mobile')
        work_phone = Optional(str, column='work')
        email_prime = Optional(str, column='email')
        email_secondary = Optional(str, column='email2')
        email_third = Optional(str, column='email3')
        # Soft-delete marker: non-NULL means the contact was removed.
        deprecated = Optional(datetime, column='deprecated')
        groups = Set(lambda: ORMFixtue.ORMGroup, table='address_in_groups', column='group_id', reverse='contacts', lazy = True)
    def __init__(self, host, name, user, password):
        """Bind the entities to the MySQL database and generate the mapping."""
        self.db.bind('mysql', host=host, database=name, user=user, password=password, conv=decoders)
        self.db.generate_mapping()
    def convert_groups_to_model(self, groups):
        """Convert ORMGroup query results into test-model Group objects."""
        def convert(group):
            return Group(id=str(group.id), name=group.name, header=group.header, footer=group.footer)
        return list(map(convert, groups))
    @db_session
    def get_group_list(self):
        """Return all groups as test-model objects."""
        return self.convert_groups_to_model(select(g for g in ORMFixtue.ORMGroup))
    def convert_contacts_to_model(self, contacts):
        """Convert ORMContact query results into test-model Contact objects."""
        def convert(contact):
            result = Contact(id=str(contact.id),
                             first_name=contact.first_name, last_name=contact.last_name, adress=contact.address,
                             home_phone=contact.home_phone, mobile_phone=contact.mobile_phone, work_phone=contact.work_phone,
                             email_prime=contact.email_prime, email_secondary=contact.email_secondary, email_third=contact.email_third)
            return result
        return list(map(convert, contacts))
    @db_session
    def get_contact_list(self):
        """Return all non-deleted contacts as test-model objects."""
        return self.convert_contacts_to_model(select(c for c in ORMFixtue.ORMContact if c.deprecated is None))
    @db_session
    def get_orm_group(self, group):
        """Fetch the ORM entity matching a test-model group's id."""
        return list(select(g for g in ORMFixtue.ORMGroup if g.id == group.id))[0]
    @db_session
    def get_contacts_in_group(self, group):
        """Return non-deleted contacts belonging to the given group."""
        orm_group = self.get_orm_group(group)
        return self.convert_contacts_to_model(orm_group.contacts)
    @db_session
    def get_contacts_not_in_group(self, group):
        """Return non-deleted contacts that are NOT in the given group."""
        orm_group = self.get_orm_group(group)
        return self.convert_contacts_to_model(
            select(c for c in ORMFixtue.ORMContact if c.deprecated is None and orm_group not in c.groups))
# @db_session
# def get_contact_group_boundry(self):
# return list(select(d for d in ORMFixtue.ORMBoundary))
| 41.6 | 133 | 0.679688 | from pony.orm import *
from datetime import datetime
from model.contact import Contact
from model.group import Group
from pymysql.converters import decoders
class ORMFixtue:
db = Database()
class ORMGroup(db.Entity):
_table_ = 'group_list'
id = PrimaryKey(int, column='group_id')
name = Optional(str, column='group_name')
header = Optional(str, column='group_header')
footer = Optional(str, column='group_footer')
contacts = Set(lambda: ORMFixtue.ORMContact, table='address_in_groups', column='id', reverse='groups', lazy=True)
class ORMContact(db.Entity):
_table_ = 'addressbook'
id = PrimaryKey(int, column='id')
first_name = Optional(str, column='firstname')
last_name = Optional(str, column='lastname')
address = Optional(str, column='address')
home_phone = Optional(str, column='home')
mobile_phone = Optional(str, column='mobile')
work_phone = Optional(str, column='work')
email_prime = Optional(str, column='email')
email_secondary = Optional(str, column='email2')
email_third = Optional(str, column='email3')
deprecated = Optional(datetime, column='deprecated')
groups = Set(lambda: ORMFixtue.ORMGroup, table='address_in_groups', column='group_id', reverse='contacts', lazy = True)
def __init__(self, host, name, user, password):
self.db.bind('mysql', host=host, database=name, user=user, password=password, conv=decoders)
self.db.generate_mapping()
def convert_groups_to_model(self, groups):
def convert(group):
return Group(id=str(group.id), name=group.name, header=group.header, footer=group.footer)
return list(map(convert, groups))
@db_session
def get_group_list(self):
return self.convert_groups_to_model(select(g for g in ORMFixtue.ORMGroup))
def convert_contacts_to_model(self, contacts):
def convert(contact):
result = Contact(id=str(contact.id),
first_name=contact.first_name, last_name=contact.last_name, adress=contact.address,
home_phone=contact.home_phone, mobile_phone=contact.mobile_phone, work_phone=contact.work_phone,
email_prime=contact.email_prime, email_secondary=contact.email_secondary, email_third=contact.email_third)
return result
return list(map(convert, contacts))
@db_session
def get_contact_list(self):
return self.convert_contacts_to_model(select(c for c in ORMFixtue.ORMContact if c.deprecated is None))
@db_session
def get_orm_group(self, group):
return list(select(g for g in ORMFixtue.ORMGroup if g.id == group.id))[0]
@db_session
def get_contacts_in_group(self, group):
orm_group = self.get_orm_group(group)
return self.convert_contacts_to_model(orm_group.contacts)
@db_session
def get_contacts_not_in_group(self, group):
orm_group = self.get_orm_group(group)
return self.convert_contacts_to_model(
select(c for c in ORMFixtue.ORMContact if c.deprecated is None and orm_group not in c.groups))
| true | true |
f71a300263267957f62029ccbbaaa9d0a69f7565 | 5,677 | py | Python | selfdrive/car/chrysler/carstate.py | choongsoo/openpilot | 3441ee566669f40ffaac622b0ef025e5da570af1 | [
"MIT"
] | 1 | 2022-03-31T05:07:44.000Z | 2022-03-31T05:07:44.000Z | selfdrive/car/chrysler/carstate.py | choongsoo/openpilot | 3441ee566669f40ffaac622b0ef025e5da570af1 | [
"MIT"
] | null | null | null | selfdrive/car/chrysler/carstate.py | choongsoo/openpilot | 3441ee566669f40ffaac622b0ef025e5da570af1 | [
"MIT"
] | null | null | null | from cereal import car
from common.conversions import Conversions as CV
from opendbc.can.parser import CANParser
from opendbc.can.can_define import CANDefine
from selfdrive.car.interfaces import CarStateBase
from selfdrive.car.chrysler.values import DBC, STEER_THRESHOLD
class CarState(CarStateBase):
def __init__(self, CP):
super().__init__(CP)
can_define = CANDefine(DBC[CP.carFingerprint]["pt"])
self.shifter_values = can_define.dv["GEAR"]["PRNDL"]
def update(self, cp, cp_cam):
ret = car.CarState.new_message()
self.frame = int(cp.vl["EPS_STATUS"]["COUNTER"])
ret.doorOpen = any([cp.vl["BCM_1"]["DOOR_OPEN_FL"],
cp.vl["BCM_1"]["DOOR_OPEN_FR"],
cp.vl["BCM_1"]["DOOR_OPEN_RL"],
cp.vl["BCM_1"]["DOOR_OPEN_RR"]])
ret.seatbeltUnlatched = cp.vl["SEATBELT_STATUS"]["SEATBELT_DRIVER_UNLATCHED"] == 1
# brake pedal
ret.brake = 0
ret.brakePressed = cp.vl["ESP_1"]['Brake_Pedal_State'] == 1 # Physical brake pedal switch
# gas pedal
ret.gas = cp.vl["ECM_5"]["Accelerator_Position"]
ret.gasPressed = ret.gas > 1e-5
ret.espDisabled = (cp.vl["TRACTION_BUTTON"]["TRACTION_OFF"] == 1)
ret.wheelSpeeds = self.get_wheel_speeds(
cp.vl["WHEEL_SPEEDS"]["WHEEL_SPEED_FL"],
cp.vl["WHEEL_SPEEDS"]["WHEEL_SPEED_FR"],
cp.vl["WHEEL_SPEEDS"]["WHEEL_SPEED_RL"],
cp.vl["WHEEL_SPEEDS"]["WHEEL_SPEED_RR"],
unit=1,
)
ret.vEgoRaw = (cp.vl["SPEED_1"]["SPEED_LEFT"] + cp.vl["SPEED_1"]["SPEED_RIGHT"]) / 2.
ret.vEgo, ret.aEgo = self.update_speed_kf(ret.vEgoRaw)
ret.standstill = not ret.vEgoRaw > 0.001
ret.leftBlinker = cp.vl["STEERING_LEVERS"]["TURN_SIGNALS"] == 1
ret.rightBlinker = cp.vl["STEERING_LEVERS"]["TURN_SIGNALS"] == 2
ret.steeringAngleDeg = cp.vl["STEERING"]["STEER_ANGLE"]
ret.steeringRateDeg = cp.vl["STEERING"]["STEERING_RATE"]
ret.gearShifter = self.parse_gear_shifter(self.shifter_values.get(cp.vl["GEAR"]["PRNDL"], None))
ret.cruiseState.available = cp.vl["DAS_3"]["ACC_AVAILABLE"] == 1 # ACC is white
ret.cruiseState.enabled = cp.vl["DAS_3"]["ACC_ACTIVE"] == 1 # ACC is green
ret.cruiseState.speed = cp.vl["DASHBOARD"]["ACC_SPEED_CONFIG_KPH"] * CV.KPH_TO_MS
# CRUISE_STATE is a three bit msg, 0 is off, 1 and 2 are Non-ACC mode, 3 and 4 are ACC mode, find if there are other states too
ret.cruiseState.nonAdaptive = cp.vl["DASHBOARD"]["CRUISE_STATE"] in (1, 2)
ret.accFaulted = cp.vl["DAS_3"]["ACC_FAULTED"] != 0
ret.steeringTorque = cp.vl["EPS_STATUS"]["TORQUE_DRIVER"]
ret.steeringTorqueEps = cp.vl["EPS_STATUS"]["TORQUE_MOTOR"]
ret.steeringPressed = abs(ret.steeringTorque) > STEER_THRESHOLD
steer_state = cp.vl["EPS_STATUS"]["LKAS_STATE"]
ret.steerFaultPermanent = steer_state == 4 or (steer_state == 0 and ret.vEgo > self.CP.minSteerSpeed)
ret.genericToggle = bool(cp.vl["STEERING_LEVERS"]["HIGH_BEAM_FLASH"])
if self.CP.enableBsm:
ret.leftBlindspot = cp.vl["BLIND_SPOT_WARNINGS"]["BLIND_SPOT_LEFT"] == 1
ret.rightBlindspot = cp.vl["BLIND_SPOT_WARNINGS"]["BLIND_SPOT_RIGHT"] == 1
self.lkas_counter = cp_cam.vl["LKAS_COMMAND"]["COUNTER"]
self.lkas_car_model = cp_cam.vl["LKAS_HUD"]["CAR_MODEL"]
self.lkas_status_ok = cp_cam.vl["LKAS_HEARTBIT"]["LKAS_STATUS_OK"]
self.button_counter = cp.vl["WHEEL_BUTTONS"]["COUNTER"]
return ret
@staticmethod
def get_can_parser(CP):
signals = [
# sig_name, sig_address
("PRNDL", "GEAR"),
("DOOR_OPEN_FL", "BCM_1"),
("DOOR_OPEN_FR", "BCM_1"),
("DOOR_OPEN_RL", "BCM_1"),
("DOOR_OPEN_RR", "BCM_1"),
("Brake_Pedal_State", "ESP_1"),
("Accelerator_Position", "ECM_5"),
("SPEED_LEFT", "SPEED_1"),
("SPEED_RIGHT", "SPEED_1"),
("WHEEL_SPEED_FL", "WHEEL_SPEEDS"),
("WHEEL_SPEED_RR", "WHEEL_SPEEDS"),
("WHEEL_SPEED_RL", "WHEEL_SPEEDS"),
("WHEEL_SPEED_FR", "WHEEL_SPEEDS"),
("STEER_ANGLE", "STEERING"),
("STEERING_RATE", "STEERING"),
("TURN_SIGNALS", "STEERING_LEVERS"),
("ACC_AVAILABLE", "DAS_3"),
("ACC_ACTIVE", "DAS_3"),
("ACC_FAULTED", "DAS_3"),
("HIGH_BEAM_FLASH", "STEERING_LEVERS"),
("ACC_SPEED_CONFIG_KPH", "DASHBOARD"),
("CRUISE_STATE", "DASHBOARD"),
("TORQUE_DRIVER", "EPS_STATUS"),
("TORQUE_MOTOR", "EPS_STATUS"),
("LKAS_STATE", "EPS_STATUS"),
("COUNTER", "EPS_STATUS",),
("TRACTION_OFF", "TRACTION_BUTTON"),
("SEATBELT_DRIVER_UNLATCHED", "SEATBELT_STATUS"),
("COUNTER", "WHEEL_BUTTONS"),
]
checks = [
# sig_address, frequency
("ESP_1", 50),
("EPS_STATUS", 100),
("SPEED_1", 100),
("WHEEL_SPEEDS", 50),
("STEERING", 100),
("DAS_3", 50),
("GEAR", 50),
("ECM_5", 50),
("WHEEL_BUTTONS", 50),
("DASHBOARD", 15),
("STEERING_LEVERS", 10),
("SEATBELT_STATUS", 2),
("BCM_1", 1),
("TRACTION_BUTTON", 1),
]
if CP.enableBsm:
signals += [
("BLIND_SPOT_RIGHT", "BLIND_SPOT_WARNINGS"),
("BLIND_SPOT_LEFT", "BLIND_SPOT_WARNINGS"),
]
checks.append(("BLIND_SPOT_WARNINGS", 2))
return CANParser(DBC[CP.carFingerprint]["pt"], signals, checks, 0)
@staticmethod
def get_cam_can_parser(CP):
signals = [
# sig_name, sig_address
("COUNTER", "LKAS_COMMAND"),
("CAR_MODEL", "LKAS_HUD"),
("LKAS_STATUS_OK", "LKAS_HEARTBIT")
]
checks = [
("LKAS_COMMAND", 100),
("LKAS_HEARTBIT", 10),
("LKAS_HUD", 4),
]
return CANParser(DBC[CP.carFingerprint]["pt"], signals, checks, 2)
| 36.159236 | 131 | 0.630791 | from cereal import car
from common.conversions import Conversions as CV
from opendbc.can.parser import CANParser
from opendbc.can.can_define import CANDefine
from selfdrive.car.interfaces import CarStateBase
from selfdrive.car.chrysler.values import DBC, STEER_THRESHOLD
class CarState(CarStateBase):
def __init__(self, CP):
super().__init__(CP)
can_define = CANDefine(DBC[CP.carFingerprint]["pt"])
self.shifter_values = can_define.dv["GEAR"]["PRNDL"]
def update(self, cp, cp_cam):
ret = car.CarState.new_message()
self.frame = int(cp.vl["EPS_STATUS"]["COUNTER"])
ret.doorOpen = any([cp.vl["BCM_1"]["DOOR_OPEN_FL"],
cp.vl["BCM_1"]["DOOR_OPEN_FR"],
cp.vl["BCM_1"]["DOOR_OPEN_RL"],
cp.vl["BCM_1"]["DOOR_OPEN_RR"]])
ret.seatbeltUnlatched = cp.vl["SEATBELT_STATUS"]["SEATBELT_DRIVER_UNLATCHED"] == 1
ret.brake = 0
ret.brakePressed = cp.vl["ESP_1"]['Brake_Pedal_State'] == 1
ret.gas = cp.vl["ECM_5"]["Accelerator_Position"]
ret.gasPressed = ret.gas > 1e-5
ret.espDisabled = (cp.vl["TRACTION_BUTTON"]["TRACTION_OFF"] == 1)
ret.wheelSpeeds = self.get_wheel_speeds(
cp.vl["WHEEL_SPEEDS"]["WHEEL_SPEED_FL"],
cp.vl["WHEEL_SPEEDS"]["WHEEL_SPEED_FR"],
cp.vl["WHEEL_SPEEDS"]["WHEEL_SPEED_RL"],
cp.vl["WHEEL_SPEEDS"]["WHEEL_SPEED_RR"],
unit=1,
)
ret.vEgoRaw = (cp.vl["SPEED_1"]["SPEED_LEFT"] + cp.vl["SPEED_1"]["SPEED_RIGHT"]) / 2.
ret.vEgo, ret.aEgo = self.update_speed_kf(ret.vEgoRaw)
ret.standstill = not ret.vEgoRaw > 0.001
ret.leftBlinker = cp.vl["STEERING_LEVERS"]["TURN_SIGNALS"] == 1
ret.rightBlinker = cp.vl["STEERING_LEVERS"]["TURN_SIGNALS"] == 2
ret.steeringAngleDeg = cp.vl["STEERING"]["STEER_ANGLE"]
ret.steeringRateDeg = cp.vl["STEERING"]["STEERING_RATE"]
ret.gearShifter = self.parse_gear_shifter(self.shifter_values.get(cp.vl["GEAR"]["PRNDL"], None))
ret.cruiseState.available = cp.vl["DAS_3"]["ACC_AVAILABLE"] == 1
ret.cruiseState.enabled = cp.vl["DAS_3"]["ACC_ACTIVE"] == 1
ret.cruiseState.speed = cp.vl["DASHBOARD"]["ACC_SPEED_CONFIG_KPH"] * CV.KPH_TO_MS
ret.cruiseState.nonAdaptive = cp.vl["DASHBOARD"]["CRUISE_STATE"] in (1, 2)
ret.accFaulted = cp.vl["DAS_3"]["ACC_FAULTED"] != 0
ret.steeringTorque = cp.vl["EPS_STATUS"]["TORQUE_DRIVER"]
ret.steeringTorqueEps = cp.vl["EPS_STATUS"]["TORQUE_MOTOR"]
ret.steeringPressed = abs(ret.steeringTorque) > STEER_THRESHOLD
steer_state = cp.vl["EPS_STATUS"]["LKAS_STATE"]
ret.steerFaultPermanent = steer_state == 4 or (steer_state == 0 and ret.vEgo > self.CP.minSteerSpeed)
ret.genericToggle = bool(cp.vl["STEERING_LEVERS"]["HIGH_BEAM_FLASH"])
if self.CP.enableBsm:
ret.leftBlindspot = cp.vl["BLIND_SPOT_WARNINGS"]["BLIND_SPOT_LEFT"] == 1
ret.rightBlindspot = cp.vl["BLIND_SPOT_WARNINGS"]["BLIND_SPOT_RIGHT"] == 1
self.lkas_counter = cp_cam.vl["LKAS_COMMAND"]["COUNTER"]
self.lkas_car_model = cp_cam.vl["LKAS_HUD"]["CAR_MODEL"]
self.lkas_status_ok = cp_cam.vl["LKAS_HEARTBIT"]["LKAS_STATUS_OK"]
self.button_counter = cp.vl["WHEEL_BUTTONS"]["COUNTER"]
return ret
@staticmethod
def get_can_parser(CP):
signals = [
("PRNDL", "GEAR"),
("DOOR_OPEN_FL", "BCM_1"),
("DOOR_OPEN_FR", "BCM_1"),
("DOOR_OPEN_RL", "BCM_1"),
("DOOR_OPEN_RR", "BCM_1"),
("Brake_Pedal_State", "ESP_1"),
("Accelerator_Position", "ECM_5"),
("SPEED_LEFT", "SPEED_1"),
("SPEED_RIGHT", "SPEED_1"),
("WHEEL_SPEED_FL", "WHEEL_SPEEDS"),
("WHEEL_SPEED_RR", "WHEEL_SPEEDS"),
("WHEEL_SPEED_RL", "WHEEL_SPEEDS"),
("WHEEL_SPEED_FR", "WHEEL_SPEEDS"),
("STEER_ANGLE", "STEERING"),
("STEERING_RATE", "STEERING"),
("TURN_SIGNALS", "STEERING_LEVERS"),
("ACC_AVAILABLE", "DAS_3"),
("ACC_ACTIVE", "DAS_3"),
("ACC_FAULTED", "DAS_3"),
("HIGH_BEAM_FLASH", "STEERING_LEVERS"),
("ACC_SPEED_CONFIG_KPH", "DASHBOARD"),
("CRUISE_STATE", "DASHBOARD"),
("TORQUE_DRIVER", "EPS_STATUS"),
("TORQUE_MOTOR", "EPS_STATUS"),
("LKAS_STATE", "EPS_STATUS"),
("COUNTER", "EPS_STATUS",),
("TRACTION_OFF", "TRACTION_BUTTON"),
("SEATBELT_DRIVER_UNLATCHED", "SEATBELT_STATUS"),
("COUNTER", "WHEEL_BUTTONS"),
]
checks = [
("ESP_1", 50),
("EPS_STATUS", 100),
("SPEED_1", 100),
("WHEEL_SPEEDS", 50),
("STEERING", 100),
("DAS_3", 50),
("GEAR", 50),
("ECM_5", 50),
("WHEEL_BUTTONS", 50),
("DASHBOARD", 15),
("STEERING_LEVERS", 10),
("SEATBELT_STATUS", 2),
("BCM_1", 1),
("TRACTION_BUTTON", 1),
]
if CP.enableBsm:
signals += [
("BLIND_SPOT_RIGHT", "BLIND_SPOT_WARNINGS"),
("BLIND_SPOT_LEFT", "BLIND_SPOT_WARNINGS"),
]
checks.append(("BLIND_SPOT_WARNINGS", 2))
return CANParser(DBC[CP.carFingerprint]["pt"], signals, checks, 0)
@staticmethod
def get_cam_can_parser(CP):
signals = [
("COUNTER", "LKAS_COMMAND"),
("CAR_MODEL", "LKAS_HUD"),
("LKAS_STATUS_OK", "LKAS_HEARTBIT")
]
checks = [
("LKAS_COMMAND", 100),
("LKAS_HEARTBIT", 10),
("LKAS_HUD", 4),
]
return CANParser(DBC[CP.carFingerprint]["pt"], signals, checks, 2)
| true | true |
f71a301d080276930f713a265069db17067d03cb | 43 | py | Python | linguistics/bert/__init__.py | idin/mercurius | 48a4ed7843fb5d1946ef8051f23da7b32ab52ca3 | [
"MIT"
] | 7 | 2019-02-24T16:56:46.000Z | 2022-01-30T03:26:49.000Z | linguistics/bert/__init__.py | idin/mercurius | 48a4ed7843fb5d1946ef8051f23da7b32ab52ca3 | [
"MIT"
] | 1 | 2020-07-14T21:00:57.000Z | 2021-02-25T07:12:11.000Z | linguistics/bert/__init__.py | idin/linguistics | ab9568d81b225928beab353174fd97ccb0fe369c | [
"MIT"
] | null | null | null | from .BertVectorizer import BertVectorizer
| 21.5 | 42 | 0.883721 | from .BertVectorizer import BertVectorizer
| true | true |
f71a30533b6634f0a1e795ab1b2cb53461019bfe | 1,928 | py | Python | upvote/gae/lib/bit9/monitoring.py | iwikmai/upvote | 77bb200d0e35a28cc5aed98ceee8e234998814b6 | [
"Apache-2.0"
] | 453 | 2017-10-24T15:29:44.000Z | 2021-09-27T23:21:20.000Z | upvote/gae/lib/bit9/monitoring.py | iwikmai/upvote | 77bb200d0e35a28cc5aed98ceee8e234998814b6 | [
"Apache-2.0"
] | 58 | 2018-03-23T21:19:16.000Z | 2021-05-23T20:06:05.000Z | upvote/gae/lib/bit9/monitoring.py | iwikmai/upvote | 77bb200d0e35a28cc5aed98ceee8e234998814b6 | [
"Apache-2.0"
] | 36 | 2018-03-23T21:25:54.000Z | 2021-09-27T23:21:24.000Z | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Monitoring metrics for the bit9_api AppEngine module."""
import six
from upvote.gae.utils import monitoring_utils
from upvote.monitoring import metrics
# Remove once everything is PY3, where long == int
if six.PY3:
long = int # pylint: disable=redefined-builtin, invalid-name
events_to_pull = monitoring_utils.Metric(metrics.BIT9_API.EVENTS_TO_PULL, long)
events_pulled = monitoring_utils.Counter(metrics.BIT9_API.EVENTS_PULLED)
events_to_process = monitoring_utils.Metric(
metrics.BIT9_API.EVENTS_TO_PROCESS, long)
events_processed = monitoring_utils.Counter(metrics.BIT9_API.EVENTS_PROCESSED)
events_skipped = monitoring_utils.Counter(metrics.BIT9_API.EVENTS_SKIPPED)
pending_changes = monitoring_utils.Metric(metrics.BIT9_API.PENDING_CHANGES, long)
# Bit9 integration metrics
bit9_logins = monitoring_utils.SuccessFailureCounter(metrics.BIT9_API.BIT9_LOGINS)
bit9_qps = monitoring_utils.Counter(metrics.BIT9_API.BIT9_QPS)
bit9_requests = monitoring_utils.Counter(
metrics.BIT9_API.BIT9_REQUESTS,
fields=[('http_method', str), ('api_object', str), ('http_status', int)])
bit9_latency = monitoring_utils.LatencyMetric(
metrics.BIT9_API.BIT9_LATENCY,
fields=[('http_method', str), ('api_object', str)])
file_instances_missing = monitoring_utils.Counter(
metrics.BIT9_API.FILE_INSTANCES_MISSING)
| 41.913043 | 82 | 0.795643 |
import six
from upvote.gae.utils import monitoring_utils
from upvote.monitoring import metrics
if six.PY3:
long = int
events_to_pull = monitoring_utils.Metric(metrics.BIT9_API.EVENTS_TO_PULL, long)
events_pulled = monitoring_utils.Counter(metrics.BIT9_API.EVENTS_PULLED)
events_to_process = monitoring_utils.Metric(
metrics.BIT9_API.EVENTS_TO_PROCESS, long)
events_processed = monitoring_utils.Counter(metrics.BIT9_API.EVENTS_PROCESSED)
events_skipped = monitoring_utils.Counter(metrics.BIT9_API.EVENTS_SKIPPED)
pending_changes = monitoring_utils.Metric(metrics.BIT9_API.PENDING_CHANGES, long)
bit9_logins = monitoring_utils.SuccessFailureCounter(metrics.BIT9_API.BIT9_LOGINS)
bit9_qps = monitoring_utils.Counter(metrics.BIT9_API.BIT9_QPS)
bit9_requests = monitoring_utils.Counter(
metrics.BIT9_API.BIT9_REQUESTS,
fields=[('http_method', str), ('api_object', str), ('http_status', int)])
bit9_latency = monitoring_utils.LatencyMetric(
metrics.BIT9_API.BIT9_LATENCY,
fields=[('http_method', str), ('api_object', str)])
file_instances_missing = monitoring_utils.Counter(
metrics.BIT9_API.FILE_INSTANCES_MISSING)
| true | true |
f71a332a571fb8fd40a02f9f22795f51a43552c4 | 4,280 | py | Python | single_query_extract.py | Gguinet/semisupervised-alignment | 4f914c2e95ef69fa3aefe312fb9b12e482c6f0b5 | [
"MIT"
] | 2 | 2021-01-16T14:12:21.000Z | 2021-12-31T10:15:39.000Z | single_query_extract.py | Gguinet/semisupervised-alignment | 4f914c2e95ef69fa3aefe312fb9b12e482c6f0b5 | [
"MIT"
] | null | null | null | single_query_extract.py | Gguinet/semisupervised-alignment | 4f914c2e95ef69fa3aefe312fb9b12e482c6f0b5 | [
"MIT"
] | 1 | 2021-03-06T15:52:49.000Z | 2021-03-06T15:52:49.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Modifications for Guinet et al.
import io
import warnings
import numpy as np
import argparse
from utils import *
from query_aux import *
#Disable warnings for Meta-features
warnings.filterwarnings("ignore")
# to use bool for parsing
def str2bool(v):
"""Parse String to bool
Args:
v: String or Bool
Returns:
bool
Raises:
ArgumentTypeError: If v is not a String nor a bool
"""
if isinstance(v, bool):
return v
if v.lower() in ("yes", "true", "t", "y", "1"):
return True
elif v.lower() in ("no", "false", "f", "n", "0"):
return False
else:
raise argparse.ArgumentTypeError("Boolean value expected.")
parser = argparse.ArgumentParser(description="Extraction of queries simplified")
parser.add_argument(
"--src_emb", type=str, default="", help="Load source embeddings for training"
)
parser.add_argument(
"--tgt_emb", type=str, default="", help="Load target embeddings for validation"
)
parser.add_argument(
"--filename", type=str, default="", help="Filename of lightsvm files extracted"
)
parser.add_argument(
"--center", action="store_true", help="whether to center embeddings or not"
)
parser.add_argument(
"--dico", type=str, default="", help="Dictionary for query extraction"
)
parser.add_argument("--maxload", type=int, default=200000)
parser.add_argument(
"--query_relevance_type",
type=str,
default="",
help="Type of query relevance: binary or continuous",
)
parser.add_argument("--query_size", type=int, default=10, help="Size of the query")
parser.add_argument(
"--add_csls_coord",
type=str2bool,
default=True,
help="Whether to add to query coord CSLS distance",
)
parser.add_argument(
"--k_csls",
type=int,
default=10,
help="Number of coord in query for CSLS distance (from 0 to k)",
)
parser.add_argument(
"--testing_query",
type=str2bool,
default=False,
help="Whether to impose the ground truth traduction presence in the query",
)
parser.add_argument(
"--add_word_coord",
type=str2bool,
default=False,
help="Whether to add to query coord word embedding",
)
parser.add_argument(
"--discard_empty_query",
type=str2bool,
default=False,
help="Whether to remove query without the right traduction or not",
)
parser.add_argument(
"--use_csls",
type=str2bool,
default=False,
help="Whether to use CSLS distance or CosineSim",
)
parser.add_argument(
"--add_query_coord",
type=str2bool,
default=False,
help="Whether to add to query coord query word embedding",
)
parser.add_argument(
"--add_meta_features",
type=str2bool,
default=True,
help="Whether to add to meta-features of the 2 clouds (source and target)",
)
parser.add_argument(
"--center_meta_features",
type=str2bool,
default=True,
help="Whether to add to center the meta-features of the target clouds",
)
parser.add_argument(
"--nn_size_meta_features",
type=int,
default=10,
help="Number of neighbors to use when computing meta-features",
)
params = parser.parse_args()
###### MAIN ######
query_extractor = (
compute_binary_distance
if params.query_relevance_type == "binary"
else compute_embedding_distance
)
print("Extraction of queries alignment on %s" % params.dico)
words_tgt, x_tgt = load_vectors(
params.tgt_emb, maxload=params.maxload, center=params.center
)
words_src, x_src = load_vectors(
params.src_emb, maxload=params.maxload, center=params.center
)
print("Loading and extracting data")
src2tgt, lexicon_size = load_lexicon(params.dico, words_src, words_tgt)
query_extractor(
x_src,
x_tgt,
params.filename,
src2tgt,
add_csls_coord=params.add_csls_coord,
k_csls=params.k_csls,
testing_query=params.testing_query,
discard_empty_query=params.discard_empty_query,
add_word_coord=params.add_word_coord,
add_query_coord=params.add_query_coord,
add_meta_features=params.add_meta_features,
center_meta_features=params.center_meta_features,
nn_size_meta_features=params.nn_size_meta_features,
query_size=params.query_size,
use_csls=params.use_csls
)
print("Query file extracted")
| 25.628743 | 83 | 0.700935 |
import io
import warnings
import numpy as np
import argparse
from utils import *
from query_aux import *
warnings.filterwarnings("ignore")
def str2bool(v):
if isinstance(v, bool):
return v
if v.lower() in ("yes", "true", "t", "y", "1"):
return True
elif v.lower() in ("no", "false", "f", "n", "0"):
return False
else:
raise argparse.ArgumentTypeError("Boolean value expected.")
parser = argparse.ArgumentParser(description="Extraction of queries simplified")
parser.add_argument(
"--src_emb", type=str, default="", help="Load source embeddings for training"
)
parser.add_argument(
"--tgt_emb", type=str, default="", help="Load target embeddings for validation"
)
parser.add_argument(
"--filename", type=str, default="", help="Filename of lightsvm files extracted"
)
parser.add_argument(
"--center", action="store_true", help="whether to center embeddings or not"
)
parser.add_argument(
"--dico", type=str, default="", help="Dictionary for query extraction"
)
parser.add_argument("--maxload", type=int, default=200000)
parser.add_argument(
"--query_relevance_type",
type=str,
default="",
help="Type of query relevance: binary or continuous",
)
parser.add_argument("--query_size", type=int, default=10, help="Size of the query")
parser.add_argument(
"--add_csls_coord",
type=str2bool,
default=True,
help="Whether to add to query coord CSLS distance",
)
parser.add_argument(
"--k_csls",
type=int,
default=10,
help="Number of coord in query for CSLS distance (from 0 to k)",
)
parser.add_argument(
"--testing_query",
type=str2bool,
default=False,
help="Whether to impose the ground truth traduction presence in the query",
)
parser.add_argument(
"--add_word_coord",
type=str2bool,
default=False,
help="Whether to add to query coord word embedding",
)
parser.add_argument(
"--discard_empty_query",
type=str2bool,
default=False,
help="Whether to remove query without the right traduction or not",
)
parser.add_argument(
"--use_csls",
type=str2bool,
default=False,
help="Whether to use CSLS distance or CosineSim",
)
parser.add_argument(
"--add_query_coord",
type=str2bool,
default=False,
help="Whether to add to query coord query word embedding",
)
parser.add_argument(
"--add_meta_features",
type=str2bool,
default=True,
help="Whether to add to meta-features of the 2 clouds (source and target)",
)
parser.add_argument(
"--center_meta_features",
type=str2bool,
default=True,
help="Whether to add to center the meta-features of the target clouds",
)
parser.add_argument(
"--nn_size_meta_features",
type=int,
default=10,
help="Number of neighbors to use when computing meta-features",
)
params = parser.parse_args()
"
else compute_embedding_distance
)
print("Extraction of queries alignment on %s" % params.dico)
words_tgt, x_tgt = load_vectors(
params.tgt_emb, maxload=params.maxload, center=params.center
)
words_src, x_src = load_vectors(
params.src_emb, maxload=params.maxload, center=params.center
)
print("Loading and extracting data")
src2tgt, lexicon_size = load_lexicon(params.dico, words_src, words_tgt)
query_extractor(
x_src,
x_tgt,
params.filename,
src2tgt,
add_csls_coord=params.add_csls_coord,
k_csls=params.k_csls,
testing_query=params.testing_query,
discard_empty_query=params.discard_empty_query,
add_word_coord=params.add_word_coord,
add_query_coord=params.add_query_coord,
add_meta_features=params.add_meta_features,
center_meta_features=params.center_meta_features,
nn_size_meta_features=params.nn_size_meta_features,
query_size=params.query_size,
use_csls=params.use_csls
)
print("Query file extracted")
| true | true |
f71a33492bc89ba75ddffd485b3bbc63fcd86dc9 | 29,388 | py | Python | source/deepsecurity/api/mac_lists_api.py | felipecosta09/cloudone-workload-controltower-lifecycle | 7927c84d164058b034fc872701b5ee117641f4d1 | [
"Apache-2.0"
] | 1 | 2021-10-30T16:40:09.000Z | 2021-10-30T16:40:09.000Z | source/deepsecurity/api/mac_lists_api.py | felipecosta09/cloudone-workload-controltower-lifecycle | 7927c84d164058b034fc872701b5ee117641f4d1 | [
"Apache-2.0"
] | 1 | 2021-07-28T20:19:03.000Z | 2021-07-28T20:19:03.000Z | source/deepsecurity/api/mac_lists_api.py | felipecosta09/cloudone-workload-controltower-lifecycle | 7927c84d164058b034fc872701b5ee117641f4d1 | [
"Apache-2.0"
] | 1 | 2021-10-30T16:40:02.000Z | 2021-10-30T16:40:02.000Z | # coding: utf-8
"""
Trend Micro Deep Security API
Copyright 2018 - 2020 Trend Micro Incorporated.<br/>Get protected, stay secured, and keep informed with Trend Micro Deep Security's new RESTful API. Access system data and manage security configurations to automate your security workflows and integrate Deep Security into your CI/CD pipeline. # noqa: E501
OpenAPI spec version: 12.5.841
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from deepsecurity.api_client import ApiClient
class MACListsApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_mac_list(self, mac_list, api_version, **kwargs): # noqa: E501
"""Create a MAC List # noqa: E501
Create a new MAC list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_mac_list(mac_list, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param MacList mac_list: The settings of the new MAC list. (required)
:param str api_version: The version of the api being called. (required)
:return: MacList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_mac_list_with_http_info(mac_list, api_version, **kwargs) # noqa: E501
else:
(data) = self.create_mac_list_with_http_info(mac_list, api_version, **kwargs) # noqa: E501
return data
def create_mac_list_with_http_info(self, mac_list, api_version, **kwargs): # noqa: E501
"""Create a MAC List # noqa: E501
Create a new MAC list. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_mac_list_with_http_info(mac_list, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param MacList mac_list: The settings of the new MAC list. (required)
:param str api_version: The version of the api being called. (required)
:return: MacList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['mac_list', 'api_version'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_mac_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'mac_list' is set
if ('mac_list' not in params or
params['mac_list'] is None):
raise ValueError("Missing the required parameter `mac_list` when calling `create_mac_list`") # noqa: E501
# verify the required parameter 'api_version' is set
if ('api_version' not in params or
params['api_version'] is None):
raise ValueError("Missing the required parameter `api_version` when calling `create_mac_list`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'api_version' in params:
header_params['api-version'] = params['api_version'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'mac_list' in params:
body_params = params['mac_list']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['DefaultAuthentication'] # noqa: E501
return self.api_client.call_api(
'/maclists', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MacList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_mac_list(self, mac_list_id, api_version, **kwargs): # noqa: E501
"""Delete a MAC List # noqa: E501
Delete a MAC list by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_mac_list(mac_list_id, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int mac_list_id: The ID number of the MAC list to delete. (required)
:param str api_version: The version of the api being called. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_mac_list_with_http_info(mac_list_id, api_version, **kwargs) # noqa: E501
else:
(data) = self.delete_mac_list_with_http_info(mac_list_id, api_version, **kwargs) # noqa: E501
return data
def delete_mac_list_with_http_info(self, mac_list_id, api_version, **kwargs): # noqa: E501
"""Delete a MAC List # noqa: E501
Delete a MAC list by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_mac_list_with_http_info(mac_list_id, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int mac_list_id: The ID number of the MAC list to delete. (required)
:param str api_version: The version of the api being called. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['mac_list_id', 'api_version'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_mac_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'mac_list_id' is set
if ('mac_list_id' not in params or
params['mac_list_id'] is None):
raise ValueError("Missing the required parameter `mac_list_id` when calling `delete_mac_list`") # noqa: E501
# verify the required parameter 'api_version' is set
if ('api_version' not in params or
params['api_version'] is None):
raise ValueError("Missing the required parameter `api_version` when calling `delete_mac_list`") # noqa: E501
if 'mac_list_id' in params and not re.search('\\d+', str(params['mac_list_id'])): # noqa: E501
raise ValueError("Invalid value for parameter `mac_list_id` when calling `delete_mac_list`, must conform to the pattern `/\\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'mac_list_id' in params:
path_params['macListID'] = params['mac_list_id'] # noqa: E501
query_params = []
header_params = {}
if 'api_version' in params:
header_params['api-version'] = params['api_version'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['DefaultAuthentication'] # noqa: E501
return self.api_client.call_api(
'/maclists/{macListID}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def describe_mac_list(self, mac_list_id, api_version, **kwargs): # noqa: E501
"""Describe a MAC List # noqa: E501
Describe a MAC list by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.describe_mac_list(mac_list_id, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int mac_list_id: The ID number of the MAC list to describe. (required)
:param str api_version: The version of the api being called. (required)
:return: MacList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.describe_mac_list_with_http_info(mac_list_id, api_version, **kwargs) # noqa: E501
else:
(data) = self.describe_mac_list_with_http_info(mac_list_id, api_version, **kwargs) # noqa: E501
return data
def describe_mac_list_with_http_info(self, mac_list_id, api_version, **kwargs): # noqa: E501
"""Describe a MAC List # noqa: E501
Describe a MAC list by ID. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.describe_mac_list_with_http_info(mac_list_id, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int mac_list_id: The ID number of the MAC list to describe. (required)
:param str api_version: The version of the api being called. (required)
:return: MacList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['mac_list_id', 'api_version'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method describe_mac_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'mac_list_id' is set
if ('mac_list_id' not in params or
params['mac_list_id'] is None):
raise ValueError("Missing the required parameter `mac_list_id` when calling `describe_mac_list`") # noqa: E501
# verify the required parameter 'api_version' is set
if ('api_version' not in params or
params['api_version'] is None):
raise ValueError("Missing the required parameter `api_version` when calling `describe_mac_list`") # noqa: E501
if 'mac_list_id' in params and not re.search('\\d+', str(params['mac_list_id'])): # noqa: E501
raise ValueError("Invalid value for parameter `mac_list_id` when calling `describe_mac_list`, must conform to the pattern `/\\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'mac_list_id' in params:
path_params['macListID'] = params['mac_list_id'] # noqa: E501
query_params = []
header_params = {}
if 'api_version' in params:
header_params['api-version'] = params['api_version'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['DefaultAuthentication'] # noqa: E501
return self.api_client.call_api(
'/maclists/{macListID}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MacList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_mac_lists(self, api_version, **kwargs): # noqa: E501
"""List MAC Lists # noqa: E501
Lists all MAC lists. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_mac_lists(api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_version: The version of the api being called. (required)
:return: MacLists
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_mac_lists_with_http_info(api_version, **kwargs) # noqa: E501
else:
(data) = self.list_mac_lists_with_http_info(api_version, **kwargs) # noqa: E501
return data
def list_mac_lists_with_http_info(self, api_version, **kwargs): # noqa: E501
"""List MAC Lists # noqa: E501
Lists all MAC lists. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_mac_lists_with_http_info(api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_version: The version of the api being called. (required)
:return: MacLists
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_version'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_mac_lists" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_version' is set
if ('api_version' not in params or
params['api_version'] is None):
raise ValueError("Missing the required parameter `api_version` when calling `list_mac_lists`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'api_version' in params:
header_params['api-version'] = params['api_version'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['DefaultAuthentication'] # noqa: E501
return self.api_client.call_api(
'/maclists', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MacLists', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def modify_mac_list(self, mac_list_id, mac_list, api_version, **kwargs): # noqa: E501
"""Modify a MAC List # noqa: E501
Modify a MAC list by ID. Any unset elements will be left unchanged. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.modify_mac_list(mac_list_id, mac_list, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int mac_list_id: The ID number of the MAC list to modify. (required)
:param MacList mac_list: The settings of the MAC list to modify. (required)
:param str api_version: The version of the api being called. (required)
:return: MacList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.modify_mac_list_with_http_info(mac_list_id, mac_list, api_version, **kwargs) # noqa: E501
else:
(data) = self.modify_mac_list_with_http_info(mac_list_id, mac_list, api_version, **kwargs) # noqa: E501
return data
def modify_mac_list_with_http_info(self, mac_list_id, mac_list, api_version, **kwargs): # noqa: E501
"""Modify a MAC List # noqa: E501
Modify a MAC list by ID. Any unset elements will be left unchanged. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.modify_mac_list_with_http_info(mac_list_id, mac_list, api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int mac_list_id: The ID number of the MAC list to modify. (required)
:param MacList mac_list: The settings of the MAC list to modify. (required)
:param str api_version: The version of the api being called. (required)
:return: MacList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['mac_list_id', 'mac_list', 'api_version'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method modify_mac_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'mac_list_id' is set
if ('mac_list_id' not in params or
params['mac_list_id'] is None):
raise ValueError("Missing the required parameter `mac_list_id` when calling `modify_mac_list`") # noqa: E501
# verify the required parameter 'mac_list' is set
if ('mac_list' not in params or
params['mac_list'] is None):
raise ValueError("Missing the required parameter `mac_list` when calling `modify_mac_list`") # noqa: E501
# verify the required parameter 'api_version' is set
if ('api_version' not in params or
params['api_version'] is None):
raise ValueError("Missing the required parameter `api_version` when calling `modify_mac_list`") # noqa: E501
if 'mac_list_id' in params and not re.search('\\d+', str(params['mac_list_id'])): # noqa: E501
raise ValueError("Invalid value for parameter `mac_list_id` when calling `modify_mac_list`, must conform to the pattern `/\\d+/`") # noqa: E501
collection_formats = {}
path_params = {}
if 'mac_list_id' in params:
path_params['macListID'] = params['mac_list_id'] # noqa: E501
query_params = []
header_params = {}
if 'api_version' in params:
header_params['api-version'] = params['api_version'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'mac_list' in params:
body_params = params['mac_list']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['DefaultAuthentication'] # noqa: E501
return self.api_client.call_api(
'/maclists/{macListID}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MacList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_mac_lists(self, api_version, **kwargs): # noqa: E501
"""Search MAC Lists # noqa: E501
Search for MAC lists using optional filters. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_mac_lists(api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_version: The version of the api being called. (required)
:param SearchFilter search_filter: A collection of options used to filter the search results.
:return: MacLists
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_mac_lists_with_http_info(api_version, **kwargs) # noqa: E501
else:
(data) = self.search_mac_lists_with_http_info(api_version, **kwargs) # noqa: E501
return data
def search_mac_lists_with_http_info(self, api_version, **kwargs): # noqa: E501
"""Search MAC Lists # noqa: E501
Search for MAC lists using optional filters. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.search_mac_lists_with_http_info(api_version, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str api_version: The version of the api being called. (required)
:param SearchFilter search_filter: A collection of options used to filter the search results.
:return: MacLists
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['api_version', 'search_filter'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_mac_lists" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'api_version' is set
if ('api_version' not in params or
params['api_version'] is None):
raise ValueError("Missing the required parameter `api_version` when calling `search_mac_lists`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'api_version' in params:
header_params['api-version'] = params['api_version'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'search_filter' in params:
body_params = params['search_filter']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['DefaultAuthentication'] # noqa: E501
return self.api_client.call_api(
'/maclists/search', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MacLists', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 43.281296 | 311 | 0.605928 |
from __future__ import absolute_import
import re
import six
from deepsecurity.api_client import ApiClient
class MACListsApi(object):
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_mac_list(self, mac_list, api_version, **kwargs):
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_mac_list_with_http_info(mac_list, api_version, **kwargs)
else:
(data) = self.create_mac_list_with_http_info(mac_list, api_version, **kwargs)
return data
def create_mac_list_with_http_info(self, mac_list, api_version, **kwargs):
all_params = ['mac_list', 'api_version']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_mac_list" % key
)
params[key] = val
del params['kwargs']
if ('mac_list' not in params or
params['mac_list'] is None):
raise ValueError("Missing the required parameter `mac_list` when calling `create_mac_list`")
if ('api_version' not in params or
params['api_version'] is None):
raise ValueError("Missing the required parameter `api_version` when calling `create_mac_list`")
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'api_version' in params:
header_params['api-version'] = params['api_version']
form_params = []
local_var_files = {}
body_params = None
if 'mac_list' in params:
body_params = params['mac_list']
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
auth_settings = ['DefaultAuthentication']
return self.api_client.call_api(
'/maclists', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MacList',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_mac_list(self, mac_list_id, api_version, **kwargs):
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_mac_list_with_http_info(mac_list_id, api_version, **kwargs)
else:
(data) = self.delete_mac_list_with_http_info(mac_list_id, api_version, **kwargs)
return data
def delete_mac_list_with_http_info(self, mac_list_id, api_version, **kwargs):
all_params = ['mac_list_id', 'api_version']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_mac_list" % key
)
params[key] = val
del params['kwargs']
if ('mac_list_id' not in params or
params['mac_list_id'] is None):
raise ValueError("Missing the required parameter `mac_list_id` when calling `delete_mac_list`")
if ('api_version' not in params or
params['api_version'] is None):
raise ValueError("Missing the required parameter `api_version` when calling `delete_mac_list`")
if 'mac_list_id' in params and not re.search('\\d+', str(params['mac_list_id'])):
raise ValueError("Invalid value for parameter `mac_list_id` when calling `delete_mac_list`, must conform to the pattern `/\\d+/`")
collection_formats = {}
path_params = {}
if 'mac_list_id' in params:
path_params['macListID'] = params['mac_list_id']
query_params = []
header_params = {}
if 'api_version' in params:
header_params['api-version'] = params['api_version']
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
auth_settings = ['DefaultAuthentication']
return self.api_client.call_api(
'/maclists/{macListID}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def describe_mac_list(self, mac_list_id, api_version, **kwargs):
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.describe_mac_list_with_http_info(mac_list_id, api_version, **kwargs)
else:
(data) = self.describe_mac_list_with_http_info(mac_list_id, api_version, **kwargs)
return data
def describe_mac_list_with_http_info(self, mac_list_id, api_version, **kwargs):
all_params = ['mac_list_id', 'api_version']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method describe_mac_list" % key
)
params[key] = val
del params['kwargs']
if ('mac_list_id' not in params or
params['mac_list_id'] is None):
raise ValueError("Missing the required parameter `mac_list_id` when calling `describe_mac_list`")
if ('api_version' not in params or
params['api_version'] is None):
raise ValueError("Missing the required parameter `api_version` when calling `describe_mac_list`")
if 'mac_list_id' in params and not re.search('\\d+', str(params['mac_list_id'])):
raise ValueError("Invalid value for parameter `mac_list_id` when calling `describe_mac_list`, must conform to the pattern `/\\d+/`")
collection_formats = {}
path_params = {}
if 'mac_list_id' in params:
path_params['macListID'] = params['mac_list_id']
query_params = []
header_params = {}
if 'api_version' in params:
header_params['api-version'] = params['api_version']
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
auth_settings = ['DefaultAuthentication']
return self.api_client.call_api(
'/maclists/{macListID}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MacList',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_mac_lists(self, api_version, **kwargs):
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_mac_lists_with_http_info(api_version, **kwargs)
else:
(data) = self.list_mac_lists_with_http_info(api_version, **kwargs)
return data
def list_mac_lists_with_http_info(self, api_version, **kwargs):
all_params = ['api_version']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_mac_lists" % key
)
params[key] = val
del params['kwargs']
if ('api_version' not in params or
params['api_version'] is None):
raise ValueError("Missing the required parameter `api_version` when calling `list_mac_lists`")
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'api_version' in params:
header_params['api-version'] = params['api_version']
form_params = []
local_var_files = {}
body_params = None
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
auth_settings = ['DefaultAuthentication']
return self.api_client.call_api(
'/maclists', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MacLists',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def modify_mac_list(self, mac_list_id, mac_list, api_version, **kwargs):
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.modify_mac_list_with_http_info(mac_list_id, mac_list, api_version, **kwargs)
else:
(data) = self.modify_mac_list_with_http_info(mac_list_id, mac_list, api_version, **kwargs)
return data
def modify_mac_list_with_http_info(self, mac_list_id, mac_list, api_version, **kwargs):
all_params = ['mac_list_id', 'mac_list', 'api_version']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method modify_mac_list" % key
)
params[key] = val
del params['kwargs']
if ('mac_list_id' not in params or
params['mac_list_id'] is None):
raise ValueError("Missing the required parameter `mac_list_id` when calling `modify_mac_list`")
if ('mac_list' not in params or
params['mac_list'] is None):
raise ValueError("Missing the required parameter `mac_list` when calling `modify_mac_list`")
if ('api_version' not in params or
params['api_version'] is None):
raise ValueError("Missing the required parameter `api_version` when calling `modify_mac_list`")
if 'mac_list_id' in params and not re.search('\\d+', str(params['mac_list_id'])):
raise ValueError("Invalid value for parameter `mac_list_id` when calling `modify_mac_list`, must conform to the pattern `/\\d+/`")
collection_formats = {}
path_params = {}
if 'mac_list_id' in params:
path_params['macListID'] = params['mac_list_id']
query_params = []
header_params = {}
if 'api_version' in params:
header_params['api-version'] = params['api_version']
form_params = []
local_var_files = {}
body_params = None
if 'mac_list' in params:
body_params = params['mac_list']
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
auth_settings = ['DefaultAuthentication']
return self.api_client.call_api(
'/maclists/{macListID}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MacList',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def search_mac_lists(self, api_version, **kwargs):
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.search_mac_lists_with_http_info(api_version, **kwargs)
else:
(data) = self.search_mac_lists_with_http_info(api_version, **kwargs)
return data
def search_mac_lists_with_http_info(self, api_version, **kwargs):
all_params = ['api_version', 'search_filter']
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method search_mac_lists" % key
)
params[key] = val
del params['kwargs']
if ('api_version' not in params or
params['api_version'] is None):
raise ValueError("Missing the required parameter `api_version` when calling `search_mac_lists`")
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'api_version' in params:
header_params['api-version'] = params['api_version']
form_params = []
local_var_files = {}
body_params = None
if 'search_filter' in params:
body_params = params['search_filter']
header_params['Accept'] = self.api_client.select_header_accept(
['application/json'])
header_params['Content-Type'] = self.api_client.select_header_content_type(
['application/json'])
auth_settings = ['DefaultAuthentication']
return self.api_client.call_api(
'/maclists/search', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MacLists',
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| true | true |
f71a3354afd52b38a1b508cdd629a00d472d8746 | 2,651 | py | Python | tests/test_logger.py | agraubert/agutil | d9a568df01959ed985c9c8e77bdd501ac13bdbbf | [
"MIT"
] | 3 | 2017-06-05T15:46:22.000Z | 2019-05-22T21:26:54.000Z | tests/test_logger.py | agraubert/agutil | d9a568df01959ed985c9c8e77bdd501ac13bdbbf | [
"MIT"
] | 93 | 2016-06-22T18:57:47.000Z | 2022-02-14T10:50:27.000Z | tests/test_logger.py | agraubert/agutil | d9a568df01959ed985c9c8e77bdd501ac13bdbbf | [
"MIT"
] | null | null | null | import unittest
import unittest.mock
import os
from py_compile import compile
import sys
import random
import time
import tempfile
from filecmp import cmp
def make_random_string(length=25, lower=0, upper=255):
return "".join(chr(random.randint(lower,upper)) for i in range(length))
def tempname():
(handle, name) = tempfile.mkstemp()
os.close(handle)
return name
class test(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.script_path = os.path.join(
os.path.dirname(
os.path.dirname(
os.path.abspath(__file__)
)
),
"agutil",
"src",
"logger.py"
)
cls.data_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'data',
'logger'
)
sys.path.append(os.path.dirname(os.path.dirname(cls.script_path)))
random.seed()
def test_compilation(self):
compiled_path = compile(self.script_path)
self.assertTrue(compiled_path)
@unittest.skipIf(sys.platform.startswith('win'), "Tempfile cannot be used in this way on Windows")
def test_basic_logging(self):
import agutil.src.logger
time_mock = unittest.mock.Mock(side_effect = lambda fmt, time=0:fmt)
agutil.src.logger.time.strftime = time_mock
output_file = tempname()
log = agutil.src.logger.Logger(output_file, loglevel=agutil.src.logger.Logger.LOGLEVEL_DETAIL)
log.log("Test message")
log.log("More messages!", sender="me")
log.log("OH NO! This one's an error!", "Foo", "ERROR")
foo_bound = log.bindToSender("Foo")
log.mute("Foo", "Bar")
foo_bound("Message 1")
foo_bound("Message 2")
log.log("This should appear in the log, but not the dump", "Bar", "WARN")
foo_bound("Message 3")
log.unmute("Foo")
log.log("I've been unmuted!", "Foo")
log.log("This should be a warning", "Anyone", "BLORG")
time.sleep(.2)
log.addChannel("BLORG", 15)
log.setChannelCollection("BLORG", True)
log.log("This should be seen", "Anyone", "BLORG")
log.setChannelCollection("WARN", False)
log.setChannelCollection("WARN", True)
time.sleep(.2)
log.log("This should appear in the dump", "Bar", "WARN")
time.sleep(.1)
self.assertFalse(log.close())
self.assertTrue(cmp(
output_file,
os.path.join(
self.data_path,
'logger_compare.txt'
)
))
os.remove(output_file)
| 32.728395 | 102 | 0.590343 | import unittest
import unittest.mock
import os
from py_compile import compile
import sys
import random
import time
import tempfile
from filecmp import cmp
def make_random_string(length=25, lower=0, upper=255):
return "".join(chr(random.randint(lower,upper)) for i in range(length))
def tempname():
(handle, name) = tempfile.mkstemp()
os.close(handle)
return name
class test(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.script_path = os.path.join(
os.path.dirname(
os.path.dirname(
os.path.abspath(__file__)
)
),
"agutil",
"src",
"logger.py"
)
cls.data_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
'data',
'logger'
)
sys.path.append(os.path.dirname(os.path.dirname(cls.script_path)))
random.seed()
def test_compilation(self):
compiled_path = compile(self.script_path)
self.assertTrue(compiled_path)
@unittest.skipIf(sys.platform.startswith('win'), "Tempfile cannot be used in this way on Windows")
def test_basic_logging(self):
import agutil.src.logger
time_mock = unittest.mock.Mock(side_effect = lambda fmt, time=0:fmt)
agutil.src.logger.time.strftime = time_mock
output_file = tempname()
log = agutil.src.logger.Logger(output_file, loglevel=agutil.src.logger.Logger.LOGLEVEL_DETAIL)
log.log("Test message")
log.log("More messages!", sender="me")
log.log("OH NO! This one's an error!", "Foo", "ERROR")
foo_bound = log.bindToSender("Foo")
log.mute("Foo", "Bar")
foo_bound("Message 1")
foo_bound("Message 2")
log.log("This should appear in the log, but not the dump", "Bar", "WARN")
foo_bound("Message 3")
log.unmute("Foo")
log.log("I've been unmuted!", "Foo")
log.log("This should be a warning", "Anyone", "BLORG")
time.sleep(.2)
log.addChannel("BLORG", 15)
log.setChannelCollection("BLORG", True)
log.log("This should be seen", "Anyone", "BLORG")
log.setChannelCollection("WARN", False)
log.setChannelCollection("WARN", True)
time.sleep(.2)
log.log("This should appear in the dump", "Bar", "WARN")
time.sleep(.1)
self.assertFalse(log.close())
self.assertTrue(cmp(
output_file,
os.path.join(
self.data_path,
'logger_compare.txt'
)
))
os.remove(output_file)
| true | true |
f71a33a61a60a199f194543768784c8caef1eda7 | 7,886 | py | Python | python/pm4pyPlus.py | rivei/pm4py_with_dash | 05ed524c11b44932783864a4465d400ea1300910 | [
"MIT"
] | null | null | null | python/pm4pyPlus.py | rivei/pm4py_with_dash | 05ed524c11b44932783864a4465d400ea1300910 | [
"MIT"
] | null | null | null | python/pm4pyPlus.py | rivei/pm4py_with_dash | 05ed524c11b44932783864a4465d400ea1300910 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sun Dec 1 22:17:20 2019
@author: Wei
"""
#from dash_app import default_log as log
import pandas as pd
import numpy as np
#import pytz
from datetime import datetime, tzinfo,timedelta
from pm4py.statistics.traces.log import case_statistics
from pm4py.algo.filtering.log.attributes import attributes_filter
MAX_TRACES = 9999
def filtered_log_df(log, top_trace_n = MAX_TRACES):
# if top_trace_n == MAX_TRACES:
# traces_with_count = case_statistics.get_variant_statistics(log) #parameters=("max_variants_to_return":5)
# #df = pd.DataFrame.from_dict([dict(x) for x in traces_with_count])
# df = pd.DataFrame()
# df.columns = ['caseid','actid','actseq','resid','ts','sT']
# else:
n_cases = 0
caseid = []
actid = []
actseq = []
resid = []
ts = []
startTime = []
for case in log:
actidx = 0
startT = case[0]['time:timestamp'].timestamp()
for event in case:
caseid.append(n_cases)
actid.append(event['concept:name'])
actseq.append(actidx)
resid.append(event['org:resource'])
ts.append(event['time:timestamp'].timestamp())
startTime.append(event['time:timestamp'].timestamp() - startT)
actidx = actidx + 1
n_cases = n_cases + 1
df = pd.DataFrame({'caseid': caseid,
'actid':actid,
'actseq':actseq,
'resid':resid,
'ts':ts,
'sT': startTime})
df['preid'] = df['actid'].shift(1)
df['preid'] = df.apply(lambda row: row['preid'] if row['actseq']!=0 else 'START', axis = 1)
return df
def n_cases(log, top_trace_n = MAX_TRACES):
    """Return the number of distinct cases (traces) in the log."""
    frame = (filtered_log_df(log) if top_trace_n == MAX_TRACES
             else filtered_log_df(log, top_trace_n))
    return len(frame['caseid'].unique())
def n_events(log):
    """Return the total number of events across all cases in the log."""
    return len(filtered_log_df(log))
def n_activities(log):
    """Return the number of distinct activity names in the log."""
    return len(filtered_log_df(log)['actid'].unique())
def n_resources(log):
    """Return the number of distinct resources in the log."""
    return len(filtered_log_df(log)['resid'].unique())
def n_traces(log, top_trace_n = MAX_TRACES):
    """Return the number of trace variants in the log.

    With the MAX_TRACES default all variants are counted; otherwise pm4py
    is asked for at most top_trace_n variants.
    """
    if top_trace_n == MAX_TRACES:
        variants = case_statistics.get_variant_statistics(log)
    else:
        variants = case_statistics.get_variant_statistics(
            log, parameters={"max_variants_to_return": top_trace_n})
    variant_df = pd.DataFrame.from_dict([dict(v) for v in variants])
    return len(variant_df)
def acts_df(log):
    """Return a DataFrame of activity names ('id') and event counts ('cnt')."""
    activities = attributes_filter.get_attribute_values(log, "concept:name")
    names = list(activities.keys())
    counts = [activities[name] for name in names]
    return pd.DataFrame({'id': names, 'cnt': counts})
def traces_df(log):
    """Build one row per activity occurrence inside each trace variant.

    Columns:
      id       - index of the variant
      actid    - activity name
      actseq   - position of the activity inside the variant
      cnt      - how many cases follow this variant
      preid    - previous activity in the variant, 'START' for the first
      pre_post - "<preid>@@<actid>" transition key
    """
    ids, acts, positions, counts = [], [], [], []
    for variant_idx, variant in enumerate(case_statistics.get_variant_statistics(log)):
        # A variant is a comma-separated string of activity names.
        for pos, act in enumerate(variant['variant'].split(',')):
            ids.append(variant_idx)
            acts.append(act)
            positions.append(pos)
            counts.append(variant['count'])
    trace_df = pd.DataFrame({'id': ids, 'actid': acts, 'actseq': positions, 'cnt': counts})
    # Predecessor within the same variant; reset variant-initial rows so the
    # shift does not leak across variant boundaries.
    trace_df['preid'] = trace_df['actid'].shift(1)
    trace_df['preid'] = trace_df.apply(
        lambda row: row['preid'] if row['actseq'] != 0 else 'START', axis=1)
    trace_df['pre_post'] = trace_df.apply(
        lambda row: row['preid'] + "@@" + row['actid'], axis=1)
    return trace_df
def sort_df(log):
    """Sort the flattened event log by case duration (longest first).

    Adds two columns to the frame produced by filtered_log_df:
      dur - a per-case duration copied onto every event row of the case
      sid - a dense case rank assigned in the sorted order
    and returns the frame sorted by (dur desc, caseid asc, actseq asc).
    """
    df = filtered_log_df(log)
    dur = np.zeros(len(df))
    # evS/evE track the first and last row index of the case currently
    # being scanned; a new case starts wherever actseq == 0.
    evS = 0
    evE = -1
    for i in range(0, len(df)):
        if df['actseq'][i] == 0:
            evS = i
        if i < len(df) - 1:
            # The case ends just before the next row that starts a case.
            if df['actseq'][i + 1] == 0:
                evE = i
        else:
            # The final row always closes the last case.
            evE = i
        if evE >= evS:
            for j in range(evS, evE+1):
                # NOTE(review): this reads the elapsed time of the
                # next-to-last event (evE-1) rather than the last one (evE),
                # and for a single-event first case it looks up label -1.
                # df['sT'][evE] looks intended -- confirm before changing.
                dur[j] = df['sT'][evE-1]
    df['dur'] = dur
    sort_df = df.sort_values(by=['dur','caseid', 'actseq'], ascending = [0,1,1])
    sortid = 0
    sid = np.zeros(len(sort_df))
    # Assign a dense case rank over the sorted order: the rank increments
    # whenever the caseid changes between consecutive rows.
    for i in range(1, len(sort_df)):
        if i < len(sort_df) - 1:
            # NOTE(review): the last row never triggers an increment even
            # when it starts a new case, so a trailing single-event case
            # inherits the previous case's sid -- verify intent.
            if sort_df.iloc[i,:]['caseid'] != sort_df.iloc[i-1,:]['caseid']:
                sortid = sortid + 1
        sid[i] = sortid
    sort_df['sid'] = sid
    return sort_df
def mtx_df(log):
    """Build a predecessor/successor transition matrix for the log.

    Rows are activities, columns are predecessor activities (including the
    synthetic 'START'); cell [act, pre] holds the total variant count of
    the pre -> act transition summed over all trace variants.  Cells with
    no observed transition stay NaN.
    """
    df = traces_df(log)
    prelist = df['preid'].unique()
    actlist = df['actid'].unique()
    dff = pd.DataFrame(columns=prelist, index=actlist)
    # Total count per "pre@@post" transition key.
    transition_counts = df.groupby('pre_post')['cnt'].sum()
    for key in transition_counts.index:
        parts = key.split("@@")
        if len(parts) != 2:
            # An activity name containing "@@" makes the key ambiguous.
            # Report the whole key: the previous print(parts[0], parts[1])
            # raised IndexError for a 1-part key and truncated longer ones.
            print(key)
        else:
            pre, post = parts
            # .at writes the same cell as the old chained dff[pre][post]
            # indexing (row = successor activity, column = predecessor)
            # without the chained-assignment pitfall.
            dff.at[post, pre] = transition_counts[key]
    return dff
#
#activities = log_attributes_filter.get_attribute_values(log, "concept:name")
#actid = []
#cnt = []
#for act0 in activities.items():
# actid.append(act0[0])
# cnt.append(act0[1])
#
#act_df = pd.DataFrame({'id':actid, 'cnt':cnt})
#
#n_activities = len(act_df)
#
#from pm4py.statistics.traces.log import case_statistics
#traces = case_statistics.get_variant_statistics(log)#, parameters={"max_variants_to_return": 5})
#
##acts = []
##cnt = []
##tid = []
##idx = 0
##for trace in traces:
## tid.append(idx)
## acts.append(trace['variant'])
## cnt.append(trace['count'])
## idx = idx + 1
##
##trace_df = pd.DataFrame({'id': tid, 'acts': acts, 'cnt':cnt})
##n_traces = len(trace_df)
#
#tid = []
#actid = []
#actseq = []
#cnt = []
#n_traces = 0
#for trace in traces:
# actidx = 0
# acts = trace['variant']
# for s in acts.split(','):
# tid.append(n_traces)
# actid.append(s)
# actseq.append(actidx)
# cnt.append(trace['count'])
# actidx = actidx+1
# n_traces = n_traces + 1
#
#trace_df = pd.DataFrame({'id': tid, 'actid': actid, 'actseq':actseq, 'cnt':cnt})
#trace_df['preid'] = trace_df['actid'].shift(1)
#trace_df['preid'] = trace_df.apply(lambda row: row['preid'] if row['actseq']!=0 else 'START', axis = 1)
##trace_df['postid'] = trace_df['actid'].shift(1)
##trace_df['postid'] = trace_df.apply(lambda row: row['preid'] if row['actseq']!=0 else 'START', axis = 1)
#
#trace_df['pre_post'] = trace_df.apply(lambda row: row['preid']+"-"+row['actid'], axis = 1)
#
#def actid2num(sactid, df):
# nactid = -1
# for i in range(0, len(df)):
# if df['id'][i] == sactid:
# nactid = i/len(df)
# return nactid
#
##actid2num("Confirmation of receipt", act_df)
#
#trace_df['nactid'] = trace_df['actid'].apply(lambda i:actid2num(i, act_df))
#
## matrix
#df['pre_post'] = df.apply(lambda row: row['preid']+"-"+row['actid'], axis = 1)
##mtxdf1 = pd.DataFrame({'ant':df['preid'],'con':df})
#mtxdf1 = df[df['preid']!='START'].groupby('pre_post')['caseid'].count() #agg(['sum','count','mean'])
##mtxdf1['abs'] = mtxdf1['sum']/mtxdf1['count']
#mtxdf= pd.DataFrame({'pre_post':mtxdf1.index, 'cnt': list(mtxdf1)})
#
##roles Detection: related to resource vs activity?
##from pm4py.algo.enhancement.roles import factory as roles_factory
##roles = roles_factory.apply(log)
#aaa
| 30.565891 | 122 | 0.578113 |
import pandas as pd
import numpy as np
from datetime import datetime, tzinfo,timedelta
from pm4py.statistics.traces.log import case_statistics
from pm4py.algo.filtering.log.attributes import attributes_filter
MAX_TRACES = 9999
def filtered_log_df(log, top_trace_n = MAX_TRACES):
me = []
for case in log:
actidx = 0
startT = case[0]['time:timestamp'].timestamp()
for event in case:
caseid.append(n_cases)
actid.append(event['concept:name'])
actseq.append(actidx)
resid.append(event['org:resource'])
ts.append(event['time:timestamp'].timestamp())
startTime.append(event['time:timestamp'].timestamp() - startT)
actidx = actidx + 1
n_cases = n_cases + 1
df = pd.DataFrame({'caseid': caseid,
'actid':actid,
'actseq':actseq,
'resid':resid,
'ts':ts,
'sT': startTime})
df['preid'] = df['actid'].shift(1)
df['preid'] = df.apply(lambda row: row['preid'] if row['actseq']!=0 else 'START', axis = 1)
return df
def n_cases(log, top_trace_n = MAX_TRACES):
if top_trace_n == MAX_TRACES:
df = filtered_log_df(log)
else:
df = filtered_log_df(log, top_trace_n)
return len(df['caseid'].unique())
def n_events(log):
df = filtered_log_df(log)
return len(df)
def n_activities(log):
df = filtered_log_df(log)
return len(df['actid'].unique())
def n_resources(log):
df = filtered_log_df(log)
return len(df['resid'].unique())
def n_traces(log, top_trace_n = MAX_TRACES):
if top_trace_n == MAX_TRACES:
traces_with_count = case_statistics.get_variant_statistics(log)
else:
traces_with_count = case_statistics.get_variant_statistics(log, parameters={"max_variants_to_return":top_trace_n})
df = pd.DataFrame.from_dict([dict(x) for x in traces_with_count])
return len(df)
def acts_df(log):
activities = attributes_filter.get_attribute_values(log, "concept:name")
actid = []
cnt = []
for act0 in activities.items():
actid.append(act0[0])
cnt.append(act0[1])
return pd.DataFrame({'id':actid, 'cnt':cnt})
def traces_df(log):
traces = case_statistics.get_variant_statistics(log)
tid = []
actid = []
actseq = []
cnt = []
n_traces = 0
for trace in traces:
actidx = 0
acts = trace['variant']
for s in acts.split(','):
tid.append(n_traces)
actid.append(s)
actseq.append(actidx)
cnt.append(trace['count'])
actidx = actidx+1
n_traces = n_traces + 1
trace_df = pd.DataFrame({'id': tid, 'actid': actid, 'actseq':actseq, 'cnt':cnt})
trace_df['preid'] = trace_df['actid'].shift(1)
trace_df['preid'] = trace_df.apply(lambda row: row['preid'] if row['actseq']!=0 else 'START', axis = 1)
trace_df['pre_post'] = trace_df.apply(lambda row: row['preid']+"@@"+row['actid'], axis = 1)
return trace_df
def sort_df(log):
df = filtered_log_df(log)
dur = np.zeros(len(df))
evS = 0
evE = -1
for i in range(0, len(df)):
if df['actseq'][i] == 0:
evS = i
if i < len(df) - 1:
if df['actseq'][i + 1] == 0:
evE = i
else:
evE = i
if evE >= evS:
for j in range(evS, evE+1):
dur[j] = df['sT'][evE-1]
df['dur'] = dur
sort_df = df.sort_values(by=['dur','caseid', 'actseq'], ascending = [0,1,1])
sortid = 0
sid = np.zeros(len(sort_df))
for i in range(1, len(sort_df)):
if i < len(sort_df) - 1:
if sort_df.iloc[i,:]['caseid'] != sort_df.iloc[i-1,:]['caseid']:
sortid = sortid + 1
sid[i] = sortid
sort_df['sid'] = sid
return sort_df
def mtx_df(log):
df = traces_df(log)
prelist = (df['preid'].unique())
actlist = (df['actid'].unique())
dff = pd.DataFrame(columns=prelist,index = actlist)
mtxdf1 = df.groupby('pre_post')['cnt'].sum()
for s in mtxdf1.index:
a = s.split("@@")
if len(a) != 2:
print(a[0], a[1])
else:
dff[a[0]][a[1]] = mtxdf1[s]
return dff
| true | true |
f71a348d15db2579bb6b6dd7bce60ef5fc4a8a65 | 4,854 | py | Python | pypureclient/flasharray/FA_2_8/models/active_directory.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 14 | 2018-12-07T18:30:27.000Z | 2022-02-22T09:12:33.000Z | pypureclient/flasharray/FA_2_8/models/active_directory.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 28 | 2019-09-17T21:03:52.000Z | 2022-03-29T22:07:35.000Z | pypureclient/flasharray/FA_2_8/models/active_directory.py | Flav-STOR-WL/py-pure-client | 03b889c997d90380ac5d6380ca5d5432792d3e89 | [
"BSD-2-Clause"
] | 15 | 2020-06-11T15:50:08.000Z | 2022-03-21T09:27:25.000Z | # coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_8 import models
class ActiveDirectory(object):
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Swagger-declared type of every model attribute.
    swagger_types = {
        'name': 'str',
        'computer_name': 'str',
        'directory_servers': 'list[str]',
        'domain': 'str',
        'kerberos_servers': 'list[str]'
    }
    # JSON key used for each attribute in the REST payload.
    attribute_map = {
        'name': 'name',
        'computer_name': 'computer_name',
        'directory_servers': 'directory_servers',
        'domain': 'domain',
        'kerberos_servers': 'kerberos_servers'
    }
    # No constructor arguments are mandatory for this model.
    required_args = {
    }

    def __init__(
        self,
        name=None,  # type: str
        computer_name=None,  # type: str
        directory_servers=None,  # type: List[str]
        domain=None,  # type: str
        kerberos_servers=None,  # type: List[str]
    ):
        """
        Keyword args:
            name (str): A locally unique, system-generated name. The name cannot be modified.
            computer_name (str): The name of the computer account in the Active Directory domain.
            directory_servers (list[str]): A list of directory servers used for lookups related to user authorization. Servers must be specified in FQDN format. All specified servers must be registered to the domain appropriately in the configured DNS of the array and are only communicated with over the secure LDAP (LDAPS) protocol. If this field is `null`, the servers are resolved for the domain in DNS.
            domain (str): The Active Directory domain joined.
            kerberos_servers (list[str]): A list of key distribution servers to use for Kerberos protocol. Servers must be specified in FQDN format. All specified servers must be registered to the domain appropriately in the configured DNS of the array. If this field is `null`, the servers are resolved for the domain in DNS.
        """
        # Only assign attributes that were actually provided, so unset
        # fields keep their class-level defaults.
        if name is not None:
            self.name = name
        if computer_name is not None:
            self.computer_name = computer_name
        if directory_servers is not None:
            self.directory_servers = directory_servers
        if domain is not None:
            self.domain = domain
        if kerberos_servers is not None:
            self.kerberos_servers = kerberos_servers

    def __setattr__(self, key, value):
        """Reject assignment of attributes that are not part of the model."""
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `ActiveDirectory`".format(key))
        self.__dict__[key] = value

    def __getattribute__(self, item):
        """Surface unset fields as missing attributes.

        NOTE(review): values that are still `Property` objects are treated
        as unset; the Property placeholders are presumably assigned
        elsewhere in the package -- confirm against pypureclient.properties.
        """
        value = object.__getattribute__(self, item)
        if isinstance(value, Property):
            raise AttributeError
        else:
            return value

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            # Skip attributes that were never set (hasattr is False for
            # them because __getattribute__ raises AttributeError).
            if hasattr(self, attr):
                value = getattr(self, attr)
                if isinstance(value, list):
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        if issubclass(ActiveDirectory, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ActiveDirectory):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| 35.691176 | 407 | 0.592707 |
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_8 import models
class ActiveDirectory(object):
swagger_types = {
'name': 'str',
'computer_name': 'str',
'directory_servers': 'list[str]',
'domain': 'str',
'kerberos_servers': 'list[str]'
}
attribute_map = {
'name': 'name',
'computer_name': 'computer_name',
'directory_servers': 'directory_servers',
'domain': 'domain',
'kerberos_servers': 'kerberos_servers'
}
required_args = {
}
def __init__(
self,
name=None,
computer_name=None,
directory_servers=None,
domain=None,
kerberos_servers=None,
):
if name is not None:
self.name = name
if computer_name is not None:
self.computer_name = computer_name
if directory_servers is not None:
self.directory_servers = directory_servers
if domain is not None:
self.domain = domain
if kerberos_servers is not None:
self.kerberos_servers = kerberos_servers
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `ActiveDirectory`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ActiveDirectory, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, ActiveDirectory):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f71a3506e2c79b16c7a1c6ca335f47af41777dc9 | 2,781 | py | Python | antz/io.py | jmschrei/antz | 74c901f543279b1904f2db9f3a70e5dcc7ade182 | [
"MIT"
] | 3 | 2015-05-10T16:00:20.000Z | 2016-06-22T22:03:05.000Z | antz/io.py | jmschrei/antz | 74c901f543279b1904f2db9f3a70e5dcc7ade182 | [
"MIT"
] | null | null | null | antz/io.py | jmschrei/antz | 74c901f543279b1904f2db9f3a70e5dcc7ade182 | [
"MIT"
] | null | null | null | # io.py
# Contact: Jacob Schreiber
# jmschr@cs.washington.edu
'''
This script focuses on data input and output, and currently supports the
following files:
* FastA
'''
from seq import *
class FastA( object ):
    '''
    This is a FastA file. It can contain many DNA, RNA, or Protein
    sequences in it. This can be read in or written out.
    '''

    def __init__( self, sequences ):
        '''
        Store the sequences. They should be DNA, RNA, or Protein objects
        (anything exposing a `to_fasta` method), so that all metadata can
        be written out as well.
        '''
        self.sequences = sequences

    def __str__( self ):
        '''
        String representation of the FastA: one record per stored sequence.
        '''
        return '\n'.join( sequence.to_fasta() for sequence in self.sequences )

    def to_file( self, filename, attrs=None ):
        '''
        Write out a FastA file. `attrs` lists the attributes to write out,
        in order; since any data can be stored in these objects, this picks
        both what is written and in what order. If nothing is provided,
        no attributes are written out.
        '''
        with open( filename, 'w' ) as outfile:
            # Write out each stored sequence
            for sequence in self.sequences:
                outfile.write( sequence.to_fasta( attrs ) )

    @classmethod
    def from_file( cls, filename, attrs=None, delimiter=' ', seqType=None ):
        '''
        Read in a FastA file. `attrs` names each delimited item of the
        comment lines, in order. `seqType` may be a class or one of the
        strings 'protein', 'rna', 'dna'; anything else falls back to the
        generic Sequence type.
        '''
        if isinstance( seqType, str ):
            # Map the string form onto the matching sequence class.
            if seqType.lower() == 'protein':
                seqType = Protein
            elif seqType.lower() == 'rna':
                seqType = RNA
            elif seqType.lower() == 'dna':
                seqType = DNA
            else:
                seqType = Sequence
        seqType = seqType or Sequence

        # Guard against attrs=None: zip(None, ...) would raise TypeError
        # the first time a record is built.
        attrs = attrs or ()

        sequences = []
        with open( filename, 'r' ) as infile:
            comments, sequence = None, ''
            for line in infile:
                # A '>' line starts a new record, ending the previous one.
                if line.startswith( '>' ):
                    # If a sequence has been accumulated, create and append
                    # the sequence object for the record that just ended.
                    if sequence != '':
                        fields = comments.split( delimiter )
                        attributes = { attr: field for attr, field in zip( attrs, fields ) }
                        sequences.append( seqType( sequence, **attributes ) )
                    # Capture the new comment, dropping the '>' and newline
                    comments = line[1:].strip('\r\n')
                    sequence = ''
                else:
                    # Otherwise keep growing the current sequence.
                    sequence += line.strip('\r\n')
            # Flush the final record -- but only if a header was ever seen;
            # an empty input file previously crashed on comments.split(...).
            if comments is not None:
                fields = comments.split( delimiter )
                attributes = { attr: field for attr, field in zip( attrs, fields ) }
                sequences.append( seqType( sequence, **attributes ) )
        return cls( sequences )
from seq import *
class FastA( object ):
def __init__( self, sequences ):
self.sequences = sequences
def __str__( self ):
return '\n'.join( sequence.to_fasta() for sequence in self.sequences )
def to_file( self, filename, attrs=None ):
with open( filename, 'w' ) as outfile:
for sequence in self.sequences:
outfile.write( sequence.to_fasta( attrs ) )
@classmethod
def from_file( cls, filename, attrs=None, delimiter=' ', seqType=None ):
if isinstance( seqType, str ):
if seqType.lower() == 'protein':
seqType = Protein
elif seqType.lower() == 'rna':
seqType = RNA
elif seqType.lower() == 'dna':
seqType = DNA
else:
seqType = Sequence
seqType = seqType or Sequence
sequences = []
with open( filename, 'r' ) as infile:
comments, sequence = None, ''
for line in infile:
if line.startswith( '>' ):
if sequence != '':
comments = comments.split( delimiter )
attributes = { attr: comment for attr, comment in zip( attrs, comments ) }
sequences.append( seqType( sequence, **attributes ) )
comments = line[1:].strip('\r\n')
sequence = ''
else:
sequence += line.strip('\r\n')
comments = comments.split( delimiter )
attributes = { attr: comment for attr, comment in zip( attrs, comments )}
sequences.append( seqType( sequence, **attributes ) )
return cls( sequences ) | true | true |
f71a3706a5e1e09a9b5ac6542d63281e2cb4bab7 | 1,370 | py | Python | tests/test_platform_api.py | jain-aayush1123/here-location-services-python | 11ad5ef8273b4f243c43bc00ebd470f725b980bc | [
"Apache-2.0"
] | 16 | 2021-02-15T13:49:29.000Z | 2022-03-29T10:34:43.000Z | tests/test_platform_api.py | jain-aayush1123/here-location-services-python | 11ad5ef8273b4f243c43bc00ebd470f725b980bc | [
"Apache-2.0"
] | 8 | 2021-02-27T18:40:46.000Z | 2021-10-03T15:49:27.000Z | tests/test_platform_api.py | jain-aayush1123/here-location-services-python | 11ad5ef8273b4f243c43bc00ebd470f725b980bc | [
"Apache-2.0"
] | 11 | 2021-02-16T04:58:08.000Z | 2022-02-21T20:51:55.000Z | # Copyright (C) 2019-2021 HERE Europe B.V.
# SPDX-License-Identifier: Apache-2.0
"""This module will test platform api module."""
import pytest
from requests_oauthlib import OAuth1
from here_location_services.platform.apis.aaa_oauth2_api import AAAOauth2Api
from here_location_services.platform.apis.api import Api as PlaformApi
from here_location_services.utils import get_apikey
from tests.conftest import get_mock_response
LS_API_KEY = get_apikey()
def test_api_headers_property():
    """The platform Api must expose a bearer-token Authorization header."""
    client = PlaformApi(access_token="dummy")
    expected = {"Authorization": "Bearer dummy"}
    assert client.headers == expected
def test_mock_request_post(mocker):
    """Api.post should surface whatever requests.post returns."""
    mocker.patch("requests.post", return_value=True)
    client = PlaformApi(access_token="dummy")
    result = client.post("dummy_url", data={"foo": "bar"})
    assert result is True
def test_mock_request_scoped_access_token_excception(mocker):
    """A 500 response from the token endpoint must surface as an exception."""
    mock_response = get_mock_response(500, "This is mock reason", "This is mock text")
    mocker.patch("here_location_services.platform.apis.api.Api.post", return_value=mock_response)
    aaa_api = AAAOauth2Api(base_url="dummy")
    oauth = OAuth1(
        "dummy_key",
        client_secret="dummy_secret",
        signature_method="HMAC-SHA256",
    )
    with pytest.raises(Exception):
        aaa_api.request_scoped_access_token(oauth=oauth, data="dummy_data")
| 34.25 | 97 | 0.750365 |
import pytest
from requests_oauthlib import OAuth1
from here_location_services.platform.apis.aaa_oauth2_api import AAAOauth2Api
from here_location_services.platform.apis.api import Api as PlaformApi
from here_location_services.utils import get_apikey
from tests.conftest import get_mock_response
LS_API_KEY = get_apikey()
def test_api_headers_property():
api = PlaformApi(access_token="dummy")
assert api.headers == {"Authorization": "Bearer dummy"}
def test_mock_request_post(mocker):
mocker.patch("requests.post", return_value=True)
api = PlaformApi(access_token="dummy")
resp = api.post("dummy_url", data={"foo": "bar"})
assert resp is True
def test_mock_request_scoped_access_token_excception(mocker):
reason = "This is mock reason"
text = "This is mock text"
mock_response = get_mock_response(500, reason, text)
mocker.patch("here_location_services.platform.apis.api.Api.post", return_value=mock_response)
aaa_api = AAAOauth2Api(base_url="dummy")
oauth = OAuth1(
"dummy_key",
client_secret="dummy_secret",
signature_method="HMAC-SHA256",
)
with pytest.raises(Exception):
aaa_api.request_scoped_access_token(oauth=oauth, data="dummy_data")
| true | true |
f71a37cbfdc3fa96ea44404d682a0922befa7d2d | 13,580 | py | Python | scripts/blame_opt.py | regehr/yarpgen | 025a8cb90df018578c892ec82051ddf74388ec2f | [
"Apache-2.0"
] | null | null | null | scripts/blame_opt.py | regehr/yarpgen | 025a8cb90df018578c892ec82051ddf74388ec2f | [
"Apache-2.0"
] | null | null | null | scripts/blame_opt.py | regehr/yarpgen | 025a8cb90df018578c892ec82051ddf74388ec2f | [
"Apache-2.0"
] | 1 | 2021-03-02T08:54:02.000Z | 2021-03-02T08:54:02.000Z | #!/usr/bin/python3
###############################################################################
#
# Copyright (c) 2015-2020, Intel Corporation
# Copyright (c) 2019-2020, University of Utah
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###############################################################################
"""
Experimental script for automatic sorting of errors, basing on failed optimization phase
"""
###############################################################################
import logging
import os
import re
import common
import gen_test_makefile
import run_gen
# Options used to tell each compiler to restrict / bisect its optimizations.
# For icc the three entries correspond to successive narrowing phases
# (routine range, optimization number, case number); the LLVM-based
# compilers use a single -opt-bisect-limit phase; dpcpp (gpu) uses an IGC
# environment variable that disables passes beyond a threshold.
# Regex literals are raw strings so \( \d \s \w reach the re module
# untouched (the previous plain strings relied on invalid escape
# sequences, which emit DeprecationWarning on modern Python).
icc_blame_opts = ["-from_rtn=0 -to_rtn=", "-num_opt=", "-num-case="]
icc_opt_patterns = [r"\(\d+\)", r"\(\d+\)\s*\n", r"DO ANOTHER.*\(\d+\)"]
icc_opt_name_prefix = r"DOING\s*\[\w*\]\s*"
icc_opt_name_suffix = r"\s*\(\d*\)\s*\(last opt\)"

icx_blame_opts = ["-mllvm -opt-bisect-limit="]
icx_opt_patterns = [r"BISECT: running pass \(\d+\)"]
icx_opt_name_prefix = r"BISECT: running pass \(\d+\) "
icx_opt_name_suffix = r" \(.*\)"

clang_blame_opts = ["-mllvm -opt-bisect-limit="]
clang_opt_patterns = [r"BISECT: running pass \(\d+\)"]
clang_opt_name_prefix = r"BISECT: running pass \(\d+\) "
clang_opt_name_suffix = r" \(.*\)"

dpcpp_gpu_blame_opts = ["IGC_ShaderDumpEnableAll=1 IGC_ShaderDisableOptPassesAfter="]
dpcpp_gpu_patterns = [r"Skipping optimization pass: .* (threshold: \(\d+\))."]
dpcpp_gpu_opt_name_prefix = "Skipping optimization pass: '"
dpcpp_gpu_opt_name_suffix = r"' \(.*\)."

# Per-compiler lookup tables, keyed by the target's specs.name.
compilers_blame_opts = {"icc": icc_blame_opts, "icx": icx_blame_opts, "clang": clang_blame_opts, "dpcpp": dpcpp_gpu_blame_opts}
compilers_blame_patterns = {"icc": icc_opt_patterns, "icx": icx_opt_patterns, "clang": clang_opt_patterns, "dpcpp": dpcpp_gpu_patterns}
compilers_opt_name_cutter = {"icc": [icc_opt_name_prefix, icc_opt_name_suffix],
                             "icx": [icx_opt_name_prefix, icx_opt_name_suffix],
                             "clang": [clang_opt_name_prefix, clang_opt_name_suffix],
                             "dpcpp": [dpcpp_gpu_opt_name_prefix, dpcpp_gpu_opt_name_suffix]}

# Name of the makefile regenerated for every blame probe.
blame_test_makefile_name = "Blame_Makefile"
###############################################################################
def get_next_step(start, end, current, fail_flag):
    """Compute the next bisection window over optimization numbers.

    start/end bound the search interval and current is the last probed
    point.  When the last probe failed, the window shrinks to its lower
    half (the earliest failing point is at or below current); otherwise it
    shrinks to the upper half.  Returns (new_start, new_end, new_current).
    """
    if fail_flag:
        return start, current, start + (current - start) // 2
    return current, end, current + (end - current) // 2
def dump_exec_output(msg, ret_code, output, err_output, time_expired, num):
    """Log the outcome of an external command at DEBUG level, tagging every
    line with the worker process number."""
    proc = str(num)
    common.log_msg(logging.DEBUG, msg + " (process " + proc + ")")
    details = (("Ret code", str(ret_code)),
               ("Time exp", str(time_expired)),
               ("Output", str(output, "utf-8")),
               ("Err output", str(err_output, "utf-8")))
    for label, value in details:
        common.log_msg(logging.DEBUG, label + ": " + value + " | process " + proc)
def execute_blame_phase(valid_res, fail_target, inject_str, num, phase_num):
    """Bisect one compiler blame option to the earliest failing value.

    Regenerates the blame makefile with inject_str plus "-1"/"1" to learn
    the maximum optimization number for this phase (parsed from the
    compiler's stderr via compilers_blame_patterns; dpcpp uses a fixed
    cap of 250), then binary-searches [0, max] for the smallest number
    whose build/run stops reproducing valid_res (the last whitespace
    token of the program output).

    NOTE(review): returns an int on success but the string "-1" when an
    icc phase 2 pattern yields no matches -- callers must handle both.
    """
    # Probe compilation with the sentinel value to collect the stderr that
    # advertises the phase's maximum optimization number.
    gen_test_makefile.gen_makefile(
        out_file_name = blame_test_makefile_name,
        force = True,
        config_file = None,
        only_target = fail_target,
        inject_blame_opt = inject_str + "-1" if fail_target.specs.name != "dpcpp" else None,
        inject_blame_env = inject_str + "1" if fail_target.specs.name == "dpcpp" else None)
    ret_code, output, err_output, time_expired, elapsed_time = \
        common.run_cmd(["make", "-f", blame_test_makefile_name, fail_target.name], run_gen.compiler_timeout, num)
    if fail_target.specs.name == "dpcpp":
        # dpcpp emits its pass information at run time, not compile time.
        ret_code, output, err_output, time_expired, elapsed_time = \
            common.run_cmd(["make", "-f", blame_test_makefile_name, "run_" + fail_target.name], run_gen.compiler_timeout, num)
    opt_num_regex = re.compile(compilers_blame_patterns[fail_target.specs.name][phase_num])
    try:
        if fail_target.specs.name == "dpcpp":
            # No parseable maximum for dpcpp; use a fixed upper bound.
            max_opt_num = 250
        else:
            matches = opt_num_regex.findall(str(err_output, "utf-8"))
            # Some icc phases may not support going to phase "2", i.e. drilling down to num_case level,
            # in this case we are done.
            if phase_num == 2 and not matches:
                return str(-1)
            # The last reported optimization number is the maximum.
            max_opt_num_str = matches[-1]
            remove_brackets_pattern = re.compile("\d+")
            max_opt_num = int(remove_brackets_pattern.findall(max_opt_num_str)[-1])
        common.log_msg(logging.DEBUG, "Max opt num (process " + str(num) + "): " + str(max_opt_num))
    except IndexError:
        common.log_msg(logging.ERROR, "Can't decode max opt number using \"" + compilers_blame_patterns[fail_target.specs.name][phase_num]
                       + "\" regexp (phase " + str(phase_num) + ") in the following output:\n" + str(err_output, "utf-8")
                       + " (process " + str(num) + "): ")
        raise
    # Binary search: start with the full window [0, max] and the upper
    # bound as the current probe (the failure is assumed reproducible there).
    start_opt = 0
    end_opt = max_opt_num
    cur_opt = max_opt_num
    failed_flag = True
    time_to_finish = False
    while not time_to_finish:
        start_opt, end_opt, cur_opt = get_next_step(start_opt, end_opt, cur_opt, failed_flag)
        common.log_msg(logging.DEBUG, "Previous failed (process " + str(num) + "): " + str(failed_flag))
        failed_flag = False
        eff = ((start_opt + 1) >= cur_opt)  # Earliest fail was found
        common.log_msg(logging.DEBUG, "Trying opt (process " + str(num) + "): " + str(start_opt) + "/" + str(cur_opt) + "/" + str(end_opt))
        # Rebuild with the candidate optimization number injected.
        gen_test_makefile.gen_makefile(
            out_file_name = blame_test_makefile_name,
            force = True,
            config_file = None,
            only_target = fail_target,
            inject_blame_opt = inject_str + str(cur_opt) if fail_target.specs.name != "dpcpp" else None,
            inject_blame_env = inject_str + str(cur_opt) if fail_target.specs.name == "dpcpp" else None)
        ret_code, output, err_output, time_expired, elapsed_time = \
            common.run_cmd(["make", "-f", blame_test_makefile_name, fail_target.name], run_gen.compiler_timeout, num)
        if time_expired or ret_code != 0:
            # Build failure still reproduces; narrow to the lower half
            # (or stop if the window is already minimal).
            dump_exec_output("Compilation failed", ret_code, output, err_output, time_expired, num)
            failed_flag = True
            if not eff:
                continue
            else:
                break
        ret_code, output, err_output, time_expired, elapsed_time = \
            common.run_cmd(["make", "-f", blame_test_makefile_name, "run_" + fail_target.name], run_gen.run_timeout, num)
        if time_expired or ret_code != 0:
            # Runtime failure still reproduces.
            dump_exec_output("Execution failed", ret_code, output, err_output, time_expired, num)
            failed_flag = True
            if not eff:
                continue
            else:
                break
        if str(output, "utf-8").split()[-1] != valid_res:
            # Miscompare against the reference checksum still reproduces.
            common.log_msg(logging.DEBUG, "Output differs (process " + str(num) + "): " + str(output, "utf-8").split()[-1] + " vs " + valid_res + " (expected)")
            failed_flag = True
            if not eff:
                continue
            else:
                break
        # This probe passed: either the fail point is pinned just above the
        # window, or we move the probe to the upper half of the window.
        time_to_finish = (eff and failed_flag) or (eff and not failed_flag and (cur_opt == (end_opt - 1)))
        common.log_msg(logging.DEBUG, "Time to finish (process " + str(num) + "): " + str(time_to_finish))
        if not failed_flag:
            common.log_msg(logging.DEBUG, "Swapping current and end opt (process " + str(num) + ")")
            cur_opt = end_opt
    common.log_msg(logging.DEBUG, "Finished blame phase, result: " + str(inject_str) + str(cur_opt) + " (process " + str(num) + ")")
    return cur_opt
def blame(fail_dir, valid_res, fail_target, out_dir, lock, num, inplace):
    """Run the blame (triaging) process for a single failing test.

    Walks the compiler-specific blame option levels, bisecting each level
    with execute_blame_phase(), then rebuilds the test once with the final
    blame string injected so the compiler output names the optimization
    (or, for dpcpp, the phase) to blame.  Compiler logs and the blame
    result are appended to log.txt, either in a per-optimization directory
    under out_dir or, in inplace mode, in the current test directory.

    Parameters
    ----------
    fail_dir : str
        Directory of the failing test (used for naming and for copying).
    valid_res : str
        Expected ("golden") output of the test.
    fail_target : target object
        Failing compiler target; its specs.name selects the blame options.
    out_dir : str
        Root directory for triaged results (ignored when inplace is set).
    lock : lock object
        Lock protecting the copy of the test into out_dir.
    num : int
        Process number, used only for log messages.
    inplace : bool
        If True, write log.txt in the current directory and return the
        blamed optimization name instead of copying the test out.

    Returns
    -------
    False on triaging failure; True on success, or the blamed optimization
    name (str) when inplace is set.
    """
    blame_str = ""
    stdout = stderr = b""
    if not re.search("-O0", fail_target.args):
        blame_opts = compilers_blame_opts[fail_target.specs.name]
        phase_num = 0
        blame_phase_num = 0
        # Do blaming: narrow down one option level at a time.
        try:
            for i in blame_opts:
                blame_str += i
                blame_phase_num = execute_blame_phase(valid_res, fail_target, blame_str, num, phase_num)
                if fail_target.specs.name == "dpcpp":
                    # Special case because the triaging mechanism is different
                    # and there is only one level of triaging.
                    blame_str += str(blame_phase_num - 1)
                else:
                    blame_str += str(blame_phase_num)
                blame_str += " "
                phase_num += 1
        except Exception:
            # Narrowed from a bare "except:", which would also swallow
            # KeyboardInterrupt/SystemExit and make the pool unkillable.
            common.log_msg(logging.ERROR, "Something went wrong while executing blame_opt.py on " + str(fail_dir))
            return False
        # Wrap up results: rebuild with the full blame string injected so the
        # compiler/runtime output names the blamed optimization.
        gen_test_makefile.gen_makefile(
            out_file_name=blame_test_makefile_name,
            force=True,
            config_file=None,
            only_target=fail_target,
            inject_blame_opt=blame_str if fail_target.specs.name != "dpcpp" else None,
            inject_blame_env=blame_str if fail_target.specs.name == "dpcpp" else None)
        ret_code, stdout, stderr, time_expired, elapsed_time = \
            common.run_cmd(["make", "-f", blame_test_makefile_name, fail_target.name], run_gen.compiler_timeout, num)
        if fail_target.specs.name == "dpcpp":
            # dpcpp reports the blamed phase at run time, not compile time.
            ret_code, stdout, stderr, time_expired, elapsed_time = \
                common.run_cmd(["make", "-f", blame_test_makefile_name, "run_" + fail_target.name], run_gen.compiler_timeout, num)
        if fail_target.specs.name != "dpcpp":
            # Cut the optimization name out of stderr using the per-compiler
            # delimiters; the last match corresponds to the failing phase.
            opt_name_pattern = re.compile(compilers_opt_name_cutter[fail_target.specs.name][0] + ".*" +
                                          compilers_opt_name_cutter[fail_target.specs.name][1])
            opt_name = opt_name_pattern.findall(str(stderr, "utf-8"))[-1]
            opt_name = re.sub(compilers_opt_name_cutter[fail_target.specs.name][0], "", opt_name)
            opt_name = re.sub(compilers_opt_name_cutter[fail_target.specs.name][1], "", opt_name)
            real_opt_name = opt_name
            opt_name = opt_name.replace(" ", "_")
        else:
            if blame_phase_num == 1:
                # Special case for DPC++: 1 means that triaging failed and no
                # specific phase can be blamed.
                real_opt_name = opt_name = "FailedToBlame"
            else:
                opt_name_pattern = re.compile(compilers_opt_name_cutter[fail_target.specs.name][0] + ".*" +
                                              compilers_opt_name_cutter[fail_target.specs.name][1])
                opt_name = opt_name_pattern.findall(str(stderr, "utf-8"))[0]
                opt_name = re.sub(compilers_opt_name_cutter[fail_target.specs.name][0], "", opt_name)
                opt_name = re.sub(compilers_opt_name_cutter[fail_target.specs.name][1], "", opt_name)
                real_opt_name = opt_name
                opt_name = opt_name.replace(" ", "_")
    else:
        # -O0 failures cannot be narrowed down to a specific optimization.
        real_opt_name = opt_name = "O0_bug"
    common.run_cmd(["make", "-f", blame_test_makefile_name, "clean"], run_gen.compiler_timeout, num)
    seed_dir = os.path.basename(os.path.normpath(fail_dir))
    # Create log files in different places depending on the "inplace" switch.
    if not inplace:
        full_out_path = os.path.join(os.path.join(out_dir, opt_name), seed_dir)
        common.copy_test_to_out(fail_dir, full_out_path, lock)
    else:
        full_out_path = "."
    # Write to log
    with open(os.path.join(full_out_path, "log.txt"), "a") as log_file:
        log_file.write("\nBlaming for " + fail_target.name + " optset was done.\n")
        log_file.write("Optimization to blame: " + real_opt_name + "\n")
        log_file.write("Blame opts: " + blame_str + "\n\n")
        log_file.write("Details of blaming run:\n")
        log_file.write("=== Compiler log ==================================================\n")
        log_file.write(str(stdout, "utf-8"))
        log_file.write("=== Compiler err ==================================================\n")
        log_file.write(str(stderr, "utf-8"))
        log_file.write("=== Compiler end ==================================================\n")
    common.log_msg(logging.DEBUG, "Done blaming")
    # Inplace mode requires the blaming string to be communicated back to the caller.
    if not inplace:
        return True
    else:
        return real_opt_name
def prepare_env_and_blame(fail_dir, valid_res, fail_target, out_dir, lock, num, inplace=False):
    """Enter the failing test's directory and dispatch to blame().

    Returns False immediately when the target's compiler has no known
    blame option sequence; otherwise forwards all arguments to blame()
    and returns its result.
    """
    spec_name = fail_target.specs.name
    common.log_msg(logging.DEBUG, "Blaming target: " + fail_target.name + " | " + spec_name)
    os.chdir(fail_dir)
    if spec_name in compilers_blame_opts:
        return blame(fail_dir, valid_res, fail_target, out_dir, lock, num, inplace)
    common.log_msg(logging.DEBUG, "We can't blame " + fail_target.name + " (process " + str(num) + ")")
    return False
| 48.848921 | 160 | 0.60891 | true | true | |
f71a37e5c9f3342edb98fd5bc2f1279f8371e8c8 | 27,693 | py | Python | src/python/turicreate/data_structures/sketch.py | pappasG/turicreate | 494e313957a6c01333628b182a7d5bc6efea18f8 | [
"BSD-3-Clause"
] | null | null | null | src/python/turicreate/data_structures/sketch.py | pappasG/turicreate | 494e313957a6c01333628b182a7d5bc6efea18f8 | [
"BSD-3-Clause"
] | null | null | null | src/python/turicreate/data_structures/sketch.py | pappasG/turicreate | 494e313957a6c01333628b182a7d5bc6efea18f8 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright © 2017 Apple Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-3-clause license that can
# be found in the LICENSE.txt file or at https://opensource.org/licenses/BSD-3-Clause
"""
Efficiently compute the approximate statistics over an SArray.
"""
from __future__ import print_function as _
from __future__ import division as _
from __future__ import absolute_import as _
from .._cython.cy_sketch import UnitySketchProxy
from .._cython.context import debug_trace as cython_context
from .sarray import SArray
from .sframe import SFrame
import operator
from math import sqrt
__all__ = ['Sketch']
class Sketch(object):
    """
    Sketch summary of a single SArray (a column of an SFrame).

    The sketch is computed in time approximately linear in the length of
    the SArray; once built, all statistics queries return nearly
    instantly.  The following constructions are equivalent::

        >>> sketch = turicreate.Sketch(my_sarray)
        >>> sketch = my_sarray.summary()

    For numeric columns these statistics are exact: length, number of
    missing values, min, max, mean, sum, variance, standard deviation.
    These are approximate: number of unique values, quantiles, frequent
    items and per-element frequency counts.  For str columns only length
    and missing-value counts are exact.  SArrays of list/array type
    additionally expose a sub sketch over their flattened elements
    (:func:`~turicreate.Sketch.element_summary`), and dict SArrays expose
    key/value sub sketches
    (:func:`~turicreate.Sketch.dict_key_summary`,
    :func:`~turicreate.Sketch.dict_value_summary`).  Per-key sub sketches
    requested via ``sub_sketch_keys`` are retrieved with
    :func:`~turicreate.Sketch.element_sub_sketch`.

    Parameters
    ----------
    array : SArray
        Array to generate the sketch summary from.

    background : boolean
        If True, construction returns immediately and the sketch is built
        in the background; it may be queried incrementally (at a
        performance penalty) in the meantime.  Defaults to False.

    sub_sketch_keys : list, optional
        Keys for which to compute sub sketches: str keys for an SArray of
        dict type, non-negative int indexes for an SArray of vector
        (array) type.

    References
    ----------
    - Wikipedia. `Streaming algorithms. <http://en.wikipedia.org/wiki/Streaming_algorithm>`_
    - Charikar, et al. (2002) `Finding frequent items in data streams.
      <https://www.cs.rutgers.edu/~farach/pubs/FrequentStream.pdf>`_
    - Cormode, G. and Muthukrishnan, S. (2004) `An Improved Data Stream Summary:
      The Count-Min Sketch and its Applications.
      <http://dimacs.rutgers.edu/~graham/pubs/papers/cm-latin.pdf>`_
    """

    def __init__(self, array=None, background=False, sub_sketch_keys=None, _proxy=None):
        """__init__(array)
        Construct a new Sketch from an SArray.

        Parameters
        ----------
        array : SArray
            Array to sketch.

        background : boolean, optional
            If True, run the sketch computation in the background.  Its
            state can be polled with
            :func:`~turicreate.Sketch.sketch_ready`.  Default is False.

        sub_sketch_keys : list, optional
            Sub sketches to calculate: str keys for a dict SArray,
            non-negative int indexes for a vector (array) SArray.

        Raises
        ------
        TypeError
            If ``array`` is not an SArray (and no ``_proxy`` is given).
        """
        # The default used to be the mutable literal [], a shared-object
        # pitfall; None is converted to a fresh list per call instead.
        if sub_sketch_keys is None:
            sub_sketch_keys = []
        if _proxy:
            # Internal path: wrap an already-computed sketch proxy.
            self.__proxy__ = _proxy
        else:
            self.__proxy__ = UnitySketchProxy()
            if not isinstance(array, SArray):
                raise TypeError("Sketch object can only be constructed from SArrays")
            self.__proxy__.construct_from_sarray(array.__proxy__, background, sub_sketch_keys)

    def __repr__(self):
        """
        Emits a brief summary of all the statistics as a string.
        """
        fields = [
            ['size', 'Length', 'Yes'],
            ['min', 'Min', 'Yes'],
            ['max', 'Max', 'Yes'],
            ['mean', 'Mean', 'Yes'],
            ['sum', 'Sum', 'Yes'],
            ['var', 'Variance', 'Yes'],
            ['std', 'Standard Deviation', 'Yes'],
            ['num_missing', '# Missing Values', 'Yes'],
            ['num_unique', '# unique values', 'No'],
        ]

        s = '\n'
        result = []
        for field in fields:
            try:
                method_to_call = getattr(self, field[0])
                result.append([field[1], str(method_to_call()), field[2]])
            except Exception:
                # Not every statistic applies to every dtype (e.g. no mean
                # for strings); skip the ones that raise.  Narrowed from a
                # bare "except:" so KeyboardInterrupt is not swallowed.
                pass
        sf = SArray(result).unpack(column_name_prefix="")
        sf.rename({'0': 'item', '1': 'value', '2': 'is exact'}, inplace=True)
        s += sf.__str__(footer=False)
        s += "\n"

        s += "\nMost frequent items:\n"
        frequent = self.frequent_items()
        # Convert keys to strings, merging counts of keys that collide
        # after conversion.
        frequent_strkeys = {}
        for key in frequent:
            strkey = str(key)
            if strkey in frequent_strkeys:
                frequent_strkeys[strkey] += frequent[key]
            else:
                frequent_strkeys[strkey] = frequent[key]
        sorted_freq = sorted(frequent_strkeys.items(), key=operator.itemgetter(1), reverse=True)
        if len(sorted_freq) == 0:
            s += " -- All elements appear with less than 0.01% frequency -- \n"
        else:
            sorted_freq = sorted_freq[:10]
            sf = SFrame()
            sf['value'] = [elem[0] for elem in sorted_freq]
            sf['count'] = [elem[1] for elem in sorted_freq]
            s += sf.__str__(footer=False) + "\n"
        s += "\n"

        try:
            # Probe whether quantiles are defined (numeric dtypes only);
            # non-numeric sketches raise here and the section is skipped.
            self.quantile(0)
            s += "Quantiles: \n"
            sf = SFrame()
            for q in [0.0, 0.01, 0.05, 0.25, 0.5, 0.75, 0.95, 0.99, 1.00]:
                sf.add_column(SArray([self.quantile(q)]), str(int(q * 100)) + '%', inplace=True)
            s += sf.__str__(footer=False) + "\n"
        except Exception:
            pass

        try:
            # Only defined for dict-typed SArrays.
            t_k = self.dict_key_summary()
            t_v = self.dict_value_summary()
            s += "\n******** Dictionary Element Key Summary ********\n"
            s += t_k.__repr__()
            s += "\n******** Dictionary Element Value Summary ********\n"
            s += t_v.__repr__() + '\n'
        except Exception:
            pass

        try:
            # Only defined for list/vector-typed SArrays.
            t_k = self.element_summary()
            s += "\n******** Element Summary ********\n"
            s += t_k.__repr__() + '\n'
        except Exception:
            pass

        return s.expandtabs(8)

    def __str__(self):
        """
        Emits a brief summary of all the statistics as a string.
        """
        return self.__repr__()

    def size(self):
        """
        Returns the size of the input SArray.

        Returns
        -------
        out : int
            The number of elements of the input SArray.
        """
        with cython_context():
            return int(self.__proxy__.size())

    def max(self):
        """
        Returns the maximum value in the SArray (exact).  Returns *nan* on
        an empty array.

        Raises
        ------
        RuntimeError
            If the SArray is a non-numeric type.

        Returns
        -------
        out : type of SArray
            Maximum value of the SArray; nan if it is empty.
        """
        with cython_context():
            return self.__proxy__.max()

    def min(self):
        """
        Returns the minimum value in the SArray (exact).  Returns *nan* on
        an empty array.

        Raises
        ------
        RuntimeError
            If the SArray is a non-numeric type.

        Returns
        -------
        out : type of SArray
            Minimum value of the SArray; nan if it is empty.
        """
        with cython_context():
            return self.__proxy__.min()

    def sum(self):
        """
        Returns the sum of all the values in the SArray (exact).  Returns 0
        on an empty array.  May overflow without warning.

        Raises
        ------
        RuntimeError
            If the SArray is a non-numeric type.

        Returns
        -------
        out : type of SArray
            Sum of all values; 0 if the SArray is empty.
        """
        with cython_context():
            return self.__proxy__.sum()

    def mean(self):
        """
        Returns the mean of the values in the SArray (exact).  Returns 0 on
        an empty array.

        Raises
        ------
        RuntimeError
            If the SArray is a non-numeric type.

        Returns
        -------
        out : float
            Mean of all values; 0 if the SArray is empty.
        """
        with cython_context():
            return self.__proxy__.mean()

    def std(self):
        """
        Returns the standard deviation of the values in the SArray (exact).
        Returns 0 on an empty array.

        Raises
        ------
        RuntimeError
            If the SArray is a non-numeric type.

        Returns
        -------
        out : float
            Standard deviation of all values; 0 if the SArray is empty.
        """
        # Derived from the variance rather than queried from the proxy.
        return sqrt(self.var())

    def var(self):
        """
        Returns the variance of the values in the SArray (exact).  Returns
        0 on an empty array.

        Raises
        ------
        RuntimeError
            If the SArray is a non-numeric type.

        Returns
        -------
        out : float
            Variance of all values; 0 if the SArray is empty.
        """
        with cython_context():
            return self.__proxy__.var()

    def num_missing(self):
        """
        Returns the number of missing (i.e. None) values in the SArray
        (exact).  Returns 0 on an empty SArray.

        Returns
        -------
        out : int
            The number of missing values.
        """
        with cython_context():
            return int(self.__proxy__.num_undefined())

    def num_unique(self):
        """
        Returns a sketched estimate of the number of unique values in the
        SArray, based on the Hyperloglog sketch.

        Returns
        -------
        out : int
            An estimate of the number of unique values.
        """
        with cython_context():
            return int(self.__proxy__.num_unique())

    def frequent_items(self):
        """
        Returns a sketched estimate of the most frequent elements, based on
        the SpaceSaving sketch.  All elements appearing in more than 0.01%
        of rows are guaranteed to be present; others may appear as well.
        Counts are estimated with the CountSketch.  Missing values are not
        taken into account.  An empty result means every element occurs
        with less than 0.01% frequency.

        Returns
        -------
        out : dict
            Maps items to their estimated occurrence frequencies.
        """
        with cython_context():
            return self.__proxy__.frequent_items()

    def quantile(self, quantile_val):
        """
        Returns a sketched estimate of the value at a particular quantile,
        accurate within 1% (the 0.55 quantile is guaranteed to lie between
        the true 0.54 and 0.56 quantiles).  Only defined for numeric
        arrays.

        Parameters
        ----------
        quantile_val : float
            A value between 0.0 and 1.0 inclusive; values outside that
            range are clamped.

        Raises
        ------
        RuntimeError
            If the SArray is a non-numeric type.

        Returns
        -------
        out : float | str
            An estimate of the value at the quantile.
        """
        with cython_context():
            return self.__proxy__.get_quantile(quantile_val)

    def frequency_count(self, element):
        """
        Returns a sketched estimate of the number of occurrences of a given
        element, based on the count sketch.

        Parameters
        ----------
        element : val
            An element of the same type as the SArray.

        Raises
        ------
        RuntimeError
            If ``element`` is of the wrong type.

        Returns
        -------
        out : int
            An estimate of the number of occurrences of the element.
        """
        with cython_context():
            return int(self.__proxy__.frequency_count(element))

    def sketch_ready(self):
        """
        Returns True if the sketch has been executed on all the data.
        Always True when the sketch was created with background == False
        (the default); otherwise False until the background computation
        finishes.
        """
        with cython_context():
            return self.__proxy__.sketch_ready()

    def num_elements_processed(self):
        """
        Returns the number of elements processed so far.  Equal to the
        input length when the sketch was created with background == False
        (the default).
        """
        with cython_context():
            return self.__proxy__.num_elements_processed()

    def element_length_summary(self):
        """
        Returns the sketch summary of the element lengths.  Only valid for
        a sketch over an SArray of list/array/dict type; raises a runtime
        exception otherwise.

        Returns
        -------
        out : Sketch
            A new sketch over the element lengths of the current SArray.
        """
        with cython_context():
            return Sketch(_proxy=self.__proxy__.element_length_summary())

    def dict_key_summary(self):
        """
        Returns the sketch summary of all dictionary keys.  Only valid for
        a sketch over an SArray of dict type.  Keys are converted to
        strings before sketching.

        Returns
        -------
        out : Sketch
            A new sketch over the stringified dict keys.
        """
        with cython_context():
            return Sketch(_proxy=self.__proxy__.dict_key_summary())

    def dict_value_summary(self):
        """
        Returns the sketch summary of all dictionary values.  Only valid
        for a sketch over an SArray of dict type.  The value summary's type
        is inferred from the first set of values.

        Returns
        -------
        out : Sketch
            A new sketch over the dict values.
        """
        with cython_context():
            return Sketch(_proxy=self.__proxy__.dict_value_summary())

    def element_summary(self):
        """
        Returns the sketch summary of all element values.  Only valid for a
        sketch over an SArray of list or vector (array) type.  List values
        are treated as strings; vector values are sketched as FLOAT.

        Returns
        -------
        out : Sketch
            A new sketch over the flattened element values.
        """
        with cython_context():
            return Sketch(_proxy=self.__proxy__.element_summary())

    def element_sub_sketch(self, keys=None):
        """
        Returns the sketch summaries for the given set of keys.  Only
        applicable to sketches over SArrays of dict or vector (array) type.
        For dict SArrays the keys are dict keys; for vector SArrays they
        are indexes into the vector.  The keys must have been passed to the
        original summary() call via ``sub_sketch_keys``.

        Parameters
        ----------
        keys : list of str | str | list of int | int, optional
            Dictionary keys or array indexes to retrieve.  If not given,
            all available sub sketches are returned.

        Raises
        ------
        ValueError
            If the keys are of mixed types.
        KeyError
            If a requested key was not registered via ``sub_sketch_keys``.

        Returns
        -------
        out : dict | Sketch
            A dict mapping each key (index) to its Sketch — or, when a
            single non-list key was passed, that key's Sketch directly.
        """
        single_val = False
        if keys is None:
            keys = []
        else:
            if not isinstance(keys, list):
                # Remember to unwrap the result for a scalar key.
                single_val = True
                keys = [keys]
        value_types = set([type(i) for i in keys])
        if len(value_types) > 1:
            raise ValueError("All keys should have the same type.")

        with cython_context():
            ret_sketches = self.__proxy__.element_sub_sketch(keys)
            ret = {}

            # Check that every requested key came back from the proxy.
            for key in keys:
                if key not in ret_sketches:
                    raise KeyError("Cannot retrieve element sub sketch for key '" + str(key) + "'. Element sub sketch can only be retrieved when the summary object was created using the 'sub_sketch_keys' option.")
            for key in ret_sketches:
                ret[key] = Sketch(_proxy=ret_sketches[key])

            if single_val:
                return ret[keys[0]]
            else:
                return ret

    def cancel(self):
        """
        Cancels an ongoing background sketch computation immediately; does
        nothing if no background computation is running.
        """
        with cython_context():
            self.__proxy__.cancel()
| 36.728117 | 209 | 0.502799 |
from __future__ import print_function as _
from __future__ import division as _
from __future__ import absolute_import as _
from .._cython.cy_sketch import UnitySketchProxy
from .._cython.context import debug_trace as cython_context
from .sarray import SArray
from .sframe import SFrame
import operator
from math import sqrt
__all__ = ['Sketch']
class Sketch(object):
    """Sketch summary of a single SArray; supports exact statistics (size,
    min, max, mean, sum, var, std, num_missing) and approximate ones
    (num_unique, quantiles, frequent items) plus dict/element sub sketches."""

    def __init__(self, array=None, background=False, sub_sketch_keys=None, _proxy=None):
        """Construct a sketch from an SArray, or wrap an existing proxy."""
        # None instead of a mutable [] default avoids the shared-list pitfall.
        if sub_sketch_keys is None:
            sub_sketch_keys = []
        if _proxy:
            self.__proxy__ = _proxy
        else:
            self.__proxy__ = UnitySketchProxy()
            if not isinstance(array, SArray):
                raise TypeError("Sketch object can only be constructed from SArrays")
            self.__proxy__.construct_from_sarray(array.__proxy__, background, sub_sketch_keys)

    def __repr__(self):
        """Emit a brief summary of all the statistics as a string."""
        fields = [
            ['size', 'Length', 'Yes'],
            ['min', 'Min', 'Yes'],
            ['max', 'Max', 'Yes'],
            ['mean', 'Mean', 'Yes'],
            ['sum', 'Sum', 'Yes'],
            ['var', 'Variance', 'Yes'],
            ['std', 'Standard Deviation', 'Yes'],
            ['num_missing', '# Missing Values', 'Yes'],
            ['num_unique', '# unique values', 'No'],
        ]
        s = '\n'
        result = []
        for field in fields:
            try:
                method_to_call = getattr(self, field[0])
                result.append([field[1], str(method_to_call()), field[2]])
            except Exception:
                # Statistic not defined for this dtype; skip it.  Narrowed
                # from a bare "except:" so KeyboardInterrupt is not eaten.
                pass
        sf = SArray(result).unpack(column_name_prefix="")
        sf.rename({'0': 'item', '1': 'value', '2': 'is exact'}, inplace=True)
        s += sf.__str__(footer=False)
        s += "\n"
        s += "\nMost frequent items:\n"
        frequent = self.frequent_items()
        # Merge counts of items whose string representations collide.
        frequent_strkeys = {}
        for key in frequent:
            strkey = str(key)
            if strkey in frequent_strkeys:
                frequent_strkeys[strkey] += frequent[key]
            else:
                frequent_strkeys[strkey] = frequent[key]
        sorted_freq = sorted(frequent_strkeys.items(), key=operator.itemgetter(1), reverse=True)
        if len(sorted_freq) == 0:
            s += " -- All elements appear with less than 0.01% frequency -- \n"
        else:
            sorted_freq = sorted_freq[:10]
            sf = SFrame()
            sf['value'] = [elem[0] for elem in sorted_freq]
            sf['count'] = [elem[1] for elem in sorted_freq]
            s += sf.__str__(footer=False) + "\n"
        s += "\n"
        try:
            # Probe: quantiles exist only for numeric sketches.
            self.quantile(0)
            s += "Quantiles: \n"
            sf = SFrame()
            for q in [0.0, 0.01, 0.05, 0.25, 0.5, 0.75, 0.95, 0.99, 1.00]:
                sf.add_column(SArray([self.quantile(q)]), str(int(q * 100)) + '%', inplace=True)
            s += sf.__str__(footer=False) + "\n"
        except Exception:
            pass
        try:
            # Dict-typed SArrays only.
            t_k = self.dict_key_summary()
            t_v = self.dict_value_summary()
            s += "\n******** Dictionary Element Key Summary ********\n"
            s += t_k.__repr__()
            s += "\n******** Dictionary Element Value Summary ********\n"
            s += t_v.__repr__() + '\n'
        except Exception:
            pass
        try:
            # List/vector-typed SArrays only.
            t_k = self.element_summary()
            s += "\n******** Element Summary ********\n"
            s += t_k.__repr__() + '\n'
        except Exception:
            pass
        return s.expandtabs(8)

    def __str__(self):
        """Same as __repr__."""
        return self.__repr__()

    def size(self):
        """Return the number of elements of the input SArray (exact)."""
        with cython_context():
            return int(self.__proxy__.size())

    def max(self):
        """Return the exact maximum; nan on empty, raises for non-numeric."""
        with cython_context():
            return self.__proxy__.max()

    def min(self):
        """Return the exact minimum; nan on empty, raises for non-numeric."""
        with cython_context():
            return self.__proxy__.min()

    def sum(self):
        """Return the exact sum; 0 on empty, raises for non-numeric."""
        with cython_context():
            return self.__proxy__.sum()

    def mean(self):
        """Return the exact mean; 0 on empty, raises for non-numeric."""
        with cython_context():
            return self.__proxy__.mean()

    def std(self):
        """Return the exact standard deviation, derived from var()."""
        return sqrt(self.var())

    def var(self):
        """Return the exact variance; 0 on empty, raises for non-numeric."""
        with cython_context():
            return self.__proxy__.var()

    def num_missing(self):
        """Return the exact number of missing (None) values."""
        with cython_context():
            return int(self.__proxy__.num_undefined())

    def num_unique(self):
        """Return a Hyperloglog-based estimate of the unique-value count."""
        with cython_context():
            return int(self.__proxy__.num_unique())

    def frequent_items(self):
        """Return a SpaceSaving/CountSketch estimate of frequent items."""
        with cython_context():
            return self.__proxy__.frequent_items()

    def quantile(self, quantile_val):
        """Return an estimate (within 1%) of the value at the given quantile
        in [0.0, 1.0]; raises for non-numeric sketches."""
        with cython_context():
            return self.__proxy__.get_quantile(quantile_val)

    def frequency_count(self, element):
        """Return a count-sketch estimate of the element's occurrence count."""
        with cython_context():
            return int(self.__proxy__.frequency_count(element))

    def sketch_ready(self):
        """Return True once the (possibly background) sketch is complete."""
        with cython_context():
            return self.__proxy__.sketch_ready()

    def num_elements_processed(self):
        """Return how many elements the (background) sketch has processed."""
        with cython_context():
            return self.__proxy__.num_elements_processed()

    def element_length_summary(self):
        """Return a Sketch over element lengths (list/array/dict SArrays)."""
        with cython_context():
            return Sketch(_proxy=self.__proxy__.element_length_summary())

    def dict_key_summary(self):
        """Return a Sketch over stringified dict keys (dict SArrays only)."""
        with cython_context():
            return Sketch(_proxy=self.__proxy__.dict_key_summary())

    def dict_value_summary(self):
        """Return a Sketch over dict values (dict SArrays only)."""
        with cython_context():
            return Sketch(_proxy=self.__proxy__.dict_value_summary())

    def element_summary(self):
        """Return a Sketch over flattened elements (list/vector SArrays)."""
        with cython_context():
            return Sketch(_proxy=self.__proxy__.element_summary())

    def element_sub_sketch(self, keys=None):
        """Return sub sketches for the given keys (registered at summary()
        time via sub_sketch_keys); a single non-list key returns its Sketch
        directly, otherwise a dict of key -> Sketch."""
        single_val = False
        if keys is None:
            keys = []
        else:
            if not isinstance(keys, list):
                single_val = True
                keys = [keys]
        value_types = set([type(i) for i in keys])
        if len(value_types) > 1:
            raise ValueError("All keys should have the same type.")
        with cython_context():
            ret_sketches = self.__proxy__.element_sub_sketch(keys)
            ret = {}
            # Every requested key must have been registered up front.
            for key in keys:
                if key not in ret_sketches:
                    raise KeyError("Cannot retrieve element sub sketch for key '" + str(key) + "'. Element sub sketch can only be retrieved when the summary object was created using the 'sub_sketch_keys' option.")
            for key in ret_sketches:
                ret[key] = Sketch(_proxy=ret_sketches[key])
            if single_val:
                return ret[keys[0]]
            else:
                return ret

    def cancel(self):
        """Cancel a running background sketch; no-op otherwise."""
        with cython_context():
            self.__proxy__.cancel()
| true | true |
f71a380e5b2adadd88bb74e831433cb584917dad | 965 | py | Python | docs/source/rules/examples/REQ-E004/tester.py | yyang08/swagger-spec-compatibility | e7a6ba6fc53c6a8a92ba26016219a595a8cecbbe | [
"Apache-2.0"
] | 18 | 2019-04-30T21:07:30.000Z | 2021-12-16T17:56:08.000Z | docs/source/rules/examples/REQ-E004/tester.py | yyang08/swagger-spec-compatibility | e7a6ba6fc53c6a8a92ba26016219a595a8cecbbe | [
"Apache-2.0"
] | 30 | 2019-02-26T11:25:44.000Z | 2021-04-16T00:12:11.000Z | docs/source/rules/examples/REQ-E004/tester.py | yyang08/swagger-spec-compatibility | e7a6ba6fc53c6a8a92ba26016219a595a8cecbbe | [
"Apache-2.0"
] | 6 | 2019-02-25T22:12:29.000Z | 2020-12-23T00:24:48.000Z | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

from os.path import abspath

from bravado.client import SwaggerClient
from jsonschema import ValidationError
from six.moves.urllib.parse import urljoin
from six.moves.urllib.request import pathname2url

# Build two clients from the local swagger specs: ``old.yaml`` and the
# modified ``new.yaml`` (the change this REQ-E004 example demonstrates).
old_client = SwaggerClient.from_url(
    spec_url=urljoin('file:', pathname2url(abspath('old.yaml'))),
)
new_client = SwaggerClient.from_url(
    spec_url=urljoin('file:', pathname2url(abspath('new.yaml'))),
)

# Payload accepted by the old spec; the new spec is expected to reject it.
object_to_send = {'property_1': 'v1', 'property_2': 'v2', 'property_3': 'v3'}

print('Calling the post endpoint with the old client: Succeeded')
old_client.endpoint.post_endpoint(body=object_to_send)

# BUG FIX: this message previously said "old client", but it is the *new*
# client whose call is expected to fail request validation below.
print('Calling the post endpoint with the new client: Failed')
try:
    new_client.endpoint.post_endpoint(body=object_to_send)
    raise RuntimeError('An error was expected')
except ValidationError:
    pass
| 31.129032 | 77 | 0.779275 |
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from os.path import abspath
from bravado.client import SwaggerClient
from jsonschema import ValidationError
from six.moves.urllib.parse import urljoin
from six.moves.urllib.request import pathname2url
old_client = SwaggerClient.from_url(
spec_url=urljoin('file:', pathname2url(abspath('old.yaml'))),
)
new_client = SwaggerClient.from_url(
spec_url=urljoin('file:', pathname2url(abspath('new.yaml'))),
)
object_to_send = {'property_1': 'v1', 'property_2': 'v2', 'property_3': 'v3'}
print('Calling the post endpoint with the old client: Succeeded')
old_client.endpoint.post_endpoint(body=object_to_send)
print('Calling the post endpoint with the old client: Failed')
try:
new_client.endpoint.post_endpoint(body=object_to_send)
raise RuntimeError('An error was expected')
except ValidationError:
pass
| true | true |
f71a3828e5ab1b447e9e0f5e00e3b95d8c4e7d7e | 3,496 | py | Python | examples/upload_a_chapter.py | PythonCoderAS/Hondana | 14a7db9837bbe78212c462f845278777c246e3bf | [
"MIT"
] | 19 | 2021-07-21T01:25:06.000Z | 2022-03-14T21:22:45.000Z | examples/upload_a_chapter.py | PythonCoderAS/Hondana | 14a7db9837bbe78212c462f845278777c246e3bf | [
"MIT"
] | 5 | 2021-12-05T22:21:59.000Z | 2022-03-18T16:30:24.000Z | examples/upload_a_chapter.py | PythonCoderAS/Hondana | 14a7db9837bbe78212c462f845278777c246e3bf | [
"MIT"
] | 12 | 2021-07-17T18:26:33.000Z | 2022-03-21T19:57:46.000Z | """
This example shows three different ways to perform this task.
Please examine all three to find a method you like.
If you ask me: I prefer the first.
"""
import asyncio
import hondana
# Create your client; you must be authorised (valid username/password) to upload a chapter.
client = hondana.Client(username="my username", password="my password")
async def main():
    """Upload a chapter to the MangaDex API via the async context-manager flow."""
    # Chapter metadata for the upload.
    chapter_number = "1"
    volume_number = "1"
    language = "en"
    chapter_title = "..."
    groups = ["..."]

    # Fetch the manga the new chapter belongs to.
    manga = await client.view_manga("...")

    # Raw page images held in memory.
    # NOTE: list order == page order in the finished upload, so order it correctly.
    pages: list[bytes] = []

    session = client.upload_session(
        manga,
        volume=volume_number,
        chapter=chapter_number,
        title=chapter_title,
        translated_language=language,
        scanlator_groups=groups,
    )
    async with session as upload:
        # Upload the page bytes first (order matters, per the note above)...
        await upload.upload_images(pages)
        # ...then commit, which returns a valid ``hondana.Chapter`` instance.
        chapter = await upload.commit()
        # Leaving the ``async with`` without committing would commit for you,
        # discarding the returned chapter data.
async def alternative_main():
    """Upload a chapter by driving the ``hondana.ChapterUpload`` session by hand."""
    # Chapter metadata for the upload.
    chapter_number = "1"
    volume_number = "1"
    language = "en"
    chapter_title = "..."
    groups = ["..."]

    # A manga ID string works as the first argument (as would a ``hondana.Manga``).
    upload_session = client.upload_session(
        "...",
        volume=volume_number,
        chapter=chapter_number,
        title=chapter_title,
        translated_language=language,
        scanlator_groups=groups,
    )

    # The context manager performs this session check for you; done manually here.
    await upload_session._check_for_session()

    # Raw page images, in reading order.
    # NOTE: list order == page order in the finished upload, so order it correctly.
    pages: list[bytes] = []
    await upload_session.upload_images(pages)

    # NOTE: committing is **mandatory** when not using the context manager.
    chapter = await upload_session.commit()
async def other_alternative_main():
    """Upload a chapter in a single call with ``Client.upload_chapter``."""
    # Raw page images, in reading order.
    # NOTE: list order == page order in the finished upload, so order it correctly.
    pages: list[bytes] = []

    chapter = await client.upload_chapter(
        "...",
        volume="1",
        chapter="1",
        title="...",
        translated_language="en",
        images=pages,
        scanlator_groups=["..."],
    )
asyncio.run(main())
| 30.4 | 139 | 0.670767 |
import asyncio
import hondana
client = hondana.Client(username="my username", password="my password")
async def main():
chapter = "1"
volume = "1"
translated_language = "en"
title = "..."
scanlator_groups = ["..."]
manga = await client.view_manga("...")
## NOTE: The order of this list is important, this is the order in which the pages will be presented in the finished upload.
## Please ensure you order this correctly.
files: list[bytes] = []
# Open our upload session
async with client.upload_session(
manga,
volume=volume,
chapter=chapter,
title=title,
translated_language=translated_language,
scanlator_groups=scanlator_groups,
) as upload_session:
# First we upload the bytes we stored in memory, adhering to the earlier note.
await upload_session.upload_images(files)
# Then we choose to commit that data, which returns a valid ``hondana.Chapter`` instance.
chapter = await upload_session.commit()
## You can also choose not to commit manually, exiting this context manager will commit for you, and discard the returned chapter data.
async def alternative_main():
# Define your chapter details
chapter = "1"
volume = "1"
translated_language = "en"
title = "..."
scanlator_groups = ["..."]
# This will create and return an instance of ``hondana.ChapterUpload``
## You can also use a manga ID, or a ``hondana.Manga`` instance as the first parameter
upload_session = client.upload_session(
"...",
volume=volume,
chapter=chapter,
title=title,
translated_language=translated_language,
scanlator_groups=scanlator_groups,
)
# I recommend the context manager method, since the session checking and committing are done for you.
await upload_session._check_for_session()
# Create and upload your images.
## NOTE: The order of this list is important, this is the order in which the pages will be presented in the finished upload.
## Please ensure you order this correctly.
images: list[bytes] = []
await upload_session.upload_images(images)
## NOTE: You **MUST** commit when not using the context manager.
chapter = await upload_session.commit()
async def other_alternative_main():
# Define your chapter details
chapter = "1"
volume = "1"
translated_language = "en"
title = "..."
scanlator_groups = ["..."]
# Create and upload your images.
## NOTE: The order of this list is important, this is the order in which the pages will be presented in the finished upload.
## Please ensure you order this correctly.
images: list[bytes] = []
chapter = await client.upload_chapter(
"...",
volume=volume,
chapter=chapter,
title=title,
translated_language=translated_language,
images=images,
scanlator_groups=scanlator_groups,
)
asyncio.run(main())
| true | true |
f71a387c3ff2cd382f14cdd92eec52461942a18f | 945 | py | Python | questions/q354_water_overflow/code.py | aadhityasw/Competitive-Programs | 901a48d35f024a3a87c32a45b7f4531e8004a203 | [
"MIT"
] | null | null | null | questions/q354_water_overflow/code.py | aadhityasw/Competitive-Programs | 901a48d35f024a3a87c32a45b7f4531e8004a203 | [
"MIT"
] | 1 | 2021-05-15T07:56:51.000Z | 2021-05-15T07:56:51.000Z | questions/q354_water_overflow/code.py | aadhityasw/Competitive-Programs | 901a48d35f024a3a87c32a45b7f4531e8004a203 | [
"MIT"
] | null | null | null | class Solution:
def waterOverflow(self, K, R, C):
if R <= 0 or C <= 0 or C > R :
return 0
table = [[K]]
i = 0
while True :
table.append([0]*(i+2))
flag = True
for j in range(i+1) :
if table[i][j] > 1 :
val = (table[i][j] - 1) / 2
table[i][j] = 1
table[i+1][j] += val
table[i+1][j+1] += val
flag = False
if flag or i > (R-1) :
break
i += 1
if table[R-1][C-1] == int(table[R-1][C-1]) :
return int(table[R-1][C-1])
return round(table[R-1][C-1], 6)
if __name__ == '__main__':
    # Read the number of test cases, then one "K R C" triple per case.
    case_count = int(input())
    solver = Solution()
    for _ in range(case_count):
        k, r, c = map(int, input().split())
        print(solver.waterOverflow(k, r, c))
| 26.25 | 52 | 0.359788 | class Solution:
def waterOverflow(self, K, R, C):
if R <= 0 or C <= 0 or C > R :
return 0
table = [[K]]
i = 0
while True :
table.append([0]*(i+2))
flag = True
for j in range(i+1) :
if table[i][j] > 1 :
val = (table[i][j] - 1) / 2
table[i][j] = 1
table[i+1][j] += val
table[i+1][j+1] += val
flag = False
if flag or i > (R-1) :
break
i += 1
if table[R-1][C-1] == int(table[R-1][C-1]) :
return int(table[R-1][C-1])
return round(table[R-1][C-1], 6)
if __name__ == '__main__':
t = int (input ())
for _ in range (t):
K,R,C=map(int,input().split())
ob = Solution()
print(ob.waterOverflow(K,R,C))
| true | true |
f71a389b852f7333755362f2c4739c7e128d3163 | 173 | py | Python | LR/production/test.py | whz-NJ/PersonalRecommendation | 4887209270f052d6d39bb35ee0c90498496849d8 | [
"Apache-2.0"
] | null | null | null | LR/production/test.py | whz-NJ/PersonalRecommendation | 4887209270f052d6d39bb35ee0c90498496849d8 | [
"Apache-2.0"
] | null | null | null | LR/production/test.py | whz-NJ/PersonalRecommendation | 4887209270f052d6d39bb35ee0c90498496849d8 | [
"Apache-2.0"
] | null | null | null | #看看文件内容有多少列
# Inspect how many comma-separated columns each line of the LR coefficient file has.
if __name__ == "__main__":
    # BUG FIX: the file handle was never closed (and an unused ``count``
    # variable was dropped); ``with`` guarantees the close.
    with open("../data/lr_coef") as fp:
        for line in fp:
            fields = line.strip().split(",")
            print(len(fields))
| 19.222222 | 38 | 0.531792 |
if __name__ == "__main__":
fp = open("../data/lr_coef")
count = 0
for line in fp:
item = line.strip().split(",")
print (len(item))
| true | true |
f71a3969c7a14edff97577d65dbc459028956dcc | 654 | py | Python | projects/migrations/0017_project_user.py | Tuitoek/Awwards | 090b4a0dc7ea3b0b733d61732fca4554baba5e90 | [
"MIT"
] | null | null | null | projects/migrations/0017_project_user.py | Tuitoek/Awwards | 090b4a0dc7ea3b0b733d61732fca4554baba5e90 | [
"MIT"
] | null | null | null | projects/migrations/0017_project_user.py | Tuitoek/Awwards | 090b4a0dc7ea3b0b733d61732fca4554baba5e90 | [
"MIT"
] | 1 | 2021-09-21T12:52:12.000Z | 2021-09-21T12:52:12.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-03-20 14:32
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add an optional one-to-one ``user`` link from ``Project`` to the
    configured auth user model (migration 0017 of the ``projects`` app)."""
    dependencies = [
        # Depends on whichever model AUTH_USER_MODEL is swapped to.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('projects', '0016_auto_20190320_1731'),
    ]
    operations = [
        migrations.AddField(
            model_name='project',
            name='user',
            # null=True so existing Project rows remain valid without a user.
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
| 27.25 | 124 | 0.683486 |
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('projects', '0016_auto_20190320_1731'),
]
operations = [
migrations.AddField(
model_name='project',
name='user',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| true | true |
f71a397e2dbddfef3306743b9d7789a6cc7dd8b2 | 46,025 | py | Python | selfdrive/car/hyundai/values.py | yunbong2/multi-076 | 5079eab33fbc69097e38cd8aced3c904c11c9bc8 | [
"MIT"
] | null | null | null | selfdrive/car/hyundai/values.py | yunbong2/multi-076 | 5079eab33fbc69097e38cd8aced3c904c11c9bc8 | [
"MIT"
] | null | null | null | selfdrive/car/hyundai/values.py | yunbong2/multi-076 | 5079eab33fbc69097e38cd8aced3c904c11c9bc8 | [
"MIT"
] | 5 | 2020-09-28T06:36:56.000Z | 2020-09-29T13:26:03.000Z | from cereal import car
from selfdrive.car import dbc_dict
from common.params import Params
Ecu = car.CarParams.Ecu
# Steer torque limits
class SteerLimitParams:
    """Tuning constants limiting the commanded steering torque
    (raw CAN counts; exact units depend on the platform's EPS)."""
    STEER_MAX = 280  # 409 is the max, 255 is stock
    STEER_DELTA_UP = 5    # per-frame ramp-up limit — presumably counts/frame, confirm against controller
    STEER_DELTA_DOWN = 5  # per-frame ramp-down limit
    STEER_DRIVER_ALLOWANCE = 50   # driver-torque threshold before the command is reduced
    STEER_DRIVER_MULTIPLIER = 2   # weight applied to measured driver torque
    STEER_DRIVER_FACTOR = 1
class CAR:
  """Platform identifier strings for supported Hyundai / Kia / Genesis models
  (used as keys into the fingerprint tables below)."""
  AVANTE = "HYUNDAI AVANTE"
  SONATA = "HYUNDAI SONATA"
  SONATA_HEV = "HYUNDAI SONATA Hybrid"
  SONATA_TURBO = "HYUNDAI SONATA Turbo"
  GRANDEUR = "HYUNDAI GRANDEUR"
  GRANDEUR_HEV = "HYUNDAI GRANDEUR Hybrid"
  GENESIS = "GENESIS"
  SANTAFE = "HYUNDAI SANTAFE"
  KONA = "HYUNDAI KONA"
  KONA_HEV = "HYUNDAI KONA Hybrid"
  KONA_EV = "HYUNDAI KONA ELECTRIC"
  IONIQ_HEV = "HYUNDAI IONIQ HYBRID"
  IONIQ_EV = "HYUNDAI IONIQ ELECTRIC"
  K5 = "KIA K5"
  K5_HEV = "KIA K5 Hybrid"
  K7 = "KIA K7"
  K7_HEV = "KIA K7 Hybrid"
  STINGER = "KIA STINGER"
  SORENTO = "KIA SORENTO"
  NIRO_HEV = "KIA NIRO Hybrid"
  NIRO_EV = "KIA NIRO ELECTRIC"
  NEXO = "HYUNDAI NEXO"
  MOHAVE = "KIA MOHAVE"
  I30 = "HYUNDAI I30"
  SELTOS = "KIA SELTOS"
  PALISADE = "HYUNDAI PALISADE"
class Buttons:
  """Numeric codes for the cruise-control buttons."""

  NONE = 0       # no button pressed
  RES_ACCEL = 1  # resume / accelerate
  SET_DECEL = 2  # set / decelerate
  GAP_DIST = 3   # following-gap adjustment
  CANCEL = 4     # cancel
# Persistent key/value store shared with the rest of the system.
params = Params()
# "FingerprintIssuedFix" is a stored toggle; when it is the string "1", the
# alternate (mostly empty) FINGERPRINTS tables defined below are selected.
fingerprint_issued_fix = params.get("FingerprintIssuedFix", encoding='utf8') == "1"
if fingerprint_issued_fix:
FINGERPRINTS = {
CAR.AVANTE: [{}],
CAR.SONATA: [{}],
CAR.SONATA_HEV: [{}],
CAR.SONATA_TURBO: [{}],
CAR.GRANDEUR: [{}],
CAR.GRANDEUR_HEV: [{}],
CAR.GENESIS: [{}],
CAR.SANTAFE: [{}],
CAR.KONA: [{}],
CAR.KONA_HEV: [{}],
CAR.KONA_EV: [{}],
CAR.IONIQ_HEV: [{}],
CAR.IONIQ_EV: [{}],
CAR.K5: [{}],
CAR.K5_HEV: [{}],
CAR.K7: [{}],
CAR.K7_HEV: [{}],
CAR.STINGER: [{}],
CAR.NIRO_HEV: [{304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1535: 8},
{304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1535: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1470: 8, 1535: 8},
{304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1470: 8, 1535: 8},
{68: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 549: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 549: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1535: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1535: 8},
{304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1292: 8, 1345: 8, 1363: 8, 1419: 8, 1429: 8, 1448: 8, 1456: 4},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8}],
CAR.NIRO_EV: [{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8}],
CAR.NEXO: [{}],
CAR.MOHAVE: [{}],
CAR.I30: [{}],
CAR.SELTOS: [{}],
CAR.PALISADE: [{}],
CAR.SORENTO: [{}],
}
else:
FINGERPRINTS = {
CAR.AVANTE: [{66: 8, 67: 8, 68: 8, 127: 8, 128: 8, 129: 8, 273: 8, 274: 8, 275: 8, 339: 8, 354: 3, 356: 4, 399: 8, 512: 6, 544: 8, 608: 8, 790: 8, 809: 8, 832: 8, 899: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1170: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1314: 8, 1322: 8, 1345: 8, 1349: 8, 1351: 8, 1353: 8, 1356: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1427: 6, 1440: 8, 1456: 4, 1472: 8, 1491: 8, 1530: 8}],
CAR.SONATA: [{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1444: 8, 1456: 4, 1470: 8},
{64: 8, 66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 512: 6, 544: 8, 593: 8, 608: 8, 625: 8, 688: 5, 790: 8, 809: 8, 832: 8, 897: 8, 899: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1314: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1366: 8, 1367: 8, 1369: 8, 1371: 8, 1407: 8, 1415: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1460: 8, 1470: 8, 1472: 8, 1491: 8, 1530: 8, 1990: 8, 1998: 8, 2016: 8, 2024: 8},
{66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 512: 6, 544: 8, 608: 8, 790: 8, 809: 8, 832: 8, 899: 8, 902: 8, 903: 6, 912: 7, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1312: 8, 1314: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1415: 8, 1419: 8, 1427: 6, 1440: 8, 1456: 4, 1460: 8, 1470: 8, 1472: 8, 1491: 8, 1530: 8}],
CAR.SONATA_HEV: [{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 7, 593: 8, 688: 5, 832: 7, 881: 8, 882: 8, 897: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1151: 6, 1168: 7, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1345: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8}],
CAR.SONATA_TURBO: [{64: 8, 66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 447: 8, 512: 6, 544: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 884: 8, 897: 8, 899: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1253: 8, 1254: 8, 1255: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1314: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1371: 8, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1460: 8, 1470: 8, 1472: 8, 1486: 8, 1487: 8, 1491: 8, 1530: 8, 2015: 8, 2024: 8},
{66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 512: 6, 544: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 897: 8, 899: 8, 902: 8, 903: 6, 912: 7, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1312: 8, 1314: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1427: 6, 1440: 8, 1456: 4, 1460: 8, 1470: 8, 1471: 8, 1472: 8, 1491: 8, 1530: 8, 1532: 5, 2016: 8, 2024: 8},
{64: 8, 66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 447: 8, 512: 6, 544: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 884: 8, 897: 8, 899: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1253: 8, 1254: 8, 1255: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1314: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1460: 8, 1470: 8, 1472: 8, 1486: 8, 1487: 8, 1491: 8, 1530: 8, 1905: 8, 1913: 8, 1990: 8, 1998: 8, 2006: 8, 2014: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8}],
CAR.GRANDEUR: [{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1185: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1456: 4, 1470: 8},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1185: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1456: 4, 1470: 8}],
CAR.GRANDEUR_HEV: [{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1151: 6, 1156: 8, 1157: 4, 1168: 7, 1173: 8, 1185: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 865: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1108: 8, 1136: 6, 1138: 5, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 516: 8, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 865: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1108: 8, 1136: 6, 1138: 5, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8}],
CAR.GENESIS: [{67: 8, 68: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 7, 593: 8, 608: 8, 688: 5, 809: 8, 854: 7, 870: 7, 871: 8, 872: 5, 897: 8, 902: 8, 903: 6, 912: 7, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1268: 8, 1280: 1, 1281: 3, 1287: 4, 1292: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1334: 8, 1335: 8, 1345: 8, 1363: 8, 1369: 8, 1370: 8, 1371: 8, 1378: 4, 1384: 5, 1407: 8, 1419: 8, 1427: 6, 1434: 2, 1437: 8, 1456: 4},
{67: 8, 68: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 7, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 5, 897: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1280: 1, 1287: 4, 1292: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1334: 8, 1335: 8, 1345: 8, 1363: 8, 1369: 8, 1370: 8, 1378: 4, 1379: 8, 1384: 5, 1407: 8, 1425: 2, 1427: 6, 1437: 8, 1456: 4},
{67: 8, 68: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 7, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 5, 897: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1280: 1, 1287: 4, 1292: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1334: 8, 1335: 8, 1345: 8, 1363: 8, 1369: 8, 1370: 8, 1371: 8, 1378: 4, 1384: 5, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1437: 8, 1456: 4}],
CAR.SANTAFE: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1379: 8, 1384: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 6, 764: 8, 809: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1183: 8, 1186: 2, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1988: 8, 2000: 8, 2004: 8, 2008: 8, 2012: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 912: 7, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1183: 8, 1186: 2, 1191: 2, 1227: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1456: 4, 1470: 8, 1628: 8, 1629: 8, 1630: 8, 1631: 8, 1674: 8, 1675: 8, 1676: 8, 1677: 8, 1791: 8, 2015: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1378: 8, 1379: 8, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1456: 4, 1470: 8, 1479: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 547: 8, 548: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1379: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1479: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1183: 8, 1186: 2, 1191: 2, 1210: 8, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1384: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1911: 8}],
CAR.KONA: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 354: 3, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1156: 8, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1193: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1384: 8, 1394: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8}],
CAR.KONA_HEV: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 354: 3, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 909: 8, 916: 8, 1040: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1265: 4, 1280: 1, 1287: 4, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1384: 8, 1394: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1988: 8, 1990: 8, 1998: 8, 2001: 8, 2004: 8, 2009: 8, 2012: 8, 2015: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1138: 4, 1151: 6, 1155: 8, 1157: 4, 1164: 8, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1193: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 8, 1379: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8}],
CAR.KONA_EV: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 354: 3, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 909: 8, 916: 8, 1040: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1265: 4, 1280: 1, 1287: 4, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1384: 8, 1394: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1988: 8, 1990: 8, 1998: 8, 2001: 8, 2004: 8, 2009: 8, 2012: 8, 2015: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 4, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 4, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1193: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 4, 1379: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 547: 8, 548: 8, 549: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 4, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8}],
CAR.IONIQ_HEV: [{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470:8, 1476: 8, 1535: 8}],
CAR.IONIQ_EV: [{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 7, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1168: 7, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1425: 2, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 7, 545: 8, 546: 8, 548: 8, 549: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1168: 7, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 7, 546: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1168: 7, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1507: 8}],
CAR.K5: [{64: 8, 66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 447: 8, 512: 6, 544: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 884: 8, 897: 8, 899: 8, 902: 8, 903: 6, 909: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1186: 2, 1191: 2, 1253: 8, 1254: 8, 1255: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1470: 8, 1472: 8, 1486: 8, 1487: 8, 1491: 8, 1530: 8, 1532: 5, 1952: 8, 1960: 8, 1988: 8, 1996: 8, 2001: 8, 2004: 8, 2008: 8, 2009: 8, 2012: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8},
{64: 8, 66: 8, 67: 8, 68: 8, 127: 8, 128: 8, 129: 8, 273: 8, 274: 8, 275: 8, 339: 8, 354: 3, 356: 4, 399: 8, 512: 6, 544: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 897: 8, 899: 8, 902: 8, 903: 6, 912: 7, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1265: 4, 1268: 8, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1356: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1470: 8, 1472: 8, 1491: 8, 1492: 8}],
CAR.K5_HEV: [{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1151: 6, 1168: 7, 1173: 8, 1236: 2, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 909: 8, 912: 7, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1151: 6, 1168: 7, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1407: 8, 1419: 8, 1420: 8, 1425: 2, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8}],
CAR.K7: [{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1444: 8, 1456: 4, 1470: 8},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 608: 8, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1444: 8, 1456: 4, 1470: 8},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1444: 8, 1456: 4, 1470: 8}],
CAR.K7_HEV: [{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 865: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1096: 8, 1102: 8, 1108: 8, 1136: 6, 1138: 5, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1210: 8, 1227: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1343: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1379: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8}],
CAR.STINGER: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 358: 6, 359: 8, 544: 8, 576: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1280: 1, 1281: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1378: 4, 1379: 8, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1456: 4, 1470: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 358: 6, 359: 8, 544: 8, 576: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1280: 1, 1281: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1437: 8, 1456: 4, 1470: 8}],
CAR.NIRO_HEV: [{}],
CAR.NIRO_EV: [{}],
CAR.NEXO: [{127: 8, 145: 8, 146: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 512: 6, 544: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 908: 8, 909: 8, 912: 7, 916: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1173: 8, 1174: 8, 1180: 8, 1183: 8, 1186: 2, 1191: 2, 1192: 8, 1193: 8, 1210: 8, 1219: 8, 1220: 8, 1222: 6, 1223: 8, 1224: 8, 1227: 8, 1230: 6, 1231: 6, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1297: 8, 1298: 8, 1305: 8, 1312: 8, 1315: 8, 1316: 8, 1322: 8, 1324: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1437: 8, 1456: 4, 1460: 8, 1470: 8, 1484: 8, 1507: 8, 1520: 8, 1535: 8},
{127: 8, 145: 8, 146: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 512: 6, 544: 8, 546: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 908: 8, 909: 8, 912: 7, 916: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1173: 8, 1174: 8, 1180: 8, 1183: 8, 1186: 2, 1191: 2, 1192: 8, 1193: 8, 1210: 8, 1219: 8, 1220: 8, 1222: 6, 1223: 8, 1224: 8, 1227: 8, 1230: 6, 1231: 6, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1297: 8, 1298: 8, 1305: 8, 1312: 8, 1315: 8, 1316: 8, 1322: 8, 1324: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1437: 8, 1456: 4, 1460: 8, 1470: 8, 1484: 8, 1507: 8, 1520: 8, 1535: 8}],
CAR.MOHAVE: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 8, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1123: 8, 1136: 8, 1145: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1170: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1479: 8}],
CAR.I30: [{67: 8, 68: 8, 127: 8, 128: 8, 129: 8, 273: 8, 274: 8, 275: 8, 339: 8, 354: 3, 356: 4, 399: 8, 512: 6, 544: 8, 608: 8, 790: 8, 809: 8, 832: 8, 899: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1356: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1427: 6, 1440: 8, 1456: 4, 1470: 8, 1486: 8, 1487: 8, 1491: 8, 1530: 8}],
CAR.SELTOS: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 354: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 8, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 905: 8, 909: 8, 910: 5, 911: 5, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1114: 8, 1136: 8, 1145: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1379: 8, 1384: 8, 1394: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1446: 8, 1456: 4, 1470: 8, 1485: 8, 1911: 8}],
CAR.PALISADE: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 549: 8, 576: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1123: 8, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 2000: 8, 2005: 8, 2008: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 576: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1123: 8, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 576: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1123: 8, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8}],
CAR.SORENTO: [{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1370: 8, 1371: 8, 1384: 8, 1407: 8, 1411: 8, 1419: 8, 1425: 2, 1427: 6, 1444: 8, 1456: 4, 1470: 8, 1489: 1},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 8, 1168: 7, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1479: 8},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 548: 8, 550: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 8, 1168: 7, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1479: 8},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 8, 1168: 7, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1479: 8},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 7, 608: 8, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 5, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1370: 8, 1384: 5, 1407: 8, 1411: 8, 1419: 8, 1427: 6, 1437: 8, 1444: 8, 1456: 4, 1470: 8, 1489: 1, 1990: 8, 1998: 8}],
}
# Fallback ECU fingerprint: CAN message addresses whose presence on the bus
# identifies a given ECU when no firmware-version match is available.
# NOTE(review): only the forward camera is fingerprinted here — confirm these
# addresses against the camera's DBC before extending the list.
ECU_FINGERPRINT = {
Ecu.fwdCamera: [832, 1156, 1191, 1342]
}
# Per-platform CAN message checksum style, keyed by algorithm name.
# NOTE(review): the actual "crc8" / "6B" algorithms are implemented elsewhere
# (CAN packer/parser) — cars not listed here presumably use the default
# checksum; verify against the message-building code.
CHECKSUM = {
"crc8": [CAR.SANTAFE, CAR.SONATA, CAR.PALISADE],
"6B": [CAR.SORENTO, CAR.GENESIS],
}
# Per-platform source of the gear-selection signal on the CAN bus.
FEATURES = {
"use_cluster_gears": [CAR.KONA, CAR.GRANDEUR, CAR.K7, CAR.MOHAVE, CAR.I30, CAR.AVANTE], # use cluster message for gear selection, rather than transmission
"use_tcu_gears": [CAR.K5, CAR.SONATA, CAR.SONATA_TURBO], # use TCU message for gear selection
"use_elect_gears": [CAR.K5_HEV, CAR.SONATA_HEV, CAR.GRANDEUR_HEV, CAR.IONIQ_HEV, CAR.IONIQ_EV, CAR.NIRO_HEV, CAR.KONA_HEV, CAR.KONA_EV, CAR.NIRO_EV, CAR.NEXO], # use ELECT_GEAR message for gear selection (was a copy-pasted "TCU" comment)
}
EV_HYBRID = [CAR.K5_HEV, CAR.SONATA_HEV, CAR.GRANDEUR_HEV, CAR.IONIQ_HEV, CAR.IONIQ_EV, CAR.NIRO_HEV, CAR.KONA_HEV, CAR.KONA_EV, CAR.NIRO_EV, CAR.NEXO]
# Every supported platform currently uses the same generic Hyundai/Kia DBC,
# so build the mapping with one comprehension instead of 26 repeated entries.
# Add a car to the tuple below (or give it its own explicit entry) when a
# platform needs a different DBC.
DBC = {car_model: dbc_dict('hyundai_kia_generic', None) for car_model in (
CAR.AVANTE, CAR.SONATA, CAR.SONATA_HEV, CAR.SONATA_TURBO,
CAR.GRANDEUR, CAR.GRANDEUR_HEV, CAR.GENESIS, CAR.SANTAFE,
CAR.KONA, CAR.KONA_HEV, CAR.KONA_EV, CAR.IONIQ_HEV, CAR.IONIQ_EV,
CAR.K5, CAR.K5_HEV, CAR.K7, CAR.K7_HEV, CAR.STINGER,
CAR.NIRO_HEV, CAR.NIRO_EV, CAR.NEXO, CAR.MOHAVE, CAR.I30,
CAR.SELTOS, CAR.PALISADE, CAR.SORENTO,
)}
STEER_THRESHOLD = 360
| 212.096774 | 793 | 0.536274 | from cereal import car
from selfdrive.car import dbc_dict
from common.params import Params
Ecu = car.CarParams.Ecu
class SteerLimitParams:
STEER_MAX = 280
STEER_DELTA_UP = 5
STEER_DELTA_DOWN = 5
STEER_DRIVER_ALLOWANCE = 50
STEER_DRIVER_MULTIPLIER = 2
STEER_DRIVER_FACTOR = 1
class CAR:
AVANTE = "HYUNDAI AVANTE"
SONATA = "HYUNDAI SONATA"
SONATA_HEV = "HYUNDAI SONATA Hybrid"
SONATA_TURBO = "HYUNDAI SONATA Turbo"
GRANDEUR = "HYUNDAI GRANDEUR"
GRANDEUR_HEV = "HYUNDAI GRANDEUR Hybrid"
GENESIS = "GENESIS"
SANTAFE = "HYUNDAI SANTAFE"
KONA = "HYUNDAI KONA"
KONA_HEV = "HYUNDAI KONA Hybrid"
KONA_EV = "HYUNDAI KONA ELECTRIC"
IONIQ_HEV = "HYUNDAI IONIQ HYBRID"
IONIQ_EV = "HYUNDAI IONIQ ELECTRIC"
K5 = "KIA K5"
K5_HEV = "KIA K5 Hybrid"
K7 = "KIA K7"
K7_HEV = "KIA K7 Hybrid"
STINGER = "KIA STINGER"
SORENTO = "KIA SORENTO"
NIRO_HEV = "KIA NIRO Hybrid"
NIRO_EV = "KIA NIRO ELECTRIC"
NEXO = "HYUNDAI NEXO"
MOHAVE = "KIA MOHAVE"
I30 = "HYUNDAI I30"
SELTOS = "KIA SELTOS"
PALISADE = "HYUNDAI PALISADE"
class Buttons:
  """Cruise-control button codes, as the names suggest.

  NOTE(review): presumably these are the values of the steering-wheel button
  field in a CLU CAN message — the message that carries them is not visible
  in this file; confirm against the carstate/carcontroller parsers.
  """
  NONE = 0       # no button pressed
  RES_ACCEL = 1  # resume / accelerate
  SET_DECEL = 2  # set / decelerate
  GAP_DIST = 3   # following-gap distance
  CANCEL = 4     # cancel cruise
# Read the persistent "FingerprintIssuedFix" toggle at import time; the stored
# value is a string, so only the literal "1" enables the fix.  The flag selects
# which FINGERPRINTS table is defined below (the "fixed" variant uses mostly
# empty per-car fingerprint dicts).
params = Params()
fingerprint_issued_fix = params.get("FingerprintIssuedFix", encoding='utf8') == "1"
if fingerprint_issued_fix:
FINGERPRINTS = {
CAR.AVANTE: [{}],
CAR.SONATA: [{}],
CAR.SONATA_HEV: [{}],
CAR.SONATA_TURBO: [{}],
CAR.GRANDEUR: [{}],
CAR.GRANDEUR_HEV: [{}],
CAR.GENESIS: [{}],
CAR.SANTAFE: [{}],
CAR.KONA: [{}],
CAR.KONA_HEV: [{}],
CAR.KONA_EV: [{}],
CAR.IONIQ_HEV: [{}],
CAR.IONIQ_EV: [{}],
CAR.K5: [{}],
CAR.K5_HEV: [{}],
CAR.K7: [{}],
CAR.K7_HEV: [{}],
CAR.STINGER: [{}],
CAR.NIRO_HEV: [{304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1535: 8},
{304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1535: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1470: 8, 1535: 8},
{304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1470: 8, 1535: 8},
{68: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 549: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 549: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1535: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1419: 8, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1535: 8},
{304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1292: 8, 1345: 8, 1363: 8, 1419: 8, 1429: 8, 1448: 8, 1456: 4},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 576: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8}],
CAR.NIRO_EV: [{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8}],
CAR.NEXO: [{}],
CAR.MOHAVE: [{}],
CAR.I30: [{}],
CAR.SELTOS: [{}],
CAR.PALISADE: [{}],
CAR.SORENTO: [{}],
}
else:
FINGERPRINTS = {
CAR.AVANTE: [{66: 8, 67: 8, 68: 8, 127: 8, 128: 8, 129: 8, 273: 8, 274: 8, 275: 8, 339: 8, 354: 3, 356: 4, 399: 8, 512: 6, 544: 8, 608: 8, 790: 8, 809: 8, 832: 8, 899: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1170: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1314: 8, 1322: 8, 1345: 8, 1349: 8, 1351: 8, 1353: 8, 1356: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1427: 6, 1440: 8, 1456: 4, 1472: 8, 1491: 8, 1530: 8}],
CAR.SONATA: [{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1444: 8, 1456: 4, 1470: 8},
{64: 8, 66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 512: 6, 544: 8, 593: 8, 608: 8, 625: 8, 688: 5, 790: 8, 809: 8, 832: 8, 897: 8, 899: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1314: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1366: 8, 1367: 8, 1369: 8, 1371: 8, 1407: 8, 1415: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1460: 8, 1470: 8, 1472: 8, 1491: 8, 1530: 8, 1990: 8, 1998: 8, 2016: 8, 2024: 8},
{66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 512: 6, 544: 8, 608: 8, 790: 8, 809: 8, 832: 8, 899: 8, 902: 8, 903: 6, 912: 7, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1312: 8, 1314: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1415: 8, 1419: 8, 1427: 6, 1440: 8, 1456: 4, 1460: 8, 1470: 8, 1472: 8, 1491: 8, 1530: 8}],
CAR.SONATA_HEV: [{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 7, 593: 8, 688: 5, 832: 7, 881: 8, 882: 8, 897: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1151: 6, 1168: 7, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1345: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8}],
CAR.SONATA_TURBO: [{64: 8, 66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 447: 8, 512: 6, 544: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 884: 8, 897: 8, 899: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1253: 8, 1254: 8, 1255: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1314: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1371: 8, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1460: 8, 1470: 8, 1472: 8, 1486: 8, 1487: 8, 1491: 8, 1530: 8, 2015: 8, 2024: 8},
{66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 512: 6, 544: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 897: 8, 899: 8, 902: 8, 903: 6, 912: 7, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1312: 8, 1314: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1427: 6, 1440: 8, 1456: 4, 1460: 8, 1470: 8, 1471: 8, 1472: 8, 1491: 8, 1530: 8, 1532: 5, 2016: 8, 2024: 8},
{64: 8, 66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 447: 8, 512: 6, 544: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 884: 8, 897: 8, 899: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1253: 8, 1254: 8, 1255: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1314: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1460: 8, 1470: 8, 1472: 8, 1486: 8, 1487: 8, 1491: 8, 1530: 8, 1905: 8, 1913: 8, 1990: 8, 1998: 8, 2006: 8, 2014: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8}],
CAR.GRANDEUR: [{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1185: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1456: 4, 1470: 8},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1185: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1456: 4, 1470: 8}],
CAR.GRANDEUR_HEV: [{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1151: 6, 1156: 8, 1157: 4, 1168: 7, 1173: 8, 1185: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 865: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1108: 8, 1136: 6, 1138: 5, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 516: 8, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 865: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1108: 8, 1136: 6, 1138: 5, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8}],
CAR.GENESIS: [{67: 8, 68: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 7, 593: 8, 608: 8, 688: 5, 809: 8, 854: 7, 870: 7, 871: 8, 872: 5, 897: 8, 902: 8, 903: 6, 912: 7, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1268: 8, 1280: 1, 1281: 3, 1287: 4, 1292: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1334: 8, 1335: 8, 1345: 8, 1363: 8, 1369: 8, 1370: 8, 1371: 8, 1378: 4, 1384: 5, 1407: 8, 1419: 8, 1427: 6, 1434: 2, 1437: 8, 1456: 4},
{67: 8, 68: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 7, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 5, 897: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1280: 1, 1287: 4, 1292: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1334: 8, 1335: 8, 1345: 8, 1363: 8, 1369: 8, 1370: 8, 1378: 4, 1379: 8, 1384: 5, 1407: 8, 1425: 2, 1427: 6, 1437: 8, 1456: 4},
{67: 8, 68: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 7, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 5, 897: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1280: 1, 1287: 4, 1292: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1334: 8, 1335: 8, 1345: 8, 1363: 8, 1369: 8, 1370: 8, 1371: 8, 1378: 4, 1384: 5, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1437: 8, 1456: 4}],
CAR.SANTAFE: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1379: 8, 1384: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 6, 764: 8, 809: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1183: 8, 1186: 2, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1988: 8, 2000: 8, 2004: 8, 2008: 8, 2012: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 912: 7, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1183: 8, 1186: 2, 1191: 2, 1227: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1456: 4, 1470: 8, 1628: 8, 1629: 8, 1630: 8, 1631: 8, 1674: 8, 1675: 8, 1676: 8, 1677: 8, 1791: 8, 2015: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1378: 8, 1379: 8, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1456: 4, 1470: 8, 1479: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 547: 8, 548: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1379: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1479: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1183: 8, 1186: 2, 1191: 2, 1210: 8, 1227: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1384: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1911: 8}],
CAR.KONA: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 354: 3, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1156: 8, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1193: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1384: 8, 1394: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8}],
CAR.KONA_HEV: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 354: 3, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 909: 8, 916: 8, 1040: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1265: 4, 1280: 1, 1287: 4, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1384: 8, 1394: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1988: 8, 1990: 8, 1998: 8, 2001: 8, 2004: 8, 2009: 8, 2012: 8, 2015: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1138: 4, 1151: 6, 1155: 8, 1157: 4, 1164: 8, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1193: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 8, 1379: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8}],
CAR.KONA_EV: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 354: 3, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 909: 8, 916: 8, 1040: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1265: 4, 1280: 1, 1287: 4, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1384: 8, 1394: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1988: 8, 1990: 8, 1998: 8, 2001: 8, 2004: 8, 2009: 8, 2012: 8, 2015: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 4, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 4, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1193: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 4, 1379: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 546: 8, 547: 8, 548: 8, 549: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1173: 8, 1183: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1378: 4, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1473: 8, 1507: 8, 1535: 8}],
CAR.IONIQ_HEV: [{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1173: 8, 1225: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470:8, 1476: 8, 1535: 8}],
CAR.IONIQ_EV: [{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 7, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1168: 7, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1425: 2, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 7, 545: 8, 546: 8, 548: 8, 549: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1168: 7, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1507: 8, 1535: 8},
{127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 7, 546: 8, 832: 8, 881: 8, 882: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 6, 1168: 7, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1322: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1407: 8, 1419: 8, 1426: 8, 1427: 6, 1429: 8, 1430: 8, 1456: 4, 1470: 8, 1507: 8}],
CAR.K5: [{64: 8, 66: 8, 67: 8, 68: 8, 127: 8, 273: 8, 274: 8, 275: 8, 339: 8, 356: 4, 399: 8, 447: 8, 512: 6, 544: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 884: 8, 897: 8, 899: 8, 902: 8, 903: 6, 909: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1186: 2, 1191: 2, 1253: 8, 1254: 8, 1255: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1470: 8, 1472: 8, 1486: 8, 1487: 8, 1491: 8, 1530: 8, 1532: 5, 1952: 8, 1960: 8, 1988: 8, 1996: 8, 2001: 8, 2004: 8, 2008: 8, 2009: 8, 2012: 8, 2016: 8, 2017: 8, 2024: 8, 2025: 8},
{64: 8, 66: 8, 67: 8, 68: 8, 127: 8, 128: 8, 129: 8, 273: 8, 274: 8, 275: 8, 339: 8, 354: 3, 356: 4, 399: 8, 512: 6, 544: 8, 593: 8, 608: 8, 688: 5, 790: 8, 809: 8, 832: 8, 897: 8, 899: 8, 902: 8, 903: 6, 912: 7, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1265: 4, 1268: 8, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1356: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1440: 8, 1456: 4, 1470: 8, 1472: 8, 1491: 8, 1492: 8}],
CAR.K5_HEV: [{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1151: 6, 1168: 7, 1173: 8, 1236: 2, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8},
{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 909: 8, 912: 7, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 6, 1151: 6, 1168: 7, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1407: 8, 1419: 8, 1420: 8, 1425: 2, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8}],
CAR.K7: [{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1444: 8, 1456: 4, 1470: 8},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 608: 8, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1444: 8, 1456: 4, 1470: 8},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 4, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1444: 8, 1456: 4, 1470: 8}],
CAR.K7_HEV: [{68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 544: 8, 576: 8, 593: 8, 688: 5, 832: 8, 865: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1096: 8, 1102: 8, 1108: 8, 1136: 6, 1138: 5, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1210: 8, 1227: 8, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1343: 8, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1379: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1448: 8, 1456: 4, 1470: 8, 1476: 8, 1535: 8}],
CAR.STINGER: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 358: 6, 359: 8, 544: 8, 576: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1280: 1, 1281: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1378: 4, 1379: 8, 1384: 8, 1407: 8, 1419: 8, 1425: 2, 1427: 6, 1456: 4, 1470: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 358: 6, 359: 8, 544: 8, 576: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1157: 4, 1168: 7, 1170: 8, 1173: 8, 1184: 8, 1265: 4, 1280: 1, 1281: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 4, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1437: 8, 1456: 4, 1470: 8}],
CAR.NIRO_HEV: [{}],
CAR.NIRO_EV: [{}],
CAR.NEXO: [{127: 8, 145: 8, 146: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 512: 6, 544: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 908: 8, 909: 8, 912: 7, 916: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1173: 8, 1174: 8, 1180: 8, 1183: 8, 1186: 2, 1191: 2, 1192: 8, 1193: 8, 1210: 8, 1219: 8, 1220: 8, 1222: 6, 1223: 8, 1224: 8, 1227: 8, 1230: 6, 1231: 6, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1297: 8, 1298: 8, 1305: 8, 1312: 8, 1315: 8, 1316: 8, 1322: 8, 1324: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1437: 8, 1456: 4, 1460: 8, 1470: 8, 1484: 8, 1507: 8, 1520: 8, 1535: 8},
{127: 8, 145: 8, 146: 8, 304: 8, 320: 8, 339: 8, 352: 8, 356: 4, 512: 6, 544: 8, 546: 8, 593: 8, 688: 5, 832: 8, 881: 8, 882: 8, 897: 8, 902: 8, 903: 8, 905: 8, 908: 8, 909: 8, 912: 7, 916: 8, 1056: 8, 1057: 8, 1078: 4, 1136: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1173: 8, 1174: 8, 1180: 8, 1183: 8, 1186: 2, 1191: 2, 1192: 8, 1193: 8, 1210: 8, 1219: 8, 1220: 8, 1222: 6, 1223: 8, 1224: 8, 1227: 8, 1230: 6, 1231: 6, 1265: 4, 1268: 8, 1280: 1, 1287: 4, 1290: 8, 1291: 8, 1292: 8, 1294: 8, 1297: 8, 1298: 8, 1305: 8, 1312: 8, 1315: 8, 1316: 8, 1322: 8, 1324: 8, 1342: 6, 1345: 8, 1348: 8, 1355: 8, 1363: 8, 1369: 8, 1371: 8, 1407: 8, 1419: 8, 1427: 6, 1429: 8, 1430: 8, 1437: 8, 1456: 4, 1460: 8, 1470: 8, 1484: 8, 1507: 8, 1520: 8, 1535: 8}],
CAR.MOHAVE: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 8, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1123: 8, 1136: 8, 1145: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1170: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1479: 8}],
CAR.I30: [{67: 8, 68: 8, 127: 8, 128: 8, 129: 8, 273: 8, 274: 8, 275: 8, 339: 8, 354: 3, 356: 4, 399: 8, 512: 6, 544: 8, 608: 8, 790: 8, 809: 8, 832: 8, 899: 8, 902: 8, 903: 8, 905: 8, 909: 8, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1151: 6, 1168: 7, 1170: 8, 1265: 4, 1280: 1, 1282: 4, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1349: 8, 1351: 8, 1353: 8, 1356: 8, 1363: 8, 1365: 8, 1366: 8, 1367: 8, 1369: 8, 1407: 8, 1414: 3, 1415: 8, 1419: 8, 1427: 6, 1440: 8, 1456: 4, 1470: 8, 1486: 8, 1487: 8, 1491: 8, 1530: 8}],
CAR.SELTOS: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 354: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 8, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 905: 8, 909: 8, 910: 5, 911: 5, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1114: 8, 1136: 8, 1145: 8, 1151: 8, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 8, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1225: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1379: 8, 1384: 8, 1394: 8, 1407: 8, 1414: 3, 1419: 8, 1427: 6, 1446: 8, 1456: 4, 1470: 8, 1485: 8, 1911: 8}],
CAR.PALISADE: [{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 549: 8, 576: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1123: 8, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 2000: 8, 2005: 8, 2008: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 576: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1123: 8, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8},
{67: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 576: 8, 593: 8, 608: 8, 688: 6, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 905: 8, 909: 8, 913: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1123: 8, 1136: 8, 1151: 6, 1155: 8, 1156: 8, 1157: 4, 1162: 8, 1164: 8, 1168: 7, 1170: 8, 1173: 8, 1180: 8, 1186: 2, 1191: 2, 1193: 8, 1210: 8, 1225: 8, 1227: 8, 1265: 4, 1280: 8, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8}],
CAR.SORENTO: [{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1064: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1370: 8, 1371: 8, 1384: 8, 1407: 8, 1411: 8, 1419: 8, 1425: 2, 1427: 6, 1444: 8, 1456: 4, 1470: 8, 1489: 1},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 8, 1168: 7, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1479: 8},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 546: 8, 548: 8, 550: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 8, 1168: 7, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1479: 8},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 8, 593: 8, 608: 8, 688: 5, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 8, 897: 8, 902: 8, 903: 8, 909: 8, 916: 8, 1040: 8, 1042: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1156: 8, 1157: 4, 1162: 8, 1168: 7, 1170: 8, 1173: 8, 1186: 2, 1191: 2, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1294: 8, 1312: 8, 1322: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1371: 8, 1378: 8, 1384: 8, 1407: 8, 1419: 8, 1427: 6, 1456: 4, 1470: 8, 1479: 8},
{67: 8, 68: 8, 127: 8, 304: 8, 320: 8, 339: 8, 356: 4, 544: 7, 608: 8, 809: 8, 832: 8, 854: 7, 870: 7, 871: 8, 872: 5, 902: 8, 903: 6, 916: 8, 1040: 8, 1056: 8, 1057: 8, 1078: 4, 1107: 5, 1136: 8, 1151: 6, 1168: 7, 1170: 8, 1173: 8, 1265: 4, 1280: 1, 1287: 4, 1290: 8, 1292: 8, 1322: 8, 1331: 8, 1332: 8, 1333: 8, 1342: 6, 1345: 8, 1348: 8, 1363: 8, 1369: 8, 1370: 8, 1384: 5, 1407: 8, 1411: 8, 1419: 8, 1427: 6, 1437: 8, 1444: 8, 1456: 4, 1470: 8, 1489: 1, 1990: 8, 1998: 8}],
}
# CAN message IDs (per ECU) used to detect whether that ECU is present on the bus.
ECU_FINGERPRINT = {
  Ecu.fwdCamera: [832, 1156, 1191, 1342]
}
# Checksum variant each platform's CAN messages use ("crc8" vs "6B") —
# algorithm details live in the checksum implementation, not here.
CHECKSUM = {
  "crc8": [CAR.SANTAFE, CAR.SONATA, CAR.PALISADE],
  "6B": [CAR.SORENTO, CAR.GENESIS],
}
# Per-platform signal-source quirks; key names suggest which message carries
# gear state (instrument cluster, TCU, or electric drivetrain) — inferred
# from the key names, confirm against the CAN parser.
FEATURES = {
  "use_cluster_gears": [CAR.KONA, CAR.GRANDEUR, CAR.K7, CAR.MOHAVE, CAR.I30, CAR.AVANTE],
  "use_tcu_gears": [CAR.K5, CAR.SONATA, CAR.SONATA_TURBO],
  "use_elect_gears": [CAR.K5_HEV, CAR.SONATA_HEV, CAR.GRANDEUR_HEV, CAR.IONIQ_HEV, CAR.IONIQ_EV, CAR.NIRO_HEV, CAR.KONA_HEV, CAR.KONA_EV, CAR.NIRO_EV, CAR.NEXO],
}
# Platforms with an electrified (HEV/EV/FCEV) powertrain; matches the
# "use_elect_gears" list above.
EV_HYBRID = [CAR.K5_HEV, CAR.SONATA_HEV, CAR.GRANDEUR_HEV, CAR.IONIQ_HEV, CAR.IONIQ_EV, CAR.NIRO_HEV, CAR.KONA_HEV, CAR.KONA_EV, CAR.NIRO_EV, CAR.NEXO]
# Every supported platform maps to the same generic Hyundai/Kia DBC,
# with no radar-specific DBC (second argument None).
DBC = {
  platform: dbc_dict('hyundai_kia_generic', None)
  for platform in (
    CAR.AVANTE,
    CAR.SONATA,
    CAR.SONATA_HEV,
    CAR.SONATA_TURBO,
    CAR.GRANDEUR,
    CAR.GRANDEUR_HEV,
    CAR.GENESIS,
    CAR.SANTAFE,
    CAR.KONA,
    CAR.KONA_HEV,
    CAR.KONA_EV,
    CAR.IONIQ_HEV,
    CAR.IONIQ_EV,
    CAR.K5,
    CAR.K5_HEV,
    CAR.K7,
    CAR.K7_HEV,
    CAR.STINGER,
    CAR.NIRO_HEV,
    CAR.NIRO_EV,
    CAR.NEXO,
    CAR.MOHAVE,
    CAR.I30,
    CAR.SELTOS,
    CAR.PALISADE,
    CAR.SORENTO,
  )
}
STEER_THRESHOLD = 360
| true | true |
f71a3a75821354fee84241165aa869abf4a61832 | 5,614 | py | Python | sdk/python/pulumi_azure_nextgen/documentdb/v20200901/notebook_workspace.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 31 | 2020-09-21T09:41:01.000Z | 2021-02-26T13:21:59.000Z | sdk/python/pulumi_azure_nextgen/documentdb/v20200901/notebook_workspace.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 231 | 2020-09-21T09:38:45.000Z | 2021-03-01T11:16:03.000Z | sdk/python/pulumi_azure_nextgen/documentdb/v20200901/notebook_workspace.py | pulumi/pulumi-azure-nextgen | 452736b0a1cf584c2d4c04666e017af6e9b2c15c | [
"Apache-2.0"
] | 4 | 2020-09-29T14:14:59.000Z | 2021-02-10T20:38:16.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['NotebookWorkspace']
class NotebookWorkspace(pulumi.CustomResource):
    # Generated Pulumi resource for an Azure Cosmos DB notebook workspace
    # (provider API version 2020-09-01).
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_name: Optional[pulumi.Input[str]] = None,
                 notebook_workspace_name: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        A notebook workspace resource
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] account_name: Cosmos DB database account name.
        :param pulumi.Input[str] notebook_workspace_name: The name of the notebook workspace resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
        """
        # __name__/__opts__ are deprecated aliases for resource_name/opts,
        # kept for backwards compatibility.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No opts.id means we are creating a new resource: validate and
            # collect the input properties.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()
            if account_name is None and not opts.urn:
                raise TypeError("Missing required property 'account_name'")
            __props__['account_name'] = account_name
            __props__['notebook_workspace_name'] = notebook_workspace_name
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            # Output-only properties start as None; the engine resolves them.
            __props__['name'] = None
            __props__['notebook_server_endpoint'] = None
            __props__['status'] = None
            __props__['type'] = None
        # Aliases let state created under other API versions of this resource
        # type adopt this one without replacement.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:documentdb:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/latest:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/v20190801:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/v20191212:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/v20200301:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/v20200401:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/v20200601preview:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/v20210115:NotebookWorkspace")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(NotebookWorkspace, __self__).__init__(
            'azure-nextgen:documentdb/v20200901:NotebookWorkspace',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'NotebookWorkspace':
        """
        Get an existing NotebookWorkspace resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = dict()
        return NotebookWorkspace(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the database account.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="notebookServerEndpoint")
    def notebook_server_endpoint(self) -> pulumi.Output[str]:
        """
        Specifies the endpoint of Notebook server.
        """
        return pulumi.get(self, "notebook_server_endpoint")
    @property
    @pulumi.getter
    def status(self) -> pulumi.Output[str]:
        """
        Status of the notebook workspace. Possible values are: Creating, Online, Deleting, Failed, Updating.
        """
        return pulumi.get(self, "status")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The type of Azure resource.
        """
        return pulumi.get(self, "type")
    def translate_output_property(self, prop):
        # Map provider camelCase property names to Python snake_case.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
    def translate_input_property(self, prop):
        # Map Python snake_case property names to provider camelCase.
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 44.912 | 655 | 0.665301 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['NotebookWorkspace']
class NotebookWorkspace(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
notebook_workspace_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
if account_name is None and not opts.urn:
raise TypeError("Missing required property 'account_name'")
__props__['account_name'] = account_name
__props__['notebook_workspace_name'] = notebook_workspace_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['name'] = None
__props__['notebook_server_endpoint'] = None
__props__['status'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:documentdb:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/latest:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/v20190801:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/v20191212:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/v20200301:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/v20200401:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/v20200601preview:NotebookWorkspace"), pulumi.Alias(type_="azure-nextgen:documentdb/v20210115:NotebookWorkspace")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(NotebookWorkspace, __self__).__init__(
'azure-nextgen:documentdb/v20200901:NotebookWorkspace',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'NotebookWorkspace':
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return NotebookWorkspace(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
return pulumi.get(self, "name")
@property
@pulumi.getter(name="notebookServerEndpoint")
def notebook_server_endpoint(self) -> pulumi.Output[str]:
return pulumi.get(self, "notebook_server_endpoint")
@property
@pulumi.getter
def status(self) -> pulumi.Output[str]:
return pulumi.get(self, "status")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| true | true |
f71a3c4c2a40dfd2974f50c147e4fa1e98133caa | 1,214 | py | Python | statistical_analysis/gpa_scatter.py | guptarohit994/ECE143_group25_project | e31d0425b2a6114eed6c55bdb0491c2c996b94be | [
"CC0-1.0"
] | null | null | null | statistical_analysis/gpa_scatter.py | guptarohit994/ECE143_group25_project | e31d0425b2a6114eed6c55bdb0491c2c996b94be | [
"CC0-1.0"
] | null | null | null | statistical_analysis/gpa_scatter.py | guptarohit994/ECE143_group25_project | e31d0425b2a6114eed6c55bdb0491c2c996b94be | [
"CC0-1.0"
] | null | null | null |
import helper
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
def plot_gpa_scatter():
    """Scatter-plot average grade expected vs. grade received per department.

    Draws the scatter for the general department list, a red y = x reference
    line, and a blue least-squares best-fit line; prints the fit coefficients.
    """
    df = helper.generate_depts_df(helper.general_dept_list)
    x_col, y_col = "AvgGradeExpected", "AvgGradeReceived"
    lo, hi = 1.5, 4.02
    # One grey point per row, with matching axis limits on both axes.
    axes = df.plot.scatter(x=x_col, y=y_col, c="grey",
                           xlim=(lo, hi), ylim=(lo, hi),
                           figsize=(10, 10), fontsize=20, alpha=0.3)
    axes.set_xlabel("Average Grade Expected", fontsize=20)
    axes.set_ylabel("Average Grade Received", fontsize=20)
    # Least-squares fit: received = m * expected + c.
    xs = df[x_col]
    ys = df[y_col]
    design = np.vstack([xs, np.ones(len(xs))]).T
    m, c = np.linalg.lstsq(design, ys, rcond=None)[0]
    print("m:{}, c:{}".format(m, c))
    ref = np.linspace(lo, 4, 10)
    axes.plot(ref, ref, c="red")           # y = x reference
    axes.plot(ref, (ref * m) + c, c="blue")  # best-fit line
ax.plot(np.linspace(lower_bound,4,10),(np.linspace(lower_bound,4,10)*m) + c,c="blue") | 43.357143 | 191 | 0.706755 |
import helper
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
def plot_gpa_scatter():
department_df = helper.generate_depts_df(helper.general_dept_list)
comp_criteria = ["AvgGradeExpected","AvgGradeReceived"]
lower_bound = 1.5
upper_bound = 4.02
ax = department_df.plot.scatter(x=comp_criteria[0], y=comp_criteria[1], c= "grey",ylim=(lower_bound,upper_bound),xlim=(lower_bound,upper_bound), figsize=(10,10), fontsize=20, alpha = 0.3)
ax.set_xlabel("Average Grade Expected", fontsize = 20)
ax.set_ylabel("Average Grade Received", fontsize = 20)
y = department_df["AvgGradeReceived"]
x = department_df["AvgGradeExpected"]
A = np.vstack([x, np.ones(len(x))]).T
m, c = np.linalg.lstsq(A, y, rcond=None)[0]
print("m:{}, c:{}".format(m,c))
ax.plot(np.linspace(lower_bound,4,10),np.linspace(lower_bound,4,10),c="red")
ax.plot(np.linspace(lower_bound,4,10),(np.linspace(lower_bound,4,10)*m) + c,c="blue") | true | true |
f71a3d3679c710701747a7487f1d3ca7742c6324 | 1,437 | py | Python | destruction/render.py | tcdude/destruction | 44d24cee4f73e841e600a814e7b3c659a1a5c98c | [
"MIT"
] | null | null | null | destruction/render.py | tcdude/destruction | 44d24cee4f73e841e600a814e7b3c659a1a5c98c | [
"MIT"
] | null | null | null | destruction/render.py | tcdude/destruction | 44d24cee4f73e841e600a814e7b3c659a1a5c98c | [
"MIT"
] | null | null | null | """
MIT License
Copyright (c) 2019 tcdude
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import sdl2.ext
class HWRenderer(sdl2.ext.TextureSpriteRenderSystem):
    """Basic SDL HW Renderer."""
    def __init__(self, window):
        # Initialize the base sprite render system for the given window.
        super(HWRenderer, self).__init__(window)
        # Convenience alias for the underlying SDL renderer handle.
        self.renderer = self.sdlrenderer
    def render(self, components, **kwargs):
        # Clear the render target, then delegate sprite drawing to the base.
        # NOTE(review): this reads self._renderer while __init__ assigns
        # self.renderer — confirm the base class really provides _renderer.
        self._renderer.clear()
        super(HWRenderer, self).render(components, **kwargs)
| 37.815789 | 78 | 0.767571 |
import sdl2.ext
class HWRenderer(sdl2.ext.TextureSpriteRenderSystem):
def __init__(self, window):
super(HWRenderer, self).__init__(window)
self.renderer = self.sdlrenderer
def render(self, components, **kwargs):
self._renderer.clear()
super(HWRenderer, self).render(components, **kwargs)
| true | true |
f71a3d5dbbec7288cff475d0741e98fb99b63c84 | 1,001 | py | Python | integration-tests/environment.py | oazmon/sceptre-template-fetcher | ff40fea4dcdb7b785b90b70426758475a8d09634 | [
"Apache-2.0"
] | null | null | null | integration-tests/environment.py | oazmon/sceptre-template-fetcher | ff40fea4dcdb7b785b90b70426758475a8d09634 | [
"Apache-2.0"
] | null | null | null | integration-tests/environment.py | oazmon/sceptre-template-fetcher | ff40fea4dcdb7b785b90b70426758475a8d09634 | [
"Apache-2.0"
] | null | null | null | import os
import uuid
import yaml
from sceptre_template_fetcher.cli import setup_logging
def before_all(context):
    """Behave hook: run once before the whole suite.

    Seeds the context with a unique project code and the sceptre project
    directory, then writes the project code into the sceptre config file.
    """
    if context.config.wip:
        # Verbose logging when running work-in-progress (@wip) scenarios.
        setup_logging(True)
    run_id = uuid.uuid1().hex
    context.uuid = run_id
    context.project_code = "sceptre-integration-tests-{0}".format(run_id)
    context.sceptre_dir = os.path.join(
        os.getcwd(), "integration-tests", "sceptre-project")
    update_config(context)
def before_scenario(context, scenario):
    """Behave hook: reset per-scenario result fields on the context."""
    for attr in ("error", "response", "output"):
        setattr(context, attr, None)
def update_config(context):
    """Rewrite the project's config.yaml so project_code matches the context."""
    path = os.path.join(context.sceptre_dir, "config", "config.yaml")
    # Load, patch, and write back the YAML config in place.
    with open(path) as fh:
        cfg = yaml.safe_load(fh)
    cfg["project_code"] = context.project_code
    with open(path, 'w') as fh:
        yaml.safe_dump(cfg, fh, default_flow_style=False)
def after_all(context):
    # Behave hook: run once after the suite; re-syncs config.yaml with the
    # context's project_code via update_config.
    update_config(context)
| 23.27907 | 73 | 0.7003 | import os
import uuid
import yaml
from sceptre_template_fetcher.cli import setup_logging
def before_all(context):
if context.config.wip:
setup_logging(True)
context.uuid = uuid.uuid1().hex
context.project_code = "sceptre-integration-tests-{0}".format(
context.uuid
)
context.sceptre_dir = os.path.join(
os.getcwd(), "integration-tests", "sceptre-project"
)
update_config(context)
def before_scenario(context, scenario):
context.error = None
context.response = None
context.output = None
def update_config(context):
config_path = os.path.join(
context.sceptre_dir, "config", "config.yaml"
)
with open(config_path) as config_file:
env_config = yaml.safe_load(config_file)
env_config["project_code"] = context.project_code
with open(config_path, 'w') as config_file:
yaml.safe_dump(env_config, config_file, default_flow_style=False)
def after_all(context):
update_config(context)
| true | true |
f71a3d9d71d9308eee9e5caacf5594010124ebf4 | 17,425 | py | Python | openstack_dashboard/dashboards/project/networks/workflows.py | ameoba/horizon | ff9e367c98a8bb79f10914abffaaa04b0a461819 | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/dashboards/project/networks/workflows.py | ameoba/horizon | ff9e367c98a8bb79f10914abffaaa04b0a461819 | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/dashboards/project/networks/workflows.py | ameoba/horizon | ff9e367c98a8bb79f10914abffaaa04b0a461819 | [
"Apache-2.0"
] | null | null | null | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.conf import settings
import netaddr
from django.conf import settings
from django.core.urlresolvers import reverse # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon.utils import fields
from horizon import workflows
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateNetworkInfoAction(workflows.Action):
    # Form for the first workflow step: the network's basic attributes.
    net_name = forms.CharField(max_length=255,
                               label=_("Network Name"),
                               required=False)
    # The class-body `if` runs once at import time: the profile field and its
    # helper methods only exist when the Neutron backend supports profiles.
    if api.neutron.is_port_profiles_supported():
        net_profile_id = forms.ChoiceField(label=_("Network Profile"))
    admin_state = forms.BooleanField(label=_("Admin State"),
                                     initial=True, required=False)
    if api.neutron.is_port_profiles_supported():
        def __init__(self, request, *args, **kwargs):
            # Populate the profile dropdown with live choices per request.
            super(CreateNetworkInfoAction, self).__init__(request,
                                                          *args, **kwargs)
            self.fields['net_profile_id'].choices = (
                self.get_network_profile_choices(request))
        def get_network_profile_choices(self, request):
            # Build (id, name) choices, prefixed with an empty placeholder.
            profile_choices = [('', _("Select a profile"))]
            for profile in self._get_profiles(request, 'network'):
                profile_choices.append((profile.id, profile.name))
            return profile_choices
        def _get_profiles(self, request, type_p):
            # Fetch profiles of the given type; on failure, surface the error
            # to the user and fall back to an empty list.
            try:
                profiles = api.neutron.profile_list(request, type_p)
            except Exception:
                profiles = []
                msg = _('Network Profiles could not be retrieved.')
                exceptions.handle(request, msg)
            return profiles
    # TODO(absubram): Add ability to view network profile information
    # in the network detail if a profile is used.
    class Meta:
        name = _("Network")
        help_text = _("From here you can create a new network.\n"
                      "In addition a subnet associated with the network "
                      "can be created in the next panel.")
class CreateNetworkInfo(workflows.Step):
    # Workflow step wrapping the network-info form; `contributes` is fixed at
    # import time depending on Neutron port-profile support.
    action_class = CreateNetworkInfoAction
    if api.neutron.is_port_profiles_supported():
        contributes = ("net_name", "admin_state", "net_profile_id")
    else:
        contributes = ("net_name", "admin_state")
class CreateSubnetInfoAction(workflows.Action):
    """Workflow step form for optionally creating a subnet with the network.

    Validates the CIDR / IP-version / gateway combination before the
    workflow submits the data to Neutron.
    """
    # Whether IPv6 subnets may be offered, from local_settings.
    # NOTE(review): this read defaults enable_ipv6 to False, while the check
    # in __init__ below defaults it to True — confirm which is intended.
    _ccs_enable_ipv6 = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {}).get('enable_ipv6', False)
    if _ccs_enable_ipv6:
        ip_version_choices = [(4, 'IPv4'), (6, 'IPv6')]
        ip_version_fields = fields.IPv4 | fields.IPv6
    else:
        ip_version_choices = [(4, 'IPv4')]
        ip_version_fields = fields.IPv4

    with_subnet = forms.BooleanField(label=_("Create Subnet"),
                                     initial=True, required=False)
    subnet_name = forms.CharField(max_length=255,
                                  label=_("Subnet Name"),
                                  required=False)
    cidr = fields.IPField(label=_("Network Address"),
                          required=False,
                          initial="",
                          help_text=_("Network address in CIDR format "
                                      "(e.g. 192.168.0.0/24)"),
                          version=ip_version_fields,
                          mask=True)
    ip_version = forms.ChoiceField(choices=ip_version_choices,
                                   label=_("IP Version"))
    gateway_ip = fields.IPField(
        label=_("Gateway IP"),
        required=False,
        initial="",
        help_text=_("IP address of Gateway (e.g. 192.168.0.254) "
                    "The default value is the first IP of the "
                    "network address (e.g. 192.168.0.1 for "
                    "192.168.0.0/24). "
                    "If you use the default, leave blank. "
                    "If you want to use no gateway, "
                    "check 'Disable Gateway' below."),
        version=ip_version_fields,
        mask=False)
    no_gateway = forms.BooleanField(label=_("Disable Gateway"),
                                    initial=False, required=False)

    class Meta:
        name = _("Subnet")
        help_text = _('You can create a subnet associated with the new '
                      'network, in which case "Network Address" must be '
                      'specified. If you wish to create a network WITHOUT a '
                      'subnet, uncheck the "Create Subnet" checkbox.')

    def __init__(self, request, context, *args, **kwargs):
        super(CreateSubnetInfoAction, self).__init__(request, context, *args,
                                                     **kwargs)
        # When IPv6 is disabled, hide the version selector and pin it to 4.
        if not getattr(settings, 'OPENSTACK_NEUTRON_NETWORK',
                       {}).get('enable_ipv6', True):
            self.fields['ip_version'].widget = forms.HiddenInput()
            self.fields['ip_version'].initial = 4

    def _check_subnet_data(self, cleaned_data, is_create=True):
        """Validate CIDR, IP version, and gateway consistency.

        :param cleaned_data: the form's cleaned field values
        :param is_create: on update (False), an explicit gateway (or the
            "Disable Gateway" flag) is required
        :raises forms.ValidationError: on any inconsistency
        """
        cidr = cleaned_data.get('cidr')
        ip_version = int(cleaned_data.get('ip_version'))
        gateway_ip = cleaned_data.get('gateway_ip')
        no_gateway = cleaned_data.get('no_gateway')
        if not cidr:
            msg = _('Specify "Network Address" or '
                    'clear "Create Subnet" checkbox.')
            raise forms.ValidationError(msg)
        if cidr:
            subnet = netaddr.IPNetwork(cidr)
            if subnet.version != ip_version:
                msg = _('Network Address and IP version are inconsistent.')
                raise forms.ValidationError(msg)
            # A /32 (IPv4) or /128 (IPv6) network has no usable addresses.
            if (ip_version == 4 and subnet.prefixlen == 32) or \
                    (ip_version == 6 and subnet.prefixlen == 128):
                msg = _("The subnet in the Network Address is too small (/%s)."
                        % subnet.prefixlen)
                raise forms.ValidationError(msg)
        if not no_gateway and gateway_ip:
            # Bug fix: compare versions with != rather than `is not` —
            # identity comparison of ints only accidentally works for small
            # values in CPython (the CIDR check above already uses !=).
            if netaddr.IPAddress(gateway_ip).version != ip_version:
                msg = _('Gateway IP and IP version are inconsistent.')
                raise forms.ValidationError(msg)
        if not is_create and not no_gateway and not gateway_ip:
            msg = _('Specify IP address of gateway or '
                    'check "Disable Gateway".')
            raise forms.ValidationError(msg)

    def clean(self):
        """Skip subnet validation entirely when no subnet is requested."""
        cleaned_data = super(CreateSubnetInfoAction, self).clean()
        with_subnet = cleaned_data.get('with_subnet')
        if not with_subnet:
            return cleaned_data
        self._check_subnet_data(cleaned_data)
        return cleaned_data
class CreateSubnetInfo(workflows.Step):
    # Step wrapper for CreateSubnetInfoAction; the listed keys are copied
    # from the action's cleaned data into the workflow context.
    action_class = CreateSubnetInfoAction
    contributes = ("with_subnet", "subnet_name", "cidr",
                   "ip_version", "gateway_ip", "no_gateway")
class CreateSubnetDetailAction(workflows.Action):
    """Workflow action collecting optional, advanced subnet attributes
    (DHCP flag, allocation pools, DNS name servers, host routes).
    """
    enable_dhcp = forms.BooleanField(label=_("Enable DHCP"),
                                     initial=True, required=False)
    allocation_pools = forms.CharField(
        widget=forms.Textarea(),
        label=_("Allocation Pools"),
        help_text=_("IP address allocation pools. Each entry is "
                    "<start_ip_address>,<end_ip_address> "
                    "(e.g., 192.168.1.100,192.168.1.120) "
                    "and one entry per line."),
        required=False)
    dns_nameservers = forms.CharField(
        widget=forms.widgets.Textarea(),
        label=_("DNS Name Servers"),
        help_text=_("IP address list of DNS name servers for this subnet. "
                    "One entry per line."),
        required=False)
    host_routes = forms.CharField(
        widget=forms.widgets.Textarea(),
        label=_("Host Routes"),
        help_text=_("Additional routes announced to the hosts. "
                    "Each entry is <destination_cidr>,<nexthop> "
                    "(e.g., 192.168.200.0/24,10.56.1.254) "
                    "and one entry per line."),
        required=False)

    class Meta:
        name = _("Subnet Detail")
        help_text = _('You can specify additional attributes for the subnet.')

    def _convert_ip_address(self, ip, field_name):
        """Parse *ip* with netaddr; raise ValidationError naming *field_name*
        if it is not a valid IP address."""
        try:
            return netaddr.IPAddress(ip)
        except (netaddr.AddrFormatError, ValueError):
            msg = _('%(field_name)s: Invalid IP address '
                    '(value=%(ip)s)' % dict(
                        field_name=field_name, ip=ip))
            raise forms.ValidationError(msg)

    def _convert_ip_network(self, network, field_name):
        """Parse *network* as a CIDR; raise ValidationError if invalid."""
        try:
            return netaddr.IPNetwork(network)
        except (netaddr.AddrFormatError, ValueError):
            msg = _('%(field_name)s: Invalid IP address '
                    '(value=%(network)s)' % dict(
                        field_name=field_name, network=network))
            raise forms.ValidationError(msg)

    def _check_allocation_pools(self, allocation_pools):
        """Validate one 'start,end' pool per line; blank lines are skipped."""
        for p in allocation_pools.split('\n'):
            p = p.strip()
            if not p:
                continue
            pool = p.split(',')
            if len(pool) != 2:
                msg = _('Start and end addresses must be specified '
                        '(value=%s)') % p
                raise forms.ValidationError(msg)
            start, end = [self._convert_ip_address(ip, "allocation_pools")
                          for ip in pool]
            # netaddr.IPAddress supports ordered comparison directly.
            if start > end:
                msg = _('Start address is larger than end address '
                        '(value=%s)') % p
                raise forms.ValidationError(msg)

    def _check_dns_nameservers(self, dns_nameservers):
        """Validate one name-server IP per line; blank lines are skipped."""
        for ns in dns_nameservers.split('\n'):
            ns = ns.strip()
            if not ns:
                continue
            self._convert_ip_address(ns, "dns_nameservers")

    def _check_host_routes(self, host_routes):
        """Validate one '<destination_cidr>,<nexthop>' route per line."""
        for r in host_routes.split('\n'):
            r = r.strip()
            if not r:
                continue
            route = r.split(',')
            if len(route) != 2:
                msg = _('Host Routes format error: '
                        'Destination CIDR and nexthop must be specified '
                        '(value=%s)') % r
                raise forms.ValidationError(msg)
            self._convert_ip_network(route[0], "host_routes")
            self._convert_ip_address(route[1], "host_routes")

    def clean(self):
        """Validate the three free-text fields on top of the base clean()."""
        cleaned_data = super(CreateSubnetDetailAction, self).clean()
        self._check_allocation_pools(cleaned_data.get('allocation_pools'))
        self._check_host_routes(cleaned_data.get('host_routes'))
        self._check_dns_nameservers(cleaned_data.get('dns_nameservers'))
        return cleaned_data
class CreateSubnetDetail(workflows.Step):
    # Step wrapper for CreateSubnetDetailAction; exposes its cleaned data
    # to the workflow context under the listed keys.
    action_class = CreateSubnetDetailAction
    contributes = ("enable_dhcp", "allocation_pools",
                   "dns_nameservers", "host_routes")
class CreateNetwork(workflows.Workflow):
    """Three-step workflow that creates a network and, optionally, one
    subnet; the network is rolled back if subnet creation fails.
    """
    slug = "create_network"
    name = _("Create Network")
    finalize_button_name = _("Create")
    success_message = _('Created network "%s".')
    failure_message = _('Unable to create network "%s".')
    default_steps = (CreateNetworkInfo,
                     CreateSubnetInfo,
                     CreateSubnetDetail)

    def get_success_url(self):
        return reverse("horizon:project:networks:index")

    def get_failure_url(self):
        return reverse("horizon:project:networks:index")

    def format_status_message(self, message):
        # Prefer the user-supplied name; fall back to the generated id.
        name = self.context.get('net_name') or self.context.get('net_id', '')
        return message % name

    def _create_network(self, request, data):
        """Create the network; return it, or False after handling errors."""
        try:
            params = {'name': data['net_name'],
                      'admin_state_up': data['admin_state']}
            if api.neutron.is_port_profiles_supported():
                params['net_profile_id'] = data['net_profile_id']
            network = api.neutron.network_create(request, **params)
            network.set_id_as_name_if_empty()
            self.context['net_id'] = network.id
            msg = _('Network "%s" was successfully created.') % network.name
            LOG.debug(msg)
            return network
        except Exception as e:
            msg = (_('Failed to create network "%(network)s": %(reason)s') %
                   {"network": data['net_name'], "reason": e})
            LOG.info(msg)
            redirect = self.get_failure_url()
            exceptions.handle(request, msg, redirect=redirect)
            return False

    def _setup_subnet_parameters(self, params, data, is_create=True):
        """Fill *params* with the subnet attributes shared by create and
        update flows.

        On create, empty optional fields are simply omitted; on update they
        are sent as empty lists so existing values get cleared.
        """
        is_update = not is_create
        params['enable_dhcp'] = data['enable_dhcp']
        # allocation_pools cannot be changed after creation, hence is_create.
        if is_create and data['allocation_pools']:
            pools = [dict(zip(['start', 'end'], pool.strip().split(',')))
                     for pool in data['allocation_pools'].split('\n')
                     if pool.strip()]
            params['allocation_pools'] = pools
        if data['host_routes'] or is_update:
            routes = [dict(zip(['destination', 'nexthop'],
                               route.strip().split(',')))
                      for route in data['host_routes'].split('\n')
                      if route.strip()]
            params['host_routes'] = routes
        if data['dns_nameservers'] or is_update:
            nameservers = [ns.strip()
                           for ns in data['dns_nameservers'].split('\n')
                           if ns.strip()]
            params['dns_nameservers'] = nameservers

    def _create_subnet(self, request, data, network=None, tenant_id=None,
                       no_redirect=False):
        """Create the subnet; return it, or False after handling errors."""
        if network:
            network_id = network.id
            network_name = network.name
        else:
            # Fall back to ids stashed in the workflow context by a
            # previous step.
            network_id = self.context.get('network_id')
            network_name = self.context.get('network_name')
        try:
            params = {'network_id': network_id,
                      'name': data['subnet_name'],
                      'cidr': data['cidr'],
                      'ip_version': int(data['ip_version'])}
            if tenant_id:
                params['tenant_id'] = tenant_id
            if data['no_gateway']:
                # Explicit None disables the gateway on the Neutron side.
                params['gateway_ip'] = None
            elif data['gateway_ip']:
                params['gateway_ip'] = data['gateway_ip']
            self._setup_subnet_parameters(params, data)
            subnet = api.neutron.subnet_create(request, **params)
            self.context['subnet_id'] = subnet.id
            msg = _('Subnet "%s" was successfully created.') % data['cidr']
            LOG.debug(msg)
            return subnet
        except Exception as e:
            msg = _('Failed to create subnet "%(sub)s" for network "%(net)s": '
                    ' %(reason)s')
            if no_redirect:
                redirect = None
            else:
                redirect = self.get_failure_url()
            exceptions.handle(request,
                              msg % {"sub": data['cidr'], "net": network_name,
                                     "reason": e},
                              redirect=redirect)
            return False

    def _delete_network(self, request, network):
        """Delete the created network when subnet creation failed."""
        try:
            api.neutron.network_delete(request, network.id)
            msg = _('Delete the created network "%s" '
                    'due to subnet creation failure.') % network.name
            LOG.debug(msg)
            redirect = self.get_failure_url()
            messages.info(request, msg)
            # NOTE(review): this Http302 is raised inside the try block, so
            # it is immediately caught by the 'except Exception' below --
            # confirm whether the redirect is meant to escape this method.
            raise exceptions.Http302(redirect)
        except Exception:
            msg = _('Failed to delete network "%s"') % network.name
            LOG.info(msg)
            redirect = self.get_failure_url()
            exceptions.handle(request, msg, redirect=redirect)

    def handle(self, request, data):
        """Create network, then (optionally) subnet; roll back on failure."""
        network = self._create_network(request, data)
        if not network:
            return False
        # If we do not need to create a subnet, return here.
        if not data['with_subnet']:
            return True
        subnet = self._create_subnet(request, data, network, no_redirect=True)
        if subnet:
            return True
        else:
            # Subnet failed: remove the half-created network.
            self._delete_network(request, network)
            return False
| 41.587112 | 99 | 0.570502 |
import logging
from django.conf import settings
import netaddr
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon.utils import fields
from horizon import workflows
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateNetworkInfoAction(workflows.Action):
net_name = forms.CharField(max_length=255,
label=_("Network Name"),
required=False)
if api.neutron.is_port_profiles_supported():
net_profile_id = forms.ChoiceField(label=_("Network Profile"))
admin_state = forms.BooleanField(label=_("Admin State"),
initial=True, required=False)
if api.neutron.is_port_profiles_supported():
def __init__(self, request, *args, **kwargs):
super(CreateNetworkInfoAction, self).__init__(request,
*args, **kwargs)
self.fields['net_profile_id'].choices = (
self.get_network_profile_choices(request))
def get_network_profile_choices(self, request):
profile_choices = [('', _("Select a profile"))]
for profile in self._get_profiles(request, 'network'):
profile_choices.append((profile.id, profile.name))
return profile_choices
def _get_profiles(self, request, type_p):
try:
profiles = api.neutron.profile_list(request, type_p)
except Exception:
profiles = []
msg = _('Network Profiles could not be retrieved.')
exceptions.handle(request, msg)
return profiles
class Meta:
name = _("Network")
help_text = _("From here you can create a new network.\n"
"In addition a subnet associated with the network "
"can be created in the next panel.")
class CreateNetworkInfo(workflows.Step):
action_class = CreateNetworkInfoAction
if api.neutron.is_port_profiles_supported():
contributes = ("net_name", "admin_state", "net_profile_id")
else:
contributes = ("net_name", "admin_state")
class CreateSubnetInfoAction(workflows.Action):
_ccs_enable_ipv6 = getattr(settings, 'OPENSTACK_NEUTRON_NETWORK', {}).get('enable_ipv6', False)
if _ccs_enable_ipv6:
ip_version_choices = [(4, 'IPv4'), (6, 'IPv6')]
ip_version_fields = fields.IPv4 | fields.IPv6
else:
ip_version_choices = [(4, 'IPv4')]
ip_version_fields = fields.IPv4
with_subnet = forms.BooleanField(label=_("Create Subnet"),
initial=True, required=False)
subnet_name = forms.CharField(max_length=255,
label=_("Subnet Name"),
required=False)
cidr = fields.IPField(label=_("Network Address"),
required=False,
initial="",
help_text=_("Network address in CIDR format "
"(e.g. 192.168.0.0/24)"),
version=ip_version_fields,
mask=True)
ip_version = forms.ChoiceField(choices=ip_version_choices,
label=_("IP Version"))
gateway_ip = fields.IPField(
label=_("Gateway IP"),
required=False,
initial="",
help_text=_("IP address of Gateway (e.g. 192.168.0.254) "
"The default value is the first IP of the "
"network address (e.g. 192.168.0.1 for "
"192.168.0.0/24). "
"If you use the default, leave blank. "
"If you want to use no gateway, "
"check 'Disable Gateway' below."),
version=ip_version_fields,
mask=False)
no_gateway = forms.BooleanField(label=_("Disable Gateway"),
initial=False, required=False)
class Meta:
name = _("Subnet")
help_text = _('You can create a subnet associated with the new '
'network, in which case "Network Address" must be '
'specified. If you wish to create a network WITHOUT a '
'subnet, uncheck the "Create Subnet" checkbox.')
def __init__(self, request, context, *args, **kwargs):
super(CreateSubnetInfoAction, self).__init__(request, context, *args,
**kwargs)
if not getattr(settings, 'OPENSTACK_NEUTRON_NETWORK',
{}).get('enable_ipv6', True):
self.fields['ip_version'].widget = forms.HiddenInput()
self.fields['ip_version'].initial = 4
def _check_subnet_data(self, cleaned_data, is_create=True):
cidr = cleaned_data.get('cidr')
ip_version = int(cleaned_data.get('ip_version'))
gateway_ip = cleaned_data.get('gateway_ip')
no_gateway = cleaned_data.get('no_gateway')
if not cidr:
msg = _('Specify "Network Address" or '
'clear "Create Subnet" checkbox.')
raise forms.ValidationError(msg)
if cidr:
subnet = netaddr.IPNetwork(cidr)
if subnet.version != ip_version:
msg = _('Network Address and IP version are inconsistent.')
raise forms.ValidationError(msg)
if (ip_version == 4 and subnet.prefixlen == 32) or \
(ip_version == 6 and subnet.prefixlen == 128):
msg = _("The subnet in the Network Address is too small (/%s)."
% subnet.prefixlen)
raise forms.ValidationError(msg)
if not no_gateway and gateway_ip:
if netaddr.IPAddress(gateway_ip).version is not ip_version:
msg = _('Gateway IP and IP version are inconsistent.')
raise forms.ValidationError(msg)
if not is_create and not no_gateway and not gateway_ip:
msg = _('Specify IP address of gateway or '
'check "Disable Gateway".')
raise forms.ValidationError(msg)
def clean(self):
cleaned_data = super(CreateSubnetInfoAction, self).clean()
with_subnet = cleaned_data.get('with_subnet')
if not with_subnet:
return cleaned_data
self._check_subnet_data(cleaned_data)
return cleaned_data
class CreateSubnetInfo(workflows.Step):
action_class = CreateSubnetInfoAction
contributes = ("with_subnet", "subnet_name", "cidr",
"ip_version", "gateway_ip", "no_gateway")
class CreateSubnetDetailAction(workflows.Action):
enable_dhcp = forms.BooleanField(label=_("Enable DHCP"),
initial=True, required=False)
allocation_pools = forms.CharField(
widget=forms.Textarea(),
label=_("Allocation Pools"),
help_text=_("IP address allocation pools. Each entry is "
"<start_ip_address>,<end_ip_address> "
"(e.g., 192.168.1.100,192.168.1.120) "
"and one entry per line."),
required=False)
dns_nameservers = forms.CharField(
widget=forms.widgets.Textarea(),
label=_("DNS Name Servers"),
help_text=_("IP address list of DNS name servers for this subnet. "
"One entry per line."),
required=False)
host_routes = forms.CharField(
widget=forms.widgets.Textarea(),
label=_("Host Routes"),
help_text=_("Additional routes announced to the hosts. "
"Each entry is <destination_cidr>,<nexthop> "
"(e.g., 192.168.200.0/24,10.56.1.254) "
"and one entry per line."),
required=False)
class Meta:
name = _("Subnet Detail")
help_text = _('You can specify additional attributes for the subnet.')
def _convert_ip_address(self, ip, field_name):
try:
return netaddr.IPAddress(ip)
except (netaddr.AddrFormatError, ValueError):
msg = _('%(field_name)s: Invalid IP address '
'(value=%(ip)s)' % dict(
field_name=field_name, ip=ip))
raise forms.ValidationError(msg)
def _convert_ip_network(self, network, field_name):
try:
return netaddr.IPNetwork(network)
except (netaddr.AddrFormatError, ValueError):
msg = _('%(field_name)s: Invalid IP address '
'(value=%(network)s)' % dict(
field_name=field_name, network=network))
raise forms.ValidationError(msg)
def _check_allocation_pools(self, allocation_pools):
for p in allocation_pools.split('\n'):
p = p.strip()
if not p:
continue
pool = p.split(',')
if len(pool) != 2:
msg = _('Start and end addresses must be specified '
'(value=%s)') % p
raise forms.ValidationError(msg)
start, end = [self._convert_ip_address(ip, "allocation_pools")
for ip in pool]
if start > end:
msg = _('Start address is larger than end address '
'(value=%s)') % p
raise forms.ValidationError(msg)
def _check_dns_nameservers(self, dns_nameservers):
for ns in dns_nameservers.split('\n'):
ns = ns.strip()
if not ns:
continue
self._convert_ip_address(ns, "dns_nameservers")
def _check_host_routes(self, host_routes):
for r in host_routes.split('\n'):
r = r.strip()
if not r:
continue
route = r.split(',')
if len(route) != 2:
msg = _('Host Routes format error: '
'Destination CIDR and nexthop must be specified '
'(value=%s)') % r
raise forms.ValidationError(msg)
self._convert_ip_network(route[0], "host_routes")
self._convert_ip_address(route[1], "host_routes")
def clean(self):
cleaned_data = super(CreateSubnetDetailAction, self).clean()
self._check_allocation_pools(cleaned_data.get('allocation_pools'))
self._check_host_routes(cleaned_data.get('host_routes'))
self._check_dns_nameservers(cleaned_data.get('dns_nameservers'))
return cleaned_data
class CreateSubnetDetail(workflows.Step):
action_class = CreateSubnetDetailAction
contributes = ("enable_dhcp", "allocation_pools",
"dns_nameservers", "host_routes")
class CreateNetwork(workflows.Workflow):
slug = "create_network"
name = _("Create Network")
finalize_button_name = _("Create")
success_message = _('Created network "%s".')
failure_message = _('Unable to create network "%s".')
default_steps = (CreateNetworkInfo,
CreateSubnetInfo,
CreateSubnetDetail)
def get_success_url(self):
return reverse("horizon:project:networks:index")
def get_failure_url(self):
return reverse("horizon:project:networks:index")
def format_status_message(self, message):
name = self.context.get('net_name') or self.context.get('net_id', '')
return message % name
def _create_network(self, request, data):
try:
params = {'name': data['net_name'],
'admin_state_up': data['admin_state']}
if api.neutron.is_port_profiles_supported():
params['net_profile_id'] = data['net_profile_id']
network = api.neutron.network_create(request, **params)
network.set_id_as_name_if_empty()
self.context['net_id'] = network.id
msg = _('Network "%s" was successfully created.') % network.name
LOG.debug(msg)
return network
except Exception as e:
msg = (_('Failed to create network "%(network)s": %(reason)s') %
{"network": data['net_name'], "reason": e})
LOG.info(msg)
redirect = self.get_failure_url()
exceptions.handle(request, msg, redirect=redirect)
return False
def _setup_subnet_parameters(self, params, data, is_create=True):
is_update = not is_create
params['enable_dhcp'] = data['enable_dhcp']
if is_create and data['allocation_pools']:
pools = [dict(zip(['start', 'end'], pool.strip().split(',')))
for pool in data['allocation_pools'].split('\n')
if pool.strip()]
params['allocation_pools'] = pools
if data['host_routes'] or is_update:
routes = [dict(zip(['destination', 'nexthop'],
route.strip().split(',')))
for route in data['host_routes'].split('\n')
if route.strip()]
params['host_routes'] = routes
if data['dns_nameservers'] or is_update:
nameservers = [ns.strip()
for ns in data['dns_nameservers'].split('\n')
if ns.strip()]
params['dns_nameservers'] = nameservers
def _create_subnet(self, request, data, network=None, tenant_id=None,
no_redirect=False):
if network:
network_id = network.id
network_name = network.name
else:
network_id = self.context.get('network_id')
network_name = self.context.get('network_name')
try:
params = {'network_id': network_id,
'name': data['subnet_name'],
'cidr': data['cidr'],
'ip_version': int(data['ip_version'])}
if tenant_id:
params['tenant_id'] = tenant_id
if data['no_gateway']:
params['gateway_ip'] = None
elif data['gateway_ip']:
params['gateway_ip'] = data['gateway_ip']
self._setup_subnet_parameters(params, data)
subnet = api.neutron.subnet_create(request, **params)
self.context['subnet_id'] = subnet.id
msg = _('Subnet "%s" was successfully created.') % data['cidr']
LOG.debug(msg)
return subnet
except Exception as e:
msg = _('Failed to create subnet "%(sub)s" for network "%(net)s": '
' %(reason)s')
if no_redirect:
redirect = None
else:
redirect = self.get_failure_url()
exceptions.handle(request,
msg % {"sub": data['cidr'], "net": network_name,
"reason": e},
redirect=redirect)
return False
def _delete_network(self, request, network):
try:
api.neutron.network_delete(request, network.id)
msg = _('Delete the created network "%s" '
'due to subnet creation failure.') % network.name
LOG.debug(msg)
redirect = self.get_failure_url()
messages.info(request, msg)
raise exceptions.Http302(redirect)
except Exception:
msg = _('Failed to delete network "%s"') % network.name
LOG.info(msg)
redirect = self.get_failure_url()
exceptions.handle(request, msg, redirect=redirect)
def handle(self, request, data):
network = self._create_network(request, data)
if not network:
return False
if not data['with_subnet']:
return True
subnet = self._create_subnet(request, data, network, no_redirect=True)
if subnet:
return True
else:
self._delete_network(request, network)
return False
| true | true |
f71a3eaeece4ab1511448b596d52d6ce7165fb16 | 34 | py | Python | 06_01_name_conflict.py | simonmonk/prog_pico_ed1 | 36e70f88ea7dc73e75399cd390d1cc2023843971 | [
"MIT"
] | 6 | 2021-05-08T13:19:33.000Z | 2022-03-20T08:29:44.000Z | 06_01_name_conflict.py | simonmonk/prog_pico_ed1 | 36e70f88ea7dc73e75399cd390d1cc2023843971 | [
"MIT"
] | 1 | 2021-03-05T20:27:15.000Z | 2021-11-17T09:07:43.000Z | 06_01_name_conflict.py | simonmonk/prog_pico_ed1 | 36e70f88ea7dc73e75399cd390d1cc2023843971 | [
"MIT"
] | 2 | 2021-07-02T15:19:37.000Z | 2021-10-06T00:53:25.000Z | def print():
pass
print() | 8.5 | 12 | 0.5 | def print():
pass
print() | true | true |
f71a3ebfc7a88a941fd26cb5f19083ae093e7d3f | 18,115 | py | Python | src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/tests/test_locks.py | aag09/azurecli | 30c98a75c36c02a657f1753ff5c48502dc7f7933 | [
"MIT"
] | null | null | null | src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/tests/test_locks.py | aag09/azurecli | 30c98a75c36c02a657f1753ff5c48502dc7f7933 | [
"MIT"
] | null | null | null | src/command_modules/azure-cli-resource/azure/cli/command_modules/resource/tests/test_locks.py | aag09/azurecli | 30c98a75c36c02a657f1753ff5c48502dc7f7933 | [
"MIT"
] | 1 | 2017-12-28T04:51:44.000Z | 2017-12-28T04:51:44.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from time import sleep
import unittest
from azure.cli.testsdk import ScenarioTest, JMESPathCheck, ResourceGroupPreparer, record_only
from azure.cli.command_modules.resource.custom import _parse_lock_id
class ResourceLockTests(ScenarioTest):
def test_list_locks(self):
# just make sure this doesn't throw
self.cmd('az lock list').get_output_in_json()
    @record_only()
    def test_subscription_locks(self):
        """Full CRUD cycle for subscription-level locks, once per lock type.

        NOTE(review): a second method with this exact name is defined later
        in this class; Python keeps only the last definition, so this body is
        silently shadowed and never runs unless the duplicate is renamed.
        """
        for lock_type in ['ReadOnly', 'CanNotDelete']:
            lock_name = self.create_random_name('cli-test-lock', 48)
            lock = self.cmd('az lock create -n {} --lock-type {}'.format(lock_name, lock_type)).get_output_in_json()
            lock_id = lock.get('id')
            self._sleep_for_lock_operation()
            # The new lock must appear in the subscription-wide listing.
            locks_list = self.cmd('az lock list').get_output_in_json()
            self.assertTrue(locks_list)
            self.assertIn(lock_name, [l['name'] for l in locks_list])
            # 'show' must work both by name and by full resource id.
            lock = self.cmd('az lock show -n {}'.format(lock_name)).get_output_in_json()
            lock_from_id = self.cmd('az lock show --ids {}'.format(lock_id)).get_output_in_json()
            self.assertEqual(lock.get('name', None), lock_name)
            self.assertEqual(lock_from_id.get('name', None), lock_name)
            self.assertEqual(lock.get('level', None), lock_type)
            notes = self.create_random_name('notes', 20)
            # Flip to the other level to verify update by name...
            new_lvl = 'ReadOnly' if lock_type == 'CanNotDelete' else 'CanNotDelete'
            lock = self.cmd('az lock update -n {} --notes {} --lock-type {}'
                            .format(lock_name, notes, new_lvl)).get_output_in_json()
            self.assertEqual(lock.get('notes', None), notes)
            self.assertEqual(lock.get('level', None), new_lvl)
            # ...then flip back to verify update by id.
            lock = self.cmd('az lock update --ids {} --lock-type {}'
                            .format(lock_id, lock_type)).get_output_in_json()
            self.assertEqual(lock.get('level', None), lock_type)
            self.cmd('az lock delete -n {}'.format(lock_name))
            self._sleep_for_lock_operation()
    @ResourceGroupPreparer(name_prefix='cli_test_readonly_resource_group_lock')
    def test_readonly_resource_group_lock(self, resource_group):
        """ReadOnly lock lifecycle at resource-group scope."""
        self._lock_operation_with_resource_group('ReadOnly', resource_group)
    @ResourceGroupPreparer(name_prefix='cli_test_cannotdelete_resource_group_lock')
    def test_cannotdelete_resource_group_lock(self, resource_group):
        """CanNotDelete lock lifecycle at resource-group scope."""
        self._lock_operation_with_resource_group('CanNotDelete', resource_group)
    @ResourceGroupPreparer(name_prefix='cli_test_readonly_resource_lock')
    def test_readonly_resource_lock(self, resource_group):
        """ReadOnly lock lifecycle at individual-resource scope."""
        self._lock_operation_with_resource('ReadOnly', resource_group)
    @ResourceGroupPreparer(name_prefix='cli_test_cannotdelete_resource_lock')
    def test_cannotdelete_resource_lock(self, resource_group):
        """CanNotDelete lock lifecycle at individual-resource scope."""
        self._lock_operation_with_resource('CanNotDelete', resource_group)
    def _lock_operation_with_resource_group(self, lock_type, resource_group):
        """Create/show/list/update/delete a lock of *lock_type* scoped to
        *resource_group*.

        Command strings and their order must not change: these tests replay
        recorded HTTP cassettes.
        """
        lock_name = self.create_random_name('cli-test-lock', 48)
        self.cmd('az lock create -n {} -g {} --lock-type {}'.format(lock_name, resource_group, lock_type))
        self._sleep_for_lock_operation()
        self.cmd('az lock show -g {} -n {}'.format(resource_group, lock_name)).assert_with_checks([
            JMESPathCheck('name', lock_name),
            JMESPathCheck('level', lock_type)])
        locks_list = self.cmd("az lock list -g {} --query '[].name' -ojson".format(resource_group)).get_output_in_json()
        self.assertTrue(locks_list)
        self.assertIn(lock_name, locks_list)
        notes = self.create_random_name('notes', 20)
        # Update flips the level to the opposite one to prove it changed.
        new_lvl = 'ReadOnly' if lock_type == 'CanNotDelete' else 'CanNotDelete'
        lock = self.cmd('az lock update -n {} -g {} --notes {} --lock-type {}'
                        .format(lock_name, resource_group, notes, new_lvl)).get_output_in_json()
        self.assertEqual(lock.get('notes', None), notes)
        self.assertEqual(lock.get('level', None), new_lvl)
        self.cmd('az lock delete -g {} -n {}'.format(resource_group, lock_name))
        self._sleep_for_lock_operation()
    def _lock_operation_with_resource(self, lock_type, resource_group):
        """Create/show/list/update/delete a lock of *lock_type* on a vnet
        created inside *resource_group*.

        Command strings and their order must not change: these tests replay
        recorded HTTP cassettes.
        """
        rsrc_name = self.create_random_name('cli.lock.rsrc', 30)
        rsrc_type = 'Microsoft.Network/virtualNetworks'
        lock_name = self.create_random_name('cli-test-lock', 74)
        self.cmd('az network vnet create -n {} -g {}'.format(rsrc_name, resource_group))
        self.cmd('az lock create -n {} -g {} --resource-type {} --resource-name {} --lock-type {}'
                 .format(lock_name, resource_group, rsrc_type, rsrc_name, lock_type))
        self._sleep_for_lock_operation()
        self.cmd('az lock show --name {} -g {} --resource-type {} --resource-name {}'
                 .format(lock_name, resource_group, rsrc_type, rsrc_name)).assert_with_checks([
                     JMESPathCheck('name', lock_name),
                     JMESPathCheck('level', lock_type)])
        # Subscription-wide listing should still surface the resource lock.
        locks_list = self.cmd("az lock list --query '[].name' -ojson").get_output_in_json()
        self.assertTrue(locks_list)
        self.assertIn(lock_name, locks_list)
        notes = self.create_random_name('notes', 20)
        # Update flips the level to the opposite one to prove it changed.
        new_lvl = 'ReadOnly' if lock_type == 'CanNotDelete' else 'CanNotDelete'
        lock = self.cmd('az lock update -n {} -g {} --resource-type {} --resource-name {} --notes {} --lock-type {}'
                        .format(lock_name, resource_group, rsrc_type, rsrc_name, notes, new_lvl)).get_output_in_json()
        self.assertEqual(lock.get('notes', None), notes)
        self.assertEqual(lock.get('level', None), new_lvl)
        self.cmd('az lock delete --name {} -g {} --resource-name {} --resource-type {}'
                 .format(lock_name, resource_group, rsrc_name, rsrc_type))
        self._sleep_for_lock_operation()
    @ResourceGroupPreparer(name_prefix='cli_test_group_lock')
    def test_group_lock_commands(self, resource_group):
        """CRUD cycle via the `az group lock` alias command group."""
        lock_name = self.create_random_name('cli-test-lock', 48)
        self.cmd('group lock create -n {} -g {} --lock-type CanNotDelete'.format(lock_name, resource_group))
        self._sleep_for_lock_operation()
        self.cmd('group lock show -g {} -n {}'.format(resource_group, lock_name)).assert_with_checks([
            JMESPathCheck('name', lock_name),
            JMESPathCheck('level', 'CanNotDelete')]).get_output_in_json()
        locks_list = self.cmd("group lock list -g {} --query [].name -ojson"
                              .format(resource_group)).get_output_in_json()
        self.assertTrue(locks_list)
        self.assertIn(lock_name, locks_list)
        notes = self.create_random_name('notes', 20)
        lock = self.cmd('group lock update -n {} -g {} --notes {} --lock-type ReadOnly'
                        .format(lock_name, resource_group, notes)).get_output_in_json()
        self.assertEqual(lock.get('notes', None), notes)
        self.assertEqual(lock.get('level', None), 'ReadOnly')
        self.cmd('group lock delete -g {} -n {}'.format(resource_group, lock_name))
        self._sleep_for_lock_operation()
    @ResourceGroupPreparer(name_prefix='cli_test_resource_lock')
    def test_resource_lock_commands(self, resource_group):
        """CRUD cycle via the `az resource lock` alias command group."""
        rsrc_name = self.create_random_name('cli.lock.rsrc', 30)
        rsrc_type = 'Microsoft.Network/virtualNetworks'
        lock_name = self.create_random_name('cli-test-lock', 74)
        lock_type = 'CanNotDelete'
        self.cmd('network vnet create -n {} -g {}'.format(rsrc_name, resource_group))
        self.cmd('resource lock create -n {} -g {} --resource-type {} --resource-name {} --lock-type {}'
                 .format(lock_name, resource_group, rsrc_type, rsrc_name, lock_type))
        self._sleep_for_lock_operation()
        self.cmd('resource lock show --name {} -g {} --resource-type {} --resource-name {}'
                 .format(lock_name, resource_group, rsrc_type, rsrc_name)).assert_with_checks([
                     JMESPathCheck('name', lock_name),
                     JMESPathCheck('level', lock_type)])
        list_cmd = "resource lock list -g {} --resource-type {} --resource-name {} " \
                   "--query [].name -ojson".format(resource_group, rsrc_type, rsrc_name)
        locks_list = self.cmd(list_cmd).get_output_in_json()
        self.assertTrue(locks_list)
        self.assertIn(lock_name, locks_list)
        notes = self.create_random_name('notes', 20)
        lock = self.cmd('resource lock update -n {} -g {} --resource-type {} --resource-name {} --notes {} '
                        '--lock-type ReadOnly'
                        .format(lock_name, resource_group, rsrc_type, rsrc_name, notes)).get_output_in_json()
        self.assertEqual(lock.get('notes', None), notes)
        self.assertEqual(lock.get('level', None), 'ReadOnly')
        self.cmd('resource lock delete --name {} -g {} --resource-name {} --resource-type {}'
                 .format(lock_name, resource_group, rsrc_name, rsrc_type))
        self._sleep_for_lock_operation()
@record_only()
def test_subscription_locks(self):
lock_name = self.create_random_name('cli-test-lock', 48)
lock = self.cmd('az account lock create -n {} --lock-type CanNotDelete'.format(lock_name)).get_output_in_json()
lock_id = lock.get('id')
locks_list = self.cmd('az account lock list --query [].name').get_output_in_json()
self.assertTrue(locks_list)
self.assertIn(lock_name, locks_list)
lock = self.cmd('az account lock show -n {}'.format(lock_name)).get_output_in_json()
lock_from_id = self.cmd('az account lock show --ids {}'.format(lock_id)).get_output_in_json()
self.assertEqual(lock.get('name', None), lock_name)
self.assertEqual(lock_from_id.get('name', None), lock_name)
self.assertEqual(lock.get('level', None), 'CanNotDelete')
notes = self.create_random_name('notes', 20)
lock = self.cmd('az account lock update -n {} --notes {} --lock-type {}'
.format(lock_name, notes, 'ReadOnly')).get_output_in_json()
self.assertEqual(lock.get('notes', None), notes)
self.assertEqual(lock.get('level', None), 'ReadOnly')
lock = self.cmd('az account lock update --ids {} --lock-type {}'
.format(lock_id, 'CanNotDelete')).get_output_in_json()
self.assertEqual(lock.get('level', None), 'CanNotDelete')
self.cmd('az account lock delete -n {}'.format(lock_name))
    @ResourceGroupPreparer(name_prefix='cli_test_lock_commands_with_ids')
    def test_lock_commands_with_ids(self, resource_group):
        """Batch operations on multiple locks via a space-delimited --ids list.

        Creates locks at three scopes (group, vnet, subnet), then shows,
        updates and deletes all three in single commands.
        """
        vnet_name = self.create_random_name('cli-lock-vnet', 30)
        subnet_name = self.create_random_name('cli-lock-subnet', 30)
        group_lock_name = self.create_random_name('cli-test-lock', 50)
        vnet_lock_name = self.create_random_name('cli-test-lock', 50)
        subnet_lock_name = self.create_random_name('cli-test-lock', 20)
        vnet = self.cmd('az network vnet create -n {} -g {}'.format(vnet_name, resource_group)).get_output_in_json()
        subnetaddress = vnet.get('newVNet').get('addressSpace').get('addressPrefixes')[0]
        self.cmd('az network vnet subnet create -n {} --address-prefix {} --vnet-name {} -g {}'
                 .format(subnet_name, subnetaddress, vnet_name, resource_group))
        locks = []
        # One lock per scope: resource group, vnet, and child subnet.
        locks.append(self.cmd('az lock create -n {} -g {} --lock-type CanNotDelete'
                              .format(group_lock_name, resource_group)).get_output_in_json())
        locks.append(self.cmd('az lock create -n {} -g {} --resource-type Microsoft.Network/virtualNetworks'
                              ' --resource-name {} --lock-type CanNotDelete'
                              .format(vnet_lock_name, resource_group, vnet_name)).get_output_in_json())
        locks.append(self.cmd('az lock create -n {} -g {} --resource-name {} --resource-type subnets '
                              '--namespace Microsoft.Network --parent virtualNetworks/{} --lock-type CanNotDelete'
                              .format(subnet_lock_name, resource_group, subnet_name, vnet_name)).get_output_in_json())
        self._sleep_for_lock_operation()
        space_delimited_ids = ' '.join([lock.get('id', None) for lock in locks])
        # 'show --ids' with several ids returns a list covering all of them.
        my_locks = self.cmd('az lock show --ids {} --query [].name'.format(space_delimited_ids)).get_output_in_json()
        self.assertTrue(len(my_locks) == 3)
        for lock in my_locks:
            self.assertIn(lock, [group_lock_name, vnet_lock_name, subnet_lock_name])
        # 'update --ids' applies the change to every referenced lock.
        my_locks = self.cmd('az lock update --ids {} --notes somenotes --lock-type ReadOnly'
                            .format(space_delimited_ids)).get_output_in_json()
        self.assertTrue(len(my_locks) == 3)
        for lock in my_locks:
            self.assertEqual(lock.get('notes', None), 'somenotes')
            self.assertEqual(lock.get('level', None), 'ReadOnly')
        self.cmd('az lock delete --ids {}'.format(space_delimited_ids))
self._sleep_for_lock_operation()
my_locks = self.cmd("az lock list -g {} -ojson".format(resource_group)).get_output_in_json()
self.assertFalse(my_locks)
def _sleep_for_lock_operation(self):
if self.is_live:
sleep(5)
class ParseIdTests(unittest.TestCase):
def test_parsing_lock_ids(self):
tests = [
{
'input': "/subscriptions/subId/providers/"
"Microsoft.Authorization/locks/sublock",
'expected': {
'resource_group': None,
'resource_provider_namespace': None,
'parent_resource_path': None,
'resource_type': None,
'resource_name': None,
'lock_name': 'sublock'
}
},
{
'input': "/subscriptions/subId/resourceGroups/examplegroup/providers/"
"Microsoft.Authorization/locks/grouplock",
'expected': {
'resource_group': 'examplegroup',
'resource_provider_namespace': None,
'parent_resource_path': None,
'resource_type': None,
'resource_name': None,
'lock_name': 'grouplock'
}
},
{
'input': "/subscriptions/subId/resourcegroups/mygroup/providers/"
"Microsoft.Network/virtualNetworks/myvnet/providers/"
"Microsoft.Authorization/locks/vnetlock",
'expected': {
'resource_group': 'mygroup',
'resource_provider_namespace': 'Microsoft.Network',
'parent_resource_path': None,
'resource_type': 'virtualNetworks',
'resource_name': 'myvnet',
'lock_name': 'vnetlock'
}
},
{
'input': "/subscriptions/subId/resourceGroups/mygroup/providers/"
"Microsoft.Network/virtualNetworks/myvnet/subnets/subnet/providers/"
"Microsoft.Authorization/locks/subnetlock",
'expected': {
'resource_group': 'mygroup',
'resource_provider_namespace': 'Microsoft.Network',
'parent_resource_path': 'virtualNetworks/myvnet',
'resource_type': 'subnets',
'resource_name': 'subnet',
'lock_name': 'subnetlock'
}
},
{
'input': "/subscriptions/subId/resourceGroups/mygroup/providers/"
"Microsoft.Provider1/resourceType1/name1/providers/"
"Microsoft.Provider2/resourceType2/name2/providers/"
"Microsoft.Authorization/locks/somelock",
'expected': {
'resource_group': 'mygroup',
'resource_provider_namespace': 'Microsoft.Provider1',
'parent_resource_path': 'resourceType1/name1/providers/Microsoft.Provider2',
'resource_type': 'resourceType2',
'resource_name': 'name2',
'lock_name': 'somelock'
}
}
]
for test in tests:
kwargs = _parse_lock_id(test['input'])
self.assertDictEqual(kwargs, test['expected'])
fail_tests = [
"/notsubscriptions/subId/providers/Microsoft.Authorization/locks/sublock",
"/subscriptions/subId/notResourceGroups/examplegroup/providers/Microsoft.Authorization/locks/grouplock",
"/subscriptions/subId/resourceGroups/examplegroup/providers/Microsoft.NotAuthorization/not_locks/grouplock",
"/subscriptions/subId/resourcegroups/mygroup/Microsoft.Network/virtualNetworks/myvnet/providers/"
"Microsoft.Authorization/locks/missingProvidersLock",
"/subscriptions/subId/resourcegroups/mygroup/providers/Microsoft.Network/myvnet/providers/"
"Microsoft.Authorization/locks/missingRsrcTypeLock",
"/subscriptions/subId/providers/Microsoft.Network/virtualNetworks/myvnet/subnets/subnet/providers/"
"Microsoft.Authorization/locks/missingRsrcGroupLock",
"not_a_id_at_all"
]
for test in fail_tests:
with self.assertRaises(AttributeError):
_parse_lock_id(test)
if __name__ == '__main__':
unittest.main()
| 51.463068 | 120 | 0.613525 |
from time import sleep
import unittest
from azure.cli.testsdk import ScenarioTest, JMESPathCheck, ResourceGroupPreparer, record_only
from azure.cli.command_modules.resource.custom import _parse_lock_id
class ResourceLockTests(ScenarioTest):
def test_list_locks(self):
self.cmd('az lock list').get_output_in_json()
@record_only()
def test_subscription_locks(self):
for lock_type in ['ReadOnly', 'CanNotDelete']:
lock_name = self.create_random_name('cli-test-lock', 48)
lock = self.cmd('az lock create -n {} --lock-type {}'.format(lock_name, lock_type)).get_output_in_json()
lock_id = lock.get('id')
self._sleep_for_lock_operation()
locks_list = self.cmd('az lock list').get_output_in_json()
self.assertTrue(locks_list)
self.assertIn(lock_name, [l['name'] for l in locks_list])
lock = self.cmd('az lock show -n {}'.format(lock_name)).get_output_in_json()
lock_from_id = self.cmd('az lock show --ids {}'.format(lock_id)).get_output_in_json()
self.assertEqual(lock.get('name', None), lock_name)
self.assertEqual(lock_from_id.get('name', None), lock_name)
self.assertEqual(lock.get('level', None), lock_type)
notes = self.create_random_name('notes', 20)
new_lvl = 'ReadOnly' if lock_type == 'CanNotDelete' else 'CanNotDelete'
lock = self.cmd('az lock update -n {} --notes {} --lock-type {}'
.format(lock_name, notes, new_lvl)).get_output_in_json()
self.assertEqual(lock.get('notes', None), notes)
self.assertEqual(lock.get('level', None), new_lvl)
lock = self.cmd('az lock update --ids {} --lock-type {}'
.format(lock_id, lock_type)).get_output_in_json()
self.assertEqual(lock.get('level', None), lock_type)
self.cmd('az lock delete -n {}'.format(lock_name))
self._sleep_for_lock_operation()
@ResourceGroupPreparer(name_prefix='cli_test_readonly_resource_group_lock')
def test_readonly_resource_group_lock(self, resource_group):
self._lock_operation_with_resource_group('ReadOnly', resource_group)
@ResourceGroupPreparer(name_prefix='cli_test_cannotdelete_resource_group_lock')
def test_cannotdelete_resource_group_lock(self, resource_group):
self._lock_operation_with_resource_group('CanNotDelete', resource_group)
@ResourceGroupPreparer(name_prefix='cli_test_readonly_resource_lock')
def test_readonly_resource_lock(self, resource_group):
self._lock_operation_with_resource('ReadOnly', resource_group)
@ResourceGroupPreparer(name_prefix='cli_test_cannotdelete_resource_lock')
def test_cannotdelete_resource_lock(self, resource_group):
self._lock_operation_with_resource('CanNotDelete', resource_group)
def _lock_operation_with_resource_group(self, lock_type, resource_group):
lock_name = self.create_random_name('cli-test-lock', 48)
self.cmd('az lock create -n {} -g {} --lock-type {}'.format(lock_name, resource_group, lock_type))
self._sleep_for_lock_operation()
self.cmd('az lock show -g {} -n {}'.format(resource_group, lock_name)).assert_with_checks([
JMESPathCheck('name', lock_name),
JMESPathCheck('level', lock_type)])
locks_list = self.cmd("az lock list -g {} --query '[].name' -ojson".format(resource_group)).get_output_in_json()
self.assertTrue(locks_list)
self.assertIn(lock_name, locks_list)
notes = self.create_random_name('notes', 20)
new_lvl = 'ReadOnly' if lock_type == 'CanNotDelete' else 'CanNotDelete'
lock = self.cmd('az lock update -n {} -g {} --notes {} --lock-type {}'
.format(lock_name, resource_group, notes, new_lvl)).get_output_in_json()
self.assertEqual(lock.get('notes', None), notes)
self.assertEqual(lock.get('level', None), new_lvl)
self.cmd('az lock delete -g {} -n {}'.format(resource_group, lock_name))
self._sleep_for_lock_operation()
def _lock_operation_with_resource(self, lock_type, resource_group):
rsrc_name = self.create_random_name('cli.lock.rsrc', 30)
rsrc_type = 'Microsoft.Network/virtualNetworks'
lock_name = self.create_random_name('cli-test-lock', 74)
self.cmd('az network vnet create -n {} -g {}'.format(rsrc_name, resource_group))
self.cmd('az lock create -n {} -g {} --resource-type {} --resource-name {} --lock-type {}'
.format(lock_name, resource_group, rsrc_type, rsrc_name, lock_type))
self._sleep_for_lock_operation()
self.cmd('az lock show --name {} -g {} --resource-type {} --resource-name {}'
.format(lock_name, resource_group, rsrc_type, rsrc_name)).assert_with_checks([
JMESPathCheck('name', lock_name),
JMESPathCheck('level', lock_type)])
locks_list = self.cmd("az lock list --query '[].name' -ojson").get_output_in_json()
self.assertTrue(locks_list)
self.assertIn(lock_name, locks_list)
notes = self.create_random_name('notes', 20)
new_lvl = 'ReadOnly' if lock_type == 'CanNotDelete' else 'CanNotDelete'
lock = self.cmd('az lock update -n {} -g {} --resource-type {} --resource-name {} --notes {} --lock-type {}'
.format(lock_name, resource_group, rsrc_type, rsrc_name, notes, new_lvl)).get_output_in_json()
self.assertEqual(lock.get('notes', None), notes)
self.assertEqual(lock.get('level', None), new_lvl)
self.cmd('az lock delete --name {} -g {} --resource-name {} --resource-type {}'
.format(lock_name, resource_group, rsrc_name, rsrc_type))
self._sleep_for_lock_operation()
@ResourceGroupPreparer(name_prefix='cli_test_group_lock')
def test_group_lock_commands(self, resource_group):
lock_name = self.create_random_name('cli-test-lock', 48)
self.cmd('group lock create -n {} -g {} --lock-type CanNotDelete'.format(lock_name, resource_group))
self._sleep_for_lock_operation()
self.cmd('group lock show -g {} -n {}'.format(resource_group, lock_name)).assert_with_checks([
JMESPathCheck('name', lock_name),
JMESPathCheck('level', 'CanNotDelete')]).get_output_in_json()
locks_list = self.cmd("group lock list -g {} --query [].name -ojson"
.format(resource_group)).get_output_in_json()
self.assertTrue(locks_list)
self.assertIn(lock_name, locks_list)
notes = self.create_random_name('notes', 20)
lock = self.cmd('group lock update -n {} -g {} --notes {} --lock-type ReadOnly'
.format(lock_name, resource_group, notes)).get_output_in_json()
self.assertEqual(lock.get('notes', None), notes)
self.assertEqual(lock.get('level', None), 'ReadOnly')
self.cmd('group lock delete -g {} -n {}'.format(resource_group, lock_name))
self._sleep_for_lock_operation()
@ResourceGroupPreparer(name_prefix='cli_test_resource_lock')
def test_resource_lock_commands(self, resource_group):
rsrc_name = self.create_random_name('cli.lock.rsrc', 30)
rsrc_type = 'Microsoft.Network/virtualNetworks'
lock_name = self.create_random_name('cli-test-lock', 74)
lock_type = 'CanNotDelete'
self.cmd('network vnet create -n {} -g {}'.format(rsrc_name, resource_group))
self.cmd('resource lock create -n {} -g {} --resource-type {} --resource-name {} --lock-type {}'
.format(lock_name, resource_group, rsrc_type, rsrc_name, lock_type))
self._sleep_for_lock_operation()
self.cmd('resource lock show --name {} -g {} --resource-type {} --resource-name {}'
.format(lock_name, resource_group, rsrc_type, rsrc_name)).assert_with_checks([
JMESPathCheck('name', lock_name),
JMESPathCheck('level', lock_type)])
list_cmd = "resource lock list -g {} --resource-type {} --resource-name {} " \
"--query [].name -ojson".format(resource_group, rsrc_type, rsrc_name)
locks_list = self.cmd(list_cmd).get_output_in_json()
self.assertTrue(locks_list)
self.assertIn(lock_name, locks_list)
notes = self.create_random_name('notes', 20)
lock = self.cmd('resource lock update -n {} -g {} --resource-type {} --resource-name {} --notes {} '
'--lock-type ReadOnly'
.format(lock_name, resource_group, rsrc_type, rsrc_name, notes)).get_output_in_json()
self.assertEqual(lock.get('notes', None), notes)
self.assertEqual(lock.get('level', None), 'ReadOnly')
self.cmd('resource lock delete --name {} -g {} --resource-name {} --resource-type {}'
.format(lock_name, resource_group, rsrc_name, rsrc_type))
self._sleep_for_lock_operation()
@record_only()
def test_subscription_locks(self):
lock_name = self.create_random_name('cli-test-lock', 48)
lock = self.cmd('az account lock create -n {} --lock-type CanNotDelete'.format(lock_name)).get_output_in_json()
lock_id = lock.get('id')
locks_list = self.cmd('az account lock list --query [].name').get_output_in_json()
self.assertTrue(locks_list)
self.assertIn(lock_name, locks_list)
lock = self.cmd('az account lock show -n {}'.format(lock_name)).get_output_in_json()
lock_from_id = self.cmd('az account lock show --ids {}'.format(lock_id)).get_output_in_json()
self.assertEqual(lock.get('name', None), lock_name)
self.assertEqual(lock_from_id.get('name', None), lock_name)
self.assertEqual(lock.get('level', None), 'CanNotDelete')
notes = self.create_random_name('notes', 20)
lock = self.cmd('az account lock update -n {} --notes {} --lock-type {}'
.format(lock_name, notes, 'ReadOnly')).get_output_in_json()
self.assertEqual(lock.get('notes', None), notes)
self.assertEqual(lock.get('level', None), 'ReadOnly')
lock = self.cmd('az account lock update --ids {} --lock-type {}'
.format(lock_id, 'CanNotDelete')).get_output_in_json()
self.assertEqual(lock.get('level', None), 'CanNotDelete')
self.cmd('az account lock delete -n {}'.format(lock_name))
@ResourceGroupPreparer(name_prefix='cli_test_lock_commands_with_ids')
def test_lock_commands_with_ids(self, resource_group):
vnet_name = self.create_random_name('cli-lock-vnet', 30)
subnet_name = self.create_random_name('cli-lock-subnet', 30)
group_lock_name = self.create_random_name('cli-test-lock', 50)
vnet_lock_name = self.create_random_name('cli-test-lock', 50)
subnet_lock_name = self.create_random_name('cli-test-lock', 20)
vnet = self.cmd('az network vnet create -n {} -g {}'.format(vnet_name, resource_group)).get_output_in_json()
subnetaddress = vnet.get('newVNet').get('addressSpace').get('addressPrefixes')[0]
self.cmd('az network vnet subnet create -n {} --address-prefix {} --vnet-name {} -g {}'
.format(subnet_name, subnetaddress, vnet_name, resource_group))
locks = []
locks.append(self.cmd('az lock create -n {} -g {} --lock-type CanNotDelete'
.format(group_lock_name, resource_group)).get_output_in_json())
locks.append(self.cmd('az lock create -n {} -g {} --resource-type Microsoft.Network/virtualNetworks'
' --resource-name {} --lock-type CanNotDelete'
.format(vnet_lock_name, resource_group, vnet_name)).get_output_in_json())
locks.append(self.cmd('az lock create -n {} -g {} --resource-name {} --resource-type subnets '
'--namespace Microsoft.Network --parent virtualNetworks/{} --lock-type CanNotDelete'
.format(subnet_lock_name, resource_group, subnet_name, vnet_name)).get_output_in_json())
self._sleep_for_lock_operation()
space_delimited_ids = ' '.join([lock.get('id', None) for lock in locks])
my_locks = self.cmd('az lock show --ids {} --query [].name'.format(space_delimited_ids)).get_output_in_json()
self.assertTrue(len(my_locks) == 3)
for lock in my_locks:
self.assertIn(lock, [group_lock_name, vnet_lock_name, subnet_lock_name])
my_locks = self.cmd('az lock update --ids {} --notes somenotes --lock-type ReadOnly'
.format(space_delimited_ids)).get_output_in_json()
self.assertTrue(len(my_locks) == 3)
for lock in my_locks:
self.assertEqual(lock.get('notes', None), 'somenotes')
self.assertEqual(lock.get('level', None), 'ReadOnly')
self.cmd('az lock delete --ids {}'.format(space_delimited_ids))
self._sleep_for_lock_operation()
my_locks = self.cmd("az lock list -g {} -ojson".format(resource_group)).get_output_in_json()
self.assertFalse(my_locks)
def _sleep_for_lock_operation(self):
if self.is_live:
sleep(5)
class ParseIdTests(unittest.TestCase):
def test_parsing_lock_ids(self):
tests = [
{
'input': "/subscriptions/subId/providers/"
"Microsoft.Authorization/locks/sublock",
'expected': {
'resource_group': None,
'resource_provider_namespace': None,
'parent_resource_path': None,
'resource_type': None,
'resource_name': None,
'lock_name': 'sublock'
}
},
{
'input': "/subscriptions/subId/resourceGroups/examplegroup/providers/"
"Microsoft.Authorization/locks/grouplock",
'expected': {
'resource_group': 'examplegroup',
'resource_provider_namespace': None,
'parent_resource_path': None,
'resource_type': None,
'resource_name': None,
'lock_name': 'grouplock'
}
},
{
'input': "/subscriptions/subId/resourcegroups/mygroup/providers/"
"Microsoft.Network/virtualNetworks/myvnet/providers/"
"Microsoft.Authorization/locks/vnetlock",
'expected': {
'resource_group': 'mygroup',
'resource_provider_namespace': 'Microsoft.Network',
'parent_resource_path': None,
'resource_type': 'virtualNetworks',
'resource_name': 'myvnet',
'lock_name': 'vnetlock'
}
},
{
'input': "/subscriptions/subId/resourceGroups/mygroup/providers/"
"Microsoft.Network/virtualNetworks/myvnet/subnets/subnet/providers/"
"Microsoft.Authorization/locks/subnetlock",
'expected': {
'resource_group': 'mygroup',
'resource_provider_namespace': 'Microsoft.Network',
'parent_resource_path': 'virtualNetworks/myvnet',
'resource_type': 'subnets',
'resource_name': 'subnet',
'lock_name': 'subnetlock'
}
},
{
'input': "/subscriptions/subId/resourceGroups/mygroup/providers/"
"Microsoft.Provider1/resourceType1/name1/providers/"
"Microsoft.Provider2/resourceType2/name2/providers/"
"Microsoft.Authorization/locks/somelock",
'expected': {
'resource_group': 'mygroup',
'resource_provider_namespace': 'Microsoft.Provider1',
'parent_resource_path': 'resourceType1/name1/providers/Microsoft.Provider2',
'resource_type': 'resourceType2',
'resource_name': 'name2',
'lock_name': 'somelock'
}
}
]
for test in tests:
kwargs = _parse_lock_id(test['input'])
self.assertDictEqual(kwargs, test['expected'])
fail_tests = [
"/notsubscriptions/subId/providers/Microsoft.Authorization/locks/sublock",
"/subscriptions/subId/notResourceGroups/examplegroup/providers/Microsoft.Authorization/locks/grouplock",
"/subscriptions/subId/resourceGroups/examplegroup/providers/Microsoft.NotAuthorization/not_locks/grouplock",
"/subscriptions/subId/resourcegroups/mygroup/Microsoft.Network/virtualNetworks/myvnet/providers/"
"Microsoft.Authorization/locks/missingProvidersLock",
"/subscriptions/subId/resourcegroups/mygroup/providers/Microsoft.Network/myvnet/providers/"
"Microsoft.Authorization/locks/missingRsrcTypeLock",
"/subscriptions/subId/providers/Microsoft.Network/virtualNetworks/myvnet/subnets/subnet/providers/"
"Microsoft.Authorization/locks/missingRsrcGroupLock",
"not_a_id_at_all"
]
for test in fail_tests:
with self.assertRaises(AttributeError):
_parse_lock_id(test)
if __name__ == '__main__':
unittest.main()
| true | true |
f71a3fac624255159a6714f8e472afdd01de6526 | 1,342 | py | Python | molsysmt/form/openmm_Topology/to_openmm_System.py | uibcdf/MolModSAKs | 02263fb710693f0c41817f1a318459b35fd5462a | [
"MIT"
] | null | null | null | molsysmt/form/openmm_Topology/to_openmm_System.py | uibcdf/MolModSAKs | 02263fb710693f0c41817f1a318459b35fd5462a | [
"MIT"
] | null | null | null | molsysmt/form/openmm_Topology/to_openmm_System.py | uibcdf/MolModSAKs | 02263fb710693f0c41817f1a318459b35fd5462a | [
"MIT"
] | null | null | null | from molsysmt._private.exceptions import *
from molsysmt._private.digestion import *
from .is_openmm_Topology import is_openmm_Topology
def to_openmm_System(item, atom_indices='all', forcefield=None, parameters=None, check=True):
if check:
try:
is_openmm_Topology(item)
except:
raise WrongFormError('openmm.Topology')
try:
atom_indices = digest_atom_indices(atom_indices)
except:
raise WrongAtomIndicesError()
try:
forcefield = digest_forcefield(forcefield)
except:
raise WrongForceFieldError()
#forcefield = molecular_mechanics.to_openmm_ForceField()
#system_parameters = molecular_mechanics.get_openmm_System_parameters()
#tmp_item = forcefield.createSystem(item, **parameters)
#if molecular_mechanics.use_dispersion_correction or molecular_mechanics.ewald_error_tolerance:
# forces = {ii.__class__.__name__ : ii for ii in tmp_item.getForces()}
#if molecular_mechanics.use_dispersion_correction:
# forces['NonbondedForce'].setUseDispersionCorrection(True)
#if molecular_mechanics.ewald_error_tolerance:
# forces['NonbondedForce'].setEwaldErrorTolerance(molecular_mechanics.ewald_error_tolerance)
#return tmp_item
raise NotImplementedMethodError
pass
| 34.410256 | 99 | 0.727273 | from molsysmt._private.exceptions import *
from molsysmt._private.digestion import *
from .is_openmm_Topology import is_openmm_Topology
def to_openmm_System(item, atom_indices='all', forcefield=None, parameters=None, check=True):
if check:
try:
is_openmm_Topology(item)
except:
raise WrongFormError('openmm.Topology')
try:
atom_indices = digest_atom_indices(atom_indices)
except:
raise WrongAtomIndicesError()
try:
forcefield = digest_forcefield(forcefield)
except:
raise WrongForceFieldError()
raise NotImplementedMethodError
pass
| true | true |
f71a3fafd60fd3e85163023cfc3f27d9dfd7b309 | 1,273 | py | Python | python/app.py | webbhm/GBE_T | 77302ecc57c6997bd646a5a789ec5d55bdc1b8d8 | [
"MIT"
] | null | null | null | python/app.py | webbhm/GBE_T | 77302ecc57c6997bd646a5a789ec5d55bdc1b8d8 | [
"MIT"
] | null | null | null | python/app.py | webbhm/GBE_T | 77302ecc57c6997bd646a5a789ec5d55bdc1b8d8 | [
"MIT"
] | 1 | 2021-07-30T15:54:29.000Z | 2021-07-30T15:54:29.000Z | from flask import Flask, render_template, request
from datetime import datetime
from ChartHelper import ChartHelper
from werkzeug.middleware.proxy_fix import ProxyFix
app = Flask(__name__)
#
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_host=1)
@app.route("/")
def index():
return render_template('index.html', title="Test GBE_T")
@app.route("/hello")
def hello():
return render_template('hello.html', title="Temperature Chart")
@app.route("/temp_chart")
def temp_chart():
ch = ChartHelper("Temperature")
arr = ch.get_array()
return render_template('temp_chart.html', title="Temperature Chart", data=arr)
@app.route("/humidity_chart")
def humidity_chart():
ch = ChartHelper("Humidity")
arr = ch.get_array()
return render_template('humidity_chart.html', title="Humidity Chart", data=arr)
@app.route("/pressure_chart")
def pressure_chart():
ch = ChartHelper("Pressure")
arr = ch.get_array()
return render_template('pressure_chart.html', title="Pressure Chart", data=arr)
@app.route("/co2_chart")
def co2_chart():
ch = ChartHelper("CO2")
arr = ch.get_array()
return render_template('co2_chart.html', title="CO2 Chart", data=arr)
if __name__ == "__main__":
app.run(host='0.0.0.0', port=5000, debug=True) | 27.673913 | 83 | 0.711705 | from flask import Flask, render_template, request
from datetime import datetime
from ChartHelper import ChartHelper
from werkzeug.middleware.proxy_fix import ProxyFix
app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_host=1)
@app.route("/")
def index():
return render_template('index.html', title="Test GBE_T")
@app.route("/hello")
def hello():
return render_template('hello.html', title="Temperature Chart")
@app.route("/temp_chart")
def temp_chart():
ch = ChartHelper("Temperature")
arr = ch.get_array()
return render_template('temp_chart.html', title="Temperature Chart", data=arr)
@app.route("/humidity_chart")
def humidity_chart():
ch = ChartHelper("Humidity")
arr = ch.get_array()
return render_template('humidity_chart.html', title="Humidity Chart", data=arr)
@app.route("/pressure_chart")
def pressure_chart():
ch = ChartHelper("Pressure")
arr = ch.get_array()
return render_template('pressure_chart.html', title="Pressure Chart", data=arr)
@app.route("/co2_chart")
def co2_chart():
ch = ChartHelper("CO2")
arr = ch.get_array()
return render_template('co2_chart.html', title="CO2 Chart", data=arr)
if __name__ == "__main__":
app.run(host='0.0.0.0', port=5000, debug=True) | true | true |
f71a408dbfa7813e062114f0338906e60d2e2f3e | 15,336 | py | Python | viz/renderer.py | AK391/stylegan_xl | 9854d3d0e96eccaad10cab22379c018e1e031cf0 | [
"MIT"
] | 214 | 2022-02-02T02:24:57.000Z | 2022-03-31T18:39:55.000Z | viz/renderer.py | AK391/stylegan_xl | 9854d3d0e96eccaad10cab22379c018e1e031cf0 | [
"MIT"
] | 8 | 2022-02-03T11:21:10.000Z | 2022-03-31T23:26:24.000Z | viz/renderer.py | AK391/stylegan_xl | 9854d3d0e96eccaad10cab22379c018e1e031cf0 | [
"MIT"
] | 2 | 2022-03-08T08:05:55.000Z | 2022-03-31T23:01:58.000Z | # Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# NVIDIA CORPORATION and its licensors retain all intellectual property
# and proprietary rights in and to this software, related documentation
# and any modifications thereto. Any use, reproduction, disclosure or
# distribution of this software and related documentation without an express
# license agreement from NVIDIA CORPORATION is strictly prohibited.
import sys
import copy
import traceback
import numpy as np
import torch
import torch.fft
import torch.nn
import matplotlib.cm
import dnnlib
from torch_utils.ops import upfirdn2d
import legacy # pylint: disable=import-error
#----------------------------------------------------------------------------
class CapturedException(Exception):
    """Exception whose message is a snapshot of the error currently being handled.

    Constructed with no arguments inside an ``except`` block, it records the
    full traceback text of the active exception. Re-capturing an existing
    CapturedException reuses its already-formatted message instead of nesting
    tracebacks. An explicit string message may also be supplied directly.
    """

    def __init__(self, msg=None):
        if msg is None:
            active = sys.exc_info()[1]
            assert active is not None
            # Avoid wrapping a capture inside another capture: reuse its text.
            if isinstance(active, CapturedException):
                msg = str(active)
            else:
                msg = traceback.format_exc()
        assert isinstance(msg, str)
        super().__init__(msg)
#----------------------------------------------------------------------------
class CaptureSuccess(Exception):
    """Control-flow exception carrying a successfully captured result.

    Raising it aborts the surrounding computation while delivering the
    captured value in ``self.out``. NOTE(review): presumably raised from a
    forward hook during layer capture — confirm at the call site (not visible
    in this chunk).
    """
    def __init__(self, out):
        super().__init__()
        self.out = out  # The captured payload (e.g. an intermediate tensor).
#----------------------------------------------------------------------------
def _sinc(x):
y = (x * np.pi).abs()
z = torch.sin(y) / y.clamp(1e-30, float('inf'))
return torch.where(y < 1e-30, torch.ones_like(x), z)
def _lanczos_window(x, a):
x = x.abs() / a
return torch.where(x < 1, _sinc(x), torch.zeros_like(x))
#----------------------------------------------------------------------------
def _construct_affine_bandlimit_filter(mat, a=3, amax=16, aflt=64, up=4, cutoff_in=1, cutoff_out=1):
    """Build a 2D FIR filter that band-limits an affine image transformation.

    The filter is the convolution of a Lanczos-windowed sinc oriented along
    the input axes with one oriented along the transformed (output) axes, so
    resampling with it suppresses aliasing in both coordinate spaces.

    Args:
        mat:        Affine matrix; only the upper-left 2x2 block is read here.
        a:          Lanczos window half-width, in pixels.
        amax:       Half-size of the final cropped filter, in pixels.
        aflt:       Half-size of the oversized working grid used for the
                    FFT-based convolutions; must exceed `amax`.
        up:         Oversampling factor; taps lie on a grid `up` times finer
                    than the pixel grid.
        cutoff_in:  Sinc cutoff frequency in the input space.
        cutoff_out: Sinc cutoff frequency in the output space.

    Returns:
        Square float32 tensor of shape (amax*2*up - 1, amax*2*up - 1).
    """
    assert a <= amax < aflt
    mat = torch.as_tensor(mat).to(torch.float32)

    # Construct 2D filter taps in input & output coordinate spaces.
    # `taps` covers (-aflt, aflt) in pixel units at 1/up spacing, rolled so
    # that index 0 holds the origin (FFT-friendly layout).
    # NOTE(review): meshgrid relies on the default 'ij' indexing; newer torch
    # warns about the missing `indexing=` argument — adding it would require
    # torch >= 1.10, so confirm the supported torch range first.
    taps = ((torch.arange(aflt * up * 2 - 1, device=mat.device) + 1) / up - aflt).roll(1 - aflt * up)
    yi, xi = torch.meshgrid(taps, taps)
    # Coordinates of the same taps after applying the affine 2x2 block.
    xo, yo = (torch.stack([xi, yi], dim=2) @ mat[:2, :2].t()).unbind(2)

    # Convolution of two oriented 2D sinc filters.
    # (Pointwise multiplication of FFTs == circular convolution on the rolled grid.)
    fi = _sinc(xi * cutoff_in) * _sinc(yi * cutoff_in)
    fo = _sinc(xo * cutoff_out) * _sinc(yo * cutoff_out)
    f = torch.fft.ifftn(torch.fft.fftn(fi) * torch.fft.fftn(fo)).real

    # Convolution of two oriented 2D Lanczos windows.
    wi = _lanczos_window(xi, a) * _lanczos_window(yi, a)
    wo = _lanczos_window(xo, a) * _lanczos_window(yo, a)
    w = torch.fft.ifftn(torch.fft.fftn(wi) * torch.fft.fftn(wo)).real

    # Construct windowed FIR filter.
    f = f * w

    # Finalize: undo the roll, crop from `aflt` down to `amax`, then normalize
    # each of the up x up polyphase components (sum over the pixel dims 0 and 2)
    # before flattening back to a single 2D kernel.
    c = (aflt - amax) * up
    f = f.roll([aflt * up - 1] * 2, dims=[0,1])[c:-c, c:-c]
    f = torch.nn.functional.pad(f, [0, 1, 0, 1]).reshape(amax * 2, up, amax * 2, up)
    f = f / f.sum([0,2], keepdim=True) / (up ** 2)
    f = f.reshape(amax * 2 * up, amax * 2 * up)[:-1, :-1]
    return f
#----------------------------------------------------------------------------
def _apply_affine_transformation(x, mat, up=4, **filter_kwargs):
    """Warp image batch *x* by affine matrix *mat* with anti-aliased resampling.

    The input is first upsampled through a transform-matched band-limiting
    filter, then sampled bilinearly at the transformed grid positions.

    Args:
        x:   Input images of shape (N, C, H, W).
        mat: 3x3 affine matrix; its inverse parameterizes the sampling grid,
             and it is forwarded to the filter construction.
        up:  Oversampling factor used for the filtering step.
        **filter_kwargs: Extra arguments for _construct_affine_bandlimit_filter().

    Returns:
        (z, m): the warped images and a same-shaped mask that is ~1 where the
        output was sampled from the valid (unpadded) interior of the input.
    """
    _N, _C, H, W = x.shape
    mat = torch.as_tensor(mat).to(dtype=torch.float32, device=x.device)

    # Construct filter.
    f = _construct_affine_bandlimit_filter(mat, up=up, **filter_kwargs)
    assert f.ndim == 2 and f.shape[0] == f.shape[1] and f.shape[0] % 2 == 1
    p = f.shape[0] // 2  # Filter radius, in upsampled pixels.

    # Construct sampling grid from the inverse transform. The translation and
    # scale tweaks below account for grid_sample's normalized coordinates and
    # for the border of `p` upsampled pixels added by the filtering step.
    theta = mat.inverse()
    theta[:2, 2] *= 2
    theta[0, 2] += 1 / up / W
    theta[1, 2] += 1 / up / H
    theta[0, :] *= W / (W + p / up * 2)
    theta[1, :] *= H / (H + p / up * 2)
    theta = theta[:2, :3].unsqueeze(0).repeat([x.shape[0], 1, 1])
    g = torch.nn.functional.affine_grid(theta, x.shape, align_corners=False)

    # Resample image: band-limit + upsample, then bilinear lookup at the
    # transformed grid positions.
    y = upfirdn2d.upsample2d(x=x, f=f, up=up, padding=p)
    z = torch.nn.functional.grid_sample(y, g, mode='bilinear', padding_mode='zeros', align_corners=False)

    # Form mask: ones over the interior of the upsampled image (excluding the
    # filter-padded border), warped with the same grid using nearest lookup.
    m = torch.zeros_like(y)
    c = p * 2 + 1
    m[:, :, c:-c, c:-c] = 1
    m = torch.nn.functional.grid_sample(m, g, mode='nearest', padding_mode='zeros', align_corners=False)
    return z, m
#----------------------------------------------------------------------------
class Renderer:
def __init__(self):
self._device = torch.device('cuda')
self._pkl_data = dict() # {pkl: dict | CapturedException, ...}
self._networks = dict() # {cache_key: torch.nn.Module, ...}
self._pinned_bufs = dict() # {(shape, dtype): torch.Tensor, ...}
self._cmaps = dict() # {name: torch.Tensor, ...}
self._is_timing = False
self._start_event = torch.cuda.Event(enable_timing=True)
self._end_event = torch.cuda.Event(enable_timing=True)
self._net_layers = dict() # {cache_key: [dnnlib.EasyDict, ...], ...}
def render(self, **args):
self._is_timing = True
self._start_event.record(torch.cuda.current_stream(self._device))
res = dnnlib.EasyDict()
try:
self._render_impl(res, **args)
except:
res.error = CapturedException()
self._end_event.record(torch.cuda.current_stream(self._device))
if 'image' in res:
res.image = self.to_cpu(res.image).numpy()
if 'stats' in res:
res.stats = self.to_cpu(res.stats).numpy()
if 'error' in res:
res.error = str(res.error)
if self._is_timing:
self._end_event.synchronize()
res.render_time = self._start_event.elapsed_time(self._end_event) * 1e-3
self._is_timing = False
return res
    def get_network(self, pkl, key, **tweak_kwargs):
        """Load network pickle *pkl* and return its entry *key*, with caching.

        The raw pickle contents are cached per path in self._pkl_data, and the
        device-resident, tweaked copy of the network is cached per
        (network, device, tweak kwargs) in self._networks. Failures are cached
        too, so a bad pickle is only attempted once and subsequent calls
        re-raise the stored CapturedException.

        Raises:
            CapturedException: if loading or preparing the network failed.
        """
        data = self._pkl_data.get(pkl, None)
        if data is None:
            # First request for this pickle: load it and cache the outcome
            # (including failure) so the load is attempted only once.
            print(f'Loading "{pkl}"... ', end='', flush=True)
            try:
                with dnnlib.util.open_url(pkl, verbose=False) as f:
                    data = legacy.load_network_pkl(f)
                print('Done.')
            except:
                # NOTE(review): bare except also converts KeyboardInterrupt /
                # SystemExit during a download into a captured error —
                # presumably intentional for GUI display; confirm.
                data = CapturedException()
                print('Failed!')
            self._pkl_data[pkl] = data
            self._ignore_timing()  # Loading time must not count as render time.
        if isinstance(data, CapturedException):
            raise data

        orig_net = data[key]
        # Cache the prepared network per device and per tweak configuration.
        cache_key = (orig_net, self._device, tuple(sorted(tweak_kwargs.items())))
        net = self._networks.get(cache_key, None)
        if net is None:
            try:
                # Deep-copy so tweaks never mutate the pickled original.
                net = copy.deepcopy(orig_net)
                net = self._tweak_network(net, **tweak_kwargs)
                net.to(self._device)
            except:
                net = CapturedException()
            self._networks[cache_key] = net
            self._ignore_timing()  # Setup time must not count as render time.
        if isinstance(net, CapturedException):
            raise net
        return net
def _tweak_network(self, net):
# Print diagnostics.
#for name, value in misc.named_params_and_buffers(net):
# if name.endswith('.magnitude_ema'):
# value = value.rsqrt().numpy()
# print(f'{name:<50s}{np.min(value):<16g}{np.max(value):g}')
# if name.endswith('.weight') and value.ndim == 4:
# value = value.square().mean([1,2,3]).sqrt().numpy()
# print(f'{name:<50s}{np.min(value):<16g}{np.max(value):g}')
return net
def _get_pinned_buf(self, ref):
key = (tuple(ref.shape), ref.dtype)
buf = self._pinned_bufs.get(key, None)
if buf is None:
buf = torch.empty(ref.shape, dtype=ref.dtype).pin_memory()
self._pinned_bufs[key] = buf
return buf
def to_device(self, buf):
return self._get_pinned_buf(buf).copy_(buf).to(self._device)
def to_cpu(self, buf):
return self._get_pinned_buf(buf).copy_(buf).clone()
    def _ignore_timing(self):
        """Cancel timing for the current render() call.

        Called when a one-off cost (e.g. loading a pickle in get_network)
        would skew the reported render time; render() skips its timing report
        when this flag has been cleared.
        """
        self._is_timing = False
def _apply_cmap(self, x, name='viridis'):
cmap = self._cmaps.get(name, None)
if cmap is None:
cmap = matplotlib.cm.get_cmap(name)
cmap = cmap(np.linspace(0, 1, num=1024), bytes=True)[:, :3]
cmap = self.to_device(torch.from_numpy(cmap))
self._cmaps[name] = cmap
hi = cmap.shape[0] - 1
x = (x * hi + 0.5).clamp(0, hi).to(torch.int64)
x = torch.nn.functional.embedding(x, cmap)
return x
def _render_impl(self, res,
pkl = None,
w0_seeds = [[0, 1]],
stylemix_idx = [],
stylemix_seed = 0,
trunc_psi = 1,
trunc_cutoff = 0,
random_seed = 0,
noise_mode = 'const',
force_fp32 = False,
layer_name = None,
sel_channels = 3,
base_channel = 0,
img_scale_db = 0,
img_normalize = False,
fft_show = False,
fft_all = True,
fft_range_db = 50,
fft_beta = 8,
input_transform = None,
untransform = False,
):
# Dig up network details.
G = self.get_network(pkl, 'G_ema')
res.img_resolution = G.img_resolution
res.num_ws = G.num_ws
res.has_noise = any('noise_const' in name for name, _buf in G.synthesis.named_buffers())
res.has_input_transform = (hasattr(G.synthesis, 'input') and hasattr(G.synthesis.input, 'transform'))
# Set input transform.
if res.has_input_transform:
m = np.eye(3)
try:
if input_transform is not None:
m = np.linalg.inv(np.asarray(input_transform))
except np.linalg.LinAlgError:
res.error = CapturedException()
G.synthesis.input.transform.copy_(torch.from_numpy(m))
# Generate random latents.
all_seeds = [seed for seed, _weight in w0_seeds] + [stylemix_seed]
all_seeds = list(set(all_seeds))
all_zs = np.zeros([len(all_seeds), G.z_dim], dtype=np.float32)
all_cs = np.zeros([len(all_seeds), G.c_dim], dtype=np.float32)
for idx, seed in enumerate(all_seeds):
rnd = np.random.RandomState(seed)
all_zs[idx] = rnd.randn(G.z_dim)
cls = rnd.randint(G.c_dim)
if G.c_dim > 0:
all_cs[idx, cls] = 1
# Run mapping network.
w_avg = G.mapping.w_avg[cls]
all_zs = self.to_device(torch.from_numpy(all_zs))
all_cs = self.to_device(torch.from_numpy(all_cs))
all_ws = G.mapping(z=all_zs, c=all_cs, truncation_psi=trunc_psi, truncation_cutoff=trunc_cutoff) - w_avg
all_ws = dict(zip(all_seeds, all_ws))
# Calculate final W.
w = torch.stack([all_ws[seed] * weight for seed, weight in w0_seeds]).sum(dim=0, keepdim=True)
stylemix_idx = [idx for idx in stylemix_idx if 0 <= idx < G.num_ws]
if len(stylemix_idx) > 0:
w[:, stylemix_idx] = all_ws[stylemix_seed][np.newaxis, stylemix_idx]
w += w_avg
# Run synthesis network.
synthesis_kwargs = dnnlib.EasyDict(noise_mode=noise_mode, force_fp32=force_fp32)
torch.manual_seed(random_seed)
out, layers = self.run_synthesis_net(G.synthesis, w, capture_layer=layer_name, **synthesis_kwargs)
# Update layer list.
cache_key = (G.synthesis, tuple(sorted(synthesis_kwargs.items())))
if cache_key not in self._net_layers:
if layer_name is not None:
torch.manual_seed(random_seed)
_out, layers = self.run_synthesis_net(G.synthesis, w, **synthesis_kwargs)
self._net_layers[cache_key] = layers
res.layers = self._net_layers[cache_key]
# Untransform.
if untransform and res.has_input_transform:
out, _mask = _apply_affine_transformation(out.to(torch.float32), G.synthesis.input.transform, amax=6) # Override amax to hit the fast path in upfirdn2d.
# Select channels and compute statistics.
out = out[0].to(torch.float32)
if sel_channels > out.shape[0]:
sel_channels = 1
base_channel = max(min(base_channel, out.shape[0] - sel_channels), 0)
sel = out[base_channel : base_channel + sel_channels]
res.stats = torch.stack([
out.mean(), sel.mean(),
out.std(), sel.std(),
out.norm(float('inf')), sel.norm(float('inf')),
])
# Scale and convert to uint8.
img = sel
if img_normalize:
img = img / img.norm(float('inf'), dim=[1,2], keepdim=True).clip(1e-8, 1e8)
img = img * (10 ** (img_scale_db / 20))
img = (img * 127.5 + 128).clamp(0, 255).to(torch.uint8).permute(1, 2, 0)
res.image = img
# FFT.
if fft_show:
sig = out if fft_all else sel
sig = sig.to(torch.float32)
sig = sig - sig.mean(dim=[1,2], keepdim=True)
sig = sig * torch.kaiser_window(sig.shape[1], periodic=False, beta=fft_beta, device=self._device)[None, :, None]
sig = sig * torch.kaiser_window(sig.shape[2], periodic=False, beta=fft_beta, device=self._device)[None, None, :]
fft = torch.fft.fftn(sig, dim=[1,2]).abs().square().sum(dim=0)
fft = fft.roll(shifts=[fft.shape[0] // 2, fft.shape[1] // 2], dims=[0,1])
fft = (fft / fft.mean()).log10() * 10 # dB
fft = self._apply_cmap((fft / fft_range_db + 1) / 2)
res.image = torch.cat([img.expand_as(fft), fft], dim=1)
@staticmethod
def run_synthesis_net(net, *args, capture_layer=None, **kwargs): # => out, layers
submodule_names = {mod: name for name, mod in net.named_modules()}
unique_names = set()
layers = []
def module_hook(module, _inputs, outputs):
outputs = list(outputs) if isinstance(outputs, (tuple, list)) else [outputs]
outputs = [out for out in outputs if isinstance(out, torch.Tensor) and out.ndim in [4, 5]]
for idx, out in enumerate(outputs):
if out.ndim == 5: # G-CNN => remove group dimension.
out = out.mean(2)
name = submodule_names[module]
if name == '':
name = 'output'
if len(outputs) > 1:
name += f':{idx}'
if name in unique_names:
suffix = 2
while f'{name}_{suffix}' in unique_names:
suffix += 1
name += f'_{suffix}'
unique_names.add(name)
shape = [int(x) for x in out.shape]
dtype = str(out.dtype).split('.')[-1]
layers.append(dnnlib.EasyDict(name=name, shape=shape, dtype=dtype))
if name == capture_layer:
raise CaptureSuccess(out)
hooks = [module.register_forward_hook(module_hook) for module in net.modules()]
try:
out = net(*args, **kwargs)
except CaptureSuccess as e:
out = e.out
for hook in hooks:
hook.remove()
return out, layers
#----------------------------------------------------------------------------
| 40.46438 | 164 | 0.555099 |
import sys
import copy
import traceback
import numpy as np
import torch
import torch.fft
import torch.nn
import matplotlib.cm
import dnnlib
from torch_utils.ops import upfirdn2d
import legacy
class CapturedException(Exception):
def __init__(self, msg=None):
if msg is None:
_type, value, _traceback = sys.exc_info()
assert value is not None
if isinstance(value, CapturedException):
msg = str(value)
else:
msg = traceback.format_exc()
assert isinstance(msg, str)
super().__init__(msg)
class CaptureSuccess(Exception):
def __init__(self, out):
super().__init__()
self.out = out
def _sinc(x):
y = (x * np.pi).abs()
z = torch.sin(y) / y.clamp(1e-30, float('inf'))
return torch.where(y < 1e-30, torch.ones_like(x), z)
def _lanczos_window(x, a):
x = x.abs() / a
return torch.where(x < 1, _sinc(x), torch.zeros_like(x))
def _construct_affine_bandlimit_filter(mat, a=3, amax=16, aflt=64, up=4, cutoff_in=1, cutoff_out=1):
assert a <= amax < aflt
mat = torch.as_tensor(mat).to(torch.float32)
taps = ((torch.arange(aflt * up * 2 - 1, device=mat.device) + 1) / up - aflt).roll(1 - aflt * up)
yi, xi = torch.meshgrid(taps, taps)
xo, yo = (torch.stack([xi, yi], dim=2) @ mat[:2, :2].t()).unbind(2)
fi = _sinc(xi * cutoff_in) * _sinc(yi * cutoff_in)
fo = _sinc(xo * cutoff_out) * _sinc(yo * cutoff_out)
f = torch.fft.ifftn(torch.fft.fftn(fi) * torch.fft.fftn(fo)).real
wi = _lanczos_window(xi, a) * _lanczos_window(yi, a)
wo = _lanczos_window(xo, a) * _lanczos_window(yo, a)
w = torch.fft.ifftn(torch.fft.fftn(wi) * torch.fft.fftn(wo)).real
f = f * w
c = (aflt - amax) * up
f = f.roll([aflt * up - 1] * 2, dims=[0,1])[c:-c, c:-c]
f = torch.nn.functional.pad(f, [0, 1, 0, 1]).reshape(amax * 2, up, amax * 2, up)
f = f / f.sum([0,2], keepdim=True) / (up ** 2)
f = f.reshape(amax * 2 * up, amax * 2 * up)[:-1, :-1]
return f
def _apply_affine_transformation(x, mat, up=4, **filter_kwargs):
_N, _C, H, W = x.shape
mat = torch.as_tensor(mat).to(dtype=torch.float32, device=x.device)
f = _construct_affine_bandlimit_filter(mat, up=up, **filter_kwargs)
assert f.ndim == 2 and f.shape[0] == f.shape[1] and f.shape[0] % 2 == 1
p = f.shape[0] // 2
theta = mat.inverse()
theta[:2, 2] *= 2
theta[0, 2] += 1 / up / W
theta[1, 2] += 1 / up / H
theta[0, :] *= W / (W + p / up * 2)
theta[1, :] *= H / (H + p / up * 2)
theta = theta[:2, :3].unsqueeze(0).repeat([x.shape[0], 1, 1])
g = torch.nn.functional.affine_grid(theta, x.shape, align_corners=False)
y = upfirdn2d.upsample2d(x=x, f=f, up=up, padding=p)
z = torch.nn.functional.grid_sample(y, g, mode='bilinear', padding_mode='zeros', align_corners=False)
m = torch.zeros_like(y)
c = p * 2 + 1
m[:, :, c:-c, c:-c] = 1
m = torch.nn.functional.grid_sample(m, g, mode='nearest', padding_mode='zeros', align_corners=False)
return z, m
class Renderer:
def __init__(self):
self._device = torch.device('cuda')
self._pkl_data = dict()
self._networks = dict()
self._pinned_bufs = dict()
self._cmaps = dict()
self._is_timing = False
self._start_event = torch.cuda.Event(enable_timing=True)
self._end_event = torch.cuda.Event(enable_timing=True)
self._net_layers = dict()
def render(self, **args):
self._is_timing = True
self._start_event.record(torch.cuda.current_stream(self._device))
res = dnnlib.EasyDict()
try:
self._render_impl(res, **args)
except:
res.error = CapturedException()
self._end_event.record(torch.cuda.current_stream(self._device))
if 'image' in res:
res.image = self.to_cpu(res.image).numpy()
if 'stats' in res:
res.stats = self.to_cpu(res.stats).numpy()
if 'error' in res:
res.error = str(res.error)
if self._is_timing:
self._end_event.synchronize()
res.render_time = self._start_event.elapsed_time(self._end_event) * 1e-3
self._is_timing = False
return res
def get_network(self, pkl, key, **tweak_kwargs):
data = self._pkl_data.get(pkl, None)
if data is None:
print(f'Loading "{pkl}"... ', end='', flush=True)
try:
with dnnlib.util.open_url(pkl, verbose=False) as f:
data = legacy.load_network_pkl(f)
print('Done.')
except:
data = CapturedException()
print('Failed!')
self._pkl_data[pkl] = data
self._ignore_timing()
if isinstance(data, CapturedException):
raise data
orig_net = data[key]
cache_key = (orig_net, self._device, tuple(sorted(tweak_kwargs.items())))
net = self._networks.get(cache_key, None)
if net is None:
try:
net = copy.deepcopy(orig_net)
net = self._tweak_network(net, **tweak_kwargs)
net.to(self._device)
except:
net = CapturedException()
self._networks[cache_key] = net
self._ignore_timing()
if isinstance(net, CapturedException):
raise net
return net
def _tweak_network(self, net):
return net
def _get_pinned_buf(self, ref):
key = (tuple(ref.shape), ref.dtype)
buf = self._pinned_bufs.get(key, None)
if buf is None:
buf = torch.empty(ref.shape, dtype=ref.dtype).pin_memory()
self._pinned_bufs[key] = buf
return buf
def to_device(self, buf):
return self._get_pinned_buf(buf).copy_(buf).to(self._device)
def to_cpu(self, buf):
return self._get_pinned_buf(buf).copy_(buf).clone()
def _ignore_timing(self):
self._is_timing = False
def _apply_cmap(self, x, name='viridis'):
cmap = self._cmaps.get(name, None)
if cmap is None:
cmap = matplotlib.cm.get_cmap(name)
cmap = cmap(np.linspace(0, 1, num=1024), bytes=True)[:, :3]
cmap = self.to_device(torch.from_numpy(cmap))
self._cmaps[name] = cmap
hi = cmap.shape[0] - 1
x = (x * hi + 0.5).clamp(0, hi).to(torch.int64)
x = torch.nn.functional.embedding(x, cmap)
return x
def _render_impl(self, res,
pkl = None,
w0_seeds = [[0, 1]],
stylemix_idx = [],
stylemix_seed = 0,
trunc_psi = 1,
trunc_cutoff = 0,
random_seed = 0,
noise_mode = 'const',
force_fp32 = False,
layer_name = None,
sel_channels = 3,
base_channel = 0,
img_scale_db = 0,
img_normalize = False,
fft_show = False,
fft_all = True,
fft_range_db = 50,
fft_beta = 8,
input_transform = None,
untransform = False,
):
G = self.get_network(pkl, 'G_ema')
res.img_resolution = G.img_resolution
res.num_ws = G.num_ws
res.has_noise = any('noise_const' in name for name, _buf in G.synthesis.named_buffers())
res.has_input_transform = (hasattr(G.synthesis, 'input') and hasattr(G.synthesis.input, 'transform'))
if res.has_input_transform:
m = np.eye(3)
try:
if input_transform is not None:
m = np.linalg.inv(np.asarray(input_transform))
except np.linalg.LinAlgError:
res.error = CapturedException()
G.synthesis.input.transform.copy_(torch.from_numpy(m))
all_seeds = [seed for seed, _weight in w0_seeds] + [stylemix_seed]
all_seeds = list(set(all_seeds))
all_zs = np.zeros([len(all_seeds), G.z_dim], dtype=np.float32)
all_cs = np.zeros([len(all_seeds), G.c_dim], dtype=np.float32)
for idx, seed in enumerate(all_seeds):
rnd = np.random.RandomState(seed)
all_zs[idx] = rnd.randn(G.z_dim)
cls = rnd.randint(G.c_dim)
if G.c_dim > 0:
all_cs[idx, cls] = 1
w_avg = G.mapping.w_avg[cls]
all_zs = self.to_device(torch.from_numpy(all_zs))
all_cs = self.to_device(torch.from_numpy(all_cs))
all_ws = G.mapping(z=all_zs, c=all_cs, truncation_psi=trunc_psi, truncation_cutoff=trunc_cutoff) - w_avg
all_ws = dict(zip(all_seeds, all_ws))
w = torch.stack([all_ws[seed] * weight for seed, weight in w0_seeds]).sum(dim=0, keepdim=True)
stylemix_idx = [idx for idx in stylemix_idx if 0 <= idx < G.num_ws]
if len(stylemix_idx) > 0:
w[:, stylemix_idx] = all_ws[stylemix_seed][np.newaxis, stylemix_idx]
w += w_avg
synthesis_kwargs = dnnlib.EasyDict(noise_mode=noise_mode, force_fp32=force_fp32)
torch.manual_seed(random_seed)
out, layers = self.run_synthesis_net(G.synthesis, w, capture_layer=layer_name, **synthesis_kwargs)
cache_key = (G.synthesis, tuple(sorted(synthesis_kwargs.items())))
if cache_key not in self._net_layers:
if layer_name is not None:
torch.manual_seed(random_seed)
_out, layers = self.run_synthesis_net(G.synthesis, w, **synthesis_kwargs)
self._net_layers[cache_key] = layers
res.layers = self._net_layers[cache_key]
if untransform and res.has_input_transform:
out, _mask = _apply_affine_transformation(out.to(torch.float32), G.synthesis.input.transform, amax=6)
out = out[0].to(torch.float32)
if sel_channels > out.shape[0]:
sel_channels = 1
base_channel = max(min(base_channel, out.shape[0] - sel_channels), 0)
sel = out[base_channel : base_channel + sel_channels]
res.stats = torch.stack([
out.mean(), sel.mean(),
out.std(), sel.std(),
out.norm(float('inf')), sel.norm(float('inf')),
])
img = sel
if img_normalize:
img = img / img.norm(float('inf'), dim=[1,2], keepdim=True).clip(1e-8, 1e8)
img = img * (10 ** (img_scale_db / 20))
img = (img * 127.5 + 128).clamp(0, 255).to(torch.uint8).permute(1, 2, 0)
res.image = img
if fft_show:
sig = out if fft_all else sel
sig = sig.to(torch.float32)
sig = sig - sig.mean(dim=[1,2], keepdim=True)
sig = sig * torch.kaiser_window(sig.shape[1], periodic=False, beta=fft_beta, device=self._device)[None, :, None]
sig = sig * torch.kaiser_window(sig.shape[2], periodic=False, beta=fft_beta, device=self._device)[None, None, :]
fft = torch.fft.fftn(sig, dim=[1,2]).abs().square().sum(dim=0)
fft = fft.roll(shifts=[fft.shape[0] // 2, fft.shape[1] // 2], dims=[0,1])
fft = (fft / fft.mean()).log10() * 10
fft = self._apply_cmap((fft / fft_range_db + 1) / 2)
res.image = torch.cat([img.expand_as(fft), fft], dim=1)
@staticmethod
def run_synthesis_net(net, *args, capture_layer=None, **kwargs):
submodule_names = {mod: name for name, mod in net.named_modules()}
unique_names = set()
layers = []
def module_hook(module, _inputs, outputs):
outputs = list(outputs) if isinstance(outputs, (tuple, list)) else [outputs]
outputs = [out for out in outputs if isinstance(out, torch.Tensor) and out.ndim in [4, 5]]
for idx, out in enumerate(outputs):
if out.ndim == 5:
out = out.mean(2)
name = submodule_names[module]
if name == '':
name = 'output'
if len(outputs) > 1:
name += f':{idx}'
if name in unique_names:
suffix = 2
while f'{name}_{suffix}' in unique_names:
suffix += 1
name += f'_{suffix}'
unique_names.add(name)
shape = [int(x) for x in out.shape]
dtype = str(out.dtype).split('.')[-1]
layers.append(dnnlib.EasyDict(name=name, shape=shape, dtype=dtype))
if name == capture_layer:
raise CaptureSuccess(out)
hooks = [module.register_forward_hook(module_hook) for module in net.modules()]
try:
out = net(*args, **kwargs)
except CaptureSuccess as e:
out = e.out
for hook in hooks:
hook.remove()
return out, layers
| true | true |
f71a41378868c94ddf49eb311aa584b642394977 | 1,307 | py | Python | parse.py | itsmehemant123/twitter-hydration | 543ef7019f3c34e281acc08ae45f24c0407939f6 | [
"MIT"
] | 1 | 2018-05-05T04:40:01.000Z | 2018-05-05T04:40:01.000Z | parse.py | itsmehemant123/twitter-hydration | 543ef7019f3c34e281acc08ae45f24c0407939f6 | [
"MIT"
] | null | null | null | parse.py | itsmehemant123/twitter-hydration | 543ef7019f3c34e281acc08ae45f24c0407939f6 | [
"MIT"
] | null | null | null | import os
import json
import time
import logging
from connectors.mongodb.mongohandle import MongoHandle
from twarc import Twarc
logging.basicConfig(level=logging.INFO)
with open('./config/config.json') as data_file:
config = json.load(data_file)
logging.info('Finished parsing config.')
handle = MongoHandle(config)
logging.info('Initialized the Mongo connection.')
t = Twarc(config['twitter']['consumer_key'], config['twitter']['consumer_secret'],
config['twitter']['access_token'], config['twitter']['access_token_secret'])
logging.info('Initialized Twitter connection.')
for source_file in os.listdir('./' + config['source_folder']):
logging.info('Preparing to hydrate: ' + source_file)
tweet_ids = open('./' + config['source_folder'] + '/' + source_file)
new_tweet_ids = []
logging.info('Parsing tweet ids.')
start = time.time()
for line in tweet_ids:
line = line.strip()
if (not handle.is_written(line)):
new_tweet_ids.append(line)
end = time.time()
logging.info('Finished looking for new tweets in %.2f seconds.' % (end - start))
handle.write(t.hydrate(new_tweet_ids), source_file)
tweet_ids.close()
logging.info('Finished hydrating: ' + source_file)
logging.info('Finished hydration task.')
handle.clean()
| 31.878049 | 86 | 0.701607 | import os
import json
import time
import logging
from connectors.mongodb.mongohandle import MongoHandle
from twarc import Twarc
logging.basicConfig(level=logging.INFO)
with open('./config/config.json') as data_file:
config = json.load(data_file)
logging.info('Finished parsing config.')
handle = MongoHandle(config)
logging.info('Initialized the Mongo connection.')
t = Twarc(config['twitter']['consumer_key'], config['twitter']['consumer_secret'],
config['twitter']['access_token'], config['twitter']['access_token_secret'])
logging.info('Initialized Twitter connection.')
for source_file in os.listdir('./' + config['source_folder']):
logging.info('Preparing to hydrate: ' + source_file)
tweet_ids = open('./' + config['source_folder'] + '/' + source_file)
new_tweet_ids = []
logging.info('Parsing tweet ids.')
start = time.time()
for line in tweet_ids:
line = line.strip()
if (not handle.is_written(line)):
new_tweet_ids.append(line)
end = time.time()
logging.info('Finished looking for new tweets in %.2f seconds.' % (end - start))
handle.write(t.hydrate(new_tweet_ids), source_file)
tweet_ids.close()
logging.info('Finished hydrating: ' + source_file)
logging.info('Finished hydration task.')
handle.clean()
| true | true |
f71a414dc127cdf908b1db847cc87bf66e249e05 | 515 | py | Python | nkrsiSystem/configDefault.py | Kanciarzek/NkrsiSystem | ee3d19b1419ee64ccef05051a3892663e7d71625 | [
"MIT"
] | null | null | null | nkrsiSystem/configDefault.py | Kanciarzek/NkrsiSystem | ee3d19b1419ee64ccef05051a3892663e7d71625 | [
"MIT"
] | null | null | null | nkrsiSystem/configDefault.py | Kanciarzek/NkrsiSystem | ee3d19b1419ee64ccef05051a3892663e7d71625 | [
"MIT"
] | null | null | null | import os
DEBUG_MODE = True
SECRET_KEY = 'secret'
# Database config
DB_USER = 'postgres'
DB_NAME = 'postgres'
DB_PASSWORD = ''
DB_HOST = os.environ.get('POSTGRES_HOST', 'localhost')
DB_PORT = os.environ.get('POSTGRES_PORT', 5432)
# Slack config
SLACK_TOKEN = 'token'
SLACK_API_INVITE_URL = 'https://slack.com/api/users.admin.invite'
# Email config
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 587
PROJECTOR_IP = ''
DOOR_ENDPOINT = ''
FACEBOOK_TOKEN = ''
GOOGLE_MAPS_API_KEY = ''
| 18.392857 | 65 | 0.728155 | import os
DEBUG_MODE = True
SECRET_KEY = 'secret'
DB_USER = 'postgres'
DB_NAME = 'postgres'
DB_PASSWORD = ''
DB_HOST = os.environ.get('POSTGRES_HOST', 'localhost')
DB_PORT = os.environ.get('POSTGRES_PORT', 5432)
SLACK_TOKEN = 'token'
SLACK_API_INVITE_URL = 'https://slack.com/api/users.admin.invite'
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 587
PROJECTOR_IP = ''
DOOR_ENDPOINT = ''
FACEBOOK_TOKEN = ''
GOOGLE_MAPS_API_KEY = ''
| true | true |
f71a41f2f041e11ccff687d63b1853750bc8274a | 1,270 | py | Python | scripts/05_modules/snap/snap_enable_snapping_3d_point_r14.py | mgoldshteyn/cinema4d_py_sdk_extended | b6c67f1dbae182c09ccbcc1df51f0e7ea4816074 | [
"Apache-2.0"
] | null | null | null | scripts/05_modules/snap/snap_enable_snapping_3d_point_r14.py | mgoldshteyn/cinema4d_py_sdk_extended | b6c67f1dbae182c09ccbcc1df51f0e7ea4816074 | [
"Apache-2.0"
] | null | null | null | scripts/05_modules/snap/snap_enable_snapping_3d_point_r14.py | mgoldshteyn/cinema4d_py_sdk_extended | b6c67f1dbae182c09ccbcc1df51f0e7ea4816074 | [
"Apache-2.0"
] | null | null | null | """
Copyright: MAXON Computer GmbH
Description:
- Enables the snap if it's not already the case.
- Sets it to 3D Type and also to Point mode.
Class/method highlighted:
- c4d.modules.snap
- c4d.modules.snap.IsSnapEnabled()
- c4d.modules.snap.GetSnapSettings()
- c4d.modules.snap.SetSnapSettings()
- c4d.modules.snap.EnableSnap()
Compatible:
- Win / Mac
- R14, R15, R16, R17, R18, R19, R20, R21, S22
"""
import c4d
def main():
# Checks snap state
res = c4d.modules.snap.IsSnapEnabled(doc)
if not res:
# Enables snap if not activated
c4d.modules.snap.EnableSnap(True, doc)
print("Snap Enabled:", c4d.modules.snap.IsSnapEnabled(doc))
# Retrieves the BaseContainer storing all the settings
settings = c4d.modules.snap.GetSnapSettings(doc)
# Defines the snapping Type to 3D snapping
settings[c4d.SNAP_SETTINGS_MODE] = c4d.SNAP_SETTINGS_MODE_3D
# Pushes back modification made in the memory BaseContainer to the BaseContainer setting
c4d.modules.snap.SetSnapSettings(doc, settings)
# Enables point snap
c4d.modules.snap.EnableSnap(True, doc, c4d.SNAPMODE_POINT)
# Pushes an update event to Cinema 4D
c4d.EventAdd()
if __name__ == '__main__':
main()
| 26.458333 | 92 | 0.693701 | import c4d
def main():
res = c4d.modules.snap.IsSnapEnabled(doc)
if not res:
c4d.modules.snap.EnableSnap(True, doc)
print("Snap Enabled:", c4d.modules.snap.IsSnapEnabled(doc))
settings = c4d.modules.snap.GetSnapSettings(doc)
settings[c4d.SNAP_SETTINGS_MODE] = c4d.SNAP_SETTINGS_MODE_3D
c4d.modules.snap.SetSnapSettings(doc, settings)
c4d.modules.snap.EnableSnap(True, doc, c4d.SNAPMODE_POINT)
c4d.EventAdd()
if __name__ == '__main__':
main()
| true | true |
f71a4257afb79b3e6037c8b3e3e9cc6b87d2a7dc | 212 | py | Python | analise/urls.py | IgorAlmeeida/coronaDataScience | f3b7fb4601870882483cc6ef913c6dcee83432da | [
"MIT"
] | null | null | null | analise/urls.py | IgorAlmeeida/coronaDataScience | f3b7fb4601870882483cc6ef913c6dcee83432da | [
"MIT"
] | null | null | null | analise/urls.py | IgorAlmeeida/coronaDataScience | f3b7fb4601870882483cc6ef913c6dcee83432da | [
"MIT"
] | null | null | null |
from django.contrib import admin
from django.urls import path
from .views import home, infoDiaEstado
urlpatterns = [
path('', home),
path('info_dia_estado', infoDiaEstado, name="dataInfoDiaEstado"),
]
| 19.272727 | 69 | 0.735849 |
from django.contrib import admin
from django.urls import path
from .views import home, infoDiaEstado
urlpatterns = [
path('', home),
path('info_dia_estado', infoDiaEstado, name="dataInfoDiaEstado"),
]
| true | true |
f71a428d471b125b47b81715ffe4cf49f8639526 | 15,466 | py | Python | package/tests/test_domain_services/test_vpc.py | DYeag/AWS-Shell | b5318e72373b1a948ac6aced1c0bb4566d5ae46f | [
"0BSD"
] | 3 | 2016-08-22T07:14:56.000Z | 2018-03-16T07:31:44.000Z | package/tests/test_domain_services/test_vpc.py | DYeag/AWS-Shell | b5318e72373b1a948ac6aced1c0bb4566d5ae46f | [
"0BSD"
] | 470 | 2016-03-24T13:38:08.000Z | 2022-02-05T01:14:05.000Z | package/tests/test_domain_services/test_vpc.py | DYeag/AWS-Shell | b5318e72373b1a948ac6aced1c0bb4566d5ae46f | [
"0BSD"
] | 9 | 2016-06-20T11:41:54.000Z | 2020-11-21T00:42:45.000Z | from unittest import TestCase
from mock import Mock, call
from cloudshell.cp.aws.domain.services.ec2.vpc import VPCService
from cloudshell.cp.aws.domain.services.waiters.vpc_peering import VpcPeeringConnectionWaiter
class TestVPCService(TestCase):
def setUp(self):
self.tag_service = Mock()
self.tags = Mock()
self.tag_service.get_default_tags = Mock(return_value=self.tags)
self.subnet_service = Mock()
self.logger = Mock()
self.aws_ec2_datamodel = Mock()
self.ec2_client= Mock()
self.ec2_session = Mock()
self.vpc = Mock()
self.vpc_id = Mock()
self.ec2_session.create_vpc = Mock(return_value=self.vpc)
self.ec2_session.Vpc = Mock(return_value=self.vpc)
self.s3_session = Mock()
self.reservation = Mock()
self.cidr = Mock()
self.vpc_waiter = Mock()
self.vpc_peering_waiter = Mock()
self.instance_service = Mock()
self.sg_service = Mock()
self.route_table_service = Mock()
self.traffic_mirror_service = Mock()
self.vpc_service = VPCService(tag_service=self.tag_service,
subnet_service=self.subnet_service,
instance_service=self.instance_service,
vpc_waiter=self.vpc_waiter,
vpc_peering_waiter=self.vpc_peering_waiter,
sg_service=self.sg_service,
route_table_service=self.route_table_service,
traffic_mirror_service=self.traffic_mirror_service)
def test_get_all_internet_gateways(self):
internet_gate = Mock()
self.vpc.internet_gateways = Mock()
self.vpc.internet_gateways.all = Mock(return_value=[internet_gate])
res = self.vpc_service.get_all_internet_gateways(self.vpc)
self.assertEquals(res, [internet_gate])
def test_remove_all_internet_gateways(self):
internet_gate = Mock()
self.vpc.internet_gateways = Mock()
self.vpc.internet_gateways.all = Mock(return_value=[internet_gate])
self.vpc_service.remove_all_internet_gateways(self.vpc)
internet_gate.detach_from_vpc.assert_called_with(VpcId=self.vpc.id)
self.assertTrue(internet_gate.delete.called)
def test_create_and_attach_internet_gateway(self):
internet_gate = Mock()
internet_gate.id = 'super_id'
self.ec2_session.create_internet_gateway = Mock(return_value=internet_gate)
internet_gateway_id = self.vpc_service.create_and_attach_internet_gateway(self.ec2_session, self.vpc, self.reservation)
self.assertTrue(self.ec2_session.create_internet_gateway.called)
self.tag_service.get_default_tags.assert_called_once_with("IGW {0}".format(self.reservation.reservation_id),self.reservation)
self.tag_service.set_ec2_resource_tags.assert_called_once_with(resource=internet_gate, tags=self.tag_service.get_default_tags())
self.assertEqual(internet_gateway_id, internet_gate.id)
def test_create_vpc_for_reservation(self):
vpc = self.vpc_service.create_vpc_for_reservation(self.ec2_session, self.reservation, self.cidr)
vpc_name = self.vpc_service.VPC_RESERVATION.format(self.reservation.reservation_id)
self.vpc_waiter.wait.assert_called_once_with(vpc=vpc, state=self.vpc_waiter.AVAILABLE)
self.assertEqual(self.vpc, vpc)
self.ec2_session.create_vpc.assert_called_once_with(CidrBlock=self.cidr)
self.tag_service.get_default_tags.assert_called_once_with(vpc_name, self.reservation)
self.tag_service.set_ec2_resource_tags.assert_called_once_with(self.vpc, self.tags)
def test_find_vpc_for_reservation(self):
self.ec2_session.vpcs = Mock()
self.ec2_session.vpcs.filter = Mock(return_value=[self.vpc])
vpc = self.vpc_service.find_vpc_for_reservation(self.ec2_session, self.reservation)
self.assertEqual(vpc, self.vpc)
def test_find_vpc_for_reservation_no_vpc(self):
self.ec2_session.vpcs = Mock()
self.ec2_session.vpcs.filter = Mock(return_value=[])
vpc = self.vpc_service.find_vpc_for_reservation(self.ec2_session, self.reservation)
self.assertIsNone(vpc)
def test_find_vpc_for_reservation_too_many(self):
self.ec2_session.vpcs = Mock()
self.ec2_session.vpcs.filter = Mock(return_value=[1, 2])
self.assertRaises(ValueError, self.vpc_service.find_vpc_for_reservation, self.ec2_session, self.reservation)
def test_peer_vpc(self):
def change_to_active(vpc_peering_connection):
vpc_peering_connection.status['Code'] = VpcPeeringConnectionWaiter.ACTIVE
vpc1 = Mock()
vpc2 = Mock()
peered = Mock()
peered.status = {'Code': VpcPeeringConnectionWaiter.PENDING_ACCEPTANCE}
peered.accept = Mock(side_effect=change_to_active(peered))
self.ec2_session.create_vpc_peering_connection = Mock(return_value=peered)
reservation_model = Mock()
res = self.vpc_service.peer_vpcs(self.ec2_session, vpc1, vpc2, reservation_model,Mock())
self.ec2_session.create_vpc_peering_connection.assert_called_once_with(VpcId=vpc1, PeerVpcId=vpc2)
self.assertEqual(peered.status['Code'], VpcPeeringConnectionWaiter.ACTIVE)
self.assertEqual(res, peered.id)
def test_remove_all_peering(self):
peering = Mock()
peering.status = {'Code': 'ok'}
peering1 = Mock()
peering1.status = {'Code': 'failed'}
peering2 = Mock()
peering2.status = {'Code': 'aa'}
self.vpc.accepted_vpc_peering_connections = Mock()
self.vpc.accepted_vpc_peering_connections.all = Mock(return_value=[peering, peering1, peering2])
res = self.vpc_service.remove_all_peering(self.vpc)
self.assertIsNotNone(res)
self.assertTrue(peering.delete.called)
self.assertFalse(peering1.delete.called)
self.assertTrue(peering2.delete.called)
def test_remove_all_sgs(self):
sg = Mock()
self.vpc.security_groups = Mock()
self.vpc.security_groups.all = Mock(return_value=[sg])
res = self.vpc_service.remove_all_security_groups(self.vpc, self.reservation.reservation_id )
self.assertIsNotNone(res)
self.sg_service.delete_security_group.assert_called_once_with(sg)
# When a trying to delete security group(isolated) and it is referenced in another's group rule.
# we get resource sg-XXXXXX has a dependent object, so to fix that , isolated group shall be deleted last.
def test_remove_all_sgs_isolated_group_removed_last(self):
sg = Mock()
sg.group_name = 'dummy'
isolated_sg = Mock()
isolated_sg.group_name = self.sg_service.sandbox_isolated_sg_name(self.reservation.reservation_id)
isolated_at_start_sgs = [isolated_sg, sg]
isolated_at_end_sgs_calls = [call(sg), call(isolated_sg)]
self.vpc.security_groups = Mock()
self.vpc.security_groups.all = Mock(return_value=isolated_at_start_sgs)
res = self.vpc_service.remove_all_security_groups(self.vpc, self.reservation.reservation_id )
self.assertIsNotNone(res)
self.sg_service.delete_security_group.assert_has_calls(isolated_at_end_sgs_calls, any_order=False)
def test_remove_subnets(self):
subnet = Mock()
self.vpc.subnets = Mock()
self.vpc.subnets.all = Mock(return_value=[subnet])
res = self.vpc_service.remove_all_subnets(self.vpc)
self.assertIsNotNone(res)
self.subnet_service.delete_subnet.assert_called_once_with(subnet)
def test_delete_all_instances(self):
instance = Mock()
self.vpc.instances = Mock()
self.vpc.instances.all = Mock(return_value=[instance])
res = self.vpc_service.delete_all_instances(self.vpc)
self.assertIsNotNone(res)
self.instance_service.terminate_instances.assert_called_once_with([instance])
def test_delete_vpc(self):
res = self.vpc_service.delete_vpc(self.vpc)
self.assertTrue(self.vpc.delete.called)
self.assertIsNotNone(res)
def test_get_or_create_subnet_for_vpc_1(self): # Scenario(1): Get
# Arrange
subnet = Mock()
self.subnet_service.get_first_or_none_subnet_from_vpc = Mock(return_value=subnet)
# Act
result = self.vpc_service.get_or_create_subnet_for_vpc(reservation=self.reservation,
cidr="1.2.3.4/24", alias="MySubnet",
vpc=self.vpc,
ec2_client=self.ec2_client,
aws_ec2_datamodel=self.aws_ec2_datamodel,
logger=self.logger)
# Assert
self.assertEqual(result, subnet)
def test_get_or_create_subnet_for_vpc_2(self): # Scenario(2): Create
# Arrange
subnet = Mock()
self.subnet_service.get_first_or_none_subnet_from_vpc = Mock(return_value=None)
self.reservation.reservation_id = "123"
self.vpc_service.get_or_pick_availability_zone = Mock(return_value="MyZone")
self.subnet_service.create_subnet_for_vpc = Mock(return_value=subnet)
# Act
result = self.vpc_service.get_or_create_subnet_for_vpc(reservation=self.reservation,
cidr="1.2.3.4/24", alias="MySubnet",
vpc=self.vpc,
ec2_client=self.ec2_client,
aws_ec2_datamodel=self.aws_ec2_datamodel,
logger=self.logger)
# Assert
self.assertEqual(result, subnet)
self.subnet_service.create_subnet_for_vpc.assert_called_once_with(
vpc=self.vpc,
cidr="1.2.3.4/24",
subnet_name="MySubnet Reservation: 123",
availability_zone="MyZone",
reservation=self.reservation)
def test_get_or_create_private_route_table_1(self): # Scenario(1): Get
# Arrange
table = Mock()
self.route_table_service.get_route_table = Mock(return_value=table)
# Act
result = self.vpc_service.get_or_create_private_route_table(ec2_session=self.ec2_session, reservation=self.reservation,
vpc_id=self.vpc_id)
# Assert
self.assertEqual(result, table)
def test_get_or_create_private_route_table_2(self): # Scenario(2): Create
# Arrange
table = Mock()
self.reservation.reservation_id = "123"
self.route_table_service.get_route_table = Mock(return_value=None)
self.route_table_service.create_route_table = Mock(return_value=table)
# Act
result = self.vpc_service.get_or_create_private_route_table(ec2_session=self.ec2_session,
reservation=self.reservation,
vpc_id=self.vpc_id)
# Assert
self.assertEqual(result, table)
self.route_table_service.create_route_table.assert_called_once_with(
self.ec2_session,
self.reservation,
self.vpc_id,
"Private RoutingTable Reservation: 123"
)
def test_get_or_throw_private_route_table(self):
# Arrange
self.route_table_service.get_route_table = Mock(return_value=None)
# Act
with self.assertRaises(Exception) as error:
self.vpc_service.get_or_throw_private_route_table(ec2_session=self.ec2_session, reservation=self.reservation,
vpc_id=self.vpc_id)
# Assert
self.assertEqual(error.exception.message, "Routing table for non-public subnet was not found")
def test_get_vpc_cidr(self):
# Arrange
self.vpc.cidr_block = "1.2.3.4/24"
# Act
result = self.vpc_service.get_vpc_cidr(ec2_session=self.ec2_session, vpc_id=self.vpc_id)
# Assert
self.assertEqual(result, "1.2.3.4/24")
def test_get_or_pick_availability_zone_1(self): #Scenario(1): from existing subnet
# Arrange
subnet = Mock()
subnet.availability_zone = "z"
self.subnet_service.get_first_or_none_subnet_from_vpc = Mock(return_value=subnet)
# Act
result = self.vpc_service.get_or_pick_availability_zone(ec2_client=self.ec2_client, vpc=self.vpc,
aws_ec2_datamodel=self.aws_ec2_datamodel)
# Assert
self.assertEqual(result, "z")
def test_get_or_pick_availability_zone_2(self): # Scenario(2): from available zones list
# Arrange
self.subnet_service.get_first_or_none_subnet_from_vpc = Mock(return_value=None)
self.ec2_client.describe_availability_zones = Mock(return_value={"AvailabilityZones":[{"ZoneName":"z"}]})
# Act
result = self.vpc_service.get_or_pick_availability_zone(ec2_client=self.ec2_client, vpc=self.vpc,
aws_ec2_datamodel=self.aws_ec2_datamodel)
# Assert
self.assertEqual(result, "z")
def test_get_or_pick_availability_zone_3(self): # Scenario(3): no available zone
# Arrange
self.subnet_service.get_first_or_none_subnet_from_vpc = Mock(return_value=None)
self.ec2_client.describe_availability_zones = Mock(return_value=None)
# Act
with self.assertRaises(Exception) as error:
self.vpc_service.get_or_pick_availability_zone(ec2_client=self.ec2_client, vpc=self.vpc,
aws_ec2_datamodel=self.aws_ec2_datamodel)
# Assert
self.assertEqual(error.exception.message, "No AvailabilityZone is available for this vpc")
def test_remove_custom_route_tables(self):
# Arrange
tables = [Mock(), Mock()]
self.vpc.id = "123"
self.route_table_service.get_custom_route_tables = Mock(return_value=tables)
# Act
result = self.vpc_service.remove_custom_route_tables(ec2_session=self.ec2_session, vpc=self.vpc)
# Assert
self.assertTrue(result)
self.route_table_service.delete_table.assert_any_call(tables[0])
self.route_table_service.delete_table.assert_any_call(tables[1])
def test_set_main_route_table_tags(self):
# Arrange
table = Mock()
tags = Mock()
self.reservation.reservation_id = "123"
self.tag_service.get_default_tags = Mock(return_value=tags)
# Act
self.vpc_service.set_main_route_table_tags(main_route_table=table, reservation=self.reservation)
# Assert
self.tag_service.get_default_tags.assert_called_once_with("Main RoutingTable Reservation: 123", self.reservation)
self.tag_service.set_ec2_resource_tags.assert_called_once_with(table, tags) | 47.009119 | 136 | 0.652916 | from unittest import TestCase
from mock import Mock, call
from cloudshell.cp.aws.domain.services.ec2.vpc import VPCService
from cloudshell.cp.aws.domain.services.waiters.vpc_peering import VpcPeeringConnectionWaiter
class TestVPCService(TestCase):
def setUp(self):
self.tag_service = Mock()
self.tags = Mock()
self.tag_service.get_default_tags = Mock(return_value=self.tags)
self.subnet_service = Mock()
self.logger = Mock()
self.aws_ec2_datamodel = Mock()
self.ec2_client= Mock()
self.ec2_session = Mock()
self.vpc = Mock()
self.vpc_id = Mock()
self.ec2_session.create_vpc = Mock(return_value=self.vpc)
self.ec2_session.Vpc = Mock(return_value=self.vpc)
self.s3_session = Mock()
self.reservation = Mock()
self.cidr = Mock()
self.vpc_waiter = Mock()
self.vpc_peering_waiter = Mock()
self.instance_service = Mock()
self.sg_service = Mock()
self.route_table_service = Mock()
self.traffic_mirror_service = Mock()
self.vpc_service = VPCService(tag_service=self.tag_service,
subnet_service=self.subnet_service,
instance_service=self.instance_service,
vpc_waiter=self.vpc_waiter,
vpc_peering_waiter=self.vpc_peering_waiter,
sg_service=self.sg_service,
route_table_service=self.route_table_service,
traffic_mirror_service=self.traffic_mirror_service)
def test_get_all_internet_gateways(self):
internet_gate = Mock()
self.vpc.internet_gateways = Mock()
self.vpc.internet_gateways.all = Mock(return_value=[internet_gate])
res = self.vpc_service.get_all_internet_gateways(self.vpc)
self.assertEquals(res, [internet_gate])
def test_remove_all_internet_gateways(self):
internet_gate = Mock()
self.vpc.internet_gateways = Mock()
self.vpc.internet_gateways.all = Mock(return_value=[internet_gate])
self.vpc_service.remove_all_internet_gateways(self.vpc)
internet_gate.detach_from_vpc.assert_called_with(VpcId=self.vpc.id)
self.assertTrue(internet_gate.delete.called)
def test_create_and_attach_internet_gateway(self):
internet_gate = Mock()
internet_gate.id = 'super_id'
self.ec2_session.create_internet_gateway = Mock(return_value=internet_gate)
internet_gateway_id = self.vpc_service.create_and_attach_internet_gateway(self.ec2_session, self.vpc, self.reservation)
self.assertTrue(self.ec2_session.create_internet_gateway.called)
self.tag_service.get_default_tags.assert_called_once_with("IGW {0}".format(self.reservation.reservation_id),self.reservation)
self.tag_service.set_ec2_resource_tags.assert_called_once_with(resource=internet_gate, tags=self.tag_service.get_default_tags())
self.assertEqual(internet_gateway_id, internet_gate.id)
def test_create_vpc_for_reservation(self):
vpc = self.vpc_service.create_vpc_for_reservation(self.ec2_session, self.reservation, self.cidr)
vpc_name = self.vpc_service.VPC_RESERVATION.format(self.reservation.reservation_id)
self.vpc_waiter.wait.assert_called_once_with(vpc=vpc, state=self.vpc_waiter.AVAILABLE)
self.assertEqual(self.vpc, vpc)
self.ec2_session.create_vpc.assert_called_once_with(CidrBlock=self.cidr)
self.tag_service.get_default_tags.assert_called_once_with(vpc_name, self.reservation)
self.tag_service.set_ec2_resource_tags.assert_called_once_with(self.vpc, self.tags)
def test_find_vpc_for_reservation(self):
self.ec2_session.vpcs = Mock()
self.ec2_session.vpcs.filter = Mock(return_value=[self.vpc])
vpc = self.vpc_service.find_vpc_for_reservation(self.ec2_session, self.reservation)
self.assertEqual(vpc, self.vpc)
def test_find_vpc_for_reservation_no_vpc(self):
self.ec2_session.vpcs = Mock()
self.ec2_session.vpcs.filter = Mock(return_value=[])
vpc = self.vpc_service.find_vpc_for_reservation(self.ec2_session, self.reservation)
self.assertIsNone(vpc)
def test_find_vpc_for_reservation_too_many(self):
self.ec2_session.vpcs = Mock()
self.ec2_session.vpcs.filter = Mock(return_value=[1, 2])
self.assertRaises(ValueError, self.vpc_service.find_vpc_for_reservation, self.ec2_session, self.reservation)
def test_peer_vpc(self):
def change_to_active(vpc_peering_connection):
vpc_peering_connection.status['Code'] = VpcPeeringConnectionWaiter.ACTIVE
vpc1 = Mock()
vpc2 = Mock()
peered = Mock()
peered.status = {'Code': VpcPeeringConnectionWaiter.PENDING_ACCEPTANCE}
peered.accept = Mock(side_effect=change_to_active(peered))
self.ec2_session.create_vpc_peering_connection = Mock(return_value=peered)
reservation_model = Mock()
res = self.vpc_service.peer_vpcs(self.ec2_session, vpc1, vpc2, reservation_model,Mock())
self.ec2_session.create_vpc_peering_connection.assert_called_once_with(VpcId=vpc1, PeerVpcId=vpc2)
self.assertEqual(peered.status['Code'], VpcPeeringConnectionWaiter.ACTIVE)
self.assertEqual(res, peered.id)
def test_remove_all_peering(self):
peering = Mock()
peering.status = {'Code': 'ok'}
peering1 = Mock()
peering1.status = {'Code': 'failed'}
peering2 = Mock()
peering2.status = {'Code': 'aa'}
self.vpc.accepted_vpc_peering_connections = Mock()
self.vpc.accepted_vpc_peering_connections.all = Mock(return_value=[peering, peering1, peering2])
res = self.vpc_service.remove_all_peering(self.vpc)
self.assertIsNotNone(res)
self.assertTrue(peering.delete.called)
self.assertFalse(peering1.delete.called)
self.assertTrue(peering2.delete.called)
def test_remove_all_sgs(self):
sg = Mock()
self.vpc.security_groups = Mock()
self.vpc.security_groups.all = Mock(return_value=[sg])
res = self.vpc_service.remove_all_security_groups(self.vpc, self.reservation.reservation_id )
self.assertIsNotNone(res)
self.sg_service.delete_security_group.assert_called_once_with(sg)
# we get resource sg-XXXXXX has a dependent object, so to fix that , isolated group shall be deleted last.
def test_remove_all_sgs_isolated_group_removed_last(self):
sg = Mock()
sg.group_name = 'dummy'
isolated_sg = Mock()
isolated_sg.group_name = self.sg_service.sandbox_isolated_sg_name(self.reservation.reservation_id)
isolated_at_start_sgs = [isolated_sg, sg]
isolated_at_end_sgs_calls = [call(sg), call(isolated_sg)]
self.vpc.security_groups = Mock()
self.vpc.security_groups.all = Mock(return_value=isolated_at_start_sgs)
res = self.vpc_service.remove_all_security_groups(self.vpc, self.reservation.reservation_id )
self.assertIsNotNone(res)
self.sg_service.delete_security_group.assert_has_calls(isolated_at_end_sgs_calls, any_order=False)
def test_remove_subnets(self):
subnet = Mock()
self.vpc.subnets = Mock()
self.vpc.subnets.all = Mock(return_value=[subnet])
res = self.vpc_service.remove_all_subnets(self.vpc)
self.assertIsNotNone(res)
self.subnet_service.delete_subnet.assert_called_once_with(subnet)
def test_delete_all_instances(self):
instance = Mock()
self.vpc.instances = Mock()
self.vpc.instances.all = Mock(return_value=[instance])
res = self.vpc_service.delete_all_instances(self.vpc)
self.assertIsNotNone(res)
self.instance_service.terminate_instances.assert_called_once_with([instance])
def test_delete_vpc(self):
res = self.vpc_service.delete_vpc(self.vpc)
self.assertTrue(self.vpc.delete.called)
self.assertIsNotNone(res)
def test_get_or_create_subnet_for_vpc_1(self): # Scenario(1): Get
# Arrange
subnet = Mock()
self.subnet_service.get_first_or_none_subnet_from_vpc = Mock(return_value=subnet)
# Act
result = self.vpc_service.get_or_create_subnet_for_vpc(reservation=self.reservation,
cidr="1.2.3.4/24", alias="MySubnet",
vpc=self.vpc,
ec2_client=self.ec2_client,
aws_ec2_datamodel=self.aws_ec2_datamodel,
logger=self.logger)
# Assert
self.assertEqual(result, subnet)
def test_get_or_create_subnet_for_vpc_2(self): # Scenario(2): Create
# Arrange
subnet = Mock()
self.subnet_service.get_first_or_none_subnet_from_vpc = Mock(return_value=None)
self.reservation.reservation_id = "123"
self.vpc_service.get_or_pick_availability_zone = Mock(return_value="MyZone")
self.subnet_service.create_subnet_for_vpc = Mock(return_value=subnet)
# Act
result = self.vpc_service.get_or_create_subnet_for_vpc(reservation=self.reservation,
cidr="1.2.3.4/24", alias="MySubnet",
vpc=self.vpc,
ec2_client=self.ec2_client,
aws_ec2_datamodel=self.aws_ec2_datamodel,
logger=self.logger)
# Assert
self.assertEqual(result, subnet)
self.subnet_service.create_subnet_for_vpc.assert_called_once_with(
vpc=self.vpc,
cidr="1.2.3.4/24",
subnet_name="MySubnet Reservation: 123",
availability_zone="MyZone",
reservation=self.reservation)
def test_get_or_create_private_route_table_1(self): # Scenario(1): Get
# Arrange
table = Mock()
self.route_table_service.get_route_table = Mock(return_value=table)
# Act
result = self.vpc_service.get_or_create_private_route_table(ec2_session=self.ec2_session, reservation=self.reservation,
vpc_id=self.vpc_id)
# Assert
self.assertEqual(result, table)
def test_get_or_create_private_route_table_2(self): # Scenario(2): Create
# Arrange
table = Mock()
self.reservation.reservation_id = "123"
self.route_table_service.get_route_table = Mock(return_value=None)
self.route_table_service.create_route_table = Mock(return_value=table)
# Act
result = self.vpc_service.get_or_create_private_route_table(ec2_session=self.ec2_session,
reservation=self.reservation,
vpc_id=self.vpc_id)
# Assert
self.assertEqual(result, table)
self.route_table_service.create_route_table.assert_called_once_with(
self.ec2_session,
self.reservation,
self.vpc_id,
"Private RoutingTable Reservation: 123"
)
def test_get_or_throw_private_route_table(self):
# Arrange
self.route_table_service.get_route_table = Mock(return_value=None)
# Act
with self.assertRaises(Exception) as error:
self.vpc_service.get_or_throw_private_route_table(ec2_session=self.ec2_session, reservation=self.reservation,
vpc_id=self.vpc_id)
# Assert
self.assertEqual(error.exception.message, "Routing table for non-public subnet was not found")
def test_get_vpc_cidr(self):
# Arrange
self.vpc.cidr_block = "1.2.3.4/24"
# Act
result = self.vpc_service.get_vpc_cidr(ec2_session=self.ec2_session, vpc_id=self.vpc_id)
# Assert
self.assertEqual(result, "1.2.3.4/24")
def test_get_or_pick_availability_zone_1(self): #Scenario(1): from existing subnet
# Arrange
subnet = Mock()
subnet.availability_zone = "z"
self.subnet_service.get_first_or_none_subnet_from_vpc = Mock(return_value=subnet)
# Act
result = self.vpc_service.get_or_pick_availability_zone(ec2_client=self.ec2_client, vpc=self.vpc,
aws_ec2_datamodel=self.aws_ec2_datamodel)
# Assert
self.assertEqual(result, "z")
def test_get_or_pick_availability_zone_2(self): # Scenario(2): from available zones list
# Arrange
self.subnet_service.get_first_or_none_subnet_from_vpc = Mock(return_value=None)
self.ec2_client.describe_availability_zones = Mock(return_value={"AvailabilityZones":[{"ZoneName":"z"}]})
# Act
result = self.vpc_service.get_or_pick_availability_zone(ec2_client=self.ec2_client, vpc=self.vpc,
aws_ec2_datamodel=self.aws_ec2_datamodel)
# Assert
self.assertEqual(result, "z")
def test_get_or_pick_availability_zone_3(self): # Scenario(3): no available zone
# Arrange
self.subnet_service.get_first_or_none_subnet_from_vpc = Mock(return_value=None)
self.ec2_client.describe_availability_zones = Mock(return_value=None)
# Act
with self.assertRaises(Exception) as error:
self.vpc_service.get_or_pick_availability_zone(ec2_client=self.ec2_client, vpc=self.vpc,
aws_ec2_datamodel=self.aws_ec2_datamodel)
# Assert
self.assertEqual(error.exception.message, "No AvailabilityZone is available for this vpc")
def test_remove_custom_route_tables(self):
# Arrange
tables = [Mock(), Mock()]
self.vpc.id = "123"
self.route_table_service.get_custom_route_tables = Mock(return_value=tables)
# Act
result = self.vpc_service.remove_custom_route_tables(ec2_session=self.ec2_session, vpc=self.vpc)
# Assert
self.assertTrue(result)
self.route_table_service.delete_table.assert_any_call(tables[0])
self.route_table_service.delete_table.assert_any_call(tables[1])
def test_set_main_route_table_tags(self):
# Arrange
table = Mock()
tags = Mock()
self.reservation.reservation_id = "123"
self.tag_service.get_default_tags = Mock(return_value=tags)
# Act
self.vpc_service.set_main_route_table_tags(main_route_table=table, reservation=self.reservation)
# Assert
self.tag_service.get_default_tags.assert_called_once_with("Main RoutingTable Reservation: 123", self.reservation)
self.tag_service.set_ec2_resource_tags.assert_called_once_with(table, tags) | true | true |
f71a432e88d1054b78e97329d6efffbbe65f95b6 | 8,264 | py | Python | wagtail/wagtailsnippets/views/snippets.py | markosamuli/wagtail | 5158ee7aad594d3d9b8b7cd14c139094080466fb | [
"BSD-3-Clause"
] | null | null | null | wagtail/wagtailsnippets/views/snippets.py | markosamuli/wagtail | 5158ee7aad594d3d9b8b7cd14c139094080466fb | [
"BSD-3-Clause"
] | null | null | null | wagtail/wagtailsnippets/views/snippets.py | markosamuli/wagtail | 5158ee7aad594d3d9b8b7cd14c139094080466fb | [
"BSD-3-Clause"
] | null | null | null | from django.apps import apps
from django.core.urlresolvers import reverse
from django.http import Http404
from django.shortcuts import get_object_or_404, redirect, render
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from wagtail.utils.pagination import paginate
from wagtail.wagtailadmin import messages
from wagtail.wagtailadmin.edit_handlers import (
ObjectList, extract_panel_definitions_from_model_class)
from wagtail.wagtailadmin.forms import SearchForm
from wagtail.wagtailadmin.utils import permission_denied
from wagtail.wagtailsearch.backends import get_search_backend
from wagtail.wagtailsearch.index import class_is_indexed
from wagtail.wagtailsnippets.models import get_snippet_models
from wagtail.wagtailsnippets.permissions import get_permission_name, user_can_edit_snippet_type
# == Helper functions ==
def get_snippet_model_from_url_params(app_name, model_name):
"""
Retrieve a model from an app_label / model_name combo.
Raise Http404 if the model is not a valid snippet type.
"""
try:
model = apps.get_model(app_name, model_name)
except LookupError:
raise Http404
if model not in get_snippet_models():
# don't allow people to hack the URL to edit content types that aren't registered as snippets
raise Http404
return model
SNIPPET_EDIT_HANDLERS = {}
def get_snippet_edit_handler(model):
if model not in SNIPPET_EDIT_HANDLERS:
if hasattr(model, 'edit_handler'):
# use the edit handler specified on the page class
edit_handler = model.edit_handler
else:
panels = extract_panel_definitions_from_model_class(model)
edit_handler = ObjectList(panels)
SNIPPET_EDIT_HANDLERS[model] = edit_handler.bind_to_model(model)
return SNIPPET_EDIT_HANDLERS[model]
# == Views ==
def index(request):
snippet_model_opts = [
model._meta for model in get_snippet_models()
if user_can_edit_snippet_type(request.user, model)]
return render(request, 'wagtailsnippets/snippets/index.html', {
'snippet_model_opts': sorted(
snippet_model_opts, key=lambda x: x.verbose_name.lower())})
def list(request, app_label, model_name):
model = get_snippet_model_from_url_params(app_label, model_name)
permissions = [
get_permission_name(action, model)
for action in ['add', 'change', 'delete']
]
if not any([request.user.has_perm(perm) for perm in permissions]):
return permission_denied(request)
items = model.objects.all()
# Search
is_searchable = class_is_indexed(model)
is_searching = False
search_query = None
if is_searchable and 'q' in request.GET:
search_form = SearchForm(request.GET, placeholder=_("Search %(snippet_type_name)s") % {
'snippet_type_name': model._meta.verbose_name_plural
})
if search_form.is_valid():
search_query = search_form.cleaned_data['q']
search_backend = get_search_backend()
items = search_backend.search(search_query, items)
is_searching = True
else:
search_form = SearchForm(placeholder=_("Search %(snippet_type_name)s") % {
'snippet_type_name': model._meta.verbose_name_plural
})
paginator, paginated_items = paginate(request, items)
# Template
if request.is_ajax():
template = 'wagtailsnippets/snippets/results.html'
else:
template = 'wagtailsnippets/snippets/type_index.html'
return render(request, template, {
'model_opts': model._meta,
'items': paginated_items,
'can_add_snippet': request.user.has_perm(get_permission_name('add', model)),
'is_searchable': is_searchable,
'search_form': search_form,
'is_searching': is_searching,
'query_string': search_query,
})
def create(request, app_label, model_name):
model = get_snippet_model_from_url_params(app_label, model_name)
permission = get_permission_name('add', model)
if not request.user.has_perm(permission):
return permission_denied(request)
instance = model()
edit_handler_class = get_snippet_edit_handler(model)
form_class = edit_handler_class.get_form_class(model)
if request.POST:
form = form_class(request.POST, request.FILES, instance=instance)
if form.is_valid():
form.save()
messages.success(
request,
_("{snippet_type} '{instance}' created.").format(
snippet_type=capfirst(model._meta.verbose_name),
instance=instance
),
buttons=[
messages.button(reverse(
'wagtailsnippets:edit', args=(app_label, model_name, instance.id)
), _('Edit'))
]
)
return redirect('wagtailsnippets:list', app_label, model_name)
else:
messages.error(request, _("The snippet could not be created due to errors."))
edit_handler = edit_handler_class(instance=instance, form=form)
else:
form = form_class(instance=instance)
edit_handler = edit_handler_class(instance=instance, form=form)
return render(request, 'wagtailsnippets/snippets/create.html', {
'model_opts': model._meta,
'edit_handler': edit_handler,
})
def edit(request, app_label, model_name, id):
model = get_snippet_model_from_url_params(app_label, model_name)
permission = get_permission_name('change', model)
if not request.user.has_perm(permission):
return permission_denied(request)
instance = get_object_or_404(model, id=id)
edit_handler_class = get_snippet_edit_handler(model)
form_class = edit_handler_class.get_form_class(model)
if request.POST:
form = form_class(request.POST, request.FILES, instance=instance)
if form.is_valid():
form.save()
messages.success(
request,
_("{snippet_type} '{instance}' updated.").format(
snippet_type=capfirst(model._meta.verbose_name_plural),
instance=instance
),
buttons=[
messages.button(reverse(
'wagtailsnippets:edit', args=(app_label, model_name, instance.id)
), _('Edit'))
]
)
return redirect('wagtailsnippets:list', app_label, model_name)
else:
messages.error(request, _("The snippet could not be saved due to errors."))
edit_handler = edit_handler_class(instance=instance, form=form)
else:
form = form_class(instance=instance)
edit_handler = edit_handler_class(instance=instance, form=form)
return render(request, 'wagtailsnippets/snippets/edit.html', {
'model_opts': model._meta,
'instance': instance,
'edit_handler': edit_handler
})
def delete(request, app_label, model_name, id):
model = get_snippet_model_from_url_params(app_label, model_name)
permission = get_permission_name('delete', model)
if not request.user.has_perm(permission):
return permission_denied(request)
instance = get_object_or_404(model, id=id)
if request.POST:
instance.delete()
messages.success(
request,
_("{snippet_type} '{instance}' deleted.").format(
snippet_type=capfirst(model._meta.verbose_name_plural),
instance=instance
)
)
return redirect('wagtailsnippets:list', app_label, model_name)
return render(request, 'wagtailsnippets/snippets/confirm_delete.html', {
'model_opts': model._meta,
'instance': instance,
})
def usage(request, app_label, model_name, id):
model = get_snippet_model_from_url_params(app_label, model_name)
instance = get_object_or_404(model, id=id)
paginator, used_by = paginate(request, instance.get_usage())
return render(request, "wagtailsnippets/snippets/usage.html", {
'instance': instance,
'used_by': used_by
})
| 34.290456 | 101 | 0.666142 | from django.apps import apps
from django.core.urlresolvers import reverse
from django.http import Http404
from django.shortcuts import get_object_or_404, redirect, render
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from wagtail.utils.pagination import paginate
from wagtail.wagtailadmin import messages
from wagtail.wagtailadmin.edit_handlers import (
ObjectList, extract_panel_definitions_from_model_class)
from wagtail.wagtailadmin.forms import SearchForm
from wagtail.wagtailadmin.utils import permission_denied
from wagtail.wagtailsearch.backends import get_search_backend
from wagtail.wagtailsearch.index import class_is_indexed
from wagtail.wagtailsnippets.models import get_snippet_models
from wagtail.wagtailsnippets.permissions import get_permission_name, user_can_edit_snippet_type
def get_snippet_model_from_url_params(app_name, model_name):
try:
model = apps.get_model(app_name, model_name)
except LookupError:
raise Http404
if model not in get_snippet_models():
raise Http404
return model
SNIPPET_EDIT_HANDLERS = {}
def get_snippet_edit_handler(model):
if model not in SNIPPET_EDIT_HANDLERS:
if hasattr(model, 'edit_handler'):
edit_handler = model.edit_handler
else:
panels = extract_panel_definitions_from_model_class(model)
edit_handler = ObjectList(panels)
SNIPPET_EDIT_HANDLERS[model] = edit_handler.bind_to_model(model)
return SNIPPET_EDIT_HANDLERS[model]
def index(request):
snippet_model_opts = [
model._meta for model in get_snippet_models()
if user_can_edit_snippet_type(request.user, model)]
return render(request, 'wagtailsnippets/snippets/index.html', {
'snippet_model_opts': sorted(
snippet_model_opts, key=lambda x: x.verbose_name.lower())})
def list(request, app_label, model_name):
model = get_snippet_model_from_url_params(app_label, model_name)
permissions = [
get_permission_name(action, model)
for action in ['add', 'change', 'delete']
]
if not any([request.user.has_perm(perm) for perm in permissions]):
return permission_denied(request)
items = model.objects.all()
is_searchable = class_is_indexed(model)
is_searching = False
search_query = None
if is_searchable and 'q' in request.GET:
search_form = SearchForm(request.GET, placeholder=_("Search %(snippet_type_name)s") % {
'snippet_type_name': model._meta.verbose_name_plural
})
if search_form.is_valid():
search_query = search_form.cleaned_data['q']
search_backend = get_search_backend()
items = search_backend.search(search_query, items)
is_searching = True
else:
search_form = SearchForm(placeholder=_("Search %(snippet_type_name)s") % {
'snippet_type_name': model._meta.verbose_name_plural
})
paginator, paginated_items = paginate(request, items)
if request.is_ajax():
template = 'wagtailsnippets/snippets/results.html'
else:
template = 'wagtailsnippets/snippets/type_index.html'
return render(request, template, {
'model_opts': model._meta,
'items': paginated_items,
'can_add_snippet': request.user.has_perm(get_permission_name('add', model)),
'is_searchable': is_searchable,
'search_form': search_form,
'is_searching': is_searching,
'query_string': search_query,
})
def create(request, app_label, model_name):
model = get_snippet_model_from_url_params(app_label, model_name)
permission = get_permission_name('add', model)
if not request.user.has_perm(permission):
return permission_denied(request)
instance = model()
edit_handler_class = get_snippet_edit_handler(model)
form_class = edit_handler_class.get_form_class(model)
if request.POST:
form = form_class(request.POST, request.FILES, instance=instance)
if form.is_valid():
form.save()
messages.success(
request,
_("{snippet_type} '{instance}' created.").format(
snippet_type=capfirst(model._meta.verbose_name),
instance=instance
),
buttons=[
messages.button(reverse(
'wagtailsnippets:edit', args=(app_label, model_name, instance.id)
), _('Edit'))
]
)
return redirect('wagtailsnippets:list', app_label, model_name)
else:
messages.error(request, _("The snippet could not be created due to errors."))
edit_handler = edit_handler_class(instance=instance, form=form)
else:
form = form_class(instance=instance)
edit_handler = edit_handler_class(instance=instance, form=form)
return render(request, 'wagtailsnippets/snippets/create.html', {
'model_opts': model._meta,
'edit_handler': edit_handler,
})
def edit(request, app_label, model_name, id):
model = get_snippet_model_from_url_params(app_label, model_name)
permission = get_permission_name('change', model)
if not request.user.has_perm(permission):
return permission_denied(request)
instance = get_object_or_404(model, id=id)
edit_handler_class = get_snippet_edit_handler(model)
form_class = edit_handler_class.get_form_class(model)
if request.POST:
form = form_class(request.POST, request.FILES, instance=instance)
if form.is_valid():
form.save()
messages.success(
request,
_("{snippet_type} '{instance}' updated.").format(
snippet_type=capfirst(model._meta.verbose_name_plural),
instance=instance
),
buttons=[
messages.button(reverse(
'wagtailsnippets:edit', args=(app_label, model_name, instance.id)
), _('Edit'))
]
)
return redirect('wagtailsnippets:list', app_label, model_name)
else:
messages.error(request, _("The snippet could not be saved due to errors."))
edit_handler = edit_handler_class(instance=instance, form=form)
else:
form = form_class(instance=instance)
edit_handler = edit_handler_class(instance=instance, form=form)
return render(request, 'wagtailsnippets/snippets/edit.html', {
'model_opts': model._meta,
'instance': instance,
'edit_handler': edit_handler
})
def delete(request, app_label, model_name, id):
model = get_snippet_model_from_url_params(app_label, model_name)
permission = get_permission_name('delete', model)
if not request.user.has_perm(permission):
return permission_denied(request)
instance = get_object_or_404(model, id=id)
if request.POST:
instance.delete()
messages.success(
request,
_("{snippet_type} '{instance}' deleted.").format(
snippet_type=capfirst(model._meta.verbose_name_plural),
instance=instance
)
)
return redirect('wagtailsnippets:list', app_label, model_name)
return render(request, 'wagtailsnippets/snippets/confirm_delete.html', {
'model_opts': model._meta,
'instance': instance,
})
def usage(request, app_label, model_name, id):
model = get_snippet_model_from_url_params(app_label, model_name)
instance = get_object_or_404(model, id=id)
paginator, used_by = paginate(request, instance.get_usage())
return render(request, "wagtailsnippets/snippets/usage.html", {
'instance': instance,
'used_by': used_by
})
| true | true |
f71a43557442ce97907f082e75eb667688ce3597 | 664 | py | Python | manage.py | bastoune57/gokiting_back_end | f3edcbeede292713349b28f2390b5d57e1420f8e | [
"MIT"
] | null | null | null | manage.py | bastoune57/gokiting_back_end | f3edcbeede292713349b28f2390b5d57e1420f8e | [
"MIT"
] | null | null | null | manage.py | bastoune57/gokiting_back_end | f3edcbeede292713349b28f2390b5d57e1420f8e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gokiting.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.869565 | 73 | 0.679217 |
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gokiting.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| true | true |
f71a4417980584c697fd59995b017ae74c4d8707 | 210 | py | Python | visualisation/core/__init__.py | dashings/CAMVIS | fb7e4e5d885ae227140f7ab40b5f47e730ec249b | [
"MIT"
] | 213 | 2018-12-20T12:09:07.000Z | 2022-03-21T10:09:58.000Z | visualisation/core/__init__.py | dashings/CAMVIS | fb7e4e5d885ae227140f7ab40b5f47e730ec249b | [
"MIT"
] | 3 | 2020-07-16T05:11:25.000Z | 2022-03-16T13:59:07.000Z | visualisation/core/__init__.py | dashings/CAMVIS | fb7e4e5d885ae227140f7ab40b5f47e730ec249b | [
"MIT"
] | 41 | 2019-03-06T12:01:24.000Z | 2022-03-09T07:55:56.000Z | from .SaliencyMap import SaliencyMap
from .DeepDream import DeepDream
from .GradCam import GradCam
from .Weights import Weights
from .Base import Base
from .ClassActivationMapping import ClassActivationMapping
| 30 | 58 | 0.857143 | from .SaliencyMap import SaliencyMap
from .DeepDream import DeepDream
from .GradCam import GradCam
from .Weights import Weights
from .Base import Base
from .ClassActivationMapping import ClassActivationMapping
| true | true |
f71a45fb15de192f5a1129710b39b955da52f151 | 13,147 | py | Python | tests/query_test/test_observability.py | twmarshall/impala | bdd904922a220c37326928ac674779acaef5f6fa | [
"Apache-2.0"
] | null | null | null | tests/query_test/test_observability.py | twmarshall/impala | bdd904922a220c37326928ac674779acaef5f6fa | [
"Apache-2.0"
] | null | null | null | tests/query_test/test_observability.py | twmarshall/impala | bdd904922a220c37326928ac674779acaef5f6fa | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from tests.common.impala_cluster import ImpalaCluster
from tests.common.impala_test_suite import ImpalaTestSuite
from tests.common.skip import SkipIfS3, SkipIfADLS, SkipIfIsilon, SkipIfLocal
from tests.util.filesystem_utils import IS_EC
import logging
import pytest
import re
import time
class TestObservability(ImpalaTestSuite):
@classmethod
def get_workload(self):
return 'functional-query'
def test_merge_exchange_num_rows(self):
"""Regression test for IMPALA-1473 - checks that the exec summary for a merging
exchange with a limit reports the number of rows returned as equal to the limit,
and that the coordinator fragment portion of the runtime profile reports the number
of rows returned correctly."""
query = """select tinyint_col, count(*) from functional.alltypes
group by tinyint_col order by tinyint_col limit 5"""
result = self.execute_query(query)
assert result.exec_summary[0]['operator'] == '05:MERGING-EXCHANGE'
assert result.exec_summary[0]['num_rows'] == 5
assert result.exec_summary[0]['est_num_rows'] == 5
assert result.exec_summary[0]['peak_mem'] > 0
for line in result.runtime_profile.split('\n'):
# The first 'RowsProduced' we find is for the coordinator fragment.
if 'RowsProduced' in line:
assert '(5)' in line
break
def test_broadcast_num_rows(self):
"""Regression test for IMPALA-3002 - checks that the num_rows for a broadcast node
in the exec summaty is correctly set as the max over all instances, not the sum."""
query = """select distinct a.int_col, a.string_col from functional.alltypes a
inner join functional.alltypessmall b on (a.id = b.id)
where a.year = 2009 and b.month = 2"""
result = self.execute_query(query)
assert result.exec_summary[5]['operator'] == '04:EXCHANGE'
assert result.exec_summary[5]['num_rows'] == 25
assert result.exec_summary[5]['est_num_rows'] == 25
assert result.exec_summary[5]['peak_mem'] > 0
@SkipIfS3.hbase
@SkipIfLocal.hbase
@SkipIfIsilon.hbase
@SkipIfADLS.hbase
def test_scan_summary(self):
"""IMPALA-4499: Checks that the exec summary for scans show the table name."""
# HDFS table
query = "select count(*) from functional.alltypestiny"
result = self.execute_query(query)
scan_idx = len(result.exec_summary) - 1
assert result.exec_summary[scan_idx]['operator'] == '00:SCAN HDFS'
assert result.exec_summary[scan_idx]['detail'] == 'functional.alltypestiny'
# KUDU table
query = "select count(*) from functional_kudu.alltypestiny"
result = self.execute_query(query)
scan_idx = len(result.exec_summary) - 1
assert result.exec_summary[scan_idx]['operator'] == '00:SCAN KUDU'
assert result.exec_summary[scan_idx]['detail'] == 'functional_kudu.alltypestiny'
# HBASE table
query = "select count(*) from functional_hbase.alltypestiny"
result = self.execute_query(query)
scan_idx = len(result.exec_summary) - 1
assert result.exec_summary[scan_idx]['operator'] == '00:SCAN HBASE'
assert result.exec_summary[scan_idx]['detail'] == 'functional_hbase.alltypestiny'
def test_query_states(self):
"""Tests that the query profile shows expected query states."""
query = "select count(*) from functional.alltypes"
handle = self.execute_query_async(query,
{"debug_action": "CRS_BEFORE_ADMISSION:SLEEP@1000"})
# If ExecuteStatement() has completed and the query is paused in the admission control
# phase, then the query must be in COMPILED state.
profile = self.client.get_runtime_profile(handle)
assert "Query State: COMPILED" in profile
# After completion of the admission control phase, the query must have at least
# reached RUNNING state.
self.client.wait_for_admission_control(handle)
profile = self.client.get_runtime_profile(handle)
assert "Query State: RUNNING" in profile or \
"Query State: FINISHED" in profile, profile
results = self.client.fetch(query, handle)
profile = self.client.get_runtime_profile(handle)
# After fetching the results, the query must be in state FINISHED.
assert "Query State: FINISHED" in profile, profile
def test_query_options(self):
"""Test that the query profile shows expected non-default query options, both set
explicitly through client and those set by planner"""
# Set mem_limit and runtime_filter_wait_time_ms to non-default and default value.
query_opts = {'mem_limit': 8589934592, 'runtime_filter_wait_time_ms': 0}
profile = self.execute_query("select 1", query_opts).runtime_profile
assert "Query Options (set by configuration): MEM_LIMIT=8589934592" in profile,\
profile
# For this query, the planner sets NUM_NODES=1, NUM_SCANNER_THREADS=1,
# RUNTIME_FILTER_MODE=0 and MT_DOP=0
expected_str = ("Query Options (set by configuration and planner): "
"MEM_LIMIT=8589934592,NUM_NODES=1,NUM_SCANNER_THREADS=1,"
"RUNTIME_FILTER_MODE=0,MT_DOP=0{erasure_coding}\n")
expected_str = expected_str.format(
erasure_coding=",ALLOW_ERASURE_CODED_FILES=1" if IS_EC else "")
assert expected_str in profile
def test_exec_summary(self):
"""Test that the exec summary is populated correctly in every query state"""
query = "select count(*) from functional.alltypes"
handle = self.execute_query_async(query,
{"debug_action": "CRS_BEFORE_ADMISSION:SLEEP@1000"})
# If ExecuteStatement() has completed and the query is paused in the admission control
# phase, then the coordinator has not started yet and exec_summary should be empty.
exec_summary = self.client.get_exec_summary(handle)
assert exec_summary is not None and exec_summary.nodes is None
# After completion of the admission control phase, the coordinator would have started
# and we should get a populated exec_summary.
self.client.wait_for_admission_control(handle)
exec_summary = self.client.get_exec_summary(handle)
assert exec_summary is not None and exec_summary.nodes is not None
self.client.fetch(query, handle)
exec_summary = self.client.get_exec_summary(handle)
# After fetching the results and reaching finished state, we should still be able to
# fetch an exec_summary.
assert exec_summary is not None and exec_summary.nodes is not None
@SkipIfLocal.multiple_impalad
@pytest.mark.xfail(reason="IMPALA-6338")
def test_profile_fragment_instances(self):
"""IMPALA-6081: Test that the expected number of fragment instances and their exec
nodes appear in the runtime profile, even when fragments may be quickly cancelled when
all results are already returned."""
results = self.execute_query("""
with l as (select * from tpch.lineitem UNION ALL select * from tpch.lineitem)
select STRAIGHT_JOIN count(*) from (select * from tpch.lineitem a LIMIT 1) a
join (select * from l LIMIT 2000000) b on a.l_orderkey = -b.l_orderkey;""")
# There are 3 scan nodes and each appears in the profile 4 times (for 3 fragment
# instances + the averaged fragment).
assert results.runtime_profile.count("HDFS_SCAN_NODE") == 12
# There are 3 exchange nodes and each appears in the profile 2 times (for 1 fragment
# instance + the averaged fragment).
assert results.runtime_profile.count("EXCHANGE_NODE") == 6
# The following appear only in the root fragment which has 1 instance.
assert results.runtime_profile.count("HASH_JOIN_NODE") == 2
assert results.runtime_profile.count("AGGREGATION_NODE") == 2
assert results.runtime_profile.count("PLAN_ROOT_SINK") == 2
def test_query_profile_contains_query_events(self):
"""Test that the expected events show up in a query profile."""
event_regexes = [r'Query Timeline:',
r'Query submitted:',
r'Planning finished:',
r'Submit for admission:',
r'Completed admission:',
r'Ready to start on .* backends:',
r'All .* execution backends \(.* fragment instances\) started:',
r'Rows available:',
r'First row fetched:',
r'Last row fetched:',
r'Released admission control resources:']
query = "select * from functional.alltypes"
runtime_profile = self.execute_query(query).runtime_profile
self.__verify_profile_event_sequence(event_regexes, runtime_profile)
def test_query_profile_contains_instance_events(self):
"""Test that /query_profile_encoded contains an event timeline for fragment
instances, even when there are errors."""
event_regexes = [r'Fragment Instance Lifecycle Event Timeline',
r'Prepare Finished',
r'Open Finished',
r'First Batch Produced',
r'First Batch Sent',
r'ExecInternal Finished']
query = "select count(*) from functional.alltypes"
runtime_profile = self.execute_query(query).runtime_profile
self.__verify_profile_event_sequence(event_regexes, runtime_profile)
def __verify_profile_event_sequence(self, event_regexes, runtime_profile):
"""Check that 'event_regexes' appear in a consecutive series of lines in
'runtime_profile'"""
lines = runtime_profile.splitlines()
event_regex_index = 0
# Check that the strings appear in the above order with no gaps in the profile.
for line in runtime_profile.splitlines():
match = re.search(event_regexes[event_regex_index], line)
if match is not None:
event_regex_index += 1
if event_regex_index == len(event_regexes):
# Found all the lines - we're done.
return
else:
# Haven't found the first regex yet.
assert event_regex_index == 0, \
event_regexes[event_regex_index] + " not in " + line + "\n" + runtime_profile
assert event_regex_index == len(event_regexes), \
"Didn't find all events in profile: \n" + runtime_profile
class TestThriftProfile(ImpalaTestSuite):
@classmethod
def get_workload(self):
return 'functional-query'
# IMPALA-6399: Run this test serially to avoid a delay over the wait time in fetching
# the profile.
# This test needs to call self.client.close() to force computation of query end time,
# so it has to be in its own suite (IMPALA-6498).
@pytest.mark.execute_serially
def test_query_profile_thrift_timestamps(self):
"""Test that the query profile start and end time date-time strings have
nanosecond precision. Nanosecond precision is expected by management API clients
that consume Impala debug webpages."""
query = "select sleep(5)"
handle = self.client.execute_async(query)
query_id = handle.get_handle().id
results = self.client.fetch(query, handle)
self.client.close()
MAX_WAIT = 300
start = time.time()
end = start + MAX_WAIT
while time.time() <= end:
# Sleep before trying to fetch the profile. This helps to prevent a warning when the
# profile is not yet available immediately. It also makes it less likely to
# introduce an error below in future changes by forgetting to sleep.
time.sleep(1)
tree = self.impalad_test_service.get_thrift_profile(query_id)
if not tree:
continue
# tree.nodes[1] corresponds to ClientRequestState::summary_profile_
# See be/src/service/client-request-state.[h|cc].
start_time = tree.nodes[1].info_strings["Start Time"]
end_time = tree.nodes[1].info_strings["End Time"]
# Start and End Times are of the form "2017-12-07 22:26:52.167711000"
start_time_sub_sec_str = start_time.split('.')[-1]
end_time_sub_sec_str = end_time.split('.')[-1]
if len(end_time_sub_sec_str) == 0:
elapsed = time.time() - start
logging.info("end_time_sub_sec_str hasn't shown up yet, elapsed=%d", elapsed)
continue
assert len(end_time_sub_sec_str) == 9, end_time
assert len(start_time_sub_sec_str) == 9, start_time
return True
# If we're here, we didn't get the final thrift profile from the debug web page.
# This could happen due to heavy system load. The test is then inconclusive.
# Log a message and fail this run.
dbg_str = "Debug thrift profile for query {0} not available in {1} seconds".format(
query_id, MAX_WAIT)
assert False, dbg_str
| 47.634058 | 90 | 0.716818 |
from tests.common.impala_cluster import ImpalaCluster
from tests.common.impala_test_suite import ImpalaTestSuite
from tests.common.skip import SkipIfS3, SkipIfADLS, SkipIfIsilon, SkipIfLocal
from tests.util.filesystem_utils import IS_EC
import logging
import pytest
import re
import time
class TestObservability(ImpalaTestSuite):
@classmethod
def get_workload(self):
return 'functional-query'
def test_merge_exchange_num_rows(self):
query = """select tinyint_col, count(*) from functional.alltypes
group by tinyint_col order by tinyint_col limit 5"""
result = self.execute_query(query)
assert result.exec_summary[0]['operator'] == '05:MERGING-EXCHANGE'
assert result.exec_summary[0]['num_rows'] == 5
assert result.exec_summary[0]['est_num_rows'] == 5
assert result.exec_summary[0]['peak_mem'] > 0
for line in result.runtime_profile.split('\n'):
if 'RowsProduced' in line:
assert '(5)' in line
break
def test_broadcast_num_rows(self):
query = """select distinct a.int_col, a.string_col from functional.alltypes a
inner join functional.alltypessmall b on (a.id = b.id)
where a.year = 2009 and b.month = 2"""
result = self.execute_query(query)
assert result.exec_summary[5]['operator'] == '04:EXCHANGE'
assert result.exec_summary[5]['num_rows'] == 25
assert result.exec_summary[5]['est_num_rows'] == 25
assert result.exec_summary[5]['peak_mem'] > 0
@SkipIfS3.hbase
@SkipIfLocal.hbase
@SkipIfIsilon.hbase
@SkipIfADLS.hbase
def test_scan_summary(self):
query = "select count(*) from functional.alltypestiny"
result = self.execute_query(query)
scan_idx = len(result.exec_summary) - 1
assert result.exec_summary[scan_idx]['operator'] == '00:SCAN HDFS'
assert result.exec_summary[scan_idx]['detail'] == 'functional.alltypestiny'
query = "select count(*) from functional_kudu.alltypestiny"
result = self.execute_query(query)
scan_idx = len(result.exec_summary) - 1
assert result.exec_summary[scan_idx]['operator'] == '00:SCAN KUDU'
assert result.exec_summary[scan_idx]['detail'] == 'functional_kudu.alltypestiny'
query = "select count(*) from functional_hbase.alltypestiny"
result = self.execute_query(query)
scan_idx = len(result.exec_summary) - 1
assert result.exec_summary[scan_idx]['operator'] == '00:SCAN HBASE'
assert result.exec_summary[scan_idx]['detail'] == 'functional_hbase.alltypestiny'
def test_query_states(self):
query = "select count(*) from functional.alltypes"
handle = self.execute_query_async(query,
{"debug_action": "CRS_BEFORE_ADMISSION:SLEEP@1000"})
profile = self.client.get_runtime_profile(handle)
assert "Query State: COMPILED" in profile
self.client.wait_for_admission_control(handle)
profile = self.client.get_runtime_profile(handle)
assert "Query State: RUNNING" in profile or \
"Query State: FINISHED" in profile, profile
results = self.client.fetch(query, handle)
profile = self.client.get_runtime_profile(handle)
assert "Query State: FINISHED" in profile, profile
def test_query_options(self):
query_opts = {'mem_limit': 8589934592, 'runtime_filter_wait_time_ms': 0}
profile = self.execute_query("select 1", query_opts).runtime_profile
assert "Query Options (set by configuration): MEM_LIMIT=8589934592" in profile,\
profile
expected_str = ("Query Options (set by configuration and planner): "
"MEM_LIMIT=8589934592,NUM_NODES=1,NUM_SCANNER_THREADS=1,"
"RUNTIME_FILTER_MODE=0,MT_DOP=0{erasure_coding}\n")
expected_str = expected_str.format(
erasure_coding=",ALLOW_ERASURE_CODED_FILES=1" if IS_EC else "")
assert expected_str in profile
def test_exec_summary(self):
query = "select count(*) from functional.alltypes"
handle = self.execute_query_async(query,
{"debug_action": "CRS_BEFORE_ADMISSION:SLEEP@1000"})
exec_summary = self.client.get_exec_summary(handle)
assert exec_summary is not None and exec_summary.nodes is None
self.client.wait_for_admission_control(handle)
exec_summary = self.client.get_exec_summary(handle)
assert exec_summary is not None and exec_summary.nodes is not None
self.client.fetch(query, handle)
exec_summary = self.client.get_exec_summary(handle)
assert exec_summary is not None and exec_summary.nodes is not None
@SkipIfLocal.multiple_impalad
@pytest.mark.xfail(reason="IMPALA-6338")
def test_profile_fragment_instances(self):
results = self.execute_query("""
with l as (select * from tpch.lineitem UNION ALL select * from tpch.lineitem)
select STRAIGHT_JOIN count(*) from (select * from tpch.lineitem a LIMIT 1) a
join (select * from l LIMIT 2000000) b on a.l_orderkey = -b.l_orderkey;""")
assert results.runtime_profile.count("HDFS_SCAN_NODE") == 12
assert results.runtime_profile.count("EXCHANGE_NODE") == 6
assert results.runtime_profile.count("HASH_JOIN_NODE") == 2
assert results.runtime_profile.count("AGGREGATION_NODE") == 2
assert results.runtime_profile.count("PLAN_ROOT_SINK") == 2
def test_query_profile_contains_query_events(self):
event_regexes = [r'Query Timeline:',
r'Query submitted:',
r'Planning finished:',
r'Submit for admission:',
r'Completed admission:',
r'Ready to start on .* backends:',
r'All .* execution backends \(.* fragment instances\) started:',
r'Rows available:',
r'First row fetched:',
r'Last row fetched:',
r'Released admission control resources:']
query = "select * from functional.alltypes"
runtime_profile = self.execute_query(query).runtime_profile
self.__verify_profile_event_sequence(event_regexes, runtime_profile)
def test_query_profile_contains_instance_events(self):
event_regexes = [r'Fragment Instance Lifecycle Event Timeline',
r'Prepare Finished',
r'Open Finished',
r'First Batch Produced',
r'First Batch Sent',
r'ExecInternal Finished']
query = "select count(*) from functional.alltypes"
runtime_profile = self.execute_query(query).runtime_profile
self.__verify_profile_event_sequence(event_regexes, runtime_profile)
def __verify_profile_event_sequence(self, event_regexes, runtime_profile):
lines = runtime_profile.splitlines()
event_regex_index = 0
for line in runtime_profile.splitlines():
match = re.search(event_regexes[event_regex_index], line)
if match is not None:
event_regex_index += 1
if event_regex_index == len(event_regexes):
return
else:
# Haven't found the first regex yet.
assert event_regex_index == 0, \
event_regexes[event_regex_index] + " not in " + line + "\n" + runtime_profile
assert event_regex_index == len(event_regexes), \
"Didn't find all events in profile: \n" + runtime_profile
class TestThriftProfile(ImpalaTestSuite):
@classmethod
def get_workload(self):
return 'functional-query'
# IMPALA-6399: Run this test serially to avoid a delay over the wait time in fetching
# the profile.
# This test needs to call self.client.close() to force computation of query end time,
# so it has to be in its own suite (IMPALA-6498).
@pytest.mark.execute_serially
def test_query_profile_thrift_timestamps(self):
query = "select sleep(5)"
handle = self.client.execute_async(query)
query_id = handle.get_handle().id
results = self.client.fetch(query, handle)
self.client.close()
MAX_WAIT = 300
start = time.time()
end = start + MAX_WAIT
while time.time() <= end:
# Sleep before trying to fetch the profile. This helps to prevent a warning when the
# profile is not yet available immediately. It also makes it less likely to
# introduce an error below in future changes by forgetting to sleep.
time.sleep(1)
tree = self.impalad_test_service.get_thrift_profile(query_id)
if not tree:
continue
# tree.nodes[1] corresponds to ClientRequestState::summary_profile_
# See be/src/service/client-request-state.[h|cc].
start_time = tree.nodes[1].info_strings["Start Time"]
end_time = tree.nodes[1].info_strings["End Time"]
# Start and End Times are of the form "2017-12-07 22:26:52.167711000"
start_time_sub_sec_str = start_time.split('.')[-1]
end_time_sub_sec_str = end_time.split('.')[-1]
if len(end_time_sub_sec_str) == 0:
elapsed = time.time() - start
logging.info("end_time_sub_sec_str hasn't shown up yet, elapsed=%d", elapsed)
continue
assert len(end_time_sub_sec_str) == 9, end_time
assert len(start_time_sub_sec_str) == 9, start_time
return True
dbg_str = "Debug thrift profile for query {0} not available in {1} seconds".format(
query_id, MAX_WAIT)
assert False, dbg_str
| true | true |
f71a466907a327211f69a6d078aeba3666c44465 | 3,067 | py | Python | GPA-Spider/config.py | xsx-123/awesome-sdu-scripts | bc371fda9d4d2a616f82c9a44b7d1d6eddb2c6eb | [
"MIT"
] | 21 | 2021-06-01T09:54:20.000Z | 2022-03-11T16:50:42.000Z | GPA-Spider/config.py | xsx-123/awesome-sdu-scripts | bc371fda9d4d2a616f82c9a44b7d1d6eddb2c6eb | [
"MIT"
] | 1 | 2019-08-16T05:30:19.000Z | 2019-08-16T05:30:19.000Z | GPA-Spider/config.py | xsx-123/awesome-sdu-scripts | bc371fda9d4d2a616f82c9a44b7d1d6eddb2c6eb | [
"MIT"
] | 8 | 2021-07-21T03:11:40.000Z | 2021-12-03T08:25:19.000Z | # -*- coding: utf-8 -*-
#!/usr/bin/env python
# Copyright 2018 ZhangT. All Rights Reserved.
# Author: ZhangT
# Author-Github: github.com/zhangt2333
# config.py 2018/2/10 21:49
# 包含一些通用常量和工具函数
HEADERS = {"Host": "bkjws.sdu.edu.cn",
"Connection": "keep-alive",
"Accept": "*/*",
"Origin": "http://bkjws.sdu.edu.cn",
"X-Requested-With": "XMLHttpRequest",
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Accept-Language": "zh-CN,zh;q=0.8"}
# 获取成绩时候的post数据
aoData = 'aoData=%5B%7B%22name%22%3A%22sEcho%22%2C%22value%22%3A1%7D%2C%7B%22' \
'name%22%3A%22iColumns%22%2C%22value%22%3A8%7D%2C%7B%22name%22%3A%22' \
'sColumns%22%2C%22value%22%3A%22%22%7D%2C%7B%22name%22%3A%22iDisplay' \
'Start%22%2C%22value%22%3A0%7D%2C%7B%22name%22%3A%22iDisplayLength%2' \
'2%2C%22value%22%3A-1%7D%2C%7B%22name%22%3A%22mDataProp_0%22%2C%22va' \
'lue%22%3A%22function%22%7D%2C%7B%22name%22%3A%22mDataProp_1%22%2C%2' \
'2value%22%3A%22kch%22%7D%2C%7B%22name%22%3A%22mDataProp_2%22%2C%22v' \
'alue%22%3A%22kcm%22%7D%2C%7B%22name%22%3A%22mDataProp_3%22%2C%22val' \
'ue%22%3A%22kxh%22%7D%2C%7B%22name%22%3A%22mDataProp_4%22%2C%22value' \
'%22%3A%22xf%22%7D%2C%7B%22name%22%3A%22mDataProp_5%22%2C%22value%22' \
'%3A%22kssj%22%7D%2C%7B%22name%22%3A%22mDataProp_6%22%2C%22value%22%' \
'3A%22kscjView%22%7D%2C%7B%22name%22%3A%22mDataProp_7%22%2C%22value%' \
'22%3A%22kcsx%22%7D%2C%7B%22name%22%3A%22iSortingCols%22%2C%22value%' \
'22%3A0%7D%2C%7B%22name%22%3A%22bSortable_0%22%2C%22value%22%3Afalse' \
'%7D%2C%7B%22name%22%3A%22bSortable_1%22%2C%22value%22%3Afalse%7D%2C' \
'%7B%22name%22%3A%22bSortable_2%22%2C%22value%22%3Afalse%7D%2C%7B%22' \
'name%22%3A%22bSortable_3%22%2C%22value%22%3Afalse%7D%2C%7B%22name%2' \
'2%3A%22bSortable_4%22%2C%22value%22%3Afalse%7D%2C%7B%22name%22%3A%22' \
'bSortable_5%22%2C%22value%22%3Afalse%7D%2C%7B%22name%22%3A%22bSortab' \
'le_6%22%2C%22value%22%3Afalse%7D%2C%7B%22name%22%3A%22bSortable_7%22' \
'%2C%22value%22%3Afalse%7D%5D'
def strB2Q(ustring):
"""工具函数:全角转半角"""
rstring = ""
for uchar in ustring:
inside_code = ord(uchar)
if inside_code == 32: # 全角空格直接转换
inside_code = 12288
elif (inside_code >= 33 and inside_code <= 126): # 全角字符(除空格)根据关系转化
inside_code += 65248
rstring += chr(inside_code)
return rstring
def Align_CHstr(str, format_spec):
"""工具函数:处理一个中英文混杂str的填充对齐"""
format_spec = "{0:{1}" + format_spec + "}"
return format_spec.format(strB2Q(str), chr(12288))
def compare_xnxq(xnxq1, xnxq2):
"""返回 xnxq1 > xnxq2"""
tmp = xnxq1.split('-')
xnxq1 = tmp[2] + tmp[1]*10 + tmp[0]*10000
tmp = xnxq2.split('-')
xnxq2 = tmp[2] + tmp[1]*10 + tmp[0]*10000
return xnxq1 > xnxq2 | 45.776119 | 137 | 0.635474 |
HEADERS = {"Host": "bkjws.sdu.edu.cn",
"Connection": "keep-alive",
"Accept": "*/*",
"Origin": "http://bkjws.sdu.edu.cn",
"X-Requested-With": "XMLHttpRequest",
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Accept-Language": "zh-CN,zh;q=0.8"}
aoData = 'aoData=%5B%7B%22name%22%3A%22sEcho%22%2C%22value%22%3A1%7D%2C%7B%22' \
'name%22%3A%22iColumns%22%2C%22value%22%3A8%7D%2C%7B%22name%22%3A%22' \
'sColumns%22%2C%22value%22%3A%22%22%7D%2C%7B%22name%22%3A%22iDisplay' \
'Start%22%2C%22value%22%3A0%7D%2C%7B%22name%22%3A%22iDisplayLength%2' \
'2%2C%22value%22%3A-1%7D%2C%7B%22name%22%3A%22mDataProp_0%22%2C%22va' \
'lue%22%3A%22function%22%7D%2C%7B%22name%22%3A%22mDataProp_1%22%2C%2' \
'2value%22%3A%22kch%22%7D%2C%7B%22name%22%3A%22mDataProp_2%22%2C%22v' \
'alue%22%3A%22kcm%22%7D%2C%7B%22name%22%3A%22mDataProp_3%22%2C%22val' \
'ue%22%3A%22kxh%22%7D%2C%7B%22name%22%3A%22mDataProp_4%22%2C%22value' \
'%22%3A%22xf%22%7D%2C%7B%22name%22%3A%22mDataProp_5%22%2C%22value%22' \
'%3A%22kssj%22%7D%2C%7B%22name%22%3A%22mDataProp_6%22%2C%22value%22%' \
'3A%22kscjView%22%7D%2C%7B%22name%22%3A%22mDataProp_7%22%2C%22value%' \
'22%3A%22kcsx%22%7D%2C%7B%22name%22%3A%22iSortingCols%22%2C%22value%' \
'22%3A0%7D%2C%7B%22name%22%3A%22bSortable_0%22%2C%22value%22%3Afalse' \
'%7D%2C%7B%22name%22%3A%22bSortable_1%22%2C%22value%22%3Afalse%7D%2C' \
'%7B%22name%22%3A%22bSortable_2%22%2C%22value%22%3Afalse%7D%2C%7B%22' \
'name%22%3A%22bSortable_3%22%2C%22value%22%3Afalse%7D%2C%7B%22name%2' \
'2%3A%22bSortable_4%22%2C%22value%22%3Afalse%7D%2C%7B%22name%22%3A%22' \
'bSortable_5%22%2C%22value%22%3Afalse%7D%2C%7B%22name%22%3A%22bSortab' \
'le_6%22%2C%22value%22%3Afalse%7D%2C%7B%22name%22%3A%22bSortable_7%22' \
'%2C%22value%22%3Afalse%7D%5D'
def strB2Q(ustring):
rstring = ""
for uchar in ustring:
inside_code = ord(uchar)
if inside_code == 32:
inside_code = 12288
elif (inside_code >= 33 and inside_code <= 126):
inside_code += 65248
rstring += chr(inside_code)
return rstring
def Align_CHstr(str, format_spec):
format_spec = "{0:{1}" + format_spec + "}"
return format_spec.format(strB2Q(str), chr(12288))
def compare_xnxq(xnxq1, xnxq2):
tmp = xnxq1.split('-')
xnxq1 = tmp[2] + tmp[1]*10 + tmp[0]*10000
tmp = xnxq2.split('-')
xnxq2 = tmp[2] + tmp[1]*10 + tmp[0]*10000
return xnxq1 > xnxq2 | true | true |
f71a467939d4c660726511d6392456a49b013fa9 | 384 | py | Python | sandbox/team_members/pudumula/ros/gazebo_ws_1/build/rrbot_description/catkin_generated/pkg.installspace.context.pc.py | Project-Heisenberg/quantum | f3ad8f4693007e45e80a88f928273adcfdc8529d | [
"Apache-2.0"
] | 1 | 2017-04-23T14:23:54.000Z | 2017-04-23T14:23:54.000Z | sandbox/team_members/pudumula/ros/gazebo_ws_1/build/rrbot_description/catkin_generated/pkg.installspace.context.pc.py | Project-Heisenberg/quantum | f3ad8f4693007e45e80a88f928273adcfdc8529d | [
"Apache-2.0"
] | 13 | 2016-03-25T05:15:17.000Z | 2018-05-30T15:53:12.000Z | sandbox/team_members/pudumula/ros/gazebo_ws_1/build/rrbot_description/catkin_generated/pkg.installspace.context.pc.py | Project-Heisenberg/quantum | f3ad8f4693007e45e80a88f928273adcfdc8529d | [
"Apache-2.0"
] | null | null | null | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "rrbot_description"
PROJECT_SPACE_DIR = "/home/neo/ros/gazebo_ws_1/install"
PROJECT_VERSION = "0.0.0"
| 42.666667 | 68 | 0.710938 |
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "rrbot_description"
PROJECT_SPACE_DIR = "/home/neo/ros/gazebo_ws_1/install"
PROJECT_VERSION = "0.0.0"
| true | true |
f71a46e6e4364c2e9a02fba2afe9a37df835f18f | 2,165 | py | Python | azure-mgmt-network/azure/mgmt/network/v2018_11_01/models/azure_firewall_network_rule_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | azure-mgmt-network/azure/mgmt/network/v2018_11_01/models/azure_firewall_network_rule_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | azure-mgmt-network/azure/mgmt/network/v2018_11_01/models/azure_firewall_network_rule_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AzureFirewallNetworkRule(Model):
"""Properties of the network rule.
:param name: Name of the network rule.
:type name: str
:param description: Description of the rule.
:type description: str
:param protocols: Array of AzureFirewallNetworkRuleProtocols.
:type protocols: list[str or
~azure.mgmt.network.v2018_11_01.models.AzureFirewallNetworkRuleProtocol]
:param source_addresses: List of source IP addresses for this rule.
:type source_addresses: list[str]
:param destination_addresses: List of destination IP addresses.
:type destination_addresses: list[str]
:param destination_ports: List of destination ports.
:type destination_ports: list[str]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'protocols': {'key': 'protocols', 'type': '[str]'},
'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'},
'destination_addresses': {'key': 'destinationAddresses', 'type': '[str]'},
'destination_ports': {'key': 'destinationPorts', 'type': '[str]'},
}
def __init__(self, *, name: str=None, description: str=None, protocols=None, source_addresses=None, destination_addresses=None, destination_ports=None, **kwargs) -> None:
super(AzureFirewallNetworkRule, self).__init__(**kwargs)
self.name = name
self.description = description
self.protocols = protocols
self.source_addresses = source_addresses
self.destination_addresses = destination_addresses
self.destination_ports = destination_ports
| 43.3 | 174 | 0.647575 |
from msrest.serialization import Model
class AzureFirewallNetworkRule(Model):
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'protocols': {'key': 'protocols', 'type': '[str]'},
'source_addresses': {'key': 'sourceAddresses', 'type': '[str]'},
'destination_addresses': {'key': 'destinationAddresses', 'type': '[str]'},
'destination_ports': {'key': 'destinationPorts', 'type': '[str]'},
}
def __init__(self, *, name: str=None, description: str=None, protocols=None, source_addresses=None, destination_addresses=None, destination_ports=None, **kwargs) -> None:
super(AzureFirewallNetworkRule, self).__init__(**kwargs)
self.name = name
self.description = description
self.protocols = protocols
self.source_addresses = source_addresses
self.destination_addresses = destination_addresses
self.destination_ports = destination_ports
| true | true |
f71a47042dc21875d17453ebc714d5444f63f220 | 1,718 | py | Python | docker/demultiplexing/demuxlet/generate_zarr.py | jggatter/cumulus | 1dfd9dfce5a44ff867859db6f24a356f72c6ccdd | [
"BSD-3-Clause"
] | null | null | null | docker/demultiplexing/demuxlet/generate_zarr.py | jggatter/cumulus | 1dfd9dfce5a44ff867859db6f24a356f72c6ccdd | [
"BSD-3-Clause"
] | null | null | null | docker/demultiplexing/demuxlet/generate_zarr.py | jggatter/cumulus | 1dfd9dfce5a44ff867859db6f24a356f72c6ccdd | [
"BSD-3-Clause"
] | null | null | null | import argparse
import pegasusio as pio
import pandas as pd
parser = argparse.ArgumentParser(description='Merge demuxlet result with gene-count matrix.')
parser.add_argument('demux_res', metavar = 'demux_result.best', help = 'Demuxlet demultiplexing results.')
parser.add_argument('raw_mat', metavar = 'raw_feature_bc_matrix.h5', help = 'Raw gene count matrix in 10x format.')
parser.add_argument('out_file', metavar = 'output_result.zarr', help = 'Output zarr file.')
args = parser.parse_args()
demux_type_dict = {'SNG': 'singlet', 'DBL': 'doublet', 'AMB': 'unknown'}
def write_output(assignment_file: str, input_mat_file: str, output_zarr_file: str) -> None:
df = pd.read_csv(assignment_file, sep = '\t', header = 0, index_col = 'BARCODE')
df.index = pd.Index([x[:-2] for x in df.index])
df['demux_type'] = df['DROPLET.TYPE'].apply(lambda s: demux_type_dict[s])
df['assignment'] = ''
df.loc[df['demux_type'] == 'singlet', 'assignment'] = df.loc[df['demux_type'] == 'singlet', 'SNG.BEST.GUESS']
df.loc[df['demux_type'] == 'doublet', 'assignment'] = df.loc[df['demux_type'] == 'doublet', 'DBL.BEST.GUESS'].apply(lambda s: ','.join(s.split(',')[:-1]))
data = pio.read_input(input_mat_file)
data.obs['demux_type'] = ''
data.obs['assignment'] = ''
idx = data.obs_names.isin(df.index)
barcodes = data.obs_names[idx]
df_valid = df.loc[barcodes, ['demux_type', 'assignment']]
data.obs.loc[idx, 'demux_type'] = df_valid['demux_type'].values
data.obs.loc[idx, 'assignment'] = df_valid['assignment'].values
pio.write_output(data, output_zarr_file, zarr_zipstore = True)
if __name__ == '__main__':
write_output(args.demux_res, args.raw_mat, args.out_file) | 47.722222 | 158 | 0.689173 | import argparse
import pegasusio as pio
import pandas as pd
parser = argparse.ArgumentParser(description='Merge demuxlet result with gene-count matrix.')
parser.add_argument('demux_res', metavar = 'demux_result.best', help = 'Demuxlet demultiplexing results.')
parser.add_argument('raw_mat', metavar = 'raw_feature_bc_matrix.h5', help = 'Raw gene count matrix in 10x format.')
parser.add_argument('out_file', metavar = 'output_result.zarr', help = 'Output zarr file.')
args = parser.parse_args()
demux_type_dict = {'SNG': 'singlet', 'DBL': 'doublet', 'AMB': 'unknown'}
def write_output(assignment_file: str, input_mat_file: str, output_zarr_file: str) -> None:
df = pd.read_csv(assignment_file, sep = '\t', header = 0, index_col = 'BARCODE')
df.index = pd.Index([x[:-2] for x in df.index])
df['demux_type'] = df['DROPLET.TYPE'].apply(lambda s: demux_type_dict[s])
df['assignment'] = ''
df.loc[df['demux_type'] == 'singlet', 'assignment'] = df.loc[df['demux_type'] == 'singlet', 'SNG.BEST.GUESS']
df.loc[df['demux_type'] == 'doublet', 'assignment'] = df.loc[df['demux_type'] == 'doublet', 'DBL.BEST.GUESS'].apply(lambda s: ','.join(s.split(',')[:-1]))
data = pio.read_input(input_mat_file)
data.obs['demux_type'] = ''
data.obs['assignment'] = ''
idx = data.obs_names.isin(df.index)
barcodes = data.obs_names[idx]
df_valid = df.loc[barcodes, ['demux_type', 'assignment']]
data.obs.loc[idx, 'demux_type'] = df_valid['demux_type'].values
data.obs.loc[idx, 'assignment'] = df_valid['assignment'].values
pio.write_output(data, output_zarr_file, zarr_zipstore = True)
if __name__ == '__main__':
write_output(args.demux_res, args.raw_mat, args.out_file) | true | true |
f71a4726a2407751112c37ace25b054f8f423083 | 152 | py | Python | tests/model_control/detailed/transf_BoxCox/model_control_one_enabled_BoxCox_MovingAverage_BestCycle_AR.py | shaido987/pyaf | b9afd089557bed6b90b246d3712c481ae26a1957 | [
"BSD-3-Clause"
] | 377 | 2016-10-13T20:52:44.000Z | 2022-03-29T18:04:14.000Z | tests/model_control/detailed/transf_BoxCox/model_control_one_enabled_BoxCox_MovingAverage_BestCycle_AR.py | ysdede/pyaf | b5541b8249d5a1cfdc01f27fdfd99b6580ed680b | [
"BSD-3-Clause"
] | 160 | 2016-10-13T16:11:53.000Z | 2022-03-28T04:21:34.000Z | tests/model_control/detailed/transf_BoxCox/model_control_one_enabled_BoxCox_MovingAverage_BestCycle_AR.py | ysdede/pyaf | b5541b8249d5a1cfdc01f27fdfd99b6580ed680b | [
"BSD-3-Clause"
] | 63 | 2017-03-09T14:51:18.000Z | 2022-03-27T20:52:57.000Z | import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['BoxCox'] , ['MovingAverage'] , ['BestCycle'] , ['AR'] ); | 38 | 79 | 0.743421 | import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['BoxCox'] , ['MovingAverage'] , ['BestCycle'] , ['AR'] ); | true | true |
f71a48ee730e59aec180da887d03b93c9e9a6c0f | 40,848 | py | Python | gym_miniworld/miniworld.py | HuangHaoyu1997/gym-miniworld | 77dc24bf1b1ca8c2cfefadfe3e35a0deb2d08a1a | [
"Apache-2.0"
] | null | null | null | gym_miniworld/miniworld.py | HuangHaoyu1997/gym-miniworld | 77dc24bf1b1ca8c2cfefadfe3e35a0deb2d08a1a | [
"Apache-2.0"
] | null | null | null | gym_miniworld/miniworld.py | HuangHaoyu1997/gym-miniworld | 77dc24bf1b1ca8c2cfefadfe3e35a0deb2d08a1a | [
"Apache-2.0"
] | null | null | null | import math
from enum import IntEnum
import numpy as np
import gym
from gym import spaces
from .random import *
from .opengl import *
from .objmesh import *
from .entity import *
from .math import *
from .params import *
# Default height of a room's walls, in meters
DEFAULT_WALL_HEIGHT = 2.74

# Texture size/density in texels/meter
TEX_DENSITY = 512

def gen_texcs_wall(
    tex,
    min_x,
    min_y,
    width,
    height
):
    """
    Generate texture coordinates for a wall quad.

    The quad spans [min_x, min_x+width] along the wall and
    [min_y, min_y+height] vertically; coordinates are scaled so
    that texel density stays constant regardless of texture size.
    """
    # Meters-to-UV scale factors for each texture axis
    u_scale = TEX_DENSITY / tex.width
    v_scale = TEX_DENSITY / tex.height
    u0 = min_x * u_scale
    u1 = (min_x + width) * u_scale
    v0 = min_y * v_scale
    v1 = (min_y + height) * v_scale
    # One (u, v) pair per quad corner, counter-clockwise
    return np.array(
        [[u0, v0], [u0, v1], [u1, v1], [u1, v0]],
        dtype=np.float32
    )

def gen_texcs_floor(
    tex,
    poss
):
    """
    Generate texture coordinates for the floor or ceiling.

    World-space x,z vertex positions are mapped directly to texture
    coordinates, again keeping texel density constant.
    """
    # Meters-to-UV scale factors for each texture axis
    scale = np.array(
        [TEX_DENSITY / tex.width, TEX_DENSITY / tex.height],
        dtype=float
    )
    # Project the 3D vertices onto the XZ plane, then scale
    return np.stack([poss[:, 0], poss[:, 2]], axis=1) * scale
class Room:
    """
    Represent an individual room and its contents.

    A room is defined by a 2D outline (its floor plan), a wall height,
    and the textures used for its floor, walls and ceiling. Openings
    (portals) can be punched into its walls to connect it to other rooms.
    """
    def __init__(
        self,
        outline,
        wall_height=DEFAULT_WALL_HEIGHT,
        floor_tex='floor_tiles_bw',
        wall_tex='concrete',
        ceil_tex='concrete_tiles',
        no_ceiling=False
    ):
        # The outline should have shape Nx2: one (x, z) pair per corner,
        # listed in counter-clockwise order when viewed from above
        assert len(outline.shape) == 2
        assert outline.shape[1] == 2
        assert outline.shape[0] >= 3
        # Add a Y coordinate to the outline points
        outline = np.insert(outline, 1, 0, axis=1)
        # Number of outline vertices / walls
        self.num_walls = outline.shape[0]
        # List of 2D points forming the outline of the room
        # Shape is Nx3
        self.outline = outline
        # Compute the min and max x, z extents
        self.min_x = self.outline[:, 0].min()
        self.max_x = self.outline[:, 0].max()
        self.min_z = self.outline[:, 2].min()
        self.max_z = self.outline[:, 2].max()
        # Compute midpoint coordinates
        self.mid_x = (self.max_x + self.min_x) / 2
        self.mid_z = (self.max_z + self.min_z) / 2
        # Compute approximate surface area (bounding-box area, not the
        # exact polygon area for non-rectangular rooms)
        self.area = (self.max_x - self.min_x) * (self.max_z - self.min_z)
        # Compute room edge directions and normals
        # Compute edge vectors (p1 - p0)
        # For the first point, p0 is the last
        # For the last point, p0 is p_n-1
        next_pts = np.concatenate([self.outline[1:], np.expand_dims(self.outline[0], axis=0)], axis=0)
        self.edge_dirs = next_pts - self.outline
        self.edge_dirs = (self.edge_dirs.T / np.linalg.norm(self.edge_dirs, axis=1)).T
        # Edge normals point towards the inside of the room
        # (see point_inside, which tests dot(normal, p - p0) > 0)
        self.edge_norms = -np.cross(self.edge_dirs, Y_VEC)
        self.edge_norms = (self.edge_norms.T / np.linalg.norm(self.edge_norms, axis=1)).T
        # Height of the room walls
        self.wall_height = wall_height
        # No ceiling flag
        self.no_ceiling = no_ceiling
        # Texture names
        self.wall_tex_name = wall_tex
        self.floor_tex_name = floor_tex
        self.ceil_tex_name = ceil_tex
        # Lists of portals, indexed by wall/edge index
        self.portals = [[] for i in range(self.num_walls)]
        # List of neighbor rooms
        # Same length as list of portals
        self.neighbors = []

    def add_portal(
        self,
        edge,
        start_pos=None,
        end_pos=None,
        min_x=None,
        max_x=None,
        min_z=None,
        max_z=None,
        min_y=0,
        max_y=None
    ):
        """
        Create a new portal/opening in a wall of this room.

        The portal extents can be given either as distances along the
        wall (start_pos/end_pos), or as absolute x coordinates
        (min_x/max_x), or as absolute z coordinates (min_z/max_z),
        which are projected onto the wall. min_y/max_y bound the
        vertical extent of the opening.

        Returns the (start_pos, end_pos) distances along the wall.
        """
        if max_y == None:
            max_y = self.wall_height

        assert edge <= self.num_walls
        assert max_y > min_y

        # Get the edge points, compute the direction vector
        e_p0 = self.outline[edge]
        e_p1 = self.outline[(edge+1) % self.num_walls]
        e_len = np.linalg.norm(e_p1 - e_p0)
        e_dir = (e_p1 - e_p0) / e_len
        x0, _, z0 = e_p0
        x1, _, z1 = e_p1
        dx, _, dz = e_dir

        # If the portal extents are specified by x coordinates
        if min_x != None:
            assert min_z == None and max_z == None
            assert start_pos == None and end_pos == None
            assert x0 != x1

            # Project the x extents onto the wall direction
            m0 = (min_x - x0) / dx
            m1 = (max_x - x0) / dx

            if m1 < m0:
                m0, m1 = m1, m0

            start_pos, end_pos = m0, m1

        # If the portal extents are specified by z coordinates
        elif min_z != None:
            assert min_x == None and max_x == None
            assert start_pos == None and end_pos == None
            assert z0 != z1

            # Project the z extents onto the wall direction
            m0 = (min_z - z0) / dz
            m1 = (max_z - z0) / dz

            if m1 < m0:
                m0, m1 = m1, m0

            start_pos, end_pos = m0, m1

        else:
            assert min_x == None and max_x == None
            assert min_z == None and max_z == None

        assert end_pos > start_pos
        assert start_pos >= 0, "portal outside of wall extents"
        assert end_pos <= e_len, "portal outside of wall extents"

        self.portals[edge].append({
            'start_pos': start_pos,
            'end_pos': end_pos,
            'min_y': min_y,
            'max_y': max_y
        })

        # Sort the portals by start position
        self.portals[edge].sort(key=lambda e: e['start_pos'])

        return start_pos, end_pos

    def point_inside(self, p):
        """
        Test if a point is inside the room
        """
        # Vector from each edge start to the test point
        ap = p - self.outline

        # Compute the dot products of normals to AP vectors
        dotNAP = np.sum(self.edge_norms * ap, axis=1)

        # The point is inside if all the dot products are greater than zero
        # (it lies on the inner side of every edge)
        return np.all(np.greater(dotNAP, 0))

    def _gen_static_data(self, params, rng):
        """
        Generate polygons and static data for this room.
        Needed for rendering and collision detection.
        Note: the wall polygons are quads, but the floor and
        ceiling can be arbitrary n-gons.
        """
        # Load the textures and do texture randomization
        self.wall_tex = Texture.get(self.wall_tex_name, rng)
        self.floor_tex = Texture.get(self.floor_tex_name, rng)
        self.ceil_tex = Texture.get(self.ceil_tex_name, rng)

        # Generate the floor vertices
        self.floor_verts = self.outline
        self.floor_texcs = gen_texcs_floor(
            self.floor_tex,
            self.floor_verts
        )

        # Generate the ceiling vertices
        # Flip the ceiling vertex order because of backface culling
        self.ceil_verts = np.flip(self.outline, axis=0) + self.wall_height * Y_VEC
        self.ceil_texcs = gen_texcs_floor(
            self.ceil_tex,
            self.ceil_verts
        )

        self.wall_verts = []
        self.wall_norms = []
        self.wall_texcs = []
        self.wall_segs = []

        def gen_seg_poly(
            edge_p0,
            side_vec,
            seg_start,
            seg_end,
            min_y,
            max_y
        ):
            # Emit one wall quad covering the given span of the wall;
            # degenerate (zero-width or zero-height) spans produce nothing
            if seg_end == seg_start:
                return

            if min_y == max_y:
                return

            s_p0 = edge_p0 + seg_start * side_vec
            s_p1 = edge_p0 + seg_end * side_vec

            # If this polygon starts at ground level, add a collidable segment
            if min_y == 0:
                self.wall_segs.append(np.array([s_p1, s_p0]))

            # Generate the vertices
            # Vertices are listed in counter-clockwise order
            self.wall_verts.append(s_p0 + min_y * Y_VEC)
            self.wall_verts.append(s_p0 + max_y * Y_VEC)
            self.wall_verts.append(s_p1 + max_y * Y_VEC)
            self.wall_verts.append(s_p1 + min_y * Y_VEC)

            # Compute the normal for the polygon (same for all four vertices)
            normal = np.cross(s_p1 - s_p0, Y_VEC)
            normal = -normal / np.linalg.norm(normal)
            for i in range(4):
                self.wall_norms.append(normal)

            # Generate the texture coordinates
            texcs = gen_texcs_wall(
                self.wall_tex,
                seg_start,
                min_y,
                seg_end - seg_start,
                max_y - min_y
            )
            self.wall_texcs.append(texcs)

        # For each wall
        for wall_idx in range(self.num_walls):
            edge_p0 = self.outline[wall_idx, :]
            edge_p1 = self.outline[(wall_idx+1) % self.num_walls, :]
            wall_width = np.linalg.norm(edge_p1 - edge_p0)
            side_vec = (edge_p1 - edge_p0) / wall_width

            if len(self.portals[wall_idx]) > 0:
                seg_end = self.portals[wall_idx][0]['start_pos']
            else:
                seg_end = wall_width

            # Generate the first polygon (going up to the first portal)
            gen_seg_poly(
                edge_p0,
                side_vec,
                0,
                seg_end,
                0,
                self.wall_height
            )

            # For each portal in this wall (portals are sorted by start_pos)
            for portal_idx, portal in enumerate(self.portals[wall_idx]):
                portal = self.portals[wall_idx][portal_idx]
                start_pos = portal['start_pos']
                end_pos = portal['end_pos']
                min_y = portal['min_y']
                max_y = portal['max_y']

                # Generate the bottom polygon (below the opening)
                gen_seg_poly(
                    edge_p0,
                    side_vec,
                    start_pos,
                    end_pos,
                    0,
                    min_y
                )

                # Generate the top polygon (above the opening)
                gen_seg_poly(
                    edge_p0,
                    side_vec,
                    start_pos,
                    end_pos,
                    max_y,
                    self.wall_height
                )

                if portal_idx < len(self.portals[wall_idx]) - 1:
                    next_portal = self.portals[wall_idx][portal_idx+1]
                    next_portal_start = next_portal['start_pos']
                else:
                    next_portal_start = wall_width

                # Generate the polygon going up to the next portal
                gen_seg_poly(
                    edge_p0,
                    side_vec,
                    end_pos,
                    next_portal_start,
                    0,
                    self.wall_height
                )

        self.wall_verts = np.array(self.wall_verts)
        self.wall_norms = np.array(self.wall_norms)

        # Keep array shapes consistent even when the room has no walls
        if len(self.wall_segs) > 0:
            self.wall_segs = np.array(self.wall_segs)
        else:
            self.wall_segs = np.array([]).reshape(0, 2, 3)

        if len(self.wall_texcs) > 0:
            self.wall_texcs = np.concatenate(self.wall_texcs)
        else:
            self.wall_texcs = np.array([]).reshape(0, 2)

    def _render(self):
        """
        Render the static elements of the room
        """
        glColor3f(1, 1, 1)

        # Draw the floor
        self.floor_tex.bind()
        glBegin(GL_POLYGON)
        glNormal3f(0, 1, 0)
        for i in range(self.floor_verts.shape[0]):
            glTexCoord2f(*self.floor_texcs[i, :])
            glVertex3f(*self.floor_verts[i, :])
        glEnd()

        # Draw the ceiling
        if not self.no_ceiling:
            self.ceil_tex.bind()
            glBegin(GL_POLYGON)
            glNormal3f(0, -1, 0)
            for i in range(self.ceil_verts.shape[0]):
                glTexCoord2f(*self.ceil_texcs[i, :])
                glVertex3f(*self.ceil_verts[i, :])
            glEnd()

        # Draw the walls (quads generated by _gen_static_data)
        self.wall_tex.bind()
        glBegin(GL_QUADS)
        for i in range(self.wall_verts.shape[0]):
            glNormal3f(*self.wall_norms[i, :])
            glTexCoord2f(*self.wall_texcs[i, :])
            glVertex3f(*self.wall_verts[i, :])
        glEnd()
class MiniWorldEnv(gym.Env):
    """
    Base class for MiniWorld environments. Implements the procedural
    world generation and simulation logic.
    """

    metadata = {
        'render.modes': ['human', 'rgb_array'],
        'video.frames_per_second' : 30
    }

    # Enumeration of possible actions (discrete action space)
    class Actions(IntEnum):
        # Turn left or right by a small amount
        turn_left = 0
        turn_right = 1
        # Move forward or back by a small amount
        move_forward = 2
        move_back = 3
        # Pick up or drop an object being carried
        pickup = 4
        drop = 5
        # Toggle/activate an object
        toggle = 6
        # Done completing task
        done = 7
    def __init__(
        self,
        max_episode_steps=1500,
        obs_width=80,
        obs_height=60,
        window_width=800,
        window_height=600,
        params=DEFAULT_PARAMS,
        domain_rand=False
    ):
        # NOTE(review): params defaults to the shared DEFAULT_PARAMS object;
        # mutating it from one env instance would affect all others — confirm
        # it is treated as read-only.
        # Action enumeration for this environment
        self.actions = MiniWorldEnv.Actions

        # Actions are discrete integer values
        self.action_space = spaces.Discrete(len(self.actions))

        # Observations are RGB images with pixels in [0, 255]
        self.observation_space = spaces.Box(
            low=0,
            high=255,
            shape=(obs_height, obs_width, 3),
            dtype=np.uint8
        )

        self.reward_range = (-math.inf, math.inf)

        # Maximum number of steps per episode
        self.max_episode_steps = max_episode_steps

        # Simulation parameters, used for domain randomization
        self.params = params

        # Domain randomization enable/disable flag
        self.domain_rand = domain_rand

        # Window for displaying the environment to humans
        self.window = None

        # Invisible window to render into (shadow OpenGL context)
        self.shadow_window = pyglet.window.Window(width=1, height=1, visible=False)

        # Enable depth testing and backface culling
        glEnable(GL_DEPTH_TEST)
        glEnable(GL_CULL_FACE)

        # Frame buffer used to render observations
        self.obs_fb = FrameBuffer(obs_width, obs_height, 8)

        # Frame buffer used for human visualization
        self.vis_fb = FrameBuffer(window_width, window_height, 16)

        # Compute the observation display size (for the human-view window)
        self.obs_disp_width = 256
        self.obs_disp_height = obs_height * (self.obs_disp_width / obs_width)

        # For displaying text overlaid on the human-view window
        self.text_label = pyglet.text.Label(
            font_name="Arial",
            font_size=14,
            multiline=True,
            width=400,
            x = window_width + 5,
            y = window_height - (self.obs_disp_height + 19)
        )

        # Initialize the state: seed the RNG and generate the first world
        self.seed()
        self.reset()
    def close(self):
        """
        Clean up resources when the environment is no longer needed.
        Currently a no-op.
        """
        pass
    def seed(self, seed=None):
        """
        Seed the random number generator used for world generation.
        Returns the seed in a list, per the gym seeding convention.
        """
        self.rand = RandGen(seed)
        return [seed]
    def reset(self):
        """
        Reset the simulation at the start of a new episode.

        This also randomizes many environment parameters (domain
        randomization) and returns the first observation.
        """
        # Step count since episode start
        self.step_count = 0

        # Create the agent
        self.agent = Agent()

        # List of entities contained
        self.entities = []

        # List of rooms in the world
        self.rooms = []

        # Wall segments for collision detection
        # Shape is (N, 2, 3)
        self.wall_segs = []

        # Generate the world (implemented by the derived class)
        self._gen_world()

        # Check if domain randomization is enabled or not
        rand = self.rand if self.domain_rand else None

        # Randomize elements of the world (domain randomization)
        self.params.sample_many(rand, self, [
            'sky_color',
            'light_pos',
            'light_color',
            'light_ambient'
        ])

        # Get the max forward step distance
        self.max_forward_step = self.params.get_max('forward_step')

        # Randomize parameters of the entities
        for ent in self.entities:
            ent.randomize(self.params, rand)

        # Compute the min and max x, z extents of the whole floorplan
        self.min_x = min([r.min_x for r in self.rooms])
        self.max_x = max([r.max_x for r in self.rooms])
        self.min_z = min([r.min_z for r in self.rooms])
        self.max_z = max([r.max_z for r in self.rooms])

        # Generate static data (unless _gen_world already did so)
        if len(self.wall_segs) == 0:
            self._gen_static_data()

        # Pre-compile static parts of the environment into a display list
        self._render_static()

        # Generate the first camera image
        obs = self.render_obs()

        # Return first observation
        return obs
def _get_carry_pos(self, agent_pos, ent):
"""
Compute the position at which to place an object being carried
"""
dist = self.agent.radius + ent.radius + self.max_forward_step
pos = agent_pos + self.agent.dir_vec * 1.05 * dist
# Adjust the Y-position so the object is visible while being carried
y_pos = max(self.agent.cam_height - ent.height - 0.3, 0)
pos = pos + Y_VEC * y_pos
return pos
    def move_agent(self, fwd_dist, fwd_drift):
        """
        Move the agent forward by fwd_dist meters, with fwd_drift of
        lateral drift. Returns False (without moving) if the move would
        cause a collision, True otherwise.
        """
        next_pos = (
            self.agent.pos +
            self.agent.dir_vec * fwd_dist +
            self.agent.right_vec * fwd_drift
        )

        # Reject the move if the agent itself would collide
        if self.intersect(self.agent, next_pos, self.agent.radius):
            return False

        carrying = self.agent.carrying
        if carrying:
            # The carried object must also fit at its new position
            next_carrying_pos = self._get_carry_pos(next_pos, carrying)

            if self.intersect(carrying, next_carrying_pos, carrying.radius):
                return False

            carrying.pos = next_carrying_pos

        self.agent.pos = next_pos

        return True
    def turn_agent(self, turn_angle):
        """
        Turn the agent left (positive angle) or right (negative angle).
        turn_angle is in degrees. Returns False (restoring the original
        heading) if the carried object would collide, True otherwise.
        """
        # Convert from degrees to radians
        turn_angle *= (math.pi / 180)

        orig_dir = self.agent.dir

        self.agent.dir += turn_angle

        carrying = self.agent.carrying
        if carrying:
            pos = self._get_carry_pos(self.agent.pos, carrying)

            if self.intersect(carrying, pos, carrying.radius):
                # Undo the turn if the carried object would collide
                self.agent.dir = orig_dir
                return False

            carrying.pos = pos
            carrying.dir = self.agent.dir

        return True
    def step(self, action):
        """
        Perform one action and update the simulation.

        Returns (obs, reward, done, info) following the gym API.
        The base class always returns reward 0; derived classes
        compute task-specific rewards.
        """
        self.step_count += 1

        # Movement amounts are sampled per-step under domain randomization
        rand = self.rand if self.domain_rand else None
        fwd_step = self.params.sample(rand, 'forward_step')
        fwd_drift = self.params.sample(rand, 'forward_drift')
        turn_step = self.params.sample(rand, 'turn_step')

        if action == self.actions.move_forward:
            self.move_agent(fwd_step, fwd_drift)

        elif action == self.actions.move_back:
            self.move_agent(-fwd_step, fwd_drift)

        elif action == self.actions.turn_left:
            self.turn_agent(turn_step)

        elif action == self.actions.turn_right:
            self.turn_agent(-turn_step)

        # Pick up an object
        elif action == self.actions.pickup:
            # Position at which we will test for an intersection
            test_pos = self.agent.pos + self.agent.dir_vec * 1.5 * self.agent.radius
            ent = self.intersect(self.agent, test_pos, 1.2 * self.agent.radius)
            if not self.agent.carrying:
                if isinstance(ent, Entity):
                    if not ent.is_static:
                        self.agent.carrying = ent

        # Drop an object being carried
        elif action == self.actions.drop:
            if self.agent.carrying:
                # Place the object back on the ground
                self.agent.carrying.pos[1] = 0
                self.agent.carrying = None

        # If we are carrying an object, update its position as we move
        if self.agent.carrying:
            ent_pos = self._get_carry_pos(self.agent.pos, self.agent.carrying)
            self.agent.carrying.pos = ent_pos
            self.agent.carrying.dir = self.agent.dir

        # Generate the current camera image
        obs = self.render_obs()

        # If the maximum time step count is reached, end the episode
        if self.step_count >= self.max_episode_steps:
            done = True
            reward = 0
            return obs, reward, done, {}

        reward = 0
        done = False

        return obs, reward, done, {}
def add_rect_room(
self,
min_x,
max_x,
min_z,
max_z,
**kwargs
):
"""
Create a rectangular room
"""
# 2D outline coordinates of the room,
# listed in counter-clockwise order when viewed from the top
outline = np.array([
# East wall
[max_x, max_z],
# North wall
[max_x, min_z],
# West wall
[min_x, min_z],
# South wall
[min_x, max_z],
])
return self.add_room(outline=outline, **kwargs)
    def add_room(self, **kwargs):
        """
        Create a new room.

        Keyword arguments are forwarded to the Room constructor.
        Returns the created room.
        """
        # Rooms cannot be added once static collision data exists
        assert len(self.wall_segs) == 0, "cannot add rooms after static data is generated"

        room = Room(**kwargs)
        self.rooms.append(room)

        return room
def connect_rooms(
self,
room_a,
room_b,
min_x=None,
max_x=None,
min_z=None,
max_z=None,
max_y=None
):
"""
Connect two rooms along facing edges
"""
def find_facing_edges():
for idx_a in range(room_a.num_walls):
norm_a = room_a.edge_norms[idx_a]
for idx_b in range(room_b.num_walls):
norm_b = room_b.edge_norms[idx_b]
# Reject edges that are not facing each other
if np.dot(norm_a, norm_b) > -0.9:
continue
dir = room_b.outline[idx_b] - room_a.outline[idx_a]
# Reject edges that are not touching
if np.dot(norm_a, dir) > 0.05:
continue
return idx_a, idx_b
return None, None
idx_a, idx_b = find_facing_edges()
assert idx_a != None, "matching edges not found in connect_rooms"
start_a, end_a = room_a.add_portal(
edge=idx_a,
min_x=min_x,
max_x=max_x,
min_z=min_z,
max_z=max_z,
max_y=max_y
)
start_b, end_b = room_b.add_portal(
edge=idx_b,
min_x=min_x,
max_x=max_x,
min_z=min_z,
max_z=max_z,
max_y=max_y
)
a = room_a.outline[idx_a] + room_a.edge_dirs[idx_a] * start_a
b = room_a.outline[idx_a] + room_a.edge_dirs[idx_a] * end_a
c = room_b.outline[idx_b] + room_b.edge_dirs[idx_b] * start_b
d = room_b.outline[idx_b] + room_b.edge_dirs[idx_b] * end_b
# If the portals are directly connected, stop
if np.linalg.norm(a - d) < 0.001:
return
len_a = np.linalg.norm(b - a)
len_b = np.linalg.norm(d - c)
# Room outline points must be specified in counter-clockwise order
outline = np.stack([c, b, a, d])
outline = np.stack([outline[:, 0], outline[:, 2]], axis=1)
max_y = max_y if max_y != None else room_a.wall_height
room = Room(
outline,
wall_height=max_y,
wall_tex=room_a.wall_tex_name,
floor_tex=room_a.floor_tex_name,
ceil_tex=room_a.ceil_tex_name,
no_ceiling=room_a.no_ceiling,
)
self.rooms.append(room)
room.add_portal(1, start_pos=0, end_pos=len_a)
room.add_portal(3, start_pos=0, end_pos=len_b)
def place_entity(
self,
ent,
room=None,
pos=None,
dir=None,
min_x=None,
max_x=None,
min_z=None,
max_z=None
):
"""
Place an entity/object in the world.
Find a position that doesn't intersect with any other object.
"""
assert len(self.rooms) > 0, "create rooms before calling place_entity"
assert ent.radius != None, "entity must have physical size defined"
# Generate collision detection data
if len(self.wall_segs) == 0:
self._gen_static_data()
# If an exact position if specified
if pos is not None:
ent.dir = dir if dir != None else self.rand.float(-math.pi, math.pi)
ent.pos = pos
self.entities.append(ent)
return ent
# Keep retrying until we find a suitable position
while True:
# Pick a room, sample rooms proportionally to floor surface area
r = room if room else self.rand.choice(self.rooms, probs=self.room_probs)
# Choose a random point within the square bounding box of the room
lx = r.min_x if min_x == None else min_x
hx = r.max_x if max_x == None else max_x
lz = r.min_z if min_z == None else min_z
hz = r.max_z if max_z == None else max_z
pos = self.rand.float(
low =[lx + ent.radius, 0, lz + ent.radius],
high=[hx - ent.radius, 0, hz - ent.radius]
)
# Make sure the position is within the room's outline
if not r.point_inside(pos):
continue
# Make sure the position doesn't intersect with any walls
if self.intersect(ent, pos, ent.radius):
continue
# Pick a direction
d = dir if dir != None else self.rand.float(-math.pi, math.pi)
ent.pos = pos
ent.dir = d
break
self.entities.append(ent)
return ent
def place_agent(
self,
room=None,
dir=None,
min_x=None,
max_x=None,
min_z=None,
max_z=None
):
"""
Place the agent in the environment at a random position
and orientation
"""
return self.place_entity(
self.agent,
room=room,
dir=dir,
min_x=min_x,
max_x=max_x,
min_z=min_z,
max_z=max_z
)
def intersect(self, ent, pos, radius):
"""
Check if an entity intersects with the world
"""
# Ignore the Y position
px, _, pz = pos
pos = np.array([px, 0, pz])
# Check for intersection with walls
if intersect_circle_segs(pos, radius, self.wall_segs):
return True
# Check for entity intersection
for ent2 in self.entities:
# Entities can't intersect with themselves
if ent2 is ent:
continue
px, _, pz = ent2.pos
pos2 = np.array([px, 0, pz])
d = np.linalg.norm(pos2 - pos)
if d < radius + ent2.radius:
return ent2
return None
def near(self, ent0, ent1=None):
"""
Test if the two entities are near each other.
Used for "go to" or "put next" type tasks
"""
if ent1 == None:
ent1 = self.agent
dist = np.linalg.norm(ent0.pos - ent1.pos)
return dist < ent0.radius + ent1.radius + 1.1 * self.max_forward_step
    def _load_tex(self, tex_name):
        """
        Load a texture, with or without domain randomization
        """
        # Only pass the RNG through when texture randomization is enabled
        rand = self.rand if self.params.sample(self.rand, 'tex_rand') else None
        return Texture.get(tex_name, rand)
    def _gen_static_data(self):
        """
        Generate static data needed for rendering and collision detection
        """
        # Generate the static data for each room
        for room in self.rooms:
            room._gen_static_data(
                self.params,
                self.rand if self.domain_rand else None
            )

        # Concatenate the wall segments of all rooms
        self.wall_segs = np.concatenate([r.wall_segs for r in self.rooms])

        # Room selection probabilities, proportional to floor surface area
        self.room_probs = np.array([r.area for r in self.rooms], dtype=float)
        self.room_probs /= np.sum(self.room_probs)
    def _gen_world(self):
        """
        Generate the world. Derived classes must implement this method.

        Implementations are expected to create at least one room and
        place the agent (see reset, which calls this before computing
        floorplan extents).
        """
        raise NotImplementedError
def _reward(self):
"""
Default sparse reward computation
"""
return 1.0 - 0.2 * (self.step_count / self.max_episode_steps)
def _render_static(self):
"""
Render the static elements of the scene into a display list.
Called once at the beginning of each episode.
"""
# TODO: manage this automatically
# glIsList
glDeleteLists(1, 1);
glNewList(1, GL_COMPILE);
# Light position
glLightfv(GL_LIGHT0, GL_POSITION, (GLfloat*4)(*self.light_pos + [1]))
# Background/minimum light level
glLightfv(GL_LIGHT0, GL_AMBIENT, (GLfloat*4)(*self.light_ambient))
# Diffuse light color
glLightfv(GL_LIGHT0, GL_DIFFUSE, (GLfloat*4)(*self.light_color))
#glLightf(GL_LIGHT0, GL_SPOT_CUTOFF, 180)
#glLightf(GL_LIGHT0, GL_SPOT_EXPONENT, 0)
#glLightf(GL_LIGHT0, GL_CONSTANT_ATTENUATION, 0)
#glLightf(GL_LIGHT0, GL_LINEAR_ATTENUATION, 0)
#glLightf(GL_LIGHT0, GL_QUADRATIC_ATTENUATION, 0)
glEnable(GL_LIGHTING)
glEnable(GL_LIGHT0)
glShadeModel(GL_SMOOTH)
glEnable(GL_COLOR_MATERIAL)
glColorMaterial(GL_FRONT_AND_BACK, GL_AMBIENT_AND_DIFFUSE)
# Render the rooms
glEnable(GL_TEXTURE_2D)
for room in self.rooms:
room._render()
# Render the static entities
for ent in self.entities:
if ent.is_static:
ent.render()
glEndList()
    def _render_world(
        self,
        frame_buffer,
        render_agent
    ):
        """
        Render the world from a given camera position into a frame buffer,
        and produce a numpy image array as output.

        The caller is responsible for binding the frame buffer and
        setting up the projection/modelview matrices beforehand.
        """
        # Call the display list for the static parts of the environment
        glCallList(1)

        # TODO: keep the non-static entities in a different list for efficiency?
        # Render the non-static entities
        for ent in self.entities:
            if not ent.is_static and ent is not self.agent:
                ent.render()
                #ent.draw_bound()

        if render_agent:
            self.agent.render()

        # Resolve the rendered image into a numpy array
        img = frame_buffer.resolve()

        return img
def render_top_view(self, frame_buffer=None):
"""
Render a top view of the whole map (from above)
"""
if frame_buffer == None:
frame_buffer = self.obs_fb
# Switch to the default OpenGL context
# This is necessary on Linux Nvidia drivers
self.shadow_window.switch_to()
# Bind the frame buffer before rendering into it
frame_buffer.bind()
# Clear the color and depth buffers
glClearColor(*self.sky_color, 1.0)
glClearDepth(1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# Scene extents to render
min_x = self.min_x - 1
max_x = self.max_x + 1
min_z = self.min_z - 1
max_z = self.max_z + 1
width = max_x - min_x
height = max_z - min_z
aspect = width / height
fb_aspect = frame_buffer.width / frame_buffer.height
# Adjust the aspect extents to match the frame buffer aspect
if aspect > fb_aspect:
# Want to add to denom, add to height
new_h = width / fb_aspect
h_diff = new_h - height
min_z -= h_diff / 2
max_z += h_diff / 2
elif aspect < fb_aspect:
# Want to add to num, add to width
new_w = height * fb_aspect
w_diff = new_w - width
min_x -= w_diff / 2
max_x += w_diff / 2
# Set the projection matrix
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(
min_x,
max_x,
-max_z,
-min_z,
-100, 100.0
)
# Setup the camera
# Y maps to +Z, Z maps to +Y
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
m = [
1, 0, 0, 0,
0, 0, 1, 0,
0, -1, 0, 0,
0, 0, 0, 1,
]
glLoadMatrixf((GLfloat * len(m))(*m))
return self._render_world(
frame_buffer,
render_agent=True
)
def render_obs(self, frame_buffer=None):
"""
Render an observation from the point of view of the agent
"""
if frame_buffer == None:
frame_buffer = self.obs_fb
# Switch to the default OpenGL context
# This is necessary on Linux Nvidia drivers
self.shadow_window.switch_to()
# Bind the frame buffer before rendering into it
frame_buffer.bind()
# Clear the color and depth buffers
glClearColor(*self.sky_color, 1.0)
glClearDepth(1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# Set the projection matrix
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(
self.agent.cam_fov_y,
frame_buffer.width / float(frame_buffer.height),
0.04,
100.0
)
# Setup the camera
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
gluLookAt(
# Eye position
*self.agent.cam_pos,
# Target
*(self.agent.cam_pos + self.agent.cam_dir),
# Up vector
0, 1.0, 0.0
)
return self._render_world(
frame_buffer,
render_agent=False
)
def render_depth(self, frame_buffer=None):
"""
Produce a depth map
Values are floating-point, map shape is (H,W,1)
Distances are in meters from the observer
"""
if frame_buffer == None:
frame_buffer = self.obs_fb
# Render the world
self.render_obs(frame_buffer)
return frame_buffer.get_depth_map(0.04, 100.0)
def get_visible_ents(self):
"""
Get a list of visible entities.
Uses OpenGL occlusion queries to approximate visibility.
:return: set of objects visible to the agent
"""
# Allocate the occlusion query ids
num_ents = len(self.entities)
query_ids = (GLuint * num_ents)()
glGenQueries(num_ents, query_ids)
# Switch to the default OpenGL context
# This is necessary on Linux Nvidia drivers
self.shadow_window.switch_to()
# Use the small observation frame buffer
frame_buffer = self.obs_fb
# Bind the frame buffer before rendering into it
frame_buffer.bind()
# Clear the color and depth buffers
glClearColor(*self.sky_color, 1.0)
glClearDepth(1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# Set the projection matrix
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(
self.agent.cam_fov_y,
frame_buffer.width / float(frame_buffer.height),
0.04,
100.0
)
# Setup the cameravisible objects
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
gluLookAt(
# Eye position
*self.agent.cam_pos,
# Target
*(self.agent.cam_pos + self.agent.cam_dir),
# Up vector
0, 1.0, 0.0
)
# Render the rooms, without texturing
glDisable(GL_TEXTURE_2D)
for room in self.rooms:
room._render()
# For each entity
for ent_idx, ent in enumerate(self.entities):
if ent is self.agent:
continue
glBeginQuery(GL_ANY_SAMPLES_PASSED, query_ids[ent_idx])
pos = ent.pos
#glColor3f(1, 0, 0)
drawBox(
x_min=pos[0] - 0.1,
x_max=pos[0] + 0.1,
y_min=pos[1],
y_max=pos[1] + 0.2,
z_min=pos[2] - 0.1,
z_max=pos[2] + 0.1
)
glEndQuery(GL_ANY_SAMPLES_PASSED)
vis_objs = set()
# Get query results
for ent_idx, ent in enumerate(self.entities):
if ent is self.agent:
continue
visible = (GLuint*1)(1)
glGetQueryObjectuiv(query_ids[ent_idx], GL_QUERY_RESULT, visible);
if visible[0] != 0:
vis_objs.add(ent)
# Free the occlusion query ids
glDeleteQueries(1, query_ids)
#img = frame_buffer.resolve()
#return img
return vis_objs
    def render(self, mode='human', close=False, view='agent'):
        """
        Render the environment for human viewing.

        :param mode: 'human' to draw into a pyglet window, 'rgb_array' to
            return the image without opening a window
        :param close: if True, close the window (if any) and return
        :param view: 'agent' for first-person view, 'top' for top-down view
        :return: the rendered human-view image as a numpy array
        """
        if close:
            if self.window:
                self.window.close()
            return
        # Render the human-view image
        assert view in ['agent', 'top']
        if view == 'agent':
            img = self.render_obs(self.vis_fb)
        else:
            img = self.render_top_view(self.vis_fb)
        img_width = img.shape[1]
        img_height = img.shape[0]
        if mode == 'rgb_array':
            return img
        # Render the agent's view (shown in a side panel of the window)
        obs = self.render_obs()
        obs_width = obs.shape[1]
        obs_height = obs.shape[0]
        # The window is wide enough for the main view plus the side panel
        window_width = img_width + self.obs_disp_width
        window_height = img_height
        # Lazily create the pyglet window on first human render
        if self.window is None:
            config = pyglet.gl.Config(double_buffer=True)
            self.window = pyglet.window.Window(
                width=window_width,
                height=window_height,
                resizable=False,
                config=config
            )
        self.window.clear()
        self.window.switch_to()
        # Bind the default frame buffer
        glBindFramebuffer(GL_FRAMEBUFFER, 0);
        # Clear the color and depth buffers
        glClearColor(0, 0, 0, 1.0)
        glClearDepth(1.0)
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        # Setup orthogonal projection (1 unit == 1 pixel)
        glMatrixMode(GL_PROJECTION)
        glLoadIdentity()
        glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()
        glOrtho(0, window_width, 0, window_height, 0, 10)
        # Draw the human render to the rendering window
        # (flip vertically: numpy images are top-down, GL is bottom-up)
        img_flip = np.ascontiguousarray(np.flip(img, axis=0))
        img_data = pyglet.image.ImageData(
            img_width,
            img_height,
            'RGB',
            img_flip.ctypes.data_as(POINTER(GLubyte)),
            pitch=img_width * 3,
        )
        img_data.blit(
            0,
            0,
            0,
            width=img_width,
            height=img_height
        )
        # Draw the observation in the top-right corner panel
        obs = np.ascontiguousarray(np.flip(obs, axis=0))
        obs_data = pyglet.image.ImageData(
            obs_width,
            obs_height,
            'RGB',
            obs.ctypes.data_as(POINTER(GLubyte)),
            pitch=obs_width * 3,
        )
        obs_data.blit(
            img_width,
            img_height - self.obs_disp_height,
            0,
            width=self.obs_disp_width,
            height=self.obs_disp_height
        )
        # Draw the text label (position, heading in degrees, step count)
        self.text_label.text = "pos: (%.2f, %.2f, %.2f)\nangle: %d\nsteps: %d" % (
            *self.agent.pos,
            int(self.agent.dir * 180 / math.pi) % 360,
            self.step_count
        )
        self.text_label.draw()
        # Force execution of queued commands
        glFlush()
        # If we are not running the Pyglet event loop,
        # we have to manually flip the buffers and dispatch events
        if mode == 'human':
            self.window.flip()
            self.window.dispatch_events()
        return img
| 28.665263 | 102 | 0.548815 | import math
from enum import IntEnum
import numpy as np
import gym
from gym import spaces
from .random import *
from .opengl import *
from .objmesh import *
from .entity import *
from .math import *
from .params import *
DEFAULT_WALL_HEIGHT=2.74
TEX_DENSITY = 512

def gen_texcs_wall(tex, min_x, min_y, width, height):
    """
    Generate texture coordinates for a single wall quad.

    The wall rectangle starting at (min_x, min_y) with the given width
    and height (world units) is mapped into UV space so that textures
    repeat at a fixed density of TEX_DENSITY texels per world unit.

    :param tex: texture object exposing ``width`` and ``height``
    :return: (4, 2) float32 array of UV pairs, one per quad corner
    """
    u_scale = TEX_DENSITY / tex.width
    v_scale = TEX_DENSITY / tex.height
    u_lo = min_x * u_scale
    u_hi = (min_x + width) * u_scale
    v_lo = min_y * v_scale
    v_hi = (min_y + height) * v_scale
    # Corner order matches the wall vertex winding used by the renderer
    corners = [
        [u_lo, v_lo],
        [u_lo, v_hi],
        [u_hi, v_hi],
        [u_hi, v_lo],
    ]
    return np.array(corners, dtype=np.float32)
def gen_texcs_floor(tex, poss):
    """
    Generate texture coordinates for floor/ceiling vertices.

    Each 3D vertex in poss is projected onto the ground plane (x, z)
    and scaled so that the texture repeats at TEX_DENSITY texels per
    world unit.

    :param tex: texture object exposing ``width`` and ``height``
    :param poss: (N, 3) array of vertex positions
    :return: (N, 2) array of UV coordinates
    """
    scale = np.array(
        [TEX_DENSITY / tex.width, TEX_DENSITY / tex.height],
        dtype=float
    )
    # Keep only the x and z components of each vertex, then scale
    return poss[:, [0, 2]] * scale
class Room:
def __init__(
self,
outline,
wall_height=DEFAULT_WALL_HEIGHT,
floor_tex='floor_tiles_bw',
wall_tex='concrete',
ceil_tex='concrete_tiles',
no_ceiling=False
):
assert len(outline.shape) == 2
assert outline.shape[1] == 2
assert outline.shape[0] >= 3
outline = np.insert(outline, 1, 0, axis=1)
self.num_walls = outline.shape[0]
self.outline = outline
self.min_x = self.outline[:, 0].min()
self.max_x = self.outline[:, 0].max()
self.min_z = self.outline[:, 2].min()
self.max_z = self.outline[:, 2].max()
self.mid_x = (self.max_x + self.min_x) / 2
self.mid_z = (self.max_z + self.min_z) / 2
self.area = (self.max_x - self.min_x) * (self.max_z - self.min_z)
next_pts = np.concatenate([self.outline[1:], np.expand_dims(self.outline[0], axis=0)], axis=0)
self.edge_dirs = next_pts - self.outline
self.edge_dirs = (self.edge_dirs.T / np.linalg.norm(self.edge_dirs, axis=1)).T
self.edge_norms = -np.cross(self.edge_dirs, Y_VEC)
self.edge_norms = (self.edge_norms.T / np.linalg.norm(self.edge_norms, axis=1)).T
self.wall_height = wall_height
self.no_ceiling = no_ceiling
self.wall_tex_name = wall_tex
self.floor_tex_name = floor_tex
self.ceil_tex_name = ceil_tex
self.portals = [[] for i in range(self.num_walls)]
self.neighbors = []
def add_portal(
self,
edge,
start_pos=None,
end_pos=None,
min_x=None,
max_x=None,
min_z=None,
max_z=None,
min_y=0,
max_y=None
):
if max_y == None:
max_y = self.wall_height
assert edge <= self.num_walls
assert max_y > min_y
e_p0 = self.outline[edge]
e_p1 = self.outline[(edge+1) % self.num_walls]
e_len = np.linalg.norm(e_p1 - e_p0)
e_dir = (e_p1 - e_p0) / e_len
x0, _, z0 = e_p0
x1, _, z1 = e_p1
dx, _, dz = e_dir
if min_x != None:
assert min_z == None and max_z == None
assert start_pos == None and end_pos == None
assert x0 != x1
m0 = (min_x - x0) / dx
m1 = (max_x - x0) / dx
if m1 < m0:
m0, m1 = m1, m0
start_pos, end_pos = m0, m1
elif min_z != None:
assert min_x == None and max_x == None
assert start_pos == None and end_pos == None
assert z0 != z1
m0 = (min_z - z0) / dz
m1 = (max_z - z0) / dz
if m1 < m0:
m0, m1 = m1, m0
start_pos, end_pos = m0, m1
else:
assert min_x == None and max_x == None
assert min_z == None and max_z == None
assert end_pos > start_pos
assert start_pos >= 0, "portal outside of wall extents"
assert end_pos <= e_len, "portal outside of wall extents"
self.portals[edge].append({
'start_pos': start_pos,
'end_pos': end_pos,
'min_y': min_y,
'max_y': max_y
})
self.portals[edge].sort(key=lambda e: e['start_pos'])
return start_pos, end_pos
def point_inside(self, p):
ap = p - self.outline
dotNAP = np.sum(self.edge_norms * ap, axis=1)
return np.all(np.greater(dotNAP, 0))
def _gen_static_data(self, params, rng):
self.wall_tex = Texture.get(self.wall_tex_name, rng)
self.floor_tex = Texture.get(self.floor_tex_name, rng)
self.ceil_tex = Texture.get(self.ceil_tex_name, rng)
self.floor_verts = self.outline
self.floor_texcs = gen_texcs_floor(
self.floor_tex,
self.floor_verts
)
self.ceil_verts = np.flip(self.outline, axis=0) + self.wall_height * Y_VEC
self.ceil_texcs = gen_texcs_floor(
self.ceil_tex,
self.ceil_verts
)
self.wall_verts = []
self.wall_norms = []
self.wall_texcs = []
self.wall_segs = []
def gen_seg_poly(
edge_p0,
side_vec,
seg_start,
seg_end,
min_y,
max_y
):
if seg_end == seg_start:
return
if min_y == max_y:
return
s_p0 = edge_p0 + seg_start * side_vec
s_p1 = edge_p0 + seg_end * side_vec
if min_y == 0:
self.wall_segs.append(np.array([s_p1, s_p0]))
self.wall_verts.append(s_p0 + min_y * Y_VEC)
self.wall_verts.append(s_p0 + max_y * Y_VEC)
self.wall_verts.append(s_p1 + max_y * Y_VEC)
self.wall_verts.append(s_p1 + min_y * Y_VEC)
normal = np.cross(s_p1 - s_p0, Y_VEC)
normal = -normal / np.linalg.norm(normal)
for i in range(4):
self.wall_norms.append(normal)
texcs = gen_texcs_wall(
self.wall_tex,
seg_start,
min_y,
seg_end - seg_start,
max_y - min_y
)
self.wall_texcs.append(texcs)
for wall_idx in range(self.num_walls):
edge_p0 = self.outline[wall_idx, :]
edge_p1 = self.outline[(wall_idx+1) % self.num_walls, :]
wall_width = np.linalg.norm(edge_p1 - edge_p0)
side_vec = (edge_p1 - edge_p0) / wall_width
if len(self.portals[wall_idx]) > 0:
seg_end = self.portals[wall_idx][0]['start_pos']
else:
seg_end = wall_width
gen_seg_poly(
edge_p0,
side_vec,
0,
seg_end,
0,
self.wall_height
)
for portal_idx, portal in enumerate(self.portals[wall_idx]):
portal = self.portals[wall_idx][portal_idx]
start_pos = portal['start_pos']
end_pos = portal['end_pos']
min_y = portal['min_y']
max_y = portal['max_y']
gen_seg_poly(
edge_p0,
side_vec,
start_pos,
end_pos,
0,
min_y
)
gen_seg_poly(
edge_p0,
side_vec,
start_pos,
end_pos,
max_y,
self.wall_height
)
if portal_idx < len(self.portals[wall_idx]) - 1:
next_portal = self.portals[wall_idx][portal_idx+1]
next_portal_start = next_portal['start_pos']
else:
next_portal_start = wall_width
gen_seg_poly(
edge_p0,
side_vec,
end_pos,
next_portal_start,
0,
self.wall_height
)
self.wall_verts = np.array(self.wall_verts)
self.wall_norms = np.array(self.wall_norms)
if len(self.wall_segs) > 0:
self.wall_segs = np.array(self.wall_segs)
else:
self.wall_segs = np.array([]).reshape(0, 2, 3)
if len(self.wall_texcs) > 0:
self.wall_texcs = np.concatenate(self.wall_texcs)
else:
self.wall_texcs = np.array([]).reshape(0, 2)
def _render(self):
glColor3f(1, 1, 1)
self.floor_tex.bind()
glBegin(GL_POLYGON)
glNormal3f(0, 1, 0)
for i in range(self.floor_verts.shape[0]):
glTexCoord2f(*self.floor_texcs[i, :])
glVertex3f(*self.floor_verts[i, :])
glEnd()
if not self.no_ceiling:
self.ceil_tex.bind()
glBegin(GL_POLYGON)
glNormal3f(0, -1, 0)
for i in range(self.ceil_verts.shape[0]):
glTexCoord2f(*self.ceil_texcs[i, :])
glVertex3f(*self.ceil_verts[i, :])
glEnd()
self.wall_tex.bind()
glBegin(GL_QUADS)
for i in range(self.wall_verts.shape[0]):
glNormal3f(*self.wall_norms[i, :])
glTexCoord2f(*self.wall_texcs[i, :])
glVertex3f(*self.wall_verts[i, :])
glEnd()
class MiniWorldEnv(gym.Env):
metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second' : 30
}
class Actions(IntEnum):
turn_left = 0
turn_right = 1
move_forward = 2
move_back = 3
pickup = 4
drop = 5
toggle = 6
done = 7
def __init__(
self,
max_episode_steps=1500,
obs_width=80,
obs_height=60,
window_width=800,
window_height=600,
params=DEFAULT_PARAMS,
domain_rand=False
):
self.actions = MiniWorldEnv.Actions
self.action_space = spaces.Discrete(len(self.actions))
self.observation_space = spaces.Box(
low=0,
high=255,
shape=(obs_height, obs_width, 3),
dtype=np.uint8
)
self.reward_range = (-math.inf, math.inf)
self.max_episode_steps = max_episode_steps
self.params = params
self.domain_rand = domain_rand
self.window = None
self.shadow_window = pyglet.window.Window(width=1, height=1, visible=False)
glEnable(GL_DEPTH_TEST)
glEnable(GL_CULL_FACE)
self.obs_fb = FrameBuffer(obs_width, obs_height, 8)
self.vis_fb = FrameBuffer(window_width, window_height, 16)
self.obs_disp_width = 256
self.obs_disp_height = obs_height * (self.obs_disp_width / obs_width)
self.text_label = pyglet.text.Label(
font_name="Arial",
font_size=14,
multiline=True,
width=400,
x = window_width + 5,
y = window_height - (self.obs_disp_height + 19)
)
self.seed()
self.reset()
def close(self):
pass
def seed(self, seed=None):
self.rand = RandGen(seed)
return [seed]
def reset(self):
self.step_count = 0
self.agent = Agent()
self.entities = []
self.rooms = []
self.wall_segs = []
self._gen_world()
rand = self.rand if self.domain_rand else None
self.params.sample_many(rand, self, [
'sky_color',
'light_pos',
'light_color',
'light_ambient'
])
self.max_forward_step = self.params.get_max('forward_step')
for ent in self.entities:
ent.randomize(self.params, rand)
self.min_x = min([r.min_x for r in self.rooms])
self.max_x = max([r.max_x for r in self.rooms])
self.min_z = min([r.min_z for r in self.rooms])
self.max_z = max([r.max_z for r in self.rooms])
if len(self.wall_segs) == 0:
self._gen_static_data()
self._render_static()
obs = self.render_obs()
return obs
def _get_carry_pos(self, agent_pos, ent):
dist = self.agent.radius + ent.radius + self.max_forward_step
pos = agent_pos + self.agent.dir_vec * 1.05 * dist
y_pos = max(self.agent.cam_height - ent.height - 0.3, 0)
pos = pos + Y_VEC * y_pos
return pos
def move_agent(self, fwd_dist, fwd_drift):
next_pos = (
self.agent.pos +
self.agent.dir_vec * fwd_dist +
self.agent.right_vec * fwd_drift
)
if self.intersect(self.agent, next_pos, self.agent.radius):
return False
carrying = self.agent.carrying
if carrying:
next_carrying_pos = self._get_carry_pos(next_pos, carrying)
if self.intersect(carrying, next_carrying_pos, carrying.radius):
return False
carrying.pos = next_carrying_pos
self.agent.pos = next_pos
return True
def turn_agent(self, turn_angle):
turn_angle *= (math.pi / 180)
orig_dir = self.agent.dir
self.agent.dir += turn_angle
carrying = self.agent.carrying
if carrying:
pos = self._get_carry_pos(self.agent.pos, carrying)
if self.intersect(carrying, pos, carrying.radius):
self.agent.dir = orig_dir
return False
carrying.pos = pos
carrying.dir = self.agent.dir
return True
def step(self, action):
self.step_count += 1
rand = self.rand if self.domain_rand else None
fwd_step = self.params.sample(rand, 'forward_step')
fwd_drift = self.params.sample(rand, 'forward_drift')
turn_step = self.params.sample(rand, 'turn_step')
if action == self.actions.move_forward:
self.move_agent(fwd_step, fwd_drift)
elif action == self.actions.move_back:
self.move_agent(-fwd_step, fwd_drift)
elif action == self.actions.turn_left:
self.turn_agent(turn_step)
elif action == self.actions.turn_right:
self.turn_agent(-turn_step)
elif action == self.actions.pickup:
test_pos = self.agent.pos + self.agent.dir_vec * 1.5 * self.agent.radius
ent = self.intersect(self.agent, test_pos, 1.2 * self.agent.radius)
if not self.agent.carrying:
if isinstance(ent, Entity):
if not ent.is_static:
self.agent.carrying = ent
elif action == self.actions.drop:
if self.agent.carrying:
self.agent.carrying.pos[1] = 0
self.agent.carrying = None
if self.agent.carrying:
ent_pos = self._get_carry_pos(self.agent.pos, self.agent.carrying)
self.agent.carrying.pos = ent_pos
self.agent.carrying.dir = self.agent.dir
obs = self.render_obs()
if self.step_count >= self.max_episode_steps:
done = True
reward = 0
return obs, reward, done, {}
reward = 0
done = False
return obs, reward, done, {}
def add_rect_room(
self,
min_x,
max_x,
min_z,
max_z,
**kwargs
):
outline = np.array([
[max_x, max_z],
[max_x, min_z],
[min_x, min_z],
[min_x, max_z],
])
return self.add_room(outline=outline, **kwargs)
def add_room(self, **kwargs):
assert len(self.wall_segs) == 0, "cannot add rooms after static data is generated"
room = Room(**kwargs)
self.rooms.append(room)
return room
def connect_rooms(
self,
room_a,
room_b,
min_x=None,
max_x=None,
min_z=None,
max_z=None,
max_y=None
):
def find_facing_edges():
for idx_a in range(room_a.num_walls):
norm_a = room_a.edge_norms[idx_a]
for idx_b in range(room_b.num_walls):
norm_b = room_b.edge_norms[idx_b]
if np.dot(norm_a, norm_b) > -0.9:
continue
dir = room_b.outline[idx_b] - room_a.outline[idx_a]
if np.dot(norm_a, dir) > 0.05:
continue
return idx_a, idx_b
return None, None
idx_a, idx_b = find_facing_edges()
assert idx_a != None, "matching edges not found in connect_rooms"
start_a, end_a = room_a.add_portal(
edge=idx_a,
min_x=min_x,
max_x=max_x,
min_z=min_z,
max_z=max_z,
max_y=max_y
)
start_b, end_b = room_b.add_portal(
edge=idx_b,
min_x=min_x,
max_x=max_x,
min_z=min_z,
max_z=max_z,
max_y=max_y
)
a = room_a.outline[idx_a] + room_a.edge_dirs[idx_a] * start_a
b = room_a.outline[idx_a] + room_a.edge_dirs[idx_a] * end_a
c = room_b.outline[idx_b] + room_b.edge_dirs[idx_b] * start_b
d = room_b.outline[idx_b] + room_b.edge_dirs[idx_b] * end_b
if np.linalg.norm(a - d) < 0.001:
return
len_a = np.linalg.norm(b - a)
len_b = np.linalg.norm(d - c)
outline = np.stack([c, b, a, d])
outline = np.stack([outline[:, 0], outline[:, 2]], axis=1)
max_y = max_y if max_y != None else room_a.wall_height
room = Room(
outline,
wall_height=max_y,
wall_tex=room_a.wall_tex_name,
floor_tex=room_a.floor_tex_name,
ceil_tex=room_a.ceil_tex_name,
no_ceiling=room_a.no_ceiling,
)
self.rooms.append(room)
room.add_portal(1, start_pos=0, end_pos=len_a)
room.add_portal(3, start_pos=0, end_pos=len_b)
def place_entity(
self,
ent,
room=None,
pos=None,
dir=None,
min_x=None,
max_x=None,
min_z=None,
max_z=None
):
assert len(self.rooms) > 0, "create rooms before calling place_entity"
assert ent.radius != None, "entity must have physical size defined"
if len(self.wall_segs) == 0:
self._gen_static_data()
if pos is not None:
ent.dir = dir if dir != None else self.rand.float(-math.pi, math.pi)
ent.pos = pos
self.entities.append(ent)
return ent
while True:
r = room if room else self.rand.choice(self.rooms, probs=self.room_probs)
lx = r.min_x if min_x == None else min_x
hx = r.max_x if max_x == None else max_x
lz = r.min_z if min_z == None else min_z
hz = r.max_z if max_z == None else max_z
pos = self.rand.float(
low =[lx + ent.radius, 0, lz + ent.radius],
high=[hx - ent.radius, 0, hz - ent.radius]
)
if not r.point_inside(pos):
continue
# Make sure the position doesn't intersect with any walls
if self.intersect(ent, pos, ent.radius):
continue
d = dir if dir != None else self.rand.float(-math.pi, math.pi)
ent.pos = pos
ent.dir = d
break
self.entities.append(ent)
return ent
def place_agent(
self,
room=None,
dir=None,
min_x=None,
max_x=None,
min_z=None,
max_z=None
):
return self.place_entity(
self.agent,
room=room,
dir=dir,
min_x=min_x,
max_x=max_x,
min_z=min_z,
max_z=max_z
)
def intersect(self, ent, pos, radius):
px, _, pz = pos
pos = np.array([px, 0, pz])
if intersect_circle_segs(pos, radius, self.wall_segs):
return True
for ent2 in self.entities:
if ent2 is ent:
continue
px, _, pz = ent2.pos
pos2 = np.array([px, 0, pz])
d = np.linalg.norm(pos2 - pos)
if d < radius + ent2.radius:
return ent2
return None
def near(self, ent0, ent1=None):
if ent1 == None:
ent1 = self.agent
dist = np.linalg.norm(ent0.pos - ent1.pos)
return dist < ent0.radius + ent1.radius + 1.1 * self.max_forward_step
def _load_tex(self, tex_name):
rand = self.rand if self.params.sample(self.rand, 'tex_rand') else None
return Texture.get(tex_name, rand)
def _gen_static_data(self):
# Generate the static data for each room
for room in self.rooms:
room._gen_static_data(
self.params,
self.rand if self.domain_rand else None
)
# Concatenate the wall segments
self.wall_segs = np.concatenate([r.wall_segs for r in self.rooms])
# Room selection probabilities
self.room_probs = np.array([r.area for r in self.rooms], dtype=float)
self.room_probs /= np.sum(self.room_probs)
def _gen_world(self):
raise NotImplementedError
def _reward(self):
return 1.0 - 0.2 * (self.step_count / self.max_episode_steps)
def _render_static(self):
# TODO: manage this automatically
# glIsList
glDeleteLists(1, 1);
glNewList(1, GL_COMPILE);
# Light position
glLightfv(GL_LIGHT0, GL_POSITION, (GLfloat*4)(*self.light_pos + [1]))
# Background/minimum light level
glLightfv(GL_LIGHT0, GL_AMBIENT, (GLfloat*4)(*self.light_ambient))
# Diffuse light color
glLightfv(GL_LIGHT0, GL_DIFFUSE, (GLfloat*4)(*self.light_color))
#glLightf(GL_LIGHT0, GL_SPOT_CUTOFF, 180)
#glLightf(GL_LIGHT0, GL_SPOT_EXPONENT, 0)
#glLightf(GL_LIGHT0, GL_CONSTANT_ATTENUATION, 0)
#glLightf(GL_LIGHT0, GL_LINEAR_ATTENUATION, 0)
#glLightf(GL_LIGHT0, GL_QUADRATIC_ATTENUATION, 0)
glEnable(GL_LIGHTING)
glEnable(GL_LIGHT0)
glShadeModel(GL_SMOOTH)
glEnable(GL_COLOR_MATERIAL)
glColorMaterial(GL_FRONT_AND_BACK, GL_AMBIENT_AND_DIFFUSE)
# Render the rooms
glEnable(GL_TEXTURE_2D)
for room in self.rooms:
room._render()
# Render the static entities
for ent in self.entities:
if ent.is_static:
ent.render()
glEndList()
def _render_world(
self,
frame_buffer,
render_agent
):
# Call the display list for the static parts of the environment
glCallList(1)
# TODO: keep the non-static entities in a different list for efficiency?
# Render the non-static entities
for ent in self.entities:
if not ent.is_static and ent is not self.agent:
ent.render()
#ent.draw_bound()
if render_agent:
self.agent.render()
# Resolve the rendered image into a numpy array
img = frame_buffer.resolve()
return img
def render_top_view(self, frame_buffer=None):
if frame_buffer == None:
frame_buffer = self.obs_fb
# Switch to the default OpenGL context
# This is necessary on Linux Nvidia drivers
self.shadow_window.switch_to()
# Bind the frame buffer before rendering into it
frame_buffer.bind()
# Clear the color and depth buffers
glClearColor(*self.sky_color, 1.0)
glClearDepth(1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# Scene extents to render
min_x = self.min_x - 1
max_x = self.max_x + 1
min_z = self.min_z - 1
max_z = self.max_z + 1
width = max_x - min_x
height = max_z - min_z
aspect = width / height
fb_aspect = frame_buffer.width / frame_buffer.height
# Adjust the aspect extents to match the frame buffer aspect
if aspect > fb_aspect:
# Want to add to denom, add to height
new_h = width / fb_aspect
h_diff = new_h - height
min_z -= h_diff / 2
max_z += h_diff / 2
elif aspect < fb_aspect:
# Want to add to num, add to width
new_w = height * fb_aspect
w_diff = new_w - width
min_x -= w_diff / 2
max_x += w_diff / 2
# Set the projection matrix
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(
min_x,
max_x,
-max_z,
-min_z,
-100, 100.0
)
# Setup the camera
# Y maps to +Z, Z maps to +Y
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
m = [
1, 0, 0, 0,
0, 0, 1, 0,
0, -1, 0, 0,
0, 0, 0, 1,
]
glLoadMatrixf((GLfloat * len(m))(*m))
return self._render_world(
frame_buffer,
render_agent=True
)
def render_obs(self, frame_buffer=None):
if frame_buffer == None:
frame_buffer = self.obs_fb
# Switch to the default OpenGL context
# This is necessary on Linux Nvidia drivers
self.shadow_window.switch_to()
# Bind the frame buffer before rendering into it
frame_buffer.bind()
# Clear the color and depth buffers
glClearColor(*self.sky_color, 1.0)
glClearDepth(1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# Set the projection matrix
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(
self.agent.cam_fov_y,
frame_buffer.width / float(frame_buffer.height),
0.04,
100.0
)
# Setup the camera
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
gluLookAt(
# Eye position
*self.agent.cam_pos,
# Target
*(self.agent.cam_pos + self.agent.cam_dir),
# Up vector
0, 1.0, 0.0
)
return self._render_world(
frame_buffer,
render_agent=False
)
def render_depth(self, frame_buffer=None):
if frame_buffer == None:
frame_buffer = self.obs_fb
# Render the world
self.render_obs(frame_buffer)
return frame_buffer.get_depth_map(0.04, 100.0)
def get_visible_ents(self):
# Allocate the occlusion query ids
num_ents = len(self.entities)
query_ids = (GLuint * num_ents)()
glGenQueries(num_ents, query_ids)
# Switch to the default OpenGL context
# This is necessary on Linux Nvidia drivers
self.shadow_window.switch_to()
# Use the small observation frame buffer
frame_buffer = self.obs_fb
# Bind the frame buffer before rendering into it
frame_buffer.bind()
# Clear the color and depth buffers
glClearColor(*self.sky_color, 1.0)
glClearDepth(1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# Set the projection matrix
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(
self.agent.cam_fov_y,
frame_buffer.width / float(frame_buffer.height),
0.04,
100.0
)
# Setup the cameravisible objects
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
gluLookAt(
# Eye position
*self.agent.cam_pos,
# Target
*(self.agent.cam_pos + self.agent.cam_dir),
# Up vector
0, 1.0, 0.0
)
# Render the rooms, without texturing
glDisable(GL_TEXTURE_2D)
for room in self.rooms:
room._render()
# For each entity
for ent_idx, ent in enumerate(self.entities):
if ent is self.agent:
continue
glBeginQuery(GL_ANY_SAMPLES_PASSED, query_ids[ent_idx])
pos = ent.pos
#glColor3f(1, 0, 0)
drawBox(
x_min=pos[0] - 0.1,
x_max=pos[0] + 0.1,
y_min=pos[1],
y_max=pos[1] + 0.2,
z_min=pos[2] - 0.1,
z_max=pos[2] + 0.1
)
glEndQuery(GL_ANY_SAMPLES_PASSED)
vis_objs = set()
# Get query results
for ent_idx, ent in enumerate(self.entities):
if ent is self.agent:
continue
visible = (GLuint*1)(1)
glGetQueryObjectuiv(query_ids[ent_idx], GL_QUERY_RESULT, visible);
if visible[0] != 0:
vis_objs.add(ent)
# Free the occlusion query ids
glDeleteQueries(1, query_ids)
#img = frame_buffer.resolve()
#return img
return vis_objs
def render(self, mode='human', close=False, view='agent'):
if close:
if self.window:
self.window.close()
return
# Render the human-view image
assert view in ['agent', 'top']
if view == 'agent':
img = self.render_obs(self.vis_fb)
else:
img = self.render_top_view(self.vis_fb)
img_width = img.shape[1]
img_height = img.shape[0]
if mode == 'rgb_array':
return img
# Render the agent's view
obs = self.render_obs()
obs_width = obs.shape[1]
obs_height = obs.shape[0]
window_width = img_width + self.obs_disp_width
window_height = img_height
if self.window is None:
config = pyglet.gl.Config(double_buffer=True)
self.window = pyglet.window.Window(
width=window_width,
height=window_height,
resizable=False,
config=config
)
self.window.clear()
self.window.switch_to()
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glClearColor(0, 0, 0, 1.0)
glClearDepth(1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glOrtho(0, window_width, 0, window_height, 0, 10)
img_flip = np.ascontiguousarray(np.flip(img, axis=0))
img_data = pyglet.image.ImageData(
img_width,
img_height,
'RGB',
img_flip.ctypes.data_as(POINTER(GLubyte)),
pitch=img_width * 3,
)
img_data.blit(
0,
0,
0,
width=img_width,
height=img_height
)
obs = np.ascontiguousarray(np.flip(obs, axis=0))
obs_data = pyglet.image.ImageData(
obs_width,
obs_height,
'RGB',
obs.ctypes.data_as(POINTER(GLubyte)),
pitch=obs_width * 3,
)
obs_data.blit(
img_width,
img_height - self.obs_disp_height,
0,
width=self.obs_disp_width,
height=self.obs_disp_height
)
self.text_label.text = "pos: (%.2f, %.2f, %.2f)\nangle: %d\nsteps: %d" % (
*self.agent.pos,
int(self.agent.dir * 180 / math.pi) % 360,
self.step_count
)
self.text_label.draw()
glFlush()
if mode == 'human':
self.window.flip()
self.window.dispatch_events()
return img
| true | true |
f71a4919671cb710595a953343f020b773680367 | 163 | py | Python | polls/admin.py | egemen61/excell | 654b51d7cb0cb3384b7a8b714a2e21b44fcb7afc | [
"BSD-3-Clause"
] | 253 | 2017-09-15T10:01:58.000Z | 2022-03-27T00:19:49.000Z | polls/admin.py | egemen61/excell | 654b51d7cb0cb3384b7a8b714a2e21b44fcb7afc | [
"BSD-3-Clause"
] | 35 | 2017-10-26T09:16:30.000Z | 2022-01-20T19:57:19.000Z | polls/admin.py | egemen61/excell | 654b51d7cb0cb3384b7a8b714a2e21b44fcb7afc | [
"BSD-3-Clause"
] | 64 | 2017-10-20T15:42:05.000Z | 2022-02-10T02:25:22.000Z | from django.contrib import admin
from polls.models import Question, Choice
# Register your models here.
admin.site.register(Question)
admin.site.register(Choice)
| 23.285714 | 41 | 0.815951 | from django.contrib import admin
from polls.models import Question, Choice
admin.site.register(Question)
admin.site.register(Choice)
| true | true |
f71a4b05579a18c573ff27b6ef2507849421cf07 | 43,124 | py | Python | src/transformers/configuration_utils.py | arfon/transformers | bbd0901805292901e8df05bf7be87d2e43a7ae1b | [
"Apache-2.0"
] | 2 | 2021-12-25T10:04:17.000Z | 2022-03-13T05:37:13.000Z | src/transformers/configuration_utils.py | arfon/transformers | bbd0901805292901e8df05bf7be87d2e43a7ae1b | [
"Apache-2.0"
] | 9 | 2021-06-08T22:35:33.000Z | 2021-10-04T08:53:44.000Z | src/transformers/configuration_utils.py | arfon/transformers | bbd0901805292901e8df05bf7be87d2e43a7ae1b | [
"Apache-2.0"
] | 1 | 2020-06-26T08:13:16.000Z | 2020-06-26T08:13:16.000Z | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Configuration base class and utilities."""
import copy
import json
import os
import warnings
from typing import Any, Dict, Tuple, Union
from . import __version__
from .file_utils import (
CONFIG_NAME,
PushToHubMixin,
cached_path,
copy_func,
hf_bucket_url,
is_offline_mode,
is_remote_url,
is_torch_available,
)
from .utils import logging
logger = logging.get_logger(__name__)
class PretrainedConfig(PushToHubMixin):
r"""
Base class for all configuration classes. Handles a few parameters common to all models' configurations as well as
methods for loading/downloading/saving configurations.
Note:
A configuration file can be loaded and saved to disk. Loading the configuration file and using this file to
initialize a model does **not** load the model weights. It only affects the model's configuration.
Class attributes (overridden by derived classes)
- **model_type** (:obj:`str`) -- An identifier for the model type, serialized into the JSON file, and used to
recreate the correct object in :class:`~transformers.AutoConfig`.
- **is_composition** (:obj:`bool`) -- Whether the config class is composed of multiple sub-configs. In this
case the config has to be initialized from two or more configs of type
:class:`~transformers.PretrainedConfig` like: :class:`~transformers.EncoderDecoderConfig` or
:class:`~RagConfig`.
- **keys_to_ignore_at_inference** (:obj:`List[str]`) -- A list of keys to ignore by default when looking at
dictionary outputs of the model during inference.
- **attribute_map** (:obj:`Dict[str, str]`) -- A dict that maps model specific attribute names to the
standardized naming of attributes.
Common attributes (present in all subclasses)
- **vocab_size** (:obj:`int`) -- The number of tokens in the vocabulary, which is also the first dimension of
the embeddings matrix (this attribute may be missing for models that don't have a text modality like ViT).
- **hidden_size** (:obj:`int`) -- The hidden size of the model.
- **num_attention_heads** (:obj:`int`) -- The number of attention heads used in the multi-head attention layers
of the model.
- **num_hidden_layers** (:obj:`int`) -- The number of blocks in the model.
Args:
name_or_path (:obj:`str`, `optional`, defaults to :obj:`""`):
Store the string that was passed to :func:`~transformers.PreTrainedModel.from_pretrained` or
:func:`~transformers.TFPreTrainedModel.from_pretrained` as ``pretrained_model_name_or_path`` if the
configuration was created with such a method.
output_hidden_states (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether or not the model should return all hidden-states.
output_attentions (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether or not the model should returns all attentions.
return_dict (:obj:`bool`, `optional`, defaults to :obj:`True`):
Whether or not the model should return a :class:`~transformers.file_utils.ModelOutput` instead of a plain
tuple.
is_encoder_decoder (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether the model is used as an encoder/decoder or not.
is_decoder (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether the model is used as decoder or not (in which case it's used as an encoder).
add_cross_attention (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether cross-attention layers should be added to the model. Note, this option is only relevant for models
that can be used as decoder models within the `:class:~transformers.EncoderDecoderModel` class, which
consists of all models in ``AUTO_MODELS_FOR_CAUSAL_LM``.
tie_encoder_decoder (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether all encoder weights should be tied to their equivalent decoder weights. This requires the encoder
and decoder model to have the exact same parameter names.
prune_heads (:obj:`Dict[int, List[int]]`, `optional`, defaults to :obj:`{}`):
Pruned heads of the model. The keys are the selected layer indices and the associated values, the list of
heads to prune in said layer.
For instance ``{1: [0, 2], 2: [2, 3]}`` will prune heads 0 and 2 on layer 1 and heads 2 and 3 on layer 2.
chunk_size_feed_forward (:obj:`int`, `optional`, defaults to :obj:`0`):
The chunk size of all feed forward layers in the residual attention blocks. A chunk size of :obj:`0` means
that the feed forward layer is not chunked. A chunk size of n means that the feed forward layer processes
:obj:`n` < sequence_length embeddings at a time. For more information on feed forward chunking, see `How
does Feed Forward Chunking work? <../glossary.html#feed-forward-chunking>`__ .
Parameters for sequence generation
- **max_length** (:obj:`int`, `optional`, defaults to 20) -- Maximum length that will be used by default in the
:obj:`generate` method of the model.
- **min_length** (:obj:`int`, `optional`, defaults to 10) -- Minimum length that will be used by default in the
:obj:`generate` method of the model.
- **do_sample** (:obj:`bool`, `optional`, defaults to :obj:`False`) -- Flag that will be used by default in the
:obj:`generate` method of the model. Whether or not to use sampling ; use greedy decoding otherwise.
- **early_stopping** (:obj:`bool`, `optional`, defaults to :obj:`False`) -- Flag that will be used by default
in the :obj:`generate` method of the model. Whether to stop the beam search when at least ``num_beams``
sentences are finished per batch or not.
- **num_beams** (:obj:`int`, `optional`, defaults to 1) -- Number of beams for beam search that will be used by
default in the :obj:`generate` method of the model. 1 means no beam search.
- **num_beam_groups** (:obj:`int`, `optional`, defaults to 1) -- Number of groups to divide :obj:`num_beams`
into in order to ensure diversity among different groups of beams that will be used by default in the
:obj:`generate` method of the model. 1 means no group beam search.
- **diversity_penalty** (:obj:`float`, `optional`, defaults to 0.0) -- Value to control diversity for group
beam search. that will be used by default in the :obj:`generate` method of the model. 0 means no diversity
penalty. The higher the penalty, the more diverse are the outputs.
- **temperature** (:obj:`float`, `optional`, defaults to 1) -- The value used to module the next token
probabilities that will be used by default in the :obj:`generate` method of the model. Must be strictly
positive.
- **top_k** (:obj:`int`, `optional`, defaults to 50) -- Number of highest probability vocabulary tokens to keep
for top-k-filtering that will be used by default in the :obj:`generate` method of the model.
- **top_p** (:obj:`float`, `optional`, defaults to 1) -- Value that will be used by default in the
:obj:`generate` method of the model for ``top_p``. If set to float < 1, only the most probable tokens with
probabilities that add up to ``top_p`` or higher are kept for generation.
- **repetition_penalty** (:obj:`float`, `optional`, defaults to 1) -- Parameter for repetition penalty that
will be used by default in the :obj:`generate` method of the model. 1.0 means no penalty.
- **length_penalty** (:obj:`float`, `optional`, defaults to 1) -- Exponential penalty to the length that will
be used by default in the :obj:`generate` method of the model.
- **no_repeat_ngram_size** (:obj:`int`, `optional`, defaults to 0) -- Value that will be used by default in the
:obj:`generate` method of the model for ``no_repeat_ngram_size``. If set to int > 0, all ngrams of that size
can only occur once.
- **encoder_no_repeat_ngram_size** (:obj:`int`, `optional`, defaults to 0) -- Value that will be used by
default in the :obj:`generate` method of the model for ``encoder_no_repeat_ngram_size``. If set to int > 0,
all ngrams of that size that occur in the ``encoder_input_ids`` cannot occur in the ``decoder_input_ids``.
- **bad_words_ids** (:obj:`List[int]`, `optional`) -- List of token ids that are not allowed to be generated
that will be used by default in the :obj:`generate` method of the model. In order to get the tokens of the
words that should not appear in the generated text, use :obj:`tokenizer.encode(bad_word,
add_prefix_space=True)`.
- **num_return_sequences** (:obj:`int`, `optional`, defaults to 1) -- Number of independently computed returned
sequences for each element in the batch that will be used by default in the :obj:`generate` method of the
model.
- **output_scores** (:obj:`bool`, `optional`, defaults to :obj:`False`) -- Whether the model should return the
logits when used for generation
- **return_dict_in_generate** (:obj:`bool`, `optional`, defaults to :obj:`False`) -- Whether the model should
return a :class:`~transformers.file_utils.ModelOutput` instead of a :obj:`torch.LongTensor`
- **forced_bos_token_id** (:obj:`int`, `optional`) -- The id of the token to force as the first generated token
after the :obj:`decoder_start_token_id`. Useful for multilingual models like :doc:`mBART
<../model_doc/mbart>` where the first generated token needs to be the target language token.
- **forced_eos_token_id** (:obj:`int`, `optional`) -- The id of the token to force as the last generated token
when :obj:`max_length` is reached.
- **remove_invalid_values** (:obj:`bool`, `optional`) -- Whether to remove possible `nan` and `inf` outputs of
the model to prevent the generation method to crash. Note that using ``remove_invalid_values`` can slow down
generation.
Parameters for fine-tuning tasks
- **architectures** (:obj:`List[str]`, `optional`) -- Model architectures that can be used with the model
pretrained weights.
- **finetuning_task** (:obj:`str`, `optional`) -- Name of the task used to fine-tune the model. This can be
used when converting from an original (TensorFlow or PyTorch) checkpoint.
- **id2label** (:obj:`Dict[int, str]`, `optional`) -- A map from index (for instance prediction index, or
target index) to label.
- **label2id** (:obj:`Dict[str, int]`, `optional`) -- A map from label to index for the model.
- **num_labels** (:obj:`int`, `optional`) -- Number of labels to use in the last layer added to the model,
typically for a classification task.
- **task_specific_params** (:obj:`Dict[str, Any]`, `optional`) -- Additional keyword arguments to store for the
current task.
- **problem_type** (:obj:`str`, `optional`) -- Problem type for :obj:`XxxForSequenceClassification` models. Can
be one of (:obj:`"regression"`, :obj:`"single_label_classification"`, :obj:`"multi_label_classification"`).
Please note that this parameter is only available in the following models: `AlbertForSequenceClassification`,
`BertForSequenceClassification`, `BigBirdForSequenceClassification`, `ConvBertForSequenceClassification`,
`DistilBertForSequenceClassification`, `ElectraForSequenceClassification`, `FunnelForSequenceClassification`,
`LongformerForSequenceClassification`, `MobileBertForSequenceClassification`,
`ReformerForSequenceClassification`, `RobertaForSequenceClassification`,
`SqueezeBertForSequenceClassification`, `XLMForSequenceClassification` and `XLNetForSequenceClassification`.
Parameters linked to the tokenizer
- **tokenizer_class** (:obj:`str`, `optional`) -- The name of the associated tokenizer class to use (if none is
set, will use the tokenizer associated to the model by default).
- **prefix** (:obj:`str`, `optional`) -- A specific prompt that should be added at the beginning of each text
before calling the model.
- **bos_token_id** (:obj:`int`, `optional`)) -- The id of the `beginning-of-stream` token.
- **pad_token_id** (:obj:`int`, `optional`)) -- The id of the `padding` token.
- **eos_token_id** (:obj:`int`, `optional`)) -- The id of the `end-of-stream` token.
- **decoder_start_token_id** (:obj:`int`, `optional`)) -- If an encoder-decoder model starts decoding with a
different token than `bos`, the id of that token.
- **sep_token_id** (:obj:`int`, `optional`)) -- The id of the `separation` token.
PyTorch specific parameters
- **torchscript** (:obj:`bool`, `optional`, defaults to :obj:`False`) -- Whether or not the model should be
used with Torchscript.
- **tie_word_embeddings** (:obj:`bool`, `optional`, defaults to :obj:`True`) -- Whether the model's input and
output word embeddings should be tied. Note that this is only relevant if the model has a output word
embedding layer.
- **torch_dtype** (:obj:`str`, `optional`) -- The :obj:`dtype` of the weights. This attribute can be used to
initialize the model to a non-default ``dtype`` (which is normally ``float32``) and thus allow for optimal
storage allocation. For example, if the saved model is ``float16``, ideally we want to load it back using the
minimal amount of memory needed to load ``float16`` weights. Since the config object is stored in plain text,
this attribute contains just the floating type string without the ``torch.`` prefix. For example, for
``torch.float16`` ``torch_dtype`` is the ``"float16"`` string.
This attribute is currently not being used during model loading time, but this may change in the future
versions. But we can already start preparing for the future by saving the dtype with save_pretrained.
TensorFlow specific parameters
- **use_bfloat16** (:obj:`bool`, `optional`, defaults to :obj:`False`) -- Whether or not the model should use
BFloat16 scalars (only used by some TensorFlow models).
"""
model_type: str = ""
is_composition: bool = False
attribute_map: Dict[str, str] = {}
def __setattr__(self, key, value):
if key in super().__getattribute__("attribute_map"):
key = super().__getattribute__("attribute_map")[key]
super().__setattr__(key, value)
def __getattribute__(self, key):
if key != "attribute_map" and key in super().__getattribute__("attribute_map"):
key = super().__getattribute__("attribute_map")[key]
return super().__getattribute__(key)
def __init__(self, **kwargs):
# Attributes with defaults
self.return_dict = kwargs.pop("return_dict", True)
self.output_hidden_states = kwargs.pop("output_hidden_states", False)
self.output_attentions = kwargs.pop("output_attentions", False)
self.torchscript = kwargs.pop("torchscript", False) # Only used by PyTorch models
self.torch_dtype = kwargs.pop("torch_dtype", None) # Only used by PyTorch models
self.use_bfloat16 = kwargs.pop("use_bfloat16", False)
self.pruned_heads = kwargs.pop("pruned_heads", {})
self.tie_word_embeddings = kwargs.pop(
"tie_word_embeddings", True
) # Whether input and output word embeddings should be tied for all MLM, LM and Seq2Seq models.
# Is decoder is used in encoder-decoder models to differentiate encoder from decoder
self.is_encoder_decoder = kwargs.pop("is_encoder_decoder", False)
self.is_decoder = kwargs.pop("is_decoder", False)
self.add_cross_attention = kwargs.pop("add_cross_attention", False)
self.tie_encoder_decoder = kwargs.pop("tie_encoder_decoder", False)
# Parameters for sequence generation
self.max_length = kwargs.pop("max_length", 20)
self.min_length = kwargs.pop("min_length", 0)
self.do_sample = kwargs.pop("do_sample", False)
self.early_stopping = kwargs.pop("early_stopping", False)
self.num_beams = kwargs.pop("num_beams", 1)
self.num_beam_groups = kwargs.pop("num_beam_groups", 1)
self.diversity_penalty = kwargs.pop("diversity_penalty", 0.0)
self.temperature = kwargs.pop("temperature", 1.0)
self.top_k = kwargs.pop("top_k", 50)
self.top_p = kwargs.pop("top_p", 1.0)
self.repetition_penalty = kwargs.pop("repetition_penalty", 1.0)
self.length_penalty = kwargs.pop("length_penalty", 1.0)
self.no_repeat_ngram_size = kwargs.pop("no_repeat_ngram_size", 0)
self.encoder_no_repeat_ngram_size = kwargs.pop("encoder_no_repeat_ngram_size", 0)
self.bad_words_ids = kwargs.pop("bad_words_ids", None)
self.num_return_sequences = kwargs.pop("num_return_sequences", 1)
self.chunk_size_feed_forward = kwargs.pop("chunk_size_feed_forward", 0)
self.output_scores = kwargs.pop("output_scores", False)
self.return_dict_in_generate = kwargs.pop("return_dict_in_generate", False)
self.forced_bos_token_id = kwargs.pop("forced_bos_token_id", None)
self.forced_eos_token_id = kwargs.pop("forced_eos_token_id", None)
self.remove_invalid_values = kwargs.pop("remove_invalid_values", False)
# Fine-tuning task arguments
self.architectures = kwargs.pop("architectures", None)
self.finetuning_task = kwargs.pop("finetuning_task", None)
self.id2label = kwargs.pop("id2label", None)
self.label2id = kwargs.pop("label2id", None)
if self.id2label is not None:
kwargs.pop("num_labels", None)
self.id2label = dict((int(key), value) for key, value in self.id2label.items())
# Keys are always strings in JSON so convert ids to int here.
else:
self.num_labels = kwargs.pop("num_labels", 2)
if self.torch_dtype is not None and isinstance(self.torch_dtype, str):
# we will start using self.torch_dtype in v5, but to be consistent with
# from_pretrained's torch_dtype arg convert it to an actual torch.dtype object
if is_torch_available():
import torch
self.torch_dtype = getattr(torch, self.torch_dtype)
# Tokenizer arguments TODO: eventually tokenizer and models should share the same config
self.tokenizer_class = kwargs.pop("tokenizer_class", None)
self.prefix = kwargs.pop("prefix", None)
self.bos_token_id = kwargs.pop("bos_token_id", None)
self.pad_token_id = kwargs.pop("pad_token_id", None)
self.eos_token_id = kwargs.pop("eos_token_id", None)
self.sep_token_id = kwargs.pop("sep_token_id", None)
self.decoder_start_token_id = kwargs.pop("decoder_start_token_id", None)
# task specific arguments
self.task_specific_params = kwargs.pop("task_specific_params", None)
# regression / multi-label classification
self.problem_type = kwargs.pop("problem_type", None)
allowed_problem_types = ("regression", "single_label_classification", "multi_label_classification")
if self.problem_type is not None and self.problem_type not in allowed_problem_types:
raise ValueError(
f"The config parameter `problem_type` was not understood: received {self.problem_type}"
"but only 'regression', 'single_label_classification' and 'multi_label_classification' are valid."
)
# TPU arguments
if kwargs.pop("xla_device", None) is not None:
logger.warning(
"The `xla_device` argument has been deprecated in v4.4.0 of Transformers. It is ignored and you can "
"safely remove it from your `config.json` file."
)
# Name or path to the pretrained checkpoint
self._name_or_path = str(kwargs.pop("name_or_path", ""))
# Drop the transformers version info
self.transformers_version = kwargs.pop("transformers_version", None)
# Deal with gradient checkpointing
if kwargs.get("gradient_checkpointing", False):
warnings.warn(
"Passing `gradient_checkpointing` to a config initialization is deprecated and will be removed in v5 "
"Transformers. Using `model.gradient_checkpointing_enable()` instead, or if you are using the "
"`Trainer` API, pass `gradient_checkpointing=True` in your `TrainingArguments`."
)
# Additional attributes without default values
for key, value in kwargs.items():
try:
setattr(self, key, value)
except AttributeError as err:
logger.error(f"Can't set {key} with value {value} for {self}")
raise err
    @property
    def name_or_path(self) -> str:
        """:obj:`str`: The checkpoint name or path this config was created from (``""`` if built directly)."""
        return self._name_or_path
@name_or_path.setter
def name_or_path(self, value):
self._name_or_path = str(value) # Make sure that name_or_path is a string (for JSON encoding)
@property
def use_return_dict(self) -> bool:
"""
:obj:`bool`: Whether or not return :class:`~transformers.file_utils.ModelOutput` instead of tuples.
"""
# If torchscript is set, force `return_dict=False` to avoid jit errors
return self.return_dict and not self.torchscript
@property
def num_labels(self) -> int:
"""
:obj:`int`: The number of labels for classification models.
"""
return len(self.id2label)
@num_labels.setter
def num_labels(self, num_labels: int):
if not hasattr(self, "id2label") or self.id2label is None or len(self.id2label) != num_labels:
self.id2label = {i: f"LABEL_{i}" for i in range(num_labels)}
self.label2id = dict(zip(self.id2label.values(), self.id2label.keys()))
    def save_pretrained(self, save_directory: Union[str, os.PathLike], push_to_hub: bool = False, **kwargs):
        """
        Save a configuration object to the directory ``save_directory``, so that it can be re-loaded using the
        :func:`~transformers.PretrainedConfig.from_pretrained` class method.

        Args:
            save_directory (:obj:`str` or :obj:`os.PathLike`):
                Directory where the configuration JSON file will be saved (will be created if it does not exist).
            push_to_hub (:obj:`bool`, `optional`, defaults to :obj:`False`):
                Whether or not to push your model to the Hugging Face model hub after saving it.

                .. warning::

                    Using :obj:`push_to_hub=True` will synchronize the repository you are pushing to with
                    :obj:`save_directory`, which requires :obj:`save_directory` to be a local clone of the repo you are
                    pushing to if it's an existing folder. Pass along :obj:`temp_dir=True` to use a temporary directory
                    instead.

            kwargs:
                Additional key word arguments passed along to the
                :meth:`~transformers.file_utils.PushToHubMixin.push_to_hub` method.
        """
        if os.path.isfile(save_directory):
            raise AssertionError(f"Provided path ({save_directory}) should be a directory, not a file")

        if push_to_hub:
            # The repo is set up *before* the directory is written to, so that
            # `save_directory` ends up as a clone of the target repository
            # (see the warning in the docstring).
            commit_message = kwargs.pop("commit_message", None)
            repo = self._create_or_get_repo(save_directory, **kwargs)

        os.makedirs(save_directory, exist_ok=True)
        # If we save using the predefined names, we can load using `from_pretrained`
        output_config_file = os.path.join(save_directory, CONFIG_NAME)

        self.to_json_file(output_config_file, use_diff=True)
        logger.info(f"Configuration saved in {output_config_file}")

        if push_to_hub:
            url = self._push_to_hub(repo, commit_message=commit_message)
            logger.info(f"Configuration pushed to the hub in this commit: {url}")
@classmethod
def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> "PretrainedConfig":
r"""
Instantiate a :class:`~transformers.PretrainedConfig` (or a derived class) from a pretrained model
configuration.
Args:
pretrained_model_name_or_path (:obj:`str` or :obj:`os.PathLike`):
This can be either:
- a string, the `model id` of a pretrained model configuration hosted inside a model repo on
huggingface.co. Valid model ids can be located at the root-level, like ``bert-base-uncased``, or
namespaced under a user or organization name, like ``dbmdz/bert-base-german-cased``.
- a path to a `directory` containing a configuration file saved using the
:func:`~transformers.PretrainedConfig.save_pretrained` method, e.g., ``./my_model_directory/``.
- a path or url to a saved configuration JSON `file`, e.g.,
``./my_model_directory/configuration.json``.
cache_dir (:obj:`str` or :obj:`os.PathLike`, `optional`):
Path to a directory in which a downloaded pretrained model configuration should be cached if the
standard cache should not be used.
force_download (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether or not to force to (re-)download the configuration files and override the cached versions if
they exist.
resume_download (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether or not to delete incompletely received file. Attempts to resume the download if such a file
exists.
proxies (:obj:`Dict[str, str]`, `optional`):
A dictionary of proxy servers to use by protocol or endpoint, e.g., :obj:`{'http': 'foo.bar:3128',
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (:obj:`str` or `bool`, `optional`):
The token to use as HTTP bearer authorization for remote files. If :obj:`True`, will use the token
generated when running :obj:`transformers-cli login` (stored in :obj:`~/.huggingface`).
revision(:obj:`str`, `optional`, defaults to :obj:`"main"`):
The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so ``revision`` can be any
identifier allowed by git.
return_unused_kwargs (:obj:`bool`, `optional`, defaults to :obj:`False`):
If :obj:`False`, then this function returns just the final configuration object.
If :obj:`True`, then this functions returns a :obj:`Tuple(config, unused_kwargs)` where `unused_kwargs`
is a dictionary consisting of the key/value pairs whose keys are not configuration attributes: i.e.,
the part of ``kwargs`` which has not been used to update ``config`` and is otherwise ignored.
kwargs (:obj:`Dict[str, Any]`, `optional`):
The values in kwargs of any keys which are configuration attributes will be used to override the loaded
values. Behavior concerning key/value pairs whose keys are *not* configuration attributes is controlled
by the ``return_unused_kwargs`` keyword parameter.
.. note::
Passing :obj:`use_auth_token=True` is required when you want to use a private model.
Returns:
:class:`PretrainedConfig`: The configuration object instantiated from this pretrained model.
Examples::
# We can't instantiate directly the base class `PretrainedConfig` so let's show the examples on a
# derived class: BertConfig
config = BertConfig.from_pretrained('bert-base-uncased') # Download configuration from huggingface.co and cache.
config = BertConfig.from_pretrained('./test/saved_model/') # E.g. config (or model) was saved using `save_pretrained('./test/saved_model/')`
config = BertConfig.from_pretrained('./test/saved_model/my_configuration.json')
config = BertConfig.from_pretrained('bert-base-uncased', output_attentions=True, foo=False)
assert config.output_attentions == True
config, unused_kwargs = BertConfig.from_pretrained('bert-base-uncased', output_attentions=True,
foo=False, return_unused_kwargs=True)
assert config.output_attentions == True
assert unused_kwargs == {'foo': False}
"""
config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type:
logger.warn(
f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
f"{cls.model_type}. This is not supported for all configurations of models and can yield errors."
)
return cls.from_dict(config_dict, **kwargs)
    @classmethod
    def get_config_dict(
        cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs
    ) -> Tuple[Dict[str, Any], Dict[str, Any]]:
        """
        From a ``pretrained_model_name_or_path``, resolve to a dictionary of parameters, to be used for instantiating a
        :class:`~transformers.PretrainedConfig` using ``from_dict``.

        Parameters:
            pretrained_model_name_or_path (:obj:`str` or :obj:`os.PathLike`):
                The identifier of the pre-trained checkpoint from which we want the dictionary of parameters.

        Returns:
            :obj:`Tuple[Dict, Dict]`: The dictionary(ies) that will be used to instantiate the configuration object
            and the remaining (unconsumed) keyword arguments.
        """
        # Download-related options are consumed here; everything left in
        # kwargs is handed back to the caller untouched.
        cache_dir = kwargs.pop("cache_dir", None)
        force_download = kwargs.pop("force_download", False)
        resume_download = kwargs.pop("resume_download", False)
        proxies = kwargs.pop("proxies", None)
        use_auth_token = kwargs.pop("use_auth_token", None)
        local_files_only = kwargs.pop("local_files_only", False)
        revision = kwargs.pop("revision", None)
        from_pipeline = kwargs.pop("_from_pipeline", None)
        from_auto_class = kwargs.pop("_from_auto", False)

        # User-agent metadata sent along with the download request.
        user_agent = {"file_type": "config", "from_auto_class": from_auto_class}
        if from_pipeline is not None:
            user_agent["using_pipeline"] = from_pipeline

        if is_offline_mode() and not local_files_only:
            logger.info("Offline mode: forcing local_files_only=True")
            local_files_only = True

        pretrained_model_name_or_path = str(pretrained_model_name_or_path)
        # Resolve the config file location: a local directory, an explicit
        # file/URL, or a model id on the hub (in that priority order).
        if os.path.isdir(pretrained_model_name_or_path):
            config_file = os.path.join(pretrained_model_name_or_path, CONFIG_NAME)
        elif os.path.isfile(pretrained_model_name_or_path) or is_remote_url(pretrained_model_name_or_path):
            config_file = pretrained_model_name_or_path
        else:
            config_file = hf_bucket_url(
                pretrained_model_name_or_path, filename=CONFIG_NAME, revision=revision, mirror=None
            )

        try:
            # Load from URL or cache if already cached
            resolved_config_file = cached_path(
                config_file,
                cache_dir=cache_dir,
                force_download=force_download,
                proxies=proxies,
                resume_download=resume_download,
                local_files_only=local_files_only,
                use_auth_token=use_auth_token,
                user_agent=user_agent,
            )
            # Load config dict
            config_dict = cls._dict_from_json_file(resolved_config_file)

        except EnvironmentError as err:
            logger.error(err)
            msg = (
                f"Can't load config for '{pretrained_model_name_or_path}'. Make sure that:\n\n"
                f"- '{pretrained_model_name_or_path}' is a correct model identifier listed on 'https://huggingface.co/models'\n\n"
                f"- or '{pretrained_model_name_or_path}' is the correct path to a directory containing a {CONFIG_NAME} file\n\n"
            )

            if revision is not None:
                msg += f"- or '{revision}' is a valid git identifier (branch name, a tag name, or a commit id) that exists for this model name as listed on its model page on 'https://huggingface.co/models'\n\n"

            raise EnvironmentError(msg)

        except (json.JSONDecodeError, UnicodeDecodeError):
            # Only reachable after cached_path succeeded, so
            # `resolved_config_file` is guaranteed to be bound here.
            msg = (
                f"Couldn't reach server at '{config_file}' to download configuration file or "
                "configuration file is not a valid JSON file. "
                f"Please check network or file content here: {resolved_config_file}."
            )
            raise EnvironmentError(msg)

        if resolved_config_file == config_file:
            logger.info(f"loading configuration file {config_file}")
        else:
            logger.info(f"loading configuration file {config_file} from cache at {resolved_config_file}")

        return config_dict, kwargs
@classmethod
def from_dict(cls, config_dict: Dict[str, Any], **kwargs) -> "PretrainedConfig":
"""
Instantiates a :class:`~transformers.PretrainedConfig` from a Python dictionary of parameters.
Args:
config_dict (:obj:`Dict[str, Any]`):
Dictionary that will be used to instantiate the configuration object. Such a dictionary can be
retrieved from a pretrained checkpoint by leveraging the
:func:`~transformers.PretrainedConfig.get_config_dict` method.
kwargs (:obj:`Dict[str, Any]`):
Additional parameters from which to initialize the configuration object.
Returns:
:class:`PretrainedConfig`: The configuration object instantiated from those parameters.
"""
return_unused_kwargs = kwargs.pop("return_unused_kwargs", False)
config = cls(**config_dict)
if hasattr(config, "pruned_heads"):
config.pruned_heads = dict((int(key), value) for key, value in config.pruned_heads.items())
# Update config with kwargs if needed
to_remove = []
for key, value in kwargs.items():
if hasattr(config, key):
setattr(config, key, value)
if key != "torch_dtype":
to_remove.append(key)
for key in to_remove:
kwargs.pop(key, None)
logger.info(f"Model config {config}")
if return_unused_kwargs:
return config, kwargs
else:
return config
@classmethod
def from_json_file(cls, json_file: Union[str, os.PathLike]) -> "PretrainedConfig":
"""
Instantiates a :class:`~transformers.PretrainedConfig` from the path to a JSON file of parameters.
Args:
json_file (:obj:`str` or :obj:`os.PathLike`):
Path to the JSON file containing the parameters.
Returns:
:class:`PretrainedConfig`: The configuration object instantiated from that JSON file.
"""
config_dict = cls._dict_from_json_file(json_file)
return cls(**config_dict)
@classmethod
def _dict_from_json_file(cls, json_file: Union[str, os.PathLike]):
with open(json_file, "r", encoding="utf-8") as reader:
text = reader.read()
return json.loads(text)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return f"{self.__class__.__name__} {self.to_json_string()}"
def to_diff_dict(self) -> Dict[str, Any]:
"""
Removes all attributes from config which correspond to the default config attributes for better readability and
serializes to a Python dictionary.
Returns:
:obj:`Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance,
"""
config_dict = self.to_dict()
# get the default config dict
default_config_dict = PretrainedConfig().to_dict()
# get class specific config dict
class_config_dict = self.__class__().to_dict() if not self.is_composition else {}
serializable_config_dict = {}
# only serialize values that differ from the default config
for key, value in config_dict.items():
if (
key not in default_config_dict
or key == "transformers_version"
or value != default_config_dict[key]
or (key in class_config_dict and value != class_config_dict[key])
):
serializable_config_dict[key] = value
self.dict_torch_dtype_to_str(serializable_config_dict)
return serializable_config_dict
def to_dict(self) -> Dict[str, Any]:
"""
Serializes this instance to a Python dictionary.
Returns:
:obj:`Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance.
"""
output = copy.deepcopy(self.__dict__)
if hasattr(self.__class__, "model_type"):
output["model_type"] = self.__class__.model_type
# Transformers version when serializing the model
output["transformers_version"] = __version__
self.dict_torch_dtype_to_str(output)
return output
def to_json_string(self, use_diff: bool = True) -> str:
"""
Serializes this instance to a JSON string.
Args:
use_diff (:obj:`bool`, `optional`, defaults to :obj:`True`):
If set to ``True``, only the difference between the config instance and the default
``PretrainedConfig()`` is serialized to JSON string.
Returns:
:obj:`str`: String containing all the attributes that make up this configuration instance in JSON format.
"""
if use_diff is True:
config_dict = self.to_diff_dict()
else:
config_dict = self.to_dict()
return json.dumps(config_dict, indent=2, sort_keys=True) + "\n"
def to_json_file(self, json_file_path: Union[str, os.PathLike], use_diff: bool = True):
"""
Save this instance to a JSON file.
Args:
json_file_path (:obj:`str` or :obj:`os.PathLike`):
Path to the JSON file in which this configuration instance's parameters will be saved.
use_diff (:obj:`bool`, `optional`, defaults to :obj:`True`):
If set to ``True``, only the difference between the config instance and the default
``PretrainedConfig()`` is serialized to JSON file.
"""
with open(json_file_path, "w", encoding="utf-8") as writer:
writer.write(self.to_json_string(use_diff=use_diff))
def update(self, config_dict: Dict[str, Any]):
"""
Updates attributes of this class with attributes from ``config_dict``.
Args:
config_dict (:obj:`Dict[str, Any]`): Dictionary of attributes that should be updated for this class.
"""
for key, value in config_dict.items():
setattr(self, key, value)
def update_from_string(self, update_str: str):
"""
Updates attributes of this class with attributes from ``update_str``.
The expected format is ints, floats and strings as is, and for booleans use ``true`` or ``false``. For example:
"n_embd=10,resid_pdrop=0.2,scale_attn_weights=false,summary_type=cls_index"
The keys to change have to already exist in the config object.
Args:
update_str (:obj:`str`): String with attributes that should be updated for this class.
"""
d = dict(x.split("=") for x in update_str.split(","))
for k, v in d.items():
if not hasattr(self, k):
raise ValueError(f"key {k} isn't in the original config dict")
old_v = getattr(self, k)
if isinstance(old_v, bool):
if v.lower() in ["true", "1", "y", "yes"]:
v = True
elif v.lower() in ["false", "0", "n", "no"]:
v = False
else:
raise ValueError(f"can't derive true or false from {v} (key {k})")
elif isinstance(old_v, int):
v = int(v)
elif isinstance(old_v, float):
v = float(v)
elif not isinstance(old_v, str):
raise ValueError(
f"You can only update int, float, bool or string values in the config, got {v} for key {k}"
)
setattr(self, k, v)
def dict_torch_dtype_to_str(self, d: Dict[str, Any]) -> None:
"""
Checks whether the passed dictionary has a `torch_dtype` key and if it's not None, converts torch.dtype to a
string of just the type. For example, :obj:`torch.float32` get converted into `"float32"` string, which can
then be stored in the json format.
"""
if d.get("torch_dtype", None) is not None and not isinstance(d["torch_dtype"], str):
d["torch_dtype"] = str(d["torch_dtype"]).split(".")[1]
# Rebind ``push_to_hub`` to a per-class copy before formatting its docstring, so the
# ``.format(...)`` below customizes only ``PretrainedConfig``'s copy rather than mutating
# the docstring shared with other ``PushToHubMixin`` subclasses.
PretrainedConfig.push_to_hub = copy_func(PretrainedConfig.push_to_hub)
PretrainedConfig.push_to_hub.__doc__ = PretrainedConfig.push_to_hub.__doc__.format(
    object="config", object_class="AutoConfig", object_files="configuration file"
)
| 54.0401 | 210 | 0.649082 |
import copy
import json
import os
import warnings
from typing import Any, Dict, Tuple, Union
from . import __version__
from .file_utils import (
CONFIG_NAME,
PushToHubMixin,
cached_path,
copy_func,
hf_bucket_url,
is_offline_mode,
is_remote_url,
is_torch_available,
)
from .utils import logging
logger = logging.get_logger(__name__)
class PretrainedConfig(PushToHubMixin):
model_type: str = ""
is_composition: bool = False
attribute_map: Dict[str, str] = {}
def __setattr__(self, key, value):
if key in super().__getattribute__("attribute_map"):
key = super().__getattribute__("attribute_map")[key]
super().__setattr__(key, value)
def __getattribute__(self, key):
if key != "attribute_map" and key in super().__getattribute__("attribute_map"):
key = super().__getattribute__("attribute_map")[key]
return super().__getattribute__(key)
def __init__(self, **kwargs):
self.return_dict = kwargs.pop("return_dict", True)
self.output_hidden_states = kwargs.pop("output_hidden_states", False)
self.output_attentions = kwargs.pop("output_attentions", False)
self.torchscript = kwargs.pop("torchscript", False)
self.torch_dtype = kwargs.pop("torch_dtype", None)
self.use_bfloat16 = kwargs.pop("use_bfloat16", False)
self.pruned_heads = kwargs.pop("pruned_heads", {})
self.tie_word_embeddings = kwargs.pop(
"tie_word_embeddings", True
)
self.is_encoder_decoder = kwargs.pop("is_encoder_decoder", False)
self.is_decoder = kwargs.pop("is_decoder", False)
self.add_cross_attention = kwargs.pop("add_cross_attention", False)
self.tie_encoder_decoder = kwargs.pop("tie_encoder_decoder", False)
self.max_length = kwargs.pop("max_length", 20)
self.min_length = kwargs.pop("min_length", 0)
self.do_sample = kwargs.pop("do_sample", False)
self.early_stopping = kwargs.pop("early_stopping", False)
self.num_beams = kwargs.pop("num_beams", 1)
self.num_beam_groups = kwargs.pop("num_beam_groups", 1)
self.diversity_penalty = kwargs.pop("diversity_penalty", 0.0)
self.temperature = kwargs.pop("temperature", 1.0)
self.top_k = kwargs.pop("top_k", 50)
self.top_p = kwargs.pop("top_p", 1.0)
self.repetition_penalty = kwargs.pop("repetition_penalty", 1.0)
self.length_penalty = kwargs.pop("length_penalty", 1.0)
self.no_repeat_ngram_size = kwargs.pop("no_repeat_ngram_size", 0)
self.encoder_no_repeat_ngram_size = kwargs.pop("encoder_no_repeat_ngram_size", 0)
self.bad_words_ids = kwargs.pop("bad_words_ids", None)
self.num_return_sequences = kwargs.pop("num_return_sequences", 1)
self.chunk_size_feed_forward = kwargs.pop("chunk_size_feed_forward", 0)
self.output_scores = kwargs.pop("output_scores", False)
self.return_dict_in_generate = kwargs.pop("return_dict_in_generate", False)
self.forced_bos_token_id = kwargs.pop("forced_bos_token_id", None)
self.forced_eos_token_id = kwargs.pop("forced_eos_token_id", None)
self.remove_invalid_values = kwargs.pop("remove_invalid_values", False)
self.architectures = kwargs.pop("architectures", None)
self.finetuning_task = kwargs.pop("finetuning_task", None)
self.id2label = kwargs.pop("id2label", None)
self.label2id = kwargs.pop("label2id", None)
if self.id2label is not None:
kwargs.pop("num_labels", None)
self.id2label = dict((int(key), value) for key, value in self.id2label.items())
else:
self.num_labels = kwargs.pop("num_labels", 2)
if self.torch_dtype is not None and isinstance(self.torch_dtype, str):
if is_torch_available():
import torch
self.torch_dtype = getattr(torch, self.torch_dtype)
# Tokenizer arguments TODO: eventually tokenizer and models should share the same config
self.tokenizer_class = kwargs.pop("tokenizer_class", None)
self.prefix = kwargs.pop("prefix", None)
self.bos_token_id = kwargs.pop("bos_token_id", None)
self.pad_token_id = kwargs.pop("pad_token_id", None)
self.eos_token_id = kwargs.pop("eos_token_id", None)
self.sep_token_id = kwargs.pop("sep_token_id", None)
self.decoder_start_token_id = kwargs.pop("decoder_start_token_id", None)
# task specific arguments
self.task_specific_params = kwargs.pop("task_specific_params", None)
# regression / multi-label classification
self.problem_type = kwargs.pop("problem_type", None)
allowed_problem_types = ("regression", "single_label_classification", "multi_label_classification")
if self.problem_type is not None and self.problem_type not in allowed_problem_types:
raise ValueError(
f"The config parameter `problem_type` was not understood: received {self.problem_type}"
"but only 'regression', 'single_label_classification' and 'multi_label_classification' are valid."
)
# TPU arguments
if kwargs.pop("xla_device", None) is not None:
logger.warning(
"The `xla_device` argument has been deprecated in v4.4.0 of Transformers. It is ignored and you can "
"safely remove it from your `config.json` file."
)
# Name or path to the pretrained checkpoint
self._name_or_path = str(kwargs.pop("name_or_path", ""))
# Drop the transformers version info
self.transformers_version = kwargs.pop("transformers_version", None)
# Deal with gradient checkpointing
if kwargs.get("gradient_checkpointing", False):
warnings.warn(
"Passing `gradient_checkpointing` to a config initialization is deprecated and will be removed in v5 "
"Transformers. Using `model.gradient_checkpointing_enable()` instead, or if you are using the "
"`Trainer` API, pass `gradient_checkpointing=True` in your `TrainingArguments`."
)
# Additional attributes without default values
for key, value in kwargs.items():
try:
setattr(self, key, value)
except AttributeError as err:
logger.error(f"Can't set {key} with value {value} for {self}")
raise err
@property
def name_or_path(self) -> str:
return self._name_or_path
@name_or_path.setter
def name_or_path(self, value):
self._name_or_path = str(value)
@property
def use_return_dict(self) -> bool:
return self.return_dict and not self.torchscript
@property
def num_labels(self) -> int:
return len(self.id2label)
@num_labels.setter
def num_labels(self, num_labels: int):
if not hasattr(self, "id2label") or self.id2label is None or len(self.id2label) != num_labels:
self.id2label = {i: f"LABEL_{i}" for i in range(num_labels)}
self.label2id = dict(zip(self.id2label.values(), self.id2label.keys()))
def save_pretrained(self, save_directory: Union[str, os.PathLike], push_to_hub: bool = False, **kwargs):
if os.path.isfile(save_directory):
raise AssertionError(f"Provided path ({save_directory}) should be a directory, not a file")
if push_to_hub:
commit_message = kwargs.pop("commit_message", None)
repo = self._create_or_get_repo(save_directory, **kwargs)
os.makedirs(save_directory, exist_ok=True)
output_config_file = os.path.join(save_directory, CONFIG_NAME)
self.to_json_file(output_config_file, use_diff=True)
logger.info(f"Configuration saved in {output_config_file}")
if push_to_hub:
url = self._push_to_hub(repo, commit_message=commit_message)
logger.info(f"Configuration pushed to the hub in this commit: {url}")
@classmethod
def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> "PretrainedConfig":
config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type:
logger.warn(
f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
f"{cls.model_type}. This is not supported for all configurations of models and can yield errors."
)
return cls.from_dict(config_dict, **kwargs)
@classmethod
def get_config_dict(
cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
cache_dir = kwargs.pop("cache_dir", None)
force_download = kwargs.pop("force_download", False)
resume_download = kwargs.pop("resume_download", False)
proxies = kwargs.pop("proxies", None)
use_auth_token = kwargs.pop("use_auth_token", None)
local_files_only = kwargs.pop("local_files_only", False)
revision = kwargs.pop("revision", None)
from_pipeline = kwargs.pop("_from_pipeline", None)
from_auto_class = kwargs.pop("_from_auto", False)
user_agent = {"file_type": "config", "from_auto_class": from_auto_class}
if from_pipeline is not None:
user_agent["using_pipeline"] = from_pipeline
if is_offline_mode() and not local_files_only:
logger.info("Offline mode: forcing local_files_only=True")
local_files_only = True
pretrained_model_name_or_path = str(pretrained_model_name_or_path)
if os.path.isdir(pretrained_model_name_or_path):
config_file = os.path.join(pretrained_model_name_or_path, CONFIG_NAME)
elif os.path.isfile(pretrained_model_name_or_path) or is_remote_url(pretrained_model_name_or_path):
config_file = pretrained_model_name_or_path
else:
config_file = hf_bucket_url(
pretrained_model_name_or_path, filename=CONFIG_NAME, revision=revision, mirror=None
)
try:
resolved_config_file = cached_path(
config_file,
cache_dir=cache_dir,
force_download=force_download,
proxies=proxies,
resume_download=resume_download,
local_files_only=local_files_only,
use_auth_token=use_auth_token,
user_agent=user_agent,
)
config_dict = cls._dict_from_json_file(resolved_config_file)
except EnvironmentError as err:
logger.error(err)
msg = (
f"Can't load config for '{pretrained_model_name_or_path}'. Make sure that:\n\n"
f"- '{pretrained_model_name_or_path}' is a correct model identifier listed on 'https://huggingface.co/models'\n\n"
f"- or '{pretrained_model_name_or_path}' is the correct path to a directory containing a {CONFIG_NAME} file\n\n"
)
if revision is not None:
msg += f"- or '{revision}' is a valid git identifier (branch name, a tag name, or a commit id) that exists for this model name as listed on its model page on 'https://huggingface.co/models'\n\n"
raise EnvironmentError(msg)
except (json.JSONDecodeError, UnicodeDecodeError):
msg = (
f"Couldn't reach server at '{config_file}' to download configuration file or "
"configuration file is not a valid JSON file. "
f"Please check network or file content here: {resolved_config_file}."
)
raise EnvironmentError(msg)
if resolved_config_file == config_file:
logger.info(f"loading configuration file {config_file}")
else:
logger.info(f"loading configuration file {config_file} from cache at {resolved_config_file}")
return config_dict, kwargs
@classmethod
def from_dict(cls, config_dict: Dict[str, Any], **kwargs) -> "PretrainedConfig":
return_unused_kwargs = kwargs.pop("return_unused_kwargs", False)
config = cls(**config_dict)
if hasattr(config, "pruned_heads"):
config.pruned_heads = dict((int(key), value) for key, value in config.pruned_heads.items())
to_remove = []
for key, value in kwargs.items():
if hasattr(config, key):
setattr(config, key, value)
if key != "torch_dtype":
to_remove.append(key)
for key in to_remove:
kwargs.pop(key, None)
logger.info(f"Model config {config}")
if return_unused_kwargs:
return config, kwargs
else:
return config
@classmethod
def from_json_file(cls, json_file: Union[str, os.PathLike]) -> "PretrainedConfig":
config_dict = cls._dict_from_json_file(json_file)
return cls(**config_dict)
@classmethod
def _dict_from_json_file(cls, json_file: Union[str, os.PathLike]):
with open(json_file, "r", encoding="utf-8") as reader:
text = reader.read()
return json.loads(text)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return f"{self.__class__.__name__} {self.to_json_string()}"
def to_diff_dict(self) -> Dict[str, Any]:
config_dict = self.to_dict()
default_config_dict = PretrainedConfig().to_dict()
class_config_dict = self.__class__().to_dict() if not self.is_composition else {}
serializable_config_dict = {}
for key, value in config_dict.items():
if (
key not in default_config_dict
or key == "transformers_version"
or value != default_config_dict[key]
or (key in class_config_dict and value != class_config_dict[key])
):
serializable_config_dict[key] = value
self.dict_torch_dtype_to_str(serializable_config_dict)
return serializable_config_dict
def to_dict(self) -> Dict[str, Any]:
output = copy.deepcopy(self.__dict__)
if hasattr(self.__class__, "model_type"):
output["model_type"] = self.__class__.model_type
output["transformers_version"] = __version__
self.dict_torch_dtype_to_str(output)
return output
def to_json_string(self, use_diff: bool = True) -> str:
if use_diff is True:
config_dict = self.to_diff_dict()
else:
config_dict = self.to_dict()
return json.dumps(config_dict, indent=2, sort_keys=True) + "\n"
def to_json_file(self, json_file_path: Union[str, os.PathLike], use_diff: bool = True):
with open(json_file_path, "w", encoding="utf-8") as writer:
writer.write(self.to_json_string(use_diff=use_diff))
def update(self, config_dict: Dict[str, Any]):
for key, value in config_dict.items():
setattr(self, key, value)
def update_from_string(self, update_str: str):
d = dict(x.split("=") for x in update_str.split(","))
for k, v in d.items():
if not hasattr(self, k):
raise ValueError(f"key {k} isn't in the original config dict")
old_v = getattr(self, k)
if isinstance(old_v, bool):
if v.lower() in ["true", "1", "y", "yes"]:
v = True
elif v.lower() in ["false", "0", "n", "no"]:
v = False
else:
raise ValueError(f"can't derive true or false from {v} (key {k})")
elif isinstance(old_v, int):
v = int(v)
elif isinstance(old_v, float):
v = float(v)
elif not isinstance(old_v, str):
raise ValueError(
f"You can only update int, float, bool or string values in the config, got {v} for key {k}"
)
setattr(self, k, v)
def dict_torch_dtype_to_str(self, d: Dict[str, Any]) -> None:
if d.get("torch_dtype", None) is not None and not isinstance(d["torch_dtype"], str):
d["torch_dtype"] = str(d["torch_dtype"]).split(".")[1]
PretrainedConfig.push_to_hub = copy_func(PretrainedConfig.push_to_hub)
PretrainedConfig.push_to_hub.__doc__ = PretrainedConfig.push_to_hub.__doc__.format(
object="config", object_class="AutoConfig", object_files="configuration file"
)
| true | true |
f71a4c3038f108011a235c4b7bce53875e9cbabb | 173 | py | Python | sentence-embedding/python-lib/dku_language_model/__init__.py | RedaAffane/dataiku-contrib | d409ddc25d31570972a14abb19a84ac101afc6cc | [
"Apache-2.0"
] | 1 | 2020-10-11T14:53:53.000Z | 2020-10-11T14:53:53.000Z | sentence-embedding/python-lib/dku_language_model/__init__.py | RedaAffane/dataiku-contrib | d409ddc25d31570972a14abb19a84ac101afc6cc | [
"Apache-2.0"
] | 10 | 2020-04-24T13:14:42.000Z | 2022-02-10T01:07:28.000Z | python-lib/dku_language_model/__init__.py | dataiku/dss-plugin-nlp-embedding | 7805151307210e2be15d844728be4ace2d381f13 | [
"Apache-2.0"
] | null | null | null | from dku_language_model.context_independent_language_model import FasttextModel, Word2vecModel, GloveModel
from dku_language_model.contextual_language_model import ElmoModel | 86.5 | 106 | 0.924855 | from dku_language_model.context_independent_language_model import FasttextModel, Word2vecModel, GloveModel
from dku_language_model.contextual_language_model import ElmoModel | true | true |
f71a4cd9e12534305a660dab19c40de08f3f20a3 | 6,545 | py | Python | loldib/getratings/models/NA/na_syndra/na_syndra_jng.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | loldib/getratings/models/NA/na_syndra/na_syndra_jng.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | loldib/getratings/models/NA/na_syndra/na_syndra_jng.py | koliupy/loldib | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | [
"Apache-2.0"
] | null | null | null | from getratings.models.ratings import Ratings
class NA_Syndra_Jng_Aatrox(Ratings):
pass
class NA_Syndra_Jng_Ahri(Ratings):
pass
class NA_Syndra_Jng_Akali(Ratings):
pass
class NA_Syndra_Jng_Alistar(Ratings):
pass
class NA_Syndra_Jng_Amumu(Ratings):
pass
class NA_Syndra_Jng_Anivia(Ratings):
pass
class NA_Syndra_Jng_Annie(Ratings):
pass
class NA_Syndra_Jng_Ashe(Ratings):
pass
class NA_Syndra_Jng_AurelionSol(Ratings):
pass
class NA_Syndra_Jng_Azir(Ratings):
pass
class NA_Syndra_Jng_Bard(Ratings):
pass
class NA_Syndra_Jng_Blitzcrank(Ratings):
pass
class NA_Syndra_Jng_Brand(Ratings):
pass
class NA_Syndra_Jng_Braum(Ratings):
pass
class NA_Syndra_Jng_Caitlyn(Ratings):
pass
class NA_Syndra_Jng_Camille(Ratings):
pass
class NA_Syndra_Jng_Cassiopeia(Ratings):
pass
class NA_Syndra_Jng_Chogath(Ratings):
pass
class NA_Syndra_Jng_Corki(Ratings):
pass
class NA_Syndra_Jng_Darius(Ratings):
pass
class NA_Syndra_Jng_Diana(Ratings):
pass
class NA_Syndra_Jng_Draven(Ratings):
pass
class NA_Syndra_Jng_DrMundo(Ratings):
pass
class NA_Syndra_Jng_Ekko(Ratings):
pass
class NA_Syndra_Jng_Elise(Ratings):
pass
class NA_Syndra_Jng_Evelynn(Ratings):
pass
class NA_Syndra_Jng_Ezreal(Ratings):
pass
class NA_Syndra_Jng_Fiddlesticks(Ratings):
pass
class NA_Syndra_Jng_Fiora(Ratings):
pass
class NA_Syndra_Jng_Fizz(Ratings):
pass
class NA_Syndra_Jng_Galio(Ratings):
pass
class NA_Syndra_Jng_Gangplank(Ratings):
pass
class NA_Syndra_Jng_Garen(Ratings):
pass
class NA_Syndra_Jng_Gnar(Ratings):
pass
class NA_Syndra_Jng_Gragas(Ratings):
pass
class NA_Syndra_Jng_Graves(Ratings):
pass
class NA_Syndra_Jng_Hecarim(Ratings):
pass
class NA_Syndra_Jng_Heimerdinger(Ratings):
pass
class NA_Syndra_Jng_Illaoi(Ratings):
pass
class NA_Syndra_Jng_Irelia(Ratings):
pass
class NA_Syndra_Jng_Ivern(Ratings):
pass
class NA_Syndra_Jng_Janna(Ratings):
pass
class NA_Syndra_Jng_JarvanIV(Ratings):
pass
class NA_Syndra_Jng_Jax(Ratings):
pass
class NA_Syndra_Jng_Jayce(Ratings):
pass
class NA_Syndra_Jng_Jhin(Ratings):
pass
class NA_Syndra_Jng_Jinx(Ratings):
pass
class NA_Syndra_Jng_Kalista(Ratings):
pass
class NA_Syndra_Jng_Karma(Ratings):
pass
class NA_Syndra_Jng_Karthus(Ratings):
pass
class NA_Syndra_Jng_Kassadin(Ratings):
pass
class NA_Syndra_Jng_Katarina(Ratings):
pass
class NA_Syndra_Jng_Kayle(Ratings):
pass
class NA_Syndra_Jng_Kayn(Ratings):
pass
class NA_Syndra_Jng_Kennen(Ratings):
pass
class NA_Syndra_Jng_Khazix(Ratings):
pass
class NA_Syndra_Jng_Kindred(Ratings):
pass
class NA_Syndra_Jng_Kled(Ratings):
pass
class NA_Syndra_Jng_KogMaw(Ratings):
pass
class NA_Syndra_Jng_Leblanc(Ratings):
pass
class NA_Syndra_Jng_LeeSin(Ratings):
pass
class NA_Syndra_Jng_Leona(Ratings):
pass
class NA_Syndra_Jng_Lissandra(Ratings):
pass
class NA_Syndra_Jng_Lucian(Ratings):
pass
class NA_Syndra_Jng_Lulu(Ratings):
pass
class NA_Syndra_Jng_Lux(Ratings):
pass
class NA_Syndra_Jng_Malphite(Ratings):
pass
class NA_Syndra_Jng_Malzahar(Ratings):
pass
class NA_Syndra_Jng_Maokai(Ratings):
pass
class NA_Syndra_Jng_MasterYi(Ratings):
pass
class NA_Syndra_Jng_MissFortune(Ratings):
pass
class NA_Syndra_Jng_MonkeyKing(Ratings):
pass
class NA_Syndra_Jng_Mordekaiser(Ratings):
pass
class NA_Syndra_Jng_Morgana(Ratings):
pass
class NA_Syndra_Jng_Nami(Ratings):
pass
class NA_Syndra_Jng_Nasus(Ratings):
pass
class NA_Syndra_Jng_Nautilus(Ratings):
pass
class NA_Syndra_Jng_Nidalee(Ratings):
pass
class NA_Syndra_Jng_Nocturne(Ratings):
pass
class NA_Syndra_Jng_Nunu(Ratings):
pass
class NA_Syndra_Jng_Olaf(Ratings):
pass
class NA_Syndra_Jng_Orianna(Ratings):
pass
class NA_Syndra_Jng_Ornn(Ratings):
pass
class NA_Syndra_Jng_Pantheon(Ratings):
pass
class NA_Syndra_Jng_Poppy(Ratings):
pass
class NA_Syndra_Jng_Quinn(Ratings):
pass
class NA_Syndra_Jng_Rakan(Ratings):
pass
class NA_Syndra_Jng_Rammus(Ratings):
pass
class NA_Syndra_Jng_RekSai(Ratings):
pass
class NA_Syndra_Jng_Renekton(Ratings):
pass
class NA_Syndra_Jng_Rengar(Ratings):
pass
class NA_Syndra_Jng_Riven(Ratings):
pass
class NA_Syndra_Jng_Rumble(Ratings):
pass
class NA_Syndra_Jng_Ryze(Ratings):
pass
class NA_Syndra_Jng_Sejuani(Ratings):
pass
class NA_Syndra_Jng_Shaco(Ratings):
pass
class NA_Syndra_Jng_Shen(Ratings):
pass
class NA_Syndra_Jng_Shyvana(Ratings):
pass
class NA_Syndra_Jng_Singed(Ratings):
pass
class NA_Syndra_Jng_Sion(Ratings):
pass
class NA_Syndra_Jng_Sivir(Ratings):
pass
class NA_Syndra_Jng_Skarner(Ratings):
pass
class NA_Syndra_Jng_Sona(Ratings):
pass
class NA_Syndra_Jng_Soraka(Ratings):
pass
class NA_Syndra_Jng_Swain(Ratings):
pass
class NA_Syndra_Jng_Syndra(Ratings):
pass
class NA_Syndra_Jng_TahmKench(Ratings):
pass
class NA_Syndra_Jng_Taliyah(Ratings):
pass
class NA_Syndra_Jng_Talon(Ratings):
pass
class NA_Syndra_Jng_Taric(Ratings):
pass
class NA_Syndra_Jng_Teemo(Ratings):
pass
class NA_Syndra_Jng_Thresh(Ratings):
pass
class NA_Syndra_Jng_Tristana(Ratings):
pass
class NA_Syndra_Jng_Trundle(Ratings):
pass
class NA_Syndra_Jng_Tryndamere(Ratings):
pass
class NA_Syndra_Jng_TwistedFate(Ratings):
pass
class NA_Syndra_Jng_Twitch(Ratings):
pass
class NA_Syndra_Jng_Udyr(Ratings):
pass
class NA_Syndra_Jng_Urgot(Ratings):
pass
class NA_Syndra_Jng_Varus(Ratings):
pass
class NA_Syndra_Jng_Vayne(Ratings):
pass
class NA_Syndra_Jng_Veigar(Ratings):
pass
class NA_Syndra_Jng_Velkoz(Ratings):
pass
class NA_Syndra_Jng_Vi(Ratings):
pass
class NA_Syndra_Jng_Viktor(Ratings):
pass
class NA_Syndra_Jng_Vladimir(Ratings):
pass
class NA_Syndra_Jng_Volibear(Ratings):
pass
class NA_Syndra_Jng_Warwick(Ratings):
pass
class NA_Syndra_Jng_Xayah(Ratings):
pass
class NA_Syndra_Jng_Xerath(Ratings):
pass
class NA_Syndra_Jng_XinZhao(Ratings):
pass
class NA_Syndra_Jng_Yasuo(Ratings):
pass
class NA_Syndra_Jng_Yorick(Ratings):
pass
class NA_Syndra_Jng_Zac(Ratings):
pass
class NA_Syndra_Jng_Zed(Ratings):
pass
class NA_Syndra_Jng_Ziggs(Ratings):
pass
class NA_Syndra_Jng_Zilean(Ratings):
pass
class NA_Syndra_Jng_Zyra(Ratings):
pass
| 15.695444 | 46 | 0.766692 | from getratings.models.ratings import Ratings
class NA_Syndra_Jng_Aatrox(Ratings):
pass
class NA_Syndra_Jng_Ahri(Ratings):
pass
class NA_Syndra_Jng_Akali(Ratings):
pass
class NA_Syndra_Jng_Alistar(Ratings):
pass
class NA_Syndra_Jng_Amumu(Ratings):
pass
class NA_Syndra_Jng_Anivia(Ratings):
pass
class NA_Syndra_Jng_Annie(Ratings):
pass
class NA_Syndra_Jng_Ashe(Ratings):
pass
class NA_Syndra_Jng_AurelionSol(Ratings):
pass
class NA_Syndra_Jng_Azir(Ratings):
pass
class NA_Syndra_Jng_Bard(Ratings):
pass
class NA_Syndra_Jng_Blitzcrank(Ratings):
pass
class NA_Syndra_Jng_Brand(Ratings):
pass
class NA_Syndra_Jng_Braum(Ratings):
pass
class NA_Syndra_Jng_Caitlyn(Ratings):
pass
class NA_Syndra_Jng_Camille(Ratings):
pass
class NA_Syndra_Jng_Cassiopeia(Ratings):
pass
class NA_Syndra_Jng_Chogath(Ratings):
pass
class NA_Syndra_Jng_Corki(Ratings):
pass
class NA_Syndra_Jng_Darius(Ratings):
pass
class NA_Syndra_Jng_Diana(Ratings):
pass
class NA_Syndra_Jng_Draven(Ratings):
pass
class NA_Syndra_Jng_DrMundo(Ratings):
pass
class NA_Syndra_Jng_Ekko(Ratings):
pass
class NA_Syndra_Jng_Elise(Ratings):
pass
class NA_Syndra_Jng_Evelynn(Ratings):
pass
class NA_Syndra_Jng_Ezreal(Ratings):
pass
class NA_Syndra_Jng_Fiddlesticks(Ratings):
pass
class NA_Syndra_Jng_Fiora(Ratings):
pass
class NA_Syndra_Jng_Fizz(Ratings):
pass
class NA_Syndra_Jng_Galio(Ratings):
pass
class NA_Syndra_Jng_Gangplank(Ratings):
pass
class NA_Syndra_Jng_Garen(Ratings):
pass
class NA_Syndra_Jng_Gnar(Ratings):
pass
class NA_Syndra_Jng_Gragas(Ratings):
pass
class NA_Syndra_Jng_Graves(Ratings):
pass
class NA_Syndra_Jng_Hecarim(Ratings):
pass
class NA_Syndra_Jng_Heimerdinger(Ratings):
pass
class NA_Syndra_Jng_Illaoi(Ratings):
pass
class NA_Syndra_Jng_Irelia(Ratings):
pass
class NA_Syndra_Jng_Ivern(Ratings):
pass
class NA_Syndra_Jng_Janna(Ratings):
pass
class NA_Syndra_Jng_JarvanIV(Ratings):
pass
class NA_Syndra_Jng_Jax(Ratings):
pass
class NA_Syndra_Jng_Jayce(Ratings):
pass
class NA_Syndra_Jng_Jhin(Ratings):
pass
class NA_Syndra_Jng_Jinx(Ratings):
pass
class NA_Syndra_Jng_Kalista(Ratings):
pass
class NA_Syndra_Jng_Karma(Ratings):
pass
class NA_Syndra_Jng_Karthus(Ratings):
pass
class NA_Syndra_Jng_Kassadin(Ratings):
pass
class NA_Syndra_Jng_Katarina(Ratings):
pass
class NA_Syndra_Jng_Kayle(Ratings):
pass
class NA_Syndra_Jng_Kayn(Ratings):
pass
class NA_Syndra_Jng_Kennen(Ratings):
pass
class NA_Syndra_Jng_Khazix(Ratings):
pass
class NA_Syndra_Jng_Kindred(Ratings):
pass
class NA_Syndra_Jng_Kled(Ratings):
pass
class NA_Syndra_Jng_KogMaw(Ratings):
pass
class NA_Syndra_Jng_Leblanc(Ratings):
pass
class NA_Syndra_Jng_LeeSin(Ratings):
pass
class NA_Syndra_Jng_Leona(Ratings):
pass
class NA_Syndra_Jng_Lissandra(Ratings):
pass
class NA_Syndra_Jng_Lucian(Ratings):
pass
class NA_Syndra_Jng_Lulu(Ratings):
pass
class NA_Syndra_Jng_Lux(Ratings):
pass
class NA_Syndra_Jng_Malphite(Ratings):
pass
class NA_Syndra_Jng_Malzahar(Ratings):
pass
class NA_Syndra_Jng_Maokai(Ratings):
pass
class NA_Syndra_Jng_MasterYi(Ratings):
pass
class NA_Syndra_Jng_MissFortune(Ratings):
pass
class NA_Syndra_Jng_MonkeyKing(Ratings):
pass
class NA_Syndra_Jng_Mordekaiser(Ratings):
pass
class NA_Syndra_Jng_Morgana(Ratings):
pass
class NA_Syndra_Jng_Nami(Ratings):
pass
class NA_Syndra_Jng_Nasus(Ratings):
pass
class NA_Syndra_Jng_Nautilus(Ratings):
pass
class NA_Syndra_Jng_Nidalee(Ratings):
pass
class NA_Syndra_Jng_Nocturne(Ratings):
pass
class NA_Syndra_Jng_Nunu(Ratings):
pass
class NA_Syndra_Jng_Olaf(Ratings):
pass
class NA_Syndra_Jng_Orianna(Ratings):
pass
class NA_Syndra_Jng_Ornn(Ratings):
pass
class NA_Syndra_Jng_Pantheon(Ratings):
pass
class NA_Syndra_Jng_Poppy(Ratings):
pass
class NA_Syndra_Jng_Quinn(Ratings):
pass
class NA_Syndra_Jng_Rakan(Ratings):
pass
class NA_Syndra_Jng_Rammus(Ratings):
pass
class NA_Syndra_Jng_RekSai(Ratings):
pass
class NA_Syndra_Jng_Renekton(Ratings):
pass
class NA_Syndra_Jng_Rengar(Ratings):
pass
class NA_Syndra_Jng_Riven(Ratings):
pass
class NA_Syndra_Jng_Rumble(Ratings):
pass
class NA_Syndra_Jng_Ryze(Ratings):
pass
class NA_Syndra_Jng_Sejuani(Ratings):
pass
class NA_Syndra_Jng_Shaco(Ratings):
pass
class NA_Syndra_Jng_Shen(Ratings):
pass
class NA_Syndra_Jng_Shyvana(Ratings):
pass
class NA_Syndra_Jng_Singed(Ratings):
pass
class NA_Syndra_Jng_Sion(Ratings):
pass
class NA_Syndra_Jng_Sivir(Ratings):
pass
class NA_Syndra_Jng_Skarner(Ratings):
pass
class NA_Syndra_Jng_Sona(Ratings):
pass
class NA_Syndra_Jng_Soraka(Ratings):
pass
class NA_Syndra_Jng_Swain(Ratings):
pass
class NA_Syndra_Jng_Syndra(Ratings):
pass
class NA_Syndra_Jng_TahmKench(Ratings):
pass
class NA_Syndra_Jng_Taliyah(Ratings):
pass
class NA_Syndra_Jng_Talon(Ratings):
pass
class NA_Syndra_Jng_Taric(Ratings):
pass
class NA_Syndra_Jng_Teemo(Ratings):
pass
class NA_Syndra_Jng_Thresh(Ratings):
pass
class NA_Syndra_Jng_Tristana(Ratings):
pass
class NA_Syndra_Jng_Trundle(Ratings):
pass
class NA_Syndra_Jng_Tryndamere(Ratings):
pass
class NA_Syndra_Jng_TwistedFate(Ratings):
pass
class NA_Syndra_Jng_Twitch(Ratings):
pass
class NA_Syndra_Jng_Udyr(Ratings):
pass
class NA_Syndra_Jng_Urgot(Ratings):
pass
class NA_Syndra_Jng_Varus(Ratings):
pass
class NA_Syndra_Jng_Vayne(Ratings):
pass
class NA_Syndra_Jng_Veigar(Ratings):
pass
class NA_Syndra_Jng_Velkoz(Ratings):
pass
class NA_Syndra_Jng_Vi(Ratings):
pass
class NA_Syndra_Jng_Viktor(Ratings):
pass
class NA_Syndra_Jng_Vladimir(Ratings):
pass
class NA_Syndra_Jng_Volibear(Ratings):
pass
class NA_Syndra_Jng_Warwick(Ratings):
pass
class NA_Syndra_Jng_Xayah(Ratings):
pass
class NA_Syndra_Jng_Xerath(Ratings):
pass
class NA_Syndra_Jng_XinZhao(Ratings):
pass
class NA_Syndra_Jng_Yasuo(Ratings):
pass
class NA_Syndra_Jng_Yorick(Ratings):
pass
class NA_Syndra_Jng_Zac(Ratings):
pass
class NA_Syndra_Jng_Zed(Ratings):
pass
class NA_Syndra_Jng_Ziggs(Ratings):
pass
class NA_Syndra_Jng_Zilean(Ratings):
pass
class NA_Syndra_Jng_Zyra(Ratings):
pass
| true | true |
f71a4d115d47444e362a89b60f0c30a6669b0ce0 | 606 | py | Python | setup.py | donno2048/BS | ef6539a75770031da5838d1ecdeb83e49e63cf7e | [
"MIT"
] | null | null | null | setup.py | donno2048/BS | ef6539a75770031da5838d1ecdeb83e49e63cf7e | [
"MIT"
] | null | null | null | setup.py | donno2048/BS | ef6539a75770031da5838d1ecdeb83e49e63cf7e | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
setup(
name='backboard',
version='1.0.3',
description='Background noises for your keyboard typing',
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
url='https://github.com/donno2048/BS',
packages=find_packages(),
license='MIT',
author='Elisha Hollander',
classifiers=['Programming Language :: Python :: 3'],
install_requires=['pygame>=1.9.6','keyboard>=0.13.5','numpy>=1.20.3','scipy>=1.6.3'],
entry_points={ 'console_scripts': [ 'backboard=backboard.__main__:main' ] }
)
| 37.875 | 89 | 0.684818 | from setuptools import setup, find_packages
setup(
name='backboard',
version='1.0.3',
description='Background noises for your keyboard typing',
long_description=open('README.md').read(),
long_description_content_type="text/markdown",
url='https://github.com/donno2048/BS',
packages=find_packages(),
license='MIT',
author='Elisha Hollander',
classifiers=['Programming Language :: Python :: 3'],
install_requires=['pygame>=1.9.6','keyboard>=0.13.5','numpy>=1.20.3','scipy>=1.6.3'],
entry_points={ 'console_scripts': [ 'backboard=backboard.__main__:main' ] }
)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.